author     Allan Sandfeld Jensen <allan.jensen@qt.io>  2022-02-02 12:21:57 +0100
committer  Allan Sandfeld Jensen <allan.jensen@qt.io>  2022-02-12 08:13:00 +0000
commit     606d85f2a5386472314d39923da28c70c60dc8e7 (patch)
tree       a8f4d7bf997f349f45605e6058259fba0630e4d7 /chromium/media
parent     5786336dda477d04fb98483dca1a5426eebde2d7 (diff)
download   qtwebengine-chromium-606d85f2a5386472314d39923da28c70c60dc8e7.tar.gz
BASELINE: Update Chromium to 96.0.4664.181

Change-Id: I762cd1da89d73aa6313b4a753fe126c34833f046
Reviewed-by: Allan Sandfeld Jensen <allan.jensen@qt.io>
Diffstat (limited to 'chromium/media')
-rw-r--r--chromium/media/BUILD.gn5
-rw-r--r--chromium/media/COMMON_METADATA3
-rw-r--r--chromium/media/DIR_METADATA4
-rw-r--r--chromium/media/OWNERS2
-rw-r--r--chromium/media/audio/agc_audio_stream.h5
-rw-r--r--chromium/media/audio/alive_checker.h5
-rw-r--r--chromium/media/audio/alive_checker_unittest.cc48
-rw-r--r--chromium/media/audio/alsa/alsa_input.cc4
-rw-r--r--chromium/media/audio/alsa/alsa_input.h5
-rw-r--r--chromium/media/audio/alsa/alsa_output.cc6
-rw-r--r--chromium/media/audio/alsa/alsa_output.h5
-rw-r--r--chromium/media/audio/alsa/alsa_wrapper.h6
-rw-r--r--chromium/media/audio/alsa/audio_manager_alsa.h6
-rw-r--r--chromium/media/audio/alsa/mock_alsa_wrapper.h6
-rw-r--r--chromium/media/audio/android/aaudio_output.cc38
-rw-r--r--chromium/media/audio/android/aaudio_output.h6
-rw-r--r--chromium/media/audio/android/audio_android_unittest.cc37
-rw-r--r--chromium/media/audio/android/audio_manager_android.cc2
-rw-r--r--chromium/media/audio/android/audio_manager_android.h6
-rw-r--r--chromium/media/audio/android/audio_track_output_stream.h6
-rw-r--r--chromium/media/audio/android/opensles_input.cc4
-rw-r--r--chromium/media/audio/android/opensles_input.h5
-rw-r--r--chromium/media/audio/android/opensles_output.cc7
-rw-r--r--chromium/media/audio/android/opensles_output.h5
-rw-r--r--chromium/media/audio/audio_debug_file_writer.h5
-rw-r--r--chromium/media/audio/audio_debug_recording_helper.h6
-rw-r--r--chromium/media/audio/audio_debug_recording_helper_unittest.cc22
-rw-r--r--chromium/media/audio/audio_debug_recording_manager.h6
-rw-r--r--chromium/media/audio/audio_debug_recording_manager_unittest.cc22
-rw-r--r--chromium/media/audio/audio_debug_recording_session.h7
-rw-r--r--chromium/media/audio/audio_debug_recording_session_impl.h9
-rw-r--r--chromium/media/audio/audio_debug_recording_test.h7
-rw-r--r--chromium/media/audio/audio_encoders_unittest.cc9
-rw-r--r--chromium/media/audio/audio_input_device.cc21
-rw-r--r--chromium/media/audio/audio_input_stream_data_interceptor.h7
-rw-r--r--chromium/media/audio/audio_input_unittest.cc6
-rw-r--r--chromium/media/audio/audio_manager.cc6
-rw-r--r--chromium/media/audio/audio_manager.h4
-rw-r--r--chromium/media/audio/audio_manager_base.cc3
-rw-r--r--chromium/media/audio/audio_manager_base.h6
-rw-r--r--chromium/media/audio/audio_manager_unittest.cc5
-rw-r--r--chromium/media/audio/audio_output_device_thread_callback.cc6
-rw-r--r--chromium/media/audio/audio_output_device_thread_callback.h8
-rw-r--r--chromium/media/audio/audio_output_device_unittest.cc9
-rw-r--r--chromium/media/audio/audio_output_dispatcher.h6
-rw-r--r--chromium/media/audio/audio_output_dispatcher_impl.h6
-rw-r--r--chromium/media/audio/audio_output_proxy_unittest.cc9
-rw-r--r--chromium/media/audio/audio_output_resampler.cc6
-rw-r--r--chromium/media/audio/audio_output_resampler.h5
-rw-r--r--chromium/media/audio/audio_system_helper.h6
-rw-r--r--chromium/media/audio/audio_system_test_util.h7
-rw-r--r--chromium/media/audio/audio_thread_hang_monitor.cc5
-rw-r--r--chromium/media/audio/audio_thread_hang_monitor.h5
-rw-r--r--chromium/media/audio/audio_thread_hang_monitor_unittest.cc24
-rw-r--r--chromium/media/audio/audio_thread_impl.h5
-rw-r--r--chromium/media/audio/cras/audio_manager_chromeos.h6
-rw-r--r--chromium/media/audio/cras/audio_manager_cras.h6
-rw-r--r--chromium/media/audio/cras/audio_manager_cras_base.h7
-rw-r--r--chromium/media/audio/cras/cras_input.h5
-rw-r--r--chromium/media/audio/cras/cras_unified.h5
-rw-r--r--chromium/media/audio/fake_audio_log_factory.h7
-rw-r--r--chromium/media/audio/fake_audio_manager.h7
-rw-r--r--chromium/media/audio/fuchsia/DIR_METADATA2
-rw-r--r--chromium/media/audio/fuchsia/audio_manager_fuchsia.h7
-rw-r--r--chromium/media/audio/fuchsia/audio_output_stream_fuchsia.cc2
-rw-r--r--chromium/media/audio/linux/audio_manager_linux.cc10
-rw-r--r--chromium/media/audio/mac/audio_auhal_mac.cc3
-rw-r--r--chromium/media/audio/mac/audio_auhal_mac.h6
-rw-r--r--chromium/media/audio/mac/audio_auhal_mac_unittest.cc6
-rw-r--r--chromium/media/audio/mac/audio_device_listener_mac.h6
-rw-r--r--chromium/media/audio/mac/audio_device_listener_mac_unittest.cc6
-rw-r--r--chromium/media/audio/mac/audio_input_mac.cc11
-rw-r--r--chromium/media/audio/mac/audio_input_mac.h7
-rw-r--r--chromium/media/audio/mac/audio_low_latency_input_mac.cc18
-rw-r--r--chromium/media/audio/mac/audio_low_latency_input_mac.h6
-rw-r--r--chromium/media/audio/mac/audio_manager_mac.cc20
-rw-r--r--chromium/media/audio/mac/audio_manager_mac.h6
-rw-r--r--chromium/media/audio/mac/scoped_audio_unit.h6
-rw-r--r--chromium/media/audio/mock_audio_debug_recording_manager.h8
-rw-r--r--chromium/media/audio/mock_audio_manager.h6
-rw-r--r--chromium/media/audio/mock_audio_source_callback.h7
-rw-r--r--chromium/media/audio/power_observer_helper.h5
-rw-r--r--chromium/media/audio/pulse/audio_manager_pulse.h6
-rw-r--r--chromium/media/audio/pulse/pulse_input.cc2
-rw-r--r--chromium/media/audio/pulse/pulse_input.h5
-rw-r--r--chromium/media/audio/pulse/pulse_output.h5
-rw-r--r--chromium/media/audio/pulse/pulse_util.cc9
-rw-r--r--chromium/media/audio/pulse/pulse_util.h4
-rw-r--r--chromium/media/audio/simple_sources.cc2
-rw-r--r--chromium/media/audio/test_audio_thread.h5
-rw-r--r--chromium/media/audio/wav_audio_handler.cc3
-rw-r--r--chromium/media/audio/wav_audio_handler.h5
-rw-r--r--chromium/media/audio/win/audio_device_listener_win.h9
-rw-r--r--chromium/media/audio/win/audio_device_listener_win_unittest.cc10
-rw-r--r--chromium/media/audio/win/audio_low_latency_input_win.cc27
-rw-r--r--chromium/media/audio/win/audio_low_latency_input_win.h5
-rw-r--r--chromium/media/audio/win/audio_low_latency_input_win_unittest.cc9
-rw-r--r--chromium/media/audio/win/audio_low_latency_output_win.cc12
-rw-r--r--chromium/media/audio/win/audio_low_latency_output_win.h5
-rw-r--r--chromium/media/audio/win/audio_low_latency_output_win_unittest.cc6
-rw-r--r--chromium/media/audio/win/audio_manager_win.h6
-rw-r--r--chromium/media/audio/win/audio_output_win_unittest.cc11
-rw-r--r--chromium/media/audio/win/core_audio_util_win.cc2
-rw-r--r--chromium/media/audio/win/waveout_output_win.cc6
-rw-r--r--chromium/media/audio/win/waveout_output_win.h7
-rw-r--r--chromium/media/base/BUILD.gn4
-rw-r--r--chromium/media/base/android/android_cdm_factory.h6
-rw-r--r--chromium/media/base/android/android_overlay.h5
-rw-r--r--chromium/media/base/android/jni_hdr_metadata.h6
-rw-r--r--chromium/media/base/android/media_codec_bridge.h6
-rw-r--r--chromium/media/base/android/media_codec_bridge_impl.cc12
-rw-r--r--chromium/media/base/android/media_codec_bridge_impl.h16
-rw-r--r--chromium/media/base/android/media_codec_bridge_impl_unittest.cc58
-rw-r--r--chromium/media/base/android/media_codec_loop.cc7
-rw-r--r--chromium/media/base/android/media_codec_loop_unittest.cc13
-rw-r--r--chromium/media/base/android/media_codec_util.cc30
-rw-r--r--chromium/media/base/android/media_codec_util_unittest.cc5
-rw-r--r--chromium/media/base/android/media_crypto_context.h7
-rw-r--r--chromium/media/base/android/media_crypto_context_impl.h5
-rw-r--r--chromium/media/base/android/media_drm_bridge_client.h6
-rw-r--r--chromium/media/base/android/media_drm_bridge_delegate.h7
-rw-r--r--chromium/media/base/android/media_drm_bridge_factory.h6
-rw-r--r--chromium/media/base/android/media_drm_storage_bridge.h6
-rw-r--r--chromium/media/base/android/media_player_bridge.cc6
-rw-r--r--chromium/media/base/android/media_player_bridge.h6
-rw-r--r--chromium/media/base/android/media_player_bridge_unittest.cc2
-rw-r--r--chromium/media/base/android/media_player_listener.h6
-rw-r--r--chromium/media/base/android/media_server_crash_listener.h5
-rw-r--r--chromium/media/base/android/media_service_throttler.cc17
-rw-r--r--chromium/media/base/android/media_service_throttler_unittest.cc31
-rw-r--r--chromium/media/base/android/mock_android_overlay.h6
-rw-r--r--chromium/media/base/android/mock_media_codec_bridge.h6
-rw-r--r--chromium/media/base/android/mock_media_crypto_context.h5
-rw-r--r--chromium/media/base/android/test_destruction_observable.h10
-rw-r--r--chromium/media/base/audio_block_fifo.h6
-rw-r--r--chromium/media/base/audio_block_fifo_unittest.cc11
-rw-r--r--chromium/media/base/audio_buffer.cc8
-rw-r--r--chromium/media/base/audio_buffer_converter_unittest.cc2
-rw-r--r--chromium/media/base/audio_buffer_queue.h6
-rw-r--r--chromium/media/base/audio_buffer_unittest.cc20
-rw-r--r--chromium/media/base/audio_bus.h5
-rw-r--r--chromium/media/base/audio_bus_unittest.cc6
-rw-r--r--chromium/media/base/audio_codecs.cc64
-rw-r--r--chromium/media/base/audio_codecs.h46
-rw-r--r--chromium/media/base/audio_decoder.h6
-rw-r--r--chromium/media/base/audio_decoder_config.cc9
-rw-r--r--chromium/media/base/audio_decoder_config.h13
-rw-r--r--chromium/media/base/audio_discard_helper_unittest.cc48
-rw-r--r--chromium/media/base/audio_fifo.h6
-rw-r--r--chromium/media/base/audio_fifo_unittest.cc7
-rw-r--r--chromium/media/base/audio_hash.h6
-rw-r--r--chromium/media/base/audio_hash_unittest.cc5
-rw-r--r--chromium/media/base/audio_latency_unittest.cc77
-rw-r--r--chromium/media/base/audio_parameters.cc2
-rw-r--r--chromium/media/base/audio_power_monitor.h5
-rw-r--r--chromium/media/base/audio_power_monitor_unittest.cc4
-rw-r--r--chromium/media/base/audio_pull_fifo.h6
-rw-r--r--chromium/media/base/audio_pull_fifo_unittest.cc6
-rw-r--r--chromium/media/base/audio_push_fifo.h5
-rw-r--r--chromium/media/base/audio_push_fifo_unittest.cc6
-rw-r--r--chromium/media/base/audio_renderer.h6
-rw-r--r--chromium/media/base/audio_renderer_mixer.cc2
-rw-r--r--chromium/media/base/audio_renderer_mixer.h6
-rw-r--r--chromium/media/base/audio_renderer_mixer_pool.h7
-rw-r--r--chromium/media/base/audio_renderer_mixer_unittest.cc4
-rw-r--r--chromium/media/base/audio_shifter.cc31
-rw-r--r--chromium/media/base/audio_shifter_unittest.cc37
-rw-r--r--chromium/media/base/audio_timestamp_helper.cc6
-rw-r--r--chromium/media/base/audio_timestamp_helper_unittest.cc48
-rw-r--r--chromium/media/base/bit_reader.h6
-rw-r--r--chromium/media/base/bit_reader_core.h6
-rw-r--r--chromium/media/base/bitstream_buffer.h5
-rw-r--r--chromium/media/base/byte_queue.h6
-rw-r--r--chromium/media/base/callback_registry.h18
-rw-r--r--chromium/media/base/cdm_callback_promise.h6
-rw-r--r--chromium/media/base/cdm_context.cc3
-rw-r--r--chromium/media/base/cdm_context.h11
-rw-r--r--chromium/media/base/cdm_factory.h7
-rw-r--r--chromium/media/base/cdm_promise.h16
-rw-r--r--chromium/media/base/cdm_promise_adapter.h5
-rw-r--r--chromium/media/base/cdm_session_tracker.h6
-rw-r--r--chromium/media/base/channel_mixer.h6
-rw-r--r--chromium/media/base/channel_mixing_matrix.h5
-rw-r--r--chromium/media/base/data_buffer_unittest.cc8
-rw-r--r--chromium/media/base/data_source.h7
-rw-r--r--chromium/media/base/decode_status.h9
-rw-r--r--chromium/media/base/decoder_buffer_queue.h6
-rw-r--r--chromium/media/base/decoder_buffer_queue_unittest.cc2
-rw-r--r--chromium/media/base/decoder_factory.h7
-rw-r--r--chromium/media/base/decryptor.h7
-rw-r--r--chromium/media/base/demuxer.h7
-rw-r--r--chromium/media/base/demuxer_memory_limit_cast.cc12
-rw-r--r--chromium/media/base/demuxer_memory_limit_cast_unittest.cc12
-rw-r--r--chromium/media/base/encryption_scheme.cc16
-rw-r--r--chromium/media/base/encryption_scheme.h5
-rw-r--r--chromium/media/base/fake_audio_render_callback.h6
-rw-r--r--chromium/media/base/fake_audio_worker.h6
-rw-r--r--chromium/media/base/fake_audio_worker_unittest.cc17
-rw-r--r--chromium/media/base/fake_demuxer_stream.cc6
-rw-r--r--chromium/media/base/fake_demuxer_stream.h12
-rw-r--r--chromium/media/base/fake_demuxer_stream_unittest.cc7
-rw-r--r--chromium/media/base/fake_single_thread_task_runner.cc4
-rw-r--r--chromium/media/base/fake_text_track_stream.h6
-rw-r--r--chromium/media/base/feedback_signal_accumulator_unittest.cc10
-rw-r--r--chromium/media/base/frame_rate_estimator_unittest.cc4
-rw-r--r--chromium/media/base/ipc/media_param_traits_macros.h4
-rw-r--r--chromium/media/base/key_systems.cc30
-rw-r--r--chromium/media/base/key_systems_unittest.cc1
-rw-r--r--chromium/media/base/loopback_audio_converter.h5
-rw-r--r--chromium/media/base/mac/video_frame_mac_unittests.cc2
-rw-r--r--chromium/media/base/media_content_type.cc2
-rw-r--r--chromium/media/base/media_drm_storage.h7
-rw-r--r--chromium/media/base/media_log.h6
-rw-r--r--chromium/media/base/media_permission.h7
-rw-r--r--chromium/media/base/media_resource.h7
-rw-r--r--chromium/media/base/media_serializers.h51
-rw-r--r--chromium/media/base/media_serializers_unittest.cc2
-rw-r--r--chromium/media/base/media_switches.cc42
-rw-r--r--chromium/media/base/media_switches.h10
-rw-r--r--chromium/media/base/media_tracks.h6
-rw-r--r--chromium/media/base/media_types.cc22
-rw-r--r--chromium/media/base/media_url_demuxer.h6
-rw-r--r--chromium/media/base/media_util.h7
-rw-r--r--chromium/media/base/memory_dump_provider_proxy.h5
-rw-r--r--chromium/media/base/mime_util_internal.cc52
-rw-r--r--chromium/media/base/mime_util_internal.h7
-rw-r--r--chromium/media/base/mime_util_unittest.cc50
-rw-r--r--chromium/media/base/mock_demuxer_host.h7
-rw-r--r--chromium/media/base/mock_filters.h141
-rw-r--r--chromium/media/base/mock_media_log.h6
-rw-r--r--chromium/media/base/moving_average.cc2
-rw-r--r--chromium/media/base/moving_average.h6
-rw-r--r--chromium/media/base/moving_average_unittest.cc30
-rw-r--r--chromium/media/base/multi_channel_resampler.h6
-rw-r--r--chromium/media/base/multi_channel_resampler_unittest.cc7
-rw-r--r--chromium/media/base/null_video_sink.h6
-rw-r--r--chromium/media/base/null_video_sink_unittest.cc12
-rw-r--r--chromium/media/base/offloading_video_encoder.cc3
-rw-r--r--chromium/media/base/pipeline_impl.cc27
-rw-r--r--chromium/media/base/pipeline_impl.h6
-rw-r--r--chromium/media/base/pipeline_impl_unittest.cc111
-rw-r--r--chromium/media/base/reentrancy_checker.h6
-rw-r--r--chromium/media/base/renderer.h6
-rw-r--r--chromium/media/base/renderer_factory.h7
-rw-r--r--chromium/media/base/renderer_factory_selector.h6
-rw-r--r--chromium/media/base/scoped_async_trace.h5
-rw-r--r--chromium/media/base/scopedfd_helper.h1
-rw-r--r--chromium/media/base/seekable_buffer.cc3
-rw-r--r--chromium/media/base/seekable_buffer.h5
-rw-r--r--chromium/media/base/seekable_buffer_unittest.cc6
-rw-r--r--chromium/media/base/serial_runner_unittest.cc6
-rw-r--r--chromium/media/base/silent_sink_suspender.cc11
-rw-r--r--chromium/media/base/silent_sink_suspender.h6
-rw-r--r--chromium/media/base/silent_sink_suspender_unittest.cc16
-rw-r--r--chromium/media/base/simple_watch_timer.cc3
-rw-r--r--chromium/media/base/simple_watch_timer.h6
-rw-r--r--chromium/media/base/sinc_resampler.h6
-rw-r--r--chromium/media/base/sinc_resampler_unittest.cc6
-rw-r--r--chromium/media/base/status.cc82
-rw-r--r--chromium/media/base/status.h419
-rw-r--r--chromium/media/base/status.md178
-rw-r--r--chromium/media/base/status_codes.h261
-rw-r--r--chromium/media/base/status_unittest.cc119
-rw-r--r--chromium/media/base/stream_parser.h7
-rw-r--r--chromium/media/base/stream_parser_buffer.h6
-rw-r--r--chromium/media/base/supported_types.cc116
-rw-r--r--chromium/media/base/supported_types_unittest.cc166
-rw-r--r--chromium/media/base/supported_video_decoder_config_unittest.cc2
-rw-r--r--chromium/media/base/svc_scalability_mode.cc79
-rw-r--r--chromium/media/base/svc_scalability_mode.h54
-rw-r--r--chromium/media/base/test_data_util.cc8
-rw-r--r--chromium/media/base/test_data_util.h8
-rw-r--r--chromium/media/base/test_helpers.cc36
-rw-r--r--chromium/media/base/test_helpers.h28
-rw-r--r--chromium/media/base/text_ranges.h6
-rw-r--r--chromium/media/base/text_ranges_unittest.cc4
-rw-r--r--chromium/media/base/text_renderer_unittest.cc9
-rw-r--r--chromium/media/base/time_delta_interpolator.cc3
-rw-r--r--chromium/media/base/time_delta_interpolator.h6
-rw-r--r--chromium/media/base/time_delta_interpolator_unittest.cc48
-rw-r--r--chromium/media/base/tuneable.cc6
-rw-r--r--chromium/media/base/tuneable_unittest.cc12
-rw-r--r--chromium/media/base/unaligned_shared_memory.h15
-rw-r--r--chromium/media/base/use_after_free_checker.h1
-rw-r--r--chromium/media/base/user_input_monitor.h13
-rw-r--r--chromium/media/base/user_input_monitor_linux.cc6
-rw-r--r--chromium/media/base/user_input_monitor_mac.cc8
-rw-r--r--chromium/media/base/user_input_monitor_win.cc12
-rw-r--r--chromium/media/base/video_codecs.cc65
-rw-r--r--chromium/media/base/video_codecs.h31
-rw-r--r--chromium/media/base/video_color_space_unittest.cc2
-rw-r--r--chromium/media/base/video_decoder_config.cc2
-rw-r--r--chromium/media/base/video_decoder_config.h2
-rw-r--r--chromium/media/base/video_decoder_config_unittest.cc13
-rw-r--r--chromium/media/base/video_encoder.h6
-rw-r--r--chromium/media/base/video_frame.cc59
-rw-r--r--chromium/media/base/video_frame.h7
-rw-r--r--chromium/media/base/video_frame_metadata.cc3
-rw-r--r--chromium/media/base/video_frame_metadata.h13
-rw-r--r--chromium/media/base/video_frame_pool.cc2
-rw-r--r--chromium/media/base/video_frame_pool_unittest.cc12
-rw-r--r--chromium/media/base/video_frame_unittest.cc150
-rw-r--r--chromium/media/base/video_thumbnail_decoder_unittest.cc7
-rw-r--r--chromium/media/base/video_util.cc386
-rw-r--r--chromium/media/base/video_util.h21
-rw-r--r--chromium/media/base/video_util_unittest.cc43
-rw-r--r--chromium/media/base/wall_clock_time_source.cc4
-rw-r--r--chromium/media/base/wall_clock_time_source.h6
-rw-r--r--chromium/media/base/wall_clock_time_source_unittest.cc17
-rw-r--r--chromium/media/base/win/d3d11_mocks.cc3
-rw-r--r--chromium/media/base/win/d3d11_mocks.h13
-rw-r--r--chromium/media/base/win/dcomp_texture_wrapper.h31
-rw-r--r--chromium/media/base/win/dxgi_device_manager.cc2
-rw-r--r--chromium/media/base/win/mf_helpers.cc23
-rw-r--r--chromium/media/base/win/mf_helpers.h13
-rw-r--r--chromium/media/capabilities/in_memory_video_decode_stats_db_impl.h8
-rw-r--r--chromium/media/capabilities/video_decode_stats_db_impl.cc7
-rw-r--r--chromium/media/capabilities/video_decode_stats_db_impl.h5
-rw-r--r--chromium/media/capabilities/video_decode_stats_db_impl_unittest.cc40
-rw-r--r--chromium/media/capture/content/android/screen_capture_machine_android.cc4
-rw-r--r--chromium/media/capture/content/android/screen_capture_machine_android.h7
-rw-r--r--chromium/media/capture/content/android/thread_safe_capture_oracle.cc2
-rw-r--r--chromium/media/capture/content/animated_content_sampler.cc10
-rw-r--r--chromium/media/capture/content/animated_content_sampler_unittest.cc55
-rw-r--r--chromium/media/capture/content/smooth_event_sampler_unittest.cc35
-rw-r--r--chromium/media/capture/content/video_capture_oracle.cc22
-rw-r--r--chromium/media/capture/content/video_capture_oracle.h4
-rw-r--r--chromium/media/capture/content/video_capture_oracle_unittest.cc57
-rw-r--r--chromium/media/capture/mojom/BUILD.gn1
-rw-r--r--chromium/media/capture/video/android/video_capture_device_android.cc5
-rw-r--r--chromium/media/capture/video/android/video_capture_device_factory_android.h8
-rw-r--r--chromium/media/capture/video/chromeos/camera_3a_controller.cc14
-rw-r--r--chromium/media/capture/video/chromeos/camera_app_device_bridge_impl.cc4
-rw-r--r--chromium/media/capture/video/chromeos/camera_app_device_bridge_impl.h6
-rw-r--r--chromium/media/capture/video/chromeos/camera_app_device_impl.cc48
-rw-r--r--chromium/media/capture/video/chromeos/camera_app_device_impl.h18
-rw-r--r--chromium/media/capture/video/chromeos/camera_app_device_provider_impl.h7
-rw-r--r--chromium/media/capture/video/chromeos/camera_device_delegate.cc7
-rw-r--r--chromium/media/capture/video/chromeos/camera_device_delegate_unittest.cc10
-rw-r--r--chromium/media/capture/video/chromeos/camera_hal_delegate.cc3
-rw-r--r--chromium/media/capture/video/chromeos/camera_hal_dispatcher_impl.cc66
-rw-r--r--chromium/media/capture/video/chromeos/camera_hal_dispatcher_impl_unittest.cc13
-rw-r--r--chromium/media/capture/video/chromeos/gpu_memory_buffer_tracker.h6
-rw-r--r--chromium/media/capture/video/chromeos/mock_camera_module.h5
-rw-r--r--chromium/media/capture/video/chromeos/mojom/BUILD.gn17
-rw-r--r--chromium/media/capture/video/chromeos/mojom/camera_app.mojom18
-rw-r--r--chromium/media/capture/video/chromeos/request_manager.cc3
-rw-r--r--chromium/media/capture/video/chromeos/video_capture_device_chromeos_delegate.cc3
-rw-r--r--chromium/media/capture/video/chromeos/video_capture_device_factory_chromeos.h7
-rw-r--r--chromium/media/capture/video/chromeos/video_capture_features_chromeos.cc5
-rw-r--r--chromium/media/capture/video/chromeos/video_capture_features_chromeos.h7
-rw-r--r--chromium/media/capture/video/chromeos/video_capture_jpeg_decoder_impl.h7
-rw-r--r--chromium/media/capture/video/fake_video_capture_device.cc5
-rw-r--r--chromium/media/capture/video/fake_video_capture_device.h6
-rw-r--r--chromium/media/capture/video/file_video_capture_device.cc14
-rw-r--r--chromium/media/capture/video/file_video_capture_device.h5
-rw-r--r--chromium/media/capture/video/file_video_capture_device_factory.cc2
-rw-r--r--chromium/media/capture/video/file_video_capture_device_unittest.cc2
-rw-r--r--chromium/media/capture/video/fuchsia/DIR_METADATA1
-rw-r--r--chromium/media/capture/video/fuchsia/video_capture_device_fuchsia_test.cc3
-rw-r--r--chromium/media/capture/video/linux/fake_v4l2_impl.cc11
-rw-r--r--chromium/media/capture/video/linux/v4l2_capture_delegate.cc5
-rw-r--r--chromium/media/capture/video/linux/v4l2_capture_delegate.h6
-rw-r--r--chromium/media/capture/video/linux/video_capture_device_factory_linux.h7
-rw-r--r--chromium/media/capture/video/mac/gpu_memory_buffer_tracker_mac.h7
-rw-r--r--chromium/media/capture/video/mac/video_capture_device_avfoundation_mac.mm10
-rw-r--r--chromium/media/capture/video/mac/video_capture_device_decklink_mac.h7
-rw-r--r--chromium/media/capture/video/mac/video_capture_device_decklink_mac.mm2
-rw-r--r--chromium/media/capture/video/mac/video_capture_device_factory_mac.h7
-rw-r--r--chromium/media/capture/video/mac/video_capture_device_mac.h6
-rw-r--r--chromium/media/capture/video/mock_gpu_memory_buffer_manager.h7
-rw-r--r--chromium/media/capture/video/shared_memory_buffer_tracker.h7
-rw-r--r--chromium/media/capture/video/video_capture_device_client.h6
-rw-r--r--chromium/media/capture/video/video_capture_device_client_unittest.cc8
-rw-r--r--chromium/media/capture/video/video_capture_device_factory.h8
-rw-r--r--chromium/media/capture/video/video_capture_device_unittest.cc7
-rw-r--r--chromium/media/capture/video/video_capture_feedback.cc4
-rw-r--r--chromium/media/capture/video/video_capture_feedback.h2
-rw-r--r--chromium/media/capture/video/win/gpu_memory_buffer_tracker.h6
-rw-r--r--chromium/media/capture/video/win/sink_input_pin_win.cc2
-rw-r--r--chromium/media/capture/video/win/video_capture_device_factory_win.cc6
-rw-r--r--chromium/media/capture/video/win/video_capture_device_factory_win.h7
-rw-r--r--chromium/media/capture/video/win/video_capture_device_mf_win.cc6
-rw-r--r--chromium/media/capture/video/win/video_capture_device_mf_win_unittest.cc8
-rw-r--r--chromium/media/capture/video/win/video_capture_device_win.cc6
-rw-r--r--chromium/media/cast/BUILD.gn21
-rw-r--r--chromium/media/cast/DEPS1
-rw-r--r--chromium/media/cast/cast_config.cc3
-rw-r--r--chromium/media/cast/cast_config.h10
-rw-r--r--chromium/media/cast/cast_sender_impl.h5
-rw-r--r--chromium/media/cast/common/clock_drift_smoother.cc5
-rw-r--r--chromium/media/cast/common/rtp_time.cc2
-rw-r--r--chromium/media/cast/common/rtp_time_unittest.cc49
-rw-r--r--chromium/media/cast/common/transport_encryption_handler.h7
-rw-r--r--chromium/media/cast/logging/encoding_event_subscriber.h5
-rw-r--r--chromium/media/cast/logging/encoding_event_subscriber_unittest.cc20
-rw-r--r--chromium/media/cast/logging/log_event_dispatcher.h5
-rw-r--r--chromium/media/cast/logging/raw_event_subscriber_bundle.h15
-rw-r--r--chromium/media/cast/logging/receiver_time_offset_estimator_impl.h6
-rw-r--r--chromium/media/cast/logging/receiver_time_offset_estimator_impl_unittest.cc34
-rw-r--r--chromium/media/cast/logging/simple_event_subscriber.h5
-rw-r--r--chromium/media/cast/logging/simple_event_subscriber_unittest.cc2
-rw-r--r--chromium/media/cast/logging/stats_event_subscriber.h4
-rw-r--r--chromium/media/cast/logging/stats_event_subscriber_unittest.cc47
-rw-r--r--chromium/media/cast/net/cast_transport_impl.h5
-rw-r--r--chromium/media/cast/net/cast_transport_impl_unittest.cc51
-rw-r--r--chromium/media/cast/net/pacing/paced_sender.cc2
-rw-r--r--chromium/media/cast/net/pacing/paced_sender.h5
-rw-r--r--chromium/media/cast/net/pacing/paced_sender_unittest.cc35
-rw-r--r--chromium/media/cast/net/rtcp/receiver_rtcp_event_subscriber.h6
-rw-r--r--chromium/media/cast/net/rtcp/receiver_rtcp_event_subscriber_unittest.cc4
-rw-r--r--chromium/media/cast/net/rtcp/receiver_rtcp_session.cc3
-rw-r--r--chromium/media/cast/net/rtcp/receiver_rtcp_session.h5
-rw-r--r--chromium/media/cast/net/rtcp/rtcp_builder.h6
-rw-r--r--chromium/media/cast/net/rtcp/rtcp_builder_unittest.cc29
-rw-r--r--chromium/media/cast/net/rtcp/rtcp_unittest.cc21
-rw-r--r--chromium/media/cast/net/rtcp/rtcp_utility.cc17
-rw-r--r--chromium/media/cast/net/rtcp/rtcp_utility.h6
-rw-r--r--chromium/media/cast/net/rtcp/rtcp_utility_unittest.cc20
-rw-r--r--chromium/media/cast/net/rtcp/sender_rtcp_session.cc6
-rw-r--r--chromium/media/cast/net/rtcp/sender_rtcp_session.h5
-rw-r--r--chromium/media/cast/net/rtp/packet_storage.h6
-rw-r--r--chromium/media/cast/net/rtp/rtp_packetizer_unittest.cc8
-rw-r--r--chromium/media/cast/net/rtp/rtp_parser.h5
-rw-r--r--chromium/media/cast/net/rtp/rtp_sender.cc11
-rw-r--r--chromium/media/cast/net/rtp/rtp_sender.h5
-rw-r--r--chromium/media/cast/net/udp_packet_pipe.h10
-rw-r--r--chromium/media/cast/net/udp_packet_pipe_unittest.cc6
-rw-r--r--chromium/media/cast/net/udp_transport_impl.h6
-rw-r--r--chromium/media/cast/net/udp_transport_unittest.cc6
-rw-r--r--chromium/media/cast/sender/audio_encoder.cc16
-rw-r--r--chromium/media/cast/sender/audio_encoder.h6
-rw-r--r--chromium/media/cast/sender/audio_encoder_unittest.cc16
-rw-r--r--chromium/media/cast/sender/audio_sender.h5
-rw-r--r--chromium/media/cast/sender/audio_sender_unittest.cc6
-rw-r--r--chromium/media/cast/sender/av1_encoder.cc377
-rw-r--r--chromium/media/cast/sender/av1_encoder.h90
-rw-r--r--chromium/media/cast/sender/congestion_control.cc18
-rw-r--r--chromium/media/cast/sender/congestion_control_unittest.cc52
-rw-r--r--chromium/media/cast/sender/external_video_encoder.cc4
-rw-r--r--chromium/media/cast/sender/external_video_encoder.h18
-rw-r--r--chromium/media/cast/sender/fake_software_video_encoder.cc2
-rw-r--r--chromium/media/cast/sender/fake_software_video_encoder.h2
-rw-r--r--chromium/media/cast/sender/fake_video_encode_accelerator_factory.h8
-rw-r--r--chromium/media/cast/sender/frame_sender.cc8
-rw-r--r--chromium/media/cast/sender/frame_sender.h6
-rw-r--r--chromium/media/cast/sender/h264_vt_encoder.h6
-rw-r--r--chromium/media/cast/sender/h264_vt_encoder_unittest.cc7
-rw-r--r--chromium/media/cast/sender/performance_metrics_overlay.cc4
-rw-r--r--chromium/media/cast/sender/size_adaptable_video_encoder_base.h6
-rw-r--r--chromium/media/cast/sender/software_video_encoder.h2
-rw-r--r--chromium/media/cast/sender/video_encoder_impl.cc23
-rw-r--r--chromium/media/cast/sender/video_encoder_impl.h5
-rw-r--r--chromium/media/cast/sender/video_encoder_unittest.cc4
-rw-r--r--chromium/media/cast/sender/video_sender.cc15
-rw-r--r--chromium/media/cast/sender/video_sender.h5
-rw-r--r--chromium/media/cast/sender/video_sender_unittest.cc18
-rw-r--r--chromium/media/cast/sender/vpx_encoder.cc (renamed from chromium/media/cast/sender/vp8_encoder.cc)68
-rw-r--r--chromium/media/cast/sender/vpx_encoder.h (renamed from chromium/media/cast/sender/vp8_encoder.h)27
-rw-r--r--chromium/media/cast/sender/vpx_quantizer_parser.cc (renamed from chromium/media/cast/sender/vp8_quantizer_parser.cc)47
-rw-r--r--chromium/media/cast/sender/vpx_quantizer_parser.h (renamed from chromium/media/cast/sender/vp8_quantizer_parser.h)8
-rw-r--r--chromium/media/cast/sender/vpx_quantizer_parser_unittest.cc (renamed from chromium/media/cast/sender/vp8_quantizer_parser_unittest.cc)38
-rw-r--r--chromium/media/cdm/BUILD.gn8
-rw-r--r--chromium/media/cdm/aes_cbc_crypto.h6
-rw-r--r--chromium/media/cdm/aes_decryptor.cc7
-rw-r--r--chromium/media/cdm/aes_decryptor.h6
-rw-r--r--chromium/media/cdm/cbcs_decryptor_unittest.cc4
-rw-r--r--chromium/media/cdm/cdm_adapter.cc6
-rw-r--r--chromium/media/cdm/cdm_adapter_factory.h6
-rw-r--r--chromium/media/cdm/cdm_adapter_unittest.cc6
-rw-r--r--chromium/media/cdm/cdm_allocator.h6
-rw-r--r--chromium/media/cdm/cdm_auxiliary_helper.cc9
-rw-r--r--chromium/media/cdm/cdm_auxiliary_helper.h9
-rw-r--r--chromium/media/cdm/cdm_context_ref_impl.h6
-rw-r--r--chromium/media/cdm/cdm_document_service.h11
-rw-r--r--chromium/media/cdm/cdm_helpers.h19
-rw-r--r--chromium/media/cdm/cdm_host_files.h6
-rw-r--r--chromium/media/cdm/cdm_module.h5
-rw-r--r--chromium/media/cdm/cdm_paths.cc9
-rw-r--r--chromium/media/cdm/cdm_paths.h9
-rw-r--r--chromium/media/cdm/cdm_preference_data.cc18
-rw-r--r--chromium/media/cdm/cdm_preference_data.h30
-rw-r--r--chromium/media/cdm/cdm_type_conversion.cc24
-rw-r--r--chromium/media/cdm/cdm_wrapper.h11
-rw-r--r--chromium/media/cdm/cenc_decryptor_unittest.cc4
-rw-r--r--chromium/media/cdm/default_cdm_factory.h7
-rw-r--r--chromium/media/cdm/external_clear_key_test_helper.h7
-rw-r--r--chromium/media/cdm/json_web_key.cc6
-rw-r--r--chromium/media/cdm/library_cdm/cdm_host_proxy_impl.h6
-rw-r--r--chromium/media/cdm/library_cdm/clear_key_cdm/cdm_file_adapter.h6
-rw-r--r--chromium/media/cdm/library_cdm/clear_key_cdm/cdm_file_io_test.h12
-rw-r--r--chromium/media/cdm/library_cdm/clear_key_cdm/cdm_video_decoder.cc17
-rw-r--r--chromium/media/cdm/library_cdm/clear_key_cdm/clear_key_cdm.cc5
-rw-r--r--chromium/media/cdm/library_cdm/clear_key_cdm/clear_key_cdm.h6
-rw-r--r--chromium/media/cdm/library_cdm/clear_key_cdm/clear_key_persistent_session_cdm.cc14
-rw-r--r--chromium/media/cdm/library_cdm/clear_key_cdm/ffmpeg_cdm_audio_decoder.cc3
-rw-r--r--chromium/media/cdm/library_cdm/clear_key_cdm/ffmpeg_cdm_audio_decoder.h6
-rw-r--r--chromium/media/cdm/media_foundation_cdm_data.cc21
-rw-r--r--chromium/media/cdm/media_foundation_cdm_data.h35
-rw-r--r--chromium/media/cdm/mock_helpers.h10
-rw-r--r--chromium/media/cdm/output_protection.h7
-rw-r--r--chromium/media/cdm/simple_cdm_allocator.cc9
-rw-r--r--chromium/media/cdm/simple_cdm_allocator.h7
-rw-r--r--chromium/media/cdm/simple_cdm_allocator_unittest.cc7
-rw-r--r--chromium/media/cdm/supported_audio_codecs.cc4
-rw-r--r--chromium/media/cdm/win/media_foundation_cdm_factory.cc24
-rw-r--r--chromium/media/cdm/win/media_foundation_cdm_factory.h8
-rw-r--r--chromium/media/cdm/win/media_foundation_cdm_factory_unittest.cc40
-rw-r--r--chromium/media/device_monitors/device_monitor_mac.h6
-rw-r--r--chromium/media/device_monitors/device_monitor_mac.mm13
-rw-r--r--chromium/media/device_monitors/device_monitor_udev.cc6
-rw-r--r--chromium/media/device_monitors/device_monitor_udev.h6
-rw-r--r--chromium/media/device_monitors/system_message_window_win.h5
-rw-r--r--chromium/media/ffmpeg/ffmpeg_common.cc127
-rw-r--r--chromium/media/ffmpeg/ffmpeg_common_unittest.cc6
-rw-r--r--chromium/media/ffmpeg/ffmpeg_decoding_loop.h6
-rw-r--r--chromium/media/ffmpeg/ffmpeg_regression_tests.cc2
-rw-r--r--chromium/media/filters/android/media_codec_audio_decoder.cc23
-rw-r--r--chromium/media/filters/android/media_codec_audio_decoder.h6
-rw-r--r--chromium/media/filters/android/video_frame_extractor.h6
-rw-r--r--chromium/media/filters/android/video_frame_extractor_unittest.cc8
-rw-r--r--chromium/media/filters/audio_clock.cc9
-rw-r--r--chromium/media/filters/audio_clock.h12
-rw-r--r--chromium/media/filters/audio_clock_unittest.cc23
-rw-r--r--chromium/media/filters/audio_decoder_stream_unittest.cc10
-rw-r--r--chromium/media/filters/audio_decoder_unittest.cc56
-rw-r--r--chromium/media/filters/audio_file_reader.cc10
-rw-r--r--chromium/media/filters/audio_file_reader.h6
-rw-r--r--chromium/media/filters/audio_file_reader_unittest.cc32
-rw-r--r--chromium/media/filters/audio_renderer_algorithm.cc13
-rw-r--r--chromium/media/filters/audio_renderer_algorithm.h6
-rw-r--r--chromium/media/filters/audio_renderer_algorithm_unittest.cc8
-rw-r--r--chromium/media/filters/audio_timestamp_validator.h6
-rw-r--r--chromium/media/filters/audio_timestamp_validator_unittest.cc25
-rw-r--r--chromium/media/filters/audio_video_metadata_extractor.h7
-rw-r--r--chromium/media/filters/blocking_url_protocol_unittest.cc6
-rw-r--r--chromium/media/filters/chunk_demuxer.cc95
-rw-r--r--chromium/media/filters/chunk_demuxer.h6
-rw-r--r--chromium/media/filters/chunk_demuxer_unittest.cc325
-rw-r--r--chromium/media/filters/dav1d_video_decoder.cc12
-rw-r--r--chromium/media/filters/dav1d_video_decoder.h8
-rw-r--r--chromium/media/filters/dav1d_video_decoder_unittest.cc10
-rw-r--r--chromium/media/filters/decoder_stream.cc5
-rw-r--r--chromium/media/filters/decrypting_audio_decoder.h6
-rw-r--r--chromium/media/filters/decrypting_audio_decoder_unittest.cc19
-rw-r--r--chromium/media/filters/decrypting_demuxer_stream.h5
-rw-r--r--chromium/media/filters/decrypting_demuxer_stream_unittest.cc13
-rw-r--r--chromium/media/filters/decrypting_media_resource.h6
-rw-r--r--chromium/media/filters/decrypting_video_decoder.h6
-rw-r--r--chromium/media/filters/decrypting_video_decoder_unittest.cc7
-rw-r--r--chromium/media/filters/demuxer_perftest.cc13
-rw-r--r--chromium/media/filters/fake_video_decoder.h5
-rw-r--r--chromium/media/filters/fake_video_decoder_unittest.cc11
-rw-r--r--chromium/media/filters/ffmpeg_aac_bitstream_converter.h7
-rw-r--r--chromium/media/filters/ffmpeg_audio_decoder.cc35
-rw-r--r--chromium/media/filters/ffmpeg_audio_decoder.h7
-rw-r--r--chromium/media/filters/ffmpeg_demuxer.cc29
-rw-r--r--chromium/media/filters/ffmpeg_demuxer.h11
-rw-r--r--chromium/media/filters/ffmpeg_demuxer_unittest.cc68
-rw-r--r--chromium/media/filters/ffmpeg_glue.h6
-rw-r--r--chromium/media/filters/ffmpeg_glue_unittest.cc24
-rw-r--r--chromium/media/filters/ffmpeg_h264_to_annex_b_bitstream_converter.h7
-rw-r--r--chromium/media/filters/ffmpeg_h265_to_annex_b_bitstream_converter.h8
-rw-r--r--chromium/media/filters/ffmpeg_video_decoder.cc61
-rw-r--r--chromium/media/filters/ffmpeg_video_decoder.h15
-rw-r--r--chromium/media/filters/ffmpeg_video_decoder_unittest.cc8
-rw-r--r--chromium/media/filters/file_data_source.h6
-rw-r--r--chromium/media/filters/frame_buffer_pool.cc12
-rw-r--r--chromium/media/filters/frame_buffer_pool_unittest.cc6
-rw-r--r--chromium/media/filters/frame_processor.cc10
-rw-r--r--chromium/media/filters/frame_processor.h6
-rw-r--r--chromium/media/filters/frame_processor_unittest.cc42
-rw-r--r--chromium/media/filters/fuchsia/DIR_METADATA2
-rw-r--r--chromium/media/filters/fuchsia/fuchsia_video_decoder.cc17
-rw-r--r--chromium/media/filters/fuchsia/fuchsia_video_decoder_unittest.cc13
-rw-r--r--chromium/media/filters/gav1_video_decoder.cc5
-rw-r--r--chromium/media/filters/gav1_video_decoder.h2
-rw-r--r--chromium/media/filters/gav1_video_decoder_unittest.cc10
-rw-r--r--chromium/media/filters/h264_to_annex_b_bitstream_converter.h8
-rw-r--r--chromium/media/filters/media_file_checker.cc3
-rw-r--r--chromium/media/filters/media_file_checker.h6
-rw-r--r--chromium/media/filters/media_file_checker_unittest.cc2
-rw-r--r--chromium/media/filters/memory_data_source.h5
-rw-r--r--chromium/media/filters/offloading_video_decoder.h6
-rw-r--r--chromium/media/filters/offloading_video_decoder_unittest.cc47
-rw-r--r--chromium/media/filters/pipeline_controller.h6
-rw-r--r--chromium/media/filters/pipeline_controller_unittest.cc35
-rw-r--r--chromium/media/filters/source_buffer_range.cc2
-rw-r--r--chromium/media/filters/source_buffer_range.h5
-rw-r--r--chromium/media/filters/source_buffer_state.cc103
-rw-r--r--chromium/media/filters/source_buffer_state.h51
-rw-r--r--chromium/media/filters/source_buffer_state_unittest.cc63
-rw-r--r--chromium/media/filters/source_buffer_stream.cc24
-rw-r--r--chromium/media/filters/source_buffer_stream.h5
-rw-r--r--chromium/media/filters/source_buffer_stream_unittest.cc193
-rw-r--r--chromium/media/filters/video_cadence_estimator.cc2
-rw-r--r--chromium/media/filters/video_cadence_estimator.h6
-rw-r--r--chromium/media/filters/video_cadence_estimator_unittest.cc20
-rw-r--r--chromium/media/filters/video_decoder_stream_unittest.cc11
-rw-r--r--chromium/media/filters/video_renderer_algorithm.cc15
-rw-r--r--chromium/media/filters/video_renderer_algorithm.h6
-rw-r--r--chromium/media/filters/video_renderer_algorithm_unittest.cc43
-rw-r--r--chromium/media/filters/vp9_bool_decoder.h6
-rw-r--r--chromium/media/filters/vp9_raw_bits_reader.h6
-rw-r--r--chromium/media/filters/vpx_video_decoder.cc39
-rw-r--r--chromium/media/filters/vpx_video_decoder.h18
-rw-r--r--chromium/media/filters/vpx_video_decoder_fuzzertest.cc4
-rw-r--r--chromium/media/filters/vpx_video_decoder_unittest.cc10
-rw-r--r--chromium/media/formats/common/offset_byte_queue.h6
-rw-r--r--chromium/media/formats/common/stream_parser_test_base.h6
-rw-r--r--chromium/media/formats/mp2t/es_adapter_video.cc6
-rw-r--r--chromium/media/formats/mp2t/es_adapter_video.h6
-rw-r--r--chromium/media/formats/mp2t/es_adapter_video_unittest.cc12
-rw-r--r--chromium/media/formats/mp2t/es_parser.h6
-rw-r--r--chromium/media/formats/mp2t/es_parser_adts.cc4
-rw-r--r--chromium/media/formats/mp2t/es_parser_adts.h5
-rw-r--r--chromium/media/formats/mp2t/es_parser_adts_unittest.cc6
-rw-r--r--chromium/media/formats/mp2t/es_parser_h264.cc2
-rw-r--r--chromium/media/formats/mp2t/es_parser_h264.h6
-rw-r--r--chromium/media/formats/mp2t/es_parser_h264_unittest.cc4
-rw-r--r--chromium/media/formats/mp2t/es_parser_mpeg1audio.cc4
-rw-r--r--chromium/media/formats/mp2t/es_parser_mpeg1audio.h6
-rw-r--r--chromium/media/formats/mp2t/es_parser_mpeg1audio_unittest.cc2
-rw-r--r--chromium/media/formats/mp2t/es_parser_test_base.h6
-rw-r--r--chromium/media/formats/mp2t/mp2t_stream_parser.cc10
-rw-r--r--chromium/media/formats/mp2t/mp2t_stream_parser.h6
-rw-r--r--chromium/media/formats/mp2t/mp2t_stream_parser_unittest.cc2
-rw-r--r--chromium/media/formats/mp2t/timestamp_unroller.h6
-rw-r--r--chromium/media/formats/mp2t/ts_packet.h5
-rw-r--r--chromium/media/formats/mp2t/ts_section_cat.h6
-rw-r--r--chromium/media/formats/mp2t/ts_section_cets_ecm.h6
-rw-r--r--chromium/media/formats/mp2t/ts_section_cets_pssh.h6
-rw-r--r--chromium/media/formats/mp2t/ts_section_pat.h6
-rw-r--r--chromium/media/formats/mp2t/ts_section_pes.cc2
-rw-r--r--chromium/media/formats/mp2t/ts_section_pes.h6
-rw-r--r--chromium/media/formats/mp2t/ts_section_pmt.h6
-rw-r--r--chromium/media/formats/mp2t/ts_section_psi.h6
-rw-r--r--chromium/media/formats/mp4/aac.cc4
-rw-r--r--chromium/media/formats/mp4/aac.h5
-rw-r--r--chromium/media/formats/mp4/box_definitions.cc70
-rw-r--r--chromium/media/formats/mp4/box_definitions.h1
-rw-r--r--chromium/media/formats/mp4/h264_annex_b_to_avc_bitstream_converter.h8
-rw-r--r--chromium/media/formats/mp4/mp4_stream_parser.cc40
-rw-r--r--chromium/media/formats/mp4/mp4_stream_parser.h6
-rw-r--r--chromium/media/formats/mp4/mp4_stream_parser_unittest.cc25
-rw-r--r--chromium/media/formats/mp4/sample_to_group_iterator.h6
-rw-r--r--chromium/media/formats/mp4/track_run_iterator.cc2
-rw-r--r--chromium/media/formats/mp4/track_run_iterator.h6
-rw-r--r--chromium/media/formats/mp4/track_run_iterator_unittest.cc4
-rw-r--r--chromium/media/formats/mpeg/adts_stream_parser.cc2
-rw-r--r--chromium/media/formats/mpeg/adts_stream_parser.h6
-rw-r--r--chromium/media/formats/mpeg/mpeg1_audio_stream_parser.cc4
-rw-r--r--chromium/media/formats/mpeg/mpeg1_audio_stream_parser.h6
-rw-r--r--chromium/media/formats/mpeg/mpeg_audio_stream_parser_base.cc2
-rw-r--r--chromium/media/formats/mpeg/mpeg_audio_stream_parser_base.h7
-rw-r--r--chromium/media/formats/webcodecs/webcodecs_encoded_chunk_stream_parser.h8
-rw-r--r--chromium/media/formats/webm/cluster_builder.h6
-rw-r--r--chromium/media/formats/webm/opus_packet_builder.h6
-rw-r--r--chromium/media/formats/webm/tracks_builder.h6
-rw-r--r--chromium/media/formats/webm/webm_audio_client.cc18
-rw-r--r--chromium/media/formats/webm/webm_audio_client.h6
-rw-r--r--chromium/media/formats/webm/webm_cluster_parser.cc30
-rw-r--r--chromium/media/formats/webm/webm_cluster_parser_unittest.cc24
-rw-r--r--chromium/media/formats/webm/webm_colour_parser.h12
-rw-r--r--chromium/media/formats/webm/webm_content_encodings.h6
-rw-r--r--chromium/media/formats/webm/webm_content_encodings_client.h7
-rw-r--r--chromium/media/formats/webm/webm_info_parser.cc3
-rw-r--r--chromium/media/formats/webm/webm_info_parser.h6
-rw-r--r--chromium/media/formats/webm/webm_parser.h11
-rw-r--r--chromium/media/formats/webm/webm_projection_parser.h6
-rw-r--r--chromium/media/formats/webm/webm_stream_parser.cc2
-rw-r--r--chromium/media/formats/webm/webm_stream_parser.h6
-rw-r--r--chromium/media/formats/webm/webm_tracks_parser.cc2
-rw-r--r--chromium/media/formats/webm/webm_tracks_parser.h7
-rw-r--r--chromium/media/formats/webm/webm_tracks_parser_unittest.cc24
-rw-r--r--chromium/media/formats/webm/webm_video_client.cc11
-rw-r--r--chromium/media/formats/webm/webm_video_client.h6
-rw-r--r--chromium/media/formats/webm/webm_video_client_unittest.cc2
-rw-r--r--chromium/media/fuchsia/audio/fake_audio_consumer.cc6
-rw-r--r--chromium/media/fuchsia/audio/fuchsia_audio_capturer_source.cc176
-rw-r--r--chromium/media/fuchsia/audio/fuchsia_audio_capturer_source.h30
-rw-r--r--chromium/media/fuchsia/audio/fuchsia_audio_capturer_source_test.cc13
-rw-r--r--chromium/media/fuchsia/audio/fuchsia_audio_output_device.cc9
-rw-r--r--chromium/media/fuchsia/audio/fuchsia_audio_output_device_test.cc12
-rw-r--r--chromium/media/fuchsia/audio/fuchsia_audio_renderer.cc20
-rw-r--r--chromium/media/fuchsia/audio/fuchsia_audio_renderer.h4
-rw-r--r--chromium/media/fuchsia/audio/fuchsia_audio_renderer_test.cc53
-rw-r--r--chromium/media/fuchsia/cdm/BUILD.gn6
-rw-r--r--chromium/media/fuchsia/cdm/DEPS1
-rw-r--r--chromium/media/fuchsia/cdm/client/BUILD.gn2
-rw-r--r--chromium/media/fuchsia/cdm/client/mojo_fuchsia_cdm_provider.cc7
-rw-r--r--chromium/media/fuchsia/cdm/client/mojo_fuchsia_cdm_provider.h11
-rw-r--r--chromium/media/fuchsia/cdm/fuchsia_cdm.cc18
-rw-r--r--chromium/media/fuchsia/cdm/fuchsia_cdm.h6
-rw-r--r--chromium/media/fuchsia/cdm/fuchsia_cdm_factory.h6
-rw-r--r--chromium/media/fuchsia/cdm/fuchsia_decryptor.h6
-rw-r--r--chromium/media/fuchsia/cdm/service/BUILD.gn3
-rw-r--r--chromium/media/fuchsia/cdm/service/fuchsia_cdm_manager_unittest.cc4
-rw-r--r--chromium/media/fuchsia/cdm/service/provisioning_fetcher_impl.cc11
-rw-r--r--chromium/media/fuchsia/cdm/service/provisioning_fetcher_impl_unittest.cc23
-rw-r--r--chromium/media/fuchsia/common/stream_processor_helper.cc2
-rw-r--r--chromium/media/fuchsia/common/stream_processor_helper.h6
-rw-r--r--chromium/media/fuchsia/common/sysmem_client.h4
-rw-r--r--chromium/media/fuchsia/mojom/BUILD.gn30
-rw-r--r--chromium/media/fuchsia/mojom/fuchsia_cdm_provider.mojom20
-rw-r--r--chromium/media/fuchsia/mojom/fuchsia_cdm_provider_mojom_traits.h24
-rw-r--r--chromium/media/fuchsia/mojom/fuchsia_media_resource_provider.mojom11
-rw-r--r--chromium/media/fuchsia/mojom/fuchsia_media_resource_provider_mojom_traits.h10
-rw-r--r--chromium/media/gpu/BUILD.gn5
-rw-r--r--chromium/media/gpu/accelerated_video_decoder.h7
-rw-r--r--chromium/media/gpu/android/android_video_encode_accelerator.cc18
-rw-r--r--chromium/media/gpu/android/android_video_encode_accelerator.h7
-rw-r--r--chromium/media/gpu/android/android_video_surface_chooser.h8
-rw-r--r--chromium/media/gpu/android/android_video_surface_chooser_impl.cc3
-rw-r--r--chromium/media/gpu/android/android_video_surface_chooser_impl.h8
-rw-r--r--chromium/media/gpu/android/android_video_surface_chooser_impl_unittest.cc6
-rw-r--r--chromium/media/gpu/android/codec_allocator.cc3
-rw-r--r--chromium/media/gpu/android/codec_allocator_unittest.cc26
-rw-r--r--chromium/media/gpu/android/codec_buffer_wait_coordinator.h7
-rw-r--r--chromium/media/gpu/android/codec_image.cc26
-rw-r--r--chromium/media/gpu/android/codec_image.h11
-rw-r--r--chromium/media/gpu/android/codec_image_unittest.cc37
-rw-r--r--chromium/media/gpu/android/codec_wrapper.h9
-rw-r--r--chromium/media/gpu/android/direct_shared_image_video_provider.cc4
-rw-r--r--chromium/media/gpu/android/direct_shared_image_video_provider.h15
-rw-r--r--chromium/media/gpu/android/fake_codec_allocator.h6
-rw-r--r--chromium/media/gpu/android/maybe_render_early_manager.cc13
-rw-r--r--chromium/media/gpu/android/maybe_render_early_manager.h6
-rw-r--r--chromium/media/gpu/android/media_codec_video_decoder.cc32
-rw-r--r--chromium/media/gpu/android/media_codec_video_decoder.h5
-rw-r--r--chromium/media/gpu/android/media_codec_video_decoder_unittest.cc25
-rw-r--r--chromium/media/gpu/android/mock_android_video_surface_chooser.h9
-rw-r--r--chromium/media/gpu/android/pooled_shared_image_video_provider.h14
-rw-r--r--chromium/media/gpu/android/promotion_hint_aggregator_impl.cc5
-rw-r--r--chromium/media/gpu/android/promotion_hint_aggregator_impl.h7
-rw-r--r--chromium/media/gpu/android/promotion_hint_aggregator_impl_unittest.cc15
-rw-r--r--chromium/media/gpu/android/shared_image_video_provider.h14
-rw-r--r--chromium/media/gpu/android/surface_chooser_helper.cc2
-rw-r--r--chromium/media/gpu/android/surface_chooser_helper.h6
-rw-r--r--chromium/media/gpu/android/surface_chooser_helper_unittest.cc5
-rw-r--r--chromium/media/gpu/android/video_frame_factory_impl.cc4
-rw-r--r--chromium/media/gpu/android/video_frame_factory_impl.h6
-rw-r--r--chromium/media/gpu/args.gni13
-rw-r--r--chromium/media/gpu/av1_decoder.cc3
-rw-r--r--chromium/media/gpu/chromeos/BUILD.gn6
-rw-r--r--chromium/media/gpu/chromeos/dmabuf_video_frame_pool.h31
-rw-r--r--chromium/media/gpu/chromeos/generic_dmabuf_video_frame_mapper.h6
-rw-r--r--chromium/media/gpu/chromeos/gpu_memory_buffer_video_frame_mapper.h7
-rw-r--r--chromium/media/gpu/chromeos/image_processor.cc15
-rw-r--r--chromium/media/gpu/chromeos/image_processor_backend.h7
-rw-r--r--chromium/media/gpu/chromeos/image_processor_factory.cc84
-rw-r--r--chromium/media/gpu/chromeos/image_processor_factory.h22
-rw-r--r--chromium/media/gpu/chromeos/image_processor_test.cc110
-rw-r--r--chromium/media/gpu/chromeos/image_processor_with_pool.cc23
-rw-r--r--chromium/media/gpu/chromeos/image_processor_with_pool.h10
-rw-r--r--chromium/media/gpu/chromeos/libyuv_image_processor_backend.cc212
-rw-r--r--chromium/media/gpu/chromeos/mailbox_video_frame_converter.cc11
-rw-r--r--chromium/media/gpu/chromeos/mailbox_video_frame_converter_unittest.cc8
-rw-r--r--chromium/media/gpu/chromeos/platform_video_frame_pool.cc26
-rw-r--r--chromium/media/gpu/chromeos/platform_video_frame_pool.h19
-rw-r--r--chromium/media/gpu/chromeos/platform_video_frame_pool_unittest.cc14
-rw-r--r--chromium/media/gpu/chromeos/platform_video_frame_utils.cc28
-rw-r--r--chromium/media/gpu/chromeos/platform_video_frame_utils_unittest.cc2
-rw-r--r--chromium/media/gpu/chromeos/vd_video_decode_accelerator.cc2
-rw-r--r--chromium/media/gpu/chromeos/vda_video_frame_pool.cc20
-rw-r--r--chromium/media/gpu/chromeos/vda_video_frame_pool.h13
-rw-r--r--chromium/media/gpu/chromeos/video_decoder_pipeline.cc111
-rw-r--r--chromium/media/gpu/chromeos/video_decoder_pipeline.h69
-rw-r--r--chromium/media/gpu/chromeos/video_decoder_pipeline_unittest.cc104
-rw-r--r--chromium/media/gpu/decode_surface_handler.h41
-rw-r--r--chromium/media/gpu/gpu_video_decode_accelerator_factory.h1
-rw-r--r--chromium/media/gpu/h264_decoder.cc55
-rw-r--r--chromium/media/gpu/h264_decoder.h15
-rw-r--r--chromium/media/gpu/h264_dpb.h9
-rw-r--r--chromium/media/gpu/ipc/client/gpu_video_decode_accelerator_host.cc8
-rw-r--r--chromium/media/gpu/ipc/service/vda_video_decoder.cc3
-rw-r--r--chromium/media/gpu/ipc/service/vda_video_decoder_unittest.cc28
-rw-r--r--chromium/media/gpu/mac/vt_video_decode_accelerator_mac.cc24
-rw-r--r--chromium/media/gpu/mac/vt_video_decode_accelerator_mac.h5
-rw-r--r--chromium/media/gpu/mac/vt_video_encode_accelerator_mac.h6
-rw-r--r--chromium/media/gpu/v4l2/BUILD.gn9
-rw-r--r--chromium/media/gpu/v4l2/v4l2_decode_surface_handler.h7
-rw-r--r--chromium/media/gpu/v4l2/v4l2_device.cc27
-rw-r--r--chromium/media/gpu/v4l2/v4l2_device.h4
-rw-r--r--chromium/media/gpu/v4l2/v4l2_framerate_control.cc7
-rw-r--r--chromium/media/gpu/v4l2/v4l2_image_processor_backend.cc10
-rw-r--r--chromium/media/gpu/v4l2/v4l2_image_processor_backend.h10
-rw-r--r--chromium/media/gpu/v4l2/v4l2_jpeg_encode_accelerator.h7
-rw-r--r--chromium/media/gpu/v4l2/v4l2_mjpeg_decode_accelerator.cc10
-rw-r--r--chromium/media/gpu/v4l2/v4l2_mjpeg_decode_accelerator.h7
-rw-r--r--chromium/media/gpu/v4l2/v4l2_slice_video_decode_accelerator.cc37
-rw-r--r--chromium/media/gpu/v4l2/v4l2_slice_video_decode_accelerator.h8
-rw-r--r--chromium/media/gpu/v4l2/v4l2_stateful_workaround.cc126
-rw-r--r--chromium/media/gpu/v4l2/v4l2_stateful_workaround.h14
-rw-r--r--chromium/media/gpu/v4l2/v4l2_stateful_workaround_unittest.cc135
-rw-r--r--chromium/media/gpu/v4l2/v4l2_vda_helpers.cc6
-rw-r--r--chromium/media/gpu/v4l2/v4l2_video_decode_accelerator.cc37
-rw-r--r--chromium/media/gpu/v4l2/v4l2_video_decode_accelerator.h7
-rw-r--r--chromium/media/gpu/v4l2/v4l2_video_decoder.cc37
-rw-r--r--chromium/media/gpu/v4l2/v4l2_video_decoder_backend.h4
-rw-r--r--chromium/media/gpu/v4l2/v4l2_video_decoder_backend_stateful.cc6
-rw-r--r--chromium/media/gpu/v4l2/v4l2_video_decoder_backend_stateless.cc13
-rw-r--r--chromium/media/gpu/v4l2/v4l2_video_decoder_backend_stateless.h7
-rw-r--r--chromium/media/gpu/v4l2/v4l2_video_decoder_delegate_h264.h7
-rw-r--r--chromium/media/gpu/v4l2/v4l2_video_decoder_delegate_h264_legacy.h8
-rw-r--r--chromium/media/gpu/v4l2/v4l2_video_decoder_delegate_vp8.h7
-rw-r--r--chromium/media/gpu/v4l2/v4l2_video_decoder_delegate_vp8_legacy.h8
-rw-r--r--chromium/media/gpu/v4l2/v4l2_video_decoder_delegate_vp9_chromium.h8
-rw-r--r--chromium/media/gpu/v4l2/v4l2_video_decoder_delegate_vp9_legacy.h8
-rw-r--r--chromium/media/gpu/v4l2/v4l2_video_encode_accelerator.cc14
-rw-r--r--chromium/media/gpu/v4l2/v4l2_video_encode_accelerator.h7
-rw-r--r--chromium/media/gpu/vaapi/BUILD.gn17
-rw-r--r--chromium/media/gpu/vaapi/av1_vaapi_video_decoder_delegate.cc69
-rw-r--r--chromium/media/gpu/vaapi/av1_vaapi_video_decoder_delegate.h10
-rw-r--r--chromium/media/gpu/vaapi/h264_vaapi_video_decoder_delegate.cc35
-rw-r--r--chromium/media/gpu/vaapi/h264_vaapi_video_decoder_delegate.h7
-rw-r--r--chromium/media/gpu/vaapi/h264_vaapi_video_encoder_delegate.cc312
-rw-r--r--chromium/media/gpu/vaapi/h264_vaapi_video_encoder_delegate.h22
-rw-r--r--chromium/media/gpu/vaapi/h264_vaapi_video_encoder_delegate_unittest.cc176
-rw-r--r--chromium/media/gpu/vaapi/h265_vaapi_video_decoder_delegate.cc35
-rw-r--r--chromium/media/gpu/vaapi/va_stub_header.fragment6
-rw-r--r--chromium/media/gpu/vaapi/vaapi_common.cc15
-rw-r--r--chromium/media/gpu/vaapi/vaapi_common.h27
-rw-r--r--chromium/media/gpu/vaapi/vaapi_dmabuf_video_frame_mapper.cc3
-rw-r--r--chromium/media/gpu/vaapi/vaapi_dmabuf_video_frame_mapper.h6
-rw-r--r--chromium/media/gpu/vaapi/vaapi_image_decode_accelerator_worker.h7
-rw-r--r--chromium/media/gpu/vaapi/vaapi_image_decoder.h5
-rw-r--r--chromium/media/gpu/vaapi/vaapi_image_processor_backend.cc287
-rw-r--r--chromium/media/gpu/vaapi/vaapi_image_processor_backend.h77
-rw-r--r--chromium/media/gpu/vaapi/vaapi_jpeg_decoder.h6
-rw-r--r--chromium/media/gpu/vaapi/vaapi_jpeg_encode_accelerator.cc6
-rw-r--r--chromium/media/gpu/vaapi/vaapi_jpeg_encode_accelerator.h7
-rw-r--r--chromium/media/gpu/vaapi/vaapi_jpeg_encoder.h6
-rw-r--r--chromium/media/gpu/vaapi/vaapi_mjpeg_decode_accelerator.cc363
-rw-r--r--chromium/media/gpu/vaapi/vaapi_mjpeg_decode_accelerator.h49
-rw-r--r--chromium/media/gpu/vaapi/vaapi_picture.h5
-rw-r--r--chromium/media/gpu/vaapi/vaapi_picture_factory.cc72
-rw-r--r--chromium/media/gpu/vaapi/vaapi_picture_factory.h6
-rw-r--r--chromium/media/gpu/vaapi/vaapi_picture_native_pixmap.h7
-rw-r--r--chromium/media/gpu/vaapi/vaapi_picture_native_pixmap_angle.cc2
-rw-r--r--chromium/media/gpu/vaapi/vaapi_picture_native_pixmap_angle.h6
-rw-r--r--chromium/media/gpu/vaapi/vaapi_picture_native_pixmap_egl.h6
-rw-r--r--chromium/media/gpu/vaapi/vaapi_picture_native_pixmap_ozone.h6
-rw-r--r--chromium/media/gpu/vaapi/vaapi_picture_tfp.cc2
-rw-r--r--chromium/media/gpu/vaapi/vaapi_picture_tfp.h5
-rw-r--r--chromium/media/gpu/vaapi/vaapi_unittest.cc345
-rw-r--r--chromium/media/gpu/vaapi/vaapi_utils.h18
-rw-r--r--chromium/media/gpu/vaapi/vaapi_video_decode_accelerator.cc17
-rw-r--r--chromium/media/gpu/vaapi/vaapi_video_decode_accelerator.h6
-rw-r--r--chromium/media/gpu/vaapi/vaapi_video_decode_accelerator_unittest.cc19
-rw-r--r--chromium/media/gpu/vaapi/vaapi_video_decoder.cc275
-rw-r--r--chromium/media/gpu/vaapi/vaapi_video_decoder.h37
-rw-r--r--chromium/media/gpu/vaapi/vaapi_video_decoder_delegate.cc77
-rw-r--r--chromium/media/gpu/vaapi/vaapi_video_decoder_delegate.h15
-rw-r--r--chromium/media/gpu/vaapi/vaapi_video_encode_accelerator.cc452
-rw-r--r--chromium/media/gpu/vaapi/vaapi_video_encode_accelerator.h106
-rw-r--r--chromium/media/gpu/vaapi/vaapi_video_encode_accelerator_unittest.cc239
-rw-r--r--chromium/media/gpu/vaapi/vaapi_video_encoder_delegate.h6
-rw-r--r--chromium/media/gpu/vaapi/vaapi_webp_decoder.h6
-rw-r--r--chromium/media/gpu/vaapi/vaapi_wrapper.cc315
-rw-r--r--chromium/media/gpu/vaapi/vaapi_wrapper.h64
-rw-r--r--chromium/media/gpu/vaapi/vp8_vaapi_video_decoder_delegate.h6
-rw-r--r--chromium/media/gpu/vaapi/vp8_vaapi_video_encoder_delegate.cc18
-rw-r--r--chromium/media/gpu/vaapi/vp8_vaapi_video_encoder_delegate.h7
-rw-r--r--chromium/media/gpu/vaapi/vp9_vaapi_video_decoder_delegate.cc42
-rw-r--r--chromium/media/gpu/vaapi/vp9_vaapi_video_decoder_delegate.h8
-rw-r--r--chromium/media/gpu/vaapi/vp9_vaapi_video_encoder_delegate.cc5
-rw-r--r--chromium/media/gpu/vaapi/vp9_vaapi_video_encoder_delegate.h7
-rw-r--r--chromium/media/gpu/video_decode_accelerator_perf_tests.cc11
-rw-r--r--chromium/media/gpu/video_decode_accelerator_tests.cc22
-rw-r--r--chromium/media/gpu/video_encode_accelerator_perf_tests.cc25
-rw-r--r--chromium/media/gpu/video_encode_accelerator_tests.cc45
-rw-r--r--chromium/media/gpu/video_frame_mapper.h5
-rw-r--r--chromium/media/gpu/vp8_decoder.h13
-rw-r--r--chromium/media/gpu/vp8_reference_frame_vector.h5
-rw-r--r--chromium/media/gpu/vp9_decoder.cc3
-rw-r--r--chromium/media/gpu/vp9_decoder.h13
-rw-r--r--chromium/media/gpu/vp9_reference_frame_vector.h5
-rw-r--r--chromium/media/gpu/windows/d3d11_av1_accelerator.cc18
-rw-r--r--chromium/media/gpu/windows/d3d11_av1_accelerator.h8
-rw-r--r--chromium/media/gpu/windows/d3d11_copying_texture_wrapper.cc18
-rw-r--r--chromium/media/gpu/windows/d3d11_copying_texture_wrapper.h2
-rw-r--r--chromium/media/gpu/windows/d3d11_copying_texture_wrapper_unittest.cc16
-rw-r--r--chromium/media/gpu/windows/d3d11_decoder_configurator.cc62
-rw-r--r--chromium/media/gpu/windows/d3d11_decoder_configurator.h11
-rw-r--r--chromium/media/gpu/windows/d3d11_decoder_configurator_unittest.cc6
-rw-r--r--chromium/media/gpu/windows/d3d11_h264_accelerator.cc19
-rw-r--r--chromium/media/gpu/windows/d3d11_h264_accelerator.h9
-rw-r--r--chromium/media/gpu/windows/d3d11_picture_buffer.cc20
-rw-r--r--chromium/media/gpu/windows/d3d11_picture_buffer.h6
-rw-r--r--chromium/media/gpu/windows/d3d11_texture_selector.cc36
-rw-r--r--chromium/media/gpu/windows/d3d11_texture_selector.h18
-rw-r--r--chromium/media/gpu/windows/d3d11_texture_selector_unittest.cc3
-rw-r--r--chromium/media/gpu/windows/d3d11_texture_wrapper.cc97
-rw-r--r--chromium/media/gpu/windows/d3d11_texture_wrapper.h19
-rw-r--r--chromium/media/gpu/windows/d3d11_video_context_wrapper.h6
-rw-r--r--chromium/media/gpu/windows/d3d11_video_decoder.cc68
-rw-r--r--chromium/media/gpu/windows/d3d11_video_decoder_impl.h6
-rw-r--r--chromium/media/gpu/windows/d3d11_video_decoder_unittest.cc35
-rw-r--r--chromium/media/gpu/windows/d3d11_video_device_format_support.cc2
-rw-r--r--chromium/media/gpu/windows/d3d11_video_device_format_support.h6
-rw-r--r--chromium/media/gpu/windows/d3d11_vp9_accelerator.cc20
-rw-r--r--chromium/media/gpu/windows/d3d11_vp9_accelerator.h7
-rw-r--r--chromium/media/gpu/windows/dxva_picture_buffer_win.cc9
-rw-r--r--chromium/media/gpu/windows/dxva_picture_buffer_win.h6
-rw-r--r--chromium/media/gpu/windows/dxva_video_decode_accelerator_win.cc61
-rw-r--r--chromium/media/gpu/windows/dxva_video_decode_accelerator_win.h7
-rw-r--r--chromium/media/gpu/windows/media_foundation_video_encode_accelerator_win.cc230
-rw-r--r--chromium/media/gpu/windows/media_foundation_video_encode_accelerator_win.h11
-rw-r--r--chromium/media/gpu/windows/supported_profile_helpers.cc3
-rw-r--r--chromium/media/learning/common/feature_dictionary.h6
-rw-r--r--chromium/media/learning/common/learning_session.h7
-rw-r--r--chromium/media/learning/common/learning_task_controller.h7
-rw-r--r--chromium/media/learning/impl/distribution_reporter.h5
-rw-r--r--chromium/media/learning/impl/extra_trees_trainer.cc8
-rw-r--r--chromium/media/learning/impl/extra_trees_trainer.h6
-rw-r--r--chromium/media/learning/impl/feature_provider.h7
-rw-r--r--chromium/media/learning/impl/lookup_table_trainer.h7
-rw-r--r--chromium/media/learning/impl/one_hot.h12
-rw-r--r--chromium/media/learning/impl/random_number_generator.h7
-rw-r--r--chromium/media/learning/impl/random_tree_trainer.h6
-rw-r--r--chromium/media/learning/impl/training_algorithm.h7
-rw-r--r--chromium/media/learning/impl/voting_ensemble.h6
-rw-r--r--chromium/media/learning/mojo/mojo_learning_task_controller_service.h8
-rw-r--r--chromium/media/learning/mojo/public/cpp/mojo_learning_task_controller.h7
-rw-r--r--chromium/media/media_options.gni1
-rw-r--r--chromium/media/midi/midi_manager.h6
-rw-r--r--chromium/media/midi/midi_manager_alsa.h35
-rw-r--r--chromium/media/midi/midi_manager_mac.cc2
-rw-r--r--chromium/media/midi/midi_manager_mac.h6
-rw-r--r--chromium/media/midi/midi_manager_mac_unittest.cc6
-rw-r--r--chromium/media/midi/midi_manager_unittest.cc27
-rw-r--r--chromium/media/midi/midi_manager_usb.h6
-rw-r--r--chromium/media/midi/midi_manager_usb_unittest.cc38
-rw-r--r--chromium/media/midi/midi_manager_win.cc2
-rw-r--r--chromium/media/midi/midi_manager_win.h6
-rw-r--r--chromium/media/midi/midi_manager_winrt.h6
-rw-r--r--chromium/media/midi/midi_message_queue.h5
-rw-r--r--chromium/media/midi/midi_service.h12
-rw-r--r--chromium/media/midi/task_service.h6
-rw-r--r--chromium/media/midi/task_service_unittest.cc2
-rw-r--r--chromium/media/midi/usb_midi_descriptor_parser.h6
-rw-r--r--chromium/media/midi/usb_midi_device_factory_android.h7
-rw-r--r--chromium/media/midi/usb_midi_input_stream.h6
-rw-r--r--chromium/media/midi/usb_midi_input_stream_unittest.cc12
-rw-r--r--chromium/media/midi/usb_midi_output_stream.cc2
-rw-r--r--chromium/media/midi/usb_midi_output_stream_unittest.cc6
-rw-r--r--chromium/media/mojo/BUILD.gn4
-rw-r--r--chromium/media/mojo/README.md10
-rw-r--r--chromium/media/mojo/clients/BUILD.gn1
-rw-r--r--chromium/media/mojo/clients/mojo_android_overlay.h5
-rw-r--r--chromium/media/mojo/clients/mojo_audio_decoder.h6
-rw-r--r--chromium/media/mojo/clients/mojo_audio_decoder_unittest.cc12
-rw-r--r--chromium/media/mojo/clients/mojo_cdm_factory.h6
-rw-r--r--chromium/media/mojo/clients/mojo_cdm_unittest.cc7
-rw-r--r--chromium/media/mojo/clients/mojo_decoder_factory.h6
-rw-r--r--chromium/media/mojo/clients/mojo_decryptor.h6
-rw-r--r--chromium/media/mojo/clients/mojo_decryptor_unittest.cc12
-rw-r--r--chromium/media/mojo/clients/mojo_demuxer_stream_impl.h5
-rw-r--r--chromium/media/mojo/clients/mojo_media_log_service.cc9
-rw-r--r--chromium/media/mojo/clients/mojo_media_log_service.h14
-rw-r--r--chromium/media/mojo/clients/mojo_renderer.h11
-rw-r--r--chromium/media/mojo/clients/mojo_renderer_factory.cc2
-rw-r--r--chromium/media/mojo/clients/mojo_renderer_factory.h7
-rw-r--r--chromium/media/mojo/clients/mojo_renderer_unittest.cc15
-rw-r--r--chromium/media/mojo/clients/mojo_renderer_wrapper.h7
-rw-r--r--chromium/media/mojo/clients/mojo_video_decoder.cc88
-rw-r--r--chromium/media/mojo/clients/mojo_video_decoder.h24
-rw-r--r--chromium/media/mojo/clients/mojo_video_encode_accelerator.cc17
-rw-r--r--chromium/media/mojo/clients/win/media_foundation_renderer_client.cc300
-rw-r--r--chromium/media/mojo/clients/win/media_foundation_renderer_client.h36
-rw-r--r--chromium/media/mojo/clients/win/media_foundation_renderer_client_factory.cc21
-rw-r--r--chromium/media/mojo/clients/win/media_foundation_renderer_client_factory.h7
-rw-r--r--chromium/media/mojo/common/media_type_converters_unittest.cc11
-rw-r--r--chromium/media/mojo/common/mojo_data_pipe_read_write.h10
-rw-r--r--chromium/media/mojo/common/mojo_decoder_buffer_converter.h10
-rw-r--r--chromium/media/mojo/common/mojo_decoder_buffer_converter_unittest.cc9
-rw-r--r--chromium/media/mojo/common/mojo_shared_buffer_video_frame_unittest.cc14
-rw-r--r--chromium/media/mojo/mojom/BUILD.gn31
-rw-r--r--chromium/media/mojo/mojom/audio_decoder_config_mojom_traits.cc9
-rw-r--r--chromium/media/mojo/mojom/audio_decoder_config_mojom_traits.h5
-rw-r--r--chromium/media/mojo/mojom/audio_decoder_config_mojom_traits_unittest.cc95
-rw-r--r--chromium/media/mojo/mojom/cdm_document_service.mojom19
-rw-r--r--chromium/media/mojo/mojom/cdm_preference_data_mojom_traits.cc28
-rw-r--r--chromium/media/mojo/mojom/cdm_preference_data_mojom_traits.h35
-rw-r--r--chromium/media/mojo/mojom/interface_factory.mojom2
-rw-r--r--chromium/media/mojo/mojom/media_foundation_cdm_data_mojom_traits.cc33
-rw-r--r--chromium/media/mojo/mojom/media_foundation_cdm_data_mojom_traits.h41
-rw-r--r--chromium/media/mojo/mojom/media_player.mojom13
-rw-r--r--chromium/media/mojo/mojom/media_service.mojom11
-rw-r--r--chromium/media/mojo/mojom/media_types.mojom31
-rw-r--r--chromium/media/mojo/mojom/media_types_enum_mojom_traits.h79
-rw-r--r--chromium/media/mojo/mojom/renderer_extensions.mojom9
-rw-r--r--chromium/media/mojo/mojom/speech_recognition_result_mojom_traits.cc2
-rw-r--r--chromium/media/mojo/mojom/speech_recognition_result_mojom_traits_unittest.cc26
-rw-r--r--chromium/media/mojo/mojom/speech_recognition_service.mojom2
-rw-r--r--chromium/media/mojo/mojom/stable/BUILD.gn23
-rw-r--r--chromium/media/mojo/mojom/stable/OWNERS2
-rw-r--r--chromium/media/mojo/mojom/stable/README3
-rw-r--r--chromium/media/mojo/mojom/stable/stable_video_decoder.mojom113
-rw-r--r--chromium/media/mojo/mojom/stable/stable_video_decoder_types.mojom516
-rw-r--r--chromium/media/mojo/mojom/status_mojom_traits.cc37
-rw-r--r--chromium/media/mojo/mojom/status_mojom_traits.h62
-rw-r--r--chromium/media/mojo/mojom/video_decoder.mojom2
-rw-r--r--chromium/media/mojo/mojom/video_decoder_config_mojom_traits_unittest.cc48
-rw-r--r--chromium/media/mojo/mojom/video_encode_accelerator.mojom1
-rw-r--r--chromium/media/mojo/mojom/video_encode_accelerator_mojom_traits.cc6
-rw-r--r--chromium/media/mojo/mojom/video_encode_accelerator_mojom_traits.h5
-rw-r--r--chromium/media/mojo/mojom/video_encode_accelerator_mojom_traits_unittest.cc18
-rw-r--r--chromium/media/mojo/mojom/video_frame_metadata_mojom_traits_unittest.cc18
-rw-r--r--chromium/media/mojo/mojom/video_frame_mojom_traits_unittest.cc10
-rw-r--r--chromium/media/mojo/services/BUILD.gn8
-rw-r--r--chromium/media/mojo/services/DEPS2
-rw-r--r--chromium/media/mojo/services/android_mojo_media_client.h7
-rw-r--r--chromium/media/mojo/services/cdm_service_unittest.cc6
-rw-r--r--chromium/media/mojo/services/gpu_mojo_media_client.cc96
-rw-r--r--chromium/media/mojo/services/gpu_mojo_media_client.h106
-rw-r--r--chromium/media/mojo/services/gpu_mojo_media_client_android.cc151
-rw-r--r--chromium/media/mojo/services/gpu_mojo_media_client_cros.cc107
-rw-r--r--chromium/media/mojo/services/gpu_mojo_media_client_cros_ash.cc15
-rw-r--r--chromium/media/mojo/services/gpu_mojo_media_client_default.cc14
-rw-r--r--chromium/media/mojo/services/gpu_mojo_media_client_mac.cc71
-rw-r--r--chromium/media/mojo/services/gpu_mojo_media_client_stubs.cc43
-rw-r--r--chromium/media/mojo/services/gpu_mojo_media_client_win.cc103
-rw-r--r--chromium/media/mojo/services/interface_factory_impl.cc3
-rw-r--r--chromium/media/mojo/services/interface_factory_impl.h8
-rw-r--r--chromium/media/mojo/services/media_foundation_mojo_media_client.cc9
-rw-r--r--chromium/media/mojo/services/media_foundation_mojo_media_client.h13
-rw-r--r--chromium/media/mojo/services/media_foundation_renderer_wrapper.cc17
-rw-r--r--chromium/media/mojo/services/media_foundation_renderer_wrapper.h4
-rw-r--r--chromium/media/mojo/services/media_foundation_service.cc137
-rw-r--r--chromium/media/mojo/services/media_foundation_service.h5
-rw-r--r--chromium/media/mojo/services/media_foundation_service_broker.cc6
-rw-r--r--chromium/media/mojo/services/media_foundation_service_broker.h2
-rw-r--r--chromium/media/mojo/services/media_metrics_provider.cc8
-rw-r--r--chromium/media/mojo/services/media_metrics_provider.h6
-rw-r--r--chromium/media/mojo/services/media_metrics_provider_unittest.cc35
-rw-r--r--chromium/media/mojo/services/media_resource_shim.h6
-rw-r--r--chromium/media/mojo/services/media_service.h6
-rw-r--r--chromium/media/mojo/services/media_service_unittest.cc14
-rw-r--r--chromium/media/mojo/services/mojo_audio_decoder_service.h5
-rw-r--r--chromium/media/mojo/services/mojo_audio_input_stream.h5
-rw-r--r--chromium/media/mojo/services/mojo_audio_input_stream_unittest.cc5
-rw-r--r--chromium/media/mojo/services/mojo_audio_output_stream.h5
-rw-r--r--chromium/media/mojo/services/mojo_audio_output_stream_provider.cc2
-rw-r--r--chromium/media/mojo/services/mojo_audio_output_stream_provider.h6
-rw-r--r--chromium/media/mojo/services/mojo_audio_output_stream_unittest.cc5
-rw-r--r--chromium/media/mojo/services/mojo_cdm_allocator.cc8
-rw-r--r--chromium/media/mojo/services/mojo_cdm_allocator.h6
-rw-r--r--chromium/media/mojo/services/mojo_cdm_allocator_unittest.cc5
-rw-r--r--chromium/media/mojo/services/mojo_cdm_helper.cc5
-rw-r--r--chromium/media/mojo/services/mojo_cdm_helper.h2
-rw-r--r--chromium/media/mojo/services/mojo_cdm_service.h6
-rw-r--r--chromium/media/mojo/services/mojo_cdm_service_context.h6
-rw-r--r--chromium/media/mojo/services/mojo_decryptor_service.cc7
-rw-r--r--chromium/media/mojo/services/mojo_decryptor_service.h5
-rw-r--r--chromium/media/mojo/services/mojo_demuxer_stream_adapter.h5
-rw-r--r--chromium/media/mojo/services/mojo_media_client.cc7
-rw-r--r--chromium/media/mojo/services/mojo_media_client.h9
-rw-r--r--chromium/media/mojo/services/mojo_media_drm_storage.h6
-rw-r--r--chromium/media/mojo/services/mojo_media_log.cc12
-rw-r--r--chromium/media/mojo/services/mojo_media_log.h19
-rw-r--r--chromium/media/mojo/services/mojo_provision_fetcher.h6
-rw-r--r--chromium/media/mojo/services/mojo_renderer_service.cc4
-rw-r--r--chromium/media/mojo/services/mojo_renderer_service.h5
-rw-r--r--chromium/media/mojo/services/mojo_video_decoder_service.cc39
-rw-r--r--chromium/media/mojo/services/mojo_video_decoder_service.h11
-rw-r--r--chromium/media/mojo/services/mojo_video_encode_accelerator_provider.h8
-rw-r--r--chromium/media/mojo/services/mojo_video_encode_accelerator_service.h8
-rw-r--r--chromium/media/mojo/services/playback_events_recorder.cc3
-rw-r--r--chromium/media/mojo/services/playback_events_recorder_test.cc2
-rw-r--r--chromium/media/mojo/services/stable_video_decoder_factory_service.cc24
-rw-r--r--chromium/media/mojo/services/stable_video_decoder_factory_service.h39
-rw-r--r--chromium/media/mojo/services/test_mojo_media_client.cc3
-rw-r--r--chromium/media/mojo/services/test_mojo_media_client.h6
-rw-r--r--chromium/media/mojo/services/video_decode_perf_history.h6
-rw-r--r--chromium/media/mojo/services/video_decode_stats_recorder.h6
-rw-r--r--chromium/media/mojo/services/watch_time_recorder.cc24
-rw-r--r--chromium/media/mojo/services/watch_time_recorder.h6
-rw-r--r--chromium/media/mojo/services/watch_time_recorder_unittest.cc251
-rw-r--r--chromium/media/muxers/webm_muxer.cc30
-rw-r--r--chromium/media/muxers/webm_muxer_fuzzertest.cc16
-rw-r--r--chromium/media/muxers/webm_muxer_unittest.cc84
-rw-r--r--chromium/media/parsers/vp8_parser.h6
-rw-r--r--chromium/media/remoting/BUILD.gn23
-rw-r--r--chromium/media/remoting/DEPS1
-rw-r--r--chromium/media/remoting/courier_renderer.cc96
-rw-r--r--chromium/media/remoting/courier_renderer.h23
-rw-r--r--chromium/media/remoting/courier_renderer_factory.h6
-rw-r--r--chromium/media/remoting/courier_renderer_unittest.cc179
-rw-r--r--chromium/media/remoting/demuxer_stream_adapter.cc51
-rw-r--r--chromium/media/remoting/demuxer_stream_adapter.h32
-rw-r--r--chromium/media/remoting/demuxer_stream_adapter_unittest.cc31
-rw-r--r--chromium/media/remoting/end2end_test_renderer.cc46
-rw-r--r--chromium/media/remoting/end2end_test_renderer.h9
-rw-r--r--chromium/media/remoting/fake_media_resource.cc12
-rw-r--r--chromium/media/remoting/fake_media_resource.h12
-rw-r--r--chromium/media/remoting/fake_remoter.h19
-rw-r--r--chromium/media/remoting/integration_test.cc2
-rw-r--r--chromium/media/remoting/metrics.cc52
-rw-r--r--chromium/media/remoting/metrics.h12
-rw-r--r--chromium/media/remoting/mock_receiver_controller.cc12
-rw-r--r--chromium/media/remoting/mock_receiver_controller.h2
-rw-r--r--chromium/media/remoting/proto_enum_utils.cc120
-rw-r--r--chromium/media/remoting/proto_utils.cc18
-rw-r--r--chromium/media/remoting/proto_utils_unittest.cc4
-rw-r--r--chromium/media/remoting/receiver.cc42
-rw-r--r--chromium/media/remoting/receiver.h13
-rw-r--r--chromium/media/remoting/receiver_controller.cc13
-rw-r--r--chromium/media/remoting/receiver_controller.h17
-rw-r--r--chromium/media/remoting/receiver_unittest.cc124
-rw-r--r--chromium/media/remoting/remoting_renderer_factory.cc36
-rw-r--r--chromium/media/remoting/remoting_renderer_factory.h10
-rw-r--r--chromium/media/remoting/renderer_controller.cc99
-rw-r--r--chromium/media/remoting/renderer_controller.h17
-rw-r--r--chromium/media/remoting/renderer_controller_unittest.cc45
-rw-r--r--chromium/media/remoting/rpc_broker.cc109
-rw-r--r--chromium/media/remoting/rpc_broker.h109
-rw-r--r--chromium/media/remoting/rpc_broker_unittest.cc252
-rw-r--r--chromium/media/remoting/stream_provider.cc68
-rw-r--r--chromium/media/remoting/stream_provider.h19
-rw-r--r--chromium/media/remoting/stream_provider_unittest.cc55
-rw-r--r--chromium/media/renderers/BUILD.gn6
-rw-r--r--chromium/media/renderers/audio_renderer_impl.cc13
-rw-r--r--chromium/media/renderers/audio_renderer_impl.h6
-rw-r--r--chromium/media/renderers/audio_renderer_impl_unittest.cc38
-rw-r--r--chromium/media/renderers/decrypting_renderer.h6
-rw-r--r--chromium/media/renderers/decrypting_renderer_factory.h7
-rw-r--r--chromium/media/renderers/default_decoder_factory.h6
-rw-r--r--chromium/media/renderers/default_renderer_factory.h6
-rw-r--r--chromium/media/renderers/paint_canvas_video_renderer.cc60
-rw-r--r--chromium/media/renderers/paint_canvas_video_renderer.h9
-rw-r--r--chromium/media/renderers/paint_canvas_video_renderer_unittest.cc60
-rw-r--r--chromium/media/renderers/renderer_impl.h10
-rw-r--r--chromium/media/renderers/renderer_impl_unittest.cc22
-rw-r--r--chromium/media/renderers/shared_image_video_frame_test_utils.cc7
-rw-r--r--chromium/media/renderers/video_frame_rgba_to_yuva_converter.cc111
-rw-r--r--chromium/media/renderers/video_overlay_factory.h6
-rw-r--r--chromium/media/renderers/video_renderer_impl.cc3
-rw-r--r--chromium/media/renderers/video_renderer_impl.h6
-rw-r--r--chromium/media/renderers/video_renderer_impl_unittest.cc64
-rw-r--r--chromium/media/renderers/video_resource_updater.cc19
-rw-r--r--chromium/media/renderers/video_resource_updater.h5
-rw-r--r--chromium/media/renderers/video_resource_updater_unittest.cc8
-rw-r--r--chromium/media/renderers/win/media_engine_notify_impl.cc2
-rw-r--r--chromium/media/renderers/win/media_engine_notify_impl.h14
-rw-r--r--chromium/media/renderers/win/media_foundation_audio_stream.cc59
-rw-r--r--chromium/media/renderers/win/media_foundation_audio_stream.h2
-rw-r--r--chromium/media/renderers/win/media_foundation_protection_manager.cc2
-rw-r--r--chromium/media/renderers/win/media_foundation_renderer.cc52
-rw-r--r--chromium/media/renderers/win/media_foundation_renderer.h31
-rw-r--r--chromium/media/renderers/win/media_foundation_renderer_extension.h4
-rw-r--r--chromium/media/renderers/win/media_foundation_renderer_integration_test.cc13
-rw-r--r--chromium/media/renderers/win/media_foundation_renderer_unittest.cc4
-rw-r--r--chromium/media/renderers/win/media_foundation_source_wrapper.cc5
-rw-r--r--chromium/media/renderers/win/media_foundation_source_wrapper.h3
-rw-r--r--chromium/media/renderers/win/media_foundation_stream_wrapper.cc37
-rw-r--r--chromium/media/renderers/win/media_foundation_stream_wrapper.h13
-rw-r--r--chromium/media/renderers/win/media_foundation_video_stream.cc37
-rw-r--r--chromium/media/renderers/win/media_foundation_video_stream.h1
-rw-r--r--chromium/media/video/BUILD.gn1
-rw-r--r--chromium/media/video/fake_video_encode_accelerator.h7
-rw-r--r--chromium/media/video/gpu_memory_buffer_video_frame_pool.cc233
-rw-r--r--chromium/media/video/gpu_memory_buffer_video_frame_pool.h7
-rw-r--r--chromium/media/video/gpu_memory_buffer_video_frame_pool_unittest.cc6
-rw-r--r--chromium/media/video/gpu_video_accelerator_factories.h9
-rw-r--r--chromium/media/video/h264_bit_reader.h6
-rw-r--r--chromium/media/video/h264_parser.h6
-rw-r--r--chromium/media/video/h264_poc.h6
-rw-r--r--chromium/media/video/h265_parser.cc27
-rw-r--r--chromium/media/video/h265_parser.h10
-rw-r--r--chromium/media/video/h265_parser_unittest.cc4
-rw-r--r--chromium/media/video/mock_gpu_video_accelerator_factories.h12
-rw-r--r--chromium/media/video/mock_video_decode_accelerator.h6
-rw-r--r--chromium/media/video/mock_video_encode_accelerator.h6
-rw-r--r--chromium/media/video/openh264_video_encoder.cc19
-rw-r--r--chromium/media/video/renderable_gpu_memory_buffer_video_frame_pool.cc36
-rw-r--r--chromium/media/video/software_video_encoder_test.cc105
-rw-r--r--chromium/media/video/supported_video_decoder_config_unittest.cc2
-rw-r--r--chromium/media/video/video_encode_accelerator.cc31
-rw-r--r--chromium/media/video/video_encode_accelerator.h31
-rw-r--r--chromium/media/video/video_encode_accelerator_adapter.cc22
-rw-r--r--chromium/media/video/video_encode_accelerator_adapter_test.cc54
-rw-r--r--chromium/media/video/video_encoder_fallback_test.cc28
-rw-r--r--chromium/media/video/vpx_video_encoder.cc26
-rw-r--r--chromium/media/webrtc/BUILD.gn17
-rw-r--r--chromium/media/webrtc/OWNERS6
-rw-r--r--chromium/media/webrtc/audio_delay_stats_reporter.cc4
-rw-r--r--chromium/media/webrtc/audio_delay_stats_reporter.h6
-rw-r--r--chromium/media/webrtc/helpers.cc262
-rw-r--r--chromium/media/webrtc/helpers.h29
-rw-r--r--chromium/media/webrtc/helpers_unittests.cc401
-rw-r--r--chromium/media/webrtc/webrtc_features.cc (renamed from chromium/media/webrtc/webrtc_switches.cc)34
-rw-r--r--chromium/media/webrtc/webrtc_features.h (renamed from chromium/media/webrtc/webrtc_switches.h)17
1194 files changed, 16499 insertions, 9877 deletions
diff --git a/chromium/media/BUILD.gn b/chromium/media/BUILD.gn
index b3e92b8dc30..d5c158c08f5 100644
--- a/chromium/media/BUILD.gn
+++ b/chromium/media/BUILD.gn
@@ -5,6 +5,7 @@
import("//build/buildflag_header.gni")
import("//build/config/android/config.gni")
import("//build/config/arm.gni")
+import("//build/config/chromecast_build.gni")
import("//build/config/features.gni")
import("//build/config/linux/pkg_config.gni")
import("//build/config/ui.gni")
@@ -21,6 +22,7 @@ buildflag_header("media_buildflags") {
"ALTERNATE_CDM_STORAGE_ID_KEY=\"$alternate_cdm_storage_id_key\"",
"CDM_PLATFORM_SPECIFIC_PATH=\"$cdm_platform_specific_path\"",
"ENABLE_PLATFORM_AC3_EAC3_AUDIO=$enable_platform_ac3_eac3_audio",
+ "ENABLE_CAST_AUDIO_RENDERER=$enable_cast_audio_renderer",
"ENABLE_CDM_HOST_VERIFICATION=$enable_cdm_host_verification",
"ENABLE_CDM_STORAGE_ID=$enable_cdm_storage_id",
"ENABLE_DAV1D_DECODER=$enable_dav1d_decoder",
@@ -128,9 +130,6 @@ component("media") {
public_deps += [ "//media/base/mac" ]
}
- if (use_x11) {
- deps += [ "//ui/base/x" ]
- }
if (use_ozone) {
deps += [ "//ui/ozone" ]
}
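
The new ENABLE_CAST_AUDIO_RENDERER entry above goes through GN's buildflag_header rule, which generates a header that C++ code reads with the BUILDFLAG() macro rather than a bare #define. A minimal sketch of how such a flag is typically consumed (the IsCastAudioRendererEnabled() helper is invented for illustration; the header path follows the usual media/media_buildflags.h convention):

// Illustrative only: reading a GN-generated buildflag from C++.
// IsCastAudioRendererEnabled() is a hypothetical helper.
#include "build/buildflag.h"
#include "media/media_buildflags.h"

bool IsCastAudioRendererEnabled() {
#if BUILDFLAG(ENABLE_CAST_AUDIO_RENDERER)
  return true;   // Compiled in when the GN arg enable_cast_audio_renderer is set.
#else
  return false;
#endif
}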
diff --git a/chromium/media/COMMON_METADATA b/chromium/media/COMMON_METADATA
new file mode 100644
index 00000000000..0198eda079e
--- /dev/null
+++ b/chromium/media/COMMON_METADATA
@@ -0,0 +1,3 @@
+monorail {
+ component: "Internals>Media"
+}
\ No newline at end of file
diff --git a/chromium/media/DIR_METADATA b/chromium/media/DIR_METADATA
index 2dc14a41ec9..69cd629e16d 100644
--- a/chromium/media/DIR_METADATA
+++ b/chromium/media/DIR_METADATA
@@ -6,6 +6,4 @@
# For the schema of this file, see Metadata message:
# https://source.chromium.org/chromium/infra/infra/+/main:go/src/infra/tools/dirmd/proto/dir_metadata.proto
-monorail {
- component: "Internals>Media"
-}
\ No newline at end of file
+mixins: "//media/COMMON_METADATA"
diff --git a/chromium/media/OWNERS b/chromium/media/OWNERS
index 7a377c6d96f..262c8c221de 100644
--- a/chromium/media/OWNERS
+++ b/chromium/media/OWNERS
@@ -22,7 +22,7 @@ wolenetz@chromium.org
xhwang@chromium.org
# For Fuchsia-specific changes:
-per-file *_fuchsia*=file://build/fuchsia/OWNERS
+per-file ..._fuchsia*=file://build/fuchsia/OWNERS
# For GpuMemoryBuffer-related changes:
per-file *gpu_memory_buffer*=dcastagna@chromium.org
diff --git a/chromium/media/audio/agc_audio_stream.h b/chromium/media/audio/agc_audio_stream.h
index 1d387f49870..41448c55ff8 100644
--- a/chromium/media/audio/agc_audio_stream.h
+++ b/chromium/media/audio/agc_audio_stream.h
@@ -101,9 +101,8 @@ class MEDIA_EXPORT AgcAudioStream : public AudioInterface {
// volume from 0.
QueryAndStoreNewMicrophoneVolume();
- timer_.Start(FROM_HERE,
- base::TimeDelta::FromMilliseconds(kIntervalBetweenVolumeUpdatesMs),
- this, &AgcAudioStream::QueryAndStoreNewMicrophoneVolume);
+ timer_.Start(FROM_HERE, base::Milliseconds(kIntervalBetweenVolumeUpdatesMs),
+ this, &AgcAudioStream::QueryAndStoreNewMicrophoneVolume);
}
// Stops the periodic timer which periodically checks and updates the
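
This is the first of many hunks in this update that replace the verbose base::TimeDelta::FromMilliseconds()/FromSeconds()/FromMicroseconds() factories with the shorter base::Milliseconds()/Seconds()/Microseconds() helpers; both spellings build the same base::TimeDelta. A minimal sketch of the rename (kPollIntervalMs and PollInterval() are placeholders):

// Illustrative only: the base::TimeDelta factory rename applied throughout
// this patch. kPollIntervalMs is a placeholder constant.
#include "base/time/time.h"

constexpr int kPollIntervalMs = 100;

base::TimeDelta PollInterval() {
  // Before this update the same value was spelled
  //   base::TimeDelta::FromMilliseconds(kPollIntervalMs);
  // The new helper returns an identical TimeDelta with a shorter call site.
  return base::Milliseconds(kPollIntervalMs);
}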
diff --git a/chromium/media/audio/alive_checker.h b/chromium/media/audio/alive_checker.h
index ed06e14042c..bea12bc381f 100644
--- a/chromium/media/audio/alive_checker.h
+++ b/chromium/media/audio/alive_checker.h
@@ -69,6 +69,9 @@ class MEDIA_EXPORT AliveChecker {
PowerObserverHelperFactoryCallback
power_observer_helper_factory_callback);
+ AliveChecker(const AliveChecker&) = delete;
+ AliveChecker& operator=(const AliveChecker&) = delete;
+
~AliveChecker();
// Start and stop checking if the client is alive.
@@ -136,8 +139,6 @@ class MEDIA_EXPORT AliveChecker {
std::unique_ptr<PowerObserverHelper> power_observer_;
base::WeakPtrFactory<AliveChecker> weak_factory_{this};
-
- DISALLOW_COPY_AND_ASSIGN(AliveChecker);
};
} // namespace media
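
The other cleanup that recurs throughout this update also appears here: the private DISALLOW_COPY_AND_ASSIGN() macro is removed in favor of explicitly deleted copy operations declared in the public section. A minimal sketch of the pattern on a hypothetical Widget class:

// Illustrative only: the copy-prevention style this patch migrates to.
// Widget is a hypothetical class.
class Widget {
 public:
  Widget() = default;

  // Replaces the old private DISALLOW_COPY_AND_ASSIGN(Widget); macro.
  Widget(const Widget&) = delete;
  Widget& operator=(const Widget&) = delete;

  ~Widget() = default;
};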
diff --git a/chromium/media/audio/alive_checker_unittest.cc b/chromium/media/audio/alive_checker_unittest.cc
index a2f13bd3129..729cf867c09 100644
--- a/chromium/media/audio/alive_checker_unittest.cc
+++ b/chromium/media/audio/alive_checker_unittest.cc
@@ -182,8 +182,7 @@ class AliveCheckerTest : public testing::Test {
alive_checker_ = std::make_unique<AliveChecker>(
base::BindRepeating(&AliveCheckerTest::OnDetectedDead,
base::Unretained(this)),
- base::TimeDelta::FromMilliseconds(kCheckIntervalMs),
- base::TimeDelta::FromMilliseconds(kTimeoutMs),
+ base::Milliseconds(kCheckIntervalMs), base::Milliseconds(kTimeoutMs),
stop_at_first_alive_notification,
base::BindOnce(&AliveCheckerTest::CreatePowerObserverHelper,
base::Unretained(this)));
@@ -191,8 +190,7 @@ class AliveCheckerTest : public testing::Test {
alive_checker_ = std::make_unique<AliveChecker>(
base::BindRepeating(&AliveCheckerTest::OnDetectedDead,
base::Unretained(this)),
- base::TimeDelta::FromMilliseconds(kCheckIntervalMs),
- base::TimeDelta::FromMilliseconds(kTimeoutMs),
+ base::Milliseconds(kCheckIntervalMs), base::Milliseconds(kTimeoutMs),
stop_at_first_alive_notification, false);
}
@@ -233,7 +231,7 @@ TEST_F(AliveCheckerTest, DISABLED_StartStop) {
// It can take up to the timeout + the check interval until detection. Add a
// margin to this.
EXPECT_FALSE(WaitUntilDetectedDeadWithTimeout(
- base::TimeDelta::FromMilliseconds(kTimeoutMs + kCheckIntervalMs + 10)));
+ base::Milliseconds(kTimeoutMs + kCheckIntervalMs + 10)));
EXPECT_FALSE(GetDetectedDead());
}
@@ -253,7 +251,7 @@ TEST_F(AliveCheckerTest, NoAliveNotificationsDetectTwice) {
// margin to this. The detect state should still be that we have detected
// dead.
EXPECT_FALSE(WaitUntilDetectedDeadWithTimeout(
- base::TimeDelta::FromMilliseconds(kTimeoutMs + kCheckIntervalMs + 10)));
+ base::Milliseconds(kTimeoutMs + kCheckIntervalMs + 10)));
EXPECT_TRUE(GetDetectedDead());
// Start again, the detect state should be reset.
@@ -274,8 +272,7 @@ TEST_F(AliveCheckerTest, DISABLED_NotifyThenStop) {
StartAliveChecker();
EXPECT_FALSE(GetDetectedDead());
- NotifyAliveMultipleTimes(
- 10, base::TimeDelta::FromMilliseconds(kNotifyIntervalMs));
+ NotifyAliveMultipleTimes(10, base::Milliseconds(kNotifyIntervalMs));
EXPECT_FALSE(GetDetectedDead());
StopAliveChecker();
@@ -284,7 +281,7 @@ TEST_F(AliveCheckerTest, DISABLED_NotifyThenStop) {
// It can take up to the timeout + the check interval until detection. Add a
// margin to this.
EXPECT_FALSE(WaitUntilDetectedDeadWithTimeout(
- base::TimeDelta::FromMilliseconds(kTimeoutMs + kCheckIntervalMs + 10)));
+ base::Milliseconds(kTimeoutMs + kCheckIntervalMs + 10)));
EXPECT_FALSE(GetDetectedDead());
}
@@ -296,15 +293,13 @@ TEST_F(AliveCheckerTest, DISABLED_NotifyThenDetectDead) {
CreateAliveChecker(false, false);
StartAliveChecker();
- NotifyAliveMultipleTimes(
- 10, base::TimeDelta::FromMilliseconds(kNotifyIntervalMs));
+ NotifyAliveMultipleTimes(10, base::Milliseconds(kNotifyIntervalMs));
WaitUntilDetectedDead();
EXPECT_TRUE(GetDetectedDead());
StartAliveChecker();
EXPECT_FALSE(GetDetectedDead());
- NotifyAliveMultipleTimes(
- 10, base::TimeDelta::FromMilliseconds(kNotifyIntervalMs));
+ NotifyAliveMultipleTimes(10, base::Milliseconds(kNotifyIntervalMs));
EXPECT_FALSE(GetDetectedDead());
WaitUntilDetectedDead();
EXPECT_TRUE(GetDetectedDead());
@@ -321,7 +316,7 @@ TEST_F(AliveCheckerTest, StopAtFirstAliveNotification_DoNotify) {
// It can take up to the timeout + the check interval until detection. Add a
// margin to this.
EXPECT_FALSE(WaitUntilDetectedDeadWithTimeout(
- base::TimeDelta::FromMilliseconds(kTimeoutMs + kCheckIntervalMs + 10)));
+ base::Milliseconds(kTimeoutMs + kCheckIntervalMs + 10)));
EXPECT_FALSE(GetDetectedDead());
}
@@ -351,8 +346,7 @@ TEST_F(AliveCheckerTest, DISABLED_SuspendResume_StartBeforeSuspend) {
StartAliveChecker();
EXPECT_FALSE(GetDetectedDead());
- NotifyAliveMultipleTimes(
- 10, base::TimeDelta::FromMilliseconds(kNotifyIntervalMs));
+ NotifyAliveMultipleTimes(10, base::Milliseconds(kNotifyIntervalMs));
alive_checker_thread_.task_runner()->PostTask(
FROM_HERE, base::BindOnce(&MockPowerObserverHelper::Suspend,
@@ -361,7 +355,7 @@ TEST_F(AliveCheckerTest, DISABLED_SuspendResume_StartBeforeSuspend) {
// It can take up to the timeout + the check interval until detection. Add a
// margin to this.
EXPECT_FALSE(WaitUntilDetectedDeadWithTimeout(
- base::TimeDelta::FromMilliseconds(kTimeoutMs + kCheckIntervalMs + 10)));
+ base::Milliseconds(kTimeoutMs + kCheckIntervalMs + 10)));
EXPECT_FALSE(GetDetectedDead());
alive_checker_thread_.task_runner()->PostTask(
@@ -389,7 +383,7 @@ TEST_F(AliveCheckerTest, SuspendResume_StartBetweenSuspendAndResume) {
// It can take up to the timeout + the check interval until detection. Add a
// margin to this.
EXPECT_FALSE(WaitUntilDetectedDeadWithTimeout(
- base::TimeDelta::FromMilliseconds(kTimeoutMs + kCheckIntervalMs + 10)));
+ base::Milliseconds(kTimeoutMs + kCheckIntervalMs + 10)));
EXPECT_FALSE(GetDetectedDead());
alive_checker_thread_.task_runner()->PostTask(
@@ -414,7 +408,7 @@ TEST_F(AliveCheckerTest, SuspendResumeWithAutoStop_NotifyBeforeSuspend) {
// It can take up to the timeout + the check interval until detection. Add a
// margin to this.
EXPECT_FALSE(WaitUntilDetectedDeadWithTimeout(
- base::TimeDelta::FromMilliseconds(kTimeoutMs + kCheckIntervalMs + 10)));
+ base::Milliseconds(kTimeoutMs + kCheckIntervalMs + 10)));
EXPECT_FALSE(GetDetectedDead());
alive_checker_thread_.task_runner()->PostTask(
@@ -422,7 +416,7 @@ TEST_F(AliveCheckerTest, SuspendResumeWithAutoStop_NotifyBeforeSuspend) {
base::Unretained(mock_power_observer_helper_)));
EXPECT_FALSE(WaitUntilDetectedDeadWithTimeout(
- base::TimeDelta::FromMilliseconds(kTimeoutMs + kCheckIntervalMs + 10)));
+ base::Milliseconds(kTimeoutMs + kCheckIntervalMs + 10)));
EXPECT_FALSE(GetDetectedDead());
alive_checker_thread_.task_runner()->PostTask(
@@ -430,7 +424,7 @@ TEST_F(AliveCheckerTest, SuspendResumeWithAutoStop_NotifyBeforeSuspend) {
base::Unretained(mock_power_observer_helper_)));
EXPECT_FALSE(WaitUntilDetectedDeadWithTimeout(
- base::TimeDelta::FromMilliseconds(kTimeoutMs + kCheckIntervalMs + 10)));
+ base::Milliseconds(kTimeoutMs + kCheckIntervalMs + 10)));
EXPECT_FALSE(GetDetectedDead());
}
@@ -449,7 +443,7 @@ TEST_F(AliveCheckerTest,
// It can take up to the timeout + the check interval until detection. Add a
// margin to this.
EXPECT_FALSE(WaitUntilDetectedDeadWithTimeout(
- base::TimeDelta::FromMilliseconds(kTimeoutMs + kCheckIntervalMs + 10)));
+ base::Milliseconds(kTimeoutMs + kCheckIntervalMs + 10)));
EXPECT_FALSE(GetDetectedDead());
StartAliveChecker();
@@ -460,7 +454,7 @@ TEST_F(AliveCheckerTest,
base::Unretained(mock_power_observer_helper_)));
EXPECT_FALSE(WaitUntilDetectedDeadWithTimeout(
- base::TimeDelta::FromMilliseconds(kTimeoutMs + kCheckIntervalMs + 10)));
+ base::Milliseconds(kTimeoutMs + kCheckIntervalMs + 10)));
EXPECT_FALSE(GetDetectedDead());
alive_checker_thread_.task_runner()->PostTask(
@@ -490,7 +484,7 @@ TEST_F(AliveCheckerTest,
// It can take up to the timeout + the check interval until detection. Add a
// margin to this.
EXPECT_FALSE(WaitUntilDetectedDeadWithTimeout(
- base::TimeDelta::FromMilliseconds(kTimeoutMs + kCheckIntervalMs + 10)));
+ base::Milliseconds(kTimeoutMs + kCheckIntervalMs + 10)));
EXPECT_FALSE(GetDetectedDead());
alive_checker_thread_.task_runner()->PostTask(
@@ -498,7 +492,7 @@ TEST_F(AliveCheckerTest,
base::Unretained(mock_power_observer_helper_)));
EXPECT_FALSE(WaitUntilDetectedDeadWithTimeout(
- base::TimeDelta::FromMilliseconds(kTimeoutMs + kCheckIntervalMs + 10)));
+ base::Milliseconds(kTimeoutMs + kCheckIntervalMs + 10)));
EXPECT_FALSE(GetDetectedDead());
}
@@ -524,7 +518,7 @@ TEST_F(AliveCheckerTest, SuspendResumeWithAutoStop_NotifyAfterResume) {
// It can take up to the timeout + the check interval until detection. Add a
// margin to this.
EXPECT_FALSE(WaitUntilDetectedDeadWithTimeout(
- base::TimeDelta::FromMilliseconds(kTimeoutMs + kCheckIntervalMs + 10)));
+ base::Milliseconds(kTimeoutMs + kCheckIntervalMs + 10)));
EXPECT_FALSE(GetDetectedDead());
}
@@ -544,7 +538,7 @@ TEST_F(AliveCheckerTest, SuspendResumeWithAutoStop_DontNotify) {
// It can take up to the timeout + the check interval until detection. Add a
// margin to this.
EXPECT_FALSE(WaitUntilDetectedDeadWithTimeout(
- base::TimeDelta::FromMilliseconds(kTimeoutMs + kCheckIntervalMs + 10)));
+ base::Milliseconds(kTimeoutMs + kCheckIntervalMs + 10)));
EXPECT_FALSE(GetDetectedDead());
alive_checker_thread_.task_runner()->PostTask(
diff --git a/chromium/media/audio/alsa/alsa_input.cc b/chromium/media/audio/alsa/alsa_input.cc
index a635cb5bc53..e4eaa0b1701 100644
--- a/chromium/media/audio/alsa/alsa_input.cc
+++ b/chromium/media/audio/alsa/alsa_input.cc
@@ -38,7 +38,7 @@ AlsaPcmInputStream::AlsaPcmInputStream(AudioManagerBase* audio_manager,
params_(params),
bytes_per_buffer_(params.GetBytesPerBuffer(kSampleFormat)),
wrapper_(wrapper),
- buffer_duration_(base::TimeDelta::FromMicroseconds(
+ buffer_duration_(base::Microseconds(
params.frames_per_buffer() * base::Time::kMicrosecondsPerSecond /
static_cast<float>(params.sample_rate()))),
callback_(nullptr),
@@ -220,7 +220,7 @@ void AlsaPcmInputStream::ReadAudio() {
<< wrapper_->StrError(avail_frames);
avail_frames = 0; // Error getting number of avail frames, set it to 0
}
- base::TimeDelta hardware_delay = base::TimeDelta::FromSecondsD(
+ base::TimeDelta hardware_delay = base::Seconds(
avail_frames / static_cast<double>(params_.sample_rate()));
callback_->OnData(audio_bus_.get(),
diff --git a/chromium/media/audio/alsa/alsa_input.h b/chromium/media/audio/alsa/alsa_input.h
index 6301fba9afc..f96b39136f8 100644
--- a/chromium/media/audio/alsa/alsa_input.h
+++ b/chromium/media/audio/alsa/alsa_input.h
@@ -43,6 +43,9 @@ class MEDIA_EXPORT AlsaPcmInputStream
const AudioParameters& params,
AlsaWrapper* wrapper);
+ AlsaPcmInputStream(const AlsaPcmInputStream&) = delete;
+ AlsaPcmInputStream& operator=(const AlsaPcmInputStream&) = delete;
+
~AlsaPcmInputStream() override;
// Implementation of AudioInputStream.
@@ -92,8 +95,6 @@ class MEDIA_EXPORT AlsaPcmInputStream
std::unique_ptr<AudioBus> audio_bus_;
base::Thread capture_thread_;
bool running_;
-
- DISALLOW_COPY_AND_ASSIGN(AlsaPcmInputStream);
};
} // namespace media
diff --git a/chromium/media/audio/alsa/alsa_output.cc b/chromium/media/audio/alsa/alsa_output.cc
index a3e8dbded62..a05f60ddbd5 100644
--- a/chromium/media/audio/alsa/alsa_output.cc
+++ b/chromium/media/audio/alsa/alsa_output.cc
@@ -156,7 +156,7 @@ AlsaPcmOutputStream::AlsaPcmOutputStream(const std::string& device_name,
bytes_per_frame_(params.GetBytesPerFrame(kSampleFormat)),
packet_size_(params.GetBytesPerBuffer(kSampleFormat)),
latency_(std::max(
- base::TimeDelta::FromMicroseconds(kMinLatencyMicros),
+ base::Microseconds(kMinLatencyMicros),
AudioTimestampHelper::FramesToTime(params.frames_per_buffer() * 2,
sample_rate_))),
bytes_per_output_frame_(bytes_per_frame_),
@@ -525,7 +525,7 @@ void AlsaPcmOutputStream::ScheduleNextWrite(bool source_exhausted) {
// Polling in this manner allows us to ensure a more consistent callback
// schedule. In testing this yields a variance of +/- 5ms versus the non-
// polling strategy which is around +/- 30ms and bimodal.
- next_fill_time = base::TimeDelta::FromMilliseconds(5);
+ next_fill_time = base::Milliseconds(5);
} else if (available_frames < kTargetFramesAvailable) {
// Schedule the next write for the moment when the available buffer of the
// sound card hits |kTargetFramesAvailable|.
@@ -538,7 +538,7 @@ void AlsaPcmOutputStream::ScheduleNextWrite(bool source_exhausted) {
} else {
// The sound card has frames available, but our source is exhausted, so
// avoid busy looping by delaying a bit.
- next_fill_time = base::TimeDelta::FromMilliseconds(10);
+ next_fill_time = base::Milliseconds(10);
}
task_runner_->PostDelayedTask(FROM_HERE,
diff --git a/chromium/media/audio/alsa/alsa_output.h b/chromium/media/audio/alsa/alsa_output.h
index 9922eae8763..d10bbba7991 100644
--- a/chromium/media/audio/alsa/alsa_output.h
+++ b/chromium/media/audio/alsa/alsa_output.h
@@ -74,6 +74,9 @@ class MEDIA_EXPORT AlsaPcmOutputStream : public AudioOutputStream {
AlsaWrapper* wrapper,
AudioManagerBase* manager);
+ AlsaPcmOutputStream(const AlsaPcmOutputStream&) = delete;
+ AlsaPcmOutputStream& operator=(const AlsaPcmOutputStream&) = delete;
+
~AlsaPcmOutputStream() override;
// Implementation of AudioOutputStream.
@@ -220,8 +223,6 @@ class MEDIA_EXPORT AlsaPcmOutputStream : public AudioOutputStream {
// bound by its lifetime.
// NOTE: Weak pointers must be invalidated before all other member variables.
base::WeakPtrFactory<AlsaPcmOutputStream> weak_factory_{this};
-
- DISALLOW_COPY_AND_ASSIGN(AlsaPcmOutputStream);
};
MEDIA_EXPORT std::ostream& operator<<(std::ostream& os,
diff --git a/chromium/media/audio/alsa/alsa_wrapper.h b/chromium/media/audio/alsa/alsa_wrapper.h
index e5920ceaf9f..30905b4d2f0 100644
--- a/chromium/media/audio/alsa/alsa_wrapper.h
+++ b/chromium/media/audio/alsa/alsa_wrapper.h
@@ -19,6 +19,10 @@ namespace media {
class MEDIA_EXPORT AlsaWrapper {
public:
AlsaWrapper();
+
+ AlsaWrapper(const AlsaWrapper&) = delete;
+ AlsaWrapper& operator=(const AlsaWrapper&) = delete;
+
virtual ~AlsaWrapper();
virtual int DeviceNameHint(int card, const char* iface, void*** hints);
@@ -152,8 +156,6 @@ class MEDIA_EXPORT AlsaWrapper {
virtual void MixerSelemIdFree(snd_mixer_selem_id_t* obj);
virtual const char* StrError(int errnum);
-
- DISALLOW_COPY_AND_ASSIGN(AlsaWrapper);
};
} // namespace media
diff --git a/chromium/media/audio/alsa/audio_manager_alsa.h b/chromium/media/audio/alsa/audio_manager_alsa.h
index ab146785c55..8b15c0a40ee 100644
--- a/chromium/media/audio/alsa/audio_manager_alsa.h
+++ b/chromium/media/audio/alsa/audio_manager_alsa.h
@@ -22,6 +22,10 @@ class MEDIA_EXPORT AudioManagerAlsa : public AudioManagerBase {
public:
AudioManagerAlsa(std::unique_ptr<AudioThread> audio_thread,
AudioLogFactory* audio_log_factory);
+
+ AudioManagerAlsa(const AudioManagerAlsa&) = delete;
+ AudioManagerAlsa& operator=(const AudioManagerAlsa&) = delete;
+
~AudioManagerAlsa() override;
// Implementation of AudioManager.
@@ -88,8 +92,6 @@ class MEDIA_EXPORT AudioManagerAlsa : public AudioManagerBase {
const std::string& device_id);
std::unique_ptr<AlsaWrapper> wrapper_;
-
- DISALLOW_COPY_AND_ASSIGN(AudioManagerAlsa);
};
} // namespace media
diff --git a/chromium/media/audio/alsa/mock_alsa_wrapper.h b/chromium/media/audio/alsa/mock_alsa_wrapper.h
index 04aa2d0eba5..91371ef610b 100644
--- a/chromium/media/audio/alsa/mock_alsa_wrapper.h
+++ b/chromium/media/audio/alsa/mock_alsa_wrapper.h
@@ -15,6 +15,9 @@ class MockAlsaWrapper : public AlsaWrapper {
public:
MockAlsaWrapper();
+ MockAlsaWrapper(const MockAlsaWrapper&) = delete;
+ MockAlsaWrapper& operator=(const MockAlsaWrapper&) = delete;
+
~MockAlsaWrapper() override;
MOCK_METHOD3(DeviceNameHint, int(int card, const char* iface, void*** hints));
@@ -181,9 +184,6 @@ class MockAlsaWrapper : public AlsaWrapper {
MOCK_METHOD1(MixerSelemIdMalloc, int(snd_mixer_selem_id_t** ptr));
MOCK_METHOD1(MixerSelemIdFree, void(snd_mixer_selem_id_t* obj));
MOCK_METHOD1(StrError, const char*(int errnum));
-
- private:
- DISALLOW_COPY_AND_ASSIGN(MockAlsaWrapper);
};
} // namespace media
diff --git a/chromium/media/audio/android/aaudio_output.cc b/chromium/media/audio/android/aaudio_output.cc
index c054f3910a2..d06a6b894ff 100644
--- a/chromium/media/audio/android/aaudio_output.cc
+++ b/chromium/media/audio/android/aaudio_output.cc
@@ -4,7 +4,7 @@
#include "media/audio/android/aaudio_output.h"
-#include "base/callback_helpers.h"
+#include "base/android/build_info.h"
#include "base/logging.h"
#include "base/thread_annotations.h"
#include "base/threading/sequenced_task_runner_handle.h"
@@ -72,9 +72,15 @@ static aaudio_data_callback_result_t OnAudioDataRequestedCallback(
static void OnStreamErrorCallback(AAudioStream* stream,
void* user_data,
aaudio_result_t error) {
- AAudioOutputStream* output_stream =
- reinterpret_cast<AAudioOutputStream*>(user_data);
- output_stream->OnStreamError(error);
+ AAudioDestructionHelper* destruction_helper =
+ reinterpret_cast<AAudioDestructionHelper*>(user_data);
+
+ AAudioOutputStream* output_stream = destruction_helper->GetAndLockStream();
+
+ if (output_stream)
+ output_stream->OnStreamError(error);
+
+ destruction_helper->UnlockStream();
}
AAudioOutputStream::AAudioOutputStream(AudioManagerAndroid* manager,
@@ -114,18 +120,25 @@ AAudioOutputStream::AAudioOutputStream(AudioManagerAndroid* manager,
AAudioOutputStream::~AAudioOutputStream() {
DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
+
+ if (base::android::SdkVersion::SDK_VERSION_S >=
+ base::android::BuildInfo::GetInstance()->sdk_int()) {
+ // On Android S+, |destruction_helper_| can be destroyed as part of the
+ // normal class teardown.
+ return;
+ }
+
// In R and earlier, it is possible for callbacks to still be running even
// after calling AAudioStream_close(). The code below is a mitigation to work
// around this issue. See crbug.com/1183255.
// Keep |destruction_helper_| alive longer than |this|, so the |user_data|
- // bound to the callback stays valid until the callbacks stop.
+ // bound to the callback stays valid, until the callbacks stop.
base::SequencedTaskRunnerHandle::Get()->PostDelayedTask(
FROM_HERE,
- base::BindOnce(
- base::DoNothing::Once<std::unique_ptr<AAudioDestructionHelper>>(),
- std::move(destruction_helper_)),
- base::TimeDelta::FromMilliseconds(250));
+ base::BindOnce([](std::unique_ptr<AAudioDestructionHelper>) {},
+ std::move(destruction_helper_)),
+ base::Seconds(1));
}
void AAudioOutputStream::Flush() {}
@@ -151,7 +164,8 @@ bool AAudioOutputStream::Open() {
// Callbacks
AAudioStreamBuilder_setDataCallback(builder, OnAudioDataRequestedCallback,
destruction_helper_.get());
- AAudioStreamBuilder_setErrorCallback(builder, OnStreamErrorCallback, this);
+ AAudioStreamBuilder_setErrorCallback(builder, OnStreamErrorCallback,
+ destruction_helper_.get());
result = AAudioStreamBuilder_openStream(builder, &aaudio_stream_);
@@ -274,8 +288,8 @@ base::TimeDelta AAudioOutputStream::GetDelay(base::TimeTicks delay_timestamp) {
AAudioStream_getFramesWritten(aaudio_stream_) - existing_frame_index;
// Calculate the time which the next frame will be presented.
- const base::TimeDelta next_frame_pts = base::TimeDelta::FromNanosecondsD(
- existing_frame_pts + frame_index_delta * ns_per_frame_);
+ const base::TimeDelta next_frame_pts =
+ base::Nanoseconds(existing_frame_pts + frame_index_delta * ns_per_frame_);
// Calculate the latency between write time and presentation time. At startup
// we may end up with negative values here.
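
The error-callback hunk above stops passing the raw AAudioOutputStream pointer as |user_data| and instead passes the same AAudioDestructionHelper already used by the data callback, so an error callback that arrives after teardown sees a null stream rather than a dangling pointer. The helper's real definition lives in the aaudio_output sources; the sketch below is only an approximation of that lock-and-null pattern, with the class name StreamGuard and the Clear() method invented for illustration:

// Illustrative only: a reduced version of the lock-and-null guard used for
// late AAudio callbacks. The real AAudioDestructionHelper differs in detail.
#include "base/synchronization/lock.h"

class AAudioOutputStream;  // Forward declaration for the sketch.

class StreamGuard {
 public:
  explicit StreamGuard(AAudioOutputStream* stream) : stream_(stream) {}

  // Called from the AAudio callback thread.
  AAudioOutputStream* GetAndLockStream() {
    lock_.Acquire();
    return stream_;  // Null once the owning stream has shut down.
  }
  void UnlockStream() { lock_.Release(); }

  // Called by the owning stream during teardown.
  void Clear() {
    base::AutoLock auto_lock(lock_);
    stream_ = nullptr;
  }

 private:
  base::Lock lock_;
  AAudioOutputStream* stream_;
};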
diff --git a/chromium/media/audio/android/aaudio_output.h b/chromium/media/audio/android/aaudio_output.h
index 35343369e05..9576ab3ad31 100644
--- a/chromium/media/audio/android/aaudio_output.h
+++ b/chromium/media/audio/android/aaudio_output.h
@@ -23,6 +23,10 @@ class AAudioOutputStream : public MuteableAudioOutputStream {
AAudioOutputStream(AudioManagerAndroid* manager,
const AudioParameters& params,
aaudio_usage_t usage);
+
+ AAudioOutputStream(const AAudioOutputStream&) = delete;
+ AAudioOutputStream& operator=(const AAudioOutputStream&) = delete;
+
~AAudioOutputStream() override;
// Implementation of MuteableAudioOutputStream.
@@ -73,8 +77,6 @@ class AAudioOutputStream : public MuteableAudioOutputStream {
bool muted_ GUARDED_BY(lock_) = false;
double volume_ GUARDED_BY(lock_) = 1.0;
bool device_changed_ GUARDED_BY(lock_) = false;
-
- DISALLOW_COPY_AND_ASSIGN(AAudioOutputStream);
};
} // namespace media
diff --git a/chromium/media/audio/android/audio_android_unittest.cc b/chromium/media/audio/android/audio_android_unittest.cc
index 20e696fc555..f95b6a5020c 100644
--- a/chromium/media/audio/android/audio_android_unittest.cc
+++ b/chromium/media/audio/android/audio_android_unittest.cc
@@ -84,9 +84,10 @@ std::string LayoutToString(ChannelLayout channel_layout) {
}
double ExpectedTimeBetweenCallbacks(AudioParameters params) {
- return (base::TimeDelta::FromMicroseconds(
- params.frames_per_buffer() * base::Time::kMicrosecondsPerSecond /
- static_cast<double>(params.sample_rate()))).InMillisecondsF();
+ return (base::Microseconds(params.frames_per_buffer() *
+ base::Time::kMicrosecondsPerSecond /
+ static_cast<double>(params.sample_rate())))
+ .InMillisecondsF();
}
// Helper method which verifies that the device list starts with a valid
@@ -178,6 +179,9 @@ class FileAudioSource : public AudioOutputStream::AudioSourceCallback {
DVLOG(0) << "Reading from file: " << file_path.value().c_str();
}
+ FileAudioSource(const FileAudioSource&) = delete;
+ FileAudioSource& operator=(const FileAudioSource&) = delete;
+
~FileAudioSource() override {}
// AudioOutputStream::AudioSourceCallback implementation.
@@ -223,8 +227,6 @@ class FileAudioSource : public AudioOutputStream::AudioSourceCallback {
base::WaitableEvent* event_;
int pos_;
scoped_refptr<DecoderBuffer> file_;
-
- DISALLOW_COPY_AND_ASSIGN(FileAudioSource);
};
// Implements AudioInputStream::AudioInputCallback and writes the recorded
@@ -252,6 +254,9 @@ class FileAudioSink : public AudioInputStream::AudioInputCallback {
DVLOG(0) << "Writing to file: " << file_path.value().c_str();
}
+ FileAudioSink(const FileAudioSink&) = delete;
+ FileAudioSink& operator=(const FileAudioSink&) = delete;
+
~FileAudioSink() override {
int bytes_written = 0;
while (bytes_written < buffer_->forward_capacity()) {
@@ -296,8 +301,6 @@ class FileAudioSink : public AudioInputStream::AudioInputCallback {
AudioParameters params_;
std::unique_ptr<media::SeekableBuffer> buffer_;
FILE* binary_file_;
-
- DISALLOW_COPY_AND_ASSIGN(FileAudioSink);
};
// Implements AudioInputCallback and AudioSourceCallback to support full
@@ -318,6 +321,10 @@ class FullDuplexAudioSinkSource
buffer_.reset(new uint8_t[buffer_size]);
}
+ FullDuplexAudioSinkSource(const FullDuplexAudioSinkSource&) = delete;
+ FullDuplexAudioSinkSource& operator=(const FullDuplexAudioSinkSource&) =
+ delete;
+
~FullDuplexAudioSinkSource() override {}
// AudioInputStream::AudioInputCallback implementation
@@ -399,9 +406,9 @@ class FullDuplexAudioSinkSource
// audio parameters.
double BytesToMilliseconds(int bytes) const {
const int frames = bytes / params_.GetBytesPerFrame(kSampleFormat);
- return (base::TimeDelta::FromMicroseconds(
- frames * base::Time::kMicrosecondsPerSecond /
- static_cast<double>(params_.sample_rate()))).InMillisecondsF();
+ return (base::Microseconds(frames * base::Time::kMicrosecondsPerSecond /
+ static_cast<double>(params_.sample_rate())))
+ .InMillisecondsF();
}
AudioParameters params_;
@@ -410,8 +417,6 @@ class FullDuplexAudioSinkSource
std::unique_ptr<media::SeekableBuffer> fifo_;
std::unique_ptr<uint8_t[]> buffer_;
bool started_;
-
- DISALLOW_COPY_AND_ASSIGN(FullDuplexAudioSinkSource);
};
// Test fixture class for tests which only exercise the output path.
@@ -428,6 +433,9 @@ class AudioAndroidOutputTest : public testing::Test {
base::RunLoop().RunUntilIdle();
}
+ AudioAndroidOutputTest(const AudioAndroidOutputTest&) = delete;
+ AudioAndroidOutputTest& operator=(const AudioAndroidOutputTest&) = delete;
+
~AudioAndroidOutputTest() override {
audio_manager_->Shutdown();
base::RunLoop().RunUntilIdle();
@@ -578,9 +586,6 @@ class AudioAndroidOutputTest : public testing::Test {
AudioOutputStream* audio_output_stream_;
base::TimeTicks start_time_;
base::TimeTicks end_time_;
-
- private:
- DISALLOW_COPY_AND_ASSIGN(AudioAndroidOutputTest);
};
// Test fixture class for tests which exercise the input path, or both input and
@@ -920,7 +925,7 @@ TEST_P(AudioAndroidInputTest,
<< "once per second during this test.";
DVLOG(0) << ">> Speak into the mic and listen to the audio in loopback...";
fflush(stdout);
- base::PlatformThread::Sleep(base::TimeDelta::FromSeconds(20));
+ base::PlatformThread::Sleep(base::Seconds(20));
printf("\n");
StopAndCloseAudioOutputStreamOnAudioThread();
StopAndCloseAudioInputStreamOnAudioThread();
diff --git a/chromium/media/audio/android/audio_manager_android.cc b/chromium/media/audio/android/audio_manager_android.cc
index 5cbcd509ca0..7008e592fb5 100644
--- a/chromium/media/audio/android/audio_manager_android.cc
+++ b/chromium/media/audio/android/audio_manager_android.cc
@@ -348,7 +348,7 @@ bool AudioManagerAndroid::HasOutputVolumeOverride(double* out_volume) const {
base::TimeDelta AudioManagerAndroid::GetOutputLatency() {
DCHECK(GetTaskRunner()->BelongsToCurrentThread());
JNIEnv* env = AttachCurrentThread();
- return base::TimeDelta::FromMilliseconds(
+ return base::Milliseconds(
Java_AudioManagerAndroid_getOutputLatency(env, GetJavaAudioManager()));
}
diff --git a/chromium/media/audio/android/audio_manager_android.h b/chromium/media/audio/android/audio_manager_android.h
index a58a4e1ddf5..92632a8ea78 100644
--- a/chromium/media/audio/android/audio_manager_android.h
+++ b/chromium/media/audio/android/audio_manager_android.h
@@ -22,6 +22,10 @@ class MEDIA_EXPORT AudioManagerAndroid : public AudioManagerBase {
public:
AudioManagerAndroid(std::unique_ptr<AudioThread> audio_thread,
AudioLogFactory* audio_log_factory);
+
+ AudioManagerAndroid(const AudioManagerAndroid&) = delete;
+ AudioManagerAndroid& operator=(const AudioManagerAndroid&) = delete;
+
~AudioManagerAndroid() override;
void InitializeIfNeeded();
@@ -127,8 +131,6 @@ class MEDIA_EXPORT AudioManagerAndroid : public AudioManagerBase {
// If set, overrides volume level on output streams
bool output_volume_override_set_;
double output_volume_override_;
-
- DISALLOW_COPY_AND_ASSIGN(AudioManagerAndroid);
};
} // namespace media
diff --git a/chromium/media/audio/android/audio_track_output_stream.h b/chromium/media/audio/android/audio_track_output_stream.h
index 500671bff04..6bc6e693339 100644
--- a/chromium/media/audio/android/audio_track_output_stream.h
+++ b/chromium/media/audio/android/audio_track_output_stream.h
@@ -22,6 +22,10 @@ class MEDIA_EXPORT AudioTrackOutputStream : public MuteableAudioOutputStream {
public:
AudioTrackOutputStream(AudioManagerBase* manager,
const AudioParameters& params);
+
+ AudioTrackOutputStream(const AudioTrackOutputStream&) = delete;
+ AudioTrackOutputStream& operator=(const AudioTrackOutputStream&) = delete;
+
~AudioTrackOutputStream() override;
// AudioOutputStream implementation.
@@ -59,8 +63,6 @@ class MEDIA_EXPORT AudioTrackOutputStream : public MuteableAudioOutputStream {
// Java AudioTrackOutputStream instance.
base::android::ScopedJavaGlobalRef<jobject> j_audio_output_stream_;
-
- DISALLOW_COPY_AND_ASSIGN(AudioTrackOutputStream);
};
} // namespace media
diff --git a/chromium/media/audio/android/opensles_input.cc b/chromium/media/audio/android/opensles_input.cc
index 73f457979cf..ca104730e09 100644
--- a/chromium/media/audio/android/opensles_input.cc
+++ b/chromium/media/audio/android/opensles_input.cc
@@ -47,8 +47,8 @@ OpenSLESInputStream::OpenSLESInputStream(AudioManagerAndroid* audio_manager,
format_.channelMask = ChannelCountToSLESChannelMask(params.channels());
buffer_size_bytes_ = params.GetBytesPerBuffer(kSampleFormat);
- hardware_delay_ = base::TimeDelta::FromSecondsD(
- params.frames_per_buffer() / static_cast<double>(params.sample_rate()));
+ hardware_delay_ = base::Seconds(params.frames_per_buffer() /
+ static_cast<double>(params.sample_rate()));
memset(&audio_data_, 0, sizeof(audio_data_));
}
diff --git a/chromium/media/audio/android/opensles_input.h b/chromium/media/audio/android/opensles_input.h
index ee354f214ae..f0d86fd73ad 100644
--- a/chromium/media/audio/android/opensles_input.h
+++ b/chromium/media/audio/android/opensles_input.h
@@ -35,6 +35,9 @@ class OpenSLESInputStream : public AudioInputStream {
OpenSLESInputStream(AudioManagerAndroid* manager,
const AudioParameters& params);
+ OpenSLESInputStream(const OpenSLESInputStream&) = delete;
+ OpenSLESInputStream& operator=(const OpenSLESInputStream&) = delete;
+
~OpenSLESInputStream() override;
// Implementation of AudioInputStream.
@@ -107,8 +110,6 @@ class OpenSLESInputStream : public AudioInputStream {
// Set to true at construction if user wants to disable all audio effects.
const bool no_effects_ = false;
-
- DISALLOW_COPY_AND_ASSIGN(OpenSLESInputStream);
};
} // namespace media
diff --git a/chromium/media/audio/android/opensles_output.cc b/chromium/media/audio/android/opensles_output.cc
index cfe217868e9..a15692a49f9 100644
--- a/chromium/media/audio/android/opensles_output.cc
+++ b/chromium/media/audio/android/opensles_output.cc
@@ -161,8 +161,7 @@ void OpenSLESOutputStream::Start(AudioSourceCallback* callback) {
// we're continuing on from this previous position.
uint32_t position_in_ms = 0;
LOG_ON_FAILURE_AND_RETURN((*player_)->GetPosition(player_, &position_in_ms));
- delay_calculator_.SetBaseTimestamp(
- base::TimeDelta::FromMilliseconds(position_in_ms));
+ delay_calculator_.SetBaseTimestamp(base::Milliseconds(position_in_ms));
delay_calculator_.AddFrames(audio_bus_->frames());
started_ = true;
@@ -498,10 +497,10 @@ void OpenSLESOutputStream::CacheHardwareLatencyIfNeeded() {
base::TimeDelta OpenSLESOutputStream::AdjustPositionForHardwareLatency(
uint32_t position_in_ms) {
- base::TimeDelta position = base::TimeDelta::FromMilliseconds(position_in_ms);
+ base::TimeDelta position = base::Milliseconds(position_in_ms);
if (position <= hardware_latency_)
- return base::TimeDelta::FromMilliseconds(0);
+ return base::Milliseconds(0);
return position - hardware_latency_;
}
diff --git a/chromium/media/audio/android/opensles_output.h b/chromium/media/audio/android/opensles_output.h
index aafc81657d5..a932da99c34 100644
--- a/chromium/media/audio/android/opensles_output.h
+++ b/chromium/media/audio/android/opensles_output.h
@@ -37,6 +37,9 @@ class OpenSLESOutputStream : public MuteableAudioOutputStream {
const AudioParameters& params,
SLint32 stream_type);
+ OpenSLESOutputStream(const OpenSLESOutputStream&) = delete;
+ OpenSLESOutputStream& operator=(const OpenSLESOutputStream&) = delete;
+
~OpenSLESOutputStream() override;
// Implementation of MuteableAudioOutputStream.
@@ -150,8 +153,6 @@ class OpenSLESOutputStream : public MuteableAudioOutputStream {
// Adjustment for hardware latency. Needed for some cast targets, since
// OpenSLES's GetPosition doesn't properly account for HAL latency.
base::TimeDelta hardware_latency_;
-
- DISALLOW_COPY_AND_ASSIGN(OpenSLESOutputStream);
};
} // namespace media
diff --git a/chromium/media/audio/audio_debug_file_writer.h b/chromium/media/audio/audio_debug_file_writer.h
index c80dd939218..bf073c704d1 100644
--- a/chromium/media/audio/audio_debug_file_writer.h
+++ b/chromium/media/audio/audio_debug_file_writer.h
@@ -33,6 +33,9 @@ class MEDIA_EXPORT AudioDebugFileWriter {
// Write() must match |params|.
explicit AudioDebugFileWriter(const AudioParameters& params);
+ AudioDebugFileWriter(const AudioDebugFileWriter&) = delete;
+ AudioDebugFileWriter& operator=(const AudioDebugFileWriter&) = delete;
+
virtual ~AudioDebugFileWriter();
// Must be called before calling Write() for the first time after creation or
@@ -70,8 +73,6 @@ class MEDIA_EXPORT AudioDebugFileWriter {
AudioFileWriterUniquePtr file_writer_;
SEQUENCE_CHECKER(client_sequence_checker_);
-
- DISALLOW_COPY_AND_ASSIGN(AudioDebugFileWriter);
};
} // namespace media
diff --git a/chromium/media/audio/audio_debug_recording_helper.h b/chromium/media/audio/audio_debug_recording_helper.h
index 8a54e1a8b94..86d2ed7b368 100644
--- a/chromium/media/audio/audio_debug_recording_helper.h
+++ b/chromium/media/audio/audio_debug_recording_helper.h
@@ -60,6 +60,11 @@ class MEDIA_EXPORT AudioDebugRecordingHelper : public AudioDebugRecorder {
const AudioParameters& params,
scoped_refptr<base::SingleThreadTaskRunner> task_runner,
base::OnceClosure on_destruction_closure);
+
+ AudioDebugRecordingHelper(const AudioDebugRecordingHelper&) = delete;
+ AudioDebugRecordingHelper& operator=(const AudioDebugRecordingHelper&) =
+ delete;
+
~AudioDebugRecordingHelper() override;
// Enable debug recording. Creates |debug_writer_| and runs
@@ -103,7 +108,6 @@ class MEDIA_EXPORT AudioDebugRecordingHelper : public AudioDebugRecorder {
base::OnceClosure on_destruction_closure_;
base::WeakPtrFactory<AudioDebugRecordingHelper> weak_factory_{this};
- DISALLOW_COPY_AND_ASSIGN(AudioDebugRecordingHelper);
};
} // namespace media
diff --git a/chromium/media/audio/audio_debug_recording_helper_unittest.cc b/chromium/media/audio/audio_debug_recording_helper_unittest.cc
index cffb90bc350..99f67b31352 100644
--- a/chromium/media/audio/audio_debug_recording_helper_unittest.cc
+++ b/chromium/media/audio/audio_debug_recording_helper_unittest.cc
@@ -38,6 +38,10 @@ class MockAudioDebugFileWriter : public AudioDebugFileWriter {
public:
explicit MockAudioDebugFileWriter(const AudioParameters& params)
: AudioDebugFileWriter(params), reference_data_(nullptr) {}
+
+ MockAudioDebugFileWriter(const MockAudioDebugFileWriter&) = delete;
+ MockAudioDebugFileWriter& operator=(const MockAudioDebugFileWriter&) = delete;
+
~MockAudioDebugFileWriter() override = default;
MOCK_METHOD1(DoStart, void(bool));
@@ -72,8 +76,6 @@ class MockAudioDebugFileWriter : public AudioDebugFileWriter {
private:
AudioBus* reference_data_;
-
- DISALLOW_COPY_AND_ASSIGN(MockAudioDebugFileWriter);
};
// Sub-class of the helper that overrides the CreateAudioDebugFileWriter
@@ -87,6 +89,12 @@ class AudioDebugRecordingHelperUnderTest : public AudioDebugRecordingHelper {
: AudioDebugRecordingHelper(params,
std::move(task_runner),
std::move(on_destruction_closure)) {}
+
+ AudioDebugRecordingHelperUnderTest(
+ const AudioDebugRecordingHelperUnderTest&) = delete;
+ AudioDebugRecordingHelperUnderTest& operator=(
+ const AudioDebugRecordingHelperUnderTest&) = delete;
+
~AudioDebugRecordingHelperUnderTest() override = default;
private:
@@ -98,13 +106,16 @@ class AudioDebugRecordingHelperUnderTest : public AudioDebugRecordingHelper {
EXPECT_CALL(*writer, DoStart(true));
return base::WrapUnique<AudioDebugFileWriter>(writer);
}
-
- DISALLOW_COPY_AND_ASSIGN(AudioDebugRecordingHelperUnderTest);
};
class AudioDebugRecordingHelperTest : public ::testing::Test {
public:
AudioDebugRecordingHelperTest() {}
+
+ AudioDebugRecordingHelperTest(const AudioDebugRecordingHelperTest&) = delete;
+ AudioDebugRecordingHelperTest& operator=(
+ const AudioDebugRecordingHelperTest&) = delete;
+
~AudioDebugRecordingHelperTest() override = default;
// Helper function that creates a recording helper.
@@ -147,9 +158,6 @@ class AudioDebugRecordingHelperTest : public ::testing::Test {
// The test task environment.
base::test::TaskEnvironment task_environment_;
-
- private:
- DISALLOW_COPY_AND_ASSIGN(AudioDebugRecordingHelperTest);
};
// Creates a helper with an on destruction closure, and verifies that it's run.
diff --git a/chromium/media/audio/audio_debug_recording_manager.h b/chromium/media/audio/audio_debug_recording_manager.h
index cef0a5e43c0..5ea162d6733 100644
--- a/chromium/media/audio/audio_debug_recording_manager.h
+++ b/chromium/media/audio/audio_debug_recording_manager.h
@@ -61,6 +61,11 @@ class MEDIA_EXPORT AudioDebugRecordingManager {
AudioDebugRecordingManager(
scoped_refptr<base::SingleThreadTaskRunner> task_runner);
+
+ AudioDebugRecordingManager(const AudioDebugRecordingManager&) = delete;
+ AudioDebugRecordingManager& operator=(const AudioDebugRecordingManager&) =
+ delete;
+
virtual ~AudioDebugRecordingManager();
// Enables and disables debug recording.
@@ -111,7 +116,6 @@ class MEDIA_EXPORT AudioDebugRecordingManager {
CreateWavFileCallback create_file_callback_;
base::WeakPtrFactory<AudioDebugRecordingManager> weak_factory_{this};
- DISALLOW_COPY_AND_ASSIGN(AudioDebugRecordingManager);
};
} // namespace media
diff --git a/chromium/media/audio/audio_debug_recording_manager_unittest.cc b/chromium/media/audio/audio_debug_recording_manager_unittest.cc
index 46b977fd92e..a9f903e343d 100644
--- a/chromium/media/audio/audio_debug_recording_manager_unittest.cc
+++ b/chromium/media/audio/audio_debug_recording_manager_unittest.cc
@@ -64,6 +64,10 @@ class MockAudioDebugRecordingHelper : public AudioDebugRecordingHelper {
EXPECT_CALL(*this, DoEnableDebugRecording(_, _));
}
+ MockAudioDebugRecordingHelper(const MockAudioDebugRecordingHelper&) = delete;
+ MockAudioDebugRecordingHelper& operator=(
+ const MockAudioDebugRecordingHelper&) = delete;
+
~MockAudioDebugRecordingHelper() override {
if (on_destruction_closure_in_mock_)
std::move(on_destruction_closure_in_mock_).Run();
@@ -84,8 +88,6 @@ class MockAudioDebugRecordingHelper : public AudioDebugRecordingHelper {
// We let the mock run the destruction closure to not rely on the real
// implementation.
base::OnceClosure on_destruction_closure_in_mock_;
-
- DISALLOW_COPY_AND_ASSIGN(MockAudioDebugRecordingHelper);
};
// Sub-class of the manager that overrides the CreateAudioDebugRecordingHelper
@@ -95,6 +97,12 @@ class AudioDebugRecordingManagerUnderTest : public AudioDebugRecordingManager {
AudioDebugRecordingManagerUnderTest(
scoped_refptr<base::SingleThreadTaskRunner> task_runner)
: AudioDebugRecordingManager(std::move(task_runner)) {}
+
+ AudioDebugRecordingManagerUnderTest(
+ const AudioDebugRecordingManagerUnderTest&) = delete;
+ AudioDebugRecordingManagerUnderTest& operator=(
+ const AudioDebugRecordingManagerUnderTest&) = delete;
+
~AudioDebugRecordingManagerUnderTest() override = default;
private:
@@ -106,8 +114,6 @@ class AudioDebugRecordingManagerUnderTest : public AudioDebugRecordingManager {
params, std::move(task_runner),
std::move(on_destruction_closure));
}
-
- DISALLOW_COPY_AND_ASSIGN(AudioDebugRecordingManagerUnderTest);
};
// The test fixture.
@@ -116,6 +122,11 @@ class AudioDebugRecordingManagerTest : public ::testing::Test {
AudioDebugRecordingManagerTest()
: manager_(task_environment_.GetMainThreadTaskRunner()) {}
+ AudioDebugRecordingManagerTest(const AudioDebugRecordingManagerTest&) =
+ delete;
+ AudioDebugRecordingManagerTest& operator=(
+ const AudioDebugRecordingManagerTest&) = delete;
+
~AudioDebugRecordingManagerTest() override = default;
// Registers a source and increases counter for the expected next source id.
@@ -135,9 +146,6 @@ class AudioDebugRecordingManagerTest : public ::testing::Test {
// manager uses a global running id, thus doesn't restart at each
// instantiation.
static uint32_t expected_next_source_id_;
-
- private:
- DISALLOW_COPY_AND_ASSIGN(AudioDebugRecordingManagerTest);
};
uint32_t AudioDebugRecordingManagerTest::expected_next_source_id_ = 1;
diff --git a/chromium/media/audio/audio_debug_recording_session.h b/chromium/media/audio/audio_debug_recording_session.h
index e70a824e2ff..b6b582a6ac8 100644
--- a/chromium/media/audio/audio_debug_recording_session.h
+++ b/chromium/media/audio/audio_debug_recording_session.h
@@ -14,13 +14,14 @@ namespace media {
// are created using audio::CreateAudioDebugRecordingSession.
class MEDIA_EXPORT AudioDebugRecordingSession {
public:
+ AudioDebugRecordingSession(const AudioDebugRecordingSession&) = delete;
+ AudioDebugRecordingSession& operator=(const AudioDebugRecordingSession&) =
+ delete;
+
virtual ~AudioDebugRecordingSession() = default;
protected:
AudioDebugRecordingSession() = default;
-
- private:
- DISALLOW_COPY_AND_ASSIGN(AudioDebugRecordingSession);
};
} // namespace media
diff --git a/chromium/media/audio/audio_debug_recording_session_impl.h b/chromium/media/audio/audio_debug_recording_session_impl.h
index 5f8e6b1700b..957c39bcf69 100644
--- a/chromium/media/audio/audio_debug_recording_session_impl.h
+++ b/chromium/media/audio/audio_debug_recording_session_impl.h
@@ -19,10 +19,13 @@ class MEDIA_EXPORT AudioDebugRecordingSessionImpl
public:
explicit AudioDebugRecordingSessionImpl(
const base::FilePath& debug_recording_file_path);
- ~AudioDebugRecordingSessionImpl() override;
- private:
- DISALLOW_COPY_AND_ASSIGN(AudioDebugRecordingSessionImpl);
+ AudioDebugRecordingSessionImpl(const AudioDebugRecordingSessionImpl&) =
+ delete;
+ AudioDebugRecordingSessionImpl& operator=(
+ const AudioDebugRecordingSessionImpl&) = delete;
+
+ ~AudioDebugRecordingSessionImpl() override;
};
} // namespace media
diff --git a/chromium/media/audio/audio_debug_recording_test.h b/chromium/media/audio/audio_debug_recording_test.h
index 1ccae83206e..2c20ccc044b 100644
--- a/chromium/media/audio/audio_debug_recording_test.h
+++ b/chromium/media/audio/audio_debug_recording_test.h
@@ -21,6 +21,10 @@ class MockAudioManager;
class AudioDebugRecordingTest : public testing::Test {
public:
AudioDebugRecordingTest();
+
+ AudioDebugRecordingTest(const AudioDebugRecordingTest&) = delete;
+ AudioDebugRecordingTest& operator=(const AudioDebugRecordingTest&) = delete;
+
~AudioDebugRecordingTest() override;
protected:
@@ -31,9 +35,6 @@ class AudioDebugRecordingTest : public testing::Test {
base::test::TaskEnvironment task_environment_;
std::unique_ptr<MockAudioManager> mock_audio_manager_;
MockAudioDebugRecordingManager* mock_debug_recording_manager_;
-
- private:
- DISALLOW_COPY_AND_ASSIGN(AudioDebugRecordingTest);
};
} // namespace media
diff --git a/chromium/media/audio/audio_encoders_unittest.cc b/chromium/media/audio/audio_encoders_unittest.cc
index d81a6729c5e..0be36ca4731 100644
--- a/chromium/media/audio/audio_encoders_unittest.cc
+++ b/chromium/media/audio/audio_encoders_unittest.cc
@@ -28,8 +28,7 @@ constexpr int kAudioSampleRate = 48000;
// This is the preferred opus buffer duration (60 ms), which corresponds to a
// value of 2880 frames per buffer (|kOpusFramesPerBuffer|).
-constexpr base::TimeDelta kOpusBufferDuration =
- base::TimeDelta::FromMilliseconds(60);
+constexpr base::TimeDelta kOpusBufferDuration = base::Milliseconds(60);
constexpr int kOpusFramesPerBuffer = kOpusBufferDuration.InMicroseconds() *
kAudioSampleRate /
base::Time::kMicrosecondsPerSecond;
@@ -130,7 +129,7 @@ class AudioEncodersTest : public ::testing::TestWithParam<TestAudioParams> {
// by ProduceAudioAndEncode().
std::unique_ptr<AudioBus> current_audio_bus_;
- base::TimeDelta buffer_duration_ = base::TimeDelta::FromMilliseconds(10);
+ base::TimeDelta buffer_duration_ = base::Milliseconds(10);
};
TEST_P(AudioEncodersTest, OpusTimestamps) {
@@ -169,7 +168,7 @@ TEST_P(AudioEncodersTest, OpusTimestamps) {
current_timestamp = base::TimeTicks();
for (auto& ts : timestamps) {
auto drift = (current_timestamp - ts).magnitude();
- EXPECT_LE(drift, base::TimeDelta::FromMicroseconds(1));
+ EXPECT_LE(drift, base::Microseconds(1));
current_timestamp += kOpusBufferDuration;
}
}
@@ -216,7 +215,7 @@ TEST_P(AudioEncodersTest, OpusExtraData) {
// 2. timestamps of buffers coming immediately after Flush() calls.
TEST_P(AudioEncodersTest, OpusTimeContinuityBreak) {
base::TimeTicks current_timestamp = base::TimeTicks::Now();
- base::TimeDelta gap = base::TimeDelta::FromMicroseconds(1500);
+ base::TimeDelta gap = base::Microseconds(1500);
buffer_duration_ = kOpusBufferDuration;
std::vector<base::TimeTicks> timestamps;
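The other recurring change in this patch is the base::TimeDelta spelling: the verbose static factories (base::TimeDelta::FromMilliseconds(), FromSeconds(), and so on) are replaced by the newer free constexpr helpers in namespace base (base::Milliseconds(), base::Seconds(), ...). A small before/after sketch, assuming Chromium's base/time/time.h is available (it only builds inside a Chromium checkout); the constant names loosely mirror ones appearing in the hunks below:

    #include "base/time/time.h"

    namespace {

    // Old spelling being removed throughout this patch:
    //   constexpr base::TimeDelta kAuthTimeout =
    //       base::TimeDelta::FromMilliseconds(10000);

    // New spelling: free constexpr helpers in namespace base.
    constexpr base::TimeDelta kAuthTimeout = base::Milliseconds(10000);
    constexpr base::TimeDelta kPingInterval = base::Seconds(1);

    }  // namespace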
diff --git a/chromium/media/audio/audio_input_device.cc b/chromium/media/audio/audio_input_device.cc
index 803c1430101..8b238c3413d 100644
--- a/chromium/media/audio/audio_input_device.cc
+++ b/chromium/media/audio/audio_input_device.cc
@@ -70,6 +70,10 @@ class AudioInputDevice::AudioThreadCallback
bool enable_uma,
CaptureCallback* capture_callback,
base::RepeatingClosure got_data_callback);
+
+ AudioThreadCallback(const AudioThreadCallback&) = delete;
+ AudioThreadCallback& operator=(const AudioThreadCallback&) = delete;
+
~AudioThreadCallback() override;
void MapSharedMemory() override;
@@ -94,8 +98,6 @@ class AudioInputDevice::AudioThreadCallback
const int got_data_callback_interval_in_frames_;
int frames_since_last_got_data_callback_;
base::RepeatingClosure got_data_callback_;
-
- DISALLOW_COPY_AND_ASSIGN(AudioThreadCallback);
};
AudioInputDevice::AudioInputDevice(std::unique_ptr<AudioInputIPC> ipc,
@@ -260,11 +262,11 @@ void AudioInputDevice::OnStreamCreated(
const bool stop_at_first_alive_notification = false;
const bool pause_check_during_suspend = true;
#endif
- alive_checker_ = std::make_unique<AliveChecker>(
- base::BindRepeating(&AudioInputDevice::DetectedDeadInputStream, this),
- base::TimeDelta::FromSeconds(kCheckMissingCallbacksIntervalSeconds),
- base::TimeDelta::FromSeconds(kMissingCallbacksTimeBeforeErrorSeconds),
- stop_at_first_alive_notification, pause_check_during_suspend);
+ alive_checker_ = std::make_unique<AliveChecker>(
+ base::BindRepeating(&AudioInputDevice::DetectedDeadInputStream, this),
+ base::Seconds(kCheckMissingCallbacksIntervalSeconds),
+ base::Seconds(kMissingCallbacksTimeBeforeErrorSeconds),
+ stop_at_first_alive_notification, pause_check_during_suspend);
}
// Unretained is safe since |alive_checker_| outlives |audio_callback_|.
@@ -272,7 +274,7 @@ void AudioInputDevice::OnStreamCreated(
alive_checker_
? base::BindRepeating(&AliveChecker::NotifyAlive,
base::Unretained(alive_checker_.get()))
- : base::DoNothing::Repeatedly();
+ : base::DoNothing();
audio_callback_ = std::make_unique<AudioInputDevice::AudioThreadCallback>(
audio_parameters_, std::move(shared_memory_region),
@@ -462,8 +464,7 @@ void AudioInputDevice::AudioThreadCallback::Process(uint32_t pending_data) {
// the audio delay measurement.
// TODO(olka, tommi): Take advantage of |capture_time| in the renderer.
const base::TimeTicks capture_time =
- base::TimeTicks() +
- base::TimeDelta::FromMicroseconds(buffer->params.capture_time_us);
+ base::TimeTicks() + base::Microseconds(buffer->params.capture_time_us);
const base::TimeTicks now_time = base::TimeTicks::Now();
DCHECK_GE(now_time, capture_time);
diff --git a/chromium/media/audio/audio_input_stream_data_interceptor.h b/chromium/media/audio/audio_input_stream_data_interceptor.h
index 6794267c840..3ba937c1d18 100644
--- a/chromium/media/audio/audio_input_stream_data_interceptor.h
+++ b/chromium/media/audio/audio_input_stream_data_interceptor.h
@@ -33,6 +33,11 @@ class MEDIA_EXPORT AudioInputStreamDataInterceptor
CreateDebugRecorderCB create_debug_recorder_cb,
AudioInputStream* stream);
+ AudioInputStreamDataInterceptor(const AudioInputStreamDataInterceptor&) =
+ delete;
+ AudioInputStreamDataInterceptor& operator=(
+ const AudioInputStreamDataInterceptor&) = delete;
+
~AudioInputStreamDataInterceptor() override;
// Implementation of AudioInputStream.
@@ -61,8 +66,6 @@ class MEDIA_EXPORT AudioInputStreamDataInterceptor
AudioInputStream* const stream_;
AudioInputStream::AudioInputCallback* callback_;
SEQUENCE_CHECKER(sequence_checker_);
-
- DISALLOW_COPY_AND_ASSIGN(AudioInputStreamDataInterceptor);
};
} // namespace media
diff --git a/chromium/media/audio/audio_input_unittest.cc b/chromium/media/audio/audio_input_unittest.cc
index cd9195987c6..8756f4449a9 100644
--- a/chromium/media/audio/audio_input_unittest.cc
+++ b/chromium/media/audio/audio_input_unittest.cc
@@ -78,6 +78,9 @@ class AudioInputTest : public testing::Test {
base::RunLoop().RunUntilIdle();
}
+ AudioInputTest(const AudioInputTest&) = delete;
+ AudioInputTest& operator=(const AudioInputTest&) = delete;
+
~AudioInputTest() override { audio_manager_->Shutdown(); }
protected:
@@ -188,9 +191,6 @@ class AudioInputTest : public testing::Test {
base::TestMessageLoop message_loop_;
std::unique_ptr<AudioManager> audio_manager_;
AudioInputStream* audio_input_stream_;
-
- private:
- DISALLOW_COPY_AND_ASSIGN(AudioInputTest);
};
// Test create and close of an AudioInputStream without recording audio.
diff --git a/chromium/media/audio/audio_manager.cc b/chromium/media/audio/audio_manager.cc
index f04c67116c0..0c33df6aaaf 100644
--- a/chromium/media/audio/audio_manager.cc
+++ b/chromium/media/audio/audio_manager.cc
@@ -36,6 +36,10 @@ AudioManager* g_last_created = nullptr;
class AudioManagerHelper {
public:
AudioManagerHelper() = default;
+
+ AudioManagerHelper(const AudioManagerHelper&) = delete;
+ AudioManagerHelper& operator=(const AudioManagerHelper&) = delete;
+
~AudioManagerHelper() = default;
AudioLogFactory* fake_log_factory() { return &fake_log_factory_; }
@@ -59,8 +63,6 @@ class AudioManagerHelper {
#endif
std::string app_name_;
-
- DISALLOW_COPY_AND_ASSIGN(AudioManagerHelper);
};
AudioManagerHelper* GetHelper() {
diff --git a/chromium/media/audio/audio_manager.h b/chromium/media/audio/audio_manager.h
index 508775f23f9..bb2fc8b3822 100644
--- a/chromium/media/audio/audio_manager.h
+++ b/chromium/media/audio/audio_manager.h
@@ -36,6 +36,9 @@ class AudioSourceDiverter;
// the need to provide iterators over the existing streams.
class MEDIA_EXPORT AudioManager {
public:
+ AudioManager(const AudioManager&) = delete;
+ AudioManager& operator=(const AudioManager&) = delete;
+
virtual ~AudioManager();
// Construct the audio manager; only one instance is allowed.
@@ -274,7 +277,6 @@ class MEDIA_EXPORT AudioManager {
RemoveDiverterCallback remove_diverter_callback_;
THREAD_CHECKER(thread_checker_);
- DISALLOW_COPY_AND_ASSIGN(AudioManager);
};
} // namespace media
diff --git a/chromium/media/audio/audio_manager_base.cc b/chromium/media/audio/audio_manager_base.cc
index f7d31c9c3ae..5400e094bb5 100644
--- a/chromium/media/audio/audio_manager_base.cc
+++ b/chromium/media/audio/audio_manager_base.cc
@@ -428,8 +428,7 @@ AudioOutputStream* AudioManagerBase::MakeAudioOutputStreamProxy(
if (it != output_dispatchers_.end())
return (*it)->dispatcher->CreateStreamProxy();
- const base::TimeDelta kCloseDelay =
- base::TimeDelta::FromSeconds(kStreamCloseDelaySeconds);
+ const base::TimeDelta kCloseDelay = base::Seconds(kStreamCloseDelaySeconds);
std::unique_ptr<AudioOutputDispatcher> dispatcher;
if (output_params.format() != AudioParameters::AUDIO_FAKE &&
!output_params.IsBitstreamFormat()) {
diff --git a/chromium/media/audio/audio_manager_base.h b/chromium/media/audio/audio_manager_base.h
index 32546d29182..f1e36cab95f 100644
--- a/chromium/media/audio/audio_manager_base.h
+++ b/chromium/media/audio/audio_manager_base.h
@@ -12,6 +12,7 @@
#include <vector>
#include "base/compiler_specific.h"
+#include "base/gtest_prod_util.h"
#include "base/macros.h"
#include "base/observer_list.h"
#include "base/single_thread_task_runner.h"
@@ -35,6 +36,9 @@ class MEDIA_EXPORT AudioManagerBase : public AudioManager {
public:
enum class VoiceProcessingMode { kDisabled = 0, kEnabled = 1 };
+ AudioManagerBase(const AudioManagerBase&) = delete;
+ AudioManagerBase& operator=(const AudioManagerBase&) = delete;
+
~AudioManagerBase() override;
AudioOutputStream* MakeAudioOutputStream(
@@ -211,8 +215,6 @@ class MEDIA_EXPORT AudioManagerBase : public AudioManager {
// Debug recording manager.
std::unique_ptr<AudioDebugRecordingManager> debug_recording_manager_;
-
- DISALLOW_COPY_AND_ASSIGN(AudioManagerBase);
};
} // namespace media
diff --git a/chromium/media/audio/audio_manager_unittest.cc b/chromium/media/audio/audio_manager_unittest.cc
index c4ced09ce28..fdf54f8ef35 100644
--- a/chromium/media/audio/audio_manager_unittest.cc
+++ b/chromium/media/audio/audio_manager_unittest.cc
@@ -997,6 +997,9 @@ class TestAudioSourceCallback : public AudioOutputStream::AudioSourceCallback {
: expected_frames_per_buffer_(expected_frames_per_buffer),
event_(event) {}
+ TestAudioSourceCallback(const TestAudioSourceCallback&) = delete;
+ TestAudioSourceCallback& operator=(const TestAudioSourceCallback&) = delete;
+
~TestAudioSourceCallback() override {}
int OnMoreData(base::TimeDelta,
@@ -1013,8 +1016,6 @@ class TestAudioSourceCallback : public AudioOutputStream::AudioSourceCallback {
private:
const int expected_frames_per_buffer_;
base::WaitableEvent* event_;
-
- DISALLOW_COPY_AND_ASSIGN(TestAudioSourceCallback);
};
// Test that we can create an AudioOutputStream with kMinAudioBufferSize and
diff --git a/chromium/media/audio/audio_output_device_thread_callback.cc b/chromium/media/audio/audio_output_device_thread_callback.cc
index c6f8b16469b..d64d1c151a7 100644
--- a/chromium/media/audio/audio_output_device_thread_callback.cc
+++ b/chromium/media/audio/audio_output_device_thread_callback.cc
@@ -57,12 +57,10 @@ void AudioOutputDeviceThreadCallback::Process(uint32_t control_signal) {
"callback_num", callback_num_, "frames skipped",
frames_skipped);
- base::TimeDelta delay =
- base::TimeDelta::FromMicroseconds(buffer->params.delay_us);
+ base::TimeDelta delay = base::Microseconds(buffer->params.delay_us);
base::TimeTicks delay_timestamp =
- base::TimeTicks() +
- base::TimeDelta::FromMicroseconds(buffer->params.delay_timestamp_us);
+ base::TimeTicks() + base::Microseconds(buffer->params.delay_timestamp_us);
DVLOG(4) << __func__ << " delay:" << delay << " delay_timestamp:" << delay_timestamp
<< " frames_skipped:" << frames_skipped;
diff --git a/chromium/media/audio/audio_output_device_thread_callback.h b/chromium/media/audio/audio_output_device_thread_callback.h
index 603dc399cbf..988187dfdd1 100644
--- a/chromium/media/audio/audio_output_device_thread_callback.h
+++ b/chromium/media/audio/audio_output_device_thread_callback.h
@@ -23,6 +23,12 @@ class MEDIA_EXPORT AudioOutputDeviceThreadCallback
const media::AudioParameters& audio_parameters,
base::UnsafeSharedMemoryRegion shared_memory_region,
media::AudioRendererSink::RenderCallback* render_callback);
+
+ AudioOutputDeviceThreadCallback(const AudioOutputDeviceThreadCallback&) =
+ delete;
+ AudioOutputDeviceThreadCallback& operator=(
+ const AudioOutputDeviceThreadCallback&) = delete;
+
~AudioOutputDeviceThreadCallback() override;
void MapSharedMemory() override;
@@ -46,8 +52,6 @@ class MEDIA_EXPORT AudioOutputDeviceThreadCallback
media::AudioRendererSink::RenderCallback* render_callback_;
std::unique_ptr<media::AudioBus> output_bus_;
uint64_t callback_num_;
-
- DISALLOW_COPY_AND_ASSIGN(AudioOutputDeviceThreadCallback);
};
} // namespace media
diff --git a/chromium/media/audio/audio_output_device_unittest.cc b/chromium/media/audio/audio_output_device_unittest.cc
index 768156d41ce..0f7db33e75c 100644
--- a/chromium/media/audio/audio_output_device_unittest.cc
+++ b/chromium/media/audio/audio_output_device_unittest.cc
@@ -46,8 +46,7 @@ namespace {
constexpr char kDefaultDeviceId[] = "";
constexpr char kNonDefaultDeviceId[] = "valid-nondefault-device-id";
constexpr char kUnauthorizedDeviceId[] = "unauthorized-device-id";
-constexpr base::TimeDelta kAuthTimeout =
- base::TimeDelta::FromMilliseconds(10000);
+constexpr base::TimeDelta kAuthTimeout = base::Milliseconds(10000);
class MockRenderCallback : public AudioRendererSink::RenderCallback {
public:
@@ -88,6 +87,10 @@ class MockAudioOutputIPC : public AudioOutputIPC {
class AudioOutputDeviceTest : public testing::Test {
public:
AudioOutputDeviceTest();
+
+ AudioOutputDeviceTest(const AudioOutputDeviceTest&) = delete;
+ AudioOutputDeviceTest& operator=(const AudioOutputDeviceTest&) = delete;
+
~AudioOutputDeviceTest() override;
void ReceiveAuthorization(OutputDeviceStatus device_status);
@@ -117,8 +120,6 @@ class AudioOutputDeviceTest : public testing::Test {
WritableSharedMemoryMapping shared_memory_mapping_;
CancelableSyncSocket browser_socket_;
CancelableSyncSocket renderer_socket_;
-
- DISALLOW_COPY_AND_ASSIGN(AudioOutputDeviceTest);
};
AudioOutputDeviceTest::AudioOutputDeviceTest()
diff --git a/chromium/media/audio/audio_output_dispatcher.h b/chromium/media/audio/audio_output_dispatcher.h
index aa79817b8a9..a6aaaf8ca55 100644
--- a/chromium/media/audio/audio_output_dispatcher.h
+++ b/chromium/media/audio/audio_output_dispatcher.h
@@ -29,6 +29,10 @@ class AudioOutputProxy;
class MEDIA_EXPORT AudioOutputDispatcher {
public:
AudioOutputDispatcher(AudioManager* audio_manager);
+
+ AudioOutputDispatcher(const AudioOutputDispatcher&) = delete;
+ AudioOutputDispatcher& operator=(const AudioOutputDispatcher&) = delete;
+
virtual ~AudioOutputDispatcher();
// Creates an instance of AudioOutputProxy, which uses |this| as dispatcher.
@@ -69,8 +73,6 @@ class MEDIA_EXPORT AudioOutputDispatcher {
// A no-reference-held pointer (we don't want circular references) back to the
// AudioManager that owns this object.
AudioManager* const audio_manager_;
-
- DISALLOW_COPY_AND_ASSIGN(AudioOutputDispatcher);
};
} // namespace media
diff --git a/chromium/media/audio/audio_output_dispatcher_impl.h b/chromium/media/audio/audio_output_dispatcher_impl.h
index 01b200d3a75..6d0f6ea5f61 100644
--- a/chromium/media/audio/audio_output_dispatcher_impl.h
+++ b/chromium/media/audio/audio_output_dispatcher_impl.h
@@ -40,6 +40,11 @@ class MEDIA_EXPORT AudioOutputDispatcherImpl
const AudioParameters& params,
const std::string& output_device_id,
base::TimeDelta close_delay);
+
+ AudioOutputDispatcherImpl(const AudioOutputDispatcherImpl&) = delete;
+ AudioOutputDispatcherImpl& operator=(const AudioOutputDispatcherImpl&) =
+ delete;
+
~AudioOutputDispatcherImpl() override;
// AudioOutputDispatcher implementation.
@@ -95,7 +100,6 @@ class MEDIA_EXPORT AudioOutputDispatcherImpl
int audio_stream_id_;
base::WeakPtrFactory<AudioOutputDispatcherImpl> weak_factory_{this};
- DISALLOW_COPY_AND_ASSIGN(AudioOutputDispatcherImpl);
};
} // namespace media
diff --git a/chromium/media/audio/audio_output_proxy_unittest.cc b/chromium/media/audio/audio_output_proxy_unittest.cc
index b3cc8e1b1c4..2ec7ae3a0f7 100644
--- a/chromium/media/audio/audio_output_proxy_unittest.cc
+++ b/chromium/media/audio/audio_output_proxy_unittest.cc
@@ -174,7 +174,7 @@ class AudioOutputProxyTest : public testing::Test {
// RunUntilIdle() will never terminate.
params_ = AudioParameters(AudioParameters::AUDIO_PCM_LINEAR,
CHANNEL_LAYOUT_STEREO, 8000, 2048);
- InitDispatcher(base::TimeDelta::FromMilliseconds(kTestCloseDelayMs));
+ InitDispatcher(base::Milliseconds(kTestCloseDelayMs));
}
void TearDown() override {
@@ -512,8 +512,7 @@ class AudioOutputResamplerTest : public AudioOutputProxyTest {
// Let Start() run for a bit.
base::RunLoop run_loop;
task_environment_.GetMainThreadTaskRunner()->PostDelayedTask(
- FROM_HERE, run_loop.QuitClosure(),
- base::TimeDelta::FromMilliseconds(kStartRunTimeMs));
+ FROM_HERE, run_loop.QuitClosure(), base::Milliseconds(kStartRunTimeMs));
run_loop.Run();
}
@@ -642,7 +641,7 @@ TEST_F(AudioOutputResamplerTest, DispatcherDestroyed_AfterStop) {
TEST_F(AudioOutputProxyTest, DispatcherDeviceChangeClosesIdleStreams) {
// Set close delay so long that it triggers a test timeout if relied upon.
- InitDispatcher(base::TimeDelta::FromSeconds(1000));
+ InitDispatcher(base::Seconds(1000));
MockAudioOutputStream stream(&manager_, params_);
@@ -852,7 +851,7 @@ TEST_F(AudioOutputResamplerTest, FallbackRecovery) {
base::RunLoop run_loop;
task_environment_.GetMainThreadTaskRunner()->PostDelayedTask(
FROM_HERE, run_loop.QuitClosure(),
- base::TimeDelta::FromMilliseconds(2 * kTestCloseDelayMs));
+ base::Milliseconds(2 * kTestCloseDelayMs));
run_loop.Run();
// Verify a non-fake stream can be created.
diff --git a/chromium/media/audio/audio_output_resampler.cc b/chromium/media/audio/audio_output_resampler.cc
index ffcece8d70c..8b75d9822cd 100644
--- a/chromium/media/audio/audio_output_resampler.cc
+++ b/chromium/media/audio/audio_output_resampler.cc
@@ -38,6 +38,10 @@ class OnMoreDataConverter
OnMoreDataConverter(const AudioParameters& input_params,
const AudioParameters& output_params,
std::unique_ptr<AudioDebugRecorder> debug_recorder);
+
+ OnMoreDataConverter(const OnMoreDataConverter&) = delete;
+ OnMoreDataConverter& operator=(const OnMoreDataConverter&) = delete;
+
~OnMoreDataConverter() override;
// AudioSourceCallback interface.
@@ -86,8 +90,6 @@ class OnMoreDataConverter
// For audio debug recordings.
std::unique_ptr<AudioDebugRecorder> debug_recorder_;
-
- DISALLOW_COPY_AND_ASSIGN(OnMoreDataConverter);
};
namespace {
diff --git a/chromium/media/audio/audio_output_resampler.h b/chromium/media/audio/audio_output_resampler.h
index dc29d9bd8a7..dc79475c789 100644
--- a/chromium/media/audio/audio_output_resampler.h
+++ b/chromium/media/audio/audio_output_resampler.h
@@ -44,6 +44,10 @@ class MEDIA_EXPORT AudioOutputResampler : public AudioOutputDispatcher {
base::TimeDelta close_delay,
const RegisterDebugRecordingSourceCallback&
register_debug_recording_source_callback);
+
+ AudioOutputResampler(const AudioOutputResampler&) = delete;
+ AudioOutputResampler& operator=(const AudioOutputResampler&) = delete;
+
~AudioOutputResampler() override;
// AudioOutputDispatcher interface.
@@ -106,7 +110,6 @@ class MEDIA_EXPORT AudioOutputResampler : public AudioOutputDispatcher {
register_debug_recording_source_callback_;
base::WeakPtrFactory<AudioOutputResampler> weak_factory_{this};
- DISALLOW_COPY_AND_ASSIGN(AudioOutputResampler);
};
} // namespace media
diff --git a/chromium/media/audio/audio_system_helper.h b/chromium/media/audio/audio_system_helper.h
index 6bd62161c78..30897d983bf 100644
--- a/chromium/media/audio/audio_system_helper.h
+++ b/chromium/media/audio/audio_system_helper.h
@@ -18,6 +18,10 @@ class AudioManager;
class MEDIA_EXPORT AudioSystemHelper {
public:
AudioSystemHelper(AudioManager* audio_manager);
+
+ AudioSystemHelper(const AudioSystemHelper&) = delete;
+ AudioSystemHelper& operator=(const AudioSystemHelper&) = delete;
+
~AudioSystemHelper();
void GetInputStreamParameters(
@@ -51,8 +55,6 @@ class MEDIA_EXPORT AudioSystemHelper {
const std::string& device_id);
AudioManager* const audio_manager_;
-
- DISALLOW_COPY_AND_ASSIGN(AudioSystemHelper);
};
} // namespace media
diff --git a/chromium/media/audio/audio_system_test_util.h b/chromium/media/audio/audio_system_test_util.h
index 20edd161fd4..b14c1ccd05d 100644
--- a/chromium/media/audio/audio_system_test_util.h
+++ b/chromium/media/audio/audio_system_test_util.h
@@ -93,6 +93,10 @@ template <class T>
class AudioSystemTestTemplate : public T {
public:
AudioSystemTestTemplate() {}
+
+ AudioSystemTestTemplate(const AudioSystemTestTemplate&) = delete;
+ AudioSystemTestTemplate& operator=(const AudioSystemTestTemplate&) = delete;
+
~AudioSystemTestTemplate() override {}
void SetUp() override {
@@ -136,9 +140,6 @@ class AudioSystemTestTemplate : public T {
AudioParameters default_output_params_;
AudioDeviceDescriptions input_device_descriptions_;
AudioDeviceDescriptions output_device_descriptions_;
-
- private:
- DISALLOW_COPY_AND_ASSIGN(AudioSystemTestTemplate);
};
TYPED_TEST_SUITE_P(AudioSystemTestTemplate);
diff --git a/chromium/media/audio/audio_thread_hang_monitor.cc b/chromium/media/audio/audio_thread_hang_monitor.cc
index 6131b53cb65..f69a79b483f 100644
--- a/chromium/media/audio/audio_thread_hang_monitor.cc
+++ b/chromium/media/audio/audio_thread_hang_monitor.cc
@@ -33,8 +33,7 @@ namespace {
constexpr int kMaxFailedPingsCount = 3;
// The default deadline after which we consider the audio thread hung.
-constexpr base::TimeDelta kDefaultHangDeadline =
- base::TimeDelta::FromMinutes(3);
+constexpr base::TimeDelta kDefaultHangDeadline = base::Minutes(3);
} // namespace
@@ -125,7 +124,7 @@ void AudioThreadHangMonitor::CheckIfAudioThreadIsAlive() {
// An unexpected |time_since_last_check| may indicate that the system has been
// in sleep mode, in which case the audio thread may have had insufficient
// time to respond to the ping. In such a case, skip the check for now.
- if (time_since_last_check > ping_interval_ + base::TimeDelta::FromSeconds(1))
+ if (time_since_last_check > ping_interval_ + base::Seconds(1))
return;
const bool audio_thread_responded_to_last_ping = alive_flag_->flag_;
diff --git a/chromium/media/audio/audio_thread_hang_monitor.h b/chromium/media/audio/audio_thread_hang_monitor.h
index 014ac272b66..f625545468f 100644
--- a/chromium/media/audio/audio_thread_hang_monitor.h
+++ b/chromium/media/audio/audio_thread_hang_monitor.h
@@ -73,6 +73,9 @@ class MEDIA_EXPORT AudioThreadHangMonitor final {
scoped_refptr<base::SingleThreadTaskRunner> audio_thread_task_runner,
scoped_refptr<base::SequencedTaskRunner> monitor_task_runner = nullptr);
+ AudioThreadHangMonitor(const AudioThreadHangMonitor&) = delete;
+ AudioThreadHangMonitor& operator=(const AudioThreadHangMonitor&) = delete;
+
~AudioThreadHangMonitor();
// Thread-safe.
@@ -162,8 +165,6 @@ class MEDIA_EXPORT AudioThreadHangMonitor final {
// successive successful pings. If the most recent ping was failed, the number
// is the negative of the number of successive failed pings.
int recent_ping_state_ = 0;
-
- DISALLOW_COPY_AND_ASSIGN(AudioThreadHangMonitor);
};
} // namespace media
diff --git a/chromium/media/audio/audio_thread_hang_monitor_unittest.cc b/chromium/media/audio/audio_thread_hang_monitor_unittest.cc
index 5ed9585f2e9..0412cfb2483 100644
--- a/chromium/media/audio/audio_thread_hang_monitor_unittest.cc
+++ b/chromium/media/audio/audio_thread_hang_monitor_unittest.cc
@@ -30,8 +30,8 @@ constexpr int kHung =
constexpr int kRecovered =
static_cast<int>(AudioThreadHangMonitor::ThreadStatus::kRecovered);
-constexpr base::TimeDelta kShortHangDeadline = base::TimeDelta::FromSeconds(5);
-constexpr base::TimeDelta kLongHangDeadline = base::TimeDelta::FromMinutes(30);
+constexpr base::TimeDelta kShortHangDeadline = base::Seconds(5);
+constexpr base::TimeDelta kLongHangDeadline = base::Minutes(30);
} // namespace
@@ -107,7 +107,7 @@ TEST_F(AudioThreadHangMonitorTest, DoesNotLogThreadHungWhenOk) {
// Flush the audio thread, then advance the clock. The audio thread should
// register as "alive" every time.
FlushAudioThread();
- task_env_.FastForwardBy(base::TimeDelta::FromMinutes(1));
+ task_env_.FastForwardBy(base::Minutes(1));
}
EXPECT_THAT(histograms_.GetAllSamples("Media.AudioThreadStatus"),
@@ -118,7 +118,7 @@ TEST_F(AudioThreadHangMonitorTest, LogsHungWhenAudioThreadIsBlocked) {
RunUntilIdle();
BlockAudioThreadUntilEvent();
- task_env_.FastForwardBy(base::TimeDelta::FromMinutes(10));
+ task_env_.FastForwardBy(base::Minutes(10));
event_.Signal();
EXPECT_THAT(histograms_.GetAllSamples("Media.AudioThreadStatus"),
@@ -200,14 +200,14 @@ TEST_F(AudioThreadHangMonitorTest, ZeroDeadlineMeansDefaultDeadline) {
// Flush the audio thread, then advance the clock. The audio thread should
// register as "alive" every time.
FlushAudioThread();
- task_env_.FastForwardBy(base::TimeDelta::FromMinutes(1));
+ task_env_.FastForwardBy(base::Minutes(1));
}
EXPECT_THAT(histograms_.GetAllSamples("Media.AudioThreadStatus"),
ElementsAre(base::Bucket(kStarted, 2)));
BlockAudioThreadUntilEvent();
- task_env_.FastForwardBy(base::TimeDelta::FromMinutes(10));
+ task_env_.FastForwardBy(base::Minutes(10));
event_.Signal();
EXPECT_THAT(histograms_.GetAllSamples("Media.AudioThreadStatus"),
@@ -219,14 +219,14 @@ TEST_F(AudioThreadHangMonitorTest,
RunUntilIdle();
BlockAudioThreadUntilEvent();
- task_env_.FastForwardBy(base::TimeDelta::FromMinutes(10));
+ task_env_.FastForwardBy(base::Minutes(10));
event_.Signal();
for (int i = 0; i < 10; ++i) {
// Flush the audio thread, then advance the clock. The audio thread should
// register as "alive" every time.
FlushAudioThread();
- task_env_.FastForwardBy(base::TimeDelta::FromMinutes(1));
+ task_env_.FastForwardBy(base::Minutes(1));
}
EXPECT_THAT(histograms_.GetAllSamples("Media.AudioThreadStatus"),
@@ -242,7 +242,7 @@ TEST_F(AudioThreadHangMonitorTest, NoHangActionWhenOk) {
// Flush the audio thread, then advance the clock. The audio thread should
// register as "alive" every time.
FlushAudioThread();
- task_env_.FastForwardBy(base::TimeDelta::FromMinutes(1));
+ task_env_.FastForwardBy(base::Minutes(1));
}
EXPECT_THAT(histograms_.GetAllSamples("Media.AudioThreadStatus"),
@@ -259,7 +259,7 @@ TEST_F(AudioThreadHangMonitorTest, DumpsWhenAudioThreadIsBlocked) {
EXPECT_CALL(*this, HangActionDump).Times(1);
BlockAudioThreadUntilEvent();
- task_env_.FastForwardBy(base::TimeDelta::FromMinutes(10));
+ task_env_.FastForwardBy(base::Minutes(10));
event_.Signal();
EXPECT_THAT(histograms_.GetAllSamples("Media.AudioThreadStatus"),
@@ -277,7 +277,7 @@ TEST_F(AudioThreadHangMonitorTest, TerminatesProcessWhenAudioThreadIsBlocked) {
EXPECT_CALL(*this, HangActionTerminate).Times(1);
BlockAudioThreadUntilEvent();
- task_env_.FastForwardBy(base::TimeDelta::FromMinutes(10));
+ task_env_.FastForwardBy(base::Minutes(10));
event_.Signal();
EXPECT_THAT(histograms_.GetAllSamples("Media.AudioThreadStatus"),
@@ -297,7 +297,7 @@ TEST_F(AudioThreadHangMonitorTest,
EXPECT_CALL(*this, HangActionTerminate).Times(1);
BlockAudioThreadUntilEvent();
- task_env_.FastForwardBy(base::TimeDelta::FromMinutes(10));
+ task_env_.FastForwardBy(base::Minutes(10));
event_.Signal();
EXPECT_THAT(histograms_.GetAllSamples("Media.AudioThreadStatus"),
diff --git a/chromium/media/audio/audio_thread_impl.h b/chromium/media/audio/audio_thread_impl.h
index 70100352440..a63a40ff1af 100644
--- a/chromium/media/audio/audio_thread_impl.h
+++ b/chromium/media/audio/audio_thread_impl.h
@@ -16,6 +16,10 @@ namespace media {
class MEDIA_EXPORT AudioThreadImpl final : public AudioThread {
public:
AudioThreadImpl();
+
+ AudioThreadImpl(const AudioThreadImpl&) = delete;
+ AudioThreadImpl& operator=(const AudioThreadImpl&) = delete;
+
~AudioThreadImpl() final;
// AudioThread implementation.
@@ -33,7 +37,6 @@ class MEDIA_EXPORT AudioThreadImpl final : public AudioThread {
AudioThreadHangMonitor::Ptr hang_monitor_;
THREAD_CHECKER(thread_checker_);
- DISALLOW_COPY_AND_ASSIGN(AudioThreadImpl);
};
} // namespace media
diff --git a/chromium/media/audio/cras/audio_manager_chromeos.h b/chromium/media/audio/cras/audio_manager_chromeos.h
index 3d38a3eb89f..100664422bd 100644
--- a/chromium/media/audio/cras/audio_manager_chromeos.h
+++ b/chromium/media/audio/cras/audio_manager_chromeos.h
@@ -25,6 +25,10 @@ class MEDIA_EXPORT AudioManagerChromeOS : public AudioManagerCrasBase {
public:
AudioManagerChromeOS(std::unique_ptr<AudioThread> audio_thread,
AudioLogFactory* audio_log_factory);
+
+ AudioManagerChromeOS(const AudioManagerChromeOS&) = delete;
+ AudioManagerChromeOS& operator=(const AudioManagerChromeOS&) = delete;
+
~AudioManagerChromeOS() override;
// AudioManager implementation.
@@ -108,8 +112,6 @@ class MEDIA_EXPORT AudioManagerChromeOS : public AudioManagerCrasBase {
base::WeakPtr<AudioManagerChromeOS> weak_this_;
base::WeakPtrFactory<AudioManagerChromeOS> weak_ptr_factory_;
-
- DISALLOW_COPY_AND_ASSIGN(AudioManagerChromeOS);
};
} // namespace media
diff --git a/chromium/media/audio/cras/audio_manager_cras.h b/chromium/media/audio/cras/audio_manager_cras.h
index 448f0a25312..ffd8656009d 100644
--- a/chromium/media/audio/cras/audio_manager_cras.h
+++ b/chromium/media/audio/cras/audio_manager_cras.h
@@ -22,6 +22,10 @@ class MEDIA_EXPORT AudioManagerCras : public AudioManagerCrasBase {
public:
AudioManagerCras(std::unique_ptr<AudioThread> audio_thread,
AudioLogFactory* audio_log_factory);
+
+ AudioManagerCras(const AudioManagerCras&) = delete;
+ AudioManagerCras& operator=(const AudioManagerCras&) = delete;
+
~AudioManagerCras() override;
// AudioManager implementation.
@@ -65,8 +69,6 @@ class MEDIA_EXPORT AudioManagerCras : public AudioManagerCrasBase {
base::WeakPtr<AudioManagerCras> weak_this_;
base::WeakPtrFactory<AudioManagerCras> weak_ptr_factory_;
-
- DISALLOW_COPY_AND_ASSIGN(AudioManagerCras);
};
} // namespace media
diff --git a/chromium/media/audio/cras/audio_manager_cras_base.h b/chromium/media/audio/cras/audio_manager_cras_base.h
index f9c70233525..4f6762631c9 100644
--- a/chromium/media/audio/cras/audio_manager_cras_base.h
+++ b/chromium/media/audio/cras/audio_manager_cras_base.h
@@ -22,6 +22,10 @@ class MEDIA_EXPORT AudioManagerCrasBase : public AudioManagerBase {
public:
AudioManagerCrasBase(std::unique_ptr<AudioThread> audio_thread,
AudioLogFactory* audio_log_factory);
+
+ AudioManagerCrasBase(const AudioManagerCrasBase&) = delete;
+ AudioManagerCrasBase& operator=(const AudioManagerCrasBase&) = delete;
+
~AudioManagerCrasBase() override;
// AudioManager implementation.
@@ -59,9 +63,6 @@ class MEDIA_EXPORT AudioManagerCrasBase : public AudioManagerBase {
// Called by MakeLinearInputStream and MakeLowLatencyInputStream.
AudioInputStream* MakeInputStream(const AudioParameters& params,
const std::string& device_id);
-
- private:
- DISALLOW_COPY_AND_ASSIGN(AudioManagerCrasBase);
};
} // namespace media
diff --git a/chromium/media/audio/cras/cras_input.h b/chromium/media/audio/cras/cras_input.h
index 8efe59a35c8..4acbc0ccb20 100644
--- a/chromium/media/audio/cras/cras_input.h
+++ b/chromium/media/audio/cras/cras_input.h
@@ -34,6 +34,9 @@ class MEDIA_EXPORT CrasInputStream : public AgcAudioStream<AudioInputStream> {
AudioManagerCrasBase* manager,
const std::string& device_id);
+ CrasInputStream(const CrasInputStream&) = delete;
+ CrasInputStream& operator=(const CrasInputStream&) = delete;
+
// The dtor is typically called by the AudioManager only and it is usually
// triggered by calling AudioOutputStream::Close().
~CrasInputStream() override;
@@ -121,8 +124,6 @@ class MEDIA_EXPORT CrasInputStream : public AgcAudioStream<AudioInputStream> {
double input_volume_;
std::unique_ptr<AudioBus> audio_bus_;
-
- DISALLOW_COPY_AND_ASSIGN(CrasInputStream);
};
} // namespace media
diff --git a/chromium/media/audio/cras/cras_unified.h b/chromium/media/audio/cras/cras_unified.h
index 26055a726b2..9aeca2785f7 100644
--- a/chromium/media/audio/cras/cras_unified.h
+++ b/chromium/media/audio/cras/cras_unified.h
@@ -38,6 +38,9 @@ class MEDIA_EXPORT CrasUnifiedStream : public AudioOutputStream {
AudioManagerCrasBase* manager,
const std::string& device_id);
+ CrasUnifiedStream(const CrasUnifiedStream&) = delete;
+ CrasUnifiedStream& operator=(const CrasUnifiedStream&) = delete;
+
// The dtor is typically called by the AudioManager only and it is usually
// triggered by calling AudioUnifiedStream::Close().
~CrasUnifiedStream() override;
@@ -98,8 +101,6 @@ class MEDIA_EXPORT CrasUnifiedStream : public AudioOutputStream {
// Index of the CRAS device to stream output to.
const int pin_device_;
-
- DISALLOW_COPY_AND_ASSIGN(CrasUnifiedStream);
};
} // namespace media
diff --git a/chromium/media/audio/fake_audio_log_factory.h b/chromium/media/audio/fake_audio_log_factory.h
index 7c8cfd8f807..9e2eff80019 100644
--- a/chromium/media/audio/fake_audio_log_factory.h
+++ b/chromium/media/audio/fake_audio_log_factory.h
@@ -18,12 +18,13 @@ namespace media {
class MEDIA_EXPORT FakeAudioLogFactory : public AudioLogFactory {
public:
FakeAudioLogFactory();
+
+ FakeAudioLogFactory(const FakeAudioLogFactory&) = delete;
+ FakeAudioLogFactory& operator=(const FakeAudioLogFactory&) = delete;
+
~FakeAudioLogFactory() override;
std::unique_ptr<AudioLog> CreateAudioLog(AudioComponent component,
int component_id) override;
-
- private:
- DISALLOW_COPY_AND_ASSIGN(FakeAudioLogFactory);
};
} // namespace media
diff --git a/chromium/media/audio/fake_audio_manager.h b/chromium/media/audio/fake_audio_manager.h
index a40e7159ef1..1979afac641 100644
--- a/chromium/media/audio/fake_audio_manager.h
+++ b/chromium/media/audio/fake_audio_manager.h
@@ -18,6 +18,10 @@ class MEDIA_EXPORT FakeAudioManager : public AudioManagerBase {
public:
FakeAudioManager(std::unique_ptr<AudioThread> audio_thread,
AudioLogFactory* audio_log_factory);
+
+ FakeAudioManager(const FakeAudioManager&) = delete;
+ FakeAudioManager& operator=(const FakeAudioManager&) = delete;
+
~FakeAudioManager() override;
// Implementation of AudioManager.
@@ -48,9 +52,6 @@ class MEDIA_EXPORT FakeAudioManager : public AudioManagerBase {
AudioParameters GetPreferredOutputStreamParameters(
const std::string& output_device_id,
const AudioParameters& input_params) override;
-
- private:
- DISALLOW_COPY_AND_ASSIGN(FakeAudioManager);
};
} // namespace media
diff --git a/chromium/media/audio/fuchsia/DIR_METADATA b/chromium/media/audio/fuchsia/DIR_METADATA
index abc57ac0fd5..5b3985ecc8b 100644
--- a/chromium/media/audio/fuchsia/DIR_METADATA
+++ b/chromium/media/audio/fuchsia/DIR_METADATA
@@ -6,5 +6,5 @@
# For the schema of this file, see Metadata message:
# https://source.chromium.org/chromium/infra/infra/+/main:go/src/infra/tools/dirmd/proto/dir_metadata.proto
-team_email: "cr-fuchsia@chromium.org"
+mixins: "//build/fuchsia/COMMON_METADATA"
os: FUCHSIA
\ No newline at end of file
diff --git a/chromium/media/audio/fuchsia/audio_manager_fuchsia.h b/chromium/media/audio/fuchsia/audio_manager_fuchsia.h
index 7ce59254984..105aca48634 100644
--- a/chromium/media/audio/fuchsia/audio_manager_fuchsia.h
+++ b/chromium/media/audio/fuchsia/audio_manager_fuchsia.h
@@ -13,6 +13,10 @@ class AudioManagerFuchsia : public AudioManagerBase {
public:
AudioManagerFuchsia(std::unique_ptr<AudioThread> audio_thread,
AudioLogFactory* audio_log_factory);
+
+ AudioManagerFuchsia(const AudioManagerFuchsia&) = delete;
+ AudioManagerFuchsia& operator=(const AudioManagerFuchsia&) = delete;
+
~AudioManagerFuchsia() override;
// Implementation of AudioManager.
@@ -45,9 +49,6 @@ class AudioManagerFuchsia : public AudioManagerBase {
AudioParameters GetPreferredOutputStreamParameters(
const std::string& output_device_id,
const AudioParameters& input_params) override;
-
- private:
- DISALLOW_COPY_AND_ASSIGN(AudioManagerFuchsia);
};
} // namespace media
diff --git a/chromium/media/audio/fuchsia/audio_output_stream_fuchsia.cc b/chromium/media/audio/fuchsia/audio_output_stream_fuchsia.cc
index 4fed2f56214..d872b081d73 100644
--- a/chromium/media/audio/fuchsia/audio_output_stream_fuchsia.cc
+++ b/chromium/media/audio/fuchsia/audio_output_stream_fuchsia.cc
@@ -174,7 +174,7 @@ bool AudioOutputStreamFuchsia::InitializePayloadBuffer() {
void AudioOutputStreamFuchsia::OnMinLeadTimeChanged(int64_t min_lead_time) {
bool min_lead_time_was_unknown = !min_lead_time_.has_value();
- min_lead_time_ = base::TimeDelta::FromNanoseconds(min_lead_time);
+ min_lead_time_ = base::Nanoseconds(min_lead_time);
// When min_lead_time_ increases we may need to reallocate |payload_buffer_|.
// Code below just unmaps the current buffer. The new buffer will be allocated
diff --git a/chromium/media/audio/linux/audio_manager_linux.cc b/chromium/media/audio/linux/audio_manager_linux.cc
index def8da7dc2a..ed7a3962624 100644
--- a/chromium/media/audio/linux/audio_manager_linux.cc
+++ b/chromium/media/audio/linux/audio_manager_linux.cc
@@ -28,13 +28,6 @@
namespace media {
-enum LinuxAudioIO {
- kPulse,
- kAlsa,
- kCras,
- kAudioIOMax = kCras // Must always be equal to largest logged entry.
-};
-
std::unique_ptr<media::AudioManager> CreateAudioManager(
std::unique_ptr<AudioThread> audio_thread,
AudioLogFactory* audio_log_factory) {
@@ -47,7 +40,6 @@ std::unique_ptr<media::AudioManager> CreateAudioManager(
#if defined(USE_CRAS)
if (base::CommandLine::ForCurrentProcess()->HasSwitch(switches::kUseCras)) {
- UMA_HISTOGRAM_ENUMERATION("Media.LinuxAudioIO", kCras, kAudioIOMax + 1);
#if BUILDFLAG(IS_CHROMEOS_ASH)
return std::make_unique<AudioManagerChromeOS>(std::move(audio_thread),
audio_log_factory);
@@ -62,7 +54,6 @@ std::unique_ptr<media::AudioManager> CreateAudioManager(
pa_threaded_mainloop* pa_mainloop = nullptr;
pa_context* pa_context = nullptr;
if (pulse::InitPulse(&pa_mainloop, &pa_context)) {
- UMA_HISTOGRAM_ENUMERATION("Media.LinuxAudioIO", kPulse, kAudioIOMax + 1);
return std::make_unique<AudioManagerPulse>(
std::move(audio_thread), audio_log_factory, pa_mainloop, pa_context);
}
@@ -71,7 +62,6 @@ std::unique_ptr<media::AudioManager> CreateAudioManager(
#endif
#if defined(USE_ALSA)
- UMA_HISTOGRAM_ENUMERATION("Media.LinuxAudioIO", kAlsa, kAudioIOMax + 1);
return std::make_unique<AudioManagerAlsa>(std::move(audio_thread),
audio_log_factory);
#else
diff --git a/chromium/media/audio/mac/audio_auhal_mac.cc b/chromium/media/audio/mac/audio_auhal_mac.cc
index b7660d62638..0c7c8d3ea94 100644
--- a/chromium/media/audio/mac/audio_auhal_mac.cc
+++ b/chromium/media/audio/mac/audio_auhal_mac.cc
@@ -254,8 +254,7 @@ void AUHALStream::Start(AudioSourceCallback* callback) {
base::BindOnce(&AUHALStream::Start, base::Unretained(this), callback));
manager_->GetTaskRunner()->PostDelayedTask(
FROM_HERE, deferred_start_cb_.callback(),
- base::TimeDelta::FromSeconds(
- AudioManagerMac::kStartDelayInSecsForPowerEvents));
+ base::Seconds(AudioManagerMac::kStartDelayInSecsForPowerEvents));
return;
}
diff --git a/chromium/media/audio/mac/audio_auhal_mac.h b/chromium/media/audio/mac/audio_auhal_mac.h
index c71400cce40..376ca4655b8 100644
--- a/chromium/media/audio/mac/audio_auhal_mac.h
+++ b/chromium/media/audio/mac/audio_auhal_mac.h
@@ -79,6 +79,10 @@ class AUHALStream : public AudioOutputStream {
const AudioParameters& params,
AudioDeviceID device,
const AudioManager::LogCallback& log_callback);
+
+ AUHALStream(const AUHALStream&) = delete;
+ AUHALStream& operator=(const AUHALStream&) = delete;
+
// The dtor is typically called by the AudioManager only and it is usually
// triggered by calling AudioOutputStream::Close().
~AUHALStream() override;
@@ -210,8 +214,6 @@ class AUHALStream : public AudioOutputStream {
// Used to make sure control functions (Start(), Stop() etc) are called on the
// right thread.
base::ThreadChecker thread_checker_;
-
- DISALLOW_COPY_AND_ASSIGN(AUHALStream);
};
} // namespace media
diff --git a/chromium/media/audio/mac/audio_auhal_mac_unittest.cc b/chromium/media/audio/mac/audio_auhal_mac_unittest.cc
index 26bb36593b5..8ba6e39324d 100644
--- a/chromium/media/audio/mac/audio_auhal_mac_unittest.cc
+++ b/chromium/media/audio/mac/audio_auhal_mac_unittest.cc
@@ -47,6 +47,9 @@ class AUHALStreamTest : public testing::Test {
base::RunLoop().RunUntilIdle();
}
+ AUHALStreamTest(const AUHALStreamTest&) = delete;
+ AUHALStreamTest& operator=(const AUHALStreamTest&) = delete;
+
~AUHALStreamTest() override { manager_->Shutdown(); }
AudioOutputStream* Create() {
@@ -68,9 +71,6 @@ class AUHALStreamTest : public testing::Test {
AudioDeviceInfoAccessorForTests manager_device_info_;
MockAudioSourceCallback source_;
std::string log_message_;
-
- private:
- DISALLOW_COPY_AND_ASSIGN(AUHALStreamTest);
};
TEST_F(AUHALStreamTest, HardwareSampleRate) {
diff --git a/chromium/media/audio/mac/audio_device_listener_mac.h b/chromium/media/audio/mac/audio_device_listener_mac.h
index 09c64cf08da..0c6dced3fc5 100644
--- a/chromium/media/audio/mac/audio_device_listener_mac.h
+++ b/chromium/media/audio/mac/audio_device_listener_mac.h
@@ -31,6 +31,10 @@ class MEDIA_EXPORT AudioDeviceListenerMac {
bool monitor_default_input = false,
bool monitor_addition_removal = false,
bool monitor_sources = false);
+
+ AudioDeviceListenerMac(const AudioDeviceListenerMac&) = delete;
+ AudioDeviceListenerMac& operator=(const AudioDeviceListenerMac&) = delete;
+
~AudioDeviceListenerMac();
private:
@@ -67,8 +71,6 @@ class MEDIA_EXPORT AudioDeviceListenerMac {
THREAD_CHECKER(thread_checker_);
base::WeakPtrFactory<AudioDeviceListenerMac> weak_factory_;
-
- DISALLOW_COPY_AND_ASSIGN(AudioDeviceListenerMac);
};
} // namespace media
diff --git a/chromium/media/audio/mac/audio_device_listener_mac_unittest.cc b/chromium/media/audio/mac/audio_device_listener_mac_unittest.cc
index b5346a93352..0a4599b3ae4 100644
--- a/chromium/media/audio/mac/audio_device_listener_mac_unittest.cc
+++ b/chromium/media/audio/mac/audio_device_listener_mac_unittest.cc
@@ -32,6 +32,10 @@ class AudioDeviceListenerMacTest : public testing::Test {
base::RunLoop().RunUntilIdle();
}
+ AudioDeviceListenerMacTest(const AudioDeviceListenerMacTest&) = delete;
+ AudioDeviceListenerMacTest& operator=(const AudioDeviceListenerMacTest&) =
+ delete;
+
virtual ~AudioDeviceListenerMacTest() {
// It's important to destroy the device listener from the message loop in
// order to ensure we don't end up with unbalanced TaskObserver calls.
@@ -100,8 +104,6 @@ class AudioDeviceListenerMacTest : public testing::Test {
protected:
base::test::SingleThreadTaskEnvironment task_environment_;
std::unique_ptr<AudioDeviceListenerMac> device_listener_;
-
- DISALLOW_COPY_AND_ASSIGN(AudioDeviceListenerMacTest);
};
// Simulate a device change event and ensure we get the right callback.
diff --git a/chromium/media/audio/mac/audio_input_mac.cc b/chromium/media/audio/mac/audio_input_mac.cc
index 3151f4e687a..275d2849c80 100644
--- a/chromium/media/audio/mac/audio_input_mac.cc
+++ b/chromium/media/audio/mac/audio_input_mac.cc
@@ -95,10 +95,8 @@ void PCMQueueInAudioInputStream::Start(AudioInputCallback* callback) {
deferred_start_cb_.Reset(base::BindOnce(&PCMQueueInAudioInputStream::Start,
base::Unretained(this), callback));
manager_->GetTaskRunner()->PostDelayedTask(
- FROM_HERE,
- deferred_start_cb_.callback(),
- base::TimeDelta::FromSeconds(
- AudioManagerMac::kStartDelayInSecsForPowerEvents));
+ FROM_HERE, deferred_start_cb_.callback(),
+ base::Seconds(AudioManagerMac::kStartDelayInSecsForPowerEvents));
return;
}
@@ -116,8 +114,7 @@ void PCMQueueInAudioInputStream::Start(AudioInputCallback* callback) {
// true when the timer expires.
input_callback_timer_ = std::make_unique<base::OneShotTimer>();
input_callback_timer_->Start(
- FROM_HERE,
- base::TimeDelta::FromSeconds(kInputCallbackStartTimeoutInSeconds), this,
+ FROM_HERE, base::Seconds(kInputCallbackStartTimeoutInSeconds), this,
&PCMQueueInAudioInputStream::CheckInputStartupSuccess);
DCHECK(input_callback_timer_->IsRunning());
}
@@ -267,7 +264,7 @@ void PCMQueueInAudioInputStream::HandleInputBuffer(
// TODO(dalecurtis): Delete all this. It shouldn't be necessary now that we
// have a ring buffer and FIFO on the actual shared memory.
base::TimeDelta elapsed = base::TimeTicks::Now() - last_fill_;
- const base::TimeDelta kMinDelay = base::TimeDelta::FromMilliseconds(5);
+ const base::TimeDelta kMinDelay = base::Milliseconds(5);
if (elapsed < kMinDelay) {
TRACE_EVENT0("audio",
"PCMQueueInAudioInputStream::HandleInputBuffer sleep");
diff --git a/chromium/media/audio/mac/audio_input_mac.h b/chromium/media/audio/mac/audio_input_mac.h
index 15da618e448..7cab4336b07 100644
--- a/chromium/media/audio/mac/audio_input_mac.h
+++ b/chromium/media/audio/mac/audio_input_mac.h
@@ -32,6 +32,11 @@ class PCMQueueInAudioInputStream : public AudioInputStream {
// Parameters as per AudioManager::MakeAudioInputStream.
PCMQueueInAudioInputStream(AudioManagerMac* manager,
const AudioParameters& params);
+
+ PCMQueueInAudioInputStream(const PCMQueueInAudioInputStream&) = delete;
+ PCMQueueInAudioInputStream& operator=(const PCMQueueInAudioInputStream&) =
+ delete;
+
~PCMQueueInAudioInputStream() override;
// Implementation of AudioInputStream.
@@ -111,8 +116,6 @@ class PCMQueueInAudioInputStream : public AudioInputStream {
std::unique_ptr<base::OneShotTimer> input_callback_timer_;
std::unique_ptr<media::AudioBus> audio_bus_;
-
- DISALLOW_COPY_AND_ASSIGN(PCMQueueInAudioInputStream);
};
} // namespace media
diff --git a/chromium/media/audio/mac/audio_low_latency_input_mac.cc b/chromium/media/audio/mac/audio_low_latency_input_mac.cc
index eb39b38c308..e28d37435da 100644
--- a/chromium/media/audio/mac/audio_low_latency_input_mac.cc
+++ b/chromium/media/audio/mac/audio_low_latency_input_mac.cc
@@ -687,8 +687,7 @@ void AUAudioInputStream::Start(AudioInputCallback* callback) {
base::Unretained(this), callback));
manager_->GetTaskRunner()->PostDelayedTask(
FROM_HERE, deferred_start_cb_.callback(),
- base::TimeDelta::FromSeconds(
- AudioManagerMac::kStartDelayInSecsForPowerEvents));
+ base::Seconds(AudioManagerMac::kStartDelayInSecsForPowerEvents));
return;
}
@@ -719,8 +718,7 @@ void AUAudioInputStream::Start(AudioInputCallback* callback) {
// true when the timer expires.
input_callback_timer_ = std::make_unique<base::OneShotTimer>();
input_callback_timer_->Start(
- FROM_HERE,
- base::TimeDelta::FromSeconds(kInputCallbackStartTimeoutInSeconds), this,
+ FROM_HERE, base::Seconds(kInputCallbackStartTimeoutInSeconds), this,
&AUAudioInputStream::CheckInputStartupSuccess);
DCHECK(input_callback_timer_->IsRunning());
}
@@ -1123,7 +1121,7 @@ OSStatus AUAudioInputStream::OnDataIsAvailable(
base::TimeDelta time_since_last_success =
base::TimeTicks::Now() - last_success_time_;
if ((time_since_last_success >
- base::TimeDelta::FromSeconds(kMaxErrorTimeoutInSeconds))) {
+ base::Seconds(kMaxErrorTimeoutInSeconds))) {
const char* err = (result == kAudioUnitErr_TooManyFramesToProcess)
? "kAudioUnitErr_TooManyFramesToProcess"
: "kAudioUnitErr_CannotDoInCurrentContext";
@@ -1430,14 +1428,12 @@ void AUAudioInputStream::ReportAndResetStats() {
if (glitches_detected_ != 0) {
UMA_HISTOGRAM_LONG_TIMES("Media.Audio.Capture.LostFramesInMs",
- base::TimeDelta::FromMilliseconds(lost_frames_ms));
+ base::Milliseconds(lost_frames_ms));
auto largest_glitch_ms =
(largest_glitch_frames_ * 1000) / format_.mSampleRate;
- UMA_HISTOGRAM_CUSTOM_TIMES(
- "Media.Audio.Capture.LargestGlitchMs",
- base::TimeDelta::FromMilliseconds(largest_glitch_ms),
- base::TimeDelta::FromMilliseconds(1), base::TimeDelta::FromMinutes(1),
- 50);
+ UMA_HISTOGRAM_CUSTOM_TIMES("Media.Audio.Capture.LargestGlitchMs",
+ base::Milliseconds(largest_glitch_ms),
+ base::Milliseconds(1), base::Minutes(1), 50);
DLOG(WARNING) << log_message;
}
diff --git a/chromium/media/audio/mac/audio_low_latency_input_mac.h b/chromium/media/audio/mac/audio_low_latency_input_mac.h
index 1344ba83e3e..5ea6c83194d 100644
--- a/chromium/media/audio/mac/audio_low_latency_input_mac.h
+++ b/chromium/media/audio/mac/audio_low_latency_input_mac.h
@@ -67,6 +67,10 @@ class MEDIA_EXPORT AUAudioInputStream
AudioDeviceID audio_device_id,
const AudioManager::LogCallback& log_callback,
AudioManagerBase::VoiceProcessingMode voice_processing_mode);
+
+ AUAudioInputStream(const AUAudioInputStream&) = delete;
+ AUAudioInputStream& operator=(const AUAudioInputStream&) = delete;
+
// The dtor is typically called by the AudioManager only and it is usually
// triggered by calling AudioInputStream::Close().
~AUAudioInputStream() override;
@@ -271,8 +275,6 @@ class MEDIA_EXPORT AUAudioInputStream
// Callback to send statistics info.
AudioManager::LogCallback log_callback_;
-
- DISALLOW_COPY_AND_ASSIGN(AUAudioInputStream);
};
} // namespace media
diff --git a/chromium/media/audio/mac/audio_manager_mac.cc b/chromium/media/audio/mac/audio_manager_mac.cc
index 261596b9b74..2066a12b194 100644
--- a/chromium/media/audio/mac/audio_manager_mac.cc
+++ b/chromium/media/audio/mac/audio_manager_mac.cc
@@ -12,6 +12,7 @@
#include "base/bind.h"
#include "base/command_line.h"
+#include "base/containers/flat_set.h"
#include "base/mac/mac_logging.h"
#include "base/mac/mac_util.h"
#include "base/mac/scoped_cftyperef.h"
@@ -453,6 +454,9 @@ class AudioManagerMac::AudioPowerObserver : public base::PowerSuspendObserver {
base::PowerMonitor::AddPowerSuspendObserver(this);
}
+ AudioPowerObserver(const AudioPowerObserver&) = delete;
+ AudioPowerObserver& operator=(const AudioPowerObserver&) = delete;
+
~AudioPowerObserver() override {
DCHECK(thread_checker_.CalledOnValidThread());
if (!is_monitoring_)
@@ -491,8 +495,8 @@ class AudioManagerMac::AudioPowerObserver : public base::PowerSuspendObserver {
DVLOG(1) << "OnResume";
++num_resume_notifications_;
is_suspending_ = false;
- earliest_start_time_ = base::TimeTicks::Now() +
- base::TimeDelta::FromSeconds(kStartDelayInSecsForPowerEvents);
+ earliest_start_time_ =
+ base::TimeTicks::Now() + base::Seconds(kStartDelayInSecsForPowerEvents);
}
bool is_suspending_;
@@ -500,8 +504,6 @@ class AudioManagerMac::AudioPowerObserver : public base::PowerSuspendObserver {
base::TimeTicks earliest_start_time_;
base::ThreadChecker thread_checker_;
size_t num_resume_notifications_;
-
- DISALLOW_COPY_AND_ASSIGN(AudioPowerObserver);
};
AudioManagerMac::AudioManagerMac(std::unique_ptr<AudioThread> audio_thread,
@@ -669,10 +671,12 @@ std::string AudioManagerMac::GetAssociatedOutputDeviceID(
std::vector<AudioObjectID> related_device_ids =
core_audio_mac::GetRelatedDeviceIDs(input_device_id);
- std::vector<AudioObjectID> related_output_device_ids;
+ // Defined as a set as device IDs might be duplicated in
+ // GetRelatedDeviceIDs().
+ base::flat_set<AudioObjectID> related_output_device_ids;
for (AudioObjectID device_id : related_device_ids) {
if (core_audio_mac::GetNumStreams(device_id, false /* is_input */) > 0)
- related_output_device_ids.push_back(device_id);
+ related_output_device_ids.insert(device_id);
}
// Return the device ID if there is only one associated device.
@@ -681,7 +685,7 @@ std::string AudioManagerMac::GetAssociatedOutputDeviceID(
// to an endpoint, so we cannot randomly pick a device.
if (related_output_device_ids.size() == 1) {
absl::optional<std::string> related_unique_id =
- core_audio_mac::GetDeviceUniqueID(related_output_device_ids[0]);
+ core_audio_mac::GetDeviceUniqueID(*related_output_device_ids.begin());
if (related_unique_id)
return std::move(*related_unique_id);
}
@@ -1122,7 +1126,7 @@ base::TimeDelta AudioManagerMac::GetHardwareLatency(
<< "Could not get audio device stream ids size.";
}
- return base::TimeDelta::FromSecondsD(audio_unit_latency_sec) +
+ return base::Seconds(audio_unit_latency_sec) +
AudioTimestampHelper::FramesToTime(
device_latency_frames + stream_latency_frames, sample_rate);
}
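
GetAssociatedOutputDeviceID() now collects the related output devices into a base::flat_set so that duplicate IDs coming back from GetRelatedDeviceIDs() collapse, keeping the "exactly one associated device" check meaningful. A simplified sketch of that de-duplication, using std::set as a stand-in for base::flat_set, hypothetical device IDs, and omitting the output-stream-count filter:

#include <cstdint>
#include <iostream>
#include <set>
#include <vector>

using AudioObjectID = uint32_t;

int main() {
  // Hypothetical result of GetRelatedDeviceIDs(); an ID may appear twice.
  const std::vector<AudioObjectID> related = {42, 42, 7};

  std::set<AudioObjectID> related_outputs;  // base::flat_set in the diff
  for (AudioObjectID id : related)
    related_outputs.insert(id);  // duplicates collapse here

  if (related_outputs.size() == 1) {
    std::cout << "unique associated device: " << *related_outputs.begin()
              << "\n";
  } else {
    std::cout << related_outputs.size() << " candidates, none chosen\n";
  }
}
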
diff --git a/chromium/media/audio/mac/audio_manager_mac.h b/chromium/media/audio/mac/audio_manager_mac.h
index 74de0a11bdf..8abe18988e9 100644
--- a/chromium/media/audio/mac/audio_manager_mac.h
+++ b/chromium/media/audio/mac/audio_manager_mac.h
@@ -32,6 +32,10 @@ class MEDIA_EXPORT AudioManagerMac : public AudioManagerBase {
public:
AudioManagerMac(std::unique_ptr<AudioThread> audio_thread,
AudioLogFactory* audio_log_factory);
+
+ AudioManagerMac(const AudioManagerMac&) = delete;
+ AudioManagerMac& operator=(const AudioManagerMac&) = delete;
+
~AudioManagerMac() override;
// Implementation of AudioManager.
@@ -198,8 +202,6 @@ class MEDIA_EXPORT AudioManagerMac : public AudioManagerBase {
bool in_shutdown_;
base::WeakPtrFactory<AudioManagerMac> weak_ptr_factory_;
-
- DISALLOW_COPY_AND_ASSIGN(AudioManagerMac);
};
} // namespace media
diff --git a/chromium/media/audio/mac/scoped_audio_unit.h b/chromium/media/audio/mac/scoped_audio_unit.h
index cb6b85121d8..d688399b1e1 100644
--- a/chromium/media/audio/mac/scoped_audio_unit.h
+++ b/chromium/media/audio/mac/scoped_audio_unit.h
@@ -24,6 +24,10 @@ class ScopedAudioUnit {
// the operation fails, is_valid() will return false and audio_unit() will
// return nullptr.
ScopedAudioUnit(AudioDeviceID device, AUElement element);
+
+ ScopedAudioUnit(const ScopedAudioUnit&) = delete;
+ ScopedAudioUnit& operator=(const ScopedAudioUnit&) = delete;
+
~ScopedAudioUnit();
bool is_valid() const { return audio_unit_ != nullptr; }
@@ -31,8 +35,6 @@ class ScopedAudioUnit {
private:
AudioUnit audio_unit_ = nullptr;
-
- DISALLOW_COPY_AND_ASSIGN(ScopedAudioUnit);
};
} // namespace media
diff --git a/chromium/media/audio/mock_audio_debug_recording_manager.h b/chromium/media/audio/mock_audio_debug_recording_manager.h
index 3e9073e24da..1ef5983f0ef 100644
--- a/chromium/media/audio/mock_audio_debug_recording_manager.h
+++ b/chromium/media/audio/mock_audio_debug_recording_manager.h
@@ -17,15 +17,17 @@ class MockAudioDebugRecordingManager : public AudioDebugRecordingManager {
explicit MockAudioDebugRecordingManager(
scoped_refptr<base::SingleThreadTaskRunner> task_runner);
+ MockAudioDebugRecordingManager(const MockAudioDebugRecordingManager&) =
+ delete;
+ MockAudioDebugRecordingManager& operator=(
+ const MockAudioDebugRecordingManager&) = delete;
+
~MockAudioDebugRecordingManager() override;
MOCK_METHOD1(EnableDebugRecording,
void(AudioDebugRecordingManager::CreateWavFileCallback
create_file_callback));
MOCK_METHOD0(DisableDebugRecording, void());
-
- private:
- DISALLOW_COPY_AND_ASSIGN(MockAudioDebugRecordingManager);
};
} // namespace media.
diff --git a/chromium/media/audio/mock_audio_manager.h b/chromium/media/audio/mock_audio_manager.h
index dd8cce8a40f..b40bb43ac0d 100644
--- a/chromium/media/audio/mock_audio_manager.h
+++ b/chromium/media/audio/mock_audio_manager.h
@@ -36,6 +36,10 @@ class MockAudioManager : public AudioManager {
const std::string& device_id)>;
explicit MockAudioManager(std::unique_ptr<AudioThread> audio_thread);
+
+ MockAudioManager(const MockAudioManager&) = delete;
+ MockAudioManager& operator=(const MockAudioManager&) = delete;
+
~MockAudioManager() override;
AudioOutputStream* MakeAudioOutputStream(
@@ -116,8 +120,6 @@ class MockAudioManager : public AudioManager {
GetDeviceDescriptionsCallback get_output_device_descriptions_cb_;
GetAssociatedOutputDeviceIDCallback get_associated_output_device_id_cb_;
std::unique_ptr<AudioDebugRecordingManager> debug_recording_manager_;
-
- DISALLOW_COPY_AND_ASSIGN(MockAudioManager);
};
} // namespace media.
diff --git a/chromium/media/audio/mock_audio_source_callback.h b/chromium/media/audio/mock_audio_source_callback.h
index b31ac30e5ff..a01e0671186 100644
--- a/chromium/media/audio/mock_audio_source_callback.h
+++ b/chromium/media/audio/mock_audio_source_callback.h
@@ -17,14 +17,15 @@ namespace media {
class MockAudioSourceCallback : public AudioOutputStream::AudioSourceCallback {
public:
MockAudioSourceCallback();
+
+ MockAudioSourceCallback(const MockAudioSourceCallback&) = delete;
+ MockAudioSourceCallback& operator=(const MockAudioSourceCallback&) = delete;
+
~MockAudioSourceCallback() override;
MOCK_METHOD4(OnMoreData,
int(base::TimeDelta, base::TimeTicks, int, AudioBus*));
MOCK_METHOD1(OnError, void(ErrorType));
-
- private:
- DISALLOW_COPY_AND_ASSIGN(MockAudioSourceCallback);
};
} // namespace media
diff --git a/chromium/media/audio/power_observer_helper.h b/chromium/media/audio/power_observer_helper.h
index 8722e12cfc5..beb263c0636 100644
--- a/chromium/media/audio/power_observer_helper.h
+++ b/chromium/media/audio/power_observer_helper.h
@@ -26,6 +26,9 @@ class MEDIA_EXPORT PowerObserverHelper : public base::PowerSuspendObserver {
base::RepeatingClosure suspend_callback,
base::RepeatingClosure resume_callback);
+ PowerObserverHelper(const PowerObserverHelper&) = delete;
+ PowerObserverHelper& operator=(const PowerObserverHelper&) = delete;
+
~PowerObserverHelper() override;
// Must be called on |task_runner|.
@@ -66,8 +69,6 @@ class MEDIA_EXPORT PowerObserverHelper : public base::PowerSuspendObserver {
bool is_suspending_ = false;
base::WeakPtrFactory<PowerObserverHelper> weak_factory_{this};
-
- DISALLOW_COPY_AND_ASSIGN(PowerObserverHelper);
};
} // namespace media
diff --git a/chromium/media/audio/pulse/audio_manager_pulse.h b/chromium/media/audio/pulse/audio_manager_pulse.h
index 4e10d881682..856f2ea76e7 100644
--- a/chromium/media/audio/pulse/audio_manager_pulse.h
+++ b/chromium/media/audio/pulse/audio_manager_pulse.h
@@ -23,6 +23,10 @@ class MEDIA_EXPORT AudioManagerPulse : public AudioManagerBase {
AudioLogFactory* audio_log_factory,
pa_threaded_mainloop* pa_mainloop,
pa_context* pa_context);
+
+ AudioManagerPulse(const AudioManagerPulse&) = delete;
+ AudioManagerPulse& operator=(const AudioManagerPulse&) = delete;
+
~AudioManagerPulse() override;
// Implementation of AudioManager.
@@ -107,8 +111,6 @@ class MEDIA_EXPORT AudioManagerPulse : public AudioManagerBase {
int native_channel_count_;
std::string default_source_name_;
bool default_source_is_monitor_;
-
- DISALLOW_COPY_AND_ASSIGN(AudioManagerPulse);
};
} // namespace media
diff --git a/chromium/media/audio/pulse/pulse_input.cc b/chromium/media/audio/pulse/pulse_input.cc
index 59b288eda69..1f7cca7388e 100644
--- a/chromium/media/audio/pulse/pulse_input.cc
+++ b/chromium/media/audio/pulse/pulse_input.cc
@@ -371,7 +371,7 @@ void PulseAudioInputStream::ReadData() {
// TODO(dalecurtis): Delete all this. It shouldn't be necessary now that we
// have a ring buffer and FIFO on the actual shared memory.
if (fifo_.available_blocks())
- base::PlatformThread::Sleep(base::TimeDelta::FromMilliseconds(5));
+ base::PlatformThread::Sleep(base::Milliseconds(5));
}
pa_threaded_mainloop_signal(pa_mainloop_, 0);
diff --git a/chromium/media/audio/pulse/pulse_input.h b/chromium/media/audio/pulse/pulse_input.h
index b340bda07e9..df3f28bde2a 100644
--- a/chromium/media/audio/pulse/pulse_input.h
+++ b/chromium/media/audio/pulse/pulse_input.h
@@ -31,6 +31,9 @@ class PulseAudioInputStream : public AgcAudioStream<AudioInputStream> {
pa_context* context,
AudioManager::LogCallback log_callback);
+ PulseAudioInputStream(const PulseAudioInputStream&) = delete;
+ PulseAudioInputStream& operator=(const PulseAudioInputStream&) = delete;
+
~PulseAudioInputStream() override;
// Implementation of AudioInputStream.
@@ -90,8 +93,6 @@ class PulseAudioInputStream : public AgcAudioStream<AudioInputStream> {
pa_stream* handle_;
base::ThreadChecker thread_checker_;
-
- DISALLOW_COPY_AND_ASSIGN(PulseAudioInputStream);
};
} // namespace media
diff --git a/chromium/media/audio/pulse/pulse_output.h b/chromium/media/audio/pulse/pulse_output.h
index 6a85b2647f3..4dae18e76b9 100644
--- a/chromium/media/audio/pulse/pulse_output.h
+++ b/chromium/media/audio/pulse/pulse_output.h
@@ -45,6 +45,9 @@ class PulseAudioOutputStream : public AudioOutputStream {
AudioManagerBase* manager,
AudioManager::LogCallback log_callback);
+ PulseAudioOutputStream(const PulseAudioOutputStream&) = delete;
+ PulseAudioOutputStream& operator=(const PulseAudioOutputStream&) = delete;
+
~PulseAudioOutputStream() override;
// Implementation of AudioOutputStream.
@@ -105,8 +108,6 @@ class PulseAudioOutputStream : public AudioOutputStream {
const size_t buffer_size_;
base::ThreadChecker thread_checker_;
-
- DISALLOW_COPY_AND_ASSIGN(PulseAudioOutputStream);
};
} // namespace media
diff --git a/chromium/media/audio/pulse/pulse_util.cc b/chromium/media/audio/pulse/pulse_util.cc
index d8e6ce1ca5c..9d8eb236c33 100644
--- a/chromium/media/audio/pulse/pulse_util.cc
+++ b/chromium/media/audio/pulse/pulse_util.cc
@@ -92,13 +92,16 @@ pa_channel_position ChromiumToPAChannelPosition(Channels channel) {
class ScopedPropertyList {
public:
ScopedPropertyList() : property_list_(pa_proplist_new()) {}
+
+ ScopedPropertyList(const ScopedPropertyList&) = delete;
+ ScopedPropertyList& operator=(const ScopedPropertyList&) = delete;
+
~ScopedPropertyList() { pa_proplist_free(property_list_); }
pa_proplist* get() const { return property_list_; }
private:
pa_proplist* property_list_;
- DISALLOW_COPY_AND_ASSIGN(ScopedPropertyList);
};
struct InputBusData {
@@ -259,7 +262,7 @@ bool InitPulse(pa_threaded_mainloop** mainloop, pa_context** context) {
// browser startup (other times it's during audio process startup). In the
// normal case, this should only take ~50ms, but we've seen some test bots
// hang indefinitely when the pulse daemon can't be started.
- constexpr base::TimeDelta kStartupTimeout = base::TimeDelta::FromSeconds(5);
+ constexpr base::TimeDelta kStartupTimeout = base::Seconds(5);
const bool was_signaled = context_wait.TimedWait(kStartupTimeout);
// Require the mainloop lock before checking the context state.
@@ -384,7 +387,7 @@ base::TimeDelta GetHardwareLatency(pa_stream* stream) {
if (negative)
return base::TimeDelta();
- return base::TimeDelta::FromMicroseconds(latency_micros);
+ return base::Microseconds(latency_micros);
}
// Helper macro for CreateInput/OutputStream() to avoid code spam and
diff --git a/chromium/media/audio/pulse/pulse_util.h b/chromium/media/audio/pulse/pulse_util.h
index 93d740e993f..0e80f8113ad 100644
--- a/chromium/media/audio/pulse/pulse_util.h
+++ b/chromium/media/audio/pulse/pulse_util.h
@@ -31,13 +31,15 @@ class AutoPulseLock {
pa_threaded_mainloop_lock(pa_mainloop_);
}
+ AutoPulseLock(const AutoPulseLock&) = delete;
+ AutoPulseLock& operator=(const AutoPulseLock&) = delete;
+
~AutoPulseLock() {
pa_threaded_mainloop_unlock(pa_mainloop_);
}
private:
pa_threaded_mainloop* pa_mainloop_;
- DISALLOW_COPY_AND_ASSIGN(AutoPulseLock);
};
bool MEDIA_EXPORT InitPulse(pa_threaded_mainloop** mainloop,
diff --git a/chromium/media/audio/simple_sources.cc b/chromium/media/audio/simple_sources.cc
index ebcdb175b6a..004df99dba1 100644
--- a/chromium/media/audio/simple_sources.cc
+++ b/chromium/media/audio/simple_sources.cc
@@ -274,7 +274,7 @@ int BeepingSource::OnMoreData(base::TimeDelta /* delay */,
BeepContext* beep_context = GetBeepContext();
if (beep_context->automatic_beep()) {
base::TimeDelta delta = interval_from_last_beep_ -
- base::TimeDelta::FromMilliseconds(kAutomaticBeepIntervalInMs);
+ base::Milliseconds(kAutomaticBeepIntervalInMs);
if (delta > base::TimeDelta()) {
should_beep = true;
interval_from_last_beep_ = delta;
diff --git a/chromium/media/audio/test_audio_thread.h b/chromium/media/audio/test_audio_thread.h
index ddcffb7e2d7..db7acf5a23b 100644
--- a/chromium/media/audio/test_audio_thread.h
+++ b/chromium/media/audio/test_audio_thread.h
@@ -18,6 +18,10 @@ class TestAudioThread final : public AudioThread {
public:
TestAudioThread();
explicit TestAudioThread(bool use_real_thread);
+
+ TestAudioThread(const TestAudioThread&) = delete;
+ TestAudioThread& operator=(const TestAudioThread&) = delete;
+
~TestAudioThread() final;
// AudioThread implementation.
@@ -31,7 +35,6 @@ class TestAudioThread final : public AudioThread {
scoped_refptr<base::SingleThreadTaskRunner> task_runner_;
THREAD_CHECKER(thread_checker_);
- DISALLOW_COPY_AND_ASSIGN(TestAudioThread);
};
} // namespace media
diff --git a/chromium/media/audio/wav_audio_handler.cc b/chromium/media/audio/wav_audio_handler.cc
index 99e8a73a3d4..ee2395e325d 100644
--- a/chromium/media/audio/wav_audio_handler.cc
+++ b/chromium/media/audio/wav_audio_handler.cc
@@ -304,8 +304,7 @@ bool WavAudioHandler::CopyTo(AudioBus* bus,
}
base::TimeDelta WavAudioHandler::GetDuration() const {
- return base::TimeDelta::FromSecondsD(total_frames_ /
- static_cast<double>(sample_rate_));
+ return base::Seconds(total_frames_ / static_cast<double>(sample_rate_));
}
} // namespace media
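
WavAudioHandler::GetDuration() computes the clip length as total frames divided by the sample rate; the new base::Seconds() helper takes the fractional value directly, so the separate FromSecondsD() spelling goes away. A minimal sketch of the same computation, with std::chrono::duration<double> standing in for base::TimeDelta and hypothetical WAV parameters:

#include <chrono>
#include <cstdint>
#include <iostream>

int main() {
  // Hypothetical WAV metadata.
  const uint32_t total_frames = 66150;
  const int sample_rate = 44100;

  // Same arithmetic as GetDuration(): frames / rate, in seconds.
  const std::chrono::duration<double> duration(
      total_frames / static_cast<double>(sample_rate));

  std::cout << duration.count() << " s\n";  // 1.5 s
}
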
diff --git a/chromium/media/audio/wav_audio_handler.h b/chromium/media/audio/wav_audio_handler.h
index bbfb1ced362..17acd80d39a 100644
--- a/chromium/media/audio/wav_audio_handler.h
+++ b/chromium/media/audio/wav_audio_handler.h
@@ -30,6 +30,9 @@ class MEDIA_EXPORT WavAudioHandler {
kAudioFormatExtensible = 0xfffe
};
+ WavAudioHandler(const WavAudioHandler&) = delete;
+ WavAudioHandler& operator=(const WavAudioHandler&) = delete;
+
virtual ~WavAudioHandler();
// Create a WavAudioHandler using |wav_data|. If |wav_data| cannot be parsed
@@ -73,8 +76,6 @@ class MEDIA_EXPORT WavAudioHandler {
const uint16_t bits_per_sample_;
const AudioFormat audio_format_;
uint32_t total_frames_;
-
- DISALLOW_COPY_AND_ASSIGN(WavAudioHandler);
};
} // namespace media
diff --git a/chromium/media/audio/win/audio_device_listener_win.h b/chromium/media/audio/win/audio_device_listener_win.h
index 4ea3568482c..42bcfbeef5e 100644
--- a/chromium/media/audio/win/audio_device_listener_win.h
+++ b/chromium/media/audio/win/audio_device_listener_win.h
@@ -35,14 +35,17 @@ class MEDIA_EXPORT AudioDeviceListenerWin : public IMMNotificationClient {
// thus the callee must be thread safe. |listener_cb| is a permanent callback
// and must outlive AudioDeviceListenerWin.
explicit AudioDeviceListenerWin(base::RepeatingClosure listener_cb);
+
+ AudioDeviceListenerWin(const AudioDeviceListenerWin&) = delete;
+ AudioDeviceListenerWin& operator=(const AudioDeviceListenerWin&) = delete;
+
virtual ~AudioDeviceListenerWin();
private:
friend class AudioDeviceListenerWinTest;
// Minimum allowed time between device change notifications.
- static constexpr base::TimeDelta kDeviceChangeLimit =
- base::TimeDelta::FromMilliseconds(250);
+ static constexpr base::TimeDelta kDeviceChangeLimit = base::Milliseconds(250);
// IMMNotificationClient implementation.
IFACEMETHODIMP_(ULONG) AddRef() override;
@@ -69,8 +72,6 @@ class MEDIA_EXPORT AudioDeviceListenerWin : public IMMNotificationClient {
THREAD_CHECKER(thread_checker_);
const base::TickClock* tick_clock_;
-
- DISALLOW_COPY_AND_ASSIGN(AudioDeviceListenerWin);
};
} // namespace media
diff --git a/chromium/media/audio/win/audio_device_listener_win_unittest.cc b/chromium/media/audio/win/audio_device_listener_win_unittest.cc
index 07266757f10..f0333c5c4b1 100644
--- a/chromium/media/audio/win/audio_device_listener_win_unittest.cc
+++ b/chromium/media/audio/win/audio_device_listener_win_unittest.cc
@@ -43,17 +43,21 @@ class AudioDeviceListenerWinTest
base::BindRepeating(&AudioDeviceListenerWinTest::OnDeviceChange,
base::Unretained(this)));
- tick_clock_.Advance(base::TimeDelta::FromSeconds(12345));
+ tick_clock_.Advance(base::Seconds(12345));
output_device_listener_->tick_clock_ = &tick_clock_;
}
+ AudioDeviceListenerWinTest(const AudioDeviceListenerWinTest&) = delete;
+ AudioDeviceListenerWinTest& operator=(const AudioDeviceListenerWinTest&) =
+ delete;
+
~AudioDeviceListenerWinTest() override {
system_monitor_.RemoveDevicesChangedObserver(this);
}
void AdvanceLastDeviceChangeTime() {
tick_clock_.Advance(AudioDeviceListenerWin::kDeviceChangeLimit +
- base::TimeDelta::FromMilliseconds(1));
+ base::Milliseconds(1));
}
// Simulate a device change where no output devices are available.
@@ -81,8 +85,6 @@ class AudioDeviceListenerWinTest
base::SystemMonitor system_monitor_;
base::SimpleTestTickClock tick_clock_;
std::unique_ptr<AudioDeviceListenerWin> output_device_listener_;
-
- DISALLOW_COPY_AND_ASSIGN(AudioDeviceListenerWinTest);
};
// Simulate a device change events and ensure we get the right callbacks.
diff --git a/chromium/media/audio/win/audio_low_latency_input_win.cc b/chromium/media/audio/win/audio_low_latency_input_win.cc
index 81ab2039682..54faba84bf3 100644
--- a/chromium/media/audio/win/audio_low_latency_input_win.cc
+++ b/chromium/media/audio/win/audio_low_latency_input_win.cc
@@ -575,13 +575,6 @@ void WASAPIAudioInputStream::Stop() {
__func__, min_timestamp_diff_.InMillisecondsF(),
max_timestamp_diff_.InMillisecondsF());
- const bool monotonic_timestamps =
- min_timestamp_diff_ >= base::TimeDelta::FromMicroseconds(1);
- base::UmaHistogramBoolean("Media.Audio.Capture.Win.MonotonicTimestamps",
- monotonic_timestamps);
- SendLogMessage("%s => (Media.Audio.Capture.Win.MonotonicTimestamps=%s)",
- __func__, monotonic_timestamps ? "true" : "false");
-
started_ = false;
sink_ = nullptr;
}
@@ -895,13 +888,12 @@ void WASAPIAudioInputStream::PullCaptureDataAndPushToSink() {
base::TimeTicks capture_time;
if (!timestamp_error_was_detected) {
// Use the latest |capture_time_100ns| since it is marked as valid.
- capture_time +=
- base::TimeDelta::FromMicroseconds(capture_time_100ns / 10.0);
+ capture_time += base::Microseconds(capture_time_100ns / 10.0);
}
if (capture_time <= last_capture_time_) {
// Latest |capture_time_100ns| can't be trusted. Ensure a monotonic time-
// stamp sequence by adding one microsecond to the latest timestamp.
- capture_time = last_capture_time_ + base::TimeDelta::FromMicroseconds(1);
+ capture_time = last_capture_time_ + base::Microseconds(1);
}
// Keep track of max and min time difference between two successive time-
@@ -1629,14 +1621,12 @@ void WASAPIAudioInputStream::ReportAndResetGlitchStats() {
__func__, total_glitches_, total_lost_frames_, lost_frames_ms);
if (total_glitches_ != 0) {
UMA_HISTOGRAM_LONG_TIMES("Media.Audio.Capture.LostFramesInMs",
- base::TimeDelta::FromMilliseconds(lost_frames_ms));
+ base::Milliseconds(lost_frames_ms));
int64_t largest_glitch_ms =
(largest_glitch_frames_ * 1000) / input_format_.Format.nSamplesPerSec;
- UMA_HISTOGRAM_CUSTOM_TIMES(
- "Media.Audio.Capture.LargestGlitchMs",
- base::TimeDelta::FromMilliseconds(largest_glitch_ms),
- base::TimeDelta::FromMilliseconds(1), base::TimeDelta::FromMinutes(1),
- 50);
+ UMA_HISTOGRAM_CUSTOM_TIMES("Media.Audio.Capture.LargestGlitchMs",
+ base::Milliseconds(largest_glitch_ms),
+ base::Milliseconds(1), base::Minutes(1), 50);
}
// TODO(https://crbug.com/825744): It can be possible to replace
@@ -1645,14 +1635,9 @@ void WASAPIAudioInputStream::ReportAndResetGlitchStats() {
num_data_discontinuity_warnings_);
SendLogMessage("%s => (discontinuity warnings=[%" PRIu64 "])", __func__,
num_data_discontinuity_warnings_);
- base::UmaHistogramCounts1M("Media.Audio.Capture.Win.TimestampErrors",
- num_timestamp_errors_);
SendLogMessage("%s => (timstamp errors=[%" PRIu64 "])", __func__,
num_timestamp_errors_);
if (num_timestamp_errors_ > 0) {
- base::UmaHistogramLongTimes(
- "Media.Audio.Capture.Win.TimeUntilFirstTimestampError",
- time_until_first_timestamp_error_);
SendLogMessage("%s => (time until first timestamp error=[%" PRId64 " ms])",
__func__,
time_until_first_timestamp_error_.InMilliseconds());
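
PullCaptureDataAndPushToSink() converts the driver's capture position from 100 ns units into a timestamp (divide by 10 for microseconds) and, when that value would move backwards, clamps it to the previous timestamp plus one microsecond so the sequence stays monotonic. A minimal sketch of that clamp, with std::chrono standing in for base::TimeTicks/base::TimeDelta and invented sample values:

#include <chrono>
#include <cstdint>
#include <iostream>

using Ticks = std::chrono::steady_clock::time_point;
using std::chrono::microseconds;

// |last_capture_time| is the previous frame's timestamp and
// |capture_time_100ns| is the driver position in 100 ns units.
Ticks NextCaptureTime(Ticks last_capture_time,
                      int64_t capture_time_100ns,
                      bool timestamp_error) {
  Ticks capture_time{};  // zero origin, as in the diff
  if (!timestamp_error) {
    // 100 ns units -> microseconds (divide by 10).
    capture_time += microseconds(capture_time_100ns / 10);
  }
  if (capture_time <= last_capture_time) {
    // Keep the sequence monotonic: previous timestamp + 1 us.
    capture_time = last_capture_time + microseconds(1);
  }
  return capture_time;
}

int main() {
  Ticks last{};
  last += microseconds(5000);
  // Driver reports 4000 us, older than |last|: expect last + 1 us.
  Ticks next = NextCaptureTime(last, /*capture_time_100ns=*/40000, false);
  std::cout << std::chrono::duration_cast<microseconds>(next - last).count()
            << " us after the previous timestamp\n";  // prints 1
}
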
diff --git a/chromium/media/audio/win/audio_low_latency_input_win.h b/chromium/media/audio/win/audio_low_latency_input_win.h
index 91b8fb19d3a..4ef0c026187 100644
--- a/chromium/media/audio/win/audio_low_latency_input_win.h
+++ b/chromium/media/audio/win/audio_low_latency_input_win.h
@@ -125,6 +125,9 @@ class MEDIA_EXPORT WASAPIAudioInputStream
const std::string& device_id,
AudioManager::LogCallback log_callback);
+ WASAPIAudioInputStream(const WASAPIAudioInputStream&) = delete;
+ WASAPIAudioInputStream& operator=(const WASAPIAudioInputStream&) = delete;
+
// The dtor is typically called by the AudioManager only and it is usually
// triggered by calling AudioInputStream::Close().
~WASAPIAudioInputStream() override;
@@ -339,8 +342,6 @@ class MEDIA_EXPORT WASAPIAudioInputStream
std::vector<ABI::Windows::Media::Effects::AudioEffectType> raw_effect_types_;
SEQUENCE_CHECKER(sequence_checker_);
-
- DISALLOW_COPY_AND_ASSIGN(WASAPIAudioInputStream);
};
} // namespace media
diff --git a/chromium/media/audio/win/audio_low_latency_input_win_unittest.cc b/chromium/media/audio/win/audio_low_latency_input_win_unittest.cc
index 2a7a0ad5a12..3d651a671f5 100644
--- a/chromium/media/audio/win/audio_low_latency_input_win_unittest.cc
+++ b/chromium/media/audio/win/audio_low_latency_input_win_unittest.cc
@@ -243,6 +243,9 @@ class ScopedAudioInputStream {
public:
explicit ScopedAudioInputStream(AudioInputStream* stream) : stream_(stream) {}
+ ScopedAudioInputStream(const ScopedAudioInputStream&) = delete;
+ ScopedAudioInputStream& operator=(const ScopedAudioInputStream&) = delete;
+
~ScopedAudioInputStream() {
if (stream_)
stream_->Close();
@@ -265,8 +268,6 @@ class ScopedAudioInputStream {
private:
AudioInputStream* stream_;
-
- DISALLOW_COPY_AND_ASSIGN(ScopedAudioInputStream);
};
class WinAudioInputTest : public ::testing::Test,
@@ -414,10 +415,6 @@ TEST_F(WinAudioInputTest, WASAPIAudioInputStreamHistograms) {
ais->Stop();
ais.Close();
histogram_tester.ExpectTotalCount("Media.Audio.Capture.Win.Glitches", 1);
- histogram_tester.ExpectTotalCount("Media.Audio.Capture.Win.TimestampErrors",
- 1);
- histogram_tester.ExpectTotalCount(
- "Media.Audio.Capture.Win.TimeUntilFirstTimestampError", 0);
}
// Test some additional calling sequences.
diff --git a/chromium/media/audio/win/audio_low_latency_output_win.cc b/chromium/media/audio/win/audio_low_latency_output_win.cc
index 039bde4b8e5..68a9652e053 100644
--- a/chromium/media/audio/win/audio_low_latency_output_win.cc
+++ b/chromium/media/audio/win/audio_low_latency_output_win.cc
@@ -678,8 +678,8 @@ bool WASAPIAudioOutputStream::RenderAudioFromSource(UINT64 device_frequency) {
if (qpc_position_diff_us - position_diff_us > buffer_duration_us / 2) {
++num_glitches_detected_;
- base::TimeDelta glitch_duration = base::TimeDelta::FromMicroseconds(
- qpc_position_diff_us - position_diff_us);
+ base::TimeDelta glitch_duration =
+ base::Microseconds(qpc_position_diff_us - position_diff_us);
if (glitch_duration > largest_glitch_)
largest_glitch_ = glitch_duration;
@@ -696,13 +696,13 @@ bool WASAPIAudioOutputStream::RenderAudioFromSource(UINT64 device_frequency) {
const uint64_t delay_frames = num_written_frames_ - played_out_frames;
// Convert the delay from frames to time.
- delay = base::TimeDelta::FromMicroseconds(
- delay_frames * base::Time::kMicrosecondsPerSecond /
- format_.Format.nSamplesPerSec);
+ delay =
+ base::Microseconds(delay_frames * base::Time::kMicrosecondsPerSecond /
+ format_.Format.nSamplesPerSec);
// Note: the obtained |qpc_position| value is in 100ns intervals and from
// the same time origin as QPC. We can simply convert it into us dividing
// by 10.0 since 10x100ns = 1us.
- delay_timestamp += base::TimeDelta::FromMicroseconds(qpc_position * 0.1);
+ delay_timestamp += base::Microseconds(qpc_position * 0.1);
} else {
RecordAudioFailure(kRenderFailureHistogram, hr);
LOG(ERROR) << "WAOS::" << __func__
diff --git a/chromium/media/audio/win/audio_low_latency_output_win.h b/chromium/media/audio/win/audio_low_latency_output_win.h
index 7447419101c..818759e2a46 100644
--- a/chromium/media/audio/win/audio_low_latency_output_win.h
+++ b/chromium/media/audio/win/audio_low_latency_output_win.h
@@ -133,6 +133,9 @@ class MEDIA_EXPORT WASAPIAudioOutputStream :
ERole device_role,
AudioManager::LogCallback log_callback);
+ WASAPIAudioOutputStream(const WASAPIAudioOutputStream&) = delete;
+ WASAPIAudioOutputStream& operator=(const WASAPIAudioOutputStream&) = delete;
+
// The dtor is typically called by the AudioManager only and it is usually
// triggered by calling AudioOutputStream::Close().
~WASAPIAudioOutputStream() override;
@@ -281,8 +284,6 @@ class MEDIA_EXPORT WASAPIAudioOutputStream :
// thread, it's possible to end up in a state where that task would execute
// after destruction of this class -- so use a WeakPtr to cancel safely.
base::WeakPtrFactory<WASAPIAudioOutputStream> weak_factory_{this};
-
- DISALLOW_COPY_AND_ASSIGN(WASAPIAudioOutputStream);
};
} // namespace media
diff --git a/chromium/media/audio/win/audio_low_latency_output_win_unittest.cc b/chromium/media/audio/win/audio_low_latency_output_win_unittest.cc
index 177bf469de4..a1683a25c1a 100644
--- a/chromium/media/audio/win/audio_low_latency_output_win_unittest.cc
+++ b/chromium/media/audio/win/audio_low_latency_output_win_unittest.cc
@@ -369,7 +369,7 @@ TEST_F(WASAPIAudioOutputStreamTest, ValidPacketSize) {
EXPECT_TRUE(aos->Open());
// Derive the expected duration of each packet.
- base::TimeDelta packet_duration = base::TimeDelta::FromSecondsD(
+ base::TimeDelta packet_duration = base::Seconds(
static_cast<double>(aosw.samples_per_packet()) / aosw.sample_rate());
// Wait for the first callback and verify its parameters. Ignore any
@@ -510,7 +510,7 @@ TEST_F(WASAPIAudioOutputStreamTest,
EXPECT_TRUE(aos->Open());
// Derive the expected size in bytes of each packet.
- base::TimeDelta packet_duration = base::TimeDelta::FromSecondsD(
+ base::TimeDelta packet_duration = base::Seconds(
static_cast<double>(aosw.samples_per_packet()) / aosw.sample_rate());
// Wait for the first callback and verify its parameters.
@@ -544,7 +544,7 @@ TEST_F(WASAPIAudioOutputStreamTest,
EXPECT_TRUE(aos->Open());
// Derive the expected size in bytes of each packet.
- base::TimeDelta packet_duration = base::TimeDelta::FromSecondsD(
+ base::TimeDelta packet_duration = base::Seconds(
static_cast<double>(aosw.samples_per_packet()) / aosw.sample_rate());
// Wait for the first callback and verify its parameters.
diff --git a/chromium/media/audio/win/audio_manager_win.h b/chromium/media/audio/win/audio_manager_win.h
index 1b4b3e62f41..f6c73c7b763 100644
--- a/chromium/media/audio/win/audio_manager_win.h
+++ b/chromium/media/audio/win/audio_manager_win.h
@@ -22,6 +22,10 @@ class MEDIA_EXPORT AudioManagerWin : public AudioManagerBase {
public:
AudioManagerWin(std::unique_ptr<AudioThread> audio_thread,
AudioLogFactory* audio_log_factory);
+
+ AudioManagerWin(const AudioManagerWin&) = delete;
+ AudioManagerWin& operator=(const AudioManagerWin&) = delete;
+
~AudioManagerWin() override;
// Implementation of AudioManager.
@@ -74,8 +78,6 @@ class MEDIA_EXPORT AudioManagerWin : public AudioManagerBase {
// Listen for output device changes.
std::unique_ptr<AudioDeviceListenerWin> output_device_listener_;
-
- DISALLOW_COPY_AND_ASSIGN(AudioManagerWin);
};
} // namespace media
diff --git a/chromium/media/audio/win/audio_output_win_unittest.cc b/chromium/media/audio/win/audio_output_win_unittest.cc
index 346cfde42b9..bd4431469b9 100644
--- a/chromium/media/audio/win/audio_output_win_unittest.cc
+++ b/chromium/media/audio/win/audio_output_win_unittest.cc
@@ -434,8 +434,8 @@ TEST_F(WinAudioTest, PCMWaveStreamPendingBytes) {
NiceMock<MockAudioSourceCallback> source;
EXPECT_TRUE(oas->Open());
- const base::TimeDelta delay_100_ms = base::TimeDelta::FromMilliseconds(100);
- const base::TimeDelta delay_200_ms = base::TimeDelta::FromMilliseconds(200);
+ const base::TimeDelta delay_100_ms = base::Milliseconds(100);
+ const base::TimeDelta delay_200_ms = base::Milliseconds(200);
// Audio output stream has either a double or triple buffer scheme. We expect
// the delay to reach up to 200 ms depending on the number of buffers used.
@@ -569,11 +569,10 @@ DWORD __stdcall SyncSocketThread(void* context) {
// blocking call and will not proceed until we receive the signal.
if (ctx.socket->Receive(&control_signal, sizeof(control_signal)) == 0)
break;
- base::TimeDelta delay =
- base::TimeDelta::FromMicroseconds(ctx.buffer->params.delay_us);
+ base::TimeDelta delay = base::Microseconds(ctx.buffer->params.delay_us);
base::TimeTicks delay_timestamp =
- base::TimeTicks() + base::TimeDelta::FromMicroseconds(
- ctx.buffer->params.delay_timestamp_us);
+ base::TimeTicks() +
+ base::Microseconds(ctx.buffer->params.delay_timestamp_us);
sine.OnMoreData(delay, delay_timestamp, 0, audio_bus.get());
// Send the audio data to the Audio Stream.
diff --git a/chromium/media/audio/win/core_audio_util_win.cc b/chromium/media/audio/win/core_audio_util_win.cc
index 0281167b4be..bb8498e7e1b 100644
--- a/chromium/media/audio/win/core_audio_util_win.cc
+++ b/chromium/media/audio/win/core_audio_util_win.cc
@@ -718,7 +718,7 @@ std::string CoreAudioUtil::WaveFormatToString(const WaveFormatWrapper format) {
base::TimeDelta CoreAudioUtil::ReferenceTimeToTimeDelta(REFERENCE_TIME time) {
// Each unit of reference time is 100 nanoseconds <=> 0.1 microsecond.
- return base::TimeDelta::FromMicroseconds(0.1 * time + 0.5);
+ return base::Microseconds(0.1 * time + 0.5);
}
uint32_t CoreAudioUtil::GetIAudioClientVersion() {
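
ReferenceTimeToTimeDelta() rounds a WASAPI REFERENCE_TIME (100 ns ticks) to the nearest microsecond: multiply by 0.1 and add 0.5 before the integral conversion. A minimal sketch of the same rounding, with std::chrono::microseconds standing in for base::TimeDelta:

#include <chrono>
#include <cstdint>
#include <iostream>

using REFERENCE_TIME = int64_t;  // 100 ns units, as in WASAPI

std::chrono::microseconds ReferenceTimeToMicros(REFERENCE_TIME time) {
  // 0.1 us per tick; the +0.5 rounds to the nearest microsecond.
  return std::chrono::microseconds(static_cast<int64_t>(0.1 * time + 0.5));
}

int main() {
  std::cout << ReferenceTimeToMicros(101325).count() << "\n";  // 10133 (rounds up)
  std::cout << ReferenceTimeToMicros(101324).count() << "\n";  // 10132 (rounds down)
}
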
diff --git a/chromium/media/audio/win/waveout_output_win.cc b/chromium/media/audio/win/waveout_output_win.cc
index c2c96fcba62..67a732ad38b 100644
--- a/chromium/media/audio/win/waveout_output_win.cc
+++ b/chromium/media/audio/win/waveout_output_win.cc
@@ -332,9 +332,9 @@ void PCMWaveOutAudioOutputStream::QueueNextPacket(WAVEHDR *buffer) {
// TODO(fbarchard): Handle used 0 by queueing more.
// TODO(sergeyu): Specify correct hardware delay for |delay|.
- const base::TimeDelta delay = base::TimeDelta::FromMicroseconds(
- pending_bytes_ * base::Time::kMicrosecondsPerSecond /
- format_.Format.nAvgBytesPerSec);
+ const base::TimeDelta delay =
+ base::Microseconds(pending_bytes_ * base::Time::kMicrosecondsPerSecond /
+ format_.Format.nAvgBytesPerSec);
int frames_filled =
callback_->OnMoreData(delay, base::TimeTicks::Now(), 0, audio_bus_.get());
uint32_t used = frames_filled * audio_bus_->channels() *
diff --git a/chromium/media/audio/win/waveout_output_win.h b/chromium/media/audio/win/waveout_output_win.h
index e066e7c2e0a..b97eb95b429 100644
--- a/chromium/media/audio/win/waveout_output_win.h
+++ b/chromium/media/audio/win/waveout_output_win.h
@@ -41,6 +41,11 @@ class PCMWaveOutAudioOutputStream : public AudioOutputStream {
const AudioParameters& params,
int num_buffers,
UINT device_id);
+
+ PCMWaveOutAudioOutputStream(const PCMWaveOutAudioOutputStream&) = delete;
+ PCMWaveOutAudioOutputStream& operator=(const PCMWaveOutAudioOutputStream&) =
+ delete;
+
~PCMWaveOutAudioOutputStream() override;
// Implementation of AudioOutputStream.
@@ -136,8 +141,6 @@ class PCMWaveOutAudioOutputStream : public AudioOutputStream {
// Container for retrieving data from AudioSourceCallback::OnMoreData().
std::unique_ptr<AudioBus> audio_bus_;
-
- DISALLOW_COPY_AND_ASSIGN(PCMWaveOutAudioOutputStream);
};
} // namespace media
diff --git a/chromium/media/base/BUILD.gn b/chromium/media/base/BUILD.gn
index d9a305666ee..49851ba6197 100644
--- a/chromium/media/base/BUILD.gn
+++ b/chromium/media/base/BUILD.gn
@@ -295,6 +295,8 @@ source_set("base") {
"supported_types.h",
"supported_video_decoder_config.cc",
"supported_video_decoder_config.h",
+ "svc_scalability_mode.cc",
+ "svc_scalability_mode.h",
"text_cue.cc",
"text_cue.h",
"text_ranges.cc",
@@ -360,6 +362,7 @@ source_set("base") {
":video_facing",
"//media:media_buildflags",
"//media:shared_memory_support",
+ "//media/gpu:buildflags",
"//ui/gfx:color_space",
"//ui/gl",
]
@@ -539,6 +542,7 @@ static_library("test_support") {
deps = [
"//base",
"//base/test:test_support",
+ "//build:chromeos_buildflags",
"//testing/gmock",
"//ui/gfx:test_support",
"//url",
diff --git a/chromium/media/base/android/android_cdm_factory.h b/chromium/media/base/android/android_cdm_factory.h
index 3ff0ae5e2c8..862ab3662b5 100644
--- a/chromium/media/base/android/android_cdm_factory.h
+++ b/chromium/media/base/android/android_cdm_factory.h
@@ -25,6 +25,10 @@ class MEDIA_EXPORT AndroidCdmFactory final : public CdmFactory {
public:
AndroidCdmFactory(CreateFetcherCB create_fetcher_cb,
CreateStorageCB create_storage_cb);
+
+ AndroidCdmFactory(const AndroidCdmFactory&) = delete;
+ AndroidCdmFactory& operator=(const AndroidCdmFactory&) = delete;
+
~AndroidCdmFactory() override;
// CdmFactory implementation.
@@ -53,8 +57,6 @@ class MEDIA_EXPORT AndroidCdmFactory final : public CdmFactory {
base::flat_map<uint32_t, PendingCreation> pending_creations_;
base::WeakPtrFactory<AndroidCdmFactory> weak_factory_{this};
-
- DISALLOW_COPY_AND_ASSIGN(AndroidCdmFactory);
};
} // namespace media
diff --git a/chromium/media/base/android/android_overlay.h b/chromium/media/base/android/android_overlay.h
index 3f8998e651c..03d4215548f 100644
--- a/chromium/media/base/android/android_overlay.h
+++ b/chromium/media/base/android/android_overlay.h
@@ -40,6 +40,9 @@ namespace media {
// AndroidOverlay isn't technically supposed to do that.
class MEDIA_EXPORT AndroidOverlay {
public:
+ AndroidOverlay(const AndroidOverlay&) = delete;
+ AndroidOverlay& operator=(const AndroidOverlay&) = delete;
+
virtual ~AndroidOverlay();
// Schedules a relayout of this overlay. If called before the client is
@@ -73,8 +76,6 @@ class MEDIA_EXPORT AndroidOverlay {
std::list<AndroidOverlayConfig::DeletedCB> deletion_cbs_;
base::WeakPtrFactory<AndroidOverlay> weak_factory_{this};
-
- DISALLOW_COPY_AND_ASSIGN(AndroidOverlay);
};
} // namespace media
diff --git a/chromium/media/base/android/jni_hdr_metadata.h b/chromium/media/base/android/jni_hdr_metadata.h
index eb2bf5ffa92..8b94fdd6bc8 100644
--- a/chromium/media/base/android/jni_hdr_metadata.h
+++ b/chromium/media/base/android/jni_hdr_metadata.h
@@ -17,6 +17,10 @@ class JniHdrMetadata {
public:
JniHdrMetadata(const VideoColorSpace& color_space,
const gfx::HDRMetadata& hdr_metadata);
+
+ JniHdrMetadata(const JniHdrMetadata&) = delete;
+ JniHdrMetadata& operator=(const JniHdrMetadata&) = delete;
+
~JniHdrMetadata();
base::android::ScopedJavaLocalRef<jobject> obj() { return jobject_; }
@@ -62,8 +66,6 @@ class JniHdrMetadata {
const VideoColorSpace& color_space_;
const gfx::HDRMetadata& hdr_metadata_;
base::android::ScopedJavaLocalRef<jobject> jobject_;
-
- DISALLOW_COPY_AND_ASSIGN(JniHdrMetadata);
};
} // namespace media
diff --git a/chromium/media/base/android/media_codec_bridge.h b/chromium/media/base/android/media_codec_bridge.h
index c4306a194cb..3d99eebe486 100644
--- a/chromium/media/base/android/media_codec_bridge.h
+++ b/chromium/media/base/android/media_codec_bridge.h
@@ -47,6 +47,10 @@ enum MediaCodecStatus {
class MEDIA_EXPORT MediaCodecBridge {
public:
MediaCodecBridge() = default;
+
+ MediaCodecBridge(const MediaCodecBridge&) = delete;
+ MediaCodecBridge& operator=(const MediaCodecBridge&) = delete;
+
virtual ~MediaCodecBridge() = default;
// Calls MediaCodec#stop(). However, due to buggy implementations (b/8125974)
@@ -164,8 +168,6 @@ class MEDIA_EXPORT MediaCodecBridge {
// Returns the max input size we configured the codec with.
virtual size_t GetMaxInputSize() = 0;
-
- DISALLOW_COPY_AND_ASSIGN(MediaCodecBridge);
};
} // namespace media
diff --git a/chromium/media/base/android/media_codec_bridge_impl.cc b/chromium/media/base/android/media_codec_bridge_impl.cc
index 54d9ede545e..6e65daad8cb 100644
--- a/chromium/media/base/android/media_codec_bridge_impl.cc
+++ b/chromium/media/base/android/media_codec_bridge_impl.cc
@@ -73,11 +73,11 @@ bool GetCodecSpecificDataForAudio(const AudioDecoderConfig& config,
const size_t extra_data_size = config.extra_data().size();
*output_frame_has_adts_header = false;
- if (extra_data_size == 0 && config.codec() != kCodecOpus)
+ if (extra_data_size == 0 && config.codec() != AudioCodec::kOpus)
return true;
switch (config.codec()) {
- case kCodecVorbis: {
+ case AudioCodec::kVorbis: {
if (extra_data[0] != 2) {
LOG(ERROR) << "Invalid number of vorbis headers before the codec "
<< "header: " << extra_data[0];
@@ -118,7 +118,7 @@ bool GetCodecSpecificDataForAudio(const AudioDecoderConfig& config,
extra_data + extra_data_size);
break;
}
- case kCodecFLAC: {
+ case AudioCodec::kFLAC: {
// According to MediaCodec spec, CSB buffer #0 for FLAC should be:
// "fLaC", the FLAC stream marker in ASCII, followed by the STREAMINFO
// block (the mandatory metadata block), optionally followed by any number
@@ -131,13 +131,13 @@ bool GetCodecSpecificDataForAudio(const AudioDecoderConfig& config,
extra_data + extra_data_size);
break;
}
- case kCodecAAC: {
+ case AudioCodec::kAAC: {
output_csd0->assign(extra_data, extra_data + extra_data_size);
*output_frame_has_adts_header =
config.profile() != AudioCodecProfile::kXHE_AAC;
break;
}
- case kCodecOpus: {
+ case AudioCodec::kOpus: {
if (!extra_data || extra_data_size == 0 || codec_delay_ns < 0 ||
seek_preroll_ns < 0) {
LOG(ERROR) << "Invalid Opus Header";
@@ -493,7 +493,7 @@ MediaCodecStatus MediaCodecBridgeImpl::DequeueOutputBuffer(
*size = base::checked_cast<size_t>(
Java_DequeueOutputResult_numBytes(env, result));
if (presentation_time) {
- *presentation_time = base::TimeDelta::FromMicroseconds(
+ *presentation_time = base::Microseconds(
Java_DequeueOutputResult_presentationTimeMicroseconds(env, result));
}
int flags = Java_DequeueOutputResult_flags(env, result);
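
The codec hunks in this file, and in media_codec_util.cc further down, move from the old unscoped constants (kCodecOpus, kCodecVP8, kUnknownVideoCodec, ...) to scoped enum class values (AudioCodec::kOpus, VideoCodec::kVP8, VideoCodec::kUnknown), so the names no longer leak into the enclosing namespace or convert implicitly to integers. A minimal sketch of the idiom with a trimmed-down, hypothetical enum and illustrative MIME strings:

#include <iostream>
#include <string>

// Old style: unscoped values spill into the surrounding namespace.
//   enum AudioCodec { kUnknownAudioCodec, kCodecOpus, kCodecAAC };
//
// New style: values are scoped and do not convert implicitly to int.
enum class AudioCodec { kUnknown, kOpus, kAAC };

std::string ToMimeType(AudioCodec codec) {
  switch (codec) {
    case AudioCodec::kOpus:
      return "audio/opus";
    case AudioCodec::kAAC:
      return "audio/mp4a-latm";
    default:
      return std::string();
  }
}

int main() {
  std::cout << ToMimeType(AudioCodec::kOpus) << "\n";
}
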
diff --git a/chromium/media/base/android/media_codec_bridge_impl.h b/chromium/media/base/android/media_codec_bridge_impl.h
index 5b306bae53d..41ebb412677 100644
--- a/chromium/media/base/android/media_codec_bridge_impl.h
+++ b/chromium/media/base/android/media_codec_bridge_impl.h
@@ -30,9 +30,13 @@ class VideoColorSpace;
class MEDIA_EXPORT VideoCodecConfig {
public:
VideoCodecConfig();
+
+ VideoCodecConfig(const VideoCodecConfig&) = delete;
+ VideoCodecConfig& operator=(const VideoCodecConfig&) = delete;
+
~VideoCodecConfig();
- VideoCodec codec = kUnknownVideoCodec;
+ VideoCodec codec = VideoCodec::kUnknown;
CodecType codec_type = CodecType::kAny;
@@ -62,9 +66,6 @@ class MEDIA_EXPORT VideoCodecConfig {
//
// May only be used on API level 23 and higher.
base::RepeatingClosure on_buffers_available_cb;
-
- private:
- DISALLOW_COPY_AND_ASSIGN(VideoCodecConfig);
};
// A bridge to a Java MediaCodec.
@@ -78,7 +79,7 @@ class MEDIA_EXPORT MediaCodecBridgeImpl : public MediaCodecBridge {
// Creates and starts a new MediaCodec configured for encoding. Returns
// nullptr on failure.
static std::unique_ptr<MediaCodecBridge> CreateVideoEncoder(
- VideoCodec codec, // e.g. media::kCodecVP8
+ VideoCodec codec, // e.g. media::VideoCodec::kVP8
const gfx::Size& size, // input frame size
int bit_rate, // bits/second
int frame_rate, // frames/second
@@ -102,6 +103,9 @@ class MEDIA_EXPORT MediaCodecBridgeImpl : public MediaCodecBridge {
// creating a MediaCodec. Does nothing unless on API level 23+.
static void SetupCallbackHandlerForTesting();
+ MediaCodecBridgeImpl(const MediaCodecBridgeImpl&) = delete;
+ MediaCodecBridgeImpl& operator=(const MediaCodecBridgeImpl&) = delete;
+
~MediaCodecBridgeImpl() override;
// MediaCodecBridge implementation.
@@ -181,8 +185,6 @@ class MEDIA_EXPORT MediaCodecBridgeImpl : public MediaCodecBridge {
// The Java MediaCodecBridge instance.
base::android::ScopedJavaGlobalRef<jobject> j_bridge_;
-
- DISALLOW_COPY_AND_ASSIGN(MediaCodecBridgeImpl);
};
} // namespace media
diff --git a/chromium/media/base/android/media_codec_bridge_impl_unittest.cc b/chromium/media/base/android/media_codec_bridge_impl_unittest.cc
index 60328e5c82b..47ad1c212ca 100644
--- a/chromium/media/base/android/media_codec_bridge_impl_unittest.cc
+++ b/chromium/media/base/android/media_codec_bridge_impl_unittest.cc
@@ -131,7 +131,7 @@ static const int kPresentationTimeBase = 100;
static const int kMaxInputPts = kPresentationTimeBase + 2;
static inline const base::TimeDelta InfiniteTimeOut() {
- return base::TimeDelta::FromMicroseconds(-1);
+ return base::Microseconds(-1);
}
void DecodeMediaFrame(MediaCodecBridge* media_codec,
@@ -164,7 +164,7 @@ void DecodeMediaFrame(MediaCodecBridge* media_codec,
}
// Output time stamp should not be smaller than old timestamp.
ASSERT_TRUE(new_timestamp >= timestamp);
- input_pts += base::TimeDelta::FromMicroseconds(33000);
+ input_pts += base::Microseconds(33000);
timestamp = new_timestamp;
}
}
@@ -290,7 +290,7 @@ TEST(MediaCodecBridgeTest, CreateH264Decoder) {
SKIP_TEST_IF_MEDIA_CODEC_IS_NOT_AVAILABLE();
VideoCodecConfig config;
- config.codec = kCodecH264;
+ config.codec = VideoCodec::kH264;
config.codec_type = CodecType::kAny;
config.initial_expected_coded_size = gfx::Size(640, 480);
@@ -301,7 +301,7 @@ TEST(MediaCodecBridgeTest, DoNormal) {
SKIP_TEST_IF_MEDIA_CODEC_IS_NOT_AVAILABLE();
std::unique_ptr<media::MediaCodecBridge> media_codec =
- MediaCodecBridgeImpl::CreateAudioDecoder(NewAudioConfig(kCodecMP3),
+ MediaCodecBridgeImpl::CreateAudioDecoder(NewAudioConfig(AudioCodec::kMP3),
nullptr);
ASSERT_THAT(media_codec, NotNull());
@@ -313,11 +313,11 @@ TEST(MediaCodecBridgeTest, DoNormal) {
int64_t input_pts = kPresentationTimeBase;
media_codec->QueueInputBuffer(input_buf_index, test_mp3, sizeof(test_mp3),
- base::TimeDelta::FromMicroseconds(++input_pts));
+ base::Microseconds(++input_pts));
status = media_codec->DequeueInputBuffer(InfiniteTimeOut(), &input_buf_index);
media_codec->QueueInputBuffer(input_buf_index, test_mp3, sizeof(test_mp3),
- base::TimeDelta::FromMicroseconds(++input_pts));
+ base::Microseconds(++input_pts));
status = media_codec->DequeueInputBuffer(InfiniteTimeOut(), &input_buf_index);
media_codec->QueueEOS(input_buf_index);
@@ -336,7 +336,6 @@ TEST(MediaCodecBridgeTest, DoNormal) {
switch (status) {
case MEDIA_CODEC_TRY_AGAIN_LATER:
FAIL();
- return;
case MEDIA_CODEC_OUTPUT_FORMAT_CHANGED:
continue;
@@ -360,9 +359,10 @@ TEST(MediaCodecBridgeTest, InvalidVorbisHeader) {
// The first byte of the header is not 0x02.
std::vector<uint8_t> invalid_first_byte = {{0x00, 0xff, 0xff, 0xff, 0xff}};
- ASSERT_THAT(MediaCodecBridgeImpl::CreateAudioDecoder(
- NewAudioConfig(kCodecVorbis, invalid_first_byte), nullptr),
- IsNull());
+ ASSERT_THAT(
+ MediaCodecBridgeImpl::CreateAudioDecoder(
+ NewAudioConfig(AudioCodec::kVorbis, invalid_first_byte), nullptr),
+ IsNull());
// Size of the header is too large.
size_t large_size = 8 * 1024 * 1024 + 2;
@@ -370,7 +370,7 @@ TEST(MediaCodecBridgeTest, InvalidVorbisHeader) {
large_header.front() = 0x02;
large_header.back() = 0xfe;
ASSERT_THAT(MediaCodecBridgeImpl::CreateAudioDecoder(
- NewAudioConfig(kCodecVorbis, large_header), nullptr),
+ NewAudioConfig(AudioCodec::kVorbis, large_header), nullptr),
IsNull());
}
@@ -380,16 +380,16 @@ TEST(MediaCodecBridgeTest, InvalidOpusHeader) {
std::vector<uint8_t> dummy_extra_data = {{0, 0}};
// Codec Delay is < 0.
- ASSERT_THAT(
- MediaCodecBridgeImpl::CreateAudioDecoder(
- NewAudioConfig(kCodecOpus, dummy_extra_data, base::TimeDelta(), -1),
- nullptr),
- IsNull());
+ ASSERT_THAT(MediaCodecBridgeImpl::CreateAudioDecoder(
+ NewAudioConfig(AudioCodec::kOpus, dummy_extra_data,
+ base::TimeDelta(), -1),
+ nullptr),
+ IsNull());
// Seek Preroll is < 0.
ASSERT_THAT(MediaCodecBridgeImpl::CreateAudioDecoder(
- NewAudioConfig(kCodecOpus, dummy_extra_data,
- base::TimeDelta::FromMicroseconds(-1)),
+ NewAudioConfig(AudioCodec::kOpus, dummy_extra_data,
+ base::Microseconds(-1)),
nullptr),
IsNull());
}
@@ -401,7 +401,7 @@ TEST(MediaCodecBridgeTest, PresentationTimestampsDoNotDecrease) {
}
VideoCodecConfig config;
- config.codec = kCodecVP8;
+ config.codec = VideoCodec::kVP8;
config.codec_type = CodecType::kAny;
config.initial_expected_coded_size = gfx::Size(320, 240);
@@ -418,23 +418,21 @@ TEST(MediaCodecBridgeTest, PresentationTimestampsDoNotDecrease) {
buffer->data() + buffer->data_size());
media_codec->Flush();
DecodeMediaFrame(media_codec.get(), &chunk[0], chunk.size(),
- base::TimeDelta::FromMicroseconds(10000000),
- base::TimeDelta::FromMicroseconds(9900000));
+ base::Microseconds(10000000), base::Microseconds(9900000));
// Simulate a seek to 5 seconds.
media_codec->Flush();
DecodeMediaFrame(media_codec.get(), &chunk[0], chunk.size(),
- base::TimeDelta::FromMicroseconds(5000000),
- base::TimeDelta::FromMicroseconds(4900000));
+ base::Microseconds(5000000), base::Microseconds(4900000));
}
TEST(MediaCodecBridgeTest, CreateUnsupportedCodec) {
EXPECT_THAT(MediaCodecBridgeImpl::CreateAudioDecoder(
- NewAudioConfig(kUnknownAudioCodec), nullptr),
+ NewAudioConfig(AudioCodec::kUnknown), nullptr),
IsNull());
VideoCodecConfig config;
- config.codec = kUnknownVideoCodec;
+ config.codec = VideoCodec::kUnknown;
config.codec_type = CodecType::kAny;
config.initial_expected_coded_size = gfx::Size(320, 240);
EXPECT_THAT(MediaCodecBridgeImpl::CreateVideoDecoder(config), IsNull());
@@ -465,7 +463,7 @@ TEST(MediaCodecBridgeTest, H264VideoEncodeAndValidate) {
std::unique_ptr<MediaCodecBridge> media_codec(
MediaCodecBridgeImpl::CreateVideoEncoder(
- kCodecH264, gfx::Size(width, height), bit_rate, frame_rate,
+ VideoCodec::kH264, gfx::Size(width, height), bit_rate, frame_rate,
i_frame_interval, color_format));
ASSERT_THAT(media_codec, NotNull());
@@ -493,8 +491,8 @@ TEST(MediaCodecBridgeTest, H264VideoEncodeAndValidate) {
 // Src_file contains 1 frame. Encode it 3 times.
for (int frame = 0; frame < num_frames && frame < 3; frame++) {
- input_timestamp += base::TimeDelta::FromMicroseconds(
- base::Time::kMicrosecondsPerSecond / frame_rate);
+ input_timestamp +=
+ base::Microseconds(base::Time::kMicrosecondsPerSecond / frame_rate);
EncodeMediaFrame(media_codec.get(), frame_data.get(), frame_size, width,
height, input_timestamp);
}
@@ -503,8 +501,8 @@ TEST(MediaCodecBridgeTest, H264VideoEncodeAndValidate) {
// also contain SPS/PPS NALUs.
media_codec->RequestKeyFrameSoon();
for (int frame = 0; frame < num_frames && frame < 3; frame++) {
- input_timestamp += base::TimeDelta::FromMicroseconds(
- base::Time::kMicrosecondsPerSecond / frame_rate);
+ input_timestamp +=
+ base::Microseconds(base::Time::kMicrosecondsPerSecond / frame_rate);
EncodeMediaFrame(media_codec.get(), frame_data.get(), frame_size, width,
height, input_timestamp);
}
diff --git a/chromium/media/base/android/media_codec_loop.cc b/chromium/media/base/android/media_codec_loop.cc
index 57227366eff..7e09e387173 100644
--- a/chromium/media/base/android/media_codec_loop.cc
+++ b/chromium/media/base/android/media_codec_loop.cc
@@ -15,10 +15,9 @@
namespace media {
namespace {
-constexpr base::TimeDelta kDecodePollDelay =
- base::TimeDelta::FromMilliseconds(10);
-constexpr base::TimeDelta kNoWaitTimeout = base::TimeDelta::FromMicroseconds(0);
-constexpr base::TimeDelta kIdleTimerTimeout = base::TimeDelta::FromSeconds(1);
+constexpr base::TimeDelta kDecodePollDelay = base::Milliseconds(10);
+constexpr base::TimeDelta kNoWaitTimeout = base::Microseconds(0);
+constexpr base::TimeDelta kIdleTimerTimeout = base::Seconds(1);
} // namespace
diff --git a/chromium/media/base/android/media_codec_loop_unittest.cc b/chromium/media/base/android/media_codec_loop_unittest.cc
index f3ac9638026..c7df7a22f6d 100644
--- a/chromium/media/base/android/media_codec_loop_unittest.cc
+++ b/chromium/media/base/android/media_codec_loop_unittest.cc
@@ -50,6 +50,9 @@ class MediaCodecLoopTest : public testing::Test {
: task_runner_handle_(mock_task_runner_),
client_(std::make_unique<MockMediaCodecLoopClient>()) {}
+ MediaCodecLoopTest(const MediaCodecLoopTest&) = delete;
+ MediaCodecLoopTest& operator=(const MediaCodecLoopTest&) = delete;
+
~MediaCodecLoopTest() override {}
protected:
@@ -85,7 +88,7 @@ class MediaCodecLoopTest : public testing::Test {
// TODO(liberato): assume that MCL doesn't retry for 30 seconds. Note
// that this doesn't actually wall-clock wait.
- mock_task_runner_->FastForwardBy(base::TimeDelta::FromSeconds(30));
+ mock_task_runner_->FastForwardBy(base::Seconds(30));
}
void ConstructCodecLoop(int sdk_int = base::android::SDK_VERSION_LOLLIPOP) {
@@ -139,7 +142,7 @@ class MediaCodecLoopTest : public testing::Test {
MediaCodecLoop::InputData data;
data.memory = reinterpret_cast<const uint8_t*>("big buck bunny");
data.length = 14;
- data.presentation_time = base::TimeDelta::FromSeconds(1);
+ data.presentation_time = base::Seconds(1);
return data;
}
@@ -147,7 +150,7 @@ class MediaCodecLoopTest : public testing::Test {
int index = 1;
size_t offset = 0;
size_t size = 1024;
- base::TimeDelta pts = base::TimeDelta::FromSeconds(1);
+ base::TimeDelta pts = base::Seconds(1);
bool eos = false;
bool key_frame = true;
};
@@ -191,8 +194,6 @@ class MediaCodecLoopTest : public testing::Test {
std::unique_ptr<MediaCodecLoop> codec_loop_;
std::unique_ptr<MockMediaCodecLoopClient> client_;
-
- DISALLOW_COPY_AND_ASSIGN(MediaCodecLoopTest);
};
TEST_F(MediaCodecLoopTest, TestConstructionWithNullCodec) {
@@ -403,7 +404,7 @@ TEST_F(MediaCodecLoopTest, TestSeveralPendingIOBuffers) {
OutputBuffer buffer;
buffer.index = i;
buffer.size += i;
- buffer.pts = base::TimeDelta::FromSeconds(i + 1);
+ buffer.pts = base::Seconds(i + 1);
ExpectDequeueOutputBuffer(buffer);
ExpectOnDecodedFrame(buffer);
}
diff --git a/chromium/media/base/android/media_codec_util.cc b/chromium/media/base/android/media_codec_util.cc
index 84e4c1be115..d9bbb10bd7b 100644
--- a/chromium/media/base/android/media_codec_util.cc
+++ b/chromium/media/base/android/media_codec_util.cc
@@ -127,19 +127,19 @@ std::string MediaCodecUtil::CodecToAndroidMimeType(AudioCodec codec) {
return kBitstreamAudioMimeType;
switch (codec) {
- case kCodecMP3:
+ case AudioCodec::kMP3:
return kMp3MimeType;
- case kCodecVorbis:
+ case AudioCodec::kVorbis:
return kVorbisMimeType;
- case kCodecFLAC:
+ case AudioCodec::kFLAC:
return kFLACMimeType;
- case kCodecOpus:
+ case AudioCodec::kOpus:
return kOpusMimeType;
- case kCodecAAC:
+ case AudioCodec::kAAC:
return kAacMimeType;
- case kCodecAC3:
+ case AudioCodec::kAC3:
return kAc3MimeType;
- case kCodecEAC3:
+ case AudioCodec::kEAC3:
return kEac3MimeType;
default:
return std::string();
@@ -149,17 +149,17 @@ std::string MediaCodecUtil::CodecToAndroidMimeType(AudioCodec codec) {
// static
std::string MediaCodecUtil::CodecToAndroidMimeType(VideoCodec codec) {
switch (codec) {
- case kCodecH264:
+ case VideoCodec::kH264:
return kAvcMimeType;
- case kCodecHEVC:
+ case VideoCodec::kHEVC:
return kHevcMimeType;
- case kCodecVP8:
+ case VideoCodec::kVP8:
return kVp8MimeType;
- case kCodecVP9:
+ case VideoCodec::kVP9:
return kVp9MimeType;
- case kCodecDolbyVision:
+ case VideoCodec::kDolbyVision:
return kDolbyVisionMimeType;
- case kCodecAV1:
+ case VideoCodec::kAV1:
return kAv1MimeType;
default:
return std::string();
@@ -309,7 +309,7 @@ bool MediaCodecUtil::IsSetOutputSurfaceSupported() {
// static
bool MediaCodecUtil::IsPassthroughAudioFormat(AudioCodec codec) {
- return codec == kCodecAC3 || codec == kCodecEAC3;
+ return codec == AudioCodec::kAC3 || codec == AudioCodec::kEAC3;
}
// static
@@ -376,7 +376,7 @@ bool MediaCodecUtil::IsKnownUnaccelerated(VideoCodec codec,
// MediaTek hardware vp8 is known slower than the software implementation.
if (base::StartsWith(codec_name, "OMX.MTK.", base::CompareCase::SENSITIVE)) {
- if (codec == kCodecVP8) {
+ if (codec == VideoCodec::kVP8) {
// We may still reject VP8 hardware decoding later on certain chipsets,
 // see isDecoderSupportedForDevice(). We don't have the chipset ID
// here to check now though.
diff --git a/chromium/media/base/android/media_codec_util_unittest.cc b/chromium/media/base/android/media_codec_util_unittest.cc
index d496e884f39..fb8760b7f19 100644
--- a/chromium/media/base/android/media_codec_util_unittest.cc
+++ b/chromium/media/base/android/media_codec_util_unittest.cc
@@ -20,10 +20,13 @@ using base::android::SDK_VERSION_NOUGAT_MR1;
class MediaCodecUtilTest : public testing::Test {
public:
MediaCodecUtilTest() {}
+
+ MediaCodecUtilTest(const MediaCodecUtilTest&) = delete;
+ MediaCodecUtilTest& operator=(const MediaCodecUtilTest&) = delete;
+
~MediaCodecUtilTest() override {}
public:
- DISALLOW_COPY_AND_ASSIGN(MediaCodecUtilTest);
};
TEST_F(MediaCodecUtilTest, TestCodecAvailableIfNewerVersion) {
diff --git a/chromium/media/base/android/media_crypto_context.h b/chromium/media/base/android/media_crypto_context.h
index bfeb84dfde5..c15ddfe3ae9 100644
--- a/chromium/media/base/android/media_crypto_context.h
+++ b/chromium/media/base/android/media_crypto_context.h
@@ -23,6 +23,10 @@ namespace media {
class MEDIA_EXPORT MediaCryptoContext {
public:
MediaCryptoContext() = default;
+
+ MediaCryptoContext(const MediaCryptoContext&) = delete;
+ MediaCryptoContext& operator=(const MediaCryptoContext&) = delete;
+
virtual ~MediaCryptoContext() = default;
// Notification called when MediaCrypto object is ready.
@@ -38,9 +42,6 @@ class MEDIA_EXPORT MediaCryptoContext {
bool requires_secure_video_codec)>;
virtual void SetMediaCryptoReadyCB(
MediaCryptoReadyCB media_crypto_ready_cb) = 0;
-
- private:
- DISALLOW_COPY_AND_ASSIGN(MediaCryptoContext);
};
} // namespace media
diff --git a/chromium/media/base/android/media_crypto_context_impl.h b/chromium/media/base/android/media_crypto_context_impl.h
index 24ec089555e..5951fea7759 100644
--- a/chromium/media/base/android/media_crypto_context_impl.h
+++ b/chromium/media/base/android/media_crypto_context_impl.h
@@ -25,6 +25,9 @@ class MEDIA_EXPORT MediaCryptoContextImpl final : public MediaCryptoContext {
// The |media_drm_bridge| owns |this| and is guaranteed to outlive |this|.
explicit MediaCryptoContextImpl(MediaDrmBridge* media_drm_bridge);
+ MediaCryptoContextImpl(const MediaCryptoContextImpl&) = delete;
+ MediaCryptoContextImpl& operator=(const MediaCryptoContextImpl&) = delete;
+
~MediaCryptoContextImpl() override;
// MediaCryptoContext implementation.
@@ -32,8 +35,6 @@ class MEDIA_EXPORT MediaCryptoContextImpl final : public MediaCryptoContext {
private:
MediaDrmBridge* const media_drm_bridge_;
-
- DISALLOW_COPY_AND_ASSIGN(MediaCryptoContextImpl);
};
} // namespace media
diff --git a/chromium/media/base/android/media_drm_bridge_client.h b/chromium/media/base/android/media_drm_bridge_client.h
index fc083a40e50..643d0d8075f 100644
--- a/chromium/media/base/android/media_drm_bridge_client.h
+++ b/chromium/media/base/android/media_drm_bridge_client.h
@@ -39,6 +39,10 @@ class MEDIA_EXPORT MediaDrmBridgeClient {
typedef std::unordered_map<std::string, UUID> KeySystemUuidMap;
MediaDrmBridgeClient();
+
+ MediaDrmBridgeClient(const MediaDrmBridgeClient&) = delete;
+ MediaDrmBridgeClient& operator=(const MediaDrmBridgeClient&) = delete;
+
virtual ~MediaDrmBridgeClient();
// Adds extra mappings from key-system name to Android UUID into |map|.
@@ -51,8 +55,6 @@ class MEDIA_EXPORT MediaDrmBridgeClient {
private:
friend class KeySystemManager;
-
- DISALLOW_COPY_AND_ASSIGN(MediaDrmBridgeClient);
};
} // namespace media
diff --git a/chromium/media/base/android/media_drm_bridge_delegate.h b/chromium/media/base/android/media_drm_bridge_delegate.h
index d1c66b96972..f3e1064ac42 100644
--- a/chromium/media/base/android/media_drm_bridge_delegate.h
+++ b/chromium/media/base/android/media_drm_bridge_delegate.h
@@ -22,6 +22,10 @@ namespace media {
class MEDIA_EXPORT MediaDrmBridgeDelegate {
public:
MediaDrmBridgeDelegate();
+
+ MediaDrmBridgeDelegate(const MediaDrmBridgeDelegate&) = delete;
+ MediaDrmBridgeDelegate& operator=(const MediaDrmBridgeDelegate&) = delete;
+
virtual ~MediaDrmBridgeDelegate();
// Returns the UUID of the DRM scheme that this delegate applies to.
@@ -38,9 +42,6 @@ class MEDIA_EXPORT MediaDrmBridgeDelegate {
const std::vector<uint8_t>& init_data,
std::vector<uint8_t>* init_data_out,
std::vector<std::string>* optional_parameters_out);
-
- private:
- DISALLOW_COPY_AND_ASSIGN(MediaDrmBridgeDelegate);
};
} // namespace media
diff --git a/chromium/media/base/android/media_drm_bridge_factory.h b/chromium/media/base/android/media_drm_bridge_factory.h
index 264aeb1e18b..f15876c75b3 100644
--- a/chromium/media/base/android/media_drm_bridge_factory.h
+++ b/chromium/media/base/android/media_drm_bridge_factory.h
@@ -28,6 +28,10 @@ class MEDIA_EXPORT MediaDrmBridgeFactory final : public CdmFactory {
public:
MediaDrmBridgeFactory(CreateFetcherCB create_fetcher_cb,
CreateStorageCB create_storage_cb);
+
+ MediaDrmBridgeFactory(const MediaDrmBridgeFactory&) = delete;
+ MediaDrmBridgeFactory& operator=(const MediaDrmBridgeFactory&) = delete;
+
~MediaDrmBridgeFactory() override;
// CdmFactory implementation.
@@ -72,8 +76,6 @@ class MEDIA_EXPORT MediaDrmBridgeFactory final : public CdmFactory {
scoped_refptr<MediaDrmBridge> media_drm_bridge_;
base::WeakPtrFactory<MediaDrmBridgeFactory> weak_factory_{this};
-
- DISALLOW_COPY_AND_ASSIGN(MediaDrmBridgeFactory);
};
} // namespace media
diff --git a/chromium/media/base/android/media_drm_storage_bridge.h b/chromium/media/base/android/media_drm_storage_bridge.h
index ee4edf9fc43..8787f9f6d97 100644
--- a/chromium/media/base/android/media_drm_storage_bridge.h
+++ b/chromium/media/base/android/media_drm_storage_bridge.h
@@ -30,6 +30,10 @@ class MediaDrmStorageBridge {
using InitCB = base::OnceCallback<void(bool)>;
MediaDrmStorageBridge();
+
+ MediaDrmStorageBridge(const MediaDrmStorageBridge&) = delete;
+ MediaDrmStorageBridge& operator=(const MediaDrmStorageBridge&) = delete;
+
~MediaDrmStorageBridge();
// Once storage is initialized, |init_cb| will be called and it will have a
@@ -90,8 +94,6 @@ class MediaDrmStorageBridge {
scoped_refptr<base::SingleThreadTaskRunner> task_runner_;
base::WeakPtrFactory<MediaDrmStorageBridge> weak_factory_{this};
-
- DISALLOW_COPY_AND_ASSIGN(MediaDrmStorageBridge);
};
} // namespace media
diff --git a/chromium/media/base/android/media_player_bridge.cc b/chromium/media/base/android/media_player_bridge.cc
index 1f06c8f7e82..3e08054e50e 100644
--- a/chromium/media/base/android/media_player_bridge.cc
+++ b/chromium/media/base/android/media_player_bridge.cc
@@ -384,7 +384,7 @@ base::TimeDelta MediaPlayerBridge::GetCurrentTime() {
if (!prepared_)
return pending_seek_;
JNIEnv* env = base::android::AttachCurrentThread();
- return base::TimeDelta::FromMilliseconds(
+ return base::Milliseconds(
Java_MediaPlayerBridge_getCurrentPosition(env, j_media_player_bridge_));
}
@@ -395,7 +395,7 @@ base::TimeDelta MediaPlayerBridge::GetDuration() {
const int duration_ms =
Java_MediaPlayerBridge_getDuration(env, j_media_player_bridge_);
return duration_ms < 0 ? media::kInfiniteDuration
- : base::TimeDelta::FromMilliseconds(duration_ms);
+ : base::Milliseconds(duration_ms);
}
void MediaPlayerBridge::Release() {
@@ -477,7 +477,7 @@ void MediaPlayerBridge::OnMediaPrepared() {
// events.
if (should_seek_on_prepare_) {
SeekInternal(pending_seek_);
- pending_seek_ = base::TimeDelta::FromMilliseconds(0);
+ pending_seek_ = base::Milliseconds(0);
should_seek_on_prepare_ = false;
}
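(Note: this file also shows the second recurring change in the update — the verbose base::TimeDelta::FromMilliseconds()/FromSeconds() factories are replaced by the shorter base::Milliseconds()/base::Seconds() helpers from base/time/time.h. A minimal equivalence sketch; both spellings exist in this Chromium revision, and ExampleDelay() is illustrative only:)

#include "base/time/time.h"

base::TimeDelta ExampleDelay() {
  // Both calls construct the same 120 ms TimeDelta; only the helper name
  // differs.
  base::TimeDelta old_spelling = base::TimeDelta::FromMilliseconds(120);
  base::TimeDelta new_spelling = base::Milliseconds(120);
  return old_spelling == new_spelling ? new_spelling : base::TimeDelta();
}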
diff --git a/chromium/media/base/android/media_player_bridge.h b/chromium/media/base/android/media_player_bridge.h
index 4b1bfe2d156..701299c0a0b 100644
--- a/chromium/media/base/android/media_player_bridge.h
+++ b/chromium/media/base/android/media_player_bridge.h
@@ -85,6 +85,10 @@ class MEDIA_EXPORT MediaPlayerBridge {
Client* client,
bool allow_credentials,
bool is_hls);
+
+ MediaPlayerBridge(const MediaPlayerBridge&) = delete;
+ MediaPlayerBridge& operator=(const MediaPlayerBridge&) = delete;
+
virtual ~MediaPlayerBridge();
// Initialize this object and extract the metadata from the media.
@@ -281,8 +285,6 @@ class MEDIA_EXPORT MediaPlayerBridge {
// Weak pointer passed to `listener_` for callbacks.
// NOTE: Weak pointers must be invalidated before all other member variables.
base::WeakPtrFactory<MediaPlayerBridge> weak_factory_{this};
-
- DISALLOW_COPY_AND_ASSIGN(MediaPlayerBridge);
};
} // namespace media
diff --git a/chromium/media/base/android/media_player_bridge_unittest.cc b/chromium/media/base/android/media_player_bridge_unittest.cc
index 36cf5c8297f..2fc4439db26 100644
--- a/chromium/media/base/android/media_player_bridge_unittest.cc
+++ b/chromium/media/base/android/media_player_bridge_unittest.cc
@@ -63,7 +63,7 @@ class MediaPlayerBridgeTest : public testing::Test {
};
TEST_F(MediaPlayerBridgeTest, Client_OnMediaMetadataChanged) {
- const base::TimeDelta kDuration = base::TimeDelta::FromSeconds(20);
+ const base::TimeDelta kDuration = base::Seconds(20);
EXPECT_CALL(client_, OnMediaDurationChanged(kDuration));
diff --git a/chromium/media/base/android/media_player_listener.h b/chromium/media/base/android/media_player_listener.h
index 8c7c5ed42a2..820473b856c 100644
--- a/chromium/media/base/android/media_player_listener.h
+++ b/chromium/media/base/android/media_player_listener.h
@@ -30,6 +30,10 @@ class MediaPlayerListener {
MediaPlayerListener(
const scoped_refptr<base::SingleThreadTaskRunner>& task_runner,
base::WeakPtr<MediaPlayerBridge> media_player);
+
+ MediaPlayerListener(const MediaPlayerListener&) = delete;
+ MediaPlayerListener& operator=(const MediaPlayerListener&) = delete;
+
virtual ~MediaPlayerListener();
// Called by the Java MediaPlayerListener and mirrored to corresponding
@@ -65,8 +69,6 @@ class MediaPlayerListener {
base::WeakPtr<MediaPlayerBridge> media_player_;
base::android::ScopedJavaGlobalRef<jobject> j_media_player_listener_;
-
- DISALLOW_COPY_AND_ASSIGN(MediaPlayerListener);
};
} // namespace media
diff --git a/chromium/media/base/android/media_server_crash_listener.h b/chromium/media/base/android/media_server_crash_listener.h
index 7a19e20205e..16c9977ec68 100644
--- a/chromium/media/base/android/media_server_crash_listener.h
+++ b/chromium/media/base/android/media_server_crash_listener.h
@@ -29,6 +29,10 @@ class MediaServerCrashListener {
MediaServerCrashListener(
OnMediaServerCrashCB on_server_crash_cb,
scoped_refptr<base::SingleThreadTaskRunner> callback_task_runner);
+
+ MediaServerCrashListener(const MediaServerCrashListener&) = delete;
+ MediaServerCrashListener& operator=(const MediaServerCrashListener&) = delete;
+
~MediaServerCrashListener();
// Ensure the underlying watchdog MediaPlayer is created.
@@ -50,7 +54,6 @@ class MediaServerCrashListener {
scoped_refptr<base::SingleThreadTaskRunner> callback_task_runner_;
base::android::ScopedJavaGlobalRef<jobject> j_crash_listener_;
- DISALLOW_COPY_AND_ASSIGN(MediaServerCrashListener);
};
} // namespace media
diff --git a/chromium/media/base/android/media_service_throttler.cc b/chromium/media/base/android/media_service_throttler.cc
index d950218eebd..603e5543365 100644
--- a/chromium/media/base/android/media_service_throttler.cc
+++ b/chromium/media/base/android/media_service_throttler.cc
@@ -19,32 +19,31 @@ namespace {
// Period of inactivity after which we stop listening for MediaServer crashes.
// NOTE: Server crashes don't count as activity. Only calls to
// GetDelayForClientCreation() do.
-constexpr auto kReleaseInactivityDelay = base::TimeDelta::FromMinutes(1);
+constexpr auto kReleaseInactivityDelay = base::Minutes(1);
// Elapsed time between crashes needed to completely reset the media server
// crash count.
-constexpr auto kTimeUntilCrashReset = base::TimeDelta::FromMinutes(1);
+constexpr auto kTimeUntilCrashReset = base::Minutes(1);
// Elapsed time between schedule calls needed to completely reset the
// scheduling clock.
-constexpr auto kTimeUntilScheduleReset = base::TimeDelta::FromMinutes(1);
+constexpr auto kTimeUntilScheduleReset = base::Minutes(1);
// Rate at which client creations will be exponentially throttled based on the
// number of media server crashes.
// NOTE: Since our exponential delay formula is 2^(server crashes), 0 server
// crashes still result in this delay being added once.
-constexpr auto kBaseExponentialDelay = base::TimeDelta::FromMilliseconds(120);
+constexpr auto kBaseExponentialDelay = base::Milliseconds(120);
// Base rate at which we schedule client creations.
// The minimal delay is |kLinearThrottlingDelay| + |kBaseExponentialDelay|.
constexpr auto kLinearThrottlingDelay =
- base::TimeDelta::FromSecondsD(0.2) - kBaseExponentialDelay;
+ base::Seconds(0.2) - kBaseExponentialDelay;
// Max exponential throttling rate from media server crashes.
// The max delay will still be |kLinearThrottlingDelay| +
// |kMaxExponentialDelay|.
-constexpr auto kMaxExponentialDelay =
- base::TimeDelta::FromSeconds(3) - kLinearThrottlingDelay;
+constexpr auto kMaxExponentialDelay = base::Seconds(3) - kLinearThrottlingDelay;
// Max number of clients to schedule immediately (e.g when loading a new page).
const uint32_t kMaxBurstClients = 10;
@@ -172,8 +171,8 @@ void MediaServiceThrottler::UpdateServerCrashes() {
current_crashes_ = 0.0;
} else {
// Decay at the rate of 1 crash/minute otherwise.
- const double decay = (now - last_current_crash_update_time_) /
- base::TimeDelta::FromMinutes(1);
+ const double decay =
+ (now - last_current_crash_update_time_) / base::Minutes(1);
current_crashes_ = std::max(0.0, current_crashes_ - decay);
}
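(For context, the constants renamed above combine into the creation delay described by the comments: a fixed linear part plus an exponential part that doubles per observed media-server crash, capped at kMaxExponentialDelay. A simplified sketch of that shape — an assumed approximation; the real scheduling logic lives in MediaServiceThrottler::GetDelayForClientCreation():)

#include <algorithm>
#include <cmath>

#include "base/time/time.h"

base::TimeDelta ApproximateCreationDelay(double server_crashes) {
  constexpr auto kBaseExponentialDelay = base::Milliseconds(120);
  constexpr auto kLinearThrottlingDelay =
      base::Seconds(0.2) - kBaseExponentialDelay;
  constexpr auto kMaxExponentialDelay =
      base::Seconds(3) - kLinearThrottlingDelay;

  // 2^crashes scaling; zero crashes still contributes one
  // kBaseExponentialDelay, so the minimum total is ~200 ms and the maximum
  // is ~3 s, matching the comments above.
  const base::TimeDelta exponential =
      kBaseExponentialDelay * std::pow(2.0, server_crashes);
  return kLinearThrottlingDelay + std::min(exponential, kMaxExponentialDelay);
}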
diff --git a/chromium/media/base/android/media_service_throttler_unittest.cc b/chromium/media/base/android/media_service_throttler_unittest.cc
index af3b45a367e..554142b2358 100644
--- a/chromium/media/base/android/media_service_throttler_unittest.cc
+++ b/chromium/media/base/android/media_service_throttler_unittest.cc
@@ -140,11 +140,10 @@ TEST_F(MediaServiceThrottlerTest,
// reset.
TEST_F(MediaServiceThrottlerTest, NoCrash_LongInactivity_ShouldReset) {
// Schedule two minutes' worth of clients.
- SimulateClientCreations(
- base::ClampFloor(base::TimeDelta::FromMinutes(2) / base_delay_));
+ SimulateClientCreations(base::ClampFloor(base::Minutes(2) / base_delay_));
// Advance the time so the scheduler perceived a full minute of inactivity.
- clock_.Advance(base::TimeDelta::FromSeconds(61));
+ clock_.Advance(base::Seconds(61));
// Make sure new clients are burst scheduled.
EXPECT_EQ(base::TimeDelta(), throttler_->GetDelayForClientCreation());
@@ -172,7 +171,7 @@ TEST_F(MediaServiceThrottlerTest,
SimulateClientCreations(kMaxBurstClients);
SimulateCrashes(1);
- clock_.Advance(base::TimeDelta::FromMilliseconds(1));
+ clock_.Advance(base::Milliseconds(1));
// Because we use the floor function when calculating crashes, a small time
// advance should nullify a single crash.
@@ -185,7 +184,7 @@ TEST_F(MediaServiceThrottlerTest, WithCrash_ManyCrashes_DelayShouldIncrease) {
SimulateClientCreations(kMaxBurstClients);
SimulateCrashes(2);
- clock_.Advance(base::TimeDelta::FromMilliseconds(1));
+ clock_.Advance(base::Milliseconds(1));
// The delay after crashes should be greater than the base delay.
EXPECT_LT(base_delay_, GetCurrentDelayBetweenClients());
@@ -238,7 +237,7 @@ TEST_F(MediaServiceThrottlerTest, WithCrash_NoCrashesForAMinute_ShouldReset) {
// The effective server crash count should be reset because it has been over
// a minute since the last crash.
- clock_.Advance(base::TimeDelta::FromSeconds(61));
+ clock_.Advance(base::Seconds(61));
SimulateClientCreations(kMaxBurstClients);
@@ -250,9 +249,9 @@ TEST_F(MediaServiceThrottlerTest, WithCrash_ConstantCrashes_ShouldNotReset) {
SimulateCrashes(9);
// The effective server crash count should not be reset.
- clock_.Advance(base::TimeDelta::FromSeconds(59));
+ clock_.Advance(base::Seconds(59));
SimulateCrashes(1);
- clock_.Advance(base::TimeDelta::FromSeconds(2));
+ clock_.Advance(base::Seconds(2));
SimulateClientCreations(kMaxBurstClients);
@@ -265,11 +264,10 @@ TEST_F(MediaServiceThrottlerTest, CrashListener_NoRequests_ShouldShutDown) {
// Schedule many minutes worth of clients. This is to prove that the
// MediaServerCrashListener's clean up happens after lack of requests, as
// opposed to lack of actually scheduled clients.
- SimulateClientCreations(
- base::ClampFloor(base::TimeDelta::FromMinutes(3) / base_delay_));
+ SimulateClientCreations(base::ClampFloor(base::Minutes(3) / base_delay_));
// The MediaServerCrashListener should be alive, with 1s second to spare.
- clock_.Advance(base::TimeDelta::FromSeconds(59));
+ clock_.Advance(base::Seconds(59));
test_task_runner_->RunTasks();
EXPECT_TRUE(throttler_->IsCrashListenerAliveForTesting());
@@ -278,12 +276,12 @@ TEST_F(MediaServiceThrottlerTest, CrashListener_NoRequests_ShouldShutDown) {
throttler_->GetDelayForClientCreation();
// The MediaServerCrashListener should be alive, with 58s second to spare.
- clock_.Advance(base::TimeDelta::FromSeconds(2));
+ clock_.Advance(base::Seconds(2));
test_task_runner_->RunTasks();
EXPECT_TRUE(throttler_->IsCrashListenerAliveForTesting());
// The MediaServerCrashListener should be dead.
- clock_.Advance(base::TimeDelta::FromSeconds(59));
+ clock_.Advance(base::Seconds(59));
test_task_runner_->RunTasks();
EXPECT_FALSE(throttler_->IsCrashListenerAliveForTesting());
}
@@ -295,11 +293,10 @@ TEST_F(MediaServiceThrottlerTest,
// Schedule many minutes worth of clients. This is to prove that the
// MediaServerCrashListener's clean up happens after lack of requests, as
// opposed to lack of actually scheduled clients.
- SimulateClientCreations(
- base::ClampFloor(base::TimeDelta::FromMinutes(3) / base_delay_));
+ SimulateClientCreations(base::ClampFloor(base::Minutes(3) / base_delay_));
// The MediaServerCrashListener should be alive, with 1s second to spare.
- clock_.Advance(base::TimeDelta::FromSeconds(59));
+ clock_.Advance(base::Seconds(59));
test_task_runner_->RunTasks();
EXPECT_TRUE(throttler_->IsCrashListenerAliveForTesting());
@@ -307,7 +304,7 @@ TEST_F(MediaServiceThrottlerTest,
SimulateCrashes(1);
// The MediaServerCrashListener should be dead.
- clock_.Advance(base::TimeDelta::FromSeconds(2));
+ clock_.Advance(base::Seconds(2));
test_task_runner_->RunTasks();
EXPECT_FALSE(throttler_->IsCrashListenerAliveForTesting());
}
diff --git a/chromium/media/base/android/mock_android_overlay.h b/chromium/media/base/android/mock_android_overlay.h
index 34c1f7af57b..52d1cdf934e 100644
--- a/chromium/media/base/android/mock_android_overlay.h
+++ b/chromium/media/base/android/mock_android_overlay.h
@@ -19,6 +19,10 @@ class MockAndroidOverlay : public testing::NiceMock<AndroidOverlay>,
public DestructionObservable {
public:
MockAndroidOverlay();
+
+ MockAndroidOverlay(const MockAndroidOverlay&) = delete;
+ MockAndroidOverlay& operator=(const MockAndroidOverlay&) = delete;
+
~MockAndroidOverlay() override;
MOCK_METHOD1(ScheduleLayout, void(const gfx::Rect&));
@@ -63,8 +67,6 @@ class MockAndroidOverlay : public testing::NiceMock<AndroidOverlay>,
std::unique_ptr<AndroidOverlayConfig> config_;
base::WeakPtrFactory<MockAndroidOverlay> weak_factory_{this};
-
- DISALLOW_COPY_AND_ASSIGN(MockAndroidOverlay);
};
} // namespace media
diff --git a/chromium/media/base/android/mock_media_codec_bridge.h b/chromium/media/base/android/mock_media_codec_bridge.h
index db20d7f58a3..44b7a6c41be 100644
--- a/chromium/media/base/android/mock_media_codec_bridge.h
+++ b/chromium/media/base/android/mock_media_codec_bridge.h
@@ -18,6 +18,10 @@ class MockMediaCodecBridge : public MediaCodecBridge,
public DestructionObservable {
public:
MockMediaCodecBridge();
+
+ MockMediaCodecBridge(const MockMediaCodecBridge&) = delete;
+ MockMediaCodecBridge& operator=(const MockMediaCodecBridge&) = delete;
+
~MockMediaCodecBridge() override;
// Helpers for conveniently setting expectations.
@@ -87,8 +91,6 @@ class MockMediaCodecBridge : public MediaCodecBridge,
bool is_drained_ = true;
CodecType codec_type_ = CodecType::kAny;
-
- DISALLOW_COPY_AND_ASSIGN(MockMediaCodecBridge);
};
} // namespace media
diff --git a/chromium/media/base/android/mock_media_crypto_context.h b/chromium/media/base/android/mock_media_crypto_context.h
index 1cd64809cfd..ed6e81b4b25 100644
--- a/chromium/media/base/android/mock_media_crypto_context.h
+++ b/chromium/media/base/android/mock_media_crypto_context.h
@@ -18,6 +18,10 @@ class MEDIA_EXPORT MockMediaCryptoContext
public testing::NiceMock<MediaCryptoContext> {
public:
explicit MockMediaCryptoContext(bool has_media_crypto_context);
+
+ MockMediaCryptoContext(const MockMediaCryptoContext&) = delete;
+ MockMediaCryptoContext& operator=(const MockMediaCryptoContext&) = delete;
+
~MockMediaCryptoContext() override;
// CdmContext implementation.
@@ -38,7 +42,6 @@ class MEDIA_EXPORT MockMediaCryptoContext
private:
bool has_media_crypto_context_;
- DISALLOW_COPY_AND_ASSIGN(MockMediaCryptoContext);
};
} // namespace media
diff --git a/chromium/media/base/android/test_destruction_observable.h b/chromium/media/base/android/test_destruction_observable.h
index d11b7ab9dda..c4b77bbe3b4 100644
--- a/chromium/media/base/android/test_destruction_observable.h
+++ b/chromium/media/base/android/test_destruction_observable.h
@@ -19,11 +19,14 @@ class DestructionObserver;
class DestructionObservable {
public:
DestructionObservable();
+
+ DestructionObservable(const DestructionObservable&) = delete;
+ DestructionObservable& operator=(const DestructionObservable&) = delete;
+
virtual ~DestructionObservable();
std::unique_ptr<DestructionObserver> CreateDestructionObserver();
base::ScopedClosureRunner destruction_cb;
- DISALLOW_COPY_AND_ASSIGN(DestructionObservable);
};
// DestructionObserver lets you set expectations about the destruction of an
@@ -31,6 +34,10 @@ class DestructionObservable {
class DestructionObserver {
public:
DestructionObserver(DestructionObservable* observable);
+
+ DestructionObserver(const DestructionObserver&) = delete;
+ DestructionObserver& operator=(const DestructionObserver&) = delete;
+
virtual ~DestructionObserver();
void VerifyAndClearExpectations();
@@ -58,7 +65,6 @@ class DestructionObserver {
absl::optional<bool> expect_destruction_;
base::WeakPtrFactory<DestructionObserver> weak_factory_{this};
- DISALLOW_COPY_AND_ASSIGN(DestructionObserver);
};
} // namespace media
diff --git a/chromium/media/base/audio_block_fifo.h b/chromium/media/base/audio_block_fifo.h
index a88208aca4a..fb234d4b32a 100644
--- a/chromium/media/base/audio_block_fifo.h
+++ b/chromium/media/base/audio_block_fifo.h
@@ -21,6 +21,10 @@ class MEDIA_EXPORT AudioBlockFifo {
// Creates a new AudioBlockFifo and allocates |blocks| memory, each block
// of memory can store |channels| of length |frames| data.
AudioBlockFifo(int channels, int frames, int blocks);
+
+ AudioBlockFifo(const AudioBlockFifo&) = delete;
+ AudioBlockFifo& operator=(const AudioBlockFifo&) = delete;
+
virtual ~AudioBlockFifo();
// Pushes interleaved audio data from |source| to the FIFO.
@@ -78,8 +82,6 @@ class MEDIA_EXPORT AudioBlockFifo {
// Current write position in the current written block.
int write_pos_;
-
- DISALLOW_COPY_AND_ASSIGN(AudioBlockFifo);
};
} // namespace media
diff --git a/chromium/media/base/audio_block_fifo_unittest.cc b/chromium/media/base/audio_block_fifo_unittest.cc
index cf0bb669d0a..383d8593155 100644
--- a/chromium/media/base/audio_block_fifo_unittest.cc
+++ b/chromium/media/base/audio_block_fifo_unittest.cc
@@ -15,6 +15,10 @@ namespace media {
class AudioBlockFifoTest : public testing::Test {
public:
AudioBlockFifoTest() = default;
+
+ AudioBlockFifoTest(const AudioBlockFifoTest&) = delete;
+ AudioBlockFifoTest& operator=(const AudioBlockFifoTest&) = delete;
+
~AudioBlockFifoTest() override = default;
void PushAndVerify(AudioBlockFifo* fifo,
@@ -54,9 +58,6 @@ class AudioBlockFifoTest : public testing::Test {
EXPECT_GT(bus->channel(i)[bus->frames() - 1], 0.0f);
}
}
-
- private:
- DISALLOW_COPY_AND_ASSIGN(AudioBlockFifoTest);
};
// Verify that construction works as intended.
@@ -114,7 +115,7 @@ TEST_F(AudioBlockFifoTest, PushAndConsume) {
// Consume all blocks of data.
for (int i = 1; i <= blocks; ++i) {
- const AudioBus* bus = fifo.Consume();
+ bus = fifo.Consume();
EXPECT_TRUE(channels == bus->channels());
EXPECT_TRUE(frames == bus->frames());
EXPECT_TRUE(fifo.GetUnfilledFrames() == frames * i);
@@ -132,7 +133,7 @@ TEST_F(AudioBlockFifoTest, PushAndConsume) {
// Consume all the existing filled blocks of data.
while (fifo.available_blocks()) {
- const AudioBus* bus = fifo.Consume();
+ bus = fifo.Consume();
EXPECT_TRUE(channels == bus->channels());
EXPECT_TRUE(frames == bus->frames());
}
diff --git a/chromium/media/base/audio_buffer.cc b/chromium/media/base/audio_buffer.cc
index b153acd3964..cd50f60c2bf 100644
--- a/chromium/media/base/audio_buffer.cc
+++ b/chromium/media/base/audio_buffer.cc
@@ -6,7 +6,6 @@
#include <cmath>
-#include "base/callback_helpers.h"
#include "base/logging.h"
#include "base/notreached.h"
#include "media/base/audio_bus.h"
@@ -40,8 +39,8 @@ void CopyConvertFromInterleaved(
static base::TimeDelta CalculateDuration(int frames, double sample_rate) {
DCHECK_GT(sample_rate, 0);
- return base::TimeDelta::FromMicroseconds(
- frames * base::Time::kMicrosecondsPerSecond / sample_rate);
+ return base::Microseconds(frames * base::Time::kMicrosecondsPerSecond /
+ sample_rate);
}
AudioBufferMemoryPool::AudioBufferMemoryPool() = default;
@@ -301,8 +300,7 @@ std::unique_ptr<AudioBus> AudioBuffer::WrapOrCopyToAudioBus(
// Keep |buffer| alive as long as |audio_bus|.
audio_bus->SetWrappedDataDeleter(
- base::BindOnce(base::DoNothing::Once<scoped_refptr<AudioBuffer>>(),
- std::move(buffer)));
+ base::BindOnce([](scoped_refptr<AudioBuffer>) {}, std::move(buffer)));
return audio_bus;
}
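(The CalculateDuration() change above is the same time-helper migration. As a worked example of the formula it wraps — a sketch assuming base::Time::kMicrosecondsPerSecond == 1,000,000 — 480 frames at 48000 Hz yields 480 * 1,000,000 / 48000 = 10,000 microseconds, i.e. a 10 ms buffer:)

#include "base/time/time.h"

base::TimeDelta FrameCountToDuration(int frames, double sample_rate) {
  // Same arithmetic as AudioBuffer's CalculateDuration(): frame count divided
  // by the sample rate, expressed in microseconds.
  return base::Microseconds(frames * base::Time::kMicrosecondsPerSecond /
                            sample_rate);
}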
diff --git a/chromium/media/base/audio_buffer_converter_unittest.cc b/chromium/media/base/audio_buffer_converter_unittest.cc
index 64d8e450f79..a72a1703ff3 100644
--- a/chromium/media/base/audio_buffer_converter_unittest.cc
+++ b/chromium/media/base/audio_buffer_converter_unittest.cc
@@ -28,7 +28,7 @@ static scoped_refptr<AudioBuffer> MakeTestBuffer(int sample_rate,
int frames) {
return MakeAudioBuffer<uint8_t>(kSampleFormatU8, channel_layout,
channel_count, sample_rate, 0, 1, frames,
- base::TimeDelta::FromSeconds(0));
+ base::Seconds(0));
}
class AudioBufferConverterTest : public ::testing::Test {
diff --git a/chromium/media/base/audio_buffer_queue.h b/chromium/media/base/audio_buffer_queue.h
index cb709e80e53..c16993946ce 100644
--- a/chromium/media/base/audio_buffer_queue.h
+++ b/chromium/media/base/audio_buffer_queue.h
@@ -23,6 +23,10 @@ class AudioBus;
class MEDIA_EXPORT AudioBufferQueue {
public:
AudioBufferQueue();
+
+ AudioBufferQueue(const AudioBufferQueue&) = delete;
+ AudioBufferQueue& operator=(const AudioBufferQueue&) = delete;
+
~AudioBufferQueue();
// Clears the buffer queue.
@@ -77,8 +81,6 @@ class MEDIA_EXPORT AudioBufferQueue {
// Number of frames available to be read in the buffer.
int frames_;
-
- DISALLOW_COPY_AND_ASSIGN(AudioBufferQueue);
};
} // namespace media
diff --git a/chromium/media/base/audio_buffer_unittest.cc b/chromium/media/base/audio_buffer_unittest.cc
index bb4128bb01a..7a11dcf12c5 100644
--- a/chromium/media/base/audio_buffer_unittest.cc
+++ b/chromium/media/base/audio_buffer_unittest.cc
@@ -58,7 +58,7 @@ static void TrimRangeTest(SampleFormat sample_format) {
const int channels = ChannelLayoutToChannelCount(channel_layout);
const int frames = kSampleRate / 10;
const base::TimeDelta timestamp = base::TimeDelta();
- const base::TimeDelta duration = base::TimeDelta::FromMilliseconds(100);
+ const base::TimeDelta duration = base::Milliseconds(100);
scoped_refptr<AudioBuffer> buffer = MakeAudioBuffer<float>(sample_format,
channel_layout,
channels,
@@ -80,7 +80,7 @@ static void TrimRangeTest(SampleFormat sample_format) {
// Trim 10ms of frames from the middle of the buffer.
int trim_start = frames / 2;
const int trim_length = kSampleRate / 100;
- const base::TimeDelta trim_duration = base::TimeDelta::FromMilliseconds(10);
+ const base::TimeDelta trim_duration = base::Milliseconds(10);
buffer->TrimRange(trim_start, trim_start + trim_length);
EXPECT_EQ(frames - trim_length, buffer->frame_count());
EXPECT_EQ(timestamp, buffer->timestamp());
@@ -187,7 +187,7 @@ TEST(AudioBufferTest, CopyFromAudioBus) {
auto audio_bus = media::AudioBus::Create(kChannelCount, kFrameCount);
temp_buffer->ReadFrames(kFrameCount, 0, 0, audio_bus.get());
- const base::TimeDelta kTimestamp = base::TimeDelta::FromMilliseconds(123);
+ const base::TimeDelta kTimestamp = base::Milliseconds(123);
auto audio_buffer_from_bus =
media::AudioBuffer::CopyFrom(kSampleRate, kTimestamp, audio_bus.get());
@@ -219,7 +219,7 @@ TEST(AudioBufferTest, CopyBitstreamFrom) {
const uint8_t kTestData[] = {0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10,
11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
22, 23, 24, 25, 26, 27, 28, 29, 30, 31};
- const base::TimeDelta kTimestamp = base::TimeDelta::FromMicroseconds(1337);
+ const base::TimeDelta kTimestamp = base::Microseconds(1337);
const uint8_t* const data[] = {kTestData};
scoped_refptr<AudioBuffer> buffer = AudioBuffer::CopyBitstreamFrom(
@@ -263,7 +263,7 @@ TEST(AudioBufferTest, FrameSize) {
const uint8_t kTestData[] = {0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10,
11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
22, 23, 24, 25, 26, 27, 28, 29, 30, 31};
- const base::TimeDelta kTimestamp = base::TimeDelta::FromMicroseconds(1337);
+ const base::TimeDelta kTimestamp = base::Microseconds(1337);
const uint8_t* const data[] = {kTestData};
scoped_refptr<AudioBuffer> buffer =
@@ -548,7 +548,7 @@ TEST(AudioBufferTest, EmptyBuffer) {
channel_layout, channels, kSampleRate, frames, start_time);
EXPECT_EQ(frames, buffer->frame_count());
EXPECT_EQ(start_time, buffer->timestamp());
- EXPECT_EQ(base::TimeDelta::FromMilliseconds(10), buffer->duration());
+ EXPECT_EQ(base::Milliseconds(10), buffer->duration());
EXPECT_FALSE(buffer->end_of_stream());
// Read all frames from the buffer. All data should be 0.
@@ -567,7 +567,7 @@ TEST(AudioBufferTest, TrimEmptyBuffer) {
const int channels = ChannelLayoutToChannelCount(channel_layout);
const int frames = kSampleRate / 10;
const base::TimeDelta start_time;
- const base::TimeDelta duration = base::TimeDelta::FromMilliseconds(100);
+ const base::TimeDelta duration = base::Milliseconds(100);
scoped_refptr<AudioBuffer> buffer = AudioBuffer::CreateEmptyBuffer(
channel_layout, channels, kSampleRate, frames, start_time);
EXPECT_EQ(frames, buffer->frame_count());
@@ -583,7 +583,7 @@ TEST(AudioBufferTest, TrimEmptyBuffer) {
// Trim 10ms of frames from the middle of the buffer.
int trim_start = frames / 2;
const int trim_length = kSampleRate / 100;
- const base::TimeDelta trim_duration = base::TimeDelta::FromMilliseconds(10);
+ const base::TimeDelta trim_duration = base::Milliseconds(10);
buffer->TrimRange(trim_start, trim_start + trim_length);
EXPECT_EQ(frames - trim_length, buffer->frame_count());
EXPECT_EQ(start_time, buffer->timestamp());
@@ -598,7 +598,7 @@ TEST(AudioBufferTest, Trim) {
const int channels = ChannelLayoutToChannelCount(channel_layout);
const int frames = kSampleRate / 10;
const base::TimeDelta start_time;
- const base::TimeDelta duration = base::TimeDelta::FromMilliseconds(100);
+ const base::TimeDelta duration = base::Milliseconds(100);
scoped_refptr<AudioBuffer> buffer =
MakeAudioBuffer<float>(kSampleFormatPlanarF32,
channel_layout,
@@ -613,7 +613,7 @@ TEST(AudioBufferTest, Trim) {
EXPECT_EQ(duration, buffer->duration());
const int ten_ms_of_frames = kSampleRate / 100;
- const base::TimeDelta ten_ms = base::TimeDelta::FromMilliseconds(10);
+ const base::TimeDelta ten_ms = base::Milliseconds(10);
std::unique_ptr<AudioBus> bus = AudioBus::Create(channels, frames);
buffer->ReadFrames(buffer->frame_count(), 0, 0, bus.get());
diff --git a/chromium/media/base/audio_bus.h b/chromium/media/base/audio_bus.h
index 75792baa9b8..ce48cd79dda 100644
--- a/chromium/media/base/audio_bus.h
+++ b/chromium/media/base/audio_bus.h
@@ -182,6 +182,9 @@ class MEDIA_SHMEM_EXPORT AudioBus {
// the channels are valid.
void SwapChannels(int a, int b);
+ AudioBus(const AudioBus&) = delete;
+ AudioBus& operator=(const AudioBus&) = delete;
+
virtual ~AudioBus();
protected:
@@ -235,8 +238,6 @@ class MEDIA_SHMEM_EXPORT AudioBus {
// Run on destruction. Frees memory to the data set via SetChannelData().
// Only used with CreateWrapper().
base::OnceClosure wrapped_data_deleter_cb_;
-
- DISALLOW_COPY_AND_ASSIGN(AudioBus);
};
// Delegates to FromInterleavedPartial()
diff --git a/chromium/media/base/audio_bus_unittest.cc b/chromium/media/base/audio_bus_unittest.cc
index f6b0815f41a..7b2f328582b 100644
--- a/chromium/media/base/audio_bus_unittest.cc
+++ b/chromium/media/base/audio_bus_unittest.cc
@@ -32,6 +32,10 @@ static const int kSampleRate = 48000;
class AudioBusTest : public testing::Test {
public:
AudioBusTest() = default;
+
+ AudioBusTest(const AudioBusTest&) = delete;
+ AudioBusTest& operator=(const AudioBusTest&) = delete;
+
~AudioBusTest() override {
for (size_t i = 0; i < data_.size(); ++i)
base::AlignedFree(data_[i]);
@@ -125,8 +129,6 @@ class AudioBusTest : public testing::Test {
protected:
std::vector<float*> data_;
-
- DISALLOW_COPY_AND_ASSIGN(AudioBusTest);
};
// Verify basic Create(...) method works as advertised.
diff --git a/chromium/media/base/audio_codecs.cc b/chromium/media/base/audio_codecs.cc
index 06a9e73c295..901098149db 100644
--- a/chromium/media/base/audio_codecs.cc
+++ b/chromium/media/base/audio_codecs.cc
@@ -4,6 +4,8 @@
#include "media/base/audio_codecs.h"
+#include <ostream>
+
#include "base/strings/string_util.h"
namespace media {
@@ -11,39 +13,39 @@ namespace media {
// These names come from src/third_party/ffmpeg/libavcodec/codec_desc.c
std::string GetCodecName(AudioCodec codec) {
switch (codec) {
- case kUnknownAudioCodec:
+ case AudioCodec::kUnknown:
return "unknown";
- case kCodecAAC:
+ case AudioCodec::kAAC:
return "aac";
- case kCodecMP3:
+ case AudioCodec::kMP3:
return "mp3";
- case kCodecPCM:
- case kCodecPCM_S16BE:
- case kCodecPCM_S24BE:
+ case AudioCodec::kPCM:
+ case AudioCodec::kPCM_S16BE:
+ case AudioCodec::kPCM_S24BE:
return "pcm";
- case kCodecVorbis:
+ case AudioCodec::kVorbis:
return "vorbis";
- case kCodecFLAC:
+ case AudioCodec::kFLAC:
return "flac";
- case kCodecAMR_NB:
+ case AudioCodec::kAMR_NB:
return "amr_nb";
- case kCodecAMR_WB:
+ case AudioCodec::kAMR_WB:
return "amr_wb";
- case kCodecPCM_MULAW:
+ case AudioCodec::kPCM_MULAW:
return "pcm_mulaw";
- case kCodecGSM_MS:
+ case AudioCodec::kGSM_MS:
return "gsm_ms";
- case kCodecOpus:
+ case AudioCodec::kOpus:
return "opus";
- case kCodecPCM_ALAW:
+ case AudioCodec::kPCM_ALAW:
return "pcm_alaw";
- case kCodecEAC3:
+ case AudioCodec::kEAC3:
return "eac3";
- case kCodecALAC:
+ case AudioCodec::kALAC:
return "alac";
- case kCodecAC3:
+ case AudioCodec::kAC3:
return "ac3";
- case kCodecMpegHAudio:
+ case AudioCodec::kMpegHAudio:
return "mpeg-h-audio";
}
}
@@ -59,28 +61,32 @@ std::string GetProfileName(AudioCodecProfile profile) {
AudioCodec StringToAudioCodec(const std::string& codec_id) {
if (codec_id == "aac")
- return kCodecAAC;
+ return AudioCodec::kAAC;
if (codec_id == "ac-3" || codec_id == "mp4a.A5" || codec_id == "mp4a.a5")
- return kCodecAC3;
+ return AudioCodec::kAC3;
if (codec_id == "ec-3" || codec_id == "mp4a.A6" || codec_id == "mp4a.a6")
- return kCodecEAC3;
+ return AudioCodec::kEAC3;
if (codec_id == "mp3" || codec_id == "mp4a.69" || codec_id == "mp4a.6B")
- return kCodecMP3;
+ return AudioCodec::kMP3;
if (codec_id == "alac")
- return kCodecALAC;
+ return AudioCodec::kALAC;
if (codec_id == "flac")
- return kCodecFLAC;
+ return AudioCodec::kFLAC;
if (base::StartsWith(codec_id, "mhm1.", base::CompareCase::SENSITIVE) ||
base::StartsWith(codec_id, "mha1.", base::CompareCase::SENSITIVE)) {
- return kCodecMpegHAudio;
+ return AudioCodec::kMpegHAudio;
}
if (codec_id == "opus")
- return kCodecOpus;
+ return AudioCodec::kOpus;
if (codec_id == "vorbis")
- return kCodecVorbis;
+ return AudioCodec::kVorbis;
if (base::StartsWith(codec_id, "mp4a.40.", base::CompareCase::SENSITIVE))
- return kCodecAAC;
- return kUnknownAudioCodec;
+ return AudioCodec::kAAC;
+ return AudioCodec::kUnknown;
+}
+
+std::ostream& operator<<(std::ostream& os, const AudioCodec& codec) {
+ return os << GetCodecName(codec);
}
} // namespace media
diff --git a/chromium/media/base/audio_codecs.h b/chromium/media/base/audio_codecs.h
index 5eb5ddcb1a4..f679513a20b 100644
--- a/chromium/media/base/audio_codecs.h
+++ b/chromium/media/base/audio_codecs.h
@@ -10,36 +10,36 @@
namespace media {
-enum AudioCodec {
+enum class AudioCodec {
// These values are histogrammed over time; do not change their ordinal
// values. When deleting a codec replace it with a dummy value; when adding a
- // codec, do so at the bottom before kAudioCodecMax, and update the value of
- // kAudioCodecMax to equal the new codec.
- kUnknownAudioCodec = 0,
- kCodecAAC = 1,
- kCodecMP3 = 2,
- kCodecPCM = 3,
- kCodecVorbis = 4,
- kCodecFLAC = 5,
- kCodecAMR_NB = 6,
- kCodecAMR_WB = 7,
- kCodecPCM_MULAW = 8,
- kCodecGSM_MS = 9,
- kCodecPCM_S16BE = 10,
- kCodecPCM_S24BE = 11,
- kCodecOpus = 12,
- kCodecEAC3 = 13,
- kCodecPCM_ALAW = 14,
- kCodecALAC = 15,
- kCodecAC3 = 16,
- kCodecMpegHAudio = 17,
+ // codec, do so at the bottom before kMaxValue, and update the value of
+ // kMaxValue to equal the new codec.
+ kUnknown = 0,
+ kAAC = 1,
+ kMP3 = 2,
+ kPCM = 3,
+ kVorbis = 4,
+ kFLAC = 5,
+ kAMR_NB = 6,
+ kAMR_WB = 7,
+ kPCM_MULAW = 8,
+ kGSM_MS = 9,
+ kPCM_S16BE = 10,
+ kPCM_S24BE = 11,
+ kOpus = 12,
+ kEAC3 = 13,
+ kPCM_ALAW = 14,
+ kALAC = 15,
+ kAC3 = 16,
+ kMpegHAudio = 17,
// DO NOT ADD RANDOM AUDIO CODECS!
//
// The only acceptable time to add a new codec is if there is production code
// that uses said codec in the same CL.
// Must always be equal to the largest entry ever logged.
- kAudioCodecMax = kCodecMpegHAudio,
+ kMaxValue = kMpegHAudio,
};
enum class AudioCodecProfile {
@@ -55,6 +55,8 @@ enum class AudioCodecProfile {
std::string MEDIA_EXPORT GetCodecName(AudioCodec codec);
std::string MEDIA_EXPORT GetProfileName(AudioCodecProfile profile);
+MEDIA_EXPORT std::ostream& operator<<(std::ostream& os,
+ const AudioCodec& codec);
MEDIA_EXPORT AudioCodec StringToAudioCodec(const std::string& codec_id);
} // namespace media
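(The audio_codecs.{cc,h} changes above convert AudioCodec from an unscoped enum — kCodecAAC, kUnknownAudioCodec, ... — to a scoped enum class — AudioCodec::kAAC, AudioCodec::kUnknown, ... — and add an operator<< so the codec name can be streamed directly. A small usage sketch; the PrintCodec() helper is hypothetical:)

#include <iostream>
#include <string>

#include "media/base/audio_codecs.h"

void PrintCodec(const std::string& codec_id) {
  const media::AudioCodec codec = media::StringToAudioCodec(codec_id);
  if (codec == media::AudioCodec::kUnknown) {
    std::cout << "unrecognized codec id: " << codec_id << '\n';
    return;
  }
  // Uses the new operator<<, which forwards to GetCodecName().
  std::cout << codec_id << " parsed as " << codec << '\n';
}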
diff --git a/chromium/media/base/audio_decoder.h b/chromium/media/base/audio_decoder.h
index 961d570e561..78853acf33e 100644
--- a/chromium/media/base/audio_decoder.h
+++ b/chromium/media/base/audio_decoder.h
@@ -42,6 +42,9 @@ class MEDIA_EXPORT AudioDecoder : public Decoder {
AudioDecoder();
+ AudioDecoder(const AudioDecoder&) = delete;
+ AudioDecoder& operator=(const AudioDecoder&) = delete;
+
// Fires any pending callbacks, stops and destroys the decoder.
// Note: Since this is a destructor, |this| will be destroyed after this call.
// Make sure the callbacks fired from this call doesn't post any task that
@@ -87,9 +90,6 @@ class MEDIA_EXPORT AudioDecoder : public Decoder {
// Returns the type of the decoder for statistics recording purposes.
virtual AudioDecoderType GetDecoderType() const = 0;
-
- private:
- DISALLOW_COPY_AND_ASSIGN(AudioDecoder);
};
} // namespace media
diff --git a/chromium/media/base/audio_decoder_config.cc b/chromium/media/base/audio_decoder_config.cc
index 1fcbb4119aa..5bbd6f7a73c 100644
--- a/chromium/media/base/audio_decoder_config.cc
+++ b/chromium/media/base/audio_decoder_config.cc
@@ -54,7 +54,7 @@ void AudioDecoderConfig::Initialize(AudioCodec codec,
AudioDecoderConfig::~AudioDecoderConfig() = default;
bool AudioDecoderConfig::IsValidConfig() const {
- return codec_ != kUnknownAudioCodec &&
+ return codec_ != AudioCodec::kUnknown &&
channel_layout_ != CHANNEL_LAYOUT_UNSUPPORTED &&
bytes_per_channel_ > 0 &&
bytes_per_channel_ <= limits::kMaxBytesPerSample &&
@@ -78,7 +78,8 @@ bool AudioDecoderConfig::Matches(const AudioDecoderConfig& config) const {
(should_discard_decoder_delay() ==
config.should_discard_decoder_delay()) &&
(target_output_channel_layout() ==
- config.target_output_channel_layout()));
+ config.target_output_channel_layout()) &&
+ (aac_extra_data() == config.aac_extra_data()));
}
std::string AudioDecoderConfig::AsHumanReadableString() const {
@@ -98,7 +99,9 @@ std::string AudioDecoderConfig::AsHumanReadableString() const {
<< ", discard decoder delay: "
<< (should_discard_decoder_delay() ? "true" : "false")
<< ", target_output_channel_layout: "
- << ChannelLayoutToString(target_output_channel_layout());
+ << ChannelLayoutToString(target_output_channel_layout())
+ << ", has aac extra data: "
+ << (aac_extra_data().empty() ? "false" : "true");
return s.str();
}
diff --git a/chromium/media/base/audio_decoder_config.h b/chromium/media/base/audio_decoder_config.h
index 1ec4ed073b9..7be1ed8a40e 100644
--- a/chromium/media/base/audio_decoder_config.h
+++ b/chromium/media/base/audio_decoder_config.h
@@ -116,6 +116,11 @@ class MEDIA_EXPORT AudioDecoderConfig {
return target_output_channel_layout_;
}
+ void set_aac_extra_data(std::vector<uint8_t> aac_extra_data) {
+ aac_extra_data_ = std::move(aac_extra_data);
+ }
+ const std::vector<uint8_t>& aac_extra_data() const { return aac_extra_data_; }
+
private:
// WARNING: When modifying or adding any parameters, update the following:
// - AudioDecoderConfig::AsHumanReadableString()
@@ -126,7 +131,7 @@ class MEDIA_EXPORT AudioDecoderConfig {
// Mandatory parameters passed in constructor:
- AudioCodec codec_ = kUnknownAudioCodec;
+ AudioCodec codec_ = AudioCodec::kUnknown;
SampleFormat sample_format_ = kUnknownSampleFormat;
ChannelLayout channel_layout_ = CHANNEL_LAYOUT_UNSUPPORTED;
int samples_per_second_ = 0;
@@ -148,6 +153,12 @@ class MEDIA_EXPORT AudioDecoderConfig {
// Layout of the output hardware. Optionally set. See setter comments.
ChannelLayout target_output_channel_layout_ = CHANNEL_LAYOUT_NONE;
+ // This is a hack for backward compatibility. For AAC, to preserve existing
+ // behavior, we set `aac_extra_data_` on all platforms but only set
+ // `extra_data` on Android.
+ // TODO(crbug.com/1250841): Remove this after we land a long term fix.
+ std::vector<uint8_t> aac_extra_data_;
+
// Indicates if a decoder should implicitly discard decoder delay without it
// being explicitly marked in discard padding.
bool should_discard_decoder_delay_ = true;
diff --git a/chromium/media/base/audio_discard_helper_unittest.cc b/chromium/media/base/audio_discard_helper_unittest.cc
index 9300fea296c..b934b0579f7 100644
--- a/chromium/media/base/audio_discard_helper_unittest.cc
+++ b/chromium/media/base/audio_discard_helper_unittest.cc
@@ -47,24 +47,21 @@ TEST(AudioDiscardHelperTest, TimeDeltaToFrames) {
AudioDiscardHelper discard_helper(kSampleRate, 0, false);
EXPECT_EQ(0u, discard_helper.TimeDeltaToFrames(base::TimeDelta()));
- EXPECT_EQ(
- kSampleRate / 100,
- discard_helper.TimeDeltaToFrames(base::TimeDelta::FromMilliseconds(10)));
+ EXPECT_EQ(kSampleRate / 100,
+ discard_helper.TimeDeltaToFrames(base::Milliseconds(10)));
// Ensure partial frames are rounded down correctly. The equation below
// calculates a frame count with a fractional part < 0.5.
const int small_remainder =
base::Time::kMicrosecondsPerSecond * (kSampleRate - 0.9) / kSampleRate;
- EXPECT_EQ(kSampleRate - 1,
- discard_helper.TimeDeltaToFrames(
- base::TimeDelta::FromMicroseconds(small_remainder)));
+ EXPECT_EQ(kSampleRate - 1, discard_helper.TimeDeltaToFrames(
+ base::Microseconds(small_remainder)));
// Ditto, but rounded up using a fractional part > 0.5.
const int large_remainder =
base::Time::kMicrosecondsPerSecond * (kSampleRate - 0.4) / kSampleRate;
- EXPECT_EQ(kSampleRate,
- discard_helper.TimeDeltaToFrames(
- base::TimeDelta::FromMicroseconds(large_remainder)));
+ EXPECT_EQ(kSampleRate, discard_helper.TimeDeltaToFrames(
+ base::Microseconds(large_remainder)));
}
TEST(AudioDiscardHelperTest, BasicProcessBuffers) {
@@ -75,9 +72,8 @@ TEST(AudioDiscardHelperTest, BasicProcessBuffers) {
// Use an estimated duration which doesn't match the number of decoded frames
// to ensure the helper is correctly setting durations based on output frames.
- const base::TimeDelta kEstimatedDuration =
- base::TimeDelta::FromMilliseconds(9);
- const base::TimeDelta kActualDuration = base::TimeDelta::FromMilliseconds(10);
+ const base::TimeDelta kEstimatedDuration = base::Milliseconds(9);
+ const base::TimeDelta kActualDuration = base::Milliseconds(10);
const int kTestFrames = discard_helper.TimeDeltaToFrames(kActualDuration);
scoped_refptr<DecoderBuffer> encoded_buffer =
@@ -104,8 +100,8 @@ TEST(AudioDiscardHelperTest, NegativeTimestampClampsToZero) {
AudioDiscardHelper discard_helper(kSampleRate, 0, false);
ASSERT_FALSE(discard_helper.initialized());
- const base::TimeDelta kTimestamp = -base::TimeDelta::FromSeconds(1);
- const base::TimeDelta kDuration = base::TimeDelta::FromMilliseconds(10);
+ const base::TimeDelta kTimestamp = -base::Seconds(1);
+ const base::TimeDelta kDuration = base::Milliseconds(10);
const int kTestFrames = discard_helper.TimeDeltaToFrames(kDuration);
scoped_refptr<DecoderBuffer> encoded_buffer =
@@ -126,7 +122,7 @@ TEST(AudioDiscardHelperTest, ProcessBuffersWithInitialDiscard) {
ASSERT_FALSE(discard_helper.initialized());
const base::TimeDelta kTimestamp = base::TimeDelta();
- const base::TimeDelta kDuration = base::TimeDelta::FromMilliseconds(10);
+ const base::TimeDelta kDuration = base::Milliseconds(10);
const int kTestFrames = discard_helper.TimeDeltaToFrames(kDuration);
// Tell the helper we want to discard half of the initial frames.
@@ -153,7 +149,7 @@ TEST(AudioDiscardHelperTest, ProcessBuffersWithLargeInitialDiscard) {
ASSERT_FALSE(discard_helper.initialized());
const base::TimeDelta kTimestamp = base::TimeDelta();
- const base::TimeDelta kDuration = base::TimeDelta::FromMilliseconds(10);
+ const base::TimeDelta kDuration = base::Milliseconds(10);
const int kTestFrames = discard_helper.TimeDeltaToFrames(kDuration);
// Tell the helper we want to discard 1.5 buffers worth of frames.
@@ -188,7 +184,7 @@ TEST(AudioDiscardHelperTest, AllowNonMonotonicTimestamps) {
ASSERT_FALSE(discard_helper.initialized());
const base::TimeDelta kTimestamp = base::TimeDelta();
- const base::TimeDelta kDuration = base::TimeDelta::FromMilliseconds(10);
+ const base::TimeDelta kDuration = base::Milliseconds(10);
const int kTestFrames = discard_helper.TimeDeltaToFrames(kDuration);
scoped_refptr<DecoderBuffer> encoded_buffer =
@@ -216,7 +212,7 @@ TEST(AudioDiscardHelperTest, DiscardEndPadding) {
ASSERT_FALSE(discard_helper.initialized());
const base::TimeDelta kTimestamp = base::TimeDelta();
- const base::TimeDelta kDuration = base::TimeDelta::FromMilliseconds(10);
+ const base::TimeDelta kDuration = base::Milliseconds(10);
const int kTestFrames = discard_helper.TimeDeltaToFrames(kDuration);
scoped_refptr<DecoderBuffer> encoded_buffer =
@@ -240,7 +236,7 @@ TEST(AudioDiscardHelperTest, BadDiscardEndPadding) {
ASSERT_FALSE(discard_helper.initialized());
const base::TimeDelta kTimestamp = base::TimeDelta();
- const base::TimeDelta kDuration = base::TimeDelta::FromMilliseconds(10);
+ const base::TimeDelta kDuration = base::Milliseconds(10);
const int kTestFrames = discard_helper.TimeDeltaToFrames(kDuration);
scoped_refptr<DecoderBuffer> encoded_buffer =
@@ -262,7 +258,7 @@ TEST(AudioDiscardHelperTest, InitialDiscardAndDiscardEndPadding) {
ASSERT_FALSE(discard_helper.initialized());
const base::TimeDelta kTimestamp = base::TimeDelta();
- const base::TimeDelta kDuration = base::TimeDelta::FromMilliseconds(10);
+ const base::TimeDelta kDuration = base::Milliseconds(10);
const int kTestFrames = discard_helper.TimeDeltaToFrames(kDuration);
scoped_refptr<DecoderBuffer> encoded_buffer =
@@ -292,7 +288,7 @@ TEST(AudioDiscardHelperTest, InitialDiscardAndDiscardPadding) {
ASSERT_FALSE(discard_helper.initialized());
const base::TimeDelta kTimestamp = base::TimeDelta();
- const base::TimeDelta kDuration = base::TimeDelta::FromMilliseconds(10);
+ const base::TimeDelta kDuration = base::Milliseconds(10);
const int kTestFrames = discard_helper.TimeDeltaToFrames(kDuration);
scoped_refptr<DecoderBuffer> encoded_buffer =
@@ -323,7 +319,7 @@ TEST(AudioDiscardHelperTest, InitialDiscardAndDiscardPaddingAndDecoderDelay) {
discard_helper.Reset(kDecoderDelay);
const base::TimeDelta kTimestamp = base::TimeDelta();
- const base::TimeDelta kDuration = base::TimeDelta::FromMilliseconds(10);
+ const base::TimeDelta kDuration = base::Milliseconds(10);
const int kTestFrames = discard_helper.TimeDeltaToFrames(kDuration);
scoped_refptr<DecoderBuffer> encoded_buffer =
@@ -434,7 +430,7 @@ TEST(AudioDiscardHelperTest, DelayedDiscardInitialDiscardAndDiscardPadding) {
ASSERT_FALSE(discard_helper.initialized());
const base::TimeDelta kTimestamp = base::TimeDelta();
- const base::TimeDelta kDuration = base::TimeDelta::FromMilliseconds(10);
+ const base::TimeDelta kDuration = base::Milliseconds(10);
const int kTestFrames = discard_helper.TimeDeltaToFrames(kDuration);
scoped_refptr<DecoderBuffer> encoded_buffer =
@@ -470,7 +466,7 @@ TEST(AudioDiscardHelperTest, CompleteDiscard) {
ASSERT_FALSE(discard_helper.initialized());
const base::TimeDelta kTimestamp = base::TimeDelta();
- const base::TimeDelta kDuration = base::TimeDelta::FromMilliseconds(10);
+ const base::TimeDelta kDuration = base::Milliseconds(10);
const int kTestFrames = discard_helper.TimeDeltaToFrames(kDuration);
discard_helper.Reset(0);
@@ -502,7 +498,7 @@ TEST(AudioDiscardHelperTest, CompleteDiscardWithDelayedDiscard) {
ASSERT_FALSE(discard_helper.initialized());
const base::TimeDelta kTimestamp = base::TimeDelta();
- const base::TimeDelta kDuration = base::TimeDelta::FromMilliseconds(10);
+ const base::TimeDelta kDuration = base::Milliseconds(10);
const int kTestFrames = discard_helper.TimeDeltaToFrames(kDuration);
discard_helper.Reset(0);
@@ -541,7 +537,7 @@ TEST(AudioDiscardHelperTest, CompleteDiscardWithInitialDiscardDecoderDelay) {
discard_helper.Reset(kDecoderDelay);
const base::TimeDelta kTimestamp = base::TimeDelta();
- const base::TimeDelta kDuration = base::TimeDelta::FromMilliseconds(10);
+ const base::TimeDelta kDuration = base::Milliseconds(10);
const int kTestFrames = discard_helper.TimeDeltaToFrames(kDuration);
scoped_refptr<DecoderBuffer> encoded_buffer =
diff --git a/chromium/media/base/audio_fifo.h b/chromium/media/base/audio_fifo.h
index 65349f50dd7..ac1af1f5bf2 100644
--- a/chromium/media/base/audio_fifo.h
+++ b/chromium/media/base/audio_fifo.h
@@ -22,6 +22,10 @@ class MEDIA_EXPORT AudioFifo {
public:
// Creates a new AudioFifo and allocates |channels| of length |frames|.
AudioFifo(int channels, int frames);
+
+ AudioFifo(const AudioFifo&) = delete;
+ AudioFifo& operator=(const AudioFifo&) = delete;
+
virtual ~AudioFifo();
// Pushes all audio channel data from |source| to the FIFO.
@@ -60,8 +64,6 @@ class MEDIA_EXPORT AudioFifo {
// Current write position.
int write_pos_;
-
- DISALLOW_COPY_AND_ASSIGN(AudioFifo);
};
} // namespace media
diff --git a/chromium/media/base/audio_fifo_unittest.cc b/chromium/media/base/audio_fifo_unittest.cc
index 2f69e3628f2..752c87373ce 100644
--- a/chromium/media/base/audio_fifo_unittest.cc
+++ b/chromium/media/base/audio_fifo_unittest.cc
@@ -15,6 +15,10 @@ namespace media {
class AudioFifoTest : public testing::Test {
public:
AudioFifoTest() = default;
+
+ AudioFifoTest(const AudioFifoTest&) = delete;
+ AudioFifoTest& operator=(const AudioFifoTest&) = delete;
+
~AudioFifoTest() override = default;
void VerifyValue(const float data[], int size, float value) {
@@ -23,7 +27,6 @@ class AudioFifoTest : public testing::Test {
}
protected:
- DISALLOW_COPY_AND_ASSIGN(AudioFifoTest);
};
// Verify that construction works as intended.
@@ -121,7 +124,7 @@ TEST_F(AudioFifoTest, FramesInFifo) {
const int frames_in_fifo = bus2->frames();
fifo.Push(bus2.get());
EXPECT_EQ(fifo.frames(), frames_in_fifo);
- for (int n = 0; n < kMaxFrameCount; ++n) {
+ for (n = 0; n < kMaxFrameCount; ++n) {
fifo.Push(bus2.get());
fifo.Consume(bus2.get(), 0, frames_in_fifo);
EXPECT_EQ(fifo.frames(), frames_in_fifo);
diff --git a/chromium/media/base/audio_hash.h b/chromium/media/base/audio_hash.h
index 5ac1aa267b7..cc261abb228 100644
--- a/chromium/media/base/audio_hash.h
+++ b/chromium/media/base/audio_hash.h
@@ -35,6 +35,10 @@ class AudioBus;
class MEDIA_EXPORT AudioHash {
public:
AudioHash();
+
+ AudioHash(const AudioHash&) = delete;
+ AudioHash& operator=(const AudioHash&) = delete;
+
~AudioHash();
// Update current hash with the contents of the provided AudioBus.
@@ -57,8 +61,6 @@ class MEDIA_EXPORT AudioHash {
// The total number of samples processed per channel. Uses a uint32_t instead
// of size_t so overflows on 64-bit and 32-bit machines are equivalent.
uint32_t sample_count_;
-
- DISALLOW_COPY_AND_ASSIGN(AudioHash);
};
} // namespace media
diff --git a/chromium/media/base/audio_hash_unittest.cc b/chromium/media/base/audio_hash_unittest.cc
index 03383cfb3f8..a14a3eb6dc8 100644
--- a/chromium/media/base/audio_hash_unittest.cc
+++ b/chromium/media/base/audio_hash_unittest.cc
@@ -41,14 +41,15 @@ class AudioHashTest : public testing::Test {
}
}
+ AudioHashTest(const AudioHashTest&) = delete;
+ AudioHashTest& operator=(const AudioHashTest&) = delete;
+
~AudioHashTest() override = default;
protected:
std::unique_ptr<AudioBus> bus_one_;
std::unique_ptr<AudioBus> bus_two_;
FakeAudioRenderCallback fake_callback_;
-
- DISALLOW_COPY_AND_ASSIGN(AudioHashTest);
};
// Ensure the same data hashes the same.
diff --git a/chromium/media/base/audio_latency_unittest.cc b/chromium/media/base/audio_latency_unittest.cc
index aa9ac668785..d7722927fa0 100644
--- a/chromium/media/base/audio_latency_unittest.cc
+++ b/chromium/media/base/audio_latency_unittest.cc
@@ -46,63 +46,57 @@ class AudioLatencyTest : public testing::TestWithParam<AudioLatencyTestData> {
max_buffer_size
: (limits::kMaxWebAudioBufferSize / multiplier) * multiplier;
- EXPECT_EQ(platform_min_buffer_size,
- media::AudioLatency::GetExactBufferSize(
- base::TimeDelta::FromSecondsD(0.0), hardware_sample_rate,
- hardware_buffer_size, min_buffer_size, max_buffer_size,
- limits::kMaxWebAudioBufferSize));
EXPECT_EQ(
platform_min_buffer_size,
media::AudioLatency::GetExactBufferSize(
- base::TimeDelta::FromSecondsD(
- min_buffer_size / static_cast<double>(hardware_sample_rate)),
- hardware_sample_rate, hardware_buffer_size, min_buffer_size,
- max_buffer_size, limits::kMaxWebAudioBufferSize));
- EXPECT_EQ(
- multiplier * 2,
- media::AudioLatency::GetExactBufferSize(
- base::TimeDelta::FromSecondsD(
- (multiplier * 2) / static_cast<double>(hardware_sample_rate)),
- hardware_sample_rate, hardware_buffer_size, min_buffer_size,
- max_buffer_size, limits::kMaxWebAudioBufferSize));
+ base::Seconds(0.0), hardware_sample_rate, hardware_buffer_size,
+ min_buffer_size, max_buffer_size, limits::kMaxWebAudioBufferSize));
+ EXPECT_EQ(platform_min_buffer_size,
+ media::AudioLatency::GetExactBufferSize(
+ base::Seconds(min_buffer_size /
+ static_cast<double>(hardware_sample_rate)),
+ hardware_sample_rate, hardware_buffer_size, min_buffer_size,
+ max_buffer_size, limits::kMaxWebAudioBufferSize));
+ EXPECT_EQ(multiplier * 2,
+ media::AudioLatency::GetExactBufferSize(
+ base::Seconds((multiplier * 2) /
+ static_cast<double>(hardware_sample_rate)),
+ hardware_sample_rate, hardware_buffer_size, min_buffer_size,
+ max_buffer_size, limits::kMaxWebAudioBufferSize));
+ EXPECT_EQ(multiplier * 2,
+ media::AudioLatency::GetExactBufferSize(
+ base::Seconds((multiplier * 1.1) /
+ static_cast<double>(hardware_sample_rate)),
+ hardware_sample_rate, hardware_buffer_size, min_buffer_size,
+ max_buffer_size, limits::kMaxWebAudioBufferSize));
EXPECT_EQ(
- multiplier * 2,
+ platform_max_buffer_size,
media::AudioLatency::GetExactBufferSize(
- base::TimeDelta::FromSecondsD(
- (multiplier * 1.1) / static_cast<double>(hardware_sample_rate)),
- hardware_sample_rate, hardware_buffer_size, min_buffer_size,
- max_buffer_size, limits::kMaxWebAudioBufferSize));
- EXPECT_EQ(platform_max_buffer_size,
- media::AudioLatency::GetExactBufferSize(
- base::TimeDelta::FromSecondsD(10.0), hardware_sample_rate,
- hardware_buffer_size, min_buffer_size, max_buffer_size,
- limits::kMaxWebAudioBufferSize));
+ base::Seconds(10.0), hardware_sample_rate, hardware_buffer_size,
+ min_buffer_size, max_buffer_size, limits::kMaxWebAudioBufferSize));
if (max_buffer_size) {
- EXPECT_EQ(
- max_buffer_size,
- media::AudioLatency::GetExactBufferSize(
- base::TimeDelta::FromSecondsD(
- max_buffer_size / static_cast<double>(hardware_sample_rate)),
- hardware_sample_rate, hardware_buffer_size, min_buffer_size,
- max_buffer_size, limits::kMaxWebAudioBufferSize));
+ EXPECT_EQ(max_buffer_size,
+ media::AudioLatency::GetExactBufferSize(
+ base::Seconds(max_buffer_size /
+ static_cast<double>(hardware_sample_rate)),
+ hardware_sample_rate, hardware_buffer_size, min_buffer_size,
+ max_buffer_size, limits::kMaxWebAudioBufferSize));
}
#if defined(OS_WIN)
if (min_buffer_size && min_buffer_size < hardware_buffer_size) {
EXPECT_EQ(hardware_buffer_size,
media::AudioLatency::GetExactBufferSize(
- base::TimeDelta::FromSecondsD(
- (min_buffer_size * 1.1) /
- static_cast<double>(hardware_sample_rate)),
+ base::Seconds((min_buffer_size * 1.1) /
+ static_cast<double>(hardware_sample_rate)),
hardware_sample_rate, hardware_buffer_size, min_buffer_size,
max_buffer_size, limits::kMaxWebAudioBufferSize));
}
#elif defined(OS_MAC)
EXPECT_EQ(limits::kMaxWebAudioBufferSize,
media::AudioLatency::GetExactBufferSize(
- base::TimeDelta::FromSecondsD(
- (limits::kMaxAudioBufferSize * 1.1) /
- static_cast<double>(hardware_sample_rate)),
+ base::Seconds((limits::kMaxAudioBufferSize * 1.1) /
+ static_cast<double>(hardware_sample_rate)),
hardware_sample_rate, hardware_buffer_size, min_buffer_size,
max_buffer_size, limits::kMaxWebAudioBufferSize));
#endif
@@ -110,9 +104,8 @@ class AudioLatencyTest : public testing::TestWithParam<AudioLatencyTestData> {
int previous_buffer_size = 0;
for (int i = 0; i < 1000; i++) {
int buffer_size = media::AudioLatency::GetExactBufferSize(
- base::TimeDelta::FromSecondsD(i / 1000.0), hardware_sample_rate,
- hardware_buffer_size, min_buffer_size, max_buffer_size,
- limits::kMaxWebAudioBufferSize);
+ base::Seconds(i / 1000.0), hardware_sample_rate, hardware_buffer_size,
+ min_buffer_size, max_buffer_size, limits::kMaxWebAudioBufferSize);
EXPECT_GE(buffer_size, previous_buffer_size);
#if defined(OS_WIN)
EXPECT_TRUE(buffer_size == min_buffer_size ||
diff --git a/chromium/media/base/audio_parameters.cc b/chromium/media/base/audio_parameters.cc
index b3b07180107..73660aeecf1 100644
--- a/chromium/media/base/audio_parameters.cc
+++ b/chromium/media/base/audio_parameters.cc
@@ -162,7 +162,7 @@ double AudioParameters::GetMicrosecondsPerFrame() const {
}
base::TimeDelta AudioParameters::GetBufferDuration() const {
- return base::TimeDelta::FromMicroseconds(static_cast<int64_t>(
+ return base::Microseconds(static_cast<int64_t>(
frames_per_buffer_ * base::Time::kMicrosecondsPerSecond /
static_cast<float>(sample_rate_)));
}
diff --git a/chromium/media/base/audio_power_monitor.h b/chromium/media/base/audio_power_monitor.h
index d95b2836064..012879f1ecf 100644
--- a/chromium/media/base/audio_power_monitor.h
+++ b/chromium/media/base/audio_power_monitor.h
@@ -42,6 +42,9 @@ class MEDIA_EXPORT AudioPowerMonitor {
// ~63.2% of maximum given a step input signal.
AudioPowerMonitor(int sample_rate, base::TimeDelta time_constant);
+ AudioPowerMonitor(const AudioPowerMonitor&) = delete;
+ AudioPowerMonitor& operator=(const AudioPowerMonitor&) = delete;
+
~AudioPowerMonitor();
// Reset power monitor to initial state (zero power level). This should not
@@ -79,8 +82,6 @@ class MEDIA_EXPORT AudioPowerMonitor {
base::Lock reading_lock_;
float power_reading_;
bool clipped_reading_;
-
- DISALLOW_COPY_AND_ASSIGN(AudioPowerMonitor);
};
} // namespace media
diff --git a/chromium/media/base/audio_power_monitor_unittest.cc b/chromium/media/base/audio_power_monitor_unittest.cc
index 7ce16e04301..6b01f582fe0 100644
--- a/chromium/media/base/audio_power_monitor_unittest.cc
+++ b/chromium/media/base/audio_power_monitor_unittest.cc
@@ -146,9 +146,7 @@ class MeasurementObserver {
class AudioPowerMonitorTest : public ::testing::TestWithParam<TestScenario> {
public:
AudioPowerMonitorTest()
- : power_monitor_(kSampleRate,
- base::TimeDelta::FromMilliseconds(kTimeConstantMillis)) {
- }
+ : power_monitor_(kSampleRate, base::Milliseconds(kTimeConstantMillis)) {}
void FeedAndCheckExpectedPowerIsMeasured(const AudioBus& bus,
float power,
diff --git a/chromium/media/base/audio_pull_fifo.h b/chromium/media/base/audio_pull_fifo.h
index 9d9ac6f5a9d..1a58d530bb8 100644
--- a/chromium/media/base/audio_pull_fifo.h
+++ b/chromium/media/base/audio_pull_fifo.h
@@ -33,6 +33,10 @@ class MEDIA_EXPORT AudioPullFifo {
// FIFO can contain |channel| number of channels, where each channel is of
// length |frames| audio frames.
AudioPullFifo(int channels, int frames, ReadCB read_cb);
+
+ AudioPullFifo(const AudioPullFifo&) = delete;
+ AudioPullFifo& operator=(const AudioPullFifo&) = delete;
+
virtual ~AudioPullFifo();
// Consumes |frames_to_consume| audio frames from the FIFO and copies
@@ -58,8 +62,6 @@ class MEDIA_EXPORT AudioPullFifo {
// Temporary audio bus to hold the data from the producer.
std::unique_ptr<AudioBus> fifo_;
int fifo_index_;
-
- DISALLOW_COPY_AND_ASSIGN(AudioPullFifo);
};
} // namespace media
diff --git a/chromium/media/base/audio_pull_fifo_unittest.cc b/chromium/media/base/audio_pull_fifo_unittest.cc
index 358485a1831..073b433b77c 100644
--- a/chromium/media/base/audio_pull_fifo_unittest.cc
+++ b/chromium/media/base/audio_pull_fifo_unittest.cc
@@ -39,6 +39,10 @@ class AudioPullFifoTest
last_frame_delay_(-1) {
EXPECT_EQ(kMaxFramesInFifo, pull_fifo_.SizeInFrames());
}
+
+ AudioPullFifoTest(const AudioPullFifoTest&) = delete;
+ AudioPullFifoTest& operator=(const AudioPullFifoTest&) = delete;
+
virtual ~AudioPullFifoTest() = default;
void VerifyValue(const float data[], int size, float start_value) {
@@ -86,8 +90,6 @@ class AudioPullFifoTest
std::unique_ptr<AudioBus> audio_bus_;
int fill_value_;
int last_frame_delay_;
-
- DISALLOW_COPY_AND_ASSIGN(AudioPullFifoTest);
};
TEST_P(AudioPullFifoTest, Consume) {
diff --git a/chromium/media/base/audio_push_fifo.h b/chromium/media/base/audio_push_fifo.h
index fb9c615c397..d0aa4a3c27b 100644
--- a/chromium/media/base/audio_push_fifo.h
+++ b/chromium/media/base/audio_push_fifo.h
@@ -36,6 +36,9 @@ class MEDIA_EXPORT AudioPushFifo final {
// |callback|.
explicit AudioPushFifo(const OutputCallback& callback);
+ AudioPushFifo(const AudioPushFifo&) = delete;
+ AudioPushFifo& operator=(const AudioPushFifo&) = delete;
+
~AudioPushFifo();
// Returns the number of frames in each AudioBus delivered to the
@@ -69,8 +72,6 @@ class MEDIA_EXPORT AudioPushFifo final {
// Queue of frames pending for delivery.
std::unique_ptr<AudioBus> audio_queue_;
int queued_frames_;
-
- DISALLOW_COPY_AND_ASSIGN(AudioPushFifo);
};
} // namespace media
diff --git a/chromium/media/base/audio_push_fifo_unittest.cc b/chromium/media/base/audio_push_fifo_unittest.cc
index a5fc8a37885..ecbc8443254 100644
--- a/chromium/media/base/audio_push_fifo_unittest.cc
+++ b/chromium/media/base/audio_push_fifo_unittest.cc
@@ -20,6 +20,10 @@ namespace {
class AudioPushFifoTest : public testing::TestWithParam<int> {
public:
AudioPushFifoTest() = default;
+
+ AudioPushFifoTest(const AudioPushFifoTest&) = delete;
+ AudioPushFifoTest& operator=(const AudioPushFifoTest&) = delete;
+
~AudioPushFifoTest() override = default;
int output_chunk_size() const { return GetParam(); }
@@ -161,8 +165,6 @@ class AudioPushFifoTest : public testing::TestWithParam<int> {
}
uint32_t rand_seed_ = 0x7e110;
-
- DISALLOW_COPY_AND_ASSIGN(AudioPushFifoTest);
};
// Tests an atypical edge case: Push()ing one frame at a time.
diff --git a/chromium/media/base/audio_renderer.h b/chromium/media/base/audio_renderer.h
index 907d0fc7f82..6eb4fa59981 100644
--- a/chromium/media/base/audio_renderer.h
+++ b/chromium/media/base/audio_renderer.h
@@ -24,6 +24,9 @@ class MEDIA_EXPORT AudioRenderer {
public:
AudioRenderer();
+ AudioRenderer(const AudioRenderer&) = delete;
+ AudioRenderer& operator=(const AudioRenderer&) = delete;
+
// Stop all operations and fire all pending callbacks.
virtual ~AudioRenderer();
@@ -71,9 +74,6 @@ class MEDIA_EXPORT AudioRenderer {
// Sets a flag indicating whether the audio stream was initiated by autoplay.
virtual void SetAutoplayInitiated(bool autoplay_initiated) = 0;
-
- private:
- DISALLOW_COPY_AND_ASSIGN(AudioRenderer);
};
} // namespace media
diff --git a/chromium/media/base/audio_renderer_mixer.cc b/chromium/media/base/audio_renderer_mixer.cc
index 604a9ecba91..1c4cc33111f 100644
--- a/chromium/media/base/audio_renderer_mixer.cc
+++ b/chromium/media/base/audio_renderer_mixer.cc
@@ -15,7 +15,7 @@
namespace media {
-constexpr base::TimeDelta kPauseDelay = base::TimeDelta::FromSeconds(10);
+constexpr base::TimeDelta kPauseDelay = base::Seconds(10);
AudioRendererMixer::AudioRendererMixer(const AudioParameters& output_params,
scoped_refptr<AudioRendererSink> sink)
diff --git a/chromium/media/base/audio_renderer_mixer.h b/chromium/media/base/audio_renderer_mixer.h
index cc45740f147..6b7d5852942 100644
--- a/chromium/media/base/audio_renderer_mixer.h
+++ b/chromium/media/base/audio_renderer_mixer.h
@@ -31,6 +31,10 @@ class MEDIA_EXPORT AudioRendererMixer
public:
AudioRendererMixer(const AudioParameters& output_params,
scoped_refptr<AudioRendererSink> sink);
+
+ AudioRendererMixer(const AudioRendererMixer&) = delete;
+ AudioRendererMixer& operator=(const AudioRendererMixer&) = delete;
+
~AudioRendererMixer() override;
// Add or remove a mixer input from mixing; called by AudioRendererMixerInput.
@@ -94,8 +98,6 @@ class MEDIA_EXPORT AudioRendererMixer
base::TimeDelta pause_delay_ GUARDED_BY(lock_);
base::TimeTicks last_play_time_ GUARDED_BY(lock_);
bool playing_ GUARDED_BY(lock_);
-
- DISALLOW_COPY_AND_ASSIGN(AudioRendererMixer);
};
} // namespace media
diff --git a/chromium/media/base/audio_renderer_mixer_pool.h b/chromium/media/base/audio_renderer_mixer_pool.h
index 8562d5880f0..9071f1ff8e3 100644
--- a/chromium/media/base/audio_renderer_mixer_pool.h
+++ b/chromium/media/base/audio_renderer_mixer_pool.h
@@ -24,6 +24,10 @@ class AudioRendererSink;
class MEDIA_EXPORT AudioRendererMixerPool {
public:
AudioRendererMixerPool() = default;
+
+ AudioRendererMixerPool(const AudioRendererMixerPool&) = delete;
+ AudioRendererMixerPool& operator=(const AudioRendererMixerPool&) = delete;
+
virtual ~AudioRendererMixerPool() = default;
// Obtains a pointer to mixer instance based on AudioParameters. The pointer
@@ -50,9 +54,6 @@ class MEDIA_EXPORT AudioRendererMixerPool {
virtual scoped_refptr<AudioRendererSink> GetSink(
const base::UnguessableToken& owner_token,
const std::string& device_id) = 0;
-
- private:
- DISALLOW_COPY_AND_ASSIGN(AudioRendererMixerPool);
};
} // namespace media
diff --git a/chromium/media/base/audio_renderer_mixer_unittest.cc b/chromium/media/base/audio_renderer_mixer_unittest.cc
index bec6fd5e6cd..3e6c86b6386 100644
--- a/chromium/media/base/audio_renderer_mixer_unittest.cc
+++ b/chromium/media/base/audio_renderer_mixer_unittest.cc
@@ -479,7 +479,7 @@ TEST_P(AudioRendererMixerBehavioralTest, OnRenderErrorPausedInput) {
// Ensure the physical stream is paused after a certain amount of time with no
// inputs playing. The test will hang if the behavior is incorrect.
TEST_P(AudioRendererMixerBehavioralTest, MixerPausesStream) {
- const base::TimeDelta kPauseTime = base::TimeDelta::FromMilliseconds(500);
+ const base::TimeDelta kPauseTime = base::Milliseconds(500);
// This value can't be too low or valgrind/tsan will time out on the bots.
const base::TimeDelta kTestTimeout = 10 * kPauseTime;
mixer_->SetPauseDelayForTesting(kPauseTime);
@@ -492,7 +492,7 @@ TEST_P(AudioRendererMixerBehavioralTest, MixerPausesStream) {
InitializeInputs(1);
// Ensure never playing the input results in a sink pause.
- const base::TimeDelta kSleepTime = base::TimeDelta::FromMilliseconds(100);
+ const base::TimeDelta kSleepTime = base::Milliseconds(100);
base::TimeTicks start_time = base::TimeTicks::Now();
while (!pause_event.IsSignaled()) {
mixer_callback_->Render(base::TimeDelta(), base::TimeTicks::Now(), 0,
diff --git a/chromium/media/base/audio_shifter.cc b/chromium/media/base/audio_shifter.cc
index 90c416168da..462fdf702fd 100644
--- a/chromium/media/base/audio_shifter.cc
+++ b/chromium/media/base/audio_shifter.cc
@@ -108,8 +108,7 @@ void AudioShifter::Push(std::unique_ptr<AudioBus> input,
(playout_time - base::TimeTicks()).InMillisecondsF());
if (!queue_.empty()) {
playout_time = input_clock_smoother_->Smooth(
- playout_time,
- base::TimeDelta::FromSecondsD(queue_.back().audio->frames() / rate_));
+ playout_time, base::Seconds(queue_.back().audio->frames() / rate_));
}
queue_.push_back(AudioQueueEntry(playout_time, std::move(input)));
while (!queue_.empty() &&
@@ -128,11 +127,9 @@ void AudioShifter::Pull(AudioBus* output,
// Add the kernel size since we incur some internal delay in resampling. All
// resamplers incur some delay, and for the SincResampler (used by
// MultiChannelResampler), this is (currently) kKernelSize / 2 frames.
- playout_time +=
- base::TimeDelta::FromSecondsD(SincResampler::kKernelSize / 2 / rate_);
+ playout_time += base::Seconds(SincResampler::kKernelSize / 2 / rate_);
playout_time = output_clock_smoother_->Smooth(
- playout_time,
- base::TimeDelta::FromSecondsD(previous_requested_samples_ / rate_));
+ playout_time, base::Seconds(previous_requested_samples_ / rate_));
previous_requested_samples_ = output->frames();
base::TimeTicks stream_time;
@@ -145,21 +142,19 @@ void AudioShifter::Pull(AudioBus* output,
stream_time = queue_.front().target_playout_time;
buffer_end_time = queue_.back().target_playout_time;
}
- stream_time += base::TimeDelta::FromSecondsD(
- (position_ - resampler_.BufferedFrames()) / rate_);
+ stream_time +=
+ base::Seconds((position_ - resampler_.BufferedFrames()) / rate_);
- if (!running_ && base::TimeDelta::FromSecondsD(output->frames() * 2 / rate_) +
- clock_accuracy_ >
- buffer_end_time - stream_time) {
+ if (!running_ &&
+ base::Seconds(output->frames() * 2 / rate_) + clock_accuracy_ >
+ buffer_end_time - stream_time) {
// We're not running right now, and we don't really have enough data
// to satisfy output reliably. Wait.
Zero(output);
return;
}
- if (playout_time <
- stream_time -
- base::TimeDelta::FromSecondsD(output->frames() / rate_ / 2) -
- (running_ ? clock_accuracy_ : base::TimeDelta())) {
+ if (playout_time < stream_time - base::Seconds(output->frames() / rate_ / 2) -
+ (running_ ? clock_accuracy_ : base::TimeDelta())) {
// |playout_time| is too far before the earliest known audio sample.
Zero(output);
return;
@@ -172,7 +167,7 @@ void AudioShifter::Pull(AudioBus* output,
// bias to avoid buffer underruns in the future.
if (bias_.is_zero()) {
bias_ = playout_time - stream_time + clock_accuracy_ +
- base::TimeDelta::FromSecondsD(output->frames() / rate_);
+ base::Seconds(output->frames() / rate_);
}
stream_time += bias_;
} else {
@@ -207,7 +202,7 @@ void AudioShifter::Pull(AudioBus* output,
double slow_ratio = steady_ratio + time_difference / adjustment_time_;
slow_ratio = base::clamp(slow_ratio, 0.9, 1.1);
const base::TimeDelta adjustment_time =
- base::TimeDelta::FromSecondsD(output->frames() / rate_);
+ base::Seconds(output->frames() / rate_);
// This is the ratio we'd need to get perfect sync at the end of the
// current output audiobus.
double fast_ratio = steady_ratio + time_difference / adjustment_time;
@@ -260,7 +255,7 @@ void AudioShifter::ResamplerCallback(int frame_delay, AudioBus* destination) {
if (position_ >= static_cast<size_t>(queue_.front().audio->frames())) {
end_of_last_consumed_audiobus_ =
queue_.front().target_playout_time +
- base::TimeDelta::FromSecondsD(queue_.front().audio->frames() / rate_);
+ base::Seconds(queue_.front().audio->frames() / rate_);
position_ -= queue_.front().audio->frames();
queue_.pop_front();
}
diff --git a/chromium/media/base/audio_shifter_unittest.cc b/chromium/media/base/audio_shifter_unittest.cc
index 42a0e058380..0d651f9736e 100644
--- a/chromium/media/base/audio_shifter_unittest.cc
+++ b/chromium/media/base/audio_shifter_unittest.cc
@@ -22,18 +22,17 @@ class AudioShifterTest :
public ::testing::TestWithParam<::testing::tuple<int, int, int, bool> > {
public:
AudioShifterTest()
- : shifter_(base::TimeDelta::FromMilliseconds(2000),
- base::TimeDelta::FromMilliseconds(3),
- base::TimeDelta::FromMilliseconds(100),
+ : shifter_(base::Milliseconds(2000),
+ base::Milliseconds(3),
+ base::Milliseconds(100),
kSampleRate,
2),
- end2end_latency_(base::TimeDelta::FromMilliseconds(30)),
- playback_latency_(base::TimeDelta::FromMilliseconds(10)),
+ end2end_latency_(base::Milliseconds(30)),
+ playback_latency_(base::Milliseconds(10)),
tag_input_(false),
expect_smooth_output_(true),
input_sample_n_(0),
- output_sample_(0) {
- }
+ output_sample_(0) {}
void SetupInput(int size, base::TimeDelta rate) {
input_size_ = size;
@@ -62,12 +61,10 @@ class AudioShifterTest :
void SetUp() override {
SetupInput(
kInputPacketSize + ::testing::get<0>(GetParam()) - 1,
- base::TimeDelta::FromMicroseconds(
- 1000 + ::testing::get<1>(GetParam()) * 5 - 5));
+ base::Microseconds(1000 + ::testing::get<1>(GetParam()) * 5 - 5));
SetupOutput(
kOutputPacketSize,
- base::TimeDelta::FromMicroseconds(
- 500 + ::testing::get<2>(GetParam()) * 3 - 3));
+ base::Microseconds(500 + ::testing::get<2>(GetParam()) * 3 - 3));
if (::testing::get<3>(GetParam())) {
end2end_latency_ = -end2end_latency_;
}
@@ -89,9 +86,8 @@ class AudioShifterTest :
if (test_output_->channel(0)[j] != 0.0) {
silence = false;
if (test_output_->channel(0)[j] > 3000000.0) {
- marker_outputs_.push_back(
- now_ + playback_latency_ +
- base::TimeDelta::FromSeconds(j) / kSampleRate);
+ marker_outputs_.push_back(now_ + playback_latency_ +
+ base::Seconds(j) / kSampleRate);
} else {
// We don't expect smooth output once we insert a tag,
// or in the very beginning.
@@ -128,8 +124,8 @@ class AudioShifterTest :
if (end2end_latency_ > base::TimeDelta()) {
CHECK(!marker_outputs_.empty());
base::TimeDelta actual_offset = marker_outputs_[0] - expected_mark_time;
- EXPECT_LT(actual_offset, base::TimeDelta::FromMicroseconds(100));
- EXPECT_GT(actual_offset, base::TimeDelta::FromMicroseconds(-100));
+ EXPECT_LT(actual_offset, base::Microseconds(100));
+ EXPECT_GT(actual_offset, base::Microseconds(-100));
} else {
EXPECT_GT(marker_outputs_.size(), 0UL);
}
@@ -174,8 +170,7 @@ TEST_P(AudioShifterTest, TestSyncWithPull) {
expect_smooth_output_ = false;
Run(100);
for (int i = 0; i < 100; i++) {
- shifter_.Pull(test_output_.get(),
- now_ + base::TimeDelta::FromMilliseconds(i));
+ shifter_.Pull(test_output_.get(), now_ + base::Milliseconds(i));
}
RunAndCheckSync(1000);
EXPECT_LE(skip_outputs_.size(), 1UL);
@@ -185,12 +180,10 @@ TEST_P(AudioShifterTest, UnderOverFlow) {
expect_smooth_output_ = false;
SetupInput(
kInputPacketSize + ::testing::get<0>(GetParam()) * 10 - 10,
- base::TimeDelta::FromMicroseconds(
- 1000 + ::testing::get<1>(GetParam()) * 100 - 100));
+ base::Microseconds(1000 + ::testing::get<1>(GetParam()) * 100 - 100));
SetupOutput(
kOutputPacketSize,
- base::TimeDelta::FromMicroseconds(
- 500 + ::testing::get<2>(GetParam()) * 50 - 50));
+ base::Microseconds(500 + ::testing::get<2>(GetParam()) * 50 - 50));
// Sane output is not expected, but let's make sure we don't crash.
Run(1000);
}
diff --git a/chromium/media/base/audio_timestamp_helper.cc b/chromium/media/base/audio_timestamp_helper.cc
index ee87ed2ab45..01ec6be22ef 100644
--- a/chromium/media/base/audio_timestamp_helper.cc
+++ b/chromium/media/base/audio_timestamp_helper.cc
@@ -13,8 +13,8 @@ namespace media {
base::TimeDelta AudioTimestampHelper::FramesToTime(int64_t frames,
int samples_per_second) {
DCHECK_GT(samples_per_second, 0);
- return base::TimeDelta::FromMicroseconds(
- frames * base::Time::kMicrosecondsPerSecond / samples_per_second);
+ return base::Microseconds(frames * base::Time::kMicrosecondsPerSecond /
+ samples_per_second);
}
// static
@@ -82,7 +82,7 @@ base::TimeDelta AudioTimestampHelper::ComputeTimestamp(
DCHECK_GE(frame_count, 0);
DCHECK(base_timestamp_ != kNoTimestamp);
double frames_us = microseconds_per_frame_ * frame_count;
- return base_timestamp_ + base::TimeDelta::FromMicroseconds(frames_us);
+ return base_timestamp_ + base::Microseconds(frames_us);
}
} // namespace media
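The FramesToTime conversion above is plain int64_t arithmetic in microseconds, which is why the unit test below expects exact values at 48 kHz and truncation for a single frame. A short worked example, with the numbers taken from the test cases that follow:

    // frames * base::Time::kMicrosecondsPerSecond / samples_per_second,
    // evaluated in int64_t, so the result truncates toward zero:
    //   750 frames @ 48000 Hz -> 750 * 1000000 / 48000 = 15625 us (exact)
    //     1 frame  @ 48000 Hz ->   1 * 1000000 / 48000 = 20 us (20.833 truncated)
    base::TimeDelta t = media::AudioTimestampHelper::FramesToTime(750, 48000);
    // t == base::Microseconds(15625), matching the expectation in the test below.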
diff --git a/chromium/media/base/audio_timestamp_helper_unittest.cc b/chromium/media/base/audio_timestamp_helper_unittest.cc
index 272e9f728d1..1a70066e1fe 100644
--- a/chromium/media/base/audio_timestamp_helper_unittest.cc
+++ b/chromium/media/base/audio_timestamp_helper_unittest.cc
@@ -35,7 +35,7 @@ class AudioTimestampHelperTest : public ::testing::Test {
int64_t FramesToTarget(int target_in_microseconds) {
return helper_.GetFramesToTarget(
- base::TimeDelta::FromMicroseconds(target_in_microseconds));
+ base::Microseconds(target_in_microseconds));
}
void TestGetFramesToTargetRange(int frame_count, int start, int end) {
@@ -54,49 +54,49 @@ class AudioTimestampHelperTest : public ::testing::Test {
TEST_F(AudioTimestampHelperTest, FramesToTime) {
// Negative value.
- EXPECT_EQ(base::TimeDelta::FromSeconds(-1),
+ EXPECT_EQ(base::Seconds(-1),
AudioTimestampHelper::FramesToTime(-48000, k48kHz));
// Zero.
- EXPECT_EQ(base::TimeDelta::FromMicroseconds(0),
+ EXPECT_EQ(base::Microseconds(0),
AudioTimestampHelper::FramesToTime(0, k48kHz));
// One frame.
- EXPECT_EQ(base::TimeDelta::FromMicroseconds(20),
+ EXPECT_EQ(base::Microseconds(20),
AudioTimestampHelper::FramesToTime(1, k48kHz));
// Exact value with maximum precision of TimeDelta.
- EXPECT_EQ(base::TimeDelta::FromMicroseconds(15625),
+ EXPECT_EQ(base::Microseconds(15625),
AudioTimestampHelper::FramesToTime(750, k48kHz));
// One second.
- EXPECT_EQ(base::TimeDelta::FromSeconds(1),
+ EXPECT_EQ(base::Seconds(1),
AudioTimestampHelper::FramesToTime(48000, k48kHz));
// Argument and return value exceeding 32 bits.
- EXPECT_EQ(base::TimeDelta::FromSeconds(1000000),
+ EXPECT_EQ(base::Seconds(1000000),
AudioTimestampHelper::FramesToTime(48000000000, k48kHz));
}
TEST_F(AudioTimestampHelperTest, TimeToFrames) {
// Negative value.
- EXPECT_EQ(-48000, AudioTimestampHelper::TimeToFrames(
- base::TimeDelta::FromSeconds(-1), k48kHz));
+ EXPECT_EQ(-48000,
+ AudioTimestampHelper::TimeToFrames(base::Seconds(-1), k48kHz));
// Zero.
- EXPECT_EQ(0, AudioTimestampHelper::TimeToFrames(
- base::TimeDelta::FromMicroseconds(0), k48kHz));
+ EXPECT_EQ(0,
+ AudioTimestampHelper::TimeToFrames(base::Microseconds(0), k48kHz));
// Duration of each frame is 20.833 microseconds. The result is rounded to
// integral.
- EXPECT_EQ(0, AudioTimestampHelper::TimeToFrames(
- base::TimeDelta::FromMicroseconds(10), k48kHz));
- EXPECT_EQ(1, AudioTimestampHelper::TimeToFrames(
- base::TimeDelta::FromMicroseconds(20), k48kHz));
- EXPECT_EQ(1, AudioTimestampHelper::TimeToFrames(
- base::TimeDelta::FromMicroseconds(21), k48kHz));
+ EXPECT_EQ(0,
+ AudioTimestampHelper::TimeToFrames(base::Microseconds(10), k48kHz));
+ EXPECT_EQ(1,
+ AudioTimestampHelper::TimeToFrames(base::Microseconds(20), k48kHz));
+ EXPECT_EQ(1,
+ AudioTimestampHelper::TimeToFrames(base::Microseconds(21), k48kHz));
// Exact value with maximum precision of TimeDelta.
- EXPECT_EQ(750, AudioTimestampHelper::TimeToFrames(
- base::TimeDelta::FromMicroseconds(15625), k48kHz));
+ EXPECT_EQ(750, AudioTimestampHelper::TimeToFrames(base::Microseconds(15625),
+ k48kHz));
// One second.
- EXPECT_EQ(48000, AudioTimestampHelper::TimeToFrames(
- base::TimeDelta::FromSeconds(1), k48kHz));
+ EXPECT_EQ(48000,
+ AudioTimestampHelper::TimeToFrames(base::Seconds(1), k48kHz));
// Argument and return value exceeding 32 bits.
- EXPECT_EQ(48000000000, AudioTimestampHelper::TimeToFrames(
- base::TimeDelta::FromSeconds(1000000), k48kHz));
+ EXPECT_EQ(48000000000,
+ AudioTimestampHelper::TimeToFrames(base::Seconds(1000000), k48kHz));
}
TEST_F(AudioTimestampHelperTest, Basic) {
@@ -128,7 +128,7 @@ TEST_F(AudioTimestampHelperTest, Basic) {
TEST_F(AudioTimestampHelperTest, GetDuration) {
- helper_.SetBaseTimestamp(base::TimeDelta::FromMicroseconds(100));
+ helper_.SetBaseTimestamp(base::Microseconds(100));
int frame_count = 5;
int64_t expected_durations[] = {113, 113, 114, 113, 113, 114};
diff --git a/chromium/media/base/bit_reader.h b/chromium/media/base/bit_reader.h
index dd980ff1d99..a5e37ab8bfe 100644
--- a/chromium/media/base/bit_reader.h
+++ b/chromium/media/base/bit_reader.h
@@ -20,6 +20,10 @@ class MEDIA_EXPORT BitReader : private BitReaderCore::ByteStreamProvider {
// Initialize the reader to start reading at |data|, |size| being size
// of |data| in bytes.
BitReader(const uint8_t* data, int size);
+
+ BitReader(const BitReader&) = delete;
+ BitReader& operator=(const BitReader&) = delete;
+
~BitReader() override;
template<typename T> bool ReadBits(int num_bits, T* out) {
@@ -61,8 +65,6 @@ class MEDIA_EXPORT BitReader : private BitReaderCore::ByteStreamProvider {
int bytes_left_;
BitReaderCore bit_reader_core_;
-
- DISALLOW_COPY_AND_ASSIGN(BitReader);
};
} // namespace media
diff --git a/chromium/media/base/bit_reader_core.h b/chromium/media/base/bit_reader_core.h
index 7a9f7497ec0..9e39958585c 100644
--- a/chromium/media/base/bit_reader_core.h
+++ b/chromium/media/base/bit_reader_core.h
@@ -30,6 +30,10 @@ class MEDIA_EXPORT BitReaderCore {
// Lifetime of |byte_stream_provider| must be longer than BitReaderCore.
explicit BitReaderCore(ByteStreamProvider* byte_stream_provider);
+
+ BitReaderCore(const BitReaderCore&) = delete;
+ BitReaderCore& operator=(const BitReaderCore&) = delete;
+
~BitReaderCore();
// Read one bit from the stream and return it as a boolean in |*out|.
@@ -117,8 +121,6 @@ class MEDIA_EXPORT BitReaderCore {
// Note: bits are consumed from MSB to LSB.
int nbits_next_;
uint64_t reg_next_;
-
- DISALLOW_COPY_AND_ASSIGN(BitReaderCore);
};
} // namespace media
diff --git a/chromium/media/base/bitstream_buffer.h b/chromium/media/base/bitstream_buffer.h
index d6c2cea5161..19efcd04018 100644
--- a/chromium/media/base/bitstream_buffer.h
+++ b/chromium/media/base/bitstream_buffer.h
@@ -54,6 +54,9 @@ class MEDIA_EXPORT BitstreamBuffer {
BitstreamBuffer(BitstreamBuffer&&);
BitstreamBuffer& operator=(BitstreamBuffer&&);
+ BitstreamBuffer(const BitstreamBuffer&) = delete;
+ BitstreamBuffer& operator=(const BitstreamBuffer&) = delete;
+
~BitstreamBuffer();
// Produce an equivalent DecoderBuffer. This consumes region(), even if
@@ -130,8 +133,6 @@ class MEDIA_EXPORT BitstreamBuffer {
std::vector<SubsampleEntry> subsamples_; // clear/cypher sizes
friend struct IPC::ParamTraits<media::BitstreamBuffer>;
-
- DISALLOW_COPY_AND_ASSIGN(BitstreamBuffer);
};
} // namespace media
diff --git a/chromium/media/base/byte_queue.h b/chromium/media/base/byte_queue.h
index 23270ac7f3d..528f870a258 100644
--- a/chromium/media/base/byte_queue.h
+++ b/chromium/media/base/byte_queue.h
@@ -24,6 +24,10 @@ namespace media {
class MEDIA_EXPORT ByteQueue {
public:
ByteQueue();
+
+ ByteQueue(const ByteQueue&) = delete;
+ ByteQueue& operator=(const ByteQueue&) = delete;
+
~ByteQueue();
// Reset the queue to the empty state.
@@ -56,8 +60,6 @@ class MEDIA_EXPORT ByteQueue {
int used_ = 0;
std::unique_ptr<uint8_t[]> buffer_;
-
- DISALLOW_COPY_AND_ASSIGN(ByteQueue);
};
} // namespace media
diff --git a/chromium/media/base/callback_registry.h b/chromium/media/base/callback_registry.h
index 737eed8dc13..2089bc11a41 100644
--- a/chromium/media/base/callback_registry.h
+++ b/chromium/media/base/callback_registry.h
@@ -24,10 +24,11 @@ namespace media {
class CallbackRegistration {
public:
CallbackRegistration() = default;
- virtual ~CallbackRegistration() = default;
- private:
- DISALLOW_COPY_AND_ASSIGN(CallbackRegistration);
+ CallbackRegistration(const CallbackRegistration&) = delete;
+ CallbackRegistration& operator=(const CallbackRegistration&) = delete;
+
+ virtual ~CallbackRegistration() = default;
};
template <typename Sig>
@@ -45,6 +46,10 @@ class CallbackRegistry<void(Args...)> {
using CallbackType = base::RepeatingCallback<void(Args...)>;
CallbackRegistry() = default;
+
+ CallbackRegistry(const CallbackRegistry&) = delete;
+ CallbackRegistry& operator=(const CallbackRegistry&) = delete;
+
~CallbackRegistry() = default;
std::unique_ptr<CallbackRegistration> Register(CallbackType cb)
@@ -76,13 +81,14 @@ class CallbackRegistry<void(Args...)> {
uint32_t registration_id)
: registry_(registry), registration_id_(registration_id) {}
+ RegistrationImpl(const RegistrationImpl&) = delete;
+ RegistrationImpl& operator=(const RegistrationImpl&) = delete;
+
~RegistrationImpl() override { registry_->Unregister(registration_id_); }
private:
CallbackRegistry<void(Args...)>* registry_ = nullptr;
uint32_t registration_id_ = 0;
-
- DISALLOW_COPY_AND_ASSIGN(RegistrationImpl);
};
void Unregister(uint32_t registration_id) {
@@ -95,8 +101,6 @@ class CallbackRegistry<void(Args...)> {
base::Lock lock_;
uint32_t next_registration_id_ GUARDED_BY(lock_) = 0;
std::map<uint32_t, CallbackType> callbacks_ GUARDED_BY(lock_);
-
- DISALLOW_COPY_AND_ASSIGN(CallbackRegistry);
};
using ClosureRegistry = CallbackRegistry<void()>;
diff --git a/chromium/media/base/cdm_callback_promise.h b/chromium/media/base/cdm_callback_promise.h
index 48d51c97218..6a2fd08c61d 100644
--- a/chromium/media/base/cdm_callback_promise.h
+++ b/chromium/media/base/cdm_callback_promise.h
@@ -26,6 +26,10 @@ class MEDIA_EXPORT CdmCallbackPromise : public CdmPromiseTemplate<T...> {
public:
CdmCallbackPromise(base::OnceCallback<void(const T&...)> resolve_cb,
PromiseRejectedCB reject_cb);
+
+ CdmCallbackPromise(const CdmCallbackPromise&) = delete;
+ CdmCallbackPromise& operator=(const CdmCallbackPromise&) = delete;
+
virtual ~CdmCallbackPromise();
// CdmPromiseTemplate<T> implementation.
@@ -41,8 +45,6 @@ class MEDIA_EXPORT CdmCallbackPromise : public CdmPromiseTemplate<T...> {
base::OnceCallback<void(const T&...)> resolve_cb_;
PromiseRejectedCB reject_cb_;
-
- DISALLOW_COPY_AND_ASSIGN(CdmCallbackPromise);
};
} // namespace media
diff --git a/chromium/media/base/cdm_context.cc b/chromium/media/base/cdm_context.cc
index fc2901585bb..36cd529132c 100644
--- a/chromium/media/base/cdm_context.cc
+++ b/chromium/media/base/cdm_context.cc
@@ -3,7 +3,6 @@
// found in the LICENSE file.
#include "media/base/cdm_context.h"
-#include "build/chromeos_buildflags.h"
#include "media/base/callback_registry.h"
@@ -53,7 +52,7 @@ FuchsiaCdmContext* CdmContext::GetFuchsiaCdmContext() {
}
#endif
-#if BUILDFLAG(IS_CHROMEOS_ASH)
+#if defined(OS_CHROMEOS)
chromeos::ChromeOsCdmContext* CdmContext::GetChromeOsCdmContext() {
return nullptr;
}
diff --git a/chromium/media/base/cdm_context.h b/chromium/media/base/cdm_context.h
index 366e41d1b2c..53f93b53025 100644
--- a/chromium/media/base/cdm_context.h
+++ b/chromium/media/base/cdm_context.h
@@ -10,12 +10,11 @@
#include "base/memory/scoped_refptr.h"
#include "base/unguessable_token.h"
#include "build/build_config.h"
-#include "build/chromeos_buildflags.h"
#include "media/base/media_export.h"
#include "media/media_buildflags.h"
#include "third_party/abseil-cpp/absl/types/optional.h"
-#if BUILDFLAG(IS_CHROMEOS_ASH)
+#if defined(OS_CHROMEOS)
namespace chromeos {
class ChromeOsCdmContext;
}
@@ -62,6 +61,9 @@ class MEDIA_EXPORT CdmContext {
// Callback to notify the occurrence of an Event.
using EventCB = base::RepeatingCallback<void(Event)>;
+ CdmContext(const CdmContext&) = delete;
+ CdmContext& operator=(const CdmContext&) = delete;
+
virtual ~CdmContext();
// Registers a callback which will be called when an event happens in the CDM.
@@ -124,7 +126,7 @@ class MEDIA_EXPORT CdmContext {
virtual FuchsiaCdmContext* GetFuchsiaCdmContext();
#endif
-#if BUILDFLAG(IS_CHROMEOS_ASH)
+#if defined(OS_CHROMEOS)
// Returns a ChromeOsCdmContext interface when the context is backed by the
// ChromeOS CdmFactoryDaemon. Otherwise return nullptr.
virtual chromeos::ChromeOsCdmContext* GetChromeOsCdmContext();
@@ -132,9 +134,6 @@ class MEDIA_EXPORT CdmContext {
protected:
CdmContext();
-
- private:
- DISALLOW_COPY_AND_ASSIGN(CdmContext);
};
// A reference holder to make sure the CdmContext is always valid as long as
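Note that the cdm_context changes above also drop build/chromeos_buildflags.h and swap the ash-specific BUILDFLAG(IS_CHROMEOS_ASH) guard for the generic OS_CHROMEOS define, presumably to match the build flags available in this Qt-based tree. A minimal sketch of the resulting guard shape; the guarded body is illustrative only:

    #include "build/build_config.h"

    #if defined(OS_CHROMEOS)
    // ChromeOS-only declarations (previously behind BUILDFLAG(IS_CHROMEOS_ASH)).
    namespace chromeos {
    class ChromeOsCdmContext;
    }
    #endif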
diff --git a/chromium/media/base/cdm_factory.h b/chromium/media/base/cdm_factory.h
index 96fcf3a8678..1885f240664 100644
--- a/chromium/media/base/cdm_factory.h
+++ b/chromium/media/base/cdm_factory.h
@@ -28,6 +28,10 @@ struct CdmConfig;
class MEDIA_EXPORT CdmFactory {
public:
CdmFactory();
+
+ CdmFactory(const CdmFactory&) = delete;
+ CdmFactory& operator=(const CdmFactory&) = delete;
+
virtual ~CdmFactory();
// Creates a CDM for |key_system| and returns it through |cdm_created_cb|
@@ -40,9 +44,6 @@ class MEDIA_EXPORT CdmFactory {
const SessionKeysChangeCB& session_keys_change_cb,
const SessionExpirationUpdateCB& session_expiration_update_cb,
CdmCreatedCB cdm_created_cb) = 0;
-
- private:
- DISALLOW_COPY_AND_ASSIGN(CdmFactory);
};
} // namespace media
diff --git a/chromium/media/base/cdm_promise.h b/chromium/media/base/cdm_promise.h
index 4fc35221a85..5920d624b45 100644
--- a/chromium/media/base/cdm_promise.h
+++ b/chromium/media/base/cdm_promise.h
@@ -62,6 +62,10 @@ class MEDIA_EXPORT CdmPromise {
};
CdmPromise() = default;
+
+ CdmPromise(const CdmPromise&) = delete;
+ CdmPromise& operator=(const CdmPromise&) = delete;
+
virtual ~CdmPromise() = default;
// Used to indicate that the operation failed. |exception_code| must be
@@ -75,9 +79,6 @@ class MEDIA_EXPORT CdmPromise {
// Used to determine the template type of CdmPromiseTemplate<T> so that
// saved CdmPromise objects can be cast to the correct templated version.
virtual ResolveParameterType GetResolveParameterType() const = 0;
-
- private:
- DISALLOW_COPY_AND_ASSIGN(CdmPromise);
};
template <typename... T>
@@ -110,15 +111,14 @@ class CdmPromiseTemplate : public CdmPromise {
public:
CdmPromiseTemplate() : is_settled_(false) {}
+ CdmPromiseTemplate(const CdmPromiseTemplate&) = delete;
+ CdmPromiseTemplate& operator=(const CdmPromiseTemplate&) = delete;
+
virtual ~CdmPromiseTemplate() { DCHECK(is_settled_); }
virtual void resolve(const T&... result) = 0;
// CdmPromise implementation.
- virtual void reject(Exception exception_code,
- uint32_t system_code,
- const std::string& error_message) = 0;
-
ResolveParameterType GetResolveParameterType() const final;
protected:
@@ -146,8 +146,6 @@ class CdmPromiseTemplate : public CdmPromise {
private:
// Keep track of whether the promise has been resolved or rejected yet.
bool is_settled_;
-
- DISALLOW_COPY_AND_ASSIGN(CdmPromiseTemplate);
};
// Explicitly defining all variants of GetResolveParameterType().
diff --git a/chromium/media/base/cdm_promise_adapter.h b/chromium/media/base/cdm_promise_adapter.h
index f678df4b340..b301797bb0c 100644
--- a/chromium/media/base/cdm_promise_adapter.h
+++ b/chromium/media/base/cdm_promise_adapter.h
@@ -23,6 +23,10 @@ namespace media {
class MEDIA_EXPORT CdmPromiseAdapter {
public:
CdmPromiseAdapter();
+
+ CdmPromiseAdapter(const CdmPromiseAdapter&) = delete;
+ CdmPromiseAdapter& operator=(const CdmPromiseAdapter&) = delete;
+
~CdmPromiseAdapter();
enum : uint32_t { kInvalidPromiseId = 0 };
@@ -62,7 +66,6 @@ class MEDIA_EXPORT CdmPromiseAdapter {
PromiseMap promises_;
base::ThreadChecker thread_checker_;
- DISALLOW_COPY_AND_ASSIGN(CdmPromiseAdapter);
};
} // namespace media
diff --git a/chromium/media/base/cdm_session_tracker.h b/chromium/media/base/cdm_session_tracker.h
index 504b0e3c4d1..e7bdadad53d 100644
--- a/chromium/media/base/cdm_session_tracker.h
+++ b/chromium/media/base/cdm_session_tracker.h
@@ -19,6 +19,10 @@ namespace media {
class MEDIA_EXPORT CdmSessionTracker {
public:
CdmSessionTracker();
+
+ CdmSessionTracker(const CdmSessionTracker&) = delete;
+ CdmSessionTracker& operator=(const CdmSessionTracker&) = delete;
+
~CdmSessionTracker();
// Adds `session_id` to the list of sessions being tracked.
@@ -37,8 +41,6 @@ class MEDIA_EXPORT CdmSessionTracker {
private:
std::unordered_set<std::string> session_ids_;
-
- DISALLOW_COPY_AND_ASSIGN(CdmSessionTracker);
};
} // namespace media
diff --git a/chromium/media/base/channel_mixer.h b/chromium/media/base/channel_mixer.h
index a7ed6536bb0..20ac9454a44 100644
--- a/chromium/media/base/channel_mixer.h
+++ b/chromium/media/base/channel_mixer.h
@@ -30,6 +30,10 @@ class MEDIA_EXPORT ChannelMixer {
ChannelMixer(ChannelLayout input_layout, ChannelLayout output_layout);
ChannelMixer(const AudioParameters& input, const AudioParameters& output);
+
+ ChannelMixer(const ChannelMixer&) = delete;
+ ChannelMixer& operator=(const ChannelMixer&) = delete;
+
~ChannelMixer();
// Transforms all channels from |input| into |output| channels.
@@ -53,8 +57,6 @@ class MEDIA_EXPORT ChannelMixer {
// Optimization case for when we can simply remap the input channels to output
// channels and don't need to do a multiply-accumulate loop over |matrix_|.
bool remapping_;
-
- DISALLOW_COPY_AND_ASSIGN(ChannelMixer);
};
} // namespace media
diff --git a/chromium/media/base/channel_mixing_matrix.h b/chromium/media/base/channel_mixing_matrix.h
index 64fcdb90228..cca95a73a2a 100644
--- a/chromium/media/base/channel_mixing_matrix.h
+++ b/chromium/media/base/channel_mixing_matrix.h
@@ -20,6 +20,9 @@ class MEDIA_EXPORT ChannelMixingMatrix {
ChannelLayout output_layout,
int output_channels);
+ ChannelMixingMatrix(const ChannelMixingMatrix&) = delete;
+ ChannelMixingMatrix& operator=(const ChannelMixingMatrix&) = delete;
+
~ChannelMixingMatrix();
// Create the transformation matrix of input channels to output channels.
@@ -55,8 +58,6 @@ class MEDIA_EXPORT ChannelMixingMatrix {
// remove the channel from |unaccounted_inputs_|.
void Mix(Channels input_ch, Channels output_ch, float scale);
void MixWithoutAccounting(Channels input_ch, Channels output_ch, float scale);
-
- DISALLOW_COPY_AND_ASSIGN(ChannelMixingMatrix);
};
} // namespace media
diff --git a/chromium/media/base/data_buffer_unittest.cc b/chromium/media/base/data_buffer_unittest.cc
index 5b9e5ac42bf..e4f53040b00 100644
--- a/chromium/media/base/data_buffer_unittest.cc
+++ b/chromium/media/base/data_buffer_unittest.cc
@@ -69,8 +69,8 @@ TEST(DataBufferTest, CreateEOSBuffer) {
TEST(DataBufferTest, Timestamp) {
const base::TimeDelta kZero;
- const base::TimeDelta kTimestampA = base::TimeDelta::FromMicroseconds(1337);
- const base::TimeDelta kTimestampB = base::TimeDelta::FromMicroseconds(1234);
+ const base::TimeDelta kTimestampA = base::Microseconds(1337);
+ const base::TimeDelta kTimestampB = base::Microseconds(1234);
scoped_refptr<DataBuffer> buffer = new DataBuffer(0);
EXPECT_TRUE(buffer->timestamp() == kZero);
@@ -84,8 +84,8 @@ TEST(DataBufferTest, Timestamp) {
TEST(DataBufferTest, Duration) {
const base::TimeDelta kZero;
- const base::TimeDelta kDurationA = base::TimeDelta::FromMicroseconds(1337);
- const base::TimeDelta kDurationB = base::TimeDelta::FromMicroseconds(1234);
+ const base::TimeDelta kDurationA = base::Microseconds(1337);
+ const base::TimeDelta kDurationB = base::Microseconds(1234);
scoped_refptr<DataBuffer> buffer = new DataBuffer(0);
EXPECT_TRUE(buffer->duration() == kZero);
diff --git a/chromium/media/base/data_source.h b/chromium/media/base/data_source.h
index 6ccfebf1eab..9a63c1302a9 100644
--- a/chromium/media/base/data_source.h
+++ b/chromium/media/base/data_source.h
@@ -21,6 +21,10 @@ class MEDIA_EXPORT DataSource {
enum { kReadError = -1, kAborted = -2 };
DataSource();
+
+ DataSource(const DataSource&) = delete;
+ DataSource& operator=(const DataSource&) = delete;
+
virtual ~DataSource();
// Reads |size| bytes from |position| into |data|. And when the read is done
@@ -57,9 +61,6 @@ class MEDIA_EXPORT DataSource {
// By default this just returns GetSize().
virtual int64_t GetMemoryUsage();
-
- private:
- DISALLOW_COPY_AND_ASSIGN(DataSource);
};
} // namespace media
diff --git a/chromium/media/base/decode_status.h b/chromium/media/base/decode_status.h
index d17d8573f71..0f05be295f4 100644
--- a/chromium/media/base/decode_status.h
+++ b/chromium/media/base/decode_status.h
@@ -9,12 +9,11 @@
#include "media/base/decoder_buffer.h"
#include "media/base/media_export.h"
+#include "media/base/status.h"
#include "media/base/status_codes.h"
namespace media {
-class Status;
-
// TODO(crbug.com/1129662): This is temporary, to allow DecodeStatus::OK to
// work, while we replace DecodeStatus with actual status codes.
using DecodeStatus = StatusCode;
@@ -37,6 +36,10 @@ class MEDIA_EXPORT ScopedDecodeTrace {
ScopedDecodeTrace(const char* trace_name,
bool is_key_frame,
base::TimeDelta timestamp);
+
+ ScopedDecodeTrace(const ScopedDecodeTrace&) = delete;
+ ScopedDecodeTrace& operator=(const ScopedDecodeTrace&) = delete;
+
~ScopedDecodeTrace();
// Completes the Decode() trace with the given status.
@@ -45,8 +48,6 @@ class MEDIA_EXPORT ScopedDecodeTrace {
private:
const char* trace_name_;
bool closed_ = false;
-
- DISALLOW_COPY_AND_ASSIGN(ScopedDecodeTrace);
};
} // namespace media
diff --git a/chromium/media/base/decoder_buffer_queue.h b/chromium/media/base/decoder_buffer_queue.h
index 2263234d50c..780d71f6342 100644
--- a/chromium/media/base/decoder_buffer_queue.h
+++ b/chromium/media/base/decoder_buffer_queue.h
@@ -27,6 +27,10 @@ class DecoderBuffer;
class MEDIA_EXPORT DecoderBufferQueue {
public:
DecoderBufferQueue();
+
+ DecoderBufferQueue(const DecoderBufferQueue&) = delete;
+ DecoderBufferQueue& operator=(const DecoderBufferQueue&) = delete;
+
~DecoderBufferQueue();
// Push |buffer| to the end of the queue. If |buffer| is queued out of order
@@ -69,8 +73,6 @@ class MEDIA_EXPORT DecoderBufferQueue {
// Total size in bytes of buffers in the queue.
size_t data_size_;
-
- DISALLOW_COPY_AND_ASSIGN(DecoderBufferQueue);
};
} // namespace media
diff --git a/chromium/media/base/decoder_buffer_queue_unittest.cc b/chromium/media/base/decoder_buffer_queue_unittest.cc
index be2a4b2b3a3..ce233a15dc7 100644
--- a/chromium/media/base/decoder_buffer_queue_unittest.cc
+++ b/chromium/media/base/decoder_buffer_queue_unittest.cc
@@ -12,7 +12,7 @@ namespace media {
static base::TimeDelta ToTimeDelta(int seconds) {
if (seconds < 0)
return kNoTimestamp;
- return base::TimeDelta::FromSeconds(seconds);
+ return base::Seconds(seconds);
}
// Helper to create buffers with specified timestamp in seconds.
diff --git a/chromium/media/base/decoder_factory.h b/chromium/media/base/decoder_factory.h
index fee2f69ff28..7013aae4fae 100644
--- a/chromium/media/base/decoder_factory.h
+++ b/chromium/media/base/decoder_factory.h
@@ -33,6 +33,10 @@ class VideoDecoder;
class MEDIA_EXPORT DecoderFactory {
public:
DecoderFactory();
+
+ DecoderFactory(const DecoderFactory&) = delete;
+ DecoderFactory& operator=(const DecoderFactory&) = delete;
+
virtual ~DecoderFactory();
// Creates audio decoders and append them to the end of |audio_decoders|.
@@ -58,9 +62,6 @@ class MEDIA_EXPORT DecoderFactory {
RequestOverlayInfoCB request_overlay_info_cb,
const gfx::ColorSpace& target_color_space,
std::vector<std::unique_ptr<VideoDecoder>>* video_decoders);
-
- private:
- DISALLOW_COPY_AND_ASSIGN(DecoderFactory);
};
} // namespace media
diff --git a/chromium/media/base/decryptor.h b/chromium/media/base/decryptor.h
index d88aac96da2..bbda9cdf4db 100644
--- a/chromium/media/base/decryptor.h
+++ b/chromium/media/base/decryptor.h
@@ -42,6 +42,10 @@ class MEDIA_EXPORT Decryptor {
enum StreamType { kAudio, kVideo, kStreamTypeMax = kVideo };
Decryptor();
+
+ Decryptor(const Decryptor&) = delete;
+ Decryptor& operator=(const Decryptor&) = delete;
+
virtual ~Decryptor();
// Indicates completion of a decryption operation.
@@ -148,9 +152,6 @@ class MEDIA_EXPORT Decryptor {
// Returns whether or not the decryptor implementation supports decrypt-only.
virtual bool CanAlwaysDecrypt();
-
- private:
- DISALLOW_COPY_AND_ASSIGN(Decryptor);
};
} // namespace media
diff --git a/chromium/media/base/demuxer.h b/chromium/media/base/demuxer.h
index e048fca2d9c..27d36ba4add 100644
--- a/chromium/media/base/demuxer.h
+++ b/chromium/media/base/demuxer.h
@@ -80,6 +80,10 @@ class MEDIA_EXPORT Demuxer : public MediaResource {
};
Demuxer();
+
+ Demuxer(const Demuxer&) = delete;
+ Demuxer& operator=(const Demuxer&) = delete;
+
~Demuxer() override;
// Returns the name of the demuxer for logging purpose.
@@ -165,9 +169,6 @@ class MEDIA_EXPORT Demuxer : public MediaResource {
const std::vector<MediaTrack::Id>& track_ids,
base::TimeDelta curr_time,
TrackChangeCB change_completed_cb) = 0;
-
- private:
- DISALLOW_COPY_AND_ASSIGN(Demuxer);
};
} // namespace media
diff --git a/chromium/media/base/demuxer_memory_limit_cast.cc b/chromium/media/base/demuxer_memory_limit_cast.cc
index a4c2775c686..3020f48727e 100644
--- a/chromium/media/base/demuxer_memory_limit_cast.cc
+++ b/chromium/media/base/demuxer_memory_limit_cast.cc
@@ -17,10 +17,10 @@ size_t GetDemuxerStreamAudioMemoryLimit(
DCHECK(audio_config->IsValidConfig());
switch (audio_config->codec()) {
- case kCodecEAC3:
- case kCodecAC3:
+ case AudioCodec::kEAC3:
+ case AudioCodec::kAC3:
return internal::kDemuxerStreamAudioMemoryLimitMedium;
- case kCodecAAC:
+ case AudioCodec::kAAC:
if (ChannelLayoutToChannelCount(audio_config->channel_layout()) >= 5) {
return internal::kDemuxerStreamAudioMemoryLimitMedium;
}
@@ -42,9 +42,9 @@ size_t GetDemuxerStreamVideoMemoryLimit(
}
DCHECK(video_config->IsValidConfig());
switch (video_config->codec()) {
- case kCodecVP9:
- case kCodecHEVC:
- case kCodecDolbyVision:
+ case VideoCodec::kVP9:
+ case VideoCodec::kHEVC:
+ case VideoCodec::kDolbyVision:
return internal::kDemuxerStreamVideoMemoryLimitMedium;
default:
return internal::kDemuxerStreamVideoMemoryLimitLow;
diff --git a/chromium/media/base/demuxer_memory_limit_cast_unittest.cc b/chromium/media/base/demuxer_memory_limit_cast_unittest.cc
index 4dbe6d4308b..2f150da71dc 100644
--- a/chromium/media/base/demuxer_memory_limit_cast_unittest.cc
+++ b/chromium/media/base/demuxer_memory_limit_cast_unittest.cc
@@ -21,28 +21,28 @@ TEST(DemuxerMemoryLimitCastTest, GetDemuxerStreamAudioMemoryLimit) {
internal::kDemuxerStreamAudioMemoryLimitLow);
AudioDecoderConfig audio_config_opus(
- AudioCodec::kCodecOpus, SampleFormat::kSampleFormatS16,
+ AudioCodec::kOpus, SampleFormat::kSampleFormatS16,
ChannelLayout::CHANNEL_LAYOUT_STEREO, 5000 /* samples_per_second */,
EmptyExtraData(), EncryptionScheme::kUnencrypted);
EXPECT_EQ(GetDemuxerStreamAudioMemoryLimit(&audio_config_opus),
internal::kDemuxerStreamAudioMemoryLimitLow);
AudioDecoderConfig audio_config_ac3(
- AudioCodec::kCodecAC3, SampleFormat::kSampleFormatS16,
+ AudioCodec::kAC3, SampleFormat::kSampleFormatS16,
ChannelLayout::CHANNEL_LAYOUT_STEREO, 5000 /* samples_per_second */,
EmptyExtraData(), EncryptionScheme::kUnencrypted);
EXPECT_EQ(GetDemuxerStreamAudioMemoryLimit(&audio_config_ac3),
internal::kDemuxerStreamAudioMemoryLimitMedium);
AudioDecoderConfig audio_config_aac_1(
- AudioCodec::kCodecAAC, SampleFormat::kSampleFormatS16,
+ AudioCodec::kAAC, SampleFormat::kSampleFormatS16,
ChannelLayout::CHANNEL_LAYOUT_5_0, 5000 /* samples_per_second */,
EmptyExtraData(), EncryptionScheme::kUnencrypted);
EXPECT_EQ(GetDemuxerStreamAudioMemoryLimit(&audio_config_aac_1),
internal::kDemuxerStreamAudioMemoryLimitMedium);
AudioDecoderConfig audio_config_aac_2(
- AudioCodec::kCodecAAC, SampleFormat::kSampleFormatS16,
+ AudioCodec::kAAC, SampleFormat::kSampleFormatS16,
ChannelLayout::CHANNEL_LAYOUT_STEREO, 5000 /* samples_per_second */,
EmptyExtraData(), EncryptionScheme::kUnencrypted);
EXPECT_EQ(GetDemuxerStreamAudioMemoryLimit(&audio_config_aac_2),
@@ -61,7 +61,7 @@ TEST(DemuxerMemoryLimitCastTest, GetDemuxerStreamVideoMemoryLimit) {
internal::kDemuxerStreamVideoMemoryLimitLow);
VideoDecoderConfig video_config(
- kCodecVP8, VIDEO_CODEC_PROFILE_UNKNOWN,
+ VideoCodec::kVP8, VIDEO_CODEC_PROFILE_UNKNOWN,
VideoDecoderConfig::AlphaMode::kIsOpaque, VideoColorSpace(),
kNoTransformation, kCodedSize, kVisibleRect, kNaturalSize,
EmptyExtraData(), EncryptionScheme::kUnencrypted);
@@ -75,7 +75,7 @@ TEST(DemuxerMemoryLimitCastTest, GetDemuxerStreamVideoMemoryLimit) {
Demuxer::DemuxerTypes::kMediaUrlDemuxer, &video_config),
internal::kDemuxerStreamVideoMemoryLimitLow);
- video_config.Initialize(kCodecVP9, VIDEO_CODEC_PROFILE_UNKNOWN,
+ video_config.Initialize(VideoCodec::kVP9, VIDEO_CODEC_PROFILE_UNKNOWN,
VideoDecoderConfig::AlphaMode::kIsOpaque,
VideoColorSpace(), kNoTransformation, kCodedSize,
kVisibleRect, kNaturalSize, EmptyExtraData(),
diff --git a/chromium/media/base/encryption_scheme.cc b/chromium/media/base/encryption_scheme.cc
index 0fd36097378..12728f71c89 100644
--- a/chromium/media/base/encryption_scheme.cc
+++ b/chromium/media/base/encryption_scheme.cc
@@ -9,17 +9,21 @@
namespace media {
-std::ostream& operator<<(std::ostream& os, EncryptionScheme scheme) {
- switch (scheme) {
+std::string GetEncryptionSchemeName(EncryptionScheme encryption_scheme) {
+ switch (encryption_scheme) {
case EncryptionScheme::kUnencrypted:
- return os << "Unencrypted";
+ return "Unencrypted";
case EncryptionScheme::kCenc:
- return os << "CENC";
+ return "CENC";
case EncryptionScheme::kCbcs:
- return os << "CBCS";
+ return "CBCS";
default:
- return os << "Unknown";
+ return "Unknown";
}
}
+std::ostream& operator<<(std::ostream& os, EncryptionScheme encryption_scheme) {
+ return os << GetEncryptionSchemeName(encryption_scheme);
+}
+
} // namespace media
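The refactoring above splits the stream operator so the human-readable name is also available as a plain std::string via the new GetEncryptionSchemeName() helper, with operator<< delegating to it. A small usage sketch, assuming only the declarations added to encryption_scheme.h below; the function name is hypothetical:

    #include <sstream>
    #include <string>
    #include "media/base/encryption_scheme.h"

    void LogScheme() {
      std::string name =
          media::GetEncryptionSchemeName(media::EncryptionScheme::kCenc);  // "CENC"
      std::ostringstream os;
      os << media::EncryptionScheme::kCenc;  // operator<< delegates to the helper.
    }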
diff --git a/chromium/media/base/encryption_scheme.h b/chromium/media/base/encryption_scheme.h
index 00bd931ea17..24a7141f9e0 100644
--- a/chromium/media/base/encryption_scheme.h
+++ b/chromium/media/base/encryption_scheme.h
@@ -6,6 +6,7 @@
#define MEDIA_BASE_ENCRYPTION_SCHEME_H_
#include <iosfwd>
+#include <string>
#include "media/base/media_export.h"
@@ -21,6 +22,10 @@ enum class EncryptionScheme {
};
// For logging use only.
+MEDIA_EXPORT std::string GetEncryptionSchemeName(
+ EncryptionScheme encryption_scheme);
+
+// For logging use only.
MEDIA_EXPORT std::ostream& operator<<(std::ostream& os,
EncryptionScheme encryption_scheme);
diff --git a/chromium/media/base/fake_audio_render_callback.h b/chromium/media/base/fake_audio_render_callback.h
index c62be387809..bc7383c132b 100644
--- a/chromium/media/base/fake_audio_render_callback.h
+++ b/chromium/media/base/fake_audio_render_callback.h
@@ -25,6 +25,10 @@ class FakeAudioRenderCallback
// where x = [|number_of_frames| * m, |number_of_frames| * (m + 1)] and m =
// the number of Render() calls fulfilled thus far.
FakeAudioRenderCallback(double step, int sample_rate);
+
+ FakeAudioRenderCallback(const FakeAudioRenderCallback&) = delete;
+ FakeAudioRenderCallback& operator=(const FakeAudioRenderCallback&) = delete;
+
~FakeAudioRenderCallback() override;
// Renders a sine wave into the provided audio data buffer. If |half_fill_|
@@ -63,8 +67,6 @@ class FakeAudioRenderCallback
int last_channel_count_;
double volume_;
int sample_rate_;
-
- DISALLOW_COPY_AND_ASSIGN(FakeAudioRenderCallback);
};
} // namespace media
diff --git a/chromium/media/base/fake_audio_worker.h b/chromium/media/base/fake_audio_worker.h
index 13ba742c89f..1729c669827 100644
--- a/chromium/media/base/fake_audio_worker.h
+++ b/chromium/media/base/fake_audio_worker.h
@@ -36,6 +36,10 @@ class MEDIA_EXPORT FakeAudioWorker {
FakeAudioWorker(
const scoped_refptr<base::SingleThreadTaskRunner>& worker_task_runner,
const AudioParameters& params);
+
+ FakeAudioWorker(const FakeAudioWorker&) = delete;
+ FakeAudioWorker& operator=(const FakeAudioWorker&) = delete;
+
~FakeAudioWorker();
// Start executing |worker_cb| at a regular intervals. Stop() must be called
@@ -57,8 +61,6 @@ class MEDIA_EXPORT FakeAudioWorker {
// after the call to Stop() (on the main thread) returns.
class Worker;
const scoped_refptr<Worker> worker_;
-
- DISALLOW_COPY_AND_ASSIGN(FakeAudioWorker);
};
} // namespace media
diff --git a/chromium/media/base/fake_audio_worker_unittest.cc b/chromium/media/base/fake_audio_worker_unittest.cc
index e9977e58334..e2e6e2d90dd 100644
--- a/chromium/media/base/fake_audio_worker_unittest.cc
+++ b/chromium/media/base/fake_audio_worker_unittest.cc
@@ -35,11 +35,14 @@ class FakeAudioWorkerTest : public testing::Test {
FakeAudioWorkerTest()
: params_(AudioParameters::AUDIO_FAKE, CHANNEL_LAYOUT_STEREO, 44100, 128),
fake_worker_(task_environment_.GetMainThreadTaskRunner(), params_) {
- time_between_callbacks_ = base::TimeDelta::FromMicroseconds(
+ time_between_callbacks_ = base::Microseconds(
params_.frames_per_buffer() * base::Time::kMicrosecondsPerSecond /
static_cast<float>(params_.sample_rate()));
}
+ FakeAudioWorkerTest(const FakeAudioWorkerTest&) = delete;
+ FakeAudioWorkerTest& operator=(const FakeAudioWorkerTest&) = delete;
+
~FakeAudioWorkerTest() override = default;
void CalledByFakeWorker(base::TimeTicks ideal_time, base::TimeTicks now) {
@@ -93,9 +96,6 @@ class FakeAudioWorkerTest : public testing::Test {
FakeAudioWorker fake_worker_;
base::TimeDelta time_between_callbacks_;
std::vector<base::TimeTicks> callbacks_;
-
- private:
- DISALLOW_COPY_AND_ASSIGN(FakeAudioWorkerTest);
};
TEST_F(FakeAudioWorkerTest, FakeBasicCallback) {
@@ -181,13 +181,17 @@ class FakeAudioWorkerMockTaskTest : public testing::Test {
fake_worker_(task_runner_, params_) {
DCHECK(!global_clock_);
global_clock_ = task_runner_->GetMockTickClock();
- time_between_callbacks_ = base::TimeDelta::FromMicroseconds(
+ time_between_callbacks_ = base::Microseconds(
params_.frames_per_buffer() * base::Time::kMicrosecondsPerSecond /
static_cast<float>(params_.sample_rate()));
clock_overrides_ = std::make_unique<base::subtle::ScopedTimeClockOverrides>(
nullptr, TimeTicksOverride, nullptr);
}
+ FakeAudioWorkerMockTaskTest(const FakeAudioWorkerMockTaskTest&) = delete;
+ FakeAudioWorkerMockTaskTest& operator=(const FakeAudioWorkerMockTaskTest&) =
+ delete;
+
~FakeAudioWorkerMockTaskTest() override { global_clock_ = nullptr; }
void CalledByFakeWorker(base::TimeTicks ideal_time, base::TimeTicks now) {
@@ -225,9 +229,6 @@ class FakeAudioWorkerMockTaskTest : public testing::Test {
DCHECK(global_clock_);
return global_clock_->NowTicks();
}
-
- private:
- DISALLOW_COPY_AND_ASSIGN(FakeAudioWorkerMockTaskTest);
};
const base::TickClock* FakeAudioWorkerMockTaskTest::global_clock_ = nullptr;
diff --git a/chromium/media/base/fake_demuxer_stream.cc b/chromium/media/base/fake_demuxer_stream.cc
index 76865ead473..3577c27cc89 100644
--- a/chromium/media/base/fake_demuxer_stream.cc
+++ b/chromium/media/base/fake_demuxer_stream.cc
@@ -75,8 +75,8 @@ void FakeDemuxerStream::Initialize() {
num_configs_left_ = num_configs_;
num_buffers_left_in_current_config_ = num_buffers_in_one_config_;
num_buffers_returned_ = 0;
- current_timestamp_ = base::TimeDelta::FromMilliseconds(kStartTimestampMs);
- duration_ = base::TimeDelta::FromMilliseconds(kDurationMs);
+ current_timestamp_ = base::Milliseconds(kStartTimestampMs);
+ duration_ = base::Milliseconds(kDurationMs);
next_size_ = start_coded_size_;
next_read_num_ = 0;
}
@@ -172,7 +172,7 @@ void FakeDemuxerStream::SeekToEndOfStream() {
void FakeDemuxerStream::UpdateVideoDecoderConfig() {
const gfx::Rect kVisibleRect(next_size_.width(), next_size_.height());
video_decoder_config_.Initialize(
- kCodecVP8, VIDEO_CODEC_PROFILE_UNKNOWN,
+ VideoCodec::kVP8, VIDEO_CODEC_PROFILE_UNKNOWN,
VideoDecoderConfig::AlphaMode::kIsOpaque, VideoColorSpace(),
kNoTransformation, next_size_, kVisibleRect, next_size_, EmptyExtraData(),
is_encrypted_ ? EncryptionScheme::kCenc : EncryptionScheme::kUnencrypted);
diff --git a/chromium/media/base/fake_demuxer_stream.h b/chromium/media/base/fake_demuxer_stream.h
index 0a0a241d22d..959ab3b3589 100644
--- a/chromium/media/base/fake_demuxer_stream.h
+++ b/chromium/media/base/fake_demuxer_stream.h
@@ -39,6 +39,10 @@ class FakeDemuxerStream : public DemuxerStream {
bool is_encrypted,
gfx::Size start_coded_size,
gfx::Vector2dF coded_size_delta);
+
+ FakeDemuxerStream(const FakeDemuxerStream&) = delete;
+ FakeDemuxerStream& operator=(const FakeDemuxerStream&) = delete;
+
~FakeDemuxerStream() override;
// DemuxerStream implementation.
@@ -116,8 +120,6 @@ class FakeDemuxerStream : public DemuxerStream {
// Zero-based number indicating which read operation should be held. -1 means
// no read shall be held.
int read_to_hold_;
-
- DISALLOW_COPY_AND_ASSIGN(FakeDemuxerStream);
};
class FakeMediaResource : public MediaResource {
@@ -126,6 +128,10 @@ class FakeMediaResource : public MediaResource {
FakeMediaResource(int num_video_configs,
int num_video_buffers_in_one_config,
bool is_video_encrypted);
+
+ FakeMediaResource(const FakeMediaResource&) = delete;
+ FakeMediaResource& operator=(const FakeMediaResource&) = delete;
+
~FakeMediaResource() override;
// MediaResource implementation.
@@ -133,8 +139,6 @@ class FakeMediaResource : public MediaResource {
private:
FakeDemuxerStream fake_video_stream_;
-
- DISALLOW_COPY_AND_ASSIGN(FakeMediaResource);
};
} // namespace media
diff --git a/chromium/media/base/fake_demuxer_stream_unittest.cc b/chromium/media/base/fake_demuxer_stream_unittest.cc
index 5108d788f30..17b2194da2f 100644
--- a/chromium/media/base/fake_demuxer_stream_unittest.cc
+++ b/chromium/media/base/fake_demuxer_stream_unittest.cc
@@ -30,6 +30,10 @@ class FakeDemuxerStreamTest : public testing::Test {
: status_(DemuxerStream::kAborted),
read_pending_(false),
num_buffers_received_(0) {}
+
+ FakeDemuxerStreamTest(const FakeDemuxerStreamTest&) = delete;
+ FakeDemuxerStreamTest& operator=(const FakeDemuxerStreamTest&) = delete;
+
~FakeDemuxerStreamTest() override = default;
void BufferReady(DemuxerStream::Status status,
@@ -188,9 +192,6 @@ class FakeDemuxerStreamTest : public testing::Test {
scoped_refptr<DecoderBuffer> buffer_;
bool read_pending_;
int num_buffers_received_;
-
- private:
- DISALLOW_COPY_AND_ASSIGN(FakeDemuxerStreamTest);
};
TEST_F(FakeDemuxerStreamTest, Read_OneConfig) {
diff --git a/chromium/media/base/fake_single_thread_task_runner.cc b/chromium/media/base/fake_single_thread_task_runner.cc
index f2dcd5f09d5..c050d5ba6d5 100644
--- a/chromium/media/base/fake_single_thread_task_runner.cc
+++ b/chromium/media/base/fake_single_thread_task_runner.cc
@@ -36,8 +36,8 @@ bool FakeSingleThreadTaskRunner::PostDelayedTask(
// scheduling delayed tasks to be run via base::MessageLoop in a
// multi-threaded application.
if (!tasks_.empty()) {
- const auto after_it = tasks_.lower_bound(
- TaskKey(run_time + base::TimeDelta::FromMicroseconds(1), 0));
+ const auto after_it =
+ tasks_.lower_bound(TaskKey(run_time + base::Microseconds(1), 0));
if (after_it != tasks_.begin()) {
auto it = after_it;
--it;
diff --git a/chromium/media/base/fake_text_track_stream.h b/chromium/media/base/fake_text_track_stream.h
index 83b087e2b92..35839a0a0df 100644
--- a/chromium/media/base/fake_text_track_stream.h
+++ b/chromium/media/base/fake_text_track_stream.h
@@ -22,6 +22,10 @@ namespace media {
class FakeTextTrackStream : public DemuxerStream {
public:
FakeTextTrackStream();
+
+ FakeTextTrackStream(const FakeTextTrackStream&) = delete;
+ FakeTextTrackStream& operator=(const FakeTextTrackStream&) = delete;
+
~FakeTextTrackStream() override;
// DemuxerStream implementation.
@@ -48,8 +52,6 @@ class FakeTextTrackStream : public DemuxerStream {
scoped_refptr<base::SingleThreadTaskRunner> task_runner_;
ReadCB read_cb_;
bool stopping_;
-
- DISALLOW_COPY_AND_ASSIGN(FakeTextTrackStream);
};
} // namespace media
diff --git a/chromium/media/base/feedback_signal_accumulator_unittest.cc b/chromium/media/base/feedback_signal_accumulator_unittest.cc
index 2a28fe7d308..d6c11a91673 100644
--- a/chromium/media/base/feedback_signal_accumulator_unittest.cc
+++ b/chromium/media/base/feedback_signal_accumulator_unittest.cc
@@ -11,9 +11,9 @@ namespace media {
class FeedbackSignalAccumulatorTest : public ::testing::Test {
public:
FeedbackSignalAccumulatorTest()
- : half_life_(base::TimeDelta::FromSeconds(1)),
+ : half_life_(base::Seconds(1)),
acc_(half_life_),
- t_(base::TimeTicks() + base::TimeDelta::FromSeconds(120)) {
+ t_(base::TimeTicks() + base::Seconds(120)) {
acc_.Reset(0.0, t_);
}
@@ -45,14 +45,12 @@ TEST_F(FeedbackSignalAccumulatorTest, DoesNotUpdateIfBeforeResetTime) {
ASSERT_EQ(0.0, acc_.current());
ASSERT_EQ(t_, acc_.update_time());
- const base::TimeTicks one_usec_before =
- t_ - base::TimeDelta::FromMicroseconds(1);
+ const base::TimeTicks one_usec_before = t_ - base::Microseconds(1);
ASSERT_FALSE(acc_.Update(1.0, one_usec_before));
ASSERT_EQ(0.0, acc_.current());
ASSERT_EQ(t_, acc_.update_time());
- const base::TimeTicks one_usec_after =
- t_ + base::TimeDelta::FromMicroseconds(1);
+ const base::TimeTicks one_usec_after = t_ + base::Microseconds(1);
ASSERT_TRUE(acc_.Update(1.0, one_usec_after));
ASSERT_LT(0.0, acc_.current());
ASSERT_EQ(one_usec_after, acc_.update_time());
diff --git a/chromium/media/base/frame_rate_estimator_unittest.cc b/chromium/media/base/frame_rate_estimator_unittest.cc
index 4701c3bd03a..2ba25e6ab5d 100644
--- a/chromium/media/base/frame_rate_estimator_unittest.cc
+++ b/chromium/media/base/frame_rate_estimator_unittest.cc
@@ -26,9 +26,7 @@ class FrameRateEstimatorTest : public testing::TestWithParam<FpsPair> {
int low_fps() const { return std::get<0>(GetParam()); }
int high_fps() const { return std::get<1>(GetParam()); }
- base::TimeDelta duration(int fps) {
- return base::TimeDelta::FromSecondsD(1.0 / fps);
- }
+ base::TimeDelta duration(int fps) { return base::Seconds(1.0 / fps); }
FrameRateEstimator estimator_;
};
diff --git a/chromium/media/base/ipc/media_param_traits_macros.h b/chromium/media/base/ipc/media_param_traits_macros.h
index 0d06c006ec0..0343a90c3bc 100644
--- a/chromium/media/base/ipc/media_param_traits_macros.h
+++ b/chromium/media/base/ipc/media_param_traits_macros.h
@@ -53,7 +53,7 @@
IPC_ENUM_TRAITS_MAX_VALUE(blink::WebFullscreenVideoStatus,
blink::WebFullscreenVideoStatus::kMaxValue)
-IPC_ENUM_TRAITS_MAX_VALUE(media::AudioCodec, media::AudioCodec::kAudioCodecMax)
+IPC_ENUM_TRAITS_MAX_VALUE(media::AudioCodec, media::AudioCodec::kMaxValue)
IPC_ENUM_TRAITS_MAX_VALUE(media::AudioCodecProfile,
media::AudioCodecProfile::kMaxValue)
@@ -117,7 +117,7 @@ IPC_ENUM_TRAITS_MAX_VALUE(media::PipelineStatus,
IPC_ENUM_TRAITS_MAX_VALUE(media::SampleFormat, media::kSampleFormatMax)
-IPC_ENUM_TRAITS_MAX_VALUE(media::VideoCodec, media::kVideoCodecMax)
+IPC_ENUM_TRAITS_MAX_VALUE(media::VideoCodec, media::VideoCodec::kMaxValue)
IPC_ENUM_TRAITS_MAX_VALUE(media::WaitingReason, media::WaitingReason::kMaxValue)
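
Both traits now reference the scoped enums' kMaxValue members instead of the retired unscoped sentinels kAudioCodecMax and kVideoCodecMax. The convention the macro relies on, sketched with a hypothetical enum:

// Hypothetical enum following the same convention as media::AudioCodec and
// media::VideoCodec after this change: the last real enumerator is aliased as
// kMaxValue so IPC deserialization can range-check incoming values.
enum class DemoCodec {
  kUnknown = 0,
  kFirst = 1,
  kSecond = 2,
  kMaxValue = kSecond,
};

// In the corresponding *_param_traits_macros.h:
//   IPC_ENUM_TRAITS_MAX_VALUE(DemoCodec, DemoCodec::kMaxValue)
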
diff --git a/chromium/media/base/key_systems.cc b/chromium/media/base/key_systems.cc
index 9bfc335fb03..9205d7a8923 100644
--- a/chromium/media/base/key_systems.cc
+++ b/chromium/media/base/key_systems.cc
@@ -63,19 +63,19 @@ static const MimeTypeToCodecs kMimeTypeToCodecsMap[] = {
EmeCodec ToAudioEmeCodec(AudioCodec codec) {
switch (codec) {
- case kCodecAAC:
+ case AudioCodec::kAAC:
return EME_CODEC_AAC;
- case kCodecVorbis:
+ case AudioCodec::kVorbis:
return EME_CODEC_VORBIS;
- case kCodecFLAC:
+ case AudioCodec::kFLAC:
return EME_CODEC_FLAC;
- case kCodecOpus:
+ case AudioCodec::kOpus:
return EME_CODEC_OPUS;
- case kCodecEAC3:
+ case AudioCodec::kEAC3:
return EME_CODEC_EAC3;
- case kCodecAC3:
+ case AudioCodec::kAC3:
return EME_CODEC_AC3;
- case kCodecMpegHAudio:
+ case AudioCodec::kMpegHAudio:
return EME_CODEC_MPEG_H_AUDIO;
default:
DVLOG(1) << "Unsupported AudioCodec " << codec;
@@ -85,11 +85,11 @@ EmeCodec ToAudioEmeCodec(AudioCodec codec) {
EmeCodec ToVideoEmeCodec(VideoCodec codec, VideoCodecProfile profile) {
switch (codec) {
- case kCodecH264:
+ case VideoCodec::kH264:
return EME_CODEC_AVC1;
- case kCodecVP8:
+ case VideoCodec::kVP8:
return EME_CODEC_VP8;
- case kCodecVP9:
+ case VideoCodec::kVP9:
// ParseVideoCodecString() returns VIDEO_CODEC_PROFILE_UNKNOWN for "vp9"
// and "vp9.0". Since these codecs are essentially the same as profile 0,
// return EME_CODEC_VP9_PROFILE0.
@@ -102,14 +102,14 @@ EmeCodec ToVideoEmeCodec(VideoCodec codec, VideoCodecProfile profile) {
// Profile 1 and 3 not supported by EME. See https://crbug.com/898298.
return EME_CODEC_NONE;
}
- case kCodecHEVC:
+ case VideoCodec::kHEVC:
// Only handle Main and Main10 profiles for HEVC.
if (profile == HEVCPROFILE_MAIN)
return EME_CODEC_HEVC_PROFILE_MAIN;
if (profile == HEVCPROFILE_MAIN10)
return EME_CODEC_HEVC_PROFILE_MAIN10;
return EME_CODEC_NONE;
- case kCodecDolbyVision:
+ case VideoCodec::kDolbyVision:
// Only profiles 0, 4, 5, 7, 8, 9 are valid. Profile 0 and 9 are encoded
// based on AVC while profile 4, 5, 7 and 8 are based on HEVC.
if (profile == DOLBYVISION_PROFILE0 || profile == DOLBYVISION_PROFILE9) {
@@ -122,7 +122,7 @@ EmeCodec ToVideoEmeCodec(VideoCodec codec, VideoCodecProfile profile) {
} else {
return EME_CODEC_NONE;
}
- case kCodecAV1:
+ case VideoCodec::kAV1:
return EME_CODEC_AV1;
default:
DVLOG(1) << "Unsupported VideoCodec " << codec;
@@ -381,7 +381,7 @@ EmeCodec KeySystemsImpl::GetEmeCodecForString(
return iter->second;
if (media_type == EmeMediaType::AUDIO) {
- AudioCodec audio_codec = kUnknownAudioCodec;
+ AudioCodec audio_codec = AudioCodec::kUnknown;
ParseAudioCodecString(container_mime_type, codec_string, &is_ambiguous,
&audio_codec);
DVLOG(3) << "Audio codec = " << audio_codec;
@@ -396,7 +396,7 @@ EmeCodec KeySystemsImpl::GetEmeCodecForString(
// exceptions where we need to know the profile. For example, for VP9, there
// are older CDMs only supporting profile 0, hence EmeCodec differentiate
// between VP9 profile 0 and higher profiles.
- VideoCodec video_codec = kUnknownVideoCodec;
+ VideoCodec video_codec = VideoCodec::kUnknown;
VideoCodecProfile profile = VIDEO_CODEC_PROFILE_UNKNOWN;
uint8_t level = 0;
VideoColorSpace color_space;
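
AudioCodec and VideoCodec are now scoped enum classes, so every bare kCodec* / kUnknown*Codec constant becomes a qualified member, as the hunks above show. A short sketch of the call-site impact; IsLosslessAudio() is a hypothetical helper, while the enumerators are the real ones:

#include "media/base/audio_codecs.h"

bool IsLosslessAudio(media::AudioCodec codec) {
  switch (codec) {
    case media::AudioCodec::kFLAC:  // was: kCodecFLAC
    case media::AudioCodec::kPCM:   // was: kCodecPCM
      return true;
    default:
      return false;
  }
}

// Unknown values are now spelled AudioCodec::kUnknown / VideoCodec::kUnknown
// rather than kUnknownAudioCodec / kUnknownVideoCodec.
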
diff --git a/chromium/media/base/key_systems_unittest.cc b/chromium/media/base/key_systems_unittest.cc
index 4a21bb67c4b..1d696f28a76 100644
--- a/chromium/media/base/key_systems_unittest.cc
+++ b/chromium/media/base/key_systems_unittest.cc
@@ -14,6 +14,7 @@
#include "base/check.h"
#include "base/notreached.h"
+#include "build/build_config.h"
#include "media/base/audio_parameters.h"
#include "media/base/decrypt_config.h"
#include "media/base/eme_constants.h"
diff --git a/chromium/media/base/loopback_audio_converter.h b/chromium/media/base/loopback_audio_converter.h
index 21815f0ac0d..c691f90ded6 100644
--- a/chromium/media/base/loopback_audio_converter.h
+++ b/chromium/media/base/loopback_audio_converter.h
@@ -22,6 +22,9 @@ class LoopbackAudioConverter : public AudioConverter::InputCallback {
const AudioParameters& output_params,
bool disable_fifo);
+ LoopbackAudioConverter(const LoopbackAudioConverter&) = delete;
+ LoopbackAudioConverter& operator=(const LoopbackAudioConverter&) = delete;
+
~LoopbackAudioConverter() override;
void AddInput(AudioConverter::InputCallback* input) {
@@ -38,8 +41,6 @@ class LoopbackAudioConverter : public AudioConverter::InputCallback {
double ProvideInput(AudioBus* audio_bus, uint32_t frames_delayed) override;
AudioConverter audio_converter_;
-
- DISALLOW_COPY_AND_ASSIGN(LoopbackAudioConverter);
};
} // namespace media
diff --git a/chromium/media/base/mac/video_frame_mac_unittests.cc b/chromium/media/base/mac/video_frame_mac_unittests.cc
index f0b6de5857f..1122f447615 100644
--- a/chromium/media/base/mac/video_frame_mac_unittests.cc
+++ b/chromium/media/base/mac/video_frame_mac_unittests.cc
@@ -21,7 +21,7 @@ namespace {
const int kWidth = 64;
const int kHeight = 48;
const int kVisibleRectOffset = 8;
-const base::TimeDelta kTimestamp = base::TimeDelta::FromMicroseconds(1337);
+const base::TimeDelta kTimestamp = base::Microseconds(1337);
struct FormatPair {
VideoPixelFormat chrome;
diff --git a/chromium/media/base/media_content_type.cc b/chromium/media/base/media_content_type.cc
index 3fc666b1f47..2d3ca0b83bf 100644
--- a/chromium/media/base/media_content_type.cc
+++ b/chromium/media/base/media_content_type.cc
@@ -14,7 +14,7 @@ MediaContentType DurationToMediaContentType(base::TimeDelta duration) {
// A zero duration indicates that the duration is unknown. "Persistent" type
// should be used in this case.
return (duration.is_zero() ||
- duration > base::TimeDelta::FromSeconds(kMinimumContentDurationSecs))
+ duration > base::Seconds(kMinimumContentDurationSecs))
? MediaContentType::Persistent
: MediaContentType::Transient;
}
diff --git a/chromium/media/base/media_drm_storage.h b/chromium/media/base/media_drm_storage.h
index d691f232b60..ea71d19d265 100644
--- a/chromium/media/base/media_drm_storage.h
+++ b/chromium/media/base/media_drm_storage.h
@@ -47,6 +47,10 @@ class MEDIA_EXPORT MediaDrmStorage
};
MediaDrmStorage();
+
+ MediaDrmStorage(const MediaDrmStorage&) = delete;
+ MediaDrmStorage& operator=(const MediaDrmStorage&) = delete;
+
virtual ~MediaDrmStorage();
// Callback to return whether the operation succeeded.
@@ -94,9 +98,6 @@ class MEDIA_EXPORT MediaDrmStorage
// to the storage backend.
virtual void RemovePersistentSession(const std::string& session_id,
ResultCB result_cb) = 0;
-
- private:
- DISALLOW_COPY_AND_ASSIGN(MediaDrmStorage);
};
using CreateStorageCB =
diff --git a/chromium/media/base/media_log.h b/chromium/media/base/media_log.h
index 46b52ae5587..4cf55ebd2d1 100644
--- a/chromium/media/base/media_log.h
+++ b/chromium/media/base/media_log.h
@@ -13,6 +13,7 @@
#include <string>
#include <utility>
+#include "base/gtest_prod_util.h"
#include "base/logging.h"
#include "base/macros.h"
#include "base/memory/ref_counted.h"
@@ -53,6 +54,9 @@ class MEDIA_EXPORT MediaLog {
static constexpr size_t kLogLimit = 512;
#endif
+ MediaLog(const MediaLog&) = delete;
+ MediaLog& operator=(const MediaLog&) = delete;
+
// Constructor is protected, see below.
virtual ~MediaLog();
@@ -192,8 +196,6 @@ class MEDIA_EXPORT MediaLog {
// The underlying media log.
scoped_refptr<ParentLogRecord> parent_log_record_;
-
- DISALLOW_COPY_AND_ASSIGN(MediaLog);
};
// Helper class to make it easier to use MediaLog like DVLOG().
diff --git a/chromium/media/base/media_permission.h b/chromium/media/base/media_permission.h
index 4226086ba47..0d0b65c4706 100644
--- a/chromium/media/base/media_permission.h
+++ b/chromium/media/base/media_permission.h
@@ -23,6 +23,10 @@ class MEDIA_EXPORT MediaPermission {
};
MediaPermission();
+
+ MediaPermission(const MediaPermission&) = delete;
+ MediaPermission& operator=(const MediaPermission&) = delete;
+
virtual ~MediaPermission();
// Checks whether |type| is permitted without triggering user interaction
@@ -40,9 +44,6 @@ class MEDIA_EXPORT MediaPermission {
// the use of Clear Key key systems, which is always allowed as required by
// the spec.
virtual bool IsEncryptedMediaEnabled() = 0;
-
- private:
- DISALLOW_COPY_AND_ASSIGN(MediaPermission);
};
} // namespace media
diff --git a/chromium/media/base/media_resource.h b/chromium/media/base/media_resource.h
index 567df727f9a..300b1e6aee2 100644
--- a/chromium/media/base/media_resource.h
+++ b/chromium/media/base/media_resource.h
@@ -30,6 +30,10 @@ class MEDIA_EXPORT MediaResource {
};
MediaResource();
+
+ MediaResource(const MediaResource&) = delete;
+ MediaResource& operator=(const MediaResource&) = delete;
+
virtual ~MediaResource();
virtual MediaResource::Type GetType() const;
@@ -62,9 +66,6 @@ class MEDIA_EXPORT MediaResource {
// This method could be refactored if WMPI was aware of the concrete type of
// Demuxer* it is dealing with.
virtual void ForwardDurationChangeToDemuxerHost(base::TimeDelta duration);
-
- private:
- DISALLOW_COPY_AND_ASSIGN(MediaResource);
};
} // namespace media
diff --git a/chromium/media/base/media_serializers.h b/chromium/media/base/media_serializers.h
index e6c26ab66cf..b44da27a5e0 100644
--- a/chromium/media/base/media_serializers.h
+++ b/chromium/media/base/media_serializers.h
@@ -61,6 +61,16 @@ struct MediaSerializer<std::vector<VecType>> {
}
};
+// Serialize unique pointers
+template <typename T>
+struct MediaSerializer<std::unique_ptr<T>> {
+ static base::Value Serialize(const std::unique_ptr<T>& ptr) {
+ if (!ptr)
+ return base::Value("nullptr");
+ return MediaSerializer<T>::Serialize(*ptr);
+ }
+};
+
// serialize optional types
template <typename OptType>
struct MediaSerializer<absl::optional<OptType>> {
@@ -89,7 +99,18 @@ struct MediaSerializer<double> {
template <>
struct MediaSerializer<int64_t> {
static inline base::Value Serialize(int64_t value) {
- return MediaSerializer<double>::Serialize(static_cast<double>(value));
+ std::stringstream stream;
+ stream << "0x" << std::hex << value;
+ return MediaSerializer<std::string>::Serialize(stream.str());
+ }
+};
+
+template <>
+struct MediaSerializer<uint32_t> {
+ static inline base::Value Serialize(uint32_t value) {
+ std::stringstream stream;
+ stream << "0x" << std::hex << value;
+ return MediaSerializer<std::string>::Serialize(stream.str());
}
};
@@ -419,18 +440,30 @@ struct MediaSerializer<StatusCode> {
};
// Class (complex)
-template <>
-struct MediaSerializer<Status> {
- static base::Value Serialize(const Status& status) {
+template <typename T>
+struct MediaSerializer<TypedStatus<T>> {
+ static base::Value Serialize(const TypedStatus<T>& status) {
+ // TODO: replace this with some kind of static "description"
+ // of the default type, instead of "Ok".
if (status.is_ok())
return base::Value("Ok");
+ return MediaSerialize(status.data_);
+ }
+};
+// Class (complex)
+template <>
+struct MediaSerializer<StatusData> {
+ static base::Value Serialize(const StatusData& status) {
base::Value result(base::Value::Type::DICTIONARY);
- FIELD_SERIALIZE("status_code", status.code());
- FIELD_SERIALIZE("status_message", status.message());
- FIELD_SERIALIZE("stack", status.data_->frames);
- FIELD_SERIALIZE("data", status.data_->data);
- FIELD_SERIALIZE("causes", status.data_->causes);
+ // TODO: replace code with a stringified version, since
+ // this representation will only go to medialog anyway.
+ FIELD_SERIALIZE("code", status.code);
+ FIELD_SERIALIZE("group", status.group);
+ FIELD_SERIALIZE("message", status.message);
+ FIELD_SERIALIZE("stack", status.frames);
+ FIELD_SERIALIZE("data", status.data);
+ FIELD_SERIALIZE("causes", status.causes);
return result;
}
};
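
The new specializations teach the media-log serializer about std::unique_ptr, render 64-bit and 32-bit integers as hex strings, and split the old Status serializer into TypedStatus<T> plus StatusData. A sketch of how the pointer and integer cases compose, assuming MediaSerialize() is the public entry point exercised by the unit test below:

#include <cstdint>
#include <memory>

#include "base/values.h"
#include "media/base/media_serializers.h"

void SerializeExamples() {
  // Integers now serialize as hex strings, e.g. "0x2" instead of 2.0.
  int64_t code = 2;
  base::Value as_hex = media::MediaSerialize(code);

  // unique_ptr forwards to the pointee's serializer; a null pointer becomes
  // the string "nullptr".
  std::unique_ptr<int64_t> boxed = std::make_unique<int64_t>(0xABC);
  base::Value from_pointer = media::MediaSerialize(boxed);

  std::unique_ptr<int64_t> empty;
  base::Value null_marker = media::MediaSerialize(empty);  // "nullptr"
}
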
diff --git a/chromium/media/base/media_serializers_unittest.cc b/chromium/media/base/media_serializers_unittest.cc
index 466155569b8..53407ded0f0 100644
--- a/chromium/media/base/media_serializers_unittest.cc
+++ b/chromium/media/base/media_serializers_unittest.cc
@@ -30,7 +30,7 @@ TEST(MediaSerializersTest, BaseTypes) {
const char* g = "bar";
ASSERT_EQ(ToString(MediaSerialize(a)), "1");
- ASSERT_EQ(ToString(MediaSerialize(b)), "2.0");
+ ASSERT_EQ(ToString(MediaSerialize(b)), "0x2");
ASSERT_EQ(ToString(MediaSerialize(c)), "false");
ASSERT_EQ(ToString(MediaSerialize(d)), "100.0");
ASSERT_EQ(ToString(MediaSerialize(e)), "4523.0");
diff --git a/chromium/media/base/media_switches.cc b/chromium/media/base/media_switches.cc
index 9b32170f896..5f43904c668 100644
--- a/chromium/media/base/media_switches.cc
+++ b/chromium/media/base/media_switches.cc
@@ -218,6 +218,8 @@ MEDIA_EXPORT extern const char kLacrosEnablePlatformHevc[] =
"lacros-enable-platform-hevc";
MEDIA_EXPORT extern const char kLacrosUseChromeosProtectedMedia[] =
"lacros-use-chromeos-protected-media";
+MEDIA_EXPORT extern const char kLacrosUseChromeosProtectedAv1[] =
+ "lacros-use-chromeos-protected-av1";
#endif // defined(OS_CHROMEOS)
namespace autoplay {
@@ -257,6 +259,10 @@ const base::Feature kOverlayFullscreenVideo{"overlay-fullscreen-video",
const base::Feature kEnableMediaInternals{"enable-media-internals",
base::FEATURE_ENABLED_BY_DEFAULT};
+// Enables user control over muting tab audio from the tab strip.
+const base::Feature kEnableTabMuting{"enable-tab-muting",
+ base::FEATURE_DISABLED_BY_DEFAULT};
+
// Enable Picture-in-Picture.
const base::Feature kPictureInPicture {
"PictureInPicture",
@@ -283,6 +289,12 @@ const base::Feature kResumeBackgroundVideo {
#endif
};
+// Experimental: Try to avoid destroying the media player when transferring a
+// media element to a new document. This is a work in progress, and may cause
+// security and/or stability issues.
+const base::Feature kReuseMediaPlayer{"ReuseMediaPlayer",
+ base::FEATURE_DISABLED_BY_DEFAULT};
+
// When enabled, MediaCapabilities will check with GPU Video Accelerator
// Factories to determine isPowerEfficient = true/false.
const base::Feature kMediaCapabilitiesQueryGpuFactories{
@@ -387,23 +399,8 @@ const base::Feature kD3D11VideoDecoderVP9Profile2{
const base::Feature kD3D11VideoDecoderAV1{"D3D11VideoDecoderEnableAV1",
base::FEATURE_ENABLED_BY_DEFAULT};
-// Tell D3D11VideoDecoder not to switch the D3D11 device to multi-threaded mode.
-// This is to help us track down IGD crashes.
-const base::Feature kD3D11VideoDecoderSkipMultithreaded{
- "D3D11VideoDecoderSkipMultithreaded", base::FEATURE_DISABLED_BY_DEFAULT};
-
-// If enabled, D3D11VideoDecoder will always copy instead of bind textures.
-const base::Feature kD3D11VideoDecoderAlwaysCopy{
- "D3D11VideoDecoderAlwaysCopy", base::FEATURE_DISABLED_BY_DEFAULT};
-
-// If enabled, D3D11VideoDecoder may (but is not required to) choose to mark
-// VideoFrames as overlayable.
-const base::Feature kD3D11VideoDecoderAllowOverlay{
- "D3D11VideoDecoderAllowOverlay", base::FEATURE_ENABLED_BY_DEFAULT};
-
-// If enabled, D3D11VideoDecoder will enable HDR support even if the OS doesn't.
-const base::Feature kD3D11VideoDecoderForceEnableHDR{
- "D3D11VideoDecoderForceEnableHDR", base::FEATURE_DISABLED_BY_DEFAULT};
+const base::Feature kD3D11VideoDecoderUseSharedHandle{
+ "D3D11VideoDecoderUseSharedHandle", base::FEATURE_DISABLED_BY_DEFAULT};
// Falls back to other decoders after audio/video decode error happens. The
// implementation may choose different strategies on when to fallback. See
@@ -454,11 +451,6 @@ constexpr base::FeatureParam<kCrosGlobalMediaControlsPinOptions>
kCrosGlobalMediaControlsPinOptions::kHeuristic,
&kCrosGlobalMediaControlsParamOptions);
-// Allow global media controls notifications to be dragged out into overlay
-// notifications. It is no-op if kGlobalMediaControls is not enabled.
-const base::Feature kGlobalMediaControlsOverlayControls{
- "GlobalMediaControlsOverlayControls", base::FEATURE_DISABLED_BY_DEFAULT};
-
// Show picture-in-picture button in Global Media Controls.
const base::Feature kGlobalMediaControlsPictureInPicture {
"GlobalMediaControlsPictureInPicture",
@@ -543,6 +535,9 @@ const base::Feature kVaapiVP9Encoder{"VaapiVP9Encoder",
base::FEATURE_ENABLED_BY_DEFAULT};
#if defined(ARCH_CPU_X86_FAMILY) && BUILDFLAG(IS_CHROMEOS_ASH)
+// Enable H264 temporal layer encoding with HW encoder on ChromeOS.
+const base::Feature kVaapiH264TemporalLayerHWEncoding{
+ "VaapiH264TemporalLayerEncoding", base::FEATURE_ENABLED_BY_DEFAULT};
// Enable VP9 k-SVC decoding with HW decoder for webrtc use case on ChromeOS.
const base::Feature kVaapiVp9kSVCHWDecoding{"VaapiVp9kSVCHWDecoding",
base::FEATURE_ENABLED_BY_DEFAULT};
@@ -924,8 +919,9 @@ bool IsVideoCaptureAcceleratedJpegDecodingEnabled() {
}
#if BUILDFLAG(IS_CHROMEOS_ASH)
return true;
-#endif
+#else
return false;
+#endif
}
bool IsLiveCaptionFeatureEnabled() {
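
media_switches.cc adds kEnableTabMuting and kReuseMediaPlayer (both disabled by default) alongside the Lacros protected-AV1 switch, and retires several D3D11VideoDecoder and overlay-control flags in favor of kD3D11VideoDecoderUseSharedHandle. A sketch of gating on one of the new flags; the call site is hypothetical, only the feature constant comes from this patch:

#include "base/feature_list.h"
#include "media/base/media_switches.h"

bool ShouldShowTabMuteControl() {
  // Off by default; flipped via --enable-features=enable-tab-muting or a
  // field trial.
  return base::FeatureList::IsEnabled(media::kEnableTabMuting);
}
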
diff --git a/chromium/media/base/media_switches.h b/chromium/media/base/media_switches.h
index 4e79508bc27..e09620e4625 100644
--- a/chromium/media/base/media_switches.h
+++ b/chromium/media/base/media_switches.h
@@ -95,6 +95,7 @@ MEDIA_EXPORT extern const char kEnableClearHevcForTesting[];
MEDIA_EXPORT extern const char kLacrosEnablePlatformEncryptedHevc[];
MEDIA_EXPORT extern const char kLacrosEnablePlatformHevc[];
MEDIA_EXPORT extern const char kLacrosUseChromeosProtectedMedia[];
+MEDIA_EXPORT extern const char kLacrosUseChromeosProtectedAv1[];
#endif // defined(OS_CHROMEOS)
namespace autoplay {
@@ -129,11 +130,9 @@ MEDIA_EXPORT extern const base::Feature kD3D11VideoDecoder;
MEDIA_EXPORT extern const base::Feature kD3D11VideoDecoderIgnoreWorkarounds;
MEDIA_EXPORT extern const base::Feature kD3D11VideoDecoderVP9Profile2;
MEDIA_EXPORT extern const base::Feature kD3D11VideoDecoderAV1;
-MEDIA_EXPORT extern const base::Feature kD3D11VideoDecoderSkipMultithreaded;
-MEDIA_EXPORT extern const base::Feature kD3D11VideoDecoderAlwaysCopy;
-MEDIA_EXPORT extern const base::Feature kD3D11VideoDecoderAllowOverlay;
-MEDIA_EXPORT extern const base::Feature kD3D11VideoDecoderForceEnableHDR;
+MEDIA_EXPORT extern const base::Feature kD3D11VideoDecoderUseSharedHandle;
MEDIA_EXPORT extern const base::Feature kEnableMediaInternals;
+MEDIA_EXPORT extern const base::Feature kEnableTabMuting;
MEDIA_EXPORT extern const base::Feature kExposeSwDecodersToWebRTC;
MEDIA_EXPORT extern const base::Feature kExternalClearKeyForTesting;
MEDIA_EXPORT extern const base::Feature kFFmpegDecodeOpaqueVP8;
@@ -144,7 +143,6 @@ MEDIA_EXPORT extern const base::Feature kGlobalMediaControls;
MEDIA_EXPORT extern const base::Feature kGlobalMediaControlsAutoDismiss;
MEDIA_EXPORT extern const base::Feature kGlobalMediaControlsForCast;
MEDIA_EXPORT extern const base::Feature kGlobalMediaControlsForChromeOS;
-MEDIA_EXPORT extern const base::Feature kGlobalMediaControlsOverlayControls;
MEDIA_EXPORT extern const base::Feature kGlobalMediaControlsPictureInPicture;
MEDIA_EXPORT extern const base::Feature kGlobalMediaControlsSeamlessTransfer;
MEDIA_EXPORT extern const base::Feature kGlobalMediaControlsModernUI;
@@ -182,6 +180,7 @@ MEDIA_EXPORT extern const base::Feature kPreloadMetadataSuspend;
MEDIA_EXPORT extern const base::Feature kRecordMediaEngagementScores;
MEDIA_EXPORT extern const base::Feature kRecordWebAudioEngagement;
MEDIA_EXPORT extern const base::Feature kResumeBackgroundVideo;
+MEDIA_EXPORT extern const base::Feature kReuseMediaPlayer;
MEDIA_EXPORT extern const base::Feature kRevokeMediaSourceObjectURLOnAttach;
MEDIA_EXPORT extern const base::Feature kSpeakerChangeDetection;
MEDIA_EXPORT extern const base::Feature kSpecCompliantCanPlayThrough;
@@ -205,6 +204,7 @@ MEDIA_EXPORT extern const base::Feature kVaapiVideoMinResolutionForPerformance;
MEDIA_EXPORT extern const base::Feature kVaapiVP8Encoder;
MEDIA_EXPORT extern const base::Feature kVaapiVP9Encoder;
#if defined(ARCH_CPU_X86_FAMILY) && BUILDFLAG(IS_CHROMEOS_ASH)
+MEDIA_EXPORT extern const base::Feature kVaapiH264TemporalLayerHWEncoding;
MEDIA_EXPORT extern const base::Feature kVaapiVp9kSVCHWDecoding;
MEDIA_EXPORT extern const base::Feature kVaapiVp9kSVCHWEncoding;
#endif // defined(ARCH_CPU_X86_FAMILY) && BUILDFLAG(IS_CHROMEOS_ASH)
diff --git a/chromium/media/base/media_tracks.h b/chromium/media/base/media_tracks.h
index 36fbd2bef71..0df1b05bfd7 100644
--- a/chromium/media/base/media_tracks.h
+++ b/chromium/media/base/media_tracks.h
@@ -23,6 +23,10 @@ class MEDIA_EXPORT MediaTracks {
using MediaTracksCollection = std::vector<std::unique_ptr<MediaTrack>>;
MediaTracks();
+
+ MediaTracks(const MediaTracks&) = delete;
+ MediaTracks& operator=(const MediaTracks&) = delete;
+
~MediaTracks();
// Adds a new audio track. The |bytestreamTrackId| must uniquely identify the
@@ -51,8 +55,6 @@ class MEDIA_EXPORT MediaTracks {
MediaTracksCollection tracks_;
std::map<StreamParser::TrackId, AudioDecoderConfig> audio_configs_;
std::map<StreamParser::TrackId, VideoDecoderConfig> video_configs_;
-
- DISALLOW_COPY_AND_ASSIGN(MediaTracks);
};
} // namespace media
diff --git a/chromium/media/base/media_types.cc b/chromium/media/base/media_types.cc
index ceaddc7f7b6..a33fb235c3f 100644
--- a/chromium/media/base/media_types.cc
+++ b/chromium/media/base/media_types.cc
@@ -28,26 +28,26 @@ VideoType VideoType::FromDecoderConfig(const VideoDecoderConfig& config) {
switch (config.codec()) {
// These have no notion of level.
- case kUnknownVideoCodec:
- case kCodecTheora:
- case kCodecVP8:
+ case VideoCodec::kUnknown:
+ case VideoCodec::kTheora:
+ case VideoCodec::kVP8:
// These use non-numeric levels, aren't part of our mime code, and
// are ancient with very limited support.
- case kCodecVC1:
- case kCodecMPEG2:
- case kCodecMPEG4:
+ case VideoCodec::kVC1:
+ case VideoCodec::kMPEG2:
+ case VideoCodec::kMPEG4:
break;
- case kCodecH264:
- case kCodecVP9:
- case kCodecHEVC:
+ case VideoCodec::kH264:
+ case VideoCodec::kVP9:
+ case VideoCodec::kHEVC:
// 10 is the level_idc for level 1.0.
level = 10;
break;
- case kCodecDolbyVision:
+ case VideoCodec::kDolbyVision:
// Dolby doesn't do decimals, so 1 is just 1.
level = 1;
break;
- case kCodecAV1:
+ case VideoCodec::kAV1:
// Strangely, AV1 starts at 2.0.
level = 20;
break;
diff --git a/chromium/media/base/media_url_demuxer.h b/chromium/media/base/media_url_demuxer.h
index 756ae44ac35..a2ae1b2e4c2 100644
--- a/chromium/media/base/media_url_demuxer.h
+++ b/chromium/media/base/media_url_demuxer.h
@@ -43,6 +43,10 @@ class MEDIA_EXPORT MediaUrlDemuxer : public Demuxer {
const url::Origin& top_frame_origin,
bool allow_credentials,
bool is_hls);
+
+ MediaUrlDemuxer(const MediaUrlDemuxer&) = delete;
+ MediaUrlDemuxer& operator=(const MediaUrlDemuxer&) = delete;
+
~MediaUrlDemuxer() override;
// MediaResource interface.
@@ -76,8 +80,6 @@ class MEDIA_EXPORT MediaUrlDemuxer : public Demuxer {
DemuxerHost* host_;
scoped_refptr<base::SingleThreadTaskRunner> task_runner_;
-
- DISALLOW_COPY_AND_ASSIGN(MediaUrlDemuxer);
};
} // namespace media
diff --git a/chromium/media/base/media_util.h b/chromium/media/base/media_util.h
index fac5af522da..0dac6c60587 100644
--- a/chromium/media/base/media_util.h
+++ b/chromium/media/base/media_util.h
@@ -26,13 +26,14 @@ MEDIA_EXPORT void ReportPepperVideoDecoderOutputPictureCountSW(int height);
class MEDIA_EXPORT NullMediaLog : public media::MediaLog {
public:
NullMediaLog() = default;
+
+ NullMediaLog(const NullMediaLog&) = delete;
+ NullMediaLog& operator=(const NullMediaLog&) = delete;
+
~NullMediaLog() override = default;
void AddLogRecordLocked(
std::unique_ptr<media::MediaLogRecord> event) override {}
-
- private:
- DISALLOW_COPY_AND_ASSIGN(NullMediaLog);
};
} // namespace media
diff --git a/chromium/media/base/memory_dump_provider_proxy.h b/chromium/media/base/memory_dump_provider_proxy.h
index 0bdda75b7c5..d815e15e51f 100644
--- a/chromium/media/base/memory_dump_provider_proxy.h
+++ b/chromium/media/base/memory_dump_provider_proxy.h
@@ -29,6 +29,9 @@ class MEDIA_EXPORT MemoryDumpProviderProxy final
scoped_refptr<base::SingleThreadTaskRunner> task_runner,
MemoryDumpCB dump_cb);
+ MemoryDumpProviderProxy(const MemoryDumpProviderProxy&) = delete;
+ MemoryDumpProviderProxy& operator=(const MemoryDumpProviderProxy&) = delete;
+
~MemoryDumpProviderProxy() override;
bool OnMemoryDump(const base::trace_event::MemoryDumpArgs& args,
@@ -36,8 +39,6 @@ class MEDIA_EXPORT MemoryDumpProviderProxy final
private:
MemoryDumpCB dump_cb_;
-
- DISALLOW_COPY_AND_ASSIGN(MemoryDumpProviderProxy);
};
} // namespace media
diff --git a/chromium/media/base/mime_util_internal.cc b/chromium/media/base/mime_util_internal.cc
index ccfe8e9a653..2a0ee050032 100644
--- a/chromium/media/base/mime_util_internal.cc
+++ b/chromium/media/base/mime_util_internal.cc
@@ -168,51 +168,51 @@ MimeUtil::~MimeUtil() = default;
AudioCodec MimeUtilToAudioCodec(MimeUtil::Codec codec) {
switch (codec) {
case MimeUtil::PCM:
- return kCodecPCM;
+ return AudioCodec::kPCM;
case MimeUtil::MP3:
- return kCodecMP3;
+ return AudioCodec::kMP3;
case MimeUtil::AC3:
- return kCodecAC3;
+ return AudioCodec::kAC3;
case MimeUtil::EAC3:
- return kCodecEAC3;
+ return AudioCodec::kEAC3;
case MimeUtil::MPEG2_AAC:
case MimeUtil::MPEG4_AAC:
case MimeUtil::MPEG4_XHE_AAC:
- return kCodecAAC;
+ return AudioCodec::kAAC;
case MimeUtil::MPEG_H_AUDIO:
- return kCodecMpegHAudio;
+ return AudioCodec::kMpegHAudio;
case MimeUtil::VORBIS:
- return kCodecVorbis;
+ return AudioCodec::kVorbis;
case MimeUtil::OPUS:
- return kCodecOpus;
+ return AudioCodec::kOpus;
case MimeUtil::FLAC:
- return kCodecFLAC;
+ return AudioCodec::kFLAC;
default:
break;
}
- return kUnknownAudioCodec;
+ return AudioCodec::kUnknown;
}
VideoCodec MimeUtilToVideoCodec(MimeUtil::Codec codec) {
switch (codec) {
case MimeUtil::AV1:
- return kCodecAV1;
+ return VideoCodec::kAV1;
case MimeUtil::H264:
- return kCodecH264;
+ return VideoCodec::kH264;
case MimeUtil::HEVC:
- return kCodecHEVC;
+ return VideoCodec::kHEVC;
case MimeUtil::VP8:
- return kCodecVP8;
+ return VideoCodec::kVP8;
case MimeUtil::VP9:
- return kCodecVP9;
+ return VideoCodec::kVP9;
case MimeUtil::THEORA:
- return kCodecTheora;
+ return VideoCodec::kTheora;
case MimeUtil::DOLBY_VISION:
- return kCodecDolbyVision;
+ return VideoCodec::kDolbyVision;
default:
break;
}
- return kUnknownVideoCodec;
+ return VideoCodec::kUnknown;
}
SupportsType MimeUtil::AreSupportedCodecs(
@@ -470,7 +470,7 @@ bool MimeUtil::ParseVideoCodecString(const std::string& mime_type,
*out_level = parsed_results[0].video_level;
*out_color_space = parsed_results[0].video_color_space;
- if (*out_codec == kUnknownVideoCodec) {
+ if (*out_codec == VideoCodec::kUnknown) {
DVLOG(3) << __func__ << " Codec string " << codec_id
<< " is not a VIDEO codec.";
return false;
@@ -504,7 +504,7 @@ bool MimeUtil::ParseAudioCodecString(const std::string& mime_type,
*out_is_ambiguous = parsed_results[0].is_ambiguous;
*out_codec = MimeUtilToAudioCodec(parsed_results[0].codec);
- if (*out_codec == kUnknownAudioCodec) {
+ if (*out_codec == AudioCodec::kUnknown) {
DVLOG(3) << __func__ << " Codec string " << codec_id
<< " is not an AUDIO codec.";
return false;
@@ -774,7 +774,7 @@ bool MimeUtil::ParseCodecHelper(const std::string& mime_type_lower_case,
out_result->codec = itr->second;
// Even "simple" video codecs should have an associated profile.
- if (MimeUtilToVideoCodec(out_result->codec) != kUnknownVideoCodec) {
+ if (MimeUtilToVideoCodec(out_result->codec) != VideoCodec::kUnknown) {
switch (out_result->codec) {
case Codec::VP8:
out_result->video_profile = VP8PROFILE_ANY;
@@ -875,12 +875,12 @@ SupportsType MimeUtil::IsCodecSupported(const std::string& mime_type_lower_case,
DCHECK_NE(codec, INVALID_CODEC);
VideoCodec video_codec = MimeUtilToVideoCodec(codec);
- if (video_codec != kUnknownVideoCodec &&
+ if (video_codec != VideoCodec::kUnknown &&
// Theora and VP8 do not have profiles/levels.
- video_codec != kCodecTheora && video_codec != kCodecVP8 &&
+ video_codec != VideoCodec::kTheora && video_codec != VideoCodec::kVP8 &&
// TODO(dalecurtis): AV1 has levels, but they aren't supported yet;
// http://crbug.com/784993
- video_codec != kCodecAV1) {
+ video_codec != VideoCodec::kAV1) {
DCHECK_NE(video_profile, VIDEO_CODEC_PROFILE_UNKNOWN);
DCHECK_GT(video_level, 0);
}
@@ -911,7 +911,7 @@ SupportsType MimeUtil::IsCodecSupported(const std::string& mime_type_lower_case,
}
AudioCodec audio_codec = MimeUtilToAudioCodec(codec);
- if (audio_codec != kUnknownAudioCodec) {
+ if (audio_codec != AudioCodec::kUnknown) {
AudioCodecProfile audio_profile = AudioCodecProfile::kUnknown;
if (codec == MPEG4_XHE_AAC)
audio_profile = AudioCodecProfile::kXHE_AAC;
@@ -920,7 +920,7 @@ SupportsType MimeUtil::IsCodecSupported(const std::string& mime_type_lower_case,
return IsNotSupported;
}
- if (video_codec != kUnknownVideoCodec) {
+ if (video_codec != VideoCodec::kUnknown) {
if (!IsSupportedVideoType(
{video_codec, video_profile, video_level, color_space})) {
return IsNotSupported;
diff --git a/chromium/media/base/mime_util_internal.h b/chromium/media/base/mime_util_internal.h
index 614d357c5b0..f0cf1a848ff 100644
--- a/chromium/media/base/mime_util_internal.h
+++ b/chromium/media/base/mime_util_internal.h
@@ -11,6 +11,7 @@
#include "base/containers/flat_map.h"
#include "base/containers/flat_set.h"
#include "base/macros.h"
+#include "build/build_config.h"
#include "media/base/media_export.h"
#include "media/base/mime_util.h"
#include "media/base/video_codecs.h"
@@ -25,6 +26,10 @@ namespace internal {
class MEDIA_EXPORT MimeUtil {
public:
MimeUtil();
+
+ MimeUtil(const MimeUtil&) = delete;
+ MimeUtil& operator=(const MimeUtil&) = delete;
+
~MimeUtil();
enum Codec {
@@ -196,8 +201,6 @@ class MEDIA_EXPORT MimeUtil {
// A map of mime_types and hash map of the supported codecs for the mime_type.
MediaFormatMappings media_format_map_;
-
- DISALLOW_COPY_AND_ASSIGN(MimeUtil);
};
} // namespace internal
diff --git a/chromium/media/base/mime_util_unittest.cc b/chromium/media/base/mime_util_unittest.cc
index 5826cdf29ab..4dcafd03f61 100644
--- a/chromium/media/base/mime_util_unittest.cc
+++ b/chromium/media/base/mime_util_unittest.cc
@@ -247,7 +247,7 @@ TEST(MimeUtilTest, ParseVideoCodecString) {
&out_colorspace));
if (kUsePropCodecs) {
EXPECT_FALSE(out_is_ambiguous);
- EXPECT_EQ(kCodecH264, out_codec);
+ EXPECT_EQ(VideoCodec::kH264, out_codec);
EXPECT_EQ(H264PROFILE_BASELINE, out_profile);
EXPECT_EQ(30, out_level);
EXPECT_EQ(VideoColorSpace::REC709(), out_colorspace);
@@ -258,7 +258,7 @@ TEST(MimeUtilTest, ParseVideoCodecString) {
&out_is_ambiguous, &out_codec, &out_profile,
&out_level, &out_colorspace));
EXPECT_FALSE(out_is_ambiguous);
- EXPECT_EQ(kCodecVP9, out_codec);
+ EXPECT_EQ(VideoCodec::kVP9, out_codec);
EXPECT_EQ(VP9PROFILE_PROFILE0, out_profile);
EXPECT_EQ(10, out_level);
EXPECT_EQ(VideoColorSpace::REC709(), out_colorspace);
@@ -268,7 +268,7 @@ TEST(MimeUtilTest, ParseVideoCodecString) {
&out_is_ambiguous, &out_codec, &out_profile,
&out_level, &out_colorspace));
EXPECT_FALSE(out_is_ambiguous);
- EXPECT_EQ(kCodecVP9, out_codec);
+ EXPECT_EQ(VideoCodec::kVP9, out_codec);
EXPECT_EQ(VP9PROFILE_PROFILE2, out_profile);
EXPECT_EQ(10, out_level);
EXPECT_EQ(VideoColorSpace::REC601(), out_colorspace);
@@ -280,7 +280,7 @@ TEST(MimeUtilTest, ParseVideoCodecString) {
&out_profile, &out_level, &out_colorspace));
if (kUsePropCodecs) {
EXPECT_TRUE(out_is_ambiguous);
- EXPECT_EQ(kCodecH264, out_codec);
+ EXPECT_EQ(VideoCodec::kH264, out_codec);
EXPECT_EQ(VIDEO_CODEC_PROFILE_UNKNOWN, out_profile);
EXPECT_EQ(0, out_level);
EXPECT_EQ(VideoColorSpace::REC709(), out_colorspace);
@@ -316,7 +316,7 @@ TEST(MimeUtilTest, ParseVideoCodecString_NoMimeType) {
&out_codec, &out_profile, &out_level,
&out_colorspace));
EXPECT_FALSE(out_is_ambiguous);
- EXPECT_EQ(kCodecH264, out_codec);
+ EXPECT_EQ(VideoCodec::kH264, out_codec);
EXPECT_EQ(H264PROFILE_BASELINE, out_profile);
EXPECT_EQ(30, out_level);
EXPECT_EQ(VideoColorSpace::REC709(), out_colorspace);
@@ -326,7 +326,7 @@ TEST(MimeUtilTest, ParseVideoCodecString_NoMimeType) {
&out_codec, &out_profile, &out_level,
&out_colorspace));
EXPECT_FALSE(out_is_ambiguous);
- EXPECT_EQ(kCodecVP9, out_codec);
+ EXPECT_EQ(VideoCodec::kVP9, out_codec);
EXPECT_EQ(VP9PROFILE_PROFILE0, out_profile);
EXPECT_EQ(10, out_level);
EXPECT_EQ(VideoColorSpace::REC709(), out_colorspace);
@@ -335,7 +335,7 @@ TEST(MimeUtilTest, ParseVideoCodecString_NoMimeType) {
&out_is_ambiguous, &out_codec, &out_profile,
&out_level, &out_colorspace));
EXPECT_FALSE(out_is_ambiguous);
- EXPECT_EQ(kCodecVP9, out_codec);
+ EXPECT_EQ(VideoCodec::kVP9, out_codec);
EXPECT_EQ(VP9PROFILE_PROFILE2, out_profile);
EXPECT_EQ(10, out_level);
EXPECT_EQ(VideoColorSpace::REC601(), out_colorspace);
@@ -344,7 +344,7 @@ TEST(MimeUtilTest, ParseVideoCodecString_NoMimeType) {
EXPECT_TRUE(ParseVideoCodecString("", "avc3", &out_is_ambiguous, &out_codec,
&out_profile, &out_level, &out_colorspace));
EXPECT_TRUE(out_is_ambiguous);
- EXPECT_EQ(kCodecH264, out_codec);
+ EXPECT_EQ(VideoCodec::kH264, out_codec);
EXPECT_EQ(VIDEO_CODEC_PROFILE_UNKNOWN, out_profile);
EXPECT_EQ(0, out_level);
EXPECT_EQ(VideoColorSpace::REC709(), out_colorspace);
@@ -368,7 +368,7 @@ TEST(MimeUtilTest, ParseAudioCodecString) {
EXPECT_TRUE(ParseAudioCodecString("audio/webm", "opus", &out_is_ambiguous,
&out_codec));
EXPECT_FALSE(out_is_ambiguous);
- EXPECT_EQ(kCodecOpus, out_codec);
+ EXPECT_EQ(AudioCodec::kOpus, out_codec);
// Valid AAC string when proprietary codecs are supported.
EXPECT_EQ(kUsePropCodecs,
@@ -376,14 +376,14 @@ TEST(MimeUtilTest, ParseAudioCodecString) {
&out_codec));
if (kUsePropCodecs) {
EXPECT_FALSE(out_is_ambiguous);
- EXPECT_EQ(kCodecAAC, out_codec);
+ EXPECT_EQ(AudioCodec::kAAC, out_codec);
}
// Valid FLAC string with MP4. Neither decoding nor demuxing is proprietary.
EXPECT_TRUE(ParseAudioCodecString("audio/mp4", "flac", &out_is_ambiguous,
&out_codec));
EXPECT_FALSE(out_is_ambiguous);
- EXPECT_EQ(kCodecFLAC, out_codec);
+ EXPECT_EQ(AudioCodec::kFLAC, out_codec);
// Ambiguous AAC string.
// TODO(chcunningha): This can probably be allowed. I think we treat all
@@ -393,20 +393,20 @@ TEST(MimeUtilTest, ParseAudioCodecString) {
&out_codec));
if (kUsePropCodecs) {
EXPECT_TRUE(out_is_ambiguous);
- EXPECT_EQ(kCodecAAC, out_codec);
+ EXPECT_EQ(AudioCodec::kAAC, out_codec);
}
// Valid empty codec string. Codec unambiguously implied by mime type.
EXPECT_TRUE(
ParseAudioCodecString("audio/flac", "", &out_is_ambiguous, &out_codec));
EXPECT_FALSE(out_is_ambiguous);
- EXPECT_EQ(kCodecFLAC, out_codec);
+ EXPECT_EQ(AudioCodec::kFLAC, out_codec);
// Valid audio codec should still be allowed with video mime type.
EXPECT_TRUE(ParseAudioCodecString("video/webm", "opus", &out_is_ambiguous,
&out_codec));
EXPECT_FALSE(out_is_ambiguous);
- EXPECT_EQ(kCodecOpus, out_codec);
+ EXPECT_EQ(AudioCodec::kOpus, out_codec);
// Video codec is not valid for audio API.
EXPECT_FALSE(ParseAudioCodecString("audio/webm", "vp09.00.10.08",
@@ -427,18 +427,18 @@ TEST(MimeUtilTest, ParseAudioCodecString_NoMimeType) {
// Valid Opus string.
EXPECT_TRUE(ParseAudioCodecString("", "opus", &out_is_ambiguous, &out_codec));
EXPECT_FALSE(out_is_ambiguous);
- EXPECT_EQ(kCodecOpus, out_codec);
+ EXPECT_EQ(AudioCodec::kOpus, out_codec);
// Valid AAC string when proprietary codecs are supported.
EXPECT_TRUE(
ParseAudioCodecString("", "mp4a.40.2", &out_is_ambiguous, &out_codec));
EXPECT_FALSE(out_is_ambiguous);
- EXPECT_EQ(kCodecAAC, out_codec);
+ EXPECT_EQ(AudioCodec::kAAC, out_codec);
// Valid FLAC string. Neither decoding nor demuxing is proprietary.
EXPECT_TRUE(ParseAudioCodecString("", "flac", &out_is_ambiguous, &out_codec));
EXPECT_FALSE(out_is_ambiguous);
- EXPECT_EQ(kCodecFLAC, out_codec);
+ EXPECT_EQ(AudioCodec::kFLAC, out_codec);
// Ambiguous AAC string.
// TODO(chcunningha): This can probably be allowed. I think we treat all
@@ -447,7 +447,7 @@ TEST(MimeUtilTest, ParseAudioCodecString_NoMimeType) {
ParseAudioCodecString("", "mp4a.40", &out_is_ambiguous, &out_codec));
if (kUsePropCodecs) {
EXPECT_TRUE(out_is_ambiguous);
- EXPECT_EQ(kCodecAAC, out_codec);
+ EXPECT_EQ(AudioCodec::kAAC, out_codec);
}
// Video codec is not valid for audio API.
@@ -468,26 +468,26 @@ TEST(MimeUtilTest, ParseAudioCodecString_Mp3) {
EXPECT_TRUE(ParseAudioCodecString("audio/mpeg", "mp3", &out_is_ambiguous,
&out_codec));
EXPECT_FALSE(out_is_ambiguous);
- EXPECT_EQ(kCodecMP3, out_codec);
+ EXPECT_EQ(AudioCodec::kMP3, out_codec);
EXPECT_TRUE(
ParseAudioCodecString("audio/mpeg", "", &out_is_ambiguous, &out_codec));
EXPECT_FALSE(out_is_ambiguous);
- EXPECT_EQ(kCodecMP3, out_codec);
+ EXPECT_EQ(AudioCodec::kMP3, out_codec);
EXPECT_TRUE(ParseAudioCodecString("", "mp3", &out_is_ambiguous, &out_codec));
EXPECT_FALSE(out_is_ambiguous);
- EXPECT_EQ(kCodecMP3, out_codec);
+ EXPECT_EQ(AudioCodec::kMP3, out_codec);
EXPECT_TRUE(
ParseAudioCodecString("", "mp4a.69", &out_is_ambiguous, &out_codec));
EXPECT_FALSE(out_is_ambiguous);
- EXPECT_EQ(kCodecMP3, out_codec);
+ EXPECT_EQ(AudioCodec::kMP3, out_codec);
EXPECT_TRUE(
ParseAudioCodecString("", "mp4a.6B", &out_is_ambiguous, &out_codec));
EXPECT_FALSE(out_is_ambiguous);
- EXPECT_EQ(kCodecMP3, out_codec);
+ EXPECT_EQ(AudioCodec::kMP3, out_codec);
}
// These codecs really only have one profile. Ensure that |out_profile| is
@@ -504,7 +504,7 @@ TEST(MimeUtilTest, ParseVideoCodecString_SimpleCodecsHaveProfiles) {
&out_codec, &out_profile, &out_level,
&out_colorspace));
EXPECT_FALSE(out_is_ambiguous);
- EXPECT_EQ(kCodecVP8, out_codec);
+ EXPECT_EQ(VideoCodec::kVP8, out_codec);
EXPECT_EQ(VP8PROFILE_ANY, out_profile);
EXPECT_EQ(0, out_level);
EXPECT_EQ(VideoColorSpace::REC709(), out_colorspace);
@@ -520,7 +520,7 @@ TEST(MimeUtilTest, ParseVideoCodecString_SimpleCodecsHaveProfiles) {
&out_codec, &out_profile, &out_level,
&out_colorspace));
EXPECT_FALSE(out_is_ambiguous);
- EXPECT_EQ(kCodecTheora, out_codec);
+ EXPECT_EQ(VideoCodec::kTheora, out_codec);
EXPECT_EQ(THEORAPROFILE_ANY, out_profile);
EXPECT_EQ(0, out_level);
EXPECT_EQ(VideoColorSpace::REC709(), out_colorspace);
diff --git a/chromium/media/base/mock_demuxer_host.h b/chromium/media/base/mock_demuxer_host.h
index 8cd4894c1fe..818d1f50759 100644
--- a/chromium/media/base/mock_demuxer_host.h
+++ b/chromium/media/base/mock_demuxer_host.h
@@ -15,15 +15,16 @@ namespace media {
class MockDemuxerHost : public DemuxerHost {
public:
MockDemuxerHost();
+
+ MockDemuxerHost(const MockDemuxerHost&) = delete;
+ MockDemuxerHost& operator=(const MockDemuxerHost&) = delete;
+
~MockDemuxerHost() override;
MOCK_METHOD1(OnBufferedTimeRangesChanged,
void(const Ranges<base::TimeDelta>&));
MOCK_METHOD1(SetDuration, void(base::TimeDelta duration));
MOCK_METHOD1(OnDemuxerError, void(PipelineStatus error));
-
- private:
- DISALLOW_COPY_AND_ASSIGN(MockDemuxerHost);
};
} // namespace media
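
MockDemuxerHost follows the same deleted-copy pattern as the rest of the mocks. A sketch of typical test usage; the test itself is hypothetical, while the mocked methods and PIPELINE_ERROR_ABORT are existing API:

#include "base/time/time.h"
#include "media/base/mock_demuxer_host.h"
#include "media/base/pipeline_status.h"
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"

TEST(MockDemuxerHostExample, ReportsDurationAndErrors) {
  media::MockDemuxerHost host;
  EXPECT_CALL(host, SetDuration(base::Seconds(5)));
  EXPECT_CALL(host, OnDemuxerError(media::PIPELINE_ERROR_ABORT));

  // Copying is deleted, so the mock is handed around by pointer or reference.
  media::DemuxerHost* as_interface = &host;
  as_interface->SetDuration(base::Seconds(5));
  as_interface->OnDemuxerError(media::PIPELINE_ERROR_ABORT);
}
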
diff --git a/chromium/media/base/mock_filters.h b/chromium/media/base/mock_filters.h
index 08e089afc8f..f5db5e83556 100644
--- a/chromium/media/base/mock_filters.h
+++ b/chromium/media/base/mock_filters.h
@@ -14,6 +14,7 @@
#include "base/callback.h"
#include "base/macros.h"
#include "build/build_config.h"
+#include "build/chromeos_buildflags.h"
#include "media/base/audio_decoder.h"
#include "media/base/audio_decoder_config.h"
#include "media/base/audio_encoder.h"
@@ -79,6 +80,10 @@ class MockPipelineClient : public Pipeline::Client {
class MockPipeline : public Pipeline {
public:
MockPipeline();
+
+ MockPipeline(const MockPipeline&) = delete;
+ MockPipeline& operator=(const MockPipeline&) = delete;
+
~MockPipeline() override;
void Start(StartType start_type,
@@ -132,9 +137,6 @@ class MockPipeline : public Pipeline {
}
MOCK_METHOD2(OnSetCdm,
void(CdmContext* cdm_context, CdmAttachedCB& cdm_attached_cb));
-
- private:
- DISALLOW_COPY_AND_ASSIGN(MockPipeline);
};
class MockMediaResource : public MediaResource {
@@ -152,6 +154,10 @@ class MockMediaResource : public MediaResource {
class MockDemuxer : public Demuxer {
public:
MockDemuxer();
+
+ MockDemuxer(const MockDemuxer&) = delete;
+ MockDemuxer& operator=(const MockDemuxer&) = delete;
+
~MockDemuxer() override;
// Demuxer implementation.
@@ -184,14 +190,15 @@ class MockDemuxer : public Demuxer {
void(const std::vector<MediaTrack::Id>&,
base::TimeDelta,
TrackChangeCB));
-
- private:
- DISALLOW_COPY_AND_ASSIGN(MockDemuxer);
};
class MockDemuxerStream : public DemuxerStream {
public:
explicit MockDemuxerStream(DemuxerStream::Type type);
+
+ MockDemuxerStream(const MockDemuxerStream&) = delete;
+ MockDemuxerStream& operator=(const MockDemuxerStream&) = delete;
+
~MockDemuxerStream() override;
// DemuxerStream implementation.
@@ -213,8 +220,6 @@ class MockDemuxerStream : public DemuxerStream {
Liveness liveness_;
AudioDecoderConfig audio_decoder_config_;
VideoDecoderConfig video_decoder_config_;
-
- DISALLOW_COPY_AND_ASSIGN(MockDemuxerStream);
};
class MockVideoDecoder : public VideoDecoder {
@@ -227,6 +232,10 @@ class MockVideoDecoder : public VideoDecoder {
MockVideoDecoder(bool is_platform_decoder,
bool supports_decryption,
int decoder_id);
+
+ MockVideoDecoder(const MockVideoDecoder&) = delete;
+ MockVideoDecoder& operator=(const MockVideoDecoder&) = delete;
+
~MockVideoDecoder() override;
// Decoder implementation
@@ -271,12 +280,15 @@ class MockVideoDecoder : public VideoDecoder {
const bool is_platform_decoder_;
const bool supports_decryption_;
const int decoder_id_ = 0;
- DISALLOW_COPY_AND_ASSIGN(MockVideoDecoder);
};
class MockAudioEncoder : public AudioEncoder {
public:
MockAudioEncoder();
+
+ MockAudioEncoder(const MockAudioEncoder&) = delete;
+ MockAudioEncoder& operator=(const MockAudioEncoder&) = delete;
+
~MockAudioEncoder() override;
// AudioEncoder implementation.
@@ -298,14 +310,15 @@ class MockAudioEncoder : public AudioEncoder {
// A function for mocking destructor calls
MOCK_METHOD(void, OnDestruct, ());
-
- private:
- DISALLOW_COPY_AND_ASSIGN(MockAudioEncoder);
};
class MockVideoEncoder : public VideoEncoder {
public:
MockVideoEncoder();
+
+ MockVideoEncoder(const MockVideoEncoder&) = delete;
+ MockVideoEncoder& operator=(const MockVideoEncoder&) = delete;
+
~MockVideoEncoder() override;
// VideoEncoder implementation.
@@ -335,9 +348,6 @@ class MockVideoEncoder : public VideoEncoder {
// A function for mocking destructor calls
MOCK_METHOD(void, Dtor, ());
-
- private:
- DISALLOW_COPY_AND_ASSIGN(MockVideoEncoder);
};
class MockAudioDecoder : public AudioDecoder {
@@ -347,6 +357,10 @@ class MockAudioDecoder : public AudioDecoder {
explicit MockAudioDecoder(bool is_platform_decoder,
bool supports_decryption,
int decoder_id);
+
+ MockAudioDecoder(const MockAudioDecoder&) = delete;
+ MockAudioDecoder& operator=(const MockAudioDecoder&) = delete;
+
~MockAudioDecoder() override;
// Decoder implementation
@@ -382,7 +396,6 @@ class MockAudioDecoder : public AudioDecoder {
const bool is_platform_decoder_;
const bool supports_decryption_;
const int decoder_id_ = 0;
- DISALLOW_COPY_AND_ASSIGN(MockAudioDecoder);
};
class MockRendererClient : public RendererClient {
@@ -410,6 +423,10 @@ class MockRendererClient : public RendererClient {
class MockVideoRenderer : public VideoRenderer {
public:
MockVideoRenderer();
+
+ MockVideoRenderer(const MockVideoRenderer&) = delete;
+ MockVideoRenderer& operator=(const MockVideoRenderer&) = delete;
+
~MockVideoRenderer() override;
// VideoRenderer implementation.
@@ -432,14 +449,15 @@ class MockVideoRenderer : public VideoRenderer {
MOCK_METHOD0(OnTimeStopped, void());
MOCK_METHOD1(SetLatencyHint,
void(absl::optional<base::TimeDelta> latency_hint));
-
- private:
- DISALLOW_COPY_AND_ASSIGN(MockVideoRenderer);
};
class MockAudioRenderer : public AudioRenderer {
public:
MockAudioRenderer();
+
+ MockAudioRenderer(const MockAudioRenderer&) = delete;
+ MockAudioRenderer& operator=(const MockAudioRenderer&) = delete;
+
~MockAudioRenderer() override;
// AudioRenderer implementation.
@@ -462,14 +480,15 @@ class MockAudioRenderer : public AudioRenderer {
void(absl::optional<base::TimeDelta> latency_hint));
MOCK_METHOD1(SetPreservesPitch, void(bool));
MOCK_METHOD1(SetAutoplayInitiated, void(bool));
-
- private:
- DISALLOW_COPY_AND_ASSIGN(MockAudioRenderer);
};
class MockRenderer : public Renderer {
public:
MockRenderer();
+
+ MockRenderer(const MockRenderer&) = delete;
+ MockRenderer& operator=(const MockRenderer&) = delete;
+
~MockRenderer() override;
// Renderer implementation.
@@ -502,14 +521,15 @@ class MockRenderer : public Renderer {
void(std::vector<DemuxerStream*>, base::OnceClosure));
MOCK_METHOD2(OnSelectedAudioTracksChanged,
void(std::vector<DemuxerStream*>, base::OnceClosure));
-
- private:
- DISALLOW_COPY_AND_ASSIGN(MockRenderer);
};
class MockRendererFactory : public RendererFactory {
public:
MockRendererFactory();
+
+ MockRendererFactory(const MockRendererFactory&) = delete;
+ MockRendererFactory& operator=(const MockRendererFactory&) = delete;
+
~MockRendererFactory() override;
// Renderer implementation.
@@ -521,14 +541,15 @@ class MockRendererFactory : public RendererFactory {
VideoRendererSink*,
RequestOverlayInfoCB,
const gfx::ColorSpace&));
-
- private:
- DISALLOW_COPY_AND_ASSIGN(MockRendererFactory);
};
class MockTimeSource : public TimeSource {
public:
MockTimeSource();
+
+ MockTimeSource(const MockTimeSource&) = delete;
+ MockTimeSource& operator=(const MockTimeSource&) = delete;
+
~MockTimeSource() override;
// TimeSource implementation.
@@ -540,14 +561,15 @@ class MockTimeSource : public TimeSource {
MOCK_METHOD2(GetWallClockTimes,
bool(const std::vector<base::TimeDelta>&,
std::vector<base::TimeTicks>*));
-
- private:
- DISALLOW_COPY_AND_ASSIGN(MockTimeSource);
};
class MockTextTrack : public TextTrack {
public:
MockTextTrack();
+
+ MockTextTrack(const MockTextTrack&) = delete;
+ MockTextTrack& operator=(const MockTextTrack&) = delete;
+
~MockTextTrack() override;
MOCK_METHOD5(addWebVTTCue,
@@ -556,9 +578,6 @@ class MockTextTrack : public TextTrack {
const std::string& id,
const std::string& content,
const std::string& settings));
-
- private:
- DISALLOW_COPY_AND_ASSIGN(MockTextTrack);
};
// Mock CDM callbacks.
@@ -600,6 +619,10 @@ class MockCdmClient {
class MockDecryptor : public Decryptor {
public:
MockDecryptor();
+
+ MockDecryptor(const MockDecryptor&) = delete;
+ MockDecryptor& operator=(const MockDecryptor&) = delete;
+
~MockDecryptor() override;
MOCK_METHOD3(Decrypt,
@@ -620,14 +643,15 @@ class MockDecryptor : public Decryptor {
MOCK_METHOD1(ResetDecoder, void(StreamType stream_type));
MOCK_METHOD1(DeinitializeDecoder, void(StreamType stream_type));
MOCK_METHOD0(CanAlwaysDecrypt, bool());
-
- private:
- DISALLOW_COPY_AND_ASSIGN(MockDecryptor);
};
class MockCdmContext : public CdmContext {
public:
MockCdmContext();
+
+ MockCdmContext(const MockCdmContext&) = delete;
+ MockCdmContext& operator=(const MockCdmContext&) = delete;
+
~MockCdmContext() override;
MOCK_METHOD1(RegisterEventCB,
@@ -648,8 +672,6 @@ class MockCdmContext : public CdmContext {
private:
absl::optional<base::UnguessableToken> cdm_id_;
-
- DISALLOW_COPY_AND_ASSIGN(MockCdmContext);
};
class MockCdmPromise : public SimpleCdmPromise {
@@ -657,14 +679,15 @@ class MockCdmPromise : public SimpleCdmPromise {
// |expect_success| is true if resolve() should be called, false if reject()
// is expected.
explicit MockCdmPromise(bool expect_success);
+
+ MockCdmPromise(const MockCdmPromise&) = delete;
+ MockCdmPromise& operator=(const MockCdmPromise&) = delete;
+
~MockCdmPromise() override;
MOCK_METHOD0(resolve, void());
MOCK_METHOD3(reject,
void(CdmPromise::Exception, uint32_t, const std::string&));
-
- private:
- DISALLOW_COPY_AND_ASSIGN(MockCdmPromise);
};
class MockCdmSessionPromise : public NewSessionCdmPromise {
@@ -673,14 +696,15 @@ class MockCdmSessionPromise : public NewSessionCdmPromise {
// is expected. |new_session_id| is updated with the new session's ID on
// resolve().
MockCdmSessionPromise(bool expect_success, std::string* new_session_id);
+
+ MockCdmSessionPromise(const MockCdmSessionPromise&) = delete;
+ MockCdmSessionPromise& operator=(const MockCdmSessionPromise&) = delete;
+
~MockCdmSessionPromise() override;
MOCK_METHOD1(resolve, void(const std::string&));
MOCK_METHOD3(reject,
void(CdmPromise::Exception, uint32_t, const std::string&));
-
- private:
- DISALLOW_COPY_AND_ASSIGN(MockCdmSessionPromise);
};
class MockCdmKeyStatusPromise : public KeyStatusCdmPromise {
@@ -689,14 +713,15 @@ class MockCdmKeyStatusPromise : public KeyStatusCdmPromise {
// is expected. |key_status| is updated with the key status on resolve().
MockCdmKeyStatusPromise(bool expect_success,
CdmKeyInformation::KeyStatus* key_status);
+
+ MockCdmKeyStatusPromise(const MockCdmKeyStatusPromise&) = delete;
+ MockCdmKeyStatusPromise& operator=(const MockCdmKeyStatusPromise&) = delete;
+
~MockCdmKeyStatusPromise() override;
MOCK_METHOD1(resolve, void(const CdmKeyInformation::KeyStatus&));
MOCK_METHOD3(reject,
void(CdmPromise::Exception, uint32_t, const std::string&));
-
- private:
- DISALLOW_COPY_AND_ASSIGN(MockCdmKeyStatusPromise);
};
class MockCdm : public ContentDecryptionModule {
@@ -770,6 +795,10 @@ class MockCdm : public ContentDecryptionModule {
class MockCdmFactory : public CdmFactory {
public:
explicit MockCdmFactory(scoped_refptr<MockCdm> cdm);
+
+ MockCdmFactory(const MockCdmFactory&) = delete;
+ MockCdmFactory& operator=(const MockCdmFactory&) = delete;
+
~MockCdmFactory() override;
// CdmFactory implementation.
@@ -793,13 +822,15 @@ class MockCdmFactory : public CdmFactory {
// Callback to be used before Create() successfully calls |cdm_created_cb|.
base::RepeatingClosure before_creation_cb_;
-
- DISALLOW_COPY_AND_ASSIGN(MockCdmFactory);
};
class MockStreamParser : public StreamParser {
public:
MockStreamParser();
+
+ MockStreamParser(const MockStreamParser&) = delete;
+ MockStreamParser& operator=(const MockStreamParser&) = delete;
+
~MockStreamParser() override;
// StreamParser interface
@@ -816,14 +847,15 @@ class MockStreamParser : public StreamParser {
MOCK_METHOD0(Flush, void());
MOCK_CONST_METHOD0(GetGenerateTimestampsFlag, bool());
MOCK_METHOD2(Parse, bool(const uint8_t*, int));
-
- private:
- DISALLOW_COPY_AND_ASSIGN(MockStreamParser);
};
class MockMediaClient : public media::MediaClient {
public:
MockMediaClient();
+
+ MockMediaClient(const MockMediaClient&) = delete;
+ MockMediaClient& operator=(const MockMediaClient&) = delete;
+
~MockMediaClient() override;
// MediaClient implementation.
@@ -837,9 +869,6 @@ class MockMediaClient : public media::MediaClient {
MOCK_METHOD1(GetAudioRendererAlgorithmParameters,
absl::optional<::media::AudioRendererAlgorithmParameters>(
media::AudioParameters audio_parameters));
-
- private:
- DISALLOW_COPY_AND_ASSIGN(MockMediaClient);
};
} // namespace media
diff --git a/chromium/media/base/mock_media_log.h b/chromium/media/base/mock_media_log.h
index b568f7e1080..87d416975f3 100644
--- a/chromium/media/base/mock_media_log.h
+++ b/chromium/media/base/mock_media_log.h
@@ -81,6 +81,10 @@ MATCHER_P(MatchesPropertyAnyValue, message, "") {
class MockMediaLog : public MediaLog {
public:
MockMediaLog();
+
+ MockMediaLog(const MockMediaLog&) = delete;
+ MockMediaLog& operator=(const MockMediaLog&) = delete;
+
~MockMediaLog() override;
MOCK_METHOD1(DoAddLogRecordLogString, void(const std::string& event));
@@ -105,8 +109,6 @@ class MockMediaLog : public MediaLog {
private:
std::unique_ptr<MediaLogRecord> most_recent_event_;
-
- DISALLOW_COPY_AND_ASSIGN(MockMediaLog);
};
} // namespace media
diff --git a/chromium/media/base/moving_average.cc b/chromium/media/base/moving_average.cc
index 124e0b51046..0779ab6ee3a 100644
--- a/chromium/media/base/moving_average.cc
+++ b/chromium/media/base/moving_average.cc
@@ -45,7 +45,7 @@ base::TimeDelta MovingAverage::Deviation() const {
}
deviation_secs /= size;
- return base::TimeDelta::FromSecondsD(std::sqrt(deviation_secs));
+ return base::Seconds(std::sqrt(deviation_secs));
}
void MovingAverage::Reset() {
diff --git a/chromium/media/base/moving_average.h b/chromium/media/base/moving_average.h
index d1a966e632f..a931edf6173 100644
--- a/chromium/media/base/moving_average.h
+++ b/chromium/media/base/moving_average.h
@@ -23,6 +23,10 @@ class MEDIA_EXPORT MovingAverage {
public:
// Creates a MovingAverage instance with space for |depth| samples.
explicit MovingAverage(size_t depth);
+
+ MovingAverage(const MovingAverage&) = delete;
+ MovingAverage& operator=(const MovingAverage&) = delete;
+
~MovingAverage();
// Adds a new sample to the average; replaces the oldest sample if |depth_|
@@ -59,8 +63,6 @@ class MEDIA_EXPORT MovingAverage {
// Maximum value ever seen.
base::TimeDelta max_ = kNoTimestamp;
-
- DISALLOW_COPY_AND_ASSIGN(MovingAverage);
};
} // namespace media
diff --git a/chromium/media/base/moving_average_unittest.cc b/chromium/media/base/moving_average_unittest.cc
index a195985297d..f3d06d89db1 100644
--- a/chromium/media/base/moving_average_unittest.cc
+++ b/chromium/media/base/moving_average_unittest.cc
@@ -11,25 +11,23 @@ namespace media {
TEST(MovingAverageTest, AverageAndDeviation) {
const int kSamples = 5;
MovingAverage moving_average(kSamples);
- moving_average.AddSample(base::TimeDelta::FromSeconds(1));
- EXPECT_EQ(base::TimeDelta::FromSeconds(1), moving_average.Average());
+ moving_average.AddSample(base::Seconds(1));
+ EXPECT_EQ(base::Seconds(1), moving_average.Average());
EXPECT_EQ(base::TimeDelta(), moving_average.Deviation());
for (int i = 0; i < kSamples - 1; ++i)
- moving_average.AddSample(base::TimeDelta::FromSeconds(1));
- EXPECT_EQ(base::TimeDelta::FromSeconds(1), moving_average.Average());
+ moving_average.AddSample(base::Seconds(1));
+ EXPECT_EQ(base::Seconds(1), moving_average.Average());
EXPECT_EQ(base::TimeDelta(), moving_average.Deviation());
base::TimeDelta expect_deviation[] = {
- base::TimeDelta::FromMicroseconds(200000),
- base::TimeDelta::FromMicroseconds(244948),
- base::TimeDelta::FromMicroseconds(244948),
- base::TimeDelta::FromMicroseconds(200000),
- base::TimeDelta::FromMilliseconds(0),
+ base::Microseconds(200000), base::Microseconds(244948),
+ base::Microseconds(244948), base::Microseconds(200000),
+ base::Milliseconds(0),
};
for (int i = 0; i < kSamples; ++i) {
- moving_average.AddSample(base::TimeDelta::FromMilliseconds(500));
- EXPECT_EQ(base::TimeDelta::FromMilliseconds(1000 - (i + 1) * 100),
+ moving_average.AddSample(base::Milliseconds(500));
+ EXPECT_EQ(base::Milliseconds(1000 - (i + 1) * 100),
moving_average.Average());
EXPECT_EQ(expect_deviation[i], moving_average.Deviation());
}
@@ -37,8 +35,8 @@ TEST(MovingAverageTest, AverageAndDeviation) {
TEST(MovingAverageTest, Reset) {
MovingAverage moving_average(2);
- moving_average.AddSample(base::TimeDelta::FromSeconds(1));
- EXPECT_EQ(base::TimeDelta::FromSeconds(1), moving_average.Average());
+ moving_average.AddSample(base::Seconds(1));
+ EXPECT_EQ(base::Seconds(1), moving_average.Average());
moving_average.Reset();
moving_average.AddSample(base::TimeDelta());
EXPECT_EQ(base::TimeDelta(), moving_average.Average());
@@ -47,9 +45,9 @@ TEST(MovingAverageTest, Reset) {
TEST(MovingAverageTest, MinAndMax) {
MovingAverage moving_average(5);
- base::TimeDelta min = base::TimeDelta::FromSeconds(1);
- base::TimeDelta med = base::TimeDelta::FromSeconds(50);
- base::TimeDelta max = base::TimeDelta::FromSeconds(100);
+ base::TimeDelta min = base::Seconds(1);
+ base::TimeDelta med = base::Seconds(50);
+ base::TimeDelta max = base::Seconds(100);
moving_average.AddSample(min);
moving_average.AddSample(med);
moving_average.AddSample(med);
diff --git a/chromium/media/base/multi_channel_resampler.h b/chromium/media/base/multi_channel_resampler.h
index b455adb4163..fbef7e724f4 100644
--- a/chromium/media/base/multi_channel_resampler.h
+++ b/chromium/media/base/multi_channel_resampler.h
@@ -36,6 +36,10 @@ class MEDIA_EXPORT MultiChannelResampler {
double io_sample_rate_ratio,
size_t request_frames,
const ReadCB read_cb);
+
+ MultiChannelResampler(const MultiChannelResampler&) = delete;
+ MultiChannelResampler& operator=(const MultiChannelResampler&) = delete;
+
virtual ~MultiChannelResampler();
// Resamples |frames| of data from |read_cb_| into AudioBus.
@@ -84,8 +88,6 @@ class MEDIA_EXPORT MultiChannelResampler {
// The number of output frames that have successfully been processed during
// the current Resample() call.
int output_frames_ready_;
-
- DISALLOW_COPY_AND_ASSIGN(MultiChannelResampler);
};
} // namespace media
diff --git a/chromium/media/base/multi_channel_resampler_unittest.cc b/chromium/media/base/multi_channel_resampler_unittest.cc
index 1741286ad4f..a9de77492af 100644
--- a/chromium/media/base/multi_channel_resampler_unittest.cc
+++ b/chromium/media/base/multi_channel_resampler_unittest.cc
@@ -41,6 +41,11 @@ class MultiChannelResamplerTest
MultiChannelResamplerTest()
: last_frame_delay_(-1) {
}
+
+ MultiChannelResamplerTest(const MultiChannelResamplerTest&) = delete;
+ MultiChannelResamplerTest& operator=(const MultiChannelResamplerTest&) =
+ delete;
+
virtual ~MultiChannelResamplerTest() = default;
void InitializeAudioData(int channels, int frames) {
@@ -121,8 +126,6 @@ class MultiChannelResamplerTest
bool fill_junk_values_;
std::unique_ptr<AudioBus> audio_bus_;
int last_frame_delay_;
-
- DISALLOW_COPY_AND_ASSIGN(MultiChannelResamplerTest);
};
TEST_P(MultiChannelResamplerTest, HighLatency) {
diff --git a/chromium/media/base/null_video_sink.h b/chromium/media/base/null_video_sink.h
index 988799c4881..3eb4e32358e 100644
--- a/chromium/media/base/null_video_sink.h
+++ b/chromium/media/base/null_video_sink.h
@@ -30,6 +30,10 @@ class MEDIA_EXPORT NullVideoSink : public VideoRendererSink {
base::TimeDelta interval,
const NewFrameCB& new_frame_cb,
const scoped_refptr<base::SingleThreadTaskRunner>& task_runner);
+
+ NullVideoSink(const NullVideoSink&) = delete;
+ NullVideoSink& operator=(const NullVideoSink&) = delete;
+
~NullVideoSink() override;
// VideoRendererSink implementation.
@@ -86,8 +90,6 @@ class MEDIA_EXPORT NullVideoSink : public VideoRendererSink {
// Value passed to RenderCallback::Render().
bool background_render_;
-
- DISALLOW_COPY_AND_ASSIGN(NullVideoSink);
};
} // namespace media
diff --git a/chromium/media/base/null_video_sink_unittest.cc b/chromium/media/base/null_video_sink_unittest.cc
index 4fb456bc062..9b529249ff7 100644
--- a/chromium/media/base/null_video_sink_unittest.cc
+++ b/chromium/media/base/null_video_sink_unittest.cc
@@ -30,8 +30,12 @@ class NullVideoSinkTest : public testing::Test,
public:
NullVideoSinkTest() {
// Never use null TimeTicks since they have special connotations.
- tick_clock_.Advance(base::TimeDelta::FromMicroseconds(12345));
+ tick_clock_.Advance(base::Microseconds(12345));
}
+
+ NullVideoSinkTest(const NullVideoSinkTest&) = delete;
+ NullVideoSinkTest& operator=(const NullVideoSinkTest&) = delete;
+
~NullVideoSinkTest() override = default;
std::unique_ptr<NullVideoSink> ConstructSink(bool clockless,
@@ -67,12 +71,10 @@ class NullVideoSinkTest : public testing::Test,
protected:
base::test::SingleThreadTaskEnvironment task_environment_;
base::SimpleTestTickClock tick_clock_;
-
- DISALLOW_COPY_AND_ASSIGN(NullVideoSinkTest);
};
TEST_F(NullVideoSinkTest, BasicFunctionality) {
- const base::TimeDelta kInterval = base::TimeDelta::FromMilliseconds(25);
+ const base::TimeDelta kInterval = base::Milliseconds(25);
std::unique_ptr<NullVideoSink> sink = ConstructSink(false, kInterval);
scoped_refptr<VideoFrame> test_frame = CreateFrame(base::TimeDelta());
@@ -125,7 +127,7 @@ TEST_F(NullVideoSinkTest, BasicFunctionality) {
TEST_F(NullVideoSinkTest, ClocklessFunctionality) {
// Construct the sink with a huge interval, it should still complete quickly.
- const base::TimeDelta interval = base::TimeDelta::FromSeconds(10);
+ const base::TimeDelta interval = base::Seconds(10);
std::unique_ptr<NullVideoSink> sink = ConstructSink(true, interval);
scoped_refptr<VideoFrame> test_frame = CreateFrame(base::TimeDelta());
diff --git a/chromium/media/base/offloading_video_encoder.cc b/chromium/media/base/offloading_video_encoder.cc
index 473edb9e727..2b8e0ccaa77 100644
--- a/chromium/media/base/offloading_video_encoder.cc
+++ b/chromium/media/base/offloading_video_encoder.cc
@@ -30,7 +30,8 @@ OffloadingVideoEncoder::OffloadingVideoEncoder(
std::unique_ptr<VideoEncoder> wrapped_encoder)
: OffloadingVideoEncoder(std::move(wrapped_encoder),
base::ThreadPool::CreateSequencedTaskRunner(
- {base::TaskPriority::USER_BLOCKING}),
+ {base::TaskPriority::USER_BLOCKING,
+ base::WithBaseSyncPrimitives()}),
base::SequencedTaskRunnerHandle::Get()) {}
void OffloadingVideoEncoder::Initialize(VideoCodecProfile profile,
diff --git a/chromium/media/base/pipeline_impl.cc b/chromium/media/base/pipeline_impl.cc
index f2ccd2e40f6..3b10cc2117e 100644
--- a/chromium/media/base/pipeline_impl.cc
+++ b/chromium/media/base/pipeline_impl.cc
@@ -59,6 +59,10 @@ class PipelineImpl::RendererWrapper final : public DemuxerHost,
RendererWrapper(scoped_refptr<base::SingleThreadTaskRunner> media_task_runner,
scoped_refptr<base::SingleThreadTaskRunner> main_task_runner,
MediaLog* media_log);
+
+ RendererWrapper(const RendererWrapper&) = delete;
+ RendererWrapper& operator=(const RendererWrapper&) = delete;
+
~RendererWrapper() final;
void Start(StartType start_type,
@@ -233,7 +237,6 @@ class PipelineImpl::RendererWrapper final : public DemuxerHost,
PipelineStatusCB error_cb_;
base::WeakPtrFactory<RendererWrapper> weak_factory_{this};
- DISALLOW_COPY_AND_ASSIGN(RendererWrapper);
};
PipelineImpl::RendererWrapper::RendererWrapper(
@@ -892,12 +895,10 @@ void PipelineImpl::RendererWrapper::OnPipelineError(PipelineStatus error) {
if (status_ != PIPELINE_OK)
return;
- // Don't report pipeline error events to the media log here. The embedder
- // will log this when Client::OnError is called. If the pipeline is already
- // stopped or stopping we also don't want to log any event. In case we are
- // suspending or suspended, the error may be recoverable, so don't propagate
- // it now, instead let the subsequent seek during resume propagate it if
- // it's unrecoverable.
+ // If the pipeline is already stopping or stopped, we don't need to report an
+ // error. Similarly, if the pipeline is suspending or suspended, the error may
+ // be recoverable, so don't propagate it now; instead, let the subsequent seek
+ // during resume propagate it if it's unrecoverable.
if (state_ == kStopping || state_ == kStopped || state_ == kSuspending ||
state_ == kSuspended) {
return;
@@ -980,6 +981,7 @@ void PipelineImpl::RendererWrapper::CompleteSeek(base::TimeDelta seek_time,
}
void PipelineImpl::RendererWrapper::CompleteSuspend(PipelineStatus status) {
+ DVLOG(1) << __func__ << ": status=" << status;
DCHECK(media_task_runner_->BelongsToCurrentThread());
DCHECK_EQ(kSuspending, state_);
@@ -1667,8 +1669,9 @@ void PipelineImpl::OnSeekDone(bool is_suspended) {
seek_time_ = kNoTimestamp;
is_suspended_ = is_suspended;
- DCHECK(seek_cb_);
- std::move(seek_cb_).Run(PIPELINE_OK);
+ // `seek_cb_` could have been reset in OnError().
+ if (seek_cb_)
+ std::move(seek_cb_).Run(PIPELINE_OK);
}
void PipelineImpl::OnSuspendDone() {
@@ -1677,8 +1680,10 @@ void PipelineImpl::OnSuspendDone() {
DCHECK(IsRunning());
is_suspended_ = true;
- DCHECK(suspend_cb_);
- std::move(suspend_cb_).Run(PIPELINE_OK);
+
+ // `suspend_cb_` could have been reset in OnError().
+ if (suspend_cb_)
+ std::move(suspend_cb_).Run(PIPELINE_OK);
}
} // namespace media
diff --git a/chromium/media/base/pipeline_impl.h b/chromium/media/base/pipeline_impl.h
index 2dcd76334f1..a933e2f12c6 100644
--- a/chromium/media/base/pipeline_impl.h
+++ b/chromium/media/base/pipeline_impl.h
@@ -86,6 +86,10 @@ class MEDIA_EXPORT PipelineImpl : public Pipeline {
scoped_refptr<base::SingleThreadTaskRunner> main_task_runner,
CreateRendererCB create_renderer_cb,
MediaLog* media_log);
+
+ PipelineImpl(const PipelineImpl&) = delete;
+ PipelineImpl& operator=(const PipelineImpl&) = delete;
+
~PipelineImpl() override;
// Pipeline implementation.
@@ -215,8 +219,6 @@ class MEDIA_EXPORT PipelineImpl : public Pipeline {
base::ThreadChecker thread_checker_;
base::WeakPtrFactory<PipelineImpl> weak_factory_{this};
-
- DISALLOW_COPY_AND_ASSIGN(PipelineImpl);
};
} // namespace media
diff --git a/chromium/media/base/pipeline_impl_unittest.cc b/chromium/media/base/pipeline_impl_unittest.cc
index d5f071b9b65..c5fc84fd668 100644
--- a/chromium/media/base/pipeline_impl_unittest.cc
+++ b/chromium/media/base/pipeline_impl_unittest.cc
@@ -90,6 +90,10 @@ class PipelineImplTest : public ::testing::Test {
class CallbackHelper : public MockPipelineClient {
public:
CallbackHelper() = default;
+
+ CallbackHelper(const CallbackHelper&) = delete;
+ CallbackHelper& operator=(const CallbackHelper&) = delete;
+
virtual ~CallbackHelper() = default;
MOCK_METHOD1(OnStart, void(PipelineStatus));
@@ -97,9 +101,6 @@ class PipelineImplTest : public ::testing::Test {
MOCK_METHOD1(OnSuspend, void(PipelineStatus));
MOCK_METHOD1(OnResume, void(PipelineStatus));
MOCK_METHOD1(OnCdmAttached, void(bool));
-
- private:
- DISALLOW_COPY_AND_ASSIGN(CallbackHelper);
};
PipelineImplTest()
@@ -131,6 +132,9 @@ class PipelineImplTest : public ::testing::Test {
EXPECT_CALL(*renderer_, SetPreservesPitch(true)).Times(AnyNumber());
}
+ PipelineImplTest(const PipelineImplTest&) = delete;
+ PipelineImplTest& operator=(const PipelineImplTest&) = delete;
+
~PipelineImplTest() override {
if (pipeline_->IsRunning()) {
ExpectDemuxerStop();
@@ -157,7 +161,7 @@ class PipelineImplTest : public ::testing::Test {
void SetDemuxerExpectations() {
// Initialize with a default non-zero duration.
- SetDemuxerExpectations(base::TimeDelta::FromSeconds(10));
+ SetDemuxerExpectations(base::Seconds(10));
}
std::unique_ptr<StrictMock<MockDemuxerStream>> CreateStream(
@@ -366,9 +370,6 @@ class PipelineImplTest : public ::testing::Test {
VideoDecoderConfig video_decoder_config_;
PipelineMetadata metadata_;
base::TimeDelta start_time_;
-
- private:
- DISALLOW_COPY_AND_ASSIGN(PipelineImplTest);
};
// Test that playback controls methods can be set even before the pipeline is
@@ -437,7 +438,7 @@ TEST_F(PipelineImplTest, StartThenStopImmediately) {
TEST_F(PipelineImplTest, StartSuspendedAndResumeAudioOnly) {
CreateAudioStream();
- SetDemuxerExpectations(base::TimeDelta::FromSeconds(3000));
+ SetDemuxerExpectations(base::Seconds(3000));
StartPipelineAndExpect(PIPELINE_OK,
Pipeline::StartType::kSuspendAfterMetadataForAudioOnly,
@@ -445,14 +446,14 @@ TEST_F(PipelineImplTest, StartSuspendedAndResumeAudioOnly) {
ASSERT_TRUE(pipeline_->IsSuspended());
ResetRenderer();
- base::TimeDelta expected = base::TimeDelta::FromSeconds(2000);
+ base::TimeDelta expected = base::Seconds(2000);
ExpectResume(expected);
DoResume(expected);
}
TEST_F(PipelineImplTest, StartSuspendedAndResumeAudioVideo) {
CreateAudioAndVideoStream();
- SetDemuxerExpectations(base::TimeDelta::FromSeconds(3000));
+ SetDemuxerExpectations(base::Seconds(3000));
StartPipelineAndExpect(PIPELINE_OK,
Pipeline::StartType::kSuspendAfterMetadata,
@@ -460,14 +461,14 @@ TEST_F(PipelineImplTest, StartSuspendedAndResumeAudioVideo) {
ASSERT_TRUE(pipeline_->IsSuspended());
ResetRenderer();
- base::TimeDelta expected = base::TimeDelta::FromSeconds(2000);
+ base::TimeDelta expected = base::Seconds(2000);
ExpectResume(expected);
DoResume(expected);
}
TEST_F(PipelineImplTest, StartSuspendedFailsOnVideoWithAudioOnlyExpectation) {
CreateAudioAndVideoStream();
- SetDemuxerExpectations(base::TimeDelta::FromSeconds(3000));
+ SetDemuxerExpectations(base::Seconds(3000));
// StartType kSuspendAfterMetadataForAudioOnly only applies to AudioOnly.
// Since this playback has video, renderer will be initialized and the
@@ -558,20 +559,20 @@ TEST_F(PipelineImplTest, EncryptedStream_SetCdmAfterStart) {
TEST_F(PipelineImplTest, Seek) {
CreateAudioAndVideoStream();
- SetDemuxerExpectations(base::TimeDelta::FromSeconds(3000));
+ SetDemuxerExpectations(base::Seconds(3000));
// Initialize then seek!
StartPipelineAndExpect(PIPELINE_OK);
// Every filter should receive a call to Seek().
- base::TimeDelta expected = base::TimeDelta::FromSeconds(2000);
+ base::TimeDelta expected = base::Seconds(2000);
ExpectSeek(expected, false);
DoSeek(expected);
}
TEST_F(PipelineImplTest, SeekAfterError) {
CreateAudioStream();
- SetDemuxerExpectations(base::TimeDelta::FromSeconds(3000));
+ SetDemuxerExpectations(base::Seconds(3000));
// Initialize then seek!
StartPipelineAndExpect(PIPELINE_OK);
@@ -584,14 +585,14 @@ TEST_F(PipelineImplTest, SeekAfterError) {
EXPECT_CALL(callbacks_, OnSeek(PIPELINE_ERROR_INVALID_STATE));
pipeline_->Seek(
- base::TimeDelta::FromMilliseconds(100),
+ base::Milliseconds(100),
base::BindOnce(&CallbackHelper::OnSeek, base::Unretained(&callbacks_)));
base::RunLoop().RunUntilIdle();
}
TEST_F(PipelineImplTest, SuspendResume) {
CreateAudioAndVideoStream();
- SetDemuxerExpectations(base::TimeDelta::FromSeconds(3000));
+ SetDemuxerExpectations(base::Seconds(3000));
StartPipelineAndExpect(PIPELINE_OK);
@@ -617,7 +618,7 @@ TEST_F(PipelineImplTest, SuspendResume) {
EXPECT_EQ(0, pipeline_->GetStatistics().audio_memory_usage);
EXPECT_EQ(0, pipeline_->GetStatistics().video_memory_usage);
- base::TimeDelta expected = base::TimeDelta::FromSeconds(2000);
+ base::TimeDelta expected = base::Seconds(2000);
ExpectResume(expected);
EXPECT_CALL(*renderer_, SetPreservesPitch(false)).Times(1);
@@ -679,7 +680,7 @@ TEST_F(PipelineImplTest, SetPreservesPitch) {
TEST_F(PipelineImplTest, Properties) {
CreateVideoStream();
- const auto kDuration = base::TimeDelta::FromSeconds(100);
+ const auto kDuration = base::Seconds(100);
SetDemuxerExpectations(kDuration);
StartPipelineAndExpect(PIPELINE_OK);
@@ -690,7 +691,7 @@ TEST_F(PipelineImplTest, Properties) {
TEST_F(PipelineImplTest, GetBufferedTimeRanges) {
CreateVideoStream();
- const auto kDuration = base::TimeDelta::FromSeconds(100);
+ const auto kDuration = base::Seconds(100);
SetDemuxerExpectations(kDuration);
StartPipelineAndExpect(PIPELINE_OK);
@@ -714,7 +715,7 @@ TEST_F(PipelineImplTest, BufferedTimeRangesCanChangeAfterStop) {
base::RunLoop().RunUntilIdle();
pipeline_->Stop();
- RunBufferedTimeRangesTest(base::TimeDelta::FromSeconds(5));
+ RunBufferedTimeRangesTest(base::Seconds(5));
}
TEST_F(PipelineImplTest, OnStatisticsUpdate) {
@@ -769,7 +770,7 @@ TEST_F(PipelineImplTest, EndedCallback) {
base::RunLoop().RunUntilIdle();
}
-TEST_F(PipelineImplTest, ErrorDuringSeek) {
+TEST_F(PipelineImplTest, DemuxerErrorDuringSeek) {
CreateAudioStream();
SetDemuxerExpectations();
StartPipelineAndExpect(PIPELINE_OK);
@@ -779,7 +780,7 @@ TEST_F(PipelineImplTest, ErrorDuringSeek) {
pipeline_->SetPlaybackRate(playback_rate);
base::RunLoop().RunUntilIdle();
- base::TimeDelta seek_time = base::TimeDelta::FromSeconds(5);
+ base::TimeDelta seek_time = base::Seconds(5);
EXPECT_CALL(*renderer_, OnFlush(_)).WillOnce(RunOnceClosure<0>());
@@ -795,6 +796,48 @@ TEST_F(PipelineImplTest, ErrorDuringSeek) {
base::RunLoop().RunUntilIdle();
}
+TEST_F(PipelineImplTest, PipelineErrorDuringSeek) {
+ CreateAudioStream();
+ SetDemuxerExpectations();
+ StartPipelineAndExpect(PIPELINE_OK);
+
+ base::TimeDelta seek_time = base::Seconds(5);
+
+ // Set expectations for seek.
+ EXPECT_CALL(*renderer_, OnFlush(_)).WillOnce(RunOnceClosure<0>());
+ EXPECT_CALL(*renderer_, SetPlaybackRate(_));
+ EXPECT_CALL(*renderer_, StartPlayingFrom(seek_time));
+ EXPECT_CALL(*demuxer_, AbortPendingReads());
+ EXPECT_CALL(*demuxer_, OnSeek(seek_time, _))
+ .WillOnce(RunOnceCallback<1>(PIPELINE_OK));
+ EXPECT_CALL(callbacks_, OnSeek(PIPELINE_ERROR_DECODE));
+
+ // Triggers pipeline error during pending seek.
+ pipeline_->Seek(seek_time, base::BindOnce(&CallbackHelper::OnSeek,
+ base::Unretained(&callbacks_)));
+ renderer_client_->OnError(PIPELINE_ERROR_DECODE);
+ base::RunLoop().RunUntilIdle();
+}
+
+TEST_F(PipelineImplTest, PipelineErrorDuringSuspend) {
+ CreateAudioAndVideoStream();
+ SetDemuxerExpectations(base::Seconds(3000));
+ StartPipelineAndExpect(PIPELINE_OK);
+
+ // Set expectations for suspend.
+ EXPECT_CALL(*demuxer_, AbortPendingReads());
+ EXPECT_CALL(*renderer_, SetPlaybackRate(0));
+ EXPECT_CALL(callbacks_, OnSuspend(PIPELINE_ERROR_DECODE));
+
+ // Triggers pipeline error during pending suspend. The order matters for
+ // reproducing crbug.com/1250636. Otherwise OnError() is ignored if already in
+ // kSuspending state.
+ renderer_client_->OnError(PIPELINE_ERROR_DECODE);
+ pipeline_->Suspend(base::BindOnce(&CallbackHelper::OnSuspend,
+ base::Unretained(&callbacks_)));
+ base::RunLoop().RunUntilIdle();
+}
+
TEST_F(PipelineImplTest, DestroyAfterStop) {
CreateAudioStream();
SetDemuxerExpectations();
@@ -819,13 +862,13 @@ TEST_F(PipelineImplTest, Underflow) {
base::RunLoop().RunUntilIdle();
// Seek while underflowed.
- base::TimeDelta expected = base::TimeDelta::FromSeconds(5);
+ base::TimeDelta expected = base::Seconds(5);
ExpectSeek(expected, true);
DoSeek(expected);
}
TEST_F(PipelineImplTest, PositiveStartTime) {
- start_time_ = base::TimeDelta::FromSeconds(1);
+ start_time_ = base::Seconds(1);
EXPECT_CALL(*demuxer_, GetStartTime()).WillRepeatedly(Return(start_time_));
CreateAudioStream();
SetDemuxerExpectations();
@@ -841,14 +884,14 @@ TEST_F(PipelineImplTest, GetMediaTime) {
StartPipelineAndExpect(PIPELINE_OK);
// Pipeline should report the same media time returned by the renderer.
- base::TimeDelta kMediaTime = base::TimeDelta::FromSeconds(2);
+ base::TimeDelta kMediaTime = base::Seconds(2);
EXPECT_CALL(*renderer_, GetMediaTime()).WillRepeatedly(Return(kMediaTime));
EXPECT_EQ(kMediaTime, pipeline_->GetMediaTime());
// Media time should not go backwards even if the renderer returns an
// erroneous value. PipelineImpl should clamp it to the last reported value.
EXPECT_CALL(*renderer_, GetMediaTime())
- .WillRepeatedly(Return(base::TimeDelta::FromSeconds(1)));
+ .WillRepeatedly(Return(base::Seconds(1)));
EXPECT_EQ(kMediaTime, pipeline_->GetMediaTime());
}
@@ -862,13 +905,13 @@ TEST_F(PipelineImplTest, GetMediaTimeAfterSeek) {
StartPipelineAndExpect(PIPELINE_OK);
// Pipeline should report the same media time returned by the renderer.
- base::TimeDelta kMediaTime = base::TimeDelta::FromSeconds(2);
+ base::TimeDelta kMediaTime = base::Seconds(2);
EXPECT_CALL(*renderer_, GetMediaTime()).WillRepeatedly(Return(kMediaTime));
EXPECT_EQ(kMediaTime, pipeline_->GetMediaTime());
// Seek backward 1 second. Do not run RunLoop to ensure renderer is not yet
// notified of the seek (via media thread).
- base::TimeDelta kSeekTime = kMediaTime - base::TimeDelta::FromSeconds(1);
+ base::TimeDelta kSeekTime = kMediaTime - base::Seconds(1);
ExpectSeek(kSeekTime, false);
pipeline_->Seek(kSeekTime, base::BindOnce(&CallbackHelper::OnSeek,
base::Unretained(&callbacks_)));
@@ -939,6 +982,10 @@ class PipelineTeardownTest : public PipelineImplTest {
};
PipelineTeardownTest() = default;
+
+ PipelineTeardownTest(const PipelineTeardownTest&) = delete;
+ PipelineTeardownTest& operator=(const PipelineTeardownTest&) = delete;
+
~PipelineTeardownTest() override = default;
void RunTest(TeardownState state, StopOrError stop_or_error) {
@@ -999,7 +1046,7 @@ class PipelineTeardownTest : public PipelineImplTest {
CreateAudioStream();
CreateVideoStream();
- SetDemuxerExpectations(base::TimeDelta::FromSeconds(3000));
+ SetDemuxerExpectations(base::Seconds(3000));
EXPECT_CALL(*renderer_, SetVolume(1.0f));
if (state == kInitRenderer) {
@@ -1044,7 +1091,7 @@ class PipelineTeardownTest : public PipelineImplTest {
EXPECT_CALL(*demuxer_, Stop());
pipeline_->Seek(
- base::TimeDelta::FromSeconds(10),
+ base::Seconds(10),
base::BindOnce(&CallbackHelper::OnSeek, base::Unretained(&callbacks_)));
base::RunLoop().RunUntilIdle();
}
@@ -1155,8 +1202,6 @@ class PipelineTeardownTest : public PipelineImplTest {
base::RunLoop().RunUntilIdle();
}
-
- DISALLOW_COPY_AND_ASSIGN(PipelineTeardownTest);
};
#define INSTANTIATE_TEARDOWN_TEST(stop_or_error, state) \
diff --git a/chromium/media/base/reentrancy_checker.h b/chromium/media/base/reentrancy_checker.h
index 574a70b2e53..dbe08ac54c6 100644
--- a/chromium/media/base/reentrancy_checker.h
+++ b/chromium/media/base/reentrancy_checker.h
@@ -50,13 +50,15 @@ namespace media {
class SCOPED_LOCKABLE MEDIA_EXPORT NonReentrantScope {
public:
explicit NonReentrantScope(base::Lock& lock) EXCLUSIVE_LOCK_FUNCTION(lock);
+
+ NonReentrantScope(const NonReentrantScope&) = delete;
+ NonReentrantScope& operator=(const NonReentrantScope&) = delete;
+
~NonReentrantScope() UNLOCK_FUNCTION();
private:
base::Lock& lock_;
bool is_lock_holder_ = false;
-
- DISALLOW_COPY_AND_ASSIGN(NonReentrantScope);
};
} // namespace media
diff --git a/chromium/media/base/renderer.h b/chromium/media/base/renderer.h
index 09d775f0c99..809657cd991 100644
--- a/chromium/media/base/renderer.h
+++ b/chromium/media/base/renderer.h
@@ -25,6 +25,9 @@ class MEDIA_EXPORT Renderer {
public:
Renderer();
+ Renderer(const Renderer&) = delete;
+ Renderer& operator=(const Renderer&) = delete;
+
// Stops rendering and fires any pending callbacks.
virtual ~Renderer();
@@ -88,9 +91,6 @@ class MEDIA_EXPORT Renderer {
virtual void OnEnabledAudioTracksChanged(
const std::vector<DemuxerStream*>& enabled_tracks,
base::OnceClosure change_completed_cb);
-
- private:
- DISALLOW_COPY_AND_ASSIGN(Renderer);
};
} // namespace media
diff --git a/chromium/media/base/renderer_factory.h b/chromium/media/base/renderer_factory.h
index 1001d7e0ff0..20d4a955eab 100644
--- a/chromium/media/base/renderer_factory.h
+++ b/chromium/media/base/renderer_factory.h
@@ -29,6 +29,10 @@ class VideoRendererSink;
class MEDIA_EXPORT RendererFactory {
public:
RendererFactory();
+
+ RendererFactory(const RendererFactory&) = delete;
+ RendererFactory& operator=(const RendererFactory&) = delete;
+
virtual ~RendererFactory();
// Creates and returns a Renderer. All methods of the created Renderer except
@@ -48,9 +52,6 @@ class MEDIA_EXPORT RendererFactory {
// created by this factory.
// NOTE: Returns Type::STREAM by default.
virtual MediaResource::Type GetRequiredMediaResourceType();
-
- private:
- DISALLOW_COPY_AND_ASSIGN(RendererFactory);
};
} // namespace media
diff --git a/chromium/media/base/renderer_factory_selector.h b/chromium/media/base/renderer_factory_selector.h
index abe0c9335aa..bfef2293c2e 100644
--- a/chromium/media/base/renderer_factory_selector.h
+++ b/chromium/media/base/renderer_factory_selector.h
@@ -61,6 +61,10 @@ class MEDIA_EXPORT RendererFactorySelector {
using ConditionalFactoryCB = base::RepeatingCallback<bool()>;
RendererFactorySelector();
+
+ RendererFactorySelector(const RendererFactorySelector&) = delete;
+ RendererFactorySelector& operator=(const RendererFactorySelector&) = delete;
+
~RendererFactorySelector();
// See file level comments above.
@@ -107,8 +111,6 @@ class MEDIA_EXPORT RendererFactorySelector {
RequestRemotePlayStateChangeCB remote_play_state_change_cb_request_;
std::map<RendererType, std::unique_ptr<RendererFactory>> factories_;
-
- DISALLOW_COPY_AND_ASSIGN(RendererFactorySelector);
};
} // namespace media
diff --git a/chromium/media/base/scoped_async_trace.h b/chromium/media/base/scoped_async_trace.h
index e448689410f..a8502a7d56d 100644
--- a/chromium/media/base/scoped_async_trace.h
+++ b/chromium/media/base/scoped_async_trace.h
@@ -25,6 +25,9 @@ class MEDIA_EXPORT ScopedAsyncTrace {
// other words, use literal strings only. See trace_event_common.h .
static std::unique_ptr<ScopedAsyncTrace> CreateIfEnabled(const char* name);
+ ScopedAsyncTrace(const ScopedAsyncTrace&) = delete;
+ ScopedAsyncTrace& operator=(const ScopedAsyncTrace&) = delete;
+
~ScopedAsyncTrace();
// TODO(liberato): Add StepInto / StepPast.
@@ -33,8 +36,6 @@ class MEDIA_EXPORT ScopedAsyncTrace {
explicit ScopedAsyncTrace(const char* name);
const char* name_ = nullptr;
-
- DISALLOW_COPY_AND_ASSIGN(ScopedAsyncTrace);
};
} // namespace media
diff --git a/chromium/media/base/scopedfd_helper.h b/chromium/media/base/scopedfd_helper.h
index 5cc8086c499..7767244f47b 100644
--- a/chromium/media/base/scopedfd_helper.h
+++ b/chromium/media/base/scopedfd_helper.h
@@ -6,6 +6,7 @@
#define MEDIA_BASE_SCOPEDFD_HELPER_H_
#include "base/files/scoped_file.h"
+#include "build/build_config.h"
#include "media/base/media_export.h"
namespace media {
diff --git a/chromium/media/base/seekable_buffer.cc b/chromium/media/base/seekable_buffer.cc
index 9e8faf6f1e6..a3766bcc003 100644
--- a/chromium/media/base/seekable_buffer.cc
+++ b/chromium/media/base/seekable_buffer.cc
@@ -269,8 +269,7 @@ void SeekableBuffer::UpdateCurrentTime(BufferQueue::iterator buffer,
int64_t time_offset = ((*buffer)->duration().InMicroseconds() * offset) /
(*buffer)->data_size();
- current_time_ = (*buffer)->timestamp() +
- base::TimeDelta::FromMicroseconds(time_offset);
+ current_time_ = (*buffer)->timestamp() + base::Microseconds(time_offset);
}
}
diff --git a/chromium/media/base/seekable_buffer.h b/chromium/media/base/seekable_buffer.h
index 74a491bea3d..b31e62483ca 100644
--- a/chromium/media/base/seekable_buffer.h
+++ b/chromium/media/base/seekable_buffer.h
@@ -52,6 +52,9 @@ class MEDIA_EXPORT SeekableBuffer {
// The values are in bytes.
SeekableBuffer(int backward_capacity, int forward_capacity);
+ SeekableBuffer(const SeekableBuffer&) = delete;
+ SeekableBuffer& operator=(const SeekableBuffer&) = delete;
+
~SeekableBuffer();
// Clears the buffer queue.
@@ -180,8 +183,6 @@ class MEDIA_EXPORT SeekableBuffer {
// Keeps track of the most recent time we've seen in case the |buffers_| is
// empty when our owner asks what time it is.
base::TimeDelta current_time_;
-
- DISALLOW_COPY_AND_ASSIGN(SeekableBuffer);
};
} // namespace media
diff --git a/chromium/media/base/seekable_buffer_unittest.cc b/chromium/media/base/seekable_buffer_unittest.cc
index 37468740cc9..d1df6a3ca9b 100644
--- a/chromium/media/base/seekable_buffer_unittest.cc
+++ b/chromium/media/base/seekable_buffer_unittest.cc
@@ -338,10 +338,8 @@ TEST_F(SeekableBufferTest, GetTime) {
scoped_refptr<DataBuffer> buffer = DataBuffer::CopyFrom(data_, kWriteSize);
for (size_t i = 0; i < base::size(tests); ++i) {
- buffer->set_timestamp(base::TimeDelta::FromMicroseconds(
- tests[i].first_time_useconds));
- buffer->set_duration(base::TimeDelta::FromMicroseconds(
- tests[i].duration_useconds));
+ buffer->set_timestamp(base::Microseconds(tests[i].first_time_useconds));
+ buffer->set_duration(base::Microseconds(tests[i].duration_useconds));
buffer_.Append(buffer.get());
EXPECT_TRUE(buffer_.Seek(tests[i].consume_bytes));
diff --git a/chromium/media/base/serial_runner_unittest.cc b/chromium/media/base/serial_runner_unittest.cc
index 4d7b4435b6a..ae2cb83a885 100644
--- a/chromium/media/base/serial_runner_unittest.cc
+++ b/chromium/media/base/serial_runner_unittest.cc
@@ -21,6 +21,10 @@ class SerialRunnerTest : public ::testing::Test {
public:
SerialRunnerTest()
: inside_start_(false), done_called_(false), done_status_(PIPELINE_OK) {}
+
+ SerialRunnerTest(const SerialRunnerTest&) = delete;
+ SerialRunnerTest& operator=(const SerialRunnerTest&) = delete;
+
~SerialRunnerTest() override = default;
void RunSerialRunner() {
@@ -140,8 +144,6 @@ class SerialRunnerTest : public ::testing::Test {
// Tracks whether the final done callback was called + resulting status.
bool done_called_;
PipelineStatus done_status_;
-
- DISALLOW_COPY_AND_ASSIGN(SerialRunnerTest);
};
TEST_F(SerialRunnerTest, Empty) {
diff --git a/chromium/media/base/silent_sink_suspender.cc b/chromium/media/base/silent_sink_suspender.cc
index 42de9b7cd83..fbbf0269ca4 100644
--- a/chromium/media/base/silent_sink_suspender.cc
+++ b/chromium/media/base/silent_sink_suspender.cc
@@ -118,6 +118,10 @@ void SilentSinkSuspender::OnRenderError() {
void SilentSinkSuspender::OnPaused() {
DCHECK(task_runner_->BelongsToCurrentThread());
+ // This is a no-op if the sink isn't running, but must be executed without the
+ // |transition_lock_| being held to avoid possible deadlock.
+ fake_sink_.Stop();
+
base::AutoLock al(transition_lock_);
// Nothing to do if we haven't touched the sink.
@@ -128,12 +132,7 @@ void SilentSinkSuspender::OnPaused() {
// If we've moved over to the fake sink, we just need to stop it and cancel
// any pending transitions.
- if (is_using_fake_sink_) {
- is_using_fake_sink_ = false;
- fake_sink_.Stop();
- }
-
- // Cancel any pending transitions.
+ is_using_fake_sink_ = false;
is_transition_pending_ = false;
first_silence_time_ = base::TimeTicks();
sink_transition_callback_.Reset(base::BindRepeating(
diff --git a/chromium/media/base/silent_sink_suspender.h b/chromium/media/base/silent_sink_suspender.h
index 40fc611c402..3662f27fe56 100644
--- a/chromium/media/base/silent_sink_suspender.h
+++ b/chromium/media/base/silent_sink_suspender.h
@@ -43,6 +43,10 @@ class MEDIA_EXPORT SilentSinkSuspender
const AudioParameters& params,
scoped_refptr<AudioRendererSink> sink,
scoped_refptr<base::SingleThreadTaskRunner> worker);
+
+ SilentSinkSuspender(const SilentSinkSuspender&) = delete;
+ SilentSinkSuspender& operator=(const SilentSinkSuspender&) = delete;
+
~SilentSinkSuspender() override;
// AudioRendererSink::RenderCallback implementation.
@@ -125,8 +129,6 @@ class MEDIA_EXPORT SilentSinkSuspender
// Time when transition to |fake_sink_| starts.
base::TimeTicks fake_sink_transition_time_;
-
- DISALLOW_COPY_AND_ASSIGN(SilentSinkSuspender);
};
} // namespace media
diff --git a/chromium/media/base/silent_sink_suspender_unittest.cc b/chromium/media/base/silent_sink_suspender_unittest.cc
index 5ac51cc74a2..e9a69e3b318 100644
--- a/chromium/media/base/silent_sink_suspender_unittest.cc
+++ b/chromium/media/base/silent_sink_suspender_unittest.cc
@@ -19,19 +19,20 @@ namespace media {
class SilentSinkSuspenderTest : public testing::Test {
public:
SilentSinkSuspenderTest()
- : params_(AudioParameters::AUDIO_FAKE,
- CHANNEL_LAYOUT_MONO,
- 44100,
- 128),
+ : params_(AudioParameters::AUDIO_FAKE, CHANNEL_LAYOUT_MONO, 44100, 128),
mock_sink_(new testing::StrictMock<MockAudioRendererSink>()),
fake_callback_(0.1, params_.sample_rate()),
temp_bus_(AudioBus::Create(params_)),
// Set a negative timeout so any silence will suspend immediately.
suspender_(&fake_callback_,
- base::TimeDelta::FromSeconds(-1),
+ base::Seconds(-1),
params_,
mock_sink_,
test_loop_.task_runner()) {}
+
+ SilentSinkSuspenderTest(const SilentSinkSuspenderTest&) = delete;
+ SilentSinkSuspenderTest& operator=(const SilentSinkSuspenderTest&) = delete;
+
~SilentSinkSuspenderTest() override = default;
protected:
@@ -41,14 +42,11 @@ class SilentSinkSuspenderTest : public testing::Test {
FakeAudioRenderCallback fake_callback_;
std::unique_ptr<AudioBus> temp_bus_;
SilentSinkSuspender suspender_;
-
- private:
- DISALLOW_COPY_AND_ASSIGN(SilentSinkSuspenderTest);
};
TEST_F(SilentSinkSuspenderTest, BasicPassthough) {
temp_bus_->Zero();
- auto delay = base::TimeDelta::FromMilliseconds(20);
+ auto delay = base::Milliseconds(20);
EXPECT_EQ(temp_bus_->frames(),
suspender_.Render(delay, base::TimeTicks(), 0, temp_bus_.get()));
diff --git a/chromium/media/base/simple_watch_timer.cc b/chromium/media/base/simple_watch_timer.cc
index 83668dfd7a2..16869b35283 100644
--- a/chromium/media/base/simple_watch_timer.cc
+++ b/chromium/media/base/simple_watch_timer.cc
@@ -11,8 +11,7 @@ namespace media {
namespace {
-constexpr base::TimeDelta kQueryInterval =
- base::TimeDelta::FromMilliseconds(750);
+constexpr base::TimeDelta kQueryInterval = base::Milliseconds(750);
} // namespace
diff --git a/chromium/media/base/simple_watch_timer.h b/chromium/media/base/simple_watch_timer.h
index 777fc9b5ed2..e837c1a7904 100644
--- a/chromium/media/base/simple_watch_timer.h
+++ b/chromium/media/base/simple_watch_timer.h
@@ -33,6 +33,10 @@ class MEDIA_EXPORT SimpleWatchTimer {
using GetCurrentTimeCB = base::RepeatingCallback<base::TimeDelta()>;
SimpleWatchTimer(TickCB tick_cb, GetCurrentTimeCB get_current_time_cb);
+
+ SimpleWatchTimer(const SimpleWatchTimer&) = delete;
+ SimpleWatchTimer& operator=(const SimpleWatchTimer&) = delete;
+
~SimpleWatchTimer();
void Start();
@@ -47,8 +51,6 @@ class MEDIA_EXPORT SimpleWatchTimer {
int unreported_ms_ = 0;
base::TimeDelta last_current_time_;
base::RepeatingTimer timer_;
-
- DISALLOW_COPY_AND_ASSIGN(SimpleWatchTimer);
};
} // namespace media
diff --git a/chromium/media/base/sinc_resampler.h b/chromium/media/base/sinc_resampler.h
index b0e76d4118e..8cbae16a742 100644
--- a/chromium/media/base/sinc_resampler.h
+++ b/chromium/media/base/sinc_resampler.h
@@ -50,6 +50,10 @@ class MEDIA_EXPORT SincResampler {
SincResampler(double io_sample_rate_ratio,
int request_frames,
const ReadCB read_cb);
+
+ SincResampler(const SincResampler&) = delete;
+ SincResampler& operator=(const SincResampler&) = delete;
+
~SincResampler();
// Resample |frames| of data from |read_cb_| into |destination|.
@@ -170,8 +174,6 @@ class MEDIA_EXPORT SincResampler {
float* const r2_;
float* r3_;
float* r4_;
-
- DISALLOW_COPY_AND_ASSIGN(SincResampler);
};
} // namespace media
diff --git a/chromium/media/base/sinc_resampler_unittest.cc b/chromium/media/base/sinc_resampler_unittest.cc
index 79d393869ed..f8226a7c140 100644
--- a/chromium/media/base/sinc_resampler_unittest.cc
+++ b/chromium/media/base/sinc_resampler_unittest.cc
@@ -201,6 +201,10 @@ class SinusoidalLinearChirpSource {
k_ = (max_frequency_ - kMinFrequency) / duration;
}
+ SinusoidalLinearChirpSource(const SinusoidalLinearChirpSource&) = delete;
+ SinusoidalLinearChirpSource& operator=(const SinusoidalLinearChirpSource&) =
+ delete;
+
virtual ~SinusoidalLinearChirpSource() = default;
void ProvideInput(int frames, float* destination) {
@@ -234,8 +238,6 @@ class SinusoidalLinearChirpSource {
double max_frequency_;
double k_;
int current_index_;
-
- DISALLOW_COPY_AND_ASSIGN(SinusoidalLinearChirpSource);
};
typedef std::tuple<int, int, double, double> SincResamplerTestData;
diff --git a/chromium/media/base/status.cc b/chromium/media/base/status.cc
index 8bfa2f999b2..75bbde250b5 100644
--- a/chromium/media/base/status.cc
+++ b/chromium/media/base/status.cc
@@ -10,70 +10,52 @@
namespace media {
-Status::Status() = default;
+namespace internal {
-Status::Status(StatusCode code,
- base::StringPiece message,
- const base::Location& location) {
- // Note that |message| is dropped in this case.
- if (code == StatusCode::kOk) {
- DCHECK(message.empty());
- return;
- }
- data_ = std::make_unique<StatusInternal>(code, std::string(message));
- AddFrame(location);
-}
+StatusData::StatusData() = default;
-// Copy Constructor
-Status::Status(const Status& copy) {
+StatusData::StatusData(const StatusData& copy) {
*this = copy;
}
-Status& Status::operator=(const Status& copy) {
- if (copy.is_ok()) {
- data_.reset();
- return *this;
- }
-
- data_ = std::make_unique<StatusInternal>(copy.code(), copy.message());
- for (const base::Value& frame : copy.data_->frames)
- data_->frames.push_back(frame.Clone());
- for (const Status& err : copy.data_->causes)
- data_->causes.push_back(err);
- data_->data = copy.data_->data.Clone();
- return *this;
-}
-
-// Allow move.
-Status::Status(Status&&) = default;
-Status& Status::operator=(Status&&) = default;
-
-Status::~Status() = default;
-
-Status::StatusInternal::StatusInternal(StatusCode code, std::string message)
- : code(code),
+StatusData::StatusData(StatusGroupType group,
+ StatusCodeType code,
+ std::string message)
+ : group(group),
+ code(code),
message(std::move(message)),
data(base::Value(base::Value::Type::DICTIONARY)) {}
-Status::StatusInternal::~StatusInternal() = default;
-
-Status&& Status::AddHere(const base::Location& location) && {
- DCHECK(data_);
- AddFrame(location);
- return std::move(*this);
+std::unique_ptr<StatusData> StatusData::copy() const {
+ auto result = std::make_unique<StatusData>(group, code, message);
+ for (const auto& frame : frames)
+ result->frames.push_back(frame.Clone());
+ for (const auto& cause : causes)
+ result->causes.push_back(cause);
+ result->data = data.Clone();
+ return result;
}
-Status&& Status::AddCause(Status&& cause) && {
- DCHECK(data_ && cause.data_);
- data_->causes.push_back(std::move(cause));
- return std::move(*this);
+StatusData::~StatusData() = default;
+
+StatusData& StatusData::operator=(const StatusData& copy) {
+ group = copy.group;
+ code = copy.code;
+ message = copy.message;
+ for (const auto& frame : copy.frames)
+ frames.push_back(frame.Clone());
+ for (const auto& cause : copy.causes)
+ causes.push_back(cause);
+ data = copy.data.Clone();
+ return *this;
}
-void Status::AddFrame(const base::Location& location) {
- DCHECK(data_);
- data_->frames.push_back(MediaSerialize(location));
+void StatusData::AddLocation(const base::Location& location) {
+ frames.push_back(MediaSerialize(location));
}
+} // namespace internal
+
Status OkStatus() {
return Status(StatusCode::kOk);
}
diff --git a/chromium/media/base/status.h b/chromium/media/base/status.h
index d8e74805bb9..ced9c6e51e3 100644
--- a/chromium/media/base/status.h
+++ b/chromium/media/base/status.h
@@ -7,6 +7,7 @@
#include <memory>
#include <string>
+#include <type_traits>
#include <utility>
#include <vector>
@@ -25,243 +26,291 @@ struct StructTraits;
namespace media {
-namespace mojom {
-class StatusDataView;
-}
+// See media/base/status.md for details and instructions for
+// using TypedStatus<T>.
+
+// This is the type that enum classes used for specializing |TypedStatus| must
+// extend from.
+using StatusCodeType = uint16_t;
+
+// This is the type that TypedStatusTraits::Group should be.
+using StatusGroupType = base::StringPiece;
+
+namespace internal {
+
+struct MEDIA_EXPORT StatusData {
+ StatusData();
+ StatusData(const StatusData&);
+ StatusData(StatusGroupType group, StatusCodeType code, std::string message);
+ ~StatusData();
+ StatusData& operator=(const StatusData&);
+
+ std::unique_ptr<StatusData> copy() const;
+ void AddLocation(const base::Location&);
+
+ // Enum group ID.
+ std::string group;
+
+ // Entry within enum, cast to base type.
+ StatusCodeType code;
+
+ // The current error message (Can be used for
+ // https://developer.mozilla.org/en-US/docs/Web/API/Status)
+ std::string message;
+
+ // Stack frames
+ std::vector<base::Value> frames;
+
+ // Causes
+ std::vector<StatusData> causes;
+
+ // Data attached to the error
+ base::Value data;
+};
+
+} // namespace internal
+
+// See media/base/status.md for details and instructions for using TypedStatus.
+template <typename T>
+class MEDIA_EXPORT TypedStatus {
+ static_assert(std::is_enum<typename T::Codes>::value,
+ "TypedStatus must only be specialized with enum types.");
-// Status is meant to be a relatively small (sizeof(void*) bytes) object
-// that can be returned as a status value from functions or passed to callbacks
-// that want a report of status. Status allows attaching of arbitrary named
-// data, other Status' as causes, and stack frames, which can all be logged
-// and reported throughout the media stack. The status code and message are
-// immutable and can be used to give a stable numeric ID for any error
-// generated by media code.
-// There is also an OK state which can't hold any data and is only for
-// successful returns.
-class MEDIA_EXPORT Status {
public:
- // This will create a kOk status, but please don't use it. Use either
- // Status(StatusCode::kOk) or OkStatus(). This is here because the mojo
- // bindings assume that it is.
- // TODO(crbug.com/1106492): Remove this.
- Status();
+ using Traits = T;
+ using Codes = typename T::Codes;
+
+ // default constructor to please the Mojo Gods.
+ TypedStatus() = default;
- // Constructor to create a new Status from a numeric code & message.
+ // Constructor to create a new TypedStatus from a numeric code & message.
// These are immutable; if you'd like to change them, then you likely should
- // create a new Status. Either {StatusCode::kOk} or OkStatus() may be used to
- // create a success status.
+ // create a new TypedStatus.
// NOTE: This should never be given a location parameter when called - It is
// defaulted in order to grab the caller location.
- Status(StatusCode code,
- base::StringPiece message = "",
- const base::Location& location = base::Location::Current());
-
- // Copy Constructor & assignment. (Mojo uses both of these)
- Status(const Status&);
- Status& operator=(const Status&);
+ TypedStatus(Codes code,
+ base::StringPiece message = "",
+ const base::Location& location = base::Location::Current()) {
+ // Note that |message| would be dropped when code is the default value,
+ // so DCHECK that it is not set.
+ if (code == Traits::DefaultEnumValue()) {
+ DCHECK(!!message.empty());
+ return;
+ }
+ data_ = std::make_unique<internal::StatusData>(
+ Traits::Group(), static_cast<StatusCodeType>(code),
+ std::string(message));
+ data_->AddLocation(location);
+ }
- // Allows move.
- Status(Status&&);
- Status& operator=(Status&&);
+ TypedStatus(const TypedStatus<T>& copy) { *this = copy; }
- // Needs an out of line destructor...
- ~Status();
+ TypedStatus<T>& operator=(const TypedStatus<T>& copy) {
+ if (!copy.data_) {
+ data_.reset();
+ return *this;
+ }
+ data_ = copy.data_->copy();
+ return *this;
+ }
+ // DEPRECATED: check code() == ok value.
bool is_ok() const { return !data_; }
- // Getters for internal fields
+ Codes code() const {
+ if (!data_)
+ return *Traits::DefaultEnumValue();
+ return static_cast<Codes>(data_->code);
+ }
+
+ const std::string group() const {
+ return data_ ? data_->group : Traits::Group();
+ }
+
const std::string& message() const {
DCHECK(data_);
return data_->message;
}
- StatusCode code() const { return data_ ? data_->code : StatusCode::kOk; }
-
- // Adds the current location to Status as it’s passed upwards.
+ // Adds the current location to StatusBase as it’s passed upwards.
// This does not need to be called at every location that touches it, but
// should be called for those locations where the path is ambiguous or
// critical. This can be especially helpful across IPC boundaries. This will
// fail on an OK status.
// NOTE: This should never be given a parameter when called - It is defaulted
// in order to grab the caller location.
- Status&& AddHere(
- const base::Location& location = base::Location::Current()) &&;
-
- // Add |cause| as the error that triggered this one. For example,
- // DecoderStream might return kDecoderSelectionFailed with one or more causes
- // that are the specific errors from the decoders that it tried.
- Status&& AddCause(Status&& cause) &&;
- void AddCause(Status&& cause) &;
+ TypedStatus<T>&& AddHere(
+ const base::Location& location = base::Location::Current()) && {
+ DCHECK(data_);
+ // We can't call MediaSerialize directly, because we can't include the
+ // default serializers header, since it includes this header.
+ data_->AddLocation(location);
+ return std::move(*this);
+ }
// Allows us to append any datatype which can be converted to
// an int/bool/string/base::Value. Any existing data associated with |key|
// will be overwritten by |value|. This will fail on an OK status.
- template <typename T>
- Status&& WithData(const char* key, const T& value) && {
+ template <typename D>
+ TypedStatus<T>&& WithData(const char* key, const D& value) && {
DCHECK(data_);
data_->data.SetKey(key, MediaSerialize(value));
return std::move(*this);
}
- template <typename T>
- void WithData(const char* key, const T& value) & {
+ template <typename D>
+ void WithData(const char* key, const D& value) & {
DCHECK(data_);
data_->data.SetKey(key, MediaSerialize(value));
}
- private:
- // Private helper to add the current stack frame to the error trace.
- void AddFrame(const base::Location& location);
+ // Add |cause| as the error that triggered this one.
+ template <typename AnyTraitsType>
+ TypedStatus<T>&& AddCause(TypedStatus<AnyTraitsType>&& cause) && {
+ DCHECK(data_ && cause.data_);
+ data_->causes.push_back(*cause.data_);
+ return std::move(*this);
+ }
- // Keep the internal data in a unique ptr to minimize size of OK errors.
- struct MEDIA_EXPORT StatusInternal {
- StatusInternal(StatusCode code, std::string message);
- ~StatusInternal();
+ // Add |cause| as the error that triggered this one.
+ template <typename AnyTraitsType>
+ void AddCause(TypedStatus<AnyTraitsType>&& cause) & {
+ DCHECK(data_ && cause.data_);
+ data_->causes.push_back(*cause.data_);
+ }
- // The current error code
- StatusCode code = StatusCode::kOk;
+ inline bool operator==(T code) const { return code == this->code(); }
- // The current error message (Can be used for
- // https://developer.mozilla.org/en-US/docs/Web/API/Status)
- std::string message;
+ inline bool operator!=(T code) const { return code != this->code(); }
- // Stack frames
- std::vector<base::Value> frames;
+ inline bool operator==(const TypedStatus<T>& other) const {
+ return other.code() == code();
+ }
- // Causes
- std::vector<Status> causes;
+ inline bool operator!=(const TypedStatus<T>& other) const {
+ return other.code() != code();
+ }
- // Data attached to the error
- base::Value data;
+ template <typename OtherType>
+ class Or {
+ public:
+ ~Or() = default;
+
+ // Implicit constructors allow returning |OtherType| or |TypedStatus|
+ // directly.
+ Or(TypedStatus<T>&& error) : error_(std::move(error)) {
+ // Either |T| must not have a default code, or |error|'s code must not be
+ // the default.
+ DCHECK(!Traits::DefaultEnumValue() ||
+ *Traits::DefaultEnumValue() != code());
+ }
+ Or(const TypedStatus<T>& error) : error_(error) {
+ DCHECK(!Traits::DefaultEnumValue() ||
+ *Traits::DefaultEnumValue() != code());
+ }
+
+ Or(OtherType&& value) : value_(std::move(value)) {}
+ Or(const OtherType& value) : value_(value) {}
+ Or(typename T::Codes code,
+ const base::Location& location = base::Location::Current())
+ : error_(TypedStatus<T>(code, "", location)) {
+ DCHECK(!Traits::DefaultEnumValue() ||
+ *Traits::DefaultEnumValue() != code);
+ }
+
+ // Move- and copy- construction and assignment are okay.
+ Or(const Or&) = default;
+ Or(Or&&) = default;
+ Or& operator=(Or&) = default;
+ Or& operator=(Or&&) = default;
+
+ bool has_value() const { return value_.has_value(); }
+ bool has_error() const { return error_.has_value(); }
+
+ inline bool operator==(typename T::Codes code) const {
+ return code == this->code();
+ }
+
+ inline bool operator!=(typename T::Codes code) const {
+ return code != this->code();
+ }
+
+ // Return the error, if we have one.
+ // Callers should ensure that this |has_error()|.
+ TypedStatus<T> error() && {
+ CHECK(error_);
+ auto error = std::move(*error_);
+ error_.reset();
+ return error;
+ }
+
+ // Return the value, if we have one.
+ // Callers should ensure that this |has_value()|.
+ OtherType value() && {
+ CHECK(value_);
+ auto value = std::move(std::get<0>(*value_));
+ value_.reset();
+ return value;
+ }
+
+ typename T::Codes code() const {
+ DCHECK(error_ || value_);
+ // It is invalid to call |code()| on an |Or| with a value that
+ // is specialized in a TypedStatus with no DefaultEnumValue.
+ DCHECK(error_ || Traits::DefaultEnumValue());
+ return error_ ? error_->code() : *Traits::DefaultEnumValue();
+ }
+
+ private:
+ absl::optional<TypedStatus<T>> error_;
+
+ // We wrap |OtherType| in a container so that windows COM wrappers work.
+ // They override operator& and similar, and won't compile in an
+ // absl::optional.
+ absl::optional<std::tuple<OtherType>> value_;
};
- // Allow self-serialization
- friend struct internal::MediaSerializer<Status>;
-
- // Allow mojo-serialization
- friend struct mojo::StructTraits<media::mojom::StatusDataView, Status>;
+ private:
+ std::unique_ptr<internal::StatusData> data_;
- // A null internals is an implicit OK.
- std::unique_ptr<StatusInternal> data_;
-};
+ template <typename StatusEnum, typename DataView>
+ friend struct mojo::StructTraits;
-// Convenience function to return |kOk|.
-// OK won't have a message, trace, or data associated with them, and DCHECK
-// if they are added.
-MEDIA_EXPORT Status OkStatus();
+ // Allow media-serialization
+ friend struct internal::MediaSerializer<TypedStatus<T>>;
-// TODO(liberato): Add more helper functions for common error returns.
-
-// Helper class to allow returning a `T` or a Status.
-//
-// It is not okay to send a StatusOr with a status code of `kOk`. `kOk` is
-// reserved for cases where there is a `T` rather than a Status.
-//
-// Typical usage:
-//
-// StatusOr<std::unique_ptr<MyObject>> FactoryFn() {
-// if (success)
-// return std::make_unique<MyObject>();
-// return Status(StatusCodes::kSomethingBadHappened);
-// }
-//
-// auto result = FactoryFn();
-// if (result.has_error()) return std::move(result).error();
-// my_object_ = std::move(result).value();
-//
-// Can also be combined into a single switch using `code()`:
-//
-// switch (result.code()) {
-// case StatusCode::kOk:
-// // `kOk` is special; it means the StatusOr has a `T`.
-// // Do something with result.value()
-// break;
-// // Maybe switch on specific non-kOk codes for special processing.
-// default: // Send unknown errors upwards.
-// return std::move(result).error();
-// }
-//
-// Also useful if one would like to get an enum class return value, unless an
-// error occurs:
-//
-// enum class ResultType { kNeedMoreInput, kOutputIsReady, kFormatChanged };
-//
-// StatusOr<ResultType> Foo() { ... }
-//
-// auto result = Foo();
-// if (result.has_error()) return std::move(result).error();
-// switch (std::move(result).value()) {
-// case ResultType::kNeedMoreInput:
-// ...
-// }
-template <typename T>
-class StatusOr {
- public:
- // All of these may be implicit, so that one may just return Status or
- // the value in question.
- /* not explicit */ StatusOr(Status&& error) : error_(std::move(error)) {
- DCHECK_NE(code(), StatusCode::kOk);
- }
- /* not explicit */ StatusOr(const Status& error) : error_(error) {
- DCHECK_NE(code(), StatusCode::kOk);
- }
- StatusOr(StatusCode code,
- const base::Location& location = base::Location::Current())
- : error_(Status(code, "", location)) {
- DCHECK_NE(code, StatusCode::kOk);
+ void SetInternalData(std::unique_ptr<internal::StatusData> data) {
+ data_ = std::move(data);
}
+};
- StatusOr(T&& value) : value_(std::move(value)) {}
- StatusOr(const T& value) : value_(value) {}
-
- ~StatusOr() = default;
-
- // Move- and copy- construction and assignment are okay.
- StatusOr(const StatusOr&) = default;
- StatusOr(StatusOr&&) = default;
- StatusOr& operator=(StatusOr&) = default;
- StatusOr& operator=(StatusOr&&) = default;
-
- // Do we have a value?
- bool has_value() const { return value_.has_value(); }
-
- // Do we have an error?
- bool has_error() const { return error_.has_value(); }
-
- // Return the error, if we have one. Up to the caller to make sure that we
- // have one via |has_error()|.
- // NOTE: once this is called, the StatusOr is defunct and should not be used.
- Status error() && {
- CHECK(error_);
- auto error = std::move(*error_);
- error_.reset();
- return error;
- }
+template <typename T>
+inline bool operator==(typename T::Codes code, const TypedStatus<T>& status) {
+ return status == code;
+}
- // Return the value. It's up to the caller to verify that we have a value
- // before calling this. Also, this only works once, after which we will have
- // an error. Use like this: std::move(status_or).value();
- // NOTE: once this is called, the StatusOr is defunct and should not be used.
- T value() && {
- CHECK(value_);
- auto value = std::move(std::get<0>(*value_));
- value_.reset();
- return value;
- }
+template <typename T>
+inline bool operator!=(typename T::Codes code, const TypedStatus<T>& status) {
+ return status != code;
+}
- // Returns the error code we have, if any, or `kOk`.
- StatusCode code() const {
- CHECK(error_ || value_);
- return error_ ? error_->code() : StatusCode::kOk;
+// Define TypedStatus<StatusCode> as Status in the media namespace for
+// backwards compatibility. Also define StatusOr as Status::Or for the
+// same reason.
+struct GeneralStatusTraits {
+ using Codes = StatusCode;
+ static constexpr StatusGroupType Group() { return "GeneralStatusCode"; }
+ static constexpr absl::optional<StatusCode> DefaultEnumValue() {
+ return StatusCode::kOk;
}
-
- private:
- // Optional error.
- absl::optional<Status> error_;
- // We wrap |T| in a container so that windows COM wrappers work. They
- // override operator& and similar, and won't compile in a absl::optional.
- absl::optional<std::tuple<T>> value_;
};
+using Status = TypedStatus<GeneralStatusTraits>;
+template <typename T>
+using StatusOr = Status::Or<T>;
+
+// Convenience function to return |kOk|.
+// OK won't have a message, trace, or data associated with them, and DCHECK
+// if they are added.
+MEDIA_EXPORT Status OkStatus();
} // namespace media
diff --git a/chromium/media/base/status.md b/chromium/media/base/status.md
new file mode 100644
index 00000000000..86babf35b0a
--- /dev/null
+++ b/chromium/media/base/status.md
@@ -0,0 +1,178 @@
+# TypedStatus<T>
+
+The purpose of TypedStatus is to provide a thin wrapper around return-value
+enums that support causality tracking, data attachment, and general assistance
+with debugging, without adding slowdowns due to returning large structs,
+pointers, or more complicated types.
+
+TypedStatus<T> should be specialized with a traits struct that defines:
+
+ Codes - enum (usually enum class) that would be the return type, if we weren't
+ using TypedStatus.
+ static constexpr StatusGroupType Group() { return "NameOfStatus"; }
+ static constexpr absl::optional<Codes> DefaultEnumValue() {
+ return Codes::kCodeThatShouldBeSuperOptimizedEGSuccess;
+ // Can return nullopt to optimize none of them. No idea why you'd do that.
+ }
+
+Typically one would:
+
+ struct MyStatusTraits { ... };
+ using MyStatus = TypedStatus<MyStatusTraits>;
+
+## Using an existing `TypedStatus<T>`
+
+The current canonical TypedStatus is called `Status` for historical reasons,
+though that will soon change.
+
+All TypedStatus specializations have the following common API:
+
+```c++
+// The underlying code value.
+T::Codes code() const;
+
+// The underlying message.
+const std::string& message() const;
+
+// Adds the current file & line number to the trace.
+TypedStatus<T>&& AddHere() &&;
+
+// Adds some named data to the status, such as a platform-specific error
+// value, e.g. an HRESULT. This data is for human consumption only in a
+// developer setting, and can't be extracted from the TypedStatus normally.
+// The code value should be sufficiently informative between the sender and
+// receiver of the TypedStatus.
+template<typename D>
+TypedStatus<T>&& WithData(const char *key, const D& value) &&;
+template<typename D>
+void WithData(const char *key, const D& value) &;
+
+// Adds a "causal" status to this one.
+// The type `R` will not be retained. As with the data methods, `cause` is
+// only used for human consumption and cannot be extracted under normal
+// circumstances.
+template<typename R>
+TypedStatus<T>&& AddCause(TypedStatus<R>&& cause) &&;
+template<typename R>
+void AddCause(TypedStatus<R>&& cause) &;
+```
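+
+As a rough sketch of how these helpers combine (the `ParseHeader()` function
+and its attached data are invented for illustration; `MyExampleStatus` is the
+specialization defined in the usage guide below), they are typically chained
+on a temporary before returning it:
+
+```c++
+// Sketch only: ParseHeader() and the "header_size" key are hypothetical.
+MyExampleStatus ParseHeader(bool parsed_ok, int header_size) {
+  if (!parsed_ok) {
+    // Attach named debugging data and the current location, then return.
+    return MyExampleStatus(MyExampleStatus::Codes::kThisIsAnExample,
+                           "header parse failed")
+        .WithData("header_size", header_size)
+        .AddHere();
+  }
+  return MyExampleStatus(MyExampleStatus::Codes::kDefaultValue);
+}
+```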
+
+
+## Quick usage guide
+
+If you have an existing enum, and would like to wrap it:
+```c++
+enum class MyExampleEnum : StatusCodeType {
+ kDefaultValue = 1,
+ kThisIsAnExample = 2,
+ kDontArgueInTheCommentSection = 3,
+};
+```
+
+Define a |TypedStatusTraits|, picking a name for the group of codes
+(copying the descriptive comments is not suggested):
+
+```c++
+struct MyExampleStatusTraits {
+  // If you do not have an existing enum, you can declare
+  // `enum class Codes { ... };` here instead of `using` an existing one.
+  using Codes = MyExampleEnum;
+  static constexpr StatusGroupType Group() { return "MyExampleStatus"; }
+  static constexpr absl::optional<Codes> DefaultEnumValue() {
+    return Codes::kDefaultValue;
+  }
+};
+```
+
+Bind your typename:
+```c++
+using MyExampleStatus = media::TypedStatus<MyExampleStatusTraits>;
+```
+
+Use your new type:
+```c++
+MyExampleStatus Foo() {
+ return MyExampleStatus::Codes::kThisIsAnExample;
+}
+
+int main() {
+ auto result = Foo();
+ switch(result.code()) {
+ case MyExampleStatus::Codes::...:
+ break;
+ ...
+ }
+}
+```
+
+For the common case where you'd like to return some constructed thing OR
+an error type, we've also created `TypedStatus<T>::Or<D>`.
+
+The `TypedStatus<T>::Or<D>` type can be constructed implicitly with either
+a `TypedStatus<T>`, a `T::Codes`, or a `D`.
+
+This type has methods:
+```c++
+bool has_value() const;
+bool has_error() const;
+
+// Return the error, if we have one.
+// Callers should ensure that this `has_error()`.
+TypedStatus<T> error() &&;
+
+// Return the value, if we have one.
+// Callers should ensure that this `has_value()`.
+D value() &&;
+
+// It is invalid to call `code()` on an `Or<D>` when has_value() is true and
+// the traits' DefaultEnumValue() is nullopt.
+T::Codes code();
+```
+
+Example usage:
+```c++
+MyExampleStatus::Or<std::unique_ptr<VideoDecoder>> CreateAndInitializeDecoder() {
+ std::unique_ptr<VideoDecoder> decoder = decoder_factory_->GiveMeYourBestDecoder();
+ auto init_status = decoder->Initialize(init_args_);
+ // If the decoder initialized successfully, then just return it.
+ if (init_status == InitStatusCodes::kOk)
+ return std::move(decoder);
+ // Otherwise, return a MyExampleStatus caused by the init status.
+ return MyExampleStatus(MyExampleEnum::kDontArgueInTheCommentSection).AddCause(
+ std::move(init_status));
+}
+
+int main() {
+ auto result = CreateAndInitializeDecoder();
+ if (result.has_value())
+ decoder_loop_->SetDecoder(std::move(result).value());
+ else
+ logger_->SendError(std::move(result).error());
+}
+
+```
+
+
+## Additional setup for mojo
+
+If you want to send a specialization of TypedStatus over mojo,
+add the following to media_types.mojom:
+
+```
+struct MyExampleEnum {
+ StatusBase? internal;
+};
+```
+
+And add the following to media/mojo/mojom/BUILD.gn near the `StatusData` type
+binding.
+
+```
+{
+ mojom = "media.mojom.MyExampleEnum",
+ cpp = "::media::MyExampleEnum"
+},
+```
+
+
+
+## Design decisions
+See go/typedstatus for design decisions.
diff --git a/chromium/media/base/status_codes.h b/chromium/media/base/status_codes.h
index b14beb9abdb..c2d60df383e 100644
--- a/chromium/media/base/status_codes.h
+++ b/chromium/media/base/status_codes.h
@@ -13,181 +13,184 @@
namespace media {
-using StatusCodeType = int32_t;
+using StatusCodeType = uint16_t;
// TODO(tmathmeyer, liberato, xhwang) These numbers are not yet finalized:
// DO NOT use them for reporting statistics, and DO NOT report them to any
// user-facing feature, including media log.
// Codes are grouped with a bitmask:
-// 0xFFFFFFFF
-// └─┬┘├┘└┴ enumeration within the group
-// │ └─ group code
-// └─ reserved for now
+// 0xFFFF
+// ├┘└┴ enumeration within the group
+// └─ group code
// 256 groups is more than anyone will ever need on a computer.
enum class StatusCode : StatusCodeType {
kOk = 0,
// General errors: 0x00
- kAborted = 0x00000001,
- kInvalidArgument = 0x00000002,
- kKeyFrameRequired = 0x00000003,
+ kAborted = 0x0001,
+ kInvalidArgument = 0x0002,
+ kKeyFrameRequired = 0x0003,
// Decoder Errors: 0x01
- kDecoderInitializeNeverCompleted = 0x00000101,
- kDecoderFailedDecode = 0x00000102,
- kDecoderUnsupportedProfile = 0x00000103,
- kDecoderUnsupportedCodec = 0x00000104,
- kDecoderUnsupportedConfig = 0x00000105,
- kEncryptedContentUnsupported = 0x00000106,
- kClearContentUnsupported = 0x00000107,
- kDecoderMissingCdmForEncryptedContent = 0x00000108,
- kDecoderInitializationFailed = 0x00000109, // Prefer this one.
+ kDecoderInitializeNeverCompleted = 0x0101,
+ kDecoderFailedDecode = 0x0102,
+ kDecoderUnsupportedProfile = 0x0103,
+ kDecoderUnsupportedCodec = 0x0104,
+ kDecoderUnsupportedConfig = 0x0105,
+ kEncryptedContentUnsupported = 0x0106,
+ kClearContentUnsupported = 0x0107,
+ kDecoderMissingCdmForEncryptedContent = 0x0108,
+ kDecoderInitializationFailed = 0x0109, // Prefer this one.
kDecoderFailedInitialization = kDecoderInitializationFailed, // Do not use.
- kDecoderCantChangeCodec = 0x0000010A,
- kDecoderCreationFailed = 0x0000010B, // Prefer this one.
+ kDecoderCantChangeCodec = 0x010A,
+ kDecoderCreationFailed = 0x010B, // Prefer this one.
kDecoderFailedCreation = kDecoderCreationFailed, // Do not use.
- kInitializationUnspecifiedFailure = 0x0000010C,
- kDecoderVideoFrameConstructionFailed = 0x0000010D,
- kMakeContextCurrentFailed = 0x0000010E,
+ kInitializationUnspecifiedFailure = 0x010C,
+ kDecoderVideoFrameConstructionFailed = 0x010D,
+ kMakeContextCurrentFailed = 0x010E,
// This is a temporary error for use only by existing code during the
// DecodeStatus => Status conversion.
- kDecodeErrorDoNotUse = 0x0000010F,
+ kDecodeErrorDoNotUse = 0x010F,
// Windows Errors: 0x02
- kWindowsWrappedHresult = 0x00000201,
- kWindowsApiNotAvailible = 0x00000202,
- kWindowsD3D11Error = 0x00000203,
+ kWindowsWrappedHresult = 0x0201,
+ kWindowsApiNotAvailible = 0x0202,
+ kWindowsD3D11Error = 0x0203,
// D3D11VideoDecoder Errors: 0x03
- kPostTextureFailed = 0x00000301,
- kPostAcquireStreamFailed = 0x00000302,
- kCreateEglStreamFailed = 0x00000303,
- kCreateEglStreamConsumerFailed = 0x00000304,
- kCreateEglStreamProducerFailed = 0x00000305,
- kCreateTextureSelectorFailed = 0x00000306,
- kQueryID3D11MultithreadFailed = 0x00000307,
- kGetDecoderConfigCountFailed = 0x00000308,
- kGetDecoderConfigFailed = 0x00000309,
- kProcessTextureFailed = 0x0000030A,
- kUnsupportedTextureFormatForBind = 0x0000030B,
- kCreateDecoderOutputViewFailed = 0x0000030C,
- kAllocateTextureForCopyingWrapperFailed = 0x0000030D,
- kCreateDecoderOutputTextureFailed = 0x0000030E,
- kCreateVideoProcessorInputViewFailed = 0x0000030F,
- kVideoProcessorBltFailed = 0x00000310,
- kCreateVideoProcessorOutputViewFailed = 0x00000311,
- kCreateVideoProcessorEnumeratorFailed = 0x00000312,
- kCreateVideoProcessorFailed = 0x00000313,
- kQueryVideoContextFailed = 0x00000314,
- kAcceleratorFlushFailed = 0x00000315,
- kTryAgainNotSupported = 0x00000316,
- kCryptoConfigFailed = 0x00000317,
- kDecoderBeginFrameFailed = 0x00000318,
- kReleaseDecoderBufferFailed = 0x00000319,
- kGetPicParamBufferFailed = 0x00000320,
- kReleasePicParamBufferFailed = 0x00000321,
- kGetBitstreamBufferFailed = 0x00000322,
- kReleaseBitstreamBufferFailed = 0x00000323,
- kGetSliceControlBufferFailed = 0x00000324,
- kReleaseSliceControlBufferFailed = 0x00000325,
- kDecoderEndFrameFailed = 0x00000326,
- kSubmitDecoderBuffersFailed = 0x00000327,
- kGetQuantBufferFailed = 0x00000328,
- kReleaseQuantBufferFailed = 0x00000329,
- kBitstreamBufferSliceTooBig = 0x00000330,
- kCreateSharedImageFailed = 0x00000331,
+ kPostTextureFailed = 0x0301,
+ kPostAcquireStreamFailed = 0x0302,
+ kCreateEglStreamFailed = 0x0303,
+ kCreateEglStreamConsumerFailed = 0x0304,
+ kCreateEglStreamProducerFailed = 0x0305,
+ kCreateTextureSelectorFailed = 0x0306,
+ kQueryID3D11MultithreadFailed = 0x0307,
+ kGetDecoderConfigCountFailed = 0x0308,
+ kGetDecoderConfigFailed = 0x0309,
+ kProcessTextureFailed = 0x030A,
+ kUnsupportedTextureFormatForBind = 0x030B,
+ kCreateDecoderOutputViewFailed = 0x030C,
+ kAllocateTextureForCopyingWrapperFailed = 0x030D,
+ kCreateDecoderOutputTextureFailed = 0x030E,
+ kCreateVideoProcessorInputViewFailed = 0x030F,
+ kVideoProcessorBltFailed = 0x0310,
+ kCreateVideoProcessorOutputViewFailed = 0x0311,
+ kCreateVideoProcessorEnumeratorFailed = 0x0312,
+ kCreateVideoProcessorFailed = 0x0313,
+ kQueryVideoContextFailed = 0x0314,
+ kAcceleratorFlushFailed = 0x0315,
+ kTryAgainNotSupported = 0x0316,
+ kCryptoConfigFailed = 0x0317,
+ kDecoderBeginFrameFailed = 0x0318,
+ kReleaseDecoderBufferFailed = 0x0319,
+ kGetPicParamBufferFailed = 0x0320,
+ kReleasePicParamBufferFailed = 0x0321,
+ kGetBitstreamBufferFailed = 0x0322,
+ kReleaseBitstreamBufferFailed = 0x0323,
+ kGetSliceControlBufferFailed = 0x0324,
+ kReleaseSliceControlBufferFailed = 0x0325,
+ kDecoderEndFrameFailed = 0x0326,
+ kSubmitDecoderBuffersFailed = 0x0327,
+ kGetQuantBufferFailed = 0x0328,
+ kReleaseQuantBufferFailed = 0x0329,
+ kBitstreamBufferSliceTooBig = 0x0330,
+ kCreateSharedImageFailed = 0x0331,
+ kGetKeyedMutexFailed = 0x0332,
+ kAcquireKeyedMutexFailed = 0x0333,
+ kReleaseKeyedMutexFailed = 0x0334,
+ kCreateSharedHandleFailed = 0x0335,
// MojoDecoder Errors: 0x04
- kMojoDecoderNoWrappedDecoder = 0x00000401,
- kMojoDecoderStoppedBeforeInitDone = 0x00000402,
- kMojoDecoderUnsupported = 0x00000403,
- kMojoDecoderNoConnection = 0x00000404,
- kMojoDecoderDeletedWithoutInitialization = 0x00000405,
+ kMojoDecoderNoWrappedDecoder = 0x0401,
+ kMojoDecoderStoppedBeforeInitDone = 0x0402,
+ kMojoDecoderUnsupported = 0x0403,
+ kMojoDecoderNoConnection = 0x0404,
+ kMojoDecoderDeletedWithoutInitialization = 0x0405,
// Chromeos Errors: 0x05
- kChromeOSVideoDecoderNoDecoders = 0x00000501,
- kV4l2NoDevice = 0x00000502,
- kV4l2FailedToStopStreamQueue = 0x00000503,
- kV4l2NoDecoder = 0x00000504,
- kV4l2FailedFileCapabilitiesCheck = 0x00000505,
- kV4l2FailedResourceAllocation = 0x00000506,
- kV4l2BadFormat = 0x00000507,
- kV4L2FailedToStartStreamQueue = 0x00000508,
- kVaapiReinitializedDuringDecode = 0x00000509,
- kVaapiFailedAcceleratorCreation = 0x00000510,
+ kChromeOSVideoDecoderNoDecoders = 0x0501,
+ kV4l2NoDevice = 0x0502,
+ kV4l2FailedToStopStreamQueue = 0x0503,
+ kV4l2NoDecoder = 0x0504,
+ kV4l2FailedFileCapabilitiesCheck = 0x0505,
+ kV4l2FailedResourceAllocation = 0x0506,
+ kV4l2BadFormat = 0x0507,
+ kV4L2FailedToStartStreamQueue = 0x0508,
+ kVaapiReinitializedDuringDecode = 0x0509,
+ kVaapiFailedAcceleratorCreation = 0x0510,
// Encoder Error: 0x06
- kEncoderInitializeNeverCompleted = 0x00000601,
- kEncoderInitializeTwice = 0x00000602,
- kEncoderFailedEncode = 0x00000603,
- kEncoderUnsupportedProfile = 0x00000604,
- kEncoderUnsupportedCodec = 0x00000605,
- kEncoderUnsupportedConfig = 0x00000606,
- kEncoderInitializationError = 0x00000607,
- kEncoderFailedFlush = 0x00000608,
+ kEncoderInitializeNeverCompleted = 0x0601,
+ kEncoderInitializeTwice = 0x0602,
+ kEncoderFailedEncode = 0x0603,
+ kEncoderUnsupportedProfile = 0x0604,
+ kEncoderUnsupportedCodec = 0x0605,
+ kEncoderUnsupportedConfig = 0x0606,
+ kEncoderInitializationError = 0x0607,
+ kEncoderFailedFlush = 0x0608,
// VaapiVideoDecoder: 0x07
- kVaapiBadContext = 0x00000701,
- kVaapiNoBuffer = 0x00000702,
- kVaapiNoBufferHandle = 0x00000703,
- kVaapiNoPixmap = 0x00000704,
- kVaapiNoImage = 0x00000705,
- kVaapiNoSurface = 0x00000706,
- kVaapiFailedToInitializeImage = 0x00000707,
- kVaapiFailedToBindTexture = 0x00000708,
- kVaapiFailedToBindImage = 0x00000709,
- kVaapiUnsupportedFormat = 0x0000070A,
- kVaapiFailedToExportImage = 0x0000070B,
- kVaapiBadImageSize = 0x0000070C,
- kVaapiNoTexture = 0x0000070D,
+ kVaapiBadContext = 0x0701,
+ kVaapiNoBuffer = 0x0702,
+ kVaapiNoBufferHandle = 0x0703,
+ kVaapiNoPixmap = 0x0704,
+ kVaapiNoImage = 0x0705,
+ kVaapiNoSurface = 0x0706,
+ kVaapiFailedToInitializeImage = 0x0707,
+ kVaapiFailedToBindTexture = 0x0708,
+ kVaapiFailedToBindImage = 0x0709,
+ kVaapiUnsupportedFormat = 0x070A,
+ kVaapiFailedToExportImage = 0x070B,
+ kVaapiBadImageSize = 0x070C,
+ kVaapiNoTexture = 0x070D,
// Format Errors: 0x08
- kH264ParsingError = 0x00000801,
- kH264BufferTooSmall = 0x00000802,
+ kH264ParsingError = 0x0801,
+ kH264BufferTooSmall = 0x0802,
// Pipeline Errors: 0x09
- // Deprecated: kPipelineErrorUrlNotFound = 0x00000901,
- kPipelineErrorNetwork = 0x00000902,
- kPipelineErrorDecode = 0x00000903,
- // Deprecated: kPipelineErrorDecrypt = 0x00000904,
- kPipelineErrorAbort = 0x00000905,
- kPipelineErrorInitializationFailed = 0x00000906,
- // Unused: 0x00000907
- kPipelineErrorCouldNotRender = 0x00000908,
- kPipelineErrorRead = 0x00000909,
- // Deprecated: kPipelineErrorOperationPending = 0x0000090a,
- kPipelineErrorInvalidState = 0x0000090b,
+ // Deprecated: kPipelineErrorUrlNotFound = 0x0901,
+ kPipelineErrorNetwork = 0x0902,
+ kPipelineErrorDecode = 0x0903,
+ // Deprecated: kPipelineErrorDecrypt = 0x0904,
+ kPipelineErrorAbort = 0x0905,
+ kPipelineErrorInitializationFailed = 0x0906,
+ // Unused: 0x0907
+ kPipelineErrorCouldNotRender = 0x0908,
+ kPipelineErrorRead = 0x0909,
+ // Deprecated: kPipelineErrorOperationPending = 0x090a,
+ kPipelineErrorInvalidState = 0x090b,
// Demuxer related errors.
- kPipelineErrorDemuxerErrorCouldNotOpen = 0x0000090c,
- kPipelineErrorDemuxerErrorCouldNotParse = 0x0000090d,
- kPipelineErrorDemuxerErrorNoSupportedStreams = 0x0000090e,
+ kPipelineErrorDemuxerErrorCouldNotOpen = 0x090c,
+ kPipelineErrorDemuxerErrorCouldNotParse = 0x090d,
+ kPipelineErrorDemuxerErrorNoSupportedStreams = 0x090e,
// Decoder related errors.
- kPipelineErrorDecoderErrorNotSupported = 0x0000090f,
+ kPipelineErrorDecoderErrorNotSupported = 0x090f,
// ChunkDemuxer related errors.
- kPipelineErrorChuckDemuxerErrorAppendFailed = 0x00000910,
- kPipelineErrorChunkDemuxerErrorEosStatusDecodeError = 0x00000911,
- kPipelineErrorChunkDemuxerErrorEosStatusNetworkError = 0x00000912,
+ kPipelineErrorChuckDemuxerErrorAppendFailed = 0x0910,
+ kPipelineErrorChunkDemuxerErrorEosStatusDecodeError = 0x0911,
+ kPipelineErrorChunkDemuxerErrorEosStatusNetworkError = 0x0912,
// Audio rendering errors.
- kPipelineErrorAudioRendererError = 0x00000913,
- // Deprecated: kPipelineErrorAudioRendererErrorSpliceFailed = 0x00000914,
- kPipelineErrorExternalRendererFailed = 0x00000915,
+ kPipelineErrorAudioRendererError = 0x0913,
+ // Deprecated: kPipelineErrorAudioRendererErrorSpliceFailed = 0x0914,
+ kPipelineErrorExternalRendererFailed = 0x0915,
// Android only. Used as a signal to fallback MediaPlayerRenderer, and thus
// not exactly an 'error' per say.
- kPipelineErrorDemuxerErrorDetectedHLS = 0x00000916,
+ kPipelineErrorDemuxerErrorDetectedHLS = 0x0916,
// Used when hardware context is reset (e.g. OS sleep/resume), where we should
// recreate the Renderer instead of fail the playback. See
// https://crbug.com/1208618
- kPipelineErrorHardwareContextReset = 0x00000917,
+ kPipelineErrorHardwareContextReset = 0x0917,
// Frame operation errors: 0x0A
- kUnsupportedFrameFormatError = 0x00000A01,
+ kUnsupportedFrameFormatError = 0x0A01,
// DecoderStream errors: 0x0B
- kDecoderStreamInErrorState = 0x00000B00,
- kDecoderStreamReinitFailed = 0x00000B01,
+ kDecoderStreamInErrorState = 0x0B00,
+ kDecoderStreamReinitFailed = 0x0B01,
// This is a temporary error for use while the demuxer doesn't return a
// proper status.
- kDecoderStreamDemuxerError = 0x00000B02,
+ kDecoderStreamDemuxerError = 0x0B02,
// DecodeStatus temporary codes. These names were chosen to match the
// DecodeStatus enum, so that un-converted code can DecodeStatus::OK/etc.
@@ -203,7 +206,7 @@ enum class StatusCode : StatusCodeType {
DECODE_ERROR = kDecodeErrorDoNotUse,
// Special codes
- kGenericErrorPleaseRemove = 0x79999999,
+ kGenericErrorPleaseRemove = 0x7999,
kCodeOnlyForTesting = std::numeric_limits<StatusCodeType>::max(),
kMaxValue = kCodeOnlyForTesting,
};
diff --git a/chromium/media/base/status_unittest.cc b/chromium/media/base/status_unittest.cc
index 2da38b2ca90..3df6fec9cbd 100644
--- a/chromium/media/base/status_unittest.cc
+++ b/chromium/media/base/status_unittest.cc
@@ -82,10 +82,8 @@ TEST_F(StatusTest, StaticOKMethodGivesCorrectSerialization) {
TEST_F(StatusTest, SingleLayerError) {
Status failed = FailEasily();
base::Value actual = MediaSerialize(failed);
- ASSERT_EQ(actual.DictSize(), 5ul);
- ASSERT_EQ(actual.FindIntPath("status_code"),
- static_cast<int32_t>(StatusCode::kCodeOnlyForTesting));
- ASSERT_EQ(*actual.FindStringPath("status_message"), "Message");
+ ASSERT_EQ(actual.DictSize(), 6ul);
+ ASSERT_EQ(*actual.FindStringPath("message"), "Message");
ASSERT_EQ(actual.FindListPath("stack")->GetList().size(), 1ul);
ASSERT_EQ(actual.FindListPath("causes")->GetList().size(), 0ul);
ASSERT_EQ(actual.FindDictPath("data")->DictSize(), 0ul);
@@ -102,10 +100,8 @@ TEST_F(StatusTest, SingleLayerError) {
TEST_F(StatusTest, MultipleErrorLayer) {
Status failed = FailRecursively(3);
base::Value actual = MediaSerialize(failed);
- ASSERT_EQ(actual.DictSize(), 5ul);
- ASSERT_EQ(actual.FindIntPath("status_code").value_or(-1),
- static_cast<int32_t>(StatusCode::kCodeOnlyForTesting));
- ASSERT_EQ(*actual.FindStringPath("status_message"), "Message");
+ ASSERT_EQ(actual.DictSize(), 6ul);
+ ASSERT_EQ(*actual.FindStringPath("message"), "Message");
ASSERT_EQ(actual.FindListPath("stack")->GetList().size(), 4ul);
ASSERT_EQ(actual.FindListPath("causes")->GetList().size(), 0ul);
ASSERT_EQ(actual.FindDictPath("data")->DictSize(), 0ul);
@@ -117,10 +113,8 @@ TEST_F(StatusTest, MultipleErrorLayer) {
TEST_F(StatusTest, CanHaveData) {
Status failed = FailWithData("example", "data");
base::Value actual = MediaSerialize(failed);
- ASSERT_EQ(actual.DictSize(), 5ul);
- ASSERT_EQ(actual.FindIntPath("status_code").value_or(-1),
- static_cast<int32_t>(StatusCode::kCodeOnlyForTesting));
- ASSERT_EQ(*actual.FindStringPath("status_message"), "Message");
+ ASSERT_EQ(actual.DictSize(), 6ul);
+ ASSERT_EQ(*actual.FindStringPath("message"), "Message");
ASSERT_EQ(actual.FindListPath("stack")->GetList().size(), 1ul);
ASSERT_EQ(actual.FindListPath("causes")->GetList().size(), 0ul);
ASSERT_EQ(actual.FindDictPath("data")->DictSize(), 1ul);
@@ -134,10 +128,8 @@ TEST_F(StatusTest, CanHaveData) {
TEST_F(StatusTest, CanUseCustomSerializer) {
Status failed = FailWithData("example", UselessThingToBeSerialized("F"));
base::Value actual = MediaSerialize(failed);
- ASSERT_EQ(actual.DictSize(), 5ul);
- ASSERT_EQ(actual.FindIntPath("status_code"),
- static_cast<int32_t>(StatusCode::kCodeOnlyForTesting));
- ASSERT_EQ(*actual.FindStringPath("status_message"), "Message");
+ ASSERT_EQ(actual.DictSize(), 6ul);
+ ASSERT_EQ(*actual.FindStringPath("message"), "Message");
ASSERT_EQ(actual.FindListPath("stack")->GetList().size(), 1ul);
ASSERT_EQ(actual.FindListPath("causes")->GetList().size(), 0ul);
ASSERT_EQ(actual.FindDictPath("data")->DictSize(), 1ul);
@@ -151,42 +143,57 @@ TEST_F(StatusTest, CanUseCustomSerializer) {
TEST_F(StatusTest, CausedByHasVector) {
Status causal = FailWithCause();
base::Value actual = MediaSerialize(causal);
- ASSERT_EQ(actual.DictSize(), 5ul);
- ASSERT_EQ(actual.FindIntPath("status_code").value_or(-1),
- static_cast<int32_t>(StatusCode::kCodeOnlyForTesting));
- ASSERT_EQ(*actual.FindStringPath("status_message"), "Message");
+ ASSERT_EQ(actual.DictSize(), 6ul);
+ ASSERT_EQ(*actual.FindStringPath("message"), "Message");
ASSERT_EQ(actual.FindListPath("stack")->GetList().size(), 1ul);
ASSERT_EQ(actual.FindListPath("causes")->GetList().size(), 1ul);
ASSERT_EQ(actual.FindDictPath("data")->DictSize(), 0ul);
base::Value& nested = actual.FindListPath("causes")->GetList()[0];
- ASSERT_EQ(nested.DictSize(), 5ul);
- ASSERT_EQ(nested.FindIntPath("status_code").value_or(-1),
- static_cast<int32_t>(StatusCode::kCodeOnlyForTesting));
- ASSERT_EQ(*nested.FindStringPath("status_message"), "Message");
+ ASSERT_EQ(nested.DictSize(), 6ul);
+ ASSERT_EQ(*nested.FindStringPath("message"), "Message");
ASSERT_EQ(nested.FindListPath("stack")->GetList().size(), 1ul);
ASSERT_EQ(nested.FindListPath("causes")->GetList().size(), 0ul);
ASSERT_EQ(nested.FindDictPath("data")->DictSize(), 0ul);
}
+TEST_F(StatusTest, CausedByCanAssignCopy) {
+ Status causal = FailWithCause();
+ Status copy_causal = causal;
+ base::Value causal_serialized = MediaSerialize(causal);
+ base::Value copy_causal_serialized = MediaSerialize(copy_causal);
+
+ base::Value& original =
+ causal_serialized.FindListPath("causes")->GetList()[0];
+ ASSERT_EQ(original.DictSize(), 6ul);
+ ASSERT_EQ(*original.FindStringPath("message"), "Message");
+ ASSERT_EQ(original.FindListPath("stack")->GetList().size(), 1ul);
+ ASSERT_EQ(original.FindListPath("causes")->GetList().size(), 0ul);
+ ASSERT_EQ(original.FindDictPath("data")->DictSize(), 0ul);
+
+ base::Value& copied =
+ copy_causal_serialized.FindListPath("causes")->GetList()[0];
+ ASSERT_EQ(copied.DictSize(), 6ul);
+ ASSERT_EQ(*copied.FindStringPath("message"), "Message");
+ ASSERT_EQ(copied.FindListPath("stack")->GetList().size(), 1ul);
+ ASSERT_EQ(copied.FindListPath("causes")->GetList().size(), 0ul);
+ ASSERT_EQ(copied.FindDictPath("data")->DictSize(), 0ul);
+}
+
TEST_F(StatusTest, CanCopyEasily) {
Status failed = FailEasily();
Status withData = DoSomethingGiveItBack(failed);
base::Value actual = MediaSerialize(failed);
- ASSERT_EQ(actual.DictSize(), 5ul);
- ASSERT_EQ(actual.FindIntPath("status_code"),
- static_cast<int32_t>(StatusCode::kCodeOnlyForTesting));
- ASSERT_EQ(*actual.FindStringPath("status_message"), "Message");
+ ASSERT_EQ(actual.DictSize(), 6ul);
+ ASSERT_EQ(*actual.FindStringPath("message"), "Message");
ASSERT_EQ(actual.FindListPath("stack")->GetList().size(), 1ul);
ASSERT_EQ(actual.FindListPath("causes")->GetList().size(), 0ul);
ASSERT_EQ(actual.FindDictPath("data")->DictSize(), 0ul);
actual = MediaSerialize(withData);
- ASSERT_EQ(actual.DictSize(), 5ul);
- ASSERT_EQ(actual.FindIntPath("status_code"),
- static_cast<int32_t>(StatusCode::kCodeOnlyForTesting));
- ASSERT_EQ(*actual.FindStringPath("status_message"), "Message");
+ ASSERT_EQ(actual.DictSize(), 6ul);
+ ASSERT_EQ(*actual.FindStringPath("message"), "Message");
ASSERT_EQ(actual.FindListPath("stack")->GetList().size(), 1ul);
ASSERT_EQ(actual.FindListPath("causes")->GetList().size(), 0ul);
ASSERT_EQ(actual.FindDictPath("data")->DictSize(), 1ul);
@@ -243,9 +250,51 @@ TEST_F(StatusTest, StatusOrCodeIsOkWithValue) {
EXPECT_EQ(status_or.code(), StatusCode::kOk);
}
-TEST_F(StatusTest, StatusOrCodeIsNotOkWithoutValue) {
- StatusOr<int> status_or(StatusCode::kCodeOnlyForTesting);
- EXPECT_EQ(status_or.code(), StatusCode::kCodeOnlyForTesting);
+enum class NoDefaultType : StatusCodeType { kFoo = 0, kBar = 1, kBaz = 2 };
+
+struct NoDefaultTypeTraits {
+ using Codes = NoDefaultType;
+ static constexpr StatusGroupType Group() {
+ return "GroupWithNoDefaultTypeForTests";
+ }
+ static constexpr absl::optional<NoDefaultType> DefaultEnumValue() {
+ return absl::nullopt;
+ }
+};
+
+TEST_F(StatusTest, TypedStatusWithNoDefault) {
+ using NDStatus = TypedStatus<NoDefaultTypeTraits>;
+
+ NDStatus foo = NoDefaultType::kFoo;
+ EXPECT_EQ(foo.code(), NoDefaultType::kFoo);
+
+ NDStatus bar = NoDefaultType::kBar;
+ EXPECT_EQ(bar.code(), NoDefaultType::kBar);
+
+ NDStatus::Or<std::string> err = NoDefaultType::kBaz;
+ NDStatus::Or<std::string> ok = std::string("kBaz");
+
+ EXPECT_TRUE(err.has_error());
+ EXPECT_EQ(err.code(), NoDefaultType::kBaz);
+ EXPECT_FALSE(ok.has_error());
+
+ base::Value actual = MediaSerialize(bar);
+ EXPECT_EQ(*actual.FindIntPath("code"), 1);
+}
+
+TEST_F(StatusTest, StatusOrEqOp) {
+ // Test the case of a non-default (non-ok) status
+ StatusOr<std::string> failed = FailEasily();
+ ASSERT_TRUE(failed == StatusCode::kCodeOnlyForTesting);
+ ASSERT_FALSE(failed == StatusCode::kOk);
+ ASSERT_TRUE(failed != StatusCode::kOk);
+ ASSERT_FALSE(failed != StatusCode::kCodeOnlyForTesting);
+
+ StatusOr<std::string> success = std::string("Kirkland > Seattle");
+ ASSERT_TRUE(success != StatusCode::kCodeOnlyForTesting);
+ ASSERT_FALSE(success != StatusCode::kOk);
+ ASSERT_TRUE(success == StatusCode::kOk);
+ ASSERT_FALSE(success == StatusCode::kCodeOnlyForTesting);
}
} // namespace media
diff --git a/chromium/media/base/stream_parser.h b/chromium/media/base/stream_parser.h
index a7de9bc69c2..a07421ae71e 100644
--- a/chromium/media/base/stream_parser.h
+++ b/chromium/media/base/stream_parser.h
@@ -109,6 +109,10 @@ class MEDIA_EXPORT StreamParser {
EncryptedMediaInitDataCB;
StreamParser();
+
+ StreamParser(const StreamParser&) = delete;
+ StreamParser& operator=(const StreamParser&) = delete;
+
virtual ~StreamParser();
// Initializes the parser with necessary callbacks. Must be called before any
@@ -145,9 +149,6 @@ class MEDIA_EXPORT StreamParser {
// implement ProcessChunks().
virtual bool Parse(const uint8_t* buf, int size) = 0;
virtual bool ProcessChunks(std::unique_ptr<BufferQueue> buffer_queue);
-
- private:
- DISALLOW_COPY_AND_ASSIGN(StreamParser);
};
// Appends to |merged_buffers| the provided buffers in decode-timestamp order.
diff --git a/chromium/media/base/stream_parser_buffer.h b/chromium/media/base/stream_parser_buffer.h
index 55b2fbeb139..0999d8e32b0 100644
--- a/chromium/media/base/stream_parser_buffer.h
+++ b/chromium/media/base/stream_parser_buffer.h
@@ -66,15 +66,15 @@ class DecodeTimestamp {
int64_t IntDiv(base::TimeDelta rhs) const { return ts_.IntDiv(rhs); }
static DecodeTimestamp FromSecondsD(double seconds) {
- return DecodeTimestamp(base::TimeDelta::FromSecondsD(seconds));
+ return DecodeTimestamp(base::Seconds(seconds));
}
static DecodeTimestamp FromMilliseconds(int64_t milliseconds) {
- return DecodeTimestamp(base::TimeDelta::FromMilliseconds(milliseconds));
+ return DecodeTimestamp(base::Milliseconds(milliseconds));
}
static DecodeTimestamp FromMicroseconds(int64_t microseconds) {
- return DecodeTimestamp(base::TimeDelta::FromMicroseconds(microseconds));
+ return DecodeTimestamp(base::Microseconds(microseconds));
}
// This method is used to explicitly call out when presentation timestamps
diff --git a/chromium/media/base/supported_types.cc b/chromium/media/base/supported_types.cc
index 07564623070..9c4e2eac5ea 100644
--- a/chromium/media/base/supported_types.cc
+++ b/chromium/media/base/supported_types.cc
@@ -206,26 +206,26 @@ bool IsVp9ProfileSupported(VideoCodecProfile profile) {
bool IsAudioCodecProprietary(AudioCodec codec) {
switch (codec) {
- case kCodecAAC:
- case kCodecAC3:
- case kCodecEAC3:
- case kCodecAMR_NB:
- case kCodecAMR_WB:
- case kCodecGSM_MS:
- case kCodecALAC:
- case kCodecMpegHAudio:
+ case AudioCodec::kAAC:
+ case AudioCodec::kAC3:
+ case AudioCodec::kEAC3:
+ case AudioCodec::kAMR_NB:
+ case AudioCodec::kAMR_WB:
+ case AudioCodec::kGSM_MS:
+ case AudioCodec::kALAC:
+ case AudioCodec::kMpegHAudio:
return true;
- case kCodecFLAC:
- case kCodecMP3:
- case kCodecOpus:
- case kCodecVorbis:
- case kCodecPCM:
- case kCodecPCM_MULAW:
- case kCodecPCM_S16BE:
- case kCodecPCM_S24BE:
- case kCodecPCM_ALAW:
- case kUnknownAudioCodec:
+ case AudioCodec::kFLAC:
+ case AudioCodec::kMP3:
+ case AudioCodec::kOpus:
+ case AudioCodec::kVorbis:
+ case AudioCodec::kPCM:
+ case AudioCodec::kPCM_MULAW:
+ case AudioCodec::kPCM_S16BE:
+ case AudioCodec::kPCM_S24BE:
+ case AudioCodec::kPCM_ALAW:
+ case AudioCodec::kUnknown:
return false;
}
@@ -243,7 +243,7 @@ bool IsDefaultSupportedAudioType(const AudioType& type) {
#endif
switch (type.codec) {
- case kCodecAAC:
+ case AudioCodec::kAAC:
if (type.profile != AudioCodecProfile::kXHE_AAC)
return true;
#if defined(OS_ANDROID)
@@ -253,31 +253,31 @@ bool IsDefaultSupportedAudioType(const AudioType& type) {
return false;
#endif
- case kCodecFLAC:
- case kCodecMP3:
- case kCodecOpus:
- case kCodecPCM:
- case kCodecPCM_MULAW:
- case kCodecPCM_S16BE:
- case kCodecPCM_S24BE:
- case kCodecPCM_ALAW:
- case kCodecVorbis:
+ case AudioCodec::kFLAC:
+ case AudioCodec::kMP3:
+ case AudioCodec::kOpus:
+ case AudioCodec::kPCM:
+ case AudioCodec::kPCM_MULAW:
+ case AudioCodec::kPCM_S16BE:
+ case AudioCodec::kPCM_S24BE:
+ case AudioCodec::kPCM_ALAW:
+ case AudioCodec::kVorbis:
return true;
- case kCodecAMR_NB:
- case kCodecAMR_WB:
- case kCodecGSM_MS:
+ case AudioCodec::kAMR_NB:
+ case AudioCodec::kAMR_WB:
+ case AudioCodec::kGSM_MS:
#if BUILDFLAG(IS_CHROMEOS_ASH)
return true;
#else
return false;
#endif
- case kCodecEAC3:
- case kCodecALAC:
- case kCodecAC3:
- case kCodecMpegHAudio:
- case kUnknownAudioCodec:
+ case AudioCodec::kEAC3:
+ case AudioCodec::kALAC:
+ case AudioCodec::kAC3:
+ case AudioCodec::kMpegHAudio:
+ case AudioCodec::kUnknown:
return false;
}
@@ -287,18 +287,18 @@ bool IsDefaultSupportedAudioType(const AudioType& type) {
bool IsVideoCodecProprietary(VideoCodec codec) {
switch (codec) {
- case kCodecVC1:
- case kCodecH264:
- case kCodecMPEG2:
- case kCodecMPEG4:
- case kCodecHEVC:
- case kCodecDolbyVision:
+ case VideoCodec::kVC1:
+ case VideoCodec::kH264:
+ case VideoCodec::kMPEG2:
+ case VideoCodec::kMPEG4:
+ case VideoCodec::kHEVC:
+ case VideoCodec::kDolbyVision:
return true;
- case kUnknownVideoCodec:
- case kCodecTheora:
- case kCodecVP8:
- case kCodecVP9:
- case kCodecAV1:
+ case VideoCodec::kUnknown:
+ case VideoCodec::kTheora:
+ case VideoCodec::kVP8:
+ case VideoCodec::kVP9:
+ case VideoCodec::kAV1:
return false;
}
@@ -318,7 +318,7 @@ bool IsDefaultSupportedVideoType(const VideoType& type) {
#endif
switch (type.codec) {
- case kCodecAV1:
+ case VideoCodec::kAV1:
// If the AV1 decoder is enabled, or if we're on Q or later, yes.
#if BUILDFLAG(ENABLE_AV1_DECODER)
return IsColorSpaceSupported(type.color_space);
@@ -333,29 +333,29 @@ bool IsDefaultSupportedVideoType(const VideoType& type) {
return false;
#endif
- case kCodecVP9:
+ case VideoCodec::kVP9:
// Color management required for HDR to not look terrible.
return IsColorSpaceSupported(type.color_space) &&
IsVp9ProfileSupported(type.profile);
- case kCodecH264:
- case kCodecVP8:
- case kCodecTheora:
+ case VideoCodec::kH264:
+ case VideoCodec::kVP8:
+ case VideoCodec::kTheora:
return true;
- case kCodecHEVC:
+ case VideoCodec::kHEVC:
#if BUILDFLAG(ENABLE_PLATFORM_ENCRYPTED_HEVC)
return IsColorSpaceSupported(type.color_space) &&
IsHevcProfileSupported(type.profile);
#else
return false;
#endif // BUILDFLAG(ENABLE_PLATFORM_ENCRYPTED_HEVC)
- case kUnknownVideoCodec:
- case kCodecVC1:
- case kCodecMPEG2:
- case kCodecDolbyVision:
+ case VideoCodec::kUnknown:
+ case VideoCodec::kVC1:
+ case VideoCodec::kMPEG2:
+ case VideoCodec::kDolbyVision:
return false;
- case kCodecMPEG4:
+ case VideoCodec::kMPEG4:
#if BUILDFLAG(IS_CHROMEOS_ASH)
return true;
#else
diff --git a/chromium/media/base/supported_types_unittest.cc b/chromium/media/base/supported_types_unittest.cc
index eddc5b3ccc1..93f3c2a4fbd 100644
--- a/chromium/media/base/supported_types_unittest.cc
+++ b/chromium/media/base/supported_types_unittest.cc
@@ -35,30 +35,35 @@ TEST(SupportedTypesTest, IsSupportedVideoTypeBasics) {
// Expect support for baseline configuration of known codecs.
EXPECT_TRUE(IsSupportedVideoType(
- {kCodecVP8, VP8PROFILE_ANY, kUnspecifiedLevel, kColorSpace}));
+ {VideoCodec::kVP8, VP8PROFILE_ANY, kUnspecifiedLevel, kColorSpace}));
EXPECT_TRUE(IsSupportedVideoType(
- {kCodecVP9, VP9PROFILE_PROFILE0, kUnspecifiedLevel, kColorSpace}));
- EXPECT_TRUE(IsSupportedVideoType({kCodecTheora, VIDEO_CODEC_PROFILE_UNKNOWN,
- kUnspecifiedLevel, kColorSpace}));
+ {VideoCodec::kVP9, VP9PROFILE_PROFILE0, kUnspecifiedLevel, kColorSpace}));
+ EXPECT_TRUE(
+ IsSupportedVideoType({VideoCodec::kTheora, VIDEO_CODEC_PROFILE_UNKNOWN,
+ kUnspecifiedLevel, kColorSpace}));
// Expect non-support for the following.
EXPECT_FALSE(
- IsSupportedVideoType({kUnknownVideoCodec, VIDEO_CODEC_PROFILE_UNKNOWN,
+ IsSupportedVideoType({VideoCodec::kUnknown, VIDEO_CODEC_PROFILE_UNKNOWN,
+ kUnspecifiedLevel, kColorSpace}));
+ EXPECT_FALSE(
+ IsSupportedVideoType({VideoCodec::kVC1, VIDEO_CODEC_PROFILE_UNKNOWN,
+ kUnspecifiedLevel, kColorSpace}));
+ EXPECT_FALSE(
+ IsSupportedVideoType({VideoCodec::kMPEG2, VIDEO_CODEC_PROFILE_UNKNOWN,
+ kUnspecifiedLevel, kColorSpace}));
+ EXPECT_FALSE(
+ IsSupportedVideoType({VideoCodec::kHEVC, VIDEO_CODEC_PROFILE_UNKNOWN,
kUnspecifiedLevel, kColorSpace}));
- EXPECT_FALSE(IsSupportedVideoType({kCodecVC1, VIDEO_CODEC_PROFILE_UNKNOWN,
- kUnspecifiedLevel, kColorSpace}));
- EXPECT_FALSE(IsSupportedVideoType({kCodecMPEG2, VIDEO_CODEC_PROFILE_UNKNOWN,
- kUnspecifiedLevel, kColorSpace}));
- EXPECT_FALSE(IsSupportedVideoType({kCodecHEVC, VIDEO_CODEC_PROFILE_UNKNOWN,
- kUnspecifiedLevel, kColorSpace}));
// Expect conditional support for the following.
+ EXPECT_EQ(kPropCodecsEnabled,
+ IsSupportedVideoType(
+ {VideoCodec::kH264, H264PROFILE_BASELINE, 1, kColorSpace}));
EXPECT_EQ(
- kPropCodecsEnabled,
- IsSupportedVideoType({kCodecH264, H264PROFILE_BASELINE, 1, kColorSpace}));
- EXPECT_EQ(kMpeg4Supported,
- IsSupportedVideoType({kCodecMPEG4, VIDEO_CODEC_PROFILE_UNKNOWN,
- kUnspecifiedLevel, kColorSpace}));
+ kMpeg4Supported,
+ IsSupportedVideoType({VideoCodec::kMPEG4, VIDEO_CODEC_PROFILE_UNKNOWN,
+ kUnspecifiedLevel, kColorSpace}));
}
TEST(SupportedTypesTest, IsSupportedVideoType_VP9TransferFunctions) {
@@ -91,8 +96,9 @@ TEST(SupportedTypesTest, IsSupportedVideoType_VP9TransferFunctions) {
kSupportedTransfers.end();
if (found)
num_found++;
- EXPECT_EQ(found, IsSupportedVideoType(
- {kCodecVP9, VP9PROFILE_PROFILE0, 1, color_space}));
+ EXPECT_EQ(found,
+ IsSupportedVideoType(
+ {VideoCodec::kVP9, VP9PROFILE_PROFILE0, 1, color_space}));
}
EXPECT_EQ(kSupportedTransfers.size(), num_found);
}
@@ -121,8 +127,9 @@ TEST(SupportedTypesTest, IsSupportedVideoType_VP9Primaries) {
kSupportedPrimaries.end();
if (found)
num_found++;
- EXPECT_EQ(found, IsSupportedVideoType(
- {kCodecVP9, VP9PROFILE_PROFILE0, 1, color_space}));
+ EXPECT_EQ(found,
+ IsSupportedVideoType(
+ {VideoCodec::kVP9, VP9PROFILE_PROFILE0, 1, color_space}));
}
EXPECT_EQ(kSupportedPrimaries.size(), num_found);
}
@@ -151,8 +158,9 @@ TEST(SupportedTypesTest, IsSupportedVideoType_VP9Matrix) {
kSupportedMatrix.find(color_space.matrix) != kSupportedMatrix.end();
if (found)
num_found++;
- EXPECT_EQ(found, IsSupportedVideoType(
- {kCodecVP9, VP9PROFILE_PROFILE0, 1, color_space}));
+ EXPECT_EQ(found,
+ IsSupportedVideoType(
+ {VideoCodec::kVP9, VP9PROFILE_PROFILE0, 1, color_space}));
}
EXPECT_EQ(kSupportedMatrix.size(), num_found);
}
@@ -165,9 +173,9 @@ TEST(SupportedTypesTest, IsSupportedVideoType_VP9Profiles) {
const int kUnspecifiedLevel = 0;
EXPECT_TRUE(IsSupportedVideoType(
- {kCodecVP9, VP9PROFILE_PROFILE0, kUnspecifiedLevel, kColorSpace}));
+ {VideoCodec::kVP9, VP9PROFILE_PROFILE0, kUnspecifiedLevel, kColorSpace}));
EXPECT_TRUE(IsSupportedVideoType(
- {kCodecVP9, VP9PROFILE_PROFILE1, kUnspecifiedLevel, kColorSpace}));
+ {VideoCodec::kVP9, VP9PROFILE_PROFILE1, kUnspecifiedLevel, kColorSpace}));
// VP9 Profile2 are supported on x86, ChromeOS on ARM and Mac/Win on ARM64.
// See third_party/libvpx/BUILD.gn.
@@ -175,7 +183,7 @@ TEST(SupportedTypesTest, IsSupportedVideoType_VP9Profiles) {
(defined(ARCH_CPU_ARM_FAMILY) && BUILDFLAG(IS_CHROMEOS_ASH)) || \
(defined(ARCH_CPU_ARM64) && (defined(OS_MAC) || defined(OS_WIN)))
EXPECT_TRUE(IsSupportedVideoType(
- {kCodecVP9, VP9PROFILE_PROFILE2, kUnspecifiedLevel, kColorSpace}));
+ {VideoCodec::kVP9, VP9PROFILE_PROFILE2, kUnspecifiedLevel, kColorSpace}));
#endif
}
@@ -184,43 +192,53 @@ TEST(SupportedTypesTest, IsSupportedAudioTypeWithSpatialRenderingBasics) {
// Dolby Atmos = E-AC3 (Dolby Digital Plus) + spatialRendering. Currently not
// supported.
EXPECT_FALSE(IsSupportedAudioType(
- {kCodecEAC3, AudioCodecProfile::kUnknown, is_spatial_rendering}));
+ {AudioCodec::kEAC3, AudioCodecProfile::kUnknown, is_spatial_rendering}));
// Expect non-support for codecs with which there is no spatial audio format.
EXPECT_FALSE(IsSupportedAudioType(
- {kCodecAAC, AudioCodecProfile::kUnknown, is_spatial_rendering}));
- EXPECT_FALSE(IsSupportedAudioType(
- {kCodecMP3, AudioCodecProfile::kUnknown, is_spatial_rendering}));
- EXPECT_FALSE(IsSupportedAudioType(
- {kCodecPCM, AudioCodecProfile::kUnknown, is_spatial_rendering}));
- EXPECT_FALSE(IsSupportedAudioType(
- {kCodecVorbis, AudioCodecProfile::kUnknown, is_spatial_rendering}));
- EXPECT_FALSE(IsSupportedAudioType(
- {kCodecFLAC, AudioCodecProfile::kUnknown, is_spatial_rendering}));
- EXPECT_FALSE(IsSupportedAudioType(
- {kCodecAMR_NB, AudioCodecProfile::kUnknown, is_spatial_rendering}));
- EXPECT_FALSE(IsSupportedAudioType(
- {kCodecAMR_WB, AudioCodecProfile::kUnknown, is_spatial_rendering}));
- EXPECT_FALSE(IsSupportedAudioType(
- {kCodecPCM_MULAW, AudioCodecProfile::kUnknown, is_spatial_rendering}));
+ {AudioCodec::kAAC, AudioCodecProfile::kUnknown, is_spatial_rendering}));
EXPECT_FALSE(IsSupportedAudioType(
- {kCodecGSM_MS, AudioCodecProfile::kUnknown, is_spatial_rendering}));
+ {AudioCodec::kMP3, AudioCodecProfile::kUnknown, is_spatial_rendering}));
EXPECT_FALSE(IsSupportedAudioType(
- {kCodecPCM_S16BE, AudioCodecProfile::kUnknown, is_spatial_rendering}));
- EXPECT_FALSE(IsSupportedAudioType(
- {kCodecPCM_S24BE, AudioCodecProfile::kUnknown, is_spatial_rendering}));
- EXPECT_FALSE(IsSupportedAudioType(
- {kCodecOpus, AudioCodecProfile::kUnknown, is_spatial_rendering}));
- EXPECT_FALSE(IsSupportedAudioType(
- {kCodecPCM_ALAW, AudioCodecProfile::kUnknown, is_spatial_rendering}));
+ {AudioCodec::kPCM, AudioCodecProfile::kUnknown, is_spatial_rendering}));
+ EXPECT_FALSE(
+ IsSupportedAudioType({AudioCodec::kVorbis, AudioCodecProfile::kUnknown,
+ is_spatial_rendering}));
EXPECT_FALSE(IsSupportedAudioType(
- {kCodecALAC, AudioCodecProfile::kUnknown, is_spatial_rendering}));
+ {AudioCodec::kFLAC, AudioCodecProfile::kUnknown, is_spatial_rendering}));
+ EXPECT_FALSE(
+ IsSupportedAudioType({AudioCodec::kAMR_NB, AudioCodecProfile::kUnknown,
+ is_spatial_rendering}));
+ EXPECT_FALSE(
+ IsSupportedAudioType({AudioCodec::kAMR_WB, AudioCodecProfile::kUnknown,
+ is_spatial_rendering}));
+ EXPECT_FALSE(
+ IsSupportedAudioType({AudioCodec::kPCM_MULAW, AudioCodecProfile::kUnknown,
+ is_spatial_rendering}));
+ EXPECT_FALSE(
+ IsSupportedAudioType({AudioCodec::kGSM_MS, AudioCodecProfile::kUnknown,
+ is_spatial_rendering}));
+ EXPECT_FALSE(
+ IsSupportedAudioType({AudioCodec::kPCM_S16BE, AudioCodecProfile::kUnknown,
+ is_spatial_rendering}));
+ EXPECT_FALSE(
+ IsSupportedAudioType({AudioCodec::kPCM_S24BE, AudioCodecProfile::kUnknown,
+ is_spatial_rendering}));
EXPECT_FALSE(IsSupportedAudioType(
- {kCodecAC3, AudioCodecProfile::kUnknown, is_spatial_rendering}));
+ {AudioCodec::kOpus, AudioCodecProfile::kUnknown, is_spatial_rendering}));
+ EXPECT_FALSE(
+ IsSupportedAudioType({AudioCodec::kPCM_ALAW, AudioCodecProfile::kUnknown,
+ is_spatial_rendering}));
EXPECT_FALSE(IsSupportedAudioType(
- {kCodecMpegHAudio, AudioCodecProfile::kUnknown, is_spatial_rendering}));
+ {AudioCodec::kALAC, AudioCodecProfile::kUnknown, is_spatial_rendering}));
EXPECT_FALSE(IsSupportedAudioType(
- {kUnknownAudioCodec, AudioCodecProfile::kUnknown, is_spatial_rendering}));
+ {AudioCodec::kAC3, AudioCodecProfile::kUnknown, is_spatial_rendering}));
+ EXPECT_FALSE(IsSupportedAudioType({AudioCodec::kMpegHAudio,
+ AudioCodecProfile::kUnknown,
+ is_spatial_rendering}));
+ EXPECT_FALSE(
+ IsSupportedAudioType({AudioCodec::kUnknown, AudioCodecProfile::kUnknown,
+ is_spatial_rendering}));
}
TEST(SupportedTypesTest, XHE_AACSupportedOnAndroidOnly) {
@@ -231,11 +249,12 @@ TEST(SupportedTypesTest, XHE_AACSupportedOnAndroidOnly) {
base::android::BuildInfo::GetInstance()->sdk_int() >=
base::android::SDK_VERSION_P;
- EXPECT_EQ(is_supported, IsSupportedAudioType(
- {kCodecAAC, AudioCodecProfile::kXHE_AAC, false}));
+ EXPECT_EQ(is_supported,
+ IsSupportedAudioType(
+ {AudioCodec::kAAC, AudioCodecProfile::kXHE_AAC, false}));
#else
- EXPECT_FALSE(
- IsSupportedAudioType({kCodecAAC, AudioCodecProfile::kXHE_AAC, false}));
+ EXPECT_FALSE(IsSupportedAudioType(
+ {AudioCodec::kAAC, AudioCodecProfile::kXHE_AAC, false}));
#endif
}
@@ -248,42 +267,45 @@ TEST(SupportedTypesTest, IsSupportedVideoTypeWithHdrMetadataBasics) {
// Expect support for baseline configuration of known codecs.
EXPECT_TRUE(IsSupportedVideoType(
- {kCodecVP8, VP8PROFILE_ANY, kUnspecifiedLevel, color_space}));
+ {VideoCodec::kVP8, VP8PROFILE_ANY, kUnspecifiedLevel, color_space}));
EXPECT_TRUE(IsSupportedVideoType(
- {kCodecVP9, VP9PROFILE_PROFILE0, kUnspecifiedLevel, color_space}));
- EXPECT_TRUE(IsSupportedVideoType({kCodecTheora, VIDEO_CODEC_PROFILE_UNKNOWN,
- kUnspecifiedLevel, color_space}));
+ {VideoCodec::kVP9, VP9PROFILE_PROFILE0, kUnspecifiedLevel, color_space}));
+ EXPECT_TRUE(
+ IsSupportedVideoType({VideoCodec::kTheora, VIDEO_CODEC_PROFILE_UNKNOWN,
+ kUnspecifiedLevel, color_space}));
// All combinations of combinations of color gamuts and transfer functions
// should be supported.
color_space.primaries = VideoColorSpace::PrimaryID::SMPTEST431_2;
color_space.transfer = VideoColorSpace::TransferID::SMPTEST2084;
EXPECT_TRUE(IsSupportedVideoType(
- {kCodecVP8, VP8PROFILE_ANY, kUnspecifiedLevel, color_space}));
+ {VideoCodec::kVP8, VP8PROFILE_ANY, kUnspecifiedLevel, color_space}));
EXPECT_TRUE(IsSupportedVideoType(
- {kCodecVP9, VP9PROFILE_PROFILE0, kUnspecifiedLevel, color_space}));
- EXPECT_TRUE(IsSupportedVideoType({kCodecTheora, VIDEO_CODEC_PROFILE_UNKNOWN,
- kUnspecifiedLevel, color_space}));
+ {VideoCodec::kVP9, VP9PROFILE_PROFILE0, kUnspecifiedLevel, color_space}));
+ EXPECT_TRUE(
+ IsSupportedVideoType({VideoCodec::kTheora, VIDEO_CODEC_PROFILE_UNKNOWN,
+ kUnspecifiedLevel, color_space}));
color_space.primaries = VideoColorSpace::PrimaryID::BT2020;
color_space.transfer = VideoColorSpace::TransferID::ARIB_STD_B67;
EXPECT_TRUE(IsSupportedVideoType(
- {kCodecVP8, VP8PROFILE_ANY, kUnspecifiedLevel, color_space}));
+ {VideoCodec::kVP8, VP8PROFILE_ANY, kUnspecifiedLevel, color_space}));
EXPECT_TRUE(IsSupportedVideoType(
- {kCodecVP9, VP9PROFILE_PROFILE0, kUnspecifiedLevel, color_space}));
- EXPECT_TRUE(IsSupportedVideoType({kCodecTheora, VIDEO_CODEC_PROFILE_UNKNOWN,
- kUnspecifiedLevel, color_space}));
+ {VideoCodec::kVP9, VP9PROFILE_PROFILE0, kUnspecifiedLevel, color_space}));
+ EXPECT_TRUE(
+ IsSupportedVideoType({VideoCodec::kTheora, VIDEO_CODEC_PROFILE_UNKNOWN,
+ kUnspecifiedLevel, color_space}));
// No HDR metadata types are supported.
EXPECT_FALSE(
- IsSupportedVideoType({kCodecVP8, VP8PROFILE_ANY, kUnspecifiedLevel,
+ IsSupportedVideoType({VideoCodec::kVP8, VP8PROFILE_ANY, kUnspecifiedLevel,
color_space, gfx::HdrMetadataType::kSmpteSt2086}));
- EXPECT_FALSE(IsSupportedVideoType({kCodecVP8, VP8PROFILE_ANY,
+ EXPECT_FALSE(IsSupportedVideoType({VideoCodec::kVP8, VP8PROFILE_ANY,
kUnspecifiedLevel, color_space,
gfx::HdrMetadataType::kSmpteSt2094_10}));
- EXPECT_FALSE(IsSupportedVideoType({kCodecVP8, VP8PROFILE_ANY,
+ EXPECT_FALSE(IsSupportedVideoType({VideoCodec::kVP8, VP8PROFILE_ANY,
kUnspecifiedLevel, color_space,
gfx::HdrMetadataType::kSmpteSt2094_40}));
}
diff --git a/chromium/media/base/supported_video_decoder_config_unittest.cc b/chromium/media/base/supported_video_decoder_config_unittest.cc
index dd3dabdbe3d..14f0ad9146f 100644
--- a/chromium/media/base/supported_video_decoder_config_unittest.cc
+++ b/chromium/media/base/supported_video_decoder_config_unittest.cc
@@ -13,7 +13,7 @@ class SupportedVideoDecoderConfigTest : public ::testing::Test {
public:
SupportedVideoDecoderConfigTest()
: decoder_config_(
- TestVideoConfig::NormalCodecProfile(kCodecH264,
+ TestVideoConfig::NormalCodecProfile(VideoCodec::kH264,
H264PROFILE_EXTENDED)) {
supported_config_.profile_min = H264PROFILE_MIN;
supported_config_.profile_max = H264PROFILE_MAX;
diff --git a/chromium/media/base/svc_scalability_mode.cc b/chromium/media/base/svc_scalability_mode.cc
new file mode 100644
index 00000000000..13eca157f43
--- /dev/null
+++ b/chromium/media/base/svc_scalability_mode.cc
@@ -0,0 +1,79 @@
+// Copyright 2021 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/base/svc_scalability_mode.h"
+
+#include "base/notreached.h"
+
+namespace media {
+
+const char* GetScalabilityModeName(SVCScalabilityMode scalability_mode) {
+ switch (scalability_mode) {
+ case SVCScalabilityMode::kL1T2:
+ return "L1T2";
+ case SVCScalabilityMode::kL1T3:
+ return "L1T3";
+ case SVCScalabilityMode::kL2T1:
+ return "L2T1";
+ case SVCScalabilityMode::kL2T2:
+ return "L2T2";
+ case SVCScalabilityMode::kL2T3:
+ return "L2T3";
+ case SVCScalabilityMode::kL3T1:
+ return "L3T1";
+ case SVCScalabilityMode::kL3T2:
+ return "L3T2";
+ case SVCScalabilityMode::kL3T3:
+ return "L3T3";
+ case SVCScalabilityMode::kL2T1h:
+ return "L2T1h";
+ case SVCScalabilityMode::kL2T2h:
+ return "L2T2h";
+ case SVCScalabilityMode::kL2T3h:
+ return "L2T3h";
+ case SVCScalabilityMode::kS2T1:
+ return "S2T1";
+ case SVCScalabilityMode::kS2T2:
+ return "S2T2";
+ case SVCScalabilityMode::kS2T3:
+ return "S2T3";
+ case SVCScalabilityMode::kS2T1h:
+ return "S2T1h";
+ case SVCScalabilityMode::kS2T2h:
+ return "S2T2h";
+ case SVCScalabilityMode::kS2T3h:
+ return "S2T3h";
+ case SVCScalabilityMode::kS3T1:
+ return "S3T1";
+ case SVCScalabilityMode::kS3T2:
+ return "S3T2";
+ case SVCScalabilityMode::kS3T3:
+ return "S3T3";
+ case SVCScalabilityMode::kS3T1h:
+ return "S3T1h";
+ case SVCScalabilityMode::kS3T2h:
+ return "S3T2h";
+ case SVCScalabilityMode::kS3T3h:
+ return "S3T3h";
+ case SVCScalabilityMode::kL2T2Key:
+ return "L2T2_KEY";
+ case SVCScalabilityMode::kL2T2KeyShift:
+ return "L2T2_KEY_SHIFT";
+ case SVCScalabilityMode::kL2T3Key:
+ return "L2T3_KEY";
+ case SVCScalabilityMode::kL2T3KeyShift:
+ return "L2T3_KEY_SHIFT";
+ case SVCScalabilityMode::kL3T2Key:
+ return "L3T2_KEY";
+ case SVCScalabilityMode::kL3T2KeyShift:
+ return "L3T2_KEY_SHIFT";
+ case SVCScalabilityMode::kL3T3Key:
+ return "L3T3_KEY";
+ case SVCScalabilityMode::kL3T3KeyShift:
+ return "L3T3_KEY_SHIFT";
+ }
+ NOTREACHED();
+ return "";
+}
+} // namespace media
diff --git a/chromium/media/base/svc_scalability_mode.h b/chromium/media/base/svc_scalability_mode.h
new file mode 100644
index 00000000000..65ce610af07
--- /dev/null
+++ b/chromium/media/base/svc_scalability_mode.h
@@ -0,0 +1,54 @@
+// Copyright 2021 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_BASE_SVC_SCALABILITY_MODE_H_
+#define MEDIA_BASE_SVC_SCALABILITY_MODE_H_
+
+#include "media/base/media_export.h"
+
+namespace media {
+
+// This enum class is the corresponding implementation with WebRTC-SVC.
+// See https://www.w3.org/TR/webrtc-svc/#scalabilitymodes* for the detail.
+enum class SVCScalabilityMode {
+ kL1T2,
+ kL1T3,
+ kL2T1,
+ kL2T2,
+ kL2T3,
+ kL3T1,
+ kL3T2,
+ kL3T3,
+ kL2T1h,
+ kL2T2h,
+ kL2T3h,
+ kS2T1,
+ kS2T2,
+ kS2T3,
+ kS2T1h,
+ kS2T2h,
+ kS2T3h,
+ kS3T1,
+ kS3T2,
+ kS3T3,
+ kS3T1h,
+ kS3T2h,
+ kS3T3h,
+ kL2T2Key,
+ kL2T2KeyShift,
+ kL2T3Key,
+ kL2T3KeyShift,
+ kL3T2Key,
+ kL3T2KeyShift,
+ kL3T3Key,
+ kL3T3KeyShift,
+};
+
+// Gets the WebRTC-SVC Spec defined scalability mode name.
+MEDIA_EXPORT const char* GetScalabilityModeName(
+ SVCScalabilityMode scalability_mode);
+
+} // namespace media
+
+#endif // MEDIA_BASE_SVC_SCALABILITY_MODE_H_
diff --git a/chromium/media/base/test_data_util.cc b/chromium/media/base/test_data_util.cc
index b3ccf07d9de..4940d4c4adc 100644
--- a/chromium/media/base/test_data_util.cc
+++ b/chromium/media/base/test_data_util.cc
@@ -167,14 +167,14 @@ const uint8_t kKeyId[] = {0x30, 0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37,
// See http://crbug.com/592067
// Common test results.
-const char kFailed[] = "FAILED";
+const char kFailedTitle[] = "FAILED";
// Upper case event name set by Utils.installTitleEventHandler().
-const char kEnded[] = "ENDED";
-const char kErrorEvent[] = "ERROR";
+const char kEndedTitle[] = "ENDED";
+const char kErrorEventTitle[] = "ERROR";
// Lower case event name as set by Utils.failTest().
-const char kError[] = "error";
+const char kErrorTitle[] = "error";
const base::FilePath::CharType kTestDataPath[] =
FILE_PATH_LITERAL("media/test/data");
diff --git a/chromium/media/base/test_data_util.h b/chromium/media/base/test_data_util.h
index ba6a3427ba7..79adec7de2e 100644
--- a/chromium/media/base/test_data_util.h
+++ b/chromium/media/base/test_data_util.h
@@ -18,10 +18,10 @@ namespace media {
class DecoderBuffer;
// Common test results.
-extern const char kFailed[];
-extern const char kEnded[];
-extern const char kErrorEvent[];
-extern const char kError[];
+extern const char kFailedTitle[];
+extern const char kEndedTitle[];
+extern const char kErrorEventTitle[];
+extern const char kErrorTitle[];
// Returns a file path for a file in the media/test/data directory.
base::FilePath GetTestDataFilePath(const std::string& name);
diff --git a/chromium/media/base/test_helpers.cc b/chromium/media/base/test_helpers.cc
index 3f2d50050d6..a046a9181aa 100644
--- a/chromium/media/base/test_helpers.cc
+++ b/chromium/media/base/test_helpers.cc
@@ -146,24 +146,24 @@ static VideoDecoderConfig GetTestConfig(VideoCodec codec,
static VideoCodecProfile MinProfile(VideoCodec codec) {
switch (codec) {
- case kUnknownVideoCodec:
- case kCodecVC1:
- case kCodecMPEG2:
- case kCodecMPEG4:
+ case VideoCodec::kUnknown:
+ case VideoCodec::kVC1:
+ case VideoCodec::kMPEG2:
+ case VideoCodec::kMPEG4:
return VIDEO_CODEC_PROFILE_UNKNOWN;
- case kCodecH264:
+ case VideoCodec::kH264:
return H264PROFILE_MIN;
- case kCodecTheora:
+ case VideoCodec::kTheora:
return THEORAPROFILE_MIN;
- case kCodecVP8:
+ case VideoCodec::kVP8:
return VP8PROFILE_MIN;
- case kCodecVP9:
+ case VideoCodec::kVP9:
return VP9PROFILE_MIN;
- case kCodecHEVC:
+ case VideoCodec::kHEVC:
return HEVCPROFILE_MIN;
- case kCodecDolbyVision:
+ case VideoCodec::kDolbyVision:
return DOLBYVISION_PROFILE0;
- case kCodecAV1:
+ case VideoCodec::kAV1:
return AV1PROFILE_MIN;
}
}
@@ -174,7 +174,7 @@ static const gfx::Size kExtraLargeSize(15360, 8640);
// static
VideoDecoderConfig TestVideoConfig::Invalid() {
- return GetTestConfig(kUnknownVideoCodec, VIDEO_CODEC_PROFILE_UNKNOWN,
+ return GetTestConfig(VideoCodec::kUnknown, VIDEO_CODEC_PROFILE_UNKNOWN,
VideoColorSpace::JPEG(), VIDEO_ROTATION_0, kNormalSize,
false);
}
@@ -195,7 +195,7 @@ VideoDecoderConfig TestVideoConfig::NormalWithColorSpace(
// static
VideoDecoderConfig TestVideoConfig::NormalH264(VideoCodecProfile config) {
- return GetTestConfig(kCodecH264, MinProfile(kCodecH264),
+ return GetTestConfig(VideoCodec::kH264, MinProfile(VideoCodec::kH264),
VideoColorSpace::JPEG(), VIDEO_ROTATION_0, kNormalSize,
false);
}
@@ -217,7 +217,7 @@ VideoDecoderConfig TestVideoConfig::NormalEncrypted(VideoCodec codec,
// static
VideoDecoderConfig TestVideoConfig::NormalRotated(VideoRotation rotation) {
- return GetTestConfig(kCodecVP8, MinProfile(kCodecVP8),
+ return GetTestConfig(VideoCodec::kVP8, MinProfile(VideoCodec::kVP8),
VideoColorSpace::JPEG(), rotation, kNormalSize, false);
}
@@ -274,25 +274,25 @@ gfx::Size TestVideoConfig::ExtraLargeCodedSize() {
}
AudioDecoderConfig TestAudioConfig::Normal() {
- return AudioDecoderConfig(kCodecVorbis, kSampleFormatPlanarF32,
+ return AudioDecoderConfig(AudioCodec::kVorbis, kSampleFormatPlanarF32,
CHANNEL_LAYOUT_STEREO, NormalSampleRateValue(),
EmptyExtraData(), EncryptionScheme::kUnencrypted);
}
AudioDecoderConfig TestAudioConfig::NormalEncrypted() {
- return AudioDecoderConfig(kCodecVorbis, kSampleFormatPlanarF32,
+ return AudioDecoderConfig(AudioCodec::kVorbis, kSampleFormatPlanarF32,
CHANNEL_LAYOUT_STEREO, NormalSampleRateValue(),
EmptyExtraData(), EncryptionScheme::kCenc);
}
AudioDecoderConfig TestAudioConfig::HighSampleRate() {
- return AudioDecoderConfig(kCodecVorbis, kSampleFormatPlanarF32,
+ return AudioDecoderConfig(AudioCodec::kVorbis, kSampleFormatPlanarF32,
CHANNEL_LAYOUT_STEREO, HighSampleRateValue(),
EmptyExtraData(), EncryptionScheme::kUnencrypted);
}
AudioDecoderConfig TestAudioConfig::HighSampleRateEncrypted() {
- return AudioDecoderConfig(kCodecVorbis, kSampleFormatPlanarF32,
+ return AudioDecoderConfig(AudioCodec::kVorbis, kSampleFormatPlanarF32,
CHANNEL_LAYOUT_STEREO, HighSampleRateValue(),
EmptyExtraData(), EncryptionScheme::kCenc);
}
diff --git a/chromium/media/base/test_helpers.h b/chromium/media/base/test_helpers.h
index 81e73c76aca..1d91266d3e3 100644
--- a/chromium/media/base/test_helpers.h
+++ b/chromium/media/base/test_helpers.h
@@ -50,6 +50,10 @@ class WaitableMessageLoopEvent {
public:
WaitableMessageLoopEvent();
explicit WaitableMessageLoopEvent(base::TimeDelta timeout);
+
+ WaitableMessageLoopEvent(const WaitableMessageLoopEvent&) = delete;
+ WaitableMessageLoopEvent& operator=(const WaitableMessageLoopEvent&) = delete;
+
~WaitableMessageLoopEvent();
// Returns a thread-safe closure that will signal |this| when executed.
@@ -79,8 +83,6 @@ class WaitableMessageLoopEvent {
const base::TimeDelta timeout_;
SEQUENCE_CHECKER(sequence_checker_);
-
- DISALLOW_COPY_AND_ASSIGN(WaitableMessageLoopEvent);
};
// Provides pre-canned VideoDecoderConfig. These types are used for tests that
@@ -90,30 +92,32 @@ class TestVideoConfig {
// Returns a configuration that is invalid.
static VideoDecoderConfig Invalid();
- static VideoDecoderConfig Normal(VideoCodec codec = kCodecVP8);
+ static VideoDecoderConfig Normal(VideoCodec codec = VideoCodec::kVP8);
static VideoDecoderConfig NormalWithColorSpace(
VideoCodec codec,
const VideoColorSpace& color_space);
static VideoDecoderConfig NormalH264(VideoCodecProfile = H264PROFILE_MIN);
static VideoDecoderConfig NormalCodecProfile(
- VideoCodec codec = kCodecVP8,
+ VideoCodec codec = VideoCodec::kVP8,
VideoCodecProfile profile = VP8PROFILE_MIN);
- static VideoDecoderConfig NormalEncrypted(VideoCodec codec = kCodecVP8,
+ static VideoDecoderConfig NormalEncrypted(VideoCodec codec = VideoCodec::kVP8,
VideoCodecProfile = VP8PROFILE_MIN);
static VideoDecoderConfig NormalRotated(VideoRotation rotation);
// Returns a configuration that is larger in dimensions than Normal().
- static VideoDecoderConfig Large(VideoCodec codec = kCodecVP8);
- static VideoDecoderConfig LargeEncrypted(VideoCodec codec = kCodecVP8);
+ static VideoDecoderConfig Large(VideoCodec codec = VideoCodec::kVP8);
+ static VideoDecoderConfig LargeEncrypted(VideoCodec codec = VideoCodec::kVP8);
// Returns a configuration that is larger in dimensions that Large().
- static VideoDecoderConfig ExtraLarge(VideoCodec codec = kCodecVP8);
- static VideoDecoderConfig ExtraLargeEncrypted(VideoCodec codec = kCodecVP8);
+ static VideoDecoderConfig ExtraLarge(VideoCodec codec = VideoCodec::kVP8);
+ static VideoDecoderConfig ExtraLargeEncrypted(
+ VideoCodec codec = VideoCodec::kVP8);
static VideoDecoderConfig Custom(gfx::Size size,
- VideoCodec codec = kCodecVP8);
- static VideoDecoderConfig CustomEncrypted(gfx::Size size,
- VideoCodec codec = kCodecVP8);
+ VideoCodec codec = VideoCodec::kVP8);
+ static VideoDecoderConfig CustomEncrypted(
+ gfx::Size size,
+ VideoCodec codec = VideoCodec::kVP8);
// Returns coded size for Normal and Large config.
static gfx::Size NormalCodedSize();
diff --git a/chromium/media/base/text_ranges.h b/chromium/media/base/text_ranges.h
index 2b75822c9fd..e65553e1aee 100644
--- a/chromium/media/base/text_ranges.h
+++ b/chromium/media/base/text_ranges.h
@@ -20,6 +20,10 @@ namespace media {
class MEDIA_EXPORT TextRanges {
public:
TextRanges();
+
+ TextRanges(const TextRanges&) = delete;
+ TextRanges& operator=(const TextRanges&) = delete;
+
~TextRanges();
// Reset the current range pointer, such that we bind to a new range
@@ -88,8 +92,6 @@ class MEDIA_EXPORT TextRanges {
// The time range to which we bind following a Reset().
RangeMap::iterator curr_range_itr_;
-
- DISALLOW_COPY_AND_ASSIGN(TextRanges);
};
} // namespace media
diff --git a/chromium/media/base/text_ranges_unittest.cc b/chromium/media/base/text_ranges_unittest.cc
index 69cf2e3f9c4..979ec2dd9b3 100644
--- a/chromium/media/base/text_ranges_unittest.cc
+++ b/chromium/media/base/text_ranges_unittest.cc
@@ -13,9 +13,7 @@ namespace media {
class TextRangesTest : public ::testing::Test {
protected:
- bool AddCue(int seconds) {
- return ranges_.AddCue(base::TimeDelta::FromSeconds(seconds));
- }
+ bool AddCue(int seconds) { return ranges_.AddCue(base::Seconds(seconds)); }
void Reset() {
ranges_.Reset();
diff --git a/chromium/media/base/text_renderer_unittest.cc b/chromium/media/base/text_renderer_unittest.cc
index b00a269c0ea..2e25c2ac4b2 100644
--- a/chromium/media/base/text_renderer_unittest.cc
+++ b/chromium/media/base/text_renderer_unittest.cc
@@ -31,6 +31,10 @@ class FakeTextTrack : public TextTrack {
public:
FakeTextTrack(base::OnceClosure destroy_cb, const TextTrackConfig& config)
: destroy_cb_(std::move(destroy_cb)), config_(config) {}
+
+ FakeTextTrack(const FakeTextTrack&) = delete;
+ FakeTextTrack& operator=(const FakeTextTrack&) = delete;
+
~FakeTextTrack() override { std::move(destroy_cb_).Run(); }
MOCK_METHOD5(addWebVTTCue,
@@ -42,9 +46,6 @@ class FakeTextTrack : public TextTrack {
base::OnceClosure destroy_cb_;
const TextTrackConfig config_;
-
- private:
- DISALLOW_COPY_AND_ASSIGN(FakeTextTrack);
};
class TextRendererTest : public testing::Test {
@@ -145,7 +146,7 @@ class TextRendererTest : public testing::Test {
FakeTextTrackStream* const text_stream = text_track_streams_[idx].get();
const base::TimeDelta start;
- const base::TimeDelta duration = base::TimeDelta::FromSeconds(42);
+ const base::TimeDelta duration = base::Seconds(42);
const std::string id = "id";
const std::string content = "subtitle";
const std::string settings;
diff --git a/chromium/media/base/time_delta_interpolator.cc b/chromium/media/base/time_delta_interpolator.cc
index 33e06a6f0be..a6eb4bfad72 100644
--- a/chromium/media/base/time_delta_interpolator.cc
+++ b/chromium/media/base/time_delta_interpolator.cc
@@ -69,8 +69,7 @@ base::TimeDelta TimeDeltaInterpolator::GetInterpolatedTime() {
int64_t now_us = (tick_clock_->NowTicks() - reference_).InMicroseconds();
now_us = static_cast<int64_t>(now_us * playback_rate_);
- base::TimeDelta interpolated_time =
- lower_bound_ + base::TimeDelta::FromMicroseconds(now_us);
+ base::TimeDelta interpolated_time = lower_bound_ + base::Microseconds(now_us);
if (upper_bound_ == kNoTimestamp)
return interpolated_time;
diff --git a/chromium/media/base/time_delta_interpolator.h b/chromium/media/base/time_delta_interpolator.h
index 9a02eada2e5..1134272b75d 100644
--- a/chromium/media/base/time_delta_interpolator.h
+++ b/chromium/media/base/time_delta_interpolator.h
@@ -25,6 +25,10 @@ class MEDIA_EXPORT TimeDeltaInterpolator {
//
// |tick_clock| is used for sampling wall clock time for interpolating.
explicit TimeDeltaInterpolator(const base::TickClock* tick_clock);
+
+ TimeDeltaInterpolator(const TimeDeltaInterpolator&) = delete;
+ TimeDeltaInterpolator& operator=(const TimeDeltaInterpolator&) = delete;
+
~TimeDeltaInterpolator();
bool interpolating() { return interpolating_; }
@@ -76,8 +80,6 @@ class MEDIA_EXPORT TimeDeltaInterpolator {
base::TimeTicks reference_;
double playback_rate_;
-
- DISALLOW_COPY_AND_ASSIGN(TimeDeltaInterpolator);
};
} // namespace media
diff --git a/chromium/media/base/time_delta_interpolator_unittest.cc b/chromium/media/base/time_delta_interpolator_unittest.cc
index 540768ee45b..ed8b35c5291 100644
--- a/chromium/media/base/time_delta_interpolator_unittest.cc
+++ b/chromium/media/base/time_delta_interpolator_unittest.cc
@@ -24,13 +24,13 @@ class TimeDeltaInterpolatorTest : public ::testing::Test {
};
TEST_F(TimeDeltaInterpolatorTest, Created) {
- const base::TimeDelta kExpected = base::TimeDelta::FromSeconds(0);
+ const base::TimeDelta kExpected = base::Seconds(0);
EXPECT_EQ(kExpected, interpolator_.GetInterpolatedTime());
}
TEST_F(TimeDeltaInterpolatorTest, StartInterpolating_NormalSpeed) {
const base::TimeDelta kZero;
- const base::TimeDelta kTimeToAdvance = base::TimeDelta::FromSeconds(2);
+ const base::TimeDelta kTimeToAdvance = base::Seconds(2);
EXPECT_EQ(kZero, interpolator_.StartInterpolating());
AdvanceSystemTime(kTimeToAdvance);
@@ -39,7 +39,7 @@ TEST_F(TimeDeltaInterpolatorTest, StartInterpolating_NormalSpeed) {
TEST_F(TimeDeltaInterpolatorTest, StartInterpolating_DoubleSpeed) {
const base::TimeDelta kZero;
- const base::TimeDelta kTimeToAdvance = base::TimeDelta::FromSeconds(5);
+ const base::TimeDelta kTimeToAdvance = base::Seconds(5);
interpolator_.SetPlaybackRate(2.0);
EXPECT_EQ(kZero, interpolator_.StartInterpolating());
@@ -49,7 +49,7 @@ TEST_F(TimeDeltaInterpolatorTest, StartInterpolating_DoubleSpeed) {
TEST_F(TimeDeltaInterpolatorTest, StartInterpolating_HalfSpeed) {
const base::TimeDelta kZero;
- const base::TimeDelta kTimeToAdvance = base::TimeDelta::FromSeconds(4);
+ const base::TimeDelta kTimeToAdvance = base::Seconds(4);
interpolator_.SetPlaybackRate(0.5);
EXPECT_EQ(kZero, interpolator_.StartInterpolating());
@@ -61,9 +61,9 @@ TEST_F(TimeDeltaInterpolatorTest, StartInterpolating_ZeroSpeed) {
// We'll play for 2 seconds at normal speed, 4 seconds at zero speed, and 8
// seconds at normal speed.
const base::TimeDelta kZero;
- const base::TimeDelta kPlayDuration1 = base::TimeDelta::FromSeconds(2);
- const base::TimeDelta kPlayDuration2 = base::TimeDelta::FromSeconds(4);
- const base::TimeDelta kPlayDuration3 = base::TimeDelta::FromSeconds(8);
+ const base::TimeDelta kPlayDuration1 = base::Seconds(2);
+ const base::TimeDelta kPlayDuration2 = base::Seconds(4);
+ const base::TimeDelta kPlayDuration3 = base::Seconds(8);
const base::TimeDelta kExpected = kPlayDuration1 + kPlayDuration3;
EXPECT_EQ(kZero, interpolator_.StartInterpolating());
@@ -81,9 +81,9 @@ TEST_F(TimeDeltaInterpolatorTest, StartInterpolating_MultiSpeed) {
// We'll play for 2 seconds at half speed, 4 seconds at normal speed, and 8
// seconds at double speed.
const base::TimeDelta kZero;
- const base::TimeDelta kPlayDuration1 = base::TimeDelta::FromSeconds(2);
- const base::TimeDelta kPlayDuration2 = base::TimeDelta::FromSeconds(4);
- const base::TimeDelta kPlayDuration3 = base::TimeDelta::FromSeconds(8);
+ const base::TimeDelta kPlayDuration1 = base::Seconds(2);
+ const base::TimeDelta kPlayDuration2 = base::Seconds(4);
+ const base::TimeDelta kPlayDuration3 = base::Seconds(8);
const base::TimeDelta kExpected =
kPlayDuration1 / 2 + kPlayDuration2 + 2 * kPlayDuration3;
@@ -101,8 +101,8 @@ TEST_F(TimeDeltaInterpolatorTest, StartInterpolating_MultiSpeed) {
TEST_F(TimeDeltaInterpolatorTest, StopInterpolating) {
const base::TimeDelta kZero;
- const base::TimeDelta kPlayDuration = base::TimeDelta::FromSeconds(4);
- const base::TimeDelta kPauseDuration = base::TimeDelta::FromSeconds(20);
+ const base::TimeDelta kPlayDuration = base::Seconds(4);
+ const base::TimeDelta kPauseDuration = base::Seconds(20);
const base::TimeDelta kExpectedFirstPause = kPlayDuration;
const base::TimeDelta kExpectedSecondPause = 2 * kPlayDuration;
@@ -124,9 +124,9 @@ TEST_F(TimeDeltaInterpolatorTest, StopInterpolating) {
}
TEST_F(TimeDeltaInterpolatorTest, SetBounds_Stopped) {
- const base::TimeDelta kFirstTime = base::TimeDelta::FromSeconds(4);
- const base::TimeDelta kSecondTime = base::TimeDelta::FromSeconds(16);
- const base::TimeDelta kArbitraryMaxTime = base::TimeDelta::FromSeconds(100);
+ const base::TimeDelta kFirstTime = base::Seconds(4);
+ const base::TimeDelta kSecondTime = base::Seconds(16);
+ const base::TimeDelta kArbitraryMaxTime = base::Seconds(100);
interpolator_.SetBounds(kFirstTime, kArbitraryMaxTime,
test_tick_clock_.NowTicks());
@@ -140,9 +140,9 @@ TEST_F(TimeDeltaInterpolatorTest, SetBounds_Started) {
// We'll play for 4 seconds, then set the time to 12, then play for 4 more
// seconds.
const base::TimeDelta kZero;
- const base::TimeDelta kPlayDuration = base::TimeDelta::FromSeconds(4);
- const base::TimeDelta kUpdatedTime = base::TimeDelta::FromSeconds(12);
- const base::TimeDelta kArbitraryMaxTime = base::TimeDelta::FromSeconds(100);
+ const base::TimeDelta kPlayDuration = base::Seconds(4);
+ const base::TimeDelta kUpdatedTime = base::Seconds(12);
+ const base::TimeDelta kArbitraryMaxTime = base::Seconds(100);
const base::TimeDelta kExpected = kUpdatedTime + kPlayDuration;
EXPECT_EQ(kZero, interpolator_.StartInterpolating());
@@ -156,8 +156,8 @@ TEST_F(TimeDeltaInterpolatorTest, SetBounds_Started) {
TEST_F(TimeDeltaInterpolatorTest, SetUpperBound) {
const base::TimeDelta kZero;
- const base::TimeDelta kTimeInterval = base::TimeDelta::FromSeconds(4);
- const base::TimeDelta kMaxTime = base::TimeDelta::FromSeconds(6);
+ const base::TimeDelta kTimeInterval = base::Seconds(4);
+ const base::TimeDelta kMaxTime = base::Seconds(6);
EXPECT_EQ(kZero, interpolator_.StartInterpolating());
interpolator_.SetUpperBound(kMaxTime);
@@ -173,10 +173,10 @@ TEST_F(TimeDeltaInterpolatorTest, SetUpperBound) {
TEST_F(TimeDeltaInterpolatorTest, SetUpperBound_MultipleTimes) {
const base::TimeDelta kZero;
- const base::TimeDelta kTimeInterval = base::TimeDelta::FromSeconds(4);
- const base::TimeDelta kMaxTime0 = base::TimeDelta::FromSeconds(120);
- const base::TimeDelta kMaxTime1 = base::TimeDelta::FromSeconds(6);
- const base::TimeDelta kMaxTime2 = base::TimeDelta::FromSeconds(12);
+ const base::TimeDelta kTimeInterval = base::Seconds(4);
+ const base::TimeDelta kMaxTime0 = base::Seconds(120);
+ const base::TimeDelta kMaxTime1 = base::Seconds(6);
+ const base::TimeDelta kMaxTime2 = base::Seconds(12);
EXPECT_EQ(kZero, interpolator_.StartInterpolating());
interpolator_.SetUpperBound(kMaxTime0);
diff --git a/chromium/media/base/tuneable.cc b/chromium/media/base/tuneable.cc
index d82e68ef45a..0b71e94d563 100644
--- a/chromium/media/base/tuneable.cc
+++ b/chromium/media/base/tuneable.cc
@@ -43,9 +43,9 @@ base::TimeDelta GetParam<base::TimeDelta>(const char* name,
base::TimeDelta minimum_value,
base::TimeDelta default_value,
base::TimeDelta maximum_value) {
- return base::TimeDelta::FromMilliseconds(GetParam<int>(
- name, minimum_value.InMilliseconds(), default_value.InMilliseconds(),
- maximum_value.InMilliseconds()));
+ return base::Milliseconds(GetParam<int>(name, minimum_value.InMilliseconds(),
+ default_value.InMilliseconds(),
+ maximum_value.InMilliseconds()));
}
} // namespace
diff --git a/chromium/media/base/tuneable_unittest.cc b/chromium/media/base/tuneable_unittest.cc
index 1ea34fa93ed..813c9040890 100644
--- a/chromium/media/base/tuneable_unittest.cc
+++ b/chromium/media/base/tuneable_unittest.cc
@@ -105,9 +105,9 @@ TEST_F(TuneableTest, IntTuneableFromParams) {
TEST_F(TuneableTest, OtherSpecializationsCompile) {
// Since it's all templated, just be happy if it compiles and does something
// somewhat sane.
- constexpr base::TimeDelta min_value = base::TimeDelta::FromSeconds(0);
- constexpr base::TimeDelta default_value = base::TimeDelta::FromSeconds(5);
- constexpr base::TimeDelta max_value = base::TimeDelta::FromSeconds(10);
+ constexpr base::TimeDelta min_value = base::Seconds(0);
+ constexpr base::TimeDelta default_value = base::Seconds(5);
+ constexpr base::TimeDelta max_value = base::Seconds(10);
Tuneable<base::TimeDelta> time_delta_tuneable("whatever", min_value,
default_value, max_value);
// Since the tuneable is not provided in the finch parameters, it should
@@ -122,11 +122,11 @@ TEST_F(TuneableTest, TimeDeltaIsSpecifiedInMilliseconds) {
// Since the finch params are constructed with the assumption that the value
// will be interpreted as milliseconds, make sure that the Tuneable actually
// does interpret it that way.
- constexpr base::TimeDelta min_value = base::TimeDelta::FromSeconds(0);
- constexpr base::TimeDelta max_value = base::TimeDelta::FromSeconds(100);
+ constexpr base::TimeDelta min_value = base::Seconds(0);
+ constexpr base::TimeDelta max_value = base::Seconds(100);
Tuneable<base::TimeDelta> t(kTuneableTimeDeltaFiveSeconds, min_value,
min_value, max_value);
- EXPECT_EQ(t.value(), base::TimeDelta::FromSeconds(5));
+ EXPECT_EQ(t.value(), base::Seconds(5));
}
} // namespace media
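As the test above verifies, a Tuneable<base::TimeDelta> parses its field-trial parameter as an integer count of milliseconds and clamps it to the supplied bounds. A hedged sketch of declaring and reading such a tuneable; the parameter name is made up for illustration:

  #include "base/time/time.h"
  #include "media/base/tuneable.h"

  base::TimeDelta GetSomeTimeout() {
    // "MyTimeoutMs" is a hypothetical parameter name. A finch param of "5000"
    // yields base::Seconds(5); out-of-range values are clamped to [0s, 100s].
    media::Tuneable<base::TimeDelta> timeout("MyTimeoutMs", base::Seconds(0),
                                             base::Seconds(5),
                                             base::Seconds(100));
    return timeout.value();
  }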
diff --git a/chromium/media/base/unaligned_shared_memory.h b/chromium/media/base/unaligned_shared_memory.h
index 71bf064d652..f4fe2c976de 100644
--- a/chromium/media/base/unaligned_shared_memory.h
+++ b/chromium/media/base/unaligned_shared_memory.h
@@ -28,6 +28,9 @@ class MEDIA_EXPORT UnalignedSharedMemory {
size_t size,
bool read_only);
+ UnalignedSharedMemory(const UnalignedSharedMemory&) = delete;
+ UnalignedSharedMemory& operator=(const UnalignedSharedMemory&) = delete;
+
~UnalignedSharedMemory();
// Map the shared memory region. Note that the passed |size| parameter should
@@ -52,8 +55,6 @@ class MEDIA_EXPORT UnalignedSharedMemory {
// Pointer to the unaligned data in the shared memory mapping.
uint8_t* mapping_ptr_ = nullptr;
-
- DISALLOW_COPY_AND_ASSIGN(UnalignedSharedMemory);
};
// Wrapper over base::WritableSharedMemoryMapping that is mapped at unaligned
@@ -68,6 +69,9 @@ class MEDIA_EXPORT WritableUnalignedMapping {
size_t size,
off_t offset);
+ WritableUnalignedMapping(const WritableUnalignedMapping&) = delete;
+ WritableUnalignedMapping& operator=(const WritableUnalignedMapping&) = delete;
+
~WritableUnalignedMapping();
size_t size() const { return size_; }
@@ -86,8 +90,6 @@ class MEDIA_EXPORT WritableUnalignedMapping {
// mapped and requested offset; strictly less than
// base::SysInfo::VMAllocationGranularity().
size_t misalignment_;
-
- DISALLOW_COPY_AND_ASSIGN(WritableUnalignedMapping);
};
// Wrapper over base::ReadOnlySharedMemoryMapping that is mapped at unaligned
@@ -102,6 +104,9 @@ class MEDIA_EXPORT ReadOnlyUnalignedMapping {
size_t size,
off_t offset);
+ ReadOnlyUnalignedMapping(const ReadOnlyUnalignedMapping&) = delete;
+ ReadOnlyUnalignedMapping& operator=(const ReadOnlyUnalignedMapping&) = delete;
+
~ReadOnlyUnalignedMapping();
size_t size() const { return size_; }
@@ -120,8 +125,6 @@ class MEDIA_EXPORT ReadOnlyUnalignedMapping {
// mapped and requested offset; strictly less than
// base::SysInfo::VMAllocationGranularity().
size_t misalignment_;
-
- DISALLOW_COPY_AND_ASSIGN(ReadOnlyUnalignedMapping);
};
} // namespace media
diff --git a/chromium/media/base/use_after_free_checker.h b/chromium/media/base/use_after_free_checker.h
index aefcaae2b59..1ffde9b5f37 100644
--- a/chromium/media/base/use_after_free_checker.h
+++ b/chromium/media/base/use_after_free_checker.h
@@ -5,6 +5,7 @@
#ifndef MEDIA_BASE_USE_AFTER_FREE_CHECKER_H_
#define MEDIA_BASE_USE_AFTER_FREE_CHECKER_H_
+#include "base/debug/crash_logging.h"
#include "base/debug/dump_without_crashing.h"
#include "base/location.h"
#include "media/base/media_export.h"
diff --git a/chromium/media/base/user_input_monitor.h b/chromium/media/base/user_input_monitor.h
index 7b774949d71..aebbb8b67e5 100644
--- a/chromium/media/base/user_input_monitor.h
+++ b/chromium/media/base/user_input_monitor.h
@@ -33,6 +33,10 @@ WriteKeyPressMonitorCount(const base::WritableSharedMemoryMapping& shmem,
class MEDIA_EXPORT UserInputMonitor {
public:
UserInputMonitor();
+
+ UserInputMonitor(const UserInputMonitor&) = delete;
+ UserInputMonitor& operator=(const UserInputMonitor&) = delete;
+
virtual ~UserInputMonitor();
// Creates a platform-specific instance of UserInputMonitorBase.
@@ -52,15 +56,16 @@ class MEDIA_EXPORT UserInputMonitor {
// number of keypresses happened within that time period, but should not make
// any assumption on the initial value.
virtual uint32_t GetKeyPressCount() const = 0;
-
- private:
- DISALLOW_COPY_AND_ASSIGN(UserInputMonitor);
};
// Monitors and notifies about keyboard events.
class MEDIA_EXPORT UserInputMonitorBase : public UserInputMonitor {
public:
UserInputMonitorBase();
+
+ UserInputMonitorBase(const UserInputMonitorBase&) = delete;
+ UserInputMonitorBase& operator=(const UserInputMonitorBase&) = delete;
+
~UserInputMonitorBase() override;
// A caller must call EnableKeyPressMonitoring(WithMapping) and
@@ -84,8 +89,6 @@ class MEDIA_EXPORT UserInputMonitorBase : public UserInputMonitor {
base::ReadOnlySharedMemoryRegion key_press_count_region_;
SEQUENCE_CHECKER(owning_sequence_);
-
- DISALLOW_COPY_AND_ASSIGN(UserInputMonitorBase);
};
} // namespace media
diff --git a/chromium/media/base/user_input_monitor_linux.cc b/chromium/media/base/user_input_monitor_linux.cc
index 58b7d3033fe..ba089a7ebcc 100644
--- a/chromium/media/base/user_input_monitor_linux.cc
+++ b/chromium/media/base/user_input_monitor_linux.cc
@@ -103,6 +103,10 @@ class UserInputMonitorLinux : public UserInputMonitorBase {
public:
explicit UserInputMonitorLinux(
const scoped_refptr<base::SingleThreadTaskRunner>& io_task_runner);
+
+ UserInputMonitorLinux(const UserInputMonitorLinux&) = delete;
+ UserInputMonitorLinux& operator=(const UserInputMonitorLinux&) = delete;
+
~UserInputMonitorLinux() override;
// Public UserInputMonitor overrides.
@@ -117,8 +121,6 @@ class UserInputMonitorLinux : public UserInputMonitorBase {
scoped_refptr<base::SingleThreadTaskRunner> io_task_runner_;
UserInputMonitorAdapter* core_;
-
- DISALLOW_COPY_AND_ASSIGN(UserInputMonitorLinux);
};
UserInputMonitorAdapter* CreateUserInputMonitor(
diff --git a/chromium/media/base/user_input_monitor_mac.cc b/chromium/media/base/user_input_monitor_mac.cc
index eac1a4fbd2d..92d3c01d549 100644
--- a/chromium/media/base/user_input_monitor_mac.cc
+++ b/chromium/media/base/user_input_monitor_mac.cc
@@ -18,11 +18,15 @@ namespace {
// Update key press count in shared memory twice as frequent as
// AudioInputController::AudioCallback::OnData() callback for WebRTC.
constexpr base::TimeDelta kUpdateKeyPressCountIntervalMs =
- base::TimeDelta::FromMilliseconds(5);
+ base::Milliseconds(5);
class UserInputMonitorMac : public UserInputMonitorBase {
public:
UserInputMonitorMac();
+
+ UserInputMonitorMac(const UserInputMonitorMac&) = delete;
+ UserInputMonitorMac& operator=(const UserInputMonitorMac&) = delete;
+
~UserInputMonitorMac() override;
uint32_t GetKeyPressCount() const override;
@@ -40,8 +44,6 @@ class UserInputMonitorMac : public UserInputMonitorBase {
// Timer for updating key press count in |key_press_count_mapping_|.
base::RepeatingTimer key_press_count_timer_;
-
- DISALLOW_COPY_AND_ASSIGN(UserInputMonitorMac);
};
UserInputMonitorMac::UserInputMonitorMac() {}
diff --git a/chromium/media/base/user_input_monitor_win.cc b/chromium/media/base/user_input_monitor_win.cc
index 327b28bb711..5b779fe8fce 100644
--- a/chromium/media/base/user_input_monitor_win.cc
+++ b/chromium/media/base/user_input_monitor_win.cc
@@ -52,6 +52,10 @@ class UserInputMonitorWinCore
explicit UserInputMonitorWinCore(
scoped_refptr<base::SingleThreadTaskRunner> ui_task_runner);
+
+ UserInputMonitorWinCore(const UserInputMonitorWinCore&) = delete;
+ UserInputMonitorWinCore& operator=(const UserInputMonitorWinCore&) = delete;
+
~UserInputMonitorWinCore() override;
// DestructionObserver overrides.
@@ -90,14 +94,16 @@ class UserInputMonitorWinCore
bool pause_monitoring_ = false;
bool start_monitoring_after_hook_removed_ = false;
-
- DISALLOW_COPY_AND_ASSIGN(UserInputMonitorWinCore);
};
class UserInputMonitorWin : public UserInputMonitorBase {
public:
explicit UserInputMonitorWin(
const scoped_refptr<base::SingleThreadTaskRunner>& ui_task_runner);
+
+ UserInputMonitorWin(const UserInputMonitorWin&) = delete;
+ UserInputMonitorWin& operator=(const UserInputMonitorWin&) = delete;
+
~UserInputMonitorWin() override;
// Public UserInputMonitor overrides.
@@ -112,8 +118,6 @@ class UserInputMonitorWin : public UserInputMonitorBase {
scoped_refptr<base::SingleThreadTaskRunner> ui_task_runner_;
UserInputMonitorWinCore* core_;
-
- DISALLOW_COPY_AND_ASSIGN(UserInputMonitorWin);
};
UserInputMonitorWinCore::UserInputMonitorWinCore(
diff --git a/chromium/media/base/video_codecs.cc b/chromium/media/base/video_codecs.cc
index 6c58bbabd66..8d2b7e3beca 100644
--- a/chromium/media/base/video_codecs.cc
+++ b/chromium/media/base/video_codecs.cc
@@ -17,27 +17,27 @@ namespace media {
// The names come from src/third_party/ffmpeg/libavcodec/codec_desc.c
std::string GetCodecName(VideoCodec codec) {
switch (codec) {
- case kUnknownVideoCodec:
+ case VideoCodec::kUnknown:
return "unknown";
- case kCodecH264:
+ case VideoCodec::kH264:
return "h264";
- case kCodecHEVC:
+ case VideoCodec::kHEVC:
return "hevc";
- case kCodecDolbyVision:
+ case VideoCodec::kDolbyVision:
return "dolbyvision";
- case kCodecVC1:
+ case VideoCodec::kVC1:
return "vc1";
- case kCodecMPEG2:
+ case VideoCodec::kMPEG2:
return "mpeg2video";
- case kCodecMPEG4:
+ case VideoCodec::kMPEG4:
return "mpeg4";
- case kCodecTheora:
+ case VideoCodec::kTheora:
return "theora";
- case kCodecVP8:
+ case VideoCodec::kVP8:
return "vp8";
- case kCodecVP9:
+ case VideoCodec::kVP9:
return "vp9";
- case kCodecAV1:
+ case VideoCodec::kAV1:
return "av1";
}
NOTREACHED();
@@ -867,7 +867,7 @@ bool ParseDolbyVisionCodecId(const std::string& codec_id,
#endif
VideoCodec StringToVideoCodec(const std::string& codec_id) {
- VideoCodec codec = kUnknownVideoCodec;
+ VideoCodec codec = VideoCodec::kUnknown;
VideoCodecProfile profile = VIDEO_CODEC_PROFILE_UNKNOWN;
uint8_t level = 0;
VideoColorSpace color_space;
@@ -883,61 +883,61 @@ void ParseCodec(const std::string& codec_id,
std::vector<std::string> elem = base::SplitString(
codec_id, ".", base::TRIM_WHITESPACE, base::SPLIT_WANT_ALL);
if (elem.empty()) {
- codec = kUnknownVideoCodec;
+ codec = VideoCodec::kUnknown;
return;
}
if (codec_id == "vp8" || codec_id == "vp8.0") {
- codec = kCodecVP8;
+ codec = VideoCodec::kVP8;
return;
}
if (ParseNewStyleVp9CodecID(codec_id, &profile, &level, &color_space) ||
ParseLegacyVp9CodecID(codec_id, &profile, &level)) {
- codec = kCodecVP9;
+ codec = VideoCodec::kVP9;
return;
}
#if BUILDFLAG(ENABLE_AV1_DECODER)
if (ParseAv1CodecId(codec_id, &profile, &level, &color_space)) {
- codec = kCodecAV1;
+ codec = VideoCodec::kAV1;
return;
}
#endif
if (codec_id == "theora") {
- codec = kCodecTheora;
+ codec = VideoCodec::kTheora;
return;
}
if (ParseAVCCodecId(codec_id, &profile, &level)) {
- codec = kCodecH264;
+ codec = VideoCodec::kH264;
return;
}
#if BUILDFLAG(ENABLE_MSE_MPEG2TS_STREAM_PARSER)
if (ParseAVCCodecId(TranslateLegacyAvc1CodecIds(codec_id), &profile,
&level)) {
- codec = kCodecH264;
+ codec = VideoCodec::kH264;
return;
}
#endif
#if BUILDFLAG(ENABLE_PLATFORM_HEVC)
if (ParseHEVCCodecId(codec_id, &profile, &level)) {
- codec = kCodecHEVC;
+ codec = VideoCodec::kHEVC;
return;
}
#endif
#if BUILDFLAG(ENABLE_PLATFORM_DOLBY_VISION)
if (ParseDolbyVisionCodecId(codec_id, &profile, &level)) {
- codec = kCodecDolbyVision;
+ codec = VideoCodec::kDolbyVision;
return;
}
#endif
- codec = kUnknownVideoCodec;
+ codec = VideoCodec::kUnknown;
}
VideoCodec VideoCodecProfileToVideoCodec(VideoCodecProfile profile) {
switch (profile) {
case VIDEO_CODEC_PROFILE_UNKNOWN:
- return kUnknownVideoCodec;
+ return VideoCodec::kUnknown;
case H264PROFILE_BASELINE:
case H264PROFILE_MAIN:
case H264PROFILE_EXTENDED:
@@ -949,31 +949,36 @@ VideoCodec VideoCodecProfileToVideoCodec(VideoCodecProfile profile) {
case H264PROFILE_SCALABLEHIGH:
case H264PROFILE_STEREOHIGH:
case H264PROFILE_MULTIVIEWHIGH:
- return kCodecH264;
+ return VideoCodec::kH264;
case HEVCPROFILE_MAIN:
case HEVCPROFILE_MAIN10:
case HEVCPROFILE_MAIN_STILL_PICTURE:
- return kCodecHEVC;
+ return VideoCodec::kHEVC;
case VP8PROFILE_ANY:
- return kCodecVP8;
+ return VideoCodec::kVP8;
case VP9PROFILE_PROFILE0:
case VP9PROFILE_PROFILE1:
case VP9PROFILE_PROFILE2:
case VP9PROFILE_PROFILE3:
- return kCodecVP9;
+ return VideoCodec::kVP9;
case DOLBYVISION_PROFILE0:
case DOLBYVISION_PROFILE4:
case DOLBYVISION_PROFILE5:
case DOLBYVISION_PROFILE7:
case DOLBYVISION_PROFILE8:
case DOLBYVISION_PROFILE9:
- return kCodecDolbyVision;
+ return VideoCodec::kDolbyVision;
case THEORAPROFILE_ANY:
- return kCodecTheora;
+ return VideoCodec::kTheora;
case AV1PROFILE_PROFILE_MAIN:
case AV1PROFILE_PROFILE_HIGH:
case AV1PROFILE_PROFILE_PRO:
- return kCodecAV1;
+ return VideoCodec::kAV1;
}
}
+
+std::ostream& operator<<(std::ostream& os, const VideoCodec& codec) {
+ return os << GetCodecName(codec);
+}
+
} // namespace media
diff --git a/chromium/media/base/video_codecs.h b/chromium/media/base/video_codecs.h
index 0e7dce197d8..12bf6e4508c 100644
--- a/chromium/media/base/video_codecs.h
+++ b/chromium/media/base/video_codecs.h
@@ -16,27 +16,27 @@ namespace media {
class VideoColorSpace;
// GENERATED_JAVA_ENUM_PACKAGE: org.chromium.media
-enum VideoCodec {
+enum class VideoCodec {
// These values are histogrammed over time; do not change their ordinal
// values. When deleting a codec replace it with a dummy value; when adding a
- // codec, do so at the bottom (and update kVideoCodecMax).
- kUnknownVideoCodec = 0,
- kCodecH264,
- kCodecVC1,
- kCodecMPEG2,
- kCodecMPEG4,
- kCodecTheora,
- kCodecVP8,
- kCodecVP9,
- kCodecHEVC,
- kCodecDolbyVision,
- kCodecAV1,
+ // codec, do so at the bottom (and update kMaxValue).
+ kUnknown = 0,
+ kH264,
+ kVC1,
+ kMPEG2,
+ kMPEG4,
+ kTheora,
+ kVP8,
+ kVP9,
+ kHEVC,
+ kDolbyVision,
+ kAV1,
// DO NOT ADD RANDOM VIDEO CODECS!
//
// The only acceptable time to add a new codec is if there is production code
// that uses said codec in the same CL.
- kVideoCodecMax = kCodecAV1, // Must equal the last "real" codec above.
+ kMaxValue = kAV1, // Must equal the last "real" codec above.
};
// Video codec profiles. Keep in sync with mojo::VideoCodecProfile (see
@@ -166,6 +166,9 @@ VideoCodecProfileToVideoCodec(VideoCodecProfile profile);
std::string TranslateLegacyAvc1CodecIds(const std::string& codec_id);
#endif
+MEDIA_EXPORT std::ostream& operator<<(std::ostream& os,
+ const VideoCodec& codec);
+
} // namespace media
#endif // MEDIA_BASE_VIDEO_CODECS_H_
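With VideoCodec converted to an enum class, call sites use the scoped names and the new operator<< streams GetCodecName(), which helps logging and gtest failure messages. An illustrative sketch, not taken from the patch:

  #include "base/logging.h"
  #include "media/base/video_codecs.h"

  void LogCodec(media::VideoCodec codec) {
    // Scoped names replace the old unscoped constants, e.g. kCodecH264
    // becomes VideoCodec::kH264 and kUnknownVideoCodec becomes
    // VideoCodec::kUnknown.
    if (codec == media::VideoCodec::kUnknown)
      return;
    // The new operator<< prints GetCodecName(), e.g. "h264" or "vp9".
    LOG(INFO) << "Selected codec: " << codec;
  }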
diff --git a/chromium/media/base/video_color_space_unittest.cc b/chromium/media/base/video_color_space_unittest.cc
index b8cf8dd3d07..71986269858 100644
--- a/chromium/media/base/video_color_space_unittest.cc
+++ b/chromium/media/base/video_color_space_unittest.cc
@@ -6,7 +6,7 @@
#include "testing/gtest/include/gtest/gtest.h"
#include "ui/gfx/color_space.h"
#include "ui/gfx/color_transform.h"
-#include "ui/gfx/transform.h"
+#include "ui/gfx/geometry/transform.h"
namespace media {
diff --git a/chromium/media/base/video_decoder_config.cc b/chromium/media/base/video_decoder_config.cc
index 40c9707fae4..b28a2239029 100644
--- a/chromium/media/base/video_decoder_config.cc
+++ b/chromium/media/base/video_decoder_config.cc
@@ -69,7 +69,7 @@ void VideoDecoderConfig::Initialize(VideoCodec codec,
}
bool VideoDecoderConfig::IsValidConfig() const {
- return codec_ != kUnknownVideoCodec && IsValidSize(coded_size_) &&
+ return codec_ != VideoCodec::kUnknown && IsValidSize(coded_size_) &&
IsValidSize(natural_size_) &&
gfx::Rect(coded_size_).Contains(visible_rect_);
}
diff --git a/chromium/media/base/video_decoder_config.h b/chromium/media/base/video_decoder_config.h
index 1eb9bf07c46..875de0d3125 100644
--- a/chromium/media/base/video_decoder_config.h
+++ b/chromium/media/base/video_decoder_config.h
@@ -160,7 +160,7 @@ class MEDIA_EXPORT VideoDecoderConfig {
bool is_rtc() const { return is_rtc_; }
private:
- VideoCodec codec_ = kUnknownVideoCodec;
+ VideoCodec codec_ = VideoCodec::kUnknown;
VideoCodecProfile profile_ = VIDEO_CODEC_PROFILE_UNKNOWN;
// Optional video codec level. kNoVideoCodecLevel means the field is not
diff --git a/chromium/media/base/video_decoder_config_unittest.cc b/chromium/media/base/video_decoder_config_unittest.cc
index 22954f2de69..6bfa5b94889 100644
--- a/chromium/media/base/video_decoder_config_unittest.cc
+++ b/chromium/media/base/video_decoder_config_unittest.cc
@@ -14,7 +14,7 @@ static const gfx::Rect kVisibleRect(320, 240);
static const gfx::Size kNaturalSize(320, 240);
TEST(VideoDecoderConfigTest, AlphaModeSetCorrectly) {
- VideoDecoderConfig config(kCodecVP8, VIDEO_CODEC_PROFILE_UNKNOWN,
+ VideoDecoderConfig config(VideoCodec::kVP8, VIDEO_CODEC_PROFILE_UNKNOWN,
VideoDecoderConfig::AlphaMode::kIsOpaque,
VideoColorSpace(), kNoTransformation, kCodedSize,
kVisibleRect, kNaturalSize, EmptyExtraData(),
@@ -22,7 +22,7 @@ TEST(VideoDecoderConfigTest, AlphaModeSetCorrectly) {
EXPECT_TRUE(config.IsValidConfig());
EXPECT_EQ(config.alpha_mode(), VideoDecoderConfig::AlphaMode::kIsOpaque);
- config.Initialize(kCodecVP8, VIDEO_CODEC_PROFILE_UNKNOWN,
+ config.Initialize(VideoCodec::kVP8, VIDEO_CODEC_PROFILE_UNKNOWN,
VideoDecoderConfig::AlphaMode::kHasAlpha, VideoColorSpace(),
kNoTransformation, kCodedSize, kVisibleRect, kNaturalSize,
EmptyExtraData(), EncryptionScheme::kUnencrypted);
@@ -30,10 +30,11 @@ TEST(VideoDecoderConfigTest, AlphaModeSetCorrectly) {
}
TEST(VideoDecoderConfigTest, SetProfile) {
- VideoDecoderConfig config(
- kCodecVP9, VP9PROFILE_PROFILE0, VideoDecoderConfig::AlphaMode::kIsOpaque,
- VideoColorSpace(), kNoTransformation, kCodedSize, kVisibleRect,
- kNaturalSize, EmptyExtraData(), EncryptionScheme::kUnencrypted);
+ VideoDecoderConfig config(VideoCodec::kVP9, VP9PROFILE_PROFILE0,
+ VideoDecoderConfig::AlphaMode::kIsOpaque,
+ VideoColorSpace(), kNoTransformation, kCodedSize,
+ kVisibleRect, kNaturalSize, EmptyExtraData(),
+ EncryptionScheme::kUnencrypted);
config.set_profile(VP9PROFILE_PROFILE2);
EXPECT_EQ(config.profile(), VP9PROFILE_PROFILE2);
}
diff --git a/chromium/media/base/video_encoder.h b/chromium/media/base/video_encoder.h
index d7ca6801fb8..c6ee9c7d3c8 100644
--- a/chromium/media/base/video_encoder.h
+++ b/chromium/media/base/video_encoder.h
@@ -10,6 +10,7 @@
#include "media/base/bitrate.h"
#include "media/base/media_export.h"
#include "media/base/status.h"
+#include "media/base/svc_scalability_mode.h"
#include "media/base/video_codecs.h"
#include "third_party/abseil-cpp/absl/types/optional.h"
#include "ui/gfx/geometry/size.h"
@@ -54,11 +55,10 @@ class MEDIA_EXPORT VideoEncoder {
absl::optional<int> keyframe_interval = 10000;
- // Requested number of SVC temporal layers.
- int temporal_layers = 1;
-
LatencyMode latency_mode = LatencyMode::Realtime;
+ absl::optional<SVCScalabilityMode> scalability_mode;
+
// Only used for H264 encoding.
AvcOptions avc;
};
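VideoEncoder::Options drops the raw temporal_layers count in favor of an optional SVCScalabilityMode. A minimal sketch of how an encoder client might now express three temporal layers, assuming kL1T3 is one of the SVCScalabilityMode values declared in media/base/svc_scalability_mode.h:

  #include "media/base/svc_scalability_mode.h"
  #include "media/base/video_encoder.h"

  media::VideoEncoder::Options MakeOptions() {
    media::VideoEncoder::Options options;
    // Previously: options.temporal_layers = 3;
    // kL1T3 (one spatial layer, three temporal layers) is assumed to be among
    // the SVCScalabilityMode values; leaving the optional unset requests no
    // SVC structure.
    options.scalability_mode = media::SVCScalabilityMode::kL1T3;
    return options;
  }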
diff --git a/chromium/media/base/video_frame.cc b/chromium/media/base/video_frame.cc
index 9731ad2ca4a..8e59b5fc863 100644
--- a/chromium/media/base/video_frame.cc
+++ b/chromium/media/base/video_frame.cc
@@ -12,7 +12,6 @@
#include "base/atomic_sequence_num.h"
#include "base/bind.h"
#include "base/bits.h"
-#include "base/callback_helpers.h"
#include "base/cxx17_backports.h"
#include "base/logging.h"
#include "base/process/memory.h"
@@ -388,6 +387,7 @@ scoped_refptr<VideoFrame> VideoFrame::WrapNativeTextures(
// Wrapping native textures should... have textures. https://crbug.com/864145.
DCHECK(frame->HasTextures());
+ DCHECK_GT(frame->NumTextures(), 0u);
return frame;
}
@@ -898,6 +898,18 @@ scoped_refptr<VideoFrame> VideoFrame::WrapVideoFrame(
wrapping_frame->BackWithSharedMemory(frame->shm_region_);
}
+ // Don't let a Matryoshka doll of frames occur. Do this here instead of above
+ // since |frame| may have different metadata than |frame->wrapped_frame_|.
+ //
+ // We must still keep |frame| alive though since it may have destruction
+ // observers which signal that the underlying resource is okay to reuse. E.g.,
+ // VideoFramePool.
+ if (frame->wrapped_frame_) {
+ wrapping_frame->AddDestructionObserver(
+ base::BindOnce([](scoped_refptr<VideoFrame>) {}, frame));
+ frame = frame->wrapped_frame_;
+ }
+
wrapping_frame->wrapped_frame_ = std::move(frame);
return wrapping_frame;
}
@@ -1152,24 +1164,22 @@ bool VideoFrame::IsMappable() const {
}
bool VideoFrame::HasTextures() const {
- // A SharedImage can be turned into a texture, and so it counts as a texture
- // in the context of this call.
return wrapped_frame_ ? wrapped_frame_->HasTextures()
- : (mailbox_holders_[0].mailbox.IsSharedImage() ||
- !mailbox_holders_[0].mailbox.IsZero());
+ : !mailbox_holders_[0].mailbox.IsZero();
}
size_t VideoFrame::NumTextures() const {
+ if (wrapped_frame_)
+ return wrapped_frame_->NumTextures();
+
if (!HasTextures())
return 0;
- const auto& mailbox_holders =
- wrapped_frame_ ? wrapped_frame_->mailbox_holders_ : mailbox_holders_;
size_t i = 0;
for (; i < NumPlanes(format()); ++i) {
- if (mailbox_holders[i].mailbox.IsZero()) {
+ const auto& mailbox = mailbox_holders_[i].mailbox;
+ if (mailbox.IsZero())
return i;
- }
}
return i;
}
@@ -1239,7 +1249,7 @@ const gpu::MailboxHolder& VideoFrame::mailbox_holder(
size_t texture_index) const {
DCHECK(HasTextures());
DCHECK(IsValidPlane(format(), texture_index));
- return wrapped_frame_ ? wrapped_frame_->mailbox_holders_[texture_index]
+ return wrapped_frame_ ? wrapped_frame_->mailbox_holder(texture_index)
: mailbox_holders_[texture_index];
}
@@ -1411,6 +1421,23 @@ gfx::Size VideoFrame::DetermineAlignedSize(VideoPixelFormat format,
return adjusted;
}
+bool VideoFrame::IsValidSize(const gfx::Size& coded_size,
+ const gfx::Rect& visible_rect,
+ const gfx::Size& natural_size) {
+ int coded_size_area = coded_size.GetCheckedArea().ValueOrDefault(INT_MAX);
+ int natural_size_area = natural_size.GetCheckedArea().ValueOrDefault(INT_MAX);
+ static_assert(limits::kMaxCanvas < INT_MAX, "");
+ return !(coded_size_area > limits::kMaxCanvas ||
+ coded_size.width() > limits::kMaxDimension ||
+ coded_size.height() > limits::kMaxDimension ||
+ visible_rect.x() < 0 || visible_rect.y() < 0 ||
+ visible_rect.right() > coded_size.width() ||
+ visible_rect.bottom() > coded_size.height() ||
+ natural_size_area > limits::kMaxCanvas ||
+ natural_size.width() > limits::kMaxDimension ||
+ natural_size.height() > limits::kMaxDimension);
+}
+
// static
bool VideoFrame::IsValidConfigInternal(VideoPixelFormat format,
FrameControlType frame_control_type,
@@ -1418,17 +1445,7 @@ bool VideoFrame::IsValidConfigInternal(VideoPixelFormat format,
const gfx::Rect& visible_rect,
const gfx::Size& natural_size) {
// Check maximum limits for all formats.
- int coded_size_area = coded_size.GetCheckedArea().ValueOrDefault(INT_MAX);
- int natural_size_area = natural_size.GetCheckedArea().ValueOrDefault(INT_MAX);
- static_assert(limits::kMaxCanvas < INT_MAX, "");
- if (coded_size_area > limits::kMaxCanvas ||
- coded_size.width() > limits::kMaxDimension ||
- coded_size.height() > limits::kMaxDimension || visible_rect.x() < 0 ||
- visible_rect.y() < 0 || visible_rect.right() > coded_size.width() ||
- visible_rect.bottom() > coded_size.height() ||
- natural_size_area > limits::kMaxCanvas ||
- natural_size.width() > limits::kMaxDimension ||
- natural_size.height() > limits::kMaxDimension) {
+ if (!IsValidSize(coded_size, visible_rect, natural_size)) {
return false;
}
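The WrapVideoFrame() change above flattens chains of wrapped frames: a wrapper of a wrapper points at the innermost frame and keeps the intermediate frame alive through a destruction observer. A condensed sketch of the resulting behavior, mirroring the updated unit test further down:

  #include "media/base/video_frame.h"
  #include "ui/gfx/geometry/rect.h"
  #include "ui/gfx/geometry/size.h"

  void WrapTwice() {
    auto base_frame = media::VideoFrame::CreateBlackFrame(gfx::Size(4, 4));
    auto frame = media::VideoFrame::WrapVideoFrame(
        base_frame, base_frame->format(), gfx::Rect(2, 2), gfx::Size(2, 2));
    auto frame2 = media::VideoFrame::WrapVideoFrame(
        frame, frame->format(), gfx::Rect(1, 1), gfx::Size(1, 1));
    // |frame2| wraps |base_frame| directly rather than nesting through
    // |frame|, but it keeps a reference to |frame| so destruction observers
    // registered on |frame| do not fire until both wrappers are gone.
  }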
diff --git a/chromium/media/base/video_frame.h b/chromium/media/base/video_frame.h
index 81c9ac7dc59..21432585b8b 100644
--- a/chromium/media/base/video_frame.h
+++ b/chromium/media/base/video_frame.h
@@ -121,6 +121,13 @@ class MEDIA_EXPORT VideoFrame : public base::RefCountedThreadSafe<VideoFrame> {
VideoFrame(const VideoFrame&) = delete;
VideoFrame& operator=(const VideoFrame&) = delete;
+ // Returns true if size is valid for a VideoFrame. This method returns false
+ // if the size is empty, even though it is possible to create a zero-sized
+ // VideoFrame if the VideoPixelFormat is PIXEL_FORMAT_UNKNOWN.
+ static bool IsValidSize(const gfx::Size& coded_size,
+ const gfx::Rect& visible_rect,
+ const gfx::Size& natural_size);
+
// Returns true if frame configuration is valid.
static bool IsValidConfig(VideoPixelFormat format,
StorageType storage_type,
diff --git a/chromium/media/base/video_frame_metadata.cc b/chromium/media/base/video_frame_metadata.cc
index 944151640dd..96c2c877d53 100644
--- a/chromium/media/base/video_frame_metadata.cc
+++ b/chromium/media/base/video_frame_metadata.cc
@@ -57,6 +57,9 @@ void VideoFrameMetadata::MergeMetadataFrom(
MERGE_FIELD(wallclock_frame_duration, metadata_source);
MERGE_FIELD(maximum_composition_delay_in_frames, metadata_source);
MERGE_FIELD(hw_protected_validation_id, metadata_source);
+#if BUILDFLAG(USE_VAAPI)
+ MERGE_FIELD(hw_va_protected_session_id, metadata_source);
+#endif
}
} // namespace media
diff --git a/chromium/media/base/video_frame_metadata.h b/chromium/media/base/video_frame_metadata.h
index 379b26feed6..b26fb4933a3 100644
--- a/chromium/media/base/video_frame_metadata.h
+++ b/chromium/media/base/video_frame_metadata.h
@@ -10,6 +10,7 @@
#include "build/build_config.h"
#include "media/base/media_export.h"
#include "media/base/video_transformation.h"
+#include "media/gpu/buildflags.h"
#include "third_party/abseil-cpp/absl/types/optional.h"
#include "ui/gfx/geometry/rect.h"
@@ -136,6 +137,18 @@ struct MEDIA_EXPORT VideoFrameMetadata {
// properly displayed or not. Non-zero when valid.
uint32_t hw_protected_validation_id = 0;
+#if BUILDFLAG(USE_VAAPI)
+ // The ID of the VA-API protected session used to decode this frame, if
+ // applicable. The proper type is VAProtectedSessionID. However, in order to
+ // avoid including the VA-API headers in this file, we use the underlying
+ // type. Users of this field are expected to have compile-time assertions to
+ // ensure it's safe to use this as a VAProtectedSessionID.
+ //
+ // Notes on IPC: this field should not be copied to the Mojo version of
+ // VideoFrameMetadata because it should not cross process boundaries.
+ absl::optional<unsigned int> hw_va_protected_session_id;
+#endif
+
// An UnguessableToken that identifies VideoOverlayFactory that created
// this VideoFrame. It's used by Cast to help with video hole punch.
absl::optional<base::UnguessableToken> overlay_plane_id;
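The new hw_va_protected_session_id field deliberately stores the VA-API session id as its underlying integer type. A hedged sketch of the compile-time check the field comment asks consumers to add, assuming VAProtectedSessionID resolves to unsigned int and is visible from the libva headers (both are assumptions, not stated in the patch):

  #include <type_traits>

  #include <va/va.h>  // Assumed to declare the VAProtectedSessionID typedef.

  // Hypothetical consumer-side check in a USE_VAAPI build.
  static_assert(std::is_same<VAProtectedSessionID, unsigned int>::value,
                "hw_va_protected_session_id cannot be converted to a "
                "VAProtectedSessionID");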
diff --git a/chromium/media/base/video_frame_pool.cc b/chromium/media/base/video_frame_pool.cc
index b5db2c3fbab..52d82da5df9 100644
--- a/chromium/media/base/video_frame_pool.cc
+++ b/chromium/media/base/video_frame_pool.cc
@@ -132,7 +132,7 @@ void VideoFramePool::PoolImpl::FrameReleased(scoped_refptr<VideoFrame> frame) {
// After this loop, |stale_index| is the index of the oldest non-stale frame.
// Such an index must exist because |frame| is never stale.
int stale_index = -1;
- constexpr base::TimeDelta kStaleFrameLimit = base::TimeDelta::FromSeconds(10);
+ constexpr base::TimeDelta kStaleFrameLimit = base::Seconds(10);
while (now - frames_[++stale_index].last_use_time > kStaleFrameLimit) {
// Last frame should never be included since we just added it.
DCHECK_LE(static_cast<size_t>(stale_index), frames_.size());
diff --git a/chromium/media/base/video_frame_pool_unittest.cc b/chromium/media/base/video_frame_pool_unittest.cc
index 019111de709..a52ba7eb833 100644
--- a/chromium/media/base/video_frame_pool_unittest.cc
+++ b/chromium/media/base/video_frame_pool_unittest.cc
@@ -20,7 +20,7 @@ class VideoFramePoolTest
VideoFramePoolTest() : pool_(new VideoFramePool()) {
// Seed test clock with some dummy non-zero value to avoid confusion with
// empty base::TimeTicks values.
- test_clock_.Advance(base::TimeDelta::FromSeconds(1234));
+ test_clock_.Advance(base::Seconds(1234));
pool_->SetTickClockForTesting(&test_clock_);
}
@@ -31,12 +31,10 @@ class VideoFramePoolTest
gfx::Size natural_size(coded_size);
scoped_refptr<VideoFrame> frame =
- pool_->CreateFrame(
- format, coded_size, visible_rect, natural_size,
- base::TimeDelta::FromMilliseconds(timestamp_ms));
+ pool_->CreateFrame(format, coded_size, visible_rect, natural_size,
+ base::Milliseconds(timestamp_ms));
EXPECT_EQ(format, frame->format());
- EXPECT_EQ(base::TimeDelta::FromMilliseconds(timestamp_ms),
- frame->timestamp());
+ EXPECT_EQ(base::Milliseconds(timestamp_ms), frame->timestamp());
if (format == PIXEL_FORMAT_ARGB) {
EXPECT_EQ(coded_size, frame->coded_size());
} else {
@@ -140,7 +138,7 @@ TEST_F(VideoFramePoolTest, StaleFramesAreExpired) {
// Advance clock far enough to hit stale timer; ensure only frame_1 has its
// resources released.
- test_clock_.Advance(base::TimeDelta::FromMinutes(1));
+ test_clock_.Advance(base::Minutes(1));
frame_2 = nullptr;
CheckPoolSize(1u);
}
diff --git a/chromium/media/base/video_frame_unittest.cc b/chromium/media/base/video_frame_unittest.cc
index e637fb202ac..7d9fac9b416 100644
--- a/chromium/media/base/video_frame_unittest.cc
+++ b/chromium/media/base/video_frame_unittest.cc
@@ -90,17 +90,17 @@ media::VideoFrameMetadata GetFullVideoFrameMetadata() {
// base::TimeTicks
base::TimeTicks now = base::TimeTicks::Now();
- metadata.receive_time = now + base::TimeDelta::FromMilliseconds(10);
- metadata.capture_begin_time = now + base::TimeDelta::FromMilliseconds(20);
- metadata.capture_end_time = now + base::TimeDelta::FromMilliseconds(30);
- metadata.decode_begin_time = now + base::TimeDelta::FromMilliseconds(40);
- metadata.decode_end_time = now + base::TimeDelta::FromMilliseconds(50);
- metadata.reference_time = now + base::TimeDelta::FromMilliseconds(60);
+ metadata.receive_time = now + base::Milliseconds(10);
+ metadata.capture_begin_time = now + base::Milliseconds(20);
+ metadata.capture_end_time = now + base::Milliseconds(30);
+ metadata.decode_begin_time = now + base::Milliseconds(40);
+ metadata.decode_end_time = now + base::Milliseconds(50);
+ metadata.reference_time = now + base::Milliseconds(60);
// base::TimeDeltas
- metadata.processing_time = base::TimeDelta::FromMilliseconds(500);
- metadata.frame_duration = base::TimeDelta::FromMilliseconds(16);
- metadata.wallclock_frame_duration = base::TimeDelta::FromMilliseconds(17);
+ metadata.processing_time = base::Milliseconds(500);
+ metadata.frame_duration = base::Milliseconds(16);
+ metadata.wallclock_frame_duration = base::Milliseconds(17);
return metadata;
}
@@ -170,8 +170,7 @@ void InitializeYV12Frame(VideoFrame* frame, double white_to_black) {
// Given a |yv12_frame| this method converts the YV12 frame to RGBA and
// makes sure that all the pixels of the RBG frame equal |expect_rgb_color|.
-void ExpectFrameColor(media::VideoFrame* yv12_frame,
- uint32_t expect_rgb_color) {
+void ExpectFrameColor(VideoFrame* yv12_frame, uint32_t expect_rgb_color) {
ASSERT_EQ(PIXEL_FORMAT_YV12, yv12_frame->format());
ASSERT_EQ(yv12_frame->stride(VideoFrame::kUPlane),
yv12_frame->stride(VideoFrame::kVPlane));
@@ -216,7 +215,7 @@ void ExpectFrameExtents(VideoPixelFormat format, const char* expected_hash) {
const unsigned char kFillByte = 0x80;
const int kWidth = 61;
const int kHeight = 31;
- const base::TimeDelta kTimestamp = base::TimeDelta::FromMicroseconds(1337);
+ const base::TimeDelta kTimestamp = base::Microseconds(1337);
gfx::Size size(kWidth, kHeight);
scoped_refptr<VideoFrame> frame = VideoFrame::CreateFrame(
@@ -247,16 +246,16 @@ void ExpectFrameExtents(VideoPixelFormat format, const char* expected_hash) {
TEST(VideoFrame, CreateFrame) {
const int kWidth = 64;
const int kHeight = 48;
- const base::TimeDelta kTimestamp = base::TimeDelta::FromMicroseconds(1337);
+ const base::TimeDelta kTimestamp = base::Microseconds(1337);
// Create a YV12 Video Frame.
gfx::Size size(kWidth, kHeight);
- scoped_refptr<media::VideoFrame> frame = VideoFrame::CreateFrame(
- media::PIXEL_FORMAT_YV12, size, gfx::Rect(size), size, kTimestamp);
+ scoped_refptr<VideoFrame> frame = VideoFrame::CreateFrame(
+ PIXEL_FORMAT_YV12, size, gfx::Rect(size), size, kTimestamp);
ASSERT_TRUE(frame.get());
// Test VideoFrame implementation.
- EXPECT_EQ(media::PIXEL_FORMAT_YV12, frame->format());
+ EXPECT_EQ(PIXEL_FORMAT_YV12, frame->format());
{
SCOPED_TRACE("");
InitializeYV12Frame(frame.get(), 0.0f);
@@ -279,15 +278,15 @@ TEST(VideoFrame, CreateFrame) {
EXPECT_EQ(MD5DigestToBase16(digest), "911991d51438ad2e1a40ed5f6fc7c796");
// Test single planar frame.
- frame = VideoFrame::CreateFrame(media::PIXEL_FORMAT_ARGB, size,
- gfx::Rect(size), size, kTimestamp);
- EXPECT_EQ(media::PIXEL_FORMAT_ARGB, frame->format());
+ frame = VideoFrame::CreateFrame(PIXEL_FORMAT_ARGB, size, gfx::Rect(size),
+ size, kTimestamp);
+ EXPECT_EQ(PIXEL_FORMAT_ARGB, frame->format());
EXPECT_GE(frame->stride(VideoFrame::kARGBPlane), frame->coded_size().width());
// Test double planar frame.
- frame = VideoFrame::CreateFrame(media::PIXEL_FORMAT_NV12, size,
- gfx::Rect(size), size, kTimestamp);
- EXPECT_EQ(media::PIXEL_FORMAT_NV12, frame->format());
+ frame = VideoFrame::CreateFrame(PIXEL_FORMAT_NV12, size, gfx::Rect(size),
+ size, kTimestamp);
+ EXPECT_EQ(PIXEL_FORMAT_NV12, frame->format());
// Test an empty frame.
frame = VideoFrame::CreateEOSFrame();
@@ -302,13 +301,12 @@ TEST(VideoFrame, CreateFrame) {
TEST(VideoFrame, CreateZeroInitializedFrame) {
const int kWidth = 2;
const int kHeight = 2;
- const base::TimeDelta kTimestamp = base::TimeDelta::FromMicroseconds(1337);
+ const base::TimeDelta kTimestamp = base::Microseconds(1337);
// Create a YV12 Video Frame.
gfx::Size size(kWidth, kHeight);
- scoped_refptr<media::VideoFrame> frame =
- VideoFrame::CreateZeroInitializedFrame(media::PIXEL_FORMAT_YV12, size,
- gfx::Rect(size), size, kTimestamp);
+ scoped_refptr<VideoFrame> frame = VideoFrame::CreateZeroInitializedFrame(
+ PIXEL_FORMAT_YV12, size, gfx::Rect(size), size, kTimestamp);
ASSERT_TRUE(frame.get());
EXPECT_TRUE(frame->IsMappable());
@@ -325,7 +323,7 @@ TEST(VideoFrame, CreateBlackFrame) {
const uint8_t kExpectedYRow[] = {0, 0};
const uint8_t kExpectedUVRow[] = {128};
- scoped_refptr<media::VideoFrame> frame =
+ scoped_refptr<VideoFrame> frame =
VideoFrame::CreateBlackFrame(gfx::Size(kWidth, kHeight));
ASSERT_TRUE(frame.get());
EXPECT_TRUE(frame->IsMappable());
@@ -363,43 +361,66 @@ static void FrameNoLongerNeededCallback(bool* triggered) {
TEST(VideoFrame, WrapVideoFrame) {
const int kWidth = 4;
const int kHeight = 4;
- const base::TimeDelta kFrameDuration = base::TimeDelta::FromMicroseconds(42);
+ const base::TimeDelta kFrameDuration = base::Microseconds(42);
- scoped_refptr<media::VideoFrame> frame;
- bool done_callback_was_run = false;
+ scoped_refptr<VideoFrame> frame, frame2;
+ bool base_frame_done_callback_was_run = false;
+ bool wrapped_frame_done_callback_was_run = false;
{
- scoped_refptr<media::VideoFrame> wrapped_frame =
- VideoFrame::CreateBlackFrame(gfx::Size(kWidth, kHeight));
- ASSERT_TRUE(wrapped_frame.get());
+ auto base_frame = VideoFrame::CreateBlackFrame(gfx::Size(kWidth, kHeight));
+ ASSERT_TRUE(base_frame);
- gfx::Rect visible_rect(1, 1, 1, 1);
+ gfx::Rect visible_rect(0, 0, 2, 2);
gfx::Size natural_size = visible_rect.size();
- wrapped_frame->metadata().frame_duration = kFrameDuration;
- frame = media::VideoFrame::WrapVideoFrame(
- wrapped_frame, wrapped_frame->format(), visible_rect, natural_size);
- wrapped_frame->AddDestructionObserver(
- base::BindOnce(&FrameNoLongerNeededCallback, &done_callback_was_run));
- EXPECT_EQ(wrapped_frame->coded_size(), frame->coded_size());
- EXPECT_EQ(wrapped_frame->data(media::VideoFrame::kYPlane),
- frame->data(media::VideoFrame::kYPlane));
- EXPECT_NE(wrapped_frame->visible_rect(), frame->visible_rect());
+ base_frame->metadata().frame_duration = kFrameDuration;
+ frame = VideoFrame::WrapVideoFrame(base_frame, base_frame->format(),
+ visible_rect, natural_size);
+ base_frame->AddDestructionObserver(base::BindOnce(
+ &FrameNoLongerNeededCallback, &base_frame_done_callback_was_run));
+ EXPECT_EQ(base_frame->coded_size(), frame->coded_size());
+ EXPECT_EQ(base_frame->data(VideoFrame::kYPlane),
+ frame->data(VideoFrame::kYPlane));
+ EXPECT_NE(base_frame->visible_rect(), frame->visible_rect());
EXPECT_EQ(visible_rect, frame->visible_rect());
- EXPECT_NE(wrapped_frame->natural_size(), frame->natural_size());
+ EXPECT_NE(base_frame->natural_size(), frame->natural_size());
EXPECT_EQ(natural_size, frame->natural_size());
// Verify metadata was copied to the wrapped frame.
EXPECT_EQ(*frame->metadata().frame_duration, kFrameDuration);
// Verify the metadata copy was a deep copy.
- wrapped_frame->clear_metadata();
- EXPECT_NE(wrapped_frame->metadata().frame_duration.has_value(),
+ base_frame->clear_metadata();
+ EXPECT_NE(base_frame->metadata().frame_duration.has_value(),
frame->metadata().frame_duration.has_value());
+
+ frame->AddDestructionObserver(base::BindOnce(
+ &FrameNoLongerNeededCallback, &wrapped_frame_done_callback_was_run));
+
+ visible_rect = gfx::Rect(0, 0, 1, 1);
+ natural_size = visible_rect.size();
+ frame2 = VideoFrame::WrapVideoFrame(frame, frame->format(), visible_rect,
+ natural_size);
+ EXPECT_EQ(base_frame->coded_size(), frame2->coded_size());
+ EXPECT_EQ(base_frame->data(VideoFrame::kYPlane),
+ frame2->data(VideoFrame::kYPlane));
+ EXPECT_NE(base_frame->visible_rect(), frame2->visible_rect());
+ EXPECT_EQ(visible_rect, frame2->visible_rect());
+ EXPECT_NE(base_frame->natural_size(), frame2->natural_size());
+ EXPECT_EQ(natural_size, frame2->natural_size());
}
- // Verify that |wrapped_frame| outlives |frame|.
- EXPECT_FALSE(done_callback_was_run);
+ // At this point |base_frame| is held by |frame|, |frame2|.
+ EXPECT_FALSE(base_frame_done_callback_was_run);
+ EXPECT_FALSE(wrapped_frame_done_callback_was_run);
+
+ // At this point |base_frame| is held by |frame2|, which also holds |frame|.
frame.reset();
- EXPECT_TRUE(done_callback_was_run);
+ EXPECT_FALSE(base_frame_done_callback_was_run);
+ EXPECT_FALSE(wrapped_frame_done_callback_was_run);
+
+ // Now all |base_frame| references should be released.
+ frame2.reset();
+ EXPECT_TRUE(base_frame_done_callback_was_run);
}
// Create a frame that wraps unowned memory.
@@ -408,15 +429,15 @@ TEST(VideoFrame, WrapExternalData) {
gfx::Size coded_size(256, 256);
gfx::Rect visible_rect(coded_size);
CreateTestY16Frame(coded_size, visible_rect, memory);
- auto timestamp = base::TimeDelta::FromMilliseconds(1);
- auto frame = VideoFrame::WrapExternalData(media::PIXEL_FORMAT_Y16, coded_size,
+ auto timestamp = base::Milliseconds(1);
+ auto frame = VideoFrame::WrapExternalData(PIXEL_FORMAT_Y16, coded_size,
visible_rect, visible_rect.size(),
memory, sizeof(memory), timestamp);
EXPECT_EQ(frame->coded_size(), coded_size);
EXPECT_EQ(frame->visible_rect(), visible_rect);
EXPECT_EQ(frame->timestamp(), timestamp);
- EXPECT_EQ(frame->data(media::VideoFrame::kYPlane)[0], 0xff);
+ EXPECT_EQ(frame->data(VideoFrame::kYPlane)[0], 0xff);
}
// Create a frame that wraps read-only shared memory.
@@ -430,22 +451,22 @@ TEST(VideoFrame, WrapSharedMemory) {
gfx::Size coded_size(256, 256);
gfx::Rect visible_rect(coded_size);
CreateTestY16Frame(coded_size, visible_rect, mapping.memory());
- auto timestamp = base::TimeDelta::FromMilliseconds(1);
+ auto timestamp = base::Milliseconds(1);
auto frame = VideoFrame::WrapExternalData(
- media::PIXEL_FORMAT_Y16, coded_size, visible_rect, visible_rect.size(),
+ PIXEL_FORMAT_Y16, coded_size, visible_rect, visible_rect.size(),
mapping.GetMemoryAsSpan<uint8_t>().data(), kDataSize, timestamp);
frame->BackWithSharedMemory(&region);
EXPECT_EQ(frame->coded_size(), coded_size);
EXPECT_EQ(frame->visible_rect(), visible_rect);
EXPECT_EQ(frame->timestamp(), timestamp);
- EXPECT_EQ(frame->data(media::VideoFrame::kYPlane)[0], 0xff);
+ EXPECT_EQ(frame->data(VideoFrame::kYPlane)[0], 0xff);
}
TEST(VideoFrame, WrapExternalGpuMemoryBuffer) {
gfx::Size coded_size = gfx::Size(256, 256);
gfx::Rect visible_rect(coded_size);
- auto timestamp = base::TimeDelta::FromMilliseconds(1);
+ auto timestamp = base::Milliseconds(1);
#if defined(OS_LINUX) || defined(OS_CHROMEOS)
const uint64_t modifier = 0x001234567890abcdULL;
#else
@@ -455,14 +476,12 @@ TEST(VideoFrame, WrapExternalGpuMemoryBuffer) {
std::make_unique<FakeGpuMemoryBuffer>(
coded_size, gfx::BufferFormat::YUV_420_BIPLANAR, modifier);
gfx::GpuMemoryBuffer* gmb_raw_ptr = gmb.get();
- gpu::MailboxHolder mailbox_holders[media::VideoFrame::kMaxPlanes] = {
+ gpu::MailboxHolder mailbox_holders[VideoFrame::kMaxPlanes] = {
gpu::MailboxHolder(gpu::Mailbox::Generate(), gpu::SyncToken(), 5),
gpu::MailboxHolder(gpu::Mailbox::Generate(), gpu::SyncToken(), 10)};
auto frame = VideoFrame::WrapExternalGpuMemoryBuffer(
visible_rect, coded_size, std::move(gmb), mailbox_holders,
- base::DoNothing::Once<const gpu::SyncToken&,
- std::unique_ptr<gfx::GpuMemoryBuffer>>(),
- timestamp);
+ base::DoNothing(), timestamp);
EXPECT_EQ(frame->layout().format(), PIXEL_FORMAT_NV12);
EXPECT_EQ(frame->layout().coded_size(), coded_size);
@@ -498,7 +517,7 @@ TEST(VideoFrame, WrapExternalDmabufs) {
planes[i].offset = offsets[i];
planes[i].size = sizes[i];
}
- auto timestamp = base::TimeDelta::FromMilliseconds(1);
+ auto timestamp = base::Milliseconds(1);
auto layout =
VideoFrameLayout::CreateWithPlanes(PIXEL_FORMAT_I420, coded_size, planes);
ASSERT_TRUE(layout);
@@ -558,7 +577,7 @@ TEST(VideoFrame, TextureNoLongerNeededCallbackIsCalled) {
gpu::CommandBufferId::FromUnsafeValue(1), 1);
{
- gpu::MailboxHolder holders[media::VideoFrame::kMaxPlanes] = {
+ gpu::MailboxHolder holders[VideoFrame::kMaxPlanes] = {
gpu::MailboxHolder(gpu::Mailbox::Generate(), gpu::SyncToken(), 5)};
scoped_refptr<VideoFrame> frame = VideoFrame::WrapNativeTextures(
PIXEL_FORMAT_ARGB, holders,
@@ -599,7 +618,7 @@ TEST(VideoFrame,
gpu::SyncToken called_sync_token;
{
- gpu::MailboxHolder holders[media::VideoFrame::kMaxPlanes] = {
+ gpu::MailboxHolder holders[VideoFrame::kMaxPlanes] = {
gpu::MailboxHolder(mailbox[VideoFrame::kYPlane], sync_token, target),
gpu::MailboxHolder(mailbox[VideoFrame::kUPlane], sync_token, target),
gpu::MailboxHolder(mailbox[VideoFrame::kVPlane], sync_token, target),
@@ -762,9 +781,8 @@ TEST(VideoFrameMetadata, PartialMergeMetadata) {
VideoFrameMetadata full_metadata = GetFullVideoFrameMetadata();
const gfx::Rect kTempRect{100, 200, 300, 400};
- const base::TimeTicks kTempTicks =
- base::TimeTicks::Now() + base::TimeDelta::FromSeconds(2);
- const base::TimeDelta kTempDelta = base::TimeDelta::FromMilliseconds(31415);
+ const base::TimeTicks kTempTicks = base::TimeTicks::Now() + base::Seconds(2);
+ const base::TimeDelta kTempDelta = base::Milliseconds(31415);
VideoFrameMetadata partial_metadata;
partial_metadata.capture_update_rect = kTempRect;
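The WrapExternalGpuMemoryBuffer test above also switches to the plain base::DoNothing() spelling, which converts implicitly to any void-returning Once or Repeating callback, so the explicit base::DoNothing::Once<...>() template arguments are no longer needed. A small sketch with a hypothetical callback signature:

  #include "base/callback.h"
  #include "base/callback_helpers.h"

  void TakeReleaseCb(base::OnceCallback<void(int, bool)> cb) {}

  void Example() {
    // Old: TakeReleaseCb(base::DoNothing::Once<int, bool>());
    // base::DoNothing() now deduces the callback type at the call site.
    TakeReleaseCb(base::DoNothing());
  }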
diff --git a/chromium/media/base/video_thumbnail_decoder_unittest.cc b/chromium/media/base/video_thumbnail_decoder_unittest.cc
index f070c07cce9..5e510bba82b 100644
--- a/chromium/media/base/video_thumbnail_decoder_unittest.cc
+++ b/chromium/media/base/video_thumbnail_decoder_unittest.cc
@@ -39,9 +39,10 @@ class VideoThumbnailDecoderTest : public testing::Test {
auto mock_video_decoder = std::make_unique<MockVideoDecoder>();
mock_video_decoder_ = mock_video_decoder.get();
VideoDecoderConfig valid_config(
- kCodecVP8, VP8PROFILE_ANY, VideoDecoderConfig::AlphaMode::kIsOpaque,
- VideoColorSpace(), kNoTransformation, gfx::Size(1, 1), gfx::Rect(1, 1),
- gfx::Size(1, 1), EmptyExtraData(), EncryptionScheme::kUnencrypted);
+ VideoCodec::kVP8, VP8PROFILE_ANY,
+ VideoDecoderConfig::AlphaMode::kIsOpaque, VideoColorSpace(),
+ kNoTransformation, gfx::Size(1, 1), gfx::Rect(1, 1), gfx::Size(1, 1),
+ EmptyExtraData(), EncryptionScheme::kUnencrypted);
thumbnail_decoder_ = std::make_unique<VideoThumbnailDecoder>(
std::move(mock_video_decoder), valid_config, std::vector<uint8_t>{0u});
diff --git a/chromium/media/base/video_util.cc b/chromium/media/base/video_util.cc
index 8c8d5f4a431..ebd61ffd5a3 100644
--- a/chromium/media/base/video_util.cc
+++ b/chromium/media/base/video_util.cc
@@ -7,6 +7,7 @@
#include <cmath>
#include "base/bind.h"
+#include "base/bits.h"
#include "base/callback_helpers.h"
#include "base/check_op.h"
#include "base/logging.h"
@@ -61,202 +62,159 @@ void FillRegionOutsideVisibleRect(uint8_t* data,
}
}
-std::pair<SkColorType, GrGLenum> GetSkiaAndGlColorTypesForPlane(
- VideoPixelFormat format,
- size_t plane) {
- // TODO(eugene): There is some strange channel switch during RGB readback.
- // When frame's pixel format matches GL and Skia color types we get reversed
- // channels. But why?
+VideoPixelFormat ReadbackFormat(const media::VideoFrame& frame) {
+ switch (frame.format()) {
+ case PIXEL_FORMAT_I420:
+ case PIXEL_FORMAT_I420A:
+ case PIXEL_FORMAT_I422:
+ case PIXEL_FORMAT_I444:
+ case PIXEL_FORMAT_ARGB:
+ case PIXEL_FORMAT_XRGB:
+ case PIXEL_FORMAT_ABGR:
+ case PIXEL_FORMAT_XBGR:
+ return frame.format();
+ case PIXEL_FORMAT_NV12:
+ // |frame| may be backed by a graphics buffer that is NV12, but sampled as
+ // a single RGB texture.
+ return frame.NumTextures() == 1 ? PIXEL_FORMAT_XRGB : PIXEL_FORMAT_NV12;
+ default:
+ // Currently unsupported.
+ return PIXEL_FORMAT_UNKNOWN;
+ }
+}
+
+// TODO(eugene): There is some strange channel switch during RGB readback.
+// When frame's pixel format matches GL and Skia color types we get reversed
+// channels. But why?
+SkColorType SkColorTypeForPlane(VideoPixelFormat format, size_t plane) {
switch (format) {
+ case PIXEL_FORMAT_I420:
+ case PIXEL_FORMAT_I420A:
+ case PIXEL_FORMAT_I422:
+ case PIXEL_FORMAT_I444:
+ // kGray_8_SkColorType would make more sense but doesn't work on Windows.
+ return kAlpha_8_SkColorType;
case PIXEL_FORMAT_NV12:
- if (plane == VideoFrame::kUVPlane)
- return {kR8G8_unorm_SkColorType, GL_RG8_EXT};
- if (plane == VideoFrame::kYPlane)
- return {kAlpha_8_SkColorType, GL_R8_EXT};
- break;
+ return plane == media::VideoFrame::kYPlane ? kAlpha_8_SkColorType
+ : kR8G8_unorm_SkColorType;
case PIXEL_FORMAT_XBGR:
- if (plane == VideoFrame::kARGBPlane)
- return {kRGBA_8888_SkColorType, GL_RGBA8_OES};
- break;
case PIXEL_FORMAT_ABGR:
- if (plane == VideoFrame::kARGBPlane)
- return {kRGBA_8888_SkColorType, GL_RGBA8_OES};
- break;
+ return kRGBA_8888_SkColorType;
case PIXEL_FORMAT_XRGB:
- if (plane == VideoFrame::kARGBPlane)
- return {kBGRA_8888_SkColorType, GL_BGRA8_EXT};
- break;
case PIXEL_FORMAT_ARGB:
- if (plane == VideoFrame::kARGBPlane)
- return {kBGRA_8888_SkColorType, GL_BGRA8_EXT};
- break;
+ return kBGRA_8888_SkColorType;
default:
- break;
+ NOTREACHED();
+ return kUnknown_SkColorType;
}
- NOTREACHED();
- return {kUnknown_SkColorType, 0};
}
-scoped_refptr<VideoFrame> ReadbackTextureBackedFrameToMemorySyncGLES(
- const VideoFrame& txt_frame,
- gpu::raster::RasterInterface* ri,
- GrDirectContext* gr_context,
- VideoFramePool* pool) {
- DCHECK(gr_context);
-
- if (txt_frame.NumTextures() > 2 || txt_frame.NumTextures() < 1) {
- DLOG(ERROR) << "Readback is not possible for this frame: "
- << txt_frame.AsHumanReadableString();
- return nullptr;
- }
-
- VideoPixelFormat result_format = txt_frame.format();
- if (txt_frame.NumTextures() == 1 && result_format == PIXEL_FORMAT_NV12) {
- // Even though |txt_frame| format is NV12 and it is NV12 in GPU memory,
- // the texture is a RGB view that is produced by a shader on the fly.
- // So we currently we currently can only read it back as RGB.
- result_format = PIXEL_FORMAT_ARGB;
+GrGLenum GLFormatForPlane(VideoPixelFormat format, size_t plane) {
+ switch (SkColorTypeForPlane(format, plane)) {
+ case kAlpha_8_SkColorType:
+ return GL_R8_EXT;
+ case kR8G8_unorm_SkColorType:
+ return GL_RG8_EXT;
+ case kRGBA_8888_SkColorType:
+ return GL_RGBA8_OES;
+ case kBGRA_8888_SkColorType:
+ return GL_BGRA8_EXT;
+ default:
+ NOTREACHED();
+ return 0;
}
+}
- scoped_refptr<VideoFrame> result =
- pool
- ? pool->CreateFrame(result_format, txt_frame.coded_size(),
- txt_frame.visible_rect(),
- txt_frame.natural_size(), txt_frame.timestamp())
- : VideoFrame::CreateFrame(
- result_format, txt_frame.coded_size(), txt_frame.visible_rect(),
- txt_frame.natural_size(), txt_frame.timestamp());
- result->set_color_space(txt_frame.ColorSpace());
- result->metadata().MergeMetadataFrom(txt_frame.metadata());
- result->metadata().texture_origin_is_top_left = true;
-
- size_t planes = VideoFrame::NumPlanes(result->format());
- for (size_t plane = 0; plane < planes; plane++) {
- const gpu::MailboxHolder& holder = txt_frame.mailbox_holder(plane);
- if (holder.mailbox.IsZero())
- return nullptr;
- ri->WaitSyncTokenCHROMIUM(holder.sync_token.GetConstData());
-
- int width = result->columns(plane);
- int height = result->rows(plane);
-
- auto texture_id = ri->CreateAndConsumeForGpuRaster(holder.mailbox);
- if (holder.mailbox.IsSharedImage()) {
- ri->BeginSharedImageAccessDirectCHROMIUM(
- texture_id, GL_SHARED_IMAGE_ACCESS_MODE_READ_CHROMIUM);
- }
-
- auto cleanup_fn = [](GLuint texture_id, bool shared,
- gpu::raster::RasterInterface* ri) {
- if (shared)
- ri->EndSharedImageAccessDirectCHROMIUM(texture_id);
- ri->DeleteGpuRasterTexture(texture_id);
- };
- base::ScopedClosureRunner cleanup(base::BindOnce(
- cleanup_fn, texture_id, holder.mailbox.IsSharedImage(), ri));
-
- GrGLenum texture_format;
- SkColorType sk_color_type;
- std::tie(sk_color_type, texture_format) =
- GetSkiaAndGlColorTypesForPlane(result->format(), plane);
- GrGLTextureInfo gl_texture_info;
- gl_texture_info.fID = texture_id;
- gl_texture_info.fTarget = holder.texture_target;
- gl_texture_info.fFormat = texture_format;
-
- GrBackendTexture texture(width, height, GrMipMapped::kNo, gl_texture_info);
- auto image = SkImage::MakeFromTexture(
- gr_context, texture,
- txt_frame.metadata().texture_origin_is_top_left
- ? kTopLeft_GrSurfaceOrigin
- : kBottomLeft_GrSurfaceOrigin,
- sk_color_type, kOpaque_SkAlphaType, /*colorSpace=*/nullptr);
-
- if (!image) {
- DLOG(ERROR) << "Can't create SkImage from texture!"
- << " plane:" << plane;
- return nullptr;
- }
+bool ReadbackTexturePlaneToMemorySyncSkImage(const VideoFrame& src_frame,
+ size_t src_plane,
+ gfx::Rect& src_rect,
+ uint8_t* dest_pixels,
+ size_t dest_stride,
+ gpu::raster::RasterInterface* ri,
+ GrDirectContext* gr_context) {
+ DCHECK(gr_context);
- auto info =
- SkImageInfo::Make(width, height, sk_color_type, kOpaque_SkAlphaType);
- SkPixmap pixmap(info, result->data(plane), result->row_bytes(plane));
- if (!image->readPixels(gr_context, pixmap, 0, 0,
- SkImage::kDisallow_CachingHint)) {
- DLOG(ERROR) << "Plane readback failed."
- << " plane:" << plane << " width: " << width
- << " height: " << height
- << " minRowBytes: " << info.minRowBytes();
- return nullptr;
- }
+ VideoPixelFormat format = ReadbackFormat(src_frame);
+ int width = src_frame.columns(src_plane);
+ int height = src_frame.rows(src_plane);
+ bool has_alpha = !IsOpaque(format) && src_frame.NumTextures() == 1;
+
+ const gpu::MailboxHolder& holder = src_frame.mailbox_holder(src_plane);
+ DCHECK(!holder.mailbox.IsZero());
+ ri->WaitSyncTokenCHROMIUM(holder.sync_token.GetConstData());
+ auto texture_id = ri->CreateAndConsumeForGpuRaster(holder.mailbox);
+ if (holder.mailbox.IsSharedImage()) {
+ ri->BeginSharedImageAccessDirectCHROMIUM(
+ texture_id, GL_SHARED_IMAGE_ACCESS_MODE_READ_CHROMIUM);
}
-
- return result;
-}
-
-scoped_refptr<VideoFrame> ReadbackTextureBackedFrameToMemorySyncOOP(
- const VideoFrame& txt_frame,
- gpu::raster::RasterInterface* ri,
- VideoFramePool* pool) {
- if (txt_frame.NumTextures() > 2 || txt_frame.NumTextures() < 1) {
- DLOG(ERROR) << "Readback is not possible for this frame: "
- << txt_frame.AsHumanReadableString();
- return nullptr;
+ base::ScopedClosureRunner cleanup(base::BindOnce(
+ [](GLuint texture_id, bool shared, gpu::raster::RasterInterface* ri) {
+ if (shared)
+ ri->EndSharedImageAccessDirectCHROMIUM(texture_id);
+ ri->DeleteGpuRasterTexture(texture_id);
+ },
+ texture_id, holder.mailbox.IsSharedImage(), ri));
+
+ GrGLenum texture_format = GLFormatForPlane(format, src_plane);
+ SkColorType sk_color_type = SkColorTypeForPlane(format, src_plane);
+ SkAlphaType sk_alpha_type =
+ has_alpha ? kUnpremul_SkAlphaType : kOpaque_SkAlphaType;
+
+ GrGLTextureInfo gl_texture_info;
+ gl_texture_info.fID = texture_id;
+ gl_texture_info.fTarget = holder.texture_target;
+ gl_texture_info.fFormat = texture_format;
+ GrBackendTexture texture(width, height, GrMipMapped::kNo, gl_texture_info);
+
+ auto image =
+ SkImage::MakeFromTexture(gr_context, texture,
+ src_frame.metadata().texture_origin_is_top_left
+ ? kTopLeft_GrSurfaceOrigin
+ : kBottomLeft_GrSurfaceOrigin,
+ sk_color_type, sk_alpha_type,
+ /*colorSpace=*/nullptr);
+ if (!image) {
+ DLOG(ERROR) << "Can't create SkImage from texture plane " << src_plane;
+ return false;
}
- VideoPixelFormat result_format = txt_frame.format();
- if (txt_frame.NumTextures() == 1 && result_format == PIXEL_FORMAT_NV12) {
- // Even though |txt_frame| format is NV12 and it is NV12 in GPU memory,
-    // the texture is an RGB view that is produced by a shader on the fly.
-    // So currently we can only read it back as RGB.
- result_format = PIXEL_FORMAT_ARGB;
+ auto dest_info = SkImageInfo::Make(src_rect.width(), src_rect.height(),
+ sk_color_type, sk_alpha_type);
+ SkPixmap dest_pixmap(dest_info, dest_pixels, dest_stride);
+ if (!image->readPixels(gr_context, dest_pixmap, src_rect.x(), src_rect.y(),
+ SkImage::kDisallow_CachingHint)) {
+ DLOG(ERROR) << "Plane readback failed."
+ << " plane:" << src_plane << " width: " << width
+ << " height: " << height;
+ return false;
}
- scoped_refptr<VideoFrame> result =
- pool
- ? pool->CreateFrame(result_format, txt_frame.coded_size(),
- txt_frame.visible_rect(),
- txt_frame.natural_size(), txt_frame.timestamp())
- : VideoFrame::CreateFrame(
- result_format, txt_frame.coded_size(), txt_frame.visible_rect(),
- txt_frame.natural_size(), txt_frame.timestamp());
- result->set_color_space(txt_frame.ColorSpace());
- result->metadata().MergeMetadataFrom(txt_frame.metadata());
-
- size_t planes = VideoFrame::NumPlanes(result->format());
- for (size_t plane = 0; plane < planes; plane++) {
- const gpu::MailboxHolder& holder = txt_frame.mailbox_holder(plane);
- if (holder.mailbox.IsZero()) {
- DLOG(ERROR) << "Can't readback video frame with Zero texture on plane "
- << plane;
- return nullptr;
- }
- ri->WaitSyncTokenCHROMIUM(holder.sync_token.GetConstData());
-
- int width = result->columns(plane);
- int height = result->rows(plane);
-
- GrGLenum texture_format;
- SkColorType sk_color_type;
- std::tie(sk_color_type, texture_format) =
- GetSkiaAndGlColorTypesForPlane(result->format(), plane);
-
- auto info =
- SkImageInfo::Make(width, height, sk_color_type, kOpaque_SkAlphaType);
-
- ri->ReadbackImagePixels(holder.mailbox, info, info.minRowBytes(), 0, 0,
- result->data(plane));
- if (ri->GetError() != GL_NO_ERROR) {
- DLOG(ERROR) << "Plane readback failed."
- << " plane:" << plane << " width: " << width
- << " height: " << height
- << " minRowBytes: " << info.minRowBytes()
- << " error: " << ri->GetError();
- return nullptr;
- }
- }
+ return true;
+}
- return result;
+bool ReadbackTexturePlaneToMemorySyncOOP(const VideoFrame& src_frame,
+ size_t src_plane,
+ gfx::Rect& src_rect,
+ uint8_t* dest_pixels,
+ size_t dest_stride,
+ gpu::raster::RasterInterface* ri) {
+ VideoPixelFormat format = ReadbackFormat(src_frame);
+ bool has_alpha = !IsOpaque(format) && src_frame.NumTextures() == 1;
+
+ const gpu::MailboxHolder& holder = src_frame.mailbox_holder(src_plane);
+ DCHECK(!holder.mailbox.IsZero());
+ ri->WaitSyncTokenCHROMIUM(holder.sync_token.GetConstData());
+
+ SkColorType sk_color_type = SkColorTypeForPlane(format, src_plane);
+ SkAlphaType sk_alpha_type =
+ has_alpha ? kUnpremul_SkAlphaType : kOpaque_SkAlphaType;
+
+ auto info = SkImageInfo::Make(src_rect.width(), src_rect.height(),
+ sk_color_type, sk_alpha_type);
+ ri->ReadbackImagePixels(holder.mailbox, info, dest_stride, src_rect.x(),
+ src_rect.y(), dest_pixels);
+ DCHECK_EQ(ri->GetError(), static_cast<GLenum>(GL_NO_ERROR));
+ return true;
}
} // namespace
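
The refactor above replaces the two whole-frame readback paths with a single per-plane helper. As a rough illustration of how a caller drives it (a minimal sketch, not part of the patch; |ri|, |gr_context| and the pre-allocated mappable destination frame are assumed to exist, and it simply mirrors the loop added to ReadbackTextureBackedFrameToMemorySync further down), each plane is filled by one ReadbackTexturePlaneToMemorySync() call with a plane-relative source rectangle:

#include "media/base/video_frame.h"
#include "media/base/video_util.h"
#include "ui/gfx/geometry/rect.h"

// Hypothetical caller sketch: read back every plane of a texture-backed
// frame into a mappable |dest| frame of the readback format.
bool ReadbackAllPlanes(const media::VideoFrame& src,
                       media::VideoFrame& dest,
                       gpu::raster::RasterInterface* ri,
                       GrDirectContext* gr_context) {
  const size_t planes = media::VideoFrame::NumPlanes(dest.format());
  for (size_t plane = 0; plane < planes; ++plane) {
    // |src_rect| is in plane coordinates, so for NV12 the UV plane covers
    // half the width and height of the Y plane.
    gfx::Rect src_rect(0, 0, src.columns(plane), src.rows(plane));
    if (!media::ReadbackTexturePlaneToMemorySync(
            src, plane, src_rect, dest.data(plane), dest.stride(plane), ri,
            gr_context)) {
      return false;
    }
  }
  return true;
}
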
@@ -556,6 +514,28 @@ gfx::Size ScaleSizeToEncompassTarget(const gfx::Size& size,
return ScaleSizeToTarget(size, target, false);
}
+gfx::Rect CropSizeForScalingToTarget(const gfx::Size& size,
+ const gfx::Size& target,
+ size_t alignment) {
+ DCHECK_GT(alignment, 0u);
+ if (size.IsEmpty() || target.IsEmpty())
+ return gfx::Rect();
+
+ gfx::Rect crop(ScaleSizeToFitWithinTarget(target, size));
+ crop.set_width(base::checked_cast<int>(base::bits::AlignDown(
+ base::checked_cast<size_t>(crop.width()), alignment)));
+ crop.set_height(base::checked_cast<int>(base::bits::AlignDown(
+ base::checked_cast<size_t>(crop.height()), alignment)));
+ crop.set_x(base::checked_cast<int>(base::bits::AlignDown(
+ base::checked_cast<size_t>((size.width() - crop.width()) / 2),
+ alignment)));
+ crop.set_y(base::checked_cast<int>(base::bits::AlignDown(
+ base::checked_cast<size_t>((size.height() - crop.height()) / 2),
+ alignment)));
+ DCHECK(gfx::Rect(size).Contains(crop));
+ return crop;
+}
+
gfx::Size GetRectSizeFromOrigin(const gfx::Rect& rect) {
return gfx::Size(rect.right(), rect.bottom());
}
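
To make the alignment behaviour of CropSizeForScalingToTarget() concrete, here is a small standalone sketch of the same arithmetic for one of the cases exercised by the unit tests further down (a 426x240 source cropped to 4:3 with 2-pixel alignment). The plain-C++ names are illustrative only and not part of the patch:

#include <cstddef>
#include <cstdio>

// Reproduces gfx::Rect(52, 0, 320, 240): first fit a 4:3 box inside 426x240
// (height is the limiting dimension here), then align the width, height and
// centered origin down to a multiple of |alignment| (a power of two).
int main() {
  const int src_w = 426, src_h = 240;
  const int target_w = 4, target_h = 3;
  const std::size_t alignment = 2;

  auto align_down = [&](int v) {
    return static_cast<int>(static_cast<std::size_t>(v) & ~(alignment - 1));
  };

  int crop_w = align_down(src_h * target_w / target_h);  // 240 * 4 / 3 = 320
  int crop_h = align_down(src_h);                        // 240
  int x = align_down((src_w - crop_w) / 2);              // 53 -> 52
  int y = align_down((src_h - crop_h) / 2);              // 0

  std::printf("crop = (%d, %d, %dx%d)\n", x, y, crop_w, crop_h);
  return 0;
}
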
@@ -676,11 +656,53 @@ scoped_refptr<VideoFrame> ReadbackTextureBackedFrameToMemorySync(
VideoFramePool* pool) {
DCHECK(ri);
+ VideoPixelFormat format = ReadbackFormat(txt_frame);
+ if (format == PIXEL_FORMAT_UNKNOWN) {
+ DLOG(ERROR) << "Readback is not possible for this frame: "
+ << txt_frame.AsHumanReadableString();
+ return nullptr;
+ }
+
+ scoped_refptr<VideoFrame> result =
+ pool ? pool->CreateFrame(format, txt_frame.coded_size(),
+ txt_frame.visible_rect(),
+ txt_frame.natural_size(), txt_frame.timestamp())
+ : VideoFrame::CreateFrame(
+ format, txt_frame.coded_size(), txt_frame.visible_rect(),
+ txt_frame.natural_size(), txt_frame.timestamp());
+ result->set_color_space(txt_frame.ColorSpace());
+ result->metadata().MergeMetadataFrom(txt_frame.metadata());
+
+ size_t planes = VideoFrame::NumPlanes(format);
+ for (size_t plane = 0; plane < planes; plane++) {
+ gfx::Rect src_rect(0, 0, txt_frame.columns(plane), txt_frame.rows(plane));
+ if (!ReadbackTexturePlaneToMemorySync(
+ txt_frame, plane, src_rect, result->data(plane),
+ result->stride(plane), ri, gr_context)) {
+ return nullptr;
+ }
+ }
+
+ return result;
+}
+
+bool ReadbackTexturePlaneToMemorySync(const VideoFrame& src_frame,
+ size_t src_plane,
+ gfx::Rect& src_rect,
+ uint8_t* dest_pixels,
+ size_t dest_stride,
+ gpu::raster::RasterInterface* ri,
+ GrDirectContext* gr_context) {
+ DCHECK(ri);
+
if (gr_context) {
- return ReadbackTextureBackedFrameToMemorySyncGLES(txt_frame, ri, gr_context,
- pool);
+ return ReadbackTexturePlaneToMemorySyncSkImage(src_frame, src_plane,
+ src_rect, dest_pixels,
+ dest_stride, ri, gr_context);
}
- return ReadbackTextureBackedFrameToMemorySyncOOP(txt_frame, ri, pool);
+
+ return ReadbackTexturePlaneToMemorySyncOOP(src_frame, src_plane, src_rect,
+ dest_pixels, dest_stride, ri);
}
Status ConvertAndScaleFrame(const VideoFrame& src_frame,
@@ -958,8 +980,8 @@ scoped_refptr<VideoFrame> CreateFromSkImage(sk_sp<SkImage> sk_image,
if (!frame)
return nullptr;
- frame->AddDestructionObserver(base::BindOnce(
- base::DoNothing::Once<sk_sp<SkImage>>(), std::move(sk_image)));
+ frame->AddDestructionObserver(
+ base::BindOnce([](sk_sp<SkImage>) {}, std::move(sk_image)));
return frame;
}

diff --git a/chromium/media/base/video_util.h b/chromium/media/base/video_util.h
index c19b5b16b3b..87ecadd658d 100644
--- a/chromium/media/base/video_util.h
+++ b/chromium/media/base/video_util.h
@@ -101,6 +101,16 @@ MEDIA_EXPORT gfx::Size ScaleSizeToFitWithinTarget(const gfx::Size& size,
MEDIA_EXPORT gfx::Size ScaleSizeToEncompassTarget(const gfx::Size& size,
const gfx::Size& target);
+// Calculates the largest sub-rectangle of a rectangle of size |size| with
+// roughly the same aspect ratio as |target| and centered both horizontally
+// and vertically within the rectangle. It's "roughly" the same aspect ratio
+// because its dimensions may be rounded down to be a multiple of |alignment|.
+// The origin of the rectangle is also aligned down to a multiple of
+// |alignment|. Note that |alignment| must be a power of 2.
+MEDIA_EXPORT gfx::Rect CropSizeForScalingToTarget(const gfx::Size& size,
+ const gfx::Size& target,
+ size_t alignment = 1u);
+
// Returns the size of a rectangle whose upper left corner is at the origin (0,
// 0) and whose bottom right corner is the same as that of |rect|. This is
// useful to get the size of a buffer that contains the visible rectangle plus
@@ -143,6 +153,17 @@ MEDIA_EXPORT scoped_refptr<VideoFrame> ReadbackTextureBackedFrameToMemorySync(
GrDirectContext* gr_context,
VideoFramePool* pool = nullptr);
+// Synchronously reads a single plane. |src_rect| is relative to the plane,
+// which may be smaller than |src_frame|'s coded size due to subsampling.
+MEDIA_EXPORT bool ReadbackTexturePlaneToMemorySync(
+ const VideoFrame& src_frame,
+ size_t src_plane,
+ gfx::Rect& src_rect,
+ uint8_t* dest_pixels,
+ size_t dest_stride,
+ gpu::raster::RasterInterface* ri,
+ GrDirectContext* gr_context);
+
// Converts a frame with I420A format into I420 by dropping alpha channel.
MEDIA_EXPORT scoped_refptr<VideoFrame> WrapAsI420VideoFrame(
scoped_refptr<VideoFrame> frame);
diff --git a/chromium/media/base/video_util_unittest.cc b/chromium/media/base/video_util_unittest.cc
index dc3273851c6..e82e3caccbd 100644
--- a/chromium/media/base/video_util_unittest.cc
+++ b/chromium/media/base/video_util_unittest.cc
@@ -430,6 +430,44 @@ TEST_F(VideoUtilTest, ScaleSizeToEncompassTarget) {
gfx::Size(0, 0), gfx::Size(2000000000, 2000000000)).IsEmpty());
}
+TEST_F(VideoUtilTest, CropSizeForScalingToTarget) {
+ // Test same aspect ratios.
+ EXPECT_EQ(gfx::Rect(0, 0, 640, 360),
+ CropSizeForScalingToTarget(gfx::Size(640, 360), gfx::Size(16, 9)));
+ EXPECT_EQ(gfx::Rect(0, 0, 320, 240),
+ CropSizeForScalingToTarget(gfx::Size(320, 240), gfx::Size(4, 3)));
+ EXPECT_EQ(
+ gfx::Rect(0, 0, 320, 240),
+ CropSizeForScalingToTarget(gfx::Size(321, 241), gfx::Size(4, 3), 2));
+
+ // Test cropping 4:3 from 16:9.
+ EXPECT_EQ(gfx::Rect(80, 0, 480, 360),
+ CropSizeForScalingToTarget(gfx::Size(640, 360), gfx::Size(4, 3)));
+ EXPECT_EQ(gfx::Rect(53, 0, 320, 240),
+ CropSizeForScalingToTarget(gfx::Size(426, 240), gfx::Size(4, 3)));
+ EXPECT_EQ(
+ gfx::Rect(52, 0, 320, 240),
+ CropSizeForScalingToTarget(gfx::Size(426, 240), gfx::Size(4, 3), 2));
+
+ // Test cropping 16:9 from 4:3.
+ EXPECT_EQ(gfx::Rect(0, 30, 320, 180),
+ CropSizeForScalingToTarget(gfx::Size(320, 240), gfx::Size(16, 9)));
+ EXPECT_EQ(gfx::Rect(0, 9, 96, 54),
+ CropSizeForScalingToTarget(gfx::Size(96, 72), gfx::Size(16, 9)));
+ EXPECT_EQ(gfx::Rect(0, 8, 96, 54),
+ CropSizeForScalingToTarget(gfx::Size(96, 72), gfx::Size(16, 9), 2));
+
+ // Test abnormal inputs.
+ EXPECT_EQ(gfx::Rect(),
+ CropSizeForScalingToTarget(gfx::Size(0, 1), gfx::Size(1, 1)));
+ EXPECT_EQ(gfx::Rect(),
+ CropSizeForScalingToTarget(gfx::Size(1, 0), gfx::Size(1, 1)));
+ EXPECT_EQ(gfx::Rect(),
+ CropSizeForScalingToTarget(gfx::Size(1, 1), gfx::Size(0, 1)));
+ EXPECT_EQ(gfx::Rect(),
+ CropSizeForScalingToTarget(gfx::Size(1, 1), gfx::Size(1, 0)));
+}
+
TEST_F(VideoUtilTest, PadToMatchAspectRatio) {
EXPECT_EQ(gfx::Size(640, 480),
PadToMatchAspectRatio(gfx::Size(640, 480), gfx::Size(640, 480)));
@@ -525,9 +563,8 @@ TEST_F(VideoUtilTest, I420CopyWithPadding) {
TEST_F(VideoUtilTest, WrapAsI420VideoFrame) {
gfx::Size size(640, 480);
- scoped_refptr<VideoFrame> src_frame =
- VideoFrame::CreateFrame(PIXEL_FORMAT_I420A, size, gfx::Rect(size), size,
- base::TimeDelta::FromDays(1));
+ scoped_refptr<VideoFrame> src_frame = VideoFrame::CreateFrame(
+ PIXEL_FORMAT_I420A, size, gfx::Rect(size), size, base::Days(1));
scoped_refptr<VideoFrame> dst_frame = WrapAsI420VideoFrame(src_frame);
EXPECT_EQ(dst_frame->format(), PIXEL_FORMAT_I420);
diff --git a/chromium/media/base/wall_clock_time_source.cc b/chromium/media/base/wall_clock_time_source.cc
index 9ddf51eb1af..cdfcc687993 100644
--- a/chromium/media/base/wall_clock_time_source.cc
+++ b/chromium/media/base/wall_clock_time_source.cc
@@ -94,8 +94,8 @@ base::TimeDelta WallClockTimeSource::CurrentMediaTime_Locked() {
base::TimeTicks now = tick_clock_->NowTicks();
return base_timestamp_ +
- base::TimeDelta::FromMicroseconds(
- (now - reference_time_).InMicroseconds() * playback_rate_);
+ base::Microseconds((now - reference_time_).InMicroseconds() *
+ playback_rate_);
}
} // namespace media
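
Much of the remaining churn in this patch is the mechanical migration from the base::TimeDelta::From*() factories to the shorter helpers in base/time/time.h. A quick reference sketch of the equivalences seen in these hunks (the sample function is hypothetical):

#include "base/time/time.h"

// Old spelling (removed in this patch)      New spelling (added in this patch)
//   base::TimeDelta::FromSeconds(1)          base::Seconds(1)
//   base::TimeDelta::FromMilliseconds(500)   base::Milliseconds(500)
//   base::TimeDelta::FromMicroseconds(1)     base::Microseconds(1)
//   base::TimeDelta::FromDays(30)            base::Days(30)
//   base::TimeDelta::FromHz(60.0)            base::Hertz(60.0)

base::TimeDelta HalfFramePeriodAt60Fps() {
  // Both spellings build the same base::TimeDelta; only the new helpers
  // remain after this update.
  return base::Hertz(60.0) / 2;  // roughly 8333 microseconds
}
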
diff --git a/chromium/media/base/wall_clock_time_source.h b/chromium/media/base/wall_clock_time_source.h
index 028c3522138..17d76a4e097 100644
--- a/chromium/media/base/wall_clock_time_source.h
+++ b/chromium/media/base/wall_clock_time_source.h
@@ -18,6 +18,10 @@ namespace media {
class MEDIA_EXPORT WallClockTimeSource : public TimeSource {
public:
WallClockTimeSource();
+
+ WallClockTimeSource(const WallClockTimeSource&) = delete;
+ WallClockTimeSource& operator=(const WallClockTimeSource&) = delete;
+
~WallClockTimeSource() override;
// TimeSource implementation.
@@ -50,8 +54,6 @@ class MEDIA_EXPORT WallClockTimeSource : public TimeSource {
// TODO(scherkus): Remove internal locking from this class after access to
// Renderer::CurrentMediaTime() is single threaded http://crbug.com/370634
base::Lock lock_;
-
- DISALLOW_COPY_AND_ASSIGN(WallClockTimeSource);
};
} // namespace media
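
The same files also drop the DISALLOW_COPY_AND_ASSIGN macro in favour of explicitly deleted copy operations declared next to the constructor, as WallClockTimeSource above shows. The pattern in isolation (class name hypothetical):

class NonCopyableExample {
 public:
  NonCopyableExample() = default;

  // Replaces DISALLOW_COPY_AND_ASSIGN(NonCopyableExample) at the end of the
  // private section.
  NonCopyableExample(const NonCopyableExample&) = delete;
  NonCopyableExample& operator=(const NonCopyableExample&) = delete;

  ~NonCopyableExample() = default;
};
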
diff --git a/chromium/media/base/wall_clock_time_source_unittest.cc b/chromium/media/base/wall_clock_time_source_unittest.cc
index 49b6134c5a7..b643a6a7b2d 100644
--- a/chromium/media/base/wall_clock_time_source_unittest.cc
+++ b/chromium/media/base/wall_clock_time_source_unittest.cc
@@ -17,10 +17,14 @@ class WallClockTimeSourceTest : public testing::Test {
time_source_.SetTickClockForTesting(tick_clock_.get());
AdvanceTimeInSeconds(1);
}
+
+ WallClockTimeSourceTest(const WallClockTimeSourceTest&) = delete;
+ WallClockTimeSourceTest& operator=(const WallClockTimeSourceTest&) = delete;
+
~WallClockTimeSourceTest() override = default;
void AdvanceTimeInSeconds(int seconds) {
- tick_clock_->Advance(base::TimeDelta::FromSeconds(seconds));
+ tick_clock_->Advance(base::Seconds(seconds));
}
int CurrentMediaTimeInSeconds() {
@@ -28,7 +32,7 @@ class WallClockTimeSourceTest : public testing::Test {
}
void SetMediaTimeInSeconds(int seconds) {
- return time_source_.SetMediaTime(base::TimeDelta::FromSeconds(seconds));
+ return time_source_.SetMediaTime(base::Seconds(seconds));
}
base::TimeTicks ConvertMediaTime(base::TimeDelta timestamp,
@@ -42,22 +46,19 @@ class WallClockTimeSourceTest : public testing::Test {
bool IsWallClockNowForMediaTimeInSeconds(int seconds) {
bool is_time_moving = false;
return tick_clock_->NowTicks() ==
- ConvertMediaTime(base::TimeDelta::FromSeconds(seconds),
- &is_time_moving);
+ ConvertMediaTime(base::Seconds(seconds), &is_time_moving);
}
bool IsTimeStopped() {
bool is_time_moving = false;
// Convert any random value, it shouldn't matter for this call.
- ConvertMediaTime(base::TimeDelta::FromSeconds(1), &is_time_moving);
+ ConvertMediaTime(base::Seconds(1), &is_time_moving);
return !is_time_moving;
}
protected:
WallClockTimeSource time_source_;
std::unique_ptr<base::SimpleTestTickClock> tick_clock_;
-
- DISALLOW_COPY_AND_ASSIGN(WallClockTimeSourceTest);
};
TEST_F(WallClockTimeSourceTest, InitialTimeIsZero) {
@@ -130,7 +131,7 @@ TEST_F(WallClockTimeSourceTest, StopTicking) {
}
TEST_F(WallClockTimeSourceTest, ConvertsTimestampsWhenStopped) {
- const base::TimeDelta kOneSecond = base::TimeDelta::FromSeconds(1);
+ const base::TimeDelta kOneSecond = base::Seconds(1);
bool is_time_moving = false;
EXPECT_EQ(base::TimeTicks(),
ConvertMediaTime(base::TimeDelta(), &is_time_moving));
diff --git a/chromium/media/base/win/d3d11_mocks.cc b/chromium/media/base/win/d3d11_mocks.cc
index 41853a4fa24..326fe4bde33 100644
--- a/chromium/media/base/win/d3d11_mocks.cc
+++ b/chromium/media/base/win/d3d11_mocks.cc
@@ -9,6 +9,9 @@ namespace media {
D3D11Texture2DMock::D3D11Texture2DMock() = default;
D3D11Texture2DMock::~D3D11Texture2DMock() = default;
+D3D11MultithreadMock::D3D11MultithreadMock() = default;
+D3D11MultithreadMock::~D3D11MultithreadMock() = default;
+
D3D11BufferMock::D3D11BufferMock() = default;
D3D11BufferMock::~D3D11BufferMock() = default;
diff --git a/chromium/media/base/win/d3d11_mocks.h b/chromium/media/base/win/d3d11_mocks.h
index d2ad6b8b426..91c611913fe 100644
--- a/chromium/media/base/win/d3d11_mocks.h
+++ b/chromium/media/base/win/d3d11_mocks.h
@@ -34,6 +34,19 @@ class D3D11Texture2DMock
MOCK_STDCALL_METHOD1(GetDesc, void(D3D11_TEXTURE2D_DESC*));
};
+class D3D11MultithreadMock
+ : public Microsoft::WRL::RuntimeClass<
+ Microsoft::WRL::RuntimeClassFlags<Microsoft::WRL::ClassicCom>,
+ ID3D11Multithread> {
+ public:
+ D3D11MultithreadMock();
+ ~D3D11MultithreadMock() override;
+ MOCK_STDCALL_METHOD0(Enter, void());
+ MOCK_STDCALL_METHOD0(GetMultithreadProtected, BOOL());
+ MOCK_STDCALL_METHOD0(Leave, void());
+ MOCK_STDCALL_METHOD1(SetMultithreadProtected, BOOL(BOOL));
+};
+
class D3D11BufferMock
: public Microsoft::WRL::RuntimeClass<
Microsoft::WRL::RuntimeClassFlags<Microsoft::WRL::ClassicCom>,
diff --git a/chromium/media/base/win/dcomp_texture_wrapper.h b/chromium/media/base/win/dcomp_texture_wrapper.h
index abb45dd5ebc..f14b39c46e2 100644
--- a/chromium/media/base/win/dcomp_texture_wrapper.h
+++ b/chromium/media/base/win/dcomp_texture_wrapper.h
@@ -23,26 +23,25 @@ class DCOMPTextureWrapper {
public:
virtual ~DCOMPTextureWrapper() = default;
- // Initializes the DCOMPTexture and returns success/failure in `init_cb`.
- // TODO(xhwang): Pass `DCOMPSurfaceHandleBoundCB` in `SetDCOMPSurface()`.
- using DCOMPSurfaceHandleBoundCB = base::OnceCallback<void(bool)>;
- using CompositionParamsReceivedCB = base::RepeatingCallback<void(gfx::Rect)>;
- using InitCB = base::OnceCallback<void(bool)>;
- virtual void Initialize(const gfx::Size& natural_size,
- DCOMPSurfaceHandleBoundCB dcomp_handle_bound_cb,
- CompositionParamsReceivedCB comp_params_received_cb,
- InitCB init_cb) = 0;
-
- // Called whenever the video's natural size changes.
- virtual void UpdateTextureSize(const gfx::Size& natural_size) = 0;
-
- // Sets the DirectComposition surface identified by `surface_token`.
- virtual void SetDCOMPSurface(const base::UnguessableToken& surface_token) = 0;
+ // Initializes the DCOMPTexture and returns success/failure.
+ using OutputRectChangeCB = base::RepeatingCallback<void(gfx::Rect)>;
+ virtual bool Initialize(const gfx::Size& output_size,
+ OutputRectChangeCB output_rect_change_cb) = 0;
+
+ // Called whenever the video's output size changes.
+ virtual void UpdateTextureSize(const gfx::Size& output_size) = 0;
+
+ // Sets the DirectComposition surface identified by `token`.
+ using SetDCOMPSurfaceHandleCB = base::OnceCallback<void(bool)>;
+ virtual void SetDCOMPSurfaceHandle(
+ const base::UnguessableToken& token,
+ SetDCOMPSurfaceHandleCB set_dcomp_surface_handle_cb) = 0;
// Creates VideoFrame which will be returned in `create_video_frame_cb`.
using CreateVideoFrameCB =
base::OnceCallback<void(scoped_refptr<VideoFrame>)>;
- virtual void CreateVideoFrame(CreateVideoFrameCB create_video_frame_cb) = 0;
+ virtual void CreateVideoFrame(const gfx::Size& natural_size,
+ CreateVideoFrameCB create_video_frame_cb) = 0;
};
} // namespace media
diff --git a/chromium/media/base/win/dxgi_device_manager.cc b/chromium/media/base/win/dxgi_device_manager.cc
index 33226904288..921669b1c8c 100644
--- a/chromium/media/base/win/dxgi_device_manager.cc
+++ b/chromium/media/base/win/dxgi_device_manager.cc
@@ -95,6 +95,8 @@ HRESULT DXGIDeviceManager::ResetDevice() {
kDeviceFlags, nullptr, 0, D3D11_SDK_VERSION,
&d3d_device, nullptr, nullptr);
RETURN_ON_HR_FAILURE(hr, "D3D11 device creation failed", hr);
+ RETURN_ON_HR_FAILURE(
+ hr, media::SetDebugName(d3d_device.Get(), "Media_DXGIDeviceManager"), hr);
// Since FrameServerClient background threads in the video capture process
// call EnqueueSetEvent on Chromium's D3D11 device at the same time that
// Chromium is actively using it in a worker thread, we need to protect access
diff --git a/chromium/media/base/win/mf_helpers.cc b/chromium/media/base/win/mf_helpers.cc
index 37cf1183709..be610b58562 100644
--- a/chromium/media/base/win/mf_helpers.cc
+++ b/chromium/media/base/win/mf_helpers.cc
@@ -10,6 +10,18 @@
namespace media {
+namespace {
+
+// ID3D11DeviceChild, IDXGIObject and ID3D11Device implement SetPrivateData with
+// the exact same parameters.
+template <typename T>
+HRESULT SetDebugNameInternal(T* d3d11_object, const char* debug_string) {
+ return d3d11_object->SetPrivateData(WKPDID_D3DDebugObjectName,
+ strlen(debug_string), debug_string);
+}
+
+} // namespace
+
Microsoft::WRL::ComPtr<IMFSample> CreateEmptySampleWithBuffer(
uint32_t buffer_length,
int align) {
@@ -70,8 +82,15 @@ HRESULT CopyCoTaskMemWideString(LPCWSTR in_string, LPWSTR* out_string) {
HRESULT SetDebugName(ID3D11DeviceChild* d3d11_device_child,
const char* debug_string) {
- return d3d11_device_child->SetPrivateData(WKPDID_D3DDebugObjectName,
- strlen(debug_string), debug_string);
+ return SetDebugNameInternal(d3d11_device_child, debug_string);
+}
+
+HRESULT SetDebugName(ID3D11Device* d3d11_device, const char* debug_string) {
+ return SetDebugNameInternal(d3d11_device, debug_string);
+}
+
+HRESULT SetDebugName(IDXGIObject* dxgi_object, const char* debug_string) {
+ return SetDebugNameInternal(dxgi_object, debug_string);
}
} // namespace media
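
With the templated helper above, the same debug-name plumbing now covers ID3D11DeviceChild, ID3D11Device and IDXGIObject. A hypothetical usage sketch (the device and texture arguments are assumed to exist; only the SetDebugName() overloads come from this patch):

#include <d3d11.h>

#include "media/base/win/mf_helpers.h"

void LabelD3D11Objects(ID3D11Device* device, ID3D11Texture2D* texture) {
  // Names show up in D3D11 debug-layer and GPU capture output.
  media::SetDebugName(device, "Media_ExampleDevice");    // ID3D11Device overload
  media::SetDebugName(texture, "Media_ExampleTexture");  // ID3D11DeviceChild overload
}
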
diff --git a/chromium/media/base/win/mf_helpers.h b/chromium/media/base/win/mf_helpers.h
index 6bebf0310f5..3fd615fdac9 100644
--- a/chromium/media/base/win/mf_helpers.h
+++ b/chromium/media/base/win/mf_helpers.h
@@ -14,6 +14,8 @@
#include "media/base/win/mf_initializer_export.h"
struct ID3D11DeviceChild;
+struct ID3D11Device;
+struct IDXGIObject;
namespace media {
@@ -60,6 +62,10 @@ CreateEmptySampleWithBuffer(uint32_t buffer_length, int align);
class MF_INITIALIZER_EXPORT MediaBufferScopedPointer {
public:
explicit MediaBufferScopedPointer(IMFMediaBuffer* media_buffer);
+
+ MediaBufferScopedPointer(const MediaBufferScopedPointer&) = delete;
+ MediaBufferScopedPointer& operator=(const MediaBufferScopedPointer&) = delete;
+
~MediaBufferScopedPointer();
uint8_t* get() { return buffer_; }
@@ -71,8 +77,6 @@ class MF_INITIALIZER_EXPORT MediaBufferScopedPointer {
uint8_t* buffer_;
DWORD max_length_;
DWORD current_length_;
-
- DISALLOW_COPY_AND_ASSIGN(MediaBufferScopedPointer);
};
// Copies |in_string| to |out_string| that is allocated with CoTaskMemAlloc().
@@ -83,6 +87,11 @@ MF_INITIALIZER_EXPORT HRESULT CopyCoTaskMemWideString(LPCWSTR in_string,
// D3D11 retains the string passed to this function.
MF_INITIALIZER_EXPORT HRESULT
SetDebugName(ID3D11DeviceChild* d3d11_device_child, const char* debug_string);
+MF_INITIALIZER_EXPORT HRESULT SetDebugName(ID3D11Device* d3d11_device,
+ const char* debug_string);
+MF_INITIALIZER_EXPORT HRESULT SetDebugName(IDXGIObject* dxgi_object,
+ const char* debug_string);
+
} // namespace media
#endif // MEDIA_BASE_WIN_MF_HELPERS_H_
diff --git a/chromium/media/capabilities/in_memory_video_decode_stats_db_impl.h b/chromium/media/capabilities/in_memory_video_decode_stats_db_impl.h
index 7e3190878af..b674b847889 100644
--- a/chromium/media/capabilities/in_memory_video_decode_stats_db_impl.h
+++ b/chromium/media/capabilities/in_memory_video_decode_stats_db_impl.h
@@ -34,6 +34,12 @@ class MEDIA_EXPORT InMemoryVideoDecodeStatsDBImpl : public VideoDecodeStatsDB {
// seed DB is available.
explicit InMemoryVideoDecodeStatsDBImpl(
VideoDecodeStatsDBProvider* seed_db_provider);
+
+ InMemoryVideoDecodeStatsDBImpl(const InMemoryVideoDecodeStatsDBImpl&) =
+ delete;
+ InMemoryVideoDecodeStatsDBImpl& operator=(
+ const InMemoryVideoDecodeStatsDBImpl&) = delete;
+
~InMemoryVideoDecodeStatsDBImpl() override;
// Implement VideoDecodeStatsDB.
@@ -93,8 +99,6 @@ class MEDIA_EXPORT InMemoryVideoDecodeStatsDBImpl : public VideoDecodeStatsDB {
SEQUENCE_CHECKER(sequence_checker_);
base::WeakPtrFactory<InMemoryVideoDecodeStatsDBImpl> weak_ptr_factory_{this};
-
- DISALLOW_COPY_AND_ASSIGN(InMemoryVideoDecodeStatsDBImpl);
};
} // namespace media
diff --git a/chromium/media/capabilities/video_decode_stats_db_impl.cc b/chromium/media/capabilities/video_decode_stats_db_impl.cc
index c0b676a2baa..fc8f11b56d2 100644
--- a/chromium/media/capabilities/video_decode_stats_db_impl.cc
+++ b/chromium/media/capabilities/video_decode_stats_db_impl.cc
@@ -33,8 +33,7 @@ namespace {
// Timeout threshold for DB operations. See OnOperationTimeout().
// NOTE: Used by UmaHistogramOpTime. Change the name if you change the time.
-static constexpr base::TimeDelta kPendingOpTimeout =
- base::TimeDelta::FromSeconds(30);
+static constexpr base::TimeDelta kPendingOpTimeout = base::Seconds(30);
const int kMaxFramesPerBufferDefault = 2500;
@@ -45,7 +44,7 @@ const bool kEnableUnweightedEntriesDefault = false;
void UmaHistogramOpTime(const std::string& op_name, base::TimeDelta duration) {
base::UmaHistogramCustomMicrosecondsTimes(
"Media.VideoDecodeStatsDB.OpTiming." + op_name, duration,
- base::TimeDelta::FromMilliseconds(1), kPendingOpTimeout, 50);
+ base::Milliseconds(1), kPendingOpTimeout, 50);
}
} // namespace
@@ -309,7 +308,7 @@ bool VideoDecodeStatsDBImpl::AreStatsUsable(
DCHECK_GT(kMaxDaysToKeepStats, 0);
return wall_clock_->Now() - base::Time::FromJsTime(last_write_date) <=
- base::TimeDelta::FromDays(kMaxDaysToKeepStats);
+ base::Days(kMaxDaysToKeepStats);
}
void VideoDecodeStatsDBImpl::WriteUpdatedEntry(
diff --git a/chromium/media/capabilities/video_decode_stats_db_impl.h b/chromium/media/capabilities/video_decode_stats_db_impl.h
index fb041b04475..af403330590 100644
--- a/chromium/media/capabilities/video_decode_stats_db_impl.h
+++ b/chromium/media/capabilities/video_decode_stats_db_impl.h
@@ -47,6 +47,9 @@ class MEDIA_EXPORT VideoDecodeStatsDBImpl : public VideoDecodeStatsDB {
base::FilePath db_dir,
leveldb_proto::ProtoDatabaseProvider* db_provider);
+ VideoDecodeStatsDBImpl(const VideoDecodeStatsDBImpl&) = delete;
+ VideoDecodeStatsDBImpl& operator=(const VideoDecodeStatsDBImpl&) = delete;
+
~VideoDecodeStatsDBImpl() override;
// Implement VideoDecodeStatsDB.
@@ -209,8 +212,6 @@ class MEDIA_EXPORT VideoDecodeStatsDBImpl : public VideoDecodeStatsDB {
SEQUENCE_CHECKER(sequence_checker_);
base::WeakPtrFactory<VideoDecodeStatsDBImpl> weak_ptr_factory_{this};
-
- DISALLOW_COPY_AND_ASSIGN(VideoDecodeStatsDBImpl);
};
} // namespace media
diff --git a/chromium/media/capabilities/video_decode_stats_db_impl_unittest.cc b/chromium/media/capabilities/video_decode_stats_db_impl_unittest.cc
index 0bce05ab1d4..a1a932943b8 100644
--- a/chromium/media/capabilities/video_decode_stats_db_impl_unittest.cc
+++ b/chromium/media/capabilities/video_decode_stats_db_impl_unittest.cc
@@ -65,6 +65,10 @@ class VideoDecodeStatsDBImplTest : public ::testing::Test {
std::unique_ptr<FakeDB<DecodeStatsProto>>(fake_db_)));
}
+ VideoDecodeStatsDBImplTest(const VideoDecodeStatsDBImplTest&) = delete;
+ VideoDecodeStatsDBImplTest& operator=(const VideoDecodeStatsDBImplTest&) =
+ delete;
+
~VideoDecodeStatsDBImplTest() override {
// Tests should always complete any pending operations
VerifyNoPendingOps();
@@ -193,9 +197,6 @@ class VideoDecodeStatsDBImplTest : public ::testing::Test {
std::unique_ptr<FakeDB<DecodeStatsProto>::EntryMap> fake_db_map_;
FakeDB<DecodeStatsProto>* fake_db_;
std::unique_ptr<VideoDecodeStatsDBImpl> stats_db_;
-
- private:
- DISALLOW_COPY_AND_ASSIGN(VideoDecodeStatsDBImplTest);
};
TEST_F(VideoDecodeStatsDBImplTest, InitializeFailed) {
@@ -213,7 +214,7 @@ TEST_F(VideoDecodeStatsDBImplTest, InitializeTimedOut) {
// Move time forward enough to trigger timeout.
EXPECT_CALL(*this, OnInitialize(_)).Times(0);
- task_environment_.FastForwardBy(base::TimeDelta::FromSeconds(100));
+ task_environment_.FastForwardBy(base::Seconds(100));
task_environment_.RunUntilIdle();
// Verify we didn't get an init callback and task is no longer considered
@@ -336,15 +337,15 @@ TEST_F(VideoDecodeStatsDBImplTest, ConfigureExpireDays) {
new_max_days_to_keep_stats - half_days_to_keep_stats;
// Advance time half way through grace period. Verify stats not expired.
- clock.Advance(base::TimeDelta::FromDays(half_days_to_keep_stats));
+ clock.Advance(base::Days(half_days_to_keep_stats));
VerifyReadStats(kStatsKeyVp9, DecodeStatsEntry(200, 20, 2));
// Advance time 1 day beyond grace period, verify stats are expired.
- clock.Advance(base::TimeDelta::FromDays((remaining_days_to_keep_stats) + 1));
+ clock.Advance(base::Days((remaining_days_to_keep_stats) + 1));
VerifyEmptyStats(kStatsKeyVp9);
// Advance the clock 100 extra days. Verify stats still expired.
- clock.Advance(base::TimeDelta::FromDays(100));
+ clock.Advance(base::Days(100));
VerifyEmptyStats(kStatsKeyVp9);
}
@@ -538,20 +539,18 @@ TEST_F(VideoDecodeStatsDBImplTest, NoWriteDateReadAndExpire) {
// don't want to immediately expire all the existing data).
base::SimpleTestClock clock;
SetDBClock(&clock);
- clock.SetNow(kDefaultWriteTime - base::TimeDelta::FromDays(10));
+ clock.SetNow(kDefaultWriteTime - base::Days(10));
// Verify the stats are readable (not expired).
VerifyReadStats(kStatsKeyVp9, DecodeStatsEntry(100, 10, 1));
// Set "now" to be in the middle of the grace period. Verify stats are still
// readable (not expired).
- clock.SetNow(kDefaultWriteTime +
- base::TimeDelta::FromDays(GetMaxDaysToKeepStats() / 2));
+ clock.SetNow(kDefaultWriteTime + base::Days(GetMaxDaysToKeepStats() / 2));
VerifyReadStats(kStatsKeyVp9, DecodeStatsEntry(100, 10, 1));
// Set the clock 1 day beyond the expiry date. Verify stats are no longer
// readable due to expiration.
- clock.SetNow(kDefaultWriteTime +
- base::TimeDelta::FromDays(GetMaxDaysToKeepStats() + 1));
+ clock.SetNow(kDefaultWriteTime + base::Days(GetMaxDaysToKeepStats() + 1));
VerifyEmptyStats(kStatsKeyVp9);
// Write some stats to the entry. Verify we get back exactly what's written
@@ -576,7 +575,7 @@ TEST_F(VideoDecodeStatsDBImplTest, NoWriteDateAppendReadAndExpire) {
// don't want to immediately expire all the existing data).
base::SimpleTestClock clock;
SetDBClock(&clock);
- clock.SetNow(kDefaultWriteTime - base::TimeDelta::FromDays(10));
+ clock.SetNow(kDefaultWriteTime - base::Days(10));
// Verify the stats are readable (not expired).
VerifyReadStats(kStatsKeyVp9, DecodeStatsEntry(100, 10, 1));
@@ -587,14 +586,12 @@ TEST_F(VideoDecodeStatsDBImplTest, NoWriteDateAppendReadAndExpire) {
// Set "now" to be in the middle of the grace period. Verify stats are still
// readable (not expired).
- clock.SetNow(kDefaultWriteTime +
- base::TimeDelta::FromDays(GetMaxDaysToKeepStats() / 2));
+ clock.SetNow(kDefaultWriteTime + base::Days(GetMaxDaysToKeepStats() / 2));
VerifyReadStats(kStatsKeyVp9, DecodeStatsEntry(300, 30, 3));
// Set the clock 1 day beyond the expiry date. Verify stats are no longer
// readable due to expiration.
- clock.SetNow(kDefaultWriteTime +
- base::TimeDelta::FromDays(GetMaxDaysToKeepStats() + 1));
+ clock.SetNow(kDefaultWriteTime + base::Days(GetMaxDaysToKeepStats() + 1));
VerifyEmptyStats(kStatsKeyVp9);
}
@@ -611,15 +608,15 @@ TEST_F(VideoDecodeStatsDBImplTest, AppendAndExpire) {
VerifyReadStats(kStatsKeyVp9, DecodeStatsEntry(200, 20, 2));
// Advance time half way through grace period. Verify stats not expired.
- clock.Advance(base::TimeDelta::FromDays(GetMaxDaysToKeepStats() / 2));
+ clock.Advance(base::Days(GetMaxDaysToKeepStats() / 2));
VerifyReadStats(kStatsKeyVp9, DecodeStatsEntry(200, 20, 2));
// Advance time 1 day beyond grace period, verify stats are expired.
- clock.Advance(base::TimeDelta::FromDays((GetMaxDaysToKeepStats() / 2) + 1));
+ clock.Advance(base::Days((GetMaxDaysToKeepStats() / 2) + 1));
VerifyEmptyStats(kStatsKeyVp9);
// Advance the clock 100 days. Verify stats still expired.
- clock.Advance(base::TimeDelta::FromDays(100));
+ clock.Advance(base::Days(100));
VerifyEmptyStats(kStatsKeyVp9);
}
@@ -772,8 +769,7 @@ TEST_F(VideoDecodeStatsDBImplTest, DiscardCorruptedDBData) {
// Make an invalid proto with a last write date in the future.
DecodeStatsProto protoG(protoA);
- protoG.set_last_write_date(
- (clock.Now() + base::TimeDelta::FromDays(1)).ToJsTime());
+ protoG.set_last_write_date((clock.Now() + base::Days(1)).ToJsTime());
AppendToProtoDB(keyG, &protoG);
VerifyEmptyStats(keyG);
}
diff --git a/chromium/media/capture/content/android/screen_capture_machine_android.cc b/chromium/media/capture/content/android/screen_capture_machine_android.cc
index 18f0ec6e4af..9d6a08c60c1 100644
--- a/chromium/media/capture/content/android/screen_capture_machine_android.cc
+++ b/chromium/media/capture/content/android/screen_capture_machine_android.cc
@@ -47,7 +47,7 @@ void ScreenCaptureMachineAndroid::OnRGBAFrameAvailable(
const uint64_t absolute_micro =
timestamp / base::Time::kNanosecondsPerMicrosecond;
const base::TimeTicks start_time =
- base::TimeTicks() + base::TimeDelta::FromMicroseconds(absolute_micro);
+ base::TimeTicks() + base::Microseconds(absolute_micro);
scoped_refptr<VideoFrame> frame;
ThreadSafeCaptureOracle::CaptureFrameCallback capture_frame_cb;
@@ -122,7 +122,7 @@ void ScreenCaptureMachineAndroid::OnI420FrameAvailable(
const uint64_t absolute_micro =
timestamp / base::Time::kNanosecondsPerMicrosecond;
const base::TimeTicks start_time =
- base::TimeTicks() + base::TimeDelta::FromMicroseconds(absolute_micro);
+ base::TimeTicks() + base::Microseconds(absolute_micro);
scoped_refptr<VideoFrame> frame;
ThreadSafeCaptureOracle::CaptureFrameCallback capture_frame_cb;
diff --git a/chromium/media/capture/content/android/screen_capture_machine_android.h b/chromium/media/capture/content/android/screen_capture_machine_android.h
index c1246e5a8f7..c195ec17dc4 100644
--- a/chromium/media/capture/content/android/screen_capture_machine_android.h
+++ b/chromium/media/capture/content/android/screen_capture_machine_android.h
@@ -21,6 +21,11 @@ class VideoFrame;
class CAPTURE_EXPORT ScreenCaptureMachineAndroid {
public:
ScreenCaptureMachineAndroid();
+
+ ScreenCaptureMachineAndroid(const ScreenCaptureMachineAndroid&) = delete;
+ ScreenCaptureMachineAndroid& operator=(const ScreenCaptureMachineAndroid&) =
+ delete;
+
virtual ~ScreenCaptureMachineAndroid();
static base::android::ScopedJavaLocalRef<jobject>
@@ -82,8 +87,6 @@ class CAPTURE_EXPORT ScreenCaptureMachineAndroid {
// Java VideoCaptureAndroid instance.
base::android::ScopedJavaLocalRef<jobject> j_capture_;
-
- DISALLOW_COPY_AND_ASSIGN(ScreenCaptureMachineAndroid);
};
} // namespace media
diff --git a/chromium/media/capture/content/android/thread_safe_capture_oracle.cc b/chromium/media/capture/content/android/thread_safe_capture_oracle.cc
index e1d37882063..d485610ed51 100644
--- a/chromium/media/capture/content/android/thread_safe_capture_oracle.cc
+++ b/chromium/media/capture/content/android/thread_safe_capture_oracle.cc
@@ -46,7 +46,7 @@ ThreadSafeCaptureOracle::ThreadSafeCaptureOracle(
const VideoCaptureParams& params)
: client_(std::move(client)), oracle_(false), params_(params) {
DCHECK_GE(params.requested_format.frame_rate, 1e-6f);
- oracle_.SetMinCapturePeriod(base::TimeDelta::FromMicroseconds(
+ oracle_.SetMinCapturePeriod(base::Microseconds(
static_cast<int64_t>(1000000.0 / params.requested_format.frame_rate +
0.5 /* to round to nearest int */)));
const auto constraints = params.SuggestConstraints();
diff --git a/chromium/media/capture/content/animated_content_sampler.cc b/chromium/media/capture/content/animated_content_sampler.cc
index c533123f809..2391d99bd71 100644
--- a/chromium/media/capture/content/animated_content_sampler.cc
+++ b/chromium/media/capture/content/animated_content_sampler.cc
@@ -22,23 +22,23 @@ namespace {
// These values were established by experimenting with a wide variety of
// scenarios, including 24/25/30 FPS videos, 60 FPS WebGL demos, and the
// transitions between static and animated content.
-constexpr auto kMinObservationWindow = base::TimeDelta::FromSeconds(1);
-constexpr auto kMaxObservationWindow = base::TimeDelta::FromSeconds(2);
+constexpr auto kMinObservationWindow = base::Seconds(1);
+constexpr auto kMaxObservationWindow = base::Seconds(2);
// The maximum amount of time that can elapse before declaring two subsequent
// events as "not animating." This is the same value found in
// cc::FrameRateCounter.
-constexpr auto kNonAnimatingThreshold = base::TimeDelta::FromSeconds(1) / 4;
+constexpr auto kNonAnimatingThreshold = base::Seconds(1) / 4;
// The slowest that content can be animating in order for AnimatedContentSampler
// to lock-in. This is the threshold at which the "smoothness" problem is no
// longer relevant.
-constexpr auto kMaxLockInPeriod = base::TimeDelta::FromSeconds(1) / 12;
+constexpr auto kMaxLockInPeriod = base::Seconds(1) / 12;
// The amount of time over which to fully correct the drift of the rewritten
// frame timestamps from the presentation event timestamps. The lower the
// value, the higher the variance in frame timestamps.
-constexpr auto kDriftCorrection = base::TimeDelta::FromSeconds(2);
+constexpr auto kDriftCorrection = base::Seconds(2);
} // anonymous namespace
diff --git a/chromium/media/capture/content/animated_content_sampler_unittest.cc b/chromium/media/capture/content/animated_content_sampler_unittest.cc
index c85643c271a..48b02cd1c5f 100644
--- a/chromium/media/capture/content/animated_content_sampler_unittest.cc
+++ b/chromium/media/capture/content/animated_content_sampler_unittest.cc
@@ -23,11 +23,11 @@ namespace media {
namespace {
base::TimeTicks InitialTestTimeTicks() {
- return base::TimeTicks() + base::TimeDelta::FromSeconds(1);
+ return base::TimeTicks() + base::Seconds(1);
}
base::TimeDelta FpsAsPeriod(int frame_rate) {
- return base::TimeDelta::FromSeconds(1) / frame_rate;
+ return base::Seconds(1) / frame_rate;
}
} // namespace
@@ -46,7 +46,7 @@ class AnimatedContentSamplerTest : public ::testing::Test {
protected:
// Overridden by subclass for parameterized tests.
virtual base::TimeDelta GetMinCapturePeriod() const {
- return base::TimeDelta::FromSeconds(1) / 30;
+ return base::Seconds(1) / 30;
}
AnimatedContentSampler* sampler() const { return sampler_.get(); }
@@ -430,18 +430,18 @@ TEST_P(AnimatedContentSamplerParameterizedTest, DetectsAnimatedContent) {
base::TimeTicks begin = InitialTestTimeTicks();
// Provide random events and expect no lock-in.
- RunEventSequence(
- GenerateEventSequence(begin, begin + base::TimeDelta::FromSeconds(5),
- false, true, &begin),
- false, false, false, "Provide random events and expect no lock-in.");
+ RunEventSequence(GenerateEventSequence(begin, begin + base::Seconds(5), false,
+ true, &begin),
+ false, false, false,
+ "Provide random events and expect no lock-in.");
if (HasFailure())
return;
// Provide content frame events with some random events mixed-in, and expect
// the sampler to lock-in.
RunEventSequence(
- GenerateEventSequence(begin, begin + base::TimeDelta::FromSeconds(5),
- true, true, &begin),
+ GenerateEventSequence(begin, begin + base::Seconds(5), true, true,
+ &begin),
false, true, false,
"Provide content frame events with some random events mixed-in, and "
"expect the sampler to lock-in.");
@@ -451,8 +451,8 @@ TEST_P(AnimatedContentSamplerParameterizedTest, DetectsAnimatedContent) {
// Continue providing content frame events without the random events mixed-in
// and expect the lock-in to hold.
RunEventSequence(
- GenerateEventSequence(begin, begin + base::TimeDelta::FromSeconds(5),
- true, false, &begin),
+ GenerateEventSequence(begin, begin + base::Seconds(5), true, false,
+ &begin),
true, true, false,
"Continue providing content frame events without the random events "
"mixed-in and expect the lock-in to hold.");
@@ -462,8 +462,8 @@ TEST_P(AnimatedContentSamplerParameterizedTest, DetectsAnimatedContent) {
// Continue providing just content frame events and expect the lock-in to
// hold. Also simulate the capture pipeline experiencing back pressure.
RunEventSequence(
- GenerateEventSequence(begin, begin + base::TimeDelta::FromSeconds(20),
- true, false, &begin),
+ GenerateEventSequence(begin, begin + base::Seconds(20), true, false,
+ &begin),
true, true, true,
"Continue providing just content frame events and expect the lock-in to "
"hold. Also simulate the capture pipeline experiencing back pressure.");
@@ -473,9 +473,8 @@ TEST_P(AnimatedContentSamplerParameterizedTest, DetectsAnimatedContent) {
// Provide a half-second of random events only, and expect the lock-in to be
// broken.
RunEventSequence(
- GenerateEventSequence(begin,
- begin + base::TimeDelta::FromMilliseconds(500),
- false, true, &begin),
+ GenerateEventSequence(begin, begin + base::Milliseconds(500), false, true,
+ &begin),
true, false, false,
"Provide a half-second of random events only, and expect the lock-in to "
"be broken.");
@@ -485,8 +484,8 @@ TEST_P(AnimatedContentSamplerParameterizedTest, DetectsAnimatedContent) {
// Now, go back to providing content frame events, and expect the sampler to
// lock-in once again.
RunEventSequence(
- GenerateEventSequence(begin, begin + base::TimeDelta::FromSeconds(5),
- true, false, &begin),
+ GenerateEventSequence(begin, begin + base::Seconds(5), true, false,
+ &begin),
false, true, false,
"Now, go back to providing content frame events, and expect the sampler "
"to lock-in once again.");
@@ -507,8 +506,8 @@ TEST_P(AnimatedContentSamplerParameterizedTest,
// lock-in.
base::TimeTicks begin = InitialTestTimeTicks();
RunEventSequence(
- GenerateEventSequence(begin, begin + base::TimeDelta::FromSeconds(5),
- true, false, &begin),
+ GenerateEventSequence(begin, begin + base::Seconds(5), true, false,
+ &begin),
false, true, false,
"Start the first animation and run for a bit, and expect the sampler to "
"lock-in.");
@@ -520,7 +519,7 @@ TEST_P(AnimatedContentSamplerParameterizedTest,
// the sampler to enter an "undetected" state since it's unclear which
// animation should be locked into.
std::vector<Event> first_animation_events = GenerateEventSequence(
- begin, begin + base::TimeDelta::FromSeconds(20), true, false, &begin);
+ begin, begin + base::Seconds(20), true, false, &begin);
gfx::Rect second_animation_rect(
gfx::Point(0, GetContentDamageRect().height()),
GetContentDamageRect().size());
@@ -542,8 +541,8 @@ TEST_P(AnimatedContentSamplerParameterizedTest,
// Now, run just the first animation, and expect the sampler to lock-in once
// again.
RunEventSequence(
- GenerateEventSequence(begin, begin + base::TimeDelta::FromSeconds(5),
- true, false, &begin),
+ GenerateEventSequence(begin, begin + base::Seconds(5), true, false,
+ &begin),
false, true, false,
"Now, run just the first animation, and expect the sampler to lock-in "
"once again.");
@@ -555,7 +554,7 @@ TEST_P(AnimatedContentSamplerParameterizedTest,
// cause the sampler to enter an "undetected" state again. This tests that
// pixel-weighting is being accounted for in the sampler's logic.
first_animation_events = GenerateEventSequence(
- begin, begin + base::TimeDelta::FromSeconds(20), true, false, &begin);
+ begin, begin + base::Seconds(20), true, false, &begin);
second_animation_rect.set_width(second_animation_rect.width() * 2);
both_animations_events.clear();
bool include_second_animation_frame = true;
@@ -583,7 +582,7 @@ TEST_P(AnimatedContentSamplerParameterizedTest, FrameTimestampsAreSmooth) {
// once lock-in is continuous.
const base::TimeTicks begin = InitialTestTimeTicks();
std::vector<Event> events = GenerateEventSequence(
- begin, begin + base::TimeDelta::FromSeconds(20), true, false, nullptr);
+ begin, begin + base::Seconds(20), true, false, nullptr);
typedef std::vector<base::TimeTicks> Timestamps;
Timestamps frame_timestamps;
for (std::vector<Event>::const_iterator i = events.begin(); i != events.end();
@@ -666,7 +665,7 @@ TEST_P(AnimatedContentSamplerParameterizedTest,
// Generate a full minute of events.
const base::TimeTicks begin = InitialTestTimeTicks();
std::vector<Event> events = GenerateEventSequence(
- begin, begin + base::TimeDelta::FromMinutes(1), true, false, nullptr);
+ begin, begin + base::Minutes(1), true, false, nullptr);
// Modify the event sequence so that 1-3 ms of additional drift is suddenly
// present every 100 events. This is meant to simulate that, external to
@@ -675,7 +674,7 @@ TEST_P(AnimatedContentSamplerParameterizedTest,
base::TimeDelta accumulated_drift;
for (size_t i = 1; i < events.size(); ++i) {
if (i % 100 == 0) {
- accumulated_drift += base::TimeDelta::FromMilliseconds(
+ accumulated_drift += base::Milliseconds(
GetRandomInRange(1, max_drift_increment_millis + 1));
}
events[i].second += accumulated_drift;
@@ -697,7 +696,7 @@ TEST_P(AnimatedContentSamplerParameterizedTest,
events.back().second - last_frame_timestamp;
const base::TimeDelta max_acceptable_error =
GetParam().min_capture_period +
- base::TimeDelta::FromMilliseconds(max_drift_increment_millis);
+ base::Milliseconds(max_drift_increment_millis);
EXPECT_NEAR(0.0, total_error.InMicroseconds(),
max_acceptable_error.InMicroseconds());
}
diff --git a/chromium/media/capture/content/smooth_event_sampler_unittest.cc b/chromium/media/capture/content/smooth_event_sampler_unittest.cc
index 33642f31bb6..fcc94b61f49 100644
--- a/chromium/media/capture/content/smooth_event_sampler_unittest.cc
+++ b/chromium/media/capture/content/smooth_event_sampler_unittest.cc
@@ -40,7 +40,7 @@ void SteadyStateNoSampleAndAdvance(base::TimeDelta vsync,
}
base::TimeTicks InitialTestTimeTicks() {
- return base::TimeTicks() + base::TimeDelta::FromSeconds(1);
+ return base::TimeTicks() + base::Seconds(1);
}
@@ -49,8 +49,8 @@ base::TimeTicks InitialTestTimeTicks() {
// 60Hz sampled at 30Hz should produce 30Hz. In addition, this test contains
// much more comprehensive before/after/edge-case scenarios than the others.
TEST(SmoothEventSamplerTest, Sample60HertzAt30Hertz) {
- const base::TimeDelta capture_period = base::TimeDelta::FromSeconds(1) / 30;
- const base::TimeDelta vsync = base::TimeDelta::FromSeconds(1) / 60;
+ const base::TimeDelta capture_period = base::Seconds(1) / 30;
+ const base::TimeDelta vsync = base::Seconds(1) / 60;
SmoothEventSampler sampler(capture_period);
base::TimeTicks t = InitialTestTimeTicks();
@@ -82,8 +82,8 @@ TEST(SmoothEventSamplerTest, Sample60HertzAt30Hertz) {
// 50Hz sampled at 30Hz should produce a sequence where some frames are skipped.
TEST(SmoothEventSamplerTest, Sample50HertzAt30Hertz) {
- const base::TimeDelta capture_period = base::TimeDelta::FromSeconds(1) / 30;
- const base::TimeDelta vsync = base::TimeDelta::FromSeconds(1) / 50;
+ const base::TimeDelta capture_period = base::Seconds(1) / 30;
+ const base::TimeDelta vsync = base::Seconds(1) / 50;
SmoothEventSampler sampler(capture_period);
base::TimeTicks t = InitialTestTimeTicks();
@@ -121,8 +121,8 @@ TEST(SmoothEventSamplerTest, Sample50HertzAt30Hertz) {
// 75Hz sampled at 30Hz should produce a sequence where some frames are skipped.
TEST(SmoothEventSamplerTest, Sample75HertzAt30Hertz) {
- const base::TimeDelta capture_period = base::TimeDelta::FromSeconds(1) / 30;
- const base::TimeDelta vsync = base::TimeDelta::FromSeconds(1) / 75;
+ const base::TimeDelta capture_period = base::Seconds(1) / 30;
+ const base::TimeDelta vsync = base::Seconds(1) / 75;
SmoothEventSampler sampler(capture_period);
base::TimeTicks t = InitialTestTimeTicks();
@@ -164,8 +164,8 @@ TEST(SmoothEventSamplerTest, Sample75HertzAt30Hertz) {
// 30Hz sampled at 30Hz should produce 30Hz.
TEST(SmoothEventSamplerTest, Sample30HertzAt30Hertz) {
- const base::TimeDelta capture_period = base::TimeDelta::FromSeconds(1) / 30;
- const base::TimeDelta vsync = base::TimeDelta::FromSeconds(1) / 30;
+ const base::TimeDelta capture_period = base::Seconds(1) / 30;
+ const base::TimeDelta vsync = base::Seconds(1) / 30;
SmoothEventSampler sampler(capture_period);
base::TimeTicks t = InitialTestTimeTicks();
@@ -193,8 +193,8 @@ TEST(SmoothEventSamplerTest, Sample30HertzAt30Hertz) {
// 24Hz sampled at 30Hz should produce 24Hz.
TEST(SmoothEventSamplerTest, Sample24HertzAt30Hertz) {
- const base::TimeDelta capture_period = base::TimeDelta::FromSeconds(1) / 30;
- const base::TimeDelta vsync = base::TimeDelta::FromSeconds(1) / 24;
+ const base::TimeDelta capture_period = base::Seconds(1) / 30;
+ const base::TimeDelta vsync = base::Seconds(1) / 24;
SmoothEventSampler sampler(capture_period);
base::TimeTicks t = InitialTestTimeTicks();
@@ -223,11 +223,10 @@ TEST(SmoothEventSamplerTest, Sample24HertzAt30Hertz) {
// Tests that changing the minimum capture period during usage results in the
// desired behavior.
TEST(SmoothEventSamplerTest, Sample60HertzWithVariedCapturePeriods) {
- const base::TimeDelta vsync = base::TimeDelta::FromSeconds(1) / 60;
+ const base::TimeDelta vsync = base::Seconds(1) / 60;
const base::TimeDelta one_to_one_period = vsync;
const base::TimeDelta two_to_one_period = vsync * 2;
- const base::TimeDelta two_and_three_to_one_period =
- base::TimeDelta::FromSeconds(1) / 24;
+ const base::TimeDelta two_and_three_to_one_period = base::Seconds(1) / 24;
SmoothEventSampler sampler(one_to_one_period);
base::TimeTicks t = InitialTestTimeTicks();
@@ -280,7 +279,7 @@ void ReplayCheckingSamplerDecisions(const DataPoint* data_points,
SmoothEventSampler* sampler) {
base::TimeTicks t = InitialTestTimeTicks();
for (size_t i = 0; i < num_data_points; ++i) {
- t += base::TimeDelta::FromMicroseconds(
+ t += base::Microseconds(
static_cast<int64_t>(data_points[i].increment_ms * 1000));
ASSERT_EQ(data_points[i].should_capture,
AddEventAndConsiderSampling(sampler, t))
@@ -368,7 +367,7 @@ TEST(SmoothEventSamplerTest, DrawingAt24FpsWith60HzVsyncSampledAt30Hertz) {
{true, 33.44},
{false, 0}};
- SmoothEventSampler sampler(base::TimeDelta::FromSeconds(1) / 30);
+ SmoothEventSampler sampler(base::Seconds(1) / 30);
ReplayCheckingSamplerDecisions(data_points, base::size(data_points),
&sampler);
}
@@ -478,7 +477,7 @@ TEST(SmoothEventSamplerTest, DrawingAt30FpsWith60HzVsyncSampledAt30Hertz) {
{true, 33.44},
{true, 33.44}};
- SmoothEventSampler sampler(base::TimeDelta::FromSeconds(1) / 30);
+ SmoothEventSampler sampler(base::Seconds(1) / 30);
ReplayCheckingSamplerDecisions(data_points, base::size(data_points),
&sampler);
}
@@ -612,7 +611,7 @@ TEST(SmoothEventSamplerTest, DrawingAt60FpsWith60HzVsyncSampledAt30Hertz) {
{true, 16.72},
{true, 50.16}};
- SmoothEventSampler sampler(base::TimeDelta::FromSeconds(1) / 30);
+ SmoothEventSampler sampler(base::Seconds(1) / 30);
ReplayCheckingSamplerDecisions(data_points, base::size(data_points),
&sampler);
}
diff --git a/chromium/media/capture/content/video_capture_oracle.cc b/chromium/media/capture/content/video_capture_oracle.cc
index 246e4566f0f..eeac59ac590 100644
--- a/chromium/media/capture/content/video_capture_oracle.cc
+++ b/chromium/media/capture/content/video_capture_oracle.cc
@@ -42,8 +42,7 @@ const int kConsumerCapabilityEvaluationMicros = 1000000; // 1 second
// longer, and currently-accumulated feedback is not considered recent enough to
// base decisions off of. This prevents changes to the capture size when there
// is an unexpected pause in events.
-const base::TimeDelta kMaxTimeSinceLastFeedbackUpdate =
- base::TimeDelta::FromSeconds(1);
+const base::TimeDelta kMaxTimeSinceLastFeedbackUpdate = base::Seconds(1);
// The amount of additional time, since content animation was last detected, to
// continue being extra-careful about increasing the capture size. This is used
@@ -59,8 +58,7 @@ const int kProvingPeriodForAnimatedContentMicros = 30000000; // 30 seconds
// time between frames at |frame_rate| and return the fractional difference.
double FractionFromExpectedFrameRate(base::TimeDelta delta, int frame_rate) {
DCHECK_GT(frame_rate, 0);
- const base::TimeDelta expected_delta =
- base::TimeDelta::FromSeconds(1) / frame_rate;
+ const base::TimeDelta expected_delta = base::Seconds(1) / frame_rate;
return (delta - expected_delta) / expected_delta;
}
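
FractionFromExpectedFrameRate() returns the relative deviation of an observed inter-frame gap from the ideal period for a given frame rate. A brief sketch with illustrative numbers (30 fps expected, a 50 ms gap observed gives roughly +0.5, i.e. 50% slower than expected); the wrapper function is hypothetical:

#include "base/time/time.h"

double ExampleFractionFromExpectedFrameRate() {
  const int frame_rate = 30;
  const base::TimeDelta expected = base::Seconds(1) / frame_rate;  // ~33.3 ms
  const base::TimeDelta observed = base::Milliseconds(50);
  // Same formula as FractionFromExpectedFrameRate(observed, frame_rate).
  return (observed - expected) / expected;  // ~0.5
}
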
@@ -68,7 +66,7 @@ double FractionFromExpectedFrameRate(base::TimeDelta delta, int frame_rate) {
// TODO(miu): Patch FeedbackSignalAccumulator reset behavior and remove this
// hack.
base::TimeTicks JustAfter(base::TimeTicks t) {
- return t + base::TimeDelta::FromMicroseconds(1);
+ return t + base::Microseconds(1);
}
} // anonymous namespace
@@ -89,10 +87,10 @@ VideoCaptureOracle::VideoCaptureOracle(bool enable_auto_throttling)
smoothing_sampler_(kDefaultMinCapturePeriod),
content_sampler_(kDefaultMinCapturePeriod),
min_capture_period_(kDefaultMinCapturePeriod),
- buffer_pool_utilization_(base::TimeDelta::FromMicroseconds(
- kBufferUtilizationEvaluationMicros)),
- estimated_capable_area_(base::TimeDelta::FromMicroseconds(
- kConsumerCapabilityEvaluationMicros)) {
+ buffer_pool_utilization_(
+ base::Microseconds(kBufferUtilizationEvaluationMicros)),
+ estimated_capable_area_(
+ base::Microseconds(kConsumerCapabilityEvaluationMicros)) {
VLOG(1) << "Capture size auto-throttling is now "
<< (enable_auto_throttling ? "enabled." : "disabled.");
}
@@ -205,7 +203,7 @@ bool VideoCaptureOracle::ObserveEventAndDecideCapture(
event_time - GetFrameTimestamp(next_frame_number_ - 1);
}
const base::TimeDelta upper_bound =
- base::TimeDelta::FromMilliseconds(kUpperBoundDurationEstimateMicros);
+ base::Milliseconds(kUpperBoundDurationEstimateMicros);
duration_of_next_frame_ = std::max(
std::min(duration_of_next_frame_, upper_bound), min_capture_period());
}
@@ -345,8 +343,8 @@ void VideoCaptureOracle::RecordConsumerFeedback(
base::TimeDelta period;
if (std::isfinite(feedback.max_framerate_fps) &&
feedback.max_framerate_fps > 0.0) {
- period = std::max(min_capture_period_,
- base::TimeDelta::FromHz(feedback.max_framerate_fps));
+ period =
+ std::max(min_capture_period_, base::Hertz(feedback.max_framerate_fps));
} else {
period = min_capture_period_;
}
diff --git a/chromium/media/capture/content/video_capture_oracle.h b/chromium/media/capture/content/video_capture_oracle.h
index f83e91c1309..4ed6901c013 100644
--- a/chromium/media/capture/content/video_capture_oracle.h
+++ b/chromium/media/capture/content/video_capture_oracle.h
@@ -139,11 +139,11 @@ class CAPTURE_EXPORT VideoCaptureOracle {
// Clients are expected to set a better minimum capture period after
// VideoCaptureOracle is constructed.
static constexpr base::TimeDelta kDefaultMinCapturePeriod =
- base::TimeDelta::FromMicroseconds(1000000 / 5); // 5 FPS
+ base::Microseconds(1000000 / 5); // 5 FPS
// Default minimum size change period if SetMinSizeChangePeriod is not called.
static constexpr base::TimeDelta kDefaultMinSizeChangePeriod =
- base::TimeDelta::FromSeconds(3);
+ base::Seconds(3);
void SetLogCallback(
base::RepeatingCallback<void(const std::string&)> emit_log_cb);
diff --git a/chromium/media/capture/content/video_capture_oracle_unittest.cc b/chromium/media/capture/content/video_capture_oracle_unittest.cc
index aea1256f456..06dc6735804 100644
--- a/chromium/media/capture/content/video_capture_oracle_unittest.cc
+++ b/chromium/media/capture/content/video_capture_oracle_unittest.cc
@@ -12,8 +12,8 @@ namespace media {
namespace {
constexpr base::TimeTicks kInitialTestTimeTicks =
- base::TimeTicks() + base::TimeDelta::FromSeconds(1);
-constexpr base::TimeDelta k30HzPeriod = base::TimeDelta::FromSeconds(1) / 30;
+ base::TimeTicks() + base::Seconds(1);
+constexpr base::TimeDelta k30HzPeriod = base::Seconds(1) / 30;
constexpr gfx::Size k1080pSize = gfx::Size(1920, 1080);
constexpr gfx::Size k720pSize = gfx::Size(1280, 720);
constexpr gfx::Size k360pSize = gfx::Size(640, 360);
@@ -197,9 +197,8 @@ TEST(VideoCaptureOracleTest, TransitionsSmoothlyBetweenSamplers) {
// Tests that VideoCaptureOracle prevents refresh request events from initiating
// simultaneous captures.
TEST(VideoCaptureOracleTest, SamplesAtCorrectTimesAroundRefreshRequests) {
- const base::TimeDelta vsync_interval = base::TimeDelta::FromSeconds(1) / 60;
- const base::TimeDelta refresh_interval =
- base::TimeDelta::FromMilliseconds(125); // 8 FPS
+ const base::TimeDelta vsync_interval = base::Seconds(1) / 60;
+ const base::TimeDelta refresh_interval = base::Milliseconds(125); // 8 FPS
VideoCaptureOracle oracle(false);
oracle.SetMinCapturePeriod(k30HzPeriod);
@@ -253,7 +252,7 @@ TEST(VideoCaptureOracleTest, SamplesAtCorrectTimesAroundRefreshRequests) {
t += refresh_interval;
if (oracle.ObserveEventAndDecideCapture(VideoCaptureOracle::kRefreshRequest,
gfx::Rect(), t)) {
- const int frame_number = oracle.next_frame_number();
+ frame_number = oracle.next_frame_number();
oracle.RecordCapture(0.0);
ASSERT_TRUE(oracle.CompleteCapture(frame_number, true, &ignored));
did_complete_a_capture = true;
@@ -303,7 +302,7 @@ TEST(VideoCaptureOracleTest, DoesNotRapidlyChangeCaptureSize) {
// Run 30 seconds of frame captures without any source size changes.
base::TimeTicks t = kInitialTestTimeTicks;
const base::TimeDelta event_increment = k30HzPeriod * 2;
- base::TimeTicks end_t = t + base::TimeDelta::FromSeconds(30);
+ base::TimeTicks end_t = t + base::Seconds(30);
for (; t < end_t; t += event_increment) {
ASSERT_TRUE(oracle.ObserveEventAndDecideCapture(
VideoCaptureOracle::kCompositorUpdate, gfx::Rect(), t));
@@ -323,7 +322,7 @@ TEST(VideoCaptureOracleTest, DoesNotRapidlyChangeCaptureSize) {
gfx::Size source_size = oracle.capture_size();
base::TimeTicks time_of_last_size_change = kInitialTestTimeTicks;
gfx::Size last_capture_size = oracle.capture_size();
- end_t = t + base::TimeDelta::FromSeconds(30);
+ end_t = t + base::Seconds(30);
for (; t < end_t; t += event_increment) {
// Change the source size every frame to a random non-empty size.
const gfx::Size last_source_size = source_size;
@@ -336,7 +335,7 @@ TEST(VideoCaptureOracleTest, DoesNotRapidlyChangeCaptureSize) {
VideoCaptureOracle::kCompositorUpdate, gfx::Rect(), t));
if (oracle.capture_size() != last_capture_size) {
- ASSERT_GE(t - time_of_last_size_change, base::TimeDelta::FromSeconds(1));
+ ASSERT_GE(t - time_of_last_size_change, base::Seconds(1));
time_of_last_size_change = t;
last_capture_size = oracle.capture_size();
}
@@ -363,7 +362,7 @@ TEST(VideoCaptureOracleTest, ResizeThrottlingDisabled) {
// changes. The capture size should be different every time.
base::TimeTicks t = kInitialTestTimeTicks;
const base::TimeDelta event_increment = k30HzPeriod * 2;
- base::TimeTicks end_t = t + base::TimeDelta::FromSeconds(30);
+ base::TimeTicks end_t = t + base::Seconds(30);
gfx::Size source_size = oracle.capture_size();
gfx::Size last_capture_size = oracle.capture_size();
for (; t < end_t; t += event_increment) {
@@ -415,7 +414,7 @@ void RunAutoThrottleTest(bool is_content_animating,
base::TimeTicks t = kInitialTestTimeTicks;
base::TimeTicks time_of_last_size_change = t;
const base::TimeDelta event_increment = k30HzPeriod * 2;
- base::TimeTicks end_t = t + base::TimeDelta::FromSeconds(10);
+ base::TimeTicks end_t = t + base::Seconds(10);
for (; t < end_t; t += event_increment) {
ASSERT_TRUE(oracle.ObserveEventAndDecideCapture(
VideoCaptureOracle::kCompositorUpdate,
@@ -448,7 +447,7 @@ void RunAutoThrottleTest(bool is_content_animating,
<< ", i=" << i);
gfx::Size stepped_down_size;
- end_t = t + base::TimeDelta::FromSeconds(10);
+ end_t = t + base::Seconds(10);
for (; t < end_t; t += event_increment) {
ASSERT_TRUE(oracle.ObserveEventAndDecideCapture(
VideoCaptureOracle::kCompositorUpdate,
@@ -486,7 +485,7 @@ void RunAutoThrottleTest(bool is_content_animating,
<< ", i=" << i);
gfx::Size stepped_up_size;
- end_t = t + base::TimeDelta::FromSeconds(is_content_animating ? 90 : 10);
+ end_t = t + base::Seconds(is_content_animating ? 90 : 10);
for (; t < end_t; t += event_increment) {
ASSERT_TRUE(oracle.ObserveEventAndDecideCapture(
VideoCaptureOracle::kCompositorUpdate,
@@ -496,7 +495,7 @@ void RunAutoThrottleTest(bool is_content_animating,
if (oracle.capture_size() != starting_size) {
// When content is animating, a much longer amount of time must pass
// before the capture size will step up.
- ASSERT_LT(base::TimeDelta::FromSeconds(is_content_animating ? 15 : 1),
+ ASSERT_LT(base::Seconds(is_content_animating ? 15 : 1),
t - time_of_last_size_change);
time_of_last_size_change = t;
stepped_up_size = oracle.capture_size();
@@ -547,7 +546,7 @@ TEST(VideoCaptureOracleTest,
// size changes.
base::TimeTicks t = kInitialTestTimeTicks;
const base::TimeDelta event_increment = k30HzPeriod * 2;
- base::TimeTicks end_t = t + base::TimeDelta::FromSeconds(10);
+ base::TimeTicks end_t = t + base::Seconds(10);
for (; t < end_t; t += event_increment) {
ASSERT_TRUE(oracle.ObserveEventAndDecideCapture(
VideoCaptureOracle::kCompositorUpdate, gfx::Rect(k720pSize), t));
@@ -562,7 +561,7 @@ TEST(VideoCaptureOracleTest,
// Increase utilization to 1000%, but expect no capture size change because
// there has never been any consumer feedback.
const gfx::Size starting_size = oracle.capture_size();
- end_t = t + base::TimeDelta::FromSeconds(10);
+ end_t = t + base::Seconds(10);
for (; t < end_t; t += event_increment) {
ASSERT_TRUE(oracle.ObserveEventAndDecideCapture(
VideoCaptureOracle::kCompositorUpdate, gfx::Rect(k720pSize), t));
@@ -591,7 +590,7 @@ TEST(VideoCaptureOracleTest, IncreasesFrequentlyOnlyAfterSourceSizeChange) {
// machine that can do more, but won't because the source size is small.
base::TimeTicks t = kInitialTestTimeTicks;
const base::TimeDelta event_increment = k30HzPeriod * 2;
- base::TimeTicks end_t = t + base::TimeDelta::FromSeconds(10);
+ base::TimeTicks end_t = t + base::Seconds(10);
for (; t < end_t; t += event_increment) {
if (!oracle.ObserveEventAndDecideCapture(
VideoCaptureOracle::kCompositorUpdate, gfx::Rect(k360pSize), t)) {
@@ -611,7 +610,7 @@ TEST(VideoCaptureOracleTest, IncreasesFrequentlyOnlyAfterSourceSizeChange) {
// seconds.
oracle.SetSourceSize(k720pSize);
gfx::Size last_capture_size = oracle.capture_size();
- end_t = t + base::TimeDelta::FromSeconds(15);
+ end_t = t + base::Seconds(15);
for (; t < end_t; t += event_increment) {
if (!oracle.ObserveEventAndDecideCapture(
VideoCaptureOracle::kCompositorUpdate, gfx::Rect(k720pSize), t)) {
@@ -634,7 +633,7 @@ TEST(VideoCaptureOracleTest, IncreasesFrequentlyOnlyAfterSourceSizeChange) {
// utilization to achieve a steady-state.
oracle.SetSourceSize(k1080pSize);
gfx::Size stepped_down_size;
- end_t = t + base::TimeDelta::FromSeconds(10);
+ end_t = t + base::Seconds(10);
for (; t < end_t; t += event_increment) {
if (!oracle.ObserveEventAndDecideCapture(
VideoCaptureOracle::kCompositorUpdate, gfx::Rect(k1080pSize), t)) {
@@ -664,10 +663,9 @@ TEST(VideoCaptureOracleTest, IncreasesFrequentlyOnlyAfterSourceSizeChange) {
// Now, if we report under-utilization again (without any source size change),
// there should be a long "proving period" before there is any increase in
// capture size made by the oracle.
- const base::TimeTicks proving_period_end_time =
- t + base::TimeDelta::FromSeconds(15);
+ const base::TimeTicks proving_period_end_time = t + base::Seconds(15);
gfx::Size stepped_up_size;
- end_t = t + base::TimeDelta::FromSeconds(60);
+ end_t = t + base::Seconds(60);
for (; t < end_t; t += event_increment) {
if (!oracle.ObserveEventAndDecideCapture(
VideoCaptureOracle::kCompositorUpdate, gfx::Rect(k1080pSize), t)) {
@@ -708,7 +706,7 @@ TEST(VideoCaptureOracleTest, DoesNotAutoThrottleWhenResolutionIsFixed) {
// size changes.
base::TimeTicks t = kInitialTestTimeTicks;
const base::TimeDelta event_increment = k30HzPeriod * 2;
- base::TimeTicks end_t = t + base::TimeDelta::FromSeconds(10);
+ base::TimeTicks end_t = t + base::Seconds(10);
for (; t < end_t; t += event_increment) {
ASSERT_TRUE(oracle.ObserveEventAndDecideCapture(
VideoCaptureOracle::kCompositorUpdate, gfx::Rect(), t));
@@ -723,7 +721,7 @@ TEST(VideoCaptureOracleTest, DoesNotAutoThrottleWhenResolutionIsFixed) {
// Now run 10 seconds with overload indicated. Still, expect no capture size
// changes.
- end_t = t + base::TimeDelta::FromSeconds(10);
+ end_t = t + base::Seconds(10);
for (; t < end_t; t += event_increment) {
ASSERT_TRUE(oracle.ObserveEventAndDecideCapture(
VideoCaptureOracle::kCompositorUpdate, gfx::Rect(), t));
@@ -747,7 +745,7 @@ TEST(VideoCaptureOracleTest, RespectsMaxPixelsFeedback) {
// Run 1 second with no feedback and expect no capture size changes.
base::TimeTicks t = kInitialTestTimeTicks;
const base::TimeDelta event_increment = k30HzPeriod * 2;
- base::TimeTicks end_t = t + base::TimeDelta::FromSeconds(1);
+ base::TimeTicks end_t = t + base::Seconds(1);
for (; t < end_t; t += event_increment) {
ASSERT_TRUE(oracle.ObserveEventAndDecideCapture(
VideoCaptureOracle::kCompositorUpdate, gfx::Rect(), t));
@@ -814,7 +812,7 @@ TEST(VideoCaptureOracleTest, IgnoresMaxPixelsFeedbackIfAutoThrottlingIsOn) {
// Run 1 second with no feedback and expect no capture size changes.
base::TimeTicks t = kInitialTestTimeTicks;
const base::TimeDelta event_increment = k30HzPeriod * 2;
- base::TimeTicks end_t = t + base::TimeDelta::FromSeconds(1);
+ base::TimeTicks end_t = t + base::Seconds(1);
for (; t < end_t; t += event_increment) {
ASSERT_TRUE(oracle.ObserveEventAndDecideCapture(
VideoCaptureOracle::kCompositorUpdate, gfx::Rect(), t));
@@ -861,10 +859,9 @@ TEST(VideoCaptureOracleTest, IgnoresMaxPixelsFeedbackIfAutoThrottlingIsOn) {
// Tests that VideoCaptureOracle respects the max framerate requested by the
// consumer.
TEST(VideoCaptureOracleTest, RespectsMaxFrameRateFeedback) {
- constexpr base::TimeDelta vsync_interval = base::TimeDelta::FromHz(60);
- constexpr base::TimeDelta k5HzPeriod = base::TimeDelta::FromHz(5);
- constexpr base::TimeDelta kAllowedError =
- base::TimeDelta::FromMilliseconds(1);
+ constexpr base::TimeDelta vsync_interval = base::Hertz(60);
+ constexpr base::TimeDelta k5HzPeriod = base::Hertz(5);
+ constexpr base::TimeDelta kAllowedError = base::Milliseconds(1);
constexpr float k5Fps = 5.0;
constexpr float kNoResourceUtilization = -1.0;
constexpr float kNoFpsLimit = std::numeric_limits<float>::infinity();
diff --git a/chromium/media/capture/mojom/BUILD.gn b/chromium/media/capture/mojom/BUILD.gn
index 8986e29511e..81b5703a525 100644
--- a/chromium/media/capture/mojom/BUILD.gn
+++ b/chromium/media/capture/mojom/BUILD.gn
@@ -137,6 +137,7 @@ mojom("image_capture") {
# prepackaged redistributable JS bindings. It is therefore not desirable to
# scramble these messages.
scramble_message_ids = false
+ webui_module_path = "/media/capture/mojom"
}
source_set("image_capture_types") {
diff --git a/chromium/media/capture/video/android/video_capture_device_android.cc b/chromium/media/capture/video/android/video_capture_device_android.cc
index e9a555e7da0..2e5f448099e 100644
--- a/chromium/media/capture/video/android/video_capture_device_android.cc
+++ b/chromium/media/capture/video/android/video_capture_device_android.cc
@@ -162,7 +162,7 @@ void VideoCaptureDeviceAndroid::AllocateAndStart(
CHECK(!(capture_format_.frame_size.height() % 2));
if (capture_format_.frame_rate > 0) {
- frame_interval_ = base::TimeDelta::FromMicroseconds(
+ frame_interval_ = base::Microseconds(
(base::Time::kMicrosecondsPerSecond + capture_format_.frame_rate - 1) /
capture_format_.frame_rate);
}
@@ -326,8 +326,7 @@ void VideoCaptureDeviceAndroid::OnI420FrameAvailable(JNIEnv* env,
return;
const int64_t absolute_micro =
timestamp / base::Time::kNanosecondsPerMicrosecond;
- const base::TimeDelta capture_time =
- base::TimeDelta::FromMicroseconds(absolute_micro);
+ const base::TimeDelta capture_time = base::Microseconds(absolute_micro);
const base::TimeTicks current_time = base::TimeTicks::Now();
ProcessFirstFrameAvailable(current_time);
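The frame_interval_ assignment above keeps the round-up idiom (a + b - 1) / b when deriving the interval from the frame rate, so the computed interval never comes out shorter than one second divided by the rate and the device does not forward frames faster than requested. A standalone illustration of the idiom with integer rates (names invented for the example; the capture code applies it with base::Time::kMicrosecondsPerSecond and the configured format's frame rate):

#include <cstdint>
#include <initializer_list>
#include <iostream>

// (a + b - 1) / b rounds up for positive integers where a / b truncates.
constexpr int64_t CeilDiv(int64_t a, int64_t b) {
  return (a + b - 1) / b;
}

int main() {
  constexpr int64_t kMicrosecondsPerSecond = 1'000'000;
  for (int64_t fps : {24, 25, 30, 60}) {
    std::cout << fps << " fps -> " << CeilDiv(kMicrosecondsPerSecond, fps)
              << " us/frame (truncated: " << kMicrosecondsPerSecond / fps
              << " us/frame)\n";
  }
  return 0;
}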
diff --git a/chromium/media/capture/video/android/video_capture_device_factory_android.h b/chromium/media/capture/video/android/video_capture_device_factory_android.h
index ffc343638b5..a787f32a0cb 100644
--- a/chromium/media/capture/video/android/video_capture_device_factory_android.h
+++ b/chromium/media/capture/video/android/video_capture_device_factory_android.h
@@ -26,6 +26,12 @@ class CAPTURE_EXPORT VideoCaptureDeviceFactoryAndroid
jlong nativeVideoCaptureDeviceAndroid);
VideoCaptureDeviceFactoryAndroid();
+
+ VideoCaptureDeviceFactoryAndroid(const VideoCaptureDeviceFactoryAndroid&) =
+ delete;
+ VideoCaptureDeviceFactoryAndroid& operator=(
+ const VideoCaptureDeviceFactoryAndroid&) = delete;
+
~VideoCaptureDeviceFactoryAndroid() override;
std::unique_ptr<VideoCaptureDevice> CreateDevice(
@@ -51,8 +57,6 @@ class CAPTURE_EXPORT VideoCaptureDeviceFactoryAndroid
// an actively used camera is opened again (see https://crbug.com/1138608).
base::flat_map<std::string, VideoCaptureFormats> supported_formats_cache_;
base::flat_map<std::string, bool> zooms_cache_;
-
- DISALLOW_COPY_AND_ASSIGN(VideoCaptureDeviceFactoryAndroid);
};
} // namespace media
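This header, and the many below that drop DISALLOW_COPY_AND_ASSIGN from their private sections, now declare the deleted copy constructor and copy assignment operator explicitly next to the other constructors, the form Chromium is migrating to as base/macros.h shrinks. A minimal before/after sketch (the class name is illustrative):

// Before: copyability suppressed by a macro at the end of the class.
//
//   class Widget {
//    public:
//     Widget();
//    private:
//     DISALLOW_COPY_AND_ASSIGN(Widget);
//   };
//
// After: the deleted special members are spelled out in the public section,
// so a copy attempt produces a diagnostic that names the deleted function.
class Widget {
 public:
  Widget() = default;

  Widget(const Widget&) = delete;
  Widget& operator=(const Widget&) = delete;

  ~Widget() = default;
};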
diff --git a/chromium/media/capture/video/chromeos/camera_3a_controller.cc b/chromium/media/capture/video/chromeos/camera_3a_controller.cc
index e1781872044..a73bd4d3d7b 100644
--- a/chromium/media/capture/video/chromeos/camera_3a_controller.cc
+++ b/chromium/media/capture/video/chromeos/camera_3a_controller.cc
@@ -222,7 +222,7 @@ void Camera3AController::Stabilize3AForStillCapture(
}
Set3aStabilizedCallback(std::move(on_3a_stabilized_callback),
- base::TimeDelta::FromSeconds(2));
+ base::Seconds(2));
if (af_mode_ !=
cros::mojom::AndroidControlAfMode::ANDROID_CONTROL_AF_MODE_OFF) {
@@ -256,9 +256,8 @@ void Camera3AController::OnResultMetadataAvailable(
// metadata from zero-shutter-lag request may be out of order compared to
// previous regular requests.
// https://developer.android.com/reference/android/hardware/camera2/CaptureResult#CONTROL_ENABLE_ZSL
- latest_sensor_timestamp_ =
- std::max(latest_sensor_timestamp_,
- base::TimeDelta::FromNanoseconds(sensor_timestamp[0]));
+ latest_sensor_timestamp_ = std::max(latest_sensor_timestamp_,
+ base::Nanoseconds(sensor_timestamp[0]));
}
if (!af_mode_set_) {
@@ -571,7 +570,7 @@ void Camera3AController::SetPointOfInterestOn3AModeSet() {
Set3aStabilizedCallback(
base::BindOnce(&Camera3AController::SetPointOfInterestOn3AStabilized,
GetWeakPtr()),
- base::TimeDelta::FromSeconds(2));
+ base::Seconds(2));
SetCaptureMetadata(
cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AF_TRIGGER,
cros::mojom::AndroidControlAfTrigger::ANDROID_CONTROL_AF_TRIGGER_START);
@@ -590,9 +589,8 @@ void Camera3AController::SetPointOfInterestOn3AStabilized() {
delayed_ae_unlock_callback_.Reset(base::BindOnce(
&Camera3AController::SetPointOfInterestUnlockAe, GetWeakPtr()));
// TODO(shik): Apply different delays for image capture / video recording.
- task_runner_->PostDelayedTask(FROM_HERE,
- delayed_ae_unlock_callback_.callback(),
- base::TimeDelta::FromSeconds(4));
+ task_runner_->PostDelayedTask(
+ FROM_HERE, delayed_ae_unlock_callback_.callback(), base::Seconds(4));
}
void Camera3AController::SetPointOfInterestUnlockAe() {
diff --git a/chromium/media/capture/video/chromeos/camera_app_device_bridge_impl.cc b/chromium/media/capture/video/chromeos/camera_app_device_bridge_impl.cc
index cacc47a09cd..92e25d9b05e 100644
--- a/chromium/media/capture/video/chromeos/camera_app_device_bridge_impl.cc
+++ b/chromium/media/capture/video/chromeos/camera_app_device_bridge_impl.cc
@@ -111,7 +111,7 @@ void CameraAppDeviceBridgeImpl::SetCameraInfoGetter(
void CameraAppDeviceBridgeImpl::UnsetCameraInfoGetter() {
base::AutoLock lock(camera_info_getter_lock_);
- camera_info_getter_ = {};
+ camera_info_getter_ = base::NullCallback();
}
void CameraAppDeviceBridgeImpl::SetVirtualDeviceController(
@@ -122,7 +122,7 @@ void CameraAppDeviceBridgeImpl::SetVirtualDeviceController(
void CameraAppDeviceBridgeImpl::UnsetVirtualDeviceController() {
base::AutoLock lock(virtual_device_controller_lock_);
- virtual_device_controller_ = {};
+ virtual_device_controller_ = base::NullCallback();
}
base::WeakPtr<CameraAppDeviceImpl>
diff --git a/chromium/media/capture/video/chromeos/camera_app_device_bridge_impl.h b/chromium/media/capture/video/chromeos/camera_app_device_bridge_impl.h
index 1ad8e6e902d..fc3b093d5bf 100644
--- a/chromium/media/capture/video/chromeos/camera_app_device_bridge_impl.h
+++ b/chromium/media/capture/video/chromeos/camera_app_device_bridge_impl.h
@@ -28,6 +28,10 @@ class CAPTURE_EXPORT CameraAppDeviceBridgeImpl
CameraAppDeviceBridgeImpl();
+ CameraAppDeviceBridgeImpl(const CameraAppDeviceBridgeImpl&) = delete;
+ CameraAppDeviceBridgeImpl& operator=(const CameraAppDeviceBridgeImpl&) =
+ delete;
+
~CameraAppDeviceBridgeImpl() override;
static CameraAppDeviceBridgeImpl* GetInstance();
@@ -97,8 +101,6 @@ class CAPTURE_EXPORT CameraAppDeviceBridgeImpl
base::Lock task_runner_map_lock_;
base::flat_map<std::string, scoped_refptr<base::SingleThreadTaskRunner>>
ipc_task_runners_ GUARDED_BY(task_runner_map_lock_);
-
- DISALLOW_COPY_AND_ASSIGN(CameraAppDeviceBridgeImpl);
};
} // namespace media
diff --git a/chromium/media/capture/video/chromeos/camera_app_device_impl.cc b/chromium/media/capture/video/chromeos/camera_app_device_impl.cc
index d6adb1baa0b..8637559c7af 100644
--- a/chromium/media/capture/video/chromeos/camera_app_device_impl.cc
+++ b/chromium/media/capture/video/chromeos/camera_app_device_impl.cc
@@ -126,7 +126,6 @@ void CameraAppDeviceImpl::ConsumeReprocessOptions(
result_task_queue.push(std::move(still_capture_task));
base::AutoLock lock(reprocess_tasks_lock_);
-
while (!reprocess_task_queue_.empty()) {
result_task_queue.push(std::move(reprocess_task_queue_.front()));
reprocess_task_queue_.pop();
@@ -200,28 +199,32 @@ void CameraAppDeviceImpl::GetCameraInfo(GetCameraInfoCallback callback) {
std::move(callback).Run(camera_info_.Clone());
}
-void CameraAppDeviceImpl::SetReprocessOption(
- cros::mojom::Effect effect,
- SetReprocessOptionCallback reprocess_result_callback) {
+void CameraAppDeviceImpl::SetReprocessOptions(
+ const std::vector<cros::mojom::Effect>& effects,
+ mojo::PendingRemote<cros::mojom::ReprocessResultListener> listener,
+ SetReprocessOptionsCallback callback) {
DCHECK(mojo_task_runner_->BelongsToCurrentThread());
- ReprocessTask task;
- task.effect = effect;
- task.callback = media::BindToCurrentLoop(
- base::BindOnce(&CameraAppDeviceImpl::SetReprocessResultOnMojoThread,
- weak_ptr_factory_for_mojo_.GetWeakPtr(),
- std::move(reprocess_result_callback)));
-
- if (effect == cros::mojom::Effect::PORTRAIT_MODE) {
- auto e = BuildMetadataEntry(
- static_cast<cros::mojom::CameraMetadataTag>(kPortraitModeVendorKey),
- int32_t{1});
- task.extra_metadata.push_back(std::move(e));
- }
-
base::AutoLock lock(reprocess_tasks_lock_);
-
- reprocess_task_queue_.push(std::move(task));
+ reprocess_listener_.reset();
+ reprocess_listener_.Bind(std::move(listener));
+ reprocess_task_queue_ = {};
+ for (const auto& effect : effects) {
+ ReprocessTask task;
+ task.effect = effect;
+ task.callback = media::BindToCurrentLoop(
+ base::BindOnce(&CameraAppDeviceImpl::SetReprocessResultOnMojoThread,
+ weak_ptr_factory_for_mojo_.GetWeakPtr(), effect));
+
+ if (effect == cros::mojom::Effect::PORTRAIT_MODE) {
+ auto e = BuildMetadataEntry(
+ static_cast<cros::mojom::CameraMetadataTag>(kPortraitModeVendorKey),
+ 1);
+ task.extra_metadata.push_back(std::move(e));
+ }
+ reprocess_task_queue_.push(std::move(task));
+ }
+ std::move(callback).Run();
}
void CameraAppDeviceImpl::SetFpsRange(const gfx::Range& fps_range,
@@ -461,12 +464,13 @@ void CameraAppDeviceImpl::OnDetectedDocumentCornersOnMojoThread(
}
void CameraAppDeviceImpl::SetReprocessResultOnMojoThread(
- SetReprocessOptionCallback callback,
+ cros::mojom::Effect effect,
const int32_t status,
media::mojom::BlobPtr blob) {
DCHECK(mojo_task_runner_->BelongsToCurrentThread());
- std::move(callback).Run(status, std::move(blob));
+ base::AutoLock lock(reprocess_tasks_lock_);
+ reprocess_listener_->OnReprocessDone(effect, status, std::move(blob));
}
void CameraAppDeviceImpl::NotifyShutterDoneOnMojoThread() {
diff --git a/chromium/media/capture/video/chromeos/camera_app_device_impl.h b/chromium/media/capture/video/chromeos/camera_app_device_impl.h
index c1e5c87a19c..de6df72aff1 100644
--- a/chromium/media/capture/video/chromeos/camera_app_device_impl.h
+++ b/chromium/media/capture/video/chromeos/camera_app_device_impl.h
@@ -44,7 +44,7 @@ struct ReprocessTask {
ReprocessTask(ReprocessTask&& other);
~ReprocessTask();
cros::mojom::Effect effect;
- cros::mojom::CameraAppDevice::SetReprocessOptionCallback callback;
+ base::OnceCallback<void(int32_t, media::mojom::BlobPtr)> callback;
std::vector<cros::mojom::CameraMetadataEntryPtr> extra_metadata;
};
@@ -73,6 +73,10 @@ class CAPTURE_EXPORT CameraAppDeviceImpl : public cros::mojom::CameraAppDevice {
CameraAppDeviceImpl(const std::string& device_id,
cros::mojom::CameraInfoPtr camera_info);
+
+ CameraAppDeviceImpl(const CameraAppDeviceImpl&) = delete;
+ CameraAppDeviceImpl& operator=(const CameraAppDeviceImpl&) = delete;
+
~CameraAppDeviceImpl() override;
// Binds the mojo receiver to this implementation.
@@ -128,8 +132,10 @@ class CAPTURE_EXPORT CameraAppDeviceImpl : public cros::mojom::CameraAppDevice {
// cros::mojom::CameraAppDevice implementations.
void GetCameraInfo(GetCameraInfoCallback callback) override;
- void SetReprocessOption(cros::mojom::Effect effect,
- SetReprocessOptionCallback callback) override;
+ void SetReprocessOptions(
+ const std::vector<cros::mojom::Effect>& effects,
+ mojo::PendingRemote<cros::mojom::ReprocessResultListener> listener,
+ SetReprocessOptionsCallback callback) override;
void SetFpsRange(const gfx::Range& fps_range,
SetFpsRangeCallback callback) override;
void SetStillCaptureResolution(
@@ -168,7 +174,7 @@ class CAPTURE_EXPORT CameraAppDeviceImpl : public cros::mojom::CameraAppDevice {
bool success,
const std::vector<gfx::PointF>& corners);
- void SetReprocessResultOnMojoThread(SetReprocessOptionCallback callback,
+ void SetReprocessResultOnMojoThread(cros::mojom::Effect effect,
const int32_t status,
media::mojom::BlobPtr blob);
@@ -193,6 +199,8 @@ class CAPTURE_EXPORT CameraAppDeviceImpl : public cros::mojom::CameraAppDevice {
base::Lock reprocess_tasks_lock_;
base::queue<ReprocessTask> reprocess_task_queue_
GUARDED_BY(reprocess_tasks_lock_);
+ mojo::Remote<cros::mojom::ReprocessResultListener> reprocess_listener_
+ GUARDED_BY(reprocess_tasks_lock_);
// It will be inserted and read from different threads.
base::Lock fps_ranges_lock_;
@@ -235,8 +243,6 @@ class CAPTURE_EXPORT CameraAppDeviceImpl : public cros::mojom::CameraAppDevice {
// The weak pointers should be dereferenced and invalidated on the Mojo
// thread.
base::WeakPtrFactory<CameraAppDeviceImpl> weak_ptr_factory_for_mojo_{this};
-
- DISALLOW_COPY_AND_ASSIGN(CameraAppDeviceImpl);
};
} // namespace media
diff --git a/chromium/media/capture/video/chromeos/camera_app_device_provider_impl.h b/chromium/media/capture/video/chromeos/camera_app_device_provider_impl.h
index 3fbb56257b6..a5c475e0622 100644
--- a/chromium/media/capture/video/chromeos/camera_app_device_provider_impl.h
+++ b/chromium/media/capture/video/chromeos/camera_app_device_provider_impl.h
@@ -24,6 +24,11 @@ class CAPTURE_EXPORT CameraAppDeviceProviderImpl
CameraAppDeviceProviderImpl(
mojo::PendingRemote<cros::mojom::CameraAppDeviceBridge> bridge,
DeviceIdMappingCallback mapping_callback);
+
+ CameraAppDeviceProviderImpl(const CameraAppDeviceProviderImpl&) = delete;
+ CameraAppDeviceProviderImpl& operator=(const CameraAppDeviceProviderImpl&) =
+ delete;
+
~CameraAppDeviceProviderImpl() override;
void Bind(
mojo::PendingReceiver<cros::mojom::CameraAppDeviceProvider> receiver);
@@ -54,8 +59,6 @@ class CAPTURE_EXPORT CameraAppDeviceProviderImpl
mojo::Receiver<cros::mojom::CameraAppDeviceProvider> receiver_{this};
base::WeakPtrFactory<CameraAppDeviceProviderImpl> weak_ptr_factory_;
-
- DISALLOW_COPY_AND_ASSIGN(CameraAppDeviceProviderImpl);
};
} // namespace media
diff --git a/chromium/media/capture/video/chromeos/camera_device_delegate.cc b/chromium/media/capture/video/chromeos/camera_device_delegate.cc
index 7030912ff6f..39764145072 100644
--- a/chromium/media/capture/video/chromeos/camera_device_delegate.cc
+++ b/chromium/media/capture/video/chromeos/camera_device_delegate.cc
@@ -1438,10 +1438,15 @@ void CameraDeviceDelegate::OnResultMetadataAvailable(
if (ae_compensation.size() == 1)
result_metadata_.ae_compensation = ae_compensation[0];
+ auto lens_state = GetMetadataEntryAsSpan<uint8_t>(
+ result_metadata, cros::mojom::CameraMetadataTag::ANDROID_LENS_STATE);
result_metadata_frame_number_ = frame_number;
// We need to wait the new result metadata for new settings.
if (result_metadata_frame_number_ >
- result_metadata_frame_number_for_photo_state_) {
+ result_metadata_frame_number_for_photo_state_ &&
+ lens_state.size() == 1 &&
+ static_cast<cros::mojom::AndroidLensState>(lens_state[0]) ==
+ cros::mojom::AndroidLensState::ANDROID_LENS_STATE_STATIONARY) {
for (auto& request : get_photo_state_queue_)
ipc_task_runner_->PostTask(FROM_HERE, std::move(request));
get_photo_state_queue_.clear();
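The widened condition above holds the queued GetPhotoState replies back until the incoming result metadata is newer than the frame on which the photo state was last changed and ANDROID_LENS_STATE reports the lens as stationary, so callers never read focus or zoom values sampled while the lens is still moving. A condensed restatement of the gate (the helper function is hypothetical; the enum and span types come from the cros camera mojom and base/containers/span.h):

#include "base/containers/span.h"
// cros::mojom::AndroidLensState is generated from the camera metadata mojom.

bool ShouldFlushGetPhotoStateQueue(
    uint32_t result_frame_number,
    uint32_t frame_number_for_photo_state,
    base::span<const uint8_t> lens_state) {
  // The result must post-date the last photo-state change...
  if (result_frame_number <= frame_number_for_photo_state)
    return false;
  // ...and the lens must report exactly one state entry: STATIONARY.
  return lens_state.size() == 1 &&
         static_cast<cros::mojom::AndroidLensState>(lens_state[0]) ==
             cros::mojom::AndroidLensState::ANDROID_LENS_STATE_STATIONARY;
}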
diff --git a/chromium/media/capture/video/chromeos/camera_device_delegate_unittest.cc b/chromium/media/capture/video/chromeos/camera_device_delegate_unittest.cc
index 9d47ddc1b62..1c3b7a154c5 100644
--- a/chromium/media/capture/video/chromeos/camera_device_delegate_unittest.cc
+++ b/chromium/media/capture/video/chromeos/camera_device_delegate_unittest.cc
@@ -46,6 +46,9 @@ class MockCameraDevice : public cros::mojom::Camera3DeviceOps {
public:
MockCameraDevice() = default;
+ MockCameraDevice(const MockCameraDevice&) = delete;
+ MockCameraDevice& operator=(const MockCameraDevice&) = delete;
+
~MockCameraDevice() = default;
void Initialize(
@@ -111,9 +114,6 @@ class MockCameraDevice : public cros::mojom::Camera3DeviceOps {
MOCK_METHOD2(DoConfigureStreamsAndGetAllocatedBuffers,
void(cros::mojom::Camera3StreamConfigurationPtr& config,
ConfigureStreamsAndGetAllocatedBuffersCallback& callback));
-
- private:
- DISALLOW_COPY_AND_ASSIGN(MockCameraDevice);
};
constexpr int32_t kJpegMaxBufferSize = 1024;
@@ -465,7 +465,7 @@ class CameraDeviceDelegateTest : public ::testing::Test {
device_closed->Signal();
},
base::Unretained(&device_closed))));
- base::TimeDelta kWaitTimeoutSecs = base::TimeDelta::FromSeconds(3);
+ base::TimeDelta kWaitTimeoutSecs = base::Seconds(3);
EXPECT_TRUE(device_closed.TimedWait(kWaitTimeoutSecs));
EXPECT_EQ(CameraDeviceContext::State::kStopped, GetState());
}
@@ -616,7 +616,7 @@ TEST_F(CameraDeviceDelegateTest, StopBeforeOpened) {
base::BindOnce(&base::WaitableEvent::Signal,
base::Unretained(&device_closed))));
stop_posted.Signal();
- EXPECT_TRUE(device_closed.TimedWait(base::TimeDelta::FromSeconds(3)));
+ EXPECT_TRUE(device_closed.TimedWait(base::Seconds(3)));
EXPECT_EQ(CameraDeviceContext::State::kStopped, GetState());
ResetDevice();
diff --git a/chromium/media/capture/video/chromeos/camera_hal_delegate.cc b/chromium/media/capture/video/chromeos/camera_hal_delegate.cc
index 0fbc5f75ca4..c3b6a00f739 100644
--- a/chromium/media/capture/video/chromeos/camera_hal_delegate.cc
+++ b/chromium/media/capture/video/chromeos/camera_hal_delegate.cc
@@ -36,8 +36,7 @@ namespace {
constexpr int32_t kDefaultFps = 30;
constexpr char kVirtualPrefix[] = "VIRTUAL_";
-constexpr base::TimeDelta kEventWaitTimeoutSecs =
- base::TimeDelta::FromSeconds(1);
+constexpr base::TimeDelta kEventWaitTimeoutSecs = base::Seconds(1);
class LocalCameraClientObserver : public CameraClientObserver {
public:
diff --git a/chromium/media/capture/video/chromeos/camera_hal_dispatcher_impl.cc b/chromium/media/capture/video/chromeos/camera_hal_dispatcher_impl.cc
index 5825b3a1a7e..07ade5dbc04 100644
--- a/chromium/media/capture/video/chromeos/camera_hal_dispatcher_impl.cc
+++ b/chromium/media/capture/video/chromeos/camera_hal_dispatcher_impl.cc
@@ -49,6 +49,10 @@ const base::FilePath::CharType kForceEnableAePath[] =
"/run/camera/force_enable_face_ae";
const base::FilePath::CharType kForceDisableAePath[] =
"/run/camera/force_disable_face_ae";
+const base::FilePath::CharType kForceEnableHdrNetPath[] =
+ "/run/camera/force_enable_hdrnet";
+const base::FilePath::CharType kForceDisableHdrNetPath[] =
+ "/run/camera/force_disable_hdrnet";
std::string GenerateRandomToken() {
char random_bytes[16];
@@ -174,24 +178,50 @@ bool CameraHalDispatcherImpl::Start(
TRACE_EVENT0("camera", "CameraHalDispatcherImpl");
base::trace_event::TraceLog::GetInstance()->AddEnabledStateObserver(this);
- base::FilePath enable_file_path(kForceEnableAePath);
- base::FilePath disable_file_path(kForceDisableAePath);
- if (!base::DeleteFile(enable_file_path)) {
- LOG(WARNING) << "Could not delete " << kForceEnableAePath;
- }
- if (!base::DeleteFile(disable_file_path)) {
- LOG(WARNING) << "Could not delete " << kForceDisableAePath;
- }
- const base::CommandLine* command_line =
- base::CommandLine::ForCurrentProcess();
- if (command_line->HasSwitch(media::switches::kForceControlFaceAe)) {
- if (command_line->GetSwitchValueASCII(
- media::switches::kForceControlFaceAe) == "enable") {
- base::File file(enable_file_path, base::File::FLAG_CREATE_ALWAYS);
- file.Close();
- } else {
- base::File file(disable_file_path, base::File::FLAG_CREATE_ALWAYS);
- file.Close();
+ {
+ base::FilePath enable_file_path(kForceEnableAePath);
+ base::FilePath disable_file_path(kForceDisableAePath);
+ if (!base::DeleteFile(enable_file_path)) {
+ LOG(WARNING) << "Could not delete " << kForceEnableAePath;
+ }
+ if (!base::DeleteFile(disable_file_path)) {
+ LOG(WARNING) << "Could not delete " << kForceDisableAePath;
+ }
+ const base::CommandLine* command_line =
+ base::CommandLine::ForCurrentProcess();
+ if (command_line->HasSwitch(media::switches::kForceControlFaceAe)) {
+ if (command_line->GetSwitchValueASCII(
+ media::switches::kForceControlFaceAe) == "enable") {
+ base::File file(enable_file_path, base::File::FLAG_CREATE_ALWAYS);
+ file.Close();
+ } else {
+ base::File file(disable_file_path, base::File::FLAG_CREATE_ALWAYS);
+ file.Close();
+ }
+ }
+ }
+
+ {
+ base::FilePath enable_file_path(kForceEnableHdrNetPath);
+ base::FilePath disable_file_path(kForceDisableHdrNetPath);
+ if (!base::DeleteFile(enable_file_path)) {
+ LOG(WARNING) << "Could not delete " << kForceEnableHdrNetPath;
+ }
+ if (!base::DeleteFile(disable_file_path)) {
+ LOG(WARNING) << "Could not delete " << kForceDisableHdrNetPath;
+ }
+ const base::CommandLine* command_line =
+ base::CommandLine::ForCurrentProcess();
+ if (command_line->HasSwitch(media::switches::kHdrNetOverride)) {
+ std::string value =
+ command_line->GetSwitchValueASCII(switches::kHdrNetOverride);
+ if (value == switches::kHdrNetForceEnabled) {
+ base::File file(enable_file_path, base::File::FLAG_CREATE_ALWAYS);
+ file.Close();
+ } else if (value == switches::kHdrNetForceDisabled) {
+ base::File file(disable_file_path, base::File::FLAG_CREATE_ALWAYS);
+ file.Close();
+ }
}
}
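The added block reuses the marker-file pattern already applied to force-control-face-ae: stale markers are deleted on startup, and --hdrnet-override=force-enabled or force-disabled recreates exactly one of /run/camera/force_enable_hdrnet or /run/camera/force_disable_hdrnet for the camera HAL to read. A trimmed sketch of the pattern (the helper is hypothetical and generalizes the HdrNet branch; the base file and command-line calls are the ones used in the hunk):

#include <string>

#include "base/command_line.h"
#include "base/files/file.h"
#include "base/files/file_path.h"
#include "base/files/file_util.h"

void ApplyOverrideSwitch(const base::CommandLine& command_line,
                         const char* switch_name,
                         const base::FilePath& enable_path,
                         const base::FilePath& disable_path,
                         const std::string& enable_value,
                         const std::string& disable_value) {
  // Start from a clean slate so a previous run's choice does not linger.
  base::DeleteFile(enable_path);
  base::DeleteFile(disable_path);
  if (!command_line.HasSwitch(switch_name))
    return;
  const std::string value = command_line.GetSwitchValueASCII(switch_name);
  // Touch exactly one marker file; the HAL inspects these paths on startup.
  if (value == enable_value)
    base::File(enable_path, base::File::FLAG_CREATE_ALWAYS).Close();
  else if (value == disable_value)
    base::File(disable_path, base::File::FLAG_CREATE_ALWAYS).Close();
}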
diff --git a/chromium/media/capture/video/chromeos/camera_hal_dispatcher_impl_unittest.cc b/chromium/media/capture/video/chromeos/camera_hal_dispatcher_impl_unittest.cc
index f7b0265c2de..f8308a056aa 100644
--- a/chromium/media/capture/video/chromeos/camera_hal_dispatcher_impl_unittest.cc
+++ b/chromium/media/capture/video/chromeos/camera_hal_dispatcher_impl_unittest.cc
@@ -32,6 +32,9 @@ class MockCameraHalServer : public cros::mojom::CameraHalServer {
public:
MockCameraHalServer() = default;
+ MockCameraHalServer(const MockCameraHalServer&) = delete;
+ MockCameraHalServer& operator=(const MockCameraHalServer&) = delete;
+
~MockCameraHalServer() = default;
void CreateChannel(
@@ -52,13 +55,15 @@ class MockCameraHalServer : public cros::mojom::CameraHalServer {
private:
mojo::Receiver<cros::mojom::CameraHalServer> receiver_{this};
- DISALLOW_COPY_AND_ASSIGN(MockCameraHalServer);
};
class MockCameraHalClient : public cros::mojom::CameraHalClient {
public:
MockCameraHalClient() = default;
+ MockCameraHalClient(const MockCameraHalClient&) = delete;
+ MockCameraHalClient& operator=(const MockCameraHalClient&) = delete;
+
~MockCameraHalClient() = default;
void SetUpChannel(
@@ -75,7 +80,6 @@ class MockCameraHalClient : public cros::mojom::CameraHalClient {
private:
mojo::Receiver<cros::mojom::CameraHalClient> receiver_{this};
- DISALLOW_COPY_AND_ASSIGN(MockCameraHalClient);
};
class MockCameraActiveClientObserver : public CameraActiveClientObserver {
@@ -95,6 +99,10 @@ class CameraHalDispatcherImplTest : public ::testing::Test {
CameraHalDispatcherImplTest()
: register_client_event_(base::WaitableEvent::ResetPolicy::AUTOMATIC) {}
+ CameraHalDispatcherImplTest(const CameraHalDispatcherImplTest&) = delete;
+ CameraHalDispatcherImplTest& operator=(const CameraHalDispatcherImplTest&) =
+ delete;
+
~CameraHalDispatcherImplTest() override = default;
void SetUp() override {
@@ -171,7 +179,6 @@ class CameraHalDispatcherImplTest : public ::testing::Test {
private:
base::test::TaskEnvironment task_environment_;
std::unique_ptr<base::RunLoop> run_loop_;
- DISALLOW_COPY_AND_ASSIGN(CameraHalDispatcherImplTest);
};
// Test that the CameraHalDisptcherImpl correctly re-establishes a Mojo channel
diff --git a/chromium/media/capture/video/chromeos/gpu_memory_buffer_tracker.h b/chromium/media/capture/video/chromeos/gpu_memory_buffer_tracker.h
index e0817af43e2..4ed216c42eb 100644
--- a/chromium/media/capture/video/chromeos/gpu_memory_buffer_tracker.h
+++ b/chromium/media/capture/video/chromeos/gpu_memory_buffer_tracker.h
@@ -19,6 +19,10 @@ class CAPTURE_EXPORT GpuMemoryBufferTracker final
: public VideoCaptureBufferTracker {
public:
GpuMemoryBufferTracker();
+
+ GpuMemoryBufferTracker(const GpuMemoryBufferTracker&) = delete;
+ GpuMemoryBufferTracker& operator=(const GpuMemoryBufferTracker&) = delete;
+
~GpuMemoryBufferTracker() override;
// Implementation of VideoCaptureBufferTracker:
@@ -37,8 +41,6 @@ class CAPTURE_EXPORT GpuMemoryBufferTracker final
private:
CameraBufferFactory buffer_factory_;
std::unique_ptr<gfx::GpuMemoryBuffer> buffer_;
-
- DISALLOW_COPY_AND_ASSIGN(GpuMemoryBufferTracker);
};
} // namespace media
diff --git a/chromium/media/capture/video/chromeos/mock_camera_module.h b/chromium/media/capture/video/chromeos/mock_camera_module.h
index 12397b61455..bdb6545d86f 100644
--- a/chromium/media/capture/video/chromeos/mock_camera_module.h
+++ b/chromium/media/capture/video/chromeos/mock_camera_module.h
@@ -26,6 +26,9 @@ class MockCameraModule : public cros::mojom::CameraModule {
public:
MockCameraModule();
+ MockCameraModule(const MockCameraModule&) = delete;
+ MockCameraModule& operator=(const MockCameraModule&) = delete;
+
~MockCameraModule();
void OpenDevice(
@@ -100,8 +103,6 @@ class MockCameraModule : public cros::mojom::CameraModule {
base::Thread mock_module_thread_;
mojo::Receiver<cros::mojom::CameraModule> receiver_{this};
mojo::AssociatedRemote<cros::mojom::CameraModuleCallbacks> callbacks_;
-
- DISALLOW_COPY_AND_ASSIGN(MockCameraModule);
};
} // namespace unittest_internal
diff --git a/chromium/media/capture/video/chromeos/mojom/BUILD.gn b/chromium/media/capture/video/chromeos/mojom/BUILD.gn
index 4dfea8a0522..c2f2fe4f670 100644
--- a/chromium/media/capture/video/chromeos/mojom/BUILD.gn
+++ b/chromium/media/capture/video/chromeos/mojom/BUILD.gn
@@ -4,13 +4,25 @@
import("//mojo/public/tools/bindings/mojom.gni")
-mojom("cros_camera") {
+mojom("cros_camera_common") {
sources = [
"camera3.mojom",
"camera_app.mojom",
"camera_common.mojom",
"camera_metadata.mojom",
"camera_metadata_tags.mojom",
+ ]
+
+ deps = [
+ "//media/capture/mojom:image_capture",
+ "//ui/gfx/geometry/mojom",
+ "//ui/gfx/range/mojom",
+ ]
+ webui_module_path = "/media/capture/video/chromeos/mojom"
+}
+
+mojom("cros_camera") {
+ sources = [
"cros_camera_client.mojom",
"cros_camera_service.mojom",
]
@@ -19,7 +31,6 @@ mojom("cros_camera") {
"//chromeos/components/sensors/mojom",
"//components/chromeos_camera/common",
"//media/capture/mojom:image_capture",
- "//ui/gfx/geometry/mojom",
- "//ui/gfx/range/mojom",
]
+ public_deps = [ ":cros_camera_common" ]
}
diff --git a/chromium/media/capture/video/chromeos/mojom/camera_app.mojom b/chromium/media/capture/video/chromeos/mojom/camera_app.mojom
index 00989dc92ed..ecb28878e32 100644
--- a/chromium/media/capture/video/chromeos/mojom/camera_app.mojom
+++ b/chromium/media/capture/video/chromeos/mojom/camera_app.mojom
@@ -90,13 +90,13 @@ interface CameraAppDevice {
// characteristics, orientation, etc.
GetCameraInfo() => (CameraInfo camera_info);
- // Sets reprocess option to bind with the coming take photo request. When this
- // method is called, the reprocess option will be queued. All reprocess
+ // Sets reprocess options to bind with the coming take photo request. When
+ // this method is called, the reprocess options will be queued. All reprocess
// options in the queue will be consumed when ImageCapture::TakePhoto() is
- // triggered and all the queued reprocess options will be bound
- // to that take photo request.
- SetReprocessOption(Effect effect)
- => (int32 status, media.mojom.Blob? blob);
+ // triggered and all the queued reprocess options will be bound to that take
+ // photo request.
+ SetReprocessOptions(array<Effect> effects,
+ pending_remote<ReprocessResultListener> listener) => ();
// Sets the fps range for upcoming configured camera stream.
// The caller sets the |fps_range|.
@@ -160,3 +160,9 @@ interface DocumentCornersObserver {
// top-right order. The value of the coordinate of a corner will be in [0, 1).
OnDocumentCornersUpdated(array<gfx.mojom.PointF> corners);
};
+
+// Interface for the listener of reprocess results.
+interface ReprocessResultListener {
+ // Triggered when reprocess done for target |effect|.
+ OnReprocessDone(Effect effect, int32 status, media.mojom.Blob? blob);
+};
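With the per-call SetReprocessOption() => (status, blob) shape replaced by a batched SetReprocessOptions() plus this listener interface, per-effect results now arrive as OnReprocessDone() notifications rather than a single callback return. A hypothetical client-side sketch of wiring up the listener (the class, method bodies, and logging are invented for illustration; the mojom types are the ones defined above):

#include "base/bind.h"
#include "base/logging.h"
#include "media/capture/video/chromeos/mojom/camera_app.mojom.h"
#include "mojo/public/cpp/bindings/receiver.h"
#include "mojo/public/cpp/bindings/remote.h"

class ReprocessResultLogger : public cros::mojom::ReprocessResultListener {
 public:
  // Queues the desired effects and registers |this| for their results.
  void Start(mojo::Remote<cros::mojom::CameraAppDevice>& device) {
    device->SetReprocessOptions(
        {cros::mojom::Effect::PORTRAIT_MODE},
        receiver_.BindNewPipeAndPassRemote(),
        base::BindOnce([] { VLOG(1) << "Reprocess options queued."; }));
  }

  // cros::mojom::ReprocessResultListener:
  void OnReprocessDone(cros::mojom::Effect effect,
                       int32_t status,
                       media::mojom::BlobPtr blob) override {
    // Called once per requested effect when its reprocess task completes;
    // |blob| is nullable.
    VLOG(1) << "Effect " << static_cast<int>(effect)
            << " finished with status " << status
            << (blob ? " (blob attached)" : " (no blob)");
  }

 private:
  mojo::Receiver<cros::mojom::ReprocessResultListener> receiver_{this};
};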
diff --git a/chromium/media/capture/video/chromeos/request_manager.cc b/chromium/media/capture/video/chromeos/request_manager.cc
index 71e0b039a57..f27b8b46cfa 100644
--- a/chromium/media/capture/video/chromeos/request_manager.cc
+++ b/chromium/media/capture/video/chromeos/request_manager.cc
@@ -775,8 +775,7 @@ void RequestManager::Notify(cros::mojom::Camera3NotifyMsgPtr message) {
pending_result.shutter_timestamp = shutter_time;
// Shutter timestamp is in ns.
base::TimeTicks reference_time =
- base::TimeTicks() +
- base::TimeDelta::FromMicroseconds(shutter_time / 1000);
+ base::TimeTicks() + base::Microseconds(shutter_time / 1000);
pending_result.reference_time = reference_time;
if (first_frame_shutter_time_.is_null()) {
// Record the shutter time of the first frame for calculating the
diff --git a/chromium/media/capture/video/chromeos/video_capture_device_chromeos_delegate.cc b/chromium/media/capture/video/chromeos/video_capture_device_chromeos_delegate.cc
index a1e7cdf555b..607a3a04949 100644
--- a/chromium/media/capture/video/chromeos/video_capture_device_chromeos_delegate.cc
+++ b/chromium/media/capture/video/chromeos/video_capture_device_chromeos_delegate.cc
@@ -152,7 +152,6 @@ void VideoCaptureDeviceChromeOSDelegate::AllocateAndStart(
std::unique_ptr<VideoCaptureDevice::Client> client,
ClientType client_type) {
DCHECK(capture_task_runner_->BelongsToCurrentThread());
- DCHECK(!camera_device_delegate_);
if (!HasDeviceClient()) {
TRACE_EVENT0("camera", "Start Device");
if (!camera_device_ipc_thread_.Start()) {
@@ -287,7 +286,7 @@ void VideoCaptureDeviceChromeOSDelegate::CloseDevice(
device_closed->Signal();
},
base::Unretained(&device_closed_))));
- base::TimeDelta kWaitTimeoutSecs = base::TimeDelta::FromSeconds(3);
+ base::TimeDelta kWaitTimeoutSecs = base::Seconds(3);
device_closed_.TimedWait(kWaitTimeoutSecs);
if (!unblock_suspend_token.is_empty())
power_manager_client_proxy_->UnblockSuspend(unblock_suspend_token);
diff --git a/chromium/media/capture/video/chromeos/video_capture_device_factory_chromeos.h b/chromium/media/capture/video/chromeos/video_capture_device_factory_chromeos.h
index 970c199e4d2..e72de356157 100644
--- a/chromium/media/capture/video/chromeos/video_capture_device_factory_chromeos.h
+++ b/chromium/media/capture/video/chromeos/video_capture_device_factory_chromeos.h
@@ -26,6 +26,11 @@ class CAPTURE_EXPORT VideoCaptureDeviceFactoryChromeOS final
scoped_refptr<base::SingleThreadTaskRunner>
task_runner_for_screen_observer);
+ VideoCaptureDeviceFactoryChromeOS(const VideoCaptureDeviceFactoryChromeOS&) =
+ delete;
+ VideoCaptureDeviceFactoryChromeOS& operator=(
+ const VideoCaptureDeviceFactoryChromeOS&) = delete;
+
~VideoCaptureDeviceFactoryChromeOS() override;
// VideoCaptureDeviceFactory interface implementations.
@@ -58,8 +63,6 @@ class CAPTURE_EXPORT VideoCaptureDeviceFactoryChromeOS final
base::WeakPtrFactory<VideoCaptureDeviceFactoryChromeOS> weak_ptr_factory_{
this};
-
- DISALLOW_COPY_AND_ASSIGN(VideoCaptureDeviceFactoryChromeOS);
};
} // namespace media
diff --git a/chromium/media/capture/video/chromeos/video_capture_features_chromeos.cc b/chromium/media/capture/video/chromeos/video_capture_features_chromeos.cc
index 81ee4c61017..a892c838d8c 100644
--- a/chromium/media/capture/video/chromeos/video_capture_features_chromeos.cc
+++ b/chromium/media/capture/video/chromeos/video_capture_features_chromeos.cc
@@ -5,8 +5,12 @@
#include "media/capture/video/chromeos/video_capture_features_chromeos.h"
namespace media {
+
namespace switches {
+
const char kForceControlFaceAe[] = "force-control-face-ae";
+const char kHdrNetOverride[] = "hdrnet-override";
+
} // namespace switches
namespace features {
@@ -17,4 +21,5 @@ const base::Feature kDisableCameraFrameRotationAtSource{
"DisableCameraFrameRotationAtSource", base::FEATURE_DISABLED_BY_DEFAULT};
} // namespace features
+
} // namespace media
diff --git a/chromium/media/capture/video/chromeos/video_capture_features_chromeos.h b/chromium/media/capture/video/chromeos/video_capture_features_chromeos.h
index c8745cb2d7e..4170ee32f6a 100644
--- a/chromium/media/capture/video/chromeos/video_capture_features_chromeos.h
+++ b/chromium/media/capture/video/chromeos/video_capture_features_chromeos.h
@@ -11,7 +11,13 @@
namespace media {
namespace switches {
+
CAPTURE_EXPORT extern const char kForceControlFaceAe[];
+
+CAPTURE_EXPORT extern const char kHdrNetOverride[];
+constexpr char kHdrNetForceEnabled[] = "force-enabled";
+constexpr char kHdrNetForceDisabled[] = "force-disabled";
+
} // namespace switches
namespace features {
@@ -19,6 +25,7 @@ namespace features {
CAPTURE_EXPORT extern const base::Feature kDisableCameraFrameRotationAtSource;
} // namespace features
+
} // namespace media
#endif // MEDIA_CAPTURE_VIDEO_CHROMEOS_VIDEO_CAPTURE_FEATURES_CHROMEOS_H_
diff --git a/chromium/media/capture/video/chromeos/video_capture_jpeg_decoder_impl.h b/chromium/media/capture/video/chromeos/video_capture_jpeg_decoder_impl.h
index fbc3a41d95e..9f464ea1c30 100644
--- a/chromium/media/capture/video/chromeos/video_capture_jpeg_decoder_impl.h
+++ b/chromium/media/capture/video/chromeos/video_capture_jpeg_decoder_impl.h
@@ -43,6 +43,11 @@ class CAPTURE_EXPORT VideoCaptureJpegDecoderImpl
scoped_refptr<base::SequencedTaskRunner> decoder_task_runner,
DecodeDoneCB decode_done_cb,
base::RepeatingCallback<void(const std::string&)> send_log_message_cb);
+
+ VideoCaptureJpegDecoderImpl(const VideoCaptureJpegDecoderImpl&) = delete;
+ VideoCaptureJpegDecoderImpl& operator=(const VideoCaptureJpegDecoderImpl&) =
+ delete;
+
~VideoCaptureJpegDecoderImpl() override;
// Implementation of VideoCaptureJpegDecoder:
@@ -106,8 +111,6 @@ class CAPTURE_EXPORT VideoCaptureJpegDecoderImpl
SEQUENCE_CHECKER(sequence_checker_);
base::WeakPtrFactory<VideoCaptureJpegDecoderImpl> weak_ptr_factory_{this};
-
- DISALLOW_COPY_AND_ASSIGN(VideoCaptureJpegDecoderImpl);
};
} // namespace media
diff --git a/chromium/media/capture/video/fake_video_capture_device.cc b/chromium/media/capture/video/fake_video_capture_device.cc
index 19ab4306af4..608f28606e0 100644
--- a/chromium/media/capture/video/fake_video_capture_device.cc
+++ b/chromium/media/capture/video/fake_video_capture_device.cc
@@ -859,10 +859,9 @@ void GpuMemoryBufferFrameDeliverer::PaintAndDeliverNextFrame(
void FakeVideoCaptureDevice::BeepAndScheduleNextCapture(
base::TimeTicks expected_execution_time) {
DCHECK(thread_checker_.CalledOnValidThread());
- const base::TimeDelta beep_interval =
- base::TimeDelta::FromMilliseconds(kBeepInterval);
+ const base::TimeDelta beep_interval = base::Milliseconds(kBeepInterval);
const base::TimeDelta frame_interval =
- base::TimeDelta::FromMicroseconds(1e6 / device_state_->format.frame_rate);
+ base::Microseconds(1e6 / device_state_->format.frame_rate);
beep_time_ += frame_interval;
elapsed_time_ += frame_interval;
diff --git a/chromium/media/capture/video/fake_video_capture_device.h b/chromium/media/capture/video/fake_video_capture_device.h
index a393717aba3..6de494f144f 100644
--- a/chromium/media/capture/video/fake_video_capture_device.h
+++ b/chromium/media/capture/video/fake_video_capture_device.h
@@ -69,6 +69,10 @@ class FakeVideoCaptureDevice : public VideoCaptureDevice {
std::unique_ptr<FrameDelivererFactory> frame_deliverer_factory,
std::unique_ptr<FakePhotoDevice> photo_device,
std::unique_ptr<FakeDeviceState> device_state);
+
+ FakeVideoCaptureDevice(const FakeVideoCaptureDevice&) = delete;
+ FakeVideoCaptureDevice& operator=(const FakeVideoCaptureDevice&) = delete;
+
~FakeVideoCaptureDevice() override;
static void GetSupportedSizes(std::vector<gfx::Size>* supported_sizes);
@@ -103,8 +107,6 @@ class FakeVideoCaptureDevice : public VideoCaptureDevice {
// FakeVideoCaptureDevice post tasks to itself for frame construction and
// needs to deal with asynchronous StopAndDeallocate().
base::WeakPtrFactory<FakeVideoCaptureDevice> weak_factory_{this};
-
- DISALLOW_COPY_AND_ASSIGN(FakeVideoCaptureDevice);
};
// Represents the current state of a FakeVideoCaptureDevice.
diff --git a/chromium/media/capture/video/file_video_capture_device.cc b/chromium/media/capture/video/file_video_capture_device.cc
index 539247e64ef..f91e8517734 100644
--- a/chromium/media/capture/video/file_video_capture_device.cc
+++ b/chromium/media/capture/video/file_video_capture_device.cc
@@ -158,6 +158,9 @@ class Y4mFileParser final : public VideoFileParser {
public:
explicit Y4mFileParser(const base::FilePath& file_path);
+ Y4mFileParser(const Y4mFileParser&) = delete;
+ Y4mFileParser& operator=(const Y4mFileParser&) = delete;
+
// VideoFileParser implementation, class methods.
~Y4mFileParser() override;
bool Initialize(VideoCaptureFormat* capture_format) override;
@@ -166,14 +169,15 @@ class Y4mFileParser final : public VideoFileParser {
private:
std::unique_ptr<base::File> file_;
std::unique_ptr<uint8_t[]> video_frame_;
-
- DISALLOW_COPY_AND_ASSIGN(Y4mFileParser);
};
class MjpegFileParser final : public VideoFileParser {
public:
explicit MjpegFileParser(const base::FilePath& file_path);
+ MjpegFileParser(const MjpegFileParser&) = delete;
+ MjpegFileParser& operator=(const MjpegFileParser&) = delete;
+
// VideoFileParser implementation, class methods.
~MjpegFileParser() override;
bool Initialize(VideoCaptureFormat* capture_format) override;
@@ -181,8 +185,6 @@ class MjpegFileParser final : public VideoFileParser {
private:
std::unique_ptr<base::MemoryMappedFile> mapped_file_;
-
- DISALLOW_COPY_AND_ASSIGN(MjpegFileParser);
};
VideoFileParser::VideoFileParser(const base::FilePath& file_path)
@@ -372,7 +374,7 @@ std::unique_ptr<uint8_t[]> FileVideoCaptureDevice::CropPTZRegion(
frame_buffer_size =
VideoFrame::AllocationSize(PIXEL_FORMAT_I420, frame_size);
*final_pixel_format = PIXEL_FORMAT_I420;
- ABSL_FALLTHROUGH_INTENDED;
+ FALLTHROUGH;
case PIXEL_FORMAT_I420:
fourcc = libyuv::FOURCC_I420;
break;
@@ -716,7 +718,7 @@ void FileVideoCaptureDevice::OnCaptureTask() {
// Reschedule next CaptureTask.
const base::TimeDelta frame_interval =
- base::TimeDelta::FromMicroseconds(1E6 / capture_format_.frame_rate);
+ base::Microseconds(1E6 / capture_format_.frame_rate);
if (next_frame_time_.is_null()) {
next_frame_time_ = current_time + frame_interval;
} else {
diff --git a/chromium/media/capture/video/file_video_capture_device.h b/chromium/media/capture/video/file_video_capture_device.h
index 63bdaaa9fcb..b66951725e7 100644
--- a/chromium/media/capture/video/file_video_capture_device.h
+++ b/chromium/media/capture/video/file_video_capture_device.h
@@ -48,6 +48,9 @@ class CAPTURE_EXPORT FileVideoCaptureDevice : public VideoCaptureDevice {
const base::FilePath& file_path,
std::unique_ptr<gpu::GpuMemoryBufferSupport> gmb_support = nullptr);
+ FileVideoCaptureDevice(const FileVideoCaptureDevice&) = delete;
+ FileVideoCaptureDevice& operator=(const FileVideoCaptureDevice&) = delete;
+
// VideoCaptureDevice implementation, class methods.
~FileVideoCaptureDevice() override;
void AllocateAndStart(
@@ -123,8 +126,6 @@ class CAPTURE_EXPORT FileVideoCaptureDevice : public VideoCaptureDevice {
// on the main thread and |capture_thread_|.
base::Lock lock_;
base::queue<TakePhotoCallback> take_photo_callbacks_;
-
- DISALLOW_COPY_AND_ASSIGN(FileVideoCaptureDevice);
};
} // namespace media
diff --git a/chromium/media/capture/video/file_video_capture_device_factory.cc b/chromium/media/capture/video/file_video_capture_device_factory.cc
index e318860c5cb..7c47a23f4f9 100644
--- a/chromium/media/capture/video/file_video_capture_device_factory.cc
+++ b/chromium/media/capture/video/file_video_capture_device_factory.cc
@@ -43,7 +43,7 @@ std::unique_ptr<VideoCaptureDevice> FileVideoCaptureDeviceFactory::CreateDevice(
void FileVideoCaptureDeviceFactory::GetDevicesInfo(
GetDevicesInfoCallback callback) {
DCHECK(thread_checker_.CalledOnValidThread());
- base::ThreadRestrictions::SetIOAllowed(true);
+ base::ScopedAllowBlocking allow_blocking;
std::vector<VideoCaptureDeviceInfo> devices_info;
diff --git a/chromium/media/capture/video/file_video_capture_device_unittest.cc b/chromium/media/capture/video/file_video_capture_device_unittest.cc
index a92f5729fb1..5712cbca709 100644
--- a/chromium/media/capture/video/file_video_capture_device_unittest.cc
+++ b/chromium/media/capture/video/file_video_capture_device_unittest.cc
@@ -26,7 +26,7 @@ namespace media {
namespace {
-const base::TimeDelta kWaitTimeoutSecs = base::TimeDelta::FromSeconds(3);
+const base::TimeDelta kWaitTimeoutSecs = base::Seconds(3);
class MockImageCaptureClient {
public:
diff --git a/chromium/media/capture/video/fuchsia/DIR_METADATA b/chromium/media/capture/video/fuchsia/DIR_METADATA
new file mode 100644
index 00000000000..210aa6a954b
--- /dev/null
+++ b/chromium/media/capture/video/fuchsia/DIR_METADATA
@@ -0,0 +1 @@
+mixins: "//build/fuchsia/COMMON_METADATA"
diff --git a/chromium/media/capture/video/fuchsia/video_capture_device_fuchsia_test.cc b/chromium/media/capture/video/fuchsia/video_capture_device_fuchsia_test.cc
index ab0c85710b4..63484483d97 100644
--- a/chromium/media/capture/video/fuchsia/video_capture_device_fuchsia_test.cc
+++ b/chromium/media/capture/video/fuchsia/video_capture_device_fuchsia_test.cc
@@ -313,8 +313,7 @@ TEST_F(VideoCaptureDeviceFuchsiaTest, MultipleFrames) {
for (size_t i = 0; i < 10; ++i) {
ASSERT_TRUE(stream->WaitFreeBuffer());
- auto frame_timestamp =
- start_timestamp + base::TimeDelta::FromMilliseconds(i * 16);
+ auto frame_timestamp = start_timestamp + base::Milliseconds(i * 16);
stream->ProduceFrame(frame_timestamp, i);
client_->WaitFrame();
diff --git a/chromium/media/capture/video/linux/fake_v4l2_impl.cc b/chromium/media/capture/video/linux/fake_v4l2_impl.cc
index 0a3c15f1d94..245d11ac0d6 100644
--- a/chromium/media/capture/video/linux/fake_v4l2_impl.cc
+++ b/chromium/media/capture/video/linux/fake_v4l2_impl.cc
@@ -26,6 +26,8 @@ static const int kErrorReturnValue = -1;
static const uint32_t kMaxBufferCount = 5;
static const int kDefaultWidth = 640;
static const int kDefaultHeight = 480;
+static const unsigned int kMaxWidth = 3840;
+static const unsigned int kMaxHeight = 2160;
// 20 fps.
static const int kDefaultFrameInternvalNumerator = 50;
@@ -98,7 +100,7 @@ class FakeV4L2Impl::OpenedDevice {
}
}
return wait_for_outgoing_queue_event_.TimedWait(
- base::TimeDelta::FromMilliseconds(timeout_in_milliseconds));
+ base::Milliseconds(timeout_in_milliseconds));
}
int enum_fmt(v4l2_fmtdesc* fmtdesc) {
@@ -159,8 +161,11 @@ class FakeV4L2Impl::OpenedDevice {
}
int s_fmt(v4l2_format* format) {
- if (format->type != V4L2_BUF_TYPE_VIDEO_CAPTURE)
+ if (format->type != V4L2_BUF_TYPE_VIDEO_CAPTURE ||
+ format->fmt.pix.width > kMaxWidth ||
+ format->fmt.pix.height > kMaxHeight) {
return EINVAL;
+ }
v4l2_pix_format& pix_format = format->fmt.pix;
// We only support YUV420 output for now. Tell this to the client by
// overwriting whatever format it requested.
@@ -337,7 +342,7 @@ class FakeV4L2Impl::OpenedDevice {
// Sleep for a bit.
// We ignore the requested frame rate here, and just sleep for a fixed
// duration.
- base::PlatformThread::Sleep(base::TimeDelta::FromMilliseconds(100));
+ base::PlatformThread::Sleep(base::Milliseconds(100));
}
}
diff --git a/chromium/media/capture/video/linux/v4l2_capture_delegate.cc b/chromium/media/capture/video/linux/v4l2_capture_delegate.cc
index 26e634863f6..3a0ef104d4b 100644
--- a/chromium/media/capture/video/linux/v4l2_capture_delegate.cc
+++ b/chromium/media/capture/video/linux/v4l2_capture_delegate.cc
@@ -974,7 +974,9 @@ void V4L2CaptureDelegate::DoCapture() {
bool V4L2CaptureDelegate::StopStream() {
DCHECK(v4l2_task_runner_->BelongsToCurrentThread());
- DCHECK(is_capturing_);
+ if (!is_capturing_)
+ return false;
+
is_capturing_ = false;
// The order is important: stop streaming, clear |buffer_pool_|,
@@ -1003,7 +1005,6 @@ void V4L2CaptureDelegate::SetErrorState(VideoCaptureError error,
const base::Location& from_here,
const std::string& reason) {
DCHECK(v4l2_task_runner_->BelongsToCurrentThread());
- is_capturing_ = false;
client_->OnError(error, from_here, reason);
}
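Taken together, the two hunks above make StopStream() the only place that clears is_capturing_: an error report no longer flips the flag behind the stream's back, and a redundant StopStream() call now returns false instead of tripping a DCHECK. A reduced sketch of the resulting control flow (a toy class, not the real delegate, which also stops streaming and releases its buffer pool at this point):

#include <string>

class StreamController {
 public:
  // Returns false when there is nothing to stop, e.g. after an error that was
  // reported without tearing the stream down.
  bool StopStream() {
    if (!is_capturing_)
      return false;
    is_capturing_ = false;
    // The real delegate issues VIDIOC_STREAMOFF and clears its buffers here.
    return true;
  }

  // Reporting an error leaves |is_capturing_| untouched; teardown remains the
  // sole responsibility of StopStream().
  void SetErrorState(const std::string& reason) { last_error_ = reason; }

 private:
  bool is_capturing_ = true;
  std::string last_error_;
};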
diff --git a/chromium/media/capture/video/linux/v4l2_capture_delegate.h b/chromium/media/capture/video/linux/v4l2_capture_delegate.h
index 8d8c262d9f8..1fceb61e8e2 100644
--- a/chromium/media/capture/video/linux/v4l2_capture_delegate.h
+++ b/chromium/media/capture/video/linux/v4l2_capture_delegate.h
@@ -53,6 +53,10 @@ class CAPTURE_EXPORT V4L2CaptureDelegate final {
const scoped_refptr<base::SingleThreadTaskRunner>& v4l2_task_runner,
int power_line_frequency,
int rotation);
+
+ V4L2CaptureDelegate(const V4L2CaptureDelegate&) = delete;
+ V4L2CaptureDelegate& operator=(const V4L2CaptureDelegate&) = delete;
+
~V4L2CaptureDelegate();
// Forward-to versions of VideoCaptureDevice virtual methods.
@@ -133,8 +137,6 @@ class CAPTURE_EXPORT V4L2CaptureDelegate final {
int rotation_;
base::WeakPtrFactory<V4L2CaptureDelegate> weak_factory_{this};
-
- DISALLOW_COPY_AND_ASSIGN(V4L2CaptureDelegate);
};
} // namespace media
diff --git a/chromium/media/capture/video/linux/video_capture_device_factory_linux.h b/chromium/media/capture/video/linux/video_capture_device_factory_linux.h
index 63ffeef9e96..82a4275e359 100644
--- a/chromium/media/capture/video/linux/video_capture_device_factory_linux.h
+++ b/chromium/media/capture/video/linux/video_capture_device_factory_linux.h
@@ -35,6 +35,12 @@ class CAPTURE_EXPORT VideoCaptureDeviceFactoryLinux
explicit VideoCaptureDeviceFactoryLinux(
scoped_refptr<base::SingleThreadTaskRunner> ui_task_runner);
+
+ VideoCaptureDeviceFactoryLinux(const VideoCaptureDeviceFactoryLinux&) =
+ delete;
+ VideoCaptureDeviceFactoryLinux& operator=(
+ const VideoCaptureDeviceFactoryLinux&) = delete;
+
~VideoCaptureDeviceFactoryLinux() override;
void SetV4L2EnvironmentForTesting(
@@ -63,7 +69,6 @@ class CAPTURE_EXPORT VideoCaptureDeviceFactoryLinux
scoped_refptr<V4L2CaptureDevice> v4l2_;
std::unique_ptr<DeviceProvider> device_provider_;
scoped_refptr<base::SingleThreadTaskRunner> ui_task_runner_;
- DISALLOW_COPY_AND_ASSIGN(VideoCaptureDeviceFactoryLinux);
};
} // namespace media
diff --git a/chromium/media/capture/video/mac/gpu_memory_buffer_tracker_mac.h b/chromium/media/capture/video/mac/gpu_memory_buffer_tracker_mac.h
index f344a90d6d0..b95e0570262 100644
--- a/chromium/media/capture/video/mac/gpu_memory_buffer_tracker_mac.h
+++ b/chromium/media/capture/video/mac/gpu_memory_buffer_tracker_mac.h
@@ -17,6 +17,11 @@ class CAPTURE_EXPORT GpuMemoryBufferTrackerMac final
GpuMemoryBufferTrackerMac();
explicit GpuMemoryBufferTrackerMac(
base::ScopedCFTypeRef<IOSurfaceRef> io_surface);
+
+ GpuMemoryBufferTrackerMac(const GpuMemoryBufferTrackerMac&) = delete;
+ GpuMemoryBufferTrackerMac& operator=(const GpuMemoryBufferTrackerMac&) =
+ delete;
+
~GpuMemoryBufferTrackerMac() override;
// VideoCaptureBufferTracker
@@ -44,8 +49,6 @@ class CAPTURE_EXPORT GpuMemoryBufferTrackerMac final
// false. To prevent reuse while consumers are accessing the IOSurface, use
// |in_use_for_consumers_| to maintain IOSurfaceIsInUse as true.
gfx::ScopedInUseIOSurface in_use_for_consumers_;
-
- DISALLOW_COPY_AND_ASSIGN(GpuMemoryBufferTrackerMac);
};
} // namespace media
diff --git a/chromium/media/capture/video/mac/video_capture_device_avfoundation_mac.mm b/chromium/media/capture/video/mac/video_capture_device_avfoundation_mac.mm
index 81f81e30cb0..7a0e5068827 100644
--- a/chromium/media/capture/video/mac/video_capture_device_avfoundation_mac.mm
+++ b/chromium/media/capture/video/mac/video_capture_device_avfoundation_mac.mm
@@ -53,7 +53,7 @@ base::TimeDelta GetCMSampleBufferTimestamp(CMSampleBufferRef sampleBuffer) {
CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
const base::TimeDelta timestamp =
CMTIME_IS_VALID(cm_timestamp)
- ? base::TimeDelta::FromSecondsD(CMTimeGetSeconds(cm_timestamp))
+ ? base::Seconds(CMTimeGetSeconds(cm_timestamp))
: media::kNoTimestamp;
return timestamp;
}
@@ -489,7 +489,7 @@ AVCaptureDeviceFormat* FindBestCaptureFormat(
[weakSelf.get() takePhotoInternal];
},
_weakPtrFactoryForTakePhoto->GetWeakPtr()),
- base::TimeDelta::FromSeconds(3));
+ base::Seconds(3));
}
}
@@ -580,8 +580,7 @@ AVCaptureDeviceFormat* FindBestCaptureFormat(
[strongSelf stopStillImageOutput];
},
_weakPtrFactoryForTakePhoto->GetWeakPtr(), _takePhotoStartedCount),
- base::TimeDelta::FromSeconds(
- kTimeToWaitBeforeStoppingStillImageCaptureInSeconds));
+ base::Seconds(kTimeToWaitBeforeStoppingStillImageCaptureInSeconds));
}
- (void)stopStillImageOutput {
@@ -868,8 +867,7 @@ AVCaptureDeviceFormat* FindBestCaptureFormat(
_weakPtrFactoryForStallCheck = std::make_unique<
base::WeakPtrFactory<VideoCaptureDeviceAVFoundation>>(self);
}
- constexpr base::TimeDelta kStallCheckInterval =
- base::TimeDelta::FromSeconds(1);
+ constexpr base::TimeDelta kStallCheckInterval = base::Seconds(1);
auto callback_lambda =
[](base::WeakPtr<VideoCaptureDeviceAVFoundation> weakSelf,
int failedCheckCount) {
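
The base::TimeDelta changes in this file, and throughout the rest of the diff, are part of the repo-wide rename of the verbose factory functions to their shorter aliases: base::TimeDelta::FromSeconds(x) becomes base::Seconds(x), FromMilliseconds becomes base::Milliseconds, FromMicroseconds becomes base::Microseconds. The values are identical; only the spelling changes. A small sketch that would build inside a Chromium checkout of this vintage, where both spellings still coexist during the migration:

#include "base/check_op.h"
#include "base/time/time.h"

void TimeDeltaSpellings() {
  base::TimeDelta old_style = base::TimeDelta::FromMilliseconds(500);
  base::TimeDelta new_style = base::Milliseconds(500);
  CHECK_EQ(old_style, new_style);  // Same TimeDelta, shorter spelling.
}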
diff --git a/chromium/media/capture/video/mac/video_capture_device_decklink_mac.h b/chromium/media/capture/video/mac/video_capture_device_decklink_mac.h
index b4e111625bb..7c473c2a7e2 100644
--- a/chromium/media/capture/video/mac/video_capture_device_decklink_mac.h
+++ b/chromium/media/capture/video/mac/video_capture_device_decklink_mac.h
@@ -45,6 +45,11 @@ class CAPTURE_EXPORT VideoCaptureDeviceDeckLinkMac : public VideoCaptureDevice {
explicit VideoCaptureDeviceDeckLinkMac(
const VideoCaptureDeviceDescriptor& descriptor);
+
+ VideoCaptureDeviceDeckLinkMac(const VideoCaptureDeviceDeckLinkMac&) = delete;
+ VideoCaptureDeviceDeckLinkMac& operator=(
+ const VideoCaptureDeviceDeckLinkMac&) = delete;
+
~VideoCaptureDeviceDeckLinkMac() override;
// Copy of VideoCaptureDevice::Client::OnIncomingCapturedData(). Used by
@@ -86,8 +91,6 @@ class CAPTURE_EXPORT VideoCaptureDeviceDeckLinkMac : public VideoCaptureDevice {
// Checks for Device (a.k.a. Audio) thread.
base::ThreadChecker thread_checker_;
-
- DISALLOW_COPY_AND_ASSIGN(VideoCaptureDeviceDeckLinkMac);
};
} // namespace media
diff --git a/chromium/media/capture/video/mac/video_capture_device_decklink_mac.mm b/chromium/media/capture/video/mac/video_capture_device_decklink_mac.mm
index 6b64c488044..8354e8b8a8a 100644
--- a/chromium/media/capture/video/mac/video_capture_device_decklink_mac.mm
+++ b/chromium/media/capture/video/mac/video_capture_device_decklink_mac.mm
@@ -289,7 +289,7 @@ HRESULT DeckLinkCaptureDelegate::VideoInputFrameArrived(
base::TimeDelta timestamp;
if (SUCCEEDED(video_frame->GetStreamTime(&frame_time, &frame_duration,
micros_time_scale))) {
- timestamp = base::TimeDelta::FromMicroseconds(frame_time);
+ timestamp = base::Microseconds(frame_time);
} else {
timestamp = now - first_ref_time_;
}
diff --git a/chromium/media/capture/video/mac/video_capture_device_factory_mac.h b/chromium/media/capture/video/mac/video_capture_device_factory_mac.h
index d26836fac30..54dd57db1cf 100644
--- a/chromium/media/capture/video/mac/video_capture_device_factory_mac.h
+++ b/chromium/media/capture/video/mac/video_capture_device_factory_mac.h
@@ -17,6 +17,11 @@ class CAPTURE_EXPORT VideoCaptureDeviceFactoryMac
: public VideoCaptureDeviceFactory {
public:
VideoCaptureDeviceFactoryMac();
+
+ VideoCaptureDeviceFactoryMac(const VideoCaptureDeviceFactoryMac&) = delete;
+ VideoCaptureDeviceFactoryMac& operator=(const VideoCaptureDeviceFactoryMac&) =
+ delete;
+
~VideoCaptureDeviceFactoryMac() override;
static void SetGetDevicesInfoRetryCount(int count);
@@ -25,8 +30,6 @@ class CAPTURE_EXPORT VideoCaptureDeviceFactoryMac
std::unique_ptr<VideoCaptureDevice> CreateDevice(
const VideoCaptureDeviceDescriptor& device_descriptor) override;
void GetDevicesInfo(GetDevicesInfoCallback callback) override;
-
- DISALLOW_COPY_AND_ASSIGN(VideoCaptureDeviceFactoryMac);
};
} // namespace media
diff --git a/chromium/media/capture/video/mac/video_capture_device_mac.h b/chromium/media/capture/video/mac/video_capture_device_mac.h
index e1538917169..5cb40e8e86d 100644
--- a/chromium/media/capture/video/mac/video_capture_device_mac.h
+++ b/chromium/media/capture/video/mac/video_capture_device_mac.h
@@ -58,6 +58,10 @@ class VideoCaptureDeviceMac
public:
explicit VideoCaptureDeviceMac(
const VideoCaptureDeviceDescriptor& device_descriptor);
+
+ VideoCaptureDeviceMac(const VideoCaptureDeviceMac&) = delete;
+ VideoCaptureDeviceMac& operator=(const VideoCaptureDeviceMac&) = delete;
+
~VideoCaptureDeviceMac() override;
// VideoCaptureDevice implementation.
@@ -132,8 +136,6 @@ class VideoCaptureDeviceMac
// VideoCaptureDeviceMac is destroyed.
// NOTE: Weak pointers must be invalidated before all other member variables.
base::WeakPtrFactory<VideoCaptureDeviceMac> weak_factory_;
-
- DISALLOW_COPY_AND_ASSIGN(VideoCaptureDeviceMac);
};
} // namespace media
diff --git a/chromium/media/capture/video/mock_gpu_memory_buffer_manager.h b/chromium/media/capture/video/mock_gpu_memory_buffer_manager.h
index d617825ef3f..9371f93f664 100644
--- a/chromium/media/capture/video/mock_gpu_memory_buffer_manager.h
+++ b/chromium/media/capture/video/mock_gpu_memory_buffer_manager.h
@@ -17,6 +17,10 @@ class MockGpuMemoryBufferManager : public gpu::GpuMemoryBufferManager {
public:
MockGpuMemoryBufferManager();
+ MockGpuMemoryBufferManager(const MockGpuMemoryBufferManager&) = delete;
+ MockGpuMemoryBufferManager& operator=(const MockGpuMemoryBufferManager&) =
+ delete;
+
~MockGpuMemoryBufferManager() override;
MOCK_METHOD5(CreateGpuMemoryBuffer,
@@ -46,9 +50,6 @@ class MockGpuMemoryBufferManager : public gpu::GpuMemoryBufferManager {
gfx::BufferUsage usage,
gpu::SurfaceHandle surface_handle,
base::WaitableEvent* shutdown_event);
-
- private:
- DISALLOW_COPY_AND_ASSIGN(MockGpuMemoryBufferManager);
};
} // namespace unittest_internal
diff --git a/chromium/media/capture/video/shared_memory_buffer_tracker.h b/chromium/media/capture/video/shared_memory_buffer_tracker.h
index 8eeda002bf4..9c87aca97a3 100644
--- a/chromium/media/capture/video/shared_memory_buffer_tracker.h
+++ b/chromium/media/capture/video/shared_memory_buffer_tracker.h
@@ -20,6 +20,11 @@ namespace media {
class SharedMemoryBufferTracker final : public VideoCaptureBufferTracker {
public:
SharedMemoryBufferTracker();
+
+ SharedMemoryBufferTracker(const SharedMemoryBufferTracker&) = delete;
+ SharedMemoryBufferTracker& operator=(const SharedMemoryBufferTracker&) =
+ delete;
+
~SharedMemoryBufferTracker() override;
// Implementation of VideoCaptureBufferTracker:
@@ -39,8 +44,6 @@ class SharedMemoryBufferTracker final : public VideoCaptureBufferTracker {
private:
base::UnsafeSharedMemoryRegion region_;
base::WritableSharedMemoryMapping mapping_;
-
- DISALLOW_COPY_AND_ASSIGN(SharedMemoryBufferTracker);
};
} // namespace media
diff --git a/chromium/media/capture/video/video_capture_device_client.h b/chromium/media/capture/video/video_capture_device_client.h
index 3205586b357..f57f2d139f8 100644
--- a/chromium/media/capture/video/video_capture_device_client.h
+++ b/chromium/media/capture/video/video_capture_device_client.h
@@ -55,6 +55,10 @@ class CAPTURE_EXPORT VideoCaptureDeviceClient
std::unique_ptr<VideoFrameReceiver> receiver,
scoped_refptr<VideoCaptureBufferPool> buffer_pool);
#endif // BUILDFLAG(IS_CHROMEOS_ASH)
+
+ VideoCaptureDeviceClient(const VideoCaptureDeviceClient&) = delete;
+ VideoCaptureDeviceClient& operator=(const VideoCaptureDeviceClient&) = delete;
+
~VideoCaptureDeviceClient() override;
static Buffer MakeBufferStruct(
@@ -144,8 +148,6 @@ class CAPTURE_EXPORT VideoCaptureDeviceClient
// concurrently. Producers are allowed to call from multiple threads, but not
// concurrently.
DFAKE_MUTEX(call_from_producer_);
-
- DISALLOW_COPY_AND_ASSIGN(VideoCaptureDeviceClient);
};
} // namespace media
diff --git a/chromium/media/capture/video/video_capture_device_client_unittest.cc b/chromium/media/capture/video/video_capture_device_client_unittest.cc
index c4a218f423e..2c7d97f2870 100644
--- a/chromium/media/capture/video/video_capture_device_client_unittest.cc
+++ b/chromium/media/capture/video/video_capture_device_client_unittest.cc
@@ -72,6 +72,11 @@ class VideoCaptureDeviceClientTest : public ::testing::Test {
buffer_pool);
#endif // BUILDFLAG(IS_CHROMEOS_ASH)
}
+
+ VideoCaptureDeviceClientTest(const VideoCaptureDeviceClientTest&) = delete;
+ VideoCaptureDeviceClientTest& operator=(const VideoCaptureDeviceClientTest&) =
+ delete;
+
~VideoCaptureDeviceClientTest() override = default;
protected:
@@ -79,9 +84,6 @@ class VideoCaptureDeviceClientTest : public ::testing::Test {
std::unique_ptr<unittest_internal::MockGpuMemoryBufferManager>
gpu_memory_buffer_manager_;
std::unique_ptr<VideoCaptureDeviceClient> device_client_;
-
- private:
- DISALLOW_COPY_AND_ASSIGN(VideoCaptureDeviceClientTest);
};
// A small test for reference and to verify VideoCaptureDeviceClient is
diff --git a/chromium/media/capture/video/video_capture_device_factory.h b/chromium/media/capture/video/video_capture_device_factory.h
index 5ff1e3748ca..a682f7f07ab 100644
--- a/chromium/media/capture/video/video_capture_device_factory.h
+++ b/chromium/media/capture/video/video_capture_device_factory.h
@@ -29,6 +29,11 @@ namespace media {
class CAPTURE_EXPORT VideoCaptureDeviceFactory {
public:
VideoCaptureDeviceFactory();
+
+ VideoCaptureDeviceFactory(const VideoCaptureDeviceFactory&) = delete;
+ VideoCaptureDeviceFactory& operator=(const VideoCaptureDeviceFactory&) =
+ delete;
+
virtual ~VideoCaptureDeviceFactory();
// Creates a VideoCaptureDevice object. Returns NULL if something goes wrong.
@@ -45,9 +50,6 @@ class CAPTURE_EXPORT VideoCaptureDeviceFactory {
protected:
base::ThreadChecker thread_checker_;
-
- private:
- DISALLOW_COPY_AND_ASSIGN(VideoCaptureDeviceFactory);
};
} // namespace media
diff --git a/chromium/media/capture/video/video_capture_device_unittest.cc b/chromium/media/capture/video/video_capture_device_unittest.cc
index dcdb16d4c0b..1e6d5082695 100644
--- a/chromium/media/capture/video/video_capture_device_unittest.cc
+++ b/chromium/media/capture/video/video_capture_device_unittest.cc
@@ -284,11 +284,8 @@ class VideoCaptureDeviceTest
local_gpu_memory_buffer_manager_.get());
if (media::ShouldUseCrosCameraService() &&
!CameraHalDispatcherImpl::GetInstance()->IsStarted()) {
- CameraHalDispatcherImpl::GetInstance()->Start(
- base::DoNothing::Repeatedly<mojo::PendingReceiver<
- chromeos_camera::mojom::MjpegDecodeAccelerator>>(),
- base::DoNothing::Repeatedly<mojo::PendingReceiver<
- chromeos_camera::mojom::JpegEncodeAccelerator>>());
+ CameraHalDispatcherImpl::GetInstance()->Start(base::DoNothing(),
+ base::DoNothing());
}
#endif
video_capture_device_factory_ =
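
The CameraHalDispatcherImpl hunk above relies on base::DoNothing() being convertible to a callback of any signature, so the test no longer spells out base::DoNothing::Repeatedly<...>() with the full mojo receiver types. A hedged sketch of that conversion; TakesCallback is a made-up function standing in for Start():

#include "base/callback.h"
#include "base/callback_helpers.h"

void TakesCallback(base::RepeatingCallback<void(int)> callback) {
  callback.Run(42);  // Runs the supplied no-op callback.
}

void Demo() {
  // base::DoNothing() implicitly converts to the required callback type.
  TakesCallback(base::DoNothing());
}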
diff --git a/chromium/media/capture/video/video_capture_feedback.cc b/chromium/media/capture/video/video_capture_feedback.cc
index 013a5ea5aa8..3c072970548 100644
--- a/chromium/media/capture/video/video_capture_feedback.cc
+++ b/chromium/media/capture/video/video_capture_feedback.cc
@@ -107,8 +107,8 @@ VideoCaptureFeedback& VideoCaptureFeedback::RequireMapped(bool require) {
}
VideoCaptureFeedback& VideoCaptureFeedback::WithMappedSizes(
- std::vector<gfx::Size> mapped_sizes) {
- this->mapped_sizes = std::move(mapped_sizes);
+ std::vector<gfx::Size> sizes) {
+ mapped_sizes = std::move(sizes);
SortSizesDescending(mapped_sizes);
return *this;
}
diff --git a/chromium/media/capture/video/video_capture_feedback.h b/chromium/media/capture/video/video_capture_feedback.h
index 43e8bd35b57..ea687ac5a4b 100644
--- a/chromium/media/capture/video/video_capture_feedback.h
+++ b/chromium/media/capture/video/video_capture_feedback.h
@@ -48,7 +48,7 @@ struct CAPTURE_EXPORT VideoCaptureFeedback {
VideoCaptureFeedback& WithMaxFramerate(float max_framerate_fps);
VideoCaptureFeedback& WithMaxPixels(int max_pixels);
VideoCaptureFeedback& RequireMapped(bool require);
- VideoCaptureFeedback& WithMappedSizes(std::vector<gfx::Size> mapped_sizes);
+ VideoCaptureFeedback& WithMappedSizes(std::vector<gfx::Size> sizes);
// Combine constraints of two different sinks resulting in constraints fitting
// both of them.
diff --git a/chromium/media/capture/video/win/gpu_memory_buffer_tracker.h b/chromium/media/capture/video/win/gpu_memory_buffer_tracker.h
index 5093b9687a7..5fe262b9db9 100644
--- a/chromium/media/capture/video/win/gpu_memory_buffer_tracker.h
+++ b/chromium/media/capture/video/win/gpu_memory_buffer_tracker.h
@@ -24,6 +24,10 @@ class CAPTURE_EXPORT GpuMemoryBufferTracker final
public:
explicit GpuMemoryBufferTracker(
scoped_refptr<DXGIDeviceManager> dxgi_device_manager);
+
+ GpuMemoryBufferTracker(const GpuMemoryBufferTracker&) = delete;
+ GpuMemoryBufferTracker& operator=(const GpuMemoryBufferTracker&) = delete;
+
~GpuMemoryBufferTracker() override;
// Implementation of VideoCaptureBufferTracker:
@@ -48,8 +52,6 @@ class CAPTURE_EXPORT GpuMemoryBufferTracker final
gfx::Size buffer_size_;
bool CreateBufferInternal();
bool EnsureD3DDevice();
-
- DISALLOW_COPY_AND_ASSIGN(GpuMemoryBufferTracker);
};
} // namespace media
diff --git a/chromium/media/capture/video/win/sink_input_pin_win.cc b/chromium/media/capture/video/win/sink_input_pin_win.cc
index 11c19d2b9bc..2c80ba4a999 100644
--- a/chromium/media/capture/video/win/sink_input_pin_win.cc
+++ b/chromium/media/capture/video/win/sink_input_pin_win.cc
@@ -231,7 +231,7 @@ HRESULT SinkInputPin::Receive(IMediaSample* sample) {
base::TimeDelta timestamp = kNoTimestamp;
if (SUCCEEDED(sample->GetTime(&start_time, &end_time))) {
DCHECK(start_time <= end_time);
- timestamp = base::TimeDelta::FromMicroseconds(start_time / 10);
+ timestamp = base::Microseconds(start_time / 10);
}
observer_->FrameReceived(buffer, length, resulting_format_, timestamp,
diff --git a/chromium/media/capture/video/win/video_capture_device_factory_win.cc b/chromium/media/capture/video/win/video_capture_device_factory_win.cc
index 6b5c5604dad..c63a835fbc7 100644
--- a/chromium/media/capture/video/win/video_capture_device_factory_win.cc
+++ b/chromium/media/capture/video/win/video_capture_device_factory_win.cc
@@ -124,7 +124,9 @@ const char* const kModelIdsBlockedForMediaFoundation[] = {
// RBG/IR camera for Windows Hello Face Auth. See https://crbug.com/984864.
"13d3:5257",
// Acer Aspire f5-573g. See https://crbug.com/1034644.
- "0bda:57f2"};
+ "0bda:57f2",
+ // Elgato Camlink 4k
+ "0fd9:0066"};
// Use this list only for non-USB webcams.
const char* const kDisplayNamesBlockedForMediaFoundation[] = {
@@ -438,7 +440,7 @@ bool VideoCaptureDeviceFactoryWin::CreateDeviceFilterDirectShow(
for (ComPtr<IMoniker> moniker;
enum_moniker->Next(1, &moniker, nullptr) == S_OK; moniker.Reset()) {
ComPtr<IPropertyBag> prop_bag;
- HRESULT hr = moniker->BindToStorage(0, 0, IID_PPV_ARGS(&prop_bag));
+ hr = moniker->BindToStorage(0, 0, IID_PPV_ARGS(&prop_bag));
if (FAILED(hr))
continue;
diff --git a/chromium/media/capture/video/win/video_capture_device_factory_win.h b/chromium/media/capture/video/win/video_capture_device_factory_win.h
index cf61573f2b6..f62dfdb5c95 100644
--- a/chromium/media/capture/video/win/video_capture_device_factory_win.h
+++ b/chromium/media/capture/video/win/video_capture_device_factory_win.h
@@ -33,6 +33,11 @@ class CAPTURE_EXPORT VideoCaptureDeviceFactoryWin
static bool PlatformSupportsMediaFoundation();
VideoCaptureDeviceFactoryWin();
+
+ VideoCaptureDeviceFactoryWin(const VideoCaptureDeviceFactoryWin&) = delete;
+ VideoCaptureDeviceFactoryWin& operator=(const VideoCaptureDeviceFactoryWin&) =
+ delete;
+
~VideoCaptureDeviceFactoryWin() override;
std::unique_ptr<VideoCaptureDevice> CreateDevice(
@@ -104,8 +109,6 @@ class CAPTURE_EXPORT VideoCaptureDeviceFactoryWin
// For hardware acceleration in MediaFoundation capture engine
scoped_refptr<DXGIDeviceManager> dxgi_device_manager_;
base::WeakPtrFactory<VideoCaptureDeviceFactoryWin> weak_ptr_factory_{this};
-
- DISALLOW_COPY_AND_ASSIGN(VideoCaptureDeviceFactoryWin);
};
} // namespace media
diff --git a/chromium/media/capture/video/win/video_capture_device_mf_win.cc b/chromium/media/capture/video/win/video_capture_device_mf_win.cc
index 6449299c145..e279aded010 100644
--- a/chromium/media/capture/video/win/video_capture_device_mf_win.cc
+++ b/chromium/media/capture/video/win/video_capture_device_mf_win.cc
@@ -658,8 +658,7 @@ class MFVideoCallback final
base::TimeTicks reference_time(base::TimeTicks::Now());
LONGLONG raw_time_stamp = 0;
sample->GetSampleTime(&raw_time_stamp);
- base::TimeDelta timestamp =
- base::TimeDelta::FromMicroseconds(raw_time_stamp / 10);
+ base::TimeDelta timestamp = base::Microseconds(raw_time_stamp / 10);
DWORD count = 0;
sample->GetBufferCount(&count);
@@ -754,8 +753,7 @@ HRESULT VideoCaptureDeviceMFWin::ExecuteHresultCallbackWithRetries(
do {
hr = callback.Run();
if (FAILED(hr))
- base::PlatformThread::Sleep(
- base::TimeDelta::FromMilliseconds(retry_delay_in_ms_));
+ base::PlatformThread::Sleep(base::Milliseconds(retry_delay_in_ms_));
// Give up after some amount of time
} while (hr == MF_E_INVALIDREQUEST && retry_count++ < max_retry_count_);
diff --git a/chromium/media/capture/video/win/video_capture_device_mf_win_unittest.cc b/chromium/media/capture/video/win/video_capture_device_mf_win_unittest.cc
index 22114941da2..1c0eefb7e41 100644
--- a/chromium/media/capture/video/win/video_capture_device_mf_win_unittest.cc
+++ b/chromium/media/capture/video/win/video_capture_device_mf_win_unittest.cc
@@ -544,7 +544,7 @@ class MockMFCaptureEngine : public MockInterface<IMFCaptureEngine> {
.WillByDefault(Return(MF_CAPTURE_ENGINE_INITIALIZED));
// HW Cameras usually add about 500ms latency on init
ON_CALL(*this, InitEventDelay)
- .WillByDefault(Return(base::TimeDelta::FromMilliseconds(500)));
+ .WillByDefault(Return(base::Milliseconds(500)));
base::TimeDelta event_delay = InitEventDelay();
@@ -554,8 +554,8 @@ class MockMFCaptureEngine : public MockInterface<IMFCaptureEngine> {
OnInitEventGuid(), OnInitStatus()),
event_delay);
// if zero is passed ensure event fires before wait starts
- if (event_delay == base::TimeDelta::FromMilliseconds(0)) {
- base::PlatformThread::Sleep(base::TimeDelta::FromMilliseconds(200));
+ if (event_delay == base::Milliseconds(0)) {
+ base::PlatformThread::Sleep(base::Milliseconds(200));
}
return S_OK;
@@ -1363,7 +1363,7 @@ TEST_F(VideoCaptureDeviceMFWinTest, CallClientOnFireCaptureEngineInitEarly) {
return MF_CAPTURE_ENGINE_INITIALIZED;
});
EXPECT_CALL(*(engine.Get()), InitEventDelay).WillOnce([]() {
- return base::TimeDelta::FromMilliseconds(0);
+ return base::Milliseconds(0);
});
EXPECT_CALL(*(engine.Get()), OnCorrectInitializeQueued());
diff --git a/chromium/media/capture/video/win/video_capture_device_win.cc b/chromium/media/capture/video/win/video_capture_device_win.cc
index f15d263f924..81aa33a2881 100644
--- a/chromium/media/capture/video/win/video_capture_device_win.cc
+++ b/chromium/media/capture/video/win/video_capture_device_win.cc
@@ -128,15 +128,15 @@ void VideoCaptureDeviceWin::GetPinCapabilityList(
ComPtr<IAMVideoControl> video_control;
hr = capture_filter.As(&video_control);
- int count = 0, size = 0;
- hr = stream_config->GetNumberOfCapabilities(&count, &size);
+ int count = 0, byte_size = 0;
+ hr = stream_config->GetNumberOfCapabilities(&count, &byte_size);
if (FAILED(hr)) {
DLOG(ERROR) << "GetNumberOfCapabilities failed: "
<< logging::SystemErrorCodeToString(hr);
return;
}
- std::unique_ptr<BYTE[]> caps(new BYTE[size]);
+ std::unique_ptr<BYTE[]> caps(new BYTE[byte_size]);
for (int i = 0; i < count; ++i) {
VideoCaptureDeviceWin::ScopedMediaType media_type;
hr = stream_config->GetStreamCaps(i, media_type.Receive(), caps.get());
diff --git a/chromium/media/cast/BUILD.gn b/chromium/media/cast/BUILD.gn
index 9a69682a21c..4178496f623 100644
--- a/chromium/media/cast/BUILD.gn
+++ b/chromium/media/cast/BUILD.gn
@@ -7,6 +7,7 @@ import("//build/config/chromeos/ui_mode.gni")
import("//build/config/features.gni")
import("//build/config/ui.gni")
import("//testing/test.gni")
+import("//third_party/libaom/options.gni")
import("//third_party/protobuf/proto_library.gni")
proto_library("logging_proto") {
@@ -148,10 +149,10 @@ source_set("sender") {
"sender/video_frame_factory.h",
"sender/video_sender.cc",
"sender/video_sender.h",
- "sender/vp8_encoder.cc",
- "sender/vp8_encoder.h",
- "sender/vp8_quantizer_parser.cc",
- "sender/vp8_quantizer_parser.h",
+ "sender/vpx_encoder.cc",
+ "sender/vpx_encoder.h",
+ "sender/vpx_quantizer_parser.cc",
+ "sender/vpx_quantizer_parser.h",
]
deps = [
@@ -160,6 +161,7 @@ source_set("sender") {
"//base",
"//media",
"//media/capture:capture_base",
+ "//third_party/libaom:libaom_buildflags",
"//third_party/libvpx",
"//third_party/opus",
"//ui/gfx/geometry",
@@ -180,6 +182,15 @@ source_set("sender") {
"VideoToolbox.framework",
]
}
+
+ if (enable_libaom) {
+ sources += [
+ "sender/av1_encoder.cc",
+ "sender/av1_encoder.h",
+ ]
+
+ deps += [ "//third_party/libaom" ]
+ }
}
source_set("test_receiver") {
@@ -318,7 +329,7 @@ test("cast_unittests") {
"sender/fake_video_encode_accelerator_factory.h",
"sender/video_encoder_unittest.cc",
"sender/video_sender_unittest.cc",
- "sender/vp8_quantizer_parser_unittest.cc",
+ "sender/vpx_quantizer_parser_unittest.cc",
"test/end2end_unittest.cc",
"test/receiver/audio_decoder_unittest.cc",
"test/receiver/cast_message_builder_unittest.cc",
diff --git a/chromium/media/cast/DEPS b/chromium/media/cast/DEPS
index 19af2a19eca..bb6defc5bdc 100644
--- a/chromium/media/cast/DEPS
+++ b/chromium/media/cast/DEPS
@@ -3,6 +3,7 @@ include_rules = [
"+media",
"+net",
"+third_party/libyuv",
+ "+third_party/libaom",
"+third_party/zlib",
"+ui/base",
"+ui/gfx",
diff --git a/chromium/media/cast/cast_config.cc b/chromium/media/cast/cast_config.cc
index 30e6c41c74c..0087e23265d 100644
--- a/chromium/media/cast/cast_config.cc
+++ b/chromium/media/cast/cast_config.cc
@@ -29,8 +29,7 @@ FrameSenderConfig::FrameSenderConfig()
// All three delays are set to the same value due to adaptive latency
// being disabled in Chrome. This will be fixed as part of the migration
// to libcast.
- min_playout_delay(
- base::TimeDelta::FromMilliseconds(kDefaultRtpMaxDelayMs)),
+ min_playout_delay(base::Milliseconds(kDefaultRtpMaxDelayMs)),
max_playout_delay(min_playout_delay),
animated_playout_delay(min_playout_delay),
rtp_payload_type(RtpPayloadType::UNKNOWN),
diff --git a/chromium/media/cast/cast_config.h b/chromium/media/cast/cast_config.h
index 9a9f84c1f1c..8b2a919739c 100644
--- a/chromium/media/cast/cast_config.h
+++ b/chromium/media/cast/cast_config.h
@@ -31,7 +31,9 @@ enum Codec {
CODEC_VIDEO_VP8,
CODEC_VIDEO_H264,
CODEC_VIDEO_REMOTE,
- CODEC_LAST = CODEC_VIDEO_REMOTE
+ CODEC_VIDEO_VP9,
+ CODEC_VIDEO_AV1,
+ CODEC_LAST = CODEC_VIDEO_AV1
};
// Describes the content being transported over RTP streams.
@@ -60,7 +62,11 @@ enum class RtpPayloadType {
// in-sequence. No assumptions about the data can be made.
REMOTE_VIDEO = 102,
- LAST = REMOTE_VIDEO
+ VIDEO_VP9 = 103,
+
+ VIDEO_AV1 = 104,
+
+ LAST = VIDEO_AV1
};
// TODO(miu): Eliminate these after moving "default config" into the top-level
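
The cast_config.h hunks append VP9 and AV1 entries and bump the CODEC_LAST and RtpPayloadType::LAST sentinels to match. The hunk itself does not show how the sentinels are consumed, but the convention they follow is the usual one: LAST marks the highest valid value so range checks and array sizing stay correct as entries are appended. A generic illustration with stand-in names (ToyCodec and IsKnownCodec are not Chromium symbols):

enum ToyCodec {
  TOY_CODEC_UNKNOWN,
  TOY_CODEC_VP8,
  TOY_CODEC_VP9,
  TOY_CODEC_AV1,
  TOY_CODEC_LAST = TOY_CODEC_AV1  // Must track the newest appended entry.
};

bool IsKnownCodec(int value) {
  return value >= TOY_CODEC_UNKNOWN && value <= TOY_CODEC_LAST;
}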
diff --git a/chromium/media/cast/cast_sender_impl.h b/chromium/media/cast/cast_sender_impl.h
index df50674851b..5ceafeb35b4 100644
--- a/chromium/media/cast/cast_sender_impl.h
+++ b/chromium/media/cast/cast_sender_impl.h
@@ -35,6 +35,9 @@ class CastSenderImpl final : public CastSender {
void SetTargetPlayoutDelay(base::TimeDelta new_target_playout_delay) final;
+ CastSenderImpl(const CastSenderImpl&) = delete;
+ CastSenderImpl& operator=(const CastSenderImpl&) = delete;
+
~CastSenderImpl() final;
scoped_refptr<AudioFrameInput> audio_frame_input() final;
@@ -58,8 +61,6 @@ class CastSenderImpl final : public CastSender {
// NOTE: Weak pointers must be invalidated before all other member variables.
base::WeakPtrFactory<CastSenderImpl> weak_factory_{this};
-
- DISALLOW_COPY_AND_ASSIGN(CastSenderImpl);
};
} // namespace cast
diff --git a/chromium/media/cast/common/clock_drift_smoother.cc b/chromium/media/cast/common/clock_drift_smoother.cc
index 97eec39d5e7..f1cd0bb7ca0 100644
--- a/chromium/media/cast/common/clock_drift_smoother.cc
+++ b/chromium/media/cast/common/clock_drift_smoother.cc
@@ -23,8 +23,7 @@ ClockDriftSmoother::~ClockDriftSmoother() = default;
base::TimeDelta ClockDriftSmoother::Current() const {
DCHECK(!last_update_time_.is_null());
- return base::TimeDelta::FromMicroseconds(
- base::ClampRound<int64_t>(estimate_us_));
+ return base::Microseconds(base::ClampRound<int64_t>(estimate_us_));
}
void ClockDriftSmoother::Reset(base::TimeTicks now,
@@ -52,7 +51,7 @@ void ClockDriftSmoother::Update(base::TimeTicks now,
// static
base::TimeDelta ClockDriftSmoother::GetDefaultTimeConstant() {
- return base::TimeDelta::FromSeconds(30);
+ return base::Seconds(30);
}
} // namespace cast
diff --git a/chromium/media/cast/common/rtp_time.cc b/chromium/media/cast/common/rtp_time.cc
index 1fb1b9b71c6..28fc3a05e91 100644
--- a/chromium/media/cast/common/rtp_time.cc
+++ b/chromium/media/cast/common/rtp_time.cc
@@ -20,7 +20,7 @@ base::TimeDelta TicksToTimeDelta(int64_t ticks, int timebase) {
base::Time::kMicrosecondsPerSecond +
0.5 /* rounding */;
DCHECK_LT(micros, static_cast<double>(std::numeric_limits<int64_t>::max()));
- return base::TimeDelta::FromMicroseconds(static_cast<int64_t>(micros));
+ return base::Microseconds(static_cast<int64_t>(micros));
}
// Returns the tick count in the given timebase nearest to the base::TimeDelta.
diff --git a/chromium/media/cast/common/rtp_time_unittest.cc b/chromium/media/cast/common/rtp_time_unittest.cc
index cdbd10f9cef..3841215d2ab 100644
--- a/chromium/media/cast/common/rtp_time_unittest.cc
+++ b/chromium/media/cast/common/rtp_time_unittest.cc
@@ -23,48 +23,45 @@ TEST(RtpTimeDeltaTest, ConversionToAndFromTimeDelta) {
// Conversions that are exact (i.e., do not require rounding).
ASSERT_EQ(RtpTimeDelta::FromTicks(480),
- RtpTimeDelta::FromTimeDelta(base::TimeDelta::FromMilliseconds(10),
- kTimebase));
- ASSERT_EQ(
- RtpTimeDelta::FromTicks(96000),
- RtpTimeDelta::FromTimeDelta(base::TimeDelta::FromSeconds(2), kTimebase));
- ASSERT_EQ(base::TimeDelta::FromMilliseconds(10),
+ RtpTimeDelta::FromTimeDelta(base::Milliseconds(10), kTimebase));
+ ASSERT_EQ(RtpTimeDelta::FromTicks(96000),
+ RtpTimeDelta::FromTimeDelta(base::Seconds(2), kTimebase));
+ ASSERT_EQ(base::Milliseconds(10),
RtpTimeDelta::FromTicks(480).ToTimeDelta(kTimebase));
- ASSERT_EQ(base::TimeDelta::FromSeconds(2),
+ ASSERT_EQ(base::Seconds(2),
RtpTimeDelta::FromTicks(96000).ToTimeDelta(kTimebase));
// Conversions that are approximate (i.e., are rounded).
for (int error_us = -3; error_us <= +3; ++error_us) {
ASSERT_EQ(RtpTimeDelta::FromTicks(0),
- RtpTimeDelta::FromTimeDelta(
- base::TimeDelta::FromMicroseconds(0 + error_us), kTimebase));
+ RtpTimeDelta::FromTimeDelta(base::Microseconds(0 + error_us),
+ kTimebase));
ASSERT_EQ(RtpTimeDelta::FromTicks(1),
- RtpTimeDelta::FromTimeDelta(
- base::TimeDelta::FromMicroseconds(21 + error_us), kTimebase));
+ RtpTimeDelta::FromTimeDelta(base::Microseconds(21 + error_us),
+ kTimebase));
ASSERT_EQ(RtpTimeDelta::FromTicks(2),
- RtpTimeDelta::FromTimeDelta(
- base::TimeDelta::FromMicroseconds(42 + error_us), kTimebase));
+ RtpTimeDelta::FromTimeDelta(base::Microseconds(42 + error_us),
+ kTimebase));
ASSERT_EQ(RtpTimeDelta::FromTicks(3),
- RtpTimeDelta::FromTimeDelta(
- base::TimeDelta::FromMicroseconds(63 + error_us), kTimebase));
+ RtpTimeDelta::FromTimeDelta(base::Microseconds(63 + error_us),
+ kTimebase));
ASSERT_EQ(RtpTimeDelta::FromTicks(4),
+ RtpTimeDelta::FromTimeDelta(base::Microseconds(83 + error_us),
+ kTimebase));
+ ASSERT_EQ(RtpTimeDelta::FromTicks(11200000000000),
RtpTimeDelta::FromTimeDelta(
- base::TimeDelta::FromMicroseconds(83 + error_us), kTimebase));
- ASSERT_EQ(
- RtpTimeDelta::FromTicks(11200000000000),
- RtpTimeDelta::FromTimeDelta(base::TimeDelta::FromMicroseconds(
- INT64_C(233333333333333) + error_us),
- kTimebase));
+ base::Microseconds(INT64_C(233333333333333) + error_us),
+ kTimebase));
}
- ASSERT_EQ(base::TimeDelta::FromMicroseconds(21),
+ ASSERT_EQ(base::Microseconds(21),
RtpTimeDelta::FromTicks(1).ToTimeDelta(kTimebase));
- ASSERT_EQ(base::TimeDelta::FromMicroseconds(42),
+ ASSERT_EQ(base::Microseconds(42),
RtpTimeDelta::FromTicks(2).ToTimeDelta(kTimebase));
- ASSERT_EQ(base::TimeDelta::FromMicroseconds(63),
+ ASSERT_EQ(base::Microseconds(63),
RtpTimeDelta::FromTicks(3).ToTimeDelta(kTimebase));
- ASSERT_EQ(base::TimeDelta::FromMicroseconds(83),
+ ASSERT_EQ(base::Microseconds(83),
RtpTimeDelta::FromTicks(4).ToTimeDelta(kTimebase));
- ASSERT_EQ(base::TimeDelta::FromMicroseconds(INT64_C(233333333333333)),
+ ASSERT_EQ(base::Microseconds(INT64_C(233333333333333)),
RtpTimeDelta::FromTicks(11200000000000).ToTimeDelta(kTimebase));
}
diff --git a/chromium/media/cast/common/transport_encryption_handler.h b/chromium/media/cast/common/transport_encryption_handler.h
index ab3667cdc8c..6643060b641 100644
--- a/chromium/media/cast/common/transport_encryption_handler.h
+++ b/chromium/media/cast/common/transport_encryption_handler.h
@@ -27,6 +27,11 @@ namespace cast {
class TransportEncryptionHandler {
public:
TransportEncryptionHandler();
+
+ TransportEncryptionHandler(const TransportEncryptionHandler&) = delete;
+ TransportEncryptionHandler& operator=(const TransportEncryptionHandler&) =
+ delete;
+
~TransportEncryptionHandler();
bool Initialize(const std::string& aes_key, const std::string& aes_iv_mask);
@@ -46,8 +51,6 @@ class TransportEncryptionHandler {
std::unique_ptr<crypto::Encryptor> encryptor_;
std::string iv_mask_;
bool is_activated_;
-
- DISALLOW_COPY_AND_ASSIGN(TransportEncryptionHandler);
};
} // namespace cast
diff --git a/chromium/media/cast/logging/encoding_event_subscriber.h b/chromium/media/cast/logging/encoding_event_subscriber.h
index 289db10844f..614303f2012 100644
--- a/chromium/media/cast/logging/encoding_event_subscriber.h
+++ b/chromium/media/cast/logging/encoding_event_subscriber.h
@@ -57,6 +57,9 @@ class EncodingEventSubscriber final : public RawEventSubscriber {
// timestamp).
EncodingEventSubscriber(EventMediaType event_media_type, size_t max_frames);
+ EncodingEventSubscriber(const EncodingEventSubscriber&) = delete;
+ EncodingEventSubscriber& operator=(const EncodingEventSubscriber&) = delete;
+
~EncodingEventSubscriber() final;
// RawReventSubscriber implementations.
@@ -126,8 +129,6 @@ class EncodingEventSubscriber final : public RawEventSubscriber {
// Set to RTP timestamp of first event encountered after a |Reset()|.
RtpTimeTicks first_rtp_timestamp_;
-
- DISALLOW_COPY_AND_ASSIGN(EncodingEventSubscriber);
};
} // namespace cast
diff --git a/chromium/media/cast/logging/encoding_event_subscriber_unittest.cc b/chromium/media/cast/logging/encoding_event_subscriber_unittest.cc
index 8334ff9696e..427d0eb9b1e 100644
--- a/chromium/media/cast/logging/encoding_event_subscriber_unittest.cc
+++ b/chromium/media/cast/logging/encoding_event_subscriber_unittest.cc
@@ -283,7 +283,7 @@ TEST_F(EncodingEventSubscriberTest, FrameEventDelay) {
playout_event->media_type = AUDIO_EVENT;
playout_event->rtp_timestamp = rtp_timestamp;
playout_event->frame_id = FrameId::first();
- playout_event->delay_delta = base::TimeDelta::FromMilliseconds(delay_ms);
+ playout_event->delay_delta = base::Milliseconds(delay_ms);
cast_environment_->logger()->DispatchFrameEvent(std::move(playout_event));
GetEventsAndReset();
@@ -365,10 +365,10 @@ TEST_F(EncodingEventSubscriberTest, MultipleFrameEvents) {
playout_event->media_type = AUDIO_EVENT;
playout_event->rtp_timestamp = rtp_timestamp1;
playout_event->frame_id = FrameId::first();
- playout_event->delay_delta = base::TimeDelta::FromMilliseconds(100);
+ playout_event->delay_delta = base::Milliseconds(100);
cast_environment_->logger()->DispatchFrameEvent(std::move(playout_event));
- task_runner_->Sleep(base::TimeDelta::FromMilliseconds(20));
+ task_runner_->Sleep(base::Milliseconds(20));
base::TimeTicks now2(testing_clock_.NowTicks());
std::unique_ptr<FrameEvent> encode_event(new FrameEvent());
encode_event->timestamp = now2;
@@ -381,7 +381,7 @@ TEST_F(EncodingEventSubscriberTest, MultipleFrameEvents) {
encode_event->idealized_bitrate_utilization = 0.55;
cast_environment_->logger()->DispatchFrameEvent(std::move(encode_event));
- task_runner_->Sleep(base::TimeDelta::FromMilliseconds(20));
+ task_runner_->Sleep(base::Milliseconds(20));
base::TimeTicks now3(testing_clock_.NowTicks());
std::unique_ptr<FrameEvent> decode_event(new FrameEvent());
decode_event->timestamp = now3;
@@ -493,7 +493,7 @@ TEST_F(EncodingEventSubscriberTest, MultiplePacketEventsForPacket) {
send_event->size = size;
cast_environment_->logger()->DispatchPacketEvent(std::move(send_event));
- task_runner_->Sleep(base::TimeDelta::FromMilliseconds(20));
+ task_runner_->Sleep(base::Milliseconds(20));
base::TimeTicks now2(testing_clock_.NowTicks());
std::unique_ptr<PacketEvent> retransmit_event(new PacketEvent());
retransmit_event->timestamp = now2;
@@ -548,7 +548,7 @@ TEST_F(EncodingEventSubscriberTest, MultiplePacketEventsForFrame) {
send_event->size = size;
cast_environment_->logger()->DispatchPacketEvent(std::move(send_event));
- task_runner_->Sleep(base::TimeDelta::FromMilliseconds(20));
+ task_runner_->Sleep(base::Milliseconds(20));
base::TimeTicks now2(testing_clock_.NowTicks());
std::unique_ptr<PacketEvent> retransmit_event(new PacketEvent());
retransmit_event->timestamp = now2;
@@ -609,7 +609,7 @@ TEST_F(EncodingEventSubscriberTest, MultiplePacketEvents) {
send_event->size = size;
cast_environment_->logger()->DispatchPacketEvent(std::move(send_event));
- task_runner_->Sleep(base::TimeDelta::FromMilliseconds(20));
+ task_runner_->Sleep(base::Milliseconds(20));
base::TimeTicks now2(testing_clock_.NowTicks());
std::unique_ptr<PacketEvent> retransmit_event(new PacketEvent());
retransmit_event->timestamp = now2;
@@ -764,7 +764,7 @@ TEST_F(EncodingEventSubscriberTest, MaxEventsPerProto) {
ack_event->frame_id = FrameId::first();
cast_environment_->logger()->DispatchFrameEvent(std::move(ack_event));
- task_runner_->Sleep(base::TimeDelta::FromMilliseconds(30));
+ task_runner_->Sleep(base::Milliseconds(30));
}
GetEventsAndReset();
@@ -789,7 +789,7 @@ TEST_F(EncodingEventSubscriberTest, MaxEventsPerProto) {
send_event->size = 123;
cast_environment_->logger()->DispatchPacketEvent(std::move(send_event));
- task_runner_->Sleep(base::TimeDelta::FromMilliseconds(30));
+ task_runner_->Sleep(base::Milliseconds(30));
}
GetEventsAndReset();
@@ -823,7 +823,7 @@ TEST_F(EncodingEventSubscriberTest, MaxEventsPerProto) {
send_event->size = 123;
cast_environment_->logger()->DispatchPacketEvent(std::move(send_event));
- task_runner_->Sleep(base::TimeDelta::FromMilliseconds(30));
+ task_runner_->Sleep(base::Milliseconds(30));
}
GetEventsAndReset();
diff --git a/chromium/media/cast/logging/log_event_dispatcher.h b/chromium/media/cast/logging/log_event_dispatcher.h
index 901dc39975d..b4ab5ead38b 100644
--- a/chromium/media/cast/logging/log_event_dispatcher.h
+++ b/chromium/media/cast/logging/log_event_dispatcher.h
@@ -27,6 +27,9 @@ class LogEventDispatcher {
// |env| outlives this instance (and generally owns this instance).
explicit LogEventDispatcher(CastEnvironment* env);
+ LogEventDispatcher(const LogEventDispatcher&) = delete;
+ LogEventDispatcher& operator=(const LogEventDispatcher&) = delete;
+
~LogEventDispatcher();
// Called on any thread to schedule the sending of event(s) to all
@@ -71,8 +74,6 @@ class LogEventDispatcher {
CastEnvironment* const env_; // Owner of this instance.
const scoped_refptr<Impl> impl_;
-
- DISALLOW_COPY_AND_ASSIGN(LogEventDispatcher);
};
} // namespace cast
diff --git a/chromium/media/cast/logging/raw_event_subscriber_bundle.h b/chromium/media/cast/logging/raw_event_subscriber_bundle.h
index bb9b9719d76..d36a431fec6 100644
--- a/chromium/media/cast/logging/raw_event_subscriber_bundle.h
+++ b/chromium/media/cast/logging/raw_event_subscriber_bundle.h
@@ -33,6 +33,12 @@ class RawEventSubscriberBundleForStream {
const scoped_refptr<CastEnvironment>& cast_environment,
bool is_audio,
ReceiverTimeOffsetEstimator* offset_estimator);
+
+ RawEventSubscriberBundleForStream(const RawEventSubscriberBundleForStream&) =
+ delete;
+ RawEventSubscriberBundleForStream& operator=(
+ const RawEventSubscriberBundleForStream&) = delete;
+
~RawEventSubscriberBundleForStream();
EncodingEventSubscriber* GetEncodingEventSubscriber();
@@ -42,8 +48,6 @@ class RawEventSubscriberBundleForStream {
const scoped_refptr<CastEnvironment> cast_environment_;
EncodingEventSubscriber event_subscriber_;
StatsEventSubscriber stats_subscriber_;
-
- DISALLOW_COPY_AND_ASSIGN(RawEventSubscriberBundleForStream);
};
// A bundle of subscribers for all streams. An instance of this object
@@ -56,6 +60,10 @@ class RawEventSubscriberBundle {
public:
explicit RawEventSubscriberBundle(
const scoped_refptr<CastEnvironment>& cast_environment);
+
+ RawEventSubscriberBundle(const RawEventSubscriberBundle&) = delete;
+ RawEventSubscriberBundle& operator=(const RawEventSubscriberBundle&) = delete;
+
~RawEventSubscriberBundle();
void AddEventSubscribers(bool is_audio);
@@ -72,12 +80,9 @@ class RawEventSubscriberBundle {
std::map<bool, std::unique_ptr<RawEventSubscriberBundleForStream>>
subscribers_;
std::unique_ptr<ReceiverTimeOffsetEstimator> receiver_offset_estimator_;
-
- DISALLOW_COPY_AND_ASSIGN(RawEventSubscriberBundle);
};
} // namespace cast
} // namespace media
#endif // MEDIA_CAST_LOGGING_RAW_EVENT_SUBSCRIBER_BUNDLE_H_
-
diff --git a/chromium/media/cast/logging/receiver_time_offset_estimator_impl.h b/chromium/media/cast/logging/receiver_time_offset_estimator_impl.h
index 9625ae266b1..3947ac9ccca 100644
--- a/chromium/media/cast/logging/receiver_time_offset_estimator_impl.h
+++ b/chromium/media/cast/logging/receiver_time_offset_estimator_impl.h
@@ -41,6 +41,11 @@ class ReceiverTimeOffsetEstimatorImpl final
public:
ReceiverTimeOffsetEstimatorImpl();
+ ReceiverTimeOffsetEstimatorImpl(const ReceiverTimeOffsetEstimatorImpl&) =
+ delete;
+ ReceiverTimeOffsetEstimatorImpl& operator=(
+ const ReceiverTimeOffsetEstimatorImpl&) = delete;
+
~ReceiverTimeOffsetEstimatorImpl() final;
// RawEventSubscriber implementations.
@@ -95,7 +100,6 @@ class ReceiverTimeOffsetEstimatorImpl final
BoundCalculator lower_bound_;
base::ThreadChecker thread_checker_;
- DISALLOW_COPY_AND_ASSIGN(ReceiverTimeOffsetEstimatorImpl);
};
} // namespace cast
diff --git a/chromium/media/cast/logging/receiver_time_offset_estimator_impl_unittest.cc b/chromium/media/cast/logging/receiver_time_offset_estimator_impl_unittest.cc
index c90a1f32477..8bd7476ca23 100644
--- a/chromium/media/cast/logging/receiver_time_offset_estimator_impl_unittest.cc
+++ b/chromium/media/cast/logging/receiver_time_offset_estimator_impl_unittest.cc
@@ -54,7 +54,7 @@ class ReceiverTimeOffsetEstimatorImplTest : public ::testing::Test {
// Then the bound after all 3 events have arrived is [130-60=70, 130-20=110].
TEST_F(ReceiverTimeOffsetEstimatorImplTest, EstimateOffset) {
int64_t true_offset_ms = 100;
- receiver_clock_.Advance(base::TimeDelta::FromMilliseconds(true_offset_ms));
+ receiver_clock_.Advance(base::Milliseconds(true_offset_ms));
base::TimeDelta lower_bound;
base::TimeDelta upper_bound;
@@ -64,7 +64,7 @@ TEST_F(ReceiverTimeOffsetEstimatorImplTest, EstimateOffset) {
const RtpTimeTicks rtp_timestamp;
FrameId frame_id = FrameId::first();
- AdvanceClocks(base::TimeDelta::FromMilliseconds(20));
+ AdvanceClocks(base::Milliseconds(20));
std::unique_ptr<FrameEvent> encode_event(new FrameEvent());
encode_event->timestamp = sender_clock_.NowTicks();
@@ -92,7 +92,7 @@ TEST_F(ReceiverTimeOffsetEstimatorImplTest, EstimateOffset) {
EXPECT_FALSE(estimator_.GetReceiverOffsetBounds(&lower_bound, &upper_bound));
- AdvanceClocks(base::TimeDelta::FromMilliseconds(10));
+ AdvanceClocks(base::Milliseconds(10));
std::unique_ptr<FrameEvent> ack_sent_event(new FrameEvent());
ack_sent_event->timestamp = receiver_clock_.NowTicks();
ack_sent_event->type = FRAME_ACK_SENT;
@@ -114,7 +114,7 @@ TEST_F(ReceiverTimeOffsetEstimatorImplTest, EstimateOffset) {
EXPECT_FALSE(estimator_.GetReceiverOffsetBounds(&lower_bound, &upper_bound));
- AdvanceClocks(base::TimeDelta::FromMilliseconds(30));
+ AdvanceClocks(base::Milliseconds(30));
std::unique_ptr<FrameEvent> ack_event(new FrameEvent());
ack_event->timestamp = sender_clock_.NowTicks();
ack_event->type = FRAME_ACK_RECEIVED;
@@ -137,7 +137,7 @@ TEST_F(ReceiverTimeOffsetEstimatorImplTest, EstimateOffset) {
// event C occurred before event B.
TEST_F(ReceiverTimeOffsetEstimatorImplTest, EventCArrivesBeforeEventB) {
int64_t true_offset_ms = 100;
- receiver_clock_.Advance(base::TimeDelta::FromMilliseconds(true_offset_ms));
+ receiver_clock_.Advance(base::Milliseconds(true_offset_ms));
base::TimeDelta lower_bound;
base::TimeDelta upper_bound;
@@ -147,7 +147,7 @@ TEST_F(ReceiverTimeOffsetEstimatorImplTest, EventCArrivesBeforeEventB) {
const RtpTimeTicks rtp_timestamp;
FrameId frame_id = FrameId::first();
- AdvanceClocks(base::TimeDelta::FromMilliseconds(20));
+ AdvanceClocks(base::Milliseconds(20));
std::unique_ptr<FrameEvent> encode_event(new FrameEvent());
encode_event->timestamp = sender_clock_.NowTicks();
@@ -175,9 +175,9 @@ TEST_F(ReceiverTimeOffsetEstimatorImplTest, EventCArrivesBeforeEventB) {
EXPECT_FALSE(estimator_.GetReceiverOffsetBounds(&lower_bound, &upper_bound));
- AdvanceClocks(base::TimeDelta::FromMilliseconds(10));
+ AdvanceClocks(base::Milliseconds(10));
base::TimeTicks event_b_time = receiver_clock_.NowTicks();
- AdvanceClocks(base::TimeDelta::FromMilliseconds(30));
+ AdvanceClocks(base::Milliseconds(30));
base::TimeTicks event_c_time = sender_clock_.NowTicks();
std::unique_ptr<FrameEvent> ack_event(new FrameEvent());
@@ -221,7 +221,7 @@ TEST_F(ReceiverTimeOffsetEstimatorImplTest, EventCArrivesBeforeEventB) {
TEST_F(ReceiverTimeOffsetEstimatorImplTest, MultipleIterations) {
int64_t true_offset_ms = 100;
- receiver_clock_.Advance(base::TimeDelta::FromMilliseconds(true_offset_ms));
+ receiver_clock_.Advance(base::Milliseconds(true_offset_ms));
base::TimeDelta lower_bound;
base::TimeDelta upper_bound;
@@ -240,7 +240,7 @@ TEST_F(ReceiverTimeOffsetEstimatorImplTest, MultipleIterations) {
// Frame 3 times: [77, 80+100, 110]
// Bound should end up at [95, 103]
// Events times in chronological order: 20, 30 x2, 50, 55, 60, 77, 80, 110
- AdvanceClocks(base::TimeDelta::FromMilliseconds(20));
+ AdvanceClocks(base::Milliseconds(20));
std::unique_ptr<FrameEvent> encode_event(new FrameEvent());
encode_event->timestamp = sender_clock_.NowTicks();
encode_event->type = FRAME_ENCODED;
@@ -265,7 +265,7 @@ TEST_F(ReceiverTimeOffsetEstimatorImplTest, MultipleIterations) {
send_event->size = 1500;
cast_environment_->logger()->DispatchPacketEvent(std::move(send_event));
- AdvanceClocks(base::TimeDelta::FromMilliseconds(10));
+ AdvanceClocks(base::Milliseconds(10));
encode_event = std::make_unique<FrameEvent>();
encode_event->timestamp = sender_clock_.NowTicks();
encode_event->type = FRAME_ENCODED;
@@ -298,7 +298,7 @@ TEST_F(ReceiverTimeOffsetEstimatorImplTest, MultipleIterations) {
ack_sent_event->frame_id = frame_id_a;
cast_environment_->logger()->DispatchFrameEvent(std::move(ack_sent_event));
- AdvanceClocks(base::TimeDelta::FromMilliseconds(20));
+ AdvanceClocks(base::Milliseconds(20));
std::unique_ptr<PacketEvent> receive_event(new PacketEvent());
receive_event->timestamp = receiver_clock_.NowTicks();
@@ -319,7 +319,7 @@ TEST_F(ReceiverTimeOffsetEstimatorImplTest, MultipleIterations) {
ack_sent_event->frame_id = frame_id_b;
cast_environment_->logger()->DispatchFrameEvent(std::move(ack_sent_event));
- AdvanceClocks(base::TimeDelta::FromMilliseconds(5));
+ AdvanceClocks(base::Milliseconds(5));
std::unique_ptr<FrameEvent> ack_event(new FrameEvent());
ack_event->timestamp = sender_clock_.NowTicks();
ack_event->type = FRAME_ACK_RECEIVED;
@@ -328,7 +328,7 @@ TEST_F(ReceiverTimeOffsetEstimatorImplTest, MultipleIterations) {
ack_event->frame_id = frame_id_b;
cast_environment_->logger()->DispatchFrameEvent(std::move(ack_event));
- AdvanceClocks(base::TimeDelta::FromMilliseconds(5));
+ AdvanceClocks(base::Milliseconds(5));
ack_event = std::make_unique<FrameEvent>();
ack_event->timestamp = sender_clock_.NowTicks();
ack_event->type = FRAME_ACK_RECEIVED;
@@ -337,7 +337,7 @@ TEST_F(ReceiverTimeOffsetEstimatorImplTest, MultipleIterations) {
ack_event->frame_id = frame_id_a;
cast_environment_->logger()->DispatchFrameEvent(std::move(ack_event));
- AdvanceClocks(base::TimeDelta::FromMilliseconds(17));
+ AdvanceClocks(base::Milliseconds(17));
encode_event = std::make_unique<FrameEvent>();
encode_event->timestamp = sender_clock_.NowTicks();
encode_event->type = FRAME_ENCODED;
@@ -362,7 +362,7 @@ TEST_F(ReceiverTimeOffsetEstimatorImplTest, MultipleIterations) {
send_event->size = 1500;
cast_environment_->logger()->DispatchPacketEvent(std::move(send_event));
- AdvanceClocks(base::TimeDelta::FromMilliseconds(3));
+ AdvanceClocks(base::Milliseconds(3));
receive_event = std::make_unique<PacketEvent>();
receive_event->timestamp = receiver_clock_.NowTicks();
receive_event->type = PACKET_RECEIVED;
@@ -382,7 +382,7 @@ TEST_F(ReceiverTimeOffsetEstimatorImplTest, MultipleIterations) {
ack_sent_event->frame_id = frame_id_c;
cast_environment_->logger()->DispatchFrameEvent(std::move(ack_sent_event));
- AdvanceClocks(base::TimeDelta::FromMilliseconds(30));
+ AdvanceClocks(base::Milliseconds(30));
ack_event = std::make_unique<FrameEvent>();
ack_event->timestamp = sender_clock_.NowTicks();
ack_event->type = FRAME_ACK_RECEIVED;
diff --git a/chromium/media/cast/logging/simple_event_subscriber.h b/chromium/media/cast/logging/simple_event_subscriber.h
index 252efde7882..c305995d4d3 100644
--- a/chromium/media/cast/logging/simple_event_subscriber.h
+++ b/chromium/media/cast/logging/simple_event_subscriber.h
@@ -23,6 +23,9 @@ class SimpleEventSubscriber final : public RawEventSubscriber {
public:
SimpleEventSubscriber();
+ SimpleEventSubscriber(const SimpleEventSubscriber&) = delete;
+ SimpleEventSubscriber& operator=(const SimpleEventSubscriber&) = delete;
+
~SimpleEventSubscriber() final;
// RawEventSubscriber implementations.
@@ -43,8 +46,6 @@ class SimpleEventSubscriber final : public RawEventSubscriber {
// All functions must be called on the main thread.
base::ThreadChecker thread_checker_;
-
- DISALLOW_COPY_AND_ASSIGN(SimpleEventSubscriber);
};
} // namespace cast
diff --git a/chromium/media/cast/logging/simple_event_subscriber_unittest.cc b/chromium/media/cast/logging/simple_event_subscriber_unittest.cc
index cee6f6564bc..a637b968289 100644
--- a/chromium/media/cast/logging/simple_event_subscriber_unittest.cc
+++ b/chromium/media/cast/logging/simple_event_subscriber_unittest.cc
@@ -60,7 +60,7 @@ TEST_F(SimpleEventSubscriberTest, GetAndResetEvents) {
playout_event->media_type = AUDIO_EVENT;
playout_event->rtp_timestamp = RtpTimeTicks().Expand(UINT32_C(100));
playout_event->frame_id = FrameId::first();
- playout_event->delay_delta = base::TimeDelta::FromMilliseconds(100);
+ playout_event->delay_delta = base::Milliseconds(100);
cast_environment_->logger()->DispatchFrameEvent(std::move(playout_event));
std::unique_ptr<FrameEvent> decode_event(new FrameEvent());
diff --git a/chromium/media/cast/logging/stats_event_subscriber.h b/chromium/media/cast/logging/stats_event_subscriber.h
index b2e2b410c54..3d66dccf14d 100644
--- a/chromium/media/cast/logging/stats_event_subscriber.h
+++ b/chromium/media/cast/logging/stats_event_subscriber.h
@@ -39,6 +39,9 @@ class StatsEventSubscriber final : public RawEventSubscriber {
const base::TickClock* clock,
ReceiverTimeOffsetEstimator* offset_estimator);
+ StatsEventSubscriber(const StatsEventSubscriber&) = delete;
+ StatsEventSubscriber& operator=(const StatsEventSubscriber&) = delete;
+
~StatsEventSubscriber() final;
// RawReventSubscriber implementations.
@@ -282,7 +285,6 @@ class StatsEventSubscriber final : public RawEventSubscriber {
HistogramMap histograms_;
base::ThreadChecker thread_checker_;
- DISALLOW_COPY_AND_ASSIGN(StatsEventSubscriber);
};
} // namespace cast
diff --git a/chromium/media/cast/logging/stats_event_subscriber_unittest.cc b/chromium/media/cast/logging/stats_event_subscriber_unittest.cc
index 689e85f6987..dd6999212ab 100644
--- a/chromium/media/cast/logging/stats_event_subscriber_unittest.cc
+++ b/chromium/media/cast/logging/stats_event_subscriber_unittest.cc
@@ -36,9 +36,8 @@ class StatsEventSubscriberTest : public ::testing::Test {
task_runner_,
task_runner_,
task_runner_)),
- fake_offset_estimator_(
- base::TimeDelta::FromSeconds(kReceiverOffsetSecs)) {
- receiver_clock_.Advance(base::TimeDelta::FromSeconds(kReceiverOffsetSecs));
+ fake_offset_estimator_(base::Seconds(kReceiverOffsetSecs)) {
+ receiver_clock_.Advance(base::Seconds(kReceiverOffsetSecs));
cast_environment_->logger()->Subscribe(&fake_offset_estimator_);
}
@@ -89,7 +88,7 @@ TEST_F(StatsEventSubscriberTest, CaptureEncode) {
cast_environment_->logger()->DispatchFrameEvent(
std::move(capture_begin_event));
- AdvanceClocks(base::TimeDelta::FromMicroseconds(10));
+ AdvanceClocks(base::Microseconds(10));
std::unique_ptr<FrameEvent> capture_end_event(new FrameEvent());
capture_end_event->timestamp = sender_clock_.NowTicks();
capture_end_event->type = FRAME_CAPTURE_END;
@@ -99,7 +98,7 @@ TEST_F(StatsEventSubscriberTest, CaptureEncode) {
std::move(capture_end_event));
if (i % 2 == 0) {
- AdvanceClocks(base::TimeDelta::FromMicroseconds(10));
+ AdvanceClocks(base::Microseconds(10));
std::unique_ptr<FrameEvent> encode_event(new FrameEvent());
encode_event->timestamp = sender_clock_.NowTicks();
encode_event->type = FRAME_ENCODED;
@@ -115,7 +114,7 @@ TEST_F(StatsEventSubscriberTest, CaptureEncode) {
} else if (i < extra_frames) {
dropped_frames++;
}
- AdvanceClocks(base::TimeDelta::FromMicroseconds(34567));
+ AdvanceClocks(base::Microseconds(34567));
rtp_timestamp += RtpTimeDelta::FromTicks(90);
frame_id++;
}
@@ -154,7 +153,7 @@ TEST_F(StatsEventSubscriberTest, Encode) {
FrameId frame_id = FrameId::first();
int num_frames = 10;
base::TimeTicks start_time = sender_clock_.NowTicks();
- AdvanceClocks(base::TimeDelta::FromMicroseconds(35678));
+ AdvanceClocks(base::Microseconds(35678));
base::TimeTicks first_event_time = sender_clock_.NowTicks();
base::TimeTicks last_event_time;
int total_size = 0;
@@ -175,7 +174,7 @@ TEST_F(StatsEventSubscriberTest, Encode) {
cast_environment_->logger()->DispatchFrameEvent(std::move(encode_event));
last_event_time = sender_clock_.NowTicks();
- AdvanceClocks(base::TimeDelta::FromMicroseconds(35678));
+ AdvanceClocks(base::Microseconds(35678));
rtp_timestamp += RtpTimeDelta::FromTicks(90);
frame_id++;
}
@@ -227,7 +226,7 @@ TEST_F(StatsEventSubscriberTest, Decode) {
decode_event->frame_id = frame_id;
cast_environment_->logger()->DispatchFrameEvent(std::move(decode_event));
- AdvanceClocks(base::TimeDelta::FromMicroseconds(36789));
+ AdvanceClocks(base::Microseconds(36789));
rtp_timestamp += RtpTimeDelta::FromTicks(90);
frame_id++;
}
@@ -252,7 +251,7 @@ TEST_F(StatsEventSubscriberTest, PlayoutDelay) {
int num_frames = 10;
int late_frames = 0;
for (int i = 0, delay_ms = -50; i < num_frames; i++, delay_ms += 10) {
- base::TimeDelta delay = base::TimeDelta::FromMilliseconds(delay_ms);
+ base::TimeDelta delay = base::Milliseconds(delay_ms);
if (delay_ms > 0)
late_frames++;
std::unique_ptr<FrameEvent> playout_event(new FrameEvent());
@@ -264,7 +263,7 @@ TEST_F(StatsEventSubscriberTest, PlayoutDelay) {
playout_event->delay_delta = delay;
cast_environment_->logger()->DispatchFrameEvent(std::move(playout_event));
- AdvanceClocks(base::TimeDelta::FromMicroseconds(37890));
+ AdvanceClocks(base::Microseconds(37890));
rtp_timestamp += RtpTimeDelta::FromTicks(90);
frame_id++;
}
@@ -295,11 +294,11 @@ TEST_F(StatsEventSubscriberTest, E2ELatency) {
std::move(capture_begin_event));
int latency_micros = 100000 + base::RandInt(-5000, 50000);
- base::TimeDelta latency = base::TimeDelta::FromMicroseconds(latency_micros);
+ base::TimeDelta latency = base::Microseconds(latency_micros);
AdvanceClocks(latency);
int delay_micros = base::RandInt(-50000, 50000);
- base::TimeDelta delay = base::TimeDelta::FromMilliseconds(delay_micros);
+ base::TimeDelta delay = base::Milliseconds(delay_micros);
total_latency += latency;
std::unique_ptr<FrameEvent> playout_event(new FrameEvent());
@@ -373,7 +372,7 @@ TEST_F(StatsEventSubscriberTest, Packets) {
total_queueing_latency += sender_clock_.NowTicks() - sender_encoded_time;
int latency_micros = 20000 + base::RandInt(-10000, 10000);
- base::TimeDelta latency = base::TimeDelta::FromMicroseconds(latency_micros);
+ base::TimeDelta latency = base::Microseconds(latency_micros);
// Latency is only recorded for packets that aren't retransmitted.
if (i % 2 != 0) {
total_network_latency += latency;
@@ -387,7 +386,7 @@ TEST_F(StatsEventSubscriberTest, Packets) {
base::TimeTicks received_time = receiver_clock_.NowTicks();
// Retransmission 1.
- AdvanceClocks(base::TimeDelta::FromMicroseconds(12345));
+ AdvanceClocks(base::Microseconds(12345));
if (i % 2 == 0) {
std::unique_ptr<PacketEvent> retransmit_event(new PacketEvent());
retransmit_event->timestamp = receiver_clock_.NowTicks();
@@ -407,7 +406,7 @@ TEST_F(StatsEventSubscriberTest, Packets) {
}
// Retransmission 2.
- AdvanceClocks(base::TimeDelta::FromMicroseconds(13456));
+ AdvanceClocks(base::Microseconds(13456));
if (i % 4 == 0) {
std::unique_ptr<PacketEvent> retransmit_event(new PacketEvent());
retransmit_event->timestamp = receiver_clock_.NowTicks();
@@ -427,7 +426,7 @@ TEST_F(StatsEventSubscriberTest, Packets) {
}
// Retransmission 3.
- AdvanceClocks(base::TimeDelta::FromMicroseconds(14567));
+ AdvanceClocks(base::Microseconds(14567));
if (i % 8 == 0) {
std::unique_ptr<PacketEvent> retransmit_event(new PacketEvent());
retransmit_event->timestamp = receiver_clock_.NowTicks();
@@ -534,7 +533,7 @@ TEST_F(StatsEventSubscriberTest, Packets) {
bool CheckHistogramHasValue(base::ListValue* values,
const std::string& bucket, int expected_count) {
- for (size_t i = 0; i < values->GetSize(); ++i) {
+ for (size_t i = 0; i < values->GetList().size(); ++i) {
const base::DictionaryValue* dict = NULL;
values->GetDictionary(i, &dict);
if (!dict->HasKey(bucket))
@@ -549,7 +548,7 @@ bool CheckHistogramHasValue(base::ListValue* values,
TEST_F(StatsEventSubscriberTest, Histograms) {
Init(VIDEO_EVENT);
- AdvanceClocks(base::TimeDelta::FromMilliseconds(123));
+ AdvanceClocks(base::Milliseconds(123));
RtpTimeTicks rtp_timestamp = RtpTimeTicks().Expand(UINT32_C(123));
FrameId frame_id = FrameId::first();
@@ -568,7 +567,7 @@ TEST_F(StatsEventSubscriberTest, Histograms) {
cast_environment_->logger()->DispatchFrameEvent(
std::move(capture_begin_event));
- AdvanceClocks(base::TimeDelta::FromMilliseconds(10));
+ AdvanceClocks(base::Milliseconds(10));
std::unique_ptr<FrameEvent> capture_end_event(new FrameEvent());
capture_end_event->timestamp = sender_clock_.NowTicks();
capture_end_event->type = FRAME_CAPTURE_END;
@@ -577,7 +576,7 @@ TEST_F(StatsEventSubscriberTest, Histograms) {
cast_environment_->logger()->DispatchFrameEvent(
std::move(capture_end_event));
- AdvanceClocks(base::TimeDelta::FromMilliseconds(15));
+ AdvanceClocks(base::Milliseconds(15));
std::unique_ptr<FrameEvent> encode_event(new FrameEvent());
encode_event->timestamp = sender_clock_.NowTicks();
encode_event->type = FRAME_ENCODED;
@@ -595,7 +594,7 @@ TEST_F(StatsEventSubscriberTest, Histograms) {
// Send 3 packets for the last frame.
// Queueing latencies are 100ms, 200ms and 300ms.
for (int i = 0; i < 3; ++i) {
- AdvanceClocks(base::TimeDelta::FromMilliseconds(100));
+ AdvanceClocks(base::Milliseconds(100));
std::unique_ptr<PacketEvent> send_event(new PacketEvent());
send_event->timestamp = sender_clock_.NowTicks();
send_event->type = PACKET_SENT_TO_NETWORK;
@@ -611,7 +610,7 @@ TEST_F(StatsEventSubscriberTest, Histograms) {
// Receive 3 packets for the last frame.
// Network latencies are 100ms, 200ms and 300ms.
// Packet latencies are 400ms.
- AdvanceClocks(base::TimeDelta::FromMilliseconds(100));
+ AdvanceClocks(base::Milliseconds(100));
for (int i = 0; i < 3; ++i) {
std::unique_ptr<PacketEvent> receive_event(new PacketEvent());
receive_event->timestamp = receiver_clock_.NowTicks();
@@ -631,7 +630,7 @@ TEST_F(StatsEventSubscriberTest, Histograms) {
playout_event->media_type = VIDEO_EVENT;
playout_event->rtp_timestamp = rtp_timestamp;
playout_event->frame_id = frame_id;
- playout_event->delay_delta = base::TimeDelta::FromMilliseconds(100);
+ playout_event->delay_delta = base::Milliseconds(100);
cast_environment_->logger()->DispatchFrameEvent(std::move(playout_event));
StatsEventSubscriber::SimpleHistogram* histogram;
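The CheckHistogramHasValue() hunk above tracks the ongoing base::Value API migration: base::ListValue::GetSize() is being phased out in favour of going through the underlying list view. A minimal sketch of the same pattern, using a hypothetical helper rather than the test code itself:

// Hypothetical helper illustrating the accessor migration (not part of the
// patch); requires base/values.h.
size_t CountHistogramBuckets(base::ListValue* values) {
  // Old spelling: return values->GetSize();
  return values->GetList().size();  // New spelling, via the list view.
}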
diff --git a/chromium/media/cast/net/cast_transport_impl.h b/chromium/media/cast/net/cast_transport_impl.h
index 12e7183c454..5c6fa889f19 100644
--- a/chromium/media/cast/net/cast_transport_impl.h
+++ b/chromium/media/cast/net/cast_transport_impl.h
@@ -60,6 +60,9 @@ class CastTransportImpl final : public CastTransport {
std::unique_ptr<PacketTransport> transport,
const scoped_refptr<base::SingleThreadTaskRunner>& transport_task_runner);
+ CastTransportImpl(const CastTransportImpl&) = delete;
+ CastTransportImpl& operator=(const CastTransportImpl&) = delete;
+
~CastTransportImpl() final;
// CastTransport implementation for sending.
@@ -184,8 +187,6 @@ class CastTransportImpl final : public CastTransport {
SessionMap sessions_;
base::WeakPtrFactory<CastTransportImpl> weak_factory_{this};
-
- DISALLOW_COPY_AND_ASSIGN(CastTransportImpl);
};
} // namespace cast
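The header change above is part of a tree-wide cleanup in this update: the DISALLOW_COPY_AND_ASSIGN macro from base/macros.h is retired in favour of explicitly deleted copy operations declared next to the constructors. A minimal sketch of what the two spellings amount to, assuming the macro's usual expansion:

// Old pattern: macro at the end of the private section.
class OldStyle {
 public:
  OldStyle() = default;

 private:
  OldStyle(const OldStyle&) = delete;             // what the macro generated
  OldStyle& operator=(const OldStyle&) = delete;  // what the macro generated
};

// New pattern used throughout media/cast in this patch: the deletions are
// written out in the public section, right after the other constructors.
class NewStyle {
 public:
  NewStyle() = default;
  NewStyle(const NewStyle&) = delete;
  NewStyle& operator=(const NewStyle&) = delete;
};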
diff --git a/chromium/media/cast/net/cast_transport_impl_unittest.cc b/chromium/media/cast/net/cast_transport_impl_unittest.cc
index 32c1e5cb596..b38905a9575 100644
--- a/chromium/media/cast/net/cast_transport_impl_unittest.cc
+++ b/chromium/media/cast/net/cast_transport_impl_unittest.cc
@@ -90,8 +90,7 @@ class CastTransportImplTest : public ::testing::Test {
protected:
CastTransportImplTest() : num_times_logging_callback_called_(0) {
- testing_clock_.Advance(
- base::TimeDelta::FromMilliseconds(kStartMillisecond));
+ testing_clock_.Advance(base::Milliseconds(kStartMillisecond));
task_runner_ = new FakeSingleThreadTaskRunner(&testing_clock_);
}
@@ -178,7 +177,7 @@ void CastTransportImplTest::InitWithOptions() {
void CastTransportImplTest::InitWithLogging() {
transport_ = new FakePacketSender();
transport_sender_ = std::make_unique<CastTransportImpl>(
- &testing_clock_, base::TimeDelta::FromMilliseconds(10),
+ &testing_clock_, base::Milliseconds(10),
std::make_unique<TransportClient>(this), base::WrapUnique(transport_),
task_runner_);
task_runner_->RunTasks();
@@ -186,20 +185,20 @@ void CastTransportImplTest::InitWithLogging() {
TEST_F(CastTransportImplTest, InitWithoutLogging) {
InitWithoutLogging();
- task_runner_->Sleep(base::TimeDelta::FromMilliseconds(50));
+ task_runner_->Sleep(base::Milliseconds(50));
EXPECT_EQ(0, num_times_logging_callback_called_);
}
TEST_F(CastTransportImplTest, InitWithOptions) {
InitWithOptions();
- task_runner_->Sleep(base::TimeDelta::FromMilliseconds(50));
+ task_runner_->Sleep(base::Milliseconds(50));
EXPECT_EQ(0, num_times_logging_callback_called_);
}
TEST_F(CastTransportImplTest, NacksCancelRetransmits) {
InitWithLogging();
InitializeVideo();
- task_runner_->Sleep(base::TimeDelta::FromMilliseconds(50));
+ task_runner_->Sleep(base::Milliseconds(50));
EXPECT_EQ(0, num_times_logging_callback_called_);
// A fake frame that will be decomposed into 4 packets.
@@ -211,7 +210,7 @@ TEST_F(CastTransportImplTest, NacksCancelRetransmits) {
fake_frame.data.resize(5000, ' ');
transport_sender_->InsertFrame(kVideoSsrc, fake_frame);
- task_runner_->Sleep(base::TimeDelta::FromMilliseconds(10));
+ task_runner_->Sleep(base::Milliseconds(10));
EXPECT_EQ(4, transport_->packets_sent());
EXPECT_EQ(1, num_times_logging_callback_called_);
@@ -223,11 +222,11 @@ TEST_F(CastTransportImplTest, NacksCancelRetransmits) {
transport_->SetPaused(true);
DedupInfo dedup_info;
- dedup_info.resend_interval = base::TimeDelta::FromMilliseconds(10);
+ dedup_info.resend_interval = base::Milliseconds(10);
transport_sender_->ResendPackets(kVideoSsrc, missing_packets, true,
dedup_info);
- task_runner_->Sleep(base::TimeDelta::FromMilliseconds(10));
+ task_runner_->Sleep(base::Milliseconds(10));
EXPECT_EQ(2, num_times_logging_callback_called_);
RtcpCastMessage cast_message;
@@ -236,7 +235,7 @@ TEST_F(CastTransportImplTest, NacksCancelRetransmits) {
cast_message.missing_frames_and_packets[fake_frame.frame_id].insert(3);
transport_sender_->OnReceivedCastMessage(kVideoSsrc, cast_message);
transport_->SetPaused(false);
- task_runner_->Sleep(base::TimeDelta::FromMilliseconds(10));
+ task_runner_->Sleep(base::Milliseconds(10));
EXPECT_EQ(3, num_times_logging_callback_called_);
// Resend one packet in the socket when unpaused.
@@ -247,7 +246,7 @@ TEST_F(CastTransportImplTest, NacksCancelRetransmits) {
TEST_F(CastTransportImplTest, CancelRetransmits) {
InitWithLogging();
InitializeVideo();
- task_runner_->Sleep(base::TimeDelta::FromMilliseconds(50));
+ task_runner_->Sleep(base::Milliseconds(50));
EXPECT_EQ(0, num_times_logging_callback_called_);
// A fake frame that will be decomposed into 4 packets.
@@ -259,7 +258,7 @@ TEST_F(CastTransportImplTest, CancelRetransmits) {
fake_frame.data.resize(5000, ' ');
transport_sender_->InsertFrame(kVideoSsrc, fake_frame);
- task_runner_->Sleep(base::TimeDelta::FromMilliseconds(10));
+ task_runner_->Sleep(base::Milliseconds(10));
EXPECT_EQ(4, transport_->packets_sent());
EXPECT_EQ(1, num_times_logging_callback_called_);
@@ -269,18 +268,18 @@ TEST_F(CastTransportImplTest, CancelRetransmits) {
transport_->SetPaused(true);
DedupInfo dedup_info;
- dedup_info.resend_interval = base::TimeDelta::FromMilliseconds(10);
+ dedup_info.resend_interval = base::Milliseconds(10);
transport_sender_->ResendPackets(kVideoSsrc, missing_packets, true,
dedup_info);
- task_runner_->Sleep(base::TimeDelta::FromMilliseconds(10));
+ task_runner_->Sleep(base::Milliseconds(10));
EXPECT_EQ(2, num_times_logging_callback_called_);
std::vector<FrameId> cancel_sending_frames;
cancel_sending_frames.push_back(fake_frame.frame_id);
transport_sender_->CancelSendingFrames(kVideoSsrc, cancel_sending_frames);
transport_->SetPaused(false);
- task_runner_->Sleep(base::TimeDelta::FromMilliseconds(10));
+ task_runner_->Sleep(base::Milliseconds(10));
EXPECT_EQ(2, num_times_logging_callback_called_);
// Resend one packet in the socket when unpaused.
@@ -290,7 +289,7 @@ TEST_F(CastTransportImplTest, CancelRetransmits) {
TEST_F(CastTransportImplTest, Kickstart) {
InitWithLogging();
InitializeVideo();
- task_runner_->Sleep(base::TimeDelta::FromMilliseconds(50));
+ task_runner_->Sleep(base::Milliseconds(50));
EXPECT_EQ(0, num_times_logging_callback_called_);
// A fake frame that will be decomposed into 4 packets.
@@ -305,7 +304,7 @@ TEST_F(CastTransportImplTest, Kickstart) {
transport_sender_->InsertFrame(kVideoSsrc, fake_frame);
transport_sender_->ResendFrameForKickstart(kVideoSsrc, fake_frame.frame_id);
transport_->SetPaused(false);
- task_runner_->Sleep(base::TimeDelta::FromMilliseconds(10));
+ task_runner_->Sleep(base::Milliseconds(10));
EXPECT_EQ(4, transport_->packets_sent());
EXPECT_EQ(1, num_times_logging_callback_called_);
@@ -316,12 +315,12 @@ TEST_F(CastTransportImplTest, Kickstart) {
transport_->SetPaused(true);
DedupInfo dedup_info;
- dedup_info.resend_interval = base::TimeDelta::FromMilliseconds(10);
+ dedup_info.resend_interval = base::Milliseconds(10);
transport_sender_->ResendPackets(kVideoSsrc, missing_packets, true,
dedup_info);
transport_sender_->ResendFrameForKickstart(kVideoSsrc, fake_frame.frame_id);
transport_->SetPaused(false);
- task_runner_->Sleep(base::TimeDelta::FromMilliseconds(10));
+ task_runner_->Sleep(base::Milliseconds(10));
EXPECT_EQ(2, num_times_logging_callback_called_);
// Resend one packet in the socket when unpaused.
@@ -333,7 +332,7 @@ TEST_F(CastTransportImplTest, DedupRetransmissionWithAudio) {
InitWithLogging();
InitializeAudio();
InitializeVideo();
- task_runner_->Sleep(base::TimeDelta::FromMilliseconds(50));
+ task_runner_->Sleep(base::Milliseconds(50));
EXPECT_EQ(0, num_times_logging_callback_called_);
// Send two audio frames.
@@ -344,11 +343,11 @@ TEST_F(CastTransportImplTest, DedupRetransmissionWithAudio) {
fake_audio.dependency = EncodedFrame::KEY;
fake_audio.data.resize(100, ' ');
transport_sender_->InsertFrame(kAudioSsrc, fake_audio);
- task_runner_->Sleep(base::TimeDelta::FromMilliseconds(2));
+ task_runner_->Sleep(base::Milliseconds(2));
fake_audio.frame_id = FrameId::first() + 2;
fake_audio.reference_time = testing_clock_.NowTicks();
transport_sender_->InsertFrame(kAudioSsrc, fake_audio);
- task_runner_->Sleep(base::TimeDelta::FromMilliseconds(2));
+ task_runner_->Sleep(base::Milliseconds(2));
EXPECT_EQ(2, transport_->packets_sent());
// Ack the first audio frame.
@@ -375,7 +374,7 @@ TEST_F(CastTransportImplTest, DedupRetransmissionWithAudio) {
cast_message.remote_ssrc = kVideoSsrc;
cast_message.ack_frame_id = FrameId::first();
cast_message.missing_frames_and_packets[fake_video.frame_id].insert(3);
- task_runner_->Sleep(base::TimeDelta::FromMilliseconds(10));
+ task_runner_->Sleep(base::Milliseconds(10));
transport_sender_->OnReceivedCastMessage(kVideoSsrc, cast_message);
task_runner_->RunTasks();
EXPECT_EQ(6, transport_->packets_sent());
@@ -385,7 +384,7 @@ TEST_F(CastTransportImplTest, DedupRetransmissionWithAudio) {
cast_message.remote_ssrc = kAudioSsrc;
cast_message.ack_frame_id = FrameId::first() + 2;
cast_message.missing_frames_and_packets.clear();
- task_runner_->Sleep(base::TimeDelta::FromMilliseconds(2));
+ task_runner_->Sleep(base::Milliseconds(2));
transport_sender_->OnReceivedCastMessage(kAudioSsrc, cast_message);
task_runner_->RunTasks();
EXPECT_EQ(6, transport_->packets_sent());
@@ -395,13 +394,13 @@ TEST_F(CastTransportImplTest, DedupRetransmissionWithAudio) {
cast_message.remote_ssrc = kVideoSsrc;
cast_message.ack_frame_id = FrameId::first() + 1;
cast_message.missing_frames_and_packets[fake_video.frame_id].insert(3);
- task_runner_->Sleep(base::TimeDelta::FromMilliseconds(2));
+ task_runner_->Sleep(base::Milliseconds(2));
transport_sender_->OnReceivedCastMessage(kVideoSsrc, cast_message);
task_runner_->RunTasks();
EXPECT_EQ(7, transport_->packets_sent());
EXPECT_EQ(1, num_times_logging_callback_called_); // Only 8 ms since last.
- task_runner_->Sleep(base::TimeDelta::FromMilliseconds(2));
+ task_runner_->Sleep(base::Milliseconds(2));
EXPECT_EQ(2, num_times_logging_callback_called_);
}
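Most of the test churn above and below is a mechanical migration of base::TimeDelta construction: the verbose static factories (base::TimeDelta::FromMilliseconds() and friends) are replaced by the terser free functions base::Milliseconds(), base::Microseconds(), base::Seconds() and base::Minutes() from base/time/time.h. The two spellings build identical values; a hedged sketch:

// Equivalent TimeDelta constructions, old and new spelling.
const base::TimeDelta old_style = base::TimeDelta::FromMilliseconds(50);
const base::TimeDelta new_style = base::Milliseconds(50);
// old_style == new_style. The new helpers are templated, so they also accept
// floating-point arguments, e.g. base::Seconds(0.5).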
diff --git a/chromium/media/cast/net/pacing/paced_sender.cc b/chromium/media/cast/net/pacing/paced_sender.cc
index 62317fc0267..09a815c7df8 100644
--- a/chromium/media/cast/net/pacing/paced_sender.cc
+++ b/chromium/media/cast/net/pacing/paced_sender.cc
@@ -334,7 +334,7 @@ void PacedSender::SendStoredPackets() {
if (now >= burst_end_ || previous_state == State_BurstFull) {
// Start a new burst.
current_burst_size_ = 0;
- burst_end_ = now + base::TimeDelta::FromMilliseconds(kPacingIntervalMs);
+ burst_end_ = now + base::Milliseconds(kPacingIntervalMs);
// The goal here is to try to send out the queued packets over the next
// three bursts, while trying to keep the burst size below 10 if possible.
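The comment above describes the pacing policy rather than the exact formula, which sits outside this hunk. A hedged sketch of the sizing idea it refers to, with hypothetical parameter names:

#include <algorithm>

// Hedged sketch only: spread the current backlog over roughly three bursts,
// but never drop below the target burst size or exceed the hard cap.
int NextBurstSizeSketch(int queued_packets, int target_burst_size,
                        int max_burst_size) {
  const int spread_over_three_bursts = (queued_packets + 2) / 3;  // ceil(n/3)
  return std::min(max_burst_size,
                  std::max(target_burst_size, spread_over_three_bursts));
}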
diff --git a/chromium/media/cast/net/pacing/paced_sender.h b/chromium/media/cast/net/pacing/paced_sender.h
index 7dedb1c62b0..1c4ef0ee276 100644
--- a/chromium/media/cast/net/pacing/paced_sender.h
+++ b/chromium/media/cast/net/pacing/paced_sender.h
@@ -109,6 +109,9 @@ class PacedSender final : public PacedPacketSender {
PacketTransport* external_transport,
const scoped_refptr<base::SingleThreadTaskRunner>& transport_task_runner);
+ PacedSender(const PacedSender&) = delete;
+ PacedSender& operator=(const PacedSender&) = delete;
+
~PacedSender() final;
// These must be called before non-RTCP packets are sent.
@@ -241,8 +244,6 @@ class PacedSender final : public PacedPacketSender {
// NOTE: Weak pointers must be invalidated before all other member variables.
base::WeakPtrFactory<PacedSender> weak_factory_{this};
-
- DISALLOW_COPY_AND_ASSIGN(PacedSender);
};
} // namespace cast
diff --git a/chromium/media/cast/net/pacing/paced_sender_unittest.cc b/chromium/media/cast/net/pacing/paced_sender_unittest.cc
index 84cb6f6244b..ad1fc808eb2 100644
--- a/chromium/media/cast/net/pacing/paced_sender_unittest.cc
+++ b/chromium/media/cast/net/pacing/paced_sender_unittest.cc
@@ -93,8 +93,7 @@ class TestPacketSender : public PacketTransport {
class PacedSenderTest : public ::testing::Test {
protected:
PacedSenderTest() {
- testing_clock_.Advance(
- base::TimeDelta::FromMilliseconds(kStartMillisecond));
+ testing_clock_.Advance(base::Milliseconds(kStartMillisecond));
task_runner_ = new FakeSingleThreadTaskRunner(&testing_clock_);
paced_sender_ = std::make_unique<PacedSender>(
kTargetBurstSize, kMaxBurstSize, &testing_clock_, &packet_events_,
@@ -115,7 +114,7 @@ class PacedSenderTest : public ::testing::Test {
base::TimeTicks frame_tick = testing_clock_.NowTicks();
// Advance the clock so that we don't get the same |frame_tick|
// next time this function is called.
- testing_clock_.Advance(base::TimeDelta::FromMilliseconds(1));
+ testing_clock_.Advance(base::Milliseconds(1));
for (int i = 0; i < num_of_packets_in_frame; ++i) {
PacketKey key(frame_tick, audio ? kAudioSsrc : kVideoSsrc,
FrameId::first(), i);
@@ -146,7 +145,7 @@ class PacedSenderTest : public ::testing::Test {
packets.begin() + i,
packets.begin() + i + std::min(packets.size() - i, kBatchSize));
ASSERT_TRUE(paced_sender_->SendPackets(next_batch));
- testing_clock_.Advance(base::TimeDelta::FromMilliseconds(10));
+ testing_clock_.Advance(base::Milliseconds(10));
task_runner_->RunTasks();
}
}
@@ -155,7 +154,7 @@ class PacedSenderTest : public ::testing::Test {
// to test the pacing implementation details.
bool RunUntilEmpty(int max_tries) {
for (int i = 0; i < max_tries; i++) {
- testing_clock_.Advance(base::TimeDelta::FromMilliseconds(10));
+ testing_clock_.Advance(base::Milliseconds(10));
task_runner_->RunTasks();
if (mock_transport_.expecting_nothing_else())
return true;
@@ -201,12 +200,12 @@ TEST_F(PacedSenderTest, BasicPace) {
// Check that we get the next burst.
mock_transport_.AddExpectedSizesAndPacketIds(kSize1, UINT16_C(10), 10);
- base::TimeDelta timeout = base::TimeDelta::FromMilliseconds(10);
+ base::TimeDelta timeout = base::Milliseconds(10);
testing_clock_.Advance(timeout);
task_runner_->RunTasks();
// If we call process too early make sure we don't send any packets.
- timeout = base::TimeDelta::FromMilliseconds(5);
+ timeout = base::Milliseconds(5);
testing_clock_.Advance(timeout);
task_runner_->RunTasks();
@@ -258,7 +257,7 @@ TEST_F(PacedSenderTest, PaceWithNack) {
// Check that we get the first NACK burst.
mock_transport_.AddExpectedSizesAndPacketIds(kNackSize, UINT16_C(0), 10);
- base::TimeDelta timeout = base::TimeDelta::FromMilliseconds(10);
+ base::TimeDelta timeout = base::Milliseconds(10);
testing_clock_.Advance(timeout);
task_runner_->RunTasks();
@@ -338,7 +337,7 @@ TEST_F(PacedSenderTest, PaceWith60fps) {
SendPacketVector fourth_frame_packets =
CreateSendPacketVector(kSize4, num_of_packets_in_frame, false);
- base::TimeDelta timeout_10ms = base::TimeDelta::FromMilliseconds(10);
+ base::TimeDelta timeout_10ms = base::Milliseconds(10);
// Check that the first burst of the frame go out on the wire.
mock_transport_.AddExpectedSizesAndPacketIds(kSize1, UINT16_C(0), 10);
@@ -348,12 +347,12 @@ TEST_F(PacedSenderTest, PaceWith60fps) {
testing_clock_.Advance(timeout_10ms);
task_runner_->RunTasks();
- testing_clock_.Advance(base::TimeDelta::FromMilliseconds(6));
+ testing_clock_.Advance(base::Milliseconds(6));
// Add second frame, after 16 ms.
mock_transport_.AddExpectedSizesAndPacketIds(kSize2, UINT16_C(0), 3);
EXPECT_TRUE(paced_sender_->SendPackets(second_frame_packets));
- testing_clock_.Advance(base::TimeDelta::FromMilliseconds(4));
+ testing_clock_.Advance(base::Milliseconds(4));
mock_transport_.AddExpectedSizesAndPacketIds(kSize2, UINT16_C(3), 10);
testing_clock_.Advance(timeout_10ms);
@@ -363,14 +362,14 @@ TEST_F(PacedSenderTest, PaceWith60fps) {
testing_clock_.Advance(timeout_10ms);
task_runner_->RunTasks();
- testing_clock_.Advance(base::TimeDelta::FromMilliseconds(3));
+ testing_clock_.Advance(base::Milliseconds(3));
// Add third frame, after 33 ms.
mock_transport_.AddExpectedSizesAndPacketIds(kSize3, UINT16_C(0), 6);
EXPECT_TRUE(paced_sender_->SendPackets(third_frame_packets));
mock_transport_.AddExpectedSizesAndPacketIds(kSize3, UINT16_C(6), 10);
- testing_clock_.Advance(base::TimeDelta::FromMilliseconds(7));
+ testing_clock_.Advance(base::Milliseconds(7));
task_runner_->RunTasks();
// Add fourth frame, after 50 ms.
@@ -412,14 +411,14 @@ TEST_F(PacedSenderTest, SendPriority) {
// Retransmission packets with the earlier timestamp.
SendPacketVector resend_packets = CreateSendPacketVector(kSize4, 10, false);
- testing_clock_.Advance(base::TimeDelta::FromMilliseconds(10));
+ testing_clock_.Advance(base::Milliseconds(10));
// Send 20 normal video packets. Only 10 will be sent in this
// call, the rest will be sitting in the queue waiting for pacing.
EXPECT_TRUE(
paced_sender_->SendPackets(CreateSendPacketVector(kSize2, 20, false)));
- testing_clock_.Advance(base::TimeDelta::FromMilliseconds(10));
+ testing_clock_.Advance(base::Milliseconds(10));
// Send normal audio packet. This is queued and will be sent
// earlier than video packets.
@@ -487,16 +486,16 @@ TEST_F(PacedSenderTest, DedupWithResendInterval) {
SendPacketVector packets = CreateSendPacketVector(kSize1, 1, true);
mock_transport_.AddExpectedSizesAndPacketIds(kSize1, UINT16_C(0), 1);
EXPECT_TRUE(paced_sender_->SendPackets(packets));
- testing_clock_.Advance(base::TimeDelta::FromMilliseconds(10));
+ testing_clock_.Advance(base::Milliseconds(10));
DedupInfo dedup_info;
- dedup_info.resend_interval = base::TimeDelta::FromMilliseconds(20);
+ dedup_info.resend_interval = base::Milliseconds(20);
// This packet will not be sent.
EXPECT_TRUE(paced_sender_->ResendPackets(packets, dedup_info));
EXPECT_EQ(static_cast<int64_t>(kSize1), mock_transport_.GetBytesSent());
- dedup_info.resend_interval = base::TimeDelta::FromMilliseconds(5);
+ dedup_info.resend_interval = base::Milliseconds(5);
mock_transport_.AddExpectedSizesAndPacketIds(kSize1, UINT16_C(0), 1);
EXPECT_TRUE(paced_sender_->ResendPackets(packets, dedup_info));
EXPECT_EQ(static_cast<int64_t>(2 * kSize1), mock_transport_.GetBytesSent());
diff --git a/chromium/media/cast/net/rtcp/receiver_rtcp_event_subscriber.h b/chromium/media/cast/net/rtcp/receiver_rtcp_event_subscriber.h
index d299698124f..3305aef6cfa 100644
--- a/chromium/media/cast/net/rtcp/receiver_rtcp_event_subscriber.h
+++ b/chromium/media/cast/net/rtcp/receiver_rtcp_event_subscriber.h
@@ -47,6 +47,10 @@ class ReceiverRtcpEventSubscriber final : public RawEventSubscriber {
ReceiverRtcpEventSubscriber(const size_t max_size_to_retain,
EventMediaType type);
+ ReceiverRtcpEventSubscriber(const ReceiverRtcpEventSubscriber&) = delete;
+ ReceiverRtcpEventSubscriber& operator=(const ReceiverRtcpEventSubscriber&) =
+ delete;
+
~ReceiverRtcpEventSubscriber() final;
// RawEventSubscriber implementation.
@@ -96,8 +100,6 @@ class ReceiverRtcpEventSubscriber final : public RawEventSubscriber {
// Ensures methods are only called on the main thread.
base::ThreadChecker thread_checker_;
-
- DISALLOW_COPY_AND_ASSIGN(ReceiverRtcpEventSubscriber);
};
} // namespace cast
diff --git a/chromium/media/cast/net/rtcp/receiver_rtcp_event_subscriber_unittest.cc b/chromium/media/cast/net/rtcp/receiver_rtcp_event_subscriber_unittest.cc
index 9345660494b..dfc4cbe5d93 100644
--- a/chromium/media/cast/net/rtcp/receiver_rtcp_event_subscriber_unittest.cc
+++ b/chromium/media/cast/net/rtcp/receiver_rtcp_event_subscriber_unittest.cc
@@ -60,7 +60,7 @@ class ReceiverRtcpEventSubscriberTest : public ::testing::Test {
playout_event->media_type = VIDEO_EVENT;
playout_event->rtp_timestamp = RtpTimeTicks().Expand(UINT32_C(100));
playout_event->frame_id = FrameId::first() + 2;
- playout_event->delay_delta = base::TimeDelta::FromMilliseconds(kDelayMs);
+ playout_event->delay_delta = base::Milliseconds(kDelayMs);
cast_environment_->logger()->DispatchFrameEvent(std::move(playout_event));
std::unique_ptr<FrameEvent> decode_event(new FrameEvent());
@@ -89,7 +89,7 @@ class ReceiverRtcpEventSubscriberTest : public ::testing::Test {
playout_event->media_type = AUDIO_EVENT;
playout_event->rtp_timestamp = RtpTimeTicks().Expand(UINT32_C(300));
playout_event->frame_id = FrameId::first() + 4;
- playout_event->delay_delta = base::TimeDelta::FromMilliseconds(kDelayMs);
+ playout_event->delay_delta = base::Milliseconds(kDelayMs);
cast_environment_->logger()->DispatchFrameEvent(std::move(playout_event));
decode_event = std::make_unique<FrameEvent>();
diff --git a/chromium/media/cast/net/rtcp/receiver_rtcp_session.cc b/chromium/media/cast/net/rtcp/receiver_rtcp_session.cc
index 3544cbd8023..547a1e733c0 100644
--- a/chromium/media/cast/net/rtcp/receiver_rtcp_session.cc
+++ b/chromium/media/cast/net/rtcp/receiver_rtcp_session.cc
@@ -109,8 +109,7 @@ bool ReceiverRtcpSession::GetLatestLipSyncTimes(
local_clock_ahead_by_.Current();
// Sanity-check: Getting regular lip sync updates?
- DCHECK((clock_->NowTicks() - local_reference_time) <
- base::TimeDelta::FromMinutes(1));
+ DCHECK((clock_->NowTicks() - local_reference_time) < base::Minutes(1));
*rtp_timestamp = lip_sync_rtp_timestamp_;
*reference_time = local_reference_time;
diff --git a/chromium/media/cast/net/rtcp/receiver_rtcp_session.h b/chromium/media/cast/net/rtcp/receiver_rtcp_session.h
index 166f91b6304..579a4d2ff13 100644
--- a/chromium/media/cast/net/rtcp/receiver_rtcp_session.h
+++ b/chromium/media/cast/net/rtcp/receiver_rtcp_session.h
@@ -25,6 +25,9 @@ class ReceiverRtcpSession : public RtcpSession {
uint32_t local_ssrc,
uint32_t remote_ssrc);
+ ReceiverRtcpSession(const ReceiverRtcpSession&) = delete;
+ ReceiverRtcpSession& operator=(const ReceiverRtcpSession&) = delete;
+
~ReceiverRtcpSession() override;
uint32_t local_ssrc() const { return local_ssrc_; }
@@ -88,8 +91,6 @@ class ReceiverRtcpSession : public RtcpSession {
// remembers state about prior RTP timestamps and other sequence values to
// re-construct "expanded" values.
RtcpParser parser_;
-
- DISALLOW_COPY_AND_ASSIGN(ReceiverRtcpSession);
};
} // namespace cast
diff --git a/chromium/media/cast/net/rtcp/rtcp_builder.h b/chromium/media/cast/net/rtcp/rtcp_builder.h
index 10bd90475ff..95c746d0599 100644
--- a/chromium/media/cast/net/rtcp/rtcp_builder.h
+++ b/chromium/media/cast/net/rtcp/rtcp_builder.h
@@ -21,6 +21,10 @@ namespace cast {
class RtcpBuilder {
public:
explicit RtcpBuilder(uint32_t sending_ssrc);
+
+ RtcpBuilder(const RtcpBuilder&) = delete;
+ RtcpBuilder& operator=(const RtcpBuilder&) = delete;
+
~RtcpBuilder();
PacketRef BuildRtcpFromSender(const RtcpSenderInfo& sender_info);
@@ -52,8 +56,6 @@ class RtcpBuilder {
const uint32_t local_ssrc_;
char* ptr_of_length_;
PacketRef packet_;
-
- DISALLOW_COPY_AND_ASSIGN(RtcpBuilder);
};
} // namespace cast
diff --git a/chromium/media/cast/net/rtcp/rtcp_builder_unittest.cc b/chromium/media/cast/net/rtcp/rtcp_builder_unittest.cc
index 8b2cc117bb9..a75410c6a2a 100644
--- a/chromium/media/cast/net/rtcp/rtcp_builder_unittest.cc
+++ b/chromium/media/cast/net/rtcp/rtcp_builder_unittest.cc
@@ -26,8 +26,7 @@ namespace cast {
namespace {
static const uint32_t kSendingSsrc = 0x12345678;
static const uint32_t kMediaSsrc = 0x87654321;
-static const base::TimeDelta kDefaultDelay =
- base::TimeDelta::FromMilliseconds(100);
+static const base::TimeDelta kDefaultDelay = base::Milliseconds(100);
RtcpReportBlock GetReportBlock() {
RtcpReportBlock report_block;
@@ -227,7 +226,7 @@ TEST_F(RtcpBuilderTest, RtcpReceiverReportWithRrtrCastMessageAndLog) {
nullptr, &rtcp_events, kDefaultDelay));
base::SimpleTestTickClock testing_clock;
- testing_clock.Advance(base::TimeDelta::FromMilliseconds(kTimeBaseMs));
+ testing_clock.Advance(base::Milliseconds(kTimeBaseMs));
p.AddReceiverLog(kSendingSsrc);
p.AddReceiverFrameLog(test_rtp_timestamp().lower_32_bits(), 2, kTimeBaseMs);
@@ -240,7 +239,7 @@ TEST_F(RtcpBuilderTest, RtcpReceiverReportWithRrtrCastMessageAndLog) {
frame_event.media_type = VIDEO_EVENT;
frame_event.timestamp = testing_clock.NowTicks();
event_subscriber.OnReceiveFrameEvent(frame_event);
- testing_clock.Advance(base::TimeDelta::FromMilliseconds(kTimeDelayMs));
+ testing_clock.Advance(base::Milliseconds(kTimeDelayMs));
PacketEvent packet_event;
packet_event.rtp_timestamp = test_rtp_timestamp();
@@ -268,7 +267,7 @@ TEST_F(RtcpBuilderTest, RtcpReceiverReportWithOversizedFrameLog) {
RtcpReportBlock report_block = GetReportBlock();
base::SimpleTestTickClock testing_clock;
- testing_clock.Advance(base::TimeDelta::FromMilliseconds(kTimeBaseMs));
+ testing_clock.Advance(base::Milliseconds(kTimeBaseMs));
p.AddReceiverLog(kSendingSsrc);
@@ -296,7 +295,7 @@ TEST_F(RtcpBuilderTest, RtcpReceiverReportWithOversizedFrameLog) {
packet_event.timestamp = testing_clock.NowTicks();
packet_event.packet_id = kLostPacketId1;
event_subscriber.OnReceivePacketEvent(packet_event);
- testing_clock.Advance(base::TimeDelta::FromMilliseconds(kTimeDelayMs));
+ testing_clock.Advance(base::Milliseconds(kTimeDelayMs));
}
ReceiverRtcpEventSubscriber::RtcpEvents rtcp_events;
@@ -318,7 +317,7 @@ TEST_F(RtcpBuilderTest, RtcpReceiverReportWithTooManyLogFrames) {
RtcpReportBlock report_block = GetReportBlock();
base::SimpleTestTickClock testing_clock;
- testing_clock.Advance(base::TimeDelta::FromMilliseconds(kTimeBaseMs));
+ testing_clock.Advance(base::Milliseconds(kTimeBaseMs));
p.AddReceiverLog(kSendingSsrc);
@@ -341,7 +340,7 @@ TEST_F(RtcpBuilderTest, RtcpReceiverReportWithTooManyLogFrames) {
frame_event.media_type = VIDEO_EVENT;
frame_event.timestamp = testing_clock.NowTicks();
event_subscriber.OnReceiveFrameEvent(frame_event);
- testing_clock.Advance(base::TimeDelta::FromMilliseconds(kTimeDelayMs));
+ testing_clock.Advance(base::Milliseconds(kTimeDelayMs));
}
ReceiverRtcpEventSubscriber::RtcpEvents rtcp_events;
@@ -362,7 +361,7 @@ TEST_F(RtcpBuilderTest, RtcpReceiverReportWithOldLogFrames) {
RtcpReportBlock report_block = GetReportBlock();
base::SimpleTestTickClock testing_clock;
- testing_clock.Advance(base::TimeDelta::FromMilliseconds(kTimeBaseMs));
+ testing_clock.Advance(base::Milliseconds(kTimeBaseMs));
p.AddReceiverLog(kSendingSsrc);
@@ -384,8 +383,7 @@ TEST_F(RtcpBuilderTest, RtcpReceiverReportWithOldLogFrames) {
frame_event.media_type = VIDEO_EVENT;
frame_event.timestamp = testing_clock.NowTicks();
event_subscriber.OnReceiveFrameEvent(frame_event);
- testing_clock.Advance(
- base::TimeDelta::FromMilliseconds(kTimeBetweenEventsMs));
+ testing_clock.Advance(base::Milliseconds(kTimeBetweenEventsMs));
}
ReceiverRtcpEventSubscriber::RtcpEvents rtcp_events;
@@ -403,7 +401,7 @@ TEST_F(RtcpBuilderTest, RtcpReceiverReportRedundancy) {
RtcpReportBlock report_block = GetReportBlock();
base::SimpleTestTickClock testing_clock;
- testing_clock.Advance(base::TimeDelta::FromMilliseconds(time_base_ms));
+ testing_clock.Advance(base::Milliseconds(time_base_ms));
ReceiverRtcpEventSubscriber event_subscriber(500, VIDEO_EVENT);
size_t packet_count = kNumResends * kResendDelay + 10;
@@ -419,9 +417,9 @@ TEST_F(RtcpBuilderTest, RtcpReceiverReportRedundancy) {
p.AddReceiverFrameLog(test_rtp_timestamp().lower_32_bits(), num_events,
time_base_ms - (num_events - 1) * kResendDelay *
kTimeBetweenEventsMs);
- for (int i = 0; i < num_events; i++) {
+ for (int event = 0; event < num_events; event++) {
p.AddReceiverEventLog(0, FRAME_ACK_SENT,
- base::checked_cast<uint16_t>(i * kResendDelay *
+ base::checked_cast<uint16_t>(event * kResendDelay *
kTimeBetweenEventsMs));
}
@@ -439,8 +437,7 @@ TEST_F(RtcpBuilderTest, RtcpReceiverReportRedundancy) {
BuildRtcpFromReceiver(&report_block, nullptr, nullptr,
nullptr, &rtcp_events, kDefaultDelay));
- testing_clock.Advance(
- base::TimeDelta::FromMilliseconds(kTimeBetweenEventsMs));
+ testing_clock.Advance(base::Milliseconds(kTimeBetweenEventsMs));
time_base_ms += kTimeBetweenEventsMs;
}
}
diff --git a/chromium/media/cast/net/rtcp/rtcp_unittest.cc b/chromium/media/cast/net/rtcp/rtcp_unittest.cc
index ed2fab8ef8e..db53457c3b1 100644
--- a/chromium/media/cast/net/rtcp/rtcp_unittest.cc
+++ b/chromium/media/cast/net/rtcp/rtcp_unittest.cc
@@ -41,7 +41,7 @@ static const uint16_t kTargetDelayMs = 100;
class FakeRtcpTransport : public PacedPacketSender {
public:
explicit FakeRtcpTransport(base::SimpleTestTickClock* clock)
- : clock_(clock), packet_delay_(base::TimeDelta::FromMilliseconds(42)) {}
+ : clock_(clock), packet_delay_(base::Milliseconds(42)) {}
void set_rtcp_destination(RtcpSession* rtcp_session) {
rtcp_session_ = rtcp_session;
@@ -92,9 +92,8 @@ class RtcpTest : public ::testing::Test, public RtcpObserver {
kSenderSsrc),
received_pli_(false) {
sender_clock_->Advance(base::TimeTicks::Now() - base::TimeTicks());
- receiver_clock_->SetSkew(
- 1.0, // No skew.
- base::TimeDelta::FromSeconds(kInitialReceiverClockOffsetSeconds));
+ receiver_clock_->SetSkew(1.0, // No skew.
+ base::Seconds(kInitialReceiverClockOffsetSeconds));
rtp_sender_pacer_.set_rtcp_destination(&rtcp_at_rtp_receiver_);
rtp_receiver_pacer_.set_rtcp_destination(&rtcp_at_rtp_sender_);
@@ -215,7 +214,7 @@ TEST_F(RtcpTest, LipSyncGleanedFromSenderReport) {
const base::TimeTicks rolled_back_time =
(reference_time -
// Roll-back relative clock offset:
- base::TimeDelta::FromSeconds(kInitialReceiverClockOffsetSeconds) -
+ base::Seconds(kInitialReceiverClockOffsetSeconds) -
// Roll-back packet transmission time (because RTT is not yet known):
rtp_sender_pacer_.packet_delay());
EXPECT_NEAR(0, (reference_time_sent - rolled_back_time).InMicroseconds(), 5);
@@ -233,7 +232,7 @@ TEST_F(RtcpTest, RoundTripTimesDeterminedFromReportPingPong) {
base::TimeDelta expected_rtt_according_to_sender;
for (int i = 0; i < iterations; ++i) {
const base::TimeDelta one_way_trip_time =
- base::TimeDelta::FromMilliseconds(static_cast<int64_t>(1) << i);
+ base::Milliseconds(static_cast<int64_t>(1) << i);
rtp_sender_pacer_.set_packet_delay(one_way_trip_time);
rtp_receiver_pacer_.set_packet_delay(one_way_trip_time);
@@ -282,7 +281,7 @@ TEST_F(RtcpTest, ReportCastFeedback) {
rtcp_at_rtp_receiver_.local_ssrc(),
BuildRtcpPacketFromRtpReceiver(
CreateRtcpTimeData(base::TimeTicks()), &cast_message, nullptr,
- base::TimeDelta::FromMilliseconds(kTargetDelayMs), nullptr, nullptr));
+ base::Milliseconds(kTargetDelayMs), nullptr, nullptr));
EXPECT_EQ(last_cast_message_.ack_frame_id, cast_message.ack_frame_id);
EXPECT_EQ(last_cast_message_.target_delay_ms, kTargetDelayMs);
@@ -315,8 +314,7 @@ TEST_F(RtcpTest, DropLateRtcpPacket) {
rtcp_at_rtp_receiver_.local_ssrc(),
BuildRtcpPacketFromRtpReceiver(
CreateRtcpTimeData(receiver_clock_->NowTicks()), &cast_message,
- nullptr, base::TimeDelta::FromMilliseconds(kTargetDelayMs), nullptr,
- nullptr));
+ nullptr, base::Milliseconds(kTargetDelayMs), nullptr, nullptr));
// Receiver ACKs first+2, but with a too-old timestamp.
RtcpCastMessage late_cast_message(kSenderSsrc);
@@ -324,8 +322,7 @@ TEST_F(RtcpTest, DropLateRtcpPacket) {
rtp_receiver_pacer_.SendRtcpPacket(
rtcp_at_rtp_receiver_.local_ssrc(),
BuildRtcpPacketFromRtpReceiver(
- CreateRtcpTimeData(receiver_clock_->NowTicks() -
- base::TimeDelta::FromSeconds(10)),
+ CreateRtcpTimeData(receiver_clock_->NowTicks() - base::Seconds(10)),
&late_cast_message, nullptr, base::TimeDelta(), nullptr, nullptr));
// Validate data from second packet is dropped.
@@ -347,7 +344,7 @@ TEST_F(RtcpTest, ReportReceiverEvents) {
const RtpTimeTicks kRtpTimeStamp =
media::cast::RtpTimeTicks().Expand(UINT32_C(100));
const base::TimeTicks kEventTimestamp = receiver_clock_->NowTicks();
- const base::TimeDelta kDelayDelta = base::TimeDelta::FromMilliseconds(100);
+ const base::TimeDelta kDelayDelta = base::Milliseconds(100);
RtcpEvent event;
event.type = FRAME_ACK_SENT;
diff --git a/chromium/media/cast/net/rtcp/rtcp_utility.cc b/chromium/media/cast/net/rtcp/rtcp_utility.cc
index 2cc8ca56e20..98ead3da373 100644
--- a/chromium/media/cast/net/rtcp/rtcp_utility.cc
+++ b/chromium/media/cast/net/rtcp/rtcp_utility.cc
@@ -252,8 +252,8 @@ bool RtcpParser::ParseCastReceiverLogFrameItem(
return false;
// We have 24 LSB of the event timestamp base on the wire.
- base::TimeTicks event_timestamp_base = base::TimeTicks() +
- base::TimeDelta::FromMilliseconds(data & 0xffffff);
+ base::TimeTicks event_timestamp_base =
+ base::TimeTicks() + base::Milliseconds(data & 0xffffff);
size_t num_events = 1 + static_cast<uint8_t>(data >> 24);
@@ -272,13 +272,12 @@ bool RtcpParser::ParseCastReceiverLogFrameItem(
static_cast<uint8_t>(event_type_and_timestamp_delta >> 12));
event_log.event_timestamp =
event_timestamp_base +
- base::TimeDelta::FromMilliseconds(
- event_type_and_timestamp_delta & 0xfff);
+ base::Milliseconds(event_type_and_timestamp_delta & 0xfff);
if (event_log.type == PACKET_RECEIVED) {
event_log.packet_id = delay_delta_or_packet_id;
} else {
- event_log.delay_delta = base::TimeDelta::FromMilliseconds(
- static_cast<int16_t>(delay_delta_or_packet_id));
+ event_log.delay_delta =
+ base::Milliseconds(static_cast<int16_t>(delay_delta_or_packet_id));
}
frame_log.event_log_messages_.push_back(event_log);
}
@@ -526,9 +525,9 @@ base::TimeTicks ConvertNtpToTimeTicks(uint32_t ntp_seconds,
ntp_seconds * base::Time::kMicrosecondsPerSecond +
static_cast<int64_t>(std::ceil(ntp_fractions / kMagicFractionalUnit));
- base::TimeDelta elapsed_since_unix_epoch = base::TimeDelta::FromMicroseconds(
- ntp_time_us -
- (kUnixEpochInNtpSeconds * base::Time::kMicrosecondsPerSecond));
+ base::TimeDelta elapsed_since_unix_epoch =
+ base::Microseconds(ntp_time_us - (kUnixEpochInNtpSeconds *
+ base::Time::kMicrosecondsPerSecond));
return base::TimeTicks::UnixEpoch() + elapsed_since_unix_epoch;
}
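The ConvertNtpToTimeTicks() cleanup above only changes the TimeDelta spelling, but the surrounding arithmetic is easier to follow with the NTP layout in mind. Assuming the standard NTP timestamp format (RFC 5905), which this parser consumes:

// Standard NTP timestamp layout (assumption: RFC 5905 format).
struct NtpTimestampSketch {
  uint32_t seconds;    // Seconds since the NTP epoch, 1900-01-01.
  uint32_t fractions;  // 1/2^32-second units; 0x80000000 is half a second.
};
// There are 2^32 fraction units per second, i.e. about 4294.967296 units per
// microsecond -- presumably the value behind kMagicFractionalUnit above.
// The NTP epoch precedes the Unix epoch (1970-01-01) by 2,208,988,800 s,
// presumably the shift applied via kUnixEpochInNtpSeconds.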
diff --git a/chromium/media/cast/net/rtcp/rtcp_utility.h b/chromium/media/cast/net/rtcp/rtcp_utility.h
index 9a10ecae523..667352529e3 100644
--- a/chromium/media/cast/net/rtcp/rtcp_utility.h
+++ b/chromium/media/cast/net/rtcp/rtcp_utility.h
@@ -40,6 +40,10 @@ struct RtcpCommonHeader {
class RtcpParser {
public:
RtcpParser(uint32_t local_ssrc, uint32_t remote_ssrc);
+
+ RtcpParser(const RtcpParser&) = delete;
+ RtcpParser& operator=(const RtcpParser&) = delete;
+
~RtcpParser();
// Gets/Sets the ID of the latest frame that could possibly be ACK'ed. This
@@ -132,8 +136,6 @@ class RtcpParser {
// Indicates if sender received the Pli message from the receiver.
bool has_picture_loss_indicator_;
-
- DISALLOW_COPY_AND_ASSIGN(RtcpParser);
};
// Converts a log event type to an integer value.
diff --git a/chromium/media/cast/net/rtcp/rtcp_utility_unittest.cc b/chromium/media/cast/net/rtcp/rtcp_utility_unittest.cc
index 915ff3c29fd..f4d3d08e59a 100644
--- a/chromium/media/cast/net/rtcp/rtcp_utility_unittest.cc
+++ b/chromium/media/cast/net/rtcp/rtcp_utility_unittest.cc
@@ -22,8 +22,7 @@ namespace cast {
static const uint32_t kRemoteSsrc = 0x10203;
static const uint32_t kLocalSsrc = 0x40506;
static const uint32_t kUnknownSsrc = 0xDEAD;
-static const base::TimeDelta kTargetDelay =
- base::TimeDelta::FromMilliseconds(100);
+static const base::TimeDelta kTargetDelay = base::Milliseconds(100);
class RtcpParserTest : public ::testing::Test {
protected:
@@ -434,7 +433,7 @@ TEST_F(RtcpParserTest, InjectReceiverReportWithReceiverLogVerificationBase) {
static const uint32_t kTimeDelayMs = 10;
static const uint32_t kDelayDeltaMs = 123;
base::SimpleTestTickClock testing_clock;
- testing_clock.Advance(base::TimeDelta::FromMilliseconds(kTimeBaseMs));
+ testing_clock.Advance(base::Milliseconds(kTimeBaseMs));
RtcpReceiverLogMessage receiver_log;
RtcpReceiverFrameLogMessage frame_log(RtpTimeTicks().Expand(kRtpTimestamp));
@@ -442,10 +441,10 @@ TEST_F(RtcpParserTest, InjectReceiverReportWithReceiverLogVerificationBase) {
event_log.type = FRAME_ACK_SENT;
event_log.event_timestamp = testing_clock.NowTicks();
- event_log.delay_delta = base::TimeDelta::FromMilliseconds(kDelayDeltaMs);
+ event_log.delay_delta = base::Milliseconds(kDelayDeltaMs);
frame_log.event_log_messages_.push_back(event_log);
- testing_clock.Advance(base::TimeDelta::FromMilliseconds(kTimeDelayMs));
+ testing_clock.Advance(base::Milliseconds(kTimeDelayMs));
event_log.type = PACKET_RECEIVED;
event_log.event_timestamp = testing_clock.NowTicks();
event_log.packet_id = kLostPacketId1;
@@ -477,7 +476,7 @@ TEST_F(RtcpParserTest, InjectReceiverReportWithReceiverLogVerificationMulti) {
static const uint32_t kTimeDelayMs = 10;
static const int kDelayDeltaMs = 123; // To be varied for every frame.
base::SimpleTestTickClock testing_clock;
- testing_clock.Advance(base::TimeDelta::FromMilliseconds(kTimeBaseMs));
+ testing_clock.Advance(base::Milliseconds(kTimeBaseMs));
RtcpReceiverLogMessage receiver_log;
@@ -486,11 +485,10 @@ TEST_F(RtcpParserTest, InjectReceiverReportWithReceiverLogVerificationMulti) {
RtcpReceiverEventLogMessage event_log;
event_log.type = FRAME_ACK_SENT;
event_log.event_timestamp = testing_clock.NowTicks();
- event_log.delay_delta =
- base::TimeDelta::FromMilliseconds((j - 50) * kDelayDeltaMs);
+ event_log.delay_delta = base::Milliseconds((j - 50) * kDelayDeltaMs);
frame_log.event_log_messages_.push_back(event_log);
receiver_log.push_back(frame_log);
- testing_clock.Advance(base::TimeDelta::FromMilliseconds(kTimeDelayMs));
+ testing_clock.Advance(base::Milliseconds(kTimeDelayMs));
}
TestRtcpPacketBuilder p;
@@ -524,7 +522,7 @@ TEST(RtcpUtilityTest, NtpAndTime) {
base::TimeTicks out_1 = ConvertNtpToTimeTicks(ntp_seconds_1, ntp_fraction_1);
EXPECT_EQ(input_time, out_1); // Verify inverse.
- base::TimeDelta time_delta = base::TimeDelta::FromMilliseconds(1000);
+ base::TimeDelta time_delta = base::Milliseconds(1000);
input_time += time_delta;
uint32_t ntp_seconds_2 = 0;
@@ -539,7 +537,7 @@ TEST(RtcpUtilityTest, NtpAndTime) {
EXPECT_EQ((ntp_seconds_2 - ntp_seconds_1), UINT32_C(1));
EXPECT_NEAR(ntp_fraction_2, ntp_fraction_1, 1);
- time_delta = base::TimeDelta::FromMilliseconds(500);
+ time_delta = base::Milliseconds(500);
input_time += time_delta;
uint32_t ntp_seconds_3 = 0;
diff --git a/chromium/media/cast/net/rtcp/sender_rtcp_session.cc b/chromium/media/cast/net/rtcp/sender_rtcp_session.cc
index dba167915ed..3d974afc725 100644
--- a/chromium/media/cast/net/rtcp/sender_rtcp_session.cc
+++ b/chromium/media/cast/net/rtcp/sender_rtcp_session.cc
@@ -39,7 +39,7 @@ base::TimeDelta ConvertFromNtpDiff(uint32_t ntp_delay) {
delay_us >>= 16;
delay_us +=
((ntp_delay & 0xffff0000) >> 16) * base::Time::kMicrosecondsPerSecond;
- return base::TimeDelta::FromMicroseconds(delay_us);
+ return base::Microseconds(delay_us);
}
// A receiver frame event is identified by frame RTP timestamp, event timestamp
@@ -159,7 +159,7 @@ void SenderRtcpSession::OnReceivedDelaySinceLastReport(
// such a level of precision cannot be measured with our approach; and 1 ms is
// good enough to represent "under 1 ms" for our use cases.
current_round_trip_time_ =
- std::max(current_round_trip_time_, base::TimeDelta::FromMilliseconds(1));
+ std::max(current_round_trip_time_, base::Milliseconds(1));
rtcp_observer_->OnReceivedRtt(current_round_trip_time_);
}
@@ -178,7 +178,7 @@ void SenderRtcpSession::SaveLastSentNtpTime(const base::TimeTicks& now,
last_reports_sent_queue_.push(std::make_pair(last_report, now));
const base::TimeTicks timeout =
- now - base::TimeDelta::FromMilliseconds(kStatsHistoryWindowMs);
+ now - base::Milliseconds(kStatsHistoryWindowMs);
// Cleanup old statistics older than |timeout|.
while (!last_reports_sent_queue_.empty()) {
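ConvertFromNtpDiff() above decodes the RTCP delay-since-last-sender-report (DLSR) field, which RFC 3550 defines in units of 1/65536 of a second, i.e. 16.16 fixed-point seconds: the upper 16 bits carry whole seconds and the lower 16 bits the fraction. A worked example with a hypothetical value:

// Hypothetical DLSR word: 1 second in the upper half, 0x8000/65536 = 0.5 s in
// the lower half.
const uint32_t ntp_delay = 0x00018000;
int64_t delay_us =
    (ntp_delay & 0xffff) * base::Time::kMicrosecondsPerSecond;  // 32768 * 1e6
delay_us >>= 16;  // Fractional part: 32768 / 65536 s = 500000 us.
delay_us +=
    ((ntp_delay & 0xffff0000) >> 16) * base::Time::kMicrosecondsPerSecond;
// delay_us == 1500000, so the function would return base::Microseconds(1500000).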
diff --git a/chromium/media/cast/net/rtcp/sender_rtcp_session.h b/chromium/media/cast/net/rtcp/sender_rtcp_session.h
index 08ec817a4f5..0ff85b79eaa 100644
--- a/chromium/media/cast/net/rtcp/sender_rtcp_session.h
+++ b/chromium/media/cast/net/rtcp/sender_rtcp_session.h
@@ -48,6 +48,9 @@ class SenderRtcpSession : public RtcpSession {
uint32_t local_ssrc,
uint32_t remote_ssrc);
+ SenderRtcpSession(const SenderRtcpSession&) = delete;
+ SenderRtcpSession& operator=(const SenderRtcpSession&) = delete;
+
~SenderRtcpSession() override;
// If greater than zero, this is the last measured network round trip time.
@@ -128,8 +131,6 @@ class SenderRtcpSession : public RtcpSession {
// when last report is received from RTP receiver.
RtcpSendTimeMap last_reports_sent_map_;
RtcpSendTimeQueue last_reports_sent_queue_;
-
- DISALLOW_COPY_AND_ASSIGN(SenderRtcpSession);
};
} // namespace cast
diff --git a/chromium/media/cast/net/rtp/packet_storage.h b/chromium/media/cast/net/rtp/packet_storage.h
index 3766c42fa99..b78ec61b05a 100644
--- a/chromium/media/cast/net/rtp/packet_storage.h
+++ b/chromium/media/cast/net/rtp/packet_storage.h
@@ -18,6 +18,10 @@ namespace cast {
class PacketStorage {
public:
PacketStorage();
+
+ PacketStorage(const PacketStorage&) = delete;
+ PacketStorage& operator=(const PacketStorage&) = delete;
+
virtual ~PacketStorage();
// Store all of the packets for a frame.
@@ -39,8 +43,6 @@ class PacketStorage {
// The number of frames whose packets have been released, but the entry in the
// |frames_| queue has not yet been popped.
size_t zombie_count_;
-
- DISALLOW_COPY_AND_ASSIGN(PacketStorage);
};
} // namespace cast
diff --git a/chromium/media/cast/net/rtp/rtp_packetizer_unittest.cc b/chromium/media/cast/net/rtp/rtp_packetizer_unittest.cc
index 090f13723cd..c0af7dfebae 100644
--- a/chromium/media/cast/net/rtp/rtp_packetizer_unittest.cc
+++ b/chromium/media/cast/net/rtp/rtp_packetizer_unittest.cc
@@ -134,7 +134,7 @@ class RtpPacketizerTest : public ::testing::Test {
void RunTasks(int during_ms) {
for (int i = 0; i < during_ms; ++i) {
// Call process the timers every 1 ms.
- testing_clock_.Advance(base::TimeDelta::FromMilliseconds(1));
+ testing_clock_.Advance(base::Milliseconds(1));
task_runner_->RunTasks();
}
}
@@ -157,7 +157,7 @@ TEST_F(RtpPacketizerTest, SendStandardPackets) {
transport_->set_expected_number_of_packets(expected_num_of_packets);
transport_->set_rtp_timestamp(video_frame_.rtp_timestamp);
- testing_clock_.Advance(base::TimeDelta::FromMilliseconds(kTimestampMs));
+ testing_clock_.Advance(base::Milliseconds(kTimestampMs));
video_frame_.reference_time = testing_clock_.NowTicks();
rtp_packetizer_->SendFrameAsPackets(video_frame_);
RunTasks(33 + 1);
@@ -169,7 +169,7 @@ TEST_F(RtpPacketizerTest, SendPacketsWithAdaptivePlayoutExtension) {
transport_->set_expected_number_of_packets(expected_num_of_packets);
transport_->set_rtp_timestamp(video_frame_.rtp_timestamp);
- testing_clock_.Advance(base::TimeDelta::FromMilliseconds(kTimestampMs));
+ testing_clock_.Advance(base::Milliseconds(kTimestampMs));
video_frame_.reference_time = testing_clock_.NowTicks();
video_frame_.new_playout_delay_ms = 500;
rtp_packetizer_->SendFrameAsPackets(video_frame_);
@@ -185,7 +185,7 @@ TEST_F(RtpPacketizerTest, Stats) {
transport_->set_expected_number_of_packets(expected_num_of_packets);
transport_->set_rtp_timestamp(video_frame_.rtp_timestamp);
- testing_clock_.Advance(base::TimeDelta::FromMilliseconds(kTimestampMs));
+ testing_clock_.Advance(base::Milliseconds(kTimestampMs));
video_frame_.reference_time = testing_clock_.NowTicks();
rtp_packetizer_->SendFrameAsPackets(video_frame_);
RunTasks(33 + 1);
diff --git a/chromium/media/cast/net/rtp/rtp_parser.h b/chromium/media/cast/net/rtp/rtp_parser.h
index 860437c4e20..b87e2aca178 100644
--- a/chromium/media/cast/net/rtp/rtp_parser.h
+++ b/chromium/media/cast/net/rtp/rtp_parser.h
@@ -22,6 +22,9 @@ class RtpParser {
public:
RtpParser(uint32_t expected_sender_ssrc, uint8_t expected_payload_type);
+ RtpParser(const RtpParser&) = delete;
+ RtpParser& operator=(const RtpParser&) = delete;
+
virtual ~RtpParser();
// Parses the |packet|, expecting an RTP header along with a Cast header at
@@ -47,8 +50,6 @@ class RtpParser {
// re-expanded into full-form.
RtpTimeTicks last_parsed_rtp_timestamp_;
FrameId last_parsed_frame_id_;
-
- DISALLOW_COPY_AND_ASSIGN(RtpParser);
};
} // namespace cast
diff --git a/chromium/media/cast/net/rtp/rtp_sender.cc b/chromium/media/cast/net/rtp/rtp_sender.cc
index 181890a553f..23b11269d01 100644
--- a/chromium/media/cast/net/rtp/rtp_sender.cc
+++ b/chromium/media/cast/net/rtp/rtp_sender.cc
@@ -79,8 +79,9 @@ void RtpSender::ResendPackets(
if (!stored_packets)
continue;
- for (auto it = stored_packets->begin(); it != stored_packets->end(); ++it) {
- const PacketKey& packet_key = it->first;
+ for (auto packet_it = stored_packets->begin();
+ packet_it != stored_packets->end(); ++packet_it) {
+ const PacketKey& packet_key = packet_it->first;
const uint16_t packet_id = packet_key.packet_id;
// Should we resend the packet?
@@ -94,7 +95,7 @@ void RtpSender::ResendPackets(
// If we were asked to resend the last packet, check if it's the
// last packet.
- if (!resend && resend_last && (it + 1) == stored_packets->end()) {
+ if (!resend && resend_last && (packet_it + 1) == stored_packets->end()) {
resend = true;
}
@@ -102,11 +103,11 @@ void RtpSender::ResendPackets(
// Resend packet to the network.
VLOG(3) << "Resend " << frame_id << ":" << packet_id;
// Set a unique incremental sequence number for every packet.
- PacketRef packet_copy = FastCopyPacket(it->second);
+ PacketRef packet_copy = FastCopyPacket(packet_it->second);
UpdateSequenceNumber(&packet_copy->data);
packets_to_resend.push_back(std::make_pair(packet_key, packet_copy));
} else if (cancel_rtx_if_not_in_list) {
- transport_->CancelSendingPacket(it->first);
+ transport_->CancelSendingPacket(packet_it->first);
}
}
transport_->ResendPackets(packets_to_resend, dedup_info);
diff --git a/chromium/media/cast/net/rtp/rtp_sender.h b/chromium/media/cast/net/rtp/rtp_sender.h
index fc9ce99f05f..02a60186325 100644
--- a/chromium/media/cast/net/rtp/rtp_sender.h
+++ b/chromium/media/cast/net/rtp/rtp_sender.h
@@ -37,6 +37,9 @@ class RtpSender {
const scoped_refptr<base::SingleThreadTaskRunner>& transport_task_runner,
PacedSender* const transport);
+ RtpSender(const RtpSender&) = delete;
+ RtpSender& operator=(const RtpSender&) = delete;
+
~RtpSender();
// This must be called before sending any frames. Returns false if
@@ -78,8 +81,6 @@ class RtpSender {
// NOTE: Weak pointers must be invalidated before all other member variables.
base::WeakPtrFactory<RtpSender> weak_factory_{this};
-
- DISALLOW_COPY_AND_ASSIGN(RtpSender);
};
} // namespace cast
diff --git a/chromium/media/cast/net/udp_packet_pipe.h b/chromium/media/cast/net/udp_packet_pipe.h
index b05273675df..4970785a365 100644
--- a/chromium/media/cast/net/udp_packet_pipe.h
+++ b/chromium/media/cast/net/udp_packet_pipe.h
@@ -18,6 +18,9 @@ class UdpPacketPipeReader {
explicit UdpPacketPipeReader(
mojo::ScopedDataPipeConsumerHandle consumer_handle);
+ UdpPacketPipeReader(const UdpPacketPipeReader&) = delete;
+ UdpPacketPipeReader& operator=(const UdpPacketPipeReader&) = delete;
+
~UdpPacketPipeReader();
using ReadCB = base::OnceCallback<void(std::unique_ptr<Packet>)>;
@@ -37,8 +40,6 @@ class UdpPacketPipeReader {
MojoDataPipeReader data_pipe_reader_;
uint16_t current_packet_size_;
-
- DISALLOW_COPY_AND_ASSIGN(UdpPacketPipeReader);
};
// Writes UDP packets into the data mojo pipe. The size of each packet is
@@ -48,6 +49,9 @@ class UdpPacketPipeWriter {
explicit UdpPacketPipeWriter(
mojo::ScopedDataPipeProducerHandle producer_handle);
+ UdpPacketPipeWriter(const UdpPacketPipeWriter&) = delete;
+ UdpPacketPipeWriter& operator=(const UdpPacketPipeWriter&) = delete;
+
~UdpPacketPipeWriter();
// Writes the |packet| into the mojo data pipe. |done_cb| will be
@@ -71,8 +75,6 @@ class UdpPacketPipeWriter {
MojoDataPipeWriter data_pipe_writer_;
uint16_t current_packet_size_ = 0;
-
- DISALLOW_COPY_AND_ASSIGN(UdpPacketPipeWriter);
};
} // namespace cast
diff --git a/chromium/media/cast/net/udp_packet_pipe_unittest.cc b/chromium/media/cast/net/udp_packet_pipe_unittest.cc
index 69ca2d17814..1201fe0fcab 100644
--- a/chromium/media/cast/net/udp_packet_pipe_unittest.cc
+++ b/chromium/media/cast/net/udp_packet_pipe_unittest.cc
@@ -35,6 +35,9 @@ class UdpPacketPipeTest : public ::testing::Test {
reader_ = std::make_unique<UdpPacketPipeReader>(std::move(consumer_handle));
}
+ UdpPacketPipeTest(const UdpPacketPipeTest&) = delete;
+ UdpPacketPipeTest& operator=(const UdpPacketPipeTest&) = delete;
+
~UdpPacketPipeTest() override = default;
void OnPacketRead(std::unique_ptr<Packet> packet) {
@@ -46,9 +49,6 @@ class UdpPacketPipeTest : public ::testing::Test {
std::unique_ptr<UdpPacketPipeWriter> writer_;
std::unique_ptr<UdpPacketPipeReader> reader_;
base::circular_deque<std::unique_ptr<Packet>> packets_read_;
-
- private:
- DISALLOW_COPY_AND_ASSIGN(UdpPacketPipeTest);
};
TEST_F(UdpPacketPipeTest, Normal) {
diff --git a/chromium/media/cast/net/udp_transport_impl.h b/chromium/media/cast/net/udp_transport_impl.h
index 0b60a35bded..638ff7d6713 100644
--- a/chromium/media/cast/net/udp_transport_impl.h
+++ b/chromium/media/cast/net/udp_transport_impl.h
@@ -46,6 +46,10 @@ class UdpTransportImpl final : public PacketTransport, public UdpTransport {
const net::IPEndPoint& local_end_point,
const net::IPEndPoint& remote_end_point,
CastTransportStatusCallback status_callback);
+
+ UdpTransportImpl(const UdpTransportImpl&) = delete;
+ UdpTransportImpl& operator=(const UdpTransportImpl&) = delete;
+
~UdpTransportImpl() final;
// PacketTransport implementations.
@@ -136,8 +140,6 @@ class UdpTransportImpl final : public PacketTransport, public UdpTransport {
// NOTE: Weak pointers must be invalidated before all other member variables.
base::WeakPtrFactory<UdpTransportImpl> weak_factory_{this};
-
- DISALLOW_COPY_AND_ASSIGN(UdpTransportImpl);
};
} // namespace cast
diff --git a/chromium/media/cast/net/udp_transport_unittest.cc b/chromium/media/cast/net/udp_transport_unittest.cc
index 9e301406405..8711b53fd84 100644
--- a/chromium/media/cast/net/udp_transport_unittest.cc
+++ b/chromium/media/cast/net/udp_transport_unittest.cc
@@ -87,6 +87,9 @@ class UdpTransportImplTest : public ::testing::Test {
recv_transport_->SetSendBufferSize(65536);
}
+ UdpTransportImplTest(const UdpTransportImplTest&) = delete;
+ UdpTransportImplTest& operator=(const UdpTransportImplTest&) = delete;
+
~UdpTransportImplTest() override = default;
protected:
@@ -96,9 +99,6 @@ class UdpTransportImplTest : public ::testing::Test {
// A receiver side transport to receive/send packets from/to sender.
std::unique_ptr<UdpTransportImpl> recv_transport_;
-
- private:
- DISALLOW_COPY_AND_ASSIGN(UdpTransportImplTest);
};
// Test the sending/receiving functions as a PacketSender.
diff --git a/chromium/media/cast/sender/audio_encoder.cc b/chromium/media/cast/sender/audio_encoder.cc
index 5b1cb671a2f..550e19fa7c6 100644
--- a/chromium/media/cast/sender/audio_encoder.cc
+++ b/chromium/media/cast/sender/audio_encoder.cc
@@ -63,8 +63,8 @@ class AudioEncoder::ImplBase
samples_per_frame_(samples_per_frame),
callback_(std::move(callback)),
operational_status_(STATUS_UNINITIALIZED),
- frame_duration_(base::TimeDelta::FromSecondsD(
- static_cast<double>(samples_per_frame_) / sampling_rate)),
+ frame_duration_(base::Seconds(static_cast<double>(samples_per_frame_) /
+ sampling_rate)),
buffer_fill_end_(0),
frame_id_(FrameId::first()),
samples_dropped_from_buffer_(0) {
@@ -305,12 +305,12 @@ class AudioEncoder::OpusImpl final : public AudioEncoder::ImplBase {
static bool IsValidFrameDuration(base::TimeDelta duration) {
// See https://tools.ietf.org/html/rfc6716#section-2.1.4
- return duration == base::TimeDelta::FromMicroseconds(2500) ||
- duration == base::TimeDelta::FromMilliseconds(5) ||
- duration == base::TimeDelta::FromMilliseconds(10) ||
- duration == base::TimeDelta::FromMilliseconds(20) ||
- duration == base::TimeDelta::FromMilliseconds(40) ||
- duration == base::TimeDelta::FromMilliseconds(60);
+ return duration == base::Microseconds(2500) ||
+ duration == base::Milliseconds(5) ||
+ duration == base::Milliseconds(10) ||
+ duration == base::Milliseconds(20) ||
+ duration == base::Milliseconds(40) ||
+ duration == base::Milliseconds(60);
}
const std::unique_ptr<uint8_t[]> encoder_memory_;
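The constructor change above computes frame_duration_ directly as samples-per-frame over the sampling rate, and IsValidFrameDuration() restricts it to the frame sizes Opus accepts (2.5, 5, 10, 20, 40 or 60 ms; RFC 6716, section 2.1.4). With hypothetical but representative numbers:

// Hypothetical configuration: 48 kHz audio with 960 samples per frame.
constexpr int kSamplingRate = 48000;
constexpr int kSamplesPerFrame = 960;
const base::TimeDelta frame_duration =
    base::Seconds(static_cast<double>(kSamplesPerFrame) / kSamplingRate);
// 960 / 48000 = 0.02 s, i.e. a 20 ms frame, which is one of the durations
// Opus allows, so IsValidFrameDuration(frame_duration) holds.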
diff --git a/chromium/media/cast/sender/audio_encoder.h b/chromium/media/cast/sender/audio_encoder.h
index 8cee3ca2caa..9b897c4d552 100644
--- a/chromium/media/cast/sender/audio_encoder.h
+++ b/chromium/media/cast/sender/audio_encoder.h
@@ -35,6 +35,10 @@ class AudioEncoder {
int bitrate,
Codec codec,
FrameEncodedCallback frame_encoded_callback);
+
+ AudioEncoder(const AudioEncoder&) = delete;
+ AudioEncoder& operator=(const AudioEncoder&) = delete;
+
virtual ~AudioEncoder();
OperationalStatus InitializationResult() const;
@@ -56,8 +60,6 @@ class AudioEncoder {
// Used to ensure only one thread invokes InsertAudio().
base::ThreadChecker insert_thread_checker_;
-
- DISALLOW_COPY_AND_ASSIGN(AudioEncoder);
};
} // namespace cast
diff --git a/chromium/media/cast/sender/audio_encoder_unittest.cc b/chromium/media/cast/sender/audio_encoder_unittest.cc
index ef4688fb616..5c6ba55fae1 100644
--- a/chromium/media/cast/sender/audio_encoder_unittest.cc
+++ b/chromium/media/cast/sender/audio_encoder_unittest.cc
@@ -35,6 +35,11 @@ namespace {
class TestEncodedAudioFrameReceiver {
public:
TestEncodedAudioFrameReceiver() : frames_received_(0) {}
+
+ TestEncodedAudioFrameReceiver(const TestEncodedAudioFrameReceiver&) = delete;
+ TestEncodedAudioFrameReceiver& operator=(
+ const TestEncodedAudioFrameReceiver&) = delete;
+
virtual ~TestEncodedAudioFrameReceiver() = default;
int frames_received() const { return frames_received_; }
@@ -78,8 +83,6 @@ class TestEncodedAudioFrameReceiver {
int samples_per_frame_;
base::TimeTicks lower_bound_;
base::TimeTicks upper_bound_;
-
- DISALLOW_COPY_AND_ASSIGN(TestEncodedAudioFrameReceiver);
};
struct TestScenario {
@@ -115,6 +118,9 @@ class AudioEncoderTest : public ::testing::TestWithParam<TestScenario> {
task_runner_, task_runner_);
}
+ AudioEncoderTest(const AudioEncoderTest&) = delete;
+ AudioEncoderTest& operator=(const AudioEncoderTest&) = delete;
+
virtual ~AudioEncoderTest() = default;
void RunTestForCodec(Codec codec) {
@@ -127,8 +133,8 @@ class AudioEncoderTest : public ::testing::TestWithParam<TestScenario> {
for (size_t i = 0; i < scenario.num_durations; ++i) {
const bool simulate_missing_data = scenario.durations_in_ms[i] < 0;
- const base::TimeDelta duration = base::TimeDelta::FromMilliseconds(
- std::abs(scenario.durations_in_ms[i]));
+ const base::TimeDelta duration =
+ base::Milliseconds(std::abs(scenario.durations_in_ms[i]));
receiver_->SetCaptureTimeBounds(
testing_clock_.NowTicks() - frame_duration,
testing_clock_.NowTicks() + duration);
@@ -172,8 +178,6 @@ class AudioEncoderTest : public ::testing::TestWithParam<TestScenario> {
std::unique_ptr<TestEncodedAudioFrameReceiver> receiver_;
std::unique_ptr<AudioEncoder> audio_encoder_;
scoped_refptr<CastEnvironment> cast_environment_;
-
- DISALLOW_COPY_AND_ASSIGN(AudioEncoderTest);
};
TEST_P(AudioEncoderTest, EncodeOpus) {
diff --git a/chromium/media/cast/sender/audio_sender.h b/chromium/media/cast/sender/audio_sender.h
index cdae56680f9..e2417a96d01 100644
--- a/chromium/media/cast/sender/audio_sender.h
+++ b/chromium/media/cast/sender/audio_sender.h
@@ -36,6 +36,9 @@ class AudioSender final : public FrameSender {
StatusChangeOnceCallback status_change_cb,
CastTransport* const transport_sender);
+ AudioSender(const AudioSender&) = delete;
+ AudioSender& operator=(const AudioSender&) = delete;
+
~AudioSender() final;
// Note: It is not guaranteed that |audio_frame| will actually be encoded and
@@ -64,8 +67,6 @@ class AudioSender final : public FrameSender {
// NOTE: Weak pointers must be invalidated before all other member variables.
base::WeakPtrFactory<AudioSender> weak_factory_{this};
-
- DISALLOW_COPY_AND_ASSIGN(AudioSender);
};
} // namespace cast
diff --git a/chromium/media/cast/sender/audio_sender_unittest.cc b/chromium/media/cast/sender/audio_sender_unittest.cc
index e80e304a3fd..9bba25c5918 100644
--- a/chromium/media/cast/sender/audio_sender_unittest.cc
+++ b/chromium/media/cast/sender/audio_sender_unittest.cc
@@ -131,7 +131,7 @@ class AudioSenderTest : public ::testing::Test {
};
TEST_F(AudioSenderTest, Encode20ms) {
- const base::TimeDelta kDuration = base::TimeDelta::FromMilliseconds(20);
+ const base::TimeDelta kDuration = base::Milliseconds(20);
std::unique_ptr<AudioBus> bus(
TestAudioBusFactory(audio_config_.channels, audio_config_.rtp_timebase,
TestAudioBusFactory::kMiddleANoteFreq, 0.5f)
@@ -144,7 +144,7 @@ TEST_F(AudioSenderTest, Encode20ms) {
}
TEST_F(AudioSenderTest, RtcpTimer) {
- const base::TimeDelta kDuration = base::TimeDelta::FromMilliseconds(20);
+ const base::TimeDelta kDuration = base::Milliseconds(20);
std::unique_ptr<AudioBus> bus(
TestAudioBusFactory(audio_config_.channels, audio_config_.rtp_timebase,
TestAudioBusFactory::kMiddleANoteFreq, 0.5f)
@@ -155,7 +155,7 @@ TEST_F(AudioSenderTest, RtcpTimer) {
// Make sure that we send at least one RTCP packet.
base::TimeDelta max_rtcp_timeout =
- base::TimeDelta::FromMilliseconds(1 + kRtcpReportIntervalMs * 3 / 2);
+ base::Milliseconds(1 + kRtcpReportIntervalMs * 3 / 2);
testing_clock_.Advance(max_rtcp_timeout);
task_runner_->RunTasks();
EXPECT_LE(1, transport_->number_of_rtp_packets());
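
The other mechanical change running through these files replaces the base::TimeDelta::From*() factory functions with the shorter base::Milliseconds()/Seconds()/Microseconds() helpers from base/time/time.h. Both spellings build the same base::TimeDelta value; a small before/after sketch, assuming only that base/time/time.h is included:

#include "base/time/time.h"

// Old spelling, as removed by this patch:
const base::TimeDelta old_style = base::TimeDelta::FromMilliseconds(20);
// New spelling, as introduced by this patch:
const base::TimeDelta new_style = base::Milliseconds(20);
// The helpers also accept floating-point values, as in
// base::Seconds(1.0 / max_frame_rate) used by the senders below.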
diff --git a/chromium/media/cast/sender/av1_encoder.cc b/chromium/media/cast/sender/av1_encoder.cc
new file mode 100644
index 00000000000..f60707dbb8a
--- /dev/null
+++ b/chromium/media/cast/sender/av1_encoder.cc
@@ -0,0 +1,377 @@
+// Copyright 2021 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/cast/sender/av1_encoder.h"
+
+#include "base/logging.h"
+#include "media/base/video_frame.h"
+#include "media/cast/constants.h"
+#include "third_party/libaom/source/libaom/aom/aomcx.h"
+
+namespace media {
+namespace cast {
+
+namespace {
+
+// After a pause in the video stream, what is the maximum duration to pass to
+// the encoder for the next frame (in terms of 1/max_fps sized periods)?
+// This essentially controls the encoded size of the first frame that follows a
+// pause in the video stream.
+const int kRestartFramePeriods = 3;
+
+// The following constants are used to automatically tune the encoder
+// parameters: |cpu_used| and |min_quantizer|.
+
+// The |half-life| of the encoding speed accumulator.
+// The smaller the value, the shorter the time-averaging window.
+const int kEncodingSpeedAccHalfLife = 120000; // 0.12 second.
+
+// The target encoder utilization signal. This is a trade-off between quality
+// and CPU usage. The range of this value is [0, 1]. The higher the value, the
+// better the quality and the higher the CPU usage.
+//
+// For machines with more than two encoding threads.
+const double kHiTargetEncoderUtilization = 0.7;
+// For machines with two encoding threads.
+const double kMidTargetEncoderUtilization = 0.6;
+// For machines with single encoding thread.
+const double kLoTargetEncoderUtilization = 0.5;
+
+// This is the equivalent change in encoding speed for a change of one
+// quantizer step.
+const double kEquivalentEncodingSpeedStepPerQpStep = 1 / 20.0;
+
+// Highest/lowest allowed encoding speed set to the encoder. The valid range
+// is [0, 9].
+const int kHighestEncodingSpeed = 9;
+const int kLowestEncodingSpeed = 0;
+
+bool HasSufficientFeedback(
+ const FeedbackSignalAccumulator<base::TimeDelta>& accumulator) {
+ const base::TimeDelta amount_of_history =
+ accumulator.update_time() - accumulator.reset_time();
+ return amount_of_history.InMicroseconds() >= 250000; // 0.25 second.
+}
+
+} // namespace
+
+Av1Encoder::Av1Encoder(const FrameSenderConfig& video_config)
+ : cast_config_(video_config),
+ target_encoder_utilization_(
+ video_config.video_codec_params.number_of_encode_threads > 2
+ ? kHiTargetEncoderUtilization
+ : (video_config.video_codec_params.number_of_encode_threads > 1
+ ? kMidTargetEncoderUtilization
+ : kLoTargetEncoderUtilization)),
+ key_frame_requested_(true),
+ bitrate_kbit_(cast_config_.start_bitrate / 1000),
+ next_frame_id_(FrameId::first()),
+ encoding_speed_acc_(base::Microseconds(kEncodingSpeedAccHalfLife)),
+ encoding_speed_(kHighestEncodingSpeed) {
+ config_.g_timebase.den = 0; // Not initialized.
+ DCHECK_LE(cast_config_.video_codec_params.min_qp,
+ cast_config_.video_codec_params.max_cpu_saver_qp);
+ DCHECK_LE(cast_config_.video_codec_params.max_cpu_saver_qp,
+ cast_config_.video_codec_params.max_qp);
+
+ DETACH_FROM_THREAD(thread_checker_);
+}
+
+Av1Encoder::~Av1Encoder() {
+ DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
+ if (is_initialized())
+ aom_codec_destroy(&encoder_);
+}
+
+void Av1Encoder::Initialize() {
+ DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
+ DCHECK(!is_initialized());
+ // The encoder will be created/configured when the first frame encode is
+ // requested.
+}
+
+void Av1Encoder::ConfigureForNewFrameSize(const gfx::Size& frame_size) {
+ if (is_initialized()) {
+ // Workaround for VP8 bug: If the new size is strictly less-than-or-equal to
+ // the old size, in terms of area, the existing encoder instance can
+ // continue. Otherwise, completely tear-down and re-create a new encoder to
+ // avoid a shutdown crash.
+ // NOTE: Determine if this workaround is needed for AV1
+ if (frame_size.GetArea() <= gfx::Size(config_.g_w, config_.g_h).GetArea()) {
+ DVLOG(1) << "Continuing to use existing encoder at smaller frame size: "
+ << gfx::Size(config_.g_w, config_.g_h).ToString() << " --> "
+ << frame_size.ToString();
+ config_.g_w = frame_size.width();
+ config_.g_h = frame_size.height();
+ config_.rc_min_quantizer = cast_config_.video_codec_params.min_qp;
+ if (aom_codec_enc_config_set(&encoder_, &config_) == AOM_CODEC_OK)
+ return;
+ DVLOG(1) << "libaom rejected the attempt to use a smaller frame size in "
+ "the current instance.";
+ }
+
+ DVLOG(1) << "Destroying/Re-Creating encoder for larger frame size: "
+ << gfx::Size(config_.g_w, config_.g_h).ToString() << " --> "
+ << frame_size.ToString();
+ aom_codec_destroy(&encoder_);
+ } else {
+ DVLOG(1) << "Creating encoder for the first frame; size: "
+ << frame_size.ToString();
+ }
+
+ // Populate encoder configuration with default values.
+ CHECK_EQ(aom_codec_enc_config_default(aom_codec_av1_cx(), &config_,
+ AOM_USAGE_REALTIME),
+ AOM_CODEC_OK);
+
+ config_.g_threads = cast_config_.video_codec_params.number_of_encode_threads;
+ config_.g_w = frame_size.width();
+ config_.g_h = frame_size.height();
+ // Set the timebase to match that of base::TimeDelta.
+ config_.g_timebase.num = 1;
+ config_.g_timebase.den = base::Time::kMicrosecondsPerSecond;
+
+ // |g_pass| and |g_lag_in_frames| must be "one pass" and zero, respectively,
+ // in order for AV1 to support changing frame sizes during encoding:
+ config_.g_pass = AOM_RC_ONE_PASS;
+ config_.g_lag_in_frames = 0; // Immediate data output for each frame.
+
+ // Rate control settings.
+ config_.rc_dropframe_thresh = 0; // The encoder may not drop any frames.
+ config_.rc_resize_mode = 0; // TODO(miu): Why not? Investigate this.
+ config_.rc_end_usage = AOM_CBR;
+ config_.rc_target_bitrate = bitrate_kbit_;
+ config_.rc_min_quantizer = cast_config_.video_codec_params.min_qp;
+ config_.rc_max_quantizer = cast_config_.video_codec_params.max_qp;
+ // TODO(miu): Revisit these now that the encoder is being successfully
+ // micro-managed.
+ config_.rc_undershoot_pct = 100;
+ config_.rc_overshoot_pct = 15;
+ // TODO(miu): Document why these rc_buf_*_sz values were chosen and/or
+ // research for better values. Should they be computed from the target
+ // playout delay?
+ config_.rc_buf_initial_sz = 500;
+ config_.rc_buf_optimal_sz = 600;
+ config_.rc_buf_sz = 1000;
+
+ config_.kf_mode = AOM_KF_DISABLED;
+
+ aom_codec_flags_t flags = 0;
+ CHECK_EQ(aom_codec_enc_init(&encoder_, aom_codec_av1_cx(), &config_, flags),
+ AOM_CODEC_OK);
+
+ // This cpu_used setting is a trade-off between cpu usage and encoded video
+ // quality. The default is zero, with increasingly less CPU to be used as the
+  // value is more positive. Start with the highest encoding speed to avoid
+  // heavy CPU usage from the beginning. Unlike VP8/VP9, negative speeds are
+  // not supported for AV1 encoding.
+ encoding_speed_ = kHighestEncodingSpeed;
+ CHECK_EQ(aom_codec_control(&encoder_, AOME_SET_CPUUSED, encoding_speed_),
+ AOM_CODEC_OK);
+}
+
+void Av1Encoder::Encode(scoped_refptr<media::VideoFrame> video_frame,
+ base::TimeTicks reference_time,
+ SenderEncodedFrame* encoded_frame) {
+ DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
+ DCHECK(encoded_frame);
+
+ // Note: This is used to compute the |encoder_utilization| and so it uses the
+ // real-world clock instead of the CastEnvironment clock, the latter of which
+ // might be simulated.
+ const base::TimeTicks start_time = base::TimeTicks::Now();
+
+ // Initialize on-demand. Later, if the video frame size has changed, update
+ // the encoder configuration.
+ const gfx::Size frame_size = video_frame->visible_rect().size();
+ if (!is_initialized() || gfx::Size(config_.g_w, config_.g_h) != frame_size)
+ ConfigureForNewFrameSize(frame_size);
+
+ // Wrapper for aom_codec_encode() to access the YUV data in the |video_frame|.
+ // Only the VISIBLE rectangle within |video_frame| is exposed to the codec.
+ aom_img_fmt_t aom_format = AOM_IMG_FMT_I420;
+ aom_image_t aom_image;
+ aom_image_t* const result = aom_img_wrap(
+ &aom_image, aom_format, frame_size.width(), frame_size.height(), 1,
+ video_frame->data(VideoFrame::kYPlane));
+ DCHECK_EQ(result, &aom_image);
+
+ aom_image.planes[AOM_PLANE_Y] =
+ video_frame->visible_data(VideoFrame::kYPlane);
+ aom_image.planes[AOM_PLANE_U] =
+ video_frame->visible_data(VideoFrame::kUPlane);
+ aom_image.planes[AOM_PLANE_V] =
+ video_frame->visible_data(VideoFrame::kVPlane);
+ aom_image.stride[AOM_PLANE_Y] = video_frame->stride(VideoFrame::kYPlane);
+ aom_image.stride[AOM_PLANE_U] = video_frame->stride(VideoFrame::kUPlane);
+ aom_image.stride[AOM_PLANE_V] = video_frame->stride(VideoFrame::kVPlane);
+
+ // The frame duration given to the AV1 codec affects a number of important
+ // behaviors, including: per-frame bandwidth, CPU time spent encoding,
+ // temporal quality trade-offs, and key/golden/alt-ref frame generation
+ // intervals. Bound the prediction to account for the fact that the frame
+ // rate can be highly variable, including long pauses in the video stream.
+ const base::TimeDelta minimum_frame_duration =
+ base::Seconds(1.0 / cast_config_.max_frame_rate);
+ const base::TimeDelta maximum_frame_duration = base::Seconds(
+ static_cast<double>(kRestartFramePeriods) / cast_config_.max_frame_rate);
+ base::TimeDelta predicted_frame_duration =
+ video_frame->metadata().frame_duration.value_or(base::TimeDelta());
+ if (predicted_frame_duration <= base::TimeDelta()) {
+ // The source of the video frame did not provide the frame duration. Use
+ // the actual amount of time between the current and previous frame as a
+ // prediction for the next frame's duration.
+ predicted_frame_duration = video_frame->timestamp() - last_frame_timestamp_;
+ }
+ predicted_frame_duration =
+ std::max(minimum_frame_duration,
+ std::min(maximum_frame_duration, predicted_frame_duration));
+ last_frame_timestamp_ = video_frame->timestamp();
+
+ // Encode the frame. The presentation time stamp argument here is fixed to
+ // zero to force the encoder to base its single-frame bandwidth calculations
+ // entirely on |predicted_frame_duration| and the target bitrate setting being
+ // micro-managed via calls to UpdateRates().
+ CHECK_EQ(aom_codec_encode(&encoder_, &aom_image, 0,
+ predicted_frame_duration.InMicroseconds(),
+ key_frame_requested_ ? AOM_EFLAG_FORCE_KF : 0),
+ AOM_CODEC_OK)
+ << "BUG: Invalid arguments passed to aom_codec_encode().";
+
+ // Pull data from the encoder, populating a new EncodedFrame.
+ encoded_frame->frame_id = next_frame_id_++;
+ const aom_codec_cx_pkt_t* pkt = nullptr;
+ aom_codec_iter_t iter = nullptr;
+ while ((pkt = aom_codec_get_cx_data(&encoder_, &iter)) != nullptr) {
+ if (pkt->kind != AOM_CODEC_CX_FRAME_PKT)
+ continue;
+ if (pkt->data.frame.flags & AOM_FRAME_IS_KEY) {
+ // TODO(hubbe): Replace "dependency" with a "bool is_key_frame".
+ encoded_frame->dependency = EncodedFrame::KEY;
+ encoded_frame->referenced_frame_id = encoded_frame->frame_id;
+ } else {
+ encoded_frame->dependency = EncodedFrame::DEPENDENT;
+ // Frame dependencies could theoretically be relaxed by looking for the
+ // AOM_FRAME_IS_DROPPABLE flag, but in recent testing (Oct 2014), this
+ // flag never seems to be set.
+ encoded_frame->referenced_frame_id = encoded_frame->frame_id - 1;
+ }
+ encoded_frame->rtp_timestamp =
+ RtpTimeTicks::FromTimeDelta(video_frame->timestamp(), kVideoFrequency);
+ encoded_frame->reference_time = reference_time;
+ encoded_frame->data.assign(
+ static_cast<const uint8_t*>(pkt->data.frame.buf),
+ static_cast<const uint8_t*>(pkt->data.frame.buf) + pkt->data.frame.sz);
+ break; // Done, since all data is provided in one CX_FRAME_PKT packet.
+ }
+ DCHECK(!encoded_frame->data.empty())
+ << "BUG: Encoder must provide data since lagged encoding is disabled.";
+
+ // Compute encoder utilization as the real-world time elapsed divided by the
+ // frame duration.
+ const base::TimeDelta processing_time = base::TimeTicks::Now() - start_time;
+ encoded_frame->encoder_utilization =
+ processing_time / predicted_frame_duration;
+
+ // Compute lossy utilization. The AV1 encoder took an estimated guess at what
+ // quantizer value would produce an encoded frame size as close to the target
+ // as possible. Now that the frame has been encoded and the number of bytes
+ // is known, the perfect quantizer value (i.e., the one that should have been
+ // used) can be determined. This perfect quantizer is then normalized and
+ // used as the lossy utilization.
+ const double actual_bitrate =
+ encoded_frame->data.size() * 8.0 / predicted_frame_duration.InSecondsF();
+ const double target_bitrate = 1000.0 * config_.rc_target_bitrate;
+ DCHECK_GT(target_bitrate, 0.0);
+ const double bitrate_utilization = actual_bitrate / target_bitrate;
+ int quantizer = -1;
+ CHECK_EQ(aom_codec_control(&encoder_, AOME_GET_LAST_QUANTIZER_64, &quantizer),
+ AOM_CODEC_OK);
+ const double perfect_quantizer = bitrate_utilization * std::max(0, quantizer);
+ // Side note: If it was possible for the encoder to encode within the target
+ // number of bytes, the |perfect_quantizer| will be in the range [0.0,63.0].
+ // If it was never possible, the value will be greater than 63.0.
+ encoded_frame->lossy_utilization = perfect_quantizer / 63.0;
+
+ DVLOG(2) << "AV1 encoded frame_id " << encoded_frame->frame_id
+ << ", sized: " << encoded_frame->data.size()
+ << ", encoder_utilization: " << encoded_frame->encoder_utilization
+ << ", lossy_utilization: " << encoded_frame->lossy_utilization
+ << " (quantizer chosen by the encoder was " << quantizer << ')';
+
+ if (encoded_frame->dependency == EncodedFrame::KEY) {
+ key_frame_requested_ = false;
+ encoding_speed_acc_.Reset(kHighestEncodingSpeed, video_frame->timestamp());
+ } else {
+ // Equivalent encoding speed considering both cpu_used setting and
+ // quantizer.
+ double actual_encoding_speed =
+ encoding_speed_ +
+ kEquivalentEncodingSpeedStepPerQpStep *
+ std::max(0, quantizer - cast_config_.video_codec_params.min_qp);
+ double adjusted_encoding_speed = actual_encoding_speed *
+ encoded_frame->encoder_utilization /
+ target_encoder_utilization_;
+ encoding_speed_acc_.Update(adjusted_encoding_speed,
+ video_frame->timestamp());
+ }
+
+ if (HasSufficientFeedback(encoding_speed_acc_)) {
+ // Predict |encoding_speed_| and |min_quantizer| for next frame.
+ // When CPU is constrained, increase encoding speed and increase
+ // |min_quantizer| if needed.
+ double next_encoding_speed = encoding_speed_acc_.current();
+ int next_min_qp;
+ if (next_encoding_speed > kHighestEncodingSpeed) {
+ double remainder = next_encoding_speed - kHighestEncodingSpeed;
+ next_encoding_speed = kHighestEncodingSpeed;
+ next_min_qp =
+ static_cast<int>(remainder / kEquivalentEncodingSpeedStepPerQpStep +
+ cast_config_.video_codec_params.min_qp + 0.5);
+ next_min_qp = std::min(next_min_qp,
+ cast_config_.video_codec_params.max_cpu_saver_qp);
+ } else {
+ next_encoding_speed =
+ std::max<double>(kLowestEncodingSpeed, next_encoding_speed) + 0.5;
+ next_min_qp = cast_config_.video_codec_params.min_qp;
+ }
+ if (encoding_speed_ != static_cast<int>(next_encoding_speed)) {
+ encoding_speed_ = static_cast<int>(next_encoding_speed);
+ CHECK_EQ(aom_codec_control(&encoder_, AOME_SET_CPUUSED, encoding_speed_),
+ AOM_CODEC_OK);
+ }
+ if (config_.rc_min_quantizer != static_cast<unsigned int>(next_min_qp)) {
+ config_.rc_min_quantizer = static_cast<unsigned int>(next_min_qp);
+ CHECK_EQ(aom_codec_enc_config_set(&encoder_, &config_), AOM_CODEC_OK);
+ }
+ }
+}
+
+void Av1Encoder::UpdateRates(uint32_t new_bitrate) {
+ DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
+
+ if (!is_initialized())
+ return;
+
+ uint32_t new_bitrate_kbit = new_bitrate / 1000;
+ if (config_.rc_target_bitrate == new_bitrate_kbit)
+ return;
+
+ config_.rc_target_bitrate = bitrate_kbit_ = new_bitrate_kbit;
+
+ // Update encoder context.
+ if (aom_codec_enc_config_set(&encoder_, &config_)) {
+ NOTREACHED() << "Invalid return value";
+ }
+
+ VLOG(1) << "AV1 new rc_target_bitrate: " << new_bitrate_kbit << " kbps";
+}
+
+void Av1Encoder::GenerateKeyFrame() {
+ DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
+ key_frame_requested_ = true;
+}
+
+} // namespace cast
+} // namespace media
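
The encoder-tuning logic in Encode() above (mirrored in the VPx encoder further down) converts per-frame feedback into the next |cpu_used| speed and, when the CPU is still saturated, a raised minimum quantizer: the quantizer headroom is folded into an equivalent encoding speed, scaled by how far the measured encoder utilization sits from the target, time-averaged, and then split back into the two knobs. A standalone sketch of that arithmetic with plain doubles; the FeedbackSignalAccumulator time-averaging and the libaom calls are deliberately omitted:

#include <algorithm>
#include <utility>

constexpr int kHighestEncodingSpeed = 9;
constexpr int kLowestEncodingSpeed = 0;
// Mirrors kEquivalentEncodingSpeedStepPerQpStep in av1_encoder.cc.
constexpr double kSpeedStepPerQpStep = 1 / 20.0;

// Returns {next_encoding_speed, next_min_qp} from one frame's feedback.
std::pair<int, int> PredictNextSettings(int current_speed,
                                        int last_quantizer,
                                        double encoder_utilization,
                                        double target_utilization,
                                        int min_qp,
                                        int max_cpu_saver_qp) {
  // Fold the quantizer headroom into an "equivalent" encoding speed, then
  // scale by how far the measured utilization sits from the target.
  const double actual_speed =
      current_speed +
      kSpeedStepPerQpStep * std::max(0, last_quantizer - min_qp);
  double next_speed = actual_speed * encoder_utilization / target_utilization;

  int next_min_qp = min_qp;
  if (next_speed > kHighestEncodingSpeed) {
    // Speed is maxed out: convert the remainder into a higher minimum
    // quantizer, capped at the CPU-saver limit.
    const double remainder = next_speed - kHighestEncodingSpeed;
    next_speed = kHighestEncodingSpeed;
    next_min_qp = std::min(
        static_cast<int>(remainder / kSpeedStepPerQpStep + min_qp + 0.5),
        max_cpu_saver_qp);
  } else {
    next_speed = std::max<double>(kLowestEncodingSpeed, next_speed) + 0.5;
  }
  return {static_cast<int>(next_speed), next_min_qp};
}

For example, with a current speed of 6, a quantizer 10 steps above min_qp, a measured utilization of 0.84 against a target of 0.7, the equivalent speed is 6.5, the scaled prediction is 7.8, and the sketch returns speed 8 with min_qp unchanged.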
diff --git a/chromium/media/cast/sender/av1_encoder.h b/chromium/media/cast/sender/av1_encoder.h
new file mode 100644
index 00000000000..56f2d3e13ee
--- /dev/null
+++ b/chromium/media/cast/sender/av1_encoder.h
@@ -0,0 +1,90 @@
+// Copyright 2021 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_CAST_SENDER_AV1_ENCODER_H_
+#define MEDIA_CAST_SENDER_AV1_ENCODER_H_
+
+#include <stdint.h>
+
+#include "base/macros.h"
+#include "base/threading/thread_checker.h"
+#include "media/base/feedback_signal_accumulator.h"
+#include "media/cast/cast_config.h"
+#include "media/cast/sender/software_video_encoder.h"
+#include "third_party/libaom/source/libaom/aom/aom_encoder.h"
+
+#include "ui/gfx/geometry/size.h"
+
+namespace media {
+class VideoFrame;
+}
+
+namespace media {
+namespace cast {
+
+class Av1Encoder final : public SoftwareVideoEncoder {
+ public:
+ explicit Av1Encoder(const FrameSenderConfig& video_config);
+
+ ~Av1Encoder() final;
+
+ // SoftwareVideoEncoder implementations.
+ void Initialize() final;
+ void Encode(scoped_refptr<media::VideoFrame> video_frame,
+ base::TimeTicks reference_time,
+ SenderEncodedFrame* encoded_frame) final;
+ void UpdateRates(uint32_t new_bitrate) final;
+ void GenerateKeyFrame() final;
+
+ private:
+ bool is_initialized() const {
+ // ConfigureForNewFrameSize() sets the timebase denominator value to
+ // non-zero if the encoder is successfully initialized, and it is zero
+ // otherwise.
+ return config_.g_timebase.den != 0;
+ }
+
+ // If the |encoder_| is live, attempt reconfiguration to allow it to encode
+ // frames at a new |frame_size|. Otherwise, tear it down and re-create a new
+ // |encoder_| instance.
+ void ConfigureForNewFrameSize(const gfx::Size& frame_size);
+
+ const FrameSenderConfig cast_config_;
+
+ const double target_encoder_utilization_;
+
+ // AV1 internal objects. These are valid for use only while is_initialized()
+ // returns true.
+ aom_codec_enc_cfg_t config_;
+ aom_codec_ctx_t encoder_;
+
+ // Set to true to request the next frame emitted by Av1Encoder be a key frame.
+ bool key_frame_requested_;
+
+ // Saves the current bitrate setting, for when the |encoder_| is reconfigured
+ // for different frame sizes.
+ int bitrate_kbit_;
+
+ // The |VideoFrame::timestamp()| of the last encoded frame. This is used to
+ // predict the duration of the next frame.
+ base::TimeDelta last_frame_timestamp_;
+
+ // The ID for the next frame to be emitted.
+ FrameId next_frame_id_;
+
+ // This is bound to the thread where Initialize() is called.
+ THREAD_CHECKER(thread_checker_);
+
+ // The accumulator (time averaging) of the encoding speed.
+ FeedbackSignalAccumulator<base::TimeDelta> encoding_speed_acc_;
+
+  // The higher the speed, the lower the CPU usage and the lower the quality.
+  // The valid range is [0, 9].
+ int encoding_speed_;
+};
+
+} // namespace cast
+} // namespace media
+
+#endif // MEDIA_CAST_SENDER_AV1_ENCODER_H_
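
For context, a rough sketch of how this SoftwareVideoEncoder contract is exercised: construct with a FrameSenderConfig, call Initialize() once on the video-encoder thread, then call Encode() per frame with an output SenderEncodedFrame (in production this is driven through VideoEncoderImpl, as shown later in this patch). The black test frame and the include path for sender_encoded_frame.h are illustrative assumptions, not taken from this change:

#include <utility>

#include "base/time/time.h"
#include "media/base/video_frame.h"
#include "media/cast/cast_config.h"
#include "media/cast/sender/av1_encoder.h"
#include "media/cast/sender/sender_encoded_frame.h"
#include "ui/gfx/geometry/size.h"

void EncodeOneFrameSketch(const media::cast::FrameSenderConfig& config) {
  media::cast::Av1Encoder encoder(config);
  encoder.Initialize();  // Must happen on the video-encoder thread.

  // Placeholder input; real callers pass frames from the capture pipeline.
  scoped_refptr<media::VideoFrame> frame =
      media::VideoFrame::CreateBlackFrame(gfx::Size(640, 360));

  media::cast::SenderEncodedFrame encoded;
  encoder.Encode(std::move(frame), base::TimeTicks::Now(), &encoded);
  // encoded.data now holds the AV1 payload; the first frame comes out as a
  // key frame because key_frame_requested_ starts out true.
}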
diff --git a/chromium/media/cast/sender/congestion_control.cc b/chromium/media/cast/sender/congestion_control.cc
index 6d872e81e48..3609f73e551 100644
--- a/chromium/media/cast/sender/congestion_control.cc
+++ b/chromium/media/cast/sender/congestion_control.cc
@@ -35,6 +35,10 @@ class AdaptiveCongestionControl final : public CongestionControl {
int min_bitrate_configured,
double max_frame_rate);
+ AdaptiveCongestionControl(const AdaptiveCongestionControl&) = delete;
+ AdaptiveCongestionControl& operator=(const AdaptiveCongestionControl&) =
+ delete;
+
~AdaptiveCongestionControl() final;
// CongestionControl implementation.
@@ -100,13 +104,15 @@ class AdaptiveCongestionControl final : public CongestionControl {
size_t history_size_;
size_t acked_bits_in_history_;
base::TimeDelta dead_time_in_history_;
-
- DISALLOW_COPY_AND_ASSIGN(AdaptiveCongestionControl);
};
class FixedCongestionControl final : public CongestionControl {
public:
explicit FixedCongestionControl(int bitrate) : bitrate_(bitrate) {}
+
+ FixedCongestionControl(const FixedCongestionControl&) = delete;
+ FixedCongestionControl& operator=(const FixedCongestionControl&) = delete;
+
~FixedCongestionControl() final = default;
// CongestionControl implementation.
@@ -125,8 +131,6 @@ class FixedCongestionControl final : public CongestionControl {
private:
const int bitrate_;
-
- DISALLOW_COPY_AND_ASSIGN(FixedCongestionControl);
};
CongestionControl* NewAdaptiveCongestionControl(const base::TickClock* clock,
@@ -217,7 +221,7 @@ double AdaptiveCongestionControl::CalculateSafeBitrate() {
if (acked_bits_in_history_ == 0 || transmit_time <= base::TimeDelta()) {
return min_bitrate_configured_;
}
- transmit_time = std::max(transmit_time, base::TimeDelta::FromMilliseconds(1));
+ transmit_time = std::max(transmit_time, base::Milliseconds(1));
return acked_bits_in_history_ / transmit_time.InSecondsF();
}
@@ -358,8 +362,8 @@ base::TimeTicks AdaptiveCongestionControl::EstimatedSendingTime(
// ~RTT/2 amount of time to travel to the receiver. Finally, the ACK from
// the receiver is sent and this takes another ~RTT/2 amount of time to
// reach the sender.
- const base::TimeDelta frame_transmit_time = base::TimeDelta::FromSecondsD(
- stats->frame_size_in_bits / estimated_bitrate);
+ const base::TimeDelta frame_transmit_time =
+ base::Seconds(stats->frame_size_in_bits / estimated_bitrate);
estimated_ack_time = std::max(estimated_sending_time, stats->enqueue_time) +
frame_transmit_time + rtt_;
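
The estimate above boils down to frame_transmit_time = frame_size_in_bits / estimated_bitrate, plus one full RTT for the packets to reach the receiver and the ACK to come back. A quick worked sketch of the arithmetic; the frame size matches the unit test below, while the bitrate and RTT are made up for illustration:

#include <cstdio>

int main() {
  const double frame_size_in_bits = 10000 * 8;  // 10 KB frame, as in SimpleRun.
  const double estimated_bitrate = 2000000.0;   // Assumed 2 Mbps.
  const double rtt_seconds = 0.030;             // Assumed 30 ms round trip.

  const double frame_transmit_time = frame_size_in_bits / estimated_bitrate;
  // 80000 / 2000000 = 0.04 s, so the ACK is expected ~70 ms after enqueue.
  std::printf("transmit=%.3fs ack=%.3fs\n", frame_transmit_time,
              frame_transmit_time + rtt_seconds);
  return 0;
}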
diff --git a/chromium/media/cast/sender/congestion_control_unittest.cc b/chromium/media/cast/sender/congestion_control_unittest.cc
index 34e35e0533e..ae88b260447 100644
--- a/chromium/media/cast/sender/congestion_control_unittest.cc
+++ b/chromium/media/cast/sender/congestion_control_unittest.cc
@@ -29,15 +29,13 @@ class CongestionControlTest : public ::testing::Test {
protected:
CongestionControlTest()
: task_runner_(new FakeSingleThreadTaskRunner(&testing_clock_)) {
- testing_clock_.Advance(
- base::TimeDelta::FromMilliseconds(kStartMillisecond));
+ testing_clock_.Advance(base::Milliseconds(kStartMillisecond));
congestion_control_.reset(NewAdaptiveCongestionControl(
&testing_clock_, kMaxBitrateConfigured, kMinBitrateConfigured,
kMaxFrameRate));
const int max_unacked_frames = 10;
const base::TimeDelta target_playout_delay =
- (max_unacked_frames - 1) * base::TimeDelta::FromSeconds(1) /
- kMaxFrameRate;
+ (max_unacked_frames - 1) * base::Seconds(1) / kMaxFrameRate;
congestion_control_->UpdateTargetPlayoutDelay(target_playout_delay);
}
@@ -77,31 +75,28 @@ class CongestionControlTest : public ::testing::Test {
// "target buffer fill" model).
TEST_F(CongestionControlTest, SimpleRun) {
uint32_t frame_size = 10000 * 8;
- Run(500,
- frame_size,
- base::TimeDelta::FromMilliseconds(10),
- base::TimeDelta::FromMilliseconds(kFrameDelayMs),
- base::TimeDelta::FromMilliseconds(45));
+ Run(500, frame_size, base::Milliseconds(10),
+ base::Milliseconds(kFrameDelayMs), base::Milliseconds(45));
// Empty the buffer.
- task_runner_->Sleep(base::TimeDelta::FromMilliseconds(100));
+ task_runner_->Sleep(base::Milliseconds(100));
uint32_t safe_bitrate = frame_size * 1000 / kFrameDelayMs;
uint32_t bitrate = congestion_control_->GetBitrate(
- testing_clock_.NowTicks() + base::TimeDelta::FromMilliseconds(300),
- base::TimeDelta::FromMilliseconds(300));
+ testing_clock_.NowTicks() + base::Milliseconds(300),
+ base::Milliseconds(300));
EXPECT_NEAR(
safe_bitrate / kTargetEmptyBufferFraction, bitrate, safe_bitrate * 0.05);
bitrate = congestion_control_->GetBitrate(
- testing_clock_.NowTicks() + base::TimeDelta::FromMilliseconds(200),
- base::TimeDelta::FromMilliseconds(300));
+ testing_clock_.NowTicks() + base::Milliseconds(200),
+ base::Milliseconds(300));
EXPECT_NEAR(safe_bitrate / kTargetEmptyBufferFraction * 2 / 3,
bitrate,
safe_bitrate * 0.05);
bitrate = congestion_control_->GetBitrate(
- testing_clock_.NowTicks() + base::TimeDelta::FromMilliseconds(100),
- base::TimeDelta::FromMilliseconds(300));
+ testing_clock_.NowTicks() + base::Milliseconds(100),
+ base::Milliseconds(300));
EXPECT_NEAR(safe_bitrate / kTargetEmptyBufferFraction * 1 / 3,
bitrate,
safe_bitrate * 0.05);
@@ -112,8 +107,8 @@ TEST_F(CongestionControlTest, SimpleRun) {
// Results should show that we have ~200ms to send.
bitrate = congestion_control_->GetBitrate(
- testing_clock_.NowTicks() + base::TimeDelta::FromMilliseconds(300),
- base::TimeDelta::FromMilliseconds(300));
+ testing_clock_.NowTicks() + base::Milliseconds(300),
+ base::Milliseconds(300));
EXPECT_NEAR(safe_bitrate / kTargetEmptyBufferFraction * 2 / 3,
bitrate,
safe_bitrate * 0.05);
@@ -124,8 +119,8 @@ TEST_F(CongestionControlTest, SimpleRun) {
// Results should show that we have ~100ms to send.
bitrate = congestion_control_->GetBitrate(
- testing_clock_.NowTicks() + base::TimeDelta::FromMilliseconds(300),
- base::TimeDelta::FromMilliseconds(300));
+ testing_clock_.NowTicks() + base::Milliseconds(300),
+ base::Milliseconds(300));
EXPECT_NEAR(safe_bitrate / kTargetEmptyBufferFraction * 1 / 3,
bitrate,
safe_bitrate * 0.05);
@@ -138,8 +133,8 @@ TEST_F(CongestionControlTest, SimpleRun) {
// Results should show that we have ~200ms to send.
bitrate = congestion_control_->GetBitrate(
- testing_clock_.NowTicks() + base::TimeDelta::FromMilliseconds(300),
- base::TimeDelta::FromMilliseconds(300));
+ testing_clock_.NowTicks() + base::Milliseconds(300),
+ base::Milliseconds(300));
EXPECT_NEAR(safe_bitrate / kTargetEmptyBufferFraction * 2 / 3, bitrate,
safe_bitrate * 0.05);
}
@@ -148,11 +143,10 @@ TEST_F(CongestionControlTest, SimpleRun) {
// history is maintained in AdaptiveCongestionControl to avoid invalid
// indexing offsets. This test is successful if it does not crash the process.
TEST_F(CongestionControlTest, RetainsSufficientHistory) {
- constexpr base::TimeDelta kFakePlayoutDelay =
- base::TimeDelta::FromMilliseconds(400);
+ constexpr base::TimeDelta kFakePlayoutDelay = base::Milliseconds(400);
// Sanity-check: With no data, GetBitrate() returns an in-range value.
- const int bitrate = congestion_control_->GetBitrate(
+ int bitrate = congestion_control_->GetBitrate(
testing_clock_.NowTicks() + kFakePlayoutDelay, kFakePlayoutDelay);
ASSERT_GE(bitrate, kMinBitrateConfigured);
ASSERT_LE(bitrate, kMaxBitrateConfigured);
@@ -165,12 +159,12 @@ TEST_F(CongestionControlTest, RetainsSufficientHistory) {
congestion_control_->SendFrameToTransport(frame_id, 16384,
testing_clock_.NowTicks());
- const int bitrate = congestion_control_->GetBitrate(
+ bitrate = congestion_control_->GetBitrate(
testing_clock_.NowTicks() + kFakePlayoutDelay, kFakePlayoutDelay);
ASSERT_GE(bitrate, kMinBitrateConfigured);
ASSERT_LE(bitrate, kMaxBitrateConfigured);
- task_runner_->Sleep(base::TimeDelta::FromMilliseconds(kFrameDelayMs));
+ task_runner_->Sleep(base::Milliseconds(kFrameDelayMs));
++frame_id;
}
@@ -180,12 +174,12 @@ TEST_F(CongestionControlTest, RetainsSufficientHistory) {
for (int i = 0; i < kMaxUnackedFrames; ++i) {
congestion_control_->AckFrame(frame_id, testing_clock_.NowTicks());
- const int bitrate = congestion_control_->GetBitrate(
+ bitrate = congestion_control_->GetBitrate(
testing_clock_.NowTicks() + kFakePlayoutDelay, kFakePlayoutDelay);
ASSERT_GE(bitrate, kMinBitrateConfigured);
ASSERT_LE(bitrate, kMaxBitrateConfigured);
- task_runner_->Sleep(base::TimeDelta::FromMilliseconds(kFrameDelayMs));
+ task_runner_->Sleep(base::Milliseconds(kFrameDelayMs));
++frame_id;
}
}
diff --git a/chromium/media/cast/sender/external_video_encoder.cc b/chromium/media/cast/sender/external_video_encoder.cc
index 7a7daab4728..3644e1dbf5c 100644
--- a/chromium/media/cast/sender/external_video_encoder.cc
+++ b/chromium/media/cast/sender/external_video_encoder.cc
@@ -28,7 +28,7 @@
#include "media/cast/common/rtp_time.h"
#include "media/cast/logging/logging_defines.h"
#include "media/cast/net/cast_transport_config.h"
-#include "media/cast/sender/vp8_quantizer_parser.h"
+#include "media/cast/sender/vpx_quantizer_parser.h"
#include "media/video/h264_parser.h"
namespace {
@@ -397,7 +397,7 @@ class ExternalVideoEncoder::VEAClientImpl final
// and all the following delta frames.
if (metadata.key_frame || key_frame_quantizer_parsable_) {
if (codec_profile_ == media::VP8PROFILE_ANY) {
- quantizer = ParseVp8HeaderQuantizer(
+ quantizer = ParseVpxHeaderQuantizer(
reinterpret_cast<const uint8_t*>(encoded_frame->data.data()),
encoded_frame->data.size());
} else if (codec_profile_ == media::H264PROFILE_MAIN) {
diff --git a/chromium/media/cast/sender/external_video_encoder.h b/chromium/media/cast/sender/external_video_encoder.h
index be6878aec8d..66d135fe42f 100644
--- a/chromium/media/cast/sender/external_video_encoder.h
+++ b/chromium/media/cast/sender/external_video_encoder.h
@@ -38,6 +38,9 @@ class ExternalVideoEncoder final : public VideoEncoder {
StatusChangeCallback status_change_cb,
const CreateVideoEncodeAcceleratorCallback& create_vea_cb);
+ ExternalVideoEncoder(const ExternalVideoEncoder&) = delete;
+ ExternalVideoEncoder& operator=(const ExternalVideoEncoder&) = delete;
+
~ExternalVideoEncoder() final;
// VideoEncoder implementation.
@@ -77,8 +80,6 @@ class ExternalVideoEncoder final : public VideoEncoder {
// Provides a weak pointer for the OnCreateVideoEncoderAccelerator() callback.
// NOTE: Weak pointers must be invalidated before all other member variables.
base::WeakPtrFactory<ExternalVideoEncoder> weak_factory_{this};
-
- DISALLOW_COPY_AND_ASSIGN(ExternalVideoEncoder);
};
// An implementation of SizeAdaptableVideoEncoderBase to proxy for
@@ -92,6 +93,11 @@ class SizeAdaptableExternalVideoEncoder final
StatusChangeCallback status_change_cb,
const CreateVideoEncodeAcceleratorCallback& create_vea_cb);
+ SizeAdaptableExternalVideoEncoder(const SizeAdaptableExternalVideoEncoder&) =
+ delete;
+ SizeAdaptableExternalVideoEncoder& operator=(
+ const SizeAdaptableExternalVideoEncoder&) = delete;
+
~SizeAdaptableExternalVideoEncoder() final;
protected:
@@ -100,8 +106,6 @@ class SizeAdaptableExternalVideoEncoder final
private:
// Special callbacks needed by media::cast::ExternalVideoEncoder.
const CreateVideoEncodeAcceleratorCallback create_vea_cb_;
-
- DISALLOW_COPY_AND_ASSIGN(SizeAdaptableExternalVideoEncoder);
};
// A utility class for examining the sequence of frames sent to an external
@@ -117,6 +121,10 @@ class QuantizerEstimator {
};
QuantizerEstimator();
+
+ QuantizerEstimator(const QuantizerEstimator&) = delete;
+ QuantizerEstimator& operator=(const QuantizerEstimator&) = delete;
+
~QuantizerEstimator();
// Discard any state related to the processing of prior frames.
@@ -148,8 +156,6 @@ class QuantizerEstimator {
// turn is used to compute the entropy and quantizer.
std::unique_ptr<uint8_t[]> last_frame_pixel_buffer_;
gfx::Size last_frame_size_;
-
- DISALLOW_COPY_AND_ASSIGN(QuantizerEstimator);
};
} // namespace cast
diff --git a/chromium/media/cast/sender/fake_software_video_encoder.cc b/chromium/media/cast/sender/fake_software_video_encoder.cc
index 65f44b8bd1a..06eb64d76fa 100644
--- a/chromium/media/cast/sender/fake_software_video_encoder.cc
+++ b/chromium/media/cast/sender/fake_software_video_encoder.cc
@@ -32,7 +32,7 @@ void FakeSoftwareVideoEncoder::Initialize() {}
void FakeSoftwareVideoEncoder::Encode(
scoped_refptr<media::VideoFrame> video_frame,
- const base::TimeTicks& reference_time,
+ base::TimeTicks reference_time,
SenderEncodedFrame* encoded_frame) {
DCHECK(encoded_frame);
diff --git a/chromium/media/cast/sender/fake_software_video_encoder.h b/chromium/media/cast/sender/fake_software_video_encoder.h
index aafc07938eb..6b515270b67 100644
--- a/chromium/media/cast/sender/fake_software_video_encoder.h
+++ b/chromium/media/cast/sender/fake_software_video_encoder.h
@@ -22,7 +22,7 @@ class FakeSoftwareVideoEncoder final : public SoftwareVideoEncoder {
// SoftwareVideoEncoder implementations.
void Initialize() final;
void Encode(scoped_refptr<media::VideoFrame> video_frame,
- const base::TimeTicks& reference_time,
+ base::TimeTicks reference_time,
SenderEncodedFrame* encoded_frame) final;
void UpdateRates(uint32_t new_bitrate) final;
void GenerateKeyFrame() final;
diff --git a/chromium/media/cast/sender/fake_video_encode_accelerator_factory.h b/chromium/media/cast/sender/fake_video_encode_accelerator_factory.h
index 648ee27ed91..577a7245598 100644
--- a/chromium/media/cast/sender/fake_video_encode_accelerator_factory.h
+++ b/chromium/media/cast/sender/fake_video_encode_accelerator_factory.h
@@ -26,6 +26,12 @@ class FakeVideoEncodeAcceleratorFactory {
public:
explicit FakeVideoEncodeAcceleratorFactory(
const scoped_refptr<base::SingleThreadTaskRunner>& task_runner);
+
+ FakeVideoEncodeAcceleratorFactory(const FakeVideoEncodeAcceleratorFactory&) =
+ delete;
+ FakeVideoEncodeAcceleratorFactory& operator=(
+ const FakeVideoEncodeAcceleratorFactory&) = delete;
+
~FakeVideoEncodeAcceleratorFactory();
int vea_response_count() const {
@@ -56,8 +62,6 @@ class FakeVideoEncodeAcceleratorFactory {
std::unique_ptr<media::VideoEncodeAccelerator> next_response_vea_;
ReceiveVideoEncodeAcceleratorCallback vea_response_callback_;
int vea_response_count_ = 0;
-
- DISALLOW_COPY_AND_ASSIGN(FakeVideoEncodeAcceleratorFactory);
};
} // namespace cast
diff --git a/chromium/media/cast/sender/frame_sender.cc b/chromium/media/cast/sender/frame_sender.cc
index 79c13d3abf1..97840aae8b1 100644
--- a/chromium/media/cast/sender/frame_sender.cc
+++ b/chromium/media/cast/sender/frame_sender.cc
@@ -23,10 +23,8 @@ namespace cast {
namespace {
constexpr int kNumAggressiveReportsSentAtStart = 100;
-constexpr base::TimeDelta kMinSchedulingDelay =
- base::TimeDelta::FromMilliseconds(1);
-constexpr base::TimeDelta kReceiverProcessTime =
- base::TimeDelta::FromMilliseconds(250);
+constexpr base::TimeDelta kMinSchedulingDelay = base::Milliseconds(1);
+constexpr base::TimeDelta kReceiverProcessTime = base::Milliseconds(250);
// The additional number of frames that can be in-flight when input exceeds the
// maximum frame rate.
@@ -113,7 +111,7 @@ void FrameSender::ScheduleNextRtcpReport() {
CastEnvironment::MAIN, FROM_HERE,
base::BindOnce(&FrameSender::SendRtcpReport, weak_factory_.GetWeakPtr(),
true),
- base::TimeDelta::FromMilliseconds(kRtcpReportIntervalMs));
+ base::Milliseconds(kRtcpReportIntervalMs));
}
void FrameSender::SendRtcpReport(bool schedule_future_reports) {
diff --git a/chromium/media/cast/sender/frame_sender.h b/chromium/media/cast/sender/frame_sender.h
index 16bceb655c3..34629b47612 100644
--- a/chromium/media/cast/sender/frame_sender.h
+++ b/chromium/media/cast/sender/frame_sender.h
@@ -30,6 +30,10 @@ class FrameSender {
CastTransport* const transport_sender,
const FrameSenderConfig& config,
CongestionControl* congestion_control);
+
+ FrameSender(const FrameSender&) = delete;
+ FrameSender& operator=(const FrameSender&) = delete;
+
virtual ~FrameSender();
int rtp_timebase() const { return rtp_timebase_; }
@@ -202,8 +206,6 @@ class FrameSender {
// NOTE: Weak pointers must be invalidated before all other member variables.
base::WeakPtrFactory<FrameSender> weak_factory_{this};
-
- DISALLOW_COPY_AND_ASSIGN(FrameSender);
};
} // namespace cast
diff --git a/chromium/media/cast/sender/h264_vt_encoder.h b/chromium/media/cast/sender/h264_vt_encoder.h
index 928cedd7bb4..0db504341da 100644
--- a/chromium/media/cast/sender/h264_vt_encoder.h
+++ b/chromium/media/cast/sender/h264_vt_encoder.h
@@ -34,6 +34,10 @@ class H264VideoToolboxEncoder final : public VideoEncoder,
const scoped_refptr<CastEnvironment>& cast_environment,
const FrameSenderConfig& video_config,
StatusChangeCallback status_change_cb);
+
+ H264VideoToolboxEncoder(const H264VideoToolboxEncoder&) = delete;
+ H264VideoToolboxEncoder& operator=(const H264VideoToolboxEncoder&) = delete;
+
~H264VideoToolboxEncoder() override;
// media::cast::VideoEncoder implementation
@@ -112,8 +116,6 @@ class H264VideoToolboxEncoder final : public VideoEncoder,
// NOTE: Weak pointers must be invalidated before all other member variables.
base::WeakPtrFactory<H264VideoToolboxEncoder> weak_factory_;
-
- DISALLOW_COPY_AND_ASSIGN(H264VideoToolboxEncoder);
};
} // namespace cast
diff --git a/chromium/media/cast/sender/h264_vt_encoder_unittest.cc b/chromium/media/cast/sender/h264_vt_encoder_unittest.cc
index be544db78c2..ea53faf7cfe 100644
--- a/chromium/media/cast/sender/h264_vt_encoder_unittest.cc
+++ b/chromium/media/cast/sender/h264_vt_encoder_unittest.cc
@@ -232,9 +232,8 @@ class H264VideoToolboxEncoderTest : public ::testing::Test {
}
void AdvanceClockAndVideoFrameTimestamp() {
- clock_.Advance(base::TimeDelta::FromMilliseconds(33));
- frame_->set_timestamp(frame_->timestamp() +
- base::TimeDelta::FromMilliseconds(33));
+ clock_.Advance(base::Milliseconds(33));
+ frame_->set_timestamp(frame_->timestamp() + base::Milliseconds(33));
}
static void SetUpTestCase() {
@@ -306,7 +305,7 @@ TEST_F(H264VideoToolboxEncoderTest, DISABLED_CheckFramesAreDecodable) {
? VideoDecoderConfig::AlphaMode::kIsOpaque
: VideoDecoderConfig::AlphaMode::kHasAlpha;
VideoDecoderConfig config(
- kCodecH264, H264PROFILE_MAIN, alpha_mode, VideoColorSpace(),
+ VideoCodec::kH264, H264PROFILE_MAIN, alpha_mode, VideoColorSpace(),
kNoTransformation, frame_->coded_size(), frame_->visible_rect(),
frame_->natural_size(), EmptyExtraData(), EncryptionScheme::kUnencrypted);
scoped_refptr<EndToEndFrameChecker> checker(new EndToEndFrameChecker(config));
diff --git a/chromium/media/cast/sender/performance_metrics_overlay.cc b/chromium/media/cast/sender/performance_metrics_overlay.cc
index 0bc1e9770a3..2ff30e943fe 100644
--- a/chromium/media/cast/sender/performance_metrics_overlay.cc
+++ b/chromium/media/cast/sender/performance_metrics_overlay.cc
@@ -290,9 +290,9 @@ scoped_refptr<VideoFrame> MaybeRenderPerformanceMetricsOverlay(
}
base::TimeDelta rem = frame->timestamp();
const int minutes = rem.InMinutes();
- rem -= base::TimeDelta::FromMinutes(minutes);
+ rem -= base::Minutes(minutes);
const int seconds = static_cast<int>(rem.InSeconds());
- rem -= base::TimeDelta::FromSeconds(seconds);
+ rem -= base::Seconds(seconds);
const int hundredth_seconds = static_cast<int>(rem.InMilliseconds() / 10);
RenderLineOfText(
base::StringPrintf("%d.%01d %dx%d %d:%02d.%02d", frame_duration_ms,
diff --git a/chromium/media/cast/sender/size_adaptable_video_encoder_base.h b/chromium/media/cast/sender/size_adaptable_video_encoder_base.h
index 70c45f605a5..335b8395b89 100644
--- a/chromium/media/cast/sender/size_adaptable_video_encoder_base.h
+++ b/chromium/media/cast/sender/size_adaptable_video_encoder_base.h
@@ -33,6 +33,10 @@ class SizeAdaptableVideoEncoderBase : public VideoEncoder {
const FrameSenderConfig& video_config,
StatusChangeCallback status_change_cb);
+ SizeAdaptableVideoEncoderBase(const SizeAdaptableVideoEncoderBase&) = delete;
+ SizeAdaptableVideoEncoderBase& operator=(
+ const SizeAdaptableVideoEncoderBase&) = delete;
+
~SizeAdaptableVideoEncoderBase() override;
// VideoEncoder implementation.
@@ -109,8 +113,6 @@ class SizeAdaptableVideoEncoderBase : public VideoEncoder {
// NOTE: Weak pointers must be invalidated before all other member variables.
base::WeakPtrFactory<SizeAdaptableVideoEncoderBase> weak_factory_{this};
-
- DISALLOW_COPY_AND_ASSIGN(SizeAdaptableVideoEncoderBase);
};
} // namespace cast
diff --git a/chromium/media/cast/sender/software_video_encoder.h b/chromium/media/cast/sender/software_video_encoder.h
index b73afa0c0fd..53d817f9cec 100644
--- a/chromium/media/cast/sender/software_video_encoder.h
+++ b/chromium/media/cast/sender/software_video_encoder.h
@@ -31,7 +31,7 @@ class SoftwareVideoEncoder {
// Encode a raw image (as a part of a video stream).
virtual void Encode(scoped_refptr<media::VideoFrame> video_frame,
- const base::TimeTicks& reference_time,
+ base::TimeTicks reference_time,
SenderEncodedFrame* encoded_frame) = 0;
// Update the encoder with a new target bit rate.
diff --git a/chromium/media/cast/sender/video_encoder_impl.cc b/chromium/media/cast/sender/video_encoder_impl.cc
index 17005ba3138..a8805fd7b54 100644
--- a/chromium/media/cast/sender/video_encoder_impl.cc
+++ b/chromium/media/cast/sender/video_encoder_impl.cc
@@ -3,6 +3,7 @@
// found in the LICENSE file.
#include "media/cast/sender/video_encoder_impl.h"
+#include "third_party/libaom/libaom_buildflags.h"
#include <utility>
@@ -11,8 +12,11 @@
#include "base/callback_helpers.h"
#include "base/check.h"
#include "media/base/video_frame.h"
+#if BUILDFLAG(ENABLE_LIBAOM)
+#include "media/cast/sender/av1_encoder.h"
+#endif
#include "media/cast/sender/fake_software_video_encoder.h"
-#include "media/cast/sender/vp8_encoder.h"
+#include "media/cast/sender/vpx_encoder.h"
namespace media {
namespace cast {
@@ -55,7 +59,9 @@ bool VideoEncoderImpl::IsSupported(const FrameSenderConfig& video_config) {
return true;
}
#endif
- return video_config.codec == CODEC_VIDEO_VP8;
+ return video_config.codec == CODEC_VIDEO_VP8 ||
+ video_config.codec == CODEC_VIDEO_VP9 ||
+ video_config.codec == CODEC_VIDEO_AV1;
}
VideoEncoderImpl::VideoEncoderImpl(
@@ -66,8 +72,9 @@ VideoEncoderImpl::VideoEncoderImpl(
CHECK(cast_environment_->HasVideoThread());
DCHECK(status_change_cb);
- if (video_config.codec == CODEC_VIDEO_VP8) {
- encoder_ = std::make_unique<Vp8Encoder>(video_config);
+ if (video_config.codec == CODEC_VIDEO_VP8 ||
+ video_config.codec == CODEC_VIDEO_VP9) {
+ encoder_ = std::make_unique<VpxEncoder>(video_config);
cast_environment_->PostTask(
CastEnvironment::VIDEO, FROM_HERE,
base::BindOnce(&InitializeEncoderOnEncoderThread, cast_environment,
@@ -76,6 +83,14 @@ VideoEncoderImpl::VideoEncoderImpl(
} else if (video_config.codec == CODEC_VIDEO_FAKE) {
encoder_ = std::make_unique<FakeSoftwareVideoEncoder>(video_config);
#endif
+#if BUILDFLAG(ENABLE_LIBAOM)
+ } else if (video_config.codec == CODEC_VIDEO_AV1) {
+ encoder_ = std::make_unique<Av1Encoder>(video_config);
+ cast_environment_->PostTask(
+ CastEnvironment::VIDEO, FROM_HERE,
+ base::BindOnce(&InitializeEncoderOnEncoderThread, cast_environment,
+ encoder_.get()));
+#endif
} else {
DCHECK(false) << "Invalid config"; // Codec not supported.
}
diff --git a/chromium/media/cast/sender/video_encoder_impl.h b/chromium/media/cast/sender/video_encoder_impl.h
index ae39c398b45..d16e4f57816 100644
--- a/chromium/media/cast/sender/video_encoder_impl.h
+++ b/chromium/media/cast/sender/video_encoder_impl.h
@@ -34,6 +34,9 @@ class VideoEncoderImpl final : public VideoEncoder {
const FrameSenderConfig& video_config,
StatusChangeCallback status_change_cb);
+ VideoEncoderImpl(const VideoEncoderImpl&) = delete;
+ VideoEncoderImpl& operator=(const VideoEncoderImpl&) = delete;
+
~VideoEncoderImpl() final;
// VideoEncoder implementation.
@@ -52,8 +55,6 @@ class VideoEncoderImpl final : public VideoEncoder {
// manually because it needs to be initialized, used and destroyed on the
// video encoder thread, which can out-live the main thread.
std::unique_ptr<SoftwareVideoEncoder> encoder_;
-
- DISALLOW_COPY_AND_ASSIGN(VideoEncoderImpl);
};
} // namespace cast
diff --git a/chromium/media/cast/sender/video_encoder_unittest.cc b/chromium/media/cast/sender/video_encoder_unittest.cc
index a8c54b7dbbc..033a8465110 100644
--- a/chromium/media/cast/sender/video_encoder_unittest.cc
+++ b/chromium/media/cast/sender/video_encoder_unittest.cc
@@ -122,8 +122,8 @@ class VideoEncoderTest
void RunTasksAndAdvanceClock() {
DCHECK_GT(video_config_.max_frame_rate, 0);
- const base::TimeDelta frame_duration = base::TimeDelta::FromMicroseconds(
- 1000000.0 / video_config_.max_frame_rate);
+ const base::TimeDelta frame_duration =
+ base::Microseconds(1000000.0 / video_config_.max_frame_rate);
#if defined(OS_MAC)
if (is_testing_video_toolbox_encoder()) {
// The H264VideoToolboxEncoder (on MAC_OSX and IOS) is not a faked
diff --git a/chromium/media/cast/sender/video_sender.cc b/chromium/media/cast/sender/video_sender.cc
index 490449cb392..f1976eb06f1 100644
--- a/chromium/media/cast/sender/video_sender.cc
+++ b/chromium/media/cast/sender/video_sender.cc
@@ -188,15 +188,16 @@ void VideoSender::InsertRawVideoFrame(
// based on the configured |max_frame_rate_|. Any error introduced by this
// guess will be eliminated when |duration_in_encoder_| is updated in
// OnEncodedVideoFrame().
- const base::TimeDelta duration_added_by_next_frame = frames_in_encoder_ > 0 ?
- reference_time - last_enqueued_frame_reference_time_ :
- base::TimeDelta::FromSecondsD(1.0 / max_frame_rate_);
+ const base::TimeDelta duration_added_by_next_frame =
+ frames_in_encoder_ > 0
+ ? reference_time - last_enqueued_frame_reference_time_
+ : base::Seconds(1.0 / max_frame_rate_);
if (ShouldDropNextFrame(duration_added_by_next_frame)) {
- base::TimeDelta new_target_delay = std::min(
- current_round_trip_time_ * kRoundTripsNeeded +
- base::TimeDelta::FromMilliseconds(kConstantTimeMs),
- max_playout_delay_);
+ base::TimeDelta new_target_delay =
+ std::min(current_round_trip_time_ * kRoundTripsNeeded +
+ base::Milliseconds(kConstantTimeMs),
+ max_playout_delay_);
// In case of low latency mode, we prefer frame drops over increasing
// playout time.
if (!low_latency_mode_ && new_target_delay > target_playout_delay_) {
diff --git a/chromium/media/cast/sender/video_sender.h b/chromium/media/cast/sender/video_sender.h
index 053d0dda9d2..b3b0a8a6173 100644
--- a/chromium/media/cast/sender/video_sender.h
+++ b/chromium/media/cast/sender/video_sender.h
@@ -48,6 +48,9 @@ class VideoSender : public FrameSender {
PlayoutDelayChangeCB playout_delay_change_cb,
media::VideoCaptureFeedbackCB feedback_callback);
+ VideoSender(const VideoSender&) = delete;
+ VideoSender& operator=(const VideoSender&) = delete;
+
~VideoSender() override;
// Note: It is not guaranteed that |video_frame| will actually be encoded and
@@ -114,8 +117,6 @@ class VideoSender : public FrameSender {
// NOTE: Weak pointers must be invalidated before all other member variables.
base::WeakPtrFactory<VideoSender> weak_factory_{this};
-
- DISALLOW_COPY_AND_ASSIGN(VideoSender);
};
} // namespace cast
diff --git a/chromium/media/cast/sender/video_sender_unittest.cc b/chromium/media/cast/sender/video_sender_unittest.cc
index 05584fe0dac..dd7570aec28 100644
--- a/chromium/media/cast/sender/video_sender_unittest.cc
+++ b/chromium/media/cast/sender/video_sender_unittest.cc
@@ -11,6 +11,7 @@
#include <vector>
#include "base/bind.h"
+#include "base/callback_helpers.h"
#include "base/logging.h"
#include "base/macros.h"
#include "base/memory/ptr_util.h"
@@ -229,7 +230,7 @@ class VideoSenderTest : public ::testing::Test {
}
void RunTasks(int during_ms) {
- task_runner_->Sleep(base::TimeDelta::FromMilliseconds(during_ms));
+ task_runner_->Sleep(base::Milliseconds(during_ms));
}
base::SimpleTestTickClock testing_clock_;
@@ -326,7 +327,7 @@ TEST_F(VideoSenderTest, RtcpTimer) {
// Make sure that we send at least one RTCP packet.
base::TimeDelta max_rtcp_timeout =
- base::TimeDelta::FromMilliseconds(1 + kRtcpReportIntervalMs * 3 / 2);
+ base::Milliseconds(1 + kRtcpReportIntervalMs * 3 / 2);
RunTasks(max_rtcp_timeout.InMilliseconds());
EXPECT_LE(1, transport_->number_of_rtp_packets());
@@ -359,7 +360,7 @@ TEST_F(VideoSenderTest, ResendTimer) {
video_sender_->InsertRawVideoFrame(video_frame, reference_time);
base::TimeDelta max_resend_timeout =
- base::TimeDelta::FromMilliseconds(1 + kDefaultRtpMaxDelayMs);
+ base::Milliseconds(1 + kDefaultRtpMaxDelayMs);
// Make sure that we do a re-send.
RunTasks(max_resend_timeout.InMilliseconds());
@@ -414,7 +415,7 @@ TEST_F(VideoSenderTest, StopSendingInTheAbsenceOfAck) {
// Send 3 more frames and record the number of packets sent.
for (int i = 0; i < 3; ++i) {
- scoped_refptr<media::VideoFrame> video_frame = GetNewVideoFrame();
+ video_frame = GetNewVideoFrame();
video_sender_->InsertRawVideoFrame(video_frame, testing_clock_.NowTicks());
RunTasks(33);
}
@@ -423,7 +424,7 @@ TEST_F(VideoSenderTest, StopSendingInTheAbsenceOfAck) {
// Send 3 more frames - they should not be encoded, as we have not received
// any acks.
for (int i = 0; i < 3; ++i) {
- scoped_refptr<media::VideoFrame> video_frame = GetNewVideoFrame();
+ video_frame = GetNewVideoFrame();
video_sender_->InsertRawVideoFrame(video_frame, testing_clock_.NowTicks());
RunTasks(33);
}
@@ -460,7 +461,7 @@ TEST_F(VideoSenderTest, DuplicateAckRetransmit) {
// Send 3 more frames but don't ACK.
for (int i = 0; i < 3; ++i) {
- scoped_refptr<media::VideoFrame> video_frame = GetNewVideoFrame();
+ video_frame = GetNewVideoFrame();
video_sender_->InsertRawVideoFrame(video_frame, testing_clock_.NowTicks());
RunTasks(33);
}
@@ -503,7 +504,7 @@ TEST_F(VideoSenderTest, DuplicateAckRetransmitDoesNotCancelRetransmits) {
// Send 2 more frames but don't ACK.
for (int i = 0; i < 2; ++i) {
- scoped_refptr<media::VideoFrame> video_frame = GetNewVideoFrame();
+ video_frame = GetNewVideoFrame();
video_sender_->InsertRawVideoFrame(video_frame, testing_clock_.NowTicks());
RunTasks(33);
}
@@ -625,8 +626,7 @@ TEST_F(VideoSenderTest, CancelSendingOnReceivingPli) {
video_sender_->OnReceivedPli();
video_frame = GetNewVideoFrame();
video_sender_->InsertRawVideoFrame(
- video_frame,
- testing_clock_.NowTicks() + base::TimeDelta::FromMilliseconds(1000));
+ video_frame, testing_clock_.NowTicks() + base::Milliseconds(1000));
RunTasks(33);
transport_->SetPause(false);
RunTasks(33);
diff --git a/chromium/media/cast/sender/vp8_encoder.cc b/chromium/media/cast/sender/vpx_encoder.cc
index 213114517c8..7777d57a797 100644
--- a/chromium/media/cast/sender/vp8_encoder.cc
+++ b/chromium/media/cast/sender/vpx_encoder.cc
@@ -2,7 +2,7 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
-#include "media/cast/sender/vp8_encoder.h"
+#include "media/cast/sender/vpx_encoder.h"
#include "base/logging.h"
#include "media/base/video_frame.h"
@@ -59,7 +59,7 @@ bool HasSufficientFeedback(
} // namespace
-Vp8Encoder::Vp8Encoder(const FrameSenderConfig& video_config)
+VpxEncoder::VpxEncoder(const FrameSenderConfig& video_config)
: cast_config_(video_config),
target_encoder_utilization_(
video_config.video_codec_params.number_of_encode_threads > 2
@@ -70,8 +70,7 @@ Vp8Encoder::Vp8Encoder(const FrameSenderConfig& video_config)
key_frame_requested_(true),
bitrate_kbit_(cast_config_.start_bitrate / 1000),
next_frame_id_(FrameId::first()),
- encoding_speed_acc_(
- base::TimeDelta::FromMicroseconds(kEncodingSpeedAccHalfLife)),
+ encoding_speed_acc_(base::Microseconds(kEncodingSpeedAccHalfLife)),
encoding_speed_(kHighestEncodingSpeed) {
config_.g_timebase.den = 0; // Not initialized.
DCHECK_LE(cast_config_.video_codec_params.min_qp,
@@ -79,24 +78,25 @@ Vp8Encoder::Vp8Encoder(const FrameSenderConfig& video_config)
DCHECK_LE(cast_config_.video_codec_params.max_cpu_saver_qp,
cast_config_.video_codec_params.max_qp);
- thread_checker_.DetachFromThread();
+ DETACH_FROM_THREAD(thread_checker_);
}
-Vp8Encoder::~Vp8Encoder() {
- DCHECK(thread_checker_.CalledOnValidThread());
+VpxEncoder::~VpxEncoder() {
+ DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
if (is_initialized())
vpx_codec_destroy(&encoder_);
}
-void Vp8Encoder::Initialize() {
- DCHECK(thread_checker_.CalledOnValidThread());
+void VpxEncoder::Initialize() {
+ DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
DCHECK(!is_initialized());
// The encoder will be created/configured when the first frame encode is
// requested.
}
-void Vp8Encoder::ConfigureForNewFrameSize(const gfx::Size& frame_size) {
+void VpxEncoder::ConfigureForNewFrameSize(const gfx::Size& frame_size) {
if (is_initialized()) {
+ // NOTE: Do we need this workaround for VP9?
// Workaround for VP8 bug: If the new size is strictly less-than-or-equal to
// the old size, in terms of area, the existing encoder instance can
// continue. Otherwise, completely tear-down and re-create a new encoder to
@@ -123,9 +123,17 @@ void Vp8Encoder::ConfigureForNewFrameSize(const gfx::Size& frame_size) {
<< frame_size.ToString();
}
+ // Determine appropriate codec interface.
+ vpx_codec_iface_t* ctx;
+ if (cast_config_.codec == CODEC_VIDEO_VP9) {
+ ctx = vpx_codec_vp9_cx();
+ } else {
+ DCHECK(cast_config_.codec == CODEC_VIDEO_VP8);
+ ctx = vpx_codec_vp8_cx();
+ }
+
// Populate encoder configuration with default values.
- CHECK_EQ(vpx_codec_enc_config_default(vpx_codec_vp8_cx(), &config_, 0),
- VPX_CODEC_OK);
+ CHECK_EQ(vpx_codec_enc_config_default(ctx, &config_, 0), VPX_CODEC_OK);
config_.g_threads = cast_config_.video_codec_params.number_of_encode_threads;
config_.g_w = frame_size.width();
@@ -135,13 +143,13 @@ void Vp8Encoder::ConfigureForNewFrameSize(const gfx::Size& frame_size) {
config_.g_timebase.den = base::Time::kMicrosecondsPerSecond;
// |g_pass| and |g_lag_in_frames| must be "one pass" and zero, respectively,
- // in order for VP8 to support changing frame sizes during encoding:
+ // in order for VPX to support changing frame sizes during encoding:
config_.g_pass = VPX_RC_ONE_PASS;
config_.g_lag_in_frames = 0; // Immediate data output for each frame.
// Rate control settings.
config_.rc_dropframe_thresh = 0; // The encoder may not drop any frames.
- config_.rc_resize_allowed = 0; // TODO(miu): Why not? Investigate this.
+ config_.rc_resize_allowed = 0; // TODO(miu): Why not? Investigate this.
config_.rc_end_usage = VPX_CBR;
config_.rc_target_bitrate = bitrate_kbit_;
config_.rc_min_quantizer = cast_config_.video_codec_params.min_qp;
@@ -160,8 +168,7 @@ void Vp8Encoder::ConfigureForNewFrameSize(const gfx::Size& frame_size) {
config_.kf_mode = VPX_KF_DISABLED;
vpx_codec_flags_t flags = 0;
- CHECK_EQ(vpx_codec_enc_init(&encoder_, vpx_codec_vp8_cx(), &config_, flags),
- VPX_CODEC_OK);
+ CHECK_EQ(vpx_codec_enc_init(&encoder_, ctx, &config_, flags), VPX_CODEC_OK);
// Raise the threshold for considering macroblocks as static. The default is
// zero, so this setting makes the encoder less sensitive to motion. This
@@ -183,10 +190,10 @@ void Vp8Encoder::ConfigureForNewFrameSize(const gfx::Size& frame_size) {
VPX_CODEC_OK);
}
-void Vp8Encoder::Encode(scoped_refptr<media::VideoFrame> video_frame,
- const base::TimeTicks& reference_time,
+void VpxEncoder::Encode(scoped_refptr<media::VideoFrame> video_frame,
+ base::TimeTicks reference_time,
SenderEncodedFrame* encoded_frame) {
- DCHECK(thread_checker_.CalledOnValidThread());
+ DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
DCHECK(encoded_frame);
// Note: This is used to compute the |encoder_utilization| and so it uses the
@@ -239,16 +246,15 @@ void Vp8Encoder::Encode(scoped_refptr<media::VideoFrame> video_frame,
break;
}
- // The frame duration given to the VP8 codec affects a number of important
+ // The frame duration given to the VPX codecs affects a number of important
// behaviors, including: per-frame bandwidth, CPU time spent encoding,
// temporal quality trade-offs, and key/golden/alt-ref frame generation
// intervals. Bound the prediction to account for the fact that the frame
// rate can be highly variable, including long pauses in the video stream.
const base::TimeDelta minimum_frame_duration =
- base::TimeDelta::FromSecondsD(1.0 / cast_config_.max_frame_rate);
- const base::TimeDelta maximum_frame_duration =
- base::TimeDelta::FromSecondsD(static_cast<double>(kRestartFramePeriods) /
- cast_config_.max_frame_rate);
+ base::Seconds(1.0 / cast_config_.max_frame_rate);
+ const base::TimeDelta maximum_frame_duration = base::Seconds(
+ static_cast<double>(kRestartFramePeriods) / cast_config_.max_frame_rate);
base::TimeDelta predicted_frame_duration =
video_frame->metadata().frame_duration.value_or(base::TimeDelta());
if (predicted_frame_duration <= base::TimeDelta()) {
@@ -308,7 +314,7 @@ void Vp8Encoder::Encode(scoped_refptr<media::VideoFrame> video_frame,
encoded_frame->encoder_utilization =
processing_time / predicted_frame_duration;
- // Compute lossy utilization. The VP8 encoder took an estimated guess at what
+ // Compute lossy utilization. The VPX encoder took an estimated guess at what
// quantizer value would produce an encoded frame size as close to the target
// as possible. Now that the frame has been encoded and the number of bytes
// is known, the perfect quantizer value (i.e., the one that should have been
@@ -328,7 +334,7 @@ void Vp8Encoder::Encode(scoped_refptr<media::VideoFrame> video_frame,
// If it was never possible, the value will be greater than 63.0.
encoded_frame->lossy_utilization = perfect_quantizer / 63.0;
- DVLOG(2) << "VP8 encoded frame_id " << encoded_frame->frame_id
+ DVLOG(2) << "VPX encoded frame_id " << encoded_frame->frame_id
<< ", sized: " << encoded_frame->data.size()
<< ", encoder_utilization: " << encoded_frame->encoder_utilization
<< ", lossy_utilization: " << encoded_frame->lossy_utilization
@@ -384,8 +390,8 @@ void Vp8Encoder::Encode(scoped_refptr<media::VideoFrame> video_frame,
}
}
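Two things are going on in the Encode() hunks above: the base::TimeDelta factories are renamed (base::TimeDelta::FromSecondsD(x) becomes base::Seconds(x)), and the predicted frame duration is bounded before it feeds the utilization metrics. The clamp itself falls outside the hunks shown, so the following is only a sketch of the assumed bounding logic, with made-up parameter names:

  #include <algorithm>
  #include "base/time/time.h"

  // Keep a predicted frame duration within [1/max_rate, restart_periods/max_rate].
  base::TimeDelta BoundFrameDuration(base::TimeDelta predicted,
                                     double max_frame_rate,
                                     int restart_frame_periods) {
    const base::TimeDelta min_duration = base::Seconds(1.0 / max_frame_rate);
    const base::TimeDelta max_duration =
        base::Seconds(static_cast<double>(restart_frame_periods) / max_frame_rate);
    if (predicted <= base::TimeDelta())
      predicted = min_duration;  // No usable metadata: assume the nominal rate.
    return std::min(std::max(predicted, min_duration), max_duration);
  }

As a worked example of the two signals derived from it (numbers invented): a frame predicted to last 33 ms that takes 20 ms of CPU time to encode reports encoder_utilization = 20 / 33 ≈ 0.61, and a perfect quantizer of 42 gives lossy_utilization = 42 / 63 ≈ 0.67; values above 1.0 indicate the encoder missed its time or size budget.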
-void Vp8Encoder::UpdateRates(uint32_t new_bitrate) {
- DCHECK(thread_checker_.CalledOnValidThread());
+void VpxEncoder::UpdateRates(uint32_t new_bitrate) {
+ DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
if (!is_initialized())
return;
@@ -401,11 +407,11 @@ void Vp8Encoder::UpdateRates(uint32_t new_bitrate) {
NOTREACHED() << "Invalid return value";
}
- VLOG(1) << "VP8 new rc_target_bitrate: " << new_bitrate_kbit << " kbps";
+ VLOG(1) << "VPX new rc_target_bitrate: " << new_bitrate_kbit << " kbps";
}
-void Vp8Encoder::GenerateKeyFrame() {
- DCHECK(thread_checker_.CalledOnValidThread());
+void VpxEncoder::GenerateKeyFrame() {
+ DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
key_frame_requested_ = true;
}
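This file also shows the recurring thread-checker migration in this patch: a base::ThreadChecker member paired with CalledOnValidThread() becomes the THREAD_CHECKER / DCHECK_CALLED_ON_VALID_THREAD macro pair. A minimal sketch of the new pattern, with a made-up class name:

  #include "base/threading/thread_checker.h"

  class BoundToOneThread {
   public:
    void DoWork() {
      // In debug builds, asserts that DoWork() runs on the thread that
      // constructed |thread_checker_|.
      DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
    }

   private:
    THREAD_CHECKER(thread_checker_);
  };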
diff --git a/chromium/media/cast/sender/vp8_encoder.h b/chromium/media/cast/sender/vpx_encoder.h
index 00afbfdcb32..49472839057 100644
--- a/chromium/media/cast/sender/vp8_encoder.h
+++ b/chromium/media/cast/sender/vpx_encoder.h
@@ -2,8 +2,8 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
-#ifndef MEDIA_CAST_SENDER_VP8_ENCODER_H_
-#define MEDIA_CAST_SENDER_VP8_ENCODER_H_
+#ifndef MEDIA_CAST_SENDER_VPX_ENCODER_H_
+#define MEDIA_CAST_SENDER_VPX_ENCODER_H_
#include <stdint.h>
@@ -22,16 +22,21 @@ class VideoFrame;
namespace media {
namespace cast {
-class Vp8Encoder final : public SoftwareVideoEncoder {
+class VpxEncoder final : public SoftwareVideoEncoder {
public:
- explicit Vp8Encoder(const FrameSenderConfig& video_config);
+ explicit VpxEncoder(const FrameSenderConfig& video_config);
- ~Vp8Encoder() final;
+ ~VpxEncoder() final;
+
+ VpxEncoder(const VpxEncoder&) = delete;
+ VpxEncoder& operator=(const VpxEncoder&) = delete;
+ VpxEncoder(VpxEncoder&&) = delete;
+ VpxEncoder& operator=(VpxEncoder&&) = delete;
// SoftwareVideoEncoder implementations.
void Initialize() final;
void Encode(scoped_refptr<media::VideoFrame> video_frame,
- const base::TimeTicks& reference_time,
+ base::TimeTicks reference_time,
SenderEncodedFrame* encoded_frame) final;
void UpdateRates(uint32_t new_bitrate) final;
void GenerateKeyFrame() final;
@@ -53,12 +58,12 @@ class Vp8Encoder final : public SoftwareVideoEncoder {
const double target_encoder_utilization_;
- // VP8 internal objects. These are valid for use only while is_initialized()
+ // VPX internal objects. These are valid for use only while is_initialized()
// returns true.
vpx_codec_enc_cfg_t config_;
vpx_codec_ctx_t encoder_;
- // Set to true to request the next frame emitted by Vp8Encoder be a key frame.
+ // Set to true to request the next frame emitted by VpxEncoder be a key frame.
bool key_frame_requested_;
// Saves the current bitrate setting, for when the |encoder_| is reconfigured
@@ -73,18 +78,16 @@ class Vp8Encoder final : public SoftwareVideoEncoder {
FrameId next_frame_id_;
// This is bound to the thread where Initialize() is called.
- base::ThreadChecker thread_checker_;
+ THREAD_CHECKER(thread_checker_);
// The accumulator (time averaging) of the encoding speed.
FeedbackSignalAccumulator<base::TimeDelta> encoding_speed_acc_;
// The higher the speed, the less CPU usage, and the lower quality.
int encoding_speed_;
-
- DISALLOW_COPY_AND_ASSIGN(Vp8Encoder);
};
} // namespace cast
} // namespace media
-#endif // MEDIA_CAST_SENDER_VP8_ENCODER_H_
+#endif // MEDIA_CAST_SENDER_VPX_ENCODER_H_
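The header above also shows the other mechanical change repeated throughout this patch: DISALLOW_COPY_AND_ASSIGN(Type) from base/macros.h, which expanded to deleted copy operations in the private section, is replaced by deleted special members declared explicitly next to the constructors. A generic sketch of the new pattern with a placeholder class name:

  class Widget {
   public:
    Widget() = default;

    Widget(const Widget&) = delete;
    Widget& operator=(const Widget&) = delete;
    // VpxEncoder above additionally deletes the move operations:
    Widget(Widget&&) = delete;
    Widget& operator=(Widget&&) = delete;
  };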
diff --git a/chromium/media/cast/sender/vp8_quantizer_parser.cc b/chromium/media/cast/sender/vpx_quantizer_parser.cc
index 0cb8a172891..78e660cfd65 100644
--- a/chromium/media/cast/sender/vp8_quantizer_parser.cc
+++ b/chromium/media/cast/sender/vpx_quantizer_parser.cc
@@ -2,15 +2,15 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
+#include "media/cast/sender/vpx_quantizer_parser.h"
#include "base/logging.h"
#include "base/macros.h"
-#include "media/cast/sender/vp8_quantizer_parser.h"
namespace media {
namespace cast {
namespace {
-// Vp8BitReader is a re-implementation of a subset of the VP8 entropy decoder.
+// VpxBitReader is a re-implementation of a subset of the VP8 entropy decoder.
// It is used to decompress the VP8 bitstream for the purposes of quickly
// parsing the VP8 frame headers. It is mostly the exact same implementation
// found in third_party/libvpx/.../vp8/decoder/dboolhuff.h except that only
@@ -18,13 +18,18 @@ namespace {
// present. As of this writing, the implementation in libvpx could not be
// re-used because of the way that the code is structured, and lack of the
// necessary parts being exported.
-class Vp8BitReader {
+class VpxBitReader {
public:
- Vp8BitReader(const uint8_t* data, size_t size)
+ VpxBitReader(const uint8_t* data, size_t size)
: encoded_data_(data), encoded_data_end_(data + size) {
- Vp8DecoderReadBytes();
+ VpxDecoderReadBytes();
}
- ~Vp8BitReader() = default;
+ ~VpxBitReader() = default;
+
+ VpxBitReader(const VpxBitReader&) = delete;
+ VpxBitReader& operator=(const VpxBitReader&) = delete;
+ VpxBitReader(VpxBitReader&&) = delete;
+ VpxBitReader& operator=(VpxBitReader&&) = delete;
// Decode one bit. The output is 0 or 1.
unsigned int DecodeBit();
@@ -33,7 +38,7 @@ class Vp8BitReader {
private:
// Read new bytes from the encoded data buffer until |bit_count_| > 0.
- void Vp8DecoderReadBytes();
+ void VpxDecoderReadBytes();
const uint8_t* encoded_data_; // Current byte to decode.
const uint8_t* const encoded_data_end_;  // The end of the bytes to decode.
@@ -47,14 +52,12 @@ class Vp8BitReader {
// Number of valid bits left to decode. Initializing it to -8 to let the
// decoder load two bytes at the beginning. The lower byte is used as
// a buffer byte. During the decoding, decoder needs to call
- // Vp8DecoderReadBytes() to load new bytes when it becomes negative.
+ // VpxDecoderReadBytes() to load new bytes when it becomes negative.
int bit_count_ = -8;
-
- DISALLOW_COPY_AND_ASSIGN(Vp8BitReader);
};
// The number of bits to be left-shifted to make the variable range_ over 128.
-const uint8_t vp8_shift[128] = {
+const uint8_t vpx_shift[128] = {
0, 7, 6, 6, 5, 5, 5, 5, 4, 4, 4, 4, 4, 4, 4, 4, 3, 3, 3, 3, 3, 3,
3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 1,
@@ -63,7 +66,7 @@ const uint8_t vp8_shift[128] = {
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1};
// Mapping from the q_index(0-127) to the quantizer value(0-63).
-const uint8_t vp8_quantizer_lookup[128] = {
+const uint8_t vpx_quantizer_lookup[128] = {
0, 1, 2, 3, 4, 5, 6, 6, 7, 8, 9, 10, 10, 11, 12, 12, 13, 13, 14,
15, 16, 17, 18, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 27, 28, 28, 29, 29,
30, 30, 31, 31, 32, 32, 33, 33, 34, 34, 35, 35, 36, 36, 37, 37, 38, 38, 39,
@@ -72,7 +75,7 @@ const uint8_t vp8_quantizer_lookup[128] = {
53, 53, 53, 54, 54, 54, 55, 55, 55, 56, 56, 56, 57, 57, 57, 58, 58, 58, 59,
59, 59, 60, 60, 60, 61, 61, 61, 62, 62, 62, 63, 63, 63};
-void Vp8BitReader::Vp8DecoderReadBytes() {
+void VpxBitReader::VpxDecoderReadBytes() {
int shift = -bit_count_;
while ((shift >= 0) && (encoded_data_ < encoded_data_end_)) {
bit_count_ += 8;
@@ -82,11 +85,11 @@ void Vp8BitReader::Vp8DecoderReadBytes() {
}
}
-unsigned int Vp8BitReader::DecodeBit() {
+unsigned int VpxBitReader::DecodeBit() {
unsigned int decoded_bit = 0;
unsigned int split = 1 + (((range_ - 1) * 128) >> 8);
if (bit_count_ < 0) {
- Vp8DecoderReadBytes();
+ VpxDecoderReadBytes();
}
DCHECK_GE(bit_count_, 0);
unsigned int shifted_split = split << 8;
@@ -98,7 +101,7 @@ unsigned int Vp8BitReader::DecodeBit() {
range_ = split;
}
if (range_ < 128) {
- int shift = vp8_shift[range_];
+ int shift = vpx_shift[range_];
range_ <<= shift;
value_ <<= shift;
bit_count_ -= shift;
@@ -106,7 +109,7 @@ unsigned int Vp8BitReader::DecodeBit() {
return decoded_bit;
}
-unsigned int Vp8BitReader::DecodeValue(unsigned int num_bits) {
+unsigned int VpxBitReader::DecodeValue(unsigned int num_bits) {
unsigned int decoded_value = 0;
for (int i = static_cast<int>(num_bits) - 1; i >= 0; i--) {
decoded_value |= (DecodeBit() << i);
@@ -115,7 +118,7 @@ unsigned int Vp8BitReader::DecodeValue(unsigned int num_bits) {
}
// Parse the Segment Header part in the first partition.
-void ParseSegmentHeader(Vp8BitReader* bit_reader) {
+void ParseSegmentHeader(VpxBitReader* bit_reader) {
const bool segmentation_enabled = (bit_reader->DecodeBit() != 0);
DVLOG(2) << "segmentation_enabled:" << segmentation_enabled;
if (segmentation_enabled) {
@@ -147,7 +150,7 @@ void ParseSegmentHeader(Vp8BitReader* bit_reader) {
}
// Parse the Filter Header in the first partition.
-void ParseFilterHeader(Vp8BitReader* bit_reader) {
+void ParseFilterHeader(VpxBitReader* bit_reader) {
// Parse 1 bit filter_type + 6 bits loop_filter_level + 3 bits
// sharpness_level.
bit_reader->DecodeValue(1 + 6 + 3);
@@ -168,7 +171,7 @@ void ParseFilterHeader(Vp8BitReader* bit_reader) {
}
} // unnamed namespace
-int ParseVp8HeaderQuantizer(const uint8_t* encoded_data, size_t size) {
+int ParseVpxHeaderQuantizer(const uint8_t* encoded_data, size_t size) {
DCHECK(encoded_data);
if (size <= 3) {
return -1;
@@ -190,7 +193,7 @@ int ParseVp8HeaderQuantizer(const uint8_t* encoded_data, size_t size) {
if (size < partition_size) {
return -1;
}
- Vp8BitReader bit_reader(encoded_data, partition_size);
+ VpxBitReader bit_reader(encoded_data, partition_size);
if (is_key) {
bit_reader.DecodeValue(1 + 1); // Parse two bits: color_space + clamp_type.
}
@@ -203,7 +206,7 @@ int ParseVp8HeaderQuantizer(const uint8_t* encoded_data, size_t size) {
if (q_index > 127) {
return 63;
}
- return vp8_quantizer_lookup[q_index];
+ return vpx_quantizer_lookup[q_index];
}
} // namespace cast
diff --git a/chromium/media/cast/sender/vp8_quantizer_parser.h b/chromium/media/cast/sender/vpx_quantizer_parser.h
index 9932750dc25..4249c0b495e 100644
--- a/chromium/media/cast/sender/vp8_quantizer_parser.h
+++ b/chromium/media/cast/sender/vpx_quantizer_parser.h
@@ -2,8 +2,8 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
-#ifndef MEDIA_CAST_SENDER_VP8_QUANTIZER_PARSER_H_
-#define MEDIA_CAST_SENDER_VP8_QUANTIZER_PARSER_H_
+#ifndef MEDIA_CAST_SENDER_VPX_QUANTIZER_PARSER_H_
+#define MEDIA_CAST_SENDER_VPX_QUANTIZER_PARSER_H_
#include <stddef.h>
#include <stdint.h>
@@ -15,9 +15,9 @@ namespace cast {
// Partially parse / skip data in the header and the first partition,
// and return the base quantizer in the range [0,63], or -1 on parse error.
-int ParseVp8HeaderQuantizer(const uint8_t* data, size_t size);
+int ParseVpxHeaderQuantizer(const uint8_t* data, size_t size);
} // namespace cast
} // namespace media
-#endif // MEDIA_CAST_SENDER_VP8_QUANTIZER_PARSER_H_
+#endif // MEDIA_CAST_SENDER_VPX_QUANTIZER_PARSER_H_
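A brief usage sketch for the renamed parser; the caller, buffer, and comments are invented for illustration, and only the ParseVpxHeaderQuantizer() signature comes from the header above:

  #include <stddef.h>
  #include <stdint.h>

  #include "media/cast/sender/vpx_quantizer_parser.h"

  void InspectEncodedFrame(const uint8_t* encoded_data, size_t size) {
    // Returns the base quantizer in [0, 63], or -1 if the header and first
    // partition could not be parsed (e.g. the buffer is truncated).
    const int qp = media::cast::ParseVpxHeaderQuantizer(encoded_data, size);
    if (qp >= 0) {
      // Use |qp|, e.g. as a rough measure of how aggressively the frame was
      // compressed.
    }
  }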
diff --git a/chromium/media/cast/sender/vp8_quantizer_parser_unittest.cc b/chromium/media/cast/sender/vpx_quantizer_parser_unittest.cc
index 3f42076715b..24602ac821c 100644
--- a/chromium/media/cast/sender/vp8_quantizer_parser_unittest.cc
+++ b/chromium/media/cast/sender/vpx_quantizer_parser_unittest.cc
@@ -11,8 +11,8 @@
#include "base/time/time.h"
#include "media/cast/cast_config.h"
#include "media/cast/sender/sender_encoded_frame.h"
-#include "media/cast/sender/vp8_encoder.h"
-#include "media/cast/sender/vp8_quantizer_parser.h"
+#include "media/cast/sender/vpx_encoder.h"
+#include "media/cast/sender/vpx_quantizer_parser.h"
#include "media/cast/test/receiver/video_decoder.h"
#include "media/cast/test/utility/default_config.h"
#include "media/cast/test/utility/video_utility.h"
@@ -39,9 +39,9 @@ FrameSenderConfig GetVideoConfigForTest() {
}
} // unnamed namespace
-class Vp8QuantizerParserTest : public ::testing::Test {
+class VpxQuantizerParserTest : public ::testing::Test {
public:
- Vp8QuantizerParserTest() : video_config_(GetVideoConfigForTest()) {}
+ VpxQuantizerParserTest() : video_config_(GetVideoConfigForTest()) {}
// Call the vp8 software encoder to encode one randomly generated frame.
void EncodeOneFrame(SenderEncodedFrame* encoded_frame) {
@@ -51,7 +51,7 @@ class Vp8QuantizerParserTest : public ::testing::Test {
next_frame_timestamp_);
const base::TimeTicks reference_time =
base::TimeTicks::UnixEpoch() + next_frame_timestamp_;
- next_frame_timestamp_ += base::TimeDelta::FromSeconds(1) / kFrameRate;
+ next_frame_timestamp_ += base::Seconds(1) / kFrameRate;
PopulateVideoFrameWithNoise(video_frame.get());
vp8_encoder_->Encode(video_frame, reference_time, encoded_frame);
}
@@ -75,34 +75,32 @@ class Vp8QuantizerParserTest : public ::testing::Test {
// Reconstruct a vp8 encoder with a new config since the Vp8Encoder
// class has no interface to update the config.
void RecreateVp8Encoder() {
- vp8_encoder_ = std::make_unique<Vp8Encoder>(video_config_);
+ vp8_encoder_ = std::make_unique<VpxEncoder>(video_config_);
vp8_encoder_->Initialize();
}
base::TimeDelta next_frame_timestamp_;
FrameSenderConfig video_config_;
- std::unique_ptr<Vp8Encoder> vp8_encoder_;
-
- DISALLOW_COPY_AND_ASSIGN(Vp8QuantizerParserTest);
+ std::unique_ptr<VpxEncoder> vp8_encoder_;
};
// Encode 3 frames to test the cases with insufficient data input.
-TEST_F(Vp8QuantizerParserTest, InsufficientData) {
+TEST_F(VpxQuantizerParserTest, InsufficientData) {
for (int i = 0; i < 3; ++i) {
std::unique_ptr<SenderEncodedFrame> encoded_frame(new SenderEncodedFrame());
const uint8_t* encoded_data =
reinterpret_cast<const uint8_t*>(encoded_frame->data.data());
// Null input.
int decoded_quantizer =
- ParseVp8HeaderQuantizer(encoded_data, encoded_frame->data.size());
+ ParseVpxHeaderQuantizer(encoded_data, encoded_frame->data.size());
EXPECT_EQ(-1, decoded_quantizer);
EncodeOneFrame(encoded_frame.get());
encoded_data = reinterpret_cast<const uint8_t*>(encoded_frame->data.data());
// Zero bytes should not be enough to decode the quantizer value.
- decoded_quantizer = ParseVp8HeaderQuantizer(encoded_data, 0);
+ decoded_quantizer = ParseVpxHeaderQuantizer(encoded_data, 0);
EXPECT_EQ(-1, decoded_quantizer);
// Three bytes should not be enough to decode the quantizer value.
- decoded_quantizer = ParseVp8HeaderQuantizer(encoded_data, 3);
+ decoded_quantizer = ParseVpxHeaderQuantizer(encoded_data, 3);
EXPECT_EQ(-1, decoded_quantizer);
unsigned int first_partition_size =
(encoded_data[0] | (encoded_data[1] << 8) | (encoded_data[2] << 16)) >>
@@ -110,31 +108,31 @@ TEST_F(Vp8QuantizerParserTest, InsufficientData) {
if (encoded_frame->dependency == EncodedFrame::KEY) {
// Ten bytes should not be enough to decode the quantizer value
// for a Key frame.
- decoded_quantizer = ParseVp8HeaderQuantizer(encoded_data, 10);
+ decoded_quantizer = ParseVpxHeaderQuantizer(encoded_data, 10);
EXPECT_EQ(-1, decoded_quantizer);
// One byte less than needed to decode the quantizer value.
decoded_quantizer =
- ParseVp8HeaderQuantizer(encoded_data, 10 + first_partition_size - 1);
+ ParseVpxHeaderQuantizer(encoded_data, 10 + first_partition_size - 1);
EXPECT_EQ(-1, decoded_quantizer);
// Minimum number of bytes to decode the quantizer value.
decoded_quantizer =
- ParseVp8HeaderQuantizer(encoded_data, 10 + first_partition_size);
+ ParseVpxHeaderQuantizer(encoded_data, 10 + first_partition_size);
EXPECT_EQ(kQp, decoded_quantizer);
} else {
// One byte less than needed to decode the quantizer value.
decoded_quantizer =
- ParseVp8HeaderQuantizer(encoded_data, 3 + first_partition_size - 1);
+ ParseVpxHeaderQuantizer(encoded_data, 3 + first_partition_size - 1);
EXPECT_EQ(-1, decoded_quantizer);
// Minimum number of bytes to decode the quantizer value.
decoded_quantizer =
- ParseVp8HeaderQuantizer(encoded_data, 3 + first_partition_size);
+ ParseVpxHeaderQuantizer(encoded_data, 3 + first_partition_size);
EXPECT_EQ(kQp, decoded_quantizer);
}
}
}
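The byte offsets in this test follow the VP8 uncompressed data chunk layout: a 3-byte frame tag carries the frame type and the 19-bit first-partition size, and key frames add a further 7 bytes (start code plus dimensions), which is where the "3 + first_partition_size" and "10 + first_partition_size" minimums come from. A small sketch of decoding the frame tag, written from that layout rather than from code in this patch:

  #include <stdint.h>

  struct Vp8FrameTag {
    bool is_key_frame;
    uint32_t first_partition_size;
  };

  // |data| must point at the first three bytes of a VP8 frame.
  Vp8FrameTag ParseVp8FrameTag(const uint8_t* data) {
    const uint32_t tag = data[0] | (data[1] << 8) | (data[2] << 16);
    Vp8FrameTag result;
    result.is_key_frame = (tag & 0x1) == 0;  // Bit 0: 0 = key frame, 1 = interframe.
    result.first_partition_size = tag >> 5;  // Same expression as in the test above.
    return result;
  }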
// Encode 3 frames for every quantizer value in the range of [4,63].
-TEST_F(Vp8QuantizerParserTest, VariedQuantizer) {
+TEST_F(VpxQuantizerParserTest, VariedQuantizer) {
int decoded_quantizer = -1;
for (int qp = 4; qp <= 63; qp += 10) {
UpdateQuantizer(qp);
@@ -142,7 +140,7 @@ TEST_F(Vp8QuantizerParserTest, VariedQuantizer) {
std::unique_ptr<SenderEncodedFrame> encoded_frame(
new SenderEncodedFrame());
EncodeOneFrame(encoded_frame.get());
- decoded_quantizer = ParseVp8HeaderQuantizer(
+ decoded_quantizer = ParseVpxHeaderQuantizer(
reinterpret_cast<const uint8_t*>(encoded_frame->data.data()),
encoded_frame->data.size());
EXPECT_EQ(qp, decoded_quantizer);
diff --git a/chromium/media/cdm/BUILD.gn b/chromium/media/cdm/BUILD.gn
index a3e98aee4b0..5f6c0350287 100644
--- a/chromium/media/cdm/BUILD.gn
+++ b/chromium/media/cdm/BUILD.gn
@@ -125,8 +125,8 @@ source_set("cdm") {
if (is_win) {
sources += [
- "cdm_preference_data.cc",
- "cdm_preference_data.h",
+ "media_foundation_cdm_data.cc",
+ "media_foundation_cdm_data.h",
"win/media_foundation_cdm.cc",
"win/media_foundation_cdm.h",
"win/media_foundation_cdm_factory.cc",
@@ -179,9 +179,7 @@ source_set("unit_tests") {
"//url",
]
- configs += [
- "//media:media_config",
- ]
+ configs += [ "//media:media_config" ]
data_deps = []
diff --git a/chromium/media/cdm/aes_cbc_crypto.h b/chromium/media/cdm/aes_cbc_crypto.h
index a01ebc02d95..ed1b6cca429 100644
--- a/chromium/media/cdm/aes_cbc_crypto.h
+++ b/chromium/media/cdm/aes_cbc_crypto.h
@@ -26,6 +26,10 @@ namespace media {
class MEDIA_EXPORT AesCbcCrypto {
public:
AesCbcCrypto();
+
+ AesCbcCrypto(const AesCbcCrypto&) = delete;
+ AesCbcCrypto& operator=(const AesCbcCrypto&) = delete;
+
~AesCbcCrypto();
// Initializes the encryptor using |key| and |iv|. Returns false if either
@@ -42,8 +46,6 @@ class MEDIA_EXPORT AesCbcCrypto {
private:
EVP_CIPHER_CTX ctx_;
-
- DISALLOW_COPY_AND_ASSIGN(AesCbcCrypto);
};
} // namespace media
diff --git a/chromium/media/cdm/aes_decryptor.cc b/chromium/media/cdm/aes_decryptor.cc
index 800be34ba85..92745ceec74 100644
--- a/chromium/media/cdm/aes_decryptor.cc
+++ b/chromium/media/cdm/aes_decryptor.cc
@@ -78,6 +78,11 @@ class AesDecryptor::SessionIdDecryptionKeyMap {
public:
SessionIdDecryptionKeyMap() = default;
+
+ SessionIdDecryptionKeyMap(const SessionIdDecryptionKeyMap&) = delete;
+ SessionIdDecryptionKeyMap& operator=(const SessionIdDecryptionKeyMap&) =
+ delete;
+
~SessionIdDecryptionKeyMap() = default;
// Replaces value if |session_id| is already present, or adds it if not.
@@ -110,8 +115,6 @@ class AesDecryptor::SessionIdDecryptionKeyMap {
void Erase(KeyList::iterator position);
KeyList key_list_;
-
- DISALLOW_COPY_AND_ASSIGN(SessionIdDecryptionKeyMap);
};
void AesDecryptor::SessionIdDecryptionKeyMap::Insert(
diff --git a/chromium/media/cdm/aes_decryptor.h b/chromium/media/cdm/aes_decryptor.h
index 21f26643553..9faf62a58d9 100644
--- a/chromium/media/cdm/aes_decryptor.h
+++ b/chromium/media/cdm/aes_decryptor.h
@@ -125,6 +125,10 @@ class MEDIA_EXPORT AesDecryptor : public ContentDecryptionModule,
class DecryptionKey {
public:
explicit DecryptionKey(const std::string& secret);
+
+ DecryptionKey(const DecryptionKey&) = delete;
+ DecryptionKey& operator=(const DecryptionKey&) = delete;
+
~DecryptionKey();
// Creates the encryption key.
@@ -139,8 +143,6 @@ class MEDIA_EXPORT AesDecryptor : public ContentDecryptionModule,
// The key used to decrypt the data.
std::unique_ptr<crypto::SymmetricKey> decryption_key_;
-
- DISALLOW_COPY_AND_ASSIGN(DecryptionKey);
};
// Keep track of the keys for a key ID. If multiple sessions specify keys
diff --git a/chromium/media/cdm/cbcs_decryptor_unittest.cc b/chromium/media/cdm/cbcs_decryptor_unittest.cc
index 06acb23077a..b37fb3f6f2d 100644
--- a/chromium/media/cdm/cbcs_decryptor_unittest.cc
+++ b/chromium/media/cdm/cbcs_decryptor_unittest.cc
@@ -166,8 +166,8 @@ TEST_F(CbcsDecryptorTest, AdditionalData) {
auto encrypted_buffer = CreateEncryptedBuffer(
encrypted_block, iv_, subsamples, EncryptionPattern(1, 9));
- encrypted_buffer->set_timestamp(base::TimeDelta::FromDays(2));
- encrypted_buffer->set_duration(base::TimeDelta::FromMinutes(5));
+ encrypted_buffer->set_timestamp(base::Days(2));
+ encrypted_buffer->set_duration(base::Minutes(5));
encrypted_buffer->set_is_key_frame(true);
encrypted_buffer->CopySideDataFrom(encrypted_block.data(),
encrypted_block.size());
diff --git a/chromium/media/cdm/cdm_adapter.cc b/chromium/media/cdm/cdm_adapter.cc
index 7a0f3741d49..bc3183b3560 100644
--- a/chromium/media/cdm/cdm_adapter.cc
+++ b/chromium/media/cdm/cdm_adapter.cc
@@ -447,7 +447,7 @@ void CdmAdapter::Decrypt(StreamType stream_type,
DecoderBuffer::CopyFrom(decrypted_block->DecryptedBuffer()->Data(),
decrypted_block->DecryptedBuffer()->Size()));
decrypted_buffer->set_timestamp(
- base::TimeDelta::FromMicroseconds(decrypted_block->Timestamp()));
+ base::Microseconds(decrypted_block->Timestamp()));
std::move(decrypt_cb).Run(Decryptor::kSuccess, std::move(decrypted_buffer));
}
@@ -656,7 +656,7 @@ cdm::Buffer* CdmAdapter::Allocate(uint32_t capacity) {
void CdmAdapter::SetTimer(int64_t delay_ms, void* context) {
DCHECK(task_runner_->BelongsToCurrentThread());
- auto delay = base::TimeDelta::FromMilliseconds(delay_ms);
+ auto delay = base::Milliseconds(delay_ms);
DVLOG(3) << __func__ << ": delay = " << delay << ", context = " << context;
TRACE_EVENT2("media", "CdmAdapter::SetTimer", "delay_ms", delay_ms, "context",
context);
@@ -1079,7 +1079,7 @@ bool CdmAdapter::AudioFramesDataToAudioFrames(
scoped_refptr<media::AudioBuffer> frame = media::AudioBuffer::CopyFrom(
sample_format, audio_channel_layout_, audio_channel_count,
audio_samples_per_second_, frame_count, &channel_ptrs[0],
- base::TimeDelta::FromMicroseconds(timestamp), pool_);
+ base::Microseconds(timestamp), pool_);
result_frames->push_back(frame);
data += frame_size;
diff --git a/chromium/media/cdm/cdm_adapter_factory.h b/chromium/media/cdm/cdm_adapter_factory.h
index a7ed60de4a7..2468c514c48 100644
--- a/chromium/media/cdm/cdm_adapter_factory.h
+++ b/chromium/media/cdm/cdm_adapter_factory.h
@@ -22,6 +22,10 @@ class MEDIA_EXPORT CdmAdapterFactory final : public CdmFactory {
base::RepeatingCallback<std::unique_ptr<CdmAuxiliaryHelper>()>;
explicit CdmAdapterFactory(HelperCreationCB helper_creation_cb);
+
+ CdmAdapterFactory(const CdmAdapterFactory&) = delete;
+ CdmAdapterFactory& operator=(const CdmAdapterFactory&) = delete;
+
~CdmAdapterFactory() override;
// CdmFactory implementation.
@@ -36,8 +40,6 @@ class MEDIA_EXPORT CdmAdapterFactory final : public CdmFactory {
private:
// Callback to create CdmAuxiliaryHelper for the created CDM.
HelperCreationCB helper_creation_cb_;
-
- DISALLOW_COPY_AND_ASSIGN(CdmAdapterFactory);
};
} // namespace media
diff --git a/chromium/media/cdm/cdm_adapter_unittest.cc b/chromium/media/cdm/cdm_adapter_unittest.cc
index facda35f14e..eb92384a0f6 100644
--- a/chromium/media/cdm/cdm_adapter_unittest.cc
+++ b/chromium/media/cdm/cdm_adapter_unittest.cc
@@ -124,6 +124,9 @@ class CdmAdapterTestBase : public testing::Test,
base::NumberToString(GetCdmInterfaceVersion()));
}
+ CdmAdapterTestBase(const CdmAdapterTestBase&) = delete;
+ CdmAdapterTestBase& operator=(const CdmAdapterTestBase&) = delete;
+
~CdmAdapterTestBase() override { CdmModule::ResetInstanceForTesting(); }
protected:
@@ -186,9 +189,6 @@ class CdmAdapterTestBase : public testing::Test,
scoped_refptr<ContentDecryptionModule> cdm_;
base::test::SingleThreadTaskEnvironment task_environment_;
-
- private:
- DISALLOW_COPY_AND_ASSIGN(CdmAdapterTestBase);
};
class CdmAdapterTestWithClearKeyCdm : public CdmAdapterTestBase {
diff --git a/chromium/media/cdm/cdm_allocator.h b/chromium/media/cdm/cdm_allocator.h
index a15b92f14e1..8ab4f1e2390 100644
--- a/chromium/media/cdm/cdm_allocator.h
+++ b/chromium/media/cdm/cdm_allocator.h
@@ -27,6 +27,9 @@ class MEDIA_EXPORT CdmAllocator {
// Callback to create CdmAllocator for the created CDM.
using CreationCB = base::RepeatingCallback<std::unique_ptr<CdmAllocator>()>;
+ CdmAllocator(const CdmAllocator&) = delete;
+ CdmAllocator& operator=(const CdmAllocator&) = delete;
+
virtual ~CdmAllocator();
// Creates a buffer with at least |capacity| bytes. Caller is required to
@@ -38,9 +41,6 @@ class MEDIA_EXPORT CdmAllocator {
protected:
CdmAllocator();
-
- private:
- DISALLOW_COPY_AND_ASSIGN(CdmAllocator);
};
} // namespace media
diff --git a/chromium/media/cdm/cdm_auxiliary_helper.cc b/chromium/media/cdm/cdm_auxiliary_helper.cc
index 75d70e7eca2..d2b02382150 100644
--- a/chromium/media/cdm/cdm_auxiliary_helper.cc
+++ b/chromium/media/cdm/cdm_auxiliary_helper.cc
@@ -8,7 +8,7 @@
#include "media/cdm/cdm_helpers.h"
#if defined(OS_WIN)
-#include "media/cdm/cdm_preference_data.h"
+#include "media/cdm/media_foundation_cdm_data.h"
#include "third_party/abseil-cpp/absl/types/optional.h"
#endif // defined(OS_WIN)
@@ -55,9 +55,10 @@ void CdmAuxiliaryHelper::GetStorageId(uint32_t version, StorageIdCB callback) {
}
#if defined(OS_WIN)
-void CdmAuxiliaryHelper::GetCdmPreferenceData(GetCdmPreferenceDataCB callback) {
- std::move(callback).Run(std::make_unique<CdmPreferenceData>(
- base::UnguessableToken::Null(), absl::nullopt));
+void CdmAuxiliaryHelper::GetMediaFoundationCdmData(
+ GetMediaFoundationCdmDataCB callback) {
+ std::move(callback).Run(std::make_unique<MediaFoundationCdmData>(
+ base::UnguessableToken::Null(), absl::nullopt, base::FilePath()));
}
void CdmAuxiliaryHelper::SetCdmClientToken(
diff --git a/chromium/media/cdm/cdm_auxiliary_helper.h b/chromium/media/cdm/cdm_auxiliary_helper.h
index 3d4cbd0e785..836c2613ba8 100644
--- a/chromium/media/cdm/cdm_auxiliary_helper.h
+++ b/chromium/media/cdm/cdm_auxiliary_helper.h
@@ -37,6 +37,10 @@ class MEDIA_EXPORT CdmAuxiliaryHelper : public CdmAllocator,
public CdmDocumentService {
public:
CdmAuxiliaryHelper();
+
+ CdmAuxiliaryHelper(const CdmAuxiliaryHelper&) = delete;
+ CdmAuxiliaryHelper& operator=(const CdmAuxiliaryHelper&) = delete;
+
~CdmAuxiliaryHelper() override;
// Callback to report the size of file read by cdm::FileIO created by |this|.
@@ -69,12 +73,9 @@ class MEDIA_EXPORT CdmAuxiliaryHelper : public CdmAllocator,
void GetStorageId(uint32_t version, StorageIdCB callback) override;
#if defined(OS_WIN)
- void GetCdmPreferenceData(GetCdmPreferenceDataCB callback) override;
+ void GetMediaFoundationCdmData(GetMediaFoundationCdmDataCB callback) override;
void SetCdmClientToken(const std::vector<uint8_t>& client_token) override;
#endif // defined(OS_WIN)
-
- private:
- DISALLOW_COPY_AND_ASSIGN(CdmAuxiliaryHelper);
};
} // namespace media
diff --git a/chromium/media/cdm/cdm_context_ref_impl.h b/chromium/media/cdm/cdm_context_ref_impl.h
index 7ea73f99eb9..6b827f1afef 100644
--- a/chromium/media/cdm/cdm_context_ref_impl.h
+++ b/chromium/media/cdm/cdm_context_ref_impl.h
@@ -18,6 +18,10 @@ class ContentDecryptionModule;
class MEDIA_EXPORT CdmContextRefImpl final : public CdmContextRef {
public:
explicit CdmContextRefImpl(scoped_refptr<ContentDecryptionModule> cdm);
+
+ CdmContextRefImpl(const CdmContextRefImpl&) = delete;
+ CdmContextRefImpl& operator=(const CdmContextRefImpl&) = delete;
+
~CdmContextRefImpl() final;
// CdmContextRef implementation.
@@ -26,8 +30,6 @@ class MEDIA_EXPORT CdmContextRefImpl final : public CdmContextRef {
private:
scoped_refptr<ContentDecryptionModule> cdm_;
THREAD_CHECKER(thread_checker_);
-
- DISALLOW_COPY_AND_ASSIGN(CdmContextRefImpl);
};
} // namespace media
diff --git a/chromium/media/cdm/cdm_document_service.h b/chromium/media/cdm/cdm_document_service.h
index 4329bcf7ef2..8b8e4d58dae 100644
--- a/chromium/media/cdm/cdm_document_service.h
+++ b/chromium/media/cdm/cdm_document_service.h
@@ -14,7 +14,7 @@
#include "media/base/media_export.h"
#if defined(OS_WIN)
-#include "media/cdm/cdm_preference_data.h"
+#include "media/cdm/media_foundation_cdm_data.h"
#endif // defined(OS_WIN)
namespace media {
@@ -36,8 +36,8 @@ class MEDIA_EXPORT CdmDocumentService {
const std::vector<uint8_t>& storage_id)>;
#if defined(OS_WIN)
- using GetCdmPreferenceDataCB =
- base::OnceCallback<void(std::unique_ptr<CdmPreferenceData>)>;
+ using GetMediaFoundationCdmDataCB =
+ base::OnceCallback<void(std::unique_ptr<MediaFoundationCdmData>)>;
#endif // defined(OS_WIN)
// Allows authorized services to verify that the underlying platform is
@@ -66,8 +66,9 @@ class MEDIA_EXPORT CdmDocumentService {
virtual void GetStorageId(uint32_t version, StorageIdCB callback) = 0;
#if defined(OS_WIN)
- // Gets the cdm preference data for the origin associated with the CDM.
- virtual void GetCdmPreferenceData(GetCdmPreferenceDataCB callback) = 0;
+ // Gets the Media Foundation cdm data for the origin associated with the CDM.
+ virtual void GetMediaFoundationCdmData(
+ GetMediaFoundationCdmDataCB callback) = 0;
// Sets the client token for the origin associated with the CDM. The token is
// set by the content during license exchange. The token is then saved in the
diff --git a/chromium/media/cdm/cdm_helpers.h b/chromium/media/cdm/cdm_helpers.h
index f4a5a977153..2473756972d 100644
--- a/chromium/media/cdm/cdm_helpers.h
+++ b/chromium/media/cdm/cdm_helpers.h
@@ -22,6 +22,10 @@ class VideoFrame;
class DecryptedBlockImpl final : public cdm::DecryptedBlock {
public:
DecryptedBlockImpl();
+
+ DecryptedBlockImpl(const DecryptedBlockImpl&) = delete;
+ DecryptedBlockImpl& operator=(const DecryptedBlockImpl&) = delete;
+
~DecryptedBlockImpl() final;
// cdm::DecryptedBlock implementation.
@@ -33,14 +37,16 @@ class DecryptedBlockImpl final : public cdm::DecryptedBlock {
private:
cdm::Buffer* buffer_;
int64_t timestamp_;
-
- DISALLOW_COPY_AND_ASSIGN(DecryptedBlockImpl);
};
class MEDIA_EXPORT VideoFrameImpl : public cdm::VideoFrame,
public cdm::VideoFrame_2 {
public:
VideoFrameImpl();
+
+ VideoFrameImpl(const VideoFrameImpl&) = delete;
+ VideoFrameImpl& operator=(const VideoFrameImpl&) = delete;
+
~VideoFrameImpl() override;
// cdm::VideoFrame and cdm::VideoFrame_2 common implementation.
@@ -97,14 +103,15 @@ class MEDIA_EXPORT VideoFrameImpl : public cdm::VideoFrame,
// Presentation timestamp in microseconds.
int64_t timestamp_;
-
- private:
- DISALLOW_COPY_AND_ASSIGN(VideoFrameImpl);
};
class AudioFramesImpl final : public cdm::AudioFrames {
public:
AudioFramesImpl();
+
+ AudioFramesImpl(const AudioFramesImpl&) = delete;
+ AudioFramesImpl& operator=(const AudioFramesImpl&) = delete;
+
~AudioFramesImpl() final;
// cdm::AudioFrames implementation.
@@ -118,8 +125,6 @@ class AudioFramesImpl final : public cdm::AudioFrames {
private:
cdm::Buffer* buffer_;
cdm::AudioFormat format_;
-
- DISALLOW_COPY_AND_ASSIGN(AudioFramesImpl);
};
} // namespace media
diff --git a/chromium/media/cdm/cdm_host_files.h b/chromium/media/cdm/cdm_host_files.h
index 7ed87f68f9a..c54e628d272 100644
--- a/chromium/media/cdm/cdm_host_files.h
+++ b/chromium/media/cdm/cdm_host_files.h
@@ -31,6 +31,10 @@ namespace media {
class MEDIA_EXPORT CdmHostFiles {
public:
CdmHostFiles();
+
+ CdmHostFiles(const CdmHostFiles&) = delete;
+ CdmHostFiles& operator=(const CdmHostFiles&) = delete;
+
~CdmHostFiles();
// Opens all common files and CDM specific files for the CDM at |cdm_path|.
@@ -75,8 +79,6 @@ class MEDIA_EXPORT CdmHostFiles {
// Files specific to each CDM type, e.g. the CDM binary.
ScopedFileVector cdm_specific_files_;
-
- DISALLOW_COPY_AND_ASSIGN(CdmHostFiles);
};
} // namespace media
diff --git a/chromium/media/cdm/cdm_module.h b/chromium/media/cdm/cdm_module.h
index 6cd149f7b53..ac7a196ac2c 100644
--- a/chromium/media/cdm/cdm_module.h
+++ b/chromium/media/cdm/cdm_module.h
@@ -29,6 +29,9 @@ class MEDIA_EXPORT CdmModule {
// Reset the CdmModule instance so that each test has its own instance.
static void ResetInstanceForTesting();
+ CdmModule(const CdmModule&) = delete;
+ CdmModule& operator=(const CdmModule&) = delete;
+
~CdmModule();
using CreateCdmFunc = decltype(&::CreateCdmInstance);
@@ -62,8 +65,6 @@ class MEDIA_EXPORT CdmModule {
InitializeCdmModuleFunc initialize_cdm_module_func_ = nullptr;
DeinitializeCdmModuleFunc deinitialize_cdm_module_func_ = nullptr;
GetCdmVersionFunc get_cdm_version_func_ = nullptr;
-
- DISALLOW_COPY_AND_ASSIGN(CdmModule);
};
} // namespace media
diff --git a/chromium/media/cdm/cdm_paths.cc b/chromium/media/cdm/cdm_paths.cc
index 84c4a471819..725f4201f4a 100644
--- a/chromium/media/cdm/cdm_paths.cc
+++ b/chromium/media/cdm/cdm_paths.cc
@@ -6,7 +6,6 @@
#include <string>
-#include "base/system/sys_info.h"
#include "media/media_buildflags.h"
namespace media {
@@ -41,14 +40,10 @@ base::FilePath GetPlatformSpecificDirectory(const std::string& cdm_base_path) {
}
#if defined(OS_WIN)
-const char kCdmStore[] = "CdmStore";
-
-base::FilePath GetCdmStorePath(const base::FilePath& user_data_dir,
+base::FilePath GetCdmStorePath(const base::FilePath& cdm_store_path_root,
const base::UnguessableToken& cdm_origin_id,
const std::string& key_system) {
- return user_data_dir.AppendASCII(kCdmStore)
- .AppendASCII(base::SysInfo::ProcessCPUArchitecture())
- .AppendASCII(cdm_origin_id.ToString())
+ return cdm_store_path_root.AppendASCII(cdm_origin_id.ToString())
.AppendASCII(key_system);
}
#endif // defined(OS_WIN)
diff --git a/chromium/media/cdm/cdm_paths.h b/chromium/media/cdm/cdm_paths.h
index d761a2bbddc..cbeacc32cfb 100644
--- a/chromium/media/cdm/cdm_paths.h
+++ b/chromium/media/cdm/cdm_paths.h
@@ -45,11 +45,10 @@ base::FilePath GetPlatformSpecificDirectory(const std::string& cdm_base_path);
#if defined(OS_WIN)
// Returns the "CDM store path" to be passed to `MediaFoundationCdm`. The
-// `user_data_dir` is typically the LPAC specific path, e.g.
-// C:\Users\<user>\AppData\Local\Packages\
-// cr.sb.cdm4b414ceb52402c4e188a185dd531c100416d8daf\AC\Google\Chrome\User Data
-// TODO(xhwang): Separate by Chromium user profile as well.
-base::FilePath GetCdmStorePath(const base::FilePath& user_data_dir,
+// `cdm_store_path_root` is typically the path to the Chrome user's profile,
+// e.g.
+// C:\Users\<user>\AppData\Local\Google\Chrome\Default\MediaFoundationCdmStore\x86_x64
+base::FilePath GetCdmStorePath(const base::FilePath& cdm_store_path_root,
const base::UnguessableToken& cdm_origin_id,
const std::string& key_system);
#endif // defined(OS_WIN)
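With the new signature, the caller supplies the store root itself and GetCdmStorePath() only appends the origin id and key system. A hedged usage sketch; the profile directory, subdirectory names, and key-system string are invented, and only the function signature comes from this header:

  #include "base/files/file_path.h"
  #include "base/unguessable_token.h"
  #include "media/cdm/cdm_paths.h"

  base::FilePath ResolveCdmStore(const base::FilePath& profile_dir,
                                 const base::UnguessableToken& origin_id) {
    const base::FilePath root = profile_dir.AppendASCII("MediaFoundationCdmStore")
                                    .AppendASCII("x86_x64");
    // Result: <root>/<origin_id>/<key system>.
    return media::GetCdmStorePath(root, origin_id, "com.example.keysystem");
  }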
diff --git a/chromium/media/cdm/cdm_preference_data.cc b/chromium/media/cdm/cdm_preference_data.cc
deleted file mode 100644
index a3a6a5e3b98..00000000000
--- a/chromium/media/cdm/cdm_preference_data.cc
+++ /dev/null
@@ -1,18 +0,0 @@
-// Copyright 2021 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include "media/cdm/cdm_preference_data.h"
-
-namespace media {
-
-CdmPreferenceData::CdmPreferenceData() = default;
-
-CdmPreferenceData::CdmPreferenceData(
- base::UnguessableToken origin_id,
- absl::optional<std::vector<uint8_t>> client_token)
- : origin_id(origin_id), client_token(client_token) {}
-
-CdmPreferenceData::~CdmPreferenceData() = default;
-
-} // namespace media
diff --git a/chromium/media/cdm/cdm_preference_data.h b/chromium/media/cdm/cdm_preference_data.h
deleted file mode 100644
index ca418b568d0..00000000000
--- a/chromium/media/cdm/cdm_preference_data.h
+++ /dev/null
@@ -1,30 +0,0 @@
-// Copyright 2021 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#ifndef MEDIA_CDM_CDM_PREFERENCE_DATA_H_
-#define MEDIA_CDM_CDM_PREFERENCE_DATA_H_
-
-#include <vector>
-
-#include "base/unguessable_token.h"
-#include "media/base/media_export.h"
-#include "third_party/abseil-cpp/absl/types/optional.h"
-
-namespace media {
-struct MEDIA_EXPORT CdmPreferenceData {
- CdmPreferenceData();
- CdmPreferenceData(base::UnguessableToken origin_id,
- absl::optional<std::vector<uint8_t>> client_token);
-
- CdmPreferenceData(const CdmPreferenceData& other) = delete;
- CdmPreferenceData& operator=(const CdmPreferenceData& other) = delete;
-
- ~CdmPreferenceData();
-
- base::UnguessableToken origin_id;
- absl::optional<std::vector<uint8_t>> client_token;
-};
-} // namespace media
-
-#endif // MEDIA_CDM_CDM_PREFERENCE_DATA_H_
diff --git a/chromium/media/cdm/cdm_type_conversion.cc b/chromium/media/cdm/cdm_type_conversion.cc
index 726aa52be7c..dd40ab1ecef 100644
--- a/chromium/media/cdm/cdm_type_conversion.cc
+++ b/chromium/media/cdm/cdm_type_conversion.cc
@@ -318,9 +318,9 @@ Decryptor::Status ToMediaDecryptorStatus(cdm::Status status) {
cdm::AudioCodec ToCdmAudioCodec(AudioCodec codec) {
switch (codec) {
- case kCodecVorbis:
+ case AudioCodec::kVorbis:
return cdm::kCodecVorbis;
- case kCodecAAC:
+ case AudioCodec::kAAC:
return cdm::kCodecAac;
default:
DVLOG(1) << "Unsupported AudioCodec " << codec;
@@ -354,13 +354,13 @@ SampleFormat ToMediaSampleFormat(cdm::AudioFormat format) {
cdm::VideoCodec ToCdmVideoCodec(VideoCodec codec) {
switch (codec) {
- case kCodecVP8:
+ case VideoCodec::kVP8:
return cdm::kCodecVp8;
- case kCodecH264:
+ case VideoCodec::kH264:
return cdm::kCodecH264;
- case kCodecVP9:
+ case VideoCodec::kVP9:
return cdm::kCodecVp9;
- case kCodecAV1:
+ case VideoCodec::kAV1:
return cdm::kCodecAv1;
default:
DVLOG(1) << "Unsupported VideoCodec " << codec;
@@ -371,19 +371,19 @@ cdm::VideoCodec ToCdmVideoCodec(VideoCodec codec) {
VideoCodec ToMediaVideoCodec(cdm::VideoCodec codec) {
switch (codec) {
case cdm::kUnknownVideoCodec:
- return kUnknownVideoCodec;
+ return VideoCodec::kUnknown;
case cdm::kCodecVp8:
- return kCodecVP8;
+ return VideoCodec::kVP8;
case cdm::kCodecH264:
- return kCodecH264;
+ return VideoCodec::kH264;
case cdm::kCodecVp9:
- return kCodecVP9;
+ return VideoCodec::kVP9;
case cdm::kCodecAv1:
- return kCodecAV1;
+ return VideoCodec::kAV1;
}
NOTREACHED() << "Unexpected cdm::VideoCodec " << codec;
- return kUnknownVideoCodec;
+ return VideoCodec::kUnknown;
}
cdm::VideoCodecProfile ToCdmVideoCodecProfile(VideoCodecProfile profile) {
diff --git a/chromium/media/cdm/cdm_wrapper.h b/chromium/media/cdm/cdm_wrapper.h
index d911516c6b9..e9b2f234d6a 100644
--- a/chromium/media/cdm/cdm_wrapper.h
+++ b/chromium/media/cdm/cdm_wrapper.h
@@ -70,6 +70,9 @@ class CdmWrapper {
GetCdmHostFunc get_cdm_host_func,
void* user_data);
+ CdmWrapper(const CdmWrapper&) = delete;
+ CdmWrapper& operator=(const CdmWrapper&) = delete;
+
virtual ~CdmWrapper() {}
// Returns the version of the CDM interface that the created CDM uses.
@@ -141,9 +144,6 @@ class CdmWrapper {
protected:
CdmWrapper() {}
-
- private:
- DISALLOW_COPY_AND_ASSIGN(CdmWrapper);
};
// Template class that does the CdmWrapper -> CdmInterface conversion. Default
@@ -172,6 +172,9 @@ class CdmWrapperImpl : public CdmWrapper {
static_cast<CdmInterface*>(cdm_instance));
}
+ CdmWrapperImpl(const CdmWrapperImpl&) = delete;
+ CdmWrapperImpl& operator=(const CdmWrapperImpl&) = delete;
+
~CdmWrapperImpl() override { cdm_->Destroy(); }
int GetInterfaceVersion() override { return CdmInterfaceVersion; }
@@ -293,8 +296,6 @@ class CdmWrapperImpl : public CdmWrapper {
CdmWrapperImpl(CdmInterface* cdm) : cdm_(cdm) { DCHECK(cdm_); }
CdmInterface* cdm_;
-
- DISALLOW_COPY_AND_ASSIGN(CdmWrapperImpl);
};
// Specialization for cdm::ContentDecryptionModule_10 methods.
diff --git a/chromium/media/cdm/cenc_decryptor_unittest.cc b/chromium/media/cdm/cenc_decryptor_unittest.cc
index 5b57e689058..8e484f6d5bd 100644
--- a/chromium/media/cdm/cenc_decryptor_unittest.cc
+++ b/chromium/media/cdm/cenc_decryptor_unittest.cc
@@ -148,8 +148,8 @@ TEST_F(CencDecryptorTest, ExtraData) {
auto encrypted_buffer =
CreateEncryptedBuffer(encrypted_block, iv_, subsamples);
- encrypted_buffer->set_timestamp(base::TimeDelta::FromDays(2));
- encrypted_buffer->set_duration(base::TimeDelta::FromMinutes(5));
+ encrypted_buffer->set_timestamp(base::Days(2));
+ encrypted_buffer->set_duration(base::Minutes(5));
encrypted_buffer->set_is_key_frame(true);
encrypted_buffer->CopySideDataFrom(encrypted_block.data(),
encrypted_block.size());
diff --git a/chromium/media/cdm/default_cdm_factory.h b/chromium/media/cdm/default_cdm_factory.h
index 4134fdfb018..343c12c2ae1 100644
--- a/chromium/media/cdm/default_cdm_factory.h
+++ b/chromium/media/cdm/default_cdm_factory.h
@@ -16,6 +16,10 @@ struct CdmConfig;
class MEDIA_EXPORT DefaultCdmFactory final : public CdmFactory {
public:
DefaultCdmFactory();
+
+ DefaultCdmFactory(const DefaultCdmFactory&) = delete;
+ DefaultCdmFactory& operator=(const DefaultCdmFactory&) = delete;
+
~DefaultCdmFactory() final;
// CdmFactory implementation.
@@ -26,9 +30,6 @@ class MEDIA_EXPORT DefaultCdmFactory final : public CdmFactory {
const SessionKeysChangeCB& session_keys_change_cb,
const SessionExpirationUpdateCB& session_expiration_update_cb,
CdmCreatedCB cdm_created_cb) final;
-
- private:
- DISALLOW_COPY_AND_ASSIGN(DefaultCdmFactory);
};
} // namespace media
diff --git a/chromium/media/cdm/external_clear_key_test_helper.h b/chromium/media/cdm/external_clear_key_test_helper.h
index 3a72b39b578..76394f76fbf 100644
--- a/chromium/media/cdm/external_clear_key_test_helper.h
+++ b/chromium/media/cdm/external_clear_key_test_helper.h
@@ -18,6 +18,11 @@ namespace media {
class ExternalClearKeyTestHelper {
public:
ExternalClearKeyTestHelper();
+
+ ExternalClearKeyTestHelper(const ExternalClearKeyTestHelper&) = delete;
+ ExternalClearKeyTestHelper& operator=(const ExternalClearKeyTestHelper&) =
+ delete;
+
~ExternalClearKeyTestHelper();
std::string KeySystemName() { return "org.chromium.externalclearkey"; }
@@ -32,8 +37,6 @@ class ExternalClearKeyTestHelper {
// Keep a reference to the loaded library.
base::FilePath library_path_;
base::ScopedNativeLibrary library_;
-
- DISALLOW_COPY_AND_ASSIGN(ExternalClearKeyTestHelper);
};
} // namespace media
diff --git a/chromium/media/cdm/json_web_key.cc b/chromium/media/cdm/json_web_key.cc
index 0ee7e7828ad..fe30560e360 100644
--- a/chromium/media/cdm/json_web_key.cc
+++ b/chromium/media/cdm/json_web_key.cc
@@ -187,7 +187,7 @@ bool ExtractKeysFromJWKSet(const std::string& jwk_set,
// Create a local list of keys, so that |jwk_keys| only gets updated on
// success.
KeyIdAndKeyPairs local_keys;
- for (size_t i = 0; i < list_val->GetSize(); ++i) {
+ for (size_t i = 0; i < list_val->GetList().size(); ++i) {
base::DictionaryValue* jwk = NULL;
if (!list_val->GetDictionary(i, &jwk)) {
DVLOG(1) << "Unable to access '" << kKeysTag << "'[" << i
@@ -256,7 +256,7 @@ bool ExtractKeyIdsFromKeyIdsInitData(const std::string& input,
// Create a local list of key ids, so that |key_ids| only gets updated on
// success.
KeyIdList local_key_ids;
- for (size_t i = 0; i < list_val->GetSize(); ++i) {
+ for (size_t i = 0; i < list_val->GetList().size(); ++i) {
std::string encoded_key_id;
if (!list_val->GetString(i, &encoded_key_id)) {
error_message->assign("'");
@@ -402,7 +402,7 @@ bool ExtractFirstKeyIdFromLicenseRequest(const std::vector<uint8_t>& license,
}
// Get the first key.
- if (list_val->GetSize() < 1) {
+ if (list_val->GetList().size() < 1) {
DVLOG(1) << "Empty '" << kKeyIdsTag << "' list";
return false;
}
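These json_web_key.cc hunks track the base::Value API migration away from ListValue::GetSize(): the list contents are now reached through GetList(). A minimal sketch of the newer idiom, with placeholder names and independent of this file's parsing logic:

  #include <stddef.h>

  #include "base/values.h"

  size_t CountStringEntries(const base::Value& list_value) {
    size_t count = 0;
    // GetList() exposes the elements; size() and range-for replace the old
    // GetSize()/GetString(i, ...) accessors.
    for (const base::Value& entry : list_value.GetList()) {
      if (entry.is_string())
        ++count;
    }
    return count;
  }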
diff --git a/chromium/media/cdm/library_cdm/cdm_host_proxy_impl.h b/chromium/media/cdm/library_cdm/cdm_host_proxy_impl.h
index 728706ebe9a..1afb69cddda 100644
--- a/chromium/media/cdm/library_cdm/cdm_host_proxy_impl.h
+++ b/chromium/media/cdm/library_cdm/cdm_host_proxy_impl.h
@@ -17,6 +17,10 @@ template <typename HostInterface>
class CdmHostProxyImpl : public CdmHostProxy {
public:
explicit CdmHostProxyImpl(HostInterface* host) : host_(host) {}
+
+ CdmHostProxyImpl(const CdmHostProxyImpl&) = delete;
+ CdmHostProxyImpl& operator=(const CdmHostProxyImpl&) = delete;
+
~CdmHostProxyImpl() override {}
void OnInitialized(bool success) final {
@@ -115,8 +119,6 @@ class CdmHostProxyImpl : public CdmHostProxy {
private:
HostInterface* const host_ = nullptr;
-
- DISALLOW_COPY_AND_ASSIGN(CdmHostProxyImpl);
};
} // namespace media
diff --git a/chromium/media/cdm/library_cdm/clear_key_cdm/cdm_file_adapter.h b/chromium/media/cdm/library_cdm/clear_key_cdm/cdm_file_adapter.h
index 6e1391e7a96..1f73507099b 100644
--- a/chromium/media/cdm/library_cdm/clear_key_cdm/cdm_file_adapter.h
+++ b/chromium/media/cdm/library_cdm/clear_key_cdm/cdm_file_adapter.h
@@ -29,6 +29,10 @@ class CdmFileAdapter : public cdm::FileIOClient {
using WriteCB = base::OnceCallback<void(bool success)>;
explicit CdmFileAdapter(CdmHostProxy* cdm_host_proxy);
+
+ CdmFileAdapter(const CdmFileAdapter&) = delete;
+ CdmFileAdapter& operator=(const CdmFileAdapter&) = delete;
+
~CdmFileAdapter() override;
// Open the file with |name|. |open_cb| will be called when the file is
@@ -59,8 +63,6 @@ class CdmFileAdapter : public cdm::FileIOClient {
ReadCB read_cb_;
WriteCB write_cb_;
cdm::FileIO* file_io_ = nullptr;
-
- DISALLOW_COPY_AND_ASSIGN(CdmFileAdapter);
};
} // namespace media
diff --git a/chromium/media/cdm/library_cdm/clear_key_cdm/cdm_file_io_test.h b/chromium/media/cdm/library_cdm/clear_key_cdm/cdm_file_io_test.h
index c2eadf242b1..f5cda65f518 100644
--- a/chromium/media/cdm/library_cdm/clear_key_cdm/cdm_file_io_test.h
+++ b/chromium/media/cdm/library_cdm/clear_key_cdm/cdm_file_io_test.h
@@ -68,6 +68,10 @@ class FileIOTest : public cdm::FileIOClient {
FileIOTest(const CreateFileIOCB& create_file_io_cb,
const std::string& test_name);
+
+ FileIOTest(const FileIOTest&) = delete;
+ FileIOTest& operator=(const FileIOTest&) = delete;
+
~FileIOTest() override;
// Adds a test step in this test. |this| object doesn't take the ownership of
@@ -154,14 +158,16 @@ class FileIOTest : public cdm::FileIOClient {
// In the current implementation, all ACTION_* are performed on the latest
// opened cdm::FileIO object, hence the stack.
base::stack<cdm::FileIO*> file_io_stack_;
-
- DISALLOW_COPY_AND_ASSIGN(FileIOTest);
};
// Tests cdm::FileIO implementation.
class FileIOTestRunner {
public:
explicit FileIOTestRunner(const CreateFileIOCB& create_file_io_cb);
+
+ FileIOTestRunner(const FileIOTestRunner&) = delete;
+ FileIOTestRunner& operator=(const FileIOTestRunner&) = delete;
+
~FileIOTestRunner();
void AddTests();
@@ -180,8 +186,6 @@ class FileIOTestRunner {
std::vector<uint8_t> large_data_;
size_t total_num_tests_ = 0; // Total number of tests.
size_t num_passed_tests_ = 0; // Number of passed tests.
-
- DISALLOW_COPY_AND_ASSIGN(FileIOTestRunner);
};
} // namespace media
diff --git a/chromium/media/cdm/library_cdm/clear_key_cdm/cdm_video_decoder.cc b/chromium/media/cdm/library_cdm/clear_key_cdm/cdm_video_decoder.cc
index 9fd2daf5cb6..6f15f1ec922 100644
--- a/chromium/media/cdm/library_cdm/clear_key_cdm/cdm_video_decoder.cc
+++ b/chromium/media/cdm/library_cdm/clear_key_cdm/cdm_video_decoder.cc
@@ -15,6 +15,7 @@
#include "base/macros.h"
#include "base/memory/weak_ptr.h"
#include "base/no_destructor.h"
+#include "build/build_config.h"
#include "third_party/abseil-cpp/absl/types/optional.h"
// Necessary to convert async media::VideoDecoder to sync CdmVideoDecoder.
// Typically not recommended for production code, but is ok here since
@@ -149,8 +150,17 @@ void SetupGlobalEnvironmentIfNeeded() {
static base::NoDestructor<base::SingleThreadTaskExecutor> task_executor;
}
- if (!base::CommandLine::InitializedForCurrentProcess())
+ // Initialize CommandLine if not already initialized. Since this is a DLL,
+ // just use empty arguments.
+ if (!base::CommandLine::InitializedForCurrentProcess()) {
+#if defined(OS_WIN)
+ // Use InitUsingArgvForTesting() instead of Init() to avoid dependency on
+ // shell32 API which might not work in the sandbox. See crbug.com/1242710.
+ base::CommandLine::InitUsingArgvForTesting(0, nullptr);
+#else
base::CommandLine::Init(0, nullptr);
+#endif
+ }
}
// Adapts a media::VideoDecoder to a CdmVideoDecoder. Media VideoDecoders
@@ -170,6 +180,9 @@ class VideoDecoderAdapter final : public CdmVideoDecoder {
DCHECK(cdm_host_proxy_);
}
+ VideoDecoderAdapter(const VideoDecoderAdapter&) = delete;
+ VideoDecoderAdapter& operator=(const VideoDecoderAdapter&) = delete;
+
~VideoDecoderAdapter() final = default;
// CdmVideoDecoder implementation.
@@ -288,8 +301,6 @@ class VideoDecoderAdapter final : public CdmVideoDecoder {
VideoFrameQueue decoded_video_frames_;
base::WeakPtrFactory<VideoDecoderAdapter> weak_factory_{this};
-
- DISALLOW_COPY_AND_ASSIGN(VideoDecoderAdapter);
};
} // namespace
diff --git a/chromium/media/cdm/library_cdm/clear_key_cdm/clear_key_cdm.cc b/chromium/media/cdm/library_cdm/clear_key_cdm/clear_key_cdm.cc
index 4bbaa85997e..df837cf768c 100644
--- a/chromium/media/cdm/library_cdm/clear_key_cdm/clear_key_cdm.cc
+++ b/chromium/media/cdm/library_cdm/clear_key_cdm/clear_key_cdm.cc
@@ -91,8 +91,7 @@ static scoped_refptr<media::DecoderBuffer> CopyDecoderBufferFrom(
// TODO(xhwang): Get rid of this copy.
scoped_refptr<media::DecoderBuffer> output_buffer =
media::DecoderBuffer::CopyFrom(input_buffer.data, input_buffer.data_size);
- output_buffer->set_timestamp(
- base::TimeDelta::FromMicroseconds(input_buffer.timestamp));
+ output_buffer->set_timestamp(base::Microseconds(input_buffer.timestamp));
if (input_buffer.encryption_scheme == cdm::EncryptionScheme::kUnencrypted)
return output_buffer;
@@ -457,7 +456,7 @@ void ClearKeyCdm::OnUpdateSuccess(uint32_t promise_id,
if (!has_set_timer_) {
// Make sure the CDM can get time and sleep if necessary.
- constexpr auto kSleepDuration = base::TimeDelta::FromSeconds(1);
+ constexpr auto kSleepDuration = base::Seconds(1);
auto start_time = base::Time::Now();
base::PlatformThread::Sleep(kSleepDuration);
auto time_elapsed = base::Time::Now() - start_time;
diff --git a/chromium/media/cdm/library_cdm/clear_key_cdm/clear_key_cdm.h b/chromium/media/cdm/library_cdm/clear_key_cdm/clear_key_cdm.h
index b48293fce4d..4a70cc7c247 100644
--- a/chromium/media/cdm/library_cdm/clear_key_cdm/clear_key_cdm.h
+++ b/chromium/media/cdm/library_cdm/clear_key_cdm/clear_key_cdm.h
@@ -36,6 +36,10 @@ class ClearKeyCdm : public cdm::ContentDecryptionModule_10,
public:
template <typename HostInterface>
ClearKeyCdm(HostInterface* host, const std::string& key_system);
+
+ ClearKeyCdm(const ClearKeyCdm&) = delete;
+ ClearKeyCdm& operator=(const ClearKeyCdm&) = delete;
+
~ClearKeyCdm() override;
// cdm::ContentDecryptionModule_10 implementation.
@@ -183,8 +187,6 @@ class ClearKeyCdm : public cdm::ContentDecryptionModule_10,
bool is_running_output_protection_test_ = false;
bool is_running_platform_verification_test_ = false;
bool is_running_storage_id_test_ = false;
-
- DISALLOW_COPY_AND_ASSIGN(ClearKeyCdm);
};
} // namespace media
diff --git a/chromium/media/cdm/library_cdm/clear_key_cdm/clear_key_persistent_session_cdm.cc b/chromium/media/cdm/library_cdm/clear_key_cdm/clear_key_persistent_session_cdm.cc
index a6adbf0d0a0..5baec581809 100644
--- a/chromium/media/cdm/library_cdm/clear_key_cdm/clear_key_persistent_session_cdm.cc
+++ b/chromium/media/cdm/library_cdm/clear_key_cdm/clear_key_persistent_session_cdm.cc
@@ -31,6 +31,12 @@ class NewPersistentSessionCdmPromise : public NewSessionCdmPromise {
std::unique_ptr<NewSessionCdmPromise> promise)
: session_created_cb_(std::move(session_created_cb)),
promise_(std::move(promise)) {}
+
+ NewPersistentSessionCdmPromise(const NewPersistentSessionCdmPromise&) =
+ delete;
+ NewPersistentSessionCdmPromise& operator=(
+ const NewPersistentSessionCdmPromise&) = delete;
+
~NewPersistentSessionCdmPromise() override = default;
// NewSessionCdmPromise implementation.
@@ -50,8 +56,6 @@ class NewPersistentSessionCdmPromise : public NewSessionCdmPromise {
private:
SessionCreatedCB session_created_cb_;
std::unique_ptr<NewSessionCdmPromise> promise_;
-
- DISALLOW_COPY_AND_ASSIGN(NewPersistentSessionCdmPromise);
};
// When a session has been loaded, we need to call FinishUpdate() to complete
@@ -64,6 +68,10 @@ class FinishLoadCdmPromise : public SimpleCdmPromise {
FinishLoadCdmPromise(const std::string& session_id,
std::unique_ptr<NewSessionCdmPromise> promise)
: session_id_(session_id), promise_(std::move(promise)) {}
+
+ FinishLoadCdmPromise(const FinishLoadCdmPromise&) = delete;
+ FinishLoadCdmPromise& operator=(const FinishLoadCdmPromise&) = delete;
+
~FinishLoadCdmPromise() override = default;
// CdmSimplePromise implementation.
@@ -83,8 +91,6 @@ class FinishLoadCdmPromise : public SimpleCdmPromise {
private:
std::string session_id_;
std::unique_ptr<NewSessionCdmPromise> promise_;
-
- DISALLOW_COPY_AND_ASSIGN(FinishLoadCdmPromise);
};
} // namespace
diff --git a/chromium/media/cdm/library_cdm/clear_key_cdm/ffmpeg_cdm_audio_decoder.cc b/chromium/media/cdm/library_cdm/clear_key_cdm/ffmpeg_cdm_audio_decoder.cc
index e0a868fe64d..8a78682b70a 100644
--- a/chromium/media/cdm/library_cdm/clear_key_cdm/ffmpeg_cdm_audio_decoder.cc
+++ b/chromium/media/cdm/library_cdm/clear_key_cdm/ffmpeg_cdm_audio_decoder.cc
@@ -217,8 +217,7 @@ cdm::Status FFmpegCdmAudioDecoder::DecodeBuffer(
cdm::AudioFrames* decoded_frames) {
DVLOG(1) << "DecodeBuffer()";
const bool is_end_of_stream = !compressed_buffer;
- base::TimeDelta timestamp =
- base::TimeDelta::FromMicroseconds(input_timestamp);
+ base::TimeDelta timestamp = base::Microseconds(input_timestamp);
if (!is_end_of_stream && timestamp != kNoTimestamp) {
if (last_input_timestamp_ != kNoTimestamp &&
diff --git a/chromium/media/cdm/library_cdm/clear_key_cdm/ffmpeg_cdm_audio_decoder.h b/chromium/media/cdm/library_cdm/clear_key_cdm/ffmpeg_cdm_audio_decoder.h
index 52e3c5308cd..a7544fdd5e1 100644
--- a/chromium/media/cdm/library_cdm/clear_key_cdm/ffmpeg_cdm_audio_decoder.h
+++ b/chromium/media/cdm/library_cdm/clear_key_cdm/ffmpeg_cdm_audio_decoder.h
@@ -33,6 +33,10 @@ class FFmpegDecodingLoop;
class FFmpegCdmAudioDecoder {
public:
explicit FFmpegCdmAudioDecoder(CdmHostProxy* cdm_host_proxy);
+
+ FFmpegCdmAudioDecoder(const FFmpegCdmAudioDecoder&) = delete;
+ FFmpegCdmAudioDecoder& operator=(const FFmpegCdmAudioDecoder&) = delete;
+
~FFmpegCdmAudioDecoder();
bool Initialize(const cdm::AudioDecoderConfig_2& config);
void Deinitialize();
@@ -78,8 +82,6 @@ class FFmpegCdmAudioDecoder {
std::unique_ptr<AudioTimestampHelper> output_timestamp_helper_;
int bytes_per_frame_ = 0;
base::TimeDelta last_input_timestamp_ = kNoTimestamp;
-
- DISALLOW_COPY_AND_ASSIGN(FFmpegCdmAudioDecoder);
};
} // namespace media
diff --git a/chromium/media/cdm/media_foundation_cdm_data.cc b/chromium/media/cdm/media_foundation_cdm_data.cc
new file mode 100644
index 00000000000..0d21a94b20e
--- /dev/null
+++ b/chromium/media/cdm/media_foundation_cdm_data.cc
@@ -0,0 +1,21 @@
+// Copyright 2021 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/cdm/media_foundation_cdm_data.h"
+
+namespace media {
+
+MediaFoundationCdmData::MediaFoundationCdmData() = default;
+
+MediaFoundationCdmData::MediaFoundationCdmData(
+ const base::UnguessableToken& origin_id,
+ const absl::optional<std::vector<uint8_t>>& client_token,
+ const base::FilePath& cdm_store_path_root)
+ : origin_id(origin_id),
+ client_token(client_token),
+ cdm_store_path_root(cdm_store_path_root) {}
+
+MediaFoundationCdmData::~MediaFoundationCdmData() = default;
+
+} // namespace media
diff --git a/chromium/media/cdm/media_foundation_cdm_data.h b/chromium/media/cdm/media_foundation_cdm_data.h
new file mode 100644
index 00000000000..b0967cd6f99
--- /dev/null
+++ b/chromium/media/cdm/media_foundation_cdm_data.h
@@ -0,0 +1,35 @@
+// Copyright 2021 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_CDM_MEDIA_FOUNDATION_CDM_DATA_H_
+#define MEDIA_CDM_MEDIA_FOUNDATION_CDM_DATA_H_
+
+#include <vector>
+
+#include "base/files/file_path.h"
+#include "base/unguessable_token.h"
+#include "media/base/media_export.h"
+#include "third_party/abseil-cpp/absl/types/optional.h"
+
+namespace media {
+struct MEDIA_EXPORT MediaFoundationCdmData {
+ MediaFoundationCdmData();
+ MediaFoundationCdmData(
+ const base::UnguessableToken& origin_id,
+ const absl::optional<std::vector<uint8_t>>& client_token,
+ const base::FilePath& cdm_store_path_root);
+
+ MediaFoundationCdmData(const MediaFoundationCdmData& other) = delete;
+ MediaFoundationCdmData& operator=(const MediaFoundationCdmData& other) =
+ delete;
+
+ ~MediaFoundationCdmData();
+
+ base::UnguessableToken origin_id;
+ absl::optional<std::vector<uint8_t>> client_token;
+ base::FilePath cdm_store_path_root;
+};
+} // namespace media
+
+#endif // MEDIA_CDM_MEDIA_FOUNDATION_CDM_DATA_H_
diff --git a/chromium/media/cdm/mock_helpers.h b/chromium/media/cdm/mock_helpers.h
index 8ae30c94bee..9998a0c4e6b 100644
--- a/chromium/media/cdm/mock_helpers.h
+++ b/chromium/media/cdm/mock_helpers.h
@@ -24,6 +24,10 @@ class MockCdmAuxiliaryHelper : public CdmAuxiliaryHelper {
public:
// `allocator` is optional; can be null if no need to create buffers/frames.
explicit MockCdmAuxiliaryHelper(std::unique_ptr<CdmAllocator> allocator);
+
+ MockCdmAuxiliaryHelper(const MockCdmAuxiliaryHelper&) = delete;
+ MockCdmAuxiliaryHelper& operator=(const MockCdmAuxiliaryHelper&) = delete;
+
~MockCdmAuxiliaryHelper() override;
// CdmAuxiliaryHelper implementation.
@@ -54,15 +58,13 @@ class MockCdmAuxiliaryHelper : public CdmAuxiliaryHelper {
#if defined(OS_WIN)
MOCK_METHOD(void,
- GetCdmPreferenceData,
- (GetCdmPreferenceDataCB callback),
+ GetMediaFoundationCdmData,
+ (GetMediaFoundationCdmDataCB callback),
(override));
#endif // defined(OS_WIN)
private:
std::unique_ptr<CdmAllocator> allocator_;
-
- DISALLOW_COPY_AND_ASSIGN(MockCdmAuxiliaryHelper);
};
} // namespace media
diff --git a/chromium/media/cdm/output_protection.h b/chromium/media/cdm/output_protection.h
index d2bdce3a0b7..ac6fc9651b8 100644
--- a/chromium/media/cdm/output_protection.h
+++ b/chromium/media/cdm/output_protection.h
@@ -16,6 +16,10 @@ namespace media {
class MEDIA_EXPORT OutputProtection {
public:
OutputProtection() = default;
+
+ OutputProtection(const OutputProtection&) = delete;
+ OutputProtection& operator=(const OutputProtection&) = delete;
+
virtual ~OutputProtection() = default;
using QueryStatusCB = base::OnceCallback<
@@ -65,9 +69,6 @@ class MEDIA_EXPORT OutputProtection {
// call QueryStatus().
virtual void EnableProtection(uint32_t desired_protection_mask,
EnableProtectionCB callback) = 0;
-
- private:
- DISALLOW_COPY_AND_ASSIGN(OutputProtection);
};
} // namespace media
diff --git a/chromium/media/cdm/simple_cdm_allocator.cc b/chromium/media/cdm/simple_cdm_allocator.cc
index 6d0f9a3999d..854ad8cf7ea 100644
--- a/chromium/media/cdm/simple_cdm_allocator.cc
+++ b/chromium/media/cdm/simple_cdm_allocator.cc
@@ -20,6 +20,10 @@ namespace {
class SimpleCdmVideoFrame final : public VideoFrameImpl {
public:
SimpleCdmVideoFrame() = default;
+
+ SimpleCdmVideoFrame(const SimpleCdmVideoFrame&) = delete;
+ SimpleCdmVideoFrame& operator=(const SimpleCdmVideoFrame&) = delete;
+
~SimpleCdmVideoFrame() override = default;
// VideoFrameImpl implementation.
@@ -36,7 +40,7 @@ class SimpleCdmVideoFrame final : public VideoFrameImpl {
buffer->Data() + PlaneOffset(cdm::kYPlane),
buffer->Data() + PlaneOffset(cdm::kUPlane),
buffer->Data() + PlaneOffset(cdm::kVPlane),
- base::TimeDelta::FromMicroseconds(Timestamp()));
+ base::Microseconds(Timestamp()));
frame->set_color_space(MediaColorSpace().ToGfxColorSpace());
@@ -49,9 +53,6 @@ class SimpleCdmVideoFrame final : public VideoFrameImpl {
SetFrameBuffer(nullptr);
return frame;
}
-
- private:
- DISALLOW_COPY_AND_ASSIGN(SimpleCdmVideoFrame);
};
} // namespace
diff --git a/chromium/media/cdm/simple_cdm_allocator.h b/chromium/media/cdm/simple_cdm_allocator.h
index 4cdcc0f4997..1ae9e82c039 100644
--- a/chromium/media/cdm/simple_cdm_allocator.h
+++ b/chromium/media/cdm/simple_cdm_allocator.h
@@ -18,14 +18,15 @@ namespace media {
class SimpleCdmAllocator final : public CdmAllocator {
public:
SimpleCdmAllocator();
+
+ SimpleCdmAllocator(const SimpleCdmAllocator&) = delete;
+ SimpleCdmAllocator& operator=(const SimpleCdmAllocator&) = delete;
+
~SimpleCdmAllocator() override;
// CdmAllocator implementation.
cdm::Buffer* CreateCdmBuffer(size_t capacity) override;
std::unique_ptr<VideoFrameImpl> CreateCdmVideoFrame() override;
-
- private:
- DISALLOW_COPY_AND_ASSIGN(SimpleCdmAllocator);
};
} // namespace media
diff --git a/chromium/media/cdm/simple_cdm_allocator_unittest.cc b/chromium/media/cdm/simple_cdm_allocator_unittest.cc
index 69552275955..3158b748dd7 100644
--- a/chromium/media/cdm/simple_cdm_allocator_unittest.cc
+++ b/chromium/media/cdm/simple_cdm_allocator_unittest.cc
@@ -49,13 +49,14 @@ class TestCdmBuffer final : public cdm::Buffer {
class SimpleCdmAllocatorTest : public testing::Test {
public:
SimpleCdmAllocatorTest() = default;
+
+ SimpleCdmAllocatorTest(const SimpleCdmAllocatorTest&) = delete;
+ SimpleCdmAllocatorTest& operator=(const SimpleCdmAllocatorTest&) = delete;
+
~SimpleCdmAllocatorTest() override = default;
protected:
SimpleCdmAllocator allocator_;
-
- private:
- DISALLOW_COPY_AND_ASSIGN(SimpleCdmAllocatorTest);
};
TEST_F(SimpleCdmAllocatorTest, CreateCdmBuffer) {
diff --git a/chromium/media/cdm/supported_audio_codecs.cc b/chromium/media/cdm/supported_audio_codecs.cc
index 1f416ac1f6e..31618890967 100644
--- a/chromium/media/cdm/supported_audio_codecs.cc
+++ b/chromium/media/cdm/supported_audio_codecs.cc
@@ -10,9 +10,9 @@ namespace media {
const std::vector<AudioCodec> GetCdmSupportedAudioCodecs() {
return {
- AudioCodec::kCodecOpus, AudioCodec::kCodecVorbis, AudioCodec::kCodecFLAC,
+ AudioCodec::kOpus, AudioCodec::kVorbis, AudioCodec::kFLAC,
#if BUILDFLAG(USE_PROPRIETARY_CODECS)
- AudioCodec::kCodecAAC,
+ AudioCodec::kAAC,
#endif // BUILDFLAG(USE_PROPRIETARY_CODECS)
};
}
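
The renames above (and in the ffmpeg and decoder files later in this patch) come from AudioCodec becoming a scoped enum, so every enumerator must now be qualified. A standalone sketch of the shape of the migration, not Chromium's actual definition:

    // Old style: unscoped constants such as kCodecOpus leaked into the
    // enclosing namespace. New style: a scoped enum, so call sites spell
    // AudioCodec::kOpus and cannot implicitly convert to int.
    enum class AudioCodec {
      kUnknown,
      kAAC,
      kOpus,
      kVorbis,
      kFLAC,
    };

    bool IsAlwaysSupportedByCdm(AudioCodec codec) {
      return codec == AudioCodec::kOpus || codec == AudioCodec::kVorbis ||
             codec == AudioCodec::kFLAC;
    }
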
diff --git a/chromium/media/cdm/win/media_foundation_cdm_factory.cc b/chromium/media/cdm/win/media_foundation_cdm_factory.cc
index dc678ec8542..8b3aaa30b18 100644
--- a/chromium/media/cdm/win/media_foundation_cdm_factory.cc
+++ b/chromium/media/cdm/win/media_foundation_cdm_factory.cc
@@ -179,10 +179,8 @@ crash_reporter::CrashKeyString<256> g_origin_crash_key("cdm-origin");
} // namespace
MediaFoundationCdmFactory::MediaFoundationCdmFactory(
- std::unique_ptr<CdmAuxiliaryHelper> helper,
- const base::FilePath& user_data_dir)
+ std::unique_ptr<CdmAuxiliaryHelper> helper)
: helper_(std::move(helper)),
- user_data_dir_(user_data_dir),
cdm_origin_crash_key_(&g_origin_crash_key,
helper_->GetCdmOrigin().Serialize()) {}
@@ -213,7 +211,7 @@ void MediaFoundationCdmFactory::Create(
DCHECK(cdm_config.allow_distinctive_identifier);
// Don't cache `cdm_origin_id` in this class since user can clear it any time.
- helper_->GetCdmPreferenceData(base::BindOnce(
+ helper_->GetMediaFoundationCdmData(base::BindOnce(
&MediaFoundationCdmFactory::OnCdmOriginIdObtained,
weak_factory_.GetWeakPtr(), key_system, cdm_config, session_message_cb,
session_closed_cb, session_keys_change_cb, session_expiration_update_cb,
@@ -228,14 +226,14 @@ void MediaFoundationCdmFactory::OnCdmOriginIdObtained(
const SessionKeysChangeCB& session_keys_change_cb,
const SessionExpirationUpdateCB& session_expiration_update_cb,
CdmCreatedCB cdm_created_cb,
- const std::unique_ptr<CdmPreferenceData> cdm_preference_data) {
- if (!cdm_preference_data) {
+ const std::unique_ptr<MediaFoundationCdmData> media_foundation_cdm_data) {
+ if (!media_foundation_cdm_data) {
std::move(cdm_created_cb)
.Run(nullptr, "Failed to get the CDM preference data.");
return;
}
- if (cdm_preference_data->origin_id.is_empty()) {
+ if (media_foundation_cdm_data->origin_id.is_empty()) {
std::move(cdm_created_cb).Run(nullptr, "Failed to get the CDM origin ID.");
return;
}
@@ -243,8 +241,9 @@ void MediaFoundationCdmFactory::OnCdmOriginIdObtained(
auto cdm = base::MakeRefCounted<MediaFoundationCdm>(
base::BindRepeating(&MediaFoundationCdmFactory::CreateMfCdm,
weak_factory_.GetWeakPtr(), key_system, cdm_config,
- cdm_preference_data->origin_id,
- cdm_preference_data->client_token),
+ media_foundation_cdm_data->origin_id,
+ media_foundation_cdm_data->client_token,
+ media_foundation_cdm_data->cdm_store_path_root),
base::BindRepeating(&MediaFoundationCdmFactory::IsTypeSupported,
weak_factory_.GetWeakPtr(), key_system),
base::BindRepeating(&MediaFoundationCdmFactory::StoreClientToken,
@@ -314,6 +313,7 @@ HRESULT MediaFoundationCdmFactory::CreateMfCdmInternal(
const CdmConfig& cdm_config,
const base::UnguessableToken& cdm_origin_id,
const absl::optional<std::vector<uint8_t>>& cdm_client_token,
+ const base::FilePath& cdm_store_path_root,
ComPtr<IMFContentDecryptionModule>& mf_cdm) {
ComPtr<IMFContentDecryptionModuleFactory> cdm_factory;
RETURN_IF_FAILED(GetCdmFactory(key_system, cdm_factory));
@@ -336,7 +336,8 @@ HRESULT MediaFoundationCdmFactory::CreateMfCdmInternal(
&cdm_access));
// Provide a per-user, per-arch, per-origin and per-key-system path.
- auto store_path = GetCdmStorePath(user_data_dir_, cdm_origin_id, key_system);
+ auto store_path =
+ GetCdmStorePath(cdm_store_path_root, cdm_origin_id, key_system);
DVLOG(1) << "store_path=" << store_path;
// Ensure the path exists. If it already exists, this call will do nothing.
@@ -362,10 +363,11 @@ void MediaFoundationCdmFactory::CreateMfCdm(
const CdmConfig& cdm_config,
const base::UnguessableToken& cdm_origin_id,
const absl::optional<std::vector<uint8_t>>& cdm_client_token,
+ const base::FilePath& cdm_store_path_root,
HRESULT& hresult,
Microsoft::WRL::ComPtr<IMFContentDecryptionModule>& mf_cdm) {
hresult = CreateMfCdmInternal(key_system, cdm_config, cdm_origin_id,
- cdm_client_token, mf_cdm);
+ cdm_client_token, cdm_store_path_root, mf_cdm);
}
} // namespace media
diff --git a/chromium/media/cdm/win/media_foundation_cdm_factory.h b/chromium/media/cdm/win/media_foundation_cdm_factory.h
index d9eb8132faf..db1bdf6732a 100644
--- a/chromium/media/cdm/win/media_foundation_cdm_factory.h
+++ b/chromium/media/cdm/win/media_foundation_cdm_factory.h
@@ -26,8 +26,7 @@ namespace media {
class MEDIA_EXPORT MediaFoundationCdmFactory final : public CdmFactory {
public:
- MediaFoundationCdmFactory(std::unique_ptr<CdmAuxiliaryHelper> helper,
- const base::FilePath& user_data_dir);
+ MediaFoundationCdmFactory(std::unique_ptr<CdmAuxiliaryHelper> helper);
MediaFoundationCdmFactory(const MediaFoundationCdmFactory&) = delete;
MediaFoundationCdmFactory& operator=(const MediaFoundationCdmFactory&) =
delete;
@@ -62,7 +61,7 @@ class MEDIA_EXPORT MediaFoundationCdmFactory final : public CdmFactory {
const SessionKeysChangeCB& session_keys_change_cb,
const SessionExpirationUpdateCB& session_expiration_update_cb,
CdmCreatedCB cdm_created_cb,
- const std::unique_ptr<CdmPreferenceData> cdm_preference_data);
+ const std::unique_ptr<MediaFoundationCdmData> media_foundation_cdm_data);
HRESULT GetCdmFactory(
const std::string& key_system,
@@ -79,6 +78,7 @@ class MEDIA_EXPORT MediaFoundationCdmFactory final : public CdmFactory {
const CdmConfig& cdm_config,
const base::UnguessableToken& cdm_origin_id,
const absl::optional<std::vector<uint8_t>>& cdm_client_token,
+ const base::FilePath& cdm_store_path_root,
Microsoft::WRL::ComPtr<IMFContentDecryptionModule>& mf_cdm);
// Same as `CreateMfCdmInternal()`, but returns the HRESULT in out parameter
@@ -87,11 +87,11 @@ class MEDIA_EXPORT MediaFoundationCdmFactory final : public CdmFactory {
const CdmConfig& cdm_config,
const base::UnguessableToken& cdm_origin_id,
const absl::optional<std::vector<uint8_t>>& cdm_client_token,
+ const base::FilePath& cdm_store_path_root,
HRESULT& hresult,
Microsoft::WRL::ComPtr<IMFContentDecryptionModule>& mf_cdm);
std::unique_ptr<CdmAuxiliaryHelper> helper_;
- base::FilePath user_data_dir_;
// CDM origin crash key used in crash reporting.
crash_reporter::ScopedCrashKeyString cdm_origin_crash_key_;
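
Net effect of the factory changes above: MediaFoundationCdmFactory no longer stores a user_data_dir_; the per-origin store path root now arrives with each MediaFoundationCdmData and is threaded through CreateMfCdm()/CreateMfCdmInternal(). A hedged sketch of the updated caller shape (the surrounding code is hypothetical; only the constructor signature and the GetCdmStorePath() usage reflect this patch):

    // Constructing the factory no longer requires a user data directory.
    auto cdm_factory = std::make_unique<media::MediaFoundationCdmFactory>(
        std::move(cdm_helper));

    // Later, per CDM instance, the store path is derived from the data the
    // helper returned rather than from factory state:
    //   auto store_path =
    //       GetCdmStorePath(media_foundation_cdm_data->cdm_store_path_root,
    //                       cdm_origin_id, key_system);
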
diff --git a/chromium/media/cdm/win/media_foundation_cdm_factory_unittest.cc b/chromium/media/cdm/win/media_foundation_cdm_factory_unittest.cc
index 6dbde8dd24d..bed0918df0c 100644
--- a/chromium/media/cdm/win/media_foundation_cdm_factory_unittest.cc
+++ b/chromium/media/cdm/win/media_foundation_cdm_factory_unittest.cc
@@ -44,8 +44,8 @@ class MediaFoundationCdmFactoryTest : public testing::Test {
auto cdm_helper =
std::make_unique<StrictMock<MockCdmAuxiliaryHelper>>(nullptr);
cdm_helper_ = cdm_helper.get();
- cdm_factory_ = std::make_unique<MediaFoundationCdmFactory>(
- std::move(cdm_helper), base::FilePath());
+ cdm_factory_ =
+ std::make_unique<MediaFoundationCdmFactory>(std::move(cdm_helper));
}
~MediaFoundationCdmFactoryTest() override = default;
@@ -102,9 +102,9 @@ TEST_F(MediaFoundationCdmFactoryTest, Create) {
COM_EXPECT_CALL(mf_cdm_factory_, CreateContentDecryptionModuleAccess(
NotNull(), NotNull(), _, _))
.WillOnce(DoAll(SetComPointee<3>(mf_cdm_access_.Get()), Return(S_OK)));
- EXPECT_CALL(*cdm_helper_, GetCdmPreferenceData(_))
- .WillOnce(RunOnceCallback<0>(std::make_unique<CdmPreferenceData>(
- base::UnguessableToken::Create(), absl::nullopt)));
+ EXPECT_CALL(*cdm_helper_, GetMediaFoundationCdmData(_))
+ .WillOnce(RunOnceCallback<0>(std::make_unique<MediaFoundationCdmData>(
+ base::UnguessableToken::Create(), absl::nullopt, base::FilePath())));
COM_EXPECT_CALL(mf_cdm_access_, CreateContentDecryptionModule(NotNull(), _))
.WillOnce(DoAll(SetComPointee<1>(mf_cdm_.Get()), Return(S_OK)));
@@ -115,9 +115,9 @@ TEST_F(MediaFoundationCdmFactoryTest, Create) {
TEST_F(MediaFoundationCdmFactoryTest, CreateCdmFactoryFail) {
SetCreateCdmFactoryCallbackForTesting(/*expect_success=*/false);
- EXPECT_CALL(*cdm_helper_, GetCdmPreferenceData(_))
- .WillOnce(RunOnceCallback<0>(std::make_unique<CdmPreferenceData>(
- base::UnguessableToken::Create(), absl::nullopt)));
+ EXPECT_CALL(*cdm_helper_, GetMediaFoundationCdmData(_))
+ .WillOnce(RunOnceCallback<0>(std::make_unique<MediaFoundationCdmData>(
+ base::UnguessableToken::Create(), absl::nullopt, base::FilePath())));
EXPECT_CALL(cdm_created_cb_, Run(IsNull(), _));
Create();
@@ -128,9 +128,9 @@ TEST_F(MediaFoundationCdmFactoryTest, IsTypeSupportedFail) {
COM_EXPECT_CALL(mf_cdm_factory_, IsTypeSupported(NotNull(), IsNull()))
.WillOnce(Return(FALSE));
- EXPECT_CALL(*cdm_helper_, GetCdmPreferenceData(_))
- .WillOnce(RunOnceCallback<0>(std::make_unique<CdmPreferenceData>(
- base::UnguessableToken::Create(), absl::nullopt)));
+ EXPECT_CALL(*cdm_helper_, GetMediaFoundationCdmData(_))
+ .WillOnce(RunOnceCallback<0>(std::make_unique<MediaFoundationCdmData>(
+ base::UnguessableToken::Create(), absl::nullopt, base::FilePath())));
EXPECT_CALL(cdm_created_cb_, Run(IsNull(), _));
Create();
@@ -144,9 +144,9 @@ TEST_F(MediaFoundationCdmFactoryTest, CreateCdmAccessFail) {
COM_EXPECT_CALL(mf_cdm_factory_, CreateContentDecryptionModuleAccess(
NotNull(), NotNull(), _, _))
.WillOnce(Return(E_FAIL));
- EXPECT_CALL(*cdm_helper_, GetCdmPreferenceData(_))
- .WillOnce(RunOnceCallback<0>(std::make_unique<CdmPreferenceData>(
- base::UnguessableToken::Create(), absl::nullopt)));
+ EXPECT_CALL(*cdm_helper_, GetMediaFoundationCdmData(_))
+ .WillOnce(RunOnceCallback<0>(std::make_unique<MediaFoundationCdmData>(
+ base::UnguessableToken::Create(), absl::nullopt, base::FilePath())));
EXPECT_CALL(cdm_created_cb_, Run(IsNull(), _));
Create();
@@ -155,9 +155,9 @@ TEST_F(MediaFoundationCdmFactoryTest, CreateCdmAccessFail) {
TEST_F(MediaFoundationCdmFactoryTest, NullCdmOriginIdFail) {
SetCreateCdmFactoryCallbackForTesting(/*expect_success=*/true);
- EXPECT_CALL(*cdm_helper_, GetCdmPreferenceData(_))
- .WillOnce(RunOnceCallback<0>(std::make_unique<CdmPreferenceData>(
- base::UnguessableToken::Null(), absl::nullopt)));
+ EXPECT_CALL(*cdm_helper_, GetMediaFoundationCdmData(_))
+ .WillOnce(RunOnceCallback<0>(std::make_unique<MediaFoundationCdmData>(
+ base::UnguessableToken::Null(), absl::nullopt, base::FilePath())));
EXPECT_CALL(cdm_created_cb_, Run(IsNull(), _));
Create();
@@ -171,9 +171,9 @@ TEST_F(MediaFoundationCdmFactoryTest, CreateCdmFail) {
COM_EXPECT_CALL(mf_cdm_factory_, CreateContentDecryptionModuleAccess(
NotNull(), NotNull(), _, _))
.WillOnce(DoAll(SetComPointee<3>(mf_cdm_access_.Get()), Return(S_OK)));
- EXPECT_CALL(*cdm_helper_, GetCdmPreferenceData(_))
- .WillOnce(RunOnceCallback<0>(std::make_unique<CdmPreferenceData>(
- base::UnguessableToken::Create(), absl::nullopt)));
+ EXPECT_CALL(*cdm_helper_, GetMediaFoundationCdmData(_))
+ .WillOnce(RunOnceCallback<0>(std::make_unique<MediaFoundationCdmData>(
+ base::UnguessableToken::Create(), absl::nullopt, base::FilePath())));
COM_EXPECT_CALL(mf_cdm_access_, CreateContentDecryptionModule(NotNull(), _))
.WillOnce(DoAll(SetComPointee<1>(mf_cdm_.Get()), Return(E_FAIL)));
diff --git a/chromium/media/device_monitors/device_monitor_mac.h b/chromium/media/device_monitors/device_monitor_mac.h
index 7a8c43ca8d0..619cc03e963 100644
--- a/chromium/media/device_monitors/device_monitor_mac.h
+++ b/chromium/media/device_monitors/device_monitor_mac.h
@@ -28,6 +28,10 @@ class MEDIA_EXPORT DeviceMonitorMac {
// enumeration will occur.
explicit DeviceMonitorMac(
scoped_refptr<base::SingleThreadTaskRunner> device_task_runner);
+
+ DeviceMonitorMac(const DeviceMonitorMac&) = delete;
+ DeviceMonitorMac& operator=(const DeviceMonitorMac&) = delete;
+
~DeviceMonitorMac();
// Registers the observers for the video device removal, connection and
@@ -47,8 +51,6 @@ class MEDIA_EXPORT DeviceMonitorMac {
// |thread_checker_| is used to check that constructor and StartMonitoring()
// are called in the correct thread, the UI thread, that also owns the object.
base::ThreadChecker thread_checker_;
-
- DISALLOW_COPY_AND_ASSIGN(DeviceMonitorMac);
};
} // namespace media
diff --git a/chromium/media/device_monitors/device_monitor_mac.mm b/chromium/media/device_monitors/device_monitor_mac.mm
index 91df5c668ec..8616895c674 100644
--- a/chromium/media/device_monitors/device_monitor_mac.mm
+++ b/chromium/media/device_monitors/device_monitor_mac.mm
@@ -56,6 +56,10 @@ class DeviceMonitorMacImpl {
// devices were added nor removed and not notifying the |monitor_|.
cached_devices_.push_back(DeviceInfo("invalid", DeviceInfo::kInvalid));
}
+
+ DeviceMonitorMacImpl(const DeviceMonitorMacImpl&) = delete;
+ DeviceMonitorMacImpl& operator=(const DeviceMonitorMacImpl&) = delete;
+
virtual ~DeviceMonitorMacImpl() {}
virtual void OnDeviceChanged() = 0;
@@ -74,9 +78,6 @@ class DeviceMonitorMacImpl {
// Handles to NSNotificationCenter block observers.
id device_arrival_;
id device_removal_;
-
- private:
- DISALLOW_COPY_AND_ASSIGN(DeviceMonitorMacImpl);
};
void DeviceMonitorMacImpl::ConsolidateDevicesListAndNotify(
@@ -290,6 +291,10 @@ class AVFoundationMonitorImpl : public DeviceMonitorMacImpl {
AVFoundationMonitorImpl(
media::DeviceMonitorMac* monitor,
const scoped_refptr<base::SingleThreadTaskRunner>& device_task_runner);
+
+ AVFoundationMonitorImpl(const AVFoundationMonitorImpl&) = delete;
+ AVFoundationMonitorImpl& operator=(const AVFoundationMonitorImpl&) = delete;
+
~AVFoundationMonitorImpl() override;
void OnDeviceChanged() override;
@@ -303,8 +308,6 @@ class AVFoundationMonitorImpl : public DeviceMonitorMacImpl {
base::ThreadChecker main_thread_checker_;
scoped_refptr<SuspendObserverDelegate> suspend_observer_delegate_;
-
- DISALLOW_COPY_AND_ASSIGN(AVFoundationMonitorImpl);
};
AVFoundationMonitorImpl::AVFoundationMonitorImpl(
diff --git a/chromium/media/device_monitors/device_monitor_udev.cc b/chromium/media/device_monitors/device_monitor_udev.cc
index 7a75d47d11a..dd542dced57 100644
--- a/chromium/media/device_monitors/device_monitor_udev.cc
+++ b/chromium/media/device_monitors/device_monitor_udev.cc
@@ -45,6 +45,10 @@ class DeviceMonitorLinux::BlockingTaskRunnerHelper
: public device::UdevWatcher::Observer {
public:
BlockingTaskRunnerHelper();
+
+ BlockingTaskRunnerHelper(const BlockingTaskRunnerHelper&) = delete;
+ BlockingTaskRunnerHelper& operator=(const BlockingTaskRunnerHelper&) = delete;
+
~BlockingTaskRunnerHelper() override = default;
void Initialize();
@@ -60,8 +64,6 @@ class DeviceMonitorLinux::BlockingTaskRunnerHelper
std::unique_ptr<device::UdevWatcher> udev_watcher_;
SEQUENCE_CHECKER(sequence_checker_);
-
- DISALLOW_COPY_AND_ASSIGN(BlockingTaskRunnerHelper);
};
DeviceMonitorLinux::BlockingTaskRunnerHelper::BlockingTaskRunnerHelper() {
diff --git a/chromium/media/device_monitors/device_monitor_udev.h b/chromium/media/device_monitors/device_monitor_udev.h
index 270bbe7fdf3..85af4e8acbf 100644
--- a/chromium/media/device_monitors/device_monitor_udev.h
+++ b/chromium/media/device_monitors/device_monitor_udev.h
@@ -19,6 +19,10 @@ namespace media {
class MEDIA_EXPORT DeviceMonitorLinux {
public:
DeviceMonitorLinux();
+
+ DeviceMonitorLinux(const DeviceMonitorLinux&) = delete;
+ DeviceMonitorLinux& operator=(const DeviceMonitorLinux&) = delete;
+
~DeviceMonitorLinux();
// TODO(mcasas): Consider adding a StartMonitoring() method like
@@ -34,8 +38,6 @@ class MEDIA_EXPORT DeviceMonitorLinux {
// |blocking_task_runner_|.
std::unique_ptr<BlockingTaskRunnerHelper, base::OnTaskRunnerDeleter>
blocking_task_helper_;
-
- DISALLOW_COPY_AND_ASSIGN(DeviceMonitorLinux);
};
} // namespace media
diff --git a/chromium/media/device_monitors/system_message_window_win.h b/chromium/media/device_monitors/system_message_window_win.h
index a52230448b8..f1563ede94b 100644
--- a/chromium/media/device_monitors/system_message_window_win.h
+++ b/chromium/media/device_monitors/system_message_window_win.h
@@ -18,6 +18,9 @@ class MEDIA_EXPORT SystemMessageWindowWin {
public:
SystemMessageWindowWin();
+ SystemMessageWindowWin(const SystemMessageWindowWin&) = delete;
+ SystemMessageWindowWin& operator=(const SystemMessageWindowWin&) = delete;
+
virtual ~SystemMessageWindowWin();
virtual LRESULT OnDeviceChange(UINT event_type, LPARAM data);
@@ -45,8 +48,6 @@ class MEDIA_EXPORT SystemMessageWindowWin {
HWND window_;
class DeviceNotifications;
std::unique_ptr<DeviceNotifications> device_notifications_;
-
- DISALLOW_COPY_AND_ASSIGN(SystemMessageWindowWin);
};
} // namespace media
diff --git a/chromium/media/ffmpeg/ffmpeg_common.cc b/chromium/media/ffmpeg/ffmpeg_common.cc
index faa8de7e406..0e1bf872445 100644
--- a/chromium/media/ffmpeg/ffmpeg_common.cc
+++ b/chromium/media/ffmpeg/ffmpeg_common.cc
@@ -73,7 +73,7 @@ static const AVRational kMicrosBase = { 1, base::Time::kMicrosecondsPerSecond };
base::TimeDelta ConvertFromTimeBase(const AVRational& time_base,
int64_t timestamp) {
int64_t microseconds = av_rescale_q(timestamp, time_base, kMicrosBase);
- return base::TimeDelta::FromMicroseconds(microseconds);
+ return base::Microseconds(microseconds);
}
int64_t ConvertToTimeBase(const AVRational& time_base,
@@ -84,63 +84,63 @@ int64_t ConvertToTimeBase(const AVRational& time_base,
AudioCodec CodecIDToAudioCodec(AVCodecID codec_id) {
switch (codec_id) {
case AV_CODEC_ID_AAC:
- return kCodecAAC;
+ return AudioCodec::kAAC;
#if BUILDFLAG(ENABLE_PLATFORM_AC3_EAC3_AUDIO)
case AV_CODEC_ID_AC3:
- return kCodecAC3;
+ return AudioCodec::kAC3;
case AV_CODEC_ID_EAC3:
- return kCodecEAC3;
+ return AudioCodec::kEAC3;
#endif
case AV_CODEC_ID_MP3:
- return kCodecMP3;
+ return AudioCodec::kMP3;
case AV_CODEC_ID_VORBIS:
- return kCodecVorbis;
+ return AudioCodec::kVorbis;
case AV_CODEC_ID_PCM_U8:
case AV_CODEC_ID_PCM_S16LE:
case AV_CODEC_ID_PCM_S24LE:
case AV_CODEC_ID_PCM_S32LE:
case AV_CODEC_ID_PCM_F32LE:
- return kCodecPCM;
+ return AudioCodec::kPCM;
case AV_CODEC_ID_PCM_S16BE:
- return kCodecPCM_S16BE;
+ return AudioCodec::kPCM_S16BE;
case AV_CODEC_ID_PCM_S24BE:
- return kCodecPCM_S24BE;
+ return AudioCodec::kPCM_S24BE;
case AV_CODEC_ID_FLAC:
- return kCodecFLAC;
+ return AudioCodec::kFLAC;
case AV_CODEC_ID_AMR_NB:
- return kCodecAMR_NB;
+ return AudioCodec::kAMR_NB;
case AV_CODEC_ID_AMR_WB:
- return kCodecAMR_WB;
+ return AudioCodec::kAMR_WB;
case AV_CODEC_ID_GSM_MS:
- return kCodecGSM_MS;
+ return AudioCodec::kGSM_MS;
case AV_CODEC_ID_PCM_ALAW:
- return kCodecPCM_ALAW;
+ return AudioCodec::kPCM_ALAW;
case AV_CODEC_ID_PCM_MULAW:
- return kCodecPCM_MULAW;
+ return AudioCodec::kPCM_MULAW;
case AV_CODEC_ID_OPUS:
- return kCodecOpus;
+ return AudioCodec::kOpus;
case AV_CODEC_ID_ALAC:
- return kCodecALAC;
+ return AudioCodec::kALAC;
#if BUILDFLAG(ENABLE_PLATFORM_MPEG_H_AUDIO)
case AV_CODEC_ID_MPEGH_3D_AUDIO:
- return kCodecMpegHAudio;
+ return AudioCodec::kMpegHAudio;
#endif
default:
DVLOG(1) << "Unknown audio CodecID: " << codec_id;
}
- return kUnknownAudioCodec;
+ return AudioCodec::kUnknown;
}
AVCodecID AudioCodecToCodecID(AudioCodec audio_codec,
SampleFormat sample_format) {
switch (audio_codec) {
- case kCodecAAC:
+ case AudioCodec::kAAC:
return AV_CODEC_ID_AAC;
- case kCodecALAC:
+ case AudioCodec::kALAC:
return AV_CODEC_ID_ALAC;
- case kCodecMP3:
+ case AudioCodec::kMP3:
return AV_CODEC_ID_MP3;
- case kCodecPCM:
+ case AudioCodec::kPCM:
switch (sample_format) {
case kSampleFormatU8:
return AV_CODEC_ID_PCM_U8;
@@ -156,28 +156,28 @@ AVCodecID AudioCodecToCodecID(AudioCodec audio_codec,
DVLOG(1) << "Unsupported sample format: " << sample_format;
}
break;
- case kCodecPCM_S16BE:
+ case AudioCodec::kPCM_S16BE:
return AV_CODEC_ID_PCM_S16BE;
- case kCodecPCM_S24BE:
+ case AudioCodec::kPCM_S24BE:
return AV_CODEC_ID_PCM_S24BE;
- case kCodecVorbis:
+ case AudioCodec::kVorbis:
return AV_CODEC_ID_VORBIS;
- case kCodecFLAC:
+ case AudioCodec::kFLAC:
return AV_CODEC_ID_FLAC;
- case kCodecAMR_NB:
+ case AudioCodec::kAMR_NB:
return AV_CODEC_ID_AMR_NB;
- case kCodecAMR_WB:
+ case AudioCodec::kAMR_WB:
return AV_CODEC_ID_AMR_WB;
- case kCodecGSM_MS:
+ case AudioCodec::kGSM_MS:
return AV_CODEC_ID_GSM_MS;
- case kCodecPCM_ALAW:
+ case AudioCodec::kPCM_ALAW:
return AV_CODEC_ID_PCM_ALAW;
- case kCodecPCM_MULAW:
+ case AudioCodec::kPCM_MULAW:
return AV_CODEC_ID_PCM_MULAW;
- case kCodecOpus:
+ case AudioCodec::kOpus:
return AV_CODEC_ID_OPUS;
#if BUILDFLAG(ENABLE_PLATFORM_MPEG_H_AUDIO)
- case kCodecMpegHAudio:
+ case AudioCodec::kMpegHAudio:
return AV_CODEC_ID_MPEGH_3D_AUDIO;
#endif
default:
@@ -190,44 +190,44 @@ AVCodecID AudioCodecToCodecID(AudioCodec audio_codec,
static VideoCodec CodecIDToVideoCodec(AVCodecID codec_id) {
switch (codec_id) {
case AV_CODEC_ID_H264:
- return kCodecH264;
+ return VideoCodec::kH264;
#if BUILDFLAG(ENABLE_PLATFORM_HEVC)
case AV_CODEC_ID_HEVC:
- return kCodecHEVC;
+ return VideoCodec::kHEVC;
#endif
case AV_CODEC_ID_THEORA:
- return kCodecTheora;
+ return VideoCodec::kTheora;
case AV_CODEC_ID_MPEG4:
- return kCodecMPEG4;
+ return VideoCodec::kMPEG4;
case AV_CODEC_ID_VP8:
- return kCodecVP8;
+ return VideoCodec::kVP8;
case AV_CODEC_ID_VP9:
- return kCodecVP9;
+ return VideoCodec::kVP9;
case AV_CODEC_ID_AV1:
- return kCodecAV1;
+ return VideoCodec::kAV1;
default:
DVLOG(1) << "Unknown video CodecID: " << codec_id;
}
- return kUnknownVideoCodec;
+ return VideoCodec::kUnknown;
}
AVCodecID VideoCodecToCodecID(VideoCodec video_codec) {
switch (video_codec) {
- case kCodecH264:
+ case VideoCodec::kH264:
return AV_CODEC_ID_H264;
#if BUILDFLAG(ENABLE_PLATFORM_HEVC)
- case kCodecHEVC:
+ case VideoCodec::kHEVC:
return AV_CODEC_ID_HEVC;
#endif
- case kCodecTheora:
+ case VideoCodec::kTheora:
return AV_CODEC_ID_THEORA;
- case kCodecMPEG4:
+ case VideoCodec::kMPEG4:
return AV_CODEC_ID_MPEG4;
- case kCodecVP8:
+ case VideoCodec::kVP8:
return AV_CODEC_ID_VP8;
- case kCodecVP9:
+ case VideoCodec::kVP9:
return AV_CODEC_ID_VP9;
- case kCodecAV1:
+ case VideoCodec::kAV1:
return AV_CODEC_ID_AV1;
default:
DVLOG(1) << "Unknown VideoCodec: " << video_codec;
@@ -351,8 +351,8 @@ bool AVCodecContextToAudioDecoderConfig(const AVCodecContext* codec_context,
switch (codec) {
// For AC3/EAC3 we enable only demuxing, but not decoding, so FFmpeg does
// not fill |sample_fmt|.
- case kCodecAC3:
- case kCodecEAC3:
+ case AudioCodec::kAC3:
+ case AudioCodec::kEAC3:
#if BUILDFLAG(ENABLE_PLATFORM_AC3_EAC3_AUDIO)
// The spec for AC3/EAC3 audio is ETSI TS 102 366. According to sections
// F.3.1 and F.5.1 in that spec the sample_format for AC3/EAC3 must be 16.
@@ -362,7 +362,7 @@ bool AVCodecContextToAudioDecoderConfig(const AVCodecContext* codec_context,
#endif
break;
#if BUILDFLAG(ENABLE_PLATFORM_MPEG_H_AUDIO)
- case kCodecMpegHAudio:
+ case AudioCodec::kMpegHAudio:
channel_layout = CHANNEL_LAYOUT_BITSTREAM;
sample_format = kSampleFormatMpegHAudio;
break;
@@ -374,8 +374,8 @@ bool AVCodecContextToAudioDecoderConfig(const AVCodecContext* codec_context,
base::TimeDelta seek_preroll;
if (codec_context->seek_preroll > 0) {
- seek_preroll = base::TimeDelta::FromMicroseconds(
- codec_context->seek_preroll * 1000000.0 / codec_context->sample_rate);
+ seek_preroll = base::Microseconds(codec_context->seek_preroll * 1000000.0 /
+ codec_context->sample_rate);
}
// AVStream occasionally has invalid extra data. See http://crbug.com/517163
@@ -403,18 +403,19 @@ bool AVCodecContextToAudioDecoderConfig(const AVCodecContext* codec_context,
#if BUILDFLAG(ENABLE_PLATFORM_AC3_EAC3_AUDIO)
// These are bitstream formats unknown to ffmpeg, so they don't have
// a known sample format size.
- if (codec == kCodecAC3 || codec == kCodecEAC3)
+ if (codec == AudioCodec::kAC3 || codec == AudioCodec::kEAC3)
return true;
#endif
#if BUILDFLAG(ENABLE_PLATFORM_MPEG_H_AUDIO)
- if (codec == kCodecMpegHAudio)
+ if (codec == AudioCodec::kMpegHAudio)
return true;
#endif
#if BUILDFLAG(USE_PROPRIETARY_CODECS)
// TODO(dalecurtis): Just use the profile from the codec context if ffmpeg
// ever starts supporting xHE-AAC.
- if (codec == kCodecAAC && codec_context->profile == FF_PROFILE_UNKNOWN) {
+ if (codec == AudioCodec::kAAC &&
+ codec_context->profile == FF_PROFILE_UNKNOWN) {
// Errors aren't fatal here, so just drop any MediaLog messages.
NullMediaLog media_log;
mp4::AAC aac_parser;
@@ -521,7 +522,7 @@ bool AVStreamToVideoDecoderConfig(const AVStream* stream,
VideoCodecProfile profile = VIDEO_CODEC_PROFILE_UNKNOWN;
switch (codec) {
#if BUILDFLAG(USE_PROPRIETARY_CODECS)
- case kCodecH264: {
+ case VideoCodec::kH264: {
profile = ProfileIDToVideoCodecProfile(codec_context->profile);
// if the profile is still unknown, try to extract it from
// the extradata using the internal parser
@@ -539,10 +540,10 @@ bool AVStreamToVideoDecoderConfig(const AVStream* stream,
break;
}
#endif
- case kCodecVP8:
+ case VideoCodec::kVP8:
profile = VP8PROFILE_ANY;
break;
- case kCodecVP9:
+ case VideoCodec::kVP9:
switch (codec_context->profile) {
case FF_PROFILE_VP9_0:
profile = VP9PROFILE_PROFILE0;
@@ -561,15 +562,15 @@ bool AVStreamToVideoDecoderConfig(const AVStream* stream,
break;
}
break;
- case kCodecAV1:
+ case VideoCodec::kAV1:
profile = AV1PROFILE_PROFILE_MAIN;
break;
#if BUILDFLAG(ENABLE_PLATFORM_HEVC)
- case kCodecHEVC:
+ case VideoCodec::kHEVC:
profile = HEVCPROFILE_MAIN;
break;
#endif
- case kCodecTheora:
+ case VideoCodec::kTheora:
profile = THEORAPROFILE_ANY;
break;
default:
diff --git a/chromium/media/ffmpeg/ffmpeg_common_unittest.cc b/chromium/media/ffmpeg/ffmpeg_common_unittest.cc
index 9146072ace8..c8bc7038604 100644
--- a/chromium/media/ffmpeg/ffmpeg_common_unittest.cc
+++ b/chromium/media/ffmpeg/ffmpeg_common_unittest.cc
@@ -143,7 +143,7 @@ TEST_F(FFmpegCommonTest, AVStreamToAudioDecoderConfig_OpusAmbisonics_4ch) {
AudioDecoderConfig audio_config;
ASSERT_TRUE(AVStreamToAudioDecoderConfig(stream, &audio_config));
- EXPECT_EQ(kCodecOpus, audio_config.codec());
+ EXPECT_EQ(AudioCodec::kOpus, audio_config.codec());
EXPECT_EQ(CHANNEL_LAYOUT_QUAD, audio_config.channel_layout());
EXPECT_EQ(4, audio_config.channels());
}
@@ -166,7 +166,7 @@ TEST_F(FFmpegCommonTest, AVStreamToAudioDecoderConfig_OpusAmbisonics_11ch) {
AudioDecoderConfig audio_config;
ASSERT_TRUE(AVStreamToAudioDecoderConfig(stream, &audio_config));
- EXPECT_EQ(kCodecOpus, audio_config.codec());
+ EXPECT_EQ(AudioCodec::kOpus, audio_config.codec());
EXPECT_EQ(CHANNEL_LAYOUT_DISCRETE, audio_config.channel_layout());
EXPECT_EQ(11, audio_config.channels());
}
@@ -188,7 +188,7 @@ TEST_F(FFmpegCommonTest, AVStreamToAudioDecoderConfig_9ch_wav) {
AudioDecoderConfig audio_config;
ASSERT_TRUE(AVStreamToAudioDecoderConfig(stream, &audio_config));
- EXPECT_EQ(kCodecPCM, audio_config.codec());
+ EXPECT_EQ(AudioCodec::kPCM, audio_config.codec());
EXPECT_EQ(CHANNEL_LAYOUT_DISCRETE, audio_config.channel_layout());
EXPECT_EQ(9, audio_config.channels());
}
diff --git a/chromium/media/ffmpeg/ffmpeg_decoding_loop.h b/chromium/media/ffmpeg/ffmpeg_decoding_loop.h
index ac51a739158..b1c91d1622b 100644
--- a/chromium/media/ffmpeg/ffmpeg_decoding_loop.h
+++ b/chromium/media/ffmpeg/ffmpeg_decoding_loop.h
@@ -44,6 +44,10 @@ class MEDIA_EXPORT FFmpegDecodingLoop {
// true; note: send packet failures are always fatal.
FFmpegDecodingLoop(AVCodecContext* context,
bool continue_on_decoding_errors = false);
+
+ FFmpegDecodingLoop(const FFmpegDecodingLoop&) = delete;
+ FFmpegDecodingLoop& operator=(const FFmpegDecodingLoop&) = delete;
+
~FFmpegDecodingLoop();
// Callback issued when the decoding loop has produced a frame. |frame| is
@@ -68,8 +72,6 @@ class MEDIA_EXPORT FFmpegDecodingLoop {
AVCodecContext* const context_;
std::unique_ptr<AVFrame, ScopedPtrAVFreeFrame> frame_;
int last_averror_code_ = 0;
-
- DISALLOW_COPY_AND_ASSIGN(FFmpegDecodingLoop);
};
} // namespace media
diff --git a/chromium/media/ffmpeg/ffmpeg_regression_tests.cc b/chromium/media/ffmpeg/ffmpeg_regression_tests.cc
index a450eb827f1..74fb3ac7b7a 100644
--- a/chromium/media/ffmpeg/ffmpeg_regression_tests.cc
+++ b/chromium/media/ffmpeg/ffmpeg_regression_tests.cc
@@ -223,7 +223,7 @@ FFMPEG_TEST_CASE_SEEKING(Cr666770,
"security/666770.mp4",
PIPELINE_ERROR_DECODE,
PIPELINE_ERROR_DECODE,
- base::TimeDelta::FromSecondsD(0.0843));
+ base::Seconds(0.0843));
FFMPEG_TEST_CASE(Cr666874,
"security/666874.mp3",
DEMUXER_ERROR_COULD_NOT_OPEN,
diff --git a/chromium/media/filters/android/media_codec_audio_decoder.cc b/chromium/media/filters/android/media_codec_audio_decoder.cc
index a89182ae501..1fd4784e08c 100644
--- a/chromium/media/filters/android/media_codec_audio_decoder.cc
+++ b/chromium/media/filters/android/media_codec_audio_decoder.cc
@@ -71,11 +71,11 @@ void MediaCodecAudioDecoder::Initialize(const AudioDecoderConfig& config,
is_passthrough_ = MediaCodecUtil::IsPassthroughAudioFormat(config.codec());
sample_format_ = kSampleFormatS16;
- if (config.codec() == kCodecAC3)
+ if (config.codec() == AudioCodec::kAC3)
sample_format_ = kSampleFormatAc3;
- else if (config.codec() == kCodecEAC3)
+ else if (config.codec() == AudioCodec::kEAC3)
sample_format_ = kSampleFormatEac3;
- else if (config.codec() == kCodecMpegHAudio)
+ else if (config.codec() == AudioCodec::kMpegHAudio)
sample_format_ = kSampleFormatMpegHAudio;
if (state_ == STATE_ERROR) {
@@ -88,10 +88,11 @@ void MediaCodecAudioDecoder::Initialize(const AudioDecoderConfig& config,
// We can support only the codecs that MediaCodecBridge can decode.
// TODO(xhwang): Get this list from MediaCodecBridge or just rely on
// attempting to create one to determine whether the codec is supported.
- const bool is_codec_supported =
- config.codec() == kCodecVorbis || config.codec() == kCodecFLAC ||
- config.codec() == kCodecAAC || config.codec() == kCodecOpus ||
- is_passthrough_;
+ const bool is_codec_supported = config.codec() == AudioCodec::kVorbis ||
+ config.codec() == AudioCodec::kFLAC ||
+ config.codec() == AudioCodec::kAAC ||
+ config.codec() == AudioCodec::kOpus ||
+ is_passthrough_;
if (!is_codec_supported) {
DVLOG(1) << "Unsuported codec " << GetCodecName(config.codec());
BindToCurrentLoop(std::move(init_cb))
@@ -221,8 +222,8 @@ void MediaCodecAudioDecoder::Reset(base::OnceClosure closure) {
bool MediaCodecAudioDecoder::NeedsBitstreamConversion() const {
// An AAC stream needs to be converted as ADTS stream.
- DCHECK_NE(config_.codec(), kUnknownAudioCodec);
- return config_.codec() == kCodecAAC;
+ DCHECK_NE(config_.codec(), AudioCodec::kUnknown);
+ return config_.codec() == AudioCodec::kAAC;
}
void MediaCodecAudioDecoder::SetCdm(CdmContext* cdm_context, InitCB init_cb) {
@@ -411,10 +412,10 @@ bool MediaCodecAudioDecoder::OnDecodedFrame(
return false;
}
- if (config_.codec() == kCodecAC3) {
+ if (config_.codec() == AudioCodec::kAC3) {
frame_count = Ac3Util::ParseTotalAc3SampleCount(
audio_buffer->channel_data()[0], out.size);
- } else if (config_.codec() == kCodecEAC3) {
+ } else if (config_.codec() == AudioCodec::kEAC3) {
frame_count = Ac3Util::ParseTotalEac3SampleCount(
audio_buffer->channel_data()[0], out.size);
} else {
diff --git a/chromium/media/filters/android/media_codec_audio_decoder.h b/chromium/media/filters/android/media_codec_audio_decoder.h
index acf37982cb2..e74c3ef5363 100644
--- a/chromium/media/filters/android/media_codec_audio_decoder.h
+++ b/chromium/media/filters/android/media_codec_audio_decoder.h
@@ -81,6 +81,10 @@ class MEDIA_EXPORT MediaCodecAudioDecoder : public AudioDecoder,
public:
explicit MediaCodecAudioDecoder(
scoped_refptr<base::SingleThreadTaskRunner> task_runner);
+
+ MediaCodecAudioDecoder(const MediaCodecAudioDecoder&) = delete;
+ MediaCodecAudioDecoder& operator=(const MediaCodecAudioDecoder&) = delete;
+
~MediaCodecAudioDecoder() override;
// AudioDecoder implementation.
@@ -210,8 +214,6 @@ class MEDIA_EXPORT MediaCodecAudioDecoder : public AudioDecoder,
JavaObjectPtr media_crypto_;
base::WeakPtrFactory<MediaCodecAudioDecoder> weak_factory_{this};
-
- DISALLOW_COPY_AND_ASSIGN(MediaCodecAudioDecoder);
};
} // namespace media
diff --git a/chromium/media/filters/android/video_frame_extractor.h b/chromium/media/filters/android/video_frame_extractor.h
index 11755107976..eab3fb312f0 100644
--- a/chromium/media/filters/android/video_frame_extractor.h
+++ b/chromium/media/filters/android/video_frame_extractor.h
@@ -41,6 +41,10 @@ class MEDIA_EXPORT VideoFrameExtractor {
const VideoDecoderConfig& decoder_config)>;
explicit VideoFrameExtractor(DataSource* data_source);
+
+ VideoFrameExtractor(const VideoFrameExtractor&) = delete;
+ VideoFrameExtractor& operator=(const VideoFrameExtractor&) = delete;
+
~VideoFrameExtractor();
// Starts to retrieve thumbnail from video frame.
@@ -75,8 +79,6 @@ class MEDIA_EXPORT VideoFrameExtractor {
VideoFrameCallback video_frame_callback_;
base::WeakPtrFactory<VideoFrameExtractor> weak_factory_{this};
-
- DISALLOW_COPY_AND_ASSIGN(VideoFrameExtractor);
};
} // namespace media
diff --git a/chromium/media/filters/android/video_frame_extractor_unittest.cc b/chromium/media/filters/android/video_frame_extractor_unittest.cc
index f659425c009..4fcd0972c81 100644
--- a/chromium/media/filters/android/video_frame_extractor_unittest.cc
+++ b/chromium/media/filters/android/video_frame_extractor_unittest.cc
@@ -41,6 +41,10 @@ void OnFrameExtracted(ExtractVideoFrameResult* result,
class VideoFrameExtractorTest : public testing::Test {
public:
VideoFrameExtractorTest() {}
+
+ VideoFrameExtractorTest(const VideoFrameExtractorTest&) = delete;
+ VideoFrameExtractorTest& operator=(const VideoFrameExtractorTest&) = delete;
+
~VideoFrameExtractorTest() override {}
protected:
@@ -69,8 +73,6 @@ class VideoFrameExtractorTest : public testing::Test {
base::ScopedTempDir temp_dir_;
std::unique_ptr<FileDataSource> data_source_;
std::unique_ptr<VideoFrameExtractor> extractor_;
-
- DISALLOW_COPY_AND_ASSIGN(VideoFrameExtractorTest);
};
// Verifies the encoded video frame can be extracted correctly.
@@ -78,7 +80,7 @@ TEST_F(VideoFrameExtractorTest, ExtractVideoFrame) {
auto result = ExtractFrame(GetTestDataFilePath("bear.mp4"));
EXPECT_TRUE(result.success);
EXPECT_GT(result.encoded_frame.size(), 0u);
- EXPECT_EQ(result.decoder_config.codec(), VideoCodec::kCodecH264);
+ EXPECT_EQ(result.decoder_config.codec(), VideoCodec::kH264);
}
// Verifies graceful failure when trying to extract frame from an invalid video
diff --git a/chromium/media/filters/audio_clock.cc b/chromium/media/filters/audio_clock.cc
index 30576e3d87b..73d6806e9d6 100644
--- a/chromium/media/filters/audio_clock.cc
+++ b/chromium/media/filters/audio_clock.cc
@@ -120,7 +120,7 @@ base::TimeDelta AudioClock::TimeUntilPlayback(base::TimeDelta timestamp) const {
frames_until_timestamp += buffered_[i].frames;
}
- return base::TimeDelta::FromMicroseconds(
+ return base::Microseconds(
std::round(frames_until_timestamp * microseconds_per_frame_));
}
@@ -146,10 +146,9 @@ void AudioClock::ContiguousAudioDataBufferedForTesting(
scaled_frames_at_same_rate = scaled_frames;
}
- *total = base::TimeDelta::FromMicroseconds(scaled_frames *
- microseconds_per_frame_);
- *same_rate_total = base::TimeDelta::FromMicroseconds(
- scaled_frames_at_same_rate * microseconds_per_frame_);
+ *total = base::Microseconds(scaled_frames * microseconds_per_frame_);
+ *same_rate_total =
+ base::Microseconds(scaled_frames_at_same_rate * microseconds_per_frame_);
}
AudioClock::AudioData::AudioData(int64_t frames, double playback_rate)
diff --git a/chromium/media/filters/audio_clock.h b/chromium/media/filters/audio_clock.h
index 2d050a9a598..286f355c63b 100644
--- a/chromium/media/filters/audio_clock.h
+++ b/chromium/media/filters/audio_clock.h
@@ -53,6 +53,10 @@ namespace media {
class MEDIA_EXPORT AudioClock {
public:
AudioClock(base::TimeDelta start_timestamp, int sample_rate);
+
+ AudioClock(const AudioClock&) = delete;
+ AudioClock& operator=(const AudioClock&) = delete;
+
~AudioClock();
// |frames_written| amount of audio data scaled to |playback_rate| written.
@@ -91,12 +95,10 @@ class MEDIA_EXPORT AudioClock {
// media data has been played yet. by AudioClock, which would be
// 1000 + 500 + 250 = 1750 ms.
base::TimeDelta front_timestamp() const {
- return base::TimeDelta::FromMicroseconds(
- std::round(front_timestamp_micros_));
+ return base::Microseconds(std::round(front_timestamp_micros_));
}
base::TimeDelta back_timestamp() const {
- return base::TimeDelta::FromMicroseconds(
- std::round(back_timestamp_micros_));
+ return base::Microseconds(std::round(back_timestamp_micros_));
}
// Returns the amount of wall time until |timestamp| will be played by the
@@ -140,8 +142,6 @@ class MEDIA_EXPORT AudioClock {
// See http://crbug.com/564604.
double front_timestamp_micros_;
double back_timestamp_micros_;
-
- DISALLOW_COPY_AND_ASSIGN(AudioClock);
};
} // namespace media
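
The time conversions in this and the surrounding files follow one mechanical rename: the base::TimeDelta::FromXxx() factories become free functions in base (base::Microseconds(), base::Milliseconds(), base::Seconds(), base::Hours()), with base::Seconds() also covering the old FromSecondsD() double overload. A small sketch assuming base/time/time.h from this Chromium revision:

    #include "base/time/time.h"

    base::TimeDelta ExampleTimeouts() {
      constexpr base::TimeDelta kWindow = base::Milliseconds(20);
      const base::TimeDelta preroll = base::Seconds(0.0843);  // double overload
      return kWindow + preroll + base::Microseconds(500);
    }
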
diff --git a/chromium/media/filters/audio_clock_unittest.cc b/chromium/media/filters/audio_clock_unittest.cc
index 551827a26bd..65c0e844bb0 100644
--- a/chromium/media/filters/audio_clock_unittest.cc
+++ b/chromium/media/filters/audio_clock_unittest.cc
@@ -16,6 +16,9 @@ class AudioClockTest : public testing::Test {
public:
AudioClockTest() { SetupClock(base::TimeDelta(), 10); }
+ AudioClockTest(const AudioClockTest&) = delete;
+ AudioClockTest& operator=(const AudioClockTest&) = delete;
+
~AudioClockTest() override = default;
void WroteAudio(int frames_written,
@@ -42,8 +45,7 @@ class AudioClockTest : public testing::Test {
}
int TimeUntilPlaybackInMilliseconds(int timestamp_ms) {
- return clock_
- ->TimeUntilPlayback(base::TimeDelta::FromMilliseconds(timestamp_ms))
+ return clock_->TimeUntilPlayback(base::Milliseconds(timestamp_ms))
.InMilliseconds();
}
@@ -67,20 +69,17 @@ class AudioClockTest : public testing::Test {
int sample_rate_;
std::unique_ptr<AudioClock> clock_;
-
- private:
- DISALLOW_COPY_AND_ASSIGN(AudioClockTest);
};
TEST_F(AudioClockTest, FrontTimestampStartsAtStartTimestamp) {
- base::TimeDelta expected = base::TimeDelta::FromSeconds(123);
+ base::TimeDelta expected = base::Seconds(123);
AudioClock clock(expected, sample_rate_);
EXPECT_EQ(expected, clock.front_timestamp());
}
TEST_F(AudioClockTest, BackTimestampStartsAtStartTimestamp) {
- base::TimeDelta expected = base::TimeDelta::FromSeconds(123);
+ base::TimeDelta expected = base::Seconds(123);
AudioClock clock(expected, sample_rate_);
EXPECT_EQ(expected, clock.back_timestamp());
@@ -307,7 +306,7 @@ TEST_F(AudioClockTest, SupportsYearsWorthOfAudioData) {
// Use number of frames that would be likely to overflow 32-bit integer math.
const int huge_amount_of_frames = std::numeric_limits<int>::max();
const base::TimeDelta huge =
- base::TimeDelta::FromSeconds(huge_amount_of_frames / sample_rate_);
+ base::Seconds(huge_amount_of_frames / sample_rate_);
EXPECT_EQ(2485, huge.InDays()); // Just to give some context on how big...
// Use zero delay to test calculation of current timestamp.
@@ -345,8 +344,7 @@ TEST_F(AudioClockTest, CompensateForSuspendedWrites) {
// Elapsing frames less than we have buffered should do nothing.
const int kDelayFrames = 2;
for (int i = 1000; i <= kBaseTimeMs; i += 1000) {
- clock_->CompensateForSuspendedWrites(base::TimeDelta::FromMilliseconds(i),
- kDelayFrames);
+ clock_->CompensateForSuspendedWrites(base::Milliseconds(i), kDelayFrames);
EXPECT_EQ(kBaseTimeMs - (i - 1000), TimeUntilPlaybackInMilliseconds(0));
// Write silence to simulate maintaining a 7s output buffer.
@@ -355,8 +353,7 @@ TEST_F(AudioClockTest, CompensateForSuspendedWrites) {
// Exhausting all frames should advance timestamps and prime the buffer with
// our delay frames value.
- clock_->CompensateForSuspendedWrites(base::TimeDelta::FromMilliseconds(7000),
- kDelayFrames);
+ clock_->CompensateForSuspendedWrites(base::Milliseconds(7000), kDelayFrames);
EXPECT_EQ(kDelayFrames * 100, TimeUntilPlaybackInMilliseconds(1000));
}
@@ -367,7 +364,7 @@ TEST_F(AudioClockTest, FramesToTimePrecision) {
// Write ~2 hours of data to clock to give any error a significant chance to
// accumulate.
- while (clock_->back_timestamp() <= base::TimeDelta::FromHours(2)) {
+ while (clock_->back_timestamp() <= base::Hours(2)) {
frames_written += 1024;
WroteAudio(1024, 1024, 0, 1);
}
diff --git a/chromium/media/filters/audio_decoder_stream_unittest.cc b/chromium/media/filters/audio_decoder_stream_unittest.cc
index 9e6475c2042..c0a3402f436 100644
--- a/chromium/media/filters/audio_decoder_stream_unittest.cc
+++ b/chromium/media/filters/audio_decoder_stream_unittest.cc
@@ -54,8 +54,12 @@ class AudioDecoderStreamTest : public testing::Test {
base::Unretained(this)),
&media_log_) {
// Any valid config will do.
- demuxer_stream_.set_audio_decoder_config(
- {kCodecAAC, kSampleFormatS16, CHANNEL_LAYOUT_STEREO, 44100, {}, {}});
+ demuxer_stream_.set_audio_decoder_config({AudioCodec::kAAC,
+ kSampleFormatS16,
+ CHANNEL_LAYOUT_STEREO,
+ 44100,
+ {},
+ {}});
EXPECT_CALL(demuxer_stream_, SupportsConfigChanges())
.WillRepeatedly(Return(true));
@@ -80,7 +84,7 @@ class AudioDecoderStreamTest : public testing::Test {
void ProduceDecoderOutput(scoped_refptr<DecoderBuffer> buffer,
AudioDecoder::DecodeCB decode_cb) {
// Make sure successive AudioBuffers have increasing timestamps.
- last_timestamp_ += base::TimeDelta::FromMilliseconds(27);
+ last_timestamp_ += base::Milliseconds(27);
const auto& config = demuxer_stream_.audio_decoder_config();
base::SequencedTaskRunnerHandle::Get()->PostTask(
FROM_HERE,
diff --git a/chromium/media/filters/audio_decoder_unittest.cc b/chromium/media/filters/audio_decoder_unittest.cc
index 127855d41f9..b4724be867d 100644
--- a/chromium/media/filters/audio_decoder_unittest.cc
+++ b/chromium/media/filters/audio_decoder_unittest.cc
@@ -115,10 +115,10 @@ void SetDiscardPadding(AVPacket* packet,
packet, AV_PKT_DATA_SKIP_SAMPLES, &skip_samples_size));
if (skip_samples_size < 4)
return;
- buffer->set_discard_padding(std::make_pair(
- base::TimeDelta::FromSecondsD(base::ByteSwapToLE32(*skip_samples_ptr) /
- samples_per_second),
- base::TimeDelta()));
+ buffer->set_discard_padding(
+ std::make_pair(base::Seconds(base::ByteSwapToLE32(*skip_samples_ptr) /
+ samples_per_second),
+ base::TimeDelta()));
}
} // namespace
@@ -146,6 +146,9 @@ class AudioDecoderTest
}
}
+ AudioDecoderTest(const AudioDecoderTest&) = delete;
+ AudioDecoderTest& operator=(const AudioDecoderTest&) = delete;
+
virtual ~AudioDecoderTest() {
EXPECT_FALSE(pending_decode_);
EXPECT_FALSE(pending_reset_);
@@ -159,7 +162,7 @@ class AudioDecoderTest
VLOG(0) << "Could not run test - no MediaCodec on device.";
return false;
}
- if (params_.codec == kCodecOpus &&
+ if (params_.codec == AudioCodec::kOpus &&
base::android::BuildInfo::GetInstance()->sdk_int() <
base::android::SDK_VERSION_LOLLIPOP) {
VLOG(0) << "Could not run test - Opus is not supported";
@@ -216,7 +219,7 @@ class AudioDecoderTest
#if defined(OS_ANDROID) && BUILDFLAG(USE_PROPRIETARY_CODECS)
// MEDIA_CODEC type requires config->extra_data() for AAC codec. For ADTS
// streams we need to extract it with a separate procedure.
- if (decoder_type_ == MEDIA_CODEC && params_.codec == kCodecAAC &&
+ if (decoder_type_ == MEDIA_CODEC && params_.codec == AudioCodec::kAAC &&
config.extra_data().empty()) {
int sample_rate;
ChannelLayout channel_layout;
@@ -225,7 +228,7 @@ class AudioDecoderTest
packet.data, packet.size, nullptr, &sample_rate,
&channel_layout, nullptr, nullptr, &extra_data),
0);
- config.Initialize(kCodecAAC, kSampleFormatS16, channel_layout,
+ config.Initialize(AudioCodec::kAAC, kSampleFormatS16, channel_layout,
sample_rate, extra_data, EncryptionScheme::kUnencrypted,
base::TimeDelta(), 0);
ASSERT_FALSE(config.extra_data().empty());
@@ -274,7 +277,7 @@ class AudioDecoderTest
// Don't set discard padding for Opus, it already has discard behavior set
// based on the codec delay in the AudioDecoderConfig.
- if (decoder_type_ == FFMPEG && params_.codec != kCodecOpus)
+ if (decoder_type_ == FFMPEG && params_.codec != AudioCodec::kOpus)
SetDiscardPadding(&packet, buffer.get(), params_.samples_per_second);
// DecodeBuffer() shouldn't need the original packet since it uses the copy.
@@ -345,7 +348,7 @@ class AudioDecoderTest
#if defined(OS_ANDROID)
return (base::android::BuildInfo::GetInstance()->sdk_int() <
base::android::SDK_VERSION_LOLLIPOP) &&
- decoder_type_ == MEDIA_CODEC && params_.codec == kCodecAAC;
+ decoder_type_ == MEDIA_CODEC && params_.codec == AudioCodec::kAAC;
#else
return false;
#endif
@@ -414,8 +417,6 @@ class AudioDecoderTest
base::circular_deque<scoped_refptr<AudioBuffer>> decoded_audio_;
base::TimeDelta start_timestamp_;
-
- DISALLOW_COPY_AND_ASSIGN(AudioDecoderTest);
};
const DecodedBufferExpectations kBearOpusExpectations[] = {
@@ -427,8 +428,8 @@ const DecodedBufferExpectations kBearOpusExpectations[] = {
// Test params to test decoder reinitialization. Choose opus because it is
// supported on all platforms we test on.
const TestParams kReinitializeTestParams = {
- kCodecOpus, "bear-opus.ogg", kBearOpusExpectations,
- 24, 48000, CHANNEL_LAYOUT_STEREO};
+ AudioCodec::kOpus, "bear-opus.ogg", kBearOpusExpectations, 24, 48000,
+ CHANNEL_LAYOUT_STEREO};
#if defined(OS_ANDROID)
#if BUILDFLAG(USE_PROPRIETARY_CODECS)
@@ -446,13 +447,13 @@ const DecodedBufferExpectations kHeAacMcExpectations[] = {
#endif // defined(USE_PROPRIETARY_CODECS)
const TestParams kMediaCodecTestParams[] = {
- {kCodecOpus, "bear-opus.ogg", kBearOpusExpectations, 24, 48000,
+ {AudioCodec::kOpus, "bear-opus.ogg", kBearOpusExpectations, 24, 48000,
CHANNEL_LAYOUT_STEREO},
#if BUILDFLAG(USE_PROPRIETARY_CODECS)
- {kCodecAAC, "sfx.adts", kSfxAdtsMcExpectations, 0, 44100,
+ {AudioCodec::kAAC, "sfx.adts", kSfxAdtsMcExpectations, 0, 44100,
CHANNEL_LAYOUT_MONO},
- {kCodecAAC, "bear-audio-implicit-he-aac-v2.aac", kHeAacMcExpectations, 0,
- 24000, CHANNEL_LAYOUT_MONO},
+ {AudioCodec::kAAC, "bear-audio-implicit-he-aac-v2.aac",
+ kHeAacMcExpectations, 0, 24000, CHANNEL_LAYOUT_MONO},
#endif // defined(USE_PROPRIETARY_CODECS)
};
@@ -517,28 +518,29 @@ const DecodedBufferExpectations kSfxOpusExpectations[] = {
#endif
const TestParams kFFmpegTestParams[] = {
- {kCodecMP3, "sfx.mp3", kSfxMp3Expectations, 0, 44100, CHANNEL_LAYOUT_MONO},
+ {AudioCodec::kMP3, "sfx.mp3", kSfxMp3Expectations, 0, 44100,
+ CHANNEL_LAYOUT_MONO},
#if BUILDFLAG(USE_PROPRIETARY_CODECS)
- {kCodecAAC, "sfx.adts", kSfxAdtsExpectations, 0, 44100,
+ {AudioCodec::kAAC, "sfx.adts", kSfxAdtsExpectations, 0, 44100,
CHANNEL_LAYOUT_MONO},
#endif
- {kCodecFLAC, "sfx-flac.mp4", kSfxFlacExpectations, 0, 44100,
+ {AudioCodec::kFLAC, "sfx-flac.mp4", kSfxFlacExpectations, 0, 44100,
CHANNEL_LAYOUT_MONO},
- {kCodecFLAC, "sfx.flac", kSfxFlacExpectations, 0, 44100,
+ {AudioCodec::kFLAC, "sfx.flac", kSfxFlacExpectations, 0, 44100,
CHANNEL_LAYOUT_MONO},
- {kCodecPCM, "sfx_f32le.wav", kSfxWaveExpectations, 0, 44100,
+ {AudioCodec::kPCM, "sfx_f32le.wav", kSfxWaveExpectations, 0, 44100,
CHANNEL_LAYOUT_MONO},
- {kCodecPCM, "4ch.wav", kFourChannelWaveExpectations, 0, 44100,
+ {AudioCodec::kPCM, "4ch.wav", kFourChannelWaveExpectations, 0, 44100,
CHANNEL_LAYOUT_QUAD},
- {kCodecVorbis, "sfx.ogg", kSfxOggExpectations, 0, 44100,
+ {AudioCodec::kVorbis, "sfx.ogg", kSfxOggExpectations, 0, 44100,
CHANNEL_LAYOUT_MONO},
// Note: bear.ogv is incorrectly muxed such that valid samples are given
// negative timestamps, this marks them for discard per the ogg vorbis spec.
- {kCodecVorbis, "bear.ogv", kBearOgvExpectations, -704, 44100,
+ {AudioCodec::kVorbis, "bear.ogv", kBearOgvExpectations, -704, 44100,
CHANNEL_LAYOUT_STEREO},
- {kCodecOpus, "sfx-opus.ogg", kSfxOpusExpectations, -312, 48000,
+ {AudioCodec::kOpus, "sfx-opus.ogg", kSfxOpusExpectations, -312, 48000,
CHANNEL_LAYOUT_MONO},
- {kCodecOpus, "bear-opus.ogg", kBearOpusExpectations, 24, 48000,
+ {AudioCodec::kOpus, "bear-opus.ogg", kBearOpusExpectations, 24, 48000,
CHANNEL_LAYOUT_STEREO},
};
diff --git a/chromium/media/filters/audio_file_reader.cc b/chromium/media/filters/audio_file_reader.cc
index 94a4a72279c..976aa357813 100644
--- a/chromium/media/filters/audio_file_reader.cc
+++ b/chromium/media/filters/audio_file_reader.cc
@@ -29,7 +29,7 @@ static const int kAACRemainderFrameCount = 519;
AudioFileReader::AudioFileReader(FFmpegURLProtocol* protocol)
: stream_index_(0),
protocol_(protocol),
- audio_codec_(kUnknownAudioCodec),
+ audio_codec_(AudioCodec::kUnknown),
channels_(0),
sample_rate_(0),
av_sample_format_(0) {}
@@ -169,7 +169,7 @@ base::TimeDelta AudioFileReader::GetDuration() const {
base::CheckedNumeric<int64_t> estimated_duration_us =
glue_->format_context()->duration;
- if (audio_codec_ == kCodecAAC) {
+ if (audio_codec_ == AudioCodec::kAAC) {
// For certain AAC-encoded files, FFMPEG's estimated frame count might not
// be sufficient to capture the entire audio content that we want. This is
// especially noticeable for short files (< 10ms) resulting in silence
@@ -242,12 +242,12 @@ bool AudioFileReader::OnNewFrame(
// silence from being output. In the case where we are also discarding some
// portion of the packet (as indicated by a negative pts), we further want to
// adjust the duration downward by however much exists before zero.
- if (audio_codec_ == kCodecAAC && frame->pkt_duration) {
+ if (audio_codec_ == AudioCodec::kAAC && frame->pkt_duration) {
const base::TimeDelta pkt_duration = ConvertFromTimeBase(
glue_->format_context()->streams[stream_index_]->time_base,
frame->pkt_duration + std::min(static_cast<int64_t>(0), frame->pts));
- const base::TimeDelta frame_duration = base::TimeDelta::FromSecondsD(
- frames_read / static_cast<double>(sample_rate_));
+ const base::TimeDelta frame_duration =
+ base::Seconds(frames_read / static_cast<double>(sample_rate_));
if (pkt_duration < frame_duration && pkt_duration > base::TimeDelta()) {
const int new_frames_read =
diff --git a/chromium/media/filters/audio_file_reader.h b/chromium/media/filters/audio_file_reader.h
index 3aaffcf52b9..9955b3d01fb 100644
--- a/chromium/media/filters/audio_file_reader.h
+++ b/chromium/media/filters/audio_file_reader.h
@@ -33,6 +33,10 @@ class MEDIA_EXPORT AudioFileReader {
// The AudioFileReader does not take ownership of |protocol| and
// simply maintains a weak reference to it.
explicit AudioFileReader(FFmpegURLProtocol* protocol);
+
+ AudioFileReader(const AudioFileReader&) = delete;
+ AudioFileReader& operator=(const AudioFileReader&) = delete;
+
virtual ~AudioFileReader();
// Open() reads the audio data format so that the sample_rate(),
@@ -110,8 +114,6 @@ class MEDIA_EXPORT AudioFileReader {
// AVSampleFormat initially requested; not Chrome's SampleFormat.
int av_sample_format_;
-
- DISALLOW_COPY_AND_ASSIGN(AudioFileReader);
};
} // namespace media
diff --git a/chromium/media/filters/audio_file_reader_unittest.cc b/chromium/media/filters/audio_file_reader_unittest.cc
index fe5e27a5e31..fc295d8a4e4 100644
--- a/chromium/media/filters/audio_file_reader_unittest.cc
+++ b/chromium/media/filters/audio_file_reader_unittest.cc
@@ -23,6 +23,10 @@ namespace media {
class AudioFileReaderTest : public testing::Test {
public:
AudioFileReaderTest() : packet_verification_disabled_(false) {}
+
+ AudioFileReaderTest(const AudioFileReaderTest&) = delete;
+ AudioFileReaderTest& operator=(const AudioFileReaderTest&) = delete;
+
~AudioFileReaderTest() override = default;
void Initialize(const char* filename) {
@@ -140,8 +144,6 @@ class AudioFileReaderTest : public testing::Test {
std::unique_ptr<InMemoryUrlProtocol> protocol_;
std::unique_ptr<AudioFileReader> reader_;
bool packet_verification_disabled_;
-
- DISALLOW_COPY_AND_ASSIGN(AudioFileReaderTest);
};
TEST_F(AudioFileReaderTest, WithoutOpen) {
@@ -154,42 +156,42 @@ TEST_F(AudioFileReaderTest, InvalidFile) {
TEST_F(AudioFileReaderTest, UnknownDuration) {
RunTest("bear-320x240-live.webm", "-3.59,-2.06,-0.43,2.15,0.77,-0.95,", 2,
- 44100, base::TimeDelta::FromMicroseconds(-1), -1, 121024);
+ 44100, base::Microseconds(-1), -1, 121024);
}
TEST_F(AudioFileReaderTest, WithVideo) {
RunTest("bear.ogv", "-0.73,0.92,0.48,-0.07,-0.92,-0.88,", 2, 44100,
- base::TimeDelta::FromMicroseconds(1011520), 44609, 45632);
+ base::Microseconds(1011520), 44609, 45632);
}
TEST_F(AudioFileReaderTest, Vorbis) {
RunTest("sfx.ogg", "2.17,3.31,5.15,6.33,5.97,4.35,", 1, 44100,
- base::TimeDelta::FromMicroseconds(350001), 15436, 15936);
+ base::Microseconds(350001), 15436, 15936);
}
TEST_F(AudioFileReaderTest, WaveU8) {
RunTest("sfx_u8.wav", "-1.23,-1.57,-1.14,-0.91,-0.87,-0.07,", 1, 44100,
- base::TimeDelta::FromMicroseconds(288414), 12720, 12719);
+ base::Microseconds(288414), 12720, 12719);
}
TEST_F(AudioFileReaderTest, WaveS16LE) {
RunTest("sfx_s16le.wav", "3.05,2.87,3.00,3.32,3.58,4.08,", 1, 44100,
- base::TimeDelta::FromMicroseconds(288414), 12720, 12719);
+ base::Microseconds(288414), 12720, 12719);
}
TEST_F(AudioFileReaderTest, WaveS24LE) {
RunTest("sfx_s24le.wav", "3.03,2.86,2.99,3.31,3.57,4.06,", 1, 44100,
- base::TimeDelta::FromMicroseconds(288414), 12720, 12719);
+ base::Microseconds(288414), 12720, 12719);
}
TEST_F(AudioFileReaderTest, WaveF32LE) {
RunTest("sfx_f32le.wav", "3.03,2.86,2.99,3.31,3.57,4.06,", 1, 44100,
- base::TimeDelta::FromMicroseconds(288414), 12720, 12719);
+ base::Microseconds(288414), 12720, 12719);
}
TEST_F(AudioFileReaderTest, MP3) {
RunTest("sfx.mp3", "1.30,2.72,4.56,5.08,3.74,2.03,", 1, 44100,
- base::TimeDelta::FromMicroseconds(313470), 13825, 11025);
+ base::Microseconds(313470), 13825, 11025);
}
TEST_F(AudioFileReaderTest, CorruptMP3) {
@@ -197,23 +199,23 @@ TEST_F(AudioFileReaderTest, CorruptMP3) {
// make any guarantees on packet consistency in this case.
disable_packet_verification();
RunTest("corrupt.mp3", "-4.95,-2.95,-0.44,1.16,0.31,-2.21,", 1, 44100,
- base::TimeDelta::FromMicroseconds(1018801), 44930, 44928);
+ base::Microseconds(1018801), 44930, 44928);
}
#if BUILDFLAG(USE_PROPRIETARY_CODECS)
TEST_F(AudioFileReaderTest, AAC) {
RunTest("sfx.m4a", "0.79,2.31,4.15,4.92,4.04,1.44,", 1, 44100,
- base::TimeDelta::FromMicroseconds(371660), 16391, 12701);
+ base::Microseconds(371660), 16391, 12701);
}
TEST_F(AudioFileReaderTest, AAC_SinglePacket) {
RunTest("440hz-10ms.m4a", "3.77,4.53,4.75,3.48,3.67,3.76,", 1, 44100,
- base::TimeDelta::FromMicroseconds(69660), 3073, 441);
+ base::Microseconds(69660), 3073, 441);
}
TEST_F(AudioFileReaderTest, AAC_ADTS) {
RunTest("sfx.adts", "1.80,1.66,2.31,3.26,4.46,3.36,", 1, 44100,
- base::TimeDelta::FromMicroseconds(384733), 16967, 13312);
+ base::Microseconds(384733), 16967, 13312);
}
TEST_F(AudioFileReaderTest, MidStreamConfigChangesFail) {
@@ -227,7 +229,7 @@ TEST_F(AudioFileReaderTest, VorbisInvalidChannelLayout) {
TEST_F(AudioFileReaderTest, WaveValidFourChannelLayout) {
RunTest("4ch.wav", "131.71,38.02,130.31,44.89,135.98,42.52,", 4, 44100,
- base::TimeDelta::FromMicroseconds(100001), 4411, 4410);
+ base::Microseconds(100001), 4411, 4410);
}
TEST_F(AudioFileReaderTest, ReadPartialMP3) {
diff --git a/chromium/media/filters/audio_renderer_algorithm.cc b/chromium/media/filters/audio_renderer_algorithm.cc
index d3e1e1fc834..12195522a4e 100644
--- a/chromium/media/filters/audio_renderer_algorithm.cc
+++ b/chromium/media/filters/audio_renderer_algorithm.cc
@@ -47,21 +47,18 @@ namespace media {
// |search_block_center_offset_|.
// Overlap-and-add window size in milliseconds.
-constexpr base::TimeDelta kOlaWindowSize =
- base::TimeDelta::FromMilliseconds(20);
+constexpr base::TimeDelta kOlaWindowSize = base::Milliseconds(20);
// Size of search interval in milliseconds. The search interval is
// [-delta delta] around |output_index_| * |playback_rate|. So the search
// interval is 2 * delta.
-constexpr base::TimeDelta kWsolaSearchInterval =
- base::TimeDelta::FromMilliseconds(30);
+constexpr base::TimeDelta kWsolaSearchInterval = base::Milliseconds(30);
// The maximum size for the |audio_buffer_|. Arbitrarily determined.
-constexpr base::TimeDelta kMaxCapacity = base::TimeDelta::FromSeconds(3);
+constexpr base::TimeDelta kMaxCapacity = base::Seconds(3);
// The minimum size for the |audio_buffer_|. Arbitrarily determined.
-constexpr base::TimeDelta kStartingCapacity =
- base::TimeDelta::FromMilliseconds(200);
+constexpr base::TimeDelta kStartingCapacity = base::Milliseconds(200);
// The minimum size for the |audio_buffer_| for encrypted streams.
// Set this to be larger than |kStartingCapacity| because the performance of
@@ -69,7 +66,7 @@ constexpr base::TimeDelta kStartingCapacity =
// potentially IPC overhead. For the context, see https://crbug.com/403462,
// https://crbug.com/718161 and https://crbug.com/879970.
constexpr base::TimeDelta kStartingCapacityForEncrypted =
- base::TimeDelta::FromMilliseconds(500);
+ base::Milliseconds(500);
AudioRendererAlgorithm::AudioRendererAlgorithm(MediaLog* media_log)
: AudioRendererAlgorithm(
diff --git a/chromium/media/filters/audio_renderer_algorithm.h b/chromium/media/filters/audio_renderer_algorithm.h
index bd5cffea050..68b2c41d861 100644
--- a/chromium/media/filters/audio_renderer_algorithm.h
+++ b/chromium/media/filters/audio_renderer_algorithm.h
@@ -44,6 +44,10 @@ class MEDIA_EXPORT AudioRendererAlgorithm {
AudioRendererAlgorithm(MediaLog* media_log);
AudioRendererAlgorithm(MediaLog* media_log,
AudioRendererAlgorithmParameters params);
+
+ AudioRendererAlgorithm(const AudioRendererAlgorithm&) = delete;
+ AudioRendererAlgorithm& operator=(const AudioRendererAlgorithm&) = delete;
+
~AudioRendererAlgorithm();
// Initializes this object with information about the audio stream.
@@ -322,8 +326,6 @@ class MEDIA_EXPORT AudioRendererAlgorithm {
int64_t max_capacity_;
FillBufferMode last_mode_ = FillBufferMode::kPassthrough;
-
- DISALLOW_COPY_AND_ASSIGN(AudioRendererAlgorithm);
};
} // namespace media
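The constants above show the base::TimeDelta migration applied across this baseline: the static factories base::TimeDelta::FromSeconds(), FromMilliseconds(), FromMicroseconds() and FromSecondsD() give way to the shorter free functions declared in base/time/time.h. A minimal sketch, assuming a Chromium build environment (the helper function is hypothetical):

#include "base/time/time.h"

// Old: base::TimeDelta::FromMilliseconds(20) / FromSeconds(3).
constexpr base::TimeDelta kOlaWindowSize = base::Milliseconds(20);
constexpr base::TimeDelta kMaxCapacity = base::Seconds(3);

// base::Seconds() is templated over arithmetic types, so it also covers the
// floating-point FromSecondsD() spelling.
base::TimeDelta FrameDuration(int frames_read, int sample_rate) {
  return base::Seconds(frames_read / static_cast<double>(sample_rate));
}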
diff --git a/chromium/media/filters/audio_renderer_algorithm_unittest.cc b/chromium/media/filters/audio_renderer_algorithm_unittest.cc
index 465b4cd1e2c..13e197129bc 100644
--- a/chromium/media/filters/audio_renderer_algorithm_unittest.cc
+++ b/chromium/media/filters/audio_renderer_algorithm_unittest.cc
@@ -964,7 +964,7 @@ TEST_F(AudioRendererAlgorithmTest, LowLatencyHint) {
EXPECT_FALSE(algorithm_.IsQueueFull());
// Set a new *slightly higher* hint. Verify we're no longer "adequate".
- low_latency_hint += base::TimeDelta::FromMilliseconds(10);
+ low_latency_hint += base::Milliseconds(10);
algorithm_.SetLatencyHint(low_latency_hint);
EXPECT_FALSE(algorithm_.IsQueueAdequateForPlayback());
@@ -1070,9 +1070,9 @@ TEST_F(AudioRendererAlgorithmTest, ClampLatencyHint) {
algorithm_.FlushBuffers();
// Set a crazy high latency hint.
- algorithm_.SetLatencyHint(base::TimeDelta::FromSeconds(100));
+ algorithm_.SetLatencyHint(base::Seconds(100));
- const base::TimeDelta kDefaultMax = base::TimeDelta::FromSeconds(3);
+ const base::TimeDelta kDefaultMax = base::Seconds(3);
// Verify "full" and "adequate" thresholds increased, but to a known max well
  // below the hinted value.
EXPECT_GT(algorithm_.QueueCapacity(), default_capacity);
@@ -1083,7 +1083,7 @@ TEST_F(AudioRendererAlgorithmTest, ClampLatencyHint) {
algorithm_.FlushBuffers();
// Set an impossibly low latency hint.
- algorithm_.SetLatencyHint(base::TimeDelta::FromSeconds(0));
+ algorithm_.SetLatencyHint(base::Seconds(0));
// Verify "full" and "adequate" thresholds decreased, but to a known minimum
// well above the hinted value.
diff --git a/chromium/media/filters/audio_timestamp_validator.h b/chromium/media/filters/audio_timestamp_validator.h
index 7a2ebdf71f9..d3020d9d2e4 100644
--- a/chromium/media/filters/audio_timestamp_validator.h
+++ b/chromium/media/filters/audio_timestamp_validator.h
@@ -20,6 +20,10 @@ class MEDIA_EXPORT AudioTimestampValidator {
public:
AudioTimestampValidator(const AudioDecoderConfig& decoder_config,
MediaLog* media_log);
+
+ AudioTimestampValidator(const AudioTimestampValidator&) = delete;
+ AudioTimestampValidator& operator=(const AudioTimestampValidator&) = delete;
+
~AudioTimestampValidator();
// These methods monitor DecoderBuffer timestamps for gaps for the purpose of
@@ -61,8 +65,6 @@ class MEDIA_EXPORT AudioTimestampValidator {
// Tracks the number of MEDIA_LOG warnings when large timestamp gap detected.
int num_timestamp_gap_warnings_ = 0;
-
- DISALLOW_COPY_AND_ASSIGN(AudioTimestampValidator);
};
} // namespace media
diff --git a/chromium/media/filters/audio_timestamp_validator_unittest.cc b/chromium/media/filters/audio_timestamp_validator_unittest.cc
index b662ed6ef84..17ca4a789b9 100644
--- a/chromium/media/filters/audio_timestamp_validator_unittest.cc
+++ b/chromium/media/filters/audio_timestamp_validator_unittest.cc
@@ -19,12 +19,11 @@ using ::testing::HasSubstr;
namespace media {
// Constants to specify the type of audio data used.
-static const AudioCodec kCodec = kCodecVorbis;
+static const AudioCodec kCodec = AudioCodec::kVorbis;
static const SampleFormat kSampleFormat = kSampleFormatPlanarF32;
static const base::TimeDelta kSeekPreroll;
static const int kSamplesPerSecond = 10000;
-static const base::TimeDelta kBufferDuration =
- base::TimeDelta::FromMilliseconds(20);
+static const base::TimeDelta kBufferDuration = base::Milliseconds(20);
static const ChannelLayout kChannelLayout = CHANNEL_LAYOUT_STEREO;
static const int kChannelCount = 2;
static const int kChannels = ChannelLayoutToChannelCount(kChannelLayout);
@@ -78,9 +77,8 @@ TEST_P(AudioTimestampValidatorTest, WarnForEraticTimes) {
AudioTimestampValidator validator(decoder_config, &media_log_);
- const base::TimeDelta kRandomOffsets[] = {
- base::TimeDelta::FromMilliseconds(100),
- base::TimeDelta::FromMilliseconds(350)};
+ const base::TimeDelta kRandomOffsets[] = {base::Milliseconds(100),
+ base::Milliseconds(350)};
for (int i = 0; i < 100; ++i) {
// Each buffer's timestamp is kBufferDuration from the previous buffer.
@@ -168,7 +166,7 @@ TEST_P(AudioTimestampValidatorTest, SingleWarnForSingleLargeGap) {
// Halfway through the stream, introduce sudden gap of 50 milliseconds.
base::TimeDelta offset;
if (i >= 50)
- offset = base::TimeDelta::FromMilliseconds(100);
+ offset = base::Milliseconds(100);
    // This gap never widens, so expect only a single warning when it's first
// introduced.
@@ -220,7 +218,7 @@ TEST_P(AudioTimestampValidatorTest, RepeatedWarnForSlowAccumulatingDrift) {
// iteration.
base::TimeDelta offset;
if (i >= output_delay_ + 2)
- offset = i * base::TimeDelta::FromMilliseconds(1);
+ offset = i * base::Milliseconds(1);
scoped_refptr<DecoderBuffer> encoded_buffer = new DecoderBuffer(0);
encoded_buffer->set_timestamp((i * kBufferDuration) + offset);
@@ -228,7 +226,7 @@ TEST_P(AudioTimestampValidatorTest, RepeatedWarnForSlowAccumulatingDrift) {
// Expect gap warnings to start when drift hits 50 milliseconds. Warnings
// should continue as the gap widens until log limit is hit.
- if (offset > base::TimeDelta::FromMilliseconds(50)) {
+ if (offset > base::Milliseconds(50)) {
EXPECT_LIMITED_MEDIA_LOG(HasSubstr("timestamp gap detected"),
num_timestamp_gap_warnings,
kMaxTimestampGapWarnings);
@@ -253,10 +251,9 @@ TEST_P(AudioTimestampValidatorTest, RepeatedWarnForSlowAccumulatingDrift) {
INSTANTIATE_TEST_SUITE_P(
All,
AudioTimestampValidatorTest,
- ::testing::Combine(
- ::testing::Values(0, 10), // output delay
- ::testing::Values(0, 512), // codec delay
- ::testing::Values(base::TimeDelta(), // front discard
- base::TimeDelta::FromMilliseconds(65))));
+ ::testing::Combine(::testing::Values(0, 10), // output delay
+ ::testing::Values(0, 512), // codec delay
+ ::testing::Values(base::TimeDelta(), // front discard
+ base::Milliseconds(65))));
} // namespace media
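The kCodec constant above reflects another codebase-wide rename in this baseline: media::AudioCodec becomes a scoped enum, so the old unscoped values (kCodecVorbis, kCodecAAC, ...) are now spelled AudioCodec::kVorbis, AudioCodec::kAAC, and so on. A minimal sketch of the new spelling (illustrative only; the helper is hypothetical):

#include "media/base/audio_codecs.h"

// Scoped enumerators no longer convert implicitly to int, so comparisons
// must name the enum class explicitly.
bool IsVorbisOrAac(media::AudioCodec codec) {
  return codec == media::AudioCodec::kVorbis ||
         codec == media::AudioCodec::kAAC;
}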
diff --git a/chromium/media/filters/audio_video_metadata_extractor.h b/chromium/media/filters/audio_video_metadata_extractor.h
index b875caeaf04..f2634e83db4 100644
--- a/chromium/media/filters/audio_video_metadata_extractor.h
+++ b/chromium/media/filters/audio_video_metadata_extractor.h
@@ -35,6 +35,11 @@ class MEDIA_EXPORT AudioVideoMetadataExtractor {
typedef std::vector<StreamInfo> StreamInfoVector;
AudioVideoMetadataExtractor();
+
+ AudioVideoMetadataExtractor(const AudioVideoMetadataExtractor&) = delete;
+ AudioVideoMetadataExtractor& operator=(const AudioVideoMetadataExtractor&) =
+ delete;
+
~AudioVideoMetadataExtractor();
// Returns whether or not the fields were successfully extracted. Should only
@@ -104,8 +109,6 @@ class MEDIA_EXPORT AudioVideoMetadataExtractor {
StreamInfoVector stream_infos_;
std::vector<std::string> attached_images_bytes_;
-
- DISALLOW_COPY_AND_ASSIGN(AudioVideoMetadataExtractor);
};
} // namespace media
diff --git a/chromium/media/filters/blocking_url_protocol_unittest.cc b/chromium/media/filters/blocking_url_protocol_unittest.cc
index 8d7c9a7fa47..f8ba1da3062 100644
--- a/chromium/media/filters/blocking_url_protocol_unittest.cc
+++ b/chromium/media/filters/blocking_url_protocol_unittest.cc
@@ -29,15 +29,15 @@ class BlockingUrlProtocolTest : public testing::Test {
CHECK(data_source_.Initialize(GetTestDataFilePath("bear-320x240.webm")));
}
+ BlockingUrlProtocolTest(const BlockingUrlProtocolTest&) = delete;
+ BlockingUrlProtocolTest& operator=(const BlockingUrlProtocolTest&) = delete;
+
~BlockingUrlProtocolTest() override { data_source_.Stop(); }
MOCK_METHOD0(OnDataSourceError, void());
FileDataSource data_source_;
std::unique_ptr<BlockingUrlProtocol> url_protocol_;
-
- private:
- DISALLOW_COPY_AND_ASSIGN(BlockingUrlProtocolTest);
};
diff --git a/chromium/media/filters/chunk_demuxer.cc b/chromium/media/filters/chunk_demuxer.cc
index 0dc2efcb5c2..7befc2de7eb 100644
--- a/chromium/media/filters/chunk_demuxer.cc
+++ b/chromium/media/filters/chunk_demuxer.cc
@@ -29,7 +29,6 @@
#include "media/filters/source_buffer_stream.h"
#include "media/filters/stream_parser_factory.h"
-using base::TimeDelta;
namespace {
@@ -117,7 +116,7 @@ bool ChunkDemuxerStream::IsSeekWaitingForData() const {
return stream_->IsSeekPending();
}
-void ChunkDemuxerStream::Seek(TimeDelta time) {
+void ChunkDemuxerStream::Seek(base::TimeDelta time) {
DVLOG(1) << "ChunkDemuxerStream::Seek(" << time.InSecondsF() << ")";
base::AutoLock auto_lock(lock_);
DCHECK(!read_cb_);
@@ -144,8 +143,9 @@ bool ChunkDemuxerStream::Append(const StreamParser::BufferQueue& buffers) {
return true;
}
-void ChunkDemuxerStream::Remove(TimeDelta start, TimeDelta end,
- TimeDelta duration) {
+void ChunkDemuxerStream::Remove(base::TimeDelta start,
+ base::TimeDelta end,
+ base::TimeDelta duration) {
base::AutoLock auto_lock(lock_);
stream_->Remove(start, end, duration);
}
@@ -183,13 +183,13 @@ void ChunkDemuxerStream::OnMemoryPressure(
force_instant_gc);
}
-void ChunkDemuxerStream::OnSetDuration(TimeDelta duration) {
+void ChunkDemuxerStream::OnSetDuration(base::TimeDelta duration) {
base::AutoLock auto_lock(lock_);
stream_->OnSetDuration(duration);
}
-Ranges<TimeDelta> ChunkDemuxerStream::GetBufferedRanges(
- TimeDelta duration) const {
+Ranges<base::TimeDelta> ChunkDemuxerStream::GetBufferedRanges(
+ base::TimeDelta duration) const {
base::AutoLock auto_lock(lock_);
if (type_ == TEXT) {
@@ -197,12 +197,12 @@ Ranges<TimeDelta> ChunkDemuxerStream::GetBufferedRanges(
// playback, report the buffered range for text tracks as [0, |duration|) so
    // that intersections with audio & video tracks are computed correctly when
// no cues are present.
- Ranges<TimeDelta> text_range;
- text_range.Add(TimeDelta(), duration);
+ Ranges<base::TimeDelta> text_range;
+ text_range.Add(base::TimeDelta(), duration);
return text_range;
}
- Ranges<TimeDelta> range = stream_->GetBufferedTime();
+ Ranges<base::TimeDelta> range = stream_->GetBufferedTime();
if (range.size() == 0u)
return range;
@@ -210,17 +210,17 @@ Ranges<TimeDelta> ChunkDemuxerStream::GetBufferedRanges(
// Clamp the end of the stream's buffered ranges to fit within the duration.
// This can be done by intersecting the stream's range with the valid time
// range.
- Ranges<TimeDelta> valid_time_range;
+ Ranges<base::TimeDelta> valid_time_range;
valid_time_range.Add(range.start(0), duration);
return range.IntersectionWith(valid_time_range);
}
-TimeDelta ChunkDemuxerStream::GetHighestPresentationTimestamp() const {
+base::TimeDelta ChunkDemuxerStream::GetHighestPresentationTimestamp() const {
base::AutoLock auto_lock(lock_);
return stream_->GetHighestPresentationTimestamp();
}
-TimeDelta ChunkDemuxerStream::GetBufferedDuration() const {
+base::TimeDelta ChunkDemuxerStream::GetBufferedDuration() const {
base::AutoLock auto_lock(lock_);
return stream_->GetBufferedDuration();
}
@@ -500,9 +500,9 @@ void ChunkDemuxer::Stop() {
Shutdown();
}
-void ChunkDemuxer::Seek(TimeDelta time, PipelineStatusCallback cb) {
+void ChunkDemuxer::Seek(base::TimeDelta time, PipelineStatusCallback cb) {
DVLOG(1) << "Seek(" << time.InSecondsF() << ")";
- DCHECK(time >= TimeDelta());
+ DCHECK(time >= base::TimeDelta());
TRACE_EVENT_ASYNC_BEGIN0("media", "ChunkDemuxer::Seek", this);
base::AutoLock auto_lock(lock_);
@@ -562,8 +562,8 @@ std::vector<DemuxerStream*> ChunkDemuxer::GetAllStreams() {
return result;
}
-TimeDelta ChunkDemuxer::GetStartTime() const {
- return TimeDelta();
+base::TimeDelta ChunkDemuxer::GetStartTime() const {
+ return base::TimeDelta();
}
int64_t ChunkDemuxer::GetMemoryUsage() const {
@@ -593,7 +593,7 @@ void ChunkDemuxer::AbortPendingReads() {
AbortPendingReads_Locked();
}
-void ChunkDemuxer::StartWaitingForSeek(TimeDelta seek_time) {
+void ChunkDemuxer::StartWaitingForSeek(base::TimeDelta seek_time) {
DVLOG(1) << "StartWaitingForSeek()";
base::AutoLock auto_lock(lock_);
DCHECK(state_ == INITIALIZED || state_ == ENDED || state_ == SHUTDOWN ||
@@ -611,7 +611,7 @@ void ChunkDemuxer::StartWaitingForSeek(TimeDelta seek_time) {
cancel_next_seek_ = false;
}
-void ChunkDemuxer::CancelPendingSeek(TimeDelta seek_time) {
+void ChunkDemuxer::CancelPendingSeek(base::TimeDelta seek_time) {
base::AutoLock auto_lock(lock_);
DCHECK_NE(state_, INITIALIZING);
DCHECK(!seek_cb_ || IsSeekWaitingForData_Locked());
@@ -799,7 +799,8 @@ void ChunkDemuxer::RemoveId(const std::string& id) {
id_to_streams_map_.erase(id);
}
-Ranges<TimeDelta> ChunkDemuxer::GetBufferedRanges(const std::string& id) const {
+Ranges<base::TimeDelta> ChunkDemuxer::GetBufferedRanges(
+ const std::string& id) const {
base::AutoLock auto_lock(lock_);
DCHECK(!id.empty());
@@ -912,15 +913,15 @@ bool ChunkDemuxer::EvictCodedFrames(const std::string& id,
bool ChunkDemuxer::AppendData(const std::string& id,
const uint8_t* data,
size_t length,
- TimeDelta append_window_start,
- TimeDelta append_window_end,
- TimeDelta* timestamp_offset) {
+ base::TimeDelta append_window_start,
+ base::TimeDelta append_window_end,
+ base::TimeDelta* timestamp_offset) {
DVLOG(1) << "AppendData(" << id << ", " << length << ")";
DCHECK(!id.empty());
DCHECK(timestamp_offset);
- Ranges<TimeDelta> ranges;
+ Ranges<base::TimeDelta> ranges;
{
base::AutoLock auto_lock(lock_);
@@ -982,7 +983,7 @@ bool ChunkDemuxer::AppendChunks(
DCHECK(!id.empty());
DCHECK(timestamp_offset);
- Ranges<TimeDelta> ranges;
+ Ranges<base::TimeDelta> ranges;
{
base::AutoLock auto_lock(lock_);
@@ -1030,9 +1031,9 @@ bool ChunkDemuxer::AppendChunks(
}
void ChunkDemuxer::ResetParserState(const std::string& id,
- TimeDelta append_window_start,
- TimeDelta append_window_end,
- TimeDelta* timestamp_offset) {
+ base::TimeDelta append_window_start,
+ base::TimeDelta append_window_end,
+ base::TimeDelta* timestamp_offset) {
DVLOG(1) << "ResetParserState(" << id << ")";
base::AutoLock auto_lock(lock_);
DCHECK(!id.empty());
@@ -1047,8 +1048,9 @@ void ChunkDemuxer::ResetParserState(const std::string& id,
RunSeekCB_Locked(PIPELINE_OK);
}
-void ChunkDemuxer::Remove(const std::string& id, TimeDelta start,
- TimeDelta end) {
+void ChunkDemuxer::Remove(const std::string& id,
+ base::TimeDelta start,
+ base::TimeDelta end) {
DVLOG(1) << "Remove(" << id << ", " << start.InSecondsF()
<< ", " << end.InSecondsF() << ")";
base::AutoLock auto_lock(lock_);
@@ -1143,17 +1145,18 @@ void ChunkDemuxer::SetDuration(double duration) {
if (duration == GetDuration_Locked())
return;
- // Compute & bounds check the TimeDelta representation of duration.
+ // Compute & bounds check the base::TimeDelta representation of duration.
// This can be different if the value of |duration| doesn't fit the range or
- // precision of TimeDelta.
- TimeDelta min_duration = TimeDelta::FromInternalValue(1);
- // Don't use TimeDelta::Max() here, as we want the largest finite time delta.
- TimeDelta max_duration =
- TimeDelta::FromInternalValue(std::numeric_limits<int64_t>::max() - 1);
+ // precision of base::TimeDelta.
+ base::TimeDelta min_duration = base::TimeDelta::FromInternalValue(1);
+ // Don't use base::TimeDelta::Max() here, as we want the largest finite time
+ // delta.
+ base::TimeDelta max_duration = base::TimeDelta::FromInternalValue(
+ std::numeric_limits<int64_t>::max() - 1);
double min_duration_in_seconds = min_duration.InSecondsF();
double max_duration_in_seconds = max_duration.InSecondsF();
- TimeDelta duration_td;
+ base::TimeDelta duration_td;
if (duration == std::numeric_limits<double>::infinity()) {
duration_td = media::kInfiniteDuration;
} else if (duration < min_duration_in_seconds) {
@@ -1161,11 +1164,11 @@ void ChunkDemuxer::SetDuration(double duration) {
} else if (duration > max_duration_in_seconds) {
duration_td = max_duration;
} else {
- duration_td = TimeDelta::FromMicroseconds(
- duration * base::Time::kMicrosecondsPerSecond);
+ duration_td =
+ base::Microseconds(duration * base::Time::kMicrosecondsPerSecond);
}
- DCHECK(duration_td > TimeDelta());
+ DCHECK(duration_td > base::TimeDelta());
user_specified_duration_ = duration;
duration_ = duration_td;
@@ -1463,7 +1466,7 @@ bool ChunkDemuxer::IsValidId(const std::string& source_id) const {
return source_state_map_.count(source_id) > 0u;
}
-void ChunkDemuxer::UpdateDuration(TimeDelta new_duration) {
+void ChunkDemuxer::UpdateDuration(base::TimeDelta new_duration) {
DCHECK(duration_ != new_duration ||
user_specified_duration_ != new_duration.InSecondsF());
user_specified_duration_ = -1;
@@ -1471,7 +1474,7 @@ void ChunkDemuxer::UpdateDuration(TimeDelta new_duration) {
host_->SetDuration(new_duration);
}
-void ChunkDemuxer::IncreaseDurationIfNecessary(TimeDelta new_duration) {
+void ChunkDemuxer::IncreaseDurationIfNecessary(base::TimeDelta new_duration) {
DCHECK(new_duration != kNoTimestamp);
DCHECK(new_duration != kInfiniteDuration);
@@ -1494,7 +1497,7 @@ void ChunkDemuxer::IncreaseDurationIfNecessary(TimeDelta new_duration) {
void ChunkDemuxer::DecreaseDurationIfNecessary() {
lock_.AssertAcquired();
- TimeDelta max_duration;
+ base::TimeDelta max_duration;
for (auto itr = source_state_map_.begin(); itr != source_state_map_.end();
++itr) {
@@ -1513,12 +1516,12 @@ void ChunkDemuxer::DecreaseDurationIfNecessary() {
}
}
-Ranges<TimeDelta> ChunkDemuxer::GetBufferedRanges() const {
+Ranges<base::TimeDelta> ChunkDemuxer::GetBufferedRanges() const {
base::AutoLock auto_lock(lock_);
return GetBufferedRanges_Locked();
}
-Ranges<TimeDelta> ChunkDemuxer::GetBufferedRanges_Locked() const {
+Ranges<base::TimeDelta> ChunkDemuxer::GetBufferedRanges_Locked() const {
lock_.AssertAcquired();
bool ended = state_ == ENDED;
@@ -1547,7 +1550,7 @@ void ChunkDemuxer::AbortPendingReads_Locked() {
}
}
-void ChunkDemuxer::SeekAllSources(TimeDelta seek_time) {
+void ChunkDemuxer::SeekAllSources(base::TimeDelta seek_time) {
for (auto itr = source_state_map_.begin(); itr != source_state_map_.end();
++itr) {
itr->second->Seek(seek_time);
diff --git a/chromium/media/filters/chunk_demuxer.h b/chromium/media/filters/chunk_demuxer.h
index c2bf269e6fe..d2fb4d1cfa8 100644
--- a/chromium/media/filters/chunk_demuxer.h
+++ b/chromium/media/filters/chunk_demuxer.h
@@ -212,6 +212,10 @@ class MEDIA_EXPORT ChunkDemuxer : public Demuxer {
base::RepeatingClosure progress_cb,
EncryptedMediaInitDataCB encrypted_media_init_data_cb,
MediaLog* media_log);
+
+ ChunkDemuxer(const ChunkDemuxer&) = delete;
+ ChunkDemuxer& operator=(const ChunkDemuxer&) = delete;
+
~ChunkDemuxer() override;
// Demuxer implementation.
@@ -552,8 +556,6 @@ class MEDIA_EXPORT ChunkDemuxer : public Demuxer {
std::vector<std::unique_ptr<ChunkDemuxerStream>> removed_streams_;
std::map<MediaTrack::Id, ChunkDemuxerStream*> track_id_to_demux_stream_map_;
-
- DISALLOW_COPY_AND_ASSIGN(ChunkDemuxer);
};
} // namespace media
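Alongside the deleted copy operations, chunk_demuxer.cc above also drops its file-level "using base::TimeDelta;" declaration, so every signature now spells the type fully as base::TimeDelta, including inside Ranges<>. A minimal sketch of the resulting style (illustrative only; the free function is hypothetical):

#include "base/time/time.h"
#include "media/base/ranges.h"

// Clamp a stream's buffered ranges to [0, duration), written with the fully
// qualified base::TimeDelta spelling used throughout the file.
media::Ranges<base::TimeDelta> ClampToDuration(
    media::Ranges<base::TimeDelta> buffered,
    base::TimeDelta duration) {
  media::Ranges<base::TimeDelta> valid;
  valid.Add(base::TimeDelta(), duration);
  return buffered.IntersectionWith(valid);
}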
diff --git a/chromium/media/filters/chunk_demuxer_unittest.cc b/chromium/media/filters/chunk_demuxer_unittest.cc
index 4575cf6c609..a1b29204434 100644
--- a/chromium/media/filters/chunk_demuxer_unittest.cc
+++ b/chromium/media/filters/chunk_demuxer_unittest.cc
@@ -104,7 +104,7 @@ const int kDefaultFirstClusterEndTimestamp = 66;
const int kDefaultSecondClusterEndTimestamp = 132;
base::TimeDelta kDefaultDuration() {
- return base::TimeDelta::FromMilliseconds(201224);
+ return base::Milliseconds(201224);
}
// Write an integer into buffer in the form of vint that spans 8 bytes.
@@ -802,8 +802,7 @@ class ChunkDemuxerTest : public ::testing::Test {
ExpectInitMediaLogs(HAS_AUDIO | HAS_VIDEO);
EXPECT_CALL(*this, InitSegmentReceivedMock(_));
demuxer_->Initialize(
- &host_, CreateInitDoneCallback(base::TimeDelta::FromMilliseconds(2744),
- PIPELINE_OK));
+ &host_, CreateInitDoneCallback(base::Milliseconds(2744), PIPELINE_OK));
if (AddId(kSourceId, HAS_AUDIO | HAS_VIDEO) != ChunkDemuxer::kOk)
return false;
@@ -812,7 +811,7 @@ class ChunkDemuxerTest : public ::testing::Test {
EXPECT_MEDIA_LOG(WebMSimpleBlockDurationEstimated(2)).Times(7);
// Expect duration adjustment since actual duration differs slightly from
// duration in the init segment.
- EXPECT_CALL(host_, SetDuration(base::TimeDelta::FromMilliseconds(2768)));
+ EXPECT_CALL(host_, SetDuration(base::Milliseconds(2768)));
EXPECT_TRUE(AppendData(bear1->data(), bear1->data_size()));
// Last audio frame has timestamp 2721 and duration 24 (estimated from max
// seen so far for audio track).
@@ -1195,16 +1194,14 @@ class ChunkDemuxerTest : public ::testing::Test {
if (timestamps[i].audio_time_ms != kSkip) {
ReadAudio(base::BindOnce(
- &OnReadDone,
- base::TimeDelta::FromMilliseconds(timestamps[i].audio_time_ms),
+ &OnReadDone, base::Milliseconds(timestamps[i].audio_time_ms),
&audio_read_done));
EXPECT_TRUE(audio_read_done);
}
if (timestamps[i].video_time_ms != kSkip) {
ReadVideo(base::BindOnce(
- &OnReadDone,
- base::TimeDelta::FromMilliseconds(timestamps[i].video_time_ms),
+ &OnReadDone, base::Milliseconds(timestamps[i].video_time_ms),
&video_read_done));
EXPECT_TRUE(video_read_done);
}
@@ -1330,7 +1327,7 @@ TEST_F(ChunkDemuxerTest, Init) {
ASSERT_TRUE(audio_stream);
const AudioDecoderConfig& config = audio_stream->audio_decoder_config();
- EXPECT_EQ(kCodecVorbis, config.codec());
+ EXPECT_EQ(AudioCodec::kVorbis, config.codec());
EXPECT_EQ(32, config.bits_per_channel());
EXPECT_EQ(CHANNEL_LAYOUT_STEREO, config.channel_layout());
EXPECT_EQ(44100, config.samples_per_second());
@@ -1477,7 +1474,7 @@ TEST_F(ChunkDemuxerTest, AppendDataAfterSeek) {
EXPECT_CALL(*this, Checkpoint(1));
- Seek(base::TimeDelta::FromMilliseconds(46));
+ Seek(base::Milliseconds(46));
EXPECT_CALL(*this, Checkpoint(2));
@@ -1521,7 +1518,7 @@ TEST_F(ChunkDemuxerTest, SeekWhileParsingCluster) {
ExpectRead(DemuxerStream::VIDEO, 0);
ExpectRead(DemuxerStream::AUDIO, kAudioBlockDuration);
- Seek(base::TimeDelta::FromSeconds(5));
+ Seek(base::Seconds(5));
// Append the rest of the cluster.
ASSERT_TRUE(
@@ -1553,10 +1550,10 @@ TEST_F(ChunkDemuxerTest, Read) {
bool audio_read_done = false;
bool video_read_done = false;
- ReadAudio(base::BindOnce(&OnReadDone, base::TimeDelta::FromMilliseconds(0),
- &audio_read_done));
- ReadVideo(base::BindOnce(&OnReadDone, base::TimeDelta::FromMilliseconds(0),
- &video_read_done));
+ ReadAudio(
+ base::BindOnce(&OnReadDone, base::Milliseconds(0), &audio_read_done));
+ ReadVideo(
+ base::BindOnce(&OnReadDone, base::Milliseconds(0), &video_read_done));
EXPECT_TRUE(audio_read_done);
EXPECT_TRUE(video_read_done);
@@ -1593,7 +1590,7 @@ TEST_F(ChunkDemuxerTest, OutOfOrderClusters) {
kSourceId, cluster_c->data(), cluster_c->size(),
append_window_start_for_next_append_, append_window_end_for_next_append_,
&timestamp_offset_map_[kSourceId]));
- Seek(base::TimeDelta::FromMilliseconds(45));
+ Seek(base::Milliseconds(45));
CheckExpectedBuffers(audio_stream, "45K");
CheckExpectedBuffers(video_stream, "45K");
}
@@ -1830,10 +1827,10 @@ TEST_F(ChunkDemuxerTest, EndOfStreamWithPendingReads) {
EndOfStreamHelper end_of_stream_helper_1(audio_stream, video_stream);
EndOfStreamHelper end_of_stream_helper_2(audio_stream, video_stream);
- ReadAudio(base::BindOnce(&OnReadDone, base::TimeDelta::FromMilliseconds(0),
- &audio_read_done_1));
- ReadVideo(base::BindOnce(&OnReadDone, base::TimeDelta::FromMilliseconds(0),
- &video_read_done_1));
+ ReadAudio(
+ base::BindOnce(&OnReadDone, base::Milliseconds(0), &audio_read_done_1));
+ ReadVideo(
+ base::BindOnce(&OnReadDone, base::Milliseconds(0), &video_read_done_1));
base::RunLoop().RunUntilIdle();
EXPECT_TRUE(audio_read_done_1);
@@ -1841,8 +1838,7 @@ TEST_F(ChunkDemuxerTest, EndOfStreamWithPendingReads) {
end_of_stream_helper_1.RequestReads();
- EXPECT_CALL(host_, SetDuration(
- base::TimeDelta::FromMilliseconds(kVideoBlockDuration)));
+ EXPECT_CALL(host_, SetDuration(base::Milliseconds(kVideoBlockDuration)));
MarkEndOfStream(PIPELINE_OK);
end_of_stream_helper_1.CheckIfReadDonesWereCalled(true);
@@ -1866,10 +1862,10 @@ TEST_F(ChunkDemuxerTest, ReadsAfterEndOfStream) {
EndOfStreamHelper end_of_stream_helper_2(audio_stream, video_stream);
EndOfStreamHelper end_of_stream_helper_3(audio_stream, video_stream);
- ReadAudio(base::BindOnce(&OnReadDone, base::TimeDelta::FromMilliseconds(0),
- &audio_read_done_1));
- ReadVideo(base::BindOnce(&OnReadDone, base::TimeDelta::FromMilliseconds(0),
- &video_read_done_1));
+ ReadAudio(
+ base::BindOnce(&OnReadDone, base::Milliseconds(0), &audio_read_done_1));
+ ReadVideo(
+ base::BindOnce(&OnReadDone, base::Milliseconds(0), &video_read_done_1));
end_of_stream_helper_1.RequestReads();
@@ -1877,8 +1873,7 @@ TEST_F(ChunkDemuxerTest, ReadsAfterEndOfStream) {
EXPECT_TRUE(video_read_done_1);
end_of_stream_helper_1.CheckIfReadDonesWereCalled(false);
- EXPECT_CALL(host_, SetDuration(
- base::TimeDelta::FromMilliseconds(kVideoBlockDuration)));
+ EXPECT_CALL(host_, SetDuration(base::Milliseconds(kVideoBlockDuration)));
MarkEndOfStream(PIPELINE_OK);
end_of_stream_helper_1.CheckIfReadDonesWereCalled(true);
@@ -1896,15 +1891,15 @@ TEST_F(ChunkDemuxerTest, EndOfStreamDuringCanceledSeek) {
ASSERT_TRUE(InitDemuxer(HAS_AUDIO | HAS_VIDEO));
ASSERT_TRUE(AppendCluster(0, 10));
- EXPECT_CALL(host_, SetDuration(base::TimeDelta::FromMilliseconds(138)));
+ EXPECT_CALL(host_, SetDuration(base::Milliseconds(138)));
MarkEndOfStream(PIPELINE_OK);
// Start the first seek.
- Seek(base::TimeDelta::FromMilliseconds(20));
+ Seek(base::Milliseconds(20));
// Simulate another seek being requested before the first
// seek has finished prerolling.
- base::TimeDelta seek_time2 = base::TimeDelta::FromMilliseconds(30);
+ base::TimeDelta seek_time2 = base::Milliseconds(30);
demuxer_->CancelPendingSeek(seek_time2);
// Finish second seek.
@@ -1931,7 +1926,7 @@ TEST_F(ChunkDemuxerTest, EndOfStreamRangeChanges) {
CheckExpectedRanges("{ [0,46) }");
- EXPECT_CALL(host_, SetDuration(base::TimeDelta::FromMilliseconds(66)));
+ EXPECT_CALL(host_, SetDuration(base::Milliseconds(66)));
MarkEndOfStream(PIPELINE_OK);
CheckExpectedRanges("{ [0,66) }");
@@ -1989,10 +1984,10 @@ TEST_F(ChunkDemuxerTest, WebMFile_AudioAndVideo) {
// Expect duration adjustment since actual duration differs slightly from
// duration in the init segment.
- EXPECT_CALL(host_, SetDuration(base::TimeDelta::FromMilliseconds(2768)));
+ EXPECT_CALL(host_, SetDuration(base::Milliseconds(2768)));
ASSERT_TRUE(ParseWebMFile("bear-320x240.webm", buffer_timestamps,
- base::TimeDelta::FromMilliseconds(2744)));
+ base::Milliseconds(2744)));
EXPECT_EQ(212949, demuxer_->GetMemoryUsage());
}
@@ -2033,11 +2028,10 @@ TEST_F(ChunkDemuxerTest, WebMFile_AudioOnly) {
// Expect duration adjustment since actual duration differs slightly from
// duration in the init segment.
- EXPECT_CALL(host_, SetDuration(base::TimeDelta::FromMilliseconds(2768)));
+ EXPECT_CALL(host_, SetDuration(base::Milliseconds(2768)));
ASSERT_TRUE(ParseWebMFile("bear-320x240-audio-only.webm", buffer_timestamps,
- base::TimeDelta::FromMilliseconds(2744),
- HAS_AUDIO));
+ base::Milliseconds(2744), HAS_AUDIO));
EXPECT_EQ(18624, demuxer_->GetMemoryUsage());
}
@@ -2055,11 +2049,10 @@ TEST_F(ChunkDemuxerTest, WebMFile_VideoOnly) {
// Expect duration adjustment since actual duration differs slightly from
// duration in the init segment.
- EXPECT_CALL(host_, SetDuration(base::TimeDelta::FromMilliseconds(2736)));
+ EXPECT_CALL(host_, SetDuration(base::Milliseconds(2736)));
ASSERT_TRUE(ParseWebMFile("bear-320x240-video-only.webm", buffer_timestamps,
- base::TimeDelta::FromMilliseconds(2703),
- HAS_VIDEO));
+ base::Milliseconds(2703), HAS_VIDEO));
EXPECT_EQ(194325, demuxer_->GetMemoryUsage());
}
@@ -2075,12 +2068,12 @@ TEST_F(ChunkDemuxerTest, WebMFile_AltRefFrames) {
// Expect duration adjustment since actual duration differs slightly from
// duration in the init segment.
- EXPECT_CALL(host_, SetDuration(base::TimeDelta::FromMilliseconds(2768)));
+ EXPECT_CALL(host_, SetDuration(base::Milliseconds(2768)));
ExpectInitMediaLogs(HAS_AUDIO | HAS_VIDEO);
EXPECT_MEDIA_LOG(WebMSimpleBlockDurationEstimated(2));
ASSERT_TRUE(ParseWebMFile("bear-320x240-altref.webm", buffer_timestamps,
- base::TimeDelta::FromMilliseconds(2767)));
+ base::Milliseconds(2767)));
}
// Verify that we output buffers before the entire cluster has been parsed.
@@ -2091,10 +2084,10 @@ TEST_F(ChunkDemuxerTest, IncrementalClusterParsing) {
bool audio_read_done = false;
bool video_read_done = false;
- ReadAudio(base::BindOnce(&OnReadDone, base::TimeDelta::FromMilliseconds(0),
- &audio_read_done));
- ReadVideo(base::BindOnce(&OnReadDone, base::TimeDelta::FromMilliseconds(0),
- &video_read_done));
+ ReadAudio(
+ base::BindOnce(&OnReadDone, base::Milliseconds(0), &audio_read_done));
+ ReadVideo(
+ base::BindOnce(&OnReadDone, base::Milliseconds(0), &video_read_done));
// Make sure the reads haven't completed yet.
EXPECT_FALSE(audio_read_done);
@@ -2113,10 +2106,10 @@ TEST_F(ChunkDemuxerTest, IncrementalClusterParsing) {
audio_read_done = false;
video_read_done = false;
- ReadAudio(base::BindOnce(&OnReadDone, base::TimeDelta::FromMilliseconds(23),
- &audio_read_done));
- ReadVideo(base::BindOnce(&OnReadDone, base::TimeDelta::FromMilliseconds(33),
- &video_read_done));
+ ReadAudio(
+ base::BindOnce(&OnReadDone, base::Milliseconds(23), &audio_read_done));
+ ReadVideo(
+ base::BindOnce(&OnReadDone, base::Milliseconds(33), &video_read_done));
// Make sure the reads haven't completed yet.
EXPECT_FALSE(audio_read_done);
@@ -2327,7 +2320,7 @@ TEST_F(ChunkDemuxerTest, SeekCanceled) {
ASSERT_TRUE(AppendCluster(GenerateCluster(0, 4)));
// Seek to an unbuffered region.
- Seek(base::TimeDelta::FromSeconds(50));
+ Seek(base::Seconds(50));
// Attempt to read in unbuffered area; should not fulfill the read.
bool audio_read_done = false;
@@ -2339,7 +2332,7 @@ TEST_F(ChunkDemuxerTest, SeekCanceled) {
// Now cancel the pending seek, which should flush the reads with empty
// buffers.
- base::TimeDelta seek_time = base::TimeDelta::FromSeconds(0);
+ base::TimeDelta seek_time = base::Seconds(0);
demuxer_->CancelPendingSeek(seek_time);
base::RunLoop().RunUntilIdle();
EXPECT_TRUE(audio_read_done);
@@ -2357,8 +2350,8 @@ TEST_F(ChunkDemuxerTest, SeekCanceledWhileWaitingForSeek) {
ASSERT_TRUE(AppendCluster(GenerateCluster(0, 4)));
// Start waiting for a seek.
- base::TimeDelta seek_time1 = base::TimeDelta::FromSeconds(50);
- base::TimeDelta seek_time2 = base::TimeDelta::FromSeconds(0);
+ base::TimeDelta seek_time1 = base::Seconds(50);
+ base::TimeDelta seek_time2 = base::Seconds(0);
demuxer_->StartWaitingForSeek(seek_time1);
// Now cancel the upcoming seek to an unbuffered region.
@@ -2394,22 +2387,20 @@ TEST_F(ChunkDemuxerTest, SeekAudioAndVideoSources) {
// Read() should return buffers at 0.
bool audio_read_done = false;
bool video_read_done = false;
- ReadAudio(base::BindOnce(&OnReadDone, base::TimeDelta::FromMilliseconds(0),
- &audio_read_done));
- ReadVideo(base::BindOnce(&OnReadDone, base::TimeDelta::FromMilliseconds(0),
- &video_read_done));
+ ReadAudio(
+ base::BindOnce(&OnReadDone, base::Milliseconds(0), &audio_read_done));
+ ReadVideo(
+ base::BindOnce(&OnReadDone, base::Milliseconds(0), &video_read_done));
EXPECT_TRUE(audio_read_done);
EXPECT_TRUE(video_read_done);
// Seek to 3 (an unbuffered region).
- Seek(base::TimeDelta::FromSeconds(3));
+ Seek(base::Seconds(3));
audio_read_done = false;
video_read_done = false;
- ReadAudio(base::BindOnce(&OnReadDone, base::TimeDelta::FromSeconds(3),
- &audio_read_done));
- ReadVideo(base::BindOnce(&OnReadDone, base::TimeDelta::FromSeconds(3),
- &video_read_done));
+ ReadAudio(base::BindOnce(&OnReadDone, base::Seconds(3), &audio_read_done));
+ ReadVideo(base::BindOnce(&OnReadDone, base::Seconds(3), &video_read_done));
// Read()s should not return until after data is appended at the Seek point.
EXPECT_FALSE(audio_read_done);
EXPECT_FALSE(video_read_done);
@@ -2444,7 +2435,7 @@ TEST_F(ChunkDemuxerTest, EndOfStreamAfterPastEosSeek) {
// Seeking past the end of video.
// Note: audio data is available for that seek point.
bool seek_cb_was_called = false;
- base::TimeDelta seek_time = base::TimeDelta::FromMilliseconds(110);
+ base::TimeDelta seek_time = base::Milliseconds(110);
demuxer_->StartWaitingForSeek(seek_time);
demuxer_->Seek(seek_time,
base::BindOnce(OnSeekDone_OKExpected, &seek_cb_was_called));
@@ -2452,8 +2443,7 @@ TEST_F(ChunkDemuxerTest, EndOfStreamAfterPastEosSeek) {
EXPECT_FALSE(seek_cb_was_called);
- EXPECT_CALL(host_, SetDuration(
- base::TimeDelta::FromMilliseconds(120)));
+ EXPECT_CALL(host_, SetDuration(base::Milliseconds(120)));
MarkEndOfStream(PIPELINE_OK);
CheckExpectedRanges("{ [0,120) }");
base::RunLoop().RunUntilIdle();
@@ -2478,7 +2468,7 @@ TEST_F(ChunkDemuxerTest, EndOfStreamDuringPendingSeek) {
MuxedStreamInfo(kVideoTrackNum, "200K 220K 240K 260K 280K", 20));
bool seek_cb_was_called = false;
- base::TimeDelta seek_time = base::TimeDelta::FromMilliseconds(160);
+ base::TimeDelta seek_time = base::Milliseconds(160);
demuxer_->StartWaitingForSeek(seek_time);
demuxer_->Seek(seek_time,
base::BindOnce(OnSeekDone_OKExpected, &seek_cb_was_called));
@@ -2486,7 +2476,7 @@ TEST_F(ChunkDemuxerTest, EndOfStreamDuringPendingSeek) {
EXPECT_FALSE(seek_cb_was_called);
- EXPECT_CALL(host_, SetDuration(base::TimeDelta::FromMilliseconds(300)));
+ EXPECT_CALL(host_, SetDuration(base::Milliseconds(300)));
MarkEndOfStream(PIPELINE_OK);
base::RunLoop().RunUntilIdle();
@@ -2727,7 +2717,7 @@ TEST_F(ChunkDemuxerTest, GetBufferedRanges_EndOfStream) {
CheckExpectedRanges("{ [0,46) }");
- EXPECT_CALL(host_, SetDuration(base::TimeDelta::FromMilliseconds(66)));
+ EXPECT_CALL(host_, SetDuration(base::Milliseconds(66)));
MarkEndOfStream(PIPELINE_OK);
// Verify that the range extends to the end of the video data.
@@ -2739,7 +2729,7 @@ TEST_F(ChunkDemuxerTest, GetBufferedRanges_EndOfStream) {
CheckExpectedRanges("{ [0,46) }");
// Append and remove data so that the 2 streams' end ranges do not overlap.
- EXPECT_CALL(host_, SetDuration(base::TimeDelta::FromMilliseconds(398)));
+ EXPECT_CALL(host_, SetDuration(base::Milliseconds(398)));
AppendMuxedCluster(
MuxedStreamInfo(kAudioTrackNum, "200K 223K", 23),
MuxedStreamInfo(kVideoTrackNum, "200K 233 266 299 332K 365", 33));
@@ -2749,8 +2739,7 @@ TEST_F(ChunkDemuxerTest, GetBufferedRanges_EndOfStream) {
// Video: [0,66) [200,398)
CheckExpectedRanges("{ [0,46) [200,246) }");
- demuxer_->Remove(kSourceId, base::TimeDelta::FromMilliseconds(200),
- base::TimeDelta::FromMilliseconds(300));
+ demuxer_->Remove(kSourceId, base::Milliseconds(200), base::Milliseconds(300));
// At this point, the per-stream ranges are as follows:
// Audio: [0,46)
@@ -2781,11 +2770,11 @@ TEST_F(ChunkDemuxerTest, DifferentStreamTimecodes) {
// Create a cluster where the video timecode begins 25ms after the audio.
ASSERT_TRUE(AppendCluster(GenerateCluster(0, 25, 8)));
- Seek(base::TimeDelta::FromSeconds(0));
+ Seek(base::Seconds(0));
GenerateExpectedReads(0, 25, 8);
// Seek to 5 seconds.
- Seek(base::TimeDelta::FromSeconds(5));
+ Seek(base::Seconds(5));
// Generate a cluster to fulfill this seek, where audio timecode begins 25ms
// after the video.
@@ -2810,7 +2799,7 @@ TEST_F(ChunkDemuxerTest, DifferentStreamTimecodesSeparateSources) {
kVideoTrackNum, kVideoBlockDuration)));
// Both streams should be able to fulfill a seek to 25.
- Seek(base::TimeDelta::FromMilliseconds(25));
+ Seek(base::Milliseconds(25));
GenerateAudioStreamExpectedReads(25, 4);
GenerateVideoStreamExpectedReads(30, 4);
}
@@ -2832,7 +2821,7 @@ TEST_F(ChunkDemuxerTest, DifferentStreamTimecodesOutOfRange) {
kVideoTrackNum, kVideoBlockDuration)));
// Should not be able to fulfill a seek to 0.
- base::TimeDelta seek_time = base::TimeDelta::FromMilliseconds(0);
+ base::TimeDelta seek_time = base::Milliseconds(0);
demuxer_->StartWaitingForSeek(seek_time);
demuxer_->Seek(seek_time,
NewExpectedStatusCB(PIPELINE_ERROR_ABORT));
@@ -2900,8 +2889,8 @@ TEST_F(ChunkDemuxerTest, EndOfStreamStillSetAfterSeek) {
EXPECT_CALL(host_, SetDuration(_))
.Times(AnyNumber());
- base::TimeDelta kLastAudioTimestamp = base::TimeDelta::FromMilliseconds(92);
- base::TimeDelta kLastVideoTimestamp = base::TimeDelta::FromMilliseconds(99);
+ base::TimeDelta kLastAudioTimestamp = base::Milliseconds(92);
+ base::TimeDelta kLastVideoTimestamp = base::Milliseconds(99);
ASSERT_TRUE(AppendCluster(kDefaultFirstCluster()));
ASSERT_TRUE(AppendCluster(kDefaultSecondCluster()));
@@ -2920,7 +2909,7 @@ TEST_F(ChunkDemuxerTest, EndOfStreamStillSetAfterSeek) {
EXPECT_EQ(kLastVideoTimestamp, last_timestamp);
  // Seek back to 0 and verify that we can read to the end again.
- Seek(base::TimeDelta::FromMilliseconds(0));
+ Seek(base::Milliseconds(0));
ReadUntilNotOkOrEndOfStream(DemuxerStream::AUDIO, &status, &last_timestamp);
EXPECT_EQ(DemuxerStream::kOk, status);
@@ -2952,12 +2941,13 @@ TEST_F(ChunkDemuxerTest, EndOfStreamDuringSeek) {
ASSERT_TRUE(AppendCluster(kDefaultFirstCluster()));
- base::TimeDelta seek_time = base::TimeDelta::FromSeconds(0);
+ base::TimeDelta seek_time = base::Seconds(0);
demuxer_->StartWaitingForSeek(seek_time);
ASSERT_TRUE(AppendCluster(kDefaultSecondCluster()));
- EXPECT_CALL(host_, SetDuration(
- base::TimeDelta::FromMilliseconds(kDefaultSecondClusterEndTimestamp)));
+ EXPECT_CALL(
+ host_,
+ SetDuration(base::Milliseconds(kDefaultSecondClusterEndTimestamp)));
MarkEndOfStream(PIPELINE_OK);
demuxer_->Seek(seek_time, NewExpectedStatusCB(PIPELINE_OK));
@@ -3077,7 +3067,7 @@ TEST_F(ChunkDemuxerTest, ConfigChange_Seek) {
ExpectRead(DemuxerStream::VIDEO, 0);
// Seek to a location with a different config.
- Seek(base::TimeDelta::FromMilliseconds(527));
+ Seek(base::Milliseconds(527));
// Verify that the config change is signalled.
ExpectConfigChanged(DemuxerStream::VIDEO);
@@ -3092,7 +3082,7 @@ TEST_F(ChunkDemuxerTest, ConfigChange_Seek) {
ExpectRead(DemuxerStream::VIDEO, 527);
// Seek back to the beginning and verify we get another config change.
- Seek(base::TimeDelta::FromMilliseconds(0));
+ Seek(base::Milliseconds(0));
ExpectConfigChanged(DemuxerStream::VIDEO);
ASSERT_TRUE(video_config_1.Matches(video->video_decoder_config()));
ExpectRead(DemuxerStream::VIDEO, 0);
@@ -3100,8 +3090,8 @@ TEST_F(ChunkDemuxerTest, ConfigChange_Seek) {
// Seek to a location that requires a config change and then
// seek to a new location that has the same configuration as
// the start of the file without a Read() in the middle.
- Seek(base::TimeDelta::FromMilliseconds(527));
- Seek(base::TimeDelta::FromMilliseconds(801));
+ Seek(base::Milliseconds(527));
+ Seek(base::Milliseconds(801));
// Verify that no config change is signalled.
ExpectRead(DemuxerStream::VIDEO, 801);
@@ -3111,10 +3101,10 @@ TEST_F(ChunkDemuxerTest, ConfigChange_Seek) {
TEST_F(ChunkDemuxerTest, TimestampPositiveOffset) {
ASSERT_TRUE(InitDemuxer(HAS_AUDIO | HAS_VIDEO));
- ASSERT_TRUE(SetTimestampOffset(kSourceId, base::TimeDelta::FromSeconds(30)));
+ ASSERT_TRUE(SetTimestampOffset(kSourceId, base::Seconds(30)));
ASSERT_TRUE(AppendCluster(GenerateCluster(0, 2)));
- Seek(base::TimeDelta::FromMilliseconds(30000));
+ Seek(base::Milliseconds(30000));
GenerateExpectedReads(30000, 2);
}
@@ -3122,7 +3112,7 @@ TEST_F(ChunkDemuxerTest, TimestampPositiveOffset) {
TEST_F(ChunkDemuxerTest, TimestampNegativeOffset) {
ASSERT_TRUE(InitDemuxer(HAS_AUDIO | HAS_VIDEO));
- ASSERT_TRUE(SetTimestampOffset(kSourceId, base::TimeDelta::FromSeconds(-1)));
+ ASSERT_TRUE(SetTimestampOffset(kSourceId, base::Seconds(-1)));
ASSERT_TRUE(AppendCluster(GenerateCluster(1000, 2)));
GenerateExpectedReads(0, 2);
@@ -3133,10 +3123,8 @@ TEST_F(ChunkDemuxerTest, TimestampOffsetSeparateStreams) {
std::string video_id = "video1";
ASSERT_TRUE(InitDemuxerAudioAndVideoSources(audio_id, video_id));
- ASSERT_TRUE(SetTimestampOffset(
- audio_id, base::TimeDelta::FromMilliseconds(-2500)));
- ASSERT_TRUE(SetTimestampOffset(
- video_id, base::TimeDelta::FromMilliseconds(-2500)));
+ ASSERT_TRUE(SetTimestampOffset(audio_id, base::Milliseconds(-2500)));
+ ASSERT_TRUE(SetTimestampOffset(video_id, base::Milliseconds(-2500)));
ASSERT_TRUE(AppendCluster(
audio_id,
GenerateSingleStreamCluster(2500, 2500 + kAudioBlockDuration * 4,
@@ -3148,12 +3136,10 @@ TEST_F(ChunkDemuxerTest, TimestampOffsetSeparateStreams) {
GenerateAudioStreamExpectedReads(0, 4);
GenerateVideoStreamExpectedReads(0, 4);
- Seek(base::TimeDelta::FromMilliseconds(27300));
+ Seek(base::Milliseconds(27300));
- ASSERT_TRUE(SetTimestampOffset(
- audio_id, base::TimeDelta::FromMilliseconds(27300)));
- ASSERT_TRUE(SetTimestampOffset(
- video_id, base::TimeDelta::FromMilliseconds(27300)));
+ ASSERT_TRUE(SetTimestampOffset(audio_id, base::Milliseconds(27300)));
+ ASSERT_TRUE(SetTimestampOffset(video_id, base::Milliseconds(27300)));
ASSERT_TRUE(AppendCluster(
audio_id,
GenerateSingleStreamCluster(0, kAudioBlockDuration * 4, kAudioTrackNum,
@@ -3306,7 +3292,7 @@ TEST_F(ChunkDemuxerTest, SeekCompleteDuringAbort) {
// Seek to a time corresponding to buffers that will be emitted during the
// abort.
- Seek(base::TimeDelta::FromMilliseconds(4110));
+ Seek(base::Milliseconds(4110));
// ResetParserState on the Mpeg2 TS parser triggers the emission of the last
// video buffer which is pending in the stream parser.
@@ -3413,8 +3399,7 @@ TEST_F(ChunkDemuxerTest, DurationChange) {
// to be signaled. Note that the last video block will have a higher end
// timestamp than the last audio block.
const int kNewStreamDurationVideo = kStreamDuration + kVideoBlockDuration;
- EXPECT_CALL(host_, SetDuration(
- base::TimeDelta::FromMilliseconds(kNewStreamDurationVideo)));
+ EXPECT_CALL(host_, SetDuration(base::Milliseconds(kNewStreamDurationVideo)));
ASSERT_TRUE(
AppendCluster(GenerateCluster(kDefaultDuration().InMilliseconds(), 2)));
@@ -3423,8 +3408,7 @@ TEST_F(ChunkDemuxerTest, DurationChange) {
// Add more data to the end of each media type. Note that the last audio block
// will have a higher end timestamp than the last video block.
const int kFinalStreamDuration = kStreamDuration + kAudioBlockDuration * 3;
- EXPECT_CALL(host_, SetDuration(
- base::TimeDelta::FromMilliseconds(kFinalStreamDuration)));
+ EXPECT_CALL(host_, SetDuration(base::Milliseconds(kFinalStreamDuration)));
ASSERT_TRUE(
AppendCluster(GenerateCluster(kStreamDuration + kAudioBlockDuration,
kStreamDuration + kVideoBlockDuration, 3)));
@@ -3437,9 +3421,8 @@ TEST_F(ChunkDemuxerTest, DurationChange) {
TEST_F(ChunkDemuxerTest, DurationChangeTimestampOffset) {
ASSERT_TRUE(InitDemuxer(HAS_AUDIO | HAS_VIDEO));
ASSERT_TRUE(SetTimestampOffset(kSourceId, kDefaultDuration()));
- EXPECT_CALL(host_, SetDuration(
- kDefaultDuration() + base::TimeDelta::FromMilliseconds(
- kVideoBlockDuration * 2)));
+ EXPECT_CALL(host_, SetDuration(kDefaultDuration() +
+ base::Milliseconds(kVideoBlockDuration * 2)));
ASSERT_TRUE(AppendCluster(GenerateCluster(0, 4)));
}
@@ -3448,8 +3431,8 @@ TEST_F(ChunkDemuxerTest, EndOfStreamTruncateDuration) {
ASSERT_TRUE(AppendCluster(kDefaultFirstCluster()));
- EXPECT_CALL(host_, SetDuration(
- base::TimeDelta::FromMilliseconds(kDefaultFirstClusterEndTimestamp)));
+ EXPECT_CALL(
+ host_, SetDuration(base::Milliseconds(kDefaultFirstClusterEndTimestamp)));
MarkEndOfStream(PIPELINE_OK);
}
@@ -3496,16 +3479,16 @@ TEST_F(ChunkDemuxerTest, EndOfStreamWhileWaitingForGapToBeFilled) {
bool audio_read_done = false;
bool video_read_done = false;
- ReadAudio(base::BindOnce(&OnReadDone, base::TimeDelta::FromMilliseconds(138),
- &audio_read_done));
- ReadVideo(base::BindOnce(&OnReadDone, base::TimeDelta::FromMilliseconds(138),
- &video_read_done));
+ ReadAudio(
+ base::BindOnce(&OnReadDone, base::Milliseconds(138), &audio_read_done));
+ ReadVideo(
+ base::BindOnce(&OnReadDone, base::Milliseconds(138), &video_read_done));
// Verify that the reads didn't complete
EXPECT_FALSE(audio_read_done);
EXPECT_FALSE(video_read_done);
- EXPECT_CALL(host_, SetDuration(base::TimeDelta::FromMilliseconds(438)));
+ EXPECT_CALL(host_, SetDuration(base::Milliseconds(438)));
MarkEndOfStream(PIPELINE_OK);
// Verify that the reads still haven't completed.
@@ -3537,7 +3520,7 @@ TEST_F(ChunkDemuxerTest, EndOfStreamWhileWaitingForGapToBeFilled) {
EXPECT_FALSE(audio_read_done);
EXPECT_FALSE(video_read_done);
- EXPECT_CALL(host_, SetDuration(base::TimeDelta::FromMilliseconds(437)));
+ EXPECT_CALL(host_, SetDuration(base::Milliseconds(437)));
MarkEndOfStream(PIPELINE_OK);
EXPECT_TRUE(audio_read_done);
@@ -3548,7 +3531,7 @@ TEST_F(ChunkDemuxerTest, CanceledSeekDuringInitialPreroll) {
ASSERT_TRUE(InitDemuxer(HAS_AUDIO | HAS_VIDEO));
// Cancel preroll.
- base::TimeDelta seek_time = base::TimeDelta::FromMilliseconds(200);
+ base::TimeDelta seek_time = base::Milliseconds(200);
demuxer_->CancelPendingSeek(seek_time);
// Initiate the seek to the new location.
@@ -3565,7 +3548,7 @@ TEST_F(ChunkDemuxerTest, SetMemoryLimitType) {
demuxer_->SetMemoryLimitsForTest(DemuxerStream::AUDIO, 10 * block_size_);
demuxer_->SetMemoryLimitsForTest(DemuxerStream::VIDEO, 5 * block_size_ + 1);
- base::TimeDelta seek_time = base::TimeDelta::FromMilliseconds(1000);
+ base::TimeDelta seek_time = base::Milliseconds(1000);
// Append data at the start that can be garbage collected:
AppendMuxedCluster(
@@ -3574,8 +3557,7 @@ TEST_F(ChunkDemuxerTest, SetMemoryLimitType) {
MuxedStreamInfo(kVideoTrackNum, "0K 33K 66K 99K 132K", 33));
// We should be right at buffer limit, should pass
- EXPECT_TRUE(demuxer_->EvictCodedFrames(
- kSourceId, base::TimeDelta::FromMilliseconds(0), 0));
+ EXPECT_TRUE(demuxer_->EvictCodedFrames(kSourceId, base::Milliseconds(0), 0));
CheckExpectedRanges(DemuxerStream::AUDIO, "{ [0,230) }");
CheckExpectedRanges(DemuxerStream::VIDEO, "{ [0,165) }");
@@ -3607,7 +3589,7 @@ TEST_F(ChunkDemuxerTest, GCDuringSeek_SingleRange_SeekForward) {
// GC should be able to evict frames in the currently buffered range, since
// those frames are earlier than the seek target position.
- base::TimeDelta seek_time = base::TimeDelta::FromMilliseconds(2000);
+ base::TimeDelta seek_time = base::Milliseconds(2000);
Seek(seek_time);
EXPECT_TRUE(
demuxer_->EvictCodedFrames(kSourceId, seek_time, 5 * block_size_));
@@ -3647,7 +3629,7 @@ TEST_F(ChunkDemuxerTest, GCDuringSeek_MultipleRanges_SeekForward) {
// GC should be able to evict frames in the currently buffered ranges, since
// those frames are earlier than the seek target position.
- base::TimeDelta seek_time = base::TimeDelta::FromMilliseconds(3000);
+ base::TimeDelta seek_time = base::Milliseconds(3000);
Seek(seek_time);
EXPECT_TRUE(
demuxer_->EvictCodedFrames(kSourceId, seek_time, 8 * block_size_));
@@ -3673,7 +3655,7 @@ TEST_F(ChunkDemuxerTest, GCDuringSeek_MultipleRanges_SeekInbetween1) {
// recently appended data, so then GC starts removing data from the front of
// the remaining buffered range (2000ms) to ensure we free up enough space for
// the upcoming append and allow seek to proceed.
- base::TimeDelta seek_time = base::TimeDelta::FromMilliseconds(1500);
+ base::TimeDelta seek_time = base::Milliseconds(1500);
Seek(seek_time);
EXPECT_TRUE(
demuxer_->EvictCodedFrames(kSourceId, seek_time, 8 * block_size_));
@@ -3697,7 +3679,7 @@ TEST_F(ChunkDemuxerTest, GCDuringSeek_MultipleRanges_SeekInbetween2) {
// Now try performing garbage collection without announcing seek first, i.e.
// without calling Seek(), the GC algorithm should try to preserve data in the
// first range, since that is most recently appended data.
- base::TimeDelta seek_time = base::TimeDelta::FromMilliseconds(2030);
+ base::TimeDelta seek_time = base::Milliseconds(2030);
EXPECT_TRUE(
demuxer_->EvictCodedFrames(kSourceId, seek_time, 5 * block_size_));
@@ -3730,8 +3712,8 @@ TEST_F(ChunkDemuxerTest, GCDuringSeek) {
demuxer_->SetMemoryLimitsForTest(DemuxerStream::AUDIO, 5 * block_size_);
- base::TimeDelta seek_time1 = base::TimeDelta::FromMilliseconds(1000);
- base::TimeDelta seek_time2 = base::TimeDelta::FromMilliseconds(500);
+ base::TimeDelta seek_time1 = base::Milliseconds(1000);
+ base::TimeDelta seek_time2 = base::Milliseconds(500);
// Initiate a seek to |seek_time1|.
Seek(seek_time1);
@@ -3782,25 +3764,24 @@ TEST_F(ChunkDemuxerTest, GCKeepPlayhead) {
// We expect garbage collection to fail, as we don't want to spontaneously
// create gaps in source buffer stream. Gaps could break playback for many
// clients, who don't bother to check ranges after append.
- EXPECT_FALSE(demuxer_->EvictCodedFrames(
- kSourceId, base::TimeDelta::FromMilliseconds(0), 0));
+ EXPECT_FALSE(demuxer_->EvictCodedFrames(kSourceId, base::Milliseconds(0), 0));
CheckExpectedRanges("{ [0,230) }");
// Increase media_time a bit, this will allow some data to be collected, but
// we are still over memory usage limit.
- base::TimeDelta seek_time1 = base::TimeDelta::FromMilliseconds(23*2);
+ base::TimeDelta seek_time1 = base::Milliseconds(23 * 2);
Seek(seek_time1);
EXPECT_FALSE(demuxer_->EvictCodedFrames(kSourceId, seek_time1, 0));
CheckExpectedRanges("{ [46,230) }");
- base::TimeDelta seek_time2 = base::TimeDelta::FromMilliseconds(23*4);
+ base::TimeDelta seek_time2 = base::Milliseconds(23 * 4);
Seek(seek_time2);
EXPECT_FALSE(demuxer_->EvictCodedFrames(kSourceId, seek_time2, 0));
CheckExpectedRanges("{ [92,230) }");
// media_time has progressed to a point where we can collect enough data to
// be under memory limit, so Evict should return true.
- base::TimeDelta seek_time3 = base::TimeDelta::FromMilliseconds(23*6);
+ base::TimeDelta seek_time3 = base::Milliseconds(23 * 6);
Seek(seek_time3);
EXPECT_TRUE(demuxer_->EvictCodedFrames(kSourceId, seek_time3, 0));
// Strictly speaking the current playback time is 23*6==138ms, so we could
@@ -3814,8 +3795,8 @@ TEST_F(ChunkDemuxerTest, AppendWindow_Video) {
DemuxerStream* stream = GetStream(DemuxerStream::VIDEO);
// Set the append window to [50,280).
- append_window_start_for_next_append_ = base::TimeDelta::FromMilliseconds(50);
- append_window_end_for_next_append_ = base::TimeDelta::FromMilliseconds(280);
+ append_window_start_for_next_append_ = base::Milliseconds(50);
+ append_window_end_for_next_append_ = base::Milliseconds(280);
// Append a cluster that starts before and ends after the append window.
EXPECT_MEDIA_LOG(DroppedFrame("video", 0));
@@ -3834,7 +3815,7 @@ TEST_F(ChunkDemuxerTest, AppendWindow_Video) {
CheckExpectedBuffers(stream, "120K 150 180 210 240K");
// Extend the append window to [50,650).
- append_window_end_for_next_append_ = base::TimeDelta::FromMilliseconds(650);
+ append_window_end_for_next_append_ = base::Milliseconds(650);
// Append more data and verify that adding buffers start at the next
// key frame.
@@ -3850,8 +3831,8 @@ TEST_F(ChunkDemuxerTest, AppendWindow_Audio) {
DemuxerStream* stream = GetStream(DemuxerStream::AUDIO);
// Set the append window to [50,280).
- append_window_start_for_next_append_ = base::TimeDelta::FromMilliseconds(50);
- append_window_end_for_next_append_ = base::TimeDelta::FromMilliseconds(280);
+ append_window_start_for_next_append_ = base::Milliseconds(50);
+ append_window_end_for_next_append_ = base::Milliseconds(280);
// Append a cluster that starts before and ends after the append window.
EXPECT_MEDIA_LOG(DroppedFrame("audio", 0));
@@ -3877,7 +3858,7 @@ TEST_F(ChunkDemuxerTest, AppendWindow_Audio) {
CheckExpectedBuffers(stream, "50KP 50K 60K 90K 120K 150K 180K 210K 240K");
// Extend the append window to [50,650).
- append_window_end_for_next_append_ = base::TimeDelta::FromMilliseconds(650);
+ append_window_end_for_next_append_ = base::Milliseconds(650);
// Append more data and verify that a new range is created.
EXPECT_MEDIA_LOG(TruncatedFrame(630000, 660000, "end", 650000));
@@ -3892,8 +3873,8 @@ TEST_F(ChunkDemuxerTest, AppendWindow_AudioOverlapStartAndEnd) {
ASSERT_TRUE(InitDemuxer(HAS_AUDIO));
// Set the append window to [10,20).
- append_window_start_for_next_append_ = base::TimeDelta::FromMilliseconds(10);
- append_window_end_for_next_append_ = base::TimeDelta::FromMilliseconds(20);
+ append_window_start_for_next_append_ = base::Milliseconds(10);
+ append_window_end_for_next_append_ = base::Milliseconds(20);
EXPECT_MEDIA_LOG(
TruncatedFrame(0, kAudioBlockDuration * 1000, "start", 10000));
@@ -3912,13 +3893,12 @@ TEST_F(ChunkDemuxerTest, AppendWindow_AudioOverlapStartAndEnd) {
TEST_F(ChunkDemuxerTest, AppendWindow_WebMFile_AudioOnly) {
EXPECT_CALL(*this, DemuxerOpened());
demuxer_->Initialize(
- &host_, CreateInitDoneCallback(base::TimeDelta::FromMilliseconds(2744),
- PIPELINE_OK));
+ &host_, CreateInitDoneCallback(base::Milliseconds(2744), PIPELINE_OK));
ASSERT_EQ(ChunkDemuxer::kOk, AddId(kSourceId, HAS_AUDIO));
// Set the append window to [50,150).
- append_window_start_for_next_append_ = base::TimeDelta::FromMilliseconds(50);
- append_window_end_for_next_append_ = base::TimeDelta::FromMilliseconds(150);
+ append_window_start_for_next_append_ = base::Milliseconds(50);
+ append_window_end_for_next_append_ = base::Milliseconds(150);
EXPECT_MEDIA_LOG(DroppedFrameCheckAppendWindow(
"audio",
@@ -3945,15 +3925,14 @@ TEST_F(ChunkDemuxerTest, AppendWindow_WebMFile_AudioOnly) {
TEST_F(ChunkDemuxerTest, AppendWindow_AudioConfigUpdateRemovesPreroll) {
EXPECT_CALL(*this, DemuxerOpened());
demuxer_->Initialize(
- &host_, CreateInitDoneCallback(base::TimeDelta::FromMilliseconds(2744),
- PIPELINE_OK));
+ &host_, CreateInitDoneCallback(base::Milliseconds(2744), PIPELINE_OK));
ASSERT_EQ(ChunkDemuxer::kOk, AddId(kSourceId, HAS_AUDIO));
// Set the append window such that the first file is completely before the
// append window.
// Expect duration adjustment since actual duration differs slightly from
// duration in the init segment.
- const base::TimeDelta duration_1 = base::TimeDelta::FromMilliseconds(2768);
+ const base::TimeDelta duration_1 = base::Milliseconds(2768);
append_window_start_for_next_append_ = duration_1;
EXPECT_MEDIA_LOG(DroppedFrameCheckAppendWindow(
@@ -3995,7 +3974,7 @@ TEST_F(ChunkDemuxerTest, StartWaitingForSeekAfterParseError) {
EXPECT_MEDIA_LOG(StreamParsingFailed());
EXPECT_CALL(host_, OnDemuxerError(CHUNK_DEMUXER_ERROR_APPEND_FAILED));
AppendGarbage();
- base::TimeDelta seek_time = base::TimeDelta::FromSeconds(50);
+ base::TimeDelta seek_time = base::Seconds(50);
demuxer_->StartWaitingForSeek(seek_time);
}
@@ -4013,8 +3992,7 @@ TEST_F(ChunkDemuxerTest, Remove_AudioVideoText) {
CheckExpectedBuffers(video_stream, "0K 30 60 90 120K 150 180");
// Remove the buffers that were added.
- demuxer_->Remove(kSourceId, base::TimeDelta(),
- base::TimeDelta::FromMilliseconds(300));
+ demuxer_->Remove(kSourceId, base::TimeDelta(), base::Milliseconds(300));
// Verify that all the appended data has been removed.
CheckExpectedRanges("{ }");
@@ -4036,20 +4014,17 @@ TEST_F(ChunkDemuxerTest, Remove_StartAtDuration) {
// Set the duration to something small so that the append that
// follows updates the duration to reflect the end of the appended data.
- EXPECT_CALL(host_, SetDuration(
- base::TimeDelta::FromMilliseconds(1)));
+ EXPECT_CALL(host_, SetDuration(base::Milliseconds(1)));
demuxer_->SetDuration(0.001);
- EXPECT_CALL(host_, SetDuration(
- base::TimeDelta::FromMilliseconds(160)));
+ EXPECT_CALL(host_, SetDuration(base::Milliseconds(160)));
AppendSingleStreamCluster(kSourceId, kAudioTrackNum,
"0K 20K 40K 60K 80K 100K 120K 140D20K");
CheckExpectedRanges("{ [0,160) }");
CheckExpectedBuffers(audio_stream, "0K 20K 40K 60K 80K 100K 120K 140K");
- demuxer_->Remove(kSourceId,
- base::TimeDelta::FromSecondsD(demuxer_->GetDuration()),
+ demuxer_->Remove(kSourceId, base::Seconds(demuxer_->GetDuration()),
kInfiniteDuration);
Seek(base::TimeDelta());
@@ -4066,7 +4041,7 @@ TEST_F(ChunkDemuxerTest, SeekCompletesWithoutTextCues) {
DemuxerStream* audio_stream = GetStream(DemuxerStream::AUDIO);
DemuxerStream* video_stream = GetStream(DemuxerStream::VIDEO);
- base::TimeDelta seek_time = base::TimeDelta::FromMilliseconds(120);
+ base::TimeDelta seek_time = base::Milliseconds(120);
bool seek_cb_was_called = false;
demuxer_->StartWaitingForSeek(seek_time);
demuxer_->Seek(seek_time,
@@ -4154,21 +4129,19 @@ TEST_F(ChunkDemuxerTest, EvictCodedFramesTest) {
// If we want to append 80 more blocks of muxed a+v data and the current
// position is 0, that will fail, because EvictCodedFrames won't remove the
// data after the current playback position.
- ASSERT_FALSE(demuxer_->EvictCodedFrames(kSourceId,
- base::TimeDelta::FromMilliseconds(0),
- 80));
+ ASSERT_FALSE(
+ demuxer_->EvictCodedFrames(kSourceId, base::Milliseconds(0), 80));
// EvictCodedFrames has failed, so data should be unchanged.
- Seek(base::TimeDelta::FromMilliseconds(0));
+ Seek(base::Milliseconds(0));
CheckExpectedBuffers(audio_stream, kAudioStreamInfo);
CheckExpectedBuffers(video_stream, kVideoStreamInfo);
// But if we pretend that playback position has moved to 120ms, that allows
// EvictCodedFrames to garbage-collect enough data to succeed.
- ASSERT_TRUE(demuxer_->EvictCodedFrames(kSourceId,
- base::TimeDelta::FromMilliseconds(120),
- 80));
+ ASSERT_TRUE(
+ demuxer_->EvictCodedFrames(kSourceId, base::Milliseconds(120), 80));
- Seek(base::TimeDelta::FromMilliseconds(0));
+ Seek(base::Milliseconds(0));
// Audio stream had 8 buffers, video stream had 15. We told EvictCodedFrames
// that the new data size is 8 blocks muxed, i.e. 80 bytes. Given the current
// ratio of video to the total data size (15 : (8+15) ~= 0.65) the estimated
@@ -4251,8 +4224,7 @@ TEST_F(ChunkDemuxerTest, RelaxedKeyframe_RemoveInterruptsCodedFrameGroup_1) {
EXPECT_MEDIA_LOG(WebMSimpleBlockDurationEstimated(10)).Times(3);
AppendSingleStreamCluster(kSourceId, kVideoTrackNum, "0K 10 20");
- demuxer_->Remove(kSourceId, base::TimeDelta(),
- base::TimeDelta::FromMilliseconds(30));
+ demuxer_->Remove(kSourceId, base::TimeDelta(), base::Milliseconds(30));
AppendSingleStreamCluster(kSourceId, kVideoTrackNum, "30 40 50");
AppendSingleStreamCluster(kSourceId, kVideoTrackNum, "60 70K 80");
CheckExpectedRanges("{ [70,90) }");
@@ -4270,8 +4242,7 @@ TEST_F(ChunkDemuxerTest, RelaxedKeyframe_RemoveInterruptsCodedFrameGroup_2) {
AppendSingleStreamCluster(kSourceId, kVideoTrackNum, "0K 10 20");
AppendSingleStreamCluster(kSourceId, kVideoTrackNum, "30 40 50");
AppendSingleStreamCluster(kSourceId, kVideoTrackNum, "60 70K 80");
- demuxer_->Remove(kSourceId, base::TimeDelta(),
- base::TimeDelta::FromMilliseconds(10));
+ demuxer_->Remove(kSourceId, base::TimeDelta(), base::Milliseconds(10));
CheckExpectedRanges("{ [70,90) }");
CheckExpectedBuffers(video_stream, "70K 80");
}
@@ -4287,11 +4258,10 @@ TEST_F(ChunkDemuxerTest, RelaxedKeyframe_RemoveInterruptsCodedFrameGroup_3) {
AppendSingleStreamCluster(kSourceId, kVideoTrackNum, "0K 10 20");
AppendSingleStreamCluster(kSourceId, kVideoTrackNum, "30 40 50");
AppendSingleStreamCluster(kSourceId, kVideoTrackNum, "60 70K 80");
- demuxer_->Remove(kSourceId, base::TimeDelta::FromMilliseconds(50),
- base::TimeDelta::FromMilliseconds(60));
+ demuxer_->Remove(kSourceId, base::Milliseconds(50), base::Milliseconds(60));
CheckExpectedRanges("{ [0,50) [70,90) }");
CheckExpectedBuffers(video_stream, "0K 10 20 30 40");
- Seek(base::TimeDelta::FromMilliseconds(70));
+ Seek(base::Milliseconds(70));
CheckExpectedBuffers(video_stream, "70K 80");
}
@@ -4313,8 +4283,7 @@ TEST_F(ChunkDemuxerTest,
AppendMuxedCluster(MuxedStreamInfo(kAudioTrackNum, "0K 10K 20D10K"),
MuxedStreamInfo(kVideoTrackNum, "0K 10 20", 10));
- demuxer_->Remove(kSourceId, base::TimeDelta(),
- base::TimeDelta::FromMilliseconds(30));
+ demuxer_->Remove(kSourceId, base::TimeDelta(), base::Milliseconds(30));
AppendMuxedCluster(MuxedStreamInfo(kAudioTrackNum, "30K 40K 50D10K"),
MuxedStreamInfo(kVideoTrackNum, "30 40 50", 10));
AppendMuxedCluster(MuxedStreamInfo(kAudioTrackNum, "60K 70K 80D10K"),
@@ -4348,8 +4317,7 @@ TEST_F(ChunkDemuxerTest,
EXPECT_MEDIA_LOG(SegmentMissingFrames("1"));
AppendMuxedCluster(MuxedStreamInfo(kAudioTrackNum, "0K 10K 20D10K"),
MuxedStreamInfo(kVideoTrackNum, ""));
- demuxer_->Remove(kSourceId, base::TimeDelta(),
- base::TimeDelta::FromMilliseconds(30));
+ demuxer_->Remove(kSourceId, base::TimeDelta(), base::Milliseconds(30));
AppendMuxedCluster(MuxedStreamInfo(kAudioTrackNum, "30K 40K 50D10K"),
MuxedStreamInfo(kVideoTrackNum, "30 40 50", 10));
AppendMuxedCluster(MuxedStreamInfo(kAudioTrackNum, "60K 70K 80D10K"),
@@ -4409,8 +4377,7 @@ TEST_F(ChunkDemuxerTest,
CheckExpectedRanges(DemuxerStream::AUDIO, "{ [0,30) }");
CheckExpectedRanges(DemuxerStream::VIDEO, "{ }");
- demuxer_->Remove(kSourceId, base::TimeDelta(),
- base::TimeDelta::FromMilliseconds(30));
+ demuxer_->Remove(kSourceId, base::TimeDelta(), base::Milliseconds(30));
// Append the remainder of the cluster
ASSERT_TRUE(AppendData(kSourceId, cluster->data() + video_start,
@@ -4670,7 +4637,7 @@ TEST_F(ChunkDemuxerTest, ZeroLengthFramesDropped) {
// ensure we read back precisely the expected buffers.
ASSERT_GT(block_size_, 0U);
AppendSingleStreamCluster(kSourceId, c.track_number, "0K 10K 20K 30D10K");
- EXPECT_CALL(host_, SetDuration(base::TimeDelta::FromMilliseconds(40)));
+ EXPECT_CALL(host_, SetDuration(base::Milliseconds(40)));
MarkEndOfStream(PIPELINE_OK);
CheckExpectedRanges("{ [0,40) }");
CheckExpectedBuffers(stream, "0K 10K 20K 30K");
@@ -4683,18 +4650,18 @@ TEST_F(ChunkDemuxerTest, ZeroLengthFramesDropped) {
block_size_ = 0;
AppendSingleStreamCluster(kSourceId, c.track_number, "40D10K");
MarkEndOfStream(PIPELINE_OK);
- Seek(base::TimeDelta::FromMilliseconds(0));
+ Seek(base::Milliseconds(0));
CheckExpectedRanges("{ [0,40) }");
CheckExpectedBuffers(stream, "0K 10K 20K 30K");
ExpectEndOfStream(c.stream_type);
// Append a cluster containing a nonzero-sized frame. Verify it is buffered.
demuxer_->UnmarkEndOfStream();
- EXPECT_CALL(host_, SetDuration(base::TimeDelta::FromMilliseconds(50)));
+ EXPECT_CALL(host_, SetDuration(base::Milliseconds(50)));
block_size_ = kBlockSize;
AppendSingleStreamCluster(kSourceId, c.track_number, "40D10K");
MarkEndOfStream(PIPELINE_OK);
- Seek(base::TimeDelta::FromMilliseconds(0));
+ Seek(base::Milliseconds(0));
CheckExpectedRanges("{ [0,50) }");
CheckExpectedBuffers(stream, "0K 10K 20K 30K 40K");
ExpectEndOfStream(c.stream_type);
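Most of the churn in this test file is the base::TimeDelta factory rename: the verbose base::TimeDelta::From*() statics give way to the free functions base::Milliseconds(), base::Microseconds() and base::Seconds() declared in base/time/time.h. A minimal sketch of the new spellings (Chromium-tree code rather than a standalone program; the function and variable names here are made up):

#include "base/time/time.h"

base::TimeDelta ExampleDurations() {
  // Old: base::TimeDelta::FromMilliseconds(23 * 2), ::FromMicroseconds(1),
  //      ::FromSeconds(50), ...
  base::TimeDelta seek = base::Milliseconds(23 * 2);  // 46 ms
  seek += base::Microseconds(1);   // arithmetic and comparisons are unchanged
  return seek;                     // callers still just see a base::TimeDelta
}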
diff --git a/chromium/media/filters/dav1d_video_decoder.cc b/chromium/media/filters/dav1d_video_decoder.cc
index ff50619394c..6b69299dca7 100644
--- a/chromium/media/filters/dav1d_video_decoder.cc
+++ b/chromium/media/filters/dav1d_video_decoder.cc
@@ -11,7 +11,6 @@
#include "base/bind.h"
#include "base/bits.h"
#include "base/callback.h"
-#include "base/callback_helpers.h"
#include "base/logging.h"
#include "base/strings/stringprintf.h"
#include "base/threading/sequenced_task_runner_handle.h"
@@ -172,7 +171,7 @@ void Dav1dVideoDecoder::Initialize(const VideoDecoderConfig& config,
return;
}
- if (config.codec() != kCodecAV1) {
+ if (config.codec() != VideoCodec::kAV1) {
std::move(bound_init_cb)
.Run(Status(StatusCode::kDecoderUnsupportedCodec)
.WithData("codec", config.codec()));
@@ -388,8 +387,8 @@ bool Dav1dVideoDecoder::DecodeBuffer(scoped_refptr<DecoderBuffer> buffer) {
// When we use bind mode, our image data is dependent on the Dav1dPicture,
// so we must ensure it stays alive long enough.
- frame->AddDestructionObserver(base::BindOnce(
- base::DoNothing::Once<ScopedPtrDav1dPicture>(), std::move(p)));
+ frame->AddDestructionObserver(
+ base::BindOnce([](ScopedPtrDav1dPicture) {}, std::move(p)));
output_cb_.Run(std::move(frame));
}
@@ -445,15 +444,14 @@ scoped_refptr<VideoFrame> Dav1dVideoDecoder::BindImageToVideoFrame(
config_.aspect_ratio().GetNaturalSize(gfx::Rect(visible_size)),
pic->stride[0], uv_plane_stride, uv_plane_stride,
static_cast<uint8_t*>(pic->data[0]), u_plane, v_plane,
- base::TimeDelta::FromMicroseconds(pic->m.timestamp));
+ base::Microseconds(pic->m.timestamp));
if (!frame)
return nullptr;
// Each frame needs a ref on the fake UV data to keep it alive until done.
if (needs_fake_uv_planes) {
frame->AddDestructionObserver(base::BindOnce(
- base::DoNothing::Once<scoped_refptr<base::RefCountedBytes>>(),
- fake_uv_data_));
+ [](scoped_refptr<base::RefCountedBytes>) {}, fake_uv_data_));
}
return frame;
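Both hunks above replace the removed base::DoNothing::Once<T>() helper (hence the dropped base/callback_helpers.h include) with an explicit no-op lambda. Either spelling exists only to move ownership of an object into the frame's destruction observer so it lives exactly as long as the VideoFrame. A minimal sketch of that keep-alive idiom, assuming Chromium's base/bind.h and a hypothetical Resource type:

#include <memory>

#include "base/bind.h"
#include "base/callback.h"

struct Resource {};  // hypothetical payload whose lifetime must track the frame

base::OnceClosure MakeKeepAlive(std::unique_ptr<Resource> resource) {
  // The lambda body is intentionally empty: binding |resource| as its argument
  // moves ownership into the returned closure, so the Resource is destroyed
  // only when the closure runs (or is dropped), e.g. when the VideoFrame that
  // holds it as a destruction observer goes away.
  return base::BindOnce([](std::unique_ptr<Resource>) {}, std::move(resource));
}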
diff --git a/chromium/media/filters/dav1d_video_decoder.h b/chromium/media/filters/dav1d_video_decoder.h
index 3cdc51841d4..2ba9beb5bbb 100644
--- a/chromium/media/filters/dav1d_video_decoder.h
+++ b/chromium/media/filters/dav1d_video_decoder.h
@@ -27,6 +27,10 @@ class MEDIA_EXPORT Dav1dVideoDecoder : public OffloadableVideoDecoder {
Dav1dVideoDecoder(MediaLog* media_log,
OffloadState offload_state = OffloadState::kNormal);
+
+ Dav1dVideoDecoder(const Dav1dVideoDecoder&) = delete;
+ Dav1dVideoDecoder& operator=(const Dav1dVideoDecoder&) = delete;
+
~Dav1dVideoDecoder() override;
// VideoDecoder implementation.
@@ -87,8 +91,6 @@ class MEDIA_EXPORT Dav1dVideoDecoder : public OffloadableVideoDecoder {
// The allocated decoder; null before Initialize() and anytime after
// CloseDecoder().
Dav1dContext* dav1d_decoder_ = nullptr;
-
- DISALLOW_COPY_AND_ASSIGN(Dav1dVideoDecoder);
};
// Helper class for creating a Dav1dVideoDecoder which will offload all AV1
@@ -98,7 +100,7 @@ class OffloadingDav1dVideoDecoder : public OffloadingVideoDecoder {
explicit OffloadingDav1dVideoDecoder(MediaLog* media_log)
: OffloadingVideoDecoder(
0,
- std::vector<VideoCodec>(1, kCodecAV1),
+ std::vector<VideoCodec>(1, VideoCodec::kAV1),
std::make_unique<Dav1dVideoDecoder>(
media_log,
OffloadableVideoDecoder::OffloadState::kOffloaded)) {}
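This header change is the first of many identical ones below: the DISALLOW_COPY_AND_ASSIGN(...) macro at the bottom of the private section is replaced by explicitly deleted copy operations next to the constructors. A self-contained sketch of the resulting shape (plain C++, no Chromium dependencies; the class name is made up):

class Widget {
 public:
  Widget() = default;

  // Replaces DISALLOW_COPY_AND_ASSIGN(Widget): copying is rejected at compile
  // time, and the restriction is now visible in the public interface instead
  // of being hidden in the private section.
  Widget(const Widget&) = delete;
  Widget& operator=(const Widget&) = delete;

  ~Widget() = default;
};

// Widget a;
// Widget b = a;  // error: use of deleted copy constructor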
diff --git a/chromium/media/filters/dav1d_video_decoder_unittest.cc b/chromium/media/filters/dav1d_video_decoder_unittest.cc
index 0711b2f9e7e..ac1a45605d9 100644
--- a/chromium/media/filters/dav1d_video_decoder_unittest.cc
+++ b/chromium/media/filters/dav1d_video_decoder_unittest.cc
@@ -42,10 +42,13 @@ class Dav1dVideoDecoderTest : public testing::Test {
: decoder_(new Dav1dVideoDecoder(&media_log_)),
i_frame_buffer_(ReadTestDataFile("av1-I-frame-320x240")) {}
+ Dav1dVideoDecoderTest(const Dav1dVideoDecoderTest&) = delete;
+ Dav1dVideoDecoderTest& operator=(const Dav1dVideoDecoderTest&) = delete;
+
~Dav1dVideoDecoderTest() override { Destroy(); }
void Initialize() {
- InitializeWithConfig(TestVideoConfig::Normal(kCodecAV1));
+ InitializeWithConfig(TestVideoConfig::Normal(VideoCodec::kAV1));
}
void InitializeWithConfigWithResult(const VideoDecoderConfig& config,
@@ -69,7 +72,7 @@ class Dav1dVideoDecoderTest : public testing::Test {
}
void Reinitialize() {
- InitializeWithConfig(TestVideoConfig::Large(kCodecAV1));
+ InitializeWithConfig(TestVideoConfig::Large(VideoCodec::kAV1));
}
void Reset() {
@@ -191,9 +194,6 @@ class Dav1dVideoDecoderTest : public testing::Test {
scoped_refptr<DecoderBuffer> i_frame_buffer_;
OutputFrames output_frames_;
-
- private:
- DISALLOW_COPY_AND_ASSIGN(Dav1dVideoDecoderTest);
};
TEST_F(Dav1dVideoDecoderTest, Initialize_Normal) {
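The kCodecAV1 -> VideoCodec::kAV1 spellings in this test (and AudioCodec::kVorbis and friends in the decrypting and FFmpeg files below) follow from media::VideoCodec and media::AudioCodec becoming scoped enums, so every enumerator now carries its enum's name as a qualifier. A short sketch of a call site after the rename, assuming media/base/video_codecs.h from the tree; the helper itself is hypothetical and not part of the patch:

#include "media/base/video_codecs.h"

// Hypothetical helper showing the scoped spellings this patch migrates to.
bool IsSoftwareFriendly(media::VideoCodec codec) {
  switch (codec) {
    case media::VideoCodec::kH264:
    case media::VideoCodec::kVP8:
      return true;
    default:
      return false;  // kAV1, kHEVC, kTheora, ... are handled elsewhere.
  }
}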
diff --git a/chromium/media/filters/decoder_stream.cc b/chromium/media/filters/decoder_stream.cc
index c2c0f94a812..0a450d14df2 100644
--- a/chromium/media/filters/decoder_stream.cc
+++ b/chromium/media/filters/decoder_stream.cc
@@ -1023,6 +1023,11 @@ void DecoderStream<StreamType>::ReportEncryptionType(
: EncryptionType::kEncrypted;
}
+ if (encryption_type == EncryptionType::kEncryptedWithClearLead) {
+ MEDIA_LOG(INFO, media_log_)
+        << GetStreamTypeString() << " stream is encrypted with clear lead";
+ }
+
traits_->SetEncryptionType(encryption_type);
traits_->ReportStatistics(statistics_cb_, 0);
}
diff --git a/chromium/media/filters/decrypting_audio_decoder.h b/chromium/media/filters/decrypting_audio_decoder.h
index 9ca951e2950..c7864d78107 100644
--- a/chromium/media/filters/decrypting_audio_decoder.h
+++ b/chromium/media/filters/decrypting_audio_decoder.h
@@ -37,6 +37,10 @@ class MEDIA_EXPORT DecryptingAudioDecoder : public AudioDecoder {
DecryptingAudioDecoder(
const scoped_refptr<base::SequencedTaskRunner>& task_runner,
MediaLog* media_log);
+
+ DecryptingAudioDecoder(const DecryptingAudioDecoder&) = delete;
+ DecryptingAudioDecoder& operator=(const DecryptingAudioDecoder&) = delete;
+
~DecryptingAudioDecoder() override;
// Decoder implementation
@@ -126,8 +130,6 @@ class MEDIA_EXPORT DecryptingAudioDecoder : public AudioDecoder {
std::unique_ptr<CallbackRegistration> event_cb_registration_;
base::WeakPtrFactory<DecryptingAudioDecoder> weak_factory_{this};
-
- DISALLOW_COPY_AND_ASSIGN(DecryptingAudioDecoder);
};
} // namespace media
diff --git a/chromium/media/filters/decrypting_audio_decoder_unittest.cc b/chromium/media/filters/decrypting_audio_decoder_unittest.cc
index b2836c34f7b..a55e5d5fa60 100644
--- a/chromium/media/filters/decrypting_audio_decoder_unittest.cc
+++ b/chromium/media/filters/decrypting_audio_decoder_unittest.cc
@@ -67,6 +67,10 @@ class DecryptingAudioDecoderTest : public testing::Test {
decoded_frame_(nullptr),
decoded_frame_list_() {}
+ DecryptingAudioDecoderTest(const DecryptingAudioDecoderTest&) = delete;
+ DecryptingAudioDecoderTest& operator=(const DecryptingAudioDecoderTest&) =
+ delete;
+
~DecryptingAudioDecoderTest() override { Destroy(); }
void InitializeAndExpectResult(const AudioDecoderConfig& config,
@@ -114,7 +118,7 @@ class DecryptingAudioDecoderTest : public testing::Test {
return std::make_unique<CallbackRegistration>();
});
- config_.Initialize(kCodecVorbis, kSampleFormatPlanarF32,
+ config_.Initialize(AudioCodec::kVorbis, kSampleFormatPlanarF32,
CHANNEL_LAYOUT_STEREO, kSampleRate, EmptyExtraData(),
EncryptionScheme::kCenc, base::TimeDelta(), 0);
InitializeAndExpectResult(config_, true);
@@ -271,9 +275,6 @@ class DecryptingAudioDecoderTest : public testing::Test {
scoped_refptr<DecoderBuffer> encrypted_buffer_;
scoped_refptr<AudioBuffer> decoded_frame_;
Decryptor::AudioFrames decoded_frame_list_;
-
- private:
- DISALLOW_COPY_AND_ASSIGN(DecryptingAudioDecoderTest);
};
TEST_F(DecryptingAudioDecoderTest, Initialize_Normal) {
@@ -282,7 +283,7 @@ TEST_F(DecryptingAudioDecoderTest, Initialize_Normal) {
// Ensure decoder handles invalid audio configs without crashing.
TEST_F(DecryptingAudioDecoderTest, Initialize_InvalidAudioConfig) {
- AudioDecoderConfig config(kUnknownAudioCodec, kUnknownSampleFormat,
+ AudioDecoderConfig config(AudioCodec::kUnknown, kUnknownSampleFormat,
CHANNEL_LAYOUT_STEREO, 0, EmptyExtraData(),
EncryptionScheme::kCenc);
@@ -299,7 +300,7 @@ TEST_F(DecryptingAudioDecoderTest, Initialize_UnsupportedAudioConfig) {
EXPECT_CALL(*decryptor_, InitializeAudioDecoder(_, _))
.WillOnce(RunOnceCallback<1>(false));
- AudioDecoderConfig config(kCodecVorbis, kSampleFormatPlanarF32,
+ AudioDecoderConfig config(AudioCodec::kVorbis, kSampleFormatPlanarF32,
CHANNEL_LAYOUT_STEREO, kSampleRate,
EmptyExtraData(), EncryptionScheme::kCenc);
InitializeAndExpectResult(config, false);
@@ -307,7 +308,7 @@ TEST_F(DecryptingAudioDecoderTest, Initialize_UnsupportedAudioConfig) {
TEST_F(DecryptingAudioDecoderTest, Initialize_CdmWithoutDecryptor) {
SetCdmType(CDM_WITHOUT_DECRYPTOR);
- AudioDecoderConfig config(kCodecVorbis, kSampleFormatPlanarF32,
+ AudioDecoderConfig config(AudioCodec::kVorbis, kSampleFormatPlanarF32,
CHANNEL_LAYOUT_STEREO, kSampleRate,
EmptyExtraData(), EncryptionScheme::kCenc);
InitializeAndExpectResult(config, false);
@@ -372,7 +373,7 @@ TEST_F(DecryptingAudioDecoderTest, Reinitialize_EncryptedToEncrypted) {
// The new config is different from the initial config in bits-per-channel,
// channel layout and samples_per_second.
- AudioDecoderConfig new_config(kCodecVorbis, kSampleFormatPlanarS16,
+ AudioDecoderConfig new_config(AudioCodec::kVorbis, kSampleFormatPlanarS16,
CHANNEL_LAYOUT_5_1, 88200, EmptyExtraData(),
EncryptionScheme::kCenc);
EXPECT_NE(new_config.bits_per_channel(), config_.bits_per_channel());
@@ -394,7 +395,7 @@ TEST_F(DecryptingAudioDecoderTest, Reinitialize_EncryptedToClear) {
// The new config is different from the initial config in bits-per-channel,
// channel layout and samples_per_second.
- AudioDecoderConfig new_config(kCodecVorbis, kSampleFormatPlanarS16,
+ AudioDecoderConfig new_config(AudioCodec::kVorbis, kSampleFormatPlanarS16,
CHANNEL_LAYOUT_5_1, 88200, EmptyExtraData(),
EncryptionScheme::kUnencrypted);
EXPECT_NE(new_config.bits_per_channel(), config_.bits_per_channel());
diff --git a/chromium/media/filters/decrypting_demuxer_stream.h b/chromium/media/filters/decrypting_demuxer_stream.h
index a709b3ef9b5..56bc8a281e9 100644
--- a/chromium/media/filters/decrypting_demuxer_stream.h
+++ b/chromium/media/filters/decrypting_demuxer_stream.h
@@ -41,6 +41,9 @@ class MEDIA_EXPORT DecryptingDemuxerStream : public DemuxerStream {
MediaLog* media_log,
const WaitingCB& waiting_cb);
+ DecryptingDemuxerStream(const DecryptingDemuxerStream&) = delete;
+ DecryptingDemuxerStream& operator=(const DecryptingDemuxerStream&) = delete;
+
// Cancels all pending operations immediately and fires all pending callbacks.
~DecryptingDemuxerStream() override;
@@ -184,8 +187,6 @@ class MEDIA_EXPORT DecryptingDemuxerStream : public DemuxerStream {
std::unique_ptr<CallbackRegistration> event_cb_registration_;
base::WeakPtrFactory<DecryptingDemuxerStream> weak_factory_{this};
-
- DISALLOW_COPY_AND_ASSIGN(DecryptingDemuxerStream);
};
} // namespace media
diff --git a/chromium/media/filters/decrypting_demuxer_stream_unittest.cc b/chromium/media/filters/decrypting_demuxer_stream_unittest.cc
index dfe1e1716b4..973e018ac17 100644
--- a/chromium/media/filters/decrypting_demuxer_stream_unittest.cc
+++ b/chromium/media/filters/decrypting_demuxer_stream_unittest.cc
@@ -91,6 +91,10 @@ class DecryptingDemuxerStreamTest : public testing::Test {
encrypted_buffer_(CreateFakeEncryptedStreamBuffer(false)),
decrypted_buffer_(new DecoderBuffer(kFakeBufferSize)) {}
+ DecryptingDemuxerStreamTest(const DecryptingDemuxerStreamTest&) = delete;
+ DecryptingDemuxerStreamTest& operator=(const DecryptingDemuxerStreamTest&) =
+ delete;
+
~DecryptingDemuxerStreamTest() override {
if (is_initialized_)
EXPECT_CALL(*decryptor_, CancelDecrypt(_));
@@ -143,7 +147,7 @@ class DecryptingDemuxerStreamTest : public testing::Test {
return std::make_unique<CallbackRegistration>();
});
- AudioDecoderConfig input_config(kCodecVorbis, kSampleFormatPlanarF32,
+ AudioDecoderConfig input_config(AudioCodec::kVorbis, kSampleFormatPlanarF32,
CHANNEL_LAYOUT_STEREO, 44100,
EmptyExtraData(), EncryptionScheme::kCenc);
@@ -300,9 +304,6 @@ class DecryptingDemuxerStreamTest : public testing::Test {
scoped_refptr<DecoderBuffer> clear_encrypted_stream_buffer_;
scoped_refptr<DecoderBuffer> encrypted_buffer_;
scoped_refptr<DecoderBuffer> decrypted_buffer_;
-
- private:
- DISALLOW_COPY_AND_ASSIGN(DecryptingDemuxerStreamTest);
};
TEST_F(DecryptingDemuxerStreamTest, Initialize_NormalAudio) {
@@ -336,7 +337,7 @@ TEST_F(DecryptingDemuxerStreamTest, Initialize_NormalVideo) {
TEST_F(DecryptingDemuxerStreamTest, Initialize_CdmWithoutDecryptor) {
SetCdmType(CDM_WITHOUT_DECRYPTOR);
- AudioDecoderConfig input_config(kCodecVorbis, kSampleFormatPlanarF32,
+ AudioDecoderConfig input_config(AudioCodec::kVorbis, kSampleFormatPlanarF32,
CHANNEL_LAYOUT_STEREO, 44100,
EmptyExtraData(), EncryptionScheme::kCenc);
EXPECT_MEDIA_LOG(HasSubstr("kAudioTracks"));
@@ -516,7 +517,7 @@ TEST_F(DecryptingDemuxerStreamTest, Reset_DuringAbortedDemuxerRead) {
TEST_F(DecryptingDemuxerStreamTest, DemuxerRead_ConfigChanged) {
Initialize(2, 2);
- AudioDecoderConfig new_config(kCodecVorbis, kSampleFormatPlanarF32,
+ AudioDecoderConfig new_config(AudioCodec::kVorbis, kSampleFormatPlanarF32,
CHANNEL_LAYOUT_STEREO, 88200, EmptyExtraData(),
EncryptionScheme::kCenc);
input_audio_stream_->set_audio_decoder_config(new_config);
diff --git a/chromium/media/filters/decrypting_media_resource.h b/chromium/media/filters/decrypting_media_resource.h
index 8261a89db7e..b0929f5b163 100644
--- a/chromium/media/filters/decrypting_media_resource.h
+++ b/chromium/media/filters/decrypting_media_resource.h
@@ -36,6 +36,10 @@ class MEDIA_EXPORT DecryptingMediaResource : public MediaResource {
CdmContext* cdm_context,
MediaLog* media_log,
scoped_refptr<base::SequencedTaskRunner> task_runner);
+
+ DecryptingMediaResource(const DecryptingMediaResource&) = delete;
+ DecryptingMediaResource& operator=(const DecryptingMediaResource&) = delete;
+
~DecryptingMediaResource() override;
// MediaResource implementation:
@@ -69,8 +73,6 @@ class MEDIA_EXPORT DecryptingMediaResource : public MediaResource {
// if one of the DecryptingDemuxerStreams failed to initialize correctly.
InitCB init_cb_;
base::WeakPtrFactory<DecryptingMediaResource> weak_factory_{this};
-
- DISALLOW_COPY_AND_ASSIGN(DecryptingMediaResource);
};
} // namespace media
diff --git a/chromium/media/filters/decrypting_video_decoder.h b/chromium/media/filters/decrypting_video_decoder.h
index 67ef66f0f55..e30358fa14a 100644
--- a/chromium/media/filters/decrypting_video_decoder.h
+++ b/chromium/media/filters/decrypting_video_decoder.h
@@ -36,6 +36,10 @@ class MEDIA_EXPORT DecryptingVideoDecoder : public VideoDecoder {
DecryptingVideoDecoder(
const scoped_refptr<base::SequencedTaskRunner>& task_runner,
MediaLog* media_log);
+
+ DecryptingVideoDecoder(const DecryptingVideoDecoder&) = delete;
+ DecryptingVideoDecoder& operator=(const DecryptingVideoDecoder&) = delete;
+
~DecryptingVideoDecoder() override;
bool SupportsDecryption() const override;
@@ -121,8 +125,6 @@ class MEDIA_EXPORT DecryptingVideoDecoder : public VideoDecoder {
std::unique_ptr<CallbackRegistration> event_cb_registration_;
base::WeakPtrFactory<DecryptingVideoDecoder> weak_factory_{this};
-
- DISALLOW_COPY_AND_ASSIGN(DecryptingVideoDecoder);
};
} // namespace media
diff --git a/chromium/media/filters/decrypting_video_decoder_unittest.cc b/chromium/media/filters/decrypting_video_decoder_unittest.cc
index f2148b3cf85..69c1b4249e8 100644
--- a/chromium/media/filters/decrypting_video_decoder_unittest.cc
+++ b/chromium/media/filters/decrypting_video_decoder_unittest.cc
@@ -63,6 +63,10 @@ class DecryptingVideoDecoderTest : public testing::Test {
VideoFrame::CreateBlackFrame(TestVideoConfig::NormalCodedSize())),
null_video_frame_(scoped_refptr<VideoFrame>()) {}
+ DecryptingVideoDecoderTest(const DecryptingVideoDecoderTest&) = delete;
+ DecryptingVideoDecoderTest& operator=(const DecryptingVideoDecoderTest&) =
+ delete;
+
~DecryptingVideoDecoderTest() override { Destroy(); }
enum CdmType { CDM_WITHOUT_DECRYPTOR, CDM_WITH_DECRYPTOR };
@@ -255,9 +259,6 @@ class DecryptingVideoDecoderTest : public testing::Test {
scoped_refptr<DecoderBuffer> encrypted_buffer_;
scoped_refptr<VideoFrame> decoded_video_frame_;
scoped_refptr<VideoFrame> null_video_frame_;
-
- private:
- DISALLOW_COPY_AND_ASSIGN(DecryptingVideoDecoderTest);
};
TEST_F(DecryptingVideoDecoderTest, Initialize_Normal) {
diff --git a/chromium/media/filters/demuxer_perftest.cc b/chromium/media/filters/demuxer_perftest.cc
index 3bb2619c3e8..35c6a01862b 100644
--- a/chromium/media/filters/demuxer_perftest.cc
+++ b/chromium/media/filters/demuxer_perftest.cc
@@ -33,6 +33,10 @@ static const int kBenchmarkIterations = 100;
class DemuxerHostImpl : public media::DemuxerHost {
public:
DemuxerHostImpl() = default;
+
+ DemuxerHostImpl(const DemuxerHostImpl&) = delete;
+ DemuxerHostImpl& operator=(const DemuxerHostImpl&) = delete;
+
~DemuxerHostImpl() override = default;
// DemuxerHost implementation.
@@ -40,9 +44,6 @@ class DemuxerHostImpl : public media::DemuxerHost {
const Ranges<base::TimeDelta>& ranges) override {}
void SetDuration(base::TimeDelta duration) override {}
void OnDemuxerError(media::PipelineStatus error) override {}
-
- private:
- DISALLOW_COPY_AND_ASSIGN(DemuxerHostImpl);
};
static void QuitLoopWithStatus(base::OnceClosure quit_cb,
@@ -67,6 +68,10 @@ typedef std::vector<media::DemuxerStream*> Streams;
class StreamReader {
public:
StreamReader(media::Demuxer* demuxer, bool enable_bitstream_converter);
+
+ StreamReader(const StreamReader&) = delete;
+ StreamReader& operator=(const StreamReader&) = delete;
+
~StreamReader();
// Performs a single step read.
@@ -92,8 +97,6 @@ class StreamReader {
std::vector<bool> end_of_stream_;
std::vector<base::TimeDelta> last_read_timestamp_;
std::vector<int> counts_;
-
- DISALLOW_COPY_AND_ASSIGN(StreamReader);
};
StreamReader::StreamReader(media::Demuxer* demuxer,
diff --git a/chromium/media/filters/fake_video_decoder.h b/chromium/media/filters/fake_video_decoder.h
index 77770b27fb5..8305776cc22 100644
--- a/chromium/media/filters/fake_video_decoder.h
+++ b/chromium/media/filters/fake_video_decoder.h
@@ -39,6 +39,9 @@ class FakeVideoDecoder : public VideoDecoder {
int max_parallel_decoding_requests,
const BytesDecodedCB& bytes_decoded_cb);
+ FakeVideoDecoder(const FakeVideoDecoder&) = delete;
+ FakeVideoDecoder& operator=(const FakeVideoDecoder&) = delete;
+
~FakeVideoDecoder() override;
// Enables encrypted config supported. Must be called before Initialize().
@@ -140,8 +143,6 @@ class FakeVideoDecoder : public VideoDecoder {
// NOTE: Weak pointers must be invalidated before all other member variables.
base::WeakPtrFactory<FakeVideoDecoder> weak_factory_{this};
-
- DISALLOW_COPY_AND_ASSIGN(FakeVideoDecoder);
};
} // namespace media
diff --git a/chromium/media/filters/fake_video_decoder_unittest.cc b/chromium/media/filters/fake_video_decoder_unittest.cc
index 09832e74bdf..68ccdb9103c 100644
--- a/chromium/media/filters/fake_video_decoder_unittest.cc
+++ b/chromium/media/filters/fake_video_decoder_unittest.cc
@@ -50,6 +50,9 @@ class FakeVideoDecoderTest
pending_decode_requests_(0),
is_reset_pending_(false) {}
+ FakeVideoDecoderTest(const FakeVideoDecoderTest&) = delete;
+ FakeVideoDecoderTest& operator=(const FakeVideoDecoderTest&) = delete;
+
virtual ~FakeVideoDecoderTest() {
Destroy();
}
@@ -135,9 +138,8 @@ class FakeVideoDecoderTest
if (num_input_buffers_ < kTotalBuffers) {
buffer = CreateFakeVideoBufferForTest(
- current_config_,
- base::TimeDelta::FromMilliseconds(kDurationMs * num_input_buffers_),
- base::TimeDelta::FromMilliseconds(kDurationMs));
+ current_config_, base::Milliseconds(kDurationMs * num_input_buffers_),
+ base::Milliseconds(kDurationMs));
total_bytes_in_buffers_ += buffer->data_size();
} else {
buffer = DecoderBuffer::CreateEOSBuffer();
@@ -244,9 +246,6 @@ class FakeVideoDecoderTest
scoped_refptr<VideoFrame> last_decoded_frame_;
int pending_decode_requests_;
bool is_reset_pending_;
-
- private:
- DISALLOW_COPY_AND_ASSIGN(FakeVideoDecoderTest);
};
INSTANTIATE_TEST_SUITE_P(NoParallelDecode,
diff --git a/chromium/media/filters/ffmpeg_aac_bitstream_converter.h b/chromium/media/filters/ffmpeg_aac_bitstream_converter.h
index ff8a03f7846..9740558c8e8 100644
--- a/chromium/media/filters/ffmpeg_aac_bitstream_converter.h
+++ b/chromium/media/filters/ffmpeg_aac_bitstream_converter.h
@@ -29,6 +29,11 @@ class MEDIA_EXPORT FFmpegAACBitstreamConverter
// |stream_codec_parameters| is retained, so it must outlive this class.
explicit FFmpegAACBitstreamConverter(
AVCodecParameters* stream_codec_parameters);
+
+ FFmpegAACBitstreamConverter(const FFmpegAACBitstreamConverter&) = delete;
+ FFmpegAACBitstreamConverter& operator=(const FFmpegAACBitstreamConverter&) =
+ delete;
+
~FFmpegAACBitstreamConverter() override;
// FFmpegBitstreamConverter implementation.
@@ -48,8 +53,6 @@ class MEDIA_EXPORT FFmpegAACBitstreamConverter
int sample_rate_index_;
int channel_configuration_;
int frame_length_;
-
- DISALLOW_COPY_AND_ASSIGN(FFmpegAACBitstreamConverter);
};
} // namespace media
diff --git a/chromium/media/filters/ffmpeg_audio_decoder.cc b/chromium/media/filters/ffmpeg_audio_decoder.cc
index f860ead2afc..4d8e312a3f0 100644
--- a/chromium/media/filters/ffmpeg_audio_decoder.cc
+++ b/chromium/media/filters/ffmpeg_audio_decoder.cc
@@ -52,7 +52,7 @@ FFmpegAudioDecoder::FFmpegAudioDecoder(
const scoped_refptr<base::SequencedTaskRunner>& task_runner,
MediaLog* media_log)
: task_runner_(task_runner),
- state_(kUninitialized),
+ state_(DecoderState::kUninitialized),
av_sample_format_(0),
media_log_(media_log),
pool_(new AudioBufferMemoryPool()) {
@@ -62,7 +62,7 @@ FFmpegAudioDecoder::FFmpegAudioDecoder(
FFmpegAudioDecoder::~FFmpegAudioDecoder() {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
- if (state_ != kUninitialized)
+ if (state_ != DecoderState::kUninitialized)
ReleaseFFmpegResources();
}
@@ -105,7 +105,7 @@ void FFmpegAudioDecoder::Initialize(const AudioDecoderConfig& config,
// Success!
config_ = config;
output_cb_ = BindToCurrentLoop(output_cb);
- state_ = kNormal;
+ state_ = DecoderState::kNormal;
std::move(bound_init_cb).Run(OkStatus());
}
@@ -113,16 +113,16 @@ void FFmpegAudioDecoder::Decode(scoped_refptr<DecoderBuffer> buffer,
DecodeCB decode_cb) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
DCHECK(decode_cb);
- CHECK_NE(state_, kUninitialized);
+ CHECK_NE(state_, DecoderState::kUninitialized);
DecodeCB decode_cb_bound = BindToCurrentLoop(std::move(decode_cb));
- if (state_ == kError) {
+ if (state_ == DecoderState::kError) {
std::move(decode_cb_bound).Run(DecodeStatus::DECODE_ERROR);
return;
}
// Do nothing if decoding has finished.
- if (state_ == kDecodeFinished) {
+ if (state_ == DecoderState::kDecodeFinished) {
std::move(decode_cb_bound).Run(DecodeStatus::OK);
return;
}
@@ -134,7 +134,7 @@ void FFmpegAudioDecoder::Reset(base::OnceClosure closure) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
avcodec_flush_buffers(codec_context_.get());
- state_ = kNormal;
+ state_ = DecoderState::kNormal;
ResetTimestampState(config_);
task_runner_->PostTask(FROM_HERE, std::move(closure));
}
@@ -142,9 +142,9 @@ void FFmpegAudioDecoder::Reset(base::OnceClosure closure) {
void FFmpegAudioDecoder::DecodeBuffer(const DecoderBuffer& buffer,
DecodeCB decode_cb) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
- DCHECK_NE(state_, kUninitialized);
- DCHECK_NE(state_, kDecodeFinished);
- DCHECK_NE(state_, kError);
+ DCHECK_NE(state_, DecoderState::kUninitialized);
+ DCHECK_NE(state_, DecoderState::kDecodeFinished);
+ DCHECK_NE(state_, DecoderState::kError);
// Make sure we are notified if http://crbug.com/49709 returns. Issue also
// occurs with some damaged files.
@@ -155,13 +155,13 @@ void FFmpegAudioDecoder::DecodeBuffer(const DecoderBuffer& buffer,
}
if (!FFmpegDecode(buffer)) {
- state_ = kError;
+ state_ = DecoderState::kError;
std::move(decode_cb).Run(DecodeStatus::DECODE_ERROR);
return;
}
if (buffer.end_of_stream())
- state_ = kDecodeFinished;
+ state_ = DecoderState::kDecodeFinished;
std::move(decode_cb).Run(DecodeStatus::OK);
}
@@ -320,7 +320,7 @@ bool FFmpegAudioDecoder::ConfigureDecoder(const AudioDecoderConfig& config) {
codec_context_->flags2 |= AV_CODEC_FLAG2_SKIP_MANUAL;
AVDictionary* codec_options = NULL;
- if (config.codec() == kCodecOpus) {
+ if (config.codec() == AudioCodec::kOpus) {
codec_context_->request_sample_fmt = AV_SAMPLE_FMT_FLT;
// Disable phase inversion to avoid artifacts in mono downmix. See
@@ -337,7 +337,7 @@ bool FFmpegAudioDecoder::ConfigureDecoder(const AudioDecoderConfig& config) {
DLOG(ERROR) << "Could not initialize audio decoder: "
<< codec_context_->codec_id;
ReleaseFFmpegResources();
- state_ = kUninitialized;
+ state_ = DecoderState::kUninitialized;
return false;
}
// Verify avcodec_open2() used all given options.
@@ -352,7 +352,7 @@ bool FFmpegAudioDecoder::ConfigureDecoder(const AudioDecoderConfig& config) {
<< " channels, but FFmpeg thinks the file contains "
<< codec_context_->channels << " channels";
ReleaseFFmpegResources();
- state_ = kUninitialized;
+ state_ = DecoderState::kUninitialized;
return false;
}
@@ -365,9 +365,10 @@ bool FFmpegAudioDecoder::ConfigureDecoder(const AudioDecoderConfig& config) {
void FFmpegAudioDecoder::ResetTimestampState(const AudioDecoderConfig& config) {
// Opus codec delay is handled by ffmpeg.
const int codec_delay =
- config.codec() == kCodecOpus ? 0 : config.codec_delay();
+ config.codec() == AudioCodec::kOpus ? 0 : config.codec_delay();
discard_helper_ = std::make_unique<AudioDiscardHelper>(
- config.samples_per_second(), codec_delay, config.codec() == kCodecVorbis);
+ config.samples_per_second(), codec_delay,
+ config.codec() == AudioCodec::kVorbis);
discard_helper_->Reset(codec_delay);
}
diff --git a/chromium/media/filters/ffmpeg_audio_decoder.h b/chromium/media/filters/ffmpeg_audio_decoder.h
index bd8f074e23b..da0f217ac9e 100644
--- a/chromium/media/filters/ffmpeg_audio_decoder.h
+++ b/chromium/media/filters/ffmpeg_audio_decoder.h
@@ -73,12 +73,7 @@ class MEDIA_EXPORT FFmpegAudioDecoder : public AudioDecoder {
// A decoding error occurs and decoding needs to stop.
// (any state) -> kNormal:
// Any time Reset() is called.
- enum DecoderState {
- kUninitialized,
- kNormal,
- kDecodeFinished,
- kError
- };
+ enum class DecoderState { kUninitialized, kNormal, kDecodeFinished, kError };
// Reset decoder and call |reset_cb_|.
void DoReset();
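The .cc hunks above and this header hunk are two halves of one change: DecoderState becomes an enum class, so its enumerators no longer leak into the surrounding scope or convert to int, which is why every comparison in the .cc file gains a DecoderState:: qualifier. A self-contained sketch of the pattern with a stand-in state machine:

#include <cassert>

enum class DecoderState { kUninitialized, kNormal, kDecodeFinished, kError };

int main() {
  DecoderState state = DecoderState::kUninitialized;

  // Scoped enumerators: a bare `kNormal` no longer compiles, and the value
  // does not implicitly convert to int, so cross-enum mix-ups are caught at
  // compile time.
  state = DecoderState::kNormal;
  assert(state != DecoderState::kError);
  return 0;
}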
diff --git a/chromium/media/filters/ffmpeg_demuxer.cc b/chromium/media/filters/ffmpeg_demuxer.cc
index a76be9a01c5..026c0f8ec4d 100644
--- a/chromium/media/filters/ffmpeg_demuxer.cc
+++ b/chromium/media/filters/ffmpeg_demuxer.cc
@@ -92,8 +92,8 @@ static base::Time ExtractTimelineOffset(
}
static base::TimeDelta FramesToTimeDelta(int frames, double sample_rate) {
- return base::TimeDelta::FromMicroseconds(
- frames * base::Time::kMicrosecondsPerSecond / sample_rate);
+ return base::Microseconds(frames * base::Time::kMicrosecondsPerSecond /
+ sample_rate);
}
static base::TimeDelta ExtractStartTime(AVStream* stream) {
@@ -121,8 +121,7 @@ static base::TimeDelta ExtractStartTime(AVStream* stream) {
// Record audio decoder config UMA stats corresponding to a src= playback.
static void RecordAudioCodecStats(const AudioDecoderConfig& audio_config) {
- UMA_HISTOGRAM_ENUMERATION("Media.AudioCodec", audio_config.codec(),
- kAudioCodecMax + 1);
+ base::UmaHistogramEnumeration("Media.AudioCodec", audio_config.codec());
}
// Record video decoder config UMA stats corresponding to a src= playback.
@@ -132,14 +131,13 @@ static void RecordVideoCodecStats(container_names::MediaContainerName container,
MediaLog* media_log) {
// TODO(xhwang): Fix these misleading metric names. They should be something
// like "Media.SRC.Xxxx". See http://crbug.com/716183.
- UMA_HISTOGRAM_ENUMERATION("Media.VideoCodec", video_config.codec(),
- kVideoCodecMax + 1);
+ base::UmaHistogramEnumeration("Media.VideoCodec", video_config.codec());
if (container == container_names::CONTAINER_MOV) {
- UMA_HISTOGRAM_ENUMERATION("Media.SRC.VideoCodec.MP4", video_config.codec(),
- kVideoCodecMax + 1);
+ base::UmaHistogramEnumeration("Media.SRC.VideoCodec.MP4",
+ video_config.codec());
} else if (container == container_names::CONTAINER_WEBM) {
- UMA_HISTOGRAM_ENUMERATION("Media.SRC.VideoCodec.WebM", video_config.codec(),
- kVideoCodecMax + 1);
+ base::UmaHistogramEnumeration("Media.SRC.VideoCodec.WebM",
+ video_config.codec());
}
}
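These two hunks trade the UMA_HISTOGRAM_ENUMERATION macro, which needs an explicit exclusive boundary such as kAudioCodecMax + 1, for the templated base::UmaHistogramEnumeration helper, which derives the range from the enum's kMaxValue member. A minimal sketch, assuming a hypothetical MyCodec enum rather than the real media::AudioCodec:

#include "base/metrics/histogram_functions.h"

// Hypothetical scoped enum; the kMaxValue alias is what lets the helper deduce
// the histogram's exclusive maximum on its own.
enum class MyCodec { kUnknown = 0, kOpus = 1, kFLAC = 2, kMaxValue = kFLAC };

void RecordCodec(MyCodec codec) {
  // Old: UMA_HISTOGRAM_ENUMERATION("Media.MyCodec", codec, kMyCodecMax + 1);
  base::UmaHistogramEnumeration("Media.MyCodec", codec);
}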
@@ -565,8 +563,7 @@ void FFmpegDemuxerStream::EnqueuePacket(ScopedAVPacket packet) {
// correctly give them unique timestamps.
buffer->set_timestamp(last_packet_timestamp_ == kNoTimestamp
? base::TimeDelta()
- : last_packet_timestamp_ +
- base::TimeDelta::FromMicroseconds(1));
+ : last_packet_timestamp_ + base::Microseconds(1));
}
// Fixup negative timestamps where the before-zero portion is completely
@@ -576,7 +573,7 @@ void FFmpegDemuxerStream::EnqueuePacket(ScopedAVPacket packet) {
auto fixed_ts = buffer->discard_padding().first + buffer->timestamp();
// Allow for rounding error in the discard padding calculations.
- if (fixed_ts == base::TimeDelta::FromMicroseconds(-1))
+ if (fixed_ts == base::Microseconds(-1))
fixed_ts = base::TimeDelta();
if (fixed_ts >= base::TimeDelta())
@@ -638,7 +635,7 @@ void FFmpegDemuxerStream::EnqueuePacket(ScopedAVPacket packet) {
buffer->set_timestamp(last_packet_timestamp_ +
(last_packet_duration_ != kNoTimestamp
? last_packet_duration_
- : base::TimeDelta::FromMicroseconds(1)));
+ : base::Microseconds(1)));
}
// The demuxer should always output positive timestamps.
@@ -884,7 +881,7 @@ void FFmpegDemuxerStream::SatisfyPendingRead() {
bool FFmpegDemuxerStream::HasAvailableCapacity() {
// Try to have two seconds' worth of encoded data per stream.
- const base::TimeDelta kCapacity = base::TimeDelta::FromSeconds(2);
+ const base::TimeDelta kCapacity = base::Seconds(2);
return buffer_queue_.IsEmpty() || buffer_queue_.Duration() < kCapacity;
}
@@ -1098,7 +1095,7 @@ void FFmpegDemuxer::SeekInternal(base::TimeDelta time,
GetFirstEnabledFFmpegStream(DemuxerStream::AUDIO);
if (audio_stream) {
const AudioDecoderConfig& config = audio_stream->audio_decoder_config();
- if (config.codec() == kCodecOpus)
+ if (config.codec() == AudioCodec::kOpus)
seek_time = std::max(start_time_, seek_time - config.seek_preroll());
}
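In the seek hunk just above, an Opus stream pulls the seek target back by the codec's seek preroll (80 ms for Opus in practice) so the decoder has enough lead-in to converge, clamped so it never lands before the stream's start time. A minimal sketch of that adjustment, assuming base/time/time.h and made-up values:

#include <algorithm>

#include "base/time/time.h"

base::TimeDelta AdjustSeekForPreroll(base::TimeDelta seek_time,
                                     base::TimeDelta start_time,
                                     base::TimeDelta seek_preroll) {
  // Example: a seek to 2 s with an 80 ms preroll demuxes from 1.92 s, while a
  // seek to 50 ms with the same preroll clamps to |start_time|.
  return std::max(start_time, seek_time - seek_preroll);
}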
diff --git a/chromium/media/filters/ffmpeg_demuxer.h b/chromium/media/filters/ffmpeg_demuxer.h
index 8d4c66e7611..cd645920f57 100644
--- a/chromium/media/filters/ffmpeg_demuxer.h
+++ b/chromium/media/filters/ffmpeg_demuxer.h
@@ -77,6 +77,9 @@ class MEDIA_EXPORT FFmpegDemuxerStream : public DemuxerStream {
AVStream* stream,
MediaLog* media_log);
+ FFmpegDemuxerStream(const FFmpegDemuxerStream&) = delete;
+ FFmpegDemuxerStream& operator=(const FFmpegDemuxerStream&) = delete;
+
~FFmpegDemuxerStream() override;
// Enqueues the given AVPacket. It is invalid to queue a |packet| after
@@ -207,8 +210,6 @@ class MEDIA_EXPORT FFmpegDemuxerStream : public DemuxerStream {
int num_discarded_packet_warnings_;
int64_t last_packet_pos_;
int64_t last_packet_dts_;
-
- DISALLOW_COPY_AND_ASSIGN(FFmpegDemuxerStream);
};
class MEDIA_EXPORT FFmpegDemuxer : public Demuxer {
@@ -219,6 +220,10 @@ class MEDIA_EXPORT FFmpegDemuxer : public Demuxer {
MediaTracksUpdatedCB media_tracks_updated_cb,
MediaLog* media_log,
bool is_local_file);
+
+ FFmpegDemuxer(const FFmpegDemuxer&) = delete;
+ FFmpegDemuxer& operator=(const FFmpegDemuxer&) = delete;
+
~FFmpegDemuxer() override;
// Demuxer implementation.
@@ -417,8 +422,6 @@ class MEDIA_EXPORT FFmpegDemuxer : public Demuxer {
base::WeakPtr<FFmpegDemuxer> weak_this_;
base::WeakPtrFactory<FFmpegDemuxer> cancel_pending_seek_factory_{this};
base::WeakPtrFactory<FFmpegDemuxer> weak_factory_{this};
-
- DISALLOW_COPY_AND_ASSIGN(FFmpegDemuxer);
};
} // namespace media
diff --git a/chromium/media/filters/ffmpeg_demuxer_unittest.cc b/chromium/media/filters/ffmpeg_demuxer_unittest.cc
index 4ddc9521edc..fbf915231d4 100644
--- a/chromium/media/filters/ffmpeg_demuxer_unittest.cc
+++ b/chromium/media/filters/ffmpeg_demuxer_unittest.cc
@@ -398,7 +398,7 @@ TEST_F(FFmpegDemuxerTest, Initialize_Successful) {
EXPECT_EQ(DemuxerStream::VIDEO, stream->type());
const VideoDecoderConfig& video_config = stream->video_decoder_config();
- EXPECT_EQ(kCodecVP8, video_config.codec());
+ EXPECT_EQ(VideoCodec::kVP8, video_config.codec());
EXPECT_EQ(VideoDecoderConfig::AlphaMode::kIsOpaque,
video_config.alpha_mode());
EXPECT_EQ(320, video_config.coded_size().width());
@@ -417,7 +417,7 @@ TEST_F(FFmpegDemuxerTest, Initialize_Successful) {
EXPECT_EQ(DemuxerStream::AUDIO, stream->type());
const AudioDecoderConfig& audio_config = stream->audio_decoder_config();
- EXPECT_EQ(kCodecVorbis, audio_config.codec());
+ EXPECT_EQ(AudioCodec::kVorbis, audio_config.codec());
EXPECT_EQ(32, audio_config.bits_per_channel());
EXPECT_EQ(CHANNEL_LAYOUT_STEREO, audio_config.channel_layout());
EXPECT_EQ(44100, audio_config.samples_per_second());
@@ -447,26 +447,26 @@ TEST_F(FFmpegDemuxerTest, Initialize_Multitrack) {
DemuxerStream* stream = streams[0];
ASSERT_TRUE(stream);
EXPECT_EQ(DemuxerStream::VIDEO, stream->type());
- EXPECT_EQ(kCodecVP8, stream->video_decoder_config().codec());
+ EXPECT_EQ(VideoCodec::kVP8, stream->video_decoder_config().codec());
// Stream #1 should be Vorbis audio.
stream = streams[1];
ASSERT_TRUE(stream);
EXPECT_EQ(DemuxerStream::AUDIO, stream->type());
- EXPECT_EQ(kCodecVorbis, stream->audio_decoder_config().codec());
+ EXPECT_EQ(AudioCodec::kVorbis, stream->audio_decoder_config().codec());
// The subtitles stream is skipped.
// Stream #2 should be Theora video.
stream = streams[2];
ASSERT_TRUE(stream);
EXPECT_EQ(DemuxerStream::VIDEO, stream->type());
- EXPECT_EQ(kCodecTheora, stream->video_decoder_config().codec());
+ EXPECT_EQ(VideoCodec::kTheora, stream->video_decoder_config().codec());
// Stream #3 should be PCM audio.
stream = streams[3];
ASSERT_TRUE(stream);
EXPECT_EQ(DemuxerStream::AUDIO, stream->type());
- EXPECT_EQ(kCodecPCM, stream->audio_decoder_config().codec());
+ EXPECT_EQ(AudioCodec::kPCM, stream->audio_decoder_config().codec());
}
#endif
@@ -572,10 +572,8 @@ TEST_F(FFmpegDemuxerTest, Seeking_PreferredStreamSelection) {
FFmpegDemuxerStream* audio =
static_cast<FFmpegDemuxerStream*>(GetStream(DemuxerStream::AUDIO));
- const base::TimeDelta video_start_time =
- base::TimeDelta::FromMicroseconds(400000);
- const base::TimeDelta audio_start_time =
- base::TimeDelta::FromMicroseconds(396000);
+ const base::TimeDelta video_start_time = base::Microseconds(400000);
+ const base::TimeDelta audio_start_time = base::Microseconds(396000);
// Seeking to a position lower than the start time of either stream should
// prefer video stream for seeking.
@@ -619,10 +617,8 @@ TEST_F(FFmpegDemuxerTest, Read_VideoPositiveStartTime) {
DemuxerStream* video = GetStream(DemuxerStream::VIDEO);
DemuxerStream* audio = GetStream(DemuxerStream::AUDIO);
- const base::TimeDelta video_start_time =
- base::TimeDelta::FromMicroseconds(400000);
- const base::TimeDelta audio_start_time =
- base::TimeDelta::FromMicroseconds(396000);
+ const base::TimeDelta video_start_time = base::Microseconds(400000);
+ const base::TimeDelta audio_start_time = base::Microseconds(396000);
// Run the test twice with a seek in between.
for (int i = 0; i < 2; ++i) {
@@ -684,11 +680,10 @@ TEST_F(FFmpegDemuxerTest, Read_AudioNegativeStartTimeAndOggDiscard_Bear) {
Read(audio, FROM_HERE, 41, 2903, true, DemuxerStream::Status::kOk,
kInfiniteDuration);
Read(audio, FROM_HERE, 173, 5805, true, DemuxerStream::Status::kOk,
- base::TimeDelta::FromMicroseconds(10159));
+ base::Microseconds(10159));
Read(audio, FROM_HERE, 148, 18866, true);
- EXPECT_EQ(base::TimeDelta::FromMicroseconds(-15964),
- demuxer_->start_time());
+ EXPECT_EQ(base::Microseconds(-15964), demuxer_->start_time());
Read(video, FROM_HERE, 5751, 0, true);
Read(video, FROM_HERE, 846, 33367, false);
@@ -717,9 +712,9 @@ TEST_F(FFmpegDemuxerTest, Read_AudioNegativeStartTimeAndOggDiscard_Sync) {
// Run the test twice with a seek in between.
for (int i = 0; i < 2; ++i) {
Read(audio, FROM_HERE, 1, 0, true, DemuxerStream::Status::kOk,
- base::TimeDelta::FromMicroseconds(2902));
+ base::Microseconds(2902));
Read(audio, FROM_HERE, 1, 2902, true);
- EXPECT_EQ(base::TimeDelta::FromMicroseconds(-2902), demuxer_->start_time());
+ EXPECT_EQ(base::Microseconds(-2902), demuxer_->start_time());
// Though the internal start time may be below zero, the exposed media time
// must always be >= zero.
@@ -817,7 +812,7 @@ TEST_F(FFmpegDemuxerTest,
// Run the test twice with a seek in between.
for (int i = 0; i < 2; ++i) {
Read(audio, FROM_HERE, 408, 0, true, DemuxerStream::Status::kOk,
- base::TimeDelta::FromMicroseconds(6500));
+ base::Microseconds(6500));
for (size_t j = 0; j < base::size(kTestExpectations); ++j) {
Read(audio, FROM_HERE, kTestExpectations[j][0], kTestExpectations[j][1],
@@ -859,7 +854,7 @@ TEST_F(FFmpegDemuxerTest, Read_AudioNegativeStartTimeAndOpusSfxDiscard_Sync) {
// has the same sequence, but doesn't have a different discard padding
// after seeking to the start. Why is this test different?
Read(audio, FROM_HERE, 314, 0, true, DemuxerStream::Status::kOk,
- i == 0 ? base::TimeDelta::FromMicroseconds(6500) : base::TimeDelta());
+ i == 0 ? base::Microseconds(6500) : base::TimeDelta());
Read(audio, FROM_HERE, 244, 20000, true);
// Though the internal start time may be below zero, the exposed media time
@@ -885,7 +880,7 @@ TEST_F(FFmpegDemuxerTest, Read_DiscardDisabledVideoStream) {
// earliest position guaranteed to give us key frames for all enabled streams.
// But when the video stream is disabled, FFmpeg can start reading from 1.987s
// which is the earliest audio key frame before the 2.0s |seek_target|.
- const base::TimeDelta seek_target = base::TimeDelta::FromMilliseconds(2000);
+ const base::TimeDelta seek_target = base::Milliseconds(2000);
CreateDemuxer("bear-vp8-webvtt.webm");
InitializeDemuxer();
@@ -914,7 +909,7 @@ TEST_F(FFmpegDemuxerTest, Read_EndOfStream_NoDuration) {
CreateDemuxer("bear-320x240.webm");
InitializeDemuxer();
SetDurationKnown(false);
- EXPECT_CALL(host_, SetDuration(base::TimeDelta::FromMilliseconds(2744)));
+ EXPECT_CALL(host_, SetDuration(base::Milliseconds(2744)));
ReadUntilEndOfStream(GetStream(DemuxerStream::AUDIO));
ReadUntilEndOfStream(GetStream(DemuxerStream::VIDEO));
}
@@ -924,7 +919,7 @@ TEST_F(FFmpegDemuxerTest, Read_EndOfStream_NoDuration_VideoOnly) {
CreateDemuxer("bear-320x240-video-only.webm");
InitializeDemuxer();
SetDurationKnown(false);
- EXPECT_CALL(host_, SetDuration(base::TimeDelta::FromMilliseconds(2703)));
+ EXPECT_CALL(host_, SetDuration(base::Milliseconds(2703)));
ReadUntilEndOfStream(GetStream(DemuxerStream::VIDEO));
}
@@ -933,7 +928,7 @@ TEST_F(FFmpegDemuxerTest, Read_EndOfStream_NoDuration_AudioOnly) {
CreateDemuxer("bear-320x240-audio-only.webm");
InitializeDemuxer();
SetDurationKnown(false);
- EXPECT_CALL(host_, SetDuration(base::TimeDelta::FromMilliseconds(2744)));
+ EXPECT_CALL(host_, SetDuration(base::Milliseconds(2744)));
ReadUntilEndOfStream(GetStream(DemuxerStream::AUDIO));
}
@@ -943,7 +938,7 @@ TEST_F(FFmpegDemuxerTest, Read_EndOfStream_NoDuration_UnsupportedStream) {
CreateDemuxer("vorbis_audio_wmv_video.mkv");
InitializeDemuxer();
SetDurationKnown(false);
- EXPECT_CALL(host_, SetDuration(base::TimeDelta::FromMilliseconds(991)));
+ EXPECT_CALL(host_, SetDuration(base::Milliseconds(991)));
ReadUntilEndOfStream(GetStream(DemuxerStream::AUDIO));
}
@@ -964,8 +959,7 @@ TEST_F(FFmpegDemuxerTest, Seek) {
// Issue a simple forward seek, which should discard queued packets.
WaitableMessageLoopEvent event;
- demuxer_->Seek(base::TimeDelta::FromMicroseconds(1000000),
- event.GetPipelineStatusCB());
+ demuxer_->Seek(base::Microseconds(1000000), event.GetPipelineStatusCB());
event.RunAndWaitForStatus(PIPELINE_OK);
// Audio read #1.
@@ -996,10 +990,9 @@ TEST_F(FFmpegDemuxerTest, CancelledSeek) {
// Issue a simple forward seek, which should discard queued packets.
WaitableMessageLoopEvent event;
- demuxer_->Seek(base::TimeDelta::FromMicroseconds(1000000),
- event.GetPipelineStatusCB());
+ demuxer_->Seek(base::Microseconds(1000000), event.GetPipelineStatusCB());
// FFmpegDemuxer does not care what the previous seek time was when canceling.
- demuxer_->CancelPendingSeek(base::TimeDelta::FromSeconds(12345));
+ demuxer_->CancelPendingSeek(base::Seconds(12345));
event.RunAndWaitForStatus(PIPELINE_OK);
}
@@ -1044,8 +1037,7 @@ TEST_F(FFmpegDemuxerTest, SeekWithCuesBeforeFirstCluster) {
// Issue a simple forward seek, which should discard queued packets.
WaitableMessageLoopEvent event;
- demuxer_->Seek(base::TimeDelta::FromMicroseconds(2500000),
- event.GetPipelineStatusCB());
+ demuxer_->Seek(base::Microseconds(2500000), event.GetPipelineStatusCB());
event.RunAndWaitForStatus(PIPELINE_OK);
// Audio read #1.
@@ -1313,8 +1305,8 @@ TEST_F(FFmpegDemuxerTest, HEVC_in_MP4_container) {
VideoColorSpace::TransferID::SMPTE170M,
VideoColorSpace::MatrixID::SMPTE170M,
gfx::ColorSpace::RangeID::LIMITED);
- VideoType hevc_type = {VideoCodec::kCodecHEVC,
- VideoCodecProfile::HEVCPROFILE_MAIN, 10, color_space};
+ VideoType hevc_type = {VideoCodec::kHEVC, VideoCodecProfile::HEVCPROFILE_MAIN,
+ 10, color_space};
EXPECT_CALL(media_client, IsSupportedVideoType(Eq(hevc_type)))
.WillRepeatedly(Return(true));
@@ -1340,7 +1332,7 @@ TEST_F(FFmpegDemuxerTest, Read_AC3_Audio) {
MockMediaClient media_client;
SetMediaClient(&media_client);
- AudioType ac3_type = {AudioCodec::kCodecAC3};
+ AudioType ac3_type = {AudioCodec::kAC3};
EXPECT_CALL(media_client, IsSupportedAudioType(Eq(ac3_type)))
.WillRepeatedly(Return(true));
@@ -1367,7 +1359,7 @@ TEST_F(FFmpegDemuxerTest, Read_EAC3_Audio) {
MockMediaClient media_client;
SetMediaClient(&media_client);
- AudioType eac3_type = {AudioCodec::kCodecEAC3};
+ AudioType eac3_type = {AudioCodec::kEAC3};
EXPECT_CALL(media_client, IsSupportedAudioType(Eq(eac3_type)))
.WillRepeatedly(Return(true));
@@ -1576,7 +1568,7 @@ static void VerifyFlacStream(DemuxerStream* stream,
EXPECT_EQ(DemuxerStream::AUDIO, stream->type());
const AudioDecoderConfig& audio_config = stream->audio_decoder_config();
- EXPECT_EQ(kCodecFLAC, audio_config.codec());
+ EXPECT_EQ(AudioCodec::kFLAC, audio_config.codec());
EXPECT_EQ(expected_bits_per_channel, audio_config.bits_per_channel());
EXPECT_EQ(expected_channel_layout, audio_config.channel_layout());
EXPECT_EQ(expected_samples_per_second, audio_config.samples_per_second());
diff --git a/chromium/media/filters/ffmpeg_glue.h b/chromium/media/filters/ffmpeg_glue.h
index c5aec54b87c..647ee5f1d78 100644
--- a/chromium/media/filters/ffmpeg_glue.h
+++ b/chromium/media/filters/ffmpeg_glue.h
@@ -61,6 +61,10 @@ class MEDIA_EXPORT FFmpegGlue {
public:
// See file documentation for usage. |protocol| must outlive FFmpegGlue.
explicit FFmpegGlue(FFmpegURLProtocol* protocol);
+
+ FFmpegGlue(const FFmpegGlue&) = delete;
+ FFmpegGlue& operator=(const FFmpegGlue&) = delete;
+
~FFmpegGlue();
// Opens an AVFormatContext specially prepared to process reads and seeks
@@ -85,8 +89,6 @@ class MEDIA_EXPORT FFmpegGlue {
std::unique_ptr<AVIOContext, ScopedPtrAVFree> avio_context_;
container_names::MediaContainerName container_ =
container_names::CONTAINER_UNKNOWN;
-
- DISALLOW_COPY_AND_ASSIGN(FFmpegGlue);
};
} // namespace media
diff --git a/chromium/media/filters/ffmpeg_glue_unittest.cc b/chromium/media/filters/ffmpeg_glue_unittest.cc
index b8d83d1ac2e..833644c938a 100644
--- a/chromium/media/filters/ffmpeg_glue_unittest.cc
+++ b/chromium/media/filters/ffmpeg_glue_unittest.cc
@@ -32,6 +32,10 @@ namespace media {
class MockProtocol : public FFmpegURLProtocol {
public:
MockProtocol() = default;
+
+ MockProtocol(const MockProtocol&) = delete;
+ MockProtocol& operator=(const MockProtocol&) = delete;
+
virtual ~MockProtocol() = default;
MOCK_METHOD2(Read, int(int size, uint8_t* data));
@@ -39,9 +43,6 @@ class MockProtocol : public FFmpegURLProtocol {
MOCK_METHOD1(SetPosition, bool(int64_t position));
MOCK_METHOD1(GetSize, bool(int64_t* size_out));
MOCK_METHOD0(IsStreaming, bool());
-
- private:
- DISALLOW_COPY_AND_ASSIGN(MockProtocol);
};
class FFmpegGlueTest : public ::testing::Test {
@@ -55,6 +56,9 @@ class FFmpegGlueTest : public ::testing::Test {
CHECK(glue_->format_context()->pb);
}
+ FFmpegGlueTest(const FFmpegGlueTest&) = delete;
+ FFmpegGlueTest& operator=(const FFmpegGlueTest&) = delete;
+
~FFmpegGlueTest() override {
// Ensure |glue_| and |protocol_| are still alive.
CHECK(glue_.get());
@@ -76,9 +80,6 @@ class FFmpegGlueTest : public ::testing::Test {
protected:
std::unique_ptr<FFmpegGlue> glue_;
std::unique_ptr<StrictMock<MockProtocol>> protocol_;
-
- private:
- DISALLOW_COPY_AND_ASSIGN(FFmpegGlueTest);
};
class FFmpegGlueDestructionTest : public ::testing::Test {
@@ -94,6 +95,10 @@ class FFmpegGlueDestructionTest : public ::testing::Test {
CHECK(glue_->format_context()->pb);
}
+ FFmpegGlueDestructionTest(const FFmpegGlueDestructionTest&) = delete;
+ FFmpegGlueDestructionTest& operator=(const FFmpegGlueDestructionTest&) =
+ delete;
+
~FFmpegGlueDestructionTest() override {
// Ensure Initialize() was called.
CHECK(glue_.get());
@@ -113,8 +118,6 @@ class FFmpegGlueDestructionTest : public ::testing::Test {
private:
std::unique_ptr<InMemoryUrlProtocol> protocol_;
scoped_refptr<DecoderBuffer> data_;
-
- DISALLOW_COPY_AND_ASSIGN(FFmpegGlueDestructionTest);
};
// Tests that ensure we are using the correct AVInputFormat name given by ffmpeg
@@ -122,6 +125,10 @@ class FFmpegGlueDestructionTest : public ::testing::Test {
class FFmpegGlueContainerTest : public FFmpegGlueDestructionTest {
public:
FFmpegGlueContainerTest() = default;
+
+ FFmpegGlueContainerTest(const FFmpegGlueContainerTest&) = delete;
+ FFmpegGlueContainerTest& operator=(const FFmpegGlueContainerTest&) = delete;
+
~FFmpegGlueContainerTest() override = default;
protected:
@@ -137,7 +144,6 @@ class FFmpegGlueContainerTest : public FFmpegGlueDestructionTest {
private:
base::HistogramTester histogram_tester_;
- DISALLOW_COPY_AND_ASSIGN(FFmpegGlueContainerTest);
};
// Ensure writing has been disabled.
diff --git a/chromium/media/filters/ffmpeg_h264_to_annex_b_bitstream_converter.h b/chromium/media/filters/ffmpeg_h264_to_annex_b_bitstream_converter.h
index f15f4928a43..fbc7784b3f9 100644
--- a/chromium/media/filters/ffmpeg_h264_to_annex_b_bitstream_converter.h
+++ b/chromium/media/filters/ffmpeg_h264_to_annex_b_bitstream_converter.h
@@ -27,6 +27,11 @@ class MEDIA_EXPORT FFmpegH264ToAnnexBBitstreamConverter
explicit FFmpegH264ToAnnexBBitstreamConverter(
AVCodecParameters* stream_codec_parameters);
+ FFmpegH264ToAnnexBBitstreamConverter(
+ const FFmpegH264ToAnnexBBitstreamConverter&) = delete;
+ FFmpegH264ToAnnexBBitstreamConverter& operator=(
+ const FFmpegH264ToAnnexBBitstreamConverter&) = delete;
+
~FFmpegH264ToAnnexBBitstreamConverter() override;
// FFmpegBitstreamConverter implementation.
@@ -60,8 +65,6 @@ class MEDIA_EXPORT FFmpegH264ToAnnexBBitstreamConverter
// Variable to hold a pointer to memory where we can access the global
// data from the FFmpeg file format's global headers.
AVCodecParameters* stream_codec_parameters_;
-
- DISALLOW_COPY_AND_ASSIGN(FFmpegH264ToAnnexBBitstreamConverter);
};
} // namespace media
diff --git a/chromium/media/filters/ffmpeg_h265_to_annex_b_bitstream_converter.h b/chromium/media/filters/ffmpeg_h265_to_annex_b_bitstream_converter.h
index 9b52a08324f..2351445734f 100644
--- a/chromium/media/filters/ffmpeg_h265_to_annex_b_bitstream_converter.h
+++ b/chromium/media/filters/ffmpeg_h265_to_annex_b_bitstream_converter.h
@@ -29,6 +29,11 @@ class MEDIA_EXPORT FFmpegH265ToAnnexBBitstreamConverter
explicit FFmpegH265ToAnnexBBitstreamConverter(
AVCodecParameters* stream_codec_parameters);
+ FFmpegH265ToAnnexBBitstreamConverter(
+ const FFmpegH265ToAnnexBBitstreamConverter&) = delete;
+ FFmpegH265ToAnnexBBitstreamConverter& operator=(
+ const FFmpegH265ToAnnexBBitstreamConverter&) = delete;
+
~FFmpegH265ToAnnexBBitstreamConverter() override;
// FFmpegBitstreamConverter implementation.
@@ -40,11 +45,8 @@ class MEDIA_EXPORT FFmpegH265ToAnnexBBitstreamConverter
// Variable to hold a pointer to memory where we can access the global
// data from the FFmpeg file format's global headers.
AVCodecParameters* stream_codec_parameters_;
-
- DISALLOW_COPY_AND_ASSIGN(FFmpegH265ToAnnexBBitstreamConverter);
};
} // namespace media
#endif // MEDIA_FILTERS_FFMPEG_H265_TO_ANNEX_B_BITSTREAM_CONVERTER_H_
-
diff --git a/chromium/media/filters/ffmpeg_video_decoder.cc b/chromium/media/filters/ffmpeg_video_decoder.cc
index 5aba5f00947..3bca7356c96 100644
--- a/chromium/media/filters/ffmpeg_video_decoder.cc
+++ b/chromium/media/filters/ffmpeg_video_decoder.cc
@@ -37,23 +37,23 @@ static int GetFFmpegVideoDecoderThreadCount(const VideoDecoderConfig& config) {
// Some ffmpeg codecs don't actually benefit from using more threads.
// Only add more threads for those codecs that we know will benefit.
switch (config.codec()) {
- case kUnknownVideoCodec:
- case kCodecVC1:
- case kCodecMPEG2:
- case kCodecHEVC:
- case kCodecVP9:
- case kCodecAV1:
- case kCodecDolbyVision:
+ case VideoCodec::kUnknown:
+ case VideoCodec::kVC1:
+ case VideoCodec::kMPEG2:
+ case VideoCodec::kHEVC:
+ case VideoCodec::kVP9:
+ case VideoCodec::kAV1:
+ case VideoCodec::kDolbyVision:
// We do not compile ffmpeg with support for any of these codecs.
break;
- case kCodecTheora:
- case kCodecMPEG4:
+ case VideoCodec::kTheora:
+ case VideoCodec::kMPEG4:
// No extra threads for these codecs.
break;
- case kCodecH264:
- case kCodecVP8:
+ case VideoCodec::kH264:
+ case VideoCodec::kVP8:
// Normalize to three threads for 1080p content, then scale linearly
// with number of pixels.
// Examples:
@@ -89,7 +89,7 @@ bool FFmpegVideoDecoder::IsCodecSupported(VideoCodec codec) {
SupportedVideoDecoderConfigs FFmpegVideoDecoder::SupportedConfigsForWebRTC() {
SupportedVideoDecoderConfigs supported_configs;
- if (IsCodecSupported(kCodecH264)) {
+ if (IsCodecSupported(VideoCodec::kH264)) {
supported_configs.emplace_back(/*profile_min=*/H264PROFILE_BASELINE,
/*profile_max=*/H264PROFILE_HIGH,
/*coded_size_min=*/kDefaultSwDecodeSizeMin,
@@ -97,7 +97,7 @@ SupportedVideoDecoderConfigs FFmpegVideoDecoder::SupportedConfigsForWebRTC() {
/*allow_encrypted=*/false,
/*require_encrypted=*/false);
}
- if (IsCodecSupported(kCodecVP8)) {
+ if (IsCodecSupported(VideoCodec::kVP8)) {
supported_configs.emplace_back(/*profile_min=*/VP8PROFILE_ANY,
/*profile_max=*/VP8PROFILE_ANY,
/*coded_size_min=*/kDefaultSwDecodeSizeMin,
@@ -260,7 +260,7 @@ void FFmpegVideoDecoder::Initialize(const VideoDecoderConfig& config,
// Success!
config_ = config;
output_cb_ = output_cb;
- state_ = kNormal;
+ state_ = DecoderState::kNormal;
std::move(bound_init_cb).Run(OkStatus());
}
@@ -270,48 +270,48 @@ void FFmpegVideoDecoder::Decode(scoped_refptr<DecoderBuffer> buffer,
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
DCHECK(buffer.get());
DCHECK(decode_cb);
- CHECK_NE(state_, kUninitialized);
+ CHECK_NE(state_, DecoderState::kUninitialized);
DecodeCB decode_cb_bound = BindToCurrentLoop(std::move(decode_cb));
- if (state_ == kError) {
+ if (state_ == DecoderState::kError) {
std::move(decode_cb_bound).Run(DecodeStatus::DECODE_ERROR);
return;
}
- if (state_ == kDecodeFinished) {
+ if (state_ == DecoderState::kDecodeFinished) {
std::move(decode_cb_bound).Run(DecodeStatus::OK);
return;
}
- DCHECK_EQ(state_, kNormal);
+ DCHECK_EQ(state_, DecoderState::kNormal);
// During decode, because reads are issued asynchronously, it is possible to
// receive multiple end of stream buffers since each decode is acked. There
// are three states the decoder can be in:
//
- // kNormal: This is the starting state. Buffers are decoded. Decode errors
- // are discarded.
- // kDecodeFinished: All calls return empty frames.
- // kError: Unexpected error happened.
+ // DecoderState::kNormal: This is the starting state. Buffers are decoded.
+ // Decode errors are discarded.
+ // DecoderState::kDecodeFinished: All calls return empty frames.
+ // DecoderState::kError: Unexpected error happened.
//
// These are the possible state transitions.
//
- // kNormal -> kDecodeFinished:
+ // DecoderState::kNormal -> DecoderState::kDecodeFinished:
// When EOS buffer is received and the codec has been flushed.
- // kNormal -> kError:
+ // DecoderState::kNormal -> DecoderState::kError:
// A decoding error occurs and decoding needs to stop.
- // (any state) -> kNormal:
+ // (any state) -> DecoderState::kNormal:
// Any time Reset() is called.
if (!FFmpegDecode(*buffer)) {
- state_ = kError;
+ state_ = DecoderState::kError;
std::move(decode_cb_bound).Run(DecodeStatus::DECODE_ERROR);
return;
}
if (buffer->end_of_stream())
- state_ = kDecodeFinished;
+ state_ = DecoderState::kDecodeFinished;
// VideoDecoderShim expects that |decode_cb| is called only after
// |output_cb_|.
@@ -323,7 +323,7 @@ void FFmpegVideoDecoder::Reset(base::OnceClosure closure) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
avcodec_flush_buffers(codec_context_.get());
- state_ = kNormal;
+ state_ = DecoderState::kNormal;
// PostTask() to avoid calling |closure| immediately.
base::SequencedTaskRunnerHandle::Get()->PostTask(FROM_HERE,
std::move(closure));
@@ -332,7 +332,7 @@ void FFmpegVideoDecoder::Reset(base::OnceClosure closure) {
FFmpegVideoDecoder::~FFmpegVideoDecoder() {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
- if (state_ != kUninitialized)
+ if (state_ != DecoderState::kUninitialized)
ReleaseFFmpegResources();
}
@@ -393,8 +393,7 @@ bool FFmpegVideoDecoder::OnNewFrame(AVFrame* frame) {
scoped_refptr<VideoFrame> video_frame =
reinterpret_cast<VideoFrame*>(av_buffer_get_opaque(frame->buf[0]));
- video_frame->set_timestamp(
- base::TimeDelta::FromMicroseconds(frame->reordered_opaque));
+ video_frame->set_timestamp(base::Microseconds(frame->reordered_opaque));
video_frame->metadata().power_efficient = false;
output_cb_.Run(video_frame);
return true;
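Rough worked example (an assumption, not code from this patch): the comment above says the thread count is normalized to three threads for 1080p content and scaled linearly with pixel count; the exact limits are not visible in this hunk, so the clamp bounds below are illustrative guesses.

#include <algorithm>
#include <cstdio>

// Hypothetical helper mirroring the "three threads at 1080p, linear in
// pixels" heuristic; the 1..16 clamp is an assumed bound for illustration.
int EstimateThreads(int width, int height) {
  const double kPixels1080p = 1920.0 * 1080.0;
  const int threads = static_cast<int>(3.0 * width * height / kPixels1080p);
  return std::clamp(threads, 1, 16);
}

int main() {
  std::printf("1080p -> %d threads\n", EstimateThreads(1920, 1080));  // 3
  std::printf("4K    -> %d threads\n", EstimateThreads(3840, 2160));  // 12
  return 0;
}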
diff --git a/chromium/media/filters/ffmpeg_video_decoder.h b/chromium/media/filters/ffmpeg_video_decoder.h
index 08b59748605..2156df03ee6 100644
--- a/chromium/media/filters/ffmpeg_video_decoder.h
+++ b/chromium/media/filters/ffmpeg_video_decoder.h
@@ -33,6 +33,10 @@ class MEDIA_EXPORT FFmpegVideoDecoder : public VideoDecoder {
static SupportedVideoDecoderConfigs SupportedConfigsForWebRTC();
explicit FFmpegVideoDecoder(MediaLog* media_log);
+
+ FFmpegVideoDecoder(const FFmpegVideoDecoder&) = delete;
+ FFmpegVideoDecoder& operator=(const FFmpegVideoDecoder&) = delete;
+
~FFmpegVideoDecoder() override;
// Allow decoding of individual NALU. Entire frames are required by default.
@@ -60,12 +64,7 @@ class MEDIA_EXPORT FFmpegVideoDecoder : public VideoDecoder {
void force_allocation_error_for_testing() { force_allocation_error_ = true; }
private:
- enum DecoderState {
- kUninitialized,
- kNormal,
- kDecodeFinished,
- kError
- };
+ enum class DecoderState { kUninitialized, kNormal, kDecodeFinished, kError };
// Handles decoding of an unencrypted encoded buffer. A return value of false
// indicates that an error has occurred.
@@ -83,7 +82,7 @@ class MEDIA_EXPORT FFmpegVideoDecoder : public VideoDecoder {
MediaLog* const media_log_;
- DecoderState state_ = kUninitialized;
+ DecoderState state_ = DecoderState::kUninitialized;
OutputCB output_cb_;
@@ -99,8 +98,6 @@ class MEDIA_EXPORT FFmpegVideoDecoder : public VideoDecoder {
bool force_allocation_error_ = false;
std::unique_ptr<FFmpegDecodingLoop> decoding_loop_;
-
- DISALLOW_COPY_AND_ASSIGN(FFmpegVideoDecoder);
};
} // namespace media
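Side note with a tiny sketch: converting DecoderState to an enum class is why every use in ffmpeg_video_decoder.cc above gains the DecoderState:: qualifier; the scoped form keeps the enumerators out of the class scope and removes the implicit conversion to int. Standalone illustration (the surrounding class is omitted):

enum OldState { kUninitialized, kNormal };  // unscoped: names leak into scope
enum class DecoderState { kUninitialized, kNormal, kDecodeFinished, kError };

int main() {
  int as_int = kNormal;                    // fine for the old unscoped enum
  DecoderState s = DecoderState::kNormal;  // new qualified form used above
  // int bad = s;                          // no longer compiles: no implicit conversion
  return (as_int == 1 && s == DecoderState::kNormal) ? 0 : 1;
}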
diff --git a/chromium/media/filters/ffmpeg_video_decoder_unittest.cc b/chromium/media/filters/ffmpeg_video_decoder_unittest.cc
index dead41951b0..6128e794900 100644
--- a/chromium/media/filters/ffmpeg_video_decoder_unittest.cc
+++ b/chromium/media/filters/ffmpeg_video_decoder_unittest.cc
@@ -65,6 +65,9 @@ class FFmpegVideoDecoderTest : public testing::Test {
corrupt_i_frame_buffer_ = ReadTestDataFile("vp8-corrupt-I-frame");
}
+ FFmpegVideoDecoderTest(const FFmpegVideoDecoderTest&) = delete;
+ FFmpegVideoDecoderTest& operator=(const FFmpegVideoDecoderTest&) = delete;
+
~FFmpegVideoDecoderTest() override { Destroy(); }
void Initialize() {
@@ -215,9 +218,6 @@ class FFmpegVideoDecoderTest : public testing::Test {
scoped_refptr<DecoderBuffer> corrupt_i_frame_buffer_;
OutputFrames output_frames_;
-
- private:
- DISALLOW_COPY_AND_ASSIGN(FFmpegVideoDecoderTest);
};
TEST_F(FFmpegVideoDecoderTest, Initialize_Normal) {
@@ -226,7 +226,7 @@ TEST_F(FFmpegVideoDecoderTest, Initialize_Normal) {
TEST_F(FFmpegVideoDecoderTest, Initialize_OpenDecoderFails) {
// Specify Theora w/o extra data so that avcodec_open2() fails.
- VideoDecoderConfig config(kCodecTheora, VIDEO_CODEC_PROFILE_UNKNOWN,
+ VideoDecoderConfig config(VideoCodec::kTheora, VIDEO_CODEC_PROFILE_UNKNOWN,
VideoDecoderConfig::AlphaMode::kIsOpaque,
VideoColorSpace(), kNoTransformation, kCodedSize,
kVisibleRect, kNaturalSize, EmptyExtraData(),
diff --git a/chromium/media/filters/file_data_source.h b/chromium/media/filters/file_data_source.h
index 86f2fde9080..f64f29258c7 100644
--- a/chromium/media/filters/file_data_source.h
+++ b/chromium/media/filters/file_data_source.h
@@ -21,6 +21,10 @@ namespace media {
class MEDIA_EXPORT FileDataSource : public DataSource {
public:
FileDataSource();
+
+ FileDataSource(const FileDataSource&) = delete;
+ FileDataSource& operator=(const FileDataSource&) = delete;
+
~FileDataSource() override;
bool Initialize(const base::FilePath& file_path) WARN_UNUSED_RESULT;
@@ -49,8 +53,6 @@ class MEDIA_EXPORT FileDataSource : public DataSource {
bool force_read_errors_;
bool force_streaming_;
uint64_t bytes_read_;
-
- DISALLOW_COPY_AND_ASSIGN(FileDataSource);
};
} // namespace media
diff --git a/chromium/media/filters/frame_buffer_pool.cc b/chromium/media/filters/frame_buffer_pool.cc
index 7a140f1c782..255256f89c5 100644
--- a/chromium/media/filters/frame_buffer_pool.cc
+++ b/chromium/media/filters/frame_buffer_pool.cc
@@ -13,6 +13,7 @@
#include "base/memory/free_deleter.h"
#include "base/process/memory.h"
#include "base/sequenced_task_runner.h"
+#include "base/strings/stringprintf.h"
#include "base/threading/sequenced_task_runner_handle.h"
#include "base/trace_event/memory_allocator_dump.h"
#include "base/trace_event/memory_dump_manager.h"
@@ -141,9 +142,13 @@ bool FrameBufferPool::OnMemoryDump(
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
base::trace_event::MemoryAllocatorDump* memory_dump =
- pmd->CreateAllocatorDump("media/frame_buffers/memory_pool");
+ pmd->CreateAllocatorDump(
+ base::StringPrintf("media/frame_buffers/memory_pool/0x%" PRIXPTR,
+ reinterpret_cast<uintptr_t>(this)));
base::trace_event::MemoryAllocatorDump* used_memory_dump =
- pmd->CreateAllocatorDump("media/frame_buffers/memory_pool/used");
+ pmd->CreateAllocatorDump(
+ base::StringPrintf("media/frame_buffers/memory_pool/used/0x%" PRIXPTR,
+ reinterpret_cast<uintptr_t>(this)));
pmd->AddSuballocation(memory_dump->guid(),
base::trace_event::MemoryDumpManager::GetInstance()
@@ -222,8 +227,7 @@ void FrameBufferPool::OnVideoFrameDestroyed(
base::EraseIf(frame_buffers_, [now](const std::unique_ptr<FrameBuffer>& buf) {
return !IsUsed(buf.get()) &&
- now - buf->last_use_time >
- base::TimeDelta::FromSeconds(kStaleFrameLimitSecs);
+ now - buf->last_use_time > base::Seconds(kStaleFrameLimitSecs);
});
}
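For context, a sketch of just the dump-name construction added above, which keys each FrameBufferPool's memory-infra dump by the pool's own address so several pools no longer collide on one name (hence the new stringprintf.h include). Assumes a Chromium checkout for base::StringPrintf; the helper name is hypothetical.

#include <cinttypes>
#include <string>

#include "base/strings/stringprintf.h"

// Hypothetical free function showing the naming scheme used in OnMemoryDump().
std::string PoolDumpName(const void* pool) {
  return base::StringPrintf("media/frame_buffers/memory_pool/0x%" PRIXPTR,
                            reinterpret_cast<uintptr_t>(pool));
}
// e.g. PoolDumpName(this) -> "media/frame_buffers/memory_pool/0x7f3a5c001200"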
diff --git a/chromium/media/filters/frame_buffer_pool_unittest.cc b/chromium/media/filters/frame_buffer_pool_unittest.cc
index 0cdf4d040b0..a06039c0fe1 100644
--- a/chromium/media/filters/frame_buffer_pool_unittest.cc
+++ b/chromium/media/filters/frame_buffer_pool_unittest.cc
@@ -92,8 +92,7 @@ TEST(FrameBufferPool, DeferredDestruction) {
EXPECT_EQ(3u, pool->get_pool_size_for_testing());
// Advance some time, but not enough to trigger expiration.
- test_clock.Advance(
- base::TimeDelta::FromSeconds(FrameBufferPool::kStaleFrameLimitSecs / 2));
+ test_clock.Advance(base::Seconds(FrameBufferPool::kStaleFrameLimitSecs / 2));
// We should still have 3 frame buffers in the pool at this point.
frame_release_cb = pool->CreateFrameCallback(priv2);
@@ -102,8 +101,7 @@ TEST(FrameBufferPool, DeferredDestruction) {
std::move(frame_release_cb).Run();
EXPECT_EQ(3u, pool->get_pool_size_for_testing());
- test_clock.Advance(
- base::TimeDelta::FromSeconds(FrameBufferPool::kStaleFrameLimitSecs + 1));
+ test_clock.Advance(base::Seconds(FrameBufferPool::kStaleFrameLimitSecs + 1));
// All but this most recently released frame should remain now.
frame_release_cb = pool->CreateFrameCallback(priv3);
diff --git a/chromium/media/filters/frame_processor.cc b/chromium/media/filters/frame_processor.cc
index 11c72893684..0e8399a7f0a 100644
--- a/chromium/media/filters/frame_processor.cc
+++ b/chromium/media/filters/frame_processor.cc
@@ -33,6 +33,10 @@ class MseTrackBuffer {
MseTrackBuffer(ChunkDemuxerStream* stream,
MediaLog* media_log,
SourceBufferParseWarningCB parse_warning_cb);
+
+ MseTrackBuffer(const MseTrackBuffer&) = delete;
+ MseTrackBuffer& operator=(const MseTrackBuffer&) = delete;
+
~MseTrackBuffer();
// Get/set |last_decode_timestamp_|.
@@ -186,8 +190,6 @@ class MseTrackBuffer {
// Counter that limits spam to |media_log_| for MseTrackBuffer warnings.
int num_keyframe_time_greater_than_dependant_warnings_ = 0;
-
- DISALLOW_COPY_AND_ASSIGN(MseTrackBuffer);
};
MseTrackBuffer::MseTrackBuffer(ChunkDemuxerStream* stream,
@@ -562,8 +564,8 @@ void FrameProcessor::OnPossibleAudioConfigUpdate(
return;
current_audio_config_ = config;
- sample_duration_ = base::TimeDelta::FromSecondsD(
- 1.0 / current_audio_config_.samples_per_second());
+ sample_duration_ =
+ base::Seconds(1.0 / current_audio_config_.samples_per_second());
has_dependent_audio_frames_ =
current_audio_config_.profile() == AudioCodecProfile::kXHE_AAC;
last_audio_pts_for_nonkeyframe_monotonicity_check_ = kNoTimestamp;
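Quick numeric check of the sample_duration_ arithmetic above (illustration only, with std::chrono standing in for base::TimeDelta so it runs outside Chromium): one sample lasts 1/samples_per_second seconds, i.e. about 22.68 microseconds at 44.1 kHz and about 20.83 microseconds at 48 kHz.

#include <chrono>
#include <cstdio>

int main() {
  const int sample_rates[] = {44100, 48000};
  for (int rate : sample_rates) {
    const std::chrono::duration<double> sample_duration(1.0 / rate);
    const std::chrono::duration<double, std::micro> in_us = sample_duration;
    std::printf("%d Hz -> %.2f us per sample\n", rate, in_us.count());
  }
  return 0;
}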
diff --git a/chromium/media/filters/frame_processor.h b/chromium/media/filters/frame_processor.h
index 7095bb603f6..69ae90a37b5 100644
--- a/chromium/media/filters/frame_processor.h
+++ b/chromium/media/filters/frame_processor.h
@@ -28,6 +28,10 @@ class MEDIA_EXPORT FrameProcessor {
using UpdateDurationCB = base::RepeatingCallback<void(base::TimeDelta)>;
FrameProcessor(UpdateDurationCB update_duration_cb, MediaLog* media_log);
+
+ FrameProcessor(const FrameProcessor&) = delete;
+ FrameProcessor& operator=(const FrameProcessor&) = delete;
+
~FrameProcessor();
// This must be called exactly once, before doing any track buffer creation or
@@ -211,8 +215,6 @@ class MEDIA_EXPORT FrameProcessor {
int num_skipped_empty_frame_warnings_ = 0;
int num_partial_discard_warnings_ = 0;
int num_dropped_frame_warnings_ = 0;
-
- DISALLOW_COPY_AND_ASSIGN(FrameProcessor);
};
} // namespace media
diff --git a/chromium/media/filters/frame_processor_unittest.cc b/chromium/media/filters/frame_processor_unittest.cc
index 9547f723297..bd1c325d167 100644
--- a/chromium/media/filters/frame_processor_unittest.cc
+++ b/chromium/media/filters/frame_processor_unittest.cc
@@ -35,10 +35,10 @@ using ::testing::Values;
namespace {
-// Helper to shorten "base::TimeDelta::FromMilliseconds(...)" in these test
+// Helper to shorten "base::Milliseconds(...)" in these test
// cases for integer milliseconds.
constexpr base::TimeDelta Milliseconds(int64_t milliseconds) {
- return base::TimeDelta::FromMilliseconds(milliseconds);
+ return base::Milliseconds(milliseconds);
}
} // namespace
@@ -53,6 +53,12 @@ typedef StreamParser::TrackId TrackId;
class FrameProcessorTestCallbackHelper {
public:
FrameProcessorTestCallbackHelper() = default;
+
+ FrameProcessorTestCallbackHelper(const FrameProcessorTestCallbackHelper&) =
+ delete;
+ FrameProcessorTestCallbackHelper& operator=(
+ const FrameProcessorTestCallbackHelper&) = delete;
+
virtual ~FrameProcessorTestCallbackHelper() = default;
MOCK_METHOD1(OnParseWarning, void(const SourceBufferParseWarning));
@@ -73,9 +79,6 @@ class FrameProcessorTestCallbackHelper {
void(const DemuxerStream::Type type,
DecodeTimestamp start_dts,
base::TimeDelta start_pts));
-
- private:
- DISALLOW_COPY_AND_ASSIGN(FrameProcessorTestCallbackHelper);
};
class FrameProcessorTest : public ::testing::TestWithParam<bool> {
@@ -169,10 +172,10 @@ class FrameProcessorTest : public ::testing::TestWithParam<bool> {
scoped_refptr<StreamParserBuffer> buffer =
StreamParserBuffer::CopyFrom(timestamp_as_data, sizeof(time_in_ms),
is_keyframe, type, track_id);
- buffer->set_timestamp(base::TimeDelta::FromMillisecondsD(time_in_ms));
+ buffer->set_timestamp(base::Milliseconds(time_in_ms));
if (time_in_ms != decode_time_in_ms) {
buffer->SetDecodeTimestamp(DecodeTimestamp::FromPresentationTime(
- base::TimeDelta::FromMillisecondsD(decode_time_in_ms)));
+ base::Milliseconds(decode_time_in_ms)));
}
buffer->set_duration(frame_duration_);
@@ -381,13 +384,14 @@ class FrameProcessorTest : public ::testing::TestWithParam<bool> {
AudioDecoderConfig decoder_config;
if (support_audio_nonkeyframes) {
decoder_config = AudioDecoderConfig(
- kCodecAAC, kSampleFormatPlanarF32, CHANNEL_LAYOUT_STEREO, 1000,
- EmptyExtraData(), EncryptionScheme::kUnencrypted);
+ AudioCodec::kAAC, kSampleFormatPlanarF32, CHANNEL_LAYOUT_STEREO,
+ 1000, EmptyExtraData(), EncryptionScheme::kUnencrypted);
decoder_config.set_profile(AudioCodecProfile::kXHE_AAC);
} else {
- decoder_config = AudioDecoderConfig(
- kCodecVorbis, kSampleFormatPlanarF32, CHANNEL_LAYOUT_STEREO, 1000,
- EmptyExtraData(), EncryptionScheme::kUnencrypted);
+ decoder_config =
+ AudioDecoderConfig(AudioCodec::kVorbis, kSampleFormatPlanarF32,
+ CHANNEL_LAYOUT_STEREO, 1000, EmptyExtraData(),
+ EncryptionScheme::kUnencrypted);
}
frame_processor_->OnPossibleAudioConfigUpdate(decoder_config);
ASSERT_TRUE(
@@ -924,8 +928,7 @@ TEST_P(FrameProcessorTest, AppendWindowFilterWithInexactPreroll_2) {
EXPECT_CALL(callbacks_, PossibleDurationIncrease(Milliseconds(0)));
EXPECT_TRUE(ProcessFrames("0K", ""));
- EXPECT_CALL(callbacks_, PossibleDurationIncrease(
- base::TimeDelta::FromMicroseconds(10250)));
+ EXPECT_CALL(callbacks_, PossibleDurationIncrease(base::Microseconds(10250)));
EXPECT_TRUE(ProcessFrames("10.25K", ""));
EXPECT_MEDIA_LOG(SkippingSpliceTooLittleOverlap(10000, 250));
@@ -1884,7 +1887,7 @@ TEST_P(FrameProcessorTest,
if (use_sequence_mode_)
frame_processor_->SetSequenceMode(true);
- frame_duration_ = base::TimeDelta::FromMicroseconds(4999);
+ frame_duration_ = base::Microseconds(4999);
EXPECT_CALL(callbacks_, OnGroupStart(DemuxerStream::AUDIO, DecodeTimestamp(),
base::TimeDelta()));
@@ -1913,8 +1916,7 @@ TEST_P(FrameProcessorTest,
Milliseconds(20) + frame_duration_));
EXPECT_CALL(callbacks_, OnAppend(DemuxerStream::AUDIO, _));
- EXPECT_CALL(callbacks_, PossibleDurationIncrease(
- base::TimeDelta::FromMicroseconds(34999)));
+ EXPECT_CALL(callbacks_, PossibleDurationIncrease(base::Microseconds(34999)));
EXPECT_TRUE(ProcessFrames("0K 10|5K 20|10K 30|15K", ""));
EXPECT_EQ(Milliseconds(0), timestamp_offset_);
@@ -2238,14 +2240,14 @@ TEST_P(FrameProcessorTest, NonkeyframeAudioBuffering_TrimSpliceOverlap) {
if (use_sequence_mode_)
frame_processor_->SetSequenceMode(true);
- frame_duration_ = base::TimeDelta::FromMicroseconds(9750);
+ frame_duration_ = base::Microseconds(9750);
EXPECT_CALL(callbacks_, PossibleDurationIncrease(frame_duration_));
EXPECT_TRUE(ProcessFrames("0K", ""));
// As with all-keyframe streams, a slight jump forward should not trigger any
// splicing logic, though accumulations of these may result in loss of A/V
// sync.
- frame_duration_ = base::TimeDelta::FromMicroseconds(10250);
+ frame_duration_ = base::Microseconds(10250);
EXPECT_CALL(callbacks_,
PossibleDurationIncrease(Milliseconds(10) + frame_duration_));
EXPECT_TRUE(ProcessFrames("10", ""));
@@ -2265,7 +2267,7 @@ TEST_P(FrameProcessorTest, NonkeyframeAudioBuffering_TrimSpliceOverlap) {
// frame. Accumulations of these could rapidly lead to loss of A/V sync.
// Nonkeyframe timestamp & duration metadata sequences need to be correctly
// muxed to avoid this.
- frame_duration_ = base::TimeDelta::FromMicroseconds(10250);
+ frame_duration_ = base::Microseconds(10250);
EXPECT_CALL(callbacks_,
PossibleDurationIncrease(Milliseconds(22) + frame_duration_));
EXPECT_TRUE(ProcessFrames("22", ""));
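The repeated change in the test hunks above is the move from the verbose base::TimeDelta::From*() factories to the base::Seconds() / base::Milliseconds() / base::Microseconds() helpers, which accept integral and floating-point arguments alike (so FromMillisecondsD call sites also collapse into Milliseconds). A small sketch, assuming a Chromium checkout so base/time/time.h is available:

#include "base/time/time.h"

base::TimeDelta ExampleDurations() {
  // Previously spelled base::TimeDelta::FromMilliseconds(100),
  // base::TimeDelta::FromMillisecondsD(10.25) and
  // base::TimeDelta::FromMicroseconds(250).
  return base::Milliseconds(100) +    // integral argument
         base::Milliseconds(10.25) +  // floating point is accepted too
         base::Microseconds(250);
}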
diff --git a/chromium/media/filters/fuchsia/DIR_METADATA b/chromium/media/filters/fuchsia/DIR_METADATA
index abc57ac0fd5..5b3985ecc8b 100644
--- a/chromium/media/filters/fuchsia/DIR_METADATA
+++ b/chromium/media/filters/fuchsia/DIR_METADATA
@@ -6,5 +6,5 @@
# For the schema of this file, see Metadata message:
# https://source.chromium.org/chromium/infra/infra/+/main:go/src/infra/tools/dirmd/proto/dir_metadata.proto
-team_email: "cr-fuchsia@chromium.org"
+mixins: "//build/fuchsia/COMMON_METADATA"
os: FUCHSIA \ No newline at end of file
diff --git a/chromium/media/filters/fuchsia/fuchsia_video_decoder.cc b/chromium/media/filters/fuchsia/fuchsia_video_decoder.cc
index 908c5b27d84..8723c3567c6 100644
--- a/chromium/media/filters/fuchsia/fuchsia_video_decoder.cc
+++ b/chromium/media/filters/fuchsia/fuchsia_video_decoder.cc
@@ -90,6 +90,9 @@ class FuchsiaVideoDecoder::OutputMailbox {
kPremul_SkAlphaType, usage);
}
+ OutputMailbox(const OutputMailbox&) = delete;
+ OutputMailbox& operator=(const OutputMailbox&) = delete;
+
~OutputMailbox() {
raster_context_provider_->SharedImageInterface()->DestroySharedImage(
sync_token_, mailbox_);
@@ -172,8 +175,6 @@ class FuchsiaVideoDecoder::OutputMailbox {
base::OnceClosure reuse_callback_;
base::WeakPtrFactory<OutputMailbox> weak_factory_;
-
- DISALLOW_COPY_AND_ASSIGN(OutputMailbox);
};
// static
@@ -250,7 +251,7 @@ void FuchsiaVideoDecoder::Initialize(const VideoDecoderConfig& config,
container_aspect_ratio_ = config.aspect_ratio();
// Keep decoder and decryptor if the configuration hasn't changed.
- if (decoder_ && current_config_.is_encrypted() == config.codec() &&
+ if (decoder_ && current_config_.codec() == config.codec() &&
current_config_.is_encrypted() == config.is_encrypted()) {
std::move(done_callback).Run(OkStatus());
return;
@@ -275,19 +276,19 @@ void FuchsiaVideoDecoder::Initialize(const VideoDecoderConfig& config,
decoder_params.mutable_input_details()->set_format_details_version_ordinal(0);
switch (config.codec()) {
- case kCodecH264:
+ case VideoCodec::kH264:
decoder_params.mutable_input_details()->set_mime_type("video/h264");
break;
- case kCodecVP8:
+ case VideoCodec::kVP8:
decoder_params.mutable_input_details()->set_mime_type("video/vp8");
break;
- case kCodecVP9:
+ case VideoCodec::kVP9:
decoder_params.mutable_input_details()->set_mime_type("video/vp9");
break;
- case kCodecHEVC:
+ case VideoCodec::kHEVC:
decoder_params.mutable_input_details()->set_mime_type("video/hevc");
break;
- case kCodecAV1:
+ case VideoCodec::kAV1:
decoder_params.mutable_input_details()->set_mime_type("video/av1");
break;
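Two things stand out in the fuchsia_video_decoder.cc hunks above: the Initialize() condition previously compared is_encrypted() against config.codec() and now correctly compares codecs, and the mime-type selection switches over the scoped VideoCodec enumerators. A self-contained sketch of that mapping, with a stand-in enum so it compiles outside Chromium:

#include <string>

// Stand-in for media::VideoCodec, for illustration only.
enum class VideoCodec { kH264, kVP8, kVP9, kHEVC, kAV1, kUnknown };

std::string MimeTypeForCodec(VideoCodec codec) {
  switch (codec) {
    case VideoCodec::kH264: return "video/h264";
    case VideoCodec::kVP8:  return "video/vp8";
    case VideoCodec::kVP9:  return "video/vp9";
    case VideoCodec::kHEVC: return "video/hevc";
    case VideoCodec::kAV1:  return "video/av1";
    default:                return "";  // unsupported codec
  }
}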
diff --git a/chromium/media/filters/fuchsia/fuchsia_video_decoder_unittest.cc b/chromium/media/filters/fuchsia/fuchsia_video_decoder_unittest.cc
index aced32b7add..213af7ece21 100644
--- a/chromium/media/filters/fuchsia/fuchsia_video_decoder_unittest.cc
+++ b/chromium/media/filters/fuchsia/fuchsia_video_decoder_unittest.cc
@@ -57,6 +57,9 @@ class TestBufferCollection {
ZX_CHECK(status == ZX_OK, status) << "BufferCollection::SetConstraints()";
}
+ TestBufferCollection(const TestBufferCollection&) = delete;
+ TestBufferCollection& operator=(const TestBufferCollection&) = delete;
+
~TestBufferCollection() { buffers_collection_->Close(); }
size_t GetNumBuffers() {
@@ -80,8 +83,6 @@ class TestBufferCollection {
absl::optional<fuchsia::sysmem::BufferCollectionInfo_2>
buffer_collection_info_;
-
- DISALLOW_COPY_AND_ASSIGN(TestBufferCollection);
};
class TestSharedImageInterface : public gpu::SharedImageInterface {
@@ -295,6 +296,10 @@ class FuchsiaVideoDecoderTest : public testing::Test {
decoder_(
FuchsiaVideoDecoder::CreateForTests(raster_context_provider_.get(),
/*enable_sw_decoding=*/true)) {}
+
+ FuchsiaVideoDecoderTest(const FuchsiaVideoDecoderTest&) = delete;
+ FuchsiaVideoDecoderTest& operator=(const FuchsiaVideoDecoderTest&) = delete;
+
~FuchsiaVideoDecoderTest() override = default;
bool InitializeDecoder(VideoDecoderConfig config) WARN_UNUSED_RESULT {
@@ -390,8 +395,6 @@ class FuchsiaVideoDecoderTest : public testing::Test {
size_t frames_to_keep_ = 2;
base::WeakPtrFactory<FuchsiaVideoDecoderTest> weak_factory_{this};
-
- DISALLOW_COPY_AND_ASSIGN(FuchsiaVideoDecoderTest);
};
scoped_refptr<DecoderBuffer> GetH264Frame(size_t frame_num) {
@@ -411,7 +414,7 @@ TEST_F(FuchsiaVideoDecoderTest, CreateInitDestroy) {
}
TEST_F(FuchsiaVideoDecoderTest, DISABLED_VP9) {
- ASSERT_TRUE(InitializeDecoder(TestVideoConfig::Normal(kCodecVP9)));
+ ASSERT_TRUE(InitializeDecoder(TestVideoConfig::Normal(VideoCodec::kVP9)));
DecodeBuffer(ReadTestDataFile("vp9-I-frame-320x240"));
DecodeBuffer(DecoderBuffer::CreateEOSBuffer());
diff --git a/chromium/media/filters/gav1_video_decoder.cc b/chromium/media/filters/gav1_video_decoder.cc
index 453476d7399..a52bf678bd6 100644
--- a/chromium/media/filters/gav1_video_decoder.cc
+++ b/chromium/media/filters/gav1_video_decoder.cc
@@ -208,8 +208,7 @@ scoped_refptr<VideoFrame> FormatVideoFrame(
const VideoColorSpace& container_color_space) {
scoped_refptr<VideoFrame> frame =
static_cast<VideoFrame*>(buffer.buffer_private_data);
- frame->set_timestamp(
- base::TimeDelta::FromMicroseconds(buffer.user_private_data));
+ frame->set_timestamp(base::Microseconds(buffer.user_private_data));
// AV1 color space defines match ISO 23001-8:2016 via ISO/IEC 23091-4/ITU-T
// H.273. https://aomediacodec.github.io/av1-spec/#color-config-semantics
@@ -269,7 +268,7 @@ void Gav1VideoDecoder::Initialize(const VideoDecoderConfig& config,
InitCB bound_init_cb = bind_callbacks_ ? BindToCurrentLoop(std::move(init_cb))
: std::move(init_cb);
- if (config.is_encrypted() || config.codec() != kCodecAV1) {
+ if (config.is_encrypted() || config.codec() != VideoCodec::kAV1) {
std::move(bound_init_cb).Run(StatusCode::kEncryptedContentUnsupported);
return;
}
diff --git a/chromium/media/filters/gav1_video_decoder.h b/chromium/media/filters/gav1_video_decoder.h
index 68daf5cd44c..5ac5936576d 100644
--- a/chromium/media/filters/gav1_video_decoder.h
+++ b/chromium/media/filters/gav1_video_decoder.h
@@ -96,7 +96,7 @@ class OffloadingGav1VideoDecoder : public OffloadingVideoDecoder {
explicit OffloadingGav1VideoDecoder(MediaLog* media_log)
: OffloadingVideoDecoder(
0,
- std::vector<VideoCodec>(1, kCodecAV1),
+ std::vector<VideoCodec>(1, VideoCodec::kAV1),
std::make_unique<Gav1VideoDecoder>(
media_log,
OffloadableVideoDecoder::OffloadState::kOffloaded)) {}
diff --git a/chromium/media/filters/gav1_video_decoder_unittest.cc b/chromium/media/filters/gav1_video_decoder_unittest.cc
index 33ccf13e2c4..bfd57f6b35a 100644
--- a/chromium/media/filters/gav1_video_decoder_unittest.cc
+++ b/chromium/media/filters/gav1_video_decoder_unittest.cc
@@ -69,10 +69,13 @@ class Gav1VideoDecoderTest : public testing::Test {
: decoder_(new Gav1VideoDecoder(&media_log_)),
i_frame_buffer_(ReadTestDataFile("av1-I-frame-320x240")) {}
+ Gav1VideoDecoderTest(const Gav1VideoDecoderTest&) = delete;
+ Gav1VideoDecoderTest& operator=(const Gav1VideoDecoderTest&) = delete;
+
~Gav1VideoDecoderTest() override { Destroy(); }
void Initialize() {
- InitializeWithConfig(TestVideoConfig::Normal(kCodecAV1));
+ InitializeWithConfig(TestVideoConfig::Normal(VideoCodec::kAV1));
}
void InitializeWithConfigWithResult(const VideoDecoderConfig& config,
@@ -94,7 +97,7 @@ class Gav1VideoDecoderTest : public testing::Test {
}
void Reinitialize() {
- InitializeWithConfig(TestVideoConfig::Large(kCodecAV1));
+ InitializeWithConfig(TestVideoConfig::Large(VideoCodec::kAV1));
}
void Reset() {
@@ -216,9 +219,6 @@ class Gav1VideoDecoderTest : public testing::Test {
scoped_refptr<DecoderBuffer> i_frame_buffer_;
OutputFrames output_frames_;
-
- private:
- DISALLOW_COPY_AND_ASSIGN(Gav1VideoDecoderTest);
};
TEST_F(Gav1VideoDecoderTest, Initialize_Normal) {
diff --git a/chromium/media/filters/h264_to_annex_b_bitstream_converter.h b/chromium/media/filters/h264_to_annex_b_bitstream_converter.h
index ce5fe35c45f..6b73c60ca29 100644
--- a/chromium/media/filters/h264_to_annex_b_bitstream_converter.h
+++ b/chromium/media/filters/h264_to_annex_b_bitstream_converter.h
@@ -24,6 +24,12 @@ struct AVCDecoderConfigurationRecord;
class MEDIA_EXPORT H264ToAnnexBBitstreamConverter {
public:
H264ToAnnexBBitstreamConverter();
+
+ H264ToAnnexBBitstreamConverter(const H264ToAnnexBBitstreamConverter&) =
+ delete;
+ H264ToAnnexBBitstreamConverter& operator=(
+ const H264ToAnnexBBitstreamConverter&) = delete;
+
~H264ToAnnexBBitstreamConverter();
// Parses the global AVCDecoderConfigurationRecord from the file format's
@@ -146,8 +152,6 @@ class MEDIA_EXPORT H264ToAnnexBBitstreamConverter {
bool first_nal_unit_in_access_unit_;
// Variable to hold interleaving field's length in bytes.
uint8_t nal_unit_length_field_width_;
-
- DISALLOW_COPY_AND_ASSIGN(H264ToAnnexBBitstreamConverter);
};
} // namespace media
diff --git a/chromium/media/filters/media_file_checker.cc b/chromium/media/filters/media_file_checker.cc
index 12e78e39e2d..db67fb7370a 100644
--- a/chromium/media/filters/media_file_checker.cc
+++ b/chromium/media/filters/media_file_checker.cc
@@ -86,8 +86,7 @@ bool MediaFileChecker::Start(base::TimeDelta check_time) {
auto do_nothing_cb = base::BindRepeating([](AVFrame*) { return true; });
const base::TimeTicks deadline =
base::TimeTicks::Now() +
- std::min(check_time,
- base::TimeDelta::FromSeconds(kMaxCheckTimeInSeconds));
+ std::min(check_time, base::Seconds(kMaxCheckTimeInSeconds));
do {
result = av_read_frame(glue.format_context(), &packet);
if (result < 0)
diff --git a/chromium/media/filters/media_file_checker.h b/chromium/media/filters/media_file_checker.h
index 113f8434b02..75d97041ddf 100644
--- a/chromium/media/filters/media_file_checker.h
+++ b/chromium/media/filters/media_file_checker.h
@@ -21,6 +21,10 @@ namespace media {
class MEDIA_EXPORT MediaFileChecker {
public:
explicit MediaFileChecker(base::File file);
+
+ MediaFileChecker(const MediaFileChecker&) = delete;
+ MediaFileChecker& operator=(const MediaFileChecker&) = delete;
+
~MediaFileChecker();
// After opening |file|, up to |check_time| amount of wall-clock time is spent
@@ -30,8 +34,6 @@ class MEDIA_EXPORT MediaFileChecker {
private:
base::File file_;
-
- DISALLOW_COPY_AND_ASSIGN(MediaFileChecker);
};
} // namespace media
diff --git a/chromium/media/filters/media_file_checker_unittest.cc b/chromium/media/filters/media_file_checker_unittest.cc
index 38047fd8ed1..b8d52aacad5 100644
--- a/chromium/media/filters/media_file_checker_unittest.cc
+++ b/chromium/media/filters/media_file_checker_unittest.cc
@@ -20,7 +20,7 @@ static void RunMediaFileChecker(const std::string& filename, bool expectation) {
ASSERT_TRUE(file.IsValid());
MediaFileChecker checker(std::move(file));
- const base::TimeDelta check_time = base::TimeDelta::FromMilliseconds(100);
+ const base::TimeDelta check_time = base::Milliseconds(100);
bool result = checker.Start(check_time);
EXPECT_EQ(expectation, result);
}
diff --git a/chromium/media/filters/memory_data_source.h b/chromium/media/filters/memory_data_source.h
index 11f5f288a66..fae7a2248d3 100644
--- a/chromium/media/filters/memory_data_source.h
+++ b/chromium/media/filters/memory_data_source.h
@@ -24,6 +24,9 @@ class MEDIA_EXPORT MemoryDataSource final : public DataSource {
// Similar to the above, but takes ownership of the std::string.
explicit MemoryDataSource(std::string data);
+ MemoryDataSource(const MemoryDataSource&) = delete;
+ MemoryDataSource& operator=(const MemoryDataSource&) = delete;
+
~MemoryDataSource() final;
// Implementation of DataSource.
@@ -46,8 +49,6 @@ class MEDIA_EXPORT MemoryDataSource final : public DataSource {
// the media thread. It's harmless if we fulfill a read after Stop() has been
// called, so an atomic without a lock is safe.
std::atomic<bool> is_stopped_{false};
-
- DISALLOW_COPY_AND_ASSIGN(MemoryDataSource);
};
} // namespace media
diff --git a/chromium/media/filters/offloading_video_decoder.h b/chromium/media/filters/offloading_video_decoder.h
index 06016c2bc36..b9740966d8b 100644
--- a/chromium/media/filters/offloading_video_decoder.h
+++ b/chromium/media/filters/offloading_video_decoder.h
@@ -85,6 +85,10 @@ class MEDIA_EXPORT OffloadingVideoDecoder : public VideoDecoder {
OffloadingVideoDecoder(int min_offloading_width,
std::vector<VideoCodec> supported_codecs,
std::unique_ptr<OffloadableVideoDecoder> decoder);
+
+ OffloadingVideoDecoder(const OffloadingVideoDecoder&) = delete;
+ OffloadingVideoDecoder& operator=(const OffloadingVideoDecoder&) = delete;
+
~OffloadingVideoDecoder() override;
// VideoDecoder implementation.
@@ -124,8 +128,6 @@ class MEDIA_EXPORT OffloadingVideoDecoder : public VideoDecoder {
// NOTE: Weak pointers must be invalidated before all other member variables.
base::WeakPtrFactory<OffloadingVideoDecoder> weak_factory_{this};
-
- DISALLOW_COPY_AND_ASSIGN(OffloadingVideoDecoder);
};
} // namespace media
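As the tests in the next file exercise (NoOffloadingTooSmall, NoOffloadingDifferentCodec, NoOffloadingHasEncryption), OffloadingVideoDecoder only moves decoding off-thread when the codec is in the supported list, the coded width reaches the configured minimum, and the stream is unencrypted. A hypothetical sketch of that predicate, inferred from those tests rather than taken from the implementation:

#include <algorithm>
#include <vector>

enum class VideoCodec { kVP8, kVP9, kAV1 };  // stand-in for media::VideoCodec

struct VideoConfig {  // stand-in for media::VideoDecoderConfig
  VideoCodec codec;
  int coded_width;
  bool is_encrypted;
};

bool ShouldOffload(const VideoConfig& config,
                   int min_offloading_width,
                   const std::vector<VideoCodec>& supported_codecs) {
  return !config.is_encrypted &&
         config.coded_width >= min_offloading_width &&
         std::find(supported_codecs.begin(), supported_codecs.end(),
                   config.codec) != supported_codecs.end();
}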
diff --git a/chromium/media/filters/offloading_video_decoder_unittest.cc b/chromium/media/filters/offloading_video_decoder_unittest.cc
index 7571c9e54c7..4f9a116f7d6 100644
--- a/chromium/media/filters/offloading_video_decoder_unittest.cc
+++ b/chromium/media/filters/offloading_video_decoder_unittest.cc
@@ -204,35 +204,35 @@ class OffloadingVideoDecoderTest : public testing::Test {
};
TEST_F(OffloadingVideoDecoderTest, NoOffloadingTooSmall) {
- auto offload_config = TestVideoConfig::Large(kCodecVP9);
- CreateWrapper(offload_config.coded_size().width(), kCodecVP9);
- TestNoOffloading(TestVideoConfig::Normal(kCodecVP9));
+ auto offload_config = TestVideoConfig::Large(VideoCodec::kVP9);
+ CreateWrapper(offload_config.coded_size().width(), VideoCodec::kVP9);
+ TestNoOffloading(TestVideoConfig::Normal(VideoCodec::kVP9));
}
TEST_F(OffloadingVideoDecoderTest, NoOffloadingDifferentCodec) {
- auto offload_config = TestVideoConfig::Large(kCodecVP9);
- CreateWrapper(offload_config.coded_size().width(), kCodecVP9);
- TestNoOffloading(TestVideoConfig::Large(kCodecVP8));
+ auto offload_config = TestVideoConfig::Large(VideoCodec::kVP9);
+ CreateWrapper(offload_config.coded_size().width(), VideoCodec::kVP9);
+ TestNoOffloading(TestVideoConfig::Large(VideoCodec::kVP8));
}
TEST_F(OffloadingVideoDecoderTest, NoOffloadingHasEncryption) {
- auto offload_config = TestVideoConfig::Large(kCodecVP9);
- CreateWrapper(offload_config.coded_size().width(), kCodecVP9);
- TestNoOffloading(TestVideoConfig::LargeEncrypted(kCodecVP9));
+ auto offload_config = TestVideoConfig::Large(VideoCodec::kVP9);
+ CreateWrapper(offload_config.coded_size().width(), VideoCodec::kVP9);
+ TestNoOffloading(TestVideoConfig::LargeEncrypted(VideoCodec::kVP9));
}
TEST_F(OffloadingVideoDecoderTest, Offloading) {
- auto offload_config = TestVideoConfig::Large(kCodecVP9);
- CreateWrapper(offload_config.coded_size().width(), kCodecVP9);
+ auto offload_config = TestVideoConfig::Large(VideoCodec::kVP9);
+ CreateWrapper(offload_config.coded_size().width(), VideoCodec::kVP9);
TestOffloading(offload_config);
}
TEST_F(OffloadingVideoDecoderTest, OffloadingAfterNoOffloading) {
- auto offload_config = TestVideoConfig::Large(kCodecVP9);
- CreateWrapper(offload_config.coded_size().width(), kCodecVP9);
+ auto offload_config = TestVideoConfig::Large(VideoCodec::kVP9);
+ CreateWrapper(offload_config.coded_size().width(), VideoCodec::kVP9);
// Setup and test the no offloading path first.
- TestNoOffloading(TestVideoConfig::Normal(kCodecVP9));
+ TestNoOffloading(TestVideoConfig::Normal(VideoCodec::kVP9));
// Test offloading now.
TestOffloading(offload_config, true);
@@ -242,7 +242,8 @@ TEST_F(OffloadingVideoDecoderTest, OffloadingAfterNoOffloading) {
// should happen asynchronously, set expectation after the call.
VideoDecoder::OutputCB output_cb;
offloading_decoder_->Initialize(
- TestVideoConfig::Normal(kCodecVP9), false, nullptr, ExpectInitCB(true),
+ TestVideoConfig::Normal(VideoCodec::kVP9), false, nullptr,
+ ExpectInitCB(true),
base::BindRepeating(&OffloadingVideoDecoderTest::OutputDone,
base::Unretained(this)),
base::NullCallback());
@@ -255,17 +256,17 @@ TEST_F(OffloadingVideoDecoderTest, OffloadingAfterNoOffloading) {
}
TEST_F(OffloadingVideoDecoderTest, InitializeWithoutDetach) {
- auto offload_config = TestVideoConfig::Large(kCodecVP9);
- CreateWrapper(offload_config.coded_size().width(), kCodecVP9);
+ auto offload_config = TestVideoConfig::Large(VideoCodec::kVP9);
+ CreateWrapper(offload_config.coded_size().width(), VideoCodec::kVP9);
EXPECT_CALL(*decoder_, Detach()).Times(0);
- TestNoOffloading(TestVideoConfig::Normal(kCodecVP9));
- TestNoOffloading(TestVideoConfig::Normal(kCodecVP9));
+ TestNoOffloading(TestVideoConfig::Normal(VideoCodec::kVP9));
+ TestNoOffloading(TestVideoConfig::Normal(VideoCodec::kVP9));
}
TEST_F(OffloadingVideoDecoderTest, ParallelizedOffloading) {
- auto offload_config = TestVideoConfig::Large(kCodecVP9);
- CreateWrapper(offload_config.coded_size().width(), kCodecVP9);
+ auto offload_config = TestVideoConfig::Large(VideoCodec::kVP9);
+ CreateWrapper(offload_config.coded_size().width(), VideoCodec::kVP9);
// Since this Initialize() should be happening on another thread, set the
// expectation after we make the call.
@@ -315,8 +316,8 @@ TEST_F(OffloadingVideoDecoderTest, ParallelizedOffloading) {
}
TEST_F(OffloadingVideoDecoderTest, ParallelizedOffloadingResetAbortsDecodes) {
- auto offload_config = TestVideoConfig::Large(kCodecVP9);
- CreateWrapper(offload_config.coded_size().width(), kCodecVP9);
+ auto offload_config = TestVideoConfig::Large(VideoCodec::kVP9);
+ CreateWrapper(offload_config.coded_size().width(), VideoCodec::kVP9);
// Since this Initialize() should be happening on another thread, set the
// expectation after we make the call.
diff --git a/chromium/media/filters/pipeline_controller.h b/chromium/media/filters/pipeline_controller.h
index ea0370332dc..95ea732c8ac 100644
--- a/chromium/media/filters/pipeline_controller.h
+++ b/chromium/media/filters/pipeline_controller.h
@@ -63,6 +63,10 @@ class MEDIA_EXPORT PipelineController {
BeforeResumeCB before_resume_cb,
ResumedCB resumed_cb,
PipelineStatusCB error_cb);
+
+ PipelineController(const PipelineController&) = delete;
+ PipelineController& operator=(const PipelineController&) = delete;
+
~PipelineController();
// Start |pipeline_|. |demuxer| will be retained and StartWaitingForSeek()/
@@ -234,8 +238,6 @@ class MEDIA_EXPORT PipelineController {
base::ThreadChecker thread_checker_;
base::WeakPtrFactory<PipelineController> weak_factory_{this};
-
- DISALLOW_COPY_AND_ASSIGN(PipelineController);
};
} // namespace media
diff --git a/chromium/media/filters/pipeline_controller_unittest.cc b/chromium/media/filters/pipeline_controller_unittest.cc
index 8d54ec1923a..982dfe7c2e8 100644
--- a/chromium/media/filters/pipeline_controller_unittest.cc
+++ b/chromium/media/filters/pipeline_controller_unittest.cc
@@ -50,6 +50,9 @@ class PipelineControllerTest : public ::testing::Test, public Pipeline::Client {
base::BindRepeating(&PipelineControllerTest::OnError,
base::Unretained(this))) {}
+ PipelineControllerTest(const PipelineControllerTest&) = delete;
+ PipelineControllerTest& operator=(const PipelineControllerTest&) = delete;
+
~PipelineControllerTest() override = default;
PipelineStatusCallback StartPipeline(bool is_streaming, bool is_static) {
@@ -167,8 +170,6 @@ class PipelineControllerTest : public ::testing::Test, public Pipeline::Client {
bool was_resuming_ = false;
bool was_resumed_ = false;
base::TimeDelta last_resume_time_;
-
- DISALLOW_COPY_AND_ASSIGN(PipelineControllerTest);
};
TEST_F(PipelineControllerTest, Startup) {
@@ -191,7 +192,7 @@ TEST_F(PipelineControllerTest, StartSuspendedSeekAndResume) {
Mock::VerifyAndClear(pipeline_);
// Initiate a seek before the pipeline completes suspended startup.
- base::TimeDelta seek_time = base::TimeDelta::FromSeconds(5);
+ base::TimeDelta seek_time = base::Seconds(5);
EXPECT_CALL(demuxer_, StartWaitingForSeek(seek_time));
pipeline_controller_.Seek(seek_time, true);
base::RunLoop().RunUntilIdle();
@@ -282,7 +283,7 @@ TEST_F(PipelineControllerTest, Seek) {
Complete(StartPipeline());
was_seeked_ = false;
- base::TimeDelta seek_time = base::TimeDelta::FromSeconds(5);
+ base::TimeDelta seek_time = base::Seconds(5);
EXPECT_CALL(demuxer_, StartWaitingForSeek(seek_time));
PipelineStatusCallback seek_cb = SeekPipeline(seek_time);
base::RunLoop().RunUntilIdle();
@@ -297,7 +298,7 @@ TEST_F(PipelineControllerTest, Seek) {
TEST_F(PipelineControllerTest, DecoderStateLost) {
Complete(StartPipeline());
- constexpr auto kCurrentMediaTime = base::TimeDelta::FromSeconds(7);
+ constexpr auto kCurrentMediaTime = base::Seconds(7);
EXPECT_CALL(*pipeline_, GetMediaTime())
.WillRepeatedly(Return(kCurrentMediaTime));
@@ -313,7 +314,7 @@ TEST_F(PipelineControllerTest, DecoderStateLost_DuringPendingSeek) {
Complete(StartPipeline());
// Create a pending seek.
- base::TimeDelta kSeekTime = base::TimeDelta::FromSeconds(5);
+ base::TimeDelta kSeekTime = base::Seconds(5);
EXPECT_CALL(demuxer_, StartWaitingForSeek(kSeekTime));
PipelineStatusCallback seek_cb = SeekPipeline(kSeekTime);
base::RunLoop().RunUntilIdle();
@@ -331,7 +332,7 @@ TEST_F(PipelineControllerTest, SuspendResumeTime) {
Complete(StartPipeline());
Complete(SuspendPipeline());
- base::TimeDelta seek_time = base::TimeDelta::FromSeconds(5);
+ base::TimeDelta seek_time = base::Seconds(5);
pipeline_controller_.Seek(seek_time, true);
base::RunLoop().RunUntilIdle();
@@ -343,7 +344,7 @@ TEST_F(PipelineControllerTest, SuspendResumeTime_WithStreamingData) {
Complete(StartPipeline_WithStreamingData());
Complete(SuspendPipeline());
- base::TimeDelta seek_time = base::TimeDelta::FromSeconds(5);
+ base::TimeDelta seek_time = base::Seconds(5);
pipeline_controller_.Seek(seek_time, true);
base::RunLoop().RunUntilIdle();
@@ -355,14 +356,14 @@ TEST_F(PipelineControllerTest, SeekAborted) {
Complete(StartPipeline());
// Create a first pending seek.
- base::TimeDelta seek_time_1 = base::TimeDelta::FromSeconds(5);
+ base::TimeDelta seek_time_1 = base::Seconds(5);
EXPECT_CALL(demuxer_, StartWaitingForSeek(seek_time_1));
PipelineStatusCallback seek_cb_1 = SeekPipeline(seek_time_1);
base::RunLoop().RunUntilIdle();
Mock::VerifyAndClear(&demuxer_);
// Create a second seek; the first should be aborted.
- base::TimeDelta seek_time_2 = base::TimeDelta::FromSeconds(10);
+ base::TimeDelta seek_time_2 = base::Seconds(10);
EXPECT_CALL(demuxer_, CancelPendingSeek(seek_time_2));
pipeline_controller_.Seek(seek_time_2, true);
base::RunLoop().RunUntilIdle();
@@ -377,7 +378,7 @@ TEST_F(PipelineControllerTest, SeekAborted) {
TEST_F(PipelineControllerTest, PendingSuspend) {
Complete(StartPipeline());
- base::TimeDelta seek_time = base::TimeDelta::FromSeconds(5);
+ base::TimeDelta seek_time = base::Seconds(5);
PipelineStatusCallback seek_cb = SeekPipeline(seek_time);
base::RunLoop().RunUntilIdle();
@@ -401,7 +402,7 @@ TEST_F(PipelineControllerTest, SeekMergesWithResume) {
// Request a seek while suspended.
// It will be a mock failure if pipeline_.Seek() is called.
- base::TimeDelta seek_time = base::TimeDelta::FromSeconds(5);
+ base::TimeDelta seek_time = base::Seconds(5);
pipeline_controller_.Seek(seek_time, true);
base::RunLoop().RunUntilIdle();
EXPECT_FALSE(was_seeked_);
@@ -415,17 +416,17 @@ TEST_F(PipelineControllerTest, SeekMergesWithResume) {
TEST_F(PipelineControllerTest, SeekMergesWithSeek) {
Complete(StartPipeline());
- base::TimeDelta seek_time_1 = base::TimeDelta::FromSeconds(5);
+ base::TimeDelta seek_time_1 = base::Seconds(5);
PipelineStatusCallback seek_cb_1 = SeekPipeline(seek_time_1);
base::RunLoop().RunUntilIdle();
// Request another seek while the first is ongoing.
- base::TimeDelta seek_time_2 = base::TimeDelta::FromSeconds(10);
+ base::TimeDelta seek_time_2 = base::Seconds(10);
pipeline_controller_.Seek(seek_time_2, true);
base::RunLoop().RunUntilIdle();
// Request a third seek. (It should replace the second.)
- base::TimeDelta seek_time_3 = base::TimeDelta::FromSeconds(15);
+ base::TimeDelta seek_time_3 = base::Seconds(15);
pipeline_controller_.Seek(seek_time_3, true);
base::RunLoop().RunUntilIdle();
@@ -437,7 +438,7 @@ TEST_F(PipelineControllerTest, SeekMergesWithSeek) {
TEST_F(PipelineControllerTest, SeekToSeekTimeElided) {
Complete(StartPipeline());
- base::TimeDelta seek_time = base::TimeDelta::FromSeconds(5);
+ base::TimeDelta seek_time = base::Seconds(5);
PipelineStatusCallback seek_cb_1 = SeekPipeline(seek_time);
base::RunLoop().RunUntilIdle();
@@ -454,7 +455,7 @@ TEST_F(PipelineControllerTest, SeekToSeekTimeElided) {
TEST_F(PipelineControllerTest, SeekToSeekTimeNotElided) {
Complete(StartPipeline_WithDynamicData());
- base::TimeDelta seek_time = base::TimeDelta::FromSeconds(5);
+ base::TimeDelta seek_time = base::Seconds(5);
PipelineStatusCallback seek_cb_1 = SeekPipeline(seek_time);
base::RunLoop().RunUntilIdle();
diff --git a/chromium/media/filters/source_buffer_range.cc b/chromium/media/filters/source_buffer_range.cc
index 80f776452f2..dcc13354ea4 100644
--- a/chromium/media/filters/source_buffer_range.cc
+++ b/chromium/media/filters/source_buffer_range.cc
@@ -537,7 +537,7 @@ base::TimeDelta SourceBufferRange::GetBufferedEndTimestamp() const {
// report 1 microsecond for the last buffer's duration if it is a 0 duration
// buffer.
if (duration.is_zero())
- duration = base::TimeDelta::FromMicroseconds(1);
+ duration = base::Microseconds(1);
return GetEndTimestamp() + duration;
}
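Tiny illustration of the clamp above (std::chrono stands in for base::TimeDelta): a zero-duration last buffer is reported as lasting one microsecond so the buffered range stays non-empty.

#include <chrono>

using Micros = std::chrono::microseconds;

Micros BufferedEndTimestamp(Micros end_timestamp, Micros last_buffer_duration) {
  if (last_buffer_duration == Micros::zero())
    last_buffer_duration = Micros(1);  // keep the reported range non-empty
  return end_timestamp + last_buffer_duration;
}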
diff --git a/chromium/media/filters/source_buffer_range.h b/chromium/media/filters/source_buffer_range.h
index 19326580397..820d2d81bf8 100644
--- a/chromium/media/filters/source_buffer_range.h
+++ b/chromium/media/filters/source_buffer_range.h
@@ -50,6 +50,9 @@ class MEDIA_EXPORT SourceBufferRange {
base::TimeDelta range_start_pts,
InterbufferDistanceCB interbuffer_distance_cb);
+ SourceBufferRange(const SourceBufferRange&) = delete;
+ SourceBufferRange& operator=(const SourceBufferRange&) = delete;
+
~SourceBufferRange();
// Deletes all buffers in range.
@@ -392,8 +395,6 @@ class MEDIA_EXPORT SourceBufferRange {
// Maps keyframe presentation timestamps to GOP start index of |buffers_|
// (with index adjusted by |keyframe_map_index_base_|);
KeyframeMap keyframe_map_;
-
- DISALLOW_COPY_AND_ASSIGN(SourceBufferRange);
};
} // namespace media
diff --git a/chromium/media/filters/source_buffer_state.cc b/chromium/media/filters/source_buffer_state.cc
index c1a4cae4eee..74814c3be00 100644
--- a/chromium/media/filters/source_buffer_state.cc
+++ b/chromium/media/filters/source_buffer_state.cc
@@ -10,6 +10,7 @@
#include "base/command_line.h"
#include "base/strings/string_number_conversions.h"
#include "build/build_config.h"
+#include "build/chromeos_buildflags.h"
#include "media/base/media_switches.h"
#include "media/base/media_track.h"
#include "media/base/media_tracks.h"
@@ -30,7 +31,7 @@ enum {
namespace {
-TimeDelta EndTimestamp(const StreamParser::BufferQueue& queue) {
+base::TimeDelta EndTimestamp(const StreamParser::BufferQueue& queue) {
return queue.back()->timestamp() + queue.back()->duration();
}
@@ -72,7 +73,7 @@ unsigned GetMSEBufferSizeLimitIfExists(base::StringPiece switch_string) {
// List of time ranges for each SourceBuffer.
// static
-Ranges<TimeDelta> SourceBufferState::ComputeRangesIntersection(
+Ranges<base::TimeDelta> SourceBufferState::ComputeRangesIntersection(
const RangesList& active_ranges,
bool ended) {
// TODO(servolk): Perhaps this can be removed in favor of blink implementation
@@ -85,13 +86,13 @@ Ranges<TimeDelta> SourceBufferState::ComputeRangesIntersection(
// Step 1: If activeSourceBuffers.length equals 0 then return an empty
// TimeRanges object and abort these steps.
if (active_ranges.empty())
- return Ranges<TimeDelta>();
+ return Ranges<base::TimeDelta>();
// Step 2: Let active ranges be the ranges returned by buffered for each
// SourceBuffer object in activeSourceBuffers.
// Step 3: Let highest end time be the largest range end time in the active
// ranges.
- TimeDelta highest_end_time;
+ base::TimeDelta highest_end_time;
for (const auto& range : active_ranges) {
if (!range.size())
continue;
@@ -101,15 +102,15 @@ Ranges<TimeDelta> SourceBufferState::ComputeRangesIntersection(
// Step 4: Let intersection ranges equal a TimeRange object containing a
// single range from 0 to highest end time.
- Ranges<TimeDelta> intersection_ranges;
- intersection_ranges.Add(TimeDelta(), highest_end_time);
+ Ranges<base::TimeDelta> intersection_ranges;
+ intersection_ranges.Add(base::TimeDelta(), highest_end_time);
// Step 5: For each SourceBuffer object in activeSourceBuffers run the
// following steps:
for (const auto& range : active_ranges) {
// Step 5.1: Let source ranges equal the ranges returned by the buffered
// attribute on the current SourceBuffer.
- Ranges<TimeDelta> source_ranges = range;
+ Ranges<base::TimeDelta> source_ranges = range;
// Step 5.2: If readyState is "ended", then set the end time on the last
// range in source ranges to highest end time.
@@ -206,9 +207,9 @@ void SourceBufferState::SetParseWarningCallback(
bool SourceBufferState::Append(const uint8_t* data,
size_t length,
- TimeDelta append_window_start,
- TimeDelta append_window_end,
- TimeDelta* timestamp_offset) {
+ base::TimeDelta append_window_start,
+ base::TimeDelta append_window_end,
+ base::TimeDelta* timestamp_offset) {
append_in_progress_ = true;
DCHECK(timestamp_offset);
DCHECK(!timestamp_offset_during_append_);
@@ -233,9 +234,9 @@ bool SourceBufferState::Append(const uint8_t* data,
bool SourceBufferState::AppendChunks(
std::unique_ptr<StreamParser::BufferQueue> buffer_queue,
- TimeDelta append_window_start,
- TimeDelta append_window_end,
- TimeDelta* timestamp_offset) {
+ base::TimeDelta append_window_start,
+ base::TimeDelta append_window_end,
+ base::TimeDelta* timestamp_offset) {
append_in_progress_ = true;
DCHECK(timestamp_offset);
DCHECK(!timestamp_offset_during_append_);
@@ -256,8 +257,8 @@ bool SourceBufferState::AppendChunks(
return result;
}
-void SourceBufferState::ResetParserState(TimeDelta append_window_start,
- TimeDelta append_window_end,
+void SourceBufferState::ResetParserState(base::TimeDelta append_window_start,
+ base::TimeDelta append_window_end,
base::TimeDelta* timestamp_offset) {
DCHECK(timestamp_offset);
DCHECK(!timestamp_offset_during_append_);
@@ -273,9 +274,9 @@ void SourceBufferState::ResetParserState(TimeDelta append_window_start,
media_segment_has_data_for_track_.clear();
}
-void SourceBufferState::Remove(TimeDelta start,
- TimeDelta end,
- TimeDelta duration) {
+void SourceBufferState::Remove(base::TimeDelta start,
+ base::TimeDelta end,
+ base::TimeDelta duration) {
for (const auto& it : audio_streams_) {
it.second->Remove(start, end, duration);
}
@@ -366,8 +367,9 @@ void SourceBufferState::OnMemoryPressure(
}
}
-Ranges<TimeDelta> SourceBufferState::GetBufferedRanges(TimeDelta duration,
- bool ended) const {
+Ranges<base::TimeDelta> SourceBufferState::GetBufferedRanges(
+ base::TimeDelta duration,
+ bool ended) const {
RangesList ranges_list;
for (const auto& it : audio_streams_)
ranges_list.push_back(it.second->GetBufferedRanges(duration));
@@ -381,8 +383,8 @@ Ranges<TimeDelta> SourceBufferState::GetBufferedRanges(TimeDelta duration,
return ComputeRangesIntersection(ranges_list, ended);
}
-TimeDelta SourceBufferState::GetHighestPresentationTimestamp() const {
- TimeDelta max_pts;
+base::TimeDelta SourceBufferState::GetHighestPresentationTimestamp() const {
+ base::TimeDelta max_pts;
for (const auto& it : audio_streams_) {
max_pts = std::max(max_pts, it.second->GetHighestPresentationTimestamp());
@@ -399,8 +401,8 @@ TimeDelta SourceBufferState::GetHighestPresentationTimestamp() const {
return max_pts;
}
-TimeDelta SourceBufferState::GetMaxBufferedDuration() const {
- TimeDelta max_duration;
+base::TimeDelta SourceBufferState::GetMaxBufferedDuration() const {
+ base::TimeDelta max_duration;
for (const auto& it : audio_streams_) {
max_duration = std::max(max_duration, it.second->GetBufferedDuration());
@@ -445,7 +447,7 @@ void SourceBufferState::AbortReads() {
}
}
-void SourceBufferState::Seek(TimeDelta seek_time) {
+void SourceBufferState::Seek(base::TimeDelta seek_time) {
for (const auto& it : audio_streams_) {
it.second->Seek(seek_time);
}
@@ -473,7 +475,7 @@ void SourceBufferState::CompletePendingReadIfPossible() {
}
}
-void SourceBufferState::OnSetDuration(TimeDelta duration) {
+void SourceBufferState::OnSetDuration(base::TimeDelta duration) {
for (const auto& it : audio_streams_) {
it.second->OnSetDuration(duration);
}
@@ -585,12 +587,12 @@ void SourceBufferState::InitializeParser(const std::string& expected_codecs) {
std::vector<VideoCodec> expected_vcodecs;
for (const auto& codec_id : expected_codecs_parsed) {
AudioCodec acodec = StringToAudioCodec(codec_id);
- if (acodec != kUnknownAudioCodec) {
+ if (acodec != AudioCodec::kUnknown) {
expected_audio_codecs_.push_back(acodec);
continue;
}
VideoCodec vcodec = StringToVideoCodec(codec_id);
- if (vcodec != kUnknownVideoCodec) {
+ if (vcodec != VideoCodec::kUnknown) {
expected_video_codecs_.push_back(vcodec);
continue;
}
@@ -683,19 +685,19 @@ bool SourceBufferState::OnNewConfigs(
std::vector<AudioDecoderConfig>{audio_config});
} else {
if (audio_streams_.size() > 1) {
- auto it = audio_streams_.find(track_id);
- if (it != audio_streams_.end())
- stream = it->second;
+ auto stream_it = audio_streams_.find(track_id);
+ if (stream_it != audio_streams_.end())
+ stream = stream_it->second;
} else {
// If there is only one audio track then bytestream id might change in
// a new init segment. So update our state and notify frame processor.
- const auto& it = audio_streams_.begin();
- if (it != audio_streams_.end()) {
- stream = it->second;
- if (it->first != track_id) {
- track_id_changes[it->first] = track_id;
+ const auto& stream_it = audio_streams_.begin();
+ if (stream_it != audio_streams_.end()) {
+ stream = stream_it->second;
+ if (stream_it->first != track_id) {
+ track_id_changes[stream_it->first] = track_id;
audio_streams_[track_id] = stream;
- audio_streams_.erase(it->first);
+ audio_streams_.erase(stream_it->first);
}
}
}
@@ -716,7 +718,7 @@ bool SourceBufferState::OnNewConfigs(
<< " config: " << video_config.AsHumanReadableString();
DCHECK(video_config.IsValidConfig());
- if (video_config.codec() == kCodecHEVC) {
+ if (video_config.codec() == VideoCodec::kHEVC) {
#if BUILDFLAG(ENABLE_PLATFORM_ENCRYPTED_HEVC)
#if BUILDFLAG(IS_CHROMEOS_LACROS)
if (!base::CommandLine::ForCurrentProcess()->HasSwitch(
@@ -771,19 +773,19 @@ bool SourceBufferState::OnNewConfigs(
std::vector<VideoDecoderConfig>{video_config});
} else {
if (video_streams_.size() > 1) {
- auto it = video_streams_.find(track_id);
- if (it != video_streams_.end())
- stream = it->second;
+ auto stream_it = video_streams_.find(track_id);
+ if (stream_it != video_streams_.end())
+ stream = stream_it->second;
} else {
// If there is only one video track then bytestream id might change in
// a new init segment. So update our state and notify frame processor.
- const auto& it = video_streams_.begin();
- if (it != video_streams_.end()) {
- stream = it->second;
- if (it->first != track_id) {
- track_id_changes[it->first] = track_id;
+ const auto& stream_it = video_streams_.begin();
+ if (stream_it != video_streams_.end()) {
+ stream = stream_it->second;
+ if (stream_it->first != track_id) {
+ track_id_changes[stream_it->first] = track_id;
video_streams_[track_id] = stream;
- video_streams_.erase(it->first);
+ video_streams_.erase(stream_it->first);
}
}
}
@@ -982,15 +984,16 @@ bool SourceBufferState::OnNewBuffers(
media_segment_has_data_for_track_[it.first] = true;
}
- const TimeDelta timestamp_offset_before_processing =
+ const base::TimeDelta timestamp_offset_before_processing =
*timestamp_offset_during_append_;
// Calculate the new timestamp offset for audio/video tracks if the stream
// parser corresponds to MSE MIME type with 'Generate Timestamps Flag' set
// true.
- TimeDelta predicted_timestamp_offset = timestamp_offset_before_processing;
+ base::TimeDelta predicted_timestamp_offset =
+ timestamp_offset_before_processing;
if (generate_timestamps_flag()) {
- TimeDelta min_end_timestamp = kNoTimestamp;
+ base::TimeDelta min_end_timestamp = kNoTimestamp;
for (const auto& it : buffer_queue_map) {
const StreamParser::BufferQueue& bufq = it.second;
DCHECK(!bufq.empty());
diff --git a/chromium/media/filters/source_buffer_state.h b/chromium/media/filters/source_buffer_state.h
index 8e69435a303..31f5bab8bc1 100644
--- a/chromium/media/filters/source_buffer_state.h
+++ b/chromium/media/filters/source_buffer_state.h
@@ -22,7 +22,6 @@
namespace media {
-using base::TimeDelta;
class ChunkDemuxerStream;
class FrameProcessor;
@@ -42,6 +41,9 @@ class MEDIA_EXPORT SourceBufferState {
CreateDemuxerStreamCB create_demuxer_stream_cb,
MediaLog* media_log);
+ SourceBufferState(const SourceBufferState&) = delete;
+ SourceBufferState& operator=(const SourceBufferState&) = delete;
+
~SourceBufferState();
void Init(StreamParser::InitCB init_cb,
@@ -65,22 +67,24 @@ class MEDIA_EXPORT SourceBufferState {
// AppendChunks appends the provided BufferQueue.
bool Append(const uint8_t* data,
size_t length,
- TimeDelta append_window_start,
- TimeDelta append_window_end,
- TimeDelta* timestamp_offset);
+ base::TimeDelta append_window_start,
+ base::TimeDelta append_window_end,
+ base::TimeDelta* timestamp_offset);
bool AppendChunks(std::unique_ptr<StreamParser::BufferQueue> buffer_queue,
- TimeDelta append_window_start,
- TimeDelta append_window_end,
- TimeDelta* timestamp_offset);
+ base::TimeDelta append_window_start,
+ base::TimeDelta append_window_end,
+ base::TimeDelta* timestamp_offset);
// Aborts the current append sequence and resets the parser.
- void ResetParserState(TimeDelta append_window_start,
- TimeDelta append_window_end,
- TimeDelta* timestamp_offset);
+ void ResetParserState(base::TimeDelta append_window_start,
+ base::TimeDelta append_window_end,
+ base::TimeDelta* timestamp_offset);
// Calls Remove(|start|, |end|, |duration|) on all
// ChunkDemuxerStreams managed by this object.
- void Remove(TimeDelta start, TimeDelta end, TimeDelta duration);
+ void Remove(base::TimeDelta start,
+ base::TimeDelta end,
+ base::TimeDelta duration);
// If the buffer is full, attempts to try to free up space, as specified in
// the "Coded Frame Eviction Algorithm" in the Media Source Extensions Spec.
@@ -118,24 +122,25 @@ class MEDIA_EXPORT SourceBufferState {
// Returns the range of buffered data in this source, capped at |duration|.
// |ended| - Set to true if end of stream has been signaled and the special
// end of stream range logic needs to be executed.
- Ranges<TimeDelta> GetBufferedRanges(TimeDelta duration, bool ended) const;
+ Ranges<base::TimeDelta> GetBufferedRanges(base::TimeDelta duration,
+ bool ended) const;
// Returns the highest PTS of currently buffered frames in this source, or
// base::TimeDelta() if none of the streams contain buffered data.
- TimeDelta GetHighestPresentationTimestamp() const;
+ base::TimeDelta GetHighestPresentationTimestamp() const;
// Returns the highest buffered duration across all streams managed
// by this object.
- // Returns TimeDelta() if none of the streams contain buffered data.
- TimeDelta GetMaxBufferedDuration() const;
+ // Returns base::TimeDelta() if none of the streams contain buffered data.
+ base::TimeDelta GetMaxBufferedDuration() const;
// Helper methods that call methods with similar names on all the
// ChunkDemuxerStreams managed by this object.
void StartReturningData();
void AbortReads();
- void Seek(TimeDelta seek_time);
+ void Seek(base::TimeDelta seek_time);
void CompletePendingReadIfPossible();
- void OnSetDuration(TimeDelta duration);
+ void OnSetDuration(base::TimeDelta duration);
void MarkEndOfStream();
void UnmarkEndOfStream();
void Shutdown();
@@ -145,8 +150,8 @@ class MEDIA_EXPORT SourceBufferState {
void SetMemoryLimits(DemuxerStream::Type type, size_t memory_limit);
bool IsSeekWaitingForData() const;
- using RangesList = std::vector<Ranges<TimeDelta>>;
- static Ranges<TimeDelta> ComputeRangesIntersection(
+ using RangesList = std::vector<Ranges<base::TimeDelta>>;
+ static Ranges<base::TimeDelta> ComputeRangesIntersection(
const RangesList& active_ranges,
bool ended);
@@ -218,13 +223,13 @@ class MEDIA_EXPORT SourceBufferState {
// timestamp offset then |*timestamp_offset_during_append_| is also updated
// so Append()'s caller can know the new offset. This pointer is only non-NULL
// during the lifetime of an Append() call.
- TimeDelta* timestamp_offset_during_append_;
+ base::TimeDelta* timestamp_offset_during_append_;
// During Append(), coded frame processing triggered by OnNewBuffers()
// requires these two attributes. These are only valid during the lifetime of
// an Append() call.
- TimeDelta append_window_start_during_append_;
- TimeDelta append_window_end_during_append_;
+ base::TimeDelta append_window_start_during_append_;
+ base::TimeDelta append_window_end_during_append_;
// Keeps track of whether a media segment is being parsed.
bool parsing_media_segment_;
@@ -266,8 +271,6 @@ class MEDIA_EXPORT SourceBufferState {
std::vector<AudioCodec> expected_audio_codecs_;
std::vector<VideoCodec> expected_video_codecs_;
-
- DISALLOW_COPY_AND_ASSIGN(SourceBufferState);
};
} // namespace media
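The source_buffer_state.h change above replaces the DISALLOW_COPY_AND_ASSIGN macro (from base/macros.h) with copy operations that are explicitly deleted next to the constructors. A minimal standalone sketch of the idiom, using an illustrative class name rather than the real media class:

    // Illustrative class, not from this patch; shows the deleted-copy idiom.
    class NonCopyable {
     public:
      NonCopyable() = default;

      // Replaces DISALLOW_COPY_AND_ASSIGN(NonCopyable): the deleted
      // declarations sit beside the other constructors instead of in a macro
      // at the bottom of the class.
      NonCopyable(const NonCopyable&) = delete;
      NonCopyable& operator=(const NonCopyable&) = delete;

      ~NonCopyable() = default;
    };

    // NonCopyable a;
    // NonCopyable b(a);  // error: copy constructor is deleted
    // a = b;             // error: copy assignment is deleted

The same substitution appears below in source_buffer_stream.h and video_cadence_estimator.h.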
diff --git a/chromium/media/filters/source_buffer_state_unittest.cc b/chromium/media/filters/source_buffer_state_unittest.cc
index f0fd900f1fe..2e11a89781b 100644
--- a/chromium/media/filters/source_buffer_state_unittest.cc
+++ b/chromium/media/filters/source_buffer_state_unittest.cc
@@ -158,7 +158,7 @@ TEST_F(SourceBufferStateTest, InitSingleAudioTrack) {
CreateAndInitSourceBufferState("vorbis");
std::unique_ptr<MediaTracks> tracks(new MediaTracks());
- AddAudioTrack(tracks, kCodecVorbis, 1);
+ AddAudioTrack(tracks, AudioCodec::kVorbis, 1);
EXPECT_FOUND_CODEC_NAME(Audio, "vorbis");
EXPECT_CALL(*this, MediaTracksUpdatedMock(_));
@@ -170,7 +170,7 @@ TEST_F(SourceBufferStateTest, InitSingleVideoTrack) {
CreateAndInitSourceBufferState("vp8");
std::unique_ptr<MediaTracks> tracks(new MediaTracks());
- AddVideoTrack(tracks, kCodecVP8, 1);
+ AddVideoTrack(tracks, VideoCodec::kVP8, 1);
EXPECT_FOUND_CODEC_NAME(Video, "vp8");
EXPECT_CALL(*this, MediaTracksUpdatedMock(_));
@@ -182,10 +182,10 @@ TEST_F(SourceBufferStateTest, InitMultipleTracks) {
CreateAndInitSourceBufferState("vorbis,vp8,opus,vp9");
std::unique_ptr<MediaTracks> tracks(new MediaTracks());
- AddAudioTrack(tracks, kCodecVorbis, 1);
- AddAudioTrack(tracks, kCodecOpus, 2);
- AddVideoTrack(tracks, kCodecVP8, 3);
- AddVideoTrack(tracks, kCodecVP9, 4);
+ AddAudioTrack(tracks, AudioCodec::kVorbis, 1);
+ AddAudioTrack(tracks, AudioCodec::kOpus, 2);
+ AddVideoTrack(tracks, VideoCodec::kVP8, 3);
+ AddVideoTrack(tracks, VideoCodec::kVP9, 4);
EXPECT_FOUND_CODEC_NAME(Audio, "vorbis");
EXPECT_FOUND_CODEC_NAME(Audio, "opus");
@@ -199,7 +199,7 @@ TEST_F(SourceBufferStateTest, AudioStreamMismatchesExpectedCodecs) {
std::unique_ptr<SourceBufferState> sbs =
CreateAndInitSourceBufferState("opus");
std::unique_ptr<MediaTracks> tracks(new MediaTracks());
- AddAudioTrack(tracks, kCodecVorbis, 1);
+ AddAudioTrack(tracks, AudioCodec::kVorbis, 1);
EXPECT_MEDIA_LOG(InitSegmentMismatchesMimeType("Audio", "vorbis"));
EXPECT_FALSE(AppendDataAndReportTracks(sbs, std::move(tracks)));
}
@@ -208,7 +208,7 @@ TEST_F(SourceBufferStateTest, VideoStreamMismatchesExpectedCodecs) {
std::unique_ptr<SourceBufferState> sbs =
CreateAndInitSourceBufferState("vp9");
std::unique_ptr<MediaTracks> tracks(new MediaTracks());
- AddVideoTrack(tracks, kCodecVP8, 1);
+ AddVideoTrack(tracks, VideoCodec::kVP8, 1);
EXPECT_MEDIA_LOG(InitSegmentMismatchesMimeType("Video", "vp8"));
EXPECT_FALSE(AppendDataAndReportTracks(sbs, std::move(tracks)));
}
@@ -217,7 +217,7 @@ TEST_F(SourceBufferStateTest, MissingExpectedAudioStream) {
std::unique_ptr<SourceBufferState> sbs =
CreateAndInitSourceBufferState("opus,vp9");
std::unique_ptr<MediaTracks> tracks(new MediaTracks());
- AddVideoTrack(tracks, kCodecVP9, 1);
+ AddVideoTrack(tracks, VideoCodec::kVP9, 1);
EXPECT_FOUND_CODEC_NAME(Video, "vp9");
EXPECT_MEDIA_LOG(InitSegmentMissesExpectedTrack("opus"));
EXPECT_FALSE(AppendDataAndReportTracks(sbs, std::move(tracks)));
@@ -227,8 +227,9 @@ TEST_F(SourceBufferStateTest, MissingExpectedVideoStream) {
std::unique_ptr<SourceBufferState> sbs =
CreateAndInitSourceBufferState("opus,vp9");
std::unique_ptr<MediaTracks> tracks(new MediaTracks());
- tracks->AddAudioTrack(CreateAudioConfig(kCodecOpus), 1, MediaTrack::Kind(),
- MediaTrack::Label(), MediaTrack::Language());
+ tracks->AddAudioTrack(CreateAudioConfig(AudioCodec::kOpus), 1,
+ MediaTrack::Kind(), MediaTrack::Label(),
+ MediaTrack::Language());
EXPECT_FOUND_CODEC_NAME(Audio, "opus");
EXPECT_MEDIA_LOG(InitSegmentMissesExpectedTrack("vp9"));
EXPECT_FALSE(AppendDataAndReportTracks(sbs, std::move(tracks)));
@@ -239,8 +240,8 @@ TEST_F(SourceBufferStateTest, TrackIdsChangeInSecondInitSegment) {
CreateAndInitSourceBufferState("opus,vp9");
std::unique_ptr<MediaTracks> tracks(new MediaTracks());
- AddAudioTrack(tracks, kCodecOpus, 1);
- AddVideoTrack(tracks, kCodecVP9, 2);
+ AddAudioTrack(tracks, AudioCodec::kOpus, 1);
+ AddVideoTrack(tracks, VideoCodec::kVP9, 2);
EXPECT_FOUND_CODEC_NAME(Audio, "opus");
EXPECT_FOUND_CODEC_NAME(Video, "vp9");
EXPECT_CALL(*this, MediaTracksUpdatedMock(_));
@@ -251,8 +252,8 @@ TEST_F(SourceBufferStateTest, TrackIdsChangeInSecondInitSegment) {
// Bytestream track ids are allowed to change when there is only a single
// track of each type.
std::unique_ptr<MediaTracks> tracks2(new MediaTracks());
- AddAudioTrack(tracks2, kCodecOpus, 3);
- AddVideoTrack(tracks2, kCodecVP9, 4);
+ AddAudioTrack(tracks2, AudioCodec::kOpus, 3);
+ AddVideoTrack(tracks2, VideoCodec::kVP9, 4);
EXPECT_CALL(*this, MediaTracksUpdatedMock(_));
AppendDataAndReportTracks(sbs, std::move(tracks2));
}
@@ -262,8 +263,8 @@ TEST_F(SourceBufferStateTest, TrackIdChangeWithTwoAudioTracks) {
CreateAndInitSourceBufferState("vorbis,opus");
std::unique_ptr<MediaTracks> tracks(new MediaTracks());
- AddAudioTrack(tracks, kCodecVorbis, 1);
- AddAudioTrack(tracks, kCodecOpus, 2);
+ AddAudioTrack(tracks, AudioCodec::kVorbis, 1);
+ AddAudioTrack(tracks, AudioCodec::kOpus, 2);
EXPECT_FOUND_CODEC_NAME(Audio, "vorbis");
EXPECT_FOUND_CODEC_NAME(Audio, "opus");
EXPECT_CALL(*this, MediaTracksUpdatedMock(_));
@@ -272,16 +273,16 @@ TEST_F(SourceBufferStateTest, TrackIdChangeWithTwoAudioTracks) {
// Since we have two audio tracks, bytestream track ids must match the first
// init segment.
std::unique_ptr<MediaTracks> tracks2(new MediaTracks());
- AddAudioTrack(tracks2, kCodecVorbis, 1);
- AddAudioTrack(tracks2, kCodecOpus, 2);
+ AddAudioTrack(tracks2, AudioCodec::kVorbis, 1);
+ AddAudioTrack(tracks2, AudioCodec::kOpus, 2);
EXPECT_CALL(*this, MediaTracksUpdatedMock(_));
EXPECT_TRUE(AppendDataAndReportTracks(sbs, std::move(tracks2)));
// Emulate the situation where bytestream track ids have changed in the third
// init segment. This must cause failure in the OnNewConfigs.
std::unique_ptr<MediaTracks> tracks3(new MediaTracks());
- AddAudioTrack(tracks3, kCodecVorbis, 1);
- AddAudioTrack(tracks3, kCodecOpus, 3);
+ AddAudioTrack(tracks3, AudioCodec::kVorbis, 1);
+ AddAudioTrack(tracks3, AudioCodec::kOpus, 3);
EXPECT_MEDIA_LOG(UnexpectedTrack("audio", "3"));
EXPECT_FALSE(AppendDataAndReportTracks(sbs, std::move(tracks3)));
}
@@ -291,8 +292,8 @@ TEST_F(SourceBufferStateTest, TrackIdChangeWithTwoVideoTracks) {
CreateAndInitSourceBufferState("vp8,vp9");
std::unique_ptr<MediaTracks> tracks(new MediaTracks());
- AddVideoTrack(tracks, kCodecVP8, 1);
- AddVideoTrack(tracks, kCodecVP9, 2);
+ AddVideoTrack(tracks, VideoCodec::kVP8, 1);
+ AddVideoTrack(tracks, VideoCodec::kVP9, 2);
EXPECT_FOUND_CODEC_NAME(Video, "vp8");
EXPECT_FOUND_CODEC_NAME(Video, "vp9");
EXPECT_CALL(*this, MediaTracksUpdatedMock(_));
@@ -301,16 +302,16 @@ TEST_F(SourceBufferStateTest, TrackIdChangeWithTwoVideoTracks) {
// Since we have two video tracks, bytestream track ids must match the first
// init segment.
std::unique_ptr<MediaTracks> tracks2(new MediaTracks());
- AddVideoTrack(tracks2, kCodecVP8, 1);
- AddVideoTrack(tracks2, kCodecVP9, 2);
+ AddVideoTrack(tracks2, VideoCodec::kVP8, 1);
+ AddVideoTrack(tracks2, VideoCodec::kVP9, 2);
EXPECT_CALL(*this, MediaTracksUpdatedMock(_));
EXPECT_TRUE(AppendDataAndReportTracks(sbs, std::move(tracks2)));
// Emulate the situation where bytestream track ids have changed in the third
// init segment. This must cause failure in the OnNewConfigs.
std::unique_ptr<MediaTracks> tracks3(new MediaTracks());
- AddVideoTrack(tracks3, kCodecVP8, 1);
- AddVideoTrack(tracks3, kCodecVP9, 3);
+ AddVideoTrack(tracks3, VideoCodec::kVP8, 1);
+ AddVideoTrack(tracks3, VideoCodec::kVP9, 3);
EXPECT_MEDIA_LOG(UnexpectedTrack("video", "3"));
EXPECT_FALSE(AppendDataAndReportTracks(sbs, std::move(tracks3)));
}
@@ -320,8 +321,8 @@ TEST_F(SourceBufferStateTest, TrackIdsSwappedInSecondInitSegment) {
CreateAndInitSourceBufferState("opus,vp9");
std::unique_ptr<MediaTracks> tracks(new MediaTracks());
- AddAudioTrack(tracks, kCodecOpus, 1);
- AddVideoTrack(tracks, kCodecVP9, 2);
+ AddAudioTrack(tracks, AudioCodec::kOpus, 1);
+ AddVideoTrack(tracks, VideoCodec::kVP9, 2);
EXPECT_FOUND_CODEC_NAME(Audio, "opus");
EXPECT_FOUND_CODEC_NAME(Video, "vp9");
EXPECT_CALL(*this, MediaTracksUpdatedMock(_));
@@ -329,8 +330,8 @@ TEST_F(SourceBufferStateTest, TrackIdsSwappedInSecondInitSegment) {
// Track ids are swapped in the second init segment.
std::unique_ptr<MediaTracks> tracks2(new MediaTracks());
- AddAudioTrack(tracks2, kCodecOpus, 2);
- AddVideoTrack(tracks2, kCodecVP9, 1);
+ AddAudioTrack(tracks2, AudioCodec::kOpus, 2);
+ AddVideoTrack(tracks2, VideoCodec::kVP9, 1);
EXPECT_CALL(*this, MediaTracksUpdatedMock(_));
AppendDataAndReportTracks(sbs, std::move(tracks2));
}
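The codec constants in these tests change spelling because the old unscoped enumerators (kCodecVorbis, kCodecVP8, kUnknownAudioCodec, ...) became members of scoped enums, so every call site now qualifies them and the unknown sentinels become AudioCodec::kUnknown / VideoCodec::kUnknown, as in the parser change earlier. An abbreviated sketch of the shape of these enums; the real definitions live in media/base/audio_codecs.h and media/base/video_codecs.h and list many more codecs:

    // Abbreviated for illustration; not the full Chromium enumerator lists.
    enum class AudioCodec { kUnknown = 0, kVorbis, kOpus, kMP3 };
    enum class VideoCodec { kUnknown = 0, kVP8, kVP9, kHEVC };

    // Scoped enumerators do not leak into the enclosing namespace, so the old
    // kCodecVorbis spelling no longer compiles and each use is qualified:
    constexpr AudioCodec kTestAudioCodec = AudioCodec::kVorbis;
    constexpr bool IsKnownVideoCodec(VideoCodec codec) {
      return codec != VideoCodec::kUnknown;  // was: codec != kUnknownVideoCodec
    }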
diff --git a/chromium/media/filters/source_buffer_stream.cc b/chromium/media/filters/source_buffer_stream.cc
index 3c877e35d0b..509e2df604b 100644
--- a/chromium/media/filters/source_buffer_stream.cc
+++ b/chromium/media/filters/source_buffer_stream.cc
@@ -65,7 +65,7 @@ base::TimeDelta ComputeFudgeRoom(base::TimeDelta approximate_duration) {
// The amount of time the beginning of the buffered data can differ from the
// start time in order to still be considered the start of stream.
base::TimeDelta kSeekToStartFudgeRoom() {
- return base::TimeDelta::FromMilliseconds(1000);
+ return base::Milliseconds(1000);
}
// Helper method for logging.
@@ -164,7 +164,7 @@ SourceBufferStream::SourceBufferStream(const AudioDecoderConfig& audio_config,
range_for_next_append_(ranges_.end()),
highest_output_buffer_timestamp_(kNoTimestamp),
max_interbuffer_distance_(
- base::TimeDelta::FromMilliseconds(kMinimumInterbufferDistanceInMs)),
+ base::Milliseconds(kMinimumInterbufferDistanceInMs)),
memory_limit_(GetDemuxerStreamAudioMemoryLimit(&audio_config)) {
DCHECK(audio_config.IsValidConfig());
audio_configs_.push_back(audio_config);
@@ -179,7 +179,7 @@ SourceBufferStream::SourceBufferStream(const VideoDecoderConfig& video_config,
range_for_next_append_(ranges_.end()),
highest_output_buffer_timestamp_(kNoTimestamp),
max_interbuffer_distance_(
- base::TimeDelta::FromMilliseconds(kMinimumInterbufferDistanceInMs)),
+ base::Milliseconds(kMinimumInterbufferDistanceInMs)),
memory_limit_(
GetDemuxerStreamVideoMemoryLimit(Demuxer::DemuxerTypes::kChunkDemuxer,
&video_config)) {
@@ -197,7 +197,7 @@ SourceBufferStream::SourceBufferStream(const TextTrackConfig& text_config,
range_for_next_append_(ranges_.end()),
highest_output_buffer_timestamp_(kNoTimestamp),
max_interbuffer_distance_(
- base::TimeDelta::FromMilliseconds(kMinimumInterbufferDistanceInMs)),
+ base::Milliseconds(kMinimumInterbufferDistanceInMs)),
memory_limit_(
GetDemuxerStreamAudioMemoryLimit(nullptr /*audio_config*/)) {}
@@ -241,7 +241,7 @@ void SourceBufferStream::OnStartOfCodedFrameGroup(
// Exclude removal of that earlier frame during later Append
// processing by adjusting the removal range slightly forward.
coded_frame_group_start_pts_ =
- adjusted_start_time + base::TimeDelta::FromMicroseconds(1);
+ adjusted_start_time + base::Microseconds(1);
}
}
} else if (last_range != ranges_.end()) {
@@ -736,7 +736,7 @@ bool SourceBufferStream::UpdateMaxInterbufferDtsDistance(
}
DCHECK(max_interbuffer_distance_ >=
- base::TimeDelta::FromMilliseconds(kMinimumInterbufferDistanceInMs));
+ base::Milliseconds(kMinimumInterbufferDistanceInMs));
max_interbuffer_distance_ =
std::max(max_interbuffer_distance_, interbuffer_distance);
prev_dts = current_dts;
@@ -1146,8 +1146,7 @@ void SourceBufferStream::TrimSpliceOverlap(const BufferQueue& new_buffers) {
// Search for overlapped buffer needs exclusive end value. Choosing smallest
// possible value.
- const base::TimeDelta end_pts =
- splice_timestamp + base::TimeDelta::FromMicroseconds(1);
+ const base::TimeDelta end_pts = splice_timestamp + base::Microseconds(1);
// Find if new buffer's start would overlap an existing buffer. Note that
// overlapped audio buffers might be nonkeyframes, but if so, FrameProcessor
@@ -1209,7 +1208,7 @@ void SourceBufferStream::TrimSpliceOverlap(const BufferQueue& new_buffers) {
// Don't trim for overlaps of less than one millisecond (which is frequently
// the extent of timestamp resolution for poorly encoded media).
- if (overlap_duration < base::TimeDelta::FromMilliseconds(1)) {
+ if (overlap_duration < base::Milliseconds(1)) {
std::stringstream log_string;
log_string << "Skipping audio splice trimming at PTS="
<< splice_timestamp.InMicroseconds() << "us. Found only "
@@ -1370,7 +1369,7 @@ void SourceBufferStream::GetTimestampInterval(const BufferQueue& buffers,
} else {
// TODO(chcunningham): Emit warning when 0ms durations are not expected.
// http://crbug.com/312836
- timestamp += base::TimeDelta::FromMicroseconds(1);
+ timestamp += base::Microseconds(1);
}
end_pts = std::max(timestamp, end_pts);
}
@@ -1495,7 +1494,7 @@ void SourceBufferStream::Seek(base::TimeDelta timestamp) {
// |timestamp| is already before the range start time, as can happen due to
// fudge room, do not adjust it.
const auto& config = audio_configs_[(*itr)->GetConfigIdAtTime(timestamp)];
- if (config.codec() == kCodecOpus &&
+ if (config.codec() == AudioCodec::kOpus &&
timestamp > (*itr)->GetStartTimestamp()) {
base::TimeDelta preroll_timestamp = std::max(
timestamp - config.seek_preroll(), (*itr)->GetStartTimestamp());
@@ -1896,8 +1895,7 @@ void SourceBufferStream::SetSelectedRangeIfNeeded(
return;
}
- start_timestamp =
- highest_output_buffer_timestamp_ + base::TimeDelta::FromMicroseconds(1);
+ start_timestamp = highest_output_buffer_timestamp_ + base::Microseconds(1);
}
base::TimeDelta seek_timestamp =
diff --git a/chromium/media/filters/source_buffer_stream.h b/chromium/media/filters/source_buffer_stream.h
index 718ae0cb84c..1c6d245b50d 100644
--- a/chromium/media/filters/source_buffer_stream.h
+++ b/chromium/media/filters/source_buffer_stream.h
@@ -67,6 +67,9 @@ class MEDIA_EXPORT SourceBufferStream {
MediaLog* media_log);
SourceBufferStream(const TextTrackConfig& text_config, MediaLog* media_log);
+ SourceBufferStream(const SourceBufferStream&) = delete;
+ SourceBufferStream& operator=(const SourceBufferStream&) = delete;
+
~SourceBufferStream();
// Signals that the next buffers appended are part of a new coded frame group
@@ -508,8 +511,6 @@ class MEDIA_EXPORT SourceBufferStream {
int num_splice_logs_ = 0;
int num_track_buffer_gap_warning_logs_ = 0;
int num_garbage_collect_algorithm_logs_ = 0;
-
- DISALLOW_COPY_AND_ASSIGN(SourceBufferStream);
};
} // namespace media
diff --git a/chromium/media/filters/source_buffer_stream_unittest.cc b/chromium/media/filters/source_buffer_stream_unittest.cc
index 27d13133594..f0768427db7 100644
--- a/chromium/media/filters/source_buffer_stream_unittest.cc
+++ b/chromium/media/filters/source_buffer_stream_unittest.cc
@@ -99,9 +99,10 @@ class SourceBufferStreamTest : public testing::Test {
void SetAudioStream() {
video_config_ = TestVideoConfig::Invalid();
- audio_config_.Initialize(
- kCodecVorbis, kSampleFormatPlanarF32, CHANNEL_LAYOUT_STEREO, 1000,
- EmptyExtraData(), EncryptionScheme::kUnencrypted, base::TimeDelta(), 0);
+ audio_config_.Initialize(AudioCodec::kVorbis, kSampleFormatPlanarF32,
+ CHANNEL_LAYOUT_STEREO, 1000, EmptyExtraData(),
+ EncryptionScheme::kUnencrypted, base::TimeDelta(),
+ 0);
ResetStream<>(audio_config_);
// Equivalent to 2ms per frame.
@@ -164,7 +165,7 @@ class SourceBufferStreamTest : public testing::Test {
void Seek(int position) { stream_->Seek(position * frame_duration_); }
void SeekToTimestampMs(int64_t timestamp_ms) {
- stream_->Seek(base::TimeDelta::FromMilliseconds(timestamp_ms));
+ stream_->Seek(base::Milliseconds(timestamp_ms));
}
bool GarbageCollect(base::TimeDelta media_time, int new_data_size) {
@@ -177,9 +178,8 @@ class SourceBufferStreamTest : public testing::Test {
}
void RemoveInMs(int start, int end, int duration) {
- Remove(base::TimeDelta::FromMilliseconds(start),
- base::TimeDelta::FromMilliseconds(end),
- base::TimeDelta::FromMilliseconds(duration));
+ Remove(base::Milliseconds(start), base::Milliseconds(end),
+ base::Milliseconds(duration));
}
void Remove(base::TimeDelta start, base::TimeDelta end,
@@ -193,12 +193,10 @@ class SourceBufferStreamTest : public testing::Test {
int GetRemovalRangeInMs(int start, int end, int bytes_to_free,
int* removal_end) {
- base::TimeDelta removal_end_timestamp =
- base::TimeDelta::FromMilliseconds(*removal_end);
- int bytes_removed =
- stream_->GetRemovalRange(base::TimeDelta::FromMilliseconds(start),
- base::TimeDelta::FromMilliseconds(end),
- bytes_to_free, &removal_end_timestamp);
+ base::TimeDelta removal_end_timestamp = base::Milliseconds(*removal_end);
+ int bytes_removed = stream_->GetRemovalRange(
+ base::Milliseconds(start), base::Milliseconds(end), bytes_to_free,
+ &removal_end_timestamp);
*removal_end = removal_end_timestamp.InMilliseconds();
return bytes_removed;
}
@@ -254,7 +252,7 @@ class SourceBufferStreamTest : public testing::Test {
ASSERT_GE(stream_->ranges_.size(), 1u);
const auto& range_ptr = *(stream_->ranges_.begin());
EXPECT_EQ(expectation, range_ptr->IsNextInPresentationSequence(
- base::TimeDelta::FromMilliseconds(pts_in_ms)));
+ base::Milliseconds(pts_in_ms)));
}
void CheckExpectedBuffers(
@@ -452,7 +450,7 @@ class SourceBufferStreamTest : public testing::Test {
}
base::TimeDelta ConvertToFrameDuration(int frames_per_second) {
- return base::TimeDelta::FromSeconds(1) / frames_per_second;
+ return base::Seconds(1) / frames_per_second;
}
void AppendBuffers(int starting_position,
@@ -622,7 +620,7 @@ class SourceBufferStreamTest : public testing::Test {
if (!is_us)
us *= base::Time::kMicrosecondsPerMillisecond;
- buffer_timestamps.push_back(base::TimeDelta::FromMicroseconds(us));
+ buffer_timestamps.push_back(base::Microseconds(us));
}
// Create buffer. Track ID is meaningless to these tests
@@ -638,7 +636,7 @@ class SourceBufferStreamTest : public testing::Test {
}
if (duration_in_us >= 0)
- buffer->set_duration(base::TimeDelta::FromMicroseconds(duration_in_us));
+ buffer->set_duration(base::Microseconds(duration_in_us));
// Simulate preroll buffers by just generating another buffer and sticking
// it as the preroll.
@@ -778,8 +776,7 @@ TEST_F(SourceBufferStreamTest,
// Append a coded frame group with a start timestamp of 0, but the first
// buffer starts at 30ms. This can happen in muxed content where the
// audio starts before the first frame.
- NewCodedFrameGroupAppend(base::TimeDelta::FromMilliseconds(0),
- "30K 60K 90K 120K");
+ NewCodedFrameGroupAppend(base::Milliseconds(0), "30K 60K 90K 120K");
CheckExpectedRangesByTimestamp("{ [0,150) }");
@@ -2599,7 +2596,7 @@ TEST_F(SourceBufferStreamTest,
// GOP in that first range. Neither can it collect the last appended GOP
// (which is the entire second range), so GC should return false since it
// couldn't collect enough.
- EXPECT_FALSE(GarbageCollect(base::TimeDelta::FromMilliseconds(95), 7));
+ EXPECT_FALSE(GarbageCollect(base::Milliseconds(95), 7));
CheckExpectedRangesByTimestamp("{ [50,100) [1000,1050) }");
}
@@ -2749,7 +2746,7 @@ TEST_F(SourceBufferStreamTest, GarbageCollection_DeleteAfterLastAppend) {
// So the ranges before GC are "{ [100,280) [310,400) [490,670) }".
NewCodedFrameGroupAppend("100K 130 160 190K 220 250K");
- EXPECT_TRUE(GarbageCollect(base::TimeDelta::FromMilliseconds(580), 0));
+ EXPECT_TRUE(GarbageCollect(base::Milliseconds(580), 0));
// Should save the newly appended GOPs.
CheckExpectedRangesByTimestamp("{ [100,280) [580,670) }");
@@ -2769,7 +2766,7 @@ TEST_F(SourceBufferStreamTest, GarbageCollection_DeleteAfterLastAppendMerged) {
// range. So the range before GC is "{ [220,670) }".
NewCodedFrameGroupAppend("220K 250 280 310K 340 370");
- EXPECT_TRUE(GarbageCollect(base::TimeDelta::FromMilliseconds(580), 0));
+ EXPECT_TRUE(GarbageCollect(base::Milliseconds(580), 0));
// Should save the newly appended GOPs.
CheckExpectedRangesByTimestamp("{ [220,400) [580,670) }");
@@ -2949,44 +2946,44 @@ TEST_F(SourceBufferStreamTest, GarbageCollection_SaveDataAtPlaybackPosition) {
CheckExpectedRanges("{ [0,299) }");
// Playback position at 0, all data must be preserved.
- EXPECT_FALSE(GarbageCollect(base::TimeDelta::FromMilliseconds(0), 0));
+ EXPECT_FALSE(GarbageCollect(base::Milliseconds(0), 0));
CheckExpectedRanges("{ [0,299) }");
// Playback position at 1 sec, the first second of data [0,29) should be
// collected, since we are way over memory limit.
- EXPECT_FALSE(GarbageCollect(base::TimeDelta::FromMilliseconds(1000), 0));
+ EXPECT_FALSE(GarbageCollect(base::Milliseconds(1000), 0));
CheckExpectedRanges("{ [30,299) }");
// Playback position at 1.1 sec, no new data can be collected, since the
// playback position is still in the first GOP of buffered data.
- EXPECT_FALSE(GarbageCollect(base::TimeDelta::FromMilliseconds(1100), 0));
+ EXPECT_FALSE(GarbageCollect(base::Milliseconds(1100), 0));
CheckExpectedRanges("{ [30,299) }");
// Playback position at 5.166 sec, just at the very end of GOP corresponding
// to buffer range 150-155, which should be preserved.
- EXPECT_FALSE(GarbageCollect(base::TimeDelta::FromMilliseconds(5166), 0));
+ EXPECT_FALSE(GarbageCollect(base::Milliseconds(5166), 0));
CheckExpectedRanges("{ [150,299) }");
// Playback position at 5.167 sec, just past the end of GOP corresponding to
// buffer range 150-155, it should be garbage collected now.
- EXPECT_FALSE(GarbageCollect(base::TimeDelta::FromMilliseconds(5167), 0));
+ EXPECT_FALSE(GarbageCollect(base::Milliseconds(5167), 0));
CheckExpectedRanges("{ [155,299) }");
// Playback at 9.0 sec, we can now successfully collect all data except the
// last second and we are back under memory limit of 30 buffers, so GCIfNeeded
// should return true.
- EXPECT_TRUE(GarbageCollect(base::TimeDelta::FromMilliseconds(9000), 0));
+ EXPECT_TRUE(GarbageCollect(base::Milliseconds(9000), 0));
CheckExpectedRanges("{ [270,299) }");
// Playback at 9.999 sec, GC succeeds, since we are under memory limit even
// without removing any data.
- EXPECT_TRUE(GarbageCollect(base::TimeDelta::FromMilliseconds(9999), 0));
+ EXPECT_TRUE(GarbageCollect(base::Milliseconds(9999), 0));
CheckExpectedRanges("{ [270,299) }");
// Playback at 15 sec, this should never happen during regular playback in
// browser, since this position has no data buffered, but it should still
// cause no problems to GC algorithm, so test it just in case.
- EXPECT_TRUE(GarbageCollect(base::TimeDelta::FromMilliseconds(15000), 0));
+ EXPECT_TRUE(GarbageCollect(base::Milliseconds(15000), 0));
CheckExpectedRanges("{ [270,299) }");
}
@@ -3016,14 +3013,14 @@ TEST_F(SourceBufferStreamTest, GarbageCollection_SaveAppendGOP) {
// GC. Because it is after 290ms, this tests that the GOP is saved when
// deleting from the back.
NewCodedFrameGroupAppend("500K 530 560 590");
- EXPECT_FALSE(GarbageCollect(base::TimeDelta::FromMilliseconds(290), 0));
+ EXPECT_FALSE(GarbageCollect(base::Milliseconds(290), 0));
// Should save GOPs between 290ms and the last GOP appended.
CheckExpectedRangesByTimestamp("{ [290,380) [500,620) }");
// Continue appending to this GOP after GC.
AppendBuffers("620D30");
- EXPECT_FALSE(GarbageCollect(base::TimeDelta::FromMilliseconds(290), 0));
+ EXPECT_FALSE(GarbageCollect(base::Milliseconds(290), 0));
CheckExpectedRangesByTimestamp("{ [290,380) [500,650) }");
}
@@ -3041,11 +3038,11 @@ TEST_F(SourceBufferStreamTest, GarbageCollection_SaveAppendGOP_Middle) {
// This whole GOP should be saved after GC, which will fail due to the GOP
// being larger than 1 buffer.
- EXPECT_FALSE(GarbageCollect(base::TimeDelta::FromMilliseconds(80), 0));
+ EXPECT_FALSE(GarbageCollect(base::Milliseconds(80), 0));
CheckExpectedRangesByTimestamp("{ [80,170) }");
// We should still be able to continue appending data to GOP
AppendBuffers("170D30");
- EXPECT_FALSE(GarbageCollect(base::TimeDelta::FromMilliseconds(80), 0));
+ EXPECT_FALSE(GarbageCollect(base::Milliseconds(80), 0));
CheckExpectedRangesByTimestamp("{ [80,200) }");
// Append a 2nd range after this range, without triggering GC.
@@ -3059,14 +3056,14 @@ TEST_F(SourceBufferStreamTest, GarbageCollection_SaveAppendGOP_Middle) {
// it is after the selected range, this tests that the GOP is saved when
// deleting from the back.
NewCodedFrameGroupAppend("500K 530 560 590");
- EXPECT_FALSE(GarbageCollect(base::TimeDelta::FromMilliseconds(80), 0));
+ EXPECT_FALSE(GarbageCollect(base::Milliseconds(80), 0));
// Should save the GOPs between the seek point and GOP that was last appended
CheckExpectedRangesByTimestamp("{ [80,200) [400,620) }");
// Continue appending to this GOP after GC.
AppendBuffers("620D30");
- EXPECT_FALSE(GarbageCollect(base::TimeDelta::FromMilliseconds(80), 0));
+ EXPECT_FALSE(GarbageCollect(base::Milliseconds(80), 0));
CheckExpectedRangesByTimestamp("{ [80,200) [400,650) }");
}
@@ -3086,7 +3083,7 @@ TEST_F(SourceBufferStreamTest, GarbageCollection_SaveAppendGOP_Selected1) {
// GC should save the GOPs at 0ms and 90ms, and will fail since each GOP is
// larger than 1 buffer.
- EXPECT_FALSE(GarbageCollect(base::TimeDelta::FromMilliseconds(90), 0));
+ EXPECT_FALSE(GarbageCollect(base::Milliseconds(90), 0));
CheckExpectedRangesByTimestamp("{ [0,180) }");
// Seek to 0 and check all buffers.
@@ -3099,7 +3096,7 @@ TEST_F(SourceBufferStreamTest, GarbageCollection_SaveAppendGOP_Selected1) {
NewCodedFrameGroupAppend("180K 210 240");
// Should save the GOP at 90ms and the GOP at 180ms.
- EXPECT_FALSE(GarbageCollect(base::TimeDelta::FromMilliseconds(90), 0));
+ EXPECT_FALSE(GarbageCollect(base::Milliseconds(90), 0));
CheckExpectedRangesByTimestamp("{ [90,270) }");
CheckExpectedBuffers("90K 120 150 180K 210 240");
CheckNoNextBuffer();
@@ -3122,7 +3119,7 @@ TEST_F(SourceBufferStreamTest, GarbageCollection_SaveAppendGOP_Selected2) {
// GC will save data in the range where the most recent append has happened
// [0; 180) and the range where the next read position is [270;360)
- EXPECT_FALSE(GarbageCollect(base::TimeDelta::FromMilliseconds(270), 0));
+ EXPECT_FALSE(GarbageCollect(base::Milliseconds(270), 0));
CheckExpectedRangesByTimestamp("{ [0,180) [270,360) }");
// Add 3 GOPs to the end of the selected range at 360ms, 450ms, and 540ms.
@@ -3132,7 +3129,7 @@ TEST_F(SourceBufferStreamTest, GarbageCollection_SaveAppendGOP_Selected2) {
// Overlap the GOP at 450ms and garbage collect to test deleting from the
// back.
NewCodedFrameGroupAppend("450K 480 510");
- EXPECT_FALSE(GarbageCollect(base::TimeDelta::FromMilliseconds(270), 0));
+ EXPECT_FALSE(GarbageCollect(base::Milliseconds(270), 0));
// Should save GOPs from GOP at 270ms to GOP at 450ms.
CheckExpectedRangesByTimestamp("{ [270,540) }");
@@ -3155,7 +3152,7 @@ TEST_F(SourceBufferStreamTest, GarbageCollection_SaveAppendGOP_Selected3) {
// GC should save the newly appended GOP, which is also the next GOP that
// will be returned from the seek request.
- EXPECT_FALSE(GarbageCollect(base::TimeDelta::FromMilliseconds(0), 0));
+ EXPECT_FALSE(GarbageCollect(base::Milliseconds(0), 0));
CheckExpectedRangesByTimestamp("{ [0,60) }");
// Check the buffers in the range.
@@ -3167,7 +3164,7 @@ TEST_F(SourceBufferStreamTest, GarbageCollection_SaveAppendGOP_Selected3) {
// GC should still save the rest of this GOP and should be able to fulfill
// the read.
- EXPECT_FALSE(GarbageCollect(base::TimeDelta::FromMilliseconds(0), 0));
+ EXPECT_FALSE(GarbageCollect(base::Milliseconds(0), 0));
CheckExpectedRangesByTimestamp("{ [0,120) }");
CheckExpectedBuffers("60 90");
CheckNoNextBuffer();
@@ -3206,7 +3203,7 @@ TEST_F(SourceBufferStreamTest, GarbageCollection_MediaTimeAfterLastAppendTime) {
// the last appended buffer (330), but still within buffered ranges, taking
// into account the duration of the last frame (timestamp of the last frame is
// 330, duration is 30, so the latest valid buffered position is 330+30=360).
- EXPECT_TRUE(GarbageCollect(base::TimeDelta::FromMilliseconds(360), 0));
+ EXPECT_TRUE(GarbageCollect(base::Milliseconds(360), 0));
// GC should collect one GOP from the front to bring us back under memory
// limit of 10 buffers.
@@ -3233,7 +3230,7 @@ TEST_F(SourceBufferStreamTest,
// return a media_time that is slightly outside of video buffered range). In
// those cases the GC algorithm should clamp the media_time value to the
// buffered ranges to work correctly (see crbug.com/563292).
- EXPECT_TRUE(GarbageCollect(base::TimeDelta::FromMilliseconds(361), 0));
+ EXPECT_TRUE(GarbageCollect(base::Milliseconds(361), 0));
// GC should collect one GOP from the front to bring us back under memory
// limit of 10 buffers.
@@ -3457,7 +3454,7 @@ TEST_F(SourceBufferStreamTest, SetExplicitDuration) {
CheckExpectedRangesByTimestamp("{ [50,100) [150,200) [250,300) }");
// Set duration to be 80ms. Truncates the buffered data after 80ms.
- stream_->OnSetDuration(base::TimeDelta::FromMilliseconds(80));
+ stream_->OnSetDuration(base::Milliseconds(80));
// The simulated P-frame at PTS 90ms should have been
// removed by the duration truncation. Only the frame at PTS 50ms should
@@ -3499,7 +3496,7 @@ TEST_F(SourceBufferStreamTest, SetExplicitDuration_EdgeCase2) {
// Trim off last 2 buffers, totaling 8 ms. Notably less than the current fudge
// room of 10 ms.
- stream_->OnSetDuration(base::TimeDelta::FromMilliseconds(5));
+ stream_->OnSetDuration(base::Milliseconds(5));
// Verify truncation.
CheckExpectedRangesByTimestamp("{ [0,5) }");
@@ -3547,7 +3544,7 @@ TEST_F(SourceBufferStreamTest, SetExplicitDuration_DeletePartialRange) {
// Check expected ranges.
CheckExpectedRangesByTimestamp("{ [0,50) [100,200) [250,300) }");
- stream_->OnSetDuration(base::TimeDelta::FromMilliseconds(140));
+ stream_->OnSetDuration(base::Milliseconds(140));
// The B-frames at PTS 110-130 were in the GOP in decode order after
// the simulated P-frame at PTS 140 which was truncated, so those B-frames
@@ -3566,7 +3563,7 @@ TEST_F(SourceBufferStreamTest, SetExplicitDuration_DeleteSelectedRange) {
SeekToTimestampMs(150);
// Set duration to 50ms.
- stream_->OnSetDuration(base::TimeDelta::FromMilliseconds(50));
+ stream_->OnSetDuration(base::Milliseconds(50));
// Expect everything to be deleted, and should not have next buffer anymore.
CheckNoNextBuffer();
@@ -3626,7 +3623,7 @@ TEST_F(SourceBufferStreamTest, SetExplicitDuration_UpdateSelectedRange) {
CheckExpectedBuffers("0K 30");
// Set duration to be right before buffer 1.
- stream_->OnSetDuration(base::TimeDelta::FromMilliseconds(60));
+ stream_->OnSetDuration(base::Milliseconds(60));
// Verify that there is no next buffer.
CheckNoNextBuffer();
@@ -3644,14 +3641,13 @@ TEST_F(SourceBufferStreamTest,
// Append a coded frame group with a start timestamp of 200, but the first
// buffer starts at 230ms. This can happen in muxed content where the
// audio starts before the first frame.
- NewCodedFrameGroupAppend(base::TimeDelta::FromMilliseconds(200),
- "230K 260K 290K 320K");
+ NewCodedFrameGroupAppend(base::Milliseconds(200), "230K 260K 290K 320K");
NewCodedFrameGroupAppend("400K 430K 460K");
CheckExpectedRangesByTimestamp("{ [0,90) [200,350) [400,490) }");
- stream_->OnSetDuration(base::TimeDelta::FromMilliseconds(120));
+ stream_->OnSetDuration(base::Milliseconds(120));
// Verify that the buffered ranges are updated properly and we don't crash.
CheckExpectedRangesByTimestamp("{ [0,90) }");
@@ -3670,7 +3666,7 @@ TEST_F(SourceBufferStreamTest, SetExplicitDuration_MarkEOS) {
// Set duration to be before the seeked to position.
// This will result in truncation of the selected range and a reset
// of NextBufferPosition.
- stream_->OnSetDuration(base::TimeDelta::FromMilliseconds(40));
+ stream_->OnSetDuration(base::Milliseconds(40));
// The P-frame at PTS 40ms was removed, so its dependent B-frames at PTS 10-30
// were also removed.
@@ -3697,7 +3693,7 @@ TEST_F(SourceBufferStreamTest, SetExplicitDuration_MarkEOS_IsSeekPending) {
// Set duration to be before the seeked to position.
// This will result in truncation of the selected range and a reset
// of NextBufferPosition.
- stream_->OnSetDuration(base::TimeDelta::FromMilliseconds(40));
+ stream_->OnSetDuration(base::Milliseconds(40));
// The P-frame at PTS 40ms was removed, so its dependent B-frames at PTS 10-30
// were also removed.
@@ -3893,8 +3889,8 @@ TEST_F(SourceBufferStreamTest, SameTimestamp_Video_Overlap_3) {
// Test all the valid same timestamp cases for audio.
TEST_F(SourceBufferStreamTest, SameTimestamp_Audio) {
- AudioDecoderConfig config(kCodecMP3, kSampleFormatF32, CHANNEL_LAYOUT_STEREO,
- 44100, EmptyExtraData(),
+ AudioDecoderConfig config(AudioCodec::kMP3, kSampleFormatF32,
+ CHANNEL_LAYOUT_STEREO, 44100, EmptyExtraData(),
EncryptionScheme::kUnencrypted);
ResetStream<>(config);
Seek(0);
@@ -4190,8 +4186,7 @@ TEST_F(SourceBufferStreamTest, Remove_GapAtBeginningOfGroup) {
Seek(0);
// Append a coded frame group that has a gap at the beginning of it.
- NewCodedFrameGroupAppend(base::TimeDelta::FromMilliseconds(0),
- "30K 60 90 120K 150");
+ NewCodedFrameGroupAppend(base::Milliseconds(0), "30K 60 90 120K 150");
CheckExpectedRangesByTimestamp("{ [0,180) }");
// Remove the gap that doesn't contain any buffers.
@@ -4369,8 +4364,8 @@ TEST_F(SourceBufferStreamTest, Audio_SpliceFrame_NoSplice) {
// Verify buffer timestamps and durations are preserved and no buffers have
// discard padding (indicating no splice trimming).
EXPECT_STATUS_FOR_STREAM_OP(kSuccess, GetNextBuffer(&buffer));
- EXPECT_EQ(base::TimeDelta::FromMilliseconds(i * 2), buffer->timestamp());
- EXPECT_EQ(base::TimeDelta::FromMilliseconds(2), buffer->duration());
+ EXPECT_EQ(base::Milliseconds(i * 2), buffer->timestamp());
+ EXPECT_EQ(base::Milliseconds(2), buffer->duration());
EXPECT_EQ(kEmptyDiscardPadding, buffer->discard_padding());
}
@@ -4424,7 +4419,7 @@ TEST_F(SourceBufferStreamTest, Audio_NoSpliceForEstimatedDuration) {
}
TEST_F(SourceBufferStreamTest, Audio_SpliceTrimming_ExistingTrimming) {
- const base::TimeDelta kDuration = base::TimeDelta::FromMilliseconds(4);
+ const base::TimeDelta kDuration = base::Milliseconds(4);
const base::TimeDelta kNoDiscard = base::TimeDelta();
const bool is_keyframe = true;
@@ -4441,7 +4436,7 @@ TEST_F(SourceBufferStreamTest, Audio_SpliceTrimming_ExistingTrimming) {
// Buffer A1: PTS = 0, front discard = 2ms, duration = 2ms.
scoped_refptr<StreamParserBuffer> bufferA1 = StreamParserBuffer::CopyFrom(
&kDataA, kDataSize, is_keyframe, DemuxerStream::AUDIO, 0);
- bufferA1->set_timestamp(base::TimeDelta::FromMilliseconds(0));
+ bufferA1->set_timestamp(base::Milliseconds(0));
bufferA1->set_duration(kDuration / 2);
const DecoderBuffer::DiscardPadding discardA1 =
std::make_pair(kDuration / 2, kNoDiscard);
@@ -4451,7 +4446,7 @@ TEST_F(SourceBufferStreamTest, Audio_SpliceTrimming_ExistingTrimming) {
// Buffer A2: PTS = 2, end discard = 2ms, duration = 2ms.
scoped_refptr<StreamParserBuffer> bufferA2 = StreamParserBuffer::CopyFrom(
&kDataA, kDataSize, is_keyframe, DemuxerStream::AUDIO, 0);
- bufferA2->set_timestamp(base::TimeDelta::FromMilliseconds(2));
+ bufferA2->set_timestamp(base::Milliseconds(2));
bufferA2->set_duration(kDuration / 2);
const DecoderBuffer::DiscardPadding discardA2 =
std::make_pair(kNoDiscard, kDuration / 2);
@@ -4461,7 +4456,7 @@ TEST_F(SourceBufferStreamTest, Audio_SpliceTrimming_ExistingTrimming) {
// Buffer B1: PTS = 3, front discard = 2ms, duration = 2ms.
scoped_refptr<StreamParserBuffer> bufferB1 = StreamParserBuffer::CopyFrom(
&kDataA, kDataSize, is_keyframe, DemuxerStream::AUDIO, 0);
- bufferB1->set_timestamp(base::TimeDelta::FromMilliseconds(3));
+ bufferB1->set_timestamp(base::Milliseconds(3));
bufferB1->set_duration(kDuration / 2);
const DecoderBuffer::DiscardPadding discardB1 =
std::make_pair(kDuration / 2, kNoDiscard);
@@ -4471,7 +4466,7 @@ TEST_F(SourceBufferStreamTest, Audio_SpliceTrimming_ExistingTrimming) {
// Buffer B2: PTS = 5, no discard padding, duration = 4ms.
scoped_refptr<StreamParserBuffer> bufferB2 = StreamParserBuffer::CopyFrom(
&kDataA, kDataSize, is_keyframe, DemuxerStream::AUDIO, 0);
- bufferB2->set_timestamp(base::TimeDelta::FromMilliseconds(5));
+ bufferB2->set_timestamp(base::Milliseconds(5));
bufferB2->set_duration(kDuration);
B_buffers.push_back(bufferB2);
@@ -4486,15 +4481,15 @@ TEST_F(SourceBufferStreamTest, Audio_SpliceTrimming_ExistingTrimming) {
// Buffer A1 was not spliced, should be unchanged.
EXPECT_STATUS_FOR_STREAM_OP(kSuccess, GetNextBuffer(&read_buffer));
- EXPECT_EQ(base::TimeDelta::FromMilliseconds(0), read_buffer->timestamp());
+ EXPECT_EQ(base::Milliseconds(0), read_buffer->timestamp());
EXPECT_EQ(kDuration / 2, read_buffer->duration());
EXPECT_EQ(discardA1, read_buffer->discard_padding());
// Buffer A2 was overlapped by buffer B1 1ms. Splice trimming should trim A2's
// duration and increase its discard padding by 1ms.
- const base::TimeDelta overlap = base::TimeDelta::FromMilliseconds(1);
+ const base::TimeDelta overlap = base::Milliseconds(1);
EXPECT_STATUS_FOR_STREAM_OP(kSuccess, GetNextBuffer(&read_buffer));
- EXPECT_EQ(base::TimeDelta::FromMilliseconds(2), read_buffer->timestamp());
+ EXPECT_EQ(base::Milliseconds(2), read_buffer->timestamp());
EXPECT_EQ((kDuration / 2) - overlap, read_buffer->duration());
const DecoderBuffer::DiscardPadding overlap_discard =
std::make_pair(discardA2.first, discardA2.second + overlap);
@@ -4503,13 +4498,13 @@ TEST_F(SourceBufferStreamTest, Audio_SpliceTrimming_ExistingTrimming) {
// Buffer B1 is overlapping A2, but B1 should be unchanged - splice trimming
// only modifies the earlier buffer (A1).
EXPECT_STATUS_FOR_STREAM_OP(kSuccess, GetNextBuffer(&read_buffer));
- EXPECT_EQ(base::TimeDelta::FromMilliseconds(3), read_buffer->timestamp());
+ EXPECT_EQ(base::Milliseconds(3), read_buffer->timestamp());
EXPECT_EQ(kDuration / 2, read_buffer->duration());
EXPECT_EQ(discardB1, read_buffer->discard_padding());
// Buffer B2 is not spliced, should be unchanged.
EXPECT_STATUS_FOR_STREAM_OP(kSuccess, GetNextBuffer(&read_buffer));
- EXPECT_EQ(base::TimeDelta::FromMilliseconds(5), read_buffer->timestamp());
+ EXPECT_EQ(base::Milliseconds(5), read_buffer->timestamp());
EXPECT_EQ(kDuration, read_buffer->duration());
EXPECT_EQ(std::make_pair(kNoDiscard, kNoDiscard),
read_buffer->discard_padding());
@@ -4522,7 +4517,7 @@ TEST_F(SourceBufferStreamTest, Audio_SpliceFrame_NoMillisecondSplices) {
video_config_ = TestVideoConfig::Invalid();
audio_config_.Initialize(
- kCodecVorbis, kSampleFormatPlanarF32, CHANNEL_LAYOUT_STEREO, 4000,
+ AudioCodec::kVorbis, kSampleFormatPlanarF32, CHANNEL_LAYOUT_STEREO, 4000,
EmptyExtraData(), EncryptionScheme::kUnencrypted, base::TimeDelta(), 0);
ResetStream<>(audio_config_);
// Equivalent to 0.5ms per frame.
@@ -4536,8 +4531,7 @@ TEST_F(SourceBufferStreamTest, Audio_SpliceFrame_NoMillisecondSplices) {
// Overlap the range [0, 2) with [1.25, 2); this results in an overlap of
// 0.25ms between the original buffer at time 1.0 and the new buffer at time
// 1.25.
- NewCodedFrameGroupAppend_OffsetFirstBuffer(
- 2, 2, base::TimeDelta::FromMillisecondsD(0.25));
+ NewCodedFrameGroupAppend_OffsetFirstBuffer(2, 2, base::Milliseconds(0.25));
CheckExpectedRangesByTimestamp("{ [0,2) }");
// A splice frame should not be generated since it requires at least 1ms of
@@ -4554,7 +4548,7 @@ TEST_F(SourceBufferStreamTest, Audio_PrerollFrame) {
}
TEST_F(SourceBufferStreamTest, Audio_ConfigChangeWithPreroll) {
- AudioDecoderConfig new_config(kCodecVorbis, kSampleFormatPlanarF32,
+ AudioDecoderConfig new_config(AudioCodec::kVorbis, kSampleFormatPlanarF32,
CHANNEL_LAYOUT_MONO, 2000, EmptyExtraData(),
EncryptionScheme::kUnencrypted);
SetAudioStream();
@@ -4599,10 +4593,10 @@ TEST_F(SourceBufferStreamTest, Audio_Opus_SeekToJustBeforeRangeStart) {
// in case the associated logic to check same config in the preroll time
// interval requires a nonzero seek_preroll value.
video_config_ = TestVideoConfig::Invalid();
- audio_config_.Initialize(kCodecOpus, kSampleFormatPlanarF32,
+ audio_config_.Initialize(AudioCodec::kOpus, kSampleFormatPlanarF32,
CHANNEL_LAYOUT_STEREO, 1000, EmptyExtraData(),
EncryptionScheme::kUnencrypted,
- base::TimeDelta::FromMilliseconds(10), 0);
+ base::Milliseconds(10), 0);
ResetStream<>(audio_config_);
// Equivalent to 1s per frame.
@@ -4975,7 +4969,7 @@ TEST_F(SourceBufferStreamTest,
NewCodedFrameGroupAppend("0K 10 20");
CheckExpectedRangesByTimestamp("{ [0,30) [1000,1090) }");
- SignalStartOfCodedFrameGroup(base::TimeDelta::FromMilliseconds(1070));
+ SignalStartOfCodedFrameGroup(base::Milliseconds(1070));
CheckExpectedRangesByTimestamp("{ [0,30) [1000,1090) }");
RemoveInMs(1030, 1050, 1090);
@@ -4998,7 +4992,7 @@ TEST_F(SourceBufferStreamTest,
TEST_F(SourceBufferStreamTest,
StartCodedFrameGroup_InExisting_AppendMuchLater) {
NewCodedFrameGroupAppend("0K 10 20 30K 40 50");
- SignalStartOfCodedFrameGroup(base::TimeDelta::FromMilliseconds(45));
+ SignalStartOfCodedFrameGroup(base::Milliseconds(45));
CheckExpectedRangesByTimestamp("{ [0,60) }");
AppendBuffers("2000K 2010");
@@ -5011,7 +5005,7 @@ TEST_F(SourceBufferStreamTest,
TEST_F(SourceBufferStreamTest,
StartCodedFrameGroup_InExisting_RemoveGOP_ThenAppend_1) {
NewCodedFrameGroupAppend("0K 10 20 30K 40 50");
- SignalStartOfCodedFrameGroup(base::TimeDelta::FromMilliseconds(30));
+ SignalStartOfCodedFrameGroup(base::Milliseconds(30));
RemoveInMs(30, 60, 60);
CheckExpectedRangesByTimestamp("{ [0,30) }");
@@ -5029,7 +5023,7 @@ TEST_F(SourceBufferStreamTest,
// to be 40.001ms (which is just beyond the highest buffered timestamp at or
// before 45ms) to help prevent potential discontinuity across the front of
// the overlapping append.
- SignalStartOfCodedFrameGroup(base::TimeDelta::FromMilliseconds(45));
+ SignalStartOfCodedFrameGroup(base::Milliseconds(45));
RemoveInMs(30, 60, 60);
CheckExpectedRangesByTimestamp("{ [0,30) }");
@@ -5049,7 +5043,7 @@ TEST_F(SourceBufferStreamTest,
TEST_F(SourceBufferStreamTest,
StartCodedFrameGroup_InExisting_RemoveMostRecentAppend_ThenAppend_1) {
NewCodedFrameGroupAppend("0K 10 20 30K 40 50");
- SignalStartOfCodedFrameGroup(base::TimeDelta::FromMilliseconds(45));
+ SignalStartOfCodedFrameGroup(base::Milliseconds(45));
RemoveInMs(50, 60, 60);
CheckExpectedRangesByTimestamp("{ [0,50) }");
@@ -5063,7 +5057,7 @@ TEST_F(SourceBufferStreamTest,
TEST_F(SourceBufferStreamTest,
StartCodedFrameGroup_InExisting_RemoveMostRecentAppend_ThenAppend_2) {
NewCodedFrameGroupAppend("0K 10 20 30K 40 50");
- SignalStartOfCodedFrameGroup(base::TimeDelta::FromMilliseconds(50));
+ SignalStartOfCodedFrameGroup(base::Milliseconds(50));
RemoveInMs(50, 60, 60);
CheckExpectedRangesByTimestamp("{ [0,50) }");
@@ -5078,19 +5072,16 @@ TEST_F(SourceBufferStreamTest, GetHighestPresentationTimestamp) {
EXPECT_EQ(base::TimeDelta(), stream_->GetHighestPresentationTimestamp());
NewCodedFrameGroupAppend("0K 10K");
- EXPECT_EQ(base::TimeDelta::FromMilliseconds(10),
- stream_->GetHighestPresentationTimestamp());
+ EXPECT_EQ(base::Milliseconds(10), stream_->GetHighestPresentationTimestamp());
RemoveInMs(0, 10, 20);
- EXPECT_EQ(base::TimeDelta::FromMilliseconds(10),
- stream_->GetHighestPresentationTimestamp());
+ EXPECT_EQ(base::Milliseconds(10), stream_->GetHighestPresentationTimestamp());
RemoveInMs(10, 20, 20);
EXPECT_EQ(base::TimeDelta(), stream_->GetHighestPresentationTimestamp());
NewCodedFrameGroupAppend("0K 10K");
- EXPECT_EQ(base::TimeDelta::FromMilliseconds(10),
- stream_->GetHighestPresentationTimestamp());
+ EXPECT_EQ(base::Milliseconds(10), stream_->GetHighestPresentationTimestamp());
RemoveInMs(10, 20, 20);
EXPECT_EQ(base::TimeDelta(), stream_->GetHighestPresentationTimestamp());
@@ -5105,9 +5096,9 @@ TEST_F(SourceBufferStreamTest, GarbageCollectionUnderMemoryPressure) {
// notification takes no effect on the memory limits and won't remove
// anything from buffered ranges, since we are under the limit of 20 bytes.
stream_->OnMemoryPressure(
- base::TimeDelta::FromMilliseconds(0),
+ base::Milliseconds(0),
base::MemoryPressureListener::MEMORY_PRESSURE_LEVEL_MODERATE, false);
- EXPECT_TRUE(GarbageCollect(base::TimeDelta::FromMilliseconds(8), 0));
+ EXPECT_TRUE(GarbageCollect(base::Milliseconds(8), 0));
CheckExpectedRangesByTimestamp("{ [0,16) }");
// Now enable the feature (on top of any overrides already in
@@ -5117,26 +5108,26 @@ TEST_F(SourceBufferStreamTest, GarbageCollectionUnderMemoryPressure) {
// Verify that effective MSE memory limit is reduced under memory pressure.
stream_->OnMemoryPressure(
- base::TimeDelta::FromMilliseconds(0),
+ base::Milliseconds(0),
base::MemoryPressureListener::MEMORY_PRESSURE_LEVEL_MODERATE, false);
// Effective memory limit is now 8 buffers, but we still will not collect any
// data between the current playback position 3 and last append position 15.
- EXPECT_TRUE(GarbageCollect(base::TimeDelta::FromMilliseconds(4), 0));
+ EXPECT_TRUE(GarbageCollect(base::Milliseconds(4), 0));
CheckExpectedRangesByTimestamp("{ [3,16) }");
// As playback proceeds further to time 9 we should be able to collect
// enough data to bring us back under memory limit of 8 buffers.
- EXPECT_TRUE(GarbageCollect(base::TimeDelta::FromMilliseconds(9), 0));
+ EXPECT_TRUE(GarbageCollect(base::Milliseconds(9), 0));
CheckExpectedRangesByTimestamp("{ [9,16) }");
// If memory pressure becomes critical, the garbage collection algorithm
// becomes even more aggressive and collects everything up to the current
// playback position.
stream_->OnMemoryPressure(
- base::TimeDelta::FromMilliseconds(0),
+ base::Milliseconds(0),
base::MemoryPressureListener::MEMORY_PRESSURE_LEVEL_CRITICAL, false);
- EXPECT_TRUE(GarbageCollect(base::TimeDelta::FromMilliseconds(13), 0));
+ EXPECT_TRUE(GarbageCollect(base::Milliseconds(13), 0));
CheckExpectedRangesByTimestamp("{ [12,16) }");
// But even under critical memory pressure the MSE memory limit imposed by the
@@ -5144,7 +5135,7 @@ TEST_F(SourceBufferStreamTest, GarbageCollectionUnderMemoryPressure) {
// successfully up to the hard limit of 16 bytes.
NewCodedFrameGroupAppend("16K 17 18 19 20 21 22 23 24 25 26 27");
CheckExpectedRangesByTimestamp("{ [12,28) }");
- EXPECT_TRUE(GarbageCollect(base::TimeDelta::FromMilliseconds(13), 0));
+ EXPECT_TRUE(GarbageCollect(base::Milliseconds(13), 0));
CheckExpectedRangesByTimestamp("{ [12,28) }");
}
@@ -5161,11 +5152,11 @@ TEST_F(SourceBufferStreamTest, InstantGarbageCollectionUnderMemoryPressure) {
base::test::ScopedFeatureList scoped_feature_list;
scoped_feature_list.InitAndEnableFeature(kMemoryPressureBasedSourceBufferGC);
stream_->OnMemoryPressure(
- base::TimeDelta::FromMilliseconds(7),
+ base::Milliseconds(7),
base::MemoryPressureListener::MEMORY_PRESSURE_LEVEL_CRITICAL, true);
CheckExpectedRangesByTimestamp("{ [6,16) }");
stream_->OnMemoryPressure(
- base::TimeDelta::FromMilliseconds(9),
+ base::Milliseconds(9),
base::MemoryPressureListener::MEMORY_PRESSURE_LEVEL_CRITICAL, true);
CheckExpectedRangesByTimestamp("{ [9,16) }");
}
@@ -5188,7 +5179,7 @@ TEST_F(SourceBufferStreamTest, GCFromFrontThenExplicitRemoveFromMiddleToEnd) {
// Seek to the second GOP's keyframe to allow GC to collect all of the first
// GOP (ostensibly increasing SourceBufferRange's |keyframe_map_index_base_|).
SeekToTimestampMs(50);
- GarbageCollect(base::TimeDelta::FromMilliseconds(50), 0);
+ GarbageCollect(base::Milliseconds(50), 0);
CheckExpectedRangesByTimestamp("{ [50,150) }");
// Remove from the middle of the first remaining GOP to the end of the range.
@@ -5200,7 +5191,7 @@ TEST_F(SourceBufferStreamTest, BFrames_WithoutEditList) {
// Simulates B-frame content where MP4 edit lists are not used to shift PTS so
// it matches DTS. From acolwell@chromium.org in https://crbug.com/398130
Seek(0);
- NewCodedFrameGroupAppend(base::TimeDelta::FromMilliseconds(60),
+ NewCodedFrameGroupAppend(base::Milliseconds(60),
"60|0K 180|30 90|60 120|90 150|120");
CheckExpectedRangesByTimestamp("{ [60,210) }");
CheckExpectedBuffers("60|0K 180|30 90|60 120|90 150|120");
@@ -5488,7 +5479,7 @@ TEST_F(SourceBufferStreamTest, AllowIncrementalAppendsToCoalesceRangeGap) {
// incrementally append more frames of that preceding GOP to fill in the
// timeline to abut the first appended GOP's keyframe timestamp and observe no
// further buffered range change or discontinuity.
- NewCodedFrameGroupAppend(base::TimeDelta::FromMilliseconds(100), "150K 160");
+ NewCodedFrameGroupAppend(base::Milliseconds(100), "150K 160");
SeekToTimestampMs(100);
CheckExpectedRangesByTimestamp("{ [100,170) }");
CheckExpectedRangeEndTimes("{ <160,170> }");
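Most of the churn in this test file and in source_buffer_stream.cc above is the mechanical move from the base::TimeDelta::From*() factories to the shorter base::Milliseconds() / base::Seconds() / base::Microseconds() helpers. The new helpers are templated over the argument type, which is why the *D variants (FromMillisecondsD, FromSecondsD) collapse into the same names; see the 0.25 ms splice append above and the Interval() helper in the cadence-estimator tests below. A small sketch, assuming Chromium's base/time/time.h (it does not build standalone):

    #include "base/time/time.h"

    base::TimeDelta TimeDeltaFactoryExamples() {
      // Old spellings replaced throughout this patch:
      //   base::TimeDelta::FromMilliseconds(1000)
      //   base::TimeDelta::FromMillisecondsD(0.25)
      //   base::TimeDelta::FromSecondsD(1.0 / hertz)
      // The new factories accept integral and floating-point arguments alike.
      base::TimeDelta a = base::Milliseconds(1000);
      base::TimeDelta b = base::Milliseconds(0.25);
      base::TimeDelta c = base::Seconds(1.0 / 60.0);
      return a + b + c;
    }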
diff --git a/chromium/media/filters/video_cadence_estimator.cc b/chromium/media/filters/video_cadence_estimator.cc
index 44d1ed50933..cbbd1109b90 100644
--- a/chromium/media/filters/video_cadence_estimator.cc
+++ b/chromium/media/filters/video_cadence_estimator.cc
@@ -77,7 +77,7 @@ VideoCadenceEstimator::Cadence ConstructCadence(int k, int n) {
VideoCadenceEstimator::VideoCadenceEstimator(
base::TimeDelta minimum_time_until_max_drift)
: cadence_hysteresis_threshold_(
- base::TimeDelta::FromMilliseconds(kMinimumCadenceDurationMs)),
+ base::Milliseconds(kMinimumCadenceDurationMs)),
minimum_time_until_max_drift_(minimum_time_until_max_drift),
is_variable_frame_rate_(false) {
Reset();
diff --git a/chromium/media/filters/video_cadence_estimator.h b/chromium/media/filters/video_cadence_estimator.h
index bed98a919ed..7535303741d 100644
--- a/chromium/media/filters/video_cadence_estimator.h
+++ b/chromium/media/filters/video_cadence_estimator.h
@@ -75,6 +75,10 @@ class MEDIA_EXPORT VideoCadenceEstimator {
// be dropped or repeated to compensate for reaching the maximum acceptable
// drift; this time length is controlled by |minimum_time_until_max_drift|.
explicit VideoCadenceEstimator(base::TimeDelta minimum_time_until_max_drift);
+
+ VideoCadenceEstimator(const VideoCadenceEstimator&) = delete;
+ VideoCadenceEstimator& operator=(const VideoCadenceEstimator&) = delete;
+
~VideoCadenceEstimator();
// Clears stored cadence information.
@@ -181,8 +185,6 @@ class MEDIA_EXPORT VideoCadenceEstimator {
// Absent when a video has variable frame rate.
absl::optional<double> perfect_cadence_;
} bm_;
-
- DISALLOW_COPY_AND_ASSIGN(VideoCadenceEstimator);
};
} // namespace media
diff --git a/chromium/media/filters/video_cadence_estimator_unittest.cc b/chromium/media/filters/video_cadence_estimator_unittest.cc
index f914a23169a..7d1ce93a0f8 100644
--- a/chromium/media/filters/video_cadence_estimator_unittest.cc
+++ b/chromium/media/filters/video_cadence_estimator_unittest.cc
@@ -20,8 +20,7 @@
namespace media {
// See VideoCadenceEstimator header for more details.
-constexpr auto kMinimumAcceptableTimeBetweenGlitches =
- base::TimeDelta::FromSeconds(8);
+constexpr auto kMinimumAcceptableTimeBetweenGlitches = base::Seconds(8);
// Slows down the given |fps| according to NTSC field reduction standards; see
// http://en.wikipedia.org/wiki/Frame_rate#Digital_video_and_television
@@ -30,7 +29,7 @@ static double NTSC(double fps) {
}
static base::TimeDelta Interval(double hertz) {
- return base::TimeDelta::FromSecondsD(1.0 / hertz);
+ return base::Seconds(1.0 / hertz);
}
std::vector<int> CreateCadenceFromString(const std::string& cadence) {
@@ -176,7 +175,7 @@ TEST(VideoCadenceEstimatorTest, CadenceCalculationWithLargeDrift) {
VideoCadenceEstimator estimator(kMinimumAcceptableTimeBetweenGlitches);
estimator.set_cadence_hysteresis_threshold_for_testing(base::TimeDelta());
- base::TimeDelta drift = base::TimeDelta::FromHours(1);
+ base::TimeDelta drift = base::Hours(1);
VerifyCadenceVectorWithCustomDrift(&estimator, 1, NTSC(60), drift, "[60]");
VerifyCadenceVectorWithCustomDrift(&estimator, 30, 60, drift, "[2]");
@@ -199,7 +198,7 @@ TEST(VideoCadenceEstimatorTest, CadenceCalculationWithLargeDeviation) {
VideoCadenceEstimator estimator(kMinimumAcceptableTimeBetweenGlitches);
estimator.set_cadence_hysteresis_threshold_for_testing(base::TimeDelta());
- const base::TimeDelta deviation = base::TimeDelta::FromMilliseconds(30);
+ const base::TimeDelta deviation = base::Milliseconds(30);
VerifyCadenceVectorWithCustomDeviation(&estimator, 1, 60, deviation, "[]");
VerifyCadenceVectorWithCustomDeviation(&estimator, 30, 60, deviation, "[]");
VerifyCadenceVectorWithCustomDeviation(&estimator, 25, 60, deviation, "[]");
@@ -307,7 +306,7 @@ void VerifyCadenceSequence(VideoCadenceEstimator* estimator,
const base::TimeDelta frame_interval = Interval(frame_rate);
const base::TimeDelta acceptable_drift =
frame_interval < render_interval ? render_interval : frame_interval;
- const base::TimeDelta test_runtime = base::TimeDelta::FromSeconds(10 * 60);
+ const base::TimeDelta test_runtime = base::Seconds(10 * 60);
const int test_frames = base::ClampFloor(test_runtime / frame_interval);
estimator->Reset();
@@ -334,7 +333,7 @@ void VerifyCadenceSequence(VideoCadenceEstimator* estimator,
TEST(VideoCadenceEstimatorTest, BresenhamCadencePatterns) {
base::test::ScopedFeatureList scoped_feature_list;
scoped_feature_list.InitAndEnableFeature(media::kBresenhamCadence);
- VideoCadenceEstimator estimator(base::TimeDelta::FromSeconds(1));
+ VideoCadenceEstimator estimator(base::Seconds(1));
estimator.set_cadence_hysteresis_threshold_for_testing(base::TimeDelta());
VerifyCadenceSequence(&estimator, 30, 60,
@@ -366,8 +365,7 @@ TEST(VideoCadenceEstimatorTest, BresenhamCadencePatterns) {
// Frame rate deviation is too high, refuse to provide cadence.
EXPECT_TRUE(estimator.UpdateCadenceEstimate(
- Interval(60), Interval(30), base::TimeDelta::FromMilliseconds(20),
- base::TimeDelta::FromSeconds(100)));
+ Interval(60), Interval(30), base::Milliseconds(20), base::Seconds(100)));
EXPECT_FALSE(estimator.has_cadence());
// No cadence change for negligible rate changes
@@ -381,7 +379,7 @@ TEST(VideoCadenceEstimatorTest, BresenhamCadencePatterns) {
TEST(VideoCadenceEstimatorTest, BresenhamCadenceChange) {
base::test::ScopedFeatureList scoped_feature_list;
scoped_feature_list.InitAndEnableFeature(media::kBresenhamCadence);
- VideoCadenceEstimator estimator(base::TimeDelta::FromSeconds(1));
+ VideoCadenceEstimator estimator(base::Seconds(1));
estimator.set_cadence_hysteresis_threshold_for_testing(base::TimeDelta());
base::TimeDelta render_interval = Interval(60);
@@ -394,7 +392,7 @@ TEST(VideoCadenceEstimatorTest, BresenhamCadenceChange) {
for (double t = 0.0; t < 10.0; t += 0.1) {
// +-100us drift of the rendering interval, a totally realistic thing.
base::TimeDelta new_render_interval =
- render_interval + base::TimeDelta::FromMicrosecondsD(std::sin(t) * 100);
+ render_interval + base::Microseconds(std::sin(t) * 100);
EXPECT_FALSE(
estimator.UpdateCadenceEstimate(new_render_interval, frame_duration,
diff --git a/chromium/media/filters/video_decoder_stream_unittest.cc b/chromium/media/filters/video_decoder_stream_unittest.cc
index 1530672b47a..76dbc414286 100644
--- a/chromium/media/filters/video_decoder_stream_unittest.cc
+++ b/chromium/media/filters/video_decoder_stream_unittest.cc
@@ -49,7 +49,7 @@ namespace media {
namespace {
const int kNumConfigs = 4;
const int kNumBuffersInOneConfig = 5;
-constexpr base::TimeDelta kPrepareDelay = base::TimeDelta::FromMilliseconds(5);
+constexpr base::TimeDelta kPrepareDelay = base::Milliseconds(5);
static int GetDecoderId(int i) {
return i;
@@ -143,6 +143,9 @@ class VideoDecoderStreamTest
EXPECT_MEDIA_LOG(HasSubstr("decryptor")).Times(AnyNumber());
}
+ VideoDecoderStreamTest(const VideoDecoderStreamTest&) = delete;
+ VideoDecoderStreamTest& operator=(const VideoDecoderStreamTest&) = delete;
+
~VideoDecoderStreamTest() {
// Check that the pipeline statistics callback was fired correctly.
EXPECT_EQ(num_decoded_bytes_unreported_, 0);
@@ -556,9 +559,6 @@ class VideoDecoderStreamTest
// Decryptor has no key to decrypt a frame.
bool has_no_key_;
-
- private:
- DISALLOW_COPY_AND_ASSIGN(VideoDecoderStreamTest);
};
INSTANTIATE_TEST_SUITE_P(
@@ -689,8 +689,7 @@ TEST_P(VideoDecoderStreamTest, Read_ProperMetadata) {
base::Unretained(this)));
}
- constexpr base::TimeDelta kDecodeDelay =
- base::TimeDelta::FromMilliseconds(10);
+ constexpr base::TimeDelta kDecodeDelay = base::Milliseconds(10);
Initialize();
diff --git a/chromium/media/filters/video_renderer_algorithm.cc b/chromium/media/filters/video_renderer_algorithm.cc
index 863359fd01a..5b6be2af6ad 100644
--- a/chromium/media/filters/video_renderer_algorithm.cc
+++ b/chromium/media/filters/video_renderer_algorithm.cc
@@ -35,8 +35,8 @@ VideoRendererAlgorithm::VideoRendererAlgorithm(
const TimeSource::WallClockTimeCB& wall_clock_time_cb,
MediaLog* media_log)
: media_log_(media_log),
- cadence_estimator_(base::TimeDelta::FromSeconds(
- kMinimumAcceptableTimeBetweenGlitchesSecs)),
+ cadence_estimator_(
+ base::Seconds(kMinimumAcceptableTimeBetweenGlitchesSecs)),
wall_clock_time_cb_(wall_clock_time_cb),
frame_duration_calculator_(kMovingAverageSamples),
frame_dropping_disabled_(false) {
@@ -313,7 +313,7 @@ void VideoRendererAlgorithm::Reset(ResetFlag reset_flag) {
// Default to ATSC IS/191 recommendations for maximum acceptable drift before
// we have enough frames to base the maximum on frame duration.
- max_acceptable_drift_ = base::TimeDelta::FromMilliseconds(15);
+ max_acceptable_drift_ = base::Milliseconds(15);
}
int64_t VideoRendererAlgorithm::GetMemoryUsage() const {
@@ -364,7 +364,7 @@ void VideoRendererAlgorithm::EnqueueFrame(scoped_refptr<VideoFrame> frame) {
new_frame_index > 0
? timestamp - frame_queue_[new_frame_index - 1].frame->timestamp()
: base::TimeDelta::Max());
- if (delta < base::TimeDelta::FromMilliseconds(1)) {
+ if (delta < base::Milliseconds(1)) {
DVLOG(2) << "Dropping frame too close to an already enqueued frame: "
<< delta.InMicroseconds() << " us";
++frames_dropped_during_enqueue_;
@@ -559,8 +559,8 @@ void VideoRendererAlgorithm::UpdateFrameStatistics() {
// We'll always allow at least 16.66ms of drift since literature suggests it's
// well below the floor of detection and is high enough to ensure stability
// for 60fps content.
- max_acceptable_drift_ = std::max(average_frame_duration_ / 2,
- base::TimeDelta::FromSecondsD(1.0 / 60));
+ max_acceptable_drift_ =
+ std::max(average_frame_duration_ / 2, base::Seconds(1.0 / 60));
// If we were called via RemoveExpiredFrames() and Render() was never called,
// we may not have a render interval yet.
@@ -670,8 +670,7 @@ int VideoRendererAlgorithm::FindBestFrameByCoverage(
// ensure proper coverage calculation for 24fps in 60Hz where +/- 100us of
// jitter is present within the |render_interval_|. At 60Hz this works out to
// an allowed jitter of 3%.
- const base::TimeDelta kAllowableJitter =
- base::TimeDelta::FromMicroseconds(500);
+ const base::TimeDelta kAllowableJitter = base::Microseconds(500);
if (*second_best >= 0 && best_frame_by_coverage > *second_best &&
(best_coverage - coverage[*second_best]).magnitude() <=
kAllowableJitter) {
diff --git a/chromium/media/filters/video_renderer_algorithm.h b/chromium/media/filters/video_renderer_algorithm.h
index 573c397fc4f..e50838940d6 100644
--- a/chromium/media/filters/video_renderer_algorithm.h
+++ b/chromium/media/filters/video_renderer_algorithm.h
@@ -51,6 +51,10 @@ class MEDIA_EXPORT VideoRendererAlgorithm {
public:
VideoRendererAlgorithm(const TimeSource::WallClockTimeCB& wall_clock_time_cb,
MediaLog* media_log);
+
+ VideoRendererAlgorithm(const VideoRendererAlgorithm&) = delete;
+ VideoRendererAlgorithm& operator=(const VideoRendererAlgorithm&) = delete;
+
~VideoRendererAlgorithm();
// Chooses the best frame for the interval [deadline_min, deadline_max] based
@@ -345,8 +349,6 @@ class MEDIA_EXPORT VideoRendererAlgorithm {
// Current number of effective frames in the |frame_queue_|. Updated by calls
// to UpdateEffectiveFramesQueued() whenever the |frame_queue_| is changed.
size_t effective_frames_queued_;
-
- DISALLOW_COPY_AND_ASSIGN(VideoRendererAlgorithm);
};
} // namespace media
diff --git a/chromium/media/filters/video_renderer_algorithm_unittest.cc b/chromium/media/filters/video_renderer_algorithm_unittest.cc
index 88b2656d192..483fcedd980 100644
--- a/chromium/media/filters/video_renderer_algorithm_unittest.cc
+++ b/chromium/media/filters/video_renderer_algorithm_unittest.cc
@@ -40,8 +40,7 @@ class TickGenerator {
base_time_(base_timestamp) {}
base::TimeDelta interval(int tick_count) const {
- return base::TimeDelta::FromMicroseconds(tick_count *
- microseconds_per_tick_);
+ return base::Microseconds(tick_count * microseconds_per_tick_);
}
base::TimeTicks current() const { return base_time_ + interval(tick_count_); }
@@ -78,9 +77,14 @@ class VideoRendererAlgorithmTest : public testing::Test {
&media_log_) {
// Always start the TickClock at a non-zero value since null values have
// special connotations.
- tick_clock_->Advance(base::TimeDelta::FromMicroseconds(10000));
+ tick_clock_->Advance(base::Microseconds(10000));
time_source_.SetTickClockForTesting(tick_clock_.get());
}
+
+ VideoRendererAlgorithmTest(const VideoRendererAlgorithmTest&) = delete;
+ VideoRendererAlgorithmTest& operator=(const VideoRendererAlgorithmTest&) =
+ delete;
+
~VideoRendererAlgorithmTest() override = default;
scoped_refptr<VideoFrame> CreateFrame(base::TimeDelta timestamp) {
@@ -91,7 +95,7 @@ class VideoRendererAlgorithmTest : public testing::Test {
}
base::TimeDelta minimum_glitch_time() const {
- return base::TimeDelta::FromSeconds(
+ return base::Seconds(
VideoRendererAlgorithm::kMinimumAcceptableTimeBetweenGlitchesSecs);
}
@@ -326,9 +330,6 @@ class VideoRendererAlgorithmTest : public testing::Test {
std::unique_ptr<base::SimpleTestTickClock> tick_clock_;
WallClockTimeSource time_source_;
VideoRendererAlgorithm algorithm_;
-
- private:
- DISALLOW_COPY_AND_ASSIGN(VideoRendererAlgorithmTest);
};
TEST_F(VideoRendererAlgorithmTest, Empty) {
@@ -1007,16 +1008,14 @@ TEST_F(VideoRendererAlgorithmTest, BestFrameByCoverage) {
// 49/51 coverage for frame 0 and frame 1 should be within tolerance such that
// the earlier frame should still be chosen.
- deadline_min = tg.current() + tg.interval(1) / 2 +
- base::TimeDelta::FromMicroseconds(250);
+ deadline_min = tg.current() + tg.interval(1) / 2 + base::Microseconds(250);
deadline_max = deadline_min + tg.interval(1);
EXPECT_EQ(0,
FindBestFrameByCoverage(deadline_min, deadline_max, &second_best));
EXPECT_EQ(1, second_best);
// 48/52 coverage should result in the second frame being chosen.
- deadline_min = tg.current() + tg.interval(1) / 2 +
- base::TimeDelta::FromMicroseconds(500);
+ deadline_min = tg.current() + tg.interval(1) / 2 + base::Microseconds(500);
deadline_max = deadline_min + tg.interval(1);
EXPECT_EQ(1,
FindBestFrameByCoverage(deadline_min, deadline_max, &second_best));
@@ -1414,12 +1413,12 @@ TEST_F(VideoRendererAlgorithmTest, VariablePlaybackRateCadence) {
TickGenerator frame_tg(base::TimeTicks(), NTSC(30));
TickGenerator display_tg(tick_clock_->NowTicks(), 60);
- const double kTestRates[] = {1.0, 2, 0.215, 0.5, 1.0, 3.15};
- const bool kTestRateHasCadence[base::size(kTestRates)] = {true, true, true,
- true, true, false};
+ const double kPlaybackRates[] = {1.0, 2, 0.215, 0.5, 1.0, 3.15};
+ const bool kTestRateHasCadence[base::size(kPlaybackRates)] = {
+ true, true, true, true, true, false};
- for (size_t i = 0; i < base::size(kTestRates); ++i) {
- const double playback_rate = kTestRates[i];
+ for (size_t i = 0; i < base::size(kPlaybackRates); ++i) {
+ const double playback_rate = kPlaybackRates[i];
SCOPED_TRACE(base::StringPrintf("Playback Rate: %.03f", playback_rate));
time_source_.SetPlaybackRate(playback_rate);
RunFramePumpTest(
@@ -1452,7 +1451,7 @@ TEST_F(VideoRendererAlgorithmTest, UglyTimestampsHaveCadence) {
for (size_t i = 0; i < base::size(kBadTimestampsMs) * 2; ++i) {
while (EffectiveFramesQueued() < 3) {
algorithm_.EnqueueFrame(CreateFrame(timestamp));
- timestamp += base::TimeDelta::FromMilliseconds(
+ timestamp += base::Milliseconds(
kBadTimestampsMs[i % base::size(kBadTimestampsMs)]);
}
@@ -1487,7 +1486,7 @@ TEST_F(VideoRendererAlgorithmTest, VariableFrameRateNoCadence) {
for (size_t i = 0; i < base::size(kBadTimestampsMs);) {
while (EffectiveFramesQueued() < 3) {
algorithm_.EnqueueFrame(CreateFrame(timestamp));
- timestamp += base::TimeDelta::FromMilliseconds(
+ timestamp += base::Milliseconds(
kBadTimestampsMs[i % base::size(kBadTimestampsMs)]);
++i;
}
@@ -1551,7 +1550,7 @@ TEST_F(VideoRendererAlgorithmTest, EnqueueFrames) {
EXPECT_EQ(2, GetCurrentFrameDisplayCount());
// Trying to add a frame < 1 ms after the last frame should drop the frame.
- algorithm_.EnqueueFrame(CreateFrame(base::TimeDelta::FromMicroseconds(999)));
+ algorithm_.EnqueueFrame(CreateFrame(base::Microseconds(999)));
rendered_frame = RenderAndStep(&tg, &frames_dropped);
EXPECT_EQ(1u, frames_queued());
EXPECT_EQ(frame_1, rendered_frame);
@@ -1564,7 +1563,7 @@ TEST_F(VideoRendererAlgorithmTest, EnqueueFrames) {
// Trying to add a frame < 1 ms before the last frame should drop the frame.
algorithm_.EnqueueFrame(
- CreateFrame(tg.interval(1) - base::TimeDelta::FromMicroseconds(999)));
+ CreateFrame(tg.interval(1) - base::Microseconds(999)));
rendered_frame = RenderAndStep(&tg, &frames_dropped);
EXPECT_EQ(1u, frames_queued());
EXPECT_EQ(frame_3, rendered_frame);
@@ -1595,7 +1594,7 @@ TEST_F(VideoRendererAlgorithmTest, CadenceForFutureFrames) {
// Add some noise to the tick generator so our first frame
// doesn't line up evenly on a deadline.
- tg.Reset(tg.current() + base::TimeDelta::FromMilliseconds(5));
+ tg.Reset(tg.current() + base::Milliseconds(5));
// We're now at the first frame, cadence should be one, so
// it should only be displayed once.
@@ -1645,7 +1644,7 @@ TEST_F(VideoRendererAlgorithmTest, UsesFrameDuration) {
EXPECT_EQ(tg.interval(1), algorithm_.average_frame_duration());
// Add a bunch of normal frames and then one with a 3s duration.
- constexpr base::TimeDelta kLongDuration = base::TimeDelta::FromSeconds(3);
+ constexpr base::TimeDelta kLongDuration = base::Seconds(3);
for (int i = 1; i < 4; ++i) {
frame = CreateFrame(tg.interval(i));
frame->metadata().frame_duration = i == 3 ? kLongDuration : tg.interval(1);
diff --git a/chromium/media/filters/vp9_bool_decoder.h b/chromium/media/filters/vp9_bool_decoder.h
index e4daed285f6..6fb7ce9564b 100644
--- a/chromium/media/filters/vp9_bool_decoder.h
+++ b/chromium/media/filters/vp9_bool_decoder.h
@@ -20,6 +20,10 @@ class BitReader;
class MEDIA_EXPORT Vp9BoolDecoder {
public:
Vp9BoolDecoder();
+
+ Vp9BoolDecoder(const Vp9BoolDecoder&) = delete;
+ Vp9BoolDecoder& operator=(const Vp9BoolDecoder&) = delete;
+
~Vp9BoolDecoder();
// |data| is the input buffer with |size| bytes.
@@ -64,8 +68,6 @@ class MEDIA_EXPORT Vp9BoolDecoder {
// bits pre-filled.
int count_to_fill_ = 0;
unsigned int bool_range_ = 0;
-
- DISALLOW_COPY_AND_ASSIGN(Vp9BoolDecoder);
};
} // namespace media
diff --git a/chromium/media/filters/vp9_raw_bits_reader.h b/chromium/media/filters/vp9_raw_bits_reader.h
index e5dbf46dcbb..0df4898297e 100644
--- a/chromium/media/filters/vp9_raw_bits_reader.h
+++ b/chromium/media/filters/vp9_raw_bits_reader.h
@@ -22,6 +22,10 @@ class BitReader;
class MEDIA_EXPORT Vp9RawBitsReader {
public:
Vp9RawBitsReader();
+
+ Vp9RawBitsReader(const Vp9RawBitsReader&) = delete;
+ Vp9RawBitsReader& operator=(const Vp9RawBitsReader&) = delete;
+
~Vp9RawBitsReader();
// |data| is the input buffer with |size| bytes.
@@ -58,8 +62,6 @@ class MEDIA_EXPORT Vp9RawBitsReader {
// Indicates if none of the reads since the last Initialize() call has gone
// beyond the end of available data.
bool valid_;
-
- DISALLOW_COPY_AND_ASSIGN(Vp9RawBitsReader);
};
} // namespace media
diff --git a/chromium/media/filters/vpx_video_decoder.cc b/chromium/media/filters/vpx_video_decoder.cc
index 45b12488ed1..fbfddce1e6d 100644
--- a/chromium/media/filters/vpx_video_decoder.cc
+++ b/chromium/media/filters/vpx_video_decoder.cc
@@ -42,7 +42,7 @@ static int GetVpxVideoDecoderThreadCount(const VideoDecoderConfig& config) {
// For VP9 decoding increase the number of decode threads to equal the
// maximum number of tiles possible for higher resolution streams.
- if (config.codec() == kCodecVP9) {
+ if (config.codec() == VideoCodec::kVP9) {
const int width = config.coded_size().width();
if (width >= 3840)
desired_threads = 16;
@@ -63,10 +63,11 @@ static std::unique_ptr<vpx_codec_ctx> InitializeVpxContext(
vpx_config.h = config.coded_size().height();
vpx_config.threads = GetVpxVideoDecoderThreadCount(config);
- vpx_codec_err_t status = vpx_codec_dec_init(
- context.get(),
- config.codec() == kCodecVP9 ? vpx_codec_vp9_dx() : vpx_codec_vp8_dx(),
- &vpx_config, 0 /* flags */);
+ vpx_codec_err_t status = vpx_codec_dec_init(context.get(),
+ config.codec() == VideoCodec::kVP9
+ ? vpx_codec_vp9_dx()
+ : vpx_codec_vp8_dx(),
+ &vpx_config, 0 /* flags */);
if (status == VPX_CODEC_OK)
return context;
@@ -157,7 +158,7 @@ void VpxVideoDecoder::Initialize(const VideoDecoderConfig& config,
// Success!
config_ = config;
- state_ = kNormal;
+ state_ = DecoderState::kNormal;
output_cb_ = output_cb;
std::move(bound_init_cb).Run(OkStatus());
}
@@ -168,32 +169,32 @@ void VpxVideoDecoder::Decode(scoped_refptr<DecoderBuffer> buffer,
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
DCHECK(buffer);
DCHECK(decode_cb);
- DCHECK_NE(state_, kUninitialized)
+ DCHECK_NE(state_, DecoderState::kUninitialized)
<< "Called Decode() before successful Initialize()";
DecodeCB bound_decode_cb = bind_callbacks_
? BindToCurrentLoop(std::move(decode_cb))
: std::move(decode_cb);
- if (state_ == kError) {
+ if (state_ == DecoderState::kError) {
std::move(bound_decode_cb).Run(DecodeStatus::DECODE_ERROR);
return;
}
- if (state_ == kDecodeFinished) {
+ if (state_ == DecoderState::kDecodeFinished) {
std::move(bound_decode_cb).Run(DecodeStatus::OK);
return;
}
- if (state_ == kNormal && buffer->end_of_stream()) {
- state_ = kDecodeFinished;
+ if (state_ == DecoderState::kNormal && buffer->end_of_stream()) {
+ state_ = DecoderState::kDecodeFinished;
std::move(bound_decode_cb).Run(DecodeStatus::OK);
return;
}
scoped_refptr<VideoFrame> video_frame;
if (!VpxDecode(buffer.get(), &video_frame)) {
- state_ = kError;
+ state_ = DecoderState::kError;
std::move(bound_decode_cb).Run(DecodeStatus::DECODE_ERROR);
return;
}
@@ -211,7 +212,7 @@ void VpxVideoDecoder::Decode(scoped_refptr<DecoderBuffer> buffer,
void VpxVideoDecoder::Reset(base::OnceClosure reset_cb) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
- state_ = kNormal;
+ state_ = DecoderState::kNormal;
if (bind_callbacks_)
BindToCurrentLoop(std::move(reset_cb)).Run();
@@ -224,7 +225,7 @@ void VpxVideoDecoder::Reset(base::OnceClosure reset_cb) {
bool VpxVideoDecoder::ConfigureDecoder(const VideoDecoderConfig& config) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
- if (config.codec() != kCodecVP8 && config.codec() != kCodecVP9)
+ if (config.codec() != VideoCodec::kVP8 && config.codec() != VideoCodec::kVP9)
return false;
#if BUILDFLAG(ENABLE_FFMPEG_VIDEO_DECODERS)
@@ -232,7 +233,7 @@ bool VpxVideoDecoder::ConfigureDecoder(const VideoDecoderConfig& config) {
// VpxVideoDecoder will handle VP8 with alpha. FFvp8 is being deprecated.
// See http://crbug.com/992235.
if (base::FeatureList::IsEnabled(kFFmpegDecodeOpaqueVP8) &&
- config.codec() == kCodecVP8 &&
+ config.codec() == VideoCodec::kVP8 &&
config.alpha_mode() == VideoDecoderConfig::AlphaMode::kIsOpaque) {
return false;
}
@@ -246,7 +247,7 @@ bool VpxVideoDecoder::ConfigureDecoder(const VideoDecoderConfig& config) {
// Configure VP9 to decode on our buffers to skip a data copy on
// decoding. For YV12A-VP9, we use our buffers for the Y, U and V planes and
// copy the A plane.
- if (config.codec() == kCodecVP9) {
+ if (config.codec() == VideoCodec::kVP9) {
DCHECK(vpx_codec_get_caps(vpx_codec_->iface) &
VPX_CODEC_CAP_EXTERNAL_FRAME_BUFFER);
@@ -347,7 +348,7 @@ bool VpxVideoDecoder::VpxDecode(const DecoderBuffer* buffer,
if (!CopyVpxImageToVideoFrame(vpx_image, vpx_image_alpha, video_frame))
return false;
- if (vpx_image_alpha && config_.codec() == kCodecVP8) {
+ if (vpx_image_alpha && config_.codec() == VideoCodec::kVP8) {
libyuv::CopyPlane(vpx_image_alpha->planes[VPX_PLANE_Y],
vpx_image_alpha->stride[VPX_PLANE_Y],
(*video_frame)->visible_data(VideoFrame::kAPlane),
@@ -404,7 +405,7 @@ bool VpxVideoDecoder::VpxDecode(const DecoderBuffer* buffer,
case VPX_CS_SRGB:
primaries = gfx::ColorSpace::PrimaryID::BT709;
transfer = gfx::ColorSpace::TransferID::IEC61966_2_1;
- matrix = gfx::ColorSpace::MatrixID::BT709;
+ matrix = gfx::ColorSpace::MatrixID::RGB;
break;
default:
break;
@@ -550,7 +551,7 @@ bool VpxVideoDecoder::CopyVpxImageToVideoFrame(
config_.aspect_ratio().GetNaturalSize(gfx::Rect(visible_size));
if (memory_pool_) {
- DCHECK_EQ(kCodecVP9, config_.codec());
+ DCHECK_EQ(VideoCodec::kVP9, config_.codec());
if (vpx_image_alpha) {
size_t alpha_plane_size =
vpx_image_alpha->stride[VPX_PLANE_Y] * vpx_image_alpha->d_h;
diff --git a/chromium/media/filters/vpx_video_decoder.h b/chromium/media/filters/vpx_video_decoder.h
index 34a8433258c..a0be02e4a89 100644
--- a/chromium/media/filters/vpx_video_decoder.h
+++ b/chromium/media/filters/vpx_video_decoder.h
@@ -34,6 +34,10 @@ class MEDIA_EXPORT VpxVideoDecoder : public OffloadableVideoDecoder {
static SupportedVideoDecoderConfigs SupportedConfigs();
explicit VpxVideoDecoder(OffloadState offload_state = OffloadState::kNormal);
+
+ VpxVideoDecoder(const VpxVideoDecoder&) = delete;
+ VpxVideoDecoder& operator=(const VpxVideoDecoder&) = delete;
+
~VpxVideoDecoder() override;
// VideoDecoder implementation.
@@ -55,13 +59,7 @@ class MEDIA_EXPORT VpxVideoDecoder : public OffloadableVideoDecoder {
}
private:
- enum DecoderState {
- kUninitialized,
- kNormal,
- kFlushCodec,
- kDecodeFinished,
- kError
- };
+ enum class DecoderState { kUninitialized, kNormal, kDecodeFinished, kError };
// Return values for decoding alpha plane.
enum AlphaDecodeStatus {
@@ -99,7 +97,7 @@ class MEDIA_EXPORT VpxVideoDecoder : public OffloadableVideoDecoder {
// |state_| must only be read and written to on |offload_task_runner_| if it
// is non-null and there are outstanding tasks on the offload thread.
- DecoderState state_ = kUninitialized;
+ DecoderState state_ = DecoderState::kUninitialized;
OutputCB output_cb_;
@@ -112,8 +110,6 @@ class MEDIA_EXPORT VpxVideoDecoder : public OffloadableVideoDecoder {
// with no alpha. |frame_pool_| is used for all other cases.
scoped_refptr<FrameBufferPool> memory_pool_;
VideoFramePool frame_pool_;
-
- DISALLOW_COPY_AND_ASSIGN(VpxVideoDecoder);
};
// Helper class for creating a VpxVideoDecoder which will offload > 720p VP9
@@ -123,7 +119,7 @@ class OffloadingVpxVideoDecoder : public OffloadingVideoDecoder {
OffloadingVpxVideoDecoder()
: OffloadingVideoDecoder(
1024,
- std::vector<VideoCodec>(1, kCodecVP9),
+ std::vector<VideoCodec>(1, VideoCodec::kVP9),
std::make_unique<VpxVideoDecoder>(
OffloadableVideoDecoder::OffloadState::kOffloaded)) {}
};
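The codec identifiers touched above migrate from the old unscoped kCodec* constants to the scoped media::VideoCodec and media::AudioCodec enum classes declared in media/base/video_codecs.h and media/base/audio_codecs.h. A short sketch of the new spelling, using a hypothetical helper:

#include "media/base/video_codecs.h"

// Hypothetical helper; with the scoped enum each enumerator is qualified by
// the enum class name (VideoCodec::kVP9) rather than spelled kCodecVP9.
bool IsVpxCodec(media::VideoCodec codec) {
  return codec == media::VideoCodec::kVP8 ||
         codec == media::VideoCodec::kVP9;
}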
diff --git a/chromium/media/filters/vpx_video_decoder_fuzzertest.cc b/chromium/media/filters/vpx_video_decoder_fuzzertest.cc
index 087d46547e4..afeba4f5f9c 100644
--- a/chromium/media/filters/vpx_video_decoder_fuzzertest.cc
+++ b/chromium/media/filters/vpx_video_decoder_fuzzertest.cc
@@ -64,11 +64,11 @@ extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
bool has_alpha = false;
if (rng() & 1) {
- codec = media::kCodecVP8;
+ codec = media::VideoCodec::kVP8;
// non-Alpha VP8 decoding isn't supported by VpxVideoDecoder on Linux.
has_alpha = true;
} else {
- codec = media::kCodecVP9;
+ codec = media::VideoCodec::kVP9;
has_alpha = rng() & 1;
}
diff --git a/chromium/media/filters/vpx_video_decoder_unittest.cc b/chromium/media/filters/vpx_video_decoder_unittest.cc
index 6f37edb3067..8f2eeeef207 100644
--- a/chromium/media/filters/vpx_video_decoder_unittest.cc
+++ b/chromium/media/filters/vpx_video_decoder_unittest.cc
@@ -31,10 +31,13 @@ class VpxVideoDecoderTest : public testing::Test {
: decoder_(new VpxVideoDecoder()),
i_frame_buffer_(ReadTestDataFile("vp9-I-frame-320x240")) {}
+ VpxVideoDecoderTest(const VpxVideoDecoderTest&) = delete;
+ VpxVideoDecoderTest& operator=(const VpxVideoDecoderTest&) = delete;
+
~VpxVideoDecoderTest() override { Destroy(); }
void Initialize() {
- InitializeWithConfig(TestVideoConfig::Normal(kCodecVP9));
+ InitializeWithConfig(TestVideoConfig::Normal(VideoCodec::kVP9));
}
void InitializeWithConfigWithResult(const VideoDecoderConfig& config,
@@ -56,7 +59,7 @@ class VpxVideoDecoderTest : public testing::Test {
}
void Reinitialize() {
- InitializeWithConfig(TestVideoConfig::Large(kCodecVP9));
+ InitializeWithConfig(TestVideoConfig::Large(VideoCodec::kVP9));
}
void Reset() {
@@ -172,9 +175,6 @@ class VpxVideoDecoderTest : public testing::Test {
scoped_refptr<DecoderBuffer> i_frame_buffer_;
OutputFrames output_frames_;
-
- private:
- DISALLOW_COPY_AND_ASSIGN(VpxVideoDecoderTest);
};
TEST_F(VpxVideoDecoderTest, Initialize_Normal) {
diff --git a/chromium/media/formats/common/offset_byte_queue.h b/chromium/media/formats/common/offset_byte_queue.h
index eb0bd2c58e7..ea5017b7230 100644
--- a/chromium/media/formats/common/offset_byte_queue.h
+++ b/chromium/media/formats/common/offset_byte_queue.h
@@ -20,6 +20,10 @@ namespace media {
class MEDIA_EXPORT OffsetByteQueue {
public:
OffsetByteQueue();
+
+ OffsetByteQueue(const OffsetByteQueue&) = delete;
+ OffsetByteQueue& operator=(const OffsetByteQueue&) = delete;
+
~OffsetByteQueue();
// These work like their underlying ByteQueue counterparts.
@@ -59,8 +63,6 @@ class MEDIA_EXPORT OffsetByteQueue {
const uint8_t* buf_;
int size_;
int64_t head_;
-
- DISALLOW_COPY_AND_ASSIGN(OffsetByteQueue);
};
} // namespace media
diff --git a/chromium/media/formats/common/stream_parser_test_base.h b/chromium/media/formats/common/stream_parser_test_base.h
index fc47ccc5343..43feeb847cb 100644
--- a/chromium/media/formats/common/stream_parser_test_base.h
+++ b/chromium/media/formats/common/stream_parser_test_base.h
@@ -24,6 +24,10 @@ namespace media {
class StreamParserTestBase {
public:
explicit StreamParserTestBase(std::unique_ptr<StreamParser> stream_parser);
+
+ StreamParserTestBase(const StreamParserTestBase&) = delete;
+ StreamParserTestBase& operator=(const StreamParserTestBase&) = delete;
+
virtual ~StreamParserTestBase();
protected:
@@ -71,8 +75,6 @@ class StreamParserTestBase {
std::stringstream results_stream_;
AudioDecoderConfig last_audio_config_;
StreamParser::TrackId audio_track_id_;
-
- DISALLOW_COPY_AND_ASSIGN(StreamParserTestBase);
};
} // namespace media
diff --git a/chromium/media/formats/mp2t/es_adapter_video.cc b/chromium/media/formats/mp2t/es_adapter_video.cc
index 88bdecc667f..eb181aa342b 100644
--- a/chromium/media/formats/mp2t/es_adapter_video.cc
+++ b/chromium/media/formats/mp2t/es_adapter_video.cc
@@ -32,8 +32,7 @@ EsAdapterVideo::EsAdapterVideo(NewVideoConfigCB new_video_config_cb,
emit_buffer_cb_(std::move(emit_buffer_cb)),
has_valid_config_(false),
has_valid_frame_(false),
- last_frame_duration_(
- base::TimeDelta::FromMilliseconds(kDefaultFrameDurationMs)),
+ last_frame_duration_(base::Milliseconds(kDefaultFrameDurationMs)),
buffer_index_(0),
has_valid_initial_timestamp_(false),
discarded_frame_count_(0) {}
@@ -49,8 +48,7 @@ void EsAdapterVideo::Reset() {
has_valid_config_ = false;
has_valid_frame_ = false;
- last_frame_duration_ =
- base::TimeDelta::FromMilliseconds(kDefaultFrameDurationMs);
+ last_frame_duration_ = base::Milliseconds(kDefaultFrameDurationMs);
config_list_.clear();
buffer_index_ = 0;
diff --git a/chromium/media/formats/mp2t/es_adapter_video.h b/chromium/media/formats/mp2t/es_adapter_video.h
index 60193351864..eca870c7975 100644
--- a/chromium/media/formats/mp2t/es_adapter_video.h
+++ b/chromium/media/formats/mp2t/es_adapter_video.h
@@ -40,6 +40,10 @@ class MEDIA_EXPORT EsAdapterVideo {
EsAdapterVideo(NewVideoConfigCB new_video_config_cb,
EmitBufferCB emit_buffer_cb);
+
+ EsAdapterVideo(const EsAdapterVideo&) = delete;
+ EsAdapterVideo& operator=(const EsAdapterVideo&) = delete;
+
~EsAdapterVideo();
// Force the emission of the pending video buffers.
@@ -95,8 +99,6 @@ class MEDIA_EXPORT EsAdapterVideo {
// Number of frames to replace with the first valid key frame.
int discarded_frame_count_;
-
- DISALLOW_COPY_AND_ASSIGN(EsAdapterVideo);
};
} // namespace mp2t
diff --git a/chromium/media/formats/mp2t/es_adapter_video_unittest.cc b/chromium/media/formats/mp2t/es_adapter_video_unittest.cc
index 8043605d4d0..30fe18c3e9e 100644
--- a/chromium/media/formats/mp2t/es_adapter_video_unittest.cc
+++ b/chromium/media/formats/mp2t/es_adapter_video_unittest.cc
@@ -31,10 +31,11 @@ VideoDecoderConfig CreateFakeVideoConfig() {
gfx::Size coded_size(320, 240);
gfx::Rect visible_rect(0, 0, 320, 240);
gfx::Size natural_size(320, 240);
- return VideoDecoderConfig(
- kCodecH264, H264PROFILE_MAIN, VideoDecoderConfig::AlphaMode::kIsOpaque,
- VideoColorSpace(), kNoTransformation, coded_size, visible_rect,
- natural_size, EmptyExtraData(), EncryptionScheme::kUnencrypted);
+ return VideoDecoderConfig(VideoCodec::kH264, H264PROFILE_MAIN,
+ VideoDecoderConfig::AlphaMode::kIsOpaque,
+ VideoColorSpace(), kNoTransformation, coded_size,
+ visible_rect, natural_size, EmptyExtraData(),
+ EncryptionScheme::kUnencrypted);
}
BufferQueue GenerateFakeBuffers(const int* frame_pts_ms,
@@ -50,8 +51,7 @@ BufferQueue GenerateFakeBuffers(const int* frame_pts_ms,
if (frame_pts_ms[k] < 0) {
buffers[k]->set_timestamp(kNoTimestamp);
} else {
- buffers[k]->set_timestamp(
- base::TimeDelta::FromMilliseconds(frame_pts_ms[k]));
+ buffers[k]->set_timestamp(base::Milliseconds(frame_pts_ms[k]));
}
}
return buffers;
diff --git a/chromium/media/formats/mp2t/es_parser.h b/chromium/media/formats/mp2t/es_parser.h
index 2cca8db0f4a..75f7060c97e 100644
--- a/chromium/media/formats/mp2t/es_parser.h
+++ b/chromium/media/formats/mp2t/es_parser.h
@@ -33,6 +33,10 @@ class MEDIA_EXPORT EsParser {
using GetDecryptConfigCB = base::RepeatingCallback<const DecryptConfig*()>;
EsParser();
+
+ EsParser(const EsParser&) = delete;
+ EsParser& operator=(const EsParser&) = delete;
+
virtual ~EsParser();
// ES parsing.
@@ -87,8 +91,6 @@ class MEDIA_EXPORT EsParser {
// present in the PES packet header, it shall refer to the first AVC access
// unit that commences in this PES packet.
std::list<std::pair<int64_t, TimingDesc>> timing_desc_list_;
-
- DISALLOW_COPY_AND_ASSIGN(EsParser);
};
} // namespace mp2t
diff --git a/chromium/media/formats/mp2t/es_parser_adts.cc b/chromium/media/formats/mp2t/es_parser_adts.cc
index 91470b75d25..a484ee338c0 100644
--- a/chromium/media/formats/mp2t/es_parser_adts.cc
+++ b/chromium/media/formats/mp2t/es_parser_adts.cc
@@ -263,8 +263,8 @@ bool EsParserAdts::UpdateAudioConfiguration(const uint8_t* adts_header,
scheme = init_encryption_scheme_;
#endif
AudioDecoderConfig audio_decoder_config(
- kCodecAAC, kSampleFormatS16, channel_layout, extended_samples_per_second,
- extra_data, scheme);
+ AudioCodec::kAAC, kSampleFormatS16, channel_layout,
+ extended_samples_per_second, extra_data, scheme);
if (!audio_decoder_config.IsValidConfig()) {
DVLOG(1) << "Invalid config: "
diff --git a/chromium/media/formats/mp2t/es_parser_adts.h b/chromium/media/formats/mp2t/es_parser_adts.h
index 665ec3f7794..2521f277cdc 100644
--- a/chromium/media/formats/mp2t/es_parser_adts.h
+++ b/chromium/media/formats/mp2t/es_parser_adts.h
@@ -44,6 +44,9 @@ class MEDIA_EXPORT EsParserAdts : public EsParser {
bool sbr_in_mimetype);
#endif
+ EsParserAdts(const EsParserAdts&) = delete;
+ EsParserAdts& operator=(const EsParserAdts&) = delete;
+
~EsParserAdts() override;
// EsParser implementation.
@@ -98,8 +101,6 @@ class MEDIA_EXPORT EsParserAdts : public EsParser {
AudioDecoderConfig last_audio_decoder_config_;
ADTSStreamParser adts_parser_;
-
- DISALLOW_COPY_AND_ASSIGN(EsParserAdts);
};
} // namespace mp2t
diff --git a/chromium/media/formats/mp2t/es_parser_adts_unittest.cc b/chromium/media/formats/mp2t/es_parser_adts_unittest.cc
index 5dbc26428c8..b542b255825 100644
--- a/chromium/media/formats/mp2t/es_parser_adts_unittest.cc
+++ b/chromium/media/formats/mp2t/es_parser_adts_unittest.cc
@@ -60,7 +60,7 @@ TEST_F(EsParserAdtsTest, SinglePts) {
LoadStream("bear.adts");
std::vector<Packet> pes_packets = GenerateFixedSizePesPacket(512);
- pes_packets.front().pts = base::TimeDelta::FromSeconds(10);
+ pes_packets.front().pts = base::Seconds(10);
EXPECT_TRUE(Process(pes_packets, false /* sbr_in_mimetype */));
EXPECT_EQ(1u, config_count_);
@@ -70,7 +70,7 @@ TEST_F(EsParserAdtsTest, SinglePts) {
TEST_F(EsParserAdtsTest, AacLcAdts) {
LoadStream("sfx.adts");
std::vector<Packet> pes_packets = GenerateFixedSizePesPacket(512);
- pes_packets.front().pts = base::TimeDelta::FromSeconds(1);
+ pes_packets.front().pts = base::Seconds(1);
EXPECT_TRUE(Process(pes_packets, false /* sbr_in_mimetype */));
EXPECT_EQ(1u, config_count_);
EXPECT_EQ(14u, buffer_count_);
@@ -80,7 +80,7 @@ TEST_F(EsParserAdtsTest, AacSampleRate) {
std::vector<Packet> pes_packets =
LoadPacketsFromFiles("aac-44100-packet-%d", 4);
- pes_packets.front().pts = base::TimeDelta::FromSeconds(0);
+ pes_packets.front().pts = base::Seconds(0);
EXPECT_TRUE(Process(pes_packets, true /* sbr_in_mimetype */));
EXPECT_EQ(4u, buffer_count_);
EXPECT_EQ(kAac44100PacketTimestamp, buffer_timestamps_);
diff --git a/chromium/media/formats/mp2t/es_parser_h264.cc b/chromium/media/formats/mp2t/es_parser_h264.cc
index a00a90cad5f..eeae824558d 100644
--- a/chromium/media/formats/mp2t/es_parser_h264.cc
+++ b/chromium/media/formats/mp2t/es_parser_h264.cc
@@ -517,7 +517,7 @@ bool EsParserH264::UpdateVideoDecoderConfig(const H264SPS* sps,
}
VideoDecoderConfig video_decoder_config(
- kCodecH264, profile, VideoDecoderConfig::AlphaMode::kIsOpaque,
+ VideoCodec::kH264, profile, VideoDecoderConfig::AlphaMode::kIsOpaque,
VideoColorSpace::REC709(), kNoTransformation, coded_size.value(),
visible_rect.value(), natural_size, EmptyExtraData(), scheme);
diff --git a/chromium/media/formats/mp2t/es_parser_h264.h b/chromium/media/formats/mp2t/es_parser_h264.h
index 4a771af3bd2..4c599c8a886 100644
--- a/chromium/media/formats/mp2t/es_parser_h264.h
+++ b/chromium/media/formats/mp2t/es_parser_h264.h
@@ -50,6 +50,10 @@ class MEDIA_EXPORT EsParserH264 : public EsParser {
EncryptionScheme init_encryption_scheme,
const GetDecryptConfigCB& get_decrypt_config_cb);
#endif
+
+ EsParserH264(const EsParserH264&) = delete;
+ EsParserH264& operator=(const EsParserH264&) = delete;
+
~EsParserH264() override;
// EsParser implementation.
@@ -95,8 +99,6 @@ class MEDIA_EXPORT EsParserH264 : public EsParser {
// Last video decoder config.
VideoDecoderConfig last_video_decoder_config_;
-
- DISALLOW_COPY_AND_ASSIGN(EsParserH264);
};
} // namespace mp2t
diff --git a/chromium/media/formats/mp2t/es_parser_h264_unittest.cc b/chromium/media/formats/mp2t/es_parser_h264_unittest.cc
index 05f57751b88..b84ac5d42cc 100644
--- a/chromium/media/formats/mp2t/es_parser_h264_unittest.cc
+++ b/chromium/media/formats/mp2t/es_parser_h264_unittest.cc
@@ -64,7 +64,7 @@ void EsParserH264Test::LoadH264Stream(const char* filename) {
// Generate some timestamps based on a 25fps stream.
for (size_t k = 0; k < access_units_.size(); k++)
- access_units_[k].pts = base::TimeDelta::FromMilliseconds(k * 40u);
+ access_units_[k].pts = base::Milliseconds(k * 40u);
}
void EsParserH264Test::GetAccessUnits() {
@@ -143,7 +143,7 @@ void EsParserH264Test::GetPesTimestamps(std::vector<Packet>* pes_packets_ptr) {
// a special meaning in EsParserH264. The negative timestamps should be
// ultimately discarded by the H264 parser since they are not relevant.
for (size_t k = 0; k < pes_packets.size(); k++) {
- (*pes_packets_ptr)[k].pts = base::TimeDelta::FromMilliseconds(-1);
+ (*pes_packets_ptr)[k].pts = base::Milliseconds(-1);
}
// Set a valid timestamp for PES packets which include the start
diff --git a/chromium/media/formats/mp2t/es_parser_mpeg1audio.cc b/chromium/media/formats/mp2t/es_parser_mpeg1audio.cc
index 4cc67f92956..a48a4ba6062 100644
--- a/chromium/media/formats/mp2t/es_parser_mpeg1audio.cc
+++ b/chromium/media/formats/mp2t/es_parser_mpeg1audio.cc
@@ -170,8 +170,8 @@ bool EsParserMpeg1Audio::UpdateAudioConfiguration(
// TODO(damienv): Verify whether Android playback requires the extra data
// field for Mpeg1 audio. If yes, we should generate this field.
AudioDecoderConfig audio_decoder_config(
- kCodecMP3, kSampleFormatS16, header.channel_layout, header.sample_rate,
- EmptyExtraData(), EncryptionScheme::kUnencrypted);
+ AudioCodec::kMP3, kSampleFormatS16, header.channel_layout,
+ header.sample_rate, EmptyExtraData(), EncryptionScheme::kUnencrypted);
if (!audio_decoder_config.IsValidConfig()) {
DVLOG(1) << "Invalid config: "
diff --git a/chromium/media/formats/mp2t/es_parser_mpeg1audio.h b/chromium/media/formats/mp2t/es_parser_mpeg1audio.h
index c367a21ddf6..eda747a2c6f 100644
--- a/chromium/media/formats/mp2t/es_parser_mpeg1audio.h
+++ b/chromium/media/formats/mp2t/es_parser_mpeg1audio.h
@@ -35,6 +35,10 @@ class MEDIA_EXPORT EsParserMpeg1Audio : public EsParser {
EsParserMpeg1Audio(const NewAudioConfigCB& new_audio_config_cb,
EmitBufferCB emit_buffer_cb,
MediaLog* media_log);
+
+ EsParserMpeg1Audio(const EsParserMpeg1Audio&) = delete;
+ EsParserMpeg1Audio& operator=(const EsParserMpeg1Audio&) = delete;
+
~EsParserMpeg1Audio() override;
// EsParser implementation.
@@ -81,8 +85,6 @@ class MEDIA_EXPORT EsParserMpeg1Audio : public EsParser {
// Last audio config.
AudioDecoderConfig last_audio_decoder_config_;
-
- DISALLOW_COPY_AND_ASSIGN(EsParserMpeg1Audio);
};
} // namespace mp2t
diff --git a/chromium/media/formats/mp2t/es_parser_mpeg1audio_unittest.cc b/chromium/media/formats/mp2t/es_parser_mpeg1audio_unittest.cc
index 46477bf9487..9965396ab8d 100644
--- a/chromium/media/formats/mp2t/es_parser_mpeg1audio_unittest.cc
+++ b/chromium/media/formats/mp2t/es_parser_mpeg1audio_unittest.cc
@@ -50,7 +50,7 @@ TEST_F(EsParserMpeg1AudioTest, SinglePts) {
LoadStream("sfx.mp3");
std::vector<Packet> pes_packets = GenerateFixedSizePesPacket(512);
- pes_packets.front().pts = base::TimeDelta::FromSeconds(10);
+ pes_packets.front().pts = base::Seconds(10);
// Note: there is no parsing of metadata as part of Mpeg2 TS,
// so the tag starting at 0x80d with 0x54 0x41 0x47 (ascii for "TAG")
diff --git a/chromium/media/formats/mp2t/es_parser_test_base.h b/chromium/media/formats/mp2t/es_parser_test_base.h
index 233da6b0346..ef0d5f1669b 100644
--- a/chromium/media/formats/mp2t/es_parser_test_base.h
+++ b/chromium/media/formats/mp2t/es_parser_test_base.h
@@ -40,6 +40,10 @@ class EsParserTestBase {
};
EsParserTestBase();
+
+ EsParserTestBase(const EsParserTestBase&) = delete;
+ EsParserTestBase& operator=(const EsParserTestBase&) = delete;
+
virtual ~EsParserTestBase();
protected:
@@ -82,8 +86,6 @@ class EsParserTestBase {
private:
// Timestamps of buffers generated while parsing the ES stream.
std::stringstream buffer_timestamps_stream_;
-
- DISALLOW_COPY_AND_ASSIGN(EsParserTestBase);
};
} // namespace mp2t
diff --git a/chromium/media/formats/mp2t/mp2t_stream_parser.cc b/chromium/media/formats/mp2t/mp2t_stream_parser.cc
index 3cbc4269f46..a15bef6bb7d 100644
--- a/chromium/media/formats/mp2t/mp2t_stream_parser.cc
+++ b/chromium/media/formats/mp2t/mp2t_stream_parser.cc
@@ -201,13 +201,13 @@ Mp2tStreamParser::Mp2tStreamParser(base::span<const std::string> allowed_codecs,
segment_started_(false) {
for (const std::string& codec_name : allowed_codecs) {
switch (StringToVideoCodec(codec_name)) {
- case VideoCodec::kCodecH264:
+ case VideoCodec::kH264:
allowed_stream_types_.insert(kStreamTypeAVC);
#if BUILDFLAG(ENABLE_HLS_SAMPLE_AES)
allowed_stream_types_.insert(kStreamTypeAVCWithSampleAES);
#endif
continue;
- case VideoCodec::kUnknownVideoCodec:
+ case VideoCodec::kUnknown:
// Probably audio.
break;
default:
@@ -216,17 +216,17 @@ Mp2tStreamParser::Mp2tStreamParser(base::span<const std::string> allowed_codecs,
}
switch (StringToAudioCodec(codec_name)) {
- case AudioCodec::kCodecAAC:
+ case AudioCodec::kAAC:
allowed_stream_types_.insert(kStreamTypeAAC);
#if BUILDFLAG(ENABLE_HLS_SAMPLE_AES)
allowed_stream_types_.insert(kStreamTypeAACWithSampleAES);
#endif
continue;
- case AudioCodec::kCodecMP3:
+ case AudioCodec::kMP3:
allowed_stream_types_.insert(kStreamTypeMpeg1Audio);
allowed_stream_types_.insert(kStreamTypeMpeg2Audio);
continue;
- case AudioCodec::kUnknownAudioCodec:
+ case AudioCodec::kUnknown:
// Neither audio, nor video.
break;
default:
diff --git a/chromium/media/formats/mp2t/mp2t_stream_parser.h b/chromium/media/formats/mp2t/mp2t_stream_parser.h
index 5fc3fa70cf9..84a8cb00d44 100644
--- a/chromium/media/formats/mp2t/mp2t_stream_parser.h
+++ b/chromium/media/formats/mp2t/mp2t_stream_parser.h
@@ -38,6 +38,10 @@ class MEDIA_EXPORT Mp2tStreamParser : public StreamParser {
public:
explicit Mp2tStreamParser(base::span<const std::string> allowed_codecs,
bool sbr_in_mimetype);
+
+ Mp2tStreamParser(const Mp2tStreamParser&) = delete;
+ Mp2tStreamParser& operator=(const Mp2tStreamParser&) = delete;
+
~Mp2tStreamParser() override;
// StreamParser implementation.
@@ -185,8 +189,6 @@ class MEDIA_EXPORT Mp2tStreamParser : public StreamParser {
// provide a better way to access the last values seen in a ECM packet.
std::unique_ptr<DecryptConfig> decrypt_config_;
#endif
-
- DISALLOW_COPY_AND_ASSIGN(Mp2tStreamParser);
};
} // namespace mp2t
diff --git a/chromium/media/formats/mp2t/mp2t_stream_parser_unittest.cc b/chromium/media/formats/mp2t/mp2t_stream_parser_unittest.cc
index 89ae1025d15..368b86edce7 100644
--- a/chromium/media/formats/mp2t/mp2t_stream_parser_unittest.cc
+++ b/chromium/media/formats/mp2t/mp2t_stream_parser_unittest.cc
@@ -55,7 +55,7 @@ bool IsMonotonic(const StreamParser::BufferQueue& buffers) {
}
bool IsAlmostEqual(DecodeTimestamp t0, DecodeTimestamp t1) {
- base::TimeDelta kMaxDeviation = base::TimeDelta::FromMilliseconds(5);
+ base::TimeDelta kMaxDeviation = base::Milliseconds(5);
base::TimeDelta diff = t1 - t0;
return (diff >= -kMaxDeviation && diff <= kMaxDeviation);
}
diff --git a/chromium/media/formats/mp2t/timestamp_unroller.h b/chromium/media/formats/mp2t/timestamp_unroller.h
index afebde5629f..062ecd66d9d 100644
--- a/chromium/media/formats/mp2t/timestamp_unroller.h
+++ b/chromium/media/formats/mp2t/timestamp_unroller.h
@@ -16,6 +16,10 @@ namespace mp2t {
class MEDIA_EXPORT TimestampUnroller {
public:
TimestampUnroller();
+
+ TimestampUnroller(const TimestampUnroller&) = delete;
+ TimestampUnroller& operator=(const TimestampUnroller&) = delete;
+
~TimestampUnroller();
// Given that |timestamp| is coded using 33 bits (accuracy of MPEG-2 TS
@@ -38,8 +42,6 @@ class MEDIA_EXPORT TimestampUnroller {
// This is the last output of GetUnrolledTimestamp.
int64_t previous_unrolled_timestamp_;
-
- DISALLOW_COPY_AND_ASSIGN(TimestampUnroller);
};
} // namespace mp2t
diff --git a/chromium/media/formats/mp2t/ts_packet.h b/chromium/media/formats/mp2t/ts_packet.h
index 3918cc1c856..963e2b6cdda 100644
--- a/chromium/media/formats/mp2t/ts_packet.h
+++ b/chromium/media/formats/mp2t/ts_packet.h
@@ -28,6 +28,9 @@ class TsPacket {
// Return NULL otherwise.
static TsPacket* Parse(const uint8_t* buf, int size);
+ TsPacket(const TsPacket&) = delete;
+ TsPacket& operator=(const TsPacket&) = delete;
+
~TsPacket();
// TS header accessors.
@@ -64,8 +67,6 @@ class TsPacket {
// Params from the adaptation field.
bool discontinuity_indicator_;
bool random_access_indicator_;
-
- DISALLOW_COPY_AND_ASSIGN(TsPacket);
};
} // namespace mp2t
diff --git a/chromium/media/formats/mp2t/ts_section_cat.h b/chromium/media/formats/mp2t/ts_section_cat.h
index 69ef68714e9..9c737b04d61 100644
--- a/chromium/media/formats/mp2t/ts_section_cat.h
+++ b/chromium/media/formats/mp2t/ts_section_cat.h
@@ -22,6 +22,10 @@ class TsSectionCat : public TsSectionPsi {
base::RepeatingCallback<void(EncryptionScheme)>;
TsSectionCat(const RegisterCencPidsCB& register_cenc_ids_cb,
const RegisterEncryptionSchemeCB& register_encryption_scheme_cb);
+
+ TsSectionCat(const TsSectionCat&) = delete;
+ TsSectionCat& operator=(const TsSectionCat&) = delete;
+
~TsSectionCat() override;
// TsSectionPsi implementation.
@@ -34,8 +38,6 @@ class TsSectionCat : public TsSectionPsi {
// Parameters from the CAT.
int version_number_;
-
- DISALLOW_COPY_AND_ASSIGN(TsSectionCat);
};
} // namespace mp2t
diff --git a/chromium/media/formats/mp2t/ts_section_cets_ecm.h b/chromium/media/formats/mp2t/ts_section_cets_ecm.h
index d2fb53414cf..ad9a0b34630 100644
--- a/chromium/media/formats/mp2t/ts_section_cets_ecm.h
+++ b/chromium/media/formats/mp2t/ts_section_cets_ecm.h
@@ -29,6 +29,10 @@ class TsSectionCetsEcm : public TsSection {
explicit TsSectionCetsEcm(
const RegisterNewKeyIdAndIvCB& register_new_key_id_and_iv_cb);
+
+ TsSectionCetsEcm(const TsSectionCetsEcm&) = delete;
+ TsSectionCetsEcm& operator=(const TsSectionCetsEcm&) = delete;
+
~TsSectionCetsEcm() override;
// TsSection implementation.
@@ -40,8 +44,6 @@ class TsSectionCetsEcm : public TsSection {
private:
RegisterNewKeyIdAndIvCB register_new_key_id_and_iv_cb_;
-
- DISALLOW_COPY_AND_ASSIGN(TsSectionCetsEcm);
};
} // namespace mp2t
diff --git a/chromium/media/formats/mp2t/ts_section_cets_pssh.h b/chromium/media/formats/mp2t/ts_section_cets_pssh.h
index b4803068891..43c1a26580d 100644
--- a/chromium/media/formats/mp2t/ts_section_cets_pssh.h
+++ b/chromium/media/formats/mp2t/ts_section_cets_pssh.h
@@ -22,6 +22,10 @@ class TsSectionCetsPssh : public TsSection {
base::RepeatingCallback<void(const std::vector<uint8_t>&)>;
explicit TsSectionCetsPssh(RegisterPsshBoxesCB register_pssh_boxes_cb);
+
+ TsSectionCetsPssh(const TsSectionCetsPssh&) = delete;
+ TsSectionCetsPssh& operator=(const TsSectionCetsPssh&) = delete;
+
~TsSectionCetsPssh() override;
// TsSection implementation.
@@ -33,8 +37,6 @@ class TsSectionCetsPssh : public TsSection {
private:
const RegisterPsshBoxesCB register_pssh_boxes_cb_;
-
- DISALLOW_COPY_AND_ASSIGN(TsSectionCetsPssh);
};
} // namespace mp2t
diff --git a/chromium/media/formats/mp2t/ts_section_pat.h b/chromium/media/formats/mp2t/ts_section_pat.h
index f5ad9e87848..d834d5e9312 100644
--- a/chromium/media/formats/mp2t/ts_section_pat.h
+++ b/chromium/media/formats/mp2t/ts_section_pat.h
@@ -19,6 +19,10 @@ class TsSectionPat : public TsSectionPsi {
using RegisterPmtCB = base::RepeatingCallback<void(int, int)>;
explicit TsSectionPat(RegisterPmtCB register_pmt_cb);
+
+ TsSectionPat(const TsSectionPat&) = delete;
+ TsSectionPat& operator=(const TsSectionPat&) = delete;
+
~TsSectionPat() override;
// TsSectionPsi implementation.
@@ -30,8 +34,6 @@ class TsSectionPat : public TsSectionPsi {
// Parameters from the PAT.
int version_number_;
-
- DISALLOW_COPY_AND_ASSIGN(TsSectionPat);
};
} // namespace mp2t
diff --git a/chromium/media/formats/mp2t/ts_section_pes.cc b/chromium/media/formats/mp2t/ts_section_pes.cc
index 9cd704e71a3..e2012daa06a 100644
--- a/chromium/media/formats/mp2t/ts_section_pes.cc
+++ b/chromium/media/formats/mp2t/ts_section_pes.cc
@@ -227,7 +227,7 @@ bool TsSectionPes::ParseInternal(const uint8_t* raw_pes, int raw_pes_size) {
if (is_pts_valid) {
int64_t pts = timestamp_unroller_->GetUnrolledTimestamp(
ConvertTimestampSectionToTimestamp(pts_section));
- media_pts = base::TimeDelta::FromMicroseconds((1000 * pts) / 90);
+ media_pts = base::Microseconds((1000 * pts) / 90);
}
if (is_dts_valid) {
int64_t dts = timestamp_unroller_->GetUnrolledTimestamp(
diff --git a/chromium/media/formats/mp2t/ts_section_pes.h b/chromium/media/formats/mp2t/ts_section_pes.h
index 405fd3b74de..08ae1afa3d6 100644
--- a/chromium/media/formats/mp2t/ts_section_pes.h
+++ b/chromium/media/formats/mp2t/ts_section_pes.h
@@ -24,6 +24,10 @@ class TsSectionPes : public TsSection {
public:
TsSectionPes(std::unique_ptr<EsParser> es_parser,
TimestampUnroller* timestamp_unroller);
+
+ TsSectionPes(const TsSectionPes&) = delete;
+ TsSectionPes& operator=(const TsSectionPes&) = delete;
+
~TsSectionPes() override;
// TsSection implementation.
@@ -56,8 +60,6 @@ class TsSectionPes : public TsSection {
// Used to unroll PTS and DTS.
TimestampUnroller* const timestamp_unroller_;
-
- DISALLOW_COPY_AND_ASSIGN(TsSectionPes);
};
} // namespace mp2t
diff --git a/chromium/media/formats/mp2t/ts_section_pmt.h b/chromium/media/formats/mp2t/ts_section_pmt.h
index e36930781fe..10235867a53 100644
--- a/chromium/media/formats/mp2t/ts_section_pmt.h
+++ b/chromium/media/formats/mp2t/ts_section_pmt.h
@@ -22,6 +22,10 @@ class TsSectionPmt : public TsSectionPsi {
void(int pes_pid, int stream_type, const Descriptors& descriptors)>;
explicit TsSectionPmt(RegisterPesCB register_pes_cb);
+
+ TsSectionPmt(const TsSectionPmt&) = delete;
+ TsSectionPmt& operator=(const TsSectionPmt&) = delete;
+
~TsSectionPmt() override;
// Mpeg2TsPsiParser implementation.
@@ -30,8 +34,6 @@ class TsSectionPmt : public TsSectionPsi {
private:
const RegisterPesCB register_pes_cb_;
-
- DISALLOW_COPY_AND_ASSIGN(TsSectionPmt);
};
} // namespace mp2t
diff --git a/chromium/media/formats/mp2t/ts_section_psi.h b/chromium/media/formats/mp2t/ts_section_psi.h
index 7da1cca977e..25c2110e73b 100644
--- a/chromium/media/formats/mp2t/ts_section_psi.h
+++ b/chromium/media/formats/mp2t/ts_section_psi.h
@@ -21,6 +21,10 @@ namespace mp2t {
class TsSectionPsi : public TsSection {
public:
TsSectionPsi();
+
+ TsSectionPsi(const TsSectionPsi&) = delete;
+ TsSectionPsi& operator=(const TsSectionPsi&) = delete;
+
~TsSectionPsi() override;
// TsSection implementation.
@@ -47,8 +51,6 @@ class TsSectionPsi : public TsSection {
// Number of leading bytes to discard (pointer field).
int leading_bytes_to_discard_;
-
- DISALLOW_COPY_AND_ASSIGN(TsSectionPsi);
};
} // namespace mp2t
diff --git a/chromium/media/formats/mp4/aac.cc b/chromium/media/formats/mp4/aac.cc
index ca92ff81a8c..7742a469567 100644
--- a/chromium/media/formats/mp4/aac.cc
+++ b/chromium/media/formats/mp4/aac.cc
@@ -9,7 +9,6 @@
#include <algorithm>
#include "base/logging.h"
-#include "build/build_config.h"
#include "media/base/bit_reader.h"
#include "media/formats/mp4/rcheck.h"
#include "media/formats/mpeg/adts_constants.h"
@@ -29,9 +28,8 @@ AAC::AAC(const AAC& other) = default;
AAC::~AAC() = default;
bool AAC::Parse(const std::vector<uint8_t>& data, MediaLog* media_log) {
-#if defined(OS_ANDROID)
codec_specific_data_ = data;
-#endif
+
if (data.empty())
return false;
diff --git a/chromium/media/formats/mp4/aac.h b/chromium/media/formats/mp4/aac.h
index 3fa403b76c2..301b239f95c 100644
--- a/chromium/media/formats/mp4/aac.h
+++ b/chromium/media/formats/mp4/aac.h
@@ -9,7 +9,6 @@
#include <vector>
-#include "build/build_config.h"
#include "media/base/audio_codecs.h"
#include "media/base/channel_layout.h"
#include "media/base/media_export.h"
@@ -60,12 +59,10 @@ class MEDIA_EXPORT AAC {
// If known, returns the AudioCodecProfile.
AudioCodecProfile GetProfile() const;
-#if defined(OS_ANDROID)
// Returns the codec specific data needed by android MediaCodec.
std::vector<uint8_t> codec_specific_data() const {
return codec_specific_data_;
}
-#endif
private:
bool SkipDecoderGASpecificConfig(BitReader* bit_reader) const;
@@ -78,10 +75,8 @@ class MEDIA_EXPORT AAC {
uint8_t frequency_index_;
uint8_t channel_config_;
-#if defined(OS_ANDROID)
// The codec specific data needed by the android MediaCodec.
std::vector<uint8_t> codec_specific_data_;
-#endif
// The following variables store audio configuration information that
// can be used by Chromium. They are based on the AAC specific
diff --git a/chromium/media/formats/mp4/box_definitions.cc b/chromium/media/formats/mp4/box_definitions.cc
index 263e913404b..069f22bcd82 100644
--- a/chromium/media/formats/mp4/box_definitions.cc
+++ b/chromium/media/formats/mp4/box_definitions.cc
@@ -950,10 +950,16 @@ FourCC ColorParameterInformation::BoxType() const {
}
bool ColorParameterInformation::Parse(BoxReader* reader) {
+ fully_parsed = false;
+
FourCC type;
RCHECK(reader->ReadFourCC(&type));
- // TODO: Support 'nclc', 'rICC', and 'prof'.
- RCHECK(type == FOURCC_NCLX);
+
+ if (type != FOURCC_NCLX) {
+ // Ignore currently unsupported color information metadata parsing.
+ // TODO: Support 'nclc', 'rICC', and 'prof'.
+ return true;
+ }
uint8_t full_range_byte;
RCHECK(reader->Read2(&colour_primaries) &&
@@ -961,6 +967,7 @@ bool ColorParameterInformation::Parse(BoxReader* reader) {
reader->Read2(&matrix_coefficients) &&
reader->Read1(&full_range_byte));
full_range = full_range_byte & 0x80;
+ fully_parsed = true;
return true;
}
@@ -1049,7 +1056,7 @@ VideoSampleEntry::VideoSampleEntry()
data_reference_index(0),
width(0),
height(0),
- video_codec(kUnknownVideoCodec),
+ video_codec(VideoCodec::kUnknown),
video_codec_profile(VIDEO_CODEC_PROFILE_UNKNOWN),
video_codec_level(kNoVideoCodecLevel) {}
@@ -1095,7 +1102,7 @@ bool VideoSampleEntry::Parse(BoxReader* reader) {
std::unique_ptr<AVCDecoderConfigurationRecord> avcConfig(
new AVCDecoderConfigurationRecord());
RCHECK(reader->ReadChild(avcConfig.get()));
- video_codec = kCodecH264;
+ video_codec = VideoCodec::kH264;
video_codec_profile = H264Parser::ProfileIDCToVideoCodecProfile(
avcConfig->profile_indication);
@@ -1106,7 +1113,7 @@ bool VideoSampleEntry::Parse(BoxReader* reader) {
auto dv_config = ParseDOVIConfig(reader);
if (dv_config.has_value()) {
DVLOG(2) << __func__ << " reading DolbyVisionConfiguration (dvcC/dvvC)";
- video_codec = kCodecDolbyVision;
+ video_codec = VideoCodec::kDolbyVision;
video_codec_profile = dv_config->codec_profile;
video_codec_level = dv_config->dv_level;
}
@@ -1120,7 +1127,7 @@ bool VideoSampleEntry::Parse(BoxReader* reader) {
std::unique_ptr<HEVCDecoderConfigurationRecord> hevcConfig(
new HEVCDecoderConfigurationRecord());
RCHECK(reader->ReadChild(hevcConfig.get()));
- video_codec = kCodecHEVC;
+ video_codec = VideoCodec::kHEVC;
video_codec_profile = hevcConfig->GetVideoProfile();
frame_bitstream_converter =
base::MakeRefCounted<HEVCBitstreamConverter>(std::move(hevcConfig));
@@ -1129,7 +1136,7 @@ bool VideoSampleEntry::Parse(BoxReader* reader) {
auto dv_config = ParseDOVIConfig(reader);
if (dv_config.has_value()) {
DVLOG(2) << __func__ << " reading DolbyVisionConfiguration (dvcC/dvvC)";
- video_codec = kCodecDolbyVision;
+ video_codec = VideoCodec::kDolbyVision;
video_codec_profile = dv_config->codec_profile;
video_codec_level = dv_config->dv_level;
}
@@ -1150,7 +1157,7 @@ bool VideoSampleEntry::Parse(BoxReader* reader) {
DVLOG(2) << __func__ << " reading DolbyVisionConfiguration (dvcC/dvvC)";
auto dv_config = ParseDOVIConfig(reader);
RCHECK(dv_config.has_value());
- video_codec = kCodecDolbyVision;
+ video_codec = VideoCodec::kDolbyVision;
video_codec_profile = dv_config->codec_profile;
video_codec_level = dv_config->dv_level;
break;
@@ -1167,7 +1174,7 @@ bool VideoSampleEntry::Parse(BoxReader* reader) {
DVLOG(2) << __func__ << " reading DolbyVisionConfiguration (dvcC/dvvC)";
auto dv_config = ParseDOVIConfig(reader);
RCHECK(dv_config.has_value());
- video_codec = kCodecDolbyVision;
+ video_codec = VideoCodec::kDolbyVision;
video_codec_profile = dv_config->codec_profile;
video_codec_level = dv_config->dv_level;
break;
@@ -1181,21 +1188,21 @@ bool VideoSampleEntry::Parse(BoxReader* reader) {
new VPCodecConfigurationRecord());
RCHECK(reader->ReadChild(vp_config.get()));
frame_bitstream_converter = nullptr;
- video_codec = kCodecVP9;
+ video_codec = VideoCodec::kVP9;
video_codec_profile = vp_config->profile;
video_color_space = vp_config->color_space;
video_codec_level = vp_config->level;
- SMPTE2086MasteringDisplayMetadataBox mastering_display_color_volume;
- if (reader->HasChild(&mastering_display_color_volume)) {
- RCHECK(reader->ReadChild(&mastering_display_color_volume));
- this->mastering_display_color_volume = mastering_display_color_volume;
+ SMPTE2086MasteringDisplayMetadataBox color_volume;
+ if (reader->HasChild(&color_volume)) {
+ RCHECK(reader->ReadChild(&color_volume));
+ mastering_display_color_volume = color_volume;
}
- ContentLightLevel content_light_level_information;
- if (reader->HasChild(&content_light_level_information)) {
- RCHECK(reader->ReadChild(&content_light_level_information));
- this->content_light_level_information = content_light_level_information;
+ ContentLightLevel level_information;
+ if (reader->HasChild(&level_information)) {
+ RCHECK(reader->ReadChild(&level_information));
+ content_light_level_information = level_information;
}
break;
}
@@ -1205,7 +1212,7 @@ bool VideoSampleEntry::Parse(BoxReader* reader) {
AV1CodecConfigurationRecord av1_config;
RCHECK(reader->ReadChild(&av1_config));
frame_bitstream_converter = nullptr;
- video_codec = kCodecAV1;
+ video_codec = VideoCodec::kAV1;
video_codec_profile = av1_config.profile;
break;
}
@@ -1221,20 +1228,22 @@ bool VideoSampleEntry::Parse(BoxReader* reader) {
ColorParameterInformation color_parameter_information;
if (reader->HasChild(&color_parameter_information)) {
RCHECK(reader->ReadChild(&color_parameter_information));
- video_color_space = ConvertColorParameterInformationToColorSpace(
- color_parameter_information);
+ if (color_parameter_information.fully_parsed) {
+ video_color_space = ConvertColorParameterInformationToColorSpace(
+ color_parameter_information);
+ }
}
- MasteringDisplayColorVolume mastering_display_color_volume;
- if (reader->HasChild(&mastering_display_color_volume)) {
- RCHECK(reader->ReadChild(&mastering_display_color_volume));
- this->mastering_display_color_volume = mastering_display_color_volume;
+ MasteringDisplayColorVolume color_volume;
+ if (reader->HasChild(&color_volume)) {
+ RCHECK(reader->ReadChild(&color_volume));
+ mastering_display_color_volume = color_volume;
}
- ContentLightLevelInformation content_light_level_information;
- if (reader->HasChild(&content_light_level_information)) {
- RCHECK(reader->ReadChild(&content_light_level_information));
- this->content_light_level_information = content_light_level_information;
+ ContentLightLevelInformation level_information;
+ if (reader->HasChild(&level_information)) {
+ RCHECK(reader->ReadChild(&level_information));
+ content_light_level_information = level_information;
}
if (video_codec_profile == VIDEO_CODEC_PROFILE_UNKNOWN) {
@@ -1377,8 +1386,7 @@ bool FlacSpecificBox::Parse(BoxReader* reader) {
}
OpusSpecificBox::OpusSpecificBox()
- : seek_preroll(base::TimeDelta::FromMilliseconds(80)),
- codec_delay_in_frames(0) {}
+ : seek_preroll(base::Milliseconds(80)), codec_delay_in_frames(0) {}
OpusSpecificBox::OpusSpecificBox(const OpusSpecificBox& other) = default;
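
The constructor change above is one instance of the patch-wide switch from base::TimeDelta::FromMilliseconds()/FromMicroseconds()/FromSeconds() to the shorter base::Milliseconds()/Microseconds()/Seconds() factories. A minimal sketch of the two spellings, assuming only base/time/time.h; both produce the same value:

#include "base/time/time.h"

namespace {
// Old spelling, still valid at this point but being phased out:
base::TimeDelta SeekPrerollOldStyle() {
  return base::TimeDelta::FromMilliseconds(80);
}
// New spelling used throughout this change:
base::TimeDelta SeekPrerollNewStyle() {
  return base::Milliseconds(80);
}
}  // namespace
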
diff --git a/chromium/media/formats/mp4/box_definitions.h b/chromium/media/formats/mp4/box_definitions.h
index 450f3fc43c1..5a307f0476b 100644
--- a/chromium/media/formats/mp4/box_definitions.h
+++ b/chromium/media/formats/mp4/box_definitions.h
@@ -276,6 +276,7 @@ struct MEDIA_EXPORT ColorParameterInformation : Box {
uint16_t transfer_characteristics;
uint16_t matrix_coefficients;
bool full_range;
+ bool fully_parsed;
};
struct MEDIA_EXPORT MasteringDisplayColorVolume : Box {
diff --git a/chromium/media/formats/mp4/h264_annex_b_to_avc_bitstream_converter.h b/chromium/media/formats/mp4/h264_annex_b_to_avc_bitstream_converter.h
index f89ac8c4f01..a45b81c42b9 100644
--- a/chromium/media/formats/mp4/h264_annex_b_to_avc_bitstream_converter.h
+++ b/chromium/media/formats/mp4/h264_annex_b_to_avc_bitstream_converter.h
@@ -24,6 +24,12 @@ namespace media {
class MEDIA_EXPORT H264AnnexBToAvcBitstreamConverter {
public:
H264AnnexBToAvcBitstreamConverter();
+
+ H264AnnexBToAvcBitstreamConverter(const H264AnnexBToAvcBitstreamConverter&) =
+ delete;
+ H264AnnexBToAvcBitstreamConverter& operator=(
+ const H264AnnexBToAvcBitstreamConverter&) = delete;
+
~H264AnnexBToAvcBitstreamConverter();
// Converts a video chunk from a format with in-place decoder configuration
@@ -59,8 +65,6 @@ class MEDIA_EXPORT H264AnnexBToAvcBitstreamConverter {
int active_sps_id_ = -1;
int active_pps_id_ = -1;
-
- DISALLOW_COPY_AND_ASSIGN(H264AnnexBToAvcBitstreamConverter);
};
} // namespace media
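
The converter header above also shows the second recurring cleanup: the DISALLOW_COPY_AND_ASSIGN macro (from base/macros.h) in the private section is replaced by copy operations explicitly deleted in the public section. A minimal standalone sketch of the new spelling, using a hypothetical NonCopyable class:

class NonCopyable {
 public:
  NonCopyable() = default;

  // Replaces the old private-section DISALLOW_COPY_AND_ASSIGN(NonCopyable);
  NonCopyable(const NonCopyable&) = delete;
  NonCopyable& operator=(const NonCopyable&) = delete;

  ~NonCopyable() = default;
};
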
diff --git a/chromium/media/formats/mp4/mp4_stream_parser.cc b/chromium/media/formats/mp4/mp4_stream_parser.cc
index fb83b2ebe59..b5f7b3d6651 100644
--- a/chromium/media/formats/mp4/mp4_stream_parser.cc
+++ b/chromium/media/formats/mp4/mp4_stream_parser.cc
@@ -345,15 +345,20 @@ bool MP4StreamParser::ParseMoov(BoxReader* reader) {
return false;
}
- AudioCodec codec = kUnknownAudioCodec;
- AudioCodecProfile profile = AudioCodecProfile::kUnknown;
+ AudioCodec codec = AudioCodec::kUnknown;
ChannelLayout channel_layout = CHANNEL_LAYOUT_NONE;
int sample_per_second = 0;
int codec_delay_in_frames = 0;
base::TimeDelta seek_preroll;
std::vector<uint8_t> extra_data;
+
+#if BUILDFLAG(USE_PROPRIETARY_CODECS)
+ AudioCodecProfile profile = AudioCodecProfile::kUnknown;
+ std::vector<uint8_t> aac_extra_data;
+#endif // BUILDFLAG(USE_PROPRIETARY_CODECS)
+
if (audio_format == FOURCC_OPUS) {
- codec = kCodecOpus;
+ codec = AudioCodec::kOpus;
channel_layout = GuessChannelLayout(entry.dops.channel_count);
sample_per_second = entry.dops.sample_rate;
codec_delay_in_frames = entry.dops.codec_delay_in_frames;
@@ -369,14 +374,14 @@ bool MP4StreamParser::ParseMoov(BoxReader* reader) {
return false;
}
- codec = kCodecFLAC;
+ codec = AudioCodec::kFLAC;
channel_layout = GuessChannelLayout(entry.channelcount);
sample_per_second = entry.samplerate;
extra_data = entry.dfla.stream_info;
#if BUILDFLAG(USE_PROPRIETARY_CODECS)
#if BUILDFLAG(ENABLE_PLATFORM_MPEG_H_AUDIO)
} else if (audio_format == FOURCC_MHM1 || audio_format == FOURCC_MHA1) {
- codec = kCodecMpegHAudio;
+ codec = AudioCodec::kMpegHAudio;
channel_layout = CHANNEL_LAYOUT_BITSTREAM;
sample_per_second = entry.samplerate;
extra_data = entry.dfla.stream_info;
@@ -404,20 +409,24 @@ bool MP4StreamParser::ParseMoov(BoxReader* reader) {
// supported MPEG2 AAC variants.
if (ESDescriptor::IsAAC(audio_type)) {
const AAC& aac = entry.esds.aac;
- codec = kCodecAAC;
+ codec = AudioCodec::kAAC;
profile = aac.GetProfile();
channel_layout = aac.GetChannelLayout(has_sbr_);
sample_per_second = aac.GetOutputSamplesPerSecond(has_sbr_);
+ // Set `aac_extra_data` on all platforms but only set `extra_data` on
+ // Android. This is for backward compatibility until we have a better
+ // solution. See crbug.com/1245123 for details.
+ aac_extra_data = aac.codec_specific_data();
#if defined(OS_ANDROID)
extra_data = aac.codec_specific_data();
-#endif
+#endif // defined(OS_ANDROID)
#if BUILDFLAG(ENABLE_PLATFORM_AC3_EAC3_AUDIO)
} else if (audio_type == kAC3) {
- codec = kCodecAC3;
+ codec = AudioCodec::kAC3;
channel_layout = GuessChannelLayout(entry.channelcount);
sample_per_second = entry.samplerate;
} else if (audio_type == kEAC3) {
- codec = kCodecEAC3;
+ codec = AudioCodec::kEAC3;
channel_layout = GuessChannelLayout(entry.channelcount);
sample_per_second = entry.samplerate;
#endif
@@ -458,13 +467,18 @@ bool MP4StreamParser::ParseMoov(BoxReader* reader) {
if (scheme == EncryptionScheme::kUnencrypted)
return false;
}
+
audio_config.Initialize(codec, sample_format, channel_layout,
sample_per_second, extra_data, scheme,
seek_preroll, codec_delay_in_frames);
- if (codec == kCodecAAC) {
+
+#if BUILDFLAG(USE_PROPRIETARY_CODECS)
+ if (codec == AudioCodec::kAAC) {
audio_config.disable_discard_decoder_delay();
audio_config.set_profile(profile);
+ audio_config.set_aac_extra_data(std::move(aac_extra_data));
}
+#endif // BUILDFLAG(USE_PROPRIETARY_CODECS)
DVLOG(1) << "audio_track_id=" << audio_track_id
<< " config=" << audio_config.AsHumanReadableString();
@@ -806,9 +820,9 @@ ParseResult MP4StreamParser::EnqueueSample(BufferQueueMap* buffers) {
std::vector<uint8_t> frame_buf(buf, buf + sample_size);
if (video) {
- if (runs_->video_description().video_codec == kCodecH264 ||
- runs_->video_description().video_codec == kCodecHEVC ||
- runs_->video_description().video_codec == kCodecDolbyVision) {
+ if (runs_->video_description().video_codec == VideoCodec::kH264 ||
+ runs_->video_description().video_codec == VideoCodec::kHEVC ||
+ runs_->video_description().video_codec == VideoCodec::kDolbyVision) {
DCHECK(runs_->video_description().frame_bitstream_converter);
BitstreamConverter::AnalysisResult analysis;
if (!runs_->video_description()
diff --git a/chromium/media/formats/mp4/mp4_stream_parser.h b/chromium/media/formats/mp4/mp4_stream_parser.h
index 86ef7467c52..c04afcb7795 100644
--- a/chromium/media/formats/mp4/mp4_stream_parser.h
+++ b/chromium/media/formats/mp4/mp4_stream_parser.h
@@ -37,6 +37,10 @@ class MEDIA_EXPORT MP4StreamParser : public StreamParser {
MP4StreamParser(const std::set<int>& audio_object_types,
bool has_sbr,
bool has_flac);
+
+ MP4StreamParser(const MP4StreamParser&) = delete;
+ MP4StreamParser& operator=(const MP4StreamParser&) = delete;
+
~MP4StreamParser() override;
void Init(InitCB init_cb,
@@ -150,8 +154,6 @@ class MEDIA_EXPORT MP4StreamParser : public StreamParser {
// Tracks the number of MEDIA_LOGS for video keyframe MP4<->frame mismatch.
int num_video_keyframe_mismatches_;
-
- DISALLOW_COPY_AND_ASSIGN(MP4StreamParser);
};
} // namespace mp4
diff --git a/chromium/media/formats/mp4/mp4_stream_parser_unittest.cc b/chromium/media/formats/mp4/mp4_stream_parser_unittest.cc
index e5f745cad5b..2c973ffa1ef 100644
--- a/chromium/media/formats/mp4/mp4_stream_parser_unittest.cc
+++ b/chromium/media/formats/mp4/mp4_stream_parser_unittest.cc
@@ -40,7 +40,6 @@
using ::testing::InSequence;
using ::testing::StrictMock;
-using base::TimeDelta;
namespace media {
namespace mp4 {
@@ -274,7 +273,7 @@ constexpr char kShakaPackagerUMA[] = "Media.MSE.DetectedShakaPackagerInMp4";
TEST_F(MP4StreamParserTest, DidNotUseShakaPackager) {
// Encrypted files have non-zero duration and are treated as recorded streams.
auto params = GetDefaultInitParametersExpectations();
- params.duration = base::TimeDelta::FromMicroseconds(2736066);
+ params.duration = base::Microseconds(2736066);
params.liveness = DemuxerStream::LIVENESS_RECORDED;
params.detected_audio_track_count = 0;
InitializeParserWithInitParametersExpectations(params);
@@ -288,7 +287,7 @@ TEST_F(MP4StreamParserTest, DidNotUseShakaPackager) {
TEST_F(MP4StreamParserTest, UsedShakaPackager) {
auto params = GetDefaultInitParametersExpectations();
- params.duration = base::TimeDelta::FromMicroseconds(2736000);
+ params.duration = base::Microseconds(2736000);
params.liveness = DemuxerStream::LIVENESS_RECORDED;
params.detected_audio_track_count = 0;
InitializeParserWithInitParametersExpectations(params);
@@ -462,7 +461,7 @@ TEST_F(MP4StreamParserTest, MissingSampleEncryptionInfo) {
// Encrypted test mp4 files have non-zero duration and are treated as
// recorded streams.
auto params = GetDefaultInitParametersExpectations();
- params.duration = base::TimeDelta::FromMicroseconds(23219);
+ params.duration = base::Microseconds(23219);
params.liveness = DemuxerStream::LIVENESS_RECORDED;
params.detected_video_track_count = 0;
InitializeParserWithInitParametersExpectations(params);
@@ -490,7 +489,7 @@ TEST_F(MP4StreamParserTest, HEVC_in_MP4_container) {
EXPECT_MEDIA_LOG(ErrorLog("Unsupported VisualSampleEntry type hev1"));
#endif
auto params = GetDefaultInitParametersExpectations();
- params.duration = base::TimeDelta::FromMicroseconds(1002000);
+ params.duration = base::Microseconds(1002000);
params.liveness = DemuxerStream::LIVENESS_RECORDED;
params.detected_audio_track_count = 0;
InitializeParserWithInitParametersExpectations(params);
@@ -499,7 +498,7 @@ TEST_F(MP4StreamParserTest, HEVC_in_MP4_container) {
EXPECT_EQ(expect_success,
AppendDataInPieces(buffer->data(), buffer->data_size(), 512));
#if BUILDFLAG(ENABLE_PLATFORM_HEVC)
- EXPECT_EQ(kCodecHEVC, video_decoder_config_.codec());
+ EXPECT_EQ(VideoCodec::kHEVC, video_decoder_config_.codec());
EXPECT_EQ(HEVCPROFILE_MAIN, video_decoder_config_.profile());
#endif
}
@@ -569,7 +568,7 @@ TEST_F(MP4StreamParserTest, CencWithEncryptionInfoStoredAsAuxDataInMdat) {
// Encrypted test mp4 files have non-zero duration and are treated as
// recorded streams.
auto params = GetDefaultInitParametersExpectations();
- params.duration = base::TimeDelta::FromMicroseconds(2736066);
+ params.duration = base::Microseconds(2736066);
params.liveness = DemuxerStream::LIVENESS_RECORDED;
params.detected_audio_track_count = 0;
InitializeParserWithInitParametersExpectations(params);
@@ -583,7 +582,7 @@ TEST_F(MP4StreamParserTest, CencWithSampleEncryptionBox) {
// Encrypted test mp4 files have non-zero duration and are treated as
// recorded streams.
auto params = GetDefaultInitParametersExpectations();
- params.duration = base::TimeDelta::FromMicroseconds(2736066);
+ params.duration = base::Microseconds(2736066);
params.liveness = DemuxerStream::LIVENESS_RECORDED;
params.detected_audio_track_count = 0;
InitializeParserWithInitParametersExpectations(params);
@@ -595,7 +594,7 @@ TEST_F(MP4StreamParserTest, CencWithSampleEncryptionBox) {
TEST_F(MP4StreamParserTest, NaturalSizeWithoutPASP) {
auto params = GetDefaultInitParametersExpectations();
- params.duration = base::TimeDelta::FromMicroseconds(1000966);
+ params.duration = base::Microseconds(1000966);
params.liveness = DemuxerStream::LIVENESS_RECORDED;
params.detected_audio_track_count = 0;
InitializeParserWithInitParametersExpectations(params);
@@ -609,7 +608,7 @@ TEST_F(MP4StreamParserTest, NaturalSizeWithoutPASP) {
TEST_F(MP4StreamParserTest, NaturalSizeWithPASP) {
auto params = GetDefaultInitParametersExpectations();
- params.duration = base::TimeDelta::FromMicroseconds(1000966);
+ params.duration = base::Microseconds(1000966);
params.liveness = DemuxerStream::LIVENESS_RECORDED;
params.detected_audio_track_count = 0;
InitializeParserWithInitParametersExpectations(params);
@@ -634,7 +633,7 @@ TEST_F(MP4StreamParserTest, DemuxingAC3) {
#endif
auto params = GetDefaultInitParametersExpectations();
- params.duration = base::TimeDelta::FromMicroseconds(1045000);
+ params.duration = base::Microseconds(1045000);
params.liveness = DemuxerStream::LIVENESS_RECORDED;
params.detected_video_track_count = 0;
InitializeParserWithInitParametersExpectations(params);
@@ -658,7 +657,7 @@ TEST_F(MP4StreamParserTest, DemuxingEAC3) {
#endif
auto params = GetDefaultInitParametersExpectations();
- params.duration = base::TimeDelta::FromMicroseconds(1045000);
+ params.duration = base::Microseconds(1045000);
params.liveness = DemuxerStream::LIVENESS_RECORDED;
params.detected_video_track_count = 0;
InitializeParserWithInitParametersExpectations(params);
@@ -787,7 +786,7 @@ TEST_F(MP4StreamParserTest, TextTrackDetection) {
TEST_F(MP4StreamParserTest, MultiTrackFile) {
auto params = GetDefaultInitParametersExpectations();
- params.duration = base::TimeDelta::FromMilliseconds(4248);
+ params.duration = base::Milliseconds(4248);
params.liveness = DemuxerStream::LIVENESS_RECORDED;
params.detected_audio_track_count = 2;
params.detected_video_track_count = 2;
diff --git a/chromium/media/formats/mp4/sample_to_group_iterator.h b/chromium/media/formats/mp4/sample_to_group_iterator.h
index 4c062007e3c..2675ee5033a 100644
--- a/chromium/media/formats/mp4/sample_to_group_iterator.h
+++ b/chromium/media/formats/mp4/sample_to_group_iterator.h
@@ -23,6 +23,10 @@ namespace mp4 {
class MEDIA_EXPORT SampleToGroupIterator {
public:
explicit SampleToGroupIterator(const SampleToGroup& sample_to_group);
+
+ SampleToGroupIterator(const SampleToGroupIterator&) = delete;
+ SampleToGroupIterator& operator=(const SampleToGroupIterator&) = delete;
+
~SampleToGroupIterator();
// Advances the iterator to refer to the next sample. Return status
@@ -42,8 +46,6 @@ class MEDIA_EXPORT SampleToGroupIterator {
uint32_t remaining_samples_;
const std::vector<SampleToGroupEntry>& sample_to_group_table_;
std::vector<SampleToGroupEntry>::const_iterator iterator_;
-
- DISALLOW_COPY_AND_ASSIGN(SampleToGroupIterator);
};
} // namespace mp4
diff --git a/chromium/media/formats/mp4/track_run_iterator.cc b/chromium/media/formats/mp4/track_run_iterator.cc
index 2c8cdc176e7..18ebdae847e 100644
--- a/chromium/media/formats/mp4/track_run_iterator.cc
+++ b/chromium/media/formats/mp4/track_run_iterator.cc
@@ -115,7 +115,7 @@ base::TimeDelta TimeDeltaFromRational(int64_t numer, int64_t denom) {
const int64_t total_microseconds =
base::Time::kMicrosecondsPerSecond * result_seconds + result_microseconds;
- return base::TimeDelta::FromMicroseconds(total_microseconds);
+ return base::Microseconds(total_microseconds);
}
DecodeTimestamp DecodeTimestampFromRational(int64_t numer, int64_t denom) {
diff --git a/chromium/media/formats/mp4/track_run_iterator.h b/chromium/media/formats/mp4/track_run_iterator.h
index 7ed9f7daca6..d3503ee0288 100644
--- a/chromium/media/formats/mp4/track_run_iterator.h
+++ b/chromium/media/formats/mp4/track_run_iterator.h
@@ -38,6 +38,10 @@ class MEDIA_EXPORT TrackRunIterator {
// Create a new TrackRunIterator. A reference to |moov| will be retained for
// the lifetime of this object.
TrackRunIterator(const Movie* moov, MediaLog* media_log);
+
+ TrackRunIterator(const TrackRunIterator&) = delete;
+ TrackRunIterator& operator=(const TrackRunIterator&) = delete;
+
~TrackRunIterator();
// Sets up the iterator to handle all the runs from the current fragment.
@@ -116,8 +120,6 @@ class MEDIA_EXPORT TrackRunIterator {
int64_t sample_dts_;
int64_t sample_cts_;
int64_t sample_offset_;
-
- DISALLOW_COPY_AND_ASSIGN(TrackRunIterator);
};
} // namespace mp4
diff --git a/chromium/media/formats/mp4/track_run_iterator_unittest.cc b/chromium/media/formats/mp4/track_run_iterator_unittest.cc
index f22f865b1fc..a2e80800256 100644
--- a/chromium/media/formats/mp4/track_run_iterator_unittest.cc
+++ b/chromium/media/formats/mp4/track_run_iterator_unittest.cc
@@ -148,7 +148,7 @@ MATCHER(ReservedValueInSampleDependencyInfo, "") {
TEST(TimeDeltaFromRationalTest, RoundsTowardZero) {
// In each case, 1.5us should round to 1us.
- base::TimeDelta expected = base::TimeDelta::FromMicroseconds(1);
+ base::TimeDelta expected = base::Microseconds(1);
EXPECT_EQ(TimeDeltaFromRational(3, 2000000), expected);
EXPECT_EQ(TimeDeltaFromRational(-3, 2000000), -expected);
}
@@ -160,7 +160,7 @@ TEST(TimeDeltaFromRationalTest, HandlesLargeValues) {
// Note: kNoTimestamp is printed as "9.22337e+12 s", which is visually
// indistinguishable from |expected|.
int64_t seconds = max_seconds - 1;
- base::TimeDelta expected = base::TimeDelta::FromSeconds(seconds);
+ base::TimeDelta expected = base::Seconds(seconds);
EXPECT_EQ(TimeDeltaFromRational(seconds, 1), expected);
EXPECT_EQ(TimeDeltaFromRational(-seconds, 1), -expected);
}
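
The tests above pin down the behaviour of TimeDeltaFromRational() in track_run_iterator.cc: the rational numer/denom seconds value is truncated toward zero at microsecond precision, and large second counts are handled by combining whole-second and sub-second parts, as in the hunk further up. A sketch of that arithmetic, assuming only base/time/time.h and omitting any range checks the real helper may perform:

#include <cstdint>

#include "base/time/time.h"

base::TimeDelta TimeDeltaFromRationalSketch(int64_t numer, int64_t denom) {
  // C++ integer division truncates toward zero, which gives exactly the
  // RoundsTowardZero behaviour asserted above (3/2000000 s -> 1 us,
  // -3/2000000 s -> -1 us).
  const int64_t result_seconds = numer / denom;
  const int64_t result_microseconds =
      (numer % denom) * base::Time::kMicrosecondsPerSecond / denom;
  const int64_t total_microseconds =
      base::Time::kMicrosecondsPerSecond * result_seconds + result_microseconds;
  return base::Microseconds(total_microseconds);
}
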
diff --git a/chromium/media/formats/mpeg/adts_stream_parser.cc b/chromium/media/formats/mpeg/adts_stream_parser.cc
index a47d2242dde..116f536a64d 100644
--- a/chromium/media/formats/mpeg/adts_stream_parser.cc
+++ b/chromium/media/formats/mpeg/adts_stream_parser.cc
@@ -16,7 +16,7 @@ namespace media {
constexpr uint32_t kADTSStartCodeMask = 0xfff00000;
ADTSStreamParser::ADTSStreamParser()
- : MPEGAudioStreamParserBase(kADTSStartCodeMask, kCodecAAC, 0) {}
+ : MPEGAudioStreamParserBase(kADTSStartCodeMask, AudioCodec::kAAC, 0) {}
ADTSStreamParser::~ADTSStreamParser() = default;
diff --git a/chromium/media/formats/mpeg/adts_stream_parser.h b/chromium/media/formats/mpeg/adts_stream_parser.h
index 5015c79626b..7284846f2a3 100644
--- a/chromium/media/formats/mpeg/adts_stream_parser.h
+++ b/chromium/media/formats/mpeg/adts_stream_parser.h
@@ -16,6 +16,10 @@ namespace media {
class MEDIA_EXPORT ADTSStreamParser : public MPEGAudioStreamParserBase {
public:
ADTSStreamParser();
+
+ ADTSStreamParser(const ADTSStreamParser&) = delete;
+ ADTSStreamParser& operator=(const ADTSStreamParser&) = delete;
+
~ADTSStreamParser() override;
// MPEGAudioStreamParserBase overrides.
@@ -30,8 +34,6 @@ class MEDIA_EXPORT ADTSStreamParser : public MPEGAudioStreamParserBase {
private:
size_t adts_parse_error_limit_ = 0;
-
- DISALLOW_COPY_AND_ASSIGN(ADTSStreamParser);
};
} // namespace media
diff --git a/chromium/media/formats/mpeg/mpeg1_audio_stream_parser.cc b/chromium/media/formats/mpeg/mpeg1_audio_stream_parser.cc
index d12f7cacee4..5b996764f4f 100644
--- a/chromium/media/formats/mpeg/mpeg1_audio_stream_parser.cc
+++ b/chromium/media/formats/mpeg/mpeg1_audio_stream_parser.cc
@@ -187,7 +187,9 @@ bool MPEG1AudioStreamParser::ParseHeader(MediaLog* media_log,
}
MPEG1AudioStreamParser::MPEG1AudioStreamParser()
- : MPEGAudioStreamParserBase(kMPEG1StartCodeMask, kCodecMP3, kCodecDelay) {}
+ : MPEGAudioStreamParserBase(kMPEG1StartCodeMask,
+ AudioCodec::kMP3,
+ kCodecDelay) {}
MPEG1AudioStreamParser::~MPEG1AudioStreamParser() = default;
diff --git a/chromium/media/formats/mpeg/mpeg1_audio_stream_parser.h b/chromium/media/formats/mpeg/mpeg1_audio_stream_parser.h
index de86e69661a..707e3408ee8 100644
--- a/chromium/media/formats/mpeg/mpeg1_audio_stream_parser.h
+++ b/chromium/media/formats/mpeg/mpeg1_audio_stream_parser.h
@@ -68,6 +68,10 @@ class MEDIA_EXPORT MPEG1AudioStreamParser : public MPEGAudioStreamParserBase {
Header* header);
MPEG1AudioStreamParser();
+
+ MPEG1AudioStreamParser(const MPEG1AudioStreamParser&) = delete;
+ MPEG1AudioStreamParser& operator=(const MPEG1AudioStreamParser&) = delete;
+
~MPEG1AudioStreamParser() override;
private:
@@ -82,8 +86,6 @@ class MEDIA_EXPORT MPEG1AudioStreamParser : public MPEGAudioStreamParserBase {
std::vector<uint8_t>* extra_data) override;
size_t mp3_parse_error_limit_ = 0;
-
- DISALLOW_COPY_AND_ASSIGN(MPEG1AudioStreamParser);
};
} // namespace media
diff --git a/chromium/media/formats/mpeg/mpeg_audio_stream_parser_base.cc b/chromium/media/formats/mpeg/mpeg_audio_stream_parser_base.cc
index 0ea60b26b41..aea794e3dfe 100644
--- a/chromium/media/formats/mpeg/mpeg_audio_stream_parser_base.cc
+++ b/chromium/media/formats/mpeg/mpeg_audio_stream_parser_base.cc
@@ -213,7 +213,7 @@ int MPEGAudioStreamParserBase::ParseFrame(const uint8_t* data,
config_.Initialize(audio_codec_, kSampleFormatF32, channel_layout,
sample_rate, extra_data, EncryptionScheme::kUnencrypted,
base::TimeDelta(), codec_delay_);
- if (audio_codec_ == kCodecAAC)
+ if (audio_codec_ == AudioCodec::kAAC)
config_.disable_discard_decoder_delay();
base::TimeDelta base_timestamp;
diff --git a/chromium/media/formats/mpeg/mpeg_audio_stream_parser_base.h b/chromium/media/formats/mpeg/mpeg_audio_stream_parser_base.h
index 425d7079aa3..5687e5235f7 100644
--- a/chromium/media/formats/mpeg/mpeg_audio_stream_parser_base.h
+++ b/chromium/media/formats/mpeg/mpeg_audio_stream_parser_base.h
@@ -31,6 +31,11 @@ class MEDIA_EXPORT MPEGAudioStreamParserBase : public StreamParser {
MPEGAudioStreamParserBase(uint32_t start_code_mask,
AudioCodec audio_codec,
int codec_delay);
+
+ MPEGAudioStreamParserBase(const MPEGAudioStreamParserBase&) = delete;
+ MPEGAudioStreamParserBase& operator=(const MPEGAudioStreamParserBase&) =
+ delete;
+
~MPEGAudioStreamParserBase() override;
// StreamParser implementation.
@@ -152,8 +157,6 @@ class MEDIA_EXPORT MPEGAudioStreamParserBase : public StreamParser {
const uint32_t start_code_mask_;
const AudioCodec audio_codec_;
const int codec_delay_;
-
- DISALLOW_COPY_AND_ASSIGN(MPEGAudioStreamParserBase);
};
} // namespace media
diff --git a/chromium/media/formats/webcodecs/webcodecs_encoded_chunk_stream_parser.h b/chromium/media/formats/webcodecs/webcodecs_encoded_chunk_stream_parser.h
index 85480ab1793..b1f39847108 100644
--- a/chromium/media/formats/webcodecs/webcodecs_encoded_chunk_stream_parser.h
+++ b/chromium/media/formats/webcodecs/webcodecs_encoded_chunk_stream_parser.h
@@ -24,6 +24,12 @@ class MEDIA_EXPORT WebCodecsEncodedChunkStreamParser : public StreamParser {
std::unique_ptr<AudioDecoderConfig> audio_config);
explicit WebCodecsEncodedChunkStreamParser(
std::unique_ptr<VideoDecoderConfig> video_config);
+
+ WebCodecsEncodedChunkStreamParser(const WebCodecsEncodedChunkStreamParser&) =
+ delete;
+ WebCodecsEncodedChunkStreamParser& operator=(
+ const WebCodecsEncodedChunkStreamParser&) = delete;
+
~WebCodecsEncodedChunkStreamParser() override;
// StreamParser implementation.
@@ -69,8 +75,6 @@ class MEDIA_EXPORT WebCodecsEncodedChunkStreamParser : public StreamParser {
NewMediaSegmentCB new_segment_cb_;
EndMediaSegmentCB end_of_segment_cb_;
MediaLog* media_log_;
-
- DISALLOW_COPY_AND_ASSIGN(WebCodecsEncodedChunkStreamParser);
};
} // namespace media
diff --git a/chromium/media/formats/webm/cluster_builder.h b/chromium/media/formats/webm/cluster_builder.h
index a708cde6fe6..685700be26e 100644
--- a/chromium/media/formats/webm/cluster_builder.h
+++ b/chromium/media/formats/webm/cluster_builder.h
@@ -31,6 +31,10 @@ class Cluster {
class ClusterBuilder {
public:
ClusterBuilder();
+
+ ClusterBuilder(const ClusterBuilder&) = delete;
+ ClusterBuilder& operator=(const ClusterBuilder&) = delete;
+
~ClusterBuilder();
void SetClusterTimecode(int64_t cluster_timecode);
@@ -79,8 +83,6 @@ class ClusterBuilder {
int buffer_size_;
int bytes_used_;
int64_t cluster_timecode_;
-
- DISALLOW_COPY_AND_ASSIGN(ClusterBuilder);
};
} // namespace media
diff --git a/chromium/media/formats/webm/opus_packet_builder.h b/chromium/media/formats/webm/opus_packet_builder.h
index 77e5f2a9597..5b03ce816f2 100644
--- a/chromium/media/formats/webm/opus_packet_builder.h
+++ b/chromium/media/formats/webm/opus_packet_builder.h
@@ -24,6 +24,10 @@ enum OpusConstants {
class OpusPacket {
public:
OpusPacket(uint8_t config, uint8_t frame_count, bool is_VBR);
+
+ OpusPacket(const OpusPacket&) = delete;
+ OpusPacket& operator=(const OpusPacket&) = delete;
+
~OpusPacket();
const uint8_t* data() const;
@@ -33,8 +37,6 @@ class OpusPacket {
private:
std::vector<uint8_t> data_;
double duration_ms_;
-
- DISALLOW_COPY_AND_ASSIGN(OpusPacket);
};
// Builds an exhaustive collection of Opus packet configurations.
diff --git a/chromium/media/formats/webm/tracks_builder.h b/chromium/media/formats/webm/tracks_builder.h
index 41d0938f5ef..9acf4ad4bea 100644
--- a/chromium/media/formats/webm/tracks_builder.h
+++ b/chromium/media/formats/webm/tracks_builder.h
@@ -22,6 +22,10 @@ class TracksBuilder {
// |name|, |language| and any device-specific constraints are not checked.
explicit TracksBuilder(bool allow_invalid_values);
TracksBuilder(); // Sets |allow_invalid_values| to false.
+
+ TracksBuilder(const TracksBuilder&) = delete;
+ TracksBuilder& operator=(const TracksBuilder&) = delete;
+
~TracksBuilder();
// Only a non-negative |default_duration| will result in a serialized
@@ -109,8 +113,6 @@ class TracksBuilder {
typedef std::list<Track> TrackList;
TrackList tracks_;
bool allow_invalid_values_;
-
- DISALLOW_COPY_AND_ASSIGN(TracksBuilder);
};
} // namespace media
diff --git a/chromium/media/formats/webm/webm_audio_client.cc b/chromium/media/formats/webm/webm_audio_client.cc
index 5ac98e8eb02..6c49c6dc3b1 100644
--- a/chromium/media/formats/webm/webm_audio_client.cc
+++ b/chromium/media/formats/webm/webm_audio_client.cc
@@ -32,11 +32,11 @@ bool WebMAudioClient::InitializeConfig(
DCHECK(config);
SampleFormat sample_format = kSampleFormatPlanarF32;
- AudioCodec audio_codec = kUnknownAudioCodec;
+ AudioCodec audio_codec = AudioCodec::kUnknown;
if (codec_id == "A_VORBIS") {
- audio_codec = kCodecVorbis;
+ audio_codec = AudioCodec::kVorbis;
} else if (codec_id == "A_OPUS") {
- audio_codec = kCodecOpus;
+ audio_codec = AudioCodec::kOpus;
} else {
MEDIA_LOG(ERROR, media_log_) << "Unsupported audio codec_id " << codec_id;
return false;
@@ -63,7 +63,7 @@ bool WebMAudioClient::InitializeConfig(
// Always use 48kHz for OPUS. See the "Input Sample Rate" section of the
// spec: http://tools.ietf.org/html/draft-terriberry-oggopus-01#page-11
- if (audio_codec == kCodecOpus) {
+ if (audio_codec == AudioCodec::kOpus) {
samples_per_second = 48000;
sample_format = kSampleFormatF32;
}
@@ -77,11 +77,11 @@ bool WebMAudioClient::InitializeConfig(
base::Time::kNanosecondsPerSecond);
}
- config->Initialize(audio_codec, sample_format, channel_layout,
- samples_per_second, codec_private, encryption_scheme,
- base::TimeDelta::FromMicroseconds(
- (seek_preroll != -1 ? seek_preroll : 0) / 1000),
- codec_delay_in_frames);
+ config->Initialize(
+ audio_codec, sample_format, channel_layout, samples_per_second,
+ codec_private, encryption_scheme,
+ base::Microseconds((seek_preroll != -1 ? seek_preroll : 0) / 1000),
+ codec_delay_in_frames);
config->SetChannelsForDiscrete(channels_);
return config->IsValidConfig();
}
diff --git a/chromium/media/formats/webm/webm_audio_client.h b/chromium/media/formats/webm/webm_audio_client.h
index ab658677b94..6d734255df8 100644
--- a/chromium/media/formats/webm/webm_audio_client.h
+++ b/chromium/media/formats/webm/webm_audio_client.h
@@ -22,6 +22,10 @@ class AudioDecoderConfig;
class WebMAudioClient : public WebMParserClient {
public:
explicit WebMAudioClient(MediaLog* media_log);
+
+ WebMAudioClient(const WebMAudioClient&) = delete;
+ WebMAudioClient& operator=(const WebMAudioClient&) = delete;
+
~WebMAudioClient() override;
// Reset this object's state so it can process a new audio track element.
@@ -49,8 +53,6 @@ class WebMAudioClient : public WebMParserClient {
int channels_;
double samples_per_second_;
double output_samples_per_second_;
-
- DISALLOW_COPY_AND_ASSIGN(WebMAudioClient);
};
} // namespace media
diff --git a/chromium/media/formats/webm/webm_cluster_parser.cc b/chromium/media/formats/webm/webm_cluster_parser.cc
index 9d192f81134..932763273e0 100644
--- a/chromium/media/formats/webm/webm_cluster_parser.cc
+++ b/chromium/media/formats/webm/webm_cluster_parser.cc
@@ -108,8 +108,8 @@ int WebMClusterParser::Parse(const uint8_t* buf, int size) {
if (cluster_timecode_ < 0)
return -1;
- cluster_start_time_ = base::TimeDelta::FromMicroseconds(
- cluster_timecode_ * timecode_multiplier_);
+ cluster_start_time_ =
+ base::Microseconds(cluster_timecode_ * timecode_multiplier_);
}
// Reset the parser if we're done parsing so that
@@ -172,7 +172,7 @@ base::TimeDelta WebMClusterParser::TryGetEncodedAudioDuration(
// TODO(chcunningham): Consider parsing "Signal Byte" for encrypted streams
// to return duration for any unencrypted blocks.
- if (audio_codec_ == kCodecOpus) {
+ if (audio_codec_ == AudioCodec::kOpus) {
return ReadOpusDuration(data, size);
}
@@ -189,8 +189,7 @@ base::TimeDelta WebMClusterParser::ReadOpusDuration(const uint8_t* data,
static const uint8_t kTocConfigMask = 0xf8;
static const uint8_t kTocFrameCountCodeMask = 0x03;
static const uint8_t kFrameCountMask = 0x3f;
- static const base::TimeDelta kPacketDurationMax =
- base::TimeDelta::FromMilliseconds(120);
+ static const base::TimeDelta kPacketDurationMax = base::Milliseconds(120);
if (size < 1) {
LIMITED_MEDIA_LOG(DEBUG, media_log_, num_duration_errors_,
@@ -246,8 +245,8 @@ base::TimeDelta WebMClusterParser::ReadOpusDuration(const uint8_t* data,
CHECK_LT(opusConfig, static_cast<int>(base::size(kOpusFrameDurationsMu)));
DCHECK_GT(frame_count, 0);
- base::TimeDelta duration = base::TimeDelta::FromMicroseconds(
- kOpusFrameDurationsMu[opusConfig] * frame_count);
+ base::TimeDelta duration =
+ base::Microseconds(kOpusFrameDurationsMu[opusConfig] * frame_count);
if (duration > kPacketDurationMax) {
// Intentionally allowing packet to pass through for now. Decoder should
@@ -503,7 +502,7 @@ bool WebMClusterParser::OnBlock(bool is_simple_block,
return false;
}
- base::TimeDelta timestamp = base::TimeDelta::FromMicroseconds(microseconds);
+ base::TimeDelta timestamp = base::Microseconds(microseconds);
if (timestamp == kNoTimestamp || timestamp == kInfiniteDuration) {
MEDIA_LOG(ERROR, media_log_) << "Invalid block timestamp.";
@@ -562,8 +561,8 @@ bool WebMClusterParser::OnBlock(bool is_simple_block,
base::TimeDelta block_duration_time_delta = kNoTimestamp;
if (block_duration >= 0) {
- block_duration_time_delta = base::TimeDelta::FromMicroseconds(
- block_duration * timecode_multiplier_);
+ block_duration_time_delta =
+ base::Microseconds(block_duration * timecode_multiplier_);
}
// Prefer encoded duration over BlockGroup->BlockDuration or
@@ -590,7 +589,7 @@ bool WebMClusterParser::OnBlock(bool is_simple_block,
block_duration_time_delta - encoded_duration;
const auto kWarnDurationDiff =
- base::TimeDelta::FromMicroseconds(timecode_multiplier_ * 2);
+ base::Microseconds(timecode_multiplier_ * 2);
if (duration_difference.magnitude() > kWarnDurationDiff) {
LIMITED_MEDIA_LOG(DEBUG, media_log_, num_duration_errors_,
kMaxDurationErrorLogs)
@@ -610,8 +609,7 @@ bool WebMClusterParser::OnBlock(bool is_simple_block,
// https://crbug.com/969195.
if (discard_padding != 0) {
buffer->set_discard_padding(std::make_pair(
- base::TimeDelta(),
- base::TimeDelta::FromMicroseconds(discard_padding / 1000)));
+ base::TimeDelta(), base::Microseconds(discard_padding / 1000)));
}
return track->AddBuffer(std::move(buffer));
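
For reference, ReadOpusDuration() (touched in the hunks above) derives a packet duration from the Opus TOC byte alone: the configuration number selects a per-frame duration from kOpusFrameDurationsMu, and the frame-count code selects one frame, two frames, or an explicit count from the second byte, as defined by RFC 6716. A condensed sketch of that logic without the media-log plumbing and error caps of the real method; the duration table is passed in rather than redefined here:

#include <cstdint>

#include "base/time/time.h"

// |frame_durations_us| stands for the kOpusFrameDurationsMu table already
// used by webm_cluster_parser.cc (per-frame durations in microseconds,
// indexed by TOC config).
base::TimeDelta OpusPacketDurationSketch(const uint8_t* data,
                                         int size,
                                         const int* frame_durations_us) {
  if (size < 1)
    return base::TimeDelta();  // the real code logs an error and bails
  const int config = (data[0] & 0xf8) >> 3;     // kTocConfigMask
  const int frame_count_code = data[0] & 0x03;  // kTocFrameCountCodeMask
  int frame_count = 0;
  switch (frame_count_code) {
    case 0:
      frame_count = 1;
      break;
    case 1:
    case 2:
      frame_count = 2;
      break;
    default:  // code 3: explicit count in the next byte (RFC 6716, sec. 3.2)
      if (size < 2)
        return base::TimeDelta();
      frame_count = data[1] & 0x3f;             // kFrameCountMask
      break;
  }
  return base::Microseconds(
      static_cast<int64_t>(frame_durations_us[config]) * frame_count);
}
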
@@ -790,12 +788,10 @@ base::TimeDelta WebMClusterParser::Track::GetDurationEstimate() {
if (max_frame_duration_ == kNoTimestamp) {
DVLOG(3) << __func__ << " : using hardcoded default duration";
if (track_type_ == TrackType::AUDIO) {
- duration =
- base::TimeDelta::FromMilliseconds(kDefaultAudioBufferDurationInMs);
+ duration = base::Milliseconds(kDefaultAudioBufferDurationInMs);
} else {
// Text and video tracks can both use the larger video default duration.
- duration =
- base::TimeDelta::FromMilliseconds(kDefaultVideoBufferDurationInMs);
+ duration = base::Milliseconds(kDefaultVideoBufferDurationInMs);
}
} else {
// Use max duration to minimize the risk of introducing gaps in the buffered
diff --git a/chromium/media/formats/webm/webm_cluster_parser_unittest.cc b/chromium/media/formats/webm/webm_cluster_parser_unittest.cc
index 987dc710839..9edb409f96f 100644
--- a/chromium/media/formats/webm/webm_cluster_parser_unittest.cc
+++ b/chromium/media/formats/webm/webm_cluster_parser_unittest.cc
@@ -293,9 +293,9 @@ class WebMClusterParserTest : public testing::Test {
protected:
void ResetParserToHaveDefaultDurations() {
base::TimeDelta default_audio_duration =
- base::TimeDelta::FromMillisecondsD(kTestAudioFrameDefaultDurationInMs);
+ base::Milliseconds(kTestAudioFrameDefaultDurationInMs);
base::TimeDelta default_video_duration =
- base::TimeDelta::FromMillisecondsD(kTestVideoFrameDefaultDurationInMs);
+ base::Milliseconds(kTestVideoFrameDefaultDurationInMs);
ASSERT_GE(default_audio_duration, base::TimeDelta());
ASSERT_GE(default_video_duration, base::TimeDelta());
ASSERT_NE(kNoTimestamp, default_audio_duration);
@@ -325,7 +325,7 @@ class WebMClusterParserTest : public testing::Test {
WebMClusterParser* CreateDefaultParser() {
return CreateParserHelper(kNoTimestamp, kNoTimestamp, TextTracks(),
std::set<int64_t>(), std::string(), std::string(),
- kUnknownAudioCodec);
+ AudioCodec::kUnknown);
}
// Create a parser for test with custom audio and video default durations, and
@@ -336,7 +336,7 @@ class WebMClusterParserTest : public testing::Test {
const WebMTracksParser::TextTracks& text_tracks = TextTracks()) {
return CreateParserHelper(audio_default_duration, video_default_duration,
text_tracks, std::set<int64_t>(), std::string(),
- std::string(), kUnknownAudioCodec);
+ std::string(), AudioCodec::kUnknown);
}
// Create a parser for test with custom ignored tracks.
@@ -344,7 +344,7 @@ class WebMClusterParserTest : public testing::Test {
std::set<int64_t>& ignored_tracks) {
return CreateParserHelper(kNoTimestamp, kNoTimestamp, TextTracks(),
ignored_tracks, std::string(), std::string(),
- kUnknownAudioCodec);
+ AudioCodec::kUnknown);
}
// Create a parser for test with custom encryption key ids and audio codec.
@@ -378,7 +378,7 @@ TEST_F(WebMClusterParserTest, HeldBackBufferHoldsBackAllTracks) {
TextTrackConfig(kTextSubtitles, "", "",
"")));
base::TimeDelta default_audio_duration =
- base::TimeDelta::FromMillisecondsD(kTestAudioFrameDefaultDurationInMs);
+ base::Milliseconds(kTestAudioFrameDefaultDurationInMs);
ASSERT_GE(default_audio_duration, base::TimeDelta());
ASSERT_NE(kNoTimestamp, default_audio_duration);
parser_.reset(CreateParserWithDefaultDurationsAndOptionalTextTracks(
@@ -716,7 +716,7 @@ TEST_F(WebMClusterParserTest, ParseEncryptedBlock) {
CreateEncryptedCluster(sizeof(kEncryptedFrame)));
parser_.reset(CreateParserWithKeyIdsAndAudioCodec(
- std::string(), "video_key_id", kUnknownAudioCodec));
+ std::string(), "video_key_id", AudioCodec::kUnknown));
// The encrypted cluster contains just one block, video.
EXPECT_MEDIA_LOG(WebMSimpleBlockDurationEstimated(
@@ -736,7 +736,7 @@ TEST_F(WebMClusterParserTest, ParseBadEncryptedBlock) {
CreateEncryptedCluster(sizeof(kEncryptedFrame) - 1));
parser_.reset(CreateParserWithKeyIdsAndAudioCodec(
- std::string(), "video_key_id", kUnknownAudioCodec));
+ std::string(), "video_key_id", AudioCodec::kUnknown));
EXPECT_MEDIA_LOG(HasSubstr("Failed to extract decrypt config"));
int result = parser_->Parse(cluster->data(), cluster->size());
@@ -1149,7 +1149,7 @@ TEST_F(WebMClusterParserTest, ReadOpusDurationsSimpleBlockAtEndOfCluster) {
// Get a new parser each iteration to prevent exceeding the media log cap.
parser_.reset(CreateParserWithKeyIdsAndAudioCodec(
- std::string(), std::string(), kCodecOpus));
+ std::string(), std::string(), AudioCodec::kOpus));
const BlockInfo kBlockInfo[] = {{kAudioTrackNum,
0,
@@ -1186,7 +1186,7 @@ TEST_F(WebMClusterParserTest, PreferOpusDurationsOverBlockDurations) {
// Get a new parser each iteration to prevent exceeding the media log cap.
parser_.reset(CreateParserWithKeyIdsAndAudioCodec(
- std::string(), std::string(), kCodecOpus));
+ std::string(), std::string(), AudioCodec::kOpus));
// Setting BlockDuration != Opus duration to see which one the parser uses.
double block_duration_ms = packet_ptr->duration_ms() + 10;
@@ -1234,8 +1234,8 @@ TEST_F(WebMClusterParserTest, DontReadEncodedDurationWhenEncrypted) {
std::string audio_encryption_id("audio_key_id");
// Reset parser to expect Opus codec audio and use audio encryption key id.
- parser_.reset(CreateParserWithKeyIdsAndAudioCodec(audio_encryption_id,
- std::string(), kCodecOpus));
+ parser_.reset(CreateParserWithKeyIdsAndAudioCodec(
+ audio_encryption_id, std::string(), AudioCodec::kOpus));
// Single Block with BlockDuration and encrypted data.
const BlockInfo kBlockInfo[] = {{kAudioTrackNum, 0,
diff --git a/chromium/media/formats/webm/webm_colour_parser.h b/chromium/media/formats/webm/webm_colour_parser.h
index 90b86f1c9b9..fe93df2d349 100644
--- a/chromium/media/formats/webm/webm_colour_parser.h
+++ b/chromium/media/formats/webm/webm_colour_parser.h
@@ -37,6 +37,11 @@ struct MEDIA_EXPORT WebMColorMetadata {
class WebMColorVolumeMetadataParser : public WebMParserClient {
public:
WebMColorVolumeMetadataParser();
+
+ WebMColorVolumeMetadataParser(const WebMColorVolumeMetadataParser&) = delete;
+ WebMColorVolumeMetadataParser& operator=(
+ const WebMColorVolumeMetadataParser&) = delete;
+
~WebMColorVolumeMetadataParser() override;
gfx::ColorVolumeMetadata GetColorVolumeMetadata() const {
@@ -48,7 +53,6 @@ class WebMColorVolumeMetadataParser : public WebMParserClient {
bool OnFloat(int id, double val) override;
gfx::ColorVolumeMetadata color_volume_metadata_;
- DISALLOW_COPY_AND_ASSIGN(WebMColorVolumeMetadataParser);
};
// Parser for WebM Colour element:
@@ -56,6 +60,10 @@ class WebMColorVolumeMetadataParser : public WebMParserClient {
class WebMColourParser : public WebMParserClient {
public:
WebMColourParser();
+
+ WebMColourParser(const WebMColourParser&) = delete;
+ WebMColourParser& operator=(const WebMColourParser&) = delete;
+
~WebMColourParser() override;
void Reset();
@@ -84,8 +92,6 @@ class WebMColourParser : public WebMParserClient {
WebMColorVolumeMetadataParser color_volume_metadata_parser_;
bool color_volume_metadata_parsed_ = false;
-
- DISALLOW_COPY_AND_ASSIGN(WebMColourParser);
};
} // namespace media
diff --git a/chromium/media/formats/webm/webm_content_encodings.h b/chromium/media/formats/webm/webm_content_encodings.h
index 1a1fa8d721e..3f699d5597f 100644
--- a/chromium/media/formats/webm/webm_content_encodings.h
+++ b/chromium/media/formats/webm/webm_content_encodings.h
@@ -52,6 +52,10 @@ class MEDIA_EXPORT ContentEncoding {
};
ContentEncoding();
+
+ ContentEncoding(const ContentEncoding&) = delete;
+ ContentEncoding& operator=(const ContentEncoding&) = delete;
+
~ContentEncoding();
int64_t order() const { return order_; }
@@ -81,8 +85,6 @@ class MEDIA_EXPORT ContentEncoding {
EncryptionAlgo encryption_algo_;
std::string encryption_key_id_;
CipherMode cipher_mode_;
-
- DISALLOW_COPY_AND_ASSIGN(ContentEncoding);
};
} // namespace media
diff --git a/chromium/media/formats/webm/webm_content_encodings_client.h b/chromium/media/formats/webm/webm_content_encodings_client.h
index 23820c5234c..c43a84da6db 100644
--- a/chromium/media/formats/webm/webm_content_encodings_client.h
+++ b/chromium/media/formats/webm/webm_content_encodings_client.h
@@ -26,6 +26,11 @@ typedef std::vector<std::unique_ptr<ContentEncoding>> ContentEncodings;
class MEDIA_EXPORT WebMContentEncodingsClient : public WebMParserClient {
public:
explicit WebMContentEncodingsClient(MediaLog* media_log);
+
+ WebMContentEncodingsClient(const WebMContentEncodingsClient&) = delete;
+ WebMContentEncodingsClient& operator=(const WebMContentEncodingsClient&) =
+ delete;
+
~WebMContentEncodingsClient() override;
const ContentEncodings& content_encodings() const;
@@ -44,8 +49,6 @@ class MEDIA_EXPORT WebMContentEncodingsClient : public WebMParserClient {
// |content_encodings_| is ready. For debugging purpose.
bool content_encodings_ready_;
-
- DISALLOW_COPY_AND_ASSIGN(WebMContentEncodingsClient);
};
} // namespace media
diff --git a/chromium/media/formats/webm/webm_info_parser.cc b/chromium/media/formats/webm/webm_info_parser.cc
index 3e5eede925a..831a5850191 100644
--- a/chromium/media/formats/webm/webm_info_parser.cc
+++ b/chromium/media/formats/webm/webm_info_parser.cc
@@ -96,8 +96,7 @@ bool WebMInfoParser::OnBinary(int id, const uint8_t* data, int size) {
base::Time out_time;
if (!base::Time::FromUTCExploded(exploded_epoch, &out_time))
return false;
- date_utc_ = out_time +
- base::TimeDelta::FromMicroseconds(date_in_nanoseconds / 1000);
+ date_utc_ = out_time + base::Microseconds(date_in_nanoseconds / 1000);
}
return true;
}
diff --git a/chromium/media/formats/webm/webm_info_parser.h b/chromium/media/formats/webm/webm_info_parser.h
index 1c6a518bd71..92c5d671d81 100644
--- a/chromium/media/formats/webm/webm_info_parser.h
+++ b/chromium/media/formats/webm/webm_info_parser.h
@@ -19,6 +19,10 @@ namespace media {
class MEDIA_EXPORT WebMInfoParser : public WebMParserClient {
public:
WebMInfoParser();
+
+ WebMInfoParser(const WebMInfoParser&) = delete;
+ WebMInfoParser& operator=(const WebMInfoParser&) = delete;
+
~WebMInfoParser() override;
// Parses a WebM Info element in |buf|.
@@ -44,8 +48,6 @@ class MEDIA_EXPORT WebMInfoParser : public WebMParserClient {
int64_t timecode_scale_ns_;
double duration_;
base::Time date_utc_;
-
- DISALLOW_COPY_AND_ASSIGN(WebMInfoParser);
};
} // namespace media
diff --git a/chromium/media/formats/webm/webm_parser.h b/chromium/media/formats/webm/webm_parser.h
index 90a853b0313..c8b8da15d1c 100644
--- a/chromium/media/formats/webm/webm_parser.h
+++ b/chromium/media/formats/webm/webm_parser.h
@@ -29,6 +29,9 @@ namespace media {
// error is reported by the parser.
class MEDIA_EXPORT WebMParserClient {
public:
+ WebMParserClient(const WebMParserClient&) = delete;
+ WebMParserClient& operator=(const WebMParserClient&) = delete;
+
virtual ~WebMParserClient();
virtual WebMParserClient* OnListStart(int id);
@@ -45,8 +48,6 @@ class MEDIA_EXPORT WebMParserClient {
protected:
WebMParserClient();
-
- DISALLOW_COPY_AND_ASSIGN(WebMParserClient);
};
struct ListElementInfo;
@@ -61,6 +62,10 @@ class MEDIA_EXPORT WebMListParser {
// |id| - Element ID of the list we intend to parse.
// |client| - Called as different elements in the list are parsed.
WebMListParser(int id, WebMParserClient* client);
+
+ WebMListParser(const WebMListParser&) = delete;
+ WebMListParser& operator=(const WebMListParser&) = delete;
+
~WebMListParser();
// Resets the state of the parser so it can start parsing a new list.
@@ -151,8 +156,6 @@ class MEDIA_EXPORT WebMListParser {
// Stack of state for all the lists currently being parsed. Lists are
// added and removed from this stack as they are parsed.
std::vector<ListState> list_state_stack_;
-
- DISALLOW_COPY_AND_ASSIGN(WebMListParser);
};
// Parses an element header & returns the ID and element size.
diff --git a/chromium/media/formats/webm/webm_projection_parser.h b/chromium/media/formats/webm/webm_projection_parser.h
index 8766f8fb7a1..a6e89752f03 100644
--- a/chromium/media/formats/webm/webm_projection_parser.h
+++ b/chromium/media/formats/webm/webm_projection_parser.h
@@ -15,6 +15,10 @@ namespace media {
class MEDIA_EXPORT WebMProjectionParser : public WebMParserClient {
public:
explicit WebMProjectionParser(MediaLog* media_log);
+
+ WebMProjectionParser(const WebMProjectionParser&) = delete;
+ WebMProjectionParser& operator=(const WebMProjectionParser&) = delete;
+
~WebMProjectionParser() override;
void Reset();
@@ -33,8 +37,6 @@ class MEDIA_EXPORT WebMProjectionParser : public WebMParserClient {
double pose_yaw_; // value must be [-180, 180]
double pose_pitch_; // value must be [-90, 90]
double pose_roll_; // value must be [-180, 180]
-
- DISALLOW_COPY_AND_ASSIGN(WebMProjectionParser);
};
} // namespace media
diff --git a/chromium/media/formats/webm/webm_stream_parser.cc b/chromium/media/formats/webm/webm_stream_parser.cc
index bc632221646..951cba094c9 100644
--- a/chromium/media/formats/webm/webm_stream_parser.cc
+++ b/chromium/media/formats/webm/webm_stream_parser.cc
@@ -205,7 +205,7 @@ int WebMStreamParser::ParseInfoAndTracks(const uint8_t* data, int size) {
if (info_parser.duration() > 0) {
int64_t duration_in_us = info_parser.duration() * timecode_scale_in_us;
- params.duration = base::TimeDelta::FromMicroseconds(duration_in_us);
+ params.duration = base::Microseconds(duration_in_us);
}
params.timeline_offset = info_parser.date_utc();
diff --git a/chromium/media/formats/webm/webm_stream_parser.h b/chromium/media/formats/webm/webm_stream_parser.h
index bab74de0a80..407c23d0cc9 100644
--- a/chromium/media/formats/webm/webm_stream_parser.h
+++ b/chromium/media/formats/webm/webm_stream_parser.h
@@ -24,6 +24,10 @@ class WebMClusterParser;
class MEDIA_EXPORT WebMStreamParser : public StreamParser {
public:
WebMStreamParser();
+
+ WebMStreamParser(const WebMStreamParser&) = delete;
+ WebMStreamParser& operator=(const WebMStreamParser&) = delete;
+
~WebMStreamParser() override;
// StreamParser implementation.
@@ -86,8 +90,6 @@ class MEDIA_EXPORT WebMStreamParser : public StreamParser {
std::unique_ptr<WebMClusterParser> cluster_parser_;
ByteQueue byte_queue_;
-
- DISALLOW_COPY_AND_ASSIGN(WebMStreamParser);
};
} // namespace media
diff --git a/chromium/media/formats/webm/webm_tracks_parser.cc b/chromium/media/formats/webm/webm_tracks_parser.cc
index 99ee1acc628..b8d448042eb 100644
--- a/chromium/media/formats/webm/webm_tracks_parser.cc
+++ b/chromium/media/formats/webm/webm_tracks_parser.cc
@@ -57,7 +57,7 @@ base::TimeDelta WebMTracksParser::PrecisionCappedDefaultDuration(
if (result_us == 0)
return kNoTimestamp;
- return base::TimeDelta::FromMicroseconds(result_us);
+ return base::Microseconds(result_us);
}
void WebMTracksParser::Reset() {
diff --git a/chromium/media/formats/webm/webm_tracks_parser.h b/chromium/media/formats/webm/webm_tracks_parser.h
index 72996688a81..65da011d475 100644
--- a/chromium/media/formats/webm/webm_tracks_parser.h
+++ b/chromium/media/formats/webm/webm_tracks_parser.h
@@ -14,6 +14,7 @@
#include <vector>
#include "base/compiler_specific.h"
+#include "base/gtest_prod_util.h"
#include "base/macros.h"
#include "base/time/time.h"
#include "media/base/audio_decoder_config.h"
@@ -32,6 +33,10 @@ namespace media {
class MEDIA_EXPORT WebMTracksParser : public WebMParserClient {
public:
WebMTracksParser(MediaLog* media_log, bool ignore_text_tracks);
+
+ WebMTracksParser(const WebMTracksParser&) = delete;
+ WebMTracksParser& operator=(const WebMTracksParser&) = delete;
+
~WebMTracksParser() override;
// Parses a WebM Tracks element in |buf|.
@@ -150,8 +155,6 @@ class MEDIA_EXPORT WebMTracksParser : public WebMParserClient {
int detected_video_track_count_;
int detected_text_track_count_;
std::unique_ptr<MediaTracks> media_tracks_;
-
- DISALLOW_COPY_AND_ASSIGN(WebMTracksParser);
};
} // namespace media
diff --git a/chromium/media/formats/webm/webm_tracks_parser_unittest.cc b/chromium/media/formats/webm/webm_tracks_parser_unittest.cc
index 66d2a6a377d..ed6c2fc77d3 100644
--- a/chromium/media/formats/webm/webm_tracks_parser_unittest.cc
+++ b/chromium/media/formats/webm/webm_tracks_parser_unittest.cc
@@ -181,14 +181,14 @@ TEST_F(WebMTracksParserTest, AudioVideoDefaultDurationSet) {
EXPECT_LE(0, result);
EXPECT_EQ(static_cast<int>(buf.size()), result);
- EXPECT_EQ(base::TimeDelta::FromMicroseconds(12000),
+ EXPECT_EQ(base::Microseconds(12000),
parser->GetAudioDefaultDuration(kOneMsInNs));
- EXPECT_EQ(base::TimeDelta::FromMicroseconds(985000),
+ EXPECT_EQ(base::Microseconds(985000),
parser->GetVideoDefaultDuration(5000000)); // 5 ms resolution
EXPECT_EQ(kNoTimestamp, parser->GetAudioDefaultDuration(12346000));
- EXPECT_EQ(base::TimeDelta::FromMicroseconds(12345),
+ EXPECT_EQ(base::Microseconds(12345),
parser->GetAudioDefaultDuration(12345000));
- EXPECT_EQ(base::TimeDelta::FromMicroseconds(12003),
+ EXPECT_EQ(base::Microseconds(12003),
parser->GetAudioDefaultDuration(1000300)); // 1.0003 ms resolution
}
@@ -262,10 +262,10 @@ TEST_F(WebMTracksParserTest, PrecisionCapping) {
{kOneMsInNs, 0, kNoTimestamp},
{kOneMsInNs, 1, kNoTimestamp},
{kOneMsInNs, 999999, kNoTimestamp},
- {kOneMsInNs, 1000000, base::TimeDelta::FromMilliseconds(1)},
- {kOneMsInNs, 1000001, base::TimeDelta::FromMilliseconds(1)},
- {kOneMsInNs, 1999999, base::TimeDelta::FromMilliseconds(1)},
- {kOneMsInNs, 2000000, base::TimeDelta::FromMilliseconds(2)},
+ {kOneMsInNs, 1000000, base::Milliseconds(1)},
+ {kOneMsInNs, 1000001, base::Milliseconds(1)},
+ {kOneMsInNs, 1999999, base::Milliseconds(1)},
+ {kOneMsInNs, 2000000, base::Milliseconds(2)},
{1, -1, kNoTimestamp},
{1, 0, kNoTimestamp},
@@ -273,11 +273,11 @@ TEST_F(WebMTracksParserTest, PrecisionCapping) {
{1, 1, kNoTimestamp},
{1, 999, kNoTimestamp},
- {1, 1000, base::TimeDelta::FromMicroseconds(1)},
- {1, 1999, base::TimeDelta::FromMicroseconds(1)},
- {1, 2000, base::TimeDelta::FromMicroseconds(2)},
+ {1, 1000, base::Microseconds(1)},
+ {1, 1999, base::Microseconds(1)},
+ {1, 2000, base::Microseconds(2)},
- {64, 1792, base::TimeDelta::FromMicroseconds(1)},
+ {64, 1792, base::Microseconds(1)},
};
std::unique_ptr<WebMTracksParser> parser(
diff --git a/chromium/media/formats/webm/webm_video_client.cc b/chromium/media/formats/webm/webm_video_client.cc
index 407c11b3c79..8e4add7a4cf 100644
--- a/chromium/media/formats/webm/webm_video_client.cc
+++ b/chromium/media/formats/webm/webm_video_client.cc
@@ -80,13 +80,13 @@ bool WebMVideoClient::InitializeConfig(
is_8bit = color_metadata.BitsPerChannel <= 8;
}
- VideoCodec video_codec = kUnknownVideoCodec;
+ VideoCodec video_codec = VideoCodec::kUnknown;
VideoCodecProfile profile = VIDEO_CODEC_PROFILE_UNKNOWN;
if (codec_id == "V_VP8") {
- video_codec = kCodecVP8;
+ video_codec = VideoCodec::kVP8;
profile = VP8PROFILE_ANY;
} else if (codec_id == "V_VP9") {
- video_codec = kCodecVP9;
+ video_codec = VideoCodec::kVP9;
profile = GetVP9CodecProfile(
codec_private, color_space.ToGfxColorSpace().IsHDR() ||
config->hdr_metadata().has_value() || !is_8bit);
@@ -95,7 +95,7 @@ bool WebMVideoClient::InitializeConfig(
// TODO(dalecurtis): AV1 profiles in WebM are not finalized, this needs
// updating to read the actual profile and configuration before enabling for
// release. http://crbug.com/784993
- video_codec = kCodecAV1;
+ video_codec = VideoCodec::kAV1;
profile = AV1PROFILE_PROFILE_MAIN;
#endif
} else {
@@ -129,7 +129,8 @@ bool WebMVideoClient::InitializeConfig(
// TODO(dalecurtis): This is not correct, but it's what's muxed in webm
// containers with AV1 right now. So accept it. We won't get here unless the
// build and runtime flags are enabled for AV1.
- if (display_unit_ == 0 || (video_codec == kCodecAV1 && display_unit_ == 4)) {
+ if (display_unit_ == 0 ||
+ (video_codec == VideoCodec::kAV1 && display_unit_ == 4)) {
if (display_width_ <= 0)
display_width_ = visible_rect.width();
if (display_height_ <= 0)
diff --git a/chromium/media/formats/webm/webm_video_client.h b/chromium/media/formats/webm/webm_video_client.h
index 558831dc58f..cfd93b626fa 100644
--- a/chromium/media/formats/webm/webm_video_client.h
+++ b/chromium/media/formats/webm/webm_video_client.h
@@ -25,6 +25,10 @@ class VideoDecoderConfig;
class MEDIA_EXPORT WebMVideoClient : public WebMParserClient {
public:
explicit WebMVideoClient(MediaLog* media_log);
+
+ WebMVideoClient(const WebMVideoClient&) = delete;
+ WebMVideoClient& operator=(const WebMVideoClient&) = delete;
+
~WebMVideoClient() override;
// Reset this object's state so it can process a new video track element.
@@ -71,8 +75,6 @@ class MEDIA_EXPORT WebMVideoClient : public WebMParserClient {
WebMProjectionParser projection_parser_;
bool projection_parsed_ = false;
-
- DISALLOW_COPY_AND_ASSIGN(WebMVideoClient);
};
} // namespace media
diff --git a/chromium/media/formats/webm/webm_video_client_unittest.cc b/chromium/media/formats/webm/webm_video_client_unittest.cc
index eb422b20bd9..5786ed1d723 100644
--- a/chromium/media/formats/webm/webm_video_client_unittest.cc
+++ b/chromium/media/formats/webm/webm_video_client_unittest.cc
@@ -150,7 +150,7 @@ TEST_P(WebMVideoClientTest, InitializeConfigVP9Profiles) {
EncryptionScheme(), &config));
VideoDecoderConfig expected_config(
- kCodecVP9, profile, VideoDecoderConfig::AlphaMode::kIsOpaque,
+ VideoCodec::kVP9, profile, VideoDecoderConfig::AlphaMode::kIsOpaque,
VideoColorSpace::REC709(), kNoTransformation, kCodedSize,
gfx::Rect(kCodedSize), kCodedSize, codec_private,
EncryptionScheme::kUnencrypted);
diff --git a/chromium/media/fuchsia/audio/fake_audio_consumer.cc b/chromium/media/fuchsia/audio/fake_audio_consumer.cc
index 3babfc3df0f..c4e59e95487 100644
--- a/chromium/media/fuchsia/audio/fake_audio_consumer.cc
+++ b/chromium/media/fuchsia/audio/fake_audio_consumer.cc
@@ -11,10 +11,8 @@
namespace media {
-const base::TimeDelta FakeAudioConsumer::kMinLeadTime =
- base::TimeDelta::FromMilliseconds(100);
-const base::TimeDelta FakeAudioConsumer::kMaxLeadTime =
- base::TimeDelta::FromMilliseconds(500);
+const base::TimeDelta FakeAudioConsumer::kMinLeadTime = base::Milliseconds(100);
+const base::TimeDelta FakeAudioConsumer::kMaxLeadTime = base::Milliseconds(500);
FakeAudioConsumer::FakeAudioConsumer(
uint64_t session_id,
diff --git a/chromium/media/fuchsia/audio/fuchsia_audio_capturer_source.cc b/chromium/media/fuchsia/audio/fuchsia_audio_capturer_source.cc
index 29fbcde83c2..970284a5692 100644
--- a/chromium/media/fuchsia/audio/fuchsia_audio_capturer_source.cc
+++ b/chromium/media/fuchsia/audio/fuchsia_audio_capturer_source.cc
@@ -11,6 +11,7 @@
#include "base/bits.h"
#include "base/cxx17_backports.h"
#include "base/fuchsia/fuchsia_logging.h"
+#include "base/fuchsia/koid.h"
#include "base/location.h"
#include "base/task_runner.h"
#include "base/threading/thread_task_runner_handle.h"
@@ -29,14 +30,17 @@ constexpr size_t kBufferPacketCapacity = 10;
} // namespace
FuchsiaAudioCapturerSource::FuchsiaAudioCapturerSource(
- fidl::InterfaceHandle<fuchsia::media::AudioCapturer> capturer_handle)
- : capturer_handle_(std::move(capturer_handle)) {
+ fidl::InterfaceHandle<fuchsia::media::AudioCapturer> capturer_handle,
+ scoped_refptr<base::SingleThreadTaskRunner> capturer_task_runner)
+ : capturer_handle_(std::move(capturer_handle)),
+ capturer_task_runner_(capturer_task_runner) {
DCHECK(capturer_handle_);
- DETACH_FROM_THREAD(thread_checker_);
}
FuchsiaAudioCapturerSource::~FuchsiaAudioCapturerSource() {
- DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
+ DCHECK(!callback_)
+ << "Stop() must be called before FuchsiaAudioCapturerSource is released.";
+
if (capture_buffer_) {
zx_status_t status = zx::vmar::root_self()->unmap(
reinterpret_cast<uint64_t>(capture_buffer_), capture_buffer_size_);
@@ -46,11 +50,11 @@ FuchsiaAudioCapturerSource::~FuchsiaAudioCapturerSource() {
void FuchsiaAudioCapturerSource::Initialize(const AudioParameters& params,
CaptureCallback* callback) {
- DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
DCHECK(!capture_buffer_);
DCHECK(!callback_);
DCHECK(callback);
+ main_task_runner_ = base::ThreadTaskRunnerHandle::Get();
params_ = params;
callback_ = callback;
@@ -58,6 +62,60 @@ void FuchsiaAudioCapturerSource::Initialize(const AudioParameters& params,
ReportError("Only AUDIO_PCM_LOW_LATENCY format is supported");
return;
}
+ capturer_task_runner_->PostTask(
+ FROM_HERE,
+ base::BindOnce(&FuchsiaAudioCapturerSource::InitializeOnCapturerThread,
+ this));
+}
+
+void FuchsiaAudioCapturerSource::Start() {
+ DCHECK(main_task_runner_->BelongsToCurrentThread());
+ DCHECK(callback_);
+
+ capturer_task_runner_->PostTask(
+ FROM_HERE,
+ base::BindOnce(&FuchsiaAudioCapturerSource::StartOnCapturerThread, this));
+}
+
+void FuchsiaAudioCapturerSource::Stop() {
+ // Nothing to do if Initialize() hasn't been called.
+ if (!main_task_runner_)
+ return;
+
+ DCHECK(main_task_runner_->BelongsToCurrentThread());
+
+ {
+ base::AutoLock lock(callback_lock_);
+
+ if (!callback_)
+ return;
+
+ callback_ = nullptr;
+ }
+
+ capturer_task_runner_->PostTask(
+ FROM_HERE,
+ base::BindOnce(&FuchsiaAudioCapturerSource::StopOnCapturerThread, this));
+}
+
+void FuchsiaAudioCapturerSource::SetVolume(double volume) {
+ DCHECK(main_task_runner_->BelongsToCurrentThread());
+ NOTIMPLEMENTED();
+}
+
+void FuchsiaAudioCapturerSource::SetAutomaticGainControl(bool enable) {
+ DCHECK(main_task_runner_->BelongsToCurrentThread());
+ NOTIMPLEMENTED();
+}
+
+void FuchsiaAudioCapturerSource::SetOutputDeviceForAec(
+ const std::string& output_device_id) {
+ DCHECK(main_task_runner_->BelongsToCurrentThread());
+ NOTIMPLEMENTED();
+}
+
+void FuchsiaAudioCapturerSource::InitializeOnCapturerThread() {
+ DCHECK(capturer_task_runner_->BelongsToCurrentThread());
// Bind AudioCapturer.
capturer_.Bind(std::move(capturer_handle_));
@@ -110,84 +168,57 @@ void FuchsiaAudioCapturerSource::Initialize(const AudioParameters& params,
capturer_->AddPayloadBuffer(kBufferId, std::move(buffer_vmo));
}
-void FuchsiaAudioCapturerSource::Start() {
- DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
+void FuchsiaAudioCapturerSource::StartOnCapturerThread() {
+ DCHECK(capturer_task_runner_->BelongsToCurrentThread());
// Errors are reported asynchronously, so Start() may be called after an error
// has occurred.
if (!capturer_)
return;
- DCHECK(!is_active_);
- is_active_ = true;
-
if (!is_capturer_started_) {
is_capturer_started_ = true;
capturer_->StartAsyncCapture(params_.frames_per_buffer());
}
- // Post a task to call OnCaptureStarted() asynchronously, as required by
- // AudioCapturerSource interface..
- base::ThreadTaskRunnerHandle::Get()->PostTask(
+ main_task_runner_->PostTask(
FROM_HERE,
- base::BindOnce(&FuchsiaAudioCapturerSource::NotifyCaptureStarted,
- weak_factory_.GetWeakPtr()));
+ base::BindOnce(&FuchsiaAudioCapturerSource::NotifyCaptureStarted, this));
}
-void FuchsiaAudioCapturerSource::Stop() {
- DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
-
- // Errors are reported asynchronously, so Stop() may be called after an error
- // has occurred.
- if (!capturer_)
- return;
-
- // StopAsyncCapture() is an asynchronous operation that completes
- // asynchronously and other methods cannot be called until it's complete.
- // To avoid extra complexity, update internal state without actually stopping
- // the capturer. The downside is that |capturer_| will keep sending packets in
- // the stopped state. This is acceptable because normally AudioCapturerSource
- // instances are not kept in the stopped state for significant amount of time,
- // i.e. usually either destructor or Start() are called immediately after
- // Stop().
- is_active_ = false;
-}
-
-void FuchsiaAudioCapturerSource::SetVolume(double volume) {
- DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
- NOTIMPLEMENTED();
-}
-
-void FuchsiaAudioCapturerSource::SetAutomaticGainControl(bool enable) {
- DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
- NOTIMPLEMENTED();
-}
-
-void FuchsiaAudioCapturerSource::SetOutputDeviceForAec(
- const std::string& output_device_id) {
- DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
- NOTIMPLEMENTED();
+void FuchsiaAudioCapturerSource::StopOnCapturerThread() {
+ DCHECK(capturer_task_runner_->BelongsToCurrentThread());
+ capturer_.Unbind();
}
void FuchsiaAudioCapturerSource::NotifyCaptureError(
const std::string& message) {
- DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
+ DCHECK(main_task_runner_->BelongsToCurrentThread());
+
+ // Nothing to do if Stop() was called.
+ if (!callback_)
+ return;
+
+ // `Stop()` cannot be called on other threads, so `callback_lock_` doesn't
+ // need to be held.
callback_->OnCaptureError(AudioCapturerSource::ErrorCode::kUnknown, message);
}
void FuchsiaAudioCapturerSource::NotifyCaptureStarted() {
- DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
+ DCHECK(main_task_runner_->BelongsToCurrentThread());
- // Nothing to do if initialization has failed.
- if (!capturer_ || !is_active_)
+ // Nothing to do if Stop() was called.
+ if (!callback_)
return;
+ // `Stop()` cannot be called on other threads, so `callback_lock_` doesn't
+ // need to be held.
callback_->OnCaptureStarted();
}
void FuchsiaAudioCapturerSource::OnPacketCaptured(
fuchsia::media::StreamPacket packet) {
- DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
+ DCHECK(capturer_task_runner_->BelongsToCurrentThread());
size_t bytes_per_frame = params_.GetBytesPerFrame(kSampleFormatF32);
@@ -199,31 +230,38 @@ void FuchsiaAudioCapturerSource::OnPacketCaptured(
return;
}
- // If the capturer was stopped then just drop the packet.
- if (is_active_) {
- size_t num_frames = packet.payload_size / bytes_per_frame;
- auto audio_bus = AudioBus::Create(params_.channels(), num_frames);
- audio_bus->FromInterleaved<Float32SampleTypeTraits>(
- reinterpret_cast<const float*>(capture_buffer_ + packet.payload_offset),
- num_frames);
- callback_->Capture(audio_bus.get(), base::TimeTicks::FromZxTime(packet.pts),
- /*volume=*/1.0,
- /*key_pressed=*/false);
- }
+ // Keep the lock when calling `Capture()` to ensure that we don't call the
+ // callback after `Stop()`. If `Stop()` is called on the main thread while the
+ // lock is held, it will wait until we release the lock below. This is
+ // acceptable because `CaptureCallback::Capture()` is expected to return
+ // quickly.
+ base::AutoLock lock(callback_lock_);
+
+ // If `Stop()` was called then we can drop the capturer - it won't be used
+ // again.
+ if (!callback_) {
+ capturer_.Unbind();
+ return;
+ }
+
+ size_t num_frames = packet.payload_size / bytes_per_frame;
+ auto audio_bus = AudioBus::Create(params_.channels(), num_frames);
+ audio_bus->FromInterleaved<Float32SampleTypeTraits>(
+ reinterpret_cast<const float*>(capture_buffer_ + packet.payload_offset),
+ num_frames);
+ callback_->Capture(audio_bus.get(), base::TimeTicks::FromZxTime(packet.pts),
+ /*volume=*/1.0,
+ /*key_pressed=*/false);
capturer_->ReleasePacket(std::move(packet));
}
void FuchsiaAudioCapturerSource::ReportError(const std::string& message) {
- DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
+ DCHECK(capturer_task_runner_->BelongsToCurrentThread());
capturer_.Unbind();
- // Post async task to report the error as required by the the
- // AudioCapturerSource interface.
- base::ThreadTaskRunnerHandle::Get()->PostTask(
+ main_task_runner_->PostTask(
FROM_HERE, base::BindOnce(&FuchsiaAudioCapturerSource::NotifyCaptureError,
- weak_factory_.GetWeakPtr(), message));
+ this, message));
}
} // namespace media
diff --git a/chromium/media/fuchsia/audio/fuchsia_audio_capturer_source.h b/chromium/media/fuchsia/audio/fuchsia_audio_capturer_source.h
index 8079026e9a7..ca3cdfa689f 100644
--- a/chromium/media/fuchsia/audio/fuchsia_audio_capturer_source.h
+++ b/chromium/media/fuchsia/audio/fuchsia_audio_capturer_source.h
@@ -12,13 +12,18 @@
#include "media/base/audio_capturer_source.h"
#include "media/base/media_export.h"
+namespace base {
+class SingleThreadTaskRunner;
+} // namespace base
+
namespace media {
class MEDIA_EXPORT FuchsiaAudioCapturerSource final
: public AudioCapturerSource {
public:
- explicit FuchsiaAudioCapturerSource(
- fidl::InterfaceHandle<fuchsia::media::AudioCapturer> capturer_handle);
+ FuchsiaAudioCapturerSource(
+ fidl::InterfaceHandle<fuchsia::media::AudioCapturer> capturer_handle,
+ scoped_refptr<base::SingleThreadTaskRunner> capturer_task_runner);
FuchsiaAudioCapturerSource(const FuchsiaAudioCapturerSource&) = delete;
FuchsiaAudioCapturerSource& operator=(const FuchsiaAudioCapturerSource&) =
@@ -36,6 +41,9 @@ class MEDIA_EXPORT FuchsiaAudioCapturerSource final
private:
~FuchsiaAudioCapturerSource() override;
+ void InitializeOnCapturerThread();
+ void StartOnCapturerThread();
+ void StopOnCapturerThread();
void NotifyCaptureError(const std::string& error);
void NotifyCaptureStarted();
void OnPacketCaptured(fuchsia::media::StreamPacket packet);
@@ -47,11 +55,22 @@ class MEDIA_EXPORT FuchsiaAudioCapturerSource final
// in the constructor.
fidl::InterfaceHandle<fuchsia::media::AudioCapturer> capturer_handle_;
+ // Task runner for the thread that's used for the |capturer_|.
+ scoped_refptr<base::SingleThreadTaskRunner> capturer_task_runner_;
+
+ // Main thread on which the object was initialized.
+ scoped_refptr<base::SingleThreadTaskRunner> main_task_runner_;
+
fuchsia::media::AudioCapturerPtr capturer_;
AudioParameters params_;
CaptureCallback* callback_ = nullptr;
+ // `callback_lock_` is used to synchronize `Stop()` called on the main thread
+ // and `CaptureCallback::Capture()` called on the capturer thread. All other
+ // `CaptureCallback` methods are called on the main thread.
+ base::Lock callback_lock_;
+
// Shared VMO mapped to the current address space.
uint8_t* capture_buffer_ = nullptr;
size_t capture_buffer_size_ = 0;
@@ -59,13 +78,6 @@ class MEDIA_EXPORT FuchsiaAudioCapturerSource final
// Indicates that async capture mode has been activated for |capturer_|, i.e.
// StartAsyncCapture() has been called.
bool is_capturer_started_ = false;
-
- // Set to true between Start() and Stop().
- bool is_active_ = false;
-
- THREAD_CHECKER(thread_checker_);
-
- base::WeakPtrFactory<FuchsiaAudioCapturerSource> weak_factory_{this};
};
} // namespace media
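The header above captures the new threading contract: Stop() clears callback_ under callback_lock_ on the main thread, while the capturer thread re-checks callback_ under the same lock before delivering a packet, so Capture() can never run after Stop() returns. A self-contained sketch of that pattern, using plain std::mutex instead of base::Lock and invented names, purely for illustration:

#include <mutex>

class CapturerCallbackGate {
 public:
  using Callback = void (*)(const float* samples, int frames);

  // Main thread: install the consumer before capture starts.
  void SetCallback(Callback cb) {
    std::lock_guard<std::mutex> lock(mutex_);
    callback_ = cb;
  }

  // Main thread: after this returns, the callback will never fire again.
  void Stop() {
    std::lock_guard<std::mutex> lock(mutex_);
    callback_ = nullptr;
  }

  // Capturer thread: deliver one packet, or drop it if Stop() already ran.
  void OnPacket(const float* samples, int frames) {
    std::lock_guard<std::mutex> lock(mutex_);
    if (!callback_)
      return;
    // Safe: a concurrent Stop() blocks on |mutex_| until this returns.
    callback_(samples, frames);
  }

 private:
  std::mutex mutex_;
  Callback callback_ = nullptr;
};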
diff --git a/chromium/media/fuchsia/audio/fuchsia_audio_capturer_source_test.cc b/chromium/media/fuchsia/audio/fuchsia_audio_capturer_source_test.cc
index bd1c4ead735..461f25619a3 100644
--- a/chromium/media/fuchsia/audio/fuchsia_audio_capturer_source_test.cc
+++ b/chromium/media/fuchsia/audio/fuchsia_audio_capturer_source_test.cc
@@ -9,6 +9,7 @@
#include "base/fuchsia/fuchsia_logging.h"
#include "base/test/task_environment.h"
+#include "base/threading/thread_task_runner_handle.h"
#include "media/base/channel_layout.h"
#include "testing/gtest/include/gtest/gtest.h"
@@ -183,7 +184,14 @@ class FuchsiaAudioCapturerSourceTest : public testing::Test {
test_capturer_ =
std::make_unique<TestAudioCapturer>(capturer_handle.NewRequest());
capturer_source_ = base::MakeRefCounted<FuchsiaAudioCapturerSource>(
- std::move(capturer_handle));
+ std::move(capturer_handle), base::ThreadTaskRunnerHandle::Get());
+ }
+
+ ~FuchsiaAudioCapturerSourceTest() override {
+ capturer_source_->Stop();
+ capturer_source_ = nullptr;
+
+ base::RunLoop().RunUntilIdle();
}
void InitializeCapturer(ChannelLayout layout) {
@@ -298,8 +306,7 @@ TEST_F(FuchsiaAudioCapturerSourceTest, CaptureTwoPackets) {
base::TimeTicks ts = base::TimeTicks::FromZxTime(100);
test_capturer_->SendData(ts, samples1.data());
- test_capturer_->SendData(ts + base::TimeDelta::FromMilliseconds(10),
- samples2.data());
+ test_capturer_->SendData(ts + base::Milliseconds(10), samples2.data());
base::RunLoop().RunUntilIdle();
// Verify that both packets were received.
diff --git a/chromium/media/fuchsia/audio/fuchsia_audio_output_device.cc b/chromium/media/fuchsia/audio/fuchsia_audio_output_device.cc
index 5c2fc9d94cc..d84b381a946 100644
--- a/chromium/media/fuchsia/audio/fuchsia_audio_output_device.cc
+++ b/chromium/media/fuchsia/audio/fuchsia_audio_output_device.cc
@@ -30,8 +30,7 @@ constexpr size_t kNumBuffers = 4;
// TODO(crbug.com/1153909): It may be possible to reduce this value to reduce
// total latency, but that requires that an elevated scheduling profile is
// applied to this thread.
-constexpr base::TimeDelta kLeadTimeExtra =
- base::TimeDelta::FromMilliseconds(20);
+constexpr base::TimeDelta kLeadTimeExtra = base::Milliseconds(20);
class DefaultAudioThread {
public:
@@ -337,13 +336,13 @@ void FuchsiaAudioOutputDevice::OnAudioConsumerStatusChanged(
return;
}
- min_lead_time_ = base::TimeDelta::FromNanoseconds(status.min_lead_time());
+ min_lead_time_ = base::Nanoseconds(status.min_lead_time());
if (status.has_presentation_timeline()) {
timeline_reference_time_ = base::TimeTicks::FromZxTime(
status.presentation_timeline().reference_time);
- timeline_subject_time_ = base::TimeDelta::FromNanoseconds(
- status.presentation_timeline().subject_time);
+ timeline_subject_time_ =
+ base::Nanoseconds(status.presentation_timeline().subject_time);
timeline_reference_delta_ = status.presentation_timeline().reference_delta;
timeline_subject_delta_ = status.presentation_timeline().subject_delta;
} else {
diff --git a/chromium/media/fuchsia/audio/fuchsia_audio_output_device_test.cc b/chromium/media/fuchsia/audio/fuchsia_audio_output_device_test.cc
index 1b8c6eb51af..727c7133b97 100644
--- a/chromium/media/fuchsia/audio/fuchsia_audio_output_device_test.cc
+++ b/chromium/media/fuchsia/audio/fuchsia_audio_output_device_test.cc
@@ -20,7 +20,7 @@ constexpr int kSampleRate = 44100;
constexpr ChannelLayout kChannelLayout = CHANNEL_LAYOUT_STEREO;
constexpr int kNumChannels = 2;
constexpr uint64_t kTestSessionId = 42;
-constexpr base::TimeDelta kPeriod = base::TimeDelta::FromMilliseconds(10);
+constexpr base::TimeDelta kPeriod = base::Milliseconds(10);
constexpr int kFramesPerPeriod = 441;
class TestRenderer : public AudioRendererSink::RenderCallback {
@@ -99,7 +99,7 @@ class FuchsiaAudioOutputDeviceTest : public testing::Test {
void CallPumpSamples() {
output_device_->PumpSamples(base::TimeTicks::Now() +
- base::TimeDelta::FromMilliseconds(200));
+ base::Milliseconds(200));
}
void ValidatePresentationTime() {
@@ -110,8 +110,8 @@ class FuchsiaAudioOutputDeviceTest : public testing::Test {
auto lead_time =
renderer_.last_presentation_time() - base::TimeTicks::Now();
EXPECT_GT(lead_time, FakeAudioConsumer::kMinLeadTime);
- EXPECT_LT(lead_time, FakeAudioConsumer::kMinLeadTime +
- base::TimeDelta::FromMilliseconds(30));
+ EXPECT_LT(lead_time,
+ FakeAudioConsumer::kMinLeadTime + base::Milliseconds(30));
}
base::test::SingleThreadTaskEnvironment task_environment_{
@@ -127,7 +127,7 @@ TEST_F(FuchsiaAudioOutputDeviceTest, Start) {
Initialize();
// Verify that playback doesn't start before Start().
- task_environment_.FastForwardBy(base::TimeDelta::FromSeconds(2));
+ task_environment_.FastForwardBy(base::Seconds(2));
EXPECT_EQ(renderer_.frames_rendered(), 0);
// Rendering should start after Start().
@@ -167,7 +167,7 @@ TEST_F(FuchsiaAudioOutputDeviceTest, Pause) {
// Render() should not be called while paused.
output_device_->Pause();
- task_environment_.FastForwardBy(base::TimeDelta::FromSeconds(10));
+ task_environment_.FastForwardBy(base::Seconds(10));
EXPECT_EQ(renderer_.frames_rendered(), 0);
// Unpause the stream and verify that Render() is being called now.
diff --git a/chromium/media/fuchsia/audio/fuchsia_audio_renderer.cc b/chromium/media/fuchsia/audio/fuchsia_audio_renderer.cc
index aeaf0c8cfc3..28b3a4044ef 100644
--- a/chromium/media/fuchsia/audio/fuchsia_audio_renderer.cc
+++ b/chromium/media/fuchsia/audio/fuchsia_audio_renderer.cc
@@ -29,22 +29,22 @@ absl::optional<std::unique_ptr<fuchsia::media::Compression>>
GetFuchsiaCompressionFromDecoderConfig(AudioDecoderConfig config) {
auto compression = std::make_unique<fuchsia::media::Compression>();
switch (config.codec()) {
- case kCodecAAC:
+ case AudioCodec::kAAC:
compression->type = fuchsia::media::AUDIO_ENCODING_AAC;
break;
- case kCodecMP3:
+ case AudioCodec::kMP3:
compression->type = fuchsia::media::AUDIO_ENCODING_MP3;
break;
- case kCodecVorbis:
+ case AudioCodec::kVorbis:
compression->type = fuchsia::media::AUDIO_ENCODING_VORBIS;
break;
- case kCodecOpus:
+ case AudioCodec::kOpus:
compression->type = fuchsia::media::AUDIO_ENCODING_OPUS;
break;
- case kCodecFLAC:
+ case AudioCodec::kFLAC:
compression->type = fuchsia::media::AUDIO_ENCODING_FLAC;
break;
- case kCodecPCM:
+ case AudioCodec::kPCM:
compression.reset();
break;
@@ -153,7 +153,7 @@ void FuchsiaAudioRenderer::Initialize(DemuxerStream* stream,
// produce decoded stream without ADTS headers which are required for AAC
// streams in AudioConsumer.
// TODO(crbug.com/1120095): Reconsider this logic.
- if (stream->audio_decoder_config().codec() == kCodecAAC) {
+ if (stream->audio_decoder_config().codec() == AudioCodec::kAAC) {
stream->EnableBitstreamConverter();
}
@@ -451,6 +451,10 @@ void FuchsiaAudioRenderer::OnError(PipelineStatus status) {
stream_sink_.Unbind();
sysmem_buffer_stream_.reset();
+ if (is_demuxer_read_pending_) {
+ drop_next_demuxer_read_result_ = true;
+ }
+
if (init_cb_) {
std::move(init_cb_).Run(status);
} else if (client_) {
@@ -610,7 +614,7 @@ void FuchsiaAudioRenderer::OnDemuxerStreamReadDone(
// Update layout for 24-bit PCM streams.
if (!buffer->end_of_stream() &&
- demuxer_stream_->audio_decoder_config().codec() == kCodecPCM &&
+ demuxer_stream_->audio_decoder_config().codec() == AudioCodec::kPCM &&
demuxer_stream_->audio_decoder_config().sample_format() ==
kSampleFormatS24) {
buffer = PreparePcm24Buffer(std::move(buffer));
diff --git a/chromium/media/fuchsia/audio/fuchsia_audio_renderer.h b/chromium/media/fuchsia/audio/fuchsia_audio_renderer.h
index 83c72601c6a..e192eb66652 100644
--- a/chromium/media/fuchsia/audio/fuchsia_audio_renderer.h
+++ b/chromium/media/fuchsia/audio/fuchsia_audio_renderer.h
@@ -201,8 +201,8 @@ class MEDIA_EXPORT FuchsiaAudioRenderer final
// Lead time range requested by the |audio_consumer_|. Initialized to the
// [100ms, 500ms] until the initial AudioConsumerStatus is received.
- base::TimeDelta min_lead_time_ = base::TimeDelta::FromMilliseconds(100);
- base::TimeDelta max_lead_time_ = base::TimeDelta::FromMilliseconds(500);
+ base::TimeDelta min_lead_time_ = base::Milliseconds(100);
+ base::TimeDelta max_lead_time_ = base::Milliseconds(500);
// Set to true after we've received end-of-stream from the |demuxer_stream_|.
// The renderer may be restarted after Flush().
diff --git a/chromium/media/fuchsia/audio/fuchsia_audio_renderer_test.cc b/chromium/media/fuchsia/audio/fuchsia_audio_renderer_test.cc
index 2c006bddcf2..6ee12d13d61 100644
--- a/chromium/media/fuchsia/audio/fuchsia_audio_renderer_test.cc
+++ b/chromium/media/fuchsia/audio/fuchsia_audio_renderer_test.cc
@@ -28,11 +28,10 @@ namespace media {
namespace {
constexpr int kDefaultSampleRate = 48000;
-constexpr base::TimeDelta kPacketDuration =
- base::TimeDelta::FromMilliseconds(20);
-constexpr base::TimeDelta kMinLeadTime = base::TimeDelta::FromMilliseconds(100);
-constexpr base::TimeDelta kMaxLeadTime = base::TimeDelta::FromMilliseconds(500);
-const base::TimeDelta kTimeStep = base::TimeDelta::FromMilliseconds(2);
+constexpr base::TimeDelta kPacketDuration = base::Milliseconds(20);
+constexpr base::TimeDelta kMinLeadTime = base::Milliseconds(100);
+constexpr base::TimeDelta kMaxLeadTime = base::Milliseconds(500);
+const base::TimeDelta kTimeStep = base::Milliseconds(2);
class TestDemuxerStream : public DemuxerStream {
public:
@@ -526,8 +525,8 @@ void FuchsiaAudioRendererTest::CreateUninitializedRenderer() {
}
void FuchsiaAudioRendererTest::CreateTestDemuxerStream() {
- AudioDecoderConfig config(kCodecPCM, kSampleFormatF32, CHANNEL_LAYOUT_MONO,
- kDefaultSampleRate, {},
+ AudioDecoderConfig config(AudioCodec::kPCM, kSampleFormatF32,
+ CHANNEL_LAYOUT_MONO, kDefaultSampleRate, {},
EncryptionScheme::kUnencrypted);
if (GetParam().simulate_fuchsia_cdm) {
@@ -660,7 +659,7 @@ void FuchsiaAudioRendererTest::StartPlaybackAndVerifyClock(
base::TimeTicks::Now() + kTimeStep, false);
// MediaTime will start moving once AudioConsumer updates timeline.
- const base::TimeDelta kStartDelay = base::TimeDelta::FromMilliseconds(3);
+ const base::TimeDelta kStartDelay = base::Milliseconds(3);
base::TimeTicks start_wall_clock = base::TimeTicks::Now() + kStartDelay;
audio_consumer_->UpdateStatus(start_wall_clock, start_time);
task_environment_.RunUntilIdle();
@@ -697,7 +696,7 @@ TEST_P(FuchsiaAudioRendererTest, InitializeAndBuffer) {
// Extra packets should be sent to AudioConsumer immediately.
stream_sink_->received_packets()->clear();
- ProduceDemuxerPacket(base::TimeDelta::FromMilliseconds(10));
+ ProduceDemuxerPacket(base::Milliseconds(10));
task_environment_.RunUntilIdle();
EXPECT_EQ(stream_sink_->received_packets()->size(), 1U);
}
@@ -708,7 +707,7 @@ TEST_P(FuchsiaAudioRendererTest, StartPlaybackBeforeStreamSinkConnected) {
// Start playing immediately after initialization. The renderer should wait
// for buffers to be allocated before it starts reading from the demuxer.
audio_renderer_->StartPlaying();
- ProduceDemuxerPacket(base::TimeDelta::FromMilliseconds(10));
+ ProduceDemuxerPacket(base::Milliseconds(10));
task_environment_.RunUntilIdle();
stream_sink_ = audio_consumer_->WaitStreamSinkConnected();
@@ -719,21 +718,21 @@ TEST_P(FuchsiaAudioRendererTest, StartPlaybackBeforeStreamSinkConnected) {
TEST_P(FuchsiaAudioRendererTest, StartTicking) {
ASSERT_NO_FATAL_FAILURE(CreateAndInitializeRenderer());
ASSERT_NO_FATAL_FAILURE(StartPlaybackAndVerifyClock(
- /*start_pos=*/base::TimeDelta::FromMilliseconds(123),
+ /*start_pos=*/base::Milliseconds(123),
/*playback_rate=*/1.0));
}
TEST_P(FuchsiaAudioRendererTest, StartTickingRate1_5) {
ASSERT_NO_FATAL_FAILURE(CreateAndInitializeRenderer());
ASSERT_NO_FATAL_FAILURE(StartPlaybackAndVerifyClock(
- /*start_pos=*/base::TimeDelta::FromMilliseconds(123),
+ /*start_pos=*/base::Milliseconds(123),
/*playback_rate=*/1.5));
}
TEST_P(FuchsiaAudioRendererTest, StartTickingRate0_5) {
ASSERT_NO_FATAL_FAILURE(CreateAndInitializeRenderer());
ASSERT_NO_FATAL_FAILURE(StartPlaybackAndVerifyClock(
- /*start_pos=*/base::TimeDelta::FromMilliseconds(123),
+ /*start_pos=*/base::Milliseconds(123),
/*playback_rate=*/0.5));
}
@@ -774,7 +773,7 @@ TEST_P(FuchsiaAudioRendererTest, Seek) {
run_loop.Run();
// Restart playback from a new position.
- const base::TimeDelta kSeekPos = base::TimeDelta::FromMilliseconds(123);
+ const base::TimeDelta kSeekPos = base::Milliseconds(123);
ASSERT_NO_FATAL_FAILURE(StartPlaybackAndVerifyClock(kSeekPos,
/*playback_rate=*/1.0));
@@ -791,7 +790,7 @@ TEST_P(FuchsiaAudioRendererTest, ChangeConfig) {
ASSERT_NO_FATAL_FAILURE(CreateAndInitializeRenderer());
ASSERT_NO_FATAL_FAILURE(StartPlayback());
- const auto kConfigChangePos = base::TimeDelta::FromSeconds(1);
+ const auto kConfigChangePos = base::Seconds(1);
// Queue packets up to kConfigChangePos.
FillDemuxerStream(kConfigChangePos);
@@ -799,8 +798,8 @@ TEST_P(FuchsiaAudioRendererTest, ChangeConfig) {
const size_t kNewSampleRate = 44100;
const std::vector<uint8_t> kArbitraryExtraData = {1, 2, 3};
AudioDecoderConfig updated_config(
- kCodecOpus, kSampleFormatF32, CHANNEL_LAYOUT_STEREO, kNewSampleRate,
- kArbitraryExtraData, EncryptionScheme::kUnencrypted);
+ AudioCodec::kOpus, kSampleFormatF32, CHANNEL_LAYOUT_STEREO,
+ kNewSampleRate, kArbitraryExtraData, EncryptionScheme::kUnencrypted);
demuxer_stream_->QueueReadResult(
TestDemuxerStream::ReadResult(updated_config));
@@ -838,13 +837,13 @@ TEST_P(FuchsiaAudioRendererTest, UpdateTimeline) {
ASSERT_NO_FATAL_FAILURE(CreateAndInitializeRenderer());
ASSERT_NO_FATAL_FAILURE(StartPlayback());
- FillDemuxerStream(base::TimeDelta::FromSeconds(2));
+ FillDemuxerStream(base::Seconds(2));
- const auto kTimelineChangePos = base::TimeDelta::FromSeconds(1);
+ const auto kTimelineChangePos = base::Seconds(1);
task_environment_.FastForwardBy(kTimelineChangePos);
// Shift the timeline by 2ms.
- const auto kMediaDelta = base::TimeDelta::FromMilliseconds(2);
+ const auto kMediaDelta = base::Milliseconds(2);
audio_consumer_->UpdateStatus(base::TimeTicks::Now(),
kTimelineChangePos + kMediaDelta);
task_environment_.RunUntilIdle();
@@ -859,8 +858,8 @@ TEST_P(FuchsiaAudioRendererTest, PauseAndResume) {
ASSERT_NO_FATAL_FAILURE(CreateAndInitializeRenderer());
ASSERT_NO_FATAL_FAILURE(StartPlayback());
- const auto kPauseTimestamp = base::TimeDelta::FromSeconds(1);
- const auto kStreamLength = base::TimeDelta::FromSeconds(2);
+ const auto kPauseTimestamp = base::Seconds(1);
+ const auto kStreamLength = base::Seconds(2);
FillDemuxerStream(kStreamLength);
@@ -882,7 +881,7 @@ TEST_P(FuchsiaAudioRendererTest, PauseAndResume) {
// Keep the stream paused for 10 seconds. The Renderer should not be sending
// new packets
- task_environment_.FastForwardBy(base::TimeDelta::FromSeconds(10));
+ task_environment_.FastForwardBy(base::Seconds(10));
EXPECT_EQ(stream_sink_->received_packets()->size(), kExpectedQueuedPackets);
EXPECT_EQ(time_source_->CurrentMediaTime(), kPauseTimestamp);
@@ -909,7 +908,7 @@ TEST_P(FuchsiaAudioRendererTest, EndOfStreamBuffered) {
ASSERT_NO_FATAL_FAILURE(CreateAndInitializeRenderer());
ASSERT_NO_FATAL_FAILURE(StartPlayback());
- const auto kStreamLength = base::TimeDelta::FromSeconds(1);
+ const auto kStreamLength = base::Seconds(1);
FillDemuxerStream(kStreamLength);
demuxer_stream_->QueueReadResult(
TestDemuxerStream::ReadResult(DecoderBuffer::CreateEOSBuffer()));
@@ -993,7 +992,7 @@ TEST_P(FuchsiaAudioRendererTest, SetVolumeBeforeInitialize) {
// only after CreateStreamSink(). See crbug.com/1219147 .
TEST_P(FuchsiaAudioRendererTest, PlaybackBeforeSinkCreation) {
CreateTestDemuxerStream();
- const auto kStreamLength = base::TimeDelta::FromMilliseconds(100);
+ const auto kStreamLength = base::Milliseconds(100);
FillDemuxerStream(kStreamLength);
demuxer_stream_->QueueReadResult(
TestDemuxerStream::ReadResult(DecoderBuffer::CreateEOSBuffer()));
@@ -1018,8 +1017,8 @@ void FuchsiaAudioRendererTest::TestPcmStream(
size_t bytes_per_sample_input,
fuchsia::media::AudioSampleFormat fuchsia_sample_format,
size_t bytes_per_sample_output) {
- AudioDecoderConfig config(kCodecPCM, sample_format, CHANNEL_LAYOUT_STEREO,
- kDefaultSampleRate, {},
+ AudioDecoderConfig config(AudioCodec::kPCM, sample_format,
+ CHANNEL_LAYOUT_STEREO, kDefaultSampleRate, {},
EncryptionScheme::kUnencrypted);
demuxer_stream_ = std::make_unique<TestDemuxerStream>(config);
diff --git a/chromium/media/fuchsia/cdm/BUILD.gn b/chromium/media/fuchsia/cdm/BUILD.gn
index d37121f101e..a1e134a4f99 100644
--- a/chromium/media/fuchsia/cdm/BUILD.gn
+++ b/chromium/media/fuchsia/cdm/BUILD.gn
@@ -18,12 +18,14 @@ source_set("cdm") {
"fuchsia_stream_decryptor.h",
]
- public_deps = [ "//third_party/fuchsia-sdk/sdk/fidl/fuchsia.media.drm" ]
+ public_deps = [
+ "//base",
+ "//third_party/fuchsia-sdk/sdk/fidl/fuchsia.media.drm",
+ ]
configs += [ "//media:subcomponent_config" ]
deps = [
- "//fuchsia/base",
"//media/base",
"//media/cdm",
"//media/fuchsia/common",
diff --git a/chromium/media/fuchsia/cdm/DEPS b/chromium/media/fuchsia/cdm/DEPS
index 81899398d3a..85df62d1627 100644
--- a/chromium/media/fuchsia/cdm/DEPS
+++ b/chromium/media/fuchsia/cdm/DEPS
@@ -1,5 +1,4 @@
include_rules = [
- "+fuchsia/base",
"+mojo/public",
"+third_party/blink/public/common",
]
diff --git a/chromium/media/fuchsia/cdm/client/BUILD.gn b/chromium/media/fuchsia/cdm/client/BUILD.gn
index fb7bd286e8f..811750f9bdf 100644
--- a/chromium/media/fuchsia/cdm/client/BUILD.gn
+++ b/chromium/media/fuchsia/cdm/client/BUILD.gn
@@ -14,7 +14,7 @@ source_set("client") {
deps = [
"//media",
- "//media/fuchsia/mojom",
+ "//media/fuchsia/mojom:cdm_provider",
"//third_party/blink/public:blink_headers",
]
}
diff --git a/chromium/media/fuchsia/cdm/client/mojo_fuchsia_cdm_provider.cc b/chromium/media/fuchsia/cdm/client/mojo_fuchsia_cdm_provider.cc
index 60c713e3d66..a0c47ddcf9d 100644
--- a/chromium/media/fuchsia/cdm/client/mojo_fuchsia_cdm_provider.cc
+++ b/chromium/media/fuchsia/cdm/client/mojo_fuchsia_cdm_provider.cc
@@ -20,12 +20,11 @@ void MojoFuchsiaCdmProvider::CreateCdmInterface(
const std::string& key_system,
fidl::InterfaceRequest<fuchsia::media::drm::ContentDecryptionModule>
cdm_request) {
- if (!media_resource_provider_) {
- interface_broker_->GetInterface(
- media_resource_provider_.BindNewPipeAndPassReceiver());
+ if (!cdm_provider_) {
+ interface_broker_->GetInterface(cdm_provider_.BindNewPipeAndPassReceiver());
}
- media_resource_provider_->CreateCdm(key_system, std::move(cdm_request));
+ cdm_provider_->CreateCdm(key_system, std::move(cdm_request));
}
} // namespace media
diff --git a/chromium/media/fuchsia/cdm/client/mojo_fuchsia_cdm_provider.h b/chromium/media/fuchsia/cdm/client/mojo_fuchsia_cdm_provider.h
index d4261d21e03..4e63198d936 100644
--- a/chromium/media/fuchsia/cdm/client/mojo_fuchsia_cdm_provider.h
+++ b/chromium/media/fuchsia/cdm/client/mojo_fuchsia_cdm_provider.h
@@ -7,7 +7,7 @@
#include "base/macros.h"
#include "media/fuchsia/cdm/fuchsia_cdm_provider.h"
-#include "media/fuchsia/mojom/fuchsia_media_resource_provider.mojom.h"
+#include "media/fuchsia/mojom/fuchsia_cdm_provider.mojom.h"
#include "mojo/public/cpp/bindings/remote.h"
namespace blink {
@@ -21,6 +21,10 @@ class MojoFuchsiaCdmProvider : public FuchsiaCdmProvider {
// |interface_broker| must outlive this class.
explicit MojoFuchsiaCdmProvider(
blink::BrowserInterfaceBrokerProxy* interface_broker);
+
+ MojoFuchsiaCdmProvider(const MojoFuchsiaCdmProvider&) = delete;
+ MojoFuchsiaCdmProvider& operator=(const MojoFuchsiaCdmProvider&) = delete;
+
~MojoFuchsiaCdmProvider() override;
// FuchsiaCdmProvider implementation:
@@ -31,10 +35,7 @@ class MojoFuchsiaCdmProvider : public FuchsiaCdmProvider {
private:
blink::BrowserInterfaceBrokerProxy* const interface_broker_;
- mojo::Remote<media::mojom::FuchsiaMediaResourceProvider>
- media_resource_provider_;
-
- DISALLOW_COPY_AND_ASSIGN(MojoFuchsiaCdmProvider);
+ mojo::Remote<media::mojom::FuchsiaCdmProvider> cdm_provider_;
};
} // namespace media
diff --git a/chromium/media/fuchsia/cdm/fuchsia_cdm.cc b/chromium/media/fuchsia/cdm/fuchsia_cdm.cc
index 17189af8bbe..96ecffd2104 100644
--- a/chromium/media/fuchsia/cdm/fuchsia_cdm.cc
+++ b/chromium/media/fuchsia/cdm/fuchsia_cdm.cc
@@ -5,8 +5,8 @@
#include "media/fuchsia/cdm/fuchsia_cdm.h"
#include "base/fuchsia/fuchsia_logging.h"
+#include "base/fuchsia/mem_buffer_util.h"
#include "base/logging.h"
-#include "fuchsia/base/mem_buffer_util.h"
#include "media/base/callback_registry.h"
#include "media/base/cdm_promise.h"
#include "third_party/abseil-cpp/absl/types/optional.h"
@@ -47,7 +47,7 @@ fuchsia::media::drm::LicenseInitData CreateLicenseInitData(
fuchsia::media::drm::LicenseServerMessage CreateLicenseServerMessage(
const std::vector<uint8_t>& response) {
fuchsia::media::drm::LicenseServerMessage message;
- message.message = cr_fuchsia::MemBufferFromString(
+ message.message = base::MemBufferFromString(
base::StringPiece(reinterpret_cast<const char*>(response.data()),
response.size()),
"cr-drm-license-server-message");
@@ -142,6 +142,9 @@ class FuchsiaCdm::CdmSession {
fit::bind_member(this, &CdmSession::OnSessionError));
}
+ CdmSession(const CdmSession&) = delete;
+ CdmSession& operator=(const CdmSession&) = delete;
+
~CdmSession() {
if (!session_id_.empty()) {
session_callbacks_->closed_cb.Run(session_id_,
@@ -204,18 +207,17 @@ class FuchsiaCdm::CdmSession {
void OnLicenseMessageGenerated(fuchsia::media::drm::LicenseMessage message) {
DCHECK(!session_id_.empty());
- std::string session_msg;
- bool msg_available =
- cr_fuchsia::StringFromMemBuffer(message.message, &session_msg);
+ absl::optional<std::string> session_msg =
+ base::StringFromMemBuffer(message.message);
- if (!msg_available) {
+ if (!session_msg) {
LOG(ERROR) << "Failed to generate message for session " << session_id_;
return;
}
session_callbacks_->message_cb.Run(
session_id_, ToCdmMessageType(message.type),
- std::vector<uint8_t>(session_msg.begin(), session_msg.end()));
+ std::vector<uint8_t>(session_msg->begin(), session_msg->end()));
}
void OnKeyStatesChanged(
@@ -273,8 +275,6 @@ class FuchsiaCdm::CdmSession {
// `GenerateLicenseRelease` has been called and the session is waiting for
// license release response from server.
bool pending_release_ = false;
-
- DISALLOW_COPY_AND_ASSIGN(CdmSession);
};
FuchsiaCdm::SessionCallbacks::SessionCallbacks() = default;
diff --git a/chromium/media/fuchsia/cdm/fuchsia_cdm.h b/chromium/media/fuchsia/cdm/fuchsia_cdm.h
index f1f058ba580..91c26642b76 100644
--- a/chromium/media/fuchsia/cdm/fuchsia_cdm.h
+++ b/chromium/media/fuchsia/cdm/fuchsia_cdm.h
@@ -27,6 +27,10 @@ class FuchsiaCdm : public ContentDecryptionModule,
struct SessionCallbacks {
SessionCallbacks();
SessionCallbacks(SessionCallbacks&&);
+
+ SessionCallbacks(const SessionCallbacks&) = delete;
+ SessionCallbacks& operator=(const SessionCallbacks&) = delete;
+
~SessionCallbacks();
SessionCallbacks& operator=(SessionCallbacks&&);
@@ -35,8 +39,6 @@ class FuchsiaCdm : public ContentDecryptionModule,
SessionClosedCB closed_cb;
SessionKeysChangeCB keys_change_cb;
SessionExpirationUpdateCB expiration_update_cb;
-
- DISALLOW_COPY_AND_ASSIGN(SessionCallbacks);
};
using ReadyCB = base::OnceCallback<void(bool, const std::string&)>;
diff --git a/chromium/media/fuchsia/cdm/fuchsia_cdm_factory.h b/chromium/media/fuchsia/cdm/fuchsia_cdm_factory.h
index 54c5922af79..d643277a968 100644
--- a/chromium/media/fuchsia/cdm/fuchsia_cdm_factory.h
+++ b/chromium/media/fuchsia/cdm/fuchsia_cdm_factory.h
@@ -23,6 +23,10 @@ class MEDIA_EXPORT FuchsiaCdmFactory final : public CdmFactory {
public:
// |interface_provider| must outlive this class.
explicit FuchsiaCdmFactory(std::unique_ptr<FuchsiaCdmProvider> provider);
+
+ FuchsiaCdmFactory(const FuchsiaCdmFactory&) = delete;
+ FuchsiaCdmFactory& operator=(const FuchsiaCdmFactory&) = delete;
+
~FuchsiaCdmFactory() override;
// CdmFactory implementation.
@@ -48,8 +52,6 @@ class MEDIA_EXPORT FuchsiaCdmFactory final : public CdmFactory {
pending_cdms_;
base::WeakPtrFactory<FuchsiaCdmFactory> weak_factory_{this};
-
- DISALLOW_COPY_AND_ASSIGN(FuchsiaCdmFactory);
};
} // namespace media
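The CDM-related headers in this patch also show the other tree-wide cleanup: the private DISALLOW_COPY_AND_ASSIGN(...) macro is replaced by copy operations explicitly deleted in the public section. In plain C++ (the class name here is illustrative):

class Widget {
 public:
  Widget() = default;

  // Replaces the old private DISALLOW_COPY_AND_ASSIGN(Widget); line.
  Widget(const Widget&) = delete;
  Widget& operator=(const Widget&) = delete;

  ~Widget() = default;
};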
diff --git a/chromium/media/fuchsia/cdm/fuchsia_decryptor.h b/chromium/media/fuchsia/cdm/fuchsia_decryptor.h
index 006b079e094..74c14745412 100644
--- a/chromium/media/fuchsia/cdm/fuchsia_decryptor.h
+++ b/chromium/media/fuchsia/cdm/fuchsia_decryptor.h
@@ -20,6 +20,10 @@ class FuchsiaDecryptor : public Decryptor {
public:
// Caller should make sure |cdm| lives longer than this class.
explicit FuchsiaDecryptor(FuchsiaCdmContext* cdm_context);
+
+ FuchsiaDecryptor(const FuchsiaDecryptor&) = delete;
+ FuchsiaDecryptor& operator=(const FuchsiaDecryptor&) = delete;
+
~FuchsiaDecryptor() override;
// media::Decryptor implementation:
@@ -44,8 +48,6 @@ class FuchsiaDecryptor : public Decryptor {
// TaskRunner for the thread on which |audio_decryptor_| was created.
scoped_refptr<base::SingleThreadTaskRunner> audio_decryptor_task_runner_;
-
- DISALLOW_COPY_AND_ASSIGN(FuchsiaDecryptor);
};
} // namespace media
diff --git a/chromium/media/fuchsia/cdm/service/BUILD.gn b/chromium/media/fuchsia/cdm/service/BUILD.gn
index 7cd4ac5c23b..e3169ffabe7 100644
--- a/chromium/media/fuchsia/cdm/service/BUILD.gn
+++ b/chromium/media/fuchsia/cdm/service/BUILD.gn
@@ -13,12 +13,12 @@ source_set("service") {
]
public_deps = [
+ "//base",
"//media",
"//third_party/fuchsia-sdk/sdk/fidl/fuchsia.media.drm",
]
deps = [
- "//fuchsia/base",
"//media/fuchsia/mojom",
"//third_party/fuchsia-sdk/sdk/pkg/fit-promise",
"//url",
@@ -32,7 +32,6 @@ source_set("unittests") {
":service",
"//base",
"//base/test:test_support",
- "//fuchsia/base",
"//media",
"//testing/gmock",
"//testing/gtest",
diff --git a/chromium/media/fuchsia/cdm/service/fuchsia_cdm_manager_unittest.cc b/chromium/media/fuchsia/cdm/service/fuchsia_cdm_manager_unittest.cc
index 346b2c3ad64..a47ff82ee30 100644
--- a/chromium/media/fuchsia/cdm/service/fuchsia_cdm_manager_unittest.cc
+++ b/chromium/media/fuchsia/cdm/service/fuchsia_cdm_manager_unittest.cc
@@ -326,7 +326,7 @@ TEST_F(FuchsiaCdmManagerTest, CdmDataQuotaBytes) {
kEmptyKeySystemDirectory, 0);
// Sleep to account for coarse-grained filesystem timestamps.
- base::PlatformThread::Sleep(base::TimeDelta::FromSeconds(1));
+ base::PlatformThread::Sleep(base::Seconds(1));
// Create the recently-used directories.
CreateDummyCdmDirectory(temp_path, kOriginDirectory1, kKeySystemDirectory2,
@@ -389,7 +389,7 @@ TEST_F(FuchsiaCdmManagerTest, EmptyOriginDirectory) {
kKeySystemDirectory2, kTestQuotaBytes / 2);
// Sleep to account for coarse-grained filesystem timestamps.
- base::PlatformThread::Sleep(base::TimeDelta::FromSeconds(1));
+ base::PlatformThread::Sleep(base::Seconds(1));
// Create dummy data for a recently-used, active origin.
CreateDummyCdmDirectory(temp_path, kActiveOriginDirectory,
diff --git a/chromium/media/fuchsia/cdm/service/provisioning_fetcher_impl.cc b/chromium/media/fuchsia/cdm/service/provisioning_fetcher_impl.cc
index d234d4c8b92..e3608572c37 100644
--- a/chromium/media/fuchsia/cdm/service/provisioning_fetcher_impl.cc
+++ b/chromium/media/fuchsia/cdm/service/provisioning_fetcher_impl.cc
@@ -7,8 +7,8 @@
#include "base/bind.h"
#include "base/callback.h"
#include "base/fuchsia/fuchsia_logging.h"
+#include "base/fuchsia/mem_buffer_util.h"
#include "base/logging.h"
-#include "fuchsia/base/mem_buffer_util.h"
namespace media {
@@ -45,8 +45,9 @@ void ProvisioningFetcherImpl::Fetch(
return;
}
- std::string request_str;
- if (!cr_fuchsia::StringFromMemBuffer(request.message, &request_str)) {
+ absl::optional<std::string> request_str =
+ base::StringFromMemBuffer(request.message);
+ if (!request_str) {
DLOG(WARNING) << "Failed to read ProvisioningRequest.";
OnError(ZX_ERR_INVALID_ARGS);
return;
@@ -64,7 +65,7 @@ void ProvisioningFetcherImpl::Fetch(
retrieve_in_progress_ = true;
fetcher_->Retrieve(
- GURL(request.default_provisioning_server_url.value()), request_str,
+ GURL(request.default_provisioning_server_url.value()), *request_str,
base::BindRepeating(&ProvisioningFetcherImpl::OnRetrieveComplete,
base::Unretained(this),
base::Passed(std::move(callback))));
@@ -80,7 +81,7 @@ void ProvisioningFetcherImpl::OnRetrieveComplete(FetchCallback callback,
fuchsia::media::drm::ProvisioningResponse provision_response;
provision_response.message =
- cr_fuchsia::MemBufferFromString(response, "cr-drm-provision-response");
+ base::MemBufferFromString(response, "cr-drm-provision-response");
callback(std::move(provision_response));
}
diff --git a/chromium/media/fuchsia/cdm/service/provisioning_fetcher_impl_unittest.cc b/chromium/media/fuchsia/cdm/service/provisioning_fetcher_impl_unittest.cc
index a9803b326d6..5e269255e49 100644
--- a/chromium/media/fuchsia/cdm/service/provisioning_fetcher_impl_unittest.cc
+++ b/chromium/media/fuchsia/cdm/service/provisioning_fetcher_impl_unittest.cc
@@ -8,10 +8,10 @@
#include <memory>
#include "base/bind.h"
+#include "base/fuchsia/mem_buffer_util.h"
#include "base/location.h"
#include "base/test/bind.h"
#include "base/test/task_environment.h"
-#include "fuchsia/base/mem_buffer_util.h"
#include "media/fuchsia/cdm/service/mock_provision_fetcher.h"
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
@@ -37,8 +37,7 @@ drm::ProvisioningRequest CreateProvisioningRequest(
drm::ProvisioningRequest request;
request.default_provisioning_server_url =
std::move(default_provisioning_server_url);
- request.message =
- cr_fuchsia::MemBufferFromString(message, "provisioning_request");
+ request.message = base::MemBufferFromString(message, "provisioning_request");
return request;
}
@@ -66,13 +65,14 @@ TEST_F(ProvisioningFetcherImplTest, Fetch) {
fetcher.Bind(base::MakeExpectedNotRunClosure(FROM_HERE));
- std::string response_message;
+ absl::optional<std::string> response_message;
fetcher.Fetch(CreateProvisioningRequest(kTestDefaultUrl, kTestRequest),
[&](drm::ProvisioningResponse response) {
- ASSERT_TRUE(cr_fuchsia::StringFromMemBuffer(
- response.message, &response_message));
+ response_message =
+ base::StringFromMemBuffer(response.message);
});
- EXPECT_EQ(response_message, kTestResponse);
+ ASSERT_TRUE(response_message.has_value());
+ EXPECT_EQ(*response_message, kTestResponse);
}
TEST_F(ProvisioningFetcherImplTest, RetrieveFails) {
@@ -89,13 +89,14 @@ TEST_F(ProvisioningFetcherImplTest, RetrieveFails) {
fetcher.Bind(base::MakeExpectedNotRunClosure(FROM_HERE));
- std::string response_message;
+ absl::optional<std::string> response_message;
fetcher.Fetch(CreateProvisioningRequest(kTestDefaultUrl, kTestRequest),
[&](drm::ProvisioningResponse response) {
- ASSERT_TRUE(cr_fuchsia::StringFromMemBuffer(
- response.message, &response_message));
+ response_message =
+ base::StringFromMemBuffer(response.message);
});
- EXPECT_TRUE(response_message.empty());
+ ASSERT_TRUE(response_message.has_value());
+ EXPECT_TRUE(response_message->empty());
}
TEST_F(ProvisioningFetcherImplTest, NoDefaultProvisioningUrl) {
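These test changes follow the new base::StringFromMemBuffer signature, which returns absl::optional<std::string> instead of filling an out-parameter and returning bool as cr_fuchsia::StringFromMemBuffer did. The resulting call-site shape, sketched with illustrative names (buffer stands for any fuchsia::mem::Buffer; HandleMessage is a placeholder):

absl::optional<std::string> text = base::StringFromMemBuffer(buffer);
if (!text) {
  DLOG(WARNING) << "Failed to read message buffer.";
  return;
}
HandleMessage(*text);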
diff --git a/chromium/media/fuchsia/common/stream_processor_helper.cc b/chromium/media/fuchsia/common/stream_processor_helper.cc
index 0ca556a7ac3..b4f71511284 100644
--- a/chromium/media/fuchsia/common/stream_processor_helper.cc
+++ b/chromium/media/fuchsia/common/stream_processor_helper.cc
@@ -266,7 +266,7 @@ void StreamProcessorHelper::OnOutputPacket(fuchsia::media::Packet output_packet,
auto packet_index = output_packet.header().packet_index();
base::TimeDelta timestamp =
output_packet.has_timestamp_ish()
- ? base::TimeDelta::FromNanoseconds(output_packet.timestamp_ish())
+ ? base::Nanoseconds(output_packet.timestamp_ish())
: kNoTimestamp;
client_->OnStreamProcessorOutputPacket(IoPacket(
diff --git a/chromium/media/fuchsia/common/stream_processor_helper.h b/chromium/media/fuchsia/common/stream_processor_helper.h
index 1ada861ed2d..9d473612059 100644
--- a/chromium/media/fuchsia/common/stream_processor_helper.h
+++ b/chromium/media/fuchsia/common/stream_processor_helper.h
@@ -34,6 +34,10 @@ class MEDIA_EXPORT StreamProcessorHelper {
base::TimeDelta timestamp,
bool unit_end,
base::OnceClosure destroy_cb);
+
+ IoPacket(const IoPacket&) = delete;
+ IoPacket& operator=(const IoPacket&) = delete;
+
~IoPacket();
IoPacket(IoPacket&&);
@@ -60,8 +64,6 @@ class MEDIA_EXPORT StreamProcessorHelper {
bool unit_end_;
fuchsia::media::FormatDetails format_;
std::forward_list<base::OnceClosure> destroy_callbacks_;
-
- DISALLOW_COPY_AND_ASSIGN(IoPacket);
};
class Client {
diff --git a/chromium/media/fuchsia/common/sysmem_client.h b/chromium/media/fuchsia/common/sysmem_client.h
index 1b0b1b16e1d..e8b784f6b17 100644
--- a/chromium/media/fuchsia/common/sysmem_client.h
+++ b/chromium/media/fuchsia/common/sysmem_client.h
@@ -99,10 +99,6 @@ class MEDIA_EXPORT SysmemAllocatorClient {
std::unique_ptr<SysmemCollectionClient> BindSharedCollection(
fuchsia::sysmem::BufferCollectionTokenPtr token);
- // TODO(crbug.com/1131183): Update FuchsiaVideoDecoder to use
- // SysmemCollectionClient and remove this function.
- fuchsia::sysmem::Allocator* raw() { return allocator_.get(); }
-
private:
friend SysmemCollectionClient;
diff --git a/chromium/media/fuchsia/mojom/BUILD.gn b/chromium/media/fuchsia/mojom/BUILD.gn
index 5d5a38d5d45..c4e78387148 100644
--- a/chromium/media/fuchsia/mojom/BUILD.gn
+++ b/chromium/media/fuchsia/mojom/BUILD.gn
@@ -14,11 +14,6 @@ mojom("mojom") {
shared_cpp_typemaps = {
types = [
{
- mojom = "media.mojom.CdmRequest"
- cpp = "::fidl::InterfaceRequest<::fuchsia::media::drm::ContentDecryptionModule>"
- move_only = true
- },
- {
mojom = "media.mojom.AudioConsumerRequest"
cpp = "::fidl::InterfaceRequest<::fuchsia::media::AudioConsumer>"
move_only = true
@@ -33,6 +28,31 @@ mojom("mojom") {
traits_public_deps = [
"//fuchsia/mojom:traits",
"//third_party/fuchsia-sdk/sdk/fidl/fuchsia.media",
+ ]
+ }
+
+ cpp_typemaps = [ shared_cpp_typemaps ]
+ blink_cpp_typemaps = [ shared_cpp_typemaps ]
+}
+
+mojom("cdm_provider") {
+ sources = [ "fuchsia_cdm_provider.mojom" ]
+
+ export_class_attribute_blink = "BLINK_PLATFORM_EXPORT"
+ export_define_blink = "BLINK_PLATFORM_IMPLEMENTATION=1"
+ export_header_blink = "third_party/blink/public/platform/web_common.h"
+
+ shared_cpp_typemaps = {
+ types = [
+ {
+ mojom = "media.mojom.CdmRequest"
+ cpp = "::fidl::InterfaceRequest<::fuchsia::media::drm::ContentDecryptionModule>"
+ move_only = true
+ },
+ ]
+ traits_headers = [ "fuchsia_cdm_provider_mojom_traits.h" ]
+ traits_public_deps = [
+ "//fuchsia/mojom:traits",
"//third_party/fuchsia-sdk/sdk/fidl/fuchsia.media.drm",
]
}
diff --git a/chromium/media/fuchsia/mojom/fuchsia_cdm_provider.mojom b/chromium/media/fuchsia/mojom/fuchsia_cdm_provider.mojom
new file mode 100644
index 00000000000..ac6eadde75b
--- /dev/null
+++ b/chromium/media/fuchsia/mojom/fuchsia_cdm_provider.mojom
@@ -0,0 +1,20 @@
+// Copyright 2020 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+module media.mojom;
+
+// Mojo struct for
+// fidl::InterfaceRequest<fuchsia::media::drm::ContentDecryptionModule>.
+struct CdmRequest {
+ handle<platform> request;
+};
+
+// Interface used by the renderer to connect to
+// fuchsia::media::drm::ContentDecryptionModule. Instances are document-scoped.
+interface FuchsiaCdmProvider {
+ // Create connection to fuchsia::media::drm::ContentDecryptionModule for
+ // |key_system|. Implementation should make sure the persistent storage is
+ // isolated per web origin.
+ CreateCdm(string key_system, CdmRequest cdm_request);
+};
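The renderer-side counterpart of this new interface is MojoFuchsiaCdmProvider, updated earlier in this patch. A condensed sketch of the binding pattern it uses (error handling and member plumbing omitted; taken from the hunks above, not new API):

mojo::Remote<media::mojom::FuchsiaCdmProvider> cdm_provider;
interface_broker->GetInterface(cdm_provider.BindNewPipeAndPassReceiver());
cdm_provider->CreateCdm(key_system, std::move(cdm_request));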
diff --git a/chromium/media/fuchsia/mojom/fuchsia_cdm_provider_mojom_traits.h b/chromium/media/fuchsia/mojom/fuchsia_cdm_provider_mojom_traits.h
new file mode 100644
index 00000000000..dd44fa59d25
--- /dev/null
+++ b/chromium/media/fuchsia/mojom/fuchsia_cdm_provider_mojom_traits.h
@@ -0,0 +1,24 @@
+// Copyright 2020 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_FUCHSIA_MOJOM_FUCHSIA_CDM_PROVIDER_MOJOM_TRAITS_H_
+#define MEDIA_FUCHSIA_MOJOM_FUCHSIA_CDM_PROVIDER_MOJOM_TRAITS_H_
+
+#include <fuchsia/media/drm/cpp/fidl.h>
+
+#include "fuchsia/mojom/fidl_interface_request_mojom_traits.h"
+
+namespace mojo {
+
+template <>
+struct StructTraits<
+ media::mojom::CdmRequestDataView,
+ fidl::InterfaceRequest<fuchsia::media::drm::ContentDecryptionModule>>
+ : public FidlInterfaceRequestStructTraits<
+ media::mojom::CdmRequestDataView,
+ fuchsia::media::drm::ContentDecryptionModule> {};
+
+} // namespace mojo
+
+#endif // MEDIA_FUCHSIA_MOJOM_FUCHSIA_CDM_PROVIDER_MOJOM_TRAITS_H_
diff --git a/chromium/media/fuchsia/mojom/fuchsia_media_resource_provider.mojom b/chromium/media/fuchsia/mojom/fuchsia_media_resource_provider.mojom
index 68a658abd6a..596727bf901 100644
--- a/chromium/media/fuchsia/mojom/fuchsia_media_resource_provider.mojom
+++ b/chromium/media/fuchsia/mojom/fuchsia_media_resource_provider.mojom
@@ -4,12 +4,6 @@
module media.mojom;
-// Mojo struct for
-// fidl::InterfaceRequest<fuchsia::media::drm::ContentDecryptionModule>.
-struct CdmRequest {
- handle<platform> request;
-};
-
// Mojo struct for fidl::InterfaceRequest<fuchsia::media::AudioConsumer>.
struct AudioConsumerRequest {
handle<platform> request;
@@ -23,11 +17,6 @@ struct AudioCapturerRequest {
// Interface used by the render to create media resources. Instances are
// document-scoped.
interface FuchsiaMediaResourceProvider {
- // Create connection to fuchsia::media::drm::ContentDecryptionModule for
- // |key_system|. Implementation should make sure the persistent storage is
- // isolated per web origin.
- CreateCdm(string key_system, CdmRequest cdm_request);
-
// Creates a fuchsia.media.AudioConsumer for the current frame.
CreateAudioConsumer(AudioConsumerRequest request);
diff --git a/chromium/media/fuchsia/mojom/fuchsia_media_resource_provider_mojom_traits.h b/chromium/media/fuchsia/mojom/fuchsia_media_resource_provider_mojom_traits.h
index ef2ddd5ecd5..29aa82a5049 100644
--- a/chromium/media/fuchsia/mojom/fuchsia_media_resource_provider_mojom_traits.h
+++ b/chromium/media/fuchsia/mojom/fuchsia_media_resource_provider_mojom_traits.h
@@ -5,21 +5,13 @@
#ifndef MEDIA_FUCHSIA_MOJOM_FUCHSIA_MEDIA_RESOURCE_PROVIDER_MOJOM_TRAITS_H_
#define MEDIA_FUCHSIA_MOJOM_FUCHSIA_MEDIA_RESOURCE_PROVIDER_MOJOM_TRAITS_H_
-#include <fuchsia/media/drm/cpp/fidl.h>
+#include <fuchsia/media/cpp/fidl.h>
#include "fuchsia/mojom/fidl_interface_request_mojom_traits.h"
namespace mojo {
template <>
-struct StructTraits<
- media::mojom::CdmRequestDataView,
- fidl::InterfaceRequest<fuchsia::media::drm::ContentDecryptionModule>>
- : public FidlInterfaceRequestStructTraits<
- media::mojom::CdmRequestDataView,
- fuchsia::media::drm::ContentDecryptionModule> {};
-
-template <>
struct StructTraits<media::mojom::AudioConsumerRequestDataView,
fidl::InterfaceRequest<fuchsia::media::AudioConsumer>>
: public FidlInterfaceRequestStructTraits<
diff --git a/chromium/media/gpu/BUILD.gn b/chromium/media/gpu/BUILD.gn
index c7499485ac0..3fedc78948f 100644
--- a/chromium/media/gpu/BUILD.gn
+++ b/chromium/media/gpu/BUILD.gn
@@ -20,6 +20,7 @@ buildflag_header("buildflags") {
"USE_VAAPI_IMAGE_CODECS=$use_vaapi_image_codecs",
"USE_V4L2_CODEC=$use_v4l2_codec",
"USE_LIBV4L2=$use_v4lplugin",
+ "USE_VAAPI_X11=$use_vaapi_x11",
]
}
@@ -215,9 +216,7 @@ component("gpu") {
"windows/supported_profile_helpers.cc",
"windows/supported_profile_helpers.h",
]
- configs += [
- "//third_party/khronos:khronos_headers",
- ]
+ configs += [ "//third_party/khronos:khronos_headers" ]
public_deps += [ "//media/base/win:media_foundation_util" ]
deps += [
"//gpu/ipc/common:common",
diff --git a/chromium/media/gpu/accelerated_video_decoder.h b/chromium/media/gpu/accelerated_video_decoder.h
index 135e4b0bfa6..859bb49c69b 100644
--- a/chromium/media/gpu/accelerated_video_decoder.h
+++ b/chromium/media/gpu/accelerated_video_decoder.h
@@ -24,6 +24,10 @@ namespace media {
class MEDIA_GPU_EXPORT AcceleratedVideoDecoder {
public:
AcceleratedVideoDecoder() {}
+
+ AcceleratedVideoDecoder(const AcceleratedVideoDecoder&) = delete;
+ AcceleratedVideoDecoder& operator=(const AcceleratedVideoDecoder&) = delete;
+
virtual ~AcceleratedVideoDecoder() {}
// Set the buffer owned by |decoder_buffer| as the current source of encoded
@@ -87,9 +91,6 @@ class MEDIA_GPU_EXPORT AcceleratedVideoDecoder {
// The number is the sweet spot which the decoder can tolerate to handle the
// missing keyframe by itself. In addition, this situation is exceptional.
static constexpr size_t kVPxMaxNumOfSizeChangeFailures = 75;
-
- private:
- DISALLOW_COPY_AND_ASSIGN(AcceleratedVideoDecoder);
};
} // namespace media
diff --git a/chromium/media/gpu/android/android_video_encode_accelerator.cc b/chromium/media/gpu/android/android_video_encode_accelerator.cc
index 008ab0bf43d..8ed24c31925 100644
--- a/chromium/media/gpu/android/android_video_encode_accelerator.cc
+++ b/chromium/media/gpu/android/android_video_encode_accelerator.cc
@@ -70,11 +70,11 @@ static inline const base::TimeDelta EncodePollDelay() {
// pictures have been fed to saturate any internal buffering). This is
// speculative and it's unclear that this would be a win (nor that there's a
// reasonably device-agnostic way to fill in the "believes" above).
- return base::TimeDelta::FromMilliseconds(10);
+ return base::Milliseconds(10);
}
static inline const base::TimeDelta NoWaitTimeOut() {
- return base::TimeDelta::FromMicroseconds(0);
+ return base::Microseconds(0);
}
static bool GetSupportedColorFormatForMime(const std::string& mime,
@@ -107,16 +107,16 @@ AndroidVideoEncodeAccelerator::GetSupportedProfiles() {
const struct {
const VideoCodec codec;
const VideoCodecProfile profile;
- } kSupportedCodecs[] = {{kCodecVP8, VP8PROFILE_ANY},
- {kCodecH264, H264PROFILE_BASELINE}};
+ } kSupportedCodecs[] = {{VideoCodec::kVP8, VP8PROFILE_ANY},
+ {VideoCodec::kH264, H264PROFILE_BASELINE}};
for (const auto& supported_codec : kSupportedCodecs) {
- if (supported_codec.codec == kCodecVP8 &&
+ if (supported_codec.codec == VideoCodec::kVP8 &&
!MediaCodecUtil::IsVp8EncoderAvailable()) {
continue;
}
- if (supported_codec.codec == kCodecH264 &&
+ if (supported_codec.codec == VideoCodec::kH264 &&
!MediaCodecUtil::IsH264EncoderAvailable()) {
continue;
}
@@ -161,13 +161,13 @@ bool AndroidVideoEncodeAccelerator::Initialize(const Config& config,
uint32_t frame_input_count;
uint32_t i_frame_interval;
if (config.output_profile == VP8PROFILE_ANY) {
- codec = kCodecVP8;
+ codec = VideoCodec::kVP8;
mime_type = "video/x-vnd.on2.vp8";
frame_input_count = 1;
i_frame_interval = IFRAME_INTERVAL_VPX;
} else if (config.output_profile == H264PROFILE_BASELINE ||
config.output_profile == H264PROFILE_MAIN) {
- codec = kCodecH264;
+ codec = VideoCodec::kH264;
mime_type = "video/avc";
frame_input_count = 30;
i_frame_interval = IFRAME_INTERVAL_H264;
@@ -369,7 +369,7 @@ void AndroidVideoEncodeAccelerator::QueueInput() {
// mapping to the generated |presentation_timestamp_|, and will read them out
// after encoding. Then encoder can work happily always and we can preserve
// the timestamps in captured frames for other purpose.
- presentation_timestamp_ += base::TimeDelta::FromMicroseconds(
+ presentation_timestamp_ += base::Microseconds(
base::Time::kMicrosecondsPerSecond / INITIAL_FRAMERATE);
DCHECK(frame_timestamp_map_.find(presentation_timestamp_) ==
frame_timestamp_map_.end());
diff --git a/chromium/media/gpu/android/android_video_encode_accelerator.h b/chromium/media/gpu/android/android_video_encode_accelerator.h
index a47fdde1ba0..71e63c833d0 100644
--- a/chromium/media/gpu/android/android_video_encode_accelerator.h
+++ b/chromium/media/gpu/android/android_video_encode_accelerator.h
@@ -37,6 +37,11 @@ class MEDIA_GPU_EXPORT AndroidVideoEncodeAccelerator
: public VideoEncodeAccelerator {
public:
AndroidVideoEncodeAccelerator();
+
+ AndroidVideoEncodeAccelerator(const AndroidVideoEncodeAccelerator&) = delete;
+ AndroidVideoEncodeAccelerator& operator=(
+ const AndroidVideoEncodeAccelerator&) = delete;
+
~AndroidVideoEncodeAccelerator() override;
// VideoEncodeAccelerator implementation.
@@ -107,8 +112,6 @@ class MEDIA_GPU_EXPORT AndroidVideoEncodeAccelerator
// True if there is encoder error.
bool error_occurred_;
-
- DISALLOW_COPY_AND_ASSIGN(AndroidVideoEncodeAccelerator);
};
} // namespace media
diff --git a/chromium/media/gpu/android/android_video_surface_chooser.h b/chromium/media/gpu/android/android_video_surface_chooser.h
index b5e7f319d7c..0c6364100bf 100644
--- a/chromium/media/gpu/android/android_video_surface_chooser.h
+++ b/chromium/media/gpu/android/android_video_surface_chooser.h
@@ -72,6 +72,11 @@ class MEDIA_GPU_EXPORT AndroidVideoSurfaceChooser {
using UseTextureOwnerCB = base::RepeatingCallback<void(void)>;
AndroidVideoSurfaceChooser() {}
+
+ AndroidVideoSurfaceChooser(const AndroidVideoSurfaceChooser&) = delete;
+ AndroidVideoSurfaceChooser& operator=(const AndroidVideoSurfaceChooser&) =
+ delete;
+
virtual ~AndroidVideoSurfaceChooser() {}
// Sets the client callbacks to be called when a new surface choice is made.
@@ -84,9 +89,6 @@ class MEDIA_GPU_EXPORT AndroidVideoSurfaceChooser {
// the factory is updated to |*new_factory|.
virtual void UpdateState(absl::optional<AndroidOverlayFactoryCB> new_factory,
const State& new_state) = 0;
-
- private:
- DISALLOW_COPY_AND_ASSIGN(AndroidVideoSurfaceChooser);
};
} // namespace media
diff --git a/chromium/media/gpu/android/android_video_surface_chooser_impl.cc b/chromium/media/gpu/android/android_video_surface_chooser_impl.cc
index 4b87ad93b29..110fad5bbc5 100644
--- a/chromium/media/gpu/android/android_video_surface_chooser_impl.cc
+++ b/chromium/media/gpu/android/android_video_surface_chooser_impl.cc
@@ -12,8 +12,7 @@ namespace media {
// Minimum time that we require after a failed overlay attempt before we'll try
// again for an overlay.
-constexpr base::TimeDelta MinimumDelayAfterFailedOverlay =
- base::TimeDelta::FromSeconds(5);
+constexpr base::TimeDelta MinimumDelayAfterFailedOverlay = base::Seconds(5);
AndroidVideoSurfaceChooserImpl::AndroidVideoSurfaceChooserImpl(
bool allow_dynamic,
diff --git a/chromium/media/gpu/android/android_video_surface_chooser_impl.h b/chromium/media/gpu/android/android_video_surface_chooser_impl.h
index a23aad8cd51..d1fa138a004 100644
--- a/chromium/media/gpu/android/android_video_surface_chooser_impl.h
+++ b/chromium/media/gpu/android/android_video_surface_chooser_impl.h
@@ -26,6 +26,12 @@ class MEDIA_GPU_EXPORT AndroidVideoSurfaceChooserImpl
// provided, then it must outlast |this|.
AndroidVideoSurfaceChooserImpl(bool allow_dynamic,
const base::TickClock* tick_clock = nullptr);
+
+ AndroidVideoSurfaceChooserImpl(const AndroidVideoSurfaceChooserImpl&) =
+ delete;
+ AndroidVideoSurfaceChooserImpl& operator=(
+ const AndroidVideoSurfaceChooserImpl&) = delete;
+
~AndroidVideoSurfaceChooserImpl() override;
// AndroidVideoSurfaceChooser
@@ -90,8 +96,6 @@ class MEDIA_GPU_EXPORT AndroidVideoSurfaceChooserImpl
base::TimeTicks most_recent_overlay_failure_;
base::WeakPtrFactory<AndroidVideoSurfaceChooserImpl> weak_factory_{this};
-
- DISALLOW_COPY_AND_ASSIGN(AndroidVideoSurfaceChooserImpl);
};
} // namespace media
diff --git a/chromium/media/gpu/android/android_video_surface_chooser_impl_unittest.cc b/chromium/media/gpu/android/android_video_surface_chooser_impl_unittest.cc
index 920988c54c5..47d814a3d42 100644
--- a/chromium/media/gpu/android/android_video_surface_chooser_impl_unittest.cc
+++ b/chromium/media/gpu/android/android_video_surface_chooser_impl_unittest.cc
@@ -109,7 +109,7 @@ class AndroidVideoSurfaceChooserImplTest
overlay_ = std::make_unique<MockAndroidOverlay>();
// Advance the clock just so we're not at 0.
- tick_clock_.Advance(base::TimeDelta::FromSeconds(10));
+ tick_clock_.Advance(base::Seconds(10));
// Don't prevent promotions because of the compositor.
chooser_state_.is_compositor_promotable = true;
@@ -258,7 +258,7 @@ TEST_F(AndroidVideoSurfaceChooserImplTest,
testing::Mock::VerifyAndClearExpectations(this);
// Try to get it to choose again, which shouldn't do anything.
- tick_clock_.Advance(base::TimeDelta::FromSeconds(2));
+ tick_clock_.Advance(base::Seconds(2));
EXPECT_CALL(*this, MockOnOverlayCreated()).Times(0);
chooser_->UpdateState(FactoryFor(nullptr), chooser_state_);
testing::Mock::VerifyAndClearExpectations(&client_);
@@ -266,7 +266,7 @@ TEST_F(AndroidVideoSurfaceChooserImplTest,
// Advance some more and try again. This time, it should request an overlay
// from the factory.
- tick_clock_.Advance(base::TimeDelta::FromSeconds(100));
+ tick_clock_.Advance(base::Seconds(100));
EXPECT_CALL(*this, MockOnOverlayCreated()).Times(1);
chooser_->UpdateState(FactoryFor(nullptr), chooser_state_);
testing::Mock::VerifyAndClearExpectations(&client_);
diff --git a/chromium/media/gpu/android/codec_allocator.cc b/chromium/media/gpu/android/codec_allocator.cc
index 6e2547d96b4..dc23a57b6df 100644
--- a/chromium/media/gpu/android/codec_allocator.cc
+++ b/chromium/media/gpu/android/codec_allocator.cc
@@ -193,8 +193,7 @@ bool CodecAllocator::IsPrimaryTaskRunnerLikelyHung() const {
// typically take 100-200ms on a N5, so 800ms is expected to very rarely
// result in false positives. Also, false positives have low impact because we
// resume using the thread when the task completes.
- constexpr base::TimeDelta kHungTaskDetectionTimeout =
- base::TimeDelta::FromMilliseconds(800);
+ constexpr base::TimeDelta kHungTaskDetectionTimeout = base::Milliseconds(800);
return !pending_operations_.empty() &&
tick_clock_->NowTicks() - *pending_operations_.begin() >
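The hunk above only shortens the TimeDelta spelling; the 800 ms hung-task heuristic itself is unchanged. A condensed sketch of that check, based on the two context lines shown (the container holding the pending-operation start times is an assumption, since its declaration is not part of this hunk):

#include <set>

#include "base/time/tick_clock.h"
#include "base/time/time.h"

// Reports a likely hang when the oldest still-pending operation has been
// outstanding for longer than the 800 ms threshold discussed above.
bool IsLikelyHung(const std::set<base::TimeTicks>& pending_operations,
                  const base::TickClock* tick_clock) {
  constexpr base::TimeDelta kHungTaskDetectionTimeout = base::Milliseconds(800);
  return !pending_operations.empty() &&
         tick_clock->NowTicks() - *pending_operations.begin() >
             kHungTaskDetectionTimeout;
}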
diff --git a/chromium/media/gpu/android/codec_allocator_unittest.cc b/chromium/media/gpu/android/codec_allocator_unittest.cc
index 09a02a325c1..451b4df1df0 100644
--- a/chromium/media/gpu/android/codec_allocator_unittest.cc
+++ b/chromium/media/gpu/android/codec_allocator_unittest.cc
@@ -33,13 +33,16 @@ class CodecAllocatorTest : public testing::Test {
public:
CodecAllocatorTest() : allocator_thread_("AllocatorThread") {
// Don't start the clock at null.
- tick_clock_.Advance(base::TimeDelta::FromSeconds(1));
+ tick_clock_.Advance(base::Seconds(1));
allocator_ = new CodecAllocator(
base::BindRepeating(&MockMediaCodecBridge::CreateVideoDecoder),
base::SequencedTaskRunnerHandle::Get());
allocator_->tick_clock_ = &tick_clock_;
}
+ CodecAllocatorTest(const CodecAllocatorTest&) = delete;
+ CodecAllocatorTest& operator=(const CodecAllocatorTest&) = delete;
+
~CodecAllocatorTest() override {
if (allocator_thread_.IsRunning()) {
// Don't leave any threads hung, or this will hang too. It would be nice
@@ -133,9 +136,6 @@ class CodecAllocatorTest : public testing::Test {
CodecAllocator* allocator_ = nullptr;
std::unique_ptr<MockMediaCodecBridge> last_created_codec_;
-
- private:
- DISALLOW_COPY_AND_ASSIGN(CodecAllocatorTest);
};
TEST_F(CodecAllocatorTest, NormalCreation) {
@@ -179,12 +179,12 @@ TEST_F(CodecAllocatorTest, MultipleCreation) {
base::RunLoop run_loop;
allocator_->CreateMediaCodecAsync(
base::BindOnce(&CodecAllocatorTest::OnCodecCreatedInternal,
- base::Unretained(this), base::DoNothing::Once()),
+ base::Unretained(this), base::DoNothing()),
std::move(config));
// Advance some time, but not enough to trigger hang detection.
ASSERT_FALSE(IsPrimaryTaskRunnerLikelyHung());
- tick_clock_.Advance(base::TimeDelta::FromMilliseconds(400));
+ tick_clock_.Advance(base::Milliseconds(400));
ASSERT_FALSE(IsPrimaryTaskRunnerLikelyHung());
auto config_secure = CreateConfig();
@@ -209,11 +209,11 @@ TEST_F(CodecAllocatorTest, MultipleRelease) {
allocator_->ReleaseMediaCodec(
std::make_unique<MockMediaCodecBridge>(),
base::BindOnce(&CodecAllocatorTest::OnCodecReleasedInternal,
- base::Unretained(this), base::DoNothing::Once()));
+ base::Unretained(this), base::DoNothing()));
// Advance some time, but not enough to trigger hang detection.
ASSERT_FALSE(IsPrimaryTaskRunnerLikelyHung());
- tick_clock_.Advance(base::TimeDelta::FromMilliseconds(400));
+ tick_clock_.Advance(base::Milliseconds(400));
ASSERT_FALSE(IsPrimaryTaskRunnerLikelyHung());
allocator_->ReleaseMediaCodec(
@@ -233,7 +233,7 @@ TEST_F(CodecAllocatorTest, StalledReleaseCountsAsHung) {
// Release null codec, but don't pump message loop.
allocator_->ReleaseMediaCodec(std::make_unique<MockMediaCodecBridge>(),
base::DoNothing());
- tick_clock_.Advance(base::TimeDelta::FromSeconds(1));
+ tick_clock_.Advance(base::Seconds(1));
ASSERT_TRUE(IsPrimaryTaskRunnerLikelyHung());
}
@@ -244,7 +244,7 @@ TEST_F(CodecAllocatorTest, StalledCreateCountsAsHung) {
auto config = CreateConfig();
config->codec_type = CodecType::kSecure;
allocator_->CreateMediaCodecAsync(base::DoNothing(), std::move(config));
- tick_clock_.Advance(base::TimeDelta::FromSeconds(1));
+ tick_clock_.Advance(base::Seconds(1));
ASSERT_TRUE(IsPrimaryTaskRunnerLikelyHung());
}
@@ -254,7 +254,7 @@ TEST_F(CodecAllocatorTest, SecureCreationFailsWhenHung) {
// Release null codec, but don't pump message loop.
allocator_->ReleaseMediaCodec(std::make_unique<MockMediaCodecBridge>(),
base::DoNothing());
- tick_clock_.Advance(base::TimeDelta::FromSeconds(1));
+ tick_clock_.Advance(base::Seconds(1));
ASSERT_TRUE(IsPrimaryTaskRunnerLikelyHung());
// Secure creation should fail since we're now using software codecs.
@@ -286,7 +286,7 @@ TEST_F(CodecAllocatorTest, SoftwareCodecUsedWhenHung) {
// Release null codec, but don't pump message loop.
allocator_->ReleaseMediaCodec(std::make_unique<MockMediaCodecBridge>(),
base::DoNothing());
- tick_clock_.Advance(base::TimeDelta::FromSeconds(1));
+ tick_clock_.Advance(base::Seconds(1));
ASSERT_TRUE(IsPrimaryTaskRunnerLikelyHung());
// Creation should fall back to software.
@@ -316,7 +316,7 @@ TEST_F(CodecAllocatorTest, CodecReleasedOnRightTaskRunnerWhenHung) {
// Release null codec, but don't pump message loop.
allocator_->ReleaseMediaCodec(std::make_unique<MockMediaCodecBridge>(),
base::DoNothing());
- tick_clock_.Advance(base::TimeDelta::FromSeconds(1));
+ tick_clock_.Advance(base::Seconds(1));
ASSERT_TRUE(IsPrimaryTaskRunnerLikelyHung());
// Release software codec, ensure it runs on secondary task runner.
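Several call sites in this test drop base::DoNothing::Once() in favour of plain base::DoNothing(), which implicitly converts to either a OnceCallback or a RepeatingCallback of the required signature. A minimal sketch with illustrative function names:

#include <utility>

#include "base/callback.h"
#include "base/callback_helpers.h"

void TakesOnce(base::OnceClosure done) { std::move(done).Run(); }
void TakesRepeating(base::RepeatingClosure tick) { tick.Run(); }

void Demo() {
  TakesOnce(base::DoNothing());       // was: base::DoNothing::Once()
  TakesRepeating(base::DoNothing());  // was: base::DoNothing::Repeatedly()
}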
diff --git a/chromium/media/gpu/android/codec_buffer_wait_coordinator.h b/chromium/media/gpu/android/codec_buffer_wait_coordinator.h
index 5a4a560f6e9..7c3f8823072 100644
--- a/chromium/media/gpu/android/codec_buffer_wait_coordinator.h
+++ b/chromium/media/gpu/android/codec_buffer_wait_coordinator.h
@@ -68,10 +68,9 @@ class MEDIA_GPU_EXPORT CodecBufferWaitCoordinator
// 5msec covers >99.9% of cases, so just wait for up to that much before
// giving up. If an error occurs, we might not ever get a notification.
- Tuneable<base::TimeDelta> max_wait_ = {"MediaCodecOutputBufferMaxWaitTime",
- base::TimeDelta::FromMilliseconds(0),
- base::TimeDelta::FromMilliseconds(5),
- base::TimeDelta::FromMilliseconds(20)};
+ Tuneable<base::TimeDelta> max_wait_ = {
+ "MediaCodecOutputBufferMaxWaitTime", base::Milliseconds(0),
+ base::Milliseconds(5), base::Milliseconds(20)};
DISALLOW_COPY_AND_ASSIGN(CodecBufferWaitCoordinator);
};
diff --git a/chromium/media/gpu/android/codec_image.cc b/chromium/media/gpu/android/codec_image.cc
index a9dd87b24b9..f86895b66c4 100644
--- a/chromium/media/gpu/android/codec_image.cc
+++ b/chromium/media/gpu/android/codec_image.cc
@@ -92,9 +92,9 @@ void CodecImage::ReleaseTexImage(unsigned target) {}
bool CodecImage::CopyTexImage(unsigned target) {
DCHECK_CALLED_ON_VALID_THREAD(gpu_main_thread_checker_);
- // This method is only called for SurfaceTexture implementation for which DrDc
- // is disabled.
- DCHECK(!features::IsDrDcEnabled());
+ // This method is only called for SurfaceTexture implementation which can't be
+ // thread-safe.
+ DCHECK(!features::NeedThreadSafeAndroidMedia());
TRACE_EVENT0("media", "CodecImage::CopyTexImage");
DCHECK_EQ(COPY, ShouldBindOrCopy());
@@ -132,26 +132,6 @@ bool CodecImage::CopyTexSubImage(unsigned target,
return false;
}
-bool CodecImage::ScheduleOverlayPlane(
- gfx::AcceleratedWidget widget,
- int z_order,
- gfx::OverlayTransform transform,
- const gfx::Rect& bounds_rect,
- const gfx::RectF& crop_rect,
- bool enable_blend,
- std::unique_ptr<gfx::GpuFence> gpu_fence) {
- TRACE_EVENT0("media", "CodecImage::ScheduleOverlayPlane");
- if (is_texture_owner_backed_) {
- DVLOG(1) << "Invalid call to ScheduleOverlayPlane; this image is "
- "TextureOwner backed.";
- return false;
- }
-
- NotifyOverlayPromotion(true, bounds_rect);
- RenderToOverlay();
- return true;
-}
-
void CodecImage::NotifyOverlayPromotion(bool promotion,
const gfx::Rect& bounds) {
AssertAcquiredDrDcLock();
diff --git a/chromium/media/gpu/android/codec_image.h b/chromium/media/gpu/android/codec_image.h
index a03a3daedfb..ff35b5b10eb 100644
--- a/chromium/media/gpu/android/codec_image.h
+++ b/chromium/media/gpu/android/codec_image.h
@@ -11,6 +11,7 @@
#include <vector>
#include "base/callback.h"
+#include "base/gtest_prod_util.h"
#include "base/macros.h"
#include "base/memory/ref_counted_delete_on_sequence.h"
#include "gpu/command_buffer/service/ref_counted_lock.h"
@@ -75,16 +76,6 @@ class MEDIA_GPU_EXPORT CodecImage
bool CopyTexSubImage(unsigned target,
const gfx::Point& offset,
const gfx::Rect& rect) override;
- // Currently this API is depended on the implementation of
- // NotifyOverlayPromotion. since we expect overlay to use SharedImage in the
- // future.
- bool ScheduleOverlayPlane(gfx::AcceleratedWidget widget,
- int z_order,
- gfx::OverlayTransform transform,
- const gfx::Rect& bounds_rect,
- const gfx::RectF& crop_rect,
- bool enable_blend,
- std::unique_ptr<gfx::GpuFence> gpu_fence) override;
void SetColorSpace(const gfx::ColorSpace& color_space) override {}
void Flush() override {}
void OnMemoryDump(base::trace_event::ProcessMemoryDump* pmd,
diff --git a/chromium/media/gpu/android/codec_image_unittest.cc b/chromium/media/gpu/android/codec_image_unittest.cc
index 2366d99b93f..e72b597f7a8 100644
--- a/chromium/media/gpu/android/codec_image_unittest.cc
+++ b/chromium/media/gpu/android/codec_image_unittest.cc
@@ -170,13 +170,6 @@ TEST_F(CodecImageTest, CopyTexImageIsInvalidForOverlayImages) {
ASSERT_NE(gl::GLImage::COPY, i->ShouldBindOrCopy());
}
-TEST_F(CodecImageTest, ScheduleOverlayPlaneIsInvalidForTextureOwnerImages) {
- auto i = NewImage(kTextureOwner);
- ASSERT_FALSE(i->ScheduleOverlayPlane(gfx::AcceleratedWidget(), 0,
- gfx::OverlayTransform(), gfx::Rect(),
- gfx::RectF(), true, nullptr));
-}
-
TEST_F(CodecImageTest, CopyTexImageFailsIfTargetIsNotOES) {
auto i = NewImage(kTextureOwner);
ASSERT_FALSE(i->CopyTexImage(GL_TEXTURE_2D));
@@ -208,18 +201,6 @@ TEST_F(CodecImageTest, CopyTexImageTriggersFrontBufferRendering) {
ASSERT_TRUE(i->was_rendered_to_front_buffer());
}
-TEST_F(CodecImageTest, ScheduleOverlayPlaneTriggersFrontBufferRendering) {
- auto i = NewImage(kOverlay);
- EXPECT_CALL(*codec_, ReleaseOutputBuffer(_, true));
- // Also verify that it sends the appropriate promotion hint so that the
- // overlay is positioned properly.
- PromotionHintAggregator::Hint hint(gfx::Rect(1, 2, 3, 4), true);
- EXPECT_CALL(promotion_hint_receiver_, OnPromotionHint(hint));
- i->ScheduleOverlayPlane(gfx::AcceleratedWidget(), 0, gfx::OverlayTransform(),
- hint.screen_rect, gfx::RectF(), true, nullptr);
- ASSERT_TRUE(i->was_rendered_to_front_buffer());
-}
-
TEST_F(CodecImageTest, CanRenderTextureOwnerImageToBackBuffer) {
auto i = NewImage(kTextureOwner);
ASSERT_TRUE(i->RenderToTextureOwnerBackBuffer());
@@ -316,24 +297,6 @@ TEST_F(CodecImageTest, RenderToFrontBufferRestoresGLContext) {
surface = nullptr;
}
-TEST_F(CodecImageTest, ScheduleOverlayPlaneDoesntSendDuplicateHints) {
- // SOP should send only one promotion hint unless the position changes.
- auto i = NewImage(kOverlay);
- // Also verify that it sends the appropriate promotion hint so that the
- // overlay is positioned properly.
- PromotionHintAggregator::Hint hint1(gfx::Rect(1, 2, 3, 4), true);
- PromotionHintAggregator::Hint hint2(gfx::Rect(5, 6, 7, 8), true);
- EXPECT_CALL(promotion_hint_receiver_, OnPromotionHint(hint1)).Times(1);
- EXPECT_CALL(promotion_hint_receiver_, OnPromotionHint(hint2)).Times(1);
- i->ScheduleOverlayPlane(gfx::AcceleratedWidget(), 0, gfx::OverlayTransform(),
- hint1.screen_rect, gfx::RectF(), true, nullptr);
- i->ScheduleOverlayPlane(gfx::AcceleratedWidget(), 0, gfx::OverlayTransform(),
- hint1.screen_rect, gfx::RectF(), true, nullptr);
- // Sending a different rectangle should send another hint.
- i->ScheduleOverlayPlane(gfx::AcceleratedWidget(), 0, gfx::OverlayTransform(),
- hint2.screen_rect, gfx::RectF(), true, nullptr);
-}
-
TEST_F(CodecImageTest, GetAHardwareBuffer) {
auto i = NewImage(kTextureOwner);
EXPECT_EQ(codec_buffer_wait_coordinator_->texture_owner()
diff --git a/chromium/media/gpu/android/codec_wrapper.h b/chromium/media/gpu/android/codec_wrapper.h
index 751f81129f2..08471a4befc 100644
--- a/chromium/media/gpu/android/codec_wrapper.h
+++ b/chromium/media/gpu/android/codec_wrapper.h
@@ -33,6 +33,9 @@ using CodecSurfacePair = std::pair<std::unique_ptr<MediaCodecBridge>,
// soon as we know we no longer need them.
class MEDIA_GPU_EXPORT CodecOutputBuffer {
public:
+ CodecOutputBuffer(const CodecOutputBuffer&) = delete;
+ CodecOutputBuffer& operator=(const CodecOutputBuffer&) = delete;
+
// Releases the buffer without rendering it.
~CodecOutputBuffer();
@@ -72,7 +75,6 @@ class MEDIA_GPU_EXPORT CodecOutputBuffer {
bool was_rendered_ = false;
gfx::Size size_;
base::OnceClosure render_cb_;
- DISALLOW_COPY_AND_ASSIGN(CodecOutputBuffer);
};
// This wraps a MediaCodecBridge and provides higher level features and tracks
@@ -99,6 +101,10 @@ class MEDIA_GPU_EXPORT CodecWrapper {
CodecWrapper(CodecSurfacePair codec_surface_pair,
OutputReleasedCB output_buffer_release_cb,
scoped_refptr<base::SequencedTaskRunner> release_task_runner);
+
+ CodecWrapper(const CodecWrapper&) = delete;
+ CodecWrapper& operator=(const CodecWrapper&) = delete;
+
~CodecWrapper();
// Takes the backing codec and surface, implicitly discarding all outstanding
@@ -150,7 +156,6 @@ class MEDIA_GPU_EXPORT CodecWrapper {
private:
scoped_refptr<CodecWrapperImpl> impl_;
- DISALLOW_COPY_AND_ASSIGN(CodecWrapper);
};
} // namespace media
diff --git a/chromium/media/gpu/android/direct_shared_image_video_provider.cc b/chromium/media/gpu/android/direct_shared_image_video_provider.cc
index 8b77ef06e37..790089a7723 100644
--- a/chromium/media/gpu/android/direct_shared_image_video_provider.cc
+++ b/chromium/media/gpu/android/direct_shared_image_video_provider.cc
@@ -215,10 +215,10 @@ bool GpuSharedImageVideoFactory::CreateImageInternal(
// colorspace and wire it here.
// TODO(vikassoni): This shared image need to be thread safe eventually for
// webview to work with shared images.
- auto shared_image = std::make_unique<gpu::SharedImageVideo>(
+ auto shared_image = gpu::SharedImageVideo::Create(
mailbox, coded_size, gfx::ColorSpace::CreateSRGB(),
kTopLeft_GrSurfaceOrigin, kPremul_SkAlphaType, std::move(image),
- std::move(shared_context), /*is_thread_safe=*/true, std::move(drdc_lock));
+ std::move(shared_context), std::move(drdc_lock));
// Register it with shared image mailbox as well as legacy mailbox. This
// keeps |shared_image| around until its destruction cb is called.
diff --git a/chromium/media/gpu/android/direct_shared_image_video_provider.h b/chromium/media/gpu/android/direct_shared_image_video_provider.h
index 411ce85731e..2a727291e7f 100644
--- a/chromium/media/gpu/android/direct_shared_image_video_provider.h
+++ b/chromium/media/gpu/android/direct_shared_image_video_provider.h
@@ -37,6 +37,12 @@ class MEDIA_GPU_EXPORT DirectSharedImageVideoProvider
scoped_refptr<base::SingleThreadTaskRunner> gpu_task_runner,
GetStubCB get_stub_cb,
scoped_refptr<gpu::RefCountedLock> drdc_lock);
+
+ DirectSharedImageVideoProvider(const DirectSharedImageVideoProvider&) =
+ delete;
+ DirectSharedImageVideoProvider& operator=(
+ const DirectSharedImageVideoProvider&) = delete;
+
~DirectSharedImageVideoProvider() override;
// SharedImageVideoProvider
@@ -47,8 +53,6 @@ class MEDIA_GPU_EXPORT DirectSharedImageVideoProvider
base::SequenceBound<GpuSharedImageVideoFactory> gpu_factory_;
scoped_refptr<base::SingleThreadTaskRunner> gpu_task_runner_;
-
- DISALLOW_COPY_AND_ASSIGN(DirectSharedImageVideoProvider);
};
// GpuSharedImageVideoFactory creates SharedImageVideo objects. It must be run
@@ -62,6 +66,11 @@ class GpuSharedImageVideoFactory
public:
explicit GpuSharedImageVideoFactory(
SharedImageVideoProvider::GetStubCB get_stub_cb);
+
+ GpuSharedImageVideoFactory(const GpuSharedImageVideoFactory&) = delete;
+ GpuSharedImageVideoFactory& operator=(const GpuSharedImageVideoFactory&) =
+ delete;
+
~GpuSharedImageVideoFactory() override;
// Will run |init_cb| with the shared context current. |init_cb| should not
@@ -95,8 +104,6 @@ class GpuSharedImageVideoFactory
THREAD_CHECKER(thread_checker_);
base::WeakPtrFactory<GpuSharedImageVideoFactory> weak_factory_{this};
-
- DISALLOW_COPY_AND_ASSIGN(GpuSharedImageVideoFactory);
};
} // namespace media
diff --git a/chromium/media/gpu/android/fake_codec_allocator.h b/chromium/media/gpu/android/fake_codec_allocator.h
index 6816c0a75ea..9be19d4e14f 100644
--- a/chromium/media/gpu/android/fake_codec_allocator.h
+++ b/chromium/media/gpu/android/fake_codec_allocator.h
@@ -22,6 +22,10 @@ class FakeCodecAllocator : public testing::NiceMock<CodecAllocator> {
public:
explicit FakeCodecAllocator(
scoped_refptr<base::SequencedTaskRunner> task_runner);
+
+ FakeCodecAllocator(const FakeCodecAllocator&) = delete;
+ FakeCodecAllocator& operator=(const FakeCodecAllocator&) = delete;
+
~FakeCodecAllocator() override;
// These are called with some parameters of the codec config by our
@@ -59,8 +63,6 @@ class FakeCodecAllocator : public testing::NiceMock<CodecAllocator> {
private:
CodecCreatedCB pending_codec_created_cb_;
-
- DISALLOW_COPY_AND_ASSIGN(FakeCodecAllocator);
};
} // namespace media
diff --git a/chromium/media/gpu/android/maybe_render_early_manager.cc b/chromium/media/gpu/android/maybe_render_early_manager.cc
index 337a4307e8d..c82fd30b99f 100644
--- a/chromium/media/gpu/android/maybe_render_early_manager.cc
+++ b/chromium/media/gpu/android/maybe_render_early_manager.cc
@@ -20,6 +20,10 @@ namespace media {
class GpuMaybeRenderEarlyImpl {
public:
GpuMaybeRenderEarlyImpl() {}
+
+ GpuMaybeRenderEarlyImpl(const GpuMaybeRenderEarlyImpl&) = delete;
+ GpuMaybeRenderEarlyImpl& operator=(const GpuMaybeRenderEarlyImpl&) = delete;
+
~GpuMaybeRenderEarlyImpl() = default;
void SetCodecImageGroup(scoped_refptr<CodecImageGroup> image_group) {
@@ -65,8 +69,6 @@ class GpuMaybeRenderEarlyImpl {
scoped_refptr<CodecImageGroup> image_group_;
base::WeakPtrFactory<GpuMaybeRenderEarlyImpl> weak_factory_{this};
-
- DISALLOW_COPY_AND_ASSIGN(GpuMaybeRenderEarlyImpl);
};
// Default implementation of MaybeRenderEarlyManager. Lives on whatever thread
@@ -80,6 +82,11 @@ class MaybeRenderEarlyManagerImpl : public MaybeRenderEarlyManager,
: gpu::RefCountedLockHelperDrDc(std::move(drdc_lock)),
gpu_task_runner_(gpu_task_runner),
gpu_impl_(std::move(gpu_task_runner)) {}
+
+ MaybeRenderEarlyManagerImpl(const MaybeRenderEarlyManagerImpl&) = delete;
+ MaybeRenderEarlyManagerImpl& operator=(const MaybeRenderEarlyManagerImpl&) =
+ delete;
+
~MaybeRenderEarlyManagerImpl() override = default;
void SetSurfaceBundle(
@@ -117,8 +124,6 @@ class MaybeRenderEarlyManagerImpl : public MaybeRenderEarlyManager,
// Gpu-side.
base::SequenceBound<GpuMaybeRenderEarlyImpl> gpu_impl_;
-
- DISALLOW_COPY_AND_ASSIGN(MaybeRenderEarlyManagerImpl);
};
// static
diff --git a/chromium/media/gpu/android/maybe_render_early_manager.h b/chromium/media/gpu/android/maybe_render_early_manager.h
index f64a444aff8..002ea5ce8a2 100644
--- a/chromium/media/gpu/android/maybe_render_early_manager.h
+++ b/chromium/media/gpu/android/maybe_render_early_manager.h
@@ -23,6 +23,10 @@ class CodecSurfaceBundle;
class MEDIA_GPU_EXPORT MaybeRenderEarlyManager {
public:
MaybeRenderEarlyManager() = default;
+
+ MaybeRenderEarlyManager(const MaybeRenderEarlyManager&) = delete;
+ MaybeRenderEarlyManager& operator=(const MaybeRenderEarlyManager&) = delete;
+
virtual ~MaybeRenderEarlyManager() = default;
// Sets the surface bundle that future images will use.
@@ -45,8 +49,6 @@ class MEDIA_GPU_EXPORT MaybeRenderEarlyManager {
static std::unique_ptr<MaybeRenderEarlyManager> Create(
scoped_refptr<base::SequencedTaskRunner> gpu_task_runner,
scoped_refptr<gpu::RefCountedLock> drdc_lock);
-
- DISALLOW_COPY_AND_ASSIGN(MaybeRenderEarlyManager);
};
namespace internal {
diff --git a/chromium/media/gpu/android/media_codec_video_decoder.cc b/chromium/media/gpu/android/media_codec_video_decoder.cc
index af3d1e15500..577bdd3ca6f 100644
--- a/chromium/media/gpu/android/media_codec_video_decoder.cc
+++ b/chromium/media/gpu/android/media_codec_video_decoder.cc
@@ -64,7 +64,7 @@ std::vector<SupportedVideoDecoderConfig> GetSupportedConfigsInternal(
if (device_info->IsVp8DecoderAvailable()) {
// For unencrypted content, require that the size is at least 360p and that
// the MediaCodec implementation is hardware; otherwise fall back to libvpx.
- if (!device_info->IsDecoderKnownUnaccelerated(kCodecVP8)) {
+ if (!device_info->IsDecoderKnownUnaccelerated(VideoCodec::kVP8)) {
supported_configs.emplace_back(VP8PROFILE_ANY, VP8PROFILE_ANY,
gfx::Size(480, 360), gfx::Size(3840, 2160),
false, // allow_encrypted
@@ -81,7 +81,8 @@ std::vector<SupportedVideoDecoderConfig> GetSupportedConfigsInternal(
// TODO(dalecurtis): This needs to actually check the profiles available. This
// can be done by calling MediaCodecUtil::AddSupportedCodecProfileLevels.
if (device_info->IsVp9DecoderAvailable()) {
- const bool is_sw = device_info->IsDecoderKnownUnaccelerated(kCodecVP9);
+ const bool is_sw =
+ device_info->IsDecoderKnownUnaccelerated(VideoCodec::kVP9);
std::vector<CodecProfileLevel> profiles;
@@ -92,10 +93,10 @@ std::vector<SupportedVideoDecoderConfig> GetSupportedConfigsInternal(
// If we think a VP9 decoder is available, but we didn't get any profiles
// returned, just assume support for vp9.0 only.
if (profiles.empty())
- profiles.push_back({kCodecVP9, VP9PROFILE_PROFILE0, 0});
+ profiles.push_back({VideoCodec::kVP9, VP9PROFILE_PROFILE0, 0});
for (const auto& p : profiles) {
- if (p.codec != kCodecVP9)
+ if (p.codec != VideoCodec::kVP9)
continue;
// We don't compile support into libvpx for these profiles, so allow them
@@ -373,7 +374,7 @@ void MediaCodecVideoDecoder::Initialize(const VideoDecoderConfig& config,
waiting_cb_ = waiting_cb;
#if BUILDFLAG(USE_PROPRIETARY_CODECS)
- if (config.codec() == kCodecH264)
+ if (config.codec() == VideoCodec::kH264)
ExtractSpsAndPps(config.extra_data(), &csd0_, &csd1_);
#endif
@@ -820,7 +821,7 @@ void MediaCodecVideoDecoder::StartTimerOrPumpCodec() {
// TODO: Experiment with this number to save power. Since we already pump the
// codec in response to receiving a decode and output buffer release, polling
// at this frequency is likely overkill in the steady state.
- const auto kPollingPeriod = base::TimeDelta::FromMilliseconds(10);
+ const auto kPollingPeriod = base::Milliseconds(10);
if (!pump_codec_timer_.IsRunning()) {
pump_codec_timer_.Start(
FROM_HERE, kPollingPeriod,
@@ -834,7 +835,7 @@ void MediaCodecVideoDecoder::StopTimerIfIdle() {
DCHECK(!using_async_api_);
// Stop the timer if we've been idle for one second. Chosen arbitrarily.
- const auto kTimeout = base::TimeDelta::FromSeconds(1);
+ const auto kTimeout = base::Seconds(1);
if (idle_timer_.Elapsed() > kTimeout) {
DVLOG(2) << "Stopping timer; idle timeout hit";
pump_codec_timer_.Stop();
@@ -876,10 +877,11 @@ bool MediaCodecVideoDecoder::QueueInput() {
// larger based on the actual input size.
if (decoder_config_.coded_size().width() == last_width_) {
// See MediaFormatBuilder::addInputSizeInfoToFormat() for details.
- const size_t compression_ratio = (decoder_config_.codec() == kCodecH264 ||
- decoder_config_.codec() == kCodecVP8)
- ? 2
- : 4;
+ const size_t compression_ratio =
+ (decoder_config_.codec() == VideoCodec::kH264 ||
+ decoder_config_.codec() == VideoCodec::kVP8)
+ ? 2
+ : 4;
const size_t max_pixels =
(pending_decode.buffer->data_size() * compression_ratio * 2) / 3;
if (max_pixels > 8294400) // 4K
@@ -1051,8 +1053,8 @@ void MediaCodecVideoDecoder::ForwardVideoFrame(
// Record how long this frame was pending.
const base::TimeDelta duration = base::TimeTicks::Now() - started_at;
UMA_HISTOGRAM_CUSTOM_TIMES("Media.MCVD.ForwardVideoFrameTiming", duration,
- base::TimeDelta::FromMilliseconds(1),
- base::TimeDelta::FromMilliseconds(100), 25);
+ base::Milliseconds(1), base::Milliseconds(100),
+ 25);
// No |frame| indicates an error creating it.
if (!frame) {
@@ -1105,8 +1107,8 @@ void MediaCodecVideoDecoder::StartDrainingCodec(DrainType drain_type) {
// (http://crbug.com/598963).
// TODO(watk): Strongly consider blocking VP8 (or specific MediaCodecs)
// instead. Draining is responsible for a lot of complexity.
- if (decoder_config_.codec() != kCodecVP8 || !codec_ || codec_->IsFlushed() ||
- codec_->IsDrained() || using_async_api_) {
+ if (decoder_config_.codec() != VideoCodec::kVP8 || !codec_ ||
+ codec_->IsFlushed() || codec_->IsDrained() || using_async_api_) {
// If the codec isn't already drained or flushed, then we have to remember
// that we owe it a flush. We also have to remember not to deliver any
// output buffers that might still be in progress in the codec.
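Besides the time helpers, this file moves from the old unscoped kCodecH264/kCodecVP8 constants to the scoped VideoCodec::kH264 style used across the rest of the patch, and reindents the input-buffer size heuristic. Restating the visible part of that heuristic as a free function, purely for readability (the function name is illustrative; the hunk then compares the result against 8294400, the 4K pixel count):

#include <cstddef>

#include "media/base/video_codecs.h"

// Mirrors the computation in MediaCodecVideoDecoder::QueueInput() above:
// H.264 and VP8 assume roughly 2x compression, other codecs roughly 4x.
size_t EstimateMaxPixels(media::VideoCodec codec, size_t data_size) {
  const size_t compression_ratio =
      (codec == media::VideoCodec::kH264 || codec == media::VideoCodec::kVP8)
          ? 2
          : 4;
  return (data_size * compression_ratio * 2) / 3;
}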
diff --git a/chromium/media/gpu/android/media_codec_video_decoder.h b/chromium/media/gpu/android/media_codec_video_decoder.h
index add847b26ac..7abdcced569 100644
--- a/chromium/media/gpu/android/media_codec_video_decoder.h
+++ b/chromium/media/gpu/android/media_codec_video_decoder.h
@@ -67,6 +67,9 @@ class MEDIA_GPU_EXPORT MediaCodecVideoDecoder final
public:
static std::vector<SupportedVideoDecoderConfig> GetSupportedConfigs();
+ MediaCodecVideoDecoder(const MediaCodecVideoDecoder&) = delete;
+ MediaCodecVideoDecoder& operator=(const MediaCodecVideoDecoder&) = delete;
+
~MediaCodecVideoDecoder() override;
static void DestroyAsync(std::unique_ptr<MediaCodecVideoDecoder>);
@@ -350,8 +353,6 @@ class MEDIA_GPU_EXPORT MediaCodecVideoDecoder final
base::WeakPtrFactory<MediaCodecVideoDecoder> weak_factory_{this};
base::WeakPtrFactory<MediaCodecVideoDecoder> codec_allocator_weak_factory_{
this};
-
- DISALLOW_COPY_AND_ASSIGN(MediaCodecVideoDecoder);
};
} // namespace media
diff --git a/chromium/media/gpu/android/media_codec_video_decoder_unittest.cc b/chromium/media/gpu/android/media_codec_video_decoder_unittest.cc
index 958bd3ef780..b4b51a34be9 100644
--- a/chromium/media/gpu/android/media_codec_video_decoder_unittest.cc
+++ b/chromium/media/gpu/android/media_codec_video_decoder_unittest.cc
@@ -121,7 +121,8 @@ class MediaCodecVideoDecoderTest : public testing::TestWithParam<VideoCodec> {
void TearDown() override {
// For VP8, make MCVD skip the drain by resetting it. Otherwise, it's hard
// to finish the drain.
- if (mcvd_ && codec_ == kCodecVP8 && codec_allocator_->most_recent_codec)
+ if (mcvd_ && codec_ == VideoCodec::kVP8 &&
+ codec_allocator_->most_recent_codec)
DoReset();
// MCVD calls DeleteSoon() on itself, so we have to run a RunLoop.
@@ -327,7 +328,7 @@ TEST_P(MediaCodecVideoDecoderVp8Test, SmallVp8IsRejected) {
TEST_P(MediaCodecVideoDecoderAV1Test, Av1IsSupported) {
EXPECT_CALL(*device_info_, IsAv1DecoderAvailable()).WillOnce(Return(true));
- ASSERT_TRUE(Initialize(TestVideoConfig::Normal(kCodecAV1)));
+ ASSERT_TRUE(Initialize(TestVideoConfig::Normal(VideoCodec::kAV1)));
}
TEST_P(MediaCodecVideoDecoderTest, InitializeDoesntInitSurfaceOrCodec) {
@@ -955,7 +956,7 @@ TEST_P(MediaCodecVideoDecoderVp9Test, ColorSpaceIsIncludedInCodecConfig) {
VideoColorSpace::MatrixID::BT2020_CL,
gfx::ColorSpace::RangeID::LIMITED);
VideoDecoderConfig config =
- TestVideoConfig::NormalWithColorSpace(kCodecVP9, color_space);
+ TestVideoConfig::NormalWithColorSpace(VideoCodec::kVP9, color_space);
EXPECT_TRUE(InitializeFully_OneDecodePending(config));
EXPECT_EQ(color_space,
@@ -963,7 +964,7 @@ TEST_P(MediaCodecVideoDecoderVp9Test, ColorSpaceIsIncludedInCodecConfig) {
}
TEST_P(MediaCodecVideoDecoderVp9Test, HdrMetadataIsIncludedInCodecConfig) {
- VideoDecoderConfig config = TestVideoConfig::Normal(kCodecVP9);
+ VideoDecoderConfig config = TestVideoConfig::Normal(VideoCodec::kVP9);
gfx::HDRMetadata hdr_metadata;
hdr_metadata.max_frame_average_light_level = 123;
hdr_metadata.max_content_light_level = 456;
@@ -990,29 +991,29 @@ static std::vector<VideoCodec> GetTestList() {
#if BUILDFLAG(USE_PROPRIETARY_CODECS)
if (MediaCodecUtil::IsMediaCodecAvailable())
- test_codecs.push_back(kCodecH264);
+ test_codecs.push_back(VideoCodec::kH264);
#endif
if (MediaCodecUtil::IsVp8DecoderAvailable())
- test_codecs.push_back(kCodecVP8);
+ test_codecs.push_back(VideoCodec::kVP8);
if (MediaCodecUtil::IsVp9DecoderAvailable())
- test_codecs.push_back(kCodecVP9);
+ test_codecs.push_back(VideoCodec::kVP9);
if (MediaCodecUtil::IsAv1DecoderAvailable())
- test_codecs.push_back(kCodecAV1);
+ test_codecs.push_back(VideoCodec::kAV1);
return test_codecs;
}
#if BUILDFLAG(USE_PROPRIETARY_CODECS)
static std::vector<VideoCodec> GetH264IfAvailable() {
return MediaCodecUtil::IsMediaCodecAvailable()
- ? std::vector<VideoCodec>(1, kCodecH264)
+ ? std::vector<VideoCodec>(1, VideoCodec::kH264)
: std::vector<VideoCodec>();
}
#endif
static std::vector<VideoCodec> GetVp8IfAvailable() {
return MediaCodecUtil::IsVp8DecoderAvailable()
- ? std::vector<VideoCodec>(1, kCodecVP8)
+ ? std::vector<VideoCodec>(1, VideoCodec::kVP8)
: std::vector<VideoCodec>();
}
@@ -1020,13 +1021,13 @@ static std::vector<VideoCodec> GetVp8IfAvailable() {
// is fixed.
// static std::vector<VideoCodec> GetVp9IfAvailable() {
// return MediaCodecUtil::IsVp9DecoderAvailable()
-// ? std::vector<VideoCodec>(1, kCodecVP9)
+// ? std::vector<VideoCodec>(1, VideoCodec::kVP9)
// : std::vector<VideoCodec>();
// }
static std::vector<VideoCodec> GetAv1IfAvailable() {
return MediaCodecUtil::IsAv1DecoderAvailable()
- ? std::vector<VideoCodec>(1, kCodecAV1)
+ ? std::vector<VideoCodec>(1, VideoCodec::kAV1)
: std::vector<VideoCodec>();
}
diff --git a/chromium/media/gpu/android/mock_android_video_surface_chooser.h b/chromium/media/gpu/android/mock_android_video_surface_chooser.h
index 2f0fe248b00..33c4b41c162 100644
--- a/chromium/media/gpu/android/mock_android_video_surface_chooser.h
+++ b/chromium/media/gpu/android/mock_android_video_surface_chooser.h
@@ -16,6 +16,12 @@ namespace media {
class MockAndroidVideoSurfaceChooser : public AndroidVideoSurfaceChooser {
public:
MockAndroidVideoSurfaceChooser();
+
+ MockAndroidVideoSurfaceChooser(const MockAndroidVideoSurfaceChooser&) =
+ delete;
+ MockAndroidVideoSurfaceChooser& operator=(
+ const MockAndroidVideoSurfaceChooser&) = delete;
+
~MockAndroidVideoSurfaceChooser() override;
// Mocks that are called by the fakes below.
@@ -39,9 +45,6 @@ class MockAndroidVideoSurfaceChooser : public AndroidVideoSurfaceChooser {
UseTextureOwnerCB use_texture_owner_cb_;
AndroidOverlayFactoryCB factory_;
State current_state_;
-
- private:
- DISALLOW_COPY_AND_ASSIGN(MockAndroidVideoSurfaceChooser);
};
} // namespace media
diff --git a/chromium/media/gpu/android/pooled_shared_image_video_provider.h b/chromium/media/gpu/android/pooled_shared_image_video_provider.h
index e4c035af9f2..24204631018 100644
--- a/chromium/media/gpu/android/pooled_shared_image_video_provider.h
+++ b/chromium/media/gpu/android/pooled_shared_image_video_provider.h
@@ -24,6 +24,10 @@ class MEDIA_GPU_EXPORT PooledSharedImageVideoProvider
class GpuHelper {
public:
GpuHelper() = default;
+
+ GpuHelper(const GpuHelper&) = delete;
+ GpuHelper& operator=(const GpuHelper&) = delete;
+
virtual ~GpuHelper() = default;
// Called (on the gpu thread) to handle image return.
@@ -32,9 +36,6 @@ class MEDIA_GPU_EXPORT PooledSharedImageVideoProvider
scoped_refptr<CodecImageHolder> codec_image_holder,
base::OnceClosure cb,
scoped_refptr<gpu::RefCountedLock> drdc_lock) = 0;
-
- private:
- DISALLOW_COPY_AND_ASSIGN(GpuHelper);
};
// Create a default implementation. |provider| is the underlying provider to
@@ -45,6 +46,11 @@ class MEDIA_GPU_EXPORT PooledSharedImageVideoProvider
std::unique_ptr<SharedImageVideoProvider> provider,
scoped_refptr<gpu::RefCountedLock> drdc_lock);
+ PooledSharedImageVideoProvider(const PooledSharedImageVideoProvider&) =
+ delete;
+ PooledSharedImageVideoProvider& operator=(
+ const PooledSharedImageVideoProvider&) = delete;
+
~PooledSharedImageVideoProvider() override;
// SharedImageVideoProvider
@@ -129,8 +135,6 @@ class MEDIA_GPU_EXPORT PooledSharedImageVideoProvider
base::SequenceBound<GpuHelper> gpu_helper_;
base::WeakPtrFactory<PooledSharedImageVideoProvider> weak_factory_;
-
- DISALLOW_COPY_AND_ASSIGN(PooledSharedImageVideoProvider);
};
} // namespace media
diff --git a/chromium/media/gpu/android/promotion_hint_aggregator_impl.cc b/chromium/media/gpu/android/promotion_hint_aggregator_impl.cc
index cf98994cc1c..2e9c0d8d17f 100644
--- a/chromium/media/gpu/android/promotion_hint_aggregator_impl.cc
+++ b/chromium/media/gpu/android/promotion_hint_aggregator_impl.cc
@@ -15,8 +15,7 @@ namespace media {
// idea is to prevent promoting on paused / background rendering. Note that
// this time is only enforced when transitioning from unpromotable to promotable
// frames. We don't unpromote later because of this.
-constexpr base::TimeDelta MaximumInterFrameTime =
- base::TimeDelta::FromMilliseconds(100);
+constexpr base::TimeDelta MaximumInterFrameTime = base::Milliseconds(100);
// Minimum number of consecutive promotable frames before we actually start
// promoting frames.
@@ -25,7 +24,7 @@ constexpr int MinimumPromotableFrames = 10;
// Minimum time since the last unpromotable frame that we require before we will
// promote new ones.
constexpr base::TimeDelta MinimumUnpromotableFrameTime =
- base::TimeDelta::FromMilliseconds(2000);
+ base::Milliseconds(2000);
PromotionHintAggregatorImpl::PromotionHintAggregatorImpl(
const base::TickClock* tick_clock) {
diff --git a/chromium/media/gpu/android/promotion_hint_aggregator_impl.h b/chromium/media/gpu/android/promotion_hint_aggregator_impl.h
index 1d33c4cfa65..6aedfd1e15d 100644
--- a/chromium/media/gpu/android/promotion_hint_aggregator_impl.h
+++ b/chromium/media/gpu/android/promotion_hint_aggregator_impl.h
@@ -21,6 +21,11 @@ class MEDIA_GPU_EXPORT PromotionHintAggregatorImpl
// |tick_clock| may be null, in which case we will use wall clock. If it is
// not null, then it must outlive |this|. It is provided for tests.
PromotionHintAggregatorImpl(const base::TickClock* tick_clock = nullptr);
+
+ PromotionHintAggregatorImpl(const PromotionHintAggregatorImpl&) = delete;
+ PromotionHintAggregatorImpl& operator=(const PromotionHintAggregatorImpl&) =
+ delete;
+
~PromotionHintAggregatorImpl() override;
void NotifyPromotionHint(const Hint& hint) override;
@@ -40,8 +45,6 @@ class MEDIA_GPU_EXPORT PromotionHintAggregatorImpl
int consecutive_promotable_frames_ = 0;
base::WeakPtrFactory<PromotionHintAggregatorImpl> weak_ptr_factory_{this};
-
- DISALLOW_COPY_AND_ASSIGN(PromotionHintAggregatorImpl);
};
} // namespace media
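The two promotion-hint hunks above only shorten the TimeDelta spellings and delete the copy operations, but the constants summarise the promotion policy: gaps over 100 ms between frames do not count towards promotion, ten consecutive promotable frames are required, and two seconds must have passed since the last unpromotable frame. A rough sketch of how such thresholds could combine, reconstructed only from these constants and the expectations in the unit-test hunk that follows; the real class keeps more state and uses different member names:

#include "base/time/time.h"

struct PromotionSketch {
  int consecutive_promotable = 0;
  base::TimeTicks last_unpromotable;
  base::TimeTicks last_frame;

  // Returns whether the stream is currently considered promotable.
  bool NotifyFrame(bool promotable, base::TimeTicks now) {
    constexpr base::TimeDelta kMaxInterFrameTime = base::Milliseconds(100);
    constexpr base::TimeDelta kMinUnpromotableFrameTime =
        base::Milliseconds(2000);
    constexpr int kMinPromotableFrames = 10;

    if (!promotable) {
      last_unpromotable = now;
      consecutive_promotable = 0;
    } else {
      // A long gap only resets the count while still ramping up; it never
      // un-promotes a stream that already qualified.
      if (consecutive_promotable < kMinPromotableFrames &&
          now - last_frame > kMaxInterFrameTime) {
        consecutive_promotable = 0;
      }
      ++consecutive_promotable;
    }
    last_frame = now;
    return consecutive_promotable >= kMinPromotableFrames &&
           now - last_unpromotable >= kMinUnpromotableFrameTime;
  }
};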
diff --git a/chromium/media/gpu/android/promotion_hint_aggregator_impl_unittest.cc b/chromium/media/gpu/android/promotion_hint_aggregator_impl_unittest.cc
index 4e7e04acbfa..048ddc7b158 100644
--- a/chromium/media/gpu/android/promotion_hint_aggregator_impl_unittest.cc
+++ b/chromium/media/gpu/android/promotion_hint_aggregator_impl_unittest.cc
@@ -12,11 +12,10 @@
#include "base/test/simple_test_tick_clock.h"
#include "testing/gtest/include/gtest/gtest.h"
-using base::TimeDelta;
namespace {
// Default elapsed time between frames.
-constexpr TimeDelta FrameTime = TimeDelta::FromMilliseconds(10);
+constexpr base::TimeDelta FrameTime = base::Milliseconds(10);
} // namespace
namespace media {
@@ -28,7 +27,7 @@ class PromotionHintAggregatorImplTest : public testing::Test {
void SetUp() override {
// Advance the clock so that time 0 isn't recent.
- tick_clock_.Advance(TimeDelta::FromSeconds(10000));
+ tick_clock_.Advance(base::Seconds(10000));
impl_ = std::make_unique<PromotionHintAggregatorImpl>(&tick_clock_);
}
@@ -36,7 +35,7 @@ class PromotionHintAggregatorImplTest : public testing::Test {
// Sends a new frame that's |is_promotable| or not, with |elapsed| since the
// previous frame. Returns whether the video is promotable.
- bool SendFrame(bool is_promotable, TimeDelta elapsed = FrameTime) {
+ bool SendFrame(bool is_promotable, base::TimeDelta elapsed = FrameTime) {
tick_clock_.Advance(elapsed);
PromotionHintAggregator::Hint hint(gfx::Rect(), is_promotable);
impl_->NotifyPromotionHint(hint);
@@ -60,8 +59,8 @@ TEST_F(PromotionHintAggregatorImplTest, SomePromotableFramesArePromotable) {
ASSERT_TRUE(SendFrame(true));
// Waiting a while should't cause un-promotion.
- ASSERT_TRUE(SendFrame(true, TimeDelta::FromMilliseconds(10000)));
- ASSERT_TRUE(SendFrame(true, TimeDelta::FromMilliseconds(10000)));
+ ASSERT_TRUE(SendFrame(true, base::Milliseconds(10000)));
+ ASSERT_TRUE(SendFrame(true, base::Milliseconds(10000)));
}
TEST_F(PromotionHintAggregatorImplTest, UnpromotableFramesDelayPromotion) {
@@ -71,7 +70,7 @@ TEST_F(PromotionHintAggregatorImplTest, UnpromotableFramesDelayPromotion) {
// Send more until the minimum time has elapsed. Note that this will also be
// at least enough promotable frames in a row.
- while (tick_clock_.NowTicks() - start + FrameTime < TimeDelta::FromSeconds(2))
+ while (tick_clock_.NowTicks() - start + FrameTime < base::Seconds(2))
ASSERT_FALSE(SendFrame(true));
// The next frame should do it.
@@ -84,7 +83,7 @@ TEST_F(PromotionHintAggregatorImplTest, PromotableFramesMustBeFastEnough) {
ASSERT_FALSE(SendFrame(true));
// Time passes.
- tick_clock_.Advance(TimeDelta::FromMilliseconds(500));
+ tick_clock_.Advance(base::Milliseconds(500));
// We should now start over.
for (int i = 0; i < 9; i++)
diff --git a/chromium/media/gpu/android/shared_image_video_provider.h b/chromium/media/gpu/android/shared_image_video_provider.h
index 87fa7ec8d2c..bf2933dc3b7 100644
--- a/chromium/media/gpu/android/shared_image_video_provider.h
+++ b/chromium/media/gpu/android/shared_image_video_provider.h
@@ -60,6 +60,10 @@ class MEDIA_GPU_EXPORT SharedImageVideoProvider {
struct ImageRecord {
ImageRecord();
ImageRecord(ImageRecord&&);
+
+ ImageRecord(const ImageRecord&) = delete;
+ ImageRecord& operator=(const ImageRecord&) = delete;
+
~ImageRecord();
// Mailbox to which this shared image is bound.
@@ -76,12 +80,13 @@ class MEDIA_GPU_EXPORT SharedImageVideoProvider {
// Is the underlying context Vulkan? If so, then one must provide YCbCrInfo
// with the VideoFrame.
bool is_vulkan = false;
-
- private:
- DISALLOW_COPY_AND_ASSIGN(ImageRecord);
};
SharedImageVideoProvider() = default;
+
+ SharedImageVideoProvider(const SharedImageVideoProvider&) = delete;
+ SharedImageVideoProvider& operator=(const SharedImageVideoProvider&) = delete;
+
virtual ~SharedImageVideoProvider() = default;
using ImageReadyCB = base::OnceCallback<void(ImageRecord)>;
@@ -95,9 +100,6 @@ class MEDIA_GPU_EXPORT SharedImageVideoProvider {
// Call |cb| when we have a shared image that matches |spec|. We may call
// |cb| back before returning, or we might post it for later.
virtual void RequestImage(ImageReadyCB cb, const ImageSpec& spec) = 0;
-
- private:
- DISALLOW_COPY_AND_ASSIGN(SharedImageVideoProvider);
};
} // namespace media
diff --git a/chromium/media/gpu/android/surface_chooser_helper.cc b/chromium/media/gpu/android/surface_chooser_helper.cc
index 68dffb712de..9aa4c12b8fc 100644
--- a/chromium/media/gpu/android/surface_chooser_helper.cc
+++ b/chromium/media/gpu/android/surface_chooser_helper.cc
@@ -25,7 +25,7 @@ enum { kFrameDelayForFullscreenLayout = 15 };
// there's plenty of state that we don't know about (e.g., power efficiency,
// memory pressure => cancelling an old overlay, etc.). We just let the chooser
// retry every once in a while for those things.
-constexpr base::TimeDelta RetryChooserTimeout = base::TimeDelta::FromSeconds(5);
+constexpr base::TimeDelta RetryChooserTimeout = base::Seconds(5);
} // namespace
diff --git a/chromium/media/gpu/android/surface_chooser_helper.h b/chromium/media/gpu/android/surface_chooser_helper.h
index dafa7dbe1e3..0bb4f5021a3 100644
--- a/chromium/media/gpu/android/surface_chooser_helper.h
+++ b/chromium/media/gpu/android/surface_chooser_helper.h
@@ -40,6 +40,10 @@ class MEDIA_GPU_EXPORT SurfaceChooserHelper {
std::unique_ptr<PromotionHintAggregator> promotion_hint_aggregator =
nullptr,
const base::TickClock* tick_clock = nullptr);
+
+ SurfaceChooserHelper(const SurfaceChooserHelper&) = delete;
+ SurfaceChooserHelper& operator=(const SurfaceChooserHelper&) = delete;
+
~SurfaceChooserHelper();
enum class SecureSurfaceMode {
@@ -124,8 +128,6 @@ class MEDIA_GPU_EXPORT SurfaceChooserHelper {
// Since overlay positioning isn't synchronous, it's good to make sure that
// blink isn't moving the quad around too.
int hints_until_clear_relayout_flag_ = 0;
-
- DISALLOW_COPY_AND_ASSIGN(SurfaceChooserHelper);
};
} // namespace media
diff --git a/chromium/media/gpu/android/surface_chooser_helper_unittest.cc b/chromium/media/gpu/android/surface_chooser_helper_unittest.cc
index b01f63232b5..5392de24f29 100644
--- a/chromium/media/gpu/android/surface_chooser_helper_unittest.cc
+++ b/chromium/media/gpu/android/surface_chooser_helper_unittest.cc
@@ -14,7 +14,6 @@
#include "media/gpu/android/mock_promotion_hint_aggregator.h"
#include "testing/gtest/include/gtest/gtest.h"
-using base::TimeDelta;
using testing::_;
using testing::AtLeast;
@@ -36,7 +35,7 @@ class SurfaceChooserHelperTest : public testing::Test {
bool promote_secure_only,
bool always_use_texture_owner = false) {
// Advance the clock so that time 0 isn't recent.
- tick_clock_.Advance(TimeDelta::FromSeconds(10000));
+ tick_clock_.Advance(base::Seconds(10000));
std::unique_ptr<MockAndroidVideoSurfaceChooser> chooser =
std::make_unique<MockAndroidVideoSurfaceChooser>();
@@ -241,7 +240,7 @@ TEST_F(SurfaceChooserHelperTest, PromotionHintsUpdateChooserStatePeriodically) {
helper_->NotifyPromotionHintAndUpdateChooser(hint, false);
// Advancing the time and using an overlay should not send a hint.
- tick_clock_.Advance(base::TimeDelta::FromSeconds(10));
+ tick_clock_.Advance(base::Seconds(10));
EXPECT_CALL(*chooser_, MockUpdateState()).Times(0);
helper_->NotifyPromotionHintAndUpdateChooser(hint, true);
diff --git a/chromium/media/gpu/android/video_frame_factory_impl.cc b/chromium/media/gpu/android/video_frame_factory_impl.cc
index 665acf05831..3e10cdcf1d2 100644
--- a/chromium/media/gpu/android/video_frame_factory_impl.cc
+++ b/chromium/media/gpu/android/video_frame_factory_impl.cc
@@ -42,6 +42,10 @@ absl::optional<VideoFrameMetadata::CopyMode> GetVideoFrameCopyMode(
if (!enable_threaded_texture_mailboxes)
return absl::nullopt;
+ // If we can run thread-safe, we don't need to copy.
+ if (features::NeedThreadSafeAndroidMedia())
+ return absl::nullopt;
+
return features::IsWebViewZeroCopyVideoEnabled()
? VideoFrameMetadata::CopyMode::kCopyMailboxesOnly
: VideoFrameMetadata::CopyMode::kCopyToNewTexture;
diff --git a/chromium/media/gpu/android/video_frame_factory_impl.h b/chromium/media/gpu/android/video_frame_factory_impl.h
index 744cc2a13da..46628e3204d 100644
--- a/chromium/media/gpu/android/video_frame_factory_impl.h
+++ b/chromium/media/gpu/android/video_frame_factory_impl.h
@@ -55,6 +55,10 @@ class MEDIA_GPU_EXPORT VideoFrameFactoryImpl
std::unique_ptr<MaybeRenderEarlyManager> mre_manager,
std::unique_ptr<FrameInfoHelper> frame_info_helper,
scoped_refptr<gpu::RefCountedLock> drdc_lock);
+
+ VideoFrameFactoryImpl(const VideoFrameFactoryImpl&) = delete;
+ VideoFrameFactoryImpl& operator=(const VideoFrameFactoryImpl&) = delete;
+
~VideoFrameFactoryImpl() override;
void Initialize(OverlayMode overlay_mode, InitCB init_cb) override;
@@ -137,8 +141,6 @@ class MEDIA_GPU_EXPORT VideoFrameFactoryImpl
SEQUENCE_CHECKER(sequence_checker_);
base::WeakPtrFactory<VideoFrameFactoryImpl> weak_factory_{this};
-
- DISALLOW_COPY_AND_ASSIGN(VideoFrameFactoryImpl);
};
} // namespace media
diff --git a/chromium/media/gpu/args.gni b/chromium/media/gpu/args.gni
index 4004937949e..2f53843918d 100644
--- a/chromium/media/gpu/args.gni
+++ b/chromium/media/gpu/args.gni
@@ -3,7 +3,15 @@
# found in the LICENSE file.
import("//build/config/chromeos/ui_mode.gni")
-import("//build/config/ui.gni")
+import("//build/config/ozone.gni")
+
+declare_args() {
+ # Indicates if X11 VA-API-based hardware acceleration is to be used.
+ # See also the comment near the |use_vaapi| arg.
+ use_vaapi_x11 =
+ is_linux && ozone_platform_x11 && !is_chromecast && !is_chromeos_lacros &&
+ (target_cpu == "x86" || target_cpu == "x64")
+}
declare_args() {
# Indicates if V4L plugin is used.
@@ -21,8 +29,7 @@ declare_args() {
# is typically the case on x86-based ChromeOS devices.
# VA-API should also be compiled by default on x11-using linux devices
# using x86/x64.
- use_vaapi =
- is_linux && use_x11 && (target_cpu == "x86" || target_cpu == "x64")
+ use_vaapi = use_vaapi_x11
# Indicates if ChromeOS protected media support exists. This is used
# to enable the CDM daemon in Chrome OS as well as support for
diff --git a/chromium/media/gpu/av1_decoder.cc b/chromium/media/gpu/av1_decoder.cc
index c3e4d142157..cc4eb3edc76 100644
--- a/chromium/media/gpu/av1_decoder.cc
+++ b/chromium/media/gpu/av1_decoder.cc
@@ -378,7 +378,8 @@ AcceleratedVideoDecoder::DecodeResult AV1Decoder::DecodeInternal() {
pic->set_colorspace(container_color_space_);
pic->frame_header = frame_header;
- pic->set_decrypt_config(std::move(decrypt_config_));
+ if (decrypt_config_)
+ pic->set_decrypt_config(decrypt_config_->Clone());
const AV1Accelerator::Status status =
DecodeAndOutputPicture(std::move(pic), parser_->tile_buffers());
if (status == AV1Accelerator::Status::kFail)
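The av1_decoder.cc hunk stops moving decrypt_config_ into the picture and instead clones it behind a null check, so the decoder's own member stays populated after the call. A minimal sketch of the difference; Picture stands in for the real picture type:

#include <memory>

#include "media/base/decrypt_config.h"

template <typename Picture>
void AttachDecryptConfig(Picture& pic,
                         std::unique_ptr<media::DecryptConfig>& decrypt_config) {
  // Before: pic.set_decrypt_config(std::move(decrypt_config));
  //         which leaves |decrypt_config| null afterwards.
  // After: clone only when present, keeping |decrypt_config| intact.
  if (decrypt_config)
    pic.set_decrypt_config(decrypt_config->Clone());
}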
diff --git a/chromium/media/gpu/chromeos/BUILD.gn b/chromium/media/gpu/chromeos/BUILD.gn
index 26fc1fb5ea3..5e84665a8f0 100644
--- a/chromium/media/gpu/chromeos/BUILD.gn
+++ b/chromium/media/gpu/chromeos/BUILD.gn
@@ -31,7 +31,10 @@ source_set("chromeos") {
]
if (use_vaapi) {
- deps += [ "//media/gpu/vaapi" ]
+ deps += [
+ "//media/gpu/vaapi",
+ "//media/gpu/vaapi:common",
+ ]
}
if (use_v4l2_codec) {
@@ -77,6 +80,7 @@ source_set("common") {
deps = [
":fourcc",
"//base",
+ "//build:chromeos_buildflags",
"//build/config/linux/libdrm",
"//gpu/ipc/common:common",
"//media",
diff --git a/chromium/media/gpu/chromeos/dmabuf_video_frame_pool.h b/chromium/media/gpu/chromeos/dmabuf_video_frame_pool.h
index aff3c945182..20e1bbc845c 100644
--- a/chromium/media/gpu/chromeos/dmabuf_video_frame_pool.h
+++ b/chromium/media/gpu/chromeos/dmabuf_video_frame_pool.h
@@ -7,6 +7,7 @@
#include "base/memory/scoped_refptr.h"
#include "base/sequenced_task_runner.h"
+#include "media/base/status.h"
#include "media/base/video_frame.h"
#include "media/gpu/chromeos/fourcc.h"
#include "media/gpu/chromeos/gpu_buffer_layout.h"
@@ -40,19 +41,20 @@ class MEDIA_GPU_EXPORT DmabufVideoFramePool {
scoped_refptr<base::SequencedTaskRunner> parent_task_runner);
// Sets the parameters of allocating frames and the maximum number of frames
- // which can be allocated. Returns a valid GpuBufferLayout if VideoFrame
- // will be created by GetFrame().
- virtual absl::optional<GpuBufferLayout> Initialize(
- const Fourcc& fourcc,
- const gfx::Size& coded_size,
- const gfx::Rect& visible_rect,
- const gfx::Size& natural_size,
- size_t max_num_frames,
- bool use_protected) = 0;
+ // which can be allocated.
+ // Returns a valid GpuBufferLayout if the initialization is successful.
+ // Returns StatusCode::kAborted if the initialization process is aborted.
+ // Returns StatusCode::kInvalidArgument if any other error occurs.
+ virtual StatusOr<GpuBufferLayout> Initialize(const Fourcc& fourcc,
+ const gfx::Size& coded_size,
+ const gfx::Rect& visible_rect,
+ const gfx::Size& natural_size,
+ size_t max_num_frames,
+ bool use_protected) = 0;
- // Returns a frame from the pool with the parameters assigned by
- // SetFrameFormat() and zero timestamp. Returns nullptr if the pool is
- // exhausted.
+ // Returns a frame from the pool with the layout that is returned by the
+ // previous Initialize() method and zero timestamp. Returns nullptr if the
+ // pool is exhausted.
virtual scoped_refptr<VideoFrame> GetFrame() = 0;
// Checks whether the pool is exhausted. This happens when the pool reached
@@ -66,6 +68,11 @@ class MEDIA_GPU_EXPORT DmabufVideoFramePool {
// would be dropped immediately.
virtual void NotifyWhenFrameAvailable(base::OnceClosure cb) = 0;
+ // Invoke to cause the pool to release all the frames it has allocated before
+ // which will cause new ones to be allocated. This method must be called on
+ // |parent_task_runner_| because it may invalidate weak ptrs.
+ virtual void ReleaseAllFrames() = 0;
+
protected:
scoped_refptr<base::SequencedTaskRunner> parent_task_runner_;
};
diff --git a/chromium/media/gpu/chromeos/generic_dmabuf_video_frame_mapper.h b/chromium/media/gpu/chromeos/generic_dmabuf_video_frame_mapper.h
index a5c72137517..ed2523242c8 100644
--- a/chromium/media/gpu/chromeos/generic_dmabuf_video_frame_mapper.h
+++ b/chromium/media/gpu/chromeos/generic_dmabuf_video_frame_mapper.h
@@ -17,6 +17,10 @@ class MEDIA_GPU_EXPORT GenericDmaBufVideoFrameMapper : public VideoFrameMapper {
static std::unique_ptr<GenericDmaBufVideoFrameMapper> Create(
VideoPixelFormat format);
+ GenericDmaBufVideoFrameMapper(const GenericDmaBufVideoFrameMapper&) = delete;
+ GenericDmaBufVideoFrameMapper& operator=(
+ const GenericDmaBufVideoFrameMapper&) = delete;
+
~GenericDmaBufVideoFrameMapper() override = default;
// VideoFrameMapper implementation.
scoped_refptr<VideoFrame> Map(
@@ -24,8 +28,6 @@ class MEDIA_GPU_EXPORT GenericDmaBufVideoFrameMapper : public VideoFrameMapper {
private:
explicit GenericDmaBufVideoFrameMapper(VideoPixelFormat format);
-
- DISALLOW_COPY_AND_ASSIGN(GenericDmaBufVideoFrameMapper);
};
} // namespace media
diff --git a/chromium/media/gpu/chromeos/gpu_memory_buffer_video_frame_mapper.h b/chromium/media/gpu/chromeos/gpu_memory_buffer_video_frame_mapper.h
index af64b2c9ebf..82239f5f22c 100644
--- a/chromium/media/gpu/chromeos/gpu_memory_buffer_video_frame_mapper.h
+++ b/chromium/media/gpu/chromeos/gpu_memory_buffer_video_frame_mapper.h
@@ -18,6 +18,11 @@ class MEDIA_GPU_EXPORT GpuMemoryBufferVideoFrameMapper
static std::unique_ptr<GpuMemoryBufferVideoFrameMapper> Create(
VideoPixelFormat format);
+ GpuMemoryBufferVideoFrameMapper(const GpuMemoryBufferVideoFrameMapper&) =
+ delete;
+ GpuMemoryBufferVideoFrameMapper& operator=(
+ const GpuMemoryBufferVideoFrameMapper&) = delete;
+
~GpuMemoryBufferVideoFrameMapper() override = default;
// VideoFrameMapper implementation.
@@ -26,8 +31,6 @@ class MEDIA_GPU_EXPORT GpuMemoryBufferVideoFrameMapper
private:
explicit GpuMemoryBufferVideoFrameMapper(VideoPixelFormat format);
-
- DISALLOW_COPY_AND_ASSIGN(GpuMemoryBufferVideoFrameMapper);
};
} // namespace media
diff --git a/chromium/media/gpu/chromeos/image_processor.cc b/chromium/media/gpu/chromeos/image_processor.cc
index 6c40df02254..873901620b5 100644
--- a/chromium/media/gpu/chromeos/image_processor.cc
+++ b/chromium/media/gpu/chromeos/image_processor.cc
@@ -9,7 +9,6 @@
#include <sstream>
#include "base/bind.h"
-#include "base/callback_helpers.h"
#include "base/memory/ptr_util.h"
#include "base/task/post_task.h"
#include "base/task/task_traits.h"
@@ -39,7 +38,13 @@ bool CheckVideoFrameFormat(const ImageProcessor::PortConfig& config,
return false;
}
- // TODO(b/195351653): Add visible_rect check here.
+ if (frame.visible_rect() != config.visible_rect) {
+ VLOGF(1) << "Invalid frame visible rectangle="
+ << frame.visible_rect().ToString()
+ << ", expected=" << config.visible_rect.ToString();
+ return false;
+ }
+
return true;
}
@@ -98,11 +103,7 @@ ImageProcessor::~ImageProcessor() {
weak_this_factory_.InvalidateWeakPtrs();
// Delete |backend_| on |backend_task_runner_|.
- backend_task_runner_->PostTask(
- FROM_HERE,
- base::BindOnce(
- base::DoNothing::Once<std::unique_ptr<ImageProcessorBackend>>(),
- std::move(backend_)));
+ backend_task_runner_->DeleteSoon(FROM_HERE, std::move(backend_));
}
bool ImageProcessor::Process(scoped_refptr<VideoFrame> input_frame,
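The destructor hunk above replaces a PostTask carrying a do-nothing callback bound to the backend with base::SequencedTaskRunner::DeleteSoon(), which exists for exactly this "destroy the object on its own sequence" pattern. A minimal sketch with a hypothetical Backend type:

#include <memory>
#include <utility>

#include "base/location.h"
#include "base/memory/scoped_refptr.h"
#include "base/sequenced_task_runner.h"

struct Backend {};  // stands in for ImageProcessorBackend

void DestroyOnBackendSequence(scoped_refptr<base::SequencedTaskRunner> runner,
                              std::unique_ptr<Backend> backend) {
  // Equivalent to posting a task whose only job is to own and then drop
  // |backend|, without the DoNothing boilerplate the old code needed.
  runner->DeleteSoon(FROM_HERE, std::move(backend));
}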
diff --git a/chromium/media/gpu/chromeos/image_processor_backend.h b/chromium/media/gpu/chromeos/image_processor_backend.h
index 6b0c86f5bc8..8b9b361d114 100644
--- a/chromium/media/gpu/chromeos/image_processor_backend.h
+++ b/chromium/media/gpu/chromeos/image_processor_backend.h
@@ -54,6 +54,13 @@ class MEDIA_GPU_EXPORT ImageProcessorBackend {
const std::vector<VideoFrame::StorageType>& preferred_storage_types);
~PortConfig();
+ bool operator==(const PortConfig& other) const {
+ return fourcc == other.fourcc && size == other.size &&
+ planes == other.planes && visible_rect == other.visible_rect &&
+ preferred_storage_types == other.preferred_storage_types;
+ }
+ bool operator!=(const PortConfig& other) const { return !(*this == other); }
+
// Get the first |preferred_storage_types|.
// If |preferred_storage_types| is empty, return STORAGE_UNKNOWN.
VideoFrame::StorageType storage_type() const {
diff --git a/chromium/media/gpu/chromeos/image_processor_factory.cc b/chromium/media/gpu/chromeos/image_processor_factory.cc
index c39717fa767..27ffcc43188 100644
--- a/chromium/media/gpu/chromeos/image_processor_factory.cc
+++ b/chromium/media/gpu/chromeos/image_processor_factory.cc
@@ -15,6 +15,11 @@
#include "media/gpu/chromeos/libyuv_image_processor_backend.h"
#include "media/gpu/macros.h"
+#if BUILDFLAG(USE_VAAPI)
+#include "media/gpu/vaapi/vaapi_image_processor_backend.h"
+#include "media/gpu/vaapi/vaapi_wrapper.h"
+#endif // BUILDFLAG(USE_VAAPI)
+
#if BUILDFLAG(USE_V4L2_CODEC)
#include "media/gpu/v4l2/v4l2_device.h"
#include "media/gpu/v4l2/v4l2_image_processor_backend.h"
@@ -25,6 +30,55 @@ namespace media {
namespace {
+#if BUILDFLAG(USE_VAAPI)
+std::unique_ptr<ImageProcessor> CreateVaapiImageProcessorWithInputCandidates(
+ const std::vector<std::pair<Fourcc, gfx::Size>>& input_candidates,
+ const gfx::Rect& input_visible_rect,
+ const gfx::Size& output_size,
+ scoped_refptr<base::SequencedTaskRunner> client_task_runner,
+ ImageProcessorFactory::PickFormatCB out_format_picker,
+ ImageProcessor::ErrorCB error_cb) {
+ std::vector<Fourcc> vpp_supported_formats =
+ VaapiWrapper::GetVppSupportedFormats();
+ absl::optional<std::pair<Fourcc, gfx::Size>> chosen_input_candidate;
+ for (const auto& input_candidate : input_candidates) {
+ if (base::Contains(vpp_supported_formats, input_candidate.first) &&
+ VaapiWrapper::IsVppResolutionAllowed(input_candidate.second)) {
+ chosen_input_candidate = input_candidate;
+ break;
+ }
+ }
+ if (!chosen_input_candidate)
+ return nullptr;
+
+ // Note that we pick the first input candidate as the preferred output format.
+ // The reason is that in practice, the VaapiVideoDecoder will make
+ // |input_candidates| either {NV12} or {P010} depending on the bitdepth. So
+ // choosing the first (and only) element will keep the bitdepth of the frame
+ // which is needed to display HDR content.
+ auto chosen_output_format =
+ out_format_picker.Run(/*candidates=*/vpp_supported_formats,
+ /*preferred_fourcc=*/input_candidates[0].first);
+ if (!chosen_output_format)
+ return nullptr;
+
+ // Note: the VaapiImageProcessorBackend doesn't use the ColorPlaneLayouts in
+ // the PortConfigs, so we just pass an empty list of plane layouts.
+ ImageProcessor::PortConfig input_config(
+ /*fourcc=*/chosen_input_candidate->first,
+ /*size=*/chosen_input_candidate->second, /*planes=*/{},
+ input_visible_rect, {VideoFrame::STORAGE_GPU_MEMORY_BUFFER});
+ ImageProcessor::PortConfig output_config(
+ /*fourcc=*/*chosen_output_format, /*size=*/output_size, /*planes=*/{},
+ /*visible_rect=*/gfx::Rect(output_size),
+ {VideoFrame::STORAGE_GPU_MEMORY_BUFFER});
+ return ImageProcessor::Create(
+ base::BindRepeating(&VaapiImageProcessorBackend::Create), input_config,
+ output_config, {ImageProcessor::OutputMode::IMPORT}, VIDEO_ROTATION_0,
+ std::move(error_cb), std::move(client_task_runner));
+}
+#endif // BUILDFLAG(USE_VAAPI)
+
#if BUILDFLAG(USE_V4L2_CODEC)
std::unique_ptr<ImageProcessor> CreateV4L2ImageProcessorWithInputCandidates(
const std::vector<std::pair<Fourcc, gfx::Size>>& input_candidates,
@@ -46,7 +100,8 @@ std::unique_ptr<ImageProcessor> CreateV4L2ImageProcessorWithInputCandidates(
supported_fourccs.push_back(*fourcc);
}
- const auto output_fourcc = out_format_picker.Run(supported_fourccs);
+ const auto output_fourcc = out_format_picker.Run(
+ /*candidates=*/supported_fourccs, /*preferred_fourcc=*/absl::nullopt);
if (!output_fourcc)
return nullptr;
@@ -92,9 +147,9 @@ std::unique_ptr<ImageProcessor> ImageProcessorFactory::Create(
ImageProcessor::ErrorCB error_cb) {
std::vector<ImageProcessor::CreateBackendCB> create_funcs;
#if BUILDFLAG(USE_VAAPI)
- NOTIMPLEMENTED();
-#endif // BUILDFLAG(USE_VAAPI)
-#if BUILDFLAG(USE_V4L2_CODEC)
+ create_funcs.push_back(
+ base::BindRepeating(&VaapiImageProcessorBackend::Create));
+#elif BUILDFLAG(USE_V4L2_CODEC)
create_funcs.push_back(base::BindRepeating(
&V4L2ImageProcessorBackend::Create, V4L2Device::Create(), num_buffers));
#endif // BUILDFLAG(USE_V4L2_CODEC)
@@ -117,21 +172,32 @@ std::unique_ptr<ImageProcessor> ImageProcessorFactory::Create(
std::unique_ptr<ImageProcessor>
ImageProcessorFactory::CreateWithInputCandidates(
const std::vector<std::pair<Fourcc, gfx::Size>>& input_candidates,
- const gfx::Size& visible_size,
+ const gfx::Rect& input_visible_rect,
+ const gfx::Size& output_size,
size_t num_buffers,
scoped_refptr<base::SequencedTaskRunner> client_task_runner,
PickFormatCB out_format_picker,
ImageProcessor::ErrorCB error_cb) {
-#if BUILDFLAG(USE_V4L2_CODEC)
+#if BUILDFLAG(USE_VAAPI)
+ auto processor = CreateVaapiImageProcessorWithInputCandidates(
+ input_candidates, input_visible_rect, output_size, client_task_runner,
+ out_format_picker, error_cb);
+ if (processor)
+ return processor;
+#elif BUILDFLAG(USE_V4L2_CODEC)
+ // TODO(andrescj): we need to pass the |input_visible_rect| along for the V4L2
+ // ImageProcessor.
auto processor = CreateV4L2ImageProcessorWithInputCandidates(
- input_candidates, visible_size, num_buffers, client_task_runner,
+ input_candidates, output_size, num_buffers, client_task_runner,
out_format_picker, error_cb);
if (processor)
return processor;
#endif // BUILDFLAG(USE_V4L2_CODEC)
- // TODO(crbug.com/1004727): Implement LibYUVImageProcessorBackend and
- // VaapiImageProcessorBackend.
+ // TODO(crbug.com/1004727): Implement LibYUVImageProcessorBackend. When doing
+ // so, we must keep in mind that it might not be desirable to fall back to
+ // libyuv if the hardware image processor fails (e.g., in the case of
+ // protected content).
return nullptr;
}
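The VA-API path above walks |input_candidates| in order and keeps the first one whose format the VPP supports and whose resolution it allows. A minimal standalone sketch of that selection rule follows; FourccCode, Size and the resolution limit are stand-ins introduced for illustration, not the Chromium API.

// Illustrative sketch only; FourccCode/Size are hypothetical stand-ins for
// media::Fourcc/gfx::Size.
#include <algorithm>
#include <cstdint>
#include <optional>
#include <utility>
#include <vector>

struct Size { int width = 0; int height = 0; };
using FourccCode = uint32_t;

// Returns the first candidate that passes both the format and the resolution
// check, mirroring the loop in CreateVaapiImageProcessorWithInputCandidates.
std::optional<std::pair<FourccCode, Size>> PickFirstSupportedCandidate(
    const std::vector<std::pair<FourccCode, Size>>& candidates,
    const std::vector<FourccCode>& vpp_supported_formats,
    const Size& max_vpp_resolution /* assumed VPP limit */) {
  for (const auto& candidate : candidates) {
    const bool format_ok =
        std::find(vpp_supported_formats.begin(), vpp_supported_formats.end(),
                  candidate.first) != vpp_supported_formats.end();
    const bool resolution_ok =
        candidate.second.width <= max_vpp_resolution.width &&
        candidate.second.height <= max_vpp_resolution.height;
    if (format_ok && resolution_ok)
      return candidate;  // First acceptable candidate wins.
  }
  return std::nullopt;
}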
diff --git a/chromium/media/gpu/chromeos/image_processor_factory.h b/chromium/media/gpu/chromeos/image_processor_factory.h
index 986c3a3a752..3d56115a36a 100644
--- a/chromium/media/gpu/chromeos/image_processor_factory.h
+++ b/chromium/media/gpu/chromeos/image_processor_factory.h
@@ -22,9 +22,11 @@ namespace media {
class MEDIA_GPU_EXPORT ImageProcessorFactory {
public:
- // Callback to pick a valid format from given |candidates| formats.
+ // Callback to pick a valid format from the given |candidates| formats, giving
+ // preference to |preferred_fourcc| if provided.
using PickFormatCB = base::RepeatingCallback<absl::optional<Fourcc>(
- const std::vector<Fourcc>& /* candidates */)>;
+ const std::vector<Fourcc>& /* candidates */,
+ absl::optional<Fourcc> /* preferred_fourcc */)>;
// Factory method to create ImageProcessor.
// Given input and output PortConfig, it tries to find out the most suitable
@@ -59,14 +61,18 @@ class MEDIA_GPU_EXPORT ImageProcessorFactory {
scoped_refptr<base::SequencedTaskRunner> client_task_runner,
ImageProcessor::ErrorCB error_cb);
- // Factory method to create ImageProcessor.
- // Unlike Create(), caller gives a list of valid input for the
- // ImageProcessor, |candidates|; frame's |input_size|; |out_format_picker| for
- // caller to pick a valid output. With the parameters the factory can
- // instantiate a suitable ImageProcessor if exists.
+ // Factory method to create an ImageProcessor.
+ // Unlike Create(), the caller passes a list of supported inputs,
+ // |input_candidates|. It also passes the |input_visible_rect| and the desired
+ // |output_size|. |out_format_picker| allows us to negotiate the output
+ // format: we'll call it with a list of supported formats and (possibly) a
+ // preferred one, and the callback picks one of them. With the rest of the
+ // parameters, the factory can instantiate a suitable ImageProcessor. Returns
+ // nullptr if an ImageProcessor can't be created.
static std::unique_ptr<ImageProcessor> CreateWithInputCandidates(
const std::vector<std::pair<Fourcc, gfx::Size>>& input_candidates,
- const gfx::Size& visible_size,
+ const gfx::Rect& input_visible_rect,
+ const gfx::Size& output_size,
size_t num_buffers,
scoped_refptr<base::SequencedTaskRunner> client_task_runner,
PickFormatCB out_format_picker,
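A picker with the new two-argument PickFormatCB signature might look like the sketch below; std::function and std::optional stand in for base::RepeatingCallback and absl::optional, and the fallback policy is only an example, not the behavior of any Chromium caller.

// Illustrative sketch only; not the Chromium callback type.
#include <cstdint>
#include <functional>
#include <optional>
#include <vector>

using FourccCode = uint32_t;  // stand-in for media::Fourcc

using PickFormatFn = std::function<std::optional<FourccCode>(
    const std::vector<FourccCode>& /*candidates*/,
    std::optional<FourccCode> /*preferred_fourcc*/)>;

// Prefers |preferred_fourcc| when it is among the candidates; otherwise falls
// back to the first candidate (or nothing if the list is empty).
PickFormatFn MakeSimplePicker() {
  return [](const std::vector<FourccCode>& candidates,
            std::optional<FourccCode> preferred_fourcc)
             -> std::optional<FourccCode> {
    if (preferred_fourcc) {
      for (FourccCode c : candidates) {
        if (c == *preferred_fourcc)
          return preferred_fourcc;
      }
    }
    if (candidates.empty())
      return std::nullopt;
    return candidates.front();
  };
}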
diff --git a/chromium/media/gpu/chromeos/image_processor_test.cc b/chromium/media/gpu/chromeos/image_processor_test.cc
index 193c9206ed1..6269ab34e78 100644
--- a/chromium/media/gpu/chromeos/image_processor_test.cc
+++ b/chromium/media/gpu/chromeos/image_processor_test.cc
@@ -51,11 +51,6 @@ bool g_save_images = false;
media::test::VideoTestEnvironment* g_env;
// Files for pixel format conversion test.
-// TODO(crbug.com/944822): Use kI420Image for I420 -> NV12 test case. It is
-// currently disabled because there is currently no way of creating DMABUF I420
-// buffer by NativePixmap.
-// constexpr const base::FilePath::CharType* kI420Image =
-// FILE_PATH_LITERAL("bear_320x192.i420.yuv");
const base::FilePath::CharType* kNV12Image =
FILE_PATH_LITERAL("bear_320x192.nv12.yuv");
const base::FilePath::CharType* kRGBAImage =
@@ -64,6 +59,12 @@ const base::FilePath::CharType* kBGRAImage =
FILE_PATH_LITERAL("bear_320x192.bgra");
const base::FilePath::CharType* kYV12Image =
FILE_PATH_LITERAL("bear_320x192.yv12.yuv");
+const base::FilePath::CharType* kI420Image =
+ FILE_PATH_LITERAL("bear_320x192.i420.yuv");
+const base::FilePath::CharType* kI422Image =
+ FILE_PATH_LITERAL("bear_320x192.i422.yuv");
+const base::FilePath::CharType* kYUYVImage =
+ FILE_PATH_LITERAL("bear_320x192.yuyv.yuv");
// Files for scaling test.
const base::FilePath::CharType* kNV12Image720P =
@@ -76,6 +77,14 @@ const base::FilePath::CharType* kNV12Image180P =
FILE_PATH_LITERAL("puppets-320x180.nv12.yuv");
const base::FilePath::CharType* kNV12Image360PIn480P =
FILE_PATH_LITERAL("puppets-640x360_in_640x480.nv12.yuv");
+const base::FilePath::CharType* kI422Image360P =
+ FILE_PATH_LITERAL("puppets-640x360.i422.yuv");
+const base::FilePath::CharType* kYUYVImage360P =
+ FILE_PATH_LITERAL("puppets-640x360.yuyv.yuv");
+const base::FilePath::CharType* kI420Image360P =
+ FILE_PATH_LITERAL("puppets-640x360.i420.yuv");
+const base::FilePath::CharType* kI420Image270P =
+ FILE_PATH_LITERAL("puppets-480x270.i420.yuv");
// Files for rotation test.
const base::FilePath::CharType* kNV12Image90 =
@@ -85,6 +94,41 @@ const base::FilePath::CharType* kNV12Image180 =
const base::FilePath::CharType* kNV12Image270 =
FILE_PATH_LITERAL("bear_192x320_270.nv12.yuv");
+enum class YuvSubsampling {
+ kYuv420,
+ kYuv422,
+ kYuv444,
+};
+
+YuvSubsampling ToYuvSubsampling(VideoPixelFormat format) {
+ switch (format) {
+ case PIXEL_FORMAT_I420:
+ case PIXEL_FORMAT_NV12:
+ case PIXEL_FORMAT_YV12:
+ return YuvSubsampling::kYuv420;
+ case PIXEL_FORMAT_I422:
+ case PIXEL_FORMAT_YUY2:
+ return YuvSubsampling::kYuv422;
+ default:
+ NOTREACHED() << "Invalid format " << format;
+ return YuvSubsampling::kYuv444;
+ }
+}
+
+#if BUILDFLAG(IS_CHROMEOS_ASH)
+bool IsFormatTestedForDmabufAndGbm(VideoPixelFormat format) {
+ switch (format) {
+ case PIXEL_FORMAT_ABGR:
+ case PIXEL_FORMAT_ARGB:
+ case PIXEL_FORMAT_NV12:
+ case PIXEL_FORMAT_YV12:
+ return true;
+ default:
+ return false;
+ }
+}
+#endif // BUILDFLAG(IS_CHROMEOS_ASH)
+
class ImageProcessorParamTest
: public ::testing::Test,
public ::testing::WithParamInterface<
@@ -152,7 +196,9 @@ class ImageProcessorParamTest
// Validating processed frames is currently not supported when a format is
// not YUV or when scaling images.
if (IsYuvPlanar(input_fourcc.ToVideoPixelFormat()) &&
- IsYuvPlanar(output_fourcc.ToVideoPixelFormat())) {
+ IsYuvPlanar(output_fourcc.ToVideoPixelFormat()) &&
+ ToYuvSubsampling(input_fourcc.ToVideoPixelFormat()) ==
+ ToYuvSubsampling(output_fourcc.ToVideoPixelFormat())) {
if (input_image.Size() == output_image->Size()) {
auto vf_validator = test::MD5VideoFrameValidator::Create(
{output_image->Checksum()}, output_image->PixelFormat());
@@ -233,6 +279,8 @@ TEST_P(ImageProcessorParamTest, ConvertOneTime_DmabufToMem) {
test::Image output_image(std::get<1>(GetParam()));
ASSERT_TRUE(input_image.Load());
ASSERT_TRUE(output_image.LoadMetadata());
+ if (!IsFormatTestedForDmabufAndGbm(input_image.PixelFormat()))
+ GTEST_SKIP() << "Skipping Dmabuf format " << input_image.PixelFormat();
auto ip_client = CreateImageProcessorClient(
input_image, {VideoFrame::STORAGE_DMABUFS}, &output_image,
{VideoFrame::STORAGE_OWNED_MEMORY});
@@ -253,6 +301,10 @@ TEST_P(ImageProcessorParamTest, ConvertOneTime_DmabufToDmabuf) {
test::Image output_image(std::get<1>(GetParam()));
ASSERT_TRUE(input_image.Load());
ASSERT_TRUE(output_image.LoadMetadata());
+ if (!IsFormatTestedForDmabufAndGbm(input_image.PixelFormat()))
+ GTEST_SKIP() << "Skipping Dmabuf format " << input_image.PixelFormat();
+ if (!IsFormatTestedForDmabufAndGbm(output_image.PixelFormat()))
+ GTEST_SKIP() << "Skipping Dmabuf format " << output_image.PixelFormat();
auto ip_client =
CreateImageProcessorClient(input_image, {VideoFrame::STORAGE_DMABUFS},
@@ -275,6 +327,14 @@ TEST_P(ImageProcessorParamTest, ConvertOneTime_GmbToGmb) {
test::Image output_image(std::get<1>(GetParam()));
ASSERT_TRUE(input_image.Load());
ASSERT_TRUE(output_image.LoadMetadata());
+ if (!IsFormatTestedForDmabufAndGbm(input_image.PixelFormat())) {
+ GTEST_SKIP() << "Skipping GpuMemoryBuffer format "
+ << input_image.PixelFormat();
+ }
+ if (!IsFormatTestedForDmabufAndGbm(output_image.PixelFormat())) {
+ GTEST_SKIP() << "Skipping GpuMemoryBuffer format "
+ << output_image.PixelFormat();
+ }
auto ip_client = CreateImageProcessorClient(
input_image, {VideoFrame::STORAGE_GPU_MEMORY_BUFFER}, &output_image,
@@ -289,20 +349,22 @@ TEST_P(ImageProcessorParamTest, ConvertOneTime_GmbToGmb) {
}
#endif // BUILDFLAG(IS_CHROMEOS_ASH)
-// BGRA -> NV12
-// I420 -> NV12
-// RGBA -> NV12
-// YV12 -> NV12
INSTANTIATE_TEST_SUITE_P(
PixelFormatConversionToNV12,
ImageProcessorParamTest,
::testing::Values(std::make_tuple(kBGRAImage, kNV12Image),
- // TODO(crbug.com/944822): Add I420 -> NV12 test case.
- // There is currently no way of creating DMABUF
- // I420 buffer by NativePixmap.
- // std::make_tuple(kI420Image, kNV12Image),
+ std::make_tuple(kI420Image, kNV12Image),
std::make_tuple(kRGBAImage, kNV12Image),
- std::make_tuple(kYV12Image, kNV12Image)));
+ std::make_tuple(kYV12Image, kNV12Image),
+ std::make_tuple(kI422Image, kNV12Image),
+ std::make_tuple(kYUYVImage, kNV12Image)));
+
+INSTANTIATE_TEST_SUITE_P(
+ PixelFormatConversionToI420,
+ ImageProcessorParamTest,
+ ::testing::Values(std::make_tuple(kI420Image, kI420Image),
+ std::make_tuple(kI422Image, kI420Image),
+ std::make_tuple(kYUYVImage, kI420Image)));
INSTANTIATE_TEST_SUITE_P(
NV12DownScaling,
@@ -313,6 +375,24 @@ INSTANTIATE_TEST_SUITE_P(
std::make_tuple(kNV12Image360P, kNV12Image270P),
std::make_tuple(kNV12Image360P, kNV12Image180P)));
+INSTANTIATE_TEST_SUITE_P(I420DownScaling,
+ ImageProcessorParamTest,
+ ::testing::Values(std::make_tuple(kI420Image360P,
+ kI420Image270P)));
+
+INSTANTIATE_TEST_SUITE_P(
+ DownScalingConversionToNV12,
+ ImageProcessorParamTest,
+ ::testing::Values(std::make_tuple(kI422Image360P, kNV12Image270P),
+ std::make_tuple(kYUYVImage360P, kNV12Image270P)));
+
+INSTANTIATE_TEST_SUITE_P(
+ DownScalingConversionToI420,
+ ImageProcessorParamTest,
+ ::testing::Values(std::make_tuple(kI420Image360P, kI420Image270P),
+ std::make_tuple(kI422Image360P, kI420Image270P),
+ std::make_tuple(kYUYVImage360P, kI420Image270P)));
+
// Crop 360P frame from 480P.
INSTANTIATE_TEST_SUITE_P(NV12Cropping,
ImageProcessorParamTest,
diff --git a/chromium/media/gpu/chromeos/image_processor_with_pool.cc b/chromium/media/gpu/chromeos/image_processor_with_pool.cc
index bbd9c64b168..e90c359fcd0 100644
--- a/chromium/media/gpu/chromeos/image_processor_with_pool.cc
+++ b/chromium/media/gpu/chromeos/image_processor_with_pool.cc
@@ -12,20 +12,27 @@
namespace media {
// static
-std::unique_ptr<ImageProcessorWithPool> ImageProcessorWithPool::Create(
+StatusOr<std::unique_ptr<ImageProcessorWithPool>>
+ImageProcessorWithPool::Create(
std::unique_ptr<ImageProcessor> image_processor,
DmabufVideoFramePool* const frame_pool,
size_t num_frames,
+ bool use_protected,
const scoped_refptr<base::SequencedTaskRunner> task_runner) {
const ImageProcessor::PortConfig& config = image_processor->output_config();
- absl::optional<GpuBufferLayout> layout = frame_pool->Initialize(
- config.fourcc, config.size, config.visible_rect, config.size, num_frames,
- /*use_protected=*/false);
- if (!layout || layout->size() != config.size) {
+ StatusOr<GpuBufferLayout> status_or_layout =
+ frame_pool->Initialize(config.fourcc, config.size, config.visible_rect,
+ config.size, num_frames, use_protected);
+ if (status_or_layout.has_error()) {
+ VLOGF(1) << "Failed to initialize the pool.";
+ return std::move(status_or_layout).error();
+ }
+
+ const GpuBufferLayout layout = std::move(status_or_layout).value();
+ if (layout.size() != config.size) {
VLOGF(1) << "Failed to request frame with correct size. "
- << config.size.ToString() << " != "
- << (layout ? layout->size().ToString() : gfx::Size().ToString());
- return nullptr;
+ << config.size.ToString() << " != " << layout.size().ToString();
+ return Status(StatusCode::kInvalidArgument);
}
return base::WrapUnique<ImageProcessorWithPool>(new ImageProcessorWithPool(
diff --git a/chromium/media/gpu/chromeos/image_processor_with_pool.h b/chromium/media/gpu/chromeos/image_processor_with_pool.h
index b60419f36bd..8cd1309b45c 100644
--- a/chromium/media/gpu/chromeos/image_processor_with_pool.h
+++ b/chromium/media/gpu/chromeos/image_processor_with_pool.h
@@ -25,12 +25,16 @@ class ImageProcessorWithPool {
public:
using FrameReadyCB = ImageProcessor::FrameReadyCB;
- // Create ImageProcessorWithPool instance. |num_frames| is the number of
- // frames requested from |frame_pool|.
- static std::unique_ptr<ImageProcessorWithPool> Create(
+ // Initializes |frame_pool| and creates an ImageProcessorWithPool instance.
+ // |num_frames| is the number of frames requested from |frame_pool|.
+ // Returns a valid ImageProcessorWithPool instance if successful.
+ // Returns StatusCode::kAborted if the initialization is aborted.
+ // Returns StatusCode::kInvalidArgument if any other error occurs.
+ static StatusOr<std::unique_ptr<ImageProcessorWithPool>> Create(
std::unique_ptr<ImageProcessor> image_processor,
DmabufVideoFramePool* const frame_pool,
size_t num_frames,
+ bool use_protected,
const scoped_refptr<base::SequencedTaskRunner> task_runner);
~ImageProcessorWithPool();
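Create() now reports failure through a StatusOr-style return instead of a bare nullptr, so callers can distinguish an aborted initialization from other errors. The caller-side pattern looks roughly like the following sketch, where C++23 std::expected and a small enum are hypothetical stand-ins for media::StatusOr/StatusCode.

// Illustrative sketch only; std::expected/StatusCode stand in for the
// Chromium Status types.
#include <expected>
#include <string>

struct PoolLayout { int width = 0; int height = 0; };  // stand-in type
enum class StatusCode { kOk, kAborted, kInvalidArgument };

// Hypothetical initializer mirroring the new contract: a value on success,
// an error code on failure.
std::expected<PoolLayout, StatusCode> InitializePool(int width, int height) {
  if (width <= 0 || height <= 0)
    return std::unexpected(StatusCode::kInvalidArgument);
  return PoolLayout{width, height};
}

// Caller-side handling, analogous to checking has_error()/value() above.
std::string Describe(int width, int height) {
  auto layout = InitializePool(width, height);
  if (!layout.has_value()) {
    return layout.error() == StatusCode::kAborted ? "aborted"
                                                  : "invalid argument";
  }
  return std::to_string(layout->width) + "x" + std::to_string(layout->height);
}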
diff --git a/chromium/media/gpu/chromeos/libyuv_image_processor_backend.cc b/chromium/media/gpu/chromeos/libyuv_image_processor_backend.cc
index 76a5cf5eed3..80cb1f178d6 100644
--- a/chromium/media/gpu/chromeos/libyuv_image_processor_backend.cc
+++ b/chromium/media/gpu/chromeos/libyuv_image_processor_backend.cc
@@ -28,37 +28,43 @@ int NV12Rotate(uint8_t* tmp_buffer,
int src_stride_y,
const uint8_t* src_uv,
int src_stride_uv,
- int src_width,
- int src_height,
uint8_t* dst_y,
int dst_stride_y,
uint8_t* dst_uv,
int dst_stride_uv,
- int dst_width,
- int dst_height,
+ int width,
+ int height,
VideoRotation relative_rotation) {
libyuv::RotationModeEnum rotation = libyuv::kRotate0;
+ int tmp_width = width;
+ int tmp_height = height;
switch (relative_rotation) {
case VIDEO_ROTATION_0:
NOTREACHED() << "Unexpected rotation: " << rotation;
return -1;
case VIDEO_ROTATION_90:
rotation = libyuv::kRotate90;
+ tmp_width = height;
+ tmp_height = width;
break;
case VIDEO_ROTATION_180:
rotation = libyuv::kRotate180;
+ tmp_width = width;
+ tmp_height = height;
break;
case VIDEO_ROTATION_270:
rotation = libyuv::kRotate270;
+ tmp_width = height;
+ tmp_height = width;
break;
}
// Rotating.
int tmp_uv_width = 0;
int tmp_uv_height = 0;
- if (!(base::CheckAdd<int>(dst_width, 1) / 2).AssignIfValid(&tmp_uv_width) ||
- !(base::CheckAdd<int>(dst_height, 1) / 2).AssignIfValid(&tmp_uv_height)) {
- VLOGF(1) << "Overflow occurred for " << dst_width << "x" << dst_height;
+ if (!(base::CheckAdd<int>(tmp_width, 1) / 2).AssignIfValid(&tmp_uv_width) ||
+ !(base::CheckAdd<int>(tmp_height, 1) / 2).AssignIfValid(&tmp_uv_height)) {
+ VLOGF(1) << "Overflow occurred for " << tmp_width << "x" << tmp_height;
return -1;
}
uint8_t* const tmp_u = tmp_buffer;
@@ -67,7 +73,7 @@ int NV12Rotate(uint8_t* tmp_buffer,
// Rotate the NV12 planes to I420.
int ret = libyuv::NV12ToI420Rotate(
src_y, src_stride_y, src_uv, src_stride_uv, dst_y, dst_stride_y, tmp_u,
- tmp_uv_width, tmp_v, tmp_uv_width, src_width, src_height, rotation);
+ tmp_uv_width, tmp_v, tmp_uv_width, width, height, rotation);
if (ret != 0)
return ret;
@@ -79,24 +85,50 @@ int NV12Rotate(uint8_t* tmp_buffer,
enum class SupportResult {
Supported,
- SupportedWithPivot,
+ SupportedWithI420Pivot,
+ SupportedWithNV12Pivot,
Unsupported,
};
-SupportResult IsFormatSupported(Fourcc input_fourcc, Fourcc output_fourcc) {
+enum class Transform {
+ kConversion,
+ kScaling,
+ kRotation,
+};
+
+SupportResult IsConversionSupported(Fourcc input_fourcc,
+ Fourcc output_fourcc,
+ Transform transform) {
static constexpr struct {
uint32_t input;
uint32_t output;
- bool need_pivot;
+ Transform transform;
+ SupportResult support_result;
} kSupportFormatConversionArray[] = {
+#define CONV(in, out, trans, result) \
+ {Fourcc::in, Fourcc::out, Transform::trans, SupportResult::result}
// Conversion.
- {Fourcc::AR24, Fourcc::NV12, false},
- {Fourcc::YU12, Fourcc::NV12, false},
- {Fourcc::YV12, Fourcc::NV12, false},
- {Fourcc::AB24, Fourcc::NV12, true},
- {Fourcc::XB24, Fourcc::NV12, true},
- // Scaling or Rotating.
- {Fourcc::NV12, Fourcc::NV12, true},
+ CONV(AB24, NV12, kConversion, SupportedWithI420Pivot),
+ CONV(AR24, NV12, kConversion, Supported),
+ CONV(NV12, NV12, kConversion, Supported),
+ CONV(XB24, NV12, kConversion, SupportedWithI420Pivot),
+ CONV(YM16, NV12, kConversion, Supported),
+ CONV(YM16, YU12, kConversion, Supported),
+ CONV(YU12, NV12, kConversion, Supported),
+ CONV(YU12, YU12, kConversion, Supported),
+ CONV(YUYV, NV12, kConversion, Supported),
+ CONV(YUYV, YU12, kConversion, Supported),
+ CONV(YV12, NV12, kConversion, Supported),
+ // Scaling.
+ CONV(NV12, NV12, kScaling, Supported),
+ CONV(YM16, NV12, kScaling, SupportedWithNV12Pivot),
+ CONV(YM16, YU12, kScaling, SupportedWithI420Pivot),
+ CONV(YU12, YU12, kScaling, Supported),
+ CONV(YUYV, NV12, kScaling, SupportedWithNV12Pivot),
+ CONV(YUYV, YU12, kScaling, SupportedWithI420Pivot),
+ // Rotating.
+ CONV(NV12, NV12, kRotation, SupportedWithI420Pivot),
+#undef CONV
};
const auto single_input_fourcc = input_fourcc.ToSinglePlanar();
@@ -117,9 +149,9 @@ SupportResult IsFormatSupported(Fourcc input_fourcc, Fourcc output_fourcc) {
continue;
if (single_input_fourcc == single_conv_input_fourcc &&
- single_output_fourcc == single_conv_output_fourcc) {
- return conv.need_pivot ? SupportResult::SupportedWithPivot
- : SupportResult::Supported;
+ single_output_fourcc == single_conv_output_fourcc &&
+ transform == conv.transform) {
+ return conv.support_result;
}
}
@@ -191,23 +223,11 @@ std::unique_ptr<ImageProcessorBackend> LibYUVImageProcessorBackend::Create(
return nullptr;
}
- SupportResult res =
- IsFormatSupported(input_config.fourcc, output_config.fourcc);
- if (res == SupportResult::Unsupported) {
- VLOGF(2) << "Conversion from " << input_config.fourcc.ToString() << " to "
- << output_config.fourcc.ToString() << " is not supported";
- return nullptr;
- }
-
+ const gfx::Size& input_size = input_config.visible_rect.size();
+ const gfx::Size& output_size = output_config.visible_rect.size();
+ Transform transform = Transform::kConversion;
if (relative_rotation != VIDEO_ROTATION_0) {
- if (input_config.fourcc.ToVideoPixelFormat() != PIXEL_FORMAT_NV12 ||
- output_config.fourcc.ToVideoPixelFormat() != PIXEL_FORMAT_NV12) {
- VLOGF(2) << "Rotation is supported for NV12->NV12 only";
- return nullptr;
- }
-
- const gfx::Size& input_size = input_config.visible_rect.size();
- const gfx::Size& output_size = output_config.visible_rect.size();
+ transform = Transform::kRotation;
bool size_mismatch = false;
if (relative_rotation == VIDEO_ROTATION_180) {
size_mismatch = input_size.width() != output_size.width() ||
@@ -222,6 +242,18 @@ std::unique_ptr<ImageProcessorBackend> LibYUVImageProcessorBackend::Create(
<< ", output=" << output_size.ToString();
return nullptr;
}
+ } else if (input_size.width() != output_size.width() ||
+ input_size.height() != output_size.height()) {
+ transform = Transform::kScaling;
+ }
+ SupportResult res = IsConversionSupported(input_config.fourcc,
+ output_config.fourcc, transform);
+ if (res == SupportResult::Unsupported) {
+ VLOGF(2) << "Conversion from " << input_size.ToString() << "/"
+ << input_config.fourcc.ToString() << " to "
+ << output_size.ToString() << "/" << output_config.fourcc.ToString()
+ << " with rotation " << relative_rotation << " is not supported";
+ return nullptr;
}
if (input_config.fourcc.ToVideoPixelFormat() ==
@@ -235,9 +267,12 @@ std::unique_ptr<ImageProcessorBackend> LibYUVImageProcessorBackend::Create(
}
scoped_refptr<VideoFrame> intermediate_frame;
- if (res == SupportResult::SupportedWithPivot) {
+ if (res == SupportResult::SupportedWithI420Pivot ||
+ res == SupportResult::SupportedWithNV12Pivot) {
intermediate_frame = VideoFrame::CreateFrame(
- PIXEL_FORMAT_I420, input_config.visible_rect.size(),
+ res == SupportResult::SupportedWithI420Pivot ? PIXEL_FORMAT_I420
+ : PIXEL_FORMAT_NV12,
+ input_config.visible_rect.size(),
gfx::Rect(input_config.visible_rect.size()),
input_config.visible_rect.size(), base::TimeDelta());
if (!intermediate_frame) {
@@ -346,6 +381,9 @@ int LibYUVImageProcessorBackend::DoConversion(const VideoFrame* const input,
fr->visible_data(VideoFrame::kYPlane), fr->stride(VideoFrame::kYPlane), \
fr->visible_data(VideoFrame::kUVPlane), fr->stride(VideoFrame::kUVPlane)
+#define YUY2_DATA(fr) \
+ fr->visible_data(VideoFrame::kYPlane), fr->stride(VideoFrame::kYPlane)
+
#define RGB_DATA(fr) \
fr->visible_data(VideoFrame::kARGBPlane), fr->stride(VideoFrame::kARGBPlane)
@@ -369,6 +407,7 @@ int LibYUVImageProcessorBackend::DoConversion(const VideoFrame* const input,
// There is no libyuv function to convert to RGBA to NV12. Therefore, we
// convert RGBA to I420 tentatively and thereafter convert the tentative
// one to NV12.
+ DCHECK_EQ(intermediate_frame_->format(), PIXEL_FORMAT_I420);
int ret = LIBYUV_FUNC(ABGRToI420, RGB_DATA(input),
Y_U_V_DATA(intermediate_frame_));
if (ret != 0)
@@ -385,11 +424,9 @@ int LibYUVImageProcessorBackend::DoConversion(const VideoFrame* const input,
// temporary U and V planes for I420 data. Although
// |intermediate_frame_->data(0)| is much larger than the required
// size, we use the frame to simplify the code.
- return NV12Rotate(
- intermediate_frame_->data(0), Y_UV_DATA(input),
- input->visible_rect().width(), input->visible_rect().height(),
- Y_UV_DATA(output), output->visible_rect().width(),
- output->visible_rect().height(), relative_rotation_);
+ return NV12Rotate(intermediate_frame_->data(0), Y_UV_DATA(input),
+ Y_UV_DATA(output), input->visible_rect().width(),
+ input->visible_rect().height(), relative_rotation_);
}
// Scaling mode.
return libyuv::NV12Scale(
@@ -397,6 +434,93 @@ int LibYUVImageProcessorBackend::DoConversion(const VideoFrame* const input,
input->visible_rect().height(), Y_UV_DATA(output),
output->visible_rect().width(), output->visible_rect().height(),
libyuv::kFilterBilinear);
+
+ case PIXEL_FORMAT_YUY2:
+ if (input->visible_rect().size() == output->visible_rect().size()) {
+ return LIBYUV_FUNC(YUY2ToNV12, YUY2_DATA(input), Y_UV_DATA(output));
+ } else {
+ DCHECK_EQ(intermediate_frame_->format(), PIXEL_FORMAT_NV12);
+ int ret = libyuv::YUY2ToNV12(
+ YUY2_DATA(input), Y_UV_DATA(intermediate_frame_),
+ intermediate_frame_->visible_rect().width(),
+ intermediate_frame_->visible_rect().height());
+ if (ret != 0)
+ return ret;
+ return libyuv::NV12Scale(
+ Y_UV_DATA(intermediate_frame_),
+ intermediate_frame_->visible_rect().width(),
+ intermediate_frame_->visible_rect().height(), Y_UV_DATA(output),
+ output->visible_rect().width(), output->visible_rect().height(),
+ libyuv::kFilterBilinear);
+ }
+ case PIXEL_FORMAT_I422:
+ if (input->visible_rect().size() == output->visible_rect().size()) {
+ return LIBYUV_FUNC(I422ToNV21, Y_V_U_DATA(input), Y_UV_DATA(output));
+ } else {
+ DCHECK_EQ(intermediate_frame_->format(), PIXEL_FORMAT_NV12);
+ int ret = libyuv::I422ToNV21(
+ Y_V_U_DATA(input), Y_UV_DATA(intermediate_frame_),
+ intermediate_frame_->visible_rect().width(),
+ intermediate_frame_->visible_rect().height());
+ if (ret != 0)
+ return ret;
+ return libyuv::NV12Scale(
+ Y_UV_DATA(intermediate_frame_),
+ intermediate_frame_->visible_rect().width(),
+ intermediate_frame_->visible_rect().height(), Y_UV_DATA(output),
+ output->visible_rect().width(), output->visible_rect().height(),
+ libyuv::kFilterBilinear);
+ }
+ default:
+ VLOGF(1) << "Unexpected input format: " << input->format();
+ return -1;
+ }
+ }
+
+ if (output->format() == PIXEL_FORMAT_I420) {
+ switch (input->format()) {
+ case PIXEL_FORMAT_I420:
+ return libyuv::I420Scale(
+ Y_U_V_DATA(input), input->visible_rect().width(),
+ input->visible_rect().height(), Y_U_V_DATA(output),
+ output->visible_rect().width(), output->visible_rect().height(),
+ libyuv::kFilterBilinear);
+ case PIXEL_FORMAT_YUY2:
+ if (input->visible_rect().size() == output->visible_rect().size()) {
+ return LIBYUV_FUNC(YUY2ToI420, YUY2_DATA(input), Y_U_V_DATA(output));
+ } else {
+ DCHECK_EQ(intermediate_frame_->format(), PIXEL_FORMAT_I420);
+ int ret = libyuv::YUY2ToI420(
+ YUY2_DATA(input), Y_U_V_DATA(intermediate_frame_),
+ intermediate_frame_->visible_rect().width(),
+ intermediate_frame_->visible_rect().height());
+ if (ret != 0)
+ return ret;
+ return libyuv::I420Scale(
+ Y_U_V_DATA(intermediate_frame_),
+ intermediate_frame_->visible_rect().width(),
+ intermediate_frame_->visible_rect().height(), Y_U_V_DATA(output),
+ output->visible_rect().width(), output->visible_rect().height(),
+ libyuv::kFilterBilinear);
+ }
+ case PIXEL_FORMAT_I422:
+ if (input->visible_rect().size() == output->visible_rect().size()) {
+ return LIBYUV_FUNC(I422ToI420, Y_U_V_DATA(input), Y_U_V_DATA(output));
+ } else {
+ DCHECK_EQ(intermediate_frame_->format(), PIXEL_FORMAT_I420);
+ int ret = libyuv::I422ToI420(
+ Y_U_V_DATA(input), Y_U_V_DATA(intermediate_frame_),
+ intermediate_frame_->visible_rect().width(),
+ intermediate_frame_->visible_rect().height());
+ if (ret != 0)
+ return ret;
+ return libyuv::I420Scale(
+ Y_U_V_DATA(intermediate_frame_),
+ intermediate_frame_->visible_rect().width(),
+ intermediate_frame_->visible_rect().height(), Y_U_V_DATA(output),
+ output->visible_rect().width(), output->visible_rect().height(),
+ libyuv::kFilterBilinear);
+ }
default:
VLOGF(1) << "Unexpected input format: " << input->format();
return -1;
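The libyuv backend now classifies the requested operation before consulting its support table: a non-zero rotation means kRotation, a visible-size mismatch means kScaling, and anything else is a plain kConversion. A standalone sketch of that classification, with plain structs standing in for the gfx/media types:

// Illustrative sketch only; Size/VideoRotation are stand-ins.
enum class VideoRotation { k0, k90, k180, k270 };
enum class Transform { kConversion, kScaling, kRotation };

struct Size {
  int width = 0;
  int height = 0;
  bool operator==(const Size& other) const {
    return width == other.width && height == other.height;
  }
};

// Mirrors the decision made in LibYUVImageProcessorBackend::Create(): rotation
// takes precedence, then scaling, otherwise a same-size format conversion.
Transform ClassifyTransform(const Size& input_visible_size,
                            const Size& output_visible_size,
                            VideoRotation relative_rotation) {
  if (relative_rotation != VideoRotation::k0)
    return Transform::kRotation;
  if (!(input_visible_size == output_visible_size))
    return Transform::kScaling;
  return Transform::kConversion;
}

The (input fourcc, output fourcc, transform) triple is then looked up in the support table to decide whether the operation is supported directly or needs an I420 or NV12 pivot frame.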
diff --git a/chromium/media/gpu/chromeos/mailbox_video_frame_converter.cc b/chromium/media/gpu/chromeos/mailbox_video_frame_converter.cc
index 6da0229a5bf..0b755c2df93 100644
--- a/chromium/media/gpu/chromeos/mailbox_video_frame_converter.cc
+++ b/chromium/media/gpu/chromeos/mailbox_video_frame_converter.cc
@@ -5,7 +5,6 @@
#include "media/gpu/chromeos/mailbox_video_frame_converter.h"
#include "base/bind.h"
-#include "base/callback_helpers.h"
#include "base/containers/contains.h"
#include "base/location.h"
#include "base/memory/ptr_util.h"
@@ -43,6 +42,10 @@ class MailboxVideoFrameConverter::ScopedSharedImage {
ScopedSharedImage(scoped_refptr<base::SingleThreadTaskRunner> gpu_task_runner)
: destruction_task_runner_(std::move(gpu_task_runner)) {}
+
+ ScopedSharedImage(const ScopedSharedImage&) = delete;
+ ScopedSharedImage& operator=(const ScopedSharedImage&) = delete;
+
~ScopedSharedImage() { Destroy(); }
void Reset(const gpu::Mailbox& mailbox,
@@ -76,8 +79,6 @@ class MailboxVideoFrameConverter::ScopedSharedImage {
gfx::Size size_;
DestroySharedImageCB destroy_shared_image_cb_;
const scoped_refptr<base::SequencedTaskRunner> destruction_task_runner_;
-
- DISALLOW_COPY_AND_ASSIGN(ScopedSharedImage);
};
// static
@@ -430,8 +431,8 @@ void MailboxVideoFrameConverter::WaitOnSyncTokenAndReleaseFrameOnGPUThread(
gpu::SharedImageStub* shared_image_stub = gpu_channel_->shared_image_stub();
DCHECK(shared_image_stub);
- auto keep_video_frame_alive = base::BindOnce(
- base::DoNothing::Once<scoped_refptr<VideoFrame>>(), std::move(frame));
+ auto keep_video_frame_alive =
+ base::BindOnce([](scoped_refptr<VideoFrame>) {}, std::move(frame));
auto* scheduler = gpu_channel_->scheduler();
DCHECK(scheduler);
scheduler->ScheduleTask(gpu::Scheduler::Task(
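The base::DoNothing::Once binding is replaced by an explicit no-op lambda that takes the frame by value, so the bound callback keeps the reference alive until it is run or destroyed. The same keep-alive pattern with standard-library types (shared_ptr and std::function as stand-ins for scoped_refptr and base::OnceCallback, assumptions for illustration only):

// Illustrative sketch only; not the Chromium callback machinery.
#include <functional>
#include <memory>
#include <utility>

struct VideoFrameStub {};  // stand-in for media::VideoFrame

// Binds a callable that owns the frame but does nothing with it; the frame is
// released only when the returned callback is invoked or destroyed.
std::function<void()> MakeKeepAlive(std::shared_ptr<VideoFrameStub> frame) {
  return [frame = std::move(frame)]() { /* intentionally does nothing */ };
}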
diff --git a/chromium/media/gpu/chromeos/mailbox_video_frame_converter_unittest.cc b/chromium/media/gpu/chromeos/mailbox_video_frame_converter_unittest.cc
index 2545f7de11c..bb4a1791a09 100644
--- a/chromium/media/gpu/chromeos/mailbox_video_frame_converter_unittest.cc
+++ b/chromium/media/gpu/chromeos/mailbox_video_frame_converter_unittest.cc
@@ -33,6 +33,12 @@ class MailboxVideoFrameConverterTest : public testing::Test {
base::BindRepeating(&UnwrapVideoFrame),
base::ThreadTaskRunnerHandle::Get(),
base::BindRepeating(&GetGpuChannel))) {}
+
+ MailboxVideoFrameConverterTest(const MailboxVideoFrameConverterTest&) =
+ delete;
+ MailboxVideoFrameConverterTest& operator=(
+ const MailboxVideoFrameConverterTest&) = delete;
+
~MailboxVideoFrameConverterTest() override = default;
void TearDown() override {
@@ -46,8 +52,6 @@ class MailboxVideoFrameConverterTest : public testing::Test {
base::test::TaskEnvironment task_environment_;
std::unique_ptr<VideoFrameConverter> converter_;
-
- DISALLOW_COPY_AND_ASSIGN(MailboxVideoFrameConverterTest);
};
TEST_F(MailboxVideoFrameConverterTest, Initialize) {
diff --git a/chromium/media/gpu/chromeos/platform_video_frame_pool.cc b/chromium/media/gpu/chromeos/platform_video_frame_pool.cc
index db9a3086fe2..4a7b7166909 100644
--- a/chromium/media/gpu/chromeos/platform_video_frame_pool.cc
+++ b/chromium/media/gpu/chromeos/platform_video_frame_pool.cc
@@ -138,7 +138,7 @@ scoped_refptr<VideoFrame> PlatformVideoFramePool::GetFrame() {
return wrapped_frame;
}
-absl::optional<GpuBufferLayout> PlatformVideoFramePool::Initialize(
+StatusOr<GpuBufferLayout> PlatformVideoFramePool::Initialize(
const Fourcc& fourcc,
const gfx::Size& coded_size,
const gfx::Rect& visible_rect,
@@ -152,13 +152,13 @@ absl::optional<GpuBufferLayout> PlatformVideoFramePool::Initialize(
VideoPixelFormat format = fourcc.ToVideoPixelFormat();
if (format == PIXEL_FORMAT_UNKNOWN) {
VLOGF(1) << "Unsupported fourcc: " << fourcc.ToString();
- return absl::nullopt;
+ return Status(StatusCode::kInvalidArgument);
}
#if !BUILDFLAG(USE_CHROMEOS_PROTECTED_MEDIA)
if (use_protected) {
VLOGF(1) << "Protected buffers unsupported";
- return absl::nullopt;
+ return Status(StatusCode::kInvalidArgument);
}
#endif
@@ -186,13 +186,19 @@ absl::optional<GpuBufferLayout> PlatformVideoFramePool::Initialize(
if (!frame) {
VLOGF(1) << "Failed to create video frame " << format << " (fourcc "
<< fourcc.ToString() << ")";
- return absl::nullopt;
+ return Status(StatusCode::kInvalidArgument);
}
frame_layout_ = GpuBufferLayout::Create(fourcc, frame->coded_size(),
frame->layout().planes(),
frame->layout().modifier());
+ if (!frame_layout_) {
+ VLOGF(1) << "Failed to create the layout (fourcc=" << fourcc.ToString()
+ << ", coded_size=" << frame->coded_size().ToString() << ")";
+ return Status(StatusCode::kInvalidArgument);
+ }
}
+ DCHECK(frame_layout_);
visible_rect_ = visible_rect;
natural_size_ = natural_size;
max_num_frames_ = max_num_frames;
@@ -203,7 +209,7 @@ absl::optional<GpuBufferLayout> PlatformVideoFramePool::Initialize(
if (frame_available_cb_ && !IsExhausted_Locked())
std::move(frame_available_cb_).Run();
- return frame_layout_;
+ return *frame_layout_;
}
bool PlatformVideoFramePool::IsExhausted() {
@@ -241,6 +247,16 @@ void PlatformVideoFramePool::NotifyWhenFrameAvailable(base::OnceClosure cb) {
frame_available_cb_ = std::move(cb);
}
+void PlatformVideoFramePool::ReleaseAllFrames() {
+ DCHECK(parent_task_runner_->RunsTasksInCurrentSequence());
+ DVLOGF(4);
+ base::AutoLock auto_lock(lock_);
+ free_frames_.clear();
+ frames_in_use_.clear();
+ weak_this_factory_.InvalidateWeakPtrs();
+ weak_this_ = weak_this_factory_.GetWeakPtr();
+}
+
// static
void PlatformVideoFramePool::OnFrameReleasedThunk(
absl::optional<base::WeakPtr<PlatformVideoFramePool>> pool,
diff --git a/chromium/media/gpu/chromeos/platform_video_frame_pool.h b/chromium/media/gpu/chromeos/platform_video_frame_pool.h
index 5e1391aa1a1..f5b3530b74a 100644
--- a/chromium/media/gpu/chromeos/platform_video_frame_pool.h
+++ b/chromium/media/gpu/chromeos/platform_video_frame_pool.h
@@ -42,21 +42,26 @@ class MEDIA_GPU_EXPORT PlatformVideoFramePool : public DmabufVideoFramePool {
public:
explicit PlatformVideoFramePool(
gpu::GpuMemoryBufferFactory* gpu_memory_buffer_factory);
+
+ PlatformVideoFramePool(const PlatformVideoFramePool&) = delete;
+ PlatformVideoFramePool& operator=(const PlatformVideoFramePool&) = delete;
+
~PlatformVideoFramePool() override;
// Returns the ID of the GpuMemoryBuffer wrapped by |frame|.
static gfx::GpuMemoryBufferId GetGpuMemoryBufferId(const VideoFrame& frame);
// DmabufVideoFramePool implementation.
- absl::optional<GpuBufferLayout> Initialize(const Fourcc& fourcc,
- const gfx::Size& coded_size,
- const gfx::Rect& visible_rect,
- const gfx::Size& natural_size,
- size_t max_num_frames,
- bool use_protected) override;
+ StatusOr<GpuBufferLayout> Initialize(const Fourcc& fourcc,
+ const gfx::Size& coded_size,
+ const gfx::Rect& visible_rect,
+ const gfx::Size& natural_size,
+ size_t max_num_frames,
+ bool use_protected) override;
scoped_refptr<VideoFrame> GetFrame() override;
bool IsExhausted() override;
void NotifyWhenFrameAvailable(base::OnceClosure cb) override;
+ void ReleaseAllFrames() override;
// Returns the original frame of a wrapped frame. We need this method to
// determine whether the frame returned by GetFrame() is the same one after
@@ -141,8 +146,6 @@ class MEDIA_GPU_EXPORT PlatformVideoFramePool : public DmabufVideoFramePool {
// Used at the VideoFrame destruction callback.
base::WeakPtr<PlatformVideoFramePool> weak_this_;
base::WeakPtrFactory<PlatformVideoFramePool> weak_this_factory_{this};
-
- DISALLOW_COPY_AND_ASSIGN(PlatformVideoFramePool);
};
} // namespace media
diff --git a/chromium/media/gpu/chromeos/platform_video_frame_pool_unittest.cc b/chromium/media/gpu/chromeos/platform_video_frame_pool_unittest.cc
index d6d5dc24854..e5cf8efbce1 100644
--- a/chromium/media/gpu/chromeos/platform_video_frame_pool_unittest.cc
+++ b/chromium/media/gpu/chromeos/platform_video_frame_pool_unittest.cc
@@ -68,15 +68,19 @@ class PlatformVideoFramePoolTest
constexpr size_t kNumFrames = 10;
visible_rect_ = visible_rect;
natural_size_ = visible_rect.size();
- layout_ = pool_->Initialize(fourcc, coded_size, visible_rect_,
- natural_size_, kNumFrames,
- /*use_protected=*/false);
- return !!layout_;
+ auto status_or_layout = pool_->Initialize(fourcc, coded_size, visible_rect_,
+ natural_size_, kNumFrames,
+ /*use_protected=*/false);
+ if (status_or_layout.has_error()) {
+ return false;
+ }
+ layout_ = std::move(status_or_layout).value();
+ return true;
}
scoped_refptr<VideoFrame> GetFrame(int timestamp_ms) {
scoped_refptr<VideoFrame> frame = pool_->GetFrame();
- frame->set_timestamp(base::TimeDelta::FromMilliseconds(timestamp_ms));
+ frame->set_timestamp(base::Milliseconds(timestamp_ms));
EXPECT_EQ(layout_->modifier(), frame->layout().modifier());
EXPECT_EQ(layout_->fourcc(),
diff --git a/chromium/media/gpu/chromeos/platform_video_frame_utils.cc b/chromium/media/gpu/chromeos/platform_video_frame_utils.cc
index 7d79759066c..b38b2e21b87 100644
--- a/chromium/media/gpu/chromeos/platform_video_frame_utils.cc
+++ b/chromium/media/gpu/chromeos/platform_video_frame_utils.cc
@@ -42,6 +42,14 @@ namespace media {
namespace {
+gfx::GpuMemoryBufferId GetNextGpuMemoryBufferId() {
+ static base::NoDestructor<base::Lock> id_lock;
+ static int next_gpu_memory_buffer_id = 0;
+ base::AutoLock lock(*id_lock);
+ CHECK_LT(next_gpu_memory_buffer_id, std::numeric_limits<int>::max());
+ return gfx::GpuMemoryBufferId(next_gpu_memory_buffer_id++);
+}
+
// GbmDeviceWrapper is a singleton that provides thread-safe access to a
// ui::GbmDevice for the purposes of creating native BOs. The ui::GbmDevice is
// initialized with the first non-vgem render node found that works starting at
@@ -82,13 +90,9 @@ class GbmDeviceWrapper {
if (native_pixmap_handle.planes.empty())
return gfx::GpuMemoryBufferHandle();
- CHECK_LT(next_gpu_memory_buffer_id_, std::numeric_limits<int>::max());
- const gfx::GpuMemoryBufferId gpu_memory_buffer_id(
- next_gpu_memory_buffer_id_++);
-
gfx::GpuMemoryBufferHandle gmb_handle;
gmb_handle.type = gfx::GpuMemoryBufferType::NATIVE_PIXMAP;
- gmb_handle.id = gpu_memory_buffer_id;
+ gmb_handle.id = GetNextGpuMemoryBufferId();
gmb_handle.native_pixmap_handle = std::move(native_pixmap_handle);
return gmb_handle;
}
@@ -128,7 +132,6 @@ class GbmDeviceWrapper {
base::Lock lock_;
base::File render_node_file_ GUARDED_BY(lock_);
std::unique_ptr<ui::GbmDevice> gbm_device_ GUARDED_BY(lock_);
- int next_gpu_memory_buffer_id_ GUARDED_BY(lock_) = 0;
};
gfx::GpuMemoryBufferHandle AllocateGpuMemoryBufferHandle(
@@ -138,8 +141,6 @@ gfx::GpuMemoryBufferHandle AllocateGpuMemoryBufferHandle(
const gfx::Rect& visible_rect,
gfx::BufferUsage buffer_usage,
base::ScopedClosureRunner& destroy_cb) {
- DCHECK(factory ||
- buffer_usage == gfx::BufferUsage::VEA_READ_CAMERA_AND_CPU_READ_WRITE);
gfx::GpuMemoryBufferHandle gmb_handle;
auto buffer_format = VideoPixelFormatToGfxBufferFormat(pixel_format);
if (!buffer_format)
@@ -150,17 +151,8 @@ gfx::GpuMemoryBufferHandle AllocateGpuMemoryBufferHandle(
*buffer_format, coded_size, buffer_usage);
}
- int gpu_memory_buffer_id;
- {
- static base::NoDestructor<base::Lock> id_lock;
- static int next_gpu_memory_buffer_id = 0;
- base::AutoLock lock(*id_lock);
- CHECK_LT(next_gpu_memory_buffer_id, std::numeric_limits<int>::max());
- gpu_memory_buffer_id = next_gpu_memory_buffer_id++;
- }
-
gmb_handle = factory->CreateGpuMemoryBuffer(
- gfx::GpuMemoryBufferId(gpu_memory_buffer_id), coded_size,
+ GetNextGpuMemoryBufferId(), coded_size,
/*framebuffer_size=*/GetRectSizeFromOrigin(visible_rect), *buffer_format,
buffer_usage, gpu::kPlatformVideoFramePoolClientId,
gfx::kNullAcceleratedWidget);
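GetNextGpuMemoryBufferId() centralizes the previously duplicated counter behind a function-local lock, so both allocation paths draw from a single monotonically increasing ID space. A minimal sketch of the same pattern with standard-library primitives:

// Illustrative sketch only; std::mutex/assert stand in for base::Lock/CHECK.
#include <cassert>
#include <limits>
#include <mutex>

// Returns a process-wide, monotonically increasing id. The lock and counter
// are function-local statics, so initialization is thread-safe and the state
// is shared by every caller.
int GetNextBufferId() {
  static std::mutex id_lock;
  static int next_id = 0;
  std::lock_guard<std::mutex> lock(id_lock);
  assert(next_id < std::numeric_limits<int>::max());
  return next_id++;
}

An std::atomic<int> with fetch_add would serve equally well for the sketch; the Chromium code keeps the lock-based form it already used.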
diff --git a/chromium/media/gpu/chromeos/platform_video_frame_utils_unittest.cc b/chromium/media/gpu/chromeos/platform_video_frame_utils_unittest.cc
index 32aae281a41..f525ad9bfa1 100644
--- a/chromium/media/gpu/chromeos/platform_video_frame_utils_unittest.cc
+++ b/chromium/media/gpu/chromeos/platform_video_frame_utils_unittest.cc
@@ -164,7 +164,7 @@ TEST(PlatformVideoFrameUtilsTest, CreateVideoFrame) {
constexpr gfx::Size kCodedSize(320, 240);
constexpr gfx::Rect kVisibleRect(kCodedSize);
constexpr gfx::Size kNaturalSize(kCodedSize);
- constexpr auto kTimeStamp = base::TimeDelta::FromMilliseconds(1234);
+ constexpr auto kTimeStamp = base::Milliseconds(1234);
constexpr gfx::BufferUsage kBufferUsage =
gfx::BufferUsage::VEA_READ_CAMERA_AND_CPU_READ_WRITE;
diff --git a/chromium/media/gpu/chromeos/vd_video_decode_accelerator.cc b/chromium/media/gpu/chromeos/vd_video_decode_accelerator.cc
index 32311997c2c..981e0114dbb 100644
--- a/chromium/media/gpu/chromeos/vd_video_decode_accelerator.cc
+++ b/chromium/media/gpu/chromeos/vd_video_decode_accelerator.cc
@@ -31,7 +31,7 @@ namespace {
// timestamp field. These two functions are used for converting between
// bitstream ID and fake timestamp.
base::TimeDelta BitstreamIdToFakeTimestamp(int32_t bitstream_id) {
- return base::TimeDelta::FromMilliseconds(bitstream_id);
+ return base::Milliseconds(bitstream_id);
}
int32_t FakeTimestampToBitstreamId(base::TimeDelta timestamp) {
diff --git a/chromium/media/gpu/chromeos/vda_video_frame_pool.cc b/chromium/media/gpu/chromeos/vda_video_frame_pool.cc
index 5145817422f..20a38ac4762 100644
--- a/chromium/media/gpu/chromeos/vda_video_frame_pool.cc
+++ b/chromium/media/gpu/chromeos/vda_video_frame_pool.cc
@@ -29,7 +29,7 @@ VdaVideoFramePool::~VdaVideoFramePool() {
weak_this_factory_.InvalidateWeakPtrs();
}
-absl::optional<GpuBufferLayout> VdaVideoFramePool::Initialize(
+StatusOr<GpuBufferLayout> VdaVideoFramePool::Initialize(
const Fourcc& fourcc,
const gfx::Size& coded_size,
const gfx::Rect& visible_rect,
@@ -41,16 +41,16 @@ absl::optional<GpuBufferLayout> VdaVideoFramePool::Initialize(
if (use_protected) {
LOG(ERROR) << "Cannot allocate protected buffers for VDA";
- return absl::nullopt;
+ return Status(StatusCode::kInvalidArgument);
}
visible_rect_ = visible_rect;
natural_size_ = natural_size;
- if (max_num_frames_ == max_num_frames && fourcc_ && *fourcc_ == fourcc &&
- coded_size_ == coded_size) {
+ if (layout_ && max_num_frames_ == max_num_frames && fourcc_ &&
+ *fourcc_ == fourcc && coded_size_ == coded_size) {
DVLOGF(3) << "Arguments related to frame layout are not changed, skip.";
- return layout_;
+ return *layout_;
}
// Invalidate weak pointers so the re-import callbacks of the frames we are
@@ -84,7 +84,9 @@ absl::optional<GpuBufferLayout> VdaVideoFramePool::Initialize(
parent_task_runner_, weak_this_)));
done.Wait();
- return layout_;
+ if (!layout_)
+ return Status(StatusCode::kInvalidArgument);
+ return *layout_;
}
void VdaVideoFramePool::OnRequestFramesDone(
@@ -169,6 +171,12 @@ void VdaVideoFramePool::NotifyWhenFrameAvailable(base::OnceClosure cb) {
CallFrameAvailableCbIfNeeded();
}
+void VdaVideoFramePool::ReleaseAllFrames() {
+ // TODO(jkardatzke): Implement this when we do protected content on Android
+ // for Intel platforms.
+ NOTREACHED();
+}
+
void VdaVideoFramePool::CallFrameAvailableCbIfNeeded() {
DVLOGF(4);
DCHECK_CALLED_ON_VALID_SEQUENCE(parent_sequence_checker_);
diff --git a/chromium/media/gpu/chromeos/vda_video_frame_pool.h b/chromium/media/gpu/chromeos/vda_video_frame_pool.h
index d18d4967a9c..7bc0f131712 100644
--- a/chromium/media/gpu/chromeos/vda_video_frame_pool.h
+++ b/chromium/media/gpu/chromeos/vda_video_frame_pool.h
@@ -60,15 +60,16 @@ class VdaVideoFramePool : public DmabufVideoFramePool {
~VdaVideoFramePool() override;
// DmabufVideoFramePool implementation.
- absl::optional<GpuBufferLayout> Initialize(const Fourcc& fourcc,
- const gfx::Size& coded_size,
- const gfx::Rect& visible_rect,
- const gfx::Size& natural_size,
- size_t max_num_frames,
- bool use_protected) override;
+ StatusOr<GpuBufferLayout> Initialize(const Fourcc& fourcc,
+ const gfx::Size& coded_size,
+ const gfx::Rect& visible_rect,
+ const gfx::Size& natural_size,
+ size_t max_num_frames,
+ bool use_protected) override;
scoped_refptr<VideoFrame> GetFrame() override;
bool IsExhausted() override;
void NotifyWhenFrameAvailable(base::OnceClosure cb) override;
+ void ReleaseAllFrames() override;
private:
// Update the layout of the buffers. |vda_| calls this as
diff --git a/chromium/media/gpu/chromeos/video_decoder_pipeline.cc b/chromium/media/gpu/chromeos/video_decoder_pipeline.cc
index 71f315c41c5..0792e851ecd 100644
--- a/chromium/media/gpu/chromeos/video_decoder_pipeline.cc
+++ b/chromium/media/gpu/chromeos/video_decoder_pipeline.cc
@@ -46,11 +46,23 @@ constexpr size_t kNumFramesForImageProcessor = limits::kMaxVideoFrames + 1;
constexpr Fourcc::Value kPreferredRenderableFourccs[] = {
Fourcc::NV12,
Fourcc::YV12,
+ Fourcc::P010,
};
// Picks the preferred compositor renderable format from |candidates|, if any.
+// If |preferred_fourcc| is provided, contained in |candidates|, and considered
+// renderable, it returns that. Otherwise, it goes through
+// |kPreferredRenderableFourccs| until it finds one that's in |candidates|. If
+// it can't find a renderable format in |candidates|, it returns absl::nullopt.
absl::optional<Fourcc> PickRenderableFourcc(
- const std::vector<Fourcc>& candidates) {
+ const std::vector<Fourcc>& candidates,
+ absl::optional<Fourcc> preferred_fourcc) {
+ if (preferred_fourcc && base::Contains(candidates, *preferred_fourcc)) {
+ for (const auto value : kPreferredRenderableFourccs) {
+ if (Fourcc(value) == *preferred_fourcc)
+ return preferred_fourcc;
+ }
+ }
for (const auto value : kPreferredRenderableFourccs) {
if (base::Contains(candidates, Fourcc(value)))
return Fourcc(value);
@@ -266,8 +278,8 @@ void VideoDecoderPipeline::Initialize(const VideoDecoderConfig& config,
}
#endif // !BUILDFLAG(USE_CHROMEOS_PROTECTED_MEDIA)
- needs_bitstream_conversion_ =
- (config.codec() == kCodecH264) || (config.codec() == kCodecHEVC);
+ needs_bitstream_conversion_ = (config.codec() == VideoCodec::kH264) ||
+ (config.codec() == VideoCodec::kHEVC);
decoder_task_runner_->PostTask(
FROM_HERE,
@@ -386,6 +398,14 @@ void VideoDecoderPipeline::OnResetDone(base::OnceClosure reset_cb) {
CallFlushCbIfNeeded(DecodeStatus::ABORTED);
+ if (need_frame_pool_rebuild_) {
+ need_frame_pool_rebuild_ = false;
+ if (main_frame_pool_)
+ main_frame_pool_->ReleaseAllFrames();
+ if (auxiliary_frame_pool_)
+ auxiliary_frame_pool_->ReleaseAllFrames();
+ }
+
client_task_runner_->PostTask(FROM_HERE, std::move(reset_cb));
}
@@ -504,6 +524,9 @@ void VideoDecoderPipeline::OnFrameConverted(scoped_refptr<VideoFrame> frame) {
void VideoDecoderPipeline::OnDecoderWaiting(WaitingReason reason) {
DVLOGF(3);
DCHECK_CALLED_ON_VALID_SEQUENCE(decoder_sequence_checker_);
+ if (reason == media::WaitingReason::kDecoderStateLost)
+ need_frame_pool_rebuild_ = true;
+
client_task_runner_->PostTask(FROM_HERE, base::BindOnce(waiting_cb_, reason));
}
@@ -566,44 +589,87 @@ DmabufVideoFramePool* VideoDecoderPipeline::GetVideoFramePool() const {
DVLOGF(3);
DCHECK_CALLED_ON_VALID_SEQUENCE(decoder_sequence_checker_);
- // |main_frame_pool_| is used by |image_processor_| in this case.
- // |decoder_| will output native buffer allocated by itself.
- // (e.g. V4L2 MMAP buffer in V4L2 API and VA surface in VA API.)
+ // TODO(andrescj): consider returning a WeakPtr instead. That way, if callers
+ // store the returned pointer, they know that they should check it's valid
+ // because the video frame pool can change across resolution changes if we go
+ // from using an image processor to not using one (or vice versa).
if (image_processor_)
- return nullptr;
+ return auxiliary_frame_pool_.get();
return main_frame_pool_.get();
}
-absl::optional<std::pair<Fourcc, gfx::Size>>
+StatusOr<std::pair<Fourcc, gfx::Size>>
VideoDecoderPipeline::PickDecoderOutputFormat(
const std::vector<std::pair<Fourcc, gfx::Size>>& candidates,
- const gfx::Rect& visible_rect) {
+ const gfx::Rect& decoder_visible_rect,
+ const gfx::Size& decoder_natural_size,
+ absl::optional<gfx::Size> output_size,
+ size_t num_of_pictures,
+ bool use_protected,
+ bool need_aux_frame_pool) {
DVLOGF(3);
DCHECK_CALLED_ON_VALID_SEQUENCE(decoder_sequence_checker_);
if (candidates.empty())
- return absl::nullopt;
+ return Status(StatusCode::kInvalidArgument);
+ auxiliary_frame_pool_.reset();
image_processor_.reset();
- // Check if any of the |candidates| formats is directly renderable.
- for (const auto preferred_fourcc : kPreferredRenderableFourccs) {
- for (const auto& candidate : candidates) {
- if (candidate.first == Fourcc(preferred_fourcc))
- return candidate;
+ // As long as we're not scaling, check if any of the |candidates| formats is
+ // directly renderable.
+ if (!output_size || *output_size == decoder_visible_rect.size()) {
+ for (const auto preferred_fourcc : kPreferredRenderableFourccs) {
+ for (const auto& candidate : candidates) {
+ if (candidate.first == Fourcc(preferred_fourcc)) {
+ StatusOr<GpuBufferLayout> status_or_layout =
+ main_frame_pool_->Initialize(
+ candidate.first, candidate.second, decoder_visible_rect,
+ decoder_natural_size, num_of_pictures, use_protected);
+ return status_or_layout.has_error()
+ ? StatusOr<std::pair<Fourcc, gfx::Size>>(
+ std::move(status_or_layout).error())
+ : StatusOr<std::pair<Fourcc, gfx::Size>>(candidate);
+ }
+ }
}
}
std::unique_ptr<ImageProcessor> image_processor =
ImageProcessorFactory::CreateWithInputCandidates(
- candidates, visible_rect.size(), kNumFramesForImageProcessor,
- decoder_task_runner_, base::BindRepeating(&PickRenderableFourcc),
+ candidates, /*input_visible_rect=*/decoder_visible_rect,
+ output_size ? *output_size : decoder_visible_rect.size(),
+ kNumFramesForImageProcessor, decoder_task_runner_,
+ base::BindRepeating(&PickRenderableFourcc),
BindToCurrentLoop(base::BindRepeating(&VideoDecoderPipeline::OnError,
decoder_weak_this_,
"ImageProcessor error")));
if (!image_processor) {
DVLOGF(2) << "Unable to find ImageProcessor to convert format";
- return absl::nullopt;
+ return Status(StatusCode::kInvalidArgument);
+ }
+
+ if (need_aux_frame_pool) {
+ // Initialize the auxiliary frame pool with the input format of the image
+ // processor. Note that we pass nullptr as the GpuMemoryBufferFactory. That
+ // way, the pool will allocate buffers using minigbm directly instead of
+ // going through Ozone, which means it won't create DRM/KMS framebuffers for
+ // those buffers. This is good because these buffers don't end up as
+ // overlays anyway.
+ auxiliary_frame_pool_ = std::make_unique<PlatformVideoFramePool>(
+ /*gpu_memory_buffer_factory=*/nullptr);
+ auxiliary_frame_pool_->set_parent_task_runner(decoder_task_runner_);
+ StatusOr<GpuBufferLayout> status_or_layout =
+ auxiliary_frame_pool_->Initialize(
+ image_processor->input_config().fourcc,
+ image_processor->input_config().size, decoder_visible_rect,
+ decoder_natural_size, num_of_pictures, use_protected);
+ if (status_or_layout.has_error()) {
+ // A PlatformVideoFramePool should never abort initialization.
+ DCHECK_NE(status_or_layout.code(), StatusCode::kAborted);
+ DVLOGF(2) << "Could not initialize the auxiliary frame pool";
+ return Status(StatusCode::kInvalidArgument);
+ }
}
// Note that fourcc is specified in ImageProcessor's factory method.
@@ -611,14 +677,15 @@ VideoDecoderPipeline::PickDecoderOutputFormat(
auto size = image_processor->input_config().size;
// Setup new pipeline.
- image_processor_ = ImageProcessorWithPool::Create(
+ auto status_or_image_processor = ImageProcessorWithPool::Create(
std::move(image_processor), main_frame_pool_.get(),
- kNumFramesForImageProcessor, decoder_task_runner_);
- if (!image_processor_) {
+ kNumFramesForImageProcessor, use_protected, decoder_task_runner_);
+ if (status_or_image_processor.has_error()) {
DVLOGF(2) << "Unable to create ImageProcessorWithPool.";
- return absl::nullopt;
+ return std::move(status_or_image_processor).error();
}
+ image_processor_ = std::move(status_or_image_processor).value();
return std::make_pair(fourcc, size);
}
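PickDecoderOutputFormat() only takes the direct path when no scaling is requested and one of the candidates is already compositor-renderable; otherwise it falls back to building an image processor (and, in the real method, the direct path also requires the main frame pool to initialize successfully). A compact sketch of that top-level decision, with std types and an is_renderable predicate introduced as stand-ins for illustration:

// Illustrative sketch only; FourccCode/Size/is_renderable are stand-ins.
#include <cstdint>
#include <optional>
#include <utility>
#include <vector>

struct Size {
  int width = 0;
  int height = 0;
  bool operator==(const Size& o) const {
    return width == o.width && height == o.height;
  }
};
using FourccCode = uint32_t;  // stand-in for media::Fourcc

enum class OutputPath { kDirect, kImageProcessor };

// Returns kDirect (and the chosen candidate) only when we are not scaling and
// some candidate is renderable; otherwise kImageProcessor.
std::pair<OutputPath, std::optional<std::pair<FourccCode, Size>>>
ChooseOutputPath(const std::vector<std::pair<FourccCode, Size>>& candidates,
                 const Size& decoder_visible_size,
                 std::optional<Size> output_size,
                 bool (*is_renderable)(FourccCode)) {
  const bool scaling = output_size && !(*output_size == decoder_visible_size);
  if (!scaling) {
    for (const auto& candidate : candidates) {
      if (is_renderable(candidate.first))
        return {OutputPath::kDirect, candidate};
    }
  }
  return {OutputPath::kImageProcessor, std::nullopt};
}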
diff --git a/chromium/media/gpu/chromeos/video_decoder_pipeline.h b/chromium/media/gpu/chromeos/video_decoder_pipeline.h
index a9e88b58dce..b42f9162fe9 100644
--- a/chromium/media/gpu/chromeos/video_decoder_pipeline.h
+++ b/chromium/media/gpu/chromeos/video_decoder_pipeline.h
@@ -11,6 +11,7 @@
#include "base/memory/weak_ptr.h"
#include "base/sequence_checker.h"
#include "build/build_config.h"
+#include "build/chromeos_buildflags.h"
#include "media/base/cdm_context.h"
#include "media/base/supported_video_decoder_config.h"
#include "media/base/video_decoder.h"
@@ -49,8 +50,10 @@ class MEDIA_GPU_EXPORT VideoDecoderMixin : public VideoDecoder {
Client() = default;
virtual ~Client() = default;
- // Get the video frame pool without passing the ownership. Return nullptr if
- // the decoder is responsible for allocating its own frames.
+ // Returns the video frame pool (without transferring ownership), or nullptr
+ // if the decoder is responsible for allocating its own frames. Note that
+ // callers may not assume that the returned pointer is valid after a call to
+ // PickDecoderOutputFormat().
virtual DmabufVideoFramePool* GetVideoFramePool() const = 0;
// After this method is called from |decoder_|, the client needs to call
@@ -58,20 +61,43 @@ class MEDIA_GPU_EXPORT VideoDecoderMixin : public VideoDecoder {
// flushed.
virtual void PrepareChangeResolution() = 0;
- // Return a valid format and size for |decoder_| output from given
- // |candidates| and the visible rect. The size might be modified from the
- // ones provided originally to accommodate the needs of the pipeline.
- // Return absl::nullopt if no valid format is found.
- virtual absl::optional<std::pair<Fourcc, gfx::Size>>
- PickDecoderOutputFormat(
+ // Negotiates the output format and size of the decoder: if not scaling
+ // (i.e., the size of |decoder_visible_rect| is equal to |output_size|), it
+ // selects a renderable format out of |candidates| and initializes the main
+ // video frame pool with the selected format and the given arguments. If
+ // scaling is needed or none of the |candidates| is considered renderable, this
+ // method attempts to initialize an image processor to reconcile the formats
+ // and/or perform scaling. |need_aux_frame_pool| indicates whether the
+ // caller needs a frame pool in the event that an image processor is needed:
+ // if true, a new pool is initialized and that pool can be obtained by
+ // calling GetVideoFramePool(). This pool will provide buffers consistent
+ // with the selected candidate out of |candidates|. If false, the caller
+ // must allocate its own buffers.
+ //
+ // This method returns StatusCode::kAborted if the initialization of a frame
+ // pool is aborted. On any other failure, it returns
+ // StatusCode::kInvalidArgument.
+ //
+ // Note: after a call to this method, callers should assume that a pointer
+ // returned by a prior call to GetVideoFramePool() is no longer valid.
+ virtual StatusOr<std::pair<Fourcc, gfx::Size>> PickDecoderOutputFormat(
const std::vector<std::pair<Fourcc, gfx::Size>>& candidates,
- const gfx::Rect& visible_rect) = 0;
+ const gfx::Rect& decoder_visible_rect,
+ const gfx::Size& decoder_natural_size,
+ absl::optional<gfx::Size> output_size,
+ size_t num_of_pictures,
+ bool use_protected,
+ bool need_aux_frame_pool) = 0;
};
VideoDecoderMixin(
std::unique_ptr<MediaLog> media_log,
scoped_refptr<base::SequencedTaskRunner> decoder_task_runner,
base::WeakPtr<VideoDecoderMixin::Client> client);
+
+ VideoDecoderMixin(const VideoDecoderMixin&) = delete;
+ VideoDecoderMixin& operator=(const VideoDecoderMixin&) = delete;
+
~VideoDecoderMixin() override;
// After DecoderInterface calls |prepare_change_resolution_cb| passed
@@ -93,8 +119,6 @@ class MEDIA_GPU_EXPORT VideoDecoderMixin : public VideoDecoder {
// The WeakPtr client instance, bound to |decoder_task_runner_|.
base::WeakPtr<VideoDecoderMixin::Client> client_;
-
- DISALLOW_COPY_AND_ASSIGN(VideoDecoderMixin);
};
class MEDIA_GPU_EXPORT VideoDecoderPipeline : public VideoDecoder,
@@ -138,12 +162,14 @@ class MEDIA_GPU_EXPORT VideoDecoderPipeline : public VideoDecoder,
// VideoDecoderMixin::Client implementation.
DmabufVideoFramePool* GetVideoFramePool() const override;
void PrepareChangeResolution() override;
- // After picking a format, it instantiates an |image_processor_| if none of
- // format in |candidates| is renderable and an ImageProcessor can convert a
- // candidate to renderable format.
- absl::optional<std::pair<Fourcc, gfx::Size>> PickDecoderOutputFormat(
+ StatusOr<std::pair<Fourcc, gfx::Size>> PickDecoderOutputFormat(
const std::vector<std::pair<Fourcc, gfx::Size>>& candidates,
- const gfx::Rect& visible_rect) override;
+ const gfx::Rect& decoder_visible_rect,
+ const gfx::Size& decoder_natural_size,
+ absl::optional<gfx::Size> output_size,
+ size_t num_of_pictures,
+ bool use_protected,
+ bool need_aux_frame_pool) override;
private:
friend class VideoDecoderPipelineTest;
@@ -212,6 +238,12 @@ class MEDIA_GPU_EXPORT VideoDecoderPipeline : public VideoDecoder,
// |decoder_task_runner_|.
std::unique_ptr<DmabufVideoFramePool> main_frame_pool_;
+ // When an image processor is needed, |auxiliary_frame_pool_| is the pool of
+ // output buffers for the |decoder_| (which will serve as the input buffers
+ // for the image processor) and |main_frame_pool_| will be the pool of output
+ // buffers for the image processor.
+ std::unique_ptr<DmabufVideoFramePool> auxiliary_frame_pool_;
+
// The image processor is only created when the decoder cannot output frames
// with renderable format.
std::unique_ptr<ImageProcessorWithPool> image_processor_
@@ -258,6 +290,11 @@ class MEDIA_GPU_EXPORT VideoDecoderPipeline : public VideoDecoder,
// Set to true when any unexpected error occurs.
bool has_error_ GUARDED_BY_CONTEXT(decoder_sequence_checker_) = false;
+ // Set to true when we need to tell the frame pool to rebuild itself. This is
+ // needed for protected content on Intel platforms.
+ bool need_frame_pool_rebuild_ GUARDED_BY_CONTEXT(decoder_sequence_checker_) =
+ false;
+
// Set to true to bypass checks for encrypted content support for testing.
bool allow_encrypted_content_for_testing_ = false;
diff --git a/chromium/media/gpu/chromeos/video_decoder_pipeline_unittest.cc b/chromium/media/gpu/chromeos/video_decoder_pipeline_unittest.cc
index e67004c548c..2f39beb99db 100644
--- a/chromium/media/gpu/chromeos/video_decoder_pipeline_unittest.cc
+++ b/chromium/media/gpu/chromeos/video_decoder_pipeline_unittest.cc
@@ -47,15 +47,16 @@ class MockVideoFramePool : public DmabufVideoFramePool {
// DmabufVideoFramePool implementation.
MOCK_METHOD6(Initialize,
- absl::optional<GpuBufferLayout>(const Fourcc&,
- const gfx::Size&,
- const gfx::Rect&,
- const gfx::Size&,
- size_t,
- bool));
+ StatusOr<GpuBufferLayout>(const Fourcc&,
+ const gfx::Size&,
+ const gfx::Rect&,
+ const gfx::Size&,
+ size_t,
+ bool));
MOCK_METHOD0(GetFrame, scoped_refptr<VideoFrame>());
MOCK_METHOD0(IsExhausted, bool());
MOCK_METHOD1(NotifyWhenFrameAvailable, void(base::OnceClosure));
+ MOCK_METHOD0(ReleaseAllFrames, void());
};
constexpr gfx::Size kCodedSize(48, 36);
@@ -127,7 +128,7 @@ class VideoDecoderPipelineTest
: public testing::TestWithParam<DecoderPipelineTestParams> {
public:
VideoDecoderPipelineTest()
- : config_(kCodecVP8,
+ : config_(VideoCodec::kVP8,
VP8PROFILE_ANY,
VideoDecoderConfig::AlphaMode::kIsOpaque,
VideoColorSpace(),
@@ -137,15 +138,14 @@ class VideoDecoderPipelineTest
kCodedSize,
EmptyExtraData(),
EncryptionScheme::kUnencrypted),
- pool_(new MockVideoFramePool),
- converter_(new VideoFrameConverter),
- decoder_(new VideoDecoderPipeline(
- base::ThreadTaskRunnerHandle::Get(),
- std::move(pool_),
- std::move(converter_),
- std::make_unique<MockMediaLog>(),
- // This callback needs to be configured in the individual tests.
- base::BindOnce(&VideoDecoderPipelineTest::CreateNullMockDecoder))) {
+ converter_(new VideoFrameConverter) {
+ auto pool = std::make_unique<MockVideoFramePool>();
+ pool_ = pool.get();
+ decoder_ = base::WrapUnique(new VideoDecoderPipeline(
+ base::ThreadTaskRunnerHandle::Get(), std::move(pool),
+ std::move(converter_), std::make_unique<MockMediaLog>(),
+ // This callback needs to be configured in the individual tests.
+ base::BindOnce(&VideoDecoderPipelineTest::CreateNullMockDecoder)));
}
~VideoDecoderPipelineTest() override = default;
@@ -280,6 +280,12 @@ class VideoDecoderPipelineTest
DETACH_FROM_SEQUENCE(decoder_->decoder_sequence_checker_);
}
+ void InvokeWaitingCB(WaitingReason reason) {
+ decoder_->decoder_task_runner_->PostTask(
+ FROM_HERE, base::BindOnce(&VideoDecoderPipeline::OnDecoderWaiting,
+ base::Unretained(decoder_.get()), reason));
+ }
+
base::test::TaskEnvironment task_environment_;
const VideoDecoderConfig config_;
@@ -291,9 +297,9 @@ class VideoDecoderPipelineTest
scoped_refptr<DecoderBuffer> transcrypted_buffer_;
media::CallbackRegistry<CdmContext::EventCB::RunType> event_callbacks_;
#endif // BUILDFLAG(IS_CHROMEOS_ASH)
- std::unique_ptr<MockVideoFramePool> pool_;
std::unique_ptr<VideoFrameConverter> converter_;
std::unique_ptr<VideoDecoderPipeline> decoder_;
+ MockVideoFramePool* pool_;
};
// Verifies the status code for several typical CreateDecoderFunctionCB cases.
@@ -559,6 +565,7 @@ TEST_F(VideoDecoderPipelineTest, TranscryptError) {
TEST_F(VideoDecoderPipelineTest, PickDecoderOutputFormat) {
constexpr gfx::Size kSize(320, 240);
constexpr gfx::Rect kVisibleRect(320, 240);
+ constexpr size_t kMaxNumOfFrames = 4u;
const struct {
std::vector<std::pair<Fourcc, gfx::Size>> input_candidates;
@@ -569,6 +576,8 @@ TEST_F(VideoDecoderPipelineTest, PickDecoderOutputFormat) {
std::pair<Fourcc, gfx::Size>(Fourcc::NV12, kSize)},
{{std::pair<Fourcc, gfx::Size>(Fourcc::YV12, kSize)},
std::pair<Fourcc, gfx::Size>(Fourcc::YV12, kSize)},
+ {{std::pair<Fourcc, gfx::Size>(Fourcc::P010, kSize)},
+ std::pair<Fourcc, gfx::Size>(Fourcc::P010, kSize)},
// Two candidates, both supported: pick as per implementation.
{{std::pair<Fourcc, gfx::Size>(Fourcc::NV12, kSize),
std::pair<Fourcc, gfx::Size>(Fourcc::YV12, kSize)},
@@ -576,23 +585,72 @@ TEST_F(VideoDecoderPipelineTest, PickDecoderOutputFormat) {
{{std::pair<Fourcc, gfx::Size>(Fourcc::YV12, kSize),
std::pair<Fourcc, gfx::Size>(Fourcc::NV12, kSize)},
std::pair<Fourcc, gfx::Size>(Fourcc::NV12, kSize)},
+ {{std::pair<Fourcc, gfx::Size>(Fourcc::NV12, kSize),
+ std::pair<Fourcc, gfx::Size>(Fourcc::P010, kSize)},
+ std::pair<Fourcc, gfx::Size>(Fourcc::NV12, kSize)},
// Two candidates, only one supported, the supported one should be picked.
- {{std::pair<Fourcc, gfx::Size>(Fourcc::P010, kSize),
+ {{std::pair<Fourcc, gfx::Size>(Fourcc::YU16, kSize),
std::pair<Fourcc, gfx::Size>(Fourcc::NV12, kSize)},
std::pair<Fourcc, gfx::Size>(Fourcc::NV12, kSize)},
- {{std::pair<Fourcc, gfx::Size>(Fourcc::P010, kSize),
+ {{std::pair<Fourcc, gfx::Size>(Fourcc::YU16, kSize),
std::pair<Fourcc, gfx::Size>(Fourcc::YV12, kSize)},
- std::pair<Fourcc, gfx::Size>(Fourcc::YV12, kSize)}};
+ std::pair<Fourcc, gfx::Size>(Fourcc::YV12, kSize)},
+ {{std::pair<Fourcc, gfx::Size>(Fourcc::YU16, kSize),
+ std::pair<Fourcc, gfx::Size>(Fourcc::P010, kSize)},
+ std::pair<Fourcc, gfx::Size>(Fourcc::P010, kSize)}};
for (const auto& test_vector : test_vectors) {
- const auto chosen_candidate = decoder_->PickDecoderOutputFormat(
- test_vector.input_candidates, kVisibleRect);
+ const Fourcc& expected_fourcc = test_vector.expected_chosen_candidate.first;
+ const gfx::Size& expected_coded_size =
+ test_vector.expected_chosen_candidate.second;
+ std::vector<ColorPlaneLayout> planes(
+ VideoFrame::NumPlanes(expected_fourcc.ToVideoPixelFormat()));
+ EXPECT_CALL(*pool_,
+ Initialize(expected_fourcc, expected_coded_size, kVisibleRect,
+ /*natural_size=*/kVisibleRect.size(),
+ kMaxNumOfFrames, /*use_protected=*/false))
+ .WillOnce(Return(
+ *GpuBufferLayout::Create(expected_fourcc, expected_coded_size,
+ std::move(planes), /*modifier=*/0u)));
+ auto status_or_chosen_candidate = decoder_->PickDecoderOutputFormat(
+ test_vector.input_candidates, kVisibleRect,
+ /*decoder_natural_size=*/kVisibleRect.size(),
+ /*output_size=*/absl::nullopt, /*num_of_pictures=*/kMaxNumOfFrames,
+ /*use_protected=*/false, /*need_aux_frame_pool=*/false);
+ ASSERT_TRUE(status_or_chosen_candidate.has_value());
+ const auto chosen_candidate = std::move(status_or_chosen_candidate).value();
EXPECT_EQ(test_vector.expected_chosen_candidate, chosen_candidate)
<< " expected: "
<< test_vector.expected_chosen_candidate.first.ToString()
- << ", actual: " << chosen_candidate->first.ToString();
+ << ", actual: " << chosen_candidate.first.ToString();
+ testing::Mock::VerifyAndClearExpectations(pool_);
}
DetachDecoderSequenceChecker();
}
+// Verifies that ReleaseAllFrames is called on the frame pool when we receive
+// the kDecoderStateLost event through the waiting callback. This can occur
+// during protected content playback on Intel.
+TEST_F(VideoDecoderPipelineTest, RebuildFramePoolsOnStateLost) {
+ InitializeDecoder(
+ base::BindOnce(&VideoDecoderPipelineTest::CreateGoodMockDecoder),
+ StatusCode::kOk);
+
+ // Simulate the waiting callback from the decoder for kDecoderStateLost.
+ EXPECT_CALL(*this, OnWaiting(media::WaitingReason::kDecoderStateLost));
+ InvokeWaitingCB(media::WaitingReason::kDecoderStateLost);
+ task_environment_.RunUntilIdle();
+
+  // Invoke Reset() as a client would; we then expect that to trigger the
+  // rebuild of the frame pool.
+ EXPECT_CALL(*reinterpret_cast<MockDecoder*>(GetUnderlyingDecoder()), Reset(_))
+ .WillOnce(::testing::WithArgs<0>(
+ [](base::OnceClosure closure) { std::move(closure).Run(); }));
+ EXPECT_CALL(*this, OnResetDone());
+ EXPECT_CALL(*pool_, ReleaseAllFrames());
+
+ decoder_->Reset(base::BindOnce(&VideoDecoderPipelineTest::OnResetDone,
+ base::Unretained(this)));
+ task_environment_.RunUntilIdle();
+}
} // namespace media
diff --git a/chromium/media/gpu/decode_surface_handler.h b/chromium/media/gpu/decode_surface_handler.h
index 35a7586406a..76f953d4b57 100644
--- a/chromium/media/gpu/decode_surface_handler.h
+++ b/chromium/media/gpu/decode_surface_handler.h
@@ -20,46 +20,25 @@ template <class T>
class DecodeSurfaceHandler {
public:
DecodeSurfaceHandler() = default;
- virtual ~DecodeSurfaceHandler() = default;
-
- // Returns a T for decoding into and for output, if available, or nullptr.
- virtual scoped_refptr<T> CreateSurface() = 0;
- // Used by implementations that scale the video between decode and output. In
- // those cases, the CreateSurface() call will be used for allocating the
- // output surfaces and CreateDecodeSurface() will be used for decoding
- // surfaces. This mode can be detected by calling IsScalingDecode().
- virtual scoped_refptr<T> CreateDecodeSurface() { return nullptr; }
+ DecodeSurfaceHandler(const DecodeSurfaceHandler&) = delete;
+ DecodeSurfaceHandler& operator=(const DecodeSurfaceHandler&) = delete;
- // Returns true if there are separate surfaces for decoding and output due to
- // a scaling operation being performed between the two.
- virtual bool IsScalingDecode() { return false; }
+ virtual ~DecodeSurfaceHandler() = default;
- // Returns the visible rect relative to the output surface if we are in
- // scaling mode. The |decode_visible_rect| should be passed in as well as the
- // |output_picture_size| for validation. The returned rect will only differ if
- // IsScalingDecode() is true.
- virtual const gfx::Rect GetOutputVisibleRect(
- const gfx::Rect& decode_visible_rect,
- const gfx::Size& output_picture_size) {
- CHECK(gfx::Rect(output_picture_size).Contains(decode_visible_rect));
- return decode_visible_rect;
- }
+ // Returns a T for decoding into, if available, or nullptr.
+ virtual scoped_refptr<T> CreateSurface() = 0;
// Called by the client to indicate that |dec_surface| is ready to be
- // outputted. |dec_surface| must be obtained from CreateSurface() and NOT from
- // CreateDecodeSurface(). This can actually be called before decode is
- // finished in hardware; this method must guarantee that |dec_surface|s are
- // processed in the same order as SurfaceReady() is called. (On Intel, this
- // order doesn't need to be explicitly maintained since the driver will
- // enforce it, together with any necessary dependencies).
+ // outputted. This can actually be called before decode is finished in
+ // hardware; this method must guarantee that |dec_surface|s are processed in
+ // the same order as SurfaceReady() is called. (On Intel, this order doesn't
+ // need to be explicitly maintained since the driver will enforce it, together
+ // with any necessary dependencies).
virtual void SurfaceReady(scoped_refptr<T> dec_surface,
int32_t bitstream_id,
const gfx::Rect& visible_rect,
const VideoColorSpace& color_space) = 0;
-
- private:
- DISALLOW_COPY_AND_ASSIGN(DecodeSurfaceHandler);
};
} // namespace media
diff --git a/chromium/media/gpu/gpu_video_decode_accelerator_factory.h b/chromium/media/gpu/gpu_video_decode_accelerator_factory.h
index 7d853c54064..0412554241a 100644
--- a/chromium/media/gpu/gpu_video_decode_accelerator_factory.h
+++ b/chromium/media/gpu/gpu_video_decode_accelerator_factory.h
@@ -9,6 +9,7 @@
#include "base/callback.h"
#include "base/threading/thread_checker.h"
+#include "build/build_config.h"
#include "gpu/config/gpu_driver_bug_workarounds.h"
#include "gpu/config/gpu_info.h"
#include "gpu/config/gpu_preferences.h"
diff --git a/chromium/media/gpu/h264_decoder.cc b/chromium/media/gpu/h264_decoder.cc
index 33d6097e6ff..51e2e0435fa 100644
--- a/chromium/media/gpu/h264_decoder.cc
+++ b/chromium/media/gpu/h264_decoder.cc
@@ -109,7 +109,7 @@ H264Decoder::H264Accelerator::ParseEncryptedSliceHeader(
H264Decoder::H264Decoder(std::unique_ptr<H264Accelerator> accelerator,
VideoCodecProfile profile,
const VideoColorSpace& container_color_space)
- : state_(kNeedStreamMetadata),
+ : state_(State::kNeedStreamMetadata),
container_color_space_(container_color_space),
max_frame_num_(0),
max_pic_num_(0),
@@ -159,8 +159,8 @@ void H264Decoder::Reset() {
// If we are in kDecoding, we can resume without processing an SPS.
// The state becomes kDecoding again, (1) at the first IDR slice or (2) at
// the first slice after the recovery point SEI.
- if (state_ == kDecoding)
- state_ = kAfterReset;
+ if (state_ == State::kDecoding)
+ state_ = State::kAfterReset;
}
void H264Decoder::PrepareRefPicLists() {
@@ -876,8 +876,8 @@ bool H264Decoder::HandleMemoryManagementOps(scoped_refptr<H264Picture> pic) {
ref_pic_marking->max_long_term_frame_idx_plus1 - 1;
H264Picture::Vector long_terms;
dpb_.GetLongTermRefPicsAppending(&long_terms);
- for (size_t i = 0; i < long_terms.size(); ++i) {
- scoped_refptr<H264Picture>& long_term_pic = long_terms[i];
+ for (size_t long_term = 0; long_term < long_terms.size(); ++long_term) {
+ scoped_refptr<H264Picture>& long_term_pic = long_terms[long_term];
DCHECK(long_term_pic->ref && long_term_pic->long_term);
// Ok to cast, max_long_term_frame_idx is much smaller than 16bit.
if (long_term_pic->long_term_frame_idx >
@@ -899,8 +899,8 @@ bool H264Decoder::HandleMemoryManagementOps(scoped_refptr<H264Picture> pic) {
// First unmark if any existing with this long_term_frame_idx...
H264Picture::Vector long_terms;
dpb_.GetLongTermRefPicsAppending(&long_terms);
- for (size_t i = 0; i < long_terms.size(); ++i) {
- scoped_refptr<H264Picture>& long_term_pic = long_terms[i];
+ for (size_t long_term = 0; long_term < long_terms.size(); ++long_term) {
+ scoped_refptr<H264Picture>& long_term_pic = long_terms[long_term];
DCHECK(long_term_pic->ref && long_term_pic->long_term);
// Ok to cast, long_term_frame_idx is much smaller than 16bit.
if (long_term_pic->long_term_frame_idx ==
@@ -1364,7 +1364,7 @@ H264Decoder::H264Accelerator::Status H264Decoder::ProcessCurrentSlice() {
#define SET_ERROR_AND_RETURN() \
do { \
DVLOG(1) << "Error during decode"; \
- state_ = kError; \
+ state_ = State::kError; \
return H264Decoder::kDecodeError; \
} while (0)
@@ -1408,7 +1408,7 @@ void H264Decoder::SetStream(int32_t id, const DecoderBuffer& decoder_buffer) {
}
H264Decoder::DecodeResult H264Decoder::Decode() {
- if (state_ == kError) {
+ if (state_ == State::kError) {
DVLOG(1) << "Decoder in error state";
return kDecodeError;
}
@@ -1457,7 +1457,8 @@ H264Decoder::DecodeResult H264Decoder::Decode() {
case H264NALU::kNonIDRSlice:
// We can't resume from a non-IDR slice unless recovery point SEI
// process is in progress.
- if (state_ == kError || (state_ == kAfterReset && !recovery_frame_cnt_))
+ if (state_ == State::kError ||
+ (state_ == State::kAfterReset && !recovery_frame_cnt_))
break;
FALLTHROUGH;
@@ -1465,7 +1466,7 @@ H264Decoder::DecodeResult H264Decoder::Decode() {
// TODO(posciak): the IDR may require an SPS that we don't have
// available. For now we'd fail if that happens, but ideally we'd like
// to keep going until the next SPS in the stream.
- if (state_ == kNeedStreamMetadata) {
+ if (state_ == State::kNeedStreamMetadata) {
// We need an SPS, skip this IDR and keep looking.
break;
}
@@ -1479,10 +1480,10 @@ H264Decoder::DecodeResult H264Decoder::Decode() {
// steps will be executed.
if (!curr_slice_hdr_) {
curr_slice_hdr_ = std::make_unique<H264SliceHeader>();
- state_ = kParseSliceHeader;
+ state_ = State::kParseSliceHeader;
}
- if (state_ == kParseSliceHeader) {
+ if (state_ == State::kParseSliceHeader) {
// Check if the slice header is encrypted.
bool parsed_header = false;
if (current_decrypt_config_) {
@@ -1504,18 +1505,18 @@ H264Decoder::DecodeResult H264Decoder::Decode() {
if (par_res != H264Parser::kOk)
SET_ERROR_AND_RETURN();
}
- state_ = kTryPreprocessCurrentSlice;
+ state_ = State::kTryPreprocessCurrentSlice;
}
- if (state_ == kTryPreprocessCurrentSlice) {
+ if (state_ == State::kTryPreprocessCurrentSlice) {
CHECK_ACCELERATOR_RESULT(PreprocessCurrentSlice());
- state_ = kEnsurePicture;
+ state_ = State::kEnsurePicture;
}
- if (state_ == kEnsurePicture) {
+ if (state_ == State::kEnsurePicture) {
if (curr_pic_) {
// |curr_pic_| already exists, so skip to ProcessCurrentSlice().
- state_ = kTryCurrentSlice;
+ state_ = State::kTryCurrentSlice;
} else {
// New picture/finished previous one, try to start a new one
// or tell the client we need more surfaces.
@@ -1525,19 +1526,19 @@ H264Decoder::DecodeResult H264Decoder::Decode() {
if (current_decrypt_config_)
curr_pic_->set_decrypt_config(current_decrypt_config_->Clone());
- state_ = kTryNewFrame;
+ state_ = State::kTryNewFrame;
}
}
- if (state_ == kTryNewFrame) {
+ if (state_ == State::kTryNewFrame) {
CHECK_ACCELERATOR_RESULT(StartNewFrame(curr_slice_hdr_.get()));
- state_ = kTryCurrentSlice;
+ state_ = State::kTryCurrentSlice;
}
- DCHECK_EQ(state_, kTryCurrentSlice);
+ DCHECK_EQ(state_, State::kTryCurrentSlice);
CHECK_ACCELERATOR_RESULT(ProcessCurrentSlice());
curr_slice_hdr_.reset();
- state_ = kDecoding;
+ state_ = State::kDecoding;
break;
}
@@ -1555,8 +1556,8 @@ H264Decoder::DecodeResult H264Decoder::Decode() {
last_sps_nalu_.assign(curr_nalu_->data,
curr_nalu_->data + curr_nalu_->size);
- if (state_ == kNeedStreamMetadata)
- state_ = kAfterReset;
+ if (state_ == State::kNeedStreamMetadata)
+ state_ = State::kAfterReset;
if (need_new_buffers) {
curr_pic_ = nullptr;
@@ -1584,7 +1585,7 @@ H264Decoder::DecodeResult H264Decoder::Decode() {
case H264NALU::kAUD:
case H264NALU::kEOSeq:
case H264NALU::kEOStream:
- if (state_ != kDecoding)
+ if (state_ != State::kDecoding)
break;
CHECK_ACCELERATOR_RESULT(FinishPrevFrameIfPresent());
@@ -1605,7 +1606,7 @@ H264Decoder::DecodeResult H264Decoder::Decode() {
sei_subsamples_.push_back(subsamples[0]);
}
}
- if (state_ == kAfterReset && !recovery_frame_cnt_ &&
+ if (state_ == State::kAfterReset && !recovery_frame_cnt_ &&
!recovery_frame_num_) {
// If we are after reset, we can also resume from a SEI recovery point
// (spec D.2.8) if one is present. However, if we are already in the
diff --git a/chromium/media/gpu/h264_decoder.h b/chromium/media/gpu/h264_decoder.h
index 9b4f0830ce1..b86a415564f 100644
--- a/chromium/media/gpu/h264_decoder.h
+++ b/chromium/media/gpu/h264_decoder.h
@@ -60,6 +60,10 @@ class MEDIA_GPU_EXPORT H264Decoder : public AcceleratedVideoDecoder {
};
H264Accelerator();
+
+ H264Accelerator(const H264Accelerator&) = delete;
+ H264Accelerator& operator=(const H264Accelerator&) = delete;
+
virtual ~H264Accelerator();
// Create a new H264Picture that the decoder client can use for decoding
@@ -158,14 +162,15 @@ class MEDIA_GPU_EXPORT H264Decoder : public AcceleratedVideoDecoder {
// kNotSupported.
virtual Status SetStream(base::span<const uint8_t> stream,
const DecryptConfig* decrypt_config);
-
- private:
- DISALLOW_COPY_AND_ASSIGN(H264Accelerator);
};
H264Decoder(std::unique_ptr<H264Accelerator> accelerator,
VideoCodecProfile profile,
const VideoColorSpace& container_color_space = VideoColorSpace());
+
+ H264Decoder(const H264Decoder&) = delete;
+ H264Decoder& operator=(const H264Decoder&) = delete;
+
~H264Decoder() override;
// AcceleratedVideoDecoder implementation.
@@ -194,7 +199,7 @@ class MEDIA_GPU_EXPORT H264Decoder : public AcceleratedVideoDecoder {
private:
// Internal state of the decoder.
- enum State {
+ enum class State {
// After initialization, need an SPS.
kNeedStreamMetadata,
// Ready to decode from any point.
@@ -405,8 +410,6 @@ class MEDIA_GPU_EXPORT H264Decoder : public AcceleratedVideoDecoder {
int last_output_poc_;
const std::unique_ptr<H264Accelerator> accelerator_;
-
- DISALLOW_COPY_AND_ASSIGN(H264Decoder);
};
} // namespace media
diff --git a/chromium/media/gpu/h264_dpb.h b/chromium/media/gpu/h264_dpb.h
index 0482fa62900..84d90389f0d 100644
--- a/chromium/media/gpu/h264_dpb.h
+++ b/chromium/media/gpu/h264_dpb.h
@@ -17,6 +17,7 @@
#include "media/gpu/codec_picture.h"
#include "media/gpu/media_gpu_export.h"
#include "media/video/h264_parser.h"
+#include "media/video/video_encode_accelerator.h"
#include "ui/gfx/geometry/rect.h"
namespace media {
@@ -91,6 +92,8 @@ class MEDIA_GPU_EXPORT H264Picture : public CodecPicture {
// Position in DPB (i.e. index in DPB).
int dpb_position;
+ absl::optional<H264Metadata> metadata_for_encoding;
+
protected:
~H264Picture() override;
@@ -104,6 +107,10 @@ class MEDIA_GPU_EXPORT H264Picture : public CodecPicture {
class H264DPB {
public:
H264DPB();
+
+ H264DPB(const H264DPB&) = delete;
+ H264DPB& operator=(const H264DPB&) = delete;
+
~H264DPB();
void set_max_num_pics(size_t max_num_pics);
@@ -174,8 +181,6 @@ class H264DPB {
H264Picture::Vector pics_;
size_t max_num_pics_;
-
- DISALLOW_COPY_AND_ASSIGN(H264DPB);
};
} // namespace media
diff --git a/chromium/media/gpu/ipc/client/gpu_video_decode_accelerator_host.cc b/chromium/media/gpu/ipc/client/gpu_video_decode_accelerator_host.cc
index f8d7baaa1fd..fa668ac56fc 100644
--- a/chromium/media/gpu/ipc/client/gpu_video_decode_accelerator_host.cc
+++ b/chromium/media/gpu/ipc/client/gpu_video_decode_accelerator_host.cc
@@ -9,6 +9,7 @@
#include "base/threading/thread_task_runner_handle.h"
#include "build/build_config.h"
#include "gpu/ipc/client/gpu_channel_host.h"
+#include "ipc/ipc_mojo_bootstrap.h"
#include "mojo/public/cpp/bindings/associated_remote.h"
namespace media {
@@ -48,6 +49,13 @@ bool GpuVideoDecodeAcceleratorHost::Initialize(const Config& config,
if (!impl_)
return false;
+ // Mojo will ignore our request to bind to a different thread than the main or
+ // IO thread unless we construct this object. It does this to avoid breaking
+ // use cases that depend on the behavior of ignoring other bindings, as
+ // detailed in the documentation for
+ // IPC::ScopedAllowOffSequenceChannelAssociatedBindings.
+ IPC::ScopedAllowOffSequenceChannelAssociatedBindings allow_binding;
+
const scoped_refptr<base::SingleThreadTaskRunner> io_task_runner =
impl_->channel()->io_task_runner();
bool succeeded = false;
diff --git a/chromium/media/gpu/ipc/service/vda_video_decoder.cc b/chromium/media/gpu/ipc/service/vda_video_decoder.cc
index 06787f9f51c..90a26a2305b 100644
--- a/chromium/media/gpu/ipc/service/vda_video_decoder.cc
+++ b/chromium/media/gpu/ipc/service/vda_video_decoder.cc
@@ -486,7 +486,8 @@ bool VdaVideoDecoder::NeedsBitstreamConversion() const {
// TODO(sandersd): Can we move bitstream conversion into VdaVideoDecoder and
// always return false?
- return config_.codec() == kCodecH264 || config_.codec() == kCodecHEVC;
+ return config_.codec() == VideoCodec::kH264 ||
+ config_.codec() == VideoCodec::kHEVC;
}
bool VdaVideoDecoder::CanReadWithoutStalling() const {
diff --git a/chromium/media/gpu/ipc/service/vda_video_decoder_unittest.cc b/chromium/media/gpu/ipc/service/vda_video_decoder_unittest.cc
index 87d64df7702..b7d18597e30 100644
--- a/chromium/media/gpu/ipc/service/vda_video_decoder_unittest.cc
+++ b/chromium/media/gpu/ipc/service/vda_video_decoder_unittest.cc
@@ -147,7 +147,7 @@ class VdaVideoDecoderTest : public testing::TestWithParam<bool> {
.WillOnce(Return(GetParam()));
EXPECT_CALL(init_cb_, Run(IsOkStatus()));
InitializeWithConfig(VideoDecoderConfig(
- kCodecVP9, VP9PROFILE_PROFILE0,
+ VideoCodec::kVP9, VP9PROFILE_PROFILE0,
VideoDecoderConfig::AlphaMode::kIsOpaque, VideoColorSpace::REC709(),
kNoTransformation, gfx::Size(1920, 1088), gfx::Rect(1920, 1080),
gfx::Size(1920, 1080), EmptyExtraData(),
@@ -323,10 +323,10 @@ TEST_P(VdaVideoDecoderTest, Initialize) {
TEST_P(VdaVideoDecoderTest, Initialize_UnsupportedSize) {
InitializeWithConfig(VideoDecoderConfig(
- kCodecVP9, VP9PROFILE_PROFILE0, VideoDecoderConfig::AlphaMode::kIsOpaque,
- VideoColorSpace::REC601(), kNoTransformation, gfx::Size(320, 240),
- gfx::Rect(320, 240), gfx::Size(320, 240), EmptyExtraData(),
- EncryptionScheme::kUnencrypted));
+ VideoCodec::kVP9, VP9PROFILE_PROFILE0,
+ VideoDecoderConfig::AlphaMode::kIsOpaque, VideoColorSpace::REC601(),
+ kNoTransformation, gfx::Size(320, 240), gfx::Rect(320, 240),
+ gfx::Size(320, 240), EmptyExtraData(), EncryptionScheme::kUnencrypted));
EXPECT_CALL(init_cb_,
Run(HasStatusCode(StatusCode::kDecoderInitializeNeverCompleted)));
RunUntilIdle();
@@ -334,7 +334,7 @@ TEST_P(VdaVideoDecoderTest, Initialize_UnsupportedSize) {
TEST_P(VdaVideoDecoderTest, Initialize_UnsupportedCodec) {
InitializeWithConfig(VideoDecoderConfig(
- kCodecH264, H264PROFILE_BASELINE,
+ VideoCodec::kH264, H264PROFILE_BASELINE,
VideoDecoderConfig::AlphaMode::kIsOpaque, VideoColorSpace::REC709(),
kNoTransformation, gfx::Size(1920, 1088), gfx::Rect(1920, 1080),
gfx::Size(1920, 1080), EmptyExtraData(), EncryptionScheme::kUnencrypted));
@@ -346,10 +346,10 @@ TEST_P(VdaVideoDecoderTest, Initialize_UnsupportedCodec) {
TEST_P(VdaVideoDecoderTest, Initialize_RejectedByVda) {
EXPECT_CALL(*vda_, Initialize(_, client_)).WillOnce(Return(false));
InitializeWithConfig(VideoDecoderConfig(
- kCodecVP9, VP9PROFILE_PROFILE0, VideoDecoderConfig::AlphaMode::kIsOpaque,
- VideoColorSpace::REC709(), kNoTransformation, gfx::Size(1920, 1088),
- gfx::Rect(1920, 1080), gfx::Size(1920, 1080), EmptyExtraData(),
- EncryptionScheme::kUnencrypted));
+ VideoCodec::kVP9, VP9PROFILE_PROFILE0,
+ VideoDecoderConfig::AlphaMode::kIsOpaque, VideoColorSpace::REC709(),
+ kNoTransformation, gfx::Size(1920, 1088), gfx::Rect(1920, 1080),
+ gfx::Size(1920, 1080), EmptyExtraData(), EncryptionScheme::kUnencrypted));
EXPECT_CALL(init_cb_,
Run(HasStatusCode(StatusCode::kDecoderInitializeNeverCompleted)));
RunUntilIdle();
@@ -430,10 +430,10 @@ TEST_P(VdaVideoDecoderTest, Decode_Output_MaintainsAspect) {
EXPECT_CALL(*vda_, TryToSetupDecodeOnSeparateThread(_, _))
.WillOnce(Return(GetParam()));
InitializeWithConfig(VideoDecoderConfig(
- kCodecVP9, VP9PROFILE_PROFILE0, VideoDecoderConfig::AlphaMode::kIsOpaque,
- VideoColorSpace::REC709(), kNoTransformation, gfx::Size(640, 480),
- gfx::Rect(640, 480), gfx::Size(1280, 480), EmptyExtraData(),
- EncryptionScheme::kUnencrypted));
+ VideoCodec::kVP9, VP9PROFILE_PROFILE0,
+ VideoDecoderConfig::AlphaMode::kIsOpaque, VideoColorSpace::REC709(),
+ kNoTransformation, gfx::Size(640, 480), gfx::Rect(640, 480),
+ gfx::Size(1280, 480), EmptyExtraData(), EncryptionScheme::kUnencrypted));
EXPECT_CALL(init_cb_, Run(IsOkStatus()));
RunUntilIdle();
diff --git a/chromium/media/gpu/mac/vt_video_decode_accelerator_mac.cc b/chromium/media/gpu/mac/vt_video_decode_accelerator_mac.cc
index 89d91a15694..24414d3fe29 100644
--- a/chromium/media/gpu/mac/vt_video_decode_accelerator_mac.cc
+++ b/chromium/media/gpu/mac/vt_video_decode_accelerator_mac.cc
@@ -620,11 +620,11 @@ bool VTVideoDecodeAccelerator::Initialize(const Config& config,
case H264PROFILE_EXTENDED:
case H264PROFILE_MAIN:
case H264PROFILE_HIGH:
- codec_ = kCodecH264;
+ codec_ = VideoCodec::kH264;
break;
case VP9PROFILE_PROFILE0:
case VP9PROFILE_PROFILE2:
- codec_ = kCodecVP9;
+ codec_ = VideoCodec::kVP9;
break;
default:
NOTREACHED() << "Unsupported profile.";
@@ -656,10 +656,10 @@ bool VTVideoDecodeAccelerator::ConfigureDecoder() {
base::ScopedCFTypeRef<CMFormatDescriptionRef> format;
switch (codec_) {
- case kCodecH264:
+ case VideoCodec::kH264:
format = CreateVideoFormatH264(active_sps_, active_spsext_, active_pps_);
break;
- case kCodecVP9:
+ case VideoCodec::kVP9:
format = CreateVideoFormatVP9(
cc_detector_->GetColorSpace(config_.container_color_space),
config_.profile, config_.hdr_metadata,
@@ -708,7 +708,7 @@ bool VTVideoDecodeAccelerator::ConfigureDecoder() {
}
UMA_HISTOGRAM_BOOLEAN("Media.VTVDA.HardwareAccelerated", using_hardware);
- if (codec_ == kCodecVP9 && !vp9_bsf_)
+ if (codec_ == VideoCodec::kVP9 && !vp9_bsf_)
vp9_bsf_ = std::make_unique<VP9SuperFrameBitstreamFilter>();
// Record that the configuration change is complete.
@@ -1228,7 +1228,7 @@ void VTVideoDecodeAccelerator::Decode(scoped_refptr<DecoderBuffer> buffer,
Frame* frame = new Frame(bitstream_id);
pending_frames_[bitstream_id] = base::WrapUnique(frame);
- if (codec_ == kCodecVP9) {
+ if (codec_ == VideoCodec::kVP9) {
decoder_task_runner_->PostTask(
FROM_HERE,
base::BindOnce(&VTVideoDecodeAccelerator::DecodeTaskVp9,
@@ -1307,7 +1307,7 @@ void VTVideoDecodeAccelerator::ProcessWorkQueues() {
DCHECK(gpu_task_runner_->BelongsToCurrentThread());
switch (state_) {
case STATE_DECODING:
- if (codec_ != kCodecH264) {
+ if (codec_ != VideoCodec::kH264) {
while (state_ == STATE_DECODING) {
if (!ProcessOutputQueue() && !ProcessTaskQueue())
break;
@@ -1350,7 +1350,7 @@ bool VTVideoDecodeAccelerator::ProcessTaskQueue() {
Task& task = task_queue_.front();
switch (task.type) {
case TASK_FRAME: {
- if (codec_ == kCodecVP9) {
+ if (codec_ == VideoCodec::kVP9) {
// Once we've reached our maximum output queue size, defer end of
// bitstream buffer signals to avoid piling up too many frames.
if (output_queue_.size() >= limits::kMaxVideoFrames)
@@ -1382,8 +1382,8 @@ bool VTVideoDecodeAccelerator::ProcessTaskQueue() {
case TASK_FLUSH:
DCHECK_EQ(task.type, pending_flush_tasks_.front());
- if ((codec_ == kCodecH264 && reorder_queue_.size() == 0) ||
- (codec_ == kCodecVP9 && output_queue_.empty())) {
+ if ((codec_ == VideoCodec::kH264 && reorder_queue_.size() == 0) ||
+ (codec_ == VideoCodec::kVP9 && output_queue_.empty())) {
DVLOG(1) << "Flush complete";
pending_flush_tasks_.pop();
client_->NotifyFlushDone();
@@ -1394,8 +1394,8 @@ bool VTVideoDecodeAccelerator::ProcessTaskQueue() {
case TASK_RESET:
DCHECK_EQ(task.type, pending_flush_tasks_.front());
- if ((codec_ == kCodecH264 && reorder_queue_.size() == 0) ||
- (codec_ == kCodecVP9 && output_queue_.empty())) {
+ if ((codec_ == VideoCodec::kH264 && reorder_queue_.size() == 0) ||
+ (codec_ == VideoCodec::kVP9 && output_queue_.empty())) {
DVLOG(1) << "Reset complete";
waiting_for_idr_ = true;
pending_flush_tasks_.pop();
diff --git a/chromium/media/gpu/mac/vt_video_decode_accelerator_mac.h b/chromium/media/gpu/mac/vt_video_decode_accelerator_mac.h
index cef703f94e6..232b374488b 100644
--- a/chromium/media/gpu/mac/vt_video_decode_accelerator_mac.h
+++ b/chromium/media/gpu/mac/vt_video_decode_accelerator_mac.h
@@ -52,6 +52,9 @@ class VTVideoDecodeAccelerator : public VideoDecodeAccelerator,
const gpu::GpuDriverBugWorkarounds& workarounds,
MediaLog* media_log);
+ VTVideoDecodeAccelerator(const VTVideoDecodeAccelerator&) = delete;
+ VTVideoDecodeAccelerator& operator=(const VTVideoDecodeAccelerator&) = delete;
+
~VTVideoDecodeAccelerator() override;
// VideoDecodeAccelerator implementation.
@@ -325,8 +328,6 @@ class VTVideoDecodeAccelerator : public VideoDecodeAccelerator,
// other destructors run.
base::WeakPtrFactory<VTVideoDecodeAccelerator> decoder_weak_this_factory_;
base::WeakPtrFactory<VTVideoDecodeAccelerator> weak_this_factory_;
-
- DISALLOW_COPY_AND_ASSIGN(VTVideoDecodeAccelerator);
};
} // namespace media
diff --git a/chromium/media/gpu/mac/vt_video_encode_accelerator_mac.h b/chromium/media/gpu/mac/vt_video_encode_accelerator_mac.h
index 6fe7f3bc9f6..5c7d073a091 100644
--- a/chromium/media/gpu/mac/vt_video_encode_accelerator_mac.h
+++ b/chromium/media/gpu/mac/vt_video_encode_accelerator_mac.h
@@ -29,6 +29,10 @@ class MEDIA_GPU_EXPORT VTVideoEncodeAccelerator
: public VideoEncodeAccelerator {
public:
VTVideoEncodeAccelerator();
+
+ VTVideoEncodeAccelerator(const VTVideoEncodeAccelerator&) = delete;
+ VTVideoEncodeAccelerator& operator=(const VTVideoEncodeAccelerator&) = delete;
+
~VTVideoEncodeAccelerator() override;
// VideoEncodeAccelerator implementation.
@@ -155,8 +159,6 @@ class MEDIA_GPU_EXPORT VTVideoEncodeAccelerator
// other destructors run.
base::WeakPtr<VTVideoEncodeAccelerator> encoder_weak_ptr_;
base::WeakPtrFactory<VTVideoEncodeAccelerator> encoder_task_weak_factory_;
-
- DISALLOW_COPY_AND_ASSIGN(VTVideoEncodeAccelerator);
};
} // namespace media
diff --git a/chromium/media/gpu/v4l2/BUILD.gn b/chromium/media/gpu/v4l2/BUILD.gn
index 9d60b804edb..0ac4aa4cf97 100644
--- a/chromium/media/gpu/v4l2/BUILD.gn
+++ b/chromium/media/gpu/v4l2/BUILD.gn
@@ -126,9 +126,14 @@ source_set("v4l2") {
source_set("unit_test") {
testonly = true
- sources = [ "v4l2_device_unittest.cc" ]
+ sources = [
+ "v4l2_device_unittest.cc",
+ "v4l2_stateful_workaround_unittest.cc",
+ ]
deps = [
":v4l2",
+ "//base",
+ "//media",
"//media:test_support",
"//testing/gtest",
"//ui/gfx:test_support",
@@ -139,6 +144,8 @@ source_set("unit_test") {
executable("v4l2_stateless_decoder") {
testonly = true
sources = [
+ "test/v4l2_ioctl_shim.cc",
+ "test/v4l2_ioctl_shim.h",
"test/v4l2_stateless_decoder.cc",
"test/vp9_decoder.cc",
"test/vp9_decoder.h",
diff --git a/chromium/media/gpu/v4l2/v4l2_decode_surface_handler.h b/chromium/media/gpu/v4l2/v4l2_decode_surface_handler.h
index 919cce4fe99..e49cd961039 100644
--- a/chromium/media/gpu/v4l2/v4l2_decode_surface_handler.h
+++ b/chromium/media/gpu/v4l2/v4l2_decode_surface_handler.h
@@ -16,6 +16,10 @@ class V4L2DecodeSurfaceHandler
: public DecodeSurfaceHandler<V4L2DecodeSurface> {
public:
V4L2DecodeSurfaceHandler() = default;
+
+ V4L2DecodeSurfaceHandler(const V4L2DecodeSurfaceHandler&) = delete;
+ V4L2DecodeSurfaceHandler& operator=(const V4L2DecodeSurfaceHandler&) = delete;
+
~V4L2DecodeSurfaceHandler() override = default;
// Append slice data in |data| of size |size| to pending hardware
@@ -27,9 +31,6 @@ class V4L2DecodeSurfaceHandler
// Decode the surface |dec_surface|.
virtual void DecodeSurface(scoped_refptr<V4L2DecodeSurface> dec_surface) = 0;
-
- private:
- DISALLOW_COPY_AND_ASSIGN(V4L2DecodeSurfaceHandler);
};
} // namespace media
diff --git a/chromium/media/gpu/v4l2/v4l2_device.cc b/chromium/media/gpu/v4l2/v4l2_device.cc
index a37692892d8..24e914711e0 100644
--- a/chromium/media/gpu/v4l2/v4l2_device.cc
+++ b/chromium/media/gpu/v4l2/v4l2_device.cc
@@ -152,6 +152,10 @@ class V4L2Buffer {
enum v4l2_memory memory,
const struct v4l2_format& format,
size_t buffer_id);
+
+ V4L2Buffer(const V4L2Buffer&) = delete;
+ V4L2Buffer& operator=(const V4L2Buffer&) = delete;
+
~V4L2Buffer();
void* GetPlaneMapping(const size_t plane);
@@ -180,8 +184,6 @@ class V4L2Buffer {
struct v4l2_format format_;
scoped_refptr<VideoFrame> video_frame_;
-
- DISALLOW_COPY_AND_ASSIGN(V4L2Buffer);
};
std::unique_ptr<V4L2Buffer> V4L2Buffer::Create(scoped_refptr<V4L2Device> device,
@@ -405,6 +407,10 @@ class V4L2BufferRefBase {
public:
V4L2BufferRefBase(const struct v4l2_buffer& v4l2_buffer,
base::WeakPtr<V4L2Queue> queue);
+
+ V4L2BufferRefBase(const V4L2BufferRefBase&) = delete;
+ V4L2BufferRefBase& operator=(const V4L2BufferRefBase&) = delete;
+
~V4L2BufferRefBase();
bool QueueBuffer(scoped_refptr<VideoFrame> video_frame);
@@ -436,7 +442,6 @@ class V4L2BufferRefBase {
bool queued = false;
SEQUENCE_CHECKER(sequence_checker_);
- DISALLOW_COPY_AND_ASSIGN(V4L2BufferRefBase);
};
V4L2BufferRefBase::V4L2BufferRefBase(const struct v4l2_buffer& v4l2_buffer,
@@ -1545,7 +1550,7 @@ namespace {
VideoCodecProfile V4L2ProfileToVideoCodecProfile(VideoCodec codec,
uint32_t v4l2_profile) {
switch (codec) {
- case kCodecH264:
+ case VideoCodec::kH264:
switch (v4l2_profile) {
// H264 Stereo and Multiview High are not tested and the use is
// minuscule, skip.
@@ -1560,7 +1565,7 @@ VideoCodecProfile V4L2ProfileToVideoCodecProfile(VideoCodec codec,
return H264PROFILE_HIGH;
}
break;
- case kCodecVP8:
+ case VideoCodec::kVP8:
switch (v4l2_profile) {
case V4L2_MPEG_VIDEO_VP8_PROFILE_0:
case V4L2_MPEG_VIDEO_VP8_PROFILE_1:
@@ -1569,7 +1574,7 @@ VideoCodecProfile V4L2ProfileToVideoCodecProfile(VideoCodec codec,
return VP8PROFILE_ANY;
}
break;
- case kCodecVP9:
+ case VideoCodec::kVP9:
switch (v4l2_profile) {
// VP9 Profile 1 and 3 are not tested and the use is minuscule, skip.
case V4L2_MPEG_VIDEO_VP9_PROFILE_0:
@@ -1594,13 +1599,13 @@ std::vector<VideoCodecProfile> V4L2Device::V4L2PixFmtToVideoCodecProfiles(
std::vector<VideoCodecProfile>* profiles) {
uint32_t query_id = 0;
switch (codec) {
- case kCodecH264:
+ case VideoCodec::kH264:
query_id = V4L2_CID_MPEG_VIDEO_H264_PROFILE;
break;
- case kCodecVP8:
+ case VideoCodec::kVP8:
query_id = V4L2_CID_MPEG_VIDEO_VP8_PROFILE;
break;
- case kCodecVP9:
+ case VideoCodec::kVP9:
query_id = V4L2_CID_MPEG_VIDEO_VP9_PROFILE;
break;
default:
@@ -1633,7 +1638,7 @@ std::vector<VideoCodecProfile> V4L2Device::V4L2PixFmtToVideoCodecProfiles(
switch (pix_fmt) {
case V4L2_PIX_FMT_H264:
case V4L2_PIX_FMT_H264_SLICE:
- if (!get_supported_profiles(kCodecH264, &profiles)) {
+ if (!get_supported_profiles(VideoCodec::kH264, &profiles)) {
DLOG(WARNING) << "Driver doesn't support QUERY H264 profiles, "
<< "use default values, Base, Main, High";
profiles = {
@@ -1649,7 +1654,7 @@ std::vector<VideoCodecProfile> V4L2Device::V4L2PixFmtToVideoCodecProfiles(
break;
case V4L2_PIX_FMT_VP9:
case V4L2_PIX_FMT_VP9_FRAME:
- if (!get_supported_profiles(kCodecVP9, &profiles)) {
+ if (!get_supported_profiles(VideoCodec::kVP9, &profiles)) {
DLOG(WARNING) << "Driver doesn't support QUERY VP9 profiles, "
<< "use default values, Profile0";
profiles = {VP9PROFILE_PROFILE0};
diff --git a/chromium/media/gpu/v4l2/v4l2_device.h b/chromium/media/gpu/v4l2/v4l2_device.h
index 49c0aa630a0..c8eec90a774 100644
--- a/chromium/media/gpu/v4l2/v4l2_device.h
+++ b/chromium/media/gpu/v4l2/v4l2_device.h
@@ -184,6 +184,9 @@ class MEDIA_GPU_EXPORT V4L2WritableBufferRef {
// store is deprecated and should not be called by new code.
void SetConfigStore(uint32_t config_store);
+ V4L2WritableBufferRef(const V4L2WritableBufferRef&) = delete;
+ V4L2WritableBufferRef& operator=(const V4L2WritableBufferRef&) = delete;
+
~V4L2WritableBufferRef();
private:
@@ -201,7 +204,6 @@ class MEDIA_GPU_EXPORT V4L2WritableBufferRef {
std::unique_ptr<V4L2BufferRefBase> buffer_data_;
SEQUENCE_CHECKER(sequence_checker_);
- DISALLOW_COPY_AND_ASSIGN(V4L2WritableBufferRef);
};
// A reference to a read-only, dequeued buffer.
diff --git a/chromium/media/gpu/v4l2/v4l2_framerate_control.cc b/chromium/media/gpu/v4l2/v4l2_framerate_control.cc
index 28d0bd1934e..2158c329410 100644
--- a/chromium/media/gpu/v4l2/v4l2_framerate_control.cc
+++ b/chromium/media/gpu/v4l2/v4l2_framerate_control.cc
@@ -64,9 +64,9 @@ namespace media {
static constexpr int kMovingAverageWindowSize = 32;
static constexpr base::TimeDelta kFrameIntervalFor120fps =
- base::TimeDelta::FromMilliseconds(8);
+ base::Milliseconds(8);
static constexpr base::TimeDelta kFrameIntervalFor24fps =
- base::TimeDelta::FromMilliseconds(41);
+ base::Milliseconds(41);
V4L2FrameRateControl::V4L2FrameRateControl(
scoped_refptr<V4L2Device> device,
@@ -128,8 +128,7 @@ void V4L2FrameRateControl::RecordFrameDurationThunk(
void V4L2FrameRateControl::RecordFrameDuration() {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
- constexpr base::TimeDelta kMaxFrameInterval =
- base::TimeDelta::FromMilliseconds(500);
+ constexpr base::TimeDelta kMaxFrameInterval = base::Milliseconds(500);
const base::TimeTicks frame_display_time = base::TimeTicks::Now();
const base::TimeDelta duration =
frame_display_time - last_frame_display_time_;
diff --git a/chromium/media/gpu/v4l2/v4l2_image_processor_backend.cc b/chromium/media/gpu/v4l2/v4l2_image_processor_backend.cc
index 30097973821..749e2c2585f 100644
--- a/chromium/media/gpu/v4l2/v4l2_image_processor_backend.cc
+++ b/chromium/media/gpu/v4l2/v4l2_image_processor_backend.cc
@@ -18,7 +18,6 @@
#include "base/bind.h"
#include "base/callback.h"
-#include "base/callback_helpers.h"
#include "base/numerics/safe_conversions.h"
#include "base/task/post_task.h"
#include "base/task/task_traits.h"
@@ -403,8 +402,7 @@ V4L2ImageProcessorBackend::CreateWithOutputMode(
output_planes[i].size = pix_mp.plane_fmt[i].sizeimage;
}
- auto image_processor = std::unique_ptr<
- V4L2ImageProcessorBackend, std::default_delete<ImageProcessorBackend>>(
+ std::unique_ptr<V4L2ImageProcessorBackend> image_processor(
new V4L2ImageProcessorBackend(
backend_task_runner, std::move(device),
PortConfig(input_config.fourcc, negotiated_input_size, input_planes,
@@ -432,11 +430,7 @@ V4L2ImageProcessorBackend::CreateWithOutputMode(
done.Wait();
if (!success) {
// This needs to be destroyed on |backend_task_runner|.
- backend_task_runner->PostTask(
- FROM_HERE,
- base::BindOnce(
- base::DoNothing::Once<std::unique_ptr<ImageProcessorBackend>>(),
- std::move(image_processor)));
+ backend_task_runner->DeleteSoon(FROM_HERE, std::move(image_processor));
return nullptr;
}
diff --git a/chromium/media/gpu/v4l2/v4l2_image_processor_backend.h b/chromium/media/gpu/v4l2/v4l2_image_processor_backend.h
index e8a9629d584..cd0536b6211 100644
--- a/chromium/media/gpu/v4l2/v4l2_image_processor_backend.h
+++ b/chromium/media/gpu/v4l2/v4l2_image_processor_backend.h
@@ -82,6 +82,8 @@ class MEDIA_GPU_EXPORT V4L2ImageProcessorBackend
size_t* num_planes);
private:
+ friend struct std::default_delete<V4L2ImageProcessorBackend>;
+
// Callback for initialization.
using InitCB = base::OnceCallback<void(bool)>;
@@ -195,4 +197,12 @@ class MEDIA_GPU_EXPORT V4L2ImageProcessorBackend
} // namespace media
+namespace std {
+
+template <>
+struct default_delete<media::V4L2ImageProcessorBackend>
+ : public default_delete<media::ImageProcessorBackend> {};
+
+} // namespace std
+
#endif // MEDIA_GPU_V4L2_V4L2_IMAGE_PROCESSOR_BACKEND_H_
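The std::default_delete specialization above exists so that a plain std::unique_ptr<V4L2ImageProcessorBackend> can destroy the object through the deleter that the base class befriends. A self-contained sketch of the idiom with generic placeholder names (Backend/V4L2Backend are illustrative, not Chromium types):

  #include <memory>

  class Backend {
   protected:
    virtual ~Backend() = default;                // Not publicly destructible.
    friend struct std::default_delete<Backend>;  // The deleter may delete us.
  };

  class V4L2Backend : public Backend {
   private:
    ~V4L2Backend() override = default;
    friend struct std::default_delete<V4L2Backend>;
  };

  namespace std {
  // Reuse the base deleter, which is allowed to call the protected destructor
  // (and reaches ~V4L2Backend virtually).
  template <>
  struct default_delete<V4L2Backend> : public default_delete<Backend> {};
  }  // namespace std

  int main() {
    std::unique_ptr<V4L2Backend> backend(new V4L2Backend());
  }  // |backend| is destroyed here through default_delete<Backend>.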
diff --git a/chromium/media/gpu/v4l2/v4l2_jpeg_encode_accelerator.h b/chromium/media/gpu/v4l2/v4l2_jpeg_encode_accelerator.h
index 63f485fcf3f..08539edb683 100644
--- a/chromium/media/gpu/v4l2/v4l2_jpeg_encode_accelerator.h
+++ b/chromium/media/gpu/v4l2/v4l2_jpeg_encode_accelerator.h
@@ -53,6 +53,11 @@ class MEDIA_GPU_EXPORT V4L2JpegEncodeAccelerator
public:
V4L2JpegEncodeAccelerator(
const scoped_refptr<base::SingleThreadTaskRunner>& io_task_runner);
+
+ V4L2JpegEncodeAccelerator(const V4L2JpegEncodeAccelerator&) = delete;
+ V4L2JpegEncodeAccelerator& operator=(const V4L2JpegEncodeAccelerator&) =
+ delete;
+
~V4L2JpegEncodeAccelerator() override;
// JpegEncodeAccelerator implementation.
@@ -430,8 +435,6 @@ class MEDIA_GPU_EXPORT V4L2JpegEncodeAccelerator
base::WeakPtr<V4L2JpegEncodeAccelerator> weak_ptr_;
// Weak factory for producing weak pointers on the child thread.
base::WeakPtrFactory<V4L2JpegEncodeAccelerator> weak_factory_;
-
- DISALLOW_COPY_AND_ASSIGN(V4L2JpegEncodeAccelerator);
};
} // namespace media
diff --git a/chromium/media/gpu/v4l2/v4l2_mjpeg_decode_accelerator.cc b/chromium/media/gpu/v4l2/v4l2_mjpeg_decode_accelerator.cc
index 7e14f4984a3..fa28a53a350 100644
--- a/chromium/media/gpu/v4l2/v4l2_mjpeg_decode_accelerator.cc
+++ b/chromium/media/gpu/v4l2/v4l2_mjpeg_decode_accelerator.cc
@@ -136,6 +136,9 @@ const uint8_t kDefaultDhtSeg[] = {
class V4L2MjpegDecodeAccelerator::JobRecord {
public:
+ JobRecord(const JobRecord&) = delete;
+ JobRecord& operator=(const JobRecord&) = delete;
+
virtual ~JobRecord() = default;
// Task ID passed from Decode() call.
@@ -154,8 +157,6 @@ class V4L2MjpegDecodeAccelerator::JobRecord {
protected:
JobRecord() = default;
-
- DISALLOW_COPY_AND_ASSIGN(JobRecord);
};
// Job record when the client uses BitstreamBuffer as input in Decode().
@@ -202,6 +203,9 @@ class JobRecordDmaBuf : public V4L2MjpegDecodeAccelerator::JobRecord {
mapped_addr_(nullptr),
out_frame_(std::move(dst_frame)) {}
+ JobRecordDmaBuf(const JobRecordDmaBuf&) = delete;
+ JobRecordDmaBuf& operator=(const JobRecordDmaBuf&) = delete;
+
~JobRecordDmaBuf() {
if (mapped_addr_) {
const int ret = munmap(mapped_addr_, size());
@@ -242,8 +246,6 @@ class JobRecordDmaBuf : public V4L2MjpegDecodeAccelerator::JobRecord {
off_t offset_;
void* mapped_addr_;
scoped_refptr<VideoFrame> out_frame_;
-
- DISALLOW_COPY_AND_ASSIGN(JobRecordDmaBuf);
};
V4L2MjpegDecodeAccelerator::BufferRecord::BufferRecord() : at_device(false) {
diff --git a/chromium/media/gpu/v4l2/v4l2_mjpeg_decode_accelerator.h b/chromium/media/gpu/v4l2/v4l2_mjpeg_decode_accelerator.h
index a8bf30e8911..cb16763faa0 100644
--- a/chromium/media/gpu/v4l2/v4l2_mjpeg_decode_accelerator.h
+++ b/chromium/media/gpu/v4l2/v4l2_mjpeg_decode_accelerator.h
@@ -39,6 +39,11 @@ class MEDIA_GPU_EXPORT V4L2MjpegDecodeAccelerator
V4L2MjpegDecodeAccelerator(
const scoped_refptr<V4L2Device>& device,
const scoped_refptr<base::SingleThreadTaskRunner>& io_task_runner);
+
+ V4L2MjpegDecodeAccelerator(const V4L2MjpegDecodeAccelerator&) = delete;
+ V4L2MjpegDecodeAccelerator& operator=(const V4L2MjpegDecodeAccelerator&) =
+ delete;
+
~V4L2MjpegDecodeAccelerator() override;
// MjpegDecodeAccelerator implementation.
@@ -185,8 +190,6 @@ class MEDIA_GPU_EXPORT V4L2MjpegDecodeAccelerator
base::WeakPtr<V4L2MjpegDecodeAccelerator> weak_ptr_;
// Weak factory for producing weak pointers on the child thread.
base::WeakPtrFactory<V4L2MjpegDecodeAccelerator> weak_factory_;
-
- DISALLOW_COPY_AND_ASSIGN(V4L2MjpegDecodeAccelerator);
};
} // namespace media
diff --git a/chromium/media/gpu/v4l2/v4l2_slice_video_decode_accelerator.cc b/chromium/media/gpu/v4l2/v4l2_slice_video_decode_accelerator.cc
index 53aa2d6fee5..bbdf34c3429 100644
--- a/chromium/media/gpu/v4l2/v4l2_slice_video_decode_accelerator.cc
+++ b/chromium/media/gpu/v4l2/v4l2_slice_video_decode_accelerator.cc
@@ -2067,7 +2067,7 @@ void V4L2SliceVideoDecodeAccelerator::CheckGLFences() {
FROM_HERE,
base::BindOnce(&V4L2SliceVideoDecodeAccelerator::DecodeBufferTask,
base::Unretained(this)),
- base::TimeDelta::FromMilliseconds(kRescheduleDelayMs));
+ base::Milliseconds(kRescheduleDelayMs));
}
}
@@ -2240,7 +2240,36 @@ bool V4L2SliceVideoDecodeAccelerator::ProcessFrame(
DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread());
scoped_refptr<VideoFrame> input_frame = buffer->GetVideoFrame();
- DCHECK(input_frame);
+ if (!input_frame) {
+ VLOGF(1) << "Could not get the input frame for the image processor!";
+ return false;
+ }
+
+ // The |input_frame| has a potentially incorrect visible rectangle and natural
+ // size: that frame gets created by V4L2Buffer::CreateVideoFrame() which uses
+ // v4l2_format::fmt.pix_mp.width and v4l2_format::fmt.pix_mp.height as the
+ // visible rectangle and natural size. However, those dimensions actually
+ // correspond to the coded size. Therefore, we should wrap |input_frame| into
+ // another frame with the right visible rectangle and natural size.
+ DCHECK(input_frame->visible_rect().origin().IsOrigin());
+ const gfx::Rect visible_rect = image_processor_->input_config().visible_rect;
+ const gfx::Size natural_size = visible_rect.size();
+ if (!gfx::Rect(input_frame->coded_size()).Contains(visible_rect) ||
+ !input_frame->visible_rect().Contains(visible_rect)) {
+ VLOGF(1) << "The visible rectangle is invalid!";
+ return false;
+ }
+ if (!gfx::Rect(input_frame->natural_size())
+ .Contains(gfx::Rect(natural_size))) {
+ VLOGF(1) << "The natural size is too large!";
+ return false;
+ }
+ scoped_refptr<VideoFrame> cropped_input_frame = VideoFrame::WrapVideoFrame(
+ input_frame, input_frame->format(), visible_rect, natural_size);
+ if (!cropped_input_frame) {
+ VLOGF(1) << "Could not wrap the input frame for the image processor!";
+ return false;
+ }
if (image_processor_->output_mode() == ImageProcessor::OutputMode::IMPORT) {
// In IMPORT mode we can decide ourselves which IP buffer to use, so choose
@@ -2256,14 +2285,14 @@ bool V4L2SliceVideoDecodeAccelerator::ProcessFrame(
DCHECK(wrapped_frame);
image_processor_->Process(
- std::move(input_frame), std::move(wrapped_frame),
+ std::move(cropped_input_frame), std::move(wrapped_frame),
base::BindOnce(&V4L2SliceVideoDecodeAccelerator::FrameProcessed,
base::Unretained(this), surface, buffer->BufferId()));
} else {
// In ALLOCATE mode we cannot choose which IP buffer to use. We will get
// the surprise when FrameProcessed() is invoked...
if (!image_processor_->Process(
- std::move(input_frame),
+ std::move(cropped_input_frame),
base::BindOnce(&V4L2SliceVideoDecodeAccelerator::FrameProcessed,
base::Unretained(this), surface)))
return false;
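For context, a minimal illustration of the frame wrapping added in ProcessFrame() above, using hypothetical dimensions (the 1920x1088 and 1920x1080 values are assumptions for the example, not taken from the patch):

  // The V4L2 buffer reports its coded size, say 1920x1088, as both the
  // visible rectangle and the natural size of |decoded_frame|.
  scoped_refptr<VideoFrame> decoded_frame = buffer->GetVideoFrame();
  const gfx::Rect real_visible_rect(1920, 1080);  // From the IP input config.
  scoped_refptr<VideoFrame> cropped = VideoFrame::WrapVideoFrame(
      decoded_frame, decoded_frame->format(), real_visible_rect,
      real_visible_rect.size());
  // |cropped| shares |decoded_frame|'s memory but reports the corrected
  // visible rectangle and natural size that the image processor expects.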
diff --git a/chromium/media/gpu/v4l2/v4l2_slice_video_decode_accelerator.h b/chromium/media/gpu/v4l2/v4l2_slice_video_decode_accelerator.h
index 864a50b38b1..b452fdccdb7 100644
--- a/chromium/media/gpu/v4l2/v4l2_slice_video_decode_accelerator.h
+++ b/chromium/media/gpu/v4l2/v4l2_slice_video_decode_accelerator.h
@@ -53,6 +53,12 @@ class MEDIA_GPU_EXPORT V4L2SliceVideoDecodeAccelerator
EGLDisplay egl_display,
const BindGLImageCallback& bind_image_cb,
const MakeGLContextCurrentCallback& make_context_current_cb);
+
+ V4L2SliceVideoDecodeAccelerator(const V4L2SliceVideoDecodeAccelerator&) =
+ delete;
+ V4L2SliceVideoDecodeAccelerator& operator=(
+ const V4L2SliceVideoDecodeAccelerator&) = delete;
+
~V4L2SliceVideoDecodeAccelerator() override;
// VideoDecodeAccelerator implementation.
@@ -512,8 +518,6 @@ class MEDIA_GPU_EXPORT V4L2SliceVideoDecodeAccelerator
// The WeakPtrFactory for |weak_this_|.
base::WeakPtrFactory<V4L2SliceVideoDecodeAccelerator> weak_this_factory_;
-
- DISALLOW_COPY_AND_ASSIGN(V4L2SliceVideoDecodeAccelerator);
};
} // namespace media
diff --git a/chromium/media/gpu/v4l2/v4l2_stateful_workaround.cc b/chromium/media/gpu/v4l2/v4l2_stateful_workaround.cc
index e862d4b2cdd..e8db08dd08a 100644
--- a/chromium/media/gpu/v4l2/v4l2_stateful_workaround.cc
+++ b/chromium/media/gpu/v4l2/v4l2_stateful_workaround.cc
@@ -5,18 +5,111 @@
#include "media/gpu/v4l2/v4l2_stateful_workaround.h"
#include <string.h>
+#include <memory>
+#include <vector>
#include <linux/videodev2.h>
#include "base/containers/small_map.h"
+#include "base/containers/span.h"
#include "base/memory/ptr_util.h"
+#include "media/base/decoder_buffer.h"
#include "media/base/video_types.h"
#include "media/gpu/macros.h"
#include "media/parsers/vp8_parser.h"
#include "media/video/video_decode_accelerator.h"
namespace media {
+namespace {
+// Creates a superframe index from |frame_sizes|. Every frame size is stored
+// using the same number of bytes: for example, if the largest frame size
+// needs two bytes, the smaller frame sizes are also stored as two bytes.
+// See VP9 Spec Annex B for details.
+std::vector<uint8_t> CreateSuperFrameIndex(
+ const std::vector<uint32_t>& frame_sizes) {
+ if (frame_sizes.size() < 2)
+ return {};
+  // Computes how many bytes are needed to store the maximum frame size.
+ const uint32_t max_frame_size =
+ *std::max_element(frame_sizes.begin(), frame_sizes.end());
+ uint8_t bytes_per_framesize = 1;
+ for (uint32_t mask = 0xff; bytes_per_framesize <= 4; bytes_per_framesize++) {
+ if (max_frame_size < mask)
+ break;
+ mask <<= 8;
+ mask |= 0xff;
+ }
+
+ uint8_t superframe_header = 0xc0;
+ superframe_header |= static_cast<uint8_t>(frame_sizes.size() - 1);
+ superframe_header |= (bytes_per_framesize - 1) << 3;
+ const size_t index_sz = 2 + bytes_per_framesize * frame_sizes.size();
+ std::vector<uint8_t> superframe_index(index_sz);
+ size_t pos = 0;
+ superframe_index[pos++] = superframe_header;
+ for (uint32_t size : frame_sizes) {
+ for (int i = 0; i < bytes_per_framesize; i++) {
+ superframe_index[pos++] = size & 0xff;
+ size >>= 8;
+ }
+ }
+ superframe_index[pos++] = superframe_header;
+
+ return superframe_index;
+}
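// Worked example derived from the code above (values are illustrative): for
// frame_sizes = {0x10, 0x1020}, the largest size needs two bytes, so
// bytes_per_framesize == 2 and the marker byte is
// 0xc0 | (2 - 1) | ((2 - 1) << 3) == 0xc9. The returned index is
//   {0xc9, 0x10, 0x00, 0x20, 0x10, 0xc9}
// that is, 2 + 2 * 2 == 6 bytes: the marker, each frame size in little-endian
// order padded to two bytes, and the marker repeated at the end.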
+
+// Overwrites show_frame of each frame. It is set to 1 for the top spatial
+// layer and to 0 otherwise.
+bool OverwriteShowFrame(base::span<uint8_t> frame_data,
+ const std::vector<uint32_t>& frame_sizes) {
+ size_t sum_frame_size = 0;
+ for (uint32_t frame_size : frame_sizes)
+ sum_frame_size += frame_size;
+ if (frame_data.size() != sum_frame_size) {
+ LOG(ERROR) << "frame data size=" << frame_data.size()
+ << " is different from the sum of frame sizes"
+ << " index size=" << sum_frame_size;
+ return false;
+ }
+
+ size_t offset = 0;
+ for (size_t i = 0; i < frame_sizes.size(); ++i) {
+ uint8_t* header = frame_data.data() + offset;
+
+ // See VP9 Spec Annex B.
+ const uint8_t frame_marker = (*header >> 6);
+ if (frame_marker != 0b10) {
+ LOG(ERROR) << "Invalid frame marker: " << static_cast<int>(frame_marker);
+ return false;
+ }
+ const uint8_t profile = (*header >> 4) & 0b11;
+ if (profile == 3) {
+ LOG(ERROR) << "Unsupported profile";
+ return false;
+ }
+
+ const bool show_existing_frame = (*header >> 3) & 1;
+ const bool show_frame = i == frame_sizes.size() - 1;
+ int bit = 0;
+ if (show_existing_frame) {
+ header++;
+ bit = 6;
+ } else {
+ bit = 1;
+ }
+ if (show_frame) {
+ *header |= (1u << bit);
+ } else {
+ *header &= ~(1u << bit);
+ }
+
+ offset += frame_sizes[i];
+ }
+
+ return true;
+}
+} // namespace
// If the given resolution is not supported by the driver, some IOCTL must
// return some error code (e.g. EIO). However, there is a driver that doesn't
// follow this specification, for example go2001. This will be called before
@@ -134,4 +227,37 @@ CreateV4L2StatefulWorkarounds(V4L2Device::Type device_type,
return workarounds;
}
+bool AppendVP9SuperFrameIndexIfNeeded(scoped_refptr<DecoderBuffer>& buffer) {
+ if (buffer->side_data_size() == 0)
+ return true;
+
+ const size_t num_of_layers = buffer->side_data_size() / sizeof(uint32_t);
+ if (num_of_layers > 3u) {
+ LOG(ERROR) << "The maximum number of spatial layers in VP9 is three";
+ return false;
+ }
+
+ const uint32_t* cue_data =
+ reinterpret_cast<const uint32_t*>(buffer->side_data());
+ std::vector<uint32_t> frame_sizes(cue_data, cue_data + num_of_layers);
+ std::vector<uint8_t> superframe_index = CreateSuperFrameIndex(frame_sizes);
+ const size_t vp9_superframe_size =
+ buffer->data_size() + superframe_index.size();
+ auto vp9_superframe = std::make_unique<uint8_t[]>(vp9_superframe_size);
+ memcpy(vp9_superframe.get(), buffer->data(), buffer->data_size());
+ memcpy(vp9_superframe.get() + buffer->data_size(), superframe_index.data(),
+ superframe_index.size());
+
+ if (!OverwriteShowFrame(
+ base::make_span(vp9_superframe.get(), buffer->data_size()),
+ frame_sizes)) {
+ return false;
+ }
+
+ DVLOG(3) << "DecoderBuffer is overwritten";
+ buffer =
+ DecoderBuffer::FromArray(std::move(vp9_superframe), vp9_superframe_size);
+
+ return true;
+}
} // namespace media
diff --git a/chromium/media/gpu/v4l2/v4l2_stateful_workaround.h b/chromium/media/gpu/v4l2/v4l2_stateful_workaround.h
index 07387dfa7de..d01e9c9b747 100644
--- a/chromium/media/gpu/v4l2/v4l2_stateful_workaround.h
+++ b/chromium/media/gpu/v4l2/v4l2_stateful_workaround.h
@@ -13,6 +13,7 @@
namespace media {
+// TODO(hiroh): Remove this class once V4L2VideoDecodeAccelerator is removed.
class V4L2StatefulWorkaround {
public:
enum class Result {
@@ -21,6 +22,9 @@ class V4L2StatefulWorkaround {
// VDA will call NotifyError() if this is returned.
};
+ V4L2StatefulWorkaround(const V4L2StatefulWorkaround&) = delete;
+ V4L2StatefulWorkaround& operator=(const V4L2StatefulWorkaround&) = delete;
+
virtual ~V4L2StatefulWorkaround() = default;
// Apply the workaround.
@@ -28,8 +32,6 @@ class V4L2StatefulWorkaround {
protected:
V4L2StatefulWorkaround() = default;
-
- DISALLOW_COPY_AND_ASSIGN(V4L2StatefulWorkaround);
};
// Create necessary workarounds on the device for |device_type| and |profile|.
@@ -37,6 +39,14 @@ std::vector<std::unique_ptr<V4L2StatefulWorkaround>>
CreateV4L2StatefulWorkarounds(V4L2Device::Type device_type,
VideoCodecProfile profile);
+// A DecoderBuffer in a VP9 k-SVC stream contains a superframe but lacks the
+// superframe_index. If the DecoderBuffer has side_data, it holds the sizes of
+// the frames in the superframe, and this function constructs the
+// superframe_index from them. |buffer| is replaced with a new DecoderBuffer
+// whose data is the original data with the superframe index appended. In
+// addition, show_frame in the new DecoderBuffer is overwritten so that it is
+// one only for the top spatial layer.
+// See go/VP9-k-SVC-Decoing-VAAPI for details.
+bool AppendVP9SuperFrameIndexIfNeeded(scoped_refptr<DecoderBuffer>& buffer);
} // namespace media
#endif // MEDIA_GPU_V4L2_V4L2_STATEFUL_WORKAROUND_H_
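A hypothetical call site, to illustrate the contract (GetNextKSVCBuffer() and the surrounding error handling are assumptions for this sketch, not part of the patch):

  // |buffer| holds one VP9 k-SVC superframe; its side_data carries the sizes
  // of the per-spatial-layer frames.
  scoped_refptr<DecoderBuffer> buffer = GetNextKSVCBuffer();
  if (!AppendVP9SuperFrameIndexIfNeeded(buffer))
    return false;  // Inconsistent side data; report an error to the client.
  // |buffer| now ends with a superframe index and only the top spatial layer
  // has show_frame set, so a stateful V4L2 decoder can consume it directly.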
diff --git a/chromium/media/gpu/v4l2/v4l2_stateful_workaround_unittest.cc b/chromium/media/gpu/v4l2/v4l2_stateful_workaround_unittest.cc
new file mode 100644
index 00000000000..ebe13ce917f
--- /dev/null
+++ b/chromium/media/gpu/v4l2/v4l2_stateful_workaround_unittest.cc
@@ -0,0 +1,135 @@
+// Copyright 2021 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/gpu/v4l2/v4l2_stateful_workaround.h"
+
+#include <vector>
+
+#include "base/containers/span.h"
+#include "base/files/memory_mapped_file.h"
+#include "media/base/decoder_buffer.h"
+#include "media/base/test_data_util.h"
+#include "media/filters/ivf_parser.h"
+#include "media/filters/vp9_parser.h"
+#include "testing/gtest/include/gtest/gtest.h"
+#include "ui/gfx/geometry/size.h"
+
+namespace media {
+namespace {
+// Append |frame_sizes| to |decoder_buffer|'s side_data.
+void AppendSideData(DecoderBuffer& decoder_buffer,
+ const std::vector<uint32_t>& frame_sizes) {
+ const uint8_t* side_data =
+ reinterpret_cast<const uint8_t*>(frame_sizes.data());
+ size_t side_data_size =
+ frame_sizes.size() * sizeof(uint32_t) / sizeof(uint8_t);
+ decoder_buffer.CopySideDataFrom(side_data, side_data_size);
+}
+} // namespace
+
+// Checks that the superframe index size is as expected.
+TEST(V4L2StatefulWorkaroundTest, CheckSuperFrameIndexSize) {
+ constexpr uint32_t kFrameSizes[] = {
+ 0x10, // 1 byte
+ 0x1020, // 2 byte
+ 0x010203, // 3 byte
+ 0x01020304 // 4 byte
+ };
+
+ constexpr size_t kNumFrames = base::size(kFrameSizes);
+ for (size_t mask = 1; mask < (1 << kNumFrames) - 1; mask++) {
+ size_t buffer_size = 0;
+ size_t expected_bytes_per_framesize = 0;
+ std::vector<uint32_t> frame_sizes;
+ for (size_t i = 0; i < kNumFrames; i++) {
+ if (!(mask & (1 << i)))
+ continue;
+ frame_sizes.push_back(kFrameSizes[i]);
+ buffer_size += kFrameSizes[i];
+ expected_bytes_per_framesize = i + 1;
+ }
+
+    // Since we don't care about the buffer content, the buffer is zeroed
+    // except for the VP9 frame marker.
+ std::vector<uint8_t> tmp_buffer(buffer_size);
+ size_t offset = 0;
+ for (const uint32_t frame_size : frame_sizes) {
+ uint8_t* header = tmp_buffer.data() + offset;
+ *header = 0x8f;
+ offset += frame_size;
+ }
+ auto decoder_buffer =
+ DecoderBuffer::CopyFrom(tmp_buffer.data(), tmp_buffer.size());
+ AppendSideData(*decoder_buffer, frame_sizes);
+
+ AppendVP9SuperFrameIndexIfNeeded(decoder_buffer);
+ if (frame_sizes.size() == 1) {
+ EXPECT_EQ(decoder_buffer->data_size(), buffer_size);
+ continue;
+ }
+
+ EXPECT_GT(decoder_buffer->data_size(), buffer_size);
+ size_t superframe_index_size = decoder_buffer->data_size() - buffer_size;
+ EXPECT_EQ(superframe_index_size,
+ 2 + expected_bytes_per_framesize * frame_sizes.size());
+ }
+}
+
+TEST(V4L2StatefulWorkaroundTest, ParseAppendedSuperFrameIndex) {
+ auto stream = std::make_unique<base::MemoryMappedFile>();
+ ASSERT_TRUE(stream->Initialize(GetTestDataFilePath("test-25fps.vp9")));
+
+ // Read three frames from test-25fps.vp9.
+ IvfParser ivf_parser;
+ IvfFileHeader ivf_file_header;
+ ASSERT_TRUE(ivf_parser.Initialize(stream->data(), stream->length(),
+ &ivf_file_header));
+ ASSERT_EQ(ivf_file_header.fourcc, 0x30395056u); // VP90
+
+ constexpr size_t kNumBuffers = 3;
+ std::vector<base::span<const uint8_t>> buffers(3);
+ for (size_t i = 0; i < kNumBuffers; i++) {
+ IvfFrameHeader ivf_frame_header;
+ const uint8_t* ivf_payload;
+ ASSERT_TRUE(ivf_parser.ParseNextFrame(&ivf_frame_header, &ivf_payload));
+ buffers[i] = base::make_span(ivf_payload, ivf_frame_header.frame_size);
+ }
+
+ std::vector<uint32_t> frame_sizes;
+ std::vector<uint8_t> merged_buffer;
+ for (size_t i = 0; i < kNumBuffers; ++i) {
+ frame_sizes.push_back(buffers[i].size());
+
+ // |merged_buffer| is composed of [0, i] frames.
+ const size_t offset = merged_buffer.size();
+ merged_buffer.resize(offset + buffers[i].size());
+ memcpy(merged_buffer.data() + offset, buffers[i].data(), buffers[i].size());
+
+ auto decoder_buffer =
+ DecoderBuffer::CopyFrom(merged_buffer.data(), merged_buffer.size());
+ AppendSideData(*decoder_buffer, frame_sizes);
+
+ AppendVP9SuperFrameIndexIfNeeded(decoder_buffer);
+
+ Vp9Parser vp9_parser(/*parsing_compressed_header=*/false);
+ vp9_parser.SetStream(decoder_buffer->data(), decoder_buffer->data_size(),
+ /*stream_config=*/nullptr);
+
+ // Parse the merged buffer with the created superframe index.
+ for (size_t j = 0; j <= i; j++) {
+ Vp9FrameHeader frame_header{};
+ gfx::Size allocate_size;
+ std::unique_ptr<DecryptConfig> frame_decrypt_config;
+ EXPECT_EQ(vp9_parser.ParseNextFrame(&frame_header, &allocate_size,
+ &frame_decrypt_config),
+ Vp9Parser::Result::kOk);
+
+ EXPECT_EQ(frame_header.frame_size, buffers[j].size());
+ // show_frame is 1 if and only if the frame is in the top spatial layer.
+ EXPECT_EQ(frame_header.show_frame, j == i);
+ }
+ }
+}
+
+} // namespace media
diff --git a/chromium/media/gpu/v4l2/v4l2_vda_helpers.cc b/chromium/media/gpu/v4l2/v4l2_vda_helpers.cc
index adec5b8c064..b83b41aa60d 100644
--- a/chromium/media/gpu/v4l2/v4l2_vda_helpers.cc
+++ b/chromium/media/gpu/v4l2/v4l2_vda_helpers.cc
@@ -147,11 +147,11 @@ std::unique_ptr<InputBufferFragmentSplitter>
InputBufferFragmentSplitter::CreateFromProfile(
media::VideoCodecProfile profile) {
switch (VideoCodecProfileToVideoCodec(profile)) {
- case kCodecH264:
+ case VideoCodec::kH264:
return std::make_unique<
v4l2_vda_helpers::H264InputBufferFragmentSplitter>();
- case kCodecVP8:
- case kCodecVP9:
+ case VideoCodec::kVP8:
+ case VideoCodec::kVP9:
// VP8/VP9 don't need any frame splitting, use the default implementation.
return std::make_unique<v4l2_vda_helpers::InputBufferFragmentSplitter>();
default:
diff --git a/chromium/media/gpu/v4l2/v4l2_video_decode_accelerator.cc b/chromium/media/gpu/v4l2/v4l2_video_decode_accelerator.cc
index b943f35e6ff..32e75c8a541 100644
--- a/chromium/media/gpu/v4l2/v4l2_video_decode_accelerator.cc
+++ b/chromium/media/gpu/v4l2/v4l2_video_decode_accelerator.cc
@@ -15,6 +15,7 @@
#include <sys/mman.h>
#include "base/bind.h"
+#include "base/callback_helpers.h"
#include "base/command_line.h"
#include "base/cxx17_backports.h"
#include "base/numerics/safe_conversions.h"
@@ -1286,7 +1287,7 @@ void V4L2VideoDecodeAccelerator::CheckGLFences() {
FROM_HERE,
base::BindOnce(&V4L2VideoDecodeAccelerator::Enqueue,
base::Unretained(this)),
- base::TimeDelta::FromMilliseconds(resched_delay));
+ base::Milliseconds(resched_delay));
}
break;
}
@@ -1925,7 +1926,7 @@ bool V4L2VideoDecodeAccelerator::StopDevicePoll() {
// Must be done after the Stop() above to ensure
// |cancelable_service_device_task_callback_| is not copied.
cancelable_service_device_task_.Cancel();
- cancelable_service_device_task_callback_ = {};
+ cancelable_service_device_task_callback_ = base::NullCallback();
// Clear the interrupt now, to be sure.
if (!device_->ClearDevicePollInterrupt()) {
PLOG(ERROR) << "ClearDevicePollInterrupt: failed";
@@ -2366,7 +2367,33 @@ bool V4L2VideoDecodeAccelerator::ProcessFrame(int32_t bitstream_buffer_id,
scoped_refptr<VideoFrame> input_frame = buf->GetVideoFrame();
if (!input_frame) {
- VLOGF(1) << "Failed wrapping input frame!";
+ VLOGF(1) << "Could not get the input frame for the image processor!";
+ return false;
+ }
+
+ // The |input_frame| has a potentially incorrect visible rectangle and natural
+ // size: that frame gets created by V4L2Buffer::CreateVideoFrame() which uses
+ // v4l2_format::fmt.pix_mp.width and v4l2_format::fmt.pix_mp.height as the
+ // visible rectangle and natural size. However, those dimensions actually
+ // correspond to the coded size. Therefore, we should wrap |input_frame| into
+ // another frame with the right visible rectangle and natural size.
+ DCHECK(input_frame->visible_rect().origin().IsOrigin());
+ const gfx::Rect visible_rect = image_processor_->input_config().visible_rect;
+ const gfx::Size natural_size = visible_rect.size();
+ if (!gfx::Rect(input_frame->coded_size()).Contains(visible_rect) ||
+ !input_frame->visible_rect().Contains(visible_rect)) {
+ VLOGF(1) << "The visible size is too large!";
+ return false;
+ }
+ if (!gfx::Rect(input_frame->natural_size())
+ .Contains(gfx::Rect(natural_size))) {
+ VLOGF(1) << "The natural size is too large!";
+ return false;
+ }
+ scoped_refptr<VideoFrame> cropped_input_frame = VideoFrame::WrapVideoFrame(
+ input_frame, input_frame->format(), visible_rect, natural_size);
+ if (!cropped_input_frame) {
+ VLOGF(1) << "Could not wrap the input frame for the image processor!";
return false;
}
@@ -2378,13 +2405,13 @@ bool V4L2VideoDecodeAccelerator::ProcessFrame(int32_t bitstream_buffer_id,
// FrameReadyCB is executed.
if (image_processor_->output_mode() == ImageProcessor::OutputMode::IMPORT) {
image_processor_->Process(
- input_frame, output_record.output_frame,
+ cropped_input_frame, output_record.output_frame,
base::BindOnce(&V4L2VideoDecodeAccelerator::FrameProcessed,
base::Unretained(this), bitstream_buffer_id,
buf->BufferId()));
} else {
image_processor_->Process(
- input_frame,
+ cropped_input_frame,
base::BindOnce(&V4L2VideoDecodeAccelerator::FrameProcessed,
base::Unretained(this), bitstream_buffer_id));
}
diff --git a/chromium/media/gpu/v4l2/v4l2_video_decode_accelerator.h b/chromium/media/gpu/v4l2/v4l2_video_decode_accelerator.h
index 6dacbdc5bdc..186f2cd552f 100644
--- a/chromium/media/gpu/v4l2/v4l2_video_decode_accelerator.h
+++ b/chromium/media/gpu/v4l2/v4l2_video_decode_accelerator.h
@@ -111,6 +111,11 @@ class MEDIA_GPU_EXPORT V4L2VideoDecodeAccelerator
const GetGLContextCallback& get_gl_context_cb,
const MakeGLContextCurrentCallback& make_context_current_cb,
scoped_refptr<V4L2Device> device);
+
+ V4L2VideoDecodeAccelerator(const V4L2VideoDecodeAccelerator&) = delete;
+ V4L2VideoDecodeAccelerator& operator=(const V4L2VideoDecodeAccelerator&) =
+ delete;
+
~V4L2VideoDecodeAccelerator() override;
// VideoDecodeAccelerator implementation.
@@ -618,8 +623,6 @@ class MEDIA_GPU_EXPORT V4L2VideoDecodeAccelerator
// The WeakPtrFactory for |weak_this_|.
base::WeakPtrFactory<V4L2VideoDecodeAccelerator> weak_this_factory_;
-
- DISALLOW_COPY_AND_ASSIGN(V4L2VideoDecodeAccelerator);
};
} // namespace media
diff --git a/chromium/media/gpu/v4l2/v4l2_video_decoder.cc b/chromium/media/gpu/v4l2/v4l2_video_decoder.cc
index e1b9d205220..9b44eaad406 100644
--- a/chromium/media/gpu/v4l2/v4l2_video_decoder.cc
+++ b/chromium/media/gpu/v4l2/v4l2_video_decoder.cc
@@ -382,14 +382,18 @@ bool V4L2VideoDecoder::SetupOutputFormat(const gfx::Size& size,
}
// Ask the pipeline to pick the output format.
- const absl::optional<std::pair<Fourcc, gfx::Size>> output_format =
- client_->PickDecoderOutputFormat(candidates, visible_rect);
- if (!output_format) {
+ StatusOr<std::pair<Fourcc, gfx::Size>> status_or_output_format =
+ client_->PickDecoderOutputFormat(
+ candidates, visible_rect, aspect_ratio_.GetNaturalSize(visible_rect),
+ /*output_size=*/absl::nullopt, num_output_frames_,
+ /*use_protected=*/false, /*need_aux_frame_pool=*/false);
+ if (status_or_output_format.has_error()) {
VLOGF(1) << "Failed to pick an output format.";
return false;
}
- Fourcc fourcc = std::move(output_format->first);
- gfx::Size picked_size = std::move(output_format->second);
+ const auto output_format = std::move(status_or_output_format).value();
+ Fourcc fourcc = std::move(output_format.first);
+ gfx::Size picked_size = std::move(output_format.second);
// We successfully picked the output format. Now setup output format again.
absl::optional<struct v4l2_format> format =
@@ -412,27 +416,34 @@ bool V4L2VideoDecoder::SetupOutputFormat(const gfx::Size& size,
// created by VideoFramePool.
DmabufVideoFramePool* pool = client_->GetVideoFramePool();
if (pool) {
- absl::optional<GpuBufferLayout> layout = pool->Initialize(
+ // TODO(andrescj): the call to PickDecoderOutputFormat() should have already
+ // initialized the frame pool, so this call to Initialize() is redundant.
+ // However, we still have to get the GpuBufferLayout to find out the
+ // modifier that we need to give to the driver. We should add a
+ // GetGpuBufferLayout() method to DmabufVideoFramePool to query that without
+ // having to re-initialize the pool.
+ StatusOr<GpuBufferLayout> status_or_layout = pool->Initialize(
fourcc, adjusted_size, visible_rect,
aspect_ratio_.GetNaturalSize(visible_rect), num_output_frames_,
/*use_protected=*/false);
- if (!layout) {
+ if (status_or_layout.has_error()) {
VLOGF(1) << "Failed to setup format to VFPool";
return false;
}
- if (layout->size() != adjusted_size) {
+ const GpuBufferLayout layout = std::move(status_or_layout).value();
+ if (layout.size() != adjusted_size) {
VLOGF(1) << "The size adjusted by VFPool is different from one "
<< "adjusted by a video driver. fourcc: " << fourcc.ToString()
<< ", (video driver v.s. VFPool) " << adjusted_size.ToString()
- << " != " << layout->size().ToString();
+ << " != " << layout.size().ToString();
return false;
}
- VLOGF(1) << "buffer modifier: " << std::hex << layout->modifier();
- if (layout->modifier() &&
- layout->modifier() != gfx::NativePixmapHandle::kNoModifier) {
+ VLOGF(1) << "buffer modifier: " << std::hex << layout.modifier();
+ if (layout.modifier() &&
+ layout.modifier() != gfx::NativePixmapHandle::kNoModifier) {
absl::optional<struct v4l2_format> modifier_format =
- output_queue_->SetModifierFormat(layout->modifier(), picked_size);
+ output_queue_->SetModifierFormat(layout.modifier(), picked_size);
if (!modifier_format)
return false;
diff --git a/chromium/media/gpu/v4l2/v4l2_video_decoder_backend.h b/chromium/media/gpu/v4l2/v4l2_video_decoder_backend.h
index 3c49de8f8dd..df24f24505b 100644
--- a/chromium/media/gpu/v4l2/v4l2_video_decoder_backend.h
+++ b/chromium/media/gpu/v4l2/v4l2_video_decoder_backend.h
@@ -61,6 +61,9 @@ class V4L2VideoDecoderBackend {
virtual DmabufVideoFramePool* GetVideoFramePool() const = 0;
};
+ V4L2VideoDecoderBackend(const V4L2VideoDecoderBackend&) = delete;
+ V4L2VideoDecoderBackend& operator=(const V4L2VideoDecoderBackend&) = delete;
+
virtual ~V4L2VideoDecoderBackend();
virtual bool Initialize() = 0;
@@ -113,7 +116,6 @@ class V4L2VideoDecoderBackend {
scoped_refptr<V4L2Queue> output_queue_;
SEQUENCE_CHECKER(sequence_checker_);
- DISALLOW_COPY_AND_ASSIGN(V4L2VideoDecoderBackend);
};
} // namespace media
diff --git a/chromium/media/gpu/v4l2/v4l2_video_decoder_backend_stateful.cc b/chromium/media/gpu/v4l2/v4l2_video_decoder_backend_stateful.cc
index ef46925cb7b..b46528b4f7a 100644
--- a/chromium/media/gpu/v4l2/v4l2_video_decoder_backend_stateful.cc
+++ b/chromium/media/gpu/v4l2/v4l2_video_decoder_backend_stateful.cc
@@ -19,6 +19,7 @@
#include "media/gpu/chromeos/dmabuf_video_frame_pool.h"
#include "media/gpu/macros.h"
#include "media/gpu/v4l2/v4l2_device.h"
+#include "media/gpu/v4l2/v4l2_stateful_workaround.h"
#include "media/gpu/v4l2/v4l2_vda_helpers.h"
#include "media/gpu/v4l2/v4l2_video_decoder_backend.h"
#include "third_party/abseil-cpp/absl/types/optional.h"
@@ -147,6 +148,11 @@ void V4L2StatefulVideoDecoderBackend::DoDecodeWork() {
// This is our new decode request.
current_decode_request_ = std::move(decode_request);
DCHECK_EQ(current_decode_request_->bytes_used, 0u);
+
+ if (VideoCodecProfileToVideoCodec(profile_) == VideoCodec::kVP9 &&
+ !AppendVP9SuperFrameIndexIfNeeded(current_decode_request_->buffer)) {
+ VLOGF(1) << "Failed to append superframe index for VP9 k-SVC frame";
+ }
}
// Get a V4L2 buffer to copy the encoded data into.
diff --git a/chromium/media/gpu/v4l2/v4l2_video_decoder_backend_stateless.cc b/chromium/media/gpu/v4l2/v4l2_video_decoder_backend_stateless.cc
index 1753c588b47..c3222750143 100644
--- a/chromium/media/gpu/v4l2/v4l2_video_decoder_backend_stateless.cc
+++ b/chromium/media/gpu/v4l2/v4l2_video_decoder_backend_stateless.cc
@@ -516,11 +516,14 @@ void V4L2StatelessVideoDecoderBackend::PumpOutputSurfaces() {
{
const int64_t flat_timestamp = request.timestamp.InMilliseconds();
- DCHECK(base::Contains(encoding_timestamps_, flat_timestamp));
- UMA_HISTOGRAM_TIMES(
- "Media.PlatformVideoDecoding.Decode",
- base::TimeTicks::Now() - encoding_timestamps_[flat_timestamp]);
- encoding_timestamps_.erase(flat_timestamp);
+ // TODO(b/190615065) |flat_timestamp| might be repeated with H.264
+ // bitstreams, investigate why, and change the if() to DCHECK().
+ if (base::Contains(encoding_timestamps_, flat_timestamp)) {
+ UMA_HISTOGRAM_TIMES(
+ "Media.PlatformVideoDecoding.Decode",
+ base::TimeTicks::Now() - encoding_timestamps_[flat_timestamp]);
+ encoding_timestamps_.erase(flat_timestamp);
+ }
}
break;
diff --git a/chromium/media/gpu/v4l2/v4l2_video_decoder_backend_stateless.h b/chromium/media/gpu/v4l2/v4l2_video_decoder_backend_stateless.h
index c1e35be6cc4..10cd8c307ee 100644
--- a/chromium/media/gpu/v4l2/v4l2_video_decoder_backend_stateless.h
+++ b/chromium/media/gpu/v4l2/v4l2_video_decoder_backend_stateless.h
@@ -36,6 +36,11 @@ class V4L2StatelessVideoDecoderBackend : public V4L2VideoDecoderBackend,
VideoCodecProfile profile,
scoped_refptr<base::SequencedTaskRunner> task_runner);
+ V4L2StatelessVideoDecoderBackend(const V4L2StatelessVideoDecoderBackend&) =
+ delete;
+ V4L2StatelessVideoDecoderBackend& operator=(
+ const V4L2StatelessVideoDecoderBackend&) = delete;
+
~V4L2StatelessVideoDecoderBackend() override;
// V4L2VideoDecoderBackend implementation
@@ -179,8 +184,6 @@ class V4L2StatelessVideoDecoderBackend : public V4L2VideoDecoderBackend,
base::WeakPtr<V4L2StatelessVideoDecoderBackend> weak_this_;
base::WeakPtrFactory<V4L2StatelessVideoDecoderBackend> weak_this_factory_{
this};
-
- DISALLOW_COPY_AND_ASSIGN(V4L2StatelessVideoDecoderBackend);
};
} // namespace media
diff --git a/chromium/media/gpu/v4l2/v4l2_video_decoder_delegate_h264.h b/chromium/media/gpu/v4l2/v4l2_video_decoder_delegate_h264.h
index 98155724da9..44ca01ce85b 100644
--- a/chromium/media/gpu/v4l2/v4l2_video_decoder_delegate_h264.h
+++ b/chromium/media/gpu/v4l2/v4l2_video_decoder_delegate_h264.h
@@ -27,6 +27,11 @@ class V4L2VideoDecoderDelegateH264 : public H264Decoder::H264Accelerator {
explicit V4L2VideoDecoderDelegateH264(
V4L2DecodeSurfaceHandler* surface_handler,
V4L2Device* device);
+
+ V4L2VideoDecoderDelegateH264(const V4L2VideoDecoderDelegateH264&) = delete;
+ V4L2VideoDecoderDelegateH264& operator=(const V4L2VideoDecoderDelegateH264&) =
+ delete;
+
~V4L2VideoDecoderDelegateH264() override;
// H264Decoder::H264Accelerator implementation.
@@ -62,8 +67,6 @@ class V4L2VideoDecoderDelegateH264 : public H264Decoder::H264Accelerator {
// Contains the kernel-specific structures that we don't want to expose
// outside of the compilation unit.
const std::unique_ptr<V4L2VideoDecoderDelegateH264Private> priv_;
-
- DISALLOW_COPY_AND_ASSIGN(V4L2VideoDecoderDelegateH264);
};
} // namespace media
diff --git a/chromium/media/gpu/v4l2/v4l2_video_decoder_delegate_h264_legacy.h b/chromium/media/gpu/v4l2/v4l2_video_decoder_delegate_h264_legacy.h
index c926ebcee60..a59a165cb59 100644
--- a/chromium/media/gpu/v4l2/v4l2_video_decoder_delegate_h264_legacy.h
+++ b/chromium/media/gpu/v4l2/v4l2_video_decoder_delegate_h264_legacy.h
@@ -27,6 +27,12 @@ class V4L2VideoDecoderDelegateH264Legacy : public H264Decoder::H264Accelerator {
explicit V4L2VideoDecoderDelegateH264Legacy(
V4L2DecodeSurfaceHandler* surface_handler,
V4L2Device* device);
+
+ V4L2VideoDecoderDelegateH264Legacy(
+ const V4L2VideoDecoderDelegateH264Legacy&) = delete;
+ V4L2VideoDecoderDelegateH264Legacy& operator=(
+ const V4L2VideoDecoderDelegateH264Legacy&) = delete;
+
~V4L2VideoDecoderDelegateH264Legacy() override;
// H264Decoder::H264Accelerator implementation.
@@ -69,8 +75,6 @@ class V4L2VideoDecoderDelegateH264Legacy : public H264Decoder::H264Accelerator {
// Contains the kernel-specific structures that we don't want to expose
// outside of the compilation unit.
const std::unique_ptr<V4L2VideoDecoderDelegateH264LegacyPrivate> priv_;
-
- DISALLOW_COPY_AND_ASSIGN(V4L2VideoDecoderDelegateH264Legacy);
};
} // namespace media
diff --git a/chromium/media/gpu/v4l2/v4l2_video_decoder_delegate_vp8.h b/chromium/media/gpu/v4l2/v4l2_video_decoder_delegate_vp8.h
index cbd7d5afbd4..b46b09ec63d 100644
--- a/chromium/media/gpu/v4l2/v4l2_video_decoder_delegate_vp8.h
+++ b/chromium/media/gpu/v4l2/v4l2_video_decoder_delegate_vp8.h
@@ -20,6 +20,11 @@ class V4L2VideoDecoderDelegateVP8 : public VP8Decoder::VP8Accelerator {
explicit V4L2VideoDecoderDelegateVP8(
V4L2DecodeSurfaceHandler* surface_handler,
V4L2Device* device);
+
+ V4L2VideoDecoderDelegateVP8(const V4L2VideoDecoderDelegateVP8&) = delete;
+ V4L2VideoDecoderDelegateVP8& operator=(const V4L2VideoDecoderDelegateVP8&) =
+ delete;
+
~V4L2VideoDecoderDelegateVP8() override;
// VP8Decoder::VP8Accelerator implementation.
@@ -34,8 +39,6 @@ class V4L2VideoDecoderDelegateVP8 : public VP8Decoder::VP8Accelerator {
V4L2DecodeSurfaceHandler* const surface_handler_;
V4L2Device* const device_;
-
- DISALLOW_COPY_AND_ASSIGN(V4L2VideoDecoderDelegateVP8);
};
} // namespace media
diff --git a/chromium/media/gpu/v4l2/v4l2_video_decoder_delegate_vp8_legacy.h b/chromium/media/gpu/v4l2/v4l2_video_decoder_delegate_vp8_legacy.h
index 098682a8787..692f43829c6 100644
--- a/chromium/media/gpu/v4l2/v4l2_video_decoder_delegate_vp8_legacy.h
+++ b/chromium/media/gpu/v4l2/v4l2_video_decoder_delegate_vp8_legacy.h
@@ -20,6 +20,12 @@ class V4L2VideoDecoderDelegateVP8Legacy : public VP8Decoder::VP8Accelerator {
explicit V4L2VideoDecoderDelegateVP8Legacy(
V4L2DecodeSurfaceHandler* surface_handler,
V4L2Device* device);
+
+ V4L2VideoDecoderDelegateVP8Legacy(const V4L2VideoDecoderDelegateVP8Legacy&) =
+ delete;
+ V4L2VideoDecoderDelegateVP8Legacy& operator=(
+ const V4L2VideoDecoderDelegateVP8Legacy&) = delete;
+
~V4L2VideoDecoderDelegateVP8Legacy() override;
// VP8Decoder::VP8Accelerator implementation.
@@ -34,8 +40,6 @@ class V4L2VideoDecoderDelegateVP8Legacy : public VP8Decoder::VP8Accelerator {
V4L2DecodeSurfaceHandler* const surface_handler_;
V4L2Device* const device_;
-
- DISALLOW_COPY_AND_ASSIGN(V4L2VideoDecoderDelegateVP8Legacy);
};
} // namespace media
diff --git a/chromium/media/gpu/v4l2/v4l2_video_decoder_delegate_vp9_chromium.h b/chromium/media/gpu/v4l2/v4l2_video_decoder_delegate_vp9_chromium.h
index f4975c92325..0cf8b66534c 100644
--- a/chromium/media/gpu/v4l2/v4l2_video_decoder_delegate_vp9_chromium.h
+++ b/chromium/media/gpu/v4l2/v4l2_video_decoder_delegate_vp9_chromium.h
@@ -18,6 +18,12 @@ class V4L2VideoDecoderDelegateVP9Chromium : public VP9Decoder::VP9Accelerator {
explicit V4L2VideoDecoderDelegateVP9Chromium(
V4L2DecodeSurfaceHandler* surface_handler,
V4L2Device* device);
+
+ V4L2VideoDecoderDelegateVP9Chromium(
+ const V4L2VideoDecoderDelegateVP9Chromium&) = delete;
+ V4L2VideoDecoderDelegateVP9Chromium& operator=(
+ const V4L2VideoDecoderDelegateVP9Chromium&) = delete;
+
~V4L2VideoDecoderDelegateVP9Chromium() override;
// VP9Decoder::VP9Accelerator implementation.
@@ -41,8 +47,6 @@ class V4L2VideoDecoderDelegateVP9Chromium : public VP9Decoder::VP9Accelerator {
V4L2Device* const device_;
bool device_needs_frame_context_ = false;
-
- DISALLOW_COPY_AND_ASSIGN(V4L2VideoDecoderDelegateVP9Chromium);
};
} // namespace media
diff --git a/chromium/media/gpu/v4l2/v4l2_video_decoder_delegate_vp9_legacy.h b/chromium/media/gpu/v4l2/v4l2_video_decoder_delegate_vp9_legacy.h
index cedb188243c..3f953cec74d 100644
--- a/chromium/media/gpu/v4l2/v4l2_video_decoder_delegate_vp9_legacy.h
+++ b/chromium/media/gpu/v4l2/v4l2_video_decoder_delegate_vp9_legacy.h
@@ -22,6 +22,12 @@ class V4L2VideoDecoderDelegateVP9Legacy : public VP9Decoder::VP9Accelerator {
explicit V4L2VideoDecoderDelegateVP9Legacy(
V4L2DecodeSurfaceHandler* surface_handler,
V4L2Device* device);
+
+ V4L2VideoDecoderDelegateVP9Legacy(const V4L2VideoDecoderDelegateVP9Legacy&) =
+ delete;
+ V4L2VideoDecoderDelegateVP9Legacy& operator=(
+ const V4L2VideoDecoderDelegateVP9Legacy&) = delete;
+
~V4L2VideoDecoderDelegateVP9Legacy() override;
// VP9Decoder::VP9Accelerator implementation.
@@ -48,8 +54,6 @@ class V4L2VideoDecoderDelegateVP9Legacy : public VP9Decoder::VP9Accelerator {
V4L2DecodeSurfaceHandler* const surface_handler_;
V4L2Device* const device_;
-
- DISALLOW_COPY_AND_ASSIGN(V4L2VideoDecoderDelegateVP9Legacy);
};
} // namespace media
diff --git a/chromium/media/gpu/v4l2/v4l2_video_encode_accelerator.cc b/chromium/media/gpu/v4l2/v4l2_video_encode_accelerator.cc
index 0941c8880ab..b238069e5e9 100644
--- a/chromium/media/gpu/v4l2/v4l2_video_encode_accelerator.cc
+++ b/chromium/media/gpu/v4l2/v4l2_video_encode_accelerator.cc
@@ -1191,13 +1191,13 @@ void V4L2VideoEncodeAccelerator::PumpBitstreamBuffers() {
<< ", key_frame=" << output_buf->IsKeyframe();
child_task_runner_->PostTask(
FROM_HERE,
- base::BindOnce(&Client::BitstreamBufferReady, client_, buffer_id,
- BitstreamBufferMetadata(
- output_data_size, output_buf->IsKeyframe(),
- base::TimeDelta::FromMicroseconds(
- output_buf->GetTimeStamp().tv_usec +
- output_buf->GetTimeStamp().tv_sec *
- base::Time::kMicrosecondsPerSecond))));
+ base::BindOnce(
+ &Client::BitstreamBufferReady, client_, buffer_id,
+ BitstreamBufferMetadata(
+ output_data_size, output_buf->IsKeyframe(),
+ base::Microseconds(output_buf->GetTimeStamp().tv_usec +
+ output_buf->GetTimeStamp().tv_sec *
+ base::Time::kMicrosecondsPerSecond))));
}
if ((encoder_state_ == kFlushing) && output_buf->IsLast()) {
diff --git a/chromium/media/gpu/v4l2/v4l2_video_encode_accelerator.h b/chromium/media/gpu/v4l2/v4l2_video_encode_accelerator.h
index 31846773a7c..da26a0b87b5 100644
--- a/chromium/media/gpu/v4l2/v4l2_video_encode_accelerator.h
+++ b/chromium/media/gpu/v4l2/v4l2_video_encode_accelerator.h
@@ -43,6 +43,11 @@ class MEDIA_GPU_EXPORT V4L2VideoEncodeAccelerator
: public VideoEncodeAccelerator {
public:
explicit V4L2VideoEncodeAccelerator(scoped_refptr<V4L2Device> device);
+
+ V4L2VideoEncodeAccelerator(const V4L2VideoEncodeAccelerator&) = delete;
+ V4L2VideoEncodeAccelerator& operator=(const V4L2VideoEncodeAccelerator&) =
+ delete;
+
~V4L2VideoEncodeAccelerator() override;
// VideoEncodeAccelerator implementation.
@@ -360,8 +365,6 @@ class MEDIA_GPU_EXPORT V4L2VideoEncodeAccelerator
// |encoder_task_runner_|.
base::WeakPtr<V4L2VideoEncodeAccelerator> weak_this_;
base::WeakPtrFactory<V4L2VideoEncodeAccelerator> weak_this_factory_{this};
-
- DISALLOW_COPY_AND_ASSIGN(V4L2VideoEncodeAccelerator);
};
} // namespace media
diff --git a/chromium/media/gpu/vaapi/BUILD.gn b/chromium/media/gpu/vaapi/BUILD.gn
index 8887b040d95..0c2520dd6b5 100644
--- a/chromium/media/gpu/vaapi/BUILD.gn
+++ b/chromium/media/gpu/vaapi/BUILD.gn
@@ -17,7 +17,7 @@ assert(use_vaapi)
generate_stubs("libva_stubs") {
extra_header = "va_stub_header.fragment"
sigs = [ "va.sigs" ]
- if (use_x11) {
+ if (use_vaapi_x11) {
sigs += [ "va_x11.sigs" ]
}
if (is_chromeos_ash) {
@@ -45,6 +45,8 @@ source_set("vaapi") {
"vaapi_image_decode_accelerator_worker.h",
"vaapi_image_decoder.cc",
"vaapi_image_decoder.h",
+ "vaapi_image_processor_backend.cc",
+ "vaapi_image_processor_backend.h",
"vaapi_jpeg_decoder.cc",
"vaapi_jpeg_decoder.h",
"vaapi_jpeg_encoder.cc",
@@ -102,7 +104,7 @@ source_set("vaapi") {
"//media/gpu/chromeos:common",
"//media/parsers",
"//mojo/public/cpp/bindings",
- "//third_party/libvpx:libvp9rc",
+ "//third_party/libvpx:libvpxrc",
"//third_party/libyuv",
"//ui/gfx",
"//ui/gfx/geometry",
@@ -129,14 +131,14 @@ source_set("vaapi") {
]
}
- if (use_x11 || use_ozone || use_egl) {
+ if (use_ozone || use_egl) {
sources += [
"vaapi_picture_native_pixmap.cc",
"vaapi_picture_native_pixmap.h",
]
}
- if (use_x11) {
+ if (use_vaapi_x11) {
deps += [ "//ui/gfx/x" ]
sources += [
"vaapi_picture_native_pixmap_angle.cc",
@@ -180,6 +182,7 @@ source_set("common") {
"//gpu",
"//media",
"//media/gpu:common",
+ "//media/gpu/chromeos:fourcc",
"//ui/gfx/geometry",
]
deps = [
@@ -194,7 +197,7 @@ source_set("common") {
deps += [ "//ui/ozone" ]
}
- if (use_x11) {
+ if (use_vaapi_x11) {
deps += [ "//ui/gfx/x" ]
}
@@ -240,7 +243,7 @@ source_set("unit_test") {
"//mojo/core/embedder",
"//testing/gmock",
"//testing/gtest",
- "//third_party/libvpx:libvp9rc",
+ "//third_party/libvpx:libvpxrc",
"//ui/gfx:memory_buffer",
"//ui/gfx:test_support",
"//ui/gfx/geometry",
@@ -325,6 +328,8 @@ test("vaapi_unittest") {
"//gpu",
"//media/gpu/test:helpers",
"//testing/gtest",
+ "//third_party/minigbm",
+ "//ui/gfx/linux:gbm",
]
# TODO(https://crbug.com/1043007): remove is_chromeos.
diff --git a/chromium/media/gpu/vaapi/av1_vaapi_video_decoder_delegate.cc b/chromium/media/gpu/vaapi/av1_vaapi_video_decoder_delegate.cc
index 02bd3e197b2..b2d986b71d4 100644
--- a/chromium/media/gpu/vaapi/av1_vaapi_video_decoder_delegate.cc
+++ b/chromium/media/gpu/vaapi/av1_vaapi_video_decoder_delegate.cc
@@ -723,16 +723,21 @@ bool FillAV1SliceParameters(
AV1VaapiVideoDecoderDelegate::AV1VaapiVideoDecoderDelegate(
DecodeSurfaceHandler<VASurface>* const vaapi_dec,
- scoped_refptr<VaapiWrapper> vaapi_wrapper)
+ scoped_refptr<VaapiWrapper> vaapi_wrapper,
+ ProtectedSessionUpdateCB on_protected_session_update_cb,
+ CdmContext* cdm_context,
+ EncryptionScheme encryption_scheme)
: VaapiVideoDecoderDelegate(vaapi_dec,
std::move(vaapi_wrapper),
- base::DoNothing(),
- nullptr) {}
+ std::move(on_protected_session_update_cb),
+ cdm_context,
+ encryption_scheme) {}
AV1VaapiVideoDecoderDelegate::~AV1VaapiVideoDecoderDelegate() {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
DCHECK(!picture_params_);
DCHECK(slice_params_.empty());
+ DCHECK(!crypto_params_);
}
scoped_refptr<AV1Picture> AV1VaapiVideoDecoderDelegate::CreateAV1Picture(
@@ -776,6 +781,39 @@ DecodeStatus AV1VaapiVideoDecoderDelegate::SubmitDecode(
const libgav1::Vector<libgav1::TileBuffer>& tile_buffers,
base::span<const uint8_t> data) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+
+#if BUILDFLAG(IS_CHROMEOS_ASH)
+ const DecryptConfig* decrypt_config = pic.decrypt_config();
+ if (decrypt_config && !SetDecryptConfig(decrypt_config->Clone()))
+ return DecodeStatus::kFail;
+
+ bool uses_crypto = false;
+ std::vector<VAEncryptionSegmentInfo> encryption_segment_info;
+ VAEncryptionParameters crypto_param{};
+ if (IsEncryptedSession()) {
+ const ProtectedSessionState state = SetupDecryptDecode(
+ /*full_sample=*/false, data.size_bytes(), &crypto_param,
+ &encryption_segment_info,
+ decrypt_config ? decrypt_config->subsamples()
+ : std::vector<SubsampleEntry>());
+ if (state == ProtectedSessionState::kFailed) {
+ LOG(ERROR)
+ << "SubmitDecode fails because we couldn't setup the protected "
+ "session";
+ return DecodeStatus::kFail;
+ } else if (state != ProtectedSessionState::kCreated) {
+ return DecodeStatus::kTryAgain;
+ }
+ uses_crypto = true;
+ if (!crypto_params_) {
+ crypto_params_ = vaapi_wrapper_->CreateVABuffer(
+ VAEncryptionParameterBufferType, sizeof(crypto_param));
+ if (!crypto_params_)
+ return DecodeStatus::kFail;
+ }
+ }
+#endif // BUILDFLAG(IS_CHROMEOS_ASH)
+
// libgav1 ensures that tile_columns is >= 0 and <= MAX_TILE_COLS.
DCHECK_LE(0, pic.frame_header.tile_info.tile_columns);
DCHECK_LE(pic.frame_header.tile_info.tile_columns, libgav1::kMaxTileColumns);
@@ -807,9 +845,11 @@ DecodeStatus AV1VaapiVideoDecoderDelegate::SubmitDecode(
slice_params_.resize(slice_params.size());
slice_params_.shrink_to_fit();
}
+
// TODO(hiroh): Don't submit the entire coded data to the buffer. Instead,
// only pass the data starting from the tile list OBU to reduce the size of
- // the VA buffer.
+ // the VA buffer. When this is changed, the encrypted subsample ranges must
+ // also be adjusted.
// Always re-create |encoded_data| because reusing the buffer causes horrific
// artifacts in decoded buffers. TODO(b/177028692): This seems to be a driver
// bug, fix it and reuse the buffer.
@@ -828,12 +868,24 @@ DecodeStatus AV1VaapiVideoDecoderDelegate::SubmitDecode(
{slice_params_[i]->type(), slice_params_[i]->size(),
&slice_params[i]}});
}
+#if BUILDFLAG(IS_CHROMEOS_ASH)
+ if (uses_crypto) {
+ buffers.push_back(
+ {crypto_params_->id(),
+ {crypto_params_->type(), crypto_params_->size(), &crypto_param}});
+ }
+#endif // BUILDFLAG(IS_CHROMEOS_ASH)
const auto* vaapi_pic = static_cast<const VaapiAV1Picture*>(&pic);
- return vaapi_wrapper_->MapAndCopyAndExecute(
- vaapi_pic->reconstruct_va_surface()->id(), buffers)
- ? DecodeStatus::kOk
- : DecodeStatus::kFail;
+ const bool success = vaapi_wrapper_->MapAndCopyAndExecute(
+ vaapi_pic->reconstruct_va_surface()->id(), buffers);
+ if (!success && NeedsProtectedSessionRecovery())
+ return DecodeStatus::kTryAgain;
+
+ if (success && IsEncryptedSession())
+ ProtectedDecodedSucceeded();
+
+ return success ? DecodeStatus::kOk : DecodeStatus::kFail;
}
void AV1VaapiVideoDecoderDelegate::OnVAContextDestructionSoon() {
@@ -841,5 +893,6 @@ void AV1VaapiVideoDecoderDelegate::OnVAContextDestructionSoon() {
// that will be destroyed soon.
picture_params_.reset();
slice_params_.clear();
+ crypto_params_.reset();
}
} // namespace media
diff --git a/chromium/media/gpu/vaapi/av1_vaapi_video_decoder_delegate.h b/chromium/media/gpu/vaapi/av1_vaapi_video_decoder_delegate.h
index 2ed40780007..301c4884513 100644
--- a/chromium/media/gpu/vaapi/av1_vaapi_video_decoder_delegate.h
+++ b/chromium/media/gpu/vaapi/av1_vaapi_video_decoder_delegate.h
@@ -17,8 +17,13 @@ class ScopedVABuffer;
class AV1VaapiVideoDecoderDelegate : public AV1Decoder::AV1Accelerator,
public VaapiVideoDecoderDelegate {
public:
- AV1VaapiVideoDecoderDelegate(DecodeSurfaceHandler<VASurface>* const vaapi_dec,
- scoped_refptr<VaapiWrapper> vaapi_wrapper);
+ AV1VaapiVideoDecoderDelegate(
+ DecodeSurfaceHandler<VASurface>* const vaapi_dec,
+ scoped_refptr<VaapiWrapper> vaapi_wrapper,
+ ProtectedSessionUpdateCB on_protected_session_update_cb =
+ base::DoNothing(),
+ CdmContext* cdm_context = nullptr,
+ EncryptionScheme encryption_scheme = EncryptionScheme::kUnencrypted);
~AV1VaapiVideoDecoderDelegate() override;
AV1VaapiVideoDecoderDelegate(const AV1VaapiVideoDecoderDelegate&) = delete;
AV1VaapiVideoDecoderDelegate& operator=(const AV1VaapiVideoDecoderDelegate&) =
@@ -39,6 +44,7 @@ class AV1VaapiVideoDecoderDelegate : public AV1Decoder::AV1Accelerator,
private:
std::unique_ptr<ScopedVABuffer> picture_params_;
std::vector<std::unique_ptr<ScopedVABuffer>> slice_params_;
+ std::unique_ptr<ScopedVABuffer> crypto_params_;
};
} // namespace media
#endif // MEDIA_GPU_VAAPI_AV1_VAAPI_VIDEO_DECODER_DELEGATE_H_
diff --git a/chromium/media/gpu/vaapi/h264_vaapi_video_decoder_delegate.cc b/chromium/media/gpu/vaapi/h264_vaapi_video_decoder_delegate.cc
index f63e70ab3f4..aa5e9bde2ec 100644
--- a/chromium/media/gpu/vaapi/h264_vaapi_video_decoder_delegate.cc
+++ b/chromium/media/gpu/vaapi/h264_vaapi_video_decoder_delegate.cc
@@ -65,15 +65,7 @@ scoped_refptr<H264Picture> H264VaapiVideoDecoderDelegate::CreateH264Picture() {
if (!va_surface)
return nullptr;
- scoped_refptr<H264Picture> pic = new VaapiH264Picture(std::move(va_surface));
- if (!vaapi_dec_->IsScalingDecode())
- return pic;
-
- // Setup the scaling buffer.
- scoped_refptr<VASurface> scaled_surface = vaapi_dec_->CreateDecodeSurface();
- CHECK(scaled_surface);
- pic->AsVaapiH264Picture()->SetDecodeSurface(std::move(scaled_surface));
- return pic;
+ return new VaapiH264Picture(std::move(va_surface));
}
// Fill |va_pic| with default/neutral values.
@@ -527,20 +519,11 @@ DecodeStatus H264VaapiVideoDecoderDelegate::SubmitDecode(
}
#endif // BUILDFLAG(IS_CHROMEOS_ASH)
const VaapiH264Picture* vaapi_pic = pic->AsVaapiH264Picture();
- CHECK(gfx::Rect(vaapi_pic->GetDecodeSize()).Contains(pic->visible_rect()));
- VAProcPipelineParameterBuffer proc_buffer;
- if (FillDecodeScalingIfNeeded(pic->visible_rect(),
- vaapi_pic->GetVADecodeSurfaceID(),
- vaapi_pic->va_surface(), &proc_buffer)) {
- if (!vaapi_wrapper_->SubmitBuffer(VAProcPipelineParameterBufferType,
- sizeof(proc_buffer), &proc_buffer)) {
- DLOG(ERROR) << "Failed submitting proc buffer";
- return DecodeStatus::kFail;
- }
- }
+ CHECK(
+ gfx::Rect(vaapi_pic->va_surface()->size()).Contains(pic->visible_rect()));
const bool success = vaapi_wrapper_->ExecuteAndDestroyPendingBuffers(
- vaapi_pic->GetVADecodeSurfaceID());
+ vaapi_pic->GetVASurfaceID());
#if BUILDFLAG(IS_CHROMEOS_ASH)
encryption_segment_info_.clear();
#endif // BUILDFLAG(IS_CHROMEOS_ASH)
@@ -557,11 +540,9 @@ bool H264VaapiVideoDecoderDelegate::OutputPicture(
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
const VaapiH264Picture* vaapi_pic = pic->AsVaapiH264Picture();
- vaapi_dec_->SurfaceReady(
- vaapi_pic->va_surface(), vaapi_pic->bitstream_id(),
- vaapi_dec_->GetOutputVisibleRect(vaapi_pic->visible_rect(),
- vaapi_pic->va_surface()->size()),
- vaapi_pic->get_colorspace());
+ vaapi_dec_->SurfaceReady(vaapi_pic->va_surface(), vaapi_pic->bitstream_id(),
+ vaapi_pic->visible_rect(),
+ vaapi_pic->get_colorspace());
return true;
}
@@ -590,7 +571,7 @@ void H264VaapiVideoDecoderDelegate::FillVAPicture(
VASurfaceID va_surface_id = VA_INVALID_SURFACE;
if (!pic->nonexisting)
- va_surface_id = pic->AsVaapiH264Picture()->GetVADecodeSurfaceID();
+ va_surface_id = pic->AsVaapiH264Picture()->GetVASurfaceID();
va_pic->picture_id = va_surface_id;
va_pic->frame_idx = pic->frame_num;
diff --git a/chromium/media/gpu/vaapi/h264_vaapi_video_decoder_delegate.h b/chromium/media/gpu/vaapi/h264_vaapi_video_decoder_delegate.h
index 9ca8a59c6af..e623cc04700 100644
--- a/chromium/media/gpu/vaapi/h264_vaapi_video_decoder_delegate.h
+++ b/chromium/media/gpu/vaapi/h264_vaapi_video_decoder_delegate.h
@@ -32,6 +32,11 @@ class H264VaapiVideoDecoderDelegate : public H264Decoder::H264Accelerator,
base::DoNothing(),
CdmContext* cdm_context = nullptr,
EncryptionScheme encryption_scheme = EncryptionScheme::kUnencrypted);
+
+ H264VaapiVideoDecoderDelegate(const H264VaapiVideoDecoderDelegate&) = delete;
+ H264VaapiVideoDecoderDelegate& operator=(
+ const H264VaapiVideoDecoderDelegate&) = delete;
+
~H264VaapiVideoDecoderDelegate() override;
// H264Decoder::H264Accelerator implementation.
@@ -82,8 +87,6 @@ class H264VaapiVideoDecoderDelegate : public H264Decoder::H264Accelerator,
// We need to set this so we don't resubmit crypto params on decode.
bool full_sample_;
-
- DISALLOW_COPY_AND_ASSIGN(H264VaapiVideoDecoderDelegate);
};
} // namespace media
diff --git a/chromium/media/gpu/vaapi/h264_vaapi_video_encoder_delegate.cc b/chromium/media/gpu/vaapi/h264_vaapi_video_encoder_delegate.cc
index 48095a31547..4df4d6eddf5 100644
--- a/chromium/media/gpu/vaapi/h264_vaapi_video_encoder_delegate.cc
+++ b/chromium/media/gpu/vaapi/h264_vaapi_video_encoder_delegate.cc
@@ -7,9 +7,13 @@
#include <va/va.h>
#include <va/va_enc_h264.h>
+#include <utility>
+
#include "base/bits.h"
#include "base/cxx17_backports.h"
#include "base/memory/ref_counted_memory.h"
+#include "build/build_config.h"
+#include "media/base/media_switches.h"
#include "media/gpu/macros.h"
#include "media/gpu/vaapi/vaapi_common.h"
#include "media/gpu/vaapi/vaapi_wrapper.h"
@@ -51,6 +55,9 @@ constexpr int kCPBSizeScale = 0; // cpb_size_scale for SPS HRD parameters.
// 4:2:0
constexpr int kChromaFormatIDC = 1;
+constexpr uint8_t kMinSupportedH264TemporalLayers = 2;
+constexpr uint8_t kMaxSupportedH264TemporalLayers = 3;
+
void FillVAEncRateControlParams(
uint32_t bps,
uint32_t window_size,
@@ -78,6 +85,35 @@ void FillVAEncRateControlParams(
hrd_param.initial_buffer_fullness = buffer_size / 2;
}
+// TODO(hiroh): Put this to media/gpu/gpu_video_encode_accelerator_helpers.h
+VideoBitrateAllocation GetDefaultVideoBitrateAllocation(
+ const VideoEncodeAccelerator::Config& config) {
+ VideoBitrateAllocation bitrate_allocation;
+ if (!config.HasTemporalLayer() && !config.HasSpatialLayer()) {
+ bitrate_allocation.SetBitrate(0, 0, config.bitrate.target());
+ return bitrate_allocation;
+ }
+
+ auto& spatial_layer = config.spatial_layers[0];
+ const size_t num_temporal_layers = spatial_layer.num_of_temporal_layers;
+ // TODO(hiroh): support one temporal layer when moving this function.
+ DCHECK_GE(num_temporal_layers, 2u);
+ constexpr double kTemporalLayersBitrateScaleFactors[][3] = {
+ {0.60, 0.40, 0.00}, // For two temporal layers.
+ {0.50, 0.20, 0.30}, // For three temporal layers.
+ };
+
+ const uint32_t bitrate_bps = spatial_layer.bitrate_bps;
+ for (size_t tid = 0; tid < num_temporal_layers; ++tid) {
+ const double factor =
+ kTemporalLayersBitrateScaleFactors[num_temporal_layers - 2][tid];
+ bitrate_allocation.SetBitrate(
+ 0, tid, base::checked_cast<int>(bitrate_bps * factor));
+ }
+
+ return bitrate_allocation;
+}
+
static scoped_refptr<base::RefCountedBytes> MakeRefCountedBytes(void* ptr,
size_t size) {
return base::MakeRefCounted<base::RefCountedBytes>(
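
The GetDefaultVideoBitrateAllocation() helper added in the hunk above splits a single target bitrate across temporal layers with fixed 60/40 and 50/20/30 factors. A small standalone sketch of the three-layer split, using an assumed 900 kbps target that is not taken from the patch:

    // Assumed example of the three-temporal-layer split, mirroring
    // kTemporalLayersBitrateScaleFactors in the hunk above.
    #include <cstdint>
    #include <cstdio>

    int main() {
      const uint32_t target_bps = 900000;            // hypothetical target
      const double factors[3] = {0.50, 0.20, 0.30};  // three-layer factors
      for (int tid = 0; tid < 3; ++tid) {
        std::printf("T%d: %u bps\n", tid,
                    static_cast<unsigned>(target_bps * factors[tid]));
      }
      // Prints T0: 450000, T1: 180000, T2: 270000, summing back to 900000.
      return 0;
    }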
@@ -90,11 +126,78 @@ static void InitVAPictureH264(VAPictureH264* va_pic) {
va_pic->flags = VA_PICTURE_H264_INVALID;
}
+// Updates |frame_num| as spec section 7.4.3 and sets it to |pic.frame_num|.
+void UpdateAndSetFrameNum(H264Picture& pic, unsigned int& frame_num) {
+ if (pic.idr)
+ frame_num = 0;
+ else if (pic.ref)
+ frame_num++;
+ DCHECK_LT(frame_num, kIDRPeriod);
+ pic.frame_num = frame_num;
+}
+
+// Updates and fills variables in |pic|, |frame_num| and |ref_frame_idx| for
+// temporal layer encoding. |frame_num| is the frame_num in H.264 spec for
+// |pic|. |ref_frame_idx| is the index in |ref_pic_list0| of the frame
+// referenced by |pic|.
+void UpdatePictureForTemporalLayerEncoding(
+ const size_t num_layers,
+ H264Picture& pic,
+ unsigned int& frame_num,
+ absl::optional<size_t>& ref_frame_idx,
+ const unsigned int num_encoded_frames,
+ const base::circular_deque<scoped_refptr<H264Picture>>& ref_pic_list0) {
+ DCHECK_GE(num_layers, kMinSupportedH264TemporalLayers);
+ DCHECK_LE(num_layers, kMaxSupportedH264TemporalLayers);
+ constexpr size_t kTemporalLayerCycle = 4;
+ constexpr std::pair<H264Metadata, bool>
+ kFrameMetadata[][kTemporalLayerCycle] = {
+ {
+ // For two temporal layers.
+ {{.temporal_idx = 0, .layer_sync = false}, true},
+ {{.temporal_idx = 1, .layer_sync = true}, false},
+ {{.temporal_idx = 0, .layer_sync = false}, true},
+ {{.temporal_idx = 1, .layer_sync = true}, false},
+ },
+ {
+ // For three temporal layers.
+ {{.temporal_idx = 0, .layer_sync = false}, true},
+ {{.temporal_idx = 2, .layer_sync = true}, false},
+ {{.temporal_idx = 1, .layer_sync = true}, true},
+ {{.temporal_idx = 2, .layer_sync = false}, false},
+ }};
+
+ // Fill |pic.metadata_for_encoding| and |pic.ref|.
+ H264Metadata metadata;
+ std::tie(pic.metadata_for_encoding.emplace(), pic.ref) =
+ kFrameMetadata[num_layers - 2][num_encoded_frames % kTemporalLayerCycle];
+
+ UpdateAndSetFrameNum(pic, frame_num);
+
+ if (pic.idr)
+ return;
+
+ // Fill reference frame related variables in |pic| and |ref_frame_idx|.
+ DCHECK_EQ(pic.ref_pic_list_modification_flag_l0, 0);
+ DCHECK_EQ(pic.abs_diff_pic_num_minus1, 0);
+ DCHECK(!ref_pic_list0.empty());
+ if (metadata.temporal_idx == 0)
+ ref_frame_idx = base::checked_cast<size_t>(ref_pic_list0.size() - 1);
+ else
+ ref_frame_idx = 0;
+
+ DCHECK_LT(*ref_frame_idx, ref_pic_list0.size());
+ const H264Picture& ref_frame_pic = *ref_pic_list0[*ref_frame_idx];
+ const int abs_diff_pic_num = pic.frame_num - ref_frame_pic.frame_num;
+ if (*ref_frame_idx != 0 && abs_diff_pic_num > 0) {
+ pic.ref_pic_list_modification_flag_l0 = 1;
+ pic.abs_diff_pic_num_minus1 = abs_diff_pic_num - 1;
+ }
+}
} // namespace
H264VaapiVideoEncoderDelegate::EncodeParams::EncodeParams()
- : bitrate_bps(0),
- framerate(0),
+ : framerate(0),
cpb_window_size_ms(kCPBWindowSizeMs),
cpb_size_bits(0),
initial_qp(kDefaultQP),
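
UpdatePictureForTemporalLayerEncoding() in the hunk above cycles through a four-frame pattern taken from kFrameMetadata. An illustrative trace of the three-layer case, with assumed frame numbering rather than anything produced by the patch:

    // Illustrative trace of the three-temporal-layer pattern: frame index
    // mod 4 maps to (temporal_idx, used-as-reference).
    #include <cstdio>

    int main() {
      const int temporal_idx[4] = {0, 2, 1, 2};
      const bool is_reference[4] = {true, false, true, false};
      for (int n = 0; n < 8; ++n) {
        std::printf("frame %d: T%d ref=%d\n", n, temporal_idx[n % 4],
                    static_cast<int>(is_reference[n % 4]));
      }
      return 0;
    }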
@@ -106,9 +209,7 @@ H264VaapiVideoEncoderDelegate::EncodeParams::EncodeParams()
H264VaapiVideoEncoderDelegate::H264VaapiVideoEncoderDelegate(
scoped_refptr<VaapiWrapper> vaapi_wrapper,
base::RepeatingClosure error_cb)
- : VaapiVideoEncoderDelegate(std::move(vaapi_wrapper), error_cb),
- packed_sps_(new H264BitstreamBuffer()),
- packed_pps_(new H264BitstreamBuffer()) {}
+ : VaapiVideoEncoderDelegate(std::move(vaapi_wrapper), error_cb) {}
H264VaapiVideoEncoderDelegate::~H264VaapiVideoEncoderDelegate() {
// H264VaapiVideoEncoderDelegate can be destroyed on any thread.
@@ -135,10 +236,26 @@ bool H264VaapiVideoEncoderDelegate::Initialize(
return false;
}
- if (config.HasSpatialLayer() || config.HasTemporalLayer()) {
- DVLOGF(1) << "Neither temporal nor spatial layer supported";
+ if (config.HasSpatialLayer()) {
+ DVLOGF(1) << "Spatial layer encoding is not supported";
return false;
}
+ if (config.HasTemporalLayer()) {
+ bool support_temporal_layer = false;
+#if defined(ARCH_CPU_X86_FAMILY) && BUILDFLAG(IS_CHROMEOS_ASH)
+ VAImplementation implementation = VaapiWrapper::GetImplementationType();
+ // TODO(b/199487660): Enable H.264 temporal layer encoding on AMD once their
+ // drivers support them.
+ support_temporal_layer =
+ base::FeatureList::IsEnabled(kVaapiH264TemporalLayerHWEncoding) &&
+ (implementation == VAImplementation::kIntelI965 ||
+ implementation == VAImplementation::kIntelIHD);
+#endif
+ if (!support_temporal_layer) {
+ DVLOGF(1) << "Temporal layer encoding is not supported";
+ return false;
+ }
+ }
visible_size_ = config.input_visible_size;
// For 4:2:0, the pixel sizes have to be even.
@@ -177,20 +294,60 @@ bool H264VaapiVideoEncoderDelegate::Initialize(
level_ = *valid_level;
}
+ num_temporal_layers_ = 1;
+ if (config.HasTemporalLayer()) {
+ DCHECK(!config.spatial_layers.empty());
+ num_temporal_layers_ = config.spatial_layers[0].num_of_temporal_layers;
+ if (num_temporal_layers_ > kMaxSupportedH264TemporalLayers ||
+ num_temporal_layers_ < kMinSupportedH264TemporalLayers) {
+ DVLOGF(1) << "Unsupported number of temporal layers: "
+ << base::strict_cast<size_t>(num_temporal_layers_);
+ return false;
+ }
+
+ // |ave_config.max_num_ref_frames| represents the maximum number of
+ // reference frames for both the reference picture list 0 (bottom 16 bits)
+ // and the reference picture list 1 (top 16 bits) in H264 encoding.
+ const size_t max_p_frame_slots = ave_config.max_num_ref_frames & 0xffff;
+ if (max_p_frame_slots < num_temporal_layers_ - 1) {
+ DVLOGF(1) << "P frame slots is too short: " << max_p_frame_slots;
+ return false;
+ }
+ }
+
curr_params_.max_ref_pic_list0_size =
- std::min(kMaxRefIdxL0Size, ave_config.max_num_ref_frames & 0xffff);
+ num_temporal_layers_ > 1u
+ ? num_temporal_layers_ - 1
+ : std::min(kMaxRefIdxL0Size, ave_config.max_num_ref_frames & 0xffff);
curr_params_.max_num_ref_frames =
std::min(kMaxNumReferenceFrames, curr_params_.max_ref_pic_list0_size);
- VideoBitrateAllocation initial_bitrate_allocation;
- initial_bitrate_allocation.SetBitrate(0, 0, config.bitrate.target());
- if (!UpdateRates(initial_bitrate_allocation, initial_framerate))
+ bool submit_packed_sps = false;
+ bool submit_packed_pps = false;
+ bool submit_packed_slice = false;
+ if (!vaapi_wrapper_->GetSupportedPackedHeaders(
+ config.output_profile, submit_packed_sps, submit_packed_pps,
+ submit_packed_slice)) {
+ DVLOGF(1) << "Failed getting supported packed headers";
return false;
+ }
+
+ // Submit packed headers only if packed SPS, PPS and slice header all are
+ // supported.
+ submit_packed_headers_ =
+ submit_packed_sps && submit_packed_pps && submit_packed_slice;
+ if (submit_packed_headers_) {
+ packed_sps_ = base::MakeRefCounted<H264BitstreamBuffer>();
+ packed_pps_ = base::MakeRefCounted<H264BitstreamBuffer>();
+ } else {
+ DVLOGF(2) << "Packed headers are not submitted to a driver";
+ }
UpdateSPS();
UpdatePPS();
- return true;
+ return UpdateRates(GetDefaultVideoBitrateAllocation(config),
+ initial_framerate);
}
gfx::Size H264VaapiVideoEncoderDelegate::GetCodedSize() const {
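
The Initialize() changes above read the list-0 capacity out of |ave_config.max_num_ref_frames|, whose low 16 bits hold the reference picture list 0 size and whose high 16 bits hold the list 1 size, per the comment in that hunk. A hypothetical sketch of that unpacking; the example value is made up:

    // Hypothetical sketch of the bit packing described above.
    #include <cstdint>
    #include <cstdio>

    int main() {
      const uint32_t max_num_ref_frames = 0x00020004;  // assumed driver value
      const uint32_t list0_size = max_num_ref_frames & 0xffff;          // 4
      const uint32_t list1_size = (max_num_ref_frames >> 16) & 0xffff;  // 2
      std::printf("list0=%u list1=%u\n", list0_size, list1_size);
      return 0;
    }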
@@ -207,9 +364,26 @@ size_t H264VaapiVideoEncoderDelegate::GetMaxNumOfRefFrames() const {
}
std::vector<gfx::Size> H264VaapiVideoEncoderDelegate::GetSVCLayerResolutions() {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+
return {visible_size_};
}
+BitstreamBufferMetadata H264VaapiVideoEncoderDelegate::GetMetadata(
+ EncodeJob* encode_job,
+ size_t payload_size) {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+
+ auto metadata =
+ VaapiVideoEncoderDelegate::GetMetadata(encode_job, payload_size);
+ auto picture = GetPicture(encode_job);
+ DCHECK(picture);
+
+ metadata.h264 = picture->metadata_for_encoding;
+
+ return metadata;
+}
+
bool H264VaapiVideoEncoderDelegate::PrepareEncodeJob(EncodeJob* encode_job) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
@@ -217,12 +391,9 @@ bool H264VaapiVideoEncoderDelegate::PrepareEncodeJob(EncodeJob* encode_job) {
DCHECK(pic);
if (encode_job->IsKeyframeRequested() || encoding_parameters_changed_)
- frame_num_ = 0;
+ num_encoded_frames_ = 0;
- pic->frame_num = frame_num_++;
- frame_num_ %= kIDRPeriod;
-
- if (pic->frame_num == 0) {
+ if (num_encoded_frames_ == 0) {
pic->idr = true;
// H264 spec mandates idr_pic_id to differ between two consecutive IDRs.
idr_pic_id_ ^= 1;
@@ -234,8 +405,18 @@ bool H264VaapiVideoEncoderDelegate::PrepareEncodeJob(EncodeJob* encode_job) {
}
pic->type = pic->idr ? H264SliceHeader::kISlice : H264SliceHeader::kPSlice;
- pic->ref = true;
- pic->pic_order_cnt = pic->frame_num * 2;
+
+ absl::optional<size_t> ref_frame_index;
+ if (num_temporal_layers_ > 1u) {
+ UpdatePictureForTemporalLayerEncoding(num_temporal_layers_, *pic,
+ frame_num_, ref_frame_index,
+ num_encoded_frames_, ref_pic_list0_);
+ } else {
+ pic->ref = true;
+ UpdateAndSetFrameNum(*pic, frame_num_);
+ }
+
+ pic->pic_order_cnt = num_encoded_frames_ * 2;
pic->top_field_order_cnt = pic->pic_order_cnt;
pic->pic_order_cnt_lsb = pic->pic_order_cnt;
@@ -244,13 +425,15 @@ bool H264VaapiVideoEncoderDelegate::PrepareEncodeJob(EncodeJob* encode_job) {
<< " frame_num: " << pic->frame_num
<< " POC: " << pic->pic_order_cnt;
+ // TODO(b/195407733): Use a software bitrate controller and specify QP.
if (!SubmitFrameParameters(encode_job, curr_params_, current_sps_,
- current_pps_, pic, ref_pic_list0_)) {
+ current_pps_, pic, ref_pic_list0_,
+ ref_frame_index)) {
DVLOGF(1) << "Failed submitting frame parameters";
return false;
}
- if (pic->type == H264SliceHeader::kISlice) {
+ if (pic->type == H264SliceHeader::kISlice && submit_packed_headers_) {
// We always generate SPS and PPS with I(DR) frame. This will help for Seek
// operation on the generated stream.
if (!SubmitPackedHeaders(encode_job, packed_sps_, packed_pps_)) {
@@ -272,6 +455,8 @@ bool H264VaapiVideoEncoderDelegate::PrepareEncodeJob(EncodeJob* encode_job) {
ref_pic_list0_.pop_back();
}
+ num_encoded_frames_++;
+ num_encoded_frames_ %= kIDRPeriod;
return true;
}
@@ -284,17 +469,23 @@ bool H264VaapiVideoEncoderDelegate::UpdateRates(
if (bitrate == 0 || framerate == 0)
return false;
- if (curr_params_.bitrate_bps == bitrate &&
+ if (curr_params_.bitrate_allocation == bitrate_allocation &&
curr_params_.framerate == framerate) {
return true;
}
- VLOGF(2) << "New bitrate: " << bitrate_allocation.GetSumBps()
+ VLOGF(2) << "New bitrate allocation: " << bitrate_allocation.ToString()
<< ", New framerate: " << framerate;
- curr_params_.bitrate_bps = bitrate;
+ curr_params_.bitrate_allocation = bitrate_allocation;
curr_params_.framerate = framerate;
- curr_params_.cpb_size_bits =
- curr_params_.bitrate_bps * curr_params_.cpb_window_size_ms / 1000;
+
+ base::CheckedNumeric<uint32_t> cpb_size_bits(bitrate);
+ cpb_size_bits /= 1000;
+ cpb_size_bits *= curr_params_.cpb_window_size_ms;
+ if (!cpb_size_bits.AssignIfValid(&curr_params_.cpb_size_bits)) {
+ VLOGF(1) << "Too large bitrate: " << bitrate_allocation.GetSumBps();
+ return false;
+ }
bool previous_encoding_parameters_changed = encoding_parameters_changed_;
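
UpdateRates() in the hunk above now derives the CPB size from the summed bitrate with overflow checking, computing cpb_size_bits = bitrate_bps / 1000 * cpb_window_size_ms. A worked example with assumed numbers; neither value is taken from the patch:

    // Worked example of the CPB size computation with assumed inputs.
    #include <cstdint>
    #include <cstdio>

    int main() {
      const uint32_t bitrate_bps = 4000000;      // assumed summed bitrate
      const uint32_t cpb_window_size_ms = 1500;  // assumed CPB window
      const uint64_t cpb_size_bits =
          static_cast<uint64_t>(bitrate_bps) / 1000 * cpb_window_size_ms;
      std::printf("cpb_size_bits=%llu\n",
                  static_cast<unsigned long long>(cpb_size_bits));  // 6000000
      return 0;
    }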
@@ -393,7 +584,7 @@ void H264VaapiVideoEncoderDelegate::UpdateSPS() {
current_sps_.bit_rate_scale = kBitRateScale;
current_sps_.cpb_size_scale = kCPBSizeScale;
current_sps_.bit_rate_value_minus1[0] =
- (curr_params_.bitrate_bps >>
+ (curr_params_.bitrate_allocation.GetSumBps() >>
(kBitRateScale + H264SPS::kBitRateScaleConstantTerm)) -
1;
current_sps_.cpb_size_value_minus1[0] =
@@ -410,7 +601,8 @@ void H264VaapiVideoEncoderDelegate::UpdateSPS() {
current_sps_.time_offset_length = H264SPS::kDefaultTimeOffsetLength;
current_sps_.low_delay_hrd_flag = false;
- GeneratePackedSPS();
+ if (submit_packed_headers_)
+ GeneratePackedSPS();
encoding_parameters_changed_ = true;
}
@@ -436,12 +628,15 @@ void H264VaapiVideoEncoderDelegate::UpdatePPS() {
current_pps_.transform_8x8_mode_flag =
(current_sps_.profile_idc == H264SPS::kProfileIDCHigh);
- GeneratePackedPPS();
+ if (submit_packed_headers_)
+ GeneratePackedPPS();
encoding_parameters_changed_ = true;
}
void H264VaapiVideoEncoderDelegate::GeneratePackedSPS() {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ DCHECK(submit_packed_headers_);
+ DCHECK(packed_sps_);
packed_sps_->Reset();
@@ -557,6 +752,8 @@ void H264VaapiVideoEncoderDelegate::GeneratePackedSPS() {
void H264VaapiVideoEncoderDelegate::GeneratePackedPPS() {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ DCHECK(submit_packed_headers_);
+ DCHECK(packed_pps_);
packed_pps_->Reset();
@@ -703,7 +900,8 @@ bool H264VaapiVideoEncoderDelegate::SubmitFrameParameters(
const H264SPS& sps,
const H264PPS& pps,
scoped_refptr<H264Picture> pic,
- const base::circular_deque<scoped_refptr<H264Picture>>& ref_pic_list0) {
+ const base::circular_deque<scoped_refptr<H264Picture>>& ref_pic_list0,
+ const absl::optional<size_t>& ref_frame_index) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
VAEncSequenceParameterBufferH264 seq_param = {};
@@ -715,7 +913,7 @@ bool H264VaapiVideoEncoderDelegate::SubmitFrameParameters(
seq_param.intra_period = kIPeriod;
seq_param.intra_idr_period = kIDRPeriod;
seq_param.ip_period = kIPPeriod;
- seq_param.bits_per_second = encode_params.bitrate_bps;
+ seq_param.bits_per_second = encode_params.bitrate_allocation.GetSumBps();
SPS_TO_SP(max_num_ref_frames);
absl::optional<gfx::Size> coded_size = sps.GetCodedSize();
@@ -796,7 +994,8 @@ bool H264VaapiVideoEncoderDelegate::SubmitFrameParameters(
slice_param.pic_order_cnt_lsb = pic->pic_order_cnt_lsb;
slice_param.num_ref_idx_active_override_flag = true;
if (slice_param.slice_type == H264SliceHeader::kPSlice) {
- slice_param.num_ref_idx_l0_active_minus1 = ref_pic_list0.size() - 1;
+ slice_param.num_ref_idx_l0_active_minus1 =
+ ref_frame_index.has_value() ? 0 : ref_pic_list0.size() - 1;
} else {
slice_param.num_ref_idx_l0_active_minus1 = 0;
}
@@ -810,7 +1009,7 @@ bool H264VaapiVideoEncoderDelegate::SubmitFrameParameters(
for (VAPictureH264& picture : slice_param.RefPicList1)
InitVAPictureH264(&picture);
- for (size_t i = 0; i < ref_pic_list0.size(); ++i) {
+ for (size_t i = 0, j = 0; i < ref_pic_list0.size(); ++i) {
H264Picture& ref_pic = *ref_pic_list0[i];
VAPictureH264 va_pic_h264;
InitVAPictureH264(&va_pic_h264);
@@ -822,14 +1021,16 @@ bool H264VaapiVideoEncoderDelegate::SubmitFrameParameters(
// Initialize the current entry on slice and picture reference lists to
// |ref_pic| and advance list pointers.
pic_param.ReferenceFrames[i] = va_pic_h264;
- slice_param.RefPicList0[i] = va_pic_h264;
+ if (!ref_frame_index || *ref_frame_index == i)
+ slice_param.RefPicList0[j++] = va_pic_h264;
}
VAEncMiscParameterRateControl rate_control_param;
VAEncMiscParameterFrameRate framerate_param;
VAEncMiscParameterHRD hrd_param;
FillVAEncRateControlParams(
- encode_params.bitrate_bps, encode_params.cpb_window_size_ms,
+ encode_params.bitrate_allocation.GetSumBps(),
+ encode_params.cpb_window_size_ms,
base::strict_cast<uint32_t>(pic_param.pic_init_qp),
base::strict_cast<uint32_t>(encode_params.min_qp),
base::strict_cast<uint32_t>(encode_params.max_qp),
@@ -842,23 +1043,6 @@ bool H264VaapiVideoEncoderDelegate::SubmitFrameParameters(
base::Unretained(this), VAEncPictureParameterBufferType,
MakeRefCountedBytes(&pic_param, sizeof(pic_param))));
- scoped_refptr<H264BitstreamBuffer> packed_slice_header =
- GeneratePackedSliceHeader(pic_param, slice_param, *pic);
- VAEncPackedHeaderParameterBuffer packed_slice_param_buffer;
- packed_slice_param_buffer.type = VAEncPackedHeaderSlice;
- packed_slice_param_buffer.bit_length = packed_slice_header->BitsInBuffer();
- packed_slice_param_buffer.has_emulation_bytes = 0;
-
- // Submit packed slice header.
- job->AddSetupCallback(base::BindOnce(
- &VaapiVideoEncoderDelegate::SubmitBuffer, base::Unretained(this),
- VAEncPackedHeaderParameterBufferType,
- MakeRefCountedBytes(&packed_slice_param_buffer,
- sizeof(packed_slice_param_buffer))));
- job->AddSetupCallback(
- base::BindOnce(&H264VaapiVideoEncoderDelegate::SubmitH264BitstreamBuffer,
- base::Unretained(this), packed_slice_header));
-
job->AddSetupCallback(
base::BindOnce(&VaapiVideoEncoderDelegate::SubmitBuffer,
base::Unretained(this), VAEncSliceParameterBufferType,
@@ -879,6 +1063,26 @@ bool H264VaapiVideoEncoderDelegate::SubmitFrameParameters(
base::Unretained(this), VAEncMiscParameterTypeHRD,
MakeRefCountedBytes(&hrd_param, sizeof(hrd_param))));
+ if (!submit_packed_headers_)
+ return true;
+
+ scoped_refptr<H264BitstreamBuffer> packed_slice_header =
+ GeneratePackedSliceHeader(pic_param, slice_param, *pic);
+ VAEncPackedHeaderParameterBuffer packed_slice_param_buffer;
+ packed_slice_param_buffer.type = VAEncPackedHeaderSlice;
+ packed_slice_param_buffer.bit_length = packed_slice_header->BitsInBuffer();
+ packed_slice_param_buffer.has_emulation_bytes = 0;
+
+ // Submit packed slice header.
+ job->AddSetupCallback(base::BindOnce(
+ &VaapiVideoEncoderDelegate::SubmitBuffer, base::Unretained(this),
+ VAEncPackedHeaderParameterBufferType,
+ MakeRefCountedBytes(&packed_slice_param_buffer,
+ sizeof(packed_slice_param_buffer))));
+ job->AddSetupCallback(
+ base::BindOnce(&H264VaapiVideoEncoderDelegate::SubmitH264BitstreamBuffer,
+ base::Unretained(this), packed_slice_header));
+
return true;
}
@@ -895,6 +1099,9 @@ bool H264VaapiVideoEncoderDelegate::SubmitPackedHeaders(
scoped_refptr<H264BitstreamBuffer> packed_sps,
scoped_refptr<H264BitstreamBuffer> packed_pps) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ DCHECK(submit_packed_headers_);
+ DCHECK(packed_sps);
+ DCHECK(packed_pps);
// Submit SPS.
VAEncPackedHeaderParameterBuffer par_buffer = {};
@@ -923,7 +1130,6 @@ bool H264VaapiVideoEncoderDelegate::SubmitPackedHeaders(
job->AddSetupCallback(
base::BindOnce(&H264VaapiVideoEncoderDelegate::SubmitH264BitstreamBuffer,
base::Unretained(this), packed_pps));
-
return true;
}
diff --git a/chromium/media/gpu/vaapi/h264_vaapi_video_encoder_delegate.h b/chromium/media/gpu/vaapi/h264_vaapi_video_encoder_delegate.h
index a76b5b29c69..fe1c4cf2345 100644
--- a/chromium/media/gpu/vaapi/h264_vaapi_video_encoder_delegate.h
+++ b/chromium/media/gpu/vaapi/h264_vaapi_video_encoder_delegate.h
@@ -31,8 +31,7 @@ class H264VaapiVideoEncoderDelegate : public VaapiVideoEncoderDelegate {
struct EncodeParams {
EncodeParams();
- // Bitrate in bps.
- uint32_t bitrate_bps;
+ VideoBitrateAllocation bitrate_allocation;
// Framerate in FPS.
uint32_t framerate;
@@ -57,6 +56,11 @@ class H264VaapiVideoEncoderDelegate : public VaapiVideoEncoderDelegate {
H264VaapiVideoEncoderDelegate(scoped_refptr<VaapiWrapper> vaapi_wrapper,
base::RepeatingClosure error_cb);
+
+ H264VaapiVideoEncoderDelegate(const H264VaapiVideoEncoderDelegate&) = delete;
+ H264VaapiVideoEncoderDelegate& operator=(
+ const H264VaapiVideoEncoderDelegate&) = delete;
+
~H264VaapiVideoEncoderDelegate() override;
// VaapiVideoEncoderDelegate implementation.
@@ -68,8 +72,12 @@ class H264VaapiVideoEncoderDelegate : public VaapiVideoEncoderDelegate {
size_t GetMaxNumOfRefFrames() const override;
std::vector<gfx::Size> GetSVCLayerResolutions() override;
bool PrepareEncodeJob(EncodeJob* encode_job) override;
+ BitstreamBufferMetadata GetMetadata(EncodeJob* encode_job,
+ size_t payload_size) override;
private:
+ class TemporalLayers;
+
friend class H264VaapiVideoEncoderDelegateTest;
// Fill current_sps_ and current_pps_ with current encoding state parameters.
@@ -106,7 +114,8 @@ class H264VaapiVideoEncoderDelegate : public VaapiVideoEncoderDelegate {
const H264SPS& sps,
const H264PPS& pps,
scoped_refptr<H264Picture> pic,
- const base::circular_deque<scoped_refptr<H264Picture>>& ref_pic_list0);
+ const base::circular_deque<scoped_refptr<H264Picture>>& ref_pic_list0,
+ const absl::optional<size_t>& ref_frame_index);
// Current SPS, PPS and their packed versions. Packed versions are NALUs
// in AnnexB format *without* emulation prevention three-byte sequences
@@ -115,6 +124,7 @@ class H264VaapiVideoEncoderDelegate : public VaapiVideoEncoderDelegate {
scoped_refptr<H264BitstreamBuffer> packed_sps_;
H264PPS current_pps_;
scoped_refptr<H264BitstreamBuffer> packed_pps_;
+ bool submit_packed_headers_;
// Current encoding parameters being used.
EncodeParams curr_params_;
@@ -133,7 +143,9 @@ class H264VaapiVideoEncoderDelegate : public VaapiVideoEncoderDelegate {
unsigned int mb_width_ = 0;
unsigned int mb_height_ = 0;
- // frame_num (spec section 7.4.3) to be used for the next frame.
+ // The number of encoded frames. Resets to 0 on IDR frame.
+ unsigned int num_encoded_frames_ = 0;
+ // frame_num (spec section 7.4.3).
unsigned int frame_num_ = 0;
// idr_pic_id (spec section 7.4.3) to be used for the next frame.
@@ -147,7 +159,7 @@ class H264VaapiVideoEncoderDelegate : public VaapiVideoEncoderDelegate {
// RefPicList0 per spec (spec section 8.2.4.2).
base::circular_deque<scoped_refptr<H264Picture>> ref_pic_list0_;
- DISALLOW_COPY_AND_ASSIGN(H264VaapiVideoEncoderDelegate);
+ uint8_t num_temporal_layers_ = 1;
};
} // namespace media
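
EncodeParams now carries a VideoBitrateAllocation rather than a single bitrate_bps, which is what makes per-temporal-layer rate control possible. A rough sketch of how a caller might populate one; the SetBitrate() helper of media::VideoBitrateAllocation and the even split are assumptions for illustration, not taken from this patch:

#include <cstdint>

#include "media/base/video_bitrate_allocation.h"

// Illustrative only: spread |total_bps| evenly across the temporal layers of
// a single spatial layer. Real encoders usually bias toward the base layer.
media::VideoBitrateAllocation MakeAllocation(uint32_t total_bps,
                                             uint8_t num_temporal_layers) {
  media::VideoBitrateAllocation allocation;
  for (uint8_t tid = 0; tid < num_temporal_layers; ++tid) {
    allocation.SetBitrate(/*spatial_index=*/0, /*temporal_index=*/tid,
                          total_bps / num_temporal_layers);
  }
  return allocation;
}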
diff --git a/chromium/media/gpu/vaapi/h264_vaapi_video_encoder_delegate_unittest.cc b/chromium/media/gpu/vaapi/h264_vaapi_video_encoder_delegate_unittest.cc
index e72a2da4360..a8a31992332 100644
--- a/chromium/media/gpu/vaapi/h264_vaapi_video_encoder_delegate_unittest.cc
+++ b/chromium/media/gpu/vaapi/h264_vaapi_video_encoder_delegate_unittest.cc
@@ -6,6 +6,10 @@
#include <memory>
+#include "base/logging.h"
+#include "build/build_config.h"
+#include "media/gpu/vaapi/va_surface.h"
+#include "media/gpu/vaapi/vaapi_common.h"
#include "media/gpu/vaapi/vaapi_wrapper.h"
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
@@ -17,7 +21,7 @@ using ::testing::Return;
namespace media {
namespace {
-VaapiVideoEncoderDelegate::Config kDefaultVEADelegateConfig{10};
+VaapiVideoEncoderDelegate::Config kDefaultVEADelegateConfig{4};
VideoEncodeAccelerator::Config kDefaultVEAConfig(
PIXEL_FORMAT_I420,
@@ -32,17 +36,67 @@ VideoEncodeAccelerator::Config kDefaultVEAConfig(
VideoEncodeAccelerator::Config::StorageType::kShmem,
VideoEncodeAccelerator::Config::ContentType::kCamera);
+void ValidateTemporalLayerStructure(uint8_t num_temporal_layers,
+ size_t num_frames,
+ int frame_num,
+ uint8_t temporal_idx,
+ bool ref,
+ int& previous_frame_num) {
+ constexpr size_t kTemporalLayerCycle = 4;
+ constexpr uint8_t kExpectedTemporalIdx[][kTemporalLayerCycle] = {
+ {0, 1, 0, 1}, // For two temporal layers.
+ {0, 2, 1, 2} // For three temporal layers.
+ };
+
+ const uint8_t expected_temporal_idx =
+ kExpectedTemporalIdx[num_temporal_layers - 2]
+ [num_frames % kTemporalLayerCycle];
+ EXPECT_EQ(temporal_idx, expected_temporal_idx)
+      << "Unexpected temporal index: temporal_idx="
+ << base::strict_cast<int>(temporal_idx)
+ << ", expected=" << base::strict_cast<int>(expected_temporal_idx)
+ << ", num_frames=" << num_frames;
+
+ const bool expected_ref = temporal_idx != num_temporal_layers - 1;
+ EXPECT_EQ(ref, expected_ref)
+ << "Unexpected reference: reference=" << ref
+ << ", expected=" << expected_ref
+ << ", temporal_idx=" << base::strict_cast<int>(temporal_idx)
+ << ", num_frames=" << num_frames;
+
+ if (num_frames == 0) {
+ // IDR frame.
+ EXPECT_EQ(frame_num, 0);
+ previous_frame_num = 0;
+ return;
+ }
+
+ EXPECT_EQ(frame_num, previous_frame_num + ref);
+ previous_frame_num = frame_num;
+}
+
class MockVaapiWrapper : public VaapiWrapper {
public:
MockVaapiWrapper() : VaapiWrapper(kEncodeConstantBitrate) {}
+ bool GetSupportedPackedHeaders(VideoCodecProfile profile,
+ bool& packed_sps,
+ bool& packed_pps,
+ bool& packed_slice) override {
+ packed_sps = true;
+ packed_pps = true;
+ packed_slice = true;
+ return true;
+ }
+
protected:
~MockVaapiWrapper() override = default;
};
} // namespace
-class H264VaapiVideoEncoderDelegateTest : public ::testing::Test {
+class H264VaapiVideoEncoderDelegateTest
+ : public ::testing::TestWithParam<uint8_t> {
public:
H264VaapiVideoEncoderDelegateTest() = default;
void SetUp() override;
@@ -51,11 +105,45 @@ class H264VaapiVideoEncoderDelegateTest : public ::testing::Test {
MOCK_METHOD0(OnError, void());
+ bool InitializeEncoder(uint8_t num_temporal_layers);
+ void EncodeFrame(bool force_keyframe);
+
protected:
std::unique_ptr<H264VaapiVideoEncoderDelegate> encoder_;
+
+ private:
+ std::unique_ptr<VaapiVideoEncoderDelegate::EncodeJob> CreateEncodeJob(
+ bool keyframe);
+
scoped_refptr<MockVaapiWrapper> mock_vaapi_wrapper_;
+ unsigned int next_surface_id_ = 0;
+ size_t num_encode_frames_ = 0;
+ int previous_frame_num_ = 0;
};
+std::unique_ptr<VaapiVideoEncoderDelegate::EncodeJob>
+H264VaapiVideoEncoderDelegateTest::CreateEncodeJob(bool keyframe) {
+ auto input_frame = VideoFrame::CreateFrame(
+ kDefaultVEAConfig.input_format, kDefaultVEAConfig.input_visible_size,
+ gfx::Rect(kDefaultVEAConfig.input_visible_size),
+ kDefaultVEAConfig.input_visible_size, base::TimeDelta());
+ LOG_ASSERT(input_frame) << " Failed to create VideoFrame";
+
+ auto va_surface = base::MakeRefCounted<VASurface>(
+ next_surface_id_++, kDefaultVEAConfig.input_visible_size,
+ VA_RT_FORMAT_YUV420, base::DoNothing());
+ scoped_refptr<H264Picture> picture(new VaapiH264Picture(va_surface));
+
+ constexpr VABufferID kDummyVABufferID = 12;
+ auto scoped_va_buffer = ScopedVABuffer::CreateForTesting(
+ kDummyVABufferID, VAEncCodedBufferType,
+ kDefaultVEAConfig.input_visible_size.GetArea());
+
+ return std::make_unique<VaapiVideoEncoderDelegate::EncodeJob>(
+ input_frame, keyframe, base::DoNothing(), va_surface, picture,
+ std::move(scoped_va_buffer));
+}
+
void H264VaapiVideoEncoderDelegateTest::SetUp() {
mock_vaapi_wrapper_ = base::MakeRefCounted<MockVaapiWrapper>();
ASSERT_TRUE(mock_vaapi_wrapper_);
@@ -67,6 +155,44 @@ void H264VaapiVideoEncoderDelegateTest::SetUp() {
EXPECT_CALL(*this, OnError()).Times(0);
}
+bool H264VaapiVideoEncoderDelegateTest::InitializeEncoder(
+ uint8_t num_temporal_layers) {
+ auto vea_config = kDefaultVEAConfig;
+ vea_config.spatial_layers.resize(1u);
+ auto& sl = vea_config.spatial_layers[0];
+ sl.width = vea_config.input_visible_size.width();
+ sl.height = vea_config.input_visible_size.height();
+ sl.bitrate_bps = vea_config.bitrate.target();
+ sl.framerate = vea_config.initial_framerate.value_or(30);
+ sl.max_qp = 30;
+ sl.num_of_temporal_layers = num_temporal_layers;
+ return encoder_->Initialize(vea_config, kDefaultVEADelegateConfig);
+}
+
+void H264VaapiVideoEncoderDelegateTest::EncodeFrame(bool force_keyframe) {
+ auto encode_job = CreateEncodeJob(force_keyframe);
+ EXPECT_TRUE(encoder_->PrepareEncodeJob(encode_job.get()));
+
+ const H264Picture& pic = *encoder_->GetPicture(encode_job.get());
+ if (force_keyframe)
+ EXPECT_EQ(pic.idr, true);
+ EXPECT_EQ(pic.type == H264SliceHeader::kISlice, pic.idr);
+ if (pic.idr)
+ num_encode_frames_ = 0;
+
+ const int frame_num = pic.frame_num;
+ constexpr size_t kDummyPayloadSize = 12345;
+ const BitstreamBufferMetadata metadata =
+ encoder_->GetMetadata(encode_job.get(), kDummyPayloadSize);
+ ASSERT_TRUE(metadata.h264.has_value());
+
+ const uint8_t temporal_idx = metadata.h264->temporal_idx;
+ ValidateTemporalLayerStructure(GetParam(), num_encode_frames_, frame_num,
+ temporal_idx, pic.ref, previous_frame_num_);
+
+ num_encode_frames_++;
+}
+
TEST_F(H264VaapiVideoEncoderDelegateTest, Initialize) {
auto vea_config = kDefaultVEAConfig;
const auto vea_delegate_config = kDefaultVEADelegateConfig;
@@ -82,17 +208,41 @@ TEST_F(H264VaapiVideoEncoderDelegateTest, Initialize) {
ExpectLevel(H264SPS::kLevelIDC5p1);
}
-TEST_F(H264VaapiVideoEncoderDelegateTest, InitializeFailsTemporalLayerRequest) {
- auto vea_config = kDefaultVEAConfig;
- vea_config.spatial_layers.resize(1u);
- auto& sl = vea_config.spatial_layers[0];
- sl.width = vea_config.input_visible_size.width();
- sl.height = vea_config.input_visible_size.height();
- sl.bitrate_bps = vea_config.bitrate.target();
- sl.framerate = vea_config.initial_framerate.value_or(30);
- sl.max_qp = 30;
- sl.num_of_temporal_layers = 2u;
- EXPECT_FALSE(encoder_->Initialize(vea_config, kDefaultVEADelegateConfig));
+// H.264 temporal layer encoding is enabled on ChromeOS only. Skip this test
+// on other platforms.
+#if defined(ARCH_CPU_X86_FAMILY) && BUILDFLAG(IS_CHROMEOS_ASH)
+TEST_P(H264VaapiVideoEncoderDelegateTest, EncodeTemporalLayerRequest) {
+ // TODO(b/199487660): Enable H.264 temporal layer encoding on AMD once their
+  // drivers support it.
+ const auto implementation = VaapiWrapper::GetImplementationType();
+ if (implementation != VAImplementation::kIntelI965 &&
+ implementation != VAImplementation::kIntelIHD) {
+ GTEST_SKIP() << "Skip temporal layer test on AMD devices";
+ }
+
+ const uint8_t num_temporal_layers = GetParam();
+ const bool initialize_success = num_temporal_layers <= 3;
+ // Initialize.
+ EXPECT_EQ(initialize_success, InitializeEncoder(num_temporal_layers));
+ if (!initialize_success)
+ return;
+
+ EXPECT_EQ(encoder_->GetCodedSize(), kDefaultVEAConfig.input_visible_size);
+ EXPECT_EQ(encoder_->GetMaxNumOfRefFrames(),
+ base::checked_cast<size_t>(num_temporal_layers - 1));
+ EXPECT_EQ(encoder_->GetSVCLayerResolutions(),
+ std::vector<gfx::Size>{kDefaultVEAConfig.input_visible_size});
+
+  constexpr size_t kKeyFrameInterval = 10;
+ for (size_t frame_num = 0; frame_num < 30; ++frame_num) {
+ const bool force_keyframe = frame_num % kKeyFrameInterval == 0;
+ EncodeFrame(force_keyframe);
+ }
}
+// Initialization is expected to fail for the 4-temporal-layer case.
+INSTANTIATE_TEST_SUITE_P(,
+ H264VaapiVideoEncoderDelegateTest,
+ ::testing::Values(2u, 3u, 4u));
+#endif // defined(ARCH_CPU_X86_FAMILY) && BUILDFLAG(IS_CHROMEOS_ASH)
} // namespace media
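
For readers unfamiliar with the structure the test above validates: temporal_idx follows a fixed four-frame cycle ({0, 1, 0, 1} for two layers, {0, 2, 1, 2} for three), frames in the top layer are never used as references, and frame_num advances only for reference frames. A standalone sketch of that expectation, mirroring kExpectedTemporalIdx and the ref check in ValidateTemporalLayerStructure() (illustrative, only valid for 2 or 3 layers):

#include <cstddef>
#include <cstdint>

struct LayerInfo {
  uint8_t temporal_idx;
  bool is_reference;
};

// Returns the expected layer assignment for the |frame_index|-th frame since
// the last IDR, for |num_temporal_layers| in {2, 3}.
LayerInfo ExpectedLayerInfo(uint8_t num_temporal_layers, size_t frame_index) {
  static constexpr uint8_t kCycleTwoLayers[] = {0, 1, 0, 1};
  static constexpr uint8_t kCycleThreeLayers[] = {0, 2, 1, 2};
  const uint8_t* cycle =
      (num_temporal_layers == 2) ? kCycleTwoLayers : kCycleThreeLayers;
  LayerInfo info;
  info.temporal_idx = cycle[frame_index % 4];
  // Only frames below the top temporal layer are kept as references.
  info.is_reference = info.temporal_idx != num_temporal_layers - 1;
  return info;
}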
diff --git a/chromium/media/gpu/vaapi/h265_vaapi_video_decoder_delegate.cc b/chromium/media/gpu/vaapi/h265_vaapi_video_decoder_delegate.cc
index ecaf8fdddad..556ea8a20f1 100644
--- a/chromium/media/gpu/vaapi/h265_vaapi_video_decoder_delegate.cc
+++ b/chromium/media/gpu/vaapi/h265_vaapi_video_decoder_delegate.cc
@@ -57,15 +57,7 @@ scoped_refptr<H265Picture> H265VaapiVideoDecoderDelegate::CreateH265Picture() {
if (!va_surface)
return nullptr;
- scoped_refptr<H265Picture> pic = new VaapiH265Picture(std::move(va_surface));
- if (!vaapi_dec_->IsScalingDecode())
- return pic;
-
- // Setup the scaling buffer.
- scoped_refptr<VASurface> scaled_surface = vaapi_dec_->CreateDecodeSurface();
- CHECK(scaled_surface);
- pic->AsVaapiH265Picture()->SetDecodeSurface(std::move(scaled_surface));
- return pic;
+ return new VaapiH265Picture(std::move(va_surface));
}
DecodeStatus H265VaapiVideoDecoderDelegate::SubmitFrameMetadata(
@@ -475,20 +467,11 @@ DecodeStatus H265VaapiVideoDecoderDelegate::SubmitDecode(
#endif // BUILDFLAG(IS_CHROMEOS_ASH)
const VaapiH265Picture* vaapi_pic = pic->AsVaapiH265Picture();
- CHECK(gfx::Rect(vaapi_pic->GetDecodeSize()).Contains(pic->visible_rect()));
- VAProcPipelineParameterBuffer proc_buffer;
- if (FillDecodeScalingIfNeeded(pic->visible_rect(),
- vaapi_pic->GetVADecodeSurfaceID(),
- vaapi_pic->va_surface(), &proc_buffer)) {
- if (!vaapi_wrapper_->SubmitBuffer(VAProcPipelineParameterBufferType,
- sizeof(proc_buffer), &proc_buffer)) {
- DLOG(ERROR) << "Failed submitting proc buffer";
- return DecodeStatus::kFail;
- }
- }
+ CHECK(
+ gfx::Rect(vaapi_pic->va_surface()->size()).Contains(pic->visible_rect()));
const bool success = vaapi_wrapper_->ExecuteAndDestroyPendingBuffers(
- vaapi_pic->GetVADecodeSurfaceID());
+ vaapi_pic->GetVASurfaceID());
ref_pic_list_pocs_.clear();
#if BUILDFLAG(IS_CHROMEOS_ASH)
encryption_segment_info_.clear();
@@ -507,11 +490,9 @@ bool H265VaapiVideoDecoderDelegate::OutputPicture(
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
const VaapiH265Picture* vaapi_pic = pic->AsVaapiH265Picture();
- vaapi_dec_->SurfaceReady(
- vaapi_pic->va_surface(), vaapi_pic->bitstream_id(),
- vaapi_dec_->GetOutputVisibleRect(vaapi_pic->visible_rect(),
- vaapi_pic->va_surface()->size()),
- vaapi_pic->get_colorspace());
+ vaapi_dec_->SurfaceReady(vaapi_pic->va_surface(), vaapi_pic->bitstream_id(),
+ vaapi_pic->visible_rect(),
+ vaapi_pic->get_colorspace());
return true;
}
@@ -541,7 +522,7 @@ void H265VaapiVideoDecoderDelegate::FillVAPicture(
VAPictureHEVC* va_pic,
scoped_refptr<H265Picture> pic) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
- va_pic->picture_id = pic->AsVaapiH265Picture()->GetVADecodeSurfaceID();
+ va_pic->picture_id = pic->AsVaapiH265Picture()->GetVASurfaceID();
va_pic->pic_order_cnt = pic->pic_order_cnt_val_;
va_pic->flags = 0;
diff --git a/chromium/media/gpu/vaapi/va_stub_header.fragment b/chromium/media/gpu/vaapi/va_stub_header.fragment
index bed198484d5..e8c46c5e81c 100644
--- a/chromium/media/gpu/vaapi/va_stub_header.fragment
+++ b/chromium/media/gpu/vaapi/va_stub_header.fragment
@@ -1,12 +1,14 @@
// The extra include header needed in the generated stub file for defining
// various libva types.
+#include "media/gpu/buildflags.h"
+
extern "C" {
#include <va/va_drm.h>
#include <va/va.h>
-#if defined(USE_X11)
+#if BUILDFLAG(USE_VAAPI_X11)
#include <va/va_x11.h>
-#endif
+#endif // BUILDFLAG(USE_VAAPI_X11)
}
diff --git a/chromium/media/gpu/vaapi/vaapi_common.cc b/chromium/media/gpu/vaapi/vaapi_common.cc
index c2d0548488d..14a5c416347 100644
--- a/chromium/media/gpu/vaapi/vaapi_common.cc
+++ b/chromium/media/gpu/vaapi/vaapi_common.cc
@@ -17,11 +17,6 @@ VaapiH264Picture* VaapiH264Picture::AsVaapiH264Picture() {
return this;
}
-void VaapiH264Picture::SetDecodeSurface(
- scoped_refptr<VASurface> decode_va_surface) {
- decode_va_surface_ = std::move(decode_va_surface);
-}
-
#if BUILDFLAG(ENABLE_PLATFORM_HEVC_DECODING)
VaapiH265Picture::VaapiH265Picture(scoped_refptr<VASurface> va_surface)
: va_surface_(va_surface) {}
@@ -32,11 +27,6 @@ VaapiH265Picture* VaapiH265Picture::AsVaapiH265Picture() {
return this;
}
-void VaapiH265Picture::SetDecodeSurface(
- scoped_refptr<VASurface> decode_va_surface) {
- decode_va_surface_ = std::move(decode_va_surface);
-}
-
#endif // BUILDFLAG(ENABLE_PLATFORM_HEVC_DECODING)
VaapiVP8Picture::VaapiVP8Picture(scoped_refptr<VASurface> va_surface)
@@ -57,11 +47,6 @@ VaapiVP9Picture* VaapiVP9Picture::AsVaapiVP9Picture() {
return this;
}
-void VaapiVP9Picture::SetDecodeSurface(
- scoped_refptr<VASurface> decode_va_surface) {
- decode_va_surface_ = std::move(decode_va_surface);
-}
-
scoped_refptr<VP9Picture> VaapiVP9Picture::CreateDuplicate() {
return new VaapiVP9Picture(va_surface_);
}
diff --git a/chromium/media/gpu/vaapi/vaapi_common.h b/chromium/media/gpu/vaapi/vaapi_common.h
index 710aaa519ae..6dc18cbe3d3 100644
--- a/chromium/media/gpu/vaapi/vaapi_common.h
+++ b/chromium/media/gpu/vaapi/vaapi_common.h
@@ -30,21 +30,12 @@ class VaapiH264Picture : public H264Picture {
scoped_refptr<VASurface> va_surface() const { return va_surface_; }
VASurfaceID GetVASurfaceID() const { return va_surface_->id(); }
- void SetDecodeSurface(scoped_refptr<VASurface> decode_va_surface);
- VASurfaceID GetVADecodeSurfaceID() const {
- return decode_va_surface_ ? decode_va_surface_->id() : GetVASurfaceID();
- }
- const gfx::Size& GetDecodeSize() const {
- return decode_va_surface_ ? decode_va_surface_->size()
- : va_surface_->size();
- }
protected:
~VaapiH264Picture() override;
private:
scoped_refptr<VASurface> va_surface_;
- scoped_refptr<VASurface> decode_va_surface_;
DISALLOW_COPY_AND_ASSIGN(VaapiH264Picture);
};
@@ -61,21 +52,12 @@ class VaapiH265Picture : public H265Picture {
scoped_refptr<VASurface> va_surface() const { return va_surface_; }
VASurfaceID GetVASurfaceID() const { return va_surface_->id(); }
- void SetDecodeSurface(scoped_refptr<VASurface> decode_va_surface);
- VASurfaceID GetVADecodeSurfaceID() const {
- return decode_va_surface_ ? decode_va_surface_->id() : GetVASurfaceID();
- }
- const gfx::Size& GetDecodeSize() const {
- return decode_va_surface_ ? decode_va_surface_->size()
- : va_surface_->size();
- }
protected:
~VaapiH265Picture() override;
private:
scoped_refptr<VASurface> va_surface_;
- scoped_refptr<VASurface> decode_va_surface_;
};
#endif // BUILDFLAG(ENABLE_PLATFORM_HEVC_DECODING)
@@ -105,14 +87,6 @@ class VaapiVP9Picture : public VP9Picture {
scoped_refptr<VASurface> va_surface() const { return va_surface_; }
VASurfaceID GetVASurfaceID() const { return va_surface_->id(); }
- void SetDecodeSurface(scoped_refptr<VASurface> decode_va_surface);
- VASurfaceID GetVADecodeSurfaceID() const {
- return decode_va_surface_ ? decode_va_surface_->id() : GetVASurfaceID();
- }
- const gfx::Size& GetDecodeSize() const {
- return decode_va_surface_ ? decode_va_surface_->size()
- : va_surface_->size();
- }
protected:
~VaapiVP9Picture() override;
@@ -121,7 +95,6 @@ class VaapiVP9Picture : public VP9Picture {
scoped_refptr<VP9Picture> CreateDuplicate() override;
scoped_refptr<VASurface> va_surface_;
- scoped_refptr<VASurface> decode_va_surface_;
DISALLOW_COPY_AND_ASSIGN(VaapiVP9Picture);
};
diff --git a/chromium/media/gpu/vaapi/vaapi_dmabuf_video_frame_mapper.cc b/chromium/media/gpu/vaapi/vaapi_dmabuf_video_frame_mapper.cc
index b3abc085115..0fbdbdc0e5c 100644
--- a/chromium/media/gpu/vaapi/vaapi_dmabuf_video_frame_mapper.cc
+++ b/chromium/media/gpu/vaapi/vaapi_dmabuf_video_frame_mapper.cc
@@ -133,8 +133,7 @@ scoped_refptr<VideoFrame> CreateMappedVideoFrame(
DeallocateBuffers, std::move(va_image), std::move(src_video_frame)));
for (auto&& buffer : p016le_buffers) {
video_frame->AddDestructionObserver(
- base::BindOnce(base::DoNothing::Once<std::unique_ptr<uint16_t[]>>(),
- std::move(buffer)));
+ base::BindOnce([](std::unique_ptr<uint16_t[]>) {}, std::move(buffer)));
}
return video_frame;
}
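
The one-line change above replaces the removed base::DoNothing::Once<T>() helper with an explicit no-op lambda that takes ownership of the bound buffer; either way, the heap allocation stays alive until the wrapping VideoFrame is destroyed. A minimal sketch of the idiom, reusing only the AddDestructionObserver() and base::BindOnce() calls visible in the hunk (the helper function itself is made up for illustration):

#include <cstdint>
#include <memory>

#include "base/bind.h"
#include "media/base/video_frame.h"

// Keep |buffer| alive as long as |frame|: bind it into a callback that does
// nothing, and let the frame's destruction run (and destroy) that callback.
void KeepBufferAliveWithFrame(media::VideoFrame* frame,
                              std::unique_ptr<uint16_t[]> buffer) {
  frame->AddDestructionObserver(
      base::BindOnce([](std::unique_ptr<uint16_t[]>) {}, std::move(buffer)));
}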
diff --git a/chromium/media/gpu/vaapi/vaapi_dmabuf_video_frame_mapper.h b/chromium/media/gpu/vaapi/vaapi_dmabuf_video_frame_mapper.h
index 1077b0ce429..e22e7b9c305 100644
--- a/chromium/media/gpu/vaapi/vaapi_dmabuf_video_frame_mapper.h
+++ b/chromium/media/gpu/vaapi/vaapi_dmabuf_video_frame_mapper.h
@@ -22,6 +22,10 @@ class MEDIA_GPU_EXPORT VaapiDmaBufVideoFrameMapper : public VideoFrameMapper {
public:
static std::unique_ptr<VideoFrameMapper> Create(VideoPixelFormat format);
+ VaapiDmaBufVideoFrameMapper(const VaapiDmaBufVideoFrameMapper&) = delete;
+ VaapiDmaBufVideoFrameMapper& operator=(const VaapiDmaBufVideoFrameMapper&) =
+ delete;
+
~VaapiDmaBufVideoFrameMapper() override;
// VideoFrameMapper override.
@@ -33,8 +37,6 @@ class MEDIA_GPU_EXPORT VaapiDmaBufVideoFrameMapper : public VideoFrameMapper {
// Vaapi components for mapping.
const scoped_refptr<VaapiWrapper> vaapi_wrapper_;
-
- DISALLOW_COPY_AND_ASSIGN(VaapiDmaBufVideoFrameMapper);
};
} // namespace media
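
This header shows the mechanical change applied throughout the patch: the DISALLOW_COPY_AND_ASSIGN macro at the end of the private section is replaced by explicitly deleted copy operations near the top of the public section. A tiny self-contained illustration of the new spelling (the class name is made up):

// Deleted members in the public section make the non-copyability of a class
// visible in its interface, without relying on the base/macros.h macro.
class Example {
 public:
  Example() = default;

  Example(const Example&) = delete;
  Example& operator=(const Example&) = delete;
};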
diff --git a/chromium/media/gpu/vaapi/vaapi_image_decode_accelerator_worker.h b/chromium/media/gpu/vaapi/vaapi_image_decode_accelerator_worker.h
index af69a710d1f..656ca5faf07 100644
--- a/chromium/media/gpu/vaapi/vaapi_image_decode_accelerator_worker.h
+++ b/chromium/media/gpu/vaapi/vaapi_image_decode_accelerator_worker.h
@@ -45,6 +45,11 @@ class VaapiImageDecodeAcceleratorWorker
// internal state. Returns nullptr if initialization fails.
static std::unique_ptr<VaapiImageDecodeAcceleratorWorker> Create();
+ VaapiImageDecodeAcceleratorWorker(const VaapiImageDecodeAcceleratorWorker&) =
+ delete;
+ VaapiImageDecodeAcceleratorWorker& operator=(
+ const VaapiImageDecodeAcceleratorWorker&) = delete;
+
~VaapiImageDecodeAcceleratorWorker() override;
// gpu::ImageDecodeAcceleratorWorker implementation.
@@ -69,8 +74,6 @@ class VaapiImageDecodeAcceleratorWorker
SEQUENCE_CHECKER(main_sequence_checker_);
SEQUENCE_CHECKER(io_sequence_checker_);
-
- DISALLOW_COPY_AND_ASSIGN(VaapiImageDecodeAcceleratorWorker);
};
} // namespace media
diff --git a/chromium/media/gpu/vaapi/vaapi_image_decoder.h b/chromium/media/gpu/vaapi/vaapi_image_decoder.h
index 3f56610c9dc..b390f4c420c 100644
--- a/chromium/media/gpu/vaapi/vaapi_image_decoder.h
+++ b/chromium/media/gpu/vaapi/vaapi_image_decoder.h
@@ -59,6 +59,9 @@ enum class VaapiImageDecodeStatus : uint32_t {
// call the methods on any thread, but calls must be synchronized externally.
class VaapiImageDecoder {
public:
+ VaapiImageDecoder(const VaapiImageDecoder&) = delete;
+ VaapiImageDecoder& operator=(const VaapiImageDecoder&) = delete;
+
virtual ~VaapiImageDecoder();
// Initializes |vaapi_wrapper_| in kDecode mode with the
@@ -113,8 +116,6 @@ class VaapiImageDecoder {
// The VA profile used for the current image decoder.
const VAProfile va_profile_;
-
- DISALLOW_COPY_AND_ASSIGN(VaapiImageDecoder);
};
} // namespace media
diff --git a/chromium/media/gpu/vaapi/vaapi_image_processor_backend.cc b/chromium/media/gpu/vaapi/vaapi_image_processor_backend.cc
new file mode 100644
index 00000000000..ef41f007e48
--- /dev/null
+++ b/chromium/media/gpu/vaapi/vaapi_image_processor_backend.cc
@@ -0,0 +1,287 @@
+// Copyright 2021 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/gpu/vaapi/vaapi_image_processor_backend.h"
+
+#include <stdint.h>
+
+#include <va/va.h>
+
+#include "base/bind.h"
+#include "base/callback.h"
+#include "base/containers/contains.h"
+#include "base/memory/ptr_util.h"
+#include "base/memory/scoped_refptr.h"
+#include "base/metrics/histogram_functions.h"
+#include "base/stl_util.h"
+#include "base/task/post_task.h"
+#include "build/build_config.h"
+#include "media/gpu/chromeos/fourcc.h"
+#include "media/gpu/chromeos/platform_video_frame_utils.h"
+#include "media/gpu/macros.h"
+#include "media/gpu/vaapi/va_surface.h"
+#include "media/gpu/vaapi/vaapi_utils.h"
+#include "media/gpu/vaapi/vaapi_wrapper.h"
+#include "ui/gfx/native_pixmap.h"
+
+namespace media {
+
+#if defined(OS_CHROMEOS)
+namespace {
+bool IsSupported(const ImageProcessorBackend::PortConfig& config) {
+ if (!config.fourcc.ToVAFourCC())
+ return false;
+ const uint32_t va_fourcc = *config.fourcc.ToVAFourCC();
+ if (!VaapiWrapper::IsVppFormatSupported(va_fourcc)) {
+ VLOGF(2) << "Unsupported format: VA_FOURCC_" << FourccToString(va_fourcc);
+ return false;
+ }
+ if (!VaapiWrapper::IsVppResolutionAllowed(config.size)) {
+ VLOGF(2) << "Unsupported size: " << config.size.ToString();
+ return false;
+ }
+ const gfx::Size& visible_size = config.visible_rect.size();
+ if (!VaapiWrapper::IsVppResolutionAllowed(visible_size)) {
+ VLOGF(2) << "Unsupported visible size: " << visible_size.ToString();
+ return false;
+ }
+ if (!gfx::Rect(config.size).Contains(config.visible_rect)) {
+ VLOGF(2) << "The frame size (" << config.size.ToString()
+ << ") does not contain the visible rect ("
+ << config.visible_rect.ToString() << ")";
+ return false;
+ }
+ return true;
+}
+
+} // namespace
+#endif
+
+// static
+std::unique_ptr<ImageProcessorBackend> VaapiImageProcessorBackend::Create(
+ const PortConfig& input_config,
+ const PortConfig& output_config,
+ const std::vector<OutputMode>& preferred_output_modes,
+ VideoRotation relative_rotation,
+ ErrorCB error_cb,
+ scoped_refptr<base::SequencedTaskRunner> backend_task_runner) {
+// VaapiImageProcessorBackend supports ChromeOS only.
+#if !defined(OS_CHROMEOS)
+ return nullptr;
+#else
+ if (!IsSupported(input_config) || !IsSupported(output_config))
+ return nullptr;
+
+ if (!base::Contains(input_config.preferred_storage_types,
+ VideoFrame::STORAGE_DMABUFS) &&
+ !base::Contains(input_config.preferred_storage_types,
+ VideoFrame::STORAGE_GPU_MEMORY_BUFFER)) {
+    VLOGF(2) << "VaapiImageProcessorBackend only supports DMA-buf or "
+                "GpuMemoryBuffer backed VideoFrames for input";
+ return nullptr;
+ }
+ if (!base::Contains(output_config.preferred_storage_types,
+ VideoFrame::STORAGE_DMABUFS) &&
+ !base::Contains(output_config.preferred_storage_types,
+ VideoFrame::STORAGE_GPU_MEMORY_BUFFER)) {
+    VLOGF(2) << "VaapiImageProcessorBackend only supports DMA-buf or "
+                "GpuMemoryBuffer backed VideoFrames for output";
+ return nullptr;
+ }
+
+ if (!base::Contains(preferred_output_modes, OutputMode::IMPORT)) {
+ VLOGF(2) << "VaapiImageProcessorBackend only supports IMPORT mode.";
+ return nullptr;
+ }
+
+ // Note that EncryptionScheme::kUnencrypted is fine even for the use case
+ // where the VPP is needed for processing protected content after decoding.
+ // That's because when calling VaapiWrapper::BlitSurface(), we re-use the
+ // protected session ID created at decoding time.
+ auto vaapi_wrapper = VaapiWrapper::Create(
+ VaapiWrapper::kVideoProcess, VAProfileNone,
+ EncryptionScheme::kUnencrypted,
+ base::BindRepeating(&ReportVaapiErrorToUMA,
+ "Media.VaapiImageProcessorBackend.VAAPIError"));
+ if (!vaapi_wrapper) {
+ VLOGF(1) << "Failed to create VaapiWrapper";
+ return nullptr;
+ }
+
+ // Size is irrelevant for a VPP context.
+ if (!vaapi_wrapper->CreateContext(gfx::Size())) {
+ VLOGF(1) << "Failed to create context for VPP";
+ return nullptr;
+ }
+
+ // Checks if VA-API driver supports rotation.
+ if (relative_rotation != VIDEO_ROTATION_0 &&
+ !vaapi_wrapper->IsRotationSupported()) {
+ VLOGF(1) << "VaapiIP doesn't support rotation";
+ return nullptr;
+ }
+
+  // Ideally we would restrict the acceptable PortConfig for both input and
+  // output to the one returned by GetPlatformVideoFrameLayout(). However, the
+  // ImageProcessorFactory interface doesn't say what the ImageProcessor will
+  // be used for (e.g. format conversion after decoding, or scaling before
+  // encoding), so we cannot call GetPlatformVideoFrameLayout() with a proper
+  // gfx::BufferUsage.
+  // TODO(crbug.com/898423): Adjust the layout once ImageProcessor provides the
+  // usage scenario.
+ return base::WrapUnique<ImageProcessorBackend>(new VaapiImageProcessorBackend(
+ std::move(vaapi_wrapper), input_config, output_config, OutputMode::IMPORT,
+ relative_rotation, std::move(error_cb), std::move(backend_task_runner)));
+#endif
+}
+
+VaapiImageProcessorBackend::VaapiImageProcessorBackend(
+ scoped_refptr<VaapiWrapper> vaapi_wrapper,
+ const PortConfig& input_config,
+ const PortConfig& output_config,
+ OutputMode output_mode,
+ VideoRotation relative_rotation,
+ ErrorCB error_cb,
+ scoped_refptr<base::SequencedTaskRunner> backend_task_runner)
+ : ImageProcessorBackend(input_config,
+ output_config,
+ output_mode,
+ relative_rotation,
+ std::move(error_cb),
+ std::move(backend_task_runner)),
+ vaapi_wrapper_(std::move(vaapi_wrapper)) {}
+
+VaapiImageProcessorBackend::~VaapiImageProcessorBackend() {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(backend_sequence_checker_);
+
+ // To clear |allocated_va_surfaces_|, we have to first DestroyContext().
+ vaapi_wrapper_->DestroyContext();
+ allocated_va_surfaces_.clear();
+}
+
+const VASurface* VaapiImageProcessorBackend::GetSurfaceForVideoFrame(
+ scoped_refptr<VideoFrame> frame,
+ bool use_protected) {
+ if (!frame->HasGpuMemoryBuffer())
+ return nullptr;
+ DCHECK_EQ(frame->storage_type(), VideoFrame::STORAGE_GPU_MEMORY_BUFFER);
+
+ const gfx::GpuMemoryBufferId gmb_id = frame->GetGpuMemoryBuffer()->GetId();
+ if (base::Contains(allocated_va_surfaces_, gmb_id)) {
+ const VASurface* surface = allocated_va_surfaces_[gmb_id].get();
+ CHECK_EQ(frame->GetGpuMemoryBuffer()->GetSize(), surface->size());
+ const unsigned int format = VaapiWrapper::BufferFormatToVARTFormat(
+ frame->GetGpuMemoryBuffer()->GetFormat());
+ CHECK_NE(format, 0u);
+ CHECK_EQ(format, surface->format());
+ return surface;
+ }
+
+ scoped_refptr<gfx::NativePixmap> pixmap =
+ CreateNativePixmapDmaBuf(frame.get());
+ if (!pixmap) {
+ VLOGF(1) << "Failed to create NativePixmap from VideoFrame";
+ return nullptr;
+ }
+
+ auto va_surface = vaapi_wrapper_->CreateVASurfaceForPixmap(std::move(pixmap),
+ use_protected);
+ if (!va_surface) {
+ VLOGF(1) << "Failed to create VASurface from NativePixmap";
+ return nullptr;
+ }
+
+ allocated_va_surfaces_[gmb_id] = std::move(va_surface);
+ return allocated_va_surfaces_[gmb_id].get();
+}
+
+void VaapiImageProcessorBackend::Process(scoped_refptr<VideoFrame> input_frame,
+ scoped_refptr<VideoFrame> output_frame,
+ FrameReadyCB cb) {
+ DVLOGF(4);
+ DCHECK_CALLED_ON_VALID_SEQUENCE(backend_sequence_checker_);
+
+ bool use_protected = false;
+#if BUILDFLAG(IS_CHROMEOS_ASH)
+ VAProtectedSessionID va_protected_session_id = VA_INVALID_ID;
+ if (input_frame->metadata().hw_va_protected_session_id.has_value()) {
+ static_assert(
+ std::is_same<decltype(input_frame->metadata()
+ .hw_va_protected_session_id)::value_type,
+ VAProtectedSessionID>::value,
+ "The optional type of VideoFrameMetadata::hw_va_protected_session_id "
+ "is "
+ "not VAProtectedSessionID");
+ va_protected_session_id =
+ input_frame->metadata().hw_va_protected_session_id.value();
+ use_protected = va_protected_session_id != VA_INVALID_ID;
+ }
+#endif // BUILDFLAG(IS_CHROMEOS_ASH)
+
+ if (needs_context_ && !vaapi_wrapper_->CreateContext(gfx::Size())) {
+ VLOGF(1) << "Failed to create context for VPP";
+ error_cb_.Run();
+ return;
+ }
+ needs_context_ = false;
+
+ DCHECK(input_frame);
+ DCHECK(output_frame);
+ const VASurface* src_va_surface =
+ GetSurfaceForVideoFrame(input_frame, use_protected);
+ if (!src_va_surface) {
+ error_cb_.Run();
+ return;
+ }
+ const VASurface* dst_va_surface =
+ GetSurfaceForVideoFrame(output_frame, use_protected);
+ if (!dst_va_surface) {
+ error_cb_.Run();
+ return;
+ }
+
+ // VA-API performs pixel format conversion and scaling without any filters.
+ if (!vaapi_wrapper_->BlitSurface(
+ *src_va_surface, *dst_va_surface, input_frame->visible_rect(),
+ output_frame->visible_rect(), relative_rotation_
+#if BUILDFLAG(IS_CHROMEOS_ASH)
+ ,
+ va_protected_session_id
+#endif
+ )) {
+#if BUILDFLAG(IS_CHROMEOS_ASH)
+ if (use_protected &&
+ vaapi_wrapper_->IsProtectedSessionDead(va_protected_session_id)) {
+ DCHECK_NE(va_protected_session_id, VA_INVALID_ID);
+
+ // If the VPP failed because the protected session is dead, we should
+ // still output the frame. That's because we don't want to put the
+ // VideoDecoderPipeline into an unusable error state: the
+ // VaapiVideoDecoder can recover from a dead protected session later and
+ // the compositor should not try to render the frame we output here
+ // anyway.
+ output_frame->set_timestamp(input_frame->timestamp());
+ std::move(cb).Run(std::move(output_frame));
+ return;
+ }
+#endif // BUILDFLAG(IS_CHROMEOS_ASH)
+ error_cb_.Run();
+ return;
+ }
+
+ output_frame->set_timestamp(input_frame->timestamp());
+ std::move(cb).Run(std::move(output_frame));
+}
+
+void VaapiImageProcessorBackend::Reset() {
+ DVLOGF(4);
+ DCHECK_CALLED_ON_VALID_SEQUENCE(backend_sequence_checker_);
+
+ // To clear |allocated_va_surfaces_|, we have to first DestroyContext().
+ vaapi_wrapper_->DestroyContext();
+ allocated_va_surfaces_.clear();
+ needs_context_ = true;
+}
+
+} // namespace media
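
Since vaapi_image_processor_backend.cc is a new file, a rough usage sketch may help: it builds the two PortConfigs the same way VaapiMjpegDecodeAccelerator::CreateImageProcessor() does later in this patch and hands them to the Create() factory. The helper name, the include set and the idea of deriving the configs directly from two frames are illustrative assumptions, not part of the patch:

#include <memory>

#include "base/memory/scoped_refptr.h"
#include "base/sequenced_task_runner.h"
#include "media/base/video_frame.h"
#include "media/gpu/chromeos/fourcc.h"
#include "media/gpu/vaapi/vaapi_image_processor_backend.h"

namespace media {

// Illustrative helper: create an IMPORT-mode VA-API processor that converts
// |input|-shaped frames into |output|-shaped frames on |task_runner|.
std::unique_ptr<ImageProcessorBackend> MakeVaapiProcessor(
    const VideoFrame& input,
    const VideoFrame& output,
    ImageProcessorBackend::ErrorCB error_cb,
    scoped_refptr<base::SequencedTaskRunner> task_runner) {
  const auto input_fourcc = Fourcc::FromVideoPixelFormat(input.format());
  const auto output_fourcc = Fourcc::FromVideoPixelFormat(output.format());
  if (!input_fourcc || !output_fourcc)
    return nullptr;
  const ImageProcessorBackend::PortConfig input_config(
      *input_fourcc, input.coded_size(), input.layout().planes(),
      input.visible_rect(), {input.storage_type()});
  const ImageProcessorBackend::PortConfig output_config(
      *output_fourcc, output.coded_size(), output.layout().planes(),
      output.visible_rect(), {output.storage_type()});
  return VaapiImageProcessorBackend::Create(
      input_config, output_config, {ImageProcessorBackend::OutputMode::IMPORT},
      VIDEO_ROTATION_0, std::move(error_cb), std::move(task_runner));
}

}  // namespace media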
diff --git a/chromium/media/gpu/vaapi/vaapi_image_processor_backend.h b/chromium/media/gpu/vaapi/vaapi_image_processor_backend.h
new file mode 100644
index 00000000000..a72b9a05353
--- /dev/null
+++ b/chromium/media/gpu/vaapi/vaapi_image_processor_backend.h
@@ -0,0 +1,77 @@
+// Copyright 2021 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_GPU_VAAPI_VAAPI_IMAGE_PROCESSOR_BACKEND_H_
+#define MEDIA_GPU_VAAPI_VAAPI_IMAGE_PROCESSOR_BACKEND_H_
+
+#include <memory>
+
+#include "base/containers/small_map.h"
+#include "base/macros.h"
+#include "media/gpu/chromeos/image_processor_backend.h"
+#include "media/gpu/media_gpu_export.h"
+#include "ui/gfx/gpu_memory_buffer.h"
+
+namespace media {
+
+class VaapiWrapper;
+class VASurface;
+
+// ImageProcessor that is hardware accelerated with VA-API. This ImageProcessor
+// supports only dma-buf and GpuMemoryBuffer VideoFrames for both input and
+// output.
+class VaapiImageProcessorBackend : public ImageProcessorBackend {
+ public:
+ VaapiImageProcessorBackend(const VaapiImageProcessorBackend&) = delete;
+ VaapiImageProcessorBackend& operator=(const VaapiImageProcessorBackend&) =
+ delete;
+
+ // Factory method to create a VaapiImageProcessorBackend for processing frames
+ // as specified by |input_config| and |output_config|. The provided |error_cb|
+ // will be posted to the same thread that executes Create() if an error occurs
+ // after initialization.
+ // Returns nullptr if it fails to create a VaapiImageProcessorBackend.
+ static std::unique_ptr<ImageProcessorBackend> Create(
+ const PortConfig& input_config,
+ const PortConfig& output_config,
+ const std::vector<OutputMode>& preferred_output_modes,
+ VideoRotation relative_rotation,
+ ErrorCB error_cb,
+ scoped_refptr<base::SequencedTaskRunner> backend_task_runner);
+
+ // ImageProcessor implementation.
+ void Process(scoped_refptr<VideoFrame> input_frame,
+ scoped_refptr<VideoFrame> output_frame,
+ FrameReadyCB cb) override;
+ void Reset() override;
+
+ private:
+ VaapiImageProcessorBackend(
+ scoped_refptr<VaapiWrapper> vaapi_wrapper,
+ const PortConfig& input_config,
+ const PortConfig& output_config,
+ OutputMode output_mode,
+ VideoRotation relative_rotation,
+ ErrorCB error_cb,
+ scoped_refptr<base::SequencedTaskRunner> backend_task_runner);
+ ~VaapiImageProcessorBackend() override;
+
+ const VASurface* GetSurfaceForVideoFrame(scoped_refptr<VideoFrame> frame,
+ bool use_protected);
+
+ const scoped_refptr<VaapiWrapper> vaapi_wrapper_;
+ bool needs_context_ = false;
+
+ // VASurfaces are created via importing dma-bufs into libva using
+ // |vaapi_wrapper_|->CreateVASurfaceForPixmap(). The following map keeps those
+ // VASurfaces for reuse according to the expectations of libva
+ // vaDestroySurfaces(): "Surfaces can only be destroyed after all contexts
+ // using these surfaces have been destroyed."
+ base::small_map<std::map<gfx::GpuMemoryBufferId, scoped_refptr<VASurface>>>
+ allocated_va_surfaces_;
+};
+
+} // namespace media
+
+#endif // MEDIA_GPU_VAAPI_VAAPI_IMAGE_PROCESSOR_BACKEND_H_
diff --git a/chromium/media/gpu/vaapi/vaapi_jpeg_decoder.h b/chromium/media/gpu/vaapi/vaapi_jpeg_decoder.h
index de88d99a867..b0af4aef131 100644
--- a/chromium/media/gpu/vaapi/vaapi_jpeg_decoder.h
+++ b/chromium/media/gpu/vaapi/vaapi_jpeg_decoder.h
@@ -29,6 +29,10 @@ unsigned int VaSurfaceFormatForJpeg(const JpegFrameHeader& frame_header);
class VaapiJpegDecoder : public VaapiImageDecoder {
public:
VaapiJpegDecoder();
+
+ VaapiJpegDecoder(const VaapiJpegDecoder&) = delete;
+ VaapiJpegDecoder& operator=(const VaapiJpegDecoder&) = delete;
+
~VaapiJpegDecoder() override;
// VaapiImageDecoder implementation.
@@ -58,8 +62,6 @@ class VaapiJpegDecoder : public VaapiImageDecoder {
const gfx::Size& new_coded_size,
const gfx::Size& new_visible_size);
bool SubmitBuffers(const JpegParseResult& parse_result);
-
- DISALLOW_COPY_AND_ASSIGN(VaapiJpegDecoder);
};
} // namespace media
diff --git a/chromium/media/gpu/vaapi/vaapi_jpeg_encode_accelerator.cc b/chromium/media/gpu/vaapi/vaapi_jpeg_encode_accelerator.cc
index e9d06f539a0..25b2baafa59 100644
--- a/chromium/media/gpu/vaapi/vaapi_jpeg_encode_accelerator.cc
+++ b/chromium/media/gpu/vaapi/vaapi_jpeg_encode_accelerator.cc
@@ -70,6 +70,10 @@ class VaapiJpegEncodeAccelerator::Encoder {
scoped_refptr<VaapiWrapper> vpp_vaapi_wrapper,
base::RepeatingCallback<void(int32_t, size_t)> video_frame_ready_cb,
base::RepeatingCallback<void(int32_t, Status)> notify_error_cb);
+
+ Encoder(const Encoder&) = delete;
+ Encoder& operator=(const Encoder&) = delete;
+
~Encoder();
// Processes one encode task with DMA-buf.
@@ -106,8 +110,6 @@ class VaapiJpegEncodeAccelerator::Encoder {
uint32_t va_format_;
SEQUENCE_CHECKER(sequence_checker_);
-
- DISALLOW_COPY_AND_ASSIGN(Encoder);
};
VaapiJpegEncodeAccelerator::Encoder::Encoder(
diff --git a/chromium/media/gpu/vaapi/vaapi_jpeg_encode_accelerator.h b/chromium/media/gpu/vaapi/vaapi_jpeg_encode_accelerator.h
index c15c57fb90d..e83c57653c5 100644
--- a/chromium/media/gpu/vaapi/vaapi_jpeg_encode_accelerator.h
+++ b/chromium/media/gpu/vaapi/vaapi_jpeg_encode_accelerator.h
@@ -33,6 +33,11 @@ class MEDIA_GPU_EXPORT VaapiJpegEncodeAccelerator
public:
explicit VaapiJpegEncodeAccelerator(
scoped_refptr<base::SingleThreadTaskRunner> io_task_runner);
+
+ VaapiJpegEncodeAccelerator(const VaapiJpegEncodeAccelerator&) = delete;
+ VaapiJpegEncodeAccelerator& operator=(const VaapiJpegEncodeAccelerator&) =
+ delete;
+
~VaapiJpegEncodeAccelerator() override;
// JpegEncodeAccelerator implementation.
@@ -111,8 +116,6 @@ class MEDIA_GPU_EXPORT VaapiJpegEncodeAccelerator
// |task_runner_|.
base::WeakPtr<VaapiJpegEncodeAccelerator> weak_this_;
base::WeakPtrFactory<VaapiJpegEncodeAccelerator> weak_this_factory_;
-
- DISALLOW_COPY_AND_ASSIGN(VaapiJpegEncodeAccelerator);
};
} // namespace media
diff --git a/chromium/media/gpu/vaapi/vaapi_jpeg_encoder.h b/chromium/media/gpu/vaapi/vaapi_jpeg_encoder.h
index 73fadae229a..e732ba32e9d 100644
--- a/chromium/media/gpu/vaapi/vaapi_jpeg_encoder.h
+++ b/chromium/media/gpu/vaapi/vaapi_jpeg_encoder.h
@@ -29,6 +29,10 @@ class MEDIA_GPU_EXPORT VaapiJpegEncoder {
// VaapiWrapper::kEncodeConstantBitrate mode with VAProfileJPEGBaseline
// profile.
explicit VaapiJpegEncoder(scoped_refptr<VaapiWrapper> vaapi_wrapper);
+
+ VaapiJpegEncoder(const VaapiJpegEncoder&) = delete;
+ VaapiJpegEncoder& operator=(const VaapiJpegEncoder&) = delete;
+
~VaapiJpegEncoder();
// Encode a JPEG picture. It will fill VA-API parameters and call
@@ -64,8 +68,6 @@ class MEDIA_GPU_EXPORT VaapiJpegEncoder {
std::unique_ptr<VAQMatrixBufferJPEG> q_matrix_cached_;
std::unique_ptr<VAHuffmanTableBufferJPEGBaseline> huff_table_param_cached_;
std::unique_ptr<VAEncSliceParameterBufferJPEG> slice_param_cached_;
-
- DISALLOW_COPY_AND_ASSIGN(VaapiJpegEncoder);
};
} // namespace media
diff --git a/chromium/media/gpu/vaapi/vaapi_mjpeg_decode_accelerator.cc b/chromium/media/gpu/vaapi/vaapi_mjpeg_decode_accelerator.cc
index f436784d6ca..cf551b2f827 100644
--- a/chromium/media/gpu/vaapi/vaapi_mjpeg_decode_accelerator.cc
+++ b/chromium/media/gpu/vaapi/vaapi_mjpeg_decode_accelerator.cc
@@ -24,14 +24,15 @@
#include "base/threading/thread_task_runner_handle.h"
#include "base/trace_event/trace_event.h"
#include "gpu/ipc/common/gpu_memory_buffer_impl.h"
-#include "gpu/ipc/common/gpu_memory_buffer_support.h"
#include "media/base/bind_to_current_loop.h"
#include "media/base/bitstream_buffer.h"
#include "media/base/format_utils.h"
#include "media/base/unaligned_shared_memory.h"
#include "media/base/video_frame.h"
#include "media/base/video_frame_layout.h"
+#include "media/base/video_util.h"
#include "media/gpu/chromeos/fourcc.h"
+#include "media/gpu/chromeos/libyuv_image_processor_backend.h"
#include "media/gpu/chromeos/platform_video_frame_utils.h"
#include "media/gpu/macros.h"
#include "media/gpu/vaapi/va_surface.h"
@@ -39,7 +40,6 @@
#include "media/gpu/vaapi/vaapi_utils.h"
#include "media/gpu/vaapi/vaapi_wrapper.h"
#include "third_party/abseil-cpp/absl/types/optional.h"
-#include "third_party/libyuv/include/libyuv.h"
#include "ui/gfx/geometry/rect.h"
#include "ui/gfx/geometry/size.h"
#include "ui/gfx/gpu_memory_buffer.h"
@@ -48,15 +48,15 @@ namespace media {
namespace {
-static void ReportToVAJDAResponseToClientUMA(
+void ReportToVAJDAResponseToClientUMA(
chromeos_camera::MjpegDecodeAccelerator::Error response) {
UMA_HISTOGRAM_ENUMERATION(
"Media.VAJDA.ResponseToClient", response,
chromeos_camera::MjpegDecodeAccelerator::Error::MJDA_ERROR_CODE_MAX + 1);
}
-static chromeos_camera::MjpegDecodeAccelerator::Error
-VaapiJpegDecodeStatusToError(VaapiImageDecodeStatus status) {
+chromeos_camera::MjpegDecodeAccelerator::Error VaapiJpegDecodeStatusToError(
+ VaapiImageDecodeStatus status) {
switch (status) {
case VaapiImageDecodeStatus::kSuccess:
return chromeos_camera::MjpegDecodeAccelerator::Error::NO_ERRORS;
@@ -69,7 +69,7 @@ VaapiJpegDecodeStatusToError(VaapiImageDecodeStatus status) {
}
}
-static bool VerifyDataSize(const VAImage* image) {
+bool VerifyDataSize(const VAImage* image) {
const gfx::Size dimensions(base::strict_cast<int>(image->width),
base::strict_cast<int>(image->height));
size_t min_size = 0;
@@ -85,6 +85,7 @@ static bool VerifyDataSize(const VAImage* image) {
}
return base::strict_cast<size_t>(image->data_size) >= min_size;
}
+
} // namespace
void VaapiMjpegDecodeAccelerator::NotifyError(int32_t task_id, Error error) {
@@ -119,12 +120,13 @@ VaapiMjpegDecodeAccelerator::VaapiMjpegDecodeAccelerator(
decoder_thread_("VaapiMjpegDecoderThread"),
weak_this_factory_(this) {}
-// Destroy |decoder_| and |vpp_vaapi_wrapper_| on |decoder_thread_|.
+// Some members must be destroyed on |decoder_thread_|.
void VaapiMjpegDecodeAccelerator::CleanUpOnDecoderThread() {
DCHECK(decoder_task_runner_->BelongsToCurrentThread());
DCHECK(vpp_vaapi_wrapper_->HasOneRef());
vpp_vaapi_wrapper_.reset();
decoder_.reset();
+ image_processor_.reset();
}
VaapiMjpegDecodeAccelerator::~VaapiMjpegDecodeAccelerator() {
@@ -153,6 +155,7 @@ void VaapiMjpegDecodeAccelerator::InitializeOnDecoderTaskRunner(
"Media.VaapiMjpegDecodeAccelerator.VAAPIError"))) {
VLOGF(1) << "Failed initializing |decoder_|";
std::move(init_cb).Run(false);
+ return;
}
vpp_vaapi_wrapper_ = VaapiWrapper::Create(
@@ -163,12 +166,14 @@ void VaapiMjpegDecodeAccelerator::InitializeOnDecoderTaskRunner(
if (!vpp_vaapi_wrapper_) {
VLOGF(1) << "Failed initializing VAAPI for VPP";
std::move(init_cb).Run(false);
+ return;
}
// Size is irrelevant for a VPP context.
if (!vpp_vaapi_wrapper_->CreateContext(gfx::Size())) {
VLOGF(1) << "Failed to create context for VPP";
std::move(init_cb).Run(false);
+ return;
}
std::move(init_cb).Run(true);
@@ -183,9 +188,9 @@ void VaapiMjpegDecodeAccelerator::InitializeOnTaskRunner(
if (!decoder_thread_.Start()) {
VLOGF(1) << "Failed to start decoding thread.";
std::move(init_cb).Run(false);
+ return;
}
decoder_task_runner_ = decoder_thread_.task_runner();
- gpu_memory_buffer_support_ = std::make_unique<gpu::GpuMemoryBufferSupport>();
// base::Unretained() is fine here because we control |decoder_task_runner_|
// lifetime.
@@ -211,179 +216,142 @@ void VaapiMjpegDecodeAccelerator::InitializeAsync(
BindToCurrentLoop(std::move(init_cb))));
}
+void VaapiMjpegDecodeAccelerator::CreateImageProcessor(
+ const VideoFrame* src_frame,
+ const VideoFrame* dst_frame) {
+ DCHECK(decoder_task_runner_->BelongsToCurrentThread());
+
+ // The fourcc of |src_frame| will be either Fourcc(YUYV) or Fourcc(YU12) based
+ // on the implementation of OutputPictureLibYuvOnTaskRunner(). The fourcc of
+ // |dst_frame| should have been validated in DecodeImpl().
+ const auto src_fourcc = Fourcc::FromVideoPixelFormat(src_frame->format());
+ DCHECK(src_fourcc.has_value());
+ const auto dst_fourcc = Fourcc::FromVideoPixelFormat(dst_frame->format());
+ DCHECK(dst_fourcc.has_value());
+ const ImageProcessorBackend::PortConfig input_config(
+ *src_fourcc, src_frame->coded_size(), src_frame->layout().planes(),
+ src_frame->visible_rect(), {src_frame->storage_type()});
+ const ImageProcessorBackend::PortConfig output_config(
+ *dst_fourcc, dst_frame->coded_size(), dst_frame->layout().planes(),
+ dst_frame->visible_rect(), {dst_frame->storage_type()});
+ if (image_processor_ && image_processor_->input_config() == input_config &&
+ image_processor_->output_config() == output_config) {
+ return;
+ }
+
+ // The error callback is posted to the same thread that
+ // LibYUVImageProcessorBackend::Create() is called on
+ // (i.e., |decoder_thread_|) and we control the lifetime of |decoder_thread_|.
+ // Therefore, base::Unretained(this) is safe.
+ image_processor_ = LibYUVImageProcessorBackend::Create(
+ input_config, output_config, {ImageProcessorBackend::OutputMode::IMPORT},
+ VIDEO_ROTATION_0,
+ base::BindRepeating(&VaapiMjpegDecodeAccelerator::OnImageProcessorError,
+ base::Unretained(this)),
+ decoder_task_runner_);
+}
+
bool VaapiMjpegDecodeAccelerator::OutputPictureLibYuvOnTaskRunner(
+ int32_t task_id,
std::unique_ptr<ScopedVAImage> scoped_image,
- int32_t input_buffer_id,
- scoped_refptr<VideoFrame> video_frame) {
+ scoped_refptr<VideoFrame> video_frame,
+ const gfx::Rect& crop_rect) {
DCHECK(decoder_task_runner_->BelongsToCurrentThread());
- TRACE_EVENT1("jpeg", __func__, "input_buffer_id", input_buffer_id);
+ TRACE_EVENT1("jpeg", __func__, "task_id", task_id);
DCHECK(scoped_image);
const VAImage* image = scoped_image->image();
-
- // For camera captures, we assume that the visible size is the same as the
- // coded size.
- DCHECK_EQ(video_frame->visible_rect().size(), video_frame->coded_size());
- DCHECK_EQ(0, video_frame->visible_rect().x());
- DCHECK_EQ(0, video_frame->visible_rect().y());
- DCHECK(decoder_->GetScopedVASurface());
- const gfx::Size visible_size(base::strict_cast<int>(image->width),
- base::strict_cast<int>(image->height));
- if (visible_size != video_frame->visible_rect().size()) {
- VLOGF(1) << "The decoded visible size is not the same as the video frame's";
- return false;
- }
-
- // The decoded image size is aligned up to JPEG MCU size, so it may be larger
- // than |video_frame|'s visible size.
- if (base::strict_cast<int>(image->width) < visible_size.width() ||
- base::strict_cast<int>(image->height) < visible_size.height()) {
- VLOGF(1) << "Decoded image size is smaller than output frame size";
- return false;
- }
DCHECK(VerifyDataSize(image));
+ const gfx::Size src_size(base::strict_cast<int>(image->width),
+ base::strict_cast<int>(image->height));
+ DCHECK(gfx::Rect(src_size).Contains(crop_rect));
- // Extract source pointers and strides.
- auto* const mem =
- static_cast<const uint8_t*>(scoped_image->va_buffer()->data());
- std::array<const uint8_t*, VideoFrame::kMaxPlanes> src_ptrs{};
- std::array<int, VideoFrame::kMaxPlanes> src_strides{};
- for (uint32_t i = 0; i < image->num_planes; i++) {
- src_ptrs[i] = mem + image->offsets[i];
+ // Wrap |image| into VideoFrame.
+ std::vector<int32_t> strides(image->num_planes);
+ for (uint32_t i = 0; i < image->num_planes; ++i) {
if (!base::CheckedNumeric<uint32_t>(image->pitches[i])
- .AssignIfValid(&src_strides[i])) {
- VLOGF(1) << "Can't extract the strides";
- return false;
- }
- }
-
- // Extract destination pointers and strides.
- std::array<uint8_t*, VideoFrame::kMaxPlanes> dst_ptrs{};
- std::array<int, VideoFrame::kMaxPlanes> dst_strides{};
- base::ScopedClosureRunner buffer_unmapper;
- if (video_frame->HasDmaBufs()) {
- // Dmabuf-backed frame needs to be mapped for SW access.
- DCHECK(gpu_memory_buffer_support_);
- absl::optional<gfx::BufferFormat> gfx_format =
- VideoPixelFormatToGfxBufferFormat(video_frame->format());
- if (!gfx_format) {
- VLOGF(1) << "Unsupported format: " << video_frame->format();
+ .AssignIfValid(&strides[i])) {
+ VLOGF(1) << "Invalid VAImage stride " << image->pitches[i]
+ << " for plane " << i;
return false;
}
- auto gmb_handle = CreateGpuMemoryBufferHandle(video_frame.get());
- DCHECK(!gmb_handle.is_null());
- std::unique_ptr<gpu::GpuMemoryBufferImpl> gmb =
- gpu_memory_buffer_support_->CreateGpuMemoryBufferImplFromHandle(
- std::move(gmb_handle), video_frame->coded_size(), *gfx_format,
- gfx::BufferUsage::SCANOUT_CPU_READ_WRITE, base::DoNothing());
- if (!gmb) {
- VLOGF(1) << "Failed to create GPU memory buffer";
- return false;
- }
- if (!gmb->Map()) {
- VLOGF(1) << "Failed to map GPU memory buffer";
- return false;
- }
- for (size_t i = 0; i < video_frame->layout().num_planes(); i++) {
- dst_ptrs[i] = static_cast<uint8_t*>(gmb->memory(i));
- dst_strides[i] = gmb->stride(i);
- }
- buffer_unmapper.ReplaceClosure(
- base::BindOnce(&gpu::GpuMemoryBufferImpl::Unmap, std::move(gmb)));
- } else {
- DCHECK(video_frame->IsMappable());
- for (size_t i = 0; i < video_frame->layout().num_planes(); i++) {
- dst_ptrs[i] = video_frame->visible_data(i);
- dst_strides[i] = video_frame->stride(i);
- }
}
-
+ auto* const data = static_cast<uint8_t*>(scoped_image->va_buffer()->data());
+ scoped_refptr<VideoFrame> src_frame;
switch (image->format.fourcc) {
- case VA_FOURCC_I420:
- DCHECK_EQ(image->num_planes, 3u);
- switch (video_frame->format()) {
- case PIXEL_FORMAT_I420:
- DCHECK_EQ(video_frame->layout().num_planes(), 3u);
- if (libyuv::I420Copy(src_ptrs[0], src_strides[0], src_ptrs[1],
- src_strides[1], src_ptrs[2], src_strides[2],
- dst_ptrs[0], dst_strides[0], dst_ptrs[1],
- dst_strides[1], dst_ptrs[2], dst_strides[2],
- visible_size.width(), visible_size.height())) {
- VLOGF(1) << "I420Copy failed";
- return false;
- }
- break;
- case PIXEL_FORMAT_NV12:
- DCHECK_EQ(video_frame->layout().num_planes(), 2u);
- if (libyuv::I420ToNV12(src_ptrs[0], src_strides[0], src_ptrs[1],
- src_strides[1], src_ptrs[2], src_strides[2],
- dst_ptrs[0], dst_strides[0], dst_ptrs[1],
- dst_strides[1], visible_size.width(),
- visible_size.height())) {
- VLOGF(1) << "I420ToNV12 failed";
- return false;
- }
- break;
- default:
- VLOGF(1) << "Can't convert image from I420 to "
- << video_frame->format();
- return false;
+ case VA_FOURCC_YUY2:
+ case VA_FOURCC('Y', 'U', 'Y', 'V'): {
+ auto layout = VideoFrameLayout::CreateWithStrides(PIXEL_FORMAT_YUY2,
+ src_size, strides);
+ if (!layout.has_value()) {
+ VLOGF(1) << "Failed to create video frame layout";
+ return false;
}
+ src_frame = VideoFrame::WrapExternalDataWithLayout(
+ *layout, crop_rect, crop_rect.size(), data + image->offsets[0],
+ base::strict_cast<size_t>(image->data_size), base::TimeDelta());
break;
- case VA_FOURCC_YUY2:
- case VA_FOURCC('Y', 'U', 'Y', 'V'):
- DCHECK_EQ(image->num_planes, 1u);
- switch (video_frame->format()) {
- case PIXEL_FORMAT_I420:
- DCHECK_EQ(video_frame->layout().num_planes(), 3u);
- if (libyuv::YUY2ToI420(src_ptrs[0], src_strides[0], dst_ptrs[0],
- dst_strides[0], dst_ptrs[1], dst_strides[1],
- dst_ptrs[2], dst_strides[2],
- visible_size.width(), visible_size.height())) {
- VLOGF(1) << "YUY2ToI420 failed";
- return false;
- }
- break;
- case PIXEL_FORMAT_NV12:
- DCHECK_EQ(video_frame->layout().num_planes(), 2u);
- if (libyuv::YUY2ToNV12(src_ptrs[0], src_strides[0], dst_ptrs[0],
- dst_strides[0], dst_ptrs[1], dst_strides[1],
- visible_size.width(), visible_size.height())) {
- VLOGF(1) << "YUY2ToNV12 failed";
- return false;
- }
- break;
- default:
- VLOGF(1) << "Can't convert image from YUYV to "
- << video_frame->format();
- return false;
+ }
+ case VA_FOURCC_I420: {
+ auto layout = VideoFrameLayout::CreateWithStrides(PIXEL_FORMAT_I420,
+ src_size, strides);
+ if (!layout.has_value()) {
+ VLOGF(1) << "Failed to create video frame layout";
+ return false;
}
+ src_frame = VideoFrame::WrapExternalYuvDataWithLayout(
+ *layout, crop_rect, crop_rect.size(), data + image->offsets[0],
+ data + image->offsets[1], data + image->offsets[2],
+ base::TimeDelta());
break;
+ }
default:
- VLOGF(1) << "Can't convert image from "
- << FourccToString(image->format.fourcc) << " to "
- << video_frame->format();
+ VLOGF(1) << "Unsupported VA image format: "
+ << FourccToString(image->format.fourcc);
return false;
}
+ if (!src_frame) {
+ VLOGF(1) << "Failed to create video frame";
+ return false;
+ }
- task_runner_->PostTask(
- FROM_HERE,
- base::BindOnce(&VaapiMjpegDecodeAccelerator::VideoFrameReady,
- weak_this_factory_.GetWeakPtr(), input_buffer_id));
-
+ CreateImageProcessor(src_frame.get(), video_frame.get());
+ if (!image_processor_) {
+ VLOGF(1) << "Failed to create image processor";
+ return false;
+ }
+ image_processor_->Process(
+ std::move(src_frame), std::move(video_frame),
+ base::BindOnce(
+ [](scoped_refptr<base::SingleThreadTaskRunner> runner,
+ base::OnceClosure cb, scoped_refptr<VideoFrame> frame) {
+ runner->PostTask(FROM_HERE, std::move(cb));
+ },
+ task_runner_,
+ base::BindOnce(&VaapiMjpegDecodeAccelerator::VideoFrameReady,
+ weak_this_factory_.GetWeakPtr(), task_id)));
return true;
}
+void VaapiMjpegDecodeAccelerator::OnImageProcessorError() {
+ DCHECK(decoder_task_runner_->BelongsToCurrentThread());
+ VLOGF(1) << "Failed to process frames using the libyuv image processor";
+ NotifyError(kInvalidTaskId, PLATFORM_FAILURE);
+ image_processor_.reset();
+}
+
bool VaapiMjpegDecodeAccelerator::OutputPictureVppOnTaskRunner(
+ int32_t task_id,
const ScopedVASurface* surface,
- int32_t input_buffer_id,
- scoped_refptr<VideoFrame> video_frame) {
+ scoped_refptr<VideoFrame> video_frame,
+ const gfx::Rect& crop_rect) {
DCHECK(decoder_task_runner_->BelongsToCurrentThread());
DCHECK(surface);
DCHECK(video_frame);
+ DCHECK(gfx::Rect(surface->size()).Contains(crop_rect));
- TRACE_EVENT1("jpeg", __func__, "input_buffer_id", input_buffer_id);
+ TRACE_EVENT1("jpeg", __func__, "task_id", task_id);
scoped_refptr<gfx::NativePixmap> pixmap =
CreateNativePixmapDmaBuf(video_frame.get());
@@ -395,50 +363,36 @@ bool VaapiMjpegDecodeAccelerator::OutputPictureVppOnTaskRunner(
// Bind a VA surface to |video_frame|.
scoped_refptr<VASurface> output_surface =
vpp_vaapi_wrapper_->CreateVASurfaceForPixmap(std::move(pixmap));
-
if (!output_surface) {
VLOGF(1) << "Cannot create VA surface for output buffer";
return false;
}
- // Use VPP to blit the visible size region within |surface| into
- // |output_surface|. BlitSurface() does scaling not cropping when source and
- // destination sizes don't match, so we manipulate the sizes of surfaces to
- // effectively do the cropping.
- const gfx::Size& blit_size = video_frame->visible_rect().size();
- if (surface->size().width() < blit_size.width() ||
- surface->size().height() < blit_size.height()) {
- VLOGF(1) << "Decoded surface size is smaller than target size";
- return false;
- }
scoped_refptr<VASurface> src_surface = base::MakeRefCounted<VASurface>(
- surface->id(), blit_size, surface->format(),
- base::DoNothing() /* release_cb */);
- scoped_refptr<VASurface> dst_surface = base::MakeRefCounted<VASurface>(
- output_surface->id(), blit_size, output_surface->format(),
- base::DoNothing() /* release_cb */);
+ surface->id(), surface->size(), surface->format(),
+ /*release_cb=*/base::DoNothing());
// We should call vaSyncSurface() when passing surface between contexts. See:
// https://lists.01.org/pipermail/intel-vaapi-media/2019-June/000131.html
- if (!vpp_vaapi_wrapper_->SyncSurface(src_surface->id())) {
+ if (!vpp_vaapi_wrapper_->SyncSurface(surface->id())) {
VLOGF(1) << "Cannot sync VPP input surface";
return false;
}
- if (!vpp_vaapi_wrapper_->BlitSurface(*src_surface, *dst_surface)) {
+ if (!vpp_vaapi_wrapper_->BlitSurface(*src_surface, *output_surface,
+ crop_rect)) {
VLOGF(1) << "Cannot convert decoded image into output buffer";
return false;
}
// Sync target surface since the buffer is returning to client.
- if (!vpp_vaapi_wrapper_->SyncSurface(dst_surface->id())) {
+ if (!vpp_vaapi_wrapper_->SyncSurface(output_surface->id())) {
VLOGF(1) << "Cannot sync VPP output surface";
return false;
}
task_runner_->PostTask(
- FROM_HERE,
- base::BindOnce(&VaapiMjpegDecodeAccelerator::VideoFrameReady,
- weak_this_factory_.GetWeakPtr(), input_buffer_id));
+ FROM_HERE, base::BindOnce(&VaapiMjpegDecodeAccelerator::VideoFrameReady,
+ weak_this_factory_.GetWeakPtr(), task_id));
return true;
}
@@ -490,11 +444,9 @@ void VaapiMjpegDecodeAccelerator::DecodeImpl(
int32_t task_id,
base::span<const uint8_t> src_image,
scoped_refptr<VideoFrame> dst_frame) {
- // TODO(andrescj): validate that the video frame's visible size is the same as
- // the parsed JPEG's visible size when it is returned from Decode(), and
- // remove the size checks in OutputPicture*().
VaapiImageDecodeStatus status = decoder_->Decode(src_image);
if (status != VaapiImageDecodeStatus::kSuccess) {
+ VLOGF(1) << "Failed to decode JPEG image";
NotifyError(task_id, VaapiJpegDecodeStatusToError(status));
return;
}
@@ -502,30 +454,71 @@ void VaapiMjpegDecodeAccelerator::DecodeImpl(
DCHECK(surface);
DCHECK(surface->IsValid());
+ // For camera captures, we assume that the visible size is the same as the
+ // coded size.
+ if (dst_frame->visible_rect().size() != dst_frame->coded_size() ||
+ dst_frame->visible_rect().x() != 0 ||
+ dst_frame->visible_rect().y() != 0) {
+ VLOGF(1)
+ << "The video frame visible size should be the same as the coded size";
+ NotifyError(task_id, INVALID_ARGUMENT);
+ return;
+ }
+
+ // Note that |surface->size()| is the visible size of the JPEG image. The
+ // underlying VASurface size (coded size) can be larger because of alignments.
+ if (surface->size().width() < dst_frame->visible_rect().width() ||
+ surface->size().height() < dst_frame->visible_rect().height()) {
+ VLOGF(1) << "Invalid JPEG image and video frame sizes: "
+ << surface->size().ToString() << ", "
+ << dst_frame->visible_rect().size().ToString();
+ NotifyError(task_id, INVALID_ARGUMENT);
+ return;
+ }
+
// For DMA-buf backed |dst_frame|, we will import it as a VA surface and use
// VPP to convert the decoded |surface| into it, if the formats and sizes are
// supported.
- const auto video_frame_fourcc =
+ const auto dst_frame_fourcc =
Fourcc::FromVideoPixelFormat(dst_frame->format());
- if (!video_frame_fourcc) {
+ if (!dst_frame_fourcc) {
VLOGF(1) << "Unsupported video frame format: " << dst_frame->format();
NotifyError(task_id, PLATFORM_FAILURE);
return;
}
- const auto video_frame_va_fourcc = video_frame_fourcc->ToVAFourCC();
- if (!video_frame_va_fourcc) {
+ const auto dst_frame_va_fourcc = dst_frame_fourcc->ToVAFourCC();
+ if (!dst_frame_va_fourcc) {
VLOGF(1) << "Unsupported video frame format: " << dst_frame->format();
NotifyError(task_id, PLATFORM_FAILURE);
return;
}
+
+ // Crop and scale the decoded image into |dst_frame|.
+ // The VPP is known to have some problems with odd-sized buffers, so we
+ // request a crop rectangle whose dimensions are aligned to 2.
+ const gfx::Rect crop_rect = CropSizeForScalingToTarget(
+ surface->size(), dst_frame->visible_rect().size(), /*alignment=*/2u);
+ if (crop_rect.IsEmpty()) {
+ VLOGF(1) << "Failed to calculate crop rectangle for "
+ << surface->size().ToString() << " to "
+ << dst_frame->visible_rect().size().ToString();
+ NotifyError(task_id, PLATFORM_FAILURE);
+ return;
+ }
+
// TODO(kamesan): move HasDmaBufs() to DCHECK when we deprecate
// shared-memory-backed video frame.
+ // Check all the sizes involved until we figure out the definition of min/max
+ // resolutions in the VPP profile (b/195312242).
if (dst_frame->HasDmaBufs() &&
VaapiWrapper::IsVppResolutionAllowed(surface->size()) &&
+ VaapiWrapper::IsVppResolutionAllowed(crop_rect.size()) &&
+ VaapiWrapper::IsVppResolutionAllowed(dst_frame->visible_rect().size()) &&
VaapiWrapper::IsVppSupportedForJpegDecodedSurfaceToFourCC(
- surface->format(), *video_frame_va_fourcc)) {
- if (!OutputPictureVppOnTaskRunner(surface, task_id, std::move(dst_frame))) {
+ surface->format(), *dst_frame_va_fourcc)) {
+ if (!OutputPictureVppOnTaskRunner(task_id, surface, std::move(dst_frame),
+ crop_rect)) {
VLOGF(1) << "Output picture using VPP failed";
NotifyError(task_id, PLATFORM_FAILURE);
}
@@ -537,13 +530,15 @@ void VaapiMjpegDecodeAccelerator::DecodeImpl(
// 2. VPP doesn't support the format conversion. This is intended for AMD
// VAAPI driver whose VPP only supports converting decoded 4:2:0 JPEGs.
std::unique_ptr<ScopedVAImage> image =
- decoder_->GetImage(*video_frame_va_fourcc, &status);
+ decoder_->GetImage(*dst_frame_va_fourcc, &status);
if (status != VaapiImageDecodeStatus::kSuccess) {
NotifyError(task_id, VaapiJpegDecodeStatusToError(status));
return;
}
- if (!OutputPictureLibYuvOnTaskRunner(std::move(image), task_id,
- std::move(dst_frame))) {
+ DCHECK_EQ(image->image()->width, surface->size().width());
+ DCHECK_EQ(image->image()->height, surface->size().height());
+ if (!OutputPictureLibYuvOnTaskRunner(task_id, std::move(image),
+ std::move(dst_frame), crop_rect)) {
VLOGF(1) << "Output picture using libyuv failed";
NotifyError(task_id, PLATFORM_FAILURE);
}
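
The hunk above replaces the old shrink-to-destination-size blit with an explicit crop
rectangle computed by CropSizeForScalingToTarget() and handed to BlitSurface(). A minimal
standalone sketch of that geometry, assuming only the ui/gfx geometry types and not the
actual media/gpu helper, might look like this:

#include "ui/gfx/geometry/rect.h"
#include "ui/gfx/geometry/size.h"

// Returns the largest rect at (0, 0) inside |source| that has the aspect ratio of
// |target|, with both dimensions rounded down to a multiple of |alignment|.
gfx::Rect SketchCropForScalingToTarget(const gfx::Size& source,
                                       const gfx::Size& target,
                                       int alignment) {
  if (source.IsEmpty() || target.IsEmpty() || alignment <= 0)
    return gfx::Rect();
  // Cross-multiply to compare aspect ratios without floating point.
  const int64_t src_w = source.width(), src_h = source.height();
  const int64_t dst_w = target.width(), dst_h = target.height();
  gfx::Size crop = source;
  if (src_w * dst_h > dst_w * src_h) {
    // Source is wider than the target aspect ratio: reduce the width.
    crop.set_width(static_cast<int>(src_h * dst_w / dst_h));
  } else if (src_w * dst_h < dst_w * src_h) {
    // Source is taller than the target aspect ratio: reduce the height.
    crop.set_height(static_cast<int>(src_w * dst_h / dst_w));
  }
  // Align both dimensions down; the patch uses 2 because the VPP has problems
  // with odd-sized buffers.
  crop.set_width(crop.width() - crop.width() % alignment);
  crop.set_height(crop.height() - crop.height() % alignment);
  return gfx::Rect(crop);
}

Under this sketch, a 1284x721 decoded JPEG being scaled into a 640x360 frame would be
cropped to 1280x720 before the BlitSurface() call.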
diff --git a/chromium/media/gpu/vaapi/vaapi_mjpeg_decode_accelerator.h b/chromium/media/gpu/vaapi/vaapi_mjpeg_decode_accelerator.h
index 7c41829a7de..3c27768f1f4 100644
--- a/chromium/media/gpu/vaapi/vaapi_mjpeg_decode_accelerator.h
+++ b/chromium/media/gpu/vaapi/vaapi_mjpeg_decode_accelerator.h
@@ -15,6 +15,7 @@
#include "base/memory/weak_ptr.h"
#include "base/threading/thread.h"
#include "components/chromeos_camera/mjpeg_decode_accelerator.h"
+#include "media/gpu/chromeos/image_processor_backend.h"
#include "media/gpu/media_gpu_export.h"
#include "media/gpu/vaapi/vaapi_jpeg_decoder.h"
@@ -22,10 +23,6 @@ namespace base {
class SingleThreadTaskRunner;
}
-namespace gpu {
-class GpuMemoryBufferSupport;
-}
-
namespace media {
class BitstreamBuffer;
@@ -47,6 +44,11 @@ class MEDIA_GPU_EXPORT VaapiMjpegDecodeAccelerator
public:
VaapiMjpegDecodeAccelerator(
const scoped_refptr<base::SingleThreadTaskRunner>& io_task_runner);
+
+ VaapiMjpegDecodeAccelerator(const VaapiMjpegDecodeAccelerator&) = delete;
+ VaapiMjpegDecodeAccelerator& operator=(const VaapiMjpegDecodeAccelerator&) =
+ delete;
+
~VaapiMjpegDecodeAccelerator() override;
// chromeos_camera::MjpegDecodeAccelerator implementation.
@@ -88,18 +90,27 @@ class MEDIA_GPU_EXPORT VaapiMjpegDecodeAccelerator
base::span<const uint8_t> src_image,
scoped_refptr<VideoFrame> dst_frame);
- // Puts contents of |surface| into given |video_frame| using VA-API Video
- // Processing Pipeline (VPP), and passes the |input_buffer_id| of the
- // resulting picture to client for output.
- bool OutputPictureVppOnTaskRunner(const ScopedVASurface* surface,
- int32_t input_buffer_id,
- scoped_refptr<VideoFrame> video_frame);
+ // Creates |image_processor_| for converting |src_frame| into |dst_frame|.
+ void CreateImageProcessor(const VideoFrame* src_frame,
+ const VideoFrame* dst_frame);
- // Puts contents of |image| into given |video_frame| using libyuv, and passes
- // the |input_buffer_id| of the resulting picture to client for output.
- bool OutputPictureLibYuvOnTaskRunner(std::unique_ptr<ScopedVAImage> image,
- int32_t input_buffer_id,
- scoped_refptr<VideoFrame> video_frame);
+ // Puts contents of |surface| within |crop_rect| into the given |video_frame|
+ // using VA-API Video Processing Pipeline (VPP), and passes the |task_id| of
+ // the resulting picture to client for output.
+ bool OutputPictureVppOnTaskRunner(int32_t task_id,
+ const ScopedVASurface* surface,
+ scoped_refptr<VideoFrame> video_frame,
+ const gfx::Rect& crop_rect);
+
+ // Puts contents of |image| within |crop_rect| into the given |video_frame|
+ // using libyuv, and passes the |task_id| of the resulting picture to client
+ // for output.
+ bool OutputPictureLibYuvOnTaskRunner(int32_t task_id,
+ std::unique_ptr<ScopedVAImage> image,
+ scoped_refptr<VideoFrame> video_frame,
+ const gfx::Rect& crop_rect);
+
+ void OnImageProcessorError();
void InitializeOnDecoderTaskRunner(InitCB init_cb);
@@ -124,9 +135,9 @@ class MEDIA_GPU_EXPORT VaapiMjpegDecodeAccelerator
// client buffer.
scoped_refptr<VaapiWrapper> vpp_vaapi_wrapper_;
- // For creating GpuMemoryBuffer from client DMA buffer that can be mapped for
- // software access.
- std::unique_ptr<gpu::GpuMemoryBufferSupport> gpu_memory_buffer_support_;
+ // Image processor used to convert the decoded frame into the client buffer
+ // when VA-API cannot perform the conversion itself.
+ std::unique_ptr<ImageProcessorBackend> image_processor_;
base::Thread decoder_thread_;
// Use this to post tasks to |decoder_thread_| instead of
@@ -140,8 +151,6 @@ class MEDIA_GPU_EXPORT VaapiMjpegDecodeAccelerator
// posted from the |decoder_task_runner_| to |task_runner_| should use a
// WeakPtr (obtained via weak_this_factory_.GetWeakPtr()).
base::WeakPtrFactory<VaapiMjpegDecodeAccelerator> weak_this_factory_;
-
- DISALLOW_COPY_AND_ASSIGN(VaapiMjpegDecodeAccelerator);
};
} // namespace media
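
The DISALLOW_COPY_AND_ASSIGN removals in this header and the following ones all apply the
same mechanical pattern: the macro in the private section is replaced by explicitly
deleted copy operations declared next to the constructors. A minimal sketch with a
hypothetical class:

class NonCopyable {
 public:
  NonCopyable() = default;

  NonCopyable(const NonCopyable&) = delete;
  NonCopyable& operator=(const NonCopyable&) = delete;

  ~NonCopyable() = default;
};

Attempting "NonCopyable a; NonCopyable b = a;" now fails to compile with a
use-of-deleted-function diagnostic, which is the same guarantee the macro used to provide.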
diff --git a/chromium/media/gpu/vaapi/vaapi_picture.h b/chromium/media/gpu/vaapi/vaapi_picture.h
index 8020a6b5393..dc7a8a00ef7 100644
--- a/chromium/media/gpu/vaapi/vaapi_picture.h
+++ b/chromium/media/gpu/vaapi/vaapi_picture.h
@@ -30,6 +30,9 @@ class VaapiWrapper;
// Picture is native pixmap abstraction (X11/Ozone).
class MEDIA_GPU_EXPORT VaapiPicture {
public:
+ VaapiPicture(const VaapiPicture&) = delete;
+ VaapiPicture& operator=(const VaapiPicture&) = delete;
+
virtual ~VaapiPicture();
// Uses the buffer of |format|, pointed to by |gpu_memory_buffer_handle| as
@@ -79,8 +82,6 @@ class MEDIA_GPU_EXPORT VaapiPicture {
private:
const int32_t picture_buffer_id_;
-
- DISALLOW_COPY_AND_ASSIGN(VaapiPicture);
};
} // namespace media
diff --git a/chromium/media/gpu/vaapi/vaapi_picture_factory.cc b/chromium/media/gpu/vaapi/vaapi_picture_factory.cc
index 719035b87c8..9c7d7387d24 100644
--- a/chromium/media/gpu/vaapi/vaapi_picture_factory.cc
+++ b/chromium/media/gpu/vaapi/vaapi_picture_factory.cc
@@ -10,13 +10,13 @@
#include "ui/base/ui_base_features.h"
#include "ui/gl/gl_bindings.h"
-#if defined(USE_X11)
-#include "media/gpu/vaapi/vaapi_picture_native_pixmap_angle.h"
-#include "media/gpu/vaapi/vaapi_picture_tfp.h"
-#endif
#if defined(USE_OZONE)
#include "media/gpu/vaapi/vaapi_picture_native_pixmap_ozone.h"
-#endif
+#endif // defined(USE_OZONE)
+#if BUILDFLAG(USE_VAAPI_X11)
+#include "media/gpu/vaapi/vaapi_picture_native_pixmap_angle.h"
+#include "media/gpu/vaapi/vaapi_picture_tfp.h"
+#endif // BUILDFLAG(USE_VAAPI_X11)
#if defined(USE_EGL)
#include "media/gpu/vaapi/vaapi_picture_native_pixmap_egl.h"
#endif
@@ -46,15 +46,13 @@ VaapiPictureFactory::VaapiPictureFactory() {
vaapi_impl_pairs_.insert(
std::make_pair(gl::kGLImplementationEGLGLES2,
VaapiPictureFactory::kVaapiImplementationDrm));
-#if defined(USE_X11)
+#if BUILDFLAG(USE_VAAPI_X11)
vaapi_impl_pairs_.insert(
std::make_pair(gl::kGLImplementationEGLANGLE,
VaapiPictureFactory::kVaapiImplementationAngle));
- if (!features::IsUsingOzonePlatform()) {
- vaapi_impl_pairs_.insert(
- std::make_pair(gl::kGLImplementationDesktopGL,
- VaapiPictureFactory::kVaapiImplementationX11));
- }
+ vaapi_impl_pairs_.insert(
+ std::make_pair(gl::kGLImplementationDesktopGL,
+ VaapiPictureFactory::kVaapiImplementationX11));
#endif
DeterminePictureCreationAndDownloadingMechanism();
@@ -96,19 +94,19 @@ VaapiPictureFactory::GetVaapiImplementation(gl::GLImplementation gl_impl) {
}
uint32_t VaapiPictureFactory::GetGLTextureTarget() {
-#if defined(USE_OZONE)
- if (features::IsUsingOzonePlatform())
- return GL_TEXTURE_EXTERNAL_OES;
-#endif
+#if BUILDFLAG(USE_VAAPI_X11)
return GL_TEXTURE_2D;
+#else
+ return GL_TEXTURE_EXTERNAL_OES;
+#endif
}
gfx::BufferFormat VaapiPictureFactory::GetBufferFormat() {
-#if defined(USE_OZONE)
- if (features::IsUsingOzonePlatform())
- return gfx::BufferFormat::YUV_420_BIPLANAR;
-#endif
+#if BUILDFLAG(USE_VAAPI_X11)
return gfx::BufferFormat::RGBX_8888;
+#else
+ return gfx::BufferFormat::YUV_420_BIPLANAR;
+#endif
}
void VaapiPictureFactory::DeterminePictureCreationAndDownloadingMechanism() {
@@ -116,51 +114,43 @@ void VaapiPictureFactory::DeterminePictureCreationAndDownloadingMechanism() {
#if defined(USE_OZONE)
// We can be called without GL initialized, which is valid if we use Ozone.
case kVaapiImplementationNone:
- if (features::IsUsingOzonePlatform()) {
- create_picture_cb_ = base::BindRepeating(
- &CreateVaapiPictureNativeImpl<VaapiPictureNativePixmapOzone>);
- needs_vpp_for_downloading_ = true;
- }
-
- // This is reached by unit tests which don't require create_picture_cb_
- // to be initialized or called.
+ create_picture_cb_ = base::BindRepeating(
+ &CreateVaapiPictureNativeImpl<VaapiPictureNativePixmapOzone>);
+ needs_vpp_for_downloading_ = true;
break;
#endif // defined(USE_OZONE)
-#if defined(USE_X11)
+#if BUILDFLAG(USE_VAAPI_X11)
case kVaapiImplementationX11:
- DCHECK(!features::IsUsingOzonePlatform());
create_picture_cb_ =
base::BindRepeating(&CreateVaapiPictureNativeImpl<VaapiTFPPicture>);
// Neither VaapiTFPPicture nor VaapiPictureNativePixmapAngle needs the VPP.
needs_vpp_for_downloading_ = false;
break;
case kVaapiImplementationAngle:
- DCHECK(!features::IsUsingOzonePlatform());
create_picture_cb_ = base::BindRepeating(
&CreateVaapiPictureNativeImpl<VaapiPictureNativePixmapAngle>);
// Neither VaapiTFPPicture nor VaapiPictureNativePixmapAngle needs the VPP.
needs_vpp_for_downloading_ = false;
break;
-#endif // defined(USE_X11)
+#endif // BUILDFLAG(USE_VAAPI_X11)
case kVaapiImplementationDrm:
#if defined(USE_OZONE)
- if (features::IsUsingOzonePlatform()) {
- create_picture_cb_ = base::BindRepeating(
- &CreateVaapiPictureNativeImpl<VaapiPictureNativePixmapOzone>);
- needs_vpp_for_downloading_ = true;
- break;
- }
-#endif // defined(USE_OZONE)
-#if defined(USE_EGL)
+ create_picture_cb_ = base::BindRepeating(
+ &CreateVaapiPictureNativeImpl<VaapiPictureNativePixmapOzone>);
+ needs_vpp_for_downloading_ = true;
+ break;
+#elif defined(USE_EGL)
create_picture_cb_ = base::BindRepeating(
&CreateVaapiPictureNativeImpl<VaapiPictureNativePixmapEgl>);
needs_vpp_for_downloading_ = true;
break;
-#endif // defined(USE_EGL)
+#else
// Either Ozone or EGL must be available to use the DRM implementation.
- NOTREACHED();
+ FALLTHROUGH;
+#endif
default:
NOTREACHED();
+ break;
}
}
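
With this restructuring, the kVaapiImplementationDrm case falls through to the default
NOTREACHED() when neither Ozone nor EGL is compiled in. FALLTHROUGH is Chromium's portable
spelling of the C++17 [[fallthrough]] attribute (defined in base/compiler_specific.h). A
standalone sketch of the annotated-fallthrough pattern, with hypothetical cases rather
than the factory's:

#include <cstdio>

void HandleMode(int mode) {
  switch (mode) {
    case 1:
      std::puts("mode 1 specific work");
      [[fallthrough]];  // Intentional: mode 1 also needs the default handling.
    default:
      std::puts("common handling");
      break;
  }
}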
diff --git a/chromium/media/gpu/vaapi/vaapi_picture_factory.h b/chromium/media/gpu/vaapi/vaapi_picture_factory.h
index e894581f35c..358b5b18228 100644
--- a/chromium/media/gpu/vaapi/vaapi_picture_factory.h
+++ b/chromium/media/gpu/vaapi/vaapi_picture_factory.h
@@ -38,6 +38,10 @@ class MEDIA_GPU_EXPORT VaapiPictureFactory {
};
VaapiPictureFactory();
+
+ VaapiPictureFactory(const VaapiPictureFactory&) = delete;
+ VaapiPictureFactory& operator=(const VaapiPictureFactory&) = delete;
+
virtual ~VaapiPictureFactory();
// Creates a VaapiPicture of picture_buffer.size() associated with
@@ -82,8 +86,6 @@ class MEDIA_GPU_EXPORT VaapiPictureFactory {
CreatePictureCB create_picture_cb_;
bool needs_vpp_for_downloading_ = false;
-
- DISALLOW_COPY_AND_ASSIGN(VaapiPictureFactory);
};
} // namespace media
diff --git a/chromium/media/gpu/vaapi/vaapi_picture_native_pixmap.h b/chromium/media/gpu/vaapi/vaapi_picture_native_pixmap.h
index a77583a7776..71ced2dcf56 100644
--- a/chromium/media/gpu/vaapi/vaapi_picture_native_pixmap.h
+++ b/chromium/media/gpu/vaapi/vaapi_picture_native_pixmap.h
@@ -35,6 +35,10 @@ class VaapiPictureNativePixmap : public VaapiPicture {
uint32_t texture_id,
uint32_t client_texture_id,
uint32_t texture_target);
+
+ VaapiPictureNativePixmap(const VaapiPictureNativePixmap&) = delete;
+ VaapiPictureNativePixmap& operator=(const VaapiPictureNativePixmap&) = delete;
+
~VaapiPictureNativePixmap() override;
// VaapiPicture implementation.
@@ -48,9 +52,6 @@ class VaapiPictureNativePixmap : public VaapiPicture {
// VASurface used to transfer from the decoder's pixel format.
scoped_refptr<VASurface> va_surface_;
-
- private:
- DISALLOW_COPY_AND_ASSIGN(VaapiPictureNativePixmap);
};
} // namespace media
diff --git a/chromium/media/gpu/vaapi/vaapi_picture_native_pixmap_angle.cc b/chromium/media/gpu/vaapi/vaapi_picture_native_pixmap_angle.cc
index 9de0c93b442..e5b99084905 100644
--- a/chromium/media/gpu/vaapi/vaapi_picture_native_pixmap_angle.cc
+++ b/chromium/media/gpu/vaapi/vaapi_picture_native_pixmap_angle.cc
@@ -6,7 +6,6 @@
#include "media/gpu/vaapi/va_surface.h"
#include "media/gpu/vaapi/vaapi_wrapper.h"
-#include "ui/base/ui_base_features.h"
#include "ui/gfx/x/connection.h"
#include "ui/gfx/x/future.h"
#include "ui/gfx/x/xproto.h"
@@ -92,7 +91,6 @@ Status VaapiPictureNativePixmapAngle::Allocate(gfx::BufferFormat format) {
if (!make_context_current_cb_ || !make_context_current_cb_.Run())
return StatusCode::kVaapiBadContext;
- DCHECK(!features::IsUsingOzonePlatform());
auto image =
base::MakeRefCounted<gl::GLImageEGLPixmap>(visible_size_, format);
if (!image)
diff --git a/chromium/media/gpu/vaapi/vaapi_picture_native_pixmap_angle.h b/chromium/media/gpu/vaapi/vaapi_picture_native_pixmap_angle.h
index f29068773e5..0487b1ab68c 100644
--- a/chromium/media/gpu/vaapi/vaapi_picture_native_pixmap_angle.h
+++ b/chromium/media/gpu/vaapi/vaapi_picture_native_pixmap_angle.h
@@ -34,6 +34,10 @@ class VaapiPictureNativePixmapAngle : public VaapiPictureNativePixmap {
uint32_t client_texture_id,
uint32_t texture_target);
+ VaapiPictureNativePixmapAngle(const VaapiPictureNativePixmapAngle&) = delete;
+ VaapiPictureNativePixmapAngle& operator=(
+ const VaapiPictureNativePixmapAngle&) = delete;
+
~VaapiPictureNativePixmapAngle() override;
// VaapiPicture implementation.
@@ -48,8 +52,6 @@ class VaapiPictureNativePixmapAngle : public VaapiPictureNativePixmap {
private:
x11::Pixmap x_pixmap_ = x11::Pixmap::None;
-
- DISALLOW_COPY_AND_ASSIGN(VaapiPictureNativePixmapAngle);
};
} // namespace media
diff --git a/chromium/media/gpu/vaapi/vaapi_picture_native_pixmap_egl.h b/chromium/media/gpu/vaapi/vaapi_picture_native_pixmap_egl.h
index 40bd0d90363..1d7e48fc24b 100644
--- a/chromium/media/gpu/vaapi/vaapi_picture_native_pixmap_egl.h
+++ b/chromium/media/gpu/vaapi/vaapi_picture_native_pixmap_egl.h
@@ -37,6 +37,10 @@ class VaapiPictureNativePixmapEgl : public VaapiPictureNativePixmap {
uint32_t client_texture_id,
uint32_t texture_target);
+ VaapiPictureNativePixmapEgl(const VaapiPictureNativePixmapEgl&) = delete;
+ VaapiPictureNativePixmapEgl& operator=(const VaapiPictureNativePixmapEgl&) =
+ delete;
+
~VaapiPictureNativePixmapEgl() override;
// VaapiPicture implementation.
@@ -47,8 +51,6 @@ class VaapiPictureNativePixmapEgl : public VaapiPictureNativePixmap {
private:
Status Initialize(scoped_refptr<gfx::NativePixmap> pixmap);
-
- DISALLOW_COPY_AND_ASSIGN(VaapiPictureNativePixmapEgl);
};
} // namespace media
diff --git a/chromium/media/gpu/vaapi/vaapi_picture_native_pixmap_ozone.h b/chromium/media/gpu/vaapi/vaapi_picture_native_pixmap_ozone.h
index caf3fba6ab3..d3bd1892586 100644
--- a/chromium/media/gpu/vaapi/vaapi_picture_native_pixmap_ozone.h
+++ b/chromium/media/gpu/vaapi/vaapi_picture_native_pixmap_ozone.h
@@ -36,6 +36,10 @@ class VaapiPictureNativePixmapOzone : public VaapiPictureNativePixmap {
uint32_t client_texture_id,
uint32_t texture_target);
+ VaapiPictureNativePixmapOzone(const VaapiPictureNativePixmapOzone&) = delete;
+ VaapiPictureNativePixmapOzone& operator=(
+ const VaapiPictureNativePixmapOzone&) = delete;
+
~VaapiPictureNativePixmapOzone() override;
// VaapiPicture implementation.
@@ -46,8 +50,6 @@ class VaapiPictureNativePixmapOzone : public VaapiPictureNativePixmap {
private:
Status Initialize(scoped_refptr<gfx::NativePixmap> pixmap);
-
- DISALLOW_COPY_AND_ASSIGN(VaapiPictureNativePixmapOzone);
};
} // namespace media
diff --git a/chromium/media/gpu/vaapi/vaapi_picture_tfp.cc b/chromium/media/gpu/vaapi/vaapi_picture_tfp.cc
index 3f7e221d8a0..11914e3640d 100644
--- a/chromium/media/gpu/vaapi/vaapi_picture_tfp.cc
+++ b/chromium/media/gpu/vaapi/vaapi_picture_tfp.cc
@@ -6,7 +6,6 @@
#include "media/gpu/vaapi/va_surface.h"
#include "media/gpu/vaapi/vaapi_wrapper.h"
-#include "ui/base/ui_base_features.h"
#include "ui/gfx/x/connection.h"
#include "ui/gfx/x/future.h"
#include "ui/gl/gl_bindings.h"
@@ -37,7 +36,6 @@ VaapiTFPPicture::VaapiTFPPicture(
connection_(x11::Connection::Get()),
x_pixmap_(x11::Pixmap::None) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
- DCHECK(!features::IsUsingOzonePlatform());
DCHECK(texture_id);
DCHECK(client_texture_id);
}
diff --git a/chromium/media/gpu/vaapi/vaapi_picture_tfp.h b/chromium/media/gpu/vaapi/vaapi_picture_tfp.h
index 53d6c53fd42..9bc9ae8215d 100644
--- a/chromium/media/gpu/vaapi/vaapi_picture_tfp.h
+++ b/chromium/media/gpu/vaapi/vaapi_picture_tfp.h
@@ -36,6 +36,9 @@ class VaapiTFPPicture : public VaapiPicture {
uint32_t client_texture_id,
uint32_t texture_target);
+ VaapiTFPPicture(const VaapiTFPPicture&) = delete;
+ VaapiTFPPicture& operator=(const VaapiTFPPicture&) = delete;
+
~VaapiTFPPicture() override;
// VaapiPicture implementation.
@@ -52,8 +55,6 @@ class VaapiTFPPicture : public VaapiPicture {
x11::Pixmap x_pixmap_;
scoped_refptr<gl::GLImageGLX> glx_image_;
-
- DISALLOW_COPY_AND_ASSIGN(VaapiTFPPicture);
};
} // namespace media
diff --git a/chromium/media/gpu/vaapi/vaapi_unittest.cc b/chromium/media/gpu/vaapi/vaapi_unittest.cc
index de8119aff68..aeb200d8d22 100644
--- a/chromium/media/gpu/vaapi/vaapi_unittest.cc
+++ b/chromium/media/gpu/vaapi/vaapi_unittest.cc
@@ -6,23 +6,29 @@
// See http://code.google.com/p/googletest/issues/detail?id=371
#include "testing/gtest/include/gtest/gtest.h"
+#include <drm_fourcc.h>
+#include <gbm.h>
#include <unistd.h>
#include <map>
#include <vector>
#include <va/va.h>
+#include <va/va_drmcommon.h>
#include <va/va_str.h>
+#include "base/bits.h"
#include "base/callback_helpers.h"
#include "base/containers/contains.h"
#include "base/cpu.h"
#include "base/files/file.h"
+#include "base/files/file_util.h"
#include "base/files/scoped_file.h"
#include "base/logging.h"
#include "base/process/launch.h"
#include "base/strings/pattern.h"
#include "base/strings/string_split.h"
#include "base/strings/string_util.h"
+#include "base/strings/stringprintf.h"
#include "base/test/launcher/unit_test_launcher.h"
#include "base/test/scoped_feature_list.h"
#include "base/test/test_suite.h"
@@ -31,6 +37,7 @@
#include "media/gpu/vaapi/vaapi_wrapper.h"
#include "media/media_buildflags.h"
#include "third_party/abseil-cpp/absl/types/optional.h"
+#include "ui/gfx/linux/gbm_defines.h"
namespace media {
namespace {
@@ -117,6 +124,7 @@ std::unique_ptr<base::test::ScopedFeatureList> CreateScopedFeatureList() {
unsigned int ToVaRTFormat(uint32_t va_fourcc) {
switch (va_fourcc) {
case VA_FOURCC_I420:
+ case VA_FOURCC_NV12:
return VA_RT_FORMAT_YUV420;
case VA_FOURCC_YUY2:
return VA_RT_FORMAT_YUV422;
@@ -128,6 +136,90 @@ unsigned int ToVaRTFormat(uint32_t va_fourcc) {
return kInvalidVaRtFormat;
}
+uint32_t ToVaFourcc(unsigned int va_rt_format) {
+ switch (va_rt_format) {
+ case VA_RT_FORMAT_YUV420:
+ return VA_FOURCC_NV12;
+ case VA_RT_FORMAT_YUV420_10:
+ return VA_FOURCC_P010;
+ }
+ return DRM_FORMAT_INVALID;
+}
+
+int ToGBMFormat(unsigned int va_rt_format) {
+ switch (va_rt_format) {
+ case VA_RT_FORMAT_YUV420:
+ return DRM_FORMAT_NV12;
+ case VA_RT_FORMAT_YUV420_10:
+ return DRM_FORMAT_P010;
+ }
+ return DRM_FORMAT_INVALID;
+}
+
+const std::string VARTFormatToString(unsigned int va_rt_format) {
+ switch (va_rt_format) {
+ case VA_RT_FORMAT_YUV420:
+ return "VA_RT_FORMAT_YUV420";
+ case VA_RT_FORMAT_YUV420_10:
+ return "VA_RT_FORMAT_YUV420_10";
+ }
+ NOTREACHED() << "Unknown VA_RT_FORMAT 0x" << std::hex << va_rt_format;
+ return "Unknown VA_RT_FORMAT";
+}
+
+#define TOSTR(enumCase) \
+ case enumCase: \
+ return #enumCase
+
+const char* VAProfileToString(VAProfile profile) {
+ // clang-format off
+ switch (profile) {
+ TOSTR(VAProfileNone);
+ TOSTR(VAProfileMPEG2Simple);
+ TOSTR(VAProfileMPEG2Main);
+ TOSTR(VAProfileMPEG4Simple);
+ TOSTR(VAProfileMPEG4AdvancedSimple);
+ TOSTR(VAProfileMPEG4Main);
+ case VAProfileH264Baseline:
+ NOTREACHED() << "VAProfileH264Baseline is deprecated";
+ return "Deprecated VAProfileH264Baseline";
+ TOSTR(VAProfileH264Main);
+ TOSTR(VAProfileH264High);
+ TOSTR(VAProfileVC1Simple);
+ TOSTR(VAProfileVC1Main);
+ TOSTR(VAProfileVC1Advanced);
+ TOSTR(VAProfileH263Baseline);
+ TOSTR(VAProfileH264ConstrainedBaseline);
+ TOSTR(VAProfileJPEGBaseline);
+ TOSTR(VAProfileVP8Version0_3);
+ TOSTR(VAProfileH264MultiviewHigh);
+ TOSTR(VAProfileH264StereoHigh);
+ TOSTR(VAProfileHEVCMain);
+ TOSTR(VAProfileHEVCMain10);
+ TOSTR(VAProfileVP9Profile0);
+ TOSTR(VAProfileVP9Profile1);
+ TOSTR(VAProfileVP9Profile2);
+ TOSTR(VAProfileVP9Profile3);
+ TOSTR(VAProfileHEVCMain12);
+ TOSTR(VAProfileHEVCMain422_10);
+ TOSTR(VAProfileHEVCMain422_12);
+ TOSTR(VAProfileHEVCMain444);
+ TOSTR(VAProfileHEVCMain444_10);
+ TOSTR(VAProfileHEVCMain444_12);
+ TOSTR(VAProfileHEVCSccMain);
+ TOSTR(VAProfileHEVCSccMain10);
+ TOSTR(VAProfileHEVCSccMain444);
+ TOSTR(VAProfileAV1Profile0);
+ TOSTR(VAProfileAV1Profile1);
+ TOSTR(VAProfileHEVCSccMain444_10);
+#if VA_MAJOR_VERSION >= 2 || VA_MINOR_VERSION >= 11
+ TOSTR(VAProfileProtected);
+#endif
+ }
+ // clang-format on
+ return "<unknown profile>";
+}
+
} // namespace
class VaapiTest : public testing::Test {
@@ -369,6 +461,57 @@ TEST_F(VaapiTest, LowQualityEncodingSetting) {
}
}
+// This test checks the supported SVC scalability mode.
+TEST_F(VaapiTest, CheckSupportedSVCScalabilityModes) {
+#if BUILDFLAG(IS_CHROMEOS_ASH)
+ const std::vector<SVCScalabilityMode> kSupportedTemporalSVC = {
+ SVCScalabilityMode::kL1T2, SVCScalabilityMode::kL1T3};
+ const std::vector<SVCScalabilityMode> kSupportedTemporalAndKeySVC = {
+ SVCScalabilityMode::kL1T2, SVCScalabilityMode::kL1T3,
+ SVCScalabilityMode::kL2T2Key, SVCScalabilityMode::kL2T3Key,
+ SVCScalabilityMode::kL3T2Key, SVCScalabilityMode::kL3T3Key};
+#endif
+
+ const auto scalability_modes_vp9_profile0 =
+ VaapiWrapper::GetSupportedScalabilityModes(VP9PROFILE_PROFILE0,
+ VAProfileVP9Profile0);
+#if BUILDFLAG(IS_CHROMEOS_ASH)
+ if (base::FeatureList::IsEnabled(kVaapiVp9kSVCHWEncoding) &&
+ VaapiWrapper::GetDefaultVaEntryPoint(
+ VaapiWrapper::kEncodeConstantQuantizationParameter,
+ VAProfileVP9Profile0) == VAEntrypointEncSliceLP) {
+ EXPECT_EQ(scalability_modes_vp9_profile0, kSupportedTemporalAndKeySVC);
+ } else {
+ EXPECT_EQ(scalability_modes_vp9_profile0, kSupportedTemporalSVC);
+ }
+#else
+ EXPECT_TRUE(scalability_modes_vp9_profile0.empty());
+#endif
+
+ const auto scalability_modes_vp9_profile2 =
+ VaapiWrapper::GetSupportedScalabilityModes(VP9PROFILE_PROFILE2,
+ VAProfileVP9Profile2);
+ EXPECT_TRUE(scalability_modes_vp9_profile2.empty());
+
+ const auto scalability_modes_h264_baseline =
+ VaapiWrapper::GetSupportedScalabilityModes(
+ H264PROFILE_BASELINE, VAProfileH264ConstrainedBaseline);
+#if BUILDFLAG(IS_CHROMEOS_ASH)
+ // TODO(b/199487660): Enable H.264 temporal layer encoding on AMD once their
+ // drivers support them.
+ const auto implementation = VaapiWrapper::GetImplementationType();
+ if (base::FeatureList::IsEnabled(kVaapiH264TemporalLayerHWEncoding) &&
+ (implementation == VAImplementation::kIntelI965 ||
+ implementation == VAImplementation::kIntelIHD)) {
+ EXPECT_EQ(scalability_modes_h264_baseline, kSupportedTemporalSVC);
+ } else {
+ EXPECT_TRUE(scalability_modes_h264_baseline.empty());
+ }
+#else
+ EXPECT_TRUE(scalability_modes_h264_baseline.empty());
+#endif
+}
+
class VaapiVppTest
: public VaapiTest,
public testing::WithParamInterface<std::tuple<uint32_t, uint32_t>> {
@@ -459,6 +602,208 @@ INSTANTIATE_TEST_SUITE_P(,
::testing::ValuesIn(kVAFourCCs)),
VaapiVppTest::PrintToStringParamName());
+class VaapiMinigbmTest
+ : public VaapiTest,
+ public testing::WithParamInterface<
+ std::tuple<VAProfile, unsigned int /*va_rt_format*/, gfx::Size>> {
+ public:
+ VaapiMinigbmTest() = default;
+ ~VaapiMinigbmTest() override = default;
+
+ // Populate meaningful test suffixes instead of /0, /1, etc.
+ struct PrintToStringParamName {
+ template <class ParamType>
+ std::string operator()(
+ const testing::TestParamInfo<ParamType>& info) const {
+ // Using vaProfileStr(std::get<0>(info.param)) here crashes the binary.
+ // TODO(mcasas): investigate why and use it instead of codec%d.
+ return base::StringPrintf(
+ "%s__%s__%s", VAProfileToString(std::get<0>(info.param)),
+ VARTFormatToString(std::get<1>(info.param)).c_str(),
+ std::get<2>(info.param).ToString().c_str());
+ }
+ };
+};
+
+// This test allocates a VASurface (via VaapiWrapper) for the given VAProfile,
+// VA RT Format and resolution (as per the test parameters). It then verifies
+// that said VASurface's metadata (e.g. width, height, number of planes, pitch)
+// match those of an equivalent buffer allocated via minigbm.
+TEST_P(VaapiMinigbmTest, AllocateAndCompareWithMinigbm) {
+ const VAProfile va_profile = std::get<0>(GetParam());
+ const unsigned int va_rt_format = std::get<1>(GetParam());
+ const gfx::Size resolution = std::get<2>(GetParam());
+
+ // TODO(b/187852384): enable the other backends.
+ if (VaapiWrapper::GetImplementationType() != VAImplementation::kIntelIHD)
+ GTEST_SKIP() << "backend not supported";
+
+ ASSERT_NE(va_rt_format, kInvalidVaRtFormat);
+ if (!VaapiWrapper::IsDecodeSupported(va_profile))
+ GTEST_SKIP() << vaProfileStr(va_profile) << " not supported.";
+
+ if (!VaapiWrapper::IsDecodingSupportedForInternalFormat(va_profile,
+ va_rt_format)) {
+ GTEST_SKIP() << VARTFormatToString(va_rt_format) << " not supported.";
+ }
+
+ gfx::Size minimum_supported_size;
+ ASSERT_TRUE(VaapiWrapper::GetDecodeMinResolution(va_profile,
+ &minimum_supported_size));
+ gfx::Size maximum_supported_size;
+ ASSERT_TRUE(VaapiWrapper::GetDecodeMaxResolution(va_profile,
+ &maximum_supported_size));
+
+ if (resolution.width() < minimum_supported_size.width() ||
+ resolution.height() < minimum_supported_size.height() ||
+ resolution.width() > maximum_supported_size.width() ||
+ resolution.height() > maximum_supported_size.height()) {
+ GTEST_SKIP() << resolution.ToString()
+ << " not supported (min: " << minimum_supported_size.ToString()
+ << ", max: " << maximum_supported_size.ToString() << ")";
+ }
+
+ auto wrapper =
+ VaapiWrapper::Create(VaapiWrapper::kDecode, va_profile,
+ EncryptionScheme::kUnencrypted, base::DoNothing());
+ ASSERT_TRUE(!!wrapper);
+ ASSERT_TRUE(wrapper->CreateContext(resolution));
+
+ auto scoped_surfaces = wrapper->CreateScopedVASurfaces(
+ va_rt_format, resolution, {VaapiWrapper::SurfaceUsageHint::kVideoDecoder},
+ 1u,
+ /*visible_size=*/absl::nullopt, /*va_fourcc=*/absl::nullopt);
+ ASSERT_FALSE(scoped_surfaces.empty());
+ const auto scoped_va_surface = std::move(scoped_surfaces[0]);
+ wrapper->DestroyContext();
+
+ ASSERT_TRUE(scoped_va_surface->IsValid());
+ EXPECT_EQ(scoped_va_surface->format(), va_rt_format);
+
+ // Request the underlying DRM metadata for |scoped_va_surface|.
+ VADRMPRIMESurfaceDescriptor va_descriptor{};
+ {
+ base::AutoLock auto_lock(*wrapper->va_lock_);
+ VAStatus va_res =
+ vaSyncSurface(wrapper->va_display_, scoped_va_surface->id());
+ ASSERT_EQ(va_res, VA_STATUS_SUCCESS);
+ va_res = vaExportSurfaceHandle(
+ wrapper->va_display_, scoped_va_surface->id(),
+ VA_SURFACE_ATTRIB_MEM_TYPE_DRM_PRIME_2,
+ VA_EXPORT_SURFACE_READ_ONLY | VA_EXPORT_SURFACE_SEPARATE_LAYERS,
+ &va_descriptor);
+ ASSERT_EQ(va_res, VA_STATUS_SUCCESS);
+ }
+
+ // Verify some expected properties of the allocated VASurface. We expect a
+ // single |object|, with a number of |layers| of the same |pitch|.
+ EXPECT_EQ(scoped_va_surface->size(),
+ gfx::Size(base::checked_cast<int>(va_descriptor.width),
+ base::checked_cast<int>(va_descriptor.height)));
+
+ const auto va_fourcc = ToVaFourcc(va_rt_format);
+ ASSERT_NE(va_fourcc, base::checked_cast<unsigned int>(DRM_FORMAT_INVALID));
+ EXPECT_EQ(va_descriptor.fourcc, va_fourcc)
+ << FourccToString(va_descriptor.fourcc)
+ << " != " << FourccToString(va_fourcc);
+ EXPECT_EQ(va_descriptor.num_objects, 1u);
+ // TODO(mcasas): consider comparing |size| with a better estimate of the
+ // |scoped_va_surface| memory footprint (e.g. including planes and format).
+ EXPECT_GE(va_descriptor.objects[0].size,
+ base::checked_cast<uint32_t>(scoped_va_surface->size().GetArea()));
+ EXPECT_EQ(va_descriptor.objects[0].drm_format_modifier,
+ I915_FORMAT_MOD_Y_TILED);
+ // TODO(mcasas): |num_layers| actually depends on |va_descriptor.va_fourcc|.
+ EXPECT_EQ(va_descriptor.num_layers, 2u);
+ for (uint32_t i = 0; i < va_descriptor.num_layers; ++i) {
+ EXPECT_EQ(va_descriptor.layers[i].num_planes, 1u);
+ EXPECT_EQ(va_descriptor.layers[i].object_index[0], 0u);
+
+ DVLOG(2) << "plane " << i
+ << ", pitch: " << va_descriptor.layers[i].pitch[0];
+ // Luma and chroma planes have different |pitch| expectations.
+ // TODO(mcasas): consider bitdepth for pitch lower thresholds.
+ if (i == 0) {
+ EXPECT_GE(
+ va_descriptor.layers[i].pitch[0],
+ base::checked_cast<uint32_t>(scoped_va_surface->size().width()));
+ } else {
+ const auto expected_rounded_up_pitch =
+ base::bits::AlignUp(scoped_va_surface->size().width(), 2);
+ EXPECT_GE(va_descriptor.layers[i].pitch[0],
+ base::checked_cast<uint32_t>(expected_rounded_up_pitch));
+ }
+ }
+
+ // Now open minigbm pointing to the DRM primary node, allocate a gbm_bo, and
+ // compare its width/height/stride/etc with the |va_descriptor|s.
+ base::File drm_fd(
+ base::FilePath("/dev/dri/card0"),
+ base::File::FLAG_OPEN | base::File::FLAG_READ | base::File::FLAG_WRITE);
+
+ ASSERT_TRUE(drm_fd.IsValid());
+ struct gbm_device* gbm = gbm_create_device(drm_fd.GetPlatformFile());
+ ASSERT_TRUE(gbm);
+
+ const auto gbm_format = ToGBMFormat(va_rt_format);
+ ASSERT_NE(gbm_format, DRM_FORMAT_INVALID);
+ const auto bo_use_flags = GBM_BO_USE_TEXTURING | GBM_BO_USE_HW_VIDEO_DECODER;
+ struct gbm_bo* bo =
+ gbm_bo_create(gbm, resolution.width(), resolution.height(), gbm_format,
+ bo_use_flags | GBM_BO_USE_SCANOUT);
+ if (!bo) {
+ // Try again without the scanout flag. This reproduces Chrome's behaviour.
+ bo = gbm_bo_create(gbm, resolution.width(), resolution.height(), gbm_format,
+ bo_use_flags);
+ }
+ ASSERT_TRUE(bo);
+ EXPECT_EQ(scoped_va_surface->size(),
+ gfx::Size(base::checked_cast<int>(gbm_bo_get_width(bo)),
+ base::checked_cast<int>(gbm_bo_get_height(bo))));
+
+ const int bo_num_planes = gbm_bo_get_plane_count(bo);
+ ASSERT_EQ(va_descriptor.num_layers,
+ base::checked_cast<uint32_t>(bo_num_planes));
+ for (int i = 0; i < bo_num_planes; ++i) {
+ EXPECT_EQ(va_descriptor.layers[i].pitch[0],
+ gbm_bo_get_stride_for_plane(bo, i));
+ }
+
+ // TODO(mcasas): consider comparing |va_descriptor.objects[0].size| with |bo|s
+ // size (as returned by lseek()ing it).
+
+ gbm_bo_destroy(bo);
+ gbm_device_destroy(gbm);
+}
+
+constexpr VAProfile kVACodecProfiles[] = {
+ VAProfileVP8Version0_3, VAProfileH264ConstrainedBaseline,
+ VAProfileVP9Profile0, VAProfileVP9Profile2,
+ VAProfileAV1Profile0, VAProfileJPEGBaseline};
+constexpr uint32_t kVARTFormatsForGBM[] = {VA_RT_FORMAT_YUV420,
+ VA_RT_FORMAT_YUV420_10};
+constexpr gfx::Size kResolutions[] = {
+ // clang-format off
+ gfx::Size(127, 127),
+ gfx::Size(128, 128),
+ gfx::Size(129, 129),
+ gfx::Size(320, 180),
+ gfx::Size(320, 240), // QVGA
+ gfx::Size(323, 243),
+ gfx::Size(480, 320), // 3/4 VGA
+ gfx::Size(640, 360),
+ gfx::Size(640, 480), // VGA
+ gfx::Size(1280, 720)};
+// clang-format on
+
+INSTANTIATE_TEST_SUITE_P(
+ ,
+ VaapiMinigbmTest,
+ ::testing::Combine(::testing::ValuesIn(kVACodecProfiles),
+ ::testing::ValuesIn(kVARTFormatsForGBM),
+ ::testing::ValuesIn(kResolutions)),
+ VaapiMinigbmTest::PrintToStringParamName());
+
} // namespace media
int main(int argc, char** argv) {
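
The TOSTR() helper added to this unit test relies on the preprocessor stringizing
operator (#) to map an enumerator to its own name. A tiny self-contained illustration
with a made-up enum, unrelated to libva:

#include <cstdio>

#define TOSTR(enumCase) \
  case enumCase:        \
    return #enumCase

enum Fruit { kApple, kBanana, kCherry };

const char* FruitToString(Fruit f) {
  switch (f) {
    TOSTR(kApple);
    TOSTR(kBanana);
    TOSTR(kCherry);
  }
  return "<unknown fruit>";
}

int main() {
  std::printf("%s\n", FruitToString(kBanana));  // Prints "kBanana".
  return 0;
}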
diff --git a/chromium/media/gpu/vaapi/vaapi_utils.h b/chromium/media/gpu/vaapi/vaapi_utils.h
index 9bcee23ce96..98d5e39cccb 100644
--- a/chromium/media/gpu/vaapi/vaapi_utils.h
+++ b/chromium/media/gpu/vaapi/vaapi_utils.h
@@ -33,6 +33,10 @@ class ScopedVABufferMapping {
VABufferID buffer_id,
base::OnceCallback<void(VABufferID)> release_callback =
base::NullCallback());
+
+ ScopedVABufferMapping(const ScopedVABufferMapping&) = delete;
+ ScopedVABufferMapping& operator=(const ScopedVABufferMapping&) = delete;
+
~ScopedVABufferMapping();
bool IsValid() const { return !!va_buffer_data_; }
void* data() const {
@@ -49,8 +53,6 @@ class ScopedVABufferMapping {
const VABufferID buffer_id_;
void* va_buffer_data_ = nullptr;
-
- DISALLOW_COPY_AND_ASSIGN(ScopedVABufferMapping);
};
// This class tracks the VABuffer life cycle from vaCreateBuffer() to
@@ -102,6 +104,10 @@ class ScopedVAImage {
VASurfaceID va_surface_id,
VAImageFormat* format /* Needs to be a pointer for libva */,
const gfx::Size& size);
+
+ ScopedVAImage(const ScopedVAImage&) = delete;
+ ScopedVAImage& operator=(const ScopedVAImage&) = delete;
+
~ScopedVAImage();
bool IsValid() const { return va_buffer_ && va_buffer_->IsValid(); }
@@ -117,8 +123,6 @@ class ScopedVAImage {
const VADisplay va_display_ GUARDED_BY(lock_);
std::unique_ptr<VAImage> image_;
std::unique_ptr<ScopedVABufferMapping> va_buffer_;
-
- DISALLOW_COPY_AND_ASSIGN(ScopedVAImage);
};
// A VA-API-specific surface used by video/image codec accelerators to work on.
@@ -129,6 +133,10 @@ class ScopedVASurface {
VASurfaceID va_surface_id,
const gfx::Size& size,
unsigned int va_rt_format);
+
+ ScopedVASurface(const ScopedVASurface&) = delete;
+ ScopedVASurface& operator=(const ScopedVASurface&) = delete;
+
~ScopedVASurface();
bool IsValid() const;
@@ -142,8 +150,6 @@ class ScopedVASurface {
const VASurfaceID va_surface_id_;
const gfx::Size size_;
const unsigned int va_rt_format_;
-
- DISALLOW_COPY_AND_ASSIGN(ScopedVASurface);
};
// A combination of a numeric ID |id| and a callback to release it. This class
diff --git a/chromium/media/gpu/vaapi/vaapi_video_decode_accelerator.cc b/chromium/media/gpu/vaapi/vaapi_video_decode_accelerator.cc
index 0dc47d88016..26696da1271 100644
--- a/chromium/media/gpu/vaapi/vaapi_video_decode_accelerator.cc
+++ b/chromium/media/gpu/vaapi/vaapi_video_decode_accelerator.cc
@@ -101,6 +101,10 @@ class VaapiVideoDecodeAccelerator::InputBuffer {
: id_(id),
buffer_(std::move(buffer)),
release_cb_(std::move(release_cb)) {}
+
+ InputBuffer(const InputBuffer&) = delete;
+ InputBuffer& operator=(const InputBuffer&) = delete;
+
~InputBuffer() {
DVLOGF(4) << "id = " << id_;
if (release_cb_)
@@ -116,8 +120,6 @@ class VaapiVideoDecodeAccelerator::InputBuffer {
const int32_t id_ = -1;
const scoped_refptr<DecoderBuffer> buffer_;
base::OnceCallback<void(int32_t id)> release_cb_;
-
- DISALLOW_COPY_AND_ASSIGN(InputBuffer);
};
void VaapiVideoDecodeAccelerator::NotifyStatus(Status status) {
@@ -184,12 +186,6 @@ bool VaapiVideoDecodeAccelerator::Initialize(const Config& config,
Client* client) {
DCHECK(task_runner_->BelongsToCurrentThread());
-#if defined(USE_X11)
- // TODO(crbug/1116701): implement decode acceleration when running with Ozone.
- if (features::IsUsingOzonePlatform())
- return false;
-#endif
-
vaapi_picture_factory_ = std::make_unique<VaapiPictureFactory>();
if (config.is_encrypted()) {
@@ -1206,19 +1202,18 @@ VaapiVideoDecodeAccelerator::GetSupportedProfiles() {
base::EraseIf(profiles, [](const auto& profile) {
VideoCodec codec = VideoCodecProfileToVideoCodec(profile.profile);
return profile.profile == VP9PROFILE_PROFILE2 ||
- codec == VideoCodec::kCodecAV1 || codec == VideoCodec::kCodecHEVC;
+ codec == VideoCodec::kAV1 || codec == VideoCodec::kHEVC;
});
return profiles;
}
VaapiVideoDecodeAccelerator::BufferAllocationMode
VaapiVideoDecodeAccelerator::DecideBufferAllocationMode() {
-#if defined(USE_X11)
+#if BUILDFLAG(USE_VAAPI_X11)
// The IMPORT mode is used for Android on Chrome OS, so this doesn't apply
// here.
DCHECK_NE(output_mode_, VideoDecodeAccelerator::Config::OutputMode::IMPORT);
// TODO(crbug/1116701): get video decode acceleration working with ozone.
- DCHECK(!features::IsUsingOzonePlatform());
// For H.264 on older devices, another +1 is experimentally needed for
// high-to-high resolution changes.
// TODO(mcasas): Figure out why and why only H264, see crbug.com/912295 and
diff --git a/chromium/media/gpu/vaapi/vaapi_video_decode_accelerator.h b/chromium/media/gpu/vaapi/vaapi_video_decode_accelerator.h
index d347fb95f62..f116d5d0953 100644
--- a/chromium/media/gpu/vaapi/vaapi_video_decode_accelerator.h
+++ b/chromium/media/gpu/vaapi/vaapi_video_decode_accelerator.h
@@ -65,6 +65,10 @@ class MEDIA_GPU_EXPORT VaapiVideoDecodeAccelerator
const MakeGLContextCurrentCallback& make_context_current_cb,
const BindGLImageCallback& bind_image_cb);
+ VaapiVideoDecodeAccelerator(const VaapiVideoDecodeAccelerator&) = delete;
+ VaapiVideoDecodeAccelerator& operator=(const VaapiVideoDecodeAccelerator&) =
+ delete;
+
~VaapiVideoDecodeAccelerator() override;
// VideoDecodeAccelerator implementation.
@@ -354,8 +358,6 @@ class MEDIA_GPU_EXPORT VaapiVideoDecodeAccelerator
// The WeakPtrFactory for |weak_this_|.
base::WeakPtrFactory<VaapiVideoDecodeAccelerator> weak_this_factory_;
-
- DISALLOW_COPY_AND_ASSIGN(VaapiVideoDecodeAccelerator);
};
} // namespace media
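
The kCodecAV1 -> VideoCodec::kAV1 and kCodecHEVC -> VideoCodec::kHEVC renames in the
accelerator above (and in vaapi_video_decoder.cc further down) are consistent with
VideoCodec having been migrated to a scoped enum in this Chromium revision. That
migration itself is not part of this diff, so the snippet below only illustrates the
pattern with hypothetical types:

enum class VideoCodecSketch { kUnknown, kH264, kVP9, kHEVC, kAV1 };

bool IsAv1(VideoCodecSketch codec) {
  // With an enum class, enumerators must be qualified with the enum name,
  // which is why bare kCodecAV1 spellings become VideoCodec::kAV1.
  return codec == VideoCodecSketch::kAV1;
}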
diff --git a/chromium/media/gpu/vaapi/vaapi_video_decode_accelerator_unittest.cc b/chromium/media/gpu/vaapi/vaapi_video_decode_accelerator_unittest.cc
index 5b9a543386f..66ea517579f 100644
--- a/chromium/media/gpu/vaapi/vaapi_video_decode_accelerator_unittest.cc
+++ b/chromium/media/gpu/vaapi/vaapi_video_decode_accelerator_unittest.cc
@@ -17,7 +17,6 @@
#include "media/gpu/vaapi/vaapi_wrapper.h"
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
-#include "ui/base/ui_base_features.h"
using base::test::RunClosure;
using ::testing::_;
@@ -192,6 +191,12 @@ class VaapiVideoDecodeAcceleratorTest : public TestWithParam<TestParams>,
vda_.state_ = VaapiVideoDecodeAccelerator::kIdle;
}
+
+ VaapiVideoDecodeAcceleratorTest(const VaapiVideoDecodeAcceleratorTest&) =
+ delete;
+ VaapiVideoDecodeAcceleratorTest& operator=(
+ const VaapiVideoDecodeAcceleratorTest&) = delete;
+
~VaapiVideoDecodeAcceleratorTest() {}
void SetUp() override {
@@ -403,8 +408,6 @@ class VaapiVideoDecodeAcceleratorTest : public TestWithParam<TestParams>,
private:
base::WeakPtrFactory<VaapiVideoDecodeAcceleratorTest> weak_ptr_factory_;
-
- DISALLOW_COPY_AND_ASSIGN(VaapiVideoDecodeAcceleratorTest);
};
// Verify that it is possible to select DRM(egl) and TFP(glx) at runtime.
@@ -416,12 +419,10 @@ TEST_P(VaapiVideoDecodeAcceleratorTest, SupportedPlatforms) {
mock_vaapi_picture_factory_->GetVaapiImplementation(
gl::kGLImplementationEGLGLES2));
-#if defined(USE_X11)
- if (!features::IsUsingOzonePlatform()) {
- EXPECT_EQ(VaapiPictureFactory::kVaapiImplementationX11,
- mock_vaapi_picture_factory_->GetVaapiImplementation(
- gl::kGLImplementationDesktopGL));
- }
+#if BUILDFLAG(USE_VAAPI_X11)
+ EXPECT_EQ(VaapiPictureFactory::kVaapiImplementationX11,
+ mock_vaapi_picture_factory_->GetVaapiImplementation(
+ gl::kGLImplementationDesktopGL));
#endif
}
diff --git a/chromium/media/gpu/vaapi/vaapi_video_decoder.cc b/chromium/media/gpu/vaapi/vaapi_video_decoder.cc
index b398d0b257d..d559da0d22d 100644
--- a/chromium/media/gpu/vaapi/vaapi_video_decoder.cc
+++ b/chromium/media/gpu/vaapi/vaapi_video_decoder.cc
@@ -154,16 +154,13 @@ VaapiVideoDecoder::~VaapiVideoDecoder() {
decoder_delegate_->OnVAContextDestructionSoon();
// Destroy explicitly to DCHECK() that |vaapi_wrapper_| references are held
- // inside the accelerator in |decoder_|, by the |allocated_va_surfaces_|, by
- // the |decode_surface_pool_for_scaling_| and of course by this class. To
- // clear |allocated_va_surfaces_| and |decode_surface_pool_for_scaling_| we
- // have to first DestroyContext().
+ // inside the accelerator in |decoder_|, by the |allocated_va_surfaces_| and
+ // of course by this class. To clear |allocated_va_surfaces_| we have to first
+ // DestroyContext().
decoder_ = nullptr;
if (vaapi_wrapper_) {
vaapi_wrapper_->DestroyContext();
allocated_va_surfaces_.clear();
- while (!decode_surface_pool_for_scaling_.empty())
- decode_surface_pool_for_scaling_.pop();
DCHECK(vaapi_wrapper_->HasOneRef());
vaapi_wrapper_ = nullptr;
@@ -181,17 +178,18 @@ void VaapiVideoDecoder::Initialize(const VideoDecoderConfig& config,
DVLOGF(2) << config.AsHumanReadableString();
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
DCHECK(config.IsValidConfig());
- DCHECK(state_ == State::kError || state_ == State::kUninitialized ||
- state_ == State::kWaitingForInput);
// Reinitializing the decoder is allowed if there are no pending decodes.
- if (current_decode_task_ || !decode_task_queue_.empty()) {
+ if (current_decode_task_ || !decode_task_queue_.empty() ||
+ state_ == State::kExpectingReset) {
LOG(ERROR)
<< "Don't call Initialize() while there are pending decode tasks";
std::move(init_cb).Run(StatusCode::kVaapiReinitializedDuringDecode);
return;
}
+ DCHECK(state_ == State::kError || state_ == State::kUninitialized ||
+ state_ == State::kWaitingForInput);
if (state_ != State::kUninitialized) {
DVLOGF(3) << "Reinitializing decoder";
@@ -201,13 +199,9 @@ void VaapiVideoDecoder::Initialize(const VideoDecoderConfig& config,
decoder_ = nullptr;
DCHECK(vaapi_wrapper_);
- // To clear |allocated_va_surfaces_| and |decode_surface_pool_for_scaling_|
- // we have to first DestroyContext().
+ // To clear |allocated_va_surfaces_|, we have to first DestroyContext().
vaapi_wrapper_->DestroyContext();
allocated_va_surfaces_.clear();
- while (!decode_surface_pool_for_scaling_.empty())
- decode_surface_pool_for_scaling_.pop();
- decode_to_output_scale_factor_.reset();
DCHECK(vaapi_wrapper_->HasOneRef());
vaapi_wrapper_ = nullptr;
@@ -245,8 +239,17 @@ void VaapiVideoDecoder::Initialize(const VideoDecoderConfig& config,
std::move(init_cb).Run(StatusCode::kDecoderMissingCdmForEncryptedContent);
return;
}
- if (config.codec() != kCodecH264 && config.codec() != kCodecVP9 &&
- config.codec() != kCodecHEVC) {
+ bool encrypted_av1_support = false;
+#if BUILDFLAG(USE_CHROMEOS_PROTECTED_AV1)
+ encrypted_av1_support = true;
+#elif BUILDFLAG(IS_CHROMEOS_LACROS)
+ encrypted_av1_support = base::CommandLine::ForCurrentProcess()->HasSwitch(
+ switches::kLacrosUseChromeosProtectedAv1);
+#endif
+ if (config.codec() != VideoCodec::kH264 &&
+ config.codec() != VideoCodec::kVP9 &&
+ (config.codec() != VideoCodec::kAV1 || !encrypted_av1_support) &&
+ config.codec() != VideoCodec::kHEVC) {
SetErrorState(
base::StringPrintf("%s is not supported for encrypted content",
GetCodecName(config.codec()).c_str()));
@@ -263,7 +266,7 @@ void VaapiVideoDecoder::Initialize(const VideoDecoderConfig& config,
VAImplementation::kMesaGallium);
#endif
#if BUILDFLAG(ENABLE_PLATFORM_HEVC_DECODING)
- } else if (config.codec() == kCodecHEVC &&
+ } else if (config.codec() == VideoCodec::kHEVC &&
!base::CommandLine::ForCurrentProcess()->HasSwitch(
switches::kEnableClearHevcForTesting)) {
SetErrorState("clear HEVC content is not supported");
@@ -308,10 +311,6 @@ void VaapiVideoDecoder::Initialize(const VideoDecoderConfig& config,
return;
}
- // Get and initialize the frame pool.
- DCHECK(client_);
- frame_pool_ = client_->GetVideoFramePool();
-
aspect_ratio_ = config.aspect_ratio();
output_cb_ = std::move(output_cb);
@@ -390,7 +389,7 @@ void VaapiVideoDecoder::HandleDecodeTask() {
DVLOGF(4);
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
- if (state_ == State::kError || state_ == State::kResetting)
+ if (state_ != State::kDecoding)
return;
DCHECK_EQ(state_, State::kDecoding);
@@ -472,11 +471,14 @@ scoped_refptr<VASurface> VaapiVideoDecoder::CreateSurface() {
DCHECK(current_decode_task_);
// Get a video frame from the video frame pool.
- scoped_refptr<VideoFrame> frame = frame_pool_->GetFrame();
+ DCHECK(client_);
+ DmabufVideoFramePool* frame_pool = client_->GetVideoFramePool();
+ DCHECK(frame_pool);
+ scoped_refptr<VideoFrame> frame = frame_pool->GetFrame();
if (!frame) {
// Ask the video frame pool to notify us when new frames are available, so
// we can retry the current decode task.
- frame_pool_->NotifyWhenFrameAvailable(
+ frame_pool->NotifyWhenFrameAvailable(
base::BindOnce(&VaapiVideoDecoder::NotifyFrameAvailable, weak_this_));
return nullptr;
}
@@ -499,8 +501,8 @@ scoped_refptr<VASurface> VaapiVideoDecoder::CreateSurface() {
return nullptr;
}
- va_surface = vaapi_wrapper_->CreateVASurfaceForPixmap(std::move(pixmap),
- transcryption_);
+ va_surface = vaapi_wrapper_->CreateVASurfaceForPixmap(
+ std::move(pixmap), cdm_context_ref_ || transcryption_);
if (!va_surface || va_surface->id() == VA_INVALID_ID) {
SetErrorState("failed to create VASurface from VideoFrame");
return nullptr;
@@ -532,55 +534,6 @@ scoped_refptr<VASurface> VaapiVideoDecoder::CreateSurface() {
va_surface->format(), std::move(release_frame_cb));
}
-scoped_refptr<VASurface> VaapiVideoDecoder::CreateDecodeSurface() {
- DVLOGF(4);
- DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
- DCHECK_EQ(state_, State::kDecoding);
- DCHECK(current_decode_task_);
-
- if (decode_surface_pool_for_scaling_.empty())
- return nullptr;
-
- // Get surface from pool.
- std::unique_ptr<ScopedVASurface> surface =
- std::move(decode_surface_pool_for_scaling_.front());
- decode_surface_pool_for_scaling_.pop();
- // Gather information about the surface to avoid use-after-move.
- const VASurfaceID surface_id = surface->id();
- const gfx::Size surface_size = surface->size();
- const unsigned int surface_format = surface->format();
- // Wrap the ScopedVASurface inside a VASurface indirectly.
- VASurface::ReleaseCB release_decode_surface_cb =
- base::BindOnce(&VaapiVideoDecoder::ReturnDecodeSurfaceToPool, weak_this_,
- std::move(surface));
- return new VASurface(surface_id, surface_size, surface_format,
- std::move(release_decode_surface_cb));
-}
-
-bool VaapiVideoDecoder::IsScalingDecode() {
- // If we're not decoding while scaling, we shouldn't have any surfaces for
- // that purpose.
- DCHECK(!!decode_to_output_scale_factor_ ||
- decode_surface_pool_for_scaling_.empty());
- return !!decode_to_output_scale_factor_;
-}
-
-const gfx::Rect VaapiVideoDecoder::GetOutputVisibleRect(
- const gfx::Rect& decode_visible_rect,
- const gfx::Size& output_picture_size) {
- if (!IsScalingDecode())
- return decode_visible_rect;
- DCHECK_LT(*decode_to_output_scale_factor_, 1.0f);
- gfx::Rect output_rect =
- ScaleToEnclosedRect(decode_visible_rect, *decode_to_output_scale_factor_);
- // Make the dimensions even numbered to align with other requirements later in
- // the pipeline.
- output_rect.set_width(RoundDownToEven(output_rect.width()));
- output_rect.set_height(RoundDownToEven(output_rect.height()));
- CHECK(gfx::Rect(output_picture_size).Contains(output_rect));
- return output_rect;
-}
-
void VaapiVideoDecoder::SurfaceReady(scoped_refptr<VASurface> va_surface,
int32_t buffer_id,
const gfx::Rect& visible_rect,
@@ -629,6 +582,23 @@ void VaapiVideoDecoder::SurfaceReady(scoped_refptr<VASurface> va_surface,
// still valid once we get to the compositor stage.
uint32_t protected_instance_id = vaapi_wrapper_->GetProtectedInstanceID();
video_frame->metadata().hw_protected_validation_id = protected_instance_id;
+
+#if BUILDFLAG(IS_CHROMEOS_ASH)
+ // Additionally, we store the VA-API protected session ID so that it can be
+ // re-used for scaling the decoded video frame later in the pipeline.
+ VAProtectedSessionID va_protected_session_id =
+ vaapi_wrapper_->GetProtectedSessionID();
+
+ static_assert(
+ std::is_same<decltype(va_protected_session_id),
+ decltype(
+ video_frame->metadata()
+ .hw_va_protected_session_id)::value_type>::value,
+ "The type of VideoFrameMetadata::hw_va_protected_session_id "
+ "does not match the type exposed by VaapiWrapper");
+ video_frame->metadata().hw_va_protected_session_id =
+ va_protected_session_id;
+#endif // BUILDFLAG(IS_CHROMEOS_ASH)
}
const auto gfx_color_space = color_space.ToGfxColorSpace();
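
The static_assert added in the hunk above pins the type of the new
hw_va_protected_session_id metadata field to whatever VaapiWrapper::GetProtectedSessionID()
returns, so any mismatch breaks the build instead of silently converting. A standalone
sketch of the same decltype/value_type trick, using hypothetical types instead of
VideoFrameMetadata and VaapiWrapper:

#include <optional>
#include <type_traits>

struct MetadataSketch {
  std::optional<unsigned int> session_id;
};

unsigned int GetSessionIdSketch();  // Stands in for the VaapiWrapper getter.

static_assert(
    std::is_same<decltype(GetSessionIdSketch()),
                 decltype(MetadataSketch::session_id)::value_type>::value,
    "MetadataSketch::session_id must match GetSessionIdSketch()'s return type");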
@@ -691,21 +661,18 @@ void VaapiVideoDecoder::ApplyResolutionChangeWithScreenSizes(
// All pending decode operations will be completed before triggering a
// resolution change, so we can safely DestroyContext() here; that, in turn,
- // allows for clearing the |allocated_va_surfaces_| and the
- // |decode_surface_pool_for_scaling_|.
+ // allows for clearing the |allocated_va_surfaces_|.
vaapi_wrapper_->DestroyContext();
allocated_va_surfaces_.clear();
- while (!decode_surface_pool_for_scaling_.empty())
- decode_surface_pool_for_scaling_.pop();
- decode_to_output_scale_factor_.reset();
-
- gfx::Rect output_visible_rect = decoder_->GetVisibleRect();
- gfx::Size output_pic_size = decoder_->GetPicSize();
- if (output_pic_size.IsEmpty()) {
+ const gfx::Rect decoder_visible_rect = decoder_->GetVisibleRect();
+ const gfx::Size decoder_pic_size = decoder_->GetPicSize();
+ if (decoder_pic_size.IsEmpty()) {
SetErrorState("|decoder_| returned an empty picture size");
return;
}
+ gfx::Rect output_visible_rect = decoder_visible_rect;
+ gfx::Size output_pic_size = decoder_pic_size;
const auto format_fourcc = Fourcc::FromVideoPixelFormat(*format);
CHECK(format_fourcc);
if (!screen_resolutions.empty()) {
@@ -715,8 +682,8 @@ void VaapiVideoDecoder::ApplyResolutionChangeWithScreenSizes(
// visible rect later.
CHECK(cdm_context_ref_);
gfx::Size max_desired_size;
- const float pic_aspect =
- static_cast<float>(output_pic_size.width()) / output_pic_size.height();
+ const float pic_aspect = static_cast<float>(decoder_pic_size.width()) /
+ decoder_pic_size.height();
for (const auto& screen : screen_resolutions) {
if (screen.IsEmpty())
continue;
@@ -726,23 +693,23 @@ void VaapiVideoDecoder::ApplyResolutionChangeWithScreenSizes(
static_cast<float>(screen.width()) / screen.height();
if (pic_aspect >= screen_aspect) {
// Constrain on width.
- if (screen.width() < output_pic_size.width()) {
+ if (screen.width() < decoder_pic_size.width()) {
target_width = screen.width();
target_height =
base::checked_cast<int>(std::lround(target_width / pic_aspect));
} else {
- target_width = output_pic_size.width();
- target_height = output_pic_size.height();
+ target_width = decoder_pic_size.width();
+ target_height = decoder_pic_size.height();
}
} else {
// Constrain on height.
- if (screen.height() < output_pic_size.height()) {
+ if (screen.height() < decoder_pic_size.height()) {
target_height = screen.height();
target_width =
base::checked_cast<int>(std::lround(target_height * pic_aspect));
} else {
- target_height = output_pic_size.height();
- target_width = output_pic_size.width();
+ target_height = decoder_pic_size.height();
+ target_width = decoder_pic_size.width();
}
}
if (target_width > max_desired_size.width() ||
@@ -751,64 +718,26 @@ void VaapiVideoDecoder::ApplyResolutionChangeWithScreenSizes(
}
}
if (!max_desired_size.IsEmpty() &&
- max_desired_size.width() < output_pic_size.width()) {
+ max_desired_size.width() < decoder_pic_size.width()) {
// Fix this so we are sure it's on a multiple of two to deal with
// subsampling.
max_desired_size.set_width(RoundUpToEven(max_desired_size.width()));
max_desired_size.set_height(RoundUpToEven(max_desired_size.height()));
- decode_to_output_scale_factor_ =
+ const auto decode_to_output_scale_factor =
static_cast<float>(max_desired_size.width()) /
- output_pic_size.width();
+ decoder_pic_size.width();
output_pic_size = max_desired_size;
- output_visible_rect =
- GetOutputVisibleRect(output_visible_rect, output_pic_size);
-
- // Create the surface pool for decoding, the normal pool will be used for
- // output.
- const size_t decode_pool_size = decoder_->GetRequiredNumOfPictures();
- const absl::optional<gfx::BufferFormat> buffer_format =
- VideoPixelFormatToGfxBufferFormat(*format);
- if (!buffer_format) {
- decode_to_output_scale_factor_.reset();
- SetErrorState(
- base::StringPrintf("unsupported pixel format: %s",
- VideoPixelFormatToString(*format).c_str()));
- return;
- }
- const uint32_t va_fourcc =
- VaapiWrapper::BufferFormatToVAFourCC(*buffer_format);
- const uint32_t va_rt_format =
- VaapiWrapper::BufferFormatToVARTFormat(*buffer_format);
- if (!va_fourcc || !va_rt_format) {
- decode_to_output_scale_factor_.reset();
- SetErrorState(
- base::StringPrintf("VA-API does not support: %s",
- gfx::BufferFormatToString(*buffer_format)));
- return;
- }
- const gfx::Size decoder_pic_size = decoder_->GetPicSize();
- auto scoped_va_surfaces = vaapi_wrapper_->CreateScopedVASurfaces(
- base::strict_cast<unsigned int>(va_rt_format), decoder_pic_size,
- {VaapiWrapper::SurfaceUsageHint::kVideoDecoder}, decode_pool_size,
- /*visible_size=*/absl::nullopt, va_fourcc);
- if (scoped_va_surfaces.empty()) {
- decode_to_output_scale_factor_.reset();
- SetErrorState("failed creating VASurfaces");
- return;
- }
-
- for (auto&& scoped_va_surface : scoped_va_surfaces)
- decode_surface_pool_for_scaling_.push(std::move(scoped_va_surface));
+ output_visible_rect = ScaleToEnclosedRect(decoder_visible_rect,
+ decode_to_output_scale_factor);
+ // Make the dimensions even numbered to align with other requirements
+ // later in the pipeline.
+ output_visible_rect.set_width(
+ RoundDownToEven(output_visible_rect.width()));
+ output_visible_rect.set_height(
+ RoundDownToEven(output_visible_rect.height()));
+ CHECK(gfx::Rect(output_pic_size).Contains(output_visible_rect));
}
}
- const gfx::Size natural_size =
- aspect_ratio_.GetNaturalSize(output_visible_rect);
- if (!frame_pool_->Initialize(
- *format_fourcc, output_pic_size, output_visible_rect, natural_size,
- decoder_->GetRequiredNumOfPictures(), !!cdm_context_ref_)) {
- SetErrorState("failed Initialize()ing the frame pool");
- return;
- }
if (profile_ != decoder_->GetProfile()) {
// When a profile is changed, we need to re-initialize VaapiWrapper.
@@ -831,11 +760,29 @@ void VaapiVideoDecoder::ApplyResolutionChangeWithScreenSizes(
vaapi_wrapper_ = std::move(new_vaapi_wrapper);
}
- if (!vaapi_wrapper_->CreateContext(decoder_->GetPicSize())) {
+ if (!vaapi_wrapper_->CreateContext(decoder_pic_size)) {
SetErrorState("failed creating VAContext");
return;
}
+ const gfx::Size decoder_natural_size =
+ aspect_ratio_.GetNaturalSize(decoder_visible_rect);
+ auto status_or_layout = client_->PickDecoderOutputFormat(
+ /*candidates=*/{{*format_fourcc, decoder_pic_size}}, decoder_visible_rect,
+ decoder_natural_size, output_visible_rect.size(),
+ decoder_->GetRequiredNumOfPictures(),
+ /*use_protected=*/!!cdm_context_ref_,
+ /*need_aux_frame_pool=*/true);
+ if (status_or_layout.has_error()) {
+ if (std::move(status_or_layout).error().code() == StatusCode::kAborted) {
+ DVLOGF(2) << "The frame pool initialization is aborted.";
+ SetState(State::kExpectingReset);
+ } else {
+ SetErrorState("failed Initialize()ing the frame pool");
+ }
+ return;
+ }
+
DCHECK(current_decode_task_);
// Retry the current decode task.
SetState(State::kDecoding);
@@ -871,6 +818,9 @@ bool VaapiVideoDecoder::IsPlatformDecoder() const {
}
bool VaapiVideoDecoder::NeedsTranscryption() {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ DCHECK(state_ == State::kWaitingForInput);
+
return transcryption_;
}
@@ -958,12 +908,16 @@ void VaapiVideoDecoder::Reset(base::OnceClosure reset_cb) {
return;
}
- if (state_ == State::kChangingResolution) {
- // Recreate |decoder_| and |decoder_delegate_| if we are Reset() in the
- // interim between calling |client_|s PrepareChangeResolution() and being
- // called back on ApplyResolutionChange(), so the latter will find a fresh
- // |decoder_|. Also give a chance to |decoder_delegate_| to release its
- // internal data structures.
+ if (state_ == State::kChangingResolution ||
+ state_ == State::kExpectingReset) {
+ // Recreate |decoder_| and |decoder_delegate_| if we are either:
+ // a) Reset() in the interim between calling |client_|'s
+ // PrepareChangeResolution() and being called back on
+ // ApplyResolutionChange(), so the latter will find a fresh |decoder_|;
+ // b) expecting a Reset() after the initialization of the frame pool was
+ // aborted.
+ // Also give a chance to |decoder_delegate_| to release its internal data
+ // structures.
decoder_delegate_->OnVAContextDestructionSoon();
if (!CreateAcceleratedVideoDecoder().is_ok()) {
SetErrorState("failed to (re)create decoder/delegate");
@@ -989,6 +943,9 @@ void VaapiVideoDecoder::Reset(base::OnceClosure reset_cb) {
Status VaapiVideoDecoder::CreateAcceleratedVideoDecoder() {
DVLOGF(2);
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ DCHECK(state_ == State::kUninitialized ||
+ state_ == State::kChangingResolution ||
+ state_ == State::kExpectingReset);
VaapiVideoDecoderDelegate::ProtectedSessionUpdateCB protected_update_cb =
BindToCurrentLoop(base::BindRepeating(
@@ -1031,8 +988,10 @@ Status VaapiVideoDecoder::CreateAcceleratedVideoDecoder() {
}
#endif // BUILDFLAG(ENABLE_PLATFORM_HEVC_DECODING)
else if (profile_ >= AV1PROFILE_MIN && profile_ <= AV1PROFILE_MAX) {
- auto accelerator =
- std::make_unique<AV1VaapiVideoDecoderDelegate>(this, vaapi_wrapper_);
+ auto accelerator = std::make_unique<AV1VaapiVideoDecoderDelegate>(
+ this, vaapi_wrapper_, std::move(protected_update_cb),
+ cdm_context_ref_ ? cdm_context_ref_->GetCdmContext() : nullptr,
+ encryption_scheme_);
decoder_delegate_ = accelerator.get();
decoder_.reset(new AV1Decoder(std::move(accelerator), profile_));
@@ -1087,12 +1046,16 @@ void VaapiVideoDecoder::SetState(State state) {
DCHECK(state_ == State::kWaitingForInput ||
state_ == State::kWaitingForOutput || state_ == State::kDecoding ||
state_ == State::kWaitingForProtected ||
- state_ == State::kChangingResolution);
+ state_ == State::kChangingResolution ||
+ state_ == State::kExpectingReset);
ClearDecodeTaskQueue(DecodeStatus::ABORTED);
break;
case State::kChangingResolution:
DCHECK_EQ(state_, State::kDecoding);
break;
+ case State::kExpectingReset:
+ DCHECK_EQ(state_, State::kChangingResolution);
+ break;
case State::kError:
ClearDecodeTaskQueue(DecodeStatus::DECODE_ERROR);
break;
@@ -1108,12 +1071,4 @@ void VaapiVideoDecoder::SetErrorState(std::string message) {
SetState(State::kError);
}
-void VaapiVideoDecoder::ReturnDecodeSurfaceToPool(
- std::unique_ptr<ScopedVASurface> surface,
- VASurfaceID) {
- DVLOGF(4);
- DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
- decode_surface_pool_for_scaling_.push(std::move(surface));
-}
-
} // namespace media
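For reference, the even-rounding applied to the scaled output size and visible rect above keeps both dimensions even so 4:2:0 chroma subsampling stays aligned. A minimal sketch of the assumed RoundUpToEven()/RoundDownToEven() behavior (illustrative only, not the Chromium helpers):

namespace {
// Sketch only: round an integer dimension to the nearest even value.
constexpr int RoundUpToEven(int x) { return x + (x & 1); }
constexpr int RoundDownToEven(int x) { return x - (x & 1); }
static_assert(RoundUpToEven(719) == 720, "odd values round up to even");
static_assert(RoundDownToEven(719) == 718, "odd values round down to even");
}  // namespace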
diff --git a/chromium/media/gpu/vaapi/vaapi_video_decoder.h b/chromium/media/gpu/vaapi/vaapi_video_decoder.h
index 558756e25d1..f953716dfbe 100644
--- a/chromium/media/gpu/vaapi/vaapi_video_decoder.h
+++ b/chromium/media/gpu/vaapi/vaapi_video_decoder.h
@@ -30,7 +30,6 @@
#include "media/base/video_frame_layout.h"
#include "media/gpu/chromeos/video_decoder_pipeline.h"
#include "media/gpu/decode_surface_handler.h"
-#include "media/gpu/vaapi/vaapi_utils.h"
#include "third_party/abseil-cpp/absl/types/optional.h"
#include "ui/gfx/geometry/rect.h"
#include "ui/gfx/geometry/size.h"
@@ -76,11 +75,6 @@ class VaapiVideoDecoder : public VideoDecoderMixin,
// DecodeSurfaceHandler<VASurface> implementation.
scoped_refptr<VASurface> CreateSurface() override;
- scoped_refptr<VASurface> CreateDecodeSurface() override;
- bool IsScalingDecode() override;
- const gfx::Rect GetOutputVisibleRect(
- const gfx::Rect& decode_visible_rect,
- const gfx::Size& output_picture_size) override;
void SurfaceReady(scoped_refptr<VASurface> va_surface,
int32_t buffer_id,
const gfx::Rect& visible_rect,
@@ -110,6 +104,8 @@ class VaapiVideoDecoder : public VideoDecoderMixin,
kDecoding, // decoding buffers.
kChangingResolution, // need to change resolution, waiting for pipeline to
// be flushed.
+ kExpectingReset, // resolution change is aborted, waiting for decoder
+ // to be reset.
kResetting, // resetting decoder.
kError, // decoder encountered an error.
};
@@ -134,7 +130,7 @@ class VaapiVideoDecoder : public VideoDecoderMixin,
// reference frame. Note that this doesn't mean the frame can be reused
// immediately, as it might still be used by the client.
void ReleaseVideoFrame(VASurfaceID surface_id);
- // Callback for |frame_pool_| to notify of available resources.
+ // Callback for the frame pool to notify us when a frame becomes available.
void NotifyFrameAvailable();
// Callback from accelerator to indicate the protected state has been updated
// so we can proceed or fail.
@@ -165,13 +161,6 @@ class VaapiVideoDecoder : public VideoDecoderMixin,
void ApplyResolutionChangeWithScreenSizes(
const std::vector<gfx::Size>& screen_resolution);
- // Callback for when a VASurface in the decode pool is no longer used as a
- // reference frame and should then be returned to the pool. We ignore the
- // VASurfaceID in the normal callback because it is retained in the |surface|
- // object.
- void ReturnDecodeSurfaceToPool(std::unique_ptr<ScopedVASurface> surface,
- VASurfaceID);
-
// Having too many decoder instances at once may cause us to run out of FDs
// and subsequently crash (b/181264362). To avoid that, we limit the maximum
// number of decoder instances that can exist at once. |num_instances_| tracks
@@ -200,9 +189,6 @@ class VaapiVideoDecoder : public VideoDecoderMixin,
// Aspect ratio from the config.
VideoAspectRatio aspect_ratio_;
- // Video frame pool used to allocate and recycle video frames.
- DmabufVideoFramePool* frame_pool_ = nullptr;
-
// The time at which each buffer decode operation started. Not each decode
// operation leads to a frame being output and frames might be reordered, so
// we don't know when it's safe to drop a timestamp. This means we need to use
@@ -219,10 +205,10 @@ class VaapiVideoDecoder : public VideoDecoderMixin,
// The list of frames currently used as output buffers or reference frames.
std::map<VASurfaceID, scoped_refptr<VideoFrame>> output_frames_;
- // VASurfaces are created via importing |frame_pool_| resources into libva in
- // CreateSurface(). The following map keeps those VASurfaces for reuse
- // according to the expectations of libva vaDestroySurfaces(): "Surfaces can
- // only be destroyed after all contexts using these surfaces have been
+ // VASurfaces are created via importing resources from a DmabufVideoFramePool
+ // into libva in CreateSurface(). The following map keeps those VASurfaces for
+ // reuse according to the expectations of libva vaDestroySurfaces(): "Surfaces
+ // can only be destroyed after all contexts using these surfaces have been
// destroyed."
// TODO(crbug.com/1040291): remove this keep-alive when using SharedImages.
base::small_map<std::map<gfx::GpuMemoryBufferId, scoped_refptr<VASurface>>>
@@ -253,15 +239,6 @@ class VaapiVideoDecoder : public VideoDecoderMixin,
// the pointer from AcceleratedVideoDecoder.
VaapiVideoDecoderDelegate* decoder_delegate_ = nullptr;
- // When we are doing scaled decoding, this is the pool of surfaces used by the
- // decoder for reference frames.
- base::queue<std::unique_ptr<ScopedVASurface>>
- decode_surface_pool_for_scaling_;
-
- // When we are doing scaled decoding, this is the scale factor we are using,
- // and applies the same in both dimensions.
- absl::optional<float> decode_to_output_scale_factor_;
-
// This is used on AMD protected content implementations to indicate that the
// DecoderBuffers we receive have been transcrypted and need special handling.
bool transcryption_ = false;
diff --git a/chromium/media/gpu/vaapi/vaapi_video_decoder_delegate.cc b/chromium/media/gpu/vaapi/vaapi_video_decoder_delegate.cc
index c11fcef30de..bc7da98fea2 100644
--- a/chromium/media/gpu/vaapi/vaapi_video_decoder_delegate.cc
+++ b/chromium/media/gpu/vaapi/vaapi_video_decoder_delegate.cc
@@ -22,8 +22,7 @@
namespace {
// During playback of protected content, we need to request the keys at an
// interval no greater than this. This allows updating of key usage data.
-constexpr base::TimeDelta kKeyRetrievalMaxPeriod =
- base::TimeDelta::FromMinutes(1);
+constexpr base::TimeDelta kKeyRetrievalMaxPeriod = base::Minutes(1);
// This increments the lower 64-bit counter of a 128-bit IV.
void ctr128_inc64(uint8_t* counter) {
uint32_t n = 16;
@@ -50,7 +49,6 @@ VaapiVideoDecoderDelegate::VaapiVideoDecoderDelegate(
std::move(on_protected_session_update_cb)),
encryption_scheme_(encryption_scheme),
protected_session_state_(ProtectedSessionState::kNotCreated),
- scaled_surface_id_(VA_INVALID_ID),
performing_recovery_(false) {
DCHECK(vaapi_wrapper_);
DCHECK(vaapi_dec_);
@@ -59,8 +57,6 @@ VaapiVideoDecoderDelegate::VaapiVideoDecoderDelegate(
if (cdm_context)
chromeos_cdm_context_ = cdm_context->GetChromeOsCdmContext();
#endif // BUILDFLAG(IS_CHROMEOS_ASH)
- memset(&src_region_, 0, sizeof(src_region_));
- memset(&dst_region_, 0, sizeof(dst_region_));
transcryption_ = cdm_context && VaapiWrapper::GetImplementationType() ==
VAImplementation::kMesaGallium;
}
@@ -145,7 +141,8 @@ VaapiVideoDecoderDelegate::SetupDecryptDecode(
DCHECK_EQ(protected_session_state_, ProtectedSessionState::kCreated);
- if (encryption_scheme_ == EncryptionScheme::kCenc) {
+ const bool ctr = (encryption_scheme_ == EncryptionScheme::kCenc);
+ if (ctr) {
crypto_params->encryption_type = full_sample
? VA_ENCRYPTION_TYPE_FULLSAMPLE_CTR
: VA_ENCRYPTION_TYPE_SUBSAMPLE_CTR;
@@ -214,38 +211,26 @@ VaapiVideoDecoderDelegate::SetupDecryptDecode(
crypto_params->num_segments += subsamples.size();
if (decrypt_config_->HasPattern()) {
- if (subsamples.size() != 1) {
- LOG(ERROR) << "Need single subsample for encryption pattern";
- protected_session_state_ = ProtectedSessionState::kFailed;
- return protected_session_state_;
- }
crypto_params->blocks_stripe_encrypted =
decrypt_config_->encryption_pattern()->crypt_byte_block();
crypto_params->blocks_stripe_clear =
decrypt_config_->encryption_pattern()->skip_byte_block();
+ }
+ size_t total_cypher_size = 0;
+ std::vector<uint8_t> iv(DecryptConfig::kDecryptionKeySize);
+ iv.assign(decrypt_config_->iv().begin(), decrypt_config_->iv().end());
+ for (const auto& entry : subsamples) {
VAEncryptionSegmentInfo segment_info = {};
segment_info.segment_start_offset = offset;
- segment_info.init_byte_length = subsamples[0].clear_bytes;
- segment_info.segment_length =
- subsamples[0].clear_bytes + subsamples[0].cypher_bytes;
- memcpy(segment_info.aes_cbc_iv_or_ctr, decrypt_config_->iv().data(),
+ segment_info.segment_length = entry.clear_bytes + entry.cypher_bytes;
+ memcpy(segment_info.aes_cbc_iv_or_ctr, iv.data(),
DecryptConfig::kDecryptionKeySize);
- segments->emplace_back(std::move(segment_info));
- } else {
- size_t total_cypher_size = 0;
- std::vector<uint8_t> iv(DecryptConfig::kDecryptionKeySize);
- iv.assign(decrypt_config_->iv().begin(), decrypt_config_->iv().end());
- for (const auto& entry : subsamples) {
- VAEncryptionSegmentInfo segment_info = {};
- segment_info.segment_start_offset = offset;
- segment_info.segment_length = entry.clear_bytes + entry.cypher_bytes;
+ if (ctr) {
size_t partial_block_size =
(DecryptConfig::kDecryptionKeySize -
(total_cypher_size % DecryptConfig::kDecryptionKeySize)) %
DecryptConfig::kDecryptionKeySize;
segment_info.partial_aes_block_size = partial_block_size;
- memcpy(segment_info.aes_cbc_iv_or_ctr, iv.data(),
- DecryptConfig::kDecryptionKeySize);
if (entry.cypher_bytes > partial_block_size) {
// If we are finishing a block, increment the counter.
if (partial_block_size)
@@ -258,10 +243,10 @@ VaapiVideoDecoderDelegate::SetupDecryptDecode(
ctr128_inc64(iv.data());
}
total_cypher_size += entry.cypher_bytes;
- segment_info.init_byte_length = entry.clear_bytes;
- offset += entry.clear_bytes + entry.cypher_bytes;
- segments->emplace_back(std::move(segment_info));
}
+ segment_info.init_byte_length = entry.clear_bytes;
+ offset += entry.clear_bytes + entry.cypher_bytes;
+ segments->emplace_back(std::move(segment_info));
}
memcpy(crypto_params->wrapped_decrypt_blob,
hw_key_data_map_[decrypt_config_->key_id()].data(),
@@ -290,40 +275,6 @@ void VaapiVideoDecoderDelegate::ProtectedDecodedSucceeded() {
performing_recovery_ = false;
}
-bool VaapiVideoDecoderDelegate::FillDecodeScalingIfNeeded(
- const gfx::Rect& decode_visible_rect,
- VASurfaceID decode_surface_id,
- scoped_refptr<VASurface> output_surface,
- VAProcPipelineParameterBuffer* proc_buffer) {
- if (!vaapi_dec_->IsScalingDecode())
- return false;
-
- // Submit the buffer for the inline decode scaling.
- memset(proc_buffer, 0, sizeof(*proc_buffer));
- src_region_.x = base::checked_cast<int16_t>(decode_visible_rect.x());
- src_region_.y = base::checked_cast<int16_t>(decode_visible_rect.y());
- src_region_.width = base::checked_cast<uint16_t>(decode_visible_rect.width());
- src_region_.height =
- base::checked_cast<uint16_t>(decode_visible_rect.height());
-
- gfx::Rect scaled_visible_rect = vaapi_dec_->GetOutputVisibleRect(
- decode_visible_rect, output_surface->size());
- dst_region_.x = base::checked_cast<int16_t>(scaled_visible_rect.x());
- dst_region_.y = base::checked_cast<int16_t>(scaled_visible_rect.y());
- dst_region_.width = base::checked_cast<uint16_t>(scaled_visible_rect.width());
- dst_region_.height =
- base::checked_cast<uint16_t>(scaled_visible_rect.height());
-
- proc_buffer->surface_region = &src_region_;
- proc_buffer->output_region = &dst_region_;
-
- scaled_surface_id_ = output_surface->id();
- proc_buffer->additional_outputs = &scaled_surface_id_;
- proc_buffer->num_additional_outputs = 1;
- proc_buffer->surface = decode_surface_id;
- return true;
-}
-
std::string VaapiVideoDecoderDelegate::GetDecryptKeyId() const {
DCHECK(decrypt_config_);
return decrypt_config_->key_id();
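Two pieces of arithmetic drive the unified subsample loop above when CTR (CENC) encryption is used: the IV is advanced as a big-endian counter by ctr128_inc64(), and partial_aes_block_size records how many cypher bytes are still needed to finish the currently open 16-byte AES block. A minimal sketch of both, under the assumption that only the low 64 bits of the IV carry (illustrative only, not the Chromium implementation):

#include <stddef.h>
#include <stdint.h>

// Sketch only: increment the low 64 bits (bytes 8..15) of a 16-byte,
// big-endian CTR IV, letting the carry ripple toward byte 8.
static void Ctr128Inc64Sketch(uint8_t counter[16]) {
  for (int i = 15; i >= 8; --i) {
    if (++counter[i] != 0)  // stop once a byte no longer wraps to zero
      break;
  }
}

// Sketch only: bytes still needed to complete the current 16-byte AES block
// after |total_cypher_size| cypher bytes have been consumed.
constexpr size_t PartialAesBlockSize(size_t total_cypher_size) {
  return (16 - (total_cypher_size % 16)) % 16;
}
static_assert(PartialAesBlockSize(20) == 12, "12 bytes left in the open block");
static_assert(PartialAesBlockSize(32) == 0, "block boundary, nothing pending");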
diff --git a/chromium/media/gpu/vaapi/vaapi_video_decoder_delegate.h b/chromium/media/gpu/vaapi/vaapi_video_decoder_delegate.h
index efb6fae9faf..40edb0fed1d 100644
--- a/chromium/media/gpu/vaapi/vaapi_video_decoder_delegate.h
+++ b/chromium/media/gpu/vaapi/vaapi_video_decoder_delegate.h
@@ -21,7 +21,6 @@
#include "media/base/encryption_scheme.h"
#include "media/base/subsample_entry.h"
#include "third_party/libva_protected_content/va_protected_content.h"
-#include "ui/gfx/geometry/rect.h"
#if BUILDFLAG(IS_CHROMEOS_ASH)
#include "chromeos/components/cdm_factory_daemon/chromeos_cdm_context.h"
@@ -127,14 +126,6 @@ class VaapiVideoDecoderDelegate {
// every successful protected decode.
void ProtectedDecodedSucceeded();
- // Fills *|proc_buffer| with the proper parameters for decode scaling and
- // returns true if that buffer was filled in and should be submitted, false
- // otherwise.
- bool FillDecodeScalingIfNeeded(const gfx::Rect& decode_visible_rect,
- VASurfaceID decode_surface_id,
- scoped_refptr<VASurface> output_surface,
- VAProcPipelineParameterBuffer* proc_buffer);
-
// Returns the key_id string for the current DecryptConfig.
std::string GetDecryptKeyId() const;
@@ -161,12 +152,6 @@ class VaapiVideoDecoderDelegate {
std::vector<uint8_t> hw_identifier_;
std::map<std::string, std::vector<uint8_t>> hw_key_data_map_;
base::TimeTicks last_key_retrieval_time_;
- // We need to hold onto these across a call since the VABuffer will reference
- // their pointers, so declare them here to allow for that. These are used in
- // the decode scaling operation.
- VARectangle src_region_;
- VARectangle dst_region_;
- VASurfaceID scaled_surface_id_;
// This will only be true on AMD platforms where we support encrypted content
// and the content is encrypted.
diff --git a/chromium/media/gpu/vaapi/vaapi_video_encode_accelerator.cc b/chromium/media/gpu/vaapi/vaapi_video_encode_accelerator.cc
index 2add9b86d32..eaf1d47a8eb 100644
--- a/chromium/media/gpu/vaapi/vaapi_video_encode_accelerator.cc
+++ b/chromium/media/gpu/vaapi/vaapi_video_encode_accelerator.cc
@@ -156,6 +156,14 @@ bool VaapiVideoEncodeAccelerator::Initialize(const Config& config,
DCHECK_EQ(state_, kUninitialized);
VLOGF(2) << "Initializing VAVEA, " << config.AsHumanReadableString();
+ if (AttemptedInitialization()) {
+ VLOGF(1) << "Initialize() cannot be called more than once.";
+ return false;
+ }
+
+ client_ptr_factory_.reset(new base::WeakPtrFactory<Client>(client));
+ client_ = client_ptr_factory_->GetWeakPtr();
+
if (config.HasSpatialLayer()) {
#if BUILDFLAG(IS_CHROMEOS_ASH)
if (!base::FeatureList::IsEnabled(kVaapiVp9kSVCHWEncoding) &&
@@ -200,11 +208,9 @@ bool VaapiVideoEncodeAccelerator::Initialize(const Config& config,
}
}
- client_ptr_factory_.reset(new base::WeakPtrFactory<Client>(client));
- client_ = client_ptr_factory_->GetWeakPtr();
-
const VideoCodec codec = VideoCodecProfileToVideoCodec(config.output_profile);
- if (codec != kCodecH264 && codec != kCodecVP8 && codec != kCodecVP9) {
+ if (codec != VideoCodec::kH264 && codec != VideoCodec::kVP8 &&
+ codec != VideoCodec::kVP9) {
VLOGF(1) << "Unsupported profile: "
<< GetProfileName(config.output_profile);
return false;
@@ -259,26 +265,6 @@ bool VaapiVideoEncodeAccelerator::Initialize(const Config& config,
return false;
}
- DCHECK_EQ(IsConfiguredForTesting(), !!vaapi_wrapper_);
- if (!IsConfiguredForTesting()) {
- if (vaapi_wrapper_) {
- VLOGF(1) << "Initialize() is called twice";
- return false;
- }
- VaapiWrapper::CodecMode mode =
- codec == kCodecVP9 ? VaapiWrapper::kEncodeConstantQuantizationParameter
- : VaapiWrapper::kEncodeConstantBitrate;
- vaapi_wrapper_ = VaapiWrapper::CreateForVideoCodec(
- mode, config.output_profile, EncryptionScheme::kUnencrypted,
- base::BindRepeating(&ReportVaapiErrorToUMA,
- "Media.VaapiVideoEncodeAccelerator.VAAPIError"));
- if (!vaapi_wrapper_) {
- VLOGF(1) << "Failed initializing VAAPI for profile "
- << GetProfileName(config.output_profile);
- return false;
- }
- }
-
// Finish remaining initialization on the encoder thread.
encoder_task_runner_->PostTask(
FROM_HERE, base::BindOnce(&VaapiVideoEncodeAccelerator::InitializeTask,
@@ -292,7 +278,24 @@ void VaapiVideoEncodeAccelerator::InitializeTask(const Config& config) {
VLOGF(2);
output_codec_ = VideoCodecProfileToVideoCodec(config.output_profile);
- VaapiVideoEncoderDelegate::Config ave_config{};
+ DCHECK_EQ(IsConfiguredForTesting(), !!vaapi_wrapper_);
+ if (!IsConfiguredForTesting()) {
+ const auto mode = output_codec_ == VideoCodec::kVP9
+ ? VaapiWrapper::kEncodeConstantQuantizationParameter
+ : VaapiWrapper::kEncodeConstantBitrate;
+ vaapi_wrapper_ = VaapiWrapper::CreateForVideoCodec(
+ mode, config.output_profile, EncryptionScheme::kUnencrypted,
+ base::BindRepeating(&ReportVaapiErrorToUMA,
+ "Media.VaapiVideoEncodeAccelerator.VAAPIError"));
+
+ if (!vaapi_wrapper_) {
+ NOTIFY_ERROR(kPlatformFailureError,
+ "Failed initializing VAAPI for profile " +
+ GetProfileName(config.output_profile));
+ return;
+ }
+ }
+
DCHECK_EQ(IsConfiguredForTesting(), !!encoder_);
// base::Unretained(this) is safe because |error_cb| is called by
// |encoder_| and |this| outlives |encoder_|.
@@ -302,8 +305,10 @@ void VaapiVideoEncodeAccelerator::InitializeTask(const Config& config) {
vea->NotifyError(kPlatformFailureError);
},
base::Unretained(this));
+
+ VaapiVideoEncoderDelegate::Config ave_config{};
switch (output_codec_) {
- case kCodecH264:
+ case VideoCodec::kH264:
if (!IsConfiguredForTesting()) {
encoder_ = std::make_unique<H264VaapiVideoEncoderDelegate>(
vaapi_wrapper_, error_cb);
@@ -312,7 +317,7 @@ void VaapiVideoEncodeAccelerator::InitializeTask(const Config& config) {
DCHECK_EQ(ave_config.bitrate_control,
VaapiVideoEncoderDelegate::BitrateControl::kConstantBitrate);
break;
- case kCodecVP8:
+ case VideoCodec::kVP8:
if (!IsConfiguredForTesting()) {
encoder_ = std::make_unique<VP8VaapiVideoEncoderDelegate>(
vaapi_wrapper_, error_cb);
@@ -321,7 +326,7 @@ void VaapiVideoEncodeAccelerator::InitializeTask(const Config& config) {
DCHECK_EQ(ave_config.bitrate_control,
VaapiVideoEncoderDelegate::BitrateControl::kConstantBitrate);
break;
- case kCodecVP9:
+ case VideoCodec::kVP9:
if (!IsConfiguredForTesting()) {
encoder_ = std::make_unique<VP9VaapiVideoEncoderDelegate>(
vaapi_wrapper_, error_cb);
@@ -357,31 +362,14 @@ void VaapiVideoEncodeAccelerator::InitializeTask(const Config& config) {
expected_input_coded_size_.width() <= encoder_->GetCodedSize().width() &&
expected_input_coded_size_.height() <= encoder_->GetCodedSize().height());
- va_surfaces_per_video_frame_ =
- native_input_mode_
- ?
- // In native input mode, we do not need surfaces for input frames.
- kNumSurfacesForOutputPicture
- :
- // In non-native mode, we need to create additional surfaces for input
- // frames.
- kNumSurfacesForOutputPicture + kNumSurfacesPerInputVideoFrame;
-
// The number of required buffers is the number of required reference frames
// + 1 for the current frame to be encoded.
const size_t max_ref_frames = encoder_->GetMaxNumOfRefFrames();
num_frames_in_flight_ = std::max(kMinNumFramesInFlight, max_ref_frames);
DVLOGF(1) << "Frames in flight: " << num_frames_in_flight_;
- // The surface size for the reconstructed surface (and input surface in non
- // native input mode) is the coded size.
- available_va_surfaces_ = vaapi_wrapper_->CreateContextAndScopedVASurfaces(
- kVaSurfaceFormat, encoder_->GetCodedSize(),
- {VaapiWrapper::SurfaceUsageHint::kVideoEncoder},
- (num_frames_in_flight_ + 1) * va_surfaces_per_video_frame_,
- /*visible_size=*/absl::nullopt);
- if (available_va_surfaces_.empty()) {
- NOTIFY_ERROR(kPlatformFailureError, "Failed creating VASurfaces");
+ if (!vaapi_wrapper_->CreateContext(encoder_->GetCodedSize())) {
+ NOTIFY_ERROR(kPlatformFailureError, "Failed creating VAContext");
return;
}
@@ -419,6 +407,19 @@ void VaapiVideoEncodeAccelerator::RecycleVASurface(
DVLOGF(4) << "va_surface_id: " << va_surface_id;
va_surfaces->push_back(std::move(va_surface));
+
+ // At least one surface must be available in each |available_encode_surfaces_|
+ // and |available_vpp_dest_surfaces_| for EncodePendingInputs() to succeed.
+ // Check here to avoid a redundant EncodePendingInputs() call.
+ for (const auto& surfaces : available_encode_surfaces_) {
+ if (surfaces.second.empty())
+ return;
+ }
+ for (const auto& surfaces : available_vpp_dest_surfaces_) {
+ if (surfaces.second.empty())
+ return;
+ }
+
EncodePendingInputs();
}
@@ -535,72 +536,14 @@ VaapiVideoEncodeAccelerator::GetAvailableVASurfaceAsRefCounted(
std::move(release_cb));
}
-scoped_refptr<VASurface>
-VaapiVideoEncodeAccelerator::BlitSurfaceWithCreateVppIfNeeded(
- const VASurface& input_surface,
- const gfx::Rect& input_visible_rect,
- const gfx::Size& encode_size,
- size_t num_va_surfaces) {
- DCHECK_CALLED_ON_VALID_SEQUENCE(encoder_sequence_checker_);
-
- if (!vpp_vaapi_wrapper_) {
- vpp_vaapi_wrapper_ = VaapiWrapper::Create(
- VaapiWrapper::kVideoProcess, VAProfileNone,
- EncryptionScheme::kUnencrypted,
- base::BindRepeating(
- &ReportVaapiErrorToUMA,
- "Media.VaapiVideoEncodeAccelerator.Vpp.VAAPIError"));
- if (!vpp_vaapi_wrapper_) {
- NOTIFY_ERROR(kPlatformFailureError,
- "Failed to initialize VppVaapiWrapper");
- return nullptr;
- }
-
- // VA context for VPP is not associated with a specific resolution.
- if (!vpp_vaapi_wrapper_->CreateContext(gfx::Size())) {
- NOTIFY_ERROR(kPlatformFailureError, "Failed creating Context for VPP");
- return nullptr;
- }
- }
-
- if (!base::Contains(available_vpp_va_surfaces_, encode_size)) {
- auto scoped_va_surfaces = vpp_vaapi_wrapper_->CreateScopedVASurfaces(
- kVaSurfaceFormat, encode_size,
- {VaapiWrapper::SurfaceUsageHint::kVideoProcessWrite,
- VaapiWrapper::SurfaceUsageHint::kVideoEncoder},
- num_va_surfaces, /*visible_size=*/absl::nullopt,
- /*va_fourcc=*/absl::nullopt);
- if (scoped_va_surfaces.empty()) {
- NOTIFY_ERROR(kPlatformFailureError, "Failed creating VASurfaces");
- return nullptr;
- }
-
- available_vpp_va_surfaces_[encode_size] = std::move(scoped_va_surfaces);
- }
-
- auto blit_surface = GetAvailableVASurfaceAsRefCounted(
- &available_vpp_va_surfaces_[encode_size]);
- if (!vpp_vaapi_wrapper_->BlitSurface(input_surface, *blit_surface,
- input_visible_rect,
- gfx::Rect(encode_size))) {
- NOTIFY_ERROR(kPlatformFailureError,
- "Failed BlitSurface on frame size: "
- << input_surface.size().ToString()
- << " (visible rect: " << input_visible_rect.ToString()
- << ") -> encode size: " << encode_size.ToString());
- return nullptr;
- }
-
- return blit_surface;
-}
-
bool VaapiVideoEncodeAccelerator::CreateSurfacesForGpuMemoryBufferEncoding(
const VideoFrame& frame,
- const gfx::Size& encode_size,
- scoped_refptr<VASurface>* input_surface,
- scoped_refptr<VASurface>* reconstructed_surface) {
+ const std::vector<gfx::Size>& spatial_layer_resolutions,
+ std::vector<scoped_refptr<VASurface>>* input_surfaces,
+ std::vector<scoped_refptr<VASurface>>* reconstructed_surfaces) {
DCHECK_CALLED_ON_VALID_SEQUENCE(encoder_sequence_checker_);
DCHECK(native_input_mode_);
+ TRACE_EVENT0("media,gpu", "VAVEA::CreateSurfacesForGpuMemoryBuffer");
if (frame.storage_type() != VideoFrame::STORAGE_GPU_MEMORY_BUFFER) {
NOTIFY_ERROR(kPlatformFailureError,
@@ -608,7 +551,6 @@ bool VaapiVideoEncodeAccelerator::CreateSurfacesForGpuMemoryBufferEncoding(
<< VideoFrame::StorageTypeToString(frame.storage_type()));
return false;
}
-
if (frame.format() != PIXEL_FORMAT_NV12) {
NOTIFY_ERROR(
kPlatformFailureError,
@@ -616,58 +558,64 @@ bool VaapiVideoEncodeAccelerator::CreateSurfacesForGpuMemoryBufferEncoding(
return false;
}
- const bool do_vpp = frame.visible_rect() != gfx::Rect(encode_size);
- if (do_vpp) {
- constexpr size_t kNumSurfaces = 2; // For input and reconstructed surface.
- if (base::Contains(available_vpp_va_surfaces_, encode_size) &&
- available_vpp_va_surfaces_[encode_size].size() < kNumSurfaces) {
- DVLOGF(4) << "Not enough surfaces available";
+ scoped_refptr<VASurface> source_surface;
+ {
+ TRACE_EVENT0("media,gpu", "VAVEA::ImportGpuMemoryBufferToVASurface");
+
+ // Create a VASurface from the GpuMemoryBuffer-backed VideoFrame.
+ scoped_refptr<gfx::NativePixmap> pixmap = CreateNativePixmapDmaBuf(&frame);
+ if (!pixmap) {
+ NOTIFY_ERROR(kPlatformFailureError,
+ "Failed to create NativePixmap from VideoFrame");
return false;
}
- } else {
- if (available_va_surfaces_.empty()) { // For the reconstructed surface.
- DVLOGF(4) << "Not surface available";
+
+ source_surface =
+ vaapi_wrapper_->CreateVASurfaceForPixmap(std::move(pixmap));
+ if (!source_surface) {
+ NOTIFY_ERROR(kPlatformFailureError, "Failed to create VASurface");
return false;
}
}
- // Create VASurface from GpuMemory-based VideoFrame.
- scoped_refptr<gfx::NativePixmap> pixmap = CreateNativePixmapDmaBuf(&frame);
- if (!pixmap) {
- NOTIFY_ERROR(kPlatformFailureError,
- "Failed to create NativePixmap from VideoFrame");
- return false;
- }
+ // Create input and reconstructed surfaces.
+ TRACE_EVENT1("media,gpu", "VAVEA::ConstructSurfaces", "the number of layers",
+ spatial_layer_resolutions.size());
+ input_surfaces->reserve(spatial_layer_resolutions.size());
+ reconstructed_surfaces->reserve(spatial_layer_resolutions.size());
+ for (const gfx::Size& encode_size : spatial_layer_resolutions) {
+ const bool engage_vpp = frame.visible_rect() != gfx::Rect(encode_size);
+ // Crop and scale the input surface to a surface whose size is |encode_size|.
+ // The size of a reconstructed surface is also |encode_size|.
+ if (engage_vpp) {
+ auto blit_surface = ExecuteBlitSurface(*source_surface,
+ frame.visible_rect(), encode_size);
+ if (!blit_surface)
+ return false;
- *input_surface = vaapi_wrapper_->CreateVASurfaceForPixmap(std::move(pixmap));
- if (!(*input_surface)) {
- NOTIFY_ERROR(kPlatformFailureError, "Failed to create VASurface");
- return false;
- }
+ input_surfaces->push_back(std::move(blit_surface));
+ } else {
+ input_surfaces->emplace_back(source_surface);
+ }
- // Crop and Scale input surface to a surface whose size is |encode_size|.
- // The size of a reconstructed surface is also |encode_size|.
- if (do_vpp) {
- // Create blit destination and reconstructed surfaces.
- *input_surface = BlitSurfaceWithCreateVppIfNeeded(
- *input_surface->get(), frame.visible_rect(), encode_size,
- (num_frames_in_flight_ + 1) * 2);
- if (!(*input_surface))
+ if (!CreateSurfacesIfNeeded(*vaapi_wrapper_, available_encode_surfaces_,
+ encode_size,
+ {VaapiWrapper::SurfaceUsageHint::kVideoEncoder},
+ num_frames_in_flight_ + 1)) {
return false;
- // A reconstructed surface is fine to be created by VPP VaapiWrapper and
- // encoder VaapiWrapper. Use one created by VPP VaapiWrapper when VPP is
- // executed.
- *reconstructed_surface = GetAvailableVASurfaceAsRefCounted(
- &available_vpp_va_surfaces_[encode_size]);
- } else {
- // TODO(crbug.com/1186051): Create VA Surface here for the first time, not
- // in Initialize()
- *reconstructed_surface =
- GetAvailableVASurfaceAsRefCounted(&available_va_surfaces_);
+ }
+ if (available_encode_surfaces_[encode_size].empty()) {
+ DVLOGF(4) << "Not enough reconstructed surface available";
+ return false;
+ }
+ reconstructed_surfaces->emplace_back(GetAvailableVASurfaceAsRefCounted(
+ &available_encode_surfaces_[encode_size]));
+ DCHECK(!!reconstructed_surfaces->back());
}
- DCHECK(*input_surface);
- return !!(*reconstructed_surface);
+ DCHECK(!base::Contains(*input_surfaces, nullptr));
+ DCHECK(!base::Contains(*reconstructed_surfaces, nullptr));
+ return true;
}
bool VaapiVideoEncodeAccelerator::CreateSurfacesForShmemEncoding(
@@ -677,6 +625,7 @@ bool VaapiVideoEncodeAccelerator::CreateSurfacesForShmemEncoding(
DCHECK_CALLED_ON_VALID_SEQUENCE(encoder_sequence_checker_);
DCHECK(!native_input_mode_);
DCHECK(frame.IsMappable());
+ TRACE_EVENT0("media,gpu", "VAVEA::CreateSurfacesForShmem");
if (expected_input_coded_size_ != frame.coded_size()) {
// In non-zero copy mode, the coded size of the incoming frame should be
@@ -689,7 +638,7 @@ bool VaapiVideoEncodeAccelerator::CreateSurfacesForShmemEncoding(
return false;
}
- DCHECK_EQ(visible_rect_.origin(), gfx::Point(0, 0));
+ DCHECK(visible_rect_.origin().IsOrigin());
if (visible_rect_ != frame.visible_rect()) {
// In non-zero copy mode, the client is responsible for scaling and
// cropping.
@@ -700,55 +649,158 @@ bool VaapiVideoEncodeAccelerator::CreateSurfacesForShmemEncoding(
return false;
}
- constexpr size_t kNumSurfaces = 2; // input and reconstructed surface.
- if (available_va_surfaces_.size() < kNumSurfaces) {
+ const gfx::Size& encode_size = encoder_->GetCodedSize();
+ constexpr size_t kNumSurfaces = 2; // For input and reconstructed surface.
+ if (!CreateSurfacesIfNeeded(*vaapi_wrapper_, available_encode_surfaces_,
+ encode_size,
+ {VaapiWrapper::SurfaceUsageHint::kVideoEncoder},
+ (num_frames_in_flight_ + 1) * kNumSurfaces)) {
+ return false;
+ }
+
+ auto& surfaces = available_encode_surfaces_[encode_size];
+ if (surfaces.size() < kNumSurfaces) {
DVLOGF(4) << "Not enough surfaces available";
return false;
}
- *input_surface = GetAvailableVASurfaceAsRefCounted(&available_va_surfaces_);
- *reconstructed_surface =
- GetAvailableVASurfaceAsRefCounted(&available_va_surfaces_);
+ *input_surface = GetAvailableVASurfaceAsRefCounted(&surfaces);
+ *reconstructed_surface = GetAvailableVASurfaceAsRefCounted(&surfaces);
return true;
}
-std::unique_ptr<VaapiVideoEncoderDelegate::EncodeJob>
-VaapiVideoEncodeAccelerator::CreateEncodeJob(scoped_refptr<VideoFrame> frame,
- bool force_keyframe,
- const gfx::Size& encode_size) {
+bool VaapiVideoEncodeAccelerator::CreateSurfacesIfNeeded(
+ VaapiWrapper& vaapi_wrapper,
+ ScopedVASurfacesMap& scoped_surfaces_map,
+ const gfx::Size& encode_size,
+ const std::vector<VaapiWrapper::SurfaceUsageHint>& surface_usage_hints,
+ size_t num_surfaces) {
DCHECK_CALLED_ON_VALID_SEQUENCE(encoder_sequence_checker_);
- DCHECK(frame);
+ if (base::Contains(scoped_surfaces_map, encode_size))
+ return true;
+
+ // The iHD driver doesn't properly align the resolution used for encoding.
+ // Align it only for surfaces created for the encoder (not for VPP output).
+ // TODO(https://github.com/intel/media-driver/issues/1232): Remove this
+ // |encode_size| alignment workaround.
+ gfx::Size surface_size = encode_size;
+ if (!base::Contains(surface_usage_hints,
+ VaapiWrapper::SurfaceUsageHint::kVideoProcessWrite)) {
+ surface_size = gfx::Size(base::bits::AlignUp(encode_size.width(), 16u),
+ base::bits::AlignUp(encode_size.height(), 16u));
+ }
- scoped_refptr<VASurface> input_surface;
- scoped_refptr<VASurface> reconstructed_surface;
- if (native_input_mode_) {
- if (!CreateSurfacesForGpuMemoryBufferEncoding(
- *frame, encode_size, &input_surface, &reconstructed_surface)) {
- return nullptr;
- }
- } else {
- if (!CreateSurfacesForShmemEncoding(*frame, &input_surface,
- &reconstructed_surface)) {
+ auto scoped_va_surfaces = vaapi_wrapper.CreateScopedVASurfaces(
+ kVaSurfaceFormat, surface_size, surface_usage_hints, num_surfaces,
+ /*visible_size=*/absl::nullopt,
+ /*va_fourcc=*/absl::nullopt);
+ if (scoped_va_surfaces.empty()) {
+ NOTIFY_ERROR(kPlatformFailureError, "Failed creating surfaces");
+ return false;
+ }
+
+ scoped_surfaces_map[encode_size] = std::move(scoped_va_surfaces);
+ return true;
+}
+
+scoped_refptr<VaapiWrapper>
+VaapiVideoEncodeAccelerator::CreateVppVaapiWrapper() {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(encoder_sequence_checker_);
+ DCHECK(!vpp_vaapi_wrapper_);
+ auto vpp_vaapi_wrapper = VaapiWrapper::Create(
+ VaapiWrapper::kVideoProcess, VAProfileNone,
+ EncryptionScheme::kUnencrypted,
+ base::BindRepeating(&ReportVaapiErrorToUMA,
+ "Media.VaapiVideoEncodeAccelerator.Vpp.VAAPIError"));
+ if (!vpp_vaapi_wrapper) {
+ NOTIFY_ERROR(kPlatformFailureError, "Failed to initialize VppVaapiWrapper");
+ return nullptr;
+ }
+ // VA context for VPP is not associated with a specific resolution.
+ if (!vpp_vaapi_wrapper->CreateContext(gfx::Size())) {
+ NOTIFY_ERROR(kPlatformFailureError, "Failed creating Context for VPP");
+ return nullptr;
+ }
+
+ return vpp_vaapi_wrapper;
+}
+
+scoped_refptr<VASurface> VaapiVideoEncodeAccelerator::ExecuteBlitSurface(
+ const VASurface& source_surface,
+ const gfx::Rect source_visible_rect,
+ const gfx::Size& encode_size) {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(encoder_sequence_checker_);
+ if (!vpp_vaapi_wrapper_) {
+ vpp_vaapi_wrapper_ = CreateVppVaapiWrapper();
+ if (!vpp_vaapi_wrapper_) {
+ NOTIFY_ERROR(kPlatformFailureError, "Failed to create Vpp");
return nullptr;
}
}
- DCHECK(input_surface && reconstructed_surface);
- auto coded_buffer = vaapi_wrapper_->CreateVABuffer(VAEncCodedBufferType,
- output_buffer_byte_size_);
- if (!coded_buffer) {
- NOTIFY_ERROR(kPlatformFailureError, "Failed creating coded buffer");
+ if (!CreateSurfacesIfNeeded(
+ *vpp_vaapi_wrapper_, available_vpp_dest_surfaces_, encode_size,
+ {VaapiWrapper::SurfaceUsageHint::kVideoProcessWrite,
+ VaapiWrapper::SurfaceUsageHint::kVideoEncoder},
+ num_frames_in_flight_ + 1)) {
+ VLOGF(1) << "Failed to create or reuse " << (num_frames_in_flight_ + 1)
+ << " VASurfaces of size " << encode_size.ToString();
return nullptr;
}
+ if (available_vpp_dest_surfaces_[encode_size].empty()) {
+ DVLOGF(4) << "Not enough vpp destination surface available";
+ return nullptr;
+ }
+
+ auto blit_surface = GetAvailableVASurfaceAsRefCounted(
+ &available_vpp_dest_surfaces_[encode_size]);
+ DCHECK(blit_surface);
+ DCHECK(vpp_vaapi_wrapper_);
+ if (!vpp_vaapi_wrapper_->BlitSurface(source_surface, *blit_surface,
+ source_visible_rect,
+ gfx::Rect(encode_size))) {
+ NOTIFY_ERROR(kPlatformFailureError,
+ "Failed BlitSurface on frame size: "
+ << source_surface.size().ToString()
+ << " (visible rect: " << source_visible_rect.ToString()
+ << ") -> encode size: " << encode_size.ToString());
+ return nullptr;
+ }
+
+ return blit_surface;
+}
+
+std::unique_ptr<VaapiVideoEncoderDelegate::EncodeJob>
+VaapiVideoEncodeAccelerator::CreateEncodeJob(
+ scoped_refptr<VideoFrame> frame,
+ bool force_keyframe,
+ scoped_refptr<VASurface> input_surface,
+ scoped_refptr<VASurface> reconstructed_surface) {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(encoder_sequence_checker_);
+ DCHECK(frame);
+ DCHECK(input_surface && reconstructed_surface);
+
+ std::unique_ptr<ScopedVABuffer> coded_buffer;
+ {
+ TRACE_EVENT1("media,gpu", "VAVEA::CreateVABuffer", "buffer size",
+ output_buffer_byte_size_);
+ coded_buffer = vaapi_wrapper_->CreateVABuffer(VAEncCodedBufferType,
+ output_buffer_byte_size_);
+ if (!coded_buffer) {
+ NOTIFY_ERROR(kPlatformFailureError, "Failed creating coded buffer");
+ return nullptr;
+ }
+ }
+
scoped_refptr<CodecPicture> picture;
switch (output_codec_) {
- case kCodecH264:
+ case VideoCodec::kH264:
picture = new VaapiH264Picture(std::move(reconstructed_surface));
break;
- case kCodecVP8:
+ case VideoCodec::kVP8:
picture = new VaapiVP8Picture(std::move(reconstructed_surface));
break;
- case kCodecVP9:
+ case VideoCodec::kVP9:
picture = new VaapiVP9Picture(std::move(reconstructed_surface));
break;
default:
@@ -792,16 +844,37 @@ void VaapiVideoEncodeAccelerator::EncodePendingInputs() {
continue;
}
+ const size_t num_spatial_layers = spatial_layer_resolutions.size();
+ std::vector<scoped_refptr<VASurface>> input_surfaces;
+ std::vector<scoped_refptr<VASurface>> reconstructed_surfaces;
+ if (native_input_mode_) {
+ if (!CreateSurfacesForGpuMemoryBufferEncoding(
+ *input_frame->frame, spatial_layer_resolutions, &input_surfaces,
+ &reconstructed_surfaces)) {
+ return;
+ }
+ } else {
+ DCHECK_EQ(num_spatial_layers, 1u);
+ input_surfaces.resize(1u);
+ reconstructed_surfaces.resize(1u);
+ if (!CreateSurfacesForShmemEncoding(*input_frame->frame,
+ &input_surfaces[0],
+ &reconstructed_surfaces[0])) {
+ return;
+ }
+ }
+
// Encoding different spatial layers for |input_frame|.
std::vector<std::unique_ptr<EncodeJob>> jobs;
- for (size_t spatial_idx = 0; spatial_idx < spatial_layer_resolutions.size();
+ for (size_t spatial_idx = 0; spatial_idx < num_spatial_layers;
++spatial_idx) {
std::unique_ptr<EncodeJob> job;
TRACE_EVENT0("media,gpu", "VAVEA::FromCreateEncodeJobToReturn");
const bool force_key =
(spatial_idx == 0 ? input_frame->force_keyframe : false);
job = CreateEncodeJob(input_frame->frame, force_key,
- spatial_layer_resolutions[spatial_idx]);
+ std::move(input_surfaces[spatial_idx]),
+ std::move(reconstructed_surfaces[spatial_idx]));
if (!job)
return;
@@ -815,8 +888,10 @@ void VaapiVideoEncodeAccelerator::EncodePendingInputs() {
}
TRACE_EVENT0("media,gpu", "VAVEA::FromExecuteToReturn");
- TRACE_EVENT0("media,gpu", "VAVEA::Execute");
- job->Execute();
+ {
+ TRACE_EVENT0("media,gpu", "VAVEA::Execute");
+ job->Execute();
+ }
submitted_encode_jobs_.push(std::move(job));
TryToReturnBitstreamBuffer();
@@ -940,7 +1015,8 @@ void VaapiVideoEncodeAccelerator::Destroy() {
child_weak_this_factory_.InvalidateWeakPtrs();
// We're destroying; cancel all callbacks.
- client_ptr_factory_.reset();
+ if (client_ptr_factory_)
+ client_ptr_factory_->InvalidateWeakPtrs();
encoder_task_runner_->PostTask(
FROM_HERE, base::BindOnce(&VaapiVideoEncodeAccelerator::DestroyTask,
@@ -964,13 +1040,13 @@ void VaapiVideoEncodeAccelerator::DestroyTask() {
if (vaapi_wrapper_)
vaapi_wrapper_->DestroyContext();
- available_va_surfaces_.clear();
+ available_encode_surfaces_.clear();
available_va_buffer_ids_.clear();
if (vpp_vaapi_wrapper_)
vpp_vaapi_wrapper_->DestroyContext();
- available_vpp_va_surfaces_.clear();
+ available_vpp_dest_surfaces_.clear();
while (!available_bitstream_buffers_.empty())
available_bitstream_buffers_.pop();
@@ -1013,7 +1089,7 @@ void VaapiVideoEncodeAccelerator::NotifyError(Error error) {
if (client_) {
client_->NotifyError(error);
- client_ptr_factory_.reset();
+ client_ptr_factory_->InvalidateWeakPtrs();
}
}
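CreateSurfacesIfNeeded() above pads |encode_size| to a multiple of 16 for encoder-only surfaces as a workaround for the iHD driver. A minimal sketch of that alignment, assuming base::bits::AlignUp() rounds up to the next multiple of a power of two (illustrative only):

#include <cstdint>

// Sketch only: round a dimension up to the next multiple of 16.
constexpr uint32_t AlignUp16(uint32_t v) { return (v + 15u) & ~15u; }
static_assert(AlignUp16(1280) == 1280, "already a multiple of 16");
static_assert(AlignUp16(1283) == 1296, "rounded up to the next multiple of 16");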
diff --git a/chromium/media/gpu/vaapi/vaapi_video_encode_accelerator.h b/chromium/media/gpu/vaapi/vaapi_video_encode_accelerator.h
index 53196eb4b0d..61f17749d2d 100644
--- a/chromium/media/gpu/vaapi/vaapi_video_encode_accelerator.h
+++ b/chromium/media/gpu/vaapi/vaapi_video_encode_accelerator.h
@@ -12,6 +12,7 @@
#include <memory>
#include "base/containers/queue.h"
+#include "base/containers/small_map.h"
#include "base/macros.h"
#include "base/memory/ref_counted_memory.h"
#include "base/sequence_checker.h"
@@ -20,10 +21,10 @@
#include "media/gpu/media_gpu_export.h"
#include "media/gpu/vaapi/vaapi_utils.h"
#include "media/gpu/vaapi/vaapi_video_encoder_delegate.h"
+#include "media/gpu/vaapi/vaapi_wrapper.h"
#include "media/video/video_encode_accelerator.h"
namespace media {
-class VaapiWrapper;
// A VideoEncodeAccelerator implementation that uses VA-API
// (https://01.org/vaapi) for HW-accelerated video encode.
@@ -31,6 +32,11 @@ class MEDIA_GPU_EXPORT VaapiVideoEncodeAccelerator
: public VideoEncodeAccelerator {
public:
VaapiVideoEncodeAccelerator();
+
+ VaapiVideoEncodeAccelerator(const VaapiVideoEncodeAccelerator&) = delete;
+ VaapiVideoEncodeAccelerator& operator=(const VaapiVideoEncodeAccelerator&) =
+ delete;
+
~VaapiVideoEncodeAccelerator() override;
// VideoEncodeAccelerator implementation.
@@ -67,22 +73,27 @@ class MEDIA_GPU_EXPORT VaapiVideoEncodeAccelerator
}
};
+ // The maximum size is four, to support the worst case of an input whose
+ // resolution differs from those of the maximum number of spatial layers (3).
+ using ScopedVASurfacesMap =
+ base::small_map<std::map<gfx::Size,
+ std::vector<std::unique_ptr<ScopedVASurface>>,
+ SizeComparator>,
+ 4>;
+
// Holds input frames coming from the client ready to be encoded.
struct InputFrameRef;
// Holds output buffers coming from the client ready to be filled.
struct BitstreamBufferRef;
- // one surface for input data.
- // one surface for reconstructed picture, which is later used for reference.
- static constexpr size_t kNumSurfacesPerInputVideoFrame = 1;
- static constexpr size_t kNumSurfacesForOutputPicture = 1;
-
//
// Tasks for each of the VEA interface calls to be executed on
// |encoder_task_runner_|.
//
void InitializeTask(const Config& config);
+ bool AttemptedInitialization() const { return !!client_ptr_factory_; }
+
// Enqueues |frame| onto the queue of pending inputs and attempts to continue
// encoding.
void EncodeTask(scoped_refptr<VideoFrame> frame, bool force_keyframe);
@@ -99,23 +110,17 @@ class MEDIA_GPU_EXPORT VaapiVideoEncodeAccelerator
void DestroyTask();
void FlushTask(FlushCallback flush_callback);
- // Blits |input_surface| to an internally-allocated |input_visible_rect|
- // surface, returning it. If |vpp_vaapi_wrapper_| is empty, this will create
- // it and corresponding surfaces. Returns nullptr on failure.
- scoped_refptr<VASurface> BlitSurfaceWithCreateVppIfNeeded(
- const VASurface& input_surface,
- const gfx::Rect& input_visible_rect,
- const gfx::Size& encode_size,
- size_t num_va_surfaces);
-
// Create input and reconstructed surfaces used in encoding whose sizes are
- // |encode_size| from GpuMemoryBuffer-based VideoFrame |frame|. This must be
- // called only in native input mode.
+ // |spatial_layer_resolutions| from GpuMemoryBuffer-based VideoFrame |frame|.
+ // The surfaces used as input to the encoder driver are filled into
+ // |input_surfaces|, and the ones the driver uses as reconstructed surfaces
+ // are filled into |reconstructed_surfaces|. This must be called only in
+ // native input mode.
bool CreateSurfacesForGpuMemoryBufferEncoding(
const VideoFrame& frame,
- const gfx::Size& encode_size,
- scoped_refptr<VASurface>* input_surface,
- scoped_refptr<VASurface>* reconstructed_surface);
+ const std::vector<gfx::Size>& spatial_layer_resolutions,
+ std::vector<scoped_refptr<VASurface>>* input_surfaces,
+ std::vector<scoped_refptr<VASurface>>* reconstructed_surfaces);
// Create input and reconstructed surfaces used in encoding from SharedMemory
// VideoFrame |frame|. This must be called only in non-native input mode.
@@ -124,12 +129,35 @@ class MEDIA_GPU_EXPORT VaapiVideoEncodeAccelerator
scoped_refptr<VASurface>* input_surface,
scoped_refptr<VASurface>* reconstructed_surface);
+ // Creates |num_surfaces| ScopedVASurfaces of size |encode_size| with
+ // |surface_usage_hints| using |vaapi_wrapper|, unless |scoped_surfaces_map|
+ // already has an entry for |encode_size|. Returns false if the surfaces
+ // cannot be created. The created surfaces are stored in
+ // |scoped_surfaces_map[encode_size]|.
+ bool CreateSurfacesIfNeeded(
+ VaapiWrapper& vaapi_wrapper,
+ ScopedVASurfacesMap& scoped_surfaces_map,
+ const gfx::Size& encode_size,
+ const std::vector<VaapiWrapper::SurfaceUsageHint>& surface_usage_hints,
+ size_t num_surfaces);
+
+ // Creates |vpp_vaapi_wrapper_| if it hasn't been created.
+ scoped_refptr<VaapiWrapper> CreateVppVaapiWrapper();
+ // Executes BlitSurface() through |vpp_vaapi_wrapper_| with |source_surface|
+ // and |source_visible_rect|. On success, returns the blit destination
+ // VASurface, whose size is |encode_size|; otherwise returns nullptr.
+ scoped_refptr<VASurface> ExecuteBlitSurface(
+ const VASurface& source_surface,
+ const gfx::Rect source_visible_rect,
+ const gfx::Size& encode_size);
+
// Checks if sufficient resources for a new encode job with |frame| as input
// are available, and if so, claims them by associating them with
// an EncodeJob, and returns the newly-created job; returns nullptr otherwise.
- std::unique_ptr<EncodeJob> CreateEncodeJob(scoped_refptr<VideoFrame> frame,
- bool force_keyframe,
- const gfx::Size& encode_size);
+ std::unique_ptr<EncodeJob> CreateEncodeJob(
+ scoped_refptr<VideoFrame> frame,
+ bool force_keyframe,
+ scoped_refptr<VASurface> input_surface,
+ scoped_refptr<VASurface> reconstructed_surface);
// Continues encoding frames as long as input_queue_ is not empty, and we are
// able to create new EncodeJobs.
@@ -183,14 +211,15 @@ class MEDIA_GPU_EXPORT VaapiVideoEncodeAccelerator
// VaapiWrapper is the owner of all HW resources (surfaces and buffers)
// and will free them on destruction.
- scoped_refptr<VaapiWrapper> vaapi_wrapper_;
+ scoped_refptr<VaapiWrapper> vaapi_wrapper_
+ GUARDED_BY_CONTEXT(encoder_sequence_checker_);
// The expected coded size of incoming video frames when |native_input_mode_|
// is false.
gfx::Size expected_input_coded_size_;
// The codec of the stream to be produced. Set during initialization.
- VideoCodec output_codec_ = kUnknownVideoCodec;
+ VideoCodec output_codec_ = VideoCodec::kUnknown;
// The visible rect to be encoded.
gfx::Rect visible_rect_;
@@ -203,14 +232,6 @@ class MEDIA_GPU_EXPORT VaapiVideoEncodeAccelerator
// input.
bool native_input_mode_ = false;
- // The number of va surfaces required for one video frame on Encode().
- // In |native_input_mode_|, one surface for input data is created from DmaBufs
- // of incoming VideoFrame. One surface for reconstructed picture is always
- // needed, which is later used for reference.
- // Therefore, |va_surfaces_per_video_frame| is one in |native_input_mode_|,
- // and two otherwise.
- size_t va_surfaces_per_video_frame_;
-
// The number of frames that need to be held during encoding.
size_t num_frames_in_flight_;
@@ -224,14 +245,12 @@ class MEDIA_GPU_EXPORT VaapiVideoEncodeAccelerator
// Should only be used on |encoder_task_runner_|.
std::unique_ptr<VaapiVideoEncoderDelegate> encoder_;
- // VA surfaces available for encoding.
- std::vector<std::unique_ptr<ScopedVASurface>> available_va_surfaces_;
- // VA surfaces available for scaling.
- // TODO(crbug.com/1186051): Use base::small_map.
- std::map<gfx::Size,
- std::vector<std::unique_ptr<ScopedVASurface>>,
- SizeComparator>
- available_vpp_va_surfaces_;
+ // Map of available input or reconstructed surfaces for encoding, indexed by
+ // layer resolution.
+ ScopedVASurfacesMap available_encode_surfaces_;
+ // Map of available VPP destination surfaces (used for scaling and cropping,
+ // then as encoder input), indexed by layer resolution.
+ ScopedVASurfacesMap available_vpp_dest_surfaces_;
// VA buffers for coded frames.
std::vector<VABufferID> available_va_buffer_ids_;
@@ -262,7 +281,8 @@ class MEDIA_GPU_EXPORT VaapiVideoEncodeAccelerator
// VaapiWrapper for VPP (Video Pre-Processing). This is used to scale down
// the picture sent to the vaapi encoder.
- scoped_refptr<VaapiWrapper> vpp_vaapi_wrapper_;
+ scoped_refptr<VaapiWrapper> vpp_vaapi_wrapper_
+ GUARDED_BY_CONTEXT(encoder_sequence_checker_);
// The completion callback of the Flush() function.
FlushCallback flush_callback_;
@@ -278,8 +298,6 @@ class MEDIA_GPU_EXPORT VaapiVideoEncodeAccelerator
this};
base::WeakPtrFactory<VaapiVideoEncodeAccelerator> encoder_weak_this_factory_{
this};
-
- DISALLOW_COPY_AND_ASSIGN(VaapiVideoEncodeAccelerator);
};
} // namespace media
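The new ScopedVASurfacesMap keys each pool of surfaces by layer resolution, so encode and VPP-destination pools can be created lazily per spatial layer and reused across frames. A minimal sketch of that access pattern, with std::map and a placeholder Surface type standing in for base::small_map and ScopedVASurface (illustrative only):

#include <cstddef>
#include <map>
#include <memory>
#include <utility>
#include <vector>

struct Surface {};                 // placeholder for ScopedVASurface
using Size = std::pair<int, int>;  // placeholder for gfx::Size + SizeComparator
using SurfacePoolMap = std::map<Size, std::vector<std::unique_ptr<Surface>>>;

// Sketch only: populate the pool for |encode_size| the first time it is seen,
// mirroring CreateSurfacesIfNeeded()'s early return on an existing entry.
bool EnsurePool(SurfacePoolMap& pools, const Size& encode_size,
                std::size_t num_surfaces) {
  if (pools.count(encode_size))
    return true;
  std::vector<std::unique_ptr<Surface>> surfaces;
  for (std::size_t i = 0; i < num_surfaces; ++i)
    surfaces.push_back(std::make_unique<Surface>());
  pools.emplace(encode_size, std::move(surfaces));
  return true;
}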
diff --git a/chromium/media/gpu/vaapi/vaapi_video_encode_accelerator_unittest.cc b/chromium/media/gpu/vaapi/vaapi_video_encode_accelerator_unittest.cc
index 4d6ba488bf6..6bbd75bb2c7 100644
--- a/chromium/media/gpu/vaapi/vaapi_video_encode_accelerator_unittest.cc
+++ b/chromium/media/gpu/vaapi/vaapi_video_encode_accelerator_unittest.cc
@@ -8,6 +8,7 @@
#include <numeric>
#include <vector>
+#include "base/bits.h"
#include "base/run_loop.h"
#include "base/test/gmock_callback_support.h"
#include "base/test/task_environment.h"
@@ -149,13 +150,7 @@ class MockVaapiWrapper : public VaapiWrapper {
explicit MockVaapiWrapper(CodecMode mode) : VaapiWrapper(mode) {}
MOCK_METHOD2(GetVAEncMaxNumOfRefFrames, bool(VideoCodecProfile, size_t*));
- MOCK_METHOD5(CreateContextAndScopedVASurfaces,
- std::vector<std::unique_ptr<ScopedVASurface>>(
- unsigned int,
- const gfx::Size&,
- const std::vector<SurfaceUsageHint>&,
- size_t,
- const absl::optional<gfx::Size>&));
+ MOCK_METHOD1(CreateContext, bool(const gfx::Size&));
MOCK_METHOD6(CreateScopedVASurfaces,
std::vector<std::unique_ptr<ScopedVASurface>>(
unsigned int,
@@ -164,12 +159,6 @@ class MockVaapiWrapper : public VaapiWrapper {
size_t,
const absl::optional<gfx::Size>&,
const absl::optional<uint32_t>&));
- MOCK_METHOD5(BlitSurface,
- bool(const VASurface&,
- const VASurface&,
- absl::optional<gfx::Rect>,
- absl::optional<gfx::Rect>,
- VideoRotation));
MOCK_METHOD2(CreateVABuffer,
std::unique_ptr<ScopedVABuffer>(VABufferType, size_t));
MOCK_METHOD2(CreateVASurfaceForPixmap,
@@ -184,6 +173,26 @@ class MockVaapiWrapper : public VaapiWrapper {
MOCK_METHOD0(DestroyContext, void());
MOCK_METHOD1(DestroySurface, void(VASurfaceID));
+ MOCK_METHOD5(DoBlitSurface,
+ bool(const VASurface&,
+ const VASurface&,
+ absl::optional<gfx::Rect>,
+ absl::optional<gfx::Rect>,
+ VideoRotation));
+ bool BlitSurface(const VASurface& va_surface_src,
+ const VASurface& va_surface_dest,
+ absl::optional<gfx::Rect> src_rect = absl::nullopt,
+ absl::optional<gfx::Rect> dest_rect = absl::nullopt,
+ VideoRotation rotation = VIDEO_ROTATION_0
+#if BUILDFLAG(IS_CHROMEOS_ASH)
+ ,
+ VAProtectedSessionID va_protected_session_id = VA_INVALID_ID
+#endif
+ ) override {
+ return DoBlitSurface(va_surface_src, va_surface_dest, src_rect, dest_rect,
+ rotation);
+ }
+
private:
~MockVaapiWrapper() override = default;
};
@@ -227,7 +236,7 @@ class VaapiVideoEncodeAcceleratorTest
ResetEncoder();
}
- void ResetEncoder() {
+ void ResetEncoder() NO_THREAD_SAFETY_ANALYSIS {
encoder_.reset(new VaapiVideoEncodeAccelerator);
auto* vaapi_encoder =
reinterpret_cast<VaapiVideoEncodeAccelerator*>(encoder_.get());
@@ -280,7 +289,8 @@ class VaapiVideoEncodeAcceleratorTest
return encoder_->Initialize(config, &client_);
}
- void InitializeSequenceForVP9(const VideoEncodeAccelerator::Config& config) {
+ void InitializeSequenceForVP9(const VideoEncodeAccelerator::Config& config)
+ NO_THREAD_SAFETY_ANALYSIS {
base::RunLoop run_loop;
::testing::InSequence s;
constexpr auto kBitrateControl = VaapiVideoEncoderDelegate::BitrateControl::
@@ -288,30 +298,52 @@ class VaapiVideoEncodeAcceleratorTest
const size_t num_spatial_layers = config.spatial_layers.size();
// Scaling is needed only for the non-highest spatial layers, so the number
// of VPP destination surface lists is |num_spatial_layers - 1|.
- vpp_svc_va_surface_ids_.resize(num_spatial_layers - 1);
- vpp_svc_mock_vaapi_wrapper_ =
+ va_encode_surface_ids_.resize(num_spatial_layers);
+ va_vpp_dest_surface_ids_.resize(num_spatial_layers - 1);
+ mock_vpp_vaapi_wrapper_ =
base::MakeRefCounted<MockVaapiWrapper>(VaapiWrapper::kVideoProcess);
auto* vaapi_encoder =
reinterpret_cast<VaapiVideoEncodeAccelerator*>(encoder_.get());
- vaapi_encoder->vpp_vaapi_wrapper_ = vpp_svc_mock_vaapi_wrapper_;
+ vaapi_encoder->vpp_vaapi_wrapper_ = mock_vpp_vaapi_wrapper_;
EXPECT_CALL(*mock_encoder_,
Initialize(_, MatchesVaapiVideoEncoderDelegateConfig(
kMaxNumOfRefFrames, kBitrateControl)))
.WillOnce(Return(true));
+ EXPECT_CALL(*mock_vaapi_wrapper_, CreateContext(kDefaultEncodeSize))
+ .WillOnce(Return(true));
+ EXPECT_CALL(client_, RequireBitstreamBuffers(_, kDefaultEncodeSize, _))
+ .WillOnce(WithArgs<2>([this](size_t output_buffer_size) {
+ this->output_buffer_size_ = output_buffer_size;
+ }));
+ EXPECT_CALL(client_, NotifyEncoderInfoChange(MatchesEncoderInfo(
+ num_spatial_layers,
+ config.spatial_layers[0].num_of_temporal_layers)))
+ .WillOnce(RunClosure(run_loop.QuitClosure()));
+ ASSERT_TRUE(InitializeVideoEncodeAccelerator(config));
+ run_loop.Run();
+ }
+
+ void EncodeSequenceForVP9SingleSpatialLayer(
+ bool use_temporal_layer_encoding) {
+ ::testing::InSequence s;
+
+ constexpr VASurfaceID kInputSurfaceId = 1234;
EXPECT_CALL(*mock_vaapi_wrapper_,
- CreateContextAndScopedVASurfaces(
+ CreateScopedVASurfaces(
VA_RT_FORMAT_YUV420, kDefaultEncodeSize,
std::vector<VaapiWrapper::SurfaceUsageHint>{
VaapiWrapper::SurfaceUsageHint::kVideoEncoder},
- _, _))
+ _, absl::optional<gfx::Size>(), absl::optional<uint32_t>()))
.WillOnce(
- WithArgs<0, 1, 3>([&surface_ids = this->va_surface_ids_,
- &vaapi_wrapper = this->mock_vaapi_wrapper_](
+ WithArgs<0, 1, 3>([&surface_ids = this->va_encode_surface_ids_[0],
+ &vaapi_wrapper = this->mock_vaapi_wrapper_,
+ va_surface_id = kInputSurfaceId](
unsigned int format, const gfx::Size& size,
size_t num_surfaces) {
surface_ids.resize(num_surfaces);
std::iota(surface_ids.begin(), surface_ids.end(), 1);
+ surface_ids.back() = va_surface_id;
std::vector<std::unique_ptr<ScopedVASurface>> va_surfaces;
for (const VASurfaceID id : surface_ids) {
va_surfaces.push_back(std::make_unique<ScopedVASurface>(
@@ -319,21 +351,6 @@ class VaapiVideoEncodeAcceleratorTest
}
return va_surfaces;
}));
- EXPECT_CALL(client_, RequireBitstreamBuffers(_, kDefaultEncodeSize, _))
- .WillOnce(WithArgs<2>([this](size_t output_buffer_size) {
- this->output_buffer_size_ = output_buffer_size;
- }));
- EXPECT_CALL(client_, NotifyEncoderInfoChange(MatchesEncoderInfo(
- num_spatial_layers,
- config.spatial_layers[0].num_of_temporal_layers)))
- .WillOnce(RunClosure(run_loop.QuitClosure()));
- ASSERT_TRUE(InitializeVideoEncodeAccelerator(config));
- run_loop.Run();
- }
-
- void EncodeSequenceForVP9SingleSpatialLayer(
- bool use_temporal_layer_encoding) {
- ::testing::InSequence s;
constexpr VABufferID kCodedBufferId = 123;
EXPECT_CALL(*mock_vaapi_wrapper_,
@@ -342,29 +359,27 @@ class VaapiVideoEncodeAcceleratorTest
return ScopedVABuffer::CreateForTesting(
kCodedBufferId, VAEncCodedBufferType, buffer_size);
}));
- ASSERT_FALSE(va_surface_ids_.empty());
- const VASurfaceID kInputSurfaceId = va_surface_ids_.back();
+
EXPECT_CALL(*mock_encoder_, PrepareEncodeJob(_))
- .WillOnce(WithArgs<0>(
- [encoder = encoder_.get(), kCodedBufferId,
- use_temporal_layer_encoding,
- kInputSurfaceId](VaapiVideoEncoderDelegate::EncodeJob* job) {
- if (use_temporal_layer_encoding) {
- // Set Vp9Metadata on temporal layer encoding.
- CodecPicture* picture = job->picture().get();
- reinterpret_cast<VP9Picture*>(picture)->metadata_for_encoding =
- Vp9Metadata();
- }
- auto* vaapi_encoder =
- reinterpret_cast<VaapiVideoEncodeAccelerator*>(encoder);
- job->AddPostExecuteCallback(base::BindOnce(
- &VP9VaapiVideoEncoderDelegate::NotifyEncodedChunkSize,
- base::Unretained(
- reinterpret_cast<VP9VaapiVideoEncoderDelegate*>(
- vaapi_encoder->encoder_.get())),
- kCodedBufferId, kInputSurfaceId));
- return true;
- }));
+ .WillOnce(WithArgs<0>([encoder = encoder_.get(), kCodedBufferId,
+ use_temporal_layer_encoding,
+ va_surface_id = kInputSurfaceId](
+ VaapiVideoEncoderDelegate::EncodeJob* job) {
+ if (use_temporal_layer_encoding) {
+ // Set Vp9Metadata on temporal layer encoding.
+ CodecPicture* picture = job->picture().get();
+ reinterpret_cast<VP9Picture*>(picture)->metadata_for_encoding =
+ Vp9Metadata();
+ }
+ auto* vaapi_encoder =
+ reinterpret_cast<VaapiVideoEncodeAccelerator*>(encoder);
+ job->AddPostExecuteCallback(base::BindOnce(
+ &VP9VaapiVideoEncoderDelegate::NotifyEncodedChunkSize,
+ base::Unretained(reinterpret_cast<VP9VaapiVideoEncoderDelegate*>(
+ vaapi_encoder->encoder_.get())),
+ kCodedBufferId, va_surface_id));
+ return true;
+ }));
EXPECT_CALL(
*mock_vaapi_wrapper_,
UploadVideoFrameToSurface(_, kInputSurfaceId, kDefaultEncodeSize))
@@ -456,36 +471,38 @@ class VaapiVideoEncodeAcceleratorTest
::testing::InSequence s;
+ // Create VASurface from GpuMemory-based VideoFrame.
+ const VASurfaceID kSourceSurfaceId = 123456;
+ EXPECT_CALL(*mock_vaapi_wrapper_, CreateVASurfaceForPixmap(_, _))
+ .WillOnce(
+ Return(new VASurface(kSourceSurfaceId, kDefaultEncodeSize,
+ VA_RT_FORMAT_YUV420, base::DoNothing())));
+
+ constexpr VASurfaceID kVppDestSurfaceIds[] = {456, 457};
+ constexpr VASurfaceID kEncodeSurfaceIds[] = {458, 459, 460};
std::vector<gfx::Size> svc_resolutions =
GetDefaultSVCResolutions(num_spatial_layers);
- constexpr VABufferID kCodedBufferIds[] = {123, 124, 125};
+ // Create Surfaces.
for (size_t i = 0; i < num_spatial_layers; ++i) {
- const VASurfaceID kInputSurfaceId = va_surface_ids_.back();
- const gfx::Size layer_size = svc_resolutions[i];
- EXPECT_CALL(*mock_vaapi_wrapper_, CreateVASurfaceForPixmap(_, _))
- .WillOnce(WithArgs<0>([kInputSurfaceId, layer_size]() {
- return new VASurface(kInputSurfaceId, layer_size,
- VA_RT_FORMAT_YUV420, base::DoNothing());
- }));
-
- // Scaling and vpp only needed for non highest spatial layer.
if (i < num_spatial_layers - 1) {
- if (vpp_svc_va_surface_ids_[i].empty()) {
+ if (va_vpp_dest_surface_ids_[i].empty()) {
EXPECT_CALL(
- *vpp_svc_mock_vaapi_wrapper_,
+ *mock_vpp_vaapi_wrapper_,
CreateScopedVASurfaces(
- VA_RT_FORMAT_YUV420, layer_size,
+ VA_RT_FORMAT_YUV420, svc_resolutions[i],
std::vector<VaapiWrapper::SurfaceUsageHint>{
VaapiWrapper::SurfaceUsageHint::kVideoProcessWrite,
VaapiWrapper::SurfaceUsageHint::kVideoEncoder},
_, absl::optional<gfx::Size>(), absl::optional<uint32_t>()))
.WillOnce(WithArgs<0, 1, 3>(
- [&surface_ids = this->vpp_svc_va_surface_ids_[i],
- &vaapi_wrapper = this->vpp_svc_mock_vaapi_wrapper_](
+ [&surface_ids = this->va_vpp_dest_surface_ids_[i],
+ &vaapi_wrapper = this->mock_vpp_vaapi_wrapper_,
+ vpp_dest_surface_id = kVppDestSurfaceIds[i]](
unsigned int format, const gfx::Size& size,
size_t num_surfaces) {
surface_ids.resize(num_surfaces);
std::iota(surface_ids.begin(), surface_ids.end(), 1);
+ surface_ids.back() = vpp_dest_surface_id;
std::vector<std::unique_ptr<ScopedVASurface>> va_surfaces;
for (const VASurfaceID id : surface_ids) {
va_surfaces.push_back(std::make_unique<ScopedVASurface>(
@@ -494,15 +511,54 @@ class VaapiVideoEncodeAcceleratorTest
return va_surfaces;
}));
}
-
absl::optional<gfx::Rect> default_rect = gfx::Rect(kDefaultEncodeSize);
- absl::optional<gfx::Rect> layer_rect = gfx::Rect(layer_size);
- EXPECT_CALL(*vpp_svc_mock_vaapi_wrapper_,
- BlitSurface(_, _, default_rect, layer_rect,
- VideoRotation::VIDEO_ROTATION_0))
+ absl::optional<gfx::Rect> layer_rect = gfx::Rect(svc_resolutions[i]);
+ EXPECT_CALL(*mock_vpp_vaapi_wrapper_,
+ DoBlitSurface(_, _, default_rect, layer_rect,
+ VideoRotation::VIDEO_ROTATION_0))
.WillOnce(Return(true));
}
+ // For reconstructed surface.
+ if (va_encode_surface_ids_[i].empty()) {
+ // TODO(https://github.com/intel/media-driver/issues/1232): Remove this
+ // workaround of aligning |encode_size|.
+ gfx::Size aligned_size(
+ base::bits::AlignUp(svc_resolutions[i].width(), 16),
+ base::bits::AlignUp(svc_resolutions[i].height(), 16));
+
+ EXPECT_CALL(
+ *mock_vaapi_wrapper_,
+ CreateScopedVASurfaces(
+ VA_RT_FORMAT_YUV420, aligned_size,
+ std::vector<VaapiWrapper::SurfaceUsageHint>{
+ VaapiWrapper::SurfaceUsageHint::kVideoEncoder},
+ _, absl::optional<gfx::Size>(), absl::optional<uint32_t>()))
+ .WillOnce(WithArgs<0, 1, 3>(
+ [&surface_ids = this->va_encode_surface_ids_[i],
+ &vaapi_wrapper = this->mock_vaapi_wrapper_,
+ va_encode_surface_id = kEncodeSurfaceIds[i]](
+ unsigned int format, const gfx::Size& size,
+ size_t num_surfaces) {
+ surface_ids.resize(num_surfaces);
+ std::iota(surface_ids.begin(), surface_ids.end(), 1);
+ surface_ids.back() = va_encode_surface_id;
+ std::vector<std::unique_ptr<ScopedVASurface>> va_surfaces;
+ for (const VASurfaceID id : surface_ids) {
+ va_surfaces.push_back(std::make_unique<ScopedVASurface>(
+ vaapi_wrapper, id, size, format));
+ }
+ return va_surfaces;
+ }));
+ }
+ }
+
+ for (size_t i = 0; i < num_spatial_layers; ++i) {
+ }
+
+ // Create CodedBuffers in creating EncodeJobs.
+ constexpr VABufferID kCodedBufferIds[] = {123, 124, 125};
+ for (size_t i = 0; i < num_spatial_layers; ++i) {
const VABufferID kCodedBufferId = kCodedBufferIds[i];
EXPECT_CALL(*mock_vaapi_wrapper_,
CreateVABuffer(VAEncCodedBufferType, output_buffer_size_))
@@ -514,13 +570,13 @@ class VaapiVideoEncodeAcceleratorTest
for (size_t i = 0; i < num_spatial_layers; ++i) {
const VABufferID kCodedBufferId = kCodedBufferIds[i];
- std::vector<VASurfaceID>* surfaces = i < num_spatial_layers - 1
- ? &vpp_svc_va_surface_ids_[i]
- : &va_surface_ids_;
+ const VASurfaceID input_surface_id =
+ i < num_spatial_layers - 1 ? kVppDestSurfaceIds[i] : kSourceSurfaceId;
+
EXPECT_CALL(*mock_encoder_, PrepareEncodeJob(_))
- .WillOnce(
- WithArgs<0>([encoder = encoder_.get(), kCodedBufferId, surfaces](
- VaapiVideoEncoderDelegate::EncodeJob* job) {
+ .WillOnce(WithArgs<0>(
+ [encoder = encoder_.get(), kCodedBufferId,
+ input_surface_id](VaapiVideoEncoderDelegate::EncodeJob* job) {
// Set Vp9Metadata on spatial layer encoding.
CodecPicture* picture = job->picture().get();
reinterpret_cast<VP9Picture*>(picture)->metadata_for_encoding =
@@ -532,7 +588,7 @@ class VaapiVideoEncodeAcceleratorTest
base::Unretained(
reinterpret_cast<VP9VaapiVideoEncoderDelegate*>(
vaapi_encoder->encoder_.get())),
- kCodedBufferId, surfaces->back()));
+ kCodedBufferId, input_surface_id));
return true;
}));
EXPECT_CALL(*mock_vaapi_wrapper_, ExecuteAndDestroyPendingBuffers(_))
@@ -570,25 +626,22 @@ class VaapiVideoEncodeAcceleratorTest
gpu::MailboxHolder mailbox_holders[media::VideoFrame::kMaxPlanes];
auto frame = VideoFrame::WrapExternalGpuMemoryBuffer(
gfx::Rect(kDefaultEncodeSize), kDefaultEncodeSize, std::move(gmb),
- mailbox_holders,
- base::DoNothing::Once<const gpu::SyncToken&,
- std::unique_ptr<gfx::GpuMemoryBuffer>>(),
- base::TimeDelta());
+ mailbox_holders, base::DoNothing(), base::TimeDelta());
ASSERT_TRUE(frame);
encoder_->Encode(std::move(frame), /*force_keyframe=*/false);
run_loop.Run();
}
size_t output_buffer_size_ = 0;
- std::vector<VASurfaceID> va_surface_ids_;
- std::vector<std::vector<VASurfaceID>> vpp_svc_va_surface_ids_;
+ std::vector<std::vector<VASurfaceID>> va_encode_surface_ids_;
+ std::vector<std::vector<VASurfaceID>> va_vpp_dest_surface_ids_;
base::test::TaskEnvironment task_environment_;
MockVideoEncodeAcceleratorClient client_;
// |encoder_| is a VideoEncodeAccelerator to use its specialized Deleter that
// calls Destroy() so that destruction threading is respected.
std::unique_ptr<VideoEncodeAccelerator> encoder_;
scoped_refptr<MockVaapiWrapper> mock_vaapi_wrapper_;
- scoped_refptr<MockVaapiWrapper> vpp_svc_mock_vaapi_wrapper_;
+ scoped_refptr<MockVaapiWrapper> mock_vpp_vaapi_wrapper_;
MockVP9VaapiVideoEncoderDelegate* mock_encoder_ = nullptr;
};
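
A minimal standalone sketch of the gmock pattern used in the test fixture above, assuming plain gtest/gmock; SurfaceFactory, FakeSurface and the IDs below are made-up stand-ins for the Chromium types. The mocked action fills a test-owned vector of IDs, plants one well-known ID at the back, and returns matching surface objects so that later expectations can refer to that ID.

#include <cstddef>
#include <cstdint>
#include <memory>
#include <numeric>
#include <vector>

#include "gmock/gmock.h"
#include "gtest/gtest.h"

using ::testing::_;
using ::testing::WithArgs;

using FakeSurfaceID = uint32_t;

struct FakeSurface {
  explicit FakeSurface(FakeSurfaceID id) : id(id) {}
  FakeSurfaceID id;
};

// Hypothetical interface standing in for the surface-allocating wrapper.
class SurfaceFactory {
 public:
  virtual ~SurfaceFactory() = default;
  virtual std::vector<std::unique_ptr<FakeSurface>> CreateSurfaces(
      size_t num_surfaces) = 0;
};

class MockSurfaceFactory : public SurfaceFactory {
 public:
  MOCK_METHOD(std::vector<std::unique_ptr<FakeSurface>>,
              CreateSurfaces,
              (size_t num_surfaces),
              (override));
};

TEST(SurfaceFactorySketch, ActionRecordsKnownSurfaceIds) {
  constexpr FakeSurfaceID kInputSurfaceId = 1234;
  std::vector<FakeSurfaceID> surface_ids;  // Observed by the test body.

  MockSurfaceFactory factory;
  EXPECT_CALL(factory, CreateSurfaces(_))
      .WillOnce(WithArgs<0>([&surface_ids,
                             kInputSurfaceId](size_t num_surfaces) {
        surface_ids.resize(num_surfaces);
        std::iota(surface_ids.begin(), surface_ids.end(), 1);  // 1, 2, 3, ...
        surface_ids.back() = kInputSurfaceId;  // Well-known ID for later use.
        std::vector<std::unique_ptr<FakeSurface>> surfaces;
        for (const FakeSurfaceID id : surface_ids)
          surfaces.push_back(std::make_unique<FakeSurface>(id));
        return surfaces;
      }));

  const auto surfaces = factory.CreateSurfaces(3);
  EXPECT_EQ(surfaces.back()->id, kInputSurfaceId);
}
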
diff --git a/chromium/media/gpu/vaapi/vaapi_video_encoder_delegate.h b/chromium/media/gpu/vaapi/vaapi_video_encoder_delegate.h
index 5bdb6c27c28..d1bdbb4d9c2 100644
--- a/chromium/media/gpu/vaapi/vaapi_video_encoder_delegate.h
+++ b/chromium/media/gpu/vaapi/vaapi_video_encoder_delegate.h
@@ -88,6 +88,10 @@ class VaapiVideoEncoderDelegate {
scoped_refptr<VASurface> input_surface,
scoped_refptr<CodecPicture> picture,
std::unique_ptr<ScopedVABuffer> coded_buffer);
+
+ EncodeJob(const EncodeJob&) = delete;
+ EncodeJob& operator=(const EncodeJob&) = delete;
+
~EncodeJob();
// Schedules a callback to be run immediately before this job is executed.
@@ -163,8 +167,6 @@ class VaapiVideoEncoderDelegate {
// Reference pictures required for this job.
std::vector<scoped_refptr<CodecPicture>> reference_pictures_;
-
- DISALLOW_COPY_AND_ASSIGN(EncodeJob);
};
// Initializes the encoder with requested parameter set |config| and
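
The hunk above is one instance of a pattern applied throughout this patch: the DISALLOW_COPY_AND_ASSIGN macro is removed and replaced by explicitly deleted copy operations declared next to the constructors. A minimal standalone sketch of the resulting class shape, with a made-up class name:

class EncodeJobLike {
 public:
  EncodeJobLike() = default;

  // Deleted copy constructor and copy assignment make the type non-copyable
  // without relying on a macro, and keep the deletions visible in the public
  // interface right next to the constructors.
  EncodeJobLike(const EncodeJobLike&) = delete;
  EncodeJobLike& operator=(const EncodeJobLike&) = delete;

  ~EncodeJobLike() = default;
};

int main() {
  EncodeJobLike job;
  // EncodeJobLike copy = job;          // Would not compile: copy is deleted.
  // EncodeJobLike other; other = job;  // Would not compile: assign is deleted.
  (void)job;
  return 0;
}
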
diff --git a/chromium/media/gpu/vaapi/vaapi_webp_decoder.h b/chromium/media/gpu/vaapi/vaapi_webp_decoder.h
index 9cf621e3568..dd581ef01df 100644
--- a/chromium/media/gpu/vaapi/vaapi_webp_decoder.h
+++ b/chromium/media/gpu/vaapi/vaapi_webp_decoder.h
@@ -15,6 +15,10 @@ namespace media {
class VaapiWebPDecoder : public VaapiImageDecoder {
public:
VaapiWebPDecoder();
+
+ VaapiWebPDecoder(const VaapiWebPDecoder&) = delete;
+ VaapiWebPDecoder& operator=(const VaapiWebPDecoder&) = delete;
+
~VaapiWebPDecoder() override;
// VaapiImageDecoder implementation.
@@ -25,8 +29,6 @@ class VaapiWebPDecoder : public VaapiImageDecoder {
// VaapiImageDecoder implementation.
VaapiImageDecodeStatus AllocateVASurfaceAndSubmitVABuffers(
base::span<const uint8_t> encoded_image) override;
-
- DISALLOW_COPY_AND_ASSIGN(VaapiWebPDecoder);
};
} // namespace media
diff --git a/chromium/media/gpu/vaapi/vaapi_wrapper.cc b/chromium/media/gpu/vaapi/vaapi_wrapper.cc
index c36411d380f..e0012e28e3f 100644
--- a/chromium/media/gpu/vaapi/vaapi_wrapper.cc
+++ b/chromium/media/gpu/vaapi/vaapi_wrapper.cc
@@ -65,7 +65,7 @@
#include "ui/gl/gl_bindings.h"
#include "ui/gl/gl_implementation.h"
-#if defined(USE_X11)
+#if BUILDFLAG(USE_VAAPI_X11)
typedef XID Drawable;
extern "C" {
@@ -73,7 +73,7 @@ extern "C" {
}
#include "ui/gfx/x/connection.h" // nogncheck
-#endif
+#endif // BUILDFLAG(USE_VAAPI_X11)
#if defined(USE_OZONE)
#include "ui/ozone/public/ozone_platform.h"
@@ -87,14 +87,14 @@ using media_gpu_vaapi::kModuleVa_prot;
using media_gpu_vaapi::kModuleVa;
using media_gpu_vaapi::kModuleVa_drm;
-#if defined(USE_X11)
+#if BUILDFLAG(USE_VAAPI_X11)
using media_gpu_vaapi::kModuleVa_x11;
-#endif
+#endif // BUILDFLAG(USE_VAAPI_X11)
using media_gpu_vaapi::InitializeStubs;
using media_gpu_vaapi::IsVaInitialized;
-#if defined(USE_X11)
+#if BUILDFLAG(USE_VAAPI_X11)
using media_gpu_vaapi::IsVa_x11Initialized;
-#endif
+#endif // BUILDFLAG(USE_VAAPI_X11)
using media_gpu_vaapi::IsVa_drmInitialized;
using media_gpu_vaapi::StubPathMap;
@@ -196,23 +196,47 @@ const char* VaapiFunctionName(VaapiFunctions function) {
report_error_to_uma_cb_.Run(function); \
} while (0)
-#define VA_LOG_ON_ERROR(va_error, function) \
- do { \
- if ((va_error) != VA_STATUS_SUCCESS) \
- LOG_VA_ERROR_AND_REPORT(va_error, function); \
+#define VA_LOG_ON_ERROR(va_res, function) \
+ do { \
+ const VAStatus va_res_va_log_on_error = (va_res); \
+ if (va_res_va_log_on_error != VA_STATUS_SUCCESS) \
+ LOG_VA_ERROR_AND_REPORT(va_res_va_log_on_error, function); \
} while (0)
-#define VA_SUCCESS_OR_RETURN(va_error, function, ret) \
- do { \
- if ((va_error) != VA_STATUS_SUCCESS) { \
- LOG_VA_ERROR_AND_REPORT(va_error, function); \
- return (ret); \
- } \
- DVLOG(3) << VaapiFunctionName(function); \
+#define VA_SUCCESS_OR_RETURN(va_res, function, ret) \
+ do { \
+ const VAStatus va_res_va_sucess_or_return = (va_res); \
+ if (va_res_va_sucess_or_return != VA_STATUS_SUCCESS) { \
+ LOG_VA_ERROR_AND_REPORT(va_res_va_sucess_or_return, function); \
+ return (ret); \
+ } \
+ DVLOG(3) << VaapiFunctionName(function); \
} while (0)
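
The macro rewrite above stores the argument in a local before testing and logging it. A standalone sketch, in plain C++ rather than the Chromium macros, of why that matters: if the argument is itself a libva call, the old form evaluates it twice on failure and so repeats the call's side effects.

#include <cstdio>

static int g_calls = 0;

int DoWork() {   // Stands in for a vaFoo() call with side effects.
  ++g_calls;
  return 1;      // Non-zero means failure in this sketch.
}

// Old shape: |expr| appears twice, so a failing call runs twice.
#define CHECK_TWICE(expr)                      \
  do {                                         \
    if ((expr) != 0)                           \
      std::printf("failed: %d\n", (expr));     \
  } while (0)

// New shape: evaluate once into a local, then reuse the local.
#define CHECK_ONCE(expr)                                \
  do {                                                  \
    const int result_check_once = (expr);               \
    if (result_check_once != 0)                         \
      std::printf("failed: %d\n", result_check_once);   \
  } while (0)

int main() {
  CHECK_TWICE(DoWork());
  std::printf("calls after CHECK_TWICE: %d\n", g_calls);  // Prints 2.
  g_calls = 0;
  CHECK_ONCE(DoWork());
  std::printf("calls after CHECK_ONCE: %d\n", g_calls);   // Prints 1.
  return 0;
}
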
namespace {
+uint32_t BufferFormatToVAFourCC(gfx::BufferFormat fmt) {
+ switch (fmt) {
+ case gfx::BufferFormat::BGRX_8888:
+ return VA_FOURCC_BGRX;
+ case gfx::BufferFormat::BGRA_8888:
+ return VA_FOURCC_BGRA;
+ case gfx::BufferFormat::RGBX_8888:
+ return VA_FOURCC_RGBX;
+ case gfx::BufferFormat::RGBA_8888:
+ return VA_FOURCC_RGBA;
+ case gfx::BufferFormat::YVU_420:
+ return VA_FOURCC_YV12;
+ case gfx::BufferFormat::YUV_420_BIPLANAR:
+ return VA_FOURCC_NV12;
+ case gfx::BufferFormat::P010:
+ return VA_FOURCC_P010;
+ default:
+ NOTREACHED() << gfx::BufferFormatToString(fmt);
+ return 0;
+ }
+}
+
media::VAImplementation VendorStringToImplementationType(
const std::string& va_vendor_string) {
if (base::StartsWith(va_vendor_string, "Mesa Gallium driver",
@@ -535,10 +559,20 @@ VADisplayState::VADisplayState()
bool VADisplayState::Initialize() {
base::AutoLock auto_lock(va_lock_);
+#if defined(USE_OZONE) && defined(OS_LINUX)
+ // TODO(crbug.com/1116701): add vaapi support for other Ozone platforms on
+ // Linux. See comment in OzonePlatform::PlatformProperties::supports_vaapi
+ // for more details. This will also require revisiting everything that's
+ // guarded by USE_VAAPI_X11. For example, if USE_VAAPI_X11 is true, but the
+ // user chooses the Wayland backend for Ozone at runtime, then many things (if
+ // not all) that we do for X11 won't apply.
+ if (!ui::OzonePlatform::GetInstance()->GetPlatformProperties().supports_vaapi)
+ return false;
+#endif
+
bool libraries_initialized = IsVaInitialized() && IsVa_drmInitialized();
-#if defined(USE_X11)
- if (!features::IsUsingOzonePlatform())
- libraries_initialized = libraries_initialized && IsVa_x11Initialized();
+#if BUILDFLAG(USE_VAAPI_X11)
+ libraries_initialized = libraries_initialized && IsVa_x11Initialized();
#endif
if (!libraries_initialized)
return false;
@@ -553,34 +587,25 @@ bool VADisplayState::Initialize() {
return success;
}
-#if defined(USE_X11)
+#if BUILDFLAG(USE_VAAPI_X11)
absl::optional<VADisplay> GetVADisplayStateX11(const base::ScopedFD& drm_fd) {
- bool use_drm_as_fallback = false;
switch (gl::GetGLImplementation()) {
case gl::kGLImplementationEGLGLES2:
return vaGetDisplayDRM(drm_fd.get());
case gl::kGLImplementationNone:
- use_drm_as_fallback = true;
- FALLTHROUGH;
case gl::kGLImplementationDesktopGL: {
- if (!features::IsUsingOzonePlatform()) {
- VADisplay display =
- vaGetDisplay(x11::Connection::Get()->GetXlibDisplay());
- if (vaDisplayIsValid(display))
- return display;
- return vaGetDisplayDRM(drm_fd.get());
- }
- break;
+ VADisplay display =
+ vaGetDisplay(x11::Connection::Get()->GetXlibDisplay());
+ if (vaDisplayIsValid(display))
+ return display;
+ return vaGetDisplayDRM(drm_fd.get());
}
- case gl::kGLImplementationEGLANGLE: {
- if (!features::IsUsingOzonePlatform())
- return vaGetDisplay(x11::Connection::Get()->GetXlibDisplay());
- break;
- }
+ case gl::kGLImplementationEGLANGLE:
+ return vaGetDisplay(x11::Connection::Get()->GetXlibDisplay());
default:
LOG(WARNING) << "VAAPI video acceleration not available for "
@@ -588,10 +613,6 @@ absl::optional<VADisplay> GetVADisplayStateX11(const base::ScopedFD& drm_fd) {
gl::GetGLImplementationParts());
return absl::nullopt;
}
-
- if (use_drm_as_fallback)
- return vaGetDisplayDRM(drm_fd.get());
- return absl::nullopt;
}
#else
@@ -609,11 +630,11 @@ absl::optional<VADisplay> GetVADisplayState(const base::ScopedFD& drm_fd) {
}
}
-#endif // defined(USE_X11)
+#endif // BUILDFLAG(USE_VAAPI_X11)
bool VADisplayState::InitializeVaDisplay_Locked() {
absl::optional<VADisplay> display =
-#if defined(USE_X11)
+#if BUILDFLAG(USE_VAAPI_X11)
GetVADisplayStateX11(drm_fd_);
#else
GetVADisplayState(drm_fd_);
@@ -678,10 +699,9 @@ bool VADisplayState::InitializeOnce() {
if (!InitializeVaDisplay_Locked() || !InitializeVaDriver_Locked())
return false;
-#if defined(USE_X11)
+#if BUILDFLAG(USE_VAAPI_X11)
if (gl::GetGLImplementation() == gl::kGLImplementationEGLANGLE &&
implementation_type_ == VAImplementation::kIntelIHD) {
- DCHECK(!features::IsUsingOzonePlatform());
constexpr char libva_driver_impl_env[] = "LIBVA_DRIVER_NAME";
// TODO(crbug/1116703) The libva intel-media driver has a known segfault in
// vaPutSurface, so until this is fixed, fall back to the i965 driver. There
@@ -698,7 +718,7 @@ bool VADisplayState::InitializeOnce() {
if (!InitializeVaDisplay_Locked() || !InitializeVaDriver_Locked())
return false;
}
-#endif // USE_X11
+#endif // BUILDFLAG(USE_VAAPI_X11)
return true;
}
@@ -835,8 +855,6 @@ bool GetRequiredAttribs(const base::Lock* va_lock,
if (mode == VaapiWrapper::kDecodeProtected && profile != VAProfileProtected) {
required_attribs->push_back(
{VAConfigAttribEncryption, VA_ENCRYPTION_TYPE_SUBSAMPLE_CTR});
- required_attribs->push_back(
- {VAConfigAttribDecProcessing, VA_DEC_PROCESSING});
}
#endif
@@ -866,7 +884,8 @@ bool GetRequiredAttribs(const base::Lock* va_lock,
}
const uint32_t packed_header_attributes =
- (VA_ENC_PACKED_HEADER_SEQUENCE | VA_ENC_PACKED_HEADER_PICTURE);
+ (VA_ENC_PACKED_HEADER_SEQUENCE | VA_ENC_PACKED_HEADER_PICTURE |
+ VA_ENC_PACKED_HEADER_SLICE);
if ((packed_header_attributes & attrib.value) == packed_header_attributes) {
required_attribs->push_back(
{VAConfigAttribEncPackedHeaders, packed_header_attributes});
@@ -1440,6 +1459,41 @@ scoped_refptr<VaapiWrapper> VaapiWrapper::CreateForVideoCodec(
}
// static
+std::vector<SVCScalabilityMode> VaapiWrapper::GetSupportedScalabilityModes(
+ VideoCodecProfile media_profile,
+ VAProfile va_profile) {
+ std::vector<SVCScalabilityMode> scalability_modes;
+#if BUILDFLAG(IS_CHROMEOS_ASH)
+ if (media_profile == VP9PROFILE_PROFILE0) {
+ scalability_modes.push_back(SVCScalabilityMode::kL1T2);
+ scalability_modes.push_back(SVCScalabilityMode::kL1T3);
+ if (base::FeatureList::IsEnabled(kVaapiVp9kSVCHWEncoding) &&
+ GetDefaultVaEntryPoint(
+ VaapiWrapper::kEncodeConstantQuantizationParameter, va_profile) ==
+ VAEntrypointEncSliceLP) {
+ scalability_modes.push_back(SVCScalabilityMode::kL2T2Key);
+ scalability_modes.push_back(SVCScalabilityMode::kL2T3Key);
+ scalability_modes.push_back(SVCScalabilityMode::kL3T2Key);
+ scalability_modes.push_back(SVCScalabilityMode::kL3T3Key);
+ }
+ }
+
+ if (media_profile >= H264PROFILE_MIN && media_profile <= H264PROFILE_MAX) {
+ // TODO(b/199487660): Enable H.264 temporal layer encoding on AMD once their
+ // drivers support them.
+ VAImplementation implementation = VaapiWrapper::GetImplementationType();
+ if (base::FeatureList::IsEnabled(kVaapiH264TemporalLayerHWEncoding) &&
+ (implementation == VAImplementation::kIntelI965 ||
+ implementation == VAImplementation::kIntelIHD)) {
+ scalability_modes.push_back(SVCScalabilityMode::kL1T2);
+ scalability_modes.push_back(SVCScalabilityMode::kL1T3);
+ }
+ }
+#endif
+ return scalability_modes;
+}
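
GetSupportedScalabilityModes() above advertises modes named L<spatial>T<temporal>, with a Key suffix for k-SVC (inter-layer prediction only on keyframes). A standalone sketch of that naming scheme; the parser is an illustration, not Chromium code, and assumes single-digit layer counts.

#include <cstdio>
#include <string>

struct ScalabilityMode {
  int spatial_layers;
  int temporal_layers;
  bool key_frame_only_inter_layer_prediction;
};

// Parses names like "L1T2" or "L3T3Key". Returns false on malformed input.
bool ParseScalabilityMode(const std::string& name, ScalabilityMode* mode) {
  if (name.size() < 4 || name[0] != 'L' || name[2] != 'T')
    return false;
  mode->spatial_layers = name[1] - '0';
  mode->temporal_layers = name[3] - '0';
  mode->key_frame_only_inter_layer_prediction =
      name.size() > 4 && name.substr(4) == "Key";
  return mode->spatial_layers >= 1 && mode->temporal_layers >= 1;
}

int main() {
  ScalabilityMode mode;
  if (ParseScalabilityMode("L2T3Key", &mode)) {
    std::printf("spatial=%d temporal=%d kSVC=%d\n", mode.spatial_layers,
                mode.temporal_layers,
                mode.key_frame_only_inter_layer_prediction);
  }
  return 0;
}
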
+
+// static
VideoEncodeAccelerator::SupportedProfiles
VaapiWrapper::GetSupportedEncodeProfiles() {
VideoEncodeAccelerator::SupportedProfiles profiles;
@@ -1464,6 +1518,8 @@ VaapiWrapper::GetSupportedEncodeProfiles() {
constexpr int kMaxEncoderFramerate = 30;
profile.max_framerate_numerator = kMaxEncoderFramerate;
profile.max_framerate_denominator = 1;
+ profile.scalability_modes =
+ GetSupportedScalabilityModes(media_profile, va_profile);
profiles.push_back(profile);
}
return profiles;
@@ -1647,6 +1703,24 @@ bool VaapiWrapper::IsVppFormatSupported(uint32_t va_fourcc) {
}
// static
+std::vector<Fourcc> VaapiWrapper::GetVppSupportedFormats() {
+ const VASupportedProfiles::ProfileInfo* profile_info =
+ VASupportedProfiles::Get().IsProfileSupported(kVideoProcess,
+ VAProfileNone);
+ if (!profile_info)
+ return {};
+
+ std::vector<Fourcc> supported_fourccs;
+ for (uint32_t pixel_format : profile_info->pixel_formats) {
+ auto fourcc = Fourcc::FromVAFourCC(pixel_format);
+ if (!fourcc)
+ continue;
+ supported_fourccs.push_back(*fourcc);
+ }
+ return supported_fourccs;
+}
+
+// static
bool VaapiWrapper::IsVppSupportedForJpegDecodedSurfaceToFourCC(
unsigned int rt_format,
uint32_t fourcc) {
@@ -1740,29 +1814,6 @@ uint32_t VaapiWrapper::BufferFormatToVARTFormat(gfx::BufferFormat fmt) {
}
}
-// static
-uint32_t VaapiWrapper::BufferFormatToVAFourCC(gfx::BufferFormat fmt) {
- switch (fmt) {
- case gfx::BufferFormat::BGRX_8888:
- return VA_FOURCC_BGRX;
- case gfx::BufferFormat::BGRA_8888:
- return VA_FOURCC_BGRA;
- case gfx::BufferFormat::RGBX_8888:
- return VA_FOURCC_RGBX;
- case gfx::BufferFormat::RGBA_8888:
- return VA_FOURCC_RGBA;
- case gfx::BufferFormat::YVU_420:
- return VA_FOURCC_YV12;
- case gfx::BufferFormat::YUV_420_BIPLANAR:
- return VA_FOURCC_NV12;
- case gfx::BufferFormat::P010:
- return VA_FOURCC_P010;
- default:
- NOTREACHED() << gfx::BufferFormatToString(fmt);
- return 0;
- }
-}
-
bool VaapiWrapper::CreateContextAndSurfaces(
unsigned int va_format,
const gfx::Size& size,
@@ -1859,6 +1910,8 @@ bool VaapiWrapper::CreateProtectedSession(
va_res = vaCreateProtectedSession(va_display_, va_protected_config_id_,
&va_protected_session_id_);
+ DCHECK(va_res == VA_STATUS_SUCCESS ||
+ va_protected_session_id_ == VA_INVALID_ID);
VA_SUCCESS_OR_RETURN(va_res, VaapiFunctions::kVACreateProtectedSession,
false);
}
@@ -1975,7 +2028,16 @@ uint32_t VaapiWrapper::GetProtectedInstanceID() {
bool VaapiWrapper::IsProtectedSessionDead() {
#if BUILDFLAG(IS_CHROMEOS_ASH)
- if (va_protected_session_id_ == VA_INVALID_ID)
+ return IsProtectedSessionDead(va_protected_session_id_);
+#else
+ return false;
+#endif
+}
+
+#if BUILDFLAG(IS_CHROMEOS_ASH)
+bool VaapiWrapper::IsProtectedSessionDead(
+ VAProtectedSessionID va_protected_session_id) {
+ if (va_protected_session_id == VA_INVALID_ID)
return false;
uint8_t alive;
@@ -1988,25 +2050,28 @@ bool VaapiWrapper::IsProtectedSessionDead() {
base::AutoLock auto_lock(*va_lock_);
VABufferID buf_id;
- VAStatus va_res =
- vaCreateBuffer(va_display_, va_protected_session_id_,
- VAProtectedSessionExecuteBufferType, sizeof(tee_exec_buf),
- 1, &tee_exec_buf, &buf_id);
+ VAStatus va_res = vaCreateBuffer(
+ va_display_, va_protected_session_id, VAProtectedSessionExecuteBufferType,
+ sizeof(tee_exec_buf), 1, &tee_exec_buf, &buf_id);
// Failure here is valid if the protected session has been closed.
if (va_res != VA_STATUS_SUCCESS)
return true;
va_res =
- vaProtectedSessionExecute(va_display_, va_protected_session_id_, buf_id);
+ vaProtectedSessionExecute(va_display_, va_protected_session_id, buf_id);
vaDestroyBuffer(va_display_, buf_id);
if (va_res != VA_STATUS_SUCCESS)
return true;
return !alive;
-#else // BUILDFLAG(IS_CHROMEOS_ASH)
- return false;
+}
#endif
+
+#if BUILDFLAG(IS_CHROMEOS_ASH)
+VAProtectedSessionID VaapiWrapper::GetProtectedSessionID() const {
+ return va_protected_session_id_;
}
+#endif
void VaapiWrapper::DestroyProtectedSession() {
#if BUILDFLAG(IS_CHROMEOS_ASH)
@@ -2127,6 +2192,15 @@ scoped_refptr<VASurface> VaapiWrapper::CreateVASurfaceForPixmap(
DCHECK_EQ(va_attrib_extbuf.flags, 0u);
DCHECK_EQ(va_attrib_extbuf.private_data, nullptr);
+ uint32_t va_format = BufferFormatToVARTFormat(buffer_format);
+
+ if (protected_content) {
+ if (GetImplementationType() == VAImplementation::kMesaGallium)
+ va_format |= VA_RT_FORMAT_PROTECTED;
+ else
+ va_attrib_extbuf.flags = VA_SURFACE_EXTBUF_DESC_PROTECTED;
+ }
+
std::vector<VASurfaceAttrib> va_attribs(2);
va_attribs[0].type = VASurfaceAttribMemoryType;
@@ -2139,13 +2213,6 @@ scoped_refptr<VASurface> VaapiWrapper::CreateVASurfaceForPixmap(
va_attribs[1].value.type = VAGenericValueTypePointer;
va_attribs[1].value.value.p = &va_attrib_extbuf;
- unsigned int va_format = BufferFormatToVARTFormat(buffer_format);
-
- if (protected_content) {
- DCHECK_EQ(GetImplementationType(), VAImplementation::kMesaGallium);
- va_format |= VA_RT_FORMAT_PROTECTED;
- }
-
VASurfaceID va_surface_id = VA_INVALID_ID;
{
base::AutoLock auto_lock(*va_lock_);
@@ -2387,11 +2454,10 @@ bool VaapiWrapper::MapAndCopyAndExecute(
return Execute_Locked(va_surface_id, va_buffer_ids);
}
-#if defined(USE_X11)
+#if BUILDFLAG(USE_VAAPI_X11)
bool VaapiWrapper::PutSurfaceIntoPixmap(VASurfaceID va_surface_id,
x11::Pixmap x_pixmap,
gfx::Size dest_size) {
- DCHECK(!features::IsUsingOzonePlatform());
base::AutoLock auto_lock(*va_lock_);
VAStatus va_res = vaSyncSurface(va_display_, va_surface_id);
@@ -2405,7 +2471,7 @@ bool VaapiWrapper::PutSurfaceIntoPixmap(VASurfaceID va_surface_id,
VA_SUCCESS_OR_RETURN(va_res, VaapiFunctions::kVAPutSurface, false);
return true;
}
-#endif // USE_X11
+#endif // BUILDFLAG(USE_VAAPI_X11)
std::unique_ptr<ScopedVAImage> VaapiWrapper::CreateVaImage(
VASurfaceID va_surface_id,
@@ -2645,6 +2711,25 @@ bool VaapiWrapper::GetVAEncMaxNumOfRefFrames(VideoCodecProfile profile,
return true;
}
+bool VaapiWrapper::GetSupportedPackedHeaders(VideoCodecProfile profile,
+ bool& packed_sps,
+ bool& packed_pps,
+ bool& packed_slice) {
+ const VAProfile va_profile =
+ ProfileToVAProfile(profile, CodecMode::kEncodeConstantBitrate);
+ VAConfigAttrib attrib{};
+ attrib.type = VAConfigAttribEncPackedHeaders;
+ base::AutoLock auto_lock(*va_lock_);
+ const VAStatus va_res = vaGetConfigAttributes(va_display_, va_profile,
+ va_entrypoint_, &attrib, 1);
+ VA_SUCCESS_OR_RETURN(va_res, VaapiFunctions::kVAGetConfigAttributes, false);
+ packed_sps = attrib.value & VA_ENC_PACKED_HEADER_SEQUENCE;
+ packed_pps = attrib.value & VA_ENC_PACKED_HEADER_PICTURE;
+ packed_slice = attrib.value & VA_ENC_PACKED_HEADER_SLICE;
+
+ return true;
+}
+
bool VaapiWrapper::IsRotationSupported() {
base::AutoLock auto_lock(*va_lock_);
VAProcPipelineCaps pipeline_caps;
@@ -2665,7 +2750,12 @@ bool VaapiWrapper::BlitSurface(const VASurface& va_surface_src,
const VASurface& va_surface_dest,
absl::optional<gfx::Rect> src_rect,
absl::optional<gfx::Rect> dest_rect,
- VideoRotation rotation) {
+ VideoRotation rotation
+#if BUILDFLAG(IS_CHROMEOS_ASH)
+ ,
+ VAProtectedSessionID va_protected_session_id
+#endif
+) {
DCHECK_EQ(mode_, kVideoProcess);
base::AutoLock auto_lock(*va_lock_);
@@ -2731,21 +2821,39 @@ bool VaapiWrapper::BlitSurface(const VASurface& va_surface_src,
break;
}
- VA_SUCCESS_OR_RETURN(mapping.Unmap(), VaapiFunctions::kVAUnmapBuffer,
+ const VAStatus va_res = mapping.Unmap();
+ VA_SUCCESS_OR_RETURN(va_res, VaapiFunctions::kVAUnmapBuffer, false);
+ }
+
+#if BUILDFLAG(IS_CHROMEOS_ASH)
+ base::ScopedClosureRunner protected_session_detacher;
+ if (va_protected_session_id != VA_INVALID_ID) {
+ const VAStatus va_res = vaAttachProtectedSession(
+ va_display_, va_context_id_, va_protected_session_id);
+ VA_SUCCESS_OR_RETURN(va_res, VaapiFunctions::kVAAttachProtectedSession,
false);
+ // Note that we use a lambda expression to wrap vaDetachProtectedSession()
+ // because the function in |protected_session_detacher| must return void.
+ protected_session_detacher.ReplaceClosure(base::BindOnce(
+ [](VADisplay va_display, VAContextID va_context_id) {
+ vaDetachProtectedSession(va_display, va_context_id);
+ },
+ va_display_, va_context_id_));
}
+#endif // BUILDFLAG(IS_CHROMEOS_ASH)
- VA_SUCCESS_OR_RETURN(
- vaBeginPicture(va_display_, va_context_id_, va_surface_dest.id()),
- VaapiFunctions::kVABeginPicture, false);
+ TRACE_EVENT2("media,gpu", "VaapiWrapper::BlitSurface", "src_rect",
+ src_rect->ToString(), "dest_rect", dest_rect->ToString());
- VABufferID va_buffer_id = va_buffer_for_vpp_->id();
- VA_SUCCESS_OR_RETURN(
- vaRenderPicture(va_display_, va_context_id_, &va_buffer_id, 1),
- VaapiFunctions::kVARenderPicture_Vpp, false);
+ VAStatus va_res =
+ vaBeginPicture(va_display_, va_context_id_, va_surface_dest.id());
+ VA_SUCCESS_OR_RETURN(va_res, VaapiFunctions::kVABeginPicture, false);
- VA_SUCCESS_OR_RETURN(vaEndPicture(va_display_, va_context_id_),
- VaapiFunctions::kVAEndPicture, false);
+ VABufferID va_buffer_id = va_buffer_for_vpp_->id();
+ va_res = vaRenderPicture(va_display_, va_context_id_, &va_buffer_id, 1);
+ VA_SUCCESS_OR_RETURN(va_res, VaapiFunctions::kVARenderPicture_Vpp, false);
+ va_res = vaEndPicture(va_display_, va_context_id_);
+ VA_SUCCESS_OR_RETURN(va_res, VaapiFunctions::kVAEndPicture, false);
return true;
}
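
The BlitSurface() change above attaches an optional protected session and uses base::ScopedClosureRunner so the session is detached on every return path. A standalone sketch of that scope-guard pattern in plain C++; ClosureRunner and the function below are made-up stand-ins for the Chromium helper and the VPP call sequence.

#include <cstdio>
#include <functional>
#include <utility>

// Hypothetical stand-in for base::ScopedClosureRunner: runs |closure_| when
// it goes out of scope, regardless of which return path is taken.
class ClosureRunner {
 public:
  ClosureRunner() = default;
  ClosureRunner(const ClosureRunner&) = delete;
  ClosureRunner& operator=(const ClosureRunner&) = delete;
  ~ClosureRunner() {
    if (closure_)
      closure_();
  }

  void ReplaceClosure(std::function<void()> closure) {
    closure_ = std::move(closure);
  }

 private:
  std::function<void()> closure_;
};

bool BlitWithOptionalProtectedSession(bool attach_session) {
  ClosureRunner protected_session_detacher;
  if (attach_session) {
    std::printf("attach protected session\n");
    protected_session_detacher.ReplaceClosure(
        [] { std::printf("detach protected session\n"); });
  }

  // Begin/render/end picture would go here; any early return (for example on
  // a VA error) still runs the detacher.
  return true;
}

int main() {
  BlitWithOptionalProtectedSession(/*attach_session=*/true);
  return 0;
}
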
@@ -2759,9 +2867,8 @@ void VaapiWrapper::PreSandboxInitialization() {
paths[kModuleVa].push_back(std::string("libva.so.") + va_suffix);
paths[kModuleVa_drm].push_back(std::string("libva-drm.so.") + va_suffix);
-#if defined(USE_X11)
- if (!features::IsUsingOzonePlatform())
- paths[kModuleVa_x11].push_back(std::string("libva-x11.so.") + va_suffix);
+#if BUILDFLAG(USE_VAAPI_X11)
+ paths[kModuleVa_x11].push_back(std::string("libva-x11.so.") + va_suffix);
#endif
#if BUILDFLAG(IS_CHROMEOS_ASH)
paths[kModuleVa_prot].push_back(std::string("libva.so.") + va_suffix);
diff --git a/chromium/media/gpu/vaapi/vaapi_wrapper.h b/chromium/media/gpu/vaapi/vaapi_wrapper.h
index cd2099ecd7b..0dea294e574 100644
--- a/chromium/media/gpu/vaapi/vaapi_wrapper.h
+++ b/chromium/media/gpu/vaapi/vaapi_wrapper.h
@@ -27,6 +27,7 @@
#include "base/synchronization/lock.h"
#include "base/thread_annotations.h"
#include "build/chromeos_buildflags.h"
+#include "media/gpu/chromeos/fourcc.h"
#include "media/gpu/media_gpu_export.h"
#include "media/gpu/vaapi/va_surface.h"
#include "media/gpu/vaapi/vaapi_utils.h"
@@ -35,9 +36,9 @@
#include "third_party/abseil-cpp/absl/types/optional.h"
#include "ui/gfx/geometry/size.h"
-#if defined(USE_X11)
+#if BUILDFLAG(USE_VAAPI_X11)
#include "ui/gfx/x/xproto.h" // nogncheck
-#endif // USE_X11
+#endif // BUILDFLAG(USE_VAAPI_X11)
namespace gfx {
enum class BufferFormat;
@@ -161,6 +162,11 @@ class MEDIA_GPU_EXPORT VaapiWrapper
EncryptionScheme encryption_scheme,
const ReportErrorToUMACB& report_error_to_uma_cb);
+ // Returns the supported SVC scalability modes for specified profile.
+ static std::vector<SVCScalabilityMode> GetSupportedScalabilityModes(
+ VideoCodecProfile media_profile,
+ VAProfile va_profile);
+
// Return the supported video encode profiles.
static VideoEncodeAccelerator::SupportedProfiles GetSupportedEncodeProfiles();
@@ -212,6 +218,9 @@ class MEDIA_GPU_EXPORT VaapiWrapper
// Returns true if the VPP supports converting from/to |fourcc|.
static bool IsVppFormatSupported(uint32_t fourcc);
+ // Returns the pixel formats supported by the VPP.
+ static std::vector<Fourcc> GetVppSupportedFormats();
+
// Returns true if VPP supports the format conversion from a JPEG decoded
// internal surface to a FOURCC. |rt_format| corresponds to the JPEG's
// subsampling format. |fourcc| is the output surface's FOURCC.
@@ -235,7 +244,6 @@ class MEDIA_GPU_EXPORT VaapiWrapper
static VAEntrypoint GetDefaultVaEntryPoint(CodecMode mode, VAProfile profile);
static uint32_t BufferFormatToVARTFormat(gfx::BufferFormat fmt);
- static uint32_t BufferFormatToVAFourCC(gfx::BufferFormat fmt);
// Returns the current instance identifier for the protected content system.
// This can be used to detect when protected context loss has occurred, so any
@@ -285,6 +293,20 @@ class MEDIA_GPU_EXPORT VaapiWrapper
// querying libva indicates that our protected session is no longer alive,
// otherwise this will return false.
bool IsProtectedSessionDead();
+#if BUILDFLAG(IS_CHROMEOS_ASH)
+ // Returns true if and only if |va_protected_session_id| is not VA_INVALID_ID
+ // and querying libva indicates that the protected session identified by
+ // |va_protected_session_id| is no longer alive.
+ bool IsProtectedSessionDead(VAProtectedSessionID va_protected_session_id);
+
+ // Returns the ID of the current protected session or VA_INVALID_ID if there's
+ // none. This must be called on the same sequence as other methods that use
+ // the protected session ID internally.
+ //
+ // TODO(b/183515581): update this documentation once we force the VaapiWrapper
+ // to be used on a single sequence.
+ VAProtectedSessionID GetProtectedSessionID() const;
+#endif
// If we have a protected session, destroys it immediately. This should be
// used as part of recovering dead protected sessions.
void DestroyProtectedSession();
@@ -404,13 +426,13 @@ class MEDIA_GPU_EXPORT VaapiWrapper
const std::vector<std::pair<VABufferID, VABufferDescriptor>>& va_buffers)
WARN_UNUSED_RESULT;
-#if defined(USE_X11)
+#if BUILDFLAG(USE_VAAPI_X11)
// Put data from |va_surface_id| into |x_pixmap| of size
// |dest_size|, converting/scaling to it.
bool PutSurfaceIntoPixmap(VASurfaceID va_surface_id,
x11::Pixmap x_pixmap,
gfx::Size dest_size) WARN_UNUSED_RESULT;
-#endif // USE_X11
+#endif // BUILDFLAG(USE_VAAPI_X11)
// Creates a ScopedVAImage from a VASurface |va_surface_id| and map it into
// memory with the given |format| and |size|. If |format| is not equal to the
@@ -462,19 +484,36 @@ class MEDIA_GPU_EXPORT VaapiWrapper
size_t* max_ref_frames)
WARN_UNUSED_RESULT;
+  // Gets whether packed headers are supported for encoding. This is called
+  // for H264 encoding. |packed_sps|, |packed_pps| and |packed_slice| stand
+  // for whether a packed sequence parameter set, a packed picture parameter
+  // set and a packed slice header are supported, respectively.
+ virtual bool GetSupportedPackedHeaders(VideoCodecProfile profile,
+ bool& packed_sps,
+ bool& packed_pps,
+ bool& packed_slice) WARN_UNUSED_RESULT;
+
// Checks if the driver supports frame rotation.
bool IsRotationSupported();
// Blits a VASurface |va_surface_src| into another VASurface
// |va_surface_dest| applying pixel format conversion, rotation, cropping
// and scaling if needed. |src_rect| and |dest_rect| are optional. They can
- // be used to specify the area used in the blit.
- virtual bool BlitSurface(const VASurface& va_surface_src,
- const VASurface& va_surface_dest,
- absl::optional<gfx::Rect> src_rect = absl::nullopt,
- absl::optional<gfx::Rect> dest_rect = absl::nullopt,
- VideoRotation rotation = VIDEO_ROTATION_0)
- WARN_UNUSED_RESULT;
+ // be used to specify the area used in the blit. If |va_protected_session_id|
+ // is provided and is not VA_INVALID_ID, the corresponding protected session
+ // is attached to the VPP context prior to submitting the VPP buffers and
+ // detached after submitting those buffers.
+ virtual bool BlitSurface(
+ const VASurface& va_surface_src,
+ const VASurface& va_surface_dest,
+ absl::optional<gfx::Rect> src_rect = absl::nullopt,
+ absl::optional<gfx::Rect> dest_rect = absl::nullopt,
+ VideoRotation rotation = VIDEO_ROTATION_0
+#if BUILDFLAG(IS_CHROMEOS_ASH)
+ ,
+ VAProtectedSessionID va_protected_session_id = VA_INVALID_ID
+#endif
+ ) WARN_UNUSED_RESULT;
// Initialize static data before sandbox is enabled.
static void PreSandboxInitialization();
@@ -495,6 +534,7 @@ class MEDIA_GPU_EXPORT VaapiWrapper
FRIEND_TEST_ALL_PREFIXES(VaapiUtilsTest, ScopedVAImage);
FRIEND_TEST_ALL_PREFIXES(VaapiUtilsTest, BadScopedVAImage);
FRIEND_TEST_ALL_PREFIXES(VaapiUtilsTest, BadScopedVABufferMapping);
+ FRIEND_TEST_ALL_PREFIXES(VaapiMinigbmTest, AllocateAndCompareWithMinigbm);
bool Initialize(VAProfile va_profile,
EncryptionScheme encryption_scheme) WARN_UNUSED_RESULT;
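
GetSupportedPackedHeaders(), declared above, decodes the VAConfigAttribEncPackedHeaders bitmask into per-header booleans. A standalone sketch of that decoding, assuming libva's <va/va.h>; the attribute value is hard-coded here instead of being queried via vaGetConfigAttributes().

#include <va/va.h>

#include <cstdint>
#include <cstdio>

int main() {
  // Pretend this value came back from vaGetConfigAttributes() for
  // VAConfigAttribEncPackedHeaders.
  const uint32_t attrib_value =
      VA_ENC_PACKED_HEADER_SEQUENCE | VA_ENC_PACKED_HEADER_SLICE;

  const bool packed_sps = attrib_value & VA_ENC_PACKED_HEADER_SEQUENCE;
  const bool packed_pps = attrib_value & VA_ENC_PACKED_HEADER_PICTURE;
  const bool packed_slice = attrib_value & VA_ENC_PACKED_HEADER_SLICE;

  std::printf("packed SPS: %d, packed PPS: %d, packed slice header: %d\n",
              packed_sps, packed_pps, packed_slice);
  return 0;
}
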
diff --git a/chromium/media/gpu/vaapi/vp8_vaapi_video_decoder_delegate.h b/chromium/media/gpu/vaapi/vp8_vaapi_video_decoder_delegate.h
index 9a299d8d207..644e8a9cf2a 100644
--- a/chromium/media/gpu/vaapi/vp8_vaapi_video_decoder_delegate.h
+++ b/chromium/media/gpu/vaapi/vp8_vaapi_video_decoder_delegate.h
@@ -22,6 +22,10 @@ class VP8VaapiVideoDecoderDelegate : public VP8Decoder::VP8Accelerator,
VP8VaapiVideoDecoderDelegate(DecodeSurfaceHandler<VASurface>* const vaapi_dec,
scoped_refptr<VaapiWrapper> vaapi_wrapper);
+ VP8VaapiVideoDecoderDelegate(const VP8VaapiVideoDecoderDelegate&) = delete;
+ VP8VaapiVideoDecoderDelegate& operator=(const VP8VaapiVideoDecoderDelegate&) =
+ delete;
+
~VP8VaapiVideoDecoderDelegate() override;
// VP8Decoder::VP8Accelerator implementation.
@@ -38,8 +42,6 @@ class VP8VaapiVideoDecoderDelegate : public VP8Decoder::VP8Accelerator,
std::unique_ptr<ScopedVABuffer> prob_buffer_;
std::unique_ptr<ScopedVABuffer> picture_params_;
std::unique_ptr<ScopedVABuffer> slice_params_;
-
- DISALLOW_COPY_AND_ASSIGN(VP8VaapiVideoDecoderDelegate);
};
} // namespace media
diff --git a/chromium/media/gpu/vaapi/vp8_vaapi_video_encoder_delegate.cc b/chromium/media/gpu/vaapi/vp8_vaapi_video_encoder_delegate.cc
index 76d80594c9d..52cf4dce8b4 100644
--- a/chromium/media/gpu/vaapi/vp8_vaapi_video_encoder_delegate.cc
+++ b/chromium/media/gpu/vaapi/vp8_vaapi_video_encoder_delegate.cc
@@ -98,7 +98,8 @@ bool VP8VaapiVideoEncoderDelegate::Initialize(
const VaapiVideoEncoderDelegate::Config& ave_config) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
- if (VideoCodecProfileToVideoCodec(config.output_profile) != kCodecVP8) {
+ if (VideoCodecProfileToVideoCodec(config.output_profile) !=
+ VideoCodec::kVP8) {
DVLOGF(1) << "Invalid profile: " << GetProfileName(config.output_profile);
return false;
}
@@ -181,22 +182,27 @@ bool VP8VaapiVideoEncoderDelegate::UpdateRates(
uint32_t framerate) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
- if (bitrate_allocation.GetSumBps() == 0 || framerate == 0)
+ uint32_t bitrate = bitrate_allocation.GetSumBps();
+ if (bitrate == 0 || framerate == 0)
return false;
if (current_params_.bitrate_allocation == bitrate_allocation &&
current_params_.framerate == framerate) {
return true;
}
- VLOGF(2) << "New bitrate: " << bitrate_allocation.GetSumBps()
+ VLOGF(2) << "New bitrate: " << bitrate_allocation.ToString()
<< ", new framerate: " << framerate;
current_params_.bitrate_allocation = bitrate_allocation;
current_params_.framerate = framerate;
- current_params_.cpb_size_bits =
- current_params_.bitrate_allocation.GetSumBps() *
- current_params_.cpb_window_size_ms / 1000;
+ base::CheckedNumeric<uint32_t> cpb_size_bits(bitrate);
+ cpb_size_bits /= 1000;
+ cpb_size_bits *= current_params_.cpb_window_size_ms;
+ if (!cpb_size_bits.AssignIfValid(&current_params_.cpb_size_bits)) {
+ VLOGF(1) << "Too large bitrate: " << bitrate_allocation.GetSumBps();
+ return false;
+ }
return true;
}
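
The UpdateRates() change above replaces a plain multiplication with base::CheckedNumeric so a very large bitrate cannot overflow the 32-bit CPB size. A standalone sketch of the same check without the Chromium helper; the function name and values are illustrative.

#include <cstdint>
#include <cstdio>
#include <limits>

// Returns false if bitrate_bps / 1000 * window_ms does not fit in uint32_t,
// mirroring the divide-then-multiply order used above.
bool ComputeCpbSizeBits(uint32_t bitrate_bps, uint32_t window_ms,
                        uint32_t* cpb_size_bits) {
  const uint64_t wide =
      static_cast<uint64_t>(bitrate_bps) / 1000u * window_ms;
  if (wide > std::numeric_limits<uint32_t>::max())
    return false;
  *cpb_size_bits = static_cast<uint32_t>(wide);
  return true;
}

int main() {
  uint32_t cpb = 0;
  if (ComputeCpbSizeBits(/*bitrate_bps=*/2'000'000, /*window_ms=*/1500, &cpb))
    std::printf("cpb_size_bits = %u\n", cpb);  // 2'000'000/1000*1500 = 3'000'000
  return 0;
}
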
diff --git a/chromium/media/gpu/vaapi/vp8_vaapi_video_encoder_delegate.h b/chromium/media/gpu/vaapi/vp8_vaapi_video_encoder_delegate.h
index 5e38feb332c..9d9c4eaf741 100644
--- a/chromium/media/gpu/vaapi/vp8_vaapi_video_encoder_delegate.h
+++ b/chromium/media/gpu/vaapi/vp8_vaapi_video_encoder_delegate.h
@@ -49,6 +49,11 @@ class VP8VaapiVideoEncoderDelegate : public VaapiVideoEncoderDelegate {
VP8VaapiVideoEncoderDelegate(scoped_refptr<VaapiWrapper> vaapi_wrapper,
base::RepeatingClosure error_cb);
+
+ VP8VaapiVideoEncoderDelegate(const VP8VaapiVideoEncoderDelegate&) = delete;
+ VP8VaapiVideoEncoderDelegate& operator=(const VP8VaapiVideoEncoderDelegate&) =
+ delete;
+
~VP8VaapiVideoEncoderDelegate() override;
// VaapiVideoEncoderDelegate implementation.
@@ -86,8 +91,6 @@ class VP8VaapiVideoEncoderDelegate : public VaapiVideoEncoderDelegate {
Vp8FrameHeader current_frame_hdr_;
Vp8ReferenceFrameVector reference_frames_;
-
- DISALLOW_COPY_AND_ASSIGN(VP8VaapiVideoEncoderDelegate);
};
} // namespace media
diff --git a/chromium/media/gpu/vaapi/vp9_vaapi_video_decoder_delegate.cc b/chromium/media/gpu/vaapi/vp9_vaapi_video_decoder_delegate.cc
index dfa8b832578..3e5e2a4b156 100644
--- a/chromium/media/gpu/vaapi/vp9_vaapi_video_decoder_delegate.cc
+++ b/chromium/media/gpu/vaapi/vp9_vaapi_video_decoder_delegate.cc
@@ -36,7 +36,6 @@ VP9VaapiVideoDecoderDelegate::~VP9VaapiVideoDecoderDelegate() {
DCHECK(!picture_params_);
DCHECK(!slice_params_);
DCHECK(!crypto_params_);
- DCHECK(!proc_params_);
DCHECK(!protected_params_);
}
@@ -46,15 +45,7 @@ scoped_refptr<VP9Picture> VP9VaapiVideoDecoderDelegate::CreateVP9Picture() {
if (!va_surface)
return nullptr;
- scoped_refptr<VP9Picture> pic = new VaapiVP9Picture(std::move(va_surface));
- if (!vaapi_dec_->IsScalingDecode())
- return pic;
-
- // Setup the scaling buffer.
- scoped_refptr<VASurface> scaled_surface = vaapi_dec_->CreateDecodeSurface();
- CHECK(scaled_surface);
- pic->AsVaapiVP9Picture()->SetDecodeSurface(std::move(scaled_surface));
- return pic;
+ return new VaapiVP9Picture(std::move(va_surface));
}
DecodeStatus VP9VaapiVideoDecoderDelegate::SubmitDecode(
@@ -127,7 +118,7 @@ DecodeStatus VP9VaapiVideoDecoderDelegate::SubmitDecode(
auto ref_pic = ref_frames.GetFrame(i);
if (ref_pic) {
pic_param.reference_frames[i] =
- ref_pic->AsVaapiVP9Picture()->GetVADecodeSurfaceID();
+ ref_pic->AsVaapiVP9Picture()->GetVASurfaceID();
} else {
pic_param.reference_frames[i] = VA_INVALID_SURFACE;
}
@@ -287,25 +278,11 @@ DecodeStatus VP9VaapiVideoDecoderDelegate::SubmitDecode(
#endif // BUILDFLAG(IS_CHROMEOS_ASH)
const VaapiVP9Picture* vaapi_pic = pic->AsVaapiVP9Picture();
- VAProcPipelineParameterBuffer proc_buffer;
- if (vaapi_dec_->IsScalingDecode()) {
- if (!proc_params_) {
- proc_params_ = vaapi_wrapper_->CreateVABuffer(
- VAProcPipelineParameterBufferType, sizeof(proc_buffer));
- if (!proc_params_)
- return DecodeStatus::kFail;
- }
- CHECK(gfx::Rect(vaapi_pic->GetDecodeSize()).Contains(pic->visible_rect()));
- CHECK(FillDecodeScalingIfNeeded(
- pic->visible_rect(), vaapi_pic->GetVADecodeSurfaceID(),
- pic->AsVaapiVP9Picture()->va_surface(), &proc_buffer));
- buffers.push_back(
- {proc_params_->id(),
- {proc_params_->type(), proc_params_->size(), &proc_buffer}});
- }
+ CHECK(
+ gfx::Rect(vaapi_pic->va_surface()->size()).Contains(pic->visible_rect()));
bool success = vaapi_wrapper_->MapAndCopyAndExecute(
- vaapi_pic->GetVADecodeSurfaceID(), buffers);
+ vaapi_pic->GetVASurfaceID(), buffers);
if (!success && NeedsProtectedSessionRecovery())
return DecodeStatus::kTryAgain;
@@ -320,11 +297,9 @@ bool VP9VaapiVideoDecoderDelegate::OutputPicture(
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
const VaapiVP9Picture* vaapi_pic = pic->AsVaapiVP9Picture();
- vaapi_dec_->SurfaceReady(
- vaapi_pic->va_surface(), vaapi_pic->bitstream_id(),
- vaapi_dec_->GetOutputVisibleRect(vaapi_pic->visible_rect(),
- vaapi_pic->va_surface()->size()),
- vaapi_pic->get_colorspace());
+ vaapi_dec_->SurfaceReady(vaapi_pic->va_surface(), vaapi_pic->bitstream_id(),
+ vaapi_pic->visible_rect(),
+ vaapi_pic->get_colorspace());
return true;
}
@@ -346,7 +321,6 @@ void VP9VaapiVideoDecoderDelegate::OnVAContextDestructionSoon() {
picture_params_.reset();
slice_params_.reset();
crypto_params_.reset();
- proc_params_.reset();
protected_params_.reset();
}
diff --git a/chromium/media/gpu/vaapi/vp9_vaapi_video_decoder_delegate.h b/chromium/media/gpu/vaapi/vp9_vaapi_video_decoder_delegate.h
index 733fff66d38..e2382c31861 100644
--- a/chromium/media/gpu/vaapi/vp9_vaapi_video_decoder_delegate.h
+++ b/chromium/media/gpu/vaapi/vp9_vaapi_video_decoder_delegate.h
@@ -27,6 +27,11 @@ class VP9VaapiVideoDecoderDelegate : public VP9Decoder::VP9Accelerator,
base::DoNothing(),
CdmContext* cdm_context = nullptr,
EncryptionScheme encryption_scheme = EncryptionScheme::kUnencrypted);
+
+ VP9VaapiVideoDecoderDelegate(const VP9VaapiVideoDecoderDelegate&) = delete;
+ VP9VaapiVideoDecoderDelegate& operator=(const VP9VaapiVideoDecoderDelegate&) =
+ delete;
+
~VP9VaapiVideoDecoderDelegate() override;
// VP9Decoder::VP9Accelerator implementation.
@@ -49,10 +54,7 @@ class VP9VaapiVideoDecoderDelegate : public VP9Decoder::VP9Accelerator,
std::unique_ptr<ScopedVABuffer> picture_params_;
std::unique_ptr<ScopedVABuffer> slice_params_;
std::unique_ptr<ScopedVABuffer> crypto_params_;
- std::unique_ptr<ScopedVABuffer> proc_params_;
std::unique_ptr<ScopedVABuffer> protected_params_;
-
- DISALLOW_COPY_AND_ASSIGN(VP9VaapiVideoDecoderDelegate);
};
} // namespace media
diff --git a/chromium/media/gpu/vaapi/vp9_vaapi_video_encoder_delegate.cc b/chromium/media/gpu/vaapi/vp9_vaapi_video_encoder_delegate.cc
index 8ace81f2a0b..76f43e988c0 100644
--- a/chromium/media/gpu/vaapi/vp9_vaapi_video_encoder_delegate.cc
+++ b/chromium/media/gpu/vaapi/vp9_vaapi_video_encoder_delegate.cc
@@ -209,7 +209,8 @@ bool VP9VaapiVideoEncoderDelegate::Initialize(
const VideoEncodeAccelerator::Config& config,
const VaapiVideoEncoderDelegate::Config& ave_config) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
- if (VideoCodecProfileToVideoCodec(config.output_profile) != kCodecVP9) {
+ if (VideoCodecProfileToVideoCodec(config.output_profile) !=
+ VideoCodec::kVP9) {
DVLOGF(1) << "Invalid profile: " << GetProfileName(config.output_profile);
return false;
}
@@ -387,7 +388,7 @@ bool VP9VaapiVideoEncoderDelegate::ApplyPendingUpdateRates() {
if (!pending_update_rates_)
return true;
- VLOGF(2) << "New bitrate: " << pending_update_rates_->first.GetSumBps()
+ VLOGF(2) << "New bitrate: " << pending_update_rates_->first.ToString()
<< ", New framerate: " << pending_update_rates_->second;
current_params_.bitrate_allocation = pending_update_rates_->first;
diff --git a/chromium/media/gpu/vaapi/vp9_vaapi_video_encoder_delegate.h b/chromium/media/gpu/vaapi/vp9_vaapi_video_encoder_delegate.h
index b9c5a77329e..b3606a1713d 100644
--- a/chromium/media/gpu/vaapi/vp9_vaapi_video_encoder_delegate.h
+++ b/chromium/media/gpu/vaapi/vp9_vaapi_video_encoder_delegate.h
@@ -47,6 +47,11 @@ class VP9VaapiVideoEncoderDelegate : public VaapiVideoEncoderDelegate {
VP9VaapiVideoEncoderDelegate(scoped_refptr<VaapiWrapper> vaapi_wrapper,
base::RepeatingClosure error_cb);
+
+ VP9VaapiVideoEncoderDelegate(const VP9VaapiVideoEncoderDelegate&) = delete;
+ VP9VaapiVideoEncoderDelegate& operator=(const VP9VaapiVideoEncoderDelegate&) =
+ delete;
+
~VP9VaapiVideoEncoderDelegate() override;
// VaapiVideoEncoderDelegate implementation.
@@ -106,8 +111,6 @@ class VP9VaapiVideoEncoderDelegate : public VaapiVideoEncoderDelegate {
pending_update_rates_;
std::unique_ptr<VP9RateControl> rate_ctrl_;
-
- DISALLOW_COPY_AND_ASSIGN(VP9VaapiVideoEncoderDelegate);
};
} // namespace media
diff --git a/chromium/media/gpu/video_decode_accelerator_perf_tests.cc b/chromium/media/gpu/video_decode_accelerator_perf_tests.cc
index ccd61068534..c7b14ab3b27 100644
--- a/chromium/media/gpu/video_decode_accelerator_perf_tests.cc
+++ b/chromium/media/gpu/video_decode_accelerator_perf_tests.cc
@@ -30,7 +30,7 @@ namespace {
constexpr const char* usage_msg =
"usage: video_decode_accelerator_perf_tests\n"
" [-v=<level>] [--vmodule=<config>] [--output_folder]\n"
- " ([--use-legacy][--use_vd]|[--use_vd_vda]) [--gtest_help]\n"
+ " ([--use-legacy]|[--use_vd]|[--use_vd_vda]) [--gtest_help]\n"
" [--help] [<video path>] [<video metadata path>]\n";
// Video decoder perf tests help message.
@@ -49,9 +49,8 @@ constexpr const char* help_msg =
" performance metrics, if not specified results\n"
" will be stored in the current working directory.\n"
" --use-legacy use the legacy VDA-based video decoders.\n"
+ " --use_vd use the new VD-based video decoders.\n"
" (enabled by default)\n"
- " --use_vd use the new VD-based video decoders, instead of\n"
- " the default VDA-based video decoders.\n"
" --use_vd_vda use the new VD-based video decoders with a\n"
" wrapper that translates to the VDA interface,\n"
" used to test interaction with older components\n"
@@ -309,8 +308,8 @@ class VideoDecoderTest : public ::testing::Test {
base::TimeDelta frame_duration;
base::TimeDelta vsync_interval_duration;
if (render_frame_rate > 0) {
- frame_duration = base::TimeDelta::FromSeconds(1) / render_frame_rate;
- vsync_interval_duration = base::TimeDelta::FromSeconds(1) / vsync_rate;
+ frame_duration = base::Seconds(1) / render_frame_rate;
+ vsync_interval_duration = base::Seconds(1) / vsync_rate;
}
auto frame_renderer =
FrameRendererDummy::Create(frame_duration, vsync_interval_duration);
@@ -441,7 +440,7 @@ int main(int argc, char** argv) {
bool use_vd = false;
bool use_vd_vda = false;
media::test::DecoderImplementation implementation =
- media::test::DecoderImplementation::kVDA;
+ media::test::DecoderImplementation::kVD;
base::CommandLine::SwitchMap switches = cmd_line->GetSwitches();
for (base::CommandLine::SwitchMap::const_iterator it = switches.begin();
it != switches.end(); ++it) {
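
The timing hunk above computes the frame duration and the vsync interval as one second divided by the respective rate (base::Seconds(1) / rate). A standalone sketch of the same arithmetic with std::chrono; the rates are example values.

#include <chrono>
#include <cstdio>

int main() {
  const int render_frame_rate = 30;
  const int vsync_rate = 60;

  using FloatSeconds = std::chrono::duration<double>;
  const FloatSeconds frame_duration = FloatSeconds(1.0) / render_frame_rate;
  const FloatSeconds vsync_interval = FloatSeconds(1.0) / vsync_rate;

  std::printf("frame duration: %.4f s, vsync interval: %.4f s\n",
              frame_duration.count(), vsync_interval.count());
  return 0;
}
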
diff --git a/chromium/media/gpu/video_decode_accelerator_tests.cc b/chromium/media/gpu/video_decode_accelerator_tests.cc
index 81678b43669..7d37d426d6b 100644
--- a/chromium/media/gpu/video_decode_accelerator_tests.cc
+++ b/chromium/media/gpu/video_decode_accelerator_tests.cc
@@ -4,10 +4,12 @@
#include <limits>
+#include "base/callback_helpers.h"
#include "base/command_line.h"
#include "base/files/file_util.h"
#include "base/numerics/safe_conversions.h"
#include "base/strings/string_number_conversions.h"
+#include "build/build_config.h"
#include "media/base/decoder_buffer.h"
#include "media/base/encryption_scheme.h"
#include "media/base/media_switches.h"
@@ -46,7 +48,7 @@ constexpr const char* usage_msg =
" [--validator_type=(none|md5|ssim)]\n"
" [--output_frames=(all|corrupt)] [--output_format=(png|yuv)]\n"
" [--output_limit=<number>] [--output_folder=<folder>]\n"
- " ([--use-legacy][--use_vd]|[--use_vd_vda]) [--gtest_help]\n"
+ " ([--use-legacy]|[--use_vd]|[--use_vd_vda]) [--gtest_help]\n"
" [--help] [<video path>] [<video metadata path>]\n";
// Video decoder tests help message.
@@ -67,9 +69,8 @@ constexpr const char* help_msg =
" frames, currently allowed for AV1 streams only)\n"
" and none (disable frame validation).\n"
" --use-legacy use the legacy VDA-based video decoders.\n"
+ " --use_vd use the new VD-based video decoders.\n"
" (enabled by default)\n"
- " --use_vd use the new VD-based video decoders, instead of\n"
- " the default VDA-based video decoders.\n"
" --use_vd_vda use the new VD-based video decoders with a\n"
" wrapper that translates to the VDA interface,\n"
" used to test interaction with older components\n"
@@ -173,7 +174,7 @@ class VideoDecoderTest : public ::testing::Test {
// TODO(hiroh): Move this to Video class or video_frame_helpers.h.
// TODO(hiroh): Create model frames once during the test.
bool CreateModelFrames(const Video* video) {
- if (video->Codec() != VideoCodec::kCodecAV1) {
+ if (video->Codec() != VideoCodec::kAV1) {
LOG(ERROR) << "Frame validation by SSIM is allowed for AV1 streams only";
return false;
}
@@ -357,8 +358,8 @@ TEST_F(VideoDecoderTest, ResetBeforeFlushDone) {
// H.264/HEVC video stream. After resetting the video is played until the end.
TEST_F(VideoDecoderTest, ResetAfterFirstConfigInfo) {
// This test is only relevant for H.264/HEVC video streams.
- if (g_env->Video()->Codec() != media::kCodecH264 &&
- g_env->Video()->Codec() != media::kCodecHEVC)
+ if (g_env->Video()->Codec() != media::VideoCodec::kH264 &&
+ g_env->Video()->Codec() != media::VideoCodec::kHEVC)
GTEST_SKIP();
auto tvp = CreateVideoPlayer(g_env->Video());
@@ -496,7 +497,7 @@ int main(int argc, char** argv) {
bool use_vd = false;
bool use_vd_vda = false;
media::test::DecoderImplementation implementation =
- media::test::DecoderImplementation::kVDA;
+ media::test::DecoderImplementation::kVD;
base::CommandLine::SwitchMap switches = cmd_line->GetSwitches();
for (base::CommandLine::SwitchMap::const_iterator it = switches.begin();
it != switches.end(); ++it) {
@@ -589,6 +590,13 @@ int main(int argc, char** argv) {
// video decoder to allow clear HEVC decoding.
cmd_line->AppendSwitch("enable-clear-hevc-for-testing");
+#if defined(ARCH_CPU_ARM_FAMILY)
+  // On some platforms bandwidth compression is fully opaque and cannot be
+  // read by the CPU. This prevents MD5 computation, as that is done by the
+  // CPU.
+ cmd_line->AppendSwitch("disable-buffer-bw-compression");
+#endif
+
// Set up our test environment.
media::test::VideoPlayerTestEnvironment* test_environment =
media::test::VideoPlayerTestEnvironment::Create(
diff --git a/chromium/media/gpu/video_encode_accelerator_perf_tests.cc b/chromium/media/gpu/video_encode_accelerator_perf_tests.cc
index d431d432e8c..38d3d30ac1d 100644
--- a/chromium/media/gpu/video_encode_accelerator_perf_tests.cc
+++ b/chromium/media/gpu/video_encode_accelerator_perf_tests.cc
@@ -10,6 +10,7 @@
#include "base/command_line.h"
#include "base/files/file_util.h"
#include "base/json/json_writer.h"
+#include "base/strings/string_number_conversions.h"
#include "media/base/bitstream_buffer.h"
#include "media/base/media_util.h"
#include "media/base/test_data_util.h"
@@ -36,7 +37,8 @@ namespace {
constexpr const char* usage_msg =
"usage: video_encode_accelerator_perf_tests\n"
" [--codec=<codec>] [--num_spatial_layers=<number>]\n"
- " [--num_temporal_layers=<number>] [--bitrate=<bitrate>]\n"
+ " [--num_temporal_layers=<number>] [--reverse]\n"
+ " [--bitrate=<bitrate>]\n"
" [-v=<level>] [--vmodule=<config>] [--output_folder]\n"
" [--gtest_help] [--help]\n"
" [<video path>] [<video metadata path>]\n";
@@ -58,6 +60,9 @@ constexpr const char* help_msg =
" --num_temporal_layers the number of temporal layers of the encoded\n"
" bitstream. A default value is 1. Only affected\n"
" if --codec=vp9 currently.\n"
+ " --reverse the stream plays backwards if the stream reaches\n"
+ " end of stream. So the input stream to be encoded\n"
+ " is consecutive. By default this is false.\n"
" --bitrate bitrate (bits in second) of a produced bitstram.\n"
" If not specified, a proper value for the video\n"
" resolution is selected by the test.\n"
@@ -82,7 +87,7 @@ constexpr size_t kMaxSpatialLayers = 3;
// The event timeout used in perf tests because encoding 2160p
// |kNumFramesToEncodeForPerformance| frames take much time.
-constexpr base::TimeDelta kPerfEventTimeout = base::TimeDelta::FromSeconds(180);
+constexpr base::TimeDelta kPerfEventTimeout = base::Seconds(180);
// Default output folder used to store performance metrics.
constexpr const base::FilePath::CharType* kDefaultOutputFolder =
@@ -468,13 +473,13 @@ class VideoEncoderTest : public ::testing::Test {
LOG_ASSERT(!bitstream_processors.empty())
<< "Failed to create bitstream processors";
- VideoEncoderClientConfig config(video, profile, spatial_layers, bitrate);
+ VideoEncoderClientConfig config(video, profile, spatial_layers, bitrate,
+ g_env->Reverse());
config.input_storage_type =
VideoEncodeAccelerator::Config::StorageType::kGpuMemoryBuffer;
config.num_frames_to_encode = kNumFramesToEncodeForPerformance;
if (encode_rate) {
- config.encode_interval =
- base::TimeDelta::FromSeconds(1u) / encode_rate.value();
+ config.encode_interval = base::Seconds(1u) / encode_rate.value();
}
auto video_encoder =
@@ -538,7 +543,7 @@ class VideoEncoderTest : public ::testing::Test {
spatial_layers) {
std::vector<std::unique_ptr<BitstreamProcessor>> bitstream_processors;
- raw_data_helper_ = RawDataHelper::Create(video);
+ raw_data_helper_ = RawDataHelper::Create(video, g_env->Reverse());
if (!raw_data_helper_) {
LOG(ERROR) << "Failed to create raw data helper";
return bitstream_processors;
@@ -575,8 +580,7 @@ class VideoEncoderTest : public ::testing::Test {
scoped_refptr<const VideoFrame> GetModelFrame(const gfx::Rect& visible_rect,
size_t frame_index) {
LOG_ASSERT(raw_data_helper_);
- auto frame =
- raw_data_helper_->GetFrame(frame_index % g_env->Video()->NumFrames());
+ auto frame = raw_data_helper_->GetFrame(frame_index);
if (!frame)
return nullptr;
if (visible_rect.size() == frame->visible_rect().size())
@@ -673,6 +677,7 @@ int main(int argc, char** argv) {
std::string codec = "h264";
size_t num_spatial_layers = 1u;
size_t num_temporal_layers = 1u;
+ bool reverse = false;
absl::optional<uint32_t> encode_bitrate;
// Parse command line arguments.
@@ -710,6 +715,8 @@ int main(int argc, char** argv) {
<< "\n";
return EXIT_FAILURE;
}
+ } else if (it->first == "reverse") {
+ reverse = true;
} else if (it->first == "bitrate") {
unsigned value;
if (!base::StringToUint(it->second, &value)) {
@@ -732,7 +739,7 @@ int main(int argc, char** argv) {
media::test::VideoEncoderTestEnvironment::Create(
video_path, video_metadata_path, false, base::FilePath(output_folder),
codec, num_temporal_layers, num_spatial_layers,
- false /* output_bitstream */, encode_bitrate);
+ false /* output_bitstream */, encode_bitrate, reverse);
if (!test_environment)
return EXIT_FAILURE;
diff --git a/chromium/media/gpu/video_encode_accelerator_tests.cc b/chromium/media/gpu/video_encode_accelerator_tests.cc
index 7f9854d775b..f71209d95a5 100644
--- a/chromium/media/gpu/video_encode_accelerator_tests.cc
+++ b/chromium/media/gpu/video_encode_accelerator_tests.cc
@@ -41,7 +41,7 @@ namespace {
constexpr const char* usage_msg =
"usage: video_encode_accelerator_tests\n"
" [--codec=<codec>] [--num_temporal_layers=<number>]\n"
- " [--num_spatial_layers=<number>]\n"
+ " [--num_spatial_layers=<number>] [--reverse]\n"
" [--disable_validator] [--output_bitstream]\n"
" [--output_images=(all|corrupt)] [--output_format=(png|yuv)]\n"
" [--output_folder=<filepath>] [--output_limit=<number>]\n"
@@ -67,6 +67,9 @@ constexpr const char* help_msg =
" bitstream. Only used in --codec=vp9 currently.\n"
" Spatial SVC encoding is applied only in\n"
" NV12Dmabuf test cases.\n"
+ " --reverse the stream plays backwards if the stream reaches\n"
+ " end of stream. So the input stream to be encoded\n"
+ " is consecutive. By default this is false. \n"
" --disable_validator disable validation of encoded bitstream.\n"
" --output_bitstream save the output bitstream in either H264 AnnexB\n"
" format (for H264) or IVF format (for vp8 and\n"
@@ -98,8 +101,7 @@ constexpr size_t kNumFramesToEncodeForBitrateCheck = 300;
constexpr double kBitrateTolerance = 0.1;
// The event timeout used in bitrate check tests because encoding 2160p and
// validating |kNumFramesToEncodeBitrateCheck| frames take much time.
-constexpr base::TimeDelta kBitrateCheckEventTimeout =
- base::TimeDelta::FromSeconds(180);
+constexpr base::TimeDelta kBitrateCheckEventTimeout = base::Seconds(180);
media::test::VideoEncoderTestEnvironment* g_env;
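The TimeDelta hunks above are part of the Chromium-wide rename of the base::TimeDelta::FromXxx() factories to the shorter base::Seconds()/base::Milliseconds() helpers; behaviour is unchanged. A minimal before/after sketch, assuming Chromium's base/time/time.h:

#include "base/time/time.h"

// Old spelling, removed by this baseline:
//   constexpr base::TimeDelta kTimeout = base::TimeDelta::FromSeconds(180);
// New spelling used throughout these test files:
constexpr base::TimeDelta kTimeout = base::Seconds(180);
constexpr base::TimeDelta kEncodeInterval = base::Seconds(1) / 30;  // ~33 ms per frame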
@@ -113,7 +115,8 @@ class VideoEncoderTest : public ::testing::Test {
CHECK_LE(spatial_layers.size(), 1u);
return VideoEncoderClientConfig(g_env->Video(), g_env->Profile(),
- spatial_layers, g_env->Bitrate());
+ spatial_layers, g_env->Bitrate(),
+ g_env->Reverse());
}
std::unique_ptr<VideoEncoder> CreateVideoEncoder(
@@ -206,8 +209,8 @@ class VideoEncoderTest : public ::testing::Test {
VideoCodecProfileToVideoCodec(config.output_profile);
if (g_env->SaveOutputBitstream()) {
base::FilePath::StringPieceType extension =
- codec == VideoCodec::kCodecH264 ? FILE_PATH_LITERAL("h264")
- : FILE_PATH_LITERAL("ivf");
+ codec == VideoCodec::kH264 ? FILE_PATH_LITERAL("h264")
+ : FILE_PATH_LITERAL("ivf");
auto output_bitstream_filepath =
g_env->OutputFolder()
.Append(g_env->GetTestOutputFilePath())
@@ -248,14 +251,14 @@ class VideoEncoderTest : public ::testing::Test {
}
switch (codec) {
- case kCodecH264:
+ case VideoCodec::kH264:
bitstream_processors.emplace_back(new H264Validator(
config.output_profile, visible_rect, config.num_temporal_layers));
break;
- case kCodecVP8:
+ case VideoCodec::kVP8:
bitstream_processors.emplace_back(new VP8Validator(visible_rect));
break;
- case kCodecVP9:
+ case VideoCodec::kVP9:
bitstream_processors.emplace_back(new VP9Validator(
config.output_profile, visible_rect, config.num_spatial_layers,
config.num_temporal_layers));
@@ -266,7 +269,7 @@ class VideoEncoderTest : public ::testing::Test {
break;
}
- raw_data_helper_ = RawDataHelper::Create(video);
+ raw_data_helper_ = RawDataHelper::Create(video, g_env->Reverse());
if (!raw_data_helper_) {
LOG(ERROR) << "Failed to create raw data helper";
return bitstream_processors;
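The kCodecH264 → VideoCodec::kH264 renames in this and the following hunks come from media::VideoCodec having become a scoped enum, so every enumerator now needs qualification. A generic illustration of the call-site impact; the enum below is illustrative, not the actual media/base definition:

// Unscoped enum: enumerators leak into the enclosing namespace (old style).
enum OldCodec { kCodecH264, kCodecVP8, kCodecVP9 };

// Scoped enum: enumerators must be qualified and no longer convert implicitly.
enum class VideoCodec { kH264, kVP8, kVP9 };

const char* BitstreamExtension(VideoCodec codec) {
  switch (codec) {
    case VideoCodec::kH264:
      return "h264";
    case VideoCodec::kVP8:
    case VideoCodec::kVP9:
      return "ivf";
  }
  return "bin";
}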
@@ -324,8 +327,7 @@ class VideoEncoderTest : public ::testing::Test {
scoped_refptr<const VideoFrame> GetModelFrame(const gfx::Rect& visible_rect,
size_t frame_index) {
LOG_ASSERT(raw_data_helper_);
- auto frame =
- raw_data_helper_->GetFrame(frame_index % g_env->Video()->NumFrames());
+ auto frame = raw_data_helper_->GetFrame(frame_index);
if (!frame)
return nullptr;
if (visible_rect.size() == frame->visible_rect().size())
@@ -584,7 +586,8 @@ TEST_F(VideoEncoderTest, FlushAtEndOfStream_NV12Dmabuf) {
Video* nv12_video = g_env->GenerateNV12Video();
VideoEncoderClientConfig config(nv12_video, g_env->Profile(),
- g_env->SpatialLayers(), g_env->Bitrate());
+ g_env->SpatialLayers(), g_env->Bitrate(),
+ g_env->Reverse());
config.input_storage_type =
VideoEncodeAccelerator::Config::StorageType::kGpuMemoryBuffer;
@@ -631,7 +634,7 @@ TEST_F(VideoEncoderTest, FlushAtEndOfStream_NV12DmabufScaling) {
}
VideoEncoderClientConfig config(
nv12_video, g_env->Profile(), spatial_layers,
- g_env->GetDefaultVideoBitrateAllocation(new_bitrate));
+ g_env->GetDefaultVideoBitrateAllocation(new_bitrate), g_env->Reverse());
config.output_resolution = output_resolution;
config.input_storage_type =
VideoEncodeAccelerator::Config::StorageType::kGpuMemoryBuffer;
@@ -672,7 +675,8 @@ TEST_F(VideoEncoderTest, FlushAtEndOfStream_NV12DmabufCroppingTopAndBottom) {
expanded_resolution, expanded_visible_rect);
ASSERT_TRUE(nv12_expanded_video);
VideoEncoderClientConfig config(nv12_expanded_video.get(), g_env->Profile(),
- g_env->SpatialLayers(), g_env->Bitrate());
+ g_env->SpatialLayers(), g_env->Bitrate(),
+ g_env->Reverse());
config.output_resolution = original_resolution;
config.input_storage_type =
VideoEncodeAccelerator::Config::StorageType::kGpuMemoryBuffer;
@@ -713,7 +717,8 @@ TEST_F(VideoEncoderTest, FlushAtEndOfStream_NV12DmabufCroppingRightAndLeft) {
expanded_resolution, expanded_visible_rect);
ASSERT_TRUE(nv12_expanded_video);
VideoEncoderClientConfig config(nv12_expanded_video.get(), g_env->Profile(),
- g_env->SpatialLayers(), g_env->Bitrate());
+ g_env->SpatialLayers(), g_env->Bitrate(),
+ g_env->Reverse());
config.output_resolution = original_resolution;
config.input_storage_type =
VideoEncodeAccelerator::Config::StorageType::kGpuMemoryBuffer;
@@ -773,7 +778,8 @@ TEST_F(VideoEncoderTest, DeactivateAndActivateSpatialLayers) {
bitrate_allocations.emplace_back(bitrate_allocation);
VideoEncoderClientConfig config(nv12_video, g_env->Profile(),
- g_env->SpatialLayers(), g_env->Bitrate());
+ g_env->SpatialLayers(), g_env->Bitrate(),
+ g_env->Reverse());
config.input_storage_type =
VideoEncodeAccelerator::Config::StorageType::kGpuMemoryBuffer;
std::vector<size_t> num_frames_to_encode(bitrate_allocations.size());
@@ -825,6 +831,7 @@ int main(int argc, char** argv) {
size_t num_temporal_layers = 1u;
size_t num_spatial_layers = 1u;
bool output_bitstream = false;
+ bool reverse = false;
media::test::FrameOutputConfig frame_output_config;
base::FilePath output_folder =
base::FilePath(base::FilePath::kCurrentDirectory);
@@ -856,6 +863,8 @@ int main(int argc, char** argv) {
enable_bitstream_validator = false;
} else if (it->first == "output_bitstream") {
output_bitstream = true;
+ } else if (it->first == "reverse") {
+ reverse = true;
} else if (it->first == "output_images") {
if (it->second == "all") {
frame_output_config.output_mode = media::test::FrameOutputMode::kAll;
@@ -902,7 +911,7 @@ int main(int argc, char** argv) {
video_path, video_metadata_path, enable_bitstream_validator,
output_folder, codec, num_temporal_layers, num_spatial_layers,
output_bitstream,
- /*output_bitrate=*/absl::nullopt, frame_output_config);
+ /*output_bitrate=*/absl::nullopt, reverse, frame_output_config);
if (!test_environment)
return EXIT_FAILURE;
diff --git a/chromium/media/gpu/video_frame_mapper.h b/chromium/media/gpu/video_frame_mapper.h
index 9dcffc730e6..bd565409db5 100644
--- a/chromium/media/gpu/video_frame_mapper.h
+++ b/chromium/media/gpu/video_frame_mapper.h
@@ -18,6 +18,9 @@ namespace media {
// VideoFrameMapper should be created by using VideoFrameMapperFactory.
class MEDIA_GPU_EXPORT VideoFrameMapper {
public:
+ VideoFrameMapper(const VideoFrameMapper&) = delete;
+ VideoFrameMapper& operator=(const VideoFrameMapper&) = delete;
+
virtual ~VideoFrameMapper() = default;
// Maps data referred by |video_frame| and creates a VideoFrame whose dtor
@@ -33,8 +36,6 @@ class MEDIA_GPU_EXPORT VideoFrameMapper {
// The allowed pixel format of video frames on Map().
VideoPixelFormat format_;
-
- DISALLOW_COPY_AND_ASSIGN(VideoFrameMapper);
};
} // namespace media
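This header and the ones that follow drop the DISALLOW_COPY_AND_ASSIGN macro from base/macros.h in favour of explicitly deleted copy operations declared in the public section, matching current Chromium style. The two spellings are equivalent:

// Before (base/macros.h):
//   class Foo {
//    private:
//     DISALLOW_COPY_AND_ASSIGN(Foo);  // deleted copy ctor and copy assignment
//   };

// After: the deleted operations are spelled out, conventionally right after
// the constructors.
class Foo {
 public:
  Foo() = default;
  Foo(const Foo&) = delete;
  Foo& operator=(const Foo&) = delete;
};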
diff --git a/chromium/media/gpu/vp8_decoder.h b/chromium/media/gpu/vp8_decoder.h
index 193972faba4..230db5372af 100644
--- a/chromium/media/gpu/vp8_decoder.h
+++ b/chromium/media/gpu/vp8_decoder.h
@@ -30,6 +30,10 @@ class MEDIA_GPU_EXPORT VP8Decoder : public AcceleratedVideoDecoder {
class MEDIA_GPU_EXPORT VP8Accelerator {
public:
VP8Accelerator();
+
+ VP8Accelerator(const VP8Accelerator&) = delete;
+ VP8Accelerator& operator=(const VP8Accelerator&) = delete;
+
virtual ~VP8Accelerator();
// Create a new VP8Picture that the decoder client can use for decoding
@@ -54,12 +58,13 @@ class MEDIA_GPU_EXPORT VP8Decoder : public AcceleratedVideoDecoder {
// to |pic| after calling this method.
// Return true if successful.
virtual bool OutputPicture(scoped_refptr<VP8Picture> pic) = 0;
-
- private:
- DISALLOW_COPY_AND_ASSIGN(VP8Accelerator);
};
explicit VP8Decoder(std::unique_ptr<VP8Accelerator> accelerator);
+
+ VP8Decoder(const VP8Decoder&) = delete;
+ VP8Decoder& operator=(const VP8Decoder&) = delete;
+
~VP8Decoder() override;
// AcceleratedVideoDecoder implementation.
@@ -105,8 +110,6 @@ class MEDIA_GPU_EXPORT VP8Decoder : public AcceleratedVideoDecoder {
int vertical_scale_;
const std::unique_ptr<VP8Accelerator> accelerator_;
-
- DISALLOW_COPY_AND_ASSIGN(VP8Decoder);
};
} // namespace media
diff --git a/chromium/media/gpu/vp8_reference_frame_vector.h b/chromium/media/gpu/vp8_reference_frame_vector.h
index 6f0604fe4b6..8e0e2e8a0e0 100644
--- a/chromium/media/gpu/vp8_reference_frame_vector.h
+++ b/chromium/media/gpu/vp8_reference_frame_vector.h
@@ -18,6 +18,10 @@ class VP8Picture;
class Vp8ReferenceFrameVector {
public:
Vp8ReferenceFrameVector();
+
+ Vp8ReferenceFrameVector(const Vp8ReferenceFrameVector&) = delete;
+ Vp8ReferenceFrameVector& operator=(const Vp8ReferenceFrameVector&) = delete;
+
~Vp8ReferenceFrameVector();
void Refresh(scoped_refptr<VP8Picture> pic);
@@ -30,7 +34,6 @@ class Vp8ReferenceFrameVector {
reference_frames_;
SEQUENCE_CHECKER(sequence_checker_);
- DISALLOW_COPY_AND_ASSIGN(Vp8ReferenceFrameVector);
};
} // namespace media
diff --git a/chromium/media/gpu/vp9_decoder.cc b/chromium/media/gpu/vp9_decoder.cc
index 58a7aa0c8f3..d8870fc88cb 100644
--- a/chromium/media/gpu/vp9_decoder.cc
+++ b/chromium/media/gpu/vp9_decoder.cc
@@ -37,8 +37,9 @@ std::vector<uint32_t> GetSpatialLayerFrameSize(
return {};
}
return std::vector<uint32_t>(cue_data, cue_data + num_of_layers);
-#endif // defined(ARCH_CPU_X86_FAMILY) && BUILDFLAG(IS_CHROMEOS_ASH)
+#else
return {};
+#endif // defined(ARCH_CPU_X86_FAMILY) && BUILDFLAG(IS_CHROMEOS_ASH)
}
VideoCodecProfile VP9ProfileToVideoCodecProfile(uint8_t profile) {
diff --git a/chromium/media/gpu/vp9_decoder.h b/chromium/media/gpu/vp9_decoder.h
index 2698e9893c7..534f8c30514 100644
--- a/chromium/media/gpu/vp9_decoder.h
+++ b/chromium/media/gpu/vp9_decoder.h
@@ -52,6 +52,10 @@ class MEDIA_GPU_EXPORT VP9Decoder : public AcceleratedVideoDecoder {
kTryAgain,
};
VP9Accelerator();
+
+ VP9Accelerator(const VP9Accelerator&) = delete;
+ VP9Accelerator& operator=(const VP9Accelerator&) = delete;
+
virtual ~VP9Accelerator();
// Create a new VP9Picture that the decoder client can use for initial
@@ -106,15 +110,16 @@ class MEDIA_GPU_EXPORT VP9Decoder : public AcceleratedVideoDecoder {
// success, false otherwise.
virtual bool GetFrameContext(scoped_refptr<VP9Picture> pic,
Vp9FrameContext* frame_ctx) = 0;
-
- private:
- DISALLOW_COPY_AND_ASSIGN(VP9Accelerator);
};
explicit VP9Decoder(
std::unique_ptr<VP9Accelerator> accelerator,
VideoCodecProfile profile,
const VideoColorSpace& container_color_space = VideoColorSpace());
+
+ VP9Decoder(const VP9Decoder&) = delete;
+ VP9Decoder& operator=(const VP9Decoder&) = delete;
+
~VP9Decoder() override;
// AcceleratedVideoDecoder implementation.
@@ -187,8 +192,6 @@ class MEDIA_GPU_EXPORT VP9Decoder : public AcceleratedVideoDecoder {
const std::unique_ptr<VP9Accelerator> accelerator_;
Vp9Parser parser_;
-
- DISALLOW_COPY_AND_ASSIGN(VP9Decoder);
};
} // namespace media
diff --git a/chromium/media/gpu/vp9_reference_frame_vector.h b/chromium/media/gpu/vp9_reference_frame_vector.h
index bf91596b320..4685e584305 100644
--- a/chromium/media/gpu/vp9_reference_frame_vector.h
+++ b/chromium/media/gpu/vp9_reference_frame_vector.h
@@ -20,6 +20,10 @@ class VP9Picture;
class Vp9ReferenceFrameVector {
public:
Vp9ReferenceFrameVector();
+
+ Vp9ReferenceFrameVector(const Vp9ReferenceFrameVector&) = delete;
+ Vp9ReferenceFrameVector& operator=(const Vp9ReferenceFrameVector&) = delete;
+
~Vp9ReferenceFrameVector();
void Refresh(scoped_refptr<VP9Picture> pic);
@@ -31,7 +35,6 @@ class Vp9ReferenceFrameVector {
std::array<scoped_refptr<VP9Picture>, kVp9NumRefFrames> reference_frames_;
SEQUENCE_CHECKER(sequence_checker_);
- DISALLOW_COPY_AND_ASSIGN(Vp9ReferenceFrameVector);
};
} // namespace media
diff --git a/chromium/media/gpu/windows/d3d11_av1_accelerator.cc b/chromium/media/gpu/windows/d3d11_av1_accelerator.cc
index 401f5917c0b..4c9e1dc1c64 100644
--- a/chromium/media/gpu/windows/d3d11_av1_accelerator.cc
+++ b/chromium/media/gpu/windows/d3d11_av1_accelerator.cc
@@ -397,6 +397,11 @@ D3D11AV1Accelerator::D3D11AV1Accelerator(
D3D11AV1Accelerator::~D3D11AV1Accelerator() {}
void D3D11AV1Accelerator::RecordFailure(const std::string& fail_type,
+ media::Status error) {
+ RecordFailure(fail_type, error.message(), error.code());
+}
+
+void D3D11AV1Accelerator::RecordFailure(const std::string& fail_type,
const std::string& message,
StatusCode reason) {
MEDIA_LOG(ERROR, media_log_)
@@ -502,9 +507,16 @@ DecodeStatus D3D11AV1Accelerator::SubmitDecode(
base::span<const uint8_t> data) {
const D3D11AV1Picture* pic_ptr = static_cast<const D3D11AV1Picture*>(&pic);
do {
- const auto hr = video_context_->DecoderBeginFrame(
- video_decoder_.Get(), pic_ptr->picture_buffer()->output_view().Get(), 0,
- nullptr);
+ ID3D11VideoDecoderOutputView* output_view = nullptr;
+ auto result = pic_ptr->picture_buffer()->AcquireOutputView();
+ if (result.has_value()) {
+ output_view = std::move(result).value();
+ } else {
+ RecordFailure("AcquireOutputView", std::move(result).error());
+ return DecodeStatus::kFail;
+ }
+ const auto hr = video_context_->DecoderBeginFrame(video_decoder_.Get(),
+ output_view, 0, nullptr);
if (SUCCEEDED(hr)) {
break;
} else if (hr == E_PENDING || hr == D3DERR_WASSTILLDRAWING) {
diff --git a/chromium/media/gpu/windows/d3d11_av1_accelerator.h b/chromium/media/gpu/windows/d3d11_av1_accelerator.h
index 34854acbe58..90d979e9d99 100644
--- a/chromium/media/gpu/windows/d3d11_av1_accelerator.h
+++ b/chromium/media/gpu/windows/d3d11_av1_accelerator.h
@@ -30,6 +30,10 @@ class D3D11AV1Accelerator : public AV1Decoder::AV1Accelerator {
MediaLog* media_log,
ComD3D11VideoDevice video_device,
std::unique_ptr<VideoContextWrapper> video_context);
+
+ D3D11AV1Accelerator(const D3D11AV1Accelerator&) = delete;
+ D3D11AV1Accelerator& operator=(const D3D11AV1Accelerator&) = delete;
+
~D3D11AV1Accelerator() override;
scoped_refptr<AV1Picture> CreateAV1Picture(bool apply_grain) override;
@@ -49,6 +53,8 @@ class D3D11AV1Accelerator : public AV1Decoder::AV1Accelerator {
bool SubmitDecoderBuffer(
const DXVA_PicParams_AV1& pic_params,
const libgav1::Vector<libgav1::TileBuffer>& tile_buffers);
+
+ void RecordFailure(const std::string& fail_type, media::Status error);
void RecordFailure(const std::string& fail_type,
const std::string& message,
StatusCode reason);
@@ -65,8 +71,6 @@ class D3D11AV1Accelerator : public AV1Decoder::AV1Accelerator {
ComD3D11VideoDecoder video_decoder_;
ComD3D11VideoDevice video_device_;
std::unique_ptr<VideoContextWrapper> video_context_;
-
- DISALLOW_COPY_AND_ASSIGN(D3D11AV1Accelerator);
};
} // namespace media
diff --git a/chromium/media/gpu/windows/d3d11_copying_texture_wrapper.cc b/chromium/media/gpu/windows/d3d11_copying_texture_wrapper.cc
index 8d0a4c2cc15..a73cc2828ef 100644
--- a/chromium/media/gpu/windows/d3d11_copying_texture_wrapper.cc
+++ b/chromium/media/gpu/windows/d3d11_copying_texture_wrapper.cc
@@ -29,10 +29,22 @@ CopyingTexture2DWrapper::CopyingTexture2DWrapper(
CopyingTexture2DWrapper::~CopyingTexture2DWrapper() = default;
+// The copy path does not need to acquire the keyed mutex until
+// VideoProcessorBlt is called.
+Status CopyingTexture2DWrapper::AcquireKeyedMutexIfNeeded() {
+ return OkStatus();
+}
+
Status CopyingTexture2DWrapper::ProcessTexture(
const gfx::ColorSpace& input_color_space,
MailboxHolderArray* mailbox_dest,
gfx::ColorSpace* output_color_space) {
+ // Acquire keyed mutex for VideoProcessorBlt ops.
+ Status status = output_texture_wrapper_->AcquireKeyedMutexIfNeeded();
+ if (!status.is_ok()) {
+ return status;
+ }
+
D3D11_VIDEO_PROCESSOR_OUTPUT_VIEW_DESC output_view_desc = {
D3D11_VPOV_DIMENSION_TEXTURE2D};
output_view_desc.Texture2D.MipSlice = 0;
@@ -101,9 +113,9 @@ Status CopyingTexture2DWrapper::Init(
texture_ = texture;
array_slice_ = array_slice;
- return output_texture_wrapper_->Init(std::move(gpu_task_runner),
- std::move(get_helper_cb),
- output_texture_, /*array_slice=*/0);
+ return output_texture_wrapper_->Init(
+ std::move(gpu_task_runner), std::move(get_helper_cb), output_texture_,
+ /*array_slice=*/0);
}
void CopyingTexture2DWrapper::SetStreamHDRMetadata(
diff --git a/chromium/media/gpu/windows/d3d11_copying_texture_wrapper.h b/chromium/media/gpu/windows/d3d11_copying_texture_wrapper.h
index 31c6a40570b..a533af7044e 100644
--- a/chromium/media/gpu/windows/d3d11_copying_texture_wrapper.h
+++ b/chromium/media/gpu/windows/d3d11_copying_texture_wrapper.h
@@ -31,6 +31,8 @@ class MEDIA_GPU_EXPORT CopyingTexture2DWrapper : public Texture2DWrapper {
absl::optional<gfx::ColorSpace> output_color_space);
~CopyingTexture2DWrapper() override;
+ Status AcquireKeyedMutexIfNeeded() override;
+
Status ProcessTexture(const gfx::ColorSpace& input_color_space,
MailboxHolderArray* mailbox_dest,
gfx::ColorSpace* output_color_space) override;
diff --git a/chromium/media/gpu/windows/d3d11_copying_texture_wrapper_unittest.cc b/chromium/media/gpu/windows/d3d11_copying_texture_wrapper_unittest.cc
index ca25feba566..83a70411116 100644
--- a/chromium/media/gpu/windows/d3d11_copying_texture_wrapper_unittest.cc
+++ b/chromium/media/gpu/windows/d3d11_copying_texture_wrapper_unittest.cc
@@ -106,7 +106,12 @@ class MockTexture2DWrapper : public Texture2DWrapper {
return MockInit();
}
+ Status AcquireKeyedMutexIfNeeded() override {
+ return MockAcquireKeyedMutexIfNeeded();
+ }
+
MOCK_METHOD0(MockInit, Status());
+ MOCK_METHOD0(MockAcquireKeyedMutexIfNeeded, Status());
MOCK_METHOD0(MockProcessTexture, Status());
MOCK_METHOD1(SetStreamHDRMetadata,
void(const gfx::HDRMetadata& stream_metadata));
@@ -122,7 +127,7 @@ CommandBufferHelperPtr UselessHelper() {
class D3D11CopyingTexture2DWrapperTest
: public ::testing::TestWithParam<
- std::tuple<HRESULT, HRESULT, HRESULT, bool, bool, bool, bool>> {
+ std::tuple<HRESULT, HRESULT, HRESULT, bool, bool, bool, bool, bool>> {
public:
#define FIELD(TYPE, NAME, INDEX) \
TYPE Get##NAME() { return std::get<INDEX>(GetParam()); }
@@ -133,6 +138,7 @@ class D3D11CopyingTexture2DWrapperTest
FIELD(bool, TextureWrapperInit, 4)
FIELD(bool, ProcessTexture, 5)
FIELD(bool, PassthroughColorSpace, 6)
+ FIELD(bool, AcquireKeyedMutexIfNeeded, 7)
#undef FIELD
void SetUp() override {
@@ -166,6 +172,11 @@ class D3D11CopyingTexture2DWrapperTest
? StatusCode::kOk
: StatusCode::kCodeOnlyForTesting));
+ ON_CALL(*result.get(), MockAcquireKeyedMutexIfNeeded())
+ .WillByDefault(Return(GetAcquireKeyedMutexIfNeeded()
+ ? StatusCode::kOk
+ : StatusCode::kCodeOnlyForTesting));
+
ON_CALL(*result.get(), MockProcessTexture())
.WillByDefault(Return(GetProcessTexture()
? StatusCode::kOk
@@ -183,7 +194,7 @@ class D3D11CopyingTexture2DWrapperTest
}
bool ProcessTextureSucceeds() {
- return GetProcessTexture() &&
+ return GetAcquireKeyedMutexIfNeeded() && GetProcessTexture() &&
SUCCEEDED(GetCreateVideoProcessorOutputView()) &&
SUCCEEDED(GetCreateVideoProcessorInputView()) &&
SUCCEEDED(GetVideoProcessorBlt());
@@ -201,6 +212,7 @@ INSTANTIATE_TEST_CASE_P(CopyingTexture2DWrapperTest,
Bool(),
Bool(),
Bool(),
+ Bool(),
Bool()));
// For every potential return value combination for the D3D11VideoProcessor,
diff --git a/chromium/media/gpu/windows/d3d11_decoder_configurator.cc b/chromium/media/gpu/windows/d3d11_decoder_configurator.cc
index 59f0823ca5a..2644f13272e 100644
--- a/chromium/media/gpu/windows/d3d11_decoder_configurator.cc
+++ b/chromium/media/gpu/windows/d3d11_decoder_configurator.cc
@@ -27,9 +27,12 @@ D3D11DecoderConfigurator::D3D11DecoderConfigurator(
gfx::Size coded_size,
bool is_encrypted,
bool supports_swap_chain)
- : dxgi_format_(decoder_output_dxgifmt), decoder_guid_(decoder_guid) {
+ : dxgi_format_(decoder_output_dxgifmt),
+ decoder_guid_(decoder_guid),
+ supports_swap_chain_(supports_swap_chain),
+ is_encrypted_(is_encrypted) {
SetUpDecoderDescriptor(coded_size);
- SetUpTextureDescriptor(supports_swap_chain, is_encrypted);
+ SetUpTextureDescriptor();
}
// static
@@ -38,13 +41,18 @@ std::unique_ptr<D3D11DecoderConfigurator> D3D11DecoderConfigurator::Create(
const gpu::GpuDriverBugWorkarounds& workarounds,
const VideoDecoderConfig& config,
uint8_t bit_depth,
- MediaLog* media_log) {
+ MediaLog* media_log,
+ bool use_shared_handle) {
+ // Decoder swap chains do not support shared resources; see
+ // https://crbug.com/911847. To let Kaby Lake+ systems use shared handles,
+ // decode swap chain support is disabled whenever shared handles are enabled.
const bool supports_nv12_decode_swap_chain =
- gl::DirectCompositionSurfaceWin::IsDecodeSwapChainSupported();
+ gl::DirectCompositionSurfaceWin::IsDecodeSwapChainSupported() &&
+ !use_shared_handle;
const auto decoder_dxgi_format =
bit_depth == 8 ? DXGI_FORMAT_NV12 : DXGI_FORMAT_P010;
GUID decoder_guid = {};
- if (config.codec() == kCodecH264) {
+ if (config.codec() == VideoCodec::kH264) {
decoder_guid = D3D11_DECODER_PROFILE_H264_VLD_NOFGT;
} else if (config.profile() == VP9PROFILE_PROFILE0) {
decoder_guid = D3D11_DECODER_PROFILE_VP9_VLD_PROFILE0;
@@ -86,11 +94,37 @@ bool D3D11DecoderConfigurator::SupportsDevice(
StatusOr<ComD3D11Texture2D> D3D11DecoderConfigurator::CreateOutputTexture(
ComD3D11Device device,
gfx::Size size,
- uint32_t array_size) {
+ uint32_t array_size,
+ bool use_shared_handle) {
output_texture_desc_.Width = size.width();
output_texture_desc_.Height = size.height();
output_texture_desc_.ArraySize = array_size;
+ if (use_shared_handle) {
+ // Update the decoder output texture usage to support shared handle and
+ // keyed_mutex if required. SwapChain should be disabled and the frame
+ // shouldn't be encrypted.
+ DCHECK(!supports_swap_chain_);
+ DCHECK(!is_encrypted_);
+ output_texture_desc_.MiscFlags = D3D11_RESOURCE_MISC_SHARED_NTHANDLE |
+ D3D11_RESOURCE_MISC_SHARED_KEYEDMUTEX;
+ } else if (supports_swap_chain_) {
+ // Decode swap chains do not support shared resources.
+ // TODO(sunnyps): Find a workaround for when the decoder moves to its own
+ // thread and D3D device. See https://crbug.com/911847
+ // TODO(liberato): This depends on the configuration of the TextureSelector,
+ // to some degree. We should unset the flag only if it's binding and the
+ // decode swap chain is supported, as Intel driver is buggy on Gen9 and
+ // older devices without the flag. See https://crbug.com/1107403
+ output_texture_desc_.MiscFlags = 0;
+ } else {
+ // Create non-shareable texture for d3d11 video decoder.
+ // Default path: share the texture the legacy way (no NT handle, no keyed
+ // mutex) for the d3d11 video decoder.
+ }
+
+ if (is_encrypted_)
+ output_texture_desc_.MiscFlags |= D3D11_RESOURCE_MISC_HW_PROTECTED;
+
ComD3D11Texture2D texture;
HRESULT hr =
device->CreateTexture2D(&output_texture_desc_, nullptr, &texture);
@@ -117,8 +151,7 @@ void D3D11DecoderConfigurator::SetUpDecoderDescriptor(
}
// private
-void D3D11DecoderConfigurator::SetUpTextureDescriptor(bool supports_swap_chain,
- bool is_encrypted) {
+void D3D11DecoderConfigurator::SetUpTextureDescriptor() {
output_texture_desc_ = {};
output_texture_desc_.MipLevels = 1;
output_texture_desc_.Format = dxgi_format_;
@@ -126,19 +159,6 @@ void D3D11DecoderConfigurator::SetUpTextureDescriptor(bool supports_swap_chain,
output_texture_desc_.Usage = D3D11_USAGE_DEFAULT;
output_texture_desc_.BindFlags =
D3D11_BIND_DECODER | D3D11_BIND_SHADER_RESOURCE;
-
- // Decode swap chains do not support shared resources.
- // TODO(sunnyps): Find a workaround for when the decoder moves to its own
- // thread and D3D device. See https://crbug.com/911847
- // TODO(liberato): This depends on the configuration of the TextureSelector,
- // to some degree. We should unset the flag only if it's binding and the
- // decode swap chain is supported, as Intel driver is buggy on Gen9 and older
- // devices without the flag. See https://crbug.com/1107403
- output_texture_desc_.MiscFlags =
- supports_swap_chain ? 0 : D3D11_RESOURCE_MISC_SHARED;
-
- if (is_encrypted)
- output_texture_desc_.MiscFlags |= D3D11_RESOURCE_MISC_HW_PROTECTED;
}
} // namespace media
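Condensing the branches above, the MiscFlags the configurator now picks for the decoder output texture depend on three inputs. The helper below merely restates the logic of CreateOutputTexture in one place; it is not a separate API introduced by the patch:

#include <d3d11.h>

// Sketch: MiscFlags selection for the decoder output texture, as above.
UINT SelectOutputTextureMiscFlags(bool use_shared_handle,
                                  bool supports_swap_chain,
                                  bool is_encrypted) {
  UINT flags;
  if (use_shared_handle) {
    // Shareable across devices via an NT handle, synchronized by a keyed mutex.
    flags = D3D11_RESOURCE_MISC_SHARED_NTHANDLE |
            D3D11_RESOURCE_MISC_SHARED_KEYEDMUTEX;
  } else if (supports_swap_chain) {
    // Decode swap chains cannot consume shared resources.
    flags = 0;
  } else {
    // Legacy sharing for the binding path.
    flags = D3D11_RESOURCE_MISC_SHARED;
  }
  if (is_encrypted)
    flags |= D3D11_RESOURCE_MISC_HW_PROTECTED;
  return flags;
}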
diff --git a/chromium/media/gpu/windows/d3d11_decoder_configurator.h b/chromium/media/gpu/windows/d3d11_decoder_configurator.h
index beb99ecfb97..654d438778b 100644
--- a/chromium/media/gpu/windows/d3d11_decoder_configurator.h
+++ b/chromium/media/gpu/windows/d3d11_decoder_configurator.h
@@ -37,14 +37,16 @@ class MEDIA_GPU_EXPORT D3D11DecoderConfigurator {
const gpu::GpuDriverBugWorkarounds& workarounds,
const VideoDecoderConfig& config,
uint8_t bit_depth,
- MediaLog* media_log);
+ MediaLog* media_log,
+ bool use_shared_handle);
bool SupportsDevice(ComD3D11VideoDevice video_device);
// Create the decoder's output texture.
StatusOr<ComD3D11Texture2D> CreateOutputTexture(ComD3D11Device device,
gfx::Size size,
- uint32_t array_size);
+ uint32_t array_size,
+ bool use_shared_handle);
const D3D11_VIDEO_DECODER_DESC* DecoderDescriptor() const {
return &decoder_desc_;
@@ -57,13 +59,16 @@ class MEDIA_GPU_EXPORT D3D11DecoderConfigurator {
private:
// Set up instances of the parameter structs for D3D11 Functions
void SetUpDecoderDescriptor(const gfx::Size& coded_size);
- void SetUpTextureDescriptor(bool supports_swap_chain, bool is_encrypted);
+ void SetUpTextureDescriptor();
D3D11_TEXTURE2D_DESC output_texture_desc_;
D3D11_VIDEO_DECODER_DESC decoder_desc_;
const DXGI_FORMAT dxgi_format_;
const GUID decoder_guid_;
+
+ const bool supports_swap_chain_;
+ const bool is_encrypted_;
};
} // namespace media
diff --git a/chromium/media/gpu/windows/d3d11_decoder_configurator_unittest.cc b/chromium/media/gpu/windows/d3d11_decoder_configurator_unittest.cc
index 4bc4976933e..bf56ca1507e 100644
--- a/chromium/media/gpu/windows/d3d11_decoder_configurator_unittest.cc
+++ b/chromium/media/gpu/windows/d3d11_decoder_configurator_unittest.cc
@@ -27,7 +27,8 @@ class D3D11DecoderConfiguratorUnittest : public ::testing::Test {
bool encrypted) {
VideoDecoderConfig result;
result.Initialize(
- kUnknownVideoCodec, // It doesn't matter because it won't be used.
+ VideoCodec::kUnknown, // It doesn't matter because it won't
+ // be used.
profile, VideoDecoderConfig::AlphaMode::kIsOpaque, VideoColorSpace(),
kNoTransformation, size, {}, {}, {},
encrypted ? EncryptionScheme::kCenc : EncryptionScheme::kUnencrypted);
@@ -44,7 +45,8 @@ class D3D11DecoderConfiguratorUnittest : public ::testing::Test {
workarounds.disable_dxgi_zero_copy_video = false;
auto media_log = std::make_unique<NullMediaLog>();
return D3D11DecoderConfigurator::Create(prefs, workarounds, config,
- bit_depth, media_log.get());
+ bit_depth, media_log.get(),
+ false /*use_shared_handle*/);
}
};
diff --git a/chromium/media/gpu/windows/d3d11_h264_accelerator.cc b/chromium/media/gpu/windows/d3d11_h264_accelerator.cc
index ba3bf54dab7..e0631d877ec 100644
--- a/chromium/media/gpu/windows/d3d11_h264_accelerator.cc
+++ b/chromium/media/gpu/windows/d3d11_h264_accelerator.cc
@@ -109,9 +109,18 @@ DecoderStatus D3D11H264Accelerator::SubmitFrameMetadata(
D3D11H264Picture* d3d11_pic = pic->AsD3D11H264Picture();
if (!d3d11_pic)
return DecoderStatus::kFail;
- hr = video_context_->DecoderBeginFrame(
- video_decoder_.Get(), d3d11_pic->picture->output_view().Get(), 0,
- nullptr);
+
+ ID3D11VideoDecoderOutputView* output_view = nullptr;
+ auto result = d3d11_pic->picture->AcquireOutputView();
+ if (result.has_value()) {
+ output_view = std::move(result).value();
+ } else {
+ RecordFailure(std::move(result).error());
+ return DecoderStatus::kFail;
+ }
+
+ hr = video_context_->DecoderBeginFrame(video_decoder_.Get(), output_view, 0,
+ nullptr);
if (hr == E_PENDING || hr == D3DERR_WASSTILLDRAWING) {
// Hardware is busy. We should make the call again.
@@ -616,6 +625,10 @@ void D3D11H264Accelerator::RecordFailure(const std::string& reason,
base::UmaHistogramSparse("Media.D3D11.H264Status", static_cast<int>(code));
}
+void D3D11H264Accelerator::RecordFailure(media::Status error) const {
+ RecordFailure(error.message(), error.code());
+}
+
void D3D11H264Accelerator::SetVideoDecoder(ComD3D11VideoDecoder video_decoder) {
video_decoder_ = std::move(video_decoder);
}
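Both the AV1 and H.264 accelerators keep the existing busy-retry loop around DecoderBeginFrame; only the source of the output view changes. A minimal sketch of that loop against the raw D3D11 interfaces (the production code goes through its VideoContextWrapper, and its exact back-off is not shown in this diff):

#include <windows.h>
#include <d3d11.h>
#include <d3d9.h>  // D3DERR_WASSTILLDRAWING

// Sketch: retry DecoderBeginFrame while the hardware reports it is busy.
HRESULT BeginFrameWithRetry(ID3D11VideoContext* video_context,
                            ID3D11VideoDecoder* video_decoder,
                            ID3D11VideoDecoderOutputView* output_view) {
  for (;;) {
    const HRESULT hr = video_context->DecoderBeginFrame(
        video_decoder, output_view, /*ContentKeySize=*/0,
        /*pContentKey=*/nullptr);
    if (hr != E_PENDING && hr != D3DERR_WASSTILLDRAWING)
      return hr;  // S_OK, or a real failure to report.
    ::Sleep(1);   // Hardware is busy; back off briefly and call again.
  }
}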
diff --git a/chromium/media/gpu/windows/d3d11_h264_accelerator.h b/chromium/media/gpu/windows/d3d11_h264_accelerator.h
index 59198167366..1bc69533ce1 100644
--- a/chromium/media/gpu/windows/d3d11_h264_accelerator.h
+++ b/chromium/media/gpu/windows/d3d11_h264_accelerator.h
@@ -13,7 +13,7 @@
#include <vector>
#include "gpu/command_buffer/service/texture_manager.h"
-#include "media/base/status_codes.h"
+#include "media/base/status.h"
#include "media/base/video_frame.h"
#include "media/base/win/mf_helpers.h"
#include "media/gpu/h264_decoder.h"
@@ -39,6 +39,10 @@ class D3D11H264Accelerator : public H264Decoder::H264Accelerator {
MediaLog* media_log,
ComD3D11VideoDevice video_device,
std::unique_ptr<VideoContextWrapper> video_context);
+
+ D3D11H264Accelerator(const D3D11H264Accelerator&) = delete;
+ D3D11H264Accelerator& operator=(const D3D11H264Accelerator&) = delete;
+
~D3D11H264Accelerator() override;
// H264Decoder::H264Accelerator implementation.
@@ -89,6 +93,7 @@ class D3D11H264Accelerator : public H264Decoder::H264Accelerator {
void RecordFailure(const std::string& reason,
StatusCode code,
HRESULT hr = S_OK) const;
+ void RecordFailure(media::Status error) const;
D3D11VideoDecoderClient* client_;
MediaLog* media_log_ = nullptr;
@@ -117,8 +122,6 @@ class D3D11H264Accelerator : public H264Decoder::H264Accelerator {
std::vector<D3D11_VIDEO_DECODER_SUB_SAMPLE_MAPPING_BLOCK> subsamples_;
// IV for the current frame.
std::vector<uint8_t> frame_iv_;
-
- DISALLOW_COPY_AND_ASSIGN(D3D11H264Accelerator);
};
} // namespace media
diff --git a/chromium/media/gpu/windows/d3d11_picture_buffer.cc b/chromium/media/gpu/windows/d3d11_picture_buffer.cc
index aeb0d409860..54e1f42781c 100644
--- a/chromium/media/gpu/windows/d3d11_picture_buffer.cc
+++ b/chromium/media/gpu/windows/d3d11_picture_buffer.cc
@@ -11,6 +11,7 @@
#include <memory>
+#include "base/metrics/histogram_functions.h"
#include "gpu/command_buffer/service/mailbox_manager.h"
#include "gpu/command_buffer/service/texture_manager.h"
#include "media/base/media_log.h"
@@ -52,11 +53,12 @@ Status D3D11PictureBuffer::Init(
view_desc.ViewDimension = D3D11_VDOV_DIMENSION_TEXTURE2D;
view_desc.Texture2D.ArraySlice = array_slice_;
+ media_log_ = std::move(media_log);
Status result =
texture_wrapper_->Init(std::move(gpu_task_runner),
std::move(get_helper_cb), texture_, array_slice_);
if (!result.is_ok()) {
- MEDIA_LOG(ERROR, media_log) << "Failed to Initialize the wrapper";
+ MEDIA_LOG(ERROR, media_log_) << "Failed to Initialize the wrapper";
return result;
}
@@ -64,7 +66,7 @@ Status D3D11PictureBuffer::Init(
Texture().Get(), &view_desc, &output_view_);
if (!SUCCEEDED(hr)) {
- MEDIA_LOG(ERROR, media_log) << "Failed to CreateVideoDecoderOutputView";
+ MEDIA_LOG(ERROR, media_log_) << "Failed to CreateVideoDecoderOutputView";
return Status(StatusCode::kCreateDecoderOutputViewFailed)
.AddCause(HresultToStatus(hr));
}
@@ -84,4 +86,18 @@ ComD3D11Texture2D D3D11PictureBuffer::Texture() const {
return texture_;
}
+StatusOr<ID3D11VideoDecoderOutputView*> D3D11PictureBuffer::AcquireOutputView()
+ const {
+ Status result = texture_wrapper_->AcquireKeyedMutexIfNeeded();
+ if (!result.is_ok()) {
+ MEDIA_LOG(ERROR, media_log_)
+ << "Failed to acquire keyed mutex for native texture resource";
+ base::UmaHistogramSparse("Media.D3D11.PictureBuffer",
+ static_cast<int>(result.code()));
+ return result;
+ }
+
+ return output_view_.Get();
+}
+
} // namespace media
diff --git a/chromium/media/gpu/windows/d3d11_picture_buffer.h b/chromium/media/gpu/windows/d3d11_picture_buffer.h
index 8ceadaa3ed1..9871465658a 100644
--- a/chromium/media/gpu/windows/d3d11_picture_buffer.h
+++ b/chromium/media/gpu/windows/d3d11_picture_buffer.h
@@ -75,6 +75,7 @@ class MEDIA_GPU_EXPORT D3D11PictureBuffer
MailboxHolderArray* mailbox_dest,
gfx::ColorSpace* output_color_space);
ComD3D11Texture2D Texture() const;
+ StatusOr<ID3D11VideoDecoderOutputView*> AcquireOutputView() const;
const gfx::Size& size() const { return size_; }
size_t picture_index() const { return picture_index_; }
@@ -95,10 +96,6 @@ class MEDIA_GPU_EXPORT D3D11PictureBuffer
}
void set_in_picture_use(bool use) { in_picture_use_ = use; }
- const ComD3D11VideoDecoderOutputView& output_view() const {
- return output_view_;
- }
-
Texture2DWrapper* texture_wrapper() const { return texture_wrapper_.get(); }
// Shouldn't be here, but simpler for now.
@@ -112,6 +109,7 @@ class MEDIA_GPU_EXPORT D3D11PictureBuffer
ComD3D11Texture2D texture_;
uint32_t array_slice_;
+ std::unique_ptr<MediaLog> media_log_;
std::unique_ptr<Texture2DWrapper> texture_wrapper_;
gfx::Size size_;
bool in_picture_use_ = false;
diff --git a/chromium/media/gpu/windows/d3d11_texture_selector.cc b/chromium/media/gpu/windows/d3d11_texture_selector.cc
index ea2abf94d2f..0a07ed53490 100644
--- a/chromium/media/gpu/windows/d3d11_texture_selector.cc
+++ b/chromium/media/gpu/windows/d3d11_texture_selector.cc
@@ -19,11 +19,13 @@ namespace media {
TextureSelector::TextureSelector(VideoPixelFormat pixfmt,
DXGI_FORMAT output_dxgifmt,
ComD3D11VideoDevice video_device,
- ComD3D11DeviceContext device_context)
+ ComD3D11DeviceContext device_context,
+ bool shared_image_use_shared_handle)
: pixel_format_(pixfmt),
output_dxgifmt_(output_dxgifmt),
video_device_(std::move(video_device)),
- device_context_(std::move(device_context)) {}
+ device_context_(std::move(device_context)),
+ shared_image_use_shared_handle_(shared_image_use_shared_handle) {}
TextureSelector::~TextureSelector() = default;
@@ -47,14 +49,13 @@ std::unique_ptr<TextureSelector> TextureSelector::Create(
const FormatSupportChecker* format_checker,
ComD3D11VideoDevice video_device,
ComD3D11DeviceContext device_context,
- MediaLog* media_log) {
+ MediaLog* media_log,
+ bool shared_image_use_shared_handle) {
VideoPixelFormat output_pixel_format;
DXGI_FORMAT output_dxgi_format;
absl::optional<gfx::ColorSpace> output_color_space;
- bool needs_texture_copy =
- !SupportsZeroCopy(gpu_preferences, workarounds) ||
- base::FeatureList::IsEnabled(kD3D11VideoDecoderAlwaysCopy);
+ bool needs_texture_copy = !SupportsZeroCopy(gpu_preferences, workarounds);
auto supports_fmt = [format_checker](auto fmt) {
return format_checker->CheckOutputFormatSupport(fmt);
@@ -133,7 +134,8 @@ std::unique_ptr<TextureSelector> TextureSelector::Create(
MEDIA_LOG(INFO, media_log) << "D3D11VideoDecoder is copying textures";
return std::make_unique<CopyTextureSelector>(
output_pixel_format, decoder_output_format, output_dxgi_format,
- output_color_space, std::move(video_device), std::move(device_context));
+ output_color_space, std::move(video_device), std::move(device_context),
+ shared_image_use_shared_handle);
} else {
MEDIA_LOG(INFO, media_log) << "D3D11VideoDecoder is binding textures";
// Binding can't change the color space. The consumer has to do it, if they
@@ -141,7 +143,7 @@ std::unique_ptr<TextureSelector> TextureSelector::Create(
DCHECK(!output_color_space);
return std::make_unique<TextureSelector>(
output_pixel_format, output_dxgi_format, std::move(video_device),
- std::move(device_context));
+ std::move(device_context), shared_image_use_shared_handle);
}
}
@@ -152,6 +154,10 @@ std::unique_ptr<Texture2DWrapper> TextureSelector::CreateTextureWrapper(
return std::make_unique<DefaultTexture2DWrapper>(size, OutputDXGIFormat());
}
+bool TextureSelector::DoesDecoderOutputUseSharedHandle() const {
+ return shared_image_use_shared_handle_;
+}
+
bool TextureSelector::WillCopyForTesting() const {
return false;
}
@@ -162,11 +168,13 @@ CopyTextureSelector::CopyTextureSelector(
DXGI_FORMAT output_dxgifmt,
absl::optional<gfx::ColorSpace> output_color_space,
ComD3D11VideoDevice video_device,
- ComD3D11DeviceContext device_context)
+ ComD3D11DeviceContext device_context,
+ bool shared_image_use_shared_handle)
: TextureSelector(pixfmt,
output_dxgifmt,
std::move(video_device),
- std::move(device_context)),
+ std::move(device_context),
+ shared_image_use_shared_handle),
output_color_space_(std::move(output_color_space)),
video_processor_proxy_(
base::MakeRefCounted<VideoProcessorProxy>(this->video_device(),
@@ -188,6 +196,10 @@ std::unique_ptr<Texture2DWrapper> CopyTextureSelector::CreateTextureWrapper(
D3D11_BIND_SHADER_RESOURCE | D3D11_BIND_RENDER_TARGET;
texture_desc.Width = size.width();
texture_desc.Height = size.height();
+ if (DoesSharedImageUseSharedHandle()) {
+ texture_desc.MiscFlags = D3D11_RESOURCE_MISC_SHARED_NTHANDLE |
+ D3D11_RESOURCE_MISC_SHARED_KEYEDMUTEX;
+ }
ComD3D11Texture2D out_texture;
if (FAILED(device->CreateTexture2D(&texture_desc, nullptr, &out_texture)))
@@ -202,6 +214,10 @@ std::unique_ptr<Texture2DWrapper> CopyTextureSelector::CreateTextureWrapper(
video_processor_proxy_, out_texture, output_color_space_);
}
+bool CopyTextureSelector::DoesDecoderOutputUseSharedHandle() const {
+ return false;
+}
+
bool CopyTextureSelector::WillCopyForTesting() const {
return true;
}
diff --git a/chromium/media/gpu/windows/d3d11_texture_selector.h b/chromium/media/gpu/windows/d3d11_texture_selector.h
index f27444bcb0d..0b7c567318e 100644
--- a/chromium/media/gpu/windows/d3d11_texture_selector.h
+++ b/chromium/media/gpu/windows/d3d11_texture_selector.h
@@ -33,7 +33,8 @@ class MEDIA_GPU_EXPORT TextureSelector {
TextureSelector(VideoPixelFormat pixfmt,
DXGI_FORMAT output_dxgifmt,
ComD3D11VideoDevice video_device,
- ComD3D11DeviceContext d3d11_device_context);
+ ComD3D11DeviceContext d3d11_device_context,
+ bool use_shared_handle);
virtual ~TextureSelector();
static std::unique_ptr<TextureSelector> Create(
@@ -44,14 +45,20 @@ class MEDIA_GPU_EXPORT TextureSelector {
const FormatSupportChecker* format_checker,
ComD3D11VideoDevice video_device,
ComD3D11DeviceContext device_context,
- MediaLog* media_log);
+ MediaLog* media_log,
+ bool shared_image_use_shared_handle = false);
virtual std::unique_ptr<Texture2DWrapper> CreateTextureWrapper(
ComD3D11Device device,
gfx::Size size);
+ virtual bool DoesDecoderOutputUseSharedHandle() const;
+
VideoPixelFormat PixelFormat() const { return pixel_format_; }
DXGI_FORMAT OutputDXGIFormat() const { return output_dxgifmt_; }
+ bool DoesSharedImageUseSharedHandle() const {
+ return shared_image_use_shared_handle_;
+ }
virtual bool WillCopyForTesting() const;
@@ -70,6 +77,8 @@ class MEDIA_GPU_EXPORT TextureSelector {
ComD3D11VideoDevice video_device_;
ComD3D11DeviceContext device_context_;
+
+ bool shared_image_use_shared_handle_;
};
class MEDIA_GPU_EXPORT CopyTextureSelector : public TextureSelector {
@@ -80,13 +89,16 @@ class MEDIA_GPU_EXPORT CopyTextureSelector : public TextureSelector {
DXGI_FORMAT output_dxgifmt,
absl::optional<gfx::ColorSpace> output_color_space,
ComD3D11VideoDevice video_device,
- ComD3D11DeviceContext d3d11_device_context);
+ ComD3D11DeviceContext d3d11_device_context,
+ bool use_shared_handle);
~CopyTextureSelector() override;
std::unique_ptr<Texture2DWrapper> CreateTextureWrapper(
ComD3D11Device device,
gfx::Size size) override;
+ bool DoesDecoderOutputUseSharedHandle() const override;
+
bool WillCopyForTesting() const override;
private:
diff --git a/chromium/media/gpu/windows/d3d11_texture_selector_unittest.cc b/chromium/media/gpu/windows/d3d11_texture_selector_unittest.cc
index d9d57c793fb..57801b9edb5 100644
--- a/chromium/media/gpu/windows/d3d11_texture_selector_unittest.cc
+++ b/chromium/media/gpu/windows/d3d11_texture_selector_unittest.cc
@@ -36,7 +36,8 @@ class D3D11TextureSelectorUnittest : public ::testing::Test {
bool encrypted) {
VideoDecoderConfig result;
result.Initialize(
- kUnknownVideoCodec, // It doesn't matter because it won't be used.
+ VideoCodec::kUnknown, // It doesn't matter because it won't
+ // be used.
profile, VideoDecoderConfig::AlphaMode::kIsOpaque, VideoColorSpace(),
kNoTransformation, size, {}, {}, {},
encrypted ? EncryptionScheme::kCenc : EncryptionScheme::kUnencrypted);
diff --git a/chromium/media/gpu/windows/d3d11_texture_wrapper.cc b/chromium/media/gpu/windows/d3d11_texture_wrapper.cc
index 0d1da963177..ea48622d28a 100644
--- a/chromium/media/gpu/windows/d3d11_texture_wrapper.cc
+++ b/chromium/media/gpu/windows/d3d11_texture_wrapper.cc
@@ -10,10 +10,12 @@
#include <vector>
#include "components/viz/common/resources/resource_format_utils.h"
+#include "gpu/command_buffer/common/constants.h"
#include "gpu/command_buffer/common/shared_image_usage.h"
#include "gpu/command_buffer/service/mailbox_manager.h"
#include "gpu/command_buffer/service/shared_image_backing_d3d.h"
#include "media/base/bind_to_current_loop.h"
+#include "media/base/win/hresult_status_helper.h"
#include "media/base/win/mf_helpers.h"
#include "mojo/public/cpp/bindings/callback_helpers.h"
#include "ui/gl/gl_image.h"
@@ -62,10 +64,53 @@ DefaultTexture2DWrapper::DefaultTexture2DWrapper(const gfx::Size& size,
DefaultTexture2DWrapper::~DefaultTexture2DWrapper() = default;
+Status DefaultTexture2DWrapper::AcquireKeyedMutexIfNeeded() {
+ // keyed_mutex_acquired_ must be false when this API is called. For a
+ // non-shareable resource it is never set. For a shareable resource, which
+ // lives behind the use_single_texture flag, the decoder must always pair
+ // every acquire with a matching release.
+ DCHECK(!keyed_mutex_acquired_);
+
+ // No need to acquire the keyed mutex for a non-shared resource.
+ if (!keyed_mutex_) {
+ return OkStatus();
+ }
+
+ // Shared resource: the keyed mutex has not been acquired yet, so acquire it.
+ HRESULT hr =
+ keyed_mutex_->AcquireSync(gpu::kDXGIKeyedMutexAcquireKey, INFINITE);
+
+ if (FAILED(hr)) {
+ keyed_mutex_acquired_ = false;
+ DPLOG(ERROR) << "Unable to acquire the keyed mutex, error: " << hr;
+ return Status(StatusCode::kAcquireKeyedMutexFailed)
+ .AddCause(HresultToStatus(hr));
+ }
+
+ // The keyed mutex has been acquired for the shared resource.
+ keyed_mutex_acquired_ = true;
+
+ return OkStatus();
+}
+
Status DefaultTexture2DWrapper::ProcessTexture(
const gfx::ColorSpace& input_color_space,
MailboxHolderArray* mailbox_dest,
gfx::ColorSpace* output_color_space) {
+ // If the decoder acquired the keyed mutex earlier, it must be released now.
+ if (keyed_mutex_) {
+ DCHECK(keyed_mutex_acquired_);
+ HRESULT hr = keyed_mutex_->ReleaseSync(gpu::kDXGIKeyedMutexAcquireKey);
+ if (FAILED(hr)) {
+ DPLOG(ERROR) << "Unable to release the keyed mutex, error: " << hr;
+ return Status(StatusCode::kReleaseKeyedMutexFailed)
+ .AddCause(HresultToStatus(hr));
+ }
+
+ keyed_mutex_acquired_ = false;
+ }
+
// If we've received an error, then return it to our caller. This is probably
// from some previous operation.
// TODO(liberato): Return the error.
@@ -91,6 +136,23 @@ Status DefaultTexture2DWrapper::Init(
if (!SupportsFormat(dxgi_format_))
return Status(StatusCode::kUnsupportedTextureFormatForBind);
+ // Init IDXGIKeyedMutex when using shared handle.
+ if (texture) {
+ // Cannot use shared handle for swap chain output texture.
+ D3D11_TEXTURE2D_DESC desc = {};
+ texture->GetDesc(&desc);
+ if (desc.MiscFlags & D3D11_RESOURCE_MISC_SHARED_KEYEDMUTEX) {
+ DCHECK(!keyed_mutex_acquired_);
+ HRESULT hr = texture.As(&keyed_mutex_);
+ if (FAILED(hr)) {
+ DPLOG(ERROR) << "Failed to get key_mutex from output resource, error "
+ << std::hex << hr;
+ return Status(StatusCode::kGetKeyedMutexFailed)
+ .AddCause(HresultToStatus(hr));
+ }
+ }
+ }
+
// Generate mailboxes and holders.
// TODO(liberato): Verify that this is really okay off the GPU main thread.
// The current implementation is.
@@ -149,9 +211,42 @@ DefaultTexture2DWrapper::GpuResources::GpuResources(
gpu::SHARED_IMAGE_USAGE_RASTER | gpu::SHARED_IMAGE_USAGE_DISPLAY |
gpu::SHARED_IMAGE_USAGE_SCANOUT;
+ base::win::ScopedHandle shared_handle;
+ if (texture) {
+ D3D11_TEXTURE2D_DESC desc = {};
+ texture->GetDesc(&desc);
+ // Create shared handle for shareable output texture.
+ if (desc.MiscFlags & D3D11_RESOURCE_MISC_SHARED_NTHANDLE) {
+ Microsoft::WRL::ComPtr<IDXGIResource1> dxgi_resource;
+ HRESULT hr = texture.As(&dxgi_resource);
+ if (FAILED(hr)) {
+ DLOG(ERROR) << "QueryInterface for IDXGIResource failed with error "
+ << std::hex << hr;
+ std::move(on_error_cb)
+ .Run(std::move(StatusCode::kCreateSharedHandleFailed));
+ return;
+ }
+
+ HANDLE handle = nullptr;
+ hr = dxgi_resource->CreateSharedHandle(
+ nullptr, DXGI_SHARED_RESOURCE_READ | DXGI_SHARED_RESOURCE_WRITE,
+ nullptr, &handle);
+ if (FAILED(hr)) {
+ DLOG(ERROR) << "CreateSharedHandle failed with error " << std::hex
+ << hr;
+ std::move(on_error_cb)
+ .Run(std::move(StatusCode::kCreateSharedHandleFailed));
+ return;
+ }
+
+ shared_handle.Set(handle);
+ }
+ }
+
auto shared_image_backings =
gpu::SharedImageBackingD3D::CreateFromVideoTexture(
- mailboxes, dxgi_format, size, usage, texture, array_slice);
+ mailboxes, dxgi_format, size, usage, texture, array_slice,
+ std::move(shared_handle));
if (shared_image_backings.empty()) {
std::move(on_error_cb).Run(std::move(StatusCode::kCreateSharedImageFailed));
return;
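For textures created with D3D11_RESOURCE_MISC_SHARED_NTHANDLE, the GPU-thread side now exports an NT handle so the shared image backing can open the texture elsewhere. A trimmed sketch of that export step, using the same WRL ComPtr and base::win::ScopedHandle types as the code above:

#include <d3d11.h>
#include <dxgi1_2.h>
#include <wrl/client.h>

#include "base/win/scoped_handle.h"

// Sketch: export an NT shared handle from a texture created with
// D3D11_RESOURCE_MISC_SHARED_NTHANDLE.
bool CreateSharedHandleForTexture(
    Microsoft::WRL::ComPtr<ID3D11Texture2D> texture,
    base::win::ScopedHandle* out_handle) {
  Microsoft::WRL::ComPtr<IDXGIResource1> dxgi_resource;
  if (FAILED(texture.As(&dxgi_resource)))
    return false;

  HANDLE handle = nullptr;
  if (FAILED(dxgi_resource->CreateSharedHandle(
          /*pAttributes=*/nullptr,
          DXGI_SHARED_RESOURCE_READ | DXGI_SHARED_RESOURCE_WRITE,
          /*lpName=*/nullptr, &handle))) {
    return false;
  }
  out_handle->Set(handle);  // ScopedHandle now owns and will close the handle.
  return true;
}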
diff --git a/chromium/media/gpu/windows/d3d11_texture_wrapper.h b/chromium/media/gpu/windows/d3d11_texture_wrapper.h
index 88171c16a82..4d8c95e6ecd 100644
--- a/chromium/media/gpu/windows/d3d11_texture_wrapper.h
+++ b/chromium/media/gpu/windows/d3d11_texture_wrapper.h
@@ -51,6 +51,14 @@ class MEDIA_GPU_EXPORT Texture2DWrapper {
ComD3D11Texture2D texture,
size_t array_size) = 0;
+ // If |texture| has a keyed mutex, the mutex must be acquired before the
+ // texture is used in any way, or D3D11 reports an error. This API must be
+ // called:
+ // - before reading from or writing to the texture, via views or by any
+ // other means, and
+ // - before calling ProcessTexture().
+ // ProcessTexture() releases the keyed mutex again.
+ virtual Status AcquireKeyedMutexIfNeeded() = 0;
+
// Import |texture|, |array_slice| and return the mailbox(es) that can be
// used to refer to it.
virtual Status ProcessTexture(const gfx::ColorSpace& input_color_space,
@@ -82,6 +90,8 @@ class MEDIA_GPU_EXPORT DefaultTexture2DWrapper : public Texture2DWrapper {
ComD3D11Texture2D in_texture,
size_t array_slice) override;
+ Status AcquireKeyedMutexIfNeeded() override;
+
Status ProcessTexture(const gfx::ColorSpace& input_color_space,
MailboxHolderArray* mailbox_dest,
gfx::ColorSpace* output_color_space) override;
@@ -104,6 +114,10 @@ class MEDIA_GPU_EXPORT DefaultTexture2DWrapper : public Texture2DWrapper {
DXGI_FORMAT dxgi_format,
ComD3D11Texture2D texture,
size_t array_slice);
+
+ GpuResources(const GpuResources&) = delete;
+ GpuResources& operator=(const GpuResources&) = delete;
+
~GpuResources();
private:
@@ -111,8 +125,6 @@ class MEDIA_GPU_EXPORT DefaultTexture2DWrapper : public Texture2DWrapper {
std::vector<std::unique_ptr<gpu::SharedImageRepresentationFactoryRef>>
shared_images_;
-
- DISALLOW_COPY_AND_ASSIGN(GpuResources);
};
// Receive an error from |gpu_resources_| and store it in |received_error_|.
@@ -126,6 +138,9 @@ class MEDIA_GPU_EXPORT DefaultTexture2DWrapper : public Texture2DWrapper {
MailboxHolderArray mailbox_holders_;
DXGI_FORMAT dxgi_format_;
+ Microsoft::WRL::ComPtr<IDXGIKeyedMutex> keyed_mutex_;
+ bool keyed_mutex_acquired_ = false;
+
base::WeakPtrFactory<DefaultTexture2DWrapper> weak_factory_{this};
};
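Outside of the wrapper classes, the underlying DXGI contract is simply AcquireSync before touching the texture and ReleaseSync when done; the wrapper maps these onto AcquireKeyedMutexIfNeeded() and ProcessTexture(). A bare-bones sketch of that contract, assuming the gpu::kDXGIKeyedMutexAcquireKey constant used above:

#include <d3d11.h>
#include <dxgi.h>
#include <wrl/client.h>

#include "gpu/command_buffer/common/constants.h"  // gpu::kDXGIKeyedMutexAcquireKey

// Sketch: guard access to a keyed-mutex texture around a decode operation.
bool DecodeIntoSharedTexture(Microsoft::WRL::ComPtr<ID3D11Texture2D> texture) {
  Microsoft::WRL::ComPtr<IDXGIKeyedMutex> keyed_mutex;
  if (FAILED(texture.As(&keyed_mutex)))
    return false;  // Texture was not created with ..._SHARED_KEYEDMUTEX.

  if (FAILED(keyed_mutex->AcquireSync(gpu::kDXGIKeyedMutexAcquireKey,
                                      INFINITE))) {
    return false;
  }

  // ... DecoderBeginFrame / SubmitDecoderBuffers / DecoderEndFrame ...

  // Hand the texture back before anyone else (e.g. the shared image
  // representation on the GPU main thread) tries to acquire it.
  return SUCCEEDED(keyed_mutex->ReleaseSync(gpu::kDXGIKeyedMutexAcquireKey));
}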
diff --git a/chromium/media/gpu/windows/d3d11_video_context_wrapper.h b/chromium/media/gpu/windows/d3d11_video_context_wrapper.h
index f4cef4a4484..3c6671a6d16 100644
--- a/chromium/media/gpu/windows/d3d11_video_context_wrapper.h
+++ b/chromium/media/gpu/windows/d3d11_video_context_wrapper.h
@@ -18,6 +18,10 @@ namespace media {
class MEDIA_GPU_EXPORT VideoContextWrapper {
public:
VideoContextWrapper() = default;
+
+ VideoContextWrapper(const VideoContextWrapper&) = delete;
+ VideoContextWrapper& operator=(const VideoContextWrapper&) = delete;
+
virtual ~VideoContextWrapper();
// D3D11_VIDEO_DECODER_BUFFER_DESC1 and D3D11_VIDEO_DECODER_BUFFER_DESC
// have radically different sets of member variables, which means that in
@@ -73,8 +77,6 @@ class MEDIA_GPU_EXPORT VideoContextWrapper {
virtual HRESULT SubmitDecoderBuffers(ID3D11VideoDecoder* video_decoder,
UINT num_buffers,
const VideoBufferWrapper* buffers) = 0;
-
- DISALLOW_COPY_AND_ASSIGN(VideoContextWrapper);
}; // VideoContextWrapper
} // namespace media
diff --git a/chromium/media/gpu/windows/d3d11_video_decoder.cc b/chromium/media/gpu/windows/d3d11_video_decoder.cc
index 1a2cc644130..47ff940e347 100644
--- a/chromium/media/gpu/windows/d3d11_video_decoder.cc
+++ b/chromium/media/gpu/windows/d3d11_video_decoder.cc
@@ -168,17 +168,17 @@ HRESULT D3D11VideoDecoder::InitializeAcceleratedDecoder(
return hr;
profile_ = config.profile();
- if (config.codec() == kCodecVP9) {
+ if (config.codec() == VideoCodec::kVP9) {
accelerated_video_decoder_ = std::make_unique<VP9Decoder>(
std::make_unique<D3D11VP9Accelerator>(
this, media_log_.get(), video_device_, std::move(video_context)),
profile_, config.color_space_info());
- } else if (config.codec() == kCodecH264) {
+ } else if (config.codec() == VideoCodec::kH264) {
accelerated_video_decoder_ = std::make_unique<H264Decoder>(
std::make_unique<D3D11H264Accelerator>(
this, media_log_.get(), video_device_, std::move(video_context)),
profile_, config.color_space_info());
- } else if (config.codec() == kCodecAV1) {
+ } else if (config.codec() == VideoCodec::kAV1) {
accelerated_video_decoder_ = std::make_unique<AV1Decoder>(
std::make_unique<D3D11AV1Accelerator>(
this, media_log_.get(), video_device_, std::move(video_context)),
@@ -206,11 +206,16 @@ StatusOr<ComD3D11VideoDecoder> D3D11VideoDecoder::CreateD3D11Decoder() {
? 10
: 8);
- // TODO: supported check?
+ // The OS prevents reading any content from an encrypted video frame, so
+ // there is no need to support shared handles and keyed mutexes for
+ // encrypted frames.
+ const bool use_shared_handle =
+ base::FeatureList::IsEnabled(kD3D11VideoDecoderUseSharedHandle) &&
+ !config_.is_encrypted();
- decoder_configurator_ =
- D3D11DecoderConfigurator::Create(gpu_preferences_, gpu_workarounds_,
- config_, bit_depth_, media_log_.get());
+ // TODO: supported check?
+ decoder_configurator_ = D3D11DecoderConfigurator::Create(
+ gpu_preferences_, gpu_workarounds_, config_, bit_depth_, media_log_.get(),
+ use_shared_handle);
if (!decoder_configurator_)
return StatusCode::kDecoderUnsupportedProfile;
@@ -231,7 +236,8 @@ StatusOr<ComD3D11VideoDecoder> D3D11VideoDecoder::CreateD3D11Decoder() {
decoder_configurator_->TextureFormat(),
is_hdr_supported_ ? TextureSelector::HDRMode::kSDROrHDR
: TextureSelector::HDRMode::kSDROnly,
- &format_checker, video_device_, device_context_, media_log_.get());
+ &format_checker, video_device_, device_context_, media_log_.get(),
+ use_shared_handle);
if (!texture_selector_)
return StatusCode::kCreateTextureSelectorFailed;
@@ -257,14 +263,16 @@ StatusOr<ComD3D11VideoDecoder> D3D11VideoDecoder::CreateD3D11Decoder() {
.AddCause(HresultToStatus(hr));
}
- if ((config_.codec() == kCodecVP9 || config_.codec() == kCodecAV1) &&
+ if ((config_.codec() == VideoCodec::kVP9 ||
+ config_.codec() == VideoCodec::kAV1) &&
dec_config.ConfigBitstreamRaw == 1) {
// DXVA VP9 and AV1 specifications say ConfigBitstreamRaw "shall be 1".
found = true;
break;
}
- if (config_.codec() == kCodecH264 && dec_config.ConfigBitstreamRaw == 2) {
+ if (config_.codec() == VideoCodec::kH264 &&
+ dec_config.ConfigBitstreamRaw == 2) {
// ConfigBitstreamRaw == 2 means the decoder uses DXVA_Slice_H264_Short.
found = true;
break;
@@ -287,8 +295,13 @@ StatusOr<ComD3D11VideoDecoder> D3D11VideoDecoder::CreateD3D11Decoder() {
// For more information, please see:
// https://download.microsoft.com/download/9/2/A/92A4E198-67E0-4ABD-9DB7-635D711C2752/DXVA_VPx.pdf
// https://download.microsoft.com/download/5/f/c/5fc4ec5c-bd8c-4624-8034-319c1bab7671/DXVA_H264.pdf
+ //
+ // When the output texture is created with shared handle support, a texture
+ // array cannot be used: the keyed mutex applies to the entire array, which
+ // can deadlock when multiple threads try to use different slots of the
+ // array. More info here: https://crbug.com/1238943
use_single_video_decoder_texture_ =
- !!(dec_config.ConfigDecoderSpecific & (1 << 14));
+ !!(dec_config.ConfigDecoderSpecific & (1 << 14)) || use_shared_handle;
if (use_single_video_decoder_texture_)
MEDIA_LOG(INFO, media_log_) << "D3D11VideoDecoder is using single textures";
else
@@ -388,21 +401,15 @@ void D3D11VideoDecoder::Initialize(const VideoDecoderConfig& config,
// TODO(liberato): Handle cleanup better. Also consider being less chatty in
// the logs, since this will fall back.
- // TODO(liberato): dxva does this. don't know if we need to.
- if (!base::FeatureList::IsEnabled(kD3D11VideoDecoderSkipMultithreaded)) {
- ComD3D11Multithread multi_threaded;
- hr = device_->QueryInterface(IID_PPV_ARGS(&multi_threaded));
- if (FAILED(hr)) {
- NotifyError(Status(StatusCode::kQueryID3D11MultithreadFailed)
- .AddCause(HresultToStatus(hr)));
- return;
- }
- // TODO(liberato): This is a hack, since the unittest returns
- // success without providing |multi_threaded|.
- if (multi_threaded)
- multi_threaded->SetMultithreadProtected(TRUE);
+ ComD3D11Multithread multi_threaded;
+ hr = device_->QueryInterface(IID_PPV_ARGS(&multi_threaded));
+ if (FAILED(hr)) {
+ return NotifyError(Status(StatusCode::kQueryID3D11MultithreadFailed)
+ .AddCause(HresultToStatus(hr)));
}
+ multi_threaded->SetMultithreadProtected(TRUE);
+
hr = device_.As(&video_device_);
if (!SUCCEEDED(hr)) {
NotifyError("Failed to get video device");
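The multithread-protection setup is now unconditional (the kD3D11VideoDecoderSkipMultithreaded escape hatch is gone). A minimal sketch of what that setup amounts to, querying ID3D11Multithread from the device the way the code above does:

#include <d3d11.h>
#include <d3d11_4.h>  // ID3D11Multithread
#include <wrl/client.h>

// Sketch: make the D3D11 device safe to use from the decoder thread by
// turning on multithread protection, mirroring the setup above.
HRESULT EnableMultithreadProtection(
    Microsoft::WRL::ComPtr<ID3D11Device> device) {
  Microsoft::WRL::ComPtr<ID3D11Multithread> multithread;
  HRESULT hr = device.As(&multithread);
  if (FAILED(hr))
    return hr;
  multithread->SetMultithreadProtected(TRUE);
  return S_OK;
}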
@@ -742,7 +749,8 @@ void D3D11VideoDecoder::CreatePictureBuffers() {
device_, size,
use_single_video_decoder_texture_
? 1
- : D3D11DecoderConfigurator::BUFFER_COUNT);
+ : D3D11DecoderConfigurator::BUFFER_COUNT,
+ texture_selector_->DoesDecoderOutputUseSharedHandle());
if (result.has_value()) {
in_texture = std::move(result).value();
} else {
@@ -863,23 +871,21 @@ bool D3D11VideoDecoder::OutputResult(const CodecPicture* picture,
frame->SetReleaseMailboxCB(
base::BindOnce(release_mailbox_cb_, std::move(wait_complete_cb)));
- frame->metadata().power_efficient = true;
// For NV12, overlay is allowed by default. If the decoder is going to support
// non-NV12 textures, then this may have to be conditionally set. Also note
// that ALLOW_OVERLAY is required for encrypted video path.
//
- // Since all of our picture buffers allow overlay, we just use the finch
- // feature. However, we may choose to set ALLOW_OVERLAY to false even if
+ // Since all of our picture buffers allow overlay, we just set this to true.
+ // However, we may choose to set ALLOW_OVERLAY to false even if
// the finch flag is enabled. We may not choose to set ALLOW_OVERLAY if the
// flag is off, however.
//
// Also note that, since we end up binding textures with GLImageDXGI, it's
// probably okay just to allow overlay always, and let the swap chain
// presenter decide if it wants to.
- const bool allow_overlay =
- base::FeatureList::IsEnabled(kD3D11VideoDecoderAllowOverlay);
- frame->metadata().allow_overlay = allow_overlay;
+ frame->metadata().allow_overlay = true;
+ frame->metadata().power_efficient = true;
frame->set_color_space(output_color_space);
frame->set_hdr_metadata(config_.hdr_metadata());
output_cb_.Run(frame);
diff --git a/chromium/media/gpu/windows/d3d11_video_decoder_impl.h b/chromium/media/gpu/windows/d3d11_video_decoder_impl.h
index 004295b558a..5c93cad7df7 100644
--- a/chromium/media/gpu/windows/d3d11_video_decoder_impl.h
+++ b/chromium/media/gpu/windows/d3d11_video_decoder_impl.h
@@ -42,6 +42,10 @@ class MEDIA_GPU_EXPORT D3D11VideoDecoderImpl {
std::unique_ptr<MediaLog> media_log,
base::RepeatingCallback<scoped_refptr<CommandBufferHelper>()>
get_helper_cb);
+
+ D3D11VideoDecoderImpl(const D3D11VideoDecoderImpl&) = delete;
+ D3D11VideoDecoderImpl& operator=(const D3D11VideoDecoderImpl&) = delete;
+
virtual ~D3D11VideoDecoderImpl();
using InitCB = base::OnceCallback<void(bool success, ReleaseMailboxCB)>;
@@ -79,8 +83,6 @@ class MEDIA_GPU_EXPORT D3D11VideoDecoderImpl {
THREAD_CHECKER(thread_checker_);
base::WeakPtrFactory<D3D11VideoDecoderImpl> weak_factory_{this};
-
- DISALLOW_COPY_AND_ASSIGN(D3D11VideoDecoderImpl);
};
} // namespace media
diff --git a/chromium/media/gpu/windows/d3d11_video_decoder_unittest.cc b/chromium/media/gpu/windows/d3d11_video_decoder_unittest.cc
index a3c5e9ca7df..93d47aafb17 100644
--- a/chromium/media/gpu/windows/d3d11_video_decoder_unittest.cc
+++ b/chromium/media/gpu/windows/d3d11_video_decoder_unittest.cc
@@ -84,6 +84,10 @@ class D3D11VideoDecoderTest : public ::testing::Test {
.WillByDefault(
SetComPointeeAndReturnOk<1>(mock_d3d11_video_device_.Get()));
+ mock_multithreaded_ = MakeComPtr<NiceMock<D3D11MultithreadMock>>();
+ ON_CALL(*mock_d3d11_device_.Get(), QueryInterface(IID_ID3D11Multithread, _))
+ .WillByDefault(SetComPointeeAndReturnOk<1>(mock_multithreaded_.Get()));
+
EnableDecoder(D3D11_DECODER_PROFILE_H264_VLD_NOFGT);
mock_d3d11_video_decoder_ = MakeComPtr<D3D11VideoDecoderMock>();
@@ -238,6 +242,7 @@ class D3D11VideoDecoderTest : public ::testing::Test {
Microsoft::WRL::ComPtr<D3D11DeviceMock> mock_d3d11_device_;
Microsoft::WRL::ComPtr<D3D11DeviceContextMock> mock_d3d11_device_context_;
+ Microsoft::WRL::ComPtr<D3D11MultithreadMock> mock_multithreaded_;
Microsoft::WRL::ComPtr<D3D11VideoDeviceMock> mock_d3d11_video_device_;
Microsoft::WRL::ComPtr<D3D11VideoDecoderMock> mock_d3d11_video_decoder_;
Microsoft::WRL::ComPtr<D3D11VideoContextMock> mock_d3d11_video_context_;
@@ -254,8 +259,8 @@ class D3D11VideoDecoderTest : public ::testing::Test {
};
TEST_F(D3D11VideoDecoderTest, SupportsVP9Profile0WithDecoderEnabled) {
- VideoDecoderConfig configuration =
- TestVideoConfig::NormalCodecProfile(kCodecVP9, VP9PROFILE_PROFILE0);
+ VideoDecoderConfig configuration = TestVideoConfig::NormalCodecProfile(
+ VideoCodec::kVP9, VP9PROFILE_PROFILE0);
EnableDecoder(D3D11_DECODER_PROFILE_VP9_VLD_PROFILE0);
CreateDecoder();
@@ -269,8 +274,8 @@ TEST_F(D3D11VideoDecoderTest, SupportsVP9Profile0WithDecoderEnabled) {
TEST_F(D3D11VideoDecoderTest, DoesNotSupportVP9WithLegacyGPU) {
SetGPUProfile(LegacyIntelGPU);
- VideoDecoderConfig configuration =
- TestVideoConfig::NormalCodecProfile(kCodecVP9, VP9PROFILE_PROFILE0);
+ VideoDecoderConfig configuration = TestVideoConfig::NormalCodecProfile(
+ VideoCodec::kVP9, VP9PROFILE_PROFILE0);
EnableDecoder(D3D11_DECODER_PROFILE_VP9_VLD_PROFILE0);
CreateDecoder();
@@ -279,8 +284,8 @@ TEST_F(D3D11VideoDecoderTest, DoesNotSupportVP9WithLegacyGPU) {
TEST_F(D3D11VideoDecoderTest, DoesNotSupportVP9WithGPUWorkaroundDisableVPX) {
gpu_workarounds_.disable_accelerated_vp9_decode = true;
- VideoDecoderConfig configuration =
- TestVideoConfig::NormalCodecProfile(kCodecVP9, VP9PROFILE_PROFILE0);
+ VideoDecoderConfig configuration = TestVideoConfig::NormalCodecProfile(
+ VideoCodec::kVP9, VP9PROFILE_PROFILE0);
EnableDecoder(D3D11_DECODER_PROFILE_VP9_VLD_PROFILE0);
CreateDecoder();
@@ -288,8 +293,8 @@ TEST_F(D3D11VideoDecoderTest, DoesNotSupportVP9WithGPUWorkaroundDisableVPX) {
}
TEST_F(D3D11VideoDecoderTest, DoesNotSupportVP9WithoutDecoderEnabled) {
- VideoDecoderConfig configuration =
- TestVideoConfig::NormalCodecProfile(kCodecVP9, VP9PROFILE_PROFILE0);
+ VideoDecoderConfig configuration = TestVideoConfig::NormalCodecProfile(
+ VideoCodec::kVP9, VP9PROFILE_PROFILE0);
// Enable a non-VP9 decoder.
EnableDecoder(D3D11_DECODER_PROFILE_H264_VLD_NOFGT); // Paranoia, not VP9.
@@ -301,7 +306,7 @@ TEST_F(D3D11VideoDecoderTest, DoesNotSupportsH264HIGH10Profile) {
CreateDecoder();
VideoDecoderConfig high10 = TestVideoConfig::NormalCodecProfile(
- kCodecH264, H264PROFILE_HIGH10PROFILE);
+ VideoCodec::kH264, H264PROFILE_HIGH10PROFILE);
InitializeDecoder(high10, false);
}
@@ -310,7 +315,7 @@ TEST_F(D3D11VideoDecoderTest, SupportsH264WithAutodetectedConfig) {
CreateDecoder();
VideoDecoderConfig normal =
- TestVideoConfig::NormalCodecProfile(kCodecH264, H264PROFILE_MAIN);
+ TestVideoConfig::NormalCodecProfile(VideoCodec::kH264, H264PROFILE_MAIN);
InitializeDecoder(normal, true);
// TODO(liberato): Check |last_video_decoder_desc_| for sanity.
@@ -328,7 +333,7 @@ TEST_F(D3D11VideoDecoderTest, DoesNotSupportH264IfNoSupportedConfig) {
CreateDecoder(empty_configs);
VideoDecoderConfig normal =
- TestVideoConfig::NormalCodecProfile(kCodecH264, H264PROFILE_MAIN);
+ TestVideoConfig::NormalCodecProfile(VideoCodec::kH264, H264PROFILE_MAIN);
InitializeDecoder(normal, false);
}
@@ -336,7 +341,7 @@ TEST_F(D3D11VideoDecoderTest, DoesNotSupportH264IfNoSupportedConfig) {
TEST_F(D3D11VideoDecoderTest, DoesNotSupportEncryptedConfig) {
CreateDecoder();
VideoDecoderConfig encrypted_config =
- TestVideoConfig::NormalCodecProfile(kCodecH264, H264PROFILE_MAIN);
+ TestVideoConfig::NormalCodecProfile(VideoCodec::kH264, H264PROFILE_MAIN);
encrypted_config.SetIsEncrypted(true);
InitializeDecoder(encrypted_config, false);
}
@@ -348,7 +353,8 @@ TEST_F(D3D11VideoDecoderTest, IgnoreWorkaroundsIgnoresWorkaround) {
gpu_workarounds_.disable_d3d11_video_decoder = true;
CreateDecoder();
InitializeDecoder(
- TestVideoConfig::NormalCodecProfile(kCodecH264, H264PROFILE_MAIN), true);
+ TestVideoConfig::NormalCodecProfile(VideoCodec::kH264, H264PROFILE_MAIN),
+ true);
}
TEST_F(D3D11VideoDecoderTest, WorkaroundTurnsOffDecoder) {
@@ -356,7 +362,8 @@ TEST_F(D3D11VideoDecoderTest, WorkaroundTurnsOffDecoder) {
gpu_workarounds_.disable_d3d11_video_decoder = true;
CreateDecoder();
InitializeDecoder(
- TestVideoConfig::NormalCodecProfile(kCodecH264, H264PROFILE_MAIN), false);
+ TestVideoConfig::NormalCodecProfile(VideoCodec::kH264, H264PROFILE_MAIN),
+ false);
}
} // namespace media
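
The kCodec* constants replaced in these tests were unscoped values; the new spelling uses a scoped enum, so every use must be qualified. A minimal sketch of the shape that enum takes, listing only the values visible in this diff (the real media::VideoCodec declares more codecs):

// Scoped enumeration: VideoCodec::kVP9 cannot be referred to as a bare
// kCodecVP9 and does not implicitly convert to int.
enum class VideoCodec {
  kUnknown,
  kH264,
  kVP8,
  kVP9,
  kAV1,
  // ... further codecs omitted.
};

// Test call sites then read, for example:
//   TestVideoConfig::NormalCodecProfile(VideoCodec::kVP9, VP9PROFILE_PROFILE0);
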
diff --git a/chromium/media/gpu/windows/d3d11_video_device_format_support.cc b/chromium/media/gpu/windows/d3d11_video_device_format_support.cc
index 4002a5a28ca..7436f418f9d 100644
--- a/chromium/media/gpu/windows/d3d11_video_device_format_support.cc
+++ b/chromium/media/gpu/windows/d3d11_video_device_format_support.cc
@@ -60,7 +60,7 @@ bool FormatSupportChecker::Initialize() {
}
bool FormatSupportChecker::CheckOutputFormatSupport(DXGI_FORMAT format) const {
- if (!device_)
+ if (!device_ || !enumerator_)
return false;
DCHECK(initialized_);
diff --git a/chromium/media/gpu/windows/d3d11_video_device_format_support.h b/chromium/media/gpu/windows/d3d11_video_device_format_support.h
index 569f61ab549..01dae897d68 100644
--- a/chromium/media/gpu/windows/d3d11_video_device_format_support.h
+++ b/chromium/media/gpu/windows/d3d11_video_device_format_support.h
@@ -19,6 +19,10 @@ class MEDIA_GPU_EXPORT FormatSupportChecker {
public:
// |device| may be null, mostly for tests.
explicit FormatSupportChecker(ComD3D11Device device);
+
+ FormatSupportChecker(const FormatSupportChecker&) = delete;
+ FormatSupportChecker& operator=(const FormatSupportChecker&) = delete;
+
virtual ~FormatSupportChecker();
// Set up the device to be able to check format support.
@@ -32,8 +36,6 @@ class MEDIA_GPU_EXPORT FormatSupportChecker {
ComD3D11Device device_;
ComD3D11VideoProcessorEnumerator enumerator_;
bool initialized_ = false;
-
- DISALLOW_COPY_AND_ASSIGN(FormatSupportChecker);
};
} // namespace media
diff --git a/chromium/media/gpu/windows/d3d11_vp9_accelerator.cc b/chromium/media/gpu/windows/d3d11_vp9_accelerator.cc
index 43315d7c660..83dcc163527 100644
--- a/chromium/media/gpu/windows/d3d11_vp9_accelerator.cc
+++ b/chromium/media/gpu/windows/d3d11_vp9_accelerator.cc
@@ -56,6 +56,11 @@ D3D11VP9Accelerator::D3D11VP9Accelerator(
D3D11VP9Accelerator::~D3D11VP9Accelerator() {}
void D3D11VP9Accelerator::RecordFailure(const std::string& fail_type,
+ media::Status error) {
+ RecordFailure(fail_type, error.message(), error.code());
+}
+
+void D3D11VP9Accelerator::RecordFailure(const std::string& fail_type,
const std::string& reason,
StatusCode code) {
MEDIA_LOG(ERROR, media_log_)
@@ -81,9 +86,18 @@ bool D3D11VP9Accelerator::BeginFrame(const D3D11VP9Picture& pic) {
HRESULT hr;
do {
- hr = video_context_->DecoderBeginFrame(
- video_decoder_.Get(), pic.picture_buffer()->output_view().Get(), 0,
- nullptr);
+ ID3D11VideoDecoderOutputView* output_view = nullptr;
+ auto result = pic.picture_buffer()->AcquireOutputView();
+ if (result.has_value()) {
+ output_view = std::move(result).value();
+ } else {
+ media::Status error = std::move(result).error();
+ RecordFailure("AcquireOutputView", error.message(), error.code());
+ return false;
+ }
+
+ hr = video_context_->DecoderBeginFrame(video_decoder_.Get(), output_view, 0,
+ nullptr);
} while (hr == E_PENDING || hr == D3DERR_WASSTILLDRAWING);
if (FAILED(hr)) {
diff --git a/chromium/media/gpu/windows/d3d11_vp9_accelerator.h b/chromium/media/gpu/windows/d3d11_vp9_accelerator.h
index 07dd99796f0..70a45d88d6d 100644
--- a/chromium/media/gpu/windows/d3d11_vp9_accelerator.h
+++ b/chromium/media/gpu/windows/d3d11_vp9_accelerator.h
@@ -27,6 +27,10 @@ class D3D11VP9Accelerator : public VP9Decoder::VP9Accelerator {
MediaLog* media_log,
ComD3D11VideoDevice video_device,
std::unique_ptr<VideoContextWrapper> video_context);
+
+ D3D11VP9Accelerator(const D3D11VP9Accelerator&) = delete;
+ D3D11VP9Accelerator& operator=(const D3D11VP9Accelerator&) = delete;
+
~D3D11VP9Accelerator() override;
scoped_refptr<VP9Picture> CreateVP9Picture() override;
@@ -67,6 +71,7 @@ class D3D11VP9Accelerator : public VP9Decoder::VP9Accelerator {
bool SubmitDecoderBuffer(const DXVA_PicParams_VP9& pic_params,
const D3D11VP9Picture& pic);
+ void RecordFailure(const std::string& fail_type, media::Status error);
void RecordFailure(const std::string& fail_type,
const std::string& reason,
StatusCode code);
@@ -83,8 +88,6 @@ class D3D11VP9Accelerator : public VP9Decoder::VP9Accelerator {
// Used to set |use_prev_in_find_mv_refs| properly.
gfx::Size last_frame_size_;
bool last_show_frame_ = false;
-
- DISALLOW_COPY_AND_ASSIGN(D3D11VP9Accelerator);
};
} // namespace media
diff --git a/chromium/media/gpu/windows/dxva_picture_buffer_win.cc b/chromium/media/gpu/windows/dxva_picture_buffer_win.cc
index 1dfc67e2001..51299c6b68f 100644
--- a/chromium/media/gpu/windows/dxva_picture_buffer_win.cc
+++ b/chromium/media/gpu/windows/dxva_picture_buffer_win.cc
@@ -45,15 +45,6 @@ class DummyGLImage : public gl::GLImage {
const gfx::Rect& rect) override {
return false;
}
- bool ScheduleOverlayPlane(gfx::AcceleratedWidget widget,
- int z_order,
- gfx::OverlayTransform transform,
- const gfx::Rect& bounds_rect,
- const gfx::RectF& crop_rect,
- bool enable_blend,
- std::unique_ptr<gfx::GpuFence> gpu_fence) override {
- return false;
- }
void SetColorSpace(const gfx::ColorSpace& color_space) override {}
void Flush() override {}
void OnMemoryDump(base::trace_event::ProcessMemoryDump* pmd,
diff --git a/chromium/media/gpu/windows/dxva_picture_buffer_win.h b/chromium/media/gpu/windows/dxva_picture_buffer_win.h
index c14b3c8a25d..1fbb8494813 100644
--- a/chromium/media/gpu/windows/dxva_picture_buffer_win.h
+++ b/chromium/media/gpu/windows/dxva_picture_buffer_win.h
@@ -34,6 +34,10 @@ class DXVAPictureBuffer {
const DXVAVideoDecodeAccelerator& decoder,
const PictureBuffer& buffer,
EGLConfig egl_config);
+
+ DXVAPictureBuffer(const DXVAPictureBuffer&) = delete;
+ DXVAPictureBuffer& operator=(const DXVAPictureBuffer&) = delete;
+
virtual ~DXVAPictureBuffer();
virtual bool ReusePictureBuffer() = 0;
@@ -122,8 +126,6 @@ class DXVAPictureBuffer {
scoped_refptr<gl::GLImage> gl_image_;
std::vector<scoped_refptr<Picture::ScopedSharedImage>> shared_images_;
-
- DISALLOW_COPY_AND_ASSIGN(DXVAPictureBuffer);
};
// Copies the video result into an RGBA EGL pbuffer.
diff --git a/chromium/media/gpu/windows/dxva_video_decode_accelerator_win.cc b/chromium/media/gpu/windows/dxva_video_decode_accelerator_win.cc
index 20bc733c2de..3ba3af32e7f 100644
--- a/chromium/media/gpu/windows/dxva_video_decode_accelerator_win.cc
+++ b/chromium/media/gpu/windows/dxva_video_decode_accelerator_win.cc
@@ -335,6 +335,10 @@ bool ConfigChangeDetector::is_vp9_resilient_mode() const {
class H264ConfigChangeDetector : public ConfigChangeDetector {
public:
H264ConfigChangeDetector() {}
+
+ H264ConfigChangeDetector(const H264ConfigChangeDetector&) = delete;
+ H264ConfigChangeDetector& operator=(const H264ConfigChangeDetector&) = delete;
+
~H264ConfigChangeDetector() override {}
// Detects stream configuration changes.
@@ -360,8 +364,6 @@ class H264ConfigChangeDetector : public ConfigChangeDetector {
bool pending_config_changed_ = false;
std::unique_ptr<H264Parser> parser_;
-
- DISALLOW_COPY_AND_ASSIGN(H264ConfigChangeDetector);
};
bool H264ConfigChangeDetector::DetectConfig(const uint8_t* stream,
@@ -652,7 +654,7 @@ DXVAVideoDecodeAccelerator::DXVAVideoDecodeAccelerator(
make_context_current_cb_(make_context_current_cb),
bind_image_cb_(bind_image_cb),
media_log_(media_log),
- codec_(kUnknownVideoCodec),
+ codec_(VideoCodec::kUnknown),
decoder_thread_("DXVAVideoDecoderThread"),
pending_flush_(false),
enable_low_latency_(gpu_preferences.enable_low_latency_dxva),
@@ -824,11 +826,11 @@ bool DXVAVideoDecodeAccelerator::Initialize(const Config& config,
SendMFTMessage(MFT_MESSAGE_NOTIFY_START_OF_STREAM, 0),
"Send MFT_MESSAGE_NOTIFY_START_OF_STREAM notification failed", false);
- if (codec_ == kCodecH264)
+ if (codec_ == VideoCodec::kH264)
config_change_detector_ = std::make_unique<H264ConfigChangeDetector>();
- if (codec_ == kCodecVP8)
+ if (codec_ == VideoCodec::kVP8)
config_change_detector_ = std::make_unique<VP8ConfigChangeDetector>();
- if (codec_ == kCodecVP9)
+ if (codec_ == VideoCodec::kVP9)
config_change_detector_ = std::make_unique<VP9ConfigChangeDetector>();
processing_config_changed_ = false;
@@ -1046,7 +1048,9 @@ bool DXVAVideoDecodeAccelerator::CreateDX11DevManager() {
&feature_level_out, &d3d11_device_context_);
RETURN_ON_HR_FAILURE(hr, "Failed to create DX11 device", false);
}
-
+ RETURN_ON_HR_FAILURE(
+ hr, media::SetDebugName(d3d11_device_.Get(), "DXVA_DecodeAccelerator"),
+ false);
hr = d3d11_device_.As(&video_device_);
RETURN_ON_HR_FAILURE(hr, "Failed to get video device", false);
}
@@ -1279,7 +1283,7 @@ void DXVAVideoDecodeAccelerator::WaitForOutputBuffer(int32_t picture_buffer_id,
FROM_HERE,
base::BindOnce(&DXVAVideoDecodeAccelerator::WaitForOutputBuffer,
weak_ptr_, picture_buffer_id, count + 1),
- base::TimeDelta::FromMilliseconds(kFlushDecoderSurfaceTimeoutMs));
+ base::Milliseconds(kFlushDecoderSurfaceTimeoutMs));
return;
}
RETURN_AND_NOTIFY_ON_FAILURE(picture_buffer->ReusePictureBuffer(),
@@ -1500,14 +1504,14 @@ bool DXVAVideoDecodeAccelerator::InitDecoder(VideoCodecProfile profile) {
std::u16string file_version = version_info->file_version();
RETURN_ON_FAILURE(file_version.find(u"6.1.7140") == std::u16string::npos,
"blocked version of msmpeg2vdec.dll 6.1.7140", false);
- codec_ = kCodecH264;
+ codec_ = VideoCodec::kH264;
clsid = __uuidof(CMSH264DecoderMFT);
} else if ((profile >= VP9PROFILE_PROFILE0 &&
profile <= VP9PROFILE_PROFILE3) ||
profile == VP8PROFILE_ANY) {
- codec_ = profile == VP8PROFILE_ANY ? kCodecVP8 : kCodecVP9;
- if ((codec_ == kCodecVP8 && enable_accelerated_vp8_decode_) ||
- (codec_ == kCodecVP9 && enable_accelerated_vp9_decode_)) {
+ codec_ = profile == VP8PROFILE_ANY ? VideoCodec::kVP8 : VideoCodec::kVP9;
+ if ((codec_ == VideoCodec::kVP8 && enable_accelerated_vp8_decode_) ||
+ (codec_ == VideoCodec::kVP9 && enable_accelerated_vp9_decode_)) {
clsid = CLSID_MSVPxDecoder;
decoder_dll = ::LoadLibrary(kMSVPxDecoderDLLName);
if (decoder_dll)
@@ -1518,7 +1522,7 @@ bool DXVAVideoDecodeAccelerator::InitDecoder(VideoCodecProfile profile) {
if (enable_accelerated_av1_decode_ &&
base::FeatureList::IsEnabled(kMediaFoundationAV1Decoding) &&
(profile >= AV1PROFILE_MIN && profile <= AV1PROFILE_MAX)) {
- codec_ = kCodecAV1;
+ codec_ = VideoCodec::kAV1;
clsid = CLSID_CAV1DecoderMFT;
// Since the AV1 decoder is a Windows Store package, it can't be created
@@ -1632,7 +1636,7 @@ bool DXVAVideoDecodeAccelerator::CheckDecoderDxvaSupport() {
hr = attributes->GetUINT32(MF_SA_D3D_AWARE, &dxva);
RETURN_ON_HR_FAILURE(hr, "Failed to check if decoder supports DXVA", false);
- if (codec_ == kCodecH264) {
+ if (codec_ == VideoCodec::kH264) {
hr = attributes->SetUINT32(CODECAPI_AVDecVideoAcceleration_H264, TRUE);
RETURN_ON_HR_FAILURE(hr, "Failed to enable DXVA H/W decoding", false);
}
@@ -1701,13 +1705,13 @@ bool DXVAVideoDecodeAccelerator::SetDecoderInputMediaType() {
hr = media_type->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
RETURN_ON_HR_FAILURE(hr, "Failed to set major input type", false);
- if (codec_ == kCodecH264) {
+ if (codec_ == VideoCodec::kH264) {
hr = media_type->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_H264);
- } else if (codec_ == kCodecVP9) {
+ } else if (codec_ == VideoCodec::kVP9) {
hr = media_type->SetGUID(MF_MT_SUBTYPE, MEDIASUBTYPE_VP90);
- } else if (codec_ == kCodecVP8) {
+ } else if (codec_ == VideoCodec::kVP8) {
hr = media_type->SetGUID(MF_MT_SUBTYPE, MEDIASUBTYPE_VP80);
- } else if (codec_ == kCodecAV1) {
+ } else if (codec_ == VideoCodec::kAV1) {
hr = media_type->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_AV1);
} else {
NOTREACHED();
@@ -1788,7 +1792,7 @@ bool DXVAVideoDecodeAccelerator::GetStreamsInfoAndBufferReqs() {
// There should be three flags, one for requiring a whole frame be in a
// single sample, one for requiring there be one buffer only in a single
// sample, and one that specifies a fixed sample size. (as in cbSize)
- if (codec_ == kCodecH264 && input_stream_info_.dwFlags != 0x7u)
+ if (codec_ == VideoCodec::kH264 && input_stream_info_.dwFlags != 0x7u)
return false;
DVLOG(1) << "Min buffer size: " << input_stream_info_.cbSize;
@@ -1804,7 +1808,7 @@ bool DXVAVideoDecodeAccelerator::GetStreamsInfoAndBufferReqs() {
// The flags here should be the same and mean the same thing, except when
// DXVA is enabled, there is an extra 0x100 flag meaning decoder will
// allocate its own sample.
- if (codec_ == kCodecH264 && output_stream_info_.dwFlags != 0x107u)
+ if (codec_ == VideoCodec::kH264 && output_stream_info_.dwFlags != 0x107u)
return false;
// We should fail above during MFT_MESSAGE_SET_D3D_MANAGER if the decoder
@@ -2355,7 +2359,7 @@ void DXVAVideoDecodeAccelerator::DecodeInternal(
// https://crbug.com/1160623 -- non 4:2:0 content hangs the decoder.
RETURN_AND_NOTIFY_ON_FAILURE(
- codec_ != kCodecH264 || config_change_detector_->IsYUV420(),
+ codec_ != VideoCodec::kH264 || config_change_detector_->IsYUV420(),
"Only 4:2:0 H.264 content is supported", PLATFORM_FAILURE, );
processing_config_changed_ = config_changed;
@@ -2637,7 +2641,7 @@ void DXVAVideoDecodeAccelerator::CopySurface(
base::Unretained(this), 0, base::Unretained(src_surface),
base::Unretained(dest_surface), picture_buffer_id,
input_buffer_id),
- base::TimeDelta::FromMilliseconds(kFlushDecoderSurfaceTimeoutMs));
+ base::Milliseconds(kFlushDecoderSurfaceTimeoutMs));
}
void DXVAVideoDecodeAccelerator::CopySurfaceComplete(
@@ -2742,8 +2746,8 @@ void DXVAVideoDecodeAccelerator::BindPictureBufferToSample(
// Get the viz resource format per texture.
std::array<viz::ResourceFormat, VideoFrame::kMaxPlanes> viz_formats;
{
- const bool result = VideoPixelFormatToVizFormat(
- picture_buffer->pixel_format(), textures_per_picture, viz_formats);
+ result = VideoPixelFormatToVizFormat(picture_buffer->pixel_format(),
+ textures_per_picture, viz_formats);
RETURN_AND_NOTIFY_ON_FAILURE(
result, "Could not convert pixel format to viz format",
PLATFORM_FAILURE, );
@@ -2899,8 +2903,7 @@ void DXVAVideoDecodeAccelerator::CopyTextureOnDecoderThread(
DCHECK(d3d11_processor_.Get());
if (dest_keyed_mutex) {
- HRESULT hr =
- dest_keyed_mutex->AcquireSync(keyed_mutex_value, kAcquireSyncWaitMs);
+ hr = dest_keyed_mutex->AcquireSync(keyed_mutex_value, kAcquireSyncWaitMs);
RETURN_AND_NOTIFY_ON_FAILURE(
hr == S_OK, "D3D11 failed to acquire keyed mutex for texture.",
PLATFORM_FAILURE, );
@@ -2956,7 +2959,7 @@ void DXVAVideoDecodeAccelerator::CopyTextureOnDecoderThread(
PLATFORM_FAILURE, );
if (dest_keyed_mutex) {
- HRESULT hr = dest_keyed_mutex->ReleaseSync(keyed_mutex_value + 1);
+ hr = dest_keyed_mutex->ReleaseSync(keyed_mutex_value + 1);
RETURN_AND_NOTIFY_ON_FAILURE(hr == S_OK, "Failed to release keyed mutex.",
PLATFORM_FAILURE, );
@@ -2974,7 +2977,7 @@ void DXVAVideoDecodeAccelerator::CopyTextureOnDecoderThread(
base::BindOnce(&DXVAVideoDecodeAccelerator::FlushDecoder,
base::Unretained(this), 0, nullptr, nullptr,
picture_buffer_id, input_buffer_id),
- base::TimeDelta::FromMilliseconds(kFlushDecoderSurfaceTimeoutMs));
+ base::Milliseconds(kFlushDecoderSurfaceTimeoutMs));
}
}
@@ -3018,7 +3021,7 @@ void DXVAVideoDecodeAccelerator::FlushDecoder(int iterations,
&DXVAVideoDecodeAccelerator::FlushDecoder, base::Unretained(this),
iterations, base::Unretained(src_surface),
base::Unretained(dest_surface), picture_buffer_id, input_buffer_id),
- base::TimeDelta::FromMilliseconds(kFlushDecoderSurfaceTimeoutMs));
+ base::Milliseconds(kFlushDecoderSurfaceTimeoutMs));
return;
}
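
The timing changes in this file follow another repeated pattern: the static base::TimeDelta::FromMilliseconds()/FromMicroseconds()/FromNanoseconds() factories are replaced by the shorter base::Milliseconds()/Microseconds()/Nanoseconds() helpers, which construct the same base::TimeDelta. A minimal before/after sketch (the constant name is reused from the diff purely for illustration):

// Before: static factory on base::TimeDelta.
base::TimeDelta old_delay =
    base::TimeDelta::FromMilliseconds(kFlushDecoderSurfaceTimeoutMs);

// After: free helper from base/time/time.h producing the same value.
base::TimeDelta new_delay = base::Milliseconds(kFlushDecoderSurfaceTimeoutMs);
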
diff --git a/chromium/media/gpu/windows/dxva_video_decode_accelerator_win.h b/chromium/media/gpu/windows/dxva_video_decode_accelerator_win.h
index c4c5463f9ed..b863289106c 100644
--- a/chromium/media/gpu/windows/dxva_video_decode_accelerator_win.h
+++ b/chromium/media/gpu/windows/dxva_video_decode_accelerator_win.h
@@ -96,6 +96,11 @@ class MEDIA_GPU_EXPORT DXVAVideoDecodeAccelerator
const gpu::GpuDriverBugWorkarounds& workarounds,
const gpu::GpuPreferences& gpu_preferences,
MediaLog* media_log);
+
+ DXVAVideoDecodeAccelerator(const DXVAVideoDecodeAccelerator&) = delete;
+ DXVAVideoDecodeAccelerator& operator=(const DXVAVideoDecodeAccelerator&) =
+ delete;
+
~DXVAVideoDecodeAccelerator() override;
// VideoDecodeAccelerator implementation.
@@ -644,8 +649,6 @@ class MEDIA_GPU_EXPORT DXVAVideoDecodeAccelerator
// Function pointer for the MFCreateDXGIDeviceManager API.
static CreateDXGIDeviceManager create_dxgi_device_manager_;
-
- DISALLOW_COPY_AND_ASSIGN(DXVAVideoDecodeAccelerator);
};
} // namespace media
diff --git a/chromium/media/gpu/windows/media_foundation_video_encode_accelerator_win.cc b/chromium/media/gpu/windows/media_foundation_video_encode_accelerator_win.cc
index f4aab2953ad..6eb64869805 100644
--- a/chromium/media/gpu/windows/media_foundation_video_encode_accelerator_win.cc
+++ b/chromium/media/gpu/windows/media_foundation_video_encode_accelerator_win.cc
@@ -18,6 +18,7 @@
#include <utility>
#include <vector>
+#include "base/cxx17_backports.h"
#include "base/memory/shared_memory_mapping.h"
#include "base/memory/unsafe_shared_memory_region.h"
#include "base/threading/thread_task_runner_handle.h"
@@ -25,6 +26,7 @@
#include "base/win/scoped_co_mem.h"
#include "base/win/scoped_variant.h"
#include "base/win/windows_version.h"
+#include "build/build_config.h"
#include "gpu/ipc/common/dxgi_helpers.h"
#include "media/base/media_switches.h"
#include "media/base/win/mf_helpers.h"
@@ -74,12 +76,58 @@ eAVEncH264VProfile GetH264VProfile(VideoCodecProfile profile,
return eAVEncH264VProfile_unknown;
}
}
+
+bool IsSvcSupported(IMFActivate* activate) {
+#if defined(ARCH_CPU_X86)
+ // x86 systems sometimes crash in video drivers here.
+ // More info: https://crbug.com/1253748
+ return false;
+#else
+ Microsoft::WRL::ComPtr<IMFTransform> encoder;
+ Microsoft::WRL::ComPtr<ICodecAPI> codec_api;
+ HRESULT hr = activate->ActivateObject(IID_PPV_ARGS(&encoder));
+ if (FAILED(hr))
+ return false;
+
+ bool result = false;
+ hr = encoder.As(&codec_api);
+ if (SUCCEEDED(hr)) {
+ result = (codec_api->IsSupported(&CODECAPI_AVEncVideoTemporalLayerCount) ==
+ S_OK);
+ if (result) {
+ VARIANT min, max, step;
+ VariantInit(&min);
+ VariantInit(&max);
+ VariantInit(&step);
+
+ hr = codec_api->GetParameterRange(&CODECAPI_AVEncVideoTemporalLayerCount,
+ &min, &max, &step);
+ if (hr != S_OK || min.ulVal > 1 || max.ulVal < 3)
+ result = false;
+
+ VariantClear(&min);
+ VariantClear(&max);
+ VariantClear(&step);
+ }
+ }
+
+ activate->ShutdownObject();
+ return result;
+#endif // defined(ARCH_CPU_X86)
+}
+
} // namespace
class MediaFoundationVideoEncodeAccelerator::EncodeOutput {
public:
- EncodeOutput(uint32_t size, bool key_frame, base::TimeDelta timestamp)
- : keyframe(key_frame), capture_timestamp(timestamp), data_(size) {}
+ EncodeOutput(uint32_t size,
+ bool key_frame,
+ base::TimeDelta timestamp,
+ int temporal_id = 0)
+ : keyframe(key_frame),
+ capture_timestamp(timestamp),
+ temporal_layer_id(temporal_id),
+ data_(size) {}
uint8_t* memory() { return data_.data(); }
@@ -87,6 +135,7 @@ class MediaFoundationVideoEncodeAccelerator::EncodeOutput {
const bool keyframe;
const base::TimeDelta capture_timestamp;
+ const int temporal_layer_id;
private:
std::vector<uint8_t> data_;
@@ -149,13 +198,17 @@ MediaFoundationVideoEncodeAccelerator::GetSupportedProfiles() {
return profiles;
}
+ bool svc_supported = false;
if (pp_activate) {
- // Release the enumerated instances if any.
- // According to Windows Dev Center,
- // https://docs.microsoft.com/en-us/windows/win32/api/mfapi/nf-mfapi-mftenumex
- // The caller must release the pointers.
for (UINT32 i = 0; i < encoder_count; i++) {
if (pp_activate[i]) {
+ if (IsSvcSupported(pp_activate[i]))
+ svc_supported = true;
+
+ // Release the enumerated instances if any.
+ // According to Windows Dev Center,
+ // https://docs.microsoft.com/en-us/windows/win32/api/mfapi/nf-mfapi-mftenumex
+ // The caller must release the pointers.
pp_activate[i]->Release();
pp_activate[i] = nullptr;
}
@@ -171,6 +224,10 @@ MediaFoundationVideoEncodeAccelerator::GetSupportedProfiles() {
profile.max_framerate_numerator = kMaxFrameRateNumerator;
profile.max_framerate_denominator = kMaxFrameRateDenominator;
profile.max_resolution = gfx::Size(kMaxResolutionWidth, kMaxResolutionHeight);
+ if (svc_supported) {
+ profile.scalability_modes.push_back(SVCScalabilityMode::kL1T2);
+ profile.scalability_modes.push_back(SVCScalabilityMode::kL1T3);
+ }
profiles.push_back(profile);
profile.profile = H264PROFILE_MAIN;
@@ -261,6 +318,9 @@ bool MediaFoundationVideoEncodeAccelerator::Initialize(const Config& config,
gop_length_ = config.gop_length;
low_latency_mode_ = config.require_low_delay;
+ if (config.HasTemporalLayer())
+ num_temporal_layers_ = config.spatial_layers.front().num_of_temporal_layers;
+
if (!SetEncoderModes()) {
DLOG(ERROR) << "Failed setting encoder parameters.";
return false;
@@ -663,7 +723,7 @@ bool MediaFoundationVideoEncodeAccelerator::SetEncoderModes() {
if (is_async_mft_ && S_OK == codec_api_->IsModifiable(
&CODECAPI_AVEncVideoTemporalLayerCount)) {
- var.ulVal = 1;
+ var.ulVal = num_temporal_layers_;
hr = codec_api_->SetValue(&CODECAPI_AVEncVideoTemporalLayerCount, &var);
if (!compatible_with_win7_) {
RETURN_ON_HR_FAILURE(hr, "Couldn't set temporal layer count", false);
@@ -981,6 +1041,32 @@ HRESULT MediaFoundationVideoEncodeAccelerator::PopulateInputSampleBuffer(
return S_OK;
}
+int MediaFoundationVideoEncodeAccelerator::AssignTemporalId(bool keyframe) {
+ int result = 0;
+
+ if (keyframe)
+ outputs_since_keyframe_count_ = 0;
+
+ switch (num_temporal_layers_) {
+ case 1:
+ return 0;
+ case 2: {
+ const static std::array<int, 2> kTwoTemporalLayers = {0, 1};
+ result = kTwoTemporalLayers[outputs_since_keyframe_count_ %
+ kTwoTemporalLayers.size()];
+ break;
+ }
+ case 3: {
+ const static std::array<int, 4> kThreeTemporalLayers = {0, 2, 1, 2};
+ result = kThreeTemporalLayers[outputs_since_keyframe_count_ %
+ kThreeTemporalLayers.size()];
+ break;
+ }
+ }
+ outputs_since_keyframe_count_++;
+ return result;
+}
+
void MediaFoundationVideoEncodeAccelerator::ProcessOutputAsync() {
DVLOG(3) << __func__;
DCHECK(encoder_thread_task_runner_->BelongsToCurrentThread());
@@ -1021,20 +1107,23 @@ void MediaFoundationVideoEncodeAccelerator::ProcessOutputAsync() {
LONGLONG sample_time;
hr = output_data_buffer.pSample->GetSampleTime(&sample_time);
if (SUCCEEDED(hr)) {
- timestamp = base::TimeDelta::FromMicroseconds(
- sample_time / kOneMicrosecondInMFSampleTimeUnits);
+ timestamp =
+ base::Microseconds(sample_time / kOneMicrosecondInMFSampleTimeUnits);
}
const bool keyframe = MFGetAttributeUINT32(
output_data_buffer.pSample, MFSampleExtension_CleanPoint, false);
+
+ int temporal_id = AssignTemporalId(keyframe);
DVLOG(3) << "Encoded data with size:" << size << " keyframe " << keyframe;
// If no bit stream buffer presents, queue the output first.
if (bitstream_buffer_queue_.empty()) {
DVLOG(3) << "No bitstream buffers.";
+
// We need to copy the output so that encoding can continue.
- std::unique_ptr<EncodeOutput> encode_output(
- new EncodeOutput(size, keyframe, timestamp));
+ auto encode_output =
+ std::make_unique<EncodeOutput>(size, keyframe, timestamp, temporal_id);
{
MediaBufferScopedPointer scoped_buffer(output_buffer.Get());
memcpy(encode_output->memory(), scoped_buffer.get(), size);
@@ -1058,11 +1147,12 @@ void MediaFoundationVideoEncodeAccelerator::ProcessOutputAsync() {
output_data_buffer.pSample->Release();
output_data_buffer.pSample = nullptr;
+ BitstreamBufferMetadata md(size, keyframe, timestamp);
+ if (temporalScalableCoding())
+ md.h264.emplace().temporal_idx = temporal_id;
main_client_task_runner_->PostTask(
- FROM_HERE,
- base::BindOnce(&Client::BitstreamBufferReady, main_client_,
- buffer_ref->id,
- BitstreamBufferMetadata(size, keyframe, timestamp)));
+ FROM_HERE, base::BindOnce(&Client::BitstreamBufferReady, main_client_,
+ buffer_ref->id, md));
}
void MediaFoundationVideoEncodeAccelerator::ProcessOutputSync() {
@@ -1103,8 +1193,8 @@ void MediaFoundationVideoEncodeAccelerator::ProcessOutputSync() {
LONGLONG sample_time;
hr = output_sample_->GetSampleTime(&sample_time);
if (SUCCEEDED(hr)) {
- timestamp = base::TimeDelta::FromMicroseconds(
- sample_time / kOneMicrosecondInMFSampleTimeUnits);
+ timestamp =
+ base::Microseconds(sample_time / kOneMicrosecondInMFSampleTimeUnits);
}
const bool keyframe = MFGetAttributeUINT32(
@@ -1238,13 +1328,13 @@ void MediaFoundationVideoEncodeAccelerator::UseOutputBitstreamBufferTask(
memcpy(buffer_ref->mapping.memory(), encode_output->memory(),
encode_output->size());
+ BitstreamBufferMetadata md(encode_output->size(), encode_output->keyframe,
+ encode_output->capture_timestamp);
+ if (temporalScalableCoding())
+ md.h264.emplace().temporal_idx = encode_output->temporal_layer_id;
main_client_task_runner_->PostTask(
- FROM_HERE,
- base::BindOnce(&Client::BitstreamBufferReady, main_client_,
- buffer_ref->id,
- BitstreamBufferMetadata(
- encode_output->size(), encode_output->keyframe,
- encode_output->capture_timestamp)));
+ FROM_HERE, base::BindOnce(&Client::BitstreamBufferReady, main_client_,
+ buffer_ref->id, md));
return;
}
@@ -1256,13 +1346,77 @@ void MediaFoundationVideoEncodeAccelerator::RequestEncodingParametersChangeTask(
uint32_t framerate) {
DVLOG(3) << __func__;
DCHECK(encoder_thread_task_runner_->BelongsToCurrentThread());
+ DCHECK(imf_output_media_type_);
+ DCHECK(imf_input_media_type_);
+ DCHECK(encoder_);
RETURN_ON_FAILURE(bitrate.mode() == bitrate_.mode(),
"Invalid bitrate mode", );
- frame_rate_ =
- framerate
- ? std::min(framerate, static_cast<uint32_t>(kMaxFrameRateNumerator))
- : 1;
+ framerate = base::clamp(framerate, 1u, uint32_t{kMaxFrameRateNumerator});
+
+ if (frame_rate_ != framerate) {
+ HRESULT hr = MFSetAttributeRatio(imf_output_media_type_.Get(),
+ MF_MT_FRAME_RATE, framerate, 1);
+ RETURN_ON_HR_FAILURE(hr, "Couldn't set frame rate for output type", );
+
+ imf_output_media_type_->SetUINT32(MF_MT_AVG_BITRATE, bitrate.target());
+ RETURN_ON_HR_FAILURE(hr, "Couldn't set average bitrate for output type", );
+
+ hr = MFSetAttributeRatio(imf_input_media_type_.Get(), MF_MT_FRAME_RATE,
+ framerate, 1);
+ RETURN_ON_HR_FAILURE(hr, "Couldn't set frame rate for input type", );
+
+ if (is_async_mft_) {
+ // Some HMFTs will reject output type change with MF_E_INVALIDTYPE due
+ // to temporary mismatch between output/input media types, so we always
+ // clear the input/output media types before reconfiguring them
+ // dynamically.
+ hr = encoder_->ProcessMessage(MFT_MESSAGE_COMMAND_DRAIN, 0);
+ RETURN_ON_HR_FAILURE(
+ hr, "Couldn't process message MFT_MESSAGE_COMMAND_DRAIN", );
+
+ DrainPendingOutputs();
+
+ hr = encoder_->ProcessMessage(MFT_MESSAGE_NOTIFY_END_OF_STREAM, 0);
+ RETURN_ON_HR_FAILURE(
+ hr, "Couldn't process message MFT_MESSAGE_NOTIFY_END_OF_STREAM", );
+
+ hr = encoder_->ProcessMessage(MFT_MESSAGE_NOTIFY_END_STREAMING, 0);
+ RETURN_ON_HR_FAILURE(
+ hr, "Couldn't process message MFT_MESSAGE_NOTIFY_END_STREAMING", );
+
+ hr = encoder_->SetInputType(input_stream_id_, nullptr, 0);
+ RETURN_ON_HR_FAILURE(hr, "Couldn't clear input media type.", );
+
+ hr = encoder_->SetOutputType(output_stream_id_, nullptr, 0);
+ RETURN_ON_HR_FAILURE(hr, "Couldn't clear ouput media type.", );
+
+ hr = encoder_->SetOutputType(output_stream_id_,
+ imf_output_media_type_.Get(), 0);
+ RETURN_ON_HR_FAILURE(hr, "Couldn't set output media type", );
+
+ hr = encoder_->SetInputType(input_stream_id_, imf_input_media_type_.Get(),
+ 0);
+ RETURN_ON_HR_FAILURE(hr, "Couldn't set input media type", );
+
+ hr = encoder_->ProcessMessage(MFT_MESSAGE_NOTIFY_BEGIN_STREAMING, 0);
+ RETURN_ON_HR_FAILURE(
+ hr, "Couldn't process message MFT_MESSAGE_NOTIFY_BEGIN_STREAMING", );
+
+ hr = encoder_->ProcessMessage(MFT_MESSAGE_NOTIFY_START_OF_STREAM, 0);
+ RETURN_ON_HR_FAILURE(
+ hr, "Couldn't process message MFT_MESSAGE_NOTIFY_START_OF_STREAM", );
+ } else {
+ hr = encoder_->SetOutputType(output_stream_id_,
+ imf_output_media_type_.Get(), 0);
+ RETURN_ON_HR_FAILURE(hr, "Couldn't set output media type", );
+
+ hr = encoder_->SetInputType(input_stream_id_, imf_input_media_type_.Get(),
+ 0);
+ RETURN_ON_HR_FAILURE(hr, "Couldn't set input media type", );
+ }
+ frame_rate_ = framerate;
+ }
if (bitrate_ != bitrate) {
bitrate_ = bitrate;
@@ -1377,6 +1531,10 @@ HRESULT MediaFoundationVideoEncodeAccelerator::InitializeD3DVideoProcessing(
&scaled_d3d11_texture);
RETURN_ON_HR_FAILURE(hr, "Failed to create texture", hr);
+ hr = media::SetDebugName(scaled_d3d11_texture.Get(),
+ "MFVideoEncodeAccelerator_ScaledTexture");
+ RETURN_ON_HR_FAILURE(hr, "Failed to set debug name", hr);
+
D3D11_VIDEO_PROCESSOR_OUTPUT_VIEW_DESC output_desc = {};
output_desc.ViewDimension = D3D11_VPOV_DIMENSION_TEXTURE2D;
output_desc.Texture2D.MipSlice = 0;
@@ -1471,4 +1629,22 @@ HRESULT MediaFoundationVideoEncodeAccelerator::PerformD3DScaling(
return hr;
}
+void MediaFoundationVideoEncodeAccelerator::DrainPendingOutputs() {
+ Microsoft::WRL::ComPtr<IMFMediaEvent> media_event;
+
+ while ((SUCCEEDED(
+ event_generator_->GetEvent(MF_EVENT_FLAG_NO_WAIT, &media_event)))) {
+ MediaEventType event_type;
+ HRESULT hr = media_event->GetType(&event_type);
+ if (FAILED(hr)) {
+ DLOG(ERROR) << "Failed to get the type of media event.";
+ continue;
+ }
+
+ if (event_type == METransformHaveOutput) {
+ ProcessOutputAsync();
+ }
+ }
+}
+
} // namespace media
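
AssignTemporalId() above cycles through a fixed pattern of temporal layer ids, restarting at every key frame, and the id is later reported in BitstreamBufferMetadata. A standalone sketch of just that pattern, mirroring the tables in the diff but not the encoder's surrounding state handling:

#include <array>
#include <cstdint>

// Temporal layer id for the |index|-th encoded output since the last key
// frame, given 1 (single layer), 2 (L1T2) or 3 (L1T3) temporal layers.
int TemporalIdForOutput(int num_temporal_layers, uint32_t index) {
  switch (num_temporal_layers) {
    case 2: {
      // L1T2 alternates base and enhancement: 0 1 0 1 ...
      static constexpr std::array<int, 2> kPattern = {0, 1};
      return kPattern[index % kPattern.size()];
    }
    case 3: {
      // L1T3 repeats 0 2 1 2, so layer 0 is every 4th frame and layers
      // {0, 1} together are every 2nd frame.
      static constexpr std::array<int, 4> kPattern = {0, 2, 1, 2};
      return kPattern[index % kPattern.size()];
    }
    default:
      return 0;
  }
}

Dropping every output with id 2 halves the frame rate, and dropping ids 1 and 2 quarters it, which is what the L1T2/L1T3 scalability modes advertised in GetSupportedProfiles() enable downstream.
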
diff --git a/chromium/media/gpu/windows/media_foundation_video_encode_accelerator_win.h b/chromium/media/gpu/windows/media_foundation_video_encode_accelerator_win.h
index b82e70261f2..f6dcb348658 100644
--- a/chromium/media/gpu/windows/media_foundation_video_encode_accelerator_win.h
+++ b/chromium/media/gpu/windows/media_foundation_video_encode_accelerator_win.h
@@ -94,10 +94,16 @@ class MEDIA_GPU_EXPORT MediaFoundationVideoEncodeAccelerator
// Populates input sample buffer with contents of a video frame
HRESULT PopulateInputSampleBuffer(scoped_refptr<VideoFrame> frame);
+ int AssignTemporalId(bool keyframe);
+ bool temporalScalableCoding() { return num_temporal_layers_ > 1; }
+
// Checks for and copies encoded output on |encoder_thread_|.
void ProcessOutputAsync();
void ProcessOutputSync();
+ // Drains pending output samples on |encoder_thread_|.
+ void DrainPendingOutputs();
+
// Tries to deliver the input frame to the encoder.
bool TryToDeliverInputFrame(scoped_refptr<VideoFrame> frame,
bool force_keyframe);
@@ -140,11 +146,16 @@ class MEDIA_GPU_EXPORT MediaFoundationVideoEncodeAccelerator
// EncodeOutput needs to be copied into a BitstreamBufferRef as a FIFO.
base::circular_deque<std::unique_ptr<EncodeOutput>> encoder_output_queue_;
+ // Counter of outputs which is used to assign temporal layer indexes
+ // according to the corresponding layer pattern. Reset for every key frame.
+ uint32_t outputs_since_keyframe_count_ = 0;
+
gfx::Size input_visible_size_;
size_t bitstream_buffer_size_;
uint32_t frame_rate_;
Bitrate bitrate_;
bool low_latency_mode_;
+ int num_temporal_layers_ = 1;
// Group of picture length for encoded output stream, indicates the
// distance between two key frames.
diff --git a/chromium/media/gpu/windows/supported_profile_helpers.cc b/chromium/media/gpu/windows/supported_profile_helpers.cc
index 7b38e6885ea..f271b29c680 100644
--- a/chromium/media/gpu/windows/supported_profile_helpers.cc
+++ b/chromium/media/gpu/windows/supported_profile_helpers.cc
@@ -245,7 +245,8 @@ SupportedResolutionRangeMap GetSupportedD3D11VideoDecoderResolutions(
gfx::Size(8192, 4320), gfx::Size(8192, 8192)};
const bool should_test_for_av1_support =
- base::FeatureList::IsEnabled(kMediaFoundationAV1Decoding) &&
+ (base::FeatureList::IsEnabled(kMediaFoundationAV1Decoding) ||
+ base::FeatureList::IsEnabled(kD3D11VideoDecoderAV1)) &&
!workarounds.disable_accelerated_av1_decode && provide_av1_resolutions;
// Enumerate supported video profiles and look for the known profile for each
diff --git a/chromium/media/learning/common/feature_dictionary.h b/chromium/media/learning/common/feature_dictionary.h
index ac09739d6a6..9b5b9fb099a 100644
--- a/chromium/media/learning/common/feature_dictionary.h
+++ b/chromium/media/learning/common/feature_dictionary.h
@@ -30,6 +30,10 @@ class COMPONENT_EXPORT(LEARNING_COMMON) FeatureDictionary {
using Dictionary = std::map<std::string, FeatureValue>;
FeatureDictionary();
+
+ FeatureDictionary(const FeatureDictionary&) = delete;
+ FeatureDictionary& operator=(const FeatureDictionary&) = delete;
+
~FeatureDictionary();
// Add features for |task| to |features| from our dictionary. Features that
@@ -42,8 +46,6 @@ class COMPONENT_EXPORT(LEARNING_COMMON) FeatureDictionary {
private:
Dictionary dictionary_;
-
- DISALLOW_COPY_AND_ASSIGN(FeatureDictionary);
};
} // namespace learning
diff --git a/chromium/media/learning/common/learning_session.h b/chromium/media/learning/common/learning_session.h
index 6468871eab4..568c3bc3f89 100644
--- a/chromium/media/learning/common/learning_session.h
+++ b/chromium/media/learning/common/learning_session.h
@@ -24,14 +24,15 @@ class COMPONENT_EXPORT(LEARNING_COMMON) LearningSession
: public base::SupportsUserData::Data {
public:
LearningSession();
+
+ LearningSession(const LearningSession&) = delete;
+ LearningSession& operator=(const LearningSession&) = delete;
+
~LearningSession() override;
// Return a LearningTaskController for the given task.
virtual std::unique_ptr<LearningTaskController> GetController(
const std::string& task_name) = 0;
-
- private:
- DISALLOW_COPY_AND_ASSIGN(LearningSession);
};
} // namespace learning
diff --git a/chromium/media/learning/common/learning_task_controller.h b/chromium/media/learning/common/learning_task_controller.h
index 0644e2d85f8..21dacfd9df9 100644
--- a/chromium/media/learning/common/learning_task_controller.h
+++ b/chromium/media/learning/common/learning_task_controller.h
@@ -49,6 +49,10 @@ class COMPONENT_EXPORT(LEARNING_COMMON) LearningTaskController {
const absl::optional<TargetHistogram>& predicted)>;
LearningTaskController() = default;
+
+ LearningTaskController(const LearningTaskController&) = delete;
+ LearningTaskController& operator=(const LearningTaskController&) = delete;
+
virtual ~LearningTaskController() = default;
// Start a new observation. Call this at the time one would try to predict
@@ -95,9 +99,6 @@ class COMPONENT_EXPORT(LEARNING_COMMON) LearningTaskController {
// may be called immediately without posting.
virtual void PredictDistribution(const FeatureVector& features,
PredictionCB callback) = 0;
-
- private:
- DISALLOW_COPY_AND_ASSIGN(LearningTaskController);
};
} // namespace learning
diff --git a/chromium/media/learning/impl/distribution_reporter.h b/chromium/media/learning/impl/distribution_reporter.h
index 6a96cf7aac4..1799129ebc0 100644
--- a/chromium/media/learning/impl/distribution_reporter.h
+++ b/chromium/media/learning/impl/distribution_reporter.h
@@ -55,6 +55,9 @@ class COMPONENT_EXPORT(LEARNING_IMPL) DistributionReporter {
// Create a DistributionReporter that's suitable for |task|.
static std::unique_ptr<DistributionReporter> Create(const LearningTask& task);
+ DistributionReporter(const DistributionReporter&) = delete;
+ DistributionReporter& operator=(const DistributionReporter&) = delete;
+
virtual ~DistributionReporter();
// Returns a prediction CB that will be compared to |prediction_info.observed|
@@ -93,8 +96,6 @@ class COMPONENT_EXPORT(LEARNING_IMPL) DistributionReporter {
absl::optional<std::set<int>> feature_indices_;
base::WeakPtrFactory<DistributionReporter> weak_factory_{this};
-
- DISALLOW_COPY_AND_ASSIGN(DistributionReporter);
};
} // namespace learning
diff --git a/chromium/media/learning/impl/extra_trees_trainer.cc b/chromium/media/learning/impl/extra_trees_trainer.cc
index 572c0f1f56f..03cb8e6e814 100644
--- a/chromium/media/learning/impl/extra_trees_trainer.cc
+++ b/chromium/media/learning/impl/extra_trees_trainer.cc
@@ -57,15 +57,15 @@ void ExtraTreesTrainer::OnRandomTreeModel(TrainedModelCB model_cb,
// If this is the last tree, then return the finished model.
if (trees_.size() == task_.rf_number_of_trees) {
- std::unique_ptr<Model> model =
+ std::unique_ptr<Model> finished_model =
std::make_unique<VotingEnsemble>(std::move(trees_));
// If we have a converter, then wrap everything in a ConvertingModel.
if (converter_) {
- model = std::make_unique<ConvertingModel>(std::move(converter_),
- std::move(model));
+ finished_model = std::make_unique<ConvertingModel>(
+ std::move(converter_), std::move(finished_model));
}
- std::move(model_cb).Run(std::move(model));
+ std::move(model_cb).Run(std::move(finished_model));
return;
}
diff --git a/chromium/media/learning/impl/extra_trees_trainer.h b/chromium/media/learning/impl/extra_trees_trainer.h
index 45784f2e3f5..0509a540063 100644
--- a/chromium/media/learning/impl/extra_trees_trainer.h
+++ b/chromium/media/learning/impl/extra_trees_trainer.h
@@ -34,6 +34,10 @@ class COMPONENT_EXPORT(LEARNING_IMPL) ExtraTreesTrainer
public base::SupportsWeakPtr<ExtraTreesTrainer> {
public:
ExtraTreesTrainer();
+
+ ExtraTreesTrainer(const ExtraTreesTrainer&) = delete;
+ ExtraTreesTrainer& operator=(const ExtraTreesTrainer&) = delete;
+
~ExtraTreesTrainer() override;
// TrainingAlgorithm
@@ -51,8 +55,6 @@ class COMPONENT_EXPORT(LEARNING_IMPL) ExtraTreesTrainer
std::vector<std::unique_ptr<Model>> trees_;
std::unique_ptr<OneHotConverter> converter_;
TrainingData converted_training_data_;
-
- DISALLOW_COPY_AND_ASSIGN(ExtraTreesTrainer);
};
} // namespace learning
diff --git a/chromium/media/learning/impl/feature_provider.h b/chromium/media/learning/impl/feature_provider.h
index c53fb81374a..b384c8c40f7 100644
--- a/chromium/media/learning/impl/feature_provider.h
+++ b/chromium/media/learning/impl/feature_provider.h
@@ -25,14 +25,15 @@ class COMPONENT_EXPORT(LEARNING_IMPL) FeatureProvider {
using FeatureVectorCB = base::OnceCallback<void(FeatureVector)>;
FeatureProvider();
+
+ FeatureProvider(const FeatureProvider&) = delete;
+ FeatureProvider& operator=(const FeatureProvider&) = delete;
+
virtual ~FeatureProvider();
// Update |features| to include whatever features are specified by |task_|,
// and call |cb| once they're filled in.
virtual void AddFeatures(FeatureVector features, FeatureVectorCB cb) = 0;
-
- private:
- DISALLOW_COPY_AND_ASSIGN(FeatureProvider);
};
// Since FeatureProviders are often going to thread-hop, provide this typedef.
diff --git a/chromium/media/learning/impl/lookup_table_trainer.h b/chromium/media/learning/impl/lookup_table_trainer.h
index f17e6173b2a..08486f6aab3 100644
--- a/chromium/media/learning/impl/lookup_table_trainer.h
+++ b/chromium/media/learning/impl/lookup_table_trainer.h
@@ -18,14 +18,15 @@ class COMPONENT_EXPORT(LEARNING_IMPL) LookupTableTrainer
: public TrainingAlgorithm {
public:
LookupTableTrainer();
+
+ LookupTableTrainer(const LookupTableTrainer&) = delete;
+ LookupTableTrainer& operator=(const LookupTableTrainer&) = delete;
+
~LookupTableTrainer() override;
void Train(const LearningTask& task,
const TrainingData& training_data,
TrainedModelCB model_cb) override;
-
- private:
- DISALLOW_COPY_AND_ASSIGN(LookupTableTrainer);
};
} // namespace learning
diff --git a/chromium/media/learning/impl/one_hot.h b/chromium/media/learning/impl/one_hot.h
index 23e69845c38..e249ddeb8d3 100644
--- a/chromium/media/learning/impl/one_hot.h
+++ b/chromium/media/learning/impl/one_hot.h
@@ -27,6 +27,10 @@ class COMPONENT_EXPORT(LEARNING_IMPL) OneHotConverter {
// Build a one-hot converter for all nominal features |task|, using the values
// found in |training_data|.
OneHotConverter(const LearningTask& task, const TrainingData& training_data);
+
+ OneHotConverter(const OneHotConverter&) = delete;
+ OneHotConverter& operator=(const OneHotConverter&) = delete;
+
~OneHotConverter();
// Return the LearningTask that has only nominal features.
@@ -54,8 +58,6 @@ class COMPONENT_EXPORT(LEARNING_IMPL) OneHotConverter {
// [original task feature index] = optional converter for it. If the feature
// was kNumeric to begin with, then there will be no converter.
std::vector<absl::optional<ValueVectorIndexMap>> converters_;
-
- DISALLOW_COPY_AND_ASSIGN(OneHotConverter);
};
// Model that uses |Converter| to convert instances before sending them to the
@@ -64,6 +66,10 @@ class COMPONENT_EXPORT(LEARNING_IMPL) ConvertingModel : public Model {
public:
ConvertingModel(std::unique_ptr<OneHotConverter> converter,
std::unique_ptr<Model> model);
+
+ ConvertingModel(const ConvertingModel&) = delete;
+ ConvertingModel& operator=(const ConvertingModel&) = delete;
+
~ConvertingModel() override;
// Model
@@ -72,8 +78,6 @@ class COMPONENT_EXPORT(LEARNING_IMPL) ConvertingModel : public Model {
private:
std::unique_ptr<OneHotConverter> converter_;
std::unique_ptr<Model> model_;
-
- DISALLOW_COPY_AND_ASSIGN(ConvertingModel);
};
} // namespace learning
diff --git a/chromium/media/learning/impl/random_number_generator.h b/chromium/media/learning/impl/random_number_generator.h
index aeb4514b18e..0974a47a353 100644
--- a/chromium/media/learning/impl/random_number_generator.h
+++ b/chromium/media/learning/impl/random_number_generator.h
@@ -17,6 +17,10 @@ namespace media {
class COMPONENT_EXPORT(LEARNING_IMPL) RandomNumberGenerator {
public:
RandomNumberGenerator() = default;
+
+ RandomNumberGenerator(const RandomNumberGenerator&) = delete;
+ RandomNumberGenerator& operator=(const RandomNumberGenerator&) = delete;
+
virtual ~RandomNumberGenerator() = default;
// Return a random generator that will return unpredictable values in the
@@ -35,9 +39,6 @@ class COMPONENT_EXPORT(LEARNING_IMPL) RandomNumberGenerator {
// This isn't an overload of Generate() to be sure that one isn't surprised by
// the result.
double GenerateDouble(double range);
-
- private:
- DISALLOW_COPY_AND_ASSIGN(RandomNumberGenerator);
};
// Handy mix-in class if you want to support rng injection.
diff --git a/chromium/media/learning/impl/random_tree_trainer.h b/chromium/media/learning/impl/random_tree_trainer.h
index 13d5bf12573..23fc7b31fa6 100644
--- a/chromium/media/learning/impl/random_tree_trainer.h
+++ b/chromium/media/learning/impl/random_tree_trainer.h
@@ -78,6 +78,10 @@ class COMPONENT_EXPORT(LEARNING_IMPL) RandomTreeTrainer
public HasRandomNumberGenerator {
public:
explicit RandomTreeTrainer(RandomNumberGenerator* rng = nullptr);
+
+ RandomTreeTrainer(const RandomTreeTrainer&) = delete;
+ RandomTreeTrainer& operator=(const RandomTreeTrainer&) = delete;
+
~RandomTreeTrainer() override;
// Train on all examples. Calls |model_cb| with the trained model, which
@@ -174,8 +178,6 @@ class COMPONENT_EXPORT(LEARNING_IMPL) RandomTreeTrainer
FeatureValue FindSplitPoint_Numeric(size_t index,
const TrainingData& training_data,
const std::vector<size_t>& training_idx);
-
- DISALLOW_COPY_AND_ASSIGN(RandomTreeTrainer);
};
} // namespace learning
diff --git a/chromium/media/learning/impl/training_algorithm.h b/chromium/media/learning/impl/training_algorithm.h
index a5fea2ca347..90c71f9bedd 100644
--- a/chromium/media/learning/impl/training_algorithm.h
+++ b/chromium/media/learning/impl/training_algorithm.h
@@ -21,14 +21,15 @@ using TrainedModelCB = base::OnceCallback<void(std::unique_ptr<Model>)>;
class TrainingAlgorithm {
public:
TrainingAlgorithm() = default;
+
+ TrainingAlgorithm(const TrainingAlgorithm&) = delete;
+ TrainingAlgorithm& operator=(const TrainingAlgorithm&) = delete;
+
virtual ~TrainingAlgorithm() = default;
virtual void Train(const LearningTask& task,
const TrainingData& training_data,
TrainedModelCB model_cb) = 0;
-
- private:
- DISALLOW_COPY_AND_ASSIGN(TrainingAlgorithm);
};
} // namespace learning
diff --git a/chromium/media/learning/impl/voting_ensemble.h b/chromium/media/learning/impl/voting_ensemble.h
index 7e0cba7b59a..0994e58a1c0 100644
--- a/chromium/media/learning/impl/voting_ensemble.h
+++ b/chromium/media/learning/impl/voting_ensemble.h
@@ -20,6 +20,10 @@ namespace learning {
class COMPONENT_EXPORT(LEARNING_IMPL) VotingEnsemble : public Model {
public:
VotingEnsemble(std::vector<std::unique_ptr<Model>> models);
+
+ VotingEnsemble(const VotingEnsemble&) = delete;
+ VotingEnsemble& operator=(const VotingEnsemble&) = delete;
+
~VotingEnsemble() override;
// Model
@@ -27,8 +31,6 @@ class COMPONENT_EXPORT(LEARNING_IMPL) VotingEnsemble : public Model {
private:
std::vector<std::unique_ptr<Model>> models_;
-
- DISALLOW_COPY_AND_ASSIGN(VotingEnsemble);
};
} // namespace learning
diff --git a/chromium/media/learning/mojo/mojo_learning_task_controller_service.h b/chromium/media/learning/mojo/mojo_learning_task_controller_service.h
index 9cdb8eae8ea..70f382a417e 100644
--- a/chromium/media/learning/mojo/mojo_learning_task_controller_service.h
+++ b/chromium/media/learning/mojo/mojo_learning_task_controller_service.h
@@ -28,6 +28,12 @@ class COMPONENT_EXPORT(MEDIA_LEARNING_MOJO) MojoLearningTaskControllerService
const LearningTask& task,
ukm::SourceId source_id,
std::unique_ptr<::media::learning::LearningTaskController> impl);
+
+ MojoLearningTaskControllerService(const MojoLearningTaskControllerService&) =
+ delete;
+ MojoLearningTaskControllerService& operator=(
+ const MojoLearningTaskControllerService&) = delete;
+
~MojoLearningTaskControllerService() override;
// mojom::LearningTaskController
@@ -53,8 +59,6 @@ class COMPONENT_EXPORT(MEDIA_LEARNING_MOJO) MojoLearningTaskControllerService
std::unique_ptr<::media::learning::LearningTaskController> impl_;
std::set<base::UnguessableToken> in_flight_observations_;
-
- DISALLOW_COPY_AND_ASSIGN(MojoLearningTaskControllerService);
};
} // namespace learning
diff --git a/chromium/media/learning/mojo/public/cpp/mojo_learning_task_controller.h b/chromium/media/learning/mojo/public/cpp/mojo_learning_task_controller.h
index f58305908a5..3ab18bf93a7 100644
--- a/chromium/media/learning/mojo/public/cpp/mojo_learning_task_controller.h
+++ b/chromium/media/learning/mojo/public/cpp/mojo_learning_task_controller.h
@@ -23,6 +23,11 @@ class COMPONENT_EXPORT(MEDIA_LEARNING_MOJO) MojoLearningTaskController
MojoLearningTaskController(
const LearningTask& task,
mojo::Remote<mojom::LearningTaskController> controller);
+
+ MojoLearningTaskController(const MojoLearningTaskController&) = delete;
+ MojoLearningTaskController& operator=(const MojoLearningTaskController&) =
+ delete;
+
~MojoLearningTaskController() override;
// LearningTaskController
@@ -44,8 +49,6 @@ class COMPONENT_EXPORT(MEDIA_LEARNING_MOJO) MojoLearningTaskController
private:
LearningTask task_;
mojo::Remote<mojom::LearningTaskController> controller_;
-
- DISALLOW_COPY_AND_ASSIGN(MojoLearningTaskController);
};
} // namespace learning
diff --git a/chromium/media/media_options.gni b/chromium/media/media_options.gni
index 1a1f657637c..3574b2aa056 100644
--- a/chromium/media/media_options.gni
+++ b/chromium/media/media_options.gni
@@ -229,7 +229,6 @@ declare_args() {
# Valid options are:
# - "browser": Use mojo media service hosted in the browser process.
# - "gpu": Use mojo media service hosted in the gpu process.
- # - "utility": Use mojo media service hosted in the utility process.
# - "": Do not use mojo media service.
mojo_media_host = _default_mojo_media_host
}
diff --git a/chromium/media/midi/midi_manager.h b/chromium/media/midi/midi_manager.h
index facb5f3b2d0..cb27488c4f6 100644
--- a/chromium/media/midi/midi_manager.h
+++ b/chromium/media/midi/midi_manager.h
@@ -85,6 +85,10 @@ class MIDI_EXPORT MidiManager {
static const size_t kMaxPendingClientCount = 128;
explicit MidiManager(MidiService* service);
+
+ MidiManager(const MidiManager&) = delete;
+ MidiManager& operator=(const MidiManager&) = delete;
+
virtual ~MidiManager();
static MidiManager* Create(MidiService* service);
@@ -207,8 +211,6 @@ class MIDI_EXPORT MidiManager {
// MidiService outlives MidiManager.
MidiService* const service_;
-
- DISALLOW_COPY_AND_ASSIGN(MidiManager);
};
} // namespace midi
diff --git a/chromium/media/midi/midi_manager_alsa.h b/chromium/media/midi/midi_manager_alsa.h
index c9ac0cfb713..271a6b8661b 100644
--- a/chromium/media/midi/midi_manager_alsa.h
+++ b/chromium/media/midi/midi_manager_alsa.h
@@ -29,6 +29,10 @@ namespace midi {
class MIDI_EXPORT MidiManagerAlsa final : public MidiManager {
public:
explicit MidiManagerAlsa(MidiService* service);
+
+ MidiManagerAlsa(const MidiManagerAlsa&) = delete;
+ MidiManagerAlsa& operator=(const MidiManagerAlsa&) = delete;
+
~MidiManagerAlsa() override;
// MidiManager implementation.
@@ -181,6 +185,9 @@ class MIDI_EXPORT MidiManagerAlsa final : public MidiManager {
public:
typedef std::vector<std::unique_ptr<MidiPort>>::iterator iterator;
+ MidiPortStateBase(const MidiPortStateBase&) = delete;
+ MidiPortStateBase& operator=(const MidiPortStateBase&) = delete;
+
virtual ~MidiPortStateBase();
// Given a port, finds a port in the internal store.
@@ -204,8 +211,6 @@ class MIDI_EXPORT MidiManagerAlsa final : public MidiManager {
private:
std::vector<std::unique_ptr<MidiPort>> ports_;
-
- DISALLOW_COPY_AND_ASSIGN(MidiPortStateBase);
};
class TemporaryMidiPortState final : public MidiPortStateBase {
@@ -235,6 +240,10 @@ class MIDI_EXPORT MidiManagerAlsa final : public MidiManager {
enum class PortDirection { kInput, kOutput, kDuplex };
AlsaSeqState();
+
+ AlsaSeqState(const AlsaSeqState&) = delete;
+ AlsaSeqState& operator=(const AlsaSeqState&) = delete;
+
~AlsaSeqState();
void ClientStart(int client_id,
@@ -258,6 +267,10 @@ class MIDI_EXPORT MidiManagerAlsa final : public MidiManager {
class Port {
public:
Port(const std::string& name, PortDirection direction, bool midi);
+
+ Port(const Port&) = delete;
+ Port& operator=(const Port&) = delete;
+
~Port();
std::string name() const { return name_; }
@@ -269,8 +282,6 @@ class MIDI_EXPORT MidiManagerAlsa final : public MidiManager {
const std::string name_;
const PortDirection direction_;
const bool midi_;
-
- DISALLOW_COPY_AND_ASSIGN(Port);
};
class Client {
@@ -278,6 +289,10 @@ class MIDI_EXPORT MidiManagerAlsa final : public MidiManager {
using PortMap = std::map<int, std::unique_ptr<Port>>;
Client(const std::string& name, snd_seq_client_type_t type);
+
+ Client(const Client&) = delete;
+ Client& operator=(const Client&) = delete;
+
~Client();
std::string name() const { return name_; }
@@ -291,8 +306,6 @@ class MIDI_EXPORT MidiManagerAlsa final : public MidiManager {
const std::string name_;
const snd_seq_client_type_t type_;
PortMap ports_;
-
- DISALLOW_COPY_AND_ASSIGN(Client);
};
std::map<int, std::unique_ptr<Client>> clients_;
@@ -302,8 +315,6 @@ class MIDI_EXPORT MidiManagerAlsa final : public MidiManager {
// we are in sync between ALSA and udev. Until then, we cannot generate
// MIDIConnectionEvents to web clients.
int card_client_count_ = 0;
-
- DISALLOW_COPY_AND_ASSIGN(AlsaSeqState);
};
class AlsaCard {
@@ -313,6 +324,10 @@ class MIDI_EXPORT MidiManagerAlsa final : public MidiManager {
const std::string& longname,
const std::string& driver,
int midi_device_count);
+
+ AlsaCard(const AlsaCard&) = delete;
+ AlsaCard& operator=(const AlsaCard&) = delete;
+
~AlsaCard();
std::string name() const { return name_; }
std::string longname() const { return longname_; }
@@ -348,8 +363,6 @@ class MIDI_EXPORT MidiManagerAlsa final : public MidiManager {
const std::string serial_;
const int midi_device_count_;
const std::string manufacturer_;
-
- DISALLOW_COPY_AND_ASSIGN(AlsaCard);
};
struct SndSeqDeleter {
@@ -435,8 +448,6 @@ class MIDI_EXPORT MidiManagerAlsa final : public MidiManager {
// udev, for querying hardware devices.
device::ScopedUdevPtr udev_;
device::ScopedUdevMonitorPtr udev_monitor_;
-
- DISALLOW_COPY_AND_ASSIGN(MidiManagerAlsa);
};
} // namespace midi
diff --git a/chromium/media/midi/midi_manager_mac.cc b/chromium/media/midi/midi_manager_mac.cc
index 6041adcb26a..525d53af323 100644
--- a/chromium/media/midi/midi_manager_mac.cc
+++ b/chromium/media/midi/midi_manager_mac.cc
@@ -102,7 +102,7 @@ mojom::PortInfo GetPortInfoFromEndpoint(MIDIEndpointRef endpoint) {
base::TimeTicks MIDITimeStampToTimeTicks(MIDITimeStamp timestamp) {
UInt64 nanoseconds = AudioConvertHostTimeToNanos(timestamp);
- return base::TimeTicks() + base::TimeDelta::FromNanoseconds(nanoseconds);
+ return base::TimeTicks() + base::Nanoseconds(nanoseconds);
}
MIDITimeStamp TimeTicksToMIDITimeStamp(base::TimeTicks ticks) {
diff --git a/chromium/media/midi/midi_manager_mac.h b/chromium/media/midi/midi_manager_mac.h
index d18d139b959..c17d80e1fa7 100644
--- a/chromium/media/midi/midi_manager_mac.h
+++ b/chromium/media/midi/midi_manager_mac.h
@@ -26,6 +26,10 @@ class MidiService;
class MIDI_EXPORT MidiManagerMac final : public MidiManager {
public:
explicit MidiManagerMac(MidiService* service);
+
+ MidiManagerMac(const MidiManagerMac&) = delete;
+ MidiManagerMac& operator=(const MidiManagerMac&) = delete;
+
~MidiManagerMac() override;
// MidiManager implementation.
@@ -81,8 +85,6 @@ class MIDI_EXPORT MidiManagerMac final : public MidiManager {
// Keeps track of all destinations.
std::vector<MIDIEndpointRef> destinations_;
-
- DISALLOW_COPY_AND_ASSIGN(MidiManagerMac);
};
} // namespace midi
diff --git a/chromium/media/midi/midi_manager_mac_unittest.cc b/chromium/media/midi/midi_manager_mac_unittest.cc
index 30771290c4f..2175f2a2cc5 100644
--- a/chromium/media/midi/midi_manager_mac_unittest.cc
+++ b/chromium/media/midi/midi_manager_mac_unittest.cc
@@ -112,6 +112,10 @@ class FakeMidiManagerClient : public MidiManagerClient {
class MidiManagerMacTest : public ::testing::Test {
public:
MidiManagerMacTest() : service_(std::make_unique<MidiService>()) {}
+
+ MidiManagerMacTest(const MidiManagerMacTest&) = delete;
+ MidiManagerMacTest& operator=(const MidiManagerMacTest&) = delete;
+
~MidiManagerMacTest() override {
service_->Shutdown();
base::RunLoop run_loop;
@@ -127,8 +131,6 @@ class MidiManagerMacTest : public ::testing::Test {
private:
std::unique_ptr<MidiService> service_;
base::test::SingleThreadTaskEnvironment task_environment_;
-
- DISALLOW_COPY_AND_ASSIGN(MidiManagerMacTest);
};
diff --git a/chromium/media/midi/midi_manager_unittest.cc b/chromium/media/midi/midi_manager_unittest.cc
index eb06f60ed29..59cc375f3ab 100644
--- a/chromium/media/midi/midi_manager_unittest.cc
+++ b/chromium/media/midi/midi_manager_unittest.cc
@@ -37,6 +37,9 @@ class FakeMidiManager : public MidiManager {
public:
explicit FakeMidiManager(MidiService* service) : MidiManager(service) {}
+ FakeMidiManager(const FakeMidiManager&) = delete;
+ FakeMidiManager& operator=(const FakeMidiManager&) = delete;
+
~FakeMidiManager() override = default;
base::WeakPtr<FakeMidiManager> GetWeakPtr() {
@@ -68,13 +71,15 @@ class FakeMidiManager : public MidiManager {
bool initialized_ = false;
base::WeakPtrFactory<FakeMidiManager> weak_factory_{this};
-
- DISALLOW_COPY_AND_ASSIGN(FakeMidiManager);
};
class FakeMidiManagerFactory : public MidiService::ManagerFactory {
public:
FakeMidiManagerFactory() {}
+
+ FakeMidiManagerFactory(const FakeMidiManagerFactory&) = delete;
+ FakeMidiManagerFactory& operator=(const FakeMidiManagerFactory&) = delete;
+
~FakeMidiManagerFactory() override = default;
std::unique_ptr<MidiManager> Create(MidiService* service) override {
@@ -103,13 +108,15 @@ class FakeMidiManagerFactory : public MidiService::ManagerFactory {
private:
base::WeakPtr<FakeMidiManager> manager_ = nullptr;
base::WeakPtrFactory<FakeMidiManagerFactory> weak_factory_{this};
-
- DISALLOW_COPY_AND_ASSIGN(FakeMidiManagerFactory);
};
class FakeMidiManagerClient : public MidiManagerClient {
public:
FakeMidiManagerClient() = default;
+
+ FakeMidiManagerClient(const FakeMidiManagerClient&) = delete;
+ FakeMidiManagerClient& operator=(const FakeMidiManagerClient&) = delete;
+
~FakeMidiManagerClient() override = default;
// MidiManagerClient implementation.
@@ -142,8 +149,6 @@ class FakeMidiManagerClient : public MidiManagerClient {
private:
Result result_ = Result::NOT_SUPPORTED;
bool wait_for_result_ = true;
-
- DISALLOW_COPY_AND_ASSIGN(FakeMidiManagerClient);
};
class MidiManagerTest : public ::testing::Test {
@@ -155,6 +160,9 @@ class MidiManagerTest : public ::testing::Test {
service_ = std::make_unique<MidiService>(std::move(factory));
}
+ MidiManagerTest(const MidiManagerTest&) = delete;
+ MidiManagerTest& operator=(const MidiManagerTest&) = delete;
+
~MidiManagerTest() override {
service_->Shutdown();
base::RunLoop run_loop;
@@ -224,8 +232,6 @@ class MidiManagerTest : public ::testing::Test {
base::test::TaskEnvironment env_;
base::WeakPtr<FakeMidiManagerFactory> factory_;
std::unique_ptr<MidiService> service_;
-
- DISALLOW_COPY_AND_ASSIGN(MidiManagerTest);
};
TEST_F(MidiManagerTest, StartAndEndSession) {
@@ -327,6 +333,9 @@ class PlatformMidiManagerTest : public ::testing::Test {
//
}
+ PlatformMidiManagerTest(const PlatformMidiManagerTest&) = delete;
+ PlatformMidiManagerTest& operator=(const PlatformMidiManagerTest&) = delete;
+
~PlatformMidiManagerTest() override {
service_->Shutdown();
base::RunLoop run_loop;
@@ -358,8 +367,6 @@ class PlatformMidiManagerTest : public ::testing::Test {
std::unique_ptr<FakeMidiManagerClient> client_;
std::unique_ptr<MidiService> service_;
-
- DISALLOW_COPY_AND_ASSIGN(PlatformMidiManagerTest);
};
#if defined(OS_ANDROID)
diff --git a/chromium/media/midi/midi_manager_usb.h b/chromium/media/midi/midi_manager_usb.h
index af436c66f5e..7b08c44476b 100644
--- a/chromium/media/midi/midi_manager_usb.h
+++ b/chromium/media/midi/midi_manager_usb.h
@@ -36,6 +36,10 @@ class USB_MIDI_EXPORT MidiManagerUsb : public MidiManager,
public:
MidiManagerUsb(MidiService* service,
std::unique_ptr<UsbMidiDevice::Factory> device_factory);
+
+ MidiManagerUsb(const MidiManagerUsb&) = delete;
+ MidiManagerUsb& operator=(const MidiManagerUsb&) = delete;
+
~MidiManagerUsb() override;
// MidiManager implementation.
@@ -92,8 +96,6 @@ class USB_MIDI_EXPORT MidiManagerUsb : public MidiManager,
size_t,
base::IntPairHash<std::pair<int, int>>>
input_jack_dictionary_;
-
- DISALLOW_COPY_AND_ASSIGN(MidiManagerUsb);
};
} // namespace midi
diff --git a/chromium/media/midi/midi_manager_usb_unittest.cc b/chromium/media/midi/midi_manager_usb_unittest.cc
index 2b0765895e4..c7713f54e2f 100644
--- a/chromium/media/midi/midi_manager_usb_unittest.cc
+++ b/chromium/media/midi/midi_manager_usb_unittest.cc
@@ -34,6 +34,10 @@ std::vector<T> ToVector(const T (&array)[N]) {
class Logger {
public:
Logger() = default;
+
+ Logger(const Logger&) = delete;
+ Logger& operator=(const Logger&) = delete;
+
~Logger() = default;
void AddLog(const std::string& message) { log_ += message; }
@@ -45,13 +49,15 @@ class Logger {
private:
std::string log_;
-
- DISALLOW_COPY_AND_ASSIGN(Logger);
};
class FakeUsbMidiDevice : public UsbMidiDevice {
public:
explicit FakeUsbMidiDevice(Logger* logger) : logger_(logger) {}
+
+ FakeUsbMidiDevice(const FakeUsbMidiDevice&) = delete;
+ FakeUsbMidiDevice& operator=(const FakeUsbMidiDevice&) = delete;
+
~FakeUsbMidiDevice() override = default;
std::vector<uint8_t> GetDescriptors() override {
@@ -91,8 +97,6 @@ class FakeUsbMidiDevice : public UsbMidiDevice {
std::string product_name_;
std::string device_version_;
Logger* logger_;
-
- DISALLOW_COPY_AND_ASSIGN(FakeUsbMidiDevice);
};
class FakeMidiManagerClient : public MidiManagerClient {
@@ -101,6 +105,10 @@ class FakeMidiManagerClient : public MidiManagerClient {
: complete_start_session_(false),
result_(Result::NOT_SUPPORTED),
logger_(logger) {}
+
+ FakeMidiManagerClient(const FakeMidiManagerClient&) = delete;
+ FakeMidiManagerClient& operator=(const FakeMidiManagerClient&) = delete;
+
~FakeMidiManagerClient() override = default;
void AddInputPort(const mojom::PortInfo& info) override {
@@ -148,13 +156,15 @@ class FakeMidiManagerClient : public MidiManagerClient {
private:
Logger* logger_;
-
- DISALLOW_COPY_AND_ASSIGN(FakeMidiManagerClient);
};
class TestUsbMidiDeviceFactory : public UsbMidiDevice::Factory {
public:
TestUsbMidiDeviceFactory() = default;
+
+ TestUsbMidiDeviceFactory(const TestUsbMidiDeviceFactory&) = delete;
+ TestUsbMidiDeviceFactory& operator=(const TestUsbMidiDeviceFactory&) = delete;
+
~TestUsbMidiDeviceFactory() override = default;
void EnumerateDevices(UsbMidiDeviceDelegate* device,
Callback callback) override {
@@ -162,9 +172,6 @@ class TestUsbMidiDeviceFactory : public UsbMidiDevice::Factory {
}
Callback callback_;
-
- private:
- DISALLOW_COPY_AND_ASSIGN(TestUsbMidiDeviceFactory);
};
class MidiManagerUsbForTesting : public MidiManagerUsb {
@@ -173,6 +180,10 @@ class MidiManagerUsbForTesting : public MidiManagerUsb {
std::unique_ptr<UsbMidiDevice::Factory> device_factory,
MidiService* service)
: MidiManagerUsb(service, std::move(device_factory)) {}
+
+ MidiManagerUsbForTesting(const MidiManagerUsbForTesting&) = delete;
+ MidiManagerUsbForTesting& operator=(const MidiManagerUsbForTesting&) = delete;
+
~MidiManagerUsbForTesting() override = default;
void CallCompleteInitialization(Result result) {
@@ -180,9 +191,6 @@ class MidiManagerUsbForTesting : public MidiManagerUsb {
base::RunLoop run_loop;
run_loop.RunUntilIdle();
}
-
- private:
- DISALLOW_COPY_AND_ASSIGN(MidiManagerUsbForTesting);
};
class MidiManagerFactoryForTesting : public midi::MidiService::ManagerFactory {
@@ -226,6 +234,10 @@ class MidiManagerUsbTest : public ::testing::Test {
factory_ = factory.get();
service_ = std::make_unique<MidiService>(std::move(factory));
}
+
+ MidiManagerUsbTest(const MidiManagerUsbTest&) = delete;
+ MidiManagerUsbTest& operator=(const MidiManagerUsbTest&) = delete;
+
~MidiManagerUsbTest() override {
service_->Shutdown();
base::RunLoop run_loop;
@@ -276,8 +288,6 @@ class MidiManagerUsbTest : public ::testing::Test {
private:
std::unique_ptr<MidiService> service_;
base::test::SingleThreadTaskEnvironment task_environment_;
-
- DISALLOW_COPY_AND_ASSIGN(MidiManagerUsbTest);
};
diff --git a/chromium/media/midi/midi_manager_win.cc b/chromium/media/midi/midi_manager_win.cc
index 4fd33120517..ae9ac0cf0d1 100644
--- a/chromium/media/midi/midi_manager_win.cc
+++ b/chromium/media/midi/midi_manager_win.cc
@@ -392,7 +392,7 @@ class MidiManagerWin::InPort final : public Port {
}
base::TimeTicks CalculateInEventTime(uint32_t elapsed_ms) const {
- return start_time_ + base::TimeDelta::FromMilliseconds(elapsed_ms);
+ return start_time_ + base::Milliseconds(elapsed_ms);
}
void RestoreBuffer() {
diff --git a/chromium/media/midi/midi_manager_win.h b/chromium/media/midi/midi_manager_win.h
index 978abface10..d6f0bd32f25 100644
--- a/chromium/media/midi/midi_manager_win.h
+++ b/chromium/media/midi/midi_manager_win.h
@@ -32,6 +32,10 @@ class MidiManagerWin final
MIDI_EXPORT static void OverflowInstanceIdForTesting();
explicit MidiManagerWin(MidiService* service);
+
+ MidiManagerWin(const MidiManagerWin&) = delete;
+ MidiManagerWin& operator=(const MidiManagerWin&) = delete;
+
~MidiManagerWin() override;
// Returns PortManager that implements interfaces to help implementation.
@@ -94,8 +98,6 @@ class MidiManagerWin final
// Manages platform-dependent implementation for port management. Should be

// accessed with the task lock.
std::unique_ptr<PortManager> port_manager_;
-
- DISALLOW_COPY_AND_ASSIGN(MidiManagerWin);
};
} // namespace midi
diff --git a/chromium/media/midi/midi_manager_winrt.h b/chromium/media/midi/midi_manager_winrt.h
index 121ff6fcaad..ff0599584bd 100644
--- a/chromium/media/midi/midi_manager_winrt.h
+++ b/chromium/media/midi/midi_manager_winrt.h
@@ -20,6 +20,10 @@ class MIDI_EXPORT MidiManagerWinrt final : public MidiManager {
class MidiOutPortManager;
explicit MidiManagerWinrt(MidiService* service);
+
+ MidiManagerWinrt(const MidiManagerWinrt&) = delete;
+ MidiManagerWinrt& operator=(const MidiManagerWinrt&) = delete;
+
~MidiManagerWinrt() override;
// MidiManager overrides:
@@ -58,8 +62,6 @@ class MIDI_EXPORT MidiManagerWinrt final : public MidiManager {
// Incremented when a MidiPortManager is ready.
uint8_t port_manager_ready_count_ = 0;
-
- DISALLOW_COPY_AND_ASSIGN(MidiManagerWinrt);
};
} // namespace midi
diff --git a/chromium/media/midi/midi_message_queue.h b/chromium/media/midi/midi_message_queue.h
index c351b33fc4c..a2c09b88292 100644
--- a/chromium/media/midi/midi_message_queue.h
+++ b/chromium/media/midi/midi_message_queue.h
@@ -46,6 +46,10 @@ class MIDI_EXPORT MidiMessageQueue {
// Initializes the queue. Pass true for |allow_running_status| to enable
// "MIDI running status" reconstruction.
explicit MidiMessageQueue(bool allow_running_status);
+
+ MidiMessageQueue(const MidiMessageQueue&) = delete;
+ MidiMessageQueue& operator=(const MidiMessageQueue&) = delete;
+
~MidiMessageQueue();
// Enqueues |data| to the internal buffer.
@@ -67,7 +71,6 @@ class MIDI_EXPORT MidiMessageQueue {
base::circular_deque<uint8_t> queue_;
std::vector<uint8_t> next_message_;
const bool allow_running_status_;
- DISALLOW_COPY_AND_ASSIGN(MidiMessageQueue);
};
} // namespace midi
diff --git a/chromium/media/midi/midi_service.h b/chromium/media/midi/midi_service.h
index e10bd163267..08f07c17439 100644
--- a/chromium/media/midi/midi_service.h
+++ b/chromium/media/midi/midi_service.h
@@ -31,10 +31,12 @@ class MIDI_EXPORT MidiService final {
class MIDI_EXPORT ManagerFactory {
public:
ManagerFactory() = default;
+
+ ManagerFactory(const ManagerFactory&) = delete;
+ ManagerFactory& operator=(const ManagerFactory&) = delete;
+
virtual ~ManagerFactory() = default;
virtual std::unique_ptr<MidiManager> Create(MidiService* service);
-
- DISALLOW_COPY_AND_ASSIGN(ManagerFactory);
};
// Converts Web MIDI timestamp to base::TimeDelta delay for PostDelayedTask.
@@ -43,6 +45,10 @@ class MIDI_EXPORT MidiService final {
MidiService();
// Customized ManagerFactory can be specified in the constructor for testing.
explicit MidiService(std::unique_ptr<ManagerFactory> factory);
+
+ MidiService(const MidiService&) = delete;
+ MidiService& operator=(const MidiService&) = delete;
+
~MidiService();
// Called on the browser main thread to notify the I/O thread will stop and
@@ -98,8 +104,6 @@ class MIDI_EXPORT MidiService final {
// Protects |threads_|.
base::Lock threads_lock_;
-
- DISALLOW_COPY_AND_ASSIGN(MidiService);
};
} // namespace midi
diff --git a/chromium/media/midi/task_service.h b/chromium/media/midi/task_service.h
index 3ac8f31f4c7..8af720951cc 100644
--- a/chromium/media/midi/task_service.h
+++ b/chromium/media/midi/task_service.h
@@ -33,6 +33,10 @@ class MIDI_EXPORT TaskService final {
static constexpr RunnerId kDefaultRunnerId = 0;
TaskService();
+
+ TaskService(const TaskService&) = delete;
+ TaskService& operator=(const TaskService&) = delete;
+
~TaskService();
// Issues an InstanceId internally to post tasks via PostBoundTask() and
@@ -111,8 +115,6 @@ class MIDI_EXPORT TaskService final {
// Verifies all UnbindInstance() calls occur on the same sequence as
// BindInstance().
SEQUENCE_CHECKER(instance_binding_sequence_checker_);
-
- DISALLOW_COPY_AND_ASSIGN(TaskService);
};
} // namespace midi
diff --git a/chromium/media/midi/task_service_unittest.cc b/chromium/media/midi/task_service_unittest.cc
index f2bc8fb3cd8..e76eeba4718 100644
--- a/chromium/media/midi/task_service_unittest.cc
+++ b/chromium/media/midi/task_service_unittest.cc
@@ -84,7 +84,7 @@ class TaskServiceClient {
task_service()->PostBoundDelayedTask(
runner_id,
base::BindOnce(&TaskServiceClient::SignalEvent, base::Unretained(this)),
- base::TimeDelta::FromMilliseconds(100));
+ base::Milliseconds(100));
}
void WaitTask() { wait_task_event_->Wait(); }
diff --git a/chromium/media/midi/usb_midi_descriptor_parser.h b/chromium/media/midi/usb_midi_descriptor_parser.h
index ebee18582eb..bbecce8e370 100644
--- a/chromium/media/midi/usb_midi_descriptor_parser.h
+++ b/chromium/media/midi/usb_midi_descriptor_parser.h
@@ -45,6 +45,10 @@ class USB_MIDI_EXPORT UsbMidiDescriptorParser {
};
UsbMidiDescriptorParser();
+
+ UsbMidiDescriptorParser(const UsbMidiDescriptorParser&) = delete;
+ UsbMidiDescriptorParser& operator=(const UsbMidiDescriptorParser&) = delete;
+
~UsbMidiDescriptorParser();
// Returns true if the operation succeeds.
@@ -78,8 +82,6 @@ class USB_MIDI_EXPORT UsbMidiDescriptorParser {
uint8_t current_cable_number_;
std::vector<UsbMidiJack> incomplete_jacks_;
-
- DISALLOW_COPY_AND_ASSIGN(UsbMidiDescriptorParser);
};
} // namespace midi
diff --git a/chromium/media/midi/usb_midi_device_factory_android.h b/chromium/media/midi/usb_midi_device_factory_android.h
index feb6fafdbda..03416bec278 100644
--- a/chromium/media/midi/usb_midi_device_factory_android.h
+++ b/chromium/media/midi/usb_midi_device_factory_android.h
@@ -21,6 +21,11 @@ class USB_MIDI_EXPORT UsbMidiDeviceFactoryAndroid
: public UsbMidiDevice::Factory {
public:
UsbMidiDeviceFactoryAndroid();
+
+ UsbMidiDeviceFactoryAndroid(const UsbMidiDeviceFactoryAndroid&) = delete;
+ UsbMidiDeviceFactoryAndroid& operator=(const UsbMidiDeviceFactoryAndroid&) =
+ delete;
+
~UsbMidiDeviceFactoryAndroid() override;
// UsbMidiDevice::Factory implementation.
@@ -42,8 +47,6 @@ class USB_MIDI_EXPORT UsbMidiDeviceFactoryAndroid
// Not owned.
UsbMidiDeviceDelegate* delegate_;
Callback callback_;
-
- DISALLOW_COPY_AND_ASSIGN(UsbMidiDeviceFactoryAndroid);
};
} // namespace midi
diff --git a/chromium/media/midi/usb_midi_input_stream.h b/chromium/media/midi/usb_midi_input_stream.h
index 830af0dc022..f1693019a2b 100644
--- a/chromium/media/midi/usb_midi_input_stream.h
+++ b/chromium/media/midi/usb_midi_input_stream.h
@@ -48,6 +48,10 @@ class USB_MIDI_EXPORT UsbMidiInputStream {
};
explicit UsbMidiInputStream(Delegate* delegate);
+
+ UsbMidiInputStream(const UsbMidiInputStream&) = delete;
+ UsbMidiInputStream& operator=(const UsbMidiInputStream&) = delete;
+
~UsbMidiInputStream();
void Add(const UsbMidiJack& jack);
@@ -79,8 +83,6 @@ class USB_MIDI_EXPORT UsbMidiInputStream {
// Not owned
Delegate* delegate_;
-
- DISALLOW_COPY_AND_ASSIGN(UsbMidiInputStream);
};
} // namespace midi
diff --git a/chromium/media/midi/usb_midi_input_stream_unittest.cc b/chromium/media/midi/usb_midi_input_stream_unittest.cc
index 88be8dcf31d..a817ea80744 100644
--- a/chromium/media/midi/usb_midi_input_stream_unittest.cc
+++ b/chromium/media/midi/usb_midi_input_stream_unittest.cc
@@ -26,6 +26,10 @@ namespace {
class TestUsbMidiDevice : public UsbMidiDevice {
public:
TestUsbMidiDevice() = default;
+
+ TestUsbMidiDevice(const TestUsbMidiDevice&) = delete;
+ TestUsbMidiDevice& operator=(const TestUsbMidiDevice&) = delete;
+
~TestUsbMidiDevice() override = default;
std::vector<uint8_t> GetDescriptors() override {
return std::vector<uint8_t>();
@@ -34,14 +38,15 @@ class TestUsbMidiDevice : public UsbMidiDevice {
std::string GetProductName() override { return std::string(); }
std::string GetDeviceVersion() override { return std::string(); }
void Send(int endpoint_number, const std::vector<uint8_t>& data) override {}
-
- private:
- DISALLOW_COPY_AND_ASSIGN(TestUsbMidiDevice);
};
class MockDelegate : public UsbMidiInputStream::Delegate {
public:
MockDelegate() = default;
+
+ MockDelegate(const MockDelegate&) = delete;
+ MockDelegate& operator=(const MockDelegate&) = delete;
+
~MockDelegate() override = default;
void OnReceivedData(size_t jack_index,
const uint8_t* data,
@@ -56,7 +61,6 @@ class MockDelegate : public UsbMidiInputStream::Delegate {
private:
std::string received_data_;
- DISALLOW_COPY_AND_ASSIGN(MockDelegate);
};
class UsbMidiInputStreamTest : public ::testing::Test {
diff --git a/chromium/media/midi/usb_midi_output_stream.cc b/chromium/media/midi/usb_midi_output_stream.cc
index 50784f3aa62..2b4494a8db2 100644
--- a/chromium/media/midi/usb_midi_output_stream.cc
+++ b/chromium/media/midi/usb_midi_output_stream.cc
@@ -15,8 +15,6 @@ UsbMidiOutputStream::UsbMidiOutputStream(const UsbMidiJack& jack)
: jack_(jack), pending_size_(0), is_sending_sysex_(false) {}
void UsbMidiOutputStream::Send(const std::vector<uint8_t>& data) {
- // To prevent link errors caused by DCHECK_*.
- const size_t kPacketContentSize = UsbMidiOutputStream::kPacketContentSize;
DCHECK_LT(jack_.cable_number, 16u);
std::vector<uint8_t> data_to_send;
diff --git a/chromium/media/midi/usb_midi_output_stream_unittest.cc b/chromium/media/midi/usb_midi_output_stream_unittest.cc
index eb39056281b..fc6dc5f27ba 100644
--- a/chromium/media/midi/usb_midi_output_stream_unittest.cc
+++ b/chromium/media/midi/usb_midi_output_stream_unittest.cc
@@ -28,6 +28,10 @@ std::vector<T> ToVector(const T((&array)[N])) {
class MockUsbMidiDevice : public UsbMidiDevice {
public:
MockUsbMidiDevice() = default;
+
+ MockUsbMidiDevice(const MockUsbMidiDevice&) = delete;
+ MockUsbMidiDevice& operator=(const MockUsbMidiDevice&) = delete;
+
~MockUsbMidiDevice() override = default;
std::vector<uint8_t> GetDescriptors() override {
@@ -50,8 +54,6 @@ class MockUsbMidiDevice : public UsbMidiDevice {
private:
std::string log_;
-
- DISALLOW_COPY_AND_ASSIGN(MockUsbMidiDevice);
};
class UsbMidiOutputStreamTest : public ::testing::Test {
diff --git a/chromium/media/mojo/BUILD.gn b/chromium/media/mojo/BUILD.gn
index 7005cfbbb78..5e0fa50d2cd 100644
--- a/chromium/media/mojo/BUILD.gn
+++ b/chromium/media/mojo/BUILD.gn
@@ -16,7 +16,6 @@ buildflag_header("buildflags") {
enable_mojo_video_decoder = false
enable_mojo_media_in_browser_process = false
enable_mojo_media_in_gpu_process = false
- enable_mojo_media_in_utility_process = false
foreach(service, mojo_media_services) {
if (service == "renderer") {
@@ -36,8 +35,6 @@ buildflag_header("buildflags") {
enable_mojo_media_in_browser_process = true
} else if (mojo_media_host == "gpu") {
enable_mojo_media_in_gpu_process = true
- } else if (mojo_media_host == "utility") {
- enable_mojo_media_in_utility_process = true
} else if (mojo_media_host != "") {
assert(false, "Invalid mojo media host: $mojo_media_host")
}
@@ -50,7 +47,6 @@ buildflag_header("buildflags") {
"ENABLE_MOJO_VIDEO_DECODER=$enable_mojo_video_decoder",
"ENABLE_MOJO_MEDIA_IN_BROWSER_PROCESS=$enable_mojo_media_in_browser_process",
"ENABLE_MOJO_MEDIA_IN_GPU_PROCESS=$enable_mojo_media_in_gpu_process",
- "ENABLE_MOJO_MEDIA_IN_UTILITY_PROCESS=$enable_mojo_media_in_utility_process",
]
}
diff --git a/chromium/media/mojo/README.md b/chromium/media/mojo/README.md
index d13dc2c607e..955d9a589da 100644
--- a/chromium/media/mojo/README.md
+++ b/chromium/media/mojo/README.md
@@ -107,13 +107,13 @@ mojo interface implementations. It comes with some nice benefits.
Different platforms or products have different requirements on where the remote
media components should run. For example, a hardware decoder typically should
run in the GPU process. The `ServiceManagerContext` provides the ability to run
-a service_manager::Service in-process (browser), out-of-process (utility) or in
-the GPU process. Therefore, by using a `MediaService`, it’s very easy to support
-hosting remote media components interfaces in most common Chromium process types
-(Browser/Utility/GPU). This can by set using the gn argument `mojo_media_host`,
+a service in-process (browser) or in the GPU process. Therefore, by using a
+`MediaService`, it’s very easy to support hosting remote media components
+interfaces in most common Chromium process types (Browser/GPU). This can be set
+using the gn argument `mojo_media_host`,
e.g.
```
-mojo_media_host = "browser" or “gpu” or “utility”
+mojo_media_host = "browser" or "gpu"
```
MediaService is registered in `ServiceManagerContext` using `kMediaServiceName`.
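With the "utility" host gone, ENABLE_MOJO_MEDIA_IN_UTILITY_PROCESS no longer appears in the generated buildflags header, leaving only the browser and GPU options. A rough, self-contained sketch of how code typically branches on such generated flags; the SelectedMediaServiceHost() helper and the #define stand-ins below are hypothetical, and only the overall #if pattern mirrors the buildflag_header() output from the BUILD.gn hunk above:

```
#include <iostream>

// Stand-ins for the generated media/mojo/buildflags.h values. In real Chromium
// code this would be `#if BUILDFLAG(ENABLE_MOJO_MEDIA_IN_GPU_PROCESS)` with the
// generated header included.
#define ENABLE_MOJO_MEDIA_IN_BROWSER_PROCESS 1
#define ENABLE_MOJO_MEDIA_IN_GPU_PROCESS 0

// Hypothetical helper: reports where the MediaService would be hosted given
// the remaining build-time options (browser or GPU; "utility" no longer exists).
const char* SelectedMediaServiceHost() {
#if ENABLE_MOJO_MEDIA_IN_GPU_PROCESS
  return "gpu";
#elif ENABLE_MOJO_MEDIA_IN_BROWSER_PROCESS
  return "browser";
#else
  return "none";  // mojo_media_host was left empty
#endif
}

int main() {
  std::cout << "MediaService host: " << SelectedMediaServiceHost() << "\n";
  return 0;
}
```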
diff --git a/chromium/media/mojo/clients/BUILD.gn b/chromium/media/mojo/clients/BUILD.gn
index 44b3e5be6e3..6ba0dfc5008 100644
--- a/chromium/media/mojo/clients/BUILD.gn
+++ b/chromium/media/mojo/clients/BUILD.gn
@@ -77,6 +77,7 @@ source_set("clients") {
]
deps = [
+ "//build:chromeos_buildflags",
"//media/gpu",
"//media/mojo/common",
"//media/mojo/common:mojo_shared_buffer_video_frame",
diff --git a/chromium/media/mojo/clients/mojo_android_overlay.h b/chromium/media/mojo/clients/mojo_android_overlay.h
index 0e2e8a8cafb..06409382c8d 100644
--- a/chromium/media/mojo/clients/mojo_android_overlay.h
+++ b/chromium/media/mojo/clients/mojo_android_overlay.h
@@ -24,6 +24,9 @@ class MojoAndroidOverlay : public AndroidOverlay,
AndroidOverlayConfig config,
const base::UnguessableToken& routing_token);
+ MojoAndroidOverlay(const MojoAndroidOverlay&) = delete;
+ MojoAndroidOverlay& operator=(const MojoAndroidOverlay&) = delete;
+
~MojoAndroidOverlay() override;
// AndroidOverlay
@@ -45,8 +48,6 @@ class MojoAndroidOverlay : public AndroidOverlay,
// Have we received OnSurfaceReady yet?
bool received_surface_ = false;
-
- DISALLOW_COPY_AND_ASSIGN(MojoAndroidOverlay);
};
} // namespace media
diff --git a/chromium/media/mojo/clients/mojo_audio_decoder.h b/chromium/media/mojo/clients/mojo_audio_decoder.h
index 04a40615daa..3688175653c 100644
--- a/chromium/media/mojo/clients/mojo_audio_decoder.h
+++ b/chromium/media/mojo/clients/mojo_audio_decoder.h
@@ -31,6 +31,10 @@ class MojoAudioDecoder final : public AudioDecoder,
public:
MojoAudioDecoder(scoped_refptr<base::SequencedTaskRunner> task_runner,
mojo::PendingRemote<mojom::AudioDecoder> remote_decoder);
+
+ MojoAudioDecoder(const MojoAudioDecoder&) = delete;
+ MojoAudioDecoder& operator=(const MojoAudioDecoder&) = delete;
+
~MojoAudioDecoder() final;
// Decoder implementation
@@ -106,8 +110,6 @@ class MojoAudioDecoder final : public AudioDecoder,
// Passed from |remote_decoder_| as a result of its initialization.
bool needs_bitstream_conversion_ = false;
AudioDecoderType decoder_type_ = AudioDecoderType::kUnknown;
-
- DISALLOW_COPY_AND_ASSIGN(MojoAudioDecoder);
};
} // namespace media
diff --git a/chromium/media/mojo/clients/mojo_audio_decoder_unittest.cc b/chromium/media/mojo/clients/mojo_audio_decoder_unittest.cc
index 8f4ad6ca715..50fda74e7b9 100644
--- a/chromium/media/mojo/clients/mojo_audio_decoder_unittest.cc
+++ b/chromium/media/mojo/clients/mojo_audio_decoder_unittest.cc
@@ -70,6 +70,9 @@ class MojoAudioDecoderTest : public ::testing::Test {
std::move(remote_audio_decoder));
}
+ MojoAudioDecoderTest(const MojoAudioDecoderTest&) = delete;
+ MojoAudioDecoderTest& operator=(const MojoAudioDecoderTest&) = delete;
+
~MojoAudioDecoderTest() override {
// Destroy |mojo_audio_decoder_| first so that the service will be
// destructed. Then stop the service thread. Otherwise we'll leak memory.
@@ -140,9 +143,9 @@ class MojoAudioDecoderTest : public ::testing::Test {
EXPECT_CALL(*this, OnInitialized(SameStatusCode(status)))
.WillOnce(InvokeWithoutArgs(this, &MojoAudioDecoderTest::QuitLoop));
- AudioDecoderConfig audio_config(kCodecVorbis, kSampleFormat, kChannelLayout,
- kDefaultSampleRate, EmptyExtraData(),
- EncryptionScheme::kUnencrypted);
+ AudioDecoderConfig audio_config(
+ AudioCodec::kVorbis, kSampleFormat, kChannelLayout, kDefaultSampleRate,
+ EmptyExtraData(), EncryptionScheme::kUnencrypted);
mojo_audio_decoder_->Initialize(
audio_config, nullptr,
@@ -248,9 +251,6 @@ class MojoAudioDecoderTest : public ::testing::Test {
int num_of_decodes_ = 0;
int decode_count_ = 0;
-
- private:
- DISALLOW_COPY_AND_ASSIGN(MojoAudioDecoderTest);
};
TEST_F(MojoAudioDecoderTest, Initialize_Success) {
diff --git a/chromium/media/mojo/clients/mojo_cdm_factory.h b/chromium/media/mojo/clients/mojo_cdm_factory.h
index 1ca54fec43f..df512de9af8 100644
--- a/chromium/media/mojo/clients/mojo_cdm_factory.h
+++ b/chromium/media/mojo/clients/mojo_cdm_factory.h
@@ -17,6 +17,10 @@ class InterfaceFactory;
class MojoCdmFactory final : public CdmFactory {
public:
explicit MojoCdmFactory(media::mojom::InterfaceFactory* interface_factory);
+
+ MojoCdmFactory(const MojoCdmFactory&) = delete;
+ MojoCdmFactory& operator=(const MojoCdmFactory&) = delete;
+
~MojoCdmFactory() final;
// CdmFactory implementation.
@@ -30,8 +34,6 @@ class MojoCdmFactory final : public CdmFactory {
private:
media::mojom::InterfaceFactory* interface_factory_;
-
- DISALLOW_COPY_AND_ASSIGN(MojoCdmFactory);
};
} // namespace media
diff --git a/chromium/media/mojo/clients/mojo_cdm_unittest.cc b/chromium/media/mojo/clients/mojo_cdm_unittest.cc
index f38445b3a4b..2717ce7dd2c 100644
--- a/chromium/media/mojo/clients/mojo_cdm_unittest.cc
+++ b/chromium/media/mojo/clients/mojo_cdm_unittest.cc
@@ -70,6 +70,10 @@ class MojoCdmTest : public ::testing::Test {
};
MojoCdmTest() = default;
+
+ MojoCdmTest(const MojoCdmTest&) = delete;
+ MojoCdmTest& operator=(const MojoCdmTest&) = delete;
+
~MojoCdmTest() override = default;
void Initialize(ExpectedResult expected_result) {
@@ -376,9 +380,6 @@ class MojoCdmTest : public ::testing::Test {
#if defined(OS_WIN)
bool requires_media_foundation_renderer_ = false;
#endif
-
- private:
- DISALLOW_COPY_AND_ASSIGN(MojoCdmTest);
};
TEST_F(MojoCdmTest, Create_Success) {
diff --git a/chromium/media/mojo/clients/mojo_decoder_factory.h b/chromium/media/mojo/clients/mojo_decoder_factory.h
index 9bce9003f1d..92e132c7d33 100644
--- a/chromium/media/mojo/clients/mojo_decoder_factory.h
+++ b/chromium/media/mojo/clients/mojo_decoder_factory.h
@@ -19,6 +19,10 @@ class MojoDecoderFactory final : public DecoderFactory {
public:
explicit MojoDecoderFactory(
media::mojom::InterfaceFactory* interface_factory);
+
+ MojoDecoderFactory(const MojoDecoderFactory&) = delete;
+ MojoDecoderFactory& operator=(const MojoDecoderFactory&) = delete;
+
~MojoDecoderFactory() final;
void CreateAudioDecoders(
@@ -38,8 +42,6 @@ class MojoDecoderFactory final : public DecoderFactory {
private:
media::mojom::InterfaceFactory* interface_factory_;
-
- DISALLOW_COPY_AND_ASSIGN(MojoDecoderFactory);
};
} // namespace media
diff --git a/chromium/media/mojo/clients/mojo_decryptor.h b/chromium/media/mojo/clients/mojo_decryptor.h
index 932e9456ee9..885e8cc249b 100644
--- a/chromium/media/mojo/clients/mojo_decryptor.h
+++ b/chromium/media/mojo/clients/mojo_decryptor.h
@@ -31,6 +31,10 @@ class MojoDecryptor final : public Decryptor {
// will be used.
MojoDecryptor(mojo::PendingRemote<mojom::Decryptor> remote_decryptor,
uint32_t writer_capacity = 0);
+
+ MojoDecryptor(const MojoDecryptor&) = delete;
+ MojoDecryptor& operator=(const MojoDecryptor&) = delete;
+
~MojoDecryptor() final;
// Decryptor implementation.
@@ -87,8 +91,6 @@ class MojoDecryptor final : public Decryptor {
std::unique_ptr<MojoDecoderBufferReader> decrypted_buffer_reader_;
base::WeakPtrFactory<MojoDecryptor> weak_factory_{this};
-
- DISALLOW_COPY_AND_ASSIGN(MojoDecryptor);
};
} // namespace media
diff --git a/chromium/media/mojo/clients/mojo_decryptor_unittest.cc b/chromium/media/mojo/clients/mojo_decryptor_unittest.cc
index bd1d63a3408..32b4a151a55 100644
--- a/chromium/media/mojo/clients/mojo_decryptor_unittest.cc
+++ b/chromium/media/mojo/clients/mojo_decryptor_unittest.cc
@@ -37,6 +37,10 @@ namespace media {
class MojoDecryptorTest : public ::testing::Test {
public:
MojoDecryptorTest() = default;
+
+ MojoDecryptorTest(const MojoDecryptorTest&) = delete;
+ MojoDecryptorTest& operator=(const MojoDecryptorTest&) = delete;
+
~MojoDecryptorTest() override = default;
void SetWriterCapacity(uint32_t capacity) { writer_capacity_ = capacity; }
@@ -72,8 +76,7 @@ class MojoDecryptorTest : public ::testing::Test {
// We don't care about the encrypted data, just create a simple VideoFrame.
scoped_refptr<VideoFrame> frame(
MojoSharedBufferVideoFrame::CreateDefaultForTesting(
- PIXEL_FORMAT_I420, gfx::Size(100, 100),
- base::TimeDelta::FromSeconds(100)));
+ PIXEL_FORMAT_I420, gfx::Size(100, 100), base::Seconds(100)));
frame->AddDestructionObserver(base::BindOnce(
&MojoDecryptorTest::OnFrameDestroyed, base::Unretained(this)));
@@ -87,7 +90,7 @@ class MojoDecryptorTest : public ::testing::Test {
Decryptor::AudioDecodeCB audio_decode_cb) {
const ChannelLayout kChannelLayout = CHANNEL_LAYOUT_4_0;
const int kSampleRate = 48000;
- const base::TimeDelta start_time = base::TimeDelta::FromSecondsD(1000.0);
+ const base::TimeDelta start_time = base::Seconds(1000.0);
auto audio_buffer = MakeAudioBuffer<float>(
kSampleFormatPlanarF32, kChannelLayout,
ChannelLayoutToChannelCount(kChannelLayout), kSampleRate, 0.0f, 1.0f,
@@ -126,9 +129,6 @@ class MojoDecryptorTest : public ::testing::Test {
// The actual Decryptor object used by |mojo_decryptor_service_|.
std::unique_ptr<StrictMock<MockDecryptor>> decryptor_;
-
- private:
- DISALLOW_COPY_AND_ASSIGN(MojoDecryptorTest);
};
// DecryptAndDecodeAudio() and ResetDecoder(kAudio) immediately.
diff --git a/chromium/media/mojo/clients/mojo_demuxer_stream_impl.h b/chromium/media/mojo/clients/mojo_demuxer_stream_impl.h
index f3d258b11a0..8bbde8f5e5f 100644
--- a/chromium/media/mojo/clients/mojo_demuxer_stream_impl.h
+++ b/chromium/media/mojo/clients/mojo_demuxer_stream_impl.h
@@ -28,6 +28,10 @@ class MojoDemuxerStreamImpl : public mojom::DemuxerStream {
// Note: |this| does not take ownership of |stream|.
MojoDemuxerStreamImpl(media::DemuxerStream* stream,
mojo::PendingReceiver<mojom::DemuxerStream> receiver);
+
+ MojoDemuxerStreamImpl(const MojoDemuxerStreamImpl&) = delete;
+ MojoDemuxerStreamImpl& operator=(const MojoDemuxerStreamImpl&) = delete;
+
~MojoDemuxerStreamImpl() override;
// mojom::DemuxerStream implementation.
@@ -59,7 +63,6 @@ class MojoDemuxerStreamImpl : public mojom::DemuxerStream {
std::unique_ptr<MojoDecoderBufferWriter> mojo_decoder_buffer_writer_;
base::WeakPtrFactory<MojoDemuxerStreamImpl> weak_factory_{this};
- DISALLOW_COPY_AND_ASSIGN(MojoDemuxerStreamImpl);
};
} // namespace media
diff --git a/chromium/media/mojo/clients/mojo_media_log_service.cc b/chromium/media/mojo/clients/mojo_media_log_service.cc
index 0e5ba5ba327..ab91a410402 100644
--- a/chromium/media/mojo/clients/mojo_media_log_service.cc
+++ b/chromium/media/mojo/clients/mojo_media_log_service.cc
@@ -4,15 +4,14 @@
#include "media/mojo/clients/mojo_media_log_service.h"
-#include <memory>
-
#include "base/logging.h"
#include "media/base/media_log_record.h"
namespace media {
-MojoMediaLogService::MojoMediaLogService(media::MediaLog* media_log)
- : media_log_(media_log) {
+MojoMediaLogService::MojoMediaLogService(
+ std::unique_ptr<media::MediaLog> media_log)
+ : media_log_(std::move(media_log)) {
DVLOG(1) << __func__;
DCHECK(media_log_);
}
@@ -21,7 +20,7 @@ MojoMediaLogService::~MojoMediaLogService() {
DVLOG(1) << __func__;
}
-void MojoMediaLogService::AddLogRecord(const media::MediaLogRecord& event) {
+void MojoMediaLogService::AddLogRecord(const MediaLogRecord& event) {
DVLOG(1) << __func__;
// Make a copy so that we can transfer ownership to |media_log_|.
diff --git a/chromium/media/mojo/clients/mojo_media_log_service.h b/chromium/media/mojo/clients/mojo_media_log_service.h
index c6937073271..5a39ab67527 100644
--- a/chromium/media/mojo/clients/mojo_media_log_service.h
+++ b/chromium/media/mojo/clients/mojo_media_log_service.h
@@ -5,9 +5,8 @@
#ifndef MEDIA_MOJO_CLIENTS_MOJO_MEDIA_LOG_SERVICE_H_
#define MEDIA_MOJO_CLIENTS_MOJO_MEDIA_LOG_SERVICE_H_
-#include <stdint.h>
+#include <memory>
-#include "base/macros.h"
#include "media/base/media_log.h"
#include "media/mojo/mojom/media_log.mojom.h"
@@ -16,16 +15,17 @@ namespace media {
// Implementation of a mojom::MediaLog service which wraps a media::MediaLog.
class MojoMediaLogService final : public mojom::MediaLog {
public:
- explicit MojoMediaLogService(media::MediaLog* media_log);
+ explicit MojoMediaLogService(std::unique_ptr<media::MediaLog> media_log);
+ MojoMediaLogService(const MojoMediaLogService&) = delete;
+ MojoMediaLogService& operator=(const MojoMediaLogService&) = delete;
~MojoMediaLogService() final;
// mojom::MediaLog implementation
- void AddLogRecord(const media::MediaLogRecord& event) final;
+ void AddLogRecord(const MediaLogRecord& event) final;
private:
- media::MediaLog* media_log_;
-
- DISALLOW_COPY_AND_ASSIGN(MojoMediaLogService);
+  // The `media::` qualifier is needed to distinguish this from `mojom::MediaLog`.
+ std::unique_ptr<media::MediaLog> media_log_;
};
} // namespace media
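The MojoMediaLogService change above is an ownership change: the service now takes a std::unique_ptr<media::MediaLog> instead of borrowing a raw pointer, which is what later allows it to be hosted by a self-owned receiver that can outlive the decoder that created it. A hedged, standalone sketch of that shape with stand-in Fake* types (not the real media classes):

```
#include <iostream>
#include <memory>
#include <string>
#include <utility>

// Stand-ins for media::MediaLog and the mojom::MediaLog service, illustrative only.
class FakeMediaLog {
 public:
  void AddRecord(const std::string& record) { std::cout << record << "\n"; }
};

// Mirrors the new MojoMediaLogService shape: it owns its log outright, so it
// no longer depends on the object that created it staying alive.
class FakeMediaLogService {
 public:
  explicit FakeMediaLogService(std::unique_ptr<FakeMediaLog> log)
      : log_(std::move(log)) {}

  FakeMediaLogService(const FakeMediaLogService&) = delete;
  FakeMediaLogService& operator=(const FakeMediaLogService&) = delete;

  void AddLogRecord(const std::string& record) { log_->AddRecord(record); }

 private:
  std::unique_ptr<FakeMediaLog> log_;  // owned; previously a raw pointer
};

int main() {
  auto service =
      std::make_unique<FakeMediaLogService>(std::make_unique<FakeMediaLog>());
  service->AddLogRecord("pipeline event");  // works with no external owner
  return 0;
}
```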
diff --git a/chromium/media/mojo/clients/mojo_renderer.h b/chromium/media/mojo/clients/mojo_renderer.h
index 096f6e24e66..f308b10476f 100644
--- a/chromium/media/mojo/clients/mojo_renderer.h
+++ b/chromium/media/mojo/clients/mojo_renderer.h
@@ -49,6 +49,10 @@ class MojoRenderer : public Renderer, public mojom::RendererClient {
std::unique_ptr<VideoOverlayFactory> video_overlay_factory,
VideoRendererSink* video_renderer_sink,
mojo::PendingRemote<mojom::Renderer> remote_renderer);
+
+ MojoRenderer(const MojoRenderer&) = delete;
+ MojoRenderer& operator=(const MojoRenderer&) = delete;
+
~MojoRenderer() override;
// Renderer implementation.
@@ -64,8 +68,7 @@ class MojoRenderer : public Renderer, public mojom::RendererClient {
base::TimeDelta GetMediaTime() override;
private:
- // mojom::RendererClient implementation, dispatched on the
- // |task_runner_|.
+ // mojom::RendererClient implementation, dispatched on the |task_runner_|.
void OnTimeUpdate(base::TimeDelta time,
base::TimeDelta max_time,
base::TimeTicks capture_time) override;
@@ -152,15 +155,13 @@ class MojoRenderer : public Renderer, public mojom::RendererClient {
base::OnceClosure flush_cb_;
CdmAttachedCB cdm_attached_cb_;
- bool volume_ = 1.0f;
+ float volume_ = 1.0f;
// Lock used to serialize access for |time_interpolator_|.
mutable base::Lock lock_;
media::TimeDeltaInterpolator media_time_interpolator_;
absl::optional<PipelineStatistics> pending_stats_;
-
- DISALLOW_COPY_AND_ASSIGN(MojoRenderer);
};
} // namespace media
diff --git a/chromium/media/mojo/clients/mojo_renderer_factory.cc b/chromium/media/mojo/clients/mojo_renderer_factory.cc
index 3b317f7978d..c7cf8f3f41b 100644
--- a/chromium/media/mojo/clients/mojo_renderer_factory.cc
+++ b/chromium/media/mojo/clients/mojo_renderer_factory.cc
@@ -47,6 +47,7 @@ std::unique_ptr<Renderer> MojoRendererFactory::CreateRenderer(
#if defined(OS_WIN)
std::unique_ptr<MojoRenderer>
MojoRendererFactory::CreateMediaFoundationRenderer(
+ mojo::PendingRemote<mojom::MediaLog> media_log_remote,
mojo::PendingReceiver<mojom::MediaFoundationRendererExtension>
renderer_extension_receiver,
const scoped_refptr<base::SingleThreadTaskRunner>& media_task_runner,
@@ -54,6 +55,7 @@ MojoRendererFactory::CreateMediaFoundationRenderer(
DCHECK(interface_factory_);
mojo::PendingRemote<mojom::Renderer> renderer_remote;
interface_factory_->CreateMediaFoundationRenderer(
+ std::move(media_log_remote),
renderer_remote.InitWithNewPipeAndPassReceiver(),
std::move(renderer_extension_receiver));
diff --git a/chromium/media/mojo/clients/mojo_renderer_factory.h b/chromium/media/mojo/clients/mojo_renderer_factory.h
index bab4060fa2b..ad6e32478f6 100644
--- a/chromium/media/mojo/clients/mojo_renderer_factory.h
+++ b/chromium/media/mojo/clients/mojo_renderer_factory.h
@@ -34,6 +34,10 @@ class MojoRendererFactory final : public RendererFactory {
public:
explicit MojoRendererFactory(
media::mojom::InterfaceFactory* interface_factory);
+
+ MojoRendererFactory(const MojoRendererFactory&) = delete;
+ MojoRendererFactory& operator=(const MojoRendererFactory&) = delete;
+
~MojoRendererFactory() final;
std::unique_ptr<Renderer> CreateRenderer(
@@ -46,6 +50,7 @@ class MojoRendererFactory final : public RendererFactory {
#if defined(OS_WIN)
std::unique_ptr<MojoRenderer> CreateMediaFoundationRenderer(
+ mojo::PendingRemote<mojom::MediaLog> media_log_remote,
mojo::PendingReceiver<mojom::MediaFoundationRendererExtension>
renderer_extension_receiver,
const scoped_refptr<base::SingleThreadTaskRunner>& media_task_runner,
@@ -79,8 +84,6 @@ class MojoRendererFactory final : public RendererFactory {
// InterfaceFactory or InterfaceProvider used to create or connect to remote
// renderer.
media::mojom::InterfaceFactory* interface_factory_ = nullptr;
-
- DISALLOW_COPY_AND_ASSIGN(MojoRendererFactory);
};
} // namespace media
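CreateMediaFoundationRenderer() now also takes a mojo::PendingRemote<mojom::MediaLog>, following the usual mojo pattern of creating a pipe with InitWithNewPipeAndPassReceiver() and handing one end to the implementation while the caller keeps the other. A very loose standalone sketch of that pattern with stand-in Pending* types (the real mojo types are message-pipe endpoints, not a shared callback):

```
#include <functional>
#include <iostream>
#include <memory>
#include <string>

// Stand-ins for mojo::PendingRemote / mojo::PendingReceiver, illustrative only.
struct Pipe {
  std::function<void(const std::string&)> sink;
};

struct PendingReceiver {
  std::shared_ptr<Pipe> pipe;
};

struct PendingRemote {
  std::shared_ptr<Pipe> pipe;
  // Mirrors InitWithNewPipeAndPassReceiver(): creates the pipe and hands the
  // receiving end to whoever will implement the interface.
  PendingReceiver InitWithNewPipeAndPassReceiver() {
    pipe = std::make_shared<Pipe>();
    return PendingReceiver{pipe};
  }
  void Send(const std::string& msg) { pipe->sink(msg); }
};

int main() {
  PendingRemote media_log_remote;
  PendingReceiver receiver = media_log_remote.InitWithNewPipeAndPassReceiver();

  // The "service" side binds the receiver end...
  receiver.pipe->sink = [](const std::string& msg) {
    std::cout << "log record: " << msg << "\n";
  };

  // ...while the client keeps the remote end and sends records through it,
  // much as the factory now does with its MediaLog remote.
  media_log_remote.Send("renderer created");
  return 0;
}
```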
diff --git a/chromium/media/mojo/clients/mojo_renderer_unittest.cc b/chromium/media/mojo/clients/mojo_renderer_unittest.cc
index 831017c1355..e80ef3199cf 100644
--- a/chromium/media/mojo/clients/mojo_renderer_unittest.cc
+++ b/chromium/media/mojo/clients/mojo_renderer_unittest.cc
@@ -87,6 +87,9 @@ class MojoRendererTest : public ::testing::Test {
.WillRepeatedly(Return(base::TimeDelta()));
}
+ MojoRendererTest(const MojoRendererTest&) = delete;
+ MojoRendererTest& operator=(const MojoRendererTest&) = delete;
+
~MojoRendererTest() override = default;
void Destroy() {
@@ -192,9 +195,7 @@ class MojoRendererTest : public ::testing::Test {
base::RunLoop().RunUntilIdle();
}
- void Play() {
- StartPlayingFrom(base::TimeDelta::FromMilliseconds(kStartPlayingTimeInMs));
- }
+ void Play() { StartPlayingFrom(base::Milliseconds(kStartPlayingTimeInMs)); }
// Fixture members.
base::TestMessageLoop message_loop_;
@@ -223,9 +224,6 @@ class MojoRendererTest : public ::testing::Test {
RendererClient* remote_renderer_client_;
mojo::SelfOwnedReceiverRef<mojom::Renderer> renderer_receiver_;
-
- private:
- DISALLOW_COPY_AND_ASSIGN(MojoRendererTest);
};
TEST_F(MojoRendererTest, Initialize_Success) {
@@ -375,9 +373,8 @@ TEST_F(MojoRendererTest, GetMediaTime) {
Initialize();
EXPECT_EQ(base::TimeDelta(), mojo_renderer_->GetMediaTime());
- const base::TimeDelta kSleepTime = base::TimeDelta::FromMilliseconds(500);
- const base::TimeDelta kStartTime =
- base::TimeDelta::FromMilliseconds(kStartPlayingTimeInMs);
+ const base::TimeDelta kSleepTime = base::Milliseconds(500);
+ const base::TimeDelta kStartTime = base::Milliseconds(kStartPlayingTimeInMs);
// Media time should not advance since playback rate is 0.
EXPECT_CALL(*mock_renderer_, SetPlaybackRate(0));
diff --git a/chromium/media/mojo/clients/mojo_renderer_wrapper.h b/chromium/media/mojo/clients/mojo_renderer_wrapper.h
index 23fbb053d08..f8ee0dcb0d9 100644
--- a/chromium/media/mojo/clients/mojo_renderer_wrapper.h
+++ b/chromium/media/mojo/clients/mojo_renderer_wrapper.h
@@ -20,6 +20,10 @@ namespace media {
class MojoRendererWrapper : public Renderer {
public:
explicit MojoRendererWrapper(std::unique_ptr<MojoRenderer> mojo_renderer);
+
+ MojoRendererWrapper(const MojoRendererWrapper&) = delete;
+ MojoRendererWrapper& operator=(const MojoRendererWrapper&) = delete;
+
~MojoRendererWrapper() override;
// Renderer implementation.
@@ -36,9 +40,6 @@ class MojoRendererWrapper : public Renderer {
private:
std::unique_ptr<MojoRenderer> mojo_renderer_;
-
- private:
- DISALLOW_COPY_AND_ASSIGN(MojoRendererWrapper);
};
} // namespace media
diff --git a/chromium/media/mojo/clients/mojo_video_decoder.cc b/chromium/media/mojo/clients/mojo_video_decoder.cc
index f9eb8bc87a8..7fecee41e6c 100644
--- a/chromium/media/mojo/clients/mojo_video_decoder.cc
+++ b/chromium/media/mojo/clients/mojo_video_decoder.cc
@@ -18,6 +18,7 @@
#include "base/sequenced_task_runner.h"
#include "base/unguessable_token.h"
#include "build/build_config.h"
+#include "build/chromeos_buildflags.h"
#include "media/base/bind_to_current_loop.h"
#include "media/base/decoder_buffer.h"
#include "media/base/demuxer_stream.h"
@@ -25,6 +26,7 @@
#include "media/base/overlay_info.h"
#include "media/base/video_frame.h"
#include "media/media_buildflags.h"
+#include "media/mojo/clients/mojo_media_log_service.h"
#include "media/mojo/common/media_type_converters.h"
#include "media/mojo/common/mojo_decoder_buffer_converter.h"
#include "media/mojo/mojom/media_types.mojom.h"
@@ -32,6 +34,7 @@
#include "media/video/video_decode_accelerator.h"
#include "mojo/public/cpp/bindings/pending_associated_remote.h"
#include "mojo/public/cpp/bindings/pending_remote.h"
+#include "mojo/public/cpp/bindings/self_owned_receiver.h"
#include "mojo/public/cpp/bindings/shared_remote.h"
namespace media {
@@ -107,11 +110,10 @@ MojoVideoDecoder::MojoVideoDecoder(
: task_runner_(task_runner),
pending_remote_decoder_(std::move(pending_remote_decoder)),
gpu_factories_(gpu_factories),
+ media_log_(media_log),
timestamps_(128),
writer_capacity_(
GetDefaultDecoderBufferConverterCapacity(DemuxerStream::VIDEO)),
- media_log_service_(media_log),
- media_log_receiver_(&media_log_service_),
request_overlay_info_cb_(std::move(request_overlay_info_cb)),
target_color_space_(target_color_space) {
DVLOG(1) << __func__;
@@ -190,21 +192,31 @@ void MojoVideoDecoder::Initialize(const VideoDecoderConfig& config,
return;
}
+ initialized_ = false;
+ init_cb_ = std::move(init_cb);
+ output_cb_ = output_cb;
+ waiting_cb_ = waiting_cb;
+
if (!remote_decoder_bound_) {
- BindRemoteDecoder();
- get_mojo_instance_counter()++;
+ InitAndBindRemoteDecoder(
+ base::BindOnce(&MojoVideoDecoder::InitializeRemoteDecoder, weak_this_,
+ config, low_delay, std::move(cdm_id)));
+ return;
}
+ InitializeRemoteDecoder(config, low_delay, std::move(cdm_id));
+}
+
+void MojoVideoDecoder::InitializeRemoteDecoder(
+ const VideoDecoderConfig& config,
+ bool low_delay,
+ absl::optional<base::UnguessableToken> cdm_id) {
if (has_connection_error_) {
- FailInit(std::move(init_cb), StatusCode::kMojoDecoderNoConnection);
+ DCHECK(init_cb_);
+ FailInit(std::move(init_cb_), StatusCode::kMojoDecoderNoConnection);
return;
}
- initialized_ = false;
- init_cb_ = std::move(init_cb);
- output_cb_ = output_cb;
- waiting_cb_ = waiting_cb;
-
remote_decoder_->Initialize(
config, low_delay, cdm_id,
base::BindOnce(&MojoVideoDecoder::OnInitializeDone,
@@ -365,7 +377,7 @@ bool MojoVideoDecoder::IsOptimizedForRTC() const {
return true;
}
-void MojoVideoDecoder::BindRemoteDecoder() {
+void MojoVideoDecoder::InitAndBindRemoteDecoder(base::OnceClosure complete_cb) {
DVLOG(3) << __func__;
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
DCHECK(!remote_decoder_bound_);
@@ -376,6 +388,38 @@ void MojoVideoDecoder::BindRemoteDecoder() {
remote_decoder_.set_disconnect_handler(
base::BindOnce(&MojoVideoDecoder::Stop, base::Unretained(this)));
+ // Generate |command_buffer_id|.
+ media::mojom::CommandBufferIdPtr command_buffer_id;
+
+ if (gpu_factories_) {
+ DCHECK(complete_cb);
+ gpu_factories_->GetChannelToken(
+ base::BindOnce(&MojoVideoDecoder::OnChannelTokenReady, weak_this_,
+ std::move(command_buffer_id), std::move(complete_cb)));
+ return;
+ }
+
+ DCHECK(complete_cb);
+ InitAndConstructRemoteDecoder(std::move(command_buffer_id),
+ std::move(complete_cb));
+}
+
+void MojoVideoDecoder::OnChannelTokenReady(
+ media::mojom::CommandBufferIdPtr command_buffer_id,
+ base::OnceClosure complete_cb,
+ const base::UnguessableToken& channel_token) {
+ if (channel_token) {
+ command_buffer_id = media::mojom::CommandBufferId::New();
+ command_buffer_id->channel_token = std::move(channel_token);
+ command_buffer_id->route_id = gpu_factories_->GetCommandBufferRouteId();
+ }
+ InitAndConstructRemoteDecoder(std::move(command_buffer_id),
+ std::move(complete_cb));
+}
+
+void MojoVideoDecoder::InitAndConstructRemoteDecoder(
+ media::mojom::CommandBufferIdPtr command_buffer_id,
+ base::OnceClosure complete_cb) {
// Create |video_frame_handle_releaser| interface receiver, and bind
// |mojo_video_frame_handle_releaser_| to it.
mojo::PendingRemote<mojom::VideoFrameHandleReleaser>
@@ -392,22 +436,22 @@ void MojoVideoDecoder::BindRemoteDecoder() {
mojo_decoder_buffer_writer_ = MojoDecoderBufferWriter::Create(
writer_capacity_, &remote_consumer_handle);
- // Generate |command_buffer_id|.
- media::mojom::CommandBufferIdPtr command_buffer_id;
- if (gpu_factories_) {
- base::UnguessableToken channel_token = gpu_factories_->GetChannelToken();
- if (channel_token) {
- command_buffer_id = media::mojom::CommandBufferId::New();
- command_buffer_id->channel_token = std::move(channel_token);
- command_buffer_id->route_id = gpu_factories_->GetCommandBufferRouteId();
- }
- }
+ // Use `mojo::MakeSelfOwnedReceiver` for MediaLog so logs may go through even
+ // after `MojoVideoDecoder` is destructed.
+ mojo::PendingReceiver<mojom::MediaLog> media_log_pending_receiver;
+ auto media_log_pending_remote =
+ media_log_pending_receiver.InitWithNewPipeAndPassRemote();
+ mojo::MakeSelfOwnedReceiver(
+ std::make_unique<MojoMediaLogService>(media_log_->Clone()),
+ std::move(media_log_pending_receiver));
remote_decoder_->Construct(client_receiver_.BindNewEndpointAndPassRemote(),
- media_log_receiver_.BindNewEndpointAndPassRemote(),
+ std::move(media_log_pending_remote),
std::move(video_frame_handle_releaser_receiver),
std::move(remote_consumer_handle),
std::move(command_buffer_id), target_color_space_);
+ get_mojo_instance_counter()++;
+ std::move(complete_cb).Run();
}
void MojoVideoDecoder::OnWaiting(WaitingReason reason) {
diff --git a/chromium/media/mojo/clients/mojo_video_decoder.h b/chromium/media/mojo/clients/mojo_video_decoder.h
index 601d6b30ba9..413d9067dbc 100644
--- a/chromium/media/mojo/clients/mojo_video_decoder.h
+++ b/chromium/media/mojo/clients/mojo_video_decoder.h
@@ -13,7 +13,6 @@
#include "media/base/status.h"
#include "media/base/video_decoder.h"
#include "media/base/video_frame.h"
-#include "media/mojo/clients/mojo_media_log_service.h"
#include "media/mojo/mojom/video_decoder.mojom.h"
#include "media/video/video_decode_accelerator.h"
#include "mojo/public/cpp/bindings/associated_receiver.h"
@@ -55,6 +54,10 @@ class MojoVideoDecoder final : public VideoDecoder,
mojo::PendingRemote<mojom::VideoDecoder> pending_remote_decoder,
RequestOverlayInfoCB request_overlay_info_cb,
const gfx::ColorSpace& target_color_space);
+
+ MojoVideoDecoder(const MojoVideoDecoder&) = delete;
+ MojoVideoDecoder& operator=(const MojoVideoDecoder&) = delete;
+
~MojoVideoDecoder() final;
// Decoder implementation
@@ -97,7 +100,16 @@ class MojoVideoDecoder final : public VideoDecoder,
void OnDecodeDone(uint64_t decode_id, const Status& status);
void OnResetDone();
- void BindRemoteDecoder();
+ void InitAndBindRemoteDecoder(base::OnceClosure complete_cb);
+ void OnChannelTokenReady(media::mojom::CommandBufferIdPtr command_buffer_id,
+ base::OnceClosure complete_cb,
+ const base::UnguessableToken& channel_token);
+ void InitAndConstructRemoteDecoder(
+ media::mojom::CommandBufferIdPtr command_buffer_id,
+ base::OnceClosure complete_cb);
+ void InitializeRemoteDecoder(const VideoDecoderConfig& config,
+ bool low_delay,
+ absl::optional<base::UnguessableToken> cdm_id);
// Forwards |overlay_info| to the remote decoder.
void OnOverlayInfoChanged(const OverlayInfo& overlay_info);
@@ -120,6 +132,10 @@ class MojoVideoDecoder final : public VideoDecoder,
GpuVideoAcceleratorFactories* gpu_factories_ = nullptr;
+  // Raw pointer is safe since both `this` and `media_log` are owned by
+  // WebMediaPlayerImpl, declared in an order that keeps `media_log` alive longer.
+ MediaLog* media_log_ = nullptr;
+
InitCB init_cb_;
OutputCB output_cb_;
WaitingCB waiting_cb_;
@@ -139,8 +155,6 @@ class MojoVideoDecoder final : public VideoDecoder,
bool remote_decoder_bound_ = false;
bool has_connection_error_ = false;
mojo::AssociatedReceiver<mojom::VideoDecoderClient> client_receiver_{this};
- MojoMediaLogService media_log_service_;
- mojo::AssociatedReceiver<mojom::MediaLog> media_log_receiver_;
RequestOverlayInfoCB request_overlay_info_cb_;
bool overlay_info_requested_ = false;
gfx::ColorSpace target_color_space_;
@@ -158,8 +172,6 @@ class MojoVideoDecoder final : public VideoDecoder,
base::WeakPtr<MojoVideoDecoder> weak_this_;
base::WeakPtrFactory<MojoVideoDecoder> weak_factory_{this};
-
- DISALLOW_COPY_AND_ASSIGN(MojoVideoDecoder);
};
} // namespace media
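In mojo_video_decoder.cc the binding step becomes asynchronous: GetChannelToken() now reports its token through a callback, so Initialize() defers its work with a base::OnceClosure that runs once the remote decoder has been constructed (InitAndBindRemoteDecoder, then OnChannelTokenReady, then InitAndConstructRemoteDecoder, then InitializeRemoteDecoder). A simplified standalone sketch of that callback chaining, using std::function in place of the base:: callback types:

```
#include <functional>
#include <iostream>
#include <string>

// Illustrative stand-ins; the real code chains base::OnceClosure callbacks and
// asks GpuVideoAcceleratorFactories for a channel token asynchronously.
using OnceClosure = std::function<void()>;

// Pretend async API: delivers a "channel token" via callback instead of
// returning it synchronously (the shape of the GetChannelToken change).
void GetChannelTokenAsync(std::function<void(const std::string&)> on_token) {
  on_token("fake-channel-token");
}

struct FakeDecoder {
  void InitAndBindRemote(OnceClosure complete_cb) {
    GetChannelTokenAsync([this, complete_cb](const std::string& token) {
      OnChannelTokenReady(token, complete_cb);
    });
  }

  void OnChannelTokenReady(const std::string& token, OnceClosure complete_cb) {
    std::cout << "bound with token: " << token << "\n";
    complete_cb();  // resume the deferred Initialize() work
  }
};

int main() {
  FakeDecoder decoder;
  decoder.InitAndBindRemote(
      [] { std::cout << "InitializeRemoteDecoder runs here\n"; });
  return 0;
}
```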
diff --git a/chromium/media/mojo/clients/mojo_video_encode_accelerator.cc b/chromium/media/mojo/clients/mojo_video_encode_accelerator.cc
index 68ff46ddf1b..c70c7da083e 100644
--- a/chromium/media/mojo/clients/mojo_video_encode_accelerator.cc
+++ b/chromium/media/mojo/clients/mojo_video_encode_accelerator.cc
@@ -7,7 +7,6 @@
#include <utility>
#include "base/bind.h"
-#include "base/callback_helpers.h"
#include "base/logging.h"
#include "build/build_config.h"
#include "gpu/ipc/client/gpu_channel_host.h"
@@ -32,6 +31,11 @@ class VideoEncodeAcceleratorClient
VideoEncodeAcceleratorClient(
VideoEncodeAccelerator::Client* client,
mojo::PendingReceiver<mojom::VideoEncodeAcceleratorClient> receiver);
+
+ VideoEncodeAcceleratorClient(const VideoEncodeAcceleratorClient&) = delete;
+ VideoEncodeAcceleratorClient& operator=(const VideoEncodeAcceleratorClient&) =
+ delete;
+
~VideoEncodeAcceleratorClient() override = default;
// mojom::VideoEncodeAcceleratorClient impl.
@@ -47,8 +51,6 @@ class VideoEncodeAcceleratorClient
private:
VideoEncodeAccelerator::Client* client_;
mojo::Receiver<mojom::VideoEncodeAcceleratorClient> receiver_;
-
- DISALLOW_COPY_AND_ASSIGN(VideoEncodeAcceleratorClient);
};
VideoEncodeAcceleratorClient::VideoEncodeAcceleratorClient(
@@ -136,10 +138,8 @@ void MojoVideoEncodeAccelerator::Encode(scoped_refptr<VideoFrame> frame,
// GPU memory path: Pass-through.
if (frame->storage_type() == VideoFrame::STORAGE_GPU_MEMORY_BUFFER) {
- vea_->Encode(
- frame, force_keyframe,
- base::BindOnce(base::DoNothing::Once<scoped_refptr<VideoFrame>>(),
- frame));
+ vea_->Encode(frame, force_keyframe,
+ base::BindOnce([](scoped_refptr<VideoFrame>) {}, frame));
return;
}
@@ -165,8 +165,7 @@ void MojoVideoEncodeAccelerator::Encode(scoped_refptr<VideoFrame> frame,
}
vea_->Encode(
std::move(mojo_frame), force_keyframe,
- base::BindOnce(base::DoNothing::Once<scoped_refptr<VideoFrame>>(),
- std::move(frame)));
+ base::BindOnce([](scoped_refptr<VideoFrame>) {}, std::move(frame)));
}
void MojoVideoEncodeAccelerator::UseOutputBitstreamBuffer(
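The Encode() change swaps base::DoNothing::Once<scoped_refptr<VideoFrame>>() for an empty lambda, but the intent is unchanged: the frame is bound into the completion callback purely so it stays alive until that callback is destroyed. A hedged standalone sketch of the lifetime trick with stand-in types:

```
#include <functional>
#include <iostream>
#include <memory>

// Illustrative stand-in for scoped_refptr<VideoFrame>; what matters is that the
// frame stays alive until the "done" callback has been run and destroyed.
struct FakeFrame {
  ~FakeFrame() { std::cout << "frame released\n"; }
};

// Pretend encoder API shaped like vea_->Encode(frame, keyframe, done_cb).
void Encode(std::shared_ptr<FakeFrame> frame,
            bool force_keyframe,
            std::function<void()> done_cb) {
  std::cout << "encoding (keyframe=" << force_keyframe << ")\n";
  done_cb();  // in the real code this runs once the remote side is finished
}

int main() {
  auto frame = std::make_shared<FakeFrame>();
  // Equivalent of base::BindOnce([](scoped_refptr<VideoFrame>) {}, frame):
  // an empty callback that captures the frame only to extend its lifetime.
  Encode(frame, /*force_keyframe=*/true,
         [frame] { /* no-op; keeps the frame alive */ });
  return 0;
}
```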
diff --git a/chromium/media/mojo/clients/win/media_foundation_renderer_client.cc b/chromium/media/mojo/clients/win/media_foundation_renderer_client.cc
index 7110e955fd1..3bf9746ebfa 100644
--- a/chromium/media/mojo/clients/win/media_foundation_renderer_client.cc
+++ b/chromium/media/mojo/clients/win/media_foundation_renderer_client.cc
@@ -9,6 +9,7 @@
#include "base/callback_helpers.h"
#include "base/metrics/histogram_functions.h"
#include "base/metrics/histogram_macros.h"
+#include "media/base/media_log.h"
#include "media/base/win/mf_helpers.h"
#include "mojo/public/cpp/bindings/callback_helpers.h"
@@ -16,11 +17,13 @@ namespace media {
MediaFoundationRendererClient::MediaFoundationRendererClient(
scoped_refptr<base::SingleThreadTaskRunner> media_task_runner,
+ std::unique_ptr<MediaLog> media_log,
std::unique_ptr<MojoRenderer> mojo_renderer,
mojo::PendingRemote<RendererExtension> pending_renderer_extension,
std::unique_ptr<DCOMPTextureWrapper> dcomp_texture_wrapper,
VideoRendererSink* sink)
: media_task_runner_(std::move(media_task_runner)),
+ media_log_(std::move(media_log)),
mojo_renderer_(std::move(mojo_renderer)),
pending_renderer_extension_(std::move(pending_renderer_extension)),
dcomp_texture_wrapper_(std::move(dcomp_texture_wrapper)),
@@ -32,8 +35,6 @@ MediaFoundationRendererClient::~MediaFoundationRendererClient() {
DVLOG_FUNC(1);
}
-// TODO(xhwang): Reorder method definitions to match the header file.
-
// Renderer implementation.
void MediaFoundationRendererClient::Initialize(MediaResource* media_resource,
@@ -72,125 +73,6 @@ void MediaFoundationRendererClient::Initialize(MediaResource* media_resource,
weak_factory_.GetWeakPtr()));
}
-void MediaFoundationRendererClient::OnConnectionError() {
- DVLOG_FUNC(1);
- DCHECK(media_task_runner_->BelongsToCurrentThread());
-
- OnError(PIPELINE_ERROR_DECODE);
-}
-
-void MediaFoundationRendererClient::OnRemoteRendererInitialized(
- PipelineStatus status) {
- DVLOG_FUNC(1) << "status=" << status;
- DCHECK(media_task_runner_->BelongsToCurrentThread());
- DCHECK(!init_cb_.is_null());
-
- if (status != PipelineStatus::PIPELINE_OK) {
- std::move(init_cb_).Run(status);
- return;
- }
-
- if (has_video_) {
- using Self = MediaFoundationRendererClient;
- auto weak_ptr = weak_factory_.GetWeakPtr();
- dcomp_texture_wrapper_->Initialize(
- gfx::Size(1, 1),
- base::BindOnce(&Self::OnDCOMPSurfaceHandleCreated, weak_ptr),
- base::BindRepeating(&Self::OnCompositionParamsReceived, weak_ptr),
- base::BindOnce(&Self::OnDCOMPTextureInitialized, weak_ptr));
- // `init_cb_` will be handled in `OnDCOMPTextureInitialized()`.
- return;
- }
-
- std::move(init_cb_).Run(status);
-}
-
-// TODO(xhwang): Rename this method to be consistent across the stack.
-void MediaFoundationRendererClient::OnDCOMPSurfaceHandleCreated(bool success) {
- DVLOG_FUNC(1);
- DCHECK(media_task_runner_->BelongsToCurrentThread());
- DCHECK(has_video_);
-
- dcomp_texture_wrapper_->CreateVideoFrame(
- base::BindOnce(&MediaFoundationRendererClient::OnVideoFrameCreated,
- weak_factory_.GetWeakPtr()));
-}
-
-void MediaFoundationRendererClient::OnDCOMPSurfaceReceived(
- const absl::optional<base::UnguessableToken>& token) {
- DVLOG_FUNC(1);
- DCHECK(has_video_);
- DCHECK(media_task_runner_->BelongsToCurrentThread());
-
- if (!token) {
- DLOG(ERROR) << "Failed to initialize DCOMP mode or failed to get or "
- "register DCOMP surface handle on remote renderer";
- OnError(PIPELINE_ERROR_COULD_NOT_RENDER);
- return;
- }
-
- dcomp_texture_wrapper_->SetDCOMPSurface(token.value());
-}
-
-void MediaFoundationRendererClient::OnDCOMPTextureInitialized(bool success) {
- DVLOG_FUNC(1) << "success=" << success;
- DCHECK(media_task_runner_->BelongsToCurrentThread());
- DCHECK(!init_cb_.is_null());
- DCHECK(has_video_);
-
- if (!success) {
- std::move(init_cb_).Run(PIPELINE_ERROR_INITIALIZATION_FAILED);
- return;
- }
-
- // Initialize DCOMP texture size to {1, 1} to signify to SwapChainPresenter
- // that the video output size is not yet known. {1, 1} is chosen as opposed to
- // {0, 0} because VideoFrameSubmitter will not submit 0x0 video frames.
- if (natural_size_.IsEmpty())
- dcomp_texture_wrapper_->UpdateTextureSize(gfx::Size(1, 1));
-
- std::move(init_cb_).Run(PIPELINE_OK);
-}
-
-void MediaFoundationRendererClient::OnVideoFrameCreated(
- scoped_refptr<VideoFrame> video_frame) {
- DVLOG_FUNC(1);
- DCHECK(media_task_runner_->BelongsToCurrentThread());
- DCHECK(has_video_);
-
- video_frame->metadata().protected_video = true;
- video_frame->metadata().allow_overlay = true;
-
- dcomp_video_frame_ = video_frame;
- sink_->PaintSingleFrame(dcomp_video_frame_, true);
-}
-void MediaFoundationRendererClient::OnCompositionParamsReceived(
- gfx::Rect output_rect) {
- DVLOG_FUNC(1) << "output_rect=" << output_rect.ToString();
- DCHECK(media_task_runner_->BelongsToCurrentThread());
- DCHECK(has_video_);
-
- renderer_extension_->SetOutputParams(output_rect);
-}
-
-void MediaFoundationRendererClient::InitializeDCOMPRenderingIfNeeded() {
- DVLOG_FUNC(1);
- DCHECK(has_video_);
-
- if (dcomp_rendering_initialized_)
- return;
-
- dcomp_rendering_initialized_ = true;
-
- // Set DirectComposition mode and get DirectComposition surface from
- // MediaFoundationRenderer.
- renderer_extension_->GetDCOMPSurface(
- mojo::WrapCallbackWithDefaultInvokeIfNotRun(
- base::BindOnce(&MediaFoundationRendererClient::OnDCOMPSurfaceReceived,
- weak_factory_.GetWeakPtr()),
- absl::nullopt));
-}
-
void MediaFoundationRendererClient::SetCdm(CdmContext* cdm_context,
CdmAttachedCB cdm_attached_cb) {
DVLOG_FUNC(1) << "cdm_context=" << cdm_context;
@@ -213,12 +95,7 @@ void MediaFoundationRendererClient::SetCdm(CdmContext* cdm_context,
void MediaFoundationRendererClient::SetLatencyHint(
absl::optional<base::TimeDelta> /*latency_hint*/) {
- // We do not use the latency hint today
-}
-
-void MediaFoundationRendererClient::OnCdmAttached(bool success) {
- DCHECK(cdm_attached_cb_);
- std::move(cdm_attached_cb_).Run(success);
+ NOTIMPLEMENTED() << "Latency hint not supported in MediaFoundationRenderer";
}
void MediaFoundationRendererClient::Flush(base::OnceClosure flush_cb) {
@@ -292,15 +169,10 @@ void MediaFoundationRendererClient::OnVideoNaturalSizeChange(
DCHECK(has_video_);
natural_size_ = size;
-
- // Ensure we don't update with an empty size as |dcomp_text_wrapper_| was
- // initialized with size of 1x1.
- auto texture_size = natural_size_.IsEmpty() ? gfx::Size(1, 1) : natural_size_;
- dcomp_texture_wrapper_->UpdateTextureSize(texture_size);
- InitializeDCOMPRenderingIfNeeded();
-
- if (dcomp_video_frame_)
- sink_->PaintSingleFrame(dcomp_video_frame_, true);
+ dcomp_texture_wrapper_->CreateVideoFrame(
+ natural_size_,
+ base::BindOnce(&MediaFoundationRendererClient::OnVideoFrameCreated,
+ weak_factory_.GetWeakPtr()));
client_->OnVideoNaturalSizeChange(natural_size_);
}
@@ -318,4 +190,160 @@ void MediaFoundationRendererClient::OnVideoFrameRateChange(
client_->OnVideoFrameRateChange(fps);
}
+// private
+
+void MediaFoundationRendererClient::OnRemoteRendererInitialized(
+ PipelineStatus status) {
+ DVLOG_FUNC(1) << "status=" << status;
+ DCHECK(media_task_runner_->BelongsToCurrentThread());
+ DCHECK(!init_cb_.is_null());
+
+ if (status != PipelineStatus::PIPELINE_OK) {
+ std::move(init_cb_).Run(status);
+ return;
+ }
+
+ if (!has_video_) {
+ std::move(init_cb_).Run(PipelineStatus::PIPELINE_OK);
+ return;
+ }
+
+ // For playback with video, initialize `dcomp_texture_wrapper_` for direct
+ // composition.
+ bool success = dcomp_texture_wrapper_->Initialize(
+ gfx::Size(1, 1),
+ base::BindRepeating(&MediaFoundationRendererClient::OnOutputRectChange,
+ weak_factory_.GetWeakPtr()));
+ if (!success) {
+ std::move(init_cb_).Run(PIPELINE_ERROR_INITIALIZATION_FAILED);
+ return;
+ }
+
+ // Initialize DCOMP texture size to {1, 1} to signify to SwapChainPresenter
+ // that the video output size is not yet known.
+ if (output_size_.IsEmpty())
+ dcomp_texture_wrapper_->UpdateTextureSize(gfx::Size(1, 1));
+
+ std::move(init_cb_).Run(PIPELINE_OK);
+}
+
+void MediaFoundationRendererClient::OnOutputRectChange(gfx::Rect output_rect) {
+ DVLOG_FUNC(1) << "output_rect=" << output_rect.ToString();
+ DCHECK(media_task_runner_->BelongsToCurrentThread());
+ DCHECK(has_video_);
+
+ renderer_extension_->SetOutputRect(
+ output_rect,
+ base::BindOnce(&MediaFoundationRendererClient::OnSetOutputRectDone,
+ weak_factory_.GetWeakPtr(), output_rect.size()));
+}
+
+void MediaFoundationRendererClient::OnSetOutputRectDone(
+ const gfx::Size& output_size,
+ bool success) {
+ DVLOG_FUNC(1) << "output_size=" << output_size.ToString();
+ DCHECK(media_task_runner_->BelongsToCurrentThread());
+ DCHECK(has_video_);
+
+ if (!success) {
+ DLOG(ERROR) << "Failed to SetOutputRect";
+ MEDIA_LOG(WARNING, media_log_) << "Failed to SetOutputRect";
+    // Ignore this error, since the video may still be visible, just displayed
+    // incorrectly within the video output area.
+ return;
+ }
+
+ output_size_ = output_size;
+ if (output_size_updated_)
+ return;
+
+  // Call UpdateTextureSize() only once to indicate that DCOMP rendering is
+  // ready. The actual size does not matter as long as it is not empty and not
+  // (1x1).
+ if (!output_size_.IsEmpty() && output_size_ != gfx::Size(1, 1)) {
+ dcomp_texture_wrapper_->UpdateTextureSize(output_size_);
+ output_size_updated_ = true;
+ }
+
+ InitializeDCOMPRenderingIfNeeded();
+
+  // Ensures `SwapChainPresenter::PresentDCOMPSurface()` is invoked to add the
+  // video into the DCOMP visual tree if needed.
+ if (dcomp_video_frame_)
+ sink_->PaintSingleFrame(dcomp_video_frame_, true);
+}
+
+void MediaFoundationRendererClient::InitializeDCOMPRenderingIfNeeded() {
+ DVLOG_FUNC(1);
+ DCHECK(has_video_);
+
+ if (dcomp_rendering_initialized_)
+ return;
+
+ dcomp_rendering_initialized_ = true;
+
+ // Set DirectComposition mode and get DirectComposition surface from
+ // MediaFoundationRenderer.
+ renderer_extension_->GetDCOMPSurface(
+ mojo::WrapCallbackWithDefaultInvokeIfNotRun(
+ base::BindOnce(&MediaFoundationRendererClient::OnDCOMPSurfaceReceived,
+ weak_factory_.GetWeakPtr()),
+ absl::nullopt));
+}
+
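`InitializeDCOMPRenderingIfNeeded()` above wraps its reply with `mojo::WrapCallbackWithDefaultInvokeIfNotRun()`, so `OnDCOMPSurfaceReceived()` still runs (with `absl::nullopt`) even if the remote side drops the callback. As a rough, self-contained illustration of that "default-invoke if never run" guarantee (this is not the mojo implementation, and all names below are hypothetical):

```cpp
// guarded_callback_sketch.cc -- conceptual sketch only; the real helper is
// mojo's WrapCallbackWithDefaultInvokeIfNotRun().
#include <functional>
#include <iostream>
#include <optional>
#include <utility>

// Wraps a one-shot callback so that, if it is never run explicitly, it is
// invoked with a default argument when the wrapper is destroyed.
class GuardedCallback {
 public:
  using Callback = std::function<void(std::optional<int>)>;

  GuardedCallback(Callback cb, std::optional<int> default_arg)
      : cb_(std::move(cb)), default_arg_(std::move(default_arg)) {}

  ~GuardedCallback() {
    if (cb_) cb_(default_arg_);  // Default-invoke if the callback never ran.
  }

  void Run(std::optional<int> arg) {
    if (cb_) std::exchange(cb_, nullptr)(std::move(arg));
  }

 private:
  Callback cb_;
  std::optional<int> default_arg_;
};

int main() {
  {
    GuardedCallback dropped(
        [](std::optional<int> v) {
          std::cout << "callback ran, has_value=" << v.has_value() << "\n";
        },
        std::nullopt);
    // Never run explicitly: the destructor invokes it with std::nullopt,
    // mirroring how a dropped mojo reply still reaches the client.
  }
  return 0;
}
```

Here the destructor plays the role the mojo helper plays when the message pipe goes away before the reply is sent.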
+void MediaFoundationRendererClient::OnDCOMPSurfaceReceived(
+ const absl::optional<base::UnguessableToken>& token) {
+ DVLOG_FUNC(1);
+ DCHECK(has_video_);
+ DCHECK(media_task_runner_->BelongsToCurrentThread());
+
+ if (!token) {
+ MEDIA_LOG(ERROR, media_log_)
+ << "Failed to initialize DCOMP mode or failed to get or "
+ "register DCOMP surface handle on remote renderer";
+ OnError(PIPELINE_ERROR_COULD_NOT_RENDER);
+ return;
+ }
+
+ dcomp_texture_wrapper_->SetDCOMPSurfaceHandle(
+ token.value(),
+ base::BindOnce(&MediaFoundationRendererClient::OnDCOMPSurfaceHandleSet,
+ weak_factory_.GetWeakPtr()));
+}
+
+void MediaFoundationRendererClient::OnDCOMPSurfaceHandleSet(bool success) {
+ DVLOG_FUNC(1);
+ DCHECK(media_task_runner_->BelongsToCurrentThread());
+ DCHECK(has_video_);
+
+ if (!success) {
+ MEDIA_LOG(ERROR, media_log_) << "Failed to set DCOMP surface handle";
+ OnError(PIPELINE_ERROR_COULD_NOT_RENDER);
+ }
+}
+
+void MediaFoundationRendererClient::OnVideoFrameCreated(
+ scoped_refptr<VideoFrame> video_frame) {
+ DVLOG_FUNC(1);
+ DCHECK(media_task_runner_->BelongsToCurrentThread());
+ DCHECK(has_video_);
+
+ video_frame->metadata().protected_video = true;
+ video_frame->metadata().allow_overlay = true;
+
+ dcomp_video_frame_ = video_frame;
+ sink_->PaintSingleFrame(dcomp_video_frame_, true);
+}
+
+void MediaFoundationRendererClient::OnCdmAttached(bool success) {
+ DCHECK(cdm_attached_cb_);
+ std::move(cdm_attached_cb_).Run(success);
+}
+
+void MediaFoundationRendererClient::OnConnectionError() {
+ DVLOG_FUNC(1);
+ DCHECK(media_task_runner_->BelongsToCurrentThread());
+ MEDIA_LOG(ERROR, media_log_) << "MediaFoundationRendererClient disconnected";
+ OnError(PIPELINE_ERROR_DECODE);
+}
+
} // namespace media
diff --git a/chromium/media/mojo/clients/win/media_foundation_renderer_client.h b/chromium/media/mojo/clients/win/media_foundation_renderer_client.h
index ff03f69f46f..b1455757a85 100644
--- a/chromium/media/mojo/clients/win/media_foundation_renderer_client.h
+++ b/chromium/media/mojo/clients/win/media_foundation_renderer_client.h
@@ -22,24 +22,40 @@
namespace media {
+class MediaLog;
+
// MediaFoundationRendererClient lives in the Renderer process and talks to the
// MediaFoundationRenderer living in the MediaFoundationService (utility)
// process, using `mojo_renderer_` and `renderer_extension_`.
//
-// It also manages a DCOMPTexture living in the GPU process via
-// `dcomp_texture_wrapper_` and notifies the VideoRendererSink when new frames
-// are available.
+// It also manages a DCOMPTexture (via `dcomp_texture_wrapper_`) living in the
+// GPU process for direct composition support. The initialization of the
+// compositing path is summarized as follows:
+// ```
+// OnVideoNaturalSizeChange() -> CreateVideoFrame(natural_size) ->
+// PaintSingleFrame() -> SwapChainPresenter::PresentDCOMPSurface() ->
+// DCOMPTexture::OnUpdateParentWindowRect() -> DCOMPTexture::SendOutputRect() ->
+// OnOutputRectChange() -> SetOutputRect() -> OnSetOutputRectDone()
+// a) -> UpdateTextureSize(output_size), and
+// b) -> renderer_extension_->GetDCOMPSurface() -> OnDCOMPSurfaceReceived() ->
+// SetDCOMPSurfaceHandle() -> OnDCOMPSurfaceHandleSet()
+// ```
class MediaFoundationRendererClient : public Renderer, public RendererClient {
public:
using RendererExtension = mojom::MediaFoundationRendererExtension;
MediaFoundationRendererClient(
scoped_refptr<base::SingleThreadTaskRunner> media_task_runner,
+ std::unique_ptr<MediaLog> media_log,
std::unique_ptr<MojoRenderer> mojo_renderer,
mojo::PendingRemote<RendererExtension> pending_renderer_extension,
std::unique_ptr<DCOMPTextureWrapper> dcomp_texture_wrapper,
VideoRendererSink* sink);
+ MediaFoundationRendererClient(const MediaFoundationRendererClient&) = delete;
+ MediaFoundationRendererClient& operator=(
+ const MediaFoundationRendererClient&) = delete;
+
~MediaFoundationRendererClient() override;
// Renderer implementation.
@@ -72,14 +88,13 @@ class MediaFoundationRendererClient : public Renderer, public RendererClient {
private:
void OnRemoteRendererInitialized(PipelineStatus status);
- void OnDCOMPTextureInitialized(bool success);
+ void OnOutputRectChange(gfx::Rect output_rect);
+ void OnSetOutputRectDone(const gfx::Size& output_size, bool success);
void InitializeDCOMPRenderingIfNeeded();
void OnDCOMPSurfaceReceived(
const absl::optional<base::UnguessableToken>& token);
- void OnDCOMPSurfaceHandleCreated(bool success);
+ void OnDCOMPSurfaceHandleSet(bool success);
void OnVideoFrameCreated(scoped_refptr<VideoFrame> video_frame);
- void OnCompositionParamsReceived(gfx::Rect output_rect);
-
void OnCdmAttached(bool success);
void OnConnectionError();
@@ -87,16 +102,19 @@ class MediaFoundationRendererClient : public Renderer, public RendererClient {
// media thread. Hence we store PendingRemotes so we can bind the Remotes
// on the media task runner during/after Initialize().
scoped_refptr<base::SingleThreadTaskRunner> media_task_runner_;
+ std::unique_ptr<MediaLog> media_log_;
std::unique_ptr<MojoRenderer> mojo_renderer_;
mojo::PendingRemote<RendererExtension> pending_renderer_extension_;
std::unique_ptr<DCOMPTextureWrapper> dcomp_texture_wrapper_;
- VideoRendererSink* sink_;
+ VideoRendererSink* sink_ = nullptr;
mojo::Remote<RendererExtension> renderer_extension_;
RendererClient* client_ = nullptr;
bool dcomp_rendering_initialized_ = false;
gfx::Size natural_size_; // video's native size.
+ gfx::Size output_size_; // video's output size (the on-screen video size).
+ bool output_size_updated_ = false;
bool has_video_ = false;
scoped_refptr<VideoFrame> dcomp_video_frame_;
@@ -107,8 +125,6 @@ class MediaFoundationRendererClient : public Renderer, public RendererClient {
// NOTE: Weak pointers must be invalidated before all other member variables.
base::WeakPtrFactory<MediaFoundationRendererClient> weak_factory_{this};
-
- DISALLOW_COPY_AND_ASSIGN(MediaFoundationRendererClient);
};
} // namespace media
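The `OnVideoNaturalSizeChange() -> ... -> OnDCOMPSurfaceHandleSet()` chain in the class comment above is easiest to read as a strictly ordered relay of one-shot callbacks. The toy program below (assumed names only, no mojo or DirectComposition involved) simply replays that ordering so the handshake can be followed step by step:

```cpp
// dcomp_flow_sketch.cc -- standalone toy model of the callback ordering
// described in the class comment. Names mirror the real methods, but nothing
// here touches the actual media classes.
#include <iostream>

void OnDCOMPSurfaceHandleSet() { std::cout << "7. OnDCOMPSurfaceHandleSet\n"; }

void OnDCOMPSurfaceReceived() {
  std::cout << "6. OnDCOMPSurfaceReceived -> SetDCOMPSurfaceHandle\n";
  OnDCOMPSurfaceHandleSet();
}

void OnSetOutputRectDone() {
  std::cout << "5. OnSetOutputRectDone -> UpdateTextureSize + GetDCOMPSurface\n";
  OnDCOMPSurfaceReceived();
}

void OnOutputRectChange() {
  std::cout << "4. OnOutputRectChange -> SetOutputRect\n";
  OnSetOutputRectDone();
}

void PresentAndSendOutputRect() {
  std::cout << "3. PresentDCOMPSurface -> SendOutputRect\n";
  OnOutputRectChange();
}

void PaintSingleFrame() {
  std::cout << "2. PaintSingleFrame\n";
  PresentAndSendOutputRect();
}

void OnVideoNaturalSizeChange() {
  std::cout << "1. OnVideoNaturalSizeChange -> CreateVideoFrame\n";
  PaintSingleFrame();
}

int main() {
  OnVideoNaturalSizeChange();  // Prints the handshake, one step per line.
  return 0;
}
```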
diff --git a/chromium/media/mojo/clients/win/media_foundation_renderer_client_factory.cc b/chromium/media/mojo/clients/win/media_foundation_renderer_client_factory.cc
index 120365890b4..74df6484ea6 100644
--- a/chromium/media/mojo/clients/win/media_foundation_renderer_client_factory.cc
+++ b/chromium/media/mojo/clients/win/media_foundation_renderer_client_factory.cc
@@ -6,17 +6,23 @@
#include "media/base/win/dcomp_texture_wrapper.h"
#include "media/base/win/mf_helpers.h"
+#include "media/mojo/clients/mojo_media_log_service.h"
#include "media/mojo/clients/mojo_renderer.h"
#include "media/mojo/clients/mojo_renderer_factory.h"
#include "media/mojo/clients/win/media_foundation_renderer_client.h"
#include "media/mojo/mojom/renderer_extensions.mojom.h"
+#include "mojo/public/cpp/bindings/pending_receiver.h"
+#include "mojo/public/cpp/bindings/pending_remote.h"
+#include "mojo/public/cpp/bindings/self_owned_receiver.h"
namespace media {
MediaFoundationRendererClientFactory::MediaFoundationRendererClientFactory(
+ MediaLog* media_log,
GetDCOMPTextureWrapperCB get_dcomp_texture_cb,
std::unique_ptr<media::MojoRendererFactory> mojo_renderer_factory)
- : get_dcomp_texture_cb_(std::move(get_dcomp_texture_cb)),
+ : media_log_(media_log),
+ get_dcomp_texture_cb_(std::move(get_dcomp_texture_cb)),
mojo_renderer_factory_(std::move(mojo_renderer_factory)) {
DVLOG_FUNC(1);
}
@@ -35,6 +41,16 @@ MediaFoundationRendererClientFactory::CreateRenderer(
const gfx::ColorSpace& /*target_color_space*/) {
DVLOG_FUNC(1);
+ // Use `mojo::MakeSelfOwnedReceiver` for MediaLog so logs may go through even
+ // after `this` is destructed. `Clone()` is necessary since the remote could
+ // live longer than `media_log_`.
+ mojo::PendingReceiver<mojom::MediaLog> media_log_pending_receiver;
+ auto media_log_pending_remote =
+ media_log_pending_receiver.InitWithNewPipeAndPassRemote();
+ mojo::MakeSelfOwnedReceiver(
+ std::make_unique<MojoMediaLogService>(media_log_->Clone()),
+ std::move(media_log_pending_receiver));
+
// Used to send messages from the MediaFoundationRendererClient (Renderer
// process), to the MediaFoundationRenderer (MF_CDM LPAC Utility process).
// The |renderer_extension_receiver| will be bound in MediaFoundationRenderer.
@@ -48,12 +64,13 @@ MediaFoundationRendererClientFactory::CreateRenderer(
std::unique_ptr<media::MojoRenderer> mojo_renderer =
mojo_renderer_factory_->CreateMediaFoundationRenderer(
+ std::move(media_log_pending_remote),
std::move(renderer_extension_receiver), media_task_runner,
video_renderer_sink);
// mojo_renderer's ownership is passed to MediaFoundationRendererClient.
return std::make_unique<MediaFoundationRendererClient>(
- media_task_runner, std::move(mojo_renderer),
+ media_task_runner, media_log_->Clone(), std::move(mojo_renderer),
std::move(renderer_extension_remote), std::move(dcomp_texture),
video_renderer_sink);
}
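The `MakeSelfOwnedReceiver` comment in `CreateRenderer()` above rests on a lifetime pattern worth making explicit: the receiver end of the pipe, not the factory, owns the `MojoMediaLogService`, so log messages keep flowing after the factory is destroyed and the service dies only when the pipe disconnects. A loose standard-C++ analogy (hypothetical names, no mojo):

```cpp
// self_owned_sketch.cc -- lifetime analogy only; mojo::MakeSelfOwnedReceiver
// ties the implementation's lifetime to the message pipe in a similar way.
#include <iostream>
#include <memory>
#include <string>

struct LogSink {
  void Write(const std::string& msg) { std::cout << "log: " << msg << "\n"; }
  ~LogSink() { std::cout << "LogSink destroyed\n"; }
};

// Stands in for the connection: it owns the sink, so the sink lives exactly
// as long as the connection does.
struct Connection {
  explicit Connection(std::unique_ptr<LogSink> sink) : sink_(std::move(sink)) {}
  void Send(const std::string& msg) { sink_->Write(msg); }
  std::unique_ptr<LogSink> sink_;
};

struct Factory {
  std::shared_ptr<Connection> CreateConnection() {
    // Ownership of the sink is handed to the connection, not kept by `this`.
    return std::make_shared<Connection>(std::make_unique<LogSink>());
  }
};

int main() {
  std::shared_ptr<Connection> conn;
  {
    Factory factory;
    conn = factory.CreateConnection();
  }  // The factory is gone...
  conn->Send("still delivered");  // ...but the sink outlives it.
  return 0;
}
```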
diff --git a/chromium/media/mojo/clients/win/media_foundation_renderer_client_factory.h b/chromium/media/mojo/clients/win/media_foundation_renderer_client_factory.h
index 1ca6c792a36..0eacabaa8c2 100644
--- a/chromium/media/mojo/clients/win/media_foundation_renderer_client_factory.h
+++ b/chromium/media/mojo/clients/win/media_foundation_renderer_client_factory.h
@@ -15,6 +15,8 @@
namespace media {
+class MediaLog;
+
// The default class for creating a MediaFoundationRendererClient
// and its associated MediaFoundationRenderer.
class MediaFoundationRendererClientFactory : public media::RendererFactory {
@@ -23,6 +25,7 @@ class MediaFoundationRendererClientFactory : public media::RendererFactory {
base::RepeatingCallback<std::unique_ptr<DCOMPTextureWrapper>()>;
MediaFoundationRendererClientFactory(
+ MediaLog* media_log,
GetDCOMPTextureWrapperCB get_dcomp_texture_cb,
std::unique_ptr<media::MojoRendererFactory> mojo_renderer_factory);
~MediaFoundationRendererClientFactory() override;
@@ -39,6 +42,10 @@ class MediaFoundationRendererClientFactory : public media::RendererFactory {
media::MediaResource::Type GetRequiredMediaResourceType() override;
private:
+  // Raw pointer is safe since both `this` and `media_log` are owned by
+  // WebMediaPlayerImpl with the correct declaration order.
+ MediaLog* media_log_ = nullptr;
+
GetDCOMPTextureWrapperCB get_dcomp_texture_cb_;
std::unique_ptr<media::MojoRendererFactory> mojo_renderer_factory_;
};
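The raw-pointer comment above relies on a core C++ rule: non-static data members are destroyed in reverse order of declaration. If the owner declares its `MediaLog` member before the factory member, the factory (and its raw `media_log_` pointer) is torn down first, so the pointer cannot dangle. A minimal standalone demonstration of that rule (the class names are made up, not the Chromium ones):

```cpp
// destruction_order.cc -- demonstrates reverse-declaration-order destruction.
#include <iostream>

struct MediaLogLike {
  ~MediaLogLike() { std::cout << "MediaLogLike destroyed (last)\n"; }
};

struct FactoryLike {
  explicit FactoryLike(MediaLogLike* log) : log_(log) {}
  // Destroyed before the log it points at, so `log_` stays valid for the
  // factory's entire lifetime.
  ~FactoryLike() { std::cout << "FactoryLike destroyed (first)\n"; }
  MediaLogLike* log_;
};

struct OwnerLike {
  MediaLogLike log_;            // Declared first  -> destroyed last.
  FactoryLike factory_{&log_};  // Declared second -> destroyed first.
};

int main() {
  OwnerLike owner;
  return 0;  // Prints: FactoryLike destroyed (first), then MediaLogLike.
}
```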
diff --git a/chromium/media/mojo/common/media_type_converters_unittest.cc b/chromium/media/mojo/common/media_type_converters_unittest.cc
index 3ef5ba0641e..bf8c04aa6b2 100644
--- a/chromium/media/mojo/common/media_type_converters_unittest.cc
+++ b/chromium/media/mojo/common/media_type_converters_unittest.cc
@@ -68,11 +68,10 @@ TEST(MediaTypeConvertersTest, ConvertDecoderBuffer_Normal) {
scoped_refptr<DecoderBuffer> buffer(DecoderBuffer::CopyFrom(
reinterpret_cast<const uint8_t*>(&kData), kDataSize,
reinterpret_cast<const uint8_t*>(&kSideData), kSideDataSize));
- buffer->set_timestamp(base::TimeDelta::FromMilliseconds(123));
- buffer->set_duration(base::TimeDelta::FromMilliseconds(456));
- buffer->set_discard_padding(
- DecoderBuffer::DiscardPadding(base::TimeDelta::FromMilliseconds(5),
- base::TimeDelta::FromMilliseconds(6)));
+ buffer->set_timestamp(base::Milliseconds(123));
+ buffer->set_duration(base::Milliseconds(456));
+ buffer->set_discard_padding(DecoderBuffer::DiscardPadding(
+ base::Milliseconds(5), base::Milliseconds(6)));
// Convert from and back.
mojom::DecoderBufferPtr ptr(mojom::DecoderBuffer::From(*buffer));
@@ -234,7 +233,7 @@ TEST(MediaTypeConvertersTest, ConvertAudioBuffer_FLOAT) {
// Original.
const ChannelLayout kChannelLayout = CHANNEL_LAYOUT_4_0;
const int kSampleRate = 48000;
- const base::TimeDelta start_time = base::TimeDelta::FromSecondsD(1000.0);
+ const base::TimeDelta start_time = base::Seconds(1000.0);
scoped_refptr<AudioBuffer> buffer = MakeAudioBuffer<float>(
kSampleFormatPlanarF32, kChannelLayout,
ChannelLayoutToChannelCount(kChannelLayout), kSampleRate, 0.0f, 1.0f,
diff --git a/chromium/media/mojo/common/mojo_data_pipe_read_write.h b/chromium/media/mojo/common/mojo_data_pipe_read_write.h
index 553d356b461..dee06baaa8d 100644
--- a/chromium/media/mojo/common/mojo_data_pipe_read_write.h
+++ b/chromium/media/mojo/common/mojo_data_pipe_read_write.h
@@ -18,6 +18,9 @@ class MojoDataPipeReader {
explicit MojoDataPipeReader(
mojo::ScopedDataPipeConsumerHandle consumer_handle);
+ MojoDataPipeReader(const MojoDataPipeReader&) = delete;
+ MojoDataPipeReader& operator=(const MojoDataPipeReader&) = delete;
+
~MojoDataPipeReader();
using DoneCB = base::OnceCallback<void(bool)>;
@@ -59,8 +62,6 @@ class MojoDataPipeReader {
// Number of bytes already read into the current buffer.
uint32_t bytes_read_ = 0;
-
- DISALLOW_COPY_AND_ASSIGN(MojoDataPipeReader);
};
// Write a certain amount of data into a mojo data pipe by request.
@@ -69,6 +70,9 @@ class MojoDataPipeWriter {
explicit MojoDataPipeWriter(
mojo::ScopedDataPipeProducerHandle producer_handle);
+ MojoDataPipeWriter(const MojoDataPipeWriter&) = delete;
+ MojoDataPipeWriter& operator=(const MojoDataPipeWriter&) = delete;
+
~MojoDataPipeWriter();
using DoneCB = base::OnceCallback<void(bool)>;
@@ -109,8 +113,6 @@ class MojoDataPipeWriter {
// Number of bytes already written from the current buffer.
uint32_t bytes_written_ = 0;
-
- DISALLOW_COPY_AND_ASSIGN(MojoDataPipeWriter);
};
} // namespace media
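This hunk, like many others in the patch, replaces the `DISALLOW_COPY_AND_ASSIGN` macro with explicitly deleted copy operations next to the constructors. A minimal standalone version of the resulting idiom (hypothetical class, not the media code):

```cpp
// noncopyable_sketch.cc -- the deleted-copy idiom used throughout this patch.
#include <utility>

class PipeEndpoint {
 public:
  PipeEndpoint() = default;

  // Copying a pipe endpoint makes no sense; delete it at the declaration
  // site instead of relying on a macro at the bottom of the class.
  PipeEndpoint(const PipeEndpoint&) = delete;
  PipeEndpoint& operator=(const PipeEndpoint&) = delete;

  // Moves can still be allowed explicitly if the type supports them.
  PipeEndpoint(PipeEndpoint&&) = default;
  PipeEndpoint& operator=(PipeEndpoint&&) = default;
};

int main() {
  PipeEndpoint a;
  PipeEndpoint b = std::move(a);  // OK: move is allowed.
  // PipeEndpoint c = b;          // Would not compile: copy is deleted.
  return 0;
}
```

Declaring the deleted operations up front keeps the class's copyability visible at a glance, which is the main motivation for retiring the macro.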
diff --git a/chromium/media/mojo/common/mojo_decoder_buffer_converter.h b/chromium/media/mojo/common/mojo_decoder_buffer_converter.h
index c1ea6eeb892..c582d09849b 100644
--- a/chromium/media/mojo/common/mojo_decoder_buffer_converter.h
+++ b/chromium/media/mojo/common/mojo_decoder_buffer_converter.h
@@ -45,6 +45,9 @@ class MojoDecoderBufferReader {
explicit MojoDecoderBufferReader(
mojo::ScopedDataPipeConsumerHandle consumer_handle);
+ MojoDecoderBufferReader(const MojoDecoderBufferReader&) = delete;
+ MojoDecoderBufferReader& operator=(const MojoDecoderBufferReader&) = delete;
+
~MojoDecoderBufferReader();
// Enqueues conversion of and reading data for a mojom::DecoderBuffer. Once
@@ -97,8 +100,6 @@ class MojoDecoderBufferReader {
// Number of bytes already read into the current buffer.
uint32_t bytes_read_;
-
- DISALLOW_COPY_AND_ASSIGN(MojoDecoderBufferReader);
};
// Converts media::DecoderBuffers to mojom::DecoderBuffers, writing the data
@@ -123,6 +124,9 @@ class MojoDecoderBufferWriter {
explicit MojoDecoderBufferWriter(
mojo::ScopedDataPipeProducerHandle producer_handle);
+ MojoDecoderBufferWriter(const MojoDecoderBufferWriter&) = delete;
+ MojoDecoderBufferWriter& operator=(const MojoDecoderBufferWriter&) = delete;
+
~MojoDecoderBufferWriter();
// Converts a media::DecoderBuffer to a mojom::DecoderBuffer and enqueues the
@@ -150,8 +154,6 @@ class MojoDecoderBufferWriter {
// Number of bytes already written from the current buffer.
uint32_t bytes_written_;
-
- DISALLOW_COPY_AND_ASSIGN(MojoDecoderBufferWriter);
};
} // namespace media
diff --git a/chromium/media/mojo/common/mojo_decoder_buffer_converter_unittest.cc b/chromium/media/mojo/common/mojo_decoder_buffer_converter_unittest.cc
index 5b70eb2fd70..f4546654b1e 100644
--- a/chromium/media/mojo/common/mojo_decoder_buffer_converter_unittest.cc
+++ b/chromium/media/mojo/common/mojo_decoder_buffer_converter_unittest.cc
@@ -70,11 +70,10 @@ TEST(MojoDecoderBufferConverterTest, ConvertDecoderBuffer_Normal) {
scoped_refptr<DecoderBuffer> buffer(DecoderBuffer::CopyFrom(
reinterpret_cast<const uint8_t*>(&kData), kDataSize,
reinterpret_cast<const uint8_t*>(&kSideData), kSideDataSize));
- buffer->set_timestamp(base::TimeDelta::FromMilliseconds(123));
- buffer->set_duration(base::TimeDelta::FromMilliseconds(456));
- buffer->set_discard_padding(
- DecoderBuffer::DiscardPadding(base::TimeDelta::FromMilliseconds(5),
- base::TimeDelta::FromMilliseconds(6)));
+ buffer->set_timestamp(base::Milliseconds(123));
+ buffer->set_duration(base::Milliseconds(456));
+ buffer->set_discard_padding(DecoderBuffer::DiscardPadding(
+ base::Milliseconds(5), base::Milliseconds(6)));
MojoDecoderBufferConverter converter;
converter.ConvertAndVerify(buffer);
diff --git a/chromium/media/mojo/common/mojo_shared_buffer_video_frame_unittest.cc b/chromium/media/mojo/common/mojo_shared_buffer_video_frame_unittest.cc
index 3c10e4f3bba..7b24ef71904 100644
--- a/chromium/media/mojo/common/mojo_shared_buffer_video_frame_unittest.cc
+++ b/chromium/media/mojo/common/mojo_shared_buffer_video_frame_unittest.cc
@@ -39,7 +39,7 @@ void CompareDestructionCallbackValues(
TEST(MojoSharedBufferVideoFrameTest, CreateFrameWithSharedMemoryI420) {
const int kWidth = 16;
const int kHeight = 9;
- const base::TimeDelta kTimestamp = base::TimeDelta::FromMicroseconds(1337);
+ const base::TimeDelta kTimestamp = base::Microseconds(1337);
// Create a MojoSharedBufferVideoFrame which will allocate enough space
// to hold a 16x9 video frame.
@@ -71,7 +71,7 @@ TEST(MojoSharedBufferVideoFrameTest, CreateFrameWithSharedMemoryI420) {
TEST(MojoSharedBufferVideoFrameTest, CreateFrameWithSharedMemoryNV12) {
const int kWidth = 16;
const int kHeight = 9;
- const base::TimeDelta kTimestamp = base::TimeDelta::FromMicroseconds(1337);
+ const base::TimeDelta kTimestamp = base::Microseconds(1337);
// Create a MojoSharedBufferVideoFrame which will allocate enough space
// to hold a 16x9 video frame.
@@ -100,7 +100,7 @@ TEST(MojoSharedBufferVideoFrameTest, CreateFrameWithSharedMemoryNV12) {
TEST(MojoSharedBufferVideoFrameTest, CreateFrameAndPassSharedMemoryI420) {
const int kWidth = 32;
const int kHeight = 18;
- const base::TimeDelta kTimestamp = base::TimeDelta::FromMicroseconds(1338);
+ const base::TimeDelta kTimestamp = base::Microseconds(1338);
// Some random values to use. Since we actually don't use the data inside the
// frame, random values are fine (as long as the offsets are within the
@@ -150,7 +150,7 @@ TEST(MojoSharedBufferVideoFrameTest, CreateFrameAndPassSharedMemoryI420) {
TEST(MojoSharedBufferVideoFrameTest, CreateFrameAndPassSharedMemoryNV12) {
const int kWidth = 32;
const int kHeight = 18;
- const base::TimeDelta kTimestamp = base::TimeDelta::FromMicroseconds(1338);
+ const base::TimeDelta kTimestamp = base::Microseconds(1338);
// Some random values to use. Since we actually don't use the data inside the
// frame, random values are fine (as long as the offsets are within the
@@ -194,7 +194,7 @@ TEST(MojoSharedBufferVideoFrameTest, CreateFrameAndPassSharedMemoryNV12) {
TEST(MojoSharedBufferVideoFrameTest, CreateFrameOddWidth) {
const int kWidth = 15;
const int kHeight = 9;
- const base::TimeDelta kTimestamp = base::TimeDelta::FromMicroseconds(1337);
+ const base::TimeDelta kTimestamp = base::Microseconds(1337);
VideoPixelFormat formats[] = {PIXEL_FORMAT_I420, PIXEL_FORMAT_NV12};
for (auto format : formats) {
@@ -219,7 +219,7 @@ TEST(MojoSharedBufferVideoFrameTest, TestDestructionCallback) {
const VideoPixelFormat format = PIXEL_FORMAT_I420;
const int kWidth = 32;
const int kHeight = 18;
- const base::TimeDelta kTimestamp = base::TimeDelta::FromMicroseconds(1338);
+ const base::TimeDelta kTimestamp = base::Microseconds(1338);
// Allocate some shared memory.
gfx::Size size(kWidth, kHeight);
@@ -257,7 +257,7 @@ TEST(MojoSharedBufferVideoFrameTest, InterleavedData) {
const VideoPixelFormat format = PIXEL_FORMAT_I420;
const int kWidth = 32;
const int kHeight = 18;
- const base::TimeDelta kTimestamp = base::TimeDelta::FromMicroseconds(1338);
+ const base::TimeDelta kTimestamp = base::Microseconds(1338);
gfx::Size size(kWidth, kHeight);
gfx::Rect visible_rect(size);
diff --git a/chromium/media/mojo/mojom/BUILD.gn b/chromium/media/mojo/mojom/BUILD.gn
index e4213fd8d2f..589b4e3c719 100644
--- a/chromium/media/mojo/mojom/BUILD.gn
+++ b/chromium/media/mojo/mojom/BUILD.gn
@@ -91,8 +91,21 @@ mojom("mojom") {
public_deps += [ "//sandbox/mac/mojom" ]
}
+ enabled_features = []
+
+ # Help select ServiceSandbox for media_service.mojom.
+ if (mojo_media_host == "browser") {
+ enabled_features += [ "mojo_media_in_browser" ]
+ } else if (mojo_media_host == "gpu") {
+ enabled_features += [ "mojo_media_in_gpu" ]
+ } else if (mojo_media_host == "") {
+ enabled_features += [ "mojo_media_service_unused" ]
+ } else {
+ assert(false, "Invalid mojo media host: $mojo_media_host")
+ }
+
if (enable_cast_renderer) {
- enabled_features = [ "enable_cast_renderer" ]
+ enabled_features += [ "enable_cast_renderer" ]
}
shared_typemaps = [
@@ -313,6 +326,10 @@ mojom("mojom") {
{
types = [
{
+ mojom = "media.mojom.StatusData"
+ cpp = "::media::internal::StatusData"
+ },
+ {
mojom = "media.mojom.Status"
cpp = "::media::Status"
},
@@ -417,6 +434,10 @@ mojom("mojom") {
cpp = "::media::VideoEncodeAccelerator::Config::SpatialLayer"
},
{
+ mojom = "media.mojom.SVCScalabilityMode"
+ cpp = "::media::SVCScalabilityMode"
+ },
+ {
mojom = "media.mojom.Bitrate"
cpp = "::media::Bitrate"
},
@@ -628,13 +649,13 @@ mojom("mojom") {
{
types = [
{
- mojom = "media.mojom.CdmPreferenceData"
- cpp = "::std::unique_ptr<::media::CdmPreferenceData>"
+ mojom = "media.mojom.MediaFoundationCdmData"
+ cpp = "::std::unique_ptr<::media::MediaFoundationCdmData>"
move_only = true
},
]
- traits_headers = [ "cdm_preference_data_mojom_traits.h" ]
- traits_sources = [ "cdm_preference_data_mojom_traits.cc" ]
+ traits_headers = [ "media_foundation_cdm_data_mojom_traits.h" ]
+ traits_sources = [ "media_foundation_cdm_data_mojom_traits.cc" ]
traits_public_deps = [ "//media" ]
},
]
diff --git a/chromium/media/mojo/mojom/audio_decoder_config_mojom_traits.cc b/chromium/media/mojo/mojom/audio_decoder_config_mojom_traits.cc
index 7b34934b563..b7cf06f27ed 100644
--- a/chromium/media/mojo/mojom/audio_decoder_config_mojom_traits.cc
+++ b/chromium/media/mojo/mojom/audio_decoder_config_mojom_traits.cc
@@ -45,11 +45,16 @@ bool StructTraits<media::mojom::AudioDecoderConfigDataView,
if (!input.ReadTargetOutputChannelLayout(&target_output_channel_layout))
return false;
+ std::vector<uint8_t> aac_extra_data;
+ if (!input.ReadAacExtraData(&aac_extra_data))
+ return false;
+
output->Initialize(codec, sample_format, channel_layout,
- input.samples_per_second(), extra_data, encryption_scheme,
- seek_preroll, input.codec_delay());
+ input.samples_per_second(), std::move(extra_data),
+ encryption_scheme, seek_preroll, input.codec_delay());
output->set_profile(profile);
output->set_target_output_channel_layout(target_output_channel_layout);
+ output->set_aac_extra_data(std::move(aac_extra_data));
if (!input.should_discard_decoder_delay())
output->disable_discard_decoder_delay();
diff --git a/chromium/media/mojo/mojom/audio_decoder_config_mojom_traits.h b/chromium/media/mojo/mojom/audio_decoder_config_mojom_traits.h
index 3d10781ff75..cc657447b7d 100644
--- a/chromium/media/mojo/mojom/audio_decoder_config_mojom_traits.h
+++ b/chromium/media/mojo/mojom/audio_decoder_config_mojom_traits.h
@@ -65,6 +65,11 @@ struct StructTraits<media::mojom::AudioDecoderConfigDataView,
return input.should_discard_decoder_delay();
}
+ static const std::vector<uint8_t>& aac_extra_data(
+ const media::AudioDecoderConfig& input) {
+ return input.aac_extra_data();
+ }
+
static bool Read(media::mojom::AudioDecoderConfigDataView input,
media::AudioDecoderConfig* output);
};
diff --git a/chromium/media/mojo/mojom/audio_decoder_config_mojom_traits_unittest.cc b/chromium/media/mojo/mojom/audio_decoder_config_mojom_traits_unittest.cc
index c3423ac861c..8fb19dae940 100644
--- a/chromium/media/mojo/mojom/audio_decoder_config_mojom_traits_unittest.cc
+++ b/chromium/media/mojo/mojom/audio_decoder_config_mojom_traits_unittest.cc
@@ -14,95 +14,96 @@
namespace media {
-TEST(AudioDecoderConfigStructTraitsTest, ConvertAudioDecoderConfig_Normal) {
+TEST(AudioDecoderConfigStructTraitsTest, Normal) {
const uint8_t kExtraData[] = "input extra data";
const std::vector<uint8_t> kExtraDataVector(
&kExtraData[0], &kExtraData[0] + base::size(kExtraData));
AudioDecoderConfig input;
- input.Initialize(kCodecAAC, kSampleFormatU8, CHANNEL_LAYOUT_SURROUND, 48000,
- kExtraDataVector, EncryptionScheme::kUnencrypted,
+ input.Initialize(AudioCodec::kAAC, kSampleFormatU8, CHANNEL_LAYOUT_SURROUND,
+ 48000, kExtraDataVector, EncryptionScheme::kUnencrypted,
base::TimeDelta(), 0);
- std::vector<uint8_t> data =
- media::mojom::AudioDecoderConfig::Serialize(&input);
+ std::vector<uint8_t> data = mojom::AudioDecoderConfig::Serialize(&input);
AudioDecoderConfig output;
- EXPECT_TRUE(
- media::mojom::AudioDecoderConfig::Deserialize(std::move(data), &output));
+ EXPECT_TRUE(mojom::AudioDecoderConfig::Deserialize(std::move(data), &output));
EXPECT_TRUE(output.Matches(input));
}
-TEST(AudioDecoderConfigStructTraitsTest,
- ConvertAudioDecoderConfig_EmptyExtraData) {
+TEST(AudioDecoderConfigStructTraitsTest, EmptyExtraData) {
AudioDecoderConfig input;
- input.Initialize(kCodecAAC, kSampleFormatU8, CHANNEL_LAYOUT_SURROUND, 48000,
- EmptyExtraData(), EncryptionScheme::kUnencrypted,
+ input.Initialize(AudioCodec::kAAC, kSampleFormatU8, CHANNEL_LAYOUT_SURROUND,
+ 48000, EmptyExtraData(), EncryptionScheme::kUnencrypted,
base::TimeDelta(), 0);
- std::vector<uint8_t> data =
- media::mojom::AudioDecoderConfig::Serialize(&input);
+ std::vector<uint8_t> data = mojom::AudioDecoderConfig::Serialize(&input);
AudioDecoderConfig output;
- EXPECT_TRUE(
- media::mojom::AudioDecoderConfig::Deserialize(std::move(data), &output));
+ EXPECT_TRUE(mojom::AudioDecoderConfig::Deserialize(std::move(data), &output));
EXPECT_TRUE(output.Matches(input));
}
-TEST(AudioDecoderConfigStructTraitsTest, ConvertAudioDecoderConfig_Encrypted) {
+TEST(AudioDecoderConfigStructTraitsTest, Encrypted) {
AudioDecoderConfig input;
- input.Initialize(kCodecAAC, kSampleFormatU8, CHANNEL_LAYOUT_SURROUND, 48000,
- EmptyExtraData(), EncryptionScheme::kCenc, base::TimeDelta(),
- 0);
- std::vector<uint8_t> data =
- media::mojom::AudioDecoderConfig::Serialize(&input);
+ input.Initialize(AudioCodec::kAAC, kSampleFormatU8, CHANNEL_LAYOUT_SURROUND,
+ 48000, EmptyExtraData(), EncryptionScheme::kCenc,
+ base::TimeDelta(), 0);
+ std::vector<uint8_t> data = mojom::AudioDecoderConfig::Serialize(&input);
AudioDecoderConfig output;
- EXPECT_TRUE(
- media::mojom::AudioDecoderConfig::Deserialize(std::move(data), &output));
+ EXPECT_TRUE(mojom::AudioDecoderConfig::Deserialize(std::move(data), &output));
EXPECT_TRUE(output.Matches(input));
}
-TEST(AudioDecoderConfigStructTraitsTest,
- ConvertAudioDecoderConfig_WithProfile) {
+TEST(AudioDecoderConfigStructTraitsTest, WithProfile) {
AudioDecoderConfig input;
- input.Initialize(kCodecAAC, kSampleFormatU8, CHANNEL_LAYOUT_SURROUND, 48000,
- EmptyExtraData(), EncryptionScheme::kUnencrypted,
+ input.Initialize(AudioCodec::kAAC, kSampleFormatU8, CHANNEL_LAYOUT_SURROUND,
+ 48000, EmptyExtraData(), EncryptionScheme::kUnencrypted,
base::TimeDelta(), 0);
input.set_profile(AudioCodecProfile::kXHE_AAC);
- std::vector<uint8_t> data =
- media::mojom::AudioDecoderConfig::Serialize(&input);
+ std::vector<uint8_t> data = mojom::AudioDecoderConfig::Serialize(&input);
AudioDecoderConfig output;
- EXPECT_TRUE(
- media::mojom::AudioDecoderConfig::Deserialize(std::move(data), &output));
+ EXPECT_TRUE(mojom::AudioDecoderConfig::Deserialize(std::move(data), &output));
EXPECT_TRUE(output.Matches(input));
}
-TEST(AudioDecoderConfigStructTraitsTest,
- ConvertAudioDecoderConfig_DisableDiscardDecoderDelay) {
+TEST(AudioDecoderConfigStructTraitsTest, DisableDiscardDecoderDelay) {
AudioDecoderConfig input;
- input.Initialize(kCodecAAC, kSampleFormatU8, CHANNEL_LAYOUT_SURROUND, 48000,
- EmptyExtraData(), EncryptionScheme::kUnencrypted,
+ input.Initialize(AudioCodec::kAAC, kSampleFormatU8, CHANNEL_LAYOUT_SURROUND,
+ 48000, EmptyExtraData(), EncryptionScheme::kUnencrypted,
base::TimeDelta(), 0);
input.disable_discard_decoder_delay();
- std::vector<uint8_t> data =
- media::mojom::AudioDecoderConfig::Serialize(&input);
+ std::vector<uint8_t> data = mojom::AudioDecoderConfig::Serialize(&input);
AudioDecoderConfig output;
- EXPECT_TRUE(
- media::mojom::AudioDecoderConfig::Deserialize(std::move(data), &output));
+ EXPECT_TRUE(mojom::AudioDecoderConfig::Deserialize(std::move(data), &output));
EXPECT_TRUE(output.Matches(input));
EXPECT_FALSE(output.should_discard_decoder_delay());
}
-TEST(AudioDecoderConfigStructTraitsTest,
- ConvertAudioDecoderConfig_TargetOutputChannelLayout) {
+TEST(AudioDecoderConfigStructTraitsTest, TargetOutputChannelLayout) {
AudioDecoderConfig input;
- input.Initialize(kCodecAAC, kSampleFormatU8, CHANNEL_LAYOUT_SURROUND, 48000,
- EmptyExtraData(), EncryptionScheme::kUnencrypted,
+ input.Initialize(AudioCodec::kAAC, kSampleFormatU8, CHANNEL_LAYOUT_SURROUND,
+ 48000, EmptyExtraData(), EncryptionScheme::kUnencrypted,
base::TimeDelta(), 0);
input.set_target_output_channel_layout(CHANNEL_LAYOUT_5_1);
- std::vector<uint8_t> data =
- media::mojom::AudioDecoderConfig::Serialize(&input);
+ std::vector<uint8_t> data = mojom::AudioDecoderConfig::Serialize(&input);
AudioDecoderConfig output;
- EXPECT_TRUE(
- media::mojom::AudioDecoderConfig::Deserialize(std::move(data), &output));
+ EXPECT_TRUE(mojom::AudioDecoderConfig::Deserialize(std::move(data), &output));
EXPECT_TRUE(output.Matches(input));
EXPECT_EQ(output.target_output_channel_layout(), CHANNEL_LAYOUT_5_1);
}
+TEST(AudioDecoderConfigStructTraitsTest, AacExtraData) {
+ const uint8_t kAacExtraData[] = "aac extra data";
+ const std::vector<uint8_t> kAacExtraDataVector(
+ kAacExtraData, kAacExtraData + base::size(kAacExtraData));
+
+ AudioDecoderConfig input;
+ input.Initialize(AudioCodec::kAAC, kSampleFormatU8, CHANNEL_LAYOUT_SURROUND,
+ 48000, EmptyExtraData(), EncryptionScheme::kUnencrypted,
+ base::TimeDelta(), 0);
+ input.set_aac_extra_data(kAacExtraDataVector);
+ std::vector<uint8_t> data = mojom::AudioDecoderConfig::Serialize(&input);
+ AudioDecoderConfig output;
+ EXPECT_TRUE(mojom::AudioDecoderConfig::Deserialize(std::move(data), &output));
+ EXPECT_TRUE(output.Matches(input));
+ EXPECT_EQ(output.aac_extra_data(), kAacExtraDataVector);
+}
+
} // namespace media
diff --git a/chromium/media/mojo/mojom/cdm_document_service.mojom b/chromium/media/mojo/mojom/cdm_document_service.mojom
index e6b122ddbdc..b2d4c953404 100644
--- a/chromium/media/mojo/mojom/cdm_document_service.mojom
+++ b/chromium/media/mojo/mojom/cdm_document_service.mojom
@@ -6,11 +6,12 @@
module media.mojom;
import "mojo/public/mojom/base/unguessable_token.mojom";
+import "mojo/public/mojom/base/file_path.mojom";
-// Contains data linked to an origin that the CDM stores in the Preference
-// Service.
+// Contains data linked to an origin that the CDM stores in the browser
+// process.
[EnableIf=is_win]
-struct CdmPreferenceData {
+struct MediaFoundationCdmData {
// The origin ID of the document associated with the CDM. The origin ID
// is used in place of the origin when hiding the concrete origin is needed.
// The origin ID is also user resettable by clearing the browsing data.
@@ -19,6 +20,10 @@ struct CdmPreferenceData {
// The token is set by the CDM. The token is then saved in the Pref Service so
// that it can be reused by the CDM for that same origin in the future.
array<uint8>? client_token;
+
+ // The path where the MediaFoundation CDM should store its data. The path is
+ // specific to the current chrome user and the device's architecture.
+ mojo_base.mojom.FilePath cdm_store_path_root;
};
// The service itself is associated with a RenderFrameHost in the browser
@@ -55,11 +60,11 @@ interface CdmDocumentService {
[EnableIf=is_chromeos]
IsVerifiedAccessEnabled() => (bool enabled);
- // Gets the CDM preference data for the origin associated with the CDM.
- // - `cdm_preference_data`: The CDM preference data for the origin associated
- // with the CDM.
+  // Gets the MediaFoundation CDM data for the origin associated with the CDM.
+  // - `cdm_data`: The CDM data for the origin associated with the CDM.
[EnableIf=is_win]
- GetCdmPreferenceData() => (CdmPreferenceData cdm_preference_data);
+ GetMediaFoundationCdmData() => (MediaFoundationCdmData cdm_data);
// Sets the client token for the origin associated with the CDM. The token is
// set by the CDM. The token is then saved in the Pref Service so that it can
diff --git a/chromium/media/mojo/mojom/cdm_preference_data_mojom_traits.cc b/chromium/media/mojo/mojom/cdm_preference_data_mojom_traits.cc
deleted file mode 100644
index 8d098f9cac5..00000000000
--- a/chromium/media/mojo/mojom/cdm_preference_data_mojom_traits.cc
+++ /dev/null
@@ -1,28 +0,0 @@
-// Copyright 2021 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include "media/mojo/mojom/cdm_preference_data_mojom_traits.h"
-
-#include "third_party/abseil-cpp/absl/types/optional.h"
-
-namespace mojo {
-
-// static
-bool StructTraits<media::mojom::CdmPreferenceDataDataView,
- std::unique_ptr<media::CdmPreferenceData>>::
- Read(media::mojom::CdmPreferenceDataDataView input,
- std::unique_ptr<media::CdmPreferenceData>* output) {
- base::UnguessableToken origin_id;
- if (!input.ReadOriginId(&origin_id))
- return false;
-
- absl::optional<std::vector<uint8_t>> client_token;
- if (!input.ReadClientToken(&client_token))
- return false;
-
- *output = std::make_unique<media::CdmPreferenceData>(origin_id, client_token);
- return true;
-}
-
-} // namespace mojo
diff --git a/chromium/media/mojo/mojom/cdm_preference_data_mojom_traits.h b/chromium/media/mojo/mojom/cdm_preference_data_mojom_traits.h
deleted file mode 100644
index 426668f13b9..00000000000
--- a/chromium/media/mojo/mojom/cdm_preference_data_mojom_traits.h
+++ /dev/null
@@ -1,35 +0,0 @@
-// Copyright 2021 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#ifndef MEDIA_MOJO_MOJOM_CDM_PREFERENCE_DATA_MOJOM_TRAITS_H_
-#define MEDIA_MOJO_MOJOM_CDM_PREFERENCE_DATA_MOJOM_TRAITS_H_
-
-#include <vector>
-
-#include "base/unguessable_token.h"
-#include "media/cdm/cdm_preference_data.h"
-#include "media/mojo/mojom/cdm_document_service.mojom.h"
-
-namespace mojo {
-
-template <>
-struct StructTraits<media::mojom::CdmPreferenceDataDataView,
- std::unique_ptr<media::CdmPreferenceData>> {
- static base::UnguessableToken origin_id(
- const std::unique_ptr<media::CdmPreferenceData>& input) {
- return input->origin_id;
- }
-
- static absl::optional<std::vector<uint8_t>> client_token(
- const std::unique_ptr<media::CdmPreferenceData>& input) {
- return input->client_token;
- }
-
- static bool Read(media::mojom::CdmPreferenceDataDataView input,
- std::unique_ptr<media::CdmPreferenceData>* output);
-};
-
-} // namespace mojo
-
-#endif // MEDIA_MOJO_MOJOM_CDM_PREFERENCE_DATA_MOJOM_TRAITS_H_
diff --git a/chromium/media/mojo/mojom/interface_factory.mojom b/chromium/media/mojo/mojom/interface_factory.mojom
index cf43b8cd012..5bea32e2c3c 100644
--- a/chromium/media/mojo/mojom/interface_factory.mojom
+++ b/chromium/media/mojo/mojom/interface_factory.mojom
@@ -7,6 +7,7 @@ module media.mojom;
import "media/mojo/mojom/audio_decoder.mojom";
import "media/mojo/mojom/decryptor.mojom";
import "media/mojo/mojom/content_decryption_module.mojom";
+import "media/mojo/mojom/media_log.mojom";
import "media/mojo/mojom/renderer.mojom";
import "media/mojo/mojom/renderer_extensions.mojom";
import "media/mojo/mojom/video_decoder.mojom";
@@ -51,6 +52,7 @@ interface InterfaceFactory {
// - |renderer_extension| is bound in MediaFoundationRenderer, and receives
// calls from MediaFoundationRendererClient.
CreateMediaFoundationRenderer(
+ pending_remote<MediaLog> media_log,
pending_receiver<Renderer> renderer,
pending_receiver<MediaFoundationRendererExtension> renderer_extension);
diff --git a/chromium/media/mojo/mojom/media_foundation_cdm_data_mojom_traits.cc b/chromium/media/mojo/mojom/media_foundation_cdm_data_mojom_traits.cc
new file mode 100644
index 00000000000..984d1efe433
--- /dev/null
+++ b/chromium/media/mojo/mojom/media_foundation_cdm_data_mojom_traits.cc
@@ -0,0 +1,33 @@
+// Copyright 2021 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/mojo/mojom/media_foundation_cdm_data_mojom_traits.h"
+
+#include "third_party/abseil-cpp/absl/types/optional.h"
+
+namespace mojo {
+
+// static
+bool StructTraits<media::mojom::MediaFoundationCdmDataDataView,
+ std::unique_ptr<media::MediaFoundationCdmData>>::
+ Read(media::mojom::MediaFoundationCdmDataDataView input,
+ std::unique_ptr<media::MediaFoundationCdmData>* output) {
+ base::UnguessableToken origin_id;
+ if (!input.ReadOriginId(&origin_id))
+ return false;
+
+ absl::optional<std::vector<uint8_t>> client_token;
+ if (!input.ReadClientToken(&client_token))
+ return false;
+
+ base::FilePath cdm_store_path_root;
+ if (!input.ReadCdmStorePathRoot(&cdm_store_path_root))
+ return false;
+
+ *output = std::make_unique<media::MediaFoundationCdmData>(
+ origin_id, std::move(client_token), std::move(cdm_store_path_root));
+ return true;
+}
+
+} // namespace mojo
diff --git a/chromium/media/mojo/mojom/media_foundation_cdm_data_mojom_traits.h b/chromium/media/mojo/mojom/media_foundation_cdm_data_mojom_traits.h
new file mode 100644
index 00000000000..5055a8f09f3
--- /dev/null
+++ b/chromium/media/mojo/mojom/media_foundation_cdm_data_mojom_traits.h
@@ -0,0 +1,41 @@
+// Copyright 2021 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_MOJO_MOJOM_MEDIA_FOUNDATION_CDM_DATA_MOJOM_TRAITS_H_
+#define MEDIA_MOJO_MOJOM_MEDIA_FOUNDATION_CDM_DATA_MOJOM_TRAITS_H_
+
+#include <vector>
+
+#include "base/files/file_path.h"
+#include "base/unguessable_token.h"
+#include "media/cdm/media_foundation_cdm_data.h"
+#include "media/mojo/mojom/cdm_document_service.mojom.h"
+
+namespace mojo {
+
+template <>
+struct StructTraits<media::mojom::MediaFoundationCdmDataDataView,
+ std::unique_ptr<media::MediaFoundationCdmData>> {
+ static const base::UnguessableToken& origin_id(
+ const std::unique_ptr<media::MediaFoundationCdmData>& input) {
+ return input->origin_id;
+ }
+
+ static const absl::optional<std::vector<uint8_t>>& client_token(
+ const std::unique_ptr<media::MediaFoundationCdmData>& input) {
+ return input->client_token;
+ }
+
+ static const base::FilePath& cdm_store_path_root(
+ const std::unique_ptr<media::MediaFoundationCdmData>& input) {
+ return input->cdm_store_path_root;
+ }
+
+ static bool Read(media::mojom::MediaFoundationCdmDataDataView input,
+ std::unique_ptr<media::MediaFoundationCdmData>* output);
+};
+
+} // namespace mojo
+
+#endif // MEDIA_MOJO_MOJOM_MEDIA_FOUNDATION_CDM_DATA_MOJOM_TRAITS_H_
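The `Read()` implementation above follows the usual StructTraits shape: read each field into a local, return `false` on the first failure, and only then assemble the output object. Stripped of mojo entirely, the pattern is plain fallible field-by-field deserialization; the sketch below uses invented types purely to show that shape:

```cpp
// fallible_read_sketch.cc -- the early-return deserialization shape used by
// the StructTraits above, without any mojo types.
#include <cstdint>
#include <iostream>
#include <memory>
#include <optional>
#include <string>
#include <vector>

struct CdmDataView {
  // Each reader mimics mojo's "fill the out-param, return false on failure".
  bool ReadOriginId(std::string* out) const { *out = "origin-id"; return true; }
  bool ReadClientToken(std::optional<std::vector<uint8_t>>* out) const {
    *out = std::vector<uint8_t>{1, 2, 3};
    return true;
  }
  bool ReadStorePathRoot(std::string* out) const { *out = "/cdm"; return true; }
};

struct CdmData {
  std::string origin_id;
  std::optional<std::vector<uint8_t>> client_token;
  std::string store_path_root;
};

bool Read(const CdmDataView& input, std::unique_ptr<CdmData>* output) {
  std::string origin_id;
  if (!input.ReadOriginId(&origin_id)) return false;

  std::optional<std::vector<uint8_t>> client_token;
  if (!input.ReadClientToken(&client_token)) return false;

  std::string store_path_root;
  if (!input.ReadStorePathRoot(&store_path_root)) return false;

  *output = std::make_unique<CdmData>(CdmData{
      std::move(origin_id), std::move(client_token),
      std::move(store_path_root)});
  return true;
}

int main() {
  std::unique_ptr<CdmData> data;
  std::cout << (Read(CdmDataView{}, &data) ? "ok" : "failed") << "\n";
  return 0;
}
```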
diff --git a/chromium/media/mojo/mojom/media_player.mojom b/chromium/media/mojo/mojom/media_player.mojom
index 2d7ce73591e..83f8adef789 100644
--- a/chromium/media/mojo/mojom/media_player.mojom
+++ b/chromium/media/mojo/mojom/media_player.mojom
@@ -9,7 +9,8 @@ import "mojo/public/mojom/base/time.mojom";
import "services/media_session/public/mojom/media_session.mojom";
import "ui/gfx/geometry/mojom/geometry.mojom";
-// Implemented by HTMLMediaElement in the renderer process.
+// Implemented by HTMLMediaElement in the renderer process to allow the
+// browser to control media playback.
interface MediaPlayer {
// Requests the media player to start or resume media playback.
RequestPlay();
@@ -32,6 +33,9 @@ interface MediaPlayer {
// Requests the media player to exit the Picture-in-Picture mode.
RequestExitPictureInPicture();
+ // Requests the media player to mute or unmute.
+ RequestMute(bool mute);
+
// Set the volume multiplier to control audio ducking.
// Output volume should be set to |player_volume| * |multiplier|. The range
// of |multiplier| is [0, 1], where 1 indicates normal (non-ducked) playback.
@@ -52,6 +56,13 @@ interface MediaPlayer {
SuspendForFrameClosed();
};
+// Implemented by the MediaWebContentsObserver. The remote lives in the renderer
+// process and the receiver lives in the browser process.
+interface MediaPlayerObserverClient {
+ // Gets a flag indicating whether media has been played before.
+ GetHasPlayedBefore() => (bool has_played_before);
+};
+
// Implemented by MediaWebContentsObserver::MediaPlayerObserverHostImpl in the
// browser process.
interface MediaPlayerObserver {
diff --git a/chromium/media/mojo/mojom/media_service.mojom b/chromium/media/mojo/mojom/media_service.mojom
index 738b54a72c7..8c641b89d50 100644
--- a/chromium/media/mojo/mojom/media_service.mojom
+++ b/chromium/media/mojo/mojom/media_service.mojom
@@ -6,11 +6,22 @@ module media.mojom;
import "media/mojo/mojom/frame_interface_factory.mojom";
import "media/mojo/mojom/interface_factory.mojom";
+import "sandbox/policy/mojom/sandbox.mojom";
+
+// Determined in BUILD.gn from //media/media_options.gni.
+[EnableIf=mojo_media_in_browser]
+const sandbox.mojom.Sandbox kMediaSandbox = sandbox.mojom.Sandbox.kNoSandbox;
+[EnableIf=mojo_media_in_gpu]
+const sandbox.mojom.Sandbox kMediaSandbox = sandbox.mojom.Sandbox.kGpu;
+// This placeholder is required to allow compilation.
+[EnableIf=mojo_media_service_unused]
+const sandbox.mojom.Sandbox kMediaSandbox = sandbox.mojom.Sandbox.kService;
// A service to provide media InterfaceFactory, typically to the media pipeline
// running in the renderer process. The service itself runs in the process
// specified by the |mojo_media_host| gn build flag. The service is always
// connected from the browser process.
+[ServiceSandbox=kMediaSandbox]
interface MediaService {
// Requests an InterfaceFactory. |frame_interfaces| can optionally be used to
// provide interfaces hosted by the caller to the remote InterfaceFactory
diff --git a/chromium/media/mojo/mojom/media_types.mojom b/chromium/media/mojo/mojom/media_types.mojom
index 06306d604b5..b9fd9953890 100644
--- a/chromium/media/mojo/mojom/media_types.mojom
+++ b/chromium/media/mojo/mojom/media_types.mojom
@@ -121,6 +121,7 @@ enum MediaStatusState;
// This defines a mojo transport format for media::EncryptionPattern
// See media/base/encryption_pattern.h for description.
+[Stable]
struct EncryptionPattern {
uint32 crypt_byte_block;
uint32 skip_byte_block;
@@ -138,6 +139,20 @@ enum EncryptionType {
kEncryptedWithClearLead,
};
+// See media/base/svc_scalability_mode.h for description.
+// This mojo enum only list hardware codec supported scalability mode.
+enum SVCScalabilityMode {
+  // kUnsupportedMode exists to handle the mismatch between the C++ and Mojo
+  // SVCScalabilityMode enums in ToMojom; it should not be used anywhere else.
+ kUnsupportedMode,
+ kL1T2,
+ kL1T3,
+ kL2T2Key,
+ kL2T3Key,
+ kL3T2Key,
+ kL3T3Key,
+};
+
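The enum values follow the WebRTC SVC naming convention: `L<n>` gives the number of spatial layers, `T<m>` the number of temporal layers, and a `Key` suffix marks K-SVC modes where inter-layer prediction is limited to key frames. A small self-contained helper (illustrative only, not part of the media code) that decodes that naming:

```cpp
// svc_mode_name_sketch.cc -- decodes the LxTy(Key) naming convention used by
// the SVCScalabilityMode values above. Illustrative only.
#include <iostream>
#include <string>

struct ScalabilityInfo {
  int spatial_layers = 0;
  int temporal_layers = 0;
  bool key_frame_dependency = false;  // "Key" suffix: K-SVC.
};

ScalabilityInfo ParseMode(const std::string& name) {
  // Expected shapes: "L1T2", "L1T3", "L2T2Key", "L3T3Key", ...
  ScalabilityInfo info;
  info.spatial_layers = name.at(1) - '0';
  info.temporal_layers = name.at(3) - '0';
  info.key_frame_dependency = name.size() > 4 && name.substr(4) == "Key";
  return info;
}

int main() {
  for (const std::string& mode : {"L1T2", "L1T3", "L2T2Key", "L3T3Key"}) {
    const ScalabilityInfo info = ParseMode(mode);
    std::cout << mode << ": " << info.spatial_layers << " spatial, "
              << info.temporal_layers << " temporal, K-SVC="
              << (info.key_frame_dependency ? "yes" : "no") << "\n";
  }
  return 0;
}
```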
// This defines a mojo transport format for media::VideoColorSpace.
// See media/base/video_color_space.h for description.
struct VideoColorSpace {
@@ -174,6 +189,7 @@ struct AudioDecoderConfig {
AudioCodecProfile profile;
ChannelLayout target_output_channel_layout;
bool should_discard_decoder_delay;
+ array<uint8> aac_extra_data;
};
// This defines a mojo transport format for media::VideoDecoderConfig.
@@ -475,12 +491,17 @@ struct VideoPipelineInfo {
};
// See media/base/status.h for descriptions.
-struct Status {
- StatusCode code;
- string? message;
+struct StatusData {
+ string group;
+ uint16 code;
+ string message;
array<mojo_base.mojom.Value> frames;
- array<media.mojom.Status> causes;
- mojo_base.mojom.Value? data;
+ array<StatusData> causes;
+ mojo_base.mojom.Value data;
+};
+
+struct Status {
+ StatusData? internal;
};
// Types of media stream, categorised by the media stream's source.
diff --git a/chromium/media/mojo/mojom/media_types_enum_mojom_traits.h b/chromium/media/mojo/mojom/media_types_enum_mojom_traits.h
index 62c32513948..76e89c824ba 100644
--- a/chromium/media/mojo/mojom/media_types_enum_mojom_traits.h
+++ b/chromium/media/mojo/mojom/media_types_enum_mojom_traits.h
@@ -7,6 +7,7 @@
#include "base/notreached.h"
#include "media/base/renderer_factory_selector.h"
+#include "media/base/svc_scalability_mode.h"
#include "media/base/video_frame_metadata.h"
#include "media/base/video_transformation.h"
#include "media/mojo/mojom/media_types.mojom-shared.h"
@@ -109,6 +110,84 @@ struct EnumTraits<media::mojom::EncryptionType, ::media::EncryptionType> {
};
template <>
+struct EnumTraits<media::mojom::SVCScalabilityMode, media::SVCScalabilityMode> {
+ static media::mojom::SVCScalabilityMode ToMojom(
+ media::SVCScalabilityMode input) {
+ switch (input) {
+ case media::SVCScalabilityMode::kL1T2:
+ return media::mojom::SVCScalabilityMode::kL1T2;
+ case media::SVCScalabilityMode::kL1T3:
+ return media::mojom::SVCScalabilityMode::kL1T3;
+ case media::SVCScalabilityMode::kL2T2Key:
+ return media::mojom::SVCScalabilityMode::kL2T2Key;
+ case media::SVCScalabilityMode::kL2T3Key:
+ return media::mojom::SVCScalabilityMode::kL2T3Key;
+ case media::SVCScalabilityMode::kL3T2Key:
+ return media::mojom::SVCScalabilityMode::kL3T2Key;
+ case media::SVCScalabilityMode::kL3T3Key:
+ return media::mojom::SVCScalabilityMode::kL3T3Key;
+ case media::SVCScalabilityMode::kL2T1:
+ case media::SVCScalabilityMode::kL2T2:
+ case media::SVCScalabilityMode::kL2T3:
+ case media::SVCScalabilityMode::kL3T1:
+ case media::SVCScalabilityMode::kL3T2:
+ case media::SVCScalabilityMode::kL3T3:
+ case media::SVCScalabilityMode::kL2T1h:
+ case media::SVCScalabilityMode::kL2T2h:
+ case media::SVCScalabilityMode::kL2T3h:
+ case media::SVCScalabilityMode::kS2T1:
+ case media::SVCScalabilityMode::kS2T2:
+ case media::SVCScalabilityMode::kS2T3:
+ case media::SVCScalabilityMode::kS2T1h:
+ case media::SVCScalabilityMode::kS2T2h:
+ case media::SVCScalabilityMode::kS2T3h:
+ case media::SVCScalabilityMode::kS3T1:
+ case media::SVCScalabilityMode::kS3T2:
+ case media::SVCScalabilityMode::kS3T3:
+ case media::SVCScalabilityMode::kS3T1h:
+ case media::SVCScalabilityMode::kS3T2h:
+ case media::SVCScalabilityMode::kS3T3h:
+ case media::SVCScalabilityMode::kL2T2KeyShift:
+ case media::SVCScalabilityMode::kL2T3KeyShift:
+ case media::SVCScalabilityMode::kL3T2KeyShift:
+ case media::SVCScalabilityMode::kL3T3KeyShift:
+ NOTREACHED();
+ return media::mojom::SVCScalabilityMode::kUnsupportedMode;
+ }
+ }
+
+ static bool FromMojom(media::mojom::SVCScalabilityMode input,
+ media::SVCScalabilityMode* output) {
+ switch (input) {
+ case media::mojom::SVCScalabilityMode::kUnsupportedMode:
+ NOTREACHED();
+ return false;
+ case media::mojom::SVCScalabilityMode::kL1T2:
+ *output = media::SVCScalabilityMode::kL1T2;
+ return true;
+ case media::mojom::SVCScalabilityMode::kL1T3:
+ *output = media::SVCScalabilityMode::kL1T3;
+ return true;
+ case media::mojom::SVCScalabilityMode::kL2T2Key:
+ *output = media::SVCScalabilityMode::kL2T2Key;
+ return true;
+ case media::mojom::SVCScalabilityMode::kL2T3Key:
+ *output = media::SVCScalabilityMode::kL2T3Key;
+ return true;
+ case media::mojom::SVCScalabilityMode::kL3T2Key:
+ *output = media::SVCScalabilityMode::kL3T2Key;
+ return true;
+ case media::mojom::SVCScalabilityMode::kL3T3Key:
+ *output = media::SVCScalabilityMode::kL3T3Key;
+ return true;
+ }
+
+ NOTREACHED();
+ return false;
+ }
+};
+
+template <>
struct EnumTraits<media::mojom::VideoRotation, ::media::VideoRotation> {
static media::mojom::VideoRotation ToMojom(::media::VideoRotation input) {
switch (input) {
diff --git a/chromium/media/mojo/mojom/renderer_extensions.mojom b/chromium/media/mojo/mojom/renderer_extensions.mojom
index 389626afacd..9672c158818 100644
--- a/chromium/media/mojo/mojom/renderer_extensions.mojom
+++ b/chromium/media/mojo/mojom/renderer_extensions.mojom
@@ -64,6 +64,11 @@ interface MediaFoundationRendererExtension {
// Notifies whether video is enabled.
SetVideoStreamEnabled(bool enabled);
- // Notifies of output composition parameters.
- SetOutputParams(gfx.mojom.Rect rect);
+  // Notifies of output composition parameters. It might fail if
+  // MediaFoundationRenderer runs into an error while setting the `rect`
+  // information onto MFMediaEngine. If it fails, the video will be displayed
+  // incorrectly (e.g. a smaller video rendered at the corner of the video
+  // output area). In case of failure, the caller should not use the `rect` for
+  // further operations.
+ SetOutputRect(gfx.mojom.Rect rect) => (bool success);
};
diff --git a/chromium/media/mojo/mojom/speech_recognition_result_mojom_traits.cc b/chromium/media/mojo/mojom/speech_recognition_result_mojom_traits.cc
index e5205a39446..623083391e4 100644
--- a/chromium/media/mojo/mojom/speech_recognition_result_mojom_traits.cc
+++ b/chromium/media/mojo/mojom/speech_recognition_result_mojom_traits.cc
@@ -8,7 +8,7 @@ namespace mojo {
namespace {
-constexpr base::TimeDelta kZeroTime = base::TimeDelta::FromSeconds(0);
+constexpr base::TimeDelta kZeroTime = base::Seconds(0);
} // namespace
diff --git a/chromium/media/mojo/mojom/speech_recognition_result_mojom_traits_unittest.cc b/chromium/media/mojo/mojom/speech_recognition_result_mojom_traits_unittest.cc
index 42a83fbffb8..cf52fb7d81b 100644
--- a/chromium/media/mojo/mojom/speech_recognition_result_mojom_traits_unittest.cc
+++ b/chromium/media/mojo/mojom/speech_recognition_result_mojom_traits_unittest.cc
@@ -14,8 +14,7 @@ namespace media {
namespace {
-base::TimeDelta kZeroTime = base::TimeDelta::FromSeconds(0);
-
+base::TimeDelta kZeroTime = base::Seconds(0);
}
TEST(SpeechRecognitionResultStructTraitsTest, NoTimingInformation) {
@@ -32,8 +31,7 @@ TEST(SpeechRecognitionResultStructTraitsTest, WithTimingInformation) {
media::SpeechRecognitionResult invalid_result("hello world", true);
invalid_result.timing_information = media::TimingInformation();
invalid_result.timing_information->audio_start_time = kZeroTime;
- invalid_result.timing_information->audio_end_time =
- base::TimeDelta::FromSeconds(-1);
+ invalid_result.timing_information->audio_end_time = base::Seconds(-1);
std::vector<uint8_t> data =
media::mojom::SpeechRecognitionResult::Serialize(&invalid_result);
media::SpeechRecognitionResult output;
@@ -43,8 +41,7 @@ TEST(SpeechRecognitionResultStructTraitsTest, WithTimingInformation) {
media::SpeechRecognitionResult valid_result("hello world", true);
valid_result.timing_information = media::TimingInformation();
valid_result.timing_information->audio_start_time = kZeroTime;
- valid_result.timing_information->audio_end_time =
- base::TimeDelta::FromSeconds(1);
+ valid_result.timing_information->audio_end_time = base::Seconds(1);
std::vector<uint8_t> valid_data =
media::mojom::SpeechRecognitionResult::Serialize(&valid_result);
media::SpeechRecognitionResult valid_output;
@@ -58,8 +55,7 @@ TEST(SpeechRecognitionResultStructTraitsTest,
media::SpeechRecognitionResult invalid_result("hello world", false);
invalid_result.timing_information = media::TimingInformation();
invalid_result.timing_information->audio_start_time = kZeroTime;
- invalid_result.timing_information->audio_end_time =
- base::TimeDelta::FromSeconds(1);
+ invalid_result.timing_information->audio_end_time = base::Seconds(1);
std::vector<uint8_t> invalid_data =
media::mojom::SpeechRecognitionResult::Serialize(&invalid_result);
media::SpeechRecognitionResult invalid_output;
@@ -73,16 +69,15 @@ TEST(SpeechRecognitionResultStructTraitsTest, WithInvalidHypothesisParts) {
media::SpeechRecognitionResult invalid_result("hello world", true);
invalid_result.timing_information = media::TimingInformation();
invalid_result.timing_information->audio_start_time = kZeroTime;
- invalid_result.timing_information->audio_end_time =
- base::TimeDelta::FromSeconds(1);
+ invalid_result.timing_information->audio_end_time = base::Seconds(1);
invalid_result.timing_information->hypothesis_parts =
std::vector<media::HypothesisParts>();
auto& hypothesis_parts =
invalid_result.timing_information->hypothesis_parts.value();
hypothesis_parts.emplace_back(std::vector<std::string>({"hello"}),
- base::TimeDelta::FromSeconds(-1));
+ base::Seconds(-1));
hypothesis_parts.emplace_back(std::vector<std::string>({"world"}),
- base::TimeDelta::FromSeconds(1));
+ base::Seconds(1));
std::vector<uint8_t> data =
media::mojom::SpeechRecognitionResult::Serialize(&invalid_result);
media::SpeechRecognitionResult output;
@@ -94,16 +89,15 @@ TEST(SpeechRecognitionResultStructTraitsTest, WithValidHypothesisParts) {
media::SpeechRecognitionResult valid_result("hello world", true);
valid_result.timing_information = media::TimingInformation();
valid_result.timing_information->audio_start_time = kZeroTime;
- valid_result.timing_information->audio_end_time =
- base::TimeDelta::FromSeconds(2);
+ valid_result.timing_information->audio_end_time = base::Seconds(2);
valid_result.timing_information->hypothesis_parts =
std::vector<media::HypothesisParts>();
auto& hypothesis_parts =
valid_result.timing_information->hypothesis_parts.value();
hypothesis_parts.emplace_back(std::vector<std::string>({"hello"}),
- base::TimeDelta::FromSeconds(0));
+ base::Seconds(0));
hypothesis_parts.emplace_back(std::vector<std::string>({"world"}),
- base::TimeDelta::FromSeconds(1));
+ base::Seconds(1));
std::vector<uint8_t> data =
media::mojom::SpeechRecognitionResult::Serialize(&valid_result);
media::SpeechRecognitionResult output;
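A note on the mechanical change above: this update renames the base::TimeDelta
static factory methods to free functions across the tree. A minimal sketch of
the two equivalent spellings, assuming only base/time/time.h (both still
compile at this revision):

#include "base/time/time.h"

// The free function replaces the old static factory; the value is identical.
bool TimeDeltaSpellingsAgree() {
  base::TimeDelta old_style = base::TimeDelta::FromSeconds(5);  // pre-rename
  base::TimeDelta new_style = base::Seconds(5);                 // post-rename
  return old_style == new_style;  // Always true.
}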
diff --git a/chromium/media/mojo/mojom/speech_recognition_service.mojom b/chromium/media/mojo/mojom/speech_recognition_service.mojom
index 985168244dc..e510b9765fc 100644
--- a/chromium/media/mojo/mojom/speech_recognition_service.mojom
+++ b/chromium/media/mojo/mojom/speech_recognition_service.mojom
@@ -9,6 +9,7 @@ import "media/mojo/mojom/audio_stream_factory.mojom";
import "media/mojo/mojom/media_types.mojom";
import "mojo/public/mojom/base/file_path.mojom";
import "mojo/public/mojom/base/time.mojom";
+import "sandbox/policy/mojom/sandbox.mojom";
import "services/network/public/mojom/url_loader_factory.mojom";
// Corresponds to the LangIdEvent.ConfidenceInterval defined in
@@ -47,6 +48,7 @@ interface SpeechRecognitionContext {
// The main interface to a speech recognition service process.
// Used by the browser to issue top-level control requests to the service,
// acquired during process launch.
+[ServiceSandbox=sandbox.mojom.Sandbox.kSpeechRecognition]
interface SpeechRecognitionService {
// Bind the context to a new instance of the speech recognition service.
BindContext(pending_receiver<SpeechRecognitionContext> context);
diff --git a/chromium/media/mojo/mojom/stable/BUILD.gn b/chromium/media/mojo/mojom/stable/BUILD.gn
new file mode 100644
index 00000000000..67a67542d8e
--- /dev/null
+++ b/chromium/media/mojo/mojom/stable/BUILD.gn
@@ -0,0 +1,23 @@
+# Copyright 2021 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//mojo/public/tools/bindings/mojom.gni")
+
+mojom("stable_video_decoder") {
+ sources = [
+ "stable_video_decoder.mojom",
+ "stable_video_decoder_types.mojom",
+ ]
+
+ public_deps = [
+ "//media/mojo/mojom",
+ "//mojo/public/mojom/base",
+ "//ui/gfx/geometry/mojom",
+ ]
+
+ cpp_proxy_target = "//media/mojo/services"
+ export_class_attribute = "MEDIA_MOJO_EXPORT"
+ export_define = "MEDIA_MOJO_IMPLEMENTATION"
+ export_header = "media/mojo/services/media_mojo_export.h"
+}
diff --git a/chromium/media/mojo/mojom/stable/OWNERS b/chromium/media/mojo/mojom/stable/OWNERS
new file mode 100644
index 00000000000..08850f42120
--- /dev/null
+++ b/chromium/media/mojo/mojom/stable/OWNERS
@@ -0,0 +1,2 @@
+per-file *.mojom=set noparent
+per-file *.mojom=file://ipc/SECURITY_OWNERS
diff --git a/chromium/media/mojo/mojom/stable/README b/chromium/media/mojo/mojom/stable/README
new file mode 100644
index 00000000000..77c2a4f5c72
--- /dev/null
+++ b/chromium/media/mojo/mojom/stable/README
@@ -0,0 +1,3 @@
+This directory contains the Mojo API used both by LaCrOS and by out-of-process
+video decoding. This API is a stable version of VideoDecoder and should only be
+used for the mentioned purposes.
diff --git a/chromium/media/mojo/mojom/stable/stable_video_decoder.mojom b/chromium/media/mojo/mojom/stable/stable_video_decoder.mojom
new file mode 100644
index 00000000000..dbd33e276c9
--- /dev/null
+++ b/chromium/media/mojo/mojom/stable/stable_video_decoder.mojom
@@ -0,0 +1,113 @@
+// Copyright 2021 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+module media.stable.mojom;
+
+import "media/mojo/mojom/stable/stable_video_decoder_types.mojom";
+import "mojo/public/mojom/base/unguessable_token.mojom";
+
+// This API is a stable version of VideoDecoder. This is used both by LaCrOS and
+// by out-of-process video decoding to allow the GPU process to forward video
+// decoding requests to a video decoder process.
+// In order to avoid depending on unstable definitions or on components that
+// would cause cyclic dependencies, similar but occasionally simplified
+// versions of these structures are used rather than depending directly on
+// the structures in other components.
+
+// Based on |media.mojom.MediaLog| but does not depend on
+// |media.mojom.MediaLogRecord|.
+// Next min method ID: 1
+[Stable, Uuid="2e4c1aed-fd62-40e6-8601-e5c4288246c0"]
+interface MediaLog {
+ // Adds a log record to a MediaLog service.
+ AddLogRecord@0(MediaLogRecord event);
+};
+
+// Based on |media.mojom.VideoFrameHandleReleaser| but does not depend on
+// |gpu.mojom.SyncToken|.
+// Next min method ID: 1
+[Stable, Uuid="8afdcf21-99d7-4864-a957-75d2a7e17da6"]
+interface VideoFrameHandleReleaser {
+ // Signals that the VideoFrame identified by |release_token| should be
+ // released.
+ ReleaseVideoFrame@0(mojo_base.mojom.UnguessableToken release_token);
+};
+
+// Based on |media.mojom.VideoDecoderClient| but does not depend on
+// |media.mojom.VideoFrame| or |media.mojom.WaitingReason|.
+// Next min method ID: 2
+[Stable, Uuid="8a6fce77-7fcc-42e1-ac74-443859039696"]
+interface VideoDecoderClient {
+ // Output a decoded frame. Frames are output in presentation order.
+ //
+ // When |can_read_without_stalling| is false, preroll should be disabled. This
+ // is necessary if the decoder cannot guarantee that it can output another
+ // frame, for example if output buffers are limited or configuration changes
+ // require the return of all outstanding frames.
+ //
+  // If |release_token| is provided, the client shall call
+  // VideoFrameHandleReleaser::ReleaseVideoFrame() when done with |frame|.
+ OnVideoFrameDecoded@0(VideoFrame frame,
+ bool can_read_without_stalling,
+ mojo_base.mojom.UnguessableToken? release_token);
+
+  // Called when the remote decoder is waiting because of |reason|, e.g.
+  // waiting for a decryption key.
+ OnWaiting@1(WaitingReason reason);
+};
+
+// TODO(b/194429120): Implement when protected content support is integrated.
+[Stable, Uuid="33c7a00e-2970-41b3-8c7b-f1074a539740"]
+interface StableCdmContext {
+};
+
+// Based on |media.mojom.VideoDecoder|.
+// Next min method ID: 5
+[Stable, Uuid="85611470-3e87-43a9-ac75-a11a63e76415"]
+interface StableVideoDecoder {
+  // Returns a list of supported configs as well as the VideoDecoderType of
+  // the decoder that supports them. It is expected that Initialize() will
+  // fail for any config that does not match an entry in this list.
+ //
+ // May be called before Construct().
+ GetSupportedConfigs@0() =>
+ (array<SupportedVideoDecoderConfig> supported_configs,
+ VideoDecoderType decoder_type);
+
+ // Initialize the decoder. This must be called before any method other than
+ // GetSupportedConfigs().
+ Construct@1(
+ pending_associated_remote<VideoDecoderClient> client,
+ pending_remote<MediaLog> media_log,
+ pending_receiver<VideoFrameHandleReleaser> video_frame_handle_releaser,
+ handle<data_pipe_consumer> decoder_buffer_pipe,
+ ColorSpace target_color_space);
+
+ // Configure (or reconfigure) the decoder. This must be called before decoding
+ // any frames, and must not be called while there are pending Initialize(),
+ // Decode(), or Reset() requests.
+ Initialize@2(VideoDecoderConfig config, bool low_delay,
+ pending_remote<StableCdmContext>? cdm_context)
+ => (Status status,
+ bool needs_bitstream_conversion,
+ int32 max_decode_requests,
+ VideoDecoderType decoder_type);
+
+ // Request decoding of exactly one frame or an EOS buffer. This must not be
+ // called while there are pending Initialize(), Reset(), or Decode(EOS)
+ // requests.
+ Decode@3(DecoderBuffer buffer) => (Status status);
+
+ // Reset the decoder. All ongoing Decode() requests must be completed or
+ // aborted before executing the callback. This must not be called while there
+ // is a pending Initialize() request.
+ Reset@4() => ();
+};
+
+// Next min method ID: 1
+[Stable, Uuid="d6047fd9-fffb-4e37-ad9b-383a1c9e1d2d"]
+interface StableVideoDecoderFactory {
+ // Used to create StableVideoDecoder interfaces.
+ CreateStableVideoDecoder@0(pending_receiver<StableVideoDecoder> receiver);
+};
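A hedged usage sketch of the factory and decoder interfaces defined above, as
seen from a C++ client of the generated bindings. The generated header path,
the unmapped *Ptr callback types, and the brokered |pending_factory| are
assumptions for illustration, not part of this change:

#include <utility>

#include "base/bind.h"
#include "media/mojo/mojom/stable/stable_video_decoder.mojom.h"
#include "mojo/public/cpp/bindings/pending_remote.h"
#include "mojo/public/cpp/bindings/remote.h"

// Illustrative only: |pending_factory| is assumed to have been brokered by
// the browser process.
void QueryStableDecoder(
    mojo::PendingRemote<media::stable::mojom::StableVideoDecoderFactory>
        pending_factory) {
  mojo::Remote<media::stable::mojom::StableVideoDecoderFactory> factory(
      std::move(pending_factory));
  mojo::Remote<media::stable::mojom::StableVideoDecoder> decoder;
  factory->CreateStableVideoDecoder(decoder.BindNewPipeAndPassReceiver());

  // GetSupportedConfigs() may be called before Construct(); Initialize() is
  // expected to fail for any config outside the returned list.
  decoder->GetSupportedConfigs(base::BindOnce(
      [](std::vector<media::stable::mojom::SupportedVideoDecoderConfigPtr>
             configs,
         media::stable::mojom::VideoDecoderType decoder_type) {
        // Pick a config, then Construct(), Initialize(), Decode(), Reset().
      }));
}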
diff --git a/chromium/media/mojo/mojom/stable/stable_video_decoder_types.mojom b/chromium/media/mojo/mojom/stable/stable_video_decoder_types.mojom
new file mode 100644
index 00000000000..3684e5d0fdc
--- /dev/null
+++ b/chromium/media/mojo/mojom/stable/stable_video_decoder_types.mojom
@@ -0,0 +1,516 @@
+// Copyright 2021 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+module media.stable.mojom;
+
+import "media/mojo/mojom/media_types.mojom";
+import "mojo/public/mojom/base/time.mojom";
+import "mojo/public/mojom/base/values.mojom";
+import "ui/gfx/geometry/mojom/geometry.mojom";
+import "ui/gfx/mojom/buffer_types.mojom";
+import "ui/gfx/mojom/native_handle_types.mojom";
+
+// Maps to |media.mojom.VideoCodec|.
+[Stable, Extensible]
+enum VideoCodec {
+ [Default] kUnknown = 0,
+ kH264,
+ kVC1,
+ kMPEG2,
+ kMPEG4,
+ kTheora,
+ kVP8,
+ kVP9,
+ kHEVC,
+ kDolbyVision,
+ kAV1,
+ // DO NOT ADD RANDOM VIDEO CODECS!
+ //
+ // The only acceptable time to add a new codec is if there is production code
+ // that uses said codec in the same CL.
+};
+
+// Maps to |media.mojom.VideoCodecProfile|.
+[Stable, Extensible]
+enum VideoCodecProfile {
+ // Keep the values in this enum unique, as they imply format (h.264 vs. VP8,
+ // for example), and keep the values for a particular format grouped
+ // together for clarity.
+ [Default] kVideoCodecProfileUnknown = -1,
+ kH264ProfileMin = 0,
+ kH264ProfileBaseline = kH264ProfileMin,
+ kH264ProfileMain = 1,
+ kH264ProfileExtended = 2,
+ kH264ProfileHigh = 3,
+ kH264ProfileHigh10 = 4,
+ kH264ProfileHigh422 = 5,
+ kH264ProfileHigh444Predictive = 6,
+ kH264ProfileScalableBaseline = 7,
+ kH264ProfileScalableHigh = 8,
+ kH264ProfileStereoHigh = 9,
+ kH264ProfileMultiviewHigh = 10,
+ kH264ProfileMax = kH264ProfileMultiviewHigh,
+ kVP8ProfileMin = 11,
+ kVP8ProfileAny = kVP8ProfileMin,
+ kVP8ProfileMax = kVP8ProfileAny,
+ kVP9ProfileMin = 12,
+ kVP9Profile0 = kVP9ProfileMin,
+ kVP9Profile1 = 13,
+ kVP9Profile2 = 14,
+ kVP9Profile3 = 15,
+ kVP9ProfileMax = kVP9Profile3,
+ kHEVCProfileMin = 16,
+ kHEVCProfileMain = kHEVCProfileMin,
+ kHEVCProfileMain10 = 17,
+ kHEVCProfileMainStillPicture = 18,
+ kHEVCProfileMax = kHEVCProfileMainStillPicture,
+ kDolbyVisionProfile0 = 19,
+ kDolbyVisionProfile4 = 20,
+ kDolbyVisionProfile5 = 21,
+ kDolbyVisionProfile7 = 22,
+ kTheoraProfileMin = 23,
+ kTheoraProfileAny = kTheoraProfileMin,
+ kTheoraProfileMax = kTheoraProfileAny,
+ kAV1ProfileMin = 24,
+ kAV1ProfileMain = kAV1ProfileMin,
+ kAV1ProfileHigh = 25,
+ kAV1ProfilePro = 26,
+ kAV1ProfileMax = kAV1ProfilePro,
+ kDolbyVisionProfile8 = 27,
+ kDolbyVisionProfile9 = 28,
+};
+
+// Based on |media.mojom.SubsampleEntry|.
+// Next min field ID: 2
+[Stable]
+struct SubsampleEntry {
+ uint32 clear_bytes@0;
+ uint32 cypher_bytes@1;
+};
+
+// Maps to |media.mojom.EncryptionScheme|.
+[Stable, Extensible]
+enum EncryptionScheme {
+ [Default] kUnencrypted = 0,
+ kCenc, // 'cenc' subsample encryption using AES-CTR mode.
+ kCbcs, // 'cbcs' pattern encryption using AES-CBC mode.
+};
+
+// Based on |media.mojom.DecryptConfig| but does not depend on
+// |media.mojom.EncryptionScheme| and |media.mojom.SubsampleEntry|.
+// Next min field ID: 5
+[Stable]
+struct DecryptConfig {
+ EncryptionScheme encryption_scheme@0;
+ string key_id@1;
+ string iv@2;
+ array<SubsampleEntry> subsamples@3;
+ media.mojom.EncryptionPattern? encryption_pattern@4;
+};
+
+// Based on |gfx.mojom.ColorVolumeMetadata|.
+// Next min field ID: 6
+[Stable]
+struct ColorVolumeMetadata {
+ gfx.mojom.PointF primary_r@0;
+ gfx.mojom.PointF primary_g@1;
+ gfx.mojom.PointF primary_b@2;
+ gfx.mojom.PointF white_point@3;
+ float luminance_max@4;
+ float luminance_min@5;
+};
+
+// Based on |gfx.mojom.HDRMetadata| but does not depend on
+// |gfx.mojom.ColorVolumeMetadata|.
+// Next min field ID: 3
+[Stable]
+struct HDRMetadata {
+ ColorVolumeMetadata color_volume_metadata@0;
+ uint32 max_content_light_level@1;
+ uint32 max_frame_average_light_level@2;
+};
+
+// Based on |media.mojom.VideoDecoderConfig| but does not depend on
+// |media.mojom.VideoCodec|, |media.mojom.VideoCodecProfile|,
+// |media.mojom.VideoTransformation|, |media.mojom.EncryptionScheme|,
+// |media.mojom.VideoColorSpace| and |gfx.mojom.HDRMetadata|.
+// Next min field ID: 11
+[Stable]
+struct VideoDecoderConfig {
+ VideoCodec codec@0;
+ VideoCodecProfile profile@1;
+ uint32 level@2;
+ bool has_alpha@3;
+ gfx.mojom.Size coded_size@4;
+ gfx.mojom.Rect visible_rect@5;
+ gfx.mojom.Size natural_size@6;
+ array<uint8> extra_data@7;
+ EncryptionScheme encryption_scheme@8;
+ ColorSpace color_space_info@9;
+ HDRMetadata? hdr_metadata@10;
+};
+
+// Based on |media.mojom.SupportedVideoDecoderConfig| but does not
+// depend on |media.mojom.VideoCodecProfile|
+// Next min field ID: 6
+[Stable]
+struct SupportedVideoDecoderConfig {
+ // Range of VideoCodecProfiles to match, inclusive.
+ VideoCodecProfile profile_min@0;
+ VideoCodecProfile profile_max@1;
+
+ // Range of coded sizes to match, inclusive in each dimension.
+ gfx.mojom.Size coded_size_min@2;
+ gfx.mojom.Size coded_size_max@3;
+
+ // Match configs that have encryption configured.
+ bool allow_encrypted@4;
+
+ // Do not match configs that do not have encryption configured.
+ bool require_encrypted@5;
+};
+
+[Stable, Extensible]
+enum StatusCode {
+ kOk,
+ kAborted,
+ [Default] kError,
+};
+
+// Based on |media.mojom.StatusData|.
+// Next min field ID: 6
+[Stable]
+struct StatusData {
+ string group@0;
+ StatusCode code@1;
+ string message@2;
+ array<mojo_base.mojom.Value> frames@3;
+ array<StatusData> causes@4;
+ mojo_base.mojom.Value data@5;
+};
+
+// Based on |media.mojom.Status|.
+// Next min field ID: 1
+[Stable]
+struct Status {
+ StatusData? internal@0;
+};
+
+// Based on |media.mojom.DecoderBuffer| but does not depend on
+// |media.mojom.DecryptConfig|.
+// Next min field ID: 9
+[Stable]
+struct DecoderBuffer {
+ mojo_base.mojom.TimeDelta timestamp@0;
+ mojo_base.mojom.TimeDelta duration@1;
+
+ bool is_end_of_stream@2;
+
+ uint32 data_size@3;
+
+ bool is_key_frame@4;
+
+ array<uint8> side_data@5;
+
+ DecryptConfig? decrypt_config@6;
+
+ mojo_base.mojom.TimeDelta front_discard@7;
+ mojo_base.mojom.TimeDelta back_discard@8;
+};
+
+// Maps to |media.mojom.VideoDecoderType|.
+[Stable, Extensible]
+enum VideoDecoderType {
+ [Default] kUnknown,
+ kVaapi,
+ kVda,
+ kV4L2,
+ kTesting,
+};
+
+// Maps to |gfx.mojom.ColorSpacePrimaryID|.
+[Stable, Extensible]
+enum ColorSpacePrimaryID {
+ [Default] kInvalid,
+ kBT709,
+ kBT470M,
+ kBT470BG,
+ kSMPTE170M,
+ kSMPTE240M,
+ kFilm,
+ kBT2020,
+ kSMPTEST428_1,
+ kSMPTEST431_2,
+ kSMPTEST432_1,
+ kXYZ_D50,
+ kAdobeRGB,
+ kAppleGenericRGB,
+ kWideGamutColorSpin,
+ kCustom,
+};
+
+// Maps to |gfx.mojom.ColorSpaceTransferID|.
+[Stable, Extensible]
+enum ColorSpaceTransferID {
+ [Default] kInvalid,
+ kBT709,
+ kBT709Apple,
+ kGamma18,
+ kGamma22,
+ kGamma24,
+ kGamma28,
+ kSMPTE170M,
+ kSMPTE240M,
+ kLinear,
+ kLog,
+ kLogSqrt,
+ kIEC61966_2_4,
+ kBT1361_ECG,
+ kIEC61966_2_1,
+ kBT2020_10,
+ kBT2020_12,
+ kSMPTEST2084,
+ kSMPTEST428_1,
+ kARIB_STD_B67,
+ kIEC61966_2_1_HDR,
+ kLinearHDR,
+ kCustom,
+ kCustomHDR,
+ kPiecewiseHDR,
+};
+
+// Maps to |gfx.mojom.ColorSpaceMatrixID|.
+[Stable, Extensible]
+enum ColorSpaceMatrixID {
+ [Default] kInvalid,
+ kRGB,
+ kBT709,
+ kFCC,
+ kBT470BG,
+ kSMPTE170M,
+ kSMPTE240M,
+ kYCOCG,
+ kBT2020_NCL,
+ kBT2020_CL,
+ kYDZDX,
+ kGBR,
+};
+
+// Maps to |gfx.mojom.ColorSpaceRangeID|.
+[Stable, Extensible]
+enum ColorSpaceRangeID {
+ [Default] kInvalid,
+ kLimited,
+ kFull,
+ kDerived,
+};
+
+// Based on |gfx.mojom.ColorSpace| but does not depend on
+// |gfx.mojom.ColorSpacePrimaryID|, |gfx.mojom.ColorSpaceTransferID|,
+// |gfx.mojom.ColorSpaceMatrixID| and |gfx.mojom.ColorSpaceRangeID|.
+// Next min field ID: 6
+[Stable]
+struct ColorSpace {
+ ColorSpacePrimaryID primaries@0;
+ ColorSpaceTransferID transfer@1;
+ ColorSpaceMatrixID matrix@2;
+ ColorSpaceRangeID range@3;
+ array<float, 9> custom_primary_matrix@4;
+ array<float, 7> transfer_params@5;
+};
+
+// Maps to |media.mojom.VideoPixelFormat|.
+[Stable, Extensible]
+enum VideoPixelFormat {
+ [Default] kPixelFormatUnknown = 0, // Unknown or unspecified format value.
+ kPixelFormatI420 =
+ 1, // 12bpp YUV planar 1x1 Y, 2x2 UV samples, a.k.a. YU12.
+
+ // Note: Chrome does not actually support YVU compositing, so you probably
+ // don't actually want to use this. See http://crbug.com/784627.
+ kPixelFormatYV12 = 2, // 12bpp YVU planar 1x1 Y, 2x2 VU samples.
+
+ kPixelFormatI422 = 3, // 16bpp YUV planar 1x1 Y, 2x1 UV samples.
+ kPixelFormatI420A = 4, // 20bpp YUVA planar 1x1 Y, 2x2 UV, 1x1 A samples.
+ kPixelFormatI444 = 5, // 24bpp YUV planar, no subsampling.
+ kPixelFormatNV12 =
+ 6, // 12bpp with Y plane followed by a 2x2 interleaved UV plane.
+ kPixelFormatNV21 =
+ 7, // 12bpp with Y plane followed by a 2x2 interleaved VU plane.
+ kPixelFormatUYVY =
+ 8, // 16bpp interleaved 2x1 U, 1x1 Y, 2x1 V, 1x1 Y samples.
+ kPixelFormatYUY2 =
+ 9, // 16bpp interleaved 1x1 Y, 2x1 U, 1x1 Y, 2x1 V samples.
+ kPixelFormatARGB = 10, // 32bpp BGRA (byte-order), 1 plane.
+ kPixelFormatXRGB = 11, // 24bpp BGRX (byte-order), 1 plane.
+ kPixelFormatRGB24 = 12, // 24bpp BGR (byte-order), 1 plane.
+
+ kPixelFormatMJPEG = 14, // MJPEG compressed.
+
+ // The P* in the formats below designates the number of bits per pixel
+ // component. I.e. P9 is 9-bits per pixel component, P10 is 10-bits per pixel
+ // component, etc.
+ kPixelFormatYUV420P9 = 16,
+ kPixelFormatYUV420P10 = 17,
+ kPixelFormatYUV422P9 = 18,
+ kPixelFormatYUV422P10 = 19,
+ kPixelFormatYUV444P9 = 20,
+ kPixelFormatYUV444P10 = 21,
+ kPixelFormatYUV420P12 = 22,
+ kPixelFormatYUV422P12 = 23,
+ kPixelFormatYUV444P12 = 24,
+
+ kPixelFormatY16 = 26, // single 16bpp plane.
+
+ kPixelFormatABGR = 27, // 32bpp RGBA (byte-order), 1 plane.
+ kPixelFormatXBGR = 28, // 24bpp RGBX (byte-order), 1 plane.
+
+ kPixelFormatP016LE = 29, // 24bpp NV12, 16 bits per channel
+
+ kPixelFormatXR30 =
+ 30, // 32bpp BGRX, 10 bits per channel, 2 bits ignored, 1 plane
+ kPixelFormatXB30 =
+ 31, // 32bpp RGBX, 10 bits per channel, 2 bits ignored, 1 plane
+
+ kPixelFormatBGRA = 32, // 32bpp ARGB (byte-order), 1 plane.
+
+ kPixelFormatRGBAF16 = 33, // Half float RGBA, 1 plane.
+};
+
+// Based on |media.mojom.EosVideoFrameData|.
+[Stable]
+struct EosVideoFrameData {
+};
+
+// Based on |gfx.mojom.NativePixmapHandle|.
+// Next min field ID: 2
+[EnableIf=supports_native_pixmap, Stable]
+struct NativePixmapHandle {
+ array<gfx.mojom.NativePixmapPlane> planes@0;
+ uint64 modifier@1;
+};
+
+// Based on |gfx.mojom.GpuMemoryBufferHandle| but does not depend on
+// |gfx.mojom.GpuMemoryBufferPlatformHandle|.
+// Next min field ID: 2
+[Stable]
+struct NativeGpuMemoryBufferHandle {
+ gfx.mojom.GpuMemoryBufferId id@0;
+
+ [EnableIf=supports_native_pixmap]
+ NativePixmapHandle? platform_handle@1;
+};
+
+// Based on |media.mojom.GpuMemoryBufferVideoFrameData| but does not depend
+// on |gfx.mojom.GpuMemoryBufferHandle| or |gpu.mojom.MailboxHolder|.
+// Next min field ID: 1
+[Stable]
+struct GpuMemoryBufferVideoFrameData {
+ NativeGpuMemoryBufferHandle gpu_memory_buffer_handle@0;
+};
+
+// Based on |media.mojom.VideoFrameMetadata| but does not depend on
+// |media.mojom.VideoTransformation| or |media.mojom.CopyMode|.
+// Next min field ID: 6
+[Stable]
+struct VideoFrameMetadata {
+ bool allow_overlay@0;
+
+ bool end_of_stream@1;
+
+ bool read_lock_fences_enabled@2;
+
+ bool protected_video@3;
+
+ bool hw_protected@4;
+
+ bool power_efficient@5;
+};
+
+// Based on |media.mojom.VideoFrameData| but does not depend on
+// |media.mojom.EosVideoFrameData|, |media.mojom.SharedBufferVideoFrameData|,
+// |media.mojom.GpuMemoryBufferVideoFrameData| or
+// |media.mojom.MailboxVideoFrameData|.
+[Stable]
+union VideoFrameData {
+ EosVideoFrameData eos_data;
+ GpuMemoryBufferVideoFrameData gpu_memory_buffer_data;
+};
+
+// Based on |media.mojom.VideoFrame| but does not depend on
+// |media.mojom.VideoPixelFormat|, |media.mojom.VideoFrameData|,
+// |media.mojom.VideoFrameMetadata|, |gfx.mojom.ColorSpace| or
+// or |gfx.mojom.HDRMetadata|.
+// Next min field ID: 9
+[Stable]
+struct VideoFrame {
+ // Format of the frame.
+ VideoPixelFormat format@0;
+
+ // Width and height of the video frame, in pixels.
+ gfx.mojom.Size coded_size@1;
+
+ // Visible size of the frame.
+ gfx.mojom.Rect visible_rect@2;
+
+ // Natural size of the frame.
+ gfx.mojom.Size natural_size@3;
+
+ // Timestamp in microseconds of the associated frame.
+ mojo_base.mojom.TimeDelta timestamp@4;
+
+ // Contents of the video frame (or EOS marker).
+ VideoFrameData data@5;
+
+ // Extra properties associated with the VideoFrame.
+ VideoFrameMetadata metadata@6;
+
+ ColorSpace color_space@7;
+ HDRMetadata? hdr_metadata@8;
+};
+
+// Maps to |media.mojom.WaitingReason|.
+[Stable, Extensible]
+enum WaitingReason {
+ // The playback cannot start because "Media Data May Contain Encrypted Blocks"
+ // and no CDM is available. The playback will start after a CDM is set. See
+ // https://www.w3.org/TR/encrypted-media/#media-may-contain-encrypted-blocks
+ [Default] kNoCdm,
+
+ // The playback cannot proceed because some decryption key is not available.
+ // This could happen when the license exchange is delayed or failed. The
+ // playback will resume after the decryption key becomes available.
+ // See https://www.w3.org/TR/encrypted-media/#encrypted-block-encountered
+ kNoDecryptionKey,
+
+ // The playback cannot proceed because the decoder has lost its state, e.g.
+ // information about reference frames. Usually this only happens to hardware
+ // decoders. To recover from this state, reset the decoder and start decoding
+ // from a key frame, which can typically be accomplished by a pipeline seek.
+ kDecoderStateLost,
+};
+
+// Based on |media.mojom.MediaLogRecord|.
+// Next min field ID: 4
+[Stable]
+struct MediaLogRecord {
+ [Stable, Extensible]
+ enum Type {
+ // See media/base/media_log_message_levels.h for info.
+ [Default] kMessage,
+
+ // See media/base/media_log_properties.h for info.
+ kMediaPropertyChange,
+
+ // See media/base/media_log_events.h for info.
+ kMediaEventTriggered,
+
+ kMediaStatus,
+ };
+
+ int32 id@0;
+ Type type@1;
+ mojo_base.mojom.DictionaryValue params@2;
+ mojo_base.mojom.TimeTicks time@3;
+};
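As a rough sketch of how these structs surface in C++, the generated bindings
expose them as StructPtrs with the fields listed above. The generated header
path and the usual base::TimeDelta typemap for mojo_base.mojom.TimeDelta are
assumptions:

#include <cstdint>

#include "base/time/time.h"
#include "media/mojo/mojom/stable/stable_video_decoder_types.mojom.h"

// Fills a stable DecoderBuffer describing one non-EOS key frame; |data_size|
// is the size of the payload that travels separately over the data pipe.
media::stable::mojom::DecoderBufferPtr MakeKeyFrameBuffer(uint32_t data_size) {
  auto buffer = media::stable::mojom::DecoderBuffer::New();
  buffer->timestamp = base::Milliseconds(0);
  buffer->duration = base::Milliseconds(33);
  buffer->is_end_of_stream = false;
  buffer->data_size = data_size;
  buffer->is_key_frame = true;
  buffer->front_discard = base::TimeDelta();
  buffer->back_discard = base::TimeDelta();
  return buffer;
}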
diff --git a/chromium/media/mojo/mojom/status_mojom_traits.cc b/chromium/media/mojo/mojom/status_mojom_traits.cc
index 639e8462643..7ac13fbceaa 100644
--- a/chromium/media/mojo/mojom/status_mojom_traits.cc
+++ b/chromium/media/mojo/mojom/status_mojom_traits.cc
@@ -11,37 +11,30 @@
namespace mojo {
// static
-bool StructTraits<media::mojom::StatusDataView, media::Status>::Read(
- media::mojom::StatusDataView data,
- media::Status* output) {
- DCHECK(!output->data_);
-
- media::StatusCode code;
- std::string message;
- if (!data.ReadCode(&code))
- return false;
-
- if (media::StatusCode::kOk == code)
- return true;
+bool StructTraits<
+ media::mojom::StatusDataDataView,
+ media::internal::StatusData>::Read(media::mojom::StatusDataDataView data,
+ media::internal::StatusData* output) {
+ output->code = data.code();
- absl::optional<std::string> optional_message;
- if (!data.ReadMessage(&optional_message))
+ if (!data.ReadGroup(&output->group))
return false;
- message = std::move(optional_message).value_or(std::string());
- output->data_ =
- std::make_unique<media::Status::StatusInternal>(code, std::move(message));
+ if (!data.ReadMessage(&output->message))
+ return false;
- if (!data.ReadFrames(&output->data_->frames))
+ if (!data.ReadFrames(&output->frames))
return false;
- if (!data.ReadCauses(&output->data_->causes))
+ if (!data.ReadData(&output->data))
return false;
- absl::optional<base::Value> optional_data;
- if (!data.ReadData(&optional_data))
+ std::vector<media::internal::StatusData> causes;
+ if (!data.ReadCauses(&causes))
return false;
- output->data_->data = std::move(optional_data).value_or(base::Value());
+
+ for (const auto& cause : causes)
+ output->causes.push_back(cause);
return true;
}
diff --git a/chromium/media/mojo/mojom/status_mojom_traits.h b/chromium/media/mojo/mojom/status_mojom_traits.h
index 8896ea76130..5a742d58d76 100644
--- a/chromium/media/mojo/mojom/status_mojom_traits.h
+++ b/chromium/media/mojo/mojom/status_mojom_traits.h
@@ -15,41 +15,55 @@
namespace mojo {
template <>
-struct StructTraits<media::mojom::StatusDataView, media::Status> {
- static media::StatusCode code(const media::Status& input) {
- return input.code();
+struct StructTraits<media::mojom::StatusDataDataView,
+ media::internal::StatusData> {
+ static media::StatusCodeType code(const media::internal::StatusData& input) {
+ return input.code;
}
- static absl::optional<std::string> message(const media::Status& input) {
- if (input.is_ok())
- return absl::nullopt;
- DCHECK(input.data_);
- return input.message();
+ static media::StatusGroupType group(
+ const media::internal::StatusData& input) {
+ return input.group;
}
- static base::span<base::Value> frames(const media::Status& input) {
- if (input.is_ok())
- return {};
- DCHECK(input.data_);
- return input.data_->frames;
+ static std::string message(const media::internal::StatusData& input) {
+ return input.message;
}
- static base::span<media::Status> causes(const media::Status& input) {
- if (input.is_ok())
- return {};
- DCHECK(input.data_);
- return input.data_->causes;
+ static base::span<base::Value> frames(media::internal::StatusData& input) {
+ return input.frames;
}
- static absl::optional<base::Value> data(const media::Status& input) {
- if (!input.is_ok()) {
- DCHECK(input.data_);
- return input.data_->data.Clone();
- }
+ static base::span<media::internal::StatusData> causes(
+ media::internal::StatusData& input) {
+ return input.causes;
+ }
+
+ static base::Value data(const media::internal::StatusData& input) {
+ return input.data.Clone();
+ }
+
+ static bool Read(media::mojom::StatusDataDataView data,
+ media::internal::StatusData* output);
+};
+
+template <typename StatusEnum, typename DataView>
+struct StructTraits<DataView, media::TypedStatus<StatusEnum>> {
+ static absl::optional<media::internal::StatusData> internal(
+ const media::TypedStatus<StatusEnum>& input) {
+ if (input.data_)
+ return *input.data_;
return absl::nullopt;
}
- static bool Read(media::mojom::StatusDataView data, media::Status* output);
+ static bool Read(DataView data, media::TypedStatus<StatusEnum>* output) {
+ absl::optional<media::internal::StatusData> internal;
+ if (!data.ReadInternal(&internal))
+ return false;
+ if (internal)
+ output->data_ = internal->copy();
+ return true;
+ }
};
} // namespace mojo
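The templated traits above route any media::TypedStatus<T> through the shared
StatusData representation. A heavily hedged round-trip sketch, where FooStatus
and media::mojom::FooStatus are hypothetical names standing in for a concrete
specialization and its mojom counterpart:

#include "media/base/status.h"
#include "mojo/public/cpp/test_support/test_utils.h"
#include "testing/gtest/include/gtest/gtest.h"

// Hypothetical: FooStatus is some media::TypedStatus<...> whose mojom struct
// wraps the Status/StatusData structs handled by these traits.
TEST(TypedStatusTraitsSketch, RoundTrip) {
  media::FooStatus in(media::FooStatus::Codes::kError, "decode failed");
  media::FooStatus out(media::FooStatus::Codes::kOk);
  ASSERT_TRUE(mojo::test::SerializeAndDeserialize<media::mojom::FooStatus>(
      in, out));
  EXPECT_EQ(in.code(), out.code());
}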
diff --git a/chromium/media/mojo/mojom/video_decoder.mojom b/chromium/media/mojo/mojom/video_decoder.mojom
index aeb46ff6cb7..3cfe79687f3 100644
--- a/chromium/media/mojo/mojom/video_decoder.mojom
+++ b/chromium/media/mojo/mojom/video_decoder.mojom
@@ -95,7 +95,7 @@ interface VideoDecoder {
// media::VideoDecoder::Initialize() is renamed to Configure().
Construct(
pending_associated_remote<VideoDecoderClient> client,
- pending_associated_remote<MediaLog> media_log,
+ pending_remote<MediaLog> media_log,
pending_receiver<VideoFrameHandleReleaser> video_frame_handle_releaser,
handle<data_pipe_consumer> decoder_buffer_pipe,
CommandBufferId? command_buffer_id,
diff --git a/chromium/media/mojo/mojom/video_decoder_config_mojom_traits_unittest.cc b/chromium/media/mojo/mojom/video_decoder_config_mojom_traits_unittest.cc
index e62e9511548..666d790984e 100644
--- a/chromium/media/mojo/mojom/video_decoder_config_mojom_traits_unittest.cc
+++ b/chromium/media/mojo/mojom/video_decoder_config_mojom_traits_unittest.cc
@@ -25,10 +25,11 @@ TEST(VideoDecoderConfigStructTraitsTest, ConvertVideoDecoderConfig_Normal) {
const uint8_t kExtraData[] = "config extra data";
const std::vector<uint8_t> kExtraDataVector(
&kExtraData[0], &kExtraData[0] + base::size(kExtraData));
- VideoDecoderConfig input(
- kCodecVP8, VP8PROFILE_ANY, VideoDecoderConfig::AlphaMode::kIsOpaque,
- VideoColorSpace(), kNoTransformation, kCodedSize, kVisibleRect,
- kNaturalSize, kExtraDataVector, EncryptionScheme::kUnencrypted);
+ VideoDecoderConfig input(VideoCodec::kVP8, VP8PROFILE_ANY,
+ VideoDecoderConfig::AlphaMode::kIsOpaque,
+ VideoColorSpace(), kNoTransformation, kCodedSize,
+ kVisibleRect, kNaturalSize, kExtraDataVector,
+ EncryptionScheme::kUnencrypted);
std::vector<uint8_t> data =
media::mojom::VideoDecoderConfig::Serialize(&input);
VideoDecoderConfig output;
@@ -39,10 +40,11 @@ TEST(VideoDecoderConfigStructTraitsTest, ConvertVideoDecoderConfig_Normal) {
TEST(VideoDecoderConfigStructTraitsTest,
ConvertVideoDecoderConfig_EmptyExtraData) {
- VideoDecoderConfig input(
- kCodecVP8, VP8PROFILE_ANY, VideoDecoderConfig::AlphaMode::kIsOpaque,
- VideoColorSpace(), kNoTransformation, kCodedSize, kVisibleRect,
- kNaturalSize, EmptyExtraData(), EncryptionScheme::kUnencrypted);
+ VideoDecoderConfig input(VideoCodec::kVP8, VP8PROFILE_ANY,
+ VideoDecoderConfig::AlphaMode::kIsOpaque,
+ VideoColorSpace(), kNoTransformation, kCodedSize,
+ kVisibleRect, kNaturalSize, EmptyExtraData(),
+ EncryptionScheme::kUnencrypted);
std::vector<uint8_t> data =
media::mojom::VideoDecoderConfig::Serialize(&input);
VideoDecoderConfig output;
@@ -52,10 +54,11 @@ TEST(VideoDecoderConfigStructTraitsTest,
}
TEST(VideoDecoderConfigStructTraitsTest, ConvertVideoDecoderConfig_Encrypted) {
- VideoDecoderConfig input(
- kCodecVP8, VP8PROFILE_ANY, VideoDecoderConfig::AlphaMode::kIsOpaque,
- VideoColorSpace(), kNoTransformation, kCodedSize, kVisibleRect,
- kNaturalSize, EmptyExtraData(), EncryptionScheme::kCenc);
+ VideoDecoderConfig input(VideoCodec::kVP8, VP8PROFILE_ANY,
+ VideoDecoderConfig::AlphaMode::kIsOpaque,
+ VideoColorSpace(), kNoTransformation, kCodedSize,
+ kVisibleRect, kNaturalSize, EmptyExtraData(),
+ EncryptionScheme::kCenc);
std::vector<uint8_t> data =
media::mojom::VideoDecoderConfig::Serialize(&input);
VideoDecoderConfig output;
@@ -67,7 +70,8 @@ TEST(VideoDecoderConfigStructTraitsTest, ConvertVideoDecoderConfig_Encrypted) {
TEST(VideoDecoderConfigStructTraitsTest,
ConvertVideoDecoderConfig_ColorSpaceInfo) {
VideoDecoderConfig input(
- kCodecVP8, VP8PROFILE_ANY, VideoDecoderConfig::AlphaMode::kIsOpaque,
+ VideoCodec::kVP8, VP8PROFILE_ANY,
+ VideoDecoderConfig::AlphaMode::kIsOpaque,
VideoColorSpace(VideoColorSpace::PrimaryID::BT2020,
VideoColorSpace::TransferID::SMPTEST2084,
VideoColorSpace::MatrixID::BT2020_CL,
@@ -84,10 +88,11 @@ TEST(VideoDecoderConfigStructTraitsTest,
TEST(VideoDecoderConfigStructTraitsTest,
ConvertVideoDecoderConfig_HDRMetadata) {
- VideoDecoderConfig input(
- kCodecVP8, VP8PROFILE_ANY, VideoDecoderConfig::AlphaMode::kIsOpaque,
- VideoColorSpace(), kNoTransformation, kCodedSize, kVisibleRect,
- kNaturalSize, EmptyExtraData(), EncryptionScheme::kUnencrypted);
+ VideoDecoderConfig input(VideoCodec::kVP8, VP8PROFILE_ANY,
+ VideoDecoderConfig::AlphaMode::kIsOpaque,
+ VideoColorSpace(), kNoTransformation, kCodedSize,
+ kVisibleRect, kNaturalSize, EmptyExtraData(),
+ EncryptionScheme::kUnencrypted);
gfx::HDRMetadata hdr_metadata;
hdr_metadata.max_frame_average_light_level = 123;
hdr_metadata.max_content_light_level = 456;
@@ -126,10 +131,11 @@ TEST(VideoDecoderConfigStructTraitsTest,
// Next try a non-empty invalid config. Natural size must not be zero.
const gfx::Size kInvalidNaturalSize(0, 0);
- input.Initialize(
- kCodecVP8, VP8PROFILE_ANY, VideoDecoderConfig::AlphaMode::kIsOpaque,
- VideoColorSpace(), kNoTransformation, kCodedSize, kVisibleRect,
- kInvalidNaturalSize, EmptyExtraData(), EncryptionScheme::kUnencrypted);
+ input.Initialize(VideoCodec::kVP8, VP8PROFILE_ANY,
+ VideoDecoderConfig::AlphaMode::kIsOpaque, VideoColorSpace(),
+ kNoTransformation, kCodedSize, kVisibleRect,
+ kInvalidNaturalSize, EmptyExtraData(),
+ EncryptionScheme::kUnencrypted);
EXPECT_FALSE(input.IsValidConfig());
// Deserialize should again fail due to invalid config.
diff --git a/chromium/media/mojo/mojom/video_encode_accelerator.mojom b/chromium/media/mojo/mojom/video_encode_accelerator.mojom
index a9f5c400966..853d3137264 100644
--- a/chromium/media/mojo/mojom/video_encode_accelerator.mojom
+++ b/chromium/media/mojo/mojom/video_encode_accelerator.mojom
@@ -40,6 +40,7 @@ struct VideoEncodeAcceleratorSupportedProfile {
gfx.mojom.Size max_resolution;
uint32 max_framerate_numerator;
uint32 max_framerate_denominator;
+ array<SVCScalabilityMode> scalability_modes;
};
// A renderer process calls this interface's functions. GPU process implements
diff --git a/chromium/media/mojo/mojom/video_encode_accelerator_mojom_traits.cc b/chromium/media/mojo/mojom/video_encode_accelerator_mojom_traits.cc
index e1bdbd7eb31..2b41f302949 100644
--- a/chromium/media/mojo/mojom/video_encode_accelerator_mojom_traits.cc
+++ b/chromium/media/mojo/mojom/video_encode_accelerator_mojom_traits.cc
@@ -25,6 +25,12 @@ bool StructTraits<media::mojom::VideoEncodeAcceleratorSupportedProfileDataView,
out->max_framerate_numerator = data.max_framerate_numerator();
out->max_framerate_denominator = data.max_framerate_denominator();
+
+ std::vector<media::SVCScalabilityMode> scalability_modes;
+ if (!data.ReadScalabilityModes(&scalability_modes))
+ return false;
+ out->scalability_modes = std::move(scalability_modes);
+
return true;
}
diff --git a/chromium/media/mojo/mojom/video_encode_accelerator_mojom_traits.h b/chromium/media/mojo/mojom/video_encode_accelerator_mojom_traits.h
index e5cadb26b2b..a49707c3183 100644
--- a/chromium/media/mojo/mojom/video_encode_accelerator_mojom_traits.h
+++ b/chromium/media/mojo/mojom/video_encode_accelerator_mojom_traits.h
@@ -43,6 +43,11 @@ struct StructTraits<
return profile.max_framerate_denominator;
}
+ static const std::vector<media::SVCScalabilityMode>& scalability_modes(
+ const media::VideoEncodeAccelerator::SupportedProfile& profile) {
+ return profile.scalability_modes;
+ }
+
static bool Read(
media::mojom::VideoEncodeAcceleratorSupportedProfileDataView data,
media::VideoEncodeAccelerator::SupportedProfile* out);
diff --git a/chromium/media/mojo/mojom/video_encode_accelerator_mojom_traits_unittest.cc b/chromium/media/mojo/mojom/video_encode_accelerator_mojom_traits_unittest.cc
index 0322bb6390d..efa11e9956c 100644
--- a/chromium/media/mojo/mojom/video_encode_accelerator_mojom_traits_unittest.cc
+++ b/chromium/media/mojo/mojom/video_encode_accelerator_mojom_traits_unittest.cc
@@ -12,6 +12,22 @@
#include "testing/gtest/include/gtest/gtest.h"
namespace media {
+TEST(VideoEncodeAcceleratorSupportedProfile, RoundTrip) {
+ ::media::VideoEncodeAccelerator::SupportedProfile input;
+ input.profile = VP9PROFILE_PROFILE0;
+ input.min_resolution = gfx::Size(64, 64);
+ input.max_resolution = gfx::Size(4096, 4096);
+ input.max_framerate_numerator = 30;
+ input.max_framerate_denominator = 1;
+ input.scalability_modes.push_back(::media::SVCScalabilityMode::kL1T3);
+ input.scalability_modes.push_back(::media::SVCScalabilityMode::kL3T3Key);
+
+ ::media::VideoEncodeAccelerator::SupportedProfile output;
+ ASSERT_TRUE(mojo::test::SerializeAndDeserialize<
+ mojom::VideoEncodeAcceleratorSupportedProfile>(input, output));
+ EXPECT_EQ(input, output);
+}
+
TEST(VideoEncoderInfoStructTraitTest, RoundTrip) {
::media::VideoEncoderInfo input;
input.implementation_name = "FakeVideoEncodeAccelerator";
@@ -104,7 +120,7 @@ TEST(BitstreamBufferMetadataTraitTest, RoundTrip) {
::media::BitstreamBufferMetadata input_metadata;
input_metadata.payload_size_bytes = 1234;
input_metadata.key_frame = true;
- input_metadata.timestamp = base::TimeDelta::FromMilliseconds(123456);
+ input_metadata.timestamp = base::Milliseconds(123456);
::media::BitstreamBufferMetadata output_metadata;
ASSERT_TRUE(
mojo::test::SerializeAndDeserialize<mojom::BitstreamBufferMetadata>(
diff --git a/chromium/media/mojo/mojom/video_frame_metadata_mojom_traits_unittest.cc b/chromium/media/mojo/mojom/video_frame_metadata_mojom_traits_unittest.cc
index f84a39befa1..cbd4659ac70 100644
--- a/chromium/media/mojo/mojom/video_frame_metadata_mojom_traits_unittest.cc
+++ b/chromium/media/mojo/mojom/video_frame_metadata_mojom_traits_unittest.cc
@@ -136,17 +136,17 @@ TEST_F(VideoFrameMetadataStructTraitsTest, ValidMetadata) {
// base::TimeTicks
base::TimeTicks now = base::TimeTicks::Now();
- metadata_in.receive_time = now + base::TimeDelta::FromMilliseconds(10);
- metadata_in.capture_begin_time = now + base::TimeDelta::FromMilliseconds(20);
- metadata_in.capture_end_time = now + base::TimeDelta::FromMilliseconds(30);
- metadata_in.decode_begin_time = now + base::TimeDelta::FromMilliseconds(40);
- metadata_in.decode_end_time = now + base::TimeDelta::FromMilliseconds(50);
- metadata_in.reference_time = now + base::TimeDelta::FromMilliseconds(60);
+ metadata_in.receive_time = now + base::Milliseconds(10);
+ metadata_in.capture_begin_time = now + base::Milliseconds(20);
+ metadata_in.capture_end_time = now + base::Milliseconds(30);
+ metadata_in.decode_begin_time = now + base::Milliseconds(40);
+ metadata_in.decode_end_time = now + base::Milliseconds(50);
+ metadata_in.reference_time = now + base::Milliseconds(60);
// base::TimeDeltas
- metadata_in.processing_time = base::TimeDelta::FromMilliseconds(500);
- metadata_in.frame_duration = base::TimeDelta::FromMilliseconds(16);
- metadata_in.wallclock_frame_duration = base::TimeDelta::FromMilliseconds(17);
+ metadata_in.processing_time = base::Milliseconds(500);
+ metadata_in.frame_duration = base::Milliseconds(16);
+ metadata_in.wallclock_frame_duration = base::Milliseconds(17);
VideoFrameMetadata metadata_out;
diff --git a/chromium/media/mojo/mojom/video_frame_mojom_traits_unittest.cc b/chromium/media/mojo/mojom/video_frame_mojom_traits_unittest.cc
index 32197b763ad..3d2bb8ffee4 100644
--- a/chromium/media/mojo/mojom/video_frame_mojom_traits_unittest.cc
+++ b/chromium/media/mojo/mojom/video_frame_mojom_traits_unittest.cc
@@ -80,7 +80,7 @@ TEST_F(VideoFrameStructTraitsTest, MojoSharedBufferVideoFrame) {
for (auto format : formats) {
scoped_refptr<VideoFrame> frame =
MojoSharedBufferVideoFrame::CreateDefaultForTesting(
- format, gfx::Size(100, 100), base::TimeDelta::FromSeconds(100));
+ format, gfx::Size(100, 100), base::Seconds(100));
frame->metadata().frame_rate = 42.0;
ASSERT_TRUE(RoundTrip(&frame));
@@ -88,7 +88,7 @@ TEST_F(VideoFrameStructTraitsTest, MojoSharedBufferVideoFrame) {
EXPECT_FALSE(frame->metadata().end_of_stream);
EXPECT_EQ(*frame->metadata().frame_rate, 42.0);
EXPECT_EQ(frame->coded_size(), gfx::Size(100, 100));
- EXPECT_EQ(frame->timestamp(), base::TimeDelta::FromSeconds(100));
+ EXPECT_EQ(frame->timestamp(), base::Seconds(100));
ASSERT_EQ(frame->storage_type(), VideoFrame::STORAGE_MOJO_SHARED_BUFFER);
MojoSharedBufferVideoFrame* mojo_shared_buffer_frame =
@@ -104,7 +104,7 @@ TEST_F(VideoFrameStructTraitsTest, MailboxVideoFrame) {
scoped_refptr<VideoFrame> frame = VideoFrame::WrapNativeTextures(
PIXEL_FORMAT_ARGB, mailbox_holder, VideoFrame::ReleaseMailboxCB(),
gfx::Size(100, 100), gfx::Rect(10, 10, 80, 80), gfx::Size(200, 100),
- base::TimeDelta::FromSeconds(100));
+ base::Seconds(100));
ASSERT_TRUE(RoundTrip(&frame));
ASSERT_TRUE(frame);
@@ -113,7 +113,7 @@ TEST_F(VideoFrameStructTraitsTest, MailboxVideoFrame) {
EXPECT_EQ(frame->coded_size(), gfx::Size(100, 100));
EXPECT_EQ(frame->visible_rect(), gfx::Rect(10, 10, 80, 80));
EXPECT_EQ(frame->natural_size(), gfx::Size(200, 100));
- EXPECT_EQ(frame->timestamp(), base::TimeDelta::FromSeconds(100));
+ EXPECT_EQ(frame->timestamp(), base::Seconds(100));
ASSERT_TRUE(frame->HasTextures());
ASSERT_EQ(frame->mailbox_holder(0).mailbox, mailbox);
}
@@ -126,7 +126,7 @@ TEST_F(VideoFrameStructTraitsTest, MailboxVideoFrame) {
TEST_F(VideoFrameStructTraitsTest, GpuMemoryBufferVideoFrame) {
gfx::Size coded_size = gfx::Size(256, 256);
gfx::Rect visible_rect(coded_size);
- auto timestamp = base::TimeDelta::FromMilliseconds(1);
+ auto timestamp = base::Milliseconds(1);
std::unique_ptr<gfx::GpuMemoryBuffer> gmb =
std::make_unique<FakeGpuMemoryBuffer>(
coded_size, gfx::BufferFormat::YUV_420_BIPLANAR);
diff --git a/chromium/media/mojo/services/BUILD.gn b/chromium/media/mojo/services/BUILD.gn
index 0e8626347dd..22f07fb36c6 100644
--- a/chromium/media/mojo/services/BUILD.gn
+++ b/chromium/media/mojo/services/BUILD.gn
@@ -58,6 +58,8 @@ component("services") {
"mojo_video_encode_accelerator_service.h",
"playback_events_recorder.cc",
"playback_events_recorder.h",
+ "stable_video_decoder_factory_service.cc",
+ "stable_video_decoder_factory_service.h",
"test_mojo_media_client.cc",
"test_mojo_media_client.h",
"video_decode_perf_history.cc",
@@ -77,6 +79,7 @@ component("services") {
"//media/gpu/ipc/common",
"//media/mojo:buildflags",
"//media/mojo/mojom",
+ "//media/mojo/mojom/stable:stable_video_decoder_cpp_sources",
"//mojo/public/cpp/bindings",
"//mojo/public/cpp/system",
"//services/service_manager/public/cpp",
@@ -109,13 +112,12 @@ component("services") {
} else if (use_vaapi || use_v4l2_codec) {
sources += [ "gpu_mojo_media_client_cros.cc" ]
- if (is_chromeos_ash) {
- sources += [ "gpu_mojo_media_client_cros_ash.cc" ]
+ if (is_chromeos) {
deps +=
[ "//chromeos/components/cdm_factory_daemon:cdm_factory_daemon_gpu" ]
}
} else {
- sources += [ "gpu_mojo_media_client_stubs.cc" ]
+ sources += [ "gpu_mojo_media_client_default.cc" ]
}
if (is_android) {
diff --git a/chromium/media/mojo/services/DEPS b/chromium/media/mojo/services/DEPS
index 8a21c9d2d67..418719a3c7f 100644
--- a/chromium/media/mojo/services/DEPS
+++ b/chromium/media/mojo/services/DEPS
@@ -1,5 +1,5 @@
specific_include_rules = {
- "gpu_mojo_media_client_cros_ash\.cc": [
+ "gpu_mojo_media_client_cros\.cc": [
"+chromeos/components/cdm_factory_daemon",
],
"media_manifest\.cc": [
diff --git a/chromium/media/mojo/services/android_mojo_media_client.h b/chromium/media/mojo/services/android_mojo_media_client.h
index 0d5e14a3848..ddd2e1fa2da 100644
--- a/chromium/media/mojo/services/android_mojo_media_client.h
+++ b/chromium/media/mojo/services/android_mojo_media_client.h
@@ -16,6 +16,10 @@ namespace media {
class AndroidMojoMediaClient final : public MojoMediaClient {
public:
AndroidMojoMediaClient();
+
+ AndroidMojoMediaClient(const AndroidMojoMediaClient&) = delete;
+ AndroidMojoMediaClient& operator=(const AndroidMojoMediaClient&) = delete;
+
~AndroidMojoMediaClient() override;
// MojoMediaClient implementation.
@@ -24,9 +28,6 @@ class AndroidMojoMediaClient final : public MojoMediaClient {
std::unique_ptr<CdmFactory> CreateCdmFactory(
mojom::FrameInterfaceFactory* frame_interfaces) override;
-
- private:
- DISALLOW_COPY_AND_ASSIGN(AndroidMojoMediaClient);
};
} // namespace media
diff --git a/chromium/media/mojo/services/cdm_service_unittest.cc b/chromium/media/mojo/services/cdm_service_unittest.cc
index b146c93295f..f634875e7d4 100644
--- a/chromium/media/mojo/services/cdm_service_unittest.cc
+++ b/chromium/media/mojo/services/cdm_service_unittest.cc
@@ -89,6 +89,10 @@ class MockCdmServiceClient : public media::CdmService::Client {
class CdmServiceTest : public testing::Test {
public:
CdmServiceTest() = default;
+
+ CdmServiceTest(const CdmServiceTest&) = delete;
+ CdmServiceTest& operator=(const CdmServiceTest&) = delete;
+
~CdmServiceTest() override = default;
MOCK_METHOD0(CdmServiceIdle, void());
@@ -169,8 +173,6 @@ class CdmServiceTest : public testing::Test {
}
std::unique_ptr<CdmService> service_;
MockCdmServiceClient* mock_cdm_service_client_ = nullptr;
-
- DISALLOW_COPY_AND_ASSIGN(CdmServiceTest);
};
} // namespace
diff --git a/chromium/media/mojo/services/gpu_mojo_media_client.cc b/chromium/media/mojo/services/gpu_mojo_media_client.cc
index fa1e422757a..c4bcbb2ebb4 100644
--- a/chromium/media/mojo/services/gpu_mojo_media_client.cc
+++ b/chromium/media/mojo/services/gpu_mojo_media_client.cc
@@ -63,11 +63,7 @@ VideoDecoderTraits::VideoDecoderTraits(
std::unique_ptr<MediaLog> media_log,
RequestOverlayInfoCB request_overlay_info_cb,
const gfx::ColorSpace* target_color_space,
- gpu::GpuPreferences gpu_preferences,
- gpu::GpuFeatureInfo gpu_feature_info,
- const gpu::GpuDriverBugWorkarounds* gpu_workarounds,
gpu::GpuMemoryBufferFactory* gpu_memory_buffer_factory,
- GetConfigCacheCB get_cached_configs_cb,
GetCommandBufferStubCB get_command_buffer_stub_cb,
AndroidOverlayMojoFactoryCB android_overlay_factory_cb)
: task_runner(std::move(task_runner)),
@@ -75,11 +71,7 @@ VideoDecoderTraits::VideoDecoderTraits(
media_log(std::move(media_log)),
request_overlay_info_cb(request_overlay_info_cb),
target_color_space(target_color_space),
- gpu_preferences(gpu_preferences),
- gpu_feature_info(gpu_feature_info),
- gpu_workarounds(gpu_workarounds),
gpu_memory_buffer_factory(gpu_memory_buffer_factory),
- get_cached_configs_cb(std::move(get_cached_configs_cb)),
get_command_buffer_stub_cb(std::move(get_command_buffer_stub_cb)),
android_overlay_factory_cb(std::move(android_overlay_factory_cb)) {}
@@ -97,35 +89,79 @@ GpuMojoMediaClient::GpuMojoMediaClient(
gpu_task_runner_(std::move(gpu_task_runner)),
media_gpu_channel_manager_(std::move(media_gpu_channel_manager)),
android_overlay_factory_cb_(std::move(android_overlay_factory_cb)),
- gpu_memory_buffer_factory_(gpu_memory_buffer_factory) {}
+ gpu_memory_buffer_factory_(gpu_memory_buffer_factory),
+ platform_(PlatformDelegate::Create(this)) {}
GpuMojoMediaClient::~GpuMojoMediaClient() = default;
+GpuMojoMediaClient::PlatformDelegate::~PlatformDelegate() = default;
+
+std::unique_ptr<VideoDecoder>
+GpuMojoMediaClient::PlatformDelegate::CreateVideoDecoder(
+ const VideoDecoderTraits&) {
+ return nullptr;
+}
+
+void GpuMojoMediaClient::PlatformDelegate::GetSupportedVideoDecoderConfigs(
+ MojoMediaClient::SupportedVideoDecoderConfigsCallback callback) {
+ std::move(callback).Run({});
+}
+
+std::unique_ptr<AudioDecoder>
+GpuMojoMediaClient::PlatformDelegate::CreateAudioDecoder(
+ scoped_refptr<base::SingleThreadTaskRunner> task_runner) {
+ return nullptr;
+}
+
+std::unique_ptr<CdmFactory>
+GpuMojoMediaClient::PlatformDelegate::CreateCdmFactory(
+ mojom::FrameInterfaceFactory* frame_interfaces) {
+ return nullptr;
+}
+
+VideoDecoderType
+GpuMojoMediaClient::PlatformDelegate::GetDecoderImplementationType() {
+ return VideoDecoderType::kUnknown;
+}
+
std::unique_ptr<AudioDecoder> GpuMojoMediaClient::CreateAudioDecoder(
scoped_refptr<base::SingleThreadTaskRunner> task_runner) {
- return CreatePlatformAudioDecoder(task_runner);
+ return platform_->CreateAudioDecoder(task_runner);
}
VideoDecoderType GpuMojoMediaClient::GetDecoderImplementationType() {
- return GetPlatformDecoderImplementationType(gpu_workarounds_,
- gpu_preferences_);
+ return platform_->GetDecoderImplementationType();
}
-SupportedVideoDecoderConfigs
-GpuMojoMediaClient::GetSupportedVideoDecoderConfigs() {
- if (!supported_config_cache_)
- supported_config_cache_ = GetPlatformSupportedVideoDecoderConfigs(
- gpu_workarounds_, gpu_preferences_,
- // GetPlatformSupportedVideoDecoderConfigs runs this callback either
- // never or immediately, and will not store it, so |this| will outlive
- // the bound function.
- base::BindOnce(&GpuMojoMediaClient::GetVDAVideoDecoderConfigs,
+void GpuMojoMediaClient::GetSupportedVideoDecoderConfigs(
+ MojoMediaClient::SupportedVideoDecoderConfigsCallback callback) {
+ if (supported_config_cache_) {
+ DCHECK(pending_supported_config_callbacks_.empty());
+
+ std::move(callback).Run(*supported_config_cache_);
+ return;
+ }
+
+ const bool should_query = pending_supported_config_callbacks_.empty();
+ pending_supported_config_callbacks_.push_back(std::move(callback));
+ if (should_query) {
+ // Only get configurations if there is no query already in flight.
+ platform_->GetSupportedVideoDecoderConfigs(
+ base::BindOnce(&GpuMojoMediaClient::OnSupportedVideoDecoderConfigs,
base::Unretained(this)));
+ }
+}
- if (!supported_config_cache_)
- return {};
+void GpuMojoMediaClient::OnSupportedVideoDecoderConfigs(
+ SupportedVideoDecoderConfigs configs) {
+ DCHECK(!pending_supported_config_callbacks_.empty());
- return *supported_config_cache_;
+ // Return the result to all pending queries.
+ supported_config_cache_ = std::move(configs);
+ for (auto& callback : pending_supported_config_callbacks_) {
+ std::move(callback).Run(*supported_config_cache_);
+ }
+ pending_supported_config_callbacks_.clear();
}
SupportedVideoDecoderConfigs GpuMojoMediaClient::GetVDAVideoDecoderConfigs() {
@@ -152,23 +188,19 @@ std::unique_ptr<VideoDecoder> GpuMojoMediaClient::CreateVideoDecoder(
media_log ? media_log->Clone() : std::make_unique<media::NullMediaLog>();
VideoDecoderTraits traits(
task_runner, gpu_task_runner_, std::move(log),
- std::move(request_overlay_info_cb), &target_color_space, gpu_preferences_,
- gpu_feature_info_, &gpu_workarounds_, gpu_memory_buffer_factory_,
- // CreatePlatformVideoDecoder does not keep a reference to |traits|
- // so this bound method will not outlive |this|
- base::BindRepeating(&GpuMojoMediaClient::GetSupportedVideoDecoderConfigs,
- base::Unretained(this)),
+ std::move(request_overlay_info_cb), &target_color_space,
+ gpu_memory_buffer_factory_,
base::BindRepeating(
&GetCommandBufferStub, gpu_task_runner_, media_gpu_channel_manager_,
command_buffer_id->channel_token, command_buffer_id->route_id),
std::move(android_overlay_factory_cb_));
- return CreatePlatformVideoDecoder(traits);
+ return platform_->CreateVideoDecoder(traits);
}
std::unique_ptr<CdmFactory> GpuMojoMediaClient::CreateCdmFactory(
mojom::FrameInterfaceFactory* frame_interfaces) {
- return CreatePlatformCdmFactory(frame_interfaces);
+ return platform_->CreateCdmFactory(frame_interfaces);
}
} // namespace media
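The rewrite above makes GetSupportedVideoDecoderConfigs() asynchronous: the
first caller triggers a single platform query, concurrent callers are queued
in |pending_supported_config_callbacks_|, and later callers are answered from
|supported_config_cache_|. A minimal caller sketch, assuming a
GpuMojoMediaClient* owned elsewhere and a callback that takes the config
vector by value:

#include "base/bind.h"
#include "base/logging.h"
#include "media/mojo/services/gpu_mojo_media_client.h"

void LogSupportedConfigCount(media::GpuMojoMediaClient* client) {
  // Two back-to-back queries: the first starts the platform query, the
  // second is queued and resolved by the same reply; anything issued after
  // that is served directly from the cache.
  for (int i = 0; i < 2; ++i) {
    client->GetSupportedVideoDecoderConfigs(
        base::BindOnce([](media::SupportedVideoDecoderConfigs configs) {
          LOG(INFO) << "supported configs: " << configs.size();
        }));
  }
}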
diff --git a/chromium/media/mojo/services/gpu_mojo_media_client.h b/chromium/media/mojo/services/gpu_mojo_media_client.h
index 862388fa9af..904310538c5 100644
--- a/chromium/media/mojo/services/gpu_mojo_media_client.h
+++ b/chromium/media/mojo/services/gpu_mojo_media_client.h
@@ -43,14 +43,8 @@ struct VideoDecoderTraits {
std::unique_ptr<MediaLog> media_log;
RequestOverlayInfoCB request_overlay_info_cb;
const gfx::ColorSpace* const target_color_space;
- gpu::GpuPreferences gpu_preferences;
- gpu::GpuFeatureInfo gpu_feature_info;
- const gpu::GpuDriverBugWorkarounds* const gpu_workarounds;
gpu::GpuMemoryBufferFactory* const gpu_memory_buffer_factory;
- // Windows decoders need to ensure that the cache is populated.
- GetConfigCacheCB get_cached_configs_cb;
-
// Android uses this twice.
GetCommandBufferStubCB get_command_buffer_stub_cb;
@@ -62,48 +56,52 @@ struct VideoDecoderTraits {
std::unique_ptr<MediaLog> media_log,
RequestOverlayInfoCB request_overlay_info_cb,
const gfx::ColorSpace* target_color_space,
- gpu::GpuPreferences gpu_preferences,
- gpu::GpuFeatureInfo gpu_feature_info,
- const gpu::GpuDriverBugWorkarounds* gpu_workarounds,
gpu::GpuMemoryBufferFactory* gpu_memory_buffer_factory,
- GetConfigCacheCB get_cached_configs_cb,
GetCommandBufferStubCB get_command_buffer_stub_cb,
AndroidOverlayMojoFactoryCB android_overlay_factory_cb);
~VideoDecoderTraits();
};
-// Find platform specific implementations of these in
-// gpu_mojo_media_client_{platform}.cc
-// Creates a platform-specific media::VideoDecoder.
-std::unique_ptr<VideoDecoder> CreatePlatformVideoDecoder(
- const VideoDecoderTraits&);
-
-// Queries the platform-specific VideoDecoder implementation for its
-// supported profiles. Many platforms fall back to use the VDAVideoDecoder
-// so that implementation is shared, and its supported configs can be
-// queries using the |get_vda_configs| callback.
-absl::optional<SupportedVideoDecoderConfigs>
-GetPlatformSupportedVideoDecoderConfigs(
- gpu::GpuDriverBugWorkarounds gpu_workarounds,
- gpu::GpuPreferences gpu_preferences,
- base::OnceCallback<SupportedVideoDecoderConfigs()> get_vda_configs);
-
-// Creates a platform-specific media::AudioDecoder. Most platforms don't do
-// anything here, but android, for example, does.
-std::unique_ptr<AudioDecoder> CreatePlatformAudioDecoder(
- scoped_refptr<base::SingleThreadTaskRunner> task_runner);
-
-// Creates a CDM factory, right now only used on android and chromeos.
-std::unique_ptr<CdmFactory> CreatePlatformCdmFactory(
- mojom::FrameInterfaceFactory* frame_interfaces);
-
-// Queries the platform decoder type.
-VideoDecoderType GetPlatformDecoderImplementationType(
- gpu::GpuDriverBugWorkarounds gpu_workarounds,
- gpu::GpuPreferences gpu_preferences);
-
class GpuMojoMediaClient final : public MojoMediaClient {
public:
+ // Implementations of platform specific media functionality can be provided
+ // by overriding the appropriate methods in this interface.
+ // Specification of the platform object is done by implementing the static
+ // Create() method in the gpu_mojo_media_client_<platform>.cc file.
+ class PlatformDelegate {
+ public:
+ virtual ~PlatformDelegate();
+
+ // Instantiates the PlatformDelegate suitable for the platform.
+ // Implemented in platform-specific files.
+ static std::unique_ptr<PlatformDelegate> Create(GpuMojoMediaClient* client);
+
+    // Platform-specific implementations of these methods live in
+    // gpu_mojo_media_client_{platform}.cc.
+    // Creates a platform-specific media::VideoDecoder.
+ virtual std::unique_ptr<VideoDecoder> CreateVideoDecoder(
+ const VideoDecoderTraits& traits);
+
+ // Queries the platform-specific VideoDecoder implementation for its
+    // supported profiles. Many platforms fall back to the VDAVideoDecoder so
+    // that the implementation is shared, and its supported configs can be
+    // queried using the |get_vda_configs| callback.
+ virtual void GetSupportedVideoDecoderConfigs(
+ MojoMediaClient::SupportedVideoDecoderConfigsCallback callback);
+
+ // Creates a platform-specific media::AudioDecoder. Most platforms don't do
+ // anything here, but android, for example, does.
+ virtual std::unique_ptr<AudioDecoder> CreateAudioDecoder(
+ scoped_refptr<base::SingleThreadTaskRunner> task_runner);
+
+ // Creates a CDM factory, right now only used on android and chromeos.
+ virtual std::unique_ptr<CdmFactory> CreateCdmFactory(
+ mojom::FrameInterfaceFactory* frame_interfaces);
+
+ // Queries the platform decoder type.
+ virtual VideoDecoderType GetDecoderImplementationType();
+ };
+
// |media_gpu_channel_manager| must only be used on |gpu_task_runner|, which
// is expected to be the GPU main thread task runner.
GpuMojoMediaClient(
@@ -116,8 +114,26 @@ class GpuMojoMediaClient final : public MojoMediaClient {
AndroidOverlayMojoFactoryCB android_overlay_factory_cb);
~GpuMojoMediaClient() final;
+ GpuMojoMediaClient(const GpuMojoMediaClient&) = delete;
+ GpuMojoMediaClient& operator=(const GpuMojoMediaClient&) = delete;
+
+ // Can be used as default fallback values by platform specific
+ // implementations.
+ SupportedVideoDecoderConfigs GetVDAVideoDecoderConfigs();
+
+ const gpu::GpuPreferences& gpu_preferences() { return gpu_preferences_; }
+
+ const gpu::GpuDriverBugWorkarounds& gpu_workarounds() {
+ return gpu_workarounds_;
+ }
+
+ const gpu::GpuFeatureInfo& gpu_feature_info() const {
+ return gpu_feature_info_;
+ }
+
// MojoMediaClient implementation.
- SupportedVideoDecoderConfigs GetSupportedVideoDecoderConfigs() final;
+ void GetSupportedVideoDecoderConfigs(
+ MojoMediaClient::SupportedVideoDecoderConfigsCallback callback) final;
VideoDecoderType GetDecoderImplementationType() final;
std::unique_ptr<AudioDecoder> CreateAudioDecoder(
scoped_refptr<base::SingleThreadTaskRunner> task_runner) final;
@@ -131,12 +147,12 @@ class GpuMojoMediaClient final : public MojoMediaClient {
mojom::FrameInterfaceFactory* interface_provider) final;
private:
- // These are useful to bind into callbacks for platform specific
- // implementations that can use these defaults as fallbacks.
- SupportedVideoDecoderConfigs GetVDAVideoDecoderConfigs();
+ void OnSupportedVideoDecoderConfigs(SupportedVideoDecoderConfigs configs);
// Cross-platform cache supported config cache.
absl::optional<SupportedVideoDecoderConfigs> supported_config_cache_;
+ std::vector<MojoMediaClient::SupportedVideoDecoderConfigsCallback>
+ pending_supported_config_callbacks_;
gpu::GpuPreferences gpu_preferences_;
gpu::GpuDriverBugWorkarounds gpu_workarounds_;
@@ -145,7 +161,7 @@ class GpuMojoMediaClient final : public MojoMediaClient {
base::WeakPtr<MediaGpuChannelManager> media_gpu_channel_manager_;
AndroidOverlayMojoFactoryCB android_overlay_factory_cb_;
gpu::GpuMemoryBufferFactory* const gpu_memory_buffer_factory_;
- DISALLOW_COPY_AND_ASSIGN(GpuMojoMediaClient);
+ std::unique_ptr<PlatformDelegate> platform_;
};
} // namespace media
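
[Editor's note] The header above only declares the new PlatformDelegate seam; the matching gpu_mojo_media_client.cc is not part of this excerpt. The following is a minimal sketch, assuming the delegate is created in the constructor and that OnSupportedVideoDecoderConfigs() fills supported_config_cache_ and flushes pending_supported_config_callbacks_; everything not declared in the header above (constructor wiring, first-caller check, use of base::Unretained) is illustrative, not the actual implementation.

  // Sketch only: approximate wiring implied by the header declarations.
  GpuMojoMediaClient::GpuMojoMediaClient(/* ...constructor arguments elided... */)
      : platform_(PlatformDelegate::Create(this)) {}

  void GpuMojoMediaClient::GetSupportedVideoDecoderConfigs(
      SupportedVideoDecoderConfigsCallback callback) {
    // Serve from the cross-platform cache once a previous query has completed.
    if (supported_config_cache_) {
      std::move(callback).Run(*supported_config_cache_);
      return;
    }
    // Queue the callback; only the first caller triggers the platform query.
    // Assumes all calls arrive on the same sequence, so no locking is shown.
    pending_supported_config_callbacks_.push_back(std::move(callback));
    if (pending_supported_config_callbacks_.size() > 1)
      return;
    platform_->GetSupportedVideoDecoderConfigs(
        base::BindOnce(&GpuMojoMediaClient::OnSupportedVideoDecoderConfigs,
                       base::Unretained(this)));
  }

  void GpuMojoMediaClient::OnSupportedVideoDecoderConfigs(
      SupportedVideoDecoderConfigs configs) {
    supported_config_cache_ = std::move(configs);
    for (auto& cb : pending_supported_config_callbacks_)
      std::move(cb).Run(*supported_config_cache_);
    pending_supported_config_callbacks_.clear();
  }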
diff --git a/chromium/media/mojo/services/gpu_mojo_media_client_android.cc b/chromium/media/mojo/services/gpu_mojo_media_client_android.cc
index 639533e0f16..52773c901b6 100644
--- a/chromium/media/mojo/services/gpu_mojo_media_client_android.cc
+++ b/chromium/media/mojo/services/gpu_mojo_media_client_android.cc
@@ -28,82 +28,99 @@ using media::android_mojo_util::CreateMediaDrmStorage;
using media::android_mojo_util::CreateProvisionFetcher;
namespace media {
+namespace {
-std::unique_ptr<VideoDecoder> CreatePlatformVideoDecoder(
- const VideoDecoderTraits& traits) {
- scoped_refptr<gpu::RefCountedLock> ref_counted_lock;
-
- // When this feature is enabled, CodecImage, CodecBufferWaitCorrdinator and
- // other media classes used in MCVD path will be accessed by multiple gpu
- // threads. To implement thread safetyness, we are using a global ref
- // counted lock here. CodecImage, CodecOutputBufferRenderer,
- // CodecBufferWaitCoordinator expects this ref counted lock to be held by the
- // classes which are accessing them (SharedImageVideo, MRE, FrameInfoHelper
- // etc.)
- if (features::IsDrDcEnabled()) {
- ref_counted_lock = base::MakeRefCounted<gpu::RefCountedLock>();
- }
+class AndroidPlatformDelegate : public GpuMojoMediaClient::PlatformDelegate {
+ public:
+ explicit AndroidPlatformDelegate(GpuMojoMediaClient* client)
+ : client_(client) {}
+ ~AndroidPlatformDelegate() override = default;
- std::unique_ptr<SharedImageVideoProvider> image_provider =
- std::make_unique<DirectSharedImageVideoProvider>(
- traits.gpu_task_runner, traits.get_command_buffer_stub_cb,
- ref_counted_lock);
+ AndroidPlatformDelegate(const AndroidPlatformDelegate&) = delete;
+ void operator=(const AndroidPlatformDelegate&) = delete;
+
+ // GpuMojoMediaClient::PlatformDelegate implementation.
+ std::unique_ptr<VideoDecoder> CreateVideoDecoder(
+ const VideoDecoderTraits& traits) override {
+ scoped_refptr<gpu::RefCountedLock> ref_counted_lock;
+
+    // When this feature is enabled, CodecImage, CodecBufferWaitCoordinator and
+    // other media classes used in the MCVD path will be accessed by multiple
+    // gpu threads. To ensure thread safety, we use a global ref-counted lock
+    // here. CodecImage, CodecOutputBufferRenderer and
+    // CodecBufferWaitCoordinator expect this ref-counted lock to be held by
+    // the classes which access them (SharedImageVideo, MRE, FrameInfoHelper,
+    // etc.)
+ if (features::NeedThreadSafeAndroidMedia()) {
+ ref_counted_lock = base::MakeRefCounted<gpu::RefCountedLock>();
+ }
+
+ std::unique_ptr<SharedImageVideoProvider> image_provider =
+ std::make_unique<DirectSharedImageVideoProvider>(
+ traits.gpu_task_runner, traits.get_command_buffer_stub_cb,
+ ref_counted_lock);
- if (base::FeatureList::IsEnabled(kUsePooledSharedImageVideoProvider)) {
- // Wrap |image_provider| in a pool.
- image_provider = PooledSharedImageVideoProvider::Create(
+ if (base::FeatureList::IsEnabled(kUsePooledSharedImageVideoProvider)) {
+ // Wrap |image_provider| in a pool.
+ image_provider = PooledSharedImageVideoProvider::Create(
+ traits.gpu_task_runner, traits.get_command_buffer_stub_cb,
+ std::move(image_provider), ref_counted_lock);
+ }
+ // TODO(liberato): Create this only if we're using Vulkan, else it's
+ // ignored. If we can tell that here, then VideoFrameFactory can use it
+ // as a signal about whether it's supposed to get YCbCrInfo rather than
+ // requiring the provider to set |is_vulkan| in the ImageRecord.
+ auto frame_info_helper = FrameInfoHelper::Create(
traits.gpu_task_runner, traits.get_command_buffer_stub_cb,
- std::move(image_provider), ref_counted_lock);
+ ref_counted_lock);
+
+ return MediaCodecVideoDecoder::Create(
+ client_->gpu_preferences(), client_->gpu_feature_info(),
+ traits.media_log->Clone(), DeviceInfo::GetInstance(),
+ CodecAllocator::GetInstance(traits.gpu_task_runner),
+ std::make_unique<AndroidVideoSurfaceChooserImpl>(
+ DeviceInfo::GetInstance()->IsSetOutputSurfaceSupported()),
+ traits.android_overlay_factory_cb,
+ std::move(traits.request_overlay_info_cb),
+ std::make_unique<VideoFrameFactoryImpl>(
+ traits.gpu_task_runner, client_->gpu_preferences(),
+ std::move(image_provider),
+ MaybeRenderEarlyManager::Create(traits.gpu_task_runner,
+ ref_counted_lock),
+ std::move(frame_info_helper), ref_counted_lock),
+ ref_counted_lock);
}
- // TODO(liberato): Create this only if we're using Vulkan, else it's
- // ignored. If we can tell that here, then VideoFrameFactory can use it
- // as a signal about whether it's supposed to get YCbCrInfo rather than
- // requiring the provider to set |is_vulkan| in the ImageRecord.
- auto frame_info_helper = FrameInfoHelper::Create(
- traits.gpu_task_runner, traits.get_command_buffer_stub_cb,
- ref_counted_lock);
-
- return MediaCodecVideoDecoder::Create(
- traits.gpu_preferences, traits.gpu_feature_info,
- traits.media_log->Clone(), DeviceInfo::GetInstance(),
- CodecAllocator::GetInstance(traits.gpu_task_runner),
- std::make_unique<AndroidVideoSurfaceChooserImpl>(
- DeviceInfo::GetInstance()->IsSetOutputSurfaceSupported()),
- traits.android_overlay_factory_cb,
- std::move(traits.request_overlay_info_cb),
- std::make_unique<VideoFrameFactoryImpl>(
- traits.gpu_task_runner, traits.gpu_preferences,
- std::move(image_provider),
- MaybeRenderEarlyManager::Create(traits.gpu_task_runner,
- ref_counted_lock),
- std::move(frame_info_helper), ref_counted_lock),
- ref_counted_lock);
-}
-absl::optional<SupportedVideoDecoderConfigs>
-GetPlatformSupportedVideoDecoderConfigs(
- gpu::GpuDriverBugWorkarounds gpu_workarounds,
- gpu::GpuPreferences gpu_preferences,
- base::OnceCallback<SupportedVideoDecoderConfigs()> get_vda_configs) {
- return MediaCodecVideoDecoder::GetSupportedConfigs();
-}
+ void GetSupportedVideoDecoderConfigs(
+ MojoMediaClient::SupportedVideoDecoderConfigsCallback callback) override {
+ std::move(callback).Run(MediaCodecVideoDecoder::GetSupportedConfigs());
+ }
-std::unique_ptr<AudioDecoder> CreatePlatformAudioDecoder(
- scoped_refptr<base::SingleThreadTaskRunner> task_runner) {
- return std::make_unique<MediaCodecAudioDecoder>(std::move(task_runner));
-}
+ std::unique_ptr<AudioDecoder> CreateAudioDecoder(
+ scoped_refptr<base::SingleThreadTaskRunner> task_runner) override {
+ return std::make_unique<MediaCodecAudioDecoder>(std::move(task_runner));
+ }
-std::unique_ptr<CdmFactory> CreatePlatformCdmFactory(
- mojom::FrameInterfaceFactory* frame_interfaces) {
- return std::make_unique<AndroidCdmFactory>(
- base::BindRepeating(&CreateProvisionFetcher, frame_interfaces),
- base::BindRepeating(&CreateMediaDrmStorage, frame_interfaces));
-}
+ std::unique_ptr<CdmFactory> CreateCdmFactory(
+ mojom::FrameInterfaceFactory* frame_interfaces) override {
+ return std::make_unique<AndroidCdmFactory>(
+ base::BindRepeating(&CreateProvisionFetcher, frame_interfaces),
+ base::BindRepeating(&CreateMediaDrmStorage, frame_interfaces));
+ }
+
+ VideoDecoderType GetDecoderImplementationType() override {
+ return VideoDecoderType::kMediaCodec;
+ }
+
+ private:
+ GpuMojoMediaClient* client_;
+};
+
+} // namespace
-VideoDecoderType GetPlatformDecoderImplementationType(
- gpu::GpuDriverBugWorkarounds gpu_workarounds,
- gpu::GpuPreferences gpu_preferences) {
- return VideoDecoderType::kMediaCodec;
+std::unique_ptr<GpuMojoMediaClient::PlatformDelegate>
+GpuMojoMediaClient::PlatformDelegate::Create(GpuMojoMediaClient* client) {
+ return std::make_unique<AndroidPlatformDelegate>(client);
}
} // namespace media
diff --git a/chromium/media/mojo/services/gpu_mojo_media_client_cros.cc b/chromium/media/mojo/services/gpu_mojo_media_client_cros.cc
index 186244dbdc2..b099865aad9 100644
--- a/chromium/media/mojo/services/gpu_mojo_media_client_cros.cc
+++ b/chromium/media/mojo/services/gpu_mojo_media_client_cros.cc
@@ -5,11 +5,16 @@
#include "media/mojo/services/gpu_mojo_media_client.h"
#include "media/base/audio_decoder.h"
+#include "media/base/cdm_factory.h"
#include "media/gpu/chromeos/mailbox_video_frame_converter.h"
#include "media/gpu/chromeos/platform_video_frame_pool.h"
#include "media/gpu/chromeos/video_decoder_pipeline.h"
#include "media/gpu/ipc/service/vda_video_decoder.h"
+#if defined(OS_CHROMEOS)
+#include "chromeos/components/cdm_factory_daemon/chromeos_cdm_factory.h"
+#endif // defined(OS_CHROMEOS)
+
namespace media {
namespace {
@@ -22,62 +27,70 @@ bool ShouldUseChromeOSDirectVideoDecoder(
#endif
}
-} // namespace
+class CrosPlatformDelegate : public GpuMojoMediaClient::PlatformDelegate {
+ public:
+ explicit CrosPlatformDelegate(GpuMojoMediaClient* client) : client_(client) {}
+ ~CrosPlatformDelegate() override = default;
+
+ CrosPlatformDelegate(const CrosPlatformDelegate&) = delete;
+ void operator=(const CrosPlatformDelegate&) = delete;
-std::unique_ptr<VideoDecoder> CreatePlatformVideoDecoder(
- const VideoDecoderTraits& traits) {
- if (ShouldUseChromeOSDirectVideoDecoder(traits.gpu_preferences)) {
- auto frame_pool = std::make_unique<PlatformVideoFramePool>(
- traits.gpu_memory_buffer_factory);
- auto frame_converter = MailboxVideoFrameConverter::Create(
- base::BindRepeating(&PlatformVideoFramePool::UnwrapFrame,
- base::Unretained(frame_pool.get())),
- traits.gpu_task_runner, traits.get_command_buffer_stub_cb);
- return VideoDecoderPipeline::Create(
- traits.task_runner, std::move(frame_pool), std::move(frame_converter),
- traits.media_log->Clone());
+ // GpuMojoMediaClient::PlatformDelegate implementation.
+ std::unique_ptr<VideoDecoder> CreateVideoDecoder(
+ const VideoDecoderTraits& traits) override {
+ if (ShouldUseChromeOSDirectVideoDecoder(client_->gpu_preferences())) {
+ auto frame_pool = std::make_unique<PlatformVideoFramePool>(
+ traits.gpu_memory_buffer_factory);
+ auto frame_converter = MailboxVideoFrameConverter::Create(
+ base::BindRepeating(&PlatformVideoFramePool::UnwrapFrame,
+ base::Unretained(frame_pool.get())),
+ traits.gpu_task_runner, traits.get_command_buffer_stub_cb);
+ return VideoDecoderPipeline::Create(
+ traits.task_runner, std::move(frame_pool), std::move(frame_converter),
+ traits.media_log->Clone());
+ }
+ return VdaVideoDecoder::Create(
+ traits.task_runner, traits.gpu_task_runner, traits.media_log->Clone(),
+ *traits.target_color_space, client_->gpu_preferences(),
+ client_->gpu_workarounds(), traits.get_command_buffer_stub_cb);
}
- return VdaVideoDecoder::Create(
- traits.task_runner, traits.gpu_task_runner, traits.media_log->Clone(),
- *traits.target_color_space, traits.gpu_preferences,
- *traits.gpu_workarounds, traits.get_command_buffer_stub_cb);
-}
-absl::optional<SupportedVideoDecoderConfigs>
-GetPlatformSupportedVideoDecoderConfigs(
- gpu::GpuDriverBugWorkarounds gpu_workarounds,
- gpu::GpuPreferences gpu_preferences,
- base::OnceCallback<SupportedVideoDecoderConfigs()> get_vda_configs) {
- SupportedVideoDecoderConfigs supported_configs;
- if (ShouldUseChromeOSDirectVideoDecoder(gpu_preferences)) {
- return VideoDecoderPipeline::GetSupportedConfigs(gpu_workarounds);
+ void GetSupportedVideoDecoderConfigs(
+ MojoMediaClient::SupportedVideoDecoderConfigsCallback callback) override {
+ if (ShouldUseChromeOSDirectVideoDecoder(client_->gpu_preferences())) {
+ std::move(callback).Run(*VideoDecoderPipeline::GetSupportedConfigs(
+ client_->gpu_workarounds()));
+ return;
+ }
+ std::move(callback).Run(client_->GetVDAVideoDecoderConfigs());
}
- return std::move(get_vda_configs).Run();
-}
-VideoDecoderType GetPlatformDecoderImplementationType(
- gpu::GpuDriverBugWorkarounds gpu_workarounds,
- gpu::GpuPreferences gpu_preferences) {
- if (ShouldUseChromeOSDirectVideoDecoder(gpu_preferences)) {
- return VideoDecoderType::kVaapi;
+ std::unique_ptr<CdmFactory> CreateCdmFactory(
+ mojom::FrameInterfaceFactory* frame_interfaces) override {
+#if defined(OS_CHROMEOS)
+ return std::make_unique<chromeos::ChromeOsCdmFactory>(frame_interfaces);
+#else // defined(OS_CHROMEOS)
+ return nullptr;
+#endif // else defined(OS_CHROMEOS)
}
- return VideoDecoderType::kVda;
-}
-std::unique_ptr<AudioDecoder> CreatePlatformAudioDecoder(
- scoped_refptr<base::SingleThreadTaskRunner> task_runner) {
- return nullptr;
-}
+ VideoDecoderType GetDecoderImplementationType() override {
+ if (ShouldUseChromeOSDirectVideoDecoder(client_->gpu_preferences())) {
+ return VideoDecoderType::kVaapi;
+ }
+ return VideoDecoderType::kVda;
+ }
-// When |IS_CHROMEOS_ASH|, gpu_mojo_media_client_cros_ash.cc is built, which
-// has the real implementation of this method.
-#if !BUILDFLAG(IS_CHROMEOS_ASH)
-class CdmFactory {};
+ private:
+ GpuMojoMediaClient* client_;
+};
-std::unique_ptr<CdmFactory> CreatePlatformCdmFactory(
- mojom::FrameInterfaceFactory* frame_interfaces) {
- return nullptr;
+} // namespace
+
+std::unique_ptr<GpuMojoMediaClient::PlatformDelegate>
+GpuMojoMediaClient::PlatformDelegate::Create(GpuMojoMediaClient* client) {
+ return std::make_unique<CrosPlatformDelegate>(client);
}
-#endif
} // namespace media
diff --git a/chromium/media/mojo/services/gpu_mojo_media_client_cros_ash.cc b/chromium/media/mojo/services/gpu_mojo_media_client_cros_ash.cc
deleted file mode 100644
index ca464f1baf8..00000000000
--- a/chromium/media/mojo/services/gpu_mojo_media_client_cros_ash.cc
+++ /dev/null
@@ -1,15 +0,0 @@
-// Copyright 2021 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include "chromeos/components/cdm_factory_daemon/chromeos_cdm_factory.h" // nogncheck
-#include "media/mojo/services/gpu_mojo_media_client.h"
-
-namespace media {
-
-std::unique_ptr<CdmFactory> CreatePlatformCdmFactory(
- mojom::FrameInterfaceFactory* frame_interfaces) {
- return std::make_unique<chromeos::ChromeOsCdmFactory>(frame_interfaces);
-}
-
-} // namespace media \ No newline at end of file
diff --git a/chromium/media/mojo/services/gpu_mojo_media_client_default.cc b/chromium/media/mojo/services/gpu_mojo_media_client_default.cc
new file mode 100644
index 00000000000..24c6d5f5064
--- /dev/null
+++ b/chromium/media/mojo/services/gpu_mojo_media_client_default.cc
@@ -0,0 +1,14 @@
+// Copyright 2021 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/mojo/services/gpu_mojo_media_client.h"
+
+namespace media {
+
+std::unique_ptr<GpuMojoMediaClient::PlatformDelegate>
+GpuMojoMediaClient::PlatformDelegate::Create(GpuMojoMediaClient* client) {
+ return std::make_unique<GpuMojoMediaClient::PlatformDelegate>();
+}
+
+} // namespace media
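
[Editor's note] The new default file replaces the deleted gpu_mojo_media_client_stubs.cc by handing back the base PlatformDelegate itself, so its behaviour comes from the base-class virtual method bodies defined in gpu_mojo_media_client.cc, which is outside this excerpt. Judging from the stubbed behaviour removed below (null decoders, no CDM factory, VideoDecoderType::kUnknown), those defaults most likely look roughly like this sketch; treat it as an assumption, not the actual implementation.

  // Assumed base-class defaults (not shown in this patch); mirrors the
  // behaviour of the deleted gpu_mojo_media_client_stubs.cc.
  std::unique_ptr<VideoDecoder>
  GpuMojoMediaClient::PlatformDelegate::CreateVideoDecoder(
      const VideoDecoderTraits& /*traits*/) {
    return nullptr;
  }

  void GpuMojoMediaClient::PlatformDelegate::GetSupportedVideoDecoderConfigs(
      MojoMediaClient::SupportedVideoDecoderConfigsCallback callback) {
    std::move(callback).Run({});
  }

  std::unique_ptr<AudioDecoder>
  GpuMojoMediaClient::PlatformDelegate::CreateAudioDecoder(
      scoped_refptr<base::SingleThreadTaskRunner> /*task_runner*/) {
    return nullptr;
  }

  std::unique_ptr<CdmFactory>
  GpuMojoMediaClient::PlatformDelegate::CreateCdmFactory(
      mojom::FrameInterfaceFactory* /*frame_interfaces*/) {
    return nullptr;
  }

  VideoDecoderType
  GpuMojoMediaClient::PlatformDelegate::GetDecoderImplementationType() {
    return VideoDecoderType::kUnknown;
  }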
diff --git a/chromium/media/mojo/services/gpu_mojo_media_client_mac.cc b/chromium/media/mojo/services/gpu_mojo_media_client_mac.cc
index fad1ca3c962..a1bcb33d554 100644
--- a/chromium/media/mojo/services/gpu_mojo_media_client_mac.cc
+++ b/chromium/media/mojo/services/gpu_mojo_media_client_mac.cc
@@ -7,40 +7,43 @@
#include "media/mojo/services/gpu_mojo_media_client.h"
namespace media {
-
-std::unique_ptr<VideoDecoder> CreatePlatformVideoDecoder(
- const VideoDecoderTraits& traits) {
- return VdaVideoDecoder::Create(
- traits.task_runner, traits.gpu_task_runner, traits.media_log->Clone(),
- *traits.target_color_space, traits.gpu_preferences,
- *traits.gpu_workarounds, traits.get_command_buffer_stub_cb);
-}
-
-absl::optional<SupportedVideoDecoderConfigs>
-GetPlatformSupportedVideoDecoderConfigs(
- gpu::GpuDriverBugWorkarounds gpu_workarounds,
- gpu::GpuPreferences gpu_preferences,
- base::OnceCallback<SupportedVideoDecoderConfigs()> get_vda_configs) {
- return std::move(get_vda_configs).Run();
-}
-
-std::unique_ptr<AudioDecoder> CreatePlatformAudioDecoder(
- scoped_refptr<base::SingleThreadTaskRunner> task_runner) {
- return nullptr;
-}
-
-// This class doesn't exist on mac, so we need a stub for unique_ptr.
-class CdmFactory {};
-
-std::unique_ptr<CdmFactory> CreatePlatformCdmFactory(
- mojom::FrameInterfaceFactory* frame_interfaces) {
- return nullptr;
-}
-
-VideoDecoderType GetPlatformDecoderImplementationType(
- gpu::GpuDriverBugWorkarounds gpu_workarounds,
- gpu::GpuPreferences gpu_preferences) {
- return VideoDecoderType::kVda;
+namespace {
+
+class MacPlatformDelegate : public GpuMojoMediaClient::PlatformDelegate {
+ public:
+ explicit MacPlatformDelegate(GpuMojoMediaClient* client) : client_(client) {}
+ ~MacPlatformDelegate() override = default;
+
+ MacPlatformDelegate(const MacPlatformDelegate&) = delete;
+ void operator=(const MacPlatformDelegate&) = delete;
+
+ // GpuMojoMediaClient::PlatformDelegate implementation.
+ std::unique_ptr<VideoDecoder> CreateVideoDecoder(
+ const VideoDecoderTraits& traits) override {
+ return VdaVideoDecoder::Create(
+ traits.task_runner, traits.gpu_task_runner, traits.media_log->Clone(),
+ *traits.target_color_space, client_->gpu_preferences(),
+ client_->gpu_workarounds(), traits.get_command_buffer_stub_cb);
+ }
+
+ void GetSupportedVideoDecoderConfigs(
+ MojoMediaClient::SupportedVideoDecoderConfigsCallback callback) override {
+ std::move(callback).Run(client_->GetVDAVideoDecoderConfigs());
+ }
+
+ VideoDecoderType GetDecoderImplementationType() override {
+ return VideoDecoderType::kVda;
+ }
+
+ private:
+ GpuMojoMediaClient* client_;
+};
+
+} // namespace
+
+std::unique_ptr<GpuMojoMediaClient::PlatformDelegate>
+GpuMojoMediaClient::PlatformDelegate::Create(GpuMojoMediaClient* client) {
+ return std::make_unique<MacPlatformDelegate>(client);
}
} // namespace media
diff --git a/chromium/media/mojo/services/gpu_mojo_media_client_stubs.cc b/chromium/media/mojo/services/gpu_mojo_media_client_stubs.cc
deleted file mode 100644
index 135bd87ab22..00000000000
--- a/chromium/media/mojo/services/gpu_mojo_media_client_stubs.cc
+++ /dev/null
@@ -1,43 +0,0 @@
-// Copyright 2021 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include "media/base/audio_decoder.h"
-#include "media/base/video_decoder.h"
-#include "media/mojo/services/gpu_mojo_media_client.h"
-
-namespace media {
-
-std::unique_ptr<VideoDecoder> CreatePlatformVideoDecoder(
- const VideoDecoderTraits& traits) {
- return nullptr;
-}
-
-absl::optional<SupportedVideoDecoderConfigs>
-GetPlatformSupportedVideoDecoderConfigs(
- gpu::GpuDriverBugWorkarounds gpu_workarounds,
- gpu::GpuPreferences gpu_preferences,
- base::OnceCallback<SupportedVideoDecoderConfigs()> get_vda_configs) {
- return {};
-}
-
-std::unique_ptr<AudioDecoder> CreatePlatformAudioDecoder(
- scoped_refptr<base::SingleThreadTaskRunner> task_runner) {
- return nullptr;
-}
-
-// This class doesn't exist on any of the platforms that use the stubs.
-class CdmFactory {};
-
-std::unique_ptr<CdmFactory> CreatePlatformCdmFactory(
- mojom::FrameInterfaceFactory* frame_interfaces) {
- return nullptr;
-}
-
-VideoDecoderType GetPlatformDecoderImplementationType(
- gpu::GpuDriverBugWorkarounds gpu_workarounds,
- gpu::GpuPreferences gpu_preferences) {
- return VideoDecoderType::kUnknown;
-}
-
-} // namespace media
diff --git a/chromium/media/mojo/services/gpu_mojo_media_client_win.cc b/chromium/media/mojo/services/gpu_mojo_media_client_win.cc
index 63f5d9ade6b..94b63c0d5b4 100644
--- a/chromium/media/mojo/services/gpu_mojo_media_client_win.cc
+++ b/chromium/media/mojo/services/gpu_mojo_media_client_win.cc
@@ -32,61 +32,68 @@ bool ShouldUseD3D11VideoDecoder(
return true;
}
-} // namespace
+class WinPlatformDelegate : public GpuMojoMediaClient::PlatformDelegate {
+ public:
+ explicit WinPlatformDelegate(GpuMojoMediaClient* client) : client_(client) {}
+ ~WinPlatformDelegate() override = default;
+
+ WinPlatformDelegate(const WinPlatformDelegate&) = delete;
+ void operator=(const WinPlatformDelegate&) = delete;
-std::unique_ptr<VideoDecoder> CreatePlatformVideoDecoder(
- const VideoDecoderTraits& traits) {
- if (!ShouldUseD3D11VideoDecoder(*traits.gpu_workarounds)) {
- if (traits.gpu_workarounds->disable_dxva_video_decoder)
- return nullptr;
- return VdaVideoDecoder::Create(
- traits.task_runner, traits.gpu_task_runner, traits.media_log->Clone(),
- *traits.target_color_space, traits.gpu_preferences,
- *traits.gpu_workarounds, traits.get_command_buffer_stub_cb);
+  // Helper that computes the supported decoder configs synchronously; used by
+  // both CreateVideoDecoder() and the GetSupportedVideoDecoderConfigs()
+  // override below.
+ SupportedVideoDecoderConfigs GetSupportedVideoDecoderConfigsSync() {
+ if (ShouldUseD3D11VideoDecoder(client_->gpu_workarounds())) {
+ return D3D11VideoDecoder::GetSupportedVideoDecoderConfigs(
+ client_->gpu_preferences(), client_->gpu_workarounds(),
+ GetD3D11DeviceCallback());
+ } else if (!client_->gpu_workarounds().disable_dxva_video_decoder) {
+ return client_->GetVDAVideoDecoderConfigs();
+ } else {
+ return {};
+ }
}
- DCHECK(base::FeatureList::IsEnabled(kD3D11VideoDecoder));
- const bool enable_hdr =
- gl::DirectCompositionSurfaceWin::IsHDRSupported() ||
- base::FeatureList::IsEnabled(kD3D11VideoDecoderForceEnableHDR);
- return D3D11VideoDecoder::Create(
- traits.gpu_task_runner, traits.media_log->Clone(), traits.gpu_preferences,
- *traits.gpu_workarounds, traits.get_command_buffer_stub_cb,
- GetD3D11DeviceCallback(), traits.get_cached_configs_cb.Run(), enable_hdr);
-}
-absl::optional<SupportedVideoDecoderConfigs>
-GetPlatformSupportedVideoDecoderConfigs(
- gpu::GpuDriverBugWorkarounds gpu_workarounds,
- gpu::GpuPreferences gpu_preferences,
- base::OnceCallback<SupportedVideoDecoderConfigs()> get_vda_configs) {
- SupportedVideoDecoderConfigs supported_configs;
- if (ShouldUseD3D11VideoDecoder(gpu_workarounds)) {
- supported_configs = D3D11VideoDecoder::GetSupportedVideoDecoderConfigs(
- gpu_preferences, gpu_workarounds, GetD3D11DeviceCallback());
- } else if (!gpu_workarounds.disable_dxva_video_decoder) {
- supported_configs = std::move(get_vda_configs).Run();
+ // GpuMojoMediaClient::PlatformDelegate implementation.
+ std::unique_ptr<VideoDecoder> CreateVideoDecoder(
+ const VideoDecoderTraits& traits) override {
+ if (!ShouldUseD3D11VideoDecoder(client_->gpu_workarounds())) {
+ if (client_->gpu_workarounds().disable_dxva_video_decoder)
+ return nullptr;
+ return VdaVideoDecoder::Create(
+ traits.task_runner, traits.gpu_task_runner, traits.media_log->Clone(),
+ *traits.target_color_space, client_->gpu_preferences(),
+ client_->gpu_workarounds(), traits.get_command_buffer_stub_cb);
+ }
+ DCHECK(base::FeatureList::IsEnabled(kD3D11VideoDecoder));
+ return D3D11VideoDecoder::Create(
+ traits.gpu_task_runner, traits.media_log->Clone(),
+ client_->gpu_preferences(), client_->gpu_workarounds(),
+ traits.get_command_buffer_stub_cb, GetD3D11DeviceCallback(),
+ GetSupportedVideoDecoderConfigsSync(),
+ gl::DirectCompositionSurfaceWin::IsHDRSupported());
}
- return supported_configs;
-}
-std::unique_ptr<AudioDecoder> CreatePlatformAudioDecoder(
- scoped_refptr<base::SingleThreadTaskRunner> task_runner) {
- return nullptr;
-}
+ void GetSupportedVideoDecoderConfigs(
+ MojoMediaClient::SupportedVideoDecoderConfigsCallback callback) override {
+ std::move(callback).Run(GetSupportedVideoDecoderConfigsSync());
+ }
-VideoDecoderType GetPlatformDecoderImplementationType(
- gpu::GpuDriverBugWorkarounds gpu_workarounds,
- gpu::GpuPreferences gpu_preferences) {
- if (!ShouldUseD3D11VideoDecoder(gpu_workarounds))
- return VideoDecoderType::kVda;
- return VideoDecoderType::kD3D11;
-}
+ VideoDecoderType GetDecoderImplementationType() override {
+ if (!ShouldUseD3D11VideoDecoder(client_->gpu_workarounds()))
+ return VideoDecoderType::kVda;
+ return VideoDecoderType::kD3D11;
+ }
+
+ private:
+ GpuMojoMediaClient* client_;
+};
+
+} // namespace
-// There is no CdmFactory on windows, so just stub it out.
-class CdmFactory {};
-std::unique_ptr<CdmFactory> CreatePlatformCdmFactory(
- mojom::FrameInterfaceFactory* frame_interfaces) {
- return nullptr;
+std::unique_ptr<GpuMojoMediaClient::PlatformDelegate>
+GpuMojoMediaClient::PlatformDelegate::Create(GpuMojoMediaClient* client) {
+ return std::make_unique<WinPlatformDelegate>(client);
}
} // namespace media
diff --git a/chromium/media/mojo/services/interface_factory_impl.cc b/chromium/media/mojo/services/interface_factory_impl.cc
index d0c0eb26c31..f62ae800d08 100644
--- a/chromium/media/mojo/services/interface_factory_impl.cc
+++ b/chromium/media/mojo/services/interface_factory_impl.cc
@@ -140,13 +140,14 @@ void InterfaceFactoryImpl::CreateFlingingRenderer(
#if defined(OS_WIN)
void InterfaceFactoryImpl::CreateMediaFoundationRenderer(
+ mojo::PendingRemote<mojom::MediaLog> media_log_remote,
mojo::PendingReceiver<media::mojom::Renderer> receiver,
mojo::PendingReceiver<media::mojom::MediaFoundationRendererExtension>
renderer_extension_receiver) {
DVLOG(2) << __func__;
auto renderer = mojo_media_client_->CreateMediaFoundationRenderer(
base::ThreadTaskRunnerHandle::Get(), frame_interfaces_.get(),
- std::move(renderer_extension_receiver));
+ std::move(media_log_remote), std::move(renderer_extension_receiver));
if (!renderer) {
DLOG(ERROR) << "MediaFoundationRenderer creation failed.";
return;
diff --git a/chromium/media/mojo/services/interface_factory_impl.h b/chromium/media/mojo/services/interface_factory_impl.h
index d238edb589c..cf24954b99f 100644
--- a/chromium/media/mojo/services/interface_factory_impl.h
+++ b/chromium/media/mojo/services/interface_factory_impl.h
@@ -19,6 +19,7 @@
#include "media/mojo/mojom/decryptor.mojom.h"
#include "media/mojo/mojom/frame_interface_factory.mojom.h"
#include "media/mojo/mojom/interface_factory.mojom.h"
+#include "media/mojo/mojom/media_log.mojom.h"
#include "media/mojo/mojom/renderer.mojom.h"
#include "media/mojo/mojom/video_decoder.mojom.h"
#include "media/mojo/services/deferred_destroy_unique_receiver_set.h"
@@ -41,6 +42,10 @@ class InterfaceFactoryImpl final
InterfaceFactoryImpl(
mojo::PendingRemote<mojom::FrameInterfaceFactory> frame_interfaces,
MojoMediaClient* mojo_media_client);
+
+ InterfaceFactoryImpl(const InterfaceFactoryImpl&) = delete;
+ InterfaceFactoryImpl& operator=(const InterfaceFactoryImpl&) = delete;
+
~InterfaceFactoryImpl() final;
// mojom::InterfaceFactory implementation.
@@ -71,6 +76,7 @@ class InterfaceFactoryImpl final
#endif // defined(OS_ANDROID)
#if defined(OS_WIN)
void CreateMediaFoundationRenderer(
+ mojo::PendingRemote<mojom::MediaLog> media_log_remote,
mojo::PendingReceiver<mojom::Renderer> receiver,
mojo::PendingReceiver<mojom::MediaFoundationRendererExtension>
renderer_extension_receiver) final;
@@ -145,8 +151,6 @@ class InterfaceFactoryImpl final
// NOTE: Weak pointers must be invalidated before all other member variables.
base::WeakPtrFactory<InterfaceFactoryImpl> weak_ptr_factory_{this};
-
- DISALLOW_COPY_AND_ASSIGN(InterfaceFactoryImpl);
};
} // namespace media
diff --git a/chromium/media/mojo/services/media_foundation_mojo_media_client.cc b/chromium/media/mojo/services/media_foundation_mojo_media_client.cc
index 484ccae9dee..e628948afda 100644
--- a/chromium/media/mojo/services/media_foundation_mojo_media_client.cc
+++ b/chromium/media/mojo/services/media_foundation_mojo_media_client.cc
@@ -11,9 +11,7 @@
namespace media {
-MediaFoundationMojoMediaClient::MediaFoundationMojoMediaClient(
- const base::FilePath& user_data_dir)
- : user_data_dir_(user_data_dir) {
+MediaFoundationMojoMediaClient::MediaFoundationMojoMediaClient() {
DVLOG_FUNC(1);
}
@@ -25,11 +23,12 @@ std::unique_ptr<Renderer>
MediaFoundationMojoMediaClient::CreateMediaFoundationRenderer(
scoped_refptr<base::SingleThreadTaskRunner> task_runner,
mojom::FrameInterfaceFactory* frame_interfaces,
+ mojo::PendingRemote<mojom::MediaLog> media_log_remote,
mojo::PendingReceiver<mojom::MediaFoundationRendererExtension>
renderer_extension_receiver) {
DVLOG_FUNC(1);
return std::make_unique<MediaFoundationRendererWrapper>(
- std::move(task_runner), frame_interfaces,
+ std::move(task_runner), frame_interfaces, std::move(media_log_remote),
std::move(renderer_extension_receiver));
}
@@ -37,7 +36,7 @@ std::unique_ptr<CdmFactory> MediaFoundationMojoMediaClient::CreateCdmFactory(
mojom::FrameInterfaceFactory* frame_interfaces) {
DVLOG_FUNC(1);
return std::make_unique<MediaFoundationCdmFactory>(
- std::make_unique<MojoCdmHelper>(frame_interfaces), user_data_dir_);
+ std::make_unique<MojoCdmHelper>(frame_interfaces));
}
} // namespace media
diff --git a/chromium/media/mojo/services/media_foundation_mojo_media_client.h b/chromium/media/mojo/services/media_foundation_mojo_media_client.h
index 3e1fa574853..ebd045ef558 100644
--- a/chromium/media/mojo/services/media_foundation_mojo_media_client.h
+++ b/chromium/media/mojo/services/media_foundation_mojo_media_client.h
@@ -17,21 +17,24 @@ namespace media {
// process hosting MediaFoundationRenderer and MediaFoundationCdm.
class MediaFoundationMojoMediaClient final : public MojoMediaClient {
public:
- explicit MediaFoundationMojoMediaClient(const base::FilePath& user_data_dir);
+ MediaFoundationMojoMediaClient();
+
+ MediaFoundationMojoMediaClient(const MediaFoundationMojoMediaClient&) =
+ delete;
+ MediaFoundationMojoMediaClient& operator=(
+ const MediaFoundationMojoMediaClient&) = delete;
+
~MediaFoundationMojoMediaClient() override;
// MojoMediaClient implementation.
std::unique_ptr<Renderer> CreateMediaFoundationRenderer(
scoped_refptr<base::SingleThreadTaskRunner> task_runner,
mojom::FrameInterfaceFactory* frame_interfaces,
+ mojo::PendingRemote<mojom::MediaLog> media_log_remote,
mojo::PendingReceiver<mojom::MediaFoundationRendererExtension>
renderer_extension_receiver) override;
std::unique_ptr<CdmFactory> CreateCdmFactory(
mojom::FrameInterfaceFactory* frame_interfaces) override;
-
- private:
- base::FilePath user_data_dir_;
- DISALLOW_COPY_AND_ASSIGN(MediaFoundationMojoMediaClient);
};
} // namespace media
diff --git a/chromium/media/mojo/services/media_foundation_renderer_wrapper.cc b/chromium/media/mojo/services/media_foundation_renderer_wrapper.cc
index 698941ed74b..9cb3dd5f2a1 100644
--- a/chromium/media/mojo/services/media_foundation_renderer_wrapper.cc
+++ b/chromium/media/mojo/services/media_foundation_renderer_wrapper.cc
@@ -7,6 +7,7 @@
#include "base/callback_helpers.h"
#include "media/base/win/mf_helpers.h"
#include "media/mojo/mojom/renderer_extensions.mojom.h"
+#include "media/mojo/services/mojo_media_log.h"
#include "mojo/public/cpp/bindings/callback_helpers.h"
#include "mojo/public/cpp/system/platform_handle.h"
@@ -31,11 +32,13 @@ bool HasAudio(MediaResource* media_resource) {
MediaFoundationRendererWrapper::MediaFoundationRendererWrapper(
scoped_refptr<base::SequencedTaskRunner> task_runner,
mojom::FrameInterfaceFactory* frame_interfaces,
+ mojo::PendingRemote<mojom::MediaLog> media_log_remote,
mojo::PendingReceiver<RendererExtension> renderer_extension_receiver)
: frame_interfaces_(frame_interfaces),
renderer_(std::make_unique<MediaFoundationRenderer>(
- std::move(task_runner),
- /*force_dcomp_mode_for_testing=*/false)),
+ task_runner,
+ std::make_unique<MojoMediaLog>(std::move(media_log_remote),
+ task_runner))),
renderer_extension_receiver_(this,
std::move(renderer_extension_receiver)),
site_mute_observer_(this) {
@@ -95,7 +98,8 @@ base::TimeDelta MediaFoundationRendererWrapper::GetMediaTime() {
void MediaFoundationRendererWrapper::GetDCOMPSurface(
GetDCOMPSurfaceCallback callback) {
if (has_get_dcomp_surface_called_) {
- mojo::ReportBadMessage("GetDCOMPSurface should only be called once!");
+ renderer_extension_receiver_.ReportBadMessage(
+ "GetDCOMPSurface should only be called once!");
return;
}
@@ -109,9 +113,10 @@ void MediaFoundationRendererWrapper::SetVideoStreamEnabled(bool enabled) {
renderer_->SetVideoStreamEnabled(enabled);
}
-void MediaFoundationRendererWrapper::SetOutputParams(
- const gfx::Rect& output_rect) {
- renderer_->SetOutputParams(output_rect);
+void MediaFoundationRendererWrapper::SetOutputRect(
+ const gfx::Rect& output_rect,
+ SetOutputRectCallback callback) {
+ renderer_->SetOutputRect(output_rect, std::move(callback));
}
void MediaFoundationRendererWrapper::OnMuteStateChange(bool muted) {
diff --git a/chromium/media/mojo/services/media_foundation_renderer_wrapper.h b/chromium/media/mojo/services/media_foundation_renderer_wrapper.h
index 8031b4dcddb..425ada63cda 100644
--- a/chromium/media/mojo/services/media_foundation_renderer_wrapper.h
+++ b/chromium/media/mojo/services/media_foundation_renderer_wrapper.h
@@ -33,6 +33,7 @@ class MediaFoundationRendererWrapper final
MediaFoundationRendererWrapper(
scoped_refptr<base::SequencedTaskRunner> task_runner,
mojom::FrameInterfaceFactory* frame_interfaces,
+ mojo::PendingRemote<mojom::MediaLog> media_log_remote,
mojo::PendingReceiver<RendererExtension> renderer_extension_receiver);
MediaFoundationRendererWrapper(const MediaFoundationRendererWrapper&) =
delete;
@@ -55,7 +56,8 @@ class MediaFoundationRendererWrapper final
// mojom::MediaFoundationRendererExtension implementation.
void GetDCOMPSurface(GetDCOMPSurfaceCallback callback) override;
void SetVideoStreamEnabled(bool enabled) override;
- void SetOutputParams(const gfx::Rect& output_rect) override;
+ void SetOutputRect(const gfx::Rect& output_rect,
+ SetOutputRectCallback callback) override;
// mojom::MuteStateObserver implementation.
void OnMuteStateChange(bool muted) override;
diff --git a/chromium/media/mojo/services/media_foundation_service.cc b/chromium/media/mojo/services/media_foundation_service.cc
index 3cbb55a7586..89871aab583 100644
--- a/chromium/media/mojo/services/media_foundation_service.cc
+++ b/chromium/media/mojo/services/media_foundation_service.cc
@@ -4,6 +4,7 @@
#include "media/mojo/services/media_foundation_service.h"
+#include <map>
#include <memory>
#include "base/bind.h"
@@ -21,6 +22,7 @@
#include "media/mojo/mojom/interface_factory.mojom.h"
#include "media/mojo/mojom/key_system_support.mojom.h"
#include "media/mojo/services/interface_factory_impl.h"
+#include "third_party/abseil-cpp/absl/types/optional.h"
using Microsoft::WRL::ComPtr;
@@ -51,23 +53,22 @@ const char kHwSecureRobustness[] = "HW_SECURE_ALL";
// to query.
constexpr VideoCodec kAllVideoCodecs[] = {
#if BUILDFLAG(USE_PROPRIETARY_CODECS)
- VideoCodec::kCodecH264,
+ VideoCodec::kH264,
#if BUILDFLAG(ENABLE_PLATFORM_HEVC)
- VideoCodec::kCodecHEVC,
+ VideoCodec::kHEVC,
#if BUILDFLAG(ENABLE_PLATFORM_DOLBY_VISION)
- VideoCodec::kCodecDolbyVision,
+ VideoCodec::kDolbyVision,
#endif // BUILDFLAG(ENABLE_PLATFORM_DOLBY_VISION)
#endif // BUILDFLAG(ENABLE_PLATFORM_HEVC)
#endif // BUILDFLAG(USE_PROPRIETARY_CODECS)
- VideoCodec::kCodecVP9, VideoCodec::kCodecAV1};
+ VideoCodec::kVP9, VideoCodec::kAV1};
constexpr AudioCodec kAllAudioCodecs[] = {
#if BUILDFLAG(USE_PROPRIETARY_CODECS)
- AudioCodec::kCodecAAC, AudioCodec::kCodecEAC3,
- AudioCodec::kCodecAC3, AudioCodec::kCodecMpegHAudio,
+ AudioCodec::kAAC, AudioCodec::kEAC3, AudioCodec::kAC3,
+ AudioCodec::kMpegHAudio,
#endif
- AudioCodec::kCodecVorbis, AudioCodec::kCodecFLAC,
- AudioCodec::kCodecOpus};
+ AudioCodec::kVorbis, AudioCodec::kFLAC, AudioCodec::kOpus};
constexpr EncryptionScheme kAllEncryptionSchemes[] = {EncryptionScheme::kCenc,
EncryptionScheme::kCbcs};
@@ -78,21 +79,24 @@ bool IsTypeSupportedInternal(
ComPtr<IMFContentDecryptionModuleFactory> cdm_factory,
const std::string& key_system,
const std::string& content_type) {
- return cdm_factory->IsTypeSupported(base::UTF8ToWide(key_system).c_str(),
- base::UTF8ToWide(content_type).c_str());
+ bool supported =
+ cdm_factory->IsTypeSupported(base::UTF8ToWide(key_system).c_str(),
+ base::UTF8ToWide(content_type).c_str());
+ DVLOG(3) << __func__ << " " << (supported ? "[yes]" : "[no]") << ": "
+ << key_system << ", " << content_type;
+ return supported;
}
std::string GetFourCCString(VideoCodec codec) {
switch (codec) {
- case VideoCodec::kCodecH264:
+ case VideoCodec::kH264:
return "avc1";
- case VideoCodec::kCodecVP9:
+ case VideoCodec::kVP9:
return "vp09";
- case VideoCodec::kCodecHEVC:
+ case VideoCodec::kHEVC:
+ case VideoCodec::kDolbyVision:
return "hvc1";
- case VideoCodec::kCodecDolbyVision:
- return "dvhe";
- case VideoCodec::kCodecAV1:
+ case VideoCodec::kAV1:
return "av01";
default:
NOTREACHED()
@@ -102,21 +106,30 @@ std::string GetFourCCString(VideoCodec codec) {
return "";
}
+// Returns an "ext-profile" feature query (with a trailing comma) for a video codec.
+// Returns an empty string if "ext-profile" is not needed.
+std::string GetExtProfile(VideoCodec codec) {
+ if (codec == VideoCodec::kDolbyVision)
+ return "ext-profile=dvhe.05,";
+
+ return "";
+}
+
std::string GetFourCCString(AudioCodec codec) {
switch (codec) {
- case AudioCodec::kCodecAAC:
+ case AudioCodec::kAAC:
return "mp4a";
- case AudioCodec::kCodecVorbis:
+ case AudioCodec::kVorbis:
return "vrbs";
- case AudioCodec::kCodecFLAC:
+ case AudioCodec::kFLAC:
return "fLaC";
- case AudioCodec::kCodecOpus:
+ case AudioCodec::kOpus:
return "Opus";
- case AudioCodec::kCodecEAC3:
+ case AudioCodec::kEAC3:
return "ec-3";
- case AudioCodec::kCodecAC3:
+ case AudioCodec::kAC3:
return "ac-3";
- case AudioCodec::kCodecMpegHAudio:
+ case AudioCodec::kMpegHAudio:
return "mhm1";
default:
NOTREACHED()
@@ -155,19 +168,41 @@ int GetIvSize(EncryptionScheme scheme) {
return 0;
}
+// Feature name:value mapping.
+using FeatureMap = std::map<std::string, std::string>;
+
+// Construct the query type string based on `video_codec`, optional
+// `audio_codec`, `kDefaultFeatures` and `extra_features`.
+std::string GetTypeString(VideoCodec video_codec,
+ absl::optional<AudioCodec> audio_codec,
+ const FeatureMap& extra_features) {
+ auto codec_string = GetFourCCString(video_codec);
+ if (audio_codec.has_value())
+ codec_string += "," + GetFourCCString(audio_codec.value());
+
+ auto feature_string = GetExtProfile(video_codec) + kDefaultFeatures;
+ DCHECK(!feature_string.empty()) << "default feature cannot be empty";
+ for (const auto& feature : extra_features) {
+ DCHECK(!feature.first.empty() && !feature.second.empty());
+ feature_string += "," + feature.first + "=" + feature.second;
+ }
+
+ return base::ReplaceStringPlaceholders(
+ "video/mp4;codecs=\"$1\";features=\"$2\"", {codec_string, feature_string},
+ /*offsets=*/nullptr);
+}
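
[Editor's note] GetTypeString() above assembles the query passed to MFCDM IsTypeSupported() from a fourCC list and a feature list. Because kDefaultFeatures and the query-name constants are defined above this hunk, the stand-alone sketch below only re-creates the shape of the string with plain std::string; "<default-features>" and the feature name "robustness-query" are placeholders, while "HW_SECURE_ALL" matches the kHwSecureRobustness value visible earlier in this file.

  // Illustrative only: stand-alone rendering of the string GetTypeString()
  // builds, with placeholder feature names.
  #include <map>
  #include <string>

  std::string BuildTypeString(const std::string& video_fourcc,
                              const std::string& audio_fourcc,
                              const std::string& ext_profile,
                              const std::map<std::string, std::string>& extra) {
    std::string codecs = video_fourcc;
    if (!audio_fourcc.empty())
      codecs += "," + audio_fourcc;
    std::string features = ext_profile + "<default-features>";
    for (const auto& feature : extra)
      features += "," + feature.first + "=" + feature.second;
    return "video/mp4;codecs=\"" + codecs + "\";features=\"" + features + "\"";
  }

  // BuildTypeString("vp09", "", "", {{"robustness-query", "HW_SECURE_ALL"}})
  //   -> video/mp4;codecs="vp09";
  //      features="<default-features>,robustness-query=HW_SECURE_ALL"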
+
base::flat_set<EncryptionScheme> GetSupportedEncryptionSchemes(
IsTypeSupportedCB callback,
- VideoCodec codec,
+ VideoCodec video_codec,
const std::string& robustness) {
base::flat_set<EncryptionScheme> supported_schemes;
for (const auto scheme : kAllEncryptionSchemes) {
- auto type = base::ReplaceStringPlaceholders(
- "video/mp4;codecs=\"$1\";features=\"$2,$3=$4,$5=$6,$7=$8\"",
- {GetFourCCString(codec), kDefaultFeatures, kEncryptionSchemeQueryName,
- GetName(scheme), kEncryptionIvQueryName,
- base::NumberToString(GetIvSize(scheme)), kRobustnessQueryName,
- robustness.c_str()},
- 0);
+ auto type = GetTypeString(
+ video_codec, /*audio_codec=*/absl::nullopt,
+ {{kEncryptionSchemeQueryName, GetName(scheme)},
+ {kEncryptionIvQueryName, base::NumberToString(GetIvSize(scheme))},
+ {kRobustnessQueryName, robustness.c_str()}});
if (callback.Run(type))
supported_schemes.insert(scheme);
@@ -186,46 +221,40 @@ absl::optional<CdmCapability> GetCdmCapability(IsTypeSupportedCB callback,
CdmCapability capability;
// Query video codecs.
- for (const auto codec : kAllVideoCodecs) {
- auto content_type = base::ReplaceStringPlaceholders(
- "video/mp4;codecs=\"$1\";features=\"$2,$3=$4\"",
- {GetFourCCString(codec), kDefaultFeatures, kRobustnessQueryName,
- robustness},
- /*offsets=*/nullptr);
-
- if (callback.Run(content_type)) {
+ for (const auto video_codec : kAllVideoCodecs) {
+ auto type = GetTypeString(video_codec, /*audio_codec=*/absl::nullopt,
+ {{kRobustnessQueryName, robustness}});
+
+ if (callback.Run(type)) {
// IsTypeSupported() does not support querying profiling, so specify {}
// to indicate all relevant profiles should be considered supported.
const std::vector<media::VideoCodecProfile> kAllProfiles = {};
- capability.video_codecs.emplace(codec, kAllProfiles);
+ capability.video_codecs.emplace(video_codec, kAllProfiles);
}
}
   // The IsTypeSupported() query string requires a video codec, so stop if no
   // video codecs are supported.
if (capability.video_codecs.empty()) {
- DVLOG(2) << "No video codecs are supported.";
+ DVLOG(2) << "No video codecs supported for is_hw_secure=" << is_hw_secure;
return absl::nullopt;
}
// Query audio codecs.
   // Audio is usually independent of the video codec. So we use <one of the
// supported video codecs> + <audio codec> to query the audio capability.
- for (const auto codec : kAllAudioCodecs) {
- auto type = base::ReplaceStringPlaceholders(
- "video/mp4;codecs=\"$1,$2\";features=\"$3,$4=$5\"",
- {GetFourCCString(capability.video_codecs.begin()->first),
- GetFourCCString(codec), kDefaultFeatures, kRobustnessQueryName,
- robustness},
- /*offsets=*/nullptr);
+ for (const auto audio_codec : kAllAudioCodecs) {
+ const auto& video_codec = capability.video_codecs.begin()->first;
+ auto type = GetTypeString(video_codec, audio_codec,
+ {{kRobustnessQueryName, robustness}});
if (callback.Run(type))
- capability.audio_codecs.push_back(codec);
+ capability.audio_codecs.push_back(audio_codec);
}
// Query encryption scheme.
- // Note that the CdmCapability assumes all `video_codecs` + `encryotion_
+ // Note that the CdmCapability assumes all `video_codecs` + `encryption_
// schemes` combinations are supported. However, in Media Foundation,
// encryption scheme may be dependent on video codecs, so we query the
// encryption scheme for all supported video codecs and get the intersection
@@ -255,9 +284,8 @@ absl::optional<CdmCapability> GetCdmCapability(IsTypeSupportedCB callback,
} // namespace
MediaFoundationService::MediaFoundationService(
- mojo::PendingReceiver<mojom::MediaFoundationService> receiver,
- const base::FilePath& user_data_dir)
- : receiver_(this, std::move(receiver)), mojo_media_client_(user_data_dir) {
+ mojo::PendingReceiver<mojom::MediaFoundationService> receiver)
+ : receiver_(this, std::move(receiver)) {
DVLOG(1) << __func__;
mojo_media_client_.Initialize();
}
@@ -270,6 +298,7 @@ void MediaFoundationService::IsKeySystemSupported(
const std::string& key_system,
IsKeySystemSupportedCallback callback) {
DVLOG(2) << __func__ << ", key_system=" << key_system;
+
ComPtr<IMFContentDecryptionModuleFactory> cdm_factory;
HRESULT hr = MediaFoundationCdmModule::GetInstance()->GetCdmFactory(
key_system, cdm_factory);
@@ -289,7 +318,7 @@ void MediaFoundationService::IsKeySystemSupported(
GetCdmCapability(is_type_supported_cb, /*is_hw_secure=*/true);
if (!sw_secure_capability && !hw_secure_capability) {
- DVLOG(2) << "Get empty CdmCapbility.";
+ DVLOG(2) << "Get empty CdmCapability.";
std::move(callback).Run(false, nullptr);
return;
}
diff --git a/chromium/media/mojo/services/media_foundation_service.h b/chromium/media/mojo/services/media_foundation_service.h
index 63f03a0d3dd..a1df8ee734c 100644
--- a/chromium/media/mojo/services/media_foundation_service.h
+++ b/chromium/media/mojo/services/media_foundation_service.h
@@ -27,9 +27,8 @@ class MEDIA_MOJO_EXPORT MediaFoundationService final
// The MediaFoundationService process is NOT sandboxed after startup. The
// `ensure_sandboxed_cb` must be called after necessary initialization to
// ensure the process is sandboxed.
- MediaFoundationService(
- mojo::PendingReceiver<mojom::MediaFoundationService> receiver,
- const base::FilePath& user_data_dir);
+ explicit MediaFoundationService(
+ mojo::PendingReceiver<mojom::MediaFoundationService> receiver);
MediaFoundationService(const MediaFoundationService&) = delete;
MediaFoundationService operator=(const MediaFoundationService&) = delete;
~MediaFoundationService() final;
diff --git a/chromium/media/mojo/services/media_foundation_service_broker.cc b/chromium/media/mojo/services/media_foundation_service_broker.cc
index 7728ba86c37..314b2a43953 100644
--- a/chromium/media/mojo/services/media_foundation_service_broker.cc
+++ b/chromium/media/mojo/services/media_foundation_service_broker.cc
@@ -11,10 +11,8 @@ namespace media {
MediaFoundationServiceBroker::MediaFoundationServiceBroker(
mojo::PendingReceiver<mojom::MediaFoundationServiceBroker> receiver,
- const base::FilePath& user_data_dir,
base::OnceClosure ensure_sandboxed_cb)
: receiver_(this, std::move(receiver)),
- user_data_dir_(user_data_dir),
ensure_sandboxed_cb_(std::move(ensure_sandboxed_cb)) {}
MediaFoundationServiceBroker::~MediaFoundationServiceBroker() = default;
@@ -32,8 +30,8 @@ void MediaFoundationServiceBroker::GetService(
MediaFoundationCdmModule::GetInstance()->Initialize(cdm_path);
std::move(ensure_sandboxed_cb_).Run();
- media_foundation_service_ = std::make_unique<MediaFoundationService>(
- std::move(service_receiver), user_data_dir_);
+ media_foundation_service_ =
+ std::make_unique<MediaFoundationService>(std::move(service_receiver));
}
} // namespace media
diff --git a/chromium/media/mojo/services/media_foundation_service_broker.h b/chromium/media/mojo/services/media_foundation_service_broker.h
index 070baadb634..92f31e7c55b 100644
--- a/chromium/media/mojo/services/media_foundation_service_broker.h
+++ b/chromium/media/mojo/services/media_foundation_service_broker.h
@@ -25,7 +25,6 @@ class MEDIA_MOJO_EXPORT MediaFoundationServiceBroker final
// initialization to ensure the process is sandboxed.
MediaFoundationServiceBroker(
mojo::PendingReceiver<mojom::MediaFoundationServiceBroker> receiver,
- const base::FilePath& user_data_dir,
base::OnceClosure ensure_sandboxed_cb);
MediaFoundationServiceBroker(const MediaFoundationServiceBroker&) = delete;
MediaFoundationServiceBroker operator=(const MediaFoundationServiceBroker&) =
@@ -39,7 +38,6 @@ class MEDIA_MOJO_EXPORT MediaFoundationServiceBroker final
private:
mojo::Receiver<mojom::MediaFoundationServiceBroker> receiver_;
- base::FilePath user_data_dir_;
base::OnceClosure ensure_sandboxed_cb_;
std::unique_ptr<MediaFoundationService> media_foundation_service_;
};
diff --git a/chromium/media/mojo/services/media_metrics_provider.cc b/chromium/media/mojo/services/media_metrics_provider.cc
index bb121c2204c..214af2f9e92 100644
--- a/chromium/media/mojo/services/media_metrics_provider.cc
+++ b/chromium/media/mojo/services/media_metrics_provider.cc
@@ -102,13 +102,13 @@ std::string MediaMetricsProvider::GetUMANameForAVStream(
const PipelineInfo& player_info) {
constexpr char kPipelineUmaPrefix[] = "Media.PipelineStatus.AudioVideo.";
std::string uma_name = kPipelineUmaPrefix;
- if (player_info.video_codec == kCodecVP8)
+ if (player_info.video_codec == VideoCodec::kVP8)
uma_name += "VP8.";
- else if (player_info.video_codec == kCodecVP9)
+ else if (player_info.video_codec == VideoCodec::kVP9)
uma_name += "VP9.";
- else if (player_info.video_codec == kCodecH264)
+ else if (player_info.video_codec == VideoCodec::kH264)
uma_name += "H264.";
- else if (player_info.video_codec == kCodecAV1)
+ else if (player_info.video_codec == VideoCodec::kAV1)
uma_name += "AV1.";
else
return uma_name + "Other";
diff --git a/chromium/media/mojo/services/media_metrics_provider.h b/chromium/media/mojo/services/media_metrics_provider.h
index 745a50273f7..12e4d1c501b 100644
--- a/chromium/media/mojo/services/media_metrics_provider.h
+++ b/chromium/media/mojo/services/media_metrics_provider.h
@@ -52,6 +52,10 @@ class MEDIA_MOJO_EXPORT MediaMetricsProvider
VideoDecodePerfHistory::SaveCallback save_cb,
GetLearningSessionCallback learning_session_cb,
RecordAggregateWatchTimeCallback record_playback_cb);
+
+ MediaMetricsProvider(const MediaMetricsProvider&) = delete;
+ MediaMetricsProvider& operator=(const MediaMetricsProvider&) = delete;
+
~MediaMetricsProvider() override;
// Callback for retrieving a ukm::SourceId.
@@ -163,8 +167,6 @@ class MEDIA_MOJO_EXPORT MediaMetricsProvider
base::TimeDelta time_to_play_ready_ = kNoTimestamp;
absl::optional<container_names::MediaContainerName> container_name_;
-
- DISALLOW_COPY_AND_ASSIGN(MediaMetricsProvider);
};
} // namespace media
diff --git a/chromium/media/mojo/services/media_metrics_provider_unittest.cc b/chromium/media/mojo/services/media_metrics_provider_unittest.cc
index 56c30302154..8d390d4ab6d 100644
--- a/chromium/media/mojo/services/media_metrics_provider_unittest.cc
+++ b/chromium/media/mojo/services/media_metrics_provider_unittest.cc
@@ -29,6 +29,9 @@ class MediaMetricsProviderTest : public testing::Test {
public:
MediaMetricsProviderTest() { ResetMetricRecorders(); }
+ MediaMetricsProviderTest(const MediaMetricsProviderTest&) = delete;
+ MediaMetricsProviderTest& operator=(const MediaMetricsProviderTest&) = delete;
+
~MediaMetricsProviderTest() override { base::RunLoop().RunUntilIdle(); }
void Initialize(bool is_mse,
@@ -74,8 +77,6 @@ class MediaMetricsProviderTest : public testing::Test {
std::unique_ptr<ukm::TestAutoSetUkmRecorder> test_recorder_;
ukm::SourceId source_id_;
mojo::Remote<mojom::MediaMetricsProvider> provider_;
-
- DISALLOW_COPY_AND_ASSIGN(MediaMetricsProviderTest);
};
#define EXPECT_UKM(name, value) \
@@ -119,9 +120,9 @@ TEST_F(MediaMetricsProviderTest, TestUkm) {
// Now try one with different values and optional parameters set.
const std::string kTestOrigin2 = "https://test2.google.com/";
const std::string kClearKeyKeySystem = "org.w3.clearkey";
- const base::TimeDelta kMetadataTime = base::TimeDelta::FromSeconds(1);
- const base::TimeDelta kFirstFrameTime = base::TimeDelta::FromSeconds(2);
- const base::TimeDelta kPlayReadyTime = base::TimeDelta::FromSeconds(3);
+ const base::TimeDelta kMetadataTime = base::Seconds(1);
+ const base::TimeDelta kFirstFrameTime = base::Seconds(2);
+ const base::TimeDelta kPlayReadyTime = base::Seconds(3);
ResetMetricRecorders();
Initialize(false, false, false, kTestOrigin2, mojom::MediaURLScheme::kHttps);
@@ -183,9 +184,9 @@ TEST_F(MediaMetricsProviderTest, TestUkmMediaStream) {
// Now try one with different values and optional parameters set.
const std::string kTestOrigin2 = "https://test2.google.com/";
- const base::TimeDelta kMetadataTime = base::TimeDelta::FromSeconds(1);
- const base::TimeDelta kFirstFrameTime = base::TimeDelta::FromSeconds(2);
- const base::TimeDelta kPlayReadyTime = base::TimeDelta::FromSeconds(3);
+ const base::TimeDelta kMetadataTime = base::Seconds(1);
+ const base::TimeDelta kFirstFrameTime = base::Seconds(2);
+ const base::TimeDelta kPlayReadyTime = base::Seconds(3);
ResetMetricRecorders();
Initialize(false, false, false, kTestOrigin2, mojom::MediaURLScheme::kMissing,
@@ -213,8 +214,8 @@ TEST_F(MediaMetricsProviderTest, TestPipelineUMA) {
{false, false, AudioDecoderType::kMojo, EncryptionType::kClear});
provider_->SetVideoPipelineInfo(
{false, false, VideoDecoderType::kMojo, EncryptionType::kEncrypted});
- provider_->SetHasAudio(AudioCodec::kCodecVorbis);
- provider_->SetHasVideo(VideoCodec::kCodecVP9);
+ provider_->SetHasAudio(AudioCodec::kVorbis);
+ provider_->SetHasVideo(VideoCodec::kVP9);
provider_->SetHasPlayed();
provider_->SetHaveEnough();
provider_.reset();
@@ -233,8 +234,8 @@ TEST_F(MediaMetricsProviderTest, TestPipelineUMAMediaStream) {
{false, false, AudioDecoderType::kMojo, EncryptionType::kClear});
provider_->SetVideoPipelineInfo(
{false, false, VideoDecoderType::kMojo, EncryptionType::kEncrypted});
- provider_->SetHasAudio(AudioCodec::kCodecVorbis);
- provider_->SetHasVideo(VideoCodec::kCodecVP9);
+ provider_->SetHasAudio(AudioCodec::kVorbis);
+ provider_->SetHasVideo(VideoCodec::kVP9);
provider_->SetHasPlayed();
provider_->SetHaveEnough();
provider_.reset();
@@ -251,7 +252,7 @@ TEST_F(MediaMetricsProviderTest, TestPipelineUMANoAudioWithEme) {
provider_->SetIsEME();
provider_->SetVideoPipelineInfo(
{true, true, VideoDecoderType::kMojo, EncryptionType::kEncrypted});
- provider_->SetHasVideo(VideoCodec::kCodecAV1);
+ provider_->SetHasVideo(VideoCodec::kAV1);
provider_->SetHasPlayed();
provider_->SetHaveEnough();
provider_.reset();
@@ -271,8 +272,8 @@ TEST_F(MediaMetricsProviderTest, TestPipelineUMADecoderFallback) {
{false, false, AudioDecoderType::kMojo, EncryptionType::kClear});
provider_->SetVideoPipelineInfo(
{true, false, VideoDecoderType::kD3D11, EncryptionType::kEncrypted});
- provider_->SetHasVideo(VideoCodec::kCodecVP9);
- provider_->SetHasAudio(AudioCodec::kCodecVorbis);
+ provider_->SetHasVideo(VideoCodec::kVP9);
+ provider_->SetHasAudio(AudioCodec::kVorbis);
provider_->SetHasPlayed();
provider_->SetHaveEnough();
provider_->SetVideoPipelineInfo({true, false, VideoDecoderType::kFFmpeg});
@@ -289,8 +290,8 @@ TEST_F(MediaMetricsProviderTest, TestPipelineUMARendererType) {
Initialize(false, false, false, kTestOrigin, mojom::MediaURLScheme::kHttps);
provider_->SetIsEME();
provider_->SetRendererType(RendererType::kMediaFoundation);
- provider_->SetHasVideo(VideoCodec::kCodecVP9);
- provider_->SetHasAudio(AudioCodec::kCodecVorbis);
+ provider_->SetHasVideo(VideoCodec::kVP9);
+ provider_->SetHasAudio(AudioCodec::kVorbis);
provider_->SetHasPlayed();
provider_->SetHaveEnough();
provider_.reset();
diff --git a/chromium/media/mojo/services/media_resource_shim.h b/chromium/media/mojo/services/media_resource_shim.h
index a042fc50a4e..3511ccf9d81 100644
--- a/chromium/media/mojo/services/media_resource_shim.h
+++ b/chromium/media/mojo/services/media_resource_shim.h
@@ -25,6 +25,10 @@ class MediaResourceShim : public MediaResource {
MediaResourceShim(
std::vector<mojo::PendingRemote<mojom::DemuxerStream>> streams,
base::OnceClosure demuxer_ready_cb);
+
+ MediaResourceShim(const MediaResourceShim&) = delete;
+ MediaResourceShim& operator=(const MediaResourceShim&) = delete;
+
~MediaResourceShim() override;
// MediaResource interface.
@@ -48,8 +52,6 @@ class MediaResourceShim : public MediaResource {
// WeakPtrFactorys must always be the last member variable.
base::WeakPtrFactory<MediaResourceShim> weak_factory_{this};
-
- DISALLOW_COPY_AND_ASSIGN(MediaResourceShim);
};
} // namespace media
diff --git a/chromium/media/mojo/services/media_service.h b/chromium/media/mojo/services/media_service.h
index c1b9dddc270..36dc0a71d1e 100644
--- a/chromium/media/mojo/services/media_service.h
+++ b/chromium/media/mojo/services/media_service.h
@@ -26,6 +26,10 @@ class MEDIA_MOJO_EXPORT MediaService final : public mojom::MediaService {
public:
MediaService(std::unique_ptr<MojoMediaClient> mojo_media_client,
mojo::PendingReceiver<mojom::MediaService> receiver);
+
+ MediaService(const MediaService&) = delete;
+ MediaService& operator=(const MediaService&) = delete;
+
~MediaService() final;
private:
@@ -47,8 +51,6 @@ class MEDIA_MOJO_EXPORT MediaService final : public mojom::MediaService {
std::unique_ptr<MojoMediaClient> mojo_media_client_;
mojo::UniqueReceiverSet<mojom::InterfaceFactory> interface_factory_receivers_;
-
- DISALLOW_COPY_AND_ASSIGN(MediaService);
};
} // namespace media
diff --git a/chromium/media/mojo/services/media_service_unittest.cc b/chromium/media/mojo/services/media_service_unittest.cc
index ac1cba2aa34..f7d4eaffdbe 100644
--- a/chromium/media/mojo/services/media_service_unittest.cc
+++ b/chromium/media/mojo/services/media_service_unittest.cc
@@ -61,6 +61,10 @@ const char kInvalidKeySystem[] = "invalid.key.system";
class MockRendererClient : public mojom::RendererClient {
public:
MockRendererClient() = default;
+
+ MockRendererClient(const MockRendererClient&) = delete;
+ MockRendererClient& operator=(const MockRendererClient&) = delete;
+
~MockRendererClient() override = default;
// mojom::RendererClient implementation.
@@ -81,9 +85,6 @@ class MockRendererClient : public mojom::RendererClient {
MOCK_METHOD1(OnWaiting, void(WaitingReason));
MOCK_METHOD1(OnDurationChange, void(base::TimeDelta duration));
MOCK_METHOD1(OnRemotePlayStateChange, void(MediaStatus::State state));
-
- private:
- DISALLOW_COPY_AND_ASSIGN(MockRendererClient);
};
ACTION_P(QuitLoop, run_loop) {
@@ -99,6 +100,10 @@ class MediaServiceTest : public testing::Test {
MediaServiceTest()
: renderer_client_receiver_(&renderer_client_),
video_stream_(DemuxerStream::VIDEO) {}
+
+ MediaServiceTest(const MediaServiceTest&) = delete;
+ MediaServiceTest& operator=(const MediaServiceTest&) = delete;
+
~MediaServiceTest() override = default;
void SetUp() override {
@@ -190,9 +195,6 @@ class MediaServiceTest : public testing::Test {
StrictMock<MockDemuxerStream> video_stream_;
std::unique_ptr<MojoDemuxerStreamImpl> mojo_video_stream_;
-
- private:
- DISALLOW_COPY_AND_ASSIGN(MediaServiceTest);
};
} // namespace
diff --git a/chromium/media/mojo/services/mojo_audio_decoder_service.h b/chromium/media/mojo/services/mojo_audio_decoder_service.h
index 123f5201d52..cc9c1086aa8 100644
--- a/chromium/media/mojo/services/mojo_audio_decoder_service.h
+++ b/chromium/media/mojo/services/mojo_audio_decoder_service.h
@@ -31,6 +31,9 @@ class MEDIA_MOJO_EXPORT MojoAudioDecoderService final
MojoAudioDecoderService(MojoCdmServiceContext* mojo_cdm_service_context,
std::unique_ptr<media::AudioDecoder> decoder);
+ MojoAudioDecoderService(const MojoAudioDecoderService&) = delete;
+ MojoAudioDecoderService& operator=(const MojoAudioDecoderService&) = delete;
+
~MojoAudioDecoderService() final;
// mojom::AudioDecoder implementation
@@ -90,8 +93,6 @@ class MEDIA_MOJO_EXPORT MojoAudioDecoderService final
base::WeakPtr<MojoAudioDecoderService> weak_this_;
base::WeakPtrFactory<MojoAudioDecoderService> weak_factory_{this};
-
- DISALLOW_COPY_AND_ASSIGN(MojoAudioDecoderService);
};
} // namespace media
diff --git a/chromium/media/mojo/services/mojo_audio_input_stream.h b/chromium/media/mojo/services/mojo_audio_input_stream.h
index 049696193cc..922e7e92ba7 100644
--- a/chromium/media/mojo/services/mojo_audio_input_stream.h
+++ b/chromium/media/mojo/services/mojo_audio_input_stream.h
@@ -44,6 +44,9 @@ class MEDIA_MOJO_EXPORT MojoAudioInputStream
StreamCreatedCallback stream_created_callback,
base::OnceClosure deleter_callback);
+ MojoAudioInputStream(const MojoAudioInputStream&) = delete;
+ MojoAudioInputStream& operator=(const MojoAudioInputStream&) = delete;
+
~MojoAudioInputStream() override;
void SetOutputDeviceForAec(const std::string& raw_output_device_id);
@@ -73,8 +76,6 @@ class MEDIA_MOJO_EXPORT MojoAudioInputStream
mojo::Remote<mojom::AudioInputStreamClient> client_;
std::unique_ptr<AudioInputDelegate> delegate_;
base::WeakPtrFactory<MojoAudioInputStream> weak_factory_{this};
-
- DISALLOW_COPY_AND_ASSIGN(MojoAudioInputStream);
};
} // namespace media
diff --git a/chromium/media/mojo/services/mojo_audio_input_stream_unittest.cc b/chromium/media/mojo/services/mojo_audio_input_stream_unittest.cc
index dda1bd435cd..a3f1d1c12d3 100644
--- a/chromium/media/mojo/services/mojo_audio_input_stream_unittest.cc
+++ b/chromium/media/mojo/services/mojo_audio_input_stream_unittest.cc
@@ -44,6 +44,9 @@ class TestCancelableSyncSocket : public base::CancelableSyncSocket {
void ExpectOwnershipTransfer() { expect_ownership_transfer_ = true; }
+ TestCancelableSyncSocket(const TestCancelableSyncSocket&) = delete;
+ TestCancelableSyncSocket& operator=(const TestCancelableSyncSocket&) = delete;
+
~TestCancelableSyncSocket() override {
// When the handle is sent over mojo, mojo takes ownership over it and
// closes it. We have to make sure we do not also retain the handle in the
@@ -54,8 +57,6 @@ class TestCancelableSyncSocket : public base::CancelableSyncSocket {
private:
bool expect_ownership_transfer_ = false;
-
- DISALLOW_COPY_AND_ASSIGN(TestCancelableSyncSocket);
};
class MockDelegate : public AudioInputDelegate {
diff --git a/chromium/media/mojo/services/mojo_audio_output_stream.h b/chromium/media/mojo/services/mojo_audio_output_stream.h
index 8a8526958bd..5a7f95e5b22 100644
--- a/chromium/media/mojo/services/mojo_audio_output_stream.h
+++ b/chromium/media/mojo/services/mojo_audio_output_stream.h
@@ -40,6 +40,9 @@ class MEDIA_MOJO_EXPORT MojoAudioOutputStream
StreamCreatedCallback stream_created_callback,
DeleterCallback deleter_callback);
+ MojoAudioOutputStream(const MojoAudioOutputStream&) = delete;
+ MojoAudioOutputStream& operator=(const MojoAudioOutputStream&) = delete;
+
~MojoAudioOutputStream() override;
private:
@@ -65,8 +68,6 @@ class MEDIA_MOJO_EXPORT MojoAudioOutputStream
mojo::Receiver<AudioOutputStream> receiver_{this};
std::unique_ptr<AudioOutputDelegate> delegate_;
base::WeakPtrFactory<MojoAudioOutputStream> weak_factory_{this};
-
- DISALLOW_COPY_AND_ASSIGN(MojoAudioOutputStream);
};
} // namespace media
diff --git a/chromium/media/mojo/services/mojo_audio_output_stream_provider.cc b/chromium/media/mojo/services/mojo_audio_output_stream_provider.cc
index 7a1ab08dfbd..f7c137f836d 100644
--- a/chromium/media/mojo/services/mojo_audio_output_stream_provider.cc
+++ b/chromium/media/mojo/services/mojo_audio_output_stream_provider.cc
@@ -83,7 +83,7 @@ void MojoAudioOutputStreamProvider::CleanUp(bool had_error) {
}
void MojoAudioOutputStreamProvider::BadMessage(const std::string& error) {
- mojo::ReportBadMessage(error);
+ receiver_.ReportBadMessage(error);
std::move(deleter_callback_).Run(this); // deletes |this|.
}
diff --git a/chromium/media/mojo/services/mojo_audio_output_stream_provider.h b/chromium/media/mojo/services/mojo_audio_output_stream_provider.h
index 8d632351d55..ee24d37574f 100644
--- a/chromium/media/mojo/services/mojo_audio_output_stream_provider.h
+++ b/chromium/media/mojo/services/mojo_audio_output_stream_provider.h
@@ -41,6 +41,10 @@ class MEDIA_MOJO_EXPORT MojoAudioOutputStreamProvider
DeleterCallback deleter_callback,
std::unique_ptr<mojom::AudioOutputStreamObserver> observer);
+ MojoAudioOutputStreamProvider(const MojoAudioOutputStreamProvider&) = delete;
+ MojoAudioOutputStreamProvider& operator=(
+ const MojoAudioOutputStreamProvider&) = delete;
+
~MojoAudioOutputStreamProvider() override;
private:
@@ -64,8 +68,6 @@ class MEDIA_MOJO_EXPORT MojoAudioOutputStreamProvider
mojo::Receiver<mojom::AudioOutputStreamObserver> observer_receiver_;
absl::optional<MojoAudioOutputStream> audio_output_;
mojo::Remote<mojom::AudioOutputStreamProviderClient> provider_client_;
-
- DISALLOW_COPY_AND_ASSIGN(MojoAudioOutputStreamProvider);
};
} // namespace media
diff --git a/chromium/media/mojo/services/mojo_audio_output_stream_unittest.cc b/chromium/media/mojo/services/mojo_audio_output_stream_unittest.cc
index b6d724f0a84..f0faa0bfa39 100644
--- a/chromium/media/mojo/services/mojo_audio_output_stream_unittest.cc
+++ b/chromium/media/mojo/services/mojo_audio_output_stream_unittest.cc
@@ -44,6 +44,9 @@ class TestCancelableSyncSocket : public base::CancelableSyncSocket {
void ExpectOwnershipTransfer() { expect_ownership_transfer_ = true; }
+ TestCancelableSyncSocket(const TestCancelableSyncSocket&) = delete;
+ TestCancelableSyncSocket& operator=(const TestCancelableSyncSocket&) = delete;
+
~TestCancelableSyncSocket() override {
// When the handle is sent over mojo, mojo takes ownership over it and
// closes it. We have to make sure we do not also retain the handle in the
@@ -54,8 +57,6 @@ class TestCancelableSyncSocket : public base::CancelableSyncSocket {
private:
bool expect_ownership_transfer_ = false;
-
- DISALLOW_COPY_AND_ASSIGN(TestCancelableSyncSocket);
};
class MockDelegate : public AudioOutputDelegate {
diff --git a/chromium/media/mojo/services/mojo_cdm_allocator.cc b/chromium/media/mojo/services/mojo_cdm_allocator.cc
index 340fe57177a..387396f1e37 100644
--- a/chromium/media/mojo/services/mojo_cdm_allocator.cc
+++ b/chromium/media/mojo/services/mojo_cdm_allocator.cc
@@ -113,6 +113,10 @@ class MojoCdmVideoFrame final : public VideoFrameImpl {
explicit MojoCdmVideoFrame(MojoSharedBufferVideoFrame::MojoSharedBufferDoneCB
mojo_shared_buffer_done_cb)
: mojo_shared_buffer_done_cb_(std::move(mojo_shared_buffer_done_cb)) {}
+
+ MojoCdmVideoFrame(const MojoCdmVideoFrame&) = delete;
+ MojoCdmVideoFrame& operator=(const MojoCdmVideoFrame&) = delete;
+
~MojoCdmVideoFrame() final = default;
// VideoFrameImpl implementation.
@@ -144,7 +148,7 @@ class MojoCdmVideoFrame final : public VideoFrameImpl {
{static_cast<int32_t>(Stride(cdm::kYPlane)),
static_cast<int32_t>(Stride(cdm::kUPlane)),
static_cast<int32_t>(Stride(cdm::kVPlane))},
- base::TimeDelta::FromMicroseconds(Timestamp()));
+ base::Microseconds(Timestamp()));
// |frame| could fail to be created if the memory can't be mapped into
// this address space.
@@ -158,8 +162,6 @@ class MojoCdmVideoFrame final : public VideoFrameImpl {
private:
MojoSharedBufferVideoFrame::MojoSharedBufferDoneCB
mojo_shared_buffer_done_cb_;
-
- DISALLOW_COPY_AND_ASSIGN(MojoCdmVideoFrame);
};
} // namespace
diff --git a/chromium/media/mojo/services/mojo_cdm_allocator.h b/chromium/media/mojo/services/mojo_cdm_allocator.h
index 1a362245b3e..ca4c7a3a1ca 100644
--- a/chromium/media/mojo/services/mojo_cdm_allocator.h
+++ b/chromium/media/mojo/services/mojo_cdm_allocator.h
@@ -24,6 +24,10 @@ namespace media {
class MEDIA_MOJO_EXPORT MojoCdmAllocator final : public CdmAllocator {
public:
MojoCdmAllocator();
+
+ MojoCdmAllocator(const MojoCdmAllocator&) = delete;
+ MojoCdmAllocator& operator=(const MojoCdmAllocator&) = delete;
+
~MojoCdmAllocator() final;
// CdmAllocator implementation.
@@ -63,8 +67,6 @@ class MEDIA_MOJO_EXPORT MojoCdmAllocator final : public CdmAllocator {
// NOTE: Weak pointers must be invalidated before all other member variables.
base::WeakPtrFactory<MojoCdmAllocator> weak_ptr_factory_{this};
-
- DISALLOW_COPY_AND_ASSIGN(MojoCdmAllocator);
};
} // namespace media
diff --git a/chromium/media/mojo/services/mojo_cdm_allocator_unittest.cc b/chromium/media/mojo/services/mojo_cdm_allocator_unittest.cc
index 725d044c88d..f47bef74385 100644
--- a/chromium/media/mojo/services/mojo_cdm_allocator_unittest.cc
+++ b/chromium/media/mojo/services/mojo_cdm_allocator_unittest.cc
@@ -20,6 +20,10 @@ namespace media {
class MojoCdmAllocatorTest : public testing::Test {
public:
MojoCdmAllocatorTest() = default;
+
+ MojoCdmAllocatorTest(const MojoCdmAllocatorTest&) = delete;
+ MojoCdmAllocatorTest& operator=(const MojoCdmAllocatorTest&) = delete;
+
~MojoCdmAllocatorTest() override = default;
protected:
@@ -41,7 +45,6 @@ class MojoCdmAllocatorTest : public testing::Test {
private:
MojoCdmAllocator allocator_;
- DISALLOW_COPY_AND_ASSIGN(MojoCdmAllocatorTest);
};
TEST_F(MojoCdmAllocatorTest, CreateCdmBuffer) {
diff --git a/chromium/media/mojo/services/mojo_cdm_helper.cc b/chromium/media/mojo/services/mojo_cdm_helper.cc
index ab559d9a0c9..f068781cf59 100644
--- a/chromium/media/mojo/services/mojo_cdm_helper.cc
+++ b/chromium/media/mojo/services/mojo_cdm_helper.cc
@@ -50,9 +50,10 @@ url::Origin MojoCdmHelper::GetCdmOrigin() {
}
#if defined(OS_WIN)
-void MojoCdmHelper::GetCdmPreferenceData(GetCdmPreferenceDataCB callback) {
+void MojoCdmHelper::GetMediaFoundationCdmData(
+ GetMediaFoundationCdmDataCB callback) {
ConnectToCdmDocumentService();
- cdm_document_service_->GetCdmPreferenceData(std::move(callback));
+ cdm_document_service_->GetMediaFoundationCdmData(std::move(callback));
}
void MojoCdmHelper::SetCdmClientToken(
diff --git a/chromium/media/mojo/services/mojo_cdm_helper.h b/chromium/media/mojo/services/mojo_cdm_helper.h
index 4522a8e3cff..355ca5c3e54 100644
--- a/chromium/media/mojo/services/mojo_cdm_helper.h
+++ b/chromium/media/mojo/services/mojo_cdm_helper.h
@@ -48,7 +48,7 @@ class MEDIA_MOJO_EXPORT MojoCdmHelper final : public CdmAuxiliaryHelper,
ChallengePlatformCB callback) final;
void GetStorageId(uint32_t version, StorageIdCB callback) final;
#if defined(OS_WIN)
- void GetCdmPreferenceData(GetCdmPreferenceDataCB callback) final;
+ void GetMediaFoundationCdmData(GetMediaFoundationCdmDataCB callback) final;
void SetCdmClientToken(const std::vector<uint8_t>& client_token) final;
#endif // defined(OS_WIN)
diff --git a/chromium/media/mojo/services/mojo_cdm_service.h b/chromium/media/mojo/services/mojo_cdm_service.h
index bedc547fd54..7c8034245e3 100644
--- a/chromium/media/mojo/services/mojo_cdm_service.h
+++ b/chromium/media/mojo/services/mojo_cdm_service.h
@@ -43,6 +43,10 @@ class MEDIA_MOJO_EXPORT MojoCdmService final
const std::string& error_message)>;
explicit MojoCdmService(MojoCdmServiceContext* context);
+
+ MojoCdmService(const MojoCdmService&) = delete;
+ MojoCdmService& operator=(const MojoCdmService&) = delete;
+
~MojoCdmService() final;
// Initialize the MojoCdmService, including creating the real CDM using the
@@ -119,8 +123,6 @@ class MEDIA_MOJO_EXPORT MojoCdmService final
mojo::AssociatedRemote<mojom::ContentDecryptionModuleClient> client_;
base::WeakPtrFactory<MojoCdmService> weak_factory_{this};
-
- DISALLOW_COPY_AND_ASSIGN(MojoCdmService);
};
} // namespace media
diff --git a/chromium/media/mojo/services/mojo_cdm_service_context.h b/chromium/media/mojo/services/mojo_cdm_service_context.h
index b6a3cb69b05..6b34593ace7 100644
--- a/chromium/media/mojo/services/mojo_cdm_service_context.h
+++ b/chromium/media/mojo/services/mojo_cdm_service_context.h
@@ -24,6 +24,10 @@ class MojoCdmService;
class MEDIA_MOJO_EXPORT MojoCdmServiceContext {
public:
MojoCdmServiceContext();
+
+ MojoCdmServiceContext(const MojoCdmServiceContext&) = delete;
+ MojoCdmServiceContext& operator=(const MojoCdmServiceContext&) = delete;
+
~MojoCdmServiceContext();
// Registers the |cdm_service| and returns a unique (per-process) CDM ID.
@@ -39,8 +43,6 @@ class MEDIA_MOJO_EXPORT MojoCdmServiceContext {
private:
// A map between CDM ID and MojoCdmService.
std::map<base::UnguessableToken, MojoCdmService*> cdm_services_;
-
- DISALLOW_COPY_AND_ASSIGN(MojoCdmServiceContext);
};
} // namespace media
diff --git a/chromium/media/mojo/services/mojo_decryptor_service.cc b/chromium/media/mojo/services/mojo_decryptor_service.cc
index 96b757d0697..b95abe3fbe7 100644
--- a/chromium/media/mojo/services/mojo_decryptor_service.cc
+++ b/chromium/media/mojo/services/mojo_decryptor_service.cc
@@ -39,12 +39,15 @@ class FrameResourceReleaserImpl final : public mojom::FrameResourceReleaser {
DVLOG(3) << __func__;
DCHECK_EQ(VideoFrame::STORAGE_MOJO_SHARED_BUFFER, frame_->storage_type());
}
+
+ FrameResourceReleaserImpl(const FrameResourceReleaserImpl&) = delete;
+ FrameResourceReleaserImpl& operator=(const FrameResourceReleaserImpl&) =
+ delete;
+
~FrameResourceReleaserImpl() override { DVLOG(3) << __func__; }
private:
scoped_refptr<VideoFrame> frame_;
-
- DISALLOW_COPY_AND_ASSIGN(FrameResourceReleaserImpl);
};
const char kInvalidStateMessage[] = "MojoDecryptorService - invalid state";
diff --git a/chromium/media/mojo/services/mojo_decryptor_service.h b/chromium/media/mojo/services/mojo_decryptor_service.h
index 5665be4c30b..a5e4fc70606 100644
--- a/chromium/media/mojo/services/mojo_decryptor_service.h
+++ b/chromium/media/mojo/services/mojo_decryptor_service.h
@@ -37,6 +37,9 @@ class MEDIA_MOJO_EXPORT MojoDecryptorService final : public mojom::Decryptor {
MojoDecryptorService(media::Decryptor* decryptor,
std::unique_ptr<CdmContextRef> cdm_context_ref);
+ MojoDecryptorService(const MojoDecryptorService&) = delete;
+ MojoDecryptorService& operator=(const MojoDecryptorService&) = delete;
+
~MojoDecryptorService() final;
// mojom::Decryptor implementation.
@@ -110,8 +113,6 @@ class MEDIA_MOJO_EXPORT MojoDecryptorService final : public mojom::Decryptor {
base::WeakPtr<MojoDecryptorService> weak_this_;
base::WeakPtrFactory<MojoDecryptorService> weak_factory_{this};
-
- DISALLOW_COPY_AND_ASSIGN(MojoDecryptorService);
};
} // namespace media
diff --git a/chromium/media/mojo/services/mojo_demuxer_stream_adapter.h b/chromium/media/mojo/services/mojo_demuxer_stream_adapter.h
index 4d5b55fc3d4..fff6ef7aa50 100644
--- a/chromium/media/mojo/services/mojo_demuxer_stream_adapter.h
+++ b/chromium/media/mojo/services/mojo_demuxer_stream_adapter.h
@@ -36,6 +36,10 @@ class MojoDemuxerStreamAdapter : public DemuxerStream {
MojoDemuxerStreamAdapter(
mojo::PendingRemote<mojom::DemuxerStream> demuxer_stream,
base::OnceClosure stream_ready_cb);
+
+ MojoDemuxerStreamAdapter(const MojoDemuxerStreamAdapter&) = delete;
+ MojoDemuxerStreamAdapter& operator=(const MojoDemuxerStreamAdapter&) = delete;
+
~MojoDemuxerStreamAdapter() override;
// DemuxerStream implementation.
@@ -84,7 +88,6 @@ class MojoDemuxerStreamAdapter : public DemuxerStream {
std::unique_ptr<MojoDecoderBufferReader> mojo_decoder_buffer_reader_;
base::WeakPtrFactory<MojoDemuxerStreamAdapter> weak_factory_{this};
- DISALLOW_COPY_AND_ASSIGN(MojoDemuxerStreamAdapter);
};
} // namespace media
diff --git a/chromium/media/mojo/services/mojo_media_client.cc b/chromium/media/mojo/services/mojo_media_client.cc
index c35168a08e6..d4a9194b8b6 100644
--- a/chromium/media/mojo/services/mojo_media_client.cc
+++ b/chromium/media/mojo/services/mojo_media_client.cc
@@ -24,9 +24,9 @@ std::unique_ptr<AudioDecoder> MojoMediaClient::CreateAudioDecoder(
return nullptr;
}
-SupportedVideoDecoderConfigs
-MojoMediaClient::GetSupportedVideoDecoderConfigs() {
- return {};
+void MojoMediaClient::GetSupportedVideoDecoderConfigs(
+ MojoMediaClient::SupportedVideoDecoderConfigsCallback callback) {
+ std::move(callback).Run({});
}
VideoDecoderType MojoMediaClient::GetDecoderImplementationType() {
@@ -64,6 +64,7 @@ std::unique_ptr<Renderer> MojoMediaClient::CreateCastRenderer(
std::unique_ptr<Renderer> MojoMediaClient::CreateMediaFoundationRenderer(
scoped_refptr<base::SingleThreadTaskRunner> task_runner,
mojom::FrameInterfaceFactory* frame_interfaces,
+ mojo::PendingRemote<mojom::MediaLog> media_log_remote,
mojo::PendingReceiver<mojom::MediaFoundationRendererExtension>
renderer_extension_receiver) {
return nullptr;
diff --git a/chromium/media/mojo/services/mojo_media_client.h b/chromium/media/mojo/services/mojo_media_client.h
index 492ac4eebe2..8557f627efe 100644
--- a/chromium/media/mojo/services/mojo_media_client.h
+++ b/chromium/media/mojo/services/mojo_media_client.h
@@ -11,6 +11,7 @@
#include "base/memory/ref_counted.h"
#include "base/unguessable_token.h"
+#include "build/build_config.h"
#include "media/base/overlay_info.h"
#include "media/base/supported_video_decoder_config.h"
#include "media/media_buildflags.h"
@@ -42,6 +43,9 @@ class VideoDecoder;
// the media components.
class MEDIA_MOJO_EXPORT MojoMediaClient {
public:
+ using SupportedVideoDecoderConfigsCallback =
+ base::OnceCallback<void(SupportedVideoDecoderConfigs)>;
+
// Called before the host application is scheduled to quit.
// The application message loop is still valid at this point, so all clean
// up tasks requiring the message loop must be completed before returning.
@@ -53,8 +57,8 @@ class MEDIA_MOJO_EXPORT MojoMediaClient {
virtual std::unique_ptr<AudioDecoder> CreateAudioDecoder(
scoped_refptr<base::SingleThreadTaskRunner> task_runner);
- virtual std::vector<SupportedVideoDecoderConfig>
- GetSupportedVideoDecoderConfigs();
+ virtual void GetSupportedVideoDecoderConfigs(
+ SupportedVideoDecoderConfigsCallback callback);
virtual VideoDecoderType GetDecoderImplementationType();
@@ -91,6 +95,7 @@ class MEDIA_MOJO_EXPORT MojoMediaClient {
virtual std::unique_ptr<Renderer> CreateMediaFoundationRenderer(
scoped_refptr<base::SingleThreadTaskRunner> task_runner,
mojom::FrameInterfaceFactory* frame_interfaces,
+ mojo::PendingRemote<mojom::MediaLog> media_log_remote,
mojo::PendingReceiver<mojom::MediaFoundationRendererExtension>
renderer_extension_receiver);
#endif // defined(OS_WIN)
diff --git a/chromium/media/mojo/services/mojo_media_drm_storage.h b/chromium/media/mojo/services/mojo_media_drm_storage.h
index 1b56a561e9c..e4ee68e4e1b 100644
--- a/chromium/media/mojo/services/mojo_media_drm_storage.h
+++ b/chromium/media/mojo/services/mojo_media_drm_storage.h
@@ -23,6 +23,10 @@ class MEDIA_MOJO_EXPORT MojoMediaDrmStorage final : public MediaDrmStorage {
public:
explicit MojoMediaDrmStorage(
mojo::PendingRemote<mojom::MediaDrmStorage> media_drm_storage);
+
+ MojoMediaDrmStorage(const MojoMediaDrmStorage&) = delete;
+ MojoMediaDrmStorage& operator=(const MojoMediaDrmStorage&) = delete;
+
~MojoMediaDrmStorage() override;
// MediaDrmStorage implementation:
@@ -44,8 +48,6 @@ class MEDIA_MOJO_EXPORT MojoMediaDrmStorage final : public MediaDrmStorage {
mojo::Remote<mojom::MediaDrmStorage> media_drm_storage_;
base::WeakPtrFactory<MojoMediaDrmStorage> weak_factory_{this};
-
- DISALLOW_COPY_AND_ASSIGN(MojoMediaDrmStorage);
};
} // namespace media
diff --git a/chromium/media/mojo/services/mojo_media_log.cc b/chromium/media/mojo/services/mojo_media_log.cc
index 8efd4494a9d..5dfe8f2dfc4 100644
--- a/chromium/media/mojo/services/mojo_media_log.cc
+++ b/chromium/media/mojo/services/mojo_media_log.cc
@@ -11,11 +11,10 @@
namespace media {
MojoMediaLog::MojoMediaLog(
- mojo::PendingAssociatedRemote<mojom::MediaLog> remote_media_log,
+ mojo::PendingRemote<mojom::MediaLog> remote_media_log,
scoped_refptr<base::SequencedTaskRunner> task_runner)
: remote_media_log_(std::move(remote_media_log)),
task_runner_(std::move(task_runner)) {
- weak_this_ = weak_ptr_factory_.GetWeakPtr();
DVLOG(1) << __func__;
}
@@ -28,7 +27,7 @@ MojoMediaLog::~MojoMediaLog() {
}
void MojoMediaLog::AddLogRecordLocked(std::unique_ptr<MediaLogRecord> event) {
- DVLOG(1) << __func__;
+ DVLOG(2) << __func__;
DCHECK(event);
// Don't post unless we need to. Otherwise, we can order a log entry after
@@ -43,9 +42,10 @@ void MojoMediaLog::AddLogRecordLocked(std::unique_ptr<MediaLogRecord> event) {
}
// From other threads, we have little choice.
- task_runner_->PostTask(FROM_HERE,
- base::BindOnce(&MojoMediaLog::AddLogRecord, weak_this_,
- std::move(event)));
+ task_runner_->PostTask(
+ FROM_HERE,
+ base::BindOnce(&MojoMediaLog::AddLogRecord,
+ weak_ptr_factory_.GetWeakPtr(), std::move(event)));
}
} // namespace media
diff --git a/chromium/media/mojo/services/mojo_media_log.h b/chromium/media/mojo/services/mojo_media_log.h
index ae972abbd3b..52e2b43329a 100644
--- a/chromium/media/mojo/services/mojo_media_log.h
+++ b/chromium/media/mojo/services/mojo_media_log.h
@@ -7,24 +7,23 @@
#include <memory>
-#include "base/macros.h"
#include "base/memory/scoped_refptr.h"
#include "base/memory/weak_ptr.h"
#include "base/sequenced_task_runner.h"
#include "media/base/media_log.h"
#include "media/mojo/mojom/media_log.mojom.h"
-#include "mojo/public/cpp/bindings/associated_remote.h"
-#include "mojo/public/cpp/bindings/pending_associated_remote.h"
+#include "mojo/public/cpp/bindings/pending_remote.h"
+#include "mojo/public/cpp/bindings/remote.h"
namespace media {
// Client side for a MediaLog via mojo.
class MojoMediaLog final : public MediaLog {
public:
- // TODO(sandersd): Template on Ptr type to support non-associated.
- explicit MojoMediaLog(
- mojo::PendingAssociatedRemote<mojom::MediaLog> remote_media_log,
- scoped_refptr<base::SequencedTaskRunner> task_runner);
+ MojoMediaLog(mojo::PendingRemote<mojom::MediaLog> remote_media_log,
+ scoped_refptr<base::SequencedTaskRunner> task_runner);
+ MojoMediaLog(const MojoMediaLog&) = delete;
+ MojoMediaLog& operator=(const MojoMediaLog&) = delete;
~MojoMediaLog() final;
protected:
@@ -33,16 +32,12 @@ class MojoMediaLog final : public MediaLog {
void AddLogRecordLocked(std::unique_ptr<MediaLogRecord> event) override;
private:
- mojo::AssociatedRemote<mojom::MediaLog> remote_media_log_;
+ mojo::Remote<mojom::MediaLog> remote_media_log_;
// The mojo service thread on which we'll access |remote_media_log_|.
scoped_refptr<base::SequencedTaskRunner> task_runner_;
- base::WeakPtr<MojoMediaLog> weak_this_;
-
base::WeakPtrFactory<MojoMediaLog> weak_ptr_factory_{this};
-
- DISALLOW_COPY_AND_ASSIGN(MojoMediaLog);
};
} // namespace media
diff --git a/chromium/media/mojo/services/mojo_provision_fetcher.h b/chromium/media/mojo/services/mojo_provision_fetcher.h
index b1b0cfe8899..cb79dfceaad 100644
--- a/chromium/media/mojo/services/mojo_provision_fetcher.h
+++ b/chromium/media/mojo/services/mojo_provision_fetcher.h
@@ -21,6 +21,10 @@ class MEDIA_MOJO_EXPORT MojoProvisionFetcher final : public ProvisionFetcher {
public:
explicit MojoProvisionFetcher(
mojo::PendingRemote<mojom::ProvisionFetcher> provision_fetcher);
+
+ MojoProvisionFetcher(const MojoProvisionFetcher&) = delete;
+ MojoProvisionFetcher& operator=(const MojoProvisionFetcher&) = delete;
+
~MojoProvisionFetcher() final;
// ProvisionFetcher implementation:
@@ -37,8 +41,6 @@ class MEDIA_MOJO_EXPORT MojoProvisionFetcher final : public ProvisionFetcher {
mojo::Remote<mojom::ProvisionFetcher> provision_fetcher_;
base::WeakPtrFactory<MojoProvisionFetcher> weak_factory_{this};
-
- DISALLOW_COPY_AND_ASSIGN(MojoProvisionFetcher);
};
} // namespace media
diff --git a/chromium/media/mojo/services/mojo_renderer_service.cc b/chromium/media/mojo/services/mojo_renderer_service.cc
index 31967288a8b..468d2cb5ab4 100644
--- a/chromium/media/mojo/services/mojo_renderer_service.cc
+++ b/chromium/media/mojo/services/mojo_renderer_service.cc
@@ -243,7 +243,7 @@ void MojoRendererService::UpdateMediaTime(bool force) {
base::TimeDelta max_time = media_time;
// Allow some slop to account for delays in scheduling time update tasks.
if (time_update_timer_.IsRunning() && (playback_rate_ > 0))
- max_time += base::TimeDelta::FromMilliseconds(2 * kTimeUpdateIntervalMs);
+ max_time += base::Milliseconds(2 * kTimeUpdateIntervalMs);
client_->OnTimeUpdate(media_time, max_time, base::TimeTicks::Now());
last_media_time_ = media_time;
@@ -261,7 +261,7 @@ void MojoRendererService::SchedulePeriodicMediaTimeUpdates() {
UpdateMediaTime(true);
time_update_timer_.Start(
- FROM_HERE, base::TimeDelta::FromMilliseconds(kTimeUpdateIntervalMs),
+ FROM_HERE, base::Milliseconds(kTimeUpdateIntervalMs),
base::BindRepeating(&MojoRendererService::UpdateMediaTime, weak_this_,
false));
}
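Illustrative mapping (not part of the patch): the base::TimeDelta edits in this and the surrounding hunks are mechanical renames of the factory functions; the new spellings are templated and accept floating-point arguments, so the *D variants fold into the same names. Argument values below are taken from the hunks themselves:

base::TimeDelta::FromSeconds(5);         // becomes base::Seconds(5)
base::TimeDelta::FromSecondsD(1.4);      // becomes base::Seconds(1.4)
base::TimeDelta::FromMilliseconds(500);  // becomes base::Milliseconds(500)
base::TimeDelta::FromMicroseconds(ts);   // becomes base::Microseconds(ts)
base::TimeDelta::FromHours(10);          // becomes base::Hours(10)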
diff --git a/chromium/media/mojo/services/mojo_renderer_service.h b/chromium/media/mojo/services/mojo_renderer_service.h
index 837f26998d7..5bd23217ec0 100644
--- a/chromium/media/mojo/services/mojo_renderer_service.h
+++ b/chromium/media/mojo/services/mojo_renderer_service.h
@@ -52,6 +52,9 @@ class MEDIA_MOJO_EXPORT MojoRendererService final : public mojom::Renderer,
MojoRendererService(MojoCdmServiceContext* mojo_cdm_service_context,
std::unique_ptr<media::Renderer> renderer);
+ MojoRendererService(const MojoRendererService&) = delete;
+ MojoRendererService& operator=(const MojoRendererService&) = delete;
+
~MojoRendererService() final;
// mojom::Renderer implementation.
@@ -135,8 +138,6 @@ class MEDIA_MOJO_EXPORT MojoRendererService final : public mojom::Renderer,
base::WeakPtr<MojoRendererService> weak_this_;
base::WeakPtrFactory<MojoRendererService> weak_factory_{this};
-
- DISALLOW_COPY_AND_ASSIGN(MojoRendererService);
};
} // namespace media
diff --git a/chromium/media/mojo/services/mojo_video_decoder_service.cc b/chromium/media/mojo/services/mojo_video_decoder_service.cc
index 582283fff9d..a3a2a23be8b 100644
--- a/chromium/media/mojo/services/mojo_video_decoder_service.cc
+++ b/chromium/media/mojo/services/mojo_video_decoder_service.cc
@@ -12,11 +12,9 @@
#include "base/logging.h"
#include "base/macros.h"
#include "base/metrics/histogram_functions.h"
-#include "base/metrics/histogram_macros.h"
#include "base/stl_util.h"
#include "base/strings/stringprintf.h"
#include "base/threading/thread_task_runner_handle.h"
-#include "base/timer/elapsed_timer.h"
#include "media/base/decoder_buffer.h"
#include "media/base/simple_sync_token_client.h"
#include "media/base/video_decoder.h"
@@ -48,13 +46,6 @@ const char kInitializeTraceName[] = "MojoVideoDecoderService::Initialize";
const char kDecodeTraceName[] = "MojoVideoDecoderService::Decode";
const char kResetTraceName[] = "MojoVideoDecoderService::Reset";
-void RecordTimingHistogram(const char* method, base::TimeDelta elapsed) {
- base::UmaHistogramTimes(
- base::StringPrintf("Media.MojoVideoDecoderServiceTiming.Default.%s",
- method),
- elapsed);
-}
-
base::debug::CrashKeyString* GetNumVideoDecodersCrashKeyString() {
static base::debug::CrashKeyString* codec_count_crash_key =
base::debug::AllocateCrashKeyString("num-video-decoders",
@@ -69,6 +60,10 @@ class VideoFrameHandleReleaserImpl final
public:
VideoFrameHandleReleaserImpl() { DVLOG(3) << __func__; }
+ VideoFrameHandleReleaserImpl(const VideoFrameHandleReleaserImpl&) = delete;
+ VideoFrameHandleReleaserImpl& operator=(const VideoFrameHandleReleaserImpl&) =
+ delete;
+
~VideoFrameHandleReleaserImpl() final { DVLOG(3) << __func__; }
// Register a VideoFrame to receive release callbacks. A reference to |frame|

@@ -101,8 +96,6 @@ class VideoFrameHandleReleaserImpl final
private:
// TODO(sandersd): Also track age, so that an overall limit can be enforced.
std::map<base::UnguessableToken, scoped_refptr<VideoFrame>> video_frames_;
-
- DISALLOW_COPY_AND_ASSIGN(VideoFrameHandleReleaserImpl);
};
MojoVideoDecoderService::MojoVideoDecoderService(
@@ -118,7 +111,6 @@ MojoVideoDecoderService::MojoVideoDecoderService(
MojoVideoDecoderService::~MojoVideoDecoderService() {
DVLOG(1) << __func__;
- base::ElapsedTimer elapsed;
if (init_cb_) {
OnDecoderInitialized(
@@ -140,23 +132,27 @@ MojoVideoDecoderService::~MojoVideoDecoderService() {
// the histogram timer below.
weak_factory_.InvalidateWeakPtrs();
decoder_.reset();
-
- if (decoder_)
- RecordTimingHistogram("Destruct", elapsed.Elapsed());
}
void MojoVideoDecoderService::GetSupportedConfigs(
GetSupportedConfigsCallback callback) {
DVLOG(3) << __func__;
- TRACE_EVENT0("media", "MojoVideoDecoderService::GetSupportedConfigs");
- std::move(callback).Run(mojo_media_client_->GetSupportedVideoDecoderConfigs(),
+ mojo_media_client_->GetSupportedVideoDecoderConfigs(
+ base::BindOnce(&MojoVideoDecoderService::OnSupportedVideoDecoderConfigs,
+ weak_factory_.GetWeakPtr(), std::move(callback)));
+}
+
+void MojoVideoDecoderService::OnSupportedVideoDecoderConfigs(
+ GetSupportedConfigsCallback callback,
+ SupportedVideoDecoderConfigs configs) {
+ std::move(callback).Run(std::move(configs),
mojo_media_client_->GetDecoderImplementationType());
}
void MojoVideoDecoderService::Construct(
mojo::PendingAssociatedRemote<mojom::VideoDecoderClient> client,
- mojo::PendingAssociatedRemote<mojom::MediaLog> media_log,
+ mojo::PendingRemote<mojom::MediaLog> media_log,
mojo::PendingReceiver<mojom::VideoFrameHandleReleaser>
video_frame_handle_releaser_receiver,
mojo::ScopedDataPipeConsumerHandle decoder_buffer_pipe,
@@ -170,7 +166,6 @@ void MojoVideoDecoderService::Construct(
return;
}
- base::ElapsedTimer elapsed;
client_.Bind(std::move(client));
scoped_refptr<base::SingleThreadTaskRunner> task_runner =
@@ -191,8 +186,6 @@ void MojoVideoDecoderService::Construct(
base::BindRepeating(
&MojoVideoDecoderService::OnDecoderRequestedOverlayInfo, weak_this_),
target_color_space);
-
- RecordTimingHistogram("Construct", elapsed.Elapsed());
}
void MojoVideoDecoderService::Initialize(
@@ -289,8 +282,8 @@ void MojoVideoDecoderService::Decode(mojom::DecoderBufferPtr buffer,
if (!is_active_instance_) {
is_active_instance_ = true;
g_num_active_mvd_instances++;
- UMA_HISTOGRAM_EXACT_LINEAR("Media.MojoVideoDecoder.ActiveInstances",
- g_num_active_mvd_instances, 64);
+ base::UmaHistogramExactLinear("Media.MojoVideoDecoder.ActiveInstances",
+ g_num_active_mvd_instances, 64);
base::debug::SetCrashKeyString(
GetNumVideoDecodersCrashKeyString(),
base::NumberToString(g_num_active_mvd_instances));
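Standard-library sketch (not part of the patch, all names hypothetical): the GetSupportedConfigs change above turns a synchronous query into an asynchronous one. The client's reply callback is forwarded into a continuation bound to a weak pointer, so a service destroyed before the configs arrive is never called back. The example below uses std::function and std::weak_ptr in place of base::OnceCallback and base::WeakPtrFactory:

#include <functional>
#include <memory>
#include <utility>
#include <vector>

using Configs = std::vector<int>;                      // stand-in for SupportedVideoDecoderConfigs
using ConfigsCallback = std::function<void(Configs)>;  // stand-in for the Mojo reply callback

struct Client {
  // Asynchronous query: the result is only delivered through |callback|.
  void GetSupportedConfigs(ConfigsCallback callback) { callback(Configs{1, 2, 3}); }
};

struct Service : std::enable_shared_from_this<Service> {
  Client client;

  void GetSupportedConfigs(ConfigsCallback reply) {
    std::weak_ptr<Service> weak = weak_from_this();
    client.GetSupportedConfigs(
        [weak, reply = std::move(reply)](Configs configs) mutable {
          if (auto self = weak.lock())  // mirrors weak_factory_.GetWeakPtr()
            self->OnSupportedConfigs(std::move(reply), std::move(configs));
        });
  }

  void OnSupportedConfigs(ConfigsCallback reply, Configs configs) {
    reply(std::move(configs));  // the original caller finally gets its answer
  }
};

Usage under this sketch: create the service as a std::shared_ptr and call service->GetSupportedConfigs(...) with a lambda that consumes the configs; if the shared_ptr is released first, the continuation simply does nothing.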
diff --git a/chromium/media/mojo/services/mojo_video_decoder_service.h b/chromium/media/mojo/services/mojo_video_decoder_service.h
index 45a1a5723ab..cc8897ca264 100644
--- a/chromium/media/mojo/services/mojo_video_decoder_service.h
+++ b/chromium/media/mojo/services/mojo_video_decoder_service.h
@@ -41,13 +41,17 @@ class MEDIA_MOJO_EXPORT MojoVideoDecoderService final
explicit MojoVideoDecoderService(
MojoMediaClient* mojo_media_client,
MojoCdmServiceContext* mojo_cdm_service_context);
+
+ MojoVideoDecoderService(const MojoVideoDecoderService&) = delete;
+ MojoVideoDecoderService& operator=(const MojoVideoDecoderService&) = delete;
+
~MojoVideoDecoderService() final;
// mojom::VideoDecoder implementation
void GetSupportedConfigs(GetSupportedConfigsCallback callback) final;
void Construct(
mojo::PendingAssociatedRemote<mojom::VideoDecoderClient> client,
- mojo::PendingAssociatedRemote<mojom::MediaLog> media_log,
+ mojo::PendingRemote<mojom::MediaLog> media_log,
mojo::PendingReceiver<mojom::VideoFrameHandleReleaser>
video_frame_handle_receiver,
mojo::ScopedDataPipeConsumerHandle decoder_buffer_pipe,
@@ -86,6 +90,9 @@ class MEDIA_MOJO_EXPORT MojoVideoDecoderService final
bool restart_for_transitions,
ProvideOverlayInfoCB provide_overlay_info_cb);
+ void OnSupportedVideoDecoderConfigs(GetSupportedConfigsCallback callback,
+ SupportedVideoDecoderConfigs configs);
+
// Whether this instance is active (Decode() was called at least once).
bool is_active_instance_ = false;
@@ -126,8 +133,6 @@ class MEDIA_MOJO_EXPORT MojoVideoDecoderService final
base::WeakPtr<MojoVideoDecoderService> weak_this_;
base::WeakPtrFactory<MojoVideoDecoderService> weak_factory_{this};
-
- DISALLOW_COPY_AND_ASSIGN(MojoVideoDecoderService);
};
} // namespace media
diff --git a/chromium/media/mojo/services/mojo_video_encode_accelerator_provider.h b/chromium/media/mojo/services/mojo_video_encode_accelerator_provider.h
index 01b64d79c34..d0052e2917e 100644
--- a/chromium/media/mojo/services/mojo_video_encode_accelerator_provider.h
+++ b/chromium/media/mojo/services/mojo_video_encode_accelerator_provider.h
@@ -42,6 +42,12 @@ class MEDIA_MOJO_EXPORT MojoVideoEncodeAcceleratorProvider
CreateAndInitializeVideoEncodeAcceleratorCallback create_vea_callback,
const gpu::GpuPreferences& gpu_preferences,
const gpu::GpuDriverBugWorkarounds& gpu_workarounds);
+
+ MojoVideoEncodeAcceleratorProvider(
+ const MojoVideoEncodeAcceleratorProvider&) = delete;
+ MojoVideoEncodeAcceleratorProvider& operator=(
+ const MojoVideoEncodeAcceleratorProvider&) = delete;
+
~MojoVideoEncodeAcceleratorProvider() override;
// mojom::VideoEncodeAcceleratorProvider impl.
@@ -54,8 +60,6 @@ class MEDIA_MOJO_EXPORT MojoVideoEncodeAcceleratorProvider
const CreateAndInitializeVideoEncodeAcceleratorCallback create_vea_callback_;
const gpu::GpuPreferences& gpu_preferences_;
const gpu::GpuDriverBugWorkarounds gpu_workarounds_;
-
- DISALLOW_COPY_AND_ASSIGN(MojoVideoEncodeAcceleratorProvider);
};
} // namespace media
diff --git a/chromium/media/mojo/services/mojo_video_encode_accelerator_service.h b/chromium/media/mojo/services/mojo_video_encode_accelerator_service.h
index 9d9d3d29b2e..660ce87f05d 100644
--- a/chromium/media/mojo/services/mojo_video_encode_accelerator_service.h
+++ b/chromium/media/mojo/services/mojo_video_encode_accelerator_service.h
@@ -52,6 +52,12 @@ class MEDIA_MOJO_EXPORT MojoVideoEncodeAcceleratorService
CreateAndInitializeVideoEncodeAcceleratorCallback create_vea_callback,
const gpu::GpuPreferences& gpu_preferences,
const gpu::GpuDriverBugWorkarounds& gpu_workarounds);
+
+ MojoVideoEncodeAcceleratorService(const MojoVideoEncodeAcceleratorService&) =
+ delete;
+ MojoVideoEncodeAcceleratorService& operator=(
+ const MojoVideoEncodeAcceleratorService&) = delete;
+
~MojoVideoEncodeAcceleratorService() override;
// mojom::VideoEncodeAccelerator impl.
@@ -103,8 +109,6 @@ class MEDIA_MOJO_EXPORT MojoVideoEncodeAcceleratorService
SEQUENCE_CHECKER(sequence_checker_);
base::WeakPtrFactory<MojoVideoEncodeAcceleratorService> weak_factory_{this};
-
- DISALLOW_COPY_AND_ASSIGN(MojoVideoEncodeAcceleratorService);
};
} // namespace media
diff --git a/chromium/media/mojo/services/playback_events_recorder.cc b/chromium/media/mojo/services/playback_events_recorder.cc
index 6080dca225a..061a96cf75e 100644
--- a/chromium/media/mojo/services/playback_events_recorder.cc
+++ b/chromium/media/mojo/services/playback_events_recorder.cc
@@ -26,8 +26,7 @@ void RecordEventWithValue(const char* name, int64_t value) {
RecordEventWithValueAt(name, value, base::TimeTicks::Now());
}
-constexpr base::TimeDelta kBitrateReportPeriod =
- base::TimeDelta::FromSeconds(5);
+constexpr base::TimeDelta kBitrateReportPeriod = base::Seconds(5);
} // namespace
diff --git a/chromium/media/mojo/services/playback_events_recorder_test.cc b/chromium/media/mojo/services/playback_events_recorder_test.cc
index 5d41dc2c48c..3325aa8ca68 100644
--- a/chromium/media/mojo/services/playback_events_recorder_test.cc
+++ b/chromium/media/mojo/services/playback_events_recorder_test.cc
@@ -11,7 +11,7 @@
namespace media {
-constexpr base::TimeDelta kSecond = base::TimeDelta::FromSeconds(1);
+constexpr base::TimeDelta kSecond = base::Seconds(1);
class PlaybackEventsRecorderTest : public testing::Test {
public:
diff --git a/chromium/media/mojo/services/stable_video_decoder_factory_service.cc b/chromium/media/mojo/services/stable_video_decoder_factory_service.cc
new file mode 100644
index 00000000000..e9daea2b3e7
--- /dev/null
+++ b/chromium/media/mojo/services/stable_video_decoder_factory_service.cc
@@ -0,0 +1,24 @@
+// Copyright 2021 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/mojo/services/stable_video_decoder_factory_service.h"
+
+namespace media {
+
+StableVideoDecoderFactoryService::StableVideoDecoderFactoryService() = default;
+StableVideoDecoderFactoryService::~StableVideoDecoderFactoryService() = default;
+
+void StableVideoDecoderFactoryService::BindReceiver(
+ mojo::PendingReceiver<stable::mojom::StableVideoDecoderFactory> receiver) {
+ receivers_.Add(this, std::move(receiver));
+}
+
+void StableVideoDecoderFactoryService::CreateStableVideoDecoder(
+ mojo::PendingReceiver<stable::mojom::StableVideoDecoder> receiver) {
+ // TODO(b/171813538): connect with the ash-chrome video decoding stack.
+ // TODO(b/195769334): plumb OOP-VD.
+ NOTIMPLEMENTED();
+}
+
+} // namespace media
diff --git a/chromium/media/mojo/services/stable_video_decoder_factory_service.h b/chromium/media/mojo/services/stable_video_decoder_factory_service.h
new file mode 100644
index 00000000000..17b4178da72
--- /dev/null
+++ b/chromium/media/mojo/services/stable_video_decoder_factory_service.h
@@ -0,0 +1,39 @@
+// Copyright 2021 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_MOJO_SERVICES_STABLE_VIDEO_DECODER_FACTORY_SERVICE_H_
+#define MEDIA_MOJO_SERVICES_STABLE_VIDEO_DECODER_FACTORY_SERVICE_H_
+
+#include "media/mojo/mojom/stable/stable_video_decoder.mojom.h"
+#include "media/mojo/services/media_mojo_export.h"
+#include "mojo/public/cpp/bindings/pending_receiver.h"
+#include "mojo/public/cpp/bindings/receiver_set.h"
+
+namespace media {
+
+class MEDIA_MOJO_EXPORT StableVideoDecoderFactoryService
+ : public stable::mojom::StableVideoDecoderFactory {
+ public:
+ StableVideoDecoderFactoryService();
+ StableVideoDecoderFactoryService(const StableVideoDecoderFactoryService&) =
+ delete;
+ StableVideoDecoderFactoryService& operator=(
+ const StableVideoDecoderFactoryService&) = delete;
+ ~StableVideoDecoderFactoryService() override;
+
+ void BindReceiver(
+ mojo::PendingReceiver<stable::mojom::StableVideoDecoderFactory> receiver);
+
+ // stable::mojom::StableVideoDecoderFactory implementation.
+ void CreateStableVideoDecoder(
+ mojo::PendingReceiver<stable::mojom::StableVideoDecoder> receiver)
+ override;
+
+ private:
+ mojo::ReceiverSet<stable::mojom::StableVideoDecoderFactory> receivers_;
+};
+
+} // namespace media
+
+#endif // MEDIA_MOJO_SERVICES_STABLE_VIDEO_DECODER_FACTORY_SERVICE_H_
diff --git a/chromium/media/mojo/services/test_mojo_media_client.cc b/chromium/media/mojo/services/test_mojo_media_client.cc
index 8a9a71f4d29..b3f71b46471 100644
--- a/chromium/media/mojo/services/test_mojo_media_client.cc
+++ b/chromium/media/mojo/services/test_mojo_media_client.cc
@@ -76,8 +76,7 @@ std::unique_ptr<Renderer> TestMojoMediaClient::CreateRenderer(
// RendererImpls. Thus create one for each Renderer creation.
auto audio_sink = base::MakeRefCounted<AudioOutputStreamSink>();
auto video_sink = std::make_unique<NullVideoSink>(
- false, base::TimeDelta::FromSecondsD(1.0 / 60),
- NullVideoSink::NewFrameCB(), task_runner);
+ false, base::Seconds(1.0 / 60), NullVideoSink::NewFrameCB(), task_runner);
auto* video_sink_ptr = video_sink.get();
// Hold created sinks since DefaultRendererFactory only takes raw pointers to
diff --git a/chromium/media/mojo/services/test_mojo_media_client.h b/chromium/media/mojo/services/test_mojo_media_client.h
index 495da4c8536..1c7e8bb499c 100644
--- a/chromium/media/mojo/services/test_mojo_media_client.h
+++ b/chromium/media/mojo/services/test_mojo_media_client.h
@@ -25,6 +25,10 @@ class VideoRendererSink;
class TestMojoMediaClient final : public MojoMediaClient {
public:
TestMojoMediaClient();
+
+ TestMojoMediaClient(const TestMojoMediaClient&) = delete;
+ TestMojoMediaClient& operator=(const TestMojoMediaClient&) = delete;
+
~TestMojoMediaClient() final;
// MojoMediaClient implementation.
@@ -50,8 +54,6 @@ class TestMojoMediaClient final : public MojoMediaClient {
std::unique_ptr<RendererFactory> renderer_factory_;
std::vector<scoped_refptr<AudioRendererSink>> audio_sinks_;
std::vector<std::unique_ptr<VideoRendererSink>> video_sinks_;
-
- DISALLOW_COPY_AND_ASSIGN(TestMojoMediaClient);
};
} // namespace media
diff --git a/chromium/media/mojo/services/video_decode_perf_history.h b/chromium/media/mojo/services/video_decode_perf_history.h
index 30e3823aa75..4de22e6bf45 100644
--- a/chromium/media/mojo/services/video_decode_perf_history.h
+++ b/chromium/media/mojo/services/video_decode_perf_history.h
@@ -60,6 +60,10 @@ class MEDIA_MOJO_EXPORT VideoDecodePerfHistory
std::unique_ptr<VideoDecodeStatsDB> db,
learning::FeatureProviderFactoryCB feature_factory_cb =
learning::FeatureProviderFactoryCB());
+
+ VideoDecodePerfHistory(const VideoDecodePerfHistory&) = delete;
+ VideoDecodePerfHistory& operator=(const VideoDecodePerfHistory&) = delete;
+
~VideoDecodePerfHistory() override;
// Bind the mojo receiver to this instance. Single instance will be used to
@@ -205,8 +209,6 @@ class MEDIA_MOJO_EXPORT VideoDecodePerfHistory
SEQUENCE_CHECKER(sequence_checker_);
base::WeakPtrFactory<VideoDecodePerfHistory> weak_ptr_factory_{this};
-
- DISALLOW_COPY_AND_ASSIGN(VideoDecodePerfHistory);
};
} // namespace media
diff --git a/chromium/media/mojo/services/video_decode_stats_recorder.h b/chromium/media/mojo/services/video_decode_stats_recorder.h
index f4901789b84..595068f4aa9 100644
--- a/chromium/media/mojo/services/video_decode_stats_recorder.h
+++ b/chromium/media/mojo/services/video_decode_stats_recorder.h
@@ -29,6 +29,10 @@ class MEDIA_MOJO_EXPORT VideoDecodeStatsRecorder
learning::FeatureValue origin,
bool is_top_frame,
uint64_t player_id);
+
+ VideoDecodeStatsRecorder(const VideoDecodeStatsRecorder&) = delete;
+ VideoDecodeStatsRecorder& operator=(const VideoDecodeStatsRecorder&) = delete;
+
~VideoDecodeStatsRecorder() override;
// mojom::VideoDecodeStatsRecorder implementation:
@@ -48,8 +52,6 @@ class MEDIA_MOJO_EXPORT VideoDecodeStatsRecorder
mojom::PredictionFeatures features_;
mojom::PredictionTargets targets_;
-
- DISALLOW_COPY_AND_ASSIGN(VideoDecodeStatsRecorder);
};
} // namespace media
diff --git a/chromium/media/mojo/services/watch_time_recorder.cc b/chromium/media/mojo/services/watch_time_recorder.cc
index 414751e8aa3..77a267771eb 100644
--- a/chromium/media/mojo/services/watch_time_recorder.cc
+++ b/chromium/media/mojo/services/watch_time_recorder.cc
@@ -24,7 +24,7 @@ namespace media {
// The minimum amount of media playback which can elapse before we'll report
// watch time metrics for a playback.
constexpr base::TimeDelta kMinimumElapsedWatchTime =
- base::TimeDelta::FromSeconds(limits::kMinimumElapsedWatchTimeSecs);
+ base::Seconds(limits::kMinimumElapsedWatchTimeSecs);
static void RecordWatchTimeInternal(
base::StringPiece key,
@@ -32,7 +32,7 @@ static void RecordWatchTimeInternal(
base::TimeDelta minimum = kMinimumElapsedWatchTime) {
DCHECK(!key.empty());
base::UmaHistogramCustomTimes(std::string(key), value, minimum,
- base::TimeDelta::FromHours(10), 50);
+ base::Hours(10), 50);
}
static void RecordMeanTimeBetweenRebuffers(base::StringPiece key,
@@ -41,7 +41,7 @@ static void RecordMeanTimeBetweenRebuffers(base::StringPiece key,
// There is a maximum of 5 underflow events possible in a given 7s watch time
// period, so the minimum value is 1.4s.
- RecordWatchTimeInternal(key, value, base::TimeDelta::FromSecondsD(1.4));
+ RecordWatchTimeInternal(key, value, base::Seconds(1.4));
}
static void RecordDiscardedWatchTime(base::StringPiece key,
@@ -201,8 +201,8 @@ void WatchTimeRecorder::UpdateSecondaryProperties(
// update without creating a whole new record. Not checking
// audio_encryption_scheme and video_encryption_scheme as we want to
// capture changes in encryption schemes.
- if (last_record.secondary_properties->audio_codec == kUnknownAudioCodec ||
- last_record.secondary_properties->video_codec == kUnknownVideoCodec ||
+ if (last_record.secondary_properties->audio_codec == AudioCodec::kUnknown ||
+ last_record.secondary_properties->video_codec == VideoCodec::kUnknown ||
last_record.secondary_properties->audio_codec_profile ==
AudioCodecProfile::kUnknown ||
last_record.secondary_properties->video_codec_profile ==
@@ -212,9 +212,9 @@ void WatchTimeRecorder::UpdateSecondaryProperties(
last_record.secondary_properties->video_decoder ==
VideoDecoderType::kUnknown) {
auto temp_props = last_record.secondary_properties.Clone();
- if (last_record.secondary_properties->audio_codec == kUnknownAudioCodec)
+ if (last_record.secondary_properties->audio_codec == AudioCodec::kUnknown)
temp_props->audio_codec = secondary_properties->audio_codec;
- if (last_record.secondary_properties->video_codec == kUnknownVideoCodec)
+ if (last_record.secondary_properties->video_codec == VideoCodec::kUnknown)
temp_props->video_codec = secondary_properties->video_codec;
if (last_record.secondary_properties->audio_codec_profile ==
AudioCodecProfile::kUnknown) {
@@ -331,7 +331,7 @@ void WatchTimeRecorder::RecordUkmPlaybackData() {
absl::optional<uint64_t> clamped_duration_ms;
if (duration_ != kNoTimestamp && duration_ != kInfiniteDuration) {
clamped_duration_ms = duration_.InMilliseconds();
- if (duration_ > base::TimeDelta::FromSeconds(1)) {
+ if (duration_ > base::Seconds(1)) {
// Turns 54321 => 10000.
const uint64_t base =
std::pow(10, static_cast<uint64_t>(std::log10(*clamped_duration_ms)));
@@ -425,8 +425,10 @@ void WatchTimeRecorder::RecordUkmPlaybackData() {
}
// See note in mojom::PlaybackProperties about why we have both of these.
- builder.SetAudioCodec(ukm_record.secondary_properties->audio_codec);
- builder.SetVideoCodec(ukm_record.secondary_properties->video_codec);
+ builder.SetAudioCodec(
+ static_cast<int64_t>(ukm_record.secondary_properties->audio_codec));
+ builder.SetVideoCodec(
+ static_cast<int64_t>(ukm_record.secondary_properties->video_codec));
builder.SetAudioCodecProfile(static_cast<int64_t>(
ukm_record.secondary_properties->audio_codec_profile));
builder.SetVideoCodecProfile(
@@ -434,7 +436,7 @@ void WatchTimeRecorder::RecordUkmPlaybackData() {
builder.SetHasAudio(properties_->has_audio);
builder.SetHasVideo(properties_->has_video);
- if (ukm_record.secondary_properties->audio_codec == kCodecAAC)
+ if (ukm_record.secondary_properties->audio_codec == AudioCodec::kAAC)
aac_profiles.insert(ukm_record.secondary_properties->audio_codec_profile);
builder.SetAudioDecoderName(
diff --git a/chromium/media/mojo/services/watch_time_recorder.h b/chromium/media/mojo/services/watch_time_recorder.h
index 669781cf641..19f1b3259f7 100644
--- a/chromium/media/mojo/services/watch_time_recorder.h
+++ b/chromium/media/mojo/services/watch_time_recorder.h
@@ -34,6 +34,10 @@ class MEDIA_MOJO_EXPORT WatchTimeRecorder : public mojom::WatchTimeRecorder {
bool is_top_frame,
uint64_t player_id,
RecordAggregateWatchTimeCallback record_playback_cb);
+
+ WatchTimeRecorder(const WatchTimeRecorder&) = delete;
+ WatchTimeRecorder& operator=(const WatchTimeRecorder&) = delete;
+
~WatchTimeRecorder() override;
// mojom::WatchTimeRecorder implementation:
@@ -127,8 +131,6 @@ class MEDIA_MOJO_EXPORT WatchTimeRecorder : public mojom::WatchTimeRecorder {
base::TimeDelta last_timestamp_ = kNoTimestamp;
absl::optional<bool> autoplay_initiated_;
RecordAggregateWatchTimeCallback record_playback_cb_;
-
- DISALLOW_COPY_AND_ASSIGN(WatchTimeRecorder);
};
} // namespace media
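Self-contained sketch (not part of the patch): the codec fields above migrate from plain enums (kCodecAAC, kCodecH264) to scoped enum classes (AudioCodec::kAAC, VideoCodec::kH264), so metrics call sites that previously relied on implicit integer conversion now cast explicitly. Enum values and the recorder below are illustrative only:

#include <cstdint>
#include <iostream>

enum class AudioCodec : int { kUnknown = 0, kAAC = 1 };  // illustrative values

void RecordMetric(const char* name, int64_t value) {
  std::cout << name << '=' << value << '\n';
}

int main() {
  AudioCodec codec = AudioCodec::kAAC;
  // A scoped enum no longer converts implicitly, so the cast becomes explicit,
  // matching builder.SetAudioCodec(static_cast<int64_t>(...)) above.
  RecordMetric("AudioCodec", static_cast<int64_t>(codec));
  return 0;
}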
diff --git a/chromium/media/mojo/services/watch_time_recorder_unittest.cc b/chromium/media/mojo/services/watch_time_recorder_unittest.cc
index 8a235e456e0..3eb2fddb05d 100644
--- a/chromium/media/mojo/services/watch_time_recorder_unittest.cc
+++ b/chromium/media/mojo/services/watch_time_recorder_unittest.cc
@@ -75,6 +75,9 @@ class WatchTimeRecorderTest : public testing::Test {
provider_.BindNewPipeAndPassReceiver());
}
+ WatchTimeRecorderTest(const WatchTimeRecorderTest&) = delete;
+ WatchTimeRecorderTest& operator=(const WatchTimeRecorderTest&) = delete;
+
~WatchTimeRecorderTest() override { base::RunLoop().RunUntilIdle(); }
void Initialize(mojom::PlaybackPropertiesPtr properties) {
@@ -177,10 +180,10 @@ class WatchTimeRecorderTest : public testing::Test {
mojom::SecondaryPlaybackPropertiesPtr CreateSecondaryProperties() {
return mojom::SecondaryPlaybackProperties::New(
- kCodecAAC, kCodecH264, AudioCodecProfile::kUnknown, H264PROFILE_MAIN,
- AudioDecoderType::kUnknown, VideoDecoderType::kUnknown,
- EncryptionScheme::kUnencrypted, EncryptionScheme::kUnencrypted,
- gfx::Size(800, 600));
+ AudioCodec::kAAC, VideoCodec::kH264, AudioCodecProfile::kUnknown,
+ H264PROFILE_MAIN, AudioDecoderType::kUnknown,
+ VideoDecoderType::kUnknown, EncryptionScheme::kUnencrypted,
+ EncryptionScheme::kUnencrypted, gfx::Size(800, 600));
}
ukm::SourceId GetSourceId() { return source_id_; }
@@ -209,13 +212,11 @@ class WatchTimeRecorderTest : public testing::Test {
const std::vector<base::StringPiece> mtbr_keys_;
const std::vector<base::StringPiece> smooth_keys_;
const std::vector<base::StringPiece> discard_keys_;
-
- DISALLOW_COPY_AND_ASSIGN(WatchTimeRecorderTest);
};
TEST_F(WatchTimeRecorderTest, TestBasicReporting) {
- constexpr base::TimeDelta kWatchTime1 = base::TimeDelta::FromSeconds(25);
- constexpr base::TimeDelta kWatchTime2 = base::TimeDelta::FromSeconds(50);
+ constexpr base::TimeDelta kWatchTime1 = base::Seconds(25);
+ constexpr base::TimeDelta kWatchTime2 = base::Seconds(50);
for (int i = 0; i <= static_cast<int>(WatchTimeKey::kWatchTimeKeyMax); ++i) {
const WatchTimeKey key = static_cast<WatchTimeKey>(i);
@@ -365,8 +366,8 @@ TEST_F(WatchTimeRecorderTest, TestBasicReporting) {
}
TEST_F(WatchTimeRecorderTest, TestBasicReportingMediaStream) {
- constexpr base::TimeDelta kWatchTime1 = base::TimeDelta::FromSeconds(25);
- constexpr base::TimeDelta kWatchTime2 = base::TimeDelta::FromSeconds(50);
+ constexpr base::TimeDelta kWatchTime1 = base::Seconds(25);
+ constexpr base::TimeDelta kWatchTime2 = base::Seconds(50);
for (int i = 0; i <= static_cast<int>(WatchTimeKey::kWatchTimeKeyMax); ++i) {
const WatchTimeKey key = static_cast<WatchTimeKey>(i);
@@ -518,7 +519,7 @@ TEST_F(WatchTimeRecorderTest, TestBasicReportingMediaStream) {
TEST_F(WatchTimeRecorderTest, TestRebufferingMetrics) {
Initialize(true, false, true, true);
- constexpr base::TimeDelta kWatchTime = base::TimeDelta::FromSeconds(50);
+ constexpr base::TimeDelta kWatchTime = base::Seconds(50);
for (auto key : computation_keys_)
wtr_->RecordWatchTime(key, kWatchTime);
wtr_->UpdateUnderflowCount(1);
@@ -544,7 +545,7 @@ TEST_F(WatchTimeRecorderTest, TestRebufferingMetrics) {
// Now rerun the test with a small amount of watch time and ensure rebuffering
// isn't recorded because we haven't met the watch time requirements.
ResetMetricRecorders();
- constexpr base::TimeDelta kWatchTimeShort = base::TimeDelta::FromSeconds(5);
+ constexpr base::TimeDelta kWatchTimeShort = base::Seconds(5);
for (auto key : computation_keys_)
wtr_->RecordWatchTime(key, kWatchTimeShort);
wtr_->UpdateUnderflowCount(1);
@@ -562,7 +563,7 @@ TEST_F(WatchTimeRecorderTest, TestRebufferingMetricsMediaStream) {
Initialize(true, false, true, true,
mojom::MediaStreamType::kLocalDeviceCapture);
- constexpr base::TimeDelta kWatchTime = base::TimeDelta::FromSeconds(50);
+ constexpr base::TimeDelta kWatchTime = base::Seconds(50);
for (auto key : computation_keys_)
wtr_->RecordWatchTime(key, kWatchTime);
wtr_->UpdateUnderflowCount(1);
@@ -590,7 +591,7 @@ TEST_F(WatchTimeRecorderTest, TestDiscardMetrics) {
Initialize(true, false, true, true);
wtr_->UpdateSecondaryProperties(CreateSecondaryProperties());
- constexpr base::TimeDelta kWatchTime = base::TimeDelta::FromSeconds(5);
+ constexpr base::TimeDelta kWatchTime = base::Seconds(5);
for (auto key : computation_keys_)
wtr_->RecordWatchTime(key, kWatchTime);
@@ -616,7 +617,7 @@ TEST_F(WatchTimeRecorderTest, TestDiscardMetricsMediaStream) {
mojom::MediaStreamType::kLocalDeviceCapture);
wtr_->UpdateSecondaryProperties(CreateSecondaryProperties());
- constexpr base::TimeDelta kWatchTime = base::TimeDelta::FromSeconds(5);
+ constexpr base::TimeDelta kWatchTime = base::Seconds(5);
for (auto key : computation_keys_)
wtr_->RecordWatchTime(key, kWatchTime);
@@ -651,7 +652,7 @@ TEST_F(WatchTimeRecorderTest, TestFinalizeNoDuplication) {
wtr_->UpdateSecondaryProperties(secondary_properties.Clone());
// Verify that UKM is reported along with the watch time.
- constexpr base::TimeDelta kWatchTime = base::TimeDelta::FromSeconds(4);
+ constexpr base::TimeDelta kWatchTime = base::Seconds(4);
wtr_->RecordWatchTime(WatchTimeKey::kAudioVideoAll, kWatchTime);
// Finalize everything. UKM is only recorded at destruction, so this should do
@@ -681,8 +682,10 @@ TEST_F(WatchTimeRecorderTest, TestFinalizeNoDuplication) {
EXPECT_UKM(UkmEntry::kIsBackgroundName, properties->is_background);
EXPECT_UKM(UkmEntry::kIsMutedName, properties->is_muted);
- EXPECT_UKM(UkmEntry::kAudioCodecName, secondary_properties->audio_codec);
- EXPECT_UKM(UkmEntry::kVideoCodecName, secondary_properties->video_codec);
+ EXPECT_UKM(UkmEntry::kAudioCodecName,
+ static_cast<int>(secondary_properties->audio_codec));
+ EXPECT_UKM(UkmEntry::kVideoCodecName,
+ static_cast<int>(secondary_properties->video_codec));
EXPECT_UKM(UkmEntry::kAudioCodecProfileName,
static_cast<int64_t>(secondary_properties->audio_codec_profile));
EXPECT_UKM(UkmEntry::kVideoCodecProfileName,
@@ -761,8 +764,10 @@ TEST_F(WatchTimeRecorderTest, FinalizeWithoutWatchTime) {
EXPECT_UKM(UkmEntry::kIsBackgroundName, properties->is_background);
EXPECT_UKM(UkmEntry::kIsMutedName, properties->is_muted);
- EXPECT_UKM(UkmEntry::kAudioCodecName, secondary_properties->audio_codec);
- EXPECT_UKM(UkmEntry::kVideoCodecName, secondary_properties->video_codec);
+ EXPECT_UKM(UkmEntry::kAudioCodecName,
+ static_cast<int>(secondary_properties->audio_codec));
+ EXPECT_UKM(UkmEntry::kVideoCodecName,
+ static_cast<int>(secondary_properties->video_codec));
EXPECT_UKM(UkmEntry::kAudioCodecProfileName,
static_cast<int64_t>(secondary_properties->audio_codec_profile));
EXPECT_UKM(UkmEntry::kVideoCodecProfileName,
@@ -810,14 +815,14 @@ TEST_F(WatchTimeRecorderTest, BasicUkmAudioVideo) {
false, mojom::MediaStreamType::kNone);
mojom::SecondaryPlaybackPropertiesPtr secondary_properties =
mojom::SecondaryPlaybackProperties::New(
- kCodecAAC, kCodecH264, AudioCodecProfile::kXHE_AAC, H264PROFILE_MAIN,
- AudioDecoderType::kUnknown, VideoDecoderType::kUnknown,
- EncryptionScheme::kCenc, EncryptionScheme::kCbcs,
- gfx::Size(800, 600));
+ AudioCodec::kAAC, VideoCodec::kH264, AudioCodecProfile::kXHE_AAC,
+ H264PROFILE_MAIN, AudioDecoderType::kUnknown,
+ VideoDecoderType::kUnknown, EncryptionScheme::kCenc,
+ EncryptionScheme::kCbcs, gfx::Size(800, 600));
Initialize(properties.Clone());
wtr_->UpdateSecondaryProperties(secondary_properties.Clone());
- constexpr base::TimeDelta kWatchTime = base::TimeDelta::FromSeconds(4);
+ constexpr base::TimeDelta kWatchTime = base::Seconds(4);
wtr_->RecordWatchTime(WatchTimeKey::kAudioVideoAll, kWatchTime);
wtr_.reset();
base::RunLoop().RunUntilIdle();
@@ -833,8 +838,10 @@ TEST_F(WatchTimeRecorderTest, BasicUkmAudioVideo) {
EXPECT_UKM(UkmEntry::kWatchTimeName, kWatchTime.InMilliseconds());
EXPECT_UKM(UkmEntry::kIsBackgroundName, properties->is_background);
EXPECT_UKM(UkmEntry::kIsMutedName, properties->is_muted);
- EXPECT_UKM(UkmEntry::kAudioCodecName, secondary_properties->audio_codec);
- EXPECT_UKM(UkmEntry::kVideoCodecName, secondary_properties->video_codec);
+ EXPECT_UKM(UkmEntry::kAudioCodecName,
+ static_cast<int>(secondary_properties->audio_codec));
+ EXPECT_UKM(UkmEntry::kVideoCodecName,
+ static_cast<int>(secondary_properties->video_codec));
EXPECT_UKM(UkmEntry::kAudioCodecProfileName,
static_cast<int64_t>(secondary_properties->audio_codec_profile));
EXPECT_UKM(UkmEntry::kVideoCodecProfileName,
@@ -881,14 +888,14 @@ TEST_F(WatchTimeRecorderTest, BasicUkmAudioVideoWithExtras) {
false, mojom::MediaStreamType::kNone);
mojom::SecondaryPlaybackPropertiesPtr secondary_properties =
mojom::SecondaryPlaybackProperties::New(
- kCodecOpus, kCodecVP9, AudioCodecProfile::kUnknown,
+ AudioCodec::kOpus, VideoCodec::kVP9, AudioCodecProfile::kUnknown,
VP9PROFILE_PROFILE0, AudioDecoderType::kUnknown,
VideoDecoderType::kUnknown, EncryptionScheme::kUnencrypted,
EncryptionScheme::kUnencrypted, gfx::Size(800, 600));
Initialize(properties.Clone());
wtr_->UpdateSecondaryProperties(secondary_properties.Clone());
- constexpr base::TimeDelta kWatchTime = base::TimeDelta::FromSeconds(54);
+ constexpr base::TimeDelta kWatchTime = base::Seconds(54);
const base::TimeDelta kWatchTime2 = kWatchTime * 2;
const base::TimeDelta kWatchTime3 = kWatchTime / 3;
wtr_->RecordWatchTime(WatchTimeKey::kAudioVideoAll, kWatchTime2);
@@ -908,8 +915,7 @@ TEST_F(WatchTimeRecorderTest, BasicUkmAudioVideoWithExtras) {
wtr_->RecordWatchTime(WatchTimeKey::kAudioVideoDisplayPictureInPicture,
kWatchTime3);
wtr_->UpdateUnderflowCount(3);
- constexpr base::TimeDelta kUnderflowDuration =
- base::TimeDelta::FromMilliseconds(500);
+ constexpr base::TimeDelta kUnderflowDuration = base::Milliseconds(500);
wtr_->UpdateUnderflowDuration(2, kUnderflowDuration);
wtr_->UpdateVideoDecodeStats(10, 2);
wtr_->OnError(PIPELINE_ERROR_DECODE);
@@ -920,7 +926,7 @@ TEST_F(WatchTimeRecorderTest, BasicUkmAudioVideoWithExtras) {
wtr_->SetAutoplayInitiated(true);
- wtr_->OnDurationChanged(base::TimeDelta::FromSeconds(9500));
+ wtr_->OnDurationChanged(base::Seconds(9500));
wtr_.reset();
base::RunLoop().RunUntilIdle();
@@ -955,8 +961,10 @@ TEST_F(WatchTimeRecorderTest, BasicUkmAudioVideoWithExtras) {
EXPECT_UKM(UkmEntry::kIsBackgroundName, properties->is_background);
EXPECT_UKM(UkmEntry::kIsMutedName, properties->is_muted);
- EXPECT_UKM(UkmEntry::kAudioCodecName, secondary_properties->audio_codec);
- EXPECT_UKM(UkmEntry::kVideoCodecName, secondary_properties->video_codec);
+ EXPECT_UKM(UkmEntry::kAudioCodecName,
+ static_cast<int>(secondary_properties->audio_codec));
+ EXPECT_UKM(UkmEntry::kVideoCodecName,
+ static_cast<int>(secondary_properties->video_codec));
EXPECT_UKM(UkmEntry::kAudioCodecProfileName,
static_cast<int64_t>(secondary_properties->audio_codec_profile));
EXPECT_UKM(UkmEntry::kVideoCodecProfileName,
@@ -997,12 +1005,12 @@ TEST_F(WatchTimeRecorderTest, BasicUkmAudioVideoBackgroundMuted) {
Initialize(properties.Clone());
wtr_->UpdateSecondaryProperties(secondary_properties.Clone());
- constexpr base::TimeDelta kWatchTime = base::TimeDelta::FromSeconds(54);
+ constexpr base::TimeDelta kWatchTime = base::Seconds(54);
wtr_->RecordWatchTime(WatchTimeKey::kAudioVideoBackgroundAll, kWatchTime);
wtr_.reset();
base::RunLoop().RunUntilIdle();
- if (secondary_properties->audio_codec == kCodecAAC) {
+ if (secondary_properties->audio_codec == AudioCodec::kAAC) {
ExpectAacAudioCodecProfileHistogram(
secondary_properties->audio_codec_profile);
}
@@ -1015,8 +1023,10 @@ TEST_F(WatchTimeRecorderTest, BasicUkmAudioVideoBackgroundMuted) {
EXPECT_UKM(UkmEntry::kWatchTimeName, kWatchTime.InMilliseconds());
EXPECT_UKM(UkmEntry::kIsBackgroundName, properties->is_background);
EXPECT_UKM(UkmEntry::kIsMutedName, properties->is_muted);
- EXPECT_UKM(UkmEntry::kAudioCodecName, secondary_properties->audio_codec);
- EXPECT_UKM(UkmEntry::kVideoCodecName, secondary_properties->video_codec);
+ EXPECT_UKM(UkmEntry::kAudioCodecName,
+ static_cast<int>(secondary_properties->audio_codec));
+ EXPECT_UKM(UkmEntry::kVideoCodecName,
+ static_cast<int>(secondary_properties->video_codec));
EXPECT_UKM(UkmEntry::kAudioCodecProfileName,
static_cast<int64_t>(secondary_properties->audio_codec_profile));
EXPECT_UKM(UkmEntry::kVideoCodecProfileName,
@@ -1067,11 +1077,11 @@ TEST_F(WatchTimeRecorderTest, BasicUkmAudioVideoDuration) {
Initialize(properties.Clone());
wtr_->UpdateSecondaryProperties(secondary_properties.Clone());
- wtr_->OnDurationChanged(base::TimeDelta::FromSeconds(12345));
+ wtr_->OnDurationChanged(base::Seconds(12345));
wtr_.reset();
base::RunLoop().RunUntilIdle();
- if (secondary_properties->audio_codec == kCodecAAC) {
+ if (secondary_properties->audio_codec == AudioCodec::kAAC) {
ExpectAacAudioCodecProfileHistogram(
secondary_properties->audio_codec_profile);
}
@@ -1083,8 +1093,10 @@ TEST_F(WatchTimeRecorderTest, BasicUkmAudioVideoDuration) {
EXPECT_UKM(UkmEntry::kIsBackgroundName, properties->is_background);
EXPECT_UKM(UkmEntry::kIsMutedName, properties->is_muted);
- EXPECT_UKM(UkmEntry::kAudioCodecName, secondary_properties->audio_codec);
- EXPECT_UKM(UkmEntry::kVideoCodecName, secondary_properties->video_codec);
+ EXPECT_UKM(UkmEntry::kAudioCodecName,
+ static_cast<int>(secondary_properties->audio_codec));
+ EXPECT_UKM(UkmEntry::kVideoCodecName,
+ static_cast<int>(secondary_properties->video_codec));
EXPECT_UKM(UkmEntry::kAudioCodecProfileName,
static_cast<int64_t>(secondary_properties->audio_codec_profile));
EXPECT_UKM(UkmEntry::kVideoCodecProfileName,
@@ -1141,7 +1153,7 @@ TEST_F(WatchTimeRecorderTest, BasicUkmAudioVideoDurationInfinite) {
wtr_.reset();
base::RunLoop().RunUntilIdle();
- if (secondary_properties->audio_codec == kCodecAAC) {
+ if (secondary_properties->audio_codec == AudioCodec::kAAC) {
ExpectAacAudioCodecProfileHistogram(
secondary_properties->audio_codec_profile);
}
@@ -1153,8 +1165,10 @@ TEST_F(WatchTimeRecorderTest, BasicUkmAudioVideoDurationInfinite) {
EXPECT_UKM(UkmEntry::kIsBackgroundName, properties->is_background);
EXPECT_UKM(UkmEntry::kIsMutedName, properties->is_muted);
- EXPECT_UKM(UkmEntry::kAudioCodecName, secondary_properties->audio_codec);
- EXPECT_UKM(UkmEntry::kVideoCodecName, secondary_properties->video_codec);
+ EXPECT_UKM(UkmEntry::kAudioCodecName,
+ static_cast<int>(secondary_properties->audio_codec));
+ EXPECT_UKM(UkmEntry::kVideoCodecName,
+ static_cast<int>(secondary_properties->video_codec));
EXPECT_UKM(UkmEntry::kAudioCodecProfileName,
static_cast<int64_t>(secondary_properties->audio_codec_profile));
EXPECT_UKM(UkmEntry::kVideoCodecProfileName,
@@ -1215,7 +1229,7 @@ TEST_F(WatchTimeRecorderTest, BasicUkmMediaStreamType) {
Initialize(properties.Clone());
wtr_->UpdateSecondaryProperties(CreateSecondaryProperties());
- constexpr base::TimeDelta kWatchTime = base::TimeDelta::FromSeconds(1);
+ constexpr base::TimeDelta kWatchTime = base::Seconds(1);
wtr_->RecordWatchTime(WatchTimeKey::kAudioVideoAll, kWatchTime);
wtr_.reset();
base::RunLoop().RunUntilIdle();
@@ -1240,7 +1254,7 @@ TEST_F(WatchTimeRecorderTest, NoSecondaryProperties) {
false, mojom::MediaStreamType::kNone);
Initialize(properties.Clone());
- constexpr base::TimeDelta kWatchTime = base::TimeDelta::FromSeconds(54);
+ constexpr base::TimeDelta kWatchTime = base::Seconds(54);
wtr_->RecordWatchTime(WatchTimeKey::kAudioVideoAll, kWatchTime);
wtr_.reset();
base::RunLoop().RunUntilIdle();
@@ -1254,22 +1268,23 @@ TEST_F(WatchTimeRecorderTest, SingleSecondaryPropertiesUnknownToKnown) {
false, mojom::MediaStreamType::kNone);
mojom::SecondaryPlaybackPropertiesPtr secondary_properties1 =
mojom::SecondaryPlaybackProperties::New(
- kUnknownAudioCodec, kUnknownVideoCodec, AudioCodecProfile::kUnknown,
- VIDEO_CODEC_PROFILE_UNKNOWN, AudioDecoderType::kUnknown,
- VideoDecoderType::kUnknown, EncryptionScheme::kUnencrypted,
- EncryptionScheme::kUnencrypted, gfx::Size(800, 600));
+ AudioCodec::kUnknown, VideoCodec::kUnknown,
+ AudioCodecProfile::kUnknown, VIDEO_CODEC_PROFILE_UNKNOWN,
+ AudioDecoderType::kUnknown, VideoDecoderType::kUnknown,
+ EncryptionScheme::kUnencrypted, EncryptionScheme::kUnencrypted,
+ gfx::Size(800, 600));
Initialize(properties.Clone());
wtr_->UpdateSecondaryProperties(secondary_properties1.Clone());
- constexpr base::TimeDelta kWatchTime = base::TimeDelta::FromSeconds(54);
+ constexpr base::TimeDelta kWatchTime = base::Seconds(54);
wtr_->RecordWatchTime(WatchTimeKey::kAudioVideoAll, kWatchTime);
mojom::SecondaryPlaybackPropertiesPtr secondary_properties2 =
mojom::SecondaryPlaybackProperties::New(
- kCodecAAC, kCodecH264, AudioCodecProfile::kXHE_AAC, H264PROFILE_MAIN,
- AudioDecoderType::kFFmpeg, VideoDecoderType::kFFmpeg,
- EncryptionScheme::kUnencrypted, EncryptionScheme::kUnencrypted,
- gfx::Size(800, 600));
+ AudioCodec::kAAC, VideoCodec::kH264, AudioCodecProfile::kXHE_AAC,
+ H264PROFILE_MAIN, AudioDecoderType::kFFmpeg,
+ VideoDecoderType::kFFmpeg, EncryptionScheme::kUnencrypted,
+ EncryptionScheme::kUnencrypted, gfx::Size(800, 600));
wtr_->UpdateSecondaryProperties(secondary_properties2.Clone());
wtr_.reset();
@@ -1298,8 +1313,10 @@ TEST_F(WatchTimeRecorderTest, SingleSecondaryPropertiesUnknownToKnown) {
EXPECT_UKM(UkmEntry::kRebuffersCountName, 0);
EXPECT_UKM(UkmEntry::kCompletedRebuffersCountName, 0);
EXPECT_UKM(UkmEntry::kCompletedRebuffersDurationName, 0);
- EXPECT_UKM(UkmEntry::kAudioCodecName, secondary_properties2->audio_codec);
- EXPECT_UKM(UkmEntry::kVideoCodecName, secondary_properties2->video_codec);
+ EXPECT_UKM(UkmEntry::kAudioCodecName,
+ static_cast<int>(secondary_properties2->audio_codec));
+ EXPECT_UKM(UkmEntry::kVideoCodecName,
+ static_cast<int>(secondary_properties2->video_codec));
EXPECT_UKM(
UkmEntry::kAudioCodecProfileName,
static_cast<int64_t>(secondary_properties2->audio_codec_profile));
@@ -1325,16 +1342,15 @@ TEST_F(WatchTimeRecorderTest, MultipleSecondaryPropertiesNoFinalize) {
false, mojom::MediaStreamType::kNone);
mojom::SecondaryPlaybackPropertiesPtr secondary_properties1 =
mojom::SecondaryPlaybackProperties::New(
- kCodecOpus, kCodecVP9, AudioCodecProfile::kUnknown,
+ AudioCodec::kOpus, VideoCodec::kVP9, AudioCodecProfile::kUnknown,
VP9PROFILE_PROFILE0, AudioDecoderType::kMojo, VideoDecoderType::kMojo,
EncryptionScheme::kUnencrypted, EncryptionScheme::kUnencrypted,
gfx::Size(400, 300));
Initialize(properties.Clone());
wtr_->UpdateSecondaryProperties(secondary_properties1.Clone());
- constexpr base::TimeDelta kUnderflowDuration =
- base::TimeDelta::FromMilliseconds(250);
- constexpr base::TimeDelta kWatchTime1 = base::TimeDelta::FromSeconds(54);
+ constexpr base::TimeDelta kUnderflowDuration = base::Milliseconds(250);
+ constexpr base::TimeDelta kWatchTime1 = base::Seconds(54);
const int kUnderflowCount1 = 2;
wtr_->RecordWatchTime(WatchTimeKey::kAudioVideoAll, kWatchTime1);
wtr_->UpdateUnderflowCount(kUnderflowCount1);
@@ -1346,13 +1362,13 @@ TEST_F(WatchTimeRecorderTest, MultipleSecondaryPropertiesNoFinalize) {
mojom::SecondaryPlaybackPropertiesPtr secondary_properties2 =
mojom::SecondaryPlaybackProperties::New(
- kCodecAAC, kCodecH264, AudioCodecProfile::kUnknown, H264PROFILE_MAIN,
- AudioDecoderType::kFFmpeg, VideoDecoderType::kFFmpeg,
- EncryptionScheme::kCenc, EncryptionScheme::kCenc,
- gfx::Size(800, 600));
+ AudioCodec::kAAC, VideoCodec::kH264, AudioCodecProfile::kUnknown,
+ H264PROFILE_MAIN, AudioDecoderType::kFFmpeg,
+ VideoDecoderType::kFFmpeg, EncryptionScheme::kCenc,
+ EncryptionScheme::kCenc, gfx::Size(800, 600));
wtr_->UpdateSecondaryProperties(secondary_properties2.Clone());
- constexpr base::TimeDelta kWatchTime2 = base::TimeDelta::FromSeconds(25);
+ constexpr base::TimeDelta kWatchTime2 = base::Seconds(25);
const int kUnderflowCount2 = 3;
// Watch time and underflow counts continue to accumulate during property
@@ -1361,7 +1377,7 @@ TEST_F(WatchTimeRecorderTest, MultipleSecondaryPropertiesNoFinalize) {
kWatchTime1 + kWatchTime2);
wtr_->UpdateUnderflowCount(kUnderflowCount1 + kUnderflowCount2);
wtr_->OnError(PIPELINE_ERROR_DECODE);
- wtr_->OnDurationChanged(base::TimeDelta::FromSeconds(5125));
+ wtr_->OnDurationChanged(base::Seconds(5125));
constexpr int kDecodedFrameCount2 = 20;
constexpr int kDroppedFrameCount2 = 10;
@@ -1405,8 +1421,10 @@ TEST_F(WatchTimeRecorderTest, MultipleSecondaryPropertiesNoFinalize) {
kUnderflowDuration.InMilliseconds());
EXPECT_UKM(UkmEntry::kVideoFramesDecodedName, kDecodedFrameCount1);
EXPECT_UKM(UkmEntry::kVideoFramesDroppedName, kDroppedFrameCount1);
- EXPECT_UKM(UkmEntry::kAudioCodecName, secondary_properties1->audio_codec);
- EXPECT_UKM(UkmEntry::kVideoCodecName, secondary_properties1->video_codec);
+ EXPECT_UKM(UkmEntry::kAudioCodecName,
+ static_cast<int>(secondary_properties1->audio_codec));
+ EXPECT_UKM(UkmEntry::kVideoCodecName,
+ static_cast<int>(secondary_properties1->video_codec));
EXPECT_UKM(UkmEntry::kAudioCodecProfileName,
static_cast<int64_t>(secondary_properties1->audio_codec_profile));
EXPECT_UKM(UkmEntry::kVideoCodecProfileName,
@@ -1435,8 +1453,10 @@ TEST_F(WatchTimeRecorderTest, MultipleSecondaryPropertiesNoFinalize) {
EXPECT_UKM(UkmEntry::kVideoFramesDroppedName, kDroppedFrameCount2);
EXPECT_UKM(UkmEntry::kCompletedRebuffersDurationName, 0);
- EXPECT_UKM(UkmEntry::kAudioCodecName, secondary_properties2->audio_codec);
- EXPECT_UKM(UkmEntry::kVideoCodecName, secondary_properties2->video_codec);
+ EXPECT_UKM(UkmEntry::kAudioCodecName,
+ static_cast<int>(secondary_properties2->audio_codec));
+ EXPECT_UKM(UkmEntry::kVideoCodecName,
+ static_cast<int>(secondary_properties2->video_codec));
EXPECT_UKM(UkmEntry::kAudioCodecProfileName,
static_cast<int64_t>(secondary_properties2->audio_codec_profile));
EXPECT_UKM(UkmEntry::kVideoCodecProfileName,
@@ -1459,16 +1479,15 @@ TEST_F(WatchTimeRecorderTest, MultipleSecondaryPropertiesNoFinalizeNo2ndWT) {
false, mojom::MediaStreamType::kNone);
mojom::SecondaryPlaybackPropertiesPtr secondary_properties1 =
mojom::SecondaryPlaybackProperties::New(
- kCodecOpus, kCodecVP9, AudioCodecProfile::kUnknown,
+ AudioCodec::kOpus, VideoCodec::kVP9, AudioCodecProfile::kUnknown,
VP9PROFILE_PROFILE0, AudioDecoderType::kMojo, VideoDecoderType::kMojo,
EncryptionScheme::kUnencrypted, EncryptionScheme::kUnencrypted,
gfx::Size(400, 300));
Initialize(properties.Clone());
wtr_->UpdateSecondaryProperties(secondary_properties1.Clone());
- constexpr base::TimeDelta kUnderflowDuration =
- base::TimeDelta::FromMilliseconds(250);
- constexpr base::TimeDelta kWatchTime1 = base::TimeDelta::FromSeconds(54);
+ constexpr base::TimeDelta kUnderflowDuration = base::Milliseconds(250);
+ constexpr base::TimeDelta kWatchTime1 = base::Seconds(54);
const int kUnderflowCount1 = 2;
wtr_->RecordWatchTime(WatchTimeKey::kAudioVideoAll, kWatchTime1);
wtr_->UpdateUnderflowCount(kUnderflowCount1);
@@ -1480,10 +1499,10 @@ TEST_F(WatchTimeRecorderTest, MultipleSecondaryPropertiesNoFinalizeNo2ndWT) {
mojom::SecondaryPlaybackPropertiesPtr secondary_properties2 =
mojom::SecondaryPlaybackProperties::New(
- kCodecAAC, kCodecH264, AudioCodecProfile::kXHE_AAC, H264PROFILE_MAIN,
- AudioDecoderType::kFFmpeg, VideoDecoderType::kFFmpeg,
- EncryptionScheme::kUnencrypted, EncryptionScheme::kUnencrypted,
- gfx::Size(800, 600));
+ AudioCodec::kAAC, VideoCodec::kH264, AudioCodecProfile::kXHE_AAC,
+ H264PROFILE_MAIN, AudioDecoderType::kFFmpeg,
+ VideoDecoderType::kFFmpeg, EncryptionScheme::kUnencrypted,
+ EncryptionScheme::kUnencrypted, gfx::Size(800, 600));
wtr_->UpdateSecondaryProperties(secondary_properties2.Clone());
// Don't record any watch time to the new record, it should report zero watch
@@ -1524,8 +1543,10 @@ TEST_F(WatchTimeRecorderTest, MultipleSecondaryPropertiesNoFinalizeNo2ndWT) {
kUnderflowDuration.InMilliseconds());
EXPECT_UKM(UkmEntry::kVideoFramesDecodedName, kDecodedFrameCount1);
EXPECT_UKM(UkmEntry::kVideoFramesDroppedName, kDroppedFrameCount1);
- EXPECT_UKM(UkmEntry::kAudioCodecName, secondary_properties1->audio_codec);
- EXPECT_UKM(UkmEntry::kVideoCodecName, secondary_properties1->video_codec);
+ EXPECT_UKM(UkmEntry::kAudioCodecName,
+ static_cast<int>(secondary_properties1->audio_codec));
+ EXPECT_UKM(UkmEntry::kVideoCodecName,
+ static_cast<int>(secondary_properties1->video_codec));
EXPECT_UKM(UkmEntry::kAudioCodecProfileName,
static_cast<int64_t>(secondary_properties1->audio_codec_profile));
EXPECT_UKM(UkmEntry::kVideoCodecProfileName,
@@ -1551,8 +1572,10 @@ TEST_F(WatchTimeRecorderTest, MultipleSecondaryPropertiesNoFinalizeNo2ndWT) {
EXPECT_UKM(UkmEntry::kCompletedRebuffersDurationName, 0);
EXPECT_UKM(UkmEntry::kVideoFramesDecodedName, 0);
EXPECT_UKM(UkmEntry::kVideoFramesDroppedName, 0);
- EXPECT_UKM(UkmEntry::kAudioCodecName, secondary_properties2->audio_codec);
- EXPECT_UKM(UkmEntry::kVideoCodecName, secondary_properties2->video_codec);
+ EXPECT_UKM(UkmEntry::kAudioCodecName,
+ static_cast<int>(secondary_properties2->audio_codec));
+ EXPECT_UKM(UkmEntry::kVideoCodecName,
+ static_cast<int>(secondary_properties2->video_codec));
EXPECT_UKM(UkmEntry::kAudioCodecProfileName,
static_cast<int64_t>(secondary_properties2->audio_codec_profile));
EXPECT_UKM(UkmEntry::kVideoCodecProfileName,
@@ -1575,16 +1598,15 @@ TEST_F(WatchTimeRecorderTest, MultipleSecondaryPropertiesWithFinalize) {
false, mojom::MediaStreamType::kNone);
mojom::SecondaryPlaybackPropertiesPtr secondary_properties1 =
mojom::SecondaryPlaybackProperties::New(
- kCodecOpus, kCodecVP9, AudioCodecProfile::kUnknown,
+ AudioCodec::kOpus, VideoCodec::kVP9, AudioCodecProfile::kUnknown,
VP9PROFILE_PROFILE0, AudioDecoderType::kMojo, VideoDecoderType::kMojo,
EncryptionScheme::kCbcs, EncryptionScheme::kCbcs,
gfx::Size(400, 300));
Initialize(properties.Clone());
wtr_->UpdateSecondaryProperties(secondary_properties1.Clone());
- constexpr base::TimeDelta kUnderflowDuration =
- base::TimeDelta::FromMilliseconds(250);
- constexpr base::TimeDelta kWatchTime1 = base::TimeDelta::FromSeconds(54);
+ constexpr base::TimeDelta kUnderflowDuration = base::Milliseconds(250);
+ constexpr base::TimeDelta kWatchTime1 = base::Seconds(54);
const int kUnderflowCount1 = 2;
wtr_->RecordWatchTime(WatchTimeKey::kAudioVideoAll, kWatchTime1);
wtr_->UpdateUnderflowCount(kUnderflowCount1);
@@ -1600,13 +1622,13 @@ TEST_F(WatchTimeRecorderTest, MultipleSecondaryPropertiesWithFinalize) {
mojom::SecondaryPlaybackPropertiesPtr secondary_properties2 =
mojom::SecondaryPlaybackProperties::New(
- kCodecAAC, kCodecH264, AudioCodecProfile::kXHE_AAC, H264PROFILE_MAIN,
- AudioDecoderType::kFFmpeg, VideoDecoderType::kFFmpeg,
- EncryptionScheme::kUnencrypted, EncryptionScheme::kUnencrypted,
- gfx::Size(800, 600));
+ AudioCodec::kAAC, VideoCodec::kH264, AudioCodecProfile::kXHE_AAC,
+ H264PROFILE_MAIN, AudioDecoderType::kFFmpeg,
+ VideoDecoderType::kFFmpeg, EncryptionScheme::kUnencrypted,
+ EncryptionScheme::kUnencrypted, gfx::Size(800, 600));
wtr_->UpdateSecondaryProperties(secondary_properties2.Clone());
- constexpr base::TimeDelta kWatchTime2 = base::TimeDelta::FromSeconds(25);
+ constexpr base::TimeDelta kWatchTime2 = base::Seconds(25);
const int kUnderflowCount2 = 3;
wtr_->RecordWatchTime(WatchTimeKey::kAudioVideoAll, kWatchTime2);
@@ -1650,8 +1672,10 @@ TEST_F(WatchTimeRecorderTest, MultipleSecondaryPropertiesWithFinalize) {
kUnderflowDuration.InMilliseconds());
EXPECT_UKM(UkmEntry::kVideoFramesDecodedName, kDecodedFrameCount1);
EXPECT_UKM(UkmEntry::kVideoFramesDroppedName, kDroppedFrameCount1);
- EXPECT_UKM(UkmEntry::kAudioCodecName, secondary_properties1->audio_codec);
- EXPECT_UKM(UkmEntry::kVideoCodecName, secondary_properties1->video_codec);
+ EXPECT_UKM(UkmEntry::kAudioCodecName,
+ static_cast<int>(secondary_properties1->audio_codec));
+ EXPECT_UKM(UkmEntry::kVideoCodecName,
+ static_cast<int>(secondary_properties1->video_codec));
EXPECT_UKM(UkmEntry::kAudioCodecProfileName,
static_cast<int64_t>(secondary_properties1->audio_codec_profile));
EXPECT_UKM(UkmEntry::kVideoCodecProfileName,
@@ -1679,8 +1703,10 @@ TEST_F(WatchTimeRecorderTest, MultipleSecondaryPropertiesWithFinalize) {
EXPECT_UKM(UkmEntry::kCompletedRebuffersDurationName, 0);
EXPECT_UKM(UkmEntry::kVideoFramesDecodedName, 0);
EXPECT_UKM(UkmEntry::kVideoFramesDroppedName, 0);
- EXPECT_UKM(UkmEntry::kAudioCodecName, secondary_properties2->audio_codec);
- EXPECT_UKM(UkmEntry::kVideoCodecName, secondary_properties2->video_codec);
+ EXPECT_UKM(UkmEntry::kAudioCodecName,
+ static_cast<int>(secondary_properties2->audio_codec));
+ EXPECT_UKM(UkmEntry::kVideoCodecName,
+ static_cast<int>(secondary_properties2->video_codec));
EXPECT_UKM(UkmEntry::kAudioCodecProfileName,
static_cast<int64_t>(secondary_properties2->audio_codec_profile));
EXPECT_UKM(UkmEntry::kVideoCodecProfileName,
@@ -1703,16 +1729,15 @@ TEST_F(WatchTimeRecorderTest, MultipleSecondaryPropertiesRebufferCarryover) {
false, mojom::MediaStreamType::kNone);
mojom::SecondaryPlaybackPropertiesPtr secondary_properties1 =
mojom::SecondaryPlaybackProperties::New(
- kCodecOpus, kCodecVP9, AudioCodecProfile::kUnknown,
+ AudioCodec::kOpus, VideoCodec::kVP9, AudioCodecProfile::kUnknown,
VP9PROFILE_PROFILE0, AudioDecoderType::kMojo, VideoDecoderType::kMojo,
EncryptionScheme::kCbcs, EncryptionScheme::kCbcs,
gfx::Size(400, 300));
Initialize(properties.Clone());
wtr_->UpdateSecondaryProperties(secondary_properties1.Clone());
- constexpr base::TimeDelta kUnderflowDuration =
- base::TimeDelta::FromMilliseconds(250);
- constexpr base::TimeDelta kWatchTime1 = base::TimeDelta::FromSeconds(54);
+ constexpr base::TimeDelta kUnderflowDuration = base::Milliseconds(250);
+ constexpr base::TimeDelta kWatchTime1 = base::Seconds(54);
const int kUnderflowCount1 = 2;
wtr_->RecordWatchTime(WatchTimeKey::kAudioVideoAll, kWatchTime1);
wtr_->UpdateUnderflowCount(kUnderflowCount1);
@@ -1722,13 +1747,13 @@ TEST_F(WatchTimeRecorderTest, MultipleSecondaryPropertiesRebufferCarryover) {
mojom::SecondaryPlaybackPropertiesPtr secondary_properties2 =
mojom::SecondaryPlaybackProperties::New(
- kCodecAAC, kCodecH264, AudioCodecProfile::kXHE_AAC, H264PROFILE_MAIN,
- AudioDecoderType::kFFmpeg, VideoDecoderType::kFFmpeg,
- EncryptionScheme::kUnencrypted, EncryptionScheme::kUnencrypted,
- gfx::Size(800, 600));
+ AudioCodec::kAAC, VideoCodec::kH264, AudioCodecProfile::kXHE_AAC,
+ H264PROFILE_MAIN, AudioDecoderType::kFFmpeg,
+ VideoDecoderType::kFFmpeg, EncryptionScheme::kUnencrypted,
+ EncryptionScheme::kUnencrypted, gfx::Size(800, 600));
wtr_->UpdateSecondaryProperties(secondary_properties2.Clone());
- constexpr base::TimeDelta kWatchTime2 = base::TimeDelta::FromSeconds(25);
+ constexpr base::TimeDelta kWatchTime2 = base::Seconds(25);
const int kUnderflowCount2 = 3;
// Watch time and underflow counts continue to accumulate during property
@@ -1743,7 +1768,7 @@ TEST_F(WatchTimeRecorderTest, MultipleSecondaryPropertiesRebufferCarryover) {
wtr_->UpdateUnderflowDuration(kUnderflowCount1, kUnderflowDuration * 1.5);
wtr_->OnError(PIPELINE_ERROR_DECODE);
- wtr_->OnDurationChanged(base::TimeDelta::FromSeconds(5125));
+ wtr_->OnDurationChanged(base::Seconds(5125));
wtr_.reset();
base::RunLoop().RunUntilIdle();
@@ -1780,8 +1805,10 @@ TEST_F(WatchTimeRecorderTest, MultipleSecondaryPropertiesRebufferCarryover) {
EXPECT_UKM(UkmEntry::kCompletedRebuffersCountName, kUnderflowCount1 - 1);
EXPECT_UKM(UkmEntry::kCompletedRebuffersDurationName,
kUnderflowDuration.InMilliseconds());
- EXPECT_UKM(UkmEntry::kAudioCodecName, secondary_properties1->audio_codec);
- EXPECT_UKM(UkmEntry::kVideoCodecName, secondary_properties1->video_codec);
+ EXPECT_UKM(UkmEntry::kAudioCodecName,
+ static_cast<int>(secondary_properties1->audio_codec));
+ EXPECT_UKM(UkmEntry::kVideoCodecName,
+ static_cast<int>(secondary_properties1->video_codec));
EXPECT_UKM(UkmEntry::kAudioCodecProfileName,
static_cast<int64_t>(secondary_properties1->audio_codec_profile));
EXPECT_UKM(UkmEntry::kVideoCodecProfileName,
@@ -1808,8 +1835,10 @@ TEST_F(WatchTimeRecorderTest, MultipleSecondaryPropertiesRebufferCarryover) {
EXPECT_UKM(UkmEntry::kCompletedRebuffersCountName, 1);
EXPECT_UKM(UkmEntry::kCompletedRebuffersDurationName,
(kUnderflowDuration * 1.5 - kUnderflowDuration).InMilliseconds());
- EXPECT_UKM(UkmEntry::kAudioCodecName, secondary_properties2->audio_codec);
- EXPECT_UKM(UkmEntry::kVideoCodecName, secondary_properties2->video_codec);
+ EXPECT_UKM(UkmEntry::kAudioCodecName,
+ static_cast<int>(secondary_properties2->audio_codec));
+ EXPECT_UKM(UkmEntry::kVideoCodecName,
+ static_cast<int>(secondary_properties2->video_codec));
EXPECT_UKM(UkmEntry::kAudioCodecProfileName,
static_cast<int64_t>(secondary_properties2->audio_codec_profile));
EXPECT_UKM(UkmEntry::kVideoCodecProfileName,
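
The watch-time recorder hunks above all apply the same two migrations: the unscoped kCodec* constants become scoped AudioCodec::/VideoCodec:: enumerators (which no longer convert implicitly to an integer, hence the added static_casts), and the verbose base::TimeDelta::From*() factories become the terse base::Seconds()/base::Milliseconds() helpers. A minimal before/after sketch, assuming the media and base headers these tests already include:

  // Before: unscoped enum converts implicitly; verbose TimeDelta factory.
  media::AudioCodec audio = media::kCodecAAC;
  constexpr base::TimeDelta kWatchTime = base::TimeDelta::FromSeconds(4);
  EXPECT_UKM(UkmEntry::kAudioCodecName, audio);

  // After: scoped enum class needs an explicit cast for the UKM value.
  media::AudioCodec audio2 = media::AudioCodec::kAAC;
  constexpr base::TimeDelta kWatchTime2 = base::Seconds(4);
  EXPECT_UKM(UkmEntry::kAudioCodecName, static_cast<int>(audio2));
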
diff --git a/chromium/media/muxers/webm_muxer.cc b/chromium/media/muxers/webm_muxer.cc
index c2fe59e1abc..d95e6e5765c 100644
--- a/chromium/media/muxers/webm_muxer.cc
+++ b/chromium/media/muxers/webm_muxer.cc
@@ -54,7 +54,7 @@ constexpr uint8_t codec_private[4] = {
// Force new clusters at a maximum rate of 10 Hz.
constexpr base::TimeDelta kMinimumForcedClusterDuration =
- base::TimeDelta::FromMilliseconds(100);
+ base::Milliseconds(100);
void WriteOpusHeader(const media::AudioParameters& params, uint8_t* header) {
// See https://wiki.xiph.org/OggOpus#ID_Header.
@@ -97,13 +97,13 @@ static const char kPcmCodecId[] = "A_PCM/FLOAT/IEEE";
static const char* MkvCodeIcForMediaVideoCodecId(VideoCodec video_codec) {
switch (video_codec) {
- case kCodecVP8:
+ case VideoCodec::kVP8:
return mkvmuxer::Tracks::kVp8CodecId;
- case kCodecVP9:
+ case VideoCodec::kVP9:
return mkvmuxer::Tracks::kVp9CodecId;
- case kCodecAV1:
+ case VideoCodec::kAV1:
return mkvmuxer::Tracks::kAv1CodecId;
- case kCodecH264:
+ case VideoCodec::kH264:
return kH264CodecId;
default:
NOTREACHED() << "Unsupported codec " << GetCodecName(video_codec);
@@ -204,7 +204,7 @@ WebmMuxer::VideoParameters::VideoParameters(
scoped_refptr<media::VideoFrame> frame)
: visible_rect_size(frame->visible_rect().size()),
frame_rate(frame->metadata().frame_rate.value_or(0.0)),
- codec(kUnknownVideoCodec),
+ codec(VideoCodec::kUnknown),
color_space(frame->ColorSpace()) {}
WebmMuxer::VideoParameters::VideoParameters(
@@ -229,7 +229,7 @@ WebmMuxer::WebmMuxer(AudioCodec audio_codec,
bool has_audio,
std::unique_ptr<Delegate> delegate)
: audio_codec_(audio_codec),
- video_codec_(kUnknownVideoCodec),
+ video_codec_(VideoCodec::kUnknown),
video_track_index_(0),
audio_track_index_(0),
has_video_(has_video),
@@ -238,7 +238,7 @@ WebmMuxer::WebmMuxer(AudioCodec audio_codec,
force_one_libwebm_error_(false) {
DCHECK(has_video_ || has_audio_);
DCHECK(delegate_);
- DCHECK(audio_codec == kCodecOpus || audio_codec == kCodecPCM)
+ DCHECK(audio_codec == AudioCodec::kOpus || audio_codec == AudioCodec::kPCM)
<< " Unsupported audio codec: " << GetCodecName(audio_codec);
delegate_->InitSegment(&segment_);
@@ -268,10 +268,10 @@ bool WebmMuxer::OnEncodedVideo(const VideoParameters& params,
bool is_key_frame) {
DVLOG(2) << __func__ << " - " << encoded_data.size() << "B";
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
- DCHECK(params.codec == kCodecVP8 || params.codec == kCodecVP9 ||
- params.codec == kCodecH264 || params.codec == kCodecAV1)
+ DCHECK(params.codec == VideoCodec::kVP8 || params.codec == VideoCodec::kVP9 ||
+ params.codec == VideoCodec::kH264 || params.codec == VideoCodec::kAV1)
<< " Unsupported video codec: " << GetCodecName(params.codec);
- DCHECK(video_codec_ == kUnknownVideoCodec || video_codec_ == params.codec)
+ DCHECK(video_codec_ == VideoCodec::kUnknown || video_codec_ == params.codec)
<< "Unsupported: codec switched, to: " << GetCodecName(params.codec);
if (encoded_data.size() == 0u) {
@@ -292,7 +292,7 @@ bool WebmMuxer::OnEncodedVideo(const VideoParameters& params,
last_frame_timestamp_video_ = first_frame_timestamp_video_;
}
// Add codec private for AV1.
- if (params.codec == kCodecAV1 &&
+ if (params.codec == VideoCodec::kAV1 &&
!segment_.GetTrackByNumber(video_track_index_)
->SetCodecPrivate(av1::codec_private, sizeof(av1::codec_private)))
LOG(ERROR) << __func__ << " failed to set CodecPrivate for AV1.";
@@ -409,7 +409,7 @@ void WebmMuxer::AddVideoTrack(
DCHECK_EQ(1000000ull, segment_.GetSegmentInfo()->timecode_scale());
// Set alpha channel parameters for only VPX (crbug.com/711825).
- if (video_codec_ == kCodecH264)
+ if (video_codec_ == VideoCodec::kH264)
return;
video_track->SetAlphaMode(mkvmuxer::VideoTrack::kAlpha);
// Alpha channel, if present, is stored in a BlockAdditional next to the
@@ -445,7 +445,7 @@ void WebmMuxer::AddAudioTrack(const media::AudioParameters& params) {
// Audio data is always pcm_f32le.
audio_track->set_bit_depth(32u);
- if (audio_codec_ == kCodecOpus) {
+ if (audio_codec_ == AudioCodec::kOpus) {
audio_track->set_codec_id(mkvmuxer::Tracks::kOpusCodecId);
uint8_t opus_header[OPUS_EXTRADATA_SIZE];
@@ -457,7 +457,7 @@ void WebmMuxer::AddAudioTrack(const media::AudioParameters& params) {
// Segment's timestamps should be in milliseconds, DCHECK it. See
// http://www.webmproject.org/docs/container/#muxer-guidelines
DCHECK_EQ(1000000ull, segment_.GetSegmentInfo()->timecode_scale());
- } else if (audio_codec_ == kCodecPCM) {
+ } else if (audio_codec_ == AudioCodec::kPCM) {
audio_track->set_codec_id(kPcmCodecId);
}
}
diff --git a/chromium/media/muxers/webm_muxer_fuzzertest.cc b/chromium/media/muxers/webm_muxer_fuzzertest.cc
index c9abb046ca9..0d241ef5fbc 100644
--- a/chromium/media/muxers/webm_muxer_fuzzertest.cc
+++ b/chromium/media/muxers/webm_muxer_fuzzertest.cc
@@ -23,10 +23,10 @@
const int kMinNumIterations = 1;
const int kMaxNumIterations = 10;
-static const int kSupportedVideoCodecs[] = {media::kCodecVP8, media::kCodecVP9,
- media::kCodecH264};
-static const int kSupportedAudioCodecs[] = {media::kCodecOpus,
- media::kCodecPCM};
+static const media::VideoCodec kSupportedVideoCodecs[] = {
+ media::VideoCodec::kVP8, media::VideoCodec::kVP9, media::VideoCodec::kH264};
+static const media::AudioCodec kSupportedAudioCodecs[] = {
+ media::AudioCodec::kOpus, media::AudioCodec::kPCM};
static const int kSampleRatesInKHz[] = {48, 24, 16, 12, 8};
@@ -55,10 +55,10 @@ extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
}
for (const auto& input_type : kVideoAudioInputTypes) {
- const auto video_codec = static_cast<media::VideoCodec>(
- kSupportedVideoCodecs[rng() % base::size(kSupportedVideoCodecs)]);
- const auto audio_codec = static_cast<media::AudioCodec>(
- kSupportedAudioCodecs[rng() % base::size(kSupportedAudioCodecs)]);
+ const auto video_codec =
+ kSupportedVideoCodecs[rng() % base::size(kSupportedVideoCodecs)];
+ const auto audio_codec =
+ kSupportedAudioCodecs[rng() % base::size(kSupportedAudioCodecs)];
media::WebmMuxer muxer(audio_codec, input_type.has_video,
input_type.has_audio,
std::make_unique<media::LiveWebmMuxerDelegate>(
diff --git a/chromium/media/muxers/webm_muxer_unittest.cc b/chromium/media/muxers/webm_muxer_unittest.cc
index 3a2798bdeca..fa993b895c5 100644
--- a/chromium/media/muxers/webm_muxer_unittest.cc
+++ b/chromium/media/muxers/webm_muxer_unittest.cc
@@ -146,7 +146,7 @@ TEST_P(WebmMuxerTest,
bool video_success =
!GetParam().num_video_tracks ||
webm_muxer_->OnEncodedVideo(video_params, encoded_data, std::string(),
- now + base::TimeDelta::FromMilliseconds(1),
+ now + base::Milliseconds(1),
/*is_key_frame=*/true);
EXPECT_FALSE(audio_success && video_success);
}
@@ -166,7 +166,7 @@ TEST_P(WebmMuxerTest,
bool video_success =
!GetParam().num_video_tracks ||
webm_muxer_->OnEncodedVideo(video_params, encoded_data, std::string(),
- now + base::TimeDelta::FromMilliseconds(1),
+ now + base::Milliseconds(1),
/*is_key_frame=*/true);
bool audio_success =
!GetParam().num_audio_tracks ||
@@ -310,7 +310,7 @@ TEST_P(WebmMuxerTest, OnEncodedAudioTwoFrames) {
}
TEST_P(WebmMuxerTest, ColorSpaceREC709IsPropagatedToTrack) {
- WebmMuxer::VideoParameters params(gfx::Size(1, 1), 0, media::kCodecVP9,
+ WebmMuxer::VideoParameters params(gfx::Size(1, 1), 0, media::VideoCodec::kVP9,
gfx::ColorSpace::CreateREC709());
webm_muxer_->OnEncodedVideo(params, "abab", {}, base::TimeTicks::Now(),
true /* keyframe */);
@@ -323,7 +323,7 @@ TEST_P(WebmMuxerTest, ColorSpaceREC709IsPropagatedToTrack) {
TEST_P(WebmMuxerTest, ColorSpaceExtendedSRGBIsPropagatedToTrack) {
WebmMuxer::VideoParameters params(
- gfx::Size(1, 1), 0, media::kCodecVP9,
+ gfx::Size(1, 1), 0, media::VideoCodec::kVP9,
gfx::ColorSpace(gfx::ColorSpace::PrimaryID::BT709,
gfx::ColorSpace::TransferID::IEC61966_2_1,
gfx::ColorSpace::MatrixID::BT709,
@@ -339,7 +339,7 @@ TEST_P(WebmMuxerTest, ColorSpaceExtendedSRGBIsPropagatedToTrack) {
TEST_P(WebmMuxerTest, ColorSpaceHDR10IsPropagatedToTrack) {
WebmMuxer::VideoParameters params(
- gfx::Size(1, 1), 0, media::kCodecVP9,
+ gfx::Size(1, 1), 0, media::VideoCodec::kVP9,
gfx::ColorSpace(gfx::ColorSpace::PrimaryID::BT2020,
gfx::ColorSpace::TransferID::SMPTEST2084,
gfx::ColorSpace::MatrixID::BT2020_NCL,
@@ -356,7 +356,7 @@ TEST_P(WebmMuxerTest, ColorSpaceHDR10IsPropagatedToTrack) {
TEST_P(WebmMuxerTest, ColorSpaceFullRangeHDR10IsPropagatedToTrack) {
WebmMuxer::VideoParameters params(
- gfx::Size(1, 1), 0, media::kCodecVP9,
+ gfx::Size(1, 1), 0, media::VideoCodec::kVP9,
gfx::ColorSpace(gfx::ColorSpace::PrimaryID::BT2020,
gfx::ColorSpace::TransferID::SMPTEST2084,
gfx::ColorSpace::MatrixID::BT2020_NCL,
@@ -388,14 +388,12 @@ TEST_P(WebmMuxerTest, VideoIsStoredWhileWaitingForAudio) {
// Timestamp: video origin + X
webm_muxer_->OnEncodedVideo(
GetVideoParameters(video_frame), encoded_video, std::string(),
- base::TimeTicks() + base::TimeDelta::FromMilliseconds(1),
- false /* keyframe */);
+ base::TimeTicks() + base::Milliseconds(1), false /* keyframe */);
// Timestamp: video origin + X + Y
webm_muxer_->OnEncodedVideo(
GetVideoParameters(video_frame), encoded_video, std::string(),
- base::TimeTicks() + base::TimeDelta::FromMilliseconds(2),
- false /* keyframe */);
+ base::TimeTicks() + base::Milliseconds(2), false /* keyframe */);
const int sample_rate = 48000;
const int frames_per_buffer = 480;
@@ -418,27 +416,27 @@ TEST_P(WebmMuxerTest, VideoIsStoredWhileWaitingForAudio) {
.Times(AnyNumber());
// Timestamp: 0 (audio origin)
- webm_muxer_->OnEncodedAudio(
- audio_params, encoded_audio,
- base::TimeTicks() + base::TimeDelta::FromMilliseconds(3));
+ webm_muxer_->OnEncodedAudio(audio_params, encoded_audio,
+ base::TimeTicks() + base::Milliseconds(3));
webm_muxer_.reset();
}
const TestParams kTestCases[] = {
- {kCodecVP8, kCodecOpus, 1 /* num_video_tracks */, 0 /*num_audio_tracks*/},
- {kCodecVP8, kCodecOpus, 0, 1},
- {kCodecVP8, kCodecOpus, 1, 1},
- {kCodecVP9, kCodecOpus, 1, 0},
- {kCodecVP9, kCodecOpus, 0, 1},
- {kCodecVP9, kCodecOpus, 1, 1},
- {kCodecH264, kCodecOpus, 1, 0},
- {kCodecH264, kCodecOpus, 0, 1},
- {kCodecH264, kCodecOpus, 1, 1},
- {kCodecVP8, kCodecPCM, 0, 1},
- {kCodecVP8, kCodecPCM, 1, 1},
- {kCodecAV1, kCodecOpus, 1, 0},
- {kCodecAV1, kCodecOpus, 0, 1},
- {kCodecAV1, kCodecOpus, 1, 1},
+ {VideoCodec::kVP8, AudioCodec::kOpus, 1 /* num_video_tracks */,
+ 0 /*num_audio_tracks*/},
+ {VideoCodec::kVP8, AudioCodec::kOpus, 0, 1},
+ {VideoCodec::kVP8, AudioCodec::kOpus, 1, 1},
+ {VideoCodec::kVP9, AudioCodec::kOpus, 1, 0},
+ {VideoCodec::kVP9, AudioCodec::kOpus, 0, 1},
+ {VideoCodec::kVP9, AudioCodec::kOpus, 1, 1},
+ {VideoCodec::kH264, AudioCodec::kOpus, 1, 0},
+ {VideoCodec::kH264, AudioCodec::kOpus, 0, 1},
+ {VideoCodec::kH264, AudioCodec::kOpus, 1, 1},
+ {VideoCodec::kVP8, AudioCodec::kPCM, 0, 1},
+ {VideoCodec::kVP8, AudioCodec::kPCM, 1, 1},
+ {VideoCodec::kAV1, AudioCodec::kOpus, 1, 0},
+ {VideoCodec::kAV1, AudioCodec::kOpus, 0, 1},
+ {VideoCodec::kAV1, AudioCodec::kOpus, 1, 1},
};
INSTANTIATE_TEST_SUITE_P(All, WebmMuxerTest, ValuesIn(kTestCases));
@@ -448,7 +446,7 @@ class WebmMuxerTestUnparametrized : public testing::Test {
WebmMuxerTestUnparametrized()
: environment_(base::test::TaskEnvironment::TimeSource::MOCK_TIME),
webm_muxer_(std::make_unique<WebmMuxer>(
- kCodecOpus,
+ AudioCodec::kOpus,
/*has_audio=*/true,
/*has_video=*/true,
std::make_unique<LiveWebmMuxerDelegate>(base::BindRepeating(
@@ -484,12 +482,11 @@ class WebmMuxerTestUnparametrized : public testing::Test {
}
void AddVideoAtOffset(int system_timestamp_offset_ms, bool is_key_frame) {
- WebmMuxer::VideoParameters params(gfx::Size(1, 1), 0, media::kCodecVP8,
- gfx::ColorSpace());
+ WebmMuxer::VideoParameters params(
+ gfx::Size(1, 1), 0, media::VideoCodec::kVP8, gfx::ColorSpace());
webm_muxer_->OnEncodedVideo(
params, "video_at_offset", "",
- base::TimeTicks() +
- base::TimeDelta::FromMilliseconds(system_timestamp_offset_ms),
+ base::TimeTicks() + base::Milliseconds(system_timestamp_offset_ms),
is_key_frame);
got_video_ = true;
}
@@ -503,8 +500,7 @@ class WebmMuxerTestUnparametrized : public testing::Test {
media::CHANNEL_LAYOUT_MONO, frame_rate_hz, frames_per_buffer);
webm_muxer_->OnEncodedAudio(
audio_params, "audio_at_offset",
- base::TimeTicks() +
- base::TimeDelta::FromMilliseconds(system_timestamp_offset_ms));
+ base::TimeTicks() + base::Milliseconds(system_timestamp_offset_ms));
}
MOCK_METHOD(void, OnWrite, ());
@@ -557,7 +553,7 @@ class WebmMuxerTestUnparametrized : public testing::Test {
TEST_F(WebmMuxerTestUnparametrized, MuxerCompensatesForPausedTimeWithVideo) {
AddVideoAtOffset(123, /*is_key_frame=*/true);
webm_muxer_->Pause();
- environment_.FastForwardBy(base::TimeDelta::FromMilliseconds(200));
+ environment_.FastForwardBy(base::Milliseconds(200));
webm_muxer_->Resume();
AddVideoAtOffset(123 + 266, /*is_key_frame=*/false);
EXPECT_TRUE(Parse());
@@ -568,7 +564,7 @@ TEST_F(WebmMuxerTestUnparametrized, MuxerCompensatesForPausedTimeWithVideo) {
TEST_F(WebmMuxerTestUnparametrized, MuxerCompensatesForPausedTimeWithAudio) {
AddAudioAtOffsetWithDuration(234, 10);
webm_muxer_->Pause();
- environment_.FastForwardBy(base::TimeDelta::FromMilliseconds(666));
+ environment_.FastForwardBy(base::Milliseconds(666));
webm_muxer_->Resume();
AddAudioAtOffsetWithDuration(234 + 686, 10);
EXPECT_TRUE(Parse());
@@ -581,7 +577,7 @@ TEST_F(WebmMuxerTestUnparametrized,
AddAudioAtOffsetWithDuration(234, 10);
AddVideoAtOffset(234 + 1, /*is_key_frame=*/true);
webm_muxer_->Pause();
- environment_.FastForwardBy(base::TimeDelta::FromMilliseconds(300));
+ environment_.FastForwardBy(base::Milliseconds(300));
webm_muxer_->Resume();
AddAudioAtOffsetWithDuration(234 + 321, 10);
AddVideoAtOffset(234 + 315, /*is_key_frame=*/false);
@@ -595,7 +591,7 @@ TEST_F(WebmMuxerTestUnparametrized,
TEST_F(WebmMuxerTestUnparametrized,
MuxerCompensatesForPausedTimeBeforeAudioVideo) {
webm_muxer_->Pause();
- environment_.FastForwardBy(base::TimeDelta::FromMilliseconds(100));
+ environment_.FastForwardBy(base::Milliseconds(100));
webm_muxer_->Resume();
AddAudioAtOffsetWithDuration(50, 10);
AddVideoAtOffset(65, /*is_key_frame=*/true);
@@ -608,8 +604,7 @@ TEST_F(WebmMuxerTestUnparametrized,
}
TEST_F(WebmMuxerTestUnparametrized, HoldsDataUntilDurationExpiry) {
- webm_muxer_->SetMaximumDurationToForceDataOutput(
- base::TimeDelta::FromMilliseconds(200));
+ webm_muxer_->SetMaximumDurationToForceDataOutput(base::Milliseconds(200));
AddVideoAtOffset(0, /*is_key_frame=*/true);
AddAudioAtOffsetWithDuration(0, 10);
// Mute video. The muxer will hold on to audio data after this until the max
@@ -622,7 +617,7 @@ TEST_F(WebmMuxerTestUnparametrized, HoldsDataUntilDurationExpiry) {
AddAudioAtOffsetWithDuration(30, 10);
AddAudioAtOffsetWithDuration(40, 10);
Mock::VerifyAndClearExpectations(this);
- environment_.FastForwardBy(base::TimeDelta::FromMilliseconds(200));
+ environment_.FastForwardBy(base::Milliseconds(200));
EXPECT_CALL(*this, OnWrite).Times(AtLeast(1));
AddAudioAtOffsetWithDuration(50, 10);
Mock::VerifyAndClearExpectations(this);
@@ -631,9 +626,8 @@ TEST_F(WebmMuxerTestUnparametrized, HoldsDataUntilDurationExpiry) {
}
TEST_F(WebmMuxerTestUnparametrized, DurationExpiryLimitedByMaxFrequency) {
- webm_muxer_->SetMaximumDurationToForceDataOutput(
- base::TimeDelta::FromMilliseconds(
- 50)); // This value is below the minimum limit of 100 ms.
+ webm_muxer_->SetMaximumDurationToForceDataOutput(base::Milliseconds(
+ 50)); // This value is below the minimum limit of 100 ms.
AddVideoAtOffset(0, /*is_key_frame=*/true);
AddAudioAtOffsetWithDuration(0, 10);
// Mute video. The muxer will hold on to audio data after this until the max
@@ -646,7 +640,7 @@ TEST_F(WebmMuxerTestUnparametrized, DurationExpiryLimitedByMaxFrequency) {
AddAudioAtOffsetWithDuration(30, 10);
AddAudioAtOffsetWithDuration(40, 10);
Mock::VerifyAndClearExpectations(this);
- environment_.FastForwardBy(base::TimeDelta::FromMilliseconds(100));
+ environment_.FastForwardBy(base::Milliseconds(100));
EXPECT_CALL(*this, OnWrite).Times(AtLeast(1));
AddAudioAtOffsetWithDuration(50, 10);
Mock::VerifyAndClearExpectations(this);
diff --git a/chromium/media/parsers/vp8_parser.h b/chromium/media/parsers/vp8_parser.h
index 546addc6449..df5a8c68b32 100644
--- a/chromium/media/parsers/vp8_parser.h
+++ b/chromium/media/parsers/vp8_parser.h
@@ -170,6 +170,10 @@ struct MEDIA_PARSERS_EXPORT Vp8FrameHeader {
class MEDIA_PARSERS_EXPORT Vp8Parser {
public:
Vp8Parser();
+
+ Vp8Parser(const Vp8Parser&) = delete;
+ Vp8Parser& operator=(const Vp8Parser&) = delete;
+
~Vp8Parser();
// Try to parse exactly one VP8 frame starting at |ptr| and of size |size|,
@@ -202,8 +206,6 @@ class MEDIA_PARSERS_EXPORT Vp8Parser {
const uint8_t* stream_;
size_t bytes_left_;
Vp8BoolDecoder bd_;
-
- DISALLOW_COPY_AND_ASSIGN(Vp8Parser);
};
} // namespace media
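
The Vp8Parser hunk above (and the identical edits to CourierRenderer, CourierRendererFactory, RendererClientImpl and CourierRendererTest further down) swaps the DISALLOW_COPY_AND_ASSIGN macro for explicitly deleted copy operations. A minimal sketch of the pattern on a hypothetical class Widget:

  class Widget {
   public:
    Widget();

    // Deleting the copy constructor and copy assignment in the public
    // section replaces the old private DISALLOW_COPY_AND_ASSIGN(Widget).
    Widget(const Widget&) = delete;
    Widget& operator=(const Widget&) = delete;

    ~Widget();
  };
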
diff --git a/chromium/media/remoting/BUILD.gn b/chromium/media/remoting/BUILD.gn
index 780b8d3eb17..f77d14a48db 100644
--- a/chromium/media/remoting/BUILD.gn
+++ b/chromium/media/remoting/BUILD.gn
@@ -14,16 +14,22 @@ source_set("rpc") {
"proto_enum_utils.h",
"proto_utils.cc",
"proto_utils.h",
- "rpc_broker.cc",
- "rpc_broker.h",
]
+ public_configs =
+ [ "//third_party/openscreen/src/build:openscreen_include_dirs" ]
+
deps = [
"//base",
+ "//components/openscreen_platform:openscreen_platform",
+ "//components/openscreen_platform:openscreen_platform_network_service",
"//media",
]
- public_deps = [ "//third_party/openscreen/src/cast/streaming:remoting_proto" ]
+ public_deps = [
+ "//third_party/openscreen/src/cast/streaming:common",
+ "//third_party/openscreen/src/cast/streaming:remoting_proto",
+ ]
}
source_set("remoting_sender") {
@@ -46,6 +52,8 @@ source_set("remoting_sender") {
"//url",
]
+ public_deps = []
+
if (enable_media_remoting_rpc) {
sources += [
"courier_renderer.cc",
@@ -55,7 +63,9 @@ source_set("remoting_sender") {
"triggers.h",
]
- deps += [ ":rpc" ]
+ # Consumers of the CourierRenderer implicitly take a dependency on the
+ # generated remoting.pb.h file.
+ public_deps += [ ":rpc" ]
}
}
@@ -81,6 +91,10 @@ source_set("remoting_renderer") {
"//media/mojo/common:common",
"//media/mojo/mojom:remoting",
]
+
+ # Consumers of the ReceiverController implicitly take a dependency on the
+ # generated remoting.pb.h file.
+ public_deps = [ ":rpc" ]
}
source_set("media_remoting_tests") {
@@ -122,7 +136,6 @@ source_set("media_remoting_tests") {
"integration_test.cc",
"metrics_unittest.cc",
"proto_utils_unittest.cc",
- "rpc_broker_unittest.cc",
]
deps += [
diff --git a/chromium/media/remoting/DEPS b/chromium/media/remoting/DEPS
index d30e683ccfb..d7d8ee5b05a 100644
--- a/chromium/media/remoting/DEPS
+++ b/chromium/media/remoting/DEPS
@@ -1,4 +1,5 @@
include_rules = [
"+mojo/public",
"+third_party/openscreen/src/cast/streaming",
+ "+third_party/openscreen/src/util",
]
diff --git a/chromium/media/remoting/courier_renderer.cc b/chromium/media/remoting/courier_renderer.cc
index 773f87e8ab5..b7e1f7e81aa 100644
--- a/chromium/media/remoting/courier_renderer.cc
+++ b/chromium/media/remoting/courier_renderer.cc
@@ -27,19 +27,21 @@
#include "media/remoting/proto_utils.h"
#include "media/remoting/renderer_controller.h"
+using openscreen::cast::RpcMessenger;
+
namespace media {
namespace remoting {
namespace {
// The moving time window to track the media time and statistics updates.
-constexpr base::TimeDelta kTrackingWindow = base::TimeDelta::FromSeconds(5);
+constexpr base::TimeDelta kTrackingWindow = base::Seconds(5);
// The allowed delay for the remoting playback. When continuously exceeds this
// limit for |kPlaybackDelayCountThreshold| times, the user experience is likely
// poor and the controller is notified.
constexpr base::TimeDelta kMediaPlaybackDelayThreshold =
- base::TimeDelta::FromMilliseconds(750);
+ base::Milliseconds(750);
constexpr int kPlaybackDelayCountThreshold = 10;
// The allowed percentage of the number of video frames dropped vs. the number
@@ -49,13 +51,11 @@ constexpr int kMaxNumVideoFramesDroppedPercentage = 3;
// The time period to allow receiver get stable after playback rate change or
// Flush().
-constexpr base::TimeDelta kStabilizationPeriod =
- base::TimeDelta::FromSeconds(2);
+constexpr base::TimeDelta kStabilizationPeriod = base::Seconds(2);
// The amount of time between polling the DemuxerStreamAdapters to measure their
// data flow rates for metrics.
-constexpr base::TimeDelta kDataFlowPollPeriod =
- base::TimeDelta::FromSeconds(10);
+constexpr base::TimeDelta kDataFlowPollPeriod = base::Seconds(10);
} // namespace
@@ -69,18 +69,25 @@ CourierRenderer::CourierRenderer(
media_resource_(nullptr),
client_(nullptr),
controller_(controller),
- rpc_broker_(controller_->GetRpcBroker()),
- rpc_handle_(rpc_broker_->GetUniqueHandle()),
- remote_renderer_handle_(RpcBroker::kInvalidHandle),
+ rpc_messenger_(controller_->GetRpcMessenger()),
+ rpc_handle_(rpc_messenger_->GetUniqueHandle()),
+ remote_renderer_handle_(RpcMessenger::kInvalidHandle),
video_renderer_sink_(video_renderer_sink),
clock_(base::DefaultTickClock::GetInstance()) {
// Note: The constructor is running on the main thread, but will be destroyed
// on the media thread. Therefore, all weak pointers must be dereferenced on
// the media thread.
- const RpcBroker::ReceiveMessageCallback receive_callback =
- base::BindRepeating(&CourierRenderer::OnMessageReceivedOnMainThread,
- media_task_runner_, weak_factory_.GetWeakPtr());
- rpc_broker_->RegisterMessageReceiverCallback(rpc_handle_, receive_callback);
+ rpc_messenger_->RegisterMessageReceiverCallback(
+ rpc_handle_,
+ [runner = media_task_runner_, ptr = weak_factory_.GetWeakPtr()](
+ std::unique_ptr<openscreen::cast::RpcMessage> message) {
+ if (ptr) {
+ CourierRenderer::OnMessageReceivedOnMainThread(runner, ptr,
+ std::move(message));
+ } else {
+ LOG(WARNING) << "Invalid weak factory pointer.";
+ }
+ });
}
CourierRenderer::~CourierRenderer() {
@@ -88,8 +95,8 @@ CourierRenderer::~CourierRenderer() {
// Post task on main thread to unregister message receiver.
main_task_runner_->PostTask(
- FROM_HERE, base::BindOnce(&RpcBroker::UnregisterMessageReceiverCallback,
- rpc_broker_, rpc_handle_));
+ FROM_HERE, base::BindOnce(&CourierRenderer::DeregisterFromRpcMessaging,
+ weak_factory_.GetWeakPtr()));
if (video_renderer_sink_) {
video_renderer_sink_->PaintSingleFrame(
@@ -134,7 +141,7 @@ void CourierRenderer::Initialize(MediaResource* media_resource,
audio_demuxer_stream, video_demuxer_stream,
base::BindOnce(&CourierRenderer::OnDataPipeCreatedOnMainThread,
media_task_runner_, weak_factory_.GetWeakPtr(),
- rpc_broker_)));
+ rpc_messenger_)));
}
void CourierRenderer::SetLatencyHint(
@@ -255,7 +262,7 @@ base::TimeDelta CourierRenderer::GetMediaTime() {
void CourierRenderer::OnDataPipeCreatedOnMainThread(
scoped_refptr<base::SingleThreadTaskRunner> media_task_runner,
base::WeakPtr<CourierRenderer> self,
- base::WeakPtr<RpcBroker> rpc_broker,
+ openscreen::WeakPtr<RpcMessenger> rpc_messenger,
mojo::PendingRemote<mojom::RemotingDataStreamSender> audio,
mojo::PendingRemote<mojom::RemotingDataStreamSender> video,
mojo::ScopedDataPipeProducerHandle audio_handle,
@@ -265,10 +272,10 @@ void CourierRenderer::OnDataPipeCreatedOnMainThread(
base::BindOnce(&CourierRenderer::OnDataPipeCreated, self,
std::move(audio), std::move(video),
std::move(audio_handle), std::move(video_handle),
- rpc_broker ? rpc_broker->GetUniqueHandle()
- : RpcBroker::kInvalidHandle,
- rpc_broker ? rpc_broker->GetUniqueHandle()
- : RpcBroker::kInvalidHandle));
+ rpc_messenger ? rpc_messenger->GetUniqueHandle()
+ : RpcMessenger::kInvalidHandle,
+ rpc_messenger ? rpc_messenger->GetUniqueHandle()
+ : RpcMessenger::kInvalidHandle));
}
void CourierRenderer::OnDataPipeCreated(
@@ -294,20 +301,20 @@ void CourierRenderer::OnDataPipeCreated(
// Create audio demuxer stream adapter if audio is available.
if (audio_demuxer_stream && audio.is_valid() && audio_handle.is_valid() &&
- audio_rpc_handle != RpcBroker::kInvalidHandle) {
+ audio_rpc_handle != RpcMessenger::kInvalidHandle) {
audio_demuxer_stream_adapter_ = std::make_unique<DemuxerStreamAdapter>(
main_task_runner_, media_task_runner_, "audio", audio_demuxer_stream,
- rpc_broker_, audio_rpc_handle, std::move(audio),
+ rpc_messenger_, audio_rpc_handle, std::move(audio),
std::move(audio_handle),
base::BindOnce(&CourierRenderer::OnFatalError, base::Unretained(this)));
}
// Create video demuxer stream adapter if video is available.
if (video_demuxer_stream && video.is_valid() && video_handle.is_valid() &&
- video_rpc_handle != RpcBroker::kInvalidHandle) {
+ video_rpc_handle != RpcMessenger::kInvalidHandle) {
video_demuxer_stream_adapter_ = std::make_unique<DemuxerStreamAdapter>(
main_task_runner_, media_task_runner_, "video", video_demuxer_stream,
- rpc_broker_, video_rpc_handle, std::move(video),
+ rpc_messenger_, video_rpc_handle, std::move(video),
std::move(video_handle),
base::BindOnce(&CourierRenderer::OnFatalError, base::Unretained(this)));
}
@@ -322,23 +329,23 @@ void CourierRenderer::OnDataPipeCreated(
// Issues RPC_ACQUIRE_DEMUXER RPC message.
auto rpc = std::make_unique<openscreen::cast::RpcMessage>();
- rpc->set_handle(RpcBroker::kAcquireDemuxerHandle);
+ rpc->set_handle(RpcMessenger::kAcquireDemuxerHandle);
rpc->set_proc(openscreen::cast::RpcMessage::RPC_ACQUIRE_DEMUXER);
openscreen::cast::AcquireDemuxer* message =
rpc->mutable_acquire_demuxer_rpc();
message->set_audio_demuxer_handle(
audio_demuxer_stream_adapter_
? audio_demuxer_stream_adapter_->rpc_handle()
- : RpcBroker::kInvalidHandle);
+ : RpcMessenger::kInvalidHandle);
message->set_video_demuxer_handle(
video_demuxer_stream_adapter_
? video_demuxer_stream_adapter_->rpc_handle()
- : RpcBroker::kInvalidHandle);
+ : RpcMessenger::kInvalidHandle);
SendRpcToRemote(std::move(rpc));
// Issues RPC_ACQUIRE_RENDERER RPC message.
rpc = std::make_unique<openscreen::cast::RpcMessage>();
- rpc->set_handle(RpcBroker::kAcquireRendererHandle);
+ rpc->set_handle(RpcMessenger::kAcquireRendererHandle);
rpc->set_proc(openscreen::cast::RpcMessage::RPC_ACQUIRE_RENDERER);
rpc->set_integer_value(rpc_handle_);
SendRpcToRemote(std::move(rpc));
@@ -349,9 +356,13 @@ void CourierRenderer::OnMessageReceivedOnMainThread(
scoped_refptr<base::SingleThreadTaskRunner> media_task_runner,
base::WeakPtr<CourierRenderer> self,
std::unique_ptr<openscreen::cast::RpcMessage> message) {
- media_task_runner->PostTask(
- FROM_HERE, base::BindOnce(&CourierRenderer::OnReceivedRpc, self,
- std::move(message)));
+ if (media_task_runner) {
+ media_task_runner->PostTask(
+ FROM_HERE, base::BindOnce(&CourierRenderer::OnReceivedRpc, self,
+ std::move(message)));
+ } else {
+ LOG(WARNING) << "No valid task runner.";
+ }
}
void CourierRenderer::OnReceivedRpc(
@@ -405,9 +416,9 @@ void CourierRenderer::SendRpcToRemote(
std::unique_ptr<openscreen::cast::RpcMessage> message) {
DCHECK(media_task_runner_->BelongsToCurrentThread());
DCHECK(main_task_runner_);
- main_task_runner_->PostTask(
- FROM_HERE, base::BindOnce(&RpcBroker::SendMessageToRemote, rpc_broker_,
- std::move(message)));
+ main_task_runner_->PostTask(FROM_HERE,
+ base::BindOnce(&RpcMessenger::SendMessageToRemote,
+ rpc_messenger_, *message));
}
void CourierRenderer::AcquireRendererDone(
@@ -433,11 +444,11 @@ void CourierRenderer::AcquireRendererDone(
init->set_audio_demuxer_handle(
audio_demuxer_stream_adapter_
? audio_demuxer_stream_adapter_->rpc_handle()
- : RpcBroker::kInvalidHandle);
+ : RpcMessenger::kInvalidHandle);
init->set_video_demuxer_handle(
video_demuxer_stream_adapter_
? video_demuxer_stream_adapter_->rpc_handle()
- : RpcBroker::kInvalidHandle);
+ : RpcMessenger::kInvalidHandle);
init->set_callback_handle(rpc_handle_);
SendRpcToRemote(std::move(rpc));
}
@@ -504,8 +515,8 @@ void CourierRenderer::OnTimeUpdate(
{
// Updates current time information.
base::AutoLock auto_lock(time_lock_);
- current_media_time_ = base::TimeDelta::FromMicroseconds(time_usec);
- current_max_time_ = base::TimeDelta::FromMicroseconds(max_time_usec);
+ current_media_time_ = base::Microseconds(time_usec);
+ current_max_time_ = base::Microseconds(max_time_usec);
}
metrics_recorder_.OnEvidenceOfPlayoutAtReceiver();
@@ -798,5 +809,12 @@ bool CourierRenderer::IsWaitingForDataFromDemuxers() const {
!audio_demuxer_stream_adapter_->is_data_pending()));
}
+void CourierRenderer::DeregisterFromRpcMessaging() {
+ DCHECK(media_task_runner_->BelongsToCurrentThread());
+ if (rpc_messenger_) {
+ rpc_messenger_->UnregisterMessageReceiverCallback(rpc_handle_);
+ }
+}
+
} // namespace remoting
} // namespace media
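
The CourierRenderer changes above replace the base::BindRepeating-based RpcBroker callback with a lambda handed straight to openscreen::cast::RpcMessenger. A minimal sketch of that registration shape, using a hypothetical free function HandleMessage in place of the renderer's real forwarding logic:

  #include <memory>
  #include <utility>

  #include "third_party/openscreen/src/cast/streaming/rpc_messenger.h"

  // Hypothetical handler; the production code forwards to
  // CourierRenderer::OnMessageReceivedOnMainThread on the media task runner.
  void HandleMessage(std::unique_ptr<openscreen::cast::RpcMessage> message);

  void RegisterReceiver(openscreen::cast::RpcMessenger* messenger, int handle) {
    // RpcMessenger accepts any callable, so a plain lambda replaces the old
    // RpcBroker::ReceiveMessageCallback built with base::BindRepeating.
    messenger->RegisterMessageReceiverCallback(
        handle, [](std::unique_ptr<openscreen::cast::RpcMessage> message) {
          HandleMessage(std::move(message));
        });
  }
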
diff --git a/chromium/media/remoting/courier_renderer.h b/chromium/media/remoting/courier_renderer.h
index bbbb693a766..21f290f9a49 100644
--- a/chromium/media/remoting/courier_renderer.h
+++ b/chromium/media/remoting/courier_renderer.h
@@ -21,11 +21,12 @@
#include "media/base/renderer.h"
#include "media/mojo/mojom/remoting.mojom.h"
#include "media/remoting/metrics.h"
-#include "media/remoting/rpc_broker.h"
#include "mojo/public/cpp/bindings/pending_remote.h"
#include "mojo/public/cpp/system/data_pipe.h"
#include "third_party/abseil-cpp/absl/types/optional.h"
#include "third_party/openscreen/src/cast/streaming/remoting.pb.h"
+#include "third_party/openscreen/src/cast/streaming/rpc_messenger.h"
+#include "third_party/openscreen/src/util/weak_ptr.h"
namespace media {
@@ -48,6 +49,10 @@ class CourierRenderer final : public Renderer {
CourierRenderer(scoped_refptr<base::SingleThreadTaskRunner> media_task_runner,
const base::WeakPtr<RendererController>& controller,
VideoRendererSink* video_renderer_sink);
+
+ CourierRenderer(const CourierRenderer&) = delete;
+ CourierRenderer& operator=(const CourierRenderer&) = delete;
+
~CourierRenderer() final;
private:
@@ -57,7 +62,7 @@ class CourierRenderer final : public Renderer {
static void OnDataPipeCreatedOnMainThread(
scoped_refptr<base::SingleThreadTaskRunner> media_task_runner,
base::WeakPtr<CourierRenderer> self,
- base::WeakPtr<RpcBroker> rpc_broker,
+ openscreen::WeakPtr<openscreen::cast::RpcMessenger> rpc_messenger,
mojo::PendingRemote<mojom::RemotingDataStreamSender> audio,
mojo::PendingRemote<mojom::RemotingDataStreamSender> video,
mojo::ScopedDataPipeProducerHandle audio_handle,
@@ -155,6 +160,9 @@ class CourierRenderer final : public Renderer {
// though the playback might be delayed or paused.
bool IsWaitingForDataFromDemuxers() const;
+ // Helper to deregister the renderer from the RPC messenger.
+ void DeregisterFromRpcMessaging();
+
State state_;
const scoped_refptr<base::SingleThreadTaskRunner> main_task_runner_;
const scoped_refptr<base::SingleThreadTaskRunner> media_task_runner_;
@@ -173,8 +181,13 @@ class CourierRenderer final : public Renderer {
// Component to establish mojo remoting service on browser process.
const base::WeakPtr<RendererController> controller_;
- // Broker class to process incoming and outgoing RPC message.
- const base::WeakPtr<RpcBroker> rpc_broker_;
+
+ // Broker class to process incoming and outgoing RPC messages.
+ // Only accessed on |main_task_runner_|. NOTE: the messenger is wrapped
+ // in an |openscreen::WeakPtr| instead of |base|'s implementation due to
+ // it being defined in the third_party/openscreen repository.
+ const openscreen::WeakPtr<openscreen::cast::RpcMessenger> rpc_messenger_;
+
// RPC handle value for CourierRenderer component.
const int rpc_handle_;
@@ -232,8 +245,6 @@ class CourierRenderer final : public Renderer {
bool receiver_is_blocked_on_local_demuxers_ = true;
base::WeakPtrFactory<CourierRenderer> weak_factory_{this};
-
- DISALLOW_COPY_AND_ASSIGN(CourierRenderer);
};
} // namespace remoting
diff --git a/chromium/media/remoting/courier_renderer_factory.h b/chromium/media/remoting/courier_renderer_factory.h
index 8673e52401e..5b822a9d572 100644
--- a/chromium/media/remoting/courier_renderer_factory.h
+++ b/chromium/media/remoting/courier_renderer_factory.h
@@ -16,6 +16,10 @@ class CourierRendererFactory : public RendererFactory {
public:
explicit CourierRendererFactory(
std::unique_ptr<RendererController> controller);
+
+ CourierRendererFactory(const CourierRendererFactory&) = delete;
+ CourierRendererFactory& operator=(const CourierRendererFactory&) = delete;
+
~CourierRendererFactory() override;
std::unique_ptr<Renderer> CreateRenderer(
@@ -33,8 +37,6 @@ class CourierRendererFactory : public RendererFactory {
private:
const std::unique_ptr<RendererController> controller_;
-
- DISALLOW_COPY_AND_ASSIGN(CourierRendererFactory);
};
} // namespace remoting
diff --git a/chromium/media/remoting/courier_renderer_unittest.cc b/chromium/media/remoting/courier_renderer_unittest.cc
index 739cbc65878..c27a4812b09 100644
--- a/chromium/media/remoting/courier_renderer_unittest.cc
+++ b/chromium/media/remoting/courier_renderer_unittest.cc
@@ -23,10 +23,11 @@
#include "media/remoting/proto_enum_utils.h"
#include "media/remoting/proto_utils.h"
#include "media/remoting/renderer_controller.h"
-#include "media/remoting/rpc_broker.h"
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
+#include "third_party/openscreen/src/cast/streaming/rpc_messenger.h"
+using openscreen::cast::RpcMessenger;
using testing::_;
using testing::Invoke;
using testing::Return;
@@ -85,6 +86,10 @@ class RendererClientImpl final : public RendererClient {
.WillByDefault(
Invoke(this, &RendererClientImpl::DelegateOnVideoOpacityChange));
}
+
+ RendererClientImpl(const RendererClientImpl&) = delete;
+ RendererClientImpl& operator=(const RendererClientImpl&) = delete;
+
~RendererClientImpl() = default;
// RendererClient implementation.
@@ -144,8 +149,6 @@ class RendererClientImpl final : public RendererClient {
PipelineStatistics stats_;
VideoDecoderConfig video_decoder_config_;
AudioDecoderConfig audio_decoder_config_;
-
- DISALLOW_COPY_AND_ASSIGN(RendererClientImpl);
};
} // namespace
@@ -153,87 +156,90 @@ class RendererClientImpl final : public RendererClient {
class CourierRendererTest : public testing::Test {
public:
CourierRendererTest() = default;
+
+ CourierRendererTest(const CourierRendererTest&) = delete;
+ CourierRendererTest& operator=(const CourierRendererTest&) = delete;
+
~CourierRendererTest() override = default;
// Use this function to mimic receiver to handle RPC message for renderer
// initialization,
- void RpcMessageResponseBot(std::unique_ptr<std::vector<uint8_t>> message) {
- std::unique_ptr<openscreen::cast::RpcMessage> rpc(
- new openscreen::cast::RpcMessage());
- ASSERT_TRUE(rpc->ParseFromArray(message->data(), message->size()));
- switch (rpc->proc()) {
+ void RpcMessageResponseBot(std::vector<uint8_t> message) {
+ openscreen::cast::RpcMessage rpc;
+ ASSERT_TRUE(rpc.ParseFromArray(message.data(), message.size()));
+ switch (rpc.proc()) {
case openscreen::cast::RpcMessage::RPC_ACQUIRE_RENDERER: {
- DCHECK(rpc->has_integer_value());
- sender_renderer_handle_ = rpc->integer_value();
+ DCHECK(rpc.has_integer_value());
+ sender_renderer_handle_ = rpc.integer_value();
// Issues RPC_ACQUIRE_RENDERER_DONE RPC message.
auto acquire_done = std::make_unique<openscreen::cast::RpcMessage>();
acquire_done->set_handle(sender_renderer_handle_);
acquire_done->set_proc(
openscreen::cast::RpcMessage::RPC_ACQUIRE_RENDERER_DONE);
acquire_done->set_integer_value(receiver_renderer_handle_);
- controller_->GetRpcBroker()->ProcessMessageFromRemote(
+ controller_->GetRpcMessenger()->ProcessMessageFromRemote(
std::move(acquire_done));
} break;
case openscreen::cast::RpcMessage::RPC_ACQUIRE_DEMUXER: {
if (!is_backward_compatible_mode_) {
- int acquire_demuxer_handle = RpcBroker::kAcquireDemuxerHandle;
- EXPECT_EQ(rpc->handle(), acquire_demuxer_handle);
+ int acquire_demuxer_handle = RpcMessenger::kAcquireDemuxerHandle;
+ EXPECT_EQ(rpc.handle(), acquire_demuxer_handle);
sender_audio_demuxer_handle_ =
- rpc->acquire_demuxer_rpc().audio_demuxer_handle();
+ rpc.acquire_demuxer_rpc().audio_demuxer_handle();
sender_video_demuxer_handle_ =
- rpc->acquire_demuxer_rpc().video_demuxer_handle();
+ rpc.acquire_demuxer_rpc().video_demuxer_handle();
// Issues audio RPC_DS_INITIALIZE RPC message.
- if (sender_audio_demuxer_handle_ != RpcBroker::kInvalidHandle) {
+ if (sender_audio_demuxer_handle_ != RpcMessenger::kInvalidHandle) {
auto ds_init = std::make_unique<openscreen::cast::RpcMessage>();
ds_init->set_handle(sender_audio_demuxer_handle_);
ds_init->set_proc(openscreen::cast::RpcMessage::RPC_DS_INITIALIZE);
ds_init->set_integer_value(receiver_audio_demuxer_callback_handle_);
- controller_->GetRpcBroker()->ProcessMessageFromRemote(
+ controller_->GetRpcMessenger()->ProcessMessageFromRemote(
std::move(ds_init));
}
// Issues video RPC_DS_INITIALIZE RPC message.
- if (sender_video_demuxer_handle_ != RpcBroker::kInvalidHandle) {
+ if (sender_video_demuxer_handle_ != RpcMessenger::kInvalidHandle) {
auto ds_init = std::make_unique<openscreen::cast::RpcMessage>();
ds_init->set_handle(sender_video_demuxer_handle_);
ds_init->set_proc(openscreen::cast::RpcMessage::RPC_DS_INITIALIZE);
ds_init->set_integer_value(receiver_video_demuxer_callback_handle_);
- controller_->GetRpcBroker()->ProcessMessageFromRemote(
+ controller_->GetRpcMessenger()->ProcessMessageFromRemote(
std::move(ds_init));
}
}
} break;
case openscreen::cast::RpcMessage::RPC_R_INITIALIZE: {
sender_renderer_callback_handle_ =
- rpc->renderer_initialize_rpc().callback_handle();
- sender_client_handle_ = rpc->renderer_initialize_rpc().client_handle();
+ rpc.renderer_initialize_rpc().callback_handle();
+ sender_client_handle_ = rpc.renderer_initialize_rpc().client_handle();
if (is_backward_compatible_mode_) {
- EXPECT_EQ(rpc->handle(), receiver_renderer_handle_);
+ EXPECT_EQ(rpc.handle(), receiver_renderer_handle_);
sender_audio_demuxer_handle_ =
- rpc->renderer_initialize_rpc().audio_demuxer_handle();
+ rpc.renderer_initialize_rpc().audio_demuxer_handle();
sender_video_demuxer_handle_ =
- rpc->renderer_initialize_rpc().video_demuxer_handle();
+ rpc.renderer_initialize_rpc().video_demuxer_handle();
// Issues audio RPC_DS_INITIALIZE RPC message.
- if (sender_audio_demuxer_handle_ != RpcBroker::kInvalidHandle) {
+ if (sender_audio_demuxer_handle_ != RpcMessenger::kInvalidHandle) {
auto ds_init = std::make_unique<openscreen::cast::RpcMessage>();
ds_init->set_handle(sender_audio_demuxer_handle_);
ds_init->set_proc(openscreen::cast::RpcMessage::RPC_DS_INITIALIZE);
ds_init->set_integer_value(receiver_audio_demuxer_callback_handle_);
- controller_->GetRpcBroker()->ProcessMessageFromRemote(
+ controller_->GetRpcMessenger()->ProcessMessageFromRemote(
std::move(ds_init));
}
// Issues video RPC_DS_INITIALIZE RPC message.
- if (sender_video_demuxer_handle_ != RpcBroker::kInvalidHandle) {
+ if (sender_video_demuxer_handle_ != RpcMessenger::kInvalidHandle) {
auto ds_init = std::make_unique<openscreen::cast::RpcMessage>();
ds_init->set_handle(sender_video_demuxer_handle_);
ds_init->set_proc(openscreen::cast::RpcMessage::RPC_DS_INITIALIZE);
ds_init->set_integer_value(receiver_video_demuxer_callback_handle_);
- controller_->GetRpcBroker()->ProcessMessageFromRemote(
+ controller_->GetRpcMessenger()->ProcessMessageFromRemote(
std::move(ds_init));
}
} else {
@@ -244,21 +250,21 @@ class CourierRendererTest : public testing::Test {
init_cb->set_proc(
openscreen::cast::RpcMessage::RPC_R_INITIALIZE_CALLBACK);
init_cb->set_boolean_value(is_successfully_initialized_);
- controller_->GetRpcBroker()->ProcessMessageFromRemote(
+ controller_->GetRpcMessenger()->ProcessMessageFromRemote(
std::move(init_cb));
}
} break;
case openscreen::cast::RpcMessage::RPC_DS_INITIALIZE_CALLBACK: {
- if (rpc->handle() == receiver_audio_demuxer_callback_handle_)
+ if (rpc.handle() == receiver_audio_demuxer_callback_handle_)
received_audio_ds_init_cb_ = true;
- if (rpc->handle() == receiver_video_demuxer_callback_handle_)
+ if (rpc.handle() == receiver_video_demuxer_callback_handle_)
received_video_ds_init_cb_ = true;
// Check whether the demuxer at the receiver end is initialized.
- if (received_audio_ds_init_cb_ ==
- (sender_audio_demuxer_handle_ != RpcBroker::kInvalidHandle) &&
- received_video_ds_init_cb_ ==
- (sender_video_demuxer_handle_ != RpcBroker::kInvalidHandle)) {
+ if (received_audio_ds_init_cb_ == (sender_audio_demuxer_handle_ !=
+ RpcMessenger::kInvalidHandle) &&
+ received_video_ds_init_cb_ == (sender_video_demuxer_handle_ !=
+ RpcMessenger::kInvalidHandle)) {
is_receiver_demuxer_initialized_ = true;
}
@@ -270,7 +276,7 @@ class CourierRendererTest : public testing::Test {
init_cb->set_proc(
openscreen::cast::RpcMessage::RPC_R_INITIALIZE_CALLBACK);
init_cb->set_boolean_value(is_successfully_initialized_);
- controller_->GetRpcBroker()->ProcessMessageFromRemote(
+ controller_->GetRpcMessenger()->ProcessMessageFromRemote(
std::move(init_cb));
}
} break;
@@ -278,10 +284,10 @@ class CourierRendererTest : public testing::Test {
// Issues RPC_R_FLUSHUNTIL_CALLBACK RPC message.
std::unique_ptr<openscreen::cast::RpcMessage> flush_cb(
new openscreen::cast::RpcMessage());
- flush_cb->set_handle(rpc->renderer_flushuntil_rpc().callback_handle());
+ flush_cb->set_handle(rpc.renderer_flushuntil_rpc().callback_handle());
flush_cb->set_proc(
openscreen::cast::RpcMessage::RPC_R_FLUSHUNTIL_CALLBACK);
- controller_->GetRpcBroker()->ProcessMessageFromRemote(
+ controller_->GetRpcMessenger()->ProcessMessageFromRemote(
std::move(flush_cb));
} break;
case openscreen::cast::RpcMessage::RPC_R_SETVOLUME:
@@ -294,14 +300,20 @@ class CourierRendererTest : public testing::Test {
RunPendingTasks();
}
- // Callback from RpcBroker when sending message to remote sink.
- void OnSendMessageToSink(std::unique_ptr<std::vector<uint8_t>> message) {
- std::unique_ptr<openscreen::cast::RpcMessage> rpc(
- new openscreen::cast::RpcMessage());
- ASSERT_TRUE(rpc->ParseFromArray(message->data(), message->size()));
+ // Callback from RpcMessenger when sending message to remote sink.
+ void OnSendMessageToSink(std::vector<uint8_t> message) {
+ openscreen::cast::RpcMessage rpc;
+ ASSERT_TRUE(rpc.ParseFromArray(message.data(), message.size()));
received_rpc_.push_back(std::move(rpc));
}
+ void RewireSendMessageCallbackToSink() {
+ controller_->GetRpcMessenger()->set_send_message_cb_for_testing(
+ [this](std::vector<uint8_t> message) {
+ this->OnSendMessageToSink(message);
+ });
+ }
+
protected:
void InitializeRenderer() {
// Register media::RendererClient implementation.
@@ -310,19 +322,17 @@ class CourierRendererTest : public testing::Test {
EXPECT_CALL(*render_client_, OnPipelineStatus(_)).Times(1);
DCHECK(renderer_);
// Redirect RPC message for simulate receiver scenario
- controller_->GetRpcBroker()->SetMessageCallbackForTesting(
- base::BindRepeating(&CourierRendererTest::RpcMessageResponseBot,
- base::Unretained(this)));
+ controller_->GetRpcMessenger()->set_send_message_cb_for_testing(
+ [this](std::vector<uint8_t> message) {
+ this->RpcMessageResponseBot(message);
+ });
RunPendingTasks();
renderer_->Initialize(
media_resource_.get(), render_client_.get(),
base::BindOnce(&RendererClientImpl::OnPipelineStatus,
base::Unretained(render_client_.get())));
RunPendingTasks();
- // Redirect RPC message back to save for later check.
- controller_->GetRpcBroker()->SetMessageCallbackForTesting(
- base::BindRepeating(&CourierRendererTest::OnSendMessageToSink,
- base::Unretained(this)));
+ RewireSendMessageCallbackToSink();
RunPendingTasks();
}
@@ -350,16 +360,12 @@ class CourierRendererTest : public testing::Test {
controller_ = FakeRemoterFactory::CreateController(false);
controller_->OnMetadataChanged(DefaultMetadata());
- // Redirect RPC message to CourierRendererTest::OnSendMessageToSink().
- controller_->GetRpcBroker()->SetMessageCallbackForTesting(
- base::BindRepeating(&CourierRendererTest::OnSendMessageToSink,
- base::Unretained(this)));
-
+ RewireSendMessageCallbackToSink();
renderer_ =
std::make_unique<CourierRenderer>(base::ThreadTaskRunnerHandle::Get(),
controller_->GetWeakPtr(), nullptr);
renderer_->clock_ = &clock_;
- clock_.Advance(base::TimeDelta::FromSeconds(1));
+ clock_.Advance(base::Seconds(1));
RunPendingTasks();
}
@@ -371,9 +377,10 @@ class CourierRendererTest : public testing::Test {
// Gets first available RpcMessage with specific |proc|.
const openscreen::cast::RpcMessage* PeekRpcMessage(int proc) const {
for (auto& s : received_rpc_) {
- if (proc == s->proc())
- return s.get();
+ if (proc == s.proc())
+ return &s;
}
+
return nullptr;
}
int ReceivedRpcMessageCount() const { return received_rpc_.size(); }
@@ -405,9 +412,8 @@ class CourierRendererTest : public testing::Test {
int end_serial_number) {
for (int i = start_serial_number; i < end_serial_number; ++i) {
ASSERT_FALSE(DidEncounterFatalError());
- IssueTimeUpdateRpc(base::TimeDelta::FromMilliseconds(100 + i * 800),
- base::TimeDelta::FromSeconds(100));
- clock_.Advance(base::TimeDelta::FromSeconds(1));
+ IssueTimeUpdateRpc(base::Milliseconds(100 + i * 800), base::Seconds(100));
+ clock_.Advance(base::Seconds(1));
RunPendingTasks();
}
}
@@ -469,10 +475,10 @@ class CourierRendererTest : public testing::Test {
const int receiver_audio_demuxer_callback_handle_{11};
const int receiver_video_demuxer_callback_handle_{12};
int sender_renderer_handle_;
- int sender_client_handle_{RpcBroker::kInvalidHandle};
- int sender_renderer_callback_handle_{RpcBroker::kInvalidHandle};
- int sender_audio_demuxer_handle_{RpcBroker::kInvalidHandle};
- int sender_video_demuxer_handle_{RpcBroker::kInvalidHandle};
+ int sender_client_handle_{RpcMessenger::kInvalidHandle};
+ int sender_renderer_callback_handle_{RpcMessenger::kInvalidHandle};
+ int sender_audio_demuxer_handle_{RpcMessenger::kInvalidHandle};
+ int sender_video_demuxer_handle_{RpcMessenger::kInvalidHandle};
// Indicates whether the test runs in backward-compatible mode.
bool is_backward_compatible_mode_ = false;
@@ -488,11 +494,8 @@ class CourierRendererTest : public testing::Test {
// the renderer on the receiver side.
bool is_successfully_initialized_ = true;
- // Stores RPC messages that are sending to remote sink.
- std::vector<std::unique_ptr<openscreen::cast::RpcMessage>> received_rpc_;
-
- private:
- DISALLOW_COPY_AND_ASSIGN(CourierRendererTest);
+ // Stores RPC messages that have been sent to the remote sink.
+ std::vector<openscreen::cast::RpcMessage> received_rpc_;
};
TEST_F(CourierRendererTest, Initialize) {
@@ -529,7 +532,7 @@ TEST_F(CourierRendererTest, InitializeFailed) {
RunPendingTasks();
ASSERT_EQ(0, ReceivedRpcMessageCount());
- base::TimeDelta seek = base::TimeDelta::FromMicroseconds(100);
+ base::TimeDelta seek = base::Microseconds(100);
renderer_->StartPlayingFrom(seek);
RunPendingTasks();
ASSERT_EQ(0, ReceivedRpcMessageCount());
@@ -552,8 +555,10 @@ TEST_F(CourierRendererTest, Flush) {
// Flush Renderer.
// Redirect RPC message for simulate receiver scenario
- controller_->GetRpcBroker()->SetMessageCallbackForTesting(base::BindRepeating(
- &CourierRendererTest::RpcMessageResponseBot, base::Unretained(this)));
+ controller_->GetRpcMessenger()->set_send_message_cb_for_testing(
+ [this](std::vector<uint8_t> message) {
+ this->RpcMessageResponseBot(message);
+ });
RunPendingTasks();
EXPECT_CALL(*render_client_, OnFlushCallback()).Times(1);
renderer_->Flush(base::BindOnce(&RendererClientImpl::OnFlushCallback,
@@ -569,7 +574,7 @@ TEST_F(CourierRendererTest, StartPlayingFrom) {
ASSERT_EQ(render_client_->status(), PIPELINE_OK);
// StartPlaying from
- base::TimeDelta seek = base::TimeDelta::FromMicroseconds(100);
+ base::TimeDelta seek = base::Microseconds(100);
renderer_->StartPlayingFrom(seek);
RunPendingTasks();
@@ -578,7 +583,7 @@ TEST_F(CourierRendererTest, StartPlayingFrom) {
const openscreen::cast::RpcMessage* rpc =
PeekRpcMessage(openscreen::cast::RpcMessage::RPC_R_STARTPLAYINGFROM);
ASSERT_TRUE(rpc);
- ASSERT_EQ(rpc->integer64_value(), 100);
+ ASSERT_EQ(100, rpc->integer64_value());
}
TEST_F(CourierRendererTest, SetVolume) {
@@ -597,7 +602,7 @@ TEST_F(CourierRendererTest, SetVolume) {
const openscreen::cast::RpcMessage* rpc =
PeekRpcMessage(openscreen::cast::RpcMessage::RPC_R_SETVOLUME);
ASSERT_TRUE(rpc);
- ASSERT_TRUE(rpc->double_value() == 3.0);
+ ASSERT_DOUBLE_EQ(3.0, rpc->double_value());
}
TEST_F(CourierRendererTest, SetPlaybackRate) {
@@ -615,18 +620,18 @@ TEST_F(CourierRendererTest, SetPlaybackRate) {
const openscreen::cast::RpcMessage* rpc =
PeekRpcMessage(openscreen::cast::RpcMessage::RPC_R_SETPLAYBACKRATE);
ASSERT_TRUE(rpc);
- ASSERT_TRUE(rpc->double_value() == 2.5);
+ ASSERT_DOUBLE_EQ(2.5, rpc->double_value());
}
TEST_F(CourierRendererTest, OnTimeUpdate) {
- base::TimeDelta media_time = base::TimeDelta::FromMicroseconds(100);
- base::TimeDelta max_media_time = base::TimeDelta::FromMicroseconds(500);
+ base::TimeDelta media_time = base::Microseconds(100);
+ base::TimeDelta max_media_time = base::Microseconds(500);
IssueTimeUpdateRpc(media_time, max_media_time);
ValidateCurrentTime(media_time, max_media_time);
// Issues RPC_RC_ONTIMEUPDATE RPC message with invalid time
- base::TimeDelta media_time2 = base::TimeDelta::FromMicroseconds(-100);
- base::TimeDelta max_media_time2 = base::TimeDelta::FromMicroseconds(500);
+ base::TimeDelta media_time2 = base::Microseconds(-100);
+ base::TimeDelta max_media_time2 = base::Microseconds(500);
IssueTimeUpdateRpc(media_time2, max_media_time2);
// Because of invalid value, the time will not be updated and remain the same.
ValidateCurrentTime(media_time, max_media_time);
@@ -642,7 +647,7 @@ TEST_F(CourierRendererTest, OnBufferingStateChange) {
TEST_F(CourierRendererTest, OnAudioConfigChange) {
const AudioDecoderConfig kNewAudioConfig(
- kCodecVorbis, kSampleFormatPlanarF32, CHANNEL_LAYOUT_STEREO, 44100,
+ AudioCodec::kVorbis, kSampleFormatPlanarF32, CHANNEL_LAYOUT_STEREO, 44100,
EmptyExtraData(), EncryptionScheme::kUnencrypted);
InitializeRenderer();
// Make sure initial audio config does not match the one we intend to send.
@@ -753,9 +758,7 @@ TEST_F(CourierRendererTest, OnStatisticsUpdate) {
TEST_F(CourierRendererTest, OnPacingTooSlowly) {
InitializeRenderer();
-
- controller_->GetRpcBroker()->SetMessageCallbackForTesting(base::BindRepeating(
- &CourierRendererTest::OnSendMessageToSink, base::Unretained(this)));
+ RewireSendMessageCallbackToSink();
// There should be no error reported with this playback rate.
renderer_->SetPlaybackRate(0.8);
@@ -763,7 +766,7 @@ TEST_F(CourierRendererTest, OnPacingTooSlowly) {
EXPECT_CALL(*render_client_, OnBufferingStateChange(BUFFERING_HAVE_ENOUGH, _))
.Times(1);
IssuesBufferingStateRpc(BufferingState::BUFFERING_HAVE_ENOUGH);
- clock_.Advance(base::TimeDelta::FromSeconds(3));
+ clock_.Advance(base::Seconds(3));
VerifyAndReportTimeUpdates(0, 15);
ASSERT_FALSE(DidEncounterFatalError());
@@ -771,7 +774,7 @@ TEST_F(CourierRendererTest, OnPacingTooSlowly) {
// playback was continuously delayed for 10 times.
renderer_->SetPlaybackRate(1);
RunPendingTasks();
- clock_.Advance(base::TimeDelta::FromSeconds(3));
+ clock_.Advance(base::Seconds(3));
VerifyAndReportTimeUpdates(15, 30);
ASSERT_TRUE(DidEncounterFatalError());
}
@@ -782,7 +785,7 @@ TEST_F(CourierRendererTest, OnFrameDropRateHigh) {
for (int i = 0; i < 7; ++i) {
ASSERT_FALSE(DidEncounterFatalError()); // Not enough measurements.
IssueStatisticsUpdateRpc();
- clock_.Advance(base::TimeDelta::FromSeconds(1));
+ clock_.Advance(base::Seconds(1));
RunPendingTasks();
}
ASSERT_TRUE(DidEncounterFatalError());
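
The test changes above drop the base::BindRepeating-based RpcBroker callbacks in favor of lambdas passed to RpcMessenger::set_send_message_cb_for_testing. A compilable sketch of that rewiring pattern follows; FakeMessenger and TestHarness are simplified stand-ins for illustration, not the real Chromium or Open Screen classes.

#include <cstddef>
#include <cstdint>
#include <functional>
#include <iostream>
#include <utility>
#include <vector>

// Stand-in for a messenger whose outgoing messages go through a
// std::function-style send callback, as RpcMessenger does in this patch.
class FakeMessenger {
 public:
  using SendMessageCallback = std::function<void(std::vector<uint8_t>)>;

  void set_send_message_cb(SendMessageCallback cb) { send_cb_ = std::move(cb); }

  void SendMessageToRemote(std::vector<uint8_t> message) {
    if (send_cb_)
      send_cb_(std::move(message));
  }

 private:
  SendMessageCallback send_cb_;
};

class TestHarness {
 public:
  explicit TestHarness(FakeMessenger* messenger) {
    // Equivalent of RewireSendMessageCallbackToSink(): route every outgoing
    // message back into this object for later inspection.
    messenger->set_send_message_cb([this](std::vector<uint8_t> message) {
      OnSendMessageToSink(std::move(message));
    });
  }

  std::size_t received_count() const { return received_.size(); }

 private:
  void OnSendMessageToSink(std::vector<uint8_t> message) {
    received_.push_back(std::move(message));
  }

  std::vector<std::vector<uint8_t>> received_;
};

int main() {
  FakeMessenger messenger;
  TestHarness harness(&messenger);
  messenger.SendMessageToRemote({0x01, 0x02, 0x03});
  std::cout << "messages captured: " << harness.received_count() << "\n";
  return 0;
}
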
diff --git a/chromium/media/remoting/demuxer_stream_adapter.cc b/chromium/media/remoting/demuxer_stream_adapter.cc
index 9039908e07c..c404fa20f6a 100644
--- a/chromium/media/remoting/demuxer_stream_adapter.cc
+++ b/chromium/media/remoting/demuxer_stream_adapter.cc
@@ -19,6 +19,8 @@
// Convenience logging macro used throughout this file.
#define DEMUXER_VLOG(level) VLOG(level) << __func__ << "[" << name_ << "]: "
+using openscreen::cast::RpcMessenger;
+
namespace media {
namespace remoting {
@@ -27,7 +29,7 @@ DemuxerStreamAdapter::DemuxerStreamAdapter(
scoped_refptr<base::SingleThreadTaskRunner> media_task_runner,
const std::string& name,
DemuxerStream* demuxer_stream,
- const base::WeakPtr<RpcBroker>& rpc_broker,
+ const openscreen::WeakPtr<RpcMessenger>& rpc_messenger,
int rpc_handle,
mojo::PendingRemote<mojom::RemotingDataStreamSender> stream_sender_remote,
mojo::ScopedDataPipeProducerHandle producer_handle,
@@ -35,13 +37,13 @@ DemuxerStreamAdapter::DemuxerStreamAdapter(
: main_task_runner_(std::move(main_task_runner)),
media_task_runner_(std::move(media_task_runner)),
name_(name),
- rpc_broker_(rpc_broker),
+ rpc_messenger_(rpc_messenger),
rpc_handle_(rpc_handle),
demuxer_stream_(demuxer_stream),
type_(demuxer_stream ? demuxer_stream->type() : DemuxerStream::UNKNOWN),
error_callback_(std::move(error_callback)),
- remote_callback_handle_(RpcBroker::kInvalidHandle),
- read_until_callback_handle_(RpcBroker::kInvalidHandle),
+ remote_callback_handle_(RpcMessenger::kInvalidHandle),
+ read_until_callback_handle_(RpcMessenger::kInvalidHandle),
read_until_count_(0),
last_count_(0),
pending_flush_(false),
@@ -54,12 +56,17 @@ DemuxerStreamAdapter::DemuxerStreamAdapter(
DCHECK(media_task_runner_->BelongsToCurrentThread());
DCHECK(demuxer_stream);
DCHECK(!error_callback_.is_null());
- const RpcBroker::ReceiveMessageCallback receive_callback =
- BindToCurrentLoop(base::BindRepeating(
- &DemuxerStreamAdapter::OnReceivedRpc, weak_factory_.GetWeakPtr()));
+ auto receive_callback = BindToCurrentLoop(base::BindRepeating(
+ &DemuxerStreamAdapter::OnReceivedRpc, weak_factory_.GetWeakPtr()));
main_task_runner_->PostTask(
- FROM_HERE, base::BindOnce(&RpcBroker::RegisterMessageReceiverCallback,
- rpc_broker_, rpc_handle_, receive_callback));
+ FROM_HERE,
+ base::BindOnce(
+ &RpcMessenger::RegisterMessageReceiverCallback, rpc_messenger_,
+ rpc_handle_,
+ [cb = receive_callback](
+ std::unique_ptr<openscreen::cast::RpcMessage> message) {
+ cb.Run(std::move(message));
+ }));
stream_sender_.Bind(std::move(stream_sender_remote));
stream_sender_.set_disconnect_handler(
@@ -70,8 +77,9 @@ DemuxerStreamAdapter::DemuxerStreamAdapter(
DemuxerStreamAdapter::~DemuxerStreamAdapter() {
DCHECK(media_task_runner_->BelongsToCurrentThread());
main_task_runner_->PostTask(
- FROM_HERE, base::BindOnce(&RpcBroker::UnregisterMessageReceiverCallback,
- rpc_broker_, rpc_handle_));
+ FROM_HERE,
+ base::BindOnce(&DemuxerStreamAdapter::DeregisterFromRpcMessaging,
+ weak_factory_.GetWeakPtr()));
}
int64_t DemuxerStreamAdapter::GetBytesWrittenAndReset() {
@@ -100,7 +108,7 @@ absl::optional<uint32_t> DemuxerStreamAdapter::SignalFlush(bool flushing) {
stream_sender_->CancelInFlightData();
} else {
// Sets callback handle invalid to abort ongoing read request.
- read_until_callback_handle_ = RpcBroker::kInvalidHandle;
+ read_until_callback_handle_ = RpcMessenger::kInvalidHandle;
}
return last_count_;
}
@@ -136,7 +144,7 @@ void DemuxerStreamAdapter::Initialize(int remote_callback_handle) {
<< remote_callback_handle;
// Checks if initialization had been called or not.
- if (remote_callback_handle_ != RpcBroker::kInvalidHandle) {
+ if (remote_callback_handle_ != RpcMessenger::kInvalidHandle) {
DEMUXER_VLOG(1) << "Duplicated initialization. Have: "
<< remote_callback_handle_
<< ", Given: " << remote_callback_handle;
@@ -183,8 +191,8 @@ void DemuxerStreamAdapter::Initialize(int remote_callback_handle) {
: video_config_.AsHumanReadableString())
<< '}';
main_task_runner_->PostTask(
- FROM_HERE, base::BindOnce(&RpcBroker::SendMessageToRemote, rpc_broker_,
- std::move(rpc)));
+ FROM_HERE,
+ base::BindOnce(&RpcMessenger::SendMessageToRemote, rpc_messenger_, *rpc));
}
void DemuxerStreamAdapter::ReadUntil(
@@ -370,10 +378,10 @@ void DemuxerStreamAdapter::SendReadAck() {
: "DID NOT CHANGE")
<< '}';
main_task_runner_->PostTask(
- FROM_HERE, base::BindOnce(&RpcBroker::SendMessageToRemote, rpc_broker_,
- std::move(rpc)));
+ FROM_HERE,
+ base::BindOnce(&RpcMessenger::SendMessageToRemote, rpc_messenger_, *rpc));
// Resets callback handle after completing the reading request.
- read_until_callback_handle_ = RpcBroker::kInvalidHandle;
+ read_until_callback_handle_ = RpcMessenger::kInvalidHandle;
// Resets audio/video decoder config since it only sends once.
if (audio_config_.IsValidConfig())
@@ -401,5 +409,12 @@ void DemuxerStreamAdapter::OnFatalError(StopTrigger stop_trigger) {
std::move(error_callback_).Run(stop_trigger);
}
+void DemuxerStreamAdapter::DeregisterFromRpcMessaging() {
+ DCHECK(media_task_runner_->BelongsToCurrentThread());
+ if (rpc_messenger_) {
+ rpc_messenger_->UnregisterMessageReceiverCallback(rpc_handle_);
+ }
+}
+
} // namespace remoting
} // namespace media
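
DemuxerStreamAdapter now registers its receive callback by wrapping a base::RepeatingCallback in a forwarding lambda, because RpcMessenger accepts plain callable objects rather than base callbacks. A self-contained approximation of that adapter pattern; Messenger and RpcMessage here are simplified stand-ins, not the openscreen types.

#include <functional>
#include <iostream>
#include <map>
#include <memory>
#include <string>
#include <utility>

// Simplified stand-in for openscreen::cast::RpcMessage (the real type is a
// protobuf message); only the fields the sketch needs.
struct RpcMessage {
  int handle = 0;
  std::string body;
};

// Stand-in messenger: receive callbacks are std::function objects keyed by
// RPC handle, mirroring RegisterMessageReceiverCallback in the patch.
class Messenger {
 public:
  using ReceiverCallback = std::function<void(std::unique_ptr<RpcMessage>)>;

  void RegisterMessageReceiverCallback(int handle, ReceiverCallback cb) {
    receivers_[handle] = std::move(cb);
  }
  void UnregisterMessageReceiverCallback(int handle) { receivers_.erase(handle); }

  void Dispatch(std::unique_ptr<RpcMessage> message) {
    auto it = receivers_.find(message->handle);
    if (it != receivers_.end())
      it->second(std::move(message));
  }

 private:
  std::map<int, ReceiverCallback> receivers_;
};

int main() {
  Messenger messenger;

  // A copyable callback object, playing the role of the bound
  // BindToCurrentLoop(...) callback in the adapter.
  auto receive_callback = [](std::unique_ptr<RpcMessage> message) {
    std::cout << "received: " << message->body << "\n";
  };

  // Same shape as the patch: capture the callback and forward the message,
  // so a callback type the messenger knows nothing about can still be used.
  messenger.RegisterMessageReceiverCallback(
      5, [cb = receive_callback](std::unique_ptr<RpcMessage> message) {
        cb(std::move(message));
      });

  auto msg = std::make_unique<RpcMessage>();
  msg->handle = 5;
  msg->body = "RPC_DS_INITIALIZE";
  messenger.Dispatch(std::move(msg));

  messenger.UnregisterMessageReceiverCallback(5);
  return 0;
}
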
diff --git a/chromium/media/remoting/demuxer_stream_adapter.h b/chromium/media/remoting/demuxer_stream_adapter.h
index a30444c8c48..30cd07daf95 100644
--- a/chromium/media/remoting/demuxer_stream_adapter.h
+++ b/chromium/media/remoting/demuxer_stream_adapter.h
@@ -18,13 +18,14 @@
#include "media/base/video_decoder_config.h"
#include "media/mojo/common/mojo_data_pipe_read_write.h"
#include "media/mojo/mojom/remoting.mojom.h"
-#include "media/remoting/rpc_broker.h"
#include "media/remoting/triggers.h"
#include "mojo/public/cpp/bindings/pending_remote.h"
#include "mojo/public/cpp/bindings/remote.h"
#include "mojo/public/cpp/system/data_pipe.h"
#include "mojo/public/cpp/system/simple_watcher.h"
#include "third_party/abseil-cpp/absl/types/optional.h"
+#include "third_party/openscreen/src/cast/streaming/rpc_messenger.h"
+#include "third_party/openscreen/src/util/weak_ptr.h"
namespace base {
class SingleThreadTaskRunner;
@@ -51,7 +52,8 @@ class DemuxerStreamAdapter {
// |media_task_runner|: Task runner to run whole class on media thread.
// |name|: Demuxer stream name. For troubleshooting purposes.
// |demuxer_stream|: Demuxer component.
- // |rpc_broker|: Broker class to handle incoming and outgoing RPC message. It
+ // |rpc_messenger|: Broker class to handle incoming and outgoing RPC
+ // messages. It
// is used only on the main thread.
// |rpc_handle|: Unique value that references this DemuxerStreamAdapter.
// |stream_sender_remote|: Transfer of pipe binding on the media thread. It is
@@ -64,15 +66,19 @@ class DemuxerStreamAdapter {
scoped_refptr<base::SingleThreadTaskRunner> media_task_runner,
const std::string& name,
DemuxerStream* demuxer_stream,
- const base::WeakPtr<RpcBroker>& rpc_broker,
+ const openscreen::WeakPtr<openscreen::cast::RpcMessenger>& rpc_messenger,
int rpc_handle,
mojo::PendingRemote<mojom::RemotingDataStreamSender> stream_sender_remote,
mojo::ScopedDataPipeProducerHandle producer_handle,
ErrorCallback error_callback);
+
+ DemuxerStreamAdapter(const DemuxerStreamAdapter&) = delete;
+ DemuxerStreamAdapter& operator=(const DemuxerStreamAdapter&) = delete;
+
~DemuxerStreamAdapter();
// Rpc handle for this class. This is used for sending/receiving RPC message
- // with specific hanle using Rpcbroker.
+ // with a specific handle using RpcMessenger.
int rpc_handle() const { return rpc_handle_; }
// Returns the number of bytes that have been written to the data pipe since
@@ -92,7 +98,8 @@ class DemuxerStreamAdapter {
// received, and will be reset to invalid value after
// RPC_DS_READUNTIL_CALLBACK is sent back to receiver. Therefore it can be
// used to determine if the class is in the reading state or not.
- return read_until_callback_handle_ != RpcBroker::kInvalidHandle;
+ return read_until_callback_handle_ !=
+ openscreen::cast::RpcMessenger::kInvalidHandle;
}
// Indicates whether there is data waiting to be written to the mojo data
@@ -102,7 +109,7 @@ class DemuxerStreamAdapter {
private:
friend class MockDemuxerStreamAdapter;
- // Receives RPC message from RpcBroker.
+ // Receives RPC message from RpcMessenger.
void OnReceivedRpc(std::unique_ptr<openscreen::cast::RpcMessage> message);
// RPC message tasks.
@@ -124,15 +131,20 @@ class DemuxerStreamAdapter {
// Callback function when a fatal runtime error occurs.
void OnFatalError(StopTrigger stop_trigger);
+ // Helper to deregister the renderer from the RPC messenger.
+ void DeregisterFromRpcMessaging();
+
const scoped_refptr<base::SingleThreadTaskRunner> main_task_runner_;
const scoped_refptr<base::SingleThreadTaskRunner> media_task_runner_;
// Name of demuxer stream. Debug only.
const std::string name_;
- // Weak pointer of RpcBroker. It should use |main_task_runner_| to access the
- // interfaces.
- const base::WeakPtr<RpcBroker> rpc_broker_;
+ // Broker class to process incoming and outgoing RPC messages.
+ // Only accessed on |main_task_runner_|. NOTE: the messenger is wrapped
+ // in an |openscreen::WeakPtr| instead of |base|'s implementation because
+ // it is defined in the third_party/openscreen repository.
+ const openscreen::WeakPtr<openscreen::cast::RpcMessenger> rpc_messenger_;
// RPC handle for this demuxer stream service.
const int rpc_handle_;
@@ -193,8 +205,6 @@ class DemuxerStreamAdapter {
base::WeakPtrFactory<DemuxerStreamAdapter> request_buffer_weak_factory_{this};
// WeakPtrFactory for normal usage.
base::WeakPtrFactory<DemuxerStreamAdapter> weak_factory_{this};
-
- DISALLOW_COPY_AND_ASSIGN(DemuxerStreamAdapter);
};
} // namespace remoting
diff --git a/chromium/media/remoting/demuxer_stream_adapter_unittest.cc b/chromium/media/remoting/demuxer_stream_adapter_unittest.cc
index 5b6053cbacf..5a39cb26a2d 100644
--- a/chromium/media/remoting/demuxer_stream_adapter_unittest.cc
+++ b/chromium/media/remoting/demuxer_stream_adapter_unittest.cc
@@ -20,6 +20,7 @@
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
+using openscreen::cast::RpcMessenger;
using testing::_;
using testing::Invoke;
using testing::Return;
@@ -36,13 +37,15 @@ class MockDemuxerStreamAdapter {
DemuxerStream* demuxer_stream,
mojo::PendingRemote<mojom::RemotingDataStreamSender> stream_sender_remote,
mojo::ScopedDataPipeProducerHandle producer_handle) {
- rpc_broker_ = std::make_unique<RpcBroker>(
- base::BindRepeating(&MockDemuxerStreamAdapter::OnSendMessageToSink,
- weak_factory_.GetWeakPtr()));
+ rpc_messenger_ = std::make_unique<RpcMessenger>(
+ [cb =
+ base::BindRepeating(&MockDemuxerStreamAdapter::OnSendMessageToSink,
+ weak_factory_.GetWeakPtr())](
+ std::vector<uint8_t> message) { cb.Run(std::move(message)); });
demuxer_stream_adapter_ = std::make_unique<DemuxerStreamAdapter>(
std::move(main_task_runner), std::move(media_task_runner), name,
- demuxer_stream, rpc_broker_->GetWeakPtr(),
- rpc_broker_->GetUniqueHandle(), std::move(stream_sender_remote),
+ demuxer_stream, rpc_messenger_->GetWeakPtr(),
+ rpc_messenger_->GetUniqueHandle(), std::move(stream_sender_remote),
std::move(producer_handle),
base::BindOnce(&MockDemuxerStreamAdapter::OnError,
weak_factory_.GetWeakPtr()));
@@ -51,6 +54,9 @@ class MockDemuxerStreamAdapter {
demuxer_stream_adapter_->Initialize(3);
}
+ MockDemuxerStreamAdapter(const MockDemuxerStreamAdapter&) = delete;
+ MockDemuxerStreamAdapter& operator=(const MockDemuxerStreamAdapter&) = delete;
+
~MockDemuxerStreamAdapter() {
// Make sure unit tests that did not expect errors did not cause any errors.
EXPECT_TRUE(errors_.empty());
@@ -93,27 +99,29 @@ class MockDemuxerStreamAdapter {
}
private:
- void OnSendMessageToSink(std::unique_ptr<std::vector<uint8_t>> message) {
+ void OnSendMessageToSink(std::vector<uint8_t> message) {
last_received_rpc_ = std::make_unique<openscreen::cast::RpcMessage>();
- CHECK(last_received_rpc_->ParseFromArray(message->data(), message->size()));
+ CHECK(last_received_rpc_->ParseFromArray(message.data(), message.size()));
}
void OnError(StopTrigger stop_trigger) { errors_.push_back(stop_trigger); }
- std::unique_ptr<RpcBroker> rpc_broker_;
+ std::unique_ptr<RpcMessenger> rpc_messenger_;
std::unique_ptr<DemuxerStreamAdapter> demuxer_stream_adapter_;
std::unique_ptr<openscreen::cast::RpcMessage> last_received_rpc_;
std::vector<StopTrigger> errors_;
base::WeakPtrFactory<MockDemuxerStreamAdapter> weak_factory_{this};
-
- DISALLOW_COPY_AND_ASSIGN(MockDemuxerStreamAdapter);
};
class DemuxerStreamAdapterTest : public ::testing::Test {
public:
DemuxerStreamAdapterTest() = default;
+
+ DemuxerStreamAdapterTest(const DemuxerStreamAdapterTest&) = delete;
+ DemuxerStreamAdapterTest& operator=(const DemuxerStreamAdapterTest&) = delete;
+
~DemuxerStreamAdapterTest() override = default;
void SetUpDataPipe() {
@@ -154,9 +162,6 @@ class DemuxerStreamAdapterTest : public ::testing::Test {
std::unique_ptr<FakeDemuxerStream> demuxer_stream_;
std::unique_ptr<FakeRemotingDataStreamSender> data_stream_sender_;
std::unique_ptr<MockDemuxerStreamAdapter> demuxer_stream_adapter_;
-
- private:
- DISALLOW_COPY_AND_ASSIGN(DemuxerStreamAdapterTest);
};
TEST_F(DemuxerStreamAdapterTest, SingleReadUntil) {
diff --git a/chromium/media/remoting/end2end_test_renderer.cc b/chromium/media/remoting/end2end_test_renderer.cc
index b431f565fdd..f0c983e75ae 100644
--- a/chromium/media/remoting/end2end_test_renderer.cc
+++ b/chromium/media/remoting/end2end_test_renderer.cc
@@ -30,6 +30,8 @@
#include "mojo/public/cpp/bindings/self_owned_receiver.h"
#include "mojo/public/cpp/system/data_pipe.h"
+using openscreen::cast::RpcMessenger;
+
namespace media {
namespace remoting {
@@ -51,6 +53,9 @@ class TestStreamSender final : public mojom::RemotingDataStreamSender {
type_(type),
send_frame_to_sink_cb_(std::move(callback)) {}
+ TestStreamSender(const TestStreamSender&) = delete;
+ TestStreamSender& operator=(const TestStreamSender&) = delete;
+
~TestStreamSender() override = default;
// mojom::RemotingDataStreamSender implementation.
@@ -78,8 +83,6 @@ class TestStreamSender final : public mojom::RemotingDataStreamSender {
const DemuxerStream::Type type_;
const SendFrameToSinkCallback send_frame_to_sink_cb_;
std::vector<uint8_t> next_frame_data_;
-
- DISALLOW_COPY_AND_ASSIGN(TestStreamSender);
};
class TestRemoter final : public mojom::Remoter {
@@ -93,6 +96,9 @@ class TestRemoter final : public mojom::Remoter {
send_message_to_sink_cb_(std::move(send_message_to_sink_cb)),
send_frame_to_sink_cb_(std::move(send_frame_to_sink_cb)) {}
+ TestRemoter(const TestRemoter&) = delete;
+ TestRemoter& operator=(const TestRemoter&) = delete;
+
~TestRemoter() override = default;
// mojom::Remoter implementation.
@@ -142,8 +148,6 @@ class TestRemoter final : public mojom::Remoter {
const TestStreamSender::SendFrameToSinkCallback send_frame_to_sink_cb_;
std::unique_ptr<TestStreamSender> audio_stream_sender_;
std::unique_ptr<TestStreamSender> video_stream_sender_;
-
- DISALLOW_COPY_AND_ASSIGN(TestRemoter);
};
std::unique_ptr<RendererController> CreateController(
@@ -266,13 +270,17 @@ End2EndTestRenderer::End2EndTestRenderer(std::unique_ptr<Renderer> renderer)
receiver_controller_ = ReceiverController::GetInstance();
ResetForTesting(receiver_controller_);
- receiver_rpc_broker_ = receiver_controller_->rpc_broker();
- receiver_renderer_handle_ = receiver_rpc_broker_->GetUniqueHandle();
+ receiver_rpc_messenger_ = receiver_controller_->rpc_messenger();
+ receiver_renderer_handle_ = receiver_rpc_messenger_->GetUniqueHandle();
- receiver_rpc_broker_->RegisterMessageReceiverCallback(
- RpcBroker::kAcquireRendererHandle,
- base::BindRepeating(&End2EndTestRenderer::OnReceivedRpc,
- weak_factory_.GetWeakPtr()));
+ receiver_rpc_messenger_->RegisterMessageReceiverCallback(
+ RpcMessenger::kAcquireRendererHandle,
+ [ptr = weak_factory_.GetWeakPtr()](
+ std::unique_ptr<openscreen::cast::RpcMessage> message) {
+ if (ptr) {
+ ptr->OnReceivedRpc(std::move(message));
+ }
+ });
receiver_ = std::make_unique<Receiver>(
receiver_renderer_handle_, sender_renderer_handle_, receiver_controller_,
@@ -288,8 +296,8 @@ End2EndTestRenderer::End2EndTestRenderer(std::unique_ptr<Renderer> renderer)
}
End2EndTestRenderer::~End2EndTestRenderer() {
- receiver_rpc_broker_->UnregisterMessageReceiverCallback(
- RpcBroker::kAcquireRendererHandle);
+ receiver_rpc_messenger_->UnregisterMessageReceiverCallback(
+ RpcMessenger::kAcquireRendererHandle);
}
void End2EndTestRenderer::Initialize(MediaResource* media_resource,
@@ -345,20 +353,20 @@ void End2EndTestRenderer::OnReceivedRpc(
void End2EndTestRenderer::OnAcquireRenderer(
std::unique_ptr<openscreen::cast::RpcMessage> message) {
DCHECK(message->has_integer_value());
- DCHECK(message->integer_value() != RpcBroker::kInvalidHandle);
+ DCHECK(message->integer_value() != RpcMessenger::kInvalidHandle);
- if (sender_renderer_handle_ == RpcBroker::kInvalidHandle) {
+ if (sender_renderer_handle_ == RpcMessenger::kInvalidHandle) {
sender_renderer_handle_ = message->integer_value();
receiver_->SetRemoteHandle(sender_renderer_handle_);
}
}
void End2EndTestRenderer::OnAcquireRendererDone(int receiver_renderer_handle) {
- auto rpc = std::make_unique<openscreen::cast::RpcMessage>();
- rpc->set_handle(sender_renderer_handle_);
- rpc->set_proc(openscreen::cast::RpcMessage::RPC_ACQUIRE_RENDERER_DONE);
- rpc->set_integer_value(receiver_renderer_handle);
- receiver_rpc_broker_->SendMessageToRemote(std::move(rpc));
+ openscreen::cast::RpcMessage rpc;
+ rpc.set_handle(sender_renderer_handle_);
+ rpc.set_proc(openscreen::cast::RpcMessage::RPC_ACQUIRE_RENDERER_DONE);
+ rpc.set_integer_value(receiver_renderer_handle);
+ receiver_rpc_messenger_->SendMessageToRemote(rpc);
}
void End2EndTestRenderer::SetLatencyHint(
diff --git a/chromium/media/remoting/end2end_test_renderer.h b/chromium/media/remoting/end2end_test_renderer.h
index deab7964849..3d201112b79 100644
--- a/chromium/media/remoting/end2end_test_renderer.h
+++ b/chromium/media/remoting/end2end_test_renderer.h
@@ -11,8 +11,8 @@
#include "base/memory/weak_ptr.h"
#include "media/base/demuxer_stream.h"
#include "media/base/renderer.h"
-#include "media/remoting/rpc_broker.h"
#include "media/remoting/stream_provider.h"
+#include "third_party/openscreen/src/cast/streaming/rpc_messenger.h"
namespace media {
namespace remoting {
@@ -88,13 +88,14 @@ class End2EndTestRenderer final : public Renderer {
ReceiverController* receiver_controller_;
std::unique_ptr<Receiver> receiver_;
std::unique_ptr<StreamProvider> stream_provider_;
- RpcBroker* receiver_rpc_broker_;
+ openscreen::cast::RpcMessenger* receiver_rpc_messenger_;
// Handle of |receiver_|
- int receiver_renderer_handle_ = RpcBroker::kInvalidHandle;
+ int receiver_renderer_handle_ =
+ openscreen::cast::RpcMessenger::kInvalidHandle;
// Handle of |courier_renderer_|, it would be sent with AcquireRenderer
// message.
- int sender_renderer_handle_ = RpcBroker::kInvalidHandle;
+ int sender_renderer_handle_ = openscreen::cast::RpcMessenger::kInvalidHandle;
base::WeakPtrFactory<End2EndTestRenderer> weak_factory_{this};
};
diff --git a/chromium/media/remoting/fake_media_resource.cc b/chromium/media/remoting/fake_media_resource.cc
index 39ff3c24a56..32293283948 100644
--- a/chromium/media/remoting/fake_media_resource.cc
+++ b/chromium/media/remoting/fake_media_resource.cc
@@ -21,14 +21,14 @@ namespace remoting {
FakeDemuxerStream::FakeDemuxerStream(bool is_audio) {
type_ = is_audio ? DemuxerStream::AUDIO : DemuxerStream::VIDEO;
if (is_audio) {
- audio_config_.Initialize(kCodecAAC, kSampleFormatS16, CHANNEL_LAYOUT_STEREO,
- 38400, std::vector<uint8_t>(),
- EncryptionScheme::kUnencrypted, base::TimeDelta(),
- 0);
+ audio_config_.Initialize(
+ AudioCodec::kAAC, kSampleFormatS16, CHANNEL_LAYOUT_STEREO, 38400,
+ std::vector<uint8_t>(), EncryptionScheme::kUnencrypted,
+ base::TimeDelta(), 0);
} else {
gfx::Size size(640, 480);
gfx::Rect rect(0, 0, 640, 480);
- video_config_.Initialize(kCodecH264, H264PROFILE_BASELINE,
+ video_config_.Initialize(VideoCodec::kH264, H264PROFILE_BASELINE,
VideoDecoderConfig::AlphaMode::kIsOpaque,
VideoColorSpace::REC601(), kNoTransformation, size,
rect, size, std::vector<uint8_t>(),
@@ -79,7 +79,7 @@ void FakeDemuxerStream::CreateFakeFrame(size_t size,
for (size_t i = 0; i < size; ++i) {
buffer[i] = static_cast<uint8_t>(i & 0xFF);
}
- base::TimeDelta pts = base::TimeDelta::FromMilliseconds(pts_ms);
+ base::TimeDelta pts = base::Milliseconds(pts_ms);
// To DecoderBuffer
scoped_refptr<DecoderBuffer> input_buffer =
diff --git a/chromium/media/remoting/fake_media_resource.h b/chromium/media/remoting/fake_media_resource.h
index 85159671267..c73181e0ec9 100644
--- a/chromium/media/remoting/fake_media_resource.h
+++ b/chromium/media/remoting/fake_media_resource.h
@@ -20,6 +20,10 @@ namespace remoting {
class FakeDemuxerStream : public DemuxerStream {
public:
explicit FakeDemuxerStream(bool is_audio);
+
+ FakeDemuxerStream(const FakeDemuxerStream&) = delete;
+ FakeDemuxerStream& operator=(const FakeDemuxerStream&) = delete;
+
~FakeDemuxerStream() override;
// DemuxerStream implementation.
@@ -41,14 +45,16 @@ class FakeDemuxerStream : public DemuxerStream {
Type type_;
AudioDecoderConfig audio_config_;
VideoDecoderConfig video_config_;
-
- DISALLOW_COPY_AND_ASSIGN(FakeDemuxerStream);
};
// Audio only demuxer stream provider
class FakeMediaResource final : public MediaResource {
public:
FakeMediaResource();
+
+ FakeMediaResource(const FakeMediaResource&) = delete;
+ FakeMediaResource& operator=(const FakeMediaResource&) = delete;
+
~FakeMediaResource() override;
// MediaResource implementation.
@@ -57,8 +63,6 @@ class FakeMediaResource final : public MediaResource {
private:
std::unique_ptr<FakeDemuxerStream> audio_stream_;
std::unique_ptr<FakeDemuxerStream> video_stream_;
-
- DISALLOW_COPY_AND_ASSIGN(FakeMediaResource);
};
} // namespace remoting
diff --git a/chromium/media/remoting/fake_remoter.h b/chromium/media/remoting/fake_remoter.h
index 78f8b608543..8f321fad72b 100644
--- a/chromium/media/remoting/fake_remoter.h
+++ b/chromium/media/remoting/fake_remoter.h
@@ -23,6 +23,11 @@ class FakeRemotingDataStreamSender : public mojom::RemotingDataStreamSender {
FakeRemotingDataStreamSender(
mojo::PendingReceiver<mojom::RemotingDataStreamSender> receiver,
mojo::ScopedDataPipeConsumerHandle consumer_handle);
+
+ FakeRemotingDataStreamSender(const FakeRemotingDataStreamSender&) = delete;
+ FakeRemotingDataStreamSender& operator=(const FakeRemotingDataStreamSender&) =
+ delete;
+
~FakeRemotingDataStreamSender() override;
uint32_t send_frame_count() const { return send_frame_count_; }
@@ -47,8 +52,6 @@ class FakeRemotingDataStreamSender : public mojom::RemotingDataStreamSender {
std::vector<std::vector<uint8_t>> received_frame_list;
uint32_t send_frame_count_;
uint32_t cancel_in_flight_count_;
-
- DISALLOW_COPY_AND_ASSIGN(FakeRemotingDataStreamSender);
};
class FakeRemoter final : public mojom::Remoter {
@@ -56,6 +59,10 @@ class FakeRemoter final : public mojom::Remoter {
// |start_will_fail| indicates whether starting remoting will fail.
FakeRemoter(mojo::PendingRemote<mojom::RemotingSource> source,
bool start_will_fail);
+
+ FakeRemoter(const FakeRemoter&) = delete;
+ FakeRemoter& operator=(const FakeRemoter&) = delete;
+
~FakeRemoter() override;
// mojom::Remoter implementations.
@@ -83,14 +90,16 @@ class FakeRemoter final : public mojom::Remoter {
std::unique_ptr<FakeRemotingDataStreamSender> video_stream_sender_;
base::WeakPtrFactory<FakeRemoter> weak_factory_{this};
-
- DISALLOW_COPY_AND_ASSIGN(FakeRemoter);
};
class FakeRemoterFactory final : public mojom::RemoterFactory {
public:
// |start_will_fail| indicates whether starting remoting will fail.
explicit FakeRemoterFactory(bool start_will_fail);
+
+ FakeRemoterFactory(const FakeRemoterFactory&) = delete;
+ FakeRemoterFactory& operator=(const FakeRemoterFactory&) = delete;
+
~FakeRemoterFactory() override;
// mojom::RemoterFactory implementation.
@@ -102,8 +111,6 @@ class FakeRemoterFactory final : public mojom::RemoterFactory {
private:
bool start_will_fail_;
-
- DISALLOW_COPY_AND_ASSIGN(FakeRemoterFactory);
};
} // namespace remoting
diff --git a/chromium/media/remoting/integration_test.cc b/chromium/media/remoting/integration_test.cc
index 805a425e1d0..d50888ea431 100644
--- a/chromium/media/remoting/integration_test.cc
+++ b/chromium/media/remoting/integration_test.cc
@@ -60,7 +60,7 @@ TEST_F(MediaRemotingIntegrationTest, MediaSource_ConfigChange_WebM) {
EXPECT_CALL(*this, OnVideoNaturalSizeChange(gfx::Size(640, 360))).Times(1);
scoped_refptr<DecoderBuffer> second_file =
ReadTestDataFile("bear-640x360.webm");
- ASSERT_TRUE(source.AppendAtTime(base::TimeDelta::FromSeconds(kAppendTimeSec),
+ ASSERT_TRUE(source.AppendAtTime(base::Seconds(kAppendTimeSec),
second_file->data(),
second_file->data_size()));
source.EndOfStream();
diff --git a/chromium/media/remoting/metrics.cc b/chromium/media/remoting/metrics.cc
index c2f9f03a271..b744f292c33 100644
--- a/chromium/media/remoting/metrics.cc
+++ b/chromium/media/remoting/metrics.cc
@@ -42,10 +42,10 @@ constexpr int kVideoWidthBuckets[] = {
} // namespace
SessionMetricsRecorder::SessionMetricsRecorder()
- : last_audio_codec_(kUnknownAudioCodec),
+ : last_audio_codec_(AudioCodec::kUnknown),
last_channel_layout_(CHANNEL_LAYOUT_NONE),
last_sample_rate_(0),
- last_video_codec_(kUnknownVideoCodec),
+ last_video_codec_(VideoCodec::kUnknown),
last_video_profile_(VIDEO_CODEC_PROFILE_UNKNOWN) {}
SessionMetricsRecorder::~SessionMetricsRecorder() = default;
@@ -59,9 +59,9 @@ void SessionMetricsRecorder::WillStartSession(StartTrigger trigger) {
void SessionMetricsRecorder::DidStartSession() {
UMA_HISTOGRAM_ENUMERATION("Media.Remoting.SessionStartTrigger",
*start_trigger_, START_TRIGGER_MAX + 1);
- if (last_audio_codec_ != kUnknownAudioCodec)
+ if (last_audio_codec_ != AudioCodec::kUnknown)
RecordAudioConfiguration();
- if (last_video_codec_ != kUnknownVideoCodec)
+ if (last_video_codec_ != VideoCodec::kUnknown)
RecordVideoConfiguration();
RecordTrackConfiguration();
}
@@ -77,33 +77,31 @@ void SessionMetricsRecorder::WillStopSession(StopTrigger trigger) {
// Record the session duration.
const base::TimeDelta session_duration = base::TimeTicks::Now() - start_time_;
UMA_HISTOGRAM_CUSTOM_TIMES("Media.Remoting.SessionDuration", session_duration,
- base::TimeDelta::FromSeconds(15),
- base::TimeDelta::FromHours(12), 50);
+ base::Seconds(15), base::Hours(12), 50);
- if (session_duration <= base::TimeDelta::FromSeconds(15)) {
+ if (session_duration <= base::Seconds(15)) {
// Record the session duration in finer scale for short sessions
UMA_HISTOGRAM_CUSTOM_TIMES("Media.Remoting.ShortSessionDuration",
- session_duration,
- base::TimeDelta::FromSecondsD(0.1),
- base::TimeDelta::FromSeconds(15), 60);
+ session_duration, base::Seconds(0.1),
+ base::Seconds(15), 60);
- if (session_duration <= base::TimeDelta::FromSecondsD(0.1)) {
+ if (session_duration <= base::Seconds(0.1)) {
UMA_HISTOGRAM_ENUMERATION(
"Media.Remoting.SessionStopTrigger.Duration0To100MilliSec", trigger,
STOP_TRIGGER_MAX + 1);
- } else if (session_duration <= base::TimeDelta::FromSeconds(1)) {
+ } else if (session_duration <= base::Seconds(1)) {
UMA_HISTOGRAM_ENUMERATION(
"Media.Remoting.SessionStopTrigger.Duration100MilliSecTo1Sec",
trigger, STOP_TRIGGER_MAX + 1);
- } else if (session_duration <= base::TimeDelta::FromSeconds(3)) {
+ } else if (session_duration <= base::Seconds(3)) {
UMA_HISTOGRAM_ENUMERATION(
"Media.Remoting.SessionStopTrigger.Duration1To3Sec", trigger,
STOP_TRIGGER_MAX + 1);
- } else if (session_duration <= base::TimeDelta::FromSeconds(5)) {
+ } else if (session_duration <= base::Seconds(5)) {
UMA_HISTOGRAM_ENUMERATION(
"Media.Remoting.SessionStopTrigger.Duration3To5Sec", trigger,
STOP_TRIGGER_MAX + 1);
- } else if (session_duration <= base::TimeDelta::FromSeconds(10)) {
+ } else if (session_duration <= base::Seconds(10)) {
UMA_HISTOGRAM_ENUMERATION(
"Media.Remoting.SessionStopTrigger.Duration5To10Sec", trigger,
STOP_TRIGGER_MAX + 1);
@@ -134,7 +132,7 @@ void SessionMetricsRecorder::OnPipelineMetadataChanged(
if (need_to_record_audio_configuration)
RecordAudioConfiguration();
} else {
- last_audio_codec_ = kUnknownAudioCodec;
+ last_audio_codec_ = AudioCodec::kUnknown;
last_channel_layout_ = CHANNEL_LAYOUT_NONE;
last_sample_rate_ = 0;
}
@@ -152,7 +150,7 @@ void SessionMetricsRecorder::OnPipelineMetadataChanged(
if (need_to_record_video_configuration)
RecordVideoConfiguration();
} else {
- last_video_codec_ = kUnknownVideoCodec;
+ last_video_codec_ = VideoCodec::kUnknown;
last_video_profile_ = VIDEO_CODEC_PROFILE_UNKNOWN;
last_natural_size_ = gfx::Size();
}
@@ -190,8 +188,7 @@ void SessionMetricsRecorder::RecordCompatibility(
}
void SessionMetricsRecorder::RecordAudioConfiguration() {
- UMA_HISTOGRAM_ENUMERATION("Media.Remoting.AudioCodec", last_audio_codec_,
- kAudioCodecMax + 1);
+ base::UmaHistogramEnumeration("Media.Remoting.AudioCodec", last_audio_codec_);
UMA_HISTOGRAM_ENUMERATION("Media.Remoting.AudioChannelLayout",
last_channel_layout_, CHANNEL_LAYOUT_MAX + 1);
AudioSampleRate asr;
@@ -205,8 +202,7 @@ void SessionMetricsRecorder::RecordAudioConfiguration() {
}
void SessionMetricsRecorder::RecordVideoConfiguration() {
- UMA_HISTOGRAM_ENUMERATION("Media.Remoting.VideoCodec", last_video_codec_,
- kVideoCodecMax + 1);
+ base::UmaHistogramEnumeration("Media.Remoting.VideoCodec", last_video_codec_);
UMA_HISTOGRAM_ENUMERATION("Media.Remoting.VideoCodecProfile",
last_video_profile_, VIDEO_CODEC_PROFILE_MAX + 1);
UMA_HISTOGRAM_CUSTOM_ENUMERATION(
@@ -224,9 +220,9 @@ void SessionMetricsRecorder::RecordVideoConfiguration() {
void SessionMetricsRecorder::RecordTrackConfiguration() {
TrackConfiguration config = NEITHER_AUDIO_NOR_VIDEO;
- if (last_audio_codec_ != kUnknownAudioCodec)
+ if (last_audio_codec_ != AudioCodec::kUnknown)
config = AUDIO_ONLY;
- if (last_video_codec_ != kUnknownVideoCodec) {
+ if (last_video_codec_ != VideoCodec::kUnknown) {
if (config == AUDIO_ONLY)
config = AUDIO_AND_VIDEO;
else
@@ -245,9 +241,8 @@ void RendererMetricsRecorder::OnRendererInitialized() {
const base::TimeDelta elapsed_since_start =
base::TimeTicks::Now() - start_time_;
UMA_HISTOGRAM_CUSTOM_TIMES("Media.Remoting.TimeUntilRemoteInitialized",
- elapsed_since_start,
- base::TimeDelta::FromMilliseconds(10),
- base::TimeDelta::FromSeconds(30), 50);
+ elapsed_since_start, base::Milliseconds(10),
+ base::Seconds(30), 50);
}
void RendererMetricsRecorder::OnEvidenceOfPlayoutAtReceiver() {
@@ -256,9 +251,8 @@ void RendererMetricsRecorder::OnEvidenceOfPlayoutAtReceiver() {
const base::TimeDelta elapsed_since_start =
base::TimeTicks::Now() - start_time_;
UMA_HISTOGRAM_CUSTOM_TIMES("Media.Remoting.TimeUntilFirstPlayout",
- elapsed_since_start,
- base::TimeDelta::FromMilliseconds(10),
- base::TimeDelta::FromSeconds(30), 50);
+ elapsed_since_start, base::Milliseconds(10),
+ base::Seconds(30), 50);
did_record_first_playout_ = true;
}
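
The metrics changes above also adopt the terse TimeDelta constructors (base::Seconds, base::Milliseconds, base::Hours) in place of the base::TimeDelta::FromSeconds family. A rough standalone analogy, modeled with std::chrono because the Chromium base library is not available here; the fake_base helpers are assumptions made only for this sketch.

#include <chrono>
#include <iostream>

namespace fake_base {
// Free-function constructors in the spirit of base::Seconds / base::Milliseconds:
// terse factories instead of verbose static member functions on the duration type.
constexpr std::chrono::duration<double> Seconds(double s) {
  return std::chrono::duration<double>(s);
}
constexpr std::chrono::duration<double> Milliseconds(double ms) {
  return std::chrono::duration<double>(ms / 1000.0);
}
}  // namespace fake_base

int main() {
  // Old style (verbose):    base::TimeDelta::FromSeconds(15)
  // New style (this patch): base::Seconds(15)
  auto session_floor = fake_base::Seconds(15);
  auto first_playout = fake_base::Milliseconds(10);
  std::cout << session_floor.count() << "s, " << first_playout.count() << "s\n";
  return 0;
}
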
diff --git a/chromium/media/remoting/metrics.h b/chromium/media/remoting/metrics.h
index c08e8779b77..fe865afd39a 100644
--- a/chromium/media/remoting/metrics.h
+++ b/chromium/media/remoting/metrics.h
@@ -51,6 +51,10 @@ enum class PixelRateSupport {
class SessionMetricsRecorder {
public:
SessionMetricsRecorder();
+
+ SessionMetricsRecorder(const SessionMetricsRecorder&) = delete;
+ SessionMetricsRecorder& operator=(const SessionMetricsRecorder&) = delete;
+
~SessionMetricsRecorder();
// When attempting to start a remoting session, WillStartSession() is called,
@@ -116,13 +120,15 @@ class SessionMetricsRecorder {
bool did_record_pixel_rate_support_ = false;
bool did_record_compatibility_ = false;
-
- DISALLOW_COPY_AND_ASSIGN(SessionMetricsRecorder);
};
class RendererMetricsRecorder {
public:
RendererMetricsRecorder();
+
+ RendererMetricsRecorder(const RendererMetricsRecorder&) = delete;
+ RendererMetricsRecorder& operator=(const RendererMetricsRecorder&) = delete;
+
~RendererMetricsRecorder();
// Called when an "initialize success" message is received from the remote.
@@ -140,8 +146,6 @@ class RendererMetricsRecorder {
private:
const base::TimeTicks start_time_;
bool did_record_first_playout_ = false;
-
- DISALLOW_COPY_AND_ASSIGN(RendererMetricsRecorder);
};
} // namespace remoting
diff --git a/chromium/media/remoting/mock_receiver_controller.cc b/chromium/media/remoting/mock_receiver_controller.cc
index b0e742d7635..7ee23f8b376 100644
--- a/chromium/media/remoting/mock_receiver_controller.cc
+++ b/chromium/media/remoting/mock_receiver_controller.cc
@@ -101,17 +101,15 @@ MockReceiverController* MockReceiverController::GetInstance() {
MockReceiverController::MockReceiverController()
: mock_remotee_(new MockRemotee()) {
- // Overwrites |rpc_broker_|.
- rpc_broker_.SetMessageCallbackForTesting(base::BindRepeating(
- &MockReceiverController::OnSendRpc, base::Unretained(this)));
+ // Overwrites |rpc_messenger_|.
+ rpc_messenger_.set_send_message_cb_for_testing(
+ [this](std::vector<uint8_t> message) { OnSendRpc(message); });
}
MockReceiverController::~MockReceiverController() = default;
-void MockReceiverController::OnSendRpc(
- std::unique_ptr<std::vector<uint8_t>> message) {
- std::vector<uint8_t> binary_message = *message;
- ReceiverController::OnMessageFromSource(binary_message);
+void MockReceiverController::OnSendRpc(std::vector<uint8_t> message) {
+ ReceiverController::OnMessageFromSource(message);
}
} // namespace remoting
diff --git a/chromium/media/remoting/mock_receiver_controller.h b/chromium/media/remoting/mock_receiver_controller.h
index a6c619e4718..69df2fec16b 100644
--- a/chromium/media/remoting/mock_receiver_controller.h
+++ b/chromium/media/remoting/mock_receiver_controller.h
@@ -86,7 +86,7 @@ class MockReceiverController : public ReceiverController {
MockReceiverController();
~MockReceiverController() override;
- void OnSendRpc(std::unique_ptr<std::vector<uint8_t>> message);
+ void OnSendRpc(std::vector<uint8_t> message);
std::unique_ptr<MockRemotee> mock_remotee_;
};
diff --git a/chromium/media/remoting/proto_enum_utils.cc b/chromium/media/remoting/proto_enum_utils.cc
index ce46988b0cb..25c2827b34b 100644
--- a/chromium/media/remoting/proto_enum_utils.cc
+++ b/chromium/media/remoting/proto_enum_utils.cc
@@ -11,29 +11,33 @@ namespace remoting {
case OriginType::x: \
return OtherType::x
+#define CASE_RETURN_ORIGIN_TO_OTHER(x, y) \
+ case OriginType::x: \
+ return OtherType::y
+
absl::optional<AudioCodec> ToMediaAudioCodec(
openscreen::cast::AudioDecoderConfig::Codec value) {
using OriginType = openscreen::cast::AudioDecoderConfig;
using OtherType = AudioCodec;
switch (value) {
- CASE_RETURN_OTHER(kUnknownAudioCodec);
- CASE_RETURN_OTHER(kCodecAAC);
- CASE_RETURN_OTHER(kCodecMP3);
- CASE_RETURN_OTHER(kCodecPCM);
- CASE_RETURN_OTHER(kCodecVorbis);
- CASE_RETURN_OTHER(kCodecFLAC);
- CASE_RETURN_OTHER(kCodecAMR_NB);
- CASE_RETURN_OTHER(kCodecAMR_WB);
- CASE_RETURN_OTHER(kCodecPCM_MULAW);
- CASE_RETURN_OTHER(kCodecGSM_MS);
- CASE_RETURN_OTHER(kCodecPCM_S16BE);
- CASE_RETURN_OTHER(kCodecPCM_S24BE);
- CASE_RETURN_OTHER(kCodecOpus);
- CASE_RETURN_OTHER(kCodecEAC3);
- CASE_RETURN_OTHER(kCodecPCM_ALAW);
- CASE_RETURN_OTHER(kCodecALAC);
- CASE_RETURN_OTHER(kCodecAC3);
- CASE_RETURN_OTHER(kCodecMpegHAudio);
+ CASE_RETURN_ORIGIN_TO_OTHER(kUnknownAudioCodec, kUnknown);
+ CASE_RETURN_ORIGIN_TO_OTHER(kCodecAAC, kAAC);
+ CASE_RETURN_ORIGIN_TO_OTHER(kCodecMP3, kMP3);
+ CASE_RETURN_ORIGIN_TO_OTHER(kCodecPCM, kPCM);
+ CASE_RETURN_ORIGIN_TO_OTHER(kCodecVorbis, kVorbis);
+ CASE_RETURN_ORIGIN_TO_OTHER(kCodecFLAC, kFLAC);
+ CASE_RETURN_ORIGIN_TO_OTHER(kCodecAMR_NB, kAMR_NB);
+ CASE_RETURN_ORIGIN_TO_OTHER(kCodecAMR_WB, kAMR_WB);
+ CASE_RETURN_ORIGIN_TO_OTHER(kCodecPCM_MULAW, kPCM_MULAW);
+ CASE_RETURN_ORIGIN_TO_OTHER(kCodecGSM_MS, kGSM_MS);
+ CASE_RETURN_ORIGIN_TO_OTHER(kCodecPCM_S16BE, kPCM_S16BE);
+ CASE_RETURN_ORIGIN_TO_OTHER(kCodecPCM_S24BE, kPCM_S24BE);
+ CASE_RETURN_ORIGIN_TO_OTHER(kCodecOpus, kOpus);
+ CASE_RETURN_ORIGIN_TO_OTHER(kCodecEAC3, kEAC3);
+ CASE_RETURN_ORIGIN_TO_OTHER(kCodecPCM_ALAW, kPCM_ALAW);
+ CASE_RETURN_ORIGIN_TO_OTHER(kCodecALAC, kALAC);
+ CASE_RETURN_ORIGIN_TO_OTHER(kCodecAC3, kAC3);
+ CASE_RETURN_ORIGIN_TO_OTHER(kCodecMpegHAudio, kMpegHAudio);
default:
return absl::nullopt;
}
@@ -44,24 +48,24 @@ ToProtoAudioDecoderConfigCodec(AudioCodec value) {
using OriginType = AudioCodec;
using OtherType = openscreen::cast::AudioDecoderConfig;
switch (value) {
- CASE_RETURN_OTHER(kUnknownAudioCodec);
- CASE_RETURN_OTHER(kCodecAAC);
- CASE_RETURN_OTHER(kCodecMP3);
- CASE_RETURN_OTHER(kCodecPCM);
- CASE_RETURN_OTHER(kCodecVorbis);
- CASE_RETURN_OTHER(kCodecFLAC);
- CASE_RETURN_OTHER(kCodecAMR_NB);
- CASE_RETURN_OTHER(kCodecAMR_WB);
- CASE_RETURN_OTHER(kCodecPCM_MULAW);
- CASE_RETURN_OTHER(kCodecGSM_MS);
- CASE_RETURN_OTHER(kCodecPCM_S16BE);
- CASE_RETURN_OTHER(kCodecPCM_S24BE);
- CASE_RETURN_OTHER(kCodecOpus);
- CASE_RETURN_OTHER(kCodecEAC3);
- CASE_RETURN_OTHER(kCodecPCM_ALAW);
- CASE_RETURN_OTHER(kCodecALAC);
- CASE_RETURN_OTHER(kCodecAC3);
- CASE_RETURN_OTHER(kCodecMpegHAudio);
+ CASE_RETURN_ORIGIN_TO_OTHER(kUnknown, kUnknownAudioCodec);
+ CASE_RETURN_ORIGIN_TO_OTHER(kAAC, kCodecAAC);
+ CASE_RETURN_ORIGIN_TO_OTHER(kMP3, kCodecMP3);
+ CASE_RETURN_ORIGIN_TO_OTHER(kPCM, kCodecPCM);
+ CASE_RETURN_ORIGIN_TO_OTHER(kVorbis, kCodecVorbis);
+ CASE_RETURN_ORIGIN_TO_OTHER(kFLAC, kCodecFLAC);
+ CASE_RETURN_ORIGIN_TO_OTHER(kAMR_NB, kCodecAMR_NB);
+ CASE_RETURN_ORIGIN_TO_OTHER(kAMR_WB, kCodecAMR_WB);
+ CASE_RETURN_ORIGIN_TO_OTHER(kPCM_MULAW, kCodecPCM_MULAW);
+ CASE_RETURN_ORIGIN_TO_OTHER(kGSM_MS, kCodecGSM_MS);
+ CASE_RETURN_ORIGIN_TO_OTHER(kPCM_S16BE, kCodecPCM_S16BE);
+ CASE_RETURN_ORIGIN_TO_OTHER(kPCM_S24BE, kCodecPCM_S24BE);
+ CASE_RETURN_ORIGIN_TO_OTHER(kOpus, kCodecOpus);
+ CASE_RETURN_ORIGIN_TO_OTHER(kEAC3, kCodecEAC3);
+ CASE_RETURN_ORIGIN_TO_OTHER(kPCM_ALAW, kCodecPCM_ALAW);
+ CASE_RETURN_ORIGIN_TO_OTHER(kALAC, kCodecALAC);
+ CASE_RETURN_ORIGIN_TO_OTHER(kAC3, kCodecAC3);
+ CASE_RETURN_ORIGIN_TO_OTHER(kMpegHAudio, kCodecMpegHAudio);
default:
return absl::nullopt;
}
@@ -204,17 +208,17 @@ absl::optional<VideoCodec> ToMediaVideoCodec(
using OriginType = openscreen::cast::VideoDecoderConfig;
using OtherType = VideoCodec;
switch (value) {
- CASE_RETURN_OTHER(kUnknownVideoCodec);
- CASE_RETURN_OTHER(kCodecH264);
- CASE_RETURN_OTHER(kCodecVC1);
- CASE_RETURN_OTHER(kCodecMPEG2);
- CASE_RETURN_OTHER(kCodecMPEG4);
- CASE_RETURN_OTHER(kCodecTheora);
- CASE_RETURN_OTHER(kCodecVP8);
- CASE_RETURN_OTHER(kCodecVP9);
- CASE_RETURN_OTHER(kCodecHEVC);
- CASE_RETURN_OTHER(kCodecDolbyVision);
- CASE_RETURN_OTHER(kCodecAV1);
+ CASE_RETURN_ORIGIN_TO_OTHER(kUnknownVideoCodec, kUnknown);
+ CASE_RETURN_ORIGIN_TO_OTHER(kCodecH264, kH264);
+ CASE_RETURN_ORIGIN_TO_OTHER(kCodecVC1, kVC1);
+ CASE_RETURN_ORIGIN_TO_OTHER(kCodecMPEG2, kMPEG2);
+ CASE_RETURN_ORIGIN_TO_OTHER(kCodecMPEG4, kMPEG4);
+ CASE_RETURN_ORIGIN_TO_OTHER(kCodecTheora, kTheora);
+ CASE_RETURN_ORIGIN_TO_OTHER(kCodecVP8, kVP8);
+ CASE_RETURN_ORIGIN_TO_OTHER(kCodecVP9, kVP9);
+ CASE_RETURN_ORIGIN_TO_OTHER(kCodecHEVC, kHEVC);
+ CASE_RETURN_ORIGIN_TO_OTHER(kCodecDolbyVision, kDolbyVision);
+ CASE_RETURN_ORIGIN_TO_OTHER(kCodecAV1, kAV1);
default:
return absl::nullopt;
}
@@ -225,17 +229,17 @@ ToProtoVideoDecoderConfigCodec(VideoCodec value) {
using OriginType = VideoCodec;
using OtherType = openscreen::cast::VideoDecoderConfig;
switch (value) {
- CASE_RETURN_OTHER(kUnknownVideoCodec);
- CASE_RETURN_OTHER(kCodecH264);
- CASE_RETURN_OTHER(kCodecVC1);
- CASE_RETURN_OTHER(kCodecMPEG2);
- CASE_RETURN_OTHER(kCodecMPEG4);
- CASE_RETURN_OTHER(kCodecTheora);
- CASE_RETURN_OTHER(kCodecVP8);
- CASE_RETURN_OTHER(kCodecVP9);
- CASE_RETURN_OTHER(kCodecHEVC);
- CASE_RETURN_OTHER(kCodecDolbyVision);
- CASE_RETURN_OTHER(kCodecAV1);
+ CASE_RETURN_ORIGIN_TO_OTHER(kUnknown, kUnknownVideoCodec);
+ CASE_RETURN_ORIGIN_TO_OTHER(kH264, kCodecH264);
+ CASE_RETURN_ORIGIN_TO_OTHER(kVC1, kCodecVC1);
+ CASE_RETURN_ORIGIN_TO_OTHER(kMPEG2, kCodecMPEG2);
+ CASE_RETURN_ORIGIN_TO_OTHER(kMPEG4, kCodecMPEG4);
+ CASE_RETURN_ORIGIN_TO_OTHER(kTheora, kCodecTheora);
+ CASE_RETURN_ORIGIN_TO_OTHER(kVP8, kCodecVP8);
+ CASE_RETURN_ORIGIN_TO_OTHER(kVP9, kCodecVP9);
+ CASE_RETURN_ORIGIN_TO_OTHER(kHEVC, kCodecHEVC);
+ CASE_RETURN_ORIGIN_TO_OTHER(kDolbyVision, kCodecDolbyVision);
+ CASE_RETURN_ORIGIN_TO_OTHER(kAV1, kCodecAV1);
default:
return absl::nullopt;
}
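The hunks above replace the one-argument CASE_RETURN_OTHER macro with the two-argument CASE_RETURN_ORIGIN_TO_OTHER because the media-side enumerator names (kAAC, kH264, ...) no longer match the proto-side names (kCodecAAC, kCodecH264, ...), so a single shared name can no longer be expanded for both sides of the case. The macro definitions themselves are outside this excerpt; the shapes below are only an illustrative guess built from the OriginType and OtherType aliases visible in each switch.

  // Hypothetical definitions, for orientation only; the real ones live in the
  // (unshown) header that these hunks modify.
  #define CASE_RETURN_OTHER(x) \
    case OriginType::x:        \
      return OtherType::x;

  #define CASE_RETURN_ORIGIN_TO_OTHER(origin, other) \
    case OriginType::origin:                         \
      return OtherType::other;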
diff --git a/chromium/media/remoting/proto_utils.cc b/chromium/media/remoting/proto_utils.cc
index 753d2254d77..a482132c0be 100644
--- a/chromium/media/remoting/proto_utils.cc
+++ b/chromium/media/remoting/proto_utils.cc
@@ -32,13 +32,11 @@ scoped_refptr<DecoderBuffer> ConvertProtoToDecoderBuffer(
}
if (buffer_message.has_timestamp_usec()) {
- buffer->set_timestamp(
- base::TimeDelta::FromMicroseconds(buffer_message.timestamp_usec()));
+ buffer->set_timestamp(base::Microseconds(buffer_message.timestamp_usec()));
}
if (buffer_message.has_duration_usec()) {
- buffer->set_duration(
- base::TimeDelta::FromMicroseconds(buffer_message.duration_usec()));
+ buffer->set_duration(base::Microseconds(buffer_message.duration_usec()));
}
VLOG(3) << "timestamp:" << buffer_message.timestamp_usec()
<< " duration:" << buffer_message.duration_usec();
@@ -50,14 +48,12 @@ scoped_refptr<DecoderBuffer> ConvertProtoToDecoderBuffer(
base::TimeDelta front_discard;
if (buffer_message.has_front_discard_usec()) {
has_discard = true;
- front_discard =
- base::TimeDelta::FromMicroseconds(buffer_message.front_discard_usec());
+ front_discard = base::Microseconds(buffer_message.front_discard_usec());
}
base::TimeDelta back_discard;
if (buffer_message.has_back_discard_usec()) {
has_discard = true;
- back_discard =
- base::TimeDelta::FromMicroseconds(buffer_message.back_discard_usec());
+ back_discard = base::Microseconds(buffer_message.back_discard_usec());
}
if (has_discard) {
@@ -198,7 +194,7 @@ bool ConvertProtoToAudioDecoderConfig(
std::vector<uint8_t>(audio_message.extra_data().begin(),
audio_message.extra_data().end()),
EncryptionScheme::kUnencrypted,
- base::TimeDelta::FromMicroseconds(audio_message.seek_preroll_usec()),
+ base::Microseconds(audio_message.seek_preroll_usec()),
audio_message.codec_delay());
return audio_config->IsValidConfig();
}
@@ -335,8 +331,8 @@ void ConvertProtoToPipelineStatistics(
stats->video_pipeline_info.encryption_type = EncryptionType::kClear;
}
if (stats_message.has_video_frame_duration_average_usec()) {
- stats->video_frame_duration_average = base::TimeDelta::FromMicroseconds(
- stats_message.video_frame_duration_average_usec());
+ stats->video_frame_duration_average =
+ base::Microseconds(stats_message.video_frame_duration_average_usec());
}
}
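The timestamp and duration changes in proto_utils.cc are part of the base/time API migration in this Chromium release: the static base::TimeDelta::FromMicroseconds()/FromMilliseconds()/FromSeconds() factories give way to the free functions base::Microseconds(), base::Milliseconds() and base::Seconds() from base/time/time.h, which construct the same base::TimeDelta values. A minimal sketch of the equivalence (the helper name is illustrative, not from the tree):

  #include "base/time/time.h"

  // Converts a proto microsecond field to a TimeDelta; the commented-out line
  // shows the pre-migration spelling that the new free function replaces.
  base::TimeDelta FromProtoMicros(int64_t usec) {
    // return base::TimeDelta::FromMicroseconds(usec);
    return base::Microseconds(usec);
  }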
diff --git a/chromium/media/remoting/proto_utils_unittest.cc b/chromium/media/remoting/proto_utils_unittest.cc
index 880d0e2d709..2d643f15ea1 100644
--- a/chromium/media/remoting/proto_utils_unittest.cc
+++ b/chromium/media/remoting/proto_utils_unittest.cc
@@ -65,7 +65,7 @@ TEST_F(ProtoUtilsTest, PassValidDecoderBuffer) {
size_t buffer_size = sizeof(buffer) / sizeof(uint8_t);
const uint8_t side_buffer[] = "XX";
size_t side_buffer_size = sizeof(side_buffer) / sizeof(uint8_t);
- base::TimeDelta pts = base::TimeDelta::FromMilliseconds(5);
+ base::TimeDelta pts = base::Milliseconds(5);
// 1. To DecoderBuffer
scoped_refptr<DecoderBuffer> input_buffer = DecoderBuffer::CopyFrom(
@@ -99,7 +99,7 @@ TEST_F(ProtoUtilsTest, PassValidDecoderBuffer) {
TEST_F(ProtoUtilsTest, AudioDecoderConfigConversionTest) {
const char extra_data[4] = {'A', 'C', 'E', 'G'};
AudioDecoderConfig audio_config(
- kCodecAAC, kSampleFormatF32, CHANNEL_LAYOUT_MONO, 48000,
+ AudioCodec::kAAC, kSampleFormatF32, CHANNEL_LAYOUT_MONO, 48000,
std::vector<uint8_t>(std::begin(extra_data), std::end(extra_data)),
EncryptionScheme::kUnencrypted);
ASSERT_TRUE(audio_config.IsValidConfig());
diff --git a/chromium/media/remoting/receiver.cc b/chromium/media/remoting/receiver.cc
index fa5fda9ed55..d7944c62286 100644
--- a/chromium/media/remoting/receiver.cc
+++ b/chromium/media/remoting/receiver.cc
@@ -19,14 +19,15 @@
#include "media/remoting/receiver_controller.h"
#include "media/remoting/stream_provider.h"
+using openscreen::cast::RpcMessenger;
+
namespace media {
namespace remoting {
namespace {
// The period to send the TimeUpdate RPC message to update the media time on
// sender side.
-constexpr base::TimeDelta kTimeUpdateInterval =
- base::TimeDelta::FromMilliseconds(250);
+constexpr base::TimeDelta kTimeUpdateInterval = base::Milliseconds(250);
} // namespace
@@ -40,32 +41,35 @@ Receiver::Receiver(
: rpc_handle_(rpc_handle),
remote_handle_(remote_handle),
receiver_controller_(receiver_controller),
- rpc_broker_(receiver_controller_->rpc_broker()),
+ rpc_messenger_(receiver_controller_->rpc_messenger()),
main_task_runner_(base::ThreadTaskRunnerHandle::Get()),
media_task_runner_(media_task_runner),
renderer_(std::move(renderer)),
acquire_renderer_done_cb_(std::move(acquire_renderer_done_cb)) {
- DCHECK(rpc_handle_ != RpcBroker::kInvalidHandle);
+ DCHECK(rpc_handle_ != RpcMessenger::kInvalidHandle);
DCHECK(receiver_controller_);
- DCHECK(rpc_broker_);
+ DCHECK(rpc_messenger_);
DCHECK(renderer_);
// Note: The constructor is running on the main thread, but will be destroyed
// on the media thread. Therefore, all weak pointers must be dereferenced on
// the media thread.
- const RpcBroker::ReceiveMessageCallback receive_callback = base::BindPostTask(
+ auto receive_callback = base::BindPostTask(
media_task_runner_,
BindRepeating(&Receiver::OnReceivedRpc, weak_factory_.GetWeakPtr()));
  // Listen for all renderer RPC messages.
- rpc_broker_->RegisterMessageReceiverCallback(rpc_handle_, receive_callback);
+ rpc_messenger_->RegisterMessageReceiverCallback(
+ rpc_handle_, [cb = receive_callback](
+ std::unique_ptr<openscreen::cast::RpcMessage> message) {
+ cb.Run(std::move(message));
+ });
+
VerifyAcquireRendererDone();
}
Receiver::~Receiver() {
- rpc_broker_->UnregisterMessageReceiverCallback(rpc_handle_);
- rpc_broker_->UnregisterMessageReceiverCallback(
- RpcBroker::kAcquireRendererHandle);
+ rpc_messenger_->UnregisterMessageReceiverCallback(rpc_handle_);
}
// Receiver::Initialize() will be called by the local pipeline, it would only
@@ -106,12 +110,11 @@ base::TimeDelta Receiver::GetMediaTime() {
void Receiver::SendRpcMessageOnMainThread(
std::unique_ptr<openscreen::cast::RpcMessage> message) {
- // |rpc_broker_| is owned by |receiver_controller_| which is a singleton per
- // process, so it's safe to use Unretained() here.
+ // |rpc_messenger_| is owned by |receiver_controller_| which is a singleton
+ // per process, so it's safe to use Unretained() here.
main_task_runner_->PostTask(
- FROM_HERE,
- base::BindOnce(&RpcBroker::SendMessageToRemote,
- base::Unretained(rpc_broker_), std::move(message)));
+ FROM_HERE, base::BindOnce(&RpcMessenger::SendMessageToRemote,
+ base::Unretained(rpc_messenger_), *message));
}
void Receiver::OnReceivedRpc(
@@ -140,14 +143,14 @@ void Receiver::OnReceivedRpc(
}
void Receiver::SetRemoteHandle(int remote_handle) {
- DCHECK_NE(remote_handle, RpcBroker::kInvalidHandle);
- DCHECK_EQ(remote_handle_, RpcBroker::kInvalidHandle);
+ DCHECK_NE(remote_handle, RpcMessenger::kInvalidHandle);
+ DCHECK_EQ(remote_handle_, RpcMessenger::kInvalidHandle);
remote_handle_ = remote_handle;
VerifyAcquireRendererDone();
}
void Receiver::VerifyAcquireRendererDone() {
- if (remote_handle_ == RpcBroker::kInvalidHandle)
+ if (remote_handle_ == RpcMessenger::kInvalidHandle)
return;
DCHECK(acquire_renderer_done_cb_);
@@ -237,8 +240,7 @@ void Receiver::RpcStartPlayingFrom(
std::unique_ptr<openscreen::cast::RpcMessage> message) {
DCHECK(media_task_runner_->BelongsToCurrentThread());
- base::TimeDelta time =
- base::TimeDelta::FromMicroseconds(message->integer64_value());
+ base::TimeDelta time = base::Microseconds(message->integer64_value());
renderer_->StartPlayingFrom(time);
ScheduleMediaTimeUpdates();
}
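receiver.cc also switches message-receiver registration from RpcBroker, which took a base::RepeatingCallback, to openscreen::cast::RpcMessenger, whose RegisterMessageReceiverCallback takes a std::function-style callback. The diff therefore wraps the existing base callback in a lambda that forwards to Run(). A self-contained sketch of that adapter, with the helper name being an illustrative assumption:

  #include <functional>
  #include <memory>
  #include <utility>

  #include "base/callback.h"
  #include "third_party/openscreen/src/cast/streaming/remoting.pb.h"

  // Wraps a base::RepeatingCallback so it can be handed to an API that expects
  // a std::function with the same argument list.
  std::function<void(std::unique_ptr<openscreen::cast::RpcMessage>)>
  AdaptToStdFunction(
      base::RepeatingCallback<void(std::unique_ptr<openscreen::cast::RpcMessage>)>
          callback) {
    return [callback](std::unique_ptr<openscreen::cast::RpcMessage> message) {
      callback.Run(std::move(message));
    };
  }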
diff --git a/chromium/media/remoting/receiver.h b/chromium/media/remoting/receiver.h
index 885dc2e1b74..4d44c7d5f0a 100644
--- a/chromium/media/remoting/receiver.h
+++ b/chromium/media/remoting/receiver.h
@@ -16,8 +16,14 @@
#include "media/base/demuxer_stream.h"
#include "media/base/renderer.h"
#include "media/base/renderer_client.h"
-#include "media/remoting/rpc_broker.h"
#include "third_party/openscreen/src/cast/streaming/remoting.pb.h"
+#include "third_party/openscreen/src/cast/streaming/rpc_messenger.h"
+
+namespace openscreen {
+namespace cast {
+class RpcMessenger;
+}
+} // namespace openscreen
namespace base {
class SingleThreadTaskRunner;
@@ -27,7 +33,6 @@ namespace media {
namespace remoting {
class ReceiverController;
-class RpcBroker;
// Receiver runs on a remote device, and forwards the information sent from a
// CourierRenderer to |renderer_|, which actually renders the media.
@@ -123,9 +128,9 @@ class Receiver final : public Renderer, public RendererClient {
int remote_handle_;
ReceiverController* const receiver_controller_; // Outlives this class.
- RpcBroker* const rpc_broker_; // Outlives this class.
+ openscreen::cast::RpcMessenger* const rpc_messenger_; // Outlives this class.
- // Calling SendMessageCallback() of |rpc_broker_| should be on main thread.
+ // Calling SendMessageCallback() of |rpc_messenger_| should be on main thread.
const scoped_refptr<base::SingleThreadTaskRunner> main_task_runner_;
// Media tasks should run on media thread.
diff --git a/chromium/media/remoting/receiver_controller.cc b/chromium/media/remoting/receiver_controller.cc
index 16e1ba469c1..d17c47a1f9f 100644
--- a/chromium/media/remoting/receiver_controller.cc
+++ b/chromium/media/remoting/receiver_controller.cc
@@ -17,8 +17,9 @@ ReceiverController* ReceiverController::GetInstance() {
}
ReceiverController::ReceiverController()
- : rpc_broker_(base::BindRepeating(&ReceiverController::OnSendRpc,
- base::Unretained(this))),
+ : rpc_messenger_([this](std::vector<uint8_t> message) {
+ OnSendRpc(std::move(message));
+ }),
main_task_runner_(base::ThreadTaskRunnerHandle::Get()) {}
ReceiverController::~ReceiverController() = default;
@@ -93,11 +94,10 @@ void ReceiverController::OnMessageFromSource(
if (!rpc_message->ParseFromArray(message.data(), message.size()))
return;
- rpc_broker_.ProcessMessageFromRemote(std::move(rpc_message));
+ rpc_messenger_.ProcessMessageFromRemote(std::move(rpc_message));
}
-void ReceiverController::OnSendRpc(
- std::unique_ptr<std::vector<uint8_t>> message) {
+void ReceiverController::OnSendRpc(std::vector<uint8_t> message) {
if (!main_task_runner_->BelongsToCurrentThread()) {
// |this| is a singleton per process, it would be safe to use
// base::Unretained() here.
@@ -108,9 +108,8 @@ void ReceiverController::OnSendRpc(
}
DCHECK(media_remotee_.is_bound());
- std::vector<uint8_t> binary_message = *message;
if (media_remotee_.is_bound())
- media_remotee_->SendMessageToSource(binary_message);
+ media_remotee_->SendMessageToSource(message);
}
} // namespace remoting
diff --git a/chromium/media/remoting/receiver_controller.h b/chromium/media/remoting/receiver_controller.h
index 1071de2760a..28108f1addc 100644
--- a/chromium/media/remoting/receiver_controller.h
+++ b/chromium/media/remoting/receiver_controller.h
@@ -9,17 +9,17 @@
#include "base/no_destructor.h"
#include "media/mojo/mojom/remoting.mojom.h"
-#include "media/remoting/rpc_broker.h"
#include "mojo/public/cpp/bindings/pending_remote.h"
#include "mojo/public/cpp/bindings/receiver.h"
#include "mojo/public/cpp/bindings/remote.h"
+#include "third_party/openscreen/src/cast/streaming/rpc_messenger.h"
namespace media {
namespace remoting {
-// ReceiverController is the bridge that owns |rpc_broker_| to allow Receivers
-// and StreamProvider::MediaStreams to communicate with the sender via RPC
-// calls.
+// ReceiverController is the bridge that owns |rpc_messenger_| to allow
+// Receivers and StreamProvider::MediaStreams to communicate with the sender via
+// RPC calls.
//
// It also forwards calls to a |media_remotee_| instance, which will be
// implemented in the browser process. Currently, the only use case will be on
@@ -39,8 +39,7 @@ class ReceiverController : mojom::RemotingSink {
mojo::PendingRemote<mojom::RemotingDataStreamReceiver> audio_stream,
mojo::PendingRemote<mojom::RemotingDataStreamReceiver> video_stream);
- // The reference of |rpc_broker_|.
- media::remoting::RpcBroker* rpc_broker() { return &rpc_broker_; }
+ openscreen::cast::RpcMessenger* rpc_messenger() { return &rpc_messenger_; }
private:
friend base::NoDestructor<ReceiverController>;
@@ -53,10 +52,10 @@ class ReceiverController : mojom::RemotingSink {
// media::mojom::RemotingSink implementation.
void OnMessageFromSource(const std::vector<uint8_t>& message) override;
- // Callback for |rpc_broker_| to send messages.
- void OnSendRpc(std::unique_ptr<std::vector<uint8_t>> message);
+ // Callback for |rpc_messenger_| to send messages.
+ void OnSendRpc(std::vector<uint8_t> message);
- RpcBroker rpc_broker_;
+ openscreen::cast::RpcMessenger rpc_messenger_;
const scoped_refptr<base::SingleThreadTaskRunner> main_task_runner_;
diff --git a/chromium/media/remoting/receiver_unittest.cc b/chromium/media/remoting/receiver_unittest.cc
index e5c46712d62..bc149ab7224 100644
--- a/chromium/media/remoting/receiver_unittest.cc
+++ b/chromium/media/remoting/receiver_unittest.cc
@@ -23,6 +23,7 @@
#include "third_party/abseil-cpp/absl/types/optional.h"
using base::test::RunOnceCallback;
+using openscreen::cast::RpcMessenger;
using testing::_;
using testing::AtLeast;
using testing::NiceMock;
@@ -33,13 +34,14 @@ namespace remoting {
class MockSender {
public:
- MockSender(RpcBroker* rpc_broker, int remote_handle)
- : rpc_broker_(rpc_broker),
- rpc_handle_(rpc_broker->GetUniqueHandle()),
+ MockSender(RpcMessenger* rpc_messenger, int remote_handle)
+ : rpc_messenger_(rpc_messenger),
+ rpc_handle_(rpc_messenger->GetUniqueHandle()),
remote_handle_(remote_handle) {
- rpc_broker_->RegisterMessageReceiverCallback(
- rpc_handle_, base::BindRepeating(&MockSender::OnReceivedRpc,
- base::Unretained(this)));
+ rpc_messenger_->RegisterMessageReceiverCallback(
+ rpc_handle_, [this](std::unique_ptr<openscreen::cast::RpcMessage> rpc) {
+ this->OnReceivedRpc(std::move(rpc));
+ });
}
MOCK_METHOD(void, AcquireRendererDone, ());
@@ -147,58 +149,58 @@ class MockSender {
}
void SendRpcAcquireRenderer() {
- auto rpc = std::make_unique<openscreen::cast::RpcMessage>();
- rpc->set_handle(RpcBroker::kAcquireRendererHandle);
- rpc->set_proc(openscreen::cast::RpcMessage::RPC_ACQUIRE_RENDERER);
- rpc->set_integer_value(rpc_handle_);
- rpc_broker_->SendMessageToRemote(std::move(rpc));
+ openscreen::cast::RpcMessage rpc;
+ rpc.set_handle(RpcMessenger::kAcquireRendererHandle);
+ rpc.set_proc(openscreen::cast::RpcMessage::RPC_ACQUIRE_RENDERER);
+ rpc.set_integer_value(rpc_handle_);
+ rpc_messenger_->SendMessageToRemote(rpc);
}
void SendRpcInitialize() {
- auto rpc = std::make_unique<openscreen::cast::RpcMessage>();
- rpc->set_handle(remote_handle_);
- rpc->set_proc(openscreen::cast::RpcMessage::RPC_R_INITIALIZE);
- rpc_broker_->SendMessageToRemote(std::move(rpc));
+ openscreen::cast::RpcMessage rpc;
+ rpc.set_handle(remote_handle_);
+ rpc.set_proc(openscreen::cast::RpcMessage::RPC_R_INITIALIZE);
+ rpc_messenger_->SendMessageToRemote(rpc);
}
void SendRpcSetPlaybackRate(double playback_rate) {
- auto rpc = std::make_unique<openscreen::cast::RpcMessage>();
- rpc->set_handle(remote_handle_);
- rpc->set_proc(openscreen::cast::RpcMessage::RPC_R_SETPLAYBACKRATE);
- rpc->set_double_value(playback_rate);
- rpc_broker_->SendMessageToRemote(std::move(rpc));
+ openscreen::cast::RpcMessage rpc;
+ rpc.set_handle(remote_handle_);
+ rpc.set_proc(openscreen::cast::RpcMessage::RPC_R_SETPLAYBACKRATE);
+ rpc.set_double_value(playback_rate);
+ rpc_messenger_->SendMessageToRemote(rpc);
}
void SendRpcFlushUntil(uint32_t audio_count, uint32_t video_count) {
- auto rpc = std::make_unique<openscreen::cast::RpcMessage>();
- rpc->set_handle(remote_handle_);
- rpc->set_proc(openscreen::cast::RpcMessage::RPC_R_FLUSHUNTIL);
+ openscreen::cast::RpcMessage rpc;
+ rpc.set_handle(remote_handle_);
+ rpc.set_proc(openscreen::cast::RpcMessage::RPC_R_FLUSHUNTIL);
openscreen::cast::RendererFlushUntil* message =
- rpc->mutable_renderer_flushuntil_rpc();
+ rpc.mutable_renderer_flushuntil_rpc();
message->set_audio_count(audio_count);
message->set_video_count(video_count);
message->set_callback_handle(rpc_handle_);
- rpc_broker_->SendMessageToRemote(std::move(rpc));
+ rpc_messenger_->SendMessageToRemote(rpc);
}
void SendRpcStartPlayingFrom(base::TimeDelta time) {
- auto rpc = std::make_unique<openscreen::cast::RpcMessage>();
- rpc->set_handle(remote_handle_);
- rpc->set_proc(openscreen::cast::RpcMessage::RPC_R_STARTPLAYINGFROM);
- rpc->set_integer64_value(time.InMicroseconds());
- rpc_broker_->SendMessageToRemote(std::move(rpc));
+ openscreen::cast::RpcMessage rpc;
+ rpc.set_handle(remote_handle_);
+ rpc.set_proc(openscreen::cast::RpcMessage::RPC_R_STARTPLAYINGFROM);
+ rpc.set_integer64_value(time.InMicroseconds());
+ rpc_messenger_->SendMessageToRemote(rpc);
}
void SendRpcSetVolume(float volume) {
- auto rpc = std::make_unique<openscreen::cast::RpcMessage>();
- rpc->set_handle(remote_handle_);
- rpc->set_proc(openscreen::cast::RpcMessage::RPC_R_SETVOLUME);
- rpc->set_double_value(volume);
- rpc_broker_->SendMessageToRemote(std::move(rpc));
+ openscreen::cast::RpcMessage rpc;
+ rpc.set_handle(remote_handle_);
+ rpc.set_proc(openscreen::cast::RpcMessage::RPC_R_SETVOLUME);
+ rpc.set_double_value(volume);
+ rpc_messenger_->SendMessageToRemote(rpc);
}
private:
- RpcBroker* const rpc_broker_;
+ RpcMessenger* const rpc_messenger_;
const int rpc_handle_;
const int remote_handle_;
};
@@ -213,21 +215,25 @@ class ReceiverTest : public ::testing::Test {
mock_controller_->mock_remotee()->BindNewPipeAndPassRemote());
mock_remotee_ = mock_controller_->mock_remotee();
- rpc_broker_ = mock_controller_->rpc_broker();
- receiver_renderer_handle_ = rpc_broker_->GetUniqueHandle();
+ rpc_messenger_ = mock_controller_->rpc_messenger();
+ receiver_renderer_handle_ = rpc_messenger_->GetUniqueHandle();
mock_sender_ = std::make_unique<StrictMock<MockSender>>(
- rpc_broker_, receiver_renderer_handle_);
-
- rpc_broker_->RegisterMessageReceiverCallback(
- RpcBroker::kAcquireRendererHandle,
- base::BindRepeating(&ReceiverTest::OnReceivedRpc,
- weak_factory_.GetWeakPtr()));
+ rpc_messenger_, receiver_renderer_handle_);
+
+ rpc_messenger_->RegisterMessageReceiverCallback(
+ RpcMessenger::kAcquireRendererHandle,
+ [ptr = weak_factory_.GetWeakPtr()](
+ std::unique_ptr<openscreen::cast::RpcMessage> message) {
+ if (ptr) {
+ ptr->OnReceivedRpc(std::move(message));
+ }
+ });
}
void TearDown() override {
- rpc_broker_->UnregisterMessageReceiverCallback(
- RpcBroker::kAcquireRendererHandle);
+ rpc_messenger_->UnregisterMessageReceiverCallback(
+ RpcMessenger::kAcquireRendererHandle);
}
void OnReceivedRpc(std::unique_ptr<openscreen::cast::RpcMessage> message) {
@@ -240,9 +246,9 @@ class ReceiverTest : public ::testing::Test {
void OnAcquireRenderer(
std::unique_ptr<openscreen::cast::RpcMessage> message) {
DCHECK(message->has_integer_value());
- DCHECK(message->integer_value() != RpcBroker::kInvalidHandle);
+ DCHECK(message->integer_value() != RpcMessenger::kInvalidHandle);
- if (sender_renderer_handle_ == RpcBroker::kInvalidHandle) {
+ if (sender_renderer_handle_ == RpcMessenger::kInvalidHandle) {
sender_renderer_handle_ = message->integer_value();
SetRemoteHandle();
}
@@ -253,11 +259,11 @@ class ReceiverTest : public ::testing::Test {
<< ": Issues RPC_ACQUIRE_RENDERER_DONE RPC message. remote_handle="
<< sender_renderer_handle_
<< " rpc_handle=" << receiver_renderer_handle;
- auto rpc = std::make_unique<openscreen::cast::RpcMessage>();
- rpc->set_handle(sender_renderer_handle_);
- rpc->set_proc(openscreen::cast::RpcMessage::RPC_ACQUIRE_RENDERER_DONE);
- rpc->set_integer_value(receiver_renderer_handle);
- rpc_broker_->SendMessageToRemote(std::move(rpc));
+ openscreen::cast::RpcMessage rpc;
+ rpc.set_handle(sender_renderer_handle_);
+ rpc.set_proc(openscreen::cast::RpcMessage::RPC_ACQUIRE_RENDERER_DONE);
+ rpc.set_integer_value(receiver_renderer_handle);
+ rpc_messenger_->SendMessageToRemote(rpc);
}
void CreateReceiver() {
@@ -286,16 +292,16 @@ class ReceiverTest : public ::testing::Test {
base::test::TaskEnvironment task_environment_;
- int sender_renderer_handle_ = RpcBroker::kInvalidHandle;
- int receiver_renderer_handle_ = RpcBroker::kInvalidHandle;
+ int sender_renderer_handle_ = RpcMessenger::kInvalidHandle;
+ int receiver_renderer_handle_ = RpcMessenger::kInvalidHandle;
MockMediaResource mock_media_resource_;
- MockRenderer* mock_renderer_;
+ MockRenderer* mock_renderer_ = nullptr;
std::unique_ptr<MockSender> mock_sender_;
- RpcBroker* rpc_broker_;
+ RpcMessenger* rpc_messenger_ = nullptr;
MockRemotee* mock_remotee_;
- MockReceiverController* mock_controller_;
+ MockReceiverController* mock_controller_ = nullptr;
std::unique_ptr<Receiver> receiver_;
base::WeakPtrFactory<ReceiverTest> weak_factory_{this};
@@ -389,7 +395,7 @@ TEST_F(ReceiverTest, RpcRendererMessages) {
EXPECT_EQ(flush_video_count, mock_remotee_->flush_video_count());
// StartPlayingFrom
- const base::TimeDelta time = base::TimeDelta::FromSeconds(100);
+ const base::TimeDelta time = base::Seconds(100);
EXPECT_CALL(*mock_renderer_, StartPlayingFrom(time)).Times(1);
mock_sender_->SendRpcStartPlayingFrom(time);
task_environment_.RunUntilIdle();
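The MockSender hunks above show the sending side of the same migration: RpcBroker::SendMessageToRemote consumed a heap-allocated std::unique_ptr<openscreen::cast::RpcMessage>, whereas RpcMessenger::SendMessageToRemote is handed the RpcMessage directly, so callers now build it on the stack (or dereference an existing pointer, as Receiver::SendRpcMessageOnMainThread does with *message). A condensed before/after, restating SendRpcInitialize() from the diff:

  // Before: ownership of a heap message was transferred to the broker.
  //   auto rpc = std::make_unique<openscreen::cast::RpcMessage>();
  //   rpc->set_handle(remote_handle_);
  //   rpc->set_proc(openscreen::cast::RpcMessage::RPC_R_INITIALIZE);
  //   rpc_broker_->SendMessageToRemote(std::move(rpc));

  // After: the message lives on the stack and is passed to the messenger as is.
  openscreen::cast::RpcMessage rpc;
  rpc.set_handle(remote_handle_);
  rpc.set_proc(openscreen::cast::RpcMessage::RPC_R_INITIALIZE);
  rpc_messenger_->SendMessageToRemote(rpc);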
diff --git a/chromium/media/remoting/remoting_renderer_factory.cc b/chromium/media/remoting/remoting_renderer_factory.cc
index 5617967e503..334b80ca9bf 100644
--- a/chromium/media/remoting/remoting_renderer_factory.cc
+++ b/chromium/media/remoting/remoting_renderer_factory.cc
@@ -9,6 +9,8 @@
#include "media/remoting/receiver_controller.h"
#include "media/remoting/stream_provider.h"
+using openscreen::cast::RpcMessenger;
+
namespace media {
namespace remoting {
@@ -17,8 +19,8 @@ RemotingRendererFactory::RemotingRendererFactory(
std::unique_ptr<RendererFactory> renderer_factory,
const scoped_refptr<base::SingleThreadTaskRunner>& media_task_runner)
: receiver_controller_(ReceiverController::GetInstance()),
- rpc_broker_(receiver_controller_->rpc_broker()),
- renderer_handle_(rpc_broker_->GetUniqueHandle()),
+ rpc_messenger_(receiver_controller_->rpc_messenger()),
+ renderer_handle_(rpc_messenger_->GetUniqueHandle()),
waiting_for_remote_handle_receiver_(nullptr),
real_renderer_factory_(std::move(renderer_factory)),
media_task_runner_(media_task_runner) {
@@ -26,16 +28,20 @@ RemotingRendererFactory::RemotingRendererFactory(
DCHECK(receiver_controller_);
  // Register the callback to listen for the RPC_ACQUIRE_RENDERER message.
- rpc_broker_->RegisterMessageReceiverCallback(
- RpcBroker::kAcquireRendererHandle,
- base::BindRepeating(&RemotingRendererFactory::OnAcquireRenderer,
- weak_factory_.GetWeakPtr()));
+ rpc_messenger_->RegisterMessageReceiverCallback(
+ RpcMessenger::kAcquireRendererHandle,
+ [ptr = weak_factory_.GetWeakPtr()](
+ std::unique_ptr<openscreen::cast::RpcMessage> message) {
+ if (ptr) {
+ ptr->OnAcquireRenderer(std::move(message));
+ }
+ });
receiver_controller_->Initialize(std::move(remotee));
}
RemotingRendererFactory::~RemotingRendererFactory() {
- rpc_broker_->UnregisterMessageReceiverCallback(
- RpcBroker::kAcquireRendererHandle);
+ rpc_messenger_->UnregisterMessageReceiverCallback(
+ RpcMessenger::kAcquireRendererHandle);
}
std::unique_ptr<Renderer> RemotingRendererFactory::CreateRenderer(
@@ -59,7 +65,7 @@ std::unique_ptr<Renderer> RemotingRendererFactory::CreateRenderer(
// If we haven't received a RPC_ACQUIRE_RENDERER yet, keep a reference to
// |receiver|, and set its remote handle when we get the call to
// OnAcquireRenderer().
- if (remote_renderer_handle_ == RpcBroker::kInvalidHandle)
+ if (remote_renderer_handle_ == RpcMessenger::kInvalidHandle)
waiting_for_remote_handle_receiver_ = receiver->GetWeakPtr();
return std::move(receiver);
@@ -77,7 +83,7 @@ void RemotingRendererFactory::OnReceivedRpc(
void RemotingRendererFactory::OnAcquireRenderer(
std::unique_ptr<openscreen::cast::RpcMessage> message) {
DCHECK(message->has_integer_value());
- DCHECK(message->integer_value() != RpcBroker::kInvalidHandle);
+ DCHECK(message->integer_value() != RpcMessenger::kInvalidHandle);
remote_renderer_handle_ = message->integer_value();
@@ -105,11 +111,11 @@ void RemotingRendererFactory::OnAcquireRendererDone(int receiver_rpc_handle) {
DVLOG(3) << __func__
<< ": Issues RPC_ACQUIRE_RENDERER_DONE RPC message. remote_handle="
<< remote_renderer_handle_ << " rpc_handle=" << receiver_rpc_handle;
- auto rpc = std::make_unique<openscreen::cast::RpcMessage>();
- rpc->set_handle(remote_renderer_handle_);
- rpc->set_proc(openscreen::cast::RpcMessage::RPC_ACQUIRE_RENDERER_DONE);
- rpc->set_integer_value(receiver_rpc_handle);
- rpc_broker_->SendMessageToRemote(std::move(rpc));
+ openscreen::cast::RpcMessage rpc;
+ rpc.set_handle(remote_renderer_handle_);
+ rpc.set_proc(openscreen::cast::RpcMessage::RPC_ACQUIRE_RENDERER_DONE);
+ rpc.set_integer_value(receiver_rpc_handle);
+ rpc_messenger_->SendMessageToRemote(rpc);
// Once RPC_ACQUIRE_RENDERER_DONE is sent, it implies there is no Receiver
  // instance that is waiting for the remote handle.
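One detail worth noting in RemotingRendererFactory (and likewise in Receiver and ReceiverTest above): the new registration lambdas check the captured base::WeakPtr explicitly before forwarding. base::BindRepeating to a method with a WeakPtr receiver silently becomes a no-op once the pointer is invalidated, but a plain lambda handed to a std::function-based API gets no such help, so the if (ptr) guard has to be spelled out, as restated here from the hunk above:

  rpc_messenger_->RegisterMessageReceiverCallback(
      RpcMessenger::kAcquireRendererHandle,
      [ptr = weak_factory_.GetWeakPtr()](
          std::unique_ptr<openscreen::cast::RpcMessage> message) {
        if (ptr) {
          ptr->OnAcquireRenderer(std::move(message));
        }
      });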
diff --git a/chromium/media/remoting/remoting_renderer_factory.h b/chromium/media/remoting/remoting_renderer_factory.h
index 99efd598939..00b735658f4 100644
--- a/chromium/media/remoting/remoting_renderer_factory.h
+++ b/chromium/media/remoting/remoting_renderer_factory.h
@@ -7,8 +7,8 @@
#include "media/base/renderer_factory.h"
#include "media/mojo/mojom/remoting.mojom.h"
-#include "media/remoting/rpc_broker.h"
#include "mojo/public/cpp/bindings/pending_remote.h"
+#include "third_party/openscreen/src/cast/streaming/rpc_messenger.h"
namespace media {
namespace remoting {
@@ -42,18 +42,18 @@ class RemotingRendererFactory : public RendererFactory {
// Indicates whether RPC_ACQUIRE_RENDERER_DONE is sent or not.
bool is_acquire_renderer_done_sent_ = false;
- ReceiverController* receiver_controller_;
+ ReceiverController* const receiver_controller_;
- RpcBroker* rpc_broker_; // Outlives this class.
+ openscreen::cast::RpcMessenger* const rpc_messenger_;
// The RPC handle used by all Receiver instances created by |this|. Sent only
// once to the sender side, through RPC_ACQUIRE_RENDERER_DONE, regardless of
  // how many times CreateRenderer() is called.
- const int renderer_handle_ = RpcBroker::kInvalidHandle;
+ const int renderer_handle_ = openscreen::cast::RpcMessenger::kInvalidHandle;
// The RPC handle of the CourierRenderer on the sender side. Will be received
  // once, via an RPC_ACQUIRE_RENDERER message.
- int remote_renderer_handle_ = RpcBroker::kInvalidHandle;
+ int remote_renderer_handle_ = openscreen::cast::RpcMessenger::kInvalidHandle;
// Used to set remote handle if receiving RPC_ACQUIRE_RENDERER after
// CreateRenderer() is called.
diff --git a/chromium/media/remoting/renderer_controller.cc b/chromium/media/remoting/renderer_controller.cc
index 0fe8e123cc5..a5ab09a42e3 100644
--- a/chromium/media/remoting/renderer_controller.cc
+++ b/chromium/media/remoting/renderer_controller.cc
@@ -26,7 +26,7 @@ namespace {
// The duration to delay the start of media remoting to ensure all preconditions
// are held stable before switching to media remoting.
-constexpr base::TimeDelta kDelayedStart = base::TimeDelta::FromSeconds(5);
+constexpr base::TimeDelta kDelayedStart = base::Seconds(5);
constexpr int kPixelsPerSec4k = 3840 * 2160 * 30; // 4k 30fps.
constexpr int kPixelsPerSec2k = 1920 * 1080 * 30; // 1080p 30fps.
@@ -108,8 +108,9 @@ RendererController::RendererController(
mojo::PendingReceiver<mojom::RemotingSource> source_receiver,
mojo::PendingRemote<mojom::Remoter> remoter)
#if BUILDFLAG(ENABLE_MEDIA_REMOTING_RPC)
- : rpc_broker_(base::BindRepeating(&RendererController::SendMessageToSink,
- base::Unretained(this))),
+ : rpc_messenger_([this](std::vector<uint8_t> message) {
+ SendMessageToSink(std::move(message));
+ }),
#else
:
#endif
@@ -187,7 +188,7 @@ void RendererController::OnMessageFromSink(
return;
}
- rpc_broker_.ProcessMessageFromRemote(std::move(rpc));
+ rpc_messenger_.ProcessMessageFromRemote(std::move(rpc));
#endif
}
@@ -213,10 +214,11 @@ void RendererController::OnRemotePlaybackDisabled(bool disabled) {
}
#if BUILDFLAG(ENABLE_MEDIA_REMOTING_RPC)
-base::WeakPtr<RpcBroker> RendererController::GetRpcBroker() {
+openscreen::WeakPtr<openscreen::cast::RpcMessenger>
+RendererController::GetRpcMessenger() {
DCHECK(thread_checker_.CalledOnValidThread());
- return rpc_broker_.GetWeakPtr();
+ return rpc_messenger_.GetWeakPtr();
}
#endif
@@ -375,16 +377,16 @@ RemotingCompatibility RendererController::GetVideoCompatibility() const {
bool compatible = false;
switch (pipeline_metadata_.video_decoder_config.codec()) {
- case VideoCodec::kCodecH264:
+ case VideoCodec::kH264:
compatible = HasVideoCapability(RemotingSinkVideoCapability::CODEC_H264);
break;
- case VideoCodec::kCodecVP8:
+ case VideoCodec::kVP8:
compatible = HasVideoCapability(RemotingSinkVideoCapability::CODEC_VP8);
break;
- case VideoCodec::kCodecVP9:
+ case VideoCodec::kVP9:
compatible = HasVideoCapability(RemotingSinkVideoCapability::CODEC_VP9);
break;
- case VideoCodec::kCodecHEVC:
+ case VideoCodec::kHEVC:
compatible = HasVideoCapability(RemotingSinkVideoCapability::CODEC_HEVC);
break;
default:
@@ -405,26 +407,26 @@ RemotingCompatibility RendererController::GetAudioCompatibility() const {
bool compatible = false;
switch (pipeline_metadata_.audio_decoder_config.codec()) {
- case AudioCodec::kCodecAAC:
+ case AudioCodec::kAAC:
compatible = HasAudioCapability(RemotingSinkAudioCapability::CODEC_AAC);
break;
- case AudioCodec::kCodecOpus:
+ case AudioCodec::kOpus:
compatible = HasAudioCapability(RemotingSinkAudioCapability::CODEC_OPUS);
break;
- case AudioCodec::kCodecMP3:
- case AudioCodec::kCodecPCM:
- case AudioCodec::kCodecVorbis:
- case AudioCodec::kCodecFLAC:
- case AudioCodec::kCodecAMR_NB:
- case AudioCodec::kCodecAMR_WB:
- case AudioCodec::kCodecPCM_MULAW:
- case AudioCodec::kCodecGSM_MS:
- case AudioCodec::kCodecPCM_S16BE:
- case AudioCodec::kCodecPCM_S24BE:
- case AudioCodec::kCodecEAC3:
- case AudioCodec::kCodecPCM_ALAW:
- case AudioCodec::kCodecALAC:
- case AudioCodec::kCodecAC3:
+ case AudioCodec::kMP3:
+ case AudioCodec::kPCM:
+ case AudioCodec::kVorbis:
+ case AudioCodec::kFLAC:
+ case AudioCodec::kAMR_NB:
+ case AudioCodec::kAMR_WB:
+ case AudioCodec::kPCM_MULAW:
+ case AudioCodec::kGSM_MS:
+ case AudioCodec::kPCM_S16BE:
+ case AudioCodec::kPCM_S24BE:
+ case AudioCodec::kEAC3:
+ case AudioCodec::kPCM_ALAW:
+ case AudioCodec::kALAC:
+ case AudioCodec::kAC3:
compatible =
HasAudioCapability(RemotingSinkAudioCapability::CODEC_BASELINE_SET);
break;
@@ -632,10 +634,9 @@ bool RendererController::SinkSupportsRemoting() const {
return HasFeatureCapability(RemotingSinkFeature::RENDERING);
}
-void RendererController::SendMessageToSink(
- std::unique_ptr<std::vector<uint8_t>> message) {
+void RendererController::SendMessageToSink(std::vector<uint8_t> message) {
DCHECK(thread_checker_.CalledOnValidThread());
- remoter_->SendMessageToSink(*message);
+ remoter_->SendMessageToSink(message);
}
#if defined(OS_ANDROID)
@@ -648,22 +649,22 @@ bool RendererController::IsAudioRemotePlaybackSupported() const {
return false;
switch (pipeline_metadata_.audio_decoder_config.codec()) {
- case AudioCodec::kCodecAAC:
- case AudioCodec::kCodecOpus:
- case AudioCodec::kCodecMP3:
- case AudioCodec::kCodecPCM:
- case AudioCodec::kCodecVorbis:
- case AudioCodec::kCodecFLAC:
- case AudioCodec::kCodecAMR_NB:
- case AudioCodec::kCodecAMR_WB:
- case AudioCodec::kCodecPCM_MULAW:
- case AudioCodec::kCodecGSM_MS:
- case AudioCodec::kCodecPCM_S16BE:
- case AudioCodec::kCodecPCM_S24BE:
- case AudioCodec::kCodecEAC3:
- case AudioCodec::kCodecPCM_ALAW:
- case AudioCodec::kCodecALAC:
- case AudioCodec::kCodecAC3:
+ case AudioCodec::kAAC:
+ case AudioCodec::kOpus:
+ case AudioCodec::kMP3:
+ case AudioCodec::kPCM:
+ case AudioCodec::kVorbis:
+ case AudioCodec::kFLAC:
+ case AudioCodec::kAMR_NB:
+ case AudioCodec::kAMR_WB:
+ case AudioCodec::kPCM_MULAW:
+ case AudioCodec::kGSM_MS:
+ case AudioCodec::kPCM_S16BE:
+ case AudioCodec::kPCM_S24BE:
+ case AudioCodec::kEAC3:
+ case AudioCodec::kPCM_ALAW:
+ case AudioCodec::kALAC:
+ case AudioCodec::kAC3:
return true;
default:
return false;
@@ -678,10 +679,10 @@ bool RendererController::IsVideoRemotePlaybackSupported() const {
return false;
switch (pipeline_metadata_.video_decoder_config.codec()) {
- case VideoCodec::kCodecH264:
- case VideoCodec::kCodecVP8:
- case VideoCodec::kCodecVP9:
- case VideoCodec::kCodecHEVC:
+ case VideoCodec::kH264:
+ case VideoCodec::kVP8:
+ case VideoCodec::kVP9:
+ case VideoCodec::kHEVC:
return true;
default:
return false;
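The codec switch statements in renderer_controller.cc pick up the enumerator renames in this release: media::AudioCodec and media::VideoCodec values drop the kCodec prefix, and previously unscoped uses such as kCodecAAC gain an explicit AudioCodec:: scope, consistent with the enums being converted to scoped enums in this update. A minimal sketch of a check in the new spelling, mirroring IsVideoRemotePlaybackSupported() above (the free-function name is illustrative):

  #include "media/base/video_codecs.h"

  // Returns true for the video codecs supported for remote playback.
  bool IsRemotableVideoCodec(media::VideoCodec codec) {
    switch (codec) {
      case media::VideoCodec::kH264:
      case media::VideoCodec::kVP8:
      case media::VideoCodec::kVP9:
      case media::VideoCodec::kHEVC:
        return true;
      default:
        return false;
    }
  }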
diff --git a/chromium/media/remoting/renderer_controller.h b/chromium/media/remoting/renderer_controller.h
index 4837c2d8557..93ab714b3d5 100644
--- a/chromium/media/remoting/renderer_controller.h
+++ b/chromium/media/remoting/renderer_controller.h
@@ -24,7 +24,8 @@
#include "mojo/public/cpp/bindings/remote.h"
#if BUILDFLAG(ENABLE_MEDIA_REMOTING_RPC)
-#include "media/remoting/rpc_broker.h" // nogncheck
+#include "third_party/openscreen/src/cast/streaming/rpc_messenger.h" // nogncheck
+#include "third_party/openscreen/src/util/weak_ptr.h" // nogncheck
#endif
namespace base {
@@ -43,6 +44,10 @@ class RendererController final : public mojom::RemotingSource,
RendererController(
mojo::PendingReceiver<mojom::RemotingSource> source_receiver,
mojo::PendingRemote<mojom::Remoter> remoter);
+
+ RendererController(const RendererController&) = delete;
+ RendererController& operator=(const RendererController&) = delete;
+
~RendererController() override;
// mojom::RemotingSource implementations.
@@ -89,7 +94,7 @@ class RendererController final : public mojom::RemotingSource,
DataPipeStartCallback done_callback);
#if BUILDFLAG(ENABLE_MEDIA_REMOTING_RPC)
- base::WeakPtr<RpcBroker> GetRpcBroker();
+ openscreen::WeakPtr<openscreen::cast::RpcMessenger> GetRpcMessenger();
#endif
// Called by CourierRenderer when it encountered a fatal error. This will
@@ -163,8 +168,8 @@ class RendererController final : public mojom::RemotingSource,
bool HasFeatureCapability(mojom::RemotingSinkFeature capability) const;
bool SinkSupportsRemoting() const;
- // Callback from RpcBroker when sending message to remote sink.
- void SendMessageToSink(std::unique_ptr<std::vector<uint8_t>> message);
+ // Callback from RpcMessenger when sending message to remote sink.
+ void SendMessageToSink(std::vector<uint8_t> message);
#if defined(OS_ANDROID)
bool IsAudioRemotePlaybackSupported() const;
@@ -174,7 +179,7 @@ class RendererController final : public mojom::RemotingSource,
#if BUILDFLAG(ENABLE_MEDIA_REMOTING_RPC)
// Handles dispatching of incoming and outgoing RPC messages.
- RpcBroker rpc_broker_;
+ openscreen::cast::RpcMessenger rpc_messenger_;
#endif
const mojo::Receiver<mojom::RemotingSource> receiver_;
@@ -244,8 +249,6 @@ class RendererController final : public mojom::RemotingSource,
const base::TickClock* clock_;
base::WeakPtrFactory<RendererController> weak_factory_{this};
-
- DISALLOW_COPY_AND_ASSIGN(RendererController);
};
} // namespace remoting
diff --git a/chromium/media/remoting/renderer_controller_unittest.cc b/chromium/media/remoting/renderer_controller_unittest.cc
index 24468394bdd..2d1acb2feeb 100644
--- a/chromium/media/remoting/renderer_controller_unittest.cc
+++ b/chromium/media/remoting/renderer_controller_unittest.cc
@@ -52,8 +52,7 @@ mojom::RemotingSinkMetadata GetDefaultSinkMetadata(bool enable) {
return metadata;
}
-constexpr base::TimeDelta kDelayedStartDuration =
- base::TimeDelta::FromSeconds(5);
+constexpr base::TimeDelta kDelayedStartDuration = base::Seconds(5);
} // namespace
@@ -63,6 +62,9 @@ class RendererControllerTest : public ::testing::Test,
RendererControllerTest()
: controller_(FakeRemoterFactory::CreateController(false)) {}
+ RendererControllerTest(const RendererControllerTest&) = delete;
+ RendererControllerTest& operator=(const RendererControllerTest&) = delete;
+
~RendererControllerTest() override = default;
void TearDown() final { RunUntilIdle(); }
@@ -97,7 +99,7 @@ class RendererControllerTest : public ::testing::Test,
EXPECT_FALSE(is_rendering_remotely_);
EXPECT_TRUE(sink_name_.empty());
controller_->clock_ = &clock_;
- clock_.Advance(base::TimeDelta::FromSeconds(1));
+ clock_.Advance(base::Seconds(1));
controller_->SetClient(this);
RunUntilIdle();
EXPECT_FALSE(is_rendering_remotely_);
@@ -167,13 +169,10 @@ class RendererControllerTest : public ::testing::Test,
std::string sink_name_;
std::unique_ptr<RendererController> controller_;
double duration_in_sec_ = 120; // 2m duration.
-
- private:
- DISALLOW_COPY_AND_ASSIGN(RendererControllerTest);
};
TEST_F(RendererControllerTest, ToggleRendererOnDominantChange) {
- InitializeControllerAndBecomeDominant(DefaultMetadata(VideoCodec::kCodecVP8),
+ InitializeControllerAndBecomeDominant(DefaultMetadata(VideoCodec::kVP8),
GetDefaultSinkMetadata(true));
DelayedStartEnds();
RunUntilIdle();
@@ -187,7 +186,7 @@ TEST_F(RendererControllerTest, ToggleRendererOnDominantChange) {
TEST_F(RendererControllerTest, ToggleRendererOnDisableChange) {
EXPECT_FALSE(is_rendering_remotely_);
- InitializeControllerAndBecomeDominant(DefaultMetadata(VideoCodec::kCodecVP8),
+ InitializeControllerAndBecomeDominant(DefaultMetadata(VideoCodec::kVP8),
GetDefaultSinkMetadata(true));
ExpectInDelayedStart();
DelayedStartEnds();
@@ -203,13 +202,13 @@ TEST_F(RendererControllerTest, ToggleRendererOnDisableChange) {
TEST_F(RendererControllerTest, NotStartForShortContent) {
duration_in_sec_ = 30;
- InitializeControllerAndBecomeDominant(DefaultMetadata(VideoCodec::kCodecVP8),
+ InitializeControllerAndBecomeDominant(DefaultMetadata(VideoCodec::kVP8),
GetDefaultSinkMetadata(true));
ExpectInLocalRendering();
}
TEST_F(RendererControllerTest, ToggleRendererOnSinkCapabilities) {
- InitializeControllerAndBecomeDominant(DefaultMetadata(VideoCodec::kCodecVP8),
+ InitializeControllerAndBecomeDominant(DefaultMetadata(VideoCodec::kVP8),
GetDefaultSinkMetadata(false));
// An available sink that does not support remote rendering should not cause
// the controller to toggle remote rendering on.
@@ -231,7 +230,7 @@ TEST_F(RendererControllerTest, ToggleRendererOnSinkCapabilities) {
}
TEST_F(RendererControllerTest, WithVP9VideoCodec) {
- InitializeControllerAndBecomeDominant(DefaultMetadata(VideoCodec::kCodecVP9),
+ InitializeControllerAndBecomeDominant(DefaultMetadata(VideoCodec::kVP9),
GetDefaultSinkMetadata(true));
// An available sink that does not support VP9 video codec should not cause
// the controller to toggle remote rendering on.
@@ -252,7 +251,7 @@ TEST_F(RendererControllerTest, WithVP9VideoCodec) {
}
TEST_F(RendererControllerTest, WithHEVCVideoCodec) {
- InitializeControllerAndBecomeDominant(DefaultMetadata(VideoCodec::kCodecHEVC),
+ InitializeControllerAndBecomeDominant(DefaultMetadata(VideoCodec::kHEVC),
GetDefaultSinkMetadata(true));
// An available sink that does not support HEVC video codec should not cause
// the controller to toggle remote rendering on.
@@ -276,9 +275,9 @@ TEST_F(RendererControllerTest, WithHEVCVideoCodec) {
TEST_F(RendererControllerTest, WithAACAudioCodec) {
const AudioDecoderConfig audio_config = AudioDecoderConfig(
- AudioCodec::kCodecAAC, kSampleFormatPlanarF32, CHANNEL_LAYOUT_STEREO,
- 44100, EmptyExtraData(), EncryptionScheme::kUnencrypted);
- PipelineMetadata pipeline_metadata = DefaultMetadata(VideoCodec::kCodecVP8);
+ AudioCodec::kAAC, kSampleFormatPlanarF32, CHANNEL_LAYOUT_STEREO, 44100,
+ EmptyExtraData(), EncryptionScheme::kUnencrypted);
+ PipelineMetadata pipeline_metadata = DefaultMetadata(VideoCodec::kVP8);
pipeline_metadata.audio_decoder_config = audio_config;
InitializeControllerAndBecomeDominant(pipeline_metadata,
GetDefaultSinkMetadata(true));
@@ -304,9 +303,9 @@ TEST_F(RendererControllerTest, WithAACAudioCodec) {
TEST_F(RendererControllerTest, WithOpusAudioCodec) {
const AudioDecoderConfig audio_config = AudioDecoderConfig(
- AudioCodec::kCodecOpus, kSampleFormatPlanarF32, CHANNEL_LAYOUT_STEREO,
- 44100, EmptyExtraData(), EncryptionScheme::kUnencrypted);
- PipelineMetadata pipeline_metadata = DefaultMetadata(VideoCodec::kCodecVP8);
+ AudioCodec::kOpus, kSampleFormatPlanarF32, CHANNEL_LAYOUT_STEREO, 44100,
+ EmptyExtraData(), EncryptionScheme::kUnencrypted);
+ PipelineMetadata pipeline_metadata = DefaultMetadata(VideoCodec::kVP8);
pipeline_metadata.audio_decoder_config = audio_config;
InitializeControllerAndBecomeDominant(pipeline_metadata,
GetDefaultSinkMetadata(true));
@@ -330,7 +329,7 @@ TEST_F(RendererControllerTest, WithOpusAudioCodec) {
}
TEST_F(RendererControllerTest, StartFailedWithHighFrameRate) {
- InitializeControllerAndBecomeDominant(DefaultMetadata(VideoCodec::kCodecVP8),
+ InitializeControllerAndBecomeDominant(DefaultMetadata(VideoCodec::kVP8),
GetDefaultSinkMetadata(true));
RunUntilIdle();
ExpectInDelayedStart();
@@ -343,7 +342,7 @@ TEST_F(RendererControllerTest, StartSuccessWithHighFrameRate) {
mojom::RemotingSinkMetadata sink_metadata = GetDefaultSinkMetadata(true);
sink_metadata.video_capabilities.push_back(
mojom::RemotingSinkVideoCapability::SUPPORT_4K);
- InitializeControllerAndBecomeDominant(DefaultMetadata(VideoCodec::kCodecVP8),
+ InitializeControllerAndBecomeDominant(DefaultMetadata(VideoCodec::kVP8),
sink_metadata);
RunUntilIdle();
ExpectInDelayedStart();
@@ -354,7 +353,7 @@ TEST_F(RendererControllerTest, StartSuccessWithHighFrameRate) {
TEST_F(RendererControllerTest, PacingTooSlowly) {
mojom::RemotingSinkMetadata sink_metadata = GetDefaultSinkMetadata(true);
- InitializeControllerAndBecomeDominant(DefaultMetadata(VideoCodec::kCodecVP8),
+ InitializeControllerAndBecomeDominant(DefaultMetadata(VideoCodec::kVP8),
sink_metadata);
RunUntilIdle();
ExpectInDelayedStart();
@@ -376,7 +375,7 @@ TEST_F(RendererControllerTest, PacingTooSlowly) {
TEST_F(RendererControllerTest, StartFailed) {
controller_ = FakeRemoterFactory::CreateController(true);
- InitializeControllerAndBecomeDominant(DefaultMetadata(VideoCodec::kCodecVP8),
+ InitializeControllerAndBecomeDominant(DefaultMetadata(VideoCodec::kVP8),
GetDefaultSinkMetadata(true));
RunUntilIdle();
ExpectInDelayedStart();
@@ -387,7 +386,7 @@ TEST_F(RendererControllerTest, StartFailed) {
TEST_F(RendererControllerTest, SetClientNullptr) {
controller_ = FakeRemoterFactory::CreateController(true);
- InitializeControllerAndBecomeDominant(DefaultMetadata(VideoCodec::kCodecVP8),
+ InitializeControllerAndBecomeDominant(DefaultMetadata(VideoCodec::kVP8),
GetDefaultSinkMetadata(true));
RunUntilIdle();
controller_->SetClient(nullptr);
diff --git a/chromium/media/remoting/rpc_broker.cc b/chromium/media/remoting/rpc_broker.cc
deleted file mode 100644
index ad6646bb478..00000000000
--- a/chromium/media/remoting/rpc_broker.cc
+++ /dev/null
@@ -1,109 +0,0 @@
-// Copyright 2016 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include "media/remoting/rpc_broker.h"
-
-#include <utility>
-
-#include "base/logging.h"
-#include "media/base/bind_to_current_loop.h"
-
-namespace media {
-namespace remoting {
-
-namespace {
-
-std::ostream& operator<<(std::ostream& out,
- const openscreen::cast::RpcMessage& message) {
- out << "handle=" << message.handle() << ", proc=" << message.proc();
- switch (message.rpc_oneof_case()) {
- case openscreen::cast::RpcMessage::kIntegerValue:
- out << ", integer_value=" << message.integer_value();
- break;
- case openscreen::cast::RpcMessage::kInteger64Value:
- out << ", integer64_value=" << message.integer64_value();
- break;
- case openscreen::cast::RpcMessage::kDoubleValue:
- out << ", double_value=" << message.double_value();
- break;
- case openscreen::cast::RpcMessage::kBooleanValue:
- out << ", boolean_value=" << message.boolean_value();
- break;
- case openscreen::cast::RpcMessage::kStringValue:
- out << ", string_value=" << message.string_value();
- break;
- default:
- out << ", rpc_oneof=" << message.rpc_oneof_case();
- break;
- }
- return out;
-}
-
-} // namespace
-
-RpcBroker::RpcBroker(const SendMessageCallback& send_message_cb)
- : next_handle_(kFirstHandle), send_message_cb_(send_message_cb) {}
-
-RpcBroker::~RpcBroker() {
- DCHECK(thread_checker_.CalledOnValidThread());
- receive_callbacks_.clear();
-}
-
-int RpcBroker::GetUniqueHandle() {
- DCHECK(thread_checker_.CalledOnValidThread());
- return next_handle_++;
-}
-
-void RpcBroker::RegisterMessageReceiverCallback(
- int handle,
- const ReceiveMessageCallback& callback) {
- VLOG(2) << __func__ << "handle=" << handle;
- DCHECK(thread_checker_.CalledOnValidThread());
- DCHECK(receive_callbacks_.find(handle) == receive_callbacks_.end());
- receive_callbacks_[handle] = callback;
-}
-
-void RpcBroker::UnregisterMessageReceiverCallback(int handle) {
- VLOG(2) << __func__ << " handle=" << handle;
- DCHECK(thread_checker_.CalledOnValidThread());
- receive_callbacks_.erase(handle);
-}
-
-void RpcBroker::ProcessMessageFromRemote(
- std::unique_ptr<openscreen::cast::RpcMessage> message) {
- DCHECK(message);
- DCHECK(thread_checker_.CalledOnValidThread());
- VLOG(3) << __func__ << ": " << *message;
- const auto entry = receive_callbacks_.find(message->handle());
- if (entry == receive_callbacks_.end()) {
- VLOG(1) << "unregistered handle: " << message->handle();
- return;
- }
- entry->second.Run(std::move(message));
-}
-
-void RpcBroker::SendMessageToRemote(
- std::unique_ptr<openscreen::cast::RpcMessage> message) {
- DCHECK(thread_checker_.CalledOnValidThread());
- DCHECK(message);
- VLOG(3) << __func__ << ": " << *message;
- std::unique_ptr<std::vector<uint8_t>> serialized_message(
- new std::vector<uint8_t>(message->ByteSize()));
- CHECK(message->SerializeToArray(serialized_message->data(),
- serialized_message->size()));
- send_message_cb_.Run(std::move(serialized_message));
-}
-
-base::WeakPtr<RpcBroker> RpcBroker::GetWeakPtr() {
- return weak_factory_.GetWeakPtr();
-}
-
-void RpcBroker::SetMessageCallbackForTesting(
- const SendMessageCallback& send_message_cb) {
- DCHECK(thread_checker_.CalledOnValidThread());
- send_message_cb_ = send_message_cb;
-}
-
-} // namespace remoting
-} // namespace media
diff --git a/chromium/media/remoting/rpc_broker.h b/chromium/media/remoting/rpc_broker.h
deleted file mode 100644
index b0d8227eec2..00000000000
--- a/chromium/media/remoting/rpc_broker.h
+++ /dev/null
@@ -1,109 +0,0 @@
-// Copyright 2016 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#ifndef MEDIA_REMOTING_RPC_BROKER_H_
-#define MEDIA_REMOTING_RPC_BROKER_H_
-
-#include <map>
-#include <memory>
-#include <vector>
-
-#include "base/callback.h"
-#include "base/macros.h"
-#include "base/memory/weak_ptr.h"
-#include "base/threading/thread_checker.h"
-#include "third_party/openscreen/src/cast/streaming/remoting.pb.h"
-
-namespace media {
-namespace remoting {
-
-// Utility class to process incoming and outgoing RPC message to desired
-// components on both end points. On sender side, for outgoing message, sender
-// sends RPC message with associated handle value. On receiver side, for
-// component which is interested in this RPC message has to register itself to
-// RpcBroker. Before the RPC transmission starts, both sender and receiver need
-// to negotiate the handle value in the existing RPC communication channel using
-// handle kAcquireHandle.
-//
-// The class doesn't actually send RPC message to remote end point. Actual
-// sender needs to set SendMessageCallback to RpcBroker. The class doesn't
-// actually receive RPC message from the remote end point, either. Actually
-// receiver needs to call ProcessMessageFromRemote() when RPC message is
-// received. RpcBroker will distribute each RPC message to the components based
-// on the handle value in the RPC message.
-//
-// Note this is single-threaded class running on main thread. It provides
-// WeakPtr() for caller to post tasks to the main thread.
-class RpcBroker {
- public:
- using SendMessageCallback =
- base::RepeatingCallback<void(std::unique_ptr<std::vector<uint8_t>>)>;
- explicit RpcBroker(const SendMessageCallback& send_message_cb);
- ~RpcBroker();
-
- // Get unique handle value (larger than 0) for RPC message handles.
- int GetUniqueHandle();
-
- // TODO(chkuo): Change the parameter to accept const ref of RpcMessage.
- using ReceiveMessageCallback = base::RepeatingCallback<void(
- std::unique_ptr<openscreen::cast::RpcMessage>)>;
- // Register a component to receive messages via the given
- // ReceiveMessageCallback. |handle| is a unique handle value provided by a
- // prior call to GetUniqueHandle() and is used to reference the component in
- // the RPC messages. The receiver can then use it to direct an RPC message
- // back to a specific component.
- void RegisterMessageReceiverCallback(int handle,
- const ReceiveMessageCallback& callback);
- // Allows components to unregister in order to stop receiving message.
- void UnregisterMessageReceiverCallback(int handle);
-
- // Allows RpcBroker to distribute incoming RPC message to desired components.
- void ProcessMessageFromRemote(
- std::unique_ptr<openscreen::cast::RpcMessage> message);
- // Sends RPC message to remote end point. The actually sender which sets
- // SendMessageCallback to RpcBrokwer will receive RPC message to do actual
- // data transmission.
- void SendMessageToRemote(
- std::unique_ptr<openscreen::cast::RpcMessage> message);
-
- // Gets weak pointer of RpcBroker. This allows callers to post tasks to
- // RpcBroker on the main thread.
- base::WeakPtr<RpcBroker> GetWeakPtr();
-
- // Overwrites |send_message_cb_|. This is used only for test purposes.
- void SetMessageCallbackForTesting(const SendMessageCallback& send_message_cb);
-
- // Predefined invalid handle value for RPC message.
- static constexpr int kInvalidHandle = -1;
-
- // Predefined handle value for RPC messages related to initialization (before
- // the receiver handle(s) are known).
- static constexpr int kAcquireRendererHandle = 0;
- static constexpr int kAcquireDemuxerHandle = 1;
-
- // The first handle to return from GetUniqueHandle().
- static constexpr int kFirstHandle = 100;
-
- private:
- // Checks that all method calls occur on the same thread.
- base::ThreadChecker thread_checker_;
-
- // Next unique handle to return from GetUniqueHandle().
- int next_handle_;
-
- // Maps to hold handle value associated to MessageReceiver.
- std::map<int, ReceiveMessageCallback> receive_callbacks_;
-
- // Callback that is run to send a serialized message.
- SendMessageCallback send_message_cb_;
-
- base::WeakPtrFactory<RpcBroker> weak_factory_{this};
-
- DISALLOW_COPY_AND_ASSIGN(RpcBroker);
-};
-
-} // namespace remoting
-} // namespace media
-
-#endif // MEDIA_REMOTING_RPC_BROKER_H_
diff --git a/chromium/media/remoting/rpc_broker_unittest.cc b/chromium/media/remoting/rpc_broker_unittest.cc
deleted file mode 100644
index 8c37f39c7ea..00000000000
--- a/chromium/media/remoting/rpc_broker_unittest.cc
+++ /dev/null
@@ -1,252 +0,0 @@
-// Copyright 2016 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include "media/remoting/rpc_broker.h"
-
-#include <memory>
-#include <vector>
-
-#include "base/bind.h"
-#include "base/callback_helpers.h"
-#include "base/macros.h"
-#include "base/memory/ref_counted.h"
-#include "base/run_loop.h"
-#include "base/test/task_environment.h"
-#include "testing/gmock/include/gmock/gmock.h"
-#include "testing/gtest/include/gtest/gtest.h"
-#include "third_party/openscreen/src/cast/streaming/remoting.pb.h"
-
-using testing::_;
-using testing::Invoke;
-using testing::Return;
-
-namespace media {
-namespace remoting {
-
-namespace {
-
-class FakeMessageSender {
- public:
- FakeMessageSender() : received_rpc_(new openscreen::cast::RpcMessage()) {}
- ~FakeMessageSender() = default;
-
- void OnSendMessageAndQuit(std::unique_ptr<std::vector<uint8_t>> message) {
- EXPECT_TRUE(
- received_rpc_->ParseFromArray(message->data(), message->size()));
- has_sent_message_ = true;
- }
-
- void OnSendMessage(std::unique_ptr<std::vector<uint8_t>> message) {
- ++send_count_;
- }
- base::WeakPtr<FakeMessageSender> GetWeakPtr() {
- return weak_factory_.GetWeakPtr();
- }
- bool has_sent_message() const { return has_sent_message_; }
- const openscreen::cast::RpcMessage* received_rpc() const {
- return received_rpc_.get();
- }
- int send_count() const { return send_count_; }
-
- private:
- std::unique_ptr<openscreen::cast::RpcMessage> received_rpc_;
- bool has_sent_message_{false};
- int send_count_{0};
- base::WeakPtrFactory<FakeMessageSender> weak_factory_{this};
-
- DISALLOW_COPY_AND_ASSIGN(FakeMessageSender);
-};
-
-class FakeMessageReceiver {
- public:
- FakeMessageReceiver() = default;
- ~FakeMessageReceiver() = default;
-
- // RpcBroker::MessageReceiver implementation.
- void OnReceivedRpc(std::unique_ptr<openscreen::cast::RpcMessage> message) {
- received_rpc_ = std::move(message);
- num_received_messages_++;
- }
-
- void OnSendMessage(std::unique_ptr<std::vector<uint8_t>> message) {}
- base::WeakPtr<FakeMessageReceiver> GetWeakPtr() {
- return weak_factory_.GetWeakPtr();
- }
- int num_received_messages() const { return num_received_messages_; }
- const openscreen::cast::RpcMessage* received_rpc() const {
- return received_rpc_.get();
- }
-
- private:
- std::unique_ptr<openscreen::cast::RpcMessage> received_rpc_;
- int num_received_messages_{0};
- base::WeakPtrFactory<FakeMessageReceiver> weak_factory_{this};
-
- DISALLOW_COPY_AND_ASSIGN(FakeMessageReceiver);
-};
-
-} // namespace
-
-class RpcBrokerTest : public testing::Test {
- protected:
- void SetUp() override {}
-};
-
-TEST_F(RpcBrokerTest, TestProcessMessageFromRemoteRegistered) {
- std::unique_ptr<FakeMessageReceiver> fake_receiver(new FakeMessageReceiver());
- ASSERT_FALSE(fake_receiver->num_received_messages());
-
- // Creates receiver RpcBroker and registers FakeMessageReceiver.
- std::unique_ptr<RpcBroker> rpc_broker(new RpcBroker(base::BindRepeating(
- &FakeMessageReceiver::OnSendMessage, fake_receiver->GetWeakPtr())));
-
- int handle = rpc_broker->GetUniqueHandle();
- const RpcBroker::ReceiveMessageCallback receive_callback =
- base::BindRepeating(&FakeMessageReceiver::OnReceivedRpc,
- fake_receiver->GetWeakPtr());
- rpc_broker->RegisterMessageReceiverCallback(handle, receive_callback);
-
- std::unique_ptr<openscreen::cast::RpcMessage> rpc(
- new openscreen::cast::RpcMessage());
- rpc->set_handle(handle);
- rpc_broker->ProcessMessageFromRemote(std::move(rpc));
- ASSERT_EQ(fake_receiver->num_received_messages(), 1);
-}
-
-TEST_F(RpcBrokerTest, TestProcessMessageFromRemoteUnregistered) {
- std::unique_ptr<FakeMessageReceiver> fake_receiver(new FakeMessageReceiver());
- ASSERT_FALSE(fake_receiver->num_received_messages());
-
- // Creates receiver RpcBroker and registers FakeMessageReceiver.
- std::unique_ptr<RpcBroker> rpc_broker(new RpcBroker(base::BindRepeating(
- &FakeMessageReceiver::OnSendMessage, fake_receiver->GetWeakPtr())));
-
- int handle = rpc_broker->GetUniqueHandle();
- const RpcBroker::ReceiveMessageCallback receive_callback =
- base::BindRepeating(&FakeMessageReceiver::OnReceivedRpc,
- fake_receiver->GetWeakPtr());
- rpc_broker->RegisterMessageReceiverCallback(handle, receive_callback);
-
- std::unique_ptr<openscreen::cast::RpcMessage> rpc(
- new openscreen::cast::RpcMessage());
- rpc_broker->UnregisterMessageReceiverCallback(handle);
- rpc_broker->ProcessMessageFromRemote(std::move(rpc));
- ASSERT_EQ(fake_receiver->num_received_messages(), 0);
-}
-
-TEST_F(RpcBrokerTest, TestSendMessageToRemote) {
- base::test::SingleThreadTaskEnvironment task_environment;
-
- std::unique_ptr<FakeMessageSender> fake_sender(new FakeMessageSender());
- ASSERT_FALSE(fake_sender->has_sent_message());
-
- // Creates RpcBroker and set message callback.
- std::unique_ptr<RpcBroker> rpc_broker(new RpcBroker(base::BindRepeating(
- &FakeMessageSender::OnSendMessage, fake_sender->GetWeakPtr())));
-
- for (int i = 0; i < 10; ++i) {
- std::unique_ptr<openscreen::cast::RpcMessage> rpc(
- new openscreen::cast::RpcMessage());
- rpc_broker->SendMessageToRemote(std::move(rpc));
- }
- EXPECT_EQ(10, fake_sender->send_count());
-}
-
-TEST_F(RpcBrokerTest, RpcBrokerSendMessageCallback) {
- base::test::SingleThreadTaskEnvironment task_environment;
-
- std::unique_ptr<FakeMessageSender> fake_sender(new FakeMessageSender());
- ASSERT_FALSE(fake_sender->has_sent_message());
-
- // Creates RpcBroker and sets the message callback.
- std::unique_ptr<RpcBroker> rpc_broker(new RpcBroker(base::BindRepeating(
- &FakeMessageSender::OnSendMessageAndQuit, fake_sender->GetWeakPtr())));
-
- // Sends RPC message.
- std::unique_ptr<openscreen::cast::RpcMessage> sent_rpc(
- new openscreen::cast::RpcMessage());
- sent_rpc->set_handle(2);
- sent_rpc->set_proc(openscreen::cast::RpcMessage::RPC_R_SETVOLUME);
- sent_rpc->set_double_value(2.2);
- rpc_broker->SendMessageToRemote(std::move(sent_rpc));
-
- // Wait for message callback.
- // message_loop->Run();
- base::RunLoop().RunUntilIdle();
-
- // Checks that the received message is identical to the one sent earlier.
- ASSERT_TRUE(fake_sender->has_sent_message());
- const auto* received_rpc = fake_sender->received_rpc();
- ASSERT_EQ(2, received_rpc->handle());
- ASSERT_EQ(openscreen::cast::RpcMessage::RPC_R_SETVOLUME,
- received_rpc->proc());
- ASSERT_EQ(2.2, received_rpc->double_value());
-}
-
-TEST_F(RpcBrokerTest, RpcBrokerProcessMessageWithRegisteredHandle) {
- std::unique_ptr<FakeMessageReceiver> fake_receiver(new FakeMessageReceiver());
- ASSERT_FALSE(fake_receiver->num_received_messages());
-
- // Creates receiver RpcBroker and registers FakeMessageReceiver.
- std::unique_ptr<RpcBroker> rpc_broker(new RpcBroker(base::BindRepeating(
- &FakeMessageReceiver::OnSendMessage, fake_receiver->GetWeakPtr())));
- int handle = rpc_broker->GetUniqueHandle();
- const RpcBroker::ReceiveMessageCallback receive_callback =
- base::BindRepeating(&FakeMessageReceiver::OnReceivedRpc,
- fake_receiver->GetWeakPtr());
- rpc_broker->RegisterMessageReceiverCallback(handle, receive_callback);
-
- // Generates an RPC message with handle value |handle| and sends it to the
- // receiver RpcBroker to process.
- std::unique_ptr<openscreen::cast::RpcMessage> sent_rpc(
- new openscreen::cast::RpcMessage());
- sent_rpc->set_handle(handle);
- sent_rpc->set_proc(openscreen::cast::RpcMessage::RPC_R_SETVOLUME);
- sent_rpc->set_double_value(2.2);
- rpc_broker->ProcessMessageFromRemote(std::move(sent_rpc));
-
- // Checks that the received message is identical to the one sent earlier.
- ASSERT_TRUE(fake_receiver->num_received_messages());
- auto* received_rpc = fake_receiver->received_rpc();
- ASSERT_EQ(handle, received_rpc->handle());
- ASSERT_EQ(openscreen::cast::RpcMessage::RPC_R_SETVOLUME,
- received_rpc->proc());
- ASSERT_EQ(2.2, received_rpc->double_value());
-
- // Unregisters FakeMessageReceiver.
- rpc_broker->UnregisterMessageReceiverCallback(handle);
-}
-
-TEST_F(RpcBrokerTest, RpcBrokerProcessMessageWithUnregisteredHandle) {
- std::unique_ptr<FakeMessageReceiver> fake_receiver(new FakeMessageReceiver());
- ASSERT_FALSE(fake_receiver->num_received_messages());
-
- // Creates receiver RpcBroker and registers FakeMessageReceiver.
- std::unique_ptr<RpcBroker> rpc_broker(new RpcBroker(base::BindRepeating(
- &FakeMessageReceiver::OnSendMessage, fake_receiver->GetWeakPtr())));
- int handle = rpc_broker->GetUniqueHandle();
- const RpcBroker::ReceiveMessageCallback receive_callback =
- base::BindRepeating(&FakeMessageReceiver::OnReceivedRpc,
- fake_receiver->GetWeakPtr());
- rpc_broker->RegisterMessageReceiverCallback(handle, receive_callback);
-
- // Generates an RPC message with a handle value different from |handle| and
- // sends it to the receiver RpcBroker to process.
- std::unique_ptr<openscreen::cast::RpcMessage> sent_rpc(
- new openscreen::cast::RpcMessage());
- int different_handle = handle + 1;
- sent_rpc->set_handle(different_handle);
- sent_rpc->set_proc(openscreen::cast::RpcMessage::RPC_R_SETVOLUME);
- sent_rpc->set_double_value(2.2);
- rpc_broker->ProcessMessageFromRemote(std::move(sent_rpc));
-
- // Checks that no message was received, since its handle is not registered.
- ASSERT_FALSE(fake_receiver->num_received_messages());
-
- // Unregisters FakeMessageReceiver.
- rpc_broker->UnregisterMessageReceiverCallback(handle);
-}
-
-} // namespace remoting
-} // namespace media
diff --git a/chromium/media/remoting/stream_provider.cc b/chromium/media/remoting/stream_provider.cc
index 15c1d134e7b..099d2e3bf5c 100644
--- a/chromium/media/remoting/stream_provider.cc
+++ b/chromium/media/remoting/stream_provider.cc
@@ -20,7 +20,9 @@
#include "media/remoting/proto_enum_utils.h"
#include "media/remoting/proto_utils.h"
#include "media/remoting/receiver_controller.h"
-#include "media/remoting/rpc_broker.h"
+#include "third_party/openscreen/src/cast/streaming/rpc_messenger.h"
+
+using openscreen::cast::RpcMessenger;
namespace media {
namespace remoting {
@@ -32,13 +34,13 @@ constexpr int kNumFramesInEachReadUntil = 10;
// static
void StreamProvider::MediaStream::CreateOnMainThread(
- RpcBroker* rpc_broker,
+ RpcMessenger* rpc_messenger,
Type type,
int32_t handle,
const scoped_refptr<base::SingleThreadTaskRunner>& media_task_runner,
base::OnceCallback<void(MediaStream::UniquePtr)> callback) {
MediaStream::UniquePtr stream(
- new MediaStream(rpc_broker, type, handle, media_task_runner),
+ new MediaStream(rpc_messenger, type, handle, media_task_runner),
&DestructionHelper);
std::move(callback).Run(std::move(stream));
}
@@ -49,29 +51,33 @@ void StreamProvider::MediaStream::DestructionHelper(MediaStream* stream) {
}
StreamProvider::MediaStream::MediaStream(
- RpcBroker* rpc_broker,
+ RpcMessenger* rpc_messenger,
Type type,
int remote_handle,
const scoped_refptr<base::SingleThreadTaskRunner>& media_task_runner)
: main_task_runner_(base::ThreadTaskRunnerHandle::Get()),
media_task_runner_(media_task_runner),
- rpc_broker_(rpc_broker),
+ rpc_messenger_(rpc_messenger),
type_(type),
remote_handle_(remote_handle),
- rpc_handle_(rpc_broker_->GetUniqueHandle()) {
- DCHECK(remote_handle_ != RpcBroker::kInvalidHandle);
+ rpc_handle_(rpc_messenger_->GetUniqueHandle()) {
+ DCHECK(remote_handle_ != RpcMessenger::kInvalidHandle);
media_weak_this_ = media_weak_factory_.GetWeakPtr();
- const RpcBroker::ReceiveMessageCallback receive_callback = base::BindPostTask(
+ auto receive_callback = base::BindPostTask(
media_task_runner_,
BindRepeating(&MediaStream::OnReceivedRpc, media_weak_this_));
- rpc_broker_->RegisterMessageReceiverCallback(rpc_handle_, receive_callback);
+ rpc_messenger_->RegisterMessageReceiverCallback(
+ rpc_handle_, [receive_callback](
+ std::unique_ptr<openscreen::cast::RpcMessage> message) {
+ receive_callback.Run(std::move(message));
+ });
}
StreamProvider::MediaStream::~MediaStream() {
DCHECK(main_task_runner_->BelongsToCurrentThread());
- rpc_broker_->UnregisterMessageReceiverCallback(rpc_handle_);
+ rpc_messenger_->UnregisterMessageReceiverCallback(rpc_handle_);
}
void StreamProvider::MediaStream::Destroy() {
@@ -92,12 +98,11 @@ void StreamProvider::MediaStream::Destroy() {
void StreamProvider::MediaStream::SendRpcMessageOnMainThread(
std::unique_ptr<openscreen::cast::RpcMessage> message) {
- // |rpc_broker_| is owned by |receiver_controller_| which is a singleton per
- // process, so it's safe to use Unretained() here.
+ // |rpc_messenger_| is owned by |receiver_controller_| which is a singleton
+ // per process, so it's safe to use Unretained() here.
main_task_runner_->PostTask(
- FROM_HERE,
- base::BindOnce(&RpcBroker::SendMessageToRemote,
- base::Unretained(rpc_broker_), std::move(message)));
+ FROM_HERE, base::BindOnce(&RpcMessenger::SendMessageToRemote,
+ base::Unretained(rpc_messenger_), *message));
}
void StreamProvider::MediaStream::Initialize(
@@ -420,23 +425,26 @@ StreamProvider::StreamProvider(
: main_task_runner_(base::ThreadTaskRunnerHandle::Get()),
media_task_runner_(media_task_runner),
receiver_controller_(receiver_controller),
- rpc_broker_(receiver_controller_->rpc_broker()) {
+ rpc_messenger_(receiver_controller_->rpc_messenger()) {
DCHECK(receiver_controller_);
- DCHECK(rpc_broker_);
+ DCHECK(rpc_messenger_);
media_weak_this_ = media_weak_factory_.GetWeakPtr();
auto callback = base::BindPostTask(
media_task_runner_,
base::BindRepeating(&StreamProvider::OnReceivedRpc, media_weak_this_));
- rpc_broker_->RegisterMessageReceiverCallback(RpcBroker::kAcquireDemuxerHandle,
- callback);
+ rpc_messenger_->RegisterMessageReceiverCallback(
+ RpcMessenger::kAcquireDemuxerHandle,
+ [callback](std::unique_ptr<openscreen::cast::RpcMessage> message) {
+ callback.Run(std::move(message));
+ });
}
StreamProvider::~StreamProvider() {
DCHECK(main_task_runner_->BelongsToCurrentThread());
- rpc_broker_->UnregisterMessageReceiverCallback(
- RpcBroker::kAcquireDemuxerHandle);
+ rpc_messenger_->UnregisterMessageReceiverCallback(
+ RpcMessenger::kAcquireDemuxerHandle);
}
std::string StreamProvider::GetDisplayName() const {
@@ -538,8 +546,8 @@ void StreamProvider::OnAcquireDemuxer(
message->acquire_demuxer_rpc().audio_demuxer_handle();
int32_t video_demuxer_handle =
message->acquire_demuxer_rpc().video_demuxer_handle();
- has_audio_ = audio_demuxer_handle != RpcBroker::kInvalidHandle;
- has_video_ = video_demuxer_handle != RpcBroker::kInvalidHandle;
+ has_audio_ = audio_demuxer_handle != RpcMessenger::kInvalidHandle;
+ has_video_ = video_demuxer_handle != RpcMessenger::kInvalidHandle;
DCHECK(has_audio_ || has_video_);
@@ -547,18 +555,20 @@ void StreamProvider::OnAcquireDemuxer(
auto callback = BindToCurrentLoop(base::BindOnce(
&StreamProvider::OnAudioStreamCreated, media_weak_this_));
main_task_runner_->PostTask(
- FROM_HERE, base::BindOnce(&MediaStream::CreateOnMainThread, rpc_broker_,
- DemuxerStream::AUDIO, audio_demuxer_handle,
- media_task_runner_, std::move(callback)));
+ FROM_HERE,
+ base::BindOnce(&MediaStream::CreateOnMainThread, rpc_messenger_,
+ DemuxerStream::AUDIO, audio_demuxer_handle,
+ media_task_runner_, std::move(callback)));
}
if (has_video_) {
auto callback = BindToCurrentLoop(base::BindOnce(
&StreamProvider::OnVideoStreamCreated, media_weak_this_));
main_task_runner_->PostTask(
- FROM_HERE, base::BindOnce(&MediaStream::CreateOnMainThread, rpc_broker_,
- DemuxerStream::VIDEO, video_demuxer_handle,
- media_task_runner_, std::move(callback)));
+ FROM_HERE,
+ base::BindOnce(&MediaStream::CreateOnMainThread, rpc_messenger_,
+ DemuxerStream::VIDEO, video_demuxer_handle,
+ media_task_runner_, std::move(callback)));
}
}
diff --git a/chromium/media/remoting/stream_provider.h b/chromium/media/remoting/stream_provider.h
index d7352ab854b..d21ea3ad037 100644
--- a/chromium/media/remoting/stream_provider.h
+++ b/chromium/media/remoting/stream_provider.h
@@ -24,6 +24,12 @@ namespace base {
class SingleThreadTaskRunner;
} // namespace base
+namespace openscreen {
+namespace cast {
+class RpcMessenger;
+}
+} // namespace openscreen
+
namespace media {
class MojoDecoderBufferReader;
@@ -31,7 +37,6 @@ class MojoDecoderBufferReader;
namespace remoting {
class ReceiverController;
-class RpcBroker;
// The media stream provider for Media Remoting receiver.
class StreamProvider final : public Demuxer {
@@ -76,9 +81,9 @@ class StreamProvider final : public Demuxer {
std::unique_ptr<MediaStream, std::function<void(MediaStream*)>>;
// MediaStream should be created on the main thread to be able to get unique
- // handle ID from |rpc_broker_|.
+ // handle ID from |rpc_messenger_|.
static void CreateOnMainThread(
- RpcBroker* rpc_broker,
+ openscreen::cast::RpcMessenger* rpc_messenger,
Type type,
int32_t handle,
const scoped_refptr<base::SingleThreadTaskRunner>& media_task_runner,
@@ -89,7 +94,7 @@ class StreamProvider final : public Demuxer {
static void DestructionHelper(MediaStream* stream);
MediaStream(
- RpcBroker* rpc_broker,
+ openscreen::cast::RpcMessenger* rpc_messenger,
Type type,
int32_t remote_handle,
const scoped_refptr<base::SingleThreadTaskRunner>& media_task_runner);
@@ -166,7 +171,7 @@ class StreamProvider final : public Demuxer {
scoped_refptr<base::SingleThreadTaskRunner> main_task_runner_;
scoped_refptr<base::SingleThreadTaskRunner> media_task_runner_;
- RpcBroker* const rpc_broker_; // Outlives this class.
+ openscreen::cast::RpcMessenger* const rpc_messenger_;
const Type type_;
const int remote_handle_;
const int rpc_handle_;
@@ -242,8 +247,8 @@ class StreamProvider final : public Demuxer {
scoped_refptr<base::SingleThreadTaskRunner> main_task_runner_;
scoped_refptr<base::SingleThreadTaskRunner> media_task_runner_;
- ReceiverController* const receiver_controller_; // Outlives this class
- RpcBroker* const rpc_broker_; // Outlives this class
+ ReceiverController* const receiver_controller_;
+ openscreen::cast::RpcMessenger* const rpc_messenger_;
MediaStream::UniquePtr audio_stream_;
MediaStream::UniquePtr video_stream_;
bool has_audio_{false};
diff --git a/chromium/media/remoting/stream_provider_unittest.cc b/chromium/media/remoting/stream_provider_unittest.cc
index 4431eff4f48..21aa76b0807 100644
--- a/chromium/media/remoting/stream_provider_unittest.cc
+++ b/chromium/media/remoting/stream_provider_unittest.cc
@@ -16,6 +16,7 @@
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
+using openscreen::cast::RpcMessenger;
using testing::NiceMock;
namespace {
@@ -41,19 +42,21 @@ class StreamProviderTest : public testing::Test {
stream_provider_ = std::make_unique<StreamProvider>(
mock_controller_, base::ThreadTaskRunnerHandle::Get());
- rpc_broker_ = mock_controller_->rpc_broker();
- sender_audio_demuxer_stream_handle_ = rpc_broker_->GetUniqueHandle();
- sender_video_demuxer_stream_handle_ = rpc_broker_->GetUniqueHandle();
- rpc_broker_->RegisterMessageReceiverCallback(
+ rpc_messenger_ = mock_controller_->rpc_messenger();
+ sender_audio_demuxer_stream_handle_ = rpc_messenger_->GetUniqueHandle();
+ sender_video_demuxer_stream_handle_ = rpc_messenger_->GetUniqueHandle();
+ rpc_messenger_->RegisterMessageReceiverCallback(
sender_audio_demuxer_stream_handle_,
- base::BindRepeating(&StreamProviderTest::OnDemuxerStreamReceivedRpc,
- base::Unretained(this),
- DemuxerStream::Type::AUDIO));
- rpc_broker_->RegisterMessageReceiverCallback(
+ [this](std::unique_ptr<openscreen::cast::RpcMessage> message) {
+ OnDemuxerStreamReceivedRpc(DemuxerStream::Type::AUDIO,
+ std::move(message));
+ });
+ rpc_messenger_->RegisterMessageReceiverCallback(
sender_video_demuxer_stream_handle_,
- base::BindRepeating(&StreamProviderTest::OnDemuxerStreamReceivedRpc,
- base::Unretained(this),
- DemuxerStream::Type::VIDEO));
+ [this](std::unique_ptr<openscreen::cast::RpcMessage> message) {
+ OnDemuxerStreamReceivedRpc(DemuxerStream::Type::VIDEO,
+ std::move(message));
+ });
}
void TearDown() override {
@@ -116,7 +119,7 @@ class StreamProviderTest : public testing::Test {
NOTREACHED();
}
- rpc_broker_->SendMessageToRemote(std::move(rpc));
+ rpc_messenger_->SendMessageToRemote(*rpc);
}
void ReadUntil(DemuxerStream::Type type) {
@@ -134,13 +137,13 @@ class StreamProviderTest : public testing::Test {
void SendRpcAcquireDemuxer() {
auto rpc = std::make_unique<openscreen::cast::RpcMessage>();
- rpc->set_handle(RpcBroker::kAcquireDemuxerHandle);
+ rpc->set_handle(RpcMessenger::kAcquireDemuxerHandle);
rpc->set_proc(openscreen::cast::RpcMessage::RPC_ACQUIRE_DEMUXER);
openscreen::cast::AcquireDemuxer* message =
rpc->mutable_acquire_demuxer_rpc();
message->set_audio_demuxer_handle(sender_audio_demuxer_stream_handle_);
message->set_video_demuxer_handle(sender_video_demuxer_stream_handle_);
- rpc_broker_->SendMessageToRemote(std::move(rpc));
+ rpc_messenger_->SendMessageToRemote(*rpc);
}
void OnStreamProviderInitialized(PipelineStatus status) {
@@ -175,16 +178,16 @@ class StreamProviderTest : public testing::Test {
void SendRpcReadUntilCallback(DemuxerStream::Type type) {
// Issues RPC_DS_READUNTIL_CALLBACK RPC message.
- auto rpc = std::make_unique<openscreen::cast::RpcMessage>();
- rpc->set_handle(type == DemuxerStream::Type::AUDIO
- ? receiver_audio_demuxer_stream_handle_
- : receiver_video_demuxer_stream_handle_);
- rpc->set_proc(openscreen::cast::RpcMessage::RPC_DS_READUNTIL_CALLBACK);
- auto* message = rpc->mutable_demuxerstream_readuntilcb_rpc();
+ openscreen::cast::RpcMessage rpc;
+ rpc.set_handle(type == DemuxerStream::Type::AUDIO
+ ? receiver_audio_demuxer_stream_handle_
+ : receiver_video_demuxer_stream_handle_);
+ rpc.set_proc(openscreen::cast::RpcMessage::RPC_DS_READUNTIL_CALLBACK);
+ auto* message = rpc.mutable_demuxerstream_readuntilcb_rpc();
message->set_count(0);
message->set_status(
ToProtoDemuxerStreamStatus(DemuxerStream::Status::kOk).value());
- rpc_broker_->SendMessageToRemote(std::move(rpc));
+ rpc_messenger_->SendMessageToRemote(rpc);
}
void FlushUntil(uint32_t flush_audio_count, uint32_t flush_video_count) {
@@ -230,12 +233,12 @@ class StreamProviderTest : public testing::Test {
scoped_refptr<DecoderBuffer> received_audio_buffer_;
scoped_refptr<DecoderBuffer> received_video_buffer_;
- int sender_audio_demuxer_stream_handle_ = RpcBroker::kInvalidHandle;
- int sender_video_demuxer_stream_handle_ = RpcBroker::kInvalidHandle;
- int receiver_audio_demuxer_stream_handle_ = RpcBroker::kInvalidHandle;
- int receiver_video_demuxer_stream_handle_ = RpcBroker::kInvalidHandle;
+ int sender_audio_demuxer_stream_handle_ = RpcMessenger::kInvalidHandle;
+ int sender_video_demuxer_stream_handle_ = RpcMessenger::kInvalidHandle;
+ int receiver_audio_demuxer_stream_handle_ = RpcMessenger::kInvalidHandle;
+ int receiver_video_demuxer_stream_handle_ = RpcMessenger::kInvalidHandle;
- RpcBroker* rpc_broker_;
+ RpcMessenger* rpc_messenger_;
MockReceiverController* mock_controller_;
MockRemotee* mock_remotee_;
std::unique_ptr<StreamProvider> stream_provider_;
diff --git a/chromium/media/renderers/BUILD.gn b/chromium/media/renderers/BUILD.gn
index a37d1891aec..00083feb2db 100644
--- a/chromium/media/renderers/BUILD.gn
+++ b/chromium/media/renderers/BUILD.gn
@@ -53,9 +53,9 @@ source_set("renderers") {
"//media/filters",
"//media/video",
"//third_party/libyuv",
- "//ui/gfx:geometry_skia",
"//ui/gfx:memory_buffer",
"//ui/gfx/geometry",
+ "//ui/gfx/geometry:geometry_skia",
"//ui/gl",
]
@@ -89,9 +89,7 @@ source_set("renderers") {
]
}
- configs += [
- "//media:subcomponent_config",
- ]
+ configs += [ "//media:subcomponent_config" ]
}
# Note: This is a roll-up only target; do not expand the visibility. DEPS should
diff --git a/chromium/media/renderers/audio_renderer_impl.cc b/chromium/media/renderers/audio_renderer_impl.cc
index 305369256b7..74117712664 100644
--- a/chromium/media/renderers/audio_renderer_impl.cc
+++ b/chromium/media/renderers/audio_renderer_impl.cc
@@ -476,9 +476,9 @@ void AudioRendererImpl::OnDeviceInfoReceived(
if (is_passthrough_) {
AudioParameters::Format format = AudioParameters::AUDIO_FAKE;
- if (codec == kCodecAC3) {
+ if (codec == AudioCodec::kAC3) {
format = AudioParameters::AUDIO_BITSTREAM_AC3;
- } else if (codec == kCodecEAC3) {
+ } else if (codec == AudioCodec::kEAC3) {
format = AudioParameters::AUDIO_BITSTREAM_EAC3;
} else {
NOTREACHED();
@@ -538,11 +538,10 @@ void AudioRendererImpl::OnDeviceInfoReceived(
// mixer will attempt to up-mix stereo source streams to just the left/right
// speaker of the 5.1 setup, nulling out the other channels
// (http://crbug.com/177872).
- ChannelLayout hw_channel_layout =
- hw_params.channel_layout() == CHANNEL_LAYOUT_DISCRETE ||
- try_supported_channel_layouts
- ? CHANNEL_LAYOUT_STEREO
- : hw_params.channel_layout();
+ hw_channel_layout = hw_params.channel_layout() == CHANNEL_LAYOUT_DISCRETE ||
+ try_supported_channel_layouts
+ ? CHANNEL_LAYOUT_STEREO
+ : hw_params.channel_layout();
int hw_channel_count = ChannelLayoutToChannelCount(hw_channel_layout);
// The layout we pass to |audio_parameters_| will be used for the lifetime
diff --git a/chromium/media/renderers/audio_renderer_impl.h b/chromium/media/renderers/audio_renderer_impl.h
index b201648b67d..53dc93630d2 100644
--- a/chromium/media/renderers/audio_renderer_impl.h
+++ b/chromium/media/renderers/audio_renderer_impl.h
@@ -78,6 +78,10 @@ class MEDIA_EXPORT AudioRendererImpl
const CreateAudioDecodersCB& create_audio_decoders_cb,
MediaLog* media_log,
SpeechRecognitionClient* speech_recognition_client = nullptr);
+
+ AudioRendererImpl(const AudioRendererImpl&) = delete;
+ AudioRendererImpl& operator=(const AudioRendererImpl&) = delete;
+
~AudioRendererImpl() override;
// TimeSource implementation.
@@ -383,8 +387,6 @@ class MEDIA_EXPORT AudioRendererImpl
// NOTE: Weak pointers must be invalidated before all other member variables.
base::WeakPtrFactory<AudioRendererImpl> weak_factory_{this};
-
- DISALLOW_COPY_AND_ASSIGN(AudioRendererImpl);
};
} // namespace media
diff --git a/chromium/media/renderers/audio_renderer_impl_unittest.cc b/chromium/media/renderers/audio_renderer_impl_unittest.cc
index 6a76173d67c..628c9502660 100644
--- a/chromium/media/renderers/audio_renderer_impl_unittest.cc
+++ b/chromium/media/renderers/audio_renderer_impl_unittest.cc
@@ -58,7 +58,7 @@ struct OutputFrames {
} // namespace
// Constants to specify the type of audio data used.
-constexpr AudioCodec kCodec = kCodecVorbis;
+constexpr AudioCodec kCodec = AudioCodec::kVorbis;
constexpr SampleFormat kSampleFormat = kSampleFormatPlanarF32;
constexpr ChannelLayout kChannelLayout = CHANNEL_LAYOUT_STEREO;
constexpr int kChannels = 2;
@@ -136,9 +136,12 @@ class AudioRendererImplTest : public ::testing::Test, public RendererClient {
base::Unretained(this)),
&media_log_, nullptr);
renderer_->tick_clock_ = &tick_clock_;
- tick_clock_.Advance(base::TimeDelta::FromSeconds(1));
+ tick_clock_.Advance(base::Seconds(1));
}
+ AudioRendererImplTest(const AudioRendererImplTest&) = delete;
+ AudioRendererImplTest& operator=(const AudioRendererImplTest&) = delete;
+
~AudioRendererImplTest() override {
SCOPED_TRACE("~AudioRendererImplTest()");
}
@@ -251,9 +254,10 @@ class AudioRendererImplTest : public ::testing::Test, public RendererClient {
hardware_params_.Reset(AudioParameters::AUDIO_BITSTREAM_EAC3,
kChannelLayout, kOutputSamplesPerSecond, 512);
sink_ = base::MakeRefCounted<FakeAudioRendererSink>(hardware_params_);
- AudioDecoderConfig audio_config(
- kCodecAC3, kSampleFormatEac3, kChannelLayout, kInputSamplesPerSecond,
- EmptyExtraData(), EncryptionScheme::kUnencrypted);
+ AudioDecoderConfig audio_config(AudioCodec::kAC3, kSampleFormatEac3,
+ kChannelLayout, kInputSamplesPerSecond,
+ EmptyExtraData(),
+ EncryptionScheme::kUnencrypted);
demuxer_stream_.set_audio_decoder_config(audio_config);
ConfigureDemuxerStream(true);
@@ -594,8 +598,6 @@ class AudioRendererImplTest : public ::testing::Test, public RendererClient {
bool expected_init_result_;
bool enter_pending_decoder_init_;
bool ended_;
-
- DISALLOW_COPY_AND_ASSIGN(AudioRendererImplTest);
};
TEST_F(AudioRendererImplTest, Initialize_Successful) {
@@ -649,7 +651,7 @@ TEST_F(AudioRendererImplTest, SignalConfigChange) {
// Force config change to simulate detected change from decoder stream. Expect
// that RendererClient to be signaled with the new config.
const AudioDecoderConfig kValidAudioConfig(
- kCodecVorbis, kSampleFormatPlanarF32, CHANNEL_LAYOUT_STEREO, 44100,
+ AudioCodec::kVorbis, kSampleFormatPlanarF32, CHANNEL_LAYOUT_STEREO, 44100,
EmptyExtraData(), EncryptionScheme::kUnencrypted);
EXPECT_TRUE(kValidAudioConfig.IsValidConfig());
EXPECT_CALL(*this, OnAudioConfigChange(DecoderConfigEq(kValidAudioConfig)));
@@ -923,7 +925,7 @@ TEST_F(AudioRendererImplTest, ChannelMask_DownmixDiscreteLayout) {
int audio_channels = 9;
AudioDecoderConfig audio_config(
- kCodecOpus, kSampleFormat, CHANNEL_LAYOUT_DISCRETE,
+ AudioCodec::kOpus, kSampleFormat, CHANNEL_LAYOUT_DISCRETE,
kInputSamplesPerSecond, EmptyExtraData(), EncryptionScheme::kUnencrypted);
audio_config.SetChannelsForDiscrete(audio_channels);
demuxer_stream_.set_audio_decoder_config(audio_config);
@@ -976,8 +978,7 @@ TEST_F(AudioRendererImplTest, PendingRead_Flush) {
FlushDuringPendingRead();
// Preroll again to a different timestamp and verify it completed normally.
- const base::TimeDelta seek_timestamp =
- base::TimeDelta::FromMilliseconds(1000);
+ const base::TimeDelta seek_timestamp = base::Milliseconds(1000);
Preroll(seek_timestamp, seek_timestamp, PIPELINE_OK);
}
@@ -1095,8 +1096,8 @@ TEST_F(AudioRendererImplTest, RenderingDelayedForEarlyStartTime) {
// through the desired output buffer; this allows for maximum test coverage.
const double kBuffers = 4.5;
const base::TimeDelta first_timestamp =
- base::TimeDelta::FromSecondsD(hardware_params_.frames_per_buffer() *
- kBuffers / hardware_params_.sample_rate());
+ base::Seconds(hardware_params_.frames_per_buffer() * kBuffers /
+ hardware_params_.sample_rate());
Preroll(base::TimeDelta(), first_timestamp, PIPELINE_OK);
StartTicking();
@@ -1278,8 +1279,7 @@ TEST_F(AudioRendererImplTest, TimeSourceBehavior) {
// Consume some more audio data.
frames_to_consume = frames_buffered();
- tick_clock_.Advance(
- base::TimeDelta::FromSecondsD(1.0 / kOutputSamplesPerSecond));
+ tick_clock_.Advance(base::Seconds(1.0 / kOutputSamplesPerSecond));
EXPECT_TRUE(ConsumeBufferedData(frames_to_consume));
// Time should change now that the audio hardware has called back.
@@ -1314,7 +1314,7 @@ TEST_F(AudioRendererImplTest, TimeSourceBehavior) {
// Advancing once more will exceed the amount of played out frames finally.
const base::TimeDelta kOneSample =
- base::TimeDelta::FromSecondsD(1.0 / kOutputSamplesPerSecond);
+ base::Seconds(1.0 / kOutputSamplesPerSecond);
base::TimeTicks current_time = tick_clock_.NowTicks();
tick_clock_.Advance(kOneSample);
EXPECT_EQ(current_time, CurrentMediaWallClockTime(&is_time_moving));
@@ -1324,7 +1324,7 @@ TEST_F(AudioRendererImplTest, TimeSourceBehavior) {
DeliverRemainingAudio();
// Elapse a lot of time between StopTicking() and the next Render() call.
- const base::TimeDelta kOneSecond = base::TimeDelta::FromSeconds(1);
+ const base::TimeDelta kOneSecond = base::Seconds(1);
tick_clock_.Advance(kOneSecond);
StartTicking();
@@ -1334,8 +1334,8 @@ TEST_F(AudioRendererImplTest, TimeSourceBehavior) {
// Consume some buffered data with a small delay.
uint32_t delay_frames = 500;
- base::TimeDelta delay_time = base::TimeDelta::FromMicroseconds(
- std::round(delay_frames * kOutputMicrosPerFrame));
+ base::TimeDelta delay_time =
+ base::Microseconds(std::round(delay_frames * kOutputMicrosPerFrame));
frames_to_consume.value = frames_buffered().value / 16;
EXPECT_TRUE(ConsumeBufferedData(frames_to_consume, delay_time));
diff --git a/chromium/media/renderers/decrypting_renderer.h b/chromium/media/renderers/decrypting_renderer.h
index d803b7504ff..b1b9a25e6c7 100644
--- a/chromium/media/renderers/decrypting_renderer.h
+++ b/chromium/media/renderers/decrypting_renderer.h
@@ -38,6 +38,10 @@ class MEDIA_EXPORT DecryptingRenderer : public Renderer {
std::unique_ptr<Renderer> renderer,
MediaLog* media_log,
const scoped_refptr<base::SingleThreadTaskRunner> media_task_runner);
+
+ DecryptingRenderer(const DecryptingRenderer&) = delete;
+ DecryptingRenderer& operator=(const DecryptingRenderer&) = delete;
+
~DecryptingRenderer() override;
// Renderer implementation:
@@ -88,8 +92,6 @@ class MEDIA_EXPORT DecryptingRenderer : public Renderer {
std::unique_ptr<DecryptingMediaResource> decrypting_media_resource_;
base::WeakPtrFactory<DecryptingRenderer> weak_factory_{this};
-
- DISALLOW_COPY_AND_ASSIGN(DecryptingRenderer);
};
} // namespace media
diff --git a/chromium/media/renderers/decrypting_renderer_factory.h b/chromium/media/renderers/decrypting_renderer_factory.h
index 6b0e12cd578..cc8cd013b16 100644
--- a/chromium/media/renderers/decrypting_renderer_factory.h
+++ b/chromium/media/renderers/decrypting_renderer_factory.h
@@ -26,6 +26,11 @@ class MEDIA_EXPORT DecryptingRendererFactory final : public RendererFactory {
DecryptingRendererFactory(
MediaLog* media_log,
std::unique_ptr<media::RendererFactory> renderer_factory);
+
+ DecryptingRendererFactory(const DecryptingRendererFactory&) = delete;
+ DecryptingRendererFactory& operator=(const DecryptingRendererFactory&) =
+ delete;
+
~DecryptingRendererFactory() final;
// RendererFactory implementation.
@@ -41,8 +46,6 @@ class MEDIA_EXPORT DecryptingRendererFactory final : public RendererFactory {
MediaLog* media_log_;
std::unique_ptr<media::RendererFactory> renderer_factory_;
-
- DISALLOW_COPY_AND_ASSIGN(DecryptingRendererFactory);
};
} // namespace media
diff --git a/chromium/media/renderers/default_decoder_factory.h b/chromium/media/renderers/default_decoder_factory.h
index 9283bd9cccc..d38d06a71a8 100644
--- a/chromium/media/renderers/default_decoder_factory.h
+++ b/chromium/media/renderers/default_decoder_factory.h
@@ -19,6 +19,10 @@ class MEDIA_EXPORT DefaultDecoderFactory final : public DecoderFactory {
// additional decoders.
explicit DefaultDecoderFactory(
std::unique_ptr<DecoderFactory> external_decoder_factory);
+
+ DefaultDecoderFactory(const DefaultDecoderFactory&) = delete;
+ DefaultDecoderFactory& operator=(const DefaultDecoderFactory&) = delete;
+
~DefaultDecoderFactory() final;
void CreateAudioDecoders(
@@ -46,8 +50,6 @@ class MEDIA_EXPORT DefaultDecoderFactory final : public DecoderFactory {
std::unique_ptr<DecoderFactory> external_decoder_factory_
GUARDED_BY(shutdown_lock_);
-
- DISALLOW_COPY_AND_ASSIGN(DefaultDecoderFactory);
};
} // namespace media
diff --git a/chromium/media/renderers/default_renderer_factory.h b/chromium/media/renderers/default_renderer_factory.h
index b6486df45d3..4446df21bcb 100644
--- a/chromium/media/renderers/default_renderer_factory.h
+++ b/chromium/media/renderers/default_renderer_factory.h
@@ -51,6 +51,10 @@ class MEDIA_EXPORT DefaultRendererFactory final : public RendererFactory {
const GetGpuFactoriesCB& get_gpu_factories_cb,
std::unique_ptr<SpeechRecognitionClient> speech_recognition_client);
#endif
+
+ DefaultRendererFactory(const DefaultRendererFactory&) = delete;
+ DefaultRendererFactory& operator=(const DefaultRendererFactory&) = delete;
+
~DefaultRendererFactory() final;
std::unique_ptr<Renderer> CreateRenderer(
@@ -82,8 +86,6 @@ class MEDIA_EXPORT DefaultRendererFactory final : public RendererFactory {
#if !defined(OS_ANDROID)
std::unique_ptr<SpeechRecognitionClient> speech_recognition_client_;
#endif
-
- DISALLOW_COPY_AND_ASSIGN(DefaultRendererFactory);
};
} // namespace media
diff --git a/chromium/media/renderers/paint_canvas_video_renderer.cc b/chromium/media/renderers/paint_canvas_video_renderer.cc
index 8b6339f26a9..9a84134c8ee 100644
--- a/chromium/media/renderers/paint_canvas_video_renderer.cc
+++ b/chromium/media/renderers/paint_canvas_video_renderer.cc
@@ -43,7 +43,7 @@
#include "third_party/skia/include/gpu/GrDirectContext.h"
#include "third_party/skia/include/gpu/gl/GrGLTypes.h"
#include "ui/gfx/geometry/rect_f.h"
-#include "ui/gfx/skia_util.h"
+#include "ui/gfx/geometry/skia_conversions.h"
// Skia's internal format depends on the platform. On Android it is ABGR, on
// others it is ARGB. YUV_MATRIX(), YUV_ORDER() conditionally remap YUV to YVU
// for ABGR.
@@ -568,7 +568,8 @@ class VideoImageGenerator : public cc::PaintImageGenerator {
VideoImageGenerator(scoped_refptr<VideoFrame> frame)
: cc::PaintImageGenerator(
SkImageInfo::MakeN32Premul(frame->visible_rect().width(),
- frame->visible_rect().height())),
+ frame->visible_rect().height(),
+ frame->ColorSpace().ToSkColorSpace())),
frame_(std::move(frame)) {
DCHECK(!frame_->HasTextures());
}
@@ -587,6 +588,13 @@ class VideoImageGenerator : public cc::PaintImageGenerator {
// If skia couldn't do the YUV conversion on GPU, we will on CPU.
PaintCanvasVideoRenderer::ConvertVideoFrameToRGBPixels(frame_.get(), pixels,
row_bytes);
+
+ if (!SkColorSpace::Equals(GetSkImageInfo().colorSpace(),
+ info.colorSpace())) {
+ SkPixmap src(GetSkImageInfo(), pixels, row_bytes);
+ if (!src.readPixels(info, pixels, row_bytes))
+ return false;
+ }
return true;
}
@@ -775,11 +783,10 @@ class VideoTextureBacking : public cc::TextureBacking {
};
PaintCanvasVideoRenderer::PaintCanvasVideoRenderer()
- : cache_deleting_timer_(
- FROM_HERE,
- base::TimeDelta::FromSeconds(kTemporaryResourceDeletionDelay),
- this,
- &PaintCanvasVideoRenderer::ResetCache),
+ : cache_deleting_timer_(FROM_HERE,
+ base::Seconds(kTemporaryResourceDeletionDelay),
+ this,
+ &PaintCanvasVideoRenderer::ResetCache),
renderer_stable_id_(cc::PaintImage::GetNextId()) {}
PaintCanvasVideoRenderer::~PaintCanvasVideoRenderer() = default;
@@ -1405,13 +1412,16 @@ bool PaintCanvasVideoRenderer::UploadVideoFrameToGLTexture(
return true;
}
+// static
bool PaintCanvasVideoRenderer::PrepareVideoFrameForWebGL(
viz::RasterContextProvider* raster_context_provider,
gpu::gles2::GLES2Interface* destination_gl,
scoped_refptr<VideoFrame> video_frame,
unsigned int target,
unsigned int texture) {
- DCHECK(thread_checker_.CalledOnValidThread());
+ // TODO(776222): This static function uses no common functionality in
+ // PaintCanvasVideoRenderer, and should be removed from this class.
+
DCHECK(video_frame);
DCHECK(video_frame->HasTextures());
if (video_frame->NumTextures() == 1) {
@@ -1449,9 +1459,15 @@ bool PaintCanvasVideoRenderer::PrepareVideoFrameForWebGL(
destination_gl->GenUnverifiedSyncTokenCHROMIUM(
mailbox_holder.sync_token.GetData());
- if (!PrepareVideoFrame(video_frame, raster_context_provider,
- mailbox_holder)) {
- return false;
+ // Generate a new image.
+ if (video_frame->HasTextures()) {
+ if (video_frame->NumTextures() > 1) {
+ VideoFrameYUVConverter::ConvertYUVVideoFrameNoCaching(
+ video_frame.get(), raster_context_provider, mailbox_holder);
+ } else {
+ // Single-texture frames (e.g. on Android) are not supported yet.
+ return false;
+ }
}
// Wait for mailbox creation on canvas context before consuming it and
@@ -1465,7 +1481,6 @@ bool PaintCanvasVideoRenderer::PrepareVideoFrameForWebGL(
WaitAndReplaceSyncTokenClient client(source_ri);
video_frame->UpdateReleaseSyncToken(&client);
- DCHECK(!CacheBackingWrapsTexture());
return true;
}
@@ -1833,27 +1848,6 @@ bool PaintCanvasVideoRenderer::UpdateLastImage(
return true;
}
-bool PaintCanvasVideoRenderer::PrepareVideoFrame(
- scoped_refptr<VideoFrame> video_frame,
- viz::RasterContextProvider* raster_context_provider,
- const gpu::MailboxHolder& dest_holder) {
- // Generate a new image.
- // Note: Skia will hold onto |video_frame| via |video_generator| only when
- // |video_frame| is software.
- // Holding |video_frame| longer than this call when using GPUVideoDecoder
- // could cause problems since the pool of VideoFrames has a fixed size.
- if (video_frame->HasTextures()) {
- if (video_frame->NumTextures() > 1) {
- VideoFrameYUVConverter::ConvertYUVVideoFrameNoCaching(
- video_frame.get(), raster_context_provider, dest_holder);
- } else {
- // We don't support Android now.
- return false;
- }
- }
- return true;
-}
-
PaintCanvasVideoRenderer::YUVTextureCache::YUVTextureCache() = default;
PaintCanvasVideoRenderer::YUVTextureCache::~YUVTextureCache() {
Reset();
diff --git a/chromium/media/renderers/paint_canvas_video_renderer.h b/chromium/media/renderers/paint_canvas_video_renderer.h
index 81a43500daf..f8e668e9cec 100644
--- a/chromium/media/renderers/paint_canvas_video_renderer.h
+++ b/chromium/media/renderers/paint_canvas_video_renderer.h
@@ -46,6 +46,10 @@ class VideoTextureBacking;
class MEDIA_EXPORT PaintCanvasVideoRenderer {
public:
PaintCanvasVideoRenderer();
+
+ PaintCanvasVideoRenderer(const PaintCanvasVideoRenderer&) = delete;
+ PaintCanvasVideoRenderer& operator=(const PaintCanvasVideoRenderer&) = delete;
+
~PaintCanvasVideoRenderer();
// Paints |video_frame| translated and scaled to |dest_rect| on |canvas|.
@@ -103,7 +107,8 @@ class MEDIA_EXPORT PaintCanvasVideoRenderer {
bool premultiply_alpha,
bool flip_y);
- bool PrepareVideoFrameForWebGL(
+ // TODO(776222): Remove this function from PaintCanvasVideoRenderer.
+ static bool PrepareVideoFrameForWebGL(
viz::RasterContextProvider* raster_context_provider,
gpu::gles2::GLES2Interface* gl,
scoped_refptr<VideoFrame> video_frame,
@@ -277,8 +282,6 @@ class MEDIA_EXPORT PaintCanvasVideoRenderer {
gpu::SyncToken sync_token;
};
YUVTextureCache yuv_cache_;
-
- DISALLOW_COPY_AND_ASSIGN(PaintCanvasVideoRenderer);
};
} // namespace media
diff --git a/chromium/media/renderers/paint_canvas_video_renderer_unittest.cc b/chromium/media/renderers/paint_canvas_video_renderer_unittest.cc
index 727fad0ffdd..ebd8b5cb7aa 100644
--- a/chromium/media/renderers/paint_canvas_video_renderer_unittest.cc
+++ b/chromium/media/renderers/paint_canvas_video_renderer_unittest.cc
@@ -110,6 +110,11 @@ class PaintCanvasVideoRendererTest : public testing::Test {
};
PaintCanvasVideoRendererTest();
+
+ PaintCanvasVideoRendererTest(const PaintCanvasVideoRendererTest&) = delete;
+ PaintCanvasVideoRendererTest& operator=(const PaintCanvasVideoRendererTest&) =
+ delete;
+
~PaintCanvasVideoRendererTest() override;
// Paints to |canvas| using |renderer_| without any frame data.
@@ -150,8 +155,6 @@ class PaintCanvasVideoRendererTest : public testing::Test {
SkBitmap bitmap_;
cc::SkiaPaintCanvas target_canvas_;
base::test::TaskEnvironment task_environment_;
-
- DISALLOW_COPY_AND_ASSIGN(PaintCanvasVideoRendererTest);
};
static SkBitmap AllocBitmap(int width, int height) {
@@ -164,7 +167,7 @@ static SkBitmap AllocBitmap(int width, int height) {
static scoped_refptr<VideoFrame> CreateCroppedFrame() {
scoped_refptr<VideoFrame> cropped_frame = VideoFrame::CreateFrame(
PIXEL_FORMAT_I420, gfx::Size(16, 16), gfx::Rect(6, 6, 8, 6),
- gfx::Size(8, 6), base::TimeDelta::FromMilliseconds(4));
+ gfx::Size(8, 6), base::Milliseconds(4));
// Make sure the cropped video frame's aspect ratio matches the output device.
// Update cropped_frame_'s crop dimensions if this is not the case.
EXPECT_EQ(cropped_frame->visible_rect().width() * kHeight,
@@ -256,9 +259,9 @@ PaintCanvasVideoRendererTest::PaintCanvasVideoRendererTest()
bitmap_(AllocBitmap(kWidth, kHeight)),
target_canvas_(bitmap_) {
// Give each frame a unique timestamp.
- natural_frame_->set_timestamp(base::TimeDelta::FromMilliseconds(1));
- larger_frame_->set_timestamp(base::TimeDelta::FromMilliseconds(2));
- smaller_frame_->set_timestamp(base::TimeDelta::FromMilliseconds(3));
+ natural_frame_->set_timestamp(base::Milliseconds(1));
+ larger_frame_->set_timestamp(base::Milliseconds(2));
+ smaller_frame_->set_timestamp(base::Milliseconds(3));
}
PaintCanvasVideoRendererTest::~PaintCanvasVideoRendererTest() = default;
@@ -682,20 +685,20 @@ TEST_F(PaintCanvasVideoRendererTest, Y16) {
TEST_F(PaintCanvasVideoRendererTest, Yuv420P12OddWidth) {
// Allocate the Y, U, V planes for a 3x3 12-bit YUV 4:2:0 image. Note that
// there are no padding bytes after each row.
- constexpr int kWidth = 3;
- constexpr int kHeight = 3;
- constexpr int kUvWidth = (kWidth + 1) / 2;
- constexpr int kUvHeight = (kHeight + 1) / 2;
+ constexpr int kImgWidth = 3;
+ constexpr int kImgHeight = 3;
+ constexpr int kUvWidth = (kImgWidth + 1) / 2;
+ constexpr int kUvHeight = (kImgHeight + 1) / 2;
std::unique_ptr<uint16_t[]> y_plane =
- std::make_unique<uint16_t[]>(kWidth * kHeight);
+ std::make_unique<uint16_t[]>(kImgWidth * kImgHeight);
std::unique_ptr<uint16_t[]> u_plane =
std::make_unique<uint16_t[]>(kUvWidth * kUvHeight);
std::unique_ptr<uint16_t[]> v_plane =
std::make_unique<uint16_t[]>(kUvWidth * kUvHeight);
// Set all pixels to white.
- for (int i = 0; i < kHeight; ++i) {
- for (int j = 0; j < kWidth; ++j) {
- y_plane[i * kWidth + j] = 4095;
+ for (int i = 0; i < kImgHeight; ++i) {
+ for (int j = 0; j < kImgWidth; ++j) {
+ y_plane[i * kImgWidth + j] = 4095;
}
}
for (int i = 0; i < kUvHeight; ++i) {
@@ -704,25 +707,25 @@ TEST_F(PaintCanvasVideoRendererTest, Yuv420P12OddWidth) {
v_plane[i * kUvWidth + j] = 2048;
}
}
- const int32_t y_stride = sizeof(uint16_t) * kWidth;
+ const int32_t y_stride = sizeof(uint16_t) * kImgWidth;
const int32_t uv_stride = sizeof(uint16_t) * kUvWidth;
uint8_t* const y_data = reinterpret_cast<uint8_t*>(y_plane.get());
uint8_t* const u_data = reinterpret_cast<uint8_t*>(u_plane.get());
uint8_t* const v_data = reinterpret_cast<uint8_t*>(v_plane.get());
- auto size = gfx::Size(kWidth, kHeight);
+ auto size = gfx::Size(kImgWidth, kImgHeight);
scoped_refptr<VideoFrame> frame = VideoFrame::WrapExternalYuvData(
PIXEL_FORMAT_YUV420P12, size, gfx::Rect(size), size, y_stride, uv_stride,
uv_stride, y_data, u_data, v_data, base::TimeDelta());
std::unique_ptr<uint32_t[]> rgba =
- std::make_unique<uint32_t[]>(kWidth * kHeight);
+ std::make_unique<uint32_t[]>(kImgWidth * kImgHeight);
PaintCanvasVideoRenderer::ConvertVideoFrameToRGBPixels(
frame.get(), rgba.get(), frame->visible_rect().width() * 4,
/*premultiply_alpha=*/true);
- for (int i = 0; i < kHeight; ++i) {
- for (int j = 0; j < kWidth; ++j) {
- EXPECT_EQ(rgba[i * kWidth + j], 0xffffffff);
+ for (int i = 0; i < kImgHeight; ++i) {
+ for (int j = 0; j < kImgWidth; ++j) {
+ EXPECT_EQ(rgba[i * kImgWidth + j], 0xffffffff);
}
}
}
@@ -829,8 +832,7 @@ TEST_F(PaintCanvasVideoRendererTest, CorrectFrameSizeToVisibleRect) {
auto video_frame = media::VideoFrame::WrapExternalData(
media::PIXEL_FORMAT_Y16, coded_size, gfx::Rect(visible_size),
- visible_size, &memory[0], fWidth * fHeight * 2,
- base::TimeDelta::FromMilliseconds(4));
+ visible_size, &memory[0], fWidth * fHeight * 2, base::Milliseconds(4));
gfx::RectF visible_rect(visible_size.width(), visible_size.height());
cc::PaintFlags flags;
@@ -944,21 +946,23 @@ class PaintCanvasVideoRendererWithGLTest : public testing::TestWithParam<bool> {
gl::GLSurfaceTestSupport::InitializeOneOff();
enable_pixels_.emplace();
media_context_ = base::MakeRefCounted<viz::TestInProcessContextProvider>(
- /*enable_gpu_rasterization=*/false,
- /*enable_oop_rasterization=*/GetParam(), /*support_locking=*/false);
+ /*enable_gles2_interface=*/false,
+ /*support_locking=*/false,
+ GetParam() ? viz::RasterInterfaceType::OOPR
+ : viz::RasterInterfaceType::GPU);
gpu::ContextResult result = media_context_->BindToCurrentThread();
ASSERT_EQ(result, gpu::ContextResult::kSuccess);
gles2_context_ = base::MakeRefCounted<viz::TestInProcessContextProvider>(
- /*enable_gpu_rasterization=*/false,
- /*enable_oop_rasterization=*/false, /*support_locking=*/false);
+ /*enable_gles2_interface=*/true, /*support_locking=*/false,
+ viz::RasterInterfaceType::None);
result = gles2_context_->BindToCurrentThread();
ASSERT_EQ(result, gpu::ContextResult::kSuccess);
destination_context_ =
base::MakeRefCounted<viz::TestInProcessContextProvider>(
- /*enable_gpu_rasterization=*/false,
- /*enable_oop_rasterization=*/false, /*support_locking=*/false);
+ /*enable_gles2_interface=*/true, /*support_locking=*/false,
+ viz::RasterInterfaceType::None);
result = destination_context_->BindToCurrentThread();
ASSERT_EQ(result, gpu::ContextResult::kSuccess);
cropped_frame_ = CreateCroppedFrame();
diff --git a/chromium/media/renderers/renderer_impl.h b/chromium/media/renderers/renderer_impl.h
index 69abeb247da..e48a4b3f066 100644
--- a/chromium/media/renderers/renderer_impl.h
+++ b/chromium/media/renderers/renderer_impl.h
@@ -51,6 +51,9 @@ class MEDIA_EXPORT RendererImpl final : public Renderer {
std::unique_ptr<AudioRenderer> audio_renderer,
std::unique_ptr<VideoRenderer> video_renderer);
+ RendererImpl(const RendererImpl&) = delete;
+ RendererImpl& operator=(const RendererImpl&) = delete;
+
~RendererImpl() final;
// Renderer implementation.
@@ -256,9 +259,8 @@ class MEDIA_EXPORT RendererImpl final : public Renderer {
// The amount of time to wait before declaring underflow if the video renderer
// runs out of data but the audio renderer still has enough.
Tuneable<base::TimeDelta> video_underflow_threshold_ = {
- "MediaVideoUnderflowThreshold", base::TimeDelta::FromMilliseconds(1000),
- base::TimeDelta::FromMilliseconds(3000),
- base::TimeDelta::FromMilliseconds(8000)};
+ "MediaVideoUnderflowThreshold", base::Milliseconds(1000),
+ base::Milliseconds(3000), base::Milliseconds(8000)};
// Lock used to protect access to the |restarting_audio_| flag and
// |restarting_audio_time_|.
@@ -272,8 +274,6 @@ class MEDIA_EXPORT RendererImpl final : public Renderer {
base::WeakPtr<RendererImpl> weak_this_;
base::WeakPtrFactory<RendererImpl> weak_factory_{this};
-
- DISALLOW_COPY_AND_ASSIGN(RendererImpl);
};
} // namespace media
diff --git a/chromium/media/renderers/renderer_impl_unittest.cc b/chromium/media/renderers/renderer_impl_unittest.cc
index a7e80c0ca91..7d16cc84b1c 100644
--- a/chromium/media/renderers/renderer_impl_unittest.cc
+++ b/chromium/media/renderers/renderer_impl_unittest.cc
@@ -66,6 +66,10 @@ class RendererImplTest : public ::testing::Test {
class CallbackHelper : public MockRendererClient {
public:
CallbackHelper() = default;
+
+ CallbackHelper(const CallbackHelper&) = delete;
+ CallbackHelper& operator=(const CallbackHelper&) = delete;
+
virtual ~CallbackHelper() = default;
// Completion callbacks.
@@ -75,9 +79,6 @@ class RendererImplTest : public ::testing::Test {
MOCK_METHOD1(OnDurationChange, void(base::TimeDelta duration));
MOCK_METHOD0(OnVideoTrackChangeComplete, void());
MOCK_METHOD0(OnAudioTrackChangeComplete, void());
-
- private:
- DISALLOW_COPY_AND_ASSIGN(CallbackHelper);
};
RendererImplTest()
@@ -97,6 +98,9 @@ class RendererImplTest : public ::testing::Test {
EXPECT_CALL(*demuxer_, GetAllStreams()).WillRepeatedly(Return(streams_));
}
+ RendererImplTest(const RendererImplTest&) = delete;
+ RendererImplTest& operator=(const RendererImplTest&) = delete;
+
~RendererImplTest() override { Destroy(); }
protected:
@@ -244,8 +248,7 @@ class RendererImplTest : public ::testing::Test {
OnBufferingStateChange(BUFFERING_HAVE_ENOUGH,
BUFFERING_CHANGE_REASON_UNKNOWN));
- base::TimeDelta start_time(
- base::TimeDelta::FromMilliseconds(kStartPlayingTimeInMs));
+ base::TimeDelta start_time(base::Milliseconds(kStartPlayingTimeInMs));
EXPECT_CALL(time_source_, SetMediaTime(start_time));
EXPECT_CALL(time_source_, StartTicking());
@@ -295,8 +298,7 @@ class RendererImplTest : public ::testing::Test {
int64_t start_time_ms = GetMediaTimeMs();
const int64_t time_to_advance_ms = 100;
- test_tick_clock_.Advance(
- base::TimeDelta::FromMilliseconds(time_to_advance_ms));
+ test_tick_clock_.Advance(base::Milliseconds(time_to_advance_ms));
if (GetMediaTimeMs() == start_time_ms + time_to_advance_ms * playback_rate)
return true;
@@ -374,9 +376,6 @@ class RendererImplTest : public ::testing::Test {
PipelineStatus initialization_status_;
bool is_encrypted_ = false;
bool is_cdm_set_ = false;
-
- private:
- DISALLOW_COPY_AND_ASSIGN(RendererImplTest);
};
TEST_F(RendererImplTest, Destroy_BeforeInitialize) {
@@ -815,8 +814,7 @@ TEST_F(RendererImplTest, VideoUnderflowWithAudioFlush) {
Play();
// Set a massive threshold such that it shouldn't fire within this test.
- renderer_impl_->set_video_underflow_threshold_for_testing(
- base::TimeDelta::FromSeconds(100));
+ renderer_impl_->set_video_underflow_threshold_for_testing(base::Seconds(100));
// Simulate the cases where audio underflows and then video underflows.
EXPECT_CALL(time_source_, StopTicking());
diff --git a/chromium/media/renderers/shared_image_video_frame_test_utils.cc b/chromium/media/renderers/shared_image_video_frame_test_utils.cc
index 9b36170d6c1..9431cae4e27 100644
--- a/chromium/media/renderers/shared_image_video_frame_test_utils.cc
+++ b/chromium/media/renderers/shared_image_video_frame_test_utils.cc
@@ -122,8 +122,7 @@ scoped_refptr<VideoFrame> CreateSharedImageRGBAFrame(
return CreateSharedImageFrame(
std::move(context_provider), VideoPixelFormat::PIXEL_FORMAT_ABGR,
{mailbox}, sync_token, GL_TEXTURE_2D, coded_size, visible_rect,
- visible_rect.size(), base::TimeDelta::FromSeconds(1),
- std::move(destroyed_callback));
+ visible_rect.size(), base::Seconds(1), std::move(destroyed_callback));
}
scoped_refptr<VideoFrame> CreateSharedImageI420Frame(
@@ -186,7 +185,7 @@ scoped_refptr<VideoFrame> CreateSharedImageI420Frame(
return CreateSharedImageFrame(
std::move(context_provider), VideoPixelFormat::PIXEL_FORMAT_I420,
{y_mailbox, u_mailbox, v_mailbox}, sync_token, GL_TEXTURE_2D, coded_size,
- visible_rect, visible_rect.size(), base::TimeDelta::FromSeconds(1),
+ visible_rect, visible_rect.size(), base::Seconds(1),
std::move(destroyed_callback));
}
@@ -247,7 +246,7 @@ scoped_refptr<VideoFrame> CreateSharedImageNV12Frame(
return CreateSharedImageFrame(
std::move(context_provider), VideoPixelFormat::PIXEL_FORMAT_NV12,
{y_mailbox, uv_mailbox}, sync_token, GL_TEXTURE_2D, coded_size,
- visible_rect, visible_rect.size(), base::TimeDelta::FromSeconds(1),
+ visible_rect, visible_rect.size(), base::Seconds(1),
std::move(destroyed_callback));
}
diff --git a/chromium/media/renderers/video_frame_rgba_to_yuva_converter.cc b/chromium/media/renderers/video_frame_rgba_to_yuva_converter.cc
index 0d94aa8500e..58e588933f4 100644
--- a/chromium/media/renderers/video_frame_rgba_to_yuva_converter.cc
+++ b/chromium/media/renderers/video_frame_rgba_to_yuva_converter.cc
@@ -5,9 +5,11 @@
#include "media/renderers/video_frame_rgba_to_yuva_converter.h"
#include "base/logging.h"
+#include "base/memory/ptr_util.h"
#include "components/viz/common/gpu/raster_context_provider.h"
#include "components/viz/common/resources/resource_format_utils.h"
#include "gpu/command_buffer/client/raster_interface.h"
+#include "gpu/command_buffer/client/shared_image_interface.h"
#include "gpu/command_buffer/common/mailbox_holder.h"
#include "media/base/simple_sync_token_client.h"
#include "media/base/wait_and_replace_sync_token_client.h"
@@ -16,6 +18,7 @@
#include "skia/ext/rgba_to_yuva.h"
#include "third_party/skia/include/core/SkImage.h"
#include "third_party/skia/include/gpu/GrDirectContext.h"
+#include "ui/gfx/gpu_memory_buffer.h"
namespace {
@@ -71,7 +74,7 @@ class ScopedAcceleratedSkImage {
return nullptr;
}
- return absl::WrapUnique<ScopedAcceleratedSkImage>(
+ return base::WrapUnique<ScopedAcceleratedSkImage>(
new ScopedAcceleratedSkImage(provider, texture_id,
std::move(sk_image)));
}
@@ -119,50 +122,86 @@ bool CopyRGBATextureToVideoFrame(viz::RasterContextProvider* provider,
auto* ri = provider->RasterInterface();
DCHECK(ri);
- // Create an accelerated SkImage for the source.
- auto scoped_sk_image = ScopedAcceleratedSkImage::Create(
- provider, src_format, src_size, src_color_space, src_surface_origin,
- src_mailbox_holder);
- if (!scoped_sk_image) {
- DLOG(ERROR)
- << "Failed to create accelerated SkImage for RGBA to YUVA conversion.";
- return false;
- }
+ if (!provider->GrContext()) {
+ SkYUVAInfo yuva_info =
+ VideoFrameYUVMailboxesHolder::VideoFrameGetSkYUVAInfo(dst_video_frame);
+ gpu::Mailbox yuva_mailboxes[SkYUVAInfo::kMaxPlanes];
+ ri->WaitSyncTokenCHROMIUM(src_mailbox_holder.sync_token.GetConstData());
+ for (int plane = 0; plane < yuva_info.numPlanes(); ++plane) {
+ gpu::MailboxHolder dst_mailbox_holder =
+ dst_video_frame->mailbox_holder(plane);
+ ri->WaitSyncTokenCHROMIUM(dst_mailbox_holder.sync_token.GetConstData());
+ yuva_mailboxes[plane] = dst_mailbox_holder.mailbox;
+ }
+ ri->ConvertRGBAToYUVAMailboxes(
+ yuva_info.yuvColorSpace(), yuva_info.planeConfig(),
+ yuva_info.subsampling(), yuva_mailboxes, src_mailbox_holder.mailbox);
+ } else {
+ // Create an accelerated SkImage for the source.
+ auto scoped_sk_image = ScopedAcceleratedSkImage::Create(
+ provider, src_format, src_size, src_color_space, src_surface_origin,
+ src_mailbox_holder);
+ if (!scoped_sk_image) {
+ DLOG(ERROR) << "Failed to create accelerated SkImage for RGBA to YUVA "
+ "conversion.";
+ return false;
+ }
- // Create SkSurfaces for the destination planes.
- sk_sp<SkSurface> sk_surfaces[SkYUVAInfo::kMaxPlanes];
- VideoFrameYUVMailboxesHolder holder;
- if (!holder.VideoFrameToPlaneSkSurfaces(dst_video_frame, provider,
- sk_surfaces)) {
- DLOG(ERROR) << "Failed to create SkSurfaces for VideoFrame.";
- return false;
- }
+ // Create SkSurfaces for the destination planes.
+ sk_sp<SkSurface> sk_surfaces[SkYUVAInfo::kMaxPlanes];
+ SkSurface* sk_surface_ptrs[SkYUVAInfo::kMaxPlanes] = {nullptr};
+ VideoFrameYUVMailboxesHolder holder;
+ if (!holder.VideoFrameToPlaneSkSurfaces(dst_video_frame, provider,
+ sk_surfaces)) {
+ DLOG(ERROR) << "Failed to create SkSurfaces for VideoFrame.";
+ return false;
+ }
- // Make GrContext wait for `dst_video_frame`. Waiting on the mailbox tokens
- // here ensures that all writes are completed in cases where the underlying
- // GpuMemoryBuffer and SharedImage resources have been reused.
- ri->Flush();
- WaitAndReplaceSyncTokenClient client(ri);
- for (size_t plane = 0; plane < 2; ++plane)
- dst_video_frame->UpdateMailboxHolderSyncToken(plane, &client);
-
- // Do the blit.
- skia::BlitRGBAToYUVA(
- scoped_sk_image->sk_image(),
- SkRect::MakeWH(src_size.width(), src_size.height()), sk_surfaces,
- holder.yuva_info(),
- SkRect::MakeWH(holder.yuva_info().width(), holder.yuva_info().height()));
- provider->GrContext()->flushAndSubmit(false);
+ // Make GrContext wait for `dst_video_frame`. Waiting on the mailbox tokens
+ // here ensures that all writes are completed in cases where the underlying
+ // GpuMemoryBuffer and SharedImage resources have been reused.
+ ri->Flush();
+ WaitAndReplaceSyncTokenClient client(ri);
+ for (int plane = 0; plane < holder.yuva_info().numPlanes(); ++plane) {
+ sk_surface_ptrs[plane] = sk_surfaces[plane].get();
+ dst_video_frame->UpdateMailboxHolderSyncToken(plane, &client);
+ }
+
+ // Do the blit.
+ skia::BlitRGBAToYUVA(scoped_sk_image->sk_image().get(), sk_surface_ptrs,
+ holder.yuva_info());
+ provider->GrContext()->flushAndSubmit(false);
+ }
ri->Flush();
- // Set `completion_sync_token` to mark the completion of the copy.
- ri->GenSyncTokenCHROMIUM(completion_sync_token.GetData());
+ const size_t num_planes = dst_video_frame->layout().num_planes();
+
+ // For shared memory GMBs on Windows, we need to explicitly request a copy
+ // from the shared image GPU texture to the GMB. Set `completion_sync_token`
+ // to mark the completion of the copy.
+ if (dst_video_frame->HasGpuMemoryBuffer() &&
+ dst_video_frame->GetGpuMemoryBuffer()->GetType() ==
+ gfx::SHARED_MEMORY_BUFFER) {
+ auto* sii = provider->SharedImageInterface();
+
+ gpu::SyncToken blit_done_sync_token;
+ ri->GenUnverifiedSyncTokenCHROMIUM(blit_done_sync_token.GetData());
+
+ for (size_t plane = 0; plane < num_planes; ++plane) {
+ const auto& mailbox = dst_video_frame->mailbox_holder(plane).mailbox;
+ sii->CopyToGpuMemoryBuffer(blit_done_sync_token, mailbox);
+ }
+
+ completion_sync_token = sii->GenVerifiedSyncToken();
+ } else {
+ ri->GenSyncTokenCHROMIUM(completion_sync_token.GetData());
+ }
// Make access to the `dst_video_frame` wait on copy completion. We also
// update the ReleaseSyncToken here since it's used when the underlying
// GpuMemoryBuffer and SharedImage resources are returned to the pool.
SimpleSyncTokenClient simple_client(completion_sync_token);
- for (size_t plane = 0; plane < 2; ++plane)
+ for (size_t plane = 0; plane < num_planes; ++plane)
dst_video_frame->UpdateMailboxHolderSyncToken(plane, &simple_client);
dst_video_frame->UpdateReleaseSyncToken(&simple_client);
return true;
diff --git a/chromium/media/renderers/video_overlay_factory.h b/chromium/media/renderers/video_overlay_factory.h
index e11ee06557a..14eaf7e7e2b 100644
--- a/chromium/media/renderers/video_overlay_factory.h
+++ b/chromium/media/renderers/video_overlay_factory.h
@@ -23,6 +23,10 @@ class VideoFrame;
class MEDIA_EXPORT VideoOverlayFactory {
public:
VideoOverlayFactory();
+
+ VideoOverlayFactory(const VideoOverlayFactory&) = delete;
+ VideoOverlayFactory& operator=(const VideoOverlayFactory&) = delete;
+
~VideoOverlayFactory();
scoped_refptr<::media::VideoFrame> CreateFrame(const gfx::Size& size);
@@ -33,8 +37,6 @@ class MEDIA_EXPORT VideoOverlayFactory {
private:
// |overlay_plane_id_| identifies the instances of VideoOverlayFactory.
const base::UnguessableToken overlay_plane_id_;
-
- DISALLOW_COPY_AND_ASSIGN(VideoOverlayFactory);
};
} // namespace media
diff --git a/chromium/media/renderers/video_renderer_impl.cc b/chromium/media/renderers/video_renderer_impl.cc
index ad1e16b2870..7186f6c2f4e 100644
--- a/chromium/media/renderers/video_renderer_impl.cc
+++ b/chromium/media/renderers/video_renderer_impl.cc
@@ -657,8 +657,7 @@ void VideoRendererImpl::FrameReady(VideoDecoderStream::ReadResult result) {
// Update average frame duration.
base::TimeDelta frame_duration = algorithm_->average_frame_duration();
- if (frame_duration != kNoTimestamp &&
- frame_duration != base::TimeDelta::FromSeconds(0)) {
+ if (frame_duration != kNoTimestamp && frame_duration != base::Seconds(0)) {
fps_estimator_.AddSample(frame_duration);
} else {
fps_estimator_.Reset();
diff --git a/chromium/media/renderers/video_renderer_impl.h b/chromium/media/renderers/video_renderer_impl.h
index 00c4d08743f..c275ddd183e 100644
--- a/chromium/media/renderers/video_renderer_impl.h
+++ b/chromium/media/renderers/video_renderer_impl.h
@@ -61,6 +61,10 @@ class MEDIA_EXPORT VideoRendererImpl
bool drop_frames,
MediaLog* media_log,
std::unique_ptr<GpuMemoryBufferVideoFramePool> gmb_pool);
+
+ VideoRendererImpl(const VideoRendererImpl&) = delete;
+ VideoRendererImpl& operator=(const VideoRendererImpl&) = delete;
+
~VideoRendererImpl() override;
// VideoRenderer implementation.
@@ -357,8 +361,6 @@ class MEDIA_EXPORT VideoRendererImpl
// want to discard video frames that might be received after the stream has
// been reset.
base::WeakPtrFactory<VideoRendererImpl> cancel_on_flush_weak_factory_{this};
-
- DISALLOW_COPY_AND_ASSIGN(VideoRendererImpl);
};
} // namespace media
diff --git a/chromium/media/renderers/video_renderer_impl_unittest.cc b/chromium/media/renderers/video_renderer_impl_unittest.cc
index 9cebbf8bd98..9afe17711c6 100644
--- a/chromium/media/renderers/video_renderer_impl_unittest.cc
+++ b/chromium/media/renderers/video_renderer_impl_unittest.cc
@@ -86,7 +86,7 @@ class VideoRendererImplTest : public testing::Test {
simulate_decode_delay_(false),
expect_init_success_(true) {
null_video_sink_ = std::make_unique<NullVideoSink>(
- false, base::TimeDelta::FromSecondsD(1.0 / 60),
+ false, base::Seconds(1.0 / 60),
base::BindRepeating(&MockCB::FrameReceived,
base::Unretained(&mock_cb_)),
base::ThreadTaskRunnerHandle::Get());
@@ -111,6 +111,9 @@ class VideoRendererImplTest : public testing::Test {
.WillByDefault(Invoke(this, &VideoRendererImplTest::OnDemuxerRead));
}
+ VideoRendererImplTest(const VideoRendererImplTest&) = delete;
+ VideoRendererImplTest& operator=(const VideoRendererImplTest&) = delete;
+
~VideoRendererImplTest() override = default;
void Initialize() {
@@ -152,8 +155,7 @@ class VideoRendererImplTest : public testing::Test {
void StartPlayingFrom(int milliseconds) {
SCOPED_TRACE(base::StringPrintf("StartPlayingFrom(%d)", milliseconds));
- const base::TimeDelta media_time =
- base::TimeDelta::FromMilliseconds(milliseconds);
+ const base::TimeDelta media_time = base::Milliseconds(milliseconds);
time_source_.SetMediaTime(media_time);
renderer_->StartPlayingFrom(media_time);
base::RunLoop().RunUntilIdle();
@@ -233,7 +235,7 @@ class VideoRendererImplTest : public testing::Test {
gfx::Size natural_size = TestVideoConfig::NormalCodedSize();
scoped_refptr<VideoFrame> frame = VideoFrame::CreateFrame(
PIXEL_FORMAT_I420, natural_size, gfx::Rect(natural_size),
- natural_size, base::TimeDelta::FromMilliseconds(timestamp_in_ms));
+ natural_size, base::Milliseconds(timestamp_in_ms));
QueueFrame(DecodeStatus::OK, frame);
continue;
}
@@ -324,14 +326,14 @@ class VideoRendererImplTest : public testing::Test {
EXPECT_TRUE(
task_environment_.GetMainThreadTaskRunner()->BelongsToCurrentThread());
base::AutoLock l(lock_);
- tick_clock_.Advance(base::TimeDelta::FromMilliseconds(time_ms));
+ tick_clock_.Advance(base::Milliseconds(time_ms));
}
void AdvanceTimeInMs(int time_ms) {
EXPECT_TRUE(
task_environment_.GetMainThreadTaskRunner()->BelongsToCurrentThread());
base::AutoLock l(lock_);
- time_ += base::TimeDelta::FromMilliseconds(time_ms);
+ time_ += base::Milliseconds(time_ms);
time_source_.StopTicking();
time_source_.SetMediaTime(time_);
time_source_.StartTicking();
@@ -419,8 +421,6 @@ class VideoRendererImplTest : public testing::Test {
base::circular_deque<std::pair<DecodeStatus, scoped_refptr<VideoFrame>>>
decode_results_;
-
- DISALLOW_COPY_AND_ASSIGN(VideoRendererImplTest);
};
TEST_F(VideoRendererImplTest, DoNothing) {
@@ -559,7 +559,7 @@ static void VideoRendererImplTest_FlushDoneCB(VideoRendererImplTest* test,
VideoRenderer* renderer,
base::OnceClosure success_cb) {
test->QueueFrames("0 10 20 30");
- renderer->StartPlayingFrom(base::TimeDelta::FromSeconds(0));
+ renderer->StartPlayingFrom(base::Seconds(0));
std::move(success_cb).Run();
}
@@ -1018,19 +1018,19 @@ TEST_F(VideoRendererImplTest, NaturalSizeChange) {
QueueFrame(DecodeStatus::OK,
VideoFrame::CreateFrame(PIXEL_FORMAT_I420, initial_size,
gfx::Rect(initial_size), initial_size,
- base::TimeDelta::FromMilliseconds(0)));
+ base::Milliseconds(0)));
QueueFrame(DecodeStatus::OK,
VideoFrame::CreateFrame(PIXEL_FORMAT_I420, larger_size,
gfx::Rect(larger_size), larger_size,
- base::TimeDelta::FromMilliseconds(10)));
+ base::Milliseconds(10)));
QueueFrame(DecodeStatus::OK,
VideoFrame::CreateFrame(PIXEL_FORMAT_I420, larger_size,
gfx::Rect(larger_size), larger_size,
- base::TimeDelta::FromMilliseconds(20)));
+ base::Milliseconds(20)));
QueueFrame(DecodeStatus::OK,
VideoFrame::CreateFrame(PIXEL_FORMAT_I420, initial_size,
gfx::Rect(initial_size), initial_size,
- base::TimeDelta::FromMilliseconds(30)));
+ base::Milliseconds(30)));
EXPECT_CALL(mock_cb_, OnBufferingStateChange(BUFFERING_HAVE_ENOUGH, _));
EXPECT_CALL(mock_cb_, OnStatisticsUpdate(_)).Times(AnyNumber());
@@ -1084,19 +1084,19 @@ TEST_F(VideoRendererImplTest, OpacityChange) {
QueueFrame(DecodeStatus::OK,
VideoFrame::CreateFrame(non_opaque_format, frame_size,
gfx::Rect(frame_size), frame_size,
- base::TimeDelta::FromMilliseconds(0)));
+ base::Milliseconds(0)));
QueueFrame(DecodeStatus::OK,
VideoFrame::CreateFrame(non_opaque_format, frame_size,
gfx::Rect(frame_size), frame_size,
- base::TimeDelta::FromMilliseconds(10)));
- QueueFrame(DecodeStatus::OK,
- VideoFrame::CreateFrame(opaque_format, frame_size,
- gfx::Rect(frame_size), frame_size,
- base::TimeDelta::FromMilliseconds(20)));
- QueueFrame(DecodeStatus::OK,
- VideoFrame::CreateFrame(opaque_format, frame_size,
- gfx::Rect(frame_size), frame_size,
- base::TimeDelta::FromMilliseconds(30)));
+ base::Milliseconds(10)));
+ QueueFrame(
+ DecodeStatus::OK,
+ VideoFrame::CreateFrame(opaque_format, frame_size, gfx::Rect(frame_size),
+ frame_size, base::Milliseconds(20)));
+ QueueFrame(
+ DecodeStatus::OK,
+ VideoFrame::CreateFrame(opaque_format, frame_size, gfx::Rect(frame_size),
+ frame_size, base::Milliseconds(30)));
EXPECT_CALL(mock_cb_, OnBufferingStateChange(BUFFERING_HAVE_ENOUGH, _));
EXPECT_CALL(mock_cb_, OnStatisticsUpdate(_)).Times(AnyNumber());
@@ -1166,7 +1166,7 @@ TEST_F(VideoRendererImplTest, VideoFrameRateChange) {
AdvanceTimeInMs(20);
AdvanceWallclockTimeInMs(20);
// This runs the sink callbacks to consume frames.
- task_environment_.FastForwardBy(base::TimeDelta::FromMilliseconds(20));
+ task_environment_.FastForwardBy(base::Milliseconds(20));
base::RunLoop().RunUntilIdle();
}
@@ -1498,10 +1498,10 @@ TEST_F(VideoRendererLatencyHintTest, HaveEnough_HighLatencyHint) {
// We must provide a |buffer_duration_| for the latencyHint to take effect
// immediately. The VideoRendererAlgorithm will eventually provide a PTS-delta
// duration, but not until after we've started rendering.
- buffer_duration_ = base::TimeDelta::FromMilliseconds(30);
+ buffer_duration_ = base::Milliseconds(30);
// Set latencyHint to a large value.
- renderer_->SetLatencyHint(base::TimeDelta::FromMilliseconds(400));
+ renderer_->SetLatencyHint(base::Milliseconds(400));
// NOTE: other tests will SetLatencyHint after Initialize(). Either way should
// work. Initializing later is especially interesting for "high" hints because
@@ -1559,10 +1559,10 @@ TEST_F(VideoRendererLatencyHintTest,
// We must provide a |buffer_duration_| for the latencyHint to take effect
// immediately. The VideoRendererAlgorithm will eventually provide a PTS-delta
// duration, but not until after we've started rendering.
- buffer_duration_ = base::TimeDelta::FromMilliseconds(30);
+ buffer_duration_ = base::Milliseconds(30);
// Set latency hint to a medium value.
- renderer_->SetLatencyHint(base::TimeDelta::FromMilliseconds(200));
+ renderer_->SetLatencyHint(base::Milliseconds(200));
// Stall the demuxer after 7 frames.
simulate_demuxer_stall_after_n_reads_ = 7;
@@ -1618,10 +1618,10 @@ TEST_F(VideoRendererLatencyHintTest, LatencyHintOverridesLowDelay) {
// We must provide a |buffer_duration_| for the latencyHint to take effect
// immediately. The VideoRendererAlgorithm will eventually provide a PTS-delta
// duration, but not until after we've started rendering.
- buffer_duration_ = base::TimeDelta::FromMilliseconds(30);
+ buffer_duration_ = base::Milliseconds(30);
// Set latency hint to a medium value.
- renderer_->SetLatencyHint(base::TimeDelta::FromMilliseconds(200));
+ renderer_->SetLatencyHint(base::Milliseconds(200));
// Initial frames should trigger various callbacks.
EXPECT_CALL(mock_cb_, FrameReceived(HasTimestampMatcher(0)));
@@ -1678,11 +1678,11 @@ TEST_F(VideoRendererLatencyHintTest,
// We must provide a |buffer_duration_| for the latencyHint to take effect
// immediately. The VideoRendererAlgorithm will eventually provide a PTS-delta
// duration, but not until after we've started rendering.
- buffer_duration_ = base::TimeDelta::FromMilliseconds(30);
+ buffer_duration_ = base::Milliseconds(30);
// Set latency hint to a medium value. At a spacing of 30ms this would set
// the HAVE_ENOUGH threshold to 4 frames.
- renderer_->SetLatencyHint(base::TimeDelta::FromMilliseconds(200));
+ renderer_->SetLatencyHint(base::Milliseconds(200));
// Initial frames should trigger various callbacks.
EXPECT_CALL(mock_cb_, FrameReceived(HasTimestampMatcher(0)));
diff --git a/chromium/media/renderers/video_resource_updater.cc b/chromium/media/renderers/video_resource_updater.cc
index 80cb1a4575d..8270054bbdf 100644
--- a/chromium/media/renderers/video_resource_updater.cc
+++ b/chromium/media/renderers/video_resource_updater.cc
@@ -50,7 +50,7 @@
#include "third_party/libyuv/include/libyuv.h"
#include "third_party/skia/include/core/SkCanvas.h"
#include "ui/gfx/geometry/size_conversions.h"
-#include "ui/gfx/skia_util.h"
+#include "ui/gfx/geometry/skia_conversions.h"
#include "ui/gl/gl_enums.h"
#include "ui/gl/trace_util.h"
@@ -184,6 +184,10 @@ class SyncTokenClientImpl : public VideoFrame::SyncTokenClient {
// Only one interface should be used.
DCHECK((gl_ && !sii_) || (!gl_ && sii_));
}
+
+ SyncTokenClientImpl(const SyncTokenClientImpl&) = delete;
+ SyncTokenClientImpl& operator=(const SyncTokenClientImpl&) = delete;
+
~SyncTokenClientImpl() override = default;
void GenerateSyncToken(gpu::SyncToken* sync_token) override {
@@ -220,7 +224,6 @@ class SyncTokenClientImpl : public VideoFrame::SyncTokenClient {
gpu::gles2::GLES2Interface* gl_;
gpu::SharedImageInterface* sii_;
gpu::SyncToken sync_token_;
- DISALLOW_COPY_AND_ASSIGN(SyncTokenClientImpl);
};
// Sync tokens passed downstream to the compositor can be unverified.
@@ -265,6 +268,10 @@ class VideoResourceUpdater::PlaneResource {
resource_size_(resource_size),
resource_format_(resource_format),
is_software_(is_software) {}
+
+ PlaneResource(const PlaneResource&) = delete;
+ PlaneResource& operator=(const PlaneResource&) = delete;
+
virtual ~PlaneResource() = default;
// Casts |this| to SoftwarePlaneResource for software compositing.
@@ -316,8 +323,6 @@ class VideoResourceUpdater::PlaneResource {
size_t plane_index_ = 0u;
// Indicates if the above two members have been set or not.
bool has_unique_frame_id_and_plane_index_ = false;
-
- DISALLOW_COPY_AND_ASSIGN(PlaneResource);
};
class VideoResourceUpdater::SoftwarePlaneResource
@@ -342,6 +347,10 @@ class VideoResourceUpdater::SoftwarePlaneResource
shared_bitmap_reporter_->DidAllocateSharedBitmap(std::move(shm.region),
shared_bitmap_id_);
}
+
+ SoftwarePlaneResource(const SoftwarePlaneResource&) = delete;
+ SoftwarePlaneResource& operator=(const SoftwarePlaneResource&) = delete;
+
~SoftwarePlaneResource() override {
shared_bitmap_reporter_->DidDeleteSharedBitmap(shared_bitmap_id_);
}
@@ -360,8 +369,6 @@ class VideoResourceUpdater::SoftwarePlaneResource
viz::SharedBitmapReporter* const shared_bitmap_reporter_;
const viz::SharedBitmapId shared_bitmap_id_;
base::WritableSharedMemoryMapping shared_mapping_;
-
- DISALLOW_COPY_AND_ASSIGN(SoftwarePlaneResource);
};
class VideoResourceUpdater::HardwarePlaneResource
diff --git a/chromium/media/renderers/video_resource_updater.h b/chromium/media/renderers/video_resource_updater.h
index d2ad861f59a..37d9297d1e7 100644
--- a/chromium/media/renderers/video_resource_updater.h
+++ b/chromium/media/renderers/video_resource_updater.h
@@ -101,6 +101,9 @@ class MEDIA_EXPORT VideoResourceUpdater
bool use_r16_texture,
int max_resource_size);
+ VideoResourceUpdater(const VideoResourceUpdater&) = delete;
+ VideoResourceUpdater& operator=(const VideoResourceUpdater&) = delete;
+
~VideoResourceUpdater() override;
// For each CompositorFrame the following sequence is expected:
@@ -244,8 +247,6 @@ class MEDIA_EXPORT VideoResourceUpdater
std::vector<std::unique_ptr<PlaneResource>> all_resources_;
base::WeakPtrFactory<VideoResourceUpdater> weak_ptr_factory_{this};
-
- DISALLOW_COPY_AND_ASSIGN(VideoResourceUpdater);
};
} // namespace media
diff --git a/chromium/media/renderers/video_resource_updater_unittest.cc b/chromium/media/renderers/video_resource_updater_unittest.cc
index 32dee8d7ec7..6e8d7c78313 100644
--- a/chromium/media/renderers/video_resource_updater_unittest.cc
+++ b/chromium/media/renderers/video_resource_updater_unittest.cc
@@ -383,7 +383,7 @@ TEST_F(VideoResourceUpdaterTest, WonkySoftwareFrameSoftwareCompositor) {
TEST_F(VideoResourceUpdaterTest, ReuseResource) {
std::unique_ptr<VideoResourceUpdater> updater = CreateUpdaterForHardware();
scoped_refptr<VideoFrame> video_frame = CreateTestYUVVideoFrame();
- video_frame->set_timestamp(base::TimeDelta::FromSeconds(1234));
+ video_frame->set_timestamp(base::Seconds(1234));
// Allocate the resources for a YUV video frame.
gl_->ResetUploadCount();
@@ -413,7 +413,7 @@ TEST_F(VideoResourceUpdaterTest, ReuseResource) {
TEST_F(VideoResourceUpdaterTest, ReuseResourceNoDelete) {
std::unique_ptr<VideoResourceUpdater> updater = CreateUpdaterForHardware();
scoped_refptr<VideoFrame> video_frame = CreateTestYUVVideoFrame();
- video_frame->set_timestamp(base::TimeDelta::FromSeconds(1234));
+ video_frame->set_timestamp(base::Seconds(1234));
// Allocate the resources for a YUV video frame.
gl_->ResetUploadCount();
@@ -458,7 +458,7 @@ TEST_F(VideoResourceUpdaterTest, SoftwareFrameRGBSoftwareCompositor) {
TEST_F(VideoResourceUpdaterTest, ReuseResourceSoftwareCompositor) {
std::unique_ptr<VideoResourceUpdater> updater = CreateUpdaterForSoftware();
scoped_refptr<VideoFrame> video_frame = CreateTestYUVVideoFrame();
- video_frame->set_timestamp(base::TimeDelta::FromSeconds(1234));
+ video_frame->set_timestamp(base::Seconds(1234));
// Allocate the resources for a software video frame.
VideoFrameExternalResources resources =
@@ -487,7 +487,7 @@ TEST_F(VideoResourceUpdaterTest, ReuseResourceSoftwareCompositor) {
TEST_F(VideoResourceUpdaterTest, ReuseResourceNoDeleteSoftwareCompositor) {
std::unique_ptr<VideoResourceUpdater> updater = CreateUpdaterForSoftware();
scoped_refptr<VideoFrame> video_frame = CreateTestYUVVideoFrame();
- video_frame->set_timestamp(base::TimeDelta::FromSeconds(1234));
+ video_frame->set_timestamp(base::Seconds(1234));
// Allocate the resources for a software video frame.
VideoFrameExternalResources resources =
diff --git a/chromium/media/renderers/win/media_engine_notify_impl.cc b/chromium/media/renderers/win/media_engine_notify_impl.cc
index da71066f543..18220d53015 100644
--- a/chromium/media/renderers/win/media_engine_notify_impl.cc
+++ b/chromium/media/renderers/win/media_engine_notify_impl.cc
@@ -132,7 +132,7 @@ HRESULT MediaEngineNotifyImpl::EventNotify(DWORD event_code,
MF_MEDIA_ENGINE_ERR error = static_cast<MF_MEDIA_ENGINE_ERR>(param1);
HRESULT hr = param2;
LOG(ERROR) << __func__ << ": error=" << error << ", hr=" << PrintHr(hr);
- error_cb_.Run(MediaEngineErrorToPipelineStatus(error, hr));
+ error_cb_.Run(MediaEngineErrorToPipelineStatus(error, hr), hr);
break;
}
case MF_MEDIA_ENGINE_EVENT_ENDED:
diff --git a/chromium/media/renderers/win/media_engine_notify_impl.h b/chromium/media/renderers/win/media_engine_notify_impl.h
index cf3cafc6045..fd03064be03 100644
--- a/chromium/media/renderers/win/media_engine_notify_impl.h
+++ b/chromium/media/renderers/win/media_engine_notify_impl.h
@@ -15,13 +15,6 @@
namespace media {
-using ErrorCB = base::RepeatingCallback<void(PipelineStatus)>;
-using EndedCB = base::RepeatingClosure;
-using BufferingStateChangedCB =
- base::RepeatingCallback<void(BufferingState, BufferingStateChangeReason)>;
-using VideoNaturalSizeChangedCB = base::RepeatingClosure;
-using TimeUpdateCB = base::RepeatingClosure;
-
// Implements IMFMediaEngineNotify required by IMFMediaEngine
// (https://docs.microsoft.com/en-us/windows/win32/api/mfmediaengine/nn-mfmediaengine-imfmediaengine).
//
@@ -34,6 +27,13 @@ class MediaEngineNotifyImpl
MediaEngineNotifyImpl();
~MediaEngineNotifyImpl() override;
+ using ErrorCB = base::RepeatingCallback<void(PipelineStatus, HRESULT)>;
+ using EndedCB = base::RepeatingClosure;
+ using BufferingStateChangedCB =
+ base::RepeatingCallback<void(BufferingState, BufferingStateChangeReason)>;
+ using VideoNaturalSizeChangedCB = base::RepeatingClosure;
+ using TimeUpdateCB = base::RepeatingClosure;
+
HRESULT RuntimeClassInitialize(
ErrorCB error_cb,
EndedCB ended_cb,
diff --git a/chromium/media/renderers/win/media_foundation_audio_stream.cc b/chromium/media/renderers/win/media_foundation_audio_stream.cc
index 5a211ae8b41..c3fca8cf272 100644
--- a/chromium/media/renderers/win/media_foundation_audio_stream.cc
+++ b/chromium/media/renderers/win/media_foundation_audio_stream.cc
@@ -37,37 +37,37 @@ GUID AudioCodecToMediaFoundationSubtype(AudioCodec codec) {
DVLOG(1) << __func__ << ": codec=" << codec;
switch (codec) {
- case kCodecAAC:
+ case AudioCodec::kAAC:
return MFAudioFormat_AAC;
- case kCodecMP3:
+ case AudioCodec::kMP3:
return MFAudioFormat_MP3;
- case kCodecPCM:
+ case AudioCodec::kPCM:
return MFAudioFormat_PCM;
- case kCodecVorbis:
+ case AudioCodec::kVorbis:
return MFAudioFormat_Vorbis;
- case kCodecFLAC:
+ case AudioCodec::kFLAC:
return MFAudioFormat_FLAC;
- case kCodecAMR_NB:
+ case AudioCodec::kAMR_NB:
return MFAudioFormat_AMR_NB;
- case kCodecAMR_WB:
+ case AudioCodec::kAMR_WB:
return MFAudioFormat_AMR_WB;
- case kCodecPCM_MULAW:
+ case AudioCodec::kPCM_MULAW:
return MediaFoundationSubTypeFromWaveFormat(WAVE_FORMAT_MULAW);
- case kCodecGSM_MS:
+ case AudioCodec::kGSM_MS:
return MediaFoundationSubTypeFromWaveFormat(WAVE_FORMAT_GSM610);
- case kCodecPCM_S16BE:
+ case AudioCodec::kPCM_S16BE:
return MFAudioFormat_PCM;
- case kCodecPCM_S24BE:
+ case AudioCodec::kPCM_S24BE:
return MFAudioFormat_PCM;
- case kCodecOpus:
+ case AudioCodec::kOpus:
return MFAudioFormat_Opus;
- case kCodecEAC3:
+ case AudioCodec::kEAC3:
return MFAudioFormat_Dolby_DDPlus;
- case kCodecPCM_ALAW:
+ case AudioCodec::kPCM_ALAW:
return MediaFoundationSubTypeFromWaveFormat(WAVE_FORMAT_ALAW);
- case kCodecALAC:
+ case AudioCodec::kALAC:
return MFAudioFormat_ALAC;
- case kCodecAC3:
+ case AudioCodec::kAC3:
return MFAudioFormat_Dolby_AC3;
default:
return GUID_NULL;
@@ -76,9 +76,9 @@ GUID AudioCodecToMediaFoundationSubtype(AudioCodec codec) {
bool IsUncompressedAudio(AudioCodec codec) {
switch (codec) {
- case kCodecPCM:
- case kCodecPCM_S16BE:
- case kCodecPCM_S24BE:
+ case AudioCodec::kPCM:
+ case AudioCodec::kPCM_S16BE:
+ case AudioCodec::kPCM_S24BE:
return true;
default:
return false;
@@ -155,8 +155,11 @@ HRESULT GetAacAudioType(const AudioDecoderConfig decoder_config,
ComPtr<IMFMediaType> media_type;
RETURN_IF_FAILED(GetDefaultAudioType(decoder_config, &media_type));
- size_t wave_format_size =
- sizeof(HEAACWAVEINFO) + decoder_config.extra_data().size();
+ // On Windows `extra_data` is not populated for AAC in `decoder_config`. Use
+ // `aac_extra_data` instead. See crbug.com/1245123.
+ const auto& extra_data = decoder_config.aac_extra_data();
+
+ size_t wave_format_size = sizeof(HEAACWAVEINFO) + extra_data.size();
std::vector<uint8_t> wave_format_buffer(wave_format_size);
HEAACWAVEINFO* aac_wave_format =
reinterpret_cast<HEAACWAVEINFO*>(wave_format_buffer.data());
@@ -178,10 +181,9 @@ HRESULT GetAacAudioType(const AudioDecoderConfig decoder_config,
aac_wave_format->wReserved1 = 0;
aac_wave_format->dwReserved2 = 0;
- if (decoder_config.extra_data().size() > 0) {
+ if (!extra_data.empty()) {
memcpy(reinterpret_cast<uint8_t*>(aac_wave_format) + sizeof(HEAACWAVEINFO),
- decoder_config.extra_data().data(),
- decoder_config.extra_data().size());
+ extra_data.data(), extra_data.size());
}
RETURN_IF_FAILED(MFInitMediaTypeFromWaveFormatEx(
@@ -199,6 +201,7 @@ HRESULT MediaFoundationAudioStream::Create(
int stream_id,
IMFMediaSource* parent_source,
DemuxerStream* demuxer_stream,
+ std::unique_ptr<MediaLog> media_log,
MediaFoundationStreamWrapper** stream_out) {
DVLOG(1) << __func__ << ": stream_id=" << stream_id;
@@ -206,14 +209,16 @@ HRESULT MediaFoundationAudioStream::Create(
AudioCodec codec = demuxer_stream->audio_decoder_config().codec();
switch (codec) {
#if BUILDFLAG(USE_PROPRIETARY_CODECS)
- case kCodecAAC:
+ case AudioCodec::kAAC:
RETURN_IF_FAILED(MakeAndInitialize<MediaFoundationAACAudioStream>(
- &audio_stream, stream_id, parent_source, demuxer_stream));
+ &audio_stream, stream_id, parent_source, demuxer_stream,
+ std::move(media_log)));
break;
#endif // BUILDFLAG(USE_PROPRIETARY_CODECS)
default:
RETURN_IF_FAILED(MakeAndInitialize<MediaFoundationAudioStream>(
- &audio_stream, stream_id, parent_source, demuxer_stream));
+ &audio_stream, stream_id, parent_source, demuxer_stream,
+ std::move(media_log)));
break;
}
*stream_out =
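
The codec switches in this file (and in the video-stream hunks further down) also pick up the enum-class migration: the unscoped kCodecXxx constants become scoped enumerators such as AudioCodec::kAAC and VideoCodec::kH264. A standalone sketch of the pattern, using an illustrative subset rather than the real enums from media/base/:

    // Illustrative subset only, not the real media::AudioCodec definition.
    enum class AudioCodec { kAAC, kMP3, kOpus };

    const char* CodecNameSketch(AudioCodec codec) {
      switch (codec) {
        // Unscoped spelling before this patch: case kCodecAAC:
        case AudioCodec::kAAC:
          return "aac";
        case AudioCodec::kMP3:
          return "mp3";
        case AudioCodec::kOpus:
          return "opus";
      }
      return "unknown";
    }
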
diff --git a/chromium/media/renderers/win/media_foundation_audio_stream.h b/chromium/media/renderers/win/media_foundation_audio_stream.h
index ebac24d07fb..1d281a3f862 100644
--- a/chromium/media/renderers/win/media_foundation_audio_stream.h
+++ b/chromium/media/renderers/win/media_foundation_audio_stream.h
@@ -10,6 +10,7 @@
#include "media/renderers/win/media_foundation_stream_wrapper.h"
+#include "media/base/media_log.h"
#include "media/media_buildflags.h"
namespace media {
@@ -20,6 +21,7 @@ class MediaFoundationAudioStream : public MediaFoundationStreamWrapper {
static HRESULT Create(int stream_id,
IMFMediaSource* parent_source,
DemuxerStream* demuxer_stream,
+ std::unique_ptr<MediaLog> media_log,
MediaFoundationStreamWrapper** stream_out);
bool IsEncrypted() const override;
HRESULT GetMediaType(IMFMediaType** media_type_out) override;
diff --git a/chromium/media/renderers/win/media_foundation_protection_manager.cc b/chromium/media/renderers/win/media_foundation_protection_manager.cc
index e19382e57e6..d9c7b087745 100644
--- a/chromium/media/renderers/win/media_foundation_protection_manager.cc
+++ b/chromium/media/renderers/win/media_foundation_protection_manager.cc
@@ -203,7 +203,7 @@ void MediaFoundationProtectionManager::OnBeginEnableContent() {
// If EnableContent takes too long, report waiting for key status. Choose a
// timeout of 500ms to be on the safe side, e.g. on slower machines.
- const auto kWaitingForKeyTimeOut = base::TimeDelta::FromMilliseconds(500);
+ const auto kWaitingForKeyTimeOut = base::Milliseconds(500);
waiting_for_key_time_out_cb_.Reset(
base::BindOnce(&MediaFoundationProtectionManager::OnWaitingForKeyTimeOut,
diff --git a/chromium/media/renderers/win/media_foundation_renderer.cc b/chromium/media/renderers/win/media_foundation_renderer.cc
index 196d161dfa7..5a7c89ac2df 100644
--- a/chromium/media/renderers/win/media_foundation_renderer.cc
+++ b/chromium/media/renderers/win/media_foundation_renderer.cc
@@ -11,6 +11,7 @@
#include "base/callback_helpers.h"
#include "base/guid.h"
+#include "base/metrics/histogram_functions.h"
#include "base/numerics/safe_conversions.h"
#include "base/process/process_handle.h"
#include "base/strings/string_number_conversions.h"
@@ -22,6 +23,7 @@
#include "base/win/wrapped_window_proc.h"
#include "media/base/bind_to_current_loop.h"
#include "media/base/cdm_context.h"
+#include "media/base/media_log.h"
#include "media/base/timestamp_constants.h"
#include "media/base/win/mf_helpers.h"
#include "media/base/win/mf_initializer.h"
@@ -72,8 +74,10 @@ bool MediaFoundationRenderer::IsSupported() {
MediaFoundationRenderer::MediaFoundationRenderer(
scoped_refptr<base::SequencedTaskRunner> task_runner,
+ std::unique_ptr<MediaLog> media_log,
bool force_dcomp_mode_for_testing)
: task_runner_(std::move(task_runner)),
+ media_log_(std::move(media_log)),
force_dcomp_mode_for_testing_(force_dcomp_mode_for_testing) {
DVLOG_FUNC(1);
}
@@ -191,7 +195,7 @@ HRESULT MediaFoundationRenderer::CreateMediaEngine(
}
RETURN_IF_FAILED(MakeAndInitialize<MediaFoundationSourceWrapper>(
- &mf_source_, media_resource, task_runner_));
+ &mf_source_, media_resource, media_log_.get(), task_runner_));
if (force_dcomp_mode_for_testing_)
ignore_result(SetDCompModeInternal());
@@ -268,6 +272,7 @@ HRESULT MediaFoundationRenderer::InitializeDXGIDeviceManager() {
D3D11CreateDevice(nullptr, D3D_DRIVER_TYPE_HARDWARE, 0, creation_flags,
feature_levels, base::size(feature_levels),
D3D11_SDK_VERSION, &d3d11_device, nullptr, nullptr));
+ RETURN_IF_FAILED(media::SetDebugName(d3d11_device.Get(), "Media_MFRenderer"));
ComPtr<ID3D10Multithread> multithreaded_device;
RETURN_IF_FAILED(d3d11_device.As(&multithreaded_device));
@@ -452,27 +457,32 @@ void MediaFoundationRenderer::SetVideoStreamEnabled(bool enabled) {
}
}
-void MediaFoundationRenderer::SetOutputParams(const gfx::Rect& output_rect) {
+void MediaFoundationRenderer::SetOutputRect(const gfx::Rect& output_rect,
+ SetOutputRectCB callback) {
DVLOG_FUNC(2);
- output_rect_ = output_rect;
-
if (virtual_video_window_ &&
!::SetWindowPos(virtual_video_window_, HWND_BOTTOM, output_rect.x(),
output_rect.y(), output_rect.width(),
output_rect.height(), SWP_NOACTIVATE)) {
DLOG(ERROR) << "Failed to SetWindowPos: "
<< PrintHr(HRESULT_FROM_WIN32(GetLastError()));
+ std::move(callback).Run(false);
+ return;
+ }
+
+ if (FAILED(UpdateVideoStream(output_rect))) {
+ std::move(callback).Run(false);
return;
}
- ignore_result(UpdateVideoStream(output_rect));
+ std::move(callback).Run(true);
}
HRESULT MediaFoundationRenderer::UpdateVideoStream(const gfx::Rect& rect) {
ComPtr<IMFMediaEngineEx> mf_media_engine_ex;
RETURN_IF_FAILED(mf_media_engine_.As(&mf_media_engine_ex));
- RECT dest_rect = rect.ToRECT();
+ RECT dest_rect = {0, 0, rect.width(), rect.height()};
RETURN_IF_FAILED(mf_media_engine_ex->UpdateVideoStream(
/*pSrc=*/nullptr, &dest_rect, /*pBorderClr=*/nullptr));
return S_OK;
@@ -518,7 +528,8 @@ void MediaFoundationRenderer::SendStatistics() {
PipelineStatistics new_stats = {};
HRESULT hr = PopulateStatistics(new_stats);
if (FAILED(hr)) {
- DVLOG_FUNC(3) << "Unable to populate pipeline stats: " << PrintHr(hr);
+ LIMITED_MEDIA_LOG(INFO, media_log_, populate_statistics_failure_count_, 3)
+ << "MediaFoundationRenderer failed to populate stats: " + PrintHr(hr);
return;
}
@@ -530,8 +541,7 @@ void MediaFoundationRenderer::SendStatistics() {
void MediaFoundationRenderer::StartSendingStatistics() {
DVLOG_FUNC(2);
- const auto kPipelineStatsPollingPeriod =
- base::TimeDelta::FromMilliseconds(500);
+ const auto kPipelineStatsPollingPeriod = base::Milliseconds(500);
statistics_timer_.Start(FROM_HERE, kPipelineStatsPollingPeriod, this,
&MediaFoundationRenderer::SendStatistics);
}
@@ -557,18 +567,25 @@ base::TimeDelta MediaFoundationRenderer::GetMediaTime() {
double current_time = mf_media_engine_->GetCurrentTime();
// Restore macro definition.
#define GetCurrentTime() GetTickCount()
- auto media_time = base::TimeDelta::FromSecondsD(current_time);
+ auto media_time = base::Seconds(current_time);
DVLOG_FUNC(3) << "media_time=" << media_time;
return media_time;
}
-void MediaFoundationRenderer::OnPlaybackError(PipelineStatus status) {
- DVLOG_FUNC(1) << "status=" << status;
+void MediaFoundationRenderer::OnPlaybackError(PipelineStatus status,
+ HRESULT hr) {
+ DVLOG_FUNC(1) << "status=" << status << ", hr=" << hr;
DCHECK(task_runner_->RunsTasksInCurrentSequence());
+ base::UmaHistogramSparse("Media.MediaFoundationRenderer.PlaybackError", hr);
+
if (status == PIPELINE_ERROR_HARDWARE_CONTEXT_RESET && cdm_proxy_)
cdm_proxy_->OnHardwareContextReset();
+ MEDIA_LOG(ERROR, media_log_)
+ << "MediaFoundationRenderer OnPlaybackError: " << status << ", "
+ << PrintHr(hr);
+
renderer_client_->OnError(status);
StopSendingStatistics();
}
@@ -628,9 +645,14 @@ void MediaFoundationRenderer::OnVideoNaturalSizeChange() {
base::checked_cast<int>(native_height)};
}
- // If `output_rect_` is not available yet, use `native_video_size_` for now.
- if (output_rect_.IsEmpty())
- ignore_result(UpdateVideoStream(gfx::Rect(native_video_size_)));
+  // TODO(frankli): Let test code call `UpdateVideoStream()`.
+ if (force_dcomp_mode_for_testing_) {
+ const gfx::Rect test_rect(/*x=*/0, /*y=*/0, /*width=*/640, /*height=*/320);
+ // This invokes IMFMediaEngineEx::UpdateVideoStream() for video frames to
+ // be presented. Otherwise, the Media Foundation video renderer will not
+ // request video samples from our source.
+ ignore_result(UpdateVideoStream(test_rect));
+ }
renderer_client_->OnVideoNaturalSizeChange(native_video_size_);
}
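
The renderer's resize path changes shape here: SetOutputParams() becomes SetOutputRect(), which now reports through a one-shot callback whether the SetWindowPos() and IMFMediaEngineEx::UpdateVideoStream() steps succeeded. A hypothetical caller-side sketch, assuming only the SetOutputRectCB alias declared in the media_foundation_renderer_extension.h hunk below and Chromium's base::BindOnce:

    #include "base/bind.h"
    #include "base/logging.h"
    #include "media/renderers/win/media_foundation_renderer_extension.h"
    #include "ui/gfx/geometry/rect.h"

    // `renderer` is assumed to implement MediaFoundationRendererExtension;
    // the capture-less lambda converts to a base::OnceCallback<void(bool)>.
    void ResizeVideoOutputSketch(
        media::MediaFoundationRendererExtension* renderer,
        const gfx::Rect& output_rect) {
      renderer->SetOutputRect(output_rect,
                              base::BindOnce([](bool success) {
                                LOG_IF(ERROR, !success)
                                    << "SetOutputRect failed";
                              }));
    }
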
diff --git a/chromium/media/renderers/win/media_foundation_renderer.h b/chromium/media/renderers/win/media_foundation_renderer.h
index 95d4d0499ed..5b34e30dc51 100644
--- a/chromium/media/renderers/win/media_foundation_renderer.h
+++ b/chromium/media/renderers/win/media_foundation_renderer.h
@@ -11,7 +11,6 @@
#include <wrl.h>
#include "base/callback.h"
-#include "base/macros.h"
#include "base/memory/scoped_refptr.h"
#include "base/memory/weak_ptr.h"
#include "base/sequenced_task_runner.h"
@@ -32,6 +31,8 @@
namespace media {
+class MediaLog;
+
// MediaFoundationRenderer bridges the Renderer and Windows MFMediaEngine
// interfaces.
class MEDIA_EXPORT MediaFoundationRenderer
@@ -42,13 +43,12 @@ class MEDIA_EXPORT MediaFoundationRenderer
static bool IsSupported();
MediaFoundationRenderer(scoped_refptr<base::SequencedTaskRunner> task_runner,
+ std::unique_ptr<MediaLog> media_log,
bool force_dcomp_mode_for_testing = false);
-
+ MediaFoundationRenderer(const MediaFoundationRenderer&) = delete;
+ MediaFoundationRenderer& operator=(const MediaFoundationRenderer&) = delete;
~MediaFoundationRenderer() override;
- // TODO(frankli): naming: Change DComp into DirectComposition for interface
- // method names in a separate CL.
-
// Renderer implementation.
void Initialize(MediaResource* media_resource,
RendererClient* client,
@@ -64,7 +64,8 @@ class MEDIA_EXPORT MediaFoundationRenderer
// MediaFoundationRendererExtension implementation.
void GetDCompSurface(GetDCompSurfaceCB callback) override;
void SetVideoStreamEnabled(bool enabled) override;
- void SetOutputParams(const gfx::Rect& output_rect) override;
+ void SetOutputRect(const gfx::Rect& output_rect,
+ SetOutputRectCB callback) override;
private:
HRESULT CreateMediaEngine(MediaResource* media_resource);
@@ -77,8 +78,8 @@ class MEDIA_EXPORT MediaFoundationRenderer
void StartSendingStatistics();
void StopSendingStatistics();
- // Callbacks for |mf_media_engine_notify_|.
- void OnPlaybackError(PipelineStatus status);
+ // Callbacks for `mf_media_engine_notify_`.
+ void OnPlaybackError(PipelineStatus status, HRESULT hr);
void OnPlaybackEnded();
void OnBufferingStateChange(BufferingState state,
BufferingStateChangeReason reason);
@@ -98,7 +99,10 @@ class MEDIA_EXPORT MediaFoundationRenderer
// Renderer methods are running in the same sequence.
scoped_refptr<base::SequencedTaskRunner> task_runner_;
- // Once set, will force |mf_media_engine_| to use DirectComposition mode.
+ // Used to report media logs. Can be called on any thread.
+ std::unique_ptr<MediaLog> media_log_;
+
+ // Once set, will force `mf_media_engine_` to use DirectComposition mode.
// This is used for testing.
const bool force_dcomp_mode_for_testing_;
@@ -118,9 +122,6 @@ class MEDIA_EXPORT MediaFoundationRenderer
// This is the same as "natural_size" in Chromium.
gfx::Size native_video_size_;
- // The actual output Rect for video.
- gfx::Rect output_rect_;
-
// Keep the last volume value being set.
float volume_ = 1.0;
@@ -131,6 +132,10 @@ class MEDIA_EXPORT MediaFoundationRenderer
PipelineStatistics statistics_ = {};
base::RepeatingTimer statistics_timer_;
+ // Tracks the number of MEDIA_LOGs emitted for failure to populate statistics.
+ // Useful to prevent log spam.
+ int populate_statistics_failure_count_ = 0;
+
// A fake window handle passed to MF-based rendering pipeline for OPM.
HWND virtual_video_window_ = nullptr;
@@ -143,8 +148,6 @@ class MEDIA_EXPORT MediaFoundationRenderer
// NOTE: Weak pointers must be invalidated before all other member variables.
base::WeakPtrFactory<MediaFoundationRenderer> weak_factory_{this};
-
- DISALLOW_COPY_AND_ASSIGN(MediaFoundationRenderer);
};
} // namespace media
diff --git a/chromium/media/renderers/win/media_foundation_renderer_extension.h b/chromium/media/renderers/win/media_foundation_renderer_extension.h
index 5162dc6a934..1f48b40c59f 100644
--- a/chromium/media/renderers/win/media_foundation_renderer_extension.h
+++ b/chromium/media/renderers/win/media_foundation_renderer_extension.h
@@ -32,7 +32,9 @@ class MEDIA_EXPORT MediaFoundationRendererExtension {
virtual void SetVideoStreamEnabled(bool enabled) = 0;
// Notifies renderer of output composition parameters.
- virtual void SetOutputParams(const ::gfx::Rect& rect) = 0;
+ using SetOutputRectCB = base::OnceCallback<void(bool)>;
+ virtual void SetOutputRect(const ::gfx::Rect& rect,
+ SetOutputRectCB callback) = 0;
};
} // namespace media
diff --git a/chromium/media/renderers/win/media_foundation_renderer_integration_test.cc b/chromium/media/renderers/win/media_foundation_renderer_integration_test.cc
index 6d0bedb8577..257042270a0 100644
--- a/chromium/media/renderers/win/media_foundation_renderer_integration_test.cc
+++ b/chromium/media/renderers/win/media_foundation_renderer_integration_test.cc
@@ -8,6 +8,8 @@
#include <mfapi.h>
+#include "base/win/windows_version.h"
+#include "media/base/media_util.h"
#include "media/test/pipeline_integration_test_base.h"
#include "media/test/test_media_source.h"
@@ -64,6 +66,7 @@ class MediaFoundationRendererIntegrationTest
absl::optional<RendererType> /*renderer_type*/) {
auto renderer = std::make_unique<MediaFoundationRenderer>(
task_environment_.GetMainThreadTaskRunner(),
+ std::make_unique<NullMediaLog>(),
/*force_dcomp_mode_for_testing=*/true);
return renderer;
}
@@ -72,6 +75,11 @@ class MediaFoundationRendererIntegrationTest
};
TEST_F(MediaFoundationRendererIntegrationTest, BasicPlayback) {
+ // TODO(crbug.com/1240681): This test is very flaky on win10-20h2.
+ if (base::win::OSInfo::GetInstance()->version() >=
+ base::win::Version::WIN10_20H2) {
+ GTEST_SKIP() << "Skipping test for WIN10_20H2 and greater";
+ }
if (!CanDecodeVp9())
return;
@@ -81,6 +89,11 @@ TEST_F(MediaFoundationRendererIntegrationTest, BasicPlayback) {
}
TEST_F(MediaFoundationRendererIntegrationTest, BasicPlayback_MediaSource) {
+ // TODO(crbug.com/1240681): This test is very flaky on win10-20h2.
+ if (base::win::OSInfo::GetInstance()->version() >=
+ base::win::Version::WIN10_20H2) {
+ GTEST_SKIP() << "Skipping test for WIN10_20H2 and greater";
+ }
if (!CanDecodeVp9())
return;
diff --git a/chromium/media/renderers/win/media_foundation_renderer_unittest.cc b/chromium/media/renderers/win/media_foundation_renderer_unittest.cc
index 8678ae691f6..07b7f19dd25 100644
--- a/chromium/media/renderers/win/media_foundation_renderer_unittest.cc
+++ b/chromium/media/renderers/win/media_foundation_renderer_unittest.cc
@@ -15,6 +15,7 @@
#include "base/win/scoped_com_initializer.h"
#include "media/base/bind_to_current_loop.h"
#include "media/base/demuxer_stream.h"
+#include "media/base/media_util.h"
#include "media/base/mock_filters.h"
#include "media/base/test_helpers.h"
#include "media/base/win/test_utils.h"
@@ -103,7 +104,8 @@ class MediaFoundationRendererTest : public testing::Test {
&pmp_server_);
mf_renderer_ = std::make_unique<MediaFoundationRenderer>(
- task_environment_.GetMainThreadTaskRunner());
+ task_environment_.GetMainThreadTaskRunner(),
+ std::make_unique<NullMediaLog>());
// Some default actions.
ON_CALL(cdm_context_, GetMediaFoundationCdmProxy(_))
diff --git a/chromium/media/renderers/win/media_foundation_source_wrapper.cc b/chromium/media/renderers/win/media_foundation_source_wrapper.cc
index 3a41567f5c4..a6b2cc8279a 100644
--- a/chromium/media/renderers/win/media_foundation_source_wrapper.cc
+++ b/chromium/media/renderers/win/media_foundation_source_wrapper.cc
@@ -8,6 +8,7 @@
#include "media/base/audio_decoder_config.h"
#include "media/base/demuxer_stream.h"
+#include "media/base/media_log.h"
#include "media/base/video_decoder_config.h"
#include "media/base/win/mf_helpers.h"
@@ -34,6 +35,7 @@ MediaFoundationSourceWrapper::~MediaFoundationSourceWrapper() {
HRESULT MediaFoundationSourceWrapper::RuntimeClassInitialize(
MediaResource* media_resource,
+ MediaLog* media_log,
scoped_refptr<base::SequencedTaskRunner> task_runner) {
DVLOG_FUNC(1);
@@ -50,7 +52,8 @@ HRESULT MediaFoundationSourceWrapper::RuntimeClassInitialize(
for (DemuxerStream* demuxer_stream : demuxer_streams) {
ComPtr<MediaFoundationStreamWrapper> mf_stream;
RETURN_IF_FAILED(MediaFoundationStreamWrapper::Create(
- stream_id++, this, demuxer_stream, task_runner, &mf_stream));
+ stream_id++, this, demuxer_stream, media_log->Clone(), task_runner,
+ &mf_stream));
media_streams_.push_back(mf_stream);
}
diff --git a/chromium/media/renderers/win/media_foundation_source_wrapper.h b/chromium/media/renderers/win/media_foundation_source_wrapper.h
index 88ca8f96b44..de77dac3555 100644
--- a/chromium/media/renderers/win/media_foundation_source_wrapper.h
+++ b/chromium/media/renderers/win/media_foundation_source_wrapper.h
@@ -20,6 +20,8 @@
namespace media {
+class MediaLog;
+
// IMFMediaSource implementation
// (https://docs.microsoft.com/en-us/windows/win32/api/mfidl/nn-mfidl-imfmediasource)
// based on the given |media_resource|.
@@ -47,6 +49,7 @@ class MediaFoundationSourceWrapper
HRESULT RuntimeClassInitialize(
MediaResource* media_resource,
+ MediaLog* media_log,
scoped_refptr<base::SequencedTaskRunner> task_runner);
// Note: All COM interface (IMFXxx) methods are called on the MF threadpool
diff --git a/chromium/media/renderers/win/media_foundation_stream_wrapper.cc b/chromium/media/renderers/win/media_foundation_stream_wrapper.cc
index 7eb15906d5c..0d7783695af 100644
--- a/chromium/media/renderers/win/media_foundation_stream_wrapper.cc
+++ b/chromium/media/renderers/win/media_foundation_stream_wrapper.cc
@@ -122,6 +122,7 @@ HRESULT MediaFoundationStreamWrapper::Create(
int stream_id,
IMFMediaSource* parent_source,
DemuxerStream* demuxer_stream,
+ std::unique_ptr<MediaLog> media_log,
scoped_refptr<base::SequencedTaskRunner> task_runner,
MediaFoundationStreamWrapper** stream_out) {
DVLOG(1) << __func__ << ": stream_id=" << stream_id;
@@ -130,11 +131,13 @@ HRESULT MediaFoundationStreamWrapper::Create(
switch (demuxer_stream->type()) {
case DemuxerStream::Type::VIDEO:
RETURN_IF_FAILED(MediaFoundationVideoStream::Create(
- stream_id, parent_source, demuxer_stream, &stream));
+ stream_id, parent_source, demuxer_stream, std::move(media_log),
+ &stream));
break;
case DemuxerStream::Type::AUDIO:
RETURN_IF_FAILED(MediaFoundationAudioStream::Create(
- stream_id, parent_source, demuxer_stream, &stream));
+ stream_id, parent_source, demuxer_stream, std::move(media_log),
+ &stream));
break;
default:
DLOG(ERROR) << "Unsupported demuxer stream type: "
@@ -149,7 +152,8 @@ HRESULT MediaFoundationStreamWrapper::Create(
HRESULT MediaFoundationStreamWrapper::RuntimeClassInitialize(
int stream_id,
IMFMediaSource* parent_source,
- DemuxerStream* demuxer_stream) {
+ DemuxerStream* demuxer_stream,
+ std::unique_ptr<MediaLog> media_log) {
{
base::AutoLock auto_lock(lock_);
parent_source_ = parent_source;
@@ -161,6 +165,8 @@ HRESULT MediaFoundationStreamWrapper::RuntimeClassInitialize(
DVLOG_FUNC(1) << "stream_id=" << stream_id
<< ", stream_type=" << DemuxerStream::GetTypeName(stream_type_);
+ media_log_ = std::move(media_log);
+
RETURN_IF_FAILED(GenerateStreamDescriptor());
RETURN_IF_FAILED(MFCreateEventQueue(&mf_media_event_queue_));
return S_OK;
@@ -394,6 +400,11 @@ void MediaFoundationStreamWrapper::OnDemuxerStreamRead(
HRESULT hr = S_OK;
if (status == DemuxerStream::Status::kOk) {
+ if (!encryption_type_reported_) {
+ encryption_type_reported_ = true;
+ ReportEncryptionType(buffer);
+ }
+
// Push |buffer| to process later if needed. Otherwise, process it
// immediately.
if (flushed_ || !post_flush_buffers_.empty()) {
@@ -601,4 +612,24 @@ GUID MediaFoundationStreamWrapper::GetLastKeyId() const {
return last_key_id_;
}
+void MediaFoundationStreamWrapper::ReportEncryptionType(
+ const scoped_refptr<DecoderBuffer>& buffer) {
+ auto encryption_type = EncryptionType::kClear;
+ if (IsEncrypted()) {
+ bool is_buffer_encrypted = buffer->decrypt_config();
+ encryption_type = !is_buffer_encrypted
+ ? EncryptionType::kEncryptedWithClearLead
+ : EncryptionType::kEncrypted;
+ }
+
+ if (encryption_type == EncryptionType::kEncryptedWithClearLead) {
+ MEDIA_LOG(INFO, media_log_) << "MediaFoundationStreamWrapper: "
+ << DemuxerStream::GetTypeName(stream_type_)
+ << " stream is encrypted with clear lead";
+ }
+
+ // TODO(xhwang): Report `encryption_type` to `PipelineStatistics` so it's
+ // also reported to UKM.
+}
+
} // namespace media
diff --git a/chromium/media/renderers/win/media_foundation_stream_wrapper.h b/chromium/media/renderers/win/media_foundation_stream_wrapper.h
index 6e9cd79fe06..521cb4683a8 100644
--- a/chromium/media/renderers/win/media_foundation_stream_wrapper.h
+++ b/chromium/media/renderers/win/media_foundation_stream_wrapper.h
@@ -8,6 +8,8 @@
#include <mfapi.h>
#include <mfidl.h>
#include <wrl.h>
+
+#include <memory>
#include <queue>
#include "base/memory/scoped_refptr.h"
@@ -16,6 +18,7 @@
#include "base/synchronization/lock.h"
#include "media/base/decoder_buffer.h"
#include "media/base/demuxer_stream.h"
+#include "media/base/media_log.h"
namespace media {
@@ -47,12 +50,14 @@ class MediaFoundationStreamWrapper
static HRESULT Create(int stream_id,
IMFMediaSource* parent_source,
DemuxerStream* demuxer_stream,
+ std::unique_ptr<MediaLog> media_log,
scoped_refptr<base::SequencedTaskRunner> task_runner,
MediaFoundationStreamWrapper** stream_out);
HRESULT RuntimeClassInitialize(int stream_id,
IMFMediaSource* parent_source,
- DemuxerStream* demuxer_stream);
+ DemuxerStream* demuxer_stream,
+ std::unique_ptr<MediaLog> media_log);
void SetTaskRunner(scoped_refptr<base::SequencedTaskRunner> task_runner);
void DetachParent();
void DetachDemuxerStream();
@@ -116,6 +121,8 @@ class MediaFoundationStreamWrapper
bool ServicePostFlushSampleRequest();
virtual HRESULT GetMediaType(IMFMediaType** media_type_out) = 0;
+ void ReportEncryptionType(const scoped_refptr<DecoderBuffer>& buffer);
+
scoped_refptr<base::SequencedTaskRunner> task_runner_;
enum class State {
kInitialized,
@@ -126,6 +133,8 @@ class MediaFoundationStreamWrapper
DemuxerStream* demuxer_stream_ = nullptr;
DemuxerStream::Type stream_type_ = DemuxerStream::Type::UNKNOWN;
+ std::unique_ptr<MediaLog> media_log_;
+
// Need exclusive access to some members between calls from MF threadpool
// thread and calling thread from Chromium media stack.
base::Lock lock_;
@@ -166,6 +175,8 @@ class MediaFoundationStreamWrapper
std::queue<scoped_refptr<DecoderBuffer>> post_flush_buffers_
GUARDED_BY(lock_);
+ bool encryption_type_reported_ = false;
+
// NOTE: Weak pointers must be invalidated before all other member variables.
base::WeakPtrFactory<MediaFoundationStreamWrapper> weak_factory_{this};
};
diff --git a/chromium/media/renderers/win/media_foundation_video_stream.cc b/chromium/media/renderers/win/media_foundation_video_stream.cc
index 1ef146a6f04..6d623bf3d8d 100644
--- a/chromium/media/renderers/win/media_foundation_video_stream.cc
+++ b/chromium/media/renderers/win/media_foundation_video_stream.cc
@@ -26,27 +26,27 @@ DEFINE_MEDIATYPE_GUID(MFVideoFormat_THEORA, FCC('theo'))
GUID VideoCodecToMFSubtype(VideoCodec codec) {
switch (codec) {
- case kCodecH264:
+ case VideoCodec::kH264:
return MFVideoFormat_H264;
- case kCodecVC1:
+ case VideoCodec::kVC1:
return MFVideoFormat_WVC1;
- case kCodecMPEG2:
+ case VideoCodec::kMPEG2:
return MFVideoFormat_MPEG2;
- case kCodecMPEG4:
+ case VideoCodec::kMPEG4:
return MFVideoFormat_MP4V;
- case kCodecTheora:
+ case VideoCodec::kTheora:
return MFVideoFormat_THEORA;
- case kCodecVP8:
+ case VideoCodec::kVP8:
return MFVideoFormat_VP80;
- case kCodecVP9:
+ case VideoCodec::kVP9:
return MFVideoFormat_VP90;
- case kCodecHEVC:
+ case VideoCodec::kHEVC:
return MFVideoFormat_HEVC;
- case kCodecDolbyVision:
+ case VideoCodec::kDolbyVision:
// TODO(frankli): DolbyVision also supports H264 when the profile ID is 9
// (DOLBYVISION_PROFILE9). Will it be fine to use HEVC?
return MFVideoFormat_HEVC;
- case kCodecAV1:
+ case VideoCodec::kAV1:
return MFVideoFormat_AV1;
default:
return GUID_NULL;
@@ -234,6 +234,7 @@ HRESULT MediaFoundationVideoStream::Create(
int stream_id,
IMFMediaSource* parent_source,
DemuxerStream* demuxer_stream,
+ std::unique_ptr<MediaLog> media_log,
MediaFoundationStreamWrapper** stream_out) {
DVLOG(1) << __func__ << ": stream_id=" << stream_id;
@@ -241,28 +242,32 @@ HRESULT MediaFoundationVideoStream::Create(
VideoCodec codec = demuxer_stream->video_decoder_config().codec();
switch (codec) {
#if BUILDFLAG(USE_PROPRIETARY_CODECS)
- case kCodecH264:
+ case VideoCodec::kH264:
RETURN_IF_FAILED(MakeAndInitialize<MediaFoundationH264VideoStream>(
- &video_stream, stream_id, parent_source, demuxer_stream));
+ &video_stream, stream_id, parent_source, demuxer_stream,
+ std::move(media_log)));
break;
#endif // BUILDFLAG(USE_PROPRIETARY_CODECS)
#if BUILDFLAG(ENABLE_PLATFORM_HEVC)
- case kCodecHEVC:
+ case VideoCodec::kHEVC:
#endif
#if BUILDFLAG(ENABLE_PLATFORM_DOLBY_VISION)
- case kCodecDolbyVision:
+ case VideoCodec::kDolbyVision:
#endif
#if BUILDFLAG(ENABLE_PLATFORM_HEVC) || BUILDFLAG(ENABLE_PLATFORM_DOLBY_VISION)
RETURN_IF_FAILED(MakeAndInitialize<MediaFoundationHEVCVideoStream>(
- &video_stream, stream_id, parent_source, demuxer_stream));
+ &video_stream, stream_id, parent_source, demuxer_stream,
+ std::move(media_log)));
break;
#endif // BUILDFLAG(ENABLE_PLATFORM_HEVC) ||
// BUILDFLAG(ENABLE_PLATFORM_DOLBY_VISION)
default:
RETURN_IF_FAILED(MakeAndInitialize<MediaFoundationVideoStream>(
- &video_stream, stream_id, parent_source, demuxer_stream));
+ &video_stream, stream_id, parent_source, demuxer_stream,
+ std::move(media_log)));
break;
}
+
*stream_out =
static_cast<MediaFoundationStreamWrapper*>(video_stream.Detach());
return S_OK;
diff --git a/chromium/media/renderers/win/media_foundation_video_stream.h b/chromium/media/renderers/win/media_foundation_video_stream.h
index 230df187d44..33e43def6c6 100644
--- a/chromium/media/renderers/win/media_foundation_video_stream.h
+++ b/chromium/media/renderers/win/media_foundation_video_stream.h
@@ -20,6 +20,7 @@ class MediaFoundationVideoStream : public MediaFoundationStreamWrapper {
static HRESULT Create(int stream_id,
IMFMediaSource* parent_source,
DemuxerStream* demuxer_stream,
+ std::unique_ptr<MediaLog> media_log,
MediaFoundationStreamWrapper** stream_out);
bool IsEncrypted() const override;
diff --git a/chromium/media/video/BUILD.gn b/chromium/media/video/BUILD.gn
index 6d30383a182..a17b1df6569 100644
--- a/chromium/media/video/BUILD.gn
+++ b/chromium/media/video/BUILD.gn
@@ -67,6 +67,7 @@ source_set("video") {
"//build:chromeos_buildflags",
"//gpu/command_buffer/client",
"//gpu/command_buffer/common",
+ "//gpu/ipc/common",
"//media/base",
"//third_party/libyuv",
"//ui/gfx",
diff --git a/chromium/media/video/fake_video_encode_accelerator.h b/chromium/media/video/fake_video_encode_accelerator.h
index dc9f6c96a01..5468f28af06 100644
--- a/chromium/media/video/fake_video_encode_accelerator.h
+++ b/chromium/media/video/fake_video_encode_accelerator.h
@@ -32,6 +32,11 @@ class FakeVideoEncodeAccelerator : public VideoEncodeAccelerator {
public:
explicit FakeVideoEncodeAccelerator(
const scoped_refptr<base::SequencedTaskRunner>& task_runner);
+
+ FakeVideoEncodeAccelerator(const FakeVideoEncodeAccelerator&) = delete;
+ FakeVideoEncodeAccelerator& operator=(const FakeVideoEncodeAccelerator&) =
+ delete;
+
~FakeVideoEncodeAccelerator() override;
VideoEncodeAccelerator::SupportedProfiles GetSupportedProfiles() override;
@@ -106,8 +111,6 @@ class FakeVideoEncodeAccelerator : public VideoEncodeAccelerator {
EncodingCallback encoding_callback_;
base::WeakPtrFactory<FakeVideoEncodeAccelerator> weak_this_factory_{this};
-
- DISALLOW_COPY_AND_ASSIGN(FakeVideoEncodeAccelerator);
};
} // namespace media
diff --git a/chromium/media/video/gpu_memory_buffer_video_frame_pool.cc b/chromium/media/video/gpu_memory_buffer_video_frame_pool.cc
index a78357947bd..774ef97519d 100644
--- a/chromium/media/video/gpu_memory_buffer_video_frame_pool.cc
+++ b/chromium/media/video/gpu_memory_buffer_video_frame_pool.cc
@@ -184,6 +184,15 @@ class GpuMemoryBufferVideoFramePool::PoolImpl
scoped_refptr<VideoFrame> video_frame,
FrameResources* frame_resources);
+ static void CopyRowsToBuffer(
+ GpuVideoAcceleratorFactories::OutputFormat output_format,
+ const size_t plane,
+ const size_t row,
+ const size_t rows_to_copy,
+ const gfx::Size coded_size,
+ const VideoFrame* video_frame,
+ FrameResources* frame_resources,
+ base::OnceClosure done);
// Prepares GL resources, mailboxes and allocates the new VideoFrame. This has
// to be run on `media_task_runner_`. On failure, this will release
// `frame_resources` and return nullptr.
@@ -303,14 +312,13 @@ size_t PlanesPerCopy(GpuVideoAcceleratorFactories::OutputFormat format) {
case GpuVideoAcceleratorFactories::OutputFormat::I420:
case GpuVideoAcceleratorFactories::OutputFormat::RGBA:
case GpuVideoAcceleratorFactories::OutputFormat::BGRA:
+ case GpuVideoAcceleratorFactories::OutputFormat::XR30:
+ case GpuVideoAcceleratorFactories::OutputFormat::XB30:
return 1;
case GpuVideoAcceleratorFactories::OutputFormat::NV12_DUAL_GMB:
case GpuVideoAcceleratorFactories::OutputFormat::NV12_SINGLE_GMB:
case GpuVideoAcceleratorFactories::OutputFormat::P010:
return 2;
- case GpuVideoAcceleratorFactories::OutputFormat::XR30:
- case GpuVideoAcceleratorFactories::OutputFormat::XB30:
- return 3;
case GpuVideoAcceleratorFactories::OutputFormat::UNDEFINED:
NOTREACHED();
break;
@@ -432,9 +440,7 @@ void CopyRowsToI420Buffer(int first_row,
const uint8_t* source,
int source_stride,
uint8_t* output,
- int dest_stride,
- base::OnceClosure done) {
- base::ScopedClosureRunner done_runner(std::move(done));
+ int dest_stride) {
TRACE_EVENT2("media", "CopyRowsToI420Buffer", "bytes_per_row", bytes_per_row,
"rows", rows);
@@ -466,9 +472,7 @@ void CopyRowsToP010Buffer(int first_row,
uint8_t* dest_y,
int dest_stride_y,
uint8_t* dest_uv,
- int dest_stride_uv,
- base::OnceClosure done) {
- base::ScopedClosureRunner done_runner(std::move(done));
+ int dest_stride_uv) {
TRACE_EVENT2("media", "CopyRowsToP010Buffer", "width", width, "rows", rows);
if (!dest_y || !dest_uv)
@@ -508,9 +512,7 @@ void CopyRowsToNV12Buffer(int first_row,
uint8_t* dest_y,
int dest_stride_y,
uint8_t* dest_uv,
- int dest_stride_uv,
- base::OnceClosure done) {
- base::ScopedClosureRunner done_runner(std::move(done));
+ int dest_stride_uv) {
TRACE_EVENT2("media", "CopyRowsToNV12Buffer", "bytes_per_row", bytes_per_row,
"rows", rows);
@@ -561,9 +563,7 @@ void CopyRowsToRGB10Buffer(bool is_argb,
int width,
const VideoFrame* source_frame,
uint8_t* output,
- int dest_stride,
- base::OnceClosure done) {
- base::ScopedClosureRunner done_runner(std::move(done));
+ int dest_stride) {
TRACE_EVENT2("media", "CopyRowsToXR30Buffer", "bytes_per_row", width * 2,
"rows", rows);
if (!output)
@@ -630,9 +630,7 @@ void CopyRowsToRGBABuffer(bool is_rgba,
int width,
const VideoFrame* source_frame,
uint8_t* output,
- int dest_stride,
- base::OnceClosure done) {
- base::ScopedClosureRunner done_runner(std::move(done));
+ int dest_stride) {
TRACE_EVENT2("media", "CopyRowsToRGBABuffer", "bytes_per_row", width * 2,
"rows", rows);
@@ -937,20 +935,32 @@ void GpuMemoryBufferVideoFramePool::PoolImpl::CopyVideoFrameToGpuMemoryBuffers(
}
}
- // |barrier| keeps refptr of |video_frame| until all copy tasks are done.
- const base::RepeatingClosure barrier = base::BarrierClosure(
- copies,
+ auto on_copies_done =
base::BindOnce(&PoolImpl::OnCopiesDone, this, /*copy_failed=*/false,
- video_frame, frame_resources));
-
+ video_frame, frame_resources);
TRACE_EVENT_NESTABLE_ASYNC_BEGIN0(
"media", "CopyVideoFrameToGpuMemoryBuffers",
TRACE_ID_WITH_SCOPE("CopyVideoFrameToGpuMemoryBuffers",
video_frame->timestamp().InNanoseconds()));
- // Post all the async tasks.
+ // If the frame can be copied in one step, do it directly.
+ if (copies == 1) {
+ DCHECK_LE(num_planes, planes_per_copy);
+ const int rows = VideoFrame::Rows(/*plane=*/0, VideoFormat(output_format_),
+ coded_size.height());
+ DCHECK_LE(rows, RowsPerCopy(
+ /*plane=*/0, VideoFormat(output_format_),
+ coded_size.width()));
+ CopyRowsToBuffer(output_format_, /*plane=*/0, /*row=*/0, rows, coded_size,
+ video_frame.get(), frame_resources,
+ std::move(on_copies_done));
+ return;
+ }
+
+ // |barrier| keeps refptr of |video_frame| until all copy tasks are done.
+ const base::RepeatingClosure barrier =
+ base::BarrierClosure(copies, std::move(on_copies_done));
+  // If there is more than one copy, post each copy asynchronously.
for (size_t i = 0; i < num_planes; i += planes_per_copy) {
- gfx::GpuMemoryBuffer* buffer =
- frame_resources->plane_resources[i].gpu_memory_buffer.get();
const int rows =
VideoFrame::Rows(i, VideoFormat(output_format_), coded_size.height());
const int rows_per_copy =
@@ -958,96 +968,82 @@ void GpuMemoryBufferVideoFramePool::PoolImpl::CopyVideoFrameToGpuMemoryBuffers(
for (int row = 0; row < rows; row += rows_per_copy) {
const int rows_to_copy = std::min(rows_per_copy, rows - row);
- switch (output_format_) {
- case GpuVideoAcceleratorFactories::OutputFormat::I420: {
- const int bytes_per_row = VideoFrame::RowBytes(
- i, VideoFormat(output_format_), coded_size.width());
- worker_task_runner_->PostTask(
- FROM_HERE,
- base::BindOnce(&CopyRowsToI420Buffer, row, rows_to_copy,
- bytes_per_row, video_frame->BitDepth(),
- video_frame->visible_data(i),
- video_frame->stride(i),
- static_cast<uint8_t*>(buffer->memory(0)),
- buffer->stride(0), barrier));
- break;
- }
- case GpuVideoAcceleratorFactories::OutputFormat::P010:
- // Using base::Unretained(video_frame) here is safe because |barrier|
- // keeps refptr of |video_frame| until all copy tasks are done.
- worker_task_runner_->PostTask(
- FROM_HERE,
- base::BindOnce(
- &CopyRowsToP010Buffer, row, rows_to_copy, coded_size.width(),
- base::Unretained(video_frame.get()),
- static_cast<uint8_t*>(buffer->memory(0)), buffer->stride(0),
- static_cast<uint8_t*>(buffer->memory(1)), buffer->stride(1),
- barrier));
- break;
- case GpuVideoAcceleratorFactories::OutputFormat::NV12_SINGLE_GMB:
- // Using base::Unretained(video_frame) here is safe because |barrier|
- // keeps refptr of |video_frame| until all copy tasks are done.
- worker_task_runner_->PostTask(
- FROM_HERE,
- base::BindOnce(
- &CopyRowsToNV12Buffer, row, rows_to_copy, coded_size.width(),
- base::Unretained(video_frame.get()),
- static_cast<uint8_t*>(buffer->memory(0)), buffer->stride(0),
- static_cast<uint8_t*>(buffer->memory(1)), buffer->stride(1),
- barrier));
- break;
- case GpuVideoAcceleratorFactories::OutputFormat::NV12_DUAL_GMB: {
- gfx::GpuMemoryBuffer* buffer2 =
- frame_resources->plane_resources[1].gpu_memory_buffer.get();
- // Using base::Unretained(video_frame) here is safe because |barrier|
- // keeps refptr of |video_frame| until all copy tasks are done.
- worker_task_runner_->PostTask(
- FROM_HERE,
- base::BindOnce(
- &CopyRowsToNV12Buffer, row, rows_to_copy, coded_size.width(),
- base::Unretained(video_frame.get()),
- static_cast<uint8_t*>(buffer->memory(0)), buffer->stride(0),
- static_cast<uint8_t*>(buffer2->memory(0)), buffer2->stride(0),
- barrier));
- break;
- }
+ worker_task_runner_->PostTask(
+ FROM_HERE, base::BindOnce(&CopyRowsToBuffer, output_format_, i, row,
+ rows_to_copy, coded_size,
+ base::Unretained(video_frame.get()),
+ frame_resources, barrier));
+ }
+ }
+}
- case GpuVideoAcceleratorFactories::OutputFormat::XR30:
- case GpuVideoAcceleratorFactories::OutputFormat::XB30: {
- const bool is_argb = output_format_ ==
- GpuVideoAcceleratorFactories::OutputFormat::XR30;
- // Using base::Unretained(video_frame) here is safe because |barrier|
- // keeps refptr of |video_frame| until all copy tasks are done.
- worker_task_runner_->PostTask(
- FROM_HERE,
- base::BindOnce(&CopyRowsToRGB10Buffer, is_argb, row, rows_to_copy,
- coded_size.width(),
- base::Unretained(video_frame.get()),
- static_cast<uint8_t*>(buffer->memory(0)),
- buffer->stride(0), barrier));
- break;
- }
+// static
+void GpuMemoryBufferVideoFramePool::PoolImpl::CopyRowsToBuffer(
+ GpuVideoAcceleratorFactories::OutputFormat output_format,
+ const size_t plane,
+ const size_t row,
+ const size_t rows_to_copy,
+ const gfx::Size coded_size,
+ const VideoFrame* video_frame,
+ FrameResources* frame_resources,
+ base::OnceClosure done) {
+ base::ScopedClosureRunner done_runner(std::move(done));
+ gfx::GpuMemoryBuffer* buffer =
+ frame_resources->plane_resources[plane].gpu_memory_buffer.get();
+ switch (output_format) {
+ case GpuVideoAcceleratorFactories::OutputFormat::I420: {
+ const int bytes_per_row = VideoFrame::RowBytes(
+ plane, VideoFormat(output_format), coded_size.width());
+ CopyRowsToI420Buffer(
+ row, rows_to_copy, bytes_per_row, video_frame->BitDepth(),
+ video_frame->visible_data(plane), video_frame->stride(plane),
+ static_cast<uint8_t*>(buffer->memory(0)), buffer->stride(0));
+ break;
+ }
+ case GpuVideoAcceleratorFactories::OutputFormat::P010:
+ CopyRowsToP010Buffer(
+ row, rows_to_copy, coded_size.width(), video_frame,
+ static_cast<uint8_t*>(buffer->memory(0)), buffer->stride(0),
+ static_cast<uint8_t*>(buffer->memory(1)), buffer->stride(1));
+ break;
+ case GpuVideoAcceleratorFactories::OutputFormat::NV12_SINGLE_GMB:
+ CopyRowsToNV12Buffer(
+ row, rows_to_copy, coded_size.width(), video_frame,
+ static_cast<uint8_t*>(buffer->memory(0)), buffer->stride(0),
+ static_cast<uint8_t*>(buffer->memory(1)), buffer->stride(1));
+ break;
+ case GpuVideoAcceleratorFactories::OutputFormat::NV12_DUAL_GMB: {
+ gfx::GpuMemoryBuffer* buffer2 =
+ frame_resources->plane_resources[1].gpu_memory_buffer.get();
+ CopyRowsToNV12Buffer(
+ row, rows_to_copy, coded_size.width(), video_frame,
+ static_cast<uint8_t*>(buffer->memory(0)), buffer->stride(0),
+ static_cast<uint8_t*>(buffer2->memory(0)), buffer2->stride(0));
+ break;
+ }
- case GpuVideoAcceleratorFactories::OutputFormat::RGBA:
- case GpuVideoAcceleratorFactories::OutputFormat::BGRA: {
- const bool is_rgba = output_format_ ==
- GpuVideoAcceleratorFactories::OutputFormat::RGBA;
- // Using base::Unretained(video_frame) here is safe because |barrier|
- // keeps refptr of |video_frame| until all copy tasks are done.
- worker_task_runner_->PostTask(
- FROM_HERE,
- base::BindOnce(&CopyRowsToRGBABuffer, is_rgba, row, rows_to_copy,
- coded_size.width(),
- base::Unretained(video_frame.get()),
- static_cast<uint8_t*>(buffer->memory(0)),
- buffer->stride(0), barrier));
- break;
- }
+ case GpuVideoAcceleratorFactories::OutputFormat::XR30:
+ case GpuVideoAcceleratorFactories::OutputFormat::XB30: {
+ const bool is_argb =
+ output_format == GpuVideoAcceleratorFactories::OutputFormat::XR30;
+ CopyRowsToRGB10Buffer(
+ is_argb, row, rows_to_copy, coded_size.width(), video_frame,
+ static_cast<uint8_t*>(buffer->memory(0)), buffer->stride(0));
+ break;
+ }
- case GpuVideoAcceleratorFactories::OutputFormat::UNDEFINED:
- NOTREACHED();
- }
+ case GpuVideoAcceleratorFactories::OutputFormat::RGBA:
+ case GpuVideoAcceleratorFactories::OutputFormat::BGRA: {
+ const bool is_rgba =
+ output_format == GpuVideoAcceleratorFactories::OutputFormat::RGBA;
+ CopyRowsToRGBABuffer(
+ is_rgba, row, rows_to_copy, coded_size.width(), video_frame,
+ static_cast<uint8_t*>(buffer->memory(0)), buffer->stride(0));
+ break;
}
+
+ case GpuVideoAcceleratorFactories::OutputFormat::UNDEFINED:
+ NOTREACHED();
}
}
@@ -1336,15 +1332,14 @@ void GpuMemoryBufferVideoFramePool::PoolImpl::MailboxHoldersReleased(
frame_resources->MarkUnused(now);
auto it = resources_pool_.begin();
while (it != resources_pool_.end()) {
- FrameResources* frame_resources = *it;
+ FrameResources* resources = *it;
- constexpr base::TimeDelta kStaleFrameLimit =
- base::TimeDelta::FromSeconds(10);
- if (!frame_resources->is_used() &&
- now - frame_resources->last_use_time() > kStaleFrameLimit) {
+ constexpr base::TimeDelta kStaleFrameLimit = base::Seconds(10);
+ if (!resources->is_used() &&
+ now - resources->last_use_time() > kStaleFrameLimit) {
resources_pool_.erase(it++);
- DeleteFrameResources(gpu_factories_, frame_resources);
- delete frame_resources;
+ DeleteFrameResources(gpu_factories_, resources);
+ delete resources;
} else {
it++;
}
diff --git a/chromium/media/video/gpu_memory_buffer_video_frame_pool.h b/chromium/media/video/gpu_memory_buffer_video_frame_pool.h
index 584372fba48..043ac132a43 100644
--- a/chromium/media/video/gpu_memory_buffer_video_frame_pool.h
+++ b/chromium/media/video/gpu_memory_buffer_video_frame_pool.h
@@ -38,6 +38,11 @@ class MEDIA_EXPORT GpuMemoryBufferVideoFramePool {
const scoped_refptr<base::SingleThreadTaskRunner>& media_task_runner,
const scoped_refptr<base::TaskRunner>& worker_task_runner,
GpuVideoAcceleratorFactories* gpu_factories);
+
+ GpuMemoryBufferVideoFramePool(const GpuMemoryBufferVideoFramePool&) = delete;
+ GpuMemoryBufferVideoFramePool& operator=(
+ const GpuMemoryBufferVideoFramePool&) = delete;
+
virtual ~GpuMemoryBufferVideoFramePool();
// Callback used by MaybeCreateHardwareFrame to deliver a new VideoFrame
@@ -68,8 +73,6 @@ class MEDIA_EXPORT GpuMemoryBufferVideoFramePool {
private:
class PoolImpl;
scoped_refptr<PoolImpl> pool_impl_;
-
- DISALLOW_COPY_AND_ASSIGN(GpuMemoryBufferVideoFramePool);
};
} // namespace media
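
The DISALLOW_COPY_AND_ASSIGN removals throughout this patch all follow the same mechanical recipe; a minimal sketch with a made-up class name:

// Before: copying was suppressed with a macro in the private section.
//   private:
//    DISALLOW_COPY_AND_ASSIGN(Widget);
//
// After: the special members are deleted explicitly next to the
// constructors, and the "base/macros.h" dependency can eventually go away.
class Widget {
 public:
  Widget() = default;
  Widget(const Widget&) = delete;
  Widget& operator=(const Widget&) = delete;
  ~Widget() = default;
};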
diff --git a/chromium/media/video/gpu_memory_buffer_video_frame_pool_unittest.cc b/chromium/media/video/gpu_memory_buffer_video_frame_pool_unittest.cc
index 134d7f06aa1..d94cd64b28b 100644
--- a/chromium/media/video/gpu_memory_buffer_video_frame_pool_unittest.cc
+++ b/chromium/media/video/gpu_memory_buffer_video_frame_pool_unittest.cc
@@ -29,7 +29,7 @@ class GpuMemoryBufferVideoFramePoolTest : public ::testing::Test {
void SetUp() override {
// Seed test clock with some dummy non-zero value to avoid confusion with
// empty base::TimeTicks values.
- test_clock_.Advance(base::TimeDelta::FromSeconds(1234));
+ test_clock_.Advance(base::Seconds(1234));
sii_ = std::make_unique<viz::TestSharedImageInterface>();
media_task_runner_ = base::MakeRefCounted<base::TestSimpleTaskRunner>();
@@ -491,7 +491,7 @@ TEST_F(GpuMemoryBufferVideoFramePoolTest, PreservesMetadata) {
scoped_refptr<VideoFrame> software_frame = CreateTestYUVVideoFrame(10);
software_frame->metadata().end_of_stream = true;
base::TimeTicks kTestReferenceTime =
- base::TimeDelta::FromMilliseconds(12345) + base::TimeTicks();
+ base::Milliseconds(12345) + base::TimeTicks();
software_frame->metadata().reference_time = kTestReferenceTime;
scoped_refptr<VideoFrame> frame;
gpu_memory_buffer_pool_->MaybeCreateHardwareFrame(
@@ -596,7 +596,7 @@ TEST_F(GpuMemoryBufferVideoFramePoolTest, StaleFramesAreExpired) {
// Advance clock far enough to hit stale timer; ensure only frame_1 has its
// resources released.
- test_clock_.Advance(base::TimeDelta::FromMinutes(1));
+ test_clock_.Advance(base::Minutes(1));
frame_2 = nullptr;
RunUntilIdle();
EXPECT_EQ(3u, sii_->shared_image_count());
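
The base::TimeDelta factory renames applied in these tests are one-to-one; a small sketch of the correspondence (values are arbitrary):

#include "base/time/time.h"

base::TimeDelta TimeHelperExamples() {
  // base::Seconds() is templated, so it also covers the old FromSecondsD().
  base::TimeDelta a = base::Seconds(1234);        // was TimeDelta::FromSeconds(1234)
  base::TimeDelta b = base::Milliseconds(12345);  // was TimeDelta::FromMilliseconds(12345)
  base::TimeDelta c = base::Seconds(1.0 / 60);    // was TimeDelta::FromSecondsD(1.0 / 60)
  return a + b + c + base::Minutes(1);            // was TimeDelta::FromMinutes(1)
}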
diff --git a/chromium/media/video/gpu_video_accelerator_factories.h b/chromium/media/video/gpu_video_accelerator_factories.h
index 3569fccc587..e94732e4571 100644
--- a/chromium/media/video/gpu_video_accelerator_factories.h
+++ b/chromium/media/video/gpu_video_accelerator_factories.h
@@ -17,6 +17,7 @@
#include "base/unguessable_token.h"
#include "gpu/command_buffer/client/gles2_interface.h"
#include "gpu/command_buffer/common/mailbox.h"
+#include "gpu/ipc/common/gpu_channel.mojom.h"
#include "media/base/media_export.h"
#include "media/base/overlay_info.h"
#include "media/base/supported_video_decoder_config.h"
@@ -33,12 +34,12 @@ class SequencedTaskRunner;
namespace gfx {
class ColorSpace;
class Size;
-}
+} // namespace gfx
namespace gpu {
class GpuMemoryBufferManager;
class SharedImageInterface;
-}
+} // namespace gpu
namespace viz {
class RasterContextProvider;
@@ -80,7 +81,9 @@ class MEDIA_EXPORT GpuVideoAcceleratorFactories {
virtual bool IsGpuVideoAcceleratorEnabled() = 0;
// Return the channel token, or an empty token if the channel is unusable.
- virtual base::UnguessableToken GetChannelToken() = 0;
+ // |cb| could be called re-entrantly. This function is not thread safe.
+ virtual void GetChannelToken(
+ gpu::mojom::GpuChannel::GetChannelTokenCallback cb) = 0;
// Returns the |route_id| of the command buffer, or 0 if there is none.
virtual int32_t GetCommandBufferRouteId() = 0;
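
A hypothetical caller adapting to the now callback-based GetChannelToken() might look like the sketch below. The callback is assumed to deliver a base::UnguessableToken (matching the mojo-generated GetChannelTokenCallback), and, as the header warns, it may run re-entrantly.

#include "base/bind.h"
#include "base/logging.h"
#include "base/unguessable_token.h"
#include "media/video/gpu_video_accelerator_factories.h"

void QueryChannelToken(media::GpuVideoAcceleratorFactories* factories) {
  // The callback may run before GetChannelToken() returns, so do not assume
  // any ordering relative to the code that follows this call.
  factories->GetChannelToken(
      base::BindOnce([](const base::UnguessableToken& token) {
        if (token.is_empty()) {
          DVLOG(1) << "GPU channel is unusable.";
          return;
        }
        // Use |token| to identify the GPU channel.
      }));
}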
diff --git a/chromium/media/video/h264_bit_reader.h b/chromium/media/video/h264_bit_reader.h
index 7672172f52c..cbdaccfa2dc 100644
--- a/chromium/media/video/h264_bit_reader.h
+++ b/chromium/media/video/h264_bit_reader.h
@@ -23,6 +23,10 @@ namespace media {
class MEDIA_EXPORT H264BitReader {
public:
H264BitReader();
+
+ H264BitReader(const H264BitReader&) = delete;
+ H264BitReader& operator=(const H264BitReader&) = delete;
+
~H264BitReader();
// Initialize the reader to start reading at |data|, |size| being size
@@ -72,8 +76,6 @@ class MEDIA_EXPORT H264BitReader {
   // Number of emulation prevention bytes (0x000003) we encountered.
size_t emulation_prevention_bytes_;
-
- DISALLOW_COPY_AND_ASSIGN(H264BitReader);
};
} // namespace media
diff --git a/chromium/media/video/h264_parser.h b/chromium/media/video/h264_parser.h
index af8cdc8d75e..6757ac7acd1 100644
--- a/chromium/media/video/h264_parser.h
+++ b/chromium/media/video/h264_parser.h
@@ -437,6 +437,10 @@ class MEDIA_EXPORT H264Parser {
std::vector<H264NALU>* nalus);
H264Parser();
+
+ H264Parser(const H264Parser&) = delete;
+ H264Parser& operator=(const H264Parser&) = delete;
+
~H264Parser();
void Reset();
@@ -565,8 +569,6 @@ class MEDIA_EXPORT H264Parser {
// This contains the range of the previous NALU found in
// AdvanceToNextNalu(). Holds exactly one range.
Ranges<const uint8_t*> previous_nalu_range_;
-
- DISALLOW_COPY_AND_ASSIGN(H264Parser);
};
} // namespace media
diff --git a/chromium/media/video/h264_poc.h b/chromium/media/video/h264_poc.h
index 3474a464472..176ea715453 100644
--- a/chromium/media/video/h264_poc.h
+++ b/chromium/media/video/h264_poc.h
@@ -18,6 +18,10 @@ struct H264SliceHeader;
class MEDIA_EXPORT H264POC {
public:
H264POC();
+
+ H264POC(const H264POC&) = delete;
+ H264POC& operator=(const H264POC&) = delete;
+
~H264POC();
// Returns the picture order count for a slice.
@@ -43,8 +47,6 @@ class MEDIA_EXPORT H264POC {
int32_t prev_frame_num_;
int32_t prev_frame_num_offset_;
bool pending_mmco5_;
-
- DISALLOW_COPY_AND_ASSIGN(H264POC);
};
} // namespace media
diff --git a/chromium/media/video/h265_parser.cc b/chromium/media/video/h265_parser.cc
index 5e49c16b5ac..0bcaa83c868 100644
--- a/chromium/media/video/h265_parser.cc
+++ b/chromium/media/video/h265_parser.cc
@@ -152,6 +152,15 @@ void FillInDefaultScalingListData(H265ScalingListData* scaling_list_data,
} \
} while (0)
+#define EQ_OR_RETURN(shdr1, shdr2, field) \
+ do { \
+ if ((shdr1->field) != (shdr2->field)) { \
+ DVLOG(1) << "Error in stream, slice header fields must match for: " \
+ << #field; \
+ return kInvalidStream; \
+ } \
+ } while (0)
+
H265ScalingListData::H265ScalingListData() {
memset(reinterpret_cast<void*>(this), 0, sizeof(*this));
}
@@ -1094,6 +1103,24 @@ H265Parser::Result H265Parser::ParseSliceHeader(const H265NALU& nalu,
SKIP_BITS_OR_RETURN(slice_segment_header_extension_length * 8);
}
+ if (prior_shdr) {
+ // Validate the fields that must match between slice headers for the same
+ // picture.
+ EQ_OR_RETURN(shdr, prior_shdr, slice_pic_parameter_set_id);
+ EQ_OR_RETURN(shdr, prior_shdr, pic_output_flag);
+ EQ_OR_RETURN(shdr, prior_shdr, no_output_of_prior_pics_flag);
+ EQ_OR_RETURN(shdr, prior_shdr, slice_pic_order_cnt_lsb);
+ EQ_OR_RETURN(shdr, prior_shdr, short_term_ref_pic_set_sps_flag);
+
+ // All the other fields we need to compare are contiguous, so compare them
+ // as one memory range.
+ size_t block_start = offsetof(H265SliceHeader, short_term_ref_pic_set_idx);
+ size_t block_end = offsetof(H265SliceHeader, slice_sao_luma_flag);
+ TRUE_OR_RETURN(!memcmp(reinterpret_cast<uint8_t*>(shdr) + block_start,
+ reinterpret_cast<uint8_t*>(prior_shdr) + block_start,
+ block_end - block_start));
+ }
+
// byte_alignment()
SKIP_BITS_OR_RETURN(1); // alignment bit
int bits_left_to_align = br_.NumBitsLeft() % 8;
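
The block comparison added above leans on struct layout; here is a standalone sketch of the technique with a made-up struct. Padding bytes inside the compared range must be deterministic (e.g., the struct zero-initializes itself up front), otherwise memcmp may report spurious mismatches.

#include <cstddef>
#include <cstring>

struct Header {
  int id;           // Compared field-by-field.
  // Everything from |block_first| up to (but not including) |after_block| is
  // compared with a single memcmp, so these fields must stay contiguous.
  int block_first;
  int block_mid;
  bool block_flag;
  int after_block;  // First field outside the compared block.
};

bool BlocksMatch(const Header& a, const Header& b) {
  const size_t begin = offsetof(Header, block_first);
  const size_t end = offsetof(Header, after_block);
  return std::memcmp(reinterpret_cast<const char*>(&a) + begin,
                     reinterpret_cast<const char*>(&b) + begin,
                     end - begin) == 0;
}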
diff --git a/chromium/media/video/h265_parser.h b/chromium/media/video/h265_parser.h
index 6e2f28168d6..44ce5c87da6 100644
--- a/chromium/media/video/h265_parser.h
+++ b/chromium/media/video/h265_parser.h
@@ -287,12 +287,16 @@ struct MEDIA_EXPORT H265SliceHeader {
int slice_pic_parameter_set_id;
bool dependent_slice_segment_flag;
int slice_segment_address;
+  // Do not move any of the above fields below or vice versa; everything
+  // after this point is compared as a block.
int slice_type;
bool pic_output_flag;
int colour_plane_id;
int slice_pic_order_cnt_lsb;
bool short_term_ref_pic_set_sps_flag;
H265StRefPicSet st_ref_pic_set;
+ // Do not change the order of the following fields up through
+ // slice_sao_luma_flag. They are compared as a block.
int short_term_ref_pic_set_idx;
int num_long_term_sps;
int num_long_term_pics;
@@ -344,6 +348,10 @@ struct MEDIA_EXPORT H265SliceHeader {
class MEDIA_EXPORT H265Parser : public H265NaluParser {
public:
H265Parser();
+
+ H265Parser(const H265Parser&) = delete;
+ H265Parser& operator=(const H265Parser&) = delete;
+
~H265Parser() override;
// NALU-specific parsing functions.
@@ -410,8 +418,6 @@ class MEDIA_EXPORT H265Parser : public H265NaluParser {
// PPSes and SPSes stored for future reference.
base::flat_map<int, std::unique_ptr<H265SPS>> active_sps_;
base::flat_map<int, std::unique_ptr<H265PPS>> active_pps_;
-
- DISALLOW_COPY_AND_ASSIGN(H265Parser);
};
} // namespace media
diff --git a/chromium/media/video/h265_parser_unittest.cc b/chromium/media/video/h265_parser_unittest.cc
index c81bdb020ae..4838515ba8e 100644
--- a/chromium/media/video/h265_parser_unittest.cc
+++ b/chromium/media/video/h265_parser_unittest.cc
@@ -78,7 +78,6 @@ TEST_F(H265ParserTest, RawHevcStreamFileParsing) {
DVLOG(4) << "Found NALU " << nalu.nal_unit_type;
H265SliceHeader shdr;
- H265SliceHeader prior_shdr;
switch (nalu.nal_unit_type) {
case H265NALU::SPS_NUT:
int sps_id;
@@ -106,8 +105,7 @@ TEST_F(H265ParserTest, RawHevcStreamFileParsing) {
case H265NALU::IDR_W_RADL:
case H265NALU::IDR_N_LP:
case H265NALU::CRA_NUT: // fallthrough
- res = parser_.ParseSliceHeader(nalu, &shdr, &prior_shdr);
- prior_shdr = shdr;
+ res = parser_.ParseSliceHeader(nalu, &shdr, nullptr);
break;
default:
break;
diff --git a/chromium/media/video/mock_gpu_video_accelerator_factories.h b/chromium/media/video/mock_gpu_video_accelerator_factories.h
index 7aac0e60a43..cd7016206b3 100644
--- a/chromium/media/video/mock_gpu_video_accelerator_factories.h
+++ b/chromium/media/video/mock_gpu_video_accelerator_factories.h
@@ -14,6 +14,7 @@
#include "base/macros.h"
#include "base/memory/ref_counted.h"
#include "base/sequenced_task_runner.h"
+#include "gpu/ipc/common/gpu_channel.mojom.h"
#include "media/video/gpu_video_accelerator_factories.h"
#include "media/video/video_encode_accelerator.h"
#include "services/viz/public/cpp/gpu/context_provider_command_buffer.h"
@@ -24,11 +25,18 @@ namespace media {
class MockGpuVideoAcceleratorFactories : public GpuVideoAcceleratorFactories {
public:
explicit MockGpuVideoAcceleratorFactories(gpu::SharedImageInterface* sii);
+
+ MockGpuVideoAcceleratorFactories(const MockGpuVideoAcceleratorFactories&) =
+ delete;
+ MockGpuVideoAcceleratorFactories& operator=(
+ const MockGpuVideoAcceleratorFactories&) = delete;
+
~MockGpuVideoAcceleratorFactories() override;
bool IsGpuVideoAcceleratorEnabled() override;
- MOCK_METHOD0(GetChannelToken, base::UnguessableToken());
+ MOCK_METHOD1(GetChannelToken,
+ void(gpu::mojom::GpuChannel::GetChannelTokenCallback));
MOCK_METHOD0(GetCommandBufferRouteId, int32_t());
MOCK_METHOD1(IsDecoderConfigSupported, Supported(const VideoDecoderConfig&));
@@ -96,8 +104,6 @@ class MockGpuVideoAcceleratorFactories : public GpuVideoAcceleratorFactories {
}
private:
- DISALLOW_COPY_AND_ASSIGN(MockGpuVideoAcceleratorFactories);
-
base::Lock lock_;
OutputFormat video_frame_output_format_ = OutputFormat::I420;
diff --git a/chromium/media/video/mock_video_decode_accelerator.h b/chromium/media/video/mock_video_decode_accelerator.h
index bdb89164994..fde8214b450 100644
--- a/chromium/media/video/mock_video_decode_accelerator.h
+++ b/chromium/media/video/mock_video_decode_accelerator.h
@@ -24,6 +24,11 @@ namespace media {
class MockVideoDecodeAccelerator : public VideoDecodeAccelerator {
public:
MockVideoDecodeAccelerator();
+
+ MockVideoDecodeAccelerator(const MockVideoDecodeAccelerator&) = delete;
+ MockVideoDecodeAccelerator& operator=(const MockVideoDecodeAccelerator&) =
+ delete;
+
~MockVideoDecodeAccelerator() override;
MOCK_METHOD2(Initialize, bool(const Config& config, Client* client));
@@ -42,7 +47,6 @@ class MockVideoDecodeAccelerator : public VideoDecodeAccelerator {
private:
void DeleteThis();
- DISALLOW_COPY_AND_ASSIGN(MockVideoDecodeAccelerator);
};
} // namespace media
diff --git a/chromium/media/video/mock_video_encode_accelerator.h b/chromium/media/video/mock_video_encode_accelerator.h
index 259e1277879..8791b1950f1 100644
--- a/chromium/media/video/mock_video_encode_accelerator.h
+++ b/chromium/media/video/mock_video_encode_accelerator.h
@@ -15,6 +15,11 @@ namespace media {
class MockVideoEncodeAccelerator : public VideoEncodeAccelerator {
public:
MockVideoEncodeAccelerator();
+
+ MockVideoEncodeAccelerator(const MockVideoEncodeAccelerator&) = delete;
+ MockVideoEncodeAccelerator& operator=(const MockVideoEncodeAccelerator&) =
+ delete;
+
~MockVideoEncodeAccelerator() override;
MOCK_METHOD0(GetSupportedProfiles,
@@ -31,7 +36,6 @@ class MockVideoEncodeAccelerator : public VideoEncodeAccelerator {
private:
void DeleteThis();
- DISALLOW_COPY_AND_ASSIGN(MockVideoEncodeAccelerator);
};
} // namespace media
diff --git a/chromium/media/video/openh264_video_encoder.cc b/chromium/media/video/openh264_video_encoder.cc
index 8fd15fc6511..a20ec00e28c 100644
--- a/chromium/media/video/openh264_video_encoder.cc
+++ b/chromium/media/video/openh264_video_encoder.cc
@@ -13,6 +13,7 @@
#include "base/time/time.h"
#include "base/trace_event/trace_event.h"
#include "media/base/bind_to_current_loop.h"
+#include "media/base/svc_scalability_mode.h"
#include "media/base/video_frame.h"
#include "media/base/video_util.h"
@@ -45,7 +46,23 @@ Status SetUpOpenH264Params(const VideoEncoder::Options& options,
params->iRCMode = RC_OFF_MODE;
}
- params->iTemporalLayerNum = options.temporal_layers;
+ int num_temporal_layers = 1;
+ if (options.scalability_mode) {
+ switch (options.scalability_mode.value()) {
+ case SVCScalabilityMode::kL1T2:
+ num_temporal_layers = 2;
+ break;
+ case SVCScalabilityMode::kL1T3:
+ num_temporal_layers = 3;
+ break;
+ default:
+ NOTREACHED() << "Unsupported SVC: "
+ << GetScalabilityModeName(
+ options.scalability_mode.value());
+ }
+ }
+
+ params->iTemporalLayerNum = num_temporal_layers;
params->iSpatialLayerNum = 1;
params->sSpatialLayers[0].fFrameRate = params->fMaxFrameRate;
params->sSpatialLayers[0].iMaxSpatialBitrate = params->iTargetBitrate;
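
The scalability-mode switch above recurs in several encoders in this patch; a minimal free-function sketch of that mapping (the helper name is not part of the patch, and unsupported modes simply fall back to one layer here instead of hitting NOTREACHED):

#include "media/base/svc_scalability_mode.h"
#include "third_party/abseil-cpp/absl/types/optional.h"

int TemporalLayerCountForMode(absl::optional<media::SVCScalabilityMode> mode) {
  if (!mode)
    return 1;  // No SVC requested: a single temporal layer.
  switch (*mode) {
    case media::SVCScalabilityMode::kL1T2:
      return 2;
    case media::SVCScalabilityMode::kL1T3:
      return 3;
    default:
      return 1;  // Spatial and other modes are not handled by these encoders.
  }
}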
diff --git a/chromium/media/video/renderable_gpu_memory_buffer_video_frame_pool.cc b/chromium/media/video/renderable_gpu_memory_buffer_video_frame_pool.cc
index 8695f57462b..3a866e3f1ec 100644
--- a/chromium/media/video/renderable_gpu_memory_buffer_video_frame_pool.cc
+++ b/chromium/media/video/renderable_gpu_memory_buffer_video_frame_pool.cc
@@ -14,6 +14,7 @@
#include "base/bind_post_task.h"
#include "base/logging.h"
#include "base/threading/sequenced_task_runner_handle.h"
+#include "build/build_config.h"
#include "gpu/GLES2/gl2extchromium.h"
#include "gpu/command_buffer/client/gpu_memory_buffer_manager.h"
#include "gpu/command_buffer/client/shared_image_interface.h"
@@ -145,10 +146,17 @@ FrameResources::~FrameResources() {
bool FrameResources::Initialize() {
auto* context = pool_->GetContext();
+ constexpr gfx::BufferUsage kBufferUsage =
+#if defined(OS_MAC)
+ gfx::BufferUsage::SCANOUT_VEA_CPU_READ
+#else
+ gfx::BufferUsage::SCANOUT_CPU_READ_WRITE
+#endif
+ ;
+
// Create the GpuMemoryBuffer.
gpu_memory_buffer_ = context->CreateGpuMemoryBuffer(
- coded_size_, gfx::BufferFormat::YUV_420_BIPLANAR,
- gfx::BufferUsage::SCANOUT_VEA_CPU_READ);
+ coded_size_, gfx::BufferFormat::YUV_420_BIPLANAR, kBufferUsage);
if (!gpu_memory_buffer_) {
DLOG(ERROR) << "Failed to allocate GpuMemoryBuffer for frame.";
return false;
@@ -158,17 +166,24 @@ bool FrameResources::Initialize() {
constexpr size_t kNumPlanes = 2;
constexpr gfx::BufferPlane kPlanes[kNumPlanes] = {gfx::BufferPlane::Y,
gfx::BufferPlane::UV};
- constexpr uint32_t kUsage =
+ constexpr uint32_t kSharedImageUsage =
+#if defined(OS_MAC)
+ gpu::SHARED_IMAGE_USAGE_MACOS_VIDEO_TOOLBOX |
+#endif
gpu::SHARED_IMAGE_USAGE_GLES2 | gpu::SHARED_IMAGE_USAGE_RASTER |
- gpu::SHARED_IMAGE_USAGE_DISPLAY | gpu::SHARED_IMAGE_USAGE_SCANOUT |
- gpu::SHARED_IMAGE_USAGE_MACOS_VIDEO_TOOLBOX;
+ gpu::SHARED_IMAGE_USAGE_DISPLAY | gpu::SHARED_IMAGE_USAGE_SCANOUT;
+
for (size_t plane = 0; plane < kNumPlanes; ++plane) {
context->CreateSharedImage(
gpu_memory_buffer_.get(), kPlanes[plane], color_space_,
- kTopLeft_GrSurfaceOrigin, kPremul_SkAlphaType, kUsage,
+ kTopLeft_GrSurfaceOrigin, kPremul_SkAlphaType, kSharedImageUsage,
mailbox_holders_[plane].mailbox, mailbox_holders_[plane].sync_token);
// TODO(https://crbug.com/1191956): This should be parameterized.
+#if defined(OS_MAC)
mailbox_holders_[plane].texture_target = GL_TEXTURE_RECTANGLE_ARB;
+#else
+ mailbox_holders_[plane].texture_target = GL_TEXTURE_2D;
+#endif
}
return true;
}
@@ -189,10 +204,17 @@ FrameResources::CreateVideoFrameAndTakeGpuMemoryBuffer() {
return nullptr;
video_frame->set_color_space(color_space_);
+
// TODO(https://crbug.com/1191956): This should depend on the platform and
// format.
video_frame->metadata().allow_overlay = true;
- video_frame->metadata().read_lock_fences_enabled = true;
+
+ // Only native (non shared memory) GMBs require waiting on GPU fences.
+ const bool has_native_gmb =
+ video_frame->HasGpuMemoryBuffer() &&
+ video_frame->GetGpuMemoryBuffer()->GetType() != gfx::SHARED_MEMORY_BUFFER;
+ video_frame->metadata().read_lock_fences_enabled = has_native_gmb;
+
return video_frame;
}
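
The read_lock_fences_enabled change above boils down to a small predicate; a sketch of it as a free function (header locations are assumed):

#include "media/base/video_frame.h"
#include "ui/gfx/gpu_memory_buffer.h"

// Only frames backed by a native GpuMemoryBuffer (IOSurface, dmabuf, ...)
// need read-lock fences; shared-memory-backed buffers do not.
bool NeedsReadLockFences(const media::VideoFrame& frame) {
  return frame.HasGpuMemoryBuffer() &&
         frame.GetGpuMemoryBuffer()->GetType() != gfx::SHARED_MEMORY_BUFFER;
}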
diff --git a/chromium/media/video/software_video_encoder_test.cc b/chromium/media/video/software_video_encoder_test.cc
index 88144f0ecbd..6684399ad6b 100644
--- a/chromium/media/video/software_video_encoder_test.cc
+++ b/chromium/media/video/software_video_encoder_test.cc
@@ -45,7 +45,7 @@ struct SwVideoTestParams {
VideoCodec codec;
VideoCodecProfile profile;
VideoPixelFormat pixel_format;
- int temporal_layers = 1;
+ absl::optional<SVCScalabilityMode> scalability_mode;
};
class SoftwareVideoEncoderTest
@@ -77,11 +77,11 @@ class SoftwareVideoEncoderTest
VideoColorSpace::JPEG(), VideoTransformation(), size, visible_rect,
size, extra_data, EncryptionScheme::kUnencrypted);
- if (codec_ == kCodecH264 || codec_ == kCodecVP8) {
+ if (codec_ == VideoCodec::kH264 || codec_ == VideoCodec::kVP8) {
#if BUILDFLAG(ENABLE_FFMPEG_VIDEO_DECODERS)
decoder_ = std::make_unique<FFmpegVideoDecoder>(&media_log_);
#endif
- } else if (codec_ == kCodecVP9) {
+ } else if (codec_ == VideoCodec::kVP9) {
#if BUILDFLAG(ENABLE_LIBVPX)
decoder_ = std::make_unique<VpxVideoDecoder>();
#endif
@@ -151,14 +151,14 @@ class SoftwareVideoEncoderTest
std::unique_ptr<VideoEncoder> CreateEncoder(VideoCodec codec) {
switch (codec) {
- case media::kCodecVP8:
- case media::kCodecVP9:
+ case media::VideoCodec::kVP8:
+ case media::VideoCodec::kVP9:
#if BUILDFLAG(ENABLE_LIBVPX)
return std::make_unique<media::VpxVideoEncoder>();
#else
return nullptr;
#endif
- case media::kCodecH264:
+ case media::VideoCodec::kH264:
#if BUILDFLAG(ENABLE_OPENH264)
return std::make_unique<OpenH264VideoEncoder>();
#else
@@ -271,7 +271,7 @@ TEST_P(SoftwareVideoEncoderTest, ForceAllKeyFrames) {
int frames = 10;
VideoEncoder::Options options;
options.frame_size = gfx::Size(640, 480);
- auto frame_duration = base::TimeDelta::FromSecondsD(1.0 / 60);
+ auto frame_duration = base::Seconds(1.0 / 60);
VideoEncoder::OutputCB output_cb = base::BindLambdaForTesting(
[&](VideoEncoderOutput output,
@@ -299,7 +299,7 @@ TEST_P(SoftwareVideoEncoderTest, ResizeFrames) {
int outputs_count = 0;
VideoEncoder::Options options;
options.frame_size = gfx::Size(640, 480);
- auto sec = base::TimeDelta::FromSeconds(1);
+ auto sec = base::Seconds(1);
VideoEncoder::OutputCB output_cb = base::BindLambdaForTesting(
[&](VideoEncoderOutput output,
@@ -333,8 +333,7 @@ TEST_P(SoftwareVideoEncoderTest, OutputCountEqualsFrameCount) {
options.framerate.value() * 10; // total duration 20s
int outputs_count = 0;
- auto frame_duration =
- base::TimeDelta::FromSecondsD(1.0 / options.framerate.value());
+ auto frame_duration = base::Seconds(1.0 / options.framerate.value());
VideoEncoder::OutputCB output_cb = base::BindLambdaForTesting(
[&](VideoEncoderOutput output,
@@ -369,7 +368,7 @@ TEST_P(SoftwareVideoEncoderTest, EncodeAndDecode) {
options.frame_size = gfx::Size(320, 200);
options.bitrate = Bitrate::ConstantBitrate(1e6); // 1Mbps
options.framerate = 25;
- if (codec_ == kCodecH264)
+ if (codec_ == VideoCodec::kH264)
options.avc.produce_annexb = true;
options.keyframe_interval = options.framerate.value() * 3; // every 3s
std::vector<scoped_refptr<VideoFrame>> frames_to_encode;
@@ -377,8 +376,7 @@ TEST_P(SoftwareVideoEncoderTest, EncodeAndDecode) {
int total_frames_count =
options.framerate.value() * 10; // total duration 10s
- auto frame_duration =
- base::TimeDelta::FromSecondsD(1.0 / options.framerate.value());
+ auto frame_duration = base::Seconds(1.0 / options.framerate.value());
VideoEncoder::OutputCB encoder_output_cb = base::BindLambdaForTesting(
[&, this](VideoEncoderOutput output,
@@ -433,8 +431,8 @@ TEST_P(SVCVideoEncoderTest, EncodeClipTemporalSvc) {
options.frame_size = gfx::Size(320, 200);
options.bitrate = Bitrate::ConstantBitrate(1e6); // 1Mbps
options.framerate = 25;
- options.temporal_layers = GetParam().temporal_layers;
- if (codec_ == kCodecH264)
+ options.scalability_mode = GetParam().scalability_mode;
+ if (codec_ == VideoCodec::kH264)
options.avc.produce_annexb = true;
std::vector<scoped_refptr<VideoFrame>> frames_to_encode;
@@ -442,8 +440,7 @@ TEST_P(SVCVideoEncoderTest, EncodeClipTemporalSvc) {
size_t total_frames_count = 80;
   // Encode all frames with 3 temporal layers and put all outputs in |chunks|.
- auto frame_duration =
- base::TimeDelta::FromSecondsD(1.0 / options.framerate.value());
+ auto frame_duration = base::Seconds(1.0 / options.framerate.value());
VideoEncoder::OutputCB encoder_output_cb = base::BindLambdaForTesting(
[&](VideoEncoderOutput output,
@@ -470,12 +467,27 @@ TEST_P(SVCVideoEncoderTest, EncodeClipTemporalSvc) {
RunUntilIdle();
EXPECT_EQ(chunks.size(), total_frames_count);
+ int num_temporal_layers = 1;
+ if (options.scalability_mode) {
+ switch (options.scalability_mode.value()) {
+ case SVCScalabilityMode::kL1T2:
+ num_temporal_layers = 2;
+ break;
+ case SVCScalabilityMode::kL1T3:
+ num_temporal_layers = 3;
+ break;
+ default:
+ NOTREACHED() << "Unsupported SVC: "
+ << GetScalabilityModeName(
+ options.scalability_mode.value());
+ }
+ }
// Try decoding saved outputs dropping varying number of layers
// and check that decoded frames indeed match the pattern:
// Layer Index 0: |0| | | |4| | | |8| | | |12|
// Layer Index 1: | | |2| | | |6| | | |10| | |
// Layer Index 2: | |1| |3| |5| |7| |9| |11| |
- for (int max_layer = 0; max_layer < options.temporal_layers; max_layer++) {
+ for (int max_layer = 0; max_layer < num_temporal_layers; max_layer++) {
std::vector<scoped_refptr<VideoFrame>> decoded_frames;
VideoDecoder::OutputCB decoder_output_cb =
base::BindLambdaForTesting([&](scoped_refptr<VideoFrame> frame) {
@@ -493,8 +505,7 @@ TEST_P(SVCVideoEncoderTest, EncodeClipTemporalSvc) {
}
DecodeAndWaitForStatus(DecoderBuffer::CreateEOSBuffer());
- int rate_decimator =
- (1 << (options.temporal_layers - 1)) / (1 << max_layer);
+ int rate_decimator = (1 << (num_temporal_layers - 1)) / (1 << max_layer);
ASSERT_EQ(decoded_frames.size(),
size_t{total_frames_count / rate_decimator});
for (auto i = 0u; i < decoded_frames.size(); i++) {
@@ -510,7 +521,7 @@ TEST_P(H264VideoEncoderTest, AvcExtraData) {
int outputs_count = 0;
VideoEncoder::Options options;
options.frame_size = gfx::Size(640, 480);
- auto sec = base::TimeDelta::FromSeconds(1);
+ auto sec = base::Seconds(1);
VideoEncoder::OutputCB output_cb = base::BindLambdaForTesting(
[&](VideoEncoderOutput output,
@@ -555,7 +566,7 @@ TEST_P(H264VideoEncoderTest, AnnexB) {
VideoEncoder::Options options;
options.frame_size = gfx::Size(640, 480);
options.avc.produce_annexb = true;
- auto sec = base::TimeDelta::FromSeconds(1);
+ auto sec = base::Seconds(1);
VideoEncoder::OutputCB output_cb = base::BindLambdaForTesting(
[&](VideoEncoderOutput output,
@@ -607,8 +618,7 @@ TEST_P(H264VideoEncoderTest, EncodeAndDecodeWithConfig) {
std::vector<scoped_refptr<VideoFrame>> decoded_frames;
std::vector<ChunkWithConfig> chunks;
size_t total_frames_count = 30;
- auto frame_duration =
- base::TimeDelta::FromSecondsD(1.0 / options.framerate.value());
+ auto frame_duration = base::Seconds(1.0 / options.framerate.value());
VideoEncoder::OutputCB encoder_output_cb = base::BindLambdaForTesting(
[&](VideoEncoderOutput output,
@@ -658,10 +668,13 @@ TEST_P(H264VideoEncoderTest, EncodeAndDecodeWithConfig) {
std::string PrintTestParams(
const testing::TestParamInfo<SwVideoTestParams>& info) {
- auto result = GetCodecName(info.param.codec) + "__" +
- GetProfileName(info.param.profile) + "__" +
- VideoPixelFormatToString(info.param.pixel_format) + "__" +
- base::NumberToString(info.param.temporal_layers);
+ auto result =
+ GetCodecName(info.param.codec) + "__" +
+ GetProfileName(info.param.profile) + "__" +
+ VideoPixelFormatToString(info.param.pixel_format) + "__" +
+ (info.param.scalability_mode
+ ? GetScalabilityModeName(info.param.scalability_mode.value())
+ : "");
// GTest doesn't like spaces, but profile names have spaces, so we need
// to replace them with underscores.
@@ -674,8 +687,8 @@ std::string PrintTestParams(
#if BUILDFLAG(ENABLE_OPENH264)
SwVideoTestParams kH264Params[] = {
- {kCodecH264, H264PROFILE_BASELINE, PIXEL_FORMAT_I420},
- {kCodecH264, H264PROFILE_BASELINE, PIXEL_FORMAT_XRGB}};
+ {VideoCodec::kH264, H264PROFILE_BASELINE, PIXEL_FORMAT_I420},
+ {VideoCodec::kH264, H264PROFILE_BASELINE, PIXEL_FORMAT_XRGB}};
INSTANTIATE_TEST_SUITE_P(H264Specific,
H264VideoEncoderTest,
@@ -688,9 +701,11 @@ INSTANTIATE_TEST_SUITE_P(H264Generic,
PrintTestParams);
SwVideoTestParams kH264SVCParams[] = {
- {kCodecH264, H264PROFILE_BASELINE, PIXEL_FORMAT_I420, 1},
- {kCodecH264, H264PROFILE_BASELINE, PIXEL_FORMAT_I420, 2},
- {kCodecH264, H264PROFILE_BASELINE, PIXEL_FORMAT_I420, 3}};
+ {VideoCodec::kH264, H264PROFILE_BASELINE, PIXEL_FORMAT_I420, absl::nullopt},
+ {VideoCodec::kH264, H264PROFILE_BASELINE, PIXEL_FORMAT_I420,
+ SVCScalabilityMode::kL1T2},
+ {VideoCodec::kH264, H264PROFILE_BASELINE, PIXEL_FORMAT_I420,
+ SVCScalabilityMode::kL1T3}};
INSTANTIATE_TEST_SUITE_P(H264TemporalSvc,
SVCVideoEncoderTest,
@@ -700,10 +715,10 @@ INSTANTIATE_TEST_SUITE_P(H264TemporalSvc,
#if BUILDFLAG(ENABLE_LIBVPX)
SwVideoTestParams kVpxParams[] = {
- {kCodecVP9, VP9PROFILE_PROFILE0, PIXEL_FORMAT_I420},
- {kCodecVP9, VP9PROFILE_PROFILE0, PIXEL_FORMAT_XRGB},
- {kCodecVP8, VP8PROFILE_ANY, PIXEL_FORMAT_I420},
- {kCodecVP8, VP8PROFILE_ANY, PIXEL_FORMAT_XRGB}};
+ {VideoCodec::kVP9, VP9PROFILE_PROFILE0, PIXEL_FORMAT_I420},
+ {VideoCodec::kVP9, VP9PROFILE_PROFILE0, PIXEL_FORMAT_XRGB},
+ {VideoCodec::kVP8, VP8PROFILE_ANY, PIXEL_FORMAT_I420},
+ {VideoCodec::kVP8, VP8PROFILE_ANY, PIXEL_FORMAT_XRGB}};
INSTANTIATE_TEST_SUITE_P(VpxGeneric,
SoftwareVideoEncoderTest,
@@ -711,12 +726,16 @@ INSTANTIATE_TEST_SUITE_P(VpxGeneric,
PrintTestParams);
SwVideoTestParams kVpxSVCParams[] = {
- {kCodecVP9, VP9PROFILE_PROFILE0, PIXEL_FORMAT_I420, 1},
- {kCodecVP9, VP9PROFILE_PROFILE0, PIXEL_FORMAT_I420, 2},
- {kCodecVP9, VP9PROFILE_PROFILE0, PIXEL_FORMAT_I420, 3},
- {kCodecVP8, VP8PROFILE_ANY, PIXEL_FORMAT_I420, 1},
- {kCodecVP8, VP8PROFILE_ANY, PIXEL_FORMAT_I420, 2},
- {kCodecVP8, VP8PROFILE_ANY, PIXEL_FORMAT_I420, 3}};
+ {VideoCodec::kVP9, VP9PROFILE_PROFILE0, PIXEL_FORMAT_I420, absl::nullopt},
+ {VideoCodec::kVP9, VP9PROFILE_PROFILE0, PIXEL_FORMAT_I420,
+ SVCScalabilityMode::kL1T2},
+ {VideoCodec::kVP9, VP9PROFILE_PROFILE0, PIXEL_FORMAT_I420,
+ SVCScalabilityMode::kL1T3},
+ {VideoCodec::kVP8, VP8PROFILE_ANY, PIXEL_FORMAT_I420, absl::nullopt},
+ {VideoCodec::kVP8, VP8PROFILE_ANY, PIXEL_FORMAT_I420,
+ SVCScalabilityMode::kL1T2},
+ {VideoCodec::kVP8, VP8PROFILE_ANY, PIXEL_FORMAT_I420,
+ SVCScalabilityMode::kL1T3}};
INSTANTIATE_TEST_SUITE_P(VpxTemporalSvc,
SVCVideoEncoderTest,
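
The decoded-frame-count check in EncodeClipTemporalSvc relies on the dyadic temporal pattern; the decimation arithmetic can be pinned down with a couple of static_asserts (a standalone sketch mirroring the expression used in the test, with 80 encoded frames and three layers):

constexpr int RateDecimator(int num_temporal_layers, int max_layer) {
  return (1 << (num_temporal_layers - 1)) / (1 << max_layer);
}
static_assert(RateDecimator(3, 0) == 4, "base layer only: 20 frames decodable");
static_assert(RateDecimator(3, 1) == 2, "two layers: 40 frames decodable");
static_assert(RateDecimator(3, 2) == 1, "all layers: 80 frames decodable");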
diff --git a/chromium/media/video/supported_video_decoder_config_unittest.cc b/chromium/media/video/supported_video_decoder_config_unittest.cc
index dd3dabdbe3d..14f0ad9146f 100644
--- a/chromium/media/video/supported_video_decoder_config_unittest.cc
+++ b/chromium/media/video/supported_video_decoder_config_unittest.cc
@@ -13,7 +13,7 @@ class SupportedVideoDecoderConfigTest : public ::testing::Test {
public:
SupportedVideoDecoderConfigTest()
: decoder_config_(
- TestVideoConfig::NormalCodecProfile(kCodecH264,
+ TestVideoConfig::NormalCodecProfile(VideoCodec::kH264,
H264PROFILE_EXTENDED)) {
supported_config_.profile_min = H264PROFILE_MIN;
supported_config_.profile_max = H264PROFILE_MAX;
diff --git a/chromium/media/video/video_encode_accelerator.cc b/chromium/media/video/video_encode_accelerator.cc
index 7480f621ad4..1d1911adac7 100644
--- a/chromium/media/video/video_encode_accelerator.cc
+++ b/chromium/media/video/video_encode_accelerator.cc
@@ -13,13 +13,6 @@
namespace media {
-H264Metadata::H264Metadata() = default;
-H264Metadata::~H264Metadata() = default;
-H264Metadata::H264Metadata(const H264Metadata&) = default;
-
-Vp8Metadata::Vp8Metadata()
- : non_reference(false), temporal_idx(0), layer_sync(false) {}
-
Vp9Metadata::Vp9Metadata() = default;
Vp9Metadata::~Vp9Metadata() = default;
Vp9Metadata::Vp9Metadata(const Vp9Metadata&) = default;
@@ -91,7 +84,7 @@ std::string VideoEncodeAccelerator::Config::AsHumanReadableString() const {
if (gop_length)
str += base::StringPrintf(", gop_length: %u", gop_length.value());
- if (VideoCodecProfileToVideoCodec(output_profile) == kCodecH264) {
+ if (VideoCodecProfileToVideoCodec(output_profile) == VideoCodec::kH264) {
if (h264_output_level) {
str += base::StringPrintf(", h264_output_level: %u",
h264_output_level.value());
@@ -148,19 +141,22 @@ void VideoEncodeAccelerator::Client::NotifyEncoderInfoChange(
VideoEncodeAccelerator::~VideoEncodeAccelerator() = default;
VideoEncodeAccelerator::SupportedProfile::SupportedProfile()
- : profile(media::VIDEO_CODEC_PROFILE_UNKNOWN),
- max_framerate_numerator(0),
- max_framerate_denominator(0) {}
+ : profile(media::VIDEO_CODEC_PROFILE_UNKNOWN) {}
VideoEncodeAccelerator::SupportedProfile::SupportedProfile(
VideoCodecProfile profile,
const gfx::Size& max_resolution,
uint32_t max_framerate_numerator,
- uint32_t max_framerate_denominator)
+ uint32_t max_framerate_denominator,
+ const std::vector<SVCScalabilityMode>& scalability_modes)
: profile(profile),
max_resolution(max_resolution),
max_framerate_numerator(max_framerate_numerator),
- max_framerate_denominator(max_framerate_denominator) {}
+ max_framerate_denominator(max_framerate_denominator),
+ scalability_modes(scalability_modes) {}
+
+VideoEncodeAccelerator::SupportedProfile::SupportedProfile(
+ const SupportedProfile& other) = default;
VideoEncodeAccelerator::SupportedProfile::~SupportedProfile() = default;
@@ -191,6 +187,15 @@ void VideoEncodeAccelerator::RequestEncodingParametersChange(
Bitrate::ConstantBitrate(bitrate_allocation.GetSumBps()), framerate);
}
+bool operator==(const VideoEncodeAccelerator::SupportedProfile& l,
+ const VideoEncodeAccelerator::SupportedProfile& r) {
+ return l.profile == r.profile && l.min_resolution == r.min_resolution &&
+ l.max_resolution == r.max_resolution &&
+ l.max_framerate_numerator == r.max_framerate_numerator &&
+ l.max_framerate_denominator == r.max_framerate_denominator &&
+ l.scalability_modes == r.scalability_modes;
+}
+
bool operator==(const H264Metadata& l, const H264Metadata& r) {
return l.temporal_idx == r.temporal_idx && l.layer_sync == r.layer_sync;
}
diff --git a/chromium/media/video/video_encode_accelerator.h b/chromium/media/video/video_encode_accelerator.h
index d275b40c48d..a6882b3c2d5 100644
--- a/chromium/media/video/video_encode_accelerator.h
+++ b/chromium/media/video/video_encode_accelerator.h
@@ -18,6 +18,7 @@
#include "media/base/bitrate.h"
#include "media/base/bitstream_buffer.h"
#include "media/base/media_export.h"
+#include "media/base/svc_scalability_mode.h"
#include "media/base/video_bitrate_allocation.h"
#include "media/base/video_codecs.h"
#include "media/base/video_frame.h"
@@ -36,10 +37,6 @@ class VideoFrame;
// reference any reference buffer containing a frame with
// temporal_idx > 0.
struct MEDIA_EXPORT H264Metadata final {
- H264Metadata();
- ~H264Metadata();
- H264Metadata(const H264Metadata&);
-
uint8_t temporal_idx = 0;
bool layer_sync = false;
};
@@ -53,10 +50,9 @@ struct MEDIA_EXPORT H264Metadata final {
// reference any reference buffer containing a frame with
// temporal_idx > 0.
struct MEDIA_EXPORT Vp8Metadata final {
- Vp8Metadata();
- bool non_reference;
- uint8_t temporal_idx;
- bool layer_sync;
+ bool non_reference = false;
+ uint8_t temporal_idx = 0;
+ bool layer_sync = false;
};
// Metadata for a VP9 bitstream buffer, this struct resembles
@@ -123,18 +119,21 @@ class MEDIA_EXPORT VideoEncodeAccelerator {
// Specification of an encoding profile supported by an encoder.
struct MEDIA_EXPORT SupportedProfile {
SupportedProfile();
- SupportedProfile(VideoCodecProfile profile,
- const gfx::Size& max_resolution,
- uint32_t max_framerate_numerator = 0u,
- uint32_t max_framerate_denominator = 1u);
- SupportedProfile(const SupportedProfile& other) = default;
+ SupportedProfile(
+ VideoCodecProfile profile,
+ const gfx::Size& max_resolution,
+ uint32_t max_framerate_numerator = 0u,
+ uint32_t max_framerate_denominator = 1u,
+ const std::vector<SVCScalabilityMode>& scalability_modes = {});
+ SupportedProfile(const SupportedProfile& other);
SupportedProfile& operator=(const SupportedProfile& other) = default;
~SupportedProfile();
VideoCodecProfile profile;
gfx::Size min_resolution;
gfx::Size max_resolution;
- uint32_t max_framerate_numerator;
- uint32_t max_framerate_denominator;
+ uint32_t max_framerate_numerator{0};
+ uint32_t max_framerate_denominator{0};
+ std::vector<SVCScalabilityMode> scalability_modes;
};
using SupportedProfiles = std::vector<SupportedProfile>;
using FlushCallback = base::OnceCallback<void(bool)>;
@@ -399,6 +398,8 @@ class MEDIA_EXPORT VideoEncodeAccelerator {
virtual ~VideoEncodeAccelerator();
};
+MEDIA_EXPORT bool operator==(const VideoEncodeAccelerator::SupportedProfile& l,
+ const VideoEncodeAccelerator::SupportedProfile& r);
MEDIA_EXPORT bool operator==(const Vp8Metadata& l, const Vp8Metadata& r);
MEDIA_EXPORT bool operator==(const Vp9Metadata& l, const Vp9Metadata& r);
MEDIA_EXPORT bool operator==(const BitstreamBufferMetadata& l,
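
With the extended constructor, an encoder can now advertise which temporal modes it supports; a hypothetical VP9 entry (all numbers illustrative):

#include "media/base/svc_scalability_mode.h"
#include "media/video/video_encode_accelerator.h"
#include "ui/gfx/geometry/size.h"

media::VideoEncodeAccelerator::SupportedProfile MakeVp9Profile() {
  return media::VideoEncodeAccelerator::SupportedProfile(
      media::VP9PROFILE_PROFILE0, gfx::Size(3840, 2160),
      /*max_framerate_numerator=*/30, /*max_framerate_denominator=*/1,
      {media::SVCScalabilityMode::kL1T2, media::SVCScalabilityMode::kL1T3});
}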
diff --git a/chromium/media/video/video_encode_accelerator_adapter.cc b/chromium/media/video/video_encode_accelerator_adapter.cc
index 3e66b78a5e7..e567569e056 100644
--- a/chromium/media/video/video_encode_accelerator_adapter.cc
+++ b/chromium/media/video/video_encode_accelerator_adapter.cc
@@ -17,6 +17,7 @@
#include "base/time/time.h"
#include "build/build_config.h"
#include "media/base/bind_to_current_loop.h"
+#include "media/base/svc_scalability_mode.h"
#include "media/base/video_frame.h"
#include "media/base/video_util.h"
#if BUILDFLAG(USE_PROPRIETARY_CODECS)
@@ -50,14 +51,28 @@ VideoEncodeAccelerator::Config SetUpVeaConfig(
VideoEncodeAccelerator::Config(format, opts.frame_size, profile, bitrate,
initial_framerate, opts.keyframe_interval);
- if (opts.temporal_layers > 1) {
+ size_t num_temporal_layers = 1;
+ if (opts.scalability_mode) {
+ switch (opts.scalability_mode.value()) {
+ case SVCScalabilityMode::kL1T2:
+ num_temporal_layers = 2;
+ break;
+ case SVCScalabilityMode::kL1T3:
+ num_temporal_layers = 3;
+ break;
+ default:
+ NOTREACHED() << "Unsupported SVC: "
+ << GetScalabilityModeName(opts.scalability_mode.value());
+ }
+ }
+ if (num_temporal_layers > 1) {
VideoEncodeAccelerator::Config::SpatialLayer layer;
layer.width = opts.frame_size.width();
layer.height = opts.frame_size.height();
layer.bitrate_bps = config.bitrate.target();
if (initial_framerate.has_value())
layer.framerate = initial_framerate.value();
- layer.num_of_temporal_layers = opts.temporal_layers;
+ layer.num_of_temporal_layers = num_temporal_layers;
config.spatial_layers.push_back(layer);
}
@@ -645,8 +660,7 @@ VideoEncodeAcceleratorAdapter::PrepareCpuFrame(
// Keep the SharedMemoryHolder until the frame is destroyed so that the
// memory is not freed prematurely.
shared_frame->AddDestructionObserver(BindToCurrentLoop(base::BindOnce(
- base::DoNothing::Once<
- std::unique_ptr<base::UnsafeSharedMemoryPool::Handle>>(),
+ [](std::unique_ptr<base::UnsafeSharedMemoryPool::Handle>) {},
std::move(handle))));
auto status =
ConvertAndScaleFrame(*mapped_src_frame, *shared_frame, resize_buf_);
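
The base::DoNothing replacement above is the usual keep-alive idiom: a no-op callback that owns the pooled handle is attached as a destruction observer. A sketch of the same idiom pulled out into a helper (the function itself is made up):

#include <memory>

#include "base/bind.h"
#include "base/memory/unsafe_shared_memory_pool.h"
#include "media/base/bind_to_current_loop.h"
#include "media/base/video_frame.h"

void KeepHandleAliveUntilFrameIsDestroyed(
    media::VideoFrame* frame,
    std::unique_ptr<base::UnsafeSharedMemoryPool::Handle> handle) {
  // The lambda body does nothing; it only exists to own |handle| until the
  // frame is destroyed, and BindToCurrentLoop posts it back to this thread.
  frame->AddDestructionObserver(media::BindToCurrentLoop(base::BindOnce(
      [](std::unique_ptr<base::UnsafeSharedMemoryPool::Handle>) {},
      std::move(handle))));
}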
diff --git a/chromium/media/video/video_encode_accelerator_adapter_test.cc b/chromium/media/video/video_encode_accelerator_adapter_test.cc
index 81add5e8301..e4e578be8b6 100644
--- a/chromium/media/video/video_encode_accelerator_adapter_test.cc
+++ b/chromium/media/video/video_encode_accelerator_adapter_test.cc
@@ -207,8 +207,8 @@ TEST_F(VideoEncodeAcceleratorAdapterTest, InitializeAfterFirstFrame) {
adapter()->Initialize(profile_, options, std::move(output_cb),
ValidatingStatusCB());
- auto frame = CreateGreenFrame(options.frame_size, pixel_format,
- base::TimeDelta::FromMilliseconds(1));
+ auto frame =
+ CreateGreenFrame(options.frame_size, pixel_format, base::Milliseconds(1));
adapter()->Encode(frame, true, ValidatingStatusCB());
RunUntilIdle();
EXPECT_EQ(outputs_count, 1);
@@ -217,15 +217,15 @@ TEST_F(VideoEncodeAcceleratorAdapterTest, InitializeAfterFirstFrame) {
TEST_F(VideoEncodeAcceleratorAdapterTest, TemporalSvc) {
VideoEncoder::Options options;
options.frame_size = gfx::Size(640, 480);
- options.temporal_layers = 3;
+ options.scalability_mode = SVCScalabilityMode::kL1T3;
int outputs_count = 0;
auto pixel_format = PIXEL_FORMAT_I420;
VideoEncoder::OutputCB output_cb = base::BindLambdaForTesting(
[&](VideoEncoderOutput output,
absl::optional<VideoEncoder::CodecDescription>) {
- if (output.timestamp == base::TimeDelta::FromMilliseconds(1))
+ if (output.timestamp == base::Milliseconds(1))
EXPECT_EQ(output.temporal_id, 1);
- else if (output.timestamp == base::TimeDelta::FromMilliseconds(2))
+ else if (output.timestamp == base::Milliseconds(2))
EXPECT_EQ(output.temporal_id, 1);
else
EXPECT_EQ(output.temporal_id, 2);
@@ -235,10 +235,10 @@ TEST_F(VideoEncodeAcceleratorAdapterTest, TemporalSvc) {
vea()->SetEncodingCallback(base::BindLambdaForTesting(
[&](BitstreamBuffer&, bool keyframe, scoped_refptr<VideoFrame> frame) {
BitstreamBufferMetadata result(1, keyframe, frame->timestamp());
- if (frame->timestamp() == base::TimeDelta::FromMilliseconds(1)) {
+ if (frame->timestamp() == base::Milliseconds(1)) {
result.h264 = H264Metadata();
result.h264->temporal_idx = 1;
- } else if (frame->timestamp() == base::TimeDelta::FromMilliseconds(2)) {
+ } else if (frame->timestamp() == base::Milliseconds(2)) {
result.vp8 = Vp8Metadata();
result.vp8->temporal_idx = 1;
} else {
@@ -250,12 +250,12 @@ TEST_F(VideoEncodeAcceleratorAdapterTest, TemporalSvc) {
adapter()->Initialize(profile_, options, std::move(output_cb),
ValidatingStatusCB());
- auto frame1 = CreateGreenFrame(options.frame_size, pixel_format,
- base::TimeDelta::FromMilliseconds(1));
- auto frame2 = CreateGreenFrame(options.frame_size, pixel_format,
- base::TimeDelta::FromMilliseconds(2));
- auto frame3 = CreateGreenFrame(options.frame_size, pixel_format,
- base::TimeDelta::FromMilliseconds(3));
+ auto frame1 =
+ CreateGreenFrame(options.frame_size, pixel_format, base::Milliseconds(1));
+ auto frame2 =
+ CreateGreenFrame(options.frame_size, pixel_format, base::Milliseconds(2));
+ auto frame3 =
+ CreateGreenFrame(options.frame_size, pixel_format, base::Milliseconds(3));
adapter()->Encode(frame1, true, ValidatingStatusCB());
RunUntilIdle();
adapter()->Encode(frame2, true, ValidatingStatusCB());
@@ -285,8 +285,8 @@ TEST_F(VideoEncodeAcceleratorAdapterTest, FlushDuringInitialize) {
adapter()->Initialize(profile_, options, std::move(output_cb),
ValidatingStatusCB());
- auto frame = CreateGreenFrame(options.frame_size, pixel_format,
- base::TimeDelta::FromMilliseconds(1));
+ auto frame =
+ CreateGreenFrame(options.frame_size, pixel_format, base::Milliseconds(1));
adapter()->Encode(frame, true, ValidatingStatusCB());
adapter()->Flush(base::BindLambdaForTesting([&](Status s) {
EXPECT_TRUE(s.is_ok());
@@ -316,8 +316,8 @@ TEST_F(VideoEncodeAcceleratorAdapterTest, InitializationError) {
adapter()->Initialize(VIDEO_CODEC_PROFILE_UNKNOWN, options,
std::move(output_cb), ValidatingStatusCB());
- auto frame = CreateGreenFrame(options.frame_size, pixel_format,
- base::TimeDelta::FromMilliseconds(1));
+ auto frame =
+ CreateGreenFrame(options.frame_size, pixel_format, base::Milliseconds(1));
adapter()->Encode(frame, true, std::move(expect_error_done_cb));
RunUntilIdle();
EXPECT_EQ(outputs_count, 0);
@@ -350,10 +350,10 @@ TEST_P(VideoEncodeAcceleratorAdapterTest, TwoFramesResize) {
adapter()->Initialize(profile_, options, std::move(output_cb),
ValidatingStatusCB());
- auto small_frame = CreateGreenFrame(small_size, pixel_format,
- base::TimeDelta::FromMilliseconds(1));
- auto large_frame = CreateGreenFrame(large_size, pixel_format,
- base::TimeDelta::FromMilliseconds(2));
+ auto small_frame =
+ CreateGreenFrame(small_size, pixel_format, base::Milliseconds(1));
+ auto large_frame =
+ CreateGreenFrame(large_size, pixel_format, base::Milliseconds(2));
adapter()->Encode(small_frame, true, ValidatingStatusCB());
adapter()->Encode(large_frame, false, ValidatingStatusCB());
RunUntilIdle();
@@ -380,10 +380,10 @@ TEST_F(VideoEncodeAcceleratorAdapterTest, AutomaticResizeSupport) {
adapter()->Initialize(profile_, options, std::move(output_cb),
ValidatingStatusCB());
- auto frame1 = CreateGreenFrame(small_size, pixel_format,
- base::TimeDelta::FromMilliseconds(1));
- auto frame2 = CreateGreenFrame(small_size, pixel_format,
- base::TimeDelta::FromMilliseconds(2));
+ auto frame1 =
+ CreateGreenFrame(small_size, pixel_format, base::Milliseconds(1));
+ auto frame2 =
+ CreateGreenFrame(small_size, pixel_format, base::Milliseconds(2));
adapter()->Encode(frame1, true, ValidatingStatusCB());
adapter()->Encode(frame2, false, ValidatingStatusCB());
RunUntilIdle();
@@ -437,8 +437,8 @@ TEST_P(VideoEncodeAcceleratorAdapterTest, RunWithAllPossibleInputConversions) {
else if (rem < 8)
format = PIXEL_FORMAT_NV12;
bool key = frame_index % 9 == 0;
- auto frame = CreateGreenFrame(
- size, format, base::TimeDelta::FromMilliseconds(frame_index));
+ auto frame =
+ CreateGreenFrame(size, format, base::Milliseconds(frame_index));
adapter()->Encode(frame, key, ValidatingStatusCB());
}
diff --git a/chromium/media/video/video_encoder_fallback_test.cc b/chromium/media/video/video_encoder_fallback_test.cc
index c2a33be7b05..1635e23a23e 100644
--- a/chromium/media/video/video_encoder_fallback_test.cc
+++ b/chromium/media/video/video_encoder_fallback_test.cc
@@ -119,7 +119,7 @@ TEST_F(VideoEncoderFallbackTest, NoFallbackEncoding) {
for (int i = 0; i < kFrameCount; i++) {
auto frame = VideoFrame::CreateFrame(PIXEL_FORMAT_I420, kFrameSize,
gfx::Rect(kFrameSize), kFrameSize,
- base::TimeDelta::FromSeconds(i));
+ base::Seconds(i));
fallback_encoder_->Encode(frame, true, ValidatingStatusCB());
}
RunLoop();
@@ -177,7 +177,7 @@ TEST_F(VideoEncoderFallbackTest, FallbackOnInitialize) {
for (int i = 0; i < kFrameCount; i++) {
auto frame = VideoFrame::CreateFrame(PIXEL_FORMAT_I420, kFrameSize,
gfx::Rect(kFrameSize), kFrameSize,
- base::TimeDelta::FromSeconds(i));
+ base::Seconds(i));
fallback_encoder_->Encode(frame, true, ValidatingStatusCB());
}
RunLoop();
@@ -218,7 +218,7 @@ TEST_F(VideoEncoderFallbackTest, FallbackOnEncode) {
RunStatusCallbackAync(std::move(done_cb));
}));
- auto encoder_switch_time = base::TimeDelta::FromSeconds(kFrameCount / 2);
+ auto encoder_switch_time = base::Seconds(kFrameCount / 2);
// Start failing encodes after half of the frames.
EXPECT_CALL(*main_video_encoder_, Encode(_, _, _))
@@ -259,7 +259,7 @@ TEST_F(VideoEncoderFallbackTest, FallbackOnEncode) {
for (int i = 0; i < kFrameCount; i++) {
auto frame = VideoFrame::CreateFrame(PIXEL_FORMAT_I420, kFrameSize,
gfx::Rect(kFrameSize), kFrameSize,
- base::TimeDelta::FromSeconds(i));
+ base::Seconds(i));
fallback_encoder_->Encode(frame, true, ValidatingStatusCB());
}
RunLoop();
@@ -296,10 +296,30 @@ TEST_F(VideoEncoderFallbackTest, SecondaryFailureOnInitialize) {
StatusCode::kEncoderUnsupportedCodec);
}));
+ EXPECT_CALL(*main_video_encoder_, Encode(_, _, _))
+ .WillRepeatedly(
+ Invoke([&, this](scoped_refptr<VideoFrame> frame, bool key_frame,
+ VideoEncoder::StatusCB done_cb) {
+ RunStatusCallbackAync(std::move(done_cb),
+ StatusCode::kEncoderInitializeNeverCompleted);
+ }));
+
fallback_encoder_->Initialize(
profile, options, std::move(output_cb),
ValidatingStatusCB(StatusCode::kEncoderUnsupportedCodec));
+ for (int i = 0; i < kFrameCount; i++) {
+ auto frame = VideoFrame::CreateFrame(PIXEL_FORMAT_I420, kFrameSize,
+ gfx::Rect(kFrameSize), kFrameSize,
+ base::Seconds(i));
+ auto done_callback = base::BindLambdaForTesting([this](Status s) {
+ EXPECT_TRUE(callback_runner_->RunsTasksInCurrentSequence());
+ EXPECT_EQ(s.code(), StatusCode::kEncoderUnsupportedCodec);
+ callback_runner_->DeleteSoon(FROM_HERE, std::move(fallback_encoder_));
+ });
+ fallback_encoder_->Encode(frame, true, std::move(done_callback));
+ }
+
RunLoop();
EXPECT_TRUE(FallbackHappened());
}
diff --git a/chromium/media/video/vpx_video_encoder.cc b/chromium/media/video/vpx_video_encoder.cc
index c0b2b9ade67..af76a3f81b0 100644
--- a/chromium/media/video/vpx_video_encoder.cc
+++ b/chromium/media/video/vpx_video_encoder.cc
@@ -12,6 +12,7 @@
#include "base/time/time.h"
#include "base/trace_event/trace_event.h"
#include "media/base/bind_to_current_loop.h"
+#include "media/base/svc_scalability_mode.h"
#include "media/base/video_frame.h"
#include "media/base/video_util.h"
#include "third_party/libvpx/source/libvpx/vpx/vp8cx.h"
@@ -128,10 +129,11 @@ Status SetUpVpxConfig(const VideoEncoder::Options& opts,
config->g_w = opts.frame_size.width();
config->g_h = opts.frame_size.height();
- switch (opts.temporal_layers) {
- case 1:
- break;
- case 2:
+ if (!opts.scalability_mode)
+ return Status();
+
+ switch (opts.scalability_mode.value()) {
+ case SVCScalabilityMode::kL1T2:
// Frame Pattern:
// Layer Index 0: |0| |2| |4| |6| |8|
// Layer Index 1: | |1| |3| |5| |7| |
@@ -150,7 +152,7 @@ Status SetUpVpxConfig(const VideoEncoder::Options& opts,
config->temporal_layering_mode = VP9E_TEMPORAL_LAYERING_MODE_0101;
config->g_error_resilient = VPX_ERROR_RESILIENT_DEFAULT;
break;
- case 3:
+ case SVCScalabilityMode::kL1T3:
// Frame Pattern:
// Layer Index 0: |0| | | |4| | | |8| | | |12|
// Layer Index 1: | | |2| | | |6| | | |10| | |
@@ -348,7 +350,6 @@ void VpxVideoEncoder::Initialize(VideoCodecProfile profile,
void VpxVideoEncoder::Encode(scoped_refptr<VideoFrame> frame,
bool key_frame,
StatusCB done_cb) {
- Status status;
done_cb = BindToCurrentLoop(std::move(done_cb));
if (!codec_) {
std::move(done_cb).Run(StatusCode::kEncoderInitializeNeverCompleted);
@@ -368,7 +369,7 @@ void VpxVideoEncoder::Encode(scoped_refptr<VideoFrame> frame,
frame->format() == PIXEL_FORMAT_ARGB;
if ((!frame->IsMappable() && !frame->HasGpuMemoryBuffer()) ||
!supported_format) {
- status =
+ Status status =
Status(StatusCode::kEncoderFailedEncode, "Unexpected frame format.")
.WithData("IsMappable", frame->IsMappable())
.WithData("format", frame->format());
@@ -392,6 +393,7 @@ void VpxVideoEncoder::Encode(scoped_refptr<VideoFrame> frame,
is_yuv ? frame->format() : PIXEL_FORMAT_I420, options_.frame_size,
gfx::Rect(options_.frame_size), options_.frame_size,
frame->timestamp());
+ Status status;
if (resized_frame) {
status = ConvertAndScaleFrame(*frame, *resized_frame, resize_buf_);
} else {
@@ -498,8 +500,8 @@ void VpxVideoEncoder::Encode(scoped_refptr<VideoFrame> frame,
vpx_codec_err_to_string(vpx_error),
vpx_codec_error_detail(codec_.get()));
DLOG(ERROR) << msg;
- status = Status(StatusCode::kEncoderFailedEncode, msg)
- .WithData("vpx_error", vpx_error);
+ Status status = Status(StatusCode::kEncoderFailedEncode, msg)
+ .WithData("vpx_error", vpx_error);
std::move(done_cb).Run(std::move(status));
return;
}
@@ -592,12 +594,12 @@ base::TimeDelta VpxVideoEncoder::GetFrameDuration(const VideoFrame& frame) {
// Options have framerate specified, use it.
if (options_.framerate.has_value())
- return base::TimeDelta::FromSecondsD(1.0 / options_.framerate.value());
+ return base::Seconds(1.0 / options_.framerate.value());
   // No real way to figure out the duration; use the time passed since the
   // last frame as an educated guess, but clamp it within reasonable limits.
- constexpr auto min_duration = base::TimeDelta::FromSecondsD(1.0 / 60.0);
- constexpr auto max_duration = base::TimeDelta::FromSecondsD(1.0 / 24.0);
+ constexpr auto min_duration = base::Seconds(1.0 / 60.0);
+ constexpr auto max_duration = base::Seconds(1.0 / 24.0);
auto duration = frame.timestamp() - last_frame_timestamp_;
return base::clamp(duration, min_duration, max_duration);
}
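
GetFrameDuration() now clamps its guess with base::clamp; the same logic as a standalone sketch (using std::clamp to stay self-contained):

#include <algorithm>

#include "base/time/time.h"

base::TimeDelta GuessFrameDuration(base::TimeDelta current_timestamp,
                                   base::TimeDelta last_frame_timestamp) {
  // Without a configured framerate, use the gap since the previous frame,
  // clamped to the [60 fps, 24 fps] duration range.
  constexpr base::TimeDelta kMinDuration = base::Seconds(1.0 / 60.0);
  constexpr base::TimeDelta kMaxDuration = base::Seconds(1.0 / 24.0);
  return std::clamp(current_timestamp - last_frame_timestamp, kMinDuration,
                    kMaxDuration);
}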
diff --git a/chromium/media/webrtc/BUILD.gn b/chromium/media/webrtc/BUILD.gn
index 61bec7b7dd8..8eb3f3744ce 100644
--- a/chromium/media/webrtc/BUILD.gn
+++ b/chromium/media/webrtc/BUILD.gn
@@ -13,14 +13,16 @@ component("webrtc") {
"audio_delay_stats_reporter.h",
"helpers.cc",
"helpers.h",
- "webrtc_switches.cc",
- "webrtc_switches.h",
+ "webrtc_features.cc",
+ "webrtc_features.h",
]
defines = [ "IS_MEDIA_WEBRTC_IMPL" ]
deps = [
"//base",
+ "//build:chromecast_buildflags",
+ "//media",
"//media:shared_memory_support",
"//third_party/webrtc_overrides:webrtc_component",
]
@@ -28,4 +30,15 @@ component("webrtc") {
source_set("unit_tests") {
testonly = true
+ deps = [
+ "//base",
+ "//base/test:test_support",
+ "//build:chromecast_buildflags",
+ "//media:test_support",
+ "//media/webrtc",
+ "//testing/gmock",
+ "//testing/gtest",
+ "//third_party/webrtc_overrides:webrtc_component",
+ ]
+ sources = [ "helpers_unittests.cc" ]
}
diff --git a/chromium/media/webrtc/OWNERS b/chromium/media/webrtc/OWNERS
index 217dfcd2975..3710dfbbff1 100644
--- a/chromium/media/webrtc/OWNERS
+++ b/chromium/media/webrtc/OWNERS
@@ -1,3 +1,5 @@
-olka@chromium.org
+alessiob@chromium.org
dalecurtis@chromium.org
-ossu@chromium.org
+olka@chromium.org
+peah@chromium.org
+saza@chromium.org
diff --git a/chromium/media/webrtc/audio_delay_stats_reporter.cc b/chromium/media/webrtc/audio_delay_stats_reporter.cc
index 6d9b93cb59e..a8072752615 100644
--- a/chromium/media/webrtc/audio_delay_stats_reporter.cc
+++ b/chromium/media/webrtc/audio_delay_stats_reporter.cc
@@ -35,8 +35,8 @@ int CalculateVariance(const std::vector<int>& values) {
AudioDelayStatsReporter::AudioDelayStatsReporter(int variance_window_size)
: variance_window_size_(variance_window_size),
- delay_histogram_min_(base::TimeDelta::FromMilliseconds(1)),
- delay_histogram_max_(base::TimeDelta::FromMilliseconds(500)) {
+ delay_histogram_min_(base::Milliseconds(1)),
+ delay_histogram_max_(base::Milliseconds(500)) {
DCHECK_GT(variance_window_size_, 1);
capture_delays_ms_.reserve(variance_window_size_);
render_delays_ms_.reserve(variance_window_size_);
diff --git a/chromium/media/webrtc/audio_delay_stats_reporter.h b/chromium/media/webrtc/audio_delay_stats_reporter.h
index b300a0e16b4..b481d03b20e 100644
--- a/chromium/media/webrtc/audio_delay_stats_reporter.h
+++ b/chromium/media/webrtc/audio_delay_stats_reporter.h
@@ -21,6 +21,10 @@ class COMPONENT_EXPORT(MEDIA_WEBRTC) AudioDelayStatsReporter {
// |variance_window_size| is the window size, that is the number of delay
// values, on which to calculate the variance.
AudioDelayStatsReporter(int variance_window_size);
+
+ AudioDelayStatsReporter(const AudioDelayStatsReporter&) = delete;
+ AudioDelayStatsReporter& operator=(const AudioDelayStatsReporter&) = delete;
+
virtual ~AudioDelayStatsReporter();
// Reports delay stats and stores delays. When the number of stored delays
@@ -41,8 +45,6 @@ class COMPONENT_EXPORT(MEDIA_WEBRTC) AudioDelayStatsReporter {
// Ensures that all function calls are done on the same thread.
THREAD_CHECKER(thread_checker_);
-
- DISALLOW_COPY_AND_ASSIGN(AudioDelayStatsReporter);
};
} // namespace media
diff --git a/chromium/media/webrtc/helpers.cc b/chromium/media/webrtc/helpers.cc
index e5aecb7f6a9..373e6b0a177 100644
--- a/chromium/media/webrtc/helpers.cc
+++ b/chromium/media/webrtc/helpers.cc
@@ -4,19 +4,207 @@
#include "media/webrtc/helpers.h"
+#include "base/feature_list.h"
+#include "base/files/file_util.h"
+#include "base/logging.h"
+#include "build/build_config.h"
+#include "build/chromecast_buildflags.h"
+#include "media/webrtc/webrtc_features.h"
+#include "third_party/webrtc/api/audio/echo_canceller3_config.h"
+#include "third_party/webrtc/api/audio/echo_canceller3_factory.h"
+#include "third_party/webrtc/modules/audio_processing/aec_dump/aec_dump_factory.h"
+#include "third_party/webrtc/modules/audio_processing/include/audio_processing.h"
+
namespace media {
+namespace {
+
+// The analog gain controller is not supported on mobile - i.e., Android, iOS.
+#if defined(OS_ANDROID) || defined(OS_IOS)
+constexpr bool kAnalogAgcSupported = false;
+#else
+constexpr bool kAnalogAgcSupported = true;
+#endif // defined(OS_ANDROID) || defined(OS_IOS)
+
+// The analog gain controller can only be disabled on Chromecast.
+#if BUILDFLAG(IS_CHROMECAST)
+constexpr bool kAllowToDisableAnalogAgc = true;
+#else
+constexpr bool kAllowToDisableAnalogAgc = false;
+#endif // BUILDFLAG(IS_CHROMECAST)
+
+// AGC1 mode.
+using Agc1Mode = webrtc::AudioProcessing::Config::GainController1::Mode;
+// TODO(bugs.webrtc.org/7909): Maybe set mode to kFixedDigital also for iOS.
+#if defined(OS_ANDROID)
+constexpr Agc1Mode kAgc1Mode = Agc1Mode::kFixedDigital;
+#else
+constexpr Agc1Mode kAgc1Mode = Agc1Mode::kAdaptiveAnalog;
+#endif
+
+using Agc1AnalogConfig =
+ ::webrtc::AudioProcessing::Config::GainController1::AnalogGainController;
+
+Agc1AnalogConfig::ClippingPredictor::Mode GetClippingPredictorMode(int mode) {
+ using Mode = Agc1AnalogConfig::ClippingPredictor::Mode;
+ switch (mode) {
+ case 1:
+ return Mode::kAdaptiveStepClippingPeakPrediction;
+ case 2:
+ return Mode::kFixedStepClippingPeakPrediction;
+ default:
+ return Mode::kClippingEventPrediction;
+ }
+}
+
+bool Allow48kHzApmProcessing() {
+ return base::FeatureList::IsEnabled(
+ ::features::kWebRtcAllow48kHzProcessingOnArm);
+}
+
+absl::optional<int> GetAgcStartupMinVolume() {
+ if (!base::FeatureList::IsEnabled(
+ ::features::kWebRtcAnalogAgcStartupMinVolume)) {
+ return absl::nullopt;
+ }
+ return base::GetFieldTrialParamByFeatureAsInt(
+ ::features::kWebRtcAnalogAgcStartupMinVolume, "volume", 0);
+}
+
+void ConfigAgc2AdaptiveDigitalForHybridExperiment(
+ ::webrtc::AudioProcessing::Config::GainController2::AdaptiveDigital&
+ config) {
+ config.dry_run = base::GetFieldTrialParamByFeatureAsBool(
+ ::features::kWebRtcHybridAgc, "dry_run", false);
+ config.vad_reset_period_ms = base::GetFieldTrialParamByFeatureAsInt(
+ ::features::kWebRtcHybridAgc, "vad_reset_period_ms", 1500);
+ config.adjacent_speech_frames_threshold =
+ base::GetFieldTrialParamByFeatureAsInt(
+ ::features::kWebRtcHybridAgc, "adjacent_speech_frames_threshold", 12);
+ config.max_gain_change_db_per_second =
+ static_cast<float>(base::GetFieldTrialParamByFeatureAsDouble(
+ ::features::kWebRtcHybridAgc, "max_gain_change_db_per_second", 3));
+ config.max_output_noise_level_dbfs =
+ static_cast<float>(base::GetFieldTrialParamByFeatureAsDouble(
+ ::features::kWebRtcHybridAgc, "max_output_noise_level_dbfs", -50));
+}
+
+void ConfigAgc1AnalogForClippingControlExperiment(Agc1AnalogConfig& config) {
+ config.clipped_level_step = base::GetFieldTrialParamByFeatureAsInt(
+ ::features::kWebRtcAnalogAgcClippingControl, "clipped_level_step", 15);
+ config.clipped_ratio_threshold =
+ static_cast<float>(base::GetFieldTrialParamByFeatureAsDouble(
+ ::features::kWebRtcAnalogAgcClippingControl,
+ "clipped_ratio_threshold", 0.1));
+ config.clipped_wait_frames = base::GetFieldTrialParamByFeatureAsInt(
+ ::features::kWebRtcAnalogAgcClippingControl, "clipped_wait_frames", 300);
+
+ config.clipping_predictor.mode =
+ GetClippingPredictorMode(base::GetFieldTrialParamByFeatureAsInt(
+ ::features::kWebRtcAnalogAgcClippingControl, "mode", 0));
+ config.clipping_predictor.window_length =
+ base::GetFieldTrialParamByFeatureAsInt(
+ ::features::kWebRtcAnalogAgcClippingControl, "window_length", 5);
+ config.clipping_predictor.reference_window_length =
+ base::GetFieldTrialParamByFeatureAsInt(
+ ::features::kWebRtcAnalogAgcClippingControl,
+ "reference_window_length", 5);
+ config.clipping_predictor.reference_window_delay =
+ base::GetFieldTrialParamByFeatureAsInt(
+ ::features::kWebRtcAnalogAgcClippingControl, "reference_window_delay",
+ 5);
+ config.clipping_predictor.clipping_threshold =
+ static_cast<float>(base::GetFieldTrialParamByFeatureAsDouble(
+ ::features::kWebRtcAnalogAgcClippingControl, "clipping_threshold",
+ -1.0));
+ config.clipping_predictor.crest_factor_margin =
+ static_cast<float>(base::GetFieldTrialParamByFeatureAsDouble(
+ ::features::kWebRtcAnalogAgcClippingControl, "crest_factor_margin",
+ 3.0));
+ config.clipping_predictor.use_predicted_step =
+ base::GetFieldTrialParamByFeatureAsBool(
+ ::features::kWebRtcAnalogAgcClippingControl, "use_predicted_step",
+ true);
+}
+
+// Configures automatic gain control in `apm_config`.
+// TODO(bugs.webrtc.org/7494): Clean up once the hybrid AGC experiment is
+// finalized.
+// TODO(bugs.webrtc.org/7494): Remove unused cases, simplify decision logic.
+void ConfigAutomaticGainControl(const AudioProcessingSettings& settings,
+ webrtc::AudioProcessing::Config& apm_config) {
+ // Configure AGC1.
+ if (settings.automatic_gain_control) {
+ apm_config.gain_controller1.enabled = true;
+ apm_config.gain_controller1.mode = kAgc1Mode;
+ }
+ auto& agc1_analog_config = apm_config.gain_controller1.analog_gain_controller;
+ // Enable and configure AGC1 Analog if needed.
+ if (kAnalogAgcSupported && settings.experimental_automatic_gain_control) {
+ agc1_analog_config.enabled = true;
+ absl::optional<int> startup_min_volume = GetAgcStartupMinVolume();
+ // TODO(crbug.com/555577): Do not zero `startup_min_volume` if no override
+ // is specified; instead fall back to the config default value.
+ agc1_analog_config.startup_min_volume = startup_min_volume.value_or(0);
+ }
+ // Disable AGC1 Analog.
+ if (kAllowToDisableAnalogAgc &&
+ !settings.experimental_automatic_gain_control) {
+ // This should likely be done on non-Chromecast platforms as well, but care
+ // is needed since users may be relying on the current behavior.
+ // https://crbug.com/918677#c4
+ agc1_analog_config.enabled = false;
+ }
+
+ // TODO(bugs.webrtc.org/7909): Consider returning early if
+ // `kAnalogAgcSupported` is false, since the AGC clipping controller and the
+ // Hybrid AGC experiments are meant to run only when AGC1 Analog is used.
+ if (!settings.automatic_gain_control ||
+ !settings.experimental_automatic_gain_control ||
+ !agc1_analog_config.enabled) {
+ // The settings below only apply when AGC is enabled and when the analog
+ // controller is supported and enabled.
+ return;
+ }
+
+ // AGC1 Analog Clipping Controller experiment.
+ if (base::FeatureList::IsEnabled(
+ ::features::kWebRtcAnalogAgcClippingControl)) {
+ agc1_analog_config.clipping_predictor.enabled = true;
+ ConfigAgc1AnalogForClippingControlExperiment(agc1_analog_config);
+ }
+
+ // Hybrid AGC feature.
+ const bool use_hybrid_agc =
+ base::FeatureList::IsEnabled(::features::kWebRtcHybridAgc);
+ auto& agc2_config = apm_config.gain_controller2;
+ agc2_config.enabled = use_hybrid_agc;
+ agc2_config.fixed_digital.gain_db = 0.0f;
+ if (use_hybrid_agc) {
+ agc2_config.adaptive_digital.enabled = true;
+ ConfigAgc2AdaptiveDigitalForHybridExperiment(agc2_config.adaptive_digital);
+ // Disable AGC1 adaptive digital unless AGC2 adaptive digital runs in
+ // dry-run mode.
+ agc1_analog_config.enable_digital_adaptive =
+ agc2_config.adaptive_digital.dry_run;
+ } else {
+ // Use the adaptive digital controller of AGC1 and disable that of AGC2.
+ agc1_analog_config.enable_digital_adaptive = true;
+ agc2_config.adaptive_digital.enabled = false;
+ }
+}
+
+} // namespace
webrtc::StreamConfig CreateStreamConfig(const AudioParameters& parameters) {
int channels = parameters.channels();
// Mapping all discrete channel layouts to max two channels assuming that any
// required channel remix takes place in the native audio layer.
- if (parameters.channel_layout() == media::CHANNEL_LAYOUT_DISCRETE) {
+ if (parameters.channel_layout() == CHANNEL_LAYOUT_DISCRETE) {
channels = std::min(parameters.channels(), 2);
}
const int rate = parameters.sample_rate();
- const bool has_keyboard = parameters.channel_layout() ==
- media::CHANNEL_LAYOUT_STEREO_AND_KEYBOARD_MIC;
+ const bool has_keyboard =
+ parameters.channel_layout() == CHANNEL_LAYOUT_STEREO_AND_KEYBOARD_MIC;
// webrtc::StreamConfig requires that the keyboard mic channel is not included
// in the channel count. It may still be used.
@@ -33,4 +221,72 @@ bool LeftAndRightChannelsAreSymmetric(const AudioBus& audio) {
audio.channel(1));
}
+void StartEchoCancellationDump(webrtc::AudioProcessing* audio_processing,
+ base::File aec_dump_file,
+ rtc::TaskQueue* worker_queue) {
+ DCHECK(aec_dump_file.IsValid());
+
+ FILE* stream = base::FileToFILE(std::move(aec_dump_file), "w");
+ if (!stream) {
+ LOG(DFATAL) << "Failed to open AEC dump file";
+ return;
+ }
+
+ auto aec_dump = webrtc::AecDumpFactory::Create(
+ stream, -1 /* max_log_size_bytes */, worker_queue);
+ if (!aec_dump) {
+ LOG(ERROR) << "Failed to start AEC debug recording";
+ return;
+ }
+ audio_processing->AttachAecDump(std::move(aec_dump));
+}
+
+void StopEchoCancellationDump(webrtc::AudioProcessing* audio_processing) {
+ audio_processing->DetachAecDump();
+}
+
+rtc::scoped_refptr<webrtc::AudioProcessing> CreateWebRtcAudioProcessingModule(
+ const AudioProcessingSettings& settings) {
+ // Create and configure the webrtc::AudioProcessing.
+ webrtc::AudioProcessingBuilder ap_builder;
+ if (settings.echo_cancellation) {
+ ap_builder.SetEchoControlFactory(
+ std::make_unique<webrtc::EchoCanceller3Factory>());
+ }
+ rtc::scoped_refptr<webrtc::AudioProcessing> audio_processing_module =
+ ap_builder.Create();
+
+ webrtc::AudioProcessing::Config apm_config =
+ audio_processing_module->GetConfig();
+ apm_config.pipeline.multi_channel_render = true;
+ apm_config.pipeline.multi_channel_capture =
+ settings.multi_channel_capture_processing;
+ apm_config.high_pass_filter.enabled = settings.high_pass_filter;
+ apm_config.noise_suppression.enabled = settings.noise_suppression;
+ apm_config.noise_suppression.level =
+ webrtc::AudioProcessing::Config::NoiseSuppression::Level::kHigh;
+ apm_config.echo_canceller.enabled = settings.echo_cancellation;
+#if defined(OS_ANDROID)
+ apm_config.echo_canceller.mobile_mode = true;
+#else
+ apm_config.echo_canceller.mobile_mode = false;
+#endif
+ apm_config.residual_echo_detector.enabled = false;
+
+#if !(defined(OS_ANDROID) || defined(OS_IOS))
+ apm_config.transient_suppression.enabled =
+ settings.transient_noise_suppression;
+#endif
+
+ ConfigAutomaticGainControl(settings, apm_config);
+
+ // Ensure that 48 kHz APM processing is active when the feature allows it.
+ // This overrides the default WebRTC setting of 32 kHz on ARM platforms.
+ if (Allow48kHzApmProcessing()) {
+ apm_config.pipeline.maximum_internal_processing_rate = 48000;
+ }
+
+ audio_processing_module->ApplyConfig(apm_config);
+ return audio_processing_module;
+}
} // namespace media
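
Callers that need to adjust the module returned by CreateWebRtcAudioProcessingModule() can round-trip the configuration through GetConfig() and ApplyConfig(), the same calls used above. A small sketch (the function is illustrative, not part of the patch):

#include "third_party/webrtc/modules/audio_processing/include/audio_processing.h"

void ForceMobileEchoMode(webrtc::AudioProcessing& apm) {
  webrtc::AudioProcessing::Config config = apm.GetConfig();
  // Mirrors the OS_ANDROID branch above, but applied unconditionally.
  config.echo_canceller.mobile_mode = true;
  apm.ApplyConfig(config);
}
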
diff --git a/chromium/media/webrtc/helpers.h b/chromium/media/webrtc/helpers.h
index c748f86f58a..f18373eb99b 100644
--- a/chromium/media/webrtc/helpers.h
+++ b/chromium/media/webrtc/helpers.h
@@ -6,10 +6,18 @@
#define MEDIA_WEBRTC_HELPERS_H_
#include "base/component_export.h"
+#include "base/files/file.h"
+#include "build/build_config.h"
#include "media/base/audio_bus.h"
#include "media/base/audio_parameters.h"
+#include "media/base/audio_processing.h"
+#include "third_party/abseil-cpp/absl/types/optional.h"
#include "third_party/webrtc/modules/audio_processing/include/audio_processing.h"
+namespace rtc {
+class TaskQueue;
+} // namespace rtc
+
namespace media {
COMPONENT_EXPORT(MEDIA_WEBRTC)
@@ -21,6 +29,27 @@ webrtc::StreamConfig CreateStreamConfig(const AudioParameters& parameters);
COMPONENT_EXPORT(MEDIA_WEBRTC)
bool LeftAndRightChannelsAreSymmetric(const AudioBus& audio);
+// Creates and configures a `webrtc::AudioProcessing` audio processing module
+// (APM), based on the provided parameters and on features and field trials.
+COMPONENT_EXPORT(MEDIA_WEBRTC)
+rtc::scoped_refptr<webrtc::AudioProcessing> CreateWebRtcAudioProcessingModule(
+ const AudioProcessingSettings& settings);
+
+// Starts the echo cancellation dump in |audio_processing|. |worker_queue|
+// must be kept alive until either |audio_processing| is destroyed or
+// StopEchoCancellationDump(audio_processing) is called.
+COMPONENT_EXPORT(MEDIA_WEBRTC)
+void StartEchoCancellationDump(webrtc::AudioProcessing* audio_processing,
+ base::File aec_dump_file,
+ rtc::TaskQueue* worker_queue);
+
+// Stops the echo cancellation dump in |audio_processing|.
+// This method has no effect if an echo cancellation dump has not been started
+// on |audio_processing|.
+COMPONENT_EXPORT(MEDIA_WEBRTC)
+void StopEchoCancellationDump(webrtc::AudioProcessing* audio_processing);
+
} // namespace media
#endif // MEDIA_WEBRTC_HELPERS_H_
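
A minimal usage sketch of the helpers declared above (not part of the patch): the wrapper function is illustrative, and it assumes the caller owns a valid base::File and an rtc::TaskQueue that outlives the dump.

#include <utility>

#include "base/files/file.h"
#include "media/webrtc/helpers.h"

rtc::scoped_refptr<webrtc::AudioProcessing> CreateApmWithOptionalDump(
    const media::AudioProcessingSettings& settings,
    base::File aec_dump_file,
    rtc::TaskQueue* worker_queue) {
  rtc::scoped_refptr<webrtc::AudioProcessing> apm =
      media::CreateWebRtcAudioProcessingModule(settings);
  if (aec_dump_file.IsValid()) {
    // |worker_queue| must stay alive until StopEchoCancellationDump() is
    // called or |apm| is destroyed, per the contract documented above.
    media::StartEchoCancellationDump(apm.get(), std::move(aec_dump_file),
                                     worker_queue);
  }
  return apm;
}
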
diff --git a/chromium/media/webrtc/helpers_unittests.cc b/chromium/media/webrtc/helpers_unittests.cc
new file mode 100644
index 00000000000..9cb11a3238b
--- /dev/null
+++ b/chromium/media/webrtc/helpers_unittests.cc
@@ -0,0 +1,401 @@
+// Copyright 2021 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/webrtc/helpers.h"
+
+#include "base/logging.h"
+#include "base/test/scoped_feature_list.h"
+#include "build/build_config.h"
+#include "build/chromecast_buildflags.h"
+#include "media/webrtc/webrtc_features.h"
+#include "testing/gtest/include/gtest/gtest.h"
+
+namespace media {
+namespace {
+
+constexpr webrtc::AudioProcessing::Config kDefaultApmConfig{};
+
+webrtc::AudioProcessing::Config CreateApmGetConfig(
+ const AudioProcessingSettings& settings) {
+ rtc::scoped_refptr<webrtc::AudioProcessing> apm =
+ CreateWebRtcAudioProcessingModule(settings);
+ DCHECK(!!apm);
+ return apm->GetConfig();
+}
+
+// Verify that the default settings in AudioProcessingSettings are applied
+// correctly by `CreateWebRtcAudioProcessingModule()`.
+TEST(CreateWebRtcAudioProcessingModuleTest, CheckDefaultAudioProcessingConfig) {
+ auto config = CreateApmGetConfig(/*settings=*/{});
+
+ EXPECT_TRUE(config.pipeline.multi_channel_render);
+ EXPECT_TRUE(config.pipeline.multi_channel_capture);
+ EXPECT_EQ(config.pipeline.maximum_internal_processing_rate, 48000);
+ EXPECT_TRUE(config.high_pass_filter.enabled);
+ EXPECT_FALSE(config.pre_amplifier.enabled);
+ EXPECT_TRUE(config.echo_canceller.enabled);
+ EXPECT_TRUE(config.gain_controller1.enabled);
+#if defined(OS_WIN) || defined(OS_MAC) || defined(OS_LINUX)
+ EXPECT_TRUE(config.gain_controller2.enabled);
+#else
+ EXPECT_FALSE(config.gain_controller2.enabled);
+#endif
+ EXPECT_TRUE(config.noise_suppression.enabled);
+ EXPECT_EQ(config.noise_suppression.level,
+ webrtc::AudioProcessing::Config::NoiseSuppression::kHigh);
+ EXPECT_FALSE(config.voice_detection.enabled);
+ EXPECT_FALSE(config.residual_echo_detector.enabled);
+
+#if defined(OS_ANDROID)
+ // Android uses echo cancellation optimized for mobiles, and does not
+ // support keytap suppression.
+ EXPECT_TRUE(config.echo_canceller.mobile_mode);
+ EXPECT_FALSE(config.transient_suppression.enabled);
+#else
+ EXPECT_FALSE(config.echo_canceller.mobile_mode);
+ EXPECT_TRUE(config.transient_suppression.enabled);
+#endif
+}
+
+TEST(CreateWebRtcAudioProcessingModuleTest, CheckDefaultAgcConfig) {
+ auto config = CreateApmGetConfig(/*settings=*/{});
+ EXPECT_TRUE(config.gain_controller1.enabled);
+ using Mode = webrtc::AudioProcessing::Config::GainController1::Mode;
+ // TODO(bugs.webrtc.org/7909): Add OS_IOS once bug fixed.
+#if defined(OS_ANDROID)
+ EXPECT_EQ(config.gain_controller1.mode, Mode::kFixedDigital);
+#else
+ EXPECT_EQ(config.gain_controller1.mode, Mode::kAdaptiveAnalog);
+#endif
+
+ const auto& agc1_analog_config =
+ config.gain_controller1.analog_gain_controller;
+ // TODO(bugs.webrtc.org/7909): Uncomment below once fixed.
+ // #if defined(OS_ANDROID) || defined(OS_IOS)
+ // // No analog controller available on mobile.
+ // EXPECT_FALSE(agc1_analog_config.enabled);
+ // #else
+ EXPECT_TRUE(agc1_analog_config.enabled);
+#if defined(OS_ANDROID) || defined(OS_IOS)
+ // Leaving `agc_startup_min_volume` unspecified on mobile does not override
+ // `startup_min_volume`.
+ EXPECT_EQ(agc1_analog_config.startup_min_volume,
+ kDefaultApmConfig.gain_controller1.analog_gain_controller
+ .startup_min_volume);
+#else
+ // TODO(bugs.webrtc.org/7494): Check if the following is unwanted, fix if so.
+ // Leaving `agc_startup_min_volume` unspecified overrides the default WebRTC
+ // value with zero.
+ EXPECT_EQ(agc1_analog_config.startup_min_volume, 0);
+#endif
+ EXPECT_FALSE(agc1_analog_config.clipping_predictor.enabled);
+ // TODO(bugs.webrtc.org/7909): Uncomment below once fixed.
+ // #endif
+
+ // Check that either AGC1 digital or AGC2 digital is used based on the
+ // platforms where the Hybrid AGC is enabled by default.
+#if defined(OS_WIN) || defined(OS_MAC) || defined(OS_LINUX)
+ EXPECT_FALSE(agc1_analog_config.enable_digital_adaptive);
+ EXPECT_TRUE(config.gain_controller2.enabled);
+ EXPECT_TRUE(config.gain_controller2.adaptive_digital.enabled);
+#else
+ // AGC1 Digital.
+ EXPECT_TRUE(agc1_analog_config.enable_digital_adaptive);
+ EXPECT_FALSE(config.gain_controller2.enabled);
+#endif
+}
+
+// When `automatic_gain_control` and `experimental_automatic_gain_control` are
+// false, the default AGC1 configuration is used, but on Chromecast AGC1 Analog
+// is explicitly disabled.
+TEST(CreateWebRtcAudioProcessingModuleTest,
+ Agc1ConfigUnchangedIfAgcSettingsDisabled) {
+ auto config = CreateApmGetConfig(
+ /*settings=*/{.automatic_gain_control = false,
+ .experimental_automatic_gain_control = false});
+#if BUILDFLAG(IS_CHROMECAST)
+ // Override the default config since on Chromecast AGC1 is explicitly
+ // disabled.
+ auto expected_config = kDefaultApmConfig.gain_controller1;
+ expected_config.analog_gain_controller.enabled = false;
+ EXPECT_EQ(config.gain_controller1, expected_config);
+#else
+ EXPECT_EQ(config.gain_controller1, kDefaultApmConfig.gain_controller1);
+#endif
+}
+
+TEST(CreateWebRtcAudioProcessingModuleTest,
+ Agc2ConfigUnchangedIfAgcSettingsDisabled) {
+ auto config = CreateApmGetConfig(
+ /*settings=*/{.automatic_gain_control = false,
+ .experimental_automatic_gain_control = false});
+ EXPECT_EQ(config.gain_controller2, kDefaultApmConfig.gain_controller2);
+}
+
+TEST(CreateWebRtcAudioProcessingModuleTest,
+ Agc2ConfigUnchangedIfAgcSettingsDisabledAndHybridAgcEnabled) {
+ ::base::test::ScopedFeatureList feature_list;
+ feature_list.InitAndEnableFeature(features::kWebRtcAnalogAgcClippingControl);
+ auto config = CreateApmGetConfig(
+ /*settings=*/{.automatic_gain_control = false,
+ .experimental_automatic_gain_control = false});
+ EXPECT_EQ(config.gain_controller2, kDefaultApmConfig.gain_controller2);
+}
+
+TEST(CreateWebRtcAudioProcessingModuleTest, DisableAgcEnableExperimentalAgc) {
+ auto config = CreateApmGetConfig(
+ /*settings=*/{.automatic_gain_control = false,
+ .experimental_automatic_gain_control = true});
+ EXPECT_FALSE(config.gain_controller1.enabled);
+ EXPECT_TRUE(config.gain_controller1.analog_gain_controller.enabled);
+}
+
+// TODO(bugs.webrtc.org/7909): Remove #IF once fixed.
+#if BUILDFLAG(IS_CHROMECAST)
+TEST(CreateWebRtcAudioProcessingModuleTest, DisableAnalogAgc) {
+ auto config = CreateApmGetConfig(
+ /*settings=*/{.automatic_gain_control = true,
+ .experimental_automatic_gain_control = false});
+ EXPECT_TRUE(config.gain_controller1.enabled);
+ EXPECT_FALSE(config.gain_controller1.analog_gain_controller.enabled);
+}
+#else // !BUILDFLAG(IS_CHROMECAST)
+// Checks that setting `experimental_automatic_gain_control` to false does not
+// disable the analog controller.
+// TODO(bugs.webrtc.org/7909): Remove once fixed.
+TEST(CreateWebRtcAudioProcessingModuleTest, CannotDisableAnalogAgc) {
+ auto config = CreateApmGetConfig(
+ /*settings=*/{.automatic_gain_control = true,
+ .experimental_automatic_gain_control = false});
+ EXPECT_TRUE(config.gain_controller1.enabled);
+ EXPECT_TRUE(config.gain_controller1.analog_gain_controller.enabled);
+}
+#endif // !BUILDFLAG(IS_CHROMECAST)
+
+#if defined(OS_ANDROID) || defined(OS_IOS)
+// Checks that on mobile the AGC1 Analog startup minimum volume cannot be
+// overridden.
+TEST(CreateWebRtcAudioProcessingModuleTest, CannotOverrideAgcStartupMinVolume) {
+ ::base::test::ScopedFeatureList feature_list;
+ feature_list.InitAndEnableFeatureWithParameters(
+ features::kWebRtcAnalogAgcStartupMinVolume, {{"volume", "123"}});
+ ASSERT_NE(kDefaultApmConfig.gain_controller1.analog_gain_controller
+ .startup_min_volume,
+ 123);
+ auto config = CreateApmGetConfig(/*settings=*/{});
+ EXPECT_EQ(config.gain_controller1.analog_gain_controller.startup_min_volume,
+ kDefaultApmConfig.gain_controller1.analog_gain_controller
+ .startup_min_volume);
+}
+#else // !(defined(OS_ANDROID) || defined(OS_IOS))
+// Checks that on all the platforms other than mobile the AGC1 Analog startup
+// minimum volume can be overridden.
+TEST(CreateWebRtcAudioProcessingModuleTest, OverrideAgcStartupMinVolume) {
+ ::base::test::ScopedFeatureList feature_list;
+ feature_list.InitAndEnableFeatureWithParameters(
+ features::kWebRtcAnalogAgcStartupMinVolume, {{"volume", "123"}});
+ ASSERT_NE(kDefaultApmConfig.gain_controller1.analog_gain_controller
+ .startup_min_volume,
+ 123);
+ auto config = CreateApmGetConfig(/*settings=*/{});
+ EXPECT_EQ(config.gain_controller1.analog_gain_controller.startup_min_volume,
+ 123);
+}
+#endif // !(defined(OS_ANDROID) || defined(OS_IOS))
+
+TEST(CreateWebRtcAudioProcessingModuleTest, EnableAgc1AnalogClippingControl) {
+ ::base::test::ScopedFeatureList feature_list;
+ feature_list.InitAndEnableFeatureWithParameters(
+ features::kWebRtcAnalogAgcClippingControl,
+ {{"mode", "2"},
+ {"window_length", "111"},
+ {"reference_window_length", "222"},
+ {"reference_window_delay", "333"},
+ {"clipping_threshold", "4.44"},
+ {"crest_factor_margin", ".555"},
+ {"clipped_level_step", "255"},
+ {"clipped_ratio_threshold", "0.77"},
+ {"clipped_wait_frames", "888"},
+ {"use_predicted_step", "false"}});
+
+ auto config = CreateApmGetConfig(
+ /*settings=*/{.automatic_gain_control = true,
+ .experimental_automatic_gain_control = true});
+ const auto& analog_agc = config.gain_controller1.analog_gain_controller;
+ EXPECT_TRUE(analog_agc.clipping_predictor.enabled);
+
+ using Mode = webrtc::AudioProcessing::Config::GainController1::
+ AnalogGainController::ClippingPredictor::Mode;
+ EXPECT_EQ(analog_agc.clipping_predictor.mode,
+ Mode::kFixedStepClippingPeakPrediction);
+ EXPECT_EQ(analog_agc.clipping_predictor.window_length, 111);
+ EXPECT_EQ(analog_agc.clipping_predictor.reference_window_length, 222);
+ EXPECT_EQ(analog_agc.clipping_predictor.reference_window_delay, 333);
+ EXPECT_FLOAT_EQ(analog_agc.clipping_predictor.clipping_threshold, 4.44f);
+ EXPECT_FLOAT_EQ(analog_agc.clipping_predictor.crest_factor_margin, 0.555f);
+ EXPECT_FALSE(analog_agc.clipping_predictor.use_predicted_step);
+ EXPECT_EQ(analog_agc.clipped_level_step, 255);
+ EXPECT_FLOAT_EQ(analog_agc.clipped_ratio_threshold, 0.77f);
+ EXPECT_EQ(analog_agc.clipped_wait_frames, 888);
+}
+
+TEST(CreateWebRtcAudioProcessingModuleTest, DisableAgc1AnalogClippingControl) {
+ ::base::test::ScopedFeatureList feature_list;
+ feature_list.InitAndDisableFeature(features::kWebRtcAnalogAgcClippingControl);
+ auto config = CreateApmGetConfig(
+ /*settings=*/{.automatic_gain_control = true,
+ .experimental_automatic_gain_control = true});
+ const auto& analog_agc = config.gain_controller1.analog_gain_controller;
+ EXPECT_FALSE(analog_agc.clipping_predictor.enabled);
+}
+
+TEST(CreateWebRtcAudioProcessingModuleTest,
+ CannotEnableAgc1AnalogClippingControlWhenAgcIsDisabled) {
+ ::base::test::ScopedFeatureList feature_list;
+ feature_list.InitAndEnableFeature(features::kWebRtcAnalogAgcClippingControl);
+ auto config =
+ CreateApmGetConfig(/*settings=*/{.automatic_gain_control = false});
+ EXPECT_FALSE(config.gain_controller1.analog_gain_controller.clipping_predictor
+ .enabled);
+}
+
+TEST(CreateWebRtcAudioProcessingModuleTest,
+ CannotEnableAgc1AnalogClippingControlWhenExperimentalAgcIsDisabled) {
+ ::base::test::ScopedFeatureList feature_list;
+ feature_list.InitAndEnableFeature(features::kWebRtcAnalogAgcClippingControl);
+ auto config = CreateApmGetConfig(
+ /*settings=*/{.automatic_gain_control = true,
+ .experimental_automatic_gain_control = false});
+ EXPECT_FALSE(config.gain_controller1.analog_gain_controller.clipping_predictor
+ .enabled);
+}
+
+TEST(CreateWebRtcAudioProcessingModuleTest, EnableHybridAgc) {
+ ::base::test::ScopedFeatureList feature_list;
+ feature_list.InitAndEnableFeatureWithParameters(
+ features::kWebRtcHybridAgc, {{"dry_run", "false"},
+ {"vad_reset_period_ms", "1230"},
+ {"adjacent_speech_frames_threshold", "4"},
+ {"max_gain_change_db_per_second", "5"},
+ {"max_output_noise_level_dbfs", "-6"}});
+
+ auto config = CreateApmGetConfig(
+ /*settings=*/{.automatic_gain_control = true,
+ .experimental_automatic_gain_control = true});
+
+ // Checks that the analog AGC is enabled and that its digital adaptive
+ // controller is disabled.
+ const auto& agc1_analog = config.gain_controller1.analog_gain_controller;
+ EXPECT_TRUE(agc1_analog.enabled);
+ EXPECT_FALSE(agc1_analog.enable_digital_adaptive);
+
+ // Check that AGC2 is enabled and that the properties are correctly read from
+ // the field trials.
+ const auto& agc2 = config.gain_controller2;
+ EXPECT_TRUE(agc2.enabled);
+ EXPECT_EQ(config.gain_controller2.fixed_digital.gain_db, 0);
+ EXPECT_TRUE(agc2.adaptive_digital.enabled);
+ EXPECT_FALSE(agc2.adaptive_digital.dry_run);
+ EXPECT_EQ(agc2.adaptive_digital.vad_reset_period_ms, 1230);
+ EXPECT_EQ(agc2.adaptive_digital.adjacent_speech_frames_threshold, 4);
+ EXPECT_FLOAT_EQ(agc2.adaptive_digital.max_gain_change_db_per_second, 5.0f);
+ EXPECT_FLOAT_EQ(agc2.adaptive_digital.max_output_noise_level_dbfs, -6.0f);
+}
+
+TEST(CreateWebRtcAudioProcessingModuleTest, EnableHybridAgcDryRun) {
+ ::base::test::ScopedFeatureList feature_list;
+ feature_list.InitAndEnableFeatureWithParameters(features::kWebRtcHybridAgc,
+ {{"dry_run", "true"}});
+ auto config = CreateApmGetConfig(
+ /*settings=*/{.automatic_gain_control = true,
+ .experimental_automatic_gain_control = true});
+ // Checks that the analog AGC is enabled together with its digital adaptive
+ // controller.
+ const auto& agc1_analog = config.gain_controller1.analog_gain_controller;
+ EXPECT_TRUE(agc1_analog.enabled);
+ EXPECT_TRUE(agc1_analog.enable_digital_adaptive);
+
+ // Check that AGC2 is enabled in dry run mode.
+ const auto& agc2 = config.gain_controller2;
+ EXPECT_TRUE(agc2.enabled);
+ EXPECT_TRUE(agc2.adaptive_digital.enabled);
+ EXPECT_TRUE(agc2.adaptive_digital.dry_run);
+}
+
+TEST(CreateWebRtcAudioProcessingModuleTest,
+ HybridAgcDisabledWhenAgcIsDisabled) {
+ ::base::test::ScopedFeatureList feature_list;
+ feature_list.InitAndEnableFeature(features::kWebRtcHybridAgc);
+ auto config =
+ CreateApmGetConfig(/*settings=*/{.automatic_gain_control = false});
+ EXPECT_FALSE(config.gain_controller2.enabled);
+ EXPECT_FALSE(config.gain_controller2.adaptive_digital.enabled);
+}
+
+TEST(CreateWebRtcAudioProcessingModuleTest,
+ HybridAgcDisabledWhenExperimentalAgcIsDisabled) {
+ ::base::test::ScopedFeatureList feature_list;
+ feature_list.InitAndEnableFeature(features::kWebRtcHybridAgc);
+ auto config = CreateApmGetConfig(
+ /*settings=*/{.automatic_gain_control = true,
+ .experimental_automatic_gain_control = false});
+ EXPECT_FALSE(config.gain_controller2.enabled);
+ EXPECT_FALSE(config.gain_controller2.adaptive_digital.enabled);
+}
+
+TEST(CreateWebRtcAudioProcessingModuleTest, VerifyNoiseSuppressionSettings) {
+ for (bool noise_suppressor_enabled : {true, false}) {
+ SCOPED_TRACE(noise_suppressor_enabled);
+ auto config = CreateApmGetConfig(
+ /*settings=*/{.noise_suppression = noise_suppressor_enabled});
+
+ EXPECT_EQ(config.noise_suppression.enabled, noise_suppressor_enabled);
+ EXPECT_EQ(config.noise_suppression.level,
+ webrtc::AudioProcessing::Config::NoiseSuppression::kHigh);
+ }
+}
+
+TEST(CreateWebRtcAudioProcessingModuleTest, VerifyEchoCancellerSettings) {
+ for (bool echo_canceller_enabled : {true, false}) {
+ SCOPED_TRACE(echo_canceller_enabled);
+ auto config = CreateApmGetConfig(
+ /*settings=*/{.echo_cancellation = echo_canceller_enabled});
+
+ EXPECT_EQ(config.echo_canceller.enabled, echo_canceller_enabled);
+#if defined(OS_ANDROID)
+ EXPECT_TRUE(config.echo_canceller.mobile_mode);
+#else
+ EXPECT_FALSE(config.echo_canceller.mobile_mode);
+#endif
+ }
+}
+
+TEST(CreateWebRtcAudioProcessingModuleTest, ToggleHighPassFilter) {
+ for (bool high_pass_filter_enabled : {true, false}) {
+ SCOPED_TRACE(high_pass_filter_enabled);
+ auto config = CreateApmGetConfig(
+ /*settings=*/{.high_pass_filter = high_pass_filter_enabled});
+
+ EXPECT_EQ(config.high_pass_filter.enabled, high_pass_filter_enabled);
+ }
+}
+
+TEST(CreateWebRtcAudioProcessingModuleTest, ToggleTransientSuppression) {
+ for (bool transient_suppression_enabled : {true, false}) {
+ SCOPED_TRACE(transient_suppression_enabled);
+ auto config = CreateApmGetConfig(/*settings=*/{
+ .transient_noise_suppression = transient_suppression_enabled});
+
+#if defined(OS_ANDROID) || defined(OS_IOS)
+ // Transient suppression is not supported (nor useful) on mobile platforms.
+ EXPECT_FALSE(config.transient_suppression.enabled);
+#else
+ EXPECT_EQ(config.transient_suppression.enabled,
+ transient_suppression_enabled);
+#endif
+ }
+}
+
+} // namespace
+} // namespace media
diff --git a/chromium/media/webrtc/webrtc_switches.cc b/chromium/media/webrtc/webrtc_features.cc
index 051228d7519..9745857712b 100644
--- a/chromium/media/webrtc/webrtc_switches.cc
+++ b/chromium/media/webrtc/webrtc_features.cc
@@ -2,23 +2,21 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
-#include "media/webrtc/webrtc_switches.h"
+#include "media/webrtc/webrtc_features.h"
#include "base/command_line.h"
#include "build/build_config.h"
-namespace switches {
-
-// Override the default minimum starting volume of the Automatic Gain Control
-// algorithm in WebRTC used with audio tracks from getUserMedia.
-// The valid range is 12-255. Values outside that range will be clamped
-// to the lowest or highest valid value inside WebRTC.
-// TODO(tommi): Remove this switch when crbug.com/555577 is fixed.
-const char kAgcStartupMinVolume[] = "agc-startup-min-volume";
-
-} // namespace switches
-
namespace features {
+namespace {
+#if defined(OS_WIN) || defined(OS_MAC) || defined(OS_LINUX)
+constexpr base::FeatureState kWebRtcHybridAgcState =
+ base::FEATURE_ENABLED_BY_DEFAULT;
+#else
+constexpr base::FeatureState kWebRtcHybridAgcState =
+ base::FEATURE_DISABLED_BY_DEFAULT;
+#endif
+} // namespace
// When enabled we will tell WebRTC that we want to use the
// Windows.Graphics.Capture API based DesktopCapturer, if it is available.
@@ -36,13 +34,17 @@ const base::Feature kWebRtcEnableCaptureMultiChannelApm{
const base::Feature kWebRtcAllow48kHzProcessingOnArm{
"WebRtcAllow48kHzProcessingOnArm", base::FEATURE_ENABLED_BY_DEFAULT};
-// Enables the WebRTC Agc2 digital adaptation with WebRTC Agc1 analog
-// adaptation. Feature for http://crbug.com/873650. Is sent to WebRTC.
-const base::Feature kWebRtcHybridAgc{"WebRtcHybridAgc",
- base::FEATURE_DISABLED_BY_DEFAULT};
+// Enables the WebRTC Hybrid AGC configuration - i.e., AGC1 analog and AGC2
+// digital (see http://crbug.com/1231085).
+const base::Feature kWebRtcHybridAgc{"WebRtcHybridAgc", kWebRtcHybridAgcState};
// Enables and configures the clipping control in the WebRTC analog AGC.
const base::Feature kWebRtcAnalogAgcClippingControl{
"WebRtcAnalogAgcClippingControl", base::FEATURE_DISABLED_BY_DEFAULT};
+// Enables the override for the default minimum starting volume of the Automatic
+// Gain Control algorithm in WebRTC.
+const base::Feature kWebRtcAnalogAgcStartupMinVolume{
+ "WebRtcAnalogAgcStartupMinVolume", base::FEATURE_DISABLED_BY_DEFAULT};
+
} // namespace features
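
The features defined above are read through base::FeatureList and the field-trial-param helpers, as helpers.cc does. A small sketch of the lookup pattern (the function name is illustrative, not part of the patch):

#include "base/feature_list.h"
#include "base/metrics/field_trial_params.h"
#include "media/webrtc/webrtc_features.h"

int GetAnalogAgcStartupMinVolumeOrDefault(int default_volume) {
  if (!base::FeatureList::IsEnabled(
          features::kWebRtcAnalogAgcStartupMinVolume)) {
    return default_volume;
  }
  // "volume" is the param name used by GetAgcStartupMinVolume() in helpers.cc.
  return base::GetFieldTrialParamByFeatureAsInt(
      features::kWebRtcAnalogAgcStartupMinVolume, "volume", default_volume);
}
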
diff --git a/chromium/media/webrtc/webrtc_switches.h b/chromium/media/webrtc/webrtc_features.h
index c7675917c10..69b549a4268 100644
--- a/chromium/media/webrtc/webrtc_switches.h
+++ b/chromium/media/webrtc/webrtc_features.h
@@ -2,20 +2,14 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
-// Defines all command-line switches for media/webrtc.
+// Defines features for media/webrtc.
-#ifndef MEDIA_WEBRTC_WEBRTC_SWITCHES_H_
-#define MEDIA_WEBRTC_WEBRTC_SWITCHES_H_
+#ifndef MEDIA_WEBRTC_WEBRTC_FEATURES_H_
+#define MEDIA_WEBRTC_WEBRTC_FEATURES_H_
#include "base/component_export.h"
#include "base/feature_list.h"
-namespace switches {
-
-COMPONENT_EXPORT(MEDIA_WEBRTC) extern const char kAgcStartupMinVolume[];
-
-} // namespace switches
-
namespace features {
COMPONENT_EXPORT(MEDIA_WEBRTC)
@@ -33,6 +27,9 @@ extern const base::Feature kWebRtcHybridAgc;
COMPONENT_EXPORT(MEDIA_WEBRTC)
extern const base::Feature kWebRtcAnalogAgcClippingControl;
+COMPONENT_EXPORT(MEDIA_WEBRTC)
+extern const base::Feature kWebRtcAnalogAgcStartupMinVolume;
+
} // namespace features
-#endif // MEDIA_WEBRTC_WEBRTC_SWITCHES_H_
+#endif // MEDIA_WEBRTC_WEBRTC_FEATURES_H_
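
Any further flag added to this header would follow the same declare/define split shown above. A sketch with a hypothetical feature name (not part of the patch):

// webrtc_features.h
COMPONENT_EXPORT(MEDIA_WEBRTC)
extern const base::Feature kWebRtcExampleFeature;

// webrtc_features.cc
const base::Feature kWebRtcExampleFeature{"WebRtcExampleFeature",
                                          base::FEATURE_DISABLED_BY_DEFAULT};
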