summaryrefslogtreecommitdiff
path: root/chromium/content/renderer/media/stream
diff options
context:
space:
mode:
Diffstat (limited to 'chromium/content/renderer/media/stream')
-rw-r--r--chromium/content/renderer/media/stream/aec_dump_message_filter.cc11
-rw-r--r--chromium/content/renderer/media/stream/apply_constraints_processor.cc2
-rw-r--r--chromium/content/renderer/media/stream/local_media_stream_audio_source.cc26
-rw-r--r--chromium/content/renderer/media/stream/media_stream_audio_processor.cc58
-rw-r--r--chromium/content/renderer/media/stream/media_stream_audio_processor.h4
-rw-r--r--chromium/content/renderer/media/stream/media_stream_audio_processor_options.cc14
-rw-r--r--chromium/content/renderer/media/stream/media_stream_audio_processor_options.h8
-rw-r--r--chromium/content/renderer/media/stream/media_stream_audio_processor_unittest.cc30
-rw-r--r--chromium/content/renderer/media/stream/media_stream_audio_source.h11
-rw-r--r--chromium/content/renderer/media/stream/media_stream_audio_unittest.cc4
-rw-r--r--chromium/content/renderer/media/stream/media_stream_constraints_util.cc2
-rw-r--r--chromium/content/renderer/media/stream/media_stream_constraints_util.h15
-rw-r--r--chromium/content/renderer/media/stream/media_stream_constraints_util_audio.cc6
-rw-r--r--chromium/content/renderer/media/stream/media_stream_constraints_util_audio_unittest.cc10
-rw-r--r--chromium/content/renderer/media/stream/media_stream_constraints_util_video_device.cc221
-rw-r--r--chromium/content/renderer/media/stream/media_stream_constraints_util_video_device.h23
-rw-r--r--chromium/content/renderer/media/stream/media_stream_constraints_util_video_device_unittest.cc101
-rw-r--r--chromium/content/renderer/media/stream/media_stream_device_observer.cc4
-rw-r--r--chromium/content/renderer/media/stream/media_stream_video_source.cc13
-rw-r--r--chromium/content/renderer/media/stream/media_stream_video_source.h2
-rw-r--r--chromium/content/renderer/media/stream/media_stream_video_source_unittest.cc44
-rw-r--r--chromium/content/renderer/media/stream/media_stream_video_track.cc11
-rw-r--r--chromium/content/renderer/media/stream/processed_local_audio_source.cc4
-rw-r--r--chromium/content/renderer/media/stream/processed_local_audio_source_unittest.cc5
-rw-r--r--chromium/content/renderer/media/stream/user_media_client_impl.cc4
-rw-r--r--chromium/content/renderer/media/stream/user_media_client_impl.h1
-rw-r--r--chromium/content/renderer/media/stream/user_media_client_impl_unittest.cc21
-rw-r--r--chromium/content/renderer/media/stream/user_media_processor.cc12
-rw-r--r--chromium/content/renderer/media/stream/user_media_processor.h2
-rw-r--r--chromium/content/renderer/media/stream/video_track_adapter.cc2
-rw-r--r--chromium/content/renderer/media/stream/webmediaplayer_ms.cc201
-rw-r--r--chromium/content/renderer/media/stream/webmediaplayer_ms.h30
-rw-r--r--chromium/content/renderer/media/stream/webmediaplayer_ms_compositor.cc144
-rw-r--r--chromium/content/renderer/media/stream/webmediaplayer_ms_compositor.h49
-rw-r--r--chromium/content/renderer/media/stream/webmediaplayer_ms_unittest.cc444
35 files changed, 920 insertions, 619 deletions
diff --git a/chromium/content/renderer/media/stream/aec_dump_message_filter.cc b/chromium/content/renderer/media/stream/aec_dump_message_filter.cc
index 5c1312e963b..bb1f4296661 100644
--- a/chromium/content/renderer/media/stream/aec_dump_message_filter.cc
+++ b/chromium/content/renderer/media/stream/aec_dump_message_filter.cc
@@ -148,7 +148,7 @@ void AecDumpMessageFilter::DoEnableAecDump(
int id,
IPC::PlatformFileForTransit file_handle) {
DCHECK(main_task_runner_->BelongsToCurrentThread());
- DelegateMap::iterator it = delegates_.find(id);
+ auto it = delegates_.find(id);
if (it != delegates_.end()) {
it->second->OnAecDumpFile(file_handle);
} else {
@@ -161,16 +161,14 @@ void AecDumpMessageFilter::DoEnableAecDump(
void AecDumpMessageFilter::DoDisableAecDump() {
DCHECK(main_task_runner_->BelongsToCurrentThread());
- for (DelegateMap::iterator it = delegates_.begin();
- it != delegates_.end(); ++it) {
+ for (auto it = delegates_.begin(); it != delegates_.end(); ++it) {
it->second->OnDisableAecDump();
}
}
void AecDumpMessageFilter::DoChannelClosingOnDelegates() {
DCHECK(main_task_runner_->BelongsToCurrentThread());
- for (DelegateMap::iterator it = delegates_.begin();
- it != delegates_.end(); ++it) {
+ for (auto it = delegates_.begin(); it != delegates_.end(); ++it) {
it->second->OnIpcClosing();
}
delegates_.clear();
@@ -179,8 +177,7 @@ void AecDumpMessageFilter::DoChannelClosingOnDelegates() {
int AecDumpMessageFilter::GetIdForDelegate(
AecDumpMessageFilter::AecDumpDelegate* delegate) {
DCHECK(main_task_runner_->BelongsToCurrentThread());
- for (DelegateMap::iterator it = delegates_.begin();
- it != delegates_.end(); ++it) {
+ for (auto it = delegates_.begin(); it != delegates_.end(); ++it) {
if (it->second == delegate)
return it->first;
}
diff --git a/chromium/content/renderer/media/stream/apply_constraints_processor.cc b/chromium/content/renderer/media/stream/apply_constraints_processor.cc
index 874d3bd98b3..9d791cb6b84 100644
--- a/chromium/content/renderer/media/stream/apply_constraints_processor.cc
+++ b/chromium/content/renderer/media/stream/apply_constraints_processor.cc
@@ -256,8 +256,6 @@ VideoCaptureSettings ApplyConstraintsProcessor::SelectVideoSettings(
DCHECK(video_source_->GetCurrentCaptureParams());
VideoDeviceCaptureCapabilities video_capabilities;
- video_capabilities.power_line_capabilities.push_back(
- video_source_->GetCurrentCaptureParams()->power_line_frequency);
video_capabilities.noise_reduction_capabilities.push_back(
GetCurrentVideoTrack()->noise_reduction());
video_capabilities.device_capabilities.push_back(
diff --git a/chromium/content/renderer/media/stream/local_media_stream_audio_source.cc b/chromium/content/renderer/media/stream/local_media_stream_audio_source.cc
index 6b826e25c95..58a617f5072 100644
--- a/chromium/content/renderer/media/stream/local_media_stream_audio_source.cc
+++ b/chromium/content/renderer/media/stream/local_media_stream_audio_source.cc
@@ -4,12 +4,27 @@
#include "content/renderer/media/stream/local_media_stream_audio_source.h"
+#include "build/build_config.h"
#include "content/renderer/media/audio/audio_device_factory.h"
#include "content/renderer/media/webrtc_logging.h"
#include "content/renderer/render_frame_impl.h"
namespace content {
+// TODO(crbug.com/638081): Like in ProcessedLocalAudioSource::GetBufferSize(),
+// we should re-evaluate whether Android needs special treatment here. Or,
+// perhaps we should just DCHECK_GT(device...frames_per_buffer, 0)?
+#if defined(OS_ANDROID)
+static constexpr int kFallbackAudioLatencyMs = 20;
+#else
+static constexpr int kFallbackAudioLatencyMs = 10;
+#endif
+
+static_assert(kFallbackAudioLatencyMs >= 0,
+ "Audio latency has to be non-negative.");
+static_assert(kFallbackAudioLatencyMs <= kMaxAudioLatencyMs,
+ "Audio latency can cause overflow.");
+
LocalMediaStreamAudioSource::LocalMediaStreamAudioSource(
int consumer_render_frame_id,
const MediaStreamDevice& device,
@@ -27,15 +42,8 @@ LocalMediaStreamAudioSource::LocalMediaStreamAudioSource(
// If the device buffer size was not provided, use a default.
int frames_per_buffer = device.input.frames_per_buffer();
if (frames_per_buffer <= 0) {
-// TODO(miu): Like in ProcessedLocalAudioSource::GetBufferSize(), we should
-// re-evaluate whether Android needs special treatment here. Or, perhaps we
-// should just DCHECK_GT(device...frames_per_buffer, 0)?
-// http://crbug.com/638081
-#if defined(OS_ANDROID)
- frames_per_buffer = device.input.sample_rate() / 50; // 20 ms
-#else
- frames_per_buffer = device.input.sample_rate() / 100; // 10 ms
-#endif
+ frames_per_buffer =
+ (device.input.sample_rate() * kFallbackAudioLatencyMs) / 1000;
}
SetFormat(media::AudioParameters(
diff --git a/chromium/content/renderer/media/stream/media_stream_audio_processor.cc b/chromium/content/renderer/media/stream/media_stream_audio_processor.cc
index 40af27f948d..c9a3b8ba93c 100644
--- a/chromium/content/renderer/media/stream/media_stream_audio_processor.cc
+++ b/chromium/content/renderer/media/stream/media_stream_audio_processor.cc
@@ -22,7 +22,9 @@
#include "base/threading/thread_task_runner_handle.h"
#include "base/trace_event/trace_event.h"
#include "build/build_config.h"
+#include "content/public/common/content_client.h"
#include "content/public/common/content_features.h"
+#include "content/public/renderer/content_renderer_client.h"
#include "content/renderer/media/webrtc/webrtc_audio_device_impl.h"
#include "media/base/audio_converter.h"
#include "media/base/audio_fifo.h"
@@ -30,6 +32,8 @@
#include "media/base/channel_layout.h"
#include "media/webrtc/echo_information.h"
#include "media/webrtc/webrtc_switches.h"
+#include "third_party/webrtc/api/audio/echo_canceller3_config.h"
+#include "third_party/webrtc/api/audio/echo_canceller3_config_json.h"
#include "third_party/webrtc/api/audio/echo_canceller3_factory.h"
#include "third_party/webrtc/api/mediaconstraintsinterface.h"
#include "third_party/webrtc/modules/audio_processing/include/audio_processing_statistics.h"
@@ -44,7 +48,8 @@ namespace {
using webrtc::AudioProcessing;
using webrtc::NoiseSuppression;
-const int kAudioProcessingNumberOfChannels = 1;
+constexpr int kAudioProcessingNumberOfChannels = 1;
+constexpr int kBuffersPerSecond = 100; // 10 ms per buffer.
AudioProcessing::ChannelLayout MapLayout(media::ChannelLayout media_layout) {
switch (media_layout) {
@@ -272,6 +277,7 @@ MediaStreamAudioProcessor::MediaStreamAudioProcessor(
const AudioProcessingProperties& properties,
WebRtcPlayoutDataSource* playout_data_source)
: render_delay_ms_(0),
+ audio_delay_stats_reporter_(kBuffersPerSecond),
playout_data_source_(playout_data_source),
main_thread_runner_(base::ThreadTaskRunnerHandle::Get()),
audio_mirroring_(false),
@@ -471,11 +477,6 @@ void MediaStreamAudioProcessor::OnPlayoutData(media::AudioBus* audio_bus,
int sample_rate,
int audio_delay_milliseconds) {
DCHECK(render_thread_checker_.CalledOnValidThread());
-#if defined(OS_ANDROID)
- DCHECK(!audio_processing_->echo_cancellation()->is_enabled());
-#else
- DCHECK(!audio_processing_->echo_control_mobile()->is_enabled());
-#endif
DCHECK_GE(audio_bus->channels(), 1);
DCHECK_LE(audio_bus->channels(), 2);
int frames_per_10_ms = sample_rate / 100;
@@ -603,11 +604,22 @@ void MediaStreamAudioProcessor::InitializeAudioProcessingModule(
if (properties.echo_cancellation_type ==
EchoCancellationType::kEchoCancellationAec3) {
webrtc::EchoCanceller3Config aec3_config;
- aec3_config.ep_strength.bounded_erl =
+ base::Optional<std::string> audio_processing_platform_config_json =
+ GetContentClient()
+ ->renderer()
+ ->WebRTCPlatformSpecificAudioProcessingConfiguration();
+ if (audio_processing_platform_config_json) {
+ aec3_config = webrtc::Aec3ConfigFromJsonString(
+ *audio_processing_platform_config_json);
+ bool config_parameters_already_valid =
+ webrtc::EchoCanceller3Config::Validate(&aec3_config);
+ RTC_DCHECK(config_parameters_already_valid);
+ }
+ aec3_config.ep_strength.bounded_erl |=
base::FeatureList::IsEnabled(features::kWebRtcAecBoundedErlSetup);
- aec3_config.echo_removal_control.has_clock_drift =
+ aec3_config.echo_removal_control.has_clock_drift |=
base::FeatureList::IsEnabled(features::kWebRtcAecClockDriftSetup);
- aec3_config.echo_audibility.use_stationary_properties =
+ aec3_config.echo_audibility.use_stationary_properties |=
base::FeatureList::IsEnabled(features::kWebRtcAecNoiseTransparency);
ap_builder.SetEchoControlFactory(
@@ -665,19 +677,13 @@ void MediaStreamAudioProcessor::InitializeCaptureFifo(
DCHECK(input_format.IsValid());
input_format_ = input_format;
- // TODO(ajm): For now, we assume fixed parameters for the output when audio
- // processing is enabled, to match the previous behavior. We should either
- // use the input parameters (in which case, audio processing will convert
- // at output) or ideally, have a backchannel from the sink to know what
- // format it would prefer.
-#if defined(OS_ANDROID)
- int audio_processing_sample_rate = AudioProcessing::kSampleRate16kHz;
-#else
- int audio_processing_sample_rate = AudioProcessing::kSampleRate48kHz;
-#endif
- const int output_sample_rate = audio_processing_ ?
- audio_processing_sample_rate :
- input_format.sample_rate();
+ // TODO(crbug/881275): For now, we assume fixed parameters for the output when
+ // audio processing is enabled, to match the previous behavior. We should
+ // either use the input parameters (in which case, audio processing will
+ // convert at output) or ideally, have a backchannel from the sink to know
+ // what format it would prefer.
+ const int output_sample_rate = audio_processing_ ? kAudioProcessingSampleRate
+ : input_format.sample_rate();
media::ChannelLayout output_channel_layout = audio_processing_ ?
media::GuessChannelLayout(kAudioProcessingNumberOfChannels) :
input_format.channel_layout();
@@ -751,13 +757,16 @@ int MediaStreamAudioProcessor::ProcessData(const float* const* process_ptrs,
"capture_delay_ms", capture_delay_ms, "render_delay_ms",
render_delay_ms);
- int total_delay_ms = capture_delay_ms + render_delay_ms;
+ const int total_delay_ms = capture_delay_ms + render_delay_ms;
if (total_delay_ms > 300 && large_delay_log_count_ < 10) {
LOG(WARNING) << "Large audio delay, capture delay: " << capture_delay_ms
<< "ms; render delay: " << render_delay_ms << "ms";
++large_delay_log_count_;
}
+ audio_delay_stats_reporter_.ReportDelay(
+ capture_delay, base::TimeDelta::FromMilliseconds(render_delay_ms));
+
webrtc::AudioProcessing* ap = audio_processing_.get();
ap->set_stream_delay_ms(total_delay_ms);
@@ -798,7 +807,8 @@ int MediaStreamAudioProcessor::ProcessData(const float* const* process_ptrs,
void MediaStreamAudioProcessor::UpdateAecStats() {
DCHECK(main_thread_runner_->BelongsToCurrentThread());
if (echo_information_)
- echo_information_->UpdateAecStats(audio_processing_->echo_cancellation());
+ echo_information_->UpdateAecStats(
+ audio_processing_->GetStatistics(true /* has_remote_tracks */));
}
} // namespace content
diff --git a/chromium/content/renderer/media/stream/media_stream_audio_processor.h b/chromium/content/renderer/media/stream/media_stream_audio_processor.h
index edf302f3460..f105c489239 100644
--- a/chromium/content/renderer/media/stream/media_stream_audio_processor.h
+++ b/chromium/content/renderer/media/stream/media_stream_audio_processor.h
@@ -23,6 +23,7 @@
#include "content/renderer/media/stream/media_stream_audio_processor_options.h"
#include "content/renderer/media/webrtc/webrtc_audio_device_impl.h"
#include "media/base/audio_converter.h"
+#include "media/webrtc/audio_delay_stats_reporter.h"
#include "third_party/webrtc/api/mediastreaminterface.h"
#include "third_party/webrtc/modules/audio_processing/include/audio_processing.h"
#include "third_party/webrtc/rtc_base/task_queue.h"
@@ -164,6 +165,9 @@ class CONTENT_EXPORT MediaStreamAudioProcessor
// both the capture audio thread and the render audio thread.
base::subtle::Atomic32 render_delay_ms_;
+ // For reporting audio delay stats.
+ media::AudioDelayStatsReporter audio_delay_stats_reporter_;
+
// Low-priority task queue for doing AEC dump recordings. It has to
// out-live audio_processing_ and be created/destroyed from the same
// thread.
diff --git a/chromium/content/renderer/media/stream/media_stream_audio_processor_options.cc b/chromium/content/renderer/media/stream/media_stream_audio_processor_options.cc
index eceaaee43d2..e6b525b05bb 100644
--- a/chromium/content/renderer/media/stream/media_stream_audio_processor_options.cc
+++ b/chromium/content/renderer/media/stream/media_stream_audio_processor_options.cc
@@ -91,25 +91,11 @@ AudioProcessingProperties::ToAudioProcessingSettings() const {
}
void EnableEchoCancellation(AudioProcessing* audio_processing) {
- // TODO(bugs.webrtc.org/9535): Remove double-booking AEC toggle when the
- // config applies (from 2018-08-16).
webrtc::AudioProcessing::Config apm_config = audio_processing->GetConfig();
apm_config.echo_canceller.enabled = true;
#if defined(OS_ANDROID)
- // Mobile devices are using AECM.
- CHECK_EQ(0, audio_processing->echo_control_mobile()->set_routing_mode(
- webrtc::EchoControlMobile::kSpeakerphone));
- CHECK_EQ(0, audio_processing->echo_control_mobile()->Enable(true));
apm_config.echo_canceller.mobile_mode = true;
#else
- int err = audio_processing->echo_cancellation()->set_suppression_level(
- webrtc::EchoCancellation::kHighSuppression);
-
- // Enable the metrics for AEC.
- err |= audio_processing->echo_cancellation()->enable_metrics(true);
- err |= audio_processing->echo_cancellation()->enable_delay_logging(true);
- err |= audio_processing->echo_cancellation()->Enable(true);
- CHECK_EQ(err, 0);
apm_config.echo_canceller.mobile_mode = false;
#endif
audio_processing->ApplyConfig(apm_config);
diff --git a/chromium/content/renderer/media/stream/media_stream_audio_processor_options.h b/chromium/content/renderer/media/stream/media_stream_audio_processor_options.h
index c730fab589e..f6d64874b3e 100644
--- a/chromium/content/renderer/media/stream/media_stream_audio_processor_options.h
+++ b/chromium/content/renderer/media/stream/media_stream_audio_processor_options.h
@@ -11,6 +11,7 @@
#include "base/files/file.h"
#include "base/macros.h"
#include "base/threading/thread_checker.h"
+#include "build/build_config.h"
#include "content/common/content_export.h"
#include "content/public/common/media_stream_request.h"
#include "media/audio/audio_processing.h"
@@ -31,6 +32,13 @@ namespace content {
using webrtc::AudioProcessing;
+static constexpr int kAudioProcessingSampleRate =
+#if defined(OS_ANDROID)
+ AudioProcessing::kSampleRate16kHz;
+#else
+ AudioProcessing::kSampleRate48kHz;
+#endif
+
// Simple struct with audio-processing properties.
struct CONTENT_EXPORT AudioProcessingProperties {
enum class EchoCancellationType {
diff --git a/chromium/content/renderer/media/stream/media_stream_audio_processor_unittest.cc b/chromium/content/renderer/media/stream/media_stream_audio_processor_unittest.cc
index 19dff280a73..a36eac22e4b 100644
--- a/chromium/content/renderer/media/stream/media_stream_audio_processor_unittest.cc
+++ b/chromium/content/renderer/media/stream/media_stream_audio_processor_unittest.cc
@@ -44,11 +44,6 @@ namespace content {
namespace {
-#if defined(ANDROID)
-const int kAudioProcessingSampleRate = 16000;
-#else
-const int kAudioProcessingSampleRate = 48000;
-#endif
const int kAudioProcessingNumberOfChannel = 1;
// The number of packers used for testing.
@@ -122,13 +117,7 @@ class MediaStreamAudioProcessorTest : public ::testing::Test {
// |audio_processor| does nothing when the audio processing is off in
// the processor.
webrtc::AudioProcessing* ap = audio_processor->audio_processing_.get();
-#if defined(OS_ANDROID)
- const bool is_aec_enabled = ap && ap->echo_control_mobile()->is_enabled();
- // AEC should be turned off for mobiles.
- DCHECK(!ap || !ap->echo_cancellation()->is_enabled());
-#else
- const bool is_aec_enabled = ap && ap->echo_cancellation()->is_enabled();
-#endif
+ const bool is_aec_enabled = ap && ap->GetConfig().echo_canceller.enabled;
if (is_aec_enabled) {
if (params.channels() > kMaxNumberOfPlayoutDataChannels) {
for (int i = 0; i < kMaxNumberOfPlayoutDataChannels; ++i) {
@@ -164,24 +153,19 @@ class MediaStreamAudioProcessorTest : public ::testing::Test {
void VerifyDefaultComponents(MediaStreamAudioProcessor* audio_processor) {
webrtc::AudioProcessing* audio_processing =
audio_processor->audio_processing_.get();
+ const webrtc::AudioProcessing::Config config =
+ audio_processing->GetConfig();
+ EXPECT_TRUE(config.echo_canceller.enabled);
#if defined(OS_ANDROID)
- EXPECT_TRUE(audio_processing->echo_control_mobile()->is_enabled());
- EXPECT_TRUE(audio_processing->echo_control_mobile()->routing_mode() ==
- webrtc::EchoControlMobile::kSpeakerphone);
- EXPECT_FALSE(audio_processing->echo_cancellation()->is_enabled());
+ EXPECT_TRUE(config.echo_canceller.mobile_mode);
#else
- EXPECT_TRUE(audio_processing->echo_cancellation()->is_enabled());
- EXPECT_TRUE(audio_processing->echo_cancellation()->suppression_level() ==
- webrtc::EchoCancellation::kHighSuppression);
- EXPECT_TRUE(audio_processing->echo_cancellation()->are_metrics_enabled());
- EXPECT_TRUE(
- audio_processing->echo_cancellation()->is_delay_logging_enabled());
+ EXPECT_FALSE(config.echo_canceller.mobile_mode);
#endif
+ EXPECT_TRUE(config.high_pass_filter.enabled);
EXPECT_TRUE(audio_processing->noise_suppression()->is_enabled());
EXPECT_TRUE(audio_processing->noise_suppression()->level() ==
webrtc::NoiseSuppression::kHigh);
- EXPECT_TRUE(audio_processing->high_pass_filter()->is_enabled());
EXPECT_TRUE(audio_processing->gain_control()->is_enabled());
#if defined(OS_ANDROID)
EXPECT_TRUE(audio_processing->gain_control()->mode() ==
diff --git a/chromium/content/renderer/media/stream/media_stream_audio_source.h b/chromium/content/renderer/media/stream/media_stream_audio_source.h
index cc55c329096..dfcbfcc7e62 100644
--- a/chromium/content/renderer/media/stream/media_stream_audio_source.h
+++ b/chromium/content/renderer/media/stream/media_stream_audio_source.h
@@ -5,6 +5,7 @@
#ifndef CONTENT_RENDERER_MEDIA_STREAM_MEDIA_STREAM_AUDIO_SOURCE_H_
#define CONTENT_RENDERER_MEDIA_STREAM_MEDIA_STREAM_AUDIO_SOURCE_H_
+#include <limits>
#include <memory>
#include <string>
@@ -14,6 +15,7 @@
#include "content/common/content_export.h"
#include "content/renderer/media/stream/media_stream_audio_deliverer.h"
#include "content/renderer/media/stream/media_stream_source.h"
+#include "media/base/limits.h"
#include "third_party/blink/public/platform/web_media_stream_source.h"
#include "third_party/blink/public/platform/web_media_stream_track.h"
@@ -23,6 +25,15 @@ class SingleThreadTaskRunner;
namespace content {
+// Define a max limit on the latency equivalent to 5 seconds. This limit is
+// meant to avoid overflows when deriving buffersize or sample rate from the
+// latency.
+static constexpr int kMaxAudioLatencyMs = 5000;
+
+static_assert(std::numeric_limits<int>::max() / media::limits::kMaxSampleRate >
+ kMaxAudioLatencyMs,
+ "The maxium audio latency can cause overflow.");
+
class MediaStreamAudioTrack;
// Represents a source of audio, and manages the delivery of audio data between
diff --git a/chromium/content/renderer/media/stream/media_stream_audio_unittest.cc b/chromium/content/renderer/media/stream/media_stream_audio_unittest.cc
index abddd1deed5..4c881a02674 100644
--- a/chromium/content/renderer/media/stream/media_stream_audio_unittest.cc
+++ b/chromium/content/renderer/media/stream/media_stream_audio_unittest.cc
@@ -5,9 +5,9 @@
#include <stdint.h>
#include "base/atomicops.h"
-#include "base/message_loop/message_loop.h"
#include "base/synchronization/lock.h"
#include "base/synchronization/waitable_event.h"
+#include "base/test/scoped_task_environment.h"
#include "base/test/test_timeouts.h"
#include "base/threading/platform_thread.h"
#include "base/threading/thread_checker.h"
@@ -267,7 +267,7 @@ class MediaStreamAudioTest : public ::testing::Test {
blink::WebMediaStreamSource blink_audio_source_;
blink::WebMediaStreamTrack blink_audio_track_;
- base::MessageLoop message_loop_;
+ base::test::ScopedTaskEnvironment task_environment_;
};
// Tests that a simple source-->track-->sink connection and audio data flow
diff --git a/chromium/content/renderer/media/stream/media_stream_constraints_util.cc b/chromium/content/renderer/media/stream/media_stream_constraints_util.cc
index d4111ac35b1..5708faed1f7 100644
--- a/chromium/content/renderer/media/stream/media_stream_constraints_util.cc
+++ b/chromium/content/renderer/media/stream/media_stream_constraints_util.cc
@@ -152,14 +152,12 @@ AudioCaptureSettings::AudioCaptureSettings(const char* failed_constraint_name)
AudioCaptureSettings::AudioCaptureSettings(
std::string device_id,
- const media::AudioParameters& audio_parameters,
bool enable_hotword,
bool disable_local_echo,
bool enable_automatic_output_device_selection,
const AudioProcessingProperties& audio_processing_properties)
: failed_constraint_name_(nullptr),
device_id_(std::move(device_id)),
- audio_parameters_(audio_parameters),
hotword_enabled_(enable_hotword),
disable_local_echo_(disable_local_echo),
render_to_associated_sink_(enable_automatic_output_device_selection),
diff --git a/chromium/content/renderer/media/stream/media_stream_constraints_util.h b/chromium/content/renderer/media/stream/media_stream_constraints_util.h
index fd2267d2086..4bcbf1068d6 100644
--- a/chromium/content/renderer/media/stream/media_stream_constraints_util.h
+++ b/chromium/content/renderer/media/stream/media_stream_constraints_util.h
@@ -36,8 +36,8 @@ class NumericRangeSet;
// The following fields are used to control MediaStreamVideoSource objects:
// * device_id: used for device selection and obtained from the deviceId
// * capture_params: used to initialize video capture. Its values are obtained
-// from the width, height, aspectRatio, frame_rate, googPowerLineFrequency,
-// and googNoiseReduction constraints.
+// from the width, height, aspectRatio, frame_rate, and googNoiseReduction
+// constraints.
// The following fields are used to control MediaStreamVideoTrack objects:
// * track_adapter_settings: All track objects use a VideoTrackAdapter object
// that may perform cropping and frame-rate adjustment. This field contains
@@ -104,10 +104,6 @@ class CONTENT_EXPORT VideoCaptureSettings {
DCHECK(HasValue());
return capture_params_.resolution_change_policy;
}
- media::PowerLineFrequency PowerLineFrequency() const {
- DCHECK(HasValue());
- return capture_params_.power_line_frequency;
- }
// Other accessors.
const char* failed_constraint_name() const { return failed_constraint_name_; }
@@ -186,7 +182,6 @@ class CONTENT_EXPORT AudioCaptureSettings {
// Creates an object with the given values.
explicit AudioCaptureSettings(
std::string device_id,
- const media::AudioParameters& audio_parameters,
bool enable_hotword,
bool disable_local_echo,
bool enable_automatic_output_device_selection,
@@ -204,11 +199,6 @@ class CONTENT_EXPORT AudioCaptureSettings {
DCHECK(HasValue());
return device_id_;
}
- // This field is meaningless in content capture.
- const media::AudioParameters& device_parameters() const {
- DCHECK(HasValue());
- return audio_parameters_;
- }
bool hotword_enabled() const {
DCHECK(HasValue());
return hotword_enabled_;
@@ -229,7 +219,6 @@ class CONTENT_EXPORT AudioCaptureSettings {
private:
const char* failed_constraint_name_;
std::string device_id_;
- media::AudioParameters audio_parameters_;
bool hotword_enabled_;
bool disable_local_echo_;
bool render_to_associated_sink_;
diff --git a/chromium/content/renderer/media/stream/media_stream_constraints_util_audio.cc b/chromium/content/renderer/media/stream/media_stream_constraints_util_audio.cc
index ac494175f9c..735fda4877f 100644
--- a/chromium/content/renderer/media/stream/media_stream_constraints_util_audio.cc
+++ b/chromium/content/renderer/media/stream/media_stream_constraints_util_audio.cc
@@ -400,9 +400,9 @@ class SingleDeviceCandidateSet {
basic_constraint_set, is_device_capture,
should_disable_hardware_noise_suppression);
- return AudioCaptureSettings(
- std::move(device_id), parameters_, hotword_enabled, disable_local_echo,
- render_to_associated_sink, audio_processing_properties);
+ return AudioCaptureSettings(std::move(device_id), hotword_enabled,
+ disable_local_echo, render_to_associated_sink,
+ audio_processing_properties);
}
private:
diff --git a/chromium/content/renderer/media/stream/media_stream_constraints_util_audio_unittest.cc b/chromium/content/renderer/media/stream/media_stream_constraints_util_audio_unittest.cc
index bce1de0a82c..934644c3811 100644
--- a/chromium/content/renderer/media/stream/media_stream_constraints_util_audio_unittest.cc
+++ b/chromium/content/renderer/media/stream/media_stream_constraints_util_audio_unittest.cc
@@ -9,7 +9,7 @@
#include <string>
#include <utility>
-#include "base/message_loop/message_loop.h"
+#include "base/test/scoped_task_environment.h"
#include "content/renderer/media/stream/local_media_stream_audio_source.h"
#include "content/renderer/media/stream/media_stream_audio_source.h"
#include "content/renderer/media/stream/media_stream_source.h"
@@ -343,12 +343,6 @@ class MediaStreamConstraintsUtilAudioTest
void CheckDevice(const AudioDeviceCaptureCapability& expected_device,
const AudioCaptureSettings& result) {
EXPECT_EQ(expected_device.DeviceID(), result.device_id());
- EXPECT_EQ(expected_device.Parameters().sample_rate(),
- result.device_parameters().sample_rate());
- EXPECT_EQ(expected_device.Parameters().channels(),
- result.device_parameters().channels());
- EXPECT_EQ(expected_device.Parameters().effects(),
- result.device_parameters().effects());
}
void CheckDeviceDefaults(const AudioCaptureSettings& result) {
@@ -473,7 +467,7 @@ class MediaStreamConstraintsUtilAudioTest
private:
// Required for tests involving a MediaStreamAudioSource.
- base::MessageLoop message_loop_;
+ base::test::ScopedTaskEnvironment task_environment_;
MockPeerConnectionDependencyFactory pc_factory_;
};
diff --git a/chromium/content/renderer/media/stream/media_stream_constraints_util_video_device.cc b/chromium/content/renderer/media/stream/media_stream_constraints_util_video_device.cc
index 0062de4435e..8be1cee6042 100644
--- a/chromium/content/renderer/media/stream/media_stream_constraints_util_video_device.cc
+++ b/chromium/content/renderer/media/stream/media_stream_constraints_util_video_device.cc
@@ -14,6 +14,7 @@
#include "content/renderer/media/stream/media_stream_constraints_util_sets.h"
#include "content/renderer/media/stream/media_stream_video_source.h"
#include "media/base/limits.h"
+#include "media/mojo/interfaces/display_media_information.mojom.h"
#include "third_party/blink/public/platform/web_media_constraints.h"
#include "third_party/blink/public/platform/web_string.h"
@@ -26,8 +27,8 @@ using DoubleRangeSet = media_constraints::NumericRangeSet<double>;
// Number of default settings to be used as final tie-breaking criteria for
// settings that are equally good at satisfying constraints:
-// device ID, power-line frequency, noise reduction, resolution and frame rate.
-const int kNumDefaultDistanceEntries = 5;
+// device ID, noise reduction, resolution and frame rate.
+const int kNumDefaultDistanceEntries = 4;
// The minimum aspect ratio to be supported by sources.
const double kMinSourceAspectRatio = 0.05;
@@ -53,21 +54,16 @@ struct Candidate {
const std::string& group_id,
const media::VideoCaptureFormat& format,
media::VideoFacingMode facing_mode,
- media::PowerLineFrequency power_line_frequency,
const base::Optional<bool>& noise_reduction)
: device_id_(device_id),
group_id_(group_id),
format_(format),
facing_mode_(facing_mode),
- power_line_frequency_(power_line_frequency),
noise_reduction_(noise_reduction) {}
// These accessor-like methods transform types to what Blink constraint
// classes expect.
blink::WebString GetFacingMode() const { return ToWebString(facing_mode_); }
- long GetPowerLineFrequency() const {
- return static_cast<long>(power_line_frequency_);
- }
blink::WebString GetDeviceId() const {
return blink::WebString::FromASCII(device_id_.data());
}
@@ -83,9 +79,6 @@ struct Candidate {
const std::string& device_id() const { return device_id_; }
const std::string& group_id() const { return group_id_; }
media::VideoFacingMode facing_mode() const { return facing_mode_; }
- media::PowerLineFrequency power_line_frequency() const {
- return power_line_frequency_;
- }
const base::Optional<bool>& noise_reduction() const {
return noise_reduction_;
}
@@ -95,7 +88,6 @@ struct Candidate {
std::string group_id_;
media::VideoCaptureFormat format_;
media::VideoFacingMode facing_mode_;
- media::PowerLineFrequency power_line_frequency_;
base::Optional<bool> noise_reduction_;
};
@@ -184,7 +176,6 @@ VideoCaptureSettings ComputeVideoDeviceCaptureSettings(
const blink::WebMediaTrackConstraintSet& basic_constraint_set) {
media::VideoCaptureParams capture_params;
capture_params.requested_format = candidate.format();
- capture_params.power_line_frequency = candidate.power_line_frequency();
auto track_adapter_settings = SelectVideoTrackAdapterSettings(
basic_constraint_set, constrained_format.constrained_resolution(),
constrained_format.constrained_frame_rate(),
@@ -408,32 +399,6 @@ double AspectRatioConstraintSourceDistance(
return 0.0;
}
-// Returns a custom distance function suitable for the googPowerLineFrequency
-// constraint, given a |constraint| and a candidate value |source_value|.
-// The distance is HUGE_VAL if |source_value| cannot satisfy |constraint|.
-// Otherwise, the distance is zero.
-double PowerLineFrequencyConstraintSourceDistance(
- const blink::LongConstraint& constraint,
- media::PowerLineFrequency source_value,
- const char** failed_constraint_name) {
- bool constraint_has_min = ConstraintHasMin(constraint);
- bool constraint_has_max = ConstraintHasMax(constraint);
- long constraint_min = constraint_has_min ? ConstraintMin(constraint) : -1L;
- long constraint_max = constraint_has_max ? ConstraintMax(constraint) : -1L;
- long source_value_long = static_cast<long>(source_value);
-
- if ((constraint_has_max && source_value_long > constraint_max) ||
- (constraint_has_min && source_value_long < constraint_min) ||
- (constraint_has_min && constraint_has_max &&
- constraint_min > constraint_max)) {
- if (failed_constraint_name)
- *failed_constraint_name = constraint.GetName();
- return HUGE_VAL;
- }
-
- return 0.0;
-}
-
// Returns a custom distance function suitable for the googNoiseReduction
// constraint, given a |constraint| and a candidate value |value|.
// The distance is HUGE_VAL if |candidate_value| cannot satisfy |constraint|.
@@ -519,9 +484,6 @@ double CandidateSourceDistance(
failed_constraint_name) +
FormatSourceDistance(candidate.format(), constrained_format,
constraint_set, failed_constraint_name) +
- PowerLineFrequencyConstraintSourceDistance(
- constraint_set.goog_power_line_frequency,
- candidate.power_line_frequency(), failed_constraint_name) +
NoiseReductionConstraintSourceDistance(
constraint_set.goog_noise_reduction, candidate.noise_reduction(),
failed_constraint_name);
@@ -626,23 +588,6 @@ double FrameRateConstraintNativeFitnessDistance(
}
// Returns the fitness distance between |value| and |constraint| for the
-// googPowerLineFrequency constraint.
-// Based on https://w3c.github.io/mediacapture-main/#dfn-fitness-distance.
-double PowerLineFrequencyConstraintFitnessDistance(
- long value,
- const blink::LongConstraint& constraint) {
- if (!constraint.HasIdeal())
- return 0.0;
-
- // This constraint is of type long, but it behaves as an enum. Thus, values
- // equal to ideal have fitness 0.0 and any other values have fitness 1.0.
- if (value == constraint.Ideal())
- return 0.0;
-
- return 1.0;
-}
-
-// Returns the fitness distance between |value| and |constraint| for the
// googNoiseReduction constraint.
// Based on https://w3c.github.io/mediacapture-main/#dfn-fitness-distance.
double NoiseReductionConstraintFitnessDistance(
@@ -678,9 +623,6 @@ double CandidateFitnessDistance(
constraint_set.facing_mode);
fitness += StringConstraintFitnessDistance(candidate.GetVideoKind(),
constraint_set.video_kind);
- fitness += PowerLineFrequencyConstraintFitnessDistance(
- candidate.GetPowerLineFrequency(),
- constraint_set.goog_power_line_frequency);
fitness += NoiseReductionConstraintFitnessDistance(
candidate.noise_reduction(), constraint_set.goog_noise_reduction);
// No need to pass minimum value to compute fitness for range-based
@@ -740,14 +682,6 @@ void AppendDistanceFromDefault(
}
}
- // Prefer default power-line frequency.
- double power_line_frequency_distance =
- candidate.power_line_frequency() ==
- media::PowerLineFrequency::FREQUENCY_DEFAULT
- ? 0.0
- : HUGE_VAL;
- distance_vector->push_back(power_line_frequency_distance);
-
// Prefer not having a specific noise-reduction value and let the lower-layers
// implementation choose a noise-reduction strategy.
double noise_reduction_distance =
@@ -793,6 +727,33 @@ blink::WebMediaStreamTrack::FacingMode ToWebFacingMode(
}
}
+blink::WebMediaStreamTrack::DisplayCaptureSurfaceType ToWebDisplaySurface(
+ media::mojom::DisplayCaptureSurfaceType display_surface) {
+ switch (display_surface) {
+ case media::mojom::DisplayCaptureSurfaceType::MONITOR:
+ return blink::WebMediaStreamTrack::DisplayCaptureSurfaceType::kMonitor;
+ case media::mojom::DisplayCaptureSurfaceType::WINDOW:
+ return blink::WebMediaStreamTrack::DisplayCaptureSurfaceType::kWindow;
+ case media::mojom::DisplayCaptureSurfaceType::APPLICATION:
+ return blink::WebMediaStreamTrack::DisplayCaptureSurfaceType::
+ kApplication;
+ case media::mojom::DisplayCaptureSurfaceType::BROWSER:
+ return blink::WebMediaStreamTrack::DisplayCaptureSurfaceType::kBrowser;
+ }
+}
+
+blink::WebMediaStreamTrack::CursorCaptureType ToWebCursorCaptureType(
+ media::mojom::CursorCaptureType cursor) {
+ switch (cursor) {
+ case media::mojom::CursorCaptureType::NEVER:
+ return blink::WebMediaStreamTrack::CursorCaptureType::kNever;
+ case media::mojom::CursorCaptureType::ALWAYS:
+ return blink::WebMediaStreamTrack::CursorCaptureType::kAlways;
+ case media::mojom::CursorCaptureType::MOTION:
+ return blink::WebMediaStreamTrack::CursorCaptureType::kMotion;
+ }
+}
+
VideoDeviceCaptureCapabilities::VideoDeviceCaptureCapabilities() = default;
VideoDeviceCaptureCapabilities::VideoDeviceCaptureCapabilities(
VideoDeviceCaptureCapabilities&& other) = default;
@@ -849,75 +810,63 @@ VideoCaptureSettings SelectSettingsVideoDeviceCapture(
if (!constrained_format.ApplyConstraintSet(constraints.Basic()))
continue;
- for (auto& power_line_frequency : capabilities.power_line_capabilities) {
- double basic_power_line_frequency_distance =
- PowerLineFrequencyConstraintSourceDistance(
- constraints.Basic().goog_power_line_frequency,
- power_line_frequency, &failed_constraint_name);
- if (!std::isfinite(basic_power_line_frequency_distance))
+ for (auto& noise_reduction : capabilities.noise_reduction_capabilities) {
+ double basic_noise_reduction_distance =
+ NoiseReductionConstraintSourceDistance(
+ constraints.Basic().goog_noise_reduction, noise_reduction,
+ &failed_constraint_name);
+ if (!std::isfinite(basic_noise_reduction_distance))
continue;
- for (auto& noise_reduction :
- capabilities.noise_reduction_capabilities) {
- double basic_noise_reduction_distance =
- NoiseReductionConstraintSourceDistance(
- constraints.Basic().goog_noise_reduction, noise_reduction,
- &failed_constraint_name);
- if (!std::isfinite(basic_noise_reduction_distance))
- continue;
-
- // The candidate satisfies the basic constraint set.
- double candidate_basic_custom_distance =
- basic_device_distance + basic_format_distance +
- basic_power_line_frequency_distance +
- basic_noise_reduction_distance;
- DCHECK(std::isfinite(candidate_basic_custom_distance));
-
- // Temporary vector to save custom distances for advanced constraints.
- // Custom distances must be added to the candidate distance vector
- // after all the spec-mandated values.
- DistanceVector advanced_custom_distance_vector;
- Candidate candidate(device->device_id, device->group_id, format,
- device->facing_mode, power_line_frequency,
- noise_reduction);
- DistanceVector candidate_distance_vector;
- // First criteria for valid candidates is satisfaction of advanced
- // constraint sets.
- for (const auto& advanced_set : constraints.Advanced()) {
- double custom_distance = CandidateSourceDistance(
- candidate, constrained_format, advanced_set, nullptr);
- if (!constrained_format.ApplyConstraintSet(advanced_set))
- custom_distance = HUGE_VAL;
- advanced_custom_distance_vector.push_back(custom_distance);
- double spec_distance = std::isfinite(custom_distance) ? 0 : 1;
- candidate_distance_vector.push_back(spec_distance);
- }
-
- // Second criterion is fitness distance.
- candidate_distance_vector.push_back(CandidateFitnessDistance(
- candidate, constrained_format, constraints.Basic()));
-
- // Third criteria are custom distances to constraint sets.
- candidate_distance_vector.push_back(candidate_basic_custom_distance);
- std::copy(advanced_custom_distance_vector.begin(),
- advanced_custom_distance_vector.end(),
- std::back_inserter(candidate_distance_vector));
-
- // Fourth criteria is native fitness distance.
- candidate_distance_vector.push_back(CandidateNativeFitnessDistance(
- constrained_format, constraints.Basic()));
-
- // Final criteria are custom distances to default settings.
- AppendDistanceFromDefault(candidate, capabilities, default_width,
- default_height, default_frame_rate,
- &candidate_distance_vector);
-
- DCHECK_EQ(best_distance.size(), candidate_distance_vector.size());
- if (candidate_distance_vector < best_distance) {
- best_distance = candidate_distance_vector;
- result = ComputeVideoDeviceCaptureSettings(
- candidate, constrained_format, constraints.Basic());
- }
+ // The candidate satisfies the basic constraint set.
+ double candidate_basic_custom_distance = basic_device_distance +
+ basic_format_distance +
+ basic_noise_reduction_distance;
+ DCHECK(std::isfinite(candidate_basic_custom_distance));
+
+ // Temporary vector to save custom distances for advanced constraints.
+ // Custom distances must be added to the candidate distance vector
+ // after all the spec-mandated values.
+ DistanceVector advanced_custom_distance_vector;
+ Candidate candidate(device->device_id, device->group_id, format,
+ device->facing_mode, noise_reduction);
+ DistanceVector candidate_distance_vector;
+ // First criteria for valid candidates is satisfaction of advanced
+ // constraint sets.
+ for (const auto& advanced_set : constraints.Advanced()) {
+ double custom_distance = CandidateSourceDistance(
+ candidate, constrained_format, advanced_set, nullptr);
+ if (!constrained_format.ApplyConstraintSet(advanced_set))
+ custom_distance = HUGE_VAL;
+ advanced_custom_distance_vector.push_back(custom_distance);
+ double spec_distance = std::isfinite(custom_distance) ? 0 : 1;
+ candidate_distance_vector.push_back(spec_distance);
+ }
+
+ // Second criterion is fitness distance.
+ candidate_distance_vector.push_back(CandidateFitnessDistance(
+ candidate, constrained_format, constraints.Basic()));
+
+ // Third criteria are custom distances to constraint sets.
+ candidate_distance_vector.push_back(candidate_basic_custom_distance);
+ std::copy(advanced_custom_distance_vector.begin(),
+ advanced_custom_distance_vector.end(),
+ std::back_inserter(candidate_distance_vector));
+
+ // Fourth criteria is native fitness distance.
+ candidate_distance_vector.push_back(CandidateNativeFitnessDistance(
+ constrained_format, constraints.Basic()));
+
+ // Final criteria are custom distances to default settings.
+ AppendDistanceFromDefault(candidate, capabilities, default_width,
+ default_height, default_frame_rate,
+ &candidate_distance_vector);
+
+ DCHECK_EQ(best_distance.size(), candidate_distance_vector.size());
+ if (candidate_distance_vector < best_distance) {
+ best_distance = candidate_distance_vector;
+ result = ComputeVideoDeviceCaptureSettings(
+ candidate, constrained_format, constraints.Basic());
}
}
}
diff --git a/chromium/content/renderer/media/stream/media_stream_constraints_util_video_device.h b/chromium/content/renderer/media/stream/media_stream_constraints_util_video_device.h
index cfd1e96908c..4466828ab51 100644
--- a/chromium/content/renderer/media/stream/media_stream_constraints_util_video_device.h
+++ b/chromium/content/renderer/media/stream/media_stream_constraints_util_video_device.h
@@ -17,17 +17,23 @@
namespace blink {
class WebString;
class WebMediaConstraints;
-}
+} // namespace blink
namespace content {
// Calculates and returns videoKind value for |format|.
// See https://w3c.github.io/mediacapture-depth.
-blink::WebString CONTENT_EXPORT
-GetVideoKindForFormat(const media::VideoCaptureFormat& format);
+CONTENT_EXPORT blink::WebString GetVideoKindForFormat(
+ const media::VideoCaptureFormat& format);
-blink::WebMediaStreamTrack::FacingMode CONTENT_EXPORT
-ToWebFacingMode(media::VideoFacingMode video_facing);
+CONTENT_EXPORT blink::WebMediaStreamTrack::FacingMode ToWebFacingMode(
+ media::VideoFacingMode video_facing);
+
+CONTENT_EXPORT blink::WebMediaStreamTrack::DisplayCaptureSurfaceType
+ToWebDisplaySurface(media::mojom::DisplayCaptureSurfaceType display_surface);
+
+CONTENT_EXPORT blink::WebMediaStreamTrack::CursorCaptureType
+ToWebCursorCaptureType(media::mojom::CursorCaptureType cursor);
struct CONTENT_EXPORT VideoDeviceCaptureCapabilities {
VideoDeviceCaptureCapabilities();
@@ -39,7 +45,6 @@ struct CONTENT_EXPORT VideoDeviceCaptureCapabilities {
// Each field is independent of each other.
std::vector<blink::mojom::VideoInputDeviceCapabilitiesPtr>
device_capabilities;
- std::vector<media::PowerLineFrequency> power_line_capabilities;
std::vector<base::Optional<bool>> noise_reduction_capabilities;
};
@@ -100,9 +105,9 @@ struct CONTENT_EXPORT VideoDeviceCaptureCapabilities {
// ideal value and thus has worse fitness according to step 2, even if C3's
// native fitness is better than C1's and C2's.
// 5. C1 is better than C2 if its settings are closer to certain default
-// settings that include the device ID, power-line frequency, noise
-// reduction, resolution, and frame rate, in that order. Note that there is
-// no default facing mode or aspect ratio.
+// settings that include the device ID, noise reduction, resolution,
+// and frame rate, in that order. Note that there is no default facing mode
+// or aspect ratio.
// This function uses the SelectVideoTrackAdapterSettings function to compute
// some track-specific settings. These are available in the returned value via
// the track_adapter_settings() accessor. For more details about the algorithm
diff --git a/chromium/content/renderer/media/stream/media_stream_constraints_util_video_device_unittest.cc b/chromium/content/renderer/media/stream/media_stream_constraints_util_video_device_unittest.cc
index 8a9a13eb380..cb17ff92126 100644
--- a/chromium/content/renderer/media/stream/media_stream_constraints_util_video_device_unittest.cc
+++ b/chromium/content/renderer/media/stream/media_stream_constraints_util_video_device_unittest.cc
@@ -171,12 +171,6 @@ class MediaStreamConstraintsUtilVideoDeviceTest : public testing::Test {
};
capabilities_.device_capabilities.push_back(std::move(device));
- capabilities_.power_line_capabilities = {
- media::PowerLineFrequency::FREQUENCY_DEFAULT,
- media::PowerLineFrequency::FREQUENCY_50HZ,
- media::PowerLineFrequency::FREQUENCY_60HZ,
- };
-
capabilities_.noise_reduction_capabilities = {
base::Optional<bool>(), base::Optional<bool>(true),
base::Optional<bool>(false),
@@ -222,8 +216,6 @@ TEST_F(MediaStreamConstraintsUtilVideoDeviceTest, Unconstrained) {
EXPECT_EQ(default_device_->device_id, result.device_id());
EXPECT_EQ(*default_closest_format_, result.Format());
// Should select default settings for other constraints.
- EXPECT_EQ(media::PowerLineFrequency::FREQUENCY_DEFAULT,
- result.PowerLineFrequency());
EXPECT_EQ(base::Optional<bool>(), result.noise_reduction());
}
@@ -387,30 +379,6 @@ TEST_F(MediaStreamConstraintsUtilVideoDeviceTest, OverconstrainedOnFrameRate) {
}
TEST_F(MediaStreamConstraintsUtilVideoDeviceTest,
- OverconstrainedOnPowerLineFrequency) {
- constraint_factory_.Reset();
- constraint_factory_.basic().goog_power_line_frequency.SetExact(123467890);
- auto result = SelectSettings();
- EXPECT_FALSE(result.HasValue());
- EXPECT_EQ(constraint_factory_.basic().goog_power_line_frequency.GetName(),
- result.failed_constraint_name());
-
- constraint_factory_.Reset();
- constraint_factory_.basic().goog_power_line_frequency.SetMin(123467890);
- result = SelectSettings();
- EXPECT_FALSE(result.HasValue());
- EXPECT_EQ(constraint_factory_.basic().goog_power_line_frequency.GetName(),
- result.failed_constraint_name());
-
- constraint_factory_.Reset();
- constraint_factory_.basic().goog_power_line_frequency.SetMax(-1);
- result = SelectSettings();
- EXPECT_FALSE(result.HasValue());
- EXPECT_EQ(constraint_factory_.basic().goog_power_line_frequency.GetName(),
- result.failed_constraint_name());
-}
-
-TEST_F(MediaStreamConstraintsUtilVideoDeviceTest,
OverconstrainedOnNoiseReduction) {
// Simulate a system that does not support noise reduction.
// Manually adding device capabilities because VideoDeviceCaptureCapabilities
@@ -425,7 +393,6 @@ TEST_F(MediaStreamConstraintsUtilVideoDeviceTest,
media::PIXEL_FORMAT_I420),
};
capabilities.device_capabilities.push_back(std::move(device));
- capabilities.power_line_capabilities = capabilities_.power_line_capabilities;
capabilities.noise_reduction_capabilities = {base::Optional<bool>(false)};
constraint_factory_.Reset();
@@ -447,8 +414,6 @@ TEST_F(MediaStreamConstraintsUtilVideoDeviceTest, MandatoryDeviceID) {
EXPECT_TRUE(result.HasValue());
EXPECT_EQ(default_device_->device_id, result.device_id());
EXPECT_EQ(*default_closest_format_, result.Format());
- EXPECT_EQ(media::PowerLineFrequency::FREQUENCY_DEFAULT,
- result.PowerLineFrequency());
CheckTrackAdapterSettingsEqualsFormat(result);
constraint_factory_.basic().device_id.SetExact(
@@ -456,8 +421,6 @@ TEST_F(MediaStreamConstraintsUtilVideoDeviceTest, MandatoryDeviceID) {
result = SelectSettings();
EXPECT_EQ(low_res_device_->device_id, result.device_id());
EXPECT_EQ(*low_res_closest_format_, result.Format());
- EXPECT_EQ(media::PowerLineFrequency::FREQUENCY_DEFAULT,
- result.PowerLineFrequency());
CheckTrackAdapterSettingsEqualsFormat(result);
constraint_factory_.basic().device_id.SetExact(
@@ -465,8 +428,6 @@ TEST_F(MediaStreamConstraintsUtilVideoDeviceTest, MandatoryDeviceID) {
result = SelectSettings();
EXPECT_EQ(high_res_device_->device_id, result.device_id());
EXPECT_EQ(*high_res_closest_format_, result.Format());
- EXPECT_EQ(media::PowerLineFrequency::FREQUENCY_DEFAULT,
- result.PowerLineFrequency());
CheckTrackAdapterSettingsEqualsFormat(result);
}
@@ -478,8 +439,6 @@ TEST_F(MediaStreamConstraintsUtilVideoDeviceTest, MandatoryGroupID) {
EXPECT_TRUE(result.HasValue());
EXPECT_EQ(default_device_->device_id, result.device_id());
EXPECT_EQ(*default_closest_format_, result.Format());
- EXPECT_EQ(media::PowerLineFrequency::FREQUENCY_DEFAULT,
- result.PowerLineFrequency());
CheckTrackAdapterSettingsEqualsFormat(result);
constraint_factory_.basic().group_id.SetExact(
@@ -487,8 +446,6 @@ TEST_F(MediaStreamConstraintsUtilVideoDeviceTest, MandatoryGroupID) {
result = SelectSettings();
EXPECT_EQ(low_res_device_->device_id, result.device_id());
EXPECT_EQ(*low_res_closest_format_, result.Format());
- EXPECT_EQ(media::PowerLineFrequency::FREQUENCY_DEFAULT,
- result.PowerLineFrequency());
CheckTrackAdapterSettingsEqualsFormat(result);
constraint_factory_.basic().group_id.SetExact(
@@ -496,8 +453,6 @@ TEST_F(MediaStreamConstraintsUtilVideoDeviceTest, MandatoryGroupID) {
result = SelectSettings();
EXPECT_EQ(high_res_device_->device_id, result.device_id());
EXPECT_EQ(*high_res_closest_format_, result.Format());
- EXPECT_EQ(media::PowerLineFrequency::FREQUENCY_DEFAULT,
- result.PowerLineFrequency());
CheckTrackAdapterSettingsEqualsFormat(result);
}
@@ -513,8 +468,6 @@ TEST_F(MediaStreamConstraintsUtilVideoDeviceTest, MandatoryFacingMode) {
EXPECT_EQ(media::MEDIA_VIDEO_FACING_ENVIRONMENT,
low_res_device_->facing_mode);
EXPECT_EQ(*low_res_closest_format_, result.Format());
- EXPECT_EQ(media::PowerLineFrequency::FREQUENCY_DEFAULT,
- result.PowerLineFrequency());
CheckTrackAdapterSettingsEqualsFormat(result);
constraint_factory_.basic().facing_mode.SetExact(
@@ -526,8 +479,6 @@ TEST_F(MediaStreamConstraintsUtilVideoDeviceTest, MandatoryFacingMode) {
EXPECT_EQ(high_res_device_->device_id, result.device_id());
EXPECT_EQ(media::MEDIA_VIDEO_FACING_USER, high_res_device_->facing_mode);
EXPECT_EQ(*high_res_closest_format_, result.Format());
- EXPECT_EQ(media::PowerLineFrequency::FREQUENCY_DEFAULT,
- result.PowerLineFrequency());
CheckTrackAdapterSettingsEqualsFormat(result);
}
@@ -549,25 +500,6 @@ TEST_F(MediaStreamConstraintsUtilVideoDeviceTest, MandatoryVideoKind) {
CheckTrackAdapterSettingsEqualsFormat(result);
}
-TEST_F(MediaStreamConstraintsUtilVideoDeviceTest, MandatoryPowerLineFrequency) {
- constraint_factory_.Reset();
- const media::PowerLineFrequency kPowerLineFrequencies[] = {
- media::PowerLineFrequency::FREQUENCY_50HZ,
- media::PowerLineFrequency::FREQUENCY_60HZ};
- for (auto power_line_frequency : kPowerLineFrequencies) {
- constraint_factory_.basic().goog_power_line_frequency.SetExact(
- static_cast<long>(power_line_frequency));
- auto result = SelectSettings();
- EXPECT_TRUE(result.HasValue());
- EXPECT_EQ(power_line_frequency, result.PowerLineFrequency());
- // The default device and settings closest to the default should be
- // selected.
- EXPECT_EQ(default_device_->device_id, result.device_id());
- EXPECT_EQ(*default_closest_format_, result.Format());
- CheckTrackAdapterSettingsEqualsFormat(result);
- }
-}
-
TEST_F(MediaStreamConstraintsUtilVideoDeviceTest, MandatoryNoiseReduction) {
constraint_factory_.Reset();
const bool kNoiseReductionValues[] = {true, false};
@@ -2276,39 +2208,6 @@ TEST_F(MediaStreamConstraintsUtilVideoDeviceTest,
}
TEST_F(MediaStreamConstraintsUtilVideoDeviceTest,
- AdvancedContradictoryPowerLineFrequency) {
- {
- constraint_factory_.Reset();
- blink::WebMediaTrackConstraintSet& advanced1 =
- constraint_factory_.AddAdvanced();
- advanced1.width.SetMin(640);
- advanced1.height.SetMin(480);
- advanced1.goog_power_line_frequency.SetExact(50);
- blink::WebMediaTrackConstraintSet& advanced2 =
- constraint_factory_.AddAdvanced();
- advanced2.width.SetMin(1920);
- advanced2.height.SetMin(1080);
- advanced2.goog_power_line_frequency.SetExact(60);
- auto result = SelectSettings();
- EXPECT_TRUE(result.HasValue());
- // The second advanced set cannot be satisfied because it contradicts the
- // first set. The default device supports the first set and should be
- // selected.
- EXPECT_EQ(default_device_->device_id, result.device_id());
- EXPECT_LE(640, result.Width());
- EXPECT_LE(480, result.Height());
- EXPECT_EQ(50, static_cast<int>(result.PowerLineFrequency()));
- EXPECT_EQ(result.Width(), result.track_adapter_settings().max_width);
- EXPECT_EQ(result.Height(), result.track_adapter_settings().max_height);
- EXPECT_EQ(640.0 / result.Height(),
- result.track_adapter_settings().min_aspect_ratio);
- EXPECT_EQ(result.Width() / 480.0,
- result.track_adapter_settings().max_aspect_ratio);
- CheckTrackAdapterSettingsEqualsFrameRate(result);
- }
-}
-
-TEST_F(MediaStreamConstraintsUtilVideoDeviceTest,
AdvancedContradictoryAspectRatioWidth) {
{
constraint_factory_.Reset();
diff --git a/chromium/content/renderer/media/stream/media_stream_device_observer.cc b/chromium/content/renderer/media/stream/media_stream_device_observer.cc
index 80a3e5df962..df23fbe6299 100644
--- a/chromium/content/renderer/media/stream/media_stream_device_observer.cc
+++ b/chromium/content/renderer/media/stream/media_stream_device_observer.cc
@@ -22,8 +22,8 @@ namespace {
bool RemoveStreamDeviceFromArray(const MediaStreamDevice& device,
MediaStreamDevices* devices) {
- for (MediaStreamDevices::iterator device_it = devices->begin();
- device_it != devices->end(); ++device_it) {
+ for (auto device_it = devices->begin(); device_it != devices->end();
+ ++device_it) {
if (device_it->IsSameDevice(device)) {
devices->erase(device_it);
return true;
diff --git a/chromium/content/renderer/media/stream/media_stream_video_source.cc b/chromium/content/renderer/media/stream/media_stream_video_source.cc
index 9ea6162be88..0550fb2209c 100644
--- a/chromium/content/renderer/media/stream/media_stream_video_source.cc
+++ b/chromium/content/renderer/media/stream/media_stream_video_source.cc
@@ -15,7 +15,6 @@
#include "base/macros.h"
#include "base/strings/string_number_conversions.h"
#include "base/threading/sequenced_task_runner_handle.h"
-#include "base/trace_event/trace_event.h"
#include "content/child/child_process.h"
#include "content/public/common/content_features.h"
#include "content/renderer/media/stream/media_stream_constraints_util_video_device.h"
@@ -86,8 +85,7 @@ void MediaStreamVideoSource::RemoveTrack(MediaStreamVideoTrack* video_track,
base::OnceClosure callback) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
{
- std::vector<MediaStreamVideoTrack*>::iterator it =
- std::find(tracks_.begin(), tracks_.end(), video_track);
+ auto it = std::find(tracks_.begin(), tracks_.end(), video_track);
DCHECK(it != tracks_.end());
tracks_.erase(it);
}
@@ -126,7 +124,7 @@ void MediaStreamVideoSource::RemoveTrack(MediaStreamVideoTrack* video_track,
// stopping a source with StopSource() can have side effects that affect
// sources created after that StopSource() call, but before the actual
// stop takes place. See http://crbug.com/778039.
- StopForRestart(base::BindOnce(&MediaStreamVideoSource::DidRemoveLastTrack,
+ StopForRestart(base::BindOnce(&MediaStreamVideoSource::DidStopSource,
weak_factory_.GetWeakPtr(),
std::move(callback)));
if (state_ == STOPPING_FOR_RESTART || state_ == STOPPED_FOR_RESTART) {
@@ -139,7 +137,7 @@ void MediaStreamVideoSource::RemoveTrack(MediaStreamVideoTrack* video_track,
FinalizeStopSource();
} else {
// If the source does not support restarting, call StopSource()
- // to ensure stop on this task. DidRemoveLastTrack() will be called on
+ // to ensure stop on this task. DidStopSource() will be called on
// another task even if the source does not support restarting, as
// StopForRestart() always posts a task to run its callback.
StopSource();
@@ -152,11 +150,10 @@ void MediaStreamVideoSource::RemoveTrack(MediaStreamVideoTrack* video_track,
}
}
-void MediaStreamVideoSource::DidRemoveLastTrack(base::OnceClosure callback,
- RestartResult result) {
+void MediaStreamVideoSource::DidStopSource(base::OnceClosure callback,
+ RestartResult result) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
DCHECK(callback);
- DCHECK(tracks_.empty());
DCHECK_EQ(Owner().GetReadyState(),
blink::WebMediaStreamSource::kReadyStateEnded);
if (result == RestartResult::IS_STOPPED) {
diff --git a/chromium/content/renderer/media/stream/media_stream_video_source.h b/chromium/content/renderer/media/stream/media_stream_video_source.h
index 4ef327f92f5..82da3b1f3c2 100644
--- a/chromium/content/renderer/media/stream/media_stream_video_source.h
+++ b/chromium/content/renderer/media/stream/media_stream_video_source.h
@@ -256,7 +256,7 @@ class CONTENT_EXPORT MediaStreamVideoSource : public MediaStreamSource {
void StartFrameMonitoring();
void UpdateTrackSettings(MediaStreamVideoTrack* track,
const VideoTrackAdapterSettings& adapter_settings);
- void DidRemoveLastTrack(base::OnceClosure callback, RestartResult result);
+ void DidStopSource(base::OnceClosure callback, RestartResult result);
State state_;
diff --git a/chromium/content/renderer/media/stream/media_stream_video_source_unittest.cc b/chromium/content/renderer/media/stream/media_stream_video_source_unittest.cc
index ebbbf191ad5..ff1c5663c7c 100644
--- a/chromium/content/renderer/media/stream/media_stream_video_source_unittest.cc
+++ b/chromium/content/renderer/media/stream/media_stream_video_source_unittest.cc
@@ -3,6 +3,7 @@
// found in the LICENSE file.
#include <string>
+#include <utility>
#include <vector>
#include "base/bind.h"
@@ -58,15 +59,15 @@ class MediaStreamVideoSourceTest : public ::testing::Test {
media::PIXEL_FORMAT_I420));
formats.push_back(media::VideoCaptureFormat(gfx::Size(320, 240), 30,
media::PIXEL_FORMAT_I420));
- webkit_source_.Initialize(blink::WebString::FromASCII("dummy_source_id"),
- blink::WebMediaStreamSource::kTypeVideo,
- blink::WebString::FromASCII("dummy_source_name"),
- false /* remote */);
- webkit_source_.SetExtraData(mock_source_);
+ web_source_.Initialize(blink::WebString::FromASCII("dummy_source_id"),
+ blink::WebMediaStreamSource::kTypeVideo,
+ blink::WebString::FromASCII("dummy_source_name"),
+ false /* remote */);
+ web_source_.SetExtraData(mock_source_);
}
void TearDown() override {
- webkit_source_.Reset();
+ web_source_.Reset();
blink::WebHeap::CollectAllGarbageForTesting();
}
@@ -75,7 +76,7 @@ class MediaStreamVideoSourceTest : public ::testing::Test {
protected:
MediaStreamVideoSource* source() { return mock_source_; }
- // Create a track that's associated with |webkit_source_|.
+ // Create a track that's associated with |web_source_|.
blink::WebMediaStreamTrack CreateTrack(const std::string& id) {
bool enabled = true;
return MediaStreamVideoTrack::CreateVideoTrack(
@@ -131,7 +132,7 @@ class MediaStreamVideoSourceTest : public ::testing::Test {
MockMediaStreamVideoSource* mock_source() { return mock_source_; }
- const blink::WebMediaStreamSource& webkit_source() { return webkit_source_; }
+ const blink::WebMediaStreamSource& web_source() { return web_source_; }
void TestSourceCropFrame(int capture_width,
int capture_height,
@@ -238,7 +239,7 @@ class MediaStreamVideoSourceTest : public ::testing::Test {
void OnConstraintsApplied(MediaStreamSource* source,
MediaStreamRequestResult result,
const blink::WebString& result_name) {
- ASSERT_EQ(source, webkit_source().GetExtraData());
+ ASSERT_EQ(source, web_source().GetExtraData());
if (result == MEDIA_DEVICE_OK) {
++number_of_successful_constraints_applied_;
@@ -250,7 +251,7 @@ class MediaStreamVideoSourceTest : public ::testing::Test {
if (!track_to_release_.IsNull()) {
mock_source_ = nullptr;
- webkit_source_.Reset();
+ web_source_.Reset();
track_to_release_.Reset();
}
}
@@ -261,8 +262,8 @@ class MediaStreamVideoSourceTest : public ::testing::Test {
int number_of_failed_constraints_applied_;
content::MediaStreamRequestResult result_;
blink::WebString result_name_;
- blink::WebMediaStreamSource webkit_source_;
- // |mock_source_| is owned by |webkit_source_|.
+ blink::WebMediaStreamSource web_source_;
+ // |mock_source_| is owned by |web_source_|.
MockMediaStreamVideoSource* mock_source_;
};
@@ -697,4 +698,23 @@ TEST_F(MediaStreamVideoSourceTest, StopSuspendedTrack) {
EXPECT_FALSE(mock_source()->is_suspended());
}
+TEST_F(MediaStreamVideoSourceTest, AddTrackAfterStoppingSource) {
+ blink::WebMediaStreamTrack web_track1 = CreateTrack("123");
+ mock_source()->StartMockedSource();
+ EXPECT_EQ(1, NumberOfSuccessConstraintsCallbacks());
+ EXPECT_EQ(0, NumberOfFailedConstraintsCallbacks());
+
+ MediaStreamVideoTrack* track1 =
+ MediaStreamVideoTrack::GetVideoTrack(web_track1);
+ EXPECT_CALL(*this, MockNotification());
+ // This is equivalent to track.stop() in JavaScript.
+ track1->StopAndNotify(base::BindOnce(
+ &MediaStreamVideoSourceTest::MockNotification, base::Unretained(this)));
+
+ blink::WebMediaStreamTrack track2 = CreateTrack("456");
+ base::RunLoop().RunUntilIdle();
+ EXPECT_EQ(1, NumberOfSuccessConstraintsCallbacks());
+ EXPECT_EQ(1, NumberOfFailedConstraintsCallbacks());
+}
+
} // namespace content
diff --git a/chromium/content/renderer/media/stream/media_stream_video_track.cc b/chromium/content/renderer/media/stream/media_stream_video_track.cc
index 5a30ba62c91..b2f048e9d25 100644
--- a/chromium/content/renderer/media/stream/media_stream_video_track.cc
+++ b/chromium/content/renderer/media/stream/media_stream_video_track.cc
@@ -128,7 +128,7 @@ void MediaStreamVideoTrack::FrameDeliverer::RemoveCallbackOnIO(
VideoSinkId id,
const scoped_refptr<base::SingleThreadTaskRunner>& task_runner) {
DCHECK(io_task_runner_->BelongsToCurrentThread());
- std::vector<VideoIdCallbackPair>::iterator it = callbacks_.begin();
+ auto it = callbacks_.begin();
for (; it != callbacks_.end(); ++it) {
if (it->first == id) {
// Callback is copied to heap and then deleted on the target thread.
@@ -318,8 +318,7 @@ void MediaStreamVideoTrack::AddSink(MediaStreamVideoSink* sink,
void MediaStreamVideoTrack::RemoveSink(MediaStreamVideoSink* sink) {
DCHECK(main_render_thread_checker_.CalledOnValidThread());
- std::vector<MediaStreamVideoSink*>::iterator it =
- std::find(sinks_.begin(), sinks_.end(), sink);
+ auto it = std::find(sinks_.begin(), sinks_.end(), sink);
DCHECK(it != sinks_.end());
sinks_.erase(it);
frame_deliverer_->RemoveCallback(sink);
@@ -390,6 +389,12 @@ void MediaStreamVideoTrack::GetSettings(
settings.focal_length_x = calibration->focal_length_x;
settings.focal_length_y = calibration->focal_length_y;
}
+ if (source_->device().display_media_info.has_value()) {
+ const auto& info = source_->device().display_media_info.value();
+ settings.display_surface = ToWebDisplaySurface(info->display_surface);
+ settings.logical_surface = info->logical_surface;
+ settings.cursor = ToWebCursorCaptureType(info->cursor);
+ }
}
void MediaStreamVideoTrack::OnReadyStateChanged(
diff --git a/chromium/content/renderer/media/stream/processed_local_audio_source.cc b/chromium/content/renderer/media/stream/processed_local_audio_source.cc
index 7b6cfe95d94..bd946623d17 100644
--- a/chromium/content/renderer/media/stream/processed_local_audio_source.cc
+++ b/chromium/content/renderer/media/stream/processed_local_audio_source.cc
@@ -192,8 +192,8 @@ bool ProcessedLocalAudioSource::EnsureSourceIsStarted() {
UMA_HISTOGRAM_ENUMERATION(
"WebRTC.AudioInputSampleRate", asr, media::kAudioSampleRateMax + 1);
} else {
- UMA_HISTOGRAM_COUNTS("WebRTC.AudioInputSampleRateUnexpected",
- device().input.sample_rate());
+ UMA_HISTOGRAM_COUNTS_1M("WebRTC.AudioInputSampleRateUnexpected",
+ device().input.sample_rate());
}
// Determine the audio format required of the AudioCapturerSource. Then, pass
diff --git a/chromium/content/renderer/media/stream/processed_local_audio_source_unittest.cc b/chromium/content/renderer/media/stream/processed_local_audio_source_unittest.cc
index 6cc48ae1c3e..50003e741e3 100644
--- a/chromium/content/renderer/media/stream/processed_local_audio_source_unittest.cc
+++ b/chromium/content/renderer/media/stream/processed_local_audio_source_unittest.cc
@@ -6,7 +6,7 @@
#include <string>
#include "base/logging.h"
-#include "base/message_loop/message_loop.h"
+#include "base/test/scoped_task_environment.h"
#include "build/build_config.h"
#include "content/public/renderer/media_stream_audio_sink.h"
#include "content/renderer/media/audio/mock_audio_device_factory.h"
@@ -144,7 +144,8 @@ class ProcessedLocalAudioSourceTest : public testing::Test {
const blink::WebString& result_name) {}
private:
- base::MessageLoop main_thread_message_loop_; // Needed for MSAudioProcessor.
+ base::test::ScopedTaskEnvironment
+ task_environment_; // Needed for MSAudioProcessor.
MockAudioDeviceFactory mock_audio_device_factory_;
MockPeerConnectionDependencyFactory mock_dependency_factory_;
blink::WebMediaStreamSource blink_audio_source_;
diff --git a/chromium/content/renderer/media/stream/user_media_client_impl.cc b/chromium/content/renderer/media/stream/user_media_client_impl.cc
index 56cc10384ac..738a84c66ed 100644
--- a/chromium/content/renderer/media/stream/user_media_client_impl.cc
+++ b/chromium/content/renderer/media/stream/user_media_client_impl.cc
@@ -194,6 +194,10 @@ void UserMediaClientImpl::StopTrack(
MaybeProcessNextRequestInfo();
}
+bool UserMediaClientImpl::IsCapturing() {
+ return user_media_processor_->HasActiveSources();
+}
+
void UserMediaClientImpl::MaybeProcessNextRequestInfo() {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
if (is_processing_request_ || pending_request_infos_.empty())
diff --git a/chromium/content/renderer/media/stream/user_media_client_impl.h b/chromium/content/renderer/media/stream/user_media_client_impl.h
index b87767c1b3b..1882252ddb5 100644
--- a/chromium/content/renderer/media/stream/user_media_client_impl.h
+++ b/chromium/content/renderer/media/stream/user_media_client_impl.h
@@ -59,6 +59,7 @@ class CONTENT_EXPORT UserMediaClientImpl : public RenderFrameObserver,
void ApplyConstraints(
const blink::WebApplyConstraintsRequest& web_request) override;
void StopTrack(const blink::WebMediaStreamTrack& web_track) override;
+ bool IsCapturing() override;
// RenderFrameObserver override
void WillCommitProvisionalLoad() override;
diff --git a/chromium/content/renderer/media/stream/user_media_client_impl_unittest.cc b/chromium/content/renderer/media/stream/user_media_client_impl_unittest.cc
index 703ea988283..f2f19559296 100644
--- a/chromium/content/renderer/media/stream/user_media_client_impl_unittest.cc
+++ b/chromium/content/renderer/media/stream/user_media_client_impl_unittest.cc
@@ -884,8 +884,6 @@ TEST_F(UserMediaClientImplTest, DefaultConstraintsPropagate) {
MediaStreamVideoSource::kDefaultFrameRate);
EXPECT_EQ(video_capture_settings.ResolutionChangePolicy(),
media::ResolutionChangePolicy::FIXED_RESOLUTION);
- EXPECT_EQ(video_capture_settings.PowerLineFrequency(),
- media::PowerLineFrequency::FREQUENCY_DEFAULT);
EXPECT_FALSE(video_capture_settings.noise_reduction());
EXPECT_FALSE(video_capture_settings.min_frame_rate().has_value());
@@ -945,8 +943,6 @@ TEST_F(UserMediaClientImplTest, DefaultTabCapturePropagate) {
EXPECT_EQ(video_capture_settings.FrameRate(), kDefaultScreenCastFrameRate);
EXPECT_EQ(video_capture_settings.ResolutionChangePolicy(),
media::ResolutionChangePolicy::FIXED_RESOLUTION);
- EXPECT_EQ(video_capture_settings.PowerLineFrequency(),
- media::PowerLineFrequency::FREQUENCY_DEFAULT);
EXPECT_FALSE(video_capture_settings.noise_reduction());
EXPECT_FALSE(video_capture_settings.min_frame_rate().has_value());
EXPECT_FALSE(video_capture_settings.max_frame_rate().has_value());
@@ -1006,8 +1002,6 @@ TEST_F(UserMediaClientImplTest, DefaultDesktopCapturePropagate) {
EXPECT_EQ(video_capture_settings.FrameRate(), kDefaultScreenCastFrameRate);
EXPECT_EQ(video_capture_settings.ResolutionChangePolicy(),
media::ResolutionChangePolicy::ANY_WITHIN_LIMIT);
- EXPECT_EQ(video_capture_settings.PowerLineFrequency(),
- media::PowerLineFrequency::FREQUENCY_DEFAULT);
EXPECT_FALSE(video_capture_settings.noise_reduction());
EXPECT_FALSE(video_capture_settings.min_frame_rate().has_value());
EXPECT_FALSE(video_capture_settings.max_frame_rate().has_value());
@@ -1339,4 +1333,19 @@ TEST_F(UserMediaClientImplTest,
EXPECT_FALSE(source->device().matched_output_device_id);
}
+TEST_F(UserMediaClientImplTest, IsCapturing) {
+ EXPECT_FALSE(user_media_client_impl_->IsCapturing());
+ EXPECT_CALL(mock_dispatcher_host_, OnStreamStarted(_));
+ blink::WebMediaStream stream = RequestLocalMediaStream();
+ EXPECT_TRUE(user_media_client_impl_->IsCapturing());
+
+ user_media_client_impl_->StopTrack(stream.AudioTracks()[0]);
+ base::RunLoop().RunUntilIdle();
+ EXPECT_TRUE(user_media_client_impl_->IsCapturing());
+
+ user_media_client_impl_->StopTrack(stream.VideoTracks()[0]);
+ base::RunLoop().RunUntilIdle();
+ EXPECT_FALSE(user_media_client_impl_->IsCapturing());
+}
+
} // namespace content
diff --git a/chromium/content/renderer/media/stream/user_media_processor.cc b/chromium/content/renderer/media/stream/user_media_processor.cc
index 150e73cdbbb..86c723b009e 100644
--- a/chromium/content/renderer/media/stream/user_media_processor.cc
+++ b/chromium/content/renderer/media/stream/user_media_processor.cc
@@ -609,10 +609,6 @@ void UserMediaProcessor::SelectVideoDeviceSettings(
VideoDeviceCaptureCapabilities capabilities;
capabilities.device_capabilities = std::move(video_input_capabilities);
- capabilities.power_line_capabilities = {
- media::PowerLineFrequency::FREQUENCY_DEFAULT,
- media::PowerLineFrequency::FREQUENCY_50HZ,
- media::PowerLineFrequency::FREQUENCY_60HZ};
capabilities.noise_reduction_capabilities = {base::Optional<bool>(),
base::Optional<bool>(true),
base::Optional<bool>(false)};
@@ -1256,7 +1252,7 @@ bool UserMediaProcessor::RemoveLocalSource(
const blink::WebMediaStreamSource& source) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
- for (LocalStreamSources::iterator device_it = local_sources_.begin();
+ for (auto device_it = local_sources_.begin();
device_it != local_sources_.end(); ++device_it) {
if (IsSameSource(*device_it, source)) {
local_sources_.erase(device_it);
@@ -1265,7 +1261,7 @@ bool UserMediaProcessor::RemoveLocalSource(
}
// Check if the source was pending.
- for (LocalStreamSources::iterator device_it = pending_local_sources_.begin();
+ for (auto device_it = pending_local_sources_.begin();
device_it != pending_local_sources_.end(); ++device_it) {
if (IsSameSource(*device_it, source)) {
MediaStreamSource* const source_extra_data =
@@ -1376,6 +1372,10 @@ void UserMediaProcessor::StopLocalSource(
source_impl->StopSource();
}
+bool UserMediaProcessor::HasActiveSources() const {
+ return !local_sources_.empty();
+}
+
const mojom::MediaStreamDispatcherHostPtr&
UserMediaProcessor::GetMediaStreamDispatcherHost() {
if (!dispatcher_host_) {
diff --git a/chromium/content/renderer/media/stream/user_media_processor.h b/chromium/content/renderer/media/stream/user_media_processor.h
index 6b9e18f85e0..f323c3170ab 100644
--- a/chromium/content/renderer/media/stream/user_media_processor.h
+++ b/chromium/content/renderer/media/stream/user_media_processor.h
@@ -105,6 +105,8 @@ class CONTENT_EXPORT UserMediaProcessor
return media_stream_device_observer_.get();
}
+ bool HasActiveSources() const;
+
// MediaStreamDispatcherEventHandler implementation.
void OnDeviceStopped(const MediaStreamDevice& device) override;
diff --git a/chromium/content/renderer/media/stream/video_track_adapter.cc b/chromium/content/renderer/media/stream/video_track_adapter.cc
index 28727ef7b1c..cbe15d6b527 100644
--- a/chromium/content/renderer/media/stream/video_track_adapter.cc
+++ b/chromium/content/renderer/media/stream/video_track_adapter.cc
@@ -179,7 +179,7 @@ void VideoTrackAdapter::VideoFrameResolutionAdapter::AddCallback(
void VideoTrackAdapter::VideoFrameResolutionAdapter::RemoveAndReleaseCallback(
const MediaStreamVideoTrack* track) {
DCHECK(io_thread_checker_.CalledOnValidThread());
- std::vector<VideoIdCallbackPair>::iterator it = callbacks_.begin();
+ auto it = callbacks_.begin();
for (; it != callbacks_.end(); ++it) {
if (it->first == track) {
// Make sure the VideoCaptureDeliverFrameCB is released on the main
diff --git a/chromium/content/renderer/media/stream/webmediaplayer_ms.cc b/chromium/content/renderer/media/stream/webmediaplayer_ms.cc
index 45c8d7ee9d2..5031219c66a 100644
--- a/chromium/content/renderer/media/stream/webmediaplayer_ms.cc
+++ b/chromium/content/renderer/media/stream/webmediaplayer_ms.cc
@@ -174,13 +174,17 @@ class WebMediaPlayerMS::FrameDeliverer {
void EnqueueFrame(const scoped_refptr<media::VideoFrame>& frame) {
DCHECK(io_thread_checker_.CalledOnValidThread());
- base::TimeTicks render_time;
- if (frame->metadata()->GetTimeTicks(
- media::VideoFrameMetadata::REFERENCE_TIME, &render_time)) {
- TRACE_EVENT1("media", "EnqueueFrame", "Ideal Render Instant",
- render_time.ToInternalValue());
- } else {
- TRACE_EVENT0("media", "EnqueueFrame");
+ bool tracing_enabled = false;
+ TRACE_EVENT_CATEGORY_GROUP_ENABLED("media", &tracing_enabled);
+ if (tracing_enabled) {
+ base::TimeTicks render_time;
+ if (frame->metadata()->GetTimeTicks(
+ media::VideoFrameMetadata::REFERENCE_TIME, &render_time)) {
+ TRACE_EVENT1("media", "EnqueueFrame", "Ideal Render Instant",
+ render_time.ToInternalValue());
+ } else {
+ TRACE_EVENT0("media", "EnqueueFrame");
+ }
}
const bool is_opaque = media::IsOpaque(frame->format());
@@ -265,7 +269,8 @@ WebMediaPlayerMS::WebMediaPlayerMS(
media::GpuVideoAcceleratorFactories* gpu_factories,
const blink::WebString& sink_id,
CreateSurfaceLayerBridgeCB create_bridge_callback,
- bool surface_layer_for_video_enabled)
+ std::unique_ptr<blink::WebVideoFrameSubmitter> submitter,
+ blink::WebMediaPlayer::SurfaceLayerMode surface_layer_mode)
: frame_(frame),
network_state_(WebMediaPlayer::kNetworkStateEmpty),
ready_state_(WebMediaPlayer::kReadyStateHaveNothing),
@@ -287,7 +292,8 @@ WebMediaPlayerMS::WebMediaPlayerMS(
volume_multiplier_(1.0),
should_play_upon_shown_(false),
create_bridge_callback_(std::move(create_bridge_callback)),
- surface_layer_for_video_enabled_(surface_layer_for_video_enabled) {
+ submitter_(std::move(submitter)),
+ surface_layer_mode_(surface_layer_mode) {
DVLOG(1) << __func__;
DCHECK(client);
DCHECK(delegate_);
@@ -307,7 +313,8 @@ WebMediaPlayerMS::~WebMediaPlayerMS() {
// Destruct compositor resources in the proper order.
get_client()->SetCcLayer(nullptr);
if (video_layer_) {
- DCHECK(!surface_layer_for_video_enabled_);
+ DCHECK(surface_layer_mode_ !=
+ blink::WebMediaPlayer::SurfaceLayerMode::kAlways);
video_layer_->StopUsingProvider();
}
@@ -345,7 +352,8 @@ blink::WebMediaPlayer::LoadTiming WebMediaPlayerMS::Load(
web_stream_.AddObserver(this);
compositor_ = new WebMediaPlayerMSCompositor(
- compositor_task_runner_, io_task_runner_, web_stream_, AsWeakPtr());
+ compositor_task_runner_, io_task_runner_, web_stream_,
+ std::move(submitter_), surface_layer_mode_, AsWeakPtr());
SetNetworkState(WebMediaPlayer::kNetworkStateLoading);
SetReadyState(WebMediaPlayer::kReadyStateHaveNothing);
@@ -371,8 +379,7 @@ blink::WebMediaPlayer::LoadTiming WebMediaPlayerMS::Load(
if (frame) {
// Report UMA and RAPPOR metrics.
- media::ReportMetrics(load_type, url, frame_->GetSecurityOrigin(),
- media_log_.get());
+ media::ReportMetrics(load_type, url, *frame_, media_log_.get());
routing_id = frame->GetRoutingID();
}
@@ -442,8 +449,15 @@ void WebMediaPlayerMS::UnregisterContentsLayer(cc::Layer* layer) {
}
void WebMediaPlayerMS::OnSurfaceIdUpdated(viz::SurfaceId surface_id) {
- // TODO(apacible): Add implementation. See http://crbug/746182.
- NOTIMPLEMENTED();
+ // TODO(726619): Handle the behavior when Picture-in-Picture mode is
+ // disabled.
+ // The viz::SurfaceId may be updated when the video begins playback or when
+ // the size of the video changes.
+ if (client_ && IsInPictureInPicture() && !client_->IsInAutoPIP()) {
+ delegate_->DidPictureInPictureSurfaceChange(
+ delegate_id_, surface_id, NaturalSize(),
+ false /* show_play_pause_button */);
+ }
}
void WebMediaPlayerMS::TrackAdded(const blink::WebMediaStreamTrack& track) {
@@ -655,36 +669,42 @@ void WebMediaPlayerMS::SetVolume(double volume) {
void WebMediaPlayerMS::EnterPictureInPicture(
blink::WebMediaPlayer::PipWindowOpenedCallback callback) {
- // TODO(crbug.com/806249): Use Picture-in-Picture window size.
- std::move(callback).Run(this->NaturalSize());
+ if (!bridge_)
+ ActivateSurfaceLayerForVideo();
- NOTIMPLEMENTED();
- // TODO(apacible): Implement after video in surfaces is supported for
- // WebMediaPlayerMS. See http://crbug/746182.
+ DCHECK(bridge_);
+
+ const viz::SurfaceId& surface_id = bridge_->GetSurfaceId();
+ DCHECK(surface_id.is_valid());
+
+ // Notifies the browser process that the player should now be in
+ // Picture-in-Picture mode.
+ delegate_->DidPictureInPictureModeStart(delegate_id_, surface_id,
+ NaturalSize(), std::move(callback),
+ false /* show_play_pause_button */);
}
void WebMediaPlayerMS::ExitPictureInPicture(
blink::WebMediaPlayer::PipWindowClosedCallback callback) {
- // TODO(crbug.com/806249): Run callback when Picture-in-Picture window closes.
- std::move(callback).Run();
+ // Notifies the browser process that Picture-in-Picture has ended. It will
+ // clear out the states and close the window.
+ delegate_->DidPictureInPictureModeEnd(delegate_id_, std::move(callback));
- NOTIMPLEMENTED();
- // TODO(apacible): Implement after video in surfaces is supported for
- // WebMediaPlayerMS. See http://crbug/746182.
+ // Internal cleanups.
+ OnPictureInPictureModeEnded();
}
void WebMediaPlayerMS::SetPictureInPictureCustomControls(
const std::vector<blink::PictureInPictureControlInfo>& controls) {
- NOTIMPLEMENTED();
- // TODO(apacible): Implement after video in surfaces is supported for
- // WebMediaPlayerMS. See http://crbug/746182.
+ delegate_->DidSetPictureInPictureCustomControls(delegate_id_, controls);
}
void WebMediaPlayerMS::RegisterPictureInPictureWindowResizeCallback(
- blink::WebMediaPlayer::PipWindowResizedCallback) {
- NOTIMPLEMENTED();
- // TODO(apacible): Implement after video in surfaces is supported for
- // WebMediaPlayerMS. See http://crbug/746182.
+ blink::WebMediaPlayer::PipWindowResizedCallback callback) {
+ DCHECK(IsInPictureInPicture() && !client_->IsInAutoPIP());
+
+ delegate_->RegisterPictureInPictureWindowResizeCallback(delegate_id_,
+ std::move(callback));
}
void WebMediaPlayerMS::SetSinkId(
@@ -780,6 +800,11 @@ blink::WebMediaPlayer::ReadyState WebMediaPlayerMS::GetReadyState() const {
return ready_state_;
}
+blink::WebMediaPlayer::SurfaceLayerMode
+WebMediaPlayerMS::GetVideoSurfaceLayerMode() const {
+ return surface_layer_mode_;
+}
+
blink::WebString WebMediaPlayerMS::GetErrorMessage() const {
return blink::WebString::FromUTF8(media_log_->GetErrorMessage());
}
@@ -811,6 +836,7 @@ void WebMediaPlayerMS::Paint(cc::PaintCanvas* canvas,
compositor_->GetCurrentFrameWithoutUpdatingStatistics();
media::Context3D context_3d;
+ gpu::ContextSupport* context_support = nullptr;
if (frame && frame->HasTextures()) {
auto* provider =
RenderThreadImpl::current()->SharedMainThreadContextProvider().get();
@@ -818,28 +844,18 @@ void WebMediaPlayerMS::Paint(cc::PaintCanvas* canvas,
if (!provider)
return;
context_3d = media::Context3D(provider->ContextGL(), provider->GrContext());
- DCHECK(context_3d.gl);
+ context_support = provider->ContextSupport();
}
const gfx::RectF dest_rect(rect.x, rect.y, rect.width, rect.height);
video_renderer_.Paint(frame, canvas, dest_rect, flags, video_rotation_,
- context_3d);
+ context_3d, context_support);
}
-bool WebMediaPlayerMS::DidGetOpaqueResponseFromServiceWorker() const {
+bool WebMediaPlayerMS::WouldTaintOrigin() const {
DCHECK(thread_checker_.CalledOnValidThread());
return false;
}
-bool WebMediaPlayerMS::HasSingleSecurityOrigin() const {
- DCHECK(thread_checker_.CalledOnValidThread());
- return true;
-}
-
-bool WebMediaPlayerMS::DidPassCORSAccessCheck() const {
- DCHECK(thread_checker_.CalledOnValidThread());
- return true;
-}
-
double WebMediaPlayerMS::MediaTimeForTimeValue(double timeValue) const {
return base::TimeDelta::FromSecondsD(timeValue).InSecondsF();
}
@@ -954,7 +970,12 @@ void WebMediaPlayerMS::OnBecamePersistentVideo(bool value) {
}
void WebMediaPlayerMS::OnPictureInPictureModeEnded() {
- NOTIMPLEMENTED();
+ // It is possible for this method to be called when the player is no longer in
+ // Picture-in-Picture mode.
+ if (!client_ || !IsInPictureInPicture())
+ return;
+
+ client_->PictureInPictureStopped();
}
void WebMediaPlayerMS::OnPictureInPictureControlClicked(
@@ -993,8 +1014,8 @@ bool WebMediaPlayerMS::CopyVideoTextureToPlatformTexture(
DCHECK(context_3d.gl);
return video_renderer_.CopyVideoFrameTexturesToGLTexture(
- context_3d, gl, video_frame.get(), target, texture, internal_format,
- format, type, level, premultiply_alpha, flip_y);
+ context_3d, provider->ContextSupport(), gl, video_frame.get(), target,
+ texture, internal_format, format, type, level, premultiply_alpha, flip_y);
}
bool WebMediaPlayerMS::CopyVideoYUVDataToPlatformTexture(
@@ -1075,17 +1096,59 @@ bool WebMediaPlayerMS::TexImageImpl(TexImageFunctionID functionID,
return false;
}
+void WebMediaPlayerMS::OnFrameSinkDestroyed() {
+ bridge_->ClearSurfaceId();
+}
+
+void WebMediaPlayerMS::ActivateSurfaceLayerForVideo() {
+ // Note that we might or might not already be in VideoLayer mode.
+ DCHECK(!bridge_);
+
+ // If we're in VideoLayer mode, then get rid of the layer.
+ if (video_layer_) {
+ client_->SetCcLayer(nullptr);
+ video_layer_ = nullptr;
+ }
+
+ bridge_ = std::move(create_bridge_callback_)
+ .Run(this, compositor_->GetUpdateSubmissionStateCallback());
+ bridge_->CreateSurfaceLayer();
+ bridge_->SetContentsOpaque(opaque_);
+
+ compositor_task_runner_->PostTask(
+ FROM_HERE,
+ base::BindOnce(
+ &WebMediaPlayerMSCompositor::EnableSubmission, compositor_,
+ bridge_->GetSurfaceId(), video_rotation_, IsInPictureInPicture(),
+ opaque_,
+ media::BindToCurrentLoop(base::BindRepeating(
+ &WebMediaPlayerMS::OnFrameSinkDestroyed, AsWeakPtr()))));
+
+ // If the element is already in Picture-in-Picture mode, it means that it
+ // was set in this mode prior to this load, with a different
+ // WebMediaPlayerImpl. The new player needs to send its id, size and
+ // surface id to the browser process to make sure the states are properly
+ // updated.
+ // TODO(872056): the surface should be activated but for some reason, it
+ // does not. It is possible that this will no longer be needed after 872056
+ // is fixed.
+ if (client_->DisplayType() ==
+ WebMediaPlayer::DisplayType::kPictureInPicture) {
+ OnSurfaceIdUpdated(bridge_->GetSurfaceId());
+ }
+}
+
void WebMediaPlayerMS::OnFirstFrameReceived(media::VideoRotation video_rotation,
bool is_opaque) {
DVLOG(1) << __func__;
DCHECK(thread_checker_.CalledOnValidThread());
- if (surface_layer_for_video_enabled_) {
- DCHECK(!bridge_);
-
- bridge_ = std::move(create_bridge_callback_)
- .Run(this, compositor_->GetUpdateSubmissionStateCallback());
- bridge_->CreateSurfaceLayer();
+ if (surface_layer_mode_ == blink::WebMediaPlayer::SurfaceLayerMode::kAlways ||
+ (surface_layer_mode_ ==
+ blink::WebMediaPlayer::SurfaceLayerMode::kOnDemand &&
+ client_->DisplayType() ==
+ WebMediaPlayer::DisplayType::kPictureInPicture)) {
+ ActivateSurfaceLayerForVideo();
}
SetReadyState(WebMediaPlayer::kReadyStateHaveMetadata);
@@ -1108,6 +1171,9 @@ void WebMediaPlayerMS::OnOpacityChanged(bool is_opaque) {
DCHECK(bridge_);
bridge_->SetContentsOpaque(opaque_);
+ compositor_task_runner_->PostTask(
+ FROM_HERE, base::BindOnce(&WebMediaPlayerMSCompositor::UpdateIsOpaque,
+ compositor_, opaque_));
}
}
@@ -1128,12 +1194,20 @@ void WebMediaPlayerMS::OnRotationChanged(media::VideoRotation video_rotation,
get_client()->SetCcLayer(new_video_layer.get());
video_layer_ = std::move(new_video_layer);
- } else if (bridge_->GetCcLayer()) {
- // TODO(lethalantidote): Handle rotation.
- bridge_->SetContentsOpaque(opaque_);
+ } else {
+ compositor_task_runner_->PostTask(
+ FROM_HERE, base::BindOnce(&WebMediaPlayerMSCompositor::UpdateRotation,
+ compositor_, video_rotation));
}
}
+bool WebMediaPlayerMS::IsInPictureInPicture() const {
+ DCHECK(client_);
+ return (!client_->IsInAutoPIP() &&
+ client_->DisplayType() ==
+ WebMediaPlayer::DisplayType::kPictureInPicture);
+}
+
void WebMediaPlayerMS::RepaintInternal() {
DVLOG(1) << __func__;
DCHECK(thread_checker_.CalledOnValidThread());
@@ -1183,4 +1257,17 @@ void WebMediaPlayerMS::SetGpuMemoryBufferVideoForTesting(
frame_deliverer_->gpu_memory_buffer_pool_.reset(gpu_memory_buffer_pool);
}
+void WebMediaPlayerMS::OnDisplayTypeChanged(
+ WebMediaPlayer::DisplayType display_type) {
+ if (!bridge_)
+ return;
+
+ compositor_task_runner_->PostTask(
+ FROM_HERE,
+ base::BindOnce(
+ &WebMediaPlayerMSCompositor::SetForceSubmit,
+ base::Unretained(compositor_.get()),
+ display_type == WebMediaPlayer::DisplayType::kPictureInPicture));
+}
+
} // namespace content
diff --git a/chromium/content/renderer/media/stream/webmediaplayer_ms.h b/chromium/content/renderer/media/stream/webmediaplayer_ms.h
index 8408a5d40b5..8f78f0b9ab2 100644
--- a/chromium/content/renderer/media/stream/webmediaplayer_ms.h
+++ b/chromium/content/renderer/media/stream/webmediaplayer_ms.h
@@ -16,6 +16,7 @@
#include "build/build_config.h"
#include "content/common/content_export.h"
#include "media/blink/webmediaplayer_delegate.h"
+#include "media/blink/webmediaplayer_params.h"
#include "media/blink/webmediaplayer_util.h"
#include "media/renderers/paint_canvas_video_renderer.h"
#include "media/video/gpu_video_accelerator_factories.h"
@@ -27,6 +28,7 @@ namespace blink {
class WebLocalFrame;
class WebMediaPlayerClient;
class WebString;
+class WebVideoFrameSubmitter;
}
namespace media {
@@ -91,7 +93,8 @@ class CONTENT_EXPORT WebMediaPlayerMS
media::GpuVideoAcceleratorFactories* gpu_factories,
const blink::WebString& sink_id,
CreateSurfaceLayerBridgeCB create_bridge_callback,
- bool surface_layer_for_video_enabled_);
+ std::unique_ptr<blink::WebVideoFrameSubmitter> submitter_,
+ blink::WebMediaPlayer::SurfaceLayerMode surface_layer_mode);
~WebMediaPlayerMS() override;
@@ -157,12 +160,13 @@ class CONTENT_EXPORT WebMediaPlayerMS
blink::WebMediaPlayer::NetworkState GetNetworkState() const override;
blink::WebMediaPlayer::ReadyState GetReadyState() const override;
+ blink::WebMediaPlayer::SurfaceLayerMode GetVideoSurfaceLayerMode()
+ const override;
+
blink::WebString GetErrorMessage() const override;
bool DidLoadingProgress() override;
- bool DidGetOpaqueResponseFromServiceWorker() const override;
- bool HasSingleSecurityOrigin() const override;
- bool DidPassCORSAccessCheck() const override;
+ bool WouldTaintOrigin() const override;
double MediaTimeForTimeValue(double timeValue) const override;
@@ -230,6 +234,8 @@ class CONTENT_EXPORT WebMediaPlayerMS
void TrackRemoved(const blink::WebMediaStreamTrack& track) override;
void ActiveStateChanged(bool is_active) override;
+ void OnDisplayTypeChanged(WebMediaPlayer::DisplayType) override;
+
private:
friend class WebMediaPlayerMSTest;
@@ -237,11 +243,21 @@ class CONTENT_EXPORT WebMediaPlayerMS
static const gfx::Size kUseGpuMemoryBufferVideoFramesMinResolution;
#endif // defined(OS_WIN)
+ // When we lose the context_provider, we destroy the CompositorFrameSink to
+ // prevent frames from being submitted. The current surface_ids become
+ // invalid.
+ void OnFrameSinkDestroyed();
+
void OnFirstFrameReceived(media::VideoRotation video_rotation,
bool is_opaque);
void OnOpacityChanged(bool is_opaque);
void OnRotationChanged(media::VideoRotation video_rotation, bool is_opaque);
+ bool IsInPictureInPicture() const;
+
+ // Switch to SurfaceLayer, either initially or from VideoLayer.
+ void ActivateSurfaceLayerForVideo();
+
// Need repaint due to state change.
void RepaintInternal();
@@ -309,6 +325,7 @@ class CONTENT_EXPORT WebMediaPlayerMS
const scoped_refptr<base::SingleThreadTaskRunner> io_task_runner_;
const scoped_refptr<base::SingleThreadTaskRunner> compositor_task_runner_;
const scoped_refptr<base::SingleThreadTaskRunner> media_task_runner_;
+
const scoped_refptr<base::TaskRunner> worker_task_runner_;
media::GpuVideoAcceleratorFactories* gpu_factories_;
@@ -336,8 +353,11 @@ class CONTENT_EXPORT WebMediaPlayerMS
CreateSurfaceLayerBridgeCB create_bridge_callback_;
+ std::unique_ptr<blink::WebVideoFrameSubmitter> submitter_;
+
// Whether the use of a surface layer instead of a video layer is enabled.
- bool surface_layer_for_video_enabled_ = false;
+ blink::WebMediaPlayer::SurfaceLayerMode surface_layer_mode_ =
+ blink::WebMediaPlayer::SurfaceLayerMode::kNever;
// Owns the weblayer and obtains/maintains SurfaceIds for
// kUseSurfaceLayerForVideo feature.
diff --git a/chromium/content/renderer/media/stream/webmediaplayer_ms_compositor.cc b/chromium/content/renderer/media/stream/webmediaplayer_ms_compositor.cc
index 02b2d8ddb0f..02fc7ee8551 100644
--- a/chromium/content/renderer/media/stream/webmediaplayer_ms_compositor.cc
+++ b/chromium/content/renderer/media/stream/webmediaplayer_ms_compositor.cc
@@ -27,6 +27,7 @@
#include "third_party/blink/public/platform/web_media_stream.h"
#include "third_party/blink/public/platform/web_media_stream_source.h"
#include "third_party/blink/public/platform/web_media_stream_track.h"
+#include "third_party/blink/public/platform/web_video_frame_submitter.h"
#include "third_party/libyuv/include/libyuv/convert.h"
#include "third_party/libyuv/include/libyuv/planar_functions.h"
#include "third_party/libyuv/include/libyuv/video_common.h"
@@ -67,7 +68,8 @@ scoped_refptr<media::VideoFrame> CopyFrame(
DCHECK(provider->ContextGL());
video_renderer->Copy(
frame.get(), &paint_canvas,
- media::Context3D(provider->ContextGL(), provider->GrContext()));
+ media::Context3D(provider->ContextGL(), provider->GrContext()),
+ provider->ContextSupport());
SkPixmap pixmap;
const bool result = bitmap.peekPixels(&pixmap);
@@ -128,11 +130,16 @@ scoped_refptr<media::VideoFrame> CopyFrame(
} // anonymous namespace
WebMediaPlayerMSCompositor::WebMediaPlayerMSCompositor(
- scoped_refptr<base::SingleThreadTaskRunner> compositor_task_runner,
+ scoped_refptr<base::SingleThreadTaskRunner>
+ video_frame_compositor_task_runner,
scoped_refptr<base::SingleThreadTaskRunner> io_task_runner,
const blink::WebMediaStream& web_stream,
+ std::unique_ptr<blink::WebVideoFrameSubmitter> submitter,
+ blink::WebMediaPlayer::SurfaceLayerMode surface_layer_mode,
const base::WeakPtr<WebMediaPlayerMS>& player)
- : compositor_task_runner_(compositor_task_runner),
+ : RefCountedDeleteOnSequence<WebMediaPlayerMSCompositor>(
+ video_frame_compositor_task_runner),
+ video_frame_compositor_task_runner_(video_frame_compositor_task_runner),
io_task_runner_(io_task_runner),
player_(player),
video_frame_provider_client_(nullptr),
@@ -141,9 +148,23 @@ WebMediaPlayerMSCompositor::WebMediaPlayerMSCompositor(
total_frame_count_(0),
dropped_frame_count_(0),
stopped_(true),
- render_started_(!stopped_) {
+ render_started_(!stopped_),
+ weak_ptr_factory_(this) {
main_message_loop_ = base::MessageLoopCurrent::Get();
+ if (surface_layer_mode != blink::WebMediaPlayer::SurfaceLayerMode::kNever) {
+ submitter_ = std::move(submitter);
+
+ video_frame_compositor_task_runner_->PostTask(
+ FROM_HERE,
+ base::BindOnce(&WebMediaPlayerMSCompositor::InitializeSubmitter,
+ weak_ptr_factory_.GetWeakPtr()));
+ update_submission_state_callback_ = media::BindToLoop(
+ video_frame_compositor_task_runner_,
+ base::BindRepeating(&WebMediaPlayerMSCompositor::UpdateSubmissionState,
+ weak_ptr_factory_.GetWeakPtr()));
+ }
+
blink::WebVector<blink::WebMediaStreamTrack> video_tracks;
if (!web_stream.IsNull())
video_tracks = web_stream.VideoTracks();
@@ -168,8 +189,64 @@ WebMediaPlayerMSCompositor::WebMediaPlayerMSCompositor(
}
WebMediaPlayerMSCompositor::~WebMediaPlayerMSCompositor() {
- DCHECK(!video_frame_provider_client_)
- << "Must call StopUsingProvider() before dtor!";
+ if (submitter_) {
+ video_frame_compositor_task_runner_->DeleteSoon(FROM_HERE,
+ std::move(submitter_));
+ } else {
+ DCHECK(!video_frame_provider_client_)
+ << "Must call StopUsingProvider() before dtor!";
+ }
+}
+
+void WebMediaPlayerMSCompositor::InitializeSubmitter() {
+ DCHECK(video_frame_compositor_task_runner_->BelongsToCurrentThread());
+ submitter_->Initialize(this);
+}
+
+void WebMediaPlayerMSCompositor::UpdateSubmissionState(bool state) {
+ DCHECK(video_frame_compositor_task_runner_->BelongsToCurrentThread());
+ submitter_->UpdateSubmissionState(state);
+}
+
+// TODO(https://crbug/879424): Rename, since it really doesn't enable
+// submission. Do this along with the VideoFrameSubmitter refactor.
+void WebMediaPlayerMSCompositor::EnableSubmission(
+ const viz::SurfaceId& id,
+ media::VideoRotation rotation,
+ bool force_submit,
+ bool is_opaque,
+ blink::WebFrameSinkDestroyedCallback frame_sink_destroyed_callback) {
+ DCHECK(video_frame_compositor_task_runner_->BelongsToCurrentThread());
+
+ // If we're switching to |submitter_| from some other client, then tell it.
+ if (video_frame_provider_client_ &&
+ video_frame_provider_client_ != submitter_.get()) {
+ video_frame_provider_client_->StopUsingProvider();
+ }
+
+ submitter_->SetRotation(rotation);
+ submitter_->SetForceSubmit(force_submit);
+ submitter_->SetIsOpaque(is_opaque);
+ submitter_->EnableSubmission(id, std::move(frame_sink_destroyed_callback));
+ video_frame_provider_client_ = submitter_.get();
+
+ if (!stopped_)
+ video_frame_provider_client_->StartRendering();
+}
+
+void WebMediaPlayerMSCompositor::UpdateRotation(media::VideoRotation rotation) {
+ DCHECK(video_frame_compositor_task_runner_->BelongsToCurrentThread());
+ submitter_->SetRotation(rotation);
+}
+
+void WebMediaPlayerMSCompositor::SetForceSubmit(bool force_submit) {
+ DCHECK(video_frame_compositor_task_runner_->BelongsToCurrentThread());
+ submitter_->SetForceSubmit(force_submit);
+}
+
+void WebMediaPlayerMSCompositor::UpdateIsOpaque(bool is_opaque) {
+ DCHECK(video_frame_compositor_task_runner_->BelongsToCurrentThread());
+ submitter_->SetIsOpaque(is_opaque);
}
gfx::Size WebMediaPlayerMSCompositor::GetCurrentSize() {
@@ -200,7 +277,7 @@ size_t WebMediaPlayerMSCompositor::dropped_frame_count() {
void WebMediaPlayerMSCompositor::SetVideoFrameProviderClient(
cc::VideoFrameProvider::Client* client) {
- DCHECK(compositor_task_runner_->BelongsToCurrentThread());
+ DCHECK(video_frame_compositor_task_runner_->BelongsToCurrentThread());
if (video_frame_provider_client_)
video_frame_provider_client_->StopUsingProvider();
@@ -272,7 +349,7 @@ void WebMediaPlayerMSCompositor::EnqueueFrame(
bool WebMediaPlayerMSCompositor::UpdateCurrentFrame(
base::TimeTicks deadline_min,
base::TimeTicks deadline_max) {
- DCHECK(compositor_task_runner_->BelongsToCurrentThread());
+ DCHECK(video_frame_compositor_task_runner_->BelongsToCurrentThread());
TRACE_EVENT_BEGIN2("media", "UpdateCurrentFrame", "Actual Render Begin",
deadline_min.ToInternalValue(), "Actual Render End",
@@ -280,22 +357,27 @@ bool WebMediaPlayerMSCompositor::UpdateCurrentFrame(
if (stopped_)
return false;
- base::TimeTicks render_time;
-
base::AutoLock auto_lock(current_frame_lock_);
if (rendering_frame_buffer_)
RenderUsingAlgorithm(deadline_min, deadline_max);
- if (!current_frame_->metadata()->GetTimeTicks(
- media::VideoFrameMetadata::REFERENCE_TIME, &render_time)) {
- DCHECK(!rendering_frame_buffer_)
- << "VideoFrames need REFERENCE_TIME to use "
- "sophisticated video rendering algorithm.";
+ bool tracing_or_dcheck_enabled = false;
+ TRACE_EVENT_CATEGORY_GROUP_ENABLED("media", &tracing_or_dcheck_enabled);
+#if DCHECK_IS_ON()
+ tracing_or_dcheck_enabled = true;
+#endif // DCHECK_IS_ON()
+ if (tracing_or_dcheck_enabled) {
+ base::TimeTicks render_time;
+ if (!current_frame_->metadata()->GetTimeTicks(
+ media::VideoFrameMetadata::REFERENCE_TIME, &render_time)) {
+ DCHECK(!rendering_frame_buffer_)
+ << "VideoFrames need REFERENCE_TIME to use "
+ "sophisticated video rendering algorithm.";
+ }
+ TRACE_EVENT_END2("media", "UpdateCurrentFrame", "Ideal Render Instant",
+ render_time.ToInternalValue(), "Serial", serial_);
}
-
- TRACE_EVENT_END2("media", "UpdateCurrentFrame", "Ideal Render Instant",
- render_time.ToInternalValue(), "Serial", serial_);
return !current_frame_rendered_;
}
@@ -306,7 +388,7 @@ bool WebMediaPlayerMSCompositor::HasCurrentFrame() {
scoped_refptr<media::VideoFrame> WebMediaPlayerMSCompositor::GetCurrentFrame() {
DVLOG(3) << __func__;
- DCHECK(compositor_task_runner_->BelongsToCurrentThread());
+ DCHECK(video_frame_compositor_task_runner_->BelongsToCurrentThread());
base::AutoLock auto_lock(current_frame_lock_);
TRACE_EVENT_INSTANT1("media", "WebMediaPlayerMSCompositor::GetCurrentFrame",
TRACE_EVENT_SCOPE_THREAD, "Timestamp",
@@ -319,7 +401,7 @@ scoped_refptr<media::VideoFrame> WebMediaPlayerMSCompositor::GetCurrentFrame() {
void WebMediaPlayerMSCompositor::PutCurrentFrame() {
DVLOG(3) << __func__;
- DCHECK(compositor_task_runner_->BelongsToCurrentThread());
+ DCHECK(video_frame_compositor_task_runner_->BelongsToCurrentThread());
current_frame_rendered_ = true;
}
@@ -339,7 +421,7 @@ void WebMediaPlayerMSCompositor::StartRendering() {
base::AutoLock auto_lock(current_frame_lock_);
render_started_ = true;
}
- compositor_task_runner_->PostTask(
+ video_frame_compositor_task_runner_->PostTask(
FROM_HERE,
base::BindOnce(&WebMediaPlayerMSCompositor::StartRenderingInternal,
this));
@@ -347,7 +429,7 @@ void WebMediaPlayerMSCompositor::StartRendering() {
void WebMediaPlayerMSCompositor::StopRendering() {
DCHECK(thread_checker_.CalledOnValidThread());
- compositor_task_runner_->PostTask(
+ video_frame_compositor_task_runner_->PostTask(
FROM_HERE,
base::BindOnce(&WebMediaPlayerMSCompositor::StopRenderingInternal, this));
}
@@ -365,7 +447,7 @@ void WebMediaPlayerMSCompositor::ReplaceCurrentFrameWithACopy() {
void WebMediaPlayerMSCompositor::StopUsingProvider() {
DCHECK(thread_checker_.CalledOnValidThread());
- compositor_task_runner_->PostTask(
+ video_frame_compositor_task_runner_->PostTask(
FROM_HERE,
base::BindOnce(&WebMediaPlayerMSCompositor::StopUsingProviderInternal,
this));
@@ -374,7 +456,7 @@ void WebMediaPlayerMSCompositor::StopUsingProvider() {
bool WebMediaPlayerMSCompositor::MapTimestampsToRenderTimeTicks(
const std::vector<base::TimeDelta>& timestamps,
std::vector<base::TimeTicks>* wall_clock_times) {
- DCHECK(compositor_task_runner_->BelongsToCurrentThread() ||
+ DCHECK(video_frame_compositor_task_runner_->BelongsToCurrentThread() ||
thread_checker_.CalledOnValidThread() ||
io_task_runner_->BelongsToCurrentThread());
for (const base::TimeDelta& timestamp : timestamps) {
@@ -387,7 +469,7 @@ bool WebMediaPlayerMSCompositor::MapTimestampsToRenderTimeTicks(
void WebMediaPlayerMSCompositor::RenderUsingAlgorithm(
base::TimeTicks deadline_min,
base::TimeTicks deadline_max) {
- DCHECK(compositor_task_runner_->BelongsToCurrentThread());
+ DCHECK(video_frame_compositor_task_runner_->BelongsToCurrentThread());
current_frame_lock_.AssertAcquired();
last_deadline_max_ = deadline_max;
last_render_length_ = deadline_max - deadline_min;
@@ -417,7 +499,7 @@ void WebMediaPlayerMSCompositor::RenderUsingAlgorithm(
void WebMediaPlayerMSCompositor::RenderWithoutAlgorithm(
const scoped_refptr<media::VideoFrame>& frame) {
DCHECK(io_task_runner_->BelongsToCurrentThread());
- compositor_task_runner_->PostTask(
+ video_frame_compositor_task_runner_->PostTask(
FROM_HERE,
base::BindOnce(
&WebMediaPlayerMSCompositor::RenderWithoutAlgorithmOnCompositor, this,
@@ -426,7 +508,7 @@ void WebMediaPlayerMSCompositor::RenderWithoutAlgorithm(
void WebMediaPlayerMSCompositor::RenderWithoutAlgorithmOnCompositor(
const scoped_refptr<media::VideoFrame>& frame) {
- DCHECK(compositor_task_runner_->BelongsToCurrentThread());
+ DCHECK(video_frame_compositor_task_runner_->BelongsToCurrentThread());
{
base::AutoLock auto_lock(current_frame_lock_);
SetCurrentFrame(frame);
@@ -437,7 +519,7 @@ void WebMediaPlayerMSCompositor::RenderWithoutAlgorithmOnCompositor(
void WebMediaPlayerMSCompositor::SetCurrentFrame(
const scoped_refptr<media::VideoFrame>& frame) {
- DCHECK(compositor_task_runner_->BelongsToCurrentThread());
+ DCHECK(video_frame_compositor_task_runner_->BelongsToCurrentThread());
current_frame_lock_.AssertAcquired();
TRACE_EVENT_INSTANT1("media", "WebMediaPlayerMSCompositor::SetCurrentFrame",
TRACE_EVENT_SCOPE_THREAD, "Timestamp",
@@ -459,7 +541,7 @@ void WebMediaPlayerMSCompositor::SetCurrentFrame(
}
void WebMediaPlayerMSCompositor::StartRenderingInternal() {
- DCHECK(compositor_task_runner_->BelongsToCurrentThread());
+ DCHECK(video_frame_compositor_task_runner_->BelongsToCurrentThread());
stopped_ = false;
if (video_frame_provider_client_)
@@ -467,7 +549,7 @@ void WebMediaPlayerMSCompositor::StartRenderingInternal() {
}
void WebMediaPlayerMSCompositor::StopRenderingInternal() {
- DCHECK(compositor_task_runner_->BelongsToCurrentThread());
+ DCHECK(video_frame_compositor_task_runner_->BelongsToCurrentThread());
stopped_ = true;
// It is possible that the video gets paused and then resumed. We need to
@@ -485,7 +567,7 @@ void WebMediaPlayerMSCompositor::StopRenderingInternal() {
}
void WebMediaPlayerMSCompositor::StopUsingProviderInternal() {
- DCHECK(compositor_task_runner_->BelongsToCurrentThread());
+ DCHECK(video_frame_compositor_task_runner_->BelongsToCurrentThread());
if (video_frame_provider_client_)
video_frame_provider_client_->StopUsingProvider();
video_frame_provider_client_ = nullptr;
diff --git a/chromium/content/renderer/media/stream/webmediaplayer_ms_compositor.h b/chromium/content/renderer/media/stream/webmediaplayer_ms_compositor.h
index 921b0bb192a..f10a0d1b2c5 100644
--- a/chromium/content/renderer/media/stream/webmediaplayer_ms_compositor.h
+++ b/chromium/content/renderer/media/stream/webmediaplayer_ms_compositor.h
@@ -11,7 +11,7 @@
#include <memory>
#include <vector>
-#include "base/memory/ref_counted.h"
+#include "base/memory/ref_counted_delete_on_sequence.h"
#include "base/memory/weak_ptr.h"
#include "base/message_loop/message_loop.h"
#include "base/synchronization/lock.h"
@@ -20,6 +20,8 @@
#include "cc/layers/video_frame_provider.h"
#include "content/common/content_export.h"
#include "media/base/media_log.h"
+#include "media/blink/webmediaplayer_params.h"
+#include "third_party/blink/public/platform/web_video_frame_submitter.h"
namespace base {
class SingleThreadTaskRunner;
@@ -37,6 +39,10 @@ namespace media {
class VideoRendererAlgorithm;
}
+namespace viz {
+class SurfaceId;
+}
+
namespace content {
class WebMediaPlayerMS;
@@ -51,16 +57,18 @@ class WebMediaPlayerMS;
// frame, and submit it whenever asked by the compositor.
class CONTENT_EXPORT WebMediaPlayerMSCompositor
: public cc::VideoFrameProvider,
- public base::RefCountedThreadSafe<WebMediaPlayerMSCompositor> {
+ public base::RefCountedDeleteOnSequence<WebMediaPlayerMSCompositor> {
public:
// This |url| represents the media stream we are rendering. |url| is used to
// find out what web stream this WebMediaPlayerMSCompositor is playing, and
// together with flag "--disable-rtc-smoothness-algorithm" determine whether
// we enable algorithm or not.
WebMediaPlayerMSCompositor(
- scoped_refptr<base::SingleThreadTaskRunner> compositor_task_runner,
+ scoped_refptr<base::SingleThreadTaskRunner> task_runner,
scoped_refptr<base::SingleThreadTaskRunner> io_task_runner,
const blink::WebMediaStream& web_stream,
+ std::unique_ptr<blink::WebVideoFrameSubmitter> submitter,
+ blink::WebMediaPlayer::SurfaceLayerMode surface_layer_mode,
const base::WeakPtr<WebMediaPlayerMS>& player);
// Can be called from any thread.
@@ -76,6 +84,24 @@ class CONTENT_EXPORT WebMediaPlayerMSCompositor
size_t total_frame_count();
size_t dropped_frame_count();
+ // Signals the VideoFrameSubmitter to prepare to receive BeginFrames and
+ // submit video frames given by WebMediaPlayerMSCompositor.
+ virtual void EnableSubmission(
+ const viz::SurfaceId& id,
+ media::VideoRotation rotation,
+ bool force_submit,
+ bool is_opaque,
+ blink::WebFrameSinkDestroyedCallback frame_sink_destroyed_callback);
+
+ // Updates the rotation information for frames given to |submitter_|.
+ void UpdateRotation(media::VideoRotation rotation);
+
+ // Notifies the |submitter_| that the frames must be submitted.
+ void SetForceSubmit(bool);
+
+ // Updates the opacity information for frames given to |submitter_|.
+ void UpdateIsOpaque(bool);
+
// VideoFrameProvider implementation.
void SetVideoFrameProviderClient(
cc::VideoFrameProvider::Client* client) override;
@@ -101,11 +127,19 @@ class CONTENT_EXPORT WebMediaPlayerMSCompositor
void StopUsingProvider();
private:
- friend class base::RefCountedThreadSafe<WebMediaPlayerMSCompositor>;
+ friend class base::RefCountedDeleteOnSequence<WebMediaPlayerMSCompositor>;
+ friend class base::DeleteHelper<WebMediaPlayerMSCompositor>;
friend class WebMediaPlayerMSTest;
~WebMediaPlayerMSCompositor() override;
+ // Ran on the |video_frame_compositor_task_runner_| to initialize
+ // |submitter_|
+ void InitializeSubmitter();
+
+ // Signals the VideoFrameSubmitter to stop submitting frames.
+ void UpdateSubmissionState(bool);
+
bool MapTimestampsToRenderTimeTicks(
const std::vector<base::TimeDelta>& timestamps,
std::vector<base::TimeTicks>* wall_clock_times);
@@ -138,7 +172,8 @@ class CONTENT_EXPORT WebMediaPlayerMSCompositor
// which is renderer main thread in this class.
base::ThreadChecker thread_checker_;
- const scoped_refptr<base::SingleThreadTaskRunner> compositor_task_runner_;
+ const scoped_refptr<base::SingleThreadTaskRunner>
+ video_frame_compositor_task_runner_;
const scoped_refptr<base::SingleThreadTaskRunner> io_task_runner_;
base::MessageLoop* main_message_loop_;
@@ -186,6 +221,8 @@ class CONTENT_EXPORT WebMediaPlayerMSCompositor
bool stopped_;
bool render_started_;
+ std::unique_ptr<blink::WebVideoFrameSubmitter> submitter_;
+
std::map<base::TimeDelta, base::TimeTicks> timestamps_to_clock_times_;
cc::UpdateSubmissionStateCB update_submission_state_callback_;
@@ -194,6 +231,8 @@ class CONTENT_EXPORT WebMediaPlayerMSCompositor
// |dropped_frame_count_|, and |render_started_|.
base::Lock current_frame_lock_;
+ base::WeakPtrFactory<WebMediaPlayerMSCompositor> weak_ptr_factory_;
+
DISALLOW_COPY_AND_ASSIGN(WebMediaPlayerMSCompositor);
};
} // namespace content
diff --git a/chromium/content/renderer/media/stream/webmediaplayer_ms_unittest.cc b/chromium/content/renderer/media/stream/webmediaplayer_ms_unittest.cc
index 065fc62abdf..e84c2bb4a8e 100644
--- a/chromium/content/renderer/media/stream/webmediaplayer_ms_unittest.cc
+++ b/chromium/content/renderer/media/stream/webmediaplayer_ms_unittest.cc
@@ -8,9 +8,9 @@
#include <vector>
#include "base/containers/circular_deque.h"
-#include "base/message_loop/message_loop.h"
#include "base/run_loop.h"
#include "base/single_thread_task_runner.h"
+#include "base/test/scoped_task_environment.h"
#include "build/build_config.h"
#include "cc/layers/layer.h"
#include "content/public/renderer/media_stream_renderer_factory.h"
@@ -21,12 +21,18 @@
#include "media/base/video_frame.h"
#include "media/video/mock_gpu_memory_buffer_video_frame_pool.h"
#include "media/video/mock_gpu_video_accelerator_factories.h"
+#include "third_party/blink/public/common/picture_in_picture/picture_in_picture_control_info.h"
#include "third_party/blink/public/platform/web_fullscreen_video_status.h"
#include "third_party/blink/public/platform/web_media_player.h"
#include "third_party/blink/public/platform/web_media_player_client.h"
#include "third_party/blink/public/platform/web_media_player_source.h"
+using ::testing::_;
+using ::testing::ByRef;
+using ::testing::Eq;
+using ::testing::NiceMock;
using ::testing::Return;
+using ::testing::ReturnRef;
using ::testing::StrictMock;
namespace content {
@@ -40,12 +46,22 @@ enum class FrameType {
class MockSurfaceLayerBridge : public blink::WebSurfaceLayerBridge {
public:
+ MockSurfaceLayerBridge() {
+ ON_CALL(*this, GetSurfaceId).WillByDefault(ReturnRef(surface_id_));
+ }
+
MOCK_CONST_METHOD0(GetCcLayer, cc::Layer*());
MOCK_CONST_METHOD0(GetFrameSinkId, const viz::FrameSinkId&());
MOCK_CONST_METHOD0(GetSurfaceId, const viz::SurfaceId&());
MOCK_METHOD1(SetContentsOpaque, void(bool));
MOCK_METHOD0(CreateSurfaceLayer, void());
MOCK_METHOD0(ClearSurfaceId, void());
+
+ viz::FrameSinkId frame_sink_id_ = viz::FrameSinkId(1, 1);
+ viz::LocalSurfaceId local_surface_id_ =
+ viz::LocalSurfaceId(11, base::UnguessableToken::Deserialize(0x111111, 0));
+ viz::SurfaceId surface_id_ =
+ viz::SurfaceId(frame_sink_id_, local_surface_id_);
};
using TestFrame = std::pair<FrameType, scoped_refptr<media::VideoFrame>>;
@@ -89,37 +105,21 @@ class FakeWebMediaPlayerDelegate
EXPECT_EQ(delegate_id_, delegate_id);
}
- void DidPictureInPictureModeStart(
- int delegate_id,
- const viz::SurfaceId&,
- const gfx::Size&,
- blink::WebMediaPlayer::PipWindowOpenedCallback) override {
- EXPECT_EQ(delegate_id_, delegate_id);
- }
-
- void DidPictureInPictureModeEnd(
- int delegate_id,
- blink::WebMediaPlayer::PipWindowClosedCallback) override {
- EXPECT_EQ(delegate_id_, delegate_id);
- }
-
- void DidSetPictureInPictureCustomControls(
- int delegate_id,
- const std::vector<blink::PictureInPictureControlInfo>&) override {
- EXPECT_EQ(delegate_id_, delegate_id);
- }
-
- void DidPictureInPictureSurfaceChange(int delegate_id,
- const viz::SurfaceId&,
- const gfx::Size&) override {
- EXPECT_EQ(delegate_id_, delegate_id);
- }
-
- void RegisterPictureInPictureWindowResizeCallback(
- int delegate_id,
- blink::WebMediaPlayer::PipWindowResizedCallback) override {
- EXPECT_EQ(delegate_id_, delegate_id);
- }
+ MOCK_METHOD5(DidPictureInPictureModeStart,
+ void(int,
+ const viz::SurfaceId&,
+ const gfx::Size&,
+ blink::WebMediaPlayer::PipWindowOpenedCallback,
+ bool));
+ MOCK_METHOD2(DidPictureInPictureModeEnd,
+ void(int, blink::WebMediaPlayer::PipWindowClosedCallback));
+ MOCK_METHOD2(DidSetPictureInPictureCustomControls,
+ void(int,
+ const std::vector<blink::PictureInPictureControlInfo>&));
+ MOCK_METHOD4(DidPictureInPictureSurfaceChange,
+ void(int, const viz::SurfaceId&, const gfx::Size&, bool));
+ MOCK_METHOD2(RegisterPictureInPictureWindowResizeCallback,
+ void(int, blink::WebMediaPlayer::PipWindowResizedCallback));
void DidPause(int delegate_id) override {
EXPECT_EQ(delegate_id_, delegate_id);
@@ -169,6 +169,8 @@ class FakeWebMediaPlayerDelegate
void set_hidden(bool is_hidden) { is_hidden_ = is_hidden; }
+ int delegate_id() { return delegate_id_; }
+
private:
int delegate_id_ = 1234;
Observer* observer_ = nullptr;
@@ -414,6 +416,23 @@ void MockMediaStreamVideoRenderer::InjectFrame() {
message_loop_controller_->GetClosure().Run();
}
+class MockWebVideoFrameSubmitter : public blink::WebVideoFrameSubmitter {
+ public:
+ // blink::WebVideoFrameSubmitter implementation.
+ MOCK_METHOD0(StopUsingProvider, void());
+ MOCK_METHOD0(DidReceiveFrame, void());
+ MOCK_METHOD2(EnableSubmission,
+ void(viz::SurfaceId, blink::WebFrameSinkDestroyedCallback));
+ MOCK_METHOD0(StartRendering, void());
+ MOCK_METHOD0(StopRendering, void());
+ MOCK_METHOD1(Initialize, void(cc::VideoFrameProvider*));
+ MOCK_METHOD1(SetRotation, void(media::VideoRotation));
+ MOCK_METHOD1(SetIsOpaque, void(bool));
+ MOCK_METHOD1(UpdateSubmissionState, void(bool));
+ MOCK_METHOD1(SetForceSubmit, void(bool));
+ MOCK_CONST_METHOD0(IsDrivingFrameUpdates, bool());
+};
+
// The class is used to generate a MockVideoProvider in
// WebMediaPlayerMS::load().
class MockRenderFactory : public MediaStreamRendererFactory {
@@ -498,27 +517,33 @@ scoped_refptr<MediaStreamVideoRenderer> MockRenderFactory::GetVideoRenderer(
// 7. When WebMediaPlayerMS::play gets called, evething paused in step 6 should
// be resumed.
class WebMediaPlayerMSTest
- : public testing::TestWithParam<testing::tuple<bool, bool>> ,
+ : public testing::TestWithParam<
+ testing::tuple<bool /* enable_surface_layer_for_video */,
+ bool /* opaque_frame */,
+ bool /* odd_size_frame */>>,
public blink::WebMediaPlayerClient,
public cc::VideoFrameProvider::Client {
public:
WebMediaPlayerMSTest()
- : render_factory_(new MockRenderFactory(message_loop_.task_runner(),
- &message_loop_controller_)),
+ : render_factory_(
+ new MockRenderFactory(base::ThreadTaskRunnerHandle::Get(),
+ &message_loop_controller_)),
gpu_factories_(new media::MockGpuVideoAcceleratorFactories(nullptr)),
surface_layer_bridge_(
- std::make_unique<StrictMock<MockSurfaceLayerBridge>>()),
+ std::make_unique<NiceMock<MockSurfaceLayerBridge>>()),
+ submitter_(std::make_unique<NiceMock<MockWebVideoFrameSubmitter>>()),
layer_set_(false),
rendering_(false),
background_rendering_(false) {
surface_layer_bridge_ptr_ = surface_layer_bridge_.get();
+ submitter_ptr_ = submitter_.get();
}
~WebMediaPlayerMSTest() override {
player_.reset();
base::RunLoop().RunUntilIdle();
}
- void InitializeWebMediaPlayerMS(bool enable_surface_layer_for_video);
+ void InitializeWebMediaPlayerMS();
MockMediaStreamVideoRenderer* LoadAndGetFrameProvider(bool algorithm_enabled);
@@ -566,9 +591,6 @@ class WebMediaPlayerMSTest
}
bool HasNativeControls() override { return false; }
bool IsAudioElement() override { return is_audio_element_; }
- blink::WebMediaPlayer::DisplayType DisplayType() const override {
- return blink::WebMediaPlayer::DisplayType::kInline;
- }
bool IsInAutoPIP() const override { return false; }
void ActivateViewportIntersectionMonitoring(bool activate) override {}
void MediaRemotingStarted(
@@ -613,6 +635,7 @@ class WebMediaPlayerMSTest
void(blink::WebMediaPlayer::NetworkState));
MOCK_METHOD1(DoReadyStateChanged, void(blink::WebMediaPlayer::ReadyState));
MOCK_METHOD1(CheckSizeChanged, void(gfx::Size));
+ MOCK_CONST_METHOD0(DisplayType, blink::WebMediaPlayer::DisplayType());
MOCK_CONST_METHOD0(CouldPlayIfEnoughData, bool());
std::unique_ptr<blink::WebSurfaceLayerBridge> CreateMockSurfaceLayerBridge(
@@ -621,7 +644,7 @@ class WebMediaPlayerMSTest
return std::move(surface_layer_bridge_);
}
- base::MessageLoop message_loop_;
+ base::test::ScopedTaskEnvironment task_environment_;
MockRenderFactory* render_factory_;
std::unique_ptr<media::MockGpuVideoAcceleratorFactories> gpu_factories_;
FakeWebMediaPlayerDelegate delegate_;
@@ -631,8 +654,11 @@ class WebMediaPlayerMSTest
cc::Layer* layer_;
bool is_audio_element_ = false;
std::vector<base::OnceClosure> frame_ready_cbs_;
- std::unique_ptr<StrictMock<MockSurfaceLayerBridge>> surface_layer_bridge_;
- StrictMock<MockSurfaceLayerBridge>* surface_layer_bridge_ptr_ = nullptr;
+ std::unique_ptr<NiceMock<MockSurfaceLayerBridge>> surface_layer_bridge_;
+ std::unique_ptr<NiceMock<MockWebVideoFrameSubmitter>> submitter_;
+ NiceMock<MockSurfaceLayerBridge>* surface_layer_bridge_ptr_ = nullptr;
+ NiceMock<MockWebVideoFrameSubmitter>* submitter_ptr_ = nullptr;
+ bool enable_surface_layer_for_video_ = false;
private:
// Main function trying to ask WebMediaPlayerMS to submit a frame for
@@ -644,17 +670,21 @@ class WebMediaPlayerMSTest
bool background_rendering_;
};
-void WebMediaPlayerMSTest::InitializeWebMediaPlayerMS(
- bool enable_surface_layer_for_video) {
+void WebMediaPlayerMSTest::InitializeWebMediaPlayerMS() {
+ enable_surface_layer_for_video_ = testing::get<0>(GetParam());
+ blink::WebMediaPlayer::SurfaceLayerMode surface_layer_mode =
+ enable_surface_layer_for_video_
+ ? blink::WebMediaPlayer::SurfaceLayerMode::kAlways
+ : blink::WebMediaPlayer::SurfaceLayerMode::kNever;
player_ = std::make_unique<WebMediaPlayerMS>(
nullptr, this, &delegate_, std::make_unique<media::MediaLog>(),
std::unique_ptr<MediaStreamRendererFactory>(render_factory_),
- message_loop_.task_runner(), message_loop_.task_runner(),
- message_loop_.task_runner(), message_loop_.task_runner(),
+ base::ThreadTaskRunnerHandle::Get(), base::ThreadTaskRunnerHandle::Get(),
+ base::ThreadTaskRunnerHandle::Get(), base::ThreadTaskRunnerHandle::Get(),
gpu_factories_.get(), blink::WebString(),
- base::BindRepeating(&WebMediaPlayerMSTest::CreateMockSurfaceLayerBridge,
- base::Unretained(this)),
- enable_surface_layer_for_video);
+ base::BindOnce(&WebMediaPlayerMSTest::CreateMockSurfaceLayerBridge,
+ base::Unretained(this)),
+ std::move(submitter_), surface_layer_mode);
}
MockMediaStreamVideoRenderer* WebMediaPlayerMSTest::LoadAndGetFrameProvider(
@@ -709,8 +739,12 @@ void WebMediaPlayerMSTest::SetCcLayer(cc::Layer* layer) {
layer_set_ = layer ? true : false;
layer_ = layer;
- if (layer)
- compositor_->SetVideoFrameProviderClient(this);
+ if (layer) {
+ if (enable_surface_layer_for_video_)
+ compositor_->SetVideoFrameProviderClient(submitter_ptr_);
+ else
+ compositor_->SetVideoFrameProviderClient(this);
+ }
DoSetCcLayer(!!layer);
}
@@ -722,7 +756,7 @@ void WebMediaPlayerMSTest::StopUsingProvider() {
void WebMediaPlayerMSTest::StartRendering() {
if (!rendering_) {
rendering_ = true;
- message_loop_.task_runner()->PostTask(
+ base::ThreadTaskRunnerHandle::Get()->PostTask(
FROM_HERE, base::BindOnce(&WebMediaPlayerMSTest::RenderFrame,
base::Unretained(this)));
}
@@ -757,7 +791,7 @@ void WebMediaPlayerMSTest::RenderFrame() {
auto frame = compositor_->GetCurrentFrame();
compositor_->PutCurrentFrame();
}
- message_loop_.task_runner()->PostDelayedTask(
+ base::ThreadTaskRunnerHandle::Get()->PostDelayedTask(
FROM_HERE,
base::BindOnce(&WebMediaPlayerMSTest::RenderFrame,
base::Unretained(this)),
@@ -769,8 +803,8 @@ void WebMediaPlayerMSTest::SizeChanged() {
CheckSizeChanged(frame_size);
}
-TEST_F(WebMediaPlayerMSTest, NoDataDuringLoadForVideo) {
- InitializeWebMediaPlayerMS(false);
+TEST_P(WebMediaPlayerMSTest, NoDataDuringLoadForVideo) {
+ InitializeWebMediaPlayerMS();
EXPECT_CALL(*this, DoReadyStateChanged(
blink::WebMediaPlayer::kReadyStateHaveMetadata))
.Times(0);
@@ -787,8 +821,8 @@ TEST_F(WebMediaPlayerMSTest, NoDataDuringLoadForVideo) {
EXPECT_CALL(*this, DoSetCcLayer(false));
}
-TEST_F(WebMediaPlayerMSTest, NoWaitForFrameForAudio) {
- InitializeWebMediaPlayerMS(false);
+TEST_P(WebMediaPlayerMSTest, NoWaitForFrameForAudio) {
+ InitializeWebMediaPlayerMS();
is_audio_element_ = true;
scoped_refptr<MediaStreamAudioRenderer> audio_renderer(
new MockMediaStreamAudioRenderer());
@@ -814,8 +848,8 @@ TEST_F(WebMediaPlayerMSTest, NoWaitForFrameForAudio) {
EXPECT_CALL(*this, DoSetCcLayer(false));
}
-TEST_F(WebMediaPlayerMSTest, NoWaitForFrameForAudioOnly) {
- InitializeWebMediaPlayerMS(false);
+TEST_P(WebMediaPlayerMSTest, NoWaitForFrameForAudioOnly) {
+ InitializeWebMediaPlayerMS();
render_factory_->set_support_video_renderer(false);
scoped_refptr<MediaStreamAudioRenderer> audio_renderer(
new MockMediaStreamAudioRenderer());
@@ -828,12 +862,12 @@ TEST_F(WebMediaPlayerMSTest, NoWaitForFrameForAudioOnly) {
EXPECT_CALL(*this, DoSetCcLayer(false));
}
-TEST_F(WebMediaPlayerMSTest, Playing_Normal) {
+TEST_P(WebMediaPlayerMSTest, Playing_Normal) {
// This test sends a bunch of normal frames with increasing timestamps
// and verifies that they are produced by WebMediaPlayerMS in appropriate
// order.
- InitializeWebMediaPlayerMS(false);
+ InitializeWebMediaPlayerMS();
MockMediaStreamVideoRenderer* provider = LoadAndGetFrameProvider(true);
@@ -842,8 +876,12 @@ TEST_F(WebMediaPlayerMSTest, Playing_Normal) {
std::vector<int> timestamps(tokens, tokens + sizeof(tokens) / sizeof(int));
provider->QueueFrames(timestamps);
- EXPECT_CALL(*this, DoSetCcLayer(true));
- EXPECT_CALL(*this, DoStartRendering());
+ if (enable_surface_layer_for_video_) {
+ EXPECT_CALL(*submitter_ptr_, StartRendering());
+ } else {
+ EXPECT_CALL(*this, DoSetCcLayer(true));
+ EXPECT_CALL(*this, DoStartRendering());
+ }
EXPECT_CALL(*this, DoReadyStateChanged(
blink::WebMediaPlayer::kReadyStateHaveMetadata));
EXPECT_CALL(*this, DoReadyStateChanged(
@@ -858,14 +896,17 @@ TEST_F(WebMediaPlayerMSTest, Playing_Normal) {
testing::Mock::VerifyAndClearExpectations(this);
EXPECT_CALL(*this, DoSetCcLayer(false));
- EXPECT_CALL(*this, DoStopRendering());
+ if (enable_surface_layer_for_video_)
+ EXPECT_CALL(*submitter_ptr_, StopUsingProvider());
+ else
+ EXPECT_CALL(*this, DoStopRendering());
}
-TEST_F(WebMediaPlayerMSTest, Playing_ErrorFrame) {
+TEST_P(WebMediaPlayerMSTest, Playing_ErrorFrame) {
// This tests sends a broken frame to WebMediaPlayerMS, and verifies
// OnSourceError function works as expected.
- InitializeWebMediaPlayerMS(false);
+ InitializeWebMediaPlayerMS();
MockMediaStreamVideoRenderer* provider = LoadAndGetFrameProvider(false);
@@ -875,8 +916,12 @@ TEST_F(WebMediaPlayerMSTest, Playing_ErrorFrame) {
std::vector<int> timestamps(tokens, tokens + sizeof(tokens) / sizeof(int));
provider->QueueFrames(timestamps);
- EXPECT_CALL(*this, DoSetCcLayer(true));
- EXPECT_CALL(*this, DoStartRendering());
+ if (enable_surface_layer_for_video_) {
+ EXPECT_CALL(*submitter_ptr_, StartRendering());
+ } else {
+ EXPECT_CALL(*this, DoSetCcLayer(true));
+ EXPECT_CALL(*this, DoStartRendering());
+ }
EXPECT_CALL(*this, DoReadyStateChanged(
blink::WebMediaPlayer::kReadyStateHaveMetadata));
EXPECT_CALL(*this, DoReadyStateChanged(
@@ -890,13 +935,16 @@ TEST_F(WebMediaPlayerMSTest, Playing_ErrorFrame) {
testing::Mock::VerifyAndClearExpectations(this);
EXPECT_CALL(*this, DoSetCcLayer(false));
- EXPECT_CALL(*this, DoStopRendering());
+ if (enable_surface_layer_for_video_)
+ EXPECT_CALL(*submitter_ptr_, StopUsingProvider());
+ else
+ EXPECT_CALL(*this, DoStopRendering());
}
TEST_P(WebMediaPlayerMSTest, PlayThenPause) {
- InitializeWebMediaPlayerMS(false);
- const bool opaque_frame = testing::get<0>(GetParam());
- const bool odd_size_frame = testing::get<1>(GetParam());
+ InitializeWebMediaPlayerMS();
+ const bool opaque_frame = testing::get<1>(GetParam());
+ const bool odd_size_frame = testing::get<2>(GetParam());
// In the middle of this test, WebMediaPlayerMS::pause will be called, and we
// are going to verify that during the pause stage, a frame gets freezed, and
// cc::VideoFrameProviderClient should also be paused.
@@ -908,8 +956,13 @@ TEST_P(WebMediaPlayerMSTest, PlayThenPause) {
std::vector<int> timestamps(tokens, tokens + sizeof(tokens) / sizeof(int));
provider->QueueFrames(timestamps, opaque_frame, odd_size_frame);
- EXPECT_CALL(*this, DoSetCcLayer(true));
- EXPECT_CALL(*this, DoStartRendering());
+ if (enable_surface_layer_for_video_) {
+ EXPECT_CALL(*surface_layer_bridge_ptr_, CreateSurfaceLayer());
+ EXPECT_CALL(*submitter_ptr_, StartRendering());
+ } else {
+ EXPECT_CALL(*this, DoSetCcLayer(true));
+ EXPECT_CALL(*this, DoStartRendering());
+ }
EXPECT_CALL(*this, DoReadyStateChanged(
blink::WebMediaPlayer::kReadyStateHaveMetadata));
EXPECT_CALL(*this, DoReadyStateChanged(
@@ -923,7 +976,11 @@ TEST_P(WebMediaPlayerMSTest, PlayThenPause) {
testing::Mock::VerifyAndClearExpectations(this);
// Here we call pause, and expect a freezing frame.
- EXPECT_CALL(*this, DoStopRendering());
+ if (enable_surface_layer_for_video_)
+ EXPECT_CALL(*submitter_ptr_, StopRendering());
+ else
+ EXPECT_CALL(*this, DoStopRendering());
+
player_->Pause();
auto prev_frame = compositor_->GetCurrentFrameWithoutUpdatingStatistics();
message_loop_controller_.RunAndWaitForStatus(
@@ -936,9 +993,9 @@ TEST_P(WebMediaPlayerMSTest, PlayThenPause) {
}
TEST_P(WebMediaPlayerMSTest, PlayThenPauseThenPlay) {
- InitializeWebMediaPlayerMS(false);
- const bool opaque_frame = testing::get<0>(GetParam());
- const bool odd_size_frame = testing::get<1>(GetParam());
+ InitializeWebMediaPlayerMS();
+ const bool opaque_frame = testing::get<1>(GetParam());
+ const bool odd_size_frame = testing::get<2>(GetParam());
// Similary to PlayAndPause test above, this one focuses on testing that
// WebMediaPlayerMS can be resumed after a period of paused status.
MockMediaStreamVideoRenderer* provider = LoadAndGetFrameProvider(false);
@@ -950,8 +1007,13 @@ TEST_P(WebMediaPlayerMSTest, PlayThenPauseThenPlay) {
std::vector<int> timestamps(tokens, tokens + sizeof(tokens) / sizeof(int));
provider->QueueFrames(timestamps, opaque_frame, odd_size_frame);
- EXPECT_CALL(*this, DoSetCcLayer(true));
- EXPECT_CALL(*this, DoStartRendering());
+ if (enable_surface_layer_for_video_) {
+ EXPECT_CALL(*surface_layer_bridge_ptr_, CreateSurfaceLayer());
+ EXPECT_CALL(*submitter_ptr_, StartRendering());
+ } else {
+ EXPECT_CALL(*this, DoSetCcLayer(true));
+ EXPECT_CALL(*this, DoStartRendering());
+ }
EXPECT_CALL(*this, DoReadyStateChanged(
blink::WebMediaPlayer::kReadyStateHaveMetadata));
EXPECT_CALL(*this, DoReadyStateChanged(
@@ -965,7 +1027,11 @@ TEST_P(WebMediaPlayerMSTest, PlayThenPauseThenPlay) {
testing::Mock::VerifyAndClearExpectations(this);
// Here we call pause, and expect a freezing frame.
- EXPECT_CALL(*this, DoStopRendering());
+ if (enable_surface_layer_for_video_)
+ EXPECT_CALL(*submitter_ptr_, StopRendering());
+ else
+ EXPECT_CALL(*this, DoStopRendering());
+
player_->Pause();
auto prev_frame = compositor_->GetCurrentFrameWithoutUpdatingStatistics();
message_loop_controller_.RunAndWaitForStatus(
@@ -975,7 +1041,11 @@ TEST_P(WebMediaPlayerMSTest, PlayThenPauseThenPlay) {
testing::Mock::VerifyAndClearExpectations(this);
// We resume the player, and expect rendering can continue.
- EXPECT_CALL(*this, DoStartRendering());
+ if (enable_surface_layer_for_video_)
+ EXPECT_CALL(*submitter_ptr_, StartRendering());
+ else
+ EXPECT_CALL(*this, DoStartRendering());
+
player_->Play();
prev_frame = compositor_->GetCurrentFrameWithoutUpdatingStatistics();
message_loop_controller_.RunAndWaitForStatus(
@@ -985,26 +1055,30 @@ TEST_P(WebMediaPlayerMSTest, PlayThenPauseThenPlay) {
testing::Mock::VerifyAndClearExpectations(this);
EXPECT_CALL(*this, DoSetCcLayer(false));
- EXPECT_CALL(*this, DoStopRendering());
+ if (enable_surface_layer_for_video_) {
+ EXPECT_CALL(*submitter_ptr_, StopUsingProvider());
+ } else {
+ EXPECT_CALL(*this, DoStopRendering());
+ }
}
-INSTANTIATE_TEST_CASE_P(,
- WebMediaPlayerMSTest,
- ::testing::Combine(::testing::Bool(),
- ::testing::Bool()));
-
// During this test, we check that when we send rotated video frames, it applies
// to player's natural size.
-TEST_F(WebMediaPlayerMSTest, RotationChange) {
- InitializeWebMediaPlayerMS(false);
+TEST_P(WebMediaPlayerMSTest, RotationChange) {
+ InitializeWebMediaPlayerMS();
MockMediaStreamVideoRenderer* provider = LoadAndGetFrameProvider(true);
const int kTestBrake = static_cast<int>(FrameType::TEST_BRAKE);
static int tokens[] = {0, 33, kTestBrake};
std::vector<int> timestamps(tokens, tokens + sizeof(tokens) / sizeof(int));
provider->QueueFrames(timestamps, false, false, 17, media::VIDEO_ROTATION_90);
- EXPECT_CALL(*this, DoSetCcLayer(true));
- EXPECT_CALL(*this, DoStartRendering());
+ if (enable_surface_layer_for_video_) {
+ EXPECT_CALL(*surface_layer_bridge_ptr_, CreateSurfaceLayer());
+ EXPECT_CALL(*submitter_ptr_, StartRendering());
+ } else {
+ EXPECT_CALL(*this, DoSetCcLayer(true));
+ EXPECT_CALL(*this, DoStartRendering());
+ }
EXPECT_CALL(*this, DoReadyStateChanged(
blink::WebMediaPlayer::kReadyStateHaveMetadata));
EXPECT_CALL(*this, DoReadyStateChanged(
@@ -1020,9 +1094,13 @@ TEST_F(WebMediaPlayerMSTest, RotationChange) {
// Change rotation.
provider->QueueFrames(timestamps, false, false, 17, media::VIDEO_ROTATION_0);
- EXPECT_CALL(*this, DoSetCcLayer(true));
- EXPECT_CALL(*this, DoStopRendering());
- EXPECT_CALL(*this, DoStartRendering());
+ if (enable_surface_layer_for_video_) {
+ EXPECT_CALL(*submitter_ptr_, SetRotation(media::VIDEO_ROTATION_0));
+ } else {
+ EXPECT_CALL(*this, DoSetCcLayer(true));
+ EXPECT_CALL(*this, DoStopRendering());
+ EXPECT_CALL(*this, DoStartRendering());
+ }
message_loop_controller_.RunAndWaitForStatus(
media::PipelineStatus::PIPELINE_OK);
natural_size = player_->NaturalSize();
@@ -1031,13 +1109,17 @@ TEST_F(WebMediaPlayerMSTest, RotationChange) {
testing::Mock::VerifyAndClearExpectations(this);
EXPECT_CALL(*this, DoSetCcLayer(false));
- EXPECT_CALL(*this, DoStopRendering());
+
+ if (enable_surface_layer_for_video_)
+ EXPECT_CALL(*submitter_ptr_, StopUsingProvider());
+ else
+ EXPECT_CALL(*this, DoStopRendering());
}
// During this test, we check that web layer changes opacity according to the
// given frames.
-TEST_F(WebMediaPlayerMSTest, OpacityChange) {
- InitializeWebMediaPlayerMS(false);
+TEST_P(WebMediaPlayerMSTest, OpacityChange) {
+ InitializeWebMediaPlayerMS();
MockMediaStreamVideoRenderer* provider = LoadAndGetFrameProvider(true);
// Push one opaque frame.
@@ -1045,8 +1127,13 @@ TEST_F(WebMediaPlayerMSTest, OpacityChange) {
static int tokens[] = {0, kTestBrake};
std::vector<int> timestamps(tokens, tokens + arraysize(tokens));
provider->QueueFrames(timestamps, true);
- EXPECT_CALL(*this, DoSetCcLayer(true));
- EXPECT_CALL(*this, DoStartRendering());
+ if (enable_surface_layer_for_video_) {
+ EXPECT_CALL(*surface_layer_bridge_ptr_, CreateSurfaceLayer());
+ EXPECT_CALL(*submitter_ptr_, StartRendering());
+ } else {
+ EXPECT_CALL(*this, DoSetCcLayer(true));
+ EXPECT_CALL(*this, DoStartRendering());
+ }
EXPECT_CALL(*this, DoReadyStateChanged(
blink::WebMediaPlayer::kReadyStateHaveMetadata));
EXPECT_CALL(*this, DoReadyStateChanged(
@@ -1055,34 +1142,50 @@ TEST_F(WebMediaPlayerMSTest, OpacityChange) {
CheckSizeChanged(gfx::Size(kStandardWidth, kStandardHeight)));
message_loop_controller_.RunAndWaitForStatus(
media::PipelineStatus::PIPELINE_OK);
- ASSERT_TRUE(layer_ != nullptr);
- EXPECT_TRUE(layer_->contents_opaque());
+
+ if (!enable_surface_layer_for_video_) {
+ ASSERT_TRUE(layer_ != nullptr);
+ EXPECT_TRUE(layer_->contents_opaque());
+ }
// Push one transparent frame.
+ if (enable_surface_layer_for_video_) {
+ EXPECT_CALL(*surface_layer_bridge_ptr_, SetContentsOpaque(false));
+ EXPECT_CALL(*submitter_ptr_, SetIsOpaque(false));
+ }
provider->QueueFrames(timestamps, false);
message_loop_controller_.RunAndWaitForStatus(
media::PipelineStatus::PIPELINE_OK);
- EXPECT_FALSE(layer_->contents_opaque());
+ if (!enable_surface_layer_for_video_)
+ EXPECT_FALSE(layer_->contents_opaque());
+ if (enable_surface_layer_for_video_) {
+ EXPECT_CALL(*surface_layer_bridge_ptr_, SetContentsOpaque(true));
+ EXPECT_CALL(*submitter_ptr_, SetIsOpaque(true));
+ }
// Push another opaque frame.
provider->QueueFrames(timestamps, true);
message_loop_controller_.RunAndWaitForStatus(
media::PipelineStatus::PIPELINE_OK);
- EXPECT_TRUE(layer_->contents_opaque());
+ if (!enable_surface_layer_for_video_)
+ EXPECT_TRUE(layer_->contents_opaque());
testing::Mock::VerifyAndClearExpectations(this);
EXPECT_CALL(*this, DoSetCcLayer(false));
- EXPECT_CALL(*this, DoStopRendering());
+ if (enable_surface_layer_for_video_)
+ EXPECT_CALL(*submitter_ptr_, StopUsingProvider());
+ else
+ EXPECT_CALL(*this, DoStopRendering());
}
-TEST_F(WebMediaPlayerMSTest, BackgroundRendering) {
+TEST_P(WebMediaPlayerMSTest, BackgroundRendering) {
// During this test, we will switch to background rendering mode, in which
// WebMediaPlayerMS::pause does not get called, but
// cc::VideoFrameProviderClient simply stops asking frames from
// WebMediaPlayerMS without an explicit notification. We should expect that
  // WebMediaPlayerMS can digest old frames, rather than piling frames up and
  // exploding.
- InitializeWebMediaPlayerMS(false);
+ InitializeWebMediaPlayerMS();
MockMediaStreamVideoRenderer* provider = LoadAndGetFrameProvider(true);
const int kTestBrake = static_cast<int>(FrameType::TEST_BRAKE);
@@ -1092,8 +1195,13 @@ TEST_F(WebMediaPlayerMSTest, BackgroundRendering) {
std::vector<int> timestamps(tokens, tokens + sizeof(tokens) / sizeof(int));
provider->QueueFrames(timestamps);
- EXPECT_CALL(*this, DoSetCcLayer(true));
- EXPECT_CALL(*this, DoStartRendering());
+ if (enable_surface_layer_for_video_) {
+ EXPECT_CALL(*surface_layer_bridge_ptr_, CreateSurfaceLayer());
+ EXPECT_CALL(*submitter_ptr_, StartRendering());
+ } else {
+ EXPECT_CALL(*this, DoSetCcLayer(true));
+ EXPECT_CALL(*this, DoStartRendering());
+ }
EXPECT_CALL(*this, DoReadyStateChanged(
blink::WebMediaPlayer::kReadyStateHaveMetadata));
EXPECT_CALL(*this, DoReadyStateChanged(
@@ -1106,7 +1214,11 @@ TEST_F(WebMediaPlayerMSTest, BackgroundRendering) {
// Switch to background rendering, expect rendering to continue for all the
// frames between kTestBrake frames.
- EXPECT_CALL(*this, DoDidReceiveFrame()).Times(testing::AtLeast(1));
+ if (enable_surface_layer_for_video_)
+ EXPECT_CALL(*submitter_ptr_, DidReceiveFrame()).Times(testing::AtLeast(1));
+ else
+ EXPECT_CALL(*this, DoDidReceiveFrame()).Times(testing::AtLeast(1));
+
SetBackgroundRendering(true);
auto prev_frame = compositor_->GetCurrentFrameWithoutUpdatingStatistics();
message_loop_controller_.RunAndWaitForStatus(
@@ -1124,14 +1236,17 @@ TEST_F(WebMediaPlayerMSTest, BackgroundRendering) {
testing::Mock::VerifyAndClearExpectations(this);
EXPECT_CALL(*this, DoSetCcLayer(false));
- EXPECT_CALL(*this, DoStopRendering());
+ if (enable_surface_layer_for_video_)
+ EXPECT_CALL(*submitter_ptr_, StopUsingProvider());
+ else
+ EXPECT_CALL(*this, DoStopRendering());
}
-TEST_F(WebMediaPlayerMSTest, FrameSizeChange) {
+TEST_P(WebMediaPlayerMSTest, FrameSizeChange) {
// During this test, the frame size of the input changes.
// We need to make sure, when sizeChanged() gets called, new size should be
// returned by GetCurrentSize().
- InitializeWebMediaPlayerMS(false);
+ InitializeWebMediaPlayerMS();
MockMediaStreamVideoRenderer* provider = LoadAndGetFrameProvider(true);
int tokens[] = {0, 33, 66, 100, 133, 166, 200, 233, 266, 300,
@@ -1139,8 +1254,13 @@ TEST_F(WebMediaPlayerMSTest, FrameSizeChange) {
std::vector<int> timestamps(tokens, tokens + sizeof(tokens) / sizeof(int));
provider->QueueFrames(timestamps, false, false, 7);
- EXPECT_CALL(*this, DoSetCcLayer(true));
- EXPECT_CALL(*this, DoStartRendering());
+ if (enable_surface_layer_for_video_) {
+ EXPECT_CALL(*surface_layer_bridge_ptr_, CreateSurfaceLayer());
+ EXPECT_CALL(*submitter_ptr_, StartRendering());
+ } else {
+ EXPECT_CALL(*this, DoSetCcLayer(true));
+ EXPECT_CALL(*this, DoStartRendering());
+ }
EXPECT_CALL(*this, DoReadyStateChanged(
blink::WebMediaPlayer::kReadyStateHaveMetadata));
EXPECT_CALL(*this, DoReadyStateChanged(
@@ -1154,12 +1274,15 @@ TEST_F(WebMediaPlayerMSTest, FrameSizeChange) {
testing::Mock::VerifyAndClearExpectations(this);
EXPECT_CALL(*this, DoSetCcLayer(false));
- EXPECT_CALL(*this, DoStopRendering());
+ if (enable_surface_layer_for_video_)
+ EXPECT_CALL(*submitter_ptr_, StopUsingProvider());
+ else
+ EXPECT_CALL(*this, DoStopRendering());
}
// Tests that GpuMemoryBufferVideoFramePool is called in the expected sequence.
-TEST_F(WebMediaPlayerMSTest, CreateHardwareFrames) {
- InitializeWebMediaPlayerMS(false);
+TEST_P(WebMediaPlayerMSTest, CreateHardwareFrames) {
+ InitializeWebMediaPlayerMS();
MockMediaStreamVideoRenderer* provider = LoadAndGetFrameProvider(false);
SetGpuMemoryBufferVideoForTesting();
@@ -1171,8 +1294,13 @@ TEST_F(WebMediaPlayerMSTest, CreateHardwareFrames) {
media::PipelineStatus::PIPELINE_OK);
ASSERT_EQ(1u, frame_ready_cbs_.size());
- EXPECT_CALL(*this, DoSetCcLayer(true));
- EXPECT_CALL(*this, DoStartRendering());
+ if (enable_surface_layer_for_video_) {
+ EXPECT_CALL(*surface_layer_bridge_ptr_, CreateSurfaceLayer());
+ EXPECT_CALL(*submitter_ptr_, StartRendering());
+ } else {
+ EXPECT_CALL(*this, DoSetCcLayer(true));
+ EXPECT_CALL(*this, DoStartRendering());
+ }
EXPECT_CALL(*this, DoReadyStateChanged(
blink::WebMediaPlayer::kReadyStateHaveMetadata));
EXPECT_CALL(*this, DoReadyStateChanged(
@@ -1189,12 +1317,14 @@ TEST_F(WebMediaPlayerMSTest, CreateHardwareFrames) {
testing::Mock::VerifyAndClearExpectations(this);
EXPECT_CALL(*this, DoSetCcLayer(false));
- EXPECT_CALL(*this, DoStopRendering());
+ if (enable_surface_layer_for_video_)
+ EXPECT_CALL(*submitter_ptr_, StopUsingProvider());
+ else
+ EXPECT_CALL(*this, DoStopRendering());
}
-
#if defined(OS_ANDROID)
-TEST_F(WebMediaPlayerMSTest, HiddenPlayerTests) {
- InitializeWebMediaPlayerMS(false);
+TEST_P(WebMediaPlayerMSTest, HiddenPlayerTests) {
+ InitializeWebMediaPlayerMS();
LoadAndGetFrameProvider(true);
// Hidden status should not affect playback.
@@ -1244,4 +1374,68 @@ TEST_F(WebMediaPlayerMSTest, HiddenPlayerTests) {
}
#endif
+// Tests delegate methods are called when Picture-in-Picture is triggered.
+TEST_P(WebMediaPlayerMSTest, PictureInPictureTriggerCallback) {
+ InitializeWebMediaPlayerMS();
+
+  // It works only when a surface layer is used instead of a video layer.
+ if (!enable_surface_layer_for_video_) {
+ EXPECT_CALL(*this, DoSetCcLayer(false));
+ return;
+ }
+
+ MockMediaStreamVideoRenderer* provider = LoadAndGetFrameProvider(true);
+
+ int tokens[] = {0, 33, 66, 100, 133, 166, 200, 233, 266, 300,
+ 333, 366, 400, 433, 466, 500, 533, 566, 600};
+ std::vector<int> timestamps(tokens, tokens + sizeof(tokens) / sizeof(int));
+ provider->QueueFrames(timestamps);
+
+ EXPECT_CALL(*submitter_ptr_, StartRendering());
+ EXPECT_CALL(*this, DisplayType()).Times(2);
+ EXPECT_CALL(*this, DoReadyStateChanged(
+ blink::WebMediaPlayer::kReadyStateHaveMetadata));
+ EXPECT_CALL(*this, DoReadyStateChanged(
+ blink::WebMediaPlayer::kReadyStateHaveEnoughData));
+ EXPECT_CALL(*this,
+ CheckSizeChanged(gfx::Size(kStandardWidth, kStandardHeight)));
+ message_loop_controller_.RunAndWaitForStatus(
+ media::PipelineStatus::PIPELINE_OK);
+ testing::Mock::VerifyAndClearExpectations(this);
+
+ EXPECT_CALL(*this, DisplayType())
+ .WillRepeatedly(
+ Return(blink::WebMediaPlayer::DisplayType::kPictureInPicture));
+
+ const gfx::Size natural_size = player_->NaturalSize();
+ EXPECT_CALL(delegate_, DidPictureInPictureSurfaceChange(
+ delegate_.delegate_id(),
+ surface_layer_bridge_ptr_->GetSurfaceId(),
+ natural_size, false))
+ .Times(2);
+
+ player_->OnSurfaceIdUpdated(surface_layer_bridge_ptr_->GetSurfaceId());
+
+ EXPECT_CALL(delegate_, DidPictureInPictureModeStart(
+ delegate_.delegate_id(),
+ surface_layer_bridge_ptr_->GetSurfaceId(),
+ natural_size, _, false));
+
+ player_->EnterPictureInPicture(base::DoNothing());
+ player_->OnSurfaceIdUpdated(surface_layer_bridge_ptr_->GetSurfaceId());
+
+ // Updating SurfaceId should NOT exit Picture-in-Picture.
+ EXPECT_CALL(delegate_, DidPictureInPictureModeEnd(delegate_.delegate_id(), _))
+ .Times(0);
+
+ testing::Mock::VerifyAndClearExpectations(this);
+ EXPECT_CALL(*this, DoSetCcLayer(false));
+ EXPECT_CALL(*submitter_ptr_, StopUsingProvider());
+}
+
+INSTANTIATE_TEST_CASE_P(,
+ WebMediaPlayerMSTest,
+ ::testing::Combine(::testing::Bool(),
+ ::testing::Bool(),
+ ::testing::Bool()));
} // namespace content