path: root/chromium/media/renderers
author      Allan Sandfeld Jensen <allan.jensen@qt.io>  2018-12-10 16:19:40 +0100
committer   Allan Sandfeld Jensen <allan.jensen@qt.io>  2018-12-10 16:01:50 +0000
commit      51f6c2793adab2d864b3d2b360000ef8db1d3e92 (patch)
tree        835b3b4446b012c75e80177cef9fbe6972cc7dbe /chromium/media/renderers
parent      6036726eb981b6c4b42047513b9d3f4ac865daac (diff)
download    qtwebengine-chromium-51f6c2793adab2d864b3d2b360000ef8db1d3e92.tar.gz
BASELINE: Update Chromium to 71.0.3578.93
Change-Id: I6a32086c33670e1b033f8b10e6bf1fd4da1d105d
Reviewed-by: Alexandru Croitor <alexandru.croitor@qt.io>
Diffstat (limited to 'chromium/media/renderers')
-rw-r--r--  chromium/media/renderers/audio_renderer_impl.cc                  |  96
-rw-r--r--  chromium/media/renderers/audio_renderer_impl.h                   |  15
-rw-r--r--  chromium/media/renderers/audio_renderer_impl_unittest.cc         |  46
-rw-r--r--  chromium/media/renderers/default_renderer_factory.cc             |   2
-rw-r--r--  chromium/media/renderers/paint_canvas_video_renderer.cc          |  71
-rw-r--r--  chromium/media/renderers/paint_canvas_video_renderer.h           |  34
-rw-r--r--  chromium/media/renderers/paint_canvas_video_renderer_unittest.cc |  35
-rw-r--r--  chromium/media/renderers/renderer_impl.cc                        |  90
-rw-r--r--  chromium/media/renderers/renderer_impl.h                         |   1
-rw-r--r--  chromium/media/renderers/video_renderer_impl.cc                  | 125
-rw-r--r--  chromium/media/renderers/video_renderer_impl.h                   |  32
-rw-r--r--  chromium/media/renderers/video_renderer_impl_unittest.cc         |  48
-rw-r--r--  chromium/media/renderers/video_resource_updater.cc               |   2
13 files changed, 357 insertions, 240 deletions
diff --git a/chromium/media/renderers/audio_renderer_impl.cc b/chromium/media/renderers/audio_renderer_impl.cc
index 58d512e155d..79c34da2ab0 100644
--- a/chromium/media/renderers/audio_renderer_impl.cc
+++ b/chromium/media/renderers/audio_renderer_impl.cc
@@ -83,8 +83,8 @@ AudioRendererImpl::AudioRendererImpl(
// Safe to post this without a WeakPtr because this class must be destructed
// on the same thread and construction has not completed yet.
task_runner_->PostTask(FROM_HERE,
- base::Bind(&base::PowerMonitor::AddObserver,
- base::Unretained(monitor), this));
+ base::BindOnce(&base::PowerMonitor::AddObserver,
+ base::Unretained(monitor), this));
}
// Do not add anything below this line since the above actions are only safe
@@ -107,8 +107,8 @@ AudioRendererImpl::~AudioRendererImpl() {
CHECK(lock_.Try());
lock_.Release();
- if (!init_cb_.is_null())
- base::ResetAndReturn(&init_cb_).Run(PIPELINE_ERROR_ABORT);
+ if (init_cb_)
+ FinishInitialization(PIPELINE_ERROR_ABORT);
}
void AudioRendererImpl::StartTicking() {
@@ -271,10 +271,11 @@ TimeSource* AudioRendererImpl::GetTimeSource() {
void AudioRendererImpl::Flush(const base::Closure& callback) {
DVLOG(1) << __func__;
DCHECK(task_runner_->BelongsToCurrentThread());
+ TRACE_EVENT_ASYNC_BEGIN0("media", "AudioRendererImpl::Flush", this);
base::AutoLock auto_lock(lock_);
DCHECK_EQ(state_, kPlaying);
- DCHECK(flush_cb_.is_null());
+ DCHECK(!flush_cb_);
flush_cb_ = callback;
ChangeState_Locked(kFlushing);
@@ -294,7 +295,7 @@ void AudioRendererImpl::DoFlush_Locked() {
DCHECK_EQ(state_, kFlushed);
ended_timestamp_ = kInfiniteDuration;
- audio_buffer_stream_->Reset(base::BindOnce(
+ audio_decoder_stream_->Reset(base::BindOnce(
&AudioRendererImpl::ResetDecoderDone, weak_factory_.GetWeakPtr()));
}
@@ -304,7 +305,7 @@ void AudioRendererImpl::ResetDecoderDone() {
base::AutoLock auto_lock(lock_);
DCHECK_EQ(state_, kFlushed);
- DCHECK(!flush_cb_.is_null());
+ DCHECK(flush_cb_);
received_end_of_stream_ = false;
rendered_end_of_stream_ = false;
@@ -320,7 +321,8 @@ void AudioRendererImpl::ResetDecoderDone() {
// Changes in buffering state are always posted. Flush callback must only be
// run after buffering state has been set back to nothing.
- task_runner_->PostTask(FROM_HERE, base::ResetAndReturn(&flush_cb_));
+ flush_cb_ = BindToCurrentLoop(flush_cb_);
+ FinishFlush();
}
void AudioRendererImpl::StartPlaying() {
@@ -346,9 +348,10 @@ void AudioRendererImpl::Initialize(DemuxerStream* stream,
DCHECK(client);
DCHECK(stream);
DCHECK_EQ(stream->type(), DemuxerStream::AUDIO);
- DCHECK(!init_cb.is_null());
+ DCHECK(init_cb);
DCHECK(state_ == kUninitialized || state_ == kFlushed);
DCHECK(sink_.get());
+ TRACE_EVENT_ASYNC_BEGIN0("media", "AudioRendererImpl::Initialize", this);
// Trying to track down AudioClock crash, http://crbug.com/674856.
// Initialize should never be called while Rendering is ongoing. This can lead
@@ -380,12 +383,12 @@ void AudioRendererImpl::Initialize(DemuxerStream* stream,
ChannelLayout hw_channel_layout =
hw_params.IsValid() ? hw_params.channel_layout() : CHANNEL_LAYOUT_NONE;
- audio_buffer_stream_ = std::make_unique<AudioBufferStream>(
- std::make_unique<AudioBufferStream::StreamTraits>(media_log_,
- hw_channel_layout),
+ audio_decoder_stream_ = std::make_unique<AudioDecoderStream>(
+ std::make_unique<AudioDecoderStream::StreamTraits>(media_log_,
+ hw_channel_layout),
task_runner_, create_audio_decoders_cb_, media_log_);
- audio_buffer_stream_->set_config_change_observer(base::Bind(
+ audio_decoder_stream_->set_config_change_observer(base::BindRepeating(
&AudioRendererImpl::OnConfigChange, weak_factory_.GetWeakPtr()));
// Always post |init_cb_| because |this| could be destroyed if initialization
@@ -532,9 +535,9 @@ void AudioRendererImpl::Initialize(DemuxerStream* stream,
audio_clock_.reset(
new AudioClock(base::TimeDelta(), audio_parameters_.sample_rate()));
- audio_buffer_stream_->Initialize(
+ audio_decoder_stream_->Initialize(
stream,
- base::BindOnce(&AudioRendererImpl::OnAudioBufferStreamInitialized,
+ base::BindOnce(&AudioRendererImpl::OnAudioDecoderStreamInitialized,
weak_factory_.GetWeakPtr()),
cdm_context,
base::BindRepeating(&AudioRendererImpl::OnStatisticsUpdate,
@@ -543,15 +546,14 @@ void AudioRendererImpl::Initialize(DemuxerStream* stream,
weak_factory_.GetWeakPtr()));
}
-void AudioRendererImpl::OnAudioBufferStreamInitialized(bool success) {
+void AudioRendererImpl::OnAudioDecoderStreamInitialized(bool success) {
DVLOG(1) << __func__ << ": " << success;
DCHECK(task_runner_->BelongsToCurrentThread());
-
base::AutoLock auto_lock(lock_);
if (!success) {
state_ = kUninitialized;
- base::ResetAndReturn(&init_cb_).Run(DECODER_ERROR_NOT_SUPPORTED);
+ FinishInitialization(DECODER_ERROR_NOT_SUPPORTED);
return;
}
@@ -559,11 +561,12 @@ void AudioRendererImpl::OnAudioBufferStreamInitialized(bool success) {
DVLOG(1) << __func__ << ": Invalid audio parameters: "
<< audio_parameters_.AsHumanReadableString();
ChangeState_Locked(kUninitialized);
+
// TODO(flim): If the channel layout is discrete but channel count is 0, a
// possible cause is that the input stream has > 8 channels but there is no
// Web Audio renderer attached and no channel mixing matrices defined for
// hardware renderers. Adding one for previewing content could be useful.
- base::ResetAndReturn(&init_cb_).Run(PIPELINE_ERROR_INITIALIZATION_FAILED);
+ FinishInitialization(PIPELINE_ERROR_INITIALIZATION_FAILED);
return;
}
@@ -588,7 +591,20 @@ void AudioRendererImpl::OnAudioBufferStreamInitialized(bool success) {
}
DCHECK(!sink_playing_);
- base::ResetAndReturn(&init_cb_).Run(PIPELINE_OK);
+ FinishInitialization(PIPELINE_OK);
+}
+
+void AudioRendererImpl::FinishInitialization(PipelineStatus status) {
+ DCHECK(init_cb_);
+ TRACE_EVENT_ASYNC_END1("media", "AudioRendererImpl::Initialize", this,
+ "status", MediaLog::PipelineStatusToString(status));
+ std::move(init_cb_).Run(status);
+}
+
+void AudioRendererImpl::FinishFlush() {
+ DCHECK(flush_cb_);
+ TRACE_EVENT_ASYNC_END0("media", "AudioRendererImpl::Flush", this);
+ std::move(flush_cb_).Run();
}
void AudioRendererImpl::OnPlaybackError(PipelineStatus error) {
@@ -640,7 +656,7 @@ void AudioRendererImpl::SetPlayDelayCBForTesting(PlayDelayCBForTesting cb) {
}
void AudioRendererImpl::DecodedAudioReady(
- AudioBufferStream::Status status,
+ AudioDecoderStream::Status status,
const scoped_refptr<AudioBuffer>& buffer) {
DVLOG(2) << __func__ << "(" << status << ")";
DCHECK(task_runner_->BelongsToCurrentThread());
@@ -651,18 +667,18 @@ void AudioRendererImpl::DecodedAudioReady(
CHECK(pending_read_);
pending_read_ = false;
- if (status == AudioBufferStream::ABORTED ||
- status == AudioBufferStream::DEMUXER_READ_ABORTED) {
+ if (status == AudioDecoderStream::ABORTED ||
+ status == AudioDecoderStream::DEMUXER_READ_ABORTED) {
HandleAbortedReadOrDecodeError(PIPELINE_OK);
return;
}
- if (status == AudioBufferStream::DECODE_ERROR) {
+ if (status == AudioDecoderStream::DECODE_ERROR) {
HandleAbortedReadOrDecodeError(PIPELINE_ERROR_DECODE);
return;
}
- DCHECK_EQ(status, AudioBufferStream::OK);
+ DCHECK_EQ(status, AudioDecoderStream::OK);
DCHECK(buffer.get());
if (state_ == kFlushing) {
@@ -791,8 +807,8 @@ bool AudioRendererImpl::HandleDecodedBuffer_Locked(
stats.audio_memory_usage = memory_usage - last_audio_memory_usage_;
last_audio_memory_usage_ = memory_usage;
task_runner_->PostTask(FROM_HERE,
- base::Bind(&AudioRendererImpl::OnStatisticsUpdate,
- weak_factory_.GetWeakPtr(), stats));
+ base::BindOnce(&AudioRendererImpl::OnStatisticsUpdate,
+ weak_factory_.GetWeakPtr(), stats));
switch (state_) {
case kUninitialized:
@@ -829,7 +845,12 @@ void AudioRendererImpl::AttemptRead_Locked() {
return;
pending_read_ = true;
- audio_buffer_stream_->Read(base::BindOnce(
+
+ // Don't hold the lock while calling Read(), if the demuxer is busy this will
+ // block audio rendering for an extended period of time.
+ // |audio_decoder_stream_| is only accessed on |task_runner_| so this is safe.
+ base::AutoUnlock auto_unlock(lock_);
+ audio_decoder_stream_->Read(base::BindOnce(
&AudioRendererImpl::DecodedAudioReady, weak_factory_.GetWeakPtr()));
}
@@ -1035,16 +1056,16 @@ int AudioRendererImpl::Render(base::TimeDelta delay,
if (CanRead_Locked()) {
task_runner_->PostTask(FROM_HERE,
- base::Bind(&AudioRendererImpl::AttemptRead,
- weak_factory_.GetWeakPtr()));
+ base::BindOnce(&AudioRendererImpl::AttemptRead,
+ weak_factory_.GetWeakPtr()));
}
if (audio_clock_->front_timestamp() >= ended_timestamp_ &&
!rendered_end_of_stream_) {
rendered_end_of_stream_ = true;
task_runner_->PostTask(FROM_HERE,
- base::Bind(&AudioRendererImpl::OnPlaybackEnded,
- weak_factory_.GetWeakPtr()));
+ base::BindOnce(&AudioRendererImpl::OnPlaybackEnded,
+ weak_factory_.GetWeakPtr()));
}
}
@@ -1057,8 +1078,9 @@ void AudioRendererImpl::OnRenderError() {
// Post to |task_runner_| as this is called on the audio callback thread.
task_runner_->PostTask(
- FROM_HERE, base::Bind(&AudioRendererImpl::OnPlaybackError,
- weak_factory_.GetWeakPtr(), AUDIO_RENDERER_ERROR));
+ FROM_HERE,
+ base::BindOnce(&AudioRendererImpl::OnPlaybackError,
+ weak_factory_.GetWeakPtr(), AUDIO_RENDERER_ERROR));
}
void AudioRendererImpl::HandleAbortedReadOrDecodeError(PipelineStatus status) {
@@ -1080,7 +1102,7 @@ void AudioRendererImpl::HandleAbortedReadOrDecodeError(PipelineStatus status) {
MEDIA_LOG(ERROR, media_log_) << "audio error during flushing, status: "
<< MediaLog::PipelineStatusToString(status);
client_->OnError(status);
- base::ResetAndReturn(&flush_cb_).Run();
+ FinishFlush();
return;
case kFlushed:
@@ -1125,8 +1147,8 @@ void AudioRendererImpl::SetBufferingState_Locked(
buffering_state_ = buffering_state;
task_runner_->PostTask(
- FROM_HERE, base::Bind(&AudioRendererImpl::OnBufferingStateChange,
- weak_factory_.GetWeakPtr(), buffering_state_));
+ FROM_HERE, base::BindOnce(&AudioRendererImpl::OnBufferingStateChange,
+ weak_factory_.GetWeakPtr(), buffering_state_));
}
void AudioRendererImpl::ConfigureChannelMask() {
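
Editor's note: the AttemptRead_Locked() hunk above adds a base::AutoUnlock so that lock_ is released for the duration of the potentially slow Read() call instead of blocking the real-time audio callback. A minimal standalone sketch of that idea, using only the standard library; ScopedUnlock, AttemptRead and SlowBlockingRead are illustrative names, not Chromium's:

#include <mutex>

// Illustrative stand-in for base::AutoUnlock: releases an already-held lock
// for the lifetime of this object, then re-acquires it on destruction.
class ScopedUnlock {
 public:
  explicit ScopedUnlock(std::unique_lock<std::mutex>& lock) : lock_(lock) {
    lock_.unlock();
  }
  ~ScopedUnlock() { lock_.lock(); }

 private:
  std::unique_lock<std::mutex>& lock_;
};

void AttemptRead(std::unique_lock<std::mutex>& lock, bool& pending_read) {
  // State checks happen under the lock...
  if (pending_read)
    return;
  pending_read = true;

  // ...but the slow call runs with the lock released, so other threads
  // (e.g. the audio render callback) are not blocked while it executes.
  ScopedUnlock unlock(lock);
  // SlowBlockingRead();  // hypothetical stand-in for audio_decoder_stream_->Read()
}

int main() {
  std::mutex m;
  std::unique_lock<std::mutex> lock(m);
  bool pending_read = false;
  AttemptRead(lock, pending_read);
  return 0;
}
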
diff --git a/chromium/media/renderers/audio_renderer_impl.h b/chromium/media/renderers/audio_renderer_impl.h
index c23f7bd6f25..883c5e5579c 100644
--- a/chromium/media/renderers/audio_renderer_impl.h
+++ b/chromium/media/renderers/audio_renderer_impl.h
@@ -126,7 +126,7 @@ class MEDIA_EXPORT AudioRendererImpl
};
// Callback from the audio decoder delivering decoded audio samples.
- void DecodedAudioReady(AudioBufferStream::Status status,
+ void DecodedAudioReady(AudioDecoderStream::Status status,
const scoped_refptr<AudioBuffer>& buffer);
// Handles buffers that come out of decoder (MSE: after passing through
@@ -179,9 +179,12 @@ class MEDIA_EXPORT AudioRendererImpl
// This can only return true while in the kPlaying state.
bool IsBeforeStartTime(const scoped_refptr<AudioBuffer>& buffer);
- // Called upon AudioBufferStream initialization, or failure thereof (indicated
- // by the value of |success|).
- void OnAudioBufferStreamInitialized(bool succes);
+ // Called upon AudioDecoderStream initialization, or failure thereof
+ // (indicated by the value of |success|).
+ void OnAudioDecoderStreamInitialized(bool succes);
+
+ void FinishInitialization(PipelineStatus status);
+ void FinishFlush();
// Callback functions to be called on |client_|.
void OnPlaybackError(PipelineStatus error);
@@ -190,7 +193,7 @@ class MEDIA_EXPORT AudioRendererImpl
void OnBufferingStateChange(BufferingState state);
void OnWaitingForDecryptionKey();
- // Generally called by the AudioBufferStream when a config change occurs. May
+ // Generally called by the AudioDecoderStream when a config change occurs. May
// also be called internally with an empty config to reset config-based state.
// Will notify RenderClient when called with a valid config.
void OnConfigChange(const AudioDecoderConfig& config);
@@ -226,7 +229,7 @@ class MEDIA_EXPORT AudioRendererImpl
// may deadlock between |task_runner_| and the audio callback thread.
scoped_refptr<media::AudioRendererSink> sink_;
- std::unique_ptr<AudioBufferStream> audio_buffer_stream_;
+ std::unique_ptr<AudioDecoderStream> audio_decoder_stream_;
MediaLog* media_log_;
diff --git a/chromium/media/renderers/audio_renderer_impl_unittest.cc b/chromium/media/renderers/audio_renderer_impl_unittest.cc
index 9f27429a4e1..263471bb74d 100644
--- a/chromium/media/renderers/audio_renderer_impl_unittest.cc
+++ b/chromium/media/renderers/audio_renderer_impl_unittest.cc
@@ -242,7 +242,7 @@ class AudioRendererImplTest : public ::testing::Test, public RendererClient {
event.RunAndWaitForStatus(expected);
// We should have no reads.
- EXPECT_TRUE(decode_cb_.is_null());
+ EXPECT_TRUE(!decode_cb_);
}
void InitializeAndDestroy() {
@@ -262,7 +262,7 @@ class AudioRendererImplTest : public ::testing::Test, public RendererClient {
WaitableMessageLoopEvent event;
InitializeRenderer(&demuxer_stream_, event.GetPipelineStatusCB());
base::RunLoop().RunUntilIdle();
- DCHECK(!init_decoder_cb_.is_null());
+ DCHECK(init_decoder_cb_);
renderer_.reset();
event.RunAndWaitForStatus(PIPELINE_ERROR_ABORT);
@@ -309,29 +309,27 @@ class AudioRendererImplTest : public ::testing::Test, public RendererClient {
void StopTicking() { renderer_->StopTicking(); }
- bool IsReadPending() const {
- return !decode_cb_.is_null();
- }
+ bool IsReadPending() const { return !!decode_cb_; }
void WaitForPendingRead() {
SCOPED_TRACE("WaitForPendingRead()");
- if (!decode_cb_.is_null())
+ if (decode_cb_)
return;
- DCHECK(wait_for_pending_decode_cb_.is_null());
+ DCHECK(!wait_for_pending_decode_cb_);
WaitableMessageLoopEvent event;
wait_for_pending_decode_cb_ = event.GetClosure();
event.RunAndWait();
- DCHECK(!decode_cb_.is_null());
- DCHECK(wait_for_pending_decode_cb_.is_null());
+ DCHECK(decode_cb_);
+ DCHECK(!wait_for_pending_decode_cb_);
}
// Delivers decoded frames to |renderer_|.
void SatisfyPendingRead(InputFrames frames) {
CHECK_GT(frames.value, 0);
- CHECK(!decode_cb_.is_null());
+ CHECK(decode_cb_);
scoped_refptr<AudioBuffer> buffer;
if (hardware_params_.IsBitstreamFormat()) {
@@ -350,7 +348,7 @@ class AudioRendererImplTest : public ::testing::Test, public RendererClient {
}
void DeliverEndOfStream() {
- DCHECK(!decode_cb_.is_null());
+ DCHECK(decode_cb_);
// Return EOS buffer to trigger EOS frame.
EXPECT_CALL(demuxer_stream_, Read(_))
@@ -359,14 +357,12 @@ class AudioRendererImplTest : public ::testing::Test, public RendererClient {
// Satify pending |decode_cb_| to trigger a new DemuxerStream::Read().
message_loop_.task_runner()->PostTask(
- FROM_HERE,
- base::Bind(base::ResetAndReturn(&decode_cb_), DecodeStatus::OK));
+ FROM_HERE, base::BindOnce(std::move(decode_cb_), DecodeStatus::OK));
WaitForPendingRead();
message_loop_.task_runner()->PostTask(
- FROM_HERE,
- base::Bind(base::ResetAndReturn(&decode_cb_), DecodeStatus::OK));
+ FROM_HERE, base::BindOnce(std::move(decode_cb_), DecodeStatus::OK));
base::RunLoop().RunUntilIdle();
EXPECT_EQ(last_statistics_.audio_memory_usage,
@@ -471,21 +467,21 @@ class AudioRendererImplTest : public ::testing::Test, public RendererClient {
// TODO(scherkus): Make this a DCHECK after threading semantics are fixed.
if (base::MessageLoop::current() != &message_loop_) {
message_loop_.task_runner()->PostTask(
- FROM_HERE, base::Bind(&AudioRendererImplTest::DecodeDecoder,
- base::Unretained(this), buffer, decode_cb));
+ FROM_HERE, base::BindOnce(&AudioRendererImplTest::DecodeDecoder,
+ base::Unretained(this), buffer, decode_cb));
return;
}
- CHECK(decode_cb_.is_null()) << "Overlapping decodes are not permitted";
+ CHECK(!decode_cb_) << "Overlapping decodes are not permitted";
decode_cb_ = decode_cb;
// Wake up WaitForPendingRead() if needed.
- if (!wait_for_pending_decode_cb_.is_null())
- base::ResetAndReturn(&wait_for_pending_decode_cb_).Run();
+ if (wait_for_pending_decode_cb_)
+ std::move(wait_for_pending_decode_cb_).Run();
}
void ResetDecoder(const base::Closure& reset_cb) {
- if (!decode_cb_.is_null()) {
+ if (decode_cb_) {
// |reset_cb| will be called in DeliverBuffer(), after the decoder is
// flushed.
reset_cb_ = reset_cb;
@@ -497,14 +493,14 @@ class AudioRendererImplTest : public ::testing::Test, public RendererClient {
void DeliverBuffer(DecodeStatus status,
const scoped_refptr<AudioBuffer>& buffer) {
- CHECK(!decode_cb_.is_null());
+ CHECK(decode_cb_);
if (buffer.get() && !buffer->end_of_stream())
output_cb_.Run(buffer);
- base::ResetAndReturn(&decode_cb_).Run(status);
+ std::move(decode_cb_).Run(status);
- if (!reset_cb_.is_null())
- base::ResetAndReturn(&reset_cb_).Run();
+ if (reset_cb_)
+ std::move(reset_cb_).Run();
base::RunLoop().RunUntilIdle();
}
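
Editor's note: much of the churn in this file and the previous one is the mechanical switch from base::Bind plus base::ResetAndReturn(&cb).Run(...) to base::BindOnce plus std::move(cb).Run(...). A rough standalone equivalent of the one-shot callback semantics, standard library only; the OnceClosure type below is illustrative, not Chromium's base::OnceClosure:

#include <cassert>
#include <functional>
#include <utility>

// Illustrative one-shot callback: running it consumes it, so a second Run()
// is impossible by construction (mirroring base::OnceCallback).
class OnceClosure {
 public:
  OnceClosure() = default;
  explicit OnceClosure(std::function<void()> fn) : fn_(std::move(fn)) {}

  explicit operator bool() const { return static_cast<bool>(fn_); }

  // Callable only on an rvalue, e.g. std::move(cb).Run().
  void Run() && {
    assert(fn_);
    auto fn = std::move(fn_);
    fn_ = nullptr;  // Cleared as part of running; no ResetAndReturn() needed.
    fn();
  }

 private:
  std::function<void()> fn_;
};

int main() {
  OnceClosure flush_cb([] { /* notify that the flush finished */ });
  assert(flush_cb);           // Mirrors DCHECK(flush_cb_) in the diff above.
  std::move(flush_cb).Run();  // Consumes the callback.
  assert(!flush_cb);          // Now null, so DCHECK(!flush_cb_) would pass.
  return 0;
}
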
diff --git a/chromium/media/renderers/default_renderer_factory.cc b/chromium/media/renderers/default_renderer_factory.cc
index f7c2f8da402..6b379a6c8fd 100644
--- a/chromium/media/renderers/default_renderer_factory.cc
+++ b/chromium/media/renderers/default_renderer_factory.cc
@@ -78,7 +78,7 @@ std::unique_ptr<Renderer> DefaultRendererFactory::CreateRenderer(
media_log_));
GpuVideoAcceleratorFactories* gpu_factories = nullptr;
- if (!get_gpu_factories_cb_.is_null())
+ if (get_gpu_factories_cb_)
gpu_factories = get_gpu_factories_cb_.Run();
std::unique_ptr<GpuMemoryBufferVideoFramePool> gmb_pool;
diff --git a/chromium/media/renderers/paint_canvas_video_renderer.cc b/chromium/media/renderers/paint_canvas_video_renderer.cc
index 90bf175c21f..cdaac9614da 100644
--- a/chromium/media/renderers/paint_canvas_video_renderer.cc
+++ b/chromium/media/renderers/paint_canvas_video_renderer.cc
@@ -14,6 +14,7 @@
#include "cc/paint/paint_image.h"
#include "cc/paint/paint_image_builder.h"
#include "gpu/GLES2/gl2extchromium.h"
+#include "gpu/command_buffer/client/context_support.h"
#include "gpu/command_buffer/client/gles2_interface.h"
#include "gpu/command_buffer/common/capabilities.h"
#include "gpu/command_buffer/common/mailbox_holder.h"
@@ -275,6 +276,34 @@ void VideoFrameCopyTextureOrSubTexture(gpu::gles2::GLES2Interface* gl,
}
}
+void OnQueryDone(scoped_refptr<VideoFrame> video_frame,
+ gpu::gles2::GLES2Interface* gl,
+ unsigned query_id) {
+ gl->DeleteQueriesEXT(1, &query_id);
+ // |video_frame| is dropped here.
+}
+
+void SynchronizeVideoFrameRead(scoped_refptr<VideoFrame> video_frame,
+ gpu::gles2::GLES2Interface* gl,
+ gpu::ContextSupport* context_support) {
+ DCHECK(gl);
+ SyncTokenClientImpl client(gl);
+ video_frame->UpdateReleaseSyncToken(&client);
+
+ if (video_frame->metadata()->IsTrue(
+ VideoFrameMetadata::READ_LOCK_FENCES_ENABLED)) {
+ // |video_frame| must be kept alive during read operations.
+ DCHECK(context_support);
+ unsigned query_id = 0;
+ gl->GenQueriesEXT(1, &query_id);
+ DCHECK(query_id);
+ gl->BeginQueryEXT(GL_COMMANDS_COMPLETED_CHROMIUM, query_id);
+ gl->EndQueryEXT(GL_COMMANDS_COMPLETED_CHROMIUM);
+ context_support->SignalQuery(
+ query_id, base::BindOnce(&OnQueryDone, video_frame, gl, query_id));
+ }
+}
+
} // anonymous namespace
// Generates an RGB image from a VideoFrame. Convert YUV to RGB plain on GPU.
@@ -416,7 +445,8 @@ void PaintCanvasVideoRenderer::Paint(
const gfx::RectF& dest_rect,
cc::PaintFlags& flags,
VideoRotation video_rotation,
- const Context3D& context_3d) {
+ const Context3D& context_3d,
+ gpu::ContextSupport* context_support) {
DCHECK(thread_checker_.CalledOnValidThread());
if (flags.getAlpha() == 0) {
return;
@@ -509,22 +539,23 @@ void PaintCanvasVideoRenderer::Paint(
canvas->flush();
if (video_frame->HasTextures()) {
- DCHECK(gl);
- SyncTokenClientImpl client(gl);
- video_frame->UpdateReleaseSyncToken(&client);
+ // Synchronize |video_frame| with the read operations in UpdateLastImage(),
+ // which are triggered by canvas->flush().
+ SynchronizeVideoFrameRead(video_frame, gl, context_support);
}
}
void PaintCanvasVideoRenderer::Copy(
const scoped_refptr<VideoFrame>& video_frame,
cc::PaintCanvas* canvas,
- const Context3D& context_3d) {
+ const Context3D& context_3d,
+ gpu::ContextSupport* context_support) {
cc::PaintFlags flags;
flags.setBlendMode(SkBlendMode::kSrc);
flags.setFilterQuality(kLow_SkFilterQuality);
Paint(video_frame, canvas,
gfx::RectF(gfx::SizeF(video_frame->visible_rect().size())), flags,
- media::VIDEO_ROTATION_0, context_3d);
+ media::VIDEO_ROTATION_0, context_3d, context_support);
}
namespace {
@@ -901,14 +932,14 @@ void PaintCanvasVideoRenderer::CopyVideoFrameSingleTextureToGLTexture(
target, texture, internal_format, format,
type, level, premultiply_alpha, flip_y);
gl->DeleteTextures(1, &source_texture);
- gl->Flush();
-
- SyncTokenClientImpl client(gl);
- video_frame->UpdateReleaseSyncToken(&client);
+ gl->ShallowFlushCHROMIUM();
+ // The caller must call SynchronizeVideoFrameRead() after this operation, but
+ // we can't do that because we don't have the ContextSupport.
}
bool PaintCanvasVideoRenderer::CopyVideoFrameTexturesToGLTexture(
const Context3D& context_3d,
+ gpu::ContextSupport* context_support,
gpu::gles2::GLES2Interface* destination_gl,
const scoped_refptr<VideoFrame>& video_frame,
unsigned int target,
@@ -922,7 +953,9 @@ bool PaintCanvasVideoRenderer::CopyVideoFrameTexturesToGLTexture(
DCHECK(thread_checker_.CalledOnValidThread());
DCHECK(video_frame);
DCHECK(video_frame->HasTextures());
- if (video_frame->NumTextures() > 1) {
+ if (video_frame->NumTextures() > 1 ||
+ video_frame->metadata()->IsTrue(
+ VideoFrameMetadata::READ_LOCK_FENCES_ENABLED)) {
if (!context_3d.gr_context)
return false;
if (!UpdateLastImage(video_frame, context_3d))
@@ -936,7 +969,11 @@ bool PaintCanvasVideoRenderer::CopyVideoFrameTexturesToGLTexture(
if (!backend_texture.getGLTextureInfo(&texture_info))
return false;
+ // Synchronize |video_frame| with the read operations in UpdateLastImage(),
+ // which are triggered by getBackendTexture().
gpu::gles2::GLES2Interface* canvas_gl = context_3d.gl;
+ SynchronizeVideoFrameRead(video_frame, canvas_gl, context_support);
+
gpu::MailboxHolder mailbox_holder;
mailbox_holder.texture_target = texture_info.fTarget;
canvas_gl->ProduceTextureDirectCHROMIUM(texture_info.fID,
@@ -944,7 +981,8 @@ bool PaintCanvasVideoRenderer::CopyVideoFrameTexturesToGLTexture(
// Wait for mailbox creation on canvas context before consuming it and
// copying from it on the consumer context.
- canvas_gl->GenSyncTokenCHROMIUM(mailbox_holder.sync_token.GetData());
+ canvas_gl->GenUnverifiedSyncTokenCHROMIUM(
+ mailbox_holder.sync_token.GetData());
destination_gl->WaitSyncTokenCHROMIUM(
mailbox_holder.sync_token.GetConstData());
@@ -961,15 +999,13 @@ bool PaintCanvasVideoRenderer::CopyVideoFrameTexturesToGLTexture(
// Wait for destination context to consume mailbox before deleting it in
// canvas context.
gpu::SyncToken dest_sync_token;
- destination_gl->GenSyncTokenCHROMIUM(dest_sync_token.GetData());
+ destination_gl->GenUnverifiedSyncTokenCHROMIUM(dest_sync_token.GetData());
canvas_gl->WaitSyncTokenCHROMIUM(dest_sync_token.GetConstData());
-
- SyncTokenClientImpl client(canvas_gl);
- video_frame->UpdateReleaseSyncToken(&client);
} else {
CopyVideoFrameSingleTextureToGLTexture(
destination_gl, video_frame.get(), target, texture, internal_format,
format, type, level, premultiply_alpha, flip_y);
+ SynchronizeVideoFrameRead(video_frame, destination_gl, nullptr);
}
return true;
@@ -1207,8 +1243,9 @@ bool PaintCanvasVideoRenderer::UpdateLastImage(
return false;
last_id_ = video_frame->unique_id();
}
+
+ DCHECK(last_image_);
last_image_deleting_timer_.Reset();
- DCHECK(!!last_image_);
return true;
}
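
Editor's note: the new SynchronizeVideoFrameRead() above keeps the VideoFrame alive by binding it into a GL query completion callback (GL_COMMANDS_COMPLETED_CHROMIUM plus ContextSupport::SignalQuery), so the frame's backing memory cannot be recycled while the GPU may still read it. The ownership half of that pattern can be sketched without any GPU dependencies; all names below are illustrative stand-ins:

#include <functional>
#include <iostream>
#include <memory>
#include <utility>
#include <vector>

// Stand-in for a resource that must outlive an asynchronous read (like a
// VideoFrame with READ_LOCK_FENCES_ENABLED).
struct Frame {
  std::vector<unsigned char> pixels;
  ~Frame() { std::cout << "frame released\n"; }
};

// Stand-in for ContextSupport::SignalQuery(): completion callbacks are queued
// and run later, once the "GPU" has finished its reads.
std::vector<std::function<void()>> pending_signals;

void SignalWhenReadsComplete(std::function<void()> done) {
  pending_signals.push_back(std::move(done));
}

void StartAsyncRead(std::shared_ptr<Frame> frame) {
  // Binding the shared_ptr into the completion callback extends the frame's
  // lifetime until the callback is destroyed, just as the diff binds the
  // scoped_refptr<VideoFrame> into OnQueryDone().
  SignalWhenReadsComplete(
      [frame] { /* read complete; |frame| is released with this callback */ });
}

int main() {
  auto frame = std::make_shared<Frame>();
  StartAsyncRead(frame);
  frame.reset();  // Caller drops its reference; the pending callback keeps the frame alive.
  std::cout << "reads still in flight\n";

  // "GPU" signals completion: run and discard the callbacks, which drops the
  // last reference and frees the frame.
  std::vector<std::function<void()>> signals = std::move(pending_signals);
  pending_signals.clear();
  for (auto& signal : signals)
    signal();
  signals.clear();
  std::cout << "all reads complete\n";
  return 0;
}
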
diff --git a/chromium/media/renderers/paint_canvas_video_renderer.h b/chromium/media/renderers/paint_canvas_video_renderer.h
index d7321118485..d4752f33337 100644
--- a/chromium/media/renderers/paint_canvas_video_renderer.h
+++ b/chromium/media/renderers/paint_canvas_video_renderer.h
@@ -29,6 +29,7 @@ class RectF;
namespace gpu {
struct Capabilities;
+class ContextSupport;
}
namespace media {
@@ -39,25 +40,29 @@ class MEDIA_EXPORT PaintCanvasVideoRenderer {
PaintCanvasVideoRenderer();
~PaintCanvasVideoRenderer();
- // Paints |video_frame| on |canvas|, scaling and rotating the result to fit
- // dimensions specified by |dest_rect|.
+ // Paints |video_frame| translated and scaled to |dest_rect| on |canvas|.
+ //
// If the format of |video_frame| is PIXEL_FORMAT_NATIVE_TEXTURE, |context_3d|
- // must be provided.
+ // and |context_support| must be provided.
//
- // Black will be painted on |canvas| if |video_frame| is null.
+ // If |video_frame| is nullptr or an unsupported format, |dest_rect| will be
+ // painted black.
void Paint(const scoped_refptr<VideoFrame>& video_frame,
cc::PaintCanvas* canvas,
const gfx::RectF& dest_rect,
cc::PaintFlags& flags,
VideoRotation video_rotation,
- const Context3D& context_3d);
+ const Context3D& context_3d,
+ gpu::ContextSupport* context_support);
- // Copy |video_frame| on |canvas|.
+ // Paints |video_frame| scaled to its visible size on |canvas|.
+ //
// If the format of |video_frame| is PIXEL_FORMAT_NATIVE_TEXTURE, |context_3d|
- // must be provided.
+ // and |context_support| must be provided.
void Copy(const scoped_refptr<VideoFrame>& video_frame,
cc::PaintCanvas* canvas,
- const Context3D& context_3d);
+ const Context3D& context_3d,
+ gpu::ContextSupport* context_support);
// Convert the contents of |video_frame| to raw RGB pixels. |rgb_pixels|
// should point into a buffer large enough to hold as many 32 bit RGBA pixels
@@ -82,15 +87,12 @@ class MEDIA_EXPORT PaintCanvasVideoRenderer {
bool premultiply_alpha,
bool flip_y);
- // Copy the contents of texture of |video_frame| to texture |texture| in
- // context |destination_gl|.
- // |level|, |internal_format|, |type| specify target texture |texture|.
+ // Copy the contents of |video_frame| to |texture| of |destination_gl|.
+ //
// The format of |video_frame| must be VideoFrame::NATIVE_TEXTURE.
- // |context_3d| has a GrContext that may be used during the copy.
- // CorrectLastImageDimensions() ensures that the source texture will be
- // cropped to |visible_rect|. Returns true on success.
bool CopyVideoFrameTexturesToGLTexture(
const Context3D& context_3d,
+ gpu::ContextSupport* context_support,
gpu::gles2::GLES2Interface* destination_gl,
const scoped_refptr<VideoFrame>& video_frame,
unsigned int target,
@@ -168,8 +170,6 @@ class MEDIA_EXPORT PaintCanvasVideoRenderer {
// never be painted again, so we can release the resource.
void ResetCache();
- void CorrectLastImageDimensions(const SkIRect& visible_rect);
-
// Used for unit test.
SkISize LastImageDimensionsForTesting();
@@ -179,6 +179,8 @@ class MEDIA_EXPORT PaintCanvasVideoRenderer {
bool UpdateLastImage(const scoped_refptr<VideoFrame>& video_frame,
const Context3D& context_3d);
+ void CorrectLastImageDimensions(const SkIRect& visible_rect);
+
// Last image used to draw to the canvas.
cc::PaintImage last_image_;
diff --git a/chromium/media/renderers/paint_canvas_video_renderer_unittest.cc b/chromium/media/renderers/paint_canvas_video_renderer_unittest.cc
index 3bca9930233..bd3213055f2 100644
--- a/chromium/media/renderers/paint_canvas_video_renderer_unittest.cc
+++ b/chromium/media/renderers/paint_canvas_video_renderer_unittest.cc
@@ -225,7 +225,7 @@ void PaintCanvasVideoRendererTest::PaintWithoutFrame(cc::PaintCanvas* canvas) {
cc::PaintFlags flags;
flags.setFilterQuality(kLow_SkFilterQuality);
renderer_.Paint(nullptr, canvas, kNaturalRect, flags, VIDEO_ROTATION_0,
- Context3D());
+ Context3D(), nullptr);
}
void PaintCanvasVideoRendererTest::Paint(
@@ -260,13 +260,13 @@ void PaintCanvasVideoRendererTest::PaintRotated(
flags.setBlendMode(mode);
flags.setFilterQuality(kLow_SkFilterQuality);
renderer_.Paint(video_frame, canvas, dest_rect, flags, video_rotation,
- Context3D());
+ Context3D(), nullptr);
}
void PaintCanvasVideoRendererTest::Copy(
const scoped_refptr<VideoFrame>& video_frame,
cc::PaintCanvas* canvas) {
- renderer_.Copy(video_frame, canvas, Context3D());
+ renderer_.Copy(video_frame, canvas, Context3D(), nullptr);
}
TEST_F(PaintCanvasVideoRendererTest, NoFrame) {
@@ -564,7 +564,7 @@ TEST_F(PaintCanvasVideoRendererTest, Y16) {
flags.setFilterQuality(kNone_SkFilterQuality);
renderer_.Paint(video_frame, &canvas,
gfx::RectF(bitmap.width(), bitmap.height()), flags,
- VIDEO_ROTATION_0, Context3D());
+ VIDEO_ROTATION_0, Context3D(), nullptr);
for (int j = 0; j < bitmap.height(); j++) {
for (int i = 0; i < bitmap.width(); i++) {
const int value = i + j * bitmap.width();
@@ -657,43 +657,32 @@ TEST_F(PaintCanvasVideoRendererTest, ContextLost) {
cc::PaintFlags flags;
flags.setFilterQuality(kLow_SkFilterQuality);
renderer_.Paint(video_frame, &canvas, kNaturalRect, flags, VIDEO_ROTATION_90,
- context_3d);
+ context_3d, nullptr);
}
void EmptyCallback(const gpu::SyncToken& sync_token) {}
TEST_F(PaintCanvasVideoRendererTest, CorrectFrameSizeToVisibleRect) {
- int fWidth{16}, fHeight{16};
+ constexpr int fWidth{16}, fHeight{16};
SkImageInfo imInfo =
SkImageInfo::MakeN32(fWidth, fHeight, kOpaque_SkAlphaType);
- sk_sp<const GrGLInterface> glInterface(GrGLCreateNullInterface());
- sk_sp<GrContext> grContext = GrContext::MakeGL(std::move(glInterface));
-
- sk_sp<SkSurface> surface =
- SkSurface::MakeRenderTarget(grContext.get(), SkBudgeted::kYes, imInfo);
- cc::SkiaPaintCanvas canvas(surface->getCanvas());
+ cc::SkiaPaintCanvas canvas(AllocBitmap(kWidth, kHeight));
- TestGLES2Interface gles2;
- Context3D context_3d(&gles2, grContext.get());
gfx::Size coded_size(fWidth, fHeight);
gfx::Size visible_size(fWidth / 2, fHeight / 2);
- gpu::MailboxHolder mailbox_holders[VideoFrame::kMaxPlanes];
- for (size_t i = 0; i < VideoFrame::kMaxPlanes; i++) {
- mailbox_holders[i] = gpu::MailboxHolder(
- gpu::Mailbox::Generate(), gpu::SyncToken(), GL_TEXTURE_RECTANGLE_ARB);
- }
+ uint8_t memory[fWidth * fHeight * 2] = {0};
- auto video_frame = VideoFrame::WrapNativeTextures(
- PIXEL_FORMAT_I420, mailbox_holders, base::Bind(EmptyCallback), coded_size,
- gfx::Rect(visible_size), visible_size,
+ auto video_frame = media::VideoFrame::WrapExternalData(
+ media::PIXEL_FORMAT_Y16, coded_size, gfx::Rect(visible_size),
+ visible_size, &memory[0], fWidth * fHeight * 2,
base::TimeDelta::FromMilliseconds(4));
gfx::RectF visible_rect(visible_size.width(), visible_size.height());
cc::PaintFlags flags;
renderer_.Paint(video_frame, &canvas, visible_rect, flags, VIDEO_ROTATION_0,
- context_3d);
+ Context3D(), nullptr);
EXPECT_EQ(fWidth / 2, renderer_.LastImageDimensionsForTesting().width());
EXPECT_EQ(fWidth / 2, renderer_.LastImageDimensionsForTesting().height());
diff --git a/chromium/media/renderers/renderer_impl.cc b/chromium/media/renderers/renderer_impl.cc
index fe652014ee2..63e3f74e73c 100644
--- a/chromium/media/renderers/renderer_impl.cc
+++ b/chromium/media/renderers/renderer_impl.cc
@@ -15,6 +15,7 @@
#include "base/location.h"
#include "base/single_thread_task_runner.h"
#include "base/strings/string_number_conversions.h"
+#include "base/trace_event/trace_event.h"
#include "media/base/audio_decoder_config.h"
#include "media/base/audio_renderer.h"
#include "media/base/bind_to_current_loop.h"
@@ -133,11 +134,10 @@ RendererImpl::~RendererImpl() {
video_renderer_.reset();
audio_renderer_.reset();
- if (!init_cb_.is_null()) {
+ if (init_cb_)
FinishInitialization(PIPELINE_ERROR_ABORT);
- } else if (!flush_cb_.is_null()) {
- base::ResetAndReturn(&flush_cb_).Run();
- }
+ else if (flush_cb_)
+ FinishFlush();
}
void RendererImpl::Initialize(MediaResource* media_resource,
@@ -146,8 +146,9 @@ void RendererImpl::Initialize(MediaResource* media_resource,
DVLOG(1) << __func__;
DCHECK(task_runner_->BelongsToCurrentThread());
DCHECK_EQ(state_, STATE_UNINITIALIZED);
- DCHECK(!init_cb.is_null());
+ DCHECK(init_cb);
DCHECK(client);
+ TRACE_EVENT_ASYNC_BEGIN0("media", "RendererImpl::Initialize", this);
client_ = client;
media_resource_ = media_resource;
@@ -168,6 +169,7 @@ void RendererImpl::SetCdm(CdmContext* cdm_context,
DVLOG(1) << __func__;
DCHECK(task_runner_->BelongsToCurrentThread());
DCHECK(cdm_context);
+ TRACE_EVENT0("media", "RendererImpl::SetCdm");
if (cdm_context_) {
DVLOG(1) << "Switching CDM not supported.";
@@ -181,7 +183,7 @@ void RendererImpl::SetCdm(CdmContext* cdm_context,
if (state_ != STATE_INIT_PENDING_CDM)
return;
- DCHECK(!init_cb_.is_null());
+ DCHECK(init_cb_);
state_ = STATE_INITIALIZING;
InitializeAudioRenderer();
}
@@ -189,11 +191,13 @@ void RendererImpl::SetCdm(CdmContext* cdm_context,
void RendererImpl::Flush(const base::Closure& flush_cb) {
DVLOG(1) << __func__;
DCHECK(task_runner_->BelongsToCurrentThread());
- DCHECK(flush_cb_.is_null());
+ DCHECK(!flush_cb_);
DCHECK(!(pending_audio_track_change_ || pending_video_track_change_));
+ TRACE_EVENT_ASYNC_BEGIN0("media", "RendererImpl::Flush", this);
if (state_ == STATE_FLUSHED) {
- task_runner_->PostTask(FROM_HERE, flush_cb);
+ flush_cb_ = BindToCurrentLoop(flush_cb);
+ FinishFlush();
return;
}
@@ -213,6 +217,8 @@ void RendererImpl::Flush(const base::Closure& flush_cb) {
void RendererImpl::StartPlayingFrom(base::TimeDelta time) {
DVLOG(1) << __func__;
DCHECK(task_runner_->BelongsToCurrentThread());
+ TRACE_EVENT1("media", "RendererImpl::StartPlayingFrom", "time_us",
+ time.InMicroseconds());
if (state_ != STATE_FLUSHED) {
DCHECK_EQ(state_, STATE_ERROR);
@@ -235,6 +241,7 @@ void RendererImpl::StartPlayingFrom(base::TimeDelta time) {
void RendererImpl::SetPlaybackRate(double playback_rate) {
DVLOG(1) << __func__ << "(" << playback_rate << ")";
DCHECK(task_runner_->BelongsToCurrentThread());
+ TRACE_EVENT1("media", "RendererImpl::SetPlaybackRate", "rate", playback_rate);
// Playback rate changes are only carried out while playing.
if (state_ != STATE_PLAYING && state_ != STATE_FLUSHED)
@@ -334,15 +341,23 @@ bool RendererImpl::HasEncryptedStream() {
}
void RendererImpl::FinishInitialization(PipelineStatus status) {
- DCHECK(!init_cb_.is_null());
- base::ResetAndReturn(&init_cb_).Run(status);
+ DCHECK(init_cb_);
+ TRACE_EVENT_ASYNC_END1("media", "RendererImpl::Initialize", this, "status",
+ MediaLog::PipelineStatusToString(status));
+ std::move(init_cb_).Run(status);
+}
+
+void RendererImpl::FinishFlush() {
+ DCHECK(flush_cb_);
+ TRACE_EVENT_ASYNC_END0("media", "RendererImpl::Flush", this);
+ std::move(flush_cb_).Run();
}
void RendererImpl::InitializeAudioRenderer() {
DVLOG(1) << __func__;
DCHECK(task_runner_->BelongsToCurrentThread());
DCHECK_EQ(state_, STATE_INITIALIZING);
- DCHECK(!init_cb_.is_null());
+ DCHECK(init_cb_);
PipelineStatusCB done_cb =
base::Bind(&RendererImpl::OnAudioRendererInitializeDone, weak_this_);
@@ -354,7 +369,7 @@ void RendererImpl::InitializeAudioRenderer() {
if (!audio_stream) {
audio_renderer_.reset();
- task_runner_->PostTask(FROM_HERE, base::Bind(done_cb, PIPELINE_OK));
+ task_runner_->PostTask(FROM_HERE, base::BindOnce(done_cb, PIPELINE_OK));
return;
}
@@ -375,7 +390,7 @@ void RendererImpl::OnAudioRendererInitializeDone(PipelineStatus status) {
// OnError() may be fired at any time by the renderers, even if they thought
// they initialized successfully (due to delayed output device setup).
if (state_ != STATE_INITIALIZING) {
- DCHECK(init_cb_.is_null());
+ DCHECK(!init_cb_);
audio_renderer_.reset();
return;
}
@@ -385,7 +400,7 @@ void RendererImpl::OnAudioRendererInitializeDone(PipelineStatus status) {
return;
}
- DCHECK(!init_cb_.is_null());
+ DCHECK(init_cb_);
InitializeVideoRenderer();
}
@@ -393,7 +408,7 @@ void RendererImpl::InitializeVideoRenderer() {
DVLOG(1) << __func__;
DCHECK(task_runner_->BelongsToCurrentThread());
DCHECK_EQ(state_, STATE_INITIALIZING);
- DCHECK(!init_cb_.is_null());
+ DCHECK(init_cb_);
PipelineStatusCB done_cb =
base::Bind(&RendererImpl::OnVideoRendererInitializeDone, weak_this_);
@@ -405,7 +420,7 @@ void RendererImpl::InitializeVideoRenderer() {
if (!video_stream) {
video_renderer_.reset();
- task_runner_->PostTask(FROM_HERE, base::Bind(done_cb, PIPELINE_OK));
+ task_runner_->PostTask(FROM_HERE, base::BindOnce(done_cb, PIPELINE_OK));
return;
}
@@ -426,13 +441,13 @@ void RendererImpl::OnVideoRendererInitializeDone(PipelineStatus status) {
// OnError() may be fired at any time by the renderers, even if they thought
// they initialized successfully (due to delayed output device setup).
if (state_ != STATE_INITIALIZING) {
- DCHECK(init_cb_.is_null());
+ DCHECK(!init_cb_);
audio_renderer_.reset();
video_renderer_.reset();
return;
}
- DCHECK(!init_cb_.is_null());
+ DCHECK(init_cb_);
if (status != PIPELINE_OK) {
FinishInitialization(status);
@@ -457,7 +472,7 @@ void RendererImpl::FlushInternal() {
DVLOG(1) << __func__;
DCHECK(task_runner_->BelongsToCurrentThread());
DCHECK_EQ(state_, STATE_FLUSHING);
- DCHECK(!flush_cb_.is_null());
+ DCHECK(flush_cb_);
if (time_ticking_)
PausePlayback();
@@ -470,7 +485,7 @@ void RendererImpl::FlushAudioRenderer() {
DVLOG(1) << __func__;
DCHECK(task_runner_->BelongsToCurrentThread());
DCHECK_EQ(state_, STATE_FLUSHING);
- DCHECK(!flush_cb_.is_null());
+ DCHECK(flush_cb_);
if (!audio_renderer_ || !audio_playing_) {
OnAudioRendererFlushDone();
@@ -485,12 +500,12 @@ void RendererImpl::OnAudioRendererFlushDone() {
DCHECK(task_runner_->BelongsToCurrentThread());
if (state_ == STATE_ERROR) {
- DCHECK(flush_cb_.is_null());
+ DCHECK(!flush_cb_);
return;
}
DCHECK_EQ(state_, STATE_FLUSHING);
- DCHECK(!flush_cb_.is_null());
+ DCHECK(flush_cb_);
// If we had a deferred video renderer underflow prior to the flush, it should
// have been cleared by the audio renderer changing to BUFFERING_HAVE_NOTHING.
@@ -506,7 +521,7 @@ void RendererImpl::FlushVideoRenderer() {
DVLOG(1) << __func__;
DCHECK(task_runner_->BelongsToCurrentThread());
DCHECK_EQ(state_, STATE_FLUSHING);
- DCHECK(!flush_cb_.is_null());
+ DCHECK(flush_cb_);
if (!video_renderer_ || !video_playing_) {
OnVideoRendererFlushDone();
@@ -521,18 +536,18 @@ void RendererImpl::OnVideoRendererFlushDone() {
DCHECK(task_runner_->BelongsToCurrentThread());
if (state_ == STATE_ERROR) {
- DCHECK(flush_cb_.is_null());
+ DCHECK(!flush_cb_);
return;
}
DCHECK_EQ(state_, STATE_FLUSHING);
- DCHECK(!flush_cb_.is_null());
+ DCHECK(flush_cb_);
DCHECK_EQ(video_buffering_state_, BUFFERING_HAVE_NOTHING);
video_ended_ = false;
video_playing_ = false;
state_ = STATE_FLUSHED;
- base::ResetAndReturn(&flush_cb_).Run();
+ FinishFlush();
}
void RendererImpl::ReinitializeAudioRenderer(
@@ -665,10 +680,14 @@ void RendererImpl::OnBufferingStateChange(DemuxerStream::Type type,
? &audio_buffering_state_
: &video_buffering_state_;
- DVLOG(1) << __func__ << (type == DemuxerStream::AUDIO ? " audio " : " video ")
+ const auto* type_string = DemuxerStream::GetTypeName(type);
+ DVLOG(1) << __func__ << " " << type_string << " "
<< MediaLog::BufferingStateToString(*buffering_state) << " -> "
<< MediaLog::BufferingStateToString(new_buffering_state);
DCHECK(task_runner_->BelongsToCurrentThread());
+ TRACE_EVENT2("media", "RendererImpl::OnBufferingStateChange", "type",
+ type_string, "state",
+ MediaLog::BufferingStateToString(new_buffering_state));
bool was_waiting_for_enough_data = WaitingForEnoughData();
@@ -757,6 +776,8 @@ bool RendererImpl::WaitingForEnoughData() const {
void RendererImpl::PausePlayback() {
DVLOG(1) << __func__;
DCHECK(task_runner_->BelongsToCurrentThread());
+ TRACE_EVENT0("media", "RendererImpl::PausePlayback");
+
switch (state_) {
case STATE_PLAYING:
DCHECK(PlaybackHasEnded() || WaitingForEnoughData() ||
@@ -795,6 +816,7 @@ void RendererImpl::StartPlayback() {
DCHECK(task_runner_->BelongsToCurrentThread());
DCHECK_EQ(state_, STATE_PLAYING);
DCHECK(!WaitingForEnoughData());
+ TRACE_EVENT0("media", "RendererImpl::StartPlayback");
if (!time_ticking_) {
time_ticking_ = true;
@@ -808,9 +830,11 @@ void RendererImpl::StartPlayback() {
}
void RendererImpl::OnRendererEnded(DemuxerStream::Type type) {
- DVLOG(1) << __func__ << (type == DemuxerStream::AUDIO ? " audio" : " video");
+ const auto* type_string = DemuxerStream::GetTypeName(type);
+ DVLOG(1) << __func__ << ": " << type_string;
DCHECK(task_runner_->BelongsToCurrentThread());
DCHECK((type == DemuxerStream::AUDIO) || (type == DemuxerStream::VIDEO));
+ TRACE_EVENT1("media", "RendererImpl::OnRendererEnded", "type", type_string);
if (state_ != STATE_PLAYING)
return;
@@ -861,6 +885,8 @@ void RendererImpl::OnError(PipelineStatus error) {
DVLOG(1) << __func__ << "(" << error << ")";
DCHECK(task_runner_->BelongsToCurrentThread());
DCHECK_NE(PIPELINE_OK, error) << "PIPELINE_OK isn't an error!";
+ TRACE_EVENT1("media", "RendererImpl::OnError", "error",
+ MediaLog::PipelineStatusToString(error));
// An error has already been delivered.
if (state_ == STATE_ERROR)
@@ -869,7 +895,7 @@ void RendererImpl::OnError(PipelineStatus error) {
const State old_state = state_;
state_ = STATE_ERROR;
- if (!init_cb_.is_null()) {
+ if (init_cb_) {
DCHECK(old_state == STATE_INITIALIZING ||
old_state == STATE_INIT_PENDING_CDM);
FinishInitialization(error);
@@ -879,8 +905,8 @@ void RendererImpl::OnError(PipelineStatus error) {
// After OnError() returns, the pipeline may destroy |this|.
client_->OnError(error);
- if (!flush_cb_.is_null())
- base::ResetAndReturn(&flush_cb_).Run();
+ if (flush_cb_)
+ FinishFlush();
}
void RendererImpl::OnWaitingForDecryptionKey() {
@@ -923,6 +949,7 @@ void RendererImpl::OnSelectedVideoTracksChanged(
const std::vector<DemuxerStream*>& enabled_tracks,
base::OnceClosure change_completed_cb) {
DCHECK(task_runner_->BelongsToCurrentThread());
+ TRACE_EVENT0("media", "RendererImpl::OnSelectedVideoTracksChanged");
DCHECK_LT(enabled_tracks.size(), 2u);
DemuxerStream* stream = enabled_tracks.empty() ? nullptr : enabled_tracks[0];
@@ -955,6 +982,7 @@ void RendererImpl::OnEnabledAudioTracksChanged(
const std::vector<DemuxerStream*>& enabled_tracks,
base::OnceClosure change_completed_cb) {
DCHECK(task_runner_->BelongsToCurrentThread());
+ TRACE_EVENT0("media", "RendererImpl::OnEnabledAudioTracksChanged");
DCHECK_LT(enabled_tracks.size(), 2u);
DemuxerStream* stream = enabled_tracks.empty() ? nullptr : enabled_tracks[0];
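
Editor's note: RendererImpl now routes every init/flush completion through FinishInitialization()/FinishFlush(), so the TRACE_EVENT_ASYNC_BEGIN0 emitted in Initialize()/Flush() is always closed by a matching TRACE_EVENT_ASYNC_END* before the stored callback is consumed. A compile-only sketch of that pairing, with plain logging standing in for the trace macros; names and types are illustrative, not Chromium's:

#include <cstdio>
#include <functional>
#include <string>
#include <utility>

// Stand-ins for TRACE_EVENT_ASYNC_BEGIN0 / TRACE_EVENT_ASYNC_END1: the same
// (name, id) pair must appear in both calls for the span to be well formed.
void TraceAsyncBegin(const char* name, const void* id) {
  std::printf("BEGIN %s id=%p\n", name, id);
}
void TraceAsyncEnd(const char* name, const void* id, const std::string& status) {
  std::printf("END   %s id=%p status=%s\n", name, id, status.c_str());
}

class Renderer {
 public:
  void Initialize(std::function<void(int)> init_cb) {
    TraceAsyncBegin("Renderer::Initialize", this);
    init_cb_ = std::move(init_cb);
    // ...asynchronous work would eventually call FinishInitialization()...
    FinishInitialization(0);  // 0 standing in for PIPELINE_OK.
  }

 private:
  // Single choke point: every exit path (success, error, destruction) ends
  // the async trace span and consumes |init_cb_| exactly once.
  void FinishInitialization(int status) {
    TraceAsyncEnd("Renderer::Initialize", this, status == 0 ? "OK" : "ERROR");
    auto cb = std::move(init_cb_);
    init_cb_ = nullptr;
    cb(status);
  }

  std::function<void(int)> init_cb_;
};

int main() {
  Renderer renderer;
  renderer.Initialize([](int status) { std::printf("init done: %d\n", status); });
  return 0;
}
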
diff --git a/chromium/media/renderers/renderer_impl.h b/chromium/media/renderers/renderer_impl.h
index e33e72686c3..18f63848ccd 100644
--- a/chromium/media/renderers/renderer_impl.h
+++ b/chromium/media/renderers/renderer_impl.h
@@ -98,6 +98,7 @@ class MEDIA_EXPORT RendererImpl : public Renderer {
bool HasEncryptedStream();
void FinishInitialization(PipelineStatus status);
+ void FinishFlush();
// Helper functions and callbacks for Initialize().
void InitializeAudioRenderer();
diff --git a/chromium/media/renderers/video_renderer_impl.cc b/chromium/media/renderers/video_renderer_impl.cc
index 997a94916b4..bb59b11cd2a 100644
--- a/chromium/media/renderers/video_renderer_impl.cc
+++ b/chromium/media/renderers/video_renderer_impl.cc
@@ -138,11 +138,11 @@ VideoRendererImpl::VideoRendererImpl(
VideoRendererImpl::~VideoRendererImpl() {
DCHECK(task_runner_->BelongsToCurrentThread());
- if (!init_cb_.is_null())
- base::ResetAndReturn(&init_cb_).Run(PIPELINE_ERROR_ABORT);
+ if (init_cb_)
+ FinishInitialization(PIPELINE_ERROR_ABORT);
- if (!flush_cb_.is_null())
- base::ResetAndReturn(&flush_cb_).Run();
+ if (flush_cb_)
+ FinishFlush();
if (sink_started_)
StopSink();
@@ -164,25 +164,26 @@ void VideoRendererImpl::Flush(const base::Closure& callback) {
if (buffering_state_ != BUFFERING_HAVE_NOTHING) {
buffering_state_ = BUFFERING_HAVE_NOTHING;
task_runner_->PostTask(
- FROM_HERE, base::Bind(&VideoRendererImpl::OnBufferingStateChange,
- weak_factory_.GetWeakPtr(), buffering_state_));
+ FROM_HERE,
+ base::BindOnce(&VideoRendererImpl::OnBufferingStateChange,
+ weak_factory_.GetWeakPtr(), buffering_state_));
}
received_end_of_stream_ = false;
rendered_end_of_stream_ = false;
- // Reset |video_frame_stream_| and drop any pending read callbacks from it.
+ // Reset |video_decoder_stream_| and drop any pending read callbacks from it.
pending_read_ = false;
if (gpu_memory_buffer_pool_)
gpu_memory_buffer_pool_->Abort();
frame_callback_weak_factory_.InvalidateWeakPtrs();
- video_frame_stream_->Reset(
- base::BindOnce(&VideoRendererImpl::OnVideoFrameStreamResetDone,
+ video_decoder_stream_->Reset(
+ base::BindOnce(&VideoRendererImpl::OnVideoDecoderStreamResetDone,
weak_factory_.GetWeakPtr()));
// To avoid unnecessary work by VDAs, only delete queued frames after
- // resetting |video_frame_stream_|. If this is done in the opposite order VDAs
- // will get a bunch of ReusePictureBuffer() calls before the Reset(), which
- // they may use to output more frames that won't be used.
+ // resetting |video_decoder_stream_|. If this is done in the opposite order
+ // VDAs will get a bunch of ReusePictureBuffer() calls before the Reset(),
+ // which they may use to output more frames that won't be used.
algorithm_->Reset();
painted_first_frame_ = false;
@@ -202,7 +203,7 @@ void VideoRendererImpl::StartPlayingFrom(base::TimeDelta timestamp) {
start_timestamp_ = timestamp;
painted_first_frame_ = false;
last_render_time_ = last_frame_ready_time_ = base::TimeTicks();
- video_frame_stream_->SkipPrepareUntil(start_timestamp_);
+ video_decoder_stream_->SkipPrepareUntil(start_timestamp_);
AttemptRead_Locked();
}
@@ -213,24 +214,26 @@ void VideoRendererImpl::Initialize(
const TimeSource::WallClockTimeCB& wall_clock_time_cb,
const PipelineStatusCB& init_cb) {
DCHECK(task_runner_->BelongsToCurrentThread());
+ TRACE_EVENT_ASYNC_BEGIN0("media", "VideoRendererImpl::Initialize", this);
+
base::AutoLock auto_lock(lock_);
DCHECK(stream);
DCHECK_EQ(stream->type(), DemuxerStream::VIDEO);
- DCHECK(!init_cb.is_null());
- DCHECK(!wall_clock_time_cb.is_null());
+ DCHECK(init_cb);
+ DCHECK(wall_clock_time_cb);
DCHECK(kUninitialized == state_ || kFlushed == state_);
DCHECK(!was_background_rendering_);
DCHECK(!time_progressing_);
- video_frame_stream_.reset(new VideoFrameStream(
- std::make_unique<VideoFrameStream::StreamTraits>(media_log_),
+ video_decoder_stream_.reset(new VideoDecoderStream(
+ std::make_unique<VideoDecoderStream::StreamTraits>(media_log_),
task_runner_, create_video_decoders_cb_, media_log_));
- video_frame_stream_->set_config_change_observer(base::Bind(
+ video_decoder_stream_->set_config_change_observer(base::BindRepeating(
&VideoRendererImpl::OnConfigChange, weak_factory_.GetWeakPtr()));
if (gpu_memory_buffer_pool_) {
- video_frame_stream_->SetPrepareCB(base::BindRepeating(
+ video_decoder_stream_->SetPrepareCB(base::BindRepeating(
&GpuMemoryBufferVideoFramePool::MaybeCreateHardwareFrame,
- // Safe since VideoFrameStream won't issue calls after destruction.
+ // Safe since VideoDecoderStream won't issue calls after destruction.
base::Unretained(gpu_memory_buffer_pool_.get())));
}
@@ -251,9 +254,9 @@ void VideoRendererImpl::Initialize(
current_decoder_config_ = stream->video_decoder_config();
DCHECK(current_decoder_config_.IsValidConfig());
- video_frame_stream_->Initialize(
+ video_decoder_stream_->Initialize(
stream,
- base::BindOnce(&VideoRendererImpl::OnVideoFrameStreamInitialized,
+ base::BindOnce(&VideoRendererImpl::OnVideoDecoderStreamInitialized,
weak_factory_.GetWeakPtr()),
cdm_context,
base::BindRepeating(&VideoRendererImpl::OnStatisticsUpdate,
@@ -291,8 +294,8 @@ scoped_refptr<VideoFrame> VideoRendererImpl::Render(
// held already and it fire the state changes in the wrong order.
DVLOG(3) << __func__ << " posted TransitionToHaveNothing.";
task_runner_->PostTask(
- FROM_HERE, base::Bind(&VideoRendererImpl::TransitionToHaveNothing,
- weak_factory_.GetWeakPtr()));
+ FROM_HERE, base::BindOnce(&VideoRendererImpl::TransitionToHaveNothing,
+ weak_factory_.GetWeakPtr()));
}
// We don't count dropped frames in the background to avoid skewing the count
@@ -309,9 +312,9 @@ scoped_refptr<VideoFrame> VideoRendererImpl::Render(
// the time it runs (may be delayed up to 50ms for complex decodes!) we might.
task_runner_->PostTask(
FROM_HERE,
- base::Bind(&VideoRendererImpl::AttemptReadAndCheckForMetadataChanges,
- weak_factory_.GetWeakPtr(), result->format(),
- result->natural_size()));
+ base::BindOnce(&VideoRendererImpl::AttemptReadAndCheckForMetadataChanges,
+ weak_factory_.GetWeakPtr(), result->format(),
+ result->natural_size()));
return result;
}
@@ -321,14 +324,14 @@ void VideoRendererImpl::OnFrameDropped() {
algorithm_->OnLastFrameDropped();
}
-void VideoRendererImpl::OnVideoFrameStreamInitialized(bool success) {
+void VideoRendererImpl::OnVideoDecoderStreamInitialized(bool success) {
DCHECK(task_runner_->BelongsToCurrentThread());
base::AutoLock auto_lock(lock_);
DCHECK_EQ(state_, kInitializing);
if (!success) {
state_ = kUninitialized;
- base::ResetAndReturn(&init_cb_).Run(DECODER_ERROR_NOT_SUPPORTED);
+ FinishInitialization(DECODER_ERROR_NOT_SUPPORTED);
return;
}
@@ -340,7 +343,20 @@ void VideoRendererImpl::OnVideoFrameStreamInitialized(bool success) {
if (!drop_frames_)
algorithm_->disable_frame_dropping();
- base::ResetAndReturn(&init_cb_).Run(PIPELINE_OK);
+ FinishInitialization(PIPELINE_OK);
+}
+
+void VideoRendererImpl::FinishInitialization(PipelineStatus status) {
+ DCHECK(init_cb_);
+ TRACE_EVENT_ASYNC_END1("media", "VideoRendererImpl::Initialize", this,
+ "status", MediaLog::PipelineStatusToString(status));
+ std::move(init_cb_).Run(status);
+}
+
+void VideoRendererImpl::FinishFlush() {
+ DCHECK(flush_cb_);
+ TRACE_EVENT_ASYNC_END0("media", "VideoRendererImpl::Flush", this);
+ std::move(flush_cb_).Run();
}
void VideoRendererImpl::OnPlaybackError(PipelineStatus error) {
@@ -350,6 +366,12 @@ void VideoRendererImpl::OnPlaybackError(PipelineStatus error) {
void VideoRendererImpl::OnPlaybackEnded() {
DCHECK(task_runner_->BelongsToCurrentThread());
+ {
+ // Send one last stats update so things like memory usage are correct.
+ base::AutoLock auto_lock(lock_);
+ UpdateStats_Locked(true);
+ }
+
client_->OnEnded();
}
@@ -445,7 +467,7 @@ void VideoRendererImpl::OnTimeStopped() {
}
}
-void VideoRendererImpl::FrameReady(VideoFrameStream::Status status,
+void VideoRendererImpl::FrameReady(VideoDecoderStream::Status status,
const scoped_refptr<VideoFrame>& frame) {
DCHECK(task_runner_->BelongsToCurrentThread());
base::AutoLock auto_lock(lock_);
@@ -453,18 +475,18 @@ void VideoRendererImpl::FrameReady(VideoFrameStream::Status status,
CHECK(pending_read_);
pending_read_ = false;
- if (status == VideoFrameStream::DECODE_ERROR) {
+ if (status == VideoDecoderStream::DECODE_ERROR) {
DCHECK(!frame);
task_runner_->PostTask(
FROM_HERE,
- base::Bind(&VideoRendererImpl::OnPlaybackError,
- weak_factory_.GetWeakPtr(), PIPELINE_ERROR_DECODE));
+ base::BindOnce(&VideoRendererImpl::OnPlaybackError,
+ weak_factory_.GetWeakPtr(), PIPELINE_ERROR_DECODE));
return;
}
// Can happen when demuxers are preparing for a new Seek().
if (!frame) {
- DCHECK_EQ(status, VideoFrameStream::DEMUXER_READ_ABORTED);
+ DCHECK_EQ(status, VideoDecoderStream::DEMUXER_READ_ABORTED);
return;
}
@@ -477,7 +499,7 @@ void VideoRendererImpl::FrameReady(VideoFrameStream::Status status,
frame->metadata()->IsTrue(VideoFrameMetadata::END_OF_STREAM);
const bool is_before_start_time =
!is_eos && IsBeforeStartTime(frame->timestamp());
- const bool cant_read = !video_frame_stream_->CanReadWithoutStalling();
+ const bool cant_read = !video_decoder_stream_->CanReadWithoutStalling();
if (is_eos) {
DCHECK(!received_end_of_stream_);
@@ -502,7 +524,7 @@ void VideoRendererImpl::FrameReady(VideoFrameStream::Status status,
// we may resume too soon after a track change in the low delay case.
if (!frame->metadata()->HasKey(VideoFrameMetadata::FRAME_DURATION)) {
frame->metadata()->SetTimeDelta(VideoFrameMetadata::FRAME_DURATION,
- video_frame_stream_->AverageDuration());
+ video_decoder_stream_->AverageDuration());
}
AddReadyFrame_Locked(frame);
@@ -559,7 +581,7 @@ bool VideoRendererImpl::HaveEnoughData_Locked() const {
if (was_background_rendering_ && last_frame_ready_time_ >= last_render_time_)
return true;
- if (!low_delay_ && video_frame_stream_->CanReadWithoutStalling())
+ if (!low_delay_ && video_decoder_stream_->CanReadWithoutStalling())
return false;
// Note: We still require an effective frame in the stalling case since this
@@ -576,8 +598,8 @@ void VideoRendererImpl::TransitionToHaveEnough_Locked() {
buffering_state_ = BUFFERING_HAVE_ENOUGH;
task_runner_->PostTask(
- FROM_HERE, base::Bind(&VideoRendererImpl::OnBufferingStateChange,
- weak_factory_.GetWeakPtr(), buffering_state_));
+ FROM_HERE, base::BindOnce(&VideoRendererImpl::OnBufferingStateChange,
+ weak_factory_.GetWeakPtr(), buffering_state_));
}
void VideoRendererImpl::TransitionToHaveNothing() {
@@ -598,8 +620,8 @@ void VideoRendererImpl::TransitionToHaveNothing_Locked() {
buffering_state_ = BUFFERING_HAVE_NOTHING;
task_runner_->PostTask(
- FROM_HERE, base::Bind(&VideoRendererImpl::OnBufferingStateChange,
- weak_factory_.GetWeakPtr(), buffering_state_));
+ FROM_HERE, base::BindOnce(&VideoRendererImpl::OnBufferingStateChange,
+ weak_factory_.GetWeakPtr(), buffering_state_));
}
void VideoRendererImpl::AddReadyFrame_Locked(
@@ -633,7 +655,7 @@ void VideoRendererImpl::AttemptRead_Locked() {
switch (state_) {
case kPlaying:
pending_read_ = true;
- video_frame_stream_->Read(
+ video_decoder_stream_->Read(
base::BindOnce(&VideoRendererImpl::FrameReady,
frame_callback_weak_factory_.GetWeakPtr()));
return;
@@ -645,7 +667,7 @@ void VideoRendererImpl::AttemptRead_Locked() {
}
}
-void VideoRendererImpl::OnVideoFrameStreamResetDone() {
+void VideoRendererImpl::OnVideoDecoderStreamResetDone() {
// We don't need to acquire the |lock_| here, because we can only get here
// when Flush is in progress, so rendering and video sink must be stopped.
DCHECK(task_runner_->BelongsToCurrentThread());
@@ -656,17 +678,19 @@ void VideoRendererImpl::OnVideoFrameStreamResetDone() {
DCHECK_EQ(buffering_state_, BUFFERING_HAVE_NOTHING);
state_ = kFlushed;
- base::ResetAndReturn(&flush_cb_).Run();
+ FinishFlush();
}
-void VideoRendererImpl::UpdateStats_Locked() {
+void VideoRendererImpl::UpdateStats_Locked(bool force_update) {
DCHECK(task_runner_->BelongsToCurrentThread());
lock_.AssertAcquired();
// No need to check for `stats_.video_frames_decoded_power_efficient` because
// if it is greater than 0, `stats_.video_frames_decoded` will too.
- if (!stats_.video_frames_decoded && !stats_.video_frames_dropped)
+ if (!force_update && !stats_.video_frames_decoded &&
+ !stats_.video_frames_dropped) {
return;
+ }
if (stats_.video_frames_dropped) {
TRACE_EVENT_INSTANT2("media", "VideoFramesDropped",
@@ -733,8 +757,8 @@ void VideoRendererImpl::MaybeFireEndedCallback_Locked(bool time_progressing) {
algorithm_->average_frame_duration().is_zero())) {
rendered_end_of_stream_ = true;
task_runner_->PostTask(FROM_HERE,
- base::Bind(&VideoRendererImpl::OnPlaybackEnded,
- weak_factory_.GetWeakPtr()));
+ base::BindOnce(&VideoRendererImpl::OnPlaybackEnded,
+ weak_factory_.GetWeakPtr()));
}
}
@@ -754,7 +778,8 @@ base::TimeTicks VideoRendererImpl::GetCurrentMediaTimeAsWallClockTime() {
}
bool VideoRendererImpl::IsBeforeStartTime(base::TimeDelta timestamp) {
- return timestamp + video_frame_stream_->AverageDuration() < start_timestamp_;
+ return timestamp + video_decoder_stream_->AverageDuration() <
+ start_timestamp_;
}
void VideoRendererImpl::RemoveFramesForUnderflowOrBackgroundRendering() {
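
Editor's note: the new UpdateStats_Locked(bool force_update = false) default argument lets OnPlaybackEnded() push one final statistics update (so values such as memory usage settle to their end-of-stream numbers) while every other call site keeps skipping no-op updates. A minimal sketch of that shape, standard library only, names illustrative:

#include <cstdio>

struct Stats {
  int frames_decoded = 0;
  int frames_dropped = 0;
  long memory_usage = 0;
};

class StatsReporter {
 public:
  // Skips the report when nothing changed, unless |force_update| is set,
  // mirroring UpdateStats_Locked(bool force_update = false) in the diff.
  void Update(bool force_update = false) {
    if (!force_update && !pending_.frames_decoded && !pending_.frames_dropped)
      return;
    std::printf("decoded=%d dropped=%d memory=%ld\n", pending_.frames_decoded,
                pending_.frames_dropped, pending_.memory_usage);
    pending_.frames_decoded = 0;
    pending_.frames_dropped = 0;
  }

  void OnPlaybackEnded() {
    // One last forced update so the final memory usage is reported even if no
    // new frames were decoded since the previous update.
    Update(/*force_update=*/true);
  }

  Stats pending_;
};

int main() {
  StatsReporter reporter;
  reporter.pending_.memory_usage = 1 << 20;
  reporter.Update();           // No decoded/dropped frames: skipped.
  reporter.OnPlaybackEnded();  // Forced: reported anyway.
  return 0;
}
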
diff --git a/chromium/media/renderers/video_renderer_impl.h b/chromium/media/renderers/video_renderer_impl.h
index 49cc84d392e..0eb6391fecf 100644
--- a/chromium/media/renderers/video_renderer_impl.h
+++ b/chromium/media/renderers/video_renderer_impl.h
@@ -37,7 +37,7 @@ class TickClock;
namespace media {
-// VideoRendererImpl handles reading from a VideoFrameStream storing the
+// VideoRendererImpl handles reading from a VideoDecoderStream storing the
// results in a queue of decoded frames and executing a callback when a frame is
// ready for rendering.
class MEDIA_EXPORT VideoRendererImpl
@@ -86,8 +86,11 @@ class MEDIA_EXPORT VideoRendererImpl
void OnFrameDropped() override;
private:
- // Callback for |video_frame_stream_| initialization.
- void OnVideoFrameStreamInitialized(bool success);
+ // Callback for |video_decoder_stream_| initialization.
+ void OnVideoDecoderStreamInitialized(bool success);
+
+ void FinishInitialization(PipelineStatus status);
+ void FinishFlush();
// Functions to notify certain events to the RendererClient.
void OnPlaybackError(PipelineStatus error);
@@ -96,25 +99,25 @@ class MEDIA_EXPORT VideoRendererImpl
void OnBufferingStateChange(BufferingState state);
void OnWaitingForDecryptionKey();
- // Called by the VideoFrameStream when a config change occurs. Will notify
+ // Called by the VideoDecoderStream when a config change occurs. Will notify
// RenderClient of the new config.
void OnConfigChange(const VideoDecoderConfig& config);
- // Callback for |video_frame_stream_| to deliver decoded video frames and
+ // Callback for |video_decoder_stream_| to deliver decoded video frames and
// report video decoding status.
- void FrameReady(VideoFrameStream::Status status,
+ void FrameReady(VideoDecoderStream::Status status,
const scoped_refptr<VideoFrame>& frame);
// Helper method for enqueueing a frame to |algorithm_|.
void AddReadyFrame_Locked(const scoped_refptr<VideoFrame>& frame);
// Helper method that schedules an asynchronous read from the
- // |video_frame_stream_| as long as there isn't a pending read and we have
+ // |video_decoder_stream_| as long as there isn't a pending read and we have
// capacity.
void AttemptRead_Locked();
- // Called when VideoFrameStream::Reset() completes.
- void OnVideoFrameStreamResetDone();
+ // Called when VideoDecoderStream::Reset() completes.
+ void OnVideoDecoderStreamResetDone();
// Returns true if the renderer has enough data for playback purposes.
// Note that having enough data may be due to reaching end of stream.
@@ -124,8 +127,9 @@ class MEDIA_EXPORT VideoRendererImpl
void TransitionToHaveNothing_Locked();
// Runs |statistics_cb_| with |frames_decoded_| and |frames_dropped_|, resets
- // them to 0.
- void UpdateStats_Locked();
+ // them to 0. If |force_update| is true, sends an update even if no frames
+ // have been decoded since the last update.
+ void UpdateStats_Locked(bool force_update = false);
// Returns true if there is no more room for additional buffered frames.
bool HaveReachedBufferingCap() const;
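The default argument keeps every existing UpdateStats_Locked() call unchanged; a forced update would presumably come from paths where no new frames were decoded but a delta (for example, freed frame memory) still has to be reported. A hypothetical call site, not shown in the hunks here:

  // Hypothetical: push a stats update even though nothing new was decoded,
  // e.g. so released video memory is reported after queued frames are freed.
  UpdateStats_Locked(/*force_update=*/true);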
@@ -202,11 +206,11 @@ class MEDIA_EXPORT VideoRendererImpl
// Pool of GpuMemoryBuffers and resources used to create hardware frames.
// Ensure this is destructed after |algorithm_| for optimal memory release
// when frames are still held by the compositor. Must be destructed after
- // |video_frame_stream_| since it holds a callback to the pool.
+ // |video_decoder_stream_| since it holds a callback to the pool.
std::unique_ptr<GpuMemoryBufferVideoFramePool> gpu_memory_buffer_pool_;
// Provides video frames to VideoRendererImpl.
- std::unique_ptr<VideoFrameStream> video_frame_stream_;
+ std::unique_ptr<VideoDecoderStream> video_decoder_stream_;
MediaLog* media_log_;
@@ -244,7 +248,7 @@ class MEDIA_EXPORT VideoRendererImpl
// CreateVideoDecodersCB.
CreateVideoDecodersCB create_video_decoders_cb_;
- // Keep track of the outstanding read on the VideoFrameStream. Flushing can
+ // Keep track of the outstanding read on the VideoDecoderStream. Flushing can
// only complete once the read has completed.
bool pending_read_;
diff --git a/chromium/media/renderers/video_renderer_impl_unittest.cc b/chromium/media/renderers/video_renderer_impl_unittest.cc
index ce74ddd075f..7b6c4ce88de 100644
--- a/chromium/media/renderers/video_renderer_impl_unittest.cc
+++ b/chromium/media/renderers/video_renderer_impl_unittest.cc
@@ -209,9 +209,7 @@ class VideoRendererImplTest : public testing::Test {
decode_results_.push_back(std::make_pair(status, frame));
}
- bool IsReadPending() {
- return !decode_cb_.is_null();
- }
+ bool IsReadPending() { return !!decode_cb_; }
void WaitForError(PipelineStatus expected) {
SCOPED_TRACE(base::StringPrintf("WaitForError(%d)", expected));
@@ -232,21 +230,21 @@ class VideoRendererImplTest : public testing::Test {
void WaitForPendingDecode() {
SCOPED_TRACE("WaitForPendingDecode()");
- if (!decode_cb_.is_null())
+ if (decode_cb_)
return;
- DCHECK(wait_for_pending_decode_cb_.is_null());
+ DCHECK(!wait_for_pending_decode_cb_);
WaitableMessageLoopEvent event;
wait_for_pending_decode_cb_ = event.GetClosure();
event.RunAndWait();
- DCHECK(!decode_cb_.is_null());
- DCHECK(wait_for_pending_decode_cb_.is_null());
+ DCHECK(decode_cb_);
+ DCHECK(!wait_for_pending_decode_cb_);
}
void SatisfyPendingDecode() {
- CHECK(!decode_cb_.is_null());
+ CHECK(decode_cb_);
CHECK(!decode_results_.empty());
// Post tasks for OutputCB and DecodeCB.
@@ -255,13 +253,13 @@ class VideoRendererImplTest : public testing::Test {
message_loop_.task_runner()->PostTask(FROM_HERE,
base::Bind(output_cb_, frame));
message_loop_.task_runner()->PostTask(
- FROM_HERE, base::Bind(base::ResetAndReturn(&decode_cb_),
- decode_results_.front().first));
+ FROM_HERE,
+ base::Bind(std::move(decode_cb_), decode_results_.front().first));
decode_results_.pop_front();
}
void SatisfyPendingDecodeWithEndOfStream() {
- DCHECK(!decode_cb_.is_null());
+ DCHECK(decode_cb_);
// Return EOS buffer to trigger EOS frame.
EXPECT_CALL(demuxer_stream_, Read(_))
@@ -270,14 +268,12 @@ class VideoRendererImplTest : public testing::Test {
// Satisfy pending |decode_cb_| to trigger a new DemuxerStream::Read().
message_loop_.task_runner()->PostTask(
- FROM_HERE,
- base::Bind(base::ResetAndReturn(&decode_cb_), DecodeStatus::OK));
+ FROM_HERE, base::BindOnce(std::move(decode_cb_), DecodeStatus::OK));
WaitForPendingDecode();
message_loop_.task_runner()->PostTask(
- FROM_HERE,
- base::Bind(base::ResetAndReturn(&decode_cb_), DecodeStatus::OK));
+ FROM_HERE, base::BindOnce(std::move(decode_cb_), DecodeStatus::OK));
}
void AdvanceWallclockTimeInMs(int time_ms) {
@@ -472,12 +468,12 @@ class VideoRendererImplTest : public testing::Test {
void DecodeRequested(scoped_refptr<DecoderBuffer> buffer,
const VideoDecoder::DecodeCB& decode_cb) {
DCHECK_EQ(&message_loop_, base::MessageLoopCurrent::Get());
- CHECK(decode_cb_.is_null());
+ CHECK(!decode_cb_);
decode_cb_ = decode_cb;
// Wake up WaitForPendingDecode() if needed.
- if (!wait_for_pending_decode_cb_.is_null())
- base::ResetAndReturn(&wait_for_pending_decode_cb_).Run();
+ if (wait_for_pending_decode_cb_)
+ std::move(wait_for_pending_decode_cb_).Run();
if (decode_results_.empty())
return;
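The test conversions above replace base::ResetAndReturn with the move-and-run callback idiom. A minimal sketch, written here with a base::OnceClosure for clarity (the fixture's own callback types may differ):

// Sketch of the move-and-run idiom that replaces base::ResetAndReturn().
// |pending_cb| is an illustrative name, not a member of the test fixture.
#include <utility>
#include "base/callback.h"

void RunIfPending(base::OnceClosure& pending_cb) {
  // The boolean test replaces is_null(): a bound callback converts to true.
  if (pending_cb) {
    // std::move() leaves |pending_cb| null and Run() consumes the temporary,
    // matching what base::ResetAndReturn(&pending_cb).Run() used to do.
    std::move(pending_cb).Run();
  }
}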
@@ -491,7 +487,7 @@ class VideoRendererImplTest : public testing::Test {
void FlushRequested(const base::Closure& callback) {
DCHECK_EQ(&message_loop_, base::MessageLoopCurrent::Get());
decode_results_.clear();
- if (!decode_cb_.is_null()) {
+ if (decode_cb_) {
QueueFrames("abort");
SatisfyPendingDecode();
}
@@ -878,6 +874,9 @@ TEST_F(VideoRendererImplTest, RenderingStartedThenStopped) {
// calls must all have occurred before playback starts.
EXPECT_EQ(0u, last_pipeline_statistics.video_frames_dropped);
EXPECT_EQ(1u, last_pipeline_statistics.video_frames_decoded);
+
+ // Note: This is not the total, but just the increase since the previous
+ // call; the running total should be 4 * 115200.
EXPECT_EQ(115200, last_pipeline_statistics.video_memory_usage);
// Consider the case that rendering is faster than we setup the test event.
@@ -898,10 +897,21 @@ TEST_F(VideoRendererImplTest, RenderingStartedThenStopped) {
AdvanceTimeInMs(91);
EXPECT_CALL(mock_cb_, FrameReceived(HasTimestampMatcher(90)));
WaitForPendingDecode();
+
+ EXPECT_CALL(mock_cb_, OnStatisticsUpdate(_))
+ .WillOnce(SaveArg<0>(&last_pipeline_statistics));
SatisfyPendingDecodeWithEndOfStream();
AdvanceTimeInMs(30);
WaitForEnded();
+
+ EXPECT_EQ(0u, last_pipeline_statistics.video_frames_dropped);
+ EXPECT_EQ(0u, last_pipeline_statistics.video_frames_decoded);
+
+ // Memory usage is reported as a delta; the prior reads raised the running
+ // total to 4 * 115200, so this final update should show only 1 frame left.
+ EXPECT_EQ(-3 * 115200, last_pipeline_statistics.video_memory_usage);
+
Destroy();
}
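The memory-usage expectations above depend on video_memory_usage being reported as a delta rather than a running total. Assuming the fixture's usual 320x240 I420 test frames (an inference from the 115200 figure, not stated in these hunks), the arithmetic is:

  // 320 * 240 pixels * 1.5 bytes per pixel (I420) = 115200 bytes per frame
  // four frames queued    -> reported delta: +4 * 115200 = +460800
  // three frames released -> reported delta: -3 * 115200 = -345600 (one frame left)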
diff --git a/chromium/media/renderers/video_resource_updater.cc b/chromium/media/renderers/video_resource_updater.cc
index 952c9a7bb9a..5b4abce5d3a 100644
--- a/chromium/media/renderers/video_resource_updater.cc
+++ b/chromium/media/renderers/video_resource_updater.cc
@@ -866,7 +866,7 @@ VideoFrameExternalResources VideoResourceUpdater::CreateForSoftwarePlanes(
// This is software path, so canvas and video_frame are always backed
// by software.
- video_renderer_->Copy(video_frame, &canvas, Context3D());
+ video_renderer_->Copy(video_frame, &canvas, Context3D(), nullptr);
} else {
HardwarePlaneResource* hardware_resource = plane_resource->AsHardware();
size_t bytes_per_row = viz::ResourceSizes::CheckedWidthInBytes<size_t>(