summaryrefslogtreecommitdiff
path: root/chromium/media/base
diff options
context:
space:
mode:
authorAllan Sandfeld Jensen <allan.jensen@theqtcompany.com>2015-08-14 11:38:45 +0200
committerAllan Sandfeld Jensen <allan.jensen@theqtcompany.com>2015-08-14 17:16:47 +0000
commit3a97ca8dd9b96b599ae2d33e40df0dd2f7ea5859 (patch)
tree43cc572ba067417c7341db81f71ae7cc6e0fcc3e /chromium/media/base
parentf61ab1ac7f855cd281809255c0aedbb1895e1823 (diff)
downloadqtwebengine-chromium-3a97ca8dd9b96b599ae2d33e40df0dd2f7ea5859.tar.gz
BASELINE: Update chromium to 45.0.2454.40
Change-Id: Id2121d9f11a8fc633677236c65a3e41feef589e4 Reviewed-by: Andras Becsi <andras.becsi@theqtcompany.com>
Diffstat (limited to 'chromium/media/base')
-rw-r--r--chromium/media/base/BUILD.gn35
-rw-r--r--chromium/media/base/android/BUILD.gn13
-rw-r--r--chromium/media/base/android/access_unit_queue.cc199
-rw-r--r--chromium/media/base/android/access_unit_queue.h106
-rw-r--r--chromium/media/base/android/access_unit_queue_unittest.cc363
-rw-r--r--chromium/media/base/android/audio_decoder_job.cc2
-rw-r--r--chromium/media/base/android/demuxer_stream_player_params.cc80
-rw-r--r--chromium/media/base/android/demuxer_stream_player_params.h7
-rw-r--r--chromium/media/base/android/media_codec_audio_decoder.cc216
-rw-r--r--chromium/media/base/android/media_codec_audio_decoder.h91
-rw-r--r--chromium/media/base/android/media_codec_bridge.cc8
-rw-r--r--chromium/media/base/android/media_codec_decoder.cc625
-rw-r--r--chromium/media/base/android/media_codec_decoder.h295
-rw-r--r--chromium/media/base/android/media_codec_decoder_unittest.cc651
-rw-r--r--chromium/media/base/android/media_codec_player.cc616
-rw-r--r--chromium/media/base/android/media_codec_player.h210
-rw-r--r--chromium/media/base/android/media_codec_player_unittest.cc424
-rw-r--r--chromium/media/base/android/media_codec_video_decoder.cc270
-rw-r--r--chromium/media/base/android/media_codec_video_decoder.h112
-rw-r--r--chromium/media/base/android/media_drm_bridge.cc24
-rw-r--r--chromium/media/base/android/media_player_android.cc4
-rw-r--r--chromium/media/base/android/media_player_android.h23
-rw-r--r--chromium/media/base/android/media_player_bridge.cc8
-rw-r--r--chromium/media/base/android/media_player_listener.cc6
-rw-r--r--chromium/media/base/android/media_player_manager.h7
-rw-r--r--chromium/media/base/android/media_source_player.cc5
-rw-r--r--chromium/media/base/android/media_source_player_unittest.cc58
-rw-r--r--chromium/media/base/android/test_data_factory.cc119
-rw-r--r--chromium/media/base/android/test_data_factory.h73
-rw-r--r--chromium/media/base/android/video_decoder_job.cc2
-rw-r--r--chromium/media/base/audio_buffer.cc161
-rw-r--r--chromium/media/base/audio_buffer.h5
-rw-r--r--chromium/media/base/audio_buffer_unittest.cc42
-rw-r--r--chromium/media/base/audio_converter.cc6
-rw-r--r--chromium/media/base/audio_converter.h3
-rw-r--r--chromium/media/base/audio_decoder.h14
-rw-r--r--chromium/media/base/audio_renderer_mixer.cc10
-rw-r--r--chromium/media/base/audio_renderer_mixer.h5
-rw-r--r--chromium/media/base/audio_renderer_mixer_input.cc14
-rw-r--r--chromium/media/base/audio_renderer_mixer_input.h4
-rw-r--r--chromium/media/base/audio_renderer_sink.h26
-rw-r--r--chromium/media/base/audio_splicer.cc90
-rw-r--r--chromium/media/base/audio_splicer.h4
-rw-r--r--chromium/media/base/audio_splicer_unittest.cc2
-rw-r--r--chromium/media/base/audio_video_metadata_extractor.cc4
-rw-r--r--chromium/media/base/audio_video_metadata_extractor_unittest.cc3
-rw-r--r--chromium/media/base/bind_to_current_loop.h1
-rw-r--r--chromium/media/base/bit_reader_core.cc7
-rw-r--r--chromium/media/base/cdm_callback_promise.h2
-rw-r--r--chromium/media/base/cdm_config.h2
-rw-r--r--chromium/media/base/cdm_context.cc12
-rw-r--r--chromium/media/base/cdm_context.h36
-rw-r--r--chromium/media/base/cdm_initialized_promise.cc30
-rw-r--r--chromium/media/base/cdm_initialized_promise.h38
-rw-r--r--chromium/media/base/demuxer.h3
-rw-r--r--chromium/media/base/demuxer_perftest.cc2
-rw-r--r--chromium/media/base/fake_audio_renderer_sink.cc10
-rw-r--r--chromium/media/base/fake_audio_renderer_sink.h5
-rw-r--r--chromium/media/base/fake_demuxer_stream.cc4
-rw-r--r--chromium/media/base/key_systems.cc8
-rw-r--r--chromium/media/base/key_systems_unittest.cc37
-rw-r--r--chromium/media/base/mac/coremedia_glue.mm2
-rw-r--r--chromium/media/base/mac/video_frame_mac.cc2
-rw-r--r--chromium/media/base/mac/video_frame_mac_unittests.cc6
-rw-r--r--chromium/media/base/media.cc4
-rw-r--r--chromium/media/base/media_keys.h3
-rw-r--r--chromium/media/base/media_log.cc21
-rw-r--r--chromium/media/base/media_log.h39
-rw-r--r--chromium/media/base/media_log_event.h7
-rw-r--r--chromium/media/base/media_switches.cc9
-rw-r--r--chromium/media/base/media_switches.h3
-rw-r--r--chromium/media/base/mime_util.cc658
-rw-r--r--chromium/media/base/mime_util.h76
-rw-r--r--chromium/media/base/mime_util_unittest.cc142
-rw-r--r--chromium/media/base/mock_audio_renderer_sink.h8
-rw-r--r--chromium/media/base/mock_filters.cc4
-rw-r--r--chromium/media/base/mock_filters.h12
-rw-r--r--chromium/media/base/multi_channel_resampler.cc5
-rw-r--r--chromium/media/base/multi_channel_resampler.h3
-rw-r--r--chromium/media/base/null_video_sink.cc2
-rw-r--r--chromium/media/base/null_video_sink.h3
-rw-r--r--chromium/media/base/pipeline.cc5
-rw-r--r--chromium/media/base/pipeline_status.h2
-rw-r--r--chromium/media/base/pipeline_unittest.cc2
-rw-r--r--chromium/media/base/renderer.h3
-rw-r--r--chromium/media/base/test_helpers.cc13
-rw-r--r--chromium/media/base/test_helpers.h1
-rw-r--r--chromium/media/base/text_renderer_unittest.cc4
-rw-r--r--chromium/media/base/user_input_monitor.h2
-rw-r--r--chromium/media/base/user_input_monitor_linux.cc4
-rw-r--r--chromium/media/base/user_input_monitor_unittest.cc2
-rw-r--r--chromium/media/base/user_input_monitor_win.cc4
-rw-r--r--chromium/media/base/video_capture_types.cc112
-rw-r--r--chromium/media/base/video_capture_types.h41
-rw-r--r--chromium/media/base/video_decoder.h16
-rw-r--r--chromium/media/base/video_decoder_config.cc16
-rw-r--r--chromium/media/base/video_decoder_config.h1
-rw-r--r--chromium/media/base/video_frame.cc843
-rw-r--r--chromium/media/base/video_frame.h341
-rw-r--r--chromium/media/base/video_frame_metadata.cc49
-rw-r--r--chromium/media/base/video_frame_metadata.h52
-rw-r--r--chromium/media/base/video_frame_pool.cc6
-rw-r--r--chromium/media/base/video_frame_unittest.cc62
-rw-r--r--chromium/media/base/video_util.cc21
-rw-r--r--chromium/media/base/video_util_unittest.cc10
-rw-r--r--chromium/media/base/win/BUILD.gn18
-rw-r--r--chromium/media/base/win/mf_initializer.cc41
-rw-r--r--chromium/media/base/win/mf_initializer.h18
108 files changed, 6954 insertions, 986 deletions
diff --git a/chromium/media/base/BUILD.gn b/chromium/media/base/BUILD.gn
index d85a615cc2c..098b6e5563d 100644
--- a/chromium/media/base/BUILD.gn
+++ b/chromium/media/base/BUILD.gn
@@ -63,10 +63,13 @@ source_set("base") {
"byte_queue.h",
"cdm_callback_promise.cc",
"cdm_callback_promise.h",
+ "cdm_config.h",
"cdm_context.cc",
"cdm_context.h",
"cdm_factory.cc",
"cdm_factory.h",
+ "cdm_initialized_promise.cc",
+ "cdm_initialized_promise.h",
"cdm_key_information.cc",
"cdm_key_information.h",
"cdm_promise.cc",
@@ -119,10 +122,14 @@ source_set("base") {
"media_permission.h",
"media_switches.cc",
"media_switches.h",
+ "mime_util.cc",
+ "mime_util.h",
"moving_average.cc",
"moving_average.h",
"multi_channel_resampler.cc",
"multi_channel_resampler.h",
+ "null_video_sink.cc",
+ "null_video_sink.h",
"pipeline.cc",
"pipeline.h",
"pipeline_status.h",
@@ -263,11 +270,13 @@ source_set("base") {
}
if (current_cpu == "x86" || current_cpu == "x64") {
- sources += [ "simd/convert_yuv_to_rgb_x86.cc" ]
- deps += [
- ":media_yasm",
- ":media_sse2",
+ sources += [
+ "simd/convert_rgb_to_yuv_sse2.cc",
+ "simd/convert_rgb_to_yuv_ssse3.cc",
+ "simd/convert_yuv_to_rgb_x86.cc",
+ "simd/filter_yuv_sse2.cc",
]
+ deps += [ ":media_yasm" ]
}
configs += [ "//build/config/compiler:no_size_t_to_int_warning" ]
@@ -329,8 +338,6 @@ source_set("test_support") {
"mock_demuxer_host.h",
"mock_filters.cc",
"mock_filters.h",
- "null_video_sink.cc",
- "null_video_sink.h",
"test_data_util.cc",
"test_data_util.h",
"test_helpers.cc",
@@ -374,6 +381,7 @@ source_set("unittests") {
"fake_demuxer_stream_unittest.cc",
"gmock_callback_support_unittest.cc",
"key_systems_unittest.cc",
+ "mime_util_unittest.cc",
"moving_average_unittest.cc",
"multi_channel_resampler_unittest.cc",
"null_video_sink_unittest.cc",
@@ -454,21 +462,6 @@ source_set("perftests") {
}
if (current_cpu == "x86" || current_cpu == "x64") {
- source_set("media_sse2") {
- sources = [
- "simd/convert_rgb_to_yuv_sse2.cc",
- "simd/convert_rgb_to_yuv_ssse3.cc",
- "simd/filter_yuv_sse2.cc",
- ]
- configs += [
- "//media:media_config",
- "//media:media_implementation",
- ]
- if (!is_win) {
- cflags = [ "-msse2" ]
- }
- }
-
import("//third_party/yasm/yasm_assemble.gni")
yasm_assemble("media_yasm") {
sources = [
diff --git a/chromium/media/base/android/BUILD.gn b/chromium/media/base/android/BUILD.gn
index b9853b0bd94..7c87a7f50d6 100644
--- a/chromium/media/base/android/BUILD.gn
+++ b/chromium/media/base/android/BUILD.gn
@@ -12,6 +12,8 @@ assert(is_android)
source_set("android") {
sources = [
+ "access_unit_queue.cc",
+ "access_unit_queue.h",
"audio_decoder_job.cc",
"audio_decoder_job.h",
"browser_cdm_factory_android.cc",
@@ -21,10 +23,16 @@ source_set("android") {
"demuxer_stream_player_params.h",
"media_client_android.cc",
"media_client_android.h",
+ "media_codec_audio_decoder.cc",
+ "media_codec_audio_decoder.h",
"media_codec_bridge.cc",
"media_codec_bridge.h",
+ "media_codec_decoder.cc",
+ "media_codec_decoder.h",
"media_codec_player.cc",
"media_codec_player.h",
+ "media_codec_video_decoder.cc",
+ "media_codec_video_decoder.h",
"media_decoder_job.cc",
"media_decoder_job.h",
"media_drm_bridge.cc",
@@ -67,9 +75,14 @@ source_set("android") {
source_set("unittests") {
testonly = true
sources = [
+ "access_unit_queue_unittest.cc",
"media_codec_bridge_unittest.cc",
+ "media_codec_decoder_unittest.cc",
+ "media_codec_player_unittest.cc",
"media_drm_bridge_unittest.cc",
"media_source_player_unittest.cc",
+ "test_data_factory.cc",
+ "test_data_factory.h",
]
deps = [
":android",
diff --git a/chromium/media/base/android/access_unit_queue.cc b/chromium/media/base/android/access_unit_queue.cc
new file mode 100644
index 00000000000..e1973c0fa6c
--- /dev/null
+++ b/chromium/media/base/android/access_unit_queue.cc
@@ -0,0 +1,199 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/base/android/access_unit_queue.h"
+
+#include "base/logging.h"
+#include "base/stl_util.h"
+#include "media/base/demuxer_stream.h"
+
+namespace media {
+
+namespace {
+// Amount of history chunks we keep by default. The zero size means we do not
+// keep chunks before the current one and the history is limited by the size
+// of one chunk.
+const int kDefaultHistoryChunksAmount = 0;
+}
+
+AccessUnitQueue::AccessUnitQueue()
+ : index_in_chunk_(0),
+ history_chunks_amount_(kDefaultHistoryChunksAmount),
+ has_eos_(false) {
+ current_chunk_ = chunks_.end();
+}
+
+AccessUnitQueue::~AccessUnitQueue() {
+ STLDeleteContainerPointers(chunks_.begin(), chunks_.end());
+}
+
+void AccessUnitQueue::PushBack(const DemuxerData& data) {
+ // Media thread
+ DCHECK(!data.access_units.empty());
+
+#if DCHECK_IS_ON()
+ // If there is an AU with |kConfigChanged| status, it must be the last
+ // AU in the chunk and the data should have exactly one corresponding
+ // DemuxerConfigs.
+ for (size_t i = 0; i < data.access_units.size(); ++i) {
+ const AccessUnit& unit = data.access_units[i];
+
+ // EOS must be the last unit in the chunk
+ if (unit.is_end_of_stream) {
+ DCHECK(i == data.access_units.size() - 1);
+ }
+
+ // kConfigChanged must be the last unit in the chunk.
+ if (unit.status == DemuxerStream::kConfigChanged) {
+ DCHECK(i == data.access_units.size() - 1);
+ DCHECK(data.demuxer_configs.size() == 1);
+ }
+
+ if (unit.status == DemuxerStream::kAborted) {
+ DVLOG(1) << "AccessUnitQueue::" << __FUNCTION__ << " kAborted";
+ }
+ }
+#endif
+
+ // Create the next chunk and copy data to it.
+ DemuxerData* chunk = new DemuxerData(data);
+
+ // EOS flag can only be in the last access unit.
+ bool has_eos = chunk->access_units.back().is_end_of_stream;
+
+ // Append this chunk to the queue.
+ base::AutoLock lock(lock_);
+
+ // Ignore the input after we have received EOS.
+ if (has_eos_) {
+ delete chunk;
+ return;
+ }
+
+ bool was_empty = (current_chunk_ == chunks_.end());
+
+ // The container |chunks_| will own the chunk.
+ chunks_.push_back(chunk);
+
+ // Position the current chunk.
+ if (was_empty) {
+ current_chunk_ = --chunks_.end();
+ index_in_chunk_ = 0;
+ }
+
+ // We expect that the chunk containing EOS is the last chunk.
+ DCHECK(!has_eos_);
+ has_eos_ = has_eos;
+}
+
+void AccessUnitQueue::Advance() {
+ // Decoder thread
+ base::AutoLock lock(lock_);
+
+ if (current_chunk_ == chunks_.end())
+ return;
+
+ ++index_in_chunk_;
+ if (index_in_chunk_ < (*current_chunk_)->access_units.size())
+ return;
+
+ index_in_chunk_ = 0;
+ ++current_chunk_;
+
+ // Keep only |history_chunks_amount_| before the current one.
+ // std::distance() and std::advance() do not work efficiently with std::list,
+ // but the history_size should be small (default is 0).
+ size_t num_consumed_chunks = std::distance(chunks_.begin(), current_chunk_);
+ if (num_consumed_chunks > history_chunks_amount_) {
+ DataChunkQueue::iterator first_to_keep = chunks_.begin();
+ std::advance(first_to_keep, num_consumed_chunks - history_chunks_amount_);
+ STLDeleteContainerPointers(chunks_.begin(), first_to_keep);
+ chunks_.erase(chunks_.begin(), first_to_keep);
+ }
+}
+
+void AccessUnitQueue::Flush() {
+ // Media thread
+ base::AutoLock lock(lock_);
+
+ STLDeleteContainerPointers(chunks_.begin(), chunks_.end());
+ chunks_.clear();
+
+ current_chunk_ = chunks_.end();
+ index_in_chunk_ = 0;
+ has_eos_ = false;
+}
+
+bool AccessUnitQueue::RewindToLastKeyFrame() {
+ // Media thread
+ base::AutoLock lock(lock_);
+
+ // Search for the key frame backwards. Start with the current AU.
+
+ // Start with current chunk.
+ if (current_chunk_ != chunks_.end()) {
+ for (int i = (int)index_in_chunk_; i >= 0; --i) {
+ if ((*current_chunk_)->access_units[i].is_key_frame) {
+ index_in_chunk_ = i;
+ return true;
+ }
+ }
+ }
+
+ // Position reverse iterator before the current chunk.
+ DataChunkQueue::reverse_iterator rchunk(current_chunk_);
+
+ for (; rchunk != chunks_.rend(); ++rchunk) {
+ int i = (int)(*rchunk)->access_units.size() - 1;
+ for (; i >= 0; --i) {
+ if ((*rchunk)->access_units[i].is_key_frame) {
+ index_in_chunk_ = i;
+ current_chunk_ = --rchunk.base();
+ return true;
+ }
+ }
+ }
+
+ return false;
+}
+
+AccessUnitQueue::Info AccessUnitQueue::GetInfo() const {
+ // Media thread, Decoder thread
+
+ Info info;
+ base::AutoLock lock(lock_);
+
+ info.length = GetUnconsumedAccessUnitLength();
+ info.has_eos = has_eos_;
+ info.front_unit = nullptr;
+ info.configs = nullptr;
+
+ if (info.length > 0) {
+ DCHECK(current_chunk_ != chunks_.end());
+ DCHECK(index_in_chunk_ < (*current_chunk_)->access_units.size());
+ info.front_unit = &(*current_chunk_)->access_units[index_in_chunk_];
+
+ if (info.front_unit->status == DemuxerStream::kConfigChanged) {
+ DCHECK((*current_chunk_)->demuxer_configs.size() == 1);
+ info.configs = &(*current_chunk_)->demuxer_configs[0];
+ }
+ }
+ return info;
+}
+
+void AccessUnitQueue::SetHistorySizeForTesting(size_t history_chunks_amount) {
+ history_chunks_amount_ = history_chunks_amount;
+}
+
+int AccessUnitQueue::GetUnconsumedAccessUnitLength() const {
+ int result = 0;
+ DataChunkQueue::const_iterator chunk;
+ for (chunk = current_chunk_; chunk != chunks_.end(); ++chunk)
+ result += (*chunk)->access_units.size();
+
+ result -= index_in_chunk_;
+ return result;
+}
+
+} // namespace media
diff --git a/chromium/media/base/android/access_unit_queue.h b/chromium/media/base/android/access_unit_queue.h
new file mode 100644
index 00000000000..9fae42000f4
--- /dev/null
+++ b/chromium/media/base/android/access_unit_queue.h
@@ -0,0 +1,106 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_BASE_ANDROID_ACCESS_UNIT_QUEUE_H_
+#define MEDIA_BASE_ANDROID_ACCESS_UNIT_QUEUE_H_
+
+#include <list>
+
+#include "base/macros.h"
+#include "base/memory/scoped_ptr.h"
+#include "base/synchronization/lock.h"
+#include "media/base/android/demuxer_stream_player_params.h"
+
+namespace media {
+
+// The queue of incoming data for MediaCodecDecoder.
+//
+// The data comes in the form of access units. Each access unit has a type.
+// If the type is |kConfigChanged| the access unit itself has no data, but
+// is accompanied with DemuxerConfigs.
+// The queue should be accessed on the Media thread that puts the incoming data
+// in and on the Decoder thread that gets the next access unit and eventually
+// removes it from the queue.
+class AccessUnitQueue {
+ public:
+ // Information about the queue state and the access unit at the front.
+ struct Info {
+ // The unit at front. Null if the queue is empty. This pointer may be
+ // invalidated by the next Advance() or Flush() call and must be used
+ // before the caller calls these methods. The |front_unit| is owned by
+ // the queue itself - never delete it through this pointer.
+ const AccessUnit* front_unit;
+
+ // Configs for the front unit if it is |kConfigChanged|, null otherwise.
+ // The same validity rule applies: this pointer is only valid till the next
+ // Advance() or Flush() call, and |configs| is owned by the queue itself.
+ const DemuxerConfigs* configs;
+
+ // Number of access units in the queue.
+ int length;
+
+ // Whether End Of Stream has been added to the queue. Cleared by Flush().
+ bool has_eos;
+
+ Info() : front_unit(nullptr), configs(nullptr), length(0), has_eos(false) {}
+ };
+
+ AccessUnitQueue();
+ ~AccessUnitQueue();
+
+ // Appends the incoming data to the queue.
+ void PushBack(const DemuxerData& frames);
+
+ // Advances the front position to next unit. Logically the preceding units
+ // do not exist, but they can be physically removed later.
+ void Advance();
+
+ // Clears the queue, resets the length to zero and clears EOS condition.
+ void Flush();
+
+ // Looks back for the first key frame starting from the current one (i.e.
+ // the look-back is inclusive of the current front position).
+ // If the key frame exists, sets the current access unit to it and returns
+ // true. Otherwise returns false.
+ bool RewindToLastKeyFrame();
+
+ // Returns the information about the queue.
+  // The result is invalidated by the following Advance() or Flush() call.
+ // There must be only one |Info| consumer at a time.
+ Info GetInfo() const;
+
+ // For unit tests only. These methods are not thread safe.
+ size_t NumChunksForTesting() const { return chunks_.size(); }
+ void SetHistorySizeForTesting(size_t number_of_history_chunks);
+
+ private:
+ // Returns the amount of access units between the current one and the end,
+  // including the current one.
+ int GetUnconsumedAccessUnitLength() const;
+
+ // The queue of data chunks. It owns the chunks.
+ typedef std::list<DemuxerData*> DataChunkQueue;
+ DataChunkQueue chunks_;
+
+ // The chunk that contains the current access unit.
+ DataChunkQueue::iterator current_chunk_;
+
+ // Index of the current access unit within the current chunk.
+ size_t index_in_chunk_;
+
+ // Amount of chunks before the |current_chunk_| that's kept for history.
+ size_t history_chunks_amount_;
+
+ // Indicates that a unit with End Of Stream flag has been appended.
+ bool has_eos_;
+
+ // The lock protects all fields together.
+ mutable base::Lock lock_;
+
+ DISALLOW_COPY_AND_ASSIGN(AccessUnitQueue);
+};
+
+} // namespace media
+
+#endif // MEDIA_BASE_ANDROID_ACCESS_UNIT_QUEUE_H_
diff --git a/chromium/media/base/android/access_unit_queue_unittest.cc b/chromium/media/base/android/access_unit_queue_unittest.cc
new file mode 100644
index 00000000000..5dca0771a0a
--- /dev/null
+++ b/chromium/media/base/android/access_unit_queue_unittest.cc
@@ -0,0 +1,363 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/base/android/access_unit_queue.h"
+#include "testing/gtest/include/gtest/gtest.h"
+
+#define ARRAY_SIZE(x) (sizeof(x) / sizeof(x[0]))
+
+namespace media {
+
+class AccessUnitQueueTest : public testing::Test {
+ public:
+ AccessUnitQueueTest() {}
+ ~AccessUnitQueueTest() override {}
+
+ protected:
+ enum UnitType { kNormal = 0, kKeyFrame, kEOS, kConfig };
+ struct AUDescriptor {
+ UnitType unit_type;
+ std::string data;
+ };
+
+ DemuxerData CreateDemuxerData(const AUDescriptor* descr, int descr_length);
+};
+
+DemuxerData AccessUnitQueueTest::CreateDemuxerData(const AUDescriptor* descr,
+ int descr_length) {
+ DemuxerData result;
+ result.type = DemuxerStream::AUDIO; // assign a valid type
+
+ for (int i = 0; i < descr_length; ++i) {
+ result.access_units.push_back(AccessUnit());
+ AccessUnit& au = result.access_units.back();
+
+ if (descr[i].unit_type == kConfig) {
+ au.status = DemuxerStream::kConfigChanged;
+ result.demuxer_configs.push_back(DemuxerConfigs());
+ // ignore data
+ continue;
+ }
+
+ au.status = DemuxerStream::kOk;
+
+ if (descr[i].unit_type == kEOS) {
+ au.is_end_of_stream = true;
+ // ignore data
+ continue;
+ }
+
+ au.data = std::vector<uint8>(descr[i].data.begin(), descr[i].data.end());
+
+ if (descr[i].unit_type == kKeyFrame)
+ au.is_key_frame = true;
+ }
+ return result;
+}
+
+#define VERIFY_FIRST_BYTE(expected, info) \
+ do { \
+ EXPECT_NE(nullptr, info.front_unit); \
+ EXPECT_TRUE(info.front_unit->data.size() > 0); \
+ EXPECT_EQ(expected, info.front_unit->data[0]); \
+ } while (0)
+
+TEST_F(AccessUnitQueueTest, InitializedEmpty) {
+ AccessUnitQueue au_queue;
+ AccessUnitQueue::Info info = au_queue.GetInfo();
+
+ EXPECT_EQ(0, info.length);
+ EXPECT_FALSE(info.has_eos);
+ EXPECT_EQ(nullptr, info.front_unit);
+ EXPECT_EQ(nullptr, info.configs);
+}
+
+TEST_F(AccessUnitQueueTest, RewindToLastKeyFrameEmptyQueue) {
+ AccessUnitQueue au_queue;
+ EXPECT_FALSE(au_queue.RewindToLastKeyFrame());
+}
+
+TEST_F(AccessUnitQueueTest, PushAndAdvance) {
+ AUDescriptor chunk1[] = {{kNormal, "0"},
+ {kNormal, "1"},
+ {kNormal, "2"},
+ {kNormal, "3"},
+ {kNormal, "4"},
+ {kNormal, "5"}};
+ AUDescriptor chunk2[] = {{kNormal, "6"},
+ {kNormal, "7"},
+ {kNormal, "8"}};
+
+ int total_size = ARRAY_SIZE(chunk1) + ARRAY_SIZE(chunk2);
+
+ AccessUnitQueue au_queue;
+ au_queue.PushBack(CreateDemuxerData(chunk1, ARRAY_SIZE(chunk1)));
+ au_queue.PushBack(CreateDemuxerData(chunk2, ARRAY_SIZE(chunk2)));
+
+ AccessUnitQueue::Info info;
+ for (int i = 0; i < total_size; ++i) {
+ info = au_queue.GetInfo();
+
+ EXPECT_FALSE(info.has_eos);
+ EXPECT_EQ(total_size - i, info.length);
+ EXPECT_EQ(nullptr, info.configs);
+
+ ASSERT_NE(nullptr, info.front_unit);
+ EXPECT_TRUE(info.front_unit->data.size() > 0);
+ EXPECT_EQ('0' + i, info.front_unit->data[0]);
+
+ au_queue.Advance();
+ }
+
+ // After we advanced past the last AU, GetInfo() should report starvation.
+ info = au_queue.GetInfo();
+
+ EXPECT_EQ(0, info.length);
+ EXPECT_FALSE(info.has_eos);
+ EXPECT_EQ(nullptr, info.front_unit);
+ EXPECT_EQ(nullptr, info.configs);
+}
+
+TEST_F(AccessUnitQueueTest, ChunksDoNotLeak) {
+ AUDescriptor chunk[] = {
+ {kNormal, "0"}, {kNormal, "1"}, {kNormal, "2"}, {kNormal, "3"}};
+
+ AccessUnitQueue au_queue;
+
+ // Verify that the old chunks get deleted (we rely on NumChunksForTesting()).
+ // First, run the loop with default history size, which is zero chunks.
+ for (size_t i = 0; i < 100; ++i) {
+ au_queue.PushBack(CreateDemuxerData(chunk, ARRAY_SIZE(chunk)));
+ for (size_t j = 0; j < ARRAY_SIZE(chunk); ++j)
+ au_queue.Advance();
+
+ EXPECT_EQ(0U, au_queue.NumChunksForTesting());
+ }
+
+ // Change the history size and run again.
+ au_queue.SetHistorySizeForTesting(5);
+
+ for (size_t i = 0; i < 100; ++i) {
+ au_queue.PushBack(CreateDemuxerData(chunk, ARRAY_SIZE(chunk)));
+ for (size_t j = 0; j < ARRAY_SIZE(chunk); ++j)
+ au_queue.Advance();
+
+ if (i < 4)
+ EXPECT_EQ(i + 1, au_queue.NumChunksForTesting());
+ else
+ EXPECT_EQ(5U, au_queue.NumChunksForTesting());
+ }
+}
+
+TEST_F(AccessUnitQueueTest, PushAfterStarvation) {
+ // Two chunks
+ AUDescriptor chunk[][4] = {
+ {{kNormal, "0"}, {kNormal, "1"}, {kNormal, "2"}, {kNormal, "3"}},
+ {{kNormal, "4"}, {kNormal, "5"}, {kNormal, "6"}, {kNormal, "7"}}};
+
+ AccessUnitQueue au_queue;
+
+ // Push the first chunk.
+ au_queue.PushBack(CreateDemuxerData(chunk[0], ARRAY_SIZE(chunk[0])));
+
+ // Advance past the end of queue.
+ for (size_t i = 0; i < ARRAY_SIZE(chunk[0]); ++i)
+ au_queue.Advance();
+
+ // An extra Advance() should not change anything.
+ au_queue.Advance();
+
+ // Push the second chunk
+ au_queue.PushBack(CreateDemuxerData(chunk[1], ARRAY_SIZE(chunk[1])));
+
+ // Verify that we get the next access unit.
+ AccessUnitQueue::Info info = au_queue.GetInfo();
+ VERIFY_FIRST_BYTE('4', info);
+}
+
+TEST_F(AccessUnitQueueTest, HasEOS) {
+ // Two chunks
+ AUDescriptor chunk[][4] = {
+ {{kNormal, "0"}, {kNormal, "1"}, {kNormal, "2"}, {kNormal, "3"}},
+ {{kNormal, "4"}, {kNormal, "5"}, {kNormal, "6"}, {kEOS, "7"}}};
+
+ AccessUnitQueue au_queue;
+ au_queue.PushBack(CreateDemuxerData(chunk[0], ARRAY_SIZE(chunk[0])));
+ au_queue.PushBack(CreateDemuxerData(chunk[1], ARRAY_SIZE(chunk[1])));
+
+ // Verify that after EOS has been pushed into the queue,
+ // it is reported for every GetInfo()
+ for (int i = 0; i < 8; ++i) {
+ AccessUnitQueue::Info info = au_queue.GetInfo();
+
+ EXPECT_TRUE(info.has_eos);
+ EXPECT_EQ(nullptr, info.configs);
+
+ if (i == 7)
+ EXPECT_TRUE(info.front_unit->is_end_of_stream);
+ else
+ VERIFY_FIRST_BYTE('0' + i, info);
+
+ au_queue.Advance();
+ }
+}
+
+TEST_F(AccessUnitQueueTest, HasConfigs) {
+ AUDescriptor chunk[] = {
+ {kNormal, "0"}, {kNormal, "1"}, {kNormal, "2"}, {kConfig, "3"}};
+
+ AccessUnitQueue au_queue;
+ au_queue.PushBack(CreateDemuxerData(chunk, ARRAY_SIZE(chunk)));
+
+ for (int i = 0; i < 4; ++i) {
+ AccessUnitQueue::Info info = au_queue.GetInfo();
+
+ if (i != 3)
+ EXPECT_EQ(nullptr, info.configs);
+ else
+ EXPECT_NE(nullptr, info.configs);
+
+ au_queue.Advance();
+ }
+}
+
+TEST_F(AccessUnitQueueTest, ConfigsAndKeyFrame) {
+ // Two chunks
+ AUDescriptor chunk[][4] = {
+ {{kNormal, "0"}, {kKeyFrame, "1"}, {kNormal, "2"}, {kConfig, "3"}},
+ {{kKeyFrame, "4"}, {kNormal, "5"}, {kNormal, "6"}, {kNormal, "7"}}};
+
+ AccessUnitQueue::Info info;
+
+ AccessUnitQueue au_queue;
+ au_queue.PushBack(CreateDemuxerData(chunk[0], ARRAY_SIZE(chunk[0])));
+ au_queue.PushBack(CreateDemuxerData(chunk[1], ARRAY_SIZE(chunk[1])));
+
+ // There is no prior key frame
+ EXPECT_FALSE(au_queue.RewindToLastKeyFrame());
+
+ // Consume first access unit.
+ au_queue.Advance();
+
+ // Now the current one is the key frame. It would be safe to configure codec
+ // at this moment, so RewindToLastKeyFrame() should return true.
+ EXPECT_TRUE(au_queue.RewindToLastKeyFrame());
+
+ info = au_queue.GetInfo();
+ VERIFY_FIRST_BYTE('1', info);
+
+ au_queue.Advance(); // now current unit is "2"
+
+ info = au_queue.GetInfo();
+ VERIFY_FIRST_BYTE('2', info);
+
+ EXPECT_TRUE(au_queue.RewindToLastKeyFrame()); // should go back to "1"
+
+ info = au_queue.GetInfo();
+ VERIFY_FIRST_BYTE('1', info);
+
+ au_queue.Advance(); // now current unit is "2"
+ au_queue.Advance(); // now current unit is "3"
+
+ // Verify that we are at "3".
+ info = au_queue.GetInfo();
+ EXPECT_NE(nullptr, info.configs);
+
+ // Although it would be safe to configure codec (with old config) in this
+ // position since it will be immediately reconfigured from the next unit "3",
+ // current implementation returns unit "1".
+
+ EXPECT_TRUE(au_queue.RewindToLastKeyFrame()); // should go back to "1"
+
+ info = au_queue.GetInfo();
+ VERIFY_FIRST_BYTE('1', info);
+
+ au_queue.Advance(); // now current unit is "2"
+ au_queue.Advance(); // now current unit is "3"
+ au_queue.Advance(); // now current unit is "4"
+
+ info = au_queue.GetInfo();
+ VERIFY_FIRST_BYTE('4', info);
+
+ EXPECT_TRUE(au_queue.RewindToLastKeyFrame()); // should stay at "4"
+
+ info = au_queue.GetInfo();
+ VERIFY_FIRST_BYTE('4', info);
+
+ au_queue.Advance(); // now current unit is "5"
+ au_queue.Advance(); // now current unit is "6"
+
+ info = au_queue.GetInfo();
+ VERIFY_FIRST_BYTE('6', info);
+
+ EXPECT_TRUE(au_queue.RewindToLastKeyFrame()); // should go back to "4"
+
+ info = au_queue.GetInfo();
+ VERIFY_FIRST_BYTE('4', info);
+}
+
+TEST_F(AccessUnitQueueTest, KeyFrameWithLongHistory) {
+ // Four chunks
+ AUDescriptor chunk[][4] = {
+ {{kNormal, "0"}, {kKeyFrame, "1"}, {kNormal, "2"}, {kNormal, "3"}},
+ {{kNormal, "4"}, {kNormal, "5"}, {kNormal, "6"}, {kNormal, "7"}},
+ {{kNormal, "8"}, {kNormal, "9"}, {kNormal, "a"}, {kNormal, "b"}},
+ {{kNormal, "c"}, {kNormal, "d"}, {kKeyFrame, "e"}, {kNormal, "f"}}};
+
+ AccessUnitQueue::Info info;
+
+ AccessUnitQueue au_queue;
+ for (int i = 0; i < 4; ++i)
+ au_queue.PushBack(CreateDemuxerData(chunk[i], ARRAY_SIZE(chunk[i])));
+
+ au_queue.SetHistorySizeForTesting(3);
+
+ // Advance to '3'.
+ for (int i = 0; i < 3; ++i)
+ au_queue.Advance();
+
+ info = au_queue.GetInfo();
+ VERIFY_FIRST_BYTE('3', info);
+
+ // Rewind to key frame, the current unit should be '1'.
+ EXPECT_TRUE(au_queue.RewindToLastKeyFrame());
+ info = au_queue.GetInfo();
+ VERIFY_FIRST_BYTE('1', info);
+
+ // Advance to 'c'.
+ for (int i = 0; i < 11; ++i)
+ au_queue.Advance();
+
+ info = au_queue.GetInfo();
+ VERIFY_FIRST_BYTE('c', info);
+
+ // Rewind to key frame, the current unit should be '1' again.
+ EXPECT_TRUE(au_queue.RewindToLastKeyFrame());
+ info = au_queue.GetInfo();
+ VERIFY_FIRST_BYTE('1', info);
+
+ // Set history size to 0 (default)
+ au_queue.SetHistorySizeForTesting(0);
+
+ // Advance to 'd'. Should erase all chunks except the last.
+ for (int i = 0; i < 12; ++i)
+ au_queue.Advance();
+
+ info = au_queue.GetInfo();
+ VERIFY_FIRST_BYTE('d', info);
+
+ // Rewind should not find any key frames.
+ EXPECT_FALSE(au_queue.RewindToLastKeyFrame());
+
+ au_queue.Advance(); // Advance to key frame 'e'.
+ info = au_queue.GetInfo();
+ VERIFY_FIRST_BYTE('e', info);
+
+  // Rewind should find the same unit 'e'.
+ EXPECT_TRUE(au_queue.RewindToLastKeyFrame());
+ info = au_queue.GetInfo();
+ VERIFY_FIRST_BYTE('e', info);
+}
+
+} // namespace media
diff --git a/chromium/media/base/android/audio_decoder_job.cc b/chromium/media/base/android/audio_decoder_job.cc
index 25de5a145d6..4e6036c952b 100644
--- a/chromium/media/base/android/audio_decoder_job.cc
+++ b/chromium/media/base/android/audio_decoder_job.cc
@@ -35,7 +35,7 @@ base::LazyInstance<AudioDecoderThread>::Leaky
AudioDecoderJob::AudioDecoderJob(
const base::Closure& request_data_cb,
const base::Closure& on_demuxer_config_changed_cb)
- : MediaDecoderJob(g_audio_decoder_thread.Pointer()->message_loop_proxy(),
+ : MediaDecoderJob(g_audio_decoder_thread.Pointer()->task_runner(),
request_data_cb,
on_demuxer_config_changed_cb),
audio_codec_(kUnknownAudioCodec),
diff --git a/chromium/media/base/android/demuxer_stream_player_params.cc b/chromium/media/base/android/demuxer_stream_player_params.cc
index 5c2a11fc284..ae656f8911c 100644
--- a/chromium/media/base/android/demuxer_stream_player_params.cc
+++ b/chromium/media/base/android/demuxer_stream_player_params.cc
@@ -26,4 +26,84 @@ DemuxerData::DemuxerData() : type(DemuxerStream::UNKNOWN) {}
DemuxerData::~DemuxerData() {}
+namespace {
+
+#undef RETURN_STRING
+#define RETURN_STRING(x) \
+ case x: \
+ return #x;
+
+const char* AsString(AudioCodec codec) {
+ switch (codec) {
+ RETURN_STRING(kUnknownAudioCodec);
+ RETURN_STRING(kCodecAAC);
+ RETURN_STRING(kCodecMP3);
+ RETURN_STRING(kCodecPCM);
+ RETURN_STRING(kCodecVorbis);
+ RETURN_STRING(kCodecFLAC);
+ RETURN_STRING(kCodecAMR_NB);
+ RETURN_STRING(kCodecAMR_WB);
+ RETURN_STRING(kCodecPCM_MULAW);
+ RETURN_STRING(kCodecGSM_MS);
+ RETURN_STRING(kCodecPCM_S16BE);
+ RETURN_STRING(kCodecPCM_S24BE);
+ RETURN_STRING(kCodecOpus);
+ RETURN_STRING(kCodecPCM_ALAW);
+ RETURN_STRING(kCodecALAC);
+ }
+ NOTREACHED();
+ return nullptr; // crash early
+}
+
+const char* AsString(VideoCodec codec) {
+ switch (codec) {
+ RETURN_STRING(kUnknownVideoCodec);
+ RETURN_STRING(kCodecH264);
+ RETURN_STRING(kCodecVC1);
+ RETURN_STRING(kCodecMPEG2);
+ RETURN_STRING(kCodecMPEG4);
+ RETURN_STRING(kCodecTheora);
+ RETURN_STRING(kCodecVP8);
+ RETURN_STRING(kCodecVP9);
+ }
+ NOTREACHED();
+ return nullptr; // crash early
+}
+
+#undef RETURN_STRING
+
+} // namespace (anonymous)
+
} // namespace media
+
+std::ostream& operator<<(std::ostream& os, const media::AccessUnit& au) {
+ os << "status:" << au.status << (au.is_end_of_stream ? " EOS" : "")
+ << (au.is_key_frame ? " KEY_FRAME" : "") << " pts:" << au.timestamp
+ << " size:" << au.data.size();
+ return os;
+}
+
+std::ostream& operator<<(std::ostream& os, const media::DemuxerConfigs& conf) {
+ os << "duration:" << conf.duration;
+
+ if (conf.audio_codec == media::kUnknownAudioCodec &&
+ conf.video_codec == media::kUnknownVideoCodec) {
+ os << " no audio, no video";
+ return os;
+ }
+
+ if (conf.audio_codec != media::kUnknownAudioCodec) {
+ os << " audio:" << media::AsString(conf.audio_codec)
+ << " channels:" << conf.audio_channels
+ << " rate:" << conf.audio_sampling_rate
+ << (conf.is_audio_encrypted ? " encrypted" : "");
+ }
+
+ if (conf.video_codec != media::kUnknownVideoCodec) {
+ os << " video:" << media::AsString(conf.video_codec) << " "
+ << conf.video_size.width() << "x" << conf.video_size.height()
+ << (conf.is_video_encrypted ? " encrypted" : "");
+ }
+
+ return os;
+}
diff --git a/chromium/media/base/android/demuxer_stream_player_params.h b/chromium/media/base/android/demuxer_stream_player_params.h
index cb8ae90e8e8..e5e96f37a69 100644
--- a/chromium/media/base/android/demuxer_stream_player_params.h
+++ b/chromium/media/base/android/demuxer_stream_player_params.h
@@ -67,4 +67,11 @@ struct MEDIA_EXPORT DemuxerData {
}; // namespace media
+// For logging
+MEDIA_EXPORT
+std::ostream& operator<<(std::ostream& os, const media::AccessUnit& au);
+
+MEDIA_EXPORT
+std::ostream& operator<<(std::ostream& os, const media::DemuxerConfigs& conf);
+
#endif // MEDIA_BASE_ANDROID_DEMUXER_STREAM_PLAYER_PARAMS_H_
diff --git a/chromium/media/base/android/media_codec_audio_decoder.cc b/chromium/media/base/android/media_codec_audio_decoder.cc
new file mode 100644
index 00000000000..6bef03de322
--- /dev/null
+++ b/chromium/media/base/android/media_codec_audio_decoder.cc
@@ -0,0 +1,216 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/base/android/media_codec_audio_decoder.h"
+
+#include "base/bind.h"
+#include "base/logging.h"
+#include "media/base/android/media_codec_bridge.h"
+#include "media/base/audio_timestamp_helper.h"
+#include "media/base/demuxer_stream.h"
+
+namespace {
+
+// Use 16bit PCM for audio output. Keep this value in sync with the output
+// format we passed to AudioTrack in MediaCodecBridge.
+const int kBytesPerAudioOutputSample = 2;
+}
+
+namespace media {
+
+MediaCodecAudioDecoder::MediaCodecAudioDecoder(
+ const scoped_refptr<base::SingleThreadTaskRunner>& media_task_runner,
+ const base::Closure& request_data_cb,
+ const base::Closure& starvation_cb,
+ const base::Closure& stop_done_cb,
+ const base::Closure& error_cb,
+ const SetTimeCallback& update_current_time_cb)
+ : MediaCodecDecoder(media_task_runner,
+ request_data_cb,
+ starvation_cb,
+ stop_done_cb,
+ error_cb,
+ "AudioDecoder"),
+ volume_(-1.0),
+ bytes_per_frame_(0),
+ output_sampling_rate_(0),
+ frame_count_(0),
+ update_current_time_cb_(update_current_time_cb) {
+}
+
+MediaCodecAudioDecoder::~MediaCodecAudioDecoder() {
+ DVLOG(1) << "AudioDecoder::~AudioDecoder()";
+ ReleaseDecoderResources();
+}
+
+const char* MediaCodecAudioDecoder::class_name() const {
+ return "AudioDecoder";
+}
+
+bool MediaCodecAudioDecoder::HasStream() const {
+ DCHECK(media_task_runner_->BelongsToCurrentThread());
+
+ return configs_.audio_codec != kUnknownAudioCodec;
+}
+
+void MediaCodecAudioDecoder::SetDemuxerConfigs(const DemuxerConfigs& configs) {
+ DCHECK(media_task_runner_->BelongsToCurrentThread());
+
+ DVLOG(1) << class_name() << "::" << __FUNCTION__ << " " << configs;
+
+ configs_ = configs;
+ if (!media_codec_bridge_)
+ output_sampling_rate_ = configs.audio_sampling_rate;
+}
+
+void MediaCodecAudioDecoder::Flush() {
+ DCHECK(media_task_runner_->BelongsToCurrentThread());
+
+ MediaCodecDecoder::Flush();
+ frame_count_ = 0;
+}
+
+void MediaCodecAudioDecoder::SetVolume(double volume) {
+ DCHECK(media_task_runner_->BelongsToCurrentThread());
+
+ DVLOG(1) << class_name() << "::" << __FUNCTION__ << " " << volume;
+
+ volume_ = volume;
+ SetVolumeInternal();
+}
+
+void MediaCodecAudioDecoder::SetBaseTimestamp(base::TimeDelta base_timestamp) {
+ DCHECK(media_task_runner_->BelongsToCurrentThread());
+
+ DVLOG(1) << __FUNCTION__ << " " << base_timestamp;
+
+ base_timestamp_ = base_timestamp;
+ if (audio_timestamp_helper_)
+ audio_timestamp_helper_->SetBaseTimestamp(base_timestamp_);
+}
+
+bool MediaCodecAudioDecoder::IsCodecReconfigureNeeded(
+    const DemuxerConfigs& curr,
+    const DemuxerConfigs& next) const {
+  return curr.audio_codec != next.audio_codec ||
+         curr.audio_channels != next.audio_channels ||
+         curr.audio_sampling_rate != next.audio_sampling_rate ||
+         curr.is_audio_encrypted != next.is_audio_encrypted ||
+         curr.audio_extra_data.size() != next.audio_extra_data.size() ||
+         !std::equal(curr.audio_extra_data.begin(), curr.audio_extra_data.end(),
+                     next.audio_extra_data.begin());
+}
+
+MediaCodecDecoder::ConfigStatus MediaCodecAudioDecoder::ConfigureInternal() {
+ DCHECK(media_task_runner_->BelongsToCurrentThread());
+
+ DVLOG(1) << class_name() << "::" << __FUNCTION__;
+
+ media_codec_bridge_.reset(AudioCodecBridge::Create(configs_.audio_codec));
+ if (!media_codec_bridge_)
+ return CONFIG_FAILURE;
+
+ if (!(static_cast<AudioCodecBridge*>(media_codec_bridge_.get()))
+ ->Start(
+ configs_.audio_codec,
+ configs_.audio_sampling_rate,
+ configs_.audio_channels,
+ &configs_.audio_extra_data[0],
+ configs_.audio_extra_data.size(),
+ configs_.audio_codec_delay_ns,
+ configs_.audio_seek_preroll_ns,
+ true,
+ GetMediaCrypto().obj())) {
+ DVLOG(1) << class_name() << "::" << __FUNCTION__ << " failed";
+
+ media_codec_bridge_.reset();
+ return CONFIG_FAILURE;
+ }
+
+ DVLOG(1) << class_name() << "::" << __FUNCTION__ << " succeeded";
+
+ SetVolumeInternal();
+
+ bytes_per_frame_ = kBytesPerAudioOutputSample * configs_.audio_channels;
+ frame_count_ = 0;
+ ResetTimestampHelper();
+
+ return CONFIG_OK;
+}
+
+void MediaCodecAudioDecoder::OnOutputFormatChanged() {
+ DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread());
+
+ DCHECK(media_codec_bridge_);
+
+ int old_sampling_rate = output_sampling_rate_;
+ output_sampling_rate_ = media_codec_bridge_->GetOutputSamplingRate();
+ if (output_sampling_rate_ != old_sampling_rate)
+ ResetTimestampHelper();
+}
+
+void MediaCodecAudioDecoder::Render(int buffer_index,
+ size_t size,
+ bool render_output,
+ base::TimeDelta pts,
+ bool eos_encountered) {
+ DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread());
+
+ DVLOG(2) << class_name() << "::" << __FUNCTION__ << " pts:" << pts;
+
+ render_output = render_output && (size != 0u);
+
+ if (render_output) {
+ int64 head_position =
+ (static_cast<AudioCodecBridge*>(media_codec_bridge_.get()))
+ ->PlayOutputBuffer(buffer_index, size);
+
+ size_t new_frames_count = size / bytes_per_frame_;
+ frame_count_ += new_frames_count;
+ audio_timestamp_helper_->AddFrames(new_frames_count);
+ int64 frames_to_play = frame_count_ - head_position;
+ DCHECK_GE(frames_to_play, 0);
+
+ base::TimeDelta last_buffered = audio_timestamp_helper_->GetTimestamp();
+ base::TimeDelta now_playing =
+ last_buffered -
+ audio_timestamp_helper_->GetFrameDuration(frames_to_play);
+
+ DVLOG(2) << class_name() << "::" << __FUNCTION__ << " pts:" << pts
+ << " will play: [" << now_playing << "," << last_buffered << "]";
+
+ media_task_runner_->PostTask(
+ FROM_HERE,
+ base::Bind(update_current_time_cb_, now_playing, last_buffered));
+ }
+
+ media_codec_bridge_->ReleaseOutputBuffer(buffer_index, false);
+
+ CheckLastFrame(eos_encountered, false); // no delayed tasks
+}
+
+void MediaCodecAudioDecoder::SetVolumeInternal() {
+ DCHECK(media_task_runner_->BelongsToCurrentThread());
+
+ if (media_codec_bridge_) {
+ static_cast<AudioCodecBridge*>(media_codec_bridge_.get())
+ ->SetVolume(volume_);
+ }
+}
+
+void MediaCodecAudioDecoder::ResetTimestampHelper() {
+ // Media thread or Decoder thread
+ // When this method is called on Media thread, decoder thread
+ // should not be running.
+
+ if (audio_timestamp_helper_)
+ base_timestamp_ = audio_timestamp_helper_->GetTimestamp();
+
+ audio_timestamp_helper_.reset(
+ new AudioTimestampHelper(configs_.audio_sampling_rate));
+
+ audio_timestamp_helper_->SetBaseTimestamp(base_timestamp_);
+}
+
+} // namespace media
diff --git a/chromium/media/base/android/media_codec_audio_decoder.h b/chromium/media/base/android/media_codec_audio_decoder.h
new file mode 100644
index 00000000000..53bb664f601
--- /dev/null
+++ b/chromium/media/base/android/media_codec_audio_decoder.h
@@ -0,0 +1,91 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_BASE_ANDROID_MEDIA_CODEC_AUDIO_DECODER_H_
+#define MEDIA_BASE_ANDROID_MEDIA_CODEC_AUDIO_DECODER_H_
+
+#include "media/base/android/media_codec_decoder.h"
+
+namespace media {
+
+class AudioTimestampHelper;
+
+// Audio decoder for MediaCodecPlayer
+class MediaCodecAudioDecoder : public MediaCodecDecoder {
+ public:
+ // For parameters see media_codec_decoder.h
+ // update_current_time_cb: callback that reports current playback time.
+ // Called for each rendered frame.
+ MediaCodecAudioDecoder(
+ const scoped_refptr<base::SingleThreadTaskRunner>& media_runner,
+ const base::Closure& request_data_cb,
+ const base::Closure& starvation_cb,
+ const base::Closure& stop_done_cb,
+ const base::Closure& error_cb,
+ const SetTimeCallback& update_current_time_cb);
+ ~MediaCodecAudioDecoder() override;
+
+ const char* class_name() const override;
+
+ bool HasStream() const override;
+ void SetDemuxerConfigs(const DemuxerConfigs& configs) override;
+ void Flush() override;
+
+ // Sets the volume of the audio output.
+ void SetVolume(double volume);
+
+ // Sets the base timestamp for |audio_timestamp_helper_|.
+ void SetBaseTimestamp(base::TimeDelta base_timestamp);
+
+ protected:
+ bool IsCodecReconfigureNeeded(const DemuxerConfigs& curr,
+ const DemuxerConfigs& next) const override;
+ ConfigStatus ConfigureInternal() override;
+ void OnOutputFormatChanged() override;
+ void Render(int buffer_index,
+ size_t size,
+ bool render_output,
+ base::TimeDelta pts,
+ bool eos_encountered) override;
+
+ private:
+ // A helper method to set the volume.
+ void SetVolumeInternal();
+
+ // Recreates |audio_timestamp_helper_|, called when sampling rate is changed.
+ void ResetTimestampHelper();
+
+ // Data.
+
+ // Configuration received from demuxer
+ DemuxerConfigs configs_;
+
+ // Requested volume
+ double volume_;
+
+ // Number of bytes per audio frame. Depends on the output format and the
+ // number of channels.
+ int bytes_per_frame_;
+
+ // The sampling rate received from decoder.
+ int output_sampling_rate_;
+
+ // Frame count to sync with audio codec output.
+ int64 frame_count_;
+
+ // Base timestamp for the |audio_timestamp_helper_|.
+ base::TimeDelta base_timestamp_;
+
+ // Object to calculate the current audio timestamp for A/V sync.
+ scoped_ptr<AudioTimestampHelper> audio_timestamp_helper_;
+
+ // Reports current playback time to the callee.
+ SetTimeCallback update_current_time_cb_;
+
+ DISALLOW_COPY_AND_ASSIGN(MediaCodecAudioDecoder);
+};
+
+} // namespace media
+
+#endif // MEDIA_BASE_ANDROID_MEDIA_CODEC_AUDIO_DECODER_H_
diff --git a/chromium/media/base/android/media_codec_bridge.cc b/chromium/media/base/android/media_codec_bridge.cc
index 14f33002ea8..3d22752159a 100644
--- a/chromium/media/base/android/media_codec_bridge.cc
+++ b/chromium/media/base/android/media_codec_bridge.cc
@@ -246,10 +246,10 @@ bool MediaCodecBridge::IsKnownUnaccelerated(const std::string& mime_type,
// devices while HW decoder video freezes and distortions are
// investigated - http://crbug.com/446974.
if (codec_name.length() > 0) {
- return (StartsWithASCII(codec_name, "OMX.google.", true) ||
- StartsWithASCII(codec_name, "OMX.SEC.", true) ||
- StartsWithASCII(codec_name, "OMX.MTK.", true) ||
- StartsWithASCII(codec_name, "OMX.Exynos.", true));
+ return (base::StartsWithASCII(codec_name, "OMX.google.", true) ||
+ base::StartsWithASCII(codec_name, "OMX.SEC.", true) ||
+ base::StartsWithASCII(codec_name, "OMX.MTK.", true) ||
+ base::StartsWithASCII(codec_name, "OMX.Exynos.", true));
}
return true;
}
diff --git a/chromium/media/base/android/media_codec_decoder.cc b/chromium/media/base/android/media_codec_decoder.cc
new file mode 100644
index 00000000000..8652d5bf61c
--- /dev/null
+++ b/chromium/media/base/android/media_codec_decoder.cc
@@ -0,0 +1,625 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/base/android/media_codec_decoder.h"
+
+#include "base/bind.h"
+#include "base/bind_helpers.h"
+#include "base/callback_helpers.h"
+#include "base/logging.h"
+#include "media/base/android/media_codec_bridge.h"
+
+namespace media {
+
+namespace {
+
+// Stop requesting new data in the kPrefetching state when the queue size
+// reaches this limit.
+const int kPrefetchLimit = 8;
+
+// Request new data in the kRunning state if the queue size is less than this.
+const int kPlaybackLowLimit = 4;
+
+// Posting delay of the next frame processing, in milliseconds
+const int kNextFrameDelay = 1;
+
+// Timeout for dequeuing an input buffer from MediaCodec in milliseconds.
+const int kInputBufferTimeout = 20;
+
+// Timeout for dequeuing an output buffer from MediaCodec in milliseconds.
+const int kOutputBufferTimeout = 20;
+}
+
+MediaCodecDecoder::MediaCodecDecoder(
+ const scoped_refptr<base::SingleThreadTaskRunner>& media_task_runner,
+ const base::Closure& external_request_data_cb,
+ const base::Closure& starvation_cb,
+ const base::Closure& stop_done_cb,
+ const base::Closure& error_cb,
+ const char* decoder_thread_name)
+ : media_task_runner_(media_task_runner),
+ decoder_thread_(decoder_thread_name),
+ external_request_data_cb_(external_request_data_cb),
+ starvation_cb_(starvation_cb),
+ stop_done_cb_(stop_done_cb),
+ error_cb_(error_cb),
+ state_(kStopped),
+ eos_enqueued_(false),
+ completed_(false),
+ last_frame_posted_(false),
+ is_data_request_in_progress_(false),
+ is_incoming_data_invalid_(false),
+ weak_factory_(this) {
+ DCHECK(media_task_runner_->BelongsToCurrentThread());
+
+ DVLOG(1) << "Decoder::Decoder() " << decoder_thread_name;
+
+ internal_error_cb_ =
+ base::Bind(&MediaCodecDecoder::OnCodecError, weak_factory_.GetWeakPtr());
+ request_data_cb_ =
+ base::Bind(&MediaCodecDecoder::RequestData, weak_factory_.GetWeakPtr());
+}
+
+MediaCodecDecoder::~MediaCodecDecoder() {
+ DCHECK(media_task_runner_->BelongsToCurrentThread());
+
+ DVLOG(1) << "Decoder::~Decoder()";
+
+ // NB: ReleaseDecoderResources() is virtual
+ ReleaseDecoderResources();
+}
+
+const char* MediaCodecDecoder::class_name() const {
+ return "Decoder";
+}
+
+void MediaCodecDecoder::ReleaseDecoderResources() {
+ DCHECK(media_task_runner_->BelongsToCurrentThread());
+
+ DVLOG(1) << class_name() << "::" << __FUNCTION__;
+
+ decoder_thread_.Stop(); // synchronous
+ state_ = kStopped;
+ media_codec_bridge_.reset();
+}
+
+void MediaCodecDecoder::Flush() {
+ DCHECK(media_task_runner_->BelongsToCurrentThread());
+
+ DVLOG(1) << class_name() << "::" << __FUNCTION__;
+
+ DCHECK_EQ(GetState(), kStopped);
+
+ // Flush() is a part of the Seek request. Whenever we request a seek we need
+ // to invalidate the current data request.
+ if (is_data_request_in_progress_)
+ is_incoming_data_invalid_ = true;
+
+ eos_enqueued_ = false;
+ completed_ = false;
+ au_queue_.Flush();
+
+ if (media_codec_bridge_) {
+ // MediaCodecBridge::Reset() performs MediaCodecBridge.flush()
+ MediaCodecStatus flush_status = media_codec_bridge_->Reset();
+ if (flush_status != MEDIA_CODEC_OK) {
+ DVLOG(0) << class_name() << "::" << __FUNCTION__
+ << "MediaCodecBridge::Reset() failed";
+ media_task_runner_->PostTask(FROM_HERE, internal_error_cb_);
+ }
+ }
+}
+
+void MediaCodecDecoder::ReleaseMediaCodec() {
+ DCHECK(media_task_runner_->BelongsToCurrentThread());
+
+ DVLOG(1) << class_name() << "::" << __FUNCTION__;
+
+ media_codec_bridge_.reset();
+}
+
+bool MediaCodecDecoder::IsPrefetchingOrPlaying() const {
+ DCHECK(media_task_runner_->BelongsToCurrentThread());
+
+ base::AutoLock lock(state_lock_);
+ return state_ == kPrefetching || state_ == kRunning;
+}
+
+bool MediaCodecDecoder::IsStopped() const {
+ DCHECK(media_task_runner_->BelongsToCurrentThread());
+
+ return GetState() == kStopped;
+}
+
+bool MediaCodecDecoder::IsCompleted() const {
+ DCHECK(media_task_runner_->BelongsToCurrentThread());
+
+ return completed_;
+}
+
+base::android::ScopedJavaLocalRef<jobject> MediaCodecDecoder::GetMediaCrypto() {
+ base::android::ScopedJavaLocalRef<jobject> media_crypto;
+
+ // TODO(timav): implement DRM.
+ // drm_bridge_ is not implemented
+ // if (drm_bridge_)
+ // media_crypto = drm_bridge_->GetMediaCrypto();
+ return media_crypto;
+}
+
+void MediaCodecDecoder::Prefetch(const base::Closure& prefetch_done_cb) {
+ DCHECK(media_task_runner_->BelongsToCurrentThread());
+
+ DVLOG(1) << class_name() << "::" << __FUNCTION__;
+
+ DCHECK(GetState() == kStopped);
+
+ prefetch_done_cb_ = prefetch_done_cb;
+
+ SetState(kPrefetching);
+ PrefetchNextChunk();
+}
+
+MediaCodecDecoder::ConfigStatus MediaCodecDecoder::Configure() {
+ DCHECK(media_task_runner_->BelongsToCurrentThread());
+
+ DVLOG(1) << class_name() << "::" << __FUNCTION__;
+
+ if (GetState() == kError) {
+ DVLOG(0) << class_name() << "::" << __FUNCTION__ << ": wrong state kError";
+ return CONFIG_FAILURE;
+ }
+
+ // Here I assume that OnDemuxerConfigsAvailable won't come
+ // in the middle of demuxer data.
+
+ if (media_codec_bridge_) {
+ DVLOG(1) << class_name() << "::" << __FUNCTION__
+ << ": reconfiguration is not required, ignoring";
+ return CONFIG_OK;
+ }
+
+ return ConfigureInternal();
+}
+
+bool MediaCodecDecoder::Start(base::TimeDelta current_time) {
+ DCHECK(media_task_runner_->BelongsToCurrentThread());
+
+ DVLOG(1) << class_name() << "::" << __FUNCTION__
+ << " current_time:" << current_time;
+
+ DecoderState state = GetState();
+ if (state == kRunning) {
+ DVLOG(1) << class_name() << "::" << __FUNCTION__ << ": already started";
+ return true; // already started
+ }
+
+ if (state != kPrefetched) {
+ DVLOG(0) << class_name() << "::" << __FUNCTION__ << ": wrong state "
+ << AsString(state) << " ignoring";
+ return false;
+ }
+
+ if (!media_codec_bridge_) {
+ DVLOG(0) << class_name() << "::" << __FUNCTION__
+ << ": not configured, ignoring";
+ return false;
+ }
+
+ DCHECK(!decoder_thread_.IsRunning());
+
+ // We only synchronize video stream.
+ // When audio is present, the |current_time| is audio time.
+ SynchronizePTSWithTime(current_time);
+
+ last_frame_posted_ = false;
+
+ // Start the decoder thread
+ if (!decoder_thread_.Start()) {
+ DVLOG(1) << class_name() << "::" << __FUNCTION__
+ << ": cannot start decoder thread";
+ return false;
+ }
+
+ SetState(kRunning);
+
+ decoder_thread_.task_runner()->PostTask(
+ FROM_HERE,
+ base::Bind(&MediaCodecDecoder::ProcessNextFrame, base::Unretained(this)));
+
+ return true;
+}
+
+void MediaCodecDecoder::SyncStop() {
+ DCHECK(media_task_runner_->BelongsToCurrentThread());
+
+ DVLOG(1) << class_name() << "::" << __FUNCTION__;
+
+ if (GetState() == kError) {
+ DVLOG(0) << class_name() << "::" << __FUNCTION__
+ << ": wrong state kError, ignoring";
+ return;
+ }
+
+ // After this method returns, decoder thread will not be running.
+
+ decoder_thread_.Stop(); // synchronous
+ state_ = kStopped;
+
+ // Shall we move |delayed_buffers_| from VideoDecoder to Decoder class?
+ ReleaseDelayedBuffers();
+}
+
+void MediaCodecDecoder::RequestToStop() {
+ DCHECK(media_task_runner_->BelongsToCurrentThread());
+
+ DVLOG(1) << class_name() << "::" << __FUNCTION__;
+
+ DecoderState state = GetState();
+ switch (state) {
+ case kError:
+ DVLOG(0) << class_name() << "::" << __FUNCTION__
+ << ": wrong state kError, ignoring";
+ break;
+ case kRunning:
+ SetState(kStopping);
+ break;
+ case kStopping:
+ break; // ignore
+ case kStopped:
+ case kPrefetching:
+ case kPrefetched:
+      // There is nothing to wait for, we can send the notification right away.
+ DCHECK(!decoder_thread_.IsRunning());
+ SetState(kStopped);
+ media_task_runner_->PostTask(FROM_HERE, stop_done_cb_);
+ break;
+ default:
+ NOTREACHED();
+ break;
+ }
+}
+
+void MediaCodecDecoder::OnLastFrameRendered(bool completed) {
+ DCHECK(media_task_runner_->BelongsToCurrentThread());
+
+ DVLOG(1) << class_name() << "::" << __FUNCTION__
+ << " completed:" << completed;
+
+ decoder_thread_.Stop(); // synchronous
+ state_ = kStopped;
+ completed_ = completed;
+
+ media_task_runner_->PostTask(FROM_HERE, stop_done_cb_);
+}
+
+void MediaCodecDecoder::OnDemuxerDataAvailable(const DemuxerData& data) {
+ DCHECK(media_task_runner_->BelongsToCurrentThread());
+
+ const char* explain_if_skipped =
+ is_incoming_data_invalid_ ? " skipped as invalid" : "";
+
+ DVLOG(2) << class_name() << "::" << __FUNCTION__ << explain_if_skipped
+ << " #AUs:" << data.access_units.size()
+ << " #Configs:" << data.demuxer_configs.size();
+#if !defined(NDEBUG)
+ for (const auto& unit : data.access_units)
+ DVLOG(2) << class_name() << "::" << __FUNCTION__ << explain_if_skipped
+ << " au: " << unit;
+#endif
+
+ if (!is_incoming_data_invalid_)
+ au_queue_.PushBack(data);
+
+ is_incoming_data_invalid_ = false;
+ is_data_request_in_progress_ = false;
+
+ if (state_ == kPrefetching)
+ PrefetchNextChunk();
+}
+
+int MediaCodecDecoder::NumDelayedRenderTasks() const {
+ return 0;
+}
+
+void MediaCodecDecoder::CheckLastFrame(bool eos_encountered,
+ bool has_delayed_tasks) {
+ DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread());
+
+ bool last_frame_when_stopping = GetState() == kStopping && !has_delayed_tasks;
+
+ if (last_frame_when_stopping || eos_encountered) {
+ media_task_runner_->PostTask(
+ FROM_HERE, base::Bind(&MediaCodecDecoder::OnLastFrameRendered,
+ weak_factory_.GetWeakPtr(), eos_encountered));
+ last_frame_posted_ = true;
+ }
+}
+
+void MediaCodecDecoder::OnCodecError() {
+ DCHECK(media_task_runner_->BelongsToCurrentThread());
+
+ SetState(kError);
+ error_cb_.Run();
+}
+
+void MediaCodecDecoder::RequestData() {
+ DCHECK(media_task_runner_->BelongsToCurrentThread());
+
+ // Ensure one data request at a time.
+ if (!is_data_request_in_progress_) {
+ is_data_request_in_progress_ = true;
+ external_request_data_cb_.Run();
+ }
+}
+
+void MediaCodecDecoder::PrefetchNextChunk() {
+ DCHECK(media_task_runner_->BelongsToCurrentThread());
+
+ DVLOG(1) << class_name() << "::" << __FUNCTION__;
+
+ AccessUnitQueue::Info au_info = au_queue_.GetInfo();
+
+ if (eos_enqueued_ || au_info.length >= kPrefetchLimit || au_info.has_eos) {
+ // We are done prefetching
+ SetState(kPrefetched);
+ DVLOG(1) << class_name() << "::" << __FUNCTION__ << " posting PrefetchDone";
+ media_task_runner_->PostTask(FROM_HERE,
+ base::ResetAndReturn(&prefetch_done_cb_));
+ return;
+ }
+
+ request_data_cb_.Run();
+}
+
+void MediaCodecDecoder::ProcessNextFrame() {
+ DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread());
+
+ DVLOG(2) << class_name() << "::" << __FUNCTION__;
+
+ DecoderState state = GetState();
+
+ if (state != kRunning && state != kStopping) {
+ DVLOG(1) << class_name() << "::" << __FUNCTION__ << ": not running";
+ return;
+ }
+
+ if (state == kStopping) {
+ if (NumDelayedRenderTasks() == 0 && !last_frame_posted_) {
+ DVLOG(1) << class_name() << "::" << __FUNCTION__
+ << ": kStopping, posting OnLastFrameRendered";
+ media_task_runner_->PostTask(
+ FROM_HERE, base::Bind(&MediaCodecDecoder::OnLastFrameRendered,
+ weak_factory_.GetWeakPtr(), false));
+ last_frame_posted_ = true;
+ }
+
+ // We can stop processing, the |au_queue_| and MediaCodec queues can freeze.
+ // We only need to let finish the delayed rendering tasks.
+ return;
+ }
+
+ DCHECK(state == kRunning);
+
+ if (!EnqueueInputBuffer())
+ return;
+
+ bool eos_encountered = false;
+ if (!DepleteOutputBufferQueue(&eos_encountered))
+ return;
+
+ if (eos_encountered) {
+ DVLOG(1) << class_name() << "::" << __FUNCTION__
+ << " EOS dequeued, stopping frame processing";
+ return;
+ }
+
+ // We need a small delay if we want to stop this thread by
+ // decoder_thread_.Stop() reliably.
+ // The decoder thread message loop processes all pending
+ // (but not delayed) tasks before it can quit; without a delay
+  // the message loop might be forever processing the pending tasks.
+ decoder_thread_.task_runner()->PostDelayedTask(
+ FROM_HERE,
+ base::Bind(&MediaCodecDecoder::ProcessNextFrame, base::Unretained(this)),
+ base::TimeDelta::FromMilliseconds(kNextFrameDelay));
+}
+
+// Returns false if we should stop decoding process. Right now
+// it happens if we got MediaCodec error or detected starvation.
+bool MediaCodecDecoder::EnqueueInputBuffer() {
+ DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread());
+
+ DVLOG(2) << class_name() << "::" << __FUNCTION__;
+
+ if (eos_enqueued_) {
+ DVLOG(1) << class_name() << "::" << __FUNCTION__
+ << ": eos_enqueued, returning";
+ return true; // Nothing to do
+ }
+
+ // Keep the number pending video frames low, ideally maintaining
+ // the same audio and video duration after stop request
+ if (NumDelayedRenderTasks() > 1) {
+ DVLOG(2) << class_name() << "::" << __FUNCTION__ << ": # delayed buffers ("
+ << NumDelayedRenderTasks() << ") exceeds 1, returning";
+ return true; // Nothing to do
+ }
+
+ // Get the next frame from the queue and the queue info
+
+ AccessUnitQueue::Info au_info = au_queue_.GetInfo();
+
+ // Request the data from Demuxer
+ if (au_info.length <= kPlaybackLowLimit && !au_info.has_eos)
+ media_task_runner_->PostTask(FROM_HERE, request_data_cb_);
+
+ // Get the next frame from the queue
+
+ if (!au_info.length) {
+ // Report starvation and return, Start() will be called again later.
+ DVLOG(1) << class_name() << "::" << __FUNCTION__ << ": starvation detected";
+ media_task_runner_->PostTask(FROM_HERE, starvation_cb_);
+ return true;
+ }
+
+ if (au_info.configs) {
+ DVLOG(1) << class_name() << "::" << __FUNCTION__
+ << ": received new configs, not implemented";
+ // post an error for now?
+ media_task_runner_->PostTask(FROM_HERE, internal_error_cb_);
+ return false;
+ }
+
+ // Dequeue input buffer
+
+ base::TimeDelta timeout =
+ base::TimeDelta::FromMilliseconds(kInputBufferTimeout);
+ int index = -1;
+ MediaCodecStatus status =
+ media_codec_bridge_->DequeueInputBuffer(timeout, &index);
+
+ DVLOG(2) << class_name() << ":: DequeueInputBuffer index:" << index;
+
+ switch (status) {
+ case MEDIA_CODEC_ERROR:
+ DVLOG(0) << class_name() << "::" << __FUNCTION__
+ << ": MEDIA_CODEC_ERROR DequeueInputBuffer failed";
+ media_task_runner_->PostTask(FROM_HERE, internal_error_cb_);
+ return false;
+
+ case MEDIA_CODEC_DEQUEUE_INPUT_AGAIN_LATER:
+ return true;
+
+ default:
+ break;
+ }
+
+ // We got the buffer
+ DCHECK_EQ(status, MEDIA_CODEC_OK);
+ DCHECK_GE(index, 0);
+
+ const AccessUnit* unit = au_info.front_unit;
+ DCHECK(unit);
+
+ if (unit->is_end_of_stream) {
+ DVLOG(1) << class_name() << "::" << __FUNCTION__ << ": QueueEOS";
+ media_codec_bridge_->QueueEOS(index);
+ eos_enqueued_ = true;
+ return true;
+ }
+
+ DVLOG(2) << class_name() << ":: QueueInputBuffer pts:" << unit->timestamp;
+
+ status = media_codec_bridge_->QueueInputBuffer(
+ index, &unit->data[0], unit->data.size(), unit->timestamp);
+
+ if (status == MEDIA_CODEC_ERROR) {
+ DVLOG(0) << class_name() << "::" << __FUNCTION__
+ << ": MEDIA_CODEC_ERROR: QueueInputBuffer failed";
+ media_task_runner_->PostTask(FROM_HERE, internal_error_cb_);
+ return false;
+ }
+
+ // Have successfully queued input buffer, go to next access unit.
+ au_queue_.Advance();
+ return true;
+}
+
+// Returns false if there was MediaCodec error.
+bool MediaCodecDecoder::DepleteOutputBufferQueue(bool* eos_encountered) {
+ DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread());
+
+ DVLOG(2) << class_name() << "::" << __FUNCTION__;
+
+ int buffer_index = 0;
+ size_t offset = 0;
+ size_t size = 0;
+ base::TimeDelta pts;
+ MediaCodecStatus status;
+
+ base::TimeDelta timeout =
+ base::TimeDelta::FromMilliseconds(kOutputBufferTimeout);
+
+ // Extract all output buffers that are available.
+ // Usually there will be only one, but sometimes it is preceeded by
+ // MEDIA_CODEC_OUTPUT_BUFFERS_CHANGED or MEDIA_CODEC_OUTPUT_FORMAT_CHANGED.
+ do {
+ status = media_codec_bridge_->DequeueOutputBuffer(
+ timeout, &buffer_index, &offset, &size, &pts, eos_encountered, nullptr);
+
+ // Reset the timeout to 0 for the subsequent DequeueOutputBuffer() calls
+ // to quickly break the loop after we got all currently available buffers.
+ timeout = base::TimeDelta::FromMilliseconds(0);
+
+ switch (status) {
+ case MEDIA_CODEC_OUTPUT_BUFFERS_CHANGED:
+ // Output buffers are replaced in MediaCodecBridge, nothing to do.
+ break;
+
+ case MEDIA_CODEC_OUTPUT_FORMAT_CHANGED:
+ DVLOG(2) << class_name() << "::" << __FUNCTION__
+ << " MEDIA_CODEC_OUTPUT_FORMAT_CHANGED";
+ OnOutputFormatChanged();
+ break;
+
+ case MEDIA_CODEC_OK:
+ // We got the decoded frame
+ Render(buffer_index, size, true, pts, *eos_encountered);
+ break;
+
+ case MEDIA_CODEC_DEQUEUE_OUTPUT_AGAIN_LATER:
+ // Nothing to do.
+ break;
+
+ case MEDIA_CODEC_ERROR:
+ DVLOG(0) << class_name() << "::" << __FUNCTION__
+ << ": MEDIA_CODEC_ERROR from DequeueOutputBuffer";
+ media_task_runner_->PostTask(FROM_HERE, internal_error_cb_);
+ break;
+
+ default:
+ NOTREACHED();
+ break;
+ }
+
+ } while (status != MEDIA_CODEC_DEQUEUE_OUTPUT_AGAIN_LATER &&
+ status != MEDIA_CODEC_ERROR && !*eos_encountered);
+
+ return status != MEDIA_CODEC_ERROR;
+}
+
+MediaCodecDecoder::DecoderState MediaCodecDecoder::GetState() const {
+ base::AutoLock lock(state_lock_);
+ return state_;
+}
+
+void MediaCodecDecoder::SetState(DecoderState state) {
+ DVLOG(1) << class_name() << "::" << __FUNCTION__ << " " << state;
+
+ base::AutoLock lock(state_lock_);
+ state_ = state;
+}
+
+#undef RETURN_STRING
+#define RETURN_STRING(x) \
+ case x: \
+ return #x;
+
+const char* MediaCodecDecoder::AsString(DecoderState state) {
+ switch (state) {
+ RETURN_STRING(kStopped);
+ RETURN_STRING(kPrefetching);
+ RETURN_STRING(kPrefetched);
+ RETURN_STRING(kRunning);
+ RETURN_STRING(kStopping);
+ RETURN_STRING(kError);
+ default:
+ return "Unknown DecoderState";
+ }
+}
+
+#undef RETURN_STRING
+
+} // namespace media
diff --git a/chromium/media/base/android/media_codec_decoder.h b/chromium/media/base/android/media_codec_decoder.h
new file mode 100644
index 00000000000..189ebc3c582
--- /dev/null
+++ b/chromium/media/base/android/media_codec_decoder.h
@@ -0,0 +1,295 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_BASE_ANDROID_MEDIA_CODEC_DECODER_H_
+#define MEDIA_BASE_ANDROID_MEDIA_CODEC_DECODER_H_
+
+#include "base/android/scoped_java_ref.h"
+#include "base/callback.h"
+#include "base/macros.h"
+#include "base/memory/ref_counted.h"
+#include "base/memory/scoped_ptr.h"
+#include "base/single_thread_task_runner.h"
+#include "base/synchronization/lock.h"
+#include "base/threading/thread.h"
+#include "base/time/time.h"
+#include "media/base/android/access_unit_queue.h"
+#include "media/base/android/demuxer_stream_player_params.h"
+
+namespace media {
+
+class MediaCodecBridge;
+
+// The decoder for MediaCodecPlayer.
+// This class accepts the incoming data into AccessUnitQueue and works with
+// MediaCodecBridge for decoding and rendering the frames. The MediaCodecPlayer
+// has two decoder objects: audio and video.
+//
+// The decoder works on two threads. The data from demuxer comes on Media
+// thread. The commands from MediaCodecPlayer, such as Prefetch, Start,
+// RequestToStop also come on the Media thread. The operations with MediaCodec
+// buffers and rendering happen on a separate thread called Decoder thread.
+// This class creates, starts and stops it as necessary.
+//
+// Decoder's internal state machine goes through the following states:
+//
+// [ Stopped ] <------------------- (any state except Error)
+// | | |
+// | Prefetch |--- internal ------|
+// v | transition v
+// [ Prefetching ] | [ Error ]
+// | |
+// | internal transition |
+// v | Error recovery:
+// [ Prefetched ] |
+// | | (any state including Error)
+// | Start | |
+// v | | ReleaseDecoderResources
+// [ Running ] | v
+// | | [ Stopped ]
+// | RequestToStop |
+// v |
+// [ Stopping ] -------------------
+//
+//
+// [ Stopped ] --------------------
+// ^ |
+// | Flush |
+// ---------------------------
+
+class MediaCodecDecoder {
+ public:
+ // The result of MediaCodec configuration, used by MediaCodecPlayer.
+ enum ConfigStatus {
+ CONFIG_FAILURE = 0,
+ CONFIG_OK,
+ CONFIG_KEY_FRAME_REQUIRED,
+ };
+
+ // The decoder reports current playback time to the MediaCodecPlayer.
+ // For audio, the parameters designate the beginning and end of a time
+ // interval. The beginning is the estimated time that is playing right now.
+ // The end is the playback time of the last buffered data. During normal
+ // playback the subsequent intervals overlap.
+ // For video both values are PTS of the corresponding frame, i.e. the interval
+ // has zero width.
+ typedef base::Callback<void(base::TimeDelta, base::TimeDelta)>
+ SetTimeCallback;
+
+ // MediaCodecDecoder constructor.
+ // Parameters:
+ // media_task_runner:
+ // A task runner for the controlling thread. All public methods should be
+ // called on this thread, and callbacks are delivered on this thread.
+ // The MediaCodecPlayer uses a dedicated (Media) thread for this.
+ // external_request_data_cb:
+ // Called periodically as the amount of internally stored data decreases.
+ // The receiver should call OnDemuxerDataAvailable() with more data.
+ // starvation_cb:
+ // Called when starvation is detected. The decoder state does not change.
+ // The player is supposed to stop and then prefetch the decoder.
+ // stop_done_cb:
+ // Called when async stop request is completed.
+ // error_cb:
+ // Called when a MediaCodec error occurred. If this happens, a player has
+ // to either call ReleaseDecoderResources() or destroy the decoder object.
+ // decoder_thread_name:
+ // The thread name to be passed to decoder thread constructor.
+ MediaCodecDecoder(
+ const scoped_refptr<base::SingleThreadTaskRunner>& media_task_runner,
+ const base::Closure& external_request_data_cb,
+ const base::Closure& starvation_cb,
+ const base::Closure& stop_done_cb,
+ const base::Closure& error_cb,
+ const char* decoder_thread_name);
+ virtual ~MediaCodecDecoder();
+
+ virtual const char* class_name() const;
+
+ // MediaCodecDecoder exists through the whole lifetime of the player
+ // to support dynamic addition and removal of the streams.
+ // This method returns true if the current stream (audio or video)
+ // is currently active.
+ virtual bool HasStream() const = 0;
+
+ // Stores configuration for the use of upcoming Configure()
+ virtual void SetDemuxerConfigs(const DemuxerConfigs& configs) = 0;
+
+ // Stops decoder thread, releases the MediaCodecBridge and other resources.
+ virtual void ReleaseDecoderResources();
+
+ // Flushes the MediaCodec and resets the AccessUnitQueue.
+ // Decoder thread should not be running.
+ virtual void Flush();
+
+ // Releases MediaCodecBridge.
+ void ReleaseMediaCodec();
+
+ // Returns corresponding conditions.
+ bool IsPrefetchingOrPlaying() const;
+ bool IsStopped() const;
+ bool IsCompleted() const;
+
+ base::android::ScopedJavaLocalRef<jobject> GetMediaCrypto();
+
+ // Starts prefetching: accumulates enough data in AccessUnitQueue.
+ // Decoder thread is not running.
+ void Prefetch(const base::Closure& prefetch_done_cb);
+
+ // Configures MediaCodec.
+ ConfigStatus Configure();
+
+ // Starts the decoder thread and resumes the playback.
+ bool Start(base::TimeDelta current_time);
+
+ // Stops the playback process synchronously. This method stops the decoder
+ // thread synchronously, and then releases all MediaCodec buffers.
+ void SyncStop();
+
+ // Requests to stop the playback and returns.
+ // Decoder will stop asynchronously after all the dequeued output buffers
+ // are rendered.
+ void RequestToStop();
+
+ // Notification posted when asynchronous stop is done or playback completed.
+ void OnLastFrameRendered(bool completed);
+
+ // Puts the incoming data into AccessUnitQueue.
+ void OnDemuxerDataAvailable(const DemuxerData& data);
+
+ protected:
+ // Returns true if the new DemuxerConfigs requires MediaCodec
+ // reconfiguration.
+ virtual bool IsCodecReconfigureNeeded(const DemuxerConfigs& curr,
+ const DemuxerConfigs& next) const = 0;
+
+ // Does the part of MediaCodecBridge configuration that is specific
+ // to audio or video.
+ virtual ConfigStatus ConfigureInternal() = 0;
+
+ // Associates PTS with device time so we can calculate delays.
+ // We use delays for video decoder only.
+ virtual void SynchronizePTSWithTime(base::TimeDelta current_time) {}
+
+ // Processes the change of the output format, varies by stream.
+ virtual void OnOutputFormatChanged() = 0;
+
+ // Renders the decoded frame and releases output buffer, or posts
+  // a delayed task to do it at a later time.
+ virtual void Render(int buffer_index,
+ size_t size,
+ bool render_output,
+ base::TimeDelta pts,
+ bool eos_encountered) = 0;
+
+  // Returns the number of delayed tasks (we might have them for video).
+ virtual int NumDelayedRenderTasks() const;
+
+ // Releases output buffers that are dequeued and not released yet
+ // because their rendering is delayed (video).
+ virtual void ReleaseDelayedBuffers() {}
+
+ // Helper methods.
+
+ // Notifies the decoder if the frame is the last one.
+ void CheckLastFrame(bool eos_encountered, bool has_delayed_tasks);
+
+ // Protected data.
+
+ // Object for posting tasks on Media thread.
+ scoped_refptr<base::SingleThreadTaskRunner> media_task_runner_;
+
+ // Controls Android MediaCodec
+ scoped_ptr<MediaCodecBridge> media_codec_bridge_;
+
+ // We call MediaCodecBridge on this thread for both
+ // input and output buffers.
+ base::Thread decoder_thread_;
+
+ // The queue of access units.
+ AccessUnitQueue au_queue_;
+
+ private:
+ enum DecoderState {
+ kStopped = 0,
+ kPrefetching,
+ kPrefetched,
+ kRunning,
+ kStopping,
+ kError,
+ };
+
+ // Helper method that processes an error from MediaCodec.
+ void OnCodecError();
+
+ // Requests data. Ensures there is no more than one request at a time.
+ void RequestData();
+
+ // Prefetching callback that is posted to Media thread
+ // in the kPrefetching state.
+ void PrefetchNextChunk();
+
+ // The callback to do actual playback. Posted to Decoder thread
+ // in the kRunning state.
+ void ProcessNextFrame();
+
+ // Helper method for ProcessNextFrame.
+ // Pushes one input buffer to the MediaCodec if the codec can accept it.
+ // Returns false if there was MediaCodec error.
+ bool EnqueueInputBuffer();
+
+ // Helper method for ProcessNextFrame.
+ // Pulls all currently available output frames and renders them.
+ // Returns false if there was MediaCodec error.
+ bool DepleteOutputBufferQueue(bool* eos_encountered);
+
+ DecoderState GetState() const;
+ void SetState(DecoderState state);
+ const char* AsString(DecoderState state);
+
+ // Private Data.
+
+ // External data request callback that is passed to decoder.
+ base::Closure external_request_data_cb_;
+
+ // These notifications are called on corresponding conditions.
+ base::Closure prefetch_done_cb_;
+ base::Closure starvation_cb_;
+ base::Closure stop_done_cb_;
+ base::Closure error_cb_;
+
+ // Data request callback that is posted by decoder internally.
+ base::Closure request_data_cb_;
+
+ // Callback used to post OnCodecError method.
+ base::Closure internal_error_cb_;
+
+ // Internal state.
+ DecoderState state_;
+ mutable base::Lock state_lock_;
+
+ // Flag is set when the EOS is enqueued into MediaCodec. Reset by Flush.
+ bool eos_enqueued_;
+
+ // Flag is set when the EOS is received in MediaCodec output. Reset by Flush.
+ bool completed_;
+
+ // Flag to ensure we post last frame notification once.
+ bool last_frame_posted_;
+
+ // Indicates whether the data request is in progress.
+ bool is_data_request_in_progress_;
+
+ // Indicates whether the incoming data should be ignored.
+ bool is_incoming_data_invalid_;
+
+ // NOTE: Weak pointers must be invalidated before all other member variables.
+ base::WeakPtrFactory<MediaCodecDecoder> weak_factory_;
+
+ DISALLOW_COPY_AND_ASSIGN(MediaCodecDecoder);
+};
+
+} // namespace media
+
+#endif // MEDIA_BASE_ANDROID_MEDIA_CODEC_DECODER_H_
diff --git a/chromium/media/base/android/media_codec_decoder_unittest.cc b/chromium/media/base/android/media_codec_decoder_unittest.cc
new file mode 100644
index 00000000000..7f8d81337dc
--- /dev/null
+++ b/chromium/media/base/android/media_codec_decoder_unittest.cc
@@ -0,0 +1,651 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "base/bind.h"
+#include "base/logging.h"
+#include "base/thread_task_runner_handle.h"
+#include "base/timer/timer.h"
+#include "media/base/android/media_codec_audio_decoder.h"
+#include "media/base/android/media_codec_bridge.h"
+#include "media/base/android/media_codec_video_decoder.h"
+#include "media/base/android/test_data_factory.h"
+#include "testing/gtest/include/gtest/gtest.h"
+#include "ui/gl/android/surface_texture.h"
+
+namespace media {
+
+// Helper macro to skip the test if MediaCodecBridge isn't available.
+#define SKIP_TEST_IF_MEDIA_CODEC_BRIDGE_IS_NOT_AVAILABLE() \
+ do { \
+ if (!MediaCodecBridge::IsAvailable()) { \
+ VLOG(0) << "Could not run test - not supported on device."; \
+ return; \
+ } \
+ } while (0)
+
+namespace {
+
+const base::TimeDelta kDefaultTimeout = base::TimeDelta::FromMilliseconds(200);
+const base::TimeDelta kAudioFramePeriod = base::TimeDelta::FromMilliseconds(20);
+const base::TimeDelta kVideoFramePeriod = base::TimeDelta::FromMilliseconds(20);
+
+class AudioFactory : public TestDataFactory {
+ public:
+ AudioFactory(const base::TimeDelta& duration);
+ DemuxerConfigs GetConfigs() const override;
+
+ protected:
+ void ModifyAccessUnit(int index_in_chunk, AccessUnit* unit) override;
+};
+
+class VideoFactory : public TestDataFactory {
+ public:
+ VideoFactory(const base::TimeDelta& duration);
+ DemuxerConfigs GetConfigs() const override;
+
+ protected:
+ void ModifyAccessUnit(int index_in_chunk, AccessUnit* unit) override;
+};
+
+AudioFactory::AudioFactory(const base::TimeDelta& duration)
+ : TestDataFactory("vorbis-packet-%d", duration, kAudioFramePeriod) {
+}
+
+DemuxerConfigs AudioFactory::GetConfigs() const {
+ return TestDataFactory::CreateAudioConfigs(kCodecVorbis, duration_);
+}
+
+void AudioFactory::ModifyAccessUnit(int index_in_chunk, AccessUnit* unit) {
+ // Vorbis needs 4 extra bytes padding on Android to decode properly. Check
+ // NuMediaExtractor.cpp in Android source code.
+ uint8 padding[4] = {0xff, 0xff, 0xff, 0xff};
+ unit->data.insert(unit->data.end(), padding, padding + 4);
+}
+
+VideoFactory::VideoFactory(const base::TimeDelta& duration)
+ : TestDataFactory("h264-320x180-frame-%d", duration, kVideoFramePeriod) {
+}
+
+DemuxerConfigs VideoFactory::GetConfigs() const {
+ return TestDataFactory::CreateVideoConfigs(kCodecH264, duration_,
+ gfx::Size(320, 180));
+}
+
+void VideoFactory::ModifyAccessUnit(int index_in_chunk, AccessUnit* unit) {
+ // The frames are taken from High profile and some are B-frames.
+ // The first 4 frames appear in the file in the following order:
+ //
+ // Frames: I P B P
+ // Decoding order: 0 1 2 3
+ // Presentation order: 0 2 1 4(3)
+ //
+ // I keep the last PTS to be 3 for simplicity.
+
+ // Swap pts for second and third frames.
+ if (index_in_chunk == 1) // second frame
+ unit->timestamp += frame_period_;
+ if (index_in_chunk == 2) // third frame
+ unit->timestamp -= frame_period_;
+
+ if (index_in_chunk == 0)
+ unit->is_key_frame = true;
+}
+
+// Class that computes statistics: number of calls, minimum and maximum values.
+// It is used for PTS statistics to verify that playback did actually happen.
+
+template <typename T>
+class Minimax {
+ public:
+ Minimax() : num_values_(0) {}
+ ~Minimax() {}
+
+ void AddValue(const T& value) {
+ ++num_values_;
+ if (value < min_)
+ min_ = value;
+ else if (max_ < value)
+ max_ = value;
+ }
+
+ const T& min() const { return min_; }
+ const T& max() const { return max_; }
+ int num_values() const { return num_values_; }
+
+ private:
+ T min_;
+ T max_;
+ int num_values_;
+};
+
+} // namespace (anonymous)
+
+// The test fixture for MediaCodecDecoder
+
+class MediaCodecDecoderTest : public testing::Test {
+ public:
+ MediaCodecDecoderTest();
+ ~MediaCodecDecoderTest() override;
+
+ // Conditions we wait for.
+ bool is_prefetched() const { return is_prefetched_; }
+ bool is_stopped() const { return is_stopped_; }
+ bool is_starved() const { return is_starved_; }
+
+ void SetPrefetched(bool value) { is_prefetched_ = value; }
+ void SetStopped(bool value) { is_stopped_ = value; }
+ void SetStarved(bool value) { is_starved_ = value; }
+
+ protected:
+ typedef base::Callback<bool()> Predicate;
+
+ typedef base::Callback<void(const DemuxerData&)> DataAvailableCallback;
+
+ // Waits for condition to become true or for timeout to expire.
+ // Returns true if the condition becomes true.
+ bool WaitForCondition(const Predicate& condition,
+ const base::TimeDelta& timeout = kDefaultTimeout);
+
+ void SetDataFactory(scoped_ptr<TestDataFactory> factory) {
+ data_factory_ = factory.Pass();
+ }
+
+ DemuxerConfigs GetConfigs() const {
+ // ASSERT_NE does not compile here because it expects void return value.
+ EXPECT_NE(nullptr, data_factory_.get());
+ return data_factory_->GetConfigs();
+ }
+
+ void CreateAudioDecoder();
+ void CreateVideoDecoder();
+ void SetVideoSurface();
+ void SetStopRequestAtTime(const base::TimeDelta& time) {
+ stop_request_time_ = time;
+ }
+
+ // Decoder callbacks.
+ void OnDataRequested();
+ void OnStarvation() { is_starved_ = true; }
+ void OnStopDone() { is_stopped_ = true; }
+ void OnError() {}
+ void OnUpdateCurrentTime(base::TimeDelta now_playing,
+ base::TimeDelta last_buffered) {
+ pts_stat_.AddValue(now_playing);
+
+ if (stop_request_time_ != kNoTimestamp() &&
+ now_playing >= stop_request_time_) {
+ stop_request_time_ = kNoTimestamp();
+ decoder_->RequestToStop();
+ }
+ }
+
+ void OnVideoSizeChanged(const gfx::Size& video_size) {}
+ void OnVideoCodecCreated() {}
+
+ scoped_ptr<MediaCodecDecoder> decoder_;
+ scoped_ptr<TestDataFactory> data_factory_;
+ Minimax<base::TimeDelta> pts_stat_;
+
+ private:
+ bool is_timeout_expired() const { return is_timeout_expired_; }
+ void SetTimeoutExpired(bool value) { is_timeout_expired_ = value; }
+
+ base::MessageLoop message_loop_;
+ bool is_timeout_expired_;
+
+ bool is_prefetched_;
+ bool is_stopped_;
+ bool is_starved_;
+ base::TimeDelta stop_request_time_;
+
+ scoped_refptr<base::SingleThreadTaskRunner> task_runner_;
+ DataAvailableCallback data_available_cb_;
+ scoped_refptr<gfx::SurfaceTexture> surface_texture_;
+
+ DISALLOW_COPY_AND_ASSIGN(MediaCodecDecoderTest);
+};
+
+MediaCodecDecoderTest::MediaCodecDecoderTest()
+ : is_timeout_expired_(false),
+ is_prefetched_(false),
+ is_stopped_(false),
+ is_starved_(false),
+ stop_request_time_(kNoTimestamp()),
+ task_runner_(base::ThreadTaskRunnerHandle::Get()) {
+}
+
+MediaCodecDecoderTest::~MediaCodecDecoderTest() {}
+
+bool MediaCodecDecoderTest::WaitForCondition(const Predicate& condition,
+ const base::TimeDelta& timeout) {
+ // Let the message_loop_ process events.
+ // We start the timer and RunUntilIdle() until it signals.
+
+ SetTimeoutExpired(false);
+
+ base::Timer timer(false, false);
+ timer.Start(FROM_HERE, timeout,
+ base::Bind(&MediaCodecDecoderTest::SetTimeoutExpired,
+ base::Unretained(this), true));
+
+ do {
+ if (condition.Run()) {
+ timer.Stop();
+ return true;
+ }
+ message_loop_.RunUntilIdle();
+ } while (!is_timeout_expired());
+
+ DCHECK(!timer.IsRunning());
+ return false;
+}
+
+void MediaCodecDecoderTest::CreateAudioDecoder() {
+ decoder_ = scoped_ptr<MediaCodecDecoder>(new MediaCodecAudioDecoder(
+ task_runner_, base::Bind(&MediaCodecDecoderTest::OnDataRequested,
+ base::Unretained(this)),
+ base::Bind(&MediaCodecDecoderTest::OnStarvation, base::Unretained(this)),
+ base::Bind(&MediaCodecDecoderTest::OnStopDone, base::Unretained(this)),
+ base::Bind(&MediaCodecDecoderTest::OnError, base::Unretained(this)),
+ base::Bind(&MediaCodecDecoderTest::OnUpdateCurrentTime,
+ base::Unretained(this))));
+
+ data_available_cb_ = base::Bind(&MediaCodecDecoder::OnDemuxerDataAvailable,
+ base::Unretained(decoder_.get()));
+}
+
+void MediaCodecDecoderTest::CreateVideoDecoder() {
+ decoder_ = scoped_ptr<MediaCodecDecoder>(new MediaCodecVideoDecoder(
+ task_runner_, base::Bind(&MediaCodecDecoderTest::OnDataRequested,
+ base::Unretained(this)),
+ base::Bind(&MediaCodecDecoderTest::OnStarvation, base::Unretained(this)),
+ base::Bind(&MediaCodecDecoderTest::OnStopDone, base::Unretained(this)),
+ base::Bind(&MediaCodecDecoderTest::OnError, base::Unretained(this)),
+ base::Bind(&MediaCodecDecoderTest::OnUpdateCurrentTime,
+ base::Unretained(this)),
+ base::Bind(&MediaCodecDecoderTest::OnVideoSizeChanged,
+ base::Unretained(this)),
+ base::Bind(&MediaCodecDecoderTest::OnVideoCodecCreated,
+ base::Unretained(this))));
+
+ data_available_cb_ = base::Bind(&MediaCodecDecoder::OnDemuxerDataAvailable,
+ base::Unretained(decoder_.get()));
+}
+
+void MediaCodecDecoderTest::OnDataRequested() {
+ if (!data_factory_)
+ return;
+
+ DemuxerData data;
+ base::TimeDelta delay;
+ if (!data_factory_->CreateChunk(&data, &delay))
+ return;
+
+ task_runner_->PostDelayedTask(FROM_HERE, base::Bind(data_available_cb_, data),
+ delay);
+}
+
+void MediaCodecDecoderTest::SetVideoSurface() {
+ surface_texture_ = gfx::SurfaceTexture::Create(0);
+ gfx::ScopedJavaSurface surface(surface_texture_.get());
+ ASSERT_NE(nullptr, decoder_.get());
+ MediaCodecVideoDecoder* video_decoder =
+ static_cast<MediaCodecVideoDecoder*>(decoder_.get());
+ video_decoder->SetPendingSurface(surface.Pass());
+}
+
+TEST_F(MediaCodecDecoderTest, AudioPrefetch) {
+ CreateAudioDecoder();
+
+ base::TimeDelta duration = base::TimeDelta::FromMilliseconds(500);
+ SetDataFactory(scoped_ptr<TestDataFactory>(new AudioFactory(duration)));
+
+ decoder_->Prefetch(base::Bind(&MediaCodecDecoderTest::SetPrefetched,
+ base::Unretained(this), true));
+
+ EXPECT_TRUE(WaitForCondition(base::Bind(&MediaCodecDecoderTest::is_prefetched,
+ base::Unretained(this))));
+}
+
+TEST_F(MediaCodecDecoderTest, VideoPrefetch) {
+ CreateVideoDecoder();
+
+ base::TimeDelta duration = base::TimeDelta::FromMilliseconds(500);
+ SetDataFactory(scoped_ptr<VideoFactory>(new VideoFactory(duration)));
+
+ decoder_->Prefetch(base::Bind(&MediaCodecDecoderTest::SetPrefetched,
+ base::Unretained(this), true));
+
+ EXPECT_TRUE(WaitForCondition(base::Bind(&MediaCodecDecoderTest::is_prefetched,
+ base::Unretained(this))));
+}
+
+TEST_F(MediaCodecDecoderTest, AudioConfigureNoParams) {
+ SKIP_TEST_IF_MEDIA_CODEC_BRIDGE_IS_NOT_AVAILABLE();
+
+ CreateAudioDecoder();
+
+ // Cannot configure without config parameters.
+ EXPECT_EQ(MediaCodecDecoder::CONFIG_FAILURE, decoder_->Configure());
+}
+
+TEST_F(MediaCodecDecoderTest, AudioConfigureValidParams) {
+ SKIP_TEST_IF_MEDIA_CODEC_BRIDGE_IS_NOT_AVAILABLE();
+
+ CreateAudioDecoder();
+
+ base::TimeDelta duration = base::TimeDelta::FromMilliseconds(500);
+ scoped_ptr<AudioFactory> factory(new AudioFactory(duration));
+ decoder_->SetDemuxerConfigs(factory->GetConfigs());
+
+ EXPECT_EQ(MediaCodecDecoder::CONFIG_OK, decoder_->Configure());
+}
+
+TEST_F(MediaCodecDecoderTest, VideoConfigureNoParams) {
+ SKIP_TEST_IF_MEDIA_CODEC_BRIDGE_IS_NOT_AVAILABLE();
+
+ CreateVideoDecoder();
+
+ // Cannot configure without config parameters.
+ EXPECT_EQ(MediaCodecDecoder::CONFIG_FAILURE, decoder_->Configure());
+}
+
+TEST_F(MediaCodecDecoderTest, VideoConfigureNoSurface) {
+ SKIP_TEST_IF_MEDIA_CODEC_BRIDGE_IS_NOT_AVAILABLE();
+
+ CreateVideoDecoder();
+
+ // decoder_->Configure() searches back for the key frame.
+ // We have to prefetch decoder.
+
+ base::TimeDelta duration = base::TimeDelta::FromMilliseconds(500);
+ SetDataFactory(scoped_ptr<VideoFactory>(new VideoFactory(duration)));
+
+ decoder_->Prefetch(base::Bind(&MediaCodecDecoderTest::SetPrefetched,
+ base::Unretained(this), true));
+
+ EXPECT_TRUE(WaitForCondition(base::Bind(&MediaCodecDecoderTest::is_prefetched,
+ base::Unretained(this))));
+
+ decoder_->SetDemuxerConfigs(GetConfigs());
+
+ // Surface is not set, Configure() should fail.
+
+ EXPECT_EQ(MediaCodecDecoder::CONFIG_FAILURE, decoder_->Configure());
+}
+
+TEST_F(MediaCodecDecoderTest, VideoConfigureInvalidSurface) {
+ SKIP_TEST_IF_MEDIA_CODEC_BRIDGE_IS_NOT_AVAILABLE();
+
+ CreateVideoDecoder();
+
+ // decoder_->Configure() searches back for the key frame.
+ // We have to prefetch decoder.
+
+ base::TimeDelta duration = base::TimeDelta::FromMilliseconds(500);
+ SetDataFactory(scoped_ptr<VideoFactory>(new VideoFactory(duration)));
+
+ decoder_->Prefetch(base::Bind(&MediaCodecDecoderTest::SetPrefetched,
+ base::Unretained(this), true));
+
+ EXPECT_TRUE(WaitForCondition(base::Bind(&MediaCodecDecoderTest::is_prefetched,
+ base::Unretained(this))));
+
+ decoder_->SetDemuxerConfigs(GetConfigs());
+
+ // Prepare the surface.
+ scoped_refptr<gfx::SurfaceTexture> surface_texture(
+ gfx::SurfaceTexture::Create(0));
+ gfx::ScopedJavaSurface surface(surface_texture.get());
+
+ // Release the surface texture.
+ surface_texture = NULL;
+
+ MediaCodecVideoDecoder* video_decoder =
+ static_cast<MediaCodecVideoDecoder*>(decoder_.get());
+ video_decoder->SetPendingSurface(surface.Pass());
+
+ EXPECT_EQ(MediaCodecDecoder::CONFIG_FAILURE, decoder_->Configure());
+}
+
+TEST_F(MediaCodecDecoderTest, VideoConfigureValidParams) {
+ SKIP_TEST_IF_MEDIA_CODEC_BRIDGE_IS_NOT_AVAILABLE();
+
+ CreateVideoDecoder();
+
+ // decoder_->Configure() searches back for the key frame.
+ // We have to prefetch decoder.
+
+ base::TimeDelta duration = base::TimeDelta::FromMilliseconds(500);
+ SetDataFactory(scoped_ptr<VideoFactory>(new VideoFactory(duration)));
+
+ decoder_->Prefetch(base::Bind(&MediaCodecDecoderTest::SetPrefetched,
+ base::Unretained(this), true));
+
+ EXPECT_TRUE(WaitForCondition(base::Bind(&MediaCodecDecoderTest::is_prefetched,
+ base::Unretained(this))));
+
+ decoder_->SetDemuxerConfigs(GetConfigs());
+
+ SetVideoSurface();
+
+ // Now we can expect Configure() to succeed.
+
+ EXPECT_EQ(MediaCodecDecoder::CONFIG_OK, decoder_->Configure());
+}
+
+TEST_F(MediaCodecDecoderTest, AudioStartWithoutConfigure) {
+ SKIP_TEST_IF_MEDIA_CODEC_BRIDGE_IS_NOT_AVAILABLE();
+
+ CreateAudioDecoder();
+
+ // Decoder has to be prefetched and configured before the start.
+
+ // Wrong state: not prefetched
+ EXPECT_FALSE(decoder_->Start(base::TimeDelta::FromMilliseconds(0)));
+
+ // Do the prefetch.
+ base::TimeDelta duration = base::TimeDelta::FromMilliseconds(500);
+ SetDataFactory(scoped_ptr<AudioFactory>(new AudioFactory(duration)));
+
+ // Prefetch to avoid starvation at the beginning of playback.
+ decoder_->Prefetch(base::Bind(&MediaCodecDecoderTest::SetPrefetched,
+ base::Unretained(this), true));
+
+ EXPECT_TRUE(WaitForCondition(base::Bind(&MediaCodecDecoderTest::is_prefetched,
+ base::Unretained(this))));
+
+ // Still, decoder is not configured.
+ EXPECT_FALSE(decoder_->Start(base::TimeDelta::FromMilliseconds(0)));
+}
+
+TEST_F(MediaCodecDecoderTest, AudioPlayTillCompletion) {
+ SKIP_TEST_IF_MEDIA_CODEC_BRIDGE_IS_NOT_AVAILABLE();
+
+ CreateAudioDecoder();
+
+ base::TimeDelta duration = base::TimeDelta::FromMilliseconds(500);
+ base::TimeDelta timeout = base::TimeDelta::FromMilliseconds(600);
+
+ SetDataFactory(scoped_ptr<AudioFactory>(new AudioFactory(duration)));
+
+ // Prefetch to avoid starvation at the beginning of playback.
+ decoder_->Prefetch(base::Bind(&MediaCodecDecoderTest::SetPrefetched,
+ base::Unretained(this), true));
+
+ EXPECT_TRUE(WaitForCondition(base::Bind(&MediaCodecDecoderTest::is_prefetched,
+ base::Unretained(this))));
+
+ decoder_->SetDemuxerConfigs(GetConfigs());
+
+ EXPECT_EQ(MediaCodecDecoder::CONFIG_OK, decoder_->Configure());
+
+ EXPECT_TRUE(decoder_->Start(base::TimeDelta::FromMilliseconds(0)));
+
+ EXPECT_TRUE(WaitForCondition(
+ base::Bind(&MediaCodecDecoderTest::is_stopped, base::Unretained(this)),
+ timeout));
+
+ EXPECT_TRUE(decoder_->IsStopped());
+ EXPECT_TRUE(decoder_->IsCompleted());
+
+ // It is hard to properly estimate minimum and maximum values because
+ // reported times are different from PTS.
+ EXPECT_EQ(25, pts_stat_.num_values());
+}
+
+TEST_F(MediaCodecDecoderTest, VideoPlayTillCompletion) {
+ SKIP_TEST_IF_MEDIA_CODEC_BRIDGE_IS_NOT_AVAILABLE();
+
+ CreateVideoDecoder();
+
+ base::TimeDelta duration = base::TimeDelta::FromMilliseconds(500);
+ // The first output frame might come out with significant delay. Apparently
+ // the codec does initial configuration at this time. We increase the timeout
+  // to leave room of 1 second for this initial configuration.
+ base::TimeDelta timeout = base::TimeDelta::FromMilliseconds(1500);
+ SetDataFactory(scoped_ptr<VideoFactory>(new VideoFactory(duration)));
+
+ // Prefetch
+ decoder_->Prefetch(base::Bind(&MediaCodecDecoderTest::SetPrefetched,
+ base::Unretained(this), true));
+
+ EXPECT_TRUE(WaitForCondition(base::Bind(&MediaCodecDecoderTest::is_prefetched,
+ base::Unretained(this))));
+
+ decoder_->SetDemuxerConfigs(GetConfigs());
+
+ SetVideoSurface();
+
+ EXPECT_EQ(MediaCodecDecoder::CONFIG_OK, decoder_->Configure());
+
+ EXPECT_TRUE(decoder_->Start(base::TimeDelta::FromMilliseconds(0)));
+
+ EXPECT_TRUE(WaitForCondition(
+ base::Bind(&MediaCodecDecoderTest::is_stopped, base::Unretained(this)),
+ timeout));
+
+ EXPECT_TRUE(decoder_->IsStopped());
+ EXPECT_TRUE(decoder_->IsCompleted());
+
+ EXPECT_EQ(26, pts_stat_.num_values());
+ EXPECT_EQ(data_factory_->last_pts(), pts_stat_.max());
+}
+
+TEST_F(MediaCodecDecoderTest, VideoStopAndResume) {
+ SKIP_TEST_IF_MEDIA_CODEC_BRIDGE_IS_NOT_AVAILABLE();
+
+ CreateVideoDecoder();
+
+ base::TimeDelta duration = base::TimeDelta::FromMilliseconds(500);
+ base::TimeDelta stop_request_time = base::TimeDelta::FromMilliseconds(200);
+ base::TimeDelta timeout = base::TimeDelta::FromMilliseconds(1000);
+
+ SetDataFactory(scoped_ptr<VideoFactory>(new VideoFactory(duration)));
+
+ // Prefetch
+ decoder_->Prefetch(base::Bind(&MediaCodecDecoderTest::SetPrefetched,
+ base::Unretained(this), true));
+
+ EXPECT_TRUE(WaitForCondition(base::Bind(&MediaCodecDecoderTest::is_prefetched,
+ base::Unretained(this))));
+
+ decoder_->SetDemuxerConfigs(GetConfigs());
+
+ SetVideoSurface();
+
+ EXPECT_EQ(MediaCodecDecoder::CONFIG_OK, decoder_->Configure());
+
+ SetStopRequestAtTime(stop_request_time);
+
+ // Start from the beginning.
+ EXPECT_TRUE(decoder_->Start(base::TimeDelta::FromMilliseconds(0)));
+
+ EXPECT_TRUE(WaitForCondition(
+ base::Bind(&MediaCodecDecoderTest::is_stopped, base::Unretained(this)),
+ timeout));
+
+ EXPECT_TRUE(decoder_->IsStopped());
+ EXPECT_FALSE(decoder_->IsCompleted());
+
+ base::TimeDelta last_pts = pts_stat_.max();
+
+ EXPECT_GE(last_pts, stop_request_time);
+
+ // Resume playback from last_pts:
+
+ SetPrefetched(false);
+ SetStopped(false);
+
+ // Prefetch again.
+ decoder_->Prefetch(base::Bind(&MediaCodecDecoderTest::SetPrefetched,
+ base::Unretained(this), true));
+
+ EXPECT_TRUE(WaitForCondition(base::Bind(&MediaCodecDecoderTest::is_prefetched,
+ base::Unretained(this))));
+
+ // Then start.
+ EXPECT_TRUE(decoder_->Start(last_pts));
+
+ // Wait till completion.
+ EXPECT_TRUE(WaitForCondition(
+ base::Bind(&MediaCodecDecoderTest::is_stopped, base::Unretained(this)),
+ timeout));
+
+ EXPECT_TRUE(decoder_->IsStopped());
+ EXPECT_TRUE(decoder_->IsCompleted());
+
+ // We should not skip frames in this process.
+ EXPECT_EQ(26, pts_stat_.num_values());
+ EXPECT_EQ(data_factory_->last_pts(), pts_stat_.max());
+}
+
+TEST_F(MediaCodecDecoderTest, AudioStarvationAndStop) {
+ SKIP_TEST_IF_MEDIA_CODEC_BRIDGE_IS_NOT_AVAILABLE();
+
+ CreateAudioDecoder();
+
+ base::TimeDelta duration = base::TimeDelta::FromMilliseconds(200);
+ base::TimeDelta timeout = base::TimeDelta::FromMilliseconds(400);
+
+ AudioFactory* factory = new AudioFactory(duration);
+ factory->SetStarvationMode(true);
+ SetDataFactory(scoped_ptr<AudioFactory>(factory));
+
+ // Prefetch.
+ decoder_->Prefetch(base::Bind(&MediaCodecDecoderTest::SetPrefetched,
+ base::Unretained(this), true));
+
+ EXPECT_TRUE(WaitForCondition(base::Bind(&MediaCodecDecoderTest::is_prefetched,
+ base::Unretained(this))));
+
+ // Configure.
+ decoder_->SetDemuxerConfigs(GetConfigs());
+
+ EXPECT_EQ(MediaCodecDecoder::CONFIG_OK, decoder_->Configure());
+
+ // Start.
+ EXPECT_TRUE(decoder_->Start(base::TimeDelta::FromMilliseconds(0)));
+
+ // Wait for starvation.
+ EXPECT_TRUE(WaitForCondition(
+ base::Bind(&MediaCodecDecoderTest::is_starved, base::Unretained(this)),
+ timeout));
+
+ EXPECT_FALSE(decoder_->IsStopped());
+ EXPECT_FALSE(decoder_->IsCompleted());
+
+ EXPECT_GT(pts_stat_.num_values(), 0);
+
+ // After starvation we should be able to stop decoder.
+ decoder_->RequestToStop();
+
+ EXPECT_TRUE(WaitForCondition(
+ base::Bind(&MediaCodecDecoderTest::is_stopped, base::Unretained(this))));
+
+ EXPECT_TRUE(decoder_->IsStopped());
+ EXPECT_FALSE(decoder_->IsCompleted());
+}
+
+} // namespace media
diff --git a/chromium/media/base/android/media_codec_player.cc b/chromium/media/base/android/media_codec_player.cc
index d7f84733660..aa05fdf5829 100644
--- a/chromium/media/base/android/media_codec_player.cc
+++ b/chromium/media/base/android/media_codec_player.cc
@@ -4,20 +4,27 @@
#include "media/base/android/media_codec_player.h"
+#include "base/barrier_closure.h"
#include "base/bind.h"
#include "base/lazy_instance.h"
#include "base/logging.h"
-
-#define RUN_ON_MEDIA_THREAD(METHOD, ...) \
- do { \
- if (!GetMediaTaskRunner()->BelongsToCurrentThread()) { \
- GetMediaTaskRunner()->PostTask( \
- FROM_HERE, \
- base::Bind(&MediaCodecPlayer:: METHOD, weak_this_, ##__VA_ARGS__)); \
- return; \
- } \
- } while(0)
-
+#include "base/thread_task_runner_handle.h"
+#include "base/threading/thread.h"
+#include "media/base/android/media_codec_audio_decoder.h"
+#include "media/base/android/media_codec_video_decoder.h"
+#include "media/base/android/media_player_manager.h"
+#include "media/base/buffers.h"
+
+#define RUN_ON_MEDIA_THREAD(METHOD, ...) \
+ do { \
+ if (!GetMediaTaskRunner()->BelongsToCurrentThread()) { \
+ DCHECK(ui_task_runner_->BelongsToCurrentThread()); \
+ GetMediaTaskRunner()->PostTask( \
+ FROM_HERE, base::Bind(&MediaCodecPlayer::METHOD, media_weak_this_, \
+ ##__VA_ARGS__)); \
+ return; \
+ } \
+ } while (0)
namespace media {
@@ -41,53 +48,74 @@ scoped_refptr<base::SingleThreadTaskRunner> GetMediaTaskRunner() {
MediaCodecPlayer::MediaCodecPlayer(
int player_id,
- MediaPlayerManager* manager,
+ base::WeakPtr<MediaPlayerManager> manager,
const RequestMediaResourcesCB& request_media_resources_cb,
scoped_ptr<DemuxerAndroid> demuxer,
const GURL& frame_url)
: MediaPlayerAndroid(player_id,
- manager,
+ manager.get(),
request_media_resources_cb,
frame_url),
- ui_task_runner_(base::MessageLoopProxy::current()),
+ ui_task_runner_(base::ThreadTaskRunnerHandle::Get()),
demuxer_(demuxer.Pass()),
- weak_factory_(this) {
- // UI thread
+ state_(STATE_PAUSED),
+ interpolator_(&default_tick_clock_),
+ pending_start_(false),
+ media_weak_factory_(this) {
DCHECK(ui_task_runner_->BelongsToCurrentThread());
DVLOG(1) << "MediaCodecPlayer::MediaCodecPlayer: player_id:" << player_id;
- weak_this_ = weak_factory_.GetWeakPtr();
+ request_resources_cb_ = base::Bind(request_media_resources_cb_, player_id);
+
+ completion_cb_ =
+ base::Bind(&MediaPlayerManager::OnPlaybackComplete, manager, player_id);
+ attach_listener_cb_ = base::Bind(&MediaPlayerAndroid::AttachListener,
+ WeakPtrForUIThread(), nullptr);
+ detach_listener_cb_ =
+ base::Bind(&MediaPlayerAndroid::DetachListener, WeakPtrForUIThread());
+ metadata_changed_cb_ = base::Bind(&MediaPlayerAndroid::OnMediaMetadataChanged,
+ WeakPtrForUIThread());
+ time_update_cb_ =
+ base::Bind(&MediaPlayerAndroid::OnTimeUpdate, WeakPtrForUIThread());
+
+ media_weak_this_ = media_weak_factory_.GetWeakPtr();
// Finish initializaton on Media thread
GetMediaTaskRunner()->PostTask(
- FROM_HERE, base::Bind(&MediaCodecPlayer::Initialize, weak_this_));
+ FROM_HERE, base::Bind(&MediaCodecPlayer::Initialize, media_weak_this_));
}
MediaCodecPlayer::~MediaCodecPlayer()
{
- // Media thread
DVLOG(1) << "MediaCodecPlayer::~MediaCodecPlayer";
DCHECK(GetMediaTaskRunner()->BelongsToCurrentThread());
}
void MediaCodecPlayer::Initialize() {
- // Media thread
DVLOG(1) << __FUNCTION__;
DCHECK(GetMediaTaskRunner()->BelongsToCurrentThread());
+ interpolator_.SetUpperBound(base::TimeDelta());
+
+ CreateDecoders();
+
+ // This call might in turn call MediaCodecPlayer::OnDemuxerConfigsAvailable()
+ // which propagates configs into decoders. Therefore CreateDecoders() should
+ // be called first.
demuxer_->Initialize(this);
}
-// MediaPlayerAndroid implementation.
+// The implementation of MediaPlayerAndroid interface.
void MediaCodecPlayer::DeleteOnCorrectThread() {
- // UI thread
DVLOG(1) << __FUNCTION__;
DCHECK(ui_task_runner_->BelongsToCurrentThread());
- // The listener-related portion of the base class has to be
- // destroyed on UI thread.
+ DetachListener();
+
+ // The base class part that deals with MediaPlayerListener
+ // has to be destroyed on UI thread.
DestroyListenerOnUIThread();
// Post deletion onto Media thread
@@ -97,124 +125,142 @@ void MediaCodecPlayer::DeleteOnCorrectThread() {
void MediaCodecPlayer::SetVideoSurface(gfx::ScopedJavaSurface surface) {
RUN_ON_MEDIA_THREAD(SetVideoSurface, base::Passed(&surface));
- // Media thread
- DVLOG(1) << __FUNCTION__;
+ DVLOG(1) << __FUNCTION__ << (surface.IsEmpty() ? " empty" : " non-empty");
- NOTIMPLEMENTED();
+  // Assumption: if the video decoder already has a surface,
+  // the caller makes two calls:
+  // (1) SetVideoSurface(0) to clear the old surface, then
+  // (2) SetVideoSurface(new_surface).
+ video_decoder_->SetPendingSurface(surface.Pass());
+
+ if (video_decoder_->HasPendingSurface() &&
+ state_ == STATE_WAITING_FOR_SURFACE) {
+ SetState(STATE_PLAYING);
+ StartPlaybackDecoders();
+ }
}
void MediaCodecPlayer::Start() {
RUN_ON_MEDIA_THREAD(Start);
- // Media thread
DVLOG(1) << __FUNCTION__;
- NOTIMPLEMENTED();
+ switch (state_) {
+ case STATE_PAUSED:
+ if (HasAudio() || HasVideo()) {
+ SetState(STATE_PREFETCHING);
+ StartPrefetchDecoders();
+ } else {
+ SetState(STATE_WAITING_FOR_CONFIG);
+ }
+ break;
+ case STATE_STOPPING:
+ SetPendingStart(true);
+ break;
+ default:
+ // Ignore
+ break;
+ }
}
void MediaCodecPlayer::Pause(bool is_media_related_action) {
RUN_ON_MEDIA_THREAD(Pause, is_media_related_action);
- // Media thread
DVLOG(1) << __FUNCTION__;
- NOTIMPLEMENTED();
+ switch (state_) {
+ case STATE_PREFETCHING:
+ SetState(STATE_PAUSED);
+ StopDecoders();
+ break;
+ case STATE_WAITING_FOR_SURFACE:
+ SetState(STATE_PAUSED);
+ StopDecoders();
+ break;
+ case STATE_PLAYING:
+ SetState(STATE_STOPPING);
+ RequestToStopDecoders();
+ break;
+ default:
+ // Ignore
+ break;
+ }
}
void MediaCodecPlayer::SeekTo(base::TimeDelta timestamp) {
RUN_ON_MEDIA_THREAD(SeekTo, timestamp);
- // Media thread
DVLOG(1) << __FUNCTION__ << " " << timestamp;
-
NOTIMPLEMENTED();
}
void MediaCodecPlayer::Release() {
RUN_ON_MEDIA_THREAD(Release);
- // Media thread
DVLOG(1) << __FUNCTION__;
- NOTIMPLEMENTED();
+ SetState(STATE_PAUSED);
+ ReleaseDecoderResources();
}
void MediaCodecPlayer::SetVolume(double volume) {
RUN_ON_MEDIA_THREAD(SetVolume, volume);
- // Media thread
DVLOG(1) << __FUNCTION__ << " " << volume;
-
- NOTIMPLEMENTED();
+ audio_decoder_->SetVolume(volume);
}
int MediaCodecPlayer::GetVideoWidth() {
- // UI thread
DCHECK(ui_task_runner_->BelongsToCurrentThread());
-
- NOTIMPLEMENTED();
- return 320;
+ return metadata_cache_.video_size.width();
}
int MediaCodecPlayer::GetVideoHeight() {
- // UI thread
DCHECK(ui_task_runner_->BelongsToCurrentThread());
-
- NOTIMPLEMENTED();
- return 240;
+ return metadata_cache_.video_size.height();
}
base::TimeDelta MediaCodecPlayer::GetCurrentTime() {
- // UI thread, Media thread
- NOTIMPLEMENTED();
- return base::TimeDelta();
+ DCHECK(ui_task_runner_->BelongsToCurrentThread());
+ return current_time_cache_;
}
base::TimeDelta MediaCodecPlayer::GetDuration() {
- // UI thread
DCHECK(ui_task_runner_->BelongsToCurrentThread());
-
- NOTIMPLEMENTED();
- return base::TimeDelta();
+ return metadata_cache_.duration;
}
bool MediaCodecPlayer::IsPlaying() {
- // UI thread
DCHECK(ui_task_runner_->BelongsToCurrentThread());
- NOTIMPLEMENTED();
- return false;
+ return state_ == STATE_PLAYING;
}
bool MediaCodecPlayer::CanPause() {
- // UI thread
DCHECK(ui_task_runner_->BelongsToCurrentThread());
NOTIMPLEMENTED();
return false;
}
bool MediaCodecPlayer::CanSeekForward() {
- // UI thread
DCHECK(ui_task_runner_->BelongsToCurrentThread());
NOTIMPLEMENTED();
return false;
}
bool MediaCodecPlayer::CanSeekBackward() {
- // UI thread
DCHECK(ui_task_runner_->BelongsToCurrentThread());
NOTIMPLEMENTED();
return false;
}
bool MediaCodecPlayer::IsPlayerReady() {
- // UI thread
DCHECK(ui_task_runner_->BelongsToCurrentThread());
- NOTIMPLEMENTED();
+ // This method is called to check whether it's safe to release the player when
+ // the OS needs more resources. This class can be released at any time.
return true;
}
void MediaCodecPlayer::SetCdm(BrowserCdm* cdm) {
- // UI thread
DCHECK(ui_task_runner_->BelongsToCurrentThread());
NOTIMPLEMENTED();
}
@@ -223,30 +269,462 @@ void MediaCodecPlayer::SetCdm(BrowserCdm* cdm) {
void MediaCodecPlayer::OnDemuxerConfigsAvailable(
const DemuxerConfigs& configs) {
- // Media thread
DCHECK(GetMediaTaskRunner()->BelongsToCurrentThread());
- NOTIMPLEMENTED();
+ DVLOG(1) << __FUNCTION__;
+
+ duration_ = configs.duration;
+
+ SetDemuxerConfigs(configs);
+
+ // Update cache and notify manager on UI thread
+ gfx::Size video_size = HasVideo() ? configs.video_size : gfx::Size();
+ ui_task_runner_->PostTask(
+ FROM_HERE, base::Bind(metadata_changed_cb_, duration_, video_size));
}
void MediaCodecPlayer::OnDemuxerDataAvailable(const DemuxerData& data) {
- // Media thread
DCHECK(GetMediaTaskRunner()->BelongsToCurrentThread());
- NOTIMPLEMENTED();
+
+ DCHECK_LT(0u, data.access_units.size());
+ CHECK_GE(1u, data.demuxer_configs.size());
+
+ DVLOG(2) << "Player::" << __FUNCTION__;
+
+ if (data.type == DemuxerStream::AUDIO)
+ audio_decoder_->OnDemuxerDataAvailable(data);
+
+ if (data.type == DemuxerStream::VIDEO)
+ video_decoder_->OnDemuxerDataAvailable(data);
}
void MediaCodecPlayer::OnDemuxerSeekDone(
base::TimeDelta actual_browser_seek_time) {
- // Media thread
DCHECK(GetMediaTaskRunner()->BelongsToCurrentThread());
+
+ DVLOG(1) << __FUNCTION__ << " actual_time:" << actual_browser_seek_time;
+
NOTIMPLEMENTED();
}
void MediaCodecPlayer::OnDemuxerDurationChanged(
base::TimeDelta duration) {
- // Media thread
DCHECK(GetMediaTaskRunner()->BelongsToCurrentThread());
- NOTIMPLEMENTED();
+ DVLOG(1) << __FUNCTION__ << " duration:" << duration;
+
+ duration_ = duration;
+}
+
+// Events from Player, called on UI thread
+
+void MediaCodecPlayer::OnMediaMetadataChanged(base::TimeDelta duration,
+ const gfx::Size& video_size) {
+ DCHECK(ui_task_runner_->BelongsToCurrentThread());
+
+ if (duration != kNoTimestamp())
+ metadata_cache_.duration = duration;
+
+ if (!video_size.IsEmpty())
+ metadata_cache_.video_size = video_size;
+
+ manager()->OnMediaMetadataChanged(player_id(), metadata_cache_.duration,
+ metadata_cache_.video_size.width(),
+ metadata_cache_.video_size.height(), true);
+}
+
+void MediaCodecPlayer::OnTimeUpdate(base::TimeDelta current_timestamp,
+ base::TimeTicks current_time_ticks) {
+ DCHECK(ui_task_runner_->BelongsToCurrentThread());
+
+ current_time_cache_ = current_timestamp;
+ manager()->OnTimeUpdate(player_id(), current_timestamp, current_time_ticks);
+}
+
+// Events from Decoders, called on Media thread
+
+void MediaCodecPlayer::RequestDemuxerData(DemuxerStream::Type stream_type) {
+ DVLOG(2) << __FUNCTION__ << " streamType:" << stream_type;
+
+ // Use this method instead of directly binding with
+ // DemuxerAndroid::RequestDemuxerData() to avoid the race condition on
+ // deletion:
+ // 1. DeleteSoon is posted from UI to Media thread.
+ // 2. RequestDemuxerData callback is posted from Decoder to Media thread.
+ // 3. DeleteSoon arrives, we delete the player and detach from
+ // BrowserDemuxerAndroid.
+ // 4. RequestDemuxerData is processed by the media thread queue. Since the
+ // weak_ptr was invalidated in (3), this is a no-op. If we used
+ // DemuxerAndroid::RequestDemuxerData() it would arrive and will try to
+ // call the client, but the client (i.e. this player) would not exist.
+ demuxer_->RequestDemuxerData(stream_type);
+}
+
+void MediaCodecPlayer::OnPrefetchDone() {
+ DCHECK(GetMediaTaskRunner()->BelongsToCurrentThread());
+ DVLOG(1) << __FUNCTION__;
+
+ if (state_ != STATE_PREFETCHING)
+ return; // Ignore
+
+ if (!HasAudio() && !HasVideo()) {
+ // No configuration at all after prefetching.
+ // This is an error, initial configuration is expected
+ // before the first data chunk.
+ GetMediaTaskRunner()->PostTask(FROM_HERE, error_cb_);
+ return;
+ }
+
+ if (HasVideo() && !HasPendingSurface()) {
+ SetState(STATE_WAITING_FOR_SURFACE);
+ return;
+ }
+
+ SetState(STATE_PLAYING);
+ StartPlaybackDecoders();
+}
+
+void MediaCodecPlayer::OnStopDone() {
+ DCHECK(GetMediaTaskRunner()->BelongsToCurrentThread());
+ DVLOG(1) << __FUNCTION__;
+
+ if (!(audio_decoder_->IsStopped() && video_decoder_->IsStopped()))
+ return; // Wait until other stream is stopped
+
+ // At this point decoder threads should not be running
+ if (interpolator_.interpolating())
+ interpolator_.StopInterpolating();
+
+ switch (state_) {
+ case STATE_STOPPING:
+ if (HasPendingStart()) {
+ SetPendingStart(false);
+ SetState(STATE_PREFETCHING);
+ StartPrefetchDecoders();
+ } else {
+ SetState(STATE_PAUSED);
+ }
+ break;
+ case STATE_PLAYING:
+ // Unexpected stop means completion
+ SetState(STATE_PAUSED);
+ break;
+ default:
+ DVLOG(0) << __FUNCTION__ << " illegal state: " << AsString(state_);
+ NOTREACHED();
+ break;
+ }
+
+ // DetachListener to UI thread
+ ui_task_runner_->PostTask(FROM_HERE, detach_listener_cb_);
+
+ if (AudioFinished() && VideoFinished())
+ ui_task_runner_->PostTask(FROM_HERE, completion_cb_);
+}
+
+void MediaCodecPlayer::OnError() {
+ DCHECK(GetMediaTaskRunner()->BelongsToCurrentThread());
+ DVLOG(1) << __FUNCTION__;
+
+ // STATE_ERROR blocks all events
+ SetState(STATE_ERROR);
+
+ ReleaseDecoderResources();
+}
+
+void MediaCodecPlayer::OnStarvation(DemuxerStream::Type type) {
+ DCHECK(GetMediaTaskRunner()->BelongsToCurrentThread());
+ DVLOG(1) << __FUNCTION__ << " stream type:" << type;
+
+ if (state_ != STATE_PLAYING)
+ return; // Ignore
+
+ SetState(STATE_STOPPING);
+ RequestToStopDecoders();
+ SetPendingStart(true);
+}
+
+void MediaCodecPlayer::OnTimeIntervalUpdate(DemuxerStream::Type type,
+ base::TimeDelta now_playing,
+ base::TimeDelta last_buffered) {
+ DCHECK(GetMediaTaskRunner()->BelongsToCurrentThread());
+
+ interpolator_.SetBounds(now_playing, last_buffered);
+
+ // Post to UI thread
+ ui_task_runner_->PostTask(FROM_HERE,
+ base::Bind(time_update_cb_, GetInterpolatedTime(),
+ base::TimeTicks::Now()));
+}
+
+void MediaCodecPlayer::OnVideoCodecCreated() {
+ DCHECK(GetMediaTaskRunner()->BelongsToCurrentThread());
+
+ // This callback requests resources by releasing other players.
+ ui_task_runner_->PostTask(FROM_HERE, request_resources_cb_);
+}
+
+void MediaCodecPlayer::OnVideoResolutionChanged(const gfx::Size& size) {
+ DCHECK(GetMediaTaskRunner()->BelongsToCurrentThread());
+
+ DVLOG(1) << __FUNCTION__ << " " << size.width() << "x" << size.height();
+
+ // Update cache and notify manager on UI thread
+ ui_task_runner_->PostTask(
+ FROM_HERE, base::Bind(metadata_changed_cb_, kNoTimestamp(), size));
}
+// State machine operations, called on Media thread
+
+void MediaCodecPlayer::SetState(PlayerState new_state) {
+ DCHECK(GetMediaTaskRunner()->BelongsToCurrentThread());
+
+ DVLOG(1) << "SetState:" << AsString(state_) << " -> " << AsString(new_state);
+ state_ = new_state;
+}
+
+void MediaCodecPlayer::SetPendingSurface(gfx::ScopedJavaSurface surface) {
+ DCHECK(GetMediaTaskRunner()->BelongsToCurrentThread());
+ DVLOG(1) << __FUNCTION__;
+
+ video_decoder_->SetPendingSurface(surface.Pass());
+}
+
+bool MediaCodecPlayer::HasPendingSurface() {
+ return video_decoder_->HasPendingSurface();
+}
+
+void MediaCodecPlayer::SetPendingStart(bool need_to_start) {
+ DCHECK(GetMediaTaskRunner()->BelongsToCurrentThread());
+ DVLOG(1) << __FUNCTION__ << ": " << need_to_start;
+ pending_start_ = need_to_start;
+}
+
+bool MediaCodecPlayer::HasPendingStart() {
+ DCHECK(GetMediaTaskRunner()->BelongsToCurrentThread());
+ return pending_start_;
+}
+
+bool MediaCodecPlayer::HasAudio() {
+ DCHECK(GetMediaTaskRunner()->BelongsToCurrentThread());
+ return audio_decoder_->HasStream();
+}
+
+bool MediaCodecPlayer::HasVideo() {
+ DCHECK(GetMediaTaskRunner()->BelongsToCurrentThread());
+ return video_decoder_->HasStream();
+}
+
+void MediaCodecPlayer::SetDemuxerConfigs(const DemuxerConfigs& configs) {
+ DCHECK(GetMediaTaskRunner()->BelongsToCurrentThread());
+ DVLOG(1) << __FUNCTION__ << " " << configs;
+
+ DCHECK(audio_decoder_);
+ DCHECK(video_decoder_);
+
+ // At least one valid codec must be present.
+ DCHECK(configs.audio_codec != kUnknownAudioCodec ||
+ configs.video_codec != kUnknownVideoCodec);
+
+ if (configs.audio_codec != kUnknownAudioCodec)
+ audio_decoder_->SetDemuxerConfigs(configs);
+
+ if (configs.video_codec != kUnknownVideoCodec)
+ video_decoder_->SetDemuxerConfigs(configs);
+
+ if (state_ == STATE_WAITING_FOR_CONFIG) {
+ SetState(STATE_PREFETCHING);
+ StartPrefetchDecoders();
+ }
+}
+
+void MediaCodecPlayer::StartPrefetchDecoders() {
+ DCHECK(GetMediaTaskRunner()->BelongsToCurrentThread());
+ DVLOG(1) << __FUNCTION__;
+
+ bool do_audio = false;
+ bool do_video = false;
+ int count = 0;
+ if (!AudioFinished()) {
+ do_audio = true;
+ ++count;
+ }
+ if (!VideoFinished()) {
+ do_video = true;
+ ++count;
+ }
+
+ DCHECK_LT(0, count); // at least one decoder should be active
+
+ base::Closure prefetch_cb = base::BarrierClosure(
+ count, base::Bind(&MediaCodecPlayer::OnPrefetchDone, media_weak_this_));
+
+ if (do_audio)
+ audio_decoder_->Prefetch(prefetch_cb);
+
+ if (do_video)
+ video_decoder_->Prefetch(prefetch_cb);
+}
+
+void MediaCodecPlayer::StartPlaybackDecoders() {
+ DCHECK(GetMediaTaskRunner()->BelongsToCurrentThread());
+ DVLOG(1) << __FUNCTION__;
+
+ // Configure all streams before the start since
+ // we may discover that browser seek is required.
+
+ bool do_audio = !AudioFinished();
+ bool do_video = !VideoFinished();
+
+ // If there is nothing to play, the state machine should determine
+ // this at the prefetch state and never call this method.
+ DCHECK(do_audio || do_video);
+
+ if (do_audio) {
+ MediaCodecDecoder::ConfigStatus status = audio_decoder_->Configure();
+ if (status != MediaCodecDecoder::CONFIG_OK) {
+ GetMediaTaskRunner()->PostTask(FROM_HERE, error_cb_);
+ return;
+ }
+ }
+
+ if (do_video) {
+ MediaCodecDecoder::ConfigStatus status = video_decoder_->Configure();
+ if (status != MediaCodecDecoder::CONFIG_OK) {
+ GetMediaTaskRunner()->PostTask(FROM_HERE, error_cb_);
+ return;
+ }
+ }
+
+ // At this point decoder threads should not be running.
+ if (!interpolator_.interpolating())
+ interpolator_.StartInterpolating();
+
+ base::TimeDelta current_time = GetInterpolatedTime();
+
+ if (do_audio) {
+ if (!audio_decoder_->Start(current_time)) {
+ GetMediaTaskRunner()->PostTask(FROM_HERE, error_cb_);
+ return;
+ }
+
+ // Attach listener on UI thread
+ ui_task_runner_->PostTask(FROM_HERE, attach_listener_cb_);
+ }
+
+ if (do_video) {
+ if (!video_decoder_->Start(current_time)) {
+ GetMediaTaskRunner()->PostTask(FROM_HERE, error_cb_);
+ return;
+ }
+ }
+}
+
+void MediaCodecPlayer::StopDecoders() {
+ DCHECK(GetMediaTaskRunner()->BelongsToCurrentThread());
+ DVLOG(1) << __FUNCTION__;
+
+ audio_decoder_->SyncStop();
+ video_decoder_->SyncStop();
+}
+
+void MediaCodecPlayer::RequestToStopDecoders() {
+ DCHECK(GetMediaTaskRunner()->BelongsToCurrentThread());
+ DVLOG(1) << __FUNCTION__;
+
+ bool do_audio = false;
+ bool do_video = false;
+
+ if (audio_decoder_->IsPrefetchingOrPlaying())
+ do_audio = true;
+ if (video_decoder_->IsPrefetchingOrPlaying())
+ do_video = true;
+
+ if (!do_audio && !do_video) {
+ GetMediaTaskRunner()->PostTask(
+ FROM_HERE, base::Bind(&MediaCodecPlayer::OnStopDone, media_weak_this_));
+ return;
+ }
+
+ if (do_audio)
+ audio_decoder_->RequestToStop();
+ if (do_video)
+ video_decoder_->RequestToStop();
+}
+
+void MediaCodecPlayer::ReleaseDecoderResources() {
+ DCHECK(GetMediaTaskRunner()->BelongsToCurrentThread());
+ DVLOG(1) << __FUNCTION__;
+
+ if (audio_decoder_)
+ audio_decoder_->ReleaseDecoderResources();
+
+ if (video_decoder_)
+ video_decoder_->ReleaseDecoderResources();
+
+ // At this point decoder threads should not be running
+ if (interpolator_.interpolating())
+ interpolator_.StopInterpolating();
+}
+
+void MediaCodecPlayer::CreateDecoders() {
+ DCHECK(GetMediaTaskRunner()->BelongsToCurrentThread());
+ DVLOG(1) << __FUNCTION__;
+
+ error_cb_ = base::Bind(&MediaCodecPlayer::OnError, media_weak_this_);
+
+ audio_decoder_.reset(new MediaCodecAudioDecoder(
+ GetMediaTaskRunner(), base::Bind(&MediaCodecPlayer::RequestDemuxerData,
+ media_weak_this_, DemuxerStream::AUDIO),
+ base::Bind(&MediaCodecPlayer::OnStarvation, media_weak_this_,
+ DemuxerStream::AUDIO),
+ base::Bind(&MediaCodecPlayer::OnStopDone, media_weak_this_), error_cb_,
+ base::Bind(&MediaCodecPlayer::OnTimeIntervalUpdate, media_weak_this_,
+ DemuxerStream::AUDIO)));
+
+ video_decoder_.reset(new MediaCodecVideoDecoder(
+ GetMediaTaskRunner(), base::Bind(&MediaCodecPlayer::RequestDemuxerData,
+ media_weak_this_, DemuxerStream::VIDEO),
+ base::Bind(&MediaCodecPlayer::OnStarvation, media_weak_this_,
+ DemuxerStream::VIDEO),
+ base::Bind(&MediaCodecPlayer::OnStopDone, media_weak_this_), error_cb_,
+ MediaCodecDecoder::SetTimeCallback(), // null callback
+ base::Bind(&MediaCodecPlayer::OnVideoResolutionChanged, media_weak_this_),
+ base::Bind(&MediaCodecPlayer::OnVideoCodecCreated, media_weak_this_)));
+}
+
+bool MediaCodecPlayer::AudioFinished() {
+ return audio_decoder_->IsCompleted() || !audio_decoder_->HasStream();
+}
+
+bool MediaCodecPlayer::VideoFinished() {
+ return video_decoder_->IsCompleted() || !video_decoder_->HasStream();
+}
+
+base::TimeDelta MediaCodecPlayer::GetInterpolatedTime() {
+ DCHECK(GetMediaTaskRunner()->BelongsToCurrentThread());
+
+ base::TimeDelta interpolated_time = interpolator_.GetInterpolatedTime();
+ return std::min(interpolated_time, duration_);
+}
+
+#undef RETURN_STRING
+#define RETURN_STRING(x) \
+ case x: \
+ return #x;
+
+const char* MediaCodecPlayer::AsString(PlayerState state) {
+ switch (state) {
+ RETURN_STRING(STATE_PAUSED);
+ RETURN_STRING(STATE_WAITING_FOR_CONFIG);
+ RETURN_STRING(STATE_PREFETCHING);
+ RETURN_STRING(STATE_PLAYING);
+ RETURN_STRING(STATE_STOPPING);
+ RETURN_STRING(STATE_WAITING_FOR_SURFACE);
+ RETURN_STRING(STATE_ERROR);
+ }
+ return nullptr; // crash early
+}
+
+#undef RETURN_STRING
+
} // namespace media
diff --git a/chromium/media/base/android/media_codec_player.h b/chromium/media/base/android/media_codec_player.h
index 518adbbbd8b..f9f95703bd9 100644
--- a/chromium/media/base/android/media_codec_player.h
+++ b/chromium/media/base/android/media_codec_player.h
@@ -9,35 +9,117 @@
#include "base/memory/scoped_ptr.h"
#include "base/memory/weak_ptr.h"
#include "base/threading/thread.h"
+#include "base/time/default_tick_clock.h"
#include "media/base/android/demuxer_android.h"
#include "media/base/android/media_player_android.h"
+#include "media/base/demuxer_stream.h"
#include "media/base/media_export.h"
+#include "media/base/time_delta_interpolator.h"
+#include "ui/gfx/geometry/size.h"
#include "ui/gl/android/scoped_java_surface.h"
+// The MediaCodecPlayer class implements the media player by using Android's
+// MediaCodec. It differs from MediaSourcePlayer in that it removes most
+// processing away from the UI thread: it uses a dedicated Media thread to
+// receive the data and to handle the commands.
+
+// The player works as a state machine. Here are relationships between states:
+//
+// [ Paused ] ------------------------ (Any state)
+// | | |
+// | v v
+// | <------------------[ WaitingForConfig ] [ Error ]
+// |
+// |
+// |
+// v
+// [ Prefetching ] -------------------
+// | |
+// | v
+// | <-----------------[ WaitingForSurface ]
+// v
+// [ Playing ]
+// |
+// |
+// v
+// [ Stopping ]
+
+
+// Events and actions for pause/resume workflow.
+// ---------------------------------------------
+//
+// Start, no config:
+// ------------------------> [ Paused ] -----------------> [ Waiting ]
+// | StopDone: [ for configs ]
+// | ^ | /
+// | | | /
+// | Pause: | | Start w/config: /
+// | | | dec.Prefetch /
+// | | | /
+// | | | /
+// | | | /
+// | | | / DemuxerConfigs:
+// | | | / dec.Prefetch
+// | | | /
+// | | | /
+// | | v /
+// | /
+// | ------------------> [ Prefetching ] <--------/ [ Waiting ]
+// | | [ ] --------------> [ for surface ]
+// | | | PrefetchDone, /
+// | | | no surface: /
+// | | | /
+// | | | /
+// | | StopDone w/ | /
+// | | pending start: | PrefetchDone: /
+// | | dec.Prefetch | dec.Start /
+// | | | / SetSurface:
+// | | | / dec.Start
+// | | | /
+// | | v /
+// | | /
+// | | [ Playing ] <----------/
+// | |
+// | | |
+// | | |
+// | | | Pause: dec.RequestToStop
+// | | |
+// | | |
+// | | v
+// | |
+// ------------------------- [ Stopping ]
+
namespace media {
class BrowserCdm;
+class MediaCodecAudioDecoder;
+class MediaCodecVideoDecoder;
// Returns the task runner for the media thread
MEDIA_EXPORT scoped_refptr<base::SingleThreadTaskRunner> GetMediaTaskRunner();
-
-// This class implements the media player using Android's MediaCodec.
-// It differs from MediaSourcePlayer in that it removes most
-// processing away from UI thread: it uses a dedicated Media thread
-// to receive the data and to handle commands.
class MEDIA_EXPORT MediaCodecPlayer : public MediaPlayerAndroid,
public DemuxerAndroidClient {
public:
+ // Typedefs for the notification callbacks
+ typedef base::Callback<void(base::TimeDelta, const gfx::Size&)>
+ MetadataChangedCallback;
+
+ typedef base::Callback<void(base::TimeDelta, base::TimeTicks)>
+ TimeUpdateCallback;
+
// Constructs a player with the given ID and demuxer. |manager| must outlive
// the lifetime of this object.
MediaCodecPlayer(int player_id,
- MediaPlayerManager* manager,
+ base::WeakPtr<MediaPlayerManager> manager,
const RequestMediaResourcesCB& request_media_resources_cb,
scoped_ptr<DemuxerAndroid> demuxer,
const GURL& frame_url);
~MediaCodecPlayer() override;
+ // A helper method that performs the media thread part of initialization.
+ void Initialize();
+
// MediaPlayerAndroid implementation.
void DeleteOnCorrectThread() override;
void SetVideoSurface(gfx::ScopedJavaSurface surface) override;
@@ -63,19 +145,123 @@ class MEDIA_EXPORT MediaCodecPlayer : public MediaPlayerAndroid,
void OnDemuxerSeekDone(base::TimeDelta actual_browser_seek_time) override;
void OnDemuxerDurationChanged(base::TimeDelta duration) override;
- // Helper methods
- void Initialize();
- void DestroySelf();
-
private:
+ // The state machine states.
+ enum PlayerState {
+ STATE_PAUSED,
+ STATE_WAITING_FOR_CONFIG,
+ STATE_PREFETCHING,
+ STATE_PLAYING,
+ STATE_STOPPING,
+ STATE_WAITING_FOR_SURFACE,
+ STATE_ERROR,
+ };
+
+ // Cached values for the manager.
+ struct MediaMetadata {
+ base::TimeDelta duration;
+ gfx::Size video_size;
+ };
+
+ // MediaPlayerAndroid implementation.
+ // This method caches the data and calls manager's OnMediaMetadataChanged().
+ void OnMediaMetadataChanged(base::TimeDelta duration,
+ const gfx::Size& video_size) override;
+
+ // This method caches the current time and calls manager's OnTimeUpdate().
+ void OnTimeUpdate(base::TimeDelta current_timestamp,
+ base::TimeTicks current_time_ticks) override;
+
+ // Callbacks from decoders
+ void RequestDemuxerData(DemuxerStream::Type stream_type);
+ void OnPrefetchDone();
+ void OnStopDone();
+ void OnError();
+ void OnStarvation(DemuxerStream::Type stream_type);
+ void OnTimeIntervalUpdate(DemuxerStream::Type stream_type,
+ base::TimeDelta now_playing,
+ base::TimeDelta last_buffered);
+
+ // Callbacks from video decoder
+ void OnVideoCodecCreated();
+ void OnVideoResolutionChanged(const gfx::Size& size);
+
+ // Operations called from the state machine.
+ void SetState(PlayerState new_state);
+ void SetPendingSurface(gfx::ScopedJavaSurface surface);
+ bool HasPendingSurface();
+ void SetPendingStart(bool need_to_start);
+ bool HasPendingStart();
+ bool HasVideo();
+ bool HasAudio();
+ void SetDemuxerConfigs(const DemuxerConfigs& configs);
+ void StartPrefetchDecoders();
+ void StartPlaybackDecoders();
+ void StopDecoders();
+ void RequestToStopDecoders();
+ void ReleaseDecoderResources();
+
+ // Helper methods.
+ void CreateDecoders();
+ bool AudioFinished();
+ bool VideoFinished();
+ base::TimeDelta GetInterpolatedTime();
+
+ static const char* AsString(PlayerState state);
+
+ // Data.
+
// Object for posting tasks on UI thread.
scoped_refptr<base::SingleThreadTaskRunner> ui_task_runner_;
+ // Major components: demuxer, audio and video decoders.
scoped_ptr<DemuxerAndroid> demuxer_;
+ scoped_ptr<MediaCodecAudioDecoder> audio_decoder_;
+ scoped_ptr<MediaCodecVideoDecoder> video_decoder_;
+
+ // The state of the state machine.
+ PlayerState state_;
+
+ // Notification callbacks, they call MediaPlayerManager.
+ base::Closure request_resources_cb_;
+ TimeUpdateCallback time_update_cb_;
+ base::Closure completion_cb_;
+
+ // A callback that updates metadata cache and calls the manager.
+ MetadataChangedCallback metadata_changed_cb_;
+
+ // We call the base class' AttachListener() and DetachListener() methods on UI
+ // thread with these callbacks.
+ base::Closure attach_listener_cb_;
+ base::Closure detach_listener_cb_;
+
+ // Error callback is posted by decoders or by this class itself if we cannot
+ // configure or start decoder.
+ base::Closure error_cb_;
+
+ // Total duration reported by demuxer.
+ base::TimeDelta duration_;
+
+ // base::TickClock used by |interpolator_|.
+ base::DefaultTickClock default_tick_clock_;
+
+ // Tracks the most recent media time update and provides interpolated values
+ // as playback progresses.
+ TimeDeltaInterpolator interpolator_;
+
+ // Pending data to be picked up by the upcoming state.
+ gfx::ScopedJavaSurface pending_surface_;
+ bool pending_start_;
+
+ // Configuration data for the manager, accessed on the UI thread.
+ MediaMetadata metadata_cache_;
+
+ // Cached current time, accessed on UI thread.
+ base::TimeDelta current_time_cache_;
- base::WeakPtr<MediaCodecPlayer> weak_this_;
+ base::WeakPtr<MediaCodecPlayer> media_weak_this_;
// NOTE: Weak pointers must be invalidated before all other member variables.
- base::WeakPtrFactory<MediaCodecPlayer> weak_factory_;
+ base::WeakPtrFactory<MediaCodecPlayer> media_weak_factory_;
DISALLOW_COPY_AND_ASSIGN(MediaCodecPlayer);
};
diff --git a/chromium/media/base/android/media_codec_player_unittest.cc b/chromium/media/base/android/media_codec_player_unittest.cc
new file mode 100644
index 00000000000..2eb77a5e3ce
--- /dev/null
+++ b/chromium/media/base/android/media_codec_player_unittest.cc
@@ -0,0 +1,424 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "base/bind.h"
+#include "base/logging.h"
+#include "base/timer/timer.h"
+#include "media/base/android/demuxer_android.h"
+#include "media/base/android/media_codec_bridge.h"
+#include "media/base/android/media_codec_player.h"
+#include "media/base/android/media_player_manager.h"
+#include "media/base/android/test_data_factory.h"
+#include "testing/gtest/include/gtest/gtest.h"
+
+namespace media {
+
+// Helper macro to skip the test if MediaCodecBridge isn't available.
+#define SKIP_TEST_IF_MEDIA_CODEC_BRIDGE_IS_NOT_AVAILABLE() \
+ do { \
+ if (!MediaCodecBridge::IsAvailable()) { \
+ VLOG(0) << "Could not run test - not supported on device."; \
+ return; \
+ } \
+ } while (0)
+
// Helper macro that makes a method run on the media thread.  If the caller
// is already on the media thread it expands to nothing and the method body
// that follows executes inline; otherwise it posts the same call (with
// |this| unretained -- the callee must outlive the task) to the media task
// runner and returns from the current invocation.
#define RUN_ON_MEDIA_THREAD(CLASS, METHOD, ...)                               \
  do {                                                                        \
    if (!GetMediaTaskRunner()->BelongsToCurrentThread()) {                    \
      GetMediaTaskRunner()->PostTask(                                         \
          FROM_HERE,                                                          \
          base::Bind(&CLASS::METHOD, base::Unretained(this), ##__VA_ARGS__)); \
      return;                                                                 \
    }                                                                         \
  } while (0)
+
+namespace {
+
// Default wait limit used by MediaCodecPlayerTest::WaitForCondition().
const base::TimeDelta kDefaultTimeout = base::TimeDelta::FromMilliseconds(200);
// Spacing between the fake audio access units produced by AudioFactory.
const base::TimeDelta kAudioFramePeriod = base::TimeDelta::FromMilliseconds(20);
+
+// Mock of MediaPlayerManager for testing purpose.
+
+class MockMediaPlayerManager : public MediaPlayerManager {
+ public:
+ MockMediaPlayerManager()
+ : playback_completed_(false), weak_ptr_factory_(this) {}
+ ~MockMediaPlayerManager() override {}
+
+ MediaResourceGetter* GetMediaResourceGetter() override { return nullptr; }
+ MediaUrlInterceptor* GetMediaUrlInterceptor() override { return nullptr; }
+ void OnTimeUpdate(int player_id,
+ base::TimeDelta current_timestamp,
+ base::TimeTicks current_time_ticks) override {}
+ void OnMediaMetadataChanged(int player_id,
+ base::TimeDelta duration,
+ int width,
+ int height,
+ bool success) override {
+ media_metadata_.duration = duration;
+ media_metadata_.width = width;
+ media_metadata_.height = height;
+ media_metadata_.modified = true;
+ }
+
+ void OnPlaybackComplete(int player_id) override {
+ playback_completed_ = true;
+ }
+ void OnMediaInterrupted(int player_id) override {}
+ void OnBufferingUpdate(int player_id, int percentage) override {}
+ void OnSeekComplete(int player_id,
+ const base::TimeDelta& current_time) override {}
+ void OnError(int player_id, int error) override {}
+ void OnVideoSizeChanged(int player_id, int width, int height) override {}
+ void OnAudibleStateChanged(int player_id, bool is_audible_now) override {}
+ void OnWaitingForDecryptionKey(int player_id) override {}
+ MediaPlayerAndroid* GetFullscreenPlayer() override { return nullptr; }
+ MediaPlayerAndroid* GetPlayer(int player_id) override { return nullptr; }
+ bool RequestPlay(int player_id) override { return true; }
+
+ void OnMediaResourcesRequested(int player_id) {}
+
+ base::WeakPtr<MockMediaPlayerManager> GetWeakPtr() {
+ return weak_ptr_factory_.GetWeakPtr();
+ }
+
+ // Conditions to wait for.
+ bool IsMetadataChanged() const { return media_metadata_.modified; }
+ bool IsPlaybackCompleted() const { return playback_completed_; }
+
+ struct MediaMetadata {
+ base::TimeDelta duration;
+ int width;
+ int height;
+ bool modified;
+ MediaMetadata() : width(0), height(0), modified(false) {}
+ };
+ MediaMetadata media_metadata_;
+
+ private:
+ bool playback_completed_;
+
+ base::WeakPtrFactory<MockMediaPlayerManager> weak_ptr_factory_;
+
+ DISALLOW_COPY_AND_ASSIGN(MockMediaPlayerManager);
+};
+
+// Helper method that creates demuxer configuration.
+
+DemuxerConfigs CreateAudioVideoConfigs(const base::TimeDelta& duration,
+ const gfx::Size& video_size) {
+ DemuxerConfigs configs =
+ TestDataFactory::CreateAudioConfigs(kCodecVorbis, duration);
+ configs.video_codec = kCodecVP8;
+ configs.video_size = video_size;
+ configs.is_video_encrypted = false;
+ return configs;
+}
+
+DemuxerConfigs CreateAudioVideoConfigs(const TestDataFactory* audio,
+ const TestDataFactory* video) {
+ DemuxerConfigs result = audio->GetConfigs();
+ DemuxerConfigs vconf = video->GetConfigs();
+
+ result.video_codec = vconf.video_codec;
+ result.video_size = vconf.video_size;
+ result.is_video_encrypted = vconf.is_video_encrypted;
+ return result;
+}
+
+// AudioFactory creates data chunks that simulate audio stream from demuxer.
+
+class AudioFactory : public TestDataFactory {
+ public:
+ AudioFactory(const base::TimeDelta& duration)
+ : TestDataFactory("vorbis-packet-%d", duration, kAudioFramePeriod) {}
+
+ DemuxerConfigs GetConfigs() const override {
+ return TestDataFactory::CreateAudioConfigs(kCodecVorbis, duration_);
+ }
+
+ protected:
+ void ModifyAccessUnit(int index_in_chunk, AccessUnit* unit) override {
+ // Vorbis needs 4 extra bytes padding on Android to decode properly.
+ // Check NuMediaExtractor.cpp in Android source code.
+ uint8 padding[4] = {0xff, 0xff, 0xff, 0xff};
+ unit->data.insert(unit->data.end(), padding, padding + 4);
+ }
+};
+
// Mock of DemuxerAndroid for testing purpose.  Feeds the player with chunks
// produced by the TestDataFactory instances installed via SetAudioFactory()
// and SetVideoFactory(), and lets tests post DemuxerConfigs to the client.

class MockDemuxerAndroid : public DemuxerAndroid {
 public:
  MockDemuxerAndroid() : client_(nullptr) {}
  ~MockDemuxerAndroid() override {}

  // DemuxerAndroid implementation
  void Initialize(DemuxerAndroidClient* client) override;
  void RequestDemuxerData(DemuxerStream::Type type) override;
  void RequestDemuxerSeek(const base::TimeDelta& time_to_seek,
                          bool is_browser_seek) override {}

  // Sets the audio data factory.
  void SetAudioFactory(scoped_ptr<TestDataFactory> factory) {
    audio_factory_ = factory.Pass();
  }

  // Sets the video data factory.
  void SetVideoFactory(scoped_ptr<TestDataFactory> factory) {
    video_factory_ = factory.Pass();
  }

  // Post DemuxerConfigs to the client (i.e. the player) on correct thread.
  void PostConfigs(const DemuxerConfigs& configs);

  // Post DemuxerConfigs derived from data factories that has been set.
  void PostInternalConfigs();

  // Conditions to wait for.
  bool IsInitialized() const { return client_; }
  bool HasPendingConfigs() const { return pending_configs_; }

 private:
  DemuxerAndroidClient* client_;  // Not owned; set by Initialize().
  // Configs posted before Initialize(); delivered when the client attaches.
  scoped_ptr<DemuxerConfigs> pending_configs_;
  scoped_ptr<TestDataFactory> audio_factory_;
  scoped_ptr<TestDataFactory> video_factory_;

  DISALLOW_COPY_AND_ASSIGN(MockDemuxerAndroid);
};
+
+void MockDemuxerAndroid::Initialize(DemuxerAndroidClient* client) {
+ DVLOG(1) << "MockDemuxerAndroid::" << __FUNCTION__;
+ DCHECK(GetMediaTaskRunner()->BelongsToCurrentThread());
+
+ client_ = client;
+ if (pending_configs_)
+ client_->OnDemuxerConfigsAvailable(*pending_configs_);
+}
+
+void MockDemuxerAndroid::RequestDemuxerData(DemuxerStream::Type type) {
+ DemuxerData chunk;
+ base::TimeDelta delay;
+
+ bool created = false;
+ if (type == DemuxerStream::AUDIO && audio_factory_)
+ created = audio_factory_->CreateChunk(&chunk, &delay);
+ else if (type == DemuxerStream::VIDEO && audio_factory_)
+ created = video_factory_->CreateChunk(&chunk, &delay);
+
+ if (!created)
+ return;
+
+ chunk.type = type;
+
+ // Post to Media thread.
+ DCHECK(client_);
+ GetMediaTaskRunner()->PostDelayedTask(
+ FROM_HERE, base::Bind(&DemuxerAndroidClient::OnDemuxerDataAvailable,
+ base::Unretained(client_), chunk),
+ delay);
+}
+
+void MockDemuxerAndroid::PostConfigs(const DemuxerConfigs& configs) {
+ DVLOG(1) << "MockDemuxerAndroid::" << __FUNCTION__;
+ RUN_ON_MEDIA_THREAD(MockDemuxerAndroid, PostConfigs, configs);
+
+ DCHECK(GetMediaTaskRunner()->BelongsToCurrentThread());
+
+ if (client_)
+ client_->OnDemuxerConfigsAvailable(configs);
+ else
+ pending_configs_ = scoped_ptr<DemuxerConfigs>(new DemuxerConfigs(configs));
+}
+
+void MockDemuxerAndroid::PostInternalConfigs() {
+ ASSERT_TRUE(audio_factory_ || video_factory_);
+
+ if (audio_factory_ && video_factory_) {
+ PostConfigs(
+ CreateAudioVideoConfigs(audio_factory_.get(), video_factory_.get()));
+ } else if (audio_factory_) {
+ PostConfigs(audio_factory_->GetConfigs());
+ } else if (video_factory_) {
+ PostConfigs(video_factory_->GetConfigs());
+ }
+}
+
+} // namespace (anonymous)
+
// The test fixture for MediaCodecPlayer.  Owns the message loop that plays
// the role of the UI thread and provides WaitForCondition() to pump it
// until an expected state is reached or a timeout expires.

class MediaCodecPlayerTest : public testing::Test {
 public:
  MediaCodecPlayerTest();
  ~MediaCodecPlayerTest() override;

 protected:
  typedef base::Callback<bool()> Predicate;

  // Creates |player_|, transferring ownership of |demuxer_| to it.
  void CreatePlayer();

  // Waits for condition to become true or for timeout to expire.
  // Returns true if the condition becomes true.
  bool WaitForCondition(const Predicate& condition,
                        const base::TimeDelta& timeout = kDefaultTimeout);

  base::MessageLoop message_loop_;
  MockMediaPlayerManager manager_;
  MockDemuxerAndroid* demuxer_;  // owned by player_
  MediaCodecPlayer* player_;  // raw pointer due to DeleteOnCorrectThread()

 private:
  // Flag raised by the timer inside WaitForCondition() when |timeout|
  // elapses.
  bool is_timeout_expired() const { return is_timeout_expired_; }
  void SetTimeoutExpired(bool value) { is_timeout_expired_ = value; }

  bool is_timeout_expired_;

  DISALLOW_COPY_AND_ASSIGN(MediaCodecPlayerTest);
};
+
+MediaCodecPlayerTest::MediaCodecPlayerTest()
+ : demuxer_(new MockDemuxerAndroid()), player_(nullptr) {
+}
+
+void MediaCodecPlayerTest::CreatePlayer() {
+ DCHECK(demuxer_);
+ player_ = new MediaCodecPlayer(
+ 0, // player_id
+ manager_.GetWeakPtr(),
+ base::Bind(&MockMediaPlayerManager::OnMediaResourcesRequested,
+ base::Unretained(&manager_)),
+ scoped_ptr<MockDemuxerAndroid>(demuxer_), GURL());
+
+ DCHECK(player_);
+}
+
+MediaCodecPlayerTest::~MediaCodecPlayerTest() {
+ if (player_)
+ player_->DeleteOnCorrectThread();
+}
+
+bool MediaCodecPlayerTest::WaitForCondition(const Predicate& condition,
+ const base::TimeDelta& timeout) {
+ // Let the message_loop_ process events.
+ // We start the timer and RunUntilIdle() until it signals.
+
+ SetTimeoutExpired(false);
+
+ base::Timer timer(false, false);
+ timer.Start(FROM_HERE, timeout,
+ base::Bind(&MediaCodecPlayerTest::SetTimeoutExpired,
+ base::Unretained(this), true));
+
+ do {
+ if (condition.Run()) {
+ timer.Stop();
+ return true;
+ }
+ message_loop_.RunUntilIdle();
+ } while (!is_timeout_expired());
+
+ DCHECK(!timer.IsRunning());
+ return false;
+}
+
+TEST_F(MediaCodecPlayerTest, SetAudioConfigsBeforePlayerCreation) {
+ // Post configuration when there is no player yet.
+ EXPECT_EQ(nullptr, player_);
+
+ base::TimeDelta duration = base::TimeDelta::FromSeconds(10);
+
+ demuxer_->PostConfigs(
+ TestDataFactory::CreateAudioConfigs(kCodecVorbis, duration));
+
+ // Wait until the configuration gets to the media thread.
+ EXPECT_TRUE(WaitForCondition(base::Bind(
+ &MockDemuxerAndroid::HasPendingConfigs, base::Unretained(demuxer_))));
+
+ // Then create the player.
+ CreatePlayer();
+
+ // Configuration should propagate through the player and to the manager.
+ EXPECT_TRUE(
+ WaitForCondition(base::Bind(&MockMediaPlayerManager::IsMetadataChanged,
+ base::Unretained(&manager_))));
+
+ EXPECT_EQ(duration, manager_.media_metadata_.duration);
+ EXPECT_EQ(0, manager_.media_metadata_.width);
+ EXPECT_EQ(0, manager_.media_metadata_.height);
+}
+
+TEST_F(MediaCodecPlayerTest, SetAudioConfigsAfterPlayerCreation) {
+ CreatePlayer();
+
+ // Wait till the player is initialized on media thread.
+ EXPECT_TRUE(WaitForCondition(base::Bind(&MockDemuxerAndroid::IsInitialized,
+ base::Unretained(demuxer_))));
+
+ // Post configuration after the player has been initialized.
+ base::TimeDelta duration = base::TimeDelta::FromSeconds(10);
+ demuxer_->PostConfigs(
+ TestDataFactory::CreateAudioConfigs(kCodecVorbis, duration));
+
+ // Configuration should propagate through the player and to the manager.
+ EXPECT_TRUE(
+ WaitForCondition(base::Bind(&MockMediaPlayerManager::IsMetadataChanged,
+ base::Unretained(&manager_))));
+
+ EXPECT_EQ(duration, manager_.media_metadata_.duration);
+ EXPECT_EQ(0, manager_.media_metadata_.width);
+ EXPECT_EQ(0, manager_.media_metadata_.height);
+}
+
+TEST_F(MediaCodecPlayerTest, SetAudioVideoConfigsAfterPlayerCreation) {
+ CreatePlayer();
+
+ // Wait till the player is initialized on media thread.
+ EXPECT_TRUE(WaitForCondition(base::Bind(&MockDemuxerAndroid::IsInitialized,
+ base::Unretained(demuxer_))));
+
+ // Post configuration after the player has been initialized.
+ base::TimeDelta duration = base::TimeDelta::FromSeconds(10);
+ demuxer_->PostConfigs(CreateAudioVideoConfigs(duration, gfx::Size(320, 240)));
+
+ // Configuration should propagate through the player and to the manager.
+ EXPECT_TRUE(
+ WaitForCondition(base::Bind(&MockMediaPlayerManager::IsMetadataChanged,
+ base::Unretained(&manager_))));
+
+ EXPECT_EQ(duration, manager_.media_metadata_.duration);
+ EXPECT_EQ(320, manager_.media_metadata_.width);
+ EXPECT_EQ(240, manager_.media_metadata_.height);
+}
+
+TEST_F(MediaCodecPlayerTest, PlayAudioTillCompletion) {
+ SKIP_TEST_IF_MEDIA_CODEC_BRIDGE_IS_NOT_AVAILABLE();
+
+ base::TimeDelta duration = base::TimeDelta::FromMilliseconds(1000);
+ base::TimeDelta timeout = base::TimeDelta::FromMilliseconds(1100);
+
+ demuxer_->SetAudioFactory(
+ scoped_ptr<AudioFactory>(new AudioFactory(duration)));
+
+ CreatePlayer();
+
+ // Wait till the player is initialized on media thread.
+ EXPECT_TRUE(WaitForCondition(base::Bind(&MockDemuxerAndroid::IsInitialized,
+ base::Unretained(demuxer_))));
+
+ // Post configuration after the player has been initialized.
+ demuxer_->PostInternalConfigs();
+
+ EXPECT_FALSE(manager_.IsPlaybackCompleted());
+
+ player_->Start();
+
+ EXPECT_TRUE(
+ WaitForCondition(base::Bind(&MockMediaPlayerManager::IsPlaybackCompleted,
+ base::Unretained(&manager_)),
+ timeout));
+}
+
+} // namespace media
diff --git a/chromium/media/base/android/media_codec_video_decoder.cc b/chromium/media/base/android/media_codec_video_decoder.cc
new file mode 100644
index 00000000000..2f8c11e2670
--- /dev/null
+++ b/chromium/media/base/android/media_codec_video_decoder.cc
@@ -0,0 +1,270 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/base/android/media_codec_video_decoder.h"
+
+#include "base/bind.h"
+#include "base/logging.h"
+#include "media/base/android/media_codec_bridge.h"
+#include "media/base/buffers.h"
+#include "media/base/demuxer_stream.h"
+
+namespace media {
+
namespace {
// Extra delay added to the last-seen PTS of a stand-alone EOS unit so that
// it is guaranteed to be released after all real frames (see Render()).
const int kDelayForStandAloneEOS = 2;  // milliseconds
}  // namespace
+
// Constructs the video half of MediaCodecPlayer's decoding machinery.
// The extra callbacks (beyond the base-class ones) are posted to
// |media_task_runner|:
//   update_current_time_cb: reports the playback time of a released frame,
//   video_size_changed_cb: reports a new video size,
//   codec_created_cb: reports that the MediaCodec has been created.
MediaCodecVideoDecoder::MediaCodecVideoDecoder(
    const scoped_refptr<base::SingleThreadTaskRunner>& media_task_runner,
    const base::Closure& request_data_cb,
    const base::Closure& starvation_cb,
    const base::Closure& stop_done_cb,
    const base::Closure& error_cb,
    const SetTimeCallback& update_current_time_cb,
    const VideoSizeChangedCallback& video_size_changed_cb,
    const base::Closure& codec_created_cb)
    : MediaCodecDecoder(media_task_runner,
                        request_data_cb,
                        starvation_cb,
                        stop_done_cb,
                        error_cb,
                        "VideoDecoder"),
      update_current_time_cb_(update_current_time_cb),
      video_size_changed_cb_(video_size_changed_cb),
      codec_created_cb_(codec_created_cb) {
}
+
MediaCodecVideoDecoder::~MediaCodecVideoDecoder() {
  DVLOG(1) << "VideoDecoder::~VideoDecoder()";
  // Releases the MediaCodec, the surface and any buffers still scheduled
  // for delayed rendering.
  ReleaseDecoderResources();
}
+
// Name used in log messages.
const char* MediaCodecVideoDecoder::class_name() const {
  return "VideoDecoder";
}
+
bool MediaCodecVideoDecoder::HasStream() const {
  DCHECK(media_task_runner_->BelongsToCurrentThread());

  // A video stream exists iff the demuxer reported a known video codec.
  return configs_.video_codec != kUnknownVideoCodec;
}
+
+void MediaCodecVideoDecoder::SetDemuxerConfigs(const DemuxerConfigs& configs) {
+ DCHECK(media_task_runner_->BelongsToCurrentThread());
+
+ DVLOG(1) << class_name() << "::" << __FUNCTION__ << " " << configs;
+
+ configs_ = configs;
+
+ if (video_size_.IsEmpty()) {
+ video_size_ = configs_.video_size;
+ media_task_runner_->PostTask(
+ FROM_HERE, base::Bind(video_size_changed_cb_, video_size_));
+ }
+}
+
void MediaCodecVideoDecoder::ReleaseDecoderResources() {
  DCHECK(media_task_runner_->BelongsToCurrentThread());

  DVLOG(1) << class_name() << "::" << __FUNCTION__;

  // Release the base-class resources first, then drop the surface and
  // forget the buffers that were scheduled for delayed rendering.
  MediaCodecDecoder::ReleaseDecoderResources();
  surface_ = gfx::ScopedJavaSurface();
  delayed_buffers_.clear();
}
+
+void MediaCodecVideoDecoder::SetPendingSurface(gfx::ScopedJavaSurface surface) {
+ DCHECK(media_task_runner_->BelongsToCurrentThread());
+
+ surface_ = surface.Pass();
+
+ if (surface_.IsEmpty()) {
+ // Synchronously stop decoder thread and release MediaCodec
+ ReleaseDecoderResources();
+ }
+}
+
bool MediaCodecVideoDecoder::HasPendingSurface() const {
  DCHECK(media_task_runner_->BelongsToCurrentThread());

  // The surface stored by SetPendingSurface() is consumed by
  // ConfigureInternal().
  return !surface_.IsEmpty();
}
+
+bool MediaCodecVideoDecoder::IsCodecReconfigureNeeded(
+ const DemuxerConfigs& curr,
+ const DemuxerConfigs& next) const {
+ if (curr.video_codec != next.video_codec ||
+ curr.is_video_encrypted != next.is_video_encrypted) {
+ return true;
+ }
+
+ // Only size changes below this point
+
+ if (curr.video_size.width() == next.video_size.width() &&
+ curr.video_size.height() == next.video_size.height()) {
+ return false; // i.e. curr == next
+ }
+
+ return !static_cast<VideoCodecBridge*>(media_codec_bridge_.get())
+ ->IsAdaptivePlaybackSupported(next.video_size.width(),
+ next.video_size.height());
+}
+
// Creates and configures the MediaCodec for the current |configs_| and
// |surface_|.  Returns CONFIG_OK on success, CONFIG_FAILURE otherwise.
MediaCodecDecoder::ConfigStatus MediaCodecVideoDecoder::ConfigureInternal() {
  DCHECK(media_task_runner_->BelongsToCurrentThread());

  DVLOG(1) << class_name() << "::" << __FUNCTION__;

  // If we cannot find a key frame in cache, the browser seek is needed.
  if (!au_queue_.RewindToLastKeyFrame()) {
    DVLOG(1) << class_name() << "::" << __FUNCTION__ << " key frame required";

    // The processing of CONFIG_KEY_FRAME_REQUIRED is not implemented yet,
    // return error for now.
    // TODO(timav): Replace this with the following line together with
    // implementing the browser seek:
    // return CONFIG_KEY_FRAME_REQUIRED;
    return CONFIG_FAILURE;
  }

  // TODO(timav): implement DRM.
  // bool is_secure = is_content_encrypted() && drm_bridge() &&
  //    drm_bridge()->IsProtectedSurfaceRequired();

  bool is_secure = false;  // DRM is not implemented

  // A video codec cannot be configured without an output surface.
  if (surface_.IsEmpty()) {
    DVLOG(0) << class_name() << "::" << __FUNCTION__ << " surface required";
    return CONFIG_FAILURE;
  }

  media_codec_bridge_.reset(VideoCodecBridge::CreateDecoder(
      configs_.video_codec,
      is_secure,
      configs_.video_size,
      surface_.j_surface().obj(),
      GetMediaCrypto().obj()));

  if (!media_codec_bridge_) {
    DVLOG(1) << class_name() << "::" << __FUNCTION__ << " failed";
    return CONFIG_FAILURE;
  }

  DVLOG(1) << class_name() << "::" << __FUNCTION__ << " succeeded";

  // Notify the owner that the codec exists now.
  media_task_runner_->PostTask(FROM_HERE, codec_created_cb_);

  return CONFIG_OK;
}
+
// Establishes the correspondence between presentation timestamps and wall
// clock: a frame with PTS |current_time| corresponds to "now".  Render()
// uses this pair to compute the delay before showing each frame.
void MediaCodecVideoDecoder::SynchronizePTSWithTime(
    base::TimeDelta current_time) {
  DCHECK(media_task_runner_->BelongsToCurrentThread());

  start_time_ticks_ = base::TimeTicks::Now();
  start_pts_ = current_time;
  last_seen_pts_ = current_time;
}
+
+void MediaCodecVideoDecoder::OnOutputFormatChanged() {
+ DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread());
+
+ gfx::Size prev_size = video_size_;
+
+ // See b/18224769. The values reported from MediaCodecBridge::GetOutputFormat
+ // correspond to the actual video frame size, but this is not necessarily the
+ // size that should be output.
+ video_size_ = configs_.video_size;
+ if (video_size_ != prev_size) {
+ media_task_runner_->PostTask(
+ FROM_HERE, base::Bind(video_size_changed_cb_, video_size_));
+ }
+}
+
// Decides the fate of one decoded output buffer: release it immediately
// (when dropped or already late), or schedule a delayed release timed so
// that the frame is rendered at its presentation time.
void MediaCodecVideoDecoder::Render(int buffer_index,
                                    size_t size,
                                    bool render_output,
                                    base::TimeDelta pts,
                                    bool eos_encountered) {
  DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread());

  DVLOG(2) << class_name() << "::" << __FUNCTION__ << " pts:" << pts
           << " index:" << buffer_index << " size:" << size
           << (eos_encountered ? " EOS" : "");

  // Normally EOS comes as a separate access unit that does not have data,
  // the corresponding |size| will be 0.
  if (!size && eos_encountered) {
    // Stand-alone EOS
    // Discard the PTS that comes with it and ensure it is released last.
    pts = last_seen_pts_ +
          base::TimeDelta::FromMilliseconds(kDelayForStandAloneEOS);
  } else {
    // Keep track of last seen PTS
    last_seen_pts_ = pts;
  }

  if (!render_output) {
    ReleaseOutputBuffer(buffer_index, pts, size, false, eos_encountered);
    return;
  }

  // Delay until the frame's PTS relative to the sync point set by
  // SynchronizePTSWithTime().
  base::TimeDelta time_to_render =
      pts - (base::TimeTicks::Now() - start_time_ticks_ + start_pts_);

  if (time_to_render < base::TimeDelta()) {
    // Skip late frames
    ReleaseOutputBuffer(buffer_index, pts, size, false, eos_encountered);
    return;
  }

  // Record the buffer before posting so NumDelayedRenderTasks() counts it.
  delayed_buffers_.insert(buffer_index);

  // A zero-sized unit (stand-alone EOS) has nothing to show on the surface.
  bool do_render = size > 0;
  decoder_thread_.task_runner()->PostDelayedTask(
      FROM_HERE, base::Bind(&MediaCodecVideoDecoder::ReleaseOutputBuffer,
                            base::Unretained(this), buffer_index, pts,
                            size, do_render, eos_encountered),
      time_to_render);
}
+
+int MediaCodecVideoDecoder::NumDelayedRenderTasks() const {
+ DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread());
+
+ return delayed_buffers_.size();
+}
+
// Returns all still-delayed output buffers to MediaCodec without rendering.
void MediaCodecVideoDecoder::ReleaseDelayedBuffers() {
  // Media thread
  // Called when there is no decoder thread
  // NOTE(review): assumes |media_codec_bridge_| is still alive whenever
  // |delayed_buffers_| is non-empty -- confirm with callers.
  for (int index : delayed_buffers_)
    media_codec_bridge_->ReleaseOutputBuffer(index, false);
  delayed_buffers_.clear();
}
+
// Releases one output buffer back to MediaCodec, optionally rendering it to
// the surface, then reports playback progress via |update_current_time_cb_|.
void MediaCodecVideoDecoder::ReleaseOutputBuffer(int buffer_index,
                                                 base::TimeDelta pts,
                                                 size_t size,
                                                 bool render,
                                                 bool eos_encountered) {
  DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread());

  DVLOG(2) << class_name() << "::" << __FUNCTION__ << " pts:" << pts;

  media_codec_bridge_->ReleaseOutputBuffer(buffer_index, render);

  delayed_buffers_.erase(buffer_index);

  CheckLastFrame(eos_encountered, !delayed_buffers_.empty());

  // |update_current_time_cb_| might be null if there is audio stream.
  // Do not update current time for stand-alone EOS frames.
  if (!update_current_time_cb_.is_null() && !(eos_encountered && !size)) {
    media_task_runner_->PostTask(FROM_HERE,
                                 base::Bind(update_current_time_cb_, pts, pts));
  }
}
+
+} // namespace media
diff --git a/chromium/media/base/android/media_codec_video_decoder.h b/chromium/media/base/android/media_codec_video_decoder.h
new file mode 100644
index 00000000000..1bec99ef768
--- /dev/null
+++ b/chromium/media/base/android/media_codec_video_decoder.h
@@ -0,0 +1,112 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_BASE_ANDROID_MEDIA_CODEC_VIDEO_DECODER_H_
+#define MEDIA_BASE_ANDROID_MEDIA_CODEC_VIDEO_DECODER_H_
+
+#include <set>
+#include "media/base/android/media_codec_decoder.h"
+#include "ui/gfx/geometry/size.h"
+#include "ui/gl/android/scoped_java_surface.h"
+
+namespace media {
+
// Video decoder for MediaCodecPlayer.  Renders decoded frames to a
// gfx::ScopedJavaSurface provided via SetPendingSurface().
class MediaCodecVideoDecoder : public MediaCodecDecoder {
 public:
  // Typedefs for the notification callbacks
  typedef base::Callback<void(const gfx::Size& video_size)>
      VideoSizeChangedCallback;

  // For parameters see media_codec_decoder.h
  // update_current_time_cb: callback that reports current playback time.
  //                         Called for released output frame,
  // video_size_changed_cb: reports the new video size,
  // codec_created_cb: reports that video codec has been created. A controller
  //                   class might use it to release more resources so that
  //                   this decoder can use them.
  MediaCodecVideoDecoder(
      const scoped_refptr<base::SingleThreadTaskRunner>& media_runner,
      const base::Closure& request_data_cb,
      const base::Closure& starvation_cb,
      const base::Closure& stop_done_cb,
      const base::Closure& error_cb,
      const SetTimeCallback& update_current_time_cb,
      const VideoSizeChangedCallback& video_size_changed_cb,
      const base::Closure& codec_created_cb);
  ~MediaCodecVideoDecoder() override;

  // Returns the decoder name used for logging.
  const char* class_name() const override;

  // Returns true if the demuxer reported a known video codec.
  bool HasStream() const override;
  void SetDemuxerConfigs(const DemuxerConfigs& configs) override;
  void ReleaseDecoderResources() override;

  // Stores the video surface to use with upcoming Configure()
  void SetPendingSurface(gfx::ScopedJavaSurface surface);

  // Returns true if there is a video surface to use.
  bool HasPendingSurface() const;

 protected:
  bool IsCodecReconfigureNeeded(const DemuxerConfigs& curr,
                                const DemuxerConfigs& next) const override;
  ConfigStatus ConfigureInternal() override;
  void SynchronizePTSWithTime(base::TimeDelta current_time) override;
  void OnOutputFormatChanged() override;
  void Render(int buffer_index,
              size_t size,
              bool render_output,
              base::TimeDelta pts,
              bool eos_encountered) override;

  // Returns the number of buffers still scheduled for delayed release.
  int NumDelayedRenderTasks() const override;
  void ReleaseDelayedBuffers() override;

 private:
  // A helper method that releases output buffers and does
  // post-release checks. Might be called by Render() or posted
  // for later execution.
  void ReleaseOutputBuffer(int buffer_index,
                           base::TimeDelta pts,
                           size_t size,
                           bool render,
                           bool eos_encountered);

  // Data.

  // Configuration received from demuxer
  DemuxerConfigs configs_;

  // Video surface that we render to.
  gfx::ScopedJavaSurface surface_;

  // Reports current playback time to the client.
  SetTimeCallback update_current_time_cb_;

  // Informs the client that video size is changed.
  VideoSizeChangedCallback video_size_changed_cb_;

  // Informs the client that the MediaCodec is created.
  base::Closure codec_created_cb_;

  // Current video size to be sent with |video_size_changed_cb_|.
  gfx::Size video_size_;

  // Indices of output buffers that are posted for rendering.
  std::set<int> delayed_buffers_;

  // Associate presentation timestamps with time.
  base::TimeTicks start_time_ticks_;
  base::TimeDelta start_pts_;

  // Maintain the last seen PTS for stand-alone EOS.
  base::TimeDelta last_seen_pts_;

  DISALLOW_COPY_AND_ASSIGN(MediaCodecVideoDecoder);
};
+
+} // namespace media
+
+#endif // MEDIA_BASE_ANDROID_MEDIA_CODEC_VIDEO_DECODER_H_
diff --git a/chromium/media/base/android/media_drm_bridge.cc b/chromium/media/base/android/media_drm_bridge.cc
index fcb446b28b2..4200635ef5e 100644
--- a/chromium/media/base/android/media_drm_bridge.cc
+++ b/chromium/media/base/android/media_drm_bridge.cc
@@ -56,16 +56,19 @@ const uint8 kWidevineUuid[16] = {
// Convert |init_data_type| to a string supported by MediaDRM.
// "audio"/"video" does not matter, so use "video".
std::string ConvertInitDataType(media::EmeInitDataType init_data_type) {
- // TODO(jrummell): API level >=20 supports "webm" and "cenc", so switch
- // to those strings.
+ // TODO(jrummell/xhwang): EME init data types like "webm" and "cenc" are
+ // supported in API level >=21 for Widevine key system. Switch to use those
+ // strings when they are officially supported in Android for all key systems.
switch (init_data_type) {
case media::EmeInitDataType::WEBM:
return "video/webm";
case media::EmeInitDataType::CENC:
return "video/mp4";
+ case media::EmeInitDataType::KEYIDS:
+ return "keyids";
default:
NOTREACHED();
- return "video/unknown";
+ return "unknown";
}
}
@@ -293,8 +296,16 @@ bool MediaDrmBridge::SetSecurityLevel(SecurityLevel security_level) {
void MediaDrmBridge::SetServerCertificate(
const std::vector<uint8_t>& certificate,
scoped_ptr<media::SimpleCdmPromise> promise) {
- promise->reject(NOT_SUPPORTED_ERROR, 0,
- "SetServerCertificate() is not supported.");
+ DCHECK(!certificate.empty());
+
+ JNIEnv* env = AttachCurrentThread();
+ ScopedJavaLocalRef<jbyteArray> j_certificate;
+ if (Java_MediaDrmBridge_setServerCertificate(env, j_media_drm_.obj(),
+ j_certificate.obj())) {
+ promise->resolve();
+ } else {
+ promise->reject(INVALID_ACCESS_ERROR, 0, "Set server certificate failed.");
+ }
}
void MediaDrmBridge::CreateSessionAndGenerateRequest(
@@ -305,6 +316,7 @@ void MediaDrmBridge::CreateSessionAndGenerateRequest(
DVLOG(1) << __FUNCTION__;
if (session_type != media::MediaKeys::TEMPORARY_SESSION) {
+ NOTIMPLEMENTED() << "EME persistent sessions not yet supported on Android.";
promise->reject(NOT_SUPPORTED_ERROR, 0,
"Only the temporary session type is supported.");
return;
@@ -356,6 +368,7 @@ void MediaDrmBridge::LoadSession(
SessionType session_type,
const std::string& session_id,
scoped_ptr<media::NewSessionCdmPromise> promise) {
+ NOTIMPLEMENTED() << "EME persistent sessions not yet supported on Android.";
promise->reject(NOT_SUPPORTED_ERROR, 0, "LoadSession() is not supported.");
}
@@ -391,6 +404,7 @@ void MediaDrmBridge::CloseSession(const std::string& session_id,
void MediaDrmBridge::RemoveSession(
const std::string& session_id,
scoped_ptr<media::SimpleCdmPromise> promise) {
+ NOTIMPLEMENTED() << "EME persistent sessions not yet supported on Android.";
promise->reject(NOT_SUPPORTED_ERROR, 0, "RemoveSession() is not supported.");
}
diff --git a/chromium/media/base/android/media_player_android.cc b/chromium/media/base/android/media_player_android.cc
index 60e1dfc0f82..99668e8122f 100644
--- a/chromium/media/base/android/media_player_android.cc
+++ b/chromium/media/base/android/media_player_android.cc
@@ -98,4 +98,8 @@ void MediaPlayerAndroid::SetAudible(bool is_audible) {
}
}
+base::WeakPtr<MediaPlayerAndroid> MediaPlayerAndroid::WeakPtrForUIThread() {
+ return weak_factory_.GetWeakPtr();
+}
+
} // namespace media
diff --git a/chromium/media/base/android/media_player_android.h b/chromium/media/base/android/media_player_android.h
index 8928222156b..e52362411cb 100644
--- a/chromium/media/base/android/media_player_android.h
+++ b/chromium/media/base/android/media_player_android.h
@@ -13,6 +13,7 @@
#include "base/time/time.h"
#include "media/base/android/media_player_listener.h"
#include "media/base/media_export.h"
+#include "ui/gfx/geometry/size.h"
#include "ui/gl/android/scoped_java_surface.h"
#include "url/gurl.h"
@@ -79,10 +80,24 @@ class MEDIA_EXPORT MediaPlayerAndroid {
// Associates the |cdm| with this player.
virtual void SetCdm(BrowserCdm* cdm);
+ // Overridden in MediaCodecPlayer to pass data between threads.
+ virtual void OnMediaMetadataChanged(base::TimeDelta duration,
+ const gfx::Size& video_size) {}
+
+ // Overridden in MediaCodecPlayer to pass data between threads.
+ virtual void OnTimeUpdate(base::TimeDelta current_timestamp,
+ base::TimeTicks current_time_ticks) {}
+
int player_id() { return player_id_; }
GURL frame_url() { return frame_url_; }
+ // Attach/Detaches |listener_| for listening to all the media events. If
+ // |j_media_player| is NULL, |listener_| only listens to the system media
+ // events. Otherwise, it also listens to the events from |j_media_player|.
+ void AttachListener(jobject j_media_player);
+ void DetachListener();
+
protected:
MediaPlayerAndroid(int player_id,
MediaPlayerManager* manager,
@@ -101,12 +116,6 @@ class MEDIA_EXPORT MediaPlayerAndroid {
virtual void OnSeekComplete();
virtual void OnMediaPrepared();
- // Attach/Detaches |listener_| for listening to all the media events. If
- // |j_media_player| is NULL, |listener_| only listens to the system media
- // events. Otherwise, it also listens to the events from |j_media_player|.
- void AttachListener(jobject j_media_player);
- void DetachListener();
-
// When destroying a subclassed object on a non-UI thread
// it is still required to destroy the |listener_| related stuff
// on the UI thread.
@@ -115,6 +124,8 @@ class MEDIA_EXPORT MediaPlayerAndroid {
MediaPlayerManager* manager() { return manager_; }
+ base::WeakPtr<MediaPlayerAndroid> WeakPtrForUIThread();
+
RequestMediaResourcesCB request_media_resources_cb_;
private:
diff --git a/chromium/media/base/android/media_player_bridge.cc b/chromium/media/base/android/media_player_bridge.cc
index ed8854f1b68..5cc8b3933ed 100644
--- a/chromium/media/base/android/media_player_bridge.cc
+++ b/chromium/media/base/android/media_player_bridge.cc
@@ -8,7 +8,6 @@
#include "base/android/jni_string.h"
#include "base/basictypes.h"
#include "base/logging.h"
-#include "base/message_loop/message_loop_proxy.h"
#include "base/strings/string_util.h"
#include "jni/MediaPlayerBridge_jni.h"
#include "media/base/android/media_common_android.h"
@@ -161,7 +160,7 @@ void MediaPlayerBridge::SetDataSource(const std::string& url) {
DCHECK(j_context);
const std::string data_uri_prefix("data:");
- if (StartsWithASCII(url, data_uri_prefix, true)) {
+ if (base::StartsWithASCII(url, data_uri_prefix, true)) {
if (!Java_MediaPlayerBridge_setDataUriDataSource(
env, j_media_player_bridge_.obj(), j_context, j_url_string.obj())) {
OnMediaError(MEDIA_ERROR_FORMAT);
@@ -456,6 +455,11 @@ void MediaPlayerBridge::UpdateAllowedOperations() {
}
void MediaPlayerBridge::StartInternal() {
+ if (!manager()->RequestPlay(player_id())) {
+ Pause(true);
+ return;
+ }
+
JNIEnv* env = base::android::AttachCurrentThread();
Java_MediaPlayerBridge_start(env, j_media_player_bridge_.obj());
if (!time_update_timer_.IsRunning()) {
diff --git a/chromium/media/base/android/media_player_listener.cc b/chromium/media/base/android/media_player_listener.cc
index 861a34f7b0b..2561debd72c 100644
--- a/chromium/media/base/android/media_player_listener.cc
+++ b/chromium/media/base/android/media_player_listener.cc
@@ -41,12 +41,6 @@ void MediaPlayerListener::CreateMediaPlayerListener(
void MediaPlayerListener::ReleaseMediaPlayerListenerResources() {
- JNIEnv* env = AttachCurrentThread();
- CHECK(env);
- if (!j_media_player_listener_.is_null()) {
- Java_MediaPlayerListener_releaseResources(
- env, j_media_player_listener_.obj());
- }
j_media_player_listener_.Reset();
}
diff --git a/chromium/media/base/android/media_player_manager.h b/chromium/media/base/android/media_player_manager.h
index feac84e9193..bf6ca4e0734 100644
--- a/chromium/media/base/android/media_player_manager.h
+++ b/chromium/media/base/android/media_player_manager.h
@@ -76,8 +76,11 @@ class MEDIA_EXPORT MediaPlayerManager {
// Returns the player with the specified id.
virtual MediaPlayerAndroid* GetPlayer(int player_id) = 0;
- // Called by the player to get a hardware protected surface.
- virtual void RequestFullScreen(int player_id) = 0;
+ // Called by the player to request to play. The manager should use this
+ // opportunity to check if the current context is appropriate for a media to
+ // play.
+ // Returns whether the request was granted.
+ virtual bool RequestPlay(int player_id) = 0;
};
} // namespace media
diff --git a/chromium/media/base/android/media_source_player.cc b/chromium/media/base/android/media_source_player.cc
index 7e8f49a4613..460fbec8fb6 100644
--- a/chromium/media/base/android/media_source_player.cc
+++ b/chromium/media/base/android/media_source_player.cc
@@ -217,6 +217,11 @@ void MediaSourcePlayer::StartInternal() {
if (pending_event_ != NO_EVENT_PENDING)
return;
+ if (!manager()->RequestPlay(player_id())) {
+ Pause(true);
+ return;
+ }
+
// When we start, we could have new demuxed data coming in. This new data
// could be clear (not encrypted) or encrypted with different keys. So key
// related info should all be cleared.
diff --git a/chromium/media/base/android/media_source_player_unittest.cc b/chromium/media/base/android/media_source_player_unittest.cc
index f37cf205a54..be6db141b87 100644
--- a/chromium/media/base/android/media_source_player_unittest.cc
+++ b/chromium/media/base/android/media_source_player_unittest.cc
@@ -48,7 +48,8 @@ class MockMediaPlayerManager : public MediaPlayerManager {
num_metadata_changes_(0),
timestamp_updated_(false),
is_audible_(false),
- is_delay_expired_(false) {}
+ is_delay_expired_(false),
+ allow_play_(true) {}
~MockMediaPlayerManager() override {}
// MediaPlayerManager implementation.
@@ -80,7 +81,10 @@ class MockMediaPlayerManager : public MediaPlayerManager {
void OnWaitingForDecryptionKey(int player_id) override {}
MediaPlayerAndroid* GetFullscreenPlayer() override { return NULL; }
MediaPlayerAndroid* GetPlayer(int player_id) override { return NULL; }
- void RequestFullScreen(int player_id) override {}
+
+ bool RequestPlay(int player_id) override {
+ return allow_play_;
+ }
void OnAudibleStateChanged(int player_id, bool is_audible_now) override {
is_audible_ = is_audible_now;
@@ -122,6 +126,10 @@ class MockMediaPlayerManager : public MediaPlayerManager {
is_delay_expired_ = value;
}
+ void set_allow_play(bool value) {
+ allow_play_ = value;
+ }
+
private:
base::MessageLoop* message_loop_;
bool playback_completed_;
@@ -135,6 +143,8 @@ class MockMediaPlayerManager : public MediaPlayerManager {
bool is_audible_;
// Helper flag to ensure delay for WaitForDelay().
bool is_delay_expired_;
+ // Whether the manager will allow players that request playing.
+ bool allow_play_;
DISALLOW_COPY_AND_ASSIGN(MockMediaPlayerManager);
};
@@ -2513,4 +2523,48 @@ TEST_F(MediaSourcePlayerTest, VideoMetadataChangeAfterConfigChange) {
WaitForVideoDecodeDone();
}
+TEST_F(MediaSourcePlayerTest, RequestPlayDeniedDontPlay_Audio) {
+ SKIP_TEST_IF_MEDIA_CODEC_BRIDGE_IS_NOT_AVAILABLE();
+
+ EXPECT_EQ(demuxer_->num_data_requests(), 0);
+ player_.OnDemuxerConfigsAvailable(CreateDemuxerConfigs(true, false));
+
+ manager_.set_allow_play(false);
+ player_.Start();
+ EXPECT_FALSE(player_.IsPlaying());
+}
+
+TEST_F(MediaSourcePlayerTest, RequestPlayDeniedDontPlay_Video) {
+ SKIP_TEST_IF_MEDIA_CODEC_BRIDGE_IS_NOT_AVAILABLE();
+
+ EXPECT_EQ(demuxer_->num_data_requests(), 0);
+ player_.OnDemuxerConfigsAvailable(CreateDemuxerConfigs(false, true));
+
+ manager_.set_allow_play(false);
+ player_.Start();
+ EXPECT_FALSE(player_.IsPlaying());
+}
+
+TEST_F(MediaSourcePlayerTest, RequestPlayDeniedDontPlay_AV) {
+ SKIP_TEST_IF_MEDIA_CODEC_BRIDGE_IS_NOT_AVAILABLE();
+
+ EXPECT_EQ(demuxer_->num_data_requests(), 0);
+ player_.OnDemuxerConfigsAvailable(CreateDemuxerConfigs(true, true));
+
+ manager_.set_allow_play(false);
+ player_.Start();
+ EXPECT_FALSE(player_.IsPlaying());
+}
+
+TEST_F(MediaSourcePlayerTest, RequestPlayGrantedPlays) {
+ SKIP_TEST_IF_MEDIA_CODEC_BRIDGE_IS_NOT_AVAILABLE();
+
+ EXPECT_EQ(demuxer_->num_data_requests(), 0);
+ player_.OnDemuxerConfigsAvailable(CreateDemuxerConfigs(true, true));
+
+ manager_.set_allow_play(true);
+ player_.Start();
+ EXPECT_TRUE(player_.IsPlaying());
+}
+
} // namespace media
diff --git a/chromium/media/base/android/test_data_factory.cc b/chromium/media/base/android/test_data_factory.cc
new file mode 100644
index 00000000000..8af93826060
--- /dev/null
+++ b/chromium/media/base/android/test_data_factory.cc
@@ -0,0 +1,119 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/base/android/test_data_factory.h"
+
+#include "base/strings/stringprintf.h"
+#include "media/base/android/demuxer_stream_player_params.h"
+#include "media/base/decoder_buffer.h"
+#include "media/base/test_data_util.h"
+
+namespace media {
+
+DemuxerConfigs TestDataFactory::CreateAudioConfigs(
+ AudioCodec audio_codec,
+ const base::TimeDelta& duration) {
+ DemuxerConfigs configs;
+ configs.audio_codec = audio_codec;
+ configs.audio_channels = 2;
+ configs.is_audio_encrypted = false;
+ configs.duration = duration;
+
+ switch (audio_codec) {
+ case kCodecVorbis: {
+ configs.audio_sampling_rate = 44100;
+ scoped_refptr<DecoderBuffer> buffer =
+ ReadTestDataFile("vorbis-extradata");
+ configs.audio_extra_data = std::vector<uint8>(
+ buffer->data(), buffer->data() + buffer->data_size());
+ } break;
+
+ case kCodecAAC: {
+ configs.audio_sampling_rate = 48000;
+ uint8 aac_extra_data[] = {0x13, 0x10};
+ configs.audio_extra_data =
+ std::vector<uint8>(aac_extra_data, aac_extra_data + 2);
+ } break;
+
+ default:
+ // Other codecs are not supported by this helper.
+ NOTREACHED();
+ break;
+ }
+
+ return configs;
+}
+
+DemuxerConfigs TestDataFactory::CreateVideoConfigs(
+ VideoCodec video_codec,
+ const base::TimeDelta& duration,
+ const gfx::Size& video_size) {
+ DemuxerConfigs configs;
+ configs.video_codec = video_codec;
+ configs.video_size = video_size;
+ configs.is_video_encrypted = false;
+ configs.duration = duration;
+
+ return configs;
+}
+
+TestDataFactory::TestDataFactory(const char* file_name_template,
+ const base::TimeDelta& duration,
+ const base::TimeDelta& frame_period)
+ : duration_(duration),
+ frame_period_(frame_period),
+ starvation_mode_(false) {
+ LoadPackets(file_name_template);
+}
+
+TestDataFactory::~TestDataFactory() {}
+
+bool TestDataFactory::CreateChunk(DemuxerData* chunk, base::TimeDelta* delay) {
+ DCHECK(chunk);
+ DCHECK(delay);
+
+ *delay = base::TimeDelta();
+
+ if (regular_pts_ > duration_)
+ return false;
+
+ for (int i = 0; i < 4; ++i) {
+ chunk->access_units.push_back(AccessUnit());
+ AccessUnit& unit = chunk->access_units.back();
+ unit.status = DemuxerStream::kOk;
+
+ unit.timestamp = regular_pts_;
+ regular_pts_ += frame_period_;
+
+ if (unit.timestamp > duration_) {
+ if (starvation_mode_)
+ return false;
+
+ unit.is_end_of_stream = true;
+ break; // EOS units have no data.
+ }
+
+ unit.data = packet_[i];
+
+ // Allow for modification by subclasses.
+ ModifyAccessUnit(i, &unit);
+
+ // Maintain last PTS. ModifyAccessUnit() can modify unit's PTS.
+ if (last_pts_ < unit.timestamp)
+ last_pts_ = unit.timestamp;
+ }
+
+ return true;
+}
+
+void TestDataFactory::LoadPackets(const char* file_name_template) {
+ for (int i = 0; i < 4; ++i) {
+ scoped_refptr<DecoderBuffer> buffer =
+ ReadTestDataFile(base::StringPrintf(file_name_template, i));
+ packet_[i] = std::vector<uint8>(buffer->data(),
+ buffer->data() + buffer->data_size());
+ }
+}
+
+} // namespace media
diff --git a/chromium/media/base/android/test_data_factory.h b/chromium/media/base/android/test_data_factory.h
new file mode 100644
index 00000000000..b186d75e276
--- /dev/null
+++ b/chromium/media/base/android/test_data_factory.h
@@ -0,0 +1,73 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_BASE_ANDROID_TEST_DATA_FACTORY_H_
+#define MEDIA_BASE_ANDROID_TEST_DATA_FACTORY_H_
+
+#include <stdint.h>
+#include <vector>
+#include "base/memory/ref_counted.h"
+#include "base/time/time.h"
+#include "media/base/android/demuxer_stream_player_params.h"
+
+namespace media {
+
+// TestDataFactory is used by MediaCodecDecoder unit test and MediaCodecPlayer
+// unit test to simulate the audio or video access unit stream.
+class TestDataFactory {
+ public:
+ // These methods return corresponding demuxer configs.
+ static DemuxerConfigs CreateAudioConfigs(AudioCodec audio_codec,
+ const base::TimeDelta& duration);
+ static DemuxerConfigs CreateVideoConfigs(VideoCodec video_codec,
+ const base::TimeDelta& duration,
+ const gfx::Size& video_size);
+
+ // Constructor calls |LoadPackets| to load packets from files.
+ // Parameters:
+ // file_name_template: the sprintf format string used to generate a file
+ // name for the packet in the form e.g. "h264-AxB-%d"
+ // The |%d| will be replaced by 0, 1, 2, 3.
+ // duration: after the last AU exceeds duration the factory generates EOS
+ // unit and stops.
+ // frame_period: PTS increment between units.
+ TestDataFactory(const char* file_name_template,
+ const base::TimeDelta& duration,
+ const base::TimeDelta& frame_period);
+ virtual ~TestDataFactory();
+
+ // Returns demuxer configuration for this factory.
+ virtual DemuxerConfigs GetConfigs() const = 0;
+
+ // Populates next chunk and the corresponding delay and returns true if
+ // duration is not exceeded, otherwise returns false.
+ // Default implementation repeatedly uses |packet_| array in order 0-1-2-3
+ // and monotonically increases timestamps from 0 to |duration_|.
+ // The first unit to exceed |duration_| becomes EOS. The delay is set to 0.
+ virtual bool CreateChunk(DemuxerData* chunk, base::TimeDelta* delay);
+
+ // In starvation mode we do not add EOS at the end.
+ void SetStarvationMode(bool value) { starvation_mode_ = value; }
+
+ base::TimeDelta last_pts() const { return last_pts_; }
+
+ protected:
+ // Called by constructor to load packets from files referred by
+ // |file_name_template|.
+ virtual void LoadPackets(const char* file_name_template);
+
+ // Used to modify the generated access unit by a subclass.
+ virtual void ModifyAccessUnit(int index_in_chunk, AccessUnit* unit) {}
+
+ base::TimeDelta duration_;
+ base::TimeDelta frame_period_;
+ std::vector<uint8_t> packet_[4];
+ base::TimeDelta regular_pts_; // monotonically increasing PTS
+ base::TimeDelta last_pts_; // subclass can modify PTS, maintains the last
+ bool starvation_mode_; // true means no EOS at the end
+};
+
+} // namespace media
+
+#endif // MEDIA_BASE_ANDROID_TEST_DATA_FACTORY_H_
diff --git a/chromium/media/base/android/video_decoder_job.cc b/chromium/media/base/android/video_decoder_job.cc
index d34ea28aada..d49d2d2e6a0 100644
--- a/chromium/media/base/android/video_decoder_job.cc
+++ b/chromium/media/base/android/video_decoder_job.cc
@@ -29,7 +29,7 @@ VideoDecoderJob::VideoDecoderJob(
const base::Closure& request_data_cb,
const base::Closure& request_resources_cb,
const base::Closure& on_demuxer_config_changed_cb)
- : MediaDecoderJob(g_video_decoder_thread.Pointer()->message_loop_proxy(),
+ : MediaDecoderJob(g_video_decoder_thread.Pointer()->task_runner(),
request_data_cb,
on_demuxer_config_changed_cb),
video_codec_(kUnknownVideoCodec),
diff --git a/chromium/media/base/audio_buffer.cc b/chromium/media/base/audio_buffer.cc
index 3eff8045637..f07ed6dca2b 100644
--- a/chromium/media/base/audio_buffer.cc
+++ b/chromium/media/base/audio_buffer.cc
@@ -4,6 +4,8 @@
#include "media/base/audio_buffer.h"
+#include <cmath>
+
#include "base/logging.h"
#include "media/base/audio_bus.h"
#include "media/base/buffers.h"
@@ -162,12 +164,52 @@ scoped_refptr<AudioBuffer> AudioBuffer::CreateEOSBuffer() {
kNoTimestamp()));
}
+template <typename Target, typename Dest>
+static inline Dest ConvertSample(Target value);
+
// Convert int16 values in the range [INT16_MIN, INT16_MAX] to [-1.0, 1.0].
-static inline float ConvertS16ToFloat(int16 value) {
+template <>
+inline float ConvertSample<int16, float>(int16 value) {
return value * (value < 0 ? -1.0f / std::numeric_limits<int16>::min()
: 1.0f / std::numeric_limits<int16>::max());
}
+// Specializations for int32
+template <>
+inline int32 ConvertSample<int16, int32>(int16 value) {
+ return static_cast<int32>(value) << 16;
+}
+
+template <>
+inline int32 ConvertSample<int32, int32>(int32 value) {
+ return value;
+}
+
+template <>
+inline int32 ConvertSample<float, int32>(float value) {
+ return static_cast<int32>(value < 0
+ ? (-value) * std::numeric_limits<int32>::min()
+ : value * std::numeric_limits<int32>::max());
+}
+
+// Specializations for int16
+template <>
+inline int16 ConvertSample<int16, int16>(int16 sample) {
+ return sample;
+}
+
+template <>
+inline int16 ConvertSample<int32, int16>(int32 sample) {
+ return sample >> 16;
+}
+
+template <>
+inline int16 ConvertSample<float, int16>(float sample) {
+ return static_cast<int16>(
+ nearbyint(sample < 0 ? (-sample) * std::numeric_limits<int16>::min()
+ : sample * std::numeric_limits<int16>::max()));
+}
+
void AudioBuffer::ReadFrames(int frames_to_copy,
int source_frame_offset,
int dest_frame_offset,
@@ -213,7 +255,7 @@ void AudioBuffer::ReadFrames(int frames_to_copy,
source_frame_offset;
float* dest_data = dest->channel(ch) + dest_frame_offset;
for (int i = 0; i < frames_to_copy; ++i) {
- dest_data[i] = ConvertS16ToFloat(source_data[i]);
+ dest_data[i] = ConvertSample<int16, float>(source_data[i]);
}
}
return;
@@ -245,98 +287,55 @@ void AudioBuffer::ReadFrames(int frames_to_copy,
source_data, dest_frame_offset, frames_to_copy, bytes_per_channel);
}
-static inline int32 ConvertS16ToS32(int16 value) {
- return static_cast<int32>(value) << 16;
-}
-
-static inline int32 ConvertF32ToS32(float value) {
- return static_cast<int32>(value < 0
- ? (-value) * std::numeric_limits<int32>::min()
- : value * std::numeric_limits<int32>::max());
-}
-
-// No need for conversion. Return value as is. Keeping function to align with
-// code structure.
-static inline int32 ConvertS32ToS32(int32 value) {
- return value;
-}
-
-template <class Target, typename Converter>
-void InterleaveToS32(const std::vector<uint8*>& channel_data,
- size_t frames_to_copy,
- int trim_start,
- int32* dest_data,
- Converter convert_func) {
+template <class Target, typename Dest>
+void InterleaveAndConvert(const std::vector<uint8*>& channel_data,
+ size_t frames_to_copy,
+ int trim_start,
+ Dest* dest_data) {
for (size_t ch = 0; ch < channel_data.size(); ++ch) {
const Target* source_data =
reinterpret_cast<const Target*>(channel_data[ch]) + trim_start;
for (size_t i = 0, offset = ch; i < frames_to_copy;
++i, offset += channel_data.size()) {
- dest_data[offset] = convert_func(source_data[i]);
+ dest_data[offset] = ConvertSample<Target, Dest>(source_data[i]);
}
}
}
-void AudioBuffer::ReadFramesInterleavedS32(int frames_to_copy,
- int32* dest_data) {
- DCHECK_LE(frames_to_copy, adjusted_frame_count_);
-
- switch (sample_format_) {
+template <typename Dest>
+void ReadFramesInterleaved(const std::vector<uint8*>& channel_data,
+ int channel_count,
+ SampleFormat sample_format,
+ int frames_to_copy,
+ int trim_start,
+ Dest* dest_data) {
+ switch (sample_format) {
case kSampleFormatU8:
- NOTIMPLEMENTED();
+ NOTREACHED();
break;
case kSampleFormatS16:
- // Format is interleaved signed16. Convert each value into int32 and
- // insert into output channel data.
- InterleaveToS32<int16>(channel_data_,
- frames_to_copy * channel_count_,
- trim_start_,
- dest_data,
- ConvertS16ToS32);
+ InterleaveAndConvert<int16, Dest>(
+ channel_data, frames_to_copy * channel_count, trim_start, dest_data);
+ break;
+ case kSampleFormatS32:
+ InterleaveAndConvert<int32, Dest>(
+ channel_data, frames_to_copy * channel_count, trim_start, dest_data);
break;
- case kSampleFormatS32: {
- // Format is interleaved signed32; just copy the data.
- const int32* source_data =
- reinterpret_cast<const int32*>(channel_data_[0]) + trim_start_;
- memcpy(dest_data,
- source_data,
- frames_to_copy * channel_count_ * sizeof(int32));
- } break;
case kSampleFormatF32:
- // Format is interleaved float. Convert each value into int32 and insert
- // into output channel data.
- InterleaveToS32<float>(channel_data_,
- frames_to_copy * channel_count_,
- trim_start_,
- dest_data,
- ConvertF32ToS32);
+ InterleaveAndConvert<float, Dest>(
+ channel_data, frames_to_copy * channel_count, trim_start, dest_data);
break;
case kSampleFormatPlanarS16:
- // Format is planar signed 16 bit. Convert each value into int32 and
- // insert into output channel data.
- InterleaveToS32<int16>(channel_data_,
- frames_to_copy,
- trim_start_,
- dest_data,
- ConvertS16ToS32);
+ InterleaveAndConvert<int16, Dest>(channel_data, frames_to_copy,
+ trim_start, dest_data);
break;
case kSampleFormatPlanarF32:
- // Format is planar float. Convert each value into int32 and insert into
- // output channel data.
- InterleaveToS32<float>(channel_data_,
- frames_to_copy,
- trim_start_,
- dest_data,
- ConvertF32ToS32);
+ InterleaveAndConvert<float, Dest>(channel_data, frames_to_copy,
+ trim_start, dest_data);
break;
case kSampleFormatPlanarS32:
- // Format is planar signed 32 bit. Convert each value into int32 and
- // insert into output channel data.
- InterleaveToS32<int32>(channel_data_,
- frames_to_copy,
- trim_start_,
- dest_data,
- ConvertS32ToS32);
+ InterleaveAndConvert<int32, Dest>(channel_data, frames_to_copy,
+ trim_start, dest_data);
break;
case kUnknownSampleFormat:
NOTREACHED();
@@ -344,6 +343,20 @@ void AudioBuffer::ReadFramesInterleavedS32(int frames_to_copy,
}
}
+void AudioBuffer::ReadFramesInterleavedS32(int frames_to_copy,
+ int32* dest_data) {
+ DCHECK_LE(frames_to_copy, adjusted_frame_count_);
+ ReadFramesInterleaved<int32>(channel_data_, channel_count_, sample_format_,
+ frames_to_copy, trim_start_, dest_data);
+}
+
+void AudioBuffer::ReadFramesInterleavedS16(int frames_to_copy,
+ int16* dest_data) {
+ DCHECK_LE(frames_to_copy, adjusted_frame_count_);
+ ReadFramesInterleaved<int16>(channel_data_, channel_count_, sample_format_,
+ frames_to_copy, trim_start_, dest_data);
+}
+
void AudioBuffer::TrimStart(int frames_to_trim) {
CHECK_GE(frames_to_trim, 0);
CHECK_LE(frames_to_trim, adjusted_frame_count_);
diff --git a/chromium/media/base/audio_buffer.h b/chromium/media/base/audio_buffer.h
index f81c2f63ab1..63390d4781c 100644
--- a/chromium/media/base/audio_buffer.h
+++ b/chromium/media/base/audio_buffer.h
@@ -78,6 +78,11 @@ class MEDIA_EXPORT AudioBuffer
// interleaved int32.
void ReadFramesInterleavedS32(int frames_to_copy, int32* dest);
+ // Copy |frames_to_copy| frames into |dest|, |frames_to_copy| is the number of
+ // frames to copy. The frames are converted from their source format into
+ // interleaved int16.
+ void ReadFramesInterleavedS16(int frames_to_copy, int16* dest);
+
// Trim an AudioBuffer by removing |frames_to_trim| frames from the start.
// Timestamp and duration are adjusted to reflect the fewer frames.
// Note that repeated calls to TrimStart() may result in timestamp() and
diff --git a/chromium/media/base/audio_buffer_unittest.cc b/chromium/media/base/audio_buffer_unittest.cc
index 168445a1e19..43c763e848f 100644
--- a/chromium/media/base/audio_buffer_unittest.cc
+++ b/chromium/media/base/audio_buffer_unittest.cc
@@ -548,4 +548,46 @@ TEST(AudioBufferTest, ReadFramesInterleavedS32FromPlanarF32) {
ReadFramesInterleavedS32Test(kSampleFormatPlanarF32);
}
+static void ReadFramesInterleavedS16Test(SampleFormat sample_format) {
+ const ChannelLayout channel_layout = CHANNEL_LAYOUT_4_0;
+ const int channels = ChannelLayoutToChannelCount(channel_layout);
+ const int frames = kSampleRate / 100;
+ const base::TimeDelta duration = base::TimeDelta::FromMilliseconds(10);
+ scoped_refptr<AudioBuffer> buffer = MakeReadFramesInterleavedTestBuffer(
+ sample_format, kSampleRate, channel_layout, channels, frames);
+ EXPECT_EQ(frames, buffer->frame_count());
+ EXPECT_EQ(duration, buffer->duration());
+
+ int16* dest = new int16[frames * channels];
+ buffer->ReadFramesInterleavedS16(frames, dest);
+
+ int count = 0;
+ for (int i = 0; i < frames; ++i) {
+ for (int ch = 0; ch < channels; ++ch) {
+ EXPECT_EQ(dest[count++], (frames * ch + i));
+ }
+ }
+ delete[] dest;
+}
+
+TEST(AudioBufferTest, ReadFramesInterleavedS16FromS16) {
+ ReadFramesInterleavedS16Test(kSampleFormatS16);
+}
+
+TEST(AudioBufferTest, ReadFramesInterleavedS16FromS32) {
+ ReadFramesInterleavedS16Test(kSampleFormatS32);
+}
+
+TEST(AudioBufferTest, ReadFramesInterleavedS16FromF32) {
+ ReadFramesInterleavedS16Test(kSampleFormatF32);
+}
+
+TEST(AudioBufferTest, ReadFramesInterleavedS16FromPlanarS16) {
+ ReadFramesInterleavedS16Test(kSampleFormatPlanarS16);
+}
+
+TEST(AudioBufferTest, ReadFramesInterleavedS16FromPlanarF32) {
+ ReadFramesInterleavedS16Test(kSampleFormatPlanarF32);
+}
+
} // namespace media
diff --git a/chromium/media/base/audio_converter.cc b/chromium/media/base/audio_converter.cc
index be68ac627fa..48686046fd7 100644
--- a/chromium/media/base/audio_converter.cc
+++ b/chromium/media/base/audio_converter.cc
@@ -117,6 +117,12 @@ int AudioConverter::ChunkSize() const {
return resampler_->ChunkSize();
}
+void AudioConverter::PrimeWithSilence() {
+ if (resampler_) {
+ resampler_->PrimeWithSilence();
+ }
+}
+
void AudioConverter::ConvertWithDelay(const base::TimeDelta& initial_delay,
AudioBus* dest) {
initial_delay_ = initial_delay;
diff --git a/chromium/media/base/audio_converter.h b/chromium/media/base/audio_converter.h
index b12dcb95e7d..0b546dca2c4 100644
--- a/chromium/media/base/audio_converter.h
+++ b/chromium/media/base/audio_converter.h
@@ -90,6 +90,9 @@ class MEDIA_EXPORT AudioConverter {
// to each input's ProvideInput for more data.
int ChunkSize() const;
+ // See SincResampler::PrimeWithSilence.
+ void PrimeWithSilence();
+
bool empty() const { return transform_inputs_.empty(); }
private:
diff --git a/chromium/media/base/audio_decoder.h b/chromium/media/base/audio_decoder.h
index 44e79a4d5b3..3da8547c8ee 100644
--- a/chromium/media/base/audio_decoder.h
+++ b/chromium/media/base/audio_decoder.h
@@ -26,12 +26,14 @@ class MEDIA_EXPORT AudioDecoder {
// TODO(rileya): Now that both AudioDecoder and VideoDecoder Status enums
// match, break them into a decoder_status.h.
enum Status {
- kOk, // We're all good.
- kAborted, // We aborted as a result of Reset() or destruction.
- kDecodeError, // A decoding error occurred.
- kDecryptError // Decrypting error happened.
+ kOk, // We're all good.
+ kAborted, // We aborted as a result of Reset() or destruction.
+ kDecodeError // A decoding error occurred.
};
+ // Callback for VideoDecoder initialization.
+ typedef base::Callback<void(bool success)> InitCB;
+
// Callback for AudioDecoder to return a decoded frame whenever it becomes
// available. Only non-EOS frames should be returned via this callback.
typedef base::Callback<void(const scoped_refptr<AudioBuffer>&)> OutputCB;
@@ -54,10 +56,10 @@ class MEDIA_EXPORT AudioDecoder {
// Initializes an AudioDecoder with the given DemuxerStream, executing the
// callback upon completion.
- // |statistics_cb| is used to update global pipeline statistics.
+ // |init_cb| is used to return initialization status.
// |output_cb| is called for decoded audio buffers (see Decode()).
virtual void Initialize(const AudioDecoderConfig& config,
- const PipelineStatusCB& status_cb,
+ const InitCB& init_cb,
const OutputCB& output_cb) = 0;
// Requests samples to be decoded. Only one decode may be in flight at any
diff --git a/chromium/media/base/audio_renderer_mixer.cc b/chromium/media/base/audio_renderer_mixer.cc
index 112d7ef8b77..5b5fe49f0d2 100644
--- a/chromium/media/base/audio_renderer_mixer.cc
+++ b/chromium/media/base/audio_renderer_mixer.cc
@@ -71,6 +71,16 @@ void AudioRendererMixer::RemoveErrorCallback(const base::Closure& error_cb) {
NOTREACHED();
}
+void AudioRendererMixer::SwitchOutputDevice(
+ const std::string& device_id,
+ const GURL& security_origin,
+ const SwitchOutputDeviceCB& callback) {
+ DVLOG(1) << __FUNCTION__ << "(" << device_id << ", " << security_origin
+ << ")";
+ base::AutoLock auto_lock(lock_);
+ audio_sink_->SwitchOutputDevice(device_id, security_origin, callback);
+}
+
int AudioRendererMixer::Render(AudioBus* audio_bus,
int audio_delay_milliseconds) {
base::AutoLock auto_lock(lock_);
diff --git a/chromium/media/base/audio_renderer_mixer.h b/chromium/media/base/audio_renderer_mixer.h
index 35ed484976f..dc9ab925b24 100644
--- a/chromium/media/base/audio_renderer_mixer.h
+++ b/chromium/media/base/audio_renderer_mixer.h
@@ -6,6 +6,7 @@
#define MEDIA_BASE_AUDIO_RENDERER_MIXER_H_
#include <map>
+#include <string>
#include "base/synchronization/lock.h"
#include "base/time/time.h"
@@ -39,6 +40,10 @@ class MEDIA_EXPORT AudioRendererMixer
pause_delay_ = delay;
}
+ void SwitchOutputDevice(const std::string& device_id,
+ const GURL& security_origin,
+ const SwitchOutputDeviceCB& callback);
+
private:
// AudioRendererSink::RenderCallback implementation.
int Render(AudioBus* audio_bus, int audio_delay_milliseconds) override;
diff --git a/chromium/media/base/audio_renderer_mixer_input.cc b/chromium/media/base/audio_renderer_mixer_input.cc
index ab9f0a7ecab..6194f21ca94 100644
--- a/chromium/media/base/audio_renderer_mixer_input.cc
+++ b/chromium/media/base/audio_renderer_mixer_input.cc
@@ -5,6 +5,7 @@
#include "media/base/audio_renderer_mixer_input.h"
#include "base/bind.h"
+#include "base/location.h"
#include "base/logging.h"
#include "media/base/audio_renderer_mixer.h"
@@ -94,6 +95,19 @@ bool AudioRendererMixerInput::SetVolume(double volume) {
return true;
}
+void AudioRendererMixerInput::SwitchOutputDevice(
+ const std::string& device_id,
+ const GURL& security_origin,
+ const SwitchOutputDeviceCB& callback) {
+ DVLOG(1) << __FUNCTION__
+ << "(" << device_id << ", " << security_origin << ")";
+ if (mixer_) {
+ mixer_->SwitchOutputDevice(device_id, security_origin, callback);
+ } else {
+ callback.Run(SWITCH_OUTPUT_DEVICE_RESULT_ERROR_NOT_SUPPORTED);
+ }
+}
+
double AudioRendererMixerInput::ProvideInput(AudioBus* audio_bus,
base::TimeDelta buffer_delay) {
int frames_filled = callback_->Render(
diff --git a/chromium/media/base/audio_renderer_mixer_input.h b/chromium/media/base/audio_renderer_mixer_input.h
index d097aa874ad..06a0de1b037 100644
--- a/chromium/media/base/audio_renderer_mixer_input.h
+++ b/chromium/media/base/audio_renderer_mixer_input.h
@@ -5,6 +5,7 @@
#ifndef MEDIA_BASE_AUDIO_RENDERER_MIXER_INPUT_H_
#define MEDIA_BASE_AUDIO_RENDERER_MIXER_INPUT_H_
+#include <string>
#include <vector>
#include "base/callback.h"
@@ -32,6 +33,9 @@ class MEDIA_EXPORT AudioRendererMixerInput
void Play() override;
void Pause() override;
bool SetVolume(double volume) override;
+ void SwitchOutputDevice(const std::string& device_id,
+ const GURL& security_origin,
+ const SwitchOutputDeviceCB& callback) override;
void Initialize(const AudioParameters& params,
AudioRendererSink::RenderCallback* renderer) override;
diff --git a/chromium/media/base/audio_renderer_sink.h b/chromium/media/base/audio_renderer_sink.h
index fa1ee84c250..753135d0c26 100644
--- a/chromium/media/base/audio_renderer_sink.h
+++ b/chromium/media/base/audio_renderer_sink.h
@@ -5,16 +5,27 @@
#ifndef MEDIA_BASE_AUDIO_RENDERER_SINK_H_
#define MEDIA_BASE_AUDIO_RENDERER_SINK_H_
+#include <string>
#include <vector>
+
#include "base/basictypes.h"
+#include "base/callback.h"
#include "base/logging.h"
#include "base/memory/ref_counted.h"
+#include "media/audio/audio_output_ipc.h"
#include "media/audio/audio_parameters.h"
#include "media/base/audio_bus.h"
#include "media/base/media_export.h"
+#include "url/gurl.h"
+
+namespace base {
+class SingleThreadTaskRunner;
+}
namespace media {
+typedef base::Callback<void(SwitchOutputDeviceResult)> SwitchOutputDeviceCB;
+
// AudioRendererSink is an interface representing the end-point for
// rendered audio. An implementation is expected to
// periodically call Render() on a callback object.
@@ -56,6 +67,21 @@ class AudioRendererSink
// Returns |true| on success.
virtual bool SetVolume(double volume) = 0;
+ // Attempts to switch the audio output device.
+ // Once the attempt is finished, |callback| is invoked with the
+ // result of the operation passed as a parameter. The result is a value from
+ // the media::SwitchOutputDeviceResult enum.
+ // There is no guarantee about the thread where |callback| will
+ // be invoked, so users are advised to use media::BindToCurrentLoop() to
+ // ensure that |callback| runs on the correct thread.
+ // Note also that copy constructors and destructors for arguments bound to
+ // |callback| may run on arbitrary threads as |callback| is moved across
+ // threads. It is advisable to bind arguments such that they are released by
+ // |callback| when it runs in order to avoid surprises.
+ virtual void SwitchOutputDevice(const std::string& device_id,
+ const GURL& security_origin,
+ const SwitchOutputDeviceCB& callback) = 0;
+
protected:
friend class base::RefCountedThreadSafe<AudioRendererSink>;
virtual ~AudioRendererSink() {}
diff --git a/chromium/media/base/audio_splicer.cc b/chromium/media/base/audio_splicer.cc
index 9424b0b39d6..accff36cd30 100644
--- a/chromium/media/base/audio_splicer.cc
+++ b/chromium/media/base/audio_splicer.cc
@@ -12,27 +12,39 @@
#include "media/base/audio_bus.h"
#include "media/base/audio_decoder_config.h"
#include "media/base/audio_timestamp_helper.h"
+#include "media/base/media_log.h"
#include "media/base/vector_math.h"
namespace media {
-// Minimum gap size needed before the splicer will take action to
-// fill a gap. This avoids periodically inserting and then dropping samples
-// when the buffer timestamps are slightly off because of timestamp rounding
-// in the source content. Unit is frames.
-static const int kMinGapSize = 2;
+namespace {
+
+enum {
+ // Minimum gap size needed before the splicer will take action to
+ // fill a gap. This avoids periodically inserting and then dropping samples
+ // when the buffer timestamps are slightly off because of timestamp rounding
+ // in the source content. Unit is frames.
+ kMinGapSize = 2,
+
+ // Limits the number of MEDIA_LOG() per sanitizer instance warning the user
+ // about splicer overlaps within |kMaxTimeDeltaInMilliseconds| or gaps larger
+ // than |kMinGapSize| and less than |kMaxTimeDeltaInMilliseconds|. These
+ // warnings may be frequent for some streams, and number of sanitizer
+ // instances may be high, so keep this limit low to help reduce log spam.
+ kMaxSanitizerWarningLogs = 5,
+};
// AudioBuffer::TrimStart() is not as accurate as the timestamp helper, so
// manually adjust the duration and timestamp after trimming.
-static void AccurateTrimStart(int frames_to_trim,
- const scoped_refptr<AudioBuffer> buffer,
- const AudioTimestampHelper& timestamp_helper) {
+void AccurateTrimStart(int frames_to_trim,
+ const scoped_refptr<AudioBuffer> buffer,
+ const AudioTimestampHelper& timestamp_helper) {
buffer->TrimStart(frames_to_trim);
buffer->set_timestamp(timestamp_helper.GetTimestamp());
}
// Returns an AudioBus whose frame buffer is backed by the provided AudioBuffer.
-static scoped_ptr<AudioBus> CreateAudioBufferWrapper(
+scoped_ptr<AudioBus> CreateAudioBufferWrapper(
const scoped_refptr<AudioBuffer>& buffer) {
scoped_ptr<AudioBus> wrapper =
AudioBus::CreateWrapper(buffer->channel_count());
@@ -44,9 +56,12 @@ static scoped_ptr<AudioBus> CreateAudioBufferWrapper(
return wrapper.Pass();
}
+} // namespace
+
class AudioStreamSanitizer {
public:
- explicit AudioStreamSanitizer(int samples_per_second);
+ AudioStreamSanitizer(int samples_per_second,
+ const scoped_refptr<MediaLog>& media_log);
~AudioStreamSanitizer();
// Resets the sanitizer state by clearing the output buffers queue, and
@@ -89,12 +104,23 @@ class AudioStreamSanitizer {
typedef std::deque<scoped_refptr<AudioBuffer> > BufferQueue;
BufferQueue output_buffers_;
+ scoped_refptr<MediaLog> media_log_;
+
+ // To prevent log spam, counts the number of audio gap or overlaps warned in
+ // logs.
+ int num_warning_logs_;
+
DISALLOW_ASSIGN(AudioStreamSanitizer);
};
-AudioStreamSanitizer::AudioStreamSanitizer(int samples_per_second)
+AudioStreamSanitizer::AudioStreamSanitizer(
+ int samples_per_second,
+ const scoped_refptr<MediaLog>& media_log)
: output_timestamp_helper_(samples_per_second),
- received_end_of_stream_(false) {}
+ received_end_of_stream_(false),
+ media_log_(media_log),
+ num_warning_logs_(0) {
+}
AudioStreamSanitizer::~AudioStreamSanitizer() {}
@@ -128,7 +154,12 @@ bool AudioStreamSanitizer::AddInput(const scoped_refptr<AudioBuffer>& input) {
output_timestamp_helper_.SetBaseTimestamp(input->timestamp());
if (output_timestamp_helper_.base_timestamp() > input->timestamp()) {
- DVLOG(1) << "Input timestamp is before the base timestamp.";
+ MEDIA_LOG(ERROR, media_log_)
+ << "Audio splicing failed: unexpected timestamp sequence. base "
+ "timestamp="
+ << output_timestamp_helper_.base_timestamp().InMicroseconds()
+ << "us, input timestamp=" << input->timestamp().InMicroseconds()
+ << "us";
return false;
}
@@ -139,7 +170,13 @@ bool AudioStreamSanitizer::AddInput(const scoped_refptr<AudioBuffer>& input) {
if (std::abs(delta.InMilliseconds()) >
AudioSplicer::kMaxTimeDeltaInMilliseconds) {
- DVLOG(1) << "Timestamp delta too large: " << delta.InMicroseconds() << "us";
+ MEDIA_LOG(ERROR, media_log_)
+ << "Audio splicing failed: coded frame timestamp differs from "
+ "expected timestamp " << expected_timestamp.InMicroseconds()
+ << "us by " << delta.InMicroseconds()
+ << "us, more than threshold of +/-"
+ << AudioSplicer::kMaxTimeDeltaInMilliseconds
+ << "ms. Expected timestamp is based on decoded frames and frame rate.";
return false;
}
@@ -153,6 +190,11 @@ bool AudioStreamSanitizer::AddInput(const scoped_refptr<AudioBuffer>& input) {
}
if (frames_to_fill > 0) {
+ LIMITED_MEDIA_LOG(DEBUG, media_log_, num_warning_logs_,
+ kMaxSanitizerWarningLogs)
+ << "Audio splicer inserting silence for small gap of "
+ << delta.InMicroseconds() << "us at time "
+ << expected_timestamp.InMicroseconds() << "us.";
DVLOG(1) << "Gap detected @ " << expected_timestamp.InMicroseconds()
<< " us: " << delta.InMicroseconds() << " us";
@@ -177,6 +219,11 @@ bool AudioStreamSanitizer::AddInput(const scoped_refptr<AudioBuffer>& input) {
//
// A crossfade can't be done here because only the current buffer is available
// at this point, not previous buffers.
+ LIMITED_MEDIA_LOG(DEBUG, media_log_, num_warning_logs_,
+ kMaxSanitizerWarningLogs)
+ << "Audio splicer skipping frames for small overlap of "
+ << -delta.InMicroseconds() << "us at time "
+ << expected_timestamp.InMicroseconds() << "us.";
DVLOG(1) << "Overlap detected @ " << expected_timestamp.InMicroseconds()
<< " us: " << -delta.InMicroseconds() << " us";
@@ -227,15 +274,20 @@ bool AudioStreamSanitizer::DrainInto(AudioStreamSanitizer* output) {
return true;
}
-AudioSplicer::AudioSplicer(int samples_per_second)
+AudioSplicer::AudioSplicer(int samples_per_second,
+ const scoped_refptr<MediaLog>& media_log)
: max_crossfade_duration_(
base::TimeDelta::FromMilliseconds(kCrossfadeDurationInMilliseconds)),
splice_timestamp_(kNoTimestamp()),
max_splice_end_timestamp_(kNoTimestamp()),
- output_sanitizer_(new AudioStreamSanitizer(samples_per_second)),
- pre_splice_sanitizer_(new AudioStreamSanitizer(samples_per_second)),
- post_splice_sanitizer_(new AudioStreamSanitizer(samples_per_second)),
- have_all_pre_splice_buffers_(false) {}
+ output_sanitizer_(
+ new AudioStreamSanitizer(samples_per_second, media_log)),
+ pre_splice_sanitizer_(
+ new AudioStreamSanitizer(samples_per_second, media_log)),
+ post_splice_sanitizer_(
+ new AudioStreamSanitizer(samples_per_second, media_log)),
+ have_all_pre_splice_buffers_(false) {
+}
AudioSplicer::~AudioSplicer() {}
diff --git a/chromium/media/base/audio_splicer.h b/chromium/media/base/audio_splicer.h
index e32a9fd028c..0d35f605269 100644
--- a/chromium/media/base/audio_splicer.h
+++ b/chromium/media/base/audio_splicer.h
@@ -17,11 +17,13 @@ namespace media {
class AudioBuffer;
class AudioBus;
class AudioStreamSanitizer;
+class MediaLog;
// Helper class that handles filling gaps and resolving overlaps.
class MEDIA_EXPORT AudioSplicer {
public:
- explicit AudioSplicer(int samples_per_second);
+ AudioSplicer(int samples_per_second,
+ const scoped_refptr<MediaLog>& media_log);
~AudioSplicer();
enum {
diff --git a/chromium/media/base/audio_splicer_unittest.cc b/chromium/media/base/audio_splicer_unittest.cc
index d64302c7627..7b226bb0759 100644
--- a/chromium/media/base/audio_splicer_unittest.cc
+++ b/chromium/media/base/audio_splicer_unittest.cc
@@ -25,7 +25,7 @@ static const int kDefaultBufferSize = 100;
class AudioSplicerTest : public ::testing::Test {
public:
AudioSplicerTest()
- : splicer_(kDefaultSampleRate),
+ : splicer_(kDefaultSampleRate, new MediaLog()),
input_timestamp_helper_(kDefaultSampleRate) {
input_timestamp_helper_.SetBaseTimestamp(base::TimeDelta());
}
diff --git a/chromium/media/base/audio_video_metadata_extractor.cc b/chromium/media/base/audio_video_metadata_extractor.cc
index fd666f6c919..0ba36e12062 100644
--- a/chromium/media/base/audio_video_metadata_extractor.cc
+++ b/chromium/media/base/audio_video_metadata_extractor.cc
@@ -23,7 +23,7 @@ void OnError(bool* succeeded) {
// Returns true if the |tag| matches |expected_key|.
bool ExtractString(AVDictionaryEntry* tag, const char* expected_key,
std::string* destination) {
- if (!LowerCaseEqualsASCII(std::string(tag->key), expected_key))
+ if (!base::LowerCaseEqualsASCII(std::string(tag->key), expected_key))
return false;
if (destination->empty())
@@ -35,7 +35,7 @@ bool ExtractString(AVDictionaryEntry* tag, const char* expected_key,
// Returns true if the |tag| matches |expected_key|.
bool ExtractInt(AVDictionaryEntry* tag, const char* expected_key,
int* destination) {
- if (!LowerCaseEqualsASCII(std::string(tag->key), expected_key))
+ if (!base::LowerCaseEqualsASCII(std::string(tag->key), expected_key))
return false;
int temporary = -1;
diff --git a/chromium/media/base/audio_video_metadata_extractor_unittest.cc b/chromium/media/base/audio_video_metadata_extractor_unittest.cc
index 0af6e165e7b..d0927d5a64e 100644
--- a/chromium/media/base/audio_video_metadata_extractor_unittest.cc
+++ b/chromium/media/base/audio_video_metadata_extractor_unittest.cc
@@ -188,9 +188,8 @@ TEST(AudioVideoMetadataExtractorTest, AudioMP3) {
EXPECT_EQ(0u, extractor->stream_infos()[1].tags.size());
EXPECT_EQ("png", extractor->stream_infos()[2].type);
- EXPECT_EQ(2u, extractor->stream_infos()[2].tags.size());
+ EXPECT_EQ(1u, extractor->stream_infos()[2].tags.size());
EXPECT_EQ("Other", extractor->stream_infos()[2].tags.find("comment")->second);
- EXPECT_EQ("", extractor->stream_infos()[2].tags.find("title")->second);
EXPECT_EQ(1u, extractor->attached_images_bytes().size());
EXPECT_EQ(155752u, extractor->attached_images_bytes()[0].size());
diff --git a/chromium/media/base/bind_to_current_loop.h b/chromium/media/base/bind_to_current_loop.h
index c9eda2ac3f8..d34afe4094c 100644
--- a/chromium/media/base/bind_to_current_loop.h
+++ b/chromium/media/base/bind_to_current_loop.h
@@ -7,7 +7,6 @@
#include "base/bind.h"
#include "base/location.h"
-#include "base/message_loop/message_loop_proxy.h"
#include "base/single_thread_task_runner.h"
#include "base/thread_task_runner_handle.h"
diff --git a/chromium/media/base/bit_reader_core.cc b/chromium/media/base/bit_reader_core.cc
index f292f97a1bc..042e73c7e69 100644
--- a/chromium/media/base/bit_reader_core.cc
+++ b/chromium/media/base/bit_reader_core.cc
@@ -4,8 +4,9 @@
#include "media/base/bit_reader_core.h"
-#include <base/port.h>
-#include <base/sys_byteorder.h>
+#include <stdint.h>
+
+#include "base/sys_byteorder.h"
namespace {
const int kRegWidthInBits = sizeof(uint64) * 8;
@@ -35,7 +36,7 @@ bool BitReaderCore::ReadFlag(bool* flag) {
if (nbits_ == 0 && !Refill(1))
return false;
- *flag = (reg_ & (GG_UINT64_C(1) << (kRegWidthInBits - 1))) != 0;
+ *flag = (reg_ & (UINT64_C(1) << (kRegWidthInBits - 1))) != 0;
reg_ <<= 1;
nbits_--;
bits_read_++;
diff --git a/chromium/media/base/cdm_callback_promise.h b/chromium/media/base/cdm_callback_promise.h
index e08d95f35ca..8685c17a240 100644
--- a/chromium/media/base/cdm_callback_promise.h
+++ b/chromium/media/base/cdm_callback_promise.h
@@ -34,7 +34,7 @@ class MEDIA_EXPORT CdmCallbackPromise : public CdmPromiseTemplate<T...> {
const std::string& error_message) override;
private:
- using media::CdmPromiseTemplate<T...>::MarkPromiseSettled;
+ using CdmPromiseTemplate<T...>::MarkPromiseSettled;
base::Callback<void(const T&...)> resolve_cb_;
PromiseRejectedCB reject_cb_;
diff --git a/chromium/media/base/cdm_config.h b/chromium/media/base/cdm_config.h
index 4b57ebb96c1..6f723485f7b 100644
--- a/chromium/media/base/cdm_config.h
+++ b/chromium/media/base/cdm_config.h
@@ -12,7 +12,7 @@ namespace media {
// The runtime configuration for new CDM instances as computed by
// |requestMediaKeySystemAccess|. This is in some sense the Chromium-side
// counterpart of Blink's WebMediaKeySystemConfiguration.
-struct MEDIA_EXPORT CdmConfig {
+struct CdmConfig {
// Allow access to a distinctive identifier.
bool allow_distinctive_identifier = false;
diff --git a/chromium/media/base/cdm_context.cc b/chromium/media/base/cdm_context.cc
index ec2b633b3a3..b53fc7d3a54 100644
--- a/chromium/media/base/cdm_context.cc
+++ b/chromium/media/base/cdm_context.cc
@@ -6,9 +6,17 @@
namespace media {
-CdmContext::CdmContext() {}
+CdmContext::CdmContext() {
+}
+
+CdmContext::~CdmContext() {
+}
-CdmContext::~CdmContext() {}
+CdmContextProvider::CdmContextProvider() {
+}
+
+CdmContextProvider::~CdmContextProvider() {
+}
void IgnoreCdmAttached(bool success) {
}
diff --git a/chromium/media/base/cdm_context.h b/chromium/media/base/cdm_context.h
index a0861c4e5db..7d897608cc3 100644
--- a/chromium/media/base/cdm_context.h
+++ b/chromium/media/base/cdm_context.h
@@ -15,6 +15,7 @@ class Decryptor;
// An interface representing the context that a media pipeline needs from a
// content decryption module (CDM) to decrypt (and decode) encrypted buffers.
+// Only used for implementing SetCdm().
class MEDIA_EXPORT CdmContext {
public:
// Indicates an invalid CDM ID. See GetCdmId() for details.
@@ -22,16 +23,16 @@ class MEDIA_EXPORT CdmContext {
virtual ~CdmContext();
- // Gets the Decryptor object associated with the CDM. Returns NULL if the CDM
- // does not support a Decryptor. The returned object is only guaranteed to be
- // valid during the CDM's lifetime.
+ // Gets the Decryptor object associated with the CDM. Returns nullptr if the
+ // CDM does not support a Decryptor. Must not return nullptr if GetCdmId()
+ // returns kInvalidCdmId. The returned object is only guaranteed to be valid
+ // during the CDM's lifetime.
virtual Decryptor* GetDecryptor() = 0;
- // Returns an ID associated with the CDM, which can be used to locate the real
- // CDM instance. This is useful when the CDM is hosted remotely, e.g. in a
- // different process.
- // Returns kInvalidCdmId if the CDM cannot be used remotely. In this case,
- // GetDecryptor() should return a non-null Decryptor.
+ // Returns an ID that identifies a CDM, or kInvalidCdmId. The interpretation
+ // is implementation-specific; current implementations use the ID to locate a
+ // remote CDM in a different process. The return value will not be
+ // kInvalidCdmId if GetDecryptor() returns nullptr.
virtual int GetCdmId() const = 0;
protected:
@@ -41,6 +42,25 @@ class MEDIA_EXPORT CdmContext {
DISALLOW_COPY_AND_ASSIGN(CdmContext);
};
+// An interface for looking up CdmContext objects by the CDM ID.
+class MEDIA_EXPORT CdmContextProvider {
+ public:
+ virtual ~CdmContextProvider();
+
+ // Returns the CdmContext corresponding to |cdm_id|. Returns nullptr if no
+ // such CdmContext can be found.
+ // Note: Calling GetCdmId() on the returned CdmContext returns kInvalidCdmId
+ // (in all current cases) because the CDM will be local in the process where
+ // GetCdmContext() is called.
+ virtual CdmContext* GetCdmContext(int cdm_id) = 0;
+
+ protected:
+ CdmContextProvider();
+
+ private:
+ DISALLOW_COPY_AND_ASSIGN(CdmContextProvider);
+};
+
// Callback to notify that the CdmContext has been completely attached to
// the media pipeline. Parameter indicates whether the operation succeeded.
typedef base::Callback<void(bool)> CdmAttachedCB;
diff --git a/chromium/media/base/cdm_initialized_promise.cc b/chromium/media/base/cdm_initialized_promise.cc
new file mode 100644
index 00000000000..dcc9c79c830
--- /dev/null
+++ b/chromium/media/base/cdm_initialized_promise.cc
@@ -0,0 +1,30 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/base/cdm_initialized_promise.h"
+
+namespace media {
+
+CdmInitializedPromise::CdmInitializedPromise(const CdmCreatedCB& cdm_created_cb,
+ scoped_ptr<MediaKeys> cdm)
+ : cdm_created_cb_(cdm_created_cb), cdm_(cdm.Pass()) {
+}
+
+CdmInitializedPromise::~CdmInitializedPromise() {
+}
+
+void CdmInitializedPromise::resolve() {
+ MarkPromiseSettled();
+ cdm_created_cb_.Run(cdm_.Pass(), "");
+}
+
+void CdmInitializedPromise::reject(MediaKeys::Exception exception_code,
+ uint32 system_code,
+ const std::string& error_message) {
+ MarkPromiseSettled();
+ cdm_created_cb_.Run(nullptr, error_message);
+ // Usually after this |this| (and the |cdm_| within it) will be destroyed.
+}
+
+} // namespace media
diff --git a/chromium/media/base/cdm_initialized_promise.h b/chromium/media/base/cdm_initialized_promise.h
new file mode 100644
index 00000000000..bc4a56970ce
--- /dev/null
+++ b/chromium/media/base/cdm_initialized_promise.h
@@ -0,0 +1,38 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_BASE_CDM_INITIALIZED_PROMISE_H_
+#define MEDIA_BASE_CDM_INITIALIZED_PROMISE_H_
+
+#include "base/memory/scoped_ptr.h"
+#include "media/base/cdm_factory.h"
+#include "media/base/cdm_promise.h"
+#include "media/base/media_export.h"
+#include "media/base/media_keys.h"
+
+namespace media {
+
+// Promise to be resolved when the CDM is initialized. It owns the MediaKeys
+// object until the initialization completes, which it then passes to
+// |cdm_created_cb|.
+class MEDIA_EXPORT CdmInitializedPromise : public SimpleCdmPromise {
+ public:
+ CdmInitializedPromise(const CdmCreatedCB& cdm_created_cb,
+ scoped_ptr<MediaKeys> cdm);
+ ~CdmInitializedPromise() override;
+
+ // SimpleCdmPromise implementation.
+ void resolve() override;
+ void reject(MediaKeys::Exception exception_code,
+ uint32 system_code,
+ const std::string& error_message) override;
+
+ private:
+ CdmCreatedCB cdm_created_cb_;
+ scoped_ptr<MediaKeys> cdm_;
+};
+
+} // namespace media
+
+#endif // MEDIA_BASE_CDM_INITIALIZED_PROMISE_H_
diff --git a/chromium/media/base/demuxer.h b/chromium/media/base/demuxer.h
index 3075b595765..91f389d0175 100644
--- a/chromium/media/base/demuxer.h
+++ b/chromium/media/base/demuxer.h
@@ -56,6 +56,9 @@ class MEDIA_EXPORT Demuxer : public DemuxerStreamProvider {
Demuxer();
~Demuxer() override;
+ // Returns the name of the demuxer for logging purpose.
+ virtual std::string GetDisplayName() const = 0;
+
// Completes initialization of the demuxer.
//
// The demuxer does not own |host| as it is guaranteed to outlive the
diff --git a/chromium/media/base/demuxer_perftest.cc b/chromium/media/base/demuxer_perftest.cc
index 9708e584222..90e19896651 100644
--- a/chromium/media/base/demuxer_perftest.cc
+++ b/chromium/media/base/demuxer_perftest.cc
@@ -177,7 +177,7 @@ static void RunDemuxerBenchmark(const std::string& filename) {
Demuxer::EncryptedMediaInitDataCB encrypted_media_init_data_cb =
base::Bind(&OnEncryptedMediaInitData);
- FFmpegDemuxer demuxer(message_loop.message_loop_proxy(), &data_source,
+ FFmpegDemuxer demuxer(message_loop.task_runner(), &data_source,
encrypted_media_init_data_cb, new MediaLog());
demuxer.Initialize(&demuxer_host,
diff --git a/chromium/media/base/fake_audio_renderer_sink.cc b/chromium/media/base/fake_audio_renderer_sink.cc
index d42db6de1b9..4c4233b5e08 100644
--- a/chromium/media/base/fake_audio_renderer_sink.cc
+++ b/chromium/media/base/fake_audio_renderer_sink.cc
@@ -4,7 +4,10 @@
#include "media/base/fake_audio_renderer_sink.h"
+#include "base/bind.h"
+#include "base/location.h"
#include "base/logging.h"
+#include "base/single_thread_task_runner.h"
namespace media {
@@ -52,6 +55,13 @@ bool FakeAudioRendererSink::SetVolume(double volume) {
return true;
}
+void FakeAudioRendererSink::SwitchOutputDevice(
+ const std::string& device_id,
+ const GURL& security_origin,
+ const SwitchOutputDeviceCB& callback) {
+ callback.Run(SWITCH_OUTPUT_DEVICE_RESULT_SUCCESS);
+}
+
bool FakeAudioRendererSink::Render(AudioBus* dest, int audio_delay_milliseconds,
int* frames_written) {
if (state_ != kPlaying)
diff --git a/chromium/media/base/fake_audio_renderer_sink.h b/chromium/media/base/fake_audio_renderer_sink.h
index c3c313ae531..1982ba3d373 100644
--- a/chromium/media/base/fake_audio_renderer_sink.h
+++ b/chromium/media/base/fake_audio_renderer_sink.h
@@ -5,6 +5,8 @@
#ifndef MEDIA_BASE_FAKE_AUDIO_RENDERER_SINK_H_
#define MEDIA_BASE_FAKE_AUDIO_RENDERER_SINK_H_
+#include <string>
+
#include "media/audio/audio_parameters.h"
#include "media/base/audio_renderer_sink.h"
@@ -30,6 +32,9 @@ class FakeAudioRendererSink : public AudioRendererSink {
void Pause() override;
void Play() override;
bool SetVolume(double volume) override;
+ void SwitchOutputDevice(const std::string& device_id,
+ const GURL& security_origin,
+ const SwitchOutputDeviceCB& callback) override;
// Attempts to call Render() on the callback provided to
// Initialize() with |dest| and |audio_delay_milliseconds|.
diff --git a/chromium/media/base/fake_demuxer_stream.cc b/chromium/media/base/fake_demuxer_stream.cc
index f0fbe9b5057..ba10ae71a05 100644
--- a/chromium/media/base/fake_demuxer_stream.cc
+++ b/chromium/media/base/fake_demuxer_stream.cc
@@ -144,8 +144,8 @@ void FakeDemuxerStream::UpdateVideoDecoderConfig() {
const gfx::Rect kVisibleRect(kStartWidth, kStartHeight);
video_decoder_config_.Initialize(
kCodecVP8, VIDEO_CODEC_PROFILE_UNKNOWN, VideoFrame::YV12,
- next_coded_size_, kVisibleRect, next_coded_size_,
- NULL, 0, is_encrypted_, false);
+ VideoFrame::COLOR_SPACE_UNSPECIFIED, next_coded_size_, kVisibleRect,
+ next_coded_size_, NULL, 0, is_encrypted_, false);
next_coded_size_.Enlarge(kWidthDelta, kHeightDelta);
}
diff --git a/chromium/media/base/key_systems.cc b/chromium/media/base/key_systems.cc
index 1cd202ca425..32fa1c7314f 100644
--- a/chromium/media/base/key_systems.cc
+++ b/chromium/media/base/key_systems.cc
@@ -611,8 +611,8 @@ bool KeySystemsImpl::UseAesDecryptor(
KeySystemInfoMap::const_iterator key_system_iter =
concrete_key_system_map_.find(concrete_key_system);
if (key_system_iter == concrete_key_system_map_.end()) {
- DLOG(FATAL) << concrete_key_system << " is not a known concrete system";
- return false;
+ DLOG(ERROR) << concrete_key_system << " is not a known concrete system";
+ return false;
}
return key_system_iter->second.use_aes_decryptor;
@@ -674,12 +674,12 @@ EmeConfigRule KeySystemsImpl::GetContentTypeConfigRule(
SupportedCodecs media_type_codec_mask = EME_CODEC_NONE;
switch (media_type) {
case EmeMediaType::AUDIO:
- if (!StartsWithASCII(container_mime_type, "audio/", true))
+ if (!base::StartsWithASCII(container_mime_type, "audio/", true))
return EmeConfigRule::NOT_SUPPORTED;
media_type_codec_mask = audio_codec_mask_;
break;
case EmeMediaType::VIDEO:
- if (!StartsWithASCII(container_mime_type, "video/", true))
+ if (!base::StartsWithASCII(container_mime_type, "video/", true))
return EmeConfigRule::NOT_SUPPORTED;
media_type_codec_mask = video_codec_mask_;
break;
diff --git a/chromium/media/base/key_systems_unittest.cc b/chromium/media/base/key_systems_unittest.cc
index a1e95e06c78..b97852d132f 100644
--- a/chromium/media/base/key_systems_unittest.cc
+++ b/chromium/media/base/key_systems_unittest.cc
@@ -19,24 +19,6 @@
#include "media/base/media_client.h"
#include "testing/gtest/include/gtest/gtest.h"
-// Death tests are not always available, including on Android.
-// EXPECT_DEBUG_DEATH_PORTABLE executes tests correctly except in the case that
-// death tests are not available and NDEBUG is not defined.
-#if defined(GTEST_HAS_DEATH_TEST) && !defined(OS_ANDROID)
-#define EXPECT_DEBUG_DEATH_PORTABLE(statement, regex) \
- EXPECT_DEBUG_DEATH(statement, regex)
-#else
-#if defined(NDEBUG)
-#define EXPECT_DEBUG_DEATH_PORTABLE(statement, regex) \
- do { statement; } while (false)
-#else
-#include "base/logging.h"
-#define EXPECT_DEBUG_DEATH_PORTABLE(statement, regex) \
- LOG(WARNING) << "Death tests are not supported on this platform.\n" \
- << "Statement '" #statement "' cannot be verified.";
-#endif // defined(NDEBUG)
-#endif // defined(GTEST_HAS_DEATH_TEST) && !defined(OS_ANDROID)
-
namespace media {
// These are the (fake) key systems that are registered for these tests.
@@ -406,12 +388,7 @@ TEST_F(KeySystemsTest, Basic_UnrecognizedKeySystem) {
EXPECT_FALSE(IsSupportedKeySystem(kUnrecognized));
EXPECT_EQ("Unknown", GetKeySystemNameForUMA(kUnrecognized));
-
- bool can_use = false;
- EXPECT_DEBUG_DEATH_PORTABLE(
- can_use = CanUseAesDecryptor(kUnrecognized),
- "x-org.example.unrecognized is not a known concrete system");
- EXPECT_FALSE(can_use);
+ EXPECT_FALSE(CanUseAesDecryptor(kUnrecognized));
#if defined(ENABLE_PEPPER_CDMS)
std::string type;
@@ -494,10 +471,8 @@ TEST_F(KeySystemsTest, Parent_NoParentRegistered) {
// The parent is not supported for most things.
EXPECT_EQ("Unknown", GetKeySystemNameForUMA(kUsesAesParent));
- bool result = false;
- EXPECT_DEBUG_DEATH_PORTABLE(result = CanUseAesDecryptor(kUsesAesParent),
- "x-org.example is not a known concrete system");
- EXPECT_FALSE(result);
+ EXPECT_FALSE(CanUseAesDecryptor(kUsesAesParent));
+
#if defined(ENABLE_PEPPER_CDMS)
std::string type;
EXPECT_DEBUG_DEATH(type = GetPepperType(kUsesAesParent),
@@ -613,10 +588,8 @@ TEST_F(KeySystemsTest, Parent_ParentRegistered) {
// The parent is not supported for most things.
EXPECT_EQ("Unknown", GetKeySystemNameForUMA(kExternalParent));
- bool result = false;
- EXPECT_DEBUG_DEATH_PORTABLE(result = CanUseAesDecryptor(kExternalParent),
- "x-com.example is not a known concrete system");
- EXPECT_FALSE(result);
+ EXPECT_FALSE(CanUseAesDecryptor(kExternalParent));
+
#if defined(ENABLE_PEPPER_CDMS)
std::string type;
EXPECT_DEBUG_DEATH(type = GetPepperType(kExternalParent),
diff --git a/chromium/media/base/mac/coremedia_glue.mm b/chromium/media/base/mac/coremedia_glue.mm
index 61e9199f05b..e161a6e584d 100644
--- a/chromium/media/base/mac/coremedia_glue.mm
+++ b/chromium/media/base/mac/coremedia_glue.mm
@@ -7,8 +7,8 @@
#include <dlfcn.h>
#import <Foundation/Foundation.h>
-#include "base/logging.h"
#include "base/lazy_instance.h"
+#include "base/logging.h"
namespace {
diff --git a/chromium/media/base/mac/video_frame_mac.cc b/chromium/media/base/mac/video_frame_mac.cc
index e532ddc7511..9fd290465c0 100644
--- a/chromium/media/base/mac/video_frame_mac.cc
+++ b/chromium/media/base/mac/video_frame_mac.cc
@@ -43,7 +43,7 @@ WrapVideoFrameInCVPixelBuffer(const VideoFrame& frame) {
// represent I420 and NV12 frames. In addition, VideoFrame does not carry
// colorimetric information, so this function assumes standard video range
// and ITU Rec 709 primaries.
- VideoFrame::Format video_frame_format = frame.format();
+ const VideoFrame::Format video_frame_format = frame.format();
OSType cv_format;
if (video_frame_format == VideoFrame::Format::I420) {
cv_format = kCVPixelFormatType_420YpCbCr8Planar;
diff --git a/chromium/media/base/mac/video_frame_mac_unittests.cc b/chromium/media/base/mac/video_frame_mac_unittests.cc
index 96c3bc31a0c..aa037955485 100644
--- a/chromium/media/base/mac/video_frame_mac_unittests.cc
+++ b/chromium/media/base/mac/video_frame_mac_unittests.cc
@@ -61,13 +61,12 @@ TEST(VideoFrameMac, CheckBasicAttributes) {
}
TEST(VideoFrameMac, CheckFormats) {
+ // CreateFrame() does not support non planar YUV, e.g. NV12.
const FormatPair format_pairs[] = {
{VideoFrame::I420, kCVPixelFormatType_420YpCbCr8Planar},
-
{VideoFrame::YV12, 0},
{VideoFrame::YV16, 0},
{VideoFrame::YV12A, 0},
- {VideoFrame::YV12J, 0},
{VideoFrame::YV24, 0},
};
@@ -93,7 +92,8 @@ TEST(VideoFrameMac, CheckLifetime) {
int instances_destroyed = 0;
auto wrapper_frame = VideoFrame::WrapVideoFrame(
- frame, frame->visible_rect(), frame->natural_size(),
+ frame, frame->visible_rect(), frame->natural_size());
+ wrapper_frame->AddDestructionObserver(
base::Bind(&Increment, &instances_destroyed));
ASSERT_TRUE(wrapper_frame.get());
diff --git a/chromium/media/base/media.cc b/chromium/media/base/media.cc
index c85d4654d3b..388c6e0eb35 100644
--- a/chromium/media/base/media.cc
+++ b/chromium/media/base/media.cc
@@ -27,6 +27,10 @@ class MediaInitializer {
InitializeCPUSpecificYUVConversions();
#if !defined(MEDIA_DISABLE_FFMPEG)
+ // Initialize CPU flags outside of the sandbox as this may query /proc for
+ // details on the current CPU for NEON, VFP, etc optimizations.
+ av_get_cpu_flags();
+
// Disable logging as it interferes with layout tests.
av_log_set_level(AV_LOG_QUIET);
diff --git a/chromium/media/base/media_keys.h b/chromium/media/base/media_keys.h
index ba2a680d39f..dc4da6c4a41 100644
--- a/chromium/media/base/media_keys.h
+++ b/chromium/media/base/media_keys.h
@@ -72,7 +72,8 @@ class MEDIA_EXPORT MediaKeys{
enum SessionType {
TEMPORARY_SESSION,
PERSISTENT_LICENSE_SESSION,
- PERSISTENT_RELEASE_MESSAGE_SESSION
+ PERSISTENT_RELEASE_MESSAGE_SESSION,
+ SESSION_TYPE_MAX = PERSISTENT_RELEASE_MESSAGE_SESSION
};
// Type of message being sent to the application.
diff --git a/chromium/media/base/media_log.cc b/chromium/media/base/media_log.cc
index c89c4151fb6..f5f58de73f4 100644
--- a/chromium/media/base/media_log.cc
+++ b/chromium/media/base/media_log.cc
@@ -4,7 +4,6 @@
#include "media/base/media_log.h"
-
#include "base/atomic_sequence_num.h"
#include "base/json/json_writer.h"
#include "base/values.h"
@@ -47,10 +46,6 @@ std::string MediaLog::EventTypeToString(MediaLogEvent::Type type) {
return "WEBMEDIAPLAYER_CREATED";
case MediaLogEvent::WEBMEDIAPLAYER_DESTROYED:
return "WEBMEDIAPLAYER_DESTROYED";
- case MediaLogEvent::PIPELINE_CREATED:
- return "PIPELINE_CREATED";
- case MediaLogEvent::PIPELINE_DESTROYED:
- return "PIPELINE_DESTROYED";
case MediaLogEvent::LOAD:
return "LOAD";
case MediaLogEvent::SEEK:
@@ -100,8 +95,6 @@ std::string MediaLog::PipelineStatusToString(PipelineStatus status) {
return "pipeline: network error";
case PIPELINE_ERROR_DECODE:
return "pipeline: decode error";
- case PIPELINE_ERROR_DECRYPT:
- return "pipeline: decrypt error";
case PIPELINE_ERROR_ABORT:
return "pipeline: abort";
case PIPELINE_ERROR_INITIALIZATION_FAILED:
@@ -139,7 +132,7 @@ std::string MediaLog::MediaEventToLogString(const MediaLogEvent& event) {
media::MediaLog::PipelineStatusToString(status);
}
std::string params_json;
- base::JSONWriter::Write(&event.params, &params_json);
+ base::JSONWriter::Write(event.params, &params_json);
return EventTypeToString(event.type) + " " + params_json;
}
@@ -282,10 +275,16 @@ LogHelper::LogHelper(MediaLog::MediaLogLevel level, const LogCB& log_cb)
: level_(level), log_cb_(log_cb) {
}
+LogHelper::LogHelper(MediaLog::MediaLogLevel level,
+ const scoped_refptr<MediaLog>& media_log)
+ : level_(level), media_log_(media_log) {
+}
+
LogHelper::~LogHelper() {
- if (log_cb_.is_null())
- return;
- log_cb_.Run(level_, stream_.str());
+ if (!log_cb_.is_null())
+ log_cb_.Run(level_, stream_.str());
+ else if (media_log_)
+ media_log_->AddLogEvent(level_, stream_.str());
}
} //namespace media
diff --git a/chromium/media/base/media_log.h b/chromium/media/base/media_log.h
index 8b40b5d4e61..5394fff3963 100644
--- a/chromium/media/base/media_log.h
+++ b/chromium/media/base/media_log.h
@@ -18,7 +18,6 @@
namespace media {
-
class MEDIA_EXPORT MediaLog : public base::RefCountedThreadSafe<MediaLog> {
public:
enum MediaLogLevel {
@@ -87,10 +86,12 @@ class MEDIA_EXPORT MediaLog : public base::RefCountedThreadSafe<MediaLog> {
// Second parameter - The string to add to the log.
typedef base::Callback<void(MediaLog::MediaLogLevel, const std::string&)> LogCB;
-// Helper class to make it easier to use log_cb like DVLOG().
-class LogHelper {
+// Helper class to make it easier to use LogCB or MediaLog like DVLOG().
+class MEDIA_EXPORT LogHelper {
public:
LogHelper(MediaLog::MediaLogLevel level, const LogCB& log_cb);
+ LogHelper(MediaLog::MediaLogLevel level,
+ const scoped_refptr<MediaLog>& media_log);
~LogHelper();
std::ostream& stream() { return stream_; }
@@ -98,18 +99,34 @@ class LogHelper {
private:
MediaLog::MediaLogLevel level_;
LogCB log_cb_;
+ const scoped_refptr<MediaLog> media_log_;
std::stringstream stream_;
};
// Provides a stringstream to collect a log entry to pass to the provided
-// LogCB at the requested level.
-#define MEDIA_LOG(level, log_cb) \
- LogHelper((MediaLog::MEDIALOG_##level), (log_cb)).stream()
-
-// Logs only while count < max. Increments count for each log. Use LAZY_STREAM
-// to avoid wasteful evaluation of subsequent stream arguments.
-#define LIMITED_MEDIA_LOG(level, log_cb, count, max) \
- LAZY_STREAM(MEDIA_LOG(level, log_cb), (count) < (max) && ((count)++ || true))
+// logger (LogCB or MediaLog) at the requested level.
+#define MEDIA_LOG(level, logger) \
+ LogHelper((MediaLog::MEDIALOG_##level), (logger)).stream()
+
+// Logs only while |count| < |max|, increments |count| for each log, and warns
+// in the log if |count| has just reached |max|.
+// Multiple short-circuit evaluations are involved in this macro:
+// 1) LAZY_STREAM avoids wasteful MEDIA_LOG and evaluation of subsequent stream
+// arguments if |count| is >= |max|, and
+// 2) the |condition| given to LAZY_STREAM itself short-circuits to prevent
+// incrementing |count| beyond |max|.
+// Note that LAZY_STREAM guarantees exactly one evaluation of |condition|, so
+// |count| will be incremented at most once each time this macro runs.
+// The "|| true" portion of |condition| lets logging occur correctly when
+// |count| < |max| and |count|++ is 0.
+// TODO(wolenetz,chcunningham): Consider using a helper class instead of a macro
+// to improve readability.
+#define LIMITED_MEDIA_LOG(level, logger, count, max) \
+ LAZY_STREAM(MEDIA_LOG(level, logger), \
+ (count) < (max) && ((count)++ || true)) \
+ << (((count) == (max)) ? "(Log limit reached. Further similar entries " \
+ "may be suppressed): " \
+ : "")
} // namespace media
diff --git a/chromium/media/base/media_log_event.h b/chromium/media/base/media_log_event.h
index db687fbb176..66aa5f7826d 100644
--- a/chromium/media/base/media_log_event.h
+++ b/chromium/media/base/media_log_event.h
@@ -32,11 +32,6 @@ struct MediaLogEvent {
WEBMEDIAPLAYER_CREATED,
WEBMEDIAPLAYER_DESTROYED,
- // A Pipeline is being created or destroyed.
- // params: none.
- PIPELINE_CREATED,
- PIPELINE_DESTROYED,
-
// A media player is loading a resource.
// params: "url": <URL of the resource>.
LOAD,
@@ -82,7 +77,7 @@ struct MediaLogEvent {
// "buffer_end": <last buffered byte>.
BUFFERED_EXTENTS_CHANGED,
- // Error log reported by media code such as details of an MSE parse error.
+ // Error log reported by media code such as reasons of playback error.
MEDIA_ERROR_LOG_ENTRY,
// params: "error": Error string describing the error detected.
diff --git a/chromium/media/base/media_switches.cc b/chromium/media/base/media_switches.cc
index 05a04b713f5..0b0154628df 100644
--- a/chromium/media/base/media_switches.cc
+++ b/chromium/media/base/media_switches.cc
@@ -16,10 +16,6 @@ const char kDisableNewVideoRenderer[] = "disable-new-video-renderer";
const char kVideoThreads[] = "video-threads";
#if defined(OS_ANDROID)
-// Disables the infobar popup for accessing protected media identifier.
-const char kDisableInfobarForProtectedMediaIdentifier[] =
- "disable-infobar-for-protected-media-identifier";
-
// Sets the MediaSource player that uses the separate media thread
const char kEnableMediaThreadForMediaPlayback[] =
"enable-media-thread-for-media-playback";
@@ -32,6 +28,11 @@ const char kAlsaInputDevice[] = "alsa-input-device";
const char kAlsaOutputDevice[] = "alsa-output-device";
#endif
+// Use GpuMemoryBuffers for Video Capture when this is an option for the device.
+// Experimental, see http://crbug.com/503835 and http://crbug.com/440843.
+const char kUseGpuMemoryBuffersForCapture[] =
+ "use-gpu-memory-buffers-for-capture";
+
#if defined(OS_MACOSX)
// AVFoundation is available in versions 10.7 and onwards, and is to be used
// http://crbug.com/288562 for both audio and video device monitoring and for
diff --git a/chromium/media/base/media_switches.h b/chromium/media/base/media_switches.h
index 4d7aad7c239..c8c8488d95d 100644
--- a/chromium/media/base/media_switches.h
+++ b/chromium/media/base/media_switches.h
@@ -19,7 +19,6 @@ MEDIA_EXPORT extern const char kDisableNewVideoRenderer[];
MEDIA_EXPORT extern const char kVideoThreads[];
#if defined(OS_ANDROID)
-MEDIA_EXPORT extern const char kDisableInfobarForProtectedMediaIdentifier[];
MEDIA_EXPORT extern const char kEnableMediaThreadForMediaPlayback[];
#endif
@@ -28,6 +27,8 @@ MEDIA_EXPORT extern const char kAlsaInputDevice[];
MEDIA_EXPORT extern const char kAlsaOutputDevice[];
#endif
+MEDIA_EXPORT extern const char kUseGpuMemoryBuffersForCapture[];
+
#if defined(OS_MACOSX)
MEDIA_EXPORT extern const char kEnableAVFoundation[];
MEDIA_EXPORT extern const char kForceQTKit[];
diff --git a/chromium/media/base/mime_util.cc b/chromium/media/base/mime_util.cc
new file mode 100644
index 00000000000..535837c1474
--- /dev/null
+++ b/chromium/media/base/mime_util.cc
@@ -0,0 +1,658 @@
+// Copyright 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <map>
+
+#include "base/containers/hash_tables.h"
+#include "base/lazy_instance.h"
+#include "base/strings/string_number_conversions.h"
+#include "base/strings/string_split.h"
+#include "base/strings/string_util.h"
+#include "build/build_config.h"
+#include "media/base/mime_util.h"
+
+#if defined(OS_ANDROID)
+#include "base/android/build_info.h"
+#endif
+
+namespace media {
+
+// Singleton utility class for mime types.
+class MimeUtil {
+ public:
+ enum Codec {
+ INVALID_CODEC,
+ PCM,
+ MP3,
+ MPEG2_AAC_LC,
+ MPEG2_AAC_MAIN,
+ MPEG2_AAC_SSR,
+ MPEG4_AAC_LC,
+ MPEG4_AAC_SBR_v1,
+ MPEG4_AAC_SBR_PS_v2,
+ VORBIS,
+ OPUS,
+ H264_BASELINE,
+ H264_MAIN,
+ H264_HIGH,
+ VP8,
+ VP9,
+ THEORA
+ };
+
+ bool IsSupportedMediaMimeType(const std::string& mime_type) const;
+
+ bool AreSupportedMediaCodecs(const std::vector<std::string>& codecs) const;
+
+ void ParseCodecString(const std::string& codecs,
+ std::vector<std::string>* codecs_out,
+ bool strip);
+
+ bool IsStrictMediaMimeType(const std::string& mime_type) const;
+ SupportsType IsSupportedStrictMediaMimeType(
+ const std::string& mime_type,
+ const std::vector<std::string>& codecs) const;
+
+ void RemoveProprietaryMediaTypesAndCodecsForTests();
+
+ private:
+ friend struct base::DefaultLazyInstanceTraits<MimeUtil>;
+
+ typedef base::hash_set<int> CodecSet;
+ typedef std::map<std::string, CodecSet> StrictMappings;
+ struct CodecEntry {
+ CodecEntry() : codec(INVALID_CODEC), is_ambiguous(true) {}
+ CodecEntry(Codec c, bool ambiguous) : codec(c), is_ambiguous(ambiguous) {}
+ Codec codec;
+ bool is_ambiguous;
+ };
+ typedef std::map<std::string, CodecEntry> StringToCodecMappings;
+
+ MimeUtil();
+
+ // For faster lookup, keep hash sets.
+ void InitializeMimeTypeMaps();
+
+ // Returns IsSupported if all codec IDs in |codecs| are unambiguous
+ // and are supported by the platform. MayBeSupported is returned if
+ // at least one codec ID in |codecs| is ambiguous but all the codecs
+ // are supported by the platform. IsNotSupported is returned if at
+ // least one codec ID is not supported by the platform.
+ SupportsType AreSupportedCodecs(
+ const CodecSet& supported_codecs,
+ const std::vector<std::string>& codecs) const;
+
+ // Converts a codec ID into an Codec enum value and indicates
+ // whether the conversion was ambiguous.
+ // Returns true if this method was able to map |codec_id| to a specific
+ // Codec enum value. |codec| and |is_ambiguous| are only valid if true
+ // is returned. Otherwise their value is undefined after the call.
+ // |is_ambiguous| is true if |codec_id| did not have enough information to
+ // unambiguously determine the proper Codec enum value. If |is_ambiguous|
+ // is true |codec| contains the best guess for the intended Codec enum value.
+ bool StringToCodec(const std::string& codec_id,
+ Codec* codec,
+ bool* is_ambiguous) const;
+
+ // Returns true if |codec| is supported by the platform.
+ // Note: This method will return false if the platform supports proprietary
+ // codecs but |allow_proprietary_codecs_| is set to false.
+ bool IsCodecSupported(Codec codec) const;
+
+ // Returns true if |codec| refers to a proprietary codec.
+ bool IsCodecProprietary(Codec codec) const;
+
+ // Returns true and sets |*default_codec| if |mime_type| has a default codec
+ // associated with it. Returns false otherwise and the value of
+ // |*default_codec| is undefined.
+ bool GetDefaultCodecLowerCase(const std::string& mime_type_lower_case,
+ Codec* default_codec) const;
+
+ // Returns true if |mime_type_lower_case| has a default codec associated with
+ // it and IsCodecSupported() returns true for that particular codec.
+ bool IsDefaultCodecSupportedLowerCase(
+ const std::string& mime_type_lower_case) const;
+
+ using MimeTypes = base::hash_set<std::string>;
+ MimeTypes media_map_;
+
+ // A map of mime_types and hash map of the supported codecs for the mime_type.
+ StrictMappings strict_format_map_;
+
+ // Keeps track of whether proprietary codec support should be
+ // advertised to callers.
+ bool allow_proprietary_codecs_;
+
+ // Lookup table for string compare based string -> Codec mappings.
+ StringToCodecMappings string_to_codec_map_;
+
+ DISALLOW_COPY_AND_ASSIGN(MimeUtil);
+}; // class MimeUtil
+
+// This variable is Leaky because it is accessed from WorkerPool threads.
+static base::LazyInstance<MimeUtil>::Leaky g_media_mime_util =
+ LAZY_INSTANCE_INITIALIZER;
+
+
+// A list of media types: http://en.wikipedia.org/wiki/Internet_media_type
+// A comprehensive mime type list: http://plugindoc.mozdev.org/winmime.php
+// This set of codecs is supported by all variations of Chromium.
+static const char* const common_media_types[] = {
+ // Ogg.
+ "audio/ogg",
+ "application/ogg",
+#if !defined(OS_ANDROID) // Android doesn't support Ogg Theora.
+ "video/ogg",
+#endif
+
+ // WebM.
+ "video/webm",
+ "audio/webm",
+
+ // Wav.
+ "audio/wav",
+ "audio/x-wav",
+
+#if defined(OS_ANDROID)
+ // HLS.
+ "application/vnd.apple.mpegurl",
+ "application/x-mpegurl",
+#endif
+};
+
+// List of proprietary types only supported by Google Chrome.
+static const char* const proprietary_media_types[] = {
+ // MPEG-4.
+ "video/mp4",
+ "video/x-m4v",
+ "audio/mp4",
+ "audio/x-m4a",
+
+ // MP3.
+ "audio/mp3",
+ "audio/x-mp3",
+ "audio/mpeg",
+ "audio/aac",
+
+#if defined(ENABLE_MPEG2TS_STREAM_PARSER)
+ // MPEG-2 TS.
+ "video/mp2t",
+#endif
+};
+
+#if defined(OS_ANDROID)
+static bool IsCodecSupportedOnAndroid(MimeUtil::Codec codec) {
+ switch (codec) {
+ case MimeUtil::INVALID_CODEC:
+ return false;
+
+ case MimeUtil::PCM:
+ case MimeUtil::MP3:
+ case MimeUtil::MPEG4_AAC_LC:
+ case MimeUtil::MPEG4_AAC_SBR_v1:
+ case MimeUtil::MPEG4_AAC_SBR_PS_v2:
+ case MimeUtil::H264_BASELINE:
+ case MimeUtil::H264_MAIN:
+ case MimeUtil::H264_HIGH:
+ case MimeUtil::VP8:
+ case MimeUtil::VORBIS:
+ return true;
+
+ case MimeUtil::MPEG2_AAC_LC:
+ case MimeUtil::MPEG2_AAC_MAIN:
+ case MimeUtil::MPEG2_AAC_SSR:
+ // MPEG-2 variants of AAC are not supported on Android.
+ return false;
+
+ case MimeUtil::VP9:
+ // VP9 is supported only in KitKat+ (API Level 19).
+ return base::android::BuildInfo::GetInstance()->sdk_int() >= 19;
+
+ case MimeUtil::OPUS:
+ // Opus is supported only in Lollipop+ (API Level 21).
+ return base::android::BuildInfo::GetInstance()->sdk_int() >= 21;
+
+ case MimeUtil::THEORA:
+ return false;
+ }
+
+ return false;
+}
+#endif
+
+struct MediaFormatStrict {
+ const char* const mime_type;
+ const char* const codecs_list;
+};
+
+// Following is the list of RFC 6381 compliant codecs:
+// mp4a.66 - MPEG-2 AAC MAIN
+// mp4a.67 - MPEG-2 AAC LC
+// mp4a.68 - MPEG-2 AAC SSR
+// mp4a.69 - MPEG-2 extension to MPEG-1
+// mp4a.6B - MPEG-1 audio
+// mp4a.40.2 - MPEG-4 AAC LC
+// mp4a.40.02 - MPEG-4 AAC LC (leading 0 in aud-oti for compatibility)
+// mp4a.40.5 - MPEG-4 HE-AAC v1 (AAC LC + SBR)
+// mp4a.40.05 - MPEG-4 HE-AAC v1 (AAC LC + SBR) (leading 0 in aud-oti for
+// compatibility)
+// mp4a.40.29 - MPEG-4 HE-AAC v2 (AAC LC + SBR + PS)
+//
+// avc1.42E0xx - H.264 Baseline
+// avc1.4D40xx - H.264 Main
+// avc1.6400xx - H.264 High
+static const char kMP4AudioCodecsExpression[] =
+ "mp4a.66,mp4a.67,mp4a.68,mp4a.69,mp4a.6B,mp4a.40.2,mp4a.40.02,mp4a.40.5,"
+ "mp4a.40.05,mp4a.40.29";
+static const char kMP4VideoCodecsExpression[] =
+ // This is not a complete list of supported avc1 codecs. It is simply used
+ // to register support for the corresponding Codec enum. Instead of using
+ // strings in these three arrays, we should use the Codec enum values.
+ // This will avoid confusion and unnecessary parsing at runtime.
+ // kUnambiguousCodecStringMap/kAmbiguousCodecStringMap should be the only
+ // mapping from strings to codecs. See crbug.com/461009.
+ "avc1.42E00A,avc1.4D400A,avc1.64000A,"
+ "mp4a.66,mp4a.67,mp4a.68,mp4a.69,mp4a.6B,mp4a.40.2,mp4a.40.02,mp4a.40.5,"
+ "mp4a.40.05,mp4a.40.29";
+
+// These containers are also included in
+// common_media_types/proprietary_media_types. See crbug.com/461012.
+static const MediaFormatStrict format_codec_mappings[] = {
+ {"video/webm", "opus,vorbis,vp8,vp8.0,vp9,vp9.0"},
+ {"audio/webm", "opus,vorbis"},
+ {"audio/wav", "1"},
+ {"audio/x-wav", "1"},
+// Android does not support Opus in Ogg container.
+#if defined(OS_ANDROID)
+ {"video/ogg", "theora,vorbis"},
+ {"audio/ogg", "vorbis"},
+ {"application/ogg", "theora,vorbis"},
+#else
+ {"video/ogg", "opus,theora,vorbis"},
+ {"audio/ogg", "opus,vorbis"},
+ {"application/ogg", "opus,theora,vorbis"},
+#endif
+ {"audio/mpeg", "mp3"},
+ {"audio/mp3", ""},
+ {"audio/x-mp3", ""},
+ {"audio/mp4", kMP4AudioCodecsExpression},
+ {"audio/x-m4a", kMP4AudioCodecsExpression},
+ {"video/mp4", kMP4VideoCodecsExpression},
+ {"video/x-m4v", kMP4VideoCodecsExpression},
+ {"application/x-mpegurl", kMP4VideoCodecsExpression},
+ {"application/vnd.apple.mpegurl", kMP4VideoCodecsExpression}};
+
+struct CodecIDMappings {
+ const char* const codec_id;
+ MimeUtil::Codec codec;
+};
+
+// List of codec IDs that provide enough information to determine the
+// codec and profile being requested.
+//
+// The "mp4a" strings come from RFC 6381.
+static const CodecIDMappings kUnambiguousCodecStringMap[] = {
+ {"1", MimeUtil::PCM}, // We only allow this for WAV so it isn't ambiguous.
+ // avc1/avc3.XXXXXX may be unambiguous; handled by ParseH264CodecID().
+ {"mp3", MimeUtil::MP3},
+ {"mp4a.66", MimeUtil::MPEG2_AAC_MAIN},
+ {"mp4a.67", MimeUtil::MPEG2_AAC_LC},
+ {"mp4a.68", MimeUtil::MPEG2_AAC_SSR},
+ {"mp4a.69", MimeUtil::MP3},
+ {"mp4a.6B", MimeUtil::MP3},
+ {"mp4a.40.2", MimeUtil::MPEG4_AAC_LC},
+ {"mp4a.40.02", MimeUtil::MPEG4_AAC_LC},
+ {"mp4a.40.5", MimeUtil::MPEG4_AAC_SBR_v1},
+ {"mp4a.40.05", MimeUtil::MPEG4_AAC_SBR_v1},
+ {"mp4a.40.29", MimeUtil::MPEG4_AAC_SBR_PS_v2},
+ {"vorbis", MimeUtil::VORBIS},
+ {"opus", MimeUtil::OPUS},
+ {"vp8", MimeUtil::VP8},
+ {"vp8.0", MimeUtil::VP8},
+ {"vp9", MimeUtil::VP9},
+ {"vp9.0", MimeUtil::VP9},
+ {"theora", MimeUtil::THEORA}};
+
+// List of codec IDs that are ambiguous and don't provide
+// enough information to determine the codec and profile.
+// The codec in these entries indicate the codec and profile
+// we assume the user is trying to indicate.
+static const CodecIDMappings kAmbiguousCodecStringMap[] = {
+ {"mp4a.40", MimeUtil::MPEG4_AAC_LC},
+ {"avc1", MimeUtil::H264_BASELINE},
+ {"avc3", MimeUtil::H264_BASELINE},
+ // avc1/avc3.XXXXXX may be ambiguous; handled by ParseH264CodecID().
+};
+
+MimeUtil::MimeUtil() : allow_proprietary_codecs_(false) {
+ InitializeMimeTypeMaps();
+}
+
+SupportsType MimeUtil::AreSupportedCodecs(
+ const CodecSet& supported_codecs,
+ const std::vector<std::string>& codecs) const {
+ DCHECK(!supported_codecs.empty());
+ DCHECK(!codecs.empty());
+
+ SupportsType result = IsSupported;
+ for (size_t i = 0; i < codecs.size(); ++i) {
+ bool is_ambiguous = true;
+ Codec codec = INVALID_CODEC;
+ if (!StringToCodec(codecs[i], &codec, &is_ambiguous))
+ return IsNotSupported;
+
+ if (!IsCodecSupported(codec) ||
+ supported_codecs.find(codec) == supported_codecs.end()) {
+ return IsNotSupported;
+ }
+
+ if (is_ambiguous)
+ result = MayBeSupported;
+ }
+
+ return result;
+}
+
+void MimeUtil::InitializeMimeTypeMaps() {
+ // Initialize the supported media types.
+ for (size_t i = 0; i < arraysize(common_media_types); ++i)
+ media_map_.insert(common_media_types[i]);
+#if defined(USE_PROPRIETARY_CODECS)
+ allow_proprietary_codecs_ = true;
+
+ for (size_t i = 0; i < arraysize(proprietary_media_types); ++i)
+ media_map_.insert(proprietary_media_types[i]);
+#endif
+
+ for (size_t i = 0; i < arraysize(kUnambiguousCodecStringMap); ++i) {
+ string_to_codec_map_[kUnambiguousCodecStringMap[i].codec_id] =
+ CodecEntry(kUnambiguousCodecStringMap[i].codec, false);
+ }
+
+ for (size_t i = 0; i < arraysize(kAmbiguousCodecStringMap); ++i) {
+ string_to_codec_map_[kAmbiguousCodecStringMap[i].codec_id] =
+ CodecEntry(kAmbiguousCodecStringMap[i].codec, true);
+ }
+
+ // Initialize the strict supported media types.
+ for (size_t i = 0; i < arraysize(format_codec_mappings); ++i) {
+ std::vector<std::string> mime_type_codecs;
+ ParseCodecString(format_codec_mappings[i].codecs_list,
+ &mime_type_codecs,
+ false);
+
+ CodecSet codecs;
+ for (size_t j = 0; j < mime_type_codecs.size(); ++j) {
+ Codec codec = INVALID_CODEC;
+ bool is_ambiguous = true;
+ CHECK(StringToCodec(mime_type_codecs[j], &codec, &is_ambiguous));
+ DCHECK(!is_ambiguous);
+ codecs.insert(codec);
+ }
+
+ strict_format_map_[format_codec_mappings[i].mime_type] = codecs;
+ }
+}
+
+bool MimeUtil::IsSupportedMediaMimeType(const std::string& mime_type) const {
+ return media_map_.find(base::StringToLowerASCII(mime_type)) !=
+ media_map_.end();
+}
+
+
+bool MimeUtil::AreSupportedMediaCodecs(
+ const std::vector<std::string>& codecs) const {
+ for (size_t i = 0; i < codecs.size(); ++i) {
+ Codec codec = INVALID_CODEC;
+ bool is_ambiguous = true;
+ if (!StringToCodec(codecs[i], &codec, &is_ambiguous) ||
+ !IsCodecSupported(codec)) {
+ return false;
+ }
+ }
+ return true;
+}
+
+void MimeUtil::ParseCodecString(const std::string& codecs,
+ std::vector<std::string>* codecs_out,
+ bool strip) {
+ std::string no_quote_codecs;
+ base::TrimString(codecs, "\"", &no_quote_codecs);
+ base::SplitString(no_quote_codecs, ',', codecs_out);
+
+ if (!strip)
+ return;
+
+ // Strip everything past the first '.'
+ for (std::vector<std::string>::iterator it = codecs_out->begin();
+ it != codecs_out->end();
+ ++it) {
+ size_t found = it->find_first_of('.');
+ if (found != std::string::npos)
+ it->resize(found);
+ }
+}
+
+bool MimeUtil::IsStrictMediaMimeType(const std::string& mime_type) const {
+ return strict_format_map_.find(base::StringToLowerASCII(mime_type)) !=
+ strict_format_map_.end();
+}
+
+SupportsType MimeUtil::IsSupportedStrictMediaMimeType(
+ const std::string& mime_type,
+ const std::vector<std::string>& codecs) const {
+ const std::string mime_type_lower_case = base::StringToLowerASCII(mime_type);
+ StrictMappings::const_iterator it_strict_map =
+ strict_format_map_.find(mime_type_lower_case);
+ if (it_strict_map == strict_format_map_.end())
+ return codecs.empty() ? MayBeSupported : IsNotSupported;
+
+ if (it_strict_map->second.empty()) {
+ // We get here if the mimetype does not expect a codecs parameter.
+ return (codecs.empty() &&
+ IsDefaultCodecSupportedLowerCase(mime_type_lower_case))
+ ? IsSupported
+ : IsNotSupported;
+ }
+
+ if (codecs.empty()) {
+ // We get here if the mimetype expects to get a codecs parameter,
+ // but didn't get one. If |mime_type_lower_case| does not have a default
+ // codec the best we can do is say "maybe" because we don't have enough
+ // information.
+ Codec default_codec = INVALID_CODEC;
+ if (!GetDefaultCodecLowerCase(mime_type_lower_case, &default_codec))
+ return MayBeSupported;
+
+ return IsCodecSupported(default_codec) ? IsSupported : IsNotSupported;
+ }
+
+ return AreSupportedCodecs(it_strict_map->second, codecs);
+}
+
+void MimeUtil::RemoveProprietaryMediaTypesAndCodecsForTests() {
+ for (size_t i = 0; i < arraysize(proprietary_media_types); ++i)
+ media_map_.erase(proprietary_media_types[i]);
+ allow_proprietary_codecs_ = false;
+}
+
+// Returns true iff |profile_str| conforms to hex string "42y0".
+//
+// |profile_str| is the first four characters of the H.264 suffix string. From
+// ISO-14496-10 7.3.2.1, it consists of:
+// 8 bits: profile_idc; required to be 0x42 here.
+// 1 bit: constraint_set0_flag; ignored here.
+// 1 bit: constraint_set1_flag; ignored here.
+// 1 bit: constraint_set2_flag; ignored here.
+// 1 bit: constraint_set3_flag; ignored here.
+// 4 bits: reserved; required to be 0 here.
+//
+// The spec indicates other ways, not implemented here, that a |profile_str|
+// can indicate a baseline conforming decoder is sufficient for decode in Annex
+// A.2.1: "[profile_idc not necessarily 0x42] with constraint_set0_flag set and
+// in which level_idc and constraint_set3_flag represent a level less than or
+// equal to the specified level."
+static bool IsValidH264BaselineProfile(const std::string& profile_str) {
+ return (profile_str.size() == 4 && profile_str[0] == '4' &&
+ profile_str[1] == '2' && base::IsHexDigit(profile_str[2]) &&
+ profile_str[3] == '0');
+}
+
+static bool IsValidH264Level(const std::string& level_str) {
+ uint32 level;
+ if (level_str.size() != 2 || !base::HexStringToUInt(level_str, &level))
+ return false;
+
+ // Valid levels taken from Table A-1 in ISO-14496-10.
+ // Essentially |level_str| is toHex(10 * level).
+ return ((level >= 10 && level <= 13) ||
+ (level >= 20 && level <= 22) ||
+ (level >= 30 && level <= 32) ||
+ (level >= 40 && level <= 42) ||
+ (level >= 50 && level <= 51));
+}
+
+// Handle parsing H.264 codec IDs as outlined in RFC 6381 and ISO-14496-10.
+// avc1.42y0xx, y >= 8 - H.264 Baseline
+// avc1.4D40xx - H.264 Main
+// avc1.6400xx - H.264 High
+//
+// avc1.xxxxxx & avc3.xxxxxx are considered ambiguous forms that are trying to
+// signal H.264 Baseline. For example, the idc_level, profile_idc and
+// constraint_set3_flag pieces may explicitly require decoder to conform to
+// baseline profile at the specified level (see Annex A and constraint_set0 in
+// ISO-14496-10).
+static bool ParseH264CodecID(const std::string& codec_id,
+ MimeUtil::Codec* codec,
+ bool* is_ambiguous) {
+ // Make sure we have avc1.xxxxxx or avc3.xxxxxx
+ if (codec_id.size() != 11 ||
+ (!base::StartsWithASCII(codec_id, "avc1.", true) &&
+ !base::StartsWithASCII(codec_id, "avc3.", true))) {
+ return false;
+ }
+
+ std::string profile = base::StringToUpperASCII(codec_id.substr(5, 4));
+ if (IsValidH264BaselineProfile(profile)) {
+ *codec = MimeUtil::H264_BASELINE;
+ } else if (profile == "4D40") {
+ *codec = MimeUtil::H264_MAIN;
+ } else if (profile == "6400") {
+ *codec = MimeUtil::H264_HIGH;
+ } else {
+ *codec = MimeUtil::H264_BASELINE;
+ *is_ambiguous = true;
+ return true;
+ }
+
+ *is_ambiguous =
+ !IsValidH264Level(base::StringToUpperASCII(codec_id.substr(9)));
+ return true;
+}
+
+bool MimeUtil::StringToCodec(const std::string& codec_id,
+ Codec* codec,
+ bool* is_ambiguous) const {
+ StringToCodecMappings::const_iterator itr =
+ string_to_codec_map_.find(codec_id);
+ if (itr != string_to_codec_map_.end()) {
+ *codec = itr->second.codec;
+ *is_ambiguous = itr->second.is_ambiguous;
+ return true;
+ }
+
+ // If |codec_id| is not in |string_to_codec_map_|, then we assume that it is
+ // an H.264 codec ID because currently those are the only ones that can't be
+ // stored in the |string_to_codec_map_| and require parsing.
+ return ParseH264CodecID(codec_id, codec, is_ambiguous);
+}
+
+bool MimeUtil::IsCodecSupported(Codec codec) const {
+ DCHECK_NE(codec, INVALID_CODEC);
+
+#if defined(OS_ANDROID)
+ if (!IsCodecSupportedOnAndroid(codec))
+ return false;
+#endif
+
+ return allow_proprietary_codecs_ || !IsCodecProprietary(codec);
+}
+
+bool MimeUtil::IsCodecProprietary(Codec codec) const {
+ switch (codec) {
+ case INVALID_CODEC:
+ case MP3:
+ case MPEG2_AAC_LC:
+ case MPEG2_AAC_MAIN:
+ case MPEG2_AAC_SSR:
+ case MPEG4_AAC_LC:
+ case MPEG4_AAC_SBR_v1:
+ case MPEG4_AAC_SBR_PS_v2:
+ case H264_BASELINE:
+ case H264_MAIN:
+ case H264_HIGH:
+ return true;
+
+ case PCM:
+ case VORBIS:
+ case OPUS:
+ case VP8:
+ case VP9:
+ case THEORA:
+ return false;
+ }
+
+ return true;
+}
+
+bool MimeUtil::GetDefaultCodecLowerCase(const std::string& mime_type_lower_case,
+ Codec* default_codec) const {
+ if (mime_type_lower_case == "audio/mpeg" ||
+ mime_type_lower_case == "audio/mp3" ||
+ mime_type_lower_case == "audio/x-mp3") {
+ *default_codec = MimeUtil::MP3;
+ return true;
+ }
+
+ return false;
+}
+
+bool MimeUtil::IsDefaultCodecSupportedLowerCase(
+ const std::string& mime_type_lower_case) const {
+ Codec default_codec = Codec::INVALID_CODEC;
+ if (!GetDefaultCodecLowerCase(mime_type_lower_case, &default_codec))
+ return false;
+ return IsCodecSupported(default_codec);
+}
+
+bool IsSupportedMediaMimeType(const std::string& mime_type) {
+ return g_media_mime_util.Get().IsSupportedMediaMimeType(mime_type);
+}
+
+bool AreSupportedMediaCodecs(const std::vector<std::string>& codecs) {
+ return g_media_mime_util.Get().AreSupportedMediaCodecs(codecs);
+}
+
+bool IsStrictMediaMimeType(const std::string& mime_type) {
+ return g_media_mime_util.Get().IsStrictMediaMimeType(mime_type);
+}
+
+SupportsType IsSupportedStrictMediaMimeType(
+ const std::string& mime_type,
+ const std::vector<std::string>& codecs) {
+ return g_media_mime_util.Get().IsSupportedStrictMediaMimeType(
+ mime_type, codecs);
+}
+
+void ParseCodecString(const std::string& codecs,
+ std::vector<std::string>* codecs_out,
+ const bool strip) {
+ g_media_mime_util.Get().ParseCodecString(codecs, codecs_out, strip);
+}
+
+void RemoveProprietaryMediaTypesAndCodecsForTests() {
+ g_media_mime_util.Get().RemoveProprietaryMediaTypesAndCodecsForTests();
+}
+
+} // namespace media
diff --git a/chromium/media/base/mime_util.h b/chromium/media/base/mime_util.h
new file mode 100644
index 00000000000..a5c80d56e1b
--- /dev/null
+++ b/chromium/media/base/mime_util.h
@@ -0,0 +1,76 @@
+// Copyright 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_BASE_MIME_UTIL_H__
+#define MEDIA_BASE_MIME_UTIL_H__
+
+#include <string>
+#include <vector>
+
+#include "media/base/media_export.h"
+
+namespace media {
+
+// Check to see if a particular MIME type is in the list of
+// supported/recognized MIME types.
+MEDIA_EXPORT bool IsSupportedMediaMimeType(const std::string& mime_type);
+
+// Returns true if and only if all codecs are supported, false otherwise.
+MEDIA_EXPORT bool AreSupportedMediaCodecs(
+ const std::vector<std::string>& codecs);
+
+// Parses a codec string, populating |codecs_out| with the prefix of each codec
+// in the string |codecs_in|. For example, passed "aaa.b.c,dd.eee", if
+// |strip| == true |codecs_out| will contain {"aaa", "dd"}, if |strip| == false
+// |codecs_out| will contain {"aaa.b.c", "dd.eee"}.
+// See http://www.ietf.org/rfc/rfc4281.txt.
+MEDIA_EXPORT void ParseCodecString(const std::string& codecs,
+ std::vector<std::string>* codecs_out,
+ bool strip);
+
+// Check to see if a particular MIME type is in our list which only supports a
+// certain subset of codecs.
+MEDIA_EXPORT bool IsStrictMediaMimeType(const std::string& mime_type);
+
+// Indicates that the MIME type and (possible codec string) are supported by the
+// underlying platform.
+enum SupportsType {
+ // The underlying platform is known not to support the given MIME type and
+ // codec combination.
+ IsNotSupported,
+
+ // The underlying platform is known to support the given MIME type and codec
+ // combination.
+ IsSupported,
+
+ // The underlying platform is unsure whether the given MIME type and codec
+ // combination can be rendered or not before actually trying to play it.
+ MayBeSupported
+};
+
+// Checks the |mime_type| and |codecs| against the MIME types known to support
+// only a particular subset of codecs.
+// * Returns IsSupported if the |mime_type| is supported and all the codecs
+// within the |codecs| are supported for the |mime_type|.
+// * Returns MayBeSupported if the |mime_type| is supported and is known to
+// support only a subset of codecs, but |codecs| was empty. Also returned if
+// all the codecs in |codecs| are supported, but additional codec parameters
+// were supplied (such as profile) for which the support cannot be decided.
+// * Returns IsNotSupported if either the |mime_type| is not supported or the
+// |mime_type| is supported but at least one of the codecs within |codecs| is
+// not supported for the |mime_type|.
+MEDIA_EXPORT SupportsType IsSupportedStrictMediaMimeType(
+ const std::string& mime_type,
+ const std::vector<std::string>& codecs);
+
+// Test only method that removes proprietary media types and codecs from the
+// list of supported MIME types and codecs. These types and codecs must be
+// removed to ensure consistent layout test results across all Chromium
+// variations.
+MEDIA_EXPORT void RemoveProprietaryMediaTypesAndCodecsForTests();
+
+} // namespace media
+
+#endif // MEDIA_BASE_MIME_UTIL_H__
+
diff --git a/chromium/media/base/mime_util_unittest.cc b/chromium/media/base/mime_util_unittest.cc
new file mode 100644
index 00000000000..d1d6722d21f
--- /dev/null
+++ b/chromium/media/base/mime_util_unittest.cc
@@ -0,0 +1,142 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "base/basictypes.h"
+#include "base/strings/string_split.h"
+#include "build/build_config.h"
+#include "media/base/mime_util.h"
+#include "testing/gtest/include/gtest/gtest.h"
+
+namespace media {
+
+TEST(MimeUtilTest, StrictMediaMimeType) {
+ EXPECT_TRUE(IsStrictMediaMimeType("video/webm"));
+ EXPECT_TRUE(IsStrictMediaMimeType("Video/WEBM"));
+ EXPECT_TRUE(IsStrictMediaMimeType("audio/webm"));
+
+ EXPECT_TRUE(IsStrictMediaMimeType("audio/wav"));
+ EXPECT_TRUE(IsStrictMediaMimeType("audio/x-wav"));
+
+ EXPECT_TRUE(IsStrictMediaMimeType("video/ogg"));
+ EXPECT_TRUE(IsStrictMediaMimeType("audio/ogg"));
+ EXPECT_TRUE(IsStrictMediaMimeType("application/ogg"));
+
+ EXPECT_TRUE(IsStrictMediaMimeType("audio/mpeg"));
+ EXPECT_TRUE(IsStrictMediaMimeType("audio/mp3"));
+ EXPECT_TRUE(IsStrictMediaMimeType("audio/x-mp3"));
+
+ EXPECT_TRUE(IsStrictMediaMimeType("video/mp4"));
+ EXPECT_TRUE(IsStrictMediaMimeType("video/x-m4v"));
+ EXPECT_TRUE(IsStrictMediaMimeType("audio/mp4"));
+ EXPECT_TRUE(IsStrictMediaMimeType("audio/x-m4a"));
+
+ EXPECT_TRUE(IsStrictMediaMimeType("application/x-mpegurl"));
+ EXPECT_TRUE(IsStrictMediaMimeType("application/vnd.apple.mpegurl"));
+
+ EXPECT_FALSE(IsStrictMediaMimeType("video/unknown"));
+ EXPECT_FALSE(IsStrictMediaMimeType("Video/UNKNOWN"));
+ EXPECT_FALSE(IsStrictMediaMimeType("audio/unknown"));
+ EXPECT_FALSE(IsStrictMediaMimeType("application/unknown"));
+ EXPECT_FALSE(IsStrictMediaMimeType("unknown/unknown"));
+}
+
+TEST(MimeUtilTest, CommonMediaMimeType) {
+ EXPECT_TRUE(IsSupportedMediaMimeType("audio/webm"));
+ EXPECT_TRUE(IsSupportedMediaMimeType("video/webm"));
+
+ EXPECT_TRUE(IsSupportedMediaMimeType("audio/wav"));
+ EXPECT_TRUE(IsSupportedMediaMimeType("audio/x-wav"));
+
+ EXPECT_TRUE(IsSupportedMediaMimeType("audio/ogg"));
+ EXPECT_TRUE(IsSupportedMediaMimeType("application/ogg"));
+#if defined(OS_ANDROID)
+ EXPECT_FALSE(IsSupportedMediaMimeType("video/ogg"));
+#else
+ EXPECT_TRUE(IsSupportedMediaMimeType("video/ogg"));
+#endif // OS_ANDROID
+
+#if defined(OS_ANDROID)
+ // HLS is supported on Android API level 14 and higher and Chrome supports
+ // API levels 15 and higher, so these are expected to be supported.
+ bool kHlsSupported = true;
+#else
+ bool kHlsSupported = false;
+#endif
+
+ EXPECT_EQ(kHlsSupported, IsSupportedMediaMimeType("application/x-mpegurl"));
+ EXPECT_EQ(kHlsSupported, IsSupportedMediaMimeType("Application/X-MPEGURL"));
+ EXPECT_EQ(kHlsSupported, IsSupportedMediaMimeType(
+ "application/vnd.apple.mpegurl"));
+
+#if defined(USE_PROPRIETARY_CODECS)
+ EXPECT_TRUE(IsSupportedMediaMimeType("audio/mp4"));
+ EXPECT_TRUE(IsSupportedMediaMimeType("audio/x-m4a"));
+ EXPECT_TRUE(IsSupportedMediaMimeType("video/mp4"));
+ EXPECT_TRUE(IsSupportedMediaMimeType("video/x-m4v"));
+
+ EXPECT_TRUE(IsSupportedMediaMimeType("audio/mp3"));
+ EXPECT_TRUE(IsSupportedMediaMimeType("audio/x-mp3"));
+ EXPECT_TRUE(IsSupportedMediaMimeType("audio/mpeg"));
+ EXPECT_TRUE(IsSupportedMediaMimeType("audio/aac"));
+
+#if defined(ENABLE_MPEG2TS_STREAM_PARSER)
+ EXPECT_TRUE(IsSupportedMediaMimeType("video/mp2t"));
+#else
+ EXPECT_FALSE(IsSupportedMediaMimeType("video/mp2t"));
+#endif
+#else
+ EXPECT_FALSE(IsSupportedMediaMimeType("audio/mp4"));
+ EXPECT_FALSE(IsSupportedMediaMimeType("audio/x-m4a"));
+ EXPECT_FALSE(IsSupportedMediaMimeType("video/mp4"));
+ EXPECT_FALSE(IsSupportedMediaMimeType("video/x-m4v"));
+
+ EXPECT_FALSE(IsSupportedMediaMimeType("audio/mp3"));
+ EXPECT_FALSE(IsSupportedMediaMimeType("audio/x-mp3"));
+ EXPECT_FALSE(IsSupportedMediaMimeType("audio/mpeg"));
+ EXPECT_FALSE(IsSupportedMediaMimeType("audio/aac"));
+#endif // USE_PROPRIETARY_CODECS
+ EXPECT_FALSE(IsSupportedMediaMimeType("video/mp3"));
+
+ EXPECT_FALSE(IsSupportedMediaMimeType("video/unknown"));
+ EXPECT_FALSE(IsSupportedMediaMimeType("audio/unknown"));
+ EXPECT_FALSE(IsSupportedMediaMimeType("unknown/unknown"));
+}
+
+// Note: codecs should only be a list of 2 or fewer; hence the restriction of
+// results' length to 2.
+TEST(MimeUtilTest, ParseCodecString) {
+ const struct {
+ const char* const original;
+ size_t expected_size;
+ const char* const results[2];
+ } tests[] = {
+ { "\"bogus\"", 1, { "bogus" } },
+ { "0", 1, { "0" } },
+ { "avc1.42E01E, mp4a.40.2", 2, { "avc1", "mp4a" } },
+ { "\"mp4v.20.240, mp4a.40.2\"", 2, { "mp4v", "mp4a" } },
+ { "mp4v.20.8, samr", 2, { "mp4v", "samr" } },
+ { "\"theora, vorbis\"", 2, { "theora", "vorbis" } },
+ { "", 0, { } },
+ { "\"\"", 0, { } },
+ { "\" \"", 0, { } },
+ { ",", 2, { "", "" } },
+ };
+
+ for (size_t i = 0; i < arraysize(tests); ++i) {
+ std::vector<std::string> codecs_out;
+ ParseCodecString(tests[i].original, &codecs_out, true);
+ ASSERT_EQ(tests[i].expected_size, codecs_out.size());
+ for (size_t j = 0; j < tests[i].expected_size; ++j)
+ EXPECT_EQ(tests[i].results[j], codecs_out[j]);
+ }
+
+ // Test without stripping the codec type.
+ std::vector<std::string> codecs_out;
+ ParseCodecString("avc1.42E01E, mp4a.40.2", &codecs_out, false);
+ ASSERT_EQ(2u, codecs_out.size());
+ EXPECT_EQ("avc1.42E01E", codecs_out[0]);
+ EXPECT_EQ("mp4a.40.2", codecs_out[1]);
+}
+
+} // namespace media
diff --git a/chromium/media/base/mock_audio_renderer_sink.h b/chromium/media/base/mock_audio_renderer_sink.h
index 3a85d528a03..b813257601a 100644
--- a/chromium/media/base/mock_audio_renderer_sink.h
+++ b/chromium/media/base/mock_audio_renderer_sink.h
@@ -5,6 +5,8 @@
#ifndef MEDIA_BASE_MOCK_AUDIO_RENDERER_SINK_H_
#define MEDIA_BASE_MOCK_AUDIO_RENDERER_SINK_H_
+#include <string>
+
#include "media/audio/audio_parameters.h"
#include "media/base/audio_renderer_sink.h"
#include "testing/gmock/include/gmock/gmock.h"
@@ -20,6 +22,12 @@ class MockAudioRendererSink : public AudioRendererSink {
MOCK_METHOD0(Pause, void());
MOCK_METHOD0(Play, void());
MOCK_METHOD1(SetVolume, bool(double volume));
+ MOCK_METHOD0(SwitchOutputDevice, void());
+ void SwitchOutputDevice(const std::string&,
+ const GURL& security_origin,
+ const SwitchOutputDeviceCB& callback) override {
+ SwitchOutputDevice();
+ }
void Initialize(const AudioParameters& params,
RenderCallback* renderer) override;
diff --git a/chromium/media/base/mock_filters.cc b/chromium/media/base/mock_filters.cc
index 0a66c466b9b..f9fd4b513d5 100644
--- a/chromium/media/base/mock_filters.cc
+++ b/chromium/media/base/mock_filters.cc
@@ -18,6 +18,10 @@ MockDemuxer::MockDemuxer() {}
MockDemuxer::~MockDemuxer() {}
+std::string MockDemuxer::GetDisplayName() const {
+ return "MockDemuxer";
+}
+
MockDemuxerStream::MockDemuxerStream(DemuxerStream::Type type)
: type_(type), liveness_(LIVENESS_UNKNOWN) {
}
diff --git a/chromium/media/base/mock_filters.h b/chromium/media/base/mock_filters.h
index 31358758079..99e113b27ee 100644
--- a/chromium/media/base/mock_filters.h
+++ b/chromium/media/base/mock_filters.h
@@ -32,6 +32,7 @@ class MockDemuxer : public Demuxer {
virtual ~MockDemuxer();
// Demuxer implementation.
+ virtual std::string GetDisplayName() const;
MOCK_METHOD3(Initialize,
void(DemuxerHost* host, const PipelineStatusCB& cb, bool));
MOCK_METHOD1(SetPlaybackRate, void(double playback_rate));
@@ -82,10 +83,11 @@ class MockVideoDecoder : public VideoDecoder {
// VideoDecoder implementation.
virtual std::string GetDisplayName() const;
- MOCK_METHOD4(Initialize, void(const VideoDecoderConfig& config,
- bool low_delay,
- const PipelineStatusCB& status_cb,
- const OutputCB& output_cb));
+ MOCK_METHOD4(Initialize,
+ void(const VideoDecoderConfig& config,
+ bool low_delay,
+ const InitCB& init_cb,
+ const OutputCB& output_cb));
MOCK_METHOD2(Decode, void(const scoped_refptr<DecoderBuffer>& buffer,
const DecodeCB&));
MOCK_METHOD1(Reset, void(const base::Closure&));
@@ -104,7 +106,7 @@ class MockAudioDecoder : public AudioDecoder {
virtual std::string GetDisplayName() const;
MOCK_METHOD3(Initialize,
void(const AudioDecoderConfig& config,
- const PipelineStatusCB& status_cb,
+ const InitCB& init_cb,
const OutputCB& output_cb));
MOCK_METHOD2(Decode,
void(const scoped_refptr<DecoderBuffer>& buffer,
diff --git a/chromium/media/base/multi_channel_resampler.cc b/chromium/media/base/multi_channel_resampler.cc
index b0ab88d6820..4ec44d8ede9 100644
--- a/chromium/media/base/multi_channel_resampler.cc
+++ b/chromium/media/base/multi_channel_resampler.cc
@@ -119,5 +119,10 @@ double MultiChannelResampler::BufferedFrames() const {
return resamplers_[0]->BufferedFrames();
}
+void MultiChannelResampler::PrimeWithSilence() {
+ DCHECK(!resamplers_.empty());
+ for (size_t i = 0; i < resamplers_.size(); ++i)
+ resamplers_[i]->PrimeWithSilence();
+}
} // namespace media
diff --git a/chromium/media/base/multi_channel_resampler.h b/chromium/media/base/multi_channel_resampler.h
index 84fa9fe3ac1..690f750bb12 100644
--- a/chromium/media/base/multi_channel_resampler.h
+++ b/chromium/media/base/multi_channel_resampler.h
@@ -54,6 +54,9 @@ class MEDIA_EXPORT MultiChannelResampler {
// See SincResampler::BufferedFrames.
double BufferedFrames() const;
+ // See SincResampler::PrimeWithSilence.
+ void PrimeWithSilence();
+
private:
// SincResampler::ReadCB implementation. ProvideInput() will be called for
// each channel (in channel order) as SincResampler needs more data.
diff --git a/chromium/media/base/null_video_sink.cc b/chromium/media/base/null_video_sink.cc
index 03a834f3361..0d9746d3620 100644
--- a/chromium/media/base/null_video_sink.cc
+++ b/chromium/media/base/null_video_sink.cc
@@ -58,7 +58,7 @@ void NullVideoSink::CallRender() {
current_render_time_, end_of_interval, background_render_);
const bool is_new_frame = new_frame != last_frame_;
last_frame_ = new_frame;
- if (is_new_frame)
+ if (is_new_frame && !new_frame_cb_.is_null())
new_frame_cb_.Run(new_frame);
current_render_time_ += interval_;
diff --git a/chromium/media/base/null_video_sink.h b/chromium/media/base/null_video_sink.h
index 66ff79f59f9..b5a384b5bed 100644
--- a/chromium/media/base/null_video_sink.h
+++ b/chromium/media/base/null_video_sink.h
@@ -9,6 +9,7 @@
#include "base/memory/scoped_ptr.h"
#include "base/time/default_tick_clock.h"
#include "base/time/tick_clock.h"
+#include "media/base/media_export.h"
#include "media/base/video_renderer_sink.h"
namespace base {
@@ -17,7 +18,7 @@ class SingleThreadTaskRunner;
namespace media {
-class NullVideoSink : public VideoRendererSink {
+class MEDIA_EXPORT NullVideoSink : public VideoRendererSink {
public:
using NewFrameCB = base::Callback<void(const scoped_refptr<VideoFrame>&)>;
diff --git a/chromium/media/base/pipeline.cc b/chromium/media/base/pipeline.cc
index 4a397049fc0..387470aec07 100644
--- a/chromium/media/base/pipeline.cc
+++ b/chromium/media/base/pipeline.cc
@@ -46,8 +46,6 @@ Pipeline::Pipeline(
pending_cdm_context_(nullptr),
weak_factory_(this) {
media_log_->AddEvent(media_log_->CreatePipelineStateChangedEvent(kCreated));
- media_log_->AddEvent(
- media_log_->CreateEvent(MediaLogEvent::PIPELINE_CREATED));
}
Pipeline::~Pipeline() {
@@ -56,9 +54,6 @@ Pipeline::~Pipeline() {
DCHECK(!running_) << "Stop() must complete before destroying object";
DCHECK(stop_cb_.is_null());
DCHECK(seek_cb_.is_null());
-
- media_log_->AddEvent(
- media_log_->CreateEvent(MediaLogEvent::PIPELINE_DESTROYED));
}
void Pipeline::Start(Demuxer* demuxer,
diff --git a/chromium/media/base/pipeline_status.h b/chromium/media/base/pipeline_status.h
index 15e5c9dd76c..daec9237988 100644
--- a/chromium/media/base/pipeline_status.h
+++ b/chromium/media/base/pipeline_status.h
@@ -19,7 +19,7 @@ enum PipelineStatus {
PIPELINE_ERROR_URL_NOT_FOUND = 1,
PIPELINE_ERROR_NETWORK = 2,
PIPELINE_ERROR_DECODE = 3,
- PIPELINE_ERROR_DECRYPT = 4,
+ // Deprecated: PIPELINE_ERROR_DECRYPT = 4,
PIPELINE_ERROR_ABORT = 5,
PIPELINE_ERROR_INITIALIZATION_FAILED = 6,
PIPELINE_ERROR_COULD_NOT_RENDER = 8,
diff --git a/chromium/media/base/pipeline_unittest.cc b/chromium/media/base/pipeline_unittest.cc
index d4f9fa0984b..83367b55960 100644
--- a/chromium/media/base/pipeline_unittest.cc
+++ b/chromium/media/base/pipeline_unittest.cc
@@ -91,7 +91,7 @@ class PipelineTest : public ::testing::Test {
};
PipelineTest()
- : pipeline_(new Pipeline(message_loop_.message_loop_proxy(),
+ : pipeline_(new Pipeline(message_loop_.task_runner(),
new MediaLog())),
demuxer_(new StrictMock<MockDemuxer>()),
scoped_renderer_(new StrictMock<MockRenderer>()),
diff --git a/chromium/media/base/renderer.h b/chromium/media/base/renderer.h
index 75372fba3a1..d5b323bb25e 100644
--- a/chromium/media/base/renderer.h
+++ b/chromium/media/base/renderer.h
@@ -20,9 +20,6 @@ class VideoFrame;
class MEDIA_EXPORT Renderer {
public:
- typedef base::Callback<void(const scoped_refptr<VideoFrame>&)> PaintCB;
- typedef base::Callback<base::TimeDelta()> TimeDeltaCB;
-
Renderer();
// Stops rendering and fires any pending callbacks.
diff --git a/chromium/media/base/test_helpers.cc b/chromium/media/base/test_helpers.cc
index bde12b4061a..f4ba9915007 100644
--- a/chromium/media/base/test_helpers.cc
+++ b/chromium/media/base/test_helpers.cc
@@ -27,6 +27,7 @@ class MockCallback : public base::RefCountedThreadSafe<MockCallback> {
public:
MockCallback();
MOCK_METHOD0(Run, void());
+ MOCK_METHOD1(RunWithBool, void(bool));
MOCK_METHOD1(RunWithStatus, void(PipelineStatus));
protected:
@@ -46,6 +47,12 @@ base::Closure NewExpectedClosure() {
return base::Bind(&MockCallback::Run, callback);
}
+base::Callback<void(bool)> NewExpectedBoolCB(bool success) {
+ StrictMock<MockCallback>* callback = new StrictMock<MockCallback>();
+ EXPECT_CALL(*callback, RunWithBool(success));
+ return base::Bind(&MockCallback::RunWithBool, callback);
+}
+
PipelineStatusCB NewExpectedStatusCB(PipelineStatus status) {
StrictMock<MockCallback>* callback = new StrictMock<MockCallback>();
EXPECT_CALL(*callback, RunWithStatus(status));
@@ -217,7 +224,7 @@ static const char kFakeVideoBufferHeader[] = "FakeVideoBufferForTest";
scoped_refptr<DecoderBuffer> CreateFakeVideoBufferForTest(
const VideoDecoderConfig& config,
base::TimeDelta timestamp, base::TimeDelta duration) {
- Pickle pickle;
+ base::Pickle pickle;
pickle.WriteString(kFakeVideoBufferHeader);
pickle.WriteInt(config.coded_size().width());
pickle.WriteInt(config.coded_size().height());
@@ -237,8 +244,8 @@ bool VerifyFakeVideoBufferForTest(
const scoped_refptr<DecoderBuffer>& buffer,
const VideoDecoderConfig& config) {
// Check if the input |buffer| matches the |config|.
- PickleIterator pickle(Pickle(reinterpret_cast<const char*>(buffer->data()),
- buffer->data_size()));
+ base::PickleIterator pickle(base::Pickle(
+ reinterpret_cast<const char*>(buffer->data()), buffer->data_size()));
std::string header;
int width = 0;
int height = 0;
diff --git a/chromium/media/base/test_helpers.h b/chromium/media/base/test_helpers.h
index 712812fc4f6..05ba898994d 100644
--- a/chromium/media/base/test_helpers.h
+++ b/chromium/media/base/test_helpers.h
@@ -28,6 +28,7 @@ class DecoderBuffer;
// Return a callback that expects to be run once.
base::Closure NewExpectedClosure();
+base::Callback<void(bool)> NewExpectedBoolCB(bool success);
PipelineStatusCB NewExpectedStatusCB(PipelineStatus status);
// Helper class for running a message loop until a callback has run. Useful for
diff --git a/chromium/media/base/text_renderer_unittest.cc b/chromium/media/base/text_renderer_unittest.cc
index 3b9e95ae01c..56074ee8995 100644
--- a/chromium/media/base/text_renderer_unittest.cc
+++ b/chromium/media/base/text_renderer_unittest.cc
@@ -54,7 +54,7 @@ class TextRendererTest : public testing::Test {
DCHECK(!text_renderer_);
text_renderer_.reset(
- new TextRenderer(message_loop_.message_loop_proxy(),
+ new TextRenderer(message_loop_.task_runner(),
base::Bind(&TextRendererTest::OnAddTextTrack,
base::Unretained(this))));
text_renderer_->Initialize(base::Bind(&TextRendererTest::OnEnd,
@@ -214,7 +214,7 @@ class TextRendererTest : public testing::Test {
TEST_F(TextRendererTest, CreateTextRendererNoInit) {
text_renderer_.reset(
- new TextRenderer(message_loop_.message_loop_proxy(),
+ new TextRenderer(message_loop_.task_runner(),
base::Bind(&TextRendererTest::OnAddTextTrack,
base::Unretained(this))));
text_renderer_.reset();
diff --git a/chromium/media/base/user_input_monitor.h b/chromium/media/base/user_input_monitor.h
index ab572694ed3..8016a009061 100644
--- a/chromium/media/base/user_input_monitor.h
+++ b/chromium/media/base/user_input_monitor.h
@@ -33,7 +33,7 @@ class MEDIA_EXPORT UserInputMonitor {
protected:
virtual ~MouseEventListener() {}
};
- typedef ObserverListThreadSafe<UserInputMonitor::MouseEventListener>
+ typedef base::ObserverListThreadSafe<UserInputMonitor::MouseEventListener>
MouseListenerList;
UserInputMonitor();
diff --git a/chromium/media/base/user_input_monitor_linux.cc b/chromium/media/base/user_input_monitor_linux.cc
index 55675ecb8c9..2be9b18e1b9 100644
--- a/chromium/media/base/user_input_monitor_linux.cc
+++ b/chromium/media/base/user_input_monitor_linux.cc
@@ -68,8 +68,8 @@ class UserInputMonitorLinuxCore
static void ProcessReply(XPointer self, XRecordInterceptData* data);
scoped_refptr<base::SingleThreadTaskRunner> io_task_runner_;
- scoped_refptr<ObserverListThreadSafe<UserInputMonitor::MouseEventListener> >
- mouse_listeners_;
+ scoped_refptr<base::ObserverListThreadSafe<
+ UserInputMonitor::MouseEventListener>> mouse_listeners_;
//
// The following members should only be accessed on the IO thread.
diff --git a/chromium/media/base/user_input_monitor_unittest.cc b/chromium/media/base/user_input_monitor_unittest.cc
index 55e6cca8076..b172048a50d 100644
--- a/chromium/media/base/user_input_monitor_unittest.cc
+++ b/chromium/media/base/user_input_monitor_unittest.cc
@@ -54,7 +54,7 @@ TEST(UserInputMonitorTest, CreatePlatformSpecific) {
base::RunLoop run_loop;
scoped_ptr<UserInputMonitor> monitor = UserInputMonitor::Create(
- message_loop.message_loop_proxy(), message_loop.message_loop_proxy());
+ message_loop.task_runner(), message_loop.task_runner());
if (!monitor)
return;
diff --git a/chromium/media/base/user_input_monitor_win.cc b/chromium/media/base/user_input_monitor_win.cc
index 62102367f7e..3e172c418d8 100644
--- a/chromium/media/base/user_input_monitor_win.cc
+++ b/chromium/media/base/user_input_monitor_win.cc
@@ -60,8 +60,8 @@ class UserInputMonitorWinCore
// Task runner on which |window_| is created.
scoped_refptr<base::SingleThreadTaskRunner> ui_task_runner_;
- scoped_refptr<ObserverListThreadSafe<UserInputMonitor::MouseEventListener> >
- mouse_listeners_;
+ scoped_refptr<base::ObserverListThreadSafe<
+ UserInputMonitor::MouseEventListener>> mouse_listeners_;
// These members are only accessed on the UI thread.
scoped_ptr<base::win::MessageWindow> window_;
diff --git a/chromium/media/base/video_capture_types.cc b/chromium/media/base/video_capture_types.cc
index f8463cfa5d6..1796bf2f64c 100644
--- a/chromium/media/base/video_capture_types.cc
+++ b/chromium/media/base/video_capture_types.cc
@@ -11,14 +11,29 @@
namespace media {
VideoCaptureFormat::VideoCaptureFormat()
- : frame_rate(0.0f), pixel_format(PIXEL_FORMAT_UNKNOWN) {}
+ : frame_rate(0.0f),
+ pixel_format(PIXEL_FORMAT_UNKNOWN),
+ pixel_storage(PIXEL_STORAGE_CPU) {
+}
VideoCaptureFormat::VideoCaptureFormat(const gfx::Size& frame_size,
float frame_rate,
VideoPixelFormat pixel_format)
: frame_size(frame_size),
frame_rate(frame_rate),
- pixel_format(pixel_format) {}
+ pixel_format(pixel_format),
+ pixel_storage(PIXEL_STORAGE_CPU) {
+}
+
+VideoCaptureFormat::VideoCaptureFormat(const gfx::Size& frame_size,
+ float frame_rate,
+ VideoPixelFormat pixel_format,
+ VideoPixelStorage pixel_storage)
+ : frame_size(frame_size),
+ frame_rate(frame_rate),
+ pixel_format(pixel_format),
+ pixel_storage(pixel_storage) {
+}
bool VideoCaptureFormat::IsValid() const {
return (frame_size.width() < media::limits::kMaxDimension) &&
@@ -27,8 +42,8 @@ bool VideoCaptureFormat::IsValid() const {
(frame_size.GetArea() < media::limits::kMaxCanvas) &&
(frame_rate >= 0.0f) &&
(frame_rate < media::limits::kMaxFramesPerSecond) &&
- (pixel_format >= 0) &&
- (pixel_format < PIXEL_FORMAT_MAX);
+ (pixel_storage != PIXEL_STORAGE_TEXTURE ||
+ pixel_format == PIXEL_FORMAT_ARGB);
}
size_t VideoCaptureFormat::ImageAllocationSize() const {
@@ -49,13 +64,9 @@ size_t VideoCaptureFormat::ImageAllocationSize() const {
break;
case PIXEL_FORMAT_RGB32:
case PIXEL_FORMAT_ARGB:
- // GpuMemoryBuffer is an endianness-agnostic 32bpp pixel format until
- // http://crbug.com/439520 is closed.
- case PIXEL_FORMAT_GPUMEMORYBUFFER:
result_frame_size *= 4;
break;
case PIXEL_FORMAT_MJPEG:
- case PIXEL_FORMAT_TEXTURE:
result_frame_size = 0;
break;
default: // Sizes for the rest of the formats are unknown.
@@ -65,49 +76,64 @@ size_t VideoCaptureFormat::ImageAllocationSize() const {
return result_frame_size;
}
-std::string VideoCaptureFormat::ToString() const {
- return base::StringPrintf("resolution: %s, fps: %.3f, pixel format: %s",
- frame_size.ToString().c_str(),
- frame_rate,
- PixelFormatToString(pixel_format).c_str());
+//static
+std::string VideoCaptureFormat::ToString(const VideoCaptureFormat& format) {
+ return base::StringPrintf("(%s)@%.3ffps, pixel format: %s storage: %s.",
+ format.frame_size.ToString().c_str(),
+ format.frame_rate,
+ PixelFormatToString(format.pixel_format).c_str(),
+ PixelStorageToString(format.pixel_storage).c_str());
}
+// static
std::string VideoCaptureFormat::PixelFormatToString(VideoPixelFormat format) {
switch (format) {
- case PIXEL_FORMAT_UNKNOWN:
- return "UNKNOWN";
- case PIXEL_FORMAT_I420:
- return "I420";
- case PIXEL_FORMAT_YUY2:
- return "YUY2";
- case PIXEL_FORMAT_UYVY:
- return "UYVY";
- case PIXEL_FORMAT_RGB24:
- return "RGB24";
- case PIXEL_FORMAT_RGB32:
- return "RGB32";
- case PIXEL_FORMAT_ARGB:
- return "ARGB";
- case PIXEL_FORMAT_MJPEG:
- return "MJPEG";
- case PIXEL_FORMAT_NV12:
- return "NV12";
- case PIXEL_FORMAT_NV21:
- return "NV21";
- case PIXEL_FORMAT_YV12:
- return "YV12";
- case PIXEL_FORMAT_TEXTURE:
- return "TEXTURE";
- case PIXEL_FORMAT_GPUMEMORYBUFFER:
- return "GPUMEMORYBUFFER";
- case PIXEL_FORMAT_MAX:
- break;
+ case PIXEL_FORMAT_UNKNOWN:
+ return "UNKNOWN";
+ case PIXEL_FORMAT_I420:
+ return "I420";
+ case PIXEL_FORMAT_YUY2:
+ return "YUY2";
+ case PIXEL_FORMAT_UYVY:
+ return "UYVY";
+ case PIXEL_FORMAT_RGB24:
+ return "RGB24";
+ case PIXEL_FORMAT_RGB32:
+ return "RGB32";
+ case PIXEL_FORMAT_ARGB:
+ return "ARGB";
+ case PIXEL_FORMAT_MJPEG:
+ return "MJPEG";
+ case PIXEL_FORMAT_NV12:
+ return "NV12";
+ case PIXEL_FORMAT_NV21:
+ return "NV21";
+ case PIXEL_FORMAT_YV12:
+ return "YV12";
}
NOTREACHED() << "Invalid VideoPixelFormat provided: " << format;
- return "";
+ return std::string();
+}
+
+// static
+std::string VideoCaptureFormat::PixelStorageToString(
+ VideoPixelStorage storage) {
+ switch (storage) {
+ case PIXEL_STORAGE_CPU:
+ return "CPU";
+ case PIXEL_STORAGE_TEXTURE:
+ return "TEXTURE";
+ case PIXEL_STORAGE_GPUMEMORYBUFFER:
+ return "GPUMEMORYBUFFER";
+ }
+ NOTREACHED() << "Invalid VideoPixelStorage provided: "
+ << static_cast<int>(storage);
+ return std::string();
}
VideoCaptureParams::VideoCaptureParams()
- : resolution_change_policy(RESOLUTION_POLICY_FIXED_RESOLUTION) {}
+ : resolution_change_policy(RESOLUTION_POLICY_FIXED_RESOLUTION),
+ use_gpu_memory_buffers(false) {
+}
} // namespace media
diff --git a/chromium/media/base/video_capture_types.h b/chromium/media/base/video_capture_types.h
index c849ed7b35f..2a8c243f27f 100644
--- a/chromium/media/base/video_capture_types.h
+++ b/chromium/media/base/video_capture_types.h
@@ -7,6 +7,7 @@
#include <vector>
+#include "build/build_config.h"
#include "media/base/media_export.h"
#include "ui/gfx/geometry/size.h"
@@ -17,6 +18,8 @@ namespace media {
typedef int VideoCaptureSessionId;
// Color formats from camera. This list is sorted in order of preference.
+// TODO(mcasas): Consider if this list can be merged with media::Format.
+// TODO(mcasas): http://crbug.com/504160 Consider making this an enum class.
enum VideoPixelFormat {
PIXEL_FORMAT_I420,
PIXEL_FORMAT_YV12,
@@ -28,10 +31,19 @@ enum VideoPixelFormat {
PIXEL_FORMAT_RGB32,
PIXEL_FORMAT_ARGB,
PIXEL_FORMAT_MJPEG,
- PIXEL_FORMAT_TEXTURE, // Capture format as a GL texture.
- PIXEL_FORMAT_GPUMEMORYBUFFER,
PIXEL_FORMAT_UNKNOWN, // Color format not set.
- PIXEL_FORMAT_MAX,
+ PIXEL_FORMAT_MAX = PIXEL_FORMAT_UNKNOWN,
+};
+
+// Storage type for the pixels. In principle, all combinations of Storage and
+// Format are possible, though some are very typical, such as texture + ARGB,
+// and others are only available if the platform allows it e.g. GpuMemoryBuffer.
+// TODO(mcasas): http://crbug.com/504160 Consider making this an enum class.
+enum VideoPixelStorage {
+ PIXEL_STORAGE_CPU,
+ PIXEL_STORAGE_TEXTURE,
+ PIXEL_STORAGE_GPUMEMORYBUFFER,
+ PIXEL_STORAGE_MAX = PIXEL_STORAGE_GPUMEMORYBUFFER,
};
// Policies for capture devices that have source content that varies in size.
@@ -53,7 +65,8 @@ enum ResolutionChangePolicy {
// exceeding the maximum dimensions specified.
RESOLUTION_POLICY_ANY_WITHIN_LIMIT,
- RESOLUTION_POLICY_LAST,
+ // Must always be equal to largest entry in the enum.
+ RESOLUTION_POLICY_LAST = RESOLUTION_POLICY_ANY_WITHIN_LIMIT,
};
// Some drivers use rational time per frame instead of float frame rate, this
@@ -64,15 +77,19 @@ const int kFrameRatePrecision = 10000;
// This class is used by the video capture device to specify the format of every
// frame captured and returned to a client. It is also used to specify a
// supported capture format by a device.
-class MEDIA_EXPORT VideoCaptureFormat {
- public:
+struct MEDIA_EXPORT VideoCaptureFormat {
VideoCaptureFormat();
VideoCaptureFormat(const gfx::Size& frame_size,
float frame_rate,
VideoPixelFormat pixel_format);
+ VideoCaptureFormat(const gfx::Size& frame_size,
+ float frame_rate,
+ VideoPixelFormat pixel_format,
+ VideoPixelStorage pixel_storage);
- std::string ToString() const;
+ static std::string ToString(const VideoCaptureFormat& format);
static std::string PixelFormatToString(VideoPixelFormat format);
+ static std::string PixelStorageToString(VideoPixelStorage storage);
// Returns the required buffer size to hold an image of a given
// VideoCaptureFormat with no padding and tightly packed.
@@ -91,6 +108,7 @@ class MEDIA_EXPORT VideoCaptureFormat {
gfx::Size frame_size;
float frame_rate;
VideoPixelFormat pixel_format;
+ VideoPixelStorage pixel_storage;
};
typedef std::vector<VideoCaptureFormat> VideoCaptureFormats;
@@ -99,13 +117,13 @@ typedef std::vector<VideoCaptureFormat> VideoCaptureFormats;
// This class is used by the client of a video capture device to specify the
// format of frames in which the client would like to have captured frames
// returned.
-class MEDIA_EXPORT VideoCaptureParams {
- public:
+struct MEDIA_EXPORT VideoCaptureParams {
VideoCaptureParams();
bool operator==(const VideoCaptureParams& other) const {
return requested_format == other.requested_format &&
- resolution_change_policy == other.resolution_change_policy;
+ use_gpu_memory_buffers == other.use_gpu_memory_buffers &&
+ resolution_change_policy == other.resolution_change_policy;
}
// Requests a resolution and format at which the capture will occur.
@@ -113,6 +131,9 @@ class MEDIA_EXPORT VideoCaptureParams {
// Policy for resolution change.
ResolutionChangePolicy resolution_change_policy;
+
+ // Indication to the Driver to try to use GpuMemoryBuffers.
+ bool use_gpu_memory_buffers;
};
} // namespace media
diff --git a/chromium/media/base/video_decoder.h b/chromium/media/base/video_decoder.h
index c49e0dcdbd2..f7bae3cce46 100644
--- a/chromium/media/base/video_decoder.h
+++ b/chromium/media/base/video_decoder.h
@@ -25,12 +25,14 @@ class MEDIA_EXPORT VideoDecoder {
// TODO(rileya): Now that both AudioDecoder and VideoDecoder Status enums
// match, break them into a decoder_status.h.
enum Status {
- kOk, // Everything went as planned.
- kAborted, // Decode was aborted as a result of Reset() being called.
- kDecodeError, // Decoding error happened.
- kDecryptError // Decrypting error happened.
+ kOk, // Everything went as planned.
+ kAborted, // Decode was aborted as a result of Reset() being called.
+ kDecodeError // Decoding error happened.
};
+ // Callback for VideoDecoder initialization.
+ typedef base::Callback<void(bool success)> InitCB;
+
// Callback for VideoDecoder to return a decoded frame whenever it becomes
// available. Only non-EOS frames should be returned via this callback.
typedef base::Callback<void(const scoped_refptr<VideoFrame>&)> OutputCB;
@@ -52,7 +54,7 @@ class MEDIA_EXPORT VideoDecoder {
virtual std::string GetDisplayName() const = 0;
// Initializes a VideoDecoder with the given |config|, executing the
- // |status_cb| upon completion. |output_cb| is called for each output frame
+ // |init_cb| upon completion. |output_cb| is called for each output frame
// decoded by Decode().
//
// If |low_delay| is true then the decoder is not allowed to queue frames,
@@ -65,10 +67,10 @@ class MEDIA_EXPORT VideoDecoder {
// 1) The VideoDecoder will be reinitialized if it was initialized before.
// Upon reinitialization, all internal buffered frames will be dropped.
// 2) This method should not be called during pending decode or reset.
- // 3) No VideoDecoder calls should be made before |status_cb| is executed.
+ // 3) No VideoDecoder calls should be made before |init_cb| is executed.
virtual void Initialize(const VideoDecoderConfig& config,
bool low_delay,
- const PipelineStatusCB& status_cb,
+ const InitCB& init_cb,
const OutputCB& output_cb) = 0;
// Requests a |buffer| to be decoded. The status of the decoder and decoded
diff --git a/chromium/media/base/video_decoder_config.cc b/chromium/media/base/video_decoder_config.cc
index 27fe9f59b9f..8825e2d615d 100644
--- a/chromium/media/base/video_decoder_config.cc
+++ b/chromium/media/base/video_decoder_config.cc
@@ -25,8 +25,9 @@ VideoDecoderConfig::VideoDecoderConfig(VideoCodec codec,
const uint8* extra_data,
size_t extra_data_size,
bool is_encrypted) {
- Initialize(codec, profile, format, coded_size, visible_rect, natural_size,
- extra_data, extra_data_size, is_encrypted, true);
+ Initialize(codec, profile, format, VideoFrame::COLOR_SPACE_UNSPECIFIED,
+ coded_size, visible_rect, natural_size, extra_data,
+ extra_data_size, is_encrypted, true);
}
VideoDecoderConfig::~VideoDecoderConfig() {}
@@ -56,6 +57,7 @@ static void UmaHistogramAspectRatio(const char* name, const T& size) {
void VideoDecoderConfig::Initialize(VideoCodec codec,
VideoCodecProfile profile,
VideoFrame::Format format,
+ VideoFrame::ColorSpace color_space,
const gfx::Size& coded_size,
const gfx::Rect& visible_rect,
const gfx::Size& natural_size,
@@ -76,8 +78,10 @@ void VideoDecoderConfig::Initialize(VideoCodec codec,
UmaHistogramAspectRatio("Media.VideoCodedAspectRatio", coded_size);
UMA_HISTOGRAM_COUNTS_10000("Media.VideoVisibleWidth", visible_rect.width());
UmaHistogramAspectRatio("Media.VideoVisibleAspectRatio", visible_rect);
- UMA_HISTOGRAM_ENUMERATION(
- "Media.VideoPixelFormat", format, VideoFrame::FORMAT_MAX + 1);
+ UMA_HISTOGRAM_ENUMERATION("Media.VideoFramePixelFormat", format,
+ VideoFrame::FORMAT_MAX + 1);
+ UMA_HISTOGRAM_ENUMERATION("Media.VideoFrameColorSpace", color_space,
+ VideoFrame::COLOR_SPACE_MAX + 1);
}
codec_ = codec;
@@ -94,8 +98,8 @@ bool VideoDecoderConfig::IsValidConfig() const {
return codec_ != kUnknownVideoCodec &&
natural_size_.width() > 0 &&
natural_size_.height() > 0 &&
- VideoFrame::IsValidConfig(format_, coded_size_, visible_rect_,
- natural_size_);
+ VideoFrame::IsValidConfig(format_, VideoFrame::STORAGE_UNOWNED_MEMORY,
+ coded_size_, visible_rect_, natural_size_);
}
bool VideoDecoderConfig::Matches(const VideoDecoderConfig& config) const {
diff --git a/chromium/media/base/video_decoder_config.h b/chromium/media/base/video_decoder_config.h
index 356b467f640..00de84e2fb5 100644
--- a/chromium/media/base/video_decoder_config.h
+++ b/chromium/media/base/video_decoder_config.h
@@ -90,6 +90,7 @@ class MEDIA_EXPORT VideoDecoderConfig {
void Initialize(VideoCodec codec,
VideoCodecProfile profile,
VideoFrame::Format format,
+ VideoFrame::ColorSpace color_space,
const gfx::Size& coded_size,
const gfx::Rect& visible_rect,
const gfx::Size& natural_size,
diff --git a/chromium/media/base/video_frame.cc b/chromium/media/base/video_frame.cc
index cae98705170..1d36743d49a 100644
--- a/chromium/media/base/video_frame.cc
+++ b/chromium/media/base/video_frame.cc
@@ -31,10 +31,31 @@ static inline size_t RoundDown(size_t value, size_t alignment) {
return value & ~(alignment - 1);
}
+// Returns true if |plane| is a valid plane index for the given |format|.
+static bool IsValidPlane(size_t plane, VideoFrame::Format format) {
+ DCHECK_LE(VideoFrame::NumPlanes(format),
+ static_cast<size_t>(VideoFrame::kMaxPlanes));
+ return (plane < VideoFrame::NumPlanes(format));
+}
+
+// Returns true if frames with |storage_type| are mappable (accessible) in the VideoFrame memory space.
+// static
+static bool IsStorageTypeMappable(VideoFrame::StorageType storage_type) {
+ return
+#if defined(OS_LINUX)
+ // This is not strictly needed but makes explicit that, at VideoFrame
+ // level, DmaBufs are not mappable from userspace.
+ storage_type != VideoFrame::STORAGE_DMABUFS &&
+#endif
+ (storage_type == VideoFrame::STORAGE_UNOWNED_MEMORY ||
+ storage_type == VideoFrame::STORAGE_OWNED_MEMORY ||
+ storage_type == VideoFrame::STORAGE_SHMEM);
+}
+
// Returns the pixel size per element for given |plane| and |format|. E.g. 2x2
// for the U-plane in I420.
static gfx::Size SampleSize(VideoFrame::Format format, size_t plane) {
- DCHECK(VideoFrame::IsValidPlane(plane, format));
+ DCHECK(IsValidPlane(plane, format));
switch (plane) {
case VideoFrame::kYPlane:
@@ -51,19 +72,16 @@ static gfx::Size SampleSize(VideoFrame::Format format, size_t plane) {
return gfx::Size(2, 1);
case VideoFrame::YV12:
- case VideoFrame::YV12J:
- case VideoFrame::YV12HD:
case VideoFrame::I420:
case VideoFrame::YV12A:
+#if defined(OS_MACOSX) || defined(OS_CHROMEOS)
case VideoFrame::NV12:
+#endif
return gfx::Size(2, 2);
case VideoFrame::UNKNOWN:
-#if defined(VIDEO_HOLE)
- case VideoFrame::HOLE:
-#endif // defined(VIDEO_HOLE)
- case VideoFrame::NATIVE_TEXTURE:
case VideoFrame::ARGB:
+ case VideoFrame::XRGB:
break;
}
}
@@ -84,15 +102,16 @@ static gfx::Size CommonAlignment(VideoFrame::Format format) {
return gfx::Size(max_sample_width, max_sample_height);
}
-// Returns the number of bytes per element for given |plane| and |format|. E.g.
-// 2 for the UV plane in NV12.
+// Returns the number of bytes per element for given |plane| and |format|.
static int BytesPerElement(VideoFrame::Format format, size_t plane) {
- DCHECK(VideoFrame::IsValidPlane(plane, format));
- if (format == VideoFrame::ARGB)
+ DCHECK(IsValidPlane(plane, format));
+ if (format == VideoFrame::ARGB || format == VideoFrame::XRGB)
return 4;
+#if defined(OS_MACOSX) || defined(OS_CHROMEOS)
if (format == VideoFrame::NV12 && plane == VideoFrame::kUVPlane)
return 2;
+#endif
return 1;
}
@@ -106,83 +125,57 @@ static gfx::Size AdjustCodedSize(VideoFrame::Format format,
}
// static
-scoped_refptr<VideoFrame> VideoFrame::CreateFrame(
- VideoFrame::Format format,
- const gfx::Size& coded_size,
- const gfx::Rect& visible_rect,
- const gfx::Size& natural_size,
- base::TimeDelta timestamp) {
+std::string VideoFrame::FormatToString(Format format) {
switch (format) {
- case VideoFrame::YV12:
- case VideoFrame::YV16:
- case VideoFrame::I420:
- case VideoFrame::YV12A:
- case VideoFrame::YV12J:
- case VideoFrame::YV24:
- case VideoFrame::YV12HD:
- break;
-
- case VideoFrame::UNKNOWN:
- case VideoFrame::NV12:
- case VideoFrame::NATIVE_TEXTURE:
-#if defined(VIDEO_HOLE)
- case VideoFrame::HOLE:
-#endif // defined(VIDEO_HOLE)
- case VideoFrame::ARGB:
- NOTIMPLEMENTED();
- return nullptr;
- }
-
- // Since we're creating a new YUV frame (and allocating memory for it
- // ourselves), we can pad the requested |coded_size| if necessary if the
- // request does not line up on sample boundaries.
- const gfx::Size new_coded_size = AdjustCodedSize(format, coded_size);
- DCHECK(IsValidConfig(format, new_coded_size, visible_rect, natural_size));
-
- gpu::MailboxHolder mailboxes[kMaxPlanes];
- scoped_refptr<VideoFrame> frame(
- new VideoFrame(format, new_coded_size, visible_rect, natural_size,
- mailboxes, TEXTURE_RGBA, timestamp, false));
- frame->AllocateYUV();
- return frame;
-}
-
-// static
-std::string VideoFrame::FormatToString(VideoFrame::Format format) {
- switch (format) {
- case VideoFrame::UNKNOWN:
+ case UNKNOWN:
return "UNKNOWN";
- case VideoFrame::YV12:
+ case YV12:
return "YV12";
- case VideoFrame::YV16:
+ case YV16:
return "YV16";
- case VideoFrame::I420:
+ case I420:
return "I420";
- case VideoFrame::NATIVE_TEXTURE:
- return "NATIVE_TEXTURE";
-#if defined(VIDEO_HOLE)
- case VideoFrame::HOLE:
- return "HOLE";
-#endif // defined(VIDEO_HOLE)
- case VideoFrame::YV12A:
+ case YV12A:
return "YV12A";
- case VideoFrame::YV12J:
- return "YV12J";
- case VideoFrame::NV12:
- return "NV12";
- case VideoFrame::YV24:
+ case YV24:
return "YV24";
- case VideoFrame::ARGB:
+ case ARGB:
return "ARGB";
- case VideoFrame::YV12HD:
- return "YV12HD";
+ case XRGB:
+ return "XRGB";
+#if defined(OS_MACOSX) || defined(OS_CHROMEOS)
+ case NV12:
+ return "NV12";
+#endif
}
- NOTREACHED() << "Invalid videoframe format provided: " << format;
+ NOTREACHED() << "Invalid VideoFrame format provided: " << format;
return "";
}
// static
-bool VideoFrame::IsValidConfig(VideoFrame::Format format,
+bool VideoFrame::IsYuvPlanar(Format format) {
+ switch (format) {
+ case YV12:
+ case I420:
+ case YV16:
+ case YV12A:
+ case YV24:
+#if defined(OS_MACOSX) || defined(OS_CHROMEOS)
+ case NV12:
+#endif
+ return true;
+
+ case UNKNOWN:
+ case ARGB:
+ case XRGB:
+ return false;
+ }
+ return false;
+}
+
+// static
+bool VideoFrame::IsValidConfig(Format format,
+ StorageType storage_type,
const gfx::Size& coded_size,
const gfx::Rect& visible_rect,
const gfx::Size& natural_size) {
@@ -198,29 +191,26 @@ bool VideoFrame::IsValidConfig(VideoFrame::Format format,
natural_size.height() > limits::kMaxDimension)
return false;
+ // TODO(mcasas): Remove parameter |storage_type| when the opaque storage types
+ // comply with the checks below. Right now we skip them.
+ if (!IsStorageTypeMappable(storage_type))
+ return true;
+
// Check format-specific width/height requirements.
switch (format) {
- case VideoFrame::UNKNOWN:
+ case UNKNOWN:
return (coded_size.IsEmpty() && visible_rect.IsEmpty() &&
natural_size.IsEmpty());
-
- // NATIVE_TEXTURE and HOLE have no software-allocated buffers and are
- // allowed to skip the below check.
- case VideoFrame::NATIVE_TEXTURE:
-#if defined(VIDEO_HOLE)
- case VideoFrame::HOLE:
-#endif // defined(VIDEO_HOLE)
- return true;
-
- case VideoFrame::YV24:
- case VideoFrame::YV12:
- case VideoFrame::YV12J:
- case VideoFrame::I420:
- case VideoFrame::YV12A:
- case VideoFrame::NV12:
- case VideoFrame::YV12HD:
- case VideoFrame::YV16:
- case VideoFrame::ARGB:
+ case YV24:
+ case YV12:
+ case I420:
+ case YV12A:
+ case YV16:
+ case ARGB:
+ case XRGB:
+#if defined(OS_MACOSX) || defined(OS_CHROMEOS)
+ case NV12:
+#endif
// Check that software-allocated buffer formats are aligned correctly and
// not empty.
const gfx::Size alignment = CommonAlignment(format);
@@ -232,29 +222,56 @@ bool VideoFrame::IsValidConfig(VideoFrame::Format format,
!natural_size.IsEmpty();
}
+ // TODO(mcasas): Check that storage type and underlying mailboxes/dataptr are
+ // matching.
NOTREACHED();
return false;
}
// static
+scoped_refptr<VideoFrame> VideoFrame::CreateFrame(Format format,
+ const gfx::Size& coded_size,
+ const gfx::Rect& visible_rect,
+ const gfx::Size& natural_size,
+ base::TimeDelta timestamp) {
+ if (!IsYuvPlanar(format)) {
+ NOTIMPLEMENTED();
+ return nullptr;
+ }
+
+ // Since we're creating a new YUV frame (and allocating memory for it
+ // ourselves), we can pad the requested |coded_size| if necessary if the
+ // request does not line up on sample boundaries.
+ const gfx::Size new_coded_size = AdjustCodedSize(format, coded_size);
+ DCHECK(IsValidConfig(format, STORAGE_OWNED_MEMORY, new_coded_size,
+ visible_rect, natural_size));
+
+ scoped_refptr<VideoFrame> frame(new VideoFrame(format, STORAGE_OWNED_MEMORY,
+ new_coded_size, visible_rect,
+ natural_size, timestamp));
+ frame->AllocateYUV();
+ return frame;
+}
+
+// static
scoped_refptr<VideoFrame> VideoFrame::WrapNativeTexture(
+ Format format,
const gpu::MailboxHolder& mailbox_holder,
const ReleaseMailboxCB& mailbox_holder_release_cb,
const gfx::Size& coded_size,
const gfx::Rect& visible_rect,
const gfx::Size& natural_size,
- base::TimeDelta timestamp,
- bool allow_overlay,
- bool has_alpha) {
+ base::TimeDelta timestamp) {
+ if (format != ARGB) {
+ DLOG(ERROR) << "Only ARGB pixel format supported, got "
+ << FormatToString(format);
+ return nullptr;
+ }
gpu::MailboxHolder mailbox_holders[kMaxPlanes];
mailbox_holders[kARGBPlane] = mailbox_holder;
- TextureFormat texture_format = has_alpha ? TEXTURE_RGBA : TEXTURE_RGB;
- scoped_refptr<VideoFrame> frame(
- new VideoFrame(NATIVE_TEXTURE, coded_size, visible_rect, natural_size,
- mailbox_holders, texture_format, timestamp, false));
- frame->mailbox_holders_release_cb_ = mailbox_holder_release_cb;
- frame->allow_overlay_ = allow_overlay;
- return frame;
+ return new VideoFrame(format, STORAGE_OPAQUE, coded_size,
+ visible_rect, natural_size, mailbox_holders,
+ mailbox_holder_release_cb, timestamp);
}
// static
@@ -266,22 +283,32 @@ scoped_refptr<VideoFrame> VideoFrame::WrapYUV420NativeTextures(
const gfx::Size& coded_size,
const gfx::Rect& visible_rect,
const gfx::Size& natural_size,
- base::TimeDelta timestamp,
- bool allow_overlay) {
+ base::TimeDelta timestamp) {
gpu::MailboxHolder mailbox_holders[kMaxPlanes];
mailbox_holders[kYPlane] = y_mailbox_holder;
mailbox_holders[kUPlane] = u_mailbox_holder;
mailbox_holders[kVPlane] = v_mailbox_holder;
- scoped_refptr<VideoFrame> frame(
- new VideoFrame(NATIVE_TEXTURE, coded_size, visible_rect, natural_size,
- mailbox_holders, TEXTURE_YUV_420, timestamp, false));
- frame->mailbox_holders_release_cb_ = mailbox_holder_release_cb;
- frame->allow_overlay_ = allow_overlay;
- return frame;
+ return new VideoFrame(I420, STORAGE_OPAQUE, coded_size, visible_rect,
+ natural_size, mailbox_holders,
+ mailbox_holder_release_cb, timestamp);
}
// static
-scoped_refptr<VideoFrame> VideoFrame::WrapExternalPackedMemory(
+scoped_refptr<VideoFrame> VideoFrame::WrapExternalData(
+ Format format,
+ const gfx::Size& coded_size,
+ const gfx::Rect& visible_rect,
+ const gfx::Size& natural_size,
+ uint8* data,
+ size_t data_size,
+ base::TimeDelta timestamp) {
+ return WrapExternalStorage(format, STORAGE_UNOWNED_MEMORY, coded_size,
+ visible_rect, natural_size, data, data_size,
+ timestamp, base::SharedMemory::NULLHandle(), 0);
+}
+
+// static
+scoped_refptr<VideoFrame> VideoFrame::WrapExternalSharedMemory(
Format format,
const gfx::Size& coded_size,
const gfx::Rect& visible_rect,
@@ -290,36 +317,10 @@ scoped_refptr<VideoFrame> VideoFrame::WrapExternalPackedMemory(
size_t data_size,
base::SharedMemoryHandle handle,
size_t data_offset,
- base::TimeDelta timestamp,
- const base::Closure& no_longer_needed_cb) {
- const gfx::Size new_coded_size = AdjustCodedSize(format, coded_size);
-
- if (!IsValidConfig(format, new_coded_size, visible_rect, natural_size))
- return NULL;
- if (data_size < AllocationSize(format, new_coded_size))
- return NULL;
-
- switch (format) {
- case VideoFrame::I420: {
- gpu::MailboxHolder mailbox_holders[kMaxPlanes];
- scoped_refptr<VideoFrame> frame(
- new VideoFrame(format, new_coded_size, visible_rect, natural_size,
- mailbox_holders, TEXTURE_RGBA, timestamp, false));
- frame->shared_memory_handle_ = handle;
- frame->shared_memory_offset_ = data_offset;
- frame->strides_[kYPlane] = new_coded_size.width();
- frame->strides_[kUPlane] = new_coded_size.width() / 2;
- frame->strides_[kVPlane] = new_coded_size.width() / 2;
- frame->data_[kYPlane] = data;
- frame->data_[kUPlane] = data + new_coded_size.GetArea();
- frame->data_[kVPlane] = data + (new_coded_size.GetArea() * 5 / 4);
- frame->no_longer_needed_cb_ = no_longer_needed_cb;
- return frame;
- }
- default:
- NOTIMPLEMENTED();
- return NULL;
- }
+ base::TimeDelta timestamp) {
+ return WrapExternalStorage(format, STORAGE_SHMEM, coded_size, visible_rect,
+ natural_size, data, data_size, timestamp, handle,
+ data_offset);
}
// static
@@ -334,66 +335,47 @@ scoped_refptr<VideoFrame> VideoFrame::WrapExternalYuvData(
uint8* y_data,
uint8* u_data,
uint8* v_data,
- base::TimeDelta timestamp,
- const base::Closure& no_longer_needed_cb) {
+ base::TimeDelta timestamp) {
const gfx::Size new_coded_size = AdjustCodedSize(format, coded_size);
- CHECK(IsValidConfig(format, new_coded_size, visible_rect, natural_size));
+ CHECK(IsValidConfig(format, STORAGE_UNOWNED_MEMORY, new_coded_size,
+ visible_rect, natural_size));
- gpu::MailboxHolder mailbox_holders[kMaxPlanes];
- scoped_refptr<VideoFrame> frame(
- new VideoFrame(format, new_coded_size, visible_rect, natural_size,
- mailbox_holders, TEXTURE_RGBA, timestamp, false));
+ scoped_refptr<VideoFrame> frame(new VideoFrame(format, STORAGE_UNOWNED_MEMORY,
+ new_coded_size, visible_rect,
+ natural_size, timestamp));
frame->strides_[kYPlane] = y_stride;
frame->strides_[kUPlane] = u_stride;
frame->strides_[kVPlane] = v_stride;
frame->data_[kYPlane] = y_data;
frame->data_[kUPlane] = u_data;
frame->data_[kVPlane] = v_data;
- frame->no_longer_needed_cb_ = no_longer_needed_cb;
return frame;
}
-#if defined(OS_POSIX)
+#if defined(OS_LINUX)
// static
scoped_refptr<VideoFrame> VideoFrame::WrapExternalDmabufs(
Format format,
const gfx::Size& coded_size,
const gfx::Rect& visible_rect,
const gfx::Size& natural_size,
- const std::vector<int> dmabuf_fds,
- base::TimeDelta timestamp,
- const base::Closure& no_longer_needed_cb) {
- if (!IsValidConfig(format, coded_size, visible_rect, natural_size))
- return NULL;
+ const std::vector<int>& dmabuf_fds,
+ base::TimeDelta timestamp) {
+#if defined(OS_CHROMEOS)
+ DCHECK_EQ(format, NV12);
+#endif
- // TODO(posciak): This is not exactly correct, it's possible for one
- // buffer to contain more than one plane.
- if (dmabuf_fds.size() != NumPlanes(format)) {
- LOG(FATAL) << "Not enough dmabuf fds provided!";
- return NULL;
+ if (!IsValidConfig(format, STORAGE_DMABUFS, coded_size, visible_rect,
+ natural_size)) {
+ return nullptr;
}
-
gpu::MailboxHolder mailbox_holders[kMaxPlanes];
- scoped_refptr<VideoFrame> frame(
- new VideoFrame(format, coded_size, visible_rect, natural_size,
- mailbox_holders, TEXTURE_RGBA, timestamp, false));
-
- for (size_t i = 0; i < dmabuf_fds.size(); ++i) {
- int duped_fd = HANDLE_EINTR(dup(dmabuf_fds[i]));
- if (duped_fd == -1) {
- // The already-duped in previous iterations fds will be closed when
- // the partially-created frame drops out of scope here.
- DLOG(ERROR) << "Failed duplicating a dmabuf fd";
- return NULL;
- }
-
- frame->dmabuf_fds_[i].reset(duped_fd);
- // Data is accessible only via fds.
- frame->data_[i] = NULL;
- frame->strides_[i] = 0;
- }
-
- frame->no_longer_needed_cb_ = no_longer_needed_cb;
+ scoped_refptr<VideoFrame> frame =
+ new VideoFrame(format, STORAGE_DMABUFS, coded_size, visible_rect,
+ natural_size, mailbox_holders, ReleaseMailboxCB(),
+ timestamp);
+ if (!frame || !frame->DuplicateFileDescriptors(dmabuf_fds))
+ return nullptr;
return frame;
}
#endif
@@ -410,13 +392,13 @@ scoped_refptr<VideoFrame> VideoFrame::WrapCVPixelBuffer(
Format format;
// There are very few compatible CV pixel formats, so just check each.
if (cv_format == kCVPixelFormatType_420YpCbCr8Planar) {
- format = Format::I420;
+ format = I420;
} else if (cv_format == kCVPixelFormatType_444YpCbCr8) {
- format = Format::YV24;
+ format = YV24;
} else if (cv_format == '420v') {
// TODO(jfroy): Use kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange when the
// minimum OS X and iOS SDKs permits it.
- format = Format::NV12;
+ format = NV12;
} else {
DLOG(ERROR) << "CVPixelBuffer format not supported: " << cv_format;
return NULL;
@@ -426,13 +408,14 @@ scoped_refptr<VideoFrame> VideoFrame::WrapCVPixelBuffer(
const gfx::Rect visible_rect(CVImageBufferGetCleanRect(cv_pixel_buffer));
const gfx::Size natural_size(CVImageBufferGetDisplaySize(cv_pixel_buffer));
- if (!IsValidConfig(format, coded_size, visible_rect, natural_size))
+ if (!IsValidConfig(format, STORAGE_UNOWNED_MEMORY,
+ coded_size, visible_rect, natural_size)) {
return NULL;
+ }
- gpu::MailboxHolder mailbox_holders[kMaxPlanes];
- scoped_refptr<VideoFrame> frame(
- new VideoFrame(format, coded_size, visible_rect, natural_size,
- mailbox_holders, TEXTURE_RGBA, timestamp, false));
+ scoped_refptr<VideoFrame> frame(new VideoFrame(format, STORAGE_UNOWNED_MEMORY,
+ coded_size, visible_rect,
+ natural_size, timestamp));
frame->cv_pixel_buffer_.reset(cv_pixel_buffer, base::scoped_policy::RETAIN);
return frame;
@@ -443,34 +426,46 @@ scoped_refptr<VideoFrame> VideoFrame::WrapCVPixelBuffer(
scoped_refptr<VideoFrame> VideoFrame::WrapVideoFrame(
const scoped_refptr<VideoFrame>& frame,
const gfx::Rect& visible_rect,
- const gfx::Size& natural_size,
- const base::Closure& no_longer_needed_cb) {
- // NATIVE_TEXTURE frames need mailbox info propagated, and there's no support
+ const gfx::Size& natural_size) {
+ // Frames with textures need mailbox info propagated, and there's no support
// for that here yet, see http://crbug/362521.
- CHECK_NE(frame->format(), NATIVE_TEXTURE);
+ CHECK(!frame->HasTextures());
DCHECK(frame->visible_rect().Contains(visible_rect));
- gpu::MailboxHolder mailbox_holders[kMaxPlanes];
- scoped_refptr<VideoFrame> wrapped_frame(
- new VideoFrame(frame->format(), frame->coded_size(), visible_rect,
- natural_size, mailbox_holders, TEXTURE_RGBA,
- frame->timestamp(), frame->end_of_stream()));
+ scoped_refptr<VideoFrame> wrapping_frame(new VideoFrame(
+ frame->format(), frame->storage_type(), frame->coded_size(), visible_rect,
+ natural_size, frame->timestamp()));
+ if (frame->metadata()->IsTrue(VideoFrameMetadata::END_OF_STREAM)) {
+ wrapping_frame->metadata()->SetBoolean(VideoFrameMetadata::END_OF_STREAM,
+ true);
+ }
for (size_t i = 0; i < NumPlanes(frame->format()); ++i) {
- wrapped_frame->strides_[i] = frame->stride(i);
- wrapped_frame->data_[i] = frame->data(i);
+ wrapping_frame->strides_[i] = frame->stride(i);
+ wrapping_frame->data_[i] = frame->data(i);
}
- wrapped_frame->no_longer_needed_cb_ = no_longer_needed_cb;
- return wrapped_frame;
+#if defined(OS_LINUX)
+ // If there are any |dmabuf_fds_| plugged in, we should duplicate them.
+ if (frame->storage_type() == STORAGE_DMABUFS) {
+ std::vector<int> original_fds;
+ for (size_t i = 0; i < kMaxPlanes; ++i)
+ original_fds.push_back(frame->dmabuf_fd(i));
+ if (!wrapping_frame->DuplicateFileDescriptors(original_fds))
+ return nullptr;
+ }
+#endif
+
+ return wrapping_frame;
}
// static
scoped_refptr<VideoFrame> VideoFrame::CreateEOSFrame() {
- gpu::MailboxHolder mailbox_holders[kMaxPlanes];
- return new VideoFrame(VideoFrame::UNKNOWN, gfx::Size(), gfx::Rect(),
- gfx::Size(), mailbox_holders, TEXTURE_RGBA,
- kNoTimestamp(), true);
+ scoped_refptr<VideoFrame> frame =
+ new VideoFrame(UNKNOWN, STORAGE_UNKNOWN, gfx::Size(),
+ gfx::Rect(), gfx::Size(), kNoTimestamp());
+ frame->metadata()->SetBoolean(VideoFrameMetadata::END_OF_STREAM, true);
+ return frame;
}
// static
@@ -478,8 +473,8 @@ scoped_refptr<VideoFrame> VideoFrame::CreateColorFrame(
const gfx::Size& size,
uint8 y, uint8 u, uint8 v,
base::TimeDelta timestamp) {
- scoped_refptr<VideoFrame> frame = VideoFrame::CreateFrame(
- VideoFrame::YV12, size, gfx::Rect(size), size, timestamp);
+ scoped_refptr<VideoFrame> frame =
+ CreateFrame(YV12, size, gfx::Rect(size), size, timestamp);
FillYUV(frame.get(), y, u, v);
return frame;
}
@@ -499,28 +494,23 @@ scoped_refptr<VideoFrame> VideoFrame::CreateTransparentFrame(
const uint8 kBlackUV = 0x00;
const uint8 kTransparentA = 0x00;
const base::TimeDelta kZero;
- scoped_refptr<VideoFrame> frame = VideoFrame::CreateFrame(
- VideoFrame::YV12A, size, gfx::Rect(size), size, kZero);
+ scoped_refptr<VideoFrame> frame =
+ CreateFrame(YV12A, size, gfx::Rect(size), size, kZero);
FillYUVA(frame.get(), kBlackY, kBlackUV, kBlackUV, kTransparentA);
return frame;
}
#if defined(VIDEO_HOLE)
// This block and other blocks wrapped around #if defined(VIDEO_HOLE) is not
-// maintained by the general compositor team. Please contact the following
-// people instead:
-//
-// wonsik@chromium.org
-// ycheo@chromium.org
+// maintained by the general compositor team. Please contact
+// wonsik@chromium.org .
// static
scoped_refptr<VideoFrame> VideoFrame::CreateHoleFrame(
const gfx::Size& size) {
- DCHECK(IsValidConfig(VideoFrame::HOLE, size, gfx::Rect(size), size));
- gpu::MailboxHolder mailboxes[kMaxPlanes];
- scoped_refptr<VideoFrame> frame(
- new VideoFrame(VideoFrame::HOLE, size, gfx::Rect(size), size, mailboxes,
- TEXTURE_RGBA, base::TimeDelta(), false));
+ DCHECK(IsValidConfig(UNKNOWN, STORAGE_HOLE, size, gfx::Rect(size), size));
+ scoped_refptr<VideoFrame> frame(new VideoFrame(
+ UNKNOWN, STORAGE_HOLE, size, gfx::Rect(size), size, base::TimeDelta()));
return frame;
}
#endif // defined(VIDEO_HOLE)
@@ -528,25 +518,21 @@ scoped_refptr<VideoFrame> VideoFrame::CreateHoleFrame(
// static
size_t VideoFrame::NumPlanes(Format format) {
switch (format) {
- case VideoFrame::NATIVE_TEXTURE:
-#if defined(VIDEO_HOLE)
- case VideoFrame::HOLE:
-#endif // defined(VIDEO_HOLE)
- return 0;
- case VideoFrame::ARGB:
+ case ARGB:
+ case XRGB:
return 1;
- case VideoFrame::NV12:
+#if defined(OS_MACOSX) || defined(OS_CHROMEOS)
+ case NV12:
return 2;
- case VideoFrame::YV12:
- case VideoFrame::YV16:
- case VideoFrame::I420:
- case VideoFrame::YV12J:
- case VideoFrame::YV12HD:
- case VideoFrame::YV24:
+#endif
+ case YV12:
+ case YV16:
+ case I420:
+ case YV24:
return 3;
- case VideoFrame::YV12A:
+ case YV12A:
return 4;
- case VideoFrame::UNKNOWN:
+ case UNKNOWN:
break;
}
NOTREACHED() << "Unsupported video frame format: " << format;
@@ -554,24 +540,10 @@ size_t VideoFrame::NumPlanes(Format format) {
}
// static
-size_t VideoFrame::NumTextures(TextureFormat texture_format) {
- switch (texture_format) {
- case TEXTURE_RGBA:
- case TEXTURE_RGB:
- return 1;
- case TEXTURE_YUV_420:
- return 3;
- }
-
- NOTREACHED();
- return 0;
-}
-
-// static
size_t VideoFrame::AllocationSize(Format format, const gfx::Size& coded_size) {
size_t total = 0;
for (size_t i = 0; i < NumPlanes(format); ++i)
- total += PlaneAllocationSize(format, i, coded_size);
+ total += PlaneSize(format, i, coded_size).GetArea();
return total;
}
@@ -583,7 +555,7 @@ gfx::Size VideoFrame::PlaneSize(Format format,
int width = coded_size.width();
int height = coded_size.height();
- if (format != VideoFrame::ARGB) {
+ if (format != ARGB) {
// Align to multiple-of-two size overall. This ensures that non-subsampled
// planes can be addressed by pixel with the same scaling as the subsampled
// planes.
@@ -598,12 +570,6 @@ gfx::Size VideoFrame::PlaneSize(Format format,
height / subsample.height());
}
-size_t VideoFrame::PlaneAllocationSize(Format format,
- size_t plane,
- const gfx::Size& coded_size) {
- return PlaneSize(format, plane, coded_size).GetArea();
-}
-
// static
int VideoFrame::PlaneHorizontalBitsPerPixel(Format format, size_t plane) {
DCHECK(IsValidPlane(plane, format));
@@ -620,93 +586,47 @@ int VideoFrame::PlaneBitsPerPixel(Format format, size_t plane) {
SampleSize(format, plane).height();
}
-// Release data allocated by AllocateYUV().
-static void ReleaseData(uint8* data) {
- DCHECK(data);
- base::AlignedFree(data);
+// static
+size_t VideoFrame::RowBytes(size_t plane, Format format, int width) {
+ DCHECK(IsValidPlane(plane, format));
+ return BytesPerElement(format, plane) * Columns(plane, format, width);
}
-void VideoFrame::AllocateYUV() {
- DCHECK(format_ == YV12 || format_ == YV16 || format_ == YV12A ||
- format_ == I420 || format_ == YV12J || format_ == YV24 ||
- format_ == YV12HD);
- static_assert(0 == kYPlane, "y plane data must be index 0");
-
- size_t data_size = 0;
- size_t offset[kMaxPlanes];
- for (size_t plane = 0; plane < VideoFrame::NumPlanes(format_); ++plane) {
- // The *2 in alignment for height is because some formats (e.g. h264) allow
- // interlaced coding, and then the size needs to be a multiple of two
- // macroblocks (vertically). See
- // libavcodec/utils.c:avcodec_align_dimensions2().
- const size_t height = RoundUp(rows(plane), kFrameSizeAlignment * 2);
- strides_[plane] = RoundUp(row_bytes(plane), kFrameSizeAlignment);
- offset[plane] = data_size;
- data_size += height * strides_[plane];
- }
-
- // The extra line of UV being allocated is because h264 chroma MC
- // overreads by one line in some cases, see libavcodec/utils.c:
- // avcodec_align_dimensions2() and libavcodec/x86/h264_chromamc.asm:
- // put_h264_chroma_mc4_ssse3().
- DCHECK(IsValidPlane(kUPlane, format_));
- data_size += strides_[kUPlane] + kFrameSizePadding;
-
- // FFmpeg expects the initialize allocation to be zero-initialized. Failure
- // to do so can lead to unitialized value usage. See http://crbug.com/390941
- uint8* data = reinterpret_cast<uint8*>(
- base::AlignedAlloc(data_size, kFrameAddressAlignment));
- memset(data, 0, data_size);
-
- for (size_t plane = 0; plane < VideoFrame::NumPlanes(format_); ++plane)
- data_[plane] = data + offset[plane];
-
- no_longer_needed_cb_ = base::Bind(&ReleaseData, data);
+// static
+size_t VideoFrame::Rows(size_t plane, Format format, int height) {
+ DCHECK(IsValidPlane(plane, format));
+ const int sample_height = SampleSize(format, plane).height();
+ return RoundUp(height, sample_height) / sample_height;
}
-VideoFrame::VideoFrame(VideoFrame::Format format,
- const gfx::Size& coded_size,
- const gfx::Rect& visible_rect,
- const gfx::Size& natural_size,
- const gpu::MailboxHolder(&mailbox_holders)[kMaxPlanes],
- VideoFrame::TextureFormat texture_format,
- base::TimeDelta timestamp,
- bool end_of_stream)
- : format_(format),
- texture_format_(texture_format),
- coded_size_(coded_size),
- visible_rect_(visible_rect),
- natural_size_(natural_size),
- shared_memory_handle_(base::SharedMemory::NULLHandle()),
- shared_memory_offset_(0),
- timestamp_(timestamp),
- release_sync_point_(0),
- end_of_stream_(end_of_stream),
- allow_overlay_(false) {
- DCHECK(IsValidConfig(format_, coded_size_, visible_rect_, natural_size_));
- memcpy(&mailbox_holders_, mailbox_holders, sizeof(mailbox_holders_));
- memset(&strides_, 0, sizeof(strides_));
- memset(&data_, 0, sizeof(data_));
+// static
+size_t VideoFrame::Columns(size_t plane, Format format, int width) {
+ DCHECK(IsValidPlane(plane, format));
+ const int sample_width = SampleSize(format, plane).width();
+ return RoundUp(width, sample_width) / sample_width;
}
-VideoFrame::~VideoFrame() {
- if (!mailbox_holders_release_cb_.is_null()) {
- uint32 release_sync_point;
- {
- // To ensure that changes to |release_sync_point_| are visible on this
- // thread (imply a memory barrier).
- base::AutoLock locker(release_sync_point_lock_);
- release_sync_point = release_sync_point_;
+// static
+void VideoFrame::HashFrameForTesting(base::MD5Context* context,
+ const scoped_refptr<VideoFrame>& frame) {
+ DCHECK(context);
+ for (size_t plane = 0; plane < NumPlanes(frame->format()); ++plane) {
+ for (int row = 0; row < frame->rows(plane); ++row) {
+ base::MD5Update(
+ context,
+ base::StringPiece(reinterpret_cast<char*>(frame->data(plane) +
+ frame->stride(plane) * row),
+ frame->row_bytes(plane)));
}
- base::ResetAndReturn(&mailbox_holders_release_cb_).Run(release_sync_point);
}
- if (!no_longer_needed_cb_.is_null())
- base::ResetAndReturn(&no_longer_needed_cb_).Run();
}
-// static
-bool VideoFrame::IsValidPlane(size_t plane, VideoFrame::Format format) {
- return (plane < NumPlanes(format));
+bool VideoFrame::IsMappable() const {
+ return IsStorageTypeMappable(storage_type_);
+}
+
+bool VideoFrame::HasTextures() const {
+ return !mailbox_holders_[0].mailbox.IsZero();
}
int VideoFrame::stride(size_t plane) const {
@@ -714,48 +634,29 @@ int VideoFrame::stride(size_t plane) const {
return strides_[plane];
}
-// static
-size_t VideoFrame::RowBytes(size_t plane,
- VideoFrame::Format format,
- int width) {
- DCHECK(IsValidPlane(plane, format));
- return BytesPerElement(format, plane) * Columns(plane, format, width);
-}
-
int VideoFrame::row_bytes(size_t plane) const {
return RowBytes(plane, format_, coded_size_.width());
}
-// static
-size_t VideoFrame::Rows(size_t plane, VideoFrame::Format format, int height) {
- DCHECK(IsValidPlane(plane, format));
- const int sample_height = SampleSize(format, plane).height();
- return RoundUp(height, sample_height) / sample_height;
-}
-
-// static
-size_t VideoFrame::Columns(size_t plane, Format format, int width) {
- DCHECK(IsValidPlane(plane, format));
- const int sample_width = SampleSize(format, plane).width();
- return RoundUp(width, sample_width) / sample_width;
-}
-
int VideoFrame::rows(size_t plane) const {
return Rows(plane, format_, coded_size_.height());
}
const uint8* VideoFrame::data(size_t plane) const {
DCHECK(IsValidPlane(plane, format_));
+ DCHECK(IsMappable());
return data_[plane];
}
uint8* VideoFrame::data(size_t plane) {
DCHECK(IsValidPlane(plane, format_));
+ DCHECK(IsMappable());
return data_[plane];
}
const uint8* VideoFrame::visible_data(size_t plane) const {
DCHECK(IsValidPlane(plane, format_));
+ DCHECK(IsMappable());
// Calculate an offset that is properly aligned for all planes.
const gfx::Size alignment = CommonAlignment(format_);
@@ -776,22 +677,78 @@ uint8* VideoFrame::visible_data(size_t plane) {
static_cast<const VideoFrame*>(this)->visible_data(plane));
}
-const gpu::MailboxHolder& VideoFrame::mailbox_holder(size_t texture) const {
- DCHECK_EQ(format_, NATIVE_TEXTURE);
- DCHECK_LT(texture, NumTextures(texture_format_));
- return mailbox_holders_[texture];
+const gpu::MailboxHolder&
+VideoFrame::mailbox_holder(size_t texture_index) const {
+ DCHECK(HasTextures());
+ DCHECK(IsValidPlane(texture_index, format_));
+ return mailbox_holders_[texture_index];
}
base::SharedMemoryHandle VideoFrame::shared_memory_handle() const {
+ DCHECK_EQ(storage_type_, STORAGE_SHMEM);
+ DCHECK(shared_memory_handle_ != base::SharedMemory::NULLHandle());
return shared_memory_handle_;
}
size_t VideoFrame::shared_memory_offset() const {
+ DCHECK_EQ(storage_type_, STORAGE_SHMEM);
+ DCHECK(shared_memory_handle_ != base::SharedMemory::NULLHandle());
return shared_memory_offset_;
}
+#if defined(OS_LINUX)
+int VideoFrame::dmabuf_fd(size_t plane) const {
+ DCHECK_EQ(storage_type_, STORAGE_DMABUFS);
+ DCHECK(IsValidPlane(plane, format_));
+ return dmabuf_fds_[plane].get();
+}
+
+bool VideoFrame::DuplicateFileDescriptors(const std::vector<int>& in_fds) {
+ // TODO(mcasas): Support offsets for e.g. multiplanar inside a single |in_fd|.
+
+ storage_type_ = STORAGE_DMABUFS;
+ // TODO(posciak): This is not exactly correct, it's possible for one
+ // buffer to contain more than one plane.
+ if (in_fds.size() != NumPlanes(format_)) {
+ LOG(FATAL) << "Not enough dmabuf fds provided, got: " << in_fds.size()
+ << ", expected: " << NumPlanes(format_);
+ return false;
+ }
+
+ // Make sure that all fds are closed if any dup() fails.
+ base::ScopedFD temp_dmabuf_fds[kMaxPlanes];
+ for (size_t i = 0; i < in_fds.size(); ++i) {
+ temp_dmabuf_fds[i] = base::ScopedFD(HANDLE_EINTR(dup(in_fds[i])));
+ if (!temp_dmabuf_fds[i].is_valid()) {
+ DPLOG(ERROR) << "Failed duplicating a dmabuf fd";
+ return false;
+ }
+ }
+ for (size_t i = 0; i < kMaxPlanes; ++i)
+ dmabuf_fds_[i].reset(temp_dmabuf_fds[i].release());
+
+ return true;
+}
+#endif
+
+void VideoFrame::AddSharedMemoryHandle(base::SharedMemoryHandle handle) {
+ storage_type_ = STORAGE_SHMEM;
+ shared_memory_handle_ = handle;
+}
+
+#if defined(OS_MACOSX)
+CVPixelBufferRef VideoFrame::cv_pixel_buffer() const {
+ return cv_pixel_buffer_.get();
+}
+#endif
+
+void VideoFrame::AddDestructionObserver(const base::Closure& callback) {
+ DCHECK(!callback.is_null());
+ done_callbacks_.push_back(callback);
+}
+
void VideoFrame::UpdateReleaseSyncPoint(SyncPointClient* client) {
- DCHECK_EQ(format_, NATIVE_TEXTURE);
+ DCHECK(HasTextures());
base::AutoLock locker(release_sync_point_lock_);
// Must wait on the previous sync point before inserting a new sync point so
// that |mailbox_holders_release_cb_| guarantees the previous sync point
@@ -801,26 +758,158 @@ void VideoFrame::UpdateReleaseSyncPoint(SyncPointClient* client) {
release_sync_point_ = client->InsertSyncPoint();
}
-#if defined(OS_POSIX)
-int VideoFrame::dmabuf_fd(size_t plane) const {
- return dmabuf_fds_[plane].get();
+// static
+scoped_refptr<VideoFrame> VideoFrame::WrapExternalStorage(
+ Format format,
+ StorageType storage_type,
+ const gfx::Size& coded_size,
+ const gfx::Rect& visible_rect,
+ const gfx::Size& natural_size,
+ uint8* data,
+ size_t data_size,
+ base::TimeDelta timestamp,
+ base::SharedMemoryHandle handle,
+ size_t data_offset) {
+ DCHECK(IsStorageTypeMappable(storage_type));
+
+ const gfx::Size new_coded_size = AdjustCodedSize(format, coded_size);
+ if (!IsValidConfig(format, storage_type, new_coded_size, visible_rect,
+ natural_size) ||
+ data_size < AllocationSize(format, new_coded_size)) {
+ return NULL;
+ }
+ DLOG_IF(ERROR, format != I420) << "Only I420 format supported: "
+ << FormatToString(format);
+ if (format != I420)
+ return NULL;
+
+ scoped_refptr<VideoFrame> frame;
+ if (storage_type == STORAGE_SHMEM) {
+ frame = new VideoFrame(format, storage_type, new_coded_size,
+ visible_rect, natural_size, timestamp, handle,
+ data_offset);
+ } else {
+ frame = new VideoFrame(format, storage_type, new_coded_size,
+ visible_rect, natural_size, timestamp);
+ }
+ frame->strides_[kYPlane] = new_coded_size.width();
+ frame->strides_[kUPlane] = new_coded_size.width() / 2;
+ frame->strides_[kVPlane] = new_coded_size.width() / 2;
+ frame->data_[kYPlane] = data;
+ frame->data_[kUPlane] = data + new_coded_size.GetArea();
+ frame->data_[kVPlane] = data + (new_coded_size.GetArea() * 5 / 4);
+ return frame;
}
-#endif
-#if defined(OS_MACOSX)
-CVPixelBufferRef VideoFrame::cv_pixel_buffer() const {
- return cv_pixel_buffer_.get();
+VideoFrame::VideoFrame(Format format,
+ StorageType storage_type,
+ const gfx::Size& coded_size,
+ const gfx::Rect& visible_rect,
+ const gfx::Size& natural_size,
+ base::TimeDelta timestamp)
+ : format_(format),
+ storage_type_(storage_type),
+ coded_size_(coded_size),
+ visible_rect_(visible_rect),
+ natural_size_(natural_size),
+ shared_memory_handle_(base::SharedMemory::NULLHandle()),
+ shared_memory_offset_(0),
+ timestamp_(timestamp),
+ release_sync_point_(0) {
+ DCHECK(IsValidConfig(format_, storage_type, coded_size_, visible_rect_,
+ natural_size_));
+ memset(&mailbox_holders_, 0, sizeof(mailbox_holders_));
+ memset(&strides_, 0, sizeof(strides_));
+ memset(&data_, 0, sizeof(data_));
}
-#endif
-void VideoFrame::HashFrameForTesting(base::MD5Context* context) {
- for (size_t plane = 0; plane < NumPlanes(format_); ++plane) {
- for (int row = 0; row < rows(plane); ++row) {
- base::MD5Update(context, base::StringPiece(
- reinterpret_cast<char*>(data(plane) + stride(plane) * row),
- row_bytes(plane)));
+VideoFrame::VideoFrame(Format format,
+ StorageType storage_type,
+ const gfx::Size& coded_size,
+ const gfx::Rect& visible_rect,
+ const gfx::Size& natural_size,
+ base::TimeDelta timestamp,
+ base::SharedMemoryHandle handle,
+ size_t shared_memory_offset)
+ : VideoFrame(format,
+ storage_type,
+ coded_size,
+ visible_rect,
+ natural_size,
+ timestamp) {
+ DCHECK_EQ(storage_type, STORAGE_SHMEM);
+ AddSharedMemoryHandle(handle);
+ shared_memory_offset_ = shared_memory_offset;
+}
+
+VideoFrame::VideoFrame(Format format,
+ StorageType storage_type,
+ const gfx::Size& coded_size,
+ const gfx::Rect& visible_rect,
+ const gfx::Size& natural_size,
+ const gpu::MailboxHolder(&mailbox_holders)[kMaxPlanes],
+ const ReleaseMailboxCB& mailbox_holder_release_cb,
+ base::TimeDelta timestamp)
+ : VideoFrame(format,
+ storage_type,
+ coded_size,
+ visible_rect,
+ natural_size,
+ timestamp) {
+ memcpy(&mailbox_holders_, mailbox_holders, sizeof(mailbox_holders_));
+ mailbox_holders_release_cb_ = mailbox_holder_release_cb;
+}
+
+VideoFrame::~VideoFrame() {
+ if (!mailbox_holders_release_cb_.is_null()) {
+ uint32 release_sync_point;
+ {
+ // To ensure that changes to |release_sync_point_| are visible on this
+ // thread (imply a memory barrier).
+ base::AutoLock locker(release_sync_point_lock_);
+ release_sync_point = release_sync_point_;
}
+ base::ResetAndReturn(&mailbox_holders_release_cb_).Run(release_sync_point);
+ }
+
+ for (auto& callback : done_callbacks_)
+ base::ResetAndReturn(&callback).Run();
+}
+
+void VideoFrame::AllocateYUV() {
+ DCHECK_EQ(storage_type_, STORAGE_OWNED_MEMORY);
+ static_assert(0 == kYPlane, "y plane data must be index 0");
+
+ size_t data_size = 0;
+ size_t offset[kMaxPlanes];
+ for (size_t plane = 0; plane < NumPlanes(format_); ++plane) {
+ // The *2 in alignment for height is because some formats (e.g. h264) allow
+ // interlaced coding, and then the size needs to be a multiple of two
+ // macroblocks (vertically). See
+ // libavcodec/utils.c:avcodec_align_dimensions2().
+ const size_t height = RoundUp(rows(plane), kFrameSizeAlignment * 2);
+ strides_[plane] = RoundUp(row_bytes(plane), kFrameSizeAlignment);
+ offset[plane] = data_size;
+ data_size += height * strides_[plane];
}
+
+ // The extra line of UV being allocated is because h264 chroma MC
+ // overreads by one line in some cases, see libavcodec/utils.c:
+ // avcodec_align_dimensions2() and libavcodec/x86/h264_chromamc.asm:
+ // put_h264_chroma_mc4_ssse3().
+ DCHECK(IsValidPlane(kUPlane, format_));
+ data_size += strides_[kUPlane] + kFrameSizePadding;
+
+  // FFmpeg expects the initial allocation to be zero-initialized. Failure
+  // to do so can lead to uninitialized value usage. See http://crbug.com/390941
+ uint8* data = reinterpret_cast<uint8*>(
+ base::AlignedAlloc(data_size, kFrameAddressAlignment));
+ memset(data, 0, data_size);
+
+ for (size_t plane = 0; plane < NumPlanes(format_); ++plane)
+ data_[plane] = data + offset[plane];
+
+ AddDestructionObserver(base::Bind(&base::AlignedFree, data));
}
} // namespace media
diff --git a/chromium/media/base/video_frame.h b/chromium/media/base/video_frame.h
index 7312c1136ea..2588e46e99d 100644
--- a/chromium/media/base/video_frame.h
+++ b/chromium/media/base/video_frame.h
@@ -43,78 +43,125 @@ class MEDIA_EXPORT VideoFrame : public base::RefCountedThreadSafe<VideoFrame> {
kAPlane = 3,
};
- // Surface formats roughly based on FOURCC labels, see:
- // http://www.fourcc.org/rgb.php
- // http://www.fourcc.org/yuv.php
- // Logged to UMA, so never reuse values.
+ // Pixel formats roughly based on FOURCC labels, see:
+ // http://www.fourcc.org/rgb.php and http://www.fourcc.org/yuv.php
+ // Logged to UMA, so never reuse values. Leave gaps if necessary.
enum Format {
- UNKNOWN = 0, // Unknown format value.
- YV12 = 1, // 12bpp YVU planar 1x1 Y, 2x2 VU samples
- YV16 = 2, // 16bpp YVU planar 1x1 Y, 2x1 VU samples
- I420 = 3, // 12bpp YVU planar 1x1 Y, 2x2 UV samples.
- YV12A = 4, // 20bpp YUVA planar 1x1 Y, 2x2 VU, 1x1 A samples.
-#if defined(VIDEO_HOLE)
- HOLE = 5, // Hole frame.
-#endif // defined(VIDEO_HOLE)
- NATIVE_TEXTURE = 6, // Native texture. Pixel-format agnostic.
- YV12J = 7, // JPEG color range version of YV12
- NV12 = 8, // 12bpp 1x1 Y plane followed by an interleaved 2x2 UV plane.
- YV24 = 9, // 24bpp YUV planar, no subsampling.
- ARGB = 10, // 32bpp ARGB, 1 plane.
- YV12HD = 11, // Rec709 "HD" color space version of YV12
+ UNKNOWN = 0, // Unknown or unspecified format value.
+ YV12 = 1, // 12bpp YVU planar 1x1 Y, 2x2 VU samples.
+ I420 = 2, // 12bpp YUV planar 1x1 Y, 2x2 UV samples, a.k.a. YU12.
+ YV16 = 3, // 16bpp YVU planar 1x1 Y, 2x1 VU samples.
+ YV12A = 4, // 20bpp YUVA planar 1x1 Y, 2x2 VU, 1x1 A samples.
+ YV24 = 5, // 24bpp YUV planar, no subsampling.
+#if defined(OS_MACOSX) || defined(OS_CHROMEOS)
+ NV12 = 6, // 12bpp with Y plane followed by a 2x2 interleaved UV plane.
+#endif
+ ARGB = 7, // 32bpp ARGB, 1 plane.
+ XRGB = 8, // 24bpp XRGB, 1 plane.
// Please update UMA histogram enumeration when adding new formats here.
- FORMAT_MAX = YV12HD, // Must always be equal to largest entry logged.
+ FORMAT_MAX = XRGB, // Must always be equal to largest entry logged.
+ };
+
+ // Color space or color range used for the pixels, in general this is left
+ // unspecified, meaning Rec601 (SD) is assumed.
+ // Logged to UMA, so never reuse values. Leave gaps if necessary.
+ enum ColorSpace {
+ COLOR_SPACE_UNSPECIFIED = 0, // In general this is Rec601.
+ COLOR_SPACE_JPEG = 1, // JPEG color range.
+ COLOR_SPACE_HD_REC709 = 2, // Rec709 "HD" color space.
+ COLOR_SPACE_MAX = COLOR_SPACE_HD_REC709,
};
- // Defines the internal format and the number of the textures in the mailbox
- // holders.
- enum TextureFormat {
- TEXTURE_RGBA, // One RGBA texture.
- TEXTURE_RGB, // One RGB texture.
- TEXTURE_YUV_420, // 3 RED textures one per channel. UV are 2x2 subsampled.
+ // Defines the pixel storage type. Differentiates between directly accessible
+ // |data_| and pixels that are only indirectly accessible and not via mappable
+ // memory.
+ // Note that VideoFrames of any StorageType can also have Texture backing,
+ // with "classical" GPU Driver-only textures identified as STORAGE_OPAQUE.
+ enum StorageType {
+ STORAGE_UNKNOWN = 0,
+ STORAGE_OPAQUE = 1, // We don't know how VideoFrame's pixels are stored.
+ STORAGE_UNOWNED_MEMORY = 2, // External, non owned data pointers.
+ STORAGE_OWNED_MEMORY = 3, // VideoFrame has allocated its own data buffer.
+ STORAGE_SHMEM = 4, // Pixels are backed by Shared Memory.
+#if defined(OS_LINUX)
+ // TODO(mcasas): Consider turning this type into STORAGE_NATIVE or another
+ // meaningful name and handle it appropriately in all cases.
+ STORAGE_DMABUFS = 5, // Each plane is stored into a DmaBuf.
+#endif
+#if defined(VIDEO_HOLE)
+ // Indicates protected media that needs to be directly rendered to hw. It
+ // is, in principle, platform independent, see http://crbug.com/323157 and
+ // https://groups.google.com/a/google.com/d/topic/chrome-gpu/eIM1RwarUmk/discussion
+ STORAGE_HOLE = 6,
+#endif
+
+#if defined(VIDEO_HOLE)
+ STORAGE_LAST = STORAGE_HOLE,
+#elif defined(OS_LINUX)
+ STORAGE_LAST = STORAGE_DMABUFS,
+#else
+ STORAGE_LAST = STORAGE_SHMEM
+#endif
+ };
+
+ // CB to be called on the mailbox backing this frame when the frame is
+ // destroyed.
+ typedef base::Callback<void(uint32)> ReleaseMailboxCB;
+
+ // Interface representing client operations on a SyncPoint, i.e. insert one in
+ // the GPU Command Buffer and wait for it.
+ class SyncPointClient {
+ public:
+ SyncPointClient() {}
+ virtual uint32 InsertSyncPoint() = 0;
+ virtual void WaitSyncPoint(uint32 sync_point) = 0;
+
+ protected:
+ virtual ~SyncPointClient() {}
+
+ DISALLOW_COPY_AND_ASSIGN(SyncPointClient);
};
// Returns the name of a Format as a string.
static std::string FormatToString(Format format);
- // Creates a new frame in system memory with given parameters. Buffers for
- // the frame are allocated but not initialized.
- static scoped_refptr<VideoFrame> CreateFrame(
- Format format,
- const gfx::Size& coded_size,
- const gfx::Rect& visible_rect,
- const gfx::Size& natural_size,
- base::TimeDelta timestamp);
-
- // Returns true if |plane| is a valid plane number for the given format. This
- // can be used to DCHECK() plane parameters.
- static bool IsValidPlane(size_t plane, VideoFrame::Format format);
+ // Returns true if |format| is a YUV format. This includes (multi)planar
+ // and/or (partially) interleaved formats.
+ static bool IsYuvPlanar(Format format);
// Call prior to CreateFrame to ensure validity of frame configuration. Called
// automatically by VideoDecoderConfig::IsValidConfig().
// TODO(scherkus): VideoDecoderConfig shouldn't call this method
static bool IsValidConfig(Format format,
+ StorageType storage_type,
const gfx::Size& coded_size,
const gfx::Rect& visible_rect,
const gfx::Size& natural_size);
- // CB to be called on the mailbox backing this frame when the frame is
- // destroyed.
- typedef base::Callback<void(uint32)> ReleaseMailboxCB;
+ // Creates a new YUV frame in system memory with given parameters (|format|
+ // must be YUV). Buffers for the frame are allocated but not initialized. The
+  // caller must not make assumptions about the actual underlying size(s), but
+ // check the returned VideoFrame instead.
+ // TODO(mcasas): implement the RGB version of this factory method.
+ static scoped_refptr<VideoFrame> CreateFrame(
+ Format format,
+ const gfx::Size& coded_size,
+ const gfx::Rect& visible_rect,
+ const gfx::Size& natural_size,
+ base::TimeDelta timestamp);
// Wraps a native texture of the given parameters with a VideoFrame.
// The backing of the VideoFrame is held in the mailbox held by
// |mailbox_holder|, and |mailbox_holder_release_cb| will be called with
// a syncpoint as the argument when the VideoFrame is to be destroyed.
static scoped_refptr<VideoFrame> WrapNativeTexture(
+ Format format,
const gpu::MailboxHolder& mailbox_holder,
const ReleaseMailboxCB& mailbox_holder_release_cb,
const gfx::Size& coded_size,
const gfx::Rect& visible_rect,
const gfx::Size& natural_size,
- base::TimeDelta timestamp,
- bool allow_overlay,
- bool has_alpha);
+ base::TimeDelta timestamp);
// Wraps a set of native textures representing YUV data with a VideoFrame.
// |mailbox_holders_release_cb| will be called with a syncpoint as the
@@ -127,17 +174,23 @@ class MEDIA_EXPORT VideoFrame : public base::RefCountedThreadSafe<VideoFrame> {
const gfx::Size& coded_size,
const gfx::Rect& visible_rect,
const gfx::Size& natural_size,
- base::TimeDelta timestamp,
- bool allow_overlay);
+ base::TimeDelta timestamp);
// Wraps packed image data residing in a memory buffer with a VideoFrame.
// The image data resides in |data| and is assumed to be packed tightly in a
// buffer of logical dimensions |coded_size| with the appropriate bit depth
- // and plane count as given by |format|. The shared memory handle of the
- // backing allocation, if present, can be passed in with |handle|. When the
- // frame is destroyed, |no_longer_needed_cb.Run()| will be called.
- // Returns NULL on failure.
- static scoped_refptr<VideoFrame> WrapExternalPackedMemory(
+ // and plane count as given by |format|. Returns NULL on failure.
+ static scoped_refptr<VideoFrame> WrapExternalData(
+ Format format,
+ const gfx::Size& coded_size,
+ const gfx::Rect& visible_rect,
+ const gfx::Size& natural_size,
+ uint8* data,
+ size_t data_size,
+ base::TimeDelta timestamp);
+
+ // Same as WrapExternalData() with SharedMemoryHandle and its offset.
+ static scoped_refptr<VideoFrame> WrapExternalSharedMemory(
Format format,
const gfx::Size& coded_size,
const gfx::Rect& visible_rect,
@@ -146,12 +199,10 @@ class MEDIA_EXPORT VideoFrame : public base::RefCountedThreadSafe<VideoFrame> {
size_t data_size,
base::SharedMemoryHandle handle,
size_t shared_memory_offset,
- base::TimeDelta timestamp,
- const base::Closure& no_longer_needed_cb);
+ base::TimeDelta timestamp);
// Wraps external YUV data of the given parameters with a VideoFrame.
- // The returned VideoFrame does not own the data passed in. When the frame
- // is destroyed |no_longer_needed_cb.Run()| will be called.
+ // The returned VideoFrame does not own the data passed in.
static scoped_refptr<VideoFrame> WrapExternalYuvData(
Format format,
const gfx::Size& coded_size,
@@ -163,10 +214,9 @@ class MEDIA_EXPORT VideoFrame : public base::RefCountedThreadSafe<VideoFrame> {
uint8* y_data,
uint8* u_data,
uint8* v_data,
- base::TimeDelta timestamp,
- const base::Closure& no_longer_needed_cb);
+ base::TimeDelta timestamp);
-#if defined(OS_POSIX)
+#if defined(OS_LINUX)
// Wraps provided dmabufs
// (https://www.kernel.org/doc/Documentation/dma-buf-sharing.txt) with a
// VideoFrame. The dmabuf fds are dup()ed on creation, so that the VideoFrame
@@ -176,16 +226,14 @@ class MEDIA_EXPORT VideoFrame : public base::RefCountedThreadSafe<VideoFrame> {
// The image data is only accessible via dmabuf fds, which are usually passed
// directly to a hardware device and/or to another process, or can also be
// mapped via mmap() for CPU access.
- // When the frame is destroyed, |no_longer_needed_cb.Run()| will be called.
// Returns NULL on failure.
static scoped_refptr<VideoFrame> WrapExternalDmabufs(
Format format,
const gfx::Size& coded_size,
const gfx::Rect& visible_rect,
const gfx::Size& natural_size,
- const std::vector<int> dmabuf_fds,
- base::TimeDelta timestamp,
- const base::Closure& no_longer_needed_cb);
+ const std::vector<int>& dmabuf_fds,
+ base::TimeDelta timestamp);
#endif
#if defined(OS_MACOSX)
@@ -202,14 +250,12 @@ class MEDIA_EXPORT VideoFrame : public base::RefCountedThreadSafe<VideoFrame> {
base::TimeDelta timestamp);
#endif
- // Wraps |frame| and calls |no_longer_needed_cb| when the wrapper VideoFrame
- // gets destroyed. |visible_rect| must be a sub rect within
+ // Wraps |frame|. |visible_rect| must be a sub rect within
// frame->visible_rect().
static scoped_refptr<VideoFrame> WrapVideoFrame(
const scoped_refptr<VideoFrame>& frame,
const gfx::Rect& visible_rect,
- const gfx::Size& natural_size,
- const base::Closure& no_longer_needed_cb);
+ const gfx::Size& natural_size);
// Creates a frame which indicates end-of-stream.
static scoped_refptr<VideoFrame> CreateEOSFrame();
@@ -236,24 +282,16 @@ class MEDIA_EXPORT VideoFrame : public base::RefCountedThreadSafe<VideoFrame> {
static size_t NumPlanes(Format format);
- static size_t NumTextures(TextureFormat texture_format);
-
// Returns the required allocation size for a (tightly packed) frame of the
// given coded size and format.
static size_t AllocationSize(Format format, const gfx::Size& coded_size);
- // Returns the plane size (in bytes) for a plane of the given coded size and
- // format.
+ // Returns the plane gfx::Size (in bytes) for a plane of the given coded size
+ // and format.
static gfx::Size PlaneSize(Format format,
size_t plane,
const gfx::Size& coded_size);
- // Returns the required allocation size for a (tightly packed) plane of the
- // given coded size and format.
- static size_t PlaneAllocationSize(Format format,
- size_t plane,
- const gfx::Size& coded_size);
-
// Returns horizontal bits per pixel for given |plane| and |format|.
static int PlaneHorizontalBitsPerPixel(Format format, size_t plane);
@@ -272,9 +310,22 @@ class MEDIA_EXPORT VideoFrame : public base::RefCountedThreadSafe<VideoFrame> {
// The width may be aligned to format requirements.
static size_t Columns(size_t plane, Format format, int width);
- Format format() const { return format_; }
+ // Used to keep a running hash of seen frames. Expects an initialized MD5
+ // context. Calls MD5Update with the context and the contents of the frame.
+ static void HashFrameForTesting(base::MD5Context* context,
+ const scoped_refptr<VideoFrame>& frame);
- TextureFormat texture_format() const { return texture_format_; }
+ // Returns true if |frame| is accessible and mapped in the VideoFrame memory
+ // space. If false, clients should refrain from accessing data(),
+ // visible_data() etc.
+ bool IsMappable() const;
+
+ // Returns true if |frame| has textures with any StorageType and should not be
+ // accessed via data(), visible_data() etc.
+ bool HasTextures() const;
+
+ Format format() const { return format_; }
+ StorageType storage_type() const { return storage_type_; }
const gfx::Size& coded_size() const { return coded_size_; }
const gfx::Rect& visible_rect() const { return visible_rect_; }
@@ -289,22 +340,23 @@ class MEDIA_EXPORT VideoFrame : public base::RefCountedThreadSafe<VideoFrame> {
int row_bytes(size_t plane) const;
int rows(size_t plane) const;
- // Returns pointer to the buffer for a given plane. The memory is owned by
- // VideoFrame object and must not be freed by the caller.
+ // Returns pointer to the buffer for a given plane, if this is an
+ // IsMappable() frame type. The memory is owned by VideoFrame object and must
+ // not be freed by the caller.
const uint8* data(size_t plane) const;
uint8* data(size_t plane);
- // Returns pointer to the data in the visible region of the frame. I.e. the
- // returned pointer is offsetted into the plane buffer specified by
- // visible_rect().origin(). Memory is owned by VideoFrame object and must not
- // be freed by the caller.
+ // Returns pointer to the data in the visible region of the frame, for
+ // IsMappable() storage types. The returned pointer is offsetted into the
+ // plane buffer specified by visible_rect().origin(). Memory is owned by
+ // VideoFrame object and must not be freed by the caller.
const uint8* visible_data(size_t plane) const;
uint8* visible_data(size_t plane);
// Returns a mailbox holder for a given texture.
// Only valid to call if this is a NATIVE_TEXTURE frame. Before using the
// mailbox, the caller must wait for the included sync point.
- const gpu::MailboxHolder& mailbox_holder(size_t texture) const;
+ const gpu::MailboxHolder& mailbox_holder(size_t texture_index) const;
// Returns the shared-memory handle, if present
base::SharedMemoryHandle shared_memory_handle() const;
@@ -312,70 +364,91 @@ class MEDIA_EXPORT VideoFrame : public base::RefCountedThreadSafe<VideoFrame> {
// Returns the offset into the shared memory where the frame data begins.
size_t shared_memory_offset() const;
- // Returns a dictionary of optional metadata. This contains information
- // associated with the frame that downstream clients might use for frame-level
- // logging, quality/performance optimizations, signaling, etc.
- //
- // TODO(miu): Move some of the "extra" members of VideoFrame (below) into
- // here as a later clean-up step.
- const VideoFrameMetadata* metadata() const { return &metadata_; }
- VideoFrameMetadata* metadata() { return &metadata_; }
-
- bool allow_overlay() const { return allow_overlay_; }
-
-#if defined(OS_POSIX)
- // Returns backing dmabuf file descriptor for given |plane|, if present.
+#if defined(OS_LINUX)
+ // Returns backing DmaBuf file descriptor for given |plane|, if present, or
+ // -1 if not.
+ // TODO(mcasas): Rename to DmabufFd() to comply with Style Guide.
int dmabuf_fd(size_t plane) const;
+
+ // Duplicates internally the |fds_in|, overwriting the current ones. Returns
+ // false if something goes wrong, and leaves all internal fds closed.
+ bool DuplicateFileDescriptors(const std::vector<int>& fds_in);
#endif
+ void AddSharedMemoryHandle(base::SharedMemoryHandle handle);
+
#if defined(OS_MACOSX)
// Returns the backing CVPixelBuffer, if present.
+ // TODO(mcasas): Rename to CvPixelBuffer() to comply with Style Guide.
CVPixelBufferRef cv_pixel_buffer() const;
#endif
- // Returns true if this VideoFrame represents the end of the stream.
- bool end_of_stream() const { return end_of_stream_; }
+ // Adds a callback to be run when the VideoFrame is about to be destroyed.
+ // The callback may be run from ANY THREAD, and so it is up to the client to
+ // ensure thread safety. Although read-only access to the members of this
+ // VideoFrame is permitted while the callback executes (including
+ // VideoFrameMetadata), clients should not assume the data pointers are
+ // valid.
+ void AddDestructionObserver(const base::Closure& callback);
- base::TimeDelta timestamp() const {
- return timestamp_;
- }
- void set_timestamp(const base::TimeDelta& timestamp) {
+ // Returns a dictionary of optional metadata. This contains information
+ // associated with the frame that downstream clients might use for frame-level
+ // logging, quality/performance optimizations, signaling, etc.
+ //
+ // TODO(miu): Move some of the "extra" members of VideoFrame (below) into
+ // here as a later clean-up step.
+ const VideoFrameMetadata* metadata() const { return &metadata_; }
+ VideoFrameMetadata* metadata() { return &metadata_; }
+
+ base::TimeDelta timestamp() const { return timestamp_; }
+ void set_timestamp(base::TimeDelta timestamp) {
timestamp_ = timestamp;
}
- class SyncPointClient {
- public:
- SyncPointClient() {}
- virtual uint32 InsertSyncPoint() = 0;
- virtual void WaitSyncPoint(uint32 sync_point) = 0;
-
- protected:
- virtual ~SyncPointClient() {}
-
- DISALLOW_COPY_AND_ASSIGN(SyncPointClient);
- };
// It uses |client| to insert a new sync point and potentially waits on a
// older sync point. The final sync point will be used to release this
// VideoFrame.
// This method is thread safe. Both blink and compositor threads can call it.
void UpdateReleaseSyncPoint(SyncPointClient* client);
- // Used to keep a running hash of seen frames. Expects an initialized MD5
- // context. Calls MD5Update with the context and the contents of the frame.
- void HashFrameForTesting(base::MD5Context* context);
-
private:
friend class base::RefCountedThreadSafe<VideoFrame>;
- // Clients must use the static CreateFrame() method to create a new frame.
+ static scoped_refptr<VideoFrame> WrapExternalStorage(
+ Format format,
+ StorageType storage_type,
+ const gfx::Size& coded_size,
+ const gfx::Rect& visible_rect,
+ const gfx::Size& natural_size,
+ uint8* data,
+ size_t data_size,
+ base::TimeDelta timestamp,
+ base::SharedMemoryHandle handle,
+ size_t data_offset);
+
+ // Clients must use the static factory/wrapping methods to create a new frame.
VideoFrame(Format format,
+ StorageType storage_type,
+ const gfx::Size& coded_size,
+ const gfx::Rect& visible_rect,
+ const gfx::Size& natural_size,
+ base::TimeDelta timestamp);
+ VideoFrame(Format format,
+ StorageType storage_type,
const gfx::Size& coded_size,
const gfx::Rect& visible_rect,
const gfx::Size& natural_size,
- const gpu::MailboxHolder(&mailbox_holders)[kMaxPlanes],
- TextureFormat texture_format,
base::TimeDelta timestamp,
- bool end_of_stream);
+ base::SharedMemoryHandle handle,
+ size_t shared_memory_offset);
+ VideoFrame(Format format,
+ StorageType storage_type,
+ const gfx::Size& coded_size,
+ const gfx::Rect& visible_rect,
+ const gfx::Size& natural_size,
+ const gpu::MailboxHolder(&mailbox_holders)[kMaxPlanes],
+ const ReleaseMailboxCB& mailbox_holder_release_cb,
+ base::TimeDelta timestamp);
virtual ~VideoFrame();
void AllocateYUV();
@@ -383,8 +456,8 @@ class MEDIA_EXPORT VideoFrame : public base::RefCountedThreadSafe<VideoFrame> {
// Frame format.
const Format format_;
- // Format of the native textures associated with this frame.
- const TextureFormat texture_format_;
+ // Storage type for the different planes.
+ StorageType storage_type_; // TODO(mcasas): make const
// Width and height of the video frame, in pixels. This must include pixel
// data for the whole image; i.e. for YUV formats with subsampled chroma
@@ -408,21 +481,21 @@ class MEDIA_EXPORT VideoFrame : public base::RefCountedThreadSafe<VideoFrame> {
int32 strides_[kMaxPlanes];
// Array of data pointers to each plane.
+ // TODO(mcasas): we don't know on ctor if we own |data_| or not. After
+ // refactoring VideoFrame, change to scoped_ptr<uint8, AlignedFreeDeleter>.
uint8* data_[kMaxPlanes];
- // Native texture mailboxes, if this is a NATIVE_TEXTURE frame.
+  // Native texture mailboxes, if this is a HasTextures() frame.
gpu::MailboxHolder mailbox_holders_[kMaxPlanes];
ReleaseMailboxCB mailbox_holders_release_cb_;
- // Shared memory handle, if this frame was allocated from shared memory.
+ // Shared memory handle and associated offset inside it, if this frame is
+ // a STORAGE_SHMEM one.
base::SharedMemoryHandle shared_memory_handle_;
-
- // Offset in shared memory buffer.
size_t shared_memory_offset_;
-#if defined(OS_POSIX)
- // Dmabufs for each plane, if this frame is wrapping memory
- // acquired via dmabuf.
+#if defined(OS_LINUX)
+ // Dmabufs for each plane. If set, this frame has DmaBuf backing in some way.
base::ScopedFD dmabuf_fds_[kMaxPlanes];
#endif
@@ -431,19 +504,15 @@ class MEDIA_EXPORT VideoFrame : public base::RefCountedThreadSafe<VideoFrame> {
base::ScopedCFTypeRef<CVPixelBufferRef> cv_pixel_buffer_;
#endif
- base::Closure no_longer_needed_cb_;
+ std::vector<base::Closure> done_callbacks_;
base::TimeDelta timestamp_;
base::Lock release_sync_point_lock_;
uint32 release_sync_point_;
- const bool end_of_stream_;
-
VideoFrameMetadata metadata_;
- bool allow_overlay_;
-
DISALLOW_IMPLICIT_CONSTRUCTORS(VideoFrame);
};
diff --git a/chromium/media/base/video_frame_metadata.cc b/chromium/media/base/video_frame_metadata.cc
index d14bbe9a318..938a0181713 100644
--- a/chromium/media/base/video_frame_metadata.cc
+++ b/chromium/media/base/video_frame_metadata.cc
@@ -47,14 +47,27 @@ void VideoFrameMetadata::SetString(Key key, const std::string& value) {
base::BinaryValue::CreateWithCopiedBuffer(value.data(), value.size()));
}
-void VideoFrameMetadata::SetTimeTicks(Key key, const base::TimeTicks& value) {
+namespace {
+template<class TimeType>
+void SetTimeValue(VideoFrameMetadata::Key key,
+ const TimeType& value,
+ base::DictionaryValue* dictionary) {
const int64 internal_value = value.ToInternalValue();
- dictionary_.SetWithoutPathExpansion(
+ dictionary->SetWithoutPathExpansion(
ToInternalKey(key),
base::BinaryValue::CreateWithCopiedBuffer(
reinterpret_cast<const char*>(&internal_value),
sizeof(internal_value)));
}
+} // namespace
+
+void VideoFrameMetadata::SetTimeDelta(Key key, const base::TimeDelta& value) {
+ SetTimeValue(key, value, &dictionary_);
+}
+
+void VideoFrameMetadata::SetTimeTicks(Key key, const base::TimeTicks& value) {
+ SetTimeValue(key, value, &dictionary_);
+}
void VideoFrameMetadata::SetValue(Key key, scoped_ptr<base::Value> value) {
dictionary_.SetWithoutPathExpansion(ToInternalKey(key), value.Pass());
@@ -83,16 +96,27 @@ bool VideoFrameMetadata::GetString(Key key, std::string* value) const {
return !!binary_value;
}
-bool VideoFrameMetadata::GetTimeTicks(Key key, base::TimeTicks* value) const {
+namespace {
+template<class TimeType>
+bool ToTimeValue(const base::BinaryValue& binary_value, TimeType* value) {
DCHECK(value);
+ int64 internal_value;
+ if (binary_value.GetSize() != sizeof(internal_value))
+ return false;
+ memcpy(&internal_value, binary_value.GetBuffer(), sizeof(internal_value));
+ *value = TimeType::FromInternalValue(internal_value);
+ return true;
+}
+} // namespace
+
+bool VideoFrameMetadata::GetTimeDelta(Key key, base::TimeDelta* value) const {
const base::BinaryValue* const binary_value = GetBinaryValue(key);
- if (binary_value && binary_value->GetSize() == sizeof(int64)) {
- int64 internal_value;
- memcpy(&internal_value, binary_value->GetBuffer(), sizeof(internal_value));
- *value = base::TimeTicks::FromInternalValue(internal_value);
- return true;
- }
- return false;
+ return binary_value && ToTimeValue(*binary_value, value);
+}
+
+bool VideoFrameMetadata::GetTimeTicks(Key key, base::TimeTicks* value) const {
+ const base::BinaryValue* const binary_value = GetBinaryValue(key);
+ return binary_value && ToTimeValue(*binary_value, value);
}
const base::Value* VideoFrameMetadata::GetValue(Key key) const {
@@ -102,6 +126,11 @@ const base::Value* VideoFrameMetadata::GetValue(Key key) const {
return result;
}
+bool VideoFrameMetadata::IsTrue(Key key) const {
+ bool value = false;
+ return GetBoolean(key, &value) && value;
+}
+
void VideoFrameMetadata::MergeInternalValuesInto(
base::DictionaryValue* out) const {
out->MergeDictionary(&dictionary_);
diff --git a/chromium/media/base/video_frame_metadata.h b/chromium/media/base/video_frame_metadata.h
index 31fbe749892..cee4917b25e 100644
--- a/chromium/media/base/video_frame_metadata.h
+++ b/chromium/media/base/video_frame_metadata.h
@@ -15,16 +15,59 @@ namespace media {
class MEDIA_EXPORT VideoFrameMetadata {
public:
enum Key {
+ // Sources of VideoFrames use this marker to indicate that the associated
+ // VideoFrame can be overlayed, case in which its contents do not need to be
+ // further composited but displayed directly. Use Get/SetBoolean() for
+ // this Key.
+ ALLOW_OVERLAY,
+
// Video capture begin/end timestamps. Consumers can use these values for
// dynamic optimizations, logging stats, etc. Use Get/SetTimeTicks() for
// these keys.
CAPTURE_BEGIN_TIME,
CAPTURE_END_TIME,
+ // Some VideoFrames have an indication of the color space used. Use
+ // GetInteger()/SetInteger() and VideoFrame::ColorSpace enumeration.
+ COLOR_SPACE,
+
+ // Indicates if the current frame is the End of its current Stream. Use
+ // Get/SetBoolean() for this Key.
+ END_OF_STREAM,
+
+ // The estimated duration of this frame (i.e., the amount of time between
+ // the media timestamp of this frame and the next). Note that this is not
+ // the same information provided by FRAME_RATE as the FRAME_DURATION can
+ // vary unpredictably for every frame. Consumers can use this to optimize
+ // playback scheduling, make encoding quality decisions, and/or compute
+ // frame-level resource utilization stats. Use Get/SetTimeDelta() for this
+ // key.
+ FRAME_DURATION,
+
// Represents either the fixed frame rate, or the maximum frame rate to
- // expect from a variable-rate source. Use Get/SetDouble() for this key.
+ // expect from a variable-rate source. This value generally remains the
+ // same for all frames in the same session. Use Get/SetDouble() for this
+ // key.
FRAME_RATE,
+ // A feedback signal that indicates the fraction of the tolerable maximum
+ // amount of resources that were utilized to process this frame. A producer
+ // can check this value after-the-fact, usually via a VideoFrame destruction
+ // observer, to determine whether the consumer can handle more or less data
+ // volume, and achieve the right quality versus performance trade-off.
+ //
+ // Use Get/SetDouble() for this key. Values are interpreted as follows:
+ // Less than 0.0 is meaningless and should be ignored. 1.0 indicates a
+ // maximum sustainable utilization. Greater than 1.0 indicates the consumer
+ // is likely to stall or drop frames if the data volume is not reduced.
+ //
+ // Example: In a system that encodes and transmits video frames over the
+ // network, this value can be used to indicate whether sufficient CPU
+ // is available for encoding and/or sufficient bandwidth is available for
+ // transmission over the network. The maximum of the two utilization
+ // measurements would be used as feedback.
+ RESOURCE_UTILIZATION,
+
NUM_KEYS
};
@@ -40,19 +83,24 @@ class MEDIA_EXPORT VideoFrameMetadata {
void SetInteger(Key key, int value);
void SetDouble(Key key, double value);
void SetString(Key key, const std::string& value);
+ void SetTimeDelta(Key key, const base::TimeDelta& value);
void SetTimeTicks(Key key, const base::TimeTicks& value);
void SetValue(Key key, scoped_ptr<base::Value> value);
- // Getters. Returns true if |key| was present and has the value has been set.
+ // Getters. Returns true if |key| is present, and its value has been set.
bool GetBoolean(Key key, bool* value) const WARN_UNUSED_RESULT;
bool GetInteger(Key key, int* value) const WARN_UNUSED_RESULT;
bool GetDouble(Key key, double* value) const WARN_UNUSED_RESULT;
bool GetString(Key key, std::string* value) const WARN_UNUSED_RESULT;
+ bool GetTimeDelta(Key key, base::TimeDelta* value) const WARN_UNUSED_RESULT;
bool GetTimeTicks(Key key, base::TimeTicks* value) const WARN_UNUSED_RESULT;
// Returns null if |key| was not present.
const base::Value* GetValue(Key key) const WARN_UNUSED_RESULT;
+ // Convenience method that returns true if |key| exists and is set to true.
+ bool IsTrue(Key key) const WARN_UNUSED_RESULT;
+
// For serialization.
void MergeInternalValuesInto(base::DictionaryValue* out) const;
void MergeInternalValuesFrom(const base::DictionaryValue& in);
diff --git a/chromium/media/base/video_frame_pool.cc b/chromium/media/base/video_frame_pool.cc
index 88308e979b8..ac021348c14 100644
--- a/chromium/media/base/video_frame_pool.cc
+++ b/chromium/media/base/video_frame_pool.cc
@@ -85,9 +85,11 @@ scoped_refptr<VideoFrame> VideoFramePool::PoolImpl::CreateFrame(
format, coded_size, visible_rect, natural_size, timestamp);
}
- return VideoFrame::WrapVideoFrame(
- frame, frame->visible_rect(), frame->natural_size(),
+ scoped_refptr<VideoFrame> wrapped_frame = VideoFrame::WrapVideoFrame(
+ frame, frame->visible_rect(), frame->natural_size());
+ wrapped_frame->AddDestructionObserver(
base::Bind(&VideoFramePool::PoolImpl::FrameReleased, this, frame));
+ return wrapped_frame;
}
void VideoFramePool::PoolImpl::Shutdown() {
diff --git a/chromium/media/base/video_frame_unittest.cc b/chromium/media/base/video_frame_unittest.cc
index e22a8184228..24b8a2b4b5d 100644
--- a/chromium/media/base/video_frame_unittest.cc
+++ b/chromium/media/base/video_frame_unittest.cc
@@ -24,8 +24,8 @@ using base::MD5DigestToBase16;
// frame will be black, if 1 then the entire frame will be white.
void InitializeYV12Frame(VideoFrame* frame, double white_to_black) {
EXPECT_EQ(VideoFrame::YV12, frame->format());
- int first_black_row = static_cast<int>(frame->coded_size().height() *
- white_to_black);
+ const int first_black_row =
+ static_cast<int>(frame->coded_size().height() * white_to_black);
uint8* y_plane = frame->data(VideoFrame::kYPlane);
for (int row = 0; row < frame->coded_size().height(); ++row) {
int color = (row < first_black_row) ? 0xFF : 0x00;
@@ -76,8 +76,7 @@ void ExpectFrameColor(media::VideoFrame* yv12_frame, uint32 expect_rgb_color) {
uint32* rgb_row_data = reinterpret_cast<uint32*>(
rgb_data + (bytes_per_row * row));
for (int col = 0; col < yv12_frame->coded_size().width(); ++col) {
- SCOPED_TRACE(
- base::StringPrintf("Checking (%d, %d)", row, col));
+ SCOPED_TRACE(base::StringPrintf("Checking (%d, %d)", row, col));
EXPECT_EQ(expect_rgb_color, rgb_row_data[col]);
}
}
@@ -113,7 +112,7 @@ void ExpectFrameExtents(VideoFrame::Format format, const char* expected_hash) {
base::MD5Context context;
base::MD5Init(&context);
- frame->HashFrameForTesting(&context);
+ VideoFrame::HashFrameForTesting(&context, frame);
base::MD5Digest digest;
base::MD5Final(&digest, &context);
EXPECT_EQ(MD5DigestToBase16(digest), expected_hash);
@@ -141,7 +140,7 @@ TEST(VideoFrame, CreateFrame) {
base::MD5Digest digest;
base::MD5Context context;
base::MD5Init(&context);
- frame->HashFrameForTesting(&context);
+ VideoFrame::HashFrameForTesting(&context, frame);
base::MD5Final(&digest, &context);
EXPECT_EQ(MD5DigestToBase16(digest), "9065c841d9fca49186ef8b4ef547e79b");
{
@@ -150,13 +149,14 @@ TEST(VideoFrame, CreateFrame) {
ExpectFrameColor(frame.get(), 0xFFFFFFFF);
}
base::MD5Init(&context);
- frame->HashFrameForTesting(&context);
+ VideoFrame::HashFrameForTesting(&context, frame);
base::MD5Final(&digest, &context);
EXPECT_EQ(MD5DigestToBase16(digest), "911991d51438ad2e1a40ed5f6fc7c796");
// Test an empty frame.
frame = VideoFrame::CreateEOSFrame();
- EXPECT_TRUE(frame->end_of_stream());
+ EXPECT_TRUE(
+ frame->metadata()->IsTrue(VideoFrameMetadata::END_OF_STREAM));
}
TEST(VideoFrame, CreateBlackFrame) {
@@ -168,10 +168,12 @@ TEST(VideoFrame, CreateBlackFrame) {
scoped_refptr<media::VideoFrame> frame =
VideoFrame::CreateBlackFrame(gfx::Size(kWidth, kHeight));
ASSERT_TRUE(frame.get());
+ EXPECT_TRUE(frame->IsMappable());
// Test basic properties.
EXPECT_EQ(0, frame->timestamp().InMicroseconds());
- EXPECT_FALSE(frame->end_of_stream());
+ EXPECT_FALSE(
+ frame->metadata()->IsTrue(VideoFrameMetadata::END_OF_STREAM));
// Test |frame| properties.
EXPECT_EQ(VideoFrame::YV12, frame->format());
@@ -205,7 +207,7 @@ TEST(VideoFrame, WrapVideoFrame) {
const int kWidth = 4;
const int kHeight = 4;
scoped_refptr<media::VideoFrame> frame;
- bool no_longer_needed_triggered = false;
+ bool done_callback_was_run = false;
{
scoped_refptr<media::VideoFrame> wrapped_frame =
VideoFrame::CreateBlackFrame(gfx::Size(kWidth, kHeight));
@@ -214,9 +216,10 @@ TEST(VideoFrame, WrapVideoFrame) {
gfx::Rect visible_rect(1, 1, 1, 1);
gfx::Size natural_size = visible_rect.size();
frame = media::VideoFrame::WrapVideoFrame(
- wrapped_frame, visible_rect, natural_size,
+ wrapped_frame, visible_rect, natural_size);
+ frame->AddDestructionObserver(
base::Bind(&FrameNoLongerNeededCallback, wrapped_frame,
- &no_longer_needed_triggered));
+ &done_callback_was_run));
EXPECT_EQ(wrapped_frame->coded_size(), frame->coded_size());
EXPECT_EQ(wrapped_frame->data(media::VideoFrame::kYPlane),
frame->data(media::VideoFrame::kYPlane));
@@ -226,9 +229,9 @@ TEST(VideoFrame, WrapVideoFrame) {
EXPECT_EQ(natural_size, frame->natural_size());
}
- EXPECT_FALSE(no_longer_needed_triggered);
+ EXPECT_FALSE(done_callback_was_run);
frame = NULL;
- EXPECT_TRUE(no_longer_needed_triggered);
+ EXPECT_TRUE(done_callback_was_run);
}
// Ensure each frame is properly sized and allocated. Will trigger OOB reads
@@ -252,15 +255,16 @@ TEST(VideoFrame, TextureNoLongerNeededCallbackIsCalled) {
{
scoped_refptr<VideoFrame> frame = VideoFrame::WrapNativeTexture(
- gpu::MailboxHolder(gpu::Mailbox(), 5, 0 /* sync_point */),
+ VideoFrame::ARGB,
+ gpu::MailboxHolder(gpu::Mailbox::Generate(), 5, 0 /* sync_point */),
base::Bind(&TextureCallback, &called_sync_point),
gfx::Size(10, 10), // coded_size
gfx::Rect(10, 10), // visible_rect
gfx::Size(10, 10), // natural_size
- base::TimeDelta(), // timestamp
- false, // allow_overlay
- true); // has_alpha
- EXPECT_EQ(VideoFrame::TEXTURE_RGBA, frame->texture_format());
+ base::TimeDelta()); // timestamp
+ EXPECT_EQ(VideoFrame::ARGB, frame->format());
+ EXPECT_EQ(VideoFrame::STORAGE_OPAQUE, frame->storage_type());
+ EXPECT_TRUE(frame->HasTextures());
}
// Nobody set a sync point to |frame|, so |frame| set |called_sync_point| to 0
// as default value.
@@ -306,13 +310,13 @@ TEST(VideoFrame,
gfx::Size(10, 10), // coded_size
gfx::Rect(10, 10), // visible_rect
gfx::Size(10, 10), // natural_size
- base::TimeDelta(), // timestamp
- false); // allow_overlay
+ base::TimeDelta()); // timestamp
- EXPECT_EQ(VideoFrame::TEXTURE_YUV_420, frame->texture_format());
- EXPECT_EQ(3u, VideoFrame::NumTextures(frame->texture_format()));
- for (size_t i = 0; i < VideoFrame::NumTextures(frame->texture_format());
- ++i) {
+ EXPECT_EQ(VideoFrame::STORAGE_OPAQUE, frame->storage_type());
+ EXPECT_EQ(VideoFrame::I420, frame->format());
+ EXPECT_EQ(3u, VideoFrame::NumPlanes(frame->format()));
+ EXPECT_TRUE(frame->HasTextures());
+ for (size_t i = 0; i < VideoFrame::NumPlanes(frame->format()); ++i) {
const gpu::MailboxHolder& mailbox_holder = frame->mailbox_holder(i);
EXPECT_EQ(mailbox[i].name[0], mailbox_holder.mailbox.name[0]);
EXPECT_EQ(target, mailbox_holder.texture_target);
@@ -379,6 +383,14 @@ TEST(VideoFrameMetadata, SetAndThenGetAllKeysForAllTypes) {
metadata.Clear();
EXPECT_FALSE(metadata.HasKey(key));
+ metadata.SetTimeDelta(key, base::TimeDelta::FromInternalValue(42 + i));
+ EXPECT_TRUE(metadata.HasKey(key));
+ base::TimeDelta delta_value;
+ EXPECT_TRUE(metadata.GetTimeDelta(key, &delta_value));
+ EXPECT_EQ(base::TimeDelta::FromInternalValue(42 + i), delta_value);
+ metadata.Clear();
+
+ EXPECT_FALSE(metadata.HasKey(key));
metadata.SetTimeTicks(key, base::TimeTicks::FromInternalValue(~(0LL) + i));
EXPECT_TRUE(metadata.HasKey(key));
base::TimeTicks ticks_value;
diff --git a/chromium/media/base/video_util.cc b/chromium/media/base/video_util.cc
index e04a5a3c543..63635d79bc0 100644
--- a/chromium/media/base/video_util.cc
+++ b/chromium/media/base/video_util.cc
@@ -7,6 +7,8 @@
#include <cmath>
#include "base/logging.h"
+#include "base/numerics/safe_conversions.h"
+#include "base/numerics/safe_math.h"
#include "media/base/video_frame.h"
#include "media/base/yuv_convert.h"
@@ -158,7 +160,6 @@ void LetterboxYUV(VideoFrame* frame, const gfx::Rect& view_area) {
DCHECK(!(view_area.width() & 1));
DCHECK(!(view_area.height() & 1));
DCHECK(frame->format() == VideoFrame::YV12 ||
- frame->format() == VideoFrame::YV12J ||
frame->format() == VideoFrame::I420);
LetterboxPlane(frame, VideoFrame::kYPlane, view_area, 0x00);
gfx::Rect half_view_area(view_area.x() / 2,
@@ -270,6 +271,16 @@ void RotatePlaneByPixels(
}
}
+// Helper function to return |a| divided by |b|, rounded to the nearest integer.
+static int RoundedDivision(int64 a, int b) {
+ DCHECK_GE(a, 0);
+ DCHECK_GT(b, 0);
+ base::CheckedNumeric<uint64> result(a);
+ result += b / 2;
+ result /= b;
+ return base::checked_cast<int>(result.ValueOrDie());
+}
+
// Common logic for the letterboxing and scale-within/scale-encompassing
// functions. Scales |size| to either fit within or encompass |target|,
// depending on whether |fit_within_target| is true.
@@ -283,8 +294,8 @@ static gfx::Size ScaleSizeToTarget(const gfx::Size& size,
const int64 y = static_cast<int64>(size.height()) * target.width();
const bool use_target_width = fit_within_target ? (y < x) : (x < y);
return use_target_width ?
- gfx::Size(target.width(), static_cast<int>(y / size.width())) :
- gfx::Size(static_cast<int>(x / size.height()), target.height());
+ gfx::Size(target.width(), RoundedDivision(y, size.width())) :
+ gfx::Size(RoundedDivision(x, size.height()), target.height());
}
gfx::Rect ComputeLetterboxRegion(const gfx::Rect& bounds,
@@ -317,8 +328,8 @@ gfx::Size PadToMatchAspectRatio(const gfx::Size& size,
const int64 x = static_cast<int64>(size.width()) * target.height();
const int64 y = static_cast<int64>(size.height()) * target.width();
if (x < y)
- return gfx::Size(static_cast<int>(y / target.height()), size.height());
- return gfx::Size(size.width(), static_cast<int>(x / target.width()));
+ return gfx::Size(RoundedDivision(y, target.height()), size.height());
+ return gfx::Size(size.width(), RoundedDivision(x, target.width()));
}
void CopyRGBToVideoFrame(const uint8* source,
diff --git a/chromium/media/base/video_util_unittest.cc b/chromium/media/base/video_util_unittest.cc
index 79c53159ab8..ff01110f00a 100644
--- a/chromium/media/base/video_util_unittest.cc
+++ b/chromium/media/base/video_util_unittest.cc
@@ -331,13 +331,13 @@ INSTANTIATE_TEST_CASE_P(, VideoUtilRotationTest,
// Tests the ComputeLetterboxRegion function. Also, because of shared code
// internally, this also tests ScaleSizeToFitWithinTarget().
TEST_F(VideoUtilTest, ComputeLetterboxRegion) {
- EXPECT_EQ(gfx::Rect(167, 0, 666, 500),
+ EXPECT_EQ(gfx::Rect(166, 0, 667, 500),
ComputeLetterboxRegion(gfx::Rect(0, 0, 1000, 500),
gfx::Size(640, 480)));
EXPECT_EQ(gfx::Rect(0, 312, 500, 375),
ComputeLetterboxRegion(gfx::Rect(0, 0, 500, 1000),
gfx::Size(640, 480)));
- EXPECT_EQ(gfx::Rect(56, 0, 888, 500),
+ EXPECT_EQ(gfx::Rect(55, 0, 889, 500),
ComputeLetterboxRegion(gfx::Rect(0, 0, 1000, 500),
gfx::Size(1920, 1080)));
EXPECT_EQ(gfx::Rect(0, 12, 100, 75),
@@ -357,15 +357,15 @@ TEST_F(VideoUtilTest, ScaleSizeToEncompassTarget) {
EXPECT_EQ(gfx::Size(1333, 1000),
ScaleSizeToEncompassTarget(gfx::Size(640, 480),
gfx::Size(500, 1000)));
- EXPECT_EQ(gfx::Size(1000, 562),
+ EXPECT_EQ(gfx::Size(1000, 563),
ScaleSizeToEncompassTarget(gfx::Size(1920, 1080),
gfx::Size(1000, 500)));
EXPECT_EQ(gfx::Size(133, 100),
ScaleSizeToEncompassTarget(gfx::Size(400, 300),
gfx::Size(100, 100)));
- EXPECT_EQ(gfx::Size(2666666666, 2000000000),
+ EXPECT_EQ(gfx::Size(266666667, 200000000),
ScaleSizeToEncompassTarget(gfx::Size(40000, 30000),
- gfx::Size(2000000000, 2000000000)));
+ gfx::Size(200000000, 200000000)));
EXPECT_TRUE(ScaleSizeToEncompassTarget(
gfx::Size(0, 0), gfx::Size(2000000000, 2000000000)).IsEmpty());
}
diff --git a/chromium/media/base/win/BUILD.gn b/chromium/media/base/win/BUILD.gn
new file mode 100644
index 00000000000..4888d6f41ce
--- /dev/null
+++ b/chromium/media/base/win/BUILD.gn
@@ -0,0 +1,18 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+assert(is_win)
+
+source_set("win") {
+ set_sources_assignment_filter([])
+ sources = [
+ "mf_initializer.cc",
+ "mf_initializer.h",
+ ]
+ set_sources_assignment_filter(sources_assignment_filter)
+ configs += [
+ "//media:media_config",
+ "//media:media_implementation",
+ ]
+}
diff --git a/chromium/media/base/win/mf_initializer.cc b/chromium/media/base/win/mf_initializer.cc
new file mode 100644
index 00000000000..ff62a451e92
--- /dev/null
+++ b/chromium/media/base/win/mf_initializer.cc
@@ -0,0 +1,41 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/base/win/mf_initializer.h"
+
+#include <mfapi.h>
+
+#include "base/lazy_instance.h"
+#include "base/macros.h"
+
+namespace media {
+
+namespace {
+
+// LazyInstance to initialize the Media Foundation Library.
+class MFInitializer {
+ public:
+ MFInitializer()
+ : mf_started_(MFStartup(MF_VERSION, MFSTARTUP_LITE) == S_OK) {}
+
+ ~MFInitializer() {
+ if (mf_started_)
+ MFShutdown();
+ }
+
+ private:
+ const bool mf_started_;
+
+ DISALLOW_COPY_AND_ASSIGN(MFInitializer);
+};
+
+base::LazyInstance<MFInitializer> g_mf_initializer = LAZY_INSTANCE_INITIALIZER;
+
+} // namespace
+
+void InitializeMediaFoundation() {
+ g_mf_initializer.Get();
+}
+
+} // namespace media
diff --git a/chromium/media/base/win/mf_initializer.h b/chromium/media/base/win/mf_initializer.h
new file mode 100644
index 00000000000..4c10d6adce8
--- /dev/null
+++ b/chromium/media/base/win/mf_initializer.h
@@ -0,0 +1,18 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_BASE_WIN_MF_INITIALIZER_H_
+#define MEDIA_BASE_WIN_MF_INITIALIZER_H_
+
+#include "media/base/media_export.h"
+
+namespace media {
+
+// Makes sure MFStartup() is called exactly once, and that this call is paired
+// by a call to MFShutdown().
+MEDIA_EXPORT void InitializeMediaFoundation();
+
+} // namespace media
+
+#endif // MEDIA_BASE_WIN_MF_INITIALIZER_H_