author    Lorry Tar Creator <lorry-tar-importer@lorry>  2017-06-27 06:07:23 +0000
committer Lorry Tar Creator <lorry-tar-importer@lorry>  2017-06-27 06:07:23 +0000
commit    1bf1084f2b10c3b47fd1a588d85d21ed0eb41d0c (patch)
tree      46dcd36c86e7fbc6e5df36deb463b33e9967a6f7 /Source/WebCore/platform/audio/gstreamer/AudioDestinationGStreamer.cpp
parent    32761a6cee1d0dee366b885b7b9c777e67885688 (diff)
Diffstat (limited to 'Source/WebCore/platform/audio/gstreamer/AudioDestinationGStreamer.cpp')
-rw-r--r--  Source/WebCore/platform/audio/gstreamer/AudioDestinationGStreamer.cpp | 88
1 file changed, 40 insertions(+), 48 deletions(-)
diff --git a/Source/WebCore/platform/audio/gstreamer/AudioDestinationGStreamer.cpp b/Source/WebCore/platform/audio/gstreamer/AudioDestinationGStreamer.cpp
index 25ddcb9fa..758389ced 100644
--- a/Source/WebCore/platform/audio/gstreamer/AudioDestinationGStreamer.cpp
+++ b/Source/WebCore/platform/audio/gstreamer/AudioDestinationGStreamer.cpp
@@ -1,5 +1,6 @@
/*
* Copyright (C) 2011, 2012 Igalia S.L
+ * Copyright (C) 2014 Sebastian Dröge <sebastian@centricular.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
@@ -27,9 +28,9 @@
#include "GRefPtrGStreamer.h"
#include "Logging.h"
#include "WebKitWebAudioSourceGStreamer.h"
+#include <gst/audio/gstaudiobasesink.h>
#include <gst/gst.h>
-#include <gst/pbutils/pbutils.h>
-#include <wtf/gobject/GUniquePtr.h>
+#include <wtf/glib/GUniquePtr.h>
namespace WebCore {
@@ -42,6 +43,12 @@ gboolean messageCallback(GstBus*, GstMessage* message, AudioDestinationGStreamer
return destination->handleMessage(message);
}
+static void autoAudioSinkChildAddedCallback(GstChildProxy*, GObject* object, gchar*, gpointer)
+{
+ if (GST_IS_AUDIO_BASE_SINK(object))
+ g_object_set(GST_AUDIO_BASE_SINK(object), "buffer-time", static_cast<gint64>(100000), nullptr);
+}
+
std::unique_ptr<AudioDestination> AudioDestination::create(AudioIOCallback& callback, const String&, unsigned numberOfInputChannels, unsigned numberOfOutputChannels, float sampleRate)
{
// FIXME: make use of inputDeviceId as appropriate.
@@ -85,45 +92,17 @@ AudioDestinationGStreamer::AudioDestinationGStreamer(AudioIOCallback& callback,
"rate", sampleRate,
"bus", m_renderBus.get(),
"provider", &m_callback,
- "frames", framesToPull, NULL));
-
- GstElement* wavParser = gst_element_factory_make("wavparse", 0);
-
- m_wavParserAvailable = wavParser;
- ASSERT_WITH_MESSAGE(m_wavParserAvailable, "Failed to create GStreamer wavparse element");
- if (!m_wavParserAvailable)
- return;
-
- gst_bin_add_many(GST_BIN(m_pipeline), webkitAudioSrc, wavParser, NULL);
- gst_element_link_pads_full(webkitAudioSrc, "src", wavParser, "sink", GST_PAD_LINK_CHECK_NOTHING);
-
- GRefPtr<GstPad> srcPad = adoptGRef(gst_element_get_static_pad(wavParser, "src"));
- finishBuildingPipelineAfterWavParserPadReady(srcPad.get());
-}
-
-AudioDestinationGStreamer::~AudioDestinationGStreamer()
-{
- GRefPtr<GstBus> bus = adoptGRef(gst_pipeline_get_bus(GST_PIPELINE(m_pipeline)));
- ASSERT(bus);
- g_signal_handlers_disconnect_by_func(bus.get(), reinterpret_cast<gpointer>(messageCallback), this);
- gst_bus_remove_signal_watch(bus.get());
-
- gst_element_set_state(m_pipeline, GST_STATE_NULL);
- gst_object_unref(m_pipeline);
-}
-
-void AudioDestinationGStreamer::finishBuildingPipelineAfterWavParserPadReady(GstPad* pad)
-{
- ASSERT(m_wavParserAvailable);
+ "frames", framesToPull, nullptr));
- GRefPtr<GstElement> audioSink = gst_element_factory_make("autoaudiosink", 0);
+ GRefPtr<GstElement> audioSink = gst_element_factory_make("autoaudiosink", nullptr);
m_audioSinkAvailable = audioSink;
-
if (!audioSink) {
LOG_ERROR("Failed to create GStreamer autoaudiosink element");
return;
}
+ g_signal_connect(audioSink.get(), "child-added", G_CALLBACK(autoAudioSinkChildAddedCallback), nullptr);
+
// Autoaudiosink does the real sink detection in the GST_STATE_NULL->READY transition
// so it's best to roll it to READY as soon as possible to ensure the underlying platform
// audiosink was loaded correctly.
@@ -135,17 +114,25 @@ void AudioDestinationGStreamer::finishBuildingPipelineAfterWavParserPadReady(Gst
return;
}
- GstElement* audioConvert = gst_element_factory_make("audioconvert", 0);
- gst_bin_add_many(GST_BIN(m_pipeline), audioConvert, audioSink.get(), NULL);
+ GstElement* audioConvert = gst_element_factory_make("audioconvert", nullptr);
+ GstElement* audioResample = gst_element_factory_make("audioresample", nullptr);
+ gst_bin_add_many(GST_BIN(m_pipeline), webkitAudioSrc, audioConvert, audioResample, audioSink.get(), nullptr);
- // Link wavparse's src pad to audioconvert sink pad.
- GRefPtr<GstPad> sinkPad = adoptGRef(gst_element_get_static_pad(audioConvert, "sink"));
- gst_pad_link_full(pad, sinkPad.get(), GST_PAD_LINK_CHECK_NOTHING);
+ // Link src pads from webkitAudioSrc to audioConvert ! audioResample ! autoaudiosink.
+ gst_element_link_pads_full(webkitAudioSrc, "src", audioConvert, "sink", GST_PAD_LINK_CHECK_NOTHING);
+ gst_element_link_pads_full(audioConvert, "src", audioResample, "sink", GST_PAD_LINK_CHECK_NOTHING);
+ gst_element_link_pads_full(audioResample, "src", audioSink.get(), "sink", GST_PAD_LINK_CHECK_NOTHING);
+}
- // Link audioconvert to audiosink and roll states.
- gst_element_link_pads_full(audioConvert, "src", audioSink.get(), "sink", GST_PAD_LINK_CHECK_NOTHING);
- gst_element_sync_state_with_parent(audioConvert);
- gst_element_sync_state_with_parent(audioSink.leakRef());
+AudioDestinationGStreamer::~AudioDestinationGStreamer()
+{
+ GRefPtr<GstBus> bus = adoptGRef(gst_pipeline_get_bus(GST_PIPELINE(m_pipeline)));
+ ASSERT(bus);
+ g_signal_handlers_disconnect_by_func(bus.get(), reinterpret_cast<gpointer>(messageCallback), this);
+ gst_bus_remove_signal_watch(bus.get());
+
+ gst_element_set_state(m_pipeline, GST_STATE_NULL);
+ gst_object_unref(m_pipeline);
}
gboolean AudioDestinationGStreamer::handleMessage(GstMessage* message)
@@ -172,18 +159,23 @@ gboolean AudioDestinationGStreamer::handleMessage(GstMessage* message)
void AudioDestinationGStreamer::start()
{
- ASSERT(m_wavParserAvailable);
- if (!m_wavParserAvailable)
+ ASSERT(m_audioSinkAvailable);
+ if (!m_audioSinkAvailable)
return;
- gst_element_set_state(m_pipeline, GST_STATE_PLAYING);
+ if (gst_element_set_state(m_pipeline, GST_STATE_PLAYING) == GST_STATE_CHANGE_FAILURE) {
+ g_warning("Error: Failed to set pipeline to playing");
+ m_isPlaying = false;
+ return;
+ }
+
m_isPlaying = true;
}
void AudioDestinationGStreamer::stop()
{
- ASSERT(m_wavParserAvailable && m_audioSinkAvailable);
- if (!m_wavParserAvailable || !m_audioSinkAvailable)
+ ASSERT(m_audioSinkAvailable);
+ if (!m_audioSinkAvailable)
return;
gst_element_set_state(m_pipeline, GST_STATE_PAUSED);
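
For orientation, below is a minimal standalone sketch (not part of the patch) of the pipeline topology this commit builds: source ! audioconvert ! audioresample ! autoaudiosink, with the platform sink's buffer-time lowered to 100 ms from the "child-added" handler, as the new autoAudioSinkChildAddedCallback does. The sketch is hypothetical and uses audiotestsrc in place of WebKit's internal webkitwebaudiosrc element.

// Hypothetical sketch, assuming GStreamer 1.x with gst-plugins-base installed.
#include <gst/gst.h>
#include <gst/audio/gstaudiobasesink.h>

static void childAddedCallback(GstChildProxy*, GObject* object, gchar*, gpointer)
{
    // autoaudiosink creates the real platform sink lazily; once it appears,
    // shrink its buffer-time to 100 ms, mirroring the patch.
    if (GST_IS_AUDIO_BASE_SINK(object))
        g_object_set(object, "buffer-time", static_cast<gint64>(100000), nullptr);
}

int main(int argc, char** argv)
{
    gst_init(&argc, &argv);

    GstElement* pipeline = gst_pipeline_new("web-audio-sketch");
    GstElement* source = gst_element_factory_make("audiotestsrc", nullptr); // stand-in for webkitwebaudiosrc
    GstElement* convert = gst_element_factory_make("audioconvert", nullptr);
    GstElement* resample = gst_element_factory_make("audioresample", nullptr);
    GstElement* sink = gst_element_factory_make("autoaudiosink", nullptr);

    g_signal_connect(sink, "child-added", G_CALLBACK(childAddedCallback), nullptr);

    gst_bin_add_many(GST_BIN(pipeline), source, convert, resample, sink, nullptr);
    gst_element_link_many(source, convert, resample, sink, nullptr);

    gst_element_set_state(pipeline, GST_STATE_PLAYING);
    g_usleep(2 * G_USEC_PER_SEC); // play briefly instead of running a GMainLoop

    gst_element_set_state(pipeline, GST_STATE_NULL);
    gst_object_unref(pipeline);
    return 0;
}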