summaryrefslogtreecommitdiff
path: root/Source/WebCore/Modules/webaudio/AudioDestinationNode.cpp
diff options
context:
space:
mode:
authorLorry Tar Creator <lorry-tar-importer@lorry>2017-06-27 06:07:23 +0000
committerLorry Tar Creator <lorry-tar-importer@lorry>2017-06-27 06:07:23 +0000
commit1bf1084f2b10c3b47fd1a588d85d21ed0eb41d0c (patch)
tree46dcd36c86e7fbc6e5df36deb463b33e9967a6f7 /Source/WebCore/Modules/webaudio/AudioDestinationNode.cpp
parent32761a6cee1d0dee366b885b7b9c777e67885688 (diff)
downloadWebKitGtk-tarball-master.tar.gz
Diffstat (limited to 'Source/WebCore/Modules/webaudio/AudioDestinationNode.cpp')
-rw-r--r--Source/WebCore/Modules/webaudio/AudioDestinationNode.cpp62
1 file changed, 45 insertions, 17 deletions
diff --git a/Source/WebCore/Modules/webaudio/AudioDestinationNode.cpp b/Source/WebCore/Modules/webaudio/AudioDestinationNode.cpp
index cbe89a9a8..511281851 100644
--- a/Source/WebCore/Modules/webaudio/AudioDestinationNode.cpp
+++ b/Source/WebCore/Modules/webaudio/AudioDestinationNode.cpp
@@ -36,9 +36,12 @@
namespace WebCore {
-AudioDestinationNode::AudioDestinationNode(AudioContext* context, float sampleRate)
+AudioDestinationNode::AudioDestinationNode(AudioContext& context, float sampleRate)
: AudioNode(context, sampleRate)
, m_currentSampleFrame(0)
+ , m_isSilent(true)
+ , m_isEffectivelyPlayingAudio(false)
+ , m_muted(false)
{
addInput(std::make_unique<AudioNodeInput>(this));
@@ -50,36 +53,30 @@ AudioDestinationNode::~AudioDestinationNode()
uninitialize();
}
-void AudioDestinationNode::render(AudioBus* sourceBus, AudioBus* destinationBus, size_t numberOfFrames)
+void AudioDestinationNode::render(AudioBus*, AudioBus* destinationBus, size_t numberOfFrames)
{
// We don't want denormals slowing down any of the audio processing
// since they can very seriously hurt performance.
// This will take care of all AudioNodes because they all process within this scope.
DenormalDisabler denormalDisabler;
- context()->setAudioThread(currentThread());
+ context().setAudioThread(currentThread());
- if (!context()->isRunnable()) {
+ if (!context().isInitialized()) {
destinationBus->zero();
+ setIsSilent(true);
return;
}
- if (context()->userGestureRequiredForAudioStart()) {
- destinationBus->zero();
- return;
- }
-
- if (context()->pageConsentRequiredForAudioStart()) {
+ ASSERT(numberOfFrames);
+ if (!numberOfFrames) {
destinationBus->zero();
+ setIsSilent(true);
return;
}
// Let the context take care of any business at the start of each render quantum.
- context()->handlePreRenderTasks();
-
- // Prepare the local audio input provider for this render quantum.
- if (sourceBus)
- m_localAudioInputProvider.set(sourceBus);
+ context().handlePreRenderTasks();
// This will cause the node(s) connected to us to process, which in turn will pull on their input(s),
// all the way backwards through the rendering graph.
@@ -93,13 +90,44 @@ void AudioDestinationNode::render(AudioBus* sourceBus, AudioBus* destinationBus,
}
// Process nodes which need a little extra help because they are not connected to anything, but still need to process.
- context()->processAutomaticPullNodes(numberOfFrames);
+ context().processAutomaticPullNodes(numberOfFrames);
// Let the context take care of any business at the end of each render quantum.
- context()->handlePostRenderTasks();
+ context().handlePostRenderTasks();
// Advance current sample-frame.
m_currentSampleFrame += numberOfFrames;
+
+ setIsSilent(destinationBus->isSilent());
+
+ // The reason we are handling mute after the call to setIsSilent() is because the muted state does
+ // not affect the audio destination node's effective playing state.
+ if (m_muted)
+ destinationBus->zero();
+}
+
+void AudioDestinationNode::isPlayingDidChange()
+{
+ updateIsEffectivelyPlayingAudio();
+}
+
+void AudioDestinationNode::setIsSilent(bool isSilent)
+{
+ if (m_isSilent == isSilent)
+ return;
+
+ m_isSilent = isSilent;
+ updateIsEffectivelyPlayingAudio();
+}
+
+void AudioDestinationNode::updateIsEffectivelyPlayingAudio()
+{
+ bool isEffectivelyPlayingAudio = isPlaying() && !m_isSilent;
+ if (m_isEffectivelyPlayingAudio == isEffectivelyPlayingAudio)
+ return;
+
+ m_isEffectivelyPlayingAudio = isEffectivelyPlayingAudio;
+ context().isPlayingAudioDidChange();
}
} // namespace WebCore