Diffstat (limited to 'Source/WebCore/Modules/mediastream/MediaStreamTrack.cpp')
-rw-r--r--  Source/WebCore/Modules/mediastream/MediaStreamTrack.cpp  387
1 file changed, 220 insertions(+), 167 deletions(-)
diff --git a/Source/WebCore/Modules/mediastream/MediaStreamTrack.cpp b/Source/WebCore/Modules/mediastream/MediaStreamTrack.cpp
index 476dec103..96cc6298b 100644
--- a/Source/WebCore/Modules/mediastream/MediaStreamTrack.cpp
+++ b/Source/WebCore/Modules/mediastream/MediaStreamTrack.cpp
@@ -1,7 +1,7 @@
/*
* Copyright (C) 2011 Google Inc. All rights reserved.
- * Copyright (C) 2011 Ericsson AB. All rights reserved.
- * Copyright (C) 2013 Apple Inc. All rights reserved.
+ * Copyright (C) 2011, 2015 Ericsson AB. All rights reserved.
+ * Copyright (C) 2013-2016 Apple Inc. All rights reserved.
* Copyright (C) 2013 Nokia Corporation and/or its subsidiary(-ies).
*
* Redistribution and use in source and binary forms, with or without
@@ -30,292 +30,345 @@
#if ENABLE(MEDIA_STREAM)
-#include "AllAudioCapabilities.h"
-#include "AllVideoCapabilities.h"
-#include "AudioStreamTrack.h"
-#include "Dictionary.h"
#include "Event.h"
-#include "ExceptionCode.h"
-#include "ExceptionCodePlaceholder.h"
+#include "EventNames.h"
+#include "JSOverconstrainedError.h"
#include "MediaConstraintsImpl.h"
-#include "MediaSourceStates.h"
#include "MediaStream.h"
-#include "MediaStreamCenter.h"
#include "MediaStreamPrivate.h"
-#include "MediaStreamTrackSourcesCallback.h"
-#include "MediaStreamTrackSourcesRequest.h"
-#include "MediaTrackConstraints.h"
#include "NotImplemented.h"
-#include "VideoStreamTrack.h"
-#include <wtf/Functional.h>
+#include "OverconstrainedError.h"
+#include "ScriptExecutionContext.h"
#include <wtf/NeverDestroyed.h>
namespace WebCore {
-MediaStreamTrack::MediaStreamTrack(ScriptExecutionContext& context, MediaStreamTrackPrivate& privateTrack, const Dictionary* constraints)
- : RefCounted()
- , ActiveDOMObject(&context)
- , m_privateTrack(privateTrack)
- , m_eventDispatchScheduled(false)
- , m_stoppingTrack(false)
+Ref<MediaStreamTrack> MediaStreamTrack::create(ScriptExecutionContext& context, Ref<MediaStreamTrackPrivate>&& privateTrack)
{
- suspendIfNeeded();
-
- m_privateTrack->setClient(this);
-
- if (constraints)
- applyConstraints(*constraints);
+ return adoptRef(*new MediaStreamTrack(context, WTFMove(privateTrack)));
}
-MediaStreamTrack::MediaStreamTrack(MediaStreamTrack& other)
- : RefCounted()
- , ActiveDOMObject(other.scriptExecutionContext())
- , m_privateTrack(*other.privateTrack().clone())
- , m_eventDispatchScheduled(false)
- , m_stoppingTrack(false)
+MediaStreamTrack::MediaStreamTrack(ScriptExecutionContext& context, Ref<MediaStreamTrackPrivate>&& privateTrack)
+ : ActiveDOMObject(&context)
+ , m_private(WTFMove(privateTrack))
+ , m_weakPtrFactory(this)
{
suspendIfNeeded();
- m_privateTrack->setClient(this);
+ m_private->addObserver(*this);
}
MediaStreamTrack::~MediaStreamTrack()
{
- m_privateTrack->setClient(nullptr);
+ m_private->removeObserver(*this);
}
-void MediaStreamTrack::setSource(PassRefPtr<MediaStreamSource> newSource)
+const AtomicString& MediaStreamTrack::kind() const
{
- m_privateTrack->setSource(newSource);
+ static NeverDestroyed<AtomicString> audioKind("audio", AtomicString::ConstructFromLiteral);
+ static NeverDestroyed<AtomicString> videoKind("video", AtomicString::ConstructFromLiteral);
+
+ if (m_private->type() == RealtimeMediaSource::Audio)
+ return audioKind;
+ return videoKind;
}
const String& MediaStreamTrack::id() const
{
- return m_privateTrack->id();
+ return m_private->id();
}
const String& MediaStreamTrack::label() const
{
- return m_privateTrack->label();
+ return m_private->label();
}
bool MediaStreamTrack::enabled() const
{
- return m_privateTrack->enabled();
+ return m_private->enabled();
}
void MediaStreamTrack::setEnabled(bool enabled)
{
- m_privateTrack->setEnabled(enabled);
-}
-
-bool MediaStreamTrack::stopped() const
-{
- return m_privateTrack->stopped();
+ m_private->setEnabled(enabled);
}
bool MediaStreamTrack::muted() const
{
- return m_privateTrack->muted();
+ return m_private->muted();
}
bool MediaStreamTrack::readonly() const
{
- return m_privateTrack->readonly();
+ return m_private->readonly();
}
bool MediaStreamTrack::remote() const
{
- return m_privateTrack->remote();
+ return m_private->remote();
}
-const AtomicString& MediaStreamTrack::readyState() const
+auto MediaStreamTrack::readyState() const -> State
{
- static NeverDestroyed<AtomicString> ended("ended", AtomicString::ConstructFromLiteral);
- static NeverDestroyed<AtomicString> live("live", AtomicString::ConstructFromLiteral);
- static NeverDestroyed<AtomicString> newState("new", AtomicString::ConstructFromLiteral);
-
- switch (m_privateTrack->readyState()) {
- case MediaStreamSource::Live:
- return live;
- case MediaStreamSource::New:
- return newState;
- case MediaStreamSource::Ended:
- return ended;
- }
-
- ASSERT_NOT_REACHED();
- return emptyAtom;
+ return ended() ? State::Ended : State::Live;
}
-void MediaStreamTrack::getSources(ScriptExecutionContext* context, PassRefPtr<MediaStreamTrackSourcesCallback> callback, ExceptionCode& ec)
+bool MediaStreamTrack::ended() const
{
- RefPtr<MediaStreamTrackSourcesRequest> request = MediaStreamTrackSourcesRequest::create(context, callback);
- if (!MediaStreamCenter::shared().getMediaStreamTrackSources(request.release()))
- ec = NOT_SUPPORTED_ERR;
+ return m_ended || m_private->ended();
}
-RefPtr<MediaTrackConstraints> MediaStreamTrack::constraints() const
+Ref<MediaStreamTrack> MediaStreamTrack::clone()
{
- // FIXME: https://bugs.webkit.org/show_bug.cgi?id=122428
- notImplemented();
- return 0;
+ return MediaStreamTrack::create(*scriptExecutionContext(), m_private->clone());
}
-RefPtr<MediaSourceStates> MediaStreamTrack::states() const
+void MediaStreamTrack::stopProducingData()
{
- return MediaSourceStates::create(m_privateTrack->states());
-}
+ // NOTE: this method is called when the "stop" method is called from JS, using
+ // the "ImplementedAs" IDL attribute. This is done because ActiveDOMObject requires
+ // a "stop" method.
-RefPtr<MediaStreamCapabilities> MediaStreamTrack::capabilities() const
-{
- // The source may be shared by multiple tracks, so its states is not necessarily
- // in sync with the track state. A track that is new or has ended always has a source
- // type of "none".
- RefPtr<MediaStreamSourceCapabilities> sourceCapabilities = m_privateTrack->capabilities();
- MediaStreamSource::ReadyState readyState = m_privateTrack->readyState();
- if (readyState == MediaStreamSource::New || readyState == MediaStreamSource::Ended)
- sourceCapabilities->setSourceType(MediaStreamSourceStates::None);
-
- return MediaStreamCapabilities::create(sourceCapabilities.release());
+ // http://w3c.github.io/mediacapture-main/#widl-MediaStreamTrack-stop-void
+ // 4.3.3.2 Methods
+ // When a MediaStreamTrack object's stop() method is invoked, the User Agent must run the following steps:
+ // 1. Let track be the current MediaStreamTrack object.
+ // 2. If track is sourced by a non-local source, then abort these steps.
+ if (remote() || ended())
+ return;
+
+ // 3. Notify track's source that track is ended so that the source may be stopped, unless other
+ // MediaStreamTrack objects depend on it.
+ // 4. Set track's readyState attribute to ended.
+
+ // Set m_ended to true before telling the private to stop so we do not fire an 'ended' event.
+ m_ended = true;
+
+ m_private->endTrack();
+}
+
+MediaStreamTrack::TrackSettings MediaStreamTrack::getSettings() const
+{
+ auto& settings = m_private->settings();
+ TrackSettings result;
+ if (settings.supportsWidth())
+ result.width = settings.width();
+ if (settings.supportsHeight())
+ result.height = settings.height();
+ if (settings.supportsAspectRatio() && settings.aspectRatio()) // FIXME: Why the check for zero here?
+ result.aspectRatio = settings.aspectRatio();
+ if (settings.supportsFrameRate())
+ result.frameRate = settings.frameRate();
+ if (settings.supportsFacingMode())
+ result.facingMode = RealtimeMediaSourceSettings::facingMode(settings.facingMode());
+ if (settings.supportsVolume())
+ result.volume = settings.volume();
+ if (settings.supportsSampleRate())
+ result.sampleRate = settings.sampleRate();
+ if (settings.supportsSampleSize())
+ result.sampleSize = settings.sampleSize();
+ if (settings.supportsEchoCancellation())
+ result.echoCancellation = settings.echoCancellation();
+ if (settings.supportsDeviceId())
+ result.deviceId = settings.deviceId();
+ if (settings.supportsGroupId())
+ result.groupId = settings.groupId();
+ return result;
+}
+
+static DoubleRange capabilityDoubleRange(const CapabilityValueOrRange& value)
+{
+ DoubleRange range;
+ switch (value.type()) {
+ case CapabilityValueOrRange::Double:
+ range.min = value.value().asDouble;
+ range.max = range.min;
+ break;
+ case CapabilityValueOrRange::DoubleRange:
+ range.min = value.rangeMin().asDouble;
+ range.max = value.rangeMax().asDouble;
+ break;
+ case CapabilityValueOrRange::Undefined:
+ case CapabilityValueOrRange::ULong:
+ case CapabilityValueOrRange::ULongRange:
+ ASSERT_NOT_REACHED();
+ }
+ return range;
+}
+
+static LongRange capabilityIntRange(const CapabilityValueOrRange& value)
+{
+ LongRange range;
+ switch (value.type()) {
+ case CapabilityValueOrRange::ULong:
+ range.min = value.value().asInt;
+ range.max = range.min;
+ break;
+ case CapabilityValueOrRange::ULongRange:
+ range.min = value.rangeMin().asInt;
+ range.max = value.rangeMax().asInt;
+ break;
+ case CapabilityValueOrRange::Undefined:
+ case CapabilityValueOrRange::Double:
+ case CapabilityValueOrRange::DoubleRange:
+ ASSERT_NOT_REACHED();
+ }
+ return range;
}
-void MediaStreamTrack::applyConstraints(const Dictionary& constraints)
+static Vector<String> capabilityStringVector(const Vector<RealtimeMediaSourceSettings::VideoFacingMode>& modes)
{
- m_constraints->initialize(constraints);
- m_privateTrack->applyConstraints(m_constraints);
+ Vector<String> result;
+ result.reserveCapacity(modes.size());
+ for (auto& mode : modes)
+ result.uncheckedAppend(RealtimeMediaSourceSettings::facingMode(mode));
+ return result;
}
-void MediaStreamTrack::applyConstraints(PassRefPtr<MediaConstraints>)
+static Vector<bool> capabilityBooleanVector(RealtimeMediaSourceCapabilities::EchoCancellation cancellation)
{
- // FIXME: apply the new constraints to the track
- // https://bugs.webkit.org/show_bug.cgi?id=122428
+ Vector<bool> result;
+ result.reserveCapacity(2);
+ result.uncheckedAppend(true);
+ result.uncheckedAppend(cancellation == RealtimeMediaSourceCapabilities::EchoCancellation::ReadWrite);
+ return result;
}
-RefPtr<MediaStreamTrack> MediaStreamTrack::clone()
+MediaStreamTrack::TrackCapabilities MediaStreamTrack::getCapabilities() const
{
- if (m_privateTrack->type() == MediaStreamSource::Audio)
- return AudioStreamTrack::create(*this);
-
- return VideoStreamTrack::create(*this);
+ auto capabilities = m_private->capabilities();
+ TrackCapabilities result;
+ if (capabilities->supportsWidth())
+ result.width = capabilityIntRange(capabilities->width());
+ if (capabilities->supportsHeight())
+ result.height = capabilityIntRange(capabilities->height());
+ if (capabilities->supportsAspectRatio())
+ result.aspectRatio = capabilityDoubleRange(capabilities->aspectRatio());
+ if (capabilities->supportsFrameRate())
+ result.frameRate = capabilityDoubleRange(capabilities->frameRate());
+ if (capabilities->supportsFacingMode())
+ result.facingMode = capabilityStringVector(capabilities->facingMode());
+ if (capabilities->supportsVolume())
+ result.volume = capabilityDoubleRange(capabilities->volume());
+ if (capabilities->supportsSampleRate())
+ result.sampleRate = capabilityIntRange(capabilities->sampleRate());
+ if (capabilities->supportsSampleSize())
+ result.sampleSize = capabilityIntRange(capabilities->sampleSize());
+ if (capabilities->supportsEchoCancellation())
+ result.echoCancellation = capabilityBooleanVector(capabilities->echoCancellation());
+ if (capabilities->supportsDeviceId())
+ result.deviceId = capabilities->deviceId();
+ if (capabilities->supportsGroupId())
+ result.groupId = capabilities->groupId();
+ return result;
}
-void MediaStreamTrack::stopProducingData()
+static Ref<MediaConstraintsImpl> createMediaConstraintsImpl(const std::optional<MediaTrackConstraints>& constraints)
{
- // NOTE: this method is called when the "stop" method is called from JS, using
- // the "ImplementedAs" IDL attribute. This is done because ActiveDOMObject requires
- // a "stop" method.
-
- // The stop method should "Permanently stop the generation of data for track's source", but it
- // should not post an 'ended' event.
- m_stoppingTrack = true;
- m_privateTrack->stop(MediaStreamTrackPrivate::StopTrackAndStopSource);
- m_stoppingTrack = false;
+ if (!constraints)
+ return MediaConstraintsImpl::create({ }, { }, true);
+ return createMediaConstraintsImpl(constraints.value());
}
-bool MediaStreamTrack::ended() const
+void MediaStreamTrack::applyConstraints(const std::optional<MediaTrackConstraints>& constraints, DOMPromise<void>&& promise)
{
- return m_privateTrack->ended();
+ m_promise = WTFMove(promise);
+
+ auto weakThis = createWeakPtr();
+ auto failureHandler = [weakThis] (const String& failedConstraint, const String& message) {
+ if (!weakThis || !weakThis->m_promise)
+ return;
+ weakThis->m_promise->rejectType<IDLInterface<OverconstrainedError>>(OverconstrainedError::create(failedConstraint, message).get());
+ };
+ auto successHandler = [weakThis, constraints] () {
+ if (!weakThis || !weakThis->m_promise)
+ return;
+ weakThis->m_promise->resolve();
+ weakThis->m_constraints = constraints.value_or(MediaTrackConstraints { });
+ };
+ m_private->applyConstraints(createMediaConstraintsImpl(constraints), successHandler, failureHandler);
}
-void MediaStreamTrack::addObserver(MediaStreamTrack::Observer* observer)
+void MediaStreamTrack::addObserver(Observer& observer)
{
- m_observers.append(observer);
+ m_observers.append(&observer);
}
-void MediaStreamTrack::removeObserver(MediaStreamTrack::Observer* observer)
+void MediaStreamTrack::removeObserver(Observer& observer)
{
- size_t pos = m_observers.find(observer);
- if (pos != notFound)
- m_observers.remove(pos);
+ m_observers.removeFirst(&observer);
}
-void MediaStreamTrack::trackReadyStateChanged()
+void MediaStreamTrack::trackEnded(MediaStreamTrackPrivate&)
{
- if (stopped())
+ // http://w3c.github.io/mediacapture-main/#life-cycle
+ // When a MediaStreamTrack track ends for any reason other than the stop() method being invoked, the User Agent must queue a task that runs the following steps:
+ // 1. If the track's readyState attribute has the value ended already, then abort these steps.
+ if (m_ended)
return;
- MediaStreamSource::ReadyState readyState = m_privateTrack->readyState();
- if (readyState == MediaStreamSource::Live)
- scheduleEventDispatch(Event::create(eventNames().startedEvent, false, false));
- else if (readyState == MediaStreamSource::Ended && !m_stoppingTrack)
- scheduleEventDispatch(Event::create(eventNames().endedEvent, false, false));
+ // 2. Set track's readyState attribute to ended.
+ m_ended = true;
+
+ if (scriptExecutionContext()->activeDOMObjectsAreSuspended() || scriptExecutionContext()->activeDOMObjectsAreStopped())
+ return;
+
+ // 3. Notify track's source that track is ended so that the source may be stopped, unless other MediaStreamTrack objects depend on it.
+ // 4. Fire a simple event named ended at the object.
+ dispatchEvent(Event::create(eventNames().endedEvent, false, false));
+
+ for (auto& observer : m_observers)
+ observer->trackDidEnd();
configureTrackRendering();
}
-void MediaStreamTrack::trackMutedChanged()
+void MediaStreamTrack::trackMutedChanged(MediaStreamTrackPrivate&)
{
- if (stopped())
+ if (scriptExecutionContext()->activeDOMObjectsAreSuspended() || scriptExecutionContext()->activeDOMObjectsAreStopped())
return;
- if (muted())
- scheduleEventDispatch(Event::create(eventNames().muteEvent, false, false));
- else
- scheduleEventDispatch(Event::create(eventNames().unmuteEvent, false, false));
+ AtomicString eventType = muted() ? eventNames().muteEvent : eventNames().unmuteEvent;
+ dispatchEvent(Event::create(eventType, false, false));
configureTrackRendering();
}
-void MediaStreamTrack::trackEnabledChanged()
+void MediaStreamTrack::trackSettingsChanged(MediaStreamTrackPrivate&)
{
- if (stopped())
- return;
+ configureTrackRendering();
+}
- setEnabled(m_privateTrack->enabled());
+void MediaStreamTrack::trackEnabledChanged(MediaStreamTrackPrivate&)
+{
configureTrackRendering();
}
void MediaStreamTrack::configureTrackRendering()
{
- if (stopped())
- return;
-
// 4.3.1
// ... media from the source only flows when a MediaStreamTrack object is both unmuted and enabled
}
-void MediaStreamTrack::trackDidEnd()
+void MediaStreamTrack::stop()
{
- m_privateTrack->setReadyState(MediaStreamSource::Ended);
-
- for (Vector<Observer*>::iterator i = m_observers.begin(); i != m_observers.end(); ++i)
- (*i)->trackDidEnd();
+ stopProducingData();
}
-void MediaStreamTrack::stop()
+const char* MediaStreamTrack::activeDOMObjectName() const
{
- m_privateTrack->stop(MediaStreamTrackPrivate::StopTrackOnly);
+ return "MediaStreamTrack";
}
-void MediaStreamTrack::scheduleEventDispatch(PassRefPtr<Event> event)
+bool MediaStreamTrack::canSuspendForDocumentSuspension() const
{
- {
- MutexLocker locker(m_mutex);
- m_scheduledEvents.append(event);
- if (m_eventDispatchScheduled)
- return;
- m_eventDispatchScheduled = true;
- }
-
- callOnMainThread(bind(&MediaStreamTrack::dispatchQueuedEvents, this));
+ // FIXME: We should try and do better here.
+ return false;
}
-void MediaStreamTrack::dispatchQueuedEvents()
+AudioSourceProvider* MediaStreamTrack::audioSourceProvider()
{
- Vector<RefPtr<Event>> events;
- {
- MutexLocker locker(m_mutex);
- m_eventDispatchScheduled = false;
- events.swap(m_scheduledEvents);
- }
- if (!scriptExecutionContext())
- return;
-
- for (auto it = events.begin(); it != events.end(); ++it)
- dispatchEvent((*it).release());
-
- events.clear();
+ return m_private->audioSourceProvider();
}
} // namespace WebCore
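
The most reusable pattern in this patch is the promise plumbing in the new applyConstraints(): both the success and failure handlers capture a WTF::WeakPtr to the track, so a handler that fires after the MediaStreamTrack has been destroyed (or after its promise has already been settled) simply returns instead of touching freed state. Below is a standalone sketch of that guard, not WebKit code: std::weak_ptr stands in for WTF::WeakPtr, and Track, pendingCallback, and hasPendingPromise are illustrative names invented for the example.

#include <functional>
#include <iostream>
#include <memory>
#include <string>

// The "platform" side: it stores a handler and invokes it later, possibly
// after the track that registered it has already been destroyed.
static std::function<void()> pendingCallback;

struct Track : std::enable_shared_from_this<Track> {
    bool hasPendingPromise { false };

    void applyConstraints()
    {
        hasPendingPromise = true;
        std::weak_ptr<Track> weakThis = weak_from_this();

        // Success handler: only touch the track if it is still alive and its
        // promise has not been settled yet -- the same guard the patch uses.
        pendingCallback = [weakThis] {
            auto self = weakThis.lock();
            if (!self || !self->hasPendingPromise)
                return;
            self->hasPendingPromise = false;
            std::cout << "promise resolved\n";
        };
    }
};

int main()
{
    {
        auto track = std::make_shared<Track>();
        track->applyConstraints();
    } // track destroyed before the platform answers

    pendingCallback(); // safe: the weak pointer is expired, so the handler is a no-op
}

In the real patch the two lambdas are handed to MediaStreamTrackPrivate::applyConstraints(), which settles the stored DOMPromise through whichever handler the underlying source eventually invokes.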