// NOTE(review): removed stray page-navigation text ("Source code",
// "Revision control", "Other Tools") — an artifact of exporting this file
// from a code-search tool; it is not valid C++ and not part of this
// translation unit.
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim: set ts=8 sts=2 et sw=2 tw=80: */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#ifdef XP_WIN
# include "objbase.h"
#endif
#include "mozilla/dom/HTMLMediaElement.h"
#include "AudioDeviceInfo.h"
#include "AudioStreamTrack.h"
#include "AutoplayPolicy.h"
#include "ChannelMediaDecoder.h"
#include "CrossGraphPort.h"
#include "DOMMediaStream.h"
#include "DecoderDoctorDiagnostics.h"
#include "DecoderDoctorLogger.h"
#include "DecoderTraits.h"
#include "FrameStatistics.h"
#include "GMPCrashHelper.h"
#include "GVAutoplayPermissionRequest.h"
#ifdef MOZ_ANDROID_HLS_SUPPORT
# include "HLSDecoder.h"
#endif
#include "HTMLMediaElement.h"
#include "ImageContainer.h"
#include "Layers.h"
#include "MP4Decoder.h"
#include "MediaContainerType.h"
#include "MediaError.h"
#include "MediaManager.h"
#include "MediaMetadataManager.h"
#include "MediaResource.h"
#include "MediaShutdownManager.h"
#include "MediaSourceDecoder.h"
#include "MediaStreamError.h"
#include "MediaTrackGraphImpl.h"
#include "MediaTrackListener.h"
#include "MediaStreamWindowCapturer.h"
#include "MediaTrack.h"
#include "MediaTrackList.h"
#include "TimeRanges.h"
#include "VideoFrameContainer.h"
#include "VideoOutput.h"
#include "VideoStreamTrack.h"
#include "base/basictypes.h"
#include "jsapi.h"
#include "mozilla/ArrayUtils.h"
#include "mozilla/AsyncEventDispatcher.h"
#include "mozilla/EMEUtils.h"
#include "mozilla/EventDispatcher.h"
#include "mozilla/FloatingPoint.h"
#include "mozilla/MathAlgorithms.h"
#include "mozilla/NotNull.h"
#include "mozilla/Preferences.h"
#include "mozilla/PresShell.h"
#include "mozilla/Sprintf.h"
#include "mozilla/StaticPrefs_media.h"
#include "mozilla/SVGObserverUtils.h"
#include "mozilla/Telemetry.h"
#include "mozilla/dom/AudioTrack.h"
#include "mozilla/dom/AudioTrackList.h"
#include "mozilla/dom/BlobURLProtocolHandler.h"
#include "mozilla/dom/ContentMediaController.h"
#include "mozilla/dom/ElementInlines.h"
#include "mozilla/dom/HTMLAudioElement.h"
#include "mozilla/dom/HTMLInputElement.h"
#include "mozilla/dom/HTMLMediaElementBinding.h"
#include "mozilla/dom/HTMLSourceElement.h"
#include "mozilla/dom/HTMLVideoElement.h"
#include "mozilla/dom/MediaControlUtils.h"
#include "mozilla/dom/MediaEncryptedEvent.h"
#include "mozilla/dom/MediaErrorBinding.h"
#include "mozilla/dom/MediaSource.h"
#include "mozilla/dom/PlayPromise.h"
#include "mozilla/dom/Promise.h"
#include "mozilla/dom/TextTrack.h"
#include "mozilla/dom/UserActivation.h"
#include "mozilla/dom/VideoPlaybackQuality.h"
#include "mozilla/dom/VideoTrack.h"
#include "mozilla/dom/VideoTrackList.h"
#include "mozilla/dom/WakeLock.h"
#include "mozilla/dom/power/PowerManagerService.h"
#include "mozilla/net/UrlClassifierFeatureFactory.h"
#include "nsAttrValueInlines.h"
#include "nsContentPolicyUtils.h"
#include "nsContentUtils.h"
#include "nsCycleCollectionParticipant.h"
#include "nsDisplayList.h"
#include "nsDocShell.h"
#include "nsError.h"
#include "nsGenericHTMLElement.h"
#include "nsGkAtoms.h"
#include "nsIAsyncVerifyRedirectCallback.h"
#include "nsICachingChannel.h"
#include "nsIClassOfService.h"
#include "nsIContentPolicy.h"
#include "nsIDocShell.h"
#include "mozilla/dom/Document.h"
#include "nsIFrame.h"
#include "nsIHttpChannel.h"
#include "nsIObserverService.h"
#include "nsIRequest.h"
#include "nsIScriptError.h"
#include "nsISupportsPrimitives.h"
#include "nsIThreadRetargetableStreamListener.h"
#include "nsITimer.h"
#include "nsJSUtils.h"
#include "nsLayoutUtils.h"
#include "nsMediaFragmentURIParser.h"
#include "nsMimeTypes.h"
#include "nsNetUtil.h"
#include "nsNodeInfoManager.h"
#include "nsPresContext.h"
#include "nsQueryObject.h"
#include "nsRange.h"
#include "nsSize.h"
#include "nsThreadUtils.h"
#include "nsURIHashKey.h"
#include "nsVideoFrame.h"
#include "ReferrerInfo.h"
#include "xpcpublic.h"
#include <algorithm>
#include <cmath>
#include <limits>
#include <type_traits>
// Main log module for media element state; use via the LOG() macro below.
mozilla::LazyLogModule gMediaElementLog("nsMediaElement");
// Separate, chattier log module for dispatched DOM events; use LOG_EVENT().
static mozilla::LazyLogModule gMediaElementEventsLog("nsMediaElementEvents");
// Defined in another translation unit; shared with the autoplay code.
extern mozilla::LazyLogModule gAutoplayPermissionLog;

#define AUTOPLAY_LOG(msg, ...) \
  MOZ_LOG(gAutoplayPermissionLog, LogLevel::Debug, (msg, ##__VA_ARGS__))

// avoid redefined macro in unified build
#undef MEDIACONTROL_LOG
// Logs with the current `this` (an HTMLMediaElement) as context; only usable
// inside HTMLMediaElement (or inner classes holding a matching `this`).
#define MEDIACONTROL_LOG(msg, ...)          \
  MOZ_LOG(gMediaControlLog, LogLevel::Debug, \
          ("HTMLMediaElement=%p, " msg, this, ##__VA_ARGS__))

#undef CONTROLLER_TIMER_LOG
// Same as MEDIACONTROL_LOG but takes the element pointer explicitly.
#define CONTROLLER_TIMER_LOG(element, msg, ...) \
  MOZ_LOG(gMediaControlLog, LogLevel::Debug,     \
          ("HTMLMediaElement=%p, " msg, element, ##__VA_ARGS__))

#define LOG(type, msg) MOZ_LOG(gMediaElementLog, type, msg)
#define LOG_EVENT(type, msg) MOZ_LOG(gMediaElementEventsLog, type, msg)
using namespace mozilla::layers;
using mozilla::net::nsMediaFragmentURIParser;
using namespace mozilla::dom::HTMLMediaElement_Binding;

namespace mozilla::dom {

using AudibleState = AudioChannelService::AudibleState;

// Number of milliseconds between progress events as defined by spec
static const uint32_t PROGRESS_MS = 350;

// Number of milliseconds of no data before a stall event is fired as defined
// by spec
static const uint32_t STALL_MS = 3000;

// Used by AudioChannel for suppressing the volume to this ratio.
#define FADED_VOLUME_RATIO 0.25

// These constants are arbitrary
// Minimum playbackRate for a media
static const double MIN_PLAYBACKRATE = 1.0 / 16;
// Maximum playbackRate for a media
static const double MAX_PLAYBACKRATE = 16.0;
// These are the limits beyond which SoundTouch does not perform too well and
// when speech is hard to understand anyway.
// Threshold above which audio is muted
static const double THRESHOLD_HIGH_PLAYBACKRATE_AUDIO = 4.0;
// Threshold under which audio is muted
static const double THRESHOLD_LOW_PLAYBACKRATE_AUDIO = 0.25;
/**
 * Clamp aPlaybackRate into the supported range
 * [MIN_PLAYBACKRATE, MAX_PLAYBACKRATE].
 *
 * 0.0 is passed through unchanged: it is a legal rate and must not be raised
 * to the minimum. Negative rates are a caller bug (asserted).
 */
static double ClampPlaybackRate(double aPlaybackRate) {
  MOZ_ASSERT(aPlaybackRate >= 0.0);
  if (aPlaybackRate == 0.0) {
    // Deliberately not clamped; see above.
    return aPlaybackRate;
  }
  // std::clamp expresses the intent more directly than two manual branches.
  return std::clamp(aPlaybackRate, MIN_PLAYBACKRATE, MAX_PLAYBACKRATE);
}
// Media error values. These need to match the ones in MediaError.webidl.
// (Kept as plain constants rather than an enum so they compare directly with
// the WebIDL-defined unsigned short codes.)
static const unsigned short MEDIA_ERR_ABORTED = 1;
static const unsigned short MEDIA_ERR_NETWORK = 2;
static const unsigned short MEDIA_ERR_DECODE = 3;
static const unsigned short MEDIA_ERR_SRC_NOT_SUPPORTED = 4;
static void ResolvePromisesWithUndefined(
const nsTArray<RefPtr<PlayPromise>>& aPromises) {
for (auto& promise : aPromises) {
promise->MaybeResolveWithUndefined();
}
}
static void RejectPromises(const nsTArray<RefPtr<PlayPromise>>& aPromises,
nsresult aError) {
for (auto& promise : aPromises) {
promise->MaybeReject(aError);
}
}
// Under certain conditions there may be no-one holding references to
// a media element from script, DOM parent, etc, but the element may still
// fire meaningful events in the future so we can't destroy it yet:
// 1) If the element is delaying the load event (or would be, if it were
// in a document), then events up to loadeddata or error could be fired,
// so we need to stay alive.
// 2) If the element is not paused and playback has not ended, then
// we will (or might) play, sending timeupdate and ended events and possibly
// audio output, so we need to stay alive.
// 3) if the element is seeking then we will fire seeking events and possibly
// start playing afterward, so we need to stay alive.
// 4) If autoplay could start playback in this element (if we got enough data),
// then we need to stay alive.
// 5) if the element is currently loading, not suspended, and its source is
// not a MediaSource, then script might be waiting for progress events or a
// 'stalled' or 'suspend' event, so we need to stay alive.
// If we're already suspended then (all other conditions being met),
// it's OK to just disappear without firing any more events,
// since we have the freedom to remain suspended indefinitely. Note
// that we could use this 'suspended' loophole to garbage-collect a suspended
// element in case 4 even if it had 'autoplay' set, but we choose not to.
// If someone throws away all references to a loading 'autoplay' element
// sound should still eventually play.
// 6) If the source is a MediaSource, most loading events will not fire unless
// appendBuffer() is called on a SourceBuffer, in which case something is
// already referencing the SourceBuffer, which keeps the associated media
// element alive. Further, a MediaSource will never time out the resource
// fetch, and so should not keep the media element alive if it is
// unreferenced. A pending 'stalled' event keeps the media element alive.
//
// Media elements owned by inactive documents (i.e. documents not contained in
// any document viewer) should never hold a self-reference because none of the
// above conditions are allowed: the element will stop loading and playing
// and never resume loading or playing unless its owner document changes to
// an active document (which can only happen if there is an external reference
// to the element).
// Media elements with no owner doc should be able to hold a self-reference.
// Something native must have created the element and may expect it to
// stay alive to play.
// It's very important that any change in state which could change the value of
// needSelfReference in AddRemoveSelfReference be followed by a call to
// AddRemoveSelfReference before this element could die!
// It's especially important if needSelfReference would change to 'true',
// since if we neglect to add a self-reference, this element might be
// garbage collected while there are still event listeners that should
// receive events. If we neglect to remove the self-reference then the element
// just lives longer than it needs to.
/**
 * Base class for main-thread runnables that fire events on a media element.
 * Captures the element's current load ID at construction so the event can be
 * silently dropped (via IsCancelled()) if the element has started a new load
 * before the runnable runs.
 */
class nsMediaEvent : public Runnable {
 public:
  explicit nsMediaEvent(const char* aName, HTMLMediaElement* aElement)
      : Runnable(aName),
        mElement(aElement),
        mLoadID(mElement->GetCurrentLoadID()) {}
  ~nsMediaEvent() = default;

  NS_IMETHOD Run() override = 0;

 protected:
  // True if the element has begun a new load since this event was created,
  // i.e. the event is stale and should not be dispatched.
  bool IsCancelled() { return mElement->GetCurrentLoadID() != mLoadID; }

  // Strong reference keeps the element alive until the event has run.
  RefPtr<HTMLMediaElement> mElement;
  // Load ID captured at construction; compared in IsCancelled().
  uint32_t mLoadID;
};
/**
 * Runnable that asynchronously dispatches a named DOM event on a media
 * element, unless the element has since started a new load.
 */
class HTMLMediaElement::nsAsyncEventRunner : public nsMediaEvent {
 private:
  // Name of the DOM event to dispatch.
  nsString mEventName;

 public:
  nsAsyncEventRunner(const nsAString& aName, HTMLMediaElement* aElement)
      : nsMediaEvent("HTMLMediaElement::nsAsyncEventRunner", aElement),
        mEventName(aName) {}

  NS_IMETHOD Run() override {
    if (IsCancelled()) {
      // A newer load superseded us; drop the event silently.
      return NS_OK;
    }
    return mElement->DispatchEvent(mEventName);
  }
};
/*
* If no error is passed while constructing an instance, the instance will
* resolve the passed promises with undefined; otherwise, the instance will
* reject the passed promises with the passed error.
*
* The constructor appends the constructed instance into the passed media
* element's mPendingPlayPromisesRunners member and once the runner is run
* (whether fulfilled or canceled), it removes itself from
* mPendingPlayPromisesRunners.
*/
class HTMLMediaElement::nsResolveOrRejectPendingPlayPromisesRunner
    : public nsMediaEvent {
  // The play() promises to settle when this runner runs.
  nsTArray<RefPtr<PlayPromise>> mPromises;
  // NS_OK means resolve the promises with undefined; any failure code means
  // reject them with that error.
  nsresult mError;

 public:
  nsResolveOrRejectPendingPlayPromisesRunner(
      HTMLMediaElement* aElement, nsTArray<RefPtr<PlayPromise>>&& aPromises,
      nsresult aError = NS_OK)
      : nsMediaEvent(
            "HTMLMediaElement::nsResolveOrRejectPendingPlayPromisesRunner",
            aElement),
        mPromises(std::move(aPromises)),
        mError(aError) {
    // Register on the element so the runner stays tracked until it has run
    // (Run() removes the entry again).
    mElement->mPendingPlayPromisesRunners.AppendElement(this);
  }

  // Settle all held promises according to mError.
  void ResolveOrReject() {
    if (NS_SUCCEEDED(mError)) {
      ResolvePromisesWithUndefined(mPromises);
    } else {
      RejectPromises(mPromises, mError);
    }
  }

  NS_IMETHOD Run() override {
    // Skip settling if our load has been cancelled, but always unregister
    // ourselves from the element's pending-runners list.
    if (!IsCancelled()) {
      ResolveOrReject();
    }
    mElement->mPendingPlayPromisesRunners.RemoveElement(this);
    return NS_OK;
  }
};
/**
 * Runner that dispatches the "playing" event and then resolves the pending
 * play() promises by delegating to the base-class Run().
 */
class HTMLMediaElement::nsNotifyAboutPlayingRunner
    : public nsResolveOrRejectPendingPlayPromisesRunner {
 public:
  nsNotifyAboutPlayingRunner(
      HTMLMediaElement* aElement,
      nsTArray<RefPtr<PlayPromise>>&& aPendingPlayPromises)
      : nsResolveOrRejectPendingPlayPromisesRunner(
            aElement, std::move(aPendingPlayPromises)) {}

  NS_IMETHOD Run() override {
    if (IsCancelled()) {
      // Stale event: don't dispatch "playing", but still unregister from the
      // pending-runners list as the base class's Run() would have done.
      mElement->mPendingPlayPromisesRunners.RemoveElement(this);
      return NS_OK;
    }
    mElement->DispatchEvent(u"playing"_ns);
    // Base-class Run() settles the promises and unregisters us.
    return nsResolveOrRejectPendingPlayPromisesRunner::Run();
  }
};
/**
 * Runner that dispatches a trusted, non-bubbling, non-cancelable "error"
 * event at a child source element of a media element.
 */
class nsSourceErrorEventRunner : public nsMediaEvent {
 private:
  // The source node the "error" event is targeted at.
  nsCOMPtr<nsIContent> mSource;

 public:
  nsSourceErrorEventRunner(HTMLMediaElement* aElement, nsIContent* aSource)
      : nsMediaEvent("dom::nsSourceErrorEventRunner", aElement),
        mSource(aSource) {}

  NS_IMETHOD Run() override {
    // Silently cancel if our load has been cancelled.
    if (IsCancelled()) return NS_OK;
    LOG_EVENT(LogLevel::Debug,
              ("%p Dispatching simple event source error", mElement.get()));
    return nsContentUtils::DispatchTrustedEvent(mElement->OwnerDoc(), mSource,
                                                u"error"_ns, CanBubble::eNo,
                                                Cancelable::eNo);
  }
};
/**
* We use MediaControlKeyListener to listen to media control key in order to
* play and pause media element when user press media control keys and update
* media's playback and audible state to the media controller.
*
* Use `Start()` to start listening event and use `Stop()` to stop listening
* event. In addition, notifying any change to media controller MUST be done
* after successfully calling `Start()`.
*/
class HTMLMediaElement::MediaControlKeyListener final
    : public ContentMediaControlKeyReceiver {
 public:
  NS_INLINE_DECL_REFCOUNTING(MediaControlKeyListener, override)

  MOZ_INIT_OUTSIDE_CTOR explicit MediaControlKeyListener(
      HTMLMediaElement* aElement)
      : mElement(aElement) {
    MOZ_ASSERT(NS_IsMainThread());
    MOZ_ASSERT(aElement);
  }

  /**
   * Start listening to the media control keys which would make media being
   * able to be controlled via pressing media control keys.
   */
  void Start() {
    MOZ_ASSERT(NS_IsMainThread());
    if (IsStarted()) {
      // We have already been started, do not notify start twice.
      return;
    }
    // If we fail to init the media agent, we are not able to notify the media
    // controller of any update and also are not able to receive media control
    // key events.
    if (!InitMediaAgent()) {
      MEDIACONTROL_LOG("Failed to start due to not able to init media agent!");
      return;
    }
    NotifyPlaybackStateChanged(MediaPlaybackState::eStarted);
    // If the owner has started playing before the listener starts, we should
    // update the playing state as well. Eg. media starts inaudibly and
    // becomes audible later.
    if (!Owner()->Paused()) {
      NotifyMediaStartedPlaying();
    }
    if (StaticPrefs::media_mediacontrol_testingevents_enabled()) {
      auto dispatcher = MakeRefPtr<AsyncEventDispatcher>(
          Owner(), u"MozStartMediaControl"_ns, CanBubble::eYes,
          ChromeOnlyDispatch::eYes);
      dispatcher->PostDOMEvent();
    }
  }

  /**
   * Stop listening to the media control keys which would make media not be
   * able to be controlled via pressing media control keys. If we haven't
   * started listening to the media control keys, then nothing would happen.
   */
  void StopIfNeeded() {
    MOZ_ASSERT(NS_IsMainThread());
    if (!IsStarted()) {
      // We have already been stopped, do not notify stop twice.
      return;
    }
    NotifyMediaStoppedPlaying();
    NotifyPlaybackStateChanged(MediaPlaybackState::eStopped);
    // Remove ourselves from the media agent, which stops event delivery.
    mControlAgent->RemoveReceiver(this);
    mControlAgent = nullptr;
  }

  // Started means anything other than the stopped state (eStarted, ePlayed,
  // ePaused).
  bool IsStarted() const { return mState != MediaPlaybackState::eStopped; }

  bool IsPlaying() const override {
    return Owner() ? !Owner()->Paused() : false;
  }

  /**
   * Following methods should only be used after starting the listener.
   */
  void NotifyMediaStartedPlaying() {
    MOZ_ASSERT(NS_IsMainThread());
    MOZ_ASSERT(IsStarted());
    if (mState == MediaPlaybackState::eStarted ||
        mState == MediaPlaybackState::ePaused) {
      NotifyPlaybackStateChanged(MediaPlaybackState::ePlayed);
      // If media is `inaudible` in the beginning, then we don't need to
      // notify the state, because notifying `inaudible` should always come
      // after notifying `audible`.
      if (mIsOwnerAudible) {
        NotifyAudibleStateChanged(MediaAudibleState::eAudible);
      }
    }
  }

  void NotifyMediaStoppedPlaying() {
    MOZ_ASSERT(NS_IsMainThread());
    MOZ_ASSERT(IsStarted());
    if (mState == MediaPlaybackState::ePlayed) {
      NotifyPlaybackStateChanged(MediaPlaybackState::ePaused);
      // As media is going to be paused, no sound can possibly be heard.
      if (mIsOwnerAudible) {
        NotifyAudibleStateChanged(MediaAudibleState::eInaudible);
      }
    }
  }

  // This method can be called before the listener starts, which would cache
  // the audible state and update it after the listener starts.
  void UpdateMediaAudibleState(bool aIsOwnerAudible) {
    MOZ_ASSERT(NS_IsMainThread());
    if (mIsOwnerAudible == aIsOwnerAudible) {
      return;
    }
    mIsOwnerAudible = aIsOwnerAudible;
    MEDIACONTROL_LOG("Media becomes %s",
                     mIsOwnerAudible ? "audible" : "inaudible");
    // If media hasn't started playing, it doesn't make sense to update media
    // audible state. Therefore, in that case we would notify the audible
    // state when media starts playing.
    if (mState == MediaPlaybackState::ePlayed) {
      NotifyAudibleStateChanged(mIsOwnerAudible
                                    ? MediaAudibleState::eAudible
                                    : MediaAudibleState::eInaudible);
    }
  }

  void SetPictureInPictureModeEnabled(bool aIsEnabled) {
    MOZ_ASSERT(NS_IsMainThread());
    if (mIsPictureInPictureEnabled == aIsEnabled) {
      return;
    }
    // PIP state changes might happen before the listener starts or stops,
    // where we haven't called `InitMediaAgent()` yet. Eg. Reset the PIP
    // video's src, then cancel the PIP. In addition, unlike playback and
    // audible state, which should be restricted to update via the same agent
    // in order to keep those states correct in each `ContextMediaInfo`, PIP
    // state can be updated through any browsing context, so we would use
    // `ContentMediaAgent::Get()` directly to update PIP state.
    mIsPictureInPictureEnabled = aIsEnabled;
    if (RefPtr<IMediaInfoUpdater> updater =
            ContentMediaAgent::Get(GetCurrentBrowsingContext())) {
      updater->SetIsInPictureInPictureMode(mOwnerBrowsingContextId,
                                           mIsPictureInPictureEnabled);
    }
  }

  void HandleMediaKey(MediaControlKey aKey) override {
    MOZ_ASSERT(NS_IsMainThread());
    MOZ_ASSERT(IsStarted());
    MEDIACONTROL_LOG("HandleEvent '%s'", ToMediaControlKeyStr(aKey));
    if (aKey == MediaControlKey::Play) {
      Owner()->Play();
    } else if (aKey == MediaControlKey::Pause) {
      Owner()->Pause();
    } else {
      MOZ_ASSERT(aKey == MediaControlKey::Stop,
                 "Not supported key for media element!");
      Owner()->Pause();
      StopIfNeeded();
    }
  }

  void UpdateOwnerBrowsingContextIfNeeded() {
    // Has not notified any information about the owner context yet.
    if (!IsStarted()) {
      return;
    }
    BrowsingContext* currentBC = GetCurrentBrowsingContext();
    MOZ_ASSERT(currentBC);
    // Still in the same browsing context, no need to update.
    if (currentBC->Id() == mOwnerBrowsingContextId) {
      return;
    }
    MEDIACONTROL_LOG("Change browsing context from %" PRIu64 " to %" PRIu64,
                     mOwnerBrowsingContextId, currentBC->Id());
    // This situation would happen when we start a media in an original
    // browsing context, then we move it to another browsing context, such as
    // an iframe, so its owner browsing context would be changed. Therefore,
    // we should reset the media status for the previous browsing context by
    // calling `Stop()`, in which the listener would notify `ePaused` (if it's
    // playing) and `eStop`. Then call `Start()`, in which the listener would
    // notify `eStart` to the new browsing context. If the media was playing
    // before, we would also notify `ePlayed`.
    bool wasInPlayingState = mState == MediaPlaybackState::ePlayed;
    StopIfNeeded();
    Start();
    if (wasInPlayingState) {
      NotifyMediaStartedPlaying();
    }
  }

 private:
  ~MediaControlKeyListener() = default;

  // The media can be moved around different browsing contexts, so this
  // context might be different from the one that we used to initialize
  // `ContentMediaAgent`.
  BrowsingContext* GetCurrentBrowsingContext() const {
    // Owner has been CCed, which would break the link of the weak pointer.
    if (!Owner()) {
      return nullptr;
    }
    nsPIDOMWindowInner* window = Owner()->OwnerDoc()->GetInnerWindow();
    return window ? window->GetBrowsingContext() : nullptr;
  }

  // Returns false when no agent is available for the current browsing
  // context; on success caches the agent and the owner context id and
  // registers this listener as a receiver.
  bool InitMediaAgent() {
    MOZ_ASSERT(NS_IsMainThread());
    BrowsingContext* currentBC = GetCurrentBrowsingContext();
    mControlAgent = ContentMediaAgent::Get(currentBC);
    if (!mControlAgent) {
      return false;
    }
    MOZ_ASSERT(currentBC);
    mOwnerBrowsingContextId = currentBC->Id();
    MEDIACONTROL_LOG("Init agent in browsing context %" PRIu64,
                     mOwnerBrowsingContextId);
    mControlAgent->AddReceiver(this);
    return true;
  }

  HTMLMediaElement* Owner() const {
    // `mElement` would be cleared during CC unlink, but that would only
    // happen after stopping the listener.
    MOZ_ASSERT(mElement || !IsStarted());
    return mElement.get();
  }

  void NotifyPlaybackStateChanged(MediaPlaybackState aState) {
    MOZ_ASSERT(NS_IsMainThread());
    MOZ_ASSERT(mControlAgent);
    MEDIACONTROL_LOG("NotifyMediaState from state='%s' to state='%s'",
                     ToMediaPlaybackStateStr(mState),
                     ToMediaPlaybackStateStr(aState));
    MOZ_ASSERT(mState != aState, "Should not notify same state again!");
    mState = aState;
    mControlAgent->NotifyMediaPlaybackChanged(mOwnerBrowsingContextId, mState);
  }

  void NotifyAudibleStateChanged(MediaAudibleState aState) {
    MOZ_ASSERT(NS_IsMainThread());
    MOZ_ASSERT(IsStarted());
    mControlAgent->NotifyMediaAudibleChanged(mOwnerBrowsingContextId, aState);
  }

  MediaPlaybackState mState = MediaPlaybackState::eStopped;
  // Weak: the element owns us; cleared on CC unlink.
  WeakPtr<HTMLMediaElement> mElement;
  RefPtr<ContentMediaAgent> mControlAgent;
  bool mIsPictureInPictureEnabled = false;
  bool mIsOwnerAudible = false;
  // Initialized to 0 (an invalid/neutral id) because
  // SetPictureInPictureModeEnabled() can run before InitMediaAgent() has
  // assigned the real owner context id; previously this member was left
  // uninitialized and could be read indeterminate in that path.
  uint64_t mOwnerBrowsingContextId = 0;
};
/**
 * Listens to the DOMMediaStream assigned as this element's src stream and
 * forwards track additions/removals plus active/inactive (video) and
 * audible/inaudible (audio) transitions to the owning HTMLMediaElement.
 * Holds the element weakly; every callback no-ops once the element is gone.
 */
class HTMLMediaElement::MediaStreamTrackListener
    : public DOMMediaStream::TrackListener {
 public:
  explicit MediaStreamTrackListener(HTMLMediaElement* aElement)
      : mElement(aElement) {}

  void NotifyTrackAdded(const RefPtr<MediaStreamTrack>& aTrack) override {
    if (!mElement) {
      return;
    }
    mElement->NotifyMediaStreamTrackAdded(aTrack);
  }

  void NotifyTrackRemoved(const RefPtr<MediaStreamTrack>& aTrack) override {
    if (!mElement) {
      return;
    }
    mElement->NotifyMediaStreamTrackRemoved(aTrack);
  }

  // Shared handler for the stream becoming active (video) or audible (audio):
  // re-runs the load algorithm for an ended, autoplaying element.
  void OnActive() {
    MOZ_ASSERT(mElement);
    // mediacapture-main says:
    // Note that once ended equals true the HTMLVideoElement will not play media
    // even if new MediaStreamTracks are added to the MediaStream (causing it to
    // return to the active state) unless autoplay is true or the web
    // application restarts the element, e.g., by calling play().
    //
    // This is vague on exactly how to go from becoming active to playing, when
    // autoplaying. However, per the media element spec, to play an autoplaying
    // media element, we must load the source and reach readyState
    // HAVE_ENOUGH_DATA [1]. Hence, a MediaStream being assigned to a media
    // element and becoming active runs the load algorithm, so that it can
    // eventually be played.
    //
    // [1] https://html.spec.whatwg.org/multipage/media.html#ready-states
    LOG(LogLevel::Debug, ("%p, mSrcStream %p became active, checking if we "
                          "need to run the load algorithm",
                          mElement.get(), mElement->mSrcStream.get()));
    if (!mElement->IsPlaybackEnded()) {
      return;
    }
    if (!mElement->Autoplay()) {
      return;
    }
    LOG(LogLevel::Info, ("%p, mSrcStream %p became active on autoplaying, "
                         "ended element. Reloading.",
                         mElement.get(), mElement->mSrcStream.get()));
    mElement->DoLoad();
  }

  void NotifyActive() override {
    if (!mElement) {
      return;
    }
    if (!mElement->IsVideo()) {
      // Audio elements use NotifyAudible().
      return;
    }
    OnActive();
  }

  void NotifyAudible() override {
    if (!mElement) {
      return;
    }
    if (mElement->IsVideo()) {
      // Video elements use NotifyActive().
      return;
    }
    OnActive();
  }

  // Shared handler for the stream becoming inactive (video) or inaudible
  // (audio): ends playback unless it has already ended.
  void OnInactive() {
    MOZ_ASSERT(mElement);
    if (mElement->IsPlaybackEnded()) {
      return;
    }
    LOG(LogLevel::Debug, ("%p, mSrcStream %p became inactive", mElement.get(),
                          mElement->mSrcStream.get()));
    mElement->PlaybackEnded();
  }

  void NotifyInactive() override {
    if (!mElement) {
      return;
    }
    if (!mElement->IsVideo()) {
      // Audio elements use NotifyInaudible().
      return;
    }
    OnInactive();
  }

  void NotifyInaudible() override {
    if (!mElement) {
      return;
    }
    if (mElement->IsVideo()) {
      // Video elements use NotifyInactive().
      return;
    }
    OnInactive();
  }

 protected:
  // Weak back-pointer to the owning element; may be null after CC.
  const WeakPtr<HTMLMediaElement> mElement;
};
/**
* Helper class that manages audio and video outputs for all enabled tracks in a
* media element. It also manages calculating the current time when playing a
* MediaStream.
*/
class HTMLMediaElement::MediaStreamRenderer
    : public DOMMediaStream::TrackListener {
 public:
  NS_INLINE_DECL_REFCOUNTING(MediaStreamRenderer)

  MediaStreamRenderer(AbstractThread* aMainThread,
                      VideoFrameContainer* aVideoContainer,
                      FirstFrameVideoOutput* aFirstFrameVideoOutput,
                      void* aAudioOutputKey)
      : mVideoContainer(aVideoContainer),
        mAudioOutputKey(aAudioOutputKey),
        mWatchManager(this, aMainThread),
        mFirstFrameVideoOutput(aFirstFrameVideoOutput) {
    if (mFirstFrameVideoOutput) {
      mWatchManager.Watch(mFirstFrameVideoOutput->mFirstFrameRendered,
                          &MediaStreamRenderer::SetFirstFrameRendered);
    }
  }

  // Detach all tracks and stop watching graph time. Called from the dtor.
  void Shutdown() {
    // Iterate over a Clone() because RemoveTrack() mutates mAudioTracks.
    for (const auto& t : mAudioTracks.Clone()) {
      if (t) {
        RemoveTrack(t->AsAudioStreamTrack());
      }
    }
    if (mVideoTrack) {
      RemoveTrack(mVideoTrack->AsVideoStreamTrack());
    }
    mWatchManager.Shutdown();
    mFirstFrameVideoOutput = nullptr;
  }

  // Recompute mGraphTime from the graph's current time; watched while
  // mProgressingCurrentTime is true.
  void UpdateGraphTime() {
    mGraphTime =
        mGraphTimeDummy->mTrack->Graph()->CurrentTime() - *mGraphTimeOffset;
  }

  // The element has rendered the first video frame; the dedicated
  // first-frame output is no longer needed, so detach and drop it.
  void SetFirstFrameRendered() {
    if (!mFirstFrameVideoOutput) {
      return;
    }
    if (mVideoTrack) {
      mVideoTrack->AsVideoStreamTrack()->RemoveVideoOutput(
          mFirstFrameVideoOutput);
    }
    mWatchManager.Unwatch(mFirstFrameVideoOutput->mFirstFrameRendered,
                          &MediaStreamRenderer::SetFirstFrameRendered);
    mFirstFrameVideoOutput = nullptr;
  }

  // Start/stop advancing mGraphTime with the graph's current time.
  void SetProgressingCurrentTime(bool aProgress) {
    if (aProgress == mProgressingCurrentTime) {
      return;
    }
    MOZ_DIAGNOSTIC_ASSERT(mGraphTimeDummy);
    mProgressingCurrentTime = aProgress;
    MediaTrackGraph* graph = mGraphTimeDummy->mTrack->Graph();
    if (mProgressingCurrentTime) {
      // Offset so CurrentTime() continues from where it left off.
      mGraphTimeOffset = Some(graph->CurrentTime().Ref() - mGraphTime);
      mWatchManager.Watch(graph->CurrentTime(),
                          &MediaStreamRenderer::UpdateGraphTime);
    } else {
      mWatchManager.Unwatch(graph->CurrentTime(),
                            &MediaStreamRenderer::UpdateGraphTime);
    }
  }

  // Attach audio/video outputs for all current tracks (rendering on).
  void Start() {
    if (mRendering) {
      return;
    }
    mRendering = true;
    if (!mGraphTimeDummy) {
      return;
    }
    for (const auto& t : mAudioTracks) {
      if (t) {
        if (mAudioOutputSink) {
          t->AsAudioStreamTrack()->SetAudioOutputDevice(mAudioOutputKey,
                                                        mAudioOutputSink);
        }
        t->AsAudioStreamTrack()->AddAudioOutput(mAudioOutputKey);
        t->AsAudioStreamTrack()->SetAudioOutputVolume(mAudioOutputKey,
                                                      mAudioOutputVolume);
      }
    }
    if (mVideoTrack) {
      mVideoTrack->AsVideoStreamTrack()->AddVideoOutput(mVideoContainer);
    }
  }

  // Detach audio/video outputs for all current tracks (rendering off).
  void Stop() {
    if (!mRendering) {
      return;
    }
    mRendering = false;
    if (!mGraphTimeDummy) {
      return;
    }
    for (const auto& t : mAudioTracks) {
      if (t) {
        t->AsAudioStreamTrack()->RemoveAudioOutput(mAudioOutputKey);
      }
    }
    if (mVideoTrack) {
      mVideoTrack->AsVideoStreamTrack()->RemoveVideoOutput(mVideoContainer);
    }
  }

  void SetAudioOutputVolume(float aVolume) {
    if (mAudioOutputVolume == aVolume) {
      return;
    }
    mAudioOutputVolume = aVolume;
    if (!mRendering) {
      return;
    }
    for (const auto& t : mAudioTracks) {
      if (t) {
        t->AsAudioStreamTrack()->SetAudioOutputVolume(mAudioOutputKey,
                                                      mAudioOutputVolume);
      }
    }
  }

  RefPtr<GenericPromise::AllPromiseType> SetAudioOutputDevice(
      AudioDeviceInfo* aSink) {
    MOZ_ASSERT(aSink);
    MOZ_ASSERT(mAudioOutputSink != aSink);
    mAudioOutputSink = aSink;
    if (!mRendering) {
      return GenericPromise::AllPromiseType::CreateAndResolve(nsTArray<bool>(),
                                                              __func__);
    }
    nsTArray<RefPtr<GenericPromise>> promises;
    for (const auto& t : mAudioTracks) {
      if (!t) {
        // Tracks are held weakly and may be gone already; skip them like the
        // loops in Start()/Stop()/SetAudioOutputVolume() do. (Previously this
        // loop dereferenced unconditionally, risking a null deref.)
        continue;
      }
      // SetAudioOutputDevice will create a new output MediaTrack, so the
      // AudioOutput is removed for the current MediaTrack and re-added after
      // the new MediaTrack has been created.
      t->AsAudioStreamTrack()->RemoveAudioOutput(mAudioOutputKey);
      promises.AppendElement(t->AsAudioStreamTrack()->SetAudioOutputDevice(
          mAudioOutputKey, mAudioOutputSink));
      t->AsAudioStreamTrack()->AddAudioOutput(mAudioOutputKey);
      t->AsAudioStreamTrack()->SetAudioOutputVolume(mAudioOutputKey,
                                                    mAudioOutputVolume);
    }
    if (!promises.Length()) {
      // No active track; the sink was saved above for later use.
      return GenericPromise::AllPromiseType::CreateAndResolve(nsTArray<bool>(),
                                                              __func__);
    }
    return GenericPromise::All(GetCurrentSerialEventTarget(), promises);
  }

  void AddTrack(AudioStreamTrack* aTrack) {
    MOZ_DIAGNOSTIC_ASSERT(!mAudioTracks.Contains(aTrack));
    mAudioTracks.AppendElement(aTrack);
    EnsureGraphTimeDummy();
    if (mRendering) {
      if (mAudioOutputSink) {
        aTrack->SetAudioOutputDevice(mAudioOutputKey, mAudioOutputSink);
      }
      aTrack->AddAudioOutput(mAudioOutputKey);
      aTrack->SetAudioOutputVolume(mAudioOutputKey, mAudioOutputVolume);
    }
  }

  void AddTrack(VideoStreamTrack* aTrack) {
    MOZ_DIAGNOSTIC_ASSERT(!mVideoTrack);
    if (!mVideoContainer) {
      return;
    }
    mVideoTrack = aTrack;
    EnsureGraphTimeDummy();
    if (mFirstFrameVideoOutput) {
      // Add the first frame output even if we are rendering. It will only
      // accept one frame. If we are rendering, then the main output will
      // overwrite that with the same frame (and possibly more frames).
      aTrack->AddVideoOutput(mFirstFrameVideoOutput);
    }
    if (mRendering) {
      aTrack->AddVideoOutput(mVideoContainer);
    }
  }

  void RemoveTrack(AudioStreamTrack* aTrack) {
    MOZ_DIAGNOSTIC_ASSERT(mAudioTracks.Contains(aTrack));
    if (mRendering) {
      aTrack->RemoveAudioOutput(mAudioOutputKey);
    }
    mAudioTracks.RemoveElement(aTrack);
  }

  void RemoveTrack(VideoStreamTrack* aTrack) {
    MOZ_DIAGNOSTIC_ASSERT(mVideoTrack == aTrack);
    if (!mVideoContainer) {
      return;
    }
    if (mFirstFrameVideoOutput) {
      aTrack->RemoveVideoOutput(mFirstFrameVideoOutput);
    }
    if (mRendering) {
      aTrack->RemoveVideoOutput(mVideoContainer);
    }
    mVideoTrack = nullptr;
  }

  // Current playback time in seconds, derived from the graph's time; 0.0
  // until a track has been added (no graph yet).
  double CurrentTime() const {
    if (!mGraphTimeDummy) {
      return 0.0;
    }
    return mGraphTimeDummy->mTrack->GraphImpl()->MediaTimeToSeconds(mGraphTime);
  }

  Watchable<GraphTime>& CurrentGraphTime() { return mGraphTime; }

  // Set if we're rendering video.
  const RefPtr<VideoFrameContainer> mVideoContainer;

  // Set if we're rendering audio, nullptr otherwise.
  void* const mAudioOutputKey;

 private:
  ~MediaStreamRenderer() { Shutdown(); }

  // Lazily create the dummy track that pins a MediaTrackGraph, picking the
  // graph of the first live audio track, else the live video track.
  void EnsureGraphTimeDummy() {
    if (mGraphTimeDummy) {
      return;
    }
    MediaTrackGraph* graph = nullptr;
    for (const auto& t : mAudioTracks) {
      if (t && !t->Ended()) {
        graph = t->Graph();
        break;
      }
    }
    if (!graph && mVideoTrack && !mVideoTrack->Ended()) {
      graph = mVideoTrack->Graph();
    }
    if (!graph) {
      return;
    }
    // This dummy keeps `graph` alive and ensures access to it.
    mGraphTimeDummy = MakeRefPtr<SharedDummyTrack>(
        graph->CreateSourceTrack(MediaSegment::AUDIO));
  }

  // True when all tracks are being rendered, i.e., when the media element is
  // playing.
  bool mRendering = false;

  // True while we're progressing mGraphTime. False otherwise.
  bool mProgressingCurrentTime = false;

  // The audio output volume for all audio tracks.
  float mAudioOutputVolume = 1.0f;

  // The sink device for all audio tracks.
  RefPtr<AudioDeviceInfo> mAudioOutputSink;

  // WatchManager for mGraphTime.
  WatchManager<MediaStreamRenderer> mWatchManager;

  // A dummy MediaTrack to guarantee a MediaTrackGraph is kept alive while
  // we're actively rendering, so we can track the graph's current time. Set
  // when the first track is added, never unset.
  RefPtr<SharedDummyTrack> mGraphTimeDummy;

  // Watchable that relays the graph's currentTime updates to the media
  // element only while we're rendering. This is the current time of the
  // rendering in GraphTime units.
  Watchable<GraphTime> mGraphTime = {0, "MediaStreamRenderer::mGraphTime"};

  // Nothing until a track has been added. Then, the current GraphTime at the
  // time when we were last Start()ed.
  Maybe<GraphTime> mGraphTimeOffset;

  // Currently enabled (and rendered) audio tracks.
  nsTArray<WeakPtr<MediaStreamTrack>> mAudioTracks;

  // Currently selected (and rendered) video track.
  WeakPtr<MediaStreamTrack> mVideoTrack;

  // Holds a reference to the first-frame-getting video output attached to
  // mVideoTrack. Set by the constructor, unset when the media element tells
  // us it has rendered the first frame.
  RefPtr<FirstFrameVideoOutput> mFirstFrameVideoOutput;
};
/**
 * The track source for tracks captured from this media element. There are
 * two flavors, one per constructor:
 *  - a MediaDecoder-backed source, which only wraps a ProcessedMediaTrack,
 *    and
 *  - a MediaStream-backed source, which additionally observes the captured
 *    MediaStreamTrack (as a consumer) and its source (as a sink) so that
 *    principal, mute and enabled changes propagate to the captured track.
 *
 * The element's mute state is applied asynchronously via the main thread
 * event target; see SetMutedByElement().
 */
class HTMLMediaElement::MediaElementTrackSource
    : public MediaStreamTrackSource,
      public MediaStreamTrackSource::Sink,
      public MediaStreamTrackConsumer {
 public:
  NS_DECL_ISUPPORTS_INHERITED
  NS_DECL_CYCLE_COLLECTION_CLASS_INHERITED(MediaElementTrackSource,
                                           MediaStreamTrackSource)

  /* MediaDecoder track source */
  MediaElementTrackSource(nsISerialEventTarget* aMainThreadEventTarget,
                          ProcessedMediaTrack* aTrack, nsIPrincipal* aPrincipal,
                          OutputMuteState aMuteState, bool aHasAlpha)
      : MediaStreamTrackSource(aPrincipal, nsString()),
        mMainThreadEventTarget(aMainThreadEventTarget),
        mTrack(aTrack),
        mIntendedElementMuteState(aMuteState),
        mElementMuteState(aMuteState),
        mMediaDecoderHasAlpha(Some(aHasAlpha)) {
    MOZ_ASSERT(mTrack);
  }

  /* MediaStream track source */
  MediaElementTrackSource(nsISerialEventTarget* aMainThreadEventTarget,
                          MediaStreamTrack* aCapturedTrack,
                          MediaStreamTrackSource* aCapturedTrackSource,
                          ProcessedMediaTrack* aTrack, MediaInputPort* aPort,
                          OutputMuteState aMuteState)
      : MediaStreamTrackSource(aCapturedTrackSource->GetPrincipal(),
                               nsString()),
        mMainThreadEventTarget(aMainThreadEventTarget),
        mCapturedTrack(aCapturedTrack),
        mCapturedTrackSource(aCapturedTrackSource),
        mTrack(aTrack),
        mPort(aPort),
        mIntendedElementMuteState(aMuteState),
        mElementMuteState(aMuteState) {
    MOZ_ASSERT(mTrack);
    MOZ_ASSERT(mCapturedTrack);
    MOZ_ASSERT(mCapturedTrackSource);
    MOZ_ASSERT(mPort);
    // Observe enabled/mute changes on the captured track, and principal
    // changes on its source. Both are unregistered in Destroy().
    mCapturedTrack->AddConsumer(this);
    mCapturedTrackSource->RegisterSink(this);
  }

  // Enable the sourced track, or disable it so it renders silence (audio)
  // and a frozen frame (video).
  void SetEnabled(bool aEnabled) {
    if (!mTrack) {
      return;
    }
    mTrack->SetDisabledTrackMode(aEnabled ? DisabledTrackMode::ENABLED
                                          : DisabledTrackMode::SILENCE_FREEZE);
  }

  // Replace the principal and notify downstream tracks of the change.
  void SetPrincipal(RefPtr<nsIPrincipal> aPrincipal) {
    mPrincipal = std::move(aPrincipal);
    MediaStreamTrackSource::PrincipalChanged();
  }

  // Record the element's intended mute state and apply it asynchronously on
  // the main thread event target. The intended state is tracked separately
  // so redundant updates are coalesced before dispatch.
  void SetMutedByElement(OutputMuteState aMuteState) {
    if (mIntendedElementMuteState == aMuteState) {
      return;
    }
    mIntendedElementMuteState = aMuteState;
    mMainThreadEventTarget->Dispatch(NS_NewRunnableFunction(
        "MediaElementTrackSource::SetMutedByElement",
        [self = RefPtr<MediaElementTrackSource>(this), this, aMuteState] {
          mElementMuteState = aMuteState;
          MediaStreamTrackSource::MutedChanged(Muted());
        }));
  }

  // Unregister from the captured track/source and destroy the graph-side
  // track and port. Idempotent; also run by the destructor and CC unlink.
  void Destroy() override {
    if (mCapturedTrack) {
      mCapturedTrack->RemoveConsumer(this);
      mCapturedTrack = nullptr;
    }
    if (mCapturedTrackSource) {
      mCapturedTrackSource->UnregisterSink(this);
      mCapturedTrackSource = nullptr;
    }
    if (mTrack && !mTrack->IsDestroyed()) {
      mTrack->Destroy();
    }
    if (mPort) {
      mPort->Destroy();
      mPort = nullptr;
    }
  }

  MediaSourceEnum GetMediaSource() const override {
    return MediaSourceEnum::Other;
  }

  void Stop() override {
    // Do nothing. There may appear new output streams
    // that need tracks sourced from this source, so we
    // cannot destroy things yet.
  }

  /**
   * Do not keep the track source alive. The source lifetime is controlled by
   * its associated tracks.
   */
  bool KeepsSourceAlive() const override { return false; }

  /**
   * Do not keep the track source on. It is controlled by its associated tracks.
   */
  bool Enabled() const override { return false; }

  void Disable() override {}

  void Enable() override {}

  // Sink callback: forward the captured source's new principal.
  void PrincipalChanged() override {
    if (!mCapturedTrackSource) {
      // This could happen during shutdown.
      return;
    }
    SetPrincipal(mCapturedTrackSource->GetPrincipal());
  }

  // Sink callback: recompute our aggregate mute state (aNewState alone is
  // not enough, since the element's mute also factors in).
  void MutedChanged(bool aNewState) override {
    MediaStreamTrackSource::MutedChanged(Muted());
  }

  void OverrideEnded() override {
    Destroy();
    MediaStreamTrackSource::OverrideEnded();
  }

  // Consumer callback: enabled-ness of the captured track affects Muted().
  void NotifyEnabledChanged(MediaStreamTrack* aTrack, bool aEnabled) override {
    MediaStreamTrackSource::MutedChanged(Muted());
  }

  // Muted if the element says so, or if the captured track (when present) is
  // itself muted or disabled.
  bool Muted() const {
    return mElementMuteState == OutputMuteState::Muted ||
           (mCapturedTrack &&
            (mCapturedTrack->Muted() || !mCapturedTrack->Enabled()));
  }

  // Whether the video (if any) may carry an alpha channel. For the
  // MediaStream flavor this is derived from the captured video track; for
  // the MediaDecoder flavor it was passed to the constructor.
  bool HasAlpha() const override {
    if (mCapturedTrack) {
      return mCapturedTrack->AsVideoStreamTrack()
                 ? mCapturedTrack->AsVideoStreamTrack()->HasAlpha()
                 : false;
    }
    return mMediaDecoderHasAlpha.valueOr(false);
  }

  ProcessedMediaTrack* Track() const { return mTrack; }

 private:
  virtual ~MediaElementTrackSource() { Destroy(); };

  const RefPtr<nsISerialEventTarget> mMainThreadEventTarget;
  // Only set for the MediaStream flavor; cleared in Destroy().
  RefPtr<MediaStreamTrack> mCapturedTrack;
  RefPtr<MediaStreamTrackSource> mCapturedTrackSource;
  const RefPtr<ProcessedMediaTrack> mTrack;
  RefPtr<MediaInputPort> mPort;
  // The mute state as intended by the media element.
  OutputMuteState mIntendedElementMuteState;
  // The mute state as applied to this track source. It is applied async, so
  // needs to be tracked separately from the intended state.
  OutputMuteState mElementMuteState;
  // Some<bool> if this is a MediaDecoder track source.
  const Maybe<bool> mMediaDecoderHasAlpha;
};
// Simple data holder for a DOMMediaStream captured from this element,
// together with the flags describing how the capture was requested.
HTMLMediaElement::OutputMediaStream::OutputMediaStream(
    RefPtr<DOMMediaStream> aStream, bool aCapturingAudioOnly,
    bool aFinishWhenEnded)
    : mStream(std::move(aStream)),
      mCapturingAudioOnly(aCapturingAudioOnly),
      mFinishWhenEnded(aFinishWhenEnded) {}

HTMLMediaElement::OutputMediaStream::~OutputMediaStream() = default;
// Cycle-collection traversal helper for OutputMediaStream: reports each
// CC-participating member so the cycle collector can see through the struct.
void ImplCycleCollectionTraverse(nsCycleCollectionTraversalCallback& aCallback,
                                 HTMLMediaElement::OutputMediaStream& aField,
                                 const char* aName, uint32_t aFlags) {
  ImplCycleCollectionTraverse(aCallback, aField.mStream, "mStream", aFlags);
  ImplCycleCollectionTraverse(aCallback, aField.mLiveTracks, "mLiveTracks",
                              aFlags);
  ImplCycleCollectionTraverse(aCallback, aField.mFinishWhenEndedLoadingSrc,
                              "mFinishWhenEndedLoadingSrc", aFlags);
  ImplCycleCollectionTraverse(aCallback, aField.mFinishWhenEndedAttrStream,
                              "mFinishWhenEndedAttrStream", aFlags);
}
// Cycle-collection unlink helper for OutputMediaStream: drops the same
// members that ImplCycleCollectionTraverse reports.
void ImplCycleCollectionUnlink(HTMLMediaElement::OutputMediaStream& aField) {
  ImplCycleCollectionUnlink(aField.mStream);
  ImplCycleCollectionUnlink(aField.mLiveTracks);
  ImplCycleCollectionUnlink(aField.mFinishWhenEndedLoadingSrc);
  ImplCycleCollectionUnlink(aField.mFinishWhenEndedAttrStream);
}
// XPCOM refcounting and cycle-collection boilerplate for
// MediaElementTrackSource. Unlink calls Destroy() first so graph-side
// resources (track, port) and the consumer/sink registrations are released
// when the cycle collector breaks a cycle through us.
NS_IMPL_ADDREF_INHERITED(HTMLMediaElement::MediaElementTrackSource,
                         MediaStreamTrackSource)
NS_IMPL_RELEASE_INHERITED(HTMLMediaElement::MediaElementTrackSource,
                          MediaStreamTrackSource)
NS_INTERFACE_MAP_BEGIN_CYCLE_COLLECTION(
    HTMLMediaElement::MediaElementTrackSource)
NS_INTERFACE_MAP_END_INHERITING(MediaStreamTrackSource)
NS_IMPL_CYCLE_COLLECTION_CLASS(HTMLMediaElement::MediaElementTrackSource)
NS_IMPL_CYCLE_COLLECTION_UNLINK_BEGIN_INHERITED(
    HTMLMediaElement::MediaElementTrackSource, MediaStreamTrackSource)
  tmp->Destroy();
  NS_IMPL_CYCLE_COLLECTION_UNLINK(mCapturedTrack)
  NS_IMPL_CYCLE_COLLECTION_UNLINK(mCapturedTrackSource)
NS_IMPL_CYCLE_COLLECTION_UNLINK_END
NS_IMPL_CYCLE_COLLECTION_TRAVERSE_BEGIN_INHERITED(
    HTMLMediaElement::MediaElementTrackSource, MediaStreamTrackSource)
  NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mCapturedTrack)
  NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mCapturedTrackSource)
NS_IMPL_CYCLE_COLLECTION_TRAVERSE_END
/**
 * There is a reference cycle involving this class: MediaLoadListener
 * holds a reference to the HTMLMediaElement, which holds a reference
 * to an nsIChannel, which holds a reference to this listener.
 * We break the reference cycle in OnStartRequest by clearing mElement.
 */
class HTMLMediaElement::MediaLoadListener final
    : public nsIStreamListener,
      public nsIChannelEventSink,
      public nsIInterfaceRequestor,
      public nsIObserver,
      public nsIThreadRetargetableStreamListener {
  // Private: refcounted objects are destroyed via Release().
  ~MediaLoadListener() = default;

  NS_DECL_THREADSAFE_ISUPPORTS
  NS_DECL_NSIREQUESTOBSERVER
  NS_DECL_NSISTREAMLISTENER
  NS_DECL_NSICHANNELEVENTSINK
  NS_DECL_NSIOBSERVER
  NS_DECL_NSIINTERFACEREQUESTOR
  NS_DECL_NSITHREADRETARGETABLESTREAMLISTENER

 public:
  explicit MediaLoadListener(HTMLMediaElement* aElement)
      : mElement(aElement), mLoadID(aElement->GetCurrentLoadID()) {
    MOZ_ASSERT(mElement, "Must pass an element to call back");
  }

 private:
  // The element being loaded. Cleared in OnStartRequest (or on shutdown
  // notification) to break the cycle described above.
  RefPtr<HTMLMediaElement> mElement;
  // Listener that stream callbacks are forwarded to once the decoder has
  // been initialized for the channel.
  nsCOMPtr<nsIStreamListener> mNextListener;
  // The element's load ID at the time this listener was created; a mismatch
  // later means the load was cancelled/restarted in the meantime.
  const uint32_t mLoadID;
};
NS_IMPL_ISUPPORTS(HTMLMediaElement::MediaLoadListener, nsIRequestObserver,
                  nsIStreamListener, nsIChannelEventSink, nsIInterfaceRequestor,
                  nsIObserver, nsIThreadRetargetableStreamListener)

// nsIObserver: invoked on the XPCOM shutdown notification we registered for.
NS_IMETHODIMP
HTMLMediaElement::MediaLoadListener::Observe(nsISupports* aSubject,
                                             const char* aTopic,
                                             const char16_t* aData) {
  nsContentUtils::UnregisterShutdownObserver(this);
  // Clear mElement to break cycle so we don't leak on shutdown
  mElement = nullptr;
  return NS_OK;
}
// nsIRequestObserver: the channel has started. Validates the load (shutdown,
// load ID, request status, HTTP status), then hands the channel to the
// element via InitializeDecoderForChannel, which installs mNextListener for
// all further stream callbacks. Any failure path returns an error so the
// channel is cancelled.
NS_IMETHODIMP
HTMLMediaElement::MediaLoadListener::OnStartRequest(nsIRequest* aRequest) {
  nsContentUtils::UnregisterShutdownObserver(this);

  if (!mElement) {
    // We've been notified by the shutdown observer, and are shutting down.
    return NS_BINDING_ABORTED;
  }

  // The element is only needed until we've had a chance to call
  // InitializeDecoderForChannel. So make sure mElement is cleared here.
  RefPtr<HTMLMediaElement> element;
  element.swap(mElement);

  if (mLoadID != element->GetCurrentLoadID()) {
    // The channel has been cancelled before we had a chance to create
    // a decoder. Abort, don't dispatch an "error" event, as the new load
    // may not be in an error state.
    return NS_BINDING_ABORTED;
  }

  // Don't continue to load if the request failed or has been canceled.
  nsresult status;
  nsresult rv = aRequest->GetStatus(&status);
  NS_ENSURE_SUCCESS(rv, rv);
  if (NS_FAILED(status)) {
    if (element) {
      // Handle media not loading error because source was a tracking URL (or
      // fingerprinting, cryptomining, etc).
      // We make a note of this media node by including it in a dedicated
      // array of blocked tracking nodes under its parent document.
      if (net::UrlClassifierFeatureFactory::IsClassifierBlockingErrorCode(
              status)) {
        element->OwnerDoc()->AddBlockedNodeByClassifier(element);
      }
      element->NotifyLoadError(
          nsPrintfCString("%u: %s", uint32_t(status), "Request failed"));
    }
    return status;
  }

  // For HTTP, a non-2xx response is a load error even though the request
  // itself succeeded; report it to the console with status code and URL.
  nsCOMPtr<nsIHttpChannel> hc = do_QueryInterface(aRequest);
  bool succeeded;
  if (hc && NS_SUCCEEDED(hc->GetRequestSucceeded(&succeeded)) && !succeeded) {
    uint32_t responseStatus = 0;
    Unused << hc->GetResponseStatus(&responseStatus);
    nsAutoCString statusText;
    Unused << hc->GetResponseStatusText(statusText);
    element->NotifyLoadError(
        nsPrintfCString("%u: %s", responseStatus, statusText.get()));

    nsAutoString code;
    code.AppendInt(responseStatus);
    nsAutoString src;
    element->GetCurrentSrc(src);
    AutoTArray<nsString, 2> params = {code, src};
    element->ReportLoadError("MediaLoadHttpError", params);
    return NS_BINDING_ABORTED;
  }

  nsCOMPtr<nsIChannel> channel = do_QueryInterface(aRequest);
  if (channel &&
      NS_SUCCEEDED(rv = element->InitializeDecoderForChannel(
                       channel, getter_AddRefs(mNextListener))) &&
      mNextListener) {
    rv = mNextListener->OnStartRequest(aRequest);
  } else {
    // If InitializeDecoderForChannel() returned an error, fire a network error.
    if (NS_FAILED(rv) && !mNextListener) {
      // Load failed, attempt to load the next candidate resource. If there
      // are none, this will trigger a MEDIA_ERR_SRC_NOT_SUPPORTED error.
      element->NotifyLoadError("Failed to init decoder"_ns);
    }
    // If InitializeDecoderForChannel did not return a listener (but may
    // have otherwise succeeded), we abort the connection since we aren't
    // interested in keeping the channel alive ourselves.
    rv = NS_BINDING_ABORTED;
  }

  return rv;
}
// nsIRequestObserver: the channel is done. Forward to the chained listener
// installed by OnStartRequest, if any; otherwise there is nothing to do.
NS_IMETHODIMP
HTMLMediaElement::MediaLoadListener::OnStopRequest(nsIRequest* aRequest,
                                                   nsresult aStatus) {
  if (!mNextListener) {
    return NS_OK;
  }
  return mNextListener->OnStopRequest(aRequest, aStatus);
}
// nsIStreamListener: incoming data. Data is only expected once
// OnStartRequest has installed a chained listener; forward it there.
NS_IMETHODIMP
HTMLMediaElement::MediaLoadListener::OnDataAvailable(nsIRequest* aRequest,
                                                     nsIInputStream* aStream,
                                                     uint64_t aOffset,
                                                     uint32_t aCount) {
  if (mNextListener) {
    return mNextListener->OnDataAvailable(aRequest, aStream, aOffset, aCount);
  }
  NS_ERROR(
      "Must have a chained listener; OnStartRequest should have "
      "canceled this request");
  return NS_BINDING_ABORTED;
}
// nsIChannelEventSink: the channel is being redirected. Tell the element,
// then let the chained listener (if it is also a channel event sink) decide
// when to verify the redirect; otherwise verify immediately.
NS_IMETHODIMP
HTMLMediaElement::MediaLoadListener::AsyncOnChannelRedirect(
    nsIChannel* aOldChannel, nsIChannel* aNewChannel, uint32_t aFlags,
    nsIAsyncVerifyRedirectCallback* cb) {
  // TODO is this really correct?? See bug #579329.
  if (mElement) {
    mElement->OnChannelRedirect(aOldChannel, aNewChannel, aFlags);
  }
  nsCOMPtr<nsIChannelEventSink> redirectSink = do_QueryInterface(mNextListener);
  if (!redirectSink) {
    cb->OnRedirectVerifyCallback(NS_OK);
    return NS_OK;
  }
  return redirectSink->AsyncOnChannelRedirect(aOldChannel, aNewChannel, aFlags,
                                              cb);
}
// nsIThreadRetargetableStreamListener: retargeting stream callbacks off the
// main thread is only possible if the chained listener supports it too.
NS_IMETHODIMP
HTMLMediaElement::MediaLoadListener::CheckListenerChain() {
  MOZ_ASSERT(mNextListener);
  nsCOMPtr<nsIThreadRetargetableStreamListener> retargetableListener =
      do_QueryInterface(mNextListener);
  if (!retargetableListener) {
    return NS_ERROR_NO_INTERFACE;
  }
  return retargetableListener->CheckListenerChain();
}
// nsIInterfaceRequestor: we hand out exactly the interfaces we implement.
NS_IMETHODIMP
HTMLMediaElement::MediaLoadListener::GetInterface(const nsIID& aIID,
                                                  void** aResult) {
  return QueryInterface(aIID, aResult);
}

// Report a media load error to the web console at warning severity.
void HTMLMediaElement::ReportLoadError(const char* aMsg,
                                       const nsTArray<nsString>& aParams) {
  ReportToConsole(nsIScriptError::warningFlag, aMsg, aParams);
}

// Report a "Media"-category message (keyed into the DOM properties bundle)
// to the web console of the owner document.
void HTMLMediaElement::ReportToConsole(
    uint32_t aErrorFlags, const char* aMsg,
    const nsTArray<nsString>& aParams) const {
  nsContentUtils::ReportToConsole(aErrorFlags, "Media"_ns, OwnerDoc(),
                                  nsContentUtils::eDOM_PROPERTIES, aMsg,
                                  aParams);
}
/**
 * Bridges this media element to the audio channel service: forwards the
 * element's playback/audible state to an AudioChannelAgent, and applies the
 * window-level volume, mute and audio-capture decisions the service pushes
 * back (see the WindowVolumeChanged/WindowAudioCaptureChanged callbacks).
 */
class HTMLMediaElement::AudioChannelAgentCallback final
    : public nsIAudioChannelAgentCallback {
 public:
  NS_DECL_CYCLE_COLLECTING_ISUPPORTS
  NS_DECL_CYCLE_COLLECTION_CLASS(AudioChannelAgentCallback)

  explicit AudioChannelAgentCallback(HTMLMediaElement* aOwner)
      : mOwner(aOwner),
        mAudioChannelVolume(1.0),
        mPlayingThroughTheAudioChannel(false),
        mIsOwnerAudible(IsOwnerAudible()),
        mIsShutDown(false) {
    MOZ_ASSERT(mOwner);
    // Eagerly try to create the agent; failure is tolerated and retried
    // lazily by MaybeCreateAudioChannelAgent().
    MaybeCreateAudioChannelAgent();
  }
  // Start or stop the audio channel agent so it mirrors whether the element
  // is currently playing through the audio channel.
  void UpdateAudioChannelPlayingState() {
    MOZ_ASSERT(!mIsShutDown);
    bool playingThroughTheAudioChannel = IsPlayingThroughTheAudioChannel();

    if (playingThroughTheAudioChannel != mPlayingThroughTheAudioChannel) {
      if (!MaybeCreateAudioChannelAgent()) {
        return;
      }

      mPlayingThroughTheAudioChannel = playingThroughTheAudioChannel;
      if (mPlayingThroughTheAudioChannel) {
        StartAudioChannelAgent();
      } else {
        StopAudioChanelAgent();
      }
    }
  }

  // Called by the element when its play state changes.
  void NotifyPlayStateChanged() {
    MOZ_ASSERT(!mIsShutDown);
    UpdateAudioChannelPlayingState();
  }
  // nsIAudioChannelAgentCallback: the service pushed a new window volume
  // and/or mute state; apply both to the owning element.
  NS_IMETHODIMP WindowVolumeChanged(float aVolume, bool aMuted) override {
    MOZ_ASSERT(mAudioChannelAgent);

    MOZ_LOG(
        AudioChannelService::GetAudioChannelLog(), LogLevel::Debug,
        ("HTMLMediaElement::AudioChannelAgentCallback, WindowVolumeChanged, "
         "this = %p, aVolume = %f, aMuted = %s\n",
         this, aVolume, aMuted ? "true" : "false"));

    if (mAudioChannelVolume != aVolume) {
      mAudioChannelVolume = aVolume;
      mOwner->SetVolumeInternal();
    }

    // Only toggle the MUTED_BY_AUDIO_CHANNEL bit; other mute reasons stored
    // in mOwner->mMuted are left untouched.
    const uint32_t muted = mOwner->mMuted;
    if (aMuted && !mOwner->ComputedMuted()) {
      mOwner->SetMutedInternal(muted | MUTED_BY_AUDIO_CHANNEL);
    } else if (!aMuted && mOwner->ComputedMuted()) {
      mOwner->SetMutedInternal(muted & ~MUTED_BY_AUDIO_CHANNEL);
    }

    return NS_OK;
  }
  NS_IMETHODIMP WindowSuspendChanged(SuspendTypes aSuspend) override {
    // Currently this method is only used for delaying autoplay, and we've
    // separated related codes to `MediaPlaybackDelayPolicy`.
    return NS_OK;
  }

  // nsIAudioChannelAgentCallback: the window's audio-capture state changed.
  NS_IMETHODIMP WindowAudioCaptureChanged(bool aCapture) override {
    MOZ_ASSERT(mAudioChannelAgent);
    AudioCaptureTrackChangeIfNeeded();
    return NS_OK;
  }

  // Sync the element's capture track with the window's current audio-capture
  // state — but only once playback through the agent has actually started.
  void AudioCaptureTrackChangeIfNeeded() {
    MOZ_ASSERT(!mIsShutDown);
    if (!IsPlayingStarted()) {
      return;
    }

    MOZ_ASSERT(mAudioChannelAgent);
    bool isCapturing = mAudioChannelAgent->IsWindowAudioCapturingEnabled();
    mOwner->AudioCaptureTrackChange(isCapturing);
  }
  // Re-evaluate whether the owner is audible and, if the state flipped,
  // notify the agent (only while playback through the agent has started).
  void NotifyAudioPlaybackChanged(AudibleChangedReasons aReason) {
    MOZ_ASSERT(!mIsShutDown);
    AudibleState newAudibleState = IsOwnerAudible();
    MOZ_LOG(AudioChannelService::GetAudioChannelLog(), LogLevel::Debug,
            ("HTMLMediaElement::AudioChannelAgentCallback, "
             "NotifyAudioPlaybackChanged, this=%p, current=%s, new=%s",
             this, AudibleStateToStr(mIsOwnerAudible),
             AudibleStateToStr(newAudibleState)));
    if (mIsOwnerAudible == newAudibleState) {
      return;
    }

    mIsOwnerAudible = newAudibleState;
    if (IsPlayingStarted()) {
      mAudioChannelAgent->NotifyStartedAudible(mIsOwnerAudible, aReason);
    }
  }
  // Tear down: stop the agent if it is still playing, then drop it. Must be
  // called exactly once before destruction (the destructor asserts this).
  void Shutdown() {
    MOZ_ASSERT(!mIsShutDown);
    if (mAudioChannelAgent && mAudioChannelAgent->IsPlayingStarted()) {
      StopAudioChanelAgent();
    }
    mAudioChannelAgent = nullptr;
    mIsShutDown = true;
  }

  // The element's own volume scaled by the window's audio-channel volume.
  float GetEffectiveVolume() const {
    MOZ_ASSERT(!mIsShutDown);
    return mOwner->Volume() * mAudioChannelVolume;
  }
 private:
  ~AudioChannelAgentCallback() { MOZ_ASSERT(mIsShutDown); };

  // Lazily create mAudioChannelAgent. Returns false (leaving the agent
  // null) if initialization fails, e.g. when there is no inner window.
  bool MaybeCreateAudioChannelAgent() {
    if (mAudioChannelAgent) {
      return true;
    }

    mAudioChannelAgent = new AudioChannelAgent();
    nsresult rv =
        mAudioChannelAgent->Init(mOwner->OwnerDoc()->GetInnerWindow(), this);
    if (NS_WARN_IF(NS_FAILED(rv))) {
      mAudioChannelAgent = nullptr;
      MOZ_LOG(
          AudioChannelService::GetAudioChannelLog(), LogLevel::Debug,
          ("HTMLMediaElement::AudioChannelAgentCallback, Fail to initialize "
           "the audio channel agent, this = %p\n",
           this));
      return false;
    }

    return true;
  }
  // Tell the agent playback has started, then pull the current window
  // volume/suspend/capture state so we start from the right baseline.
  void StartAudioChannelAgent() {
    MOZ_ASSERT(mAudioChannelAgent);
    MOZ_ASSERT(!mAudioChannelAgent->IsPlayingStarted());
    if (NS_WARN_IF(NS_FAILED(
            mAudioChannelAgent->NotifyStartedPlaying(IsOwnerAudible())))) {
      return;
    }
    mAudioChannelAgent->PullInitialUpdate();
  }

  // Tell the agent playback has stopped. (The "Chanel" misspelling is the
  // actual method name used by all callers.)
  void StopAudioChanelAgent() {
    MOZ_ASSERT(mAudioChannelAgent);
    MOZ_ASSERT(mAudioChannelAgent->IsPlayingStarted());
    mAudioChannelAgent->NotifyStoppedPlaying();
    // If we have started audio capturing before, we have to tell media element
    // to clear the output capturing track.
    mOwner->AudioCaptureTrackChange(false);
  }
bool IsPlayingStarted() {
if (MaybeCreateAudioChannelAgent()) {
return mAudioChannelAgent->IsPlayingStarted();
}
return false;
}
AudibleState IsOwnerAudible() const {
// paused media doesn't produce any sound.
if (mOwner->mPaused) {
return AudibleState::eNotAudible;
}
return mOwner->IsAudible() ? AudibleState::eAudible
: AudibleState::eNotAudible;
}
bool IsPlayingThroughTheAudioChannel() const {
// If we have an error, we are not playing.
if (mOwner->GetError()) {