Line data Source code
1 : /* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
2 : /* vim:set ts=2 sw=2 sts=2 et cindent: */
3 : /* This Source Code Form is subject to the terms of the Mozilla Public
4 : * License, v. 2.0. If a copy of the MPL was not distributed with this
5 : * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
6 :
7 : #include "AudioContext.h"
8 :
9 : #include "blink/PeriodicWave.h"
10 :
11 : #include "mozilla/ErrorResult.h"
12 : #include "mozilla/OwningNonNull.h"
13 : #include "mozilla/RefPtr.h"
14 :
15 : #include "mozilla/dom/AnalyserNode.h"
16 : #include "mozilla/dom/AnalyserNodeBinding.h"
17 : #include "mozilla/dom/AudioBufferSourceNodeBinding.h"
18 : #include "mozilla/dom/AudioContextBinding.h"
19 : #include "mozilla/dom/BaseAudioContextBinding.h"
20 : #include "mozilla/dom/BiquadFilterNodeBinding.h"
21 : #include "mozilla/dom/ChannelMergerNodeBinding.h"
22 : #include "mozilla/dom/ChannelSplitterNodeBinding.h"
23 : #include "mozilla/dom/ConvolverNodeBinding.h"
24 : #include "mozilla/dom/DelayNodeBinding.h"
25 : #include "mozilla/dom/DynamicsCompressorNodeBinding.h"
26 : #include "mozilla/dom/GainNodeBinding.h"
27 : #include "mozilla/dom/IIRFilterNodeBinding.h"
28 : #include "mozilla/dom/HTMLMediaElement.h"
29 : #include "mozilla/dom/MediaElementAudioSourceNodeBinding.h"
30 : #include "mozilla/dom/MediaStreamAudioSourceNodeBinding.h"
31 : #include "mozilla/dom/OfflineAudioContextBinding.h"
32 : #include "mozilla/dom/OscillatorNodeBinding.h"
33 : #include "mozilla/dom/PannerNodeBinding.h"
34 : #include "mozilla/dom/PeriodicWaveBinding.h"
35 : #include "mozilla/dom/Promise.h"
36 : #include "mozilla/dom/StereoPannerNodeBinding.h"
37 : #include "mozilla/dom/WaveShaperNodeBinding.h"
38 :
39 : #include "AudioBuffer.h"
40 : #include "AudioBufferSourceNode.h"
41 : #include "AudioChannelService.h"
42 : #include "AudioDestinationNode.h"
43 : #include "AudioListener.h"
44 : #include "AudioStream.h"
45 : #include "BiquadFilterNode.h"
46 : #include "ChannelMergerNode.h"
47 : #include "ChannelSplitterNode.h"
48 : #include "ConstantSourceNode.h"
49 : #include "ConvolverNode.h"
50 : #include "DelayNode.h"
51 : #include "DynamicsCompressorNode.h"
52 : #include "GainNode.h"
53 : #include "IIRFilterNode.h"
54 : #include "MediaElementAudioSourceNode.h"
55 : #include "MediaStreamAudioDestinationNode.h"
56 : #include "MediaStreamAudioSourceNode.h"
57 : #include "MediaStreamGraph.h"
58 : #include "nsContentUtils.h"
59 : #include "nsNetCID.h"
60 : #include "nsNetUtil.h"
61 : #include "nsPIDOMWindow.h"
62 : #include "nsPrintfCString.h"
63 : #include "nsRFPService.h"
64 : #include "OscillatorNode.h"
65 : #include "PannerNode.h"
66 : #include "PeriodicWave.h"
67 : #include "ScriptProcessorNode.h"
68 : #include "StereoPannerNode.h"
69 : #include "WaveShaperNode.h"
70 :
namespace mozilla {
namespace dom {

// 0 is a special value that MediaStreams use to denote they are not part of a
// AudioContext.
// Monotonically increasing id handed out to each new AudioContext by the
// constructor; starts at 1 so 0 stays reserved as the sentinel above.
// NOTE(review): not atomic — presumably AudioContexts are only ever created
// on the main thread; confirm before creating contexts elsewhere.
static dom::AudioContext::AudioContextId gAudioContextId = 1;
77 :
NS_IMPL_CYCLE_COLLECTION_CLASS(AudioContext)

// Unlink: drop the strong references that can participate in ref cycles.
NS_IMPL_CYCLE_COLLECTION_UNLINK_BEGIN(AudioContext)
  NS_IMPL_CYCLE_COLLECTION_UNLINK(mDestination)
  NS_IMPL_CYCLE_COLLECTION_UNLINK(mListener)
  NS_IMPL_CYCLE_COLLECTION_UNLINK(mPromiseGripArray)
  // Active nodes are only unlinked when the context never started (per the
  // traverse assertion below, that means an offline context before
  // StartRendering()).
  if (!tmp->mIsStarted) {
    NS_IMPL_CYCLE_COLLECTION_UNLINK(mActiveNodes)
  }
  // mDecodeJobs owns the WebAudioDecodeJob objects whose lifetime is managed explicitly.
  // mAllNodes is an array of weak pointers, ignore it here.
  // mPannerNodes is an array of weak pointers, ignore it here.
  // mBasicWaveFormCache cannot participate in cycles, ignore it here.

  // Remove weak reference on the global window as the context is not usable
  // without mDestination.
  tmp->DisconnectFromWindow();
NS_IMPL_CYCLE_COLLECTION_UNLINK_END_INHERITED(DOMEventTargetHelper)

// Traverse: report the same strong references to the cycle collector.
NS_IMPL_CYCLE_COLLECTION_TRAVERSE_BEGIN_INHERITED(AudioContext,
                                                  DOMEventTargetHelper)
  NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mDestination)
  NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mListener)
  NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mPromiseGripArray)
  if (!tmp->mIsStarted) {
    MOZ_ASSERT(tmp->mIsOffline,
               "Online AudioContexts should always be started");
    NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mActiveNodes)
  }
  // mDecodeJobs owns the WebAudioDecodeJob objects whose lifetime is managed explicitly.
  // mAllNodes is an array of weak pointers, ignore it here.
  // mPannerNodes is an array of weak pointers, ignore it here.
  // mBasicWaveFormCache cannot participate in cycles, ignore it here.
NS_IMPL_CYCLE_COLLECTION_TRAVERSE_END

NS_IMPL_ADDREF_INHERITED(AudioContext, DOMEventTargetHelper)
NS_IMPL_RELEASE_INHERITED(AudioContext, DOMEventTargetHelper)

// QueryInterface: besides the inherited interfaces, AudioContext also
// implements nsIMemoryReporter (registered in the static Constructors).
NS_INTERFACE_MAP_BEGIN_CYCLE_COLLECTION_INHERITED(AudioContext)
  NS_INTERFACE_MAP_ENTRY(nsIMemoryReporter)
NS_INTERFACE_MAP_END_INHERITING(DOMEventTargetHelper)
119 :
120 0 : static float GetSampleRateForAudioContext(bool aIsOffline, float aSampleRate)
121 : {
122 0 : if (aIsOffline) {
123 0 : return aSampleRate;
124 : } else {
125 0 : return static_cast<float>(CubebUtils::PreferredSampleRate());
126 : }
127 : }
128 :
/**
 * Shared constructor for realtime and offline contexts. Realtime contexts
 * start immediately (mIsStarted); offline contexts start only when
 * StartRendering() is called. The context must be registered with the
 * window before the destination node is created (see note below).
 */
AudioContext::AudioContext(nsPIDOMWindowInner* aWindow,
                           bool aIsOffline,
                           AudioChannel aChannel,
                           uint32_t aNumberOfChannels,
                           uint32_t aLength,
                           float aSampleRate)
  : DOMEventTargetHelper(aWindow)
  , mId(gAudioContextId++) // hand out the next context id; 0 is reserved
  , mSampleRate(GetSampleRateForAudioContext(aIsOffline, aSampleRate))
  , mAudioContextState(AudioContextState::Suspended)
  , mNumberOfChannels(aNumberOfChannels)
  , mIsOffline(aIsOffline)
  , mIsStarted(!aIsOffline) // realtime contexts are started from birth
  , mIsShutDown(false)
  , mCloseCalled(false)
  , mSuspendCalled(false)
  , mIsDisconnecting(false)
{
  // AddAudioContext() returns whether the window wants new contexts muted.
  bool mute = aWindow->AddAudioContext(this);

  // Note: AudioDestinationNode needs an AudioContext that must already be
  // bound to the window.
  mDestination = new AudioDestinationNode(this, aIsOffline, aChannel,
                                          aNumberOfChannels, aLength, aSampleRate);

  // The context can't be muted until it has a destination.
  if (mute) {
    Mute();
  }
}
159 :
160 : nsresult
161 0 : AudioContext::Init()
162 : {
163 0 : if (!mIsOffline) {
164 0 : nsresult rv = mDestination->CreateAudioChannelAgent();
165 0 : if (NS_WARN_IF(NS_FAILED(rv))) {
166 0 : return rv;
167 : }
168 : }
169 :
170 0 : return NS_OK;
171 : }
172 :
173 : void
174 0 : AudioContext::DisconnectFromWindow()
175 : {
176 0 : nsPIDOMWindowInner* window = GetOwner();
177 0 : if (window) {
178 0 : window->RemoveAudioContext(this);
179 : }
180 0 : }
181 :
// Teardown: detach from the window and stop memory reporting (the reporter
// was registered by the static Constructor methods).
AudioContext::~AudioContext()
{
  DisconnectFromWindow();
  UnregisterWeakMemoryReporter(this);
}
187 :
188 : JSObject*
189 0 : AudioContext::WrapObject(JSContext* aCx, JS::Handle<JSObject*> aGivenProto)
190 : {
191 0 : if (mIsOffline) {
192 0 : return OfflineAudioContextBinding::Wrap(aCx, this, aGivenProto);
193 : } else {
194 0 : return AudioContextBinding::Wrap(aCx, this, aGivenProto);
195 : }
196 : }
197 :
198 : /* static */ already_AddRefed<AudioContext>
199 0 : AudioContext::Constructor(const GlobalObject& aGlobal,
200 : ErrorResult& aRv)
201 : {
202 0 : nsCOMPtr<nsPIDOMWindowInner> window = do_QueryInterface(aGlobal.GetAsSupports());
203 0 : if (!window) {
204 0 : aRv.Throw(NS_ERROR_FAILURE);
205 0 : return nullptr;
206 : }
207 :
208 : RefPtr<AudioContext> object =
209 : new AudioContext(window, false,
210 0 : AudioChannelService::GetDefaultAudioChannel());
211 0 : aRv = object->Init();
212 0 : if (NS_WARN_IF(aRv.Failed())) {
213 0 : return nullptr;
214 : }
215 :
216 0 : RegisterWeakMemoryReporter(object);
217 :
218 0 : return object.forget();
219 : }
220 :
221 : /* static */ already_AddRefed<AudioContext>
222 0 : AudioContext::Constructor(const GlobalObject& aGlobal,
223 : uint32_t aNumberOfChannels,
224 : uint32_t aLength,
225 : float aSampleRate,
226 : ErrorResult& aRv)
227 : {
228 0 : nsCOMPtr<nsPIDOMWindowInner> window = do_QueryInterface(aGlobal.GetAsSupports());
229 0 : if (!window) {
230 0 : aRv.Throw(NS_ERROR_FAILURE);
231 0 : return nullptr;
232 : }
233 :
234 0 : if (aNumberOfChannels == 0 ||
235 0 : aNumberOfChannels > WebAudioUtils::MaxChannelCount ||
236 0 : aLength == 0 ||
237 0 : aSampleRate < WebAudioUtils::MinSampleRate ||
238 : aSampleRate > WebAudioUtils::MaxSampleRate) {
239 : // The DOM binding protects us against infinity and NaN
240 0 : aRv.Throw(NS_ERROR_DOM_NOT_SUPPORTED_ERR);
241 0 : return nullptr;
242 : }
243 :
244 : RefPtr<AudioContext> object = new AudioContext(window,
245 : true,
246 : AudioChannel::Normal,
247 : aNumberOfChannels,
248 : aLength,
249 0 : aSampleRate);
250 :
251 0 : RegisterWeakMemoryReporter(object);
252 :
253 0 : return object.forget();
254 : }
255 :
256 0 : bool AudioContext::CheckClosed(ErrorResult& aRv)
257 : {
258 0 : if (mAudioContextState == AudioContextState::Closed ||
259 0 : mIsShutDown ||
260 0 : mIsDisconnecting) {
261 0 : aRv.Throw(NS_ERROR_DOM_INVALID_STATE_ERR);
262 0 : return true;
263 : }
264 0 : return false;
265 : }
266 :
267 : already_AddRefed<AudioBufferSourceNode>
268 0 : AudioContext::CreateBufferSource(ErrorResult& aRv)
269 : {
270 : return AudioBufferSourceNode::Create(nullptr, *this,
271 0 : AudioBufferSourceOptions(),
272 0 : aRv);
273 : }
274 :
275 : already_AddRefed<ConstantSourceNode>
276 0 : AudioContext::CreateConstantSource(ErrorResult& aRv)
277 : {
278 0 : if (CheckClosed(aRv)) {
279 0 : return nullptr;
280 : }
281 :
282 : RefPtr<ConstantSourceNode> constantSourceNode =
283 0 : new ConstantSourceNode(this);
284 0 : return constantSourceNode.forget();
285 : }
286 :
287 : already_AddRefed<AudioBuffer>
288 0 : AudioContext::CreateBuffer(uint32_t aNumberOfChannels, uint32_t aLength,
289 : float aSampleRate,
290 : ErrorResult& aRv)
291 : {
292 0 : if (!aNumberOfChannels) {
293 0 : aRv.Throw(NS_ERROR_DOM_INDEX_SIZE_ERR);
294 0 : return nullptr;
295 : }
296 :
297 : return AudioBuffer::Create(GetOwner(), aNumberOfChannels, aLength,
298 0 : aSampleRate, aRv);
299 : }
300 :
namespace {

// A ScriptProcessorNode buffer size is valid when it is 0 (meaning "let the
// implementation choose") or a power of two between 256 and 16384 inclusive
// — exactly the set {256, 512, 1024, 2048, 4096, 8192, 16384}.
bool IsValidBufferSize(uint32_t aBufferSize) {
  if (aBufferSize == 0) {
    return true;
  }
  const bool isPowerOfTwo = (aBufferSize & (aBufferSize - 1)) == 0;
  return isPowerOfTwo && aBufferSize >= 256 && aBufferSize <= 16384;
}

} // namespace
320 :
321 : already_AddRefed<MediaStreamAudioDestinationNode>
322 0 : AudioContext::CreateMediaStreamDestination(ErrorResult& aRv)
323 : {
324 0 : return MediaStreamAudioDestinationNode::Create(*this, AudioNodeOptions(),
325 0 : aRv);
326 : }
327 :
328 : already_AddRefed<ScriptProcessorNode>
329 0 : AudioContext::CreateScriptProcessor(uint32_t aBufferSize,
330 : uint32_t aNumberOfInputChannels,
331 : uint32_t aNumberOfOutputChannels,
332 : ErrorResult& aRv)
333 : {
334 0 : if ((aNumberOfInputChannels == 0 && aNumberOfOutputChannels == 0) ||
335 0 : aNumberOfInputChannels > WebAudioUtils::MaxChannelCount ||
336 0 : aNumberOfOutputChannels > WebAudioUtils::MaxChannelCount ||
337 0 : !IsValidBufferSize(aBufferSize)) {
338 0 : aRv.Throw(NS_ERROR_DOM_INDEX_SIZE_ERR);
339 0 : return nullptr;
340 : }
341 :
342 0 : if (CheckClosed(aRv)) {
343 0 : return nullptr;
344 : }
345 :
346 : RefPtr<ScriptProcessorNode> scriptProcessor =
347 : new ScriptProcessorNode(this, aBufferSize, aNumberOfInputChannels,
348 0 : aNumberOfOutputChannels);
349 0 : return scriptProcessor.forget();
350 : }
351 :
352 : already_AddRefed<AnalyserNode>
353 0 : AudioContext::CreateAnalyser(ErrorResult& aRv)
354 : {
355 0 : return AnalyserNode::Create(*this, AnalyserOptions(), aRv);
356 : }
357 :
358 : already_AddRefed<StereoPannerNode>
359 0 : AudioContext::CreateStereoPanner(ErrorResult& aRv)
360 : {
361 0 : return StereoPannerNode::Create(*this, StereoPannerOptions(), aRv);
362 : }
363 :
364 : already_AddRefed<MediaElementAudioSourceNode>
365 0 : AudioContext::CreateMediaElementSource(HTMLMediaElement& aMediaElement,
366 : ErrorResult& aRv)
367 : {
368 0 : MediaElementAudioSourceOptions options;
369 0 : options.mMediaElement = aMediaElement;
370 :
371 0 : return MediaElementAudioSourceNode::Create(*this, options, aRv);
372 : }
373 :
374 : already_AddRefed<MediaStreamAudioSourceNode>
375 0 : AudioContext::CreateMediaStreamSource(DOMMediaStream& aMediaStream,
376 : ErrorResult& aRv)
377 : {
378 0 : MediaStreamAudioSourceOptions options;
379 0 : options.mMediaStream = aMediaStream;
380 :
381 0 : return MediaStreamAudioSourceNode::Create(*this, options, aRv);
382 : }
383 :
384 : already_AddRefed<GainNode>
385 0 : AudioContext::CreateGain(ErrorResult& aRv)
386 : {
387 0 : return GainNode::Create(*this, GainOptions(), aRv);
388 : }
389 :
390 : already_AddRefed<WaveShaperNode>
391 0 : AudioContext::CreateWaveShaper(ErrorResult& aRv)
392 : {
393 0 : return WaveShaperNode::Create(*this, WaveShaperOptions(), aRv);
394 : }
395 :
396 : already_AddRefed<DelayNode>
397 0 : AudioContext::CreateDelay(double aMaxDelayTime, ErrorResult& aRv)
398 : {
399 0 : DelayOptions options;
400 0 : options.mMaxDelayTime = aMaxDelayTime;
401 0 : return DelayNode::Create(*this, options, aRv);
402 : }
403 :
404 : already_AddRefed<PannerNode>
405 0 : AudioContext::CreatePanner(ErrorResult& aRv)
406 : {
407 0 : return PannerNode::Create(*this, PannerOptions(), aRv);
408 : }
409 :
410 : already_AddRefed<ConvolverNode>
411 0 : AudioContext::CreateConvolver(ErrorResult& aRv)
412 : {
413 0 : return ConvolverNode::Create(nullptr, *this, ConvolverOptions(), aRv);
414 : }
415 :
416 : already_AddRefed<ChannelSplitterNode>
417 0 : AudioContext::CreateChannelSplitter(uint32_t aNumberOfOutputs, ErrorResult& aRv)
418 : {
419 0 : ChannelSplitterOptions options;
420 0 : options.mNumberOfOutputs = aNumberOfOutputs;
421 0 : return ChannelSplitterNode::Create(*this, options, aRv);
422 : }
423 :
424 : already_AddRefed<ChannelMergerNode>
425 0 : AudioContext::CreateChannelMerger(uint32_t aNumberOfInputs, ErrorResult& aRv)
426 : {
427 0 : ChannelMergerOptions options;
428 0 : options.mNumberOfInputs = aNumberOfInputs;
429 0 : return ChannelMergerNode::Create(*this, options, aRv);
430 : }
431 :
432 : already_AddRefed<DynamicsCompressorNode>
433 0 : AudioContext::CreateDynamicsCompressor(ErrorResult& aRv)
434 : {
435 0 : return DynamicsCompressorNode::Create(*this, DynamicsCompressorOptions(), aRv);
436 : }
437 :
438 : already_AddRefed<BiquadFilterNode>
439 0 : AudioContext::CreateBiquadFilter(ErrorResult& aRv)
440 : {
441 0 : return BiquadFilterNode::Create(*this, BiquadFilterOptions(), aRv);
442 : }
443 :
444 : already_AddRefed<IIRFilterNode>
445 0 : AudioContext::CreateIIRFilter(const Sequence<double>& aFeedforward,
446 : const Sequence<double>& aFeedback,
447 : mozilla::ErrorResult& aRv)
448 : {
449 0 : IIRFilterOptions options;
450 0 : options.mFeedforward = aFeedforward;
451 0 : options.mFeedback = aFeedback;
452 0 : return IIRFilterNode::Create(*this, options, aRv);
453 : }
454 :
455 : already_AddRefed<OscillatorNode>
456 0 : AudioContext::CreateOscillator(ErrorResult& aRv)
457 : {
458 0 : return OscillatorNode::Create(*this, OscillatorOptions(), aRv);
459 : }
460 :
461 : already_AddRefed<PeriodicWave>
462 0 : AudioContext::CreatePeriodicWave(const Float32Array& aRealData,
463 : const Float32Array& aImagData,
464 : const PeriodicWaveConstraints& aConstraints,
465 : ErrorResult& aRv)
466 : {
467 0 : aRealData.ComputeLengthAndData();
468 0 : aImagData.ComputeLengthAndData();
469 :
470 0 : if (aRealData.Length() != aImagData.Length() ||
471 0 : aRealData.Length() == 0) {
472 0 : aRv.Throw(NS_ERROR_DOM_INDEX_SIZE_ERR);
473 0 : return nullptr;
474 : }
475 :
476 : RefPtr<PeriodicWave> periodicWave =
477 0 : new PeriodicWave(this, aRealData.Data(), aImagData.Data(),
478 0 : aImagData.Length(), aConstraints.mDisableNormalization,
479 0 : aRv);
480 0 : if (aRv.Failed()) {
481 0 : return nullptr;
482 : }
483 0 : return periodicWave.forget();
484 : }
485 :
486 : AudioListener*
487 0 : AudioContext::Listener()
488 : {
489 0 : if (!mListener) {
490 0 : mListener = new AudioListener(this);
491 : }
492 0 : return mListener;
493 : }
494 :
/**
 * Implementation of decodeAudioData(). Steals the contents of aBuffer
 * (detaching the caller's ArrayBuffer), sniffs the media type and kicks off
 * an asynchronous decode. The returned promise — and the optional
 * success/failure callbacks — are settled by the WebAudioDecodeJob, which
 * this context keeps alive in mDecodeJobs until RemoveFromDecodeQueue().
 */
already_AddRefed<Promise>
AudioContext::DecodeAudioData(const ArrayBuffer& aBuffer,
                              const Optional<OwningNonNull<DecodeSuccessCallback> >& aSuccessCallback,
                              const Optional<OwningNonNull<DecodeErrorCallback> >& aFailureCallback,
                              ErrorResult& aRv)
{
  nsCOMPtr<nsIGlobalObject> parentObject = do_QueryInterface(GetParentObject());
  RefPtr<Promise> promise;
  AutoJSAPI jsapi;
  jsapi.Init();
  JSContext* cx = jsapi.cx();
  // Enter the buffer's compartment so we can steal its contents below.
  JSAutoCompartment ac(cx, aBuffer.Obj());

  promise = Promise::Create(parentObject, aRv);
  if (aRv.Failed()) {
    return nullptr;
  }

  // Must be called before Length()/Data()/IsShared() are meaningful.
  aBuffer.ComputeLengthAndData();

  if (aBuffer.IsShared()) {
    // Throw if the object is mapping shared memory (must opt in).
    aRv.ThrowTypeError<MSG_TYPEDARRAY_IS_SHARED>(NS_LITERAL_STRING("Argument of AudioContext.decodeAudioData"));
    return nullptr;
  }

  if (!aBuffer.Data()) {
    // Throw if the buffer is detached
    aRv.ThrowTypeError<MSG_TYPEDARRAY_IS_DETACHED>(NS_LITERAL_STRING("Argument of AudioContext.decodeAudioData"));
    return nullptr;
  }

  // Detach the array buffer
  size_t length = aBuffer.Length();
  JS::RootedObject obj(cx, aBuffer.Obj());

  // After this call the caller's ArrayBuffer is detached and we own `data`.
  uint8_t* data = static_cast<uint8_t*>(JS_StealArrayBufferContents(cx, obj));

  // Sniff the content of the media.
  // Failed type sniffing will be handled by AsyncDecodeWebAudio.
  nsAutoCString contentType;
  NS_SniffContent(NS_DATA_SNIFFER_CATEGORY, nullptr, data, length, contentType);

  RefPtr<DecodeErrorCallback> failureCallback;
  RefPtr<DecodeSuccessCallback> successCallback;
  if (aFailureCallback.WasPassed()) {
    failureCallback = &aFailureCallback.Value();
  }
  if (aSuccessCallback.WasPassed()) {
    successCallback = &aSuccessCallback.Value();
  }
  UniquePtr<WebAudioDecodeJob> job(
    new WebAudioDecodeJob(contentType, this,
                          promise, successCallback, failureCallback));
  AsyncDecodeWebAudio(contentType.get(), data, length, *job);
  // Transfer the ownership to mDecodeJobs
  mDecodeJobs.AppendElement(Move(job));

  return promise.forget();
}
555 :
556 : void
557 0 : AudioContext::RemoveFromDecodeQueue(WebAudioDecodeJob* aDecodeJob)
558 : {
559 : // Since UniquePtr doesn't provide an operator== which allows you to compare
560 : // against raw pointers, we need to iterate manually.
561 0 : for (uint32_t i = 0; i < mDecodeJobs.Length(); ++i) {
562 0 : if (mDecodeJobs[i].get() == aDecodeJob) {
563 0 : mDecodeJobs.RemoveElementAt(i);
564 0 : break;
565 : }
566 : }
567 0 : }
568 :
569 : void
570 0 : AudioContext::RegisterActiveNode(AudioNode* aNode)
571 : {
572 0 : if (!mIsShutDown) {
573 0 : mActiveNodes.PutEntry(aNode);
574 : }
575 0 : }
576 :
// Counterpart of RegisterActiveNode(): stop keeping aNode alive.
void
AudioContext::UnregisterActiveNode(AudioNode* aNode)
{
  mActiveNodes.RemoveEntry(aNode);
}
582 :
// Called when an AudioBufferSourceNode goes away. The node itself is not
// tracked in any table here (aNode is deliberately unused); we only ask all
// PannerNodes to recompute their connected sources.
void
AudioContext::UnregisterAudioBufferSourceNode(AudioBufferSourceNode* aNode)
{
  UpdatePannerSource();
}
588 :
589 : void
590 0 : AudioContext::UnregisterPannerNode(PannerNode* aNode)
591 : {
592 0 : mPannerNodes.RemoveEntry(aNode);
593 0 : if (mListener) {
594 0 : mListener->UnregisterPannerNode(aNode);
595 : }
596 0 : }
597 :
// Asks every PannerNode belonging to this context to recompute the set of
// sources connected to it.
void
AudioContext::UpdatePannerSource()
{
  for (auto iter = mPannerNodes.Iter(); !iter.Done(); iter.Next()) {
    iter.Get()->GetKey()->FindConnectedSources();
  }
}
605 :
606 : uint32_t
607 0 : AudioContext::MaxChannelCount() const
608 : {
609 0 : return mIsOffline ? mNumberOfChannels : CubebUtils::MaxNumberOfChannels();
610 : }
611 :
// Number of nodes currently registered as active via RegisterActiveNode().
uint32_t
AudioContext::ActiveNodeCount() const
{
  return mActiveNodes.Count();
}
617 :
// The MediaStreamGraph this context renders into, reached through the
// destination node's stream. Assumes destination and stream are alive; use
// DestinationStream() when null-safety is needed.
MediaStreamGraph*
AudioContext::Graph() const
{
  return Destination()->Stream()->Graph();
}
623 :
624 : MediaStream*
625 0 : AudioContext::DestinationStream() const
626 : {
627 0 : if (Destination()) {
628 0 : return Destination()->Stream();
629 : }
630 0 : return nullptr;
631 : }
632 :
// Current playback position of this context in seconds, with the precision
// reduced by nsRFPService to resist timing-based fingerprinting.
double
AudioContext::CurrentTime() const
{
  MediaStream* stream = Destination()->Stream();
  return nsRFPService::ReduceTimePrecisionAsSecs(
    stream->StreamTimeToSeconds(stream->GetCurrentTime()));
}
640 :
// Called when the owning global is going away. mIsDisconnecting must be set
// *before* Shutdown() runs so Shutdown() knows not to touch promises whose
// global is being torn down.
void AudioContext::DisconnectFromOwner()
{
  mIsDisconnecting = true;
  Shutdown();
  DOMEventTargetHelper::DisconnectFromOwner();
}
647 :
/**
 * Irreversible teardown: closes a realtime context, rejects every pending
 * state-change promise, drops the active-node set, and (for offline
 * contexts) lets the destination tear down the graph.
 */
void
AudioContext::Shutdown()
{
  mIsShutDown = true;

  // We don't want to touch promises if the global is going away soon.
  if (!mIsDisconnecting) {
    if (!mIsOffline) {
      IgnoredErrorResult dummy;
      RefPtr<Promise> ignored = Close(dummy);
    }

    // Settle anything still waiting on suspend/resume/close.
    for (auto p : mPromiseGripArray) {
      p->MaybeReject(NS_ERROR_DOM_INVALID_STATE_ERR);
    }

    mPromiseGripArray.Clear();
  }

  // Release references to active nodes.
  // Active AudioNodes don't unregister in destructors, at which point the
  // Node is already unregistered.
  mActiveNodes.Clear();

  // For offline contexts, we can destroy the MediaStreamGraph at this point.
  if (mIsOffline && mDestination) {
    mDestination->OfflineShutdown();
  }
}
677 :
// Main-thread constructor: the AudioContext is known up front.
StateChangeTask::StateChangeTask(AudioContext* aAudioContext,
                                 void* aPromise,
                                 AudioContextState aNewState)
  : Runnable("dom::StateChangeTask")
  , mAudioContext(aAudioContext)
  , mPromise(aPromise)
  , mAudioNodeStream(nullptr)
  , mNewState(aNewState)
{
  MOZ_ASSERT(NS_IsMainThread(),
             "This constructor should be used from the main thread.");
}

// Graph-thread constructor: only the stream is available; the AudioContext
// is recovered later, on the main thread, in Run().
StateChangeTask::StateChangeTask(AudioNodeStream* aStream,
                                 void* aPromise,
                                 AudioContextState aNewState)
  : Runnable("dom::StateChangeTask")
  , mAudioContext(nullptr)
  , mPromise(aPromise)
  , mAudioNodeStream(aStream)
  , mNewState(aNewState)
{
  MOZ_ASSERT(!NS_IsMainThread(),
             "This constructor should be used from the graph thread.");
}

// Delivers the state change to the AudioContext on the main thread. Bails
// out silently if the node or context has already gone away.
NS_IMETHODIMP
StateChangeTask::Run()
{
  MOZ_ASSERT(NS_IsMainThread());

  if (!mAudioContext && !mAudioNodeStream) {
    return NS_OK;
  }
  if (mAudioNodeStream) {
    // Recover the AudioContext from the stream's engine-side node.
    AudioNode* node = mAudioNodeStream->Engine()->NodeMainThread();
    if (!node) {
      return NS_OK;
    }
    mAudioContext = node->Context();
    if (!mAudioContext) {
      return NS_OK;
    }
  }

  mAudioContext->OnStateChanged(mPromise, mNewState);
  // We can't call Release() on the AudioContext on the MSG thread, so we
  // unref it here, on the main thread.
  mAudioContext = nullptr;

  return NS_OK;
}
730 :
/* This runnable allows to fire the "statechange" event */
class OnStateChangeTask final : public Runnable
{
public:
  explicit OnStateChangeTask(AudioContext* aAudioContext)
    : Runnable("dom::OnStateChangeTask")
    , mAudioContext(aAudioContext)
  {}

  // Fires a trusted, non-bubbling, non-cancelable "statechange" event at
  // the AudioContext. Returns failure (harmlessly) when the window or its
  // document is already gone.
  NS_IMETHODIMP
  Run() override
  {
    nsPIDOMWindowInner* parent = mAudioContext->GetParentObject();
    if (!parent) {
      return NS_ERROR_FAILURE;
    }

    nsIDocument* doc = parent->GetExtantDoc();
    if (!doc) {
      return NS_ERROR_FAILURE;
    }

    return nsContentUtils::DispatchTrustedEvent(doc,
                                                static_cast<DOMEventTargetHelper*>(mAudioContext),
                                                NS_LITERAL_STRING("statechange"),
                                                false, false);
  }

private:
  // Strong reference: keeps the context alive until the event has fired.
  RefPtr<AudioContext> mAudioContext;
};
762 :
763 :
// Dispatches aRunnable to the global's "Other" task queue. If the global is
// already gone, the runnable is consumed and released instead of leaked
// (the already_AddRefed must not be dropped on the floor).
void
AudioContext::Dispatch(already_AddRefed<nsIRunnable>&& aRunnable)
{
  MOZ_ASSERT(NS_IsMainThread());
  nsCOMPtr<nsIGlobalObject> parentObject =
    do_QueryInterface(GetParentObject());
  // It can happen that this runnable took a long time to reach the main thread,
  // and the global is not valid anymore.
  if (parentObject) {
    parentObject->AbstractMainThreadFor(TaskCategory::Other)
      ->Dispatch(std::move(aRunnable));
  } else {
    // Adopt and immediately release the reference carried by aRunnable.
    RefPtr<nsIRunnable> runnable(aRunnable);
    runnable = nullptr;
  }
}
780 :
/**
 * Main-thread notification that the MediaStreamGraph applied a state
 * transition for this context. Settles the promise associated with the
 * transition (if any and still gripped), fires "statechange" when the state
 * actually changed, then records the new state.
 */
void
AudioContext::OnStateChanged(void* aPromise, AudioContextState aNewState)
{
  MOZ_ASSERT(NS_IsMainThread());

  // This can happen if close() was called right after creating the
  // AudioContext, before the context has switched to "running".
  if (mAudioContextState == AudioContextState::Closed &&
      aNewState == AudioContextState::Running &&
      !aPromise) {
    return;
  }

  // This can happen if this is called in reaction to a
  // MediaStreamGraph shutdown, and a AudioContext was being
  // suspended at the same time, for example if a page was being
  // closed.
  if (mAudioContextState == AudioContextState::Closed &&
      aNewState == AudioContextState::Suspended) {
    return;
  }

#ifndef WIN32 // Bug 1170547
#ifndef XP_MACOSX
#ifdef DEBUG

  // Debug-only sanity check: only the transitions listed here (or a
  // self-transition) are considered legal.
  if (!((mAudioContextState == AudioContextState::Suspended &&
         aNewState == AudioContextState::Running) ||
        (mAudioContextState == AudioContextState::Running &&
         aNewState == AudioContextState::Suspended) ||
        (mAudioContextState == AudioContextState::Running &&
         aNewState == AudioContextState::Closed) ||
        (mAudioContextState == AudioContextState::Suspended &&
         aNewState == AudioContextState::Closed) ||
        (mAudioContextState == aNewState))) {
    fprintf(stderr,
            "Invalid transition: mAudioContextState: %d -> aNewState %d\n",
            static_cast<int>(mAudioContextState), static_cast<int>(aNewState));
    MOZ_ASSERT(false);
  }

#endif // DEBUG
#endif // XP_MACOSX
#endif // WIN32

  MOZ_ASSERT(
    mIsOffline || aPromise || aNewState == AudioContextState::Running,
    "We should have a promise here if this is a real-time AudioContext."
    "Or this is the first time we switch to \"running\".");

  if (aPromise) {
    Promise* promise = reinterpret_cast<Promise*>(aPromise);
    // It is possible for the promise to have been removed from
    // mPromiseGripArray if the cycle collector has severed our connections. DO
    // NOT dereference the promise pointer in that case since it may point to
    // already freed memory.
    if (mPromiseGripArray.Contains(promise)) {
      promise->MaybeResolveWithUndefined();
      DebugOnly<bool> rv = mPromiseGripArray.RemoveElement(promise);
      MOZ_ASSERT(rv, "Promise wasn't in the grip array?");
    }
  }

  // Only notify content when the observable state actually changes.
  if (mAudioContextState != aNewState) {
    RefPtr<OnStateChangeTask> task = new OnStateChangeTask(this);
    Dispatch(task.forget());
  }

  mAudioContextState = aNewState;
}
851 :
852 : nsTArray<MediaStream*>
853 0 : AudioContext::GetAllStreams() const
854 : {
855 0 : nsTArray<MediaStream*> streams;
856 0 : for (auto iter = mAllNodes.ConstIter(); !iter.Done(); iter.Next()) {
857 0 : MediaStream* s = iter.Get()->GetKey()->GetStream();
858 0 : if (s) {
859 0 : streams.AppendElement(s);
860 : }
861 : }
862 0 : return streams;
863 : }
864 :
/**
 * Implementation of suspend(). Pauses the progression of time for this
 * context. Rejects with NotSupportedError on offline contexts and with
 * InvalidStateError once closed/closing. The returned promise is settled
 * later by the graph (see StateChangeTask / OnStateChanged).
 */
already_AddRefed<Promise>
AudioContext::Suspend(ErrorResult& aRv)
{
  nsCOMPtr<nsIGlobalObject> parentObject = do_QueryInterface(GetParentObject());
  RefPtr<Promise> promise;
  promise = Promise::Create(parentObject, aRv);
  if (aRv.Failed()) {
    return nullptr;
  }
  if (mIsOffline) {
    promise->MaybeReject(NS_ERROR_DOM_NOT_SUPPORTED_ERR);
    return promise.forget();
  }

  if (mAudioContextState == AudioContextState::Closed ||
      mCloseCalled) {
    promise->MaybeReject(NS_ERROR_DOM_INVALID_STATE_ERR);
    return promise.forget();
  }

  Destination()->Suspend();

  // Grip the promise until the graph settles it in OnStateChanged().
  mPromiseGripArray.AppendElement(promise);

  nsTArray<MediaStream*> streams;
  // If mSuspendCalled is true then we already suspended all our streams,
  // so don't suspend them again (since suspend(); suspend(); resume(); should
  // cancel both suspends). But we still need to do ApplyAudioContextOperation
  // to ensure our new promise is resolved.
  if (!mSuspendCalled) {
    streams = GetAllStreams();
  }
  Graph()->ApplyAudioContextOperation(DestinationStream()->AsAudioNodeStream(),
                                      streams,
                                      AudioContextOperation::Suspend, promise);

  mSuspendCalled = true;

  return promise.forget();
}
905 :
/**
 * Implementation of resume(). Mirror image of Suspend(): rejects with
 * NotSupportedError on offline contexts and InvalidStateError once
 * closed/closing. The returned promise is settled later by the graph.
 */
already_AddRefed<Promise>
AudioContext::Resume(ErrorResult& aRv)
{
  nsCOMPtr<nsIGlobalObject> parentObject = do_QueryInterface(GetParentObject());
  RefPtr<Promise> promise;
  promise = Promise::Create(parentObject, aRv);
  if (aRv.Failed()) {
    return nullptr;
  }

  if (mIsOffline) {
    promise->MaybeReject(NS_ERROR_DOM_NOT_SUPPORTED_ERR);
    return promise.forget();
  }

  if (mAudioContextState == AudioContextState::Closed ||
      mCloseCalled) {
    promise->MaybeReject(NS_ERROR_DOM_INVALID_STATE_ERR);
    return promise.forget();
  }

  Destination()->Resume();

  nsTArray<MediaStream*> streams;
  // If mSuspendCalled is false then we already resumed all our streams,
  // so don't resume them again (since suspend(); resume(); resume(); should
  // be OK). But we still need to do ApplyAudioContextOperation
  // to ensure our new promise is resolved.
  if (mSuspendCalled) {
    streams = GetAllStreams();
  }
  // Grip the promise until the graph settles it in OnStateChanged().
  mPromiseGripArray.AppendElement(promise);
  Graph()->ApplyAudioContextOperation(DestinationStream()->AsAudioNodeStream(),
                                      streams,
                                      AudioContextOperation::Resume, promise);

  mSuspendCalled = false;

  return promise.forget();
}
946 :
947 : already_AddRefed<Promise>
948 0 : AudioContext::Close(ErrorResult& aRv)
949 : {
950 0 : nsCOMPtr<nsIGlobalObject> parentObject = do_QueryInterface(GetParentObject());
951 0 : RefPtr<Promise> promise;
952 0 : promise = Promise::Create(parentObject, aRv);
953 0 : if (aRv.Failed()) {
954 0 : return nullptr;
955 : }
956 :
957 0 : if (mIsOffline) {
958 0 : promise->MaybeReject(NS_ERROR_DOM_NOT_SUPPORTED_ERR);
959 0 : return promise.forget();
960 : }
961 :
962 0 : if (mAudioContextState == AudioContextState::Closed) {
963 0 : promise->MaybeResolve(NS_ERROR_DOM_INVALID_STATE_ERR);
964 0 : return promise.forget();
965 : }
966 :
967 0 : if (Destination()) {
968 0 : Destination()->DestroyAudioChannelAgent();
969 : }
970 :
971 0 : mPromiseGripArray.AppendElement(promise);
972 :
973 : // This can be called when freeing a document, and the streams are dead at
974 : // this point, so we need extra null-checks.
975 0 : MediaStream* ds = DestinationStream();
976 0 : if (ds) {
977 0 : nsTArray<MediaStream*> streams;
978 : // If mSuspendCalled or mCloseCalled are true then we already suspended
979 : // all our streams, so don't suspend them again. But we still need to do
980 : // ApplyAudioContextOperation to ensure our new promise is resolved.
981 0 : if (!mSuspendCalled && !mCloseCalled) {
982 0 : streams = GetAllStreams();
983 : }
984 0 : Graph()->ApplyAudioContextOperation(ds->AsAudioNodeStream(), streams,
985 0 : AudioContextOperation::Close, promise);
986 : }
987 0 : mCloseCalled = true;
988 :
989 0 : return promise.forget();
990 : }
991 :
void
AudioContext::RegisterNode(AudioNode* aNode)
{
  // Track every node created by this context so CollectReports() can report
  // per-node memory. A node must not be registered twice.
  MOZ_ASSERT(!mAllNodes.Contains(aNode));
  mAllNodes.PutEntry(aNode);
}
998 :
void
AudioContext::UnregisterNode(AudioNode* aNode)
{
  // Counterpart of RegisterNode(); called when the node goes away. The node
  // must currently be registered.
  MOZ_ASSERT(mAllNodes.Contains(aNode));
  mAllNodes.RemoveEntry(aNode);
}
1005 :
1006 : JSObject*
1007 0 : AudioContext::GetGlobalJSObject() const
1008 : {
1009 0 : nsCOMPtr<nsIGlobalObject> parentObject = do_QueryInterface(GetParentObject());
1010 0 : if (!parentObject) {
1011 0 : return nullptr;
1012 : }
1013 :
1014 : // This can also return null.
1015 0 : return parentObject->GetGlobalJSObject();
1016 : }
1017 :
1018 : already_AddRefed<Promise>
1019 0 : AudioContext::StartRendering(ErrorResult& aRv)
1020 : {
1021 0 : nsCOMPtr<nsIGlobalObject> parentObject = do_QueryInterface(GetParentObject());
1022 :
1023 0 : MOZ_ASSERT(mIsOffline, "This should only be called on OfflineAudioContext");
1024 0 : if (mIsStarted) {
1025 0 : aRv.Throw(NS_ERROR_DOM_INVALID_STATE_ERR);
1026 0 : return nullptr;
1027 : }
1028 :
1029 0 : mIsStarted = true;
1030 0 : RefPtr<Promise> promise = Promise::Create(parentObject, aRv);
1031 0 : if (aRv.Failed()) {
1032 0 : return nullptr;
1033 : }
1034 0 : mDestination->StartRendering(promise);
1035 :
1036 0 : OnStateChanged(nullptr, AudioContextState::Running);
1037 :
1038 0 : return promise.forget();
1039 : }
1040 :
// Number of sample-frames this offline context will render; only meaningful
// for OfflineAudioContext.
unsigned long
AudioContext::Length()
{
  MOZ_ASSERT(mIsOffline);
  return mDestination->Length();
}
1047 :
1048 : void
1049 0 : AudioContext::Mute() const
1050 : {
1051 0 : MOZ_ASSERT(!mIsOffline);
1052 0 : if (mDestination) {
1053 0 : mDestination->Mute();
1054 : }
1055 0 : }
1056 :
1057 : void
1058 0 : AudioContext::Unmute() const
1059 : {
1060 0 : MOZ_ASSERT(!mIsOffline);
1061 0 : if (mDestination) {
1062 0 : mDestination->Unmute();
1063 : }
1064 0 : }
1065 :
1066 : size_t
1067 0 : AudioContext::SizeOfIncludingThis(mozilla::MallocSizeOf aMallocSizeOf) const
1068 : {
1069 : // AudioNodes are tracked separately because we do not want the AudioContext
1070 : // to track all of the AudioNodes it creates, so we wouldn't be able to
1071 : // traverse them from here.
1072 :
1073 0 : size_t amount = aMallocSizeOf(this);
1074 0 : if (mListener) {
1075 0 : amount += mListener->SizeOfIncludingThis(aMallocSizeOf);
1076 : }
1077 0 : amount += mDecodeJobs.ShallowSizeOfExcludingThis(aMallocSizeOf);
1078 0 : for (uint32_t i = 0; i < mDecodeJobs.Length(); ++i) {
1079 0 : amount += mDecodeJobs[i]->SizeOfIncludingThis(aMallocSizeOf);
1080 : }
1081 0 : amount += mActiveNodes.ShallowSizeOfExcludingThis(aMallocSizeOf);
1082 0 : amount += mPannerNodes.ShallowSizeOfExcludingThis(aMallocSizeOf);
1083 0 : return amount;
1084 : }
1085 :
// nsIMemoryReporter implementation: emits one report per live AudioNode
// (registered via RegisterNode) plus one aggregate report for the context
// itself. NOTE(review): MOZ_COLLECT_REPORT expands using the parameter names
// aHandleReport/aData, so those names must not change.
NS_IMETHODIMP
AudioContext::CollectReports(nsIHandleReportCallback* aHandleReport,
                             nsISupports* aData, bool aAnonymize)
{
  const nsLiteralCString
    nodeDescription("Memory used by AudioNode DOM objects (Web Audio).");
  for (auto iter = mAllNodes.ConstIter(); !iter.Done(); iter.Next()) {
    AudioNode* node = iter.Get()->GetKey();
    int64_t amount = node->SizeOfIncludingThis(MallocSizeOf);
    // Group nodes by their concrete type in the report path.
    nsPrintfCString domNodePath("explicit/webaudio/audio-node/%s/dom-nodes",
                                node->NodeType());
    aHandleReport->Callback(EmptyCString(), domNodePath, KIND_HEAP, UNITS_BYTES,
                            amount, nodeDescription, aData);
  }

  // Aggregate report for the context itself (excludes the nodes above).
  int64_t amount = SizeOfIncludingThis(MallocSizeOf);
  MOZ_COLLECT_REPORT(
    "explicit/webaudio/audiocontext", KIND_HEAP, UNITS_BYTES, amount,
    "Memory used by AudioContext objects (Web Audio).");

  return NS_OK;
}
1108 :
1109 : BasicWaveFormCache*
1110 0 : AudioContext::GetBasicWaveFormCache()
1111 : {
1112 0 : MOZ_ASSERT(NS_IsMainThread());
1113 0 : if (!mBasicWaveFormCache) {
1114 0 : mBasicWaveFormCache = new BasicWaveFormCache(SampleRate());
1115 : }
1116 0 : return mBasicWaveFormCache;
1117 : }
1118 :
// Cache of lazily-built basic periodic waveforms for a given sample rate.
// Constructed on the main thread (see AudioContext::GetBasicWaveFormCache);
// the waveforms themselves are built and fetched off-main-thread.
BasicWaveFormCache::BasicWaveFormCache(uint32_t aSampleRate)
  : mSampleRate(aSampleRate)
{
  MOZ_ASSERT(NS_IsMainThread());
}
1124 0 : BasicWaveFormCache::~BasicWaveFormCache()
1125 0 : { }
1126 :
1127 : WebCore::PeriodicWave*
1128 0 : BasicWaveFormCache::GetBasicWaveForm(OscillatorType aType)
1129 : {
1130 0 : MOZ_ASSERT(!NS_IsMainThread());
1131 0 : if (aType == OscillatorType::Sawtooth) {
1132 0 : if (!mSawtooth) {
1133 0 : mSawtooth = WebCore::PeriodicWave::createSawtooth(mSampleRate);
1134 : }
1135 0 : return mSawtooth;
1136 0 : } else if (aType == OscillatorType::Square) {
1137 0 : if (!mSquare) {
1138 0 : mSquare = WebCore::PeriodicWave::createSquare(mSampleRate);
1139 : }
1140 0 : return mSquare;
1141 0 : } else if (aType == OscillatorType::Triangle) {
1142 0 : if (!mTriangle) {
1143 0 : mTriangle = WebCore::PeriodicWave::createTriangle(mSampleRate);
1144 : }
1145 0 : return mTriangle;
1146 : } else {
1147 0 : MOZ_ASSERT(false, "Not reached");
1148 : return nullptr;
1149 : }
1150 : }
1151 :
1152 : } // namespace dom
1153 : } // namespace mozilla
|