Line data Source code
1 : /* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
2 : /* vim:set ts=2 sw=2 sts=2 et cindent: */
3 : /* This Source Code Form is subject to the terms of the Mozilla Public
4 : * License, v. 2.0. If a copy of the MPL was not distributed with this
5 : * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
6 :
7 : #include "AudioDestinationNode.h"
8 : #include "AlignmentUtils.h"
9 : #include "AudioContext.h"
10 : #include "mozilla/dom/AudioDestinationNodeBinding.h"
11 : #include "mozilla/dom/OfflineAudioCompletionEvent.h"
12 : #include "mozilla/dom/ScriptSettings.h"
13 : #include "mozilla/Services.h"
14 : #include "AudioChannelAgent.h"
15 : #include "AudioChannelService.h"
16 : #include "AudioNodeEngine.h"
17 : #include "AudioNodeStream.h"
18 : #include "MediaStreamGraph.h"
19 : #include "nsContentUtils.h"
20 : #include "nsIInterfaceRequestorUtils.h"
21 : #include "nsIDocShell.h"
22 : #include "nsIPermissionManager.h"
23 : #include "nsIScriptObjectPrincipal.h"
24 : #include "nsServiceManagerUtils.h"
25 : #include "mozilla/dom/Promise.h"
26 :
27 : namespace mozilla {
28 : namespace dom {
29 :
30 : static uint8_t gWebAudioOutputKey;
31 :
// Engine for an OfflineAudioContext's destination node.  Instead of sending
// its input to audio hardware, it records up to mLength frames into a shared
// float buffer on the rendering thread.  Once the buffer is full, the stream
// is marked finished; the main thread later wraps the recording in an
// AudioBuffer, resolves the rendering promise, and fires the "complete"
// event (see FireOfflineCompletionEvent below).
class OfflineDestinationNodeEngine final : public AudioNodeEngine
{
public:
  OfflineDestinationNodeEngine(AudioDestinationNode* aNode,
                               uint32_t aNumberOfChannels,
                               uint32_t aLength,
                               float aSampleRate)
    : AudioNodeEngine(aNode)
    , mWriteIndex(0)
    , mNumberOfChannels(aNumberOfChannels)
    , mLength(aLength)
    , mSampleRate(aSampleRate)
    , mBufferAllocated(false)
  {
  }

  // Runs on the rendering thread.  Copies one block of input into mBuffer at
  // mWriteIndex, scaling by the input volume, and sets *aFinished once
  // mLength frames have been recorded.
  void ProcessBlock(AudioNodeStream* aStream,
                    GraphTime aFrom,
                    const AudioBlock& aInput,
                    AudioBlock* aOutput,
                    bool* aFinished) override
  {
    // Do this just for the sake of political correctness; this output
    // will not go anywhere.
    *aOutput = aInput;

    // The output buffer is allocated lazily, on the rendering thread, when
    // non-null input is received.
    if (!mBufferAllocated && !aInput.IsNull()) {
      // These allocations might fail if content provides a huge number of
      // channels or size, but it's OK since we'll deal with the failure
      // gracefully.
      mBuffer = ThreadSharedFloatArrayBufferList::
        Create(mNumberOfChannels, mLength, fallible);
      if (mBuffer && mWriteIndex) {
        // Zero leading for any null chunks that were skipped.
        for (uint32_t i = 0; i < mNumberOfChannels; ++i) {
          float* channelData = mBuffer->GetDataForWrite(i);
          PodZero(channelData, mWriteIndex);
        }
      }

      // Set even when allocation failed, so we don't retry on every block.
      mBufferAllocated = true;
    }

    // Skip copying if there is no buffer.
    uint32_t outputChannelCount = mBuffer ? mNumberOfChannels : 0;

    // Record our input buffer
    MOZ_ASSERT(mWriteIndex < mLength, "How did this happen?");
    // Clamp the copy to the remaining space in the recording buffer.
    const uint32_t duration = std::min(WEBAUDIO_BLOCK_SIZE, mLength - mWriteIndex);
    const uint32_t inputChannelCount = aInput.ChannelCount();
    for (uint32_t i = 0; i < outputChannelCount; ++i) {
      float* outputData = mBuffer->GetDataForWrite(i) + mWriteIndex;
      if (aInput.IsNull() || i >= inputChannelCount) {
        // Missing channels are recorded as silence.
        PodZero(outputData, duration);
      } else {
        const float* inputBuffer = static_cast<const float*>(aInput.mChannelData[i]);
        if (duration == WEBAUDIO_BLOCK_SIZE && IS_ALIGNED16(inputBuffer)) {
          // Use the optimized version of the copy with scale operation
          AudioBlockCopyChannelWithScale(inputBuffer, aInput.mVolume,
                                         outputData);
        } else {
          if (aInput.mVolume == 1.0f) {
            // Unity gain: plain copy.
            PodCopy(outputData, inputBuffer, duration);
          } else {
            for (uint32_t j = 0; j < duration; ++j) {
              outputData[j] = aInput.mVolume * inputBuffer[j];
            }
          }
        }
      }
    }
    mWriteIndex += duration;

    if (mWriteIndex >= mLength) {
      NS_ASSERTION(mWriteIndex == mLength, "Overshot length");
      // Go to finished state. When the graph's current time eventually reaches
      // the end of the stream, then the main thread will be notified and we'll
      // shut down the AudioContext.
      *aFinished = true;
    }
  }

  bool IsActive() const override
  {
    // Keep processing to track stream time, which is used for all timelines
    // associated with the same AudioContext.
    return true;
  }


  // Main-thread runnable that fires the OfflineAudioCompletionEvent carrying
  // the rendered buffer.
  class OnCompleteTask final : public Runnable
  {
  public:
    OnCompleteTask(AudioContext* aAudioContext, AudioBuffer* aRenderedBuffer)
      : Runnable("dom::OfflineDestinationNodeEngine::OnCompleteTask")
      , mAudioContext(aAudioContext)
      , mRenderedBuffer(aRenderedBuffer)
    {}

    NS_IMETHOD Run() override
    {
      OfflineAudioCompletionEventInit param;
      param.mRenderedBuffer = mRenderedBuffer;

      RefPtr<OfflineAudioCompletionEvent> event =
        OfflineAudioCompletionEvent::Constructor(mAudioContext,
                                                 NS_LITERAL_STRING("complete"),
                                                 param);
      mAudioContext->DispatchTrustedEvent(event);

      return NS_OK;
    }
  private:
    RefPtr<AudioContext> mAudioContext;
    RefPtr<AudioBuffer> mRenderedBuffer;
  };

  // Called on the main thread once rendering has finished: shuts down the
  // context, wraps mBuffer in an AudioBuffer, resolves the promise, and
  // queues the "complete" event.
  void FireOfflineCompletionEvent(AudioDestinationNode* aNode)
  {
    AudioContext* context = aNode->Context();
    context->Shutdown();
    // Shutdown drops self reference, but the context is still referenced by aNode,
    // which is strongly referenced by the runnable that called
    // AudioDestinationNode::FireOfflineCompletionEvent.

    // Create the input buffer
    ErrorResult rv;
    // Note: mBuffer.forget() hands buffer ownership to the AudioBuffer, so
    // this engine no longer holds the recording after this point.
    RefPtr<AudioBuffer> renderedBuffer =
      AudioBuffer::Create(context->GetOwner(), mNumberOfChannels, mLength,
                          mSampleRate, mBuffer.forget(), rv);
    if (rv.Failed()) {
      rv.SuppressException();
      return;
    }

    aNode->ResolvePromise(renderedBuffer);

    context->Dispatch(do_AddRef(new OnCompleteTask(context, renderedBuffer)));

    context->OnStateChanged(nullptr, AudioContextState::Closed);
  }

  size_t SizeOfExcludingThis(MallocSizeOf aMallocSizeOf) const override
  {
    size_t amount = AudioNodeEngine::SizeOfExcludingThis(aMallocSizeOf);
    if (mBuffer) {
      amount += mBuffer->SizeOfIncludingThis(aMallocSizeOf);
    }
    return amount;
  }

  size_t SizeOfIncludingThis(MallocSizeOf aMallocSizeOf) const override
  {
    return aMallocSizeOf(this) + SizeOfExcludingThis(aMallocSizeOf);
  }

private:
  // The input to the destination node is recorded in mBuffer.
  // When this buffer fills up with mLength frames, the buffered input is sent
  // to the main thread in order to dispatch OfflineAudioCompletionEvent.
  RefPtr<ThreadSharedFloatArrayBufferList> mBuffer;
  // An index representing the next offset in mBuffer to be written to.
  uint32_t mWriteIndex;
  uint32_t mNumberOfChannels;
  // How many frames the OfflineAudioContext intends to produce.
  uint32_t mLength;
  float mSampleRate;
  // True once we've attempted the lazy allocation of mBuffer (even if it
  // failed), so the attempt is made at most once.
  bool mBufferAllocated;
};
203 :
204 0 : class InputMutedRunnable final : public Runnable
205 : {
206 : public:
207 0 : InputMutedRunnable(AudioNodeStream* aStream, bool aInputMuted)
208 0 : : Runnable("dom::InputMutedRunnable")
209 : , mStream(aStream)
210 0 : , mInputMuted(aInputMuted)
211 : {
212 0 : }
213 :
214 0 : NS_IMETHOD Run() override
215 : {
216 0 : MOZ_ASSERT(NS_IsMainThread());
217 0 : RefPtr<AudioNode> node = mStream->Engine()->NodeMainThread();
218 :
219 0 : if (node) {
220 : RefPtr<AudioDestinationNode> destinationNode =
221 0 : static_cast<AudioDestinationNode*>(node.get());
222 0 : destinationNode->InputMuted(mInputMuted);
223 : }
224 0 : return NS_OK;
225 : }
226 :
227 : private:
228 : RefPtr<AudioNodeStream> mStream;
229 : bool mInputMuted;
230 : };
231 :
// Engine for a realtime AudioContext's destination node.  Applies the
// node-level volume to the output and watches the input's muted state,
// notifying the main thread (via InputMutedRunnable) whenever it changes so
// audio-channel bookkeeping can follow.
class DestinationNodeEngine final : public AudioNodeEngine
{
public:
  explicit DestinationNodeEngine(AudioDestinationNode* aNode)
    : AudioNodeEngine(aNode)
    , mVolume(1.0f)
    , mLastInputMuted(true)
    , mSuspended(false)
  {
    MOZ_ASSERT(aNode);
  }

  void ProcessBlock(AudioNodeStream* aStream,
                    GraphTime aFrom,
                    const AudioBlock& aInput,
                    AudioBlock* aOutput,
                    bool* aFinished) override
  {
    // Pass the input through, scaled by the current volume (0 when muted).
    *aOutput = aInput;
    aOutput->mVolume *= mVolume;

    // While suspended we don't report muted-state transitions.
    if (mSuspended) {
      return;
    }

    bool newInputMuted = aInput.IsNull() || aInput.IsMuted();
    if (newInputMuted != mLastInputMuted) {
      mLastInputMuted = newInputMuted;

      // Tell the main thread about the transition after the next stream
      // state update.
      RefPtr<InputMutedRunnable> runnable =
        new InputMutedRunnable(aStream, newInputMuted);
      aStream->Graph()->DispatchToMainThreadAfterStreamStateUpdate(
        runnable.forget());
    }
  }

  bool IsActive() const override
  {
    // Keep processing to track stream time, which is used for all timelines
    // associated with the same AudioContext. If there are no other engines
    // for the AudioContext, then this could return false to suspend the
    // stream, but the stream is blocked anyway through
    // AudioDestinationNode::SetIsOnlyNodeForContext().
    return true;
  }

  void SetDoubleParameter(uint32_t aIndex, double aParam) override
  {
    if (aIndex == VOLUME) {
      mVolume = aParam;
    }
  }

  void SetInt32Parameter(uint32_t aIndex, int32_t aParam) override
  {
    if (aIndex == SUSPENDED) {
      mSuspended = !!aParam;
      if (mSuspended) {
        // Treat the input as muted while suspended so that resuming
        // re-reports the real state.
        mLastInputMuted = true;
      }
    }
  }

  enum Parameters {
    VOLUME,
    SUSPENDED,
  };

  size_t SizeOfIncludingThis(MallocSizeOf aMallocSizeOf) const override
  {
    return aMallocSizeOf(this) + SizeOfExcludingThis(aMallocSizeOf);
  }

private:
  // Output gain applied to every block; set via SetDoubleParameter(VOLUME).
  float mVolume;
  // Last muted state reported to the main thread (starts as muted).
  bool mLastInputMuted;
  // Set via SetInt32Parameter(SUSPENDED); suppresses mute notifications.
  bool mSuspended;
};
310 :
// XPCOM cycle-collection / refcounting boilerplate.  The audio-channel agent
// and the offline-rendering promise are traversed because both can form
// cycles back to this node.
NS_IMPL_CYCLE_COLLECTION_INHERITED(AudioDestinationNode, AudioNode,
                                   mAudioChannelAgent,
                                   mOfflineRenderingPromise)

NS_INTERFACE_MAP_BEGIN_CYCLE_COLLECTION_INHERITED(AudioDestinationNode)
  NS_INTERFACE_MAP_ENTRY(nsIAudioChannelAgentCallback)
NS_INTERFACE_MAP_END_INHERITING(AudioNode)

NS_IMPL_ADDREF_INHERITED(AudioDestinationNode, AudioNode)
NS_IMPL_RELEASE_INHERITED(AudioDestinationNode, AudioNode)
321 :
// Constructs the destination node for either a realtime or an offline
// context.  Offline contexts get a non-realtime graph plus the recording
// engine; realtime contexts share the audio-thread graph and get the
// volume/mute-tracking engine.
AudioDestinationNode::AudioDestinationNode(AudioContext* aContext,
                                           bool aIsOffline,
                                           AudioChannel aChannel,
                                           uint32_t aNumberOfChannels,
                                           uint32_t aLength, float aSampleRate)
  : AudioNode(aContext, aIsOffline ? aNumberOfChannels : 2,
              ChannelCountMode::Explicit, ChannelInterpretation::Speakers)
  , mFramesToProduce(aLength)
  , mAudioChannel(AudioChannel::Normal)
  , mIsOffline(aIsOffline)
  , mAudioChannelSuspended(false)
  , mCaptured(false)
  , mAudible(AudioChannelService::AudibleState::eAudible)
{
  nsPIDOMWindowInner* window = aContext->GetParentObject();
  MediaStreamGraph* graph =
    aIsOffline
      ? MediaStreamGraph::CreateNonRealtimeInstance(aSampleRate, window)
      : MediaStreamGraph::GetInstance(
          MediaStreamGraph::AUDIO_THREAD_DRIVER, aChannel, window);
  // The stream takes ownership of the engine.
  AudioNodeEngine* engine = aIsOffline ?
                            new OfflineDestinationNodeEngine(this, aNumberOfChannels,
                                                             aLength, aSampleRate) :
                            static_cast<AudioNodeEngine*>(new DestinationNodeEngine(this));

  AudioNodeStream::Flags flags =
    AudioNodeStream::NEED_MAIN_THREAD_CURRENT_TIME |
    AudioNodeStream::NEED_MAIN_THREAD_FINISHED |
    AudioNodeStream::EXTERNAL_OUTPUT;
  mStream = AudioNodeStream::Create(aContext, engine, flags, graph);
  mStream->AddMainThreadListener(this);
  // Route our output to the audio hardware, keyed by gWebAudioOutputKey.
  mStream->AddAudioOutput(&gWebAudioOutputKey);

  if (!aIsOffline) {
    graph->NotifyWhenGraphStarted(mStream);
  }

  if (aChannel != AudioChannel::Normal) {
    ErrorResult rv;
    SetMozAudioChannelType(aChannel, rv);
  }
}
364 :
365 0 : AudioDestinationNode::~AudioDestinationNode()
366 : {
367 0 : }
368 :
369 : size_t
370 0 : AudioDestinationNode::SizeOfExcludingThis(MallocSizeOf aMallocSizeOf) const
371 : {
372 0 : size_t amount = AudioNode::SizeOfExcludingThis(aMallocSizeOf);
373 : // Might be useful in the future:
374 : // - mAudioChannelAgent
375 0 : return amount;
376 : }
377 :
378 : size_t
379 0 : AudioDestinationNode::SizeOfIncludingThis(MallocSizeOf aMallocSizeOf) const
380 : {
381 0 : return aMallocSizeOf(this) + SizeOfExcludingThis(aMallocSizeOf);
382 : }
383 :
384 : void
385 0 : AudioDestinationNode::DestroyAudioChannelAgent()
386 : {
387 0 : if (mAudioChannelAgent && !Context()->IsOffline()) {
388 0 : mAudioChannelAgent->NotifyStoppedPlaying();
389 0 : mAudioChannelAgent = nullptr;
390 : // Reset the state, and it would always be regard as audible.
391 0 : mAudible = AudioChannelService::AudibleState::eAudible;
392 : }
393 0 : }
394 :
395 : void
396 0 : AudioDestinationNode::DestroyMediaStream()
397 : {
398 0 : DestroyAudioChannelAgent();
399 :
400 0 : if (!mStream)
401 0 : return;
402 :
403 0 : mStream->RemoveMainThreadListener(this);
404 0 : MediaStreamGraph* graph = mStream->Graph();
405 0 : if (graph->IsNonRealtime()) {
406 0 : MediaStreamGraph::DestroyNonRealtimeInstance(graph);
407 : }
408 0 : AudioNode::DestroyMediaStream();
409 : }
410 :
411 : void
412 0 : AudioDestinationNode::NotifyMainThreadStreamFinished()
413 : {
414 0 : MOZ_ASSERT(mStream->IsFinished());
415 :
416 0 : if (mIsOffline) {
417 0 : NS_DispatchToCurrentThread(
418 0 : NewRunnableMethod("dom::AudioDestinationNode::FireOfflineCompletionEvent",
419 : this,
420 0 : &AudioDestinationNode::FireOfflineCompletionEvent));
421 : }
422 0 : }
423 :
424 : void
425 0 : AudioDestinationNode::FireOfflineCompletionEvent()
426 : {
427 : OfflineDestinationNodeEngine* engine =
428 0 : static_cast<OfflineDestinationNodeEngine*>(Stream()->Engine());
429 0 : engine->FireOfflineCompletionEvent(this);
430 0 : }
431 :
// Resolves the OfflineAudioContext.startRendering() promise with the
// rendered buffer.  Called from the offline engine on the main thread.
// NOTE(review): mOfflineRenderingPromise is dereferenced unconditionally —
// presumably StartRendering() always runs first and sets it; confirm no
// other path reaches here with a null promise.
void
AudioDestinationNode::ResolvePromise(AudioBuffer* aRenderedBuffer)
{
  MOZ_ASSERT(NS_IsMainThread());
  MOZ_ASSERT(mIsOffline);
  mOfflineRenderingPromise->MaybeResolve(aRenderedBuffer);
}
439 :
440 : uint32_t
441 0 : AudioDestinationNode::MaxChannelCount() const
442 : {
443 0 : return Context()->MaxChannelCount();
444 : }
445 :
446 : void
447 0 : AudioDestinationNode::SetChannelCount(uint32_t aChannelCount, ErrorResult& aRv)
448 : {
449 0 : if (aChannelCount > MaxChannelCount()) {
450 0 : aRv.Throw(NS_ERROR_DOM_INDEX_SIZE_ERR);
451 0 : return;
452 : }
453 :
454 0 : AudioNode::SetChannelCount(aChannelCount, aRv);
455 : }
456 :
457 : void
458 0 : AudioDestinationNode::Mute()
459 : {
460 0 : MOZ_ASSERT(Context() && !Context()->IsOffline());
461 0 : SendDoubleParameterToStream(DestinationNodeEngine::VOLUME, 0.0f);
462 0 : }
463 :
464 : void
465 0 : AudioDestinationNode::Unmute()
466 : {
467 0 : MOZ_ASSERT(Context() && !Context()->IsOffline());
468 0 : SendDoubleParameterToStream(DestinationNodeEngine::VOLUME, 1.0f);
469 0 : }
470 :
471 : void
472 0 : AudioDestinationNode::Suspend()
473 : {
474 0 : DestroyAudioChannelAgent();
475 0 : SendInt32ParameterToStream(DestinationNodeEngine::SUSPENDED, 1);
476 0 : }
477 :
478 : void
479 0 : AudioDestinationNode::Resume()
480 : {
481 0 : CreateAudioChannelAgent();
482 0 : SendInt32ParameterToStream(DestinationNodeEngine::SUSPENDED, 0);
483 0 : }
484 :
// Tears down the offline rendering graph once rendering is complete.
// Dropping mOfflineRenderingRef releases the self-reference taken in
// StartRendering(), so this must be the last statement — the node may be
// deleted as a consequence.
void
AudioDestinationNode::OfflineShutdown()
{
  MOZ_ASSERT(Context() && Context()->IsOffline(),
             "Should only be called on a valid OfflineAudioContext");

  MediaStreamGraph::DestroyNonRealtimeInstance(mStream->Graph());
  mOfflineRenderingRef.Drop(this);
}
494 :
// Standard WebIDL binding hook: wraps this node as a JS object.
JSObject*
AudioDestinationNode::WrapObject(JSContext* aCx, JS::Handle<JSObject*> aGivenProto)
{
  return AudioDestinationNodeBinding::Wrap(aCx, this, aGivenProto);
}
500 :
// Kicks off offline rendering.  Takes a self-reference (released in
// OfflineShutdown) so the node survives even if content drops its reference
// before rendering finishes; aPromise is resolved in ResolvePromise().
void
AudioDestinationNode::StartRendering(Promise* aPromise)
{
  mOfflineRenderingPromise = aPromise;
  mOfflineRenderingRef.Take(this);
  mStream->Graph()->StartNonRealtimeProcessing(mFramesToProduce);
}
508 :
// nsIAudioChannelAgentCallback: the owning window's volume or muted state
// changed.  Applies the effective volume to our hardware output and updates
// audibility bookkeeping with the audio-channel service.
// NOTE(review): mAudioChannelAgent is dereferenced without a null check —
// presumably this callback only fires while the agent is alive; confirm.
NS_IMETHODIMP
AudioDestinationNode::WindowVolumeChanged(float aVolume, bool aMuted)
{
  if (!mStream) {
    return NS_OK;
  }

  MOZ_LOG(AudioChannelService::GetAudioChannelLog(), LogLevel::Debug,
          ("AudioDestinationNode, WindowVolumeChanged, "
           "this = %p, aVolume = %f, aMuted = %s\n",
           this, aVolume, aMuted ? "true" : "false"));

  // Muted overrides the window volume entirely.
  float volume = aMuted ? 0.0 : aVolume;
  mStream->SetAudioOutputVolume(&gWebAudioOutputKey, volume);

  AudioChannelService::AudibleState audible = volume > 0.0 ?
    AudioChannelService::AudibleState::eAudible :
    AudioChannelService::AudibleState::eNotAudible;
  if (mAudible != audible) {
    mAudible = audible;
    mAudioChannelAgent->NotifyStartedAudible(mAudible,
                                             AudioChannelService::AudibleChangedReasons::eVolumeChanged);
  }
  return NS_OK;
}
534 :
// nsIAudioChannelAgentCallback: the owning window's suspend state changed.
// Enables/disables our audio track accordingly and reports the resulting
// audibility to the audio-channel service.
// NOTE(review): like WindowVolumeChanged, this dereferences
// mAudioChannelAgent without a null check — assumed live during callbacks.
NS_IMETHODIMP
AudioDestinationNode::WindowSuspendChanged(nsSuspendedTypes aSuspend)
{
  if (!mStream) {
    return NS_OK;
  }

  bool suspended = (aSuspend != nsISuspendedTypes::NONE_SUSPENDED);
  if (mAudioChannelSuspended == suspended) {
    // No transition; nothing to do.
    return NS_OK;
  }

  MOZ_LOG(AudioChannelService::GetAudioChannelLog(), LogLevel::Debug,
          ("AudioDestinationNode, WindowSuspendChanged, "
           "this = %p, aSuspend = %s\n", this, SuspendTypeToStr(aSuspend)));

  mAudioChannelSuspended = suspended;

  // Silence (not just pause) the track while suspended.
  DisabledTrackMode disabledMode = suspended ? DisabledTrackMode::SILENCE_BLACK
                                             : DisabledTrackMode::ENABLED;
  mStream->SetTrackEnabled(AudioNodeStream::AUDIO_TRACK, disabledMode);

  AudioChannelService::AudibleState audible =
    aSuspend == nsISuspendedTypes::NONE_SUSPENDED ?
      AudioChannelService::AudibleState::eAudible :
      AudioChannelService::AudibleState::eNotAudible;
  if (mAudible != audible) {
    mAudible = audible;
    mAudioChannelAgent->NotifyStartedAudible(audible,
                                             AudioChannelService::AudibleChangedReasons::ePauseStateChanged);
  }
  return NS_OK;
}
568 :
569 : NS_IMETHODIMP
570 0 : AudioDestinationNode::WindowAudioCaptureChanged(bool aCapture)
571 : {
572 0 : MOZ_ASSERT(mAudioChannelAgent);
573 :
574 0 : if (!mStream || Context()->IsOffline()) {
575 0 : return NS_OK;
576 : }
577 :
578 0 : nsCOMPtr<nsPIDOMWindowInner> ownerWindow = GetOwner();
579 0 : if (!ownerWindow) {
580 0 : return NS_OK;
581 : }
582 :
583 0 : if (aCapture != mCaptured) {
584 0 : if (aCapture) {
585 0 : nsCOMPtr<nsPIDOMWindowInner> window = Context()->GetParentObject();
586 0 : uint64_t id = window->WindowID();
587 : mCaptureStreamPort =
588 0 : mStream->Graph()->ConnectToCaptureStream(id, mStream);
589 : } else {
590 0 : mCaptureStreamPort->Destroy();
591 : }
592 0 : mCaptured = aCapture;
593 : }
594 :
595 0 : return NS_OK;
596 : }
597 :
// Accessor for the currently configured audio channel type.
AudioChannel
AudioDestinationNode::MozAudioChannelType() const
{
  return mAudioChannel;
}
603 :
// Changes the audio channel type (B2G-era mozAudioChannel API).  Throws
// InvalidStateError for offline contexts; otherwise the change is applied
// only if content holds the matching "audio-channel-*" permission.
void
AudioDestinationNode::SetMozAudioChannelType(AudioChannel aValue, ErrorResult& aRv)
{
  if (Context()->IsOffline()) {
    aRv.Throw(NS_ERROR_DOM_INVALID_STATE_ERR);
    return;
  }

  if (aValue != mAudioChannel &&
      CheckAudioChannelPermissions(aValue)) {
    mAudioChannel = aValue;

    if (mStream) {
      mStream->SetAudioChannelType(mAudioChannel);
    }

    // Recreate the agent so it registers under the new channel type.
    if (mAudioChannelAgent) {
      CreateAudioChannelAgent();
    }
  }
}
625 :
626 : bool
627 0 : AudioDestinationNode::CheckAudioChannelPermissions(AudioChannel aValue)
628 : {
629 : // Only normal channel doesn't need permission.
630 0 : if (aValue == AudioChannel::Normal) {
631 0 : return true;
632 : }
633 :
634 : // Maybe this audio channel is equal to the default one.
635 0 : if (aValue == AudioChannelService::GetDefaultAudioChannel()) {
636 0 : return true;
637 : }
638 :
639 : nsCOMPtr<nsIPermissionManager> permissionManager =
640 0 : services::GetPermissionManager();
641 0 : if (!permissionManager) {
642 0 : return false;
643 : }
644 :
645 0 : nsCOMPtr<nsIScriptObjectPrincipal> sop = do_QueryInterface(GetOwner());
646 0 : NS_ASSERTION(sop, "Window didn't QI to nsIScriptObjectPrincipal!");
647 0 : nsCOMPtr<nsIPrincipal> principal = sop->GetPrincipal();
648 :
649 0 : uint32_t perm = nsIPermissionManager::UNKNOWN_ACTION;
650 :
651 0 : nsCString channel;
652 0 : channel.AssignASCII(AudioChannelValues::strings[uint32_t(aValue)].value,
653 0 : AudioChannelValues::strings[uint32_t(aValue)].length);
654 0 : permissionManager->TestExactPermissionFromPrincipal(principal,
655 0 : nsCString(NS_LITERAL_CSTRING("audio-channel-") + channel).get(),
656 0 : &perm);
657 :
658 0 : return perm == nsIPermissionManager::ALLOW_ACTION;
659 : }
660 :
// Creates (or recreates) the audio-channel agent that connects this node to
// the per-window audio-channel service.  No-op for offline contexts.
// NOTE(review): if InitWithWeakCallback fails, mAudioChannelAgent stays set
// to the half-initialized agent; callers checking mAudioChannelAgent for
// truthiness will see it as usable — verify whether that is intended.
nsresult
AudioDestinationNode::CreateAudioChannelAgent()
{
  if (mIsOffline) {
    return NS_OK;
  }

  nsresult rv = NS_OK;
  // If we're re-registering (e.g. after a channel-type change), tell the
  // service the old registration stopped first.
  if (mAudioChannelAgent) {
    rv = mAudioChannelAgent->NotifyStoppedPlaying();
    if (NS_WARN_IF(NS_FAILED(rv))) {
      return rv;
    }
  }

  mAudioChannelAgent = new AudioChannelAgent();
  rv = mAudioChannelAgent->InitWithWeakCallback(GetOwner(),
                                                static_cast<int32_t>(mAudioChannel),
                                                this);
  if (NS_WARN_IF(NS_FAILED(rv))) {
    return rv;
  }

  return NS_OK;
}
686 :
687 : void
688 0 : AudioDestinationNode::InputMuted(bool aMuted)
689 : {
690 0 : MOZ_ASSERT(Context() && !Context()->IsOffline());
691 :
692 0 : if (!mAudioChannelAgent) {
693 0 : if (aMuted) {
694 0 : return;
695 : }
696 0 : CreateAudioChannelAgent();
697 : }
698 :
699 0 : if (aMuted) {
700 0 : mAudioChannelAgent->NotifyStoppedPlaying();
701 : // Reset the state, and it would always be regard as audible.
702 0 : mAudible = AudioChannelService::AudibleState::eAudible;
703 0 : return;
704 : }
705 :
706 0 : AudioPlaybackConfig config;
707 0 : nsresult rv = mAudioChannelAgent->NotifyStartedPlaying(&config,
708 0 : mAudible);
709 0 : if (NS_WARN_IF(NS_FAILED(rv))) {
710 0 : return;
711 : }
712 :
713 0 : WindowVolumeChanged(config.mVolume, config.mMuted);
714 0 : WindowSuspendChanged(config.mSuspend);
715 : }
716 :
717 : } // namespace dom
718 : } // namespace mozilla
|