/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this file,
 * You can obtain one at http://mozilla.org/MPL/2.0/. */

#include "MediaEngineDefault.h"

#include "nsCOMPtr.h"
#include "mozilla/dom/File.h"
#include "mozilla/UniquePtr.h"
#include "nsILocalFile.h"
#include "Layers.h"
#include "ImageContainer.h"
#include "ImageTypes.h"
#include "nsContentUtils.h"
#include "MediaStreamGraph.h"

#include "nsIFilePicker.h"
#include "nsIPrefService.h"
#include "nsIPrefBranch.h"

#ifdef MOZ_WIDGET_ANDROID
#include "nsISupportsUtils.h"
#endif

#ifdef MOZ_WEBRTC
#include "YuvStamper.h"
#endif

#define AUDIO_RATE mozilla::MediaEngine::DEFAULT_SAMPLE_RATE
#define DEFAULT_AUDIO_TIMER_MS 10

namespace mozilla {

using namespace mozilla::gfx;

NS_IMPL_ISUPPORTS(MediaEngineDefaultVideoSource, nsITimerCallback)
/**
 * Default video source.
 */

MediaEngineDefaultVideoSource::MediaEngineDefaultVideoSource()
#ifdef MOZ_WEBRTC
  : MediaEngineCameraVideoSource("FakeVideo.Monitor")
#else
  : MediaEngineVideoSource()
#endif
  , mTimer(nullptr)
  , mMonitor("Fake video")
  , mCb(16), mCr(16)
{
  mImageContainer =
    layers::LayerManager::CreateImageContainer(layers::ImageContainer::ASYNCHRONOUS);
}

MediaEngineDefaultVideoSource::~MediaEngineDefaultVideoSource()
{}

void
MediaEngineDefaultVideoSource::GetName(nsAString& aName) const
{
  aName.AssignLiteral(u"Default Video Device");
  return;
}

void
MediaEngineDefaultVideoSource::GetUUID(nsACString& aUUID) const
{
  aUUID.AssignLiteral("1041FCBD-3F12-4F7B-9E9B-1EC556DD5676");
  return;
}

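// Report how well this fake device matches the supplied constraint sets.
// Only the first set is consulted; without MOZ_WEBRTC every request is a
// perfect match (distance 0).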
uint32_t
MediaEngineDefaultVideoSource::GetBestFitnessDistance(
    const nsTArray<const NormalizedConstraintSet*>& aConstraintSets,
    const nsString& aDeviceId) const
{
  uint32_t distance = 0;
#ifdef MOZ_WEBRTC
  for (const auto* cs : aConstraintSets) {
    distance = GetMinimumFitnessDistance(*cs, aDeviceId);
    break; // distance is read from first entry only
  }
#endif
  return distance;
}

nsresult
MediaEngineDefaultVideoSource::Allocate(const dom::MediaTrackConstraints &aConstraints,
                                        const MediaEnginePrefs &aPrefs,
                                        const nsString& aDeviceId,
                                        const mozilla::ipc::PrincipalInfo& aPrincipalInfo,
                                        AllocationHandle** aOutHandle,
                                        const char** aOutBadConstraint)
{
  if (mState != kReleased) {
    return NS_ERROR_FAILURE;
  }

  FlattenedConstraints c(aConstraints);

  // Mock failure for automated tests.
  if (c.mDeviceId.mIdeal.find(NS_LITERAL_STRING("bad device")) !=
      c.mDeviceId.mIdeal.end()) {
    return NS_ERROR_FAILURE;
  }

  // emulator debug is very, very slow; reduce load on it with smaller/slower fake video
  mOpts = aPrefs;
  mOpts.mWidth = c.mWidth.Get(aPrefs.mWidth ? aPrefs.mWidth :
#ifdef DEBUG
                              MediaEngine::DEFAULT_43_VIDEO_WIDTH/2
#else
                              MediaEngine::DEFAULT_43_VIDEO_WIDTH
#endif
                              );
  mOpts.mHeight = c.mHeight.Get(aPrefs.mHeight ? aPrefs.mHeight :
#ifdef DEBUG
                                MediaEngine::DEFAULT_43_VIDEO_HEIGHT/2
#else
                                MediaEngine::DEFAULT_43_VIDEO_HEIGHT
#endif
                                );
  mState = kAllocated;
  *aOutHandle = nullptr;
  return NS_OK;
}

nsresult
MediaEngineDefaultVideoSource::Deallocate(AllocationHandle* aHandle)
{
  MOZ_ASSERT(!aHandle);
  if (mState != kStopped && mState != kAllocated) {
    return NS_ERROR_FAILURE;
  }
  mState = kReleased;
  mImage = nullptr;
  return NS_OK;
}

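// Fill aData with a single solid-color planar 4:2:0 (I420) frame of the given
// dimensions. The pixel buffer is malloc'ed here; callers must release it with
// ReleaseFrame() once the data has been copied out.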
static void AllocateSolidColorFrame(layers::PlanarYCbCrData& aData,
                                    int aWidth, int aHeight,
                                    int aY, int aCb, int aCr)
{
  MOZ_ASSERT(!(aWidth&1));
  MOZ_ASSERT(!(aHeight&1));
  // Allocate a single frame with a solid color
  int yLen = aWidth*aHeight;
  int cbLen = yLen>>2;
  int crLen = cbLen;
  uint8_t* frame = (uint8_t*) malloc(yLen+cbLen+crLen);
  memset(frame, aY, yLen);
  memset(frame+yLen, aCb, cbLen);
  memset(frame+yLen+cbLen, aCr, crLen);

  aData.mYChannel = frame;
  aData.mYSize = IntSize(aWidth, aHeight);
  aData.mYStride = aWidth;
  aData.mCbCrStride = aWidth>>1;
  aData.mCbChannel = frame + yLen;
  aData.mCrChannel = aData.mCbChannel + cbLen;
  aData.mCbCrSize = IntSize(aWidth>>1, aHeight>>1);
  aData.mPicX = 0;
  aData.mPicY = 0;
  aData.mPicSize = IntSize(aWidth, aHeight);
  aData.mStereoMode = StereoMode::MONO;
}

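// Free the pixel buffer allocated by AllocateSolidColorFrame().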
static void ReleaseFrame(layers::PlanarYCbCrData& aData)
{
  free(aData.mYChannel);
}

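// Add the (queued) video track to the stream and start a repeating timer
// whose callback publishes one fake frame per tick.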
nsresult
MediaEngineDefaultVideoSource::Start(SourceMediaStream* aStream, TrackID aID,
                                     const PrincipalHandle& aPrincipalHandle)
{
  if (mState != kAllocated) {
    return NS_ERROR_FAILURE;
  }

  mTimer = do_CreateInstance(NS_TIMER_CONTRACTID);
  if (!mTimer) {
    return NS_ERROR_FAILURE;
  }

  aStream->AddTrack(aID, 0, new VideoSegment(), SourceMediaStream::ADDTRACK_QUEUED);

  // Remember TrackID so we can end it later
  mTrackID = aID;

  // Start timer for subsequent frames
#if (defined(MOZ_WIDGET_GONK) || defined(MOZ_WIDGET_ANDROID)) && defined(DEBUG)
  // emulator debug is very, very slow and has problems dealing with realtime audio inputs
  mTimer->InitWithCallback(this, (1000 / mOpts.mFPS)*10, nsITimer::TYPE_REPEATING_SLACK);
#else
  mTimer->InitWithCallback(this, 1000 / mOpts.mFPS, nsITimer::TYPE_REPEATING_SLACK);
#endif
  mState = kStarted;

  return NS_OK;
}

nsresult
MediaEngineDefaultVideoSource::Stop(SourceMediaStream *aSource, TrackID aID)
{
  if (mState != kStarted) {
    return NS_ERROR_FAILURE;
  }
  if (!mTimer) {
    return NS_ERROR_FAILURE;
  }

  mTimer->Cancel();
  mTimer = nullptr;

  aSource->EndTrack(aID);

  mState = kStopped;
  mImage = nullptr;
  return NS_OK;
}

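// Restart is a no-op for the fake video source; new constraints are not
// re-applied to an already-allocated device.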
nsresult
MediaEngineDefaultVideoSource::Restart(
    AllocationHandle* aHandle,
    const dom::MediaTrackConstraints& aConstraints,
    const MediaEnginePrefs &aPrefs,
    const nsString& aDeviceId,
    const char** aOutBadConstraint)
{
  return NS_OK;
}

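// nsITimerCallback entry point. Each tick walks the fake frame's color one
// step along a Cb/Cr ramp, renders a solid-color image (with a timestamp
// stamped into the Y plane on MOZ_WEBRTC builds), and swaps it into mImage
// under the monitor for NotifyPull() to pick up.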
NS_IMETHODIMP
MediaEngineDefaultVideoSource::Notify(nsITimer* aTimer)
{
  // Update the target color
  if (mCr <= 16) {
    if (mCb < 240) {
      mCb++;
    } else {
      mCr++;
    }
  } else if (mCb >= 240) {
    if (mCr < 240) {
      mCr++;
    } else {
      mCb--;
    }
  } else if (mCr >= 240) {
    if (mCb > 16) {
      mCb--;
    } else {
      mCr--;
    }
  } else {
    mCr--;
  }

  // Allocate a single solid color image
  RefPtr<layers::PlanarYCbCrImage> ycbcr_image = mImageContainer->CreatePlanarYCbCrImage();
  layers::PlanarYCbCrData data;
  AllocateSolidColorFrame(data, mOpts.mWidth, mOpts.mHeight, 0x80, mCb, mCr);

#ifdef MOZ_WEBRTC
  uint64_t timestamp = PR_Now();
  YuvStamper::Encode(mOpts.mWidth, mOpts.mHeight, mOpts.mWidth,
                     data.mYChannel,
                     reinterpret_cast<unsigned char*>(&timestamp), sizeof(timestamp),
                     0, 0);
#endif

  bool setData = ycbcr_image->CopyData(data);
  MOZ_ASSERT(setData);

  // SetData copies data, so we can free the frame
  ReleaseFrame(data);

  if (!setData) {
    return NS_ERROR_FAILURE;
  }

  MonitorAutoLock lock(mMonitor);

  // implicitly releases last image
  mImage = ycbcr_image.forget();

  return NS_OK;
}

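// Called from the MediaStreamGraph to fill the track up to aDesiredTime.
// Appends the most recently generated frame (or a null image) for however
// much time has not been covered yet.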
void
MediaEngineDefaultVideoSource::NotifyPull(MediaStreamGraph* aGraph,
                                          SourceMediaStream *aSource,
                                          TrackID aID,
                                          StreamTime aDesiredTime,
                                          const PrincipalHandle& aPrincipalHandle)
{
  // AddTrack takes ownership of segment
  VideoSegment segment;
  MonitorAutoLock lock(mMonitor);
  if (mState != kStarted) {
    return;
  }

  // Note: we're not giving up mImage here
  RefPtr<layers::Image> image = mImage;
  StreamTime delta = aDesiredTime - aSource->GetEndOfAppendedData(aID);

  if (delta > 0) {
    // nullptr images are allowed
    IntSize size(image ? mOpts.mWidth : 0, image ? mOpts.mHeight : 0);
    segment.AppendFrame(image.forget(), delta, size, aPrincipalHandle);
    // This can fail if either a) we haven't added the track yet, or b)
    // we've removed or finished the track.
    aSource->AppendToTrack(aID, &segment);
  }
}

// Generates a sine wave at aFrequency Hz by precomputing one period of samples.
class SineWaveGenerator
{
public:
  static const int bytesPerSample = 2;
  static const int millisecondsPerSecond = PR_MSEC_PER_SEC;

  explicit SineWaveGenerator(uint32_t aSampleRate, uint32_t aFrequency) :
    mTotalLength(aSampleRate / aFrequency),
    mReadLength(0) {
    // If we allow arbitrary frequencies, there's no guarantee we won't get rounded here
    // We could include an error term and adjust for it in generation; not worth the trouble
    //MOZ_ASSERT(mTotalLength * aFrequency == aSampleRate);
    mAudioBuffer = MakeUnique<int16_t[]>(mTotalLength);
    for (int i = 0; i < mTotalLength; i++) {
      // Set volume to -20 dB: 32768.0 * 10^(-20/20) = 3276.8
      mAudioBuffer[i] = (3276.8f * sin(2 * M_PI * i / mTotalLength));
    }
  }

  // NOTE: only safely called from a single thread (MSG callback)
  void generate(int16_t* aBuffer, int16_t aLengthInSamples) {
    int16_t remaining = aLengthInSamples;

    while (remaining) {
      int16_t processSamples = 0;

      if (mTotalLength - mReadLength >= remaining) {
        processSamples = remaining;
      } else {
        processSamples = mTotalLength - mReadLength;
      }
      memcpy(aBuffer, &mAudioBuffer[mReadLength], processSamples * bytesPerSample);
      aBuffer += processSamples;
      mReadLength += processSamples;
      remaining -= processSamples;
      if (mReadLength == mTotalLength) {
        mReadLength = 0;
      }
    }
  }

private:
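  // mAudioBuffer holds one full period of the wave (mTotalLength samples);
  // mReadLength is the current read position within that period.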
  UniquePtr<int16_t[]> mAudioBuffer;
  int16_t mTotalLength;
  int16_t mReadLength;
};

/**
 * Default audio source.
 */

NS_IMPL_ISUPPORTS0(MediaEngineDefaultAudioSource)

MediaEngineDefaultAudioSource::MediaEngineDefaultAudioSource()
  : MediaEngineAudioSource(kReleased)
  , mLastNotify(0)
{}

MediaEngineDefaultAudioSource::~MediaEngineDefaultAudioSource()
{}

void
MediaEngineDefaultAudioSource::GetName(nsAString& aName) const
{
  aName.AssignLiteral(u"Default Audio Device");
  return;
}

void
MediaEngineDefaultAudioSource::GetUUID(nsACString& aUUID) const
{
  aUUID.AssignLiteral("B7CBD7C1-53EF-42F9-8353-73F61C70C092");
  return;
}

uint32_t
MediaEngineDefaultAudioSource::GetBestFitnessDistance(
    const nsTArray<const NormalizedConstraintSet*>& aConstraintSets,
    const nsString& aDeviceId) const
{
  uint32_t distance = 0;
#ifdef MOZ_WEBRTC
  for (const auto* cs : aConstraintSets) {
    distance = GetMinimumFitnessDistance(*cs, aDeviceId);
    break; // distance is read from first entry only
  }
#endif
  return distance;
}

nsresult
MediaEngineDefaultAudioSource::Allocate(const dom::MediaTrackConstraints &aConstraints,
                                        const MediaEnginePrefs &aPrefs,
                                        const nsString& aDeviceId,
                                        const mozilla::ipc::PrincipalInfo& aPrincipalInfo,
                                        AllocationHandle** aOutHandle,
                                        const char** aOutBadConstraint)
{
  if (mState != kReleased) {
    return NS_ERROR_FAILURE;
  }

  // Mock failure for automated tests.
  if (aConstraints.mDeviceId.IsString() &&
      aConstraints.mDeviceId.GetAsString().EqualsASCII("bad device")) {
    return NS_ERROR_FAILURE;
  }

  mState = kAllocated;
  // generate a sine wave (1 kHz by default)
  mSineGenerator = new SineWaveGenerator(AUDIO_RATE,
                                         static_cast<uint32_t>(aPrefs.mFreq ? aPrefs.mFreq : 1000));
  *aOutHandle = nullptr;
  return NS_OK;
}

nsresult
MediaEngineDefaultAudioSource::Deallocate(AllocationHandle* aHandle)
{
  MOZ_ASSERT(!aHandle);
  if (mState != kStopped && mState != kAllocated) {
    return NS_ERROR_FAILURE;
  }
  mState = kReleased;
  return NS_OK;
}

nsresult
MediaEngineDefaultAudioSource::Start(SourceMediaStream* aStream, TrackID aID,
                                     const PrincipalHandle& aPrincipalHandle)
{
  if (mState != kAllocated) {
    return NS_ERROR_FAILURE;
  }

  // AddTrack will take ownership of segment
  AudioSegment* segment = new AudioSegment();
  aStream->AddAudioTrack(aID, AUDIO_RATE, 0, segment, SourceMediaStream::ADDTRACK_QUEUED);

  // Remember TrackID so we can finish later
  mTrackID = aID;

  mLastNotify = 0;
  mState = kStarted;
  return NS_OK;
}

nsresult
MediaEngineDefaultAudioSource::Stop(SourceMediaStream *aSource, TrackID aID)
{
  if (mState != kStarted) {
    return NS_ERROR_FAILURE;
  }
  aSource->EndTrack(aID);

  mState = kStopped;
  return NS_OK;
}

nsresult
MediaEngineDefaultAudioSource::Restart(AllocationHandle* aHandle,
                                       const dom::MediaTrackConstraints& aConstraints,
                                       const MediaEnginePrefs &aPrefs,
                                       const nsString& aDeviceId,
                                       const char** aOutBadConstraint)
{
  return NS_OK;
}

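// Generate aSamples samples of the sine wave into a shared buffer and append
// them to aSegment as a single mono int16 channel.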
void
MediaEngineDefaultAudioSource::AppendToSegment(AudioSegment& aSegment,
                                               TrackTicks aSamples,
                                               const PrincipalHandle& aPrincipalHandle)
{
  RefPtr<SharedBuffer> buffer = SharedBuffer::Create(aSamples * sizeof(int16_t));
  int16_t* dest = static_cast<int16_t*>(buffer->Data());

  mSineGenerator->generate(dest, aSamples);
  AutoTArray<const int16_t*,1> channels;
  channels.AppendElement(dest);
  aSegment.AppendFrames(buffer.forget(), channels, aSamples, aPrincipalHandle);
}

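// Called from the MediaStreamGraph to fill the audio track up to
// aDesiredTime. Only the ticks not yet delivered since the last pull are
// generated, which keeps rounding errors from accumulating.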
void
MediaEngineDefaultAudioSource::NotifyPull(MediaStreamGraph* aGraph,
                                          SourceMediaStream *aSource,
                                          TrackID aID,
                                          StreamTime aDesiredTime,
                                          const PrincipalHandle& aPrincipalHandle)
{
  MOZ_ASSERT(aID == mTrackID);
  AudioSegment segment;
  // avoid accumulating rounding errors
  TrackTicks desired = aSource->TimeToTicksRoundUp(AUDIO_RATE, aDesiredTime);
  TrackTicks delta = desired - mLastNotify;
  mLastNotify += delta;
  AppendToSegment(segment, delta, aPrincipalHandle);
  aSource->AppendToTrack(mTrackID, &segment);
}

void
MediaEngineDefault::EnumerateVideoDevices(dom::MediaSourceEnum aMediaSource,
                                          nsTArray<RefPtr<MediaEngineVideoSource> >* aVSources) {
  MutexAutoLock lock(mMutex);

  // only supports camera sources (for now). See Bug 1038241
  if (aMediaSource != dom::MediaSourceEnum::Camera) {
    return;
  }

  // We once had code here to find a VideoSource with the same settings and re-use that.
  // This is no longer possible since the resolution is now set in Allocate().

  RefPtr<MediaEngineVideoSource> newSource = new MediaEngineDefaultVideoSource();
  mVSources.AppendElement(newSource);
  aVSources->AppendElement(newSource);

  return;
}

void
MediaEngineDefault::EnumerateAudioDevices(dom::MediaSourceEnum aMediaSource,
                                          nsTArray<RefPtr<MediaEngineAudioSource> >* aASources) {
  MutexAutoLock lock(mMutex);
  int32_t len = mASources.Length();

  // aMediaSource is ignored for audio devices (for now).

  for (int32_t i = 0; i < len; i++) {
    RefPtr<MediaEngineAudioSource> source = mASources.ElementAt(i);
    if (source->IsAvailable()) {
      aASources->AppendElement(source);
    }
  }

  // All streams are currently busy, just make a new one.
  if (aASources->Length() == 0) {
    RefPtr<MediaEngineAudioSource> newSource =
      new MediaEngineDefaultAudioSource();
    mASources.AppendElement(newSource);
    aASources->AppendElement(newSource);
  }
  return;
}

} // namespace mozilla