LCOV - code coverage report
Current view: top level - dom/media/webrtc - MediaEngineWebRTC.h (source / functions)
Test:         output.info
Date:         2017-07-14 16:53:18
                     Hit    Total    Coverage
        Lines:         0      191      0.0 %
        Functions:     0       64      0.0 %
Legend: Lines: hit | not hit

          Line data    Source code
       1             : /* This Source Code Form is subject to the terms of the Mozilla Public
       2             :  * License, v. 2.0. If a copy of the MPL was not distributed with this file,
       3             :  * You can obtain one at http://mozilla.org/MPL/2.0/. */
       4             : 
       5             : #ifndef MEDIAENGINEWEBRTC_H_
       6             : #define MEDIAENGINEWEBRTC_H_
       7             : 
       8             : #include "prcvar.h"
       9             : #include "prthread.h"
      10             : #include "nsIThread.h"
      11             : #include "nsIRunnable.h"
      12             : 
      13             : #include "mozilla/dom/File.h"
      14             : #include "mozilla/Mutex.h"
      15             : #include "mozilla/StaticMutex.h"
      16             : #include "mozilla/Monitor.h"
      17             : #include "mozilla/Sprintf.h"
      18             : #include "mozilla/UniquePtr.h"
      19             : #include "nsAutoPtr.h"
      20             : #include "nsCOMPtr.h"
      21             : #include "nsThreadUtils.h"
      22             : #include "DOMMediaStream.h"
      23             : #include "nsDirectoryServiceDefs.h"
      24             : #include "nsComponentManagerUtils.h"
      25             : #include "nsRefPtrHashtable.h"
      26             : 
      27             : #include "ipc/IPCMessageUtils.h"
      28             : #include "VideoUtils.h"
      29             : #include "MediaEngineCameraVideoSource.h"
      30             : #include "VideoSegment.h"
      31             : #include "AudioSegment.h"
      32             : #include "StreamTracks.h"
      33             : #include "MediaStreamGraph.h"
      34             : #include "cubeb/cubeb.h"
      35             : #include "CubebUtils.h"
      36             : #include "AudioPacketizer.h"
      37             : 
      38             : #include "MediaEngineWrapper.h"
      39             : #include "mozilla/dom/MediaStreamTrackBinding.h"
      40             : #include "CamerasChild.h"
      41             : 
      42             : // WebRTC library includes follow
      43             : // Audio Engine
      44             : #include "webrtc/voice_engine/include/voe_base.h"
      45             : #include "webrtc/voice_engine/include/voe_codec.h"
      46             : #include "webrtc/voice_engine/include/voe_hardware.h"
      47             : #include "webrtc/voice_engine/include/voe_network.h"
      48             : #include "webrtc/voice_engine/include/voe_audio_processing.h"
      49             : #include "webrtc/voice_engine/include/voe_volume_control.h"
      50             : #include "webrtc/voice_engine/include/voe_external_media.h"
      52             : #include "webrtc/modules/audio_processing/include/audio_processing.h"
      53             : 
      54             : // Video Engine
      55             : // The FF macro conflicts with an #include of scoped_ptr.h
      56             : #undef FF
      57             : 
      58             : // WebRTC imports
      59             : #include "webrtc/modules/video_capture/video_capture_defines.h"
      60             : 
      61             : #include "NullTransport.h"
      62             : #include "AudioOutputObserver.h"
      63             : 
      64             : namespace mozilla {
      65             : 
      66             : class MediaEngineWebRTCAudioCaptureSource : public MediaEngineAudioSource
      67             : {
      68             : public:
      69             :   NS_DECL_THREADSAFE_ISUPPORTS
      70             : 
      71           0 :   explicit MediaEngineWebRTCAudioCaptureSource(const char* aUuid)
      72           0 :     : MediaEngineAudioSource(kReleased)
      73             :   {
      74           0 :   }
      75             :   void GetName(nsAString& aName) const override;
      76             :   void GetUUID(nsACString& aUUID) const override;
      77           0 :   nsresult Allocate(const dom::MediaTrackConstraints& aConstraints,
      78             :                     const MediaEnginePrefs& aPrefs,
      79             :                     const nsString& aDeviceId,
      80             :                     const mozilla::ipc::PrincipalInfo& aPrincipalInfo,
      81             :                     AllocationHandle** aOutHandle,
      82             :                     const char** aOutBadConstraint) override
      83             :   {
      84             :     // Nothing to do here; everything is managed in MediaManager.cpp
      85           0 :     *aOutHandle = nullptr;
      86           0 :     return NS_OK;
      87             :   }
      88           0 :   nsresult Deallocate(AllocationHandle* aHandle) override
      89             :   {
      90             :     // Nothing to do here; everything is managed in MediaManager.cpp
      91           0 :     MOZ_ASSERT(!aHandle);
      92           0 :     return NS_OK;
      93             :   }
      94             :   nsresult Start(SourceMediaStream* aMediaStream,
      95             :                  TrackID aId,
      96             :                  const PrincipalHandle& aPrincipalHandle) override;
      97             :   nsresult Stop(SourceMediaStream* aMediaStream, TrackID aId) override;
      98             :   nsresult Restart(AllocationHandle* aHandle,
      99             :                    const dom::MediaTrackConstraints& aConstraints,
     100             :                    const MediaEnginePrefs &aPrefs,
     101             :                    const nsString& aDeviceId,
     102             :                    const char** aOutBadConstraint) override;
     103           0 :   void SetDirectListeners(bool aDirect) override
     104           0 :   {}
     105           0 :   void NotifyOutputData(MediaStreamGraph* aGraph,
     106             :                         AudioDataValue* aBuffer, size_t aFrames,
     107             :                         TrackRate aRate, uint32_t aChannels) override
     108           0 :   {}
     109           0 :   void DeviceChanged() override
     110           0 :   {}
     111           0 :   void NotifyInputData(MediaStreamGraph* aGraph,
     112             :                        const AudioDataValue* aBuffer, size_t aFrames,
     113             :                        TrackRate aRate, uint32_t aChannels) override
     114           0 :   {}
     115           0 :   void NotifyPull(MediaStreamGraph* aGraph,
     116             :                   SourceMediaStream* aSource,
     117             :                   TrackID aID,
     118             :                   StreamTime aDesiredTime,
     119             :                   const PrincipalHandle& aPrincipalHandle) override
     120           0 :   {}
     121           0 :   dom::MediaSourceEnum GetMediaSource() const override
     122             :   {
     123           0 :     return dom::MediaSourceEnum::AudioCapture;
     124             :   }
     125           0 :   bool IsFake() override
     126             :   {
     127           0 :     return false;
     128             :   }
     129           0 :   nsresult TakePhoto(MediaEnginePhotoCallback* aCallback) override
     130             :   {
     131           0 :     return NS_ERROR_NOT_IMPLEMENTED;
     132             :   }
     133             :   uint32_t GetBestFitnessDistance(
     134             :     const nsTArray<const NormalizedConstraintSet*>& aConstraintSets,
     135             :     const nsString& aDeviceId) const override;
     136             : 
     137             : protected:
     138           0 :   virtual ~MediaEngineWebRTCAudioCaptureSource() {}
     139             :   nsCString mUUID;
     140             : };
     141             : 
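
The capture source above is deliberately thin: Allocate() and Deallocate() are no-ops because MediaManager.cpp does the real bookkeeping for the "AudioCapture" media source. As a rough illustration of the declared lifecycle, a caller could drive it as sketched below; StartAudioCapture is a hypothetical helper, and every argument object is assumed to be supplied by the caller (in Gecko that caller is MediaManager).

// Hypothetical illustration only; not part of MediaEngineWebRTC.h.
nsresult StartAudioCapture(MediaEngineWebRTCAudioCaptureSource* aSource,
                           const dom::MediaTrackConstraints& aConstraints,
                           const MediaEnginePrefs& aPrefs,
                           const nsString& aDeviceId,
                           const mozilla::ipc::PrincipalInfo& aPrincipalInfo,
                           SourceMediaStream* aStream,
                           TrackID aTrackID,
                           const PrincipalHandle& aPrincipalHandle)
{
  AllocationHandle* handle = nullptr;
  const char* badConstraint = nullptr;
  // For this source Allocate() simply returns NS_OK with a null handle.
  nsresult rv = aSource->Allocate(aConstraints, aPrefs, aDeviceId,
                                  aPrincipalInfo, &handle, &badConstraint);
  if (NS_FAILED(rv)) {
    return rv;
  }
  rv = aSource->Start(aStream, aTrackID, aPrincipalHandle);
  if (NS_FAILED(rv)) {
    aSource->Deallocate(handle);
  }
  return rv;
}
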
     142             : // Small subset of VoEHardware
     143             : class AudioInput
     144             : {
     145             : public:
     146           0 :   explicit AudioInput(webrtc::VoiceEngine* aVoiceEngine) : mVoiceEngine(aVoiceEngine) {};
     147             :   // Threadsafe because it's referenced from a MicrophoneSource, which can
     148             :   // have references to it on other threads.
     149           0 :   NS_INLINE_DECL_THREADSAFE_REFCOUNTING(AudioInput)
     150             : 
     151             :   virtual int GetNumOfRecordingDevices(int& aDevices) = 0;
     152             :   virtual int GetRecordingDeviceName(int aIndex, char (&aStrNameUTF8)[128],
     153             :                                      char aStrGuidUTF8[128]) = 0;
     154             :   virtual int GetRecordingDeviceStatus(bool& aIsAvailable) = 0;
     155             :   virtual void GetChannelCount(uint32_t& aChannels) = 0;
     156             :   virtual int GetMaxAvailableChannels(uint32_t& aChannels) = 0;
     157             :   virtual void StartRecording(SourceMediaStream *aStream, AudioDataListener *aListener) = 0;
     158             :   virtual void StopRecording(SourceMediaStream *aStream) = 0;
     159             :   virtual int SetRecordingDevice(int aIndex) = 0;
     160             :   virtual void SetUserChannelCount(uint32_t aChannels) = 0;
     161             : 
     162             : protected:
     163             :   // Protected destructor, to discourage deletion outside of Release():
     164           0 :   virtual ~AudioInput() {}
     165             : 
     166             :   webrtc::VoiceEngine* mVoiceEngine;
     167             : };
     168             : 
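
AudioInput mirrors only the small slice of VoEHardware that the microphone code needs, so device enumeration and selection look the same whether the backend is cubeb (AudioInputCubeb below) or VoEHardware itself (AudioInputWebRTC). A minimal sketch of that flow, using only the virtual methods declared above; SelectFirstRecordingDevice is a hypothetical helper:

// Hypothetical illustration of driving the AudioInput interface.
bool SelectFirstRecordingDevice(AudioInput* aInput)
{
  int deviceCount = 0;
  if (aInput->GetNumOfRecordingDevices(deviceCount) != 0 || deviceCount < 1) {
    return false;  // enumeration failed or no capture devices present
  }
  char name[128];
  char guid[128];
  if (aInput->GetRecordingDeviceName(0, name, guid) != 0) {
    return false;
  }
  // name/guid could be surfaced in a device picker here.
  return aInput->SetRecordingDevice(0) == 0;
}
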
     169             : class AudioInputCubeb final : public AudioInput
     170             : {
     171             : public:
     172           0 :   explicit AudioInputCubeb(webrtc::VoiceEngine* aVoiceEngine, int aIndex = 0) :
     173           0 :     AudioInput(aVoiceEngine), mSelectedDevice(aIndex), mInUseCount(0)
     174             :   {
     175           0 :     if (!mDeviceIndexes) {
     176           0 :       mDeviceIndexes = new nsTArray<int>;
     177           0 :       mDeviceNames = new nsTArray<nsCString>;
     178           0 :       mDefaultDevice = -1;
     179             :     }
     180           0 :   }
     181             : 
     182           0 :   static void CleanupGlobalData()
     183             :   {
     184           0 :     cubeb_device_collection_destroy(CubebUtils::GetCubebContext(), &mDevices);
     185           0 :     delete mDeviceIndexes;
     186           0 :     mDeviceIndexes = nullptr;
     187           0 :     delete mDeviceNames;
     188           0 :     mDeviceNames = nullptr;
     189           0 :   }
     190             : 
     191           0 :   int GetNumOfRecordingDevices(int& aDevices)
     192             :   {
     193             : #ifdef MOZ_WIDGET_ANDROID
      194             :     // OpenSL ES does not support enumerating devices.
     195             :     aDevices = 1;
     196             : #else
     197           0 :     UpdateDeviceList();
     198           0 :     aDevices = mDeviceIndexes->Length();
     199             : #endif
     200           0 :     return 0;
     201             :   }
     202             : 
     203           0 :   static int32_t DeviceIndex(int aIndex)
     204             :   {
     205             :     // -1 = system default if any
     206           0 :     if (aIndex == -1) {
     207           0 :       if (mDefaultDevice == -1) {
     208           0 :         aIndex = 0;
     209             :       } else {
     210           0 :         aIndex = mDefaultDevice;
     211             :       }
     212             :     }
     213           0 :     MOZ_ASSERT(mDeviceIndexes);
     214           0 :     if (aIndex < 0 || aIndex >= (int) mDeviceIndexes->Length()) {
     215           0 :       return -1;
     216             :     }
     217             :     // Note: if the device is gone, this will be -1
     218           0 :     return (*mDeviceIndexes)[aIndex]; // translate to mDevices index
     219             :   }
     220             : 
     221           0 :   static StaticMutex& Mutex()
     222             :   {
     223           0 :     return sMutex;
     224             :   }
     225             : 
     226           0 :   static bool GetDeviceID(int aDeviceIndex, CubebUtils::AudioDeviceID &aID)
     227             :   {
     228             :     // Assert sMutex is held
     229           0 :     sMutex.AssertCurrentThreadOwns();
     230             : #ifdef MOZ_WIDGET_ANDROID
     231             :     aID = nullptr;
     232             :     return true;
     233             : #else
     234           0 :     int dev_index = DeviceIndex(aDeviceIndex);
     235           0 :     if (dev_index != -1) {
     236           0 :       aID = mDevices.device[dev_index].devid;
     237           0 :       return true;
     238             :     }
     239           0 :     return false;
     240             : #endif
     241             :   }
     242             : 
     243           0 :   int GetRecordingDeviceName(int aIndex, char (&aStrNameUTF8)[128],
     244             :                              char aStrGuidUTF8[128])
     245             :   {
     246             : #ifdef MOZ_WIDGET_ANDROID
     247             :     aStrNameUTF8[0] = '\0';
     248             :     aStrGuidUTF8[0] = '\0';
     249             : #else
     250           0 :     int32_t devindex = DeviceIndex(aIndex);
     251           0 :     if (mDevices.count == 0 || devindex < 0) {
     252           0 :       return 1;
     253             :     }
     254           0 :     SprintfLiteral(aStrNameUTF8, "%s%s", aIndex == -1 ? "default: " : "",
     255           0 :                    mDevices.device[devindex].friendly_name);
     256           0 :     aStrGuidUTF8[0] = '\0';
     257             : #endif
     258           0 :     return 0;
     259             :   }
     260             : 
     261           0 :   int GetRecordingDeviceStatus(bool& aIsAvailable)
     262             :   {
     263             :     // With cubeb, we only expose devices of type CUBEB_DEVICE_TYPE_INPUT,
     264             :     // so unless it was removed, say it's available
     265           0 :     aIsAvailable = true;
     266           0 :     return 0;
     267             :   }
     268             : 
     269           0 :   void GetChannelCount(uint32_t& aChannels)
     270             :   {
     271           0 :     GetUserChannelCount(mSelectedDevice, aChannels);
     272           0 :   }
     273             : 
     274           0 :   static void GetUserChannelCount(int aDeviceIndex, uint32_t& aChannels)
     275             :   {
     276           0 :     aChannels = sUserChannelCount;
     277           0 :   }
     278             : 
     279           0 :   int GetMaxAvailableChannels(uint32_t& aChannels)
     280             :   {
     281           0 :     return GetDeviceMaxChannels(mSelectedDevice, aChannels);
     282             :   }
     283             : 
     284           0 :   static int GetDeviceMaxChannels(int aDeviceIndex, uint32_t& aChannels)
     285             :   {
     286             : #ifdef MOZ_WIDGET_ANDROID
     287             :     aChannels = 1;
     288             : #else
     289           0 :     int32_t devindex = DeviceIndex(aDeviceIndex);
     290           0 :     if (mDevices.count == 0 || devindex < 0) {
     291           0 :       return 1;
     292             :     }
     293           0 :     aChannels = mDevices.device[devindex].max_channels;
     294             : #endif
     295           0 :     return 0;
     296             :   }
     297             : 
     298           0 :   void SetUserChannelCount(uint32_t aChannels)
     299             :   {
     300           0 :     if (GetDeviceMaxChannels(mSelectedDevice, sUserChannelCount)) {
      301           0 :       sUserChannelCount = 1; // on error, fall back to mono capture
     302           0 :       return;
     303             :     }
     304             : 
     305           0 :     if (aChannels && aChannels < sUserChannelCount) {
     306           0 :       sUserChannelCount = aChannels;
     307             :     }
     308             :   }
     309             : 
     310           0 :   void StartRecording(SourceMediaStream *aStream, AudioDataListener *aListener)
     311             :   {
     312             : #ifdef MOZ_WIDGET_ANDROID
     313             :     // OpenSL ES does not support enumerating devices.
     314             :     MOZ_ASSERT(mDevices.count == 0);
     315             : #else
     316           0 :     MOZ_ASSERT(mDevices.count > 0);
     317             : #endif
     318             : 
     319           0 :     if (mInUseCount == 0) {
     320           0 :       ScopedCustomReleasePtr<webrtc::VoEExternalMedia> ptrVoEXMedia;
     321           0 :       ptrVoEXMedia = webrtc::VoEExternalMedia::GetInterface(mVoiceEngine);
     322           0 :       if (ptrVoEXMedia) {
     323           0 :         ptrVoEXMedia->SetExternalRecordingStatus(true);
     324             :       }
     325           0 :       mAnyInUse = true;
     326             :     }
     327           0 :     mInUseCount++;
     328             :     // Always tell the stream we're using it for input
     329           0 :     aStream->OpenAudioInput(mSelectedDevice, aListener);
     330           0 :   }
     331             : 
     332           0 :   void StopRecording(SourceMediaStream *aStream)
     333             :   {
     334           0 :     aStream->CloseAudioInput();
     335           0 :     if (--mInUseCount == 0) {
     336           0 :       mAnyInUse = false;
     337             :     }
     338           0 :   }
     339             : 
     340           0 :   int SetRecordingDevice(int aIndex)
     341             :   {
     342           0 :     mSelectedDevice = aIndex;
     343           0 :     return 0;
     344             :   }
     345             : 
     346             : protected:
     347           0 :   ~AudioInputCubeb() {
     348           0 :     MOZ_RELEASE_ASSERT(mInUseCount == 0);
     349           0 :   }
     350             : 
     351             : private:
     352             :   // It would be better to watch for device-change notifications
     353             :   void UpdateDeviceList();
     354             : 
     355             :   // We keep an array of indexes into the current mDevices list, updated
     356             :   // whenever mDevices is re-enumerated.  Many devices in mDevices won't be
     357             :   // included in the array (wrong type, etc.), and if a device is removed
     358             :   // its entry will map to -1 (opens of such a device need to check for
     359             :   // this, and be careful about threading access).  The mappings need to be
     360             :   // updated on each re-enumeration.
     361             :   int mSelectedDevice;
     362             :   uint32_t mInUseCount;
     363             : 
     364             :   // pointers to avoid static constructors
     365             :   static nsTArray<int>* mDeviceIndexes;
     366             :   static int mDefaultDevice; // -1 == not set
     367             :   static nsTArray<nsCString>* mDeviceNames;
     368             :   static cubeb_device_collection mDevices;
     369             :   static bool mAnyInUse;
     370             :   static StaticMutex sMutex;
     371             :   static uint32_t sUserChannelCount;
     372             : };
     373             : 
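
AudioInputCubeb keeps static tables that map enumeration indexes (with -1 meaning "system default") onto cubeb's device collection, guarded by sMutex. A hedged sketch of resolving a cubeb device ID through the public static helpers follows; ResolveCubebDeviceID is a hypothetical name, and StaticMutexAutoLock is assumed to come from the mozilla/StaticMutex.h header included above.

// Hypothetical illustration only.
static bool ResolveCubebDeviceID(int aIndex, CubebUtils::AudioDeviceID& aOutID)
{
  // GetDeviceID() asserts that the static mutex is held, so take it first.
  StaticMutexAutoLock lock(AudioInputCubeb::Mutex());
  // aIndex == -1 is translated to the default device (or index 0) by
  // DeviceIndex(); out-of-range or removed devices make this return false.
  return AudioInputCubeb::GetDeviceID(aIndex, aOutID);
}
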
     374             : class AudioInputWebRTC final : public AudioInput
     375             : {
     376             : public:
     377           0 :   explicit AudioInputWebRTC(webrtc::VoiceEngine* aVoiceEngine) : AudioInput(aVoiceEngine) {}
     378             : 
     379           0 :   int GetNumOfRecordingDevices(int& aDevices)
     380             :   {
     381           0 :     ScopedCustomReleasePtr<webrtc::VoEHardware> ptrVoEHw;
     382           0 :     ptrVoEHw = webrtc::VoEHardware::GetInterface(mVoiceEngine);
     383           0 :     if (!ptrVoEHw)  {
     384           0 :       return 1;
     385             :     }
     386           0 :     return ptrVoEHw->GetNumOfRecordingDevices(aDevices);
     387             :   }
     388             : 
     389           0 :   int GetRecordingDeviceName(int aIndex, char (&aStrNameUTF8)[128],
     390             :                              char aStrGuidUTF8[128])
     391             :   {
     392           0 :     ScopedCustomReleasePtr<webrtc::VoEHardware> ptrVoEHw;
     393           0 :     ptrVoEHw = webrtc::VoEHardware::GetInterface(mVoiceEngine);
     394           0 :     if (!ptrVoEHw)  {
     395           0 :       return 1;
     396             :     }
     397           0 :     return ptrVoEHw->GetRecordingDeviceName(aIndex, aStrNameUTF8,
     398           0 :                                             aStrGuidUTF8);
     399             :   }
     400             : 
     401           0 :   int GetRecordingDeviceStatus(bool& aIsAvailable)
     402             :   {
     403           0 :     ScopedCustomReleasePtr<webrtc::VoEHardware> ptrVoEHw;
     404           0 :     ptrVoEHw = webrtc::VoEHardware::GetInterface(mVoiceEngine);
     405           0 :     if (!ptrVoEHw)  {
     406           0 :       return 1;
     407             :     }
     408           0 :     ptrVoEHw->GetRecordingDeviceStatus(aIsAvailable);
     409           0 :     return 0;
     410             :   }
     411             : 
     412           0 :   void GetChannelCount(uint32_t& aChannels)
     413             :   {
     414           0 :     aChannels = 1; // default to mono
     415           0 :   }
     416             : 
     417           0 :   int GetMaxAvailableChannels(uint32_t& aChannels)
     418             :   {
     419           0 :     aChannels = 1;
     420           0 :     return 0;
     421             :   }
     422             : 
     423           0 :   void SetUserChannelCount(uint32_t aChannels)
     424           0 :   {}
     425             : 
     426           0 :   void StartRecording(SourceMediaStream *aStream, AudioDataListener *aListener) {}
     427           0 :   void StopRecording(SourceMediaStream *aStream) {}
     428             : 
     429           0 :   int SetRecordingDevice(int aIndex)
     430             :   {
     431           0 :     ScopedCustomReleasePtr<webrtc::VoEHardware> ptrVoEHw;
     432           0 :     ptrVoEHw = webrtc::VoEHardware::GetInterface(mVoiceEngine);
     433           0 :     if (!ptrVoEHw)  {
     434           0 :       return 1;
     435             :     }
     436           0 :     return ptrVoEHw->SetRecordingDevice(aIndex);
     437             :   }
     438             : 
     439             : protected:
     440             :   // Protected destructor, to discourage deletion outside of Release():
     441           0 :   ~AudioInputWebRTC() {}
     442             : };
     443             : 
     444             : class WebRTCAudioDataListener : public AudioDataListener
     445             : {
     446             : protected:
     447             :   // Protected destructor, to discourage deletion outside of Release():
     448           0 :   virtual ~WebRTCAudioDataListener() {}
     449             : 
     450             : public:
     451           0 :   explicit WebRTCAudioDataListener(MediaEngineAudioSource* aAudioSource)
     452           0 :     : mMutex("WebRTCAudioDataListener")
     453           0 :     , mAudioSource(aAudioSource)
     454           0 :   {}
     455             : 
     456             :   // AudioDataListenerInterface methods
     457           0 :   virtual void NotifyOutputData(MediaStreamGraph* aGraph,
     458             :                                 AudioDataValue* aBuffer, size_t aFrames,
     459             :                                 TrackRate aRate, uint32_t aChannels) override
     460             :   {
     461           0 :     MutexAutoLock lock(mMutex);
     462           0 :     if (mAudioSource) {
     463           0 :       mAudioSource->NotifyOutputData(aGraph, aBuffer, aFrames, aRate, aChannels);
     464             :     }
     465           0 :   }
     466           0 :   virtual void NotifyInputData(MediaStreamGraph* aGraph,
     467             :                                const AudioDataValue* aBuffer, size_t aFrames,
     468             :                                TrackRate aRate, uint32_t aChannels) override
     469             :   {
     470           0 :     MutexAutoLock lock(mMutex);
     471           0 :     if (mAudioSource) {
     472           0 :       mAudioSource->NotifyInputData(aGraph, aBuffer, aFrames, aRate, aChannels);
     473             :     }
     474           0 :   }
     475           0 :   virtual void DeviceChanged() override
     476             :   {
     477           0 :     MutexAutoLock lock(mMutex);
     478           0 :     if (mAudioSource) {
     479           0 :       mAudioSource->DeviceChanged();
     480             :     }
     481           0 :   }
     482             : 
     483           0 :   void Shutdown()
     484             :   {
     485           0 :     MutexAutoLock lock(mMutex);
     486           0 :     mAudioSource = nullptr;
     487           0 :   }
     488             : 
     489             : private:
     490             :   Mutex mMutex;
     491             :   RefPtr<MediaEngineAudioSource> mAudioSource;
     492             : };
     493             : 
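
WebRTCAudioDataListener is the bridge between MediaStreamGraph audio callbacks and a microphone source: every callback is forwarded under mMutex, and Shutdown() clears the back-pointer so late callbacks become no-ops. A rough sketch of how a source might attach and detach such a listener; both helper names are hypothetical:

// Hypothetical illustration of attaching and detaching graph audio callbacks.
void AttachMicrophoneListener(AudioInput* aInput,
                              SourceMediaStream* aStream,
                              MediaEngineAudioSource* aSource,
                              RefPtr<WebRTCAudioDataListener>& aOutListener)
{
  aOutListener = new WebRTCAudioDataListener(aSource);
  // The graph driver will now call NotifyInputData()/NotifyOutputData() on
  // the listener, which forwards them to aSource.
  aInput->StartRecording(aStream, aOutListener);
}

void DetachMicrophoneListener(AudioInput* aInput,
                              SourceMediaStream* aStream,
                              WebRTCAudioDataListener* aListener)
{
  aInput->StopRecording(aStream);
  // Sever the link; any callback still in flight sees a null source.
  aListener->Shutdown();
}
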
     494             : class MediaEngineWebRTCMicrophoneSource : public MediaEngineAudioSource,
     495             :                                           public webrtc::VoEMediaProcess
     496             : {
     497             :   typedef MediaEngineAudioSource Super;
     498             : public:
     499             :   MediaEngineWebRTCMicrophoneSource(webrtc::VoiceEngine* aVoiceEnginePtr,
     500             :                                     mozilla::AudioInput* aAudioInput,
     501             :                                     int aIndex,
     502             :                                     const char* name,
     503             :                                     const char* uuid);
     504             : 
     505             :   void GetName(nsAString& aName) const override;
     506             :   void GetUUID(nsACString& aUUID) const override;
     507             : 
     508             :   nsresult Deallocate(AllocationHandle* aHandle) override;
     509             :   nsresult Start(SourceMediaStream* aStream,
     510             :                  TrackID aID,
     511             :                  const PrincipalHandle& aPrincipalHandle) override;
     512             :   nsresult Stop(SourceMediaStream* aSource, TrackID aID) override;
     513             :   nsresult Restart(AllocationHandle* aHandle,
     514             :                    const dom::MediaTrackConstraints& aConstraints,
     515             :                    const MediaEnginePrefs &aPrefs,
     516             :                    const nsString& aDeviceId,
     517             :                    const char** aOutBadConstraint) override;
     518           0 :   void SetDirectListeners(bool aHasDirectListeners) override {};
     519             : 
     520             :   void NotifyPull(MediaStreamGraph* aGraph,
     521             :                   SourceMediaStream* aSource,
     522             :                   TrackID aId,
     523             :                   StreamTime aDesiredTime,
     524             :                   const PrincipalHandle& aPrincipalHandle) override;
     525             : 
     526             :   // AudioDataListenerInterface methods
     527             :   void NotifyOutputData(MediaStreamGraph* aGraph,
     528             :                         AudioDataValue* aBuffer, size_t aFrames,
     529             :                         TrackRate aRate, uint32_t aChannels) override;
     530             :   void NotifyInputData(MediaStreamGraph* aGraph,
     531             :                        const AudioDataValue* aBuffer, size_t aFrames,
     532             :                        TrackRate aRate, uint32_t aChannels) override;
     533             : 
     534             :   void DeviceChanged() override;
     535             : 
     536           0 :   bool IsFake() override {
     537           0 :     return false;
     538             :   }
     539             : 
     540           0 :   dom::MediaSourceEnum GetMediaSource() const override {
     541           0 :     return dom::MediaSourceEnum::Microphone;
     542             :   }
     543             : 
     544           0 :   nsresult TakePhoto(MediaEnginePhotoCallback* aCallback) override
     545             :   {
     546           0 :     return NS_ERROR_NOT_IMPLEMENTED;
     547             :   }
     548             : 
     549             :   uint32_t GetBestFitnessDistance(
     550             :       const nsTArray<const NormalizedConstraintSet*>& aConstraintSets,
     551             :       const nsString& aDeviceId) const override;
     552             : 
     553             :   // VoEMediaProcess.
     554             :   virtual void Process(int channel, webrtc::ProcessingTypes type,
     555             :                        int16_t audio10ms[], size_t length,
     556             :                        int samplingFreq, bool isStereo) override;
     557             : 
     558             :   void Shutdown() override;
     559             : 
     560             :   NS_DECL_THREADSAFE_ISUPPORTS
     561             : 
     562             : protected:
     563           0 :   ~MediaEngineWebRTCMicrophoneSource() {}
     564             : 
     565             : private:
     566             :   nsresult
     567             :   UpdateSingleSource(const AllocationHandle* aHandle,
     568             :                      const NormalizedConstraints& aNetConstraints,
     569             :                      const MediaEnginePrefs& aPrefs,
     570             :                      const nsString& aDeviceId,
     571             :                      const char** aOutBadConstraint) override;
     572             : 
     573             :   void SetLastPrefs(const MediaEnginePrefs& aPrefs);
     574             : 
     575             :   // These allocate/configure and release the channel
     576             :   bool AllocChannel();
     577             :   void FreeChannel();
     578             :   // These start/stop VoEBase and associated interfaces
     579             :   bool InitEngine();
     580             :   void DeInitEngine();
     581             : 
     582             :   // This is true when all processing is disabled, so we can skip
     583             :   // packetization, resampling and other processing passes.
     584           0 :   bool PassThrough() {
     585           0 :     return mSkipProcessing;
     586             :   }
     587             :   template<typename T>
     588             :   void InsertInGraph(const T* aBuffer,
     589             :                      size_t aFrames,
     590             :                      uint32_t aChannels);
     591             : 
     592             :   void PacketizeAndProcess(MediaStreamGraph* aGraph,
     593             :                            const AudioDataValue* aBuffer,
     594             :                            size_t aFrames,
     595             :                            TrackRate aRate,
     596             :                            uint32_t aChannels);
     597             : 
     598             :   webrtc::VoiceEngine* mVoiceEngine;
     599             :   RefPtr<mozilla::AudioInput> mAudioInput;
     600             :   RefPtr<WebRTCAudioDataListener> mListener;
     601             :   RefPtr<AudioOutputObserver> mAudioOutputObserver;
     602             : 
     603             :   // Note: shared across all microphone sources - we don't want to Terminate()
     604             :   // the VoEBase until there are no active captures
     605             :   static int sChannelsOpen;
     606             :   static ScopedCustomReleasePtr<webrtc::VoEBase> mVoEBase;
     607             :   static ScopedCustomReleasePtr<webrtc::VoEExternalMedia> mVoERender;
     608             :   static ScopedCustomReleasePtr<webrtc::VoENetwork> mVoENetwork;
     609             :   static ScopedCustomReleasePtr<webrtc::VoEAudioProcessing> mVoEProcessing;
     610             : 
     611             : 
     612             :   // accessed from the GraphDriver thread except for deletion
     613             :   nsAutoPtr<AudioPacketizer<AudioDataValue, int16_t>> mPacketizer;
     614             :   ScopedCustomReleasePtr<webrtc::VoEExternalMedia> mVoERenderListener;
     615             : 
     616             :   // mMonitor protects mSources[] and mPrincipalHandles[] access/changes, and
     617             :   // transitions of mState from kStarted to kStopped (which are combined with
     618             :   // EndTrack()). mSources[] and mPrincipalHandles[] are accessed from webrtc
     619             :   // threads.
     620             :   Monitor mMonitor;
     621             :   nsTArray<RefPtr<SourceMediaStream>> mSources;
     622             :   nsTArray<PrincipalHandle> mPrincipalHandles; // Maps to mSources.
     623             : 
     624             :   int mCapIndex;
     625             :   int mChannel;
     626             :   MOZ_INIT_OUTSIDE_CTOR TrackID mTrackID;
     627             :   bool mStarted;
     628             : 
     629             :   nsString mDeviceName;
     630             :   nsCString mDeviceUUID;
     631             : 
     632             :   int32_t mSampleFrequency;
     633             :   uint64_t mTotalFrames;
     634             :   uint64_t mLastLogFrames;
     635             :   int32_t mPlayoutDelay;
     636             : 
     637             :   NullTransport *mNullTransport;
     638             : 
     639             :   nsTArray<int16_t> mInputBuffer;
     640             :   // mSkipProcessing is true if none of the processing passes are enabled,
     641             :   // because of prefs or constraints. This allows simply copying the audio into
     642             :   // the MSG, skipping resampling and the whole webrtc.org code.
     643             :   bool mSkipProcessing;
     644             : 
     645             :   // To only update microphone when needed, we keep track of previous settings.
     646             :   MediaEnginePrefs mLastPrefs;
     647             : };
     648             : 
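
The PassThrough()/mSkipProcessing flag above selects between two input paths: copy the callback buffer straight into the graph (InsertInGraph()), or chop it into the 10 ms packets that the webrtc.org processing expects (PacketizeAndProcess()). The self-contained sketch below only mirrors that decision; the two callbacks stand in for those private helpers, and it is not the actual implementation in MediaEngineWebRTCMicrophoneSource.cpp.

// Hypothetical, self-contained sketch of the pass-through decision.
#include <cstddef>
#include <cstdint>
#include <functional>

template <typename Sample>
void RouteMicrophoneInput(bool aPassThrough,
                          const Sample* aBuffer, size_t aFrames,
                          uint32_t aRate, uint32_t aChannels,
                          const std::function<void(const Sample*, size_t)>& aInsertInGraph,
                          const std::function<void(const Sample*, size_t)>& aPacketize)
{
  if (aPassThrough) {
    // No processing requested: hand the interleaved samples to the graph
    // directly, skipping resampling and the webrtc.org passes.
    aInsertInGraph(aBuffer, aFrames * aChannels);
    return;
  }
  // Otherwise feed fixed 10 ms packets, the granularity the webrtc.org
  // audio processing operates on.
  const size_t framesPer10ms = aRate / 100;
  for (size_t offset = 0; offset + framesPer10ms <= aFrames;
       offset += framesPer10ms) {
    aPacketize(aBuffer + offset * aChannels, framesPer10ms * aChannels);
  }
  // (A real implementation buffers any leftover partial packet for the next
  // callback instead of dropping it.)
}
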
     649             : class MediaEngineWebRTC : public MediaEngine
     650             : {
     651             :   typedef MediaEngine Super;
     652             : public:
     653             :   explicit MediaEngineWebRTC(MediaEnginePrefs& aPrefs);
     654             : 
     655             :   virtual void SetFakeDeviceChangeEvents() override;
     656             : 
     657             :   // Clients should ensure they clean up video/audio sources
     658             :   // before invoking Shutdown on this class.
     659             :   void Shutdown() override;
     660             : 
     661             :   // Returns whether the host supports duplex audio stream.
     662             :   bool SupportsDuplex();
     663             : 
     664             :   void EnumerateVideoDevices(dom::MediaSourceEnum,
     665             :                              nsTArray<RefPtr<MediaEngineVideoSource>>*) override;
     666             :   void EnumerateAudioDevices(dom::MediaSourceEnum,
     667             :                              nsTArray<RefPtr<MediaEngineAudioSource>>*) override;
     668             : private:
     669           0 :   ~MediaEngineWebRTC() {}
     670             : 
     671             :   nsCOMPtr<nsIThread> mThread;
     672             : 
     673             :   // gUM runnables can e.g. Enumerate from multiple threads
     674             :   Mutex mMutex;
     675             :   webrtc::VoiceEngine* mVoiceEngine;
     676             :   RefPtr<mozilla::AudioInput> mAudioInput;
     677             :   bool mFullDuplex;
     678             :   bool mHasTabVideoSource;
     679             : 
     680             :   // Store devices we've already seen in a hashtable for quick return.
     681             :   // Maps UUID to MediaEngineSource (one set for audio, one for video).
     682             :   nsRefPtrHashtable<nsStringHashKey, MediaEngineVideoSource> mVideoSources;
     683             :   nsRefPtrHashtable<nsStringHashKey, MediaEngineAudioSource> mAudioSources;
     684             : };
     685             : 
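
MediaEngineWebRTC is the backend that MediaManager asks for concrete capture sources: the Enumerate*Devices() calls fill caller-provided arrays, and sources already seen are returned from the UUID-keyed hashtables. A hedged sketch of listing microphones through this interface; ListMicrophones is a hypothetical helper:

// Hypothetical illustration only.
void ListMicrophones(MediaEngineWebRTC& aEngine)
{
  nsTArray<RefPtr<MediaEngineAudioSource>> sources;
  aEngine.EnumerateAudioDevices(dom::MediaSourceEnum::Microphone, &sources);
  for (const RefPtr<MediaEngineAudioSource>& source : sources) {
    nsAutoString name;
    nsAutoCString uuid;
    source->GetName(name);
    source->GetUUID(uuid);
    // name/uuid would feed a device picker; the engine caches sources by
    // UUID, so a later enumeration returns the same objects.
  }
}
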
     686             : } // namespace mozilla
     687             : 
     688             : #endif /* MEDIAENGINEWEBRTC_H_ */

Generated by: LCOV version 1.13