LCOV - code coverage report
Current view: top level - media/webrtc/signaling/src/media-conduit - VideoConduit.cpp (source / functions)
Test:       output.info
Date:       2017-07-14 16:53:18
Coverage:                 Hit      Total     Coverage
            Lines:        1        1053      0.1 %
            Functions:    2        102       2.0 %
Legend:     Lines: hit | not hit

          Line data    Source code
       1             : /* This Source Code Form is subject to the terms of the Mozilla Public
       2             :  * License, v. 2.0. If a copy of the MPL was not distributed with this file,
       3             :  * You can obtain one at http://mozilla.org/MPL/2.0/. */
       4             : 
       5             : #include "CSFLog.h"
       6             : #include "nspr.h"
       7             : #include "plstr.h"
       8             : 
       9             : #include "AudioConduit.h"
      10             : #include "VideoConduit.h"
      11             : #include "YuvStamper.h"
      12             : #include "mozilla/TemplateLib.h"
      13             : #include "mozilla/media/MediaUtils.h"
      14             : #include "nsComponentManagerUtils.h"
      15             : #include "nsIPrefBranch.h"
      16             : #include "nsIGfxInfo.h"
      17             : #include "nsIPrefService.h"
      18             : #include "nsServiceManagerUtils.h"
      19             : 
      20             : #include "nsThreadUtils.h"
      21             : 
      22             : #include "pk11pub.h"
      23             : 
      24             : #include "webrtc/common_types.h"
      25             : #include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
      26             : #include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
      27             : #include "webrtc/modules/video_coding/codecs/vp8/include/vp8.h"
      28             : #include "webrtc/modules/video_coding/codecs/vp9/include/vp9.h"
      29             : #include "webrtc/common_video/include/video_frame_buffer.h"
      30             : #include "webrtc/api/video/i420_buffer.h"
      31             : #if defined(MAC_OS_X_VERSION_10_8) && \
      32             :   (MAC_OS_X_VERSION_MIN_REQUIRED >= MAC_OS_X_VERSION_10_8)
      33             : // XXX not available in Mac 10.7 SDK
      34             : #include "webrtc/sdk/objc/Framework/Classes/corevideo_frame_buffer.h"
      35             : #endif
      36             : 
      37             : #include "mozilla/Unused.h"
      38             : 
      39             : #if defined(MOZ_WIDGET_ANDROID)
      40             : #include "AndroidJNIWrapper.h"
      41             : #include "VideoEngine.h"
      42             : #endif
      43             : 
      44             : #include "GmpVideoCodec.h"
      45             : #ifdef MOZ_WEBRTC_OMX
      46             : #include "OMXCodecWrapper.h"
      47             : #include "OMXVideoCodec.h"
      48             : #endif
      49             : 
      50             : #ifdef MOZ_WEBRTC_MEDIACODEC
      51             : #include "MediaCodecVideoCodec.h"
      52             : #endif
      53             : #include "WebrtcGmpVideoCodec.h"
      54             : 
      55             : // for ntohs
      56             : #ifdef _MSC_VER
      57             : #include "Winsock2.h"
      58             : #else
      59             : #include <netinet/in.h>
      60             : #endif
      61             : 
      62             : #include <algorithm>
      63             : #include <math.h>
      64             : #include <cinttypes>
      65             : 
      66             : #define DEFAULT_VIDEO_MAX_FRAMERATE 30
      67             : #define INVALID_RTP_PAYLOAD 255 // valid payload types are 0 to 127
      68             : 
      69             : namespace mozilla {
      70             : 
      71             : static const char* logTag = "WebrtcVideoSessionConduit";
      72             : 
      73             : static const int kNullPayloadType = -1;
      74             : static const char* kUlpFecPayloadName = "ulpfec";
      75             : static const char* kRedPayloadName = "red";
      76             : 
      77             : // Convert (SI) kilobits/sec to (SI) bits/sec
       78             : #define KBPS(kbps) ((kbps) * 1000)
      79             : const uint32_t WebrtcVideoConduit::kDefaultMinBitrate_bps =  KBPS(200);
      80             : const uint32_t WebrtcVideoConduit::kDefaultStartBitrate_bps = KBPS(300);
      81             : const uint32_t WebrtcVideoConduit::kDefaultMaxBitrate_bps = KBPS(2000);
      82             : 
      83             : // 32 bytes is what WebRTC CodecInst expects
      84             : const unsigned int WebrtcVideoConduit::CODEC_PLNAME_SIZE = 32;
      85             : static const int kViEMinCodecBitrate_bps = KBPS(30);
      86             : 
      87             : template<typename T>
      88           0 : T MinIgnoreZero(const T& a, const T& b)
      89             : {
      90           0 :   return std::min(a? a:b, b? b:a);
      91             : }
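
The helper above treats a zero operand as "unset": the bitrate caps computed later in this file pass 0 to mean "no constraint", and MinIgnoreZero then returns the other value. A minimal standalone sketch of that behavior (illustrative only, not part of VideoConduit.cpp):

    #include <algorithm>
    #include <cassert>

    template<typename T>
    T MinIgnoreZero(const T& a, const T& b)
    {
      return std::min(a ? a : b, b ? b : a);
    }

    int main()
    {
      assert(MinIgnoreZero(0u, 2000u) == 2000u);  // 0 means "no cap"; the set value wins
      assert(MinIgnoreZero(500u, 2000u) == 500u); // both set: ordinary minimum
      assert(MinIgnoreZero(0u, 0u) == 0u);        // both unset
      return 0;
    }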
      92             : 
      93             : template <class t>
      94             : static void
      95             : ConstrainPreservingAspectRatioExact(uint32_t max_fs, t* width, t* height)
      96             : {
      97             :   // We could try to pick a better starting divisor, but it won't make any real
      98             :   // performance difference.
      99             :   for (size_t d = 1; d < std::min(*width, *height); ++d) {
     100             :     if ((*width % d) || (*height % d)) {
     101             :       continue; // Not divisible
     102             :     }
     103             : 
     104             :     if (((*width) * (*height)) / (d * d) <= max_fs) {
     105             :       *width /= d;
     106             :       *height /= d;
     107             :       return;
     108             :     }
     109             :   }
     110             : 
     111             :   *width = 0;
     112             :   *height = 0;
     113             : }
     114             : 
     115             : template <class t>
     116             : static void
     117           0 : ConstrainPreservingAspectRatio(uint16_t max_width, uint16_t max_height,
     118             :                                t* width, t* height)
     119             : {
     120           0 :   if (((*width) <= max_width) && ((*height) <= max_height)) {
     121           0 :     return;
     122             :   }
     123             : 
     124           0 :   if ((*width) * max_height > max_width * (*height)) {
     125           0 :     (*height) = max_width * (*height) / (*width);
     126           0 :     (*width) = max_width;
     127             :   } else {
     128           0 :     (*width) = max_height * (*width) / (*height);
     129           0 :     (*height) = max_height;
     130             :   }
     131             : }
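
Worked numbers may make the two helpers above easier to follow: the "Exact" variant only shrinks by an integer common divisor of both dimensions (so every simulcast layer keeps exactly the same aspect ratio), while the non-exact variant scales freely to fit a bounding box. A standalone sketch that copies the two templates and runs them on sample inputs (illustrative only):

    #include <algorithm>
    #include <cstdint>
    #include <cstdio>

    template <class t>
    static void
    ConstrainPreservingAspectRatioExact(uint32_t max_fs, t* width, t* height)
    {
      for (size_t d = 1; d < std::min(*width, *height); ++d) {
        if ((*width % d) || (*height % d)) {
          continue; // d is not a common divisor of both dimensions
        }
        if (((*width) * (*height)) / (d * d) <= max_fs) {
          *width /= d;
          *height /= d;
          return;
        }
      }
      *width = 0;
      *height = 0;
    }

    template <class t>
    static void
    ConstrainPreservingAspectRatio(uint16_t max_width, uint16_t max_height,
                                   t* width, t* height)
    {
      if (((*width) <= max_width) && ((*height) <= max_height)) {
        return;
      }
      if ((*width) * max_height > max_width * (*height)) {
        (*height) = max_width * (*height) / (*width);
        (*width) = max_width;
      } else {
        (*width) = max_height * (*width) / (*height);
        (*height) = max_height;
      }
    }

    int main()
    {
      uint32_t w = 320, h = 240;
      // Exact: d = 1, 2 leave too many pixels, d = 3 is not a common divisor,
      // d = 4 gives 320*240/16 = 4800 <= 4800, so the result is 80x60.
      ConstrainPreservingAspectRatioExact(80 * 60, &w, &h);
      std::printf("exact:   %ux%u\n", w, h);

      uint32_t w2 = 1280, h2 = 720;
      // Non-exact: 1280x720 does not fit 640x480; it is wider than the box allows,
      // so width is pinned to 640 and height becomes 640*720/1280 = 360.
      ConstrainPreservingAspectRatio(640, 480, &w2, &h2);
      std::printf("bounded: %ux%u\n", w2, h2);
      return 0;
    }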
     132             : 
     133             : void
     134           0 : WebrtcVideoConduit::StreamStatistics::Update(const double aFrameRate,
     135             :                                              const double aBitrate)
     136             : {
     137           0 :   mFrameRate.Push(aFrameRate);
     138           0 :   mBitrate.Push(aBitrate);
     139           0 : }
     140             : 
     141             : bool
     142           0 : WebrtcVideoConduit::StreamStatistics::GetVideoStreamStats(
     143             :     double& aOutFrMean, double& aOutFrStdDev, double& aOutBrMean,
     144             :     double& aOutBrStdDev) const
     145             : {
     146           0 :   if (mFrameRate.NumDataValues() && mBitrate.NumDataValues()) {
     147           0 :     aOutFrMean = mFrameRate.Mean();
     148           0 :     aOutFrStdDev = mFrameRate.StandardDeviation();
     149           0 :     aOutBrMean = mBitrate.Mean();
     150           0 :     aOutBrStdDev = mBitrate.StandardDeviation();
     151           0 :     return true;
     152             :   }
     153           0 :   return false;
     154             : }
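
The mFrameRate and mBitrate members used above are running-statistics accumulators; their class is not defined in this file, so only the Push / NumDataValues / Mean / StandardDeviation interface is visible here. A minimal sketch of one way such an accumulator can be written (Welford's online algorithm; an assumed stand-in, not the actual Mozilla class):

    #include <cmath>
    #include <cstddef>

    // Hypothetical running-stats accumulator matching the interface used above.
    class RunningStat {
    public:
      void Push(double x) {
        ++mN;
        const double delta = x - mMean;
        mMean += delta / mN;         // update the running mean
        mM2 += delta * (x - mMean);  // accumulate squared deviations
      }
      std::size_t NumDataValues() const { return mN; }
      double Mean() const { return mN ? mMean : 0.0; }
      double StandardDeviation() const {
        return mN > 1 ? std::sqrt(mM2 / (mN - 1)) : 0.0;
      }
    private:
      std::size_t mN = 0;
      double mMean = 0.0;
      double mM2 = 0.0;
    };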
     155             : 
     156             : void
     157           0 : WebrtcVideoConduit::SendStreamStatistics::DroppedFrames(
     158             :   uint32_t& aOutDroppedFrames) const
     159             : {
     160           0 :       aOutDroppedFrames = mDroppedFrames;
     161           0 : }
     162             : 
     163             : void
     164           0 : WebrtcVideoConduit::SendStreamStatistics::Update(
     165             :   const webrtc::VideoSendStream::Stats& aStats)
     166             : {
     167           0 :   StreamStatistics::Update(aStats.encode_frame_rate, aStats.media_bitrate_bps);
     168           0 :   if (!aStats.substreams.empty()) {
     169             :     const webrtc::FrameCounts& fc =
     170           0 :       aStats.substreams.begin()->second.frame_counts;
     171           0 :     mFramesEncoded = fc.key_frames + fc.delta_frames;
     172           0 :     CSFLogVerbose(logTag,
     173             :                   "%s: framerate: %u, bitrate: %u, dropped frames delta: %u",
     174             :                   __FUNCTION__, aStats.encode_frame_rate,
     175             :                   aStats.media_bitrate_bps,
     176           0 :                   mFramesDeliveredToEncoder - mFramesEncoded - mDroppedFrames);
     177           0 :     mDroppedFrames = mFramesDeliveredToEncoder - mFramesEncoded;
     178             :   } else {
     179           0 :     CSFLogVerbose(logTag, "%s stats.substreams is empty", __FUNCTION__);
     180             :   }
     181           0 : }
     182             : 
     183             : void
     184           0 : WebrtcVideoConduit::ReceiveStreamStatistics::DiscardedPackets(
     185             :   uint32_t& aOutDiscPackets) const
     186             : {
     187           0 :   aOutDiscPackets = mDiscardedPackets;
     188           0 : }
     189             : 
     190             : void
     191           0 : WebrtcVideoConduit::ReceiveStreamStatistics::FramesDecoded(
     192             :   uint32_t& aFramesDecoded) const
     193             : {
     194           0 :   aFramesDecoded = mFramesDecoded;
     195           0 : }
     196             : 
     197             : void
     198           0 : WebrtcVideoConduit::ReceiveStreamStatistics::Update(
     199             :   const webrtc::VideoReceiveStream::Stats& aStats)
     200             : {
     201           0 :   CSFLogVerbose(logTag, "%s ", __FUNCTION__);
     202           0 :   StreamStatistics::Update(aStats.decode_frame_rate, aStats.total_bitrate_bps);
     203           0 :   mDiscardedPackets = aStats.discarded_packets;
     204           0 :   mFramesDecoded = aStats.frame_counts.key_frames
     205           0 :                    + aStats.frame_counts.delta_frames;
     206           0 : }
     207             : 
     208             : /**
     209             :  * Factory Method for VideoConduit
     210             :  */
     211             : RefPtr<VideoSessionConduit>
     212           0 : VideoSessionConduit::Create(RefPtr<WebRtcCallWrapper> aCall)
     213             : {
     214           0 :   NS_ASSERTION(NS_IsMainThread(), "Only call on main thread");
     215           0 :   NS_ASSERTION(aCall, "missing required parameter: aCall");
     216           0 :   CSFLogVerbose(logTag, "%s", __FUNCTION__);
     217             : 
     218           0 :   if (!aCall) {
     219           0 :     return nullptr;
     220             :   }
     221             : 
     222           0 :   nsAutoPtr<WebrtcVideoConduit> obj(new WebrtcVideoConduit(aCall));
     223           0 :   if(obj->Init() != kMediaConduitNoError) {
     224           0 :     CSFLogError(logTag, "%s VideoConduit Init Failed ", __FUNCTION__);
     225           0 :     return nullptr;
     226             :   }
     227           0 :   CSFLogVerbose(logTag, "%s Successfully created VideoConduit ", __FUNCTION__);
     228           0 :   return obj.forget();
     229             : }
     230             : 
     231           0 : WebrtcVideoConduit::WebrtcVideoConduit(RefPtr<WebRtcCallWrapper> aCall)
     232             :   : mTransportMonitor("WebrtcVideoConduit")
     233             :   , mRenderer(nullptr)
     234             :   , mVideoAdapter(1)
     235             :   , mVideoBroadcaster()
     236             :   , mEngineTransmitting(false)
     237             :   , mEngineReceiving(false)
     238             :   , mCapId(-1)
     239             :   , mCodecMutex("VideoConduit codec db")
     240             :   , mInReconfig(false)
     241             :   , mRecvStream(nullptr)
     242             :   , mSendStream(nullptr)
     243             :   , mLastWidth(0)
     244             :   , mLastHeight(0) // initializing as 0 forces a check for reconfig at start
     245             :   , mSendingWidth(0)
     246             :   , mSendingHeight(0)
     247             :   , mReceivingWidth(0)
     248             :   , mReceivingHeight(0)
     249             :   , mSendingFramerate(DEFAULT_VIDEO_MAX_FRAMERATE)
     250             :   , mLastFramerateTenths(DEFAULT_VIDEO_MAX_FRAMERATE * 10)
     251             :   , mNumReceivingStreams(1)
     252             :   , mVideoLatencyTestEnable(false)
     253             :   , mVideoLatencyAvg(0)
     254             :   , mMinBitrate(0)
     255             :   , mStartBitrate(0)
     256             :   , mPrefMaxBitrate(0)
     257             :   , mNegotiatedMaxBitrate(0)
     258             :   , mMinBitrateEstimate(0)
     259             :   , mDenoising(false)
     260             :   , mLockScaling(false)
     261             :   , mSpatialLayers(1)
     262             :   , mTemporalLayers(1)
     263             :   , mCodecMode(webrtc::kRealtimeVideo)
     264             :   , mCall(aCall) // refcounted store of the call object
      265             :   , mSendStreamConfig(this) // 'this' is stored but not dereferenced in the constructor.
      266             :   , mRecvStreamConfig(this) // 'this' is stored but not dereferenced in the constructor.
     267             :   , mRecvSSRC(0)
     268             :   , mRecvSSRCSetInProgress(false)
     269             :   , mSendCodecPlugin(nullptr)
     270             :   , mRecvCodecPlugin(nullptr)
     271           0 :   , mVideoStatsTimer(do_CreateInstance(NS_TIMER_CONTRACTID))
     272             : {
     273           0 :   mRecvStreamConfig.renderer = this;
     274             : 
     275             :   // Video Stats Callback
     276           0 :   nsTimerCallbackFunc callback = [](nsITimer* aTimer, void* aClosure) {
     277           0 :     CSFLogDebug(logTag, "StreamStats polling scheduled for VideoConduit: %p", aClosure);
     278           0 :     auto self = static_cast<WebrtcVideoConduit*>(aClosure);
     279           0 :     MutexAutoLock lock(self->mCodecMutex);
     280           0 :     if (self->mEngineTransmitting && self->mSendStream) {
     281           0 :       const auto& stats = self->mSendStream->GetStats();
     282           0 :       self->mSendStreamStats.Update(stats);
     283           0 :       if (!stats.substreams.empty()) {
     284             :           self->mSendPacketCounts =
     285           0 :             stats.substreams.begin()->second.rtcp_packet_type_counts;
     286             :       }
     287             :     }
     288           0 :     if (self->mEngineReceiving && self->mRecvStream) {
     289           0 :       const auto& stats = self->mRecvStream->GetStats();
     290           0 :       self->mRecvStreamStats.Update(stats);
     291           0 :       self->mRecvPacketCounts = stats.rtcp_packet_type_counts;
     292             :     }
     293           0 :   };
     294           0 :   mVideoStatsTimer->InitWithNamedFuncCallback(
     295             :     callback, this, 1000, nsITimer::TYPE_REPEATING_PRECISE_CAN_SKIP,
     296           0 :     "WebrtcVideoConduit::WebrtcVideoConduit");
     297           0 : }
     298             : 
     299           0 : WebrtcVideoConduit::~WebrtcVideoConduit()
     300             : {
     301           0 :   CSFLogDebug(logTag, "%s ", __FUNCTION__);
     302           0 :   NS_ASSERTION(NS_IsMainThread(), "Only call on main thread");
     303           0 :   if (mVideoStatsTimer) {
     304           0 :     CSFLogDebug(logTag, "canceling StreamStats for VideoConduit: %p", this);
     305           0 :     MutexAutoLock lock(mCodecMutex);
     306           0 :     CSFLogDebug(logTag, "StreamStats cancelled for VideoConduit: %p", this);
     307           0 :     mVideoStatsTimer->Cancel();
     308             :   }
     309             : 
      310             :   // Release AudioConduit first by dropping reference on MainThread, where it expects to be released
     311           0 :   SyncTo(nullptr);
     312           0 :   Destroy();
     313           0 : }
     314             : 
     315             : void
     316           0 : WebrtcVideoConduit::SetLocalRTPExtensions(bool aIsSend,
     317             :   const std::vector<webrtc::RtpExtension> & aExtensions)
     318             : {
     319             :   auto& extList = aIsSend ? mSendStreamConfig.rtp.extensions :
     320           0 :                   mRecvStreamConfig.rtp.extensions;
     321           0 :   extList = aExtensions;
     322           0 : }
     323             : 
     324             : std::vector<webrtc::RtpExtension>
     325           0 : WebrtcVideoConduit::GetLocalRTPExtensions(bool aIsSend) const
     326             : {
     327           0 :   return aIsSend ? mSendStreamConfig.rtp.extensions : mRecvStreamConfig.rtp.extensions;
     328             : }
     329             : 
     330           0 : bool WebrtcVideoConduit::SetLocalSSRCs(const std::vector<unsigned int> & aSSRCs)
     331             : {
     332             :   // Special case: the local SSRCs are the same - do nothing.
     333           0 :   if (mSendStreamConfig.rtp.ssrcs == aSSRCs) {
     334           0 :     return true;
     335             :   }
     336             : 
     337             :   // Update the value of the ssrcs in the config structure.
     338           0 :   mSendStreamConfig.rtp.ssrcs = aSSRCs;
     339             : 
     340           0 :   bool wasTransmitting = mEngineTransmitting;
     341           0 :   if (StopTransmitting() != kMediaConduitNoError) {
     342           0 :     return false;
     343             :   }
     344             : 
     345           0 :   MutexAutoLock lock(mCodecMutex);
     346             :   // On the next StartTransmitting() or ConfigureSendMediaCodec, force
     347             :   // building a new SendStream to switch SSRCs.
     348           0 :   DeleteSendStream();
     349           0 :   if (wasTransmitting) {
     350           0 :     if (StartTransmitting() != kMediaConduitNoError) {
     351           0 :       return false;
     352             :     }
     353             :   }
     354             : 
     355           0 :   return true;
     356             : }
     357             : 
     358             : std::vector<unsigned int>
     359           0 : WebrtcVideoConduit::GetLocalSSRCs() const
     360             : {
     361           0 :   return mSendStreamConfig.rtp.ssrcs;
     362             : }
     363             : 
     364             : bool
     365           0 : WebrtcVideoConduit::SetLocalCNAME(const char* cname)
     366             : {
     367           0 :   mSendStreamConfig.rtp.c_name = cname;
     368           0 :   return true;
     369             : }
     370             : 
     371             : MediaConduitErrorCode
     372           0 : WebrtcVideoConduit::ConfigureCodecMode(webrtc::VideoCodecMode mode)
     373             : {
     374           0 :   CSFLogVerbose(logTag, "%s ", __FUNCTION__);
     375           0 :   if (mode == webrtc::VideoCodecMode::kRealtimeVideo ||
     376             :       mode == webrtc::VideoCodecMode::kScreensharing) {
     377           0 :     mCodecMode = mode;
     378           0 :     return kMediaConduitNoError;
     379             :   }
     380             : 
     381           0 :   return kMediaConduitMalformedArgument;
     382             : }
     383             : 
     384             : void
     385           0 : WebrtcVideoConduit::DeleteSendStream()
     386             : {
     387           0 :   mCodecMutex.AssertCurrentThreadOwns();
     388           0 :   if (mSendStream) {
     389           0 :     mCall->Call()->DestroyVideoSendStream(mSendStream);
     390           0 :     mSendStream = nullptr;
     391           0 :     mEncoder = nullptr;
     392             :   }
     393           0 : }
     394             : 
     395             : webrtc::VideoCodecType
     396           0 : SupportedCodecType(webrtc::VideoCodecType aType)
     397             : {
     398           0 :   switch (aType) {
     399             :     case webrtc::VideoCodecType::kVideoCodecVP8:
     400             :     case webrtc::VideoCodecType::kVideoCodecVP9:
     401             :     case webrtc::VideoCodecType::kVideoCodecH264:
     402           0 :       return aType;
     403             :     default:
     404           0 :       return webrtc::VideoCodecType::kVideoCodecUnknown;
     405             :   }
     406             :   // NOTREACHED
     407             : }
     408             : 
     409             : MediaConduitErrorCode
     410           0 : WebrtcVideoConduit::CreateSendStream()
     411             : {
     412           0 :   mCodecMutex.AssertCurrentThreadOwns();
     413             : 
     414             :   webrtc::VideoCodecType encoder_type =
     415           0 :     SupportedCodecType(
     416           0 :       webrtc::PayloadNameToCodecType(mSendStreamConfig.encoder_settings.payload_name)
     417           0 :         .value_or(webrtc::VideoCodecType::kVideoCodecUnknown));
     418           0 :   if (encoder_type == webrtc::VideoCodecType::kVideoCodecUnknown) {
     419           0 :     return kMediaConduitInvalidSendCodec;
     420             :   }
     421             : 
     422             :   nsAutoPtr<webrtc::VideoEncoder> encoder(
     423           0 :     CreateEncoder(encoder_type, mEncoderConfig.StreamCount() > 0));
     424           0 :   if (!encoder) {
     425           0 :     return kMediaConduitInvalidSendCodec;
     426             :   }
     427             : 
     428           0 :   mSendStreamConfig.encoder_settings.encoder = encoder.get();
     429             : 
     430           0 :   MOZ_RELEASE_ASSERT(mEncoderConfig.NumberOfStreams() != 0,
     431             :                      "mEncoderConfig - There are no configured streams!");
     432           0 :   MOZ_ASSERT(mSendStreamConfig.rtp.ssrcs.size() == mEncoderConfig.NumberOfStreams(),
     433             :              "Each video substream must have a corresponding ssrc.");
     434             : 
     435           0 :   mSendStream = mCall->Call()->CreateVideoSendStream(mSendStreamConfig.Copy(), mEncoderConfig.CopyConfig());
     436             : 
     437           0 :   if (!mSendStream) {
     438           0 :     return kMediaConduitVideoSendStreamError;
     439             :   }
     440           0 :   mSendStream->SetSource(this, webrtc::VideoSendStream::DegradationPreference::kBalanced);
     441             : 
     442           0 :   mEncoder = encoder;
     443             : 
     444           0 :   return kMediaConduitNoError;
     445             : }
     446             : 
     447             : void
     448           0 : WebrtcVideoConduit::DeleteRecvStream()
     449             : {
     450           0 :   mCodecMutex.AssertCurrentThreadOwns();
     451           0 :   if (mRecvStream) {
     452           0 :     mCall->Call()->DestroyVideoReceiveStream(mRecvStream);
     453           0 :     mRecvStream = nullptr;
     454           0 :     mDecoders.clear();
     455             :   }
     456           0 : }
     457             : 
     458             : MediaConduitErrorCode
     459           0 : WebrtcVideoConduit::CreateRecvStream()
     460             : {
     461           0 :   mCodecMutex.AssertCurrentThreadOwns();
     462             : 
     463           0 :   webrtc::VideoReceiveStream::Decoder decoder_desc;
     464           0 :   std::unique_ptr<webrtc::VideoDecoder> decoder;
     465             :   webrtc::VideoCodecType decoder_type;
     466             : 
     467           0 :   mRecvStreamConfig.decoders.clear();
     468           0 :   for (auto& config : mRecvCodecList) {
     469           0 :     decoder_type = SupportedCodecType(webrtc::PayloadNameToCodecType(config->mName)
     470           0 :                                       .value_or(webrtc::VideoCodecType::kVideoCodecUnknown));
     471           0 :     if (decoder_type == webrtc::VideoCodecType::kVideoCodecUnknown) {
     472           0 :       CSFLogError(logTag, "%s Unknown decoder type: %s", __FUNCTION__,
     473           0 :                   config->mName.c_str());
     474           0 :       continue;
     475             :     }
     476             : 
     477           0 :     decoder.reset(CreateDecoder(decoder_type));
     478             : 
     479           0 :     if (!decoder) {
     480             :       // This really should never happen unless something went wrong
     481             :       // in the negotiation code
     482           0 :       NS_ASSERTION(decoder, "Failed to create video decoder");
     483           0 :       CSFLogError(logTag, "Failed to create decoder of type %s (%d)",
     484           0 :                   config->mName.c_str(), decoder_type);
     485             :       // don't stop
     486           0 :       continue;
     487             :     }
     488             : 
     489           0 :     decoder_desc.decoder = decoder.get();
     490           0 :     mDecoders.push_back(std::move(decoder));
     491           0 :     decoder_desc.payload_name = config->mName;
     492           0 :     decoder_desc.payload_type = config->mType;
     493             :     // XXX Ok, add:
     494             :     // Set decoder_desc.codec_params (fmtp)
     495           0 :     mRecvStreamConfig.decoders.push_back(decoder_desc);
     496             :   }
     497             : 
     498           0 :   mRecvStream = mCall->Call()->CreateVideoReceiveStream(mRecvStreamConfig.Copy());
     499           0 :   if (!mRecvStream) {
     500           0 :     mDecoders.clear();
     501           0 :     return kMediaConduitUnknownError;
     502             :   }
     503           0 :   CSFLogDebug(logTag, "Created VideoReceiveStream %p for SSRC %u (0x%x)",
     504           0 :               mRecvStream, mRecvStreamConfig.rtp.remote_ssrc, mRecvStreamConfig.rtp.remote_ssrc);
     505             : 
     506           0 :   return kMediaConduitNoError;
     507             : }
     508             : 
     509             : static rtc::scoped_refptr<webrtc::VideoEncoderConfig::EncoderSpecificSettings>
     510           0 : ConfigureVideoEncoderSettings(const VideoCodecConfig* aConfig,
     511             :                               const WebrtcVideoConduit* aConduit)
     512             : {
     513           0 :   bool is_screencast = aConduit->CodecMode() == webrtc::VideoCodecMode::kScreensharing;
     514             :   // No automatic resizing when using simulcast or screencast.
     515           0 :   bool automatic_resize = !is_screencast && aConfig->mSimulcastEncodings.size() <= 1;
     516           0 :   bool frame_dropping = !is_screencast;
     517             :   bool denoising;
     518           0 :   bool codec_default_denoising = false;
     519           0 :   if (is_screencast) {
     520           0 :     denoising = false;
     521             :   } else {
     522             :     // Use codec default if video_noise_reduction is unset.
     523           0 :     denoising = aConduit->Denoising();
     524           0 :     codec_default_denoising = !denoising;
     525             :   }
     526             : 
     527           0 :   if (aConfig->mName == "H264") {
     528             :     webrtc::VideoCodecH264 h264_settings =
     529           0 :         webrtc::VideoEncoder::GetDefaultH264Settings();
     530           0 :     h264_settings.frameDroppingOn = frame_dropping;
     531           0 :     h264_settings.packetizationMode = aConfig->mPacketizationMode;
     532             :     return new rtc::RefCountedObject<
     533           0 :         webrtc::VideoEncoderConfig::H264EncoderSpecificSettings>(h264_settings);
     534             : 
     535           0 :   } else if (aConfig->mName == "VP8") {
     536             :     webrtc::VideoCodecVP8 vp8_settings =
     537           0 :         webrtc::VideoEncoder::GetDefaultVp8Settings();
     538           0 :     vp8_settings.automaticResizeOn = automatic_resize;
     539             :     // VP8 denoising is enabled by default.
     540           0 :     vp8_settings.denoisingOn = codec_default_denoising ? true : denoising;
     541           0 :     vp8_settings.frameDroppingOn = frame_dropping;
     542             :     return new rtc::RefCountedObject<
     543           0 :         webrtc::VideoEncoderConfig::Vp8EncoderSpecificSettings>(vp8_settings);
     544             : 
     545           0 :   } else if (aConfig->mName == "VP9") {
     546             :     webrtc::VideoCodecVP9 vp9_settings =
     547           0 :         webrtc::VideoEncoder::GetDefaultVp9Settings();
     548           0 :     if (is_screencast) {
     549             :       // TODO(asapersson): Set to 2 for now since there is a DCHECK in
     550             :       // VideoSendStream::ReconfigureVideoEncoder.
     551           0 :       vp9_settings.numberOfSpatialLayers = 2;
     552             :     } else {
     553           0 :       vp9_settings.numberOfSpatialLayers = aConduit->SpatialLayers();
     554             :     }
     555             :     // VP9 denoising is disabled by default.
     556           0 :     vp9_settings.denoisingOn = codec_default_denoising ? false : denoising;
     557           0 :     vp9_settings.frameDroppingOn = frame_dropping;
     558             :     return new rtc::RefCountedObject<
     559           0 :         webrtc::VideoEncoderConfig::Vp9EncoderSpecificSettings>(vp9_settings);
     560             :   }
     561           0 :   return nullptr;
     562             : }
     563             : 
     564             : std::vector<webrtc::VideoStream>
     565           0 : WebrtcVideoConduit::VideoStreamFactory::CreateEncoderStreams(int width, int height,
     566             :                                                              const webrtc::VideoEncoderConfig& config)
     567             : {
     568           0 :   auto streamCount = config.number_of_streams;
     569           0 :   std::vector<webrtc::VideoStream> streams;
     570           0 :   streams.reserve(streamCount);
     571           0 :   MOZ_ASSERT(mConduit);
     572           0 :   MutexAutoLock lock(mConduit->mCodecMutex); // for mCurSendCodecConfig
     573             : 
     574             :   // XXX webrtc.org code has a restriction on simulcast layers that each
     575             :   // layer must be 1/2 the dimension of the previous layer - not sure why.
     576             :   // This means we can't use scaleResolutionBy/scaleDownBy (yet), even if
     577             :   // the user specified it.  The one exception is that we can apply it on
     578             :   // the full-resolution stream (which also happens to handle the
     579             :   // non-simulcast usage case). NOTE: we make an assumption here, not in the
     580             :   // spec, that the first stream is the full-resolution stream.
     581           0 :   auto& simulcastEncoding = mConduit->mCurSendCodecConfig->mSimulcastEncodings[0];
     582             : #if 0
     583             :   // XXX What we'd like to do for each simulcast stream...
     584             :   if (simulcastEncoding.constraints.scaleDownBy > 1.0) {
     585             :     uint32_t new_width = width / simulcastEncoding.constraints.scaleDownBy;
     586             :     uint32_t new_height = height / simulcastEncoding.constraints.scaleDownBy;
     587             : 
     588             :     if (new_width != width || new_height != height) {
     589             :       if (streamCount == 1) {
     590             :         CSFLogVerbose(logTag, "%s: ConstrainPreservingAspectRatio", __FUNCTION__);
     591             :         // Use less strict scaling in unicast. That way 320x240 / 3 = 106x79.
     592             :         ConstrainPreservingAspectRatio(new_width, new_height,
     593             :                                        &width, &height);
     594             :       } else {
     595             :         CSFLogVerbose(logTag, "%s: ConstrainPreservingAspectRatioExact", __FUNCTION__);
     596             :         // webrtc.org supposedly won't tolerate simulcast unless every stream
     597             :         // is exactly the same aspect ratio. 320x240 / 3 = 80x60.
     598             :         ConstrainPreservingAspectRatioExact(new_width * new_height,
     599             :                                             &width, &height);
     600             :       }
     601             :     }
     602             :   }
     603             : #endif
     604             : 
     605           0 :   for (size_t idx = streamCount - 1; streamCount > 0; idx--, streamCount--) {
     606           0 :     webrtc::VideoStream video_stream;
      607             :     // Stream dimensions must be divisible by 2^(n-1), where n is the number of layers.
      608             :     // Each lower-resolution layer has dimensions 1/2^idx those of the largest
      609             :     // layer, where idx is the layer's index (the largest layer has idx 0).
     610             : 
     611             :     // width/height will be overridden on the first frame; they must be 'sane' for
     612             :     // SetSendCodec()
     613           0 :     video_stream.width = width >> idx;
     614           0 :     video_stream.height = height >> idx;
      615             :     // We want to ensure this picks up the current framerate, so read it indirectly from the conduit.
     616           0 :     video_stream.max_framerate = mConduit->mSendingFramerate;
     617             : 
     618           0 :     simulcastEncoding = mConduit->mCurSendCodecConfig->mSimulcastEncodings[idx];
     619           0 :     MOZ_ASSERT(simulcastEncoding.constraints.scaleDownBy >= 1.0);
     620             : 
     621             :     // leave vector temporal_layer_thresholds_bps empty
     622           0 :     video_stream.temporal_layer_thresholds_bps.clear();
     623             :     // Calculate these first
     624           0 :     video_stream.max_bitrate_bps = MinIgnoreZero(simulcastEncoding.constraints.maxBr,
     625             :                                                  kDefaultMaxBitrate_bps);
     626           0 :     video_stream.max_bitrate_bps = MinIgnoreZero((int) mConduit->mPrefMaxBitrate*1000,
     627             :                                                  video_stream.max_bitrate_bps);
     628           0 :     video_stream.min_bitrate_bps = (mConduit->mMinBitrate ?
     629           0 :                                     mConduit->mMinBitrate : kDefaultMinBitrate_bps);
     630           0 :     if (video_stream.min_bitrate_bps > video_stream.max_bitrate_bps) {
     631           0 :       video_stream.min_bitrate_bps = video_stream.max_bitrate_bps;
     632             :     }
     633           0 :     video_stream.target_bitrate_bps = (mConduit->mStartBitrate ?
     634           0 :                                        mConduit->mStartBitrate : kDefaultStartBitrate_bps);
     635           0 :     if (video_stream.target_bitrate_bps > video_stream.max_bitrate_bps) {
     636           0 :       video_stream.target_bitrate_bps = video_stream.max_bitrate_bps;
     637             :     }
     638           0 :     if (video_stream.target_bitrate_bps < video_stream.min_bitrate_bps) {
     639           0 :       video_stream.target_bitrate_bps = video_stream.min_bitrate_bps;
     640             :     }
     641             :     // We should use SelectBitrates here for the case of already-sending and no reconfig needed;
     642             :     // overrides the calculations above
     643           0 :     if (mConduit->mSendingWidth) { // cleared if we need a reconfig
     644           0 :       mConduit->SelectBitrates(video_stream.width, video_stream.height, // use video_stream.foo!
     645           0 :                                simulcastEncoding.constraints.maxBr,
     646           0 :                                mConduit->mLastFramerateTenths, video_stream);
     647             :     }
     648             : 
     649           0 :     video_stream.max_qp = kQpMax;
     650           0 :     video_stream.SetRid(simulcastEncoding.rid);
     651             : 
     652           0 :     if (mConduit->mCurSendCodecConfig->mName == "H264") {
     653           0 :       if (mConduit->mCurSendCodecConfig->mEncodingConstraints.maxMbps > 0) {
     654             :         // Not supported yet!
     655           0 :         CSFLogError(logTag, "%s H.264 max_mbps not supported yet", __FUNCTION__);
     656             :       }
     657             :     }
     658           0 :     streams.push_back(video_stream);
     659             :   }
     660           0 :   return streams;
     661             : }
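
The halving by `>> idx` and the min/target/max clamping above fully determine each simulcast layer before SelectBitrates refines them. A standalone sketch of that arithmetic for three layers of a 1280x720 source (illustrative only; the constants mirror the kDefault*Bitrate_bps values defined at the top of this file, and the struct is a stand-in for webrtc::VideoStream):

    #include <algorithm>
    #include <cstddef>
    #include <cstdio>
    #include <vector>

    // Stand-in for webrtc::VideoStream, holding only the fields computed above.
    struct LayerSketch {
      int width, height;
      int min_bitrate_bps, target_bitrate_bps, max_bitrate_bps;
    };

    int main()
    {
      const int kDefaultMinBitrate_bps = 200 * 1000;
      const int kDefaultStartBitrate_bps = 300 * 1000;
      const int kDefaultMaxBitrate_bps = 2000 * 1000;

      const int width = 1280, height = 720;
      const std::size_t streamCount = 3; // three simulcast layers

      std::vector<LayerSketch> layers;
      for (std::size_t idx = streamCount; idx-- > 0;) {
        LayerSketch s;
        // Layer idx is the full resolution shifted down idx times, so the
        // smallest layer is emitted first: 320x180, then 640x360, then 1280x720.
        s.width = width >> idx;
        s.height = height >> idx;
        // min <= target <= max, with unset (zero) prefs falling back to defaults.
        s.max_bitrate_bps = kDefaultMaxBitrate_bps;
        s.min_bitrate_bps = std::min(kDefaultMinBitrate_bps, s.max_bitrate_bps);
        s.target_bitrate_bps =
            std::max(std::min(kDefaultStartBitrate_bps, s.max_bitrate_bps),
                     s.min_bitrate_bps);
        layers.push_back(s);
        std::printf("layer %zu: %dx%d, %d..%d..%d bps\n", idx, s.width, s.height,
                    s.min_bitrate_bps, s.target_bitrate_bps, s.max_bitrate_bps);
      }
      return 0;
    }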
     662             : 
     663             : /**
     664             :  * Note: Setting the send-codec on the Video Engine will restart the encoder,
      665             :  * set up a new SSRC, and reset the RTP_RTCP module with the new codec setting.
     666             :  *
     667             :  * Note: this is called from MainThread, and the codec settings are read on
      668             :  * video-frame delivery threads (i.e. in SendVideoFrame()).  With
     669             :  * renegotiation/reconfiguration, this now needs a lock!  Alternatively
     670             :  * changes could be queued until the next frame is delivered using an
     671             :  * Atomic pointer and swaps.
     672             :  */
     673             : 
     674             : MediaConduitErrorCode
     675           0 : WebrtcVideoConduit::ConfigureSendMediaCodec(const VideoCodecConfig* codecConfig)
     676             : {
     677           0 :   CSFLogDebug(logTag, "%s for %s", __FUNCTION__,
     678           0 :     codecConfig ? codecConfig->mName.c_str() : "<null>");
     679             : 
     680           0 :   MediaConduitErrorCode condError = kMediaConduitNoError;
     681             : 
     682             :   // validate basic params
     683           0 :   if ((condError = ValidateCodecConfig(codecConfig, true)) != kMediaConduitNoError) {
     684           0 :     return condError;
     685             :   }
     686             : 
     687           0 :   size_t streamCount = std::min(codecConfig->mSimulcastEncodings.size(),
     688           0 :                                 (size_t)webrtc::kMaxSimulcastStreams);
     689           0 :   CSFLogDebug(logTag, "%s for VideoConduit:%p stream count:%d", __FUNCTION__,
     690           0 :               this, static_cast<int>(streamCount));
     691             : 
     692           0 :   mSendingFramerate = 0;
     693           0 :   mEncoderConfig.ClearStreams();
     694           0 :   mSendStreamConfig.rtp.rids.clear();
     695             : 
     696             :   int max_framerate;
     697           0 :   if (codecConfig->mEncodingConstraints.maxFps > 0) {
     698           0 :     max_framerate = codecConfig->mEncodingConstraints.maxFps;
     699             :   } else {
     700           0 :     max_framerate = DEFAULT_VIDEO_MAX_FRAMERATE;
     701             :   }
     702             :   // apply restrictions from maxMbps/etc
     703           0 :   mSendingFramerate = SelectSendFrameRate(codecConfig,
     704             :                                           max_framerate,
     705           0 :                                           mSendingWidth,
     706           0 :                                           mSendingHeight);
     707             : 
     708             :   // So we can comply with b=TIAS/b=AS/maxbr=X when input resolution changes
     709           0 :   mNegotiatedMaxBitrate = codecConfig->mTias;
     710             : 
     711             :   // width/height will be overridden on the first frame; they must be 'sane' for
     712             :   // SetSendCodec()
     713             : 
     714           0 :   if (mSendingWidth != 0) {
     715             :     // We're already in a call and are reconfiguring (perhaps due to
     716             :     // ReplaceTrack).
     717             :     bool resolutionChanged;
     718             :     {
     719           0 :       MutexAutoLock lock(mCodecMutex);
     720           0 :       resolutionChanged = !mCurSendCodecConfig->ResolutionEquals(*codecConfig);
     721             :     }
     722             : 
     723           0 :     if (resolutionChanged) {
     724             :       // We're already in a call and due to renegotiation an encoder parameter
     725             :       // that requires reconfiguration has changed. Resetting these members
     726             :       // triggers reconfig on the next frame.
     727           0 :       mLastWidth = 0;
     728           0 :       mLastHeight = 0;
     729           0 :       mSendingWidth = 0;
     730           0 :       mSendingHeight = 0;
     731             :     } else {
     732             :       // We're already in a call but changes don't require a reconfiguration.
     733             :       // We update the resolutions in the send codec to match the current
     734             :       // settings.  Framerate is already set.
     735             :     }
     736           0 :   } else if (mMinBitrateEstimate) {
      737             :     // Only do this at the start; use "have we sent a frame" as a reasonable stand-in.
     738             :     // min <= start <= max (which can be -1, note!)
     739           0 :     webrtc::Call::Config::BitrateConfig config;
     740           0 :     config.min_bitrate_bps = mMinBitrateEstimate;
     741           0 :     if (config.start_bitrate_bps < mMinBitrateEstimate) {
     742           0 :       config.start_bitrate_bps = mMinBitrateEstimate;
     743             :     }
     744           0 :     if (config.max_bitrate_bps > 0 &&
     745           0 :         config.max_bitrate_bps < mMinBitrateEstimate) {
     746           0 :       config.max_bitrate_bps = mMinBitrateEstimate;
     747             :     }
     748           0 :     mCall->Call()->SetBitrateConfig(config);
     749             :   }
     750             : 
     751             :   // NOTE: the lifetime of this object MUST be less than the lifetime of the Conduit
     752           0 :   mEncoderConfig.SetVideoStreamFactory(
     753             :     new rtc::RefCountedObject<WebrtcVideoConduit::VideoStreamFactory>(
     754           0 :       codecConfig->mName, this));
     755             : 
     756             :   // Always call this to ensure it's reset
     757           0 :   mVideoAdapter.OnScaleResolutionBy(
     758           0 :     (streamCount >= 1 && codecConfig->mSimulcastEncodings[0].constraints.scaleDownBy > 1.0) ?
     759           0 :     rtc::Optional<float>(codecConfig->mSimulcastEncodings[0].constraints.scaleDownBy) :
     760           0 :     rtc::Optional<float>());
     761             : 
     762             :   // XXX parse the encoded SPS/PPS data and set spsData/spsLen/ppsData/ppsLen
     763           0 :   mEncoderConfig.SetEncoderSpecificSettings(ConfigureVideoEncoderSettings(codecConfig, this));
     764           0 :   mEncoderConfig.SetResolutionDivisor(1);
     765             : 
     766           0 :   mEncoderConfig.SetContentType(mCodecMode == webrtc::kRealtimeVideo ?
     767             :     webrtc::VideoEncoderConfig::ContentType::kRealtimeVideo :
     768           0 :     webrtc::VideoEncoderConfig::ContentType::kScreen);
     769             :   // for the GMP H.264 encoder/decoder!!
     770           0 :   mEncoderConfig.SetMinTransmitBitrateBps(0);
     771             :   // Expected max number of encodings
     772           0 :   mEncoderConfig.SetMaxEncodings(codecConfig->mSimulcastEncodings.size());
     773             : 
      774             :   // If only encoder stream attributes have been changed, there is no need to stop,
     775             :   // create a new webrtc::VideoSendStream, and restart.
     776             :   // Recreating on PayloadType change may be overkill, but is safe.
     777           0 :   if (mSendStream) {
     778           0 :     if (!RequiresNewSendStream(*codecConfig)) {
     779           0 :       mSendStream->ReconfigureVideoEncoder(mEncoderConfig.CopyConfig());
     780           0 :       return kMediaConduitNoError;
     781             :     }
     782             : 
     783           0 :     condError = StopTransmitting();
     784           0 :     if (condError != kMediaConduitNoError) {
     785           0 :       return condError;
     786             :     }
     787             : 
     788             :     // This will cause a new encoder to be created by StartTransmitting()
     789           0 :     MutexAutoLock lock(mCodecMutex);
     790           0 :     DeleteSendStream();
     791             :   }
     792             : 
     793           0 :   mSendStreamConfig.encoder_settings.payload_name = codecConfig->mName;
     794           0 :   mSendStreamConfig.encoder_settings.payload_type = codecConfig->mType;
     795           0 :   mSendStreamConfig.rtp.rtcp_mode = webrtc::RtcpMode::kCompound;
     796           0 :   mSendStreamConfig.rtp.max_packet_size = kVideoMtu;
     797             : 
      798             :   // See Bug 1297058: enabling FEC when basic NACK is to be enabled for H.264 is problematic
     799           0 :   if (codecConfig->RtcpFbFECIsSet() &&
     800           0 :       !(codecConfig->mName == "H264" && codecConfig->RtcpFbNackIsSet(""))) {
     801           0 :     mSendStreamConfig.rtp.ulpfec.ulpfec_payload_type = codecConfig->mULPFECPayloadType;
     802           0 :     mSendStreamConfig.rtp.ulpfec.red_payload_type = codecConfig->mREDPayloadType;
     803           0 :     mSendStreamConfig.rtp.ulpfec.red_rtx_payload_type = codecConfig->mREDRTXPayloadType;
     804             :   }
     805             : 
     806           0 :   mSendStreamConfig.rtp.nack.rtp_history_ms =
     807           0 :     codecConfig->RtcpFbNackIsSet("") ? 1000 : 0;
     808             : 
     809             :   {
     810           0 :     MutexAutoLock lock(mCodecMutex);
     811             :     // Copy the applied config for future reference.
     812           0 :     mCurSendCodecConfig = new VideoCodecConfig(*codecConfig);
     813             :   }
     814             : 
     815           0 :   mSendStreamConfig.rtp.rids.clear();
     816           0 :   bool has_rid = false;
     817           0 :   for (size_t idx = 0; idx < streamCount; idx++) {
     818           0 :     auto& simulcastEncoding = mCurSendCodecConfig->mSimulcastEncodings[idx];
     819           0 :     if (simulcastEncoding.rid[0]) {
     820           0 :       has_rid = true;
     821           0 :       break;
     822             :     }
     823             :   }
     824           0 :   if (has_rid) {
     825           0 :     for (size_t idx = streamCount; idx > 0; idx--) {
     826           0 :       auto& simulcastEncoding = mCurSendCodecConfig->mSimulcastEncodings[idx-1];
     827           0 :       mSendStreamConfig.rtp.rids.push_back(simulcastEncoding.rid);
     828             :     }
     829             :   }
     830             : 
     831           0 :   return condError;
     832             : }
     833             : 
     834             : bool
     835           0 : WebrtcVideoConduit::SetRemoteSSRC(unsigned int ssrc)
     836             : {
     837           0 :   CSFLogDebug(logTag, "%s: SSRC %u (0x%x)", __FUNCTION__, ssrc, ssrc);
     838           0 :   mRecvStreamConfig.rtp.remote_ssrc = ssrc;
     839             : 
     840             :   unsigned int current_ssrc;
     841           0 :   if (!GetRemoteSSRC(&current_ssrc)) {
     842           0 :     return false;
     843             :   }
     844             : 
     845           0 :   if (current_ssrc == ssrc) {
     846           0 :     return true;
     847             :   }
     848             : 
     849           0 :   bool wasReceiving = mEngineReceiving;
     850           0 :   if (StopReceiving() != kMediaConduitNoError) {
     851           0 :     return false;
     852             :   }
     853             : 
     854             :   // This will destroy mRecvStream and create a new one (argh, why can't we change
     855             :   // it without a full destroy?)
     856             :   // We're going to modify mRecvStream, we must lock.  Only modified on MainThread.
     857             :   // All non-MainThread users must lock before reading/using
     858             :   {
     859           0 :     MutexAutoLock lock(mCodecMutex);
     860             :     // On the next StartReceiving() or ConfigureRecvMediaCodec, force
     861             :     // building a new RecvStream to switch SSRCs.
     862           0 :     DeleteRecvStream();
     863           0 :     if (!wasReceiving) {
     864           0 :       return true;
     865             :     }
     866           0 :     MediaConduitErrorCode rval = CreateRecvStream();
     867           0 :     if (rval != kMediaConduitNoError) {
     868           0 :       CSFLogError(logTag, "%s Start Receive Error %d ", __FUNCTION__, rval);
     869           0 :       return false;
     870             :     }
     871             :   }
     872           0 :   return (StartReceiving() == kMediaConduitNoError);
     873             : }
     874             : 
     875             : bool
     876           0 : WebrtcVideoConduit::GetRemoteSSRC(unsigned int* ssrc)
     877             : {
     878             :   {
     879           0 :     MutexAutoLock lock(mCodecMutex);
     880           0 :     if (!mRecvStream) {
     881           0 :       return false;
     882             :     }
     883             : 
     884           0 :     const webrtc::VideoReceiveStream::Stats& stats = mRecvStream->GetStats();
     885           0 :     *ssrc = stats.ssrc;
     886             :   }
     887             : 
     888           0 :   return true;
     889             : }
     890             : 
     891             : bool
     892           0 : WebrtcVideoConduit::GetSendPacketTypeStats(
     893             :     webrtc::RtcpPacketTypeCounter* aPacketCounts)
     894             : {
     895           0 :   MutexAutoLock lock(mCodecMutex);
     896           0 :   if (!mEngineTransmitting || !mSendStream) { // Not transmitting
     897           0 :     return false;
     898             :   }
     899           0 :   *aPacketCounts = mSendPacketCounts;
     900           0 :   return true;
     901             : }
     902             : 
     903             : bool
     904           0 : WebrtcVideoConduit::GetRecvPacketTypeStats(
     905             :     webrtc::RtcpPacketTypeCounter* aPacketCounts)
     906             : {
     907           0 :   MutexAutoLock lock(mCodecMutex);
     908           0 :   if (!mEngineReceiving || !mRecvStream) { // Not receiving
     909           0 :     return false;
     910             :   }
     911           0 :   *aPacketCounts = mRecvPacketCounts;
     912           0 :   return true;
     913             : }
     914             : 
     915             : bool
     916           0 : WebrtcVideoConduit::GetVideoEncoderStats(double* framerateMean,
     917             :                                          double* framerateStdDev,
     918             :                                          double* bitrateMean,
     919             :                                          double* bitrateStdDev,
     920             :                                          uint32_t* droppedFrames,
     921             :                                          uint32_t* framesEncoded)
     922             : {
     923             :   {
     924           0 :     MutexAutoLock lock(mCodecMutex);
     925           0 :     if (!mEngineTransmitting || !mSendStream) {
     926           0 :       return false;
     927             :     }
     928           0 :     mSendStreamStats.GetVideoStreamStats(*framerateMean, *framerateStdDev,
     929           0 :       *bitrateMean, *bitrateStdDev);
     930           0 :     mSendStreamStats.DroppedFrames(*droppedFrames);
     931           0 :     *framesEncoded = mSendStreamStats.FramesEncoded();
     932           0 :     return true;
     933             :   }
     934             : }
     935             : 
     936             : bool
     937           0 : WebrtcVideoConduit::GetVideoDecoderStats(double* framerateMean,
     938             :                                          double* framerateStdDev,
     939             :                                          double* bitrateMean,
     940             :                                          double* bitrateStdDev,
     941             :                                          uint32_t* discardedPackets,
     942             :                                          uint32_t* framesDecoded)
     943             : {
     944             :   {
     945           0 :     MutexAutoLock lock(mCodecMutex);
     946           0 :     if (!mEngineReceiving || !mRecvStream) {
     947           0 :       return false;
     948             :     }
     949           0 :     mRecvStreamStats.GetVideoStreamStats(*framerateMean, *framerateStdDev,
     950           0 :       *bitrateMean, *bitrateStdDev);
     951           0 :     mRecvStreamStats.DiscardedPackets(*discardedPackets);
     952           0 :     mRecvStreamStats.FramesDecoded(*framesDecoded);
     953           0 :     return true;
     954             :   }
     955             : }
     956             : 
     957             : bool
     958           0 : WebrtcVideoConduit::GetAVStats(int32_t* jitterBufferDelayMs,
     959             :                                int32_t* playoutBufferDelayMs,
     960             :                                int32_t* avSyncOffsetMs)
     961             : {
     962           0 :   return false;
     963             : }
     964             : 
     965             : bool
     966           0 : WebrtcVideoConduit::GetRTPStats(unsigned int* jitterMs,
     967             :                                 unsigned int* cumulativeLost)
     968             : {
     969           0 :   CSFLogVerbose(logTag, "%s for VideoConduit:%p", __FUNCTION__, this);
     970             :   {
     971           0 :     MutexAutoLock lock(mCodecMutex);
     972           0 :     if (!mRecvStream) {
     973           0 :       return false;
     974             :     }
     975             : 
     976           0 :     const webrtc::VideoReceiveStream::Stats& stats = mRecvStream->GetStats();
     977           0 :     *jitterMs =
     978           0 :         stats.rtcp_stats.jitter / (webrtc::kVideoPayloadTypeFrequency / 1000);
     979           0 :     *cumulativeLost = stats.rtcp_stats.cumulative_lost;
     980             :   }
     981           0 :   return true;
     982             : }
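
The jitter arithmetic above depends on the video RTP clock rate: webrtc reports jitter in RTP timestamp units, and video payloads use a 90 kHz clock (webrtc::kVideoPayloadTypeFrequency), so dividing by 90000 / 1000 = 90 converts the value to milliseconds. A small sketch of the same conversion (the 90000 constant is an assumption stated here, not read from this file):

    #include <cstdio>

    // Assumed value of webrtc::kVideoPayloadTypeFrequency: the 90 kHz video RTP clock.
    constexpr unsigned kVideoPayloadTypeFrequency = 90000;

    unsigned JitterRtpUnitsToMs(unsigned jitterRtpUnits)
    {
      // e.g. a reported jitter of 450 RTP units -> 450 / 90 = 5 ms
      return jitterRtpUnits / (kVideoPayloadTypeFrequency / 1000);
    }

    int main()
    {
      std::printf("%u ms\n", JitterRtpUnitsToMs(450)); // prints "5 ms"
      return 0;
    }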
     983             : 
     984           0 : bool WebrtcVideoConduit::GetRTCPReceiverReport(DOMHighResTimeStamp* timestamp,
     985             :                                                uint32_t* jitterMs,
     986             :                                                uint32_t* packetsReceived,
     987             :                                                uint64_t* bytesReceived,
     988             :                                                uint32_t* cumulativeLost,
     989             :                                                int32_t* rttMs)
     990             : {
     991             :   {
     992           0 :     CSFLogVerbose(logTag, "%s for VideoConduit:%p", __FUNCTION__, this);
     993           0 :     MutexAutoLock lock(mCodecMutex);
     994           0 :     if (!mSendStream) {
     995           0 :       return false;
     996             :     }
     997           0 :     const webrtc::VideoSendStream::Stats& sendStats = mSendStream->GetStats();
     998           0 :     if (sendStats.substreams.size() == 0
     999           0 :         || mSendStreamConfig.rtp.ssrcs.size() == 0) {
    1000           0 :       return false;
    1001             :     }
    1002           0 :     uint32_t ssrc = mSendStreamConfig.rtp.ssrcs.front();
    1003           0 :     auto ind = sendStats.substreams.find(ssrc);
    1004           0 :     if (ind == sendStats.substreams.end()) {
    1005             :       CSFLogError(logTag,
    1006             :         "%s for VideoConduit:%p ssrc not found in SendStream stats.",
    1007           0 :         __FUNCTION__, this);
    1008           0 :       return false;
    1009             :     }
    1010           0 :     *jitterMs = ind->second.rtcp_stats.jitter
    1011           0 :         / (webrtc::kVideoPayloadTypeFrequency / 1000);
    1012           0 :     *cumulativeLost = ind->second.rtcp_stats.cumulative_lost;
    1013           0 :     *bytesReceived = ind->second.rtp_stats.MediaPayloadBytes();
    1014           0 :     *packetsReceived = ind->second.rtp_stats.transmitted.packets;
    1015           0 :     auto stats = mCall->Call()->GetStats();
    1016           0 :     int64_t rtt = stats.rtt_ms;
    1017             : #ifdef DEBUG
    1018           0 :     if (rtt > INT32_MAX) {
    1019             :       CSFLogError(logTag,
    1020             :         "%s for VideoConduit:%p RTT is larger than the"
    1021           0 :         " maximum size of an RTCP RTT.", __FUNCTION__, this);
    1022             :     }
    1023             : #endif
    1024           0 :     if (rtt > 0) {
    1025           0 :       *rttMs = rtt;
    1026             :     } else {
    1027           0 :       *rttMs = 0;
    1028             :     }
     1029             :     // Note: timestamp is not correct per the spec... it should be the time the
     1030             :     // RTCP report was received (remote) or sent (local).
    1031           0 :     *timestamp = webrtc::Clock::GetRealTimeClock()->TimeInMilliseconds();
    1032             :   }
    1033           0 :   return true;
    1034             : }
    1035             : 
    1036             : bool
    1037           0 : WebrtcVideoConduit::GetRTCPSenderReport(DOMHighResTimeStamp* timestamp,
    1038             :                                         unsigned int* packetsSent,
    1039             :                                         uint64_t* bytesSent)
    1040             : {
    1041           0 :   CSFLogVerbose(logTag, "%s for VideoConduit:%p", __FUNCTION__, this);
    1042             :   webrtc::RTCPSenderInfo senderInfo;
    1043             :   {
    1044           0 :     MutexAutoLock lock(mCodecMutex);
    1045           0 :     if (!mRecvStream || !mRecvStream->GetRemoteRTCPSenderInfo(&senderInfo)) {
    1046           0 :       return false;
    1047             :     }
    1048             :   }
    1049           0 :   *timestamp = webrtc::Clock::GetRealTimeClock()->TimeInMilliseconds();
    1050           0 :   *packetsSent = senderInfo.sendPacketCount;
    1051           0 :   *bytesSent = senderInfo.sendOctetCount;
    1052           0 :   return true;
    1053             : }
    1054             : 
    1055             : MediaConduitErrorCode
    1056           0 : WebrtcVideoConduit::InitMain()
    1057             : {
    1058             :   // already know we must be on MainThread barring unit test weirdness
    1059           0 :   MOZ_ASSERT(NS_IsMainThread());
    1060             : 
    1061             :   nsresult rv;
    1062           0 :   nsCOMPtr<nsIPrefService> prefs = do_GetService("@mozilla.org/preferences-service;1", &rv);
    1063           0 :   if (!NS_WARN_IF(NS_FAILED(rv))) {
    1064           0 :     nsCOMPtr<nsIPrefBranch> branch = do_QueryInterface(prefs);
    1065             : 
    1066           0 :     if (branch) {
    1067             :       int32_t temp;
     1068           0 :       Unused << NS_WARN_IF(NS_FAILED(branch->GetBoolPref("media.video.test_latency",
     1069             :                                                          &mVideoLatencyTestEnable)));
    1072           0 :       if (!NS_WARN_IF(NS_FAILED(branch->GetIntPref(
    1073             :             "media.peerconnection.video.min_bitrate", &temp))))
    1074             :       {
    1075           0 :          if (temp >= 0) {
    1076           0 :            mMinBitrate = KBPS(temp);
    1077             :          }
    1078             :       }
    1079           0 :       if (!NS_WARN_IF(NS_FAILED(branch->GetIntPref(
    1080             :             "media.peerconnection.video.start_bitrate", &temp))))
    1081             :       {
    1082           0 :          if (temp >= 0) {
    1083           0 :            mStartBitrate = KBPS(temp);
    1084             :          }
    1085             :       }
    1086           0 :       if (!NS_WARN_IF(NS_FAILED(branch->GetIntPref(
    1087             :             "media.peerconnection.video.max_bitrate", &temp))))
    1088             :       {
    1089           0 :         if (temp >= 0) {
    1090           0 :           mPrefMaxBitrate = KBPS(temp);
    1091             :         }
    1092             :       }
    1093           0 :       if (mMinBitrate != 0 && mMinBitrate < kViEMinCodecBitrate_bps) {
    1094           0 :         mMinBitrate = kViEMinCodecBitrate_bps;
    1095             :       }
    1096           0 :       if (mStartBitrate < mMinBitrate) {
    1097           0 :         mStartBitrate = mMinBitrate;
    1098             :       }
    1099           0 :       if (mPrefMaxBitrate && mStartBitrate > mPrefMaxBitrate) {
    1100           0 :         mStartBitrate = mPrefMaxBitrate;
    1101             :       }
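      // Editor's note (illustrative): with prefs of min=300, start=200 and
      // max=250 (kbps), mStartBitrate is first raised to mMinBitrate (300 kbps)
      // and then capped to mPrefMaxBitrate (250 kbps); note that mMinBitrate
      // itself is never capped by mPrefMaxBitrate here.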
    1102             :       // XXX We'd love if this was a live param for testing adaptation/etc
    1103             :       // in automation
    1104           0 :       if (!NS_WARN_IF(NS_FAILED(branch->GetIntPref(
    1105             :             "media.peerconnection.video.min_bitrate_estimate", &temp))))
    1106             :       {
    1107           0 :         if (temp >= 0) {
    1108           0 :           mMinBitrateEstimate = temp; // bps!
    1109             :         }
    1110             :       }
    1111           0 :       if (!NS_WARN_IF(NS_FAILED(branch->GetIntPref(
    1112             :             "media.peerconnection.video.svc.spatial", &temp))))
    1113             :       {
    1114           0 :          if (temp >= 0) {
    1115           0 :             mSpatialLayers = temp;
    1116             :          }
    1117             :       }
    1118           0 :       if (!NS_WARN_IF(NS_FAILED(branch->GetIntPref(
    1119             :             "media.peerconnection.video.svc.temporal", &temp))))
    1120             :       {
    1121           0 :          if (temp >= 0) {
    1122           0 :             mTemporalLayers = temp;
    1123             :          }
    1124             :       }
    1125           0 :       Unused << NS_WARN_IF(NS_FAILED(branch->GetBoolPref(
    1126             :         "media.peerconnection.video.denoising", &mDenoising)));
    1127           0 :       Unused << NS_WARN_IF(NS_FAILED(branch->GetBoolPref(
    1128             :         "media.peerconnection.video.lock_scaling", &mLockScaling)));
    1129             :     }
    1130             :   }
    1131             : #ifdef MOZ_WIDGET_ANDROID
    1132             :   // get the JVM
    1133             :   JavaVM *jvm = jsjni_GetVM();
    1134             : 
    1135             :   if (mozilla::camera::VideoEngine::SetAndroidObjects(jvm) != 0) {
    1136             :     CSFLogError(logTag,  "%s: could not set Android objects", __FUNCTION__);
    1137             :     return kMediaConduitSessionNotInited;
    1138             :   }
    1139             : #endif  //MOZ_WIDGET_ANDROID
    1140           0 :   return kMediaConduitNoError;
    1141             : }
    1142             : 
    1143             : /**
    1144             :  * Performs initialization of the MANDATORY components of the Video Engine
    1145             :  */
    1146             : MediaConduitErrorCode
    1147           0 : WebrtcVideoConduit::Init()
    1148             : {
    1149           0 :   CSFLogDebug(logTag, "%s this=%p", __FUNCTION__, this);
    1150             :   MediaConduitErrorCode result;
    1151             :   // Run code that must run on MainThread first
    1152           0 :   MOZ_ASSERT(NS_IsMainThread());
    1153           0 :   result = InitMain();
    1154           0 :   if (result != kMediaConduitNoError) {
    1155           0 :     return result;
    1156             :   }
    1157             : 
     1158           0 :   CSFLogDebug(logTag, "%s Initialization Done", __FUNCTION__);
    1159           0 :   return kMediaConduitNoError;
    1160             : }
    1161             : 
    1162             : void
    1163           0 : WebrtcVideoConduit::Destroy()
    1164             : {
    1165             :   // We can't delete the VideoEngine until all these are released!
    1166             :   // And we can't use a Scoped ptr, since the order is arbitrary
    1167             : 
    1168           0 :   MutexAutoLock lock(mCodecMutex);
    1169           0 :   DeleteSendStream();
    1170           0 :   DeleteRecvStream();
    1171           0 : }
    1172             : 
    1173             : void
    1174           0 : WebrtcVideoConduit::SyncTo(WebrtcAudioConduit* aConduit)
    1175             : {
    1176           0 :   CSFLogDebug(logTag, "%s Synced to %p", __FUNCTION__, aConduit);
    1177             :   {
    1178           0 :     MutexAutoLock lock(mCodecMutex);
    1179             : 
    1180           0 :     if (!mRecvStream) {
    1181           0 :       CSFLogError(logTag, "SyncTo called with no receive stream");
    1182           0 :       return;
    1183             :     }
    1184             : 
    1185           0 :     if (aConduit) {
    1186           0 :       mRecvStream->SetSyncChannel(aConduit->GetVoiceEngine(),
    1187           0 :                                   aConduit->GetChannel());
    1188           0 :     } else if (mSyncedTo) {
    1189           0 :       mRecvStream->SetSyncChannel(mSyncedTo->GetVoiceEngine(), -1);
    1190             :     }
    1191             :   }
    1192             : 
    1193           0 :   mSyncedTo = aConduit;
    1194             : }
    1195             : 
    1196             : MediaConduitErrorCode
    1197           0 : WebrtcVideoConduit::AttachRenderer(RefPtr<mozilla::VideoRenderer> aVideoRenderer)
    1198             : {
    1199           0 :   CSFLogDebug(logTag, "%s", __FUNCTION__);
    1200             : 
    1201             :   // null renderer
    1202           0 :   if (!aVideoRenderer) {
    1203           0 :     CSFLogError(logTag, "%s NULL Renderer", __FUNCTION__);
    1204           0 :     MOZ_ASSERT(false);
    1205             :     return kMediaConduitInvalidRenderer;
    1206             :   }
    1207             : 
     1208             :   // This function is called only from the main thread, so we only need to protect
     1209             :   // against modifying mRenderer while any webrtc.org code is trying to use it.
    1210             :   {
    1211           0 :     ReentrantMonitorAutoEnter enter(mTransportMonitor);
    1212           0 :     mRenderer = aVideoRenderer;
    1213             :     // Make sure the renderer knows the resolution
    1214           0 :     mRenderer->FrameSizeChange(mReceivingWidth,
    1215           0 :                                mReceivingHeight,
    1216           0 :                                mNumReceivingStreams);
    1217             :   }
    1218             : 
    1219           0 :   return kMediaConduitNoError;
    1220             : }
    1221             : 
    1222             : void
    1223           0 : WebrtcVideoConduit::DetachRenderer()
    1224             : {
    1225             :   {
    1226           0 :     ReentrantMonitorAutoEnter enter(mTransportMonitor);
    1227           0 :     if (mRenderer) {
    1228           0 :       mRenderer = nullptr;
    1229             :     }
    1230             :   }
    1231           0 : }
    1232             : 
    1233             : MediaConduitErrorCode
    1234           0 : WebrtcVideoConduit::SetTransmitterTransport(
    1235             :   RefPtr<TransportInterface> aTransport)
    1236             : {
    1237           0 :   CSFLogDebug(logTag, "%s ", __FUNCTION__);
    1238             : 
    1239           0 :   ReentrantMonitorAutoEnter enter(mTransportMonitor);
    1240             :   // set the transport
    1241           0 :   mTransmitterTransport = aTransport;
    1242           0 :   return kMediaConduitNoError;
    1243             : }
    1244             : 
    1245             : MediaConduitErrorCode
    1246           0 : WebrtcVideoConduit::SetReceiverTransport(RefPtr<TransportInterface> aTransport)
    1247             : {
    1248           0 :   CSFLogDebug(logTag, "%s ", __FUNCTION__);
    1249             : 
    1250           0 :   ReentrantMonitorAutoEnter enter(mTransportMonitor);
    1251             :   // set the transport
    1252           0 :   mReceiverTransport = aTransport;
    1253           0 :   return kMediaConduitNoError;
    1254             : }
    1255             : 
    1256             : MediaConduitErrorCode
    1257           0 : WebrtcVideoConduit::ConfigureRecvMediaCodecs(
    1258             :   const std::vector<VideoCodecConfig* >& codecConfigList)
    1259             : {
    1260           0 :   CSFLogDebug(logTag, "%s ", __FUNCTION__);
    1261           0 :   MediaConduitErrorCode condError = kMediaConduitNoError;
    1262           0 :   std::string payloadName;
    1263             : 
    1264           0 :   if (codecConfigList.empty()) {
    1265           0 :     CSFLogError(logTag, "%s Zero number of codecs to configure", __FUNCTION__);
    1266           0 :     return kMediaConduitMalformedArgument;
    1267             :   }
    1268             : 
    1269           0 :   webrtc::KeyFrameRequestMethod kf_request_method = webrtc::kKeyFrameReqPliRtcp;
    1270           0 :   bool kf_request_enabled = false;
    1271           0 :   bool use_nack_basic = false;
    1272           0 :   bool use_tmmbr = false;
    1273           0 :   bool use_remb = false;
    1274           0 :   bool use_fec = false;
    1275           0 :   int ulpfec_payload_type = kNullPayloadType;
    1276           0 :   int red_payload_type = kNullPayloadType;
    1277           0 :   bool configuredH264 = false;
    1278           0 :   nsTArray<UniquePtr<VideoCodecConfig>> recv_codecs;
    1279             : 
     1280             :   // Try applying the codecs in the list.
     1281             :   // We treat it as success if at least one codec was applied and reception was
     1282             :   // started successfully.
    1283           0 :   for (const auto& codec_config : codecConfigList) {
     1284             :     // if the codec param is invalid or a duplicate, log it and skip that codec
    1285           0 :     if ((condError = ValidateCodecConfig(codec_config, false))
    1286             :         != kMediaConduitNoError) {
    1287           0 :       CSFLogError(logTag, "%s Invalid config for %s decoder: %i", __FUNCTION__,
    1288           0 :                   codec_config->mName.c_str(), condError);
    1289           0 :       continue;
    1290             :     }
    1291             : 
    1292           0 :     if (codec_config->mName == "H264") {
    1293             :       // TODO(bug 1200768): We can only handle configuring one recv H264 codec
    1294           0 :       if (configuredH264) {
    1295           0 :         continue;
    1296             :       }
    1297           0 :       configuredH264 = true;
    1298             :     }
    1299             : 
    1300           0 :     if (codec_config->mName == kUlpFecPayloadName) {
    1301           0 :       ulpfec_payload_type = codec_config->mType;
    1302           0 :       continue;
    1303             :     }
    1304             : 
    1305           0 :     if (codec_config->mName == kRedPayloadName) {
    1306           0 :       red_payload_type = codec_config->mType;
    1307           0 :       continue;
    1308             :     }
    1309             : 
    1310             :     // Check for the keyframe request type: PLI is preferred
    1311             :     // over FIR, and FIR is preferred over none.
    1312             :     // XXX (See upstream issue https://bugs.chromium.org/p/webrtc/issues/detail?id=7002):
    1313             :     // There is no 'none' option in webrtc.org
    1314           0 :     if (codec_config->RtcpFbNackIsSet("pli")) {
    1315           0 :       kf_request_enabled = true;
    1316           0 :       kf_request_method = webrtc::kKeyFrameReqPliRtcp;
    1317           0 :     } else if (!kf_request_enabled && codec_config->RtcpFbCcmIsSet("fir")) {
    1318           0 :       kf_request_enabled = true;
    1319           0 :       kf_request_method = webrtc::kKeyFrameReqFirRtcp;
    1320             :     }
    1321             : 
    1322             :     // What if codec A has Nack and REMB, and codec B has TMMBR, and codec C has none?
    1323             :     // In practice, that's not a useful configuration, and VideoReceiveStream::Config can't
    1324             :     // represent that, so simply union the (boolean) settings
    1325           0 :     use_nack_basic |= codec_config->RtcpFbNackIsSet("");
    1326           0 :     use_tmmbr |= codec_config->RtcpFbCcmIsSet("tmmbr");
    1327           0 :     use_remb |= codec_config->RtcpFbRembIsSet();
    1328           0 :     use_fec |= codec_config->RtcpFbFECIsSet();
    1329             : 
    1330           0 :     recv_codecs.AppendElement(new VideoCodecConfig(*codec_config));
    1331             :   }
    1332             : 
    1333             :   // Now decide if we need to recreate the receive stream, or can keep it
    1334           0 :   if (!mRecvStream ||
    1335           0 :       CodecsDifferent(recv_codecs, mRecvCodecList) ||
    1336           0 :       mRecvStreamConfig.rtp.nack.rtp_history_ms != (use_nack_basic ? 1000 : 0) ||
    1337           0 :       mRecvStreamConfig.rtp.remb != use_remb ||
    1338           0 :       mRecvStreamConfig.rtp.tmmbr != use_tmmbr ||
    1339           0 :       mRecvStreamConfig.rtp.keyframe_method != kf_request_method ||
    1340           0 :       (use_fec &&
    1341           0 :        (mRecvStreamConfig.rtp.ulpfec.ulpfec_payload_type != ulpfec_payload_type ||
    1342           0 :         mRecvStreamConfig.rtp.ulpfec.red_payload_type != red_payload_type))) {
    1343             : 
    1344           0 :     condError = StopReceiving();
    1345           0 :     if (condError != kMediaConduitNoError) {
    1346           0 :       return condError;
    1347             :     }
    1348             : 
    1349             :     // If we fail after here things get ugly
    1350           0 :     mRecvStreamConfig.rtp.rtcp_mode = webrtc::RtcpMode::kCompound;
    1351           0 :     mRecvStreamConfig.rtp.nack.rtp_history_ms = use_nack_basic ? 1000 : 0;
    1352           0 :     mRecvStreamConfig.rtp.remb = use_remb;
    1353           0 :     mRecvStreamConfig.rtp.tmmbr = use_tmmbr;
    1354           0 :     mRecvStreamConfig.rtp.keyframe_method = kf_request_method;
    1355             : 
    1356           0 :     if (use_fec) {
    1357           0 :       mRecvStreamConfig.rtp.ulpfec.ulpfec_payload_type = ulpfec_payload_type;
    1358           0 :       mRecvStreamConfig.rtp.ulpfec.red_payload_type = red_payload_type;
    1359           0 :       mRecvStreamConfig.rtp.ulpfec.red_rtx_payload_type = -1;
    1360             :     }
    1361             : 
    1362             :     // SetRemoteSSRC should have populated this already
    1363           0 :     mRecvSSRC = mRecvStreamConfig.rtp.remote_ssrc;
    1364             : 
    1365             :     // XXX ugh! same SSRC==0 problem that webrtc.org has
    1366           0 :     if (mRecvSSRC == 0) {
     1367             :       // Handle un-signalled SSRCs by creating a random one now; when the real SSRC is
     1368             :       // signalled we'll destroy and recreate the stream.  Simpler than unwinding all the
     1369             :       // logic that assumes the receive stream exists and is started in ConfigureRecvMediaCodecs().
    1370             :       unsigned int ssrc;
    1371           0 :       do {
    1372           0 :         SECStatus rv = PK11_GenerateRandom(reinterpret_cast<unsigned char*>(&ssrc), sizeof(ssrc));
    1373           0 :         if (rv != SECSuccess) {
    1374           0 :           return kMediaConduitUnknownError;
    1375             :         }
    1376           0 :       } while (ssrc == 0); // webrtc.org code has fits if you select an SSRC of 0
    1377             : 
    1378           0 :       mRecvStreamConfig.rtp.remote_ssrc = ssrc;
    1379           0 :       mRecvSSRC = ssrc;
    1380             :     }
    1381             : 
     1382             :     // 0 isn't allowed.  It would be best to ask for a random SSRC from the
     1383             :     // RTP code.  That would mean calling rtp_sender.cc's GenerateNewSSRC(),
     1384             :     // which isn't exposed.  It's called on collision, or when we decide to
     1385             :     // send.  It should also be called on receiver creation.  Here, we're
     1386             :     // generating the SSRC value ourselves - but this causes ssrc_forced to be
     1387             :     // set in rtp_sender, which locks us into the SSRC - even a collision
     1388             :     // won't change it!
    1389           0 :     MOZ_ASSERT(!mSendStreamConfig.rtp.ssrcs.empty());
    1390           0 :     auto ssrc = mSendStreamConfig.rtp.ssrcs.front();
    1391           0 :     Unused << NS_WARN_IF(ssrc == mRecvStreamConfig.rtp.remote_ssrc);
    1392             : 
    1393           0 :     while (ssrc == mRecvStreamConfig.rtp.remote_ssrc || ssrc == 0) {
    1394           0 :       SECStatus rv = PK11_GenerateRandom(reinterpret_cast<unsigned char*>(&ssrc), sizeof(ssrc));
    1395           0 :       if (rv != SECSuccess) {
    1396           0 :         return kMediaConduitUnknownError;
    1397             :       }
    1398             :     }
    1399             :     // webrtc.org code has fits if you select an SSRC of 0
    1400             : 
    1401           0 :     mRecvStreamConfig.rtp.local_ssrc = ssrc;
    1402           0 :     CSFLogDebug(logTag, "%s (%p): Local SSRC 0x%08x (of %u), remote SSRC 0x%08x",
    1403             :                 __FUNCTION__, (void*) this, ssrc,
    1404             :                 (uint32_t) mSendStreamConfig.rtp.ssrcs.size(),
    1405           0 :                 mRecvStreamConfig.rtp.remote_ssrc);
    1406             : 
    1407             :     // XXX Copy over those that are the same and don't rebuild them
    1408           0 :     mRecvCodecList.SwapElements(recv_codecs);
    1409           0 :     recv_codecs.Clear();
    1410           0 :     mRecvStreamConfig.rtp.rtx.clear();
    1411             : 
    1412             :     {
    1413           0 :       MutexAutoLock lock(mCodecMutex);
    1414           0 :       DeleteRecvStream();
    1415             :       // Rebuilds mRecvStream from mRecvStreamConfig
    1416           0 :       MediaConduitErrorCode rval = CreateRecvStream();
    1417           0 :       if (rval != kMediaConduitNoError) {
    1418           0 :         CSFLogError(logTag, "%s Start Receive Error %d ", __FUNCTION__, rval);
    1419           0 :         return rval;
    1420             :       }
    1421             :     }
    1422           0 :     return StartReceiving();
    1423             :   }
    1424           0 :   return kMediaConduitNoError;
    1425             : }
    1426             : 
    1427             : webrtc::VideoDecoder*
    1428           0 : WebrtcVideoConduit::CreateDecoder(webrtc::VideoCodecType aType)
    1429             : {
    1430           0 :   webrtc::VideoDecoder* decoder = nullptr;
    1431             : #ifdef MOZ_WEBRTC_MEDIACODEC
    1432             :   bool enabled = false;
    1433             : #endif
    1434             : 
    1435           0 :   switch (aType) {
    1436             :     case webrtc::VideoCodecType::kVideoCodecH264:
    1437             :       // get an external decoder
    1438             : #ifdef MOZ_WEBRTC_OMX
    1439             :       decoder = OMXVideoCodec::CreateDecoder(OMXVideoCodec::CodecType::CODEC_H264);
    1440             : #else
    1441           0 :       decoder = GmpVideoCodec::CreateDecoder();
    1442             : #endif
    1443           0 :       if (decoder) {
    1444           0 :         mRecvCodecPlugin = static_cast<WebrtcVideoDecoder*>(decoder);
    1445             :       }
    1446           0 :       break;
    1447             : 
    1448             :     case webrtc::VideoCodecType::kVideoCodecVP8:
    1449             : #ifdef MOZ_WEBRTC_MEDIACODEC
    1450             :       // attempt to get a decoder
    1451             :       enabled = mozilla::Preferences::GetBool(
    1452             :         "media.navigator.hardware.vp8_decode.acceleration_enabled", false);
    1453             :       if (enabled) {
    1454             :         nsCOMPtr<nsIGfxInfo> gfxInfo = do_GetService("@mozilla.org/gfx/info;1");
    1455             :         if (gfxInfo) {
    1456             :           int32_t status;
    1457             :           nsCString discardFailureId;
    1458             : 
    1459             :           if (NS_SUCCEEDED(gfxInfo->GetFeatureStatus(
    1460             :                              nsIGfxInfo::FEATURE_WEBRTC_HW_ACCELERATION_DECODE,
    1461             :                              discardFailureId, &status))) {
    1462             : 
    1463             :             if (status != nsIGfxInfo::FEATURE_STATUS_OK) {
    1464             :               NS_WARNING("VP8 decoder hardware is not whitelisted: disabling.\n");
    1465             :             } else {
    1466             :               decoder = MediaCodecVideoCodec::CreateDecoder(
    1467             :                 MediaCodecVideoCodec::CodecType::CODEC_VP8);
    1468             :             }
    1469             :           }
    1470             :         }
    1471             :       }
    1472             : #endif
    1473             :       // Use a software VP8 decoder as a fallback.
    1474           0 :       if (!decoder) {
    1475           0 :         decoder = webrtc::VP8Decoder::Create();
    1476             :       }
    1477           0 :       break;
    1478             : 
    1479             :     case webrtc::VideoCodecType::kVideoCodecVP9:
    1480           0 :       MOZ_ASSERT(webrtc::VP9Decoder::IsSupported());
    1481           0 :       decoder = webrtc::VP9Decoder::Create();
    1482           0 :       break;
    1483             : 
    1484             :     default:
    1485           0 :       break;
    1486             :   }
    1487             : 
    1488           0 :   return decoder;
    1489             : }
    1490             : 
    1491             : webrtc::VideoEncoder*
    1492           0 : WebrtcVideoConduit::CreateEncoder(webrtc::VideoCodecType aType,
    1493             :                                   bool enable_simulcast)
    1494             : {
    1495           0 :   webrtc::VideoEncoder* encoder = nullptr;
    1496             : #ifdef MOZ_WEBRTC_MEDIACODEC
    1497             :   bool enabled = false;
    1498             : #endif
    1499             : 
    1500           0 :   switch (aType) {
    1501             :     case webrtc::VideoCodecType::kVideoCodecH264:
    1502             :       // get an external encoder
    1503             : #ifdef MOZ_WEBRTC_OMX
    1504             :       encoder = OMXVideoCodec::CreateEncoder(OMXVideoCodec::CodecType::CODEC_H264);
    1505             : #else
    1506           0 :       encoder = GmpVideoCodec::CreateEncoder();
    1507             : #endif
    1508           0 :       if (encoder) {
    1509           0 :         mSendCodecPlugin = static_cast<WebrtcVideoEncoder*>(encoder);
    1510             :       }
    1511           0 :       break;
    1512             : 
    1513             :     case webrtc::VideoCodecType::kVideoCodecVP8:
    1514             : #ifdef MOZ_WEBRTC_MEDIACODEC
     1515             :       // attempt to get an encoder
    1516             :       enabled = mozilla::Preferences::GetBool(
    1517             :         "media.navigator.hardware.vp8_encode.acceleration_enabled", false);
    1518             :       if (enabled) {
    1519             :         nsCOMPtr<nsIGfxInfo> gfxInfo = do_GetService("@mozilla.org/gfx/info;1");
    1520             :         if (gfxInfo) {
    1521             :           int32_t status;
    1522             :           nsCString discardFailureId;
    1523             : 
    1524             :           if (NS_SUCCEEDED(gfxInfo->GetFeatureStatus(
    1525             :                            nsIGfxInfo::FEATURE_WEBRTC_HW_ACCELERATION_ENCODE,
    1526             :                            discardFailureId, &status))) {
    1527             : 
    1528             :             if (status != nsIGfxInfo::FEATURE_STATUS_OK) {
    1529             :               NS_WARNING("VP8 encoder hardware is not whitelisted: disabling.\n");
    1530             :             } else {
    1531             :               encoder = MediaCodecVideoCodec::CreateEncoder(
    1532             :                                           MediaCodecVideoCodec::CodecType::CODEC_VP8);
    1533             :             }
    1534             :           }
    1535             :         }
    1536             :       }
    1537             : #endif
    1538             :       // Use a software VP8 encoder as a fallback.
    1539           0 :       if (!encoder) {
    1540           0 :         encoder = webrtc::VP8Encoder::Create();
    1541             :       }
    1542           0 :       break;
    1543             : 
    1544             :     case webrtc::VideoCodecType::kVideoCodecVP9:
    1545           0 :       encoder = webrtc::VP9Encoder::Create();
    1546           0 :       break;
    1547             : 
    1548             :     default:
    1549           0 :       break;
    1550             :   }
    1551           0 :   return encoder;
    1552             : }
    1553             : 
    1554             : struct ResolutionAndBitrateLimits
    1555             : {
    1556             :   int resolution_in_mb;
    1557             :   int min_bitrate_bps;
    1558             :   int start_bitrate_bps;
    1559             :   int max_bitrate_bps;
    1560             : };
    1561             : 
    1562             : #define MB_OF(w,h) ((unsigned int)((((w+15)>>4))*((unsigned int)((h+15)>>4))))
    1563             : // For now, try to set the max rates well above the knee in the curve.
    1564             : // Chosen somewhat arbitrarily; it's hard to find good data oriented for
    1565             : // realtime interactive/talking-head recording.  These rates assume
    1566             : // 30fps.
    1567             : 
     1568             : // XXX Populate this from a pref (and consider sorting the entries, since people
     1569             : // won't assume they need to keep them ordered).
    1570             : static ResolutionAndBitrateLimits kResolutionAndBitrateLimits[] = {
    1571             :   {MB_OF(1920, 1200), KBPS(1500), KBPS(2000), KBPS(10000)}, // >HD (3K, 4K, etc)
    1572             :   {MB_OF(1280, 720), KBPS(1200), KBPS(1500), KBPS(5000)}, // HD ~1080-1200
    1573             :   {MB_OF(800, 480), KBPS(600), KBPS(800), KBPS(2500)}, // HD ~720
    1574             :   {tl::Max<MB_OF(400, 240), MB_OF(352, 288)>::value, KBPS(200), KBPS(300), KBPS(1300)}, // VGA, WVGA
    1575             :   {MB_OF(176, 144), KBPS(100), KBPS(150), KBPS(500)}, // WQVGA, CIF
    1576             :   {0 , KBPS(40), KBPS(80), KBPS(250)} // QCIF and below
    1577             : };
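// Editor's note (illustrative arithmetic): MB_OF(1280, 720) = 80 * 45 = 3600
// macroblocks.  Because the lookup in SelectBitrates requires fs to be *strictly*
// greater than a row's threshold, a 1280x720 source falls into the MB_OF(800, 480)
// row, i.e. 600/800/2500 kbps (min/start/max) before framerate scaling and the
// negotiated/pref caps are applied.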
    1578             : 
    1579             : void
    1580           0 : WebrtcVideoConduit::SelectBitrates(
    1581             :   unsigned short width, unsigned short height, int cap,
    1582             :   int32_t aLastFramerateTenths,
    1583             :   webrtc::VideoStream& aVideoStream)
    1584             : {
    1585           0 :   int& out_min = aVideoStream.min_bitrate_bps;
    1586           0 :   int& out_start = aVideoStream.target_bitrate_bps;
    1587           0 :   int& out_max = aVideoStream.max_bitrate_bps;
     1588             :   // Max bandwidth should scale with resolution (though not linearly), and
     1589             :   // roughly linearly with the current frame rate.
    1590           0 :   int fs = MB_OF(width, height);
    1591             : 
    1592           0 :   for (ResolutionAndBitrateLimits resAndLimits : kResolutionAndBitrateLimits) {
    1593           0 :     if (fs > resAndLimits.resolution_in_mb &&
    1594             :         // pick the highest range where at least start rate is within cap
    1595             :         // (or if we're at the end of the array).
    1596           0 :         (!cap || resAndLimits.start_bitrate_bps <= cap ||
    1597           0 :          resAndLimits.resolution_in_mb == 0)) {
    1598           0 :       out_min = MinIgnoreZero(resAndLimits.min_bitrate_bps, cap);
    1599           0 :       out_start = MinIgnoreZero(resAndLimits.start_bitrate_bps, cap);
    1600           0 :       out_max = MinIgnoreZero(resAndLimits.max_bitrate_bps, cap);
    1601           0 :       break;
    1602             :     }
    1603             :   }
    1604             : 
     1605             :   // aLastFramerateTenths is the frame rate scaled by 10 (tenths of a frame per second)
    1606           0 :   double framerate = std::min((aLastFramerateTenths / 10.), 60.0);
    1607           0 :   MOZ_ASSERT(framerate > 0);
    1608             :   // Now linear reduction/increase based on fps (max 60fps i.e. doubling)
    1609           0 :   if (framerate >= 10) {
    1610           0 :     out_min = out_min * (framerate / 30);
    1611           0 :     out_start = out_start * (framerate / 30);
    1612           0 :     out_max = std::max(static_cast<int>(out_max * (framerate / 30)), cap);
    1613             :   } else {
    1614             :     // At low framerates, don't reduce bandwidth as much - cut slope to 1/2.
    1615             :     // Mostly this would be ultra-low-light situations/mobile or screensharing.
    1616           0 :     out_min = out_min * ((10 - (framerate / 2)) / 30);
    1617           0 :     out_start = out_start * ((10 - (framerate / 2)) / 30);
    1618           0 :     out_max = std::max(static_cast<int>(out_max * ((10 - (framerate / 2)) / 30)), cap);
    1619             :   }
    1620             : 
    1621             :   // Note: mNegotiatedMaxBitrate is the max transport bitrate - it applies to
    1622             :   // a single codec encoding, but should also apply to the sum of all
    1623             :   // simulcast layers in this encoding!  So sum(layers.maxBitrate) <=
    1624             :   // mNegotiatedMaxBitrate
    1625             :   // Note that out_max already has had mPrefMaxBitrate applied to it
    1626           0 :   out_max = MinIgnoreZero((int)mNegotiatedMaxBitrate, out_max);
    1627           0 :   out_min = std::min(out_min, out_max);
    1628           0 :   out_start = std::min(out_start, out_max);
    1629             : 
    1630           0 :   if (mMinBitrate && mMinBitrate > out_min) {
    1631           0 :     out_min = mMinBitrate;
    1632             :   }
    1633             :   // If we try to set a minimum bitrate that is too low, ViE will reject it.
    1634           0 :   out_min = std::max(kViEMinCodecBitrate_bps, out_min);
    1635           0 :   if (mStartBitrate && mStartBitrate > out_start) {
    1636           0 :     out_start = mStartBitrate;
    1637             :   }
    1638           0 :   out_start = std::max(out_start, out_min);
    1639             : 
    1640           0 :   MOZ_ASSERT(mPrefMaxBitrate == 0 || out_max <= mPrefMaxBitrate);
    1641           0 : }
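// Editor's note (illustrative arithmetic): continuing the 1280x720 example with
// no cap, the table yields 600/800/2500 kbps; at 15 fps the scaling above halves
// those to 300/400/1250 kbps before mNegotiatedMaxBitrate, mMinBitrate,
// kViEMinCodecBitrate_bps and mStartBitrate are applied.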
    1642             : 
    1643             : // XXX we need to figure out how to feed back changes in preferred capture
    1644             : // resolution to the getUserMedia source.
     1645             : // Returns true if we've submitted an async change (and taken ownership
     1646             : // of *frame's data)
    1647             : bool
    1648           0 : WebrtcVideoConduit::SelectSendResolution(unsigned short width,
    1649             :                                          unsigned short height,
    1650             :                                          webrtc::VideoFrame* frame) // may be null
    1651             : {
    1652           0 :   mCodecMutex.AssertCurrentThreadOwns();
    1653             :   // XXX This will do bandwidth-resolution adaptation as well - bug 877954
    1654             : 
    1655           0 :   mLastWidth = width;
    1656           0 :   mLastHeight = height;
    1657             :   // Enforce constraints
    1658           0 :   if (mCurSendCodecConfig) {
    1659           0 :     uint16_t max_width = mCurSendCodecConfig->mEncodingConstraints.maxWidth;
    1660           0 :     uint16_t max_height = mCurSendCodecConfig->mEncodingConstraints.maxHeight;
    1661           0 :     if (max_width || max_height) {
    1662           0 :       max_width = max_width ? max_width : UINT16_MAX;
    1663           0 :       max_height = max_height ? max_height : UINT16_MAX;
    1664           0 :       ConstrainPreservingAspectRatio(max_width, max_height, &width, &height);
    1665             :     }
    1666             : 
    1667             :     // Limit resolution to max-fs while keeping same aspect ratio as the
    1668             :     // incoming image.
    1669           0 :     if (mCurSendCodecConfig->mEncodingConstraints.maxFs) {
    1670           0 :       uint32_t max_fs = mCurSendCodecConfig->mEncodingConstraints.maxFs;
    1671             :       unsigned int cur_fs, mb_width, mb_height, mb_max;
    1672             : 
    1673             :       // Could we make this simpler by picking the larger of width and height,
    1674             :       // calculating a max for just that value based on the scale parameter,
     1675             :       // and then letting ConstrainPreservingAspectRatio do the rest?
    1676           0 :       mb_width = (width + 15) >> 4;
    1677           0 :       mb_height = (height + 15) >> 4;
    1678             : 
    1679           0 :       cur_fs = mb_width * mb_height;
    1680             : 
    1681             :       // Limit resolution to max_fs, but don't scale up.
    1682           0 :       if (cur_fs > max_fs) {
    1683             :         double scale_ratio;
    1684             : 
    1685           0 :         scale_ratio = sqrt((double)max_fs / (double)cur_fs);
    1686             : 
    1687           0 :         mb_width = mb_width * scale_ratio;
    1688           0 :         mb_height = mb_height * scale_ratio;
    1689             : 
    1690             :         // Adjust mb_width and mb_height if they were truncated to zero.
    1691           0 :         if (mb_width == 0) {
    1692           0 :           mb_width = 1;
    1693           0 :           mb_height = std::min(mb_height, max_fs);
    1694             :         }
    1695           0 :         if (mb_height == 0) {
    1696           0 :           mb_height = 1;
    1697           0 :           mb_width = std::min(mb_width, max_fs);
    1698             :         }
    1699             :       }
    1700             : 
     1701             :       // Limit width/height separately to limit the effect of extreme aspect ratios.
    1702           0 :       mb_max = (unsigned)sqrt(8 * (double)max_fs);
    1703             : 
    1704           0 :       max_width = 16 * std::min(mb_width, mb_max);
    1705           0 :       max_height = 16 * std::min(mb_height, mb_max);
    1706           0 :       ConstrainPreservingAspectRatio(max_width, max_height, &width, &height);
    1707             :     }
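    // Editor's note (illustrative arithmetic, assuming a signalled max-fs of 240):
    // a 1280x720 input gives 80x45 = 3600 MB > 240, so scale_ratio =
    // sqrt(240/3600) ~= 0.258, mb_width/mb_height truncate to 20x11, mb_max =
    // (unsigned)sqrt(8 * 240) = 43, and the frame is then constrained to fit
    // within 16*20 x 16*11 = 320x176 while preserving its aspect ratio.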
    1708             :   }
    1709             : 
    1710             : 
     1711             :   // Adapt to getUserMedia resolution changes and check whether we need to
     1712             :   // reconfigure the sending resolution.
     1713             :   // NOTE: mSendingWidth may differ from mLastWidth because of the max width/height/fs constraints above.
    1714           0 :   bool changed = false;
    1715           0 :   if (mSendingWidth != width || mSendingHeight != height) {
    1716           0 :     CSFLogDebug(logTag, "%s: resolution changing to %ux%u (from %ux%u)",
    1717           0 :                 __FUNCTION__, width, height, mSendingWidth, mSendingHeight);
    1718             :     // This will avoid us continually retrying this operation if it fails.
    1719             :     // If the resolution changes, we'll try again.  In the meantime, we'll
    1720             :     // keep using the old size in the encoder.
    1721           0 :     mSendingWidth = width;
    1722           0 :     mSendingHeight = height;
    1723           0 :     changed = true;
    1724             :   }
    1725             : 
    1726           0 :   unsigned int framerate = SelectSendFrameRate(mCurSendCodecConfig,
    1727             :                                                mSendingFramerate,
    1728           0 :                                                mSendingWidth,
    1729           0 :                                                mSendingHeight);
    1730           0 :   if (mSendingFramerate != framerate) {
    1731           0 :     CSFLogDebug(logTag, "%s: framerate changing to %u (from %u)",
    1732           0 :                 __FUNCTION__, framerate, mSendingFramerate);
    1733           0 :     mSendingFramerate = framerate;
    1734           0 :     changed = true;
    1735             :   }
    1736             : 
    1737           0 :   if (changed) {
    1738             :     // On a resolution change, bounce this to the correct thread to
     1739             :     // re-configure (same thread as used for Init()).  Do *not* block the calling
    1740             :     // thread since that may be the MSG thread.
    1741             : 
    1742             :     // MUST run on the same thread as Init()/etc
    1743           0 :     if (!NS_IsMainThread()) {
    1744             :       // Note: on *initial* config (first frame), best would be to drop
    1745             :       // frames until the config is done, then encode the most recent frame
    1746             :       // provided and continue from there.  We don't do this, but we do drop
    1747             :       // all frames while in the process of a reconfig and then encode the
    1748             :       // frame that started the reconfig, which is close.  There may be
    1749             :       // barely perceptible glitch in the video due to the dropped frame(s).
    1750           0 :       mInReconfig = true;
    1751             : 
    1752             :       // We can't pass a UniquePtr<> or unique_ptr<> to a lambda directly
    1753           0 :       webrtc::VideoFrame* new_frame = nullptr;
    1754           0 :       if (frame) {
    1755             :         // the internal buffer pointer is refcounted, so we don't have 2 copies here
    1756           0 :         new_frame = new webrtc::VideoFrame(*frame);
    1757             :       }
    1758           0 :       RefPtr<WebrtcVideoConduit> self(this);
    1759             :       RefPtr<Runnable> webrtc_runnable =
    1760           0 :         media::NewRunnableFrom([self, width, height, new_frame]() -> nsresult {
    1761           0 :             UniquePtr<webrtc::VideoFrame> local_frame(new_frame); // Simplify cleanup
    1762             : 
    1763           0 :             MutexAutoLock lock(self->mCodecMutex);
    1764           0 :             return self->ReconfigureSendCodec(width, height, new_frame);
    1765           0 :           });
    1766             :       // new_frame now owned by lambda
     1767           0 :       CSFLogDebug(logTag, "%s: proxying lambda to WebRTC thread for reconfig (width %u/%u, height %u/%u)",
    1768           0 :                   __FUNCTION__, width, mLastWidth, height, mLastHeight);
    1769           0 :       NS_DispatchToMainThread(webrtc_runnable.forget());
    1770           0 :       if (new_frame) {
    1771           0 :         return true; // queued it
    1772             :       }
    1773             :     } else {
    1774             :       // already on the right thread
    1775           0 :       ReconfigureSendCodec(width, height, frame);
    1776             :     }
    1777             :   }
    1778           0 :   return false;
    1779             : }
    1780             : 
    1781             : nsresult
    1782           0 : WebrtcVideoConduit::ReconfigureSendCodec(unsigned short width,
    1783             :                                          unsigned short height,
    1784             :                                          webrtc::VideoFrame* frame)
    1785             : {
    1786           0 :   mCodecMutex.AssertCurrentThreadOwns();
    1787             : 
    1788             :   // Test in case the stream hasn't started yet!  We could get a frame in
    1789             :   // before we get around to StartTransmitting(), and that would dispatch a
    1790             :   // runnable to call this.
    1791           0 :   mInReconfig = false;
    1792           0 :   if (mSendStream) {
    1793           0 :     mSendStream->ReconfigureVideoEncoder(mEncoderConfig.CopyConfig());
    1794           0 :     if (frame) {
    1795           0 :       mVideoBroadcaster.OnFrame(*frame);
    1796           0 :       CSFLogDebug(logTag, "%s Inserted a frame from reconfig lambda", __FUNCTION__);
    1797             :     }
    1798             :   }
    1799           0 :   return NS_OK;
    1800             : }
    1801             : 
    1802             : unsigned int
    1803           0 : WebrtcVideoConduit::SelectSendFrameRate(const VideoCodecConfig* codecConfig,
    1804             :                                         unsigned int old_framerate,
    1805             :                                         unsigned short sending_width,
    1806             :                                         unsigned short sending_height) const
    1807             : {
    1808           0 :   unsigned int new_framerate = old_framerate;
    1809             : 
    1810             :   // Limit frame rate based on max-mbps
    1811           0 :   if (codecConfig && codecConfig->mEncodingConstraints.maxMbps)
    1812             :   {
    1813             :     unsigned int cur_fs, mb_width, mb_height;
    1814             : 
    1815           0 :     mb_width = (sending_width + 15) >> 4;
    1816           0 :     mb_height = (sending_height + 15) >> 4;
    1817             : 
    1818           0 :     cur_fs = mb_width * mb_height;
    1819           0 :     if (cur_fs > 0) { // in case no frames have been sent
    1820           0 :       new_framerate = codecConfig->mEncodingConstraints.maxMbps / cur_fs;
    1821             : 
    1822           0 :       new_framerate = MinIgnoreZero(new_framerate, codecConfig->mEncodingConstraints.maxFps);
    1823             :     }
    1824             :   }
    1825           0 :   return new_framerate;
    1826             : }
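// Editor's note (illustrative arithmetic): for a 1280x720 send resolution,
// cur_fs = 80 * 45 = 3600 MB; a signalled max-mbps of 108000 (the H.264 level
// 3.1 figure) therefore limits the frame rate to 108000 / 3600 = 30 fps,
// further capped by max-fps if that constraint is present.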
    1827             : 
    1828             : MediaConduitErrorCode
    1829           0 : WebrtcVideoConduit::SendVideoFrame(unsigned char* video_buffer,
    1830             :                                    unsigned int video_length,
    1831             :                                    unsigned short width,
    1832             :                                    unsigned short height,
    1833             :                                    VideoType video_type,
    1834             :                                    uint64_t capture_time)
    1835             : {
    1836             : 
    1837             :   // check for parameter sanity
    1838           0 :   if (!video_buffer || video_length == 0 || width == 0 || height == 0) {
    1839           0 :     CSFLogError(logTag, "%s Invalid Parameters ", __FUNCTION__);
    1840           0 :     MOZ_ASSERT(false);
    1841             :     return kMediaConduitMalformedArgument;
    1842             :   }
    1843           0 :   MOZ_ASSERT(video_type == VideoType::kVideoI420);
    1844             : 
    1845             :   // Transmission should be enabled before we insert any frames.
    1846           0 :   if (!mEngineTransmitting) {
    1847           0 :     CSFLogError(logTag, "%s Engine not transmitting ", __FUNCTION__);
    1848           0 :     return kMediaConduitSessionNotInited;
    1849             :   }
    1850             : 
     1851             :   // Insert the frame into the video engine in I420 format only
    1852           0 :   const int stride_y = width;
    1853           0 :   const int stride_uv = (width + 1) / 2;
    1854             : 
    1855           0 :   const uint8_t* buffer_y = video_buffer;
    1856           0 :   const uint8_t* buffer_u = buffer_y + stride_y * height;
    1857           0 :   const uint8_t* buffer_v = buffer_u + stride_uv * ((height + 1) / 2);
    1858           0 :   rtc::Callback0<void> callback_unused;
    1859             :   rtc::scoped_refptr<webrtc::WrappedI420Buffer> video_frame_buffer(
    1860             :     new rtc::RefCountedObject<webrtc::WrappedI420Buffer>(
    1861             :       width, height,
    1862             :       buffer_y, stride_y,
    1863             :       buffer_u, stride_uv,
    1864             :       buffer_v, stride_uv,
    1865           0 :       callback_unused));
    1866             : 
    1867             :   webrtc::VideoFrame video_frame(video_frame_buffer, capture_time,
    1868           0 :                                  capture_time, webrtc::kVideoRotation_0); // XXX
    1869             : 
    1870           0 :   return SendVideoFrame(video_frame);
    1871             : }
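// Editor's note (illustrative arithmetic): for a 640x480 I420 frame, stride_y =
// 640 and stride_uv = 320; buffer_u starts at buffer_y + 640*480 = +307200 and
// buffer_v at buffer_u + 320*240 = +76800, for a total of 640*480*3/2 = 460800
// bytes, which is the size video_length is expected to describe.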
    1872             : 
    1873             : void
    1874           0 : WebrtcVideoConduit::AddOrUpdateSink(
    1875             :   rtc::VideoSinkInterface<webrtc::VideoFrame>* sink,
    1876             :   const rtc::VideoSinkWants& wants)
    1877             : {
    1878           0 :   CSFLogDebug(logTag, "%s (send SSRC %u (0x%x)) - wants pixels = %d/%d", __FUNCTION__,
    1879             :               mSendStreamConfig.rtp.ssrcs.front(), mSendStreamConfig.rtp.ssrcs.front(),
    1880             :               wants.max_pixel_count ? *wants.max_pixel_count : -1,
    1881           0 :               wants.max_pixel_count_step_up ? *wants.max_pixel_count_step_up : -1);
    1882             : 
    1883             :   // MUST run on the same thread as first call (MainThread)
    1884           0 :   if (!NS_IsMainThread()) {
    1885             :     // This can be asynchronous
    1886           0 :     RefPtr<WebrtcVideoConduit> self(this);
    1887           0 :     NS_DispatchToMainThread(media::NewRunnableFrom([self, sink, wants]() {
    1888           0 :           self->mVideoBroadcaster.AddOrUpdateSink(sink, wants);
    1889           0 :           self->OnSinkWantsChanged(self->mVideoBroadcaster.wants());
    1890           0 :           return NS_OK;
    1891           0 :         }));
    1892             :   } else {
    1893           0 :     mVideoBroadcaster.AddOrUpdateSink(sink, wants);
    1894           0 :     OnSinkWantsChanged(mVideoBroadcaster.wants());
    1895             :   }
    1896           0 : }
    1897             : 
    1898             : void
    1899           0 : WebrtcVideoConduit::RemoveSink(
    1900             :   rtc::VideoSinkInterface<webrtc::VideoFrame>* sink)
    1901             : {
    1902           0 :   mVideoBroadcaster.RemoveSink(sink);
    1903           0 :   OnSinkWantsChanged(mVideoBroadcaster.wants());
    1904           0 : }
    1905             : 
    1906             : void
    1907           0 : WebrtcVideoConduit::OnSinkWantsChanged(
    1908             :   const rtc::VideoSinkWants& wants) {
    1909           0 :   NS_ASSERTION(NS_IsMainThread(), "Only call on main thread");
    1910           0 :   if (!mLockScaling) {
    1911           0 :     mVideoAdapter.OnResolutionRequest(wants.max_pixel_count,
    1912           0 :                                       wants.max_pixel_count_step_up);
    1913             :   }
    1914           0 : }
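#if 0
// Editor's illustrative sketch (not part of this file): a minimal sink that
// could be registered through AddOrUpdateSink() above.  It relies only on the
// rtc::VideoSinkInterface<webrtc::VideoFrame> contract already used here; the
// class name and the usage below are hypothetical.
class FrameCountingSink : public rtc::VideoSinkInterface<webrtc::VideoFrame>
{
public:
  // Invoked by the broadcaster for every frame delivered to this sink.
  void OnFrame(const webrtc::VideoFrame& aFrame) override { ++mFrames; }
  uint64_t Frames() const { return mFrames; }
private:
  uint64_t mFrames = 0;
};
// Hypothetical usage:
//   FrameCountingSink sink;
//   rtc::VideoSinkWants wants;  // default wants: no pixel-count limits
//   conduit->AddOrUpdateSink(&sink, wants);
//   ...
//   conduit->RemoveSink(&sink);
#endif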
    1915             : 
    1916             : MediaConduitErrorCode
    1917           0 : WebrtcVideoConduit::SendVideoFrame(webrtc::VideoFrame& frame)
    1918             : {
    1919             :   // XXX Google uses a "timestamp_aligner" to translate timestamps from the
    1920             :   // camera via TranslateTimestamp(); we should look at doing the same.  This
     1921             : // avoids sampling error when capturing frames, but Google had to deal with some
     1922             : // broken cameras, including Logitech C920s, IIRC.
    1923             : 
    1924           0 :   CSFLogVerbose(logTag, "%s (send SSRC %u (0x%x))", __FUNCTION__,
    1925           0 :               mSendStreamConfig.rtp.ssrcs.front(), mSendStreamConfig.rtp.ssrcs.front());
    1926             :   // See if we need to recalculate what we're sending.
    1927             :   // Don't compute mSendingWidth/Height, since those may not be the same as the input.
    1928             :   {
    1929           0 :     MutexAutoLock lock(mCodecMutex);
    1930           0 :     if (mInReconfig) {
    1931             :       // Waiting for it to finish
    1932           0 :       return kMediaConduitNoError;
    1933             :     }
    1934           0 :     if (frame.width() != mLastWidth || frame.height() != mLastHeight) {
    1935           0 :       CSFLogVerbose(logTag, "%s: call SelectSendResolution with %ux%u",
    1936           0 :                     __FUNCTION__, frame.width(), frame.height());
    1937           0 :       if (SelectSendResolution(frame.width(), frame.height(), &frame)) {
     1938             :         // SelectSendResolution took ownership of the frame's data.
    1939             :         // Submit the frame after reconfig is done
    1940           0 :         return kMediaConduitNoError;
    1941             :       }
    1942             :     }
    1943             :     // adapt input video to wants of sink
    1944           0 :     if (!mVideoBroadcaster.frame_wanted()) {
    1945           0 :       return kMediaConduitNoError;
    1946             :     }
    1947             : 
    1948             :     int adapted_width;
    1949             :     int adapted_height;
    1950             :     int crop_width;
    1951             :     int crop_height;
    1952             :     int crop_x;
    1953             :     int crop_y;
    1954           0 :     if (!mVideoAdapter.AdaptFrameResolution(
    1955             :           frame.width(), frame.height(),
    1956           0 :           frame.timestamp_us() * rtc::kNumNanosecsPerMicrosec,
    1957             :           &crop_width, &crop_height, &adapted_width, &adapted_height)) {
    1958             :       // VideoAdapter dropped the frame.
    1959           0 :       return kMediaConduitNoError;
    1960             :     }
    1961           0 :     crop_x = (frame.width() - crop_width) / 2;
    1962           0 :     crop_y = (frame.height() - crop_height) / 2;
    1963             : 
    1964           0 :     rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer;
    1965           0 :     if (adapted_width == frame.width() && adapted_height == frame.height()) {
    1966             :       // No adaptation - optimized path.
    1967           0 :       buffer = frame.video_frame_buffer();
    1968             :       // XXX Bug 1367651 - Use nativehandles where possible instead of software scaling
    1969             : #ifdef WEBRTC_MAC
    1970             : #if defined(MAC_OS_X_VERSION_10_8) && \
    1971             :   (MAC_OS_X_VERSION_MIN_REQUIRED >= MAC_OS_X_VERSION_10_8)
    1972             :       // XXX not available in Mac 10.7 SDK
    1973             :       // code adapted from objvideotracksource.mm
    1974             :     } else if (frame.nativeHandle) {
    1975             :       // Adapted CVPixelBuffer frame.
    1976             :       buffer = new rtc::RefCountedObject<CoreVideoFrameBuffer>(
    1977             :         static_cast<CVPixelBufferRef>(frame.nativeHandle), adapted_width, adapted_height,
    1978             :         crop_width, crop_height, crop_x, crop_y);
    1979             : #endif
    1980             : #elif WEBRTC_WIN
    1981             :       // XX FIX
    1982             : #elif WEBRTC_LINUX
    1983             :       // XX FIX
    1984             : #elif WEBRTC_ANDROID
    1985             :       // XX FIX
    1986             : #endif
    1987             :     } else {
    1988             :       // Adapted I420 frame.
    1989             :       // TODO(magjed): Optimize this I420 path.
    1990             :       rtc::scoped_refptr<webrtc::I420Buffer> i420_buffer =
    1991           0 :         webrtc::I420Buffer::Create(adapted_width, adapted_height);
    1992           0 :       i420_buffer->CropAndScaleFrom(*frame.video_frame_buffer(), crop_x, crop_y, crop_width, crop_height);
    1993           0 :       buffer = i420_buffer;
    1994             :     }
    1995             : 
    1996             : #if 0
    1997             :     // Applying rotation is only supported for legacy reasons and performance is
    1998             :     // not critical here.
    1999             :     // XXX We're rotating at capture time; if we want to change that we'll need to
    2000             :     // rotate at input to any sink that can't handle rotated frames internally. We
    2001             :     // probably wouldn't need to rotate here unless the CVO extension wasn't agreed to.
    2002             :     // That state (CVO) would feed apply_rotation()
    2003             :     webrtc::VideoRotation rotation = static_cast<webrtc::VideoRotation>(frame.rotation);
    2004             :     if (apply_rotation() && rotation != kVideoRotation_0) {
    2005             :       buffer = I420Buffer::Rotate(*buffer->NativeToI420Buffer(), rotation);
    2006             :       rotation = kVideoRotation_0;
    2007             :     }
    2008             : #endif
    2009             : 
    2010           0 :     mVideoBroadcaster.OnFrame(webrtc::VideoFrame(buffer, webrtc::kVideoRotation_0,
    2011           0 :                                                  /*rotation, translated_*/ frame.timestamp_us()));
    2012             :   }
    2013             : 
    2014           0 :   mSendStreamStats.FrameDeliveredToEncoder();
    2015           0 :   return kMediaConduitNoError;
    2016             : }
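
As a worked example of the centered-crop arithmetic in SendVideoFrame() above, here is a
minimal sketch using hypothetical dimensions (the numbers are illustration only, not taken
from this file):

    // If the adapter asks for a 960x720 crop out of a 1280x720 input, the crop
    // rectangle is centered by splitting the leftover pixels evenly per axis.
    const int inputWidth = 1280, inputHeight = 720;   // captured frame (example)
    const int cropWidth  = 960,  cropHeight  = 720;   // chosen by AdaptFrameResolution
    const int cropX = (inputWidth  - cropWidth)  / 2; // (1280 - 960) / 2 = 160
    const int cropY = (inputHeight - cropHeight) / 2; // (720  - 720) / 2 = 0
    // The buffer is then cropped/scaled with I420Buffer::CropAndScaleFrom(), unless the
    // adapted size equals the input size, in which case the original buffer is reused.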
    2017             : 
    2018             : // Transport Layer Callbacks
    2019             : 
    2020             : MediaConduitErrorCode
    2021           0 : WebrtcVideoConduit::DeliverPacket(const void* data, int len)
    2022             : {
    2023             :   // Media Engine should be receiving already.
    2024           0 :   if (!mCall) {
    2025           0 :     CSFLogError(logTag, "Error: %s when not receiving", __FUNCTION__);
    2026           0 :     return kMediaConduitSessionNotInited;
    2027             :   }
    2028             : 
    2029             :   // XXX we need to be passed the time the packet was received
    2030             :   webrtc::PacketReceiver::DeliveryStatus status =
    2031           0 :     mCall->Call()->Receiver()->DeliverPacket(webrtc::MediaType::VIDEO,
    2032             :                                              static_cast<const uint8_t*>(data),
    2033           0 :                                              len, webrtc::PacketTime());
    2034             : 
    2035           0 :   if (status != webrtc::PacketReceiver::DELIVERY_OK) {
    2036           0 :     CSFLogError(logTag, "%s DeliverPacket Failed, %d", __FUNCTION__, status);
    2037           0 :     return kMediaConduitRTPProcessingFailed;
    2038             :   }
    2039             : 
    2040           0 :   return kMediaConduitNoError;
    2041             : }
    2042             : 
    2043             : MediaConduitErrorCode
    2044           0 : WebrtcVideoConduit::ReceivedRTPPacket(const void* data, int len, uint32_t ssrc)
    2045             : {
    2046             :   // Handle the unknown ssrc (and ssrc-not-signaled case).
    2047             :   // We can't just do this here; it has to happen on MainThread :-(
    2048             :   // We also don't want to drop the packet, nor stall this thread, so we hold
    2049             :   // the packet (and any following) for inserting once the SSRC is set.
    2050           0 :   bool queue = mRecvSSRCSetInProgress;
    2051           0 :   if (queue || mRecvSSRC != ssrc) {
    2052             :     // capture packet for insertion after ssrc is set -- do this before
    2053             :     // sending the runnable, since it may pull from this.  Since it
    2054             :     // dispatches back to us, it's less critical to do this here, but doesn't
    2055             :     // hurt.
    2056           0 :     UniquePtr<QueuedPacket> packet((QueuedPacket*) malloc(sizeof(QueuedPacket) + len-1));
    2057           0 :     packet->mLen = len;
    2058           0 :     memcpy(packet->mData, data, len);
    2059           0 :     CSFLogDebug(logTag, "queuing packet: seq# %u, Len %d ",
    2060           0 :                 (uint16_t)ntohs(((uint16_t*) packet->mData)[1]), packet->mLen);
    2061           0 :     if (queue) {
    2062           0 :       mQueuedPackets.AppendElement(Move(packet));
    2063           0 :       return kMediaConduitNoError;
    2064             :     }
    2065             :     // a new switch needs to be done
    2066             :     // any queued packets are from a previous switch that hasn't completed
    2067             :     // yet; drop them and only process the latest SSRC
    2068           0 :     mQueuedPackets.Clear();
    2069           0 :     mQueuedPackets.AppendElement(Move(packet));
    2070             : 
    2071           0 :     CSFLogDebug(logTag, "%s: switching from SSRC %u to %u", __FUNCTION__,
    2072           0 :                 mRecvSSRC, ssrc);
    2073             :     // we "switch" here immediately, but buffer until the queue is released
    2074           0 :     mRecvSSRC = ssrc;
    2075           0 :     mRecvSSRCSetInProgress = true;
    2076           0 :     queue = true;
    2077             : 
    2078             :     // Ensure the lambda captures refs
    2079           0 :     RefPtr<WebrtcVideoConduit> self = this;
    2080           0 :     nsCOMPtr<nsIThread> thread;
    2081           0 :     if (NS_WARN_IF(NS_FAILED(NS_GetCurrentThread(getter_AddRefs(thread))))) {
    2082           0 :       return kMediaConduitRTPProcessingFailed;
    2083             :     }
    2084           0 :     NS_DispatchToMainThread(media::NewRunnableFrom([self, thread, ssrc]() mutable {
    2085             :           // Normally this is done in CreateOrUpdateMediaPipeline() for
    2086             :           // initial creation and renegotiation, but here we're rebuilding the
    2087             :           // Receive channel at a lower level.  This is needed whenever we're
    2088             :           // creating a GMPVideoCodec (in particular, H264) so it can communicate
    2089             :           // errors to the PC.
    2090           0 :           WebrtcGmpPCHandleSetter setter(self->mPCHandle);
    2091           0 :           self->SetRemoteSSRC(ssrc); // this will likely re-create the VideoReceiveStream
    2092             :           // We want to unblock the queued packets on the original thread
    2093           0 :           thread->Dispatch(media::NewRunnableFrom([self, ssrc]() mutable {
    2094           0 :                 if (ssrc == self->mRecvSSRC) {
    2095             :                   // SSRC is set; insert queued packets
    2096           0 :                   for (auto& packet : self->mQueuedPackets) {
    2097           0 :                     CSFLogDebug(logTag, "Inserting queued packets: seq# %u, Len %d ",
    2098           0 :                                 (uint16_t)ntohs(((uint16_t*) packet->mData)[1]), packet->mLen);
    2099             : 
    2100           0 :                     if (self->DeliverPacket(packet->mData, packet->mLen) != kMediaConduitNoError) {
    2101           0 :                       CSFLogError(logTag, "%s RTP Processing Failed", __FUNCTION__);
    2102             :                       // Keep delivering and then clear the queue
    2103             :                     }
    2104             :                   }
    2105           0 :                   self->mQueuedPackets.Clear();
    2106             :                   // we don't clear the in-progress flag until there are no changes in-flight
    2107           0 :                   self->mRecvSSRCSetInProgress = false;
    2108             :                 }
    2109             :                 // else this is an intermediate switch; another is in-flight
    2110             : 
    2111           0 :                 return NS_OK;
    2112           0 :               }), NS_DISPATCH_NORMAL);
    2113           0 :           return NS_OK;
    2114           0 :         }));
    2115           0 :     return kMediaConduitNoError;
    2116             :   }
    2117             : 
    2118           0 :   CSFLogVerbose(logTag, "%s: seq# %u, Len %d, SSRC %u (0x%x) ", __FUNCTION__,
    2119             :                 (uint16_t)ntohs(((uint16_t*) data)[1]), len,
    2120             :                 (uint32_t) ntohl(((uint32_t*) data)[2]),
    2121           0 :                 (uint32_t) ntohl(((uint32_t*) data)[2]));
    2122             : 
    2123           0 :   if (DeliverPacket(data, len) != kMediaConduitNoError) {
    2124           0 :     CSFLogError(logTag, "%s RTP Processing Failed", __FUNCTION__);
    2125           0 :     return kMediaConduitRTPProcessingFailed;
    2126             :   }
    2127           0 :   return kMediaConduitNoError;
    2128             : }
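
The casts in the logging and queuing code above index directly into the fixed RTP header.
A minimal sketch of the offsets involved (the helper names are hypothetical, for
illustration only):

    // RTP fixed header: byte 0 = V/P/X/CC, byte 1 = M/PT, bytes 2-3 = sequence
    // number, bytes 4-7 = timestamp, bytes 8-11 = SSRC.
    static uint16_t RtpSeq(const uint8_t* data)   // same bytes as ((uint16_t*) data)[1]
    {
      return ntohs(*reinterpret_cast<const uint16_t*>(data + 2));
    }
    static uint32_t RtpSsrc(const uint8_t* data)  // same bytes as ((uint32_t*) data)[2]
    {
      return ntohl(*reinterpret_cast<const uint32_t*>(data + 8));
    }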
    2129             : 
    2130             : MediaConduitErrorCode
    2131           0 : WebrtcVideoConduit::ReceivedRTCPPacket(const void* data, int len)
    2132             : {
    2133           0 :   CSFLogVerbose(logTag, " %s Len %d ", __FUNCTION__, len);
    2134             : 
    2135           0 :   if (DeliverPacket(data, len) != kMediaConduitNoError) {
    2136           0 :     CSFLogError(logTag, "%s RTCP Processing Failed", __FUNCTION__);
    2137           0 :     return kMediaConduitRTPProcessingFailed;
    2138             :   }
    2139             : 
    2140           0 :   return kMediaConduitNoError;
    2141             : }
    2142             : 
    2143             : MediaConduitErrorCode
    2144           0 : WebrtcVideoConduit::StopTransmitting()
    2145             : {
    2146           0 :   if (mEngineTransmitting) {
    2147             :     {
    2148           0 :       MutexAutoLock lock(mCodecMutex);
    2149           0 :       if (mSendStream) {
    2150           0 :           CSFLogDebug(logTag, "%s Engine Already Sending. Attempting to Stop ", __FUNCTION__);
    2151           0 :           mSendStream->Stop();
    2152             :       }
    2153             :     }
    2154             : 
    2155           0 :     mEngineTransmitting = false;
    2156             :   }
    2157           0 :   return kMediaConduitNoError;
    2158             : }
    2159             : 
    2160             : MediaConduitErrorCode
    2161           0 : WebrtcVideoConduit::StartTransmitting()
    2162             : {
    2163           0 :   if (mEngineTransmitting) {
    2164           0 :     return kMediaConduitNoError;
    2165             :   }
    2166             : 
    2167           0 :   CSFLogDebug(logTag, "%s Attempting to start... ", __FUNCTION__);
    2168             :   {
    2169             :     // Start Transmitting on the video engine
    2170           0 :     MutexAutoLock lock(mCodecMutex);
    2171             : 
    2172           0 :     if (!mSendStream) {
    2173           0 :       MediaConduitErrorCode rval = CreateSendStream();
    2174           0 :       if (rval != kMediaConduitNoError) {
    2175           0 :         CSFLogError(logTag, "%s Start Send Error %d ", __FUNCTION__, rval);
    2176           0 :         return rval;
    2177             :       }
    2178             :     }
    2179             : 
    2180           0 :     mSendStream->Start();
    2181             :     // XXX File a bug to consider hooking this up to the state of mtransport
    2182           0 :     mCall->Call()->SignalChannelNetworkState(webrtc::MediaType::VIDEO, webrtc::kNetworkUp);
    2183           0 :     mEngineTransmitting = true;
    2184             :   }
    2185             : 
    2186           0 :   return kMediaConduitNoError;
    2187             : }
    2188             : 
    2189             : MediaConduitErrorCode
    2190           0 : WebrtcVideoConduit::StopReceiving()
    2191             : {
    2192           0 :   NS_ASSERTION(NS_IsMainThread(), "Only call on main thread");
    2193             :   // Are we receiving already? If so, stop receiving and playout
    2194             :   // since we can't apply a new recv codec while the engine is playing.
    2195           0 :   if (mEngineReceiving && mRecvStream) {
    2196           0 :     CSFLogDebug(logTag, "%s Engine Already Receiving. Attempting to Stop ", __FUNCTION__);
    2197           0 :     mRecvStream->Stop();
    2198             :   }
    2199             : 
    2200           0 :   mEngineReceiving = false;
    2201           0 :   return kMediaConduitNoError;
    2202             : }
    2203             : 
    2204             : MediaConduitErrorCode
    2205           0 : WebrtcVideoConduit::StartReceiving()
    2206             : {
    2207           0 :   if (mEngineReceiving) {
    2208           0 :     return kMediaConduitNoError;
    2209             :   }
    2210             : 
    2211           0 :   CSFLogDebug(logTag, "%s Attempting to start... (SSRC %u (0x%x))", __FUNCTION__, mRecvSSRC, mRecvSSRC);
    2212             :   {
    2213             :     // Start Receive on the video engine
    2214           0 :     MutexAutoLock lock(mCodecMutex);
    2215           0 :     MOZ_ASSERT(mRecvStream);
    2216             : 
    2217           0 :     mRecvStream->Start();
    2218             :     // XXX File a bug to consider hooking this up to the state of mtransport
    2219           0 :     mCall->Call()->SignalChannelNetworkState(webrtc::MediaType::VIDEO, webrtc::kNetworkUp);
    2220           0 :     mEngineReceiving = true;
    2221             :   }
    2222             : 
    2223           0 :   return kMediaConduitNoError;
    2224             : }
    2225             : 
    2226             : // WebRTC::RTP Callback Implementation
    2227             : // Called on MSG thread
    2228             : bool
    2229           0 : WebrtcVideoConduit::SendRtp(const uint8_t* packet, size_t length,
    2230             :                             const webrtc::PacketOptions& options)
    2231             : {
    2232             :   // XXX(pkerr) - PacketOptions possibly containing RTP extensions are ignored.
    2233             :   // The only field in it is the packet_id, which is used when the header
    2234             :   // extension for TransportSequenceNumber is being used, which we don't use.
    2235           0 :   CSFLogVerbose(logTag, "%s Sent RTP Packet seq %d, len %lu, SSRC %u (0x%x)",
    2236             :                 __FUNCTION__,
    2237             :                 (uint16_t) ntohs(*((uint16_t*) &packet[2])),
    2238             :                 (unsigned long)length,
    2239             :                 (uint32_t) ntohl(*((uint32_t*) &packet[8])),
    2240           0 :                 (uint32_t) ntohl(*((uint32_t*) &packet[8])));
    2241             : 
    2242           0 :   ReentrantMonitorAutoEnter enter(mTransportMonitor);
    2243           0 :   if (!mTransmitterTransport ||
    2244           0 :      NS_FAILED(mTransmitterTransport->SendRtpPacket(packet, length)))
    2245             :   {
    2246           0 :     CSFLogError(logTag, "%s RTP Packet Send Failed ", __FUNCTION__);
    2247           0 :     return false;
    2248             :   }
    2249           0 :   return true;
    2250             : }
    2251             : 
    2252             : // Called from multiple threads including webrtc Process thread
    2253             : bool
    2254           0 : WebrtcVideoConduit::SendRtcp(const uint8_t* packet, size_t length)
    2255             : {
    2256           0 :   CSFLogVerbose(logTag, "%s : len %lu ", __FUNCTION__, (unsigned long)length);
    2257             :   // We come here if we have only one pipeline/conduit setup,
    2258             :   // such as for unidirectional streams.
    2259             :   // We also end up here if we are receiving
    2260           0 :   ReentrantMonitorAutoEnter enter(mTransportMonitor);
    2261           0 :   if (mReceiverTransport &&
    2262           0 :       NS_SUCCEEDED(mReceiverTransport->SendRtcpPacket(packet, length)))
    2263             :   {
    2264             :     // Might be a sender report, might be a receiver report, we don't know.
    2265           0 :     CSFLogDebug(logTag, "%s Sent RTCP Packet ", __FUNCTION__);
    2266           0 :     return true;
    2267             :   }
    2268           0 :   if (mTransmitterTransport &&
    2269           0 :              NS_SUCCEEDED(mTransmitterTransport->SendRtcpPacket(packet, length))) {
    2270           0 :     return true;
    2271             :   }
    2272             : 
    2273           0 :   CSFLogError(logTag, "%s RTCP Packet Send Failed ", __FUNCTION__);
    2274           0 :   return false;
    2275             : }
    2276             : 
    2277             : void
    2278           0 : WebrtcVideoConduit::OnFrame(const webrtc::VideoFrame& video_frame)
    2279             : {
    2280           0 :   CSFLogVerbose(logTag, "%s: recv SSRC %u (0x%x), size %ux%u", __FUNCTION__,
    2281           0 :                 mRecvSSRC, mRecvSSRC, video_frame.width(), video_frame.height());
    2282           0 :   ReentrantMonitorAutoEnter enter(mTransportMonitor);
    2283             : 
    2284           0 :   if (!mRenderer) {
    2285           0 :     CSFLogError(logTag, "%s Renderer is NULL  ", __FUNCTION__);
    2286           0 :     return;
    2287             :   }
    2288             : 
    2289           0 :   if (mReceivingWidth != video_frame.width() ||
    2290           0 :       mReceivingHeight != video_frame.height()) {
    2291           0 :     mReceivingWidth = video_frame.width();
    2292           0 :     mReceivingHeight = video_frame.height();
    2293           0 :     mRenderer->FrameSizeChange(mReceivingWidth, mReceivingHeight, mNumReceivingStreams);
    2294             :   }
    2295             : 
    2296             :   // Attempt to retrieve a timestamp encoded in the image pixels if enabled.
    2297           0 :   if (mVideoLatencyTestEnable && mReceivingWidth && mReceivingHeight) {
    2298           0 :     uint64_t now = PR_Now();
    2299           0 :     uint64_t timestamp = 0;
    2300           0 :     bool ok = YuvStamper::Decode(mReceivingWidth, mReceivingHeight, mReceivingWidth,
    2301           0 :                                  const_cast<unsigned char*>(video_frame.video_frame_buffer()->DataY()),
    2302             :                                  reinterpret_cast<unsigned char*>(&timestamp),
    2303           0 :                                  sizeof(timestamp), 0, 0);
    2304           0 :     if (ok) {
    2305           0 :       VideoLatencyUpdate(now - timestamp);
    2306             :     }
    2307             :   }
    2308             : 
    2309           0 :   const ImageHandle img_handle(nullptr);
    2310           0 :   mRenderer->RenderVideoFrame(*video_frame.video_frame_buffer(),
    2311             :                               video_frame.timestamp(),
    2312             :                               video_frame.render_time_ms(),
    2313           0 :                               img_handle);
    2314             : }
    2315             : 
    2316             : // Compare lists of codecs
    2317             : bool
    2318           0 : WebrtcVideoConduit::CodecsDifferent(const nsTArray<UniquePtr<VideoCodecConfig>>& a,
    2319             :                                     const nsTArray<UniquePtr<VideoCodecConfig>>& b)
    2320             : {
    2321             :   // return a != b;
    2322             :   // would work if UniquePtr<> operator== compared contents!
    2323           0 :   auto len = a.Length();
    2324           0 :   if (len != b.Length()) {
    2325           0 :     return true;
    2326             :   }
    2327             : 
    2328             :   // XXX std::equal would work, if we could use it on this - fails for the
    2329             :   // same reason as above.  C++14's two-range std::equal would let us pass a comparator function.
    2330           0 :   for (uint32_t i = 0; i < len; ++i) {
    2331           0 :     if (!(*a[i] == *b[i])) {
    2332           0 :       return true;
    2333             :     }
    2334             :   }
    2335             : 
    2336           0 :   return false;
    2337             : }
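
A minimal sketch of the comparator-based alternative the comment above alludes to,
assuming C++14's two-range std::equal (the <algorithm> header is already included by
this file); the helper name is hypothetical:

    static bool CodecListsEqual(const nsTArray<UniquePtr<VideoCodecConfig>>& a,
                                const nsTArray<UniquePtr<VideoCodecConfig>>& b)
    {
      // Compare the pointed-to configs, not the UniquePtrs themselves.
      return std::equal(a.begin(), a.end(), b.begin(), b.end(),
                        [](const UniquePtr<VideoCodecConfig>& x,
                           const UniquePtr<VideoCodecConfig>& y) { return *x == *y; });
    }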
    2338             : 
    2339             : /**
    2340             :  * Perform validation on the codecConfig to be applied:
    2341             :  * checks for a non-null config and a payload name of sane length.
    2342             :  */
    2343             : MediaConduitErrorCode
    2344           0 : WebrtcVideoConduit::ValidateCodecConfig(const VideoCodecConfig* codecInfo,
    2345             :                                         bool send)
    2346             : {
    2347           0 :   if(!codecInfo) {
    2348           0 :     CSFLogError(logTag, "%s Null CodecConfig ", __FUNCTION__);
    2349           0 :     return kMediaConduitMalformedArgument;
    2350             :   }
    2351             : 
    2352           0 :   if((codecInfo->mName.empty()) ||
    2353           0 :      (codecInfo->mName.length() >= CODEC_PLNAME_SIZE)) {
    2354           0 :     CSFLogError(logTag, "%s Invalid Payload Name Length ", __FUNCTION__);
    2355           0 :     return kMediaConduitMalformedArgument;
    2356             :   }
    2357             : 
    2358           0 :   return kMediaConduitNoError;
    2359             : }
    2360             : 
    2361             : void
    2362           0 : WebrtcVideoConduit::DumpCodecDB() const
    2363             : {
    2364           0 :   for (auto& entry : mRecvCodecList) {
    2365           0 :     CSFLogDebug(logTag, "Payload Name: %s", entry->mName.c_str());
    2366           0 :     CSFLogDebug(logTag, "Payload Type: %d", entry->mType);
    2367           0 :     CSFLogDebug(logTag, "Payload Max Frame Size: %d", entry->mEncodingConstraints.maxFs);
    2368           0 :     CSFLogDebug(logTag, "Payload Max Frame Rate: %d", entry->mEncodingConstraints.maxFps);
    2369             :   }
    2370           0 : }
    2371             : 
    2372             : void
    2373           0 : WebrtcVideoConduit::VideoLatencyUpdate(uint64_t newSample)
    2374             : {
    2375           0 :   mVideoLatencyAvg = (sRoundingPadding * newSample + sAlphaNum * mVideoLatencyAvg) / sAlphaDen;
    2376           0 : }
    2377             : 
    2378             : uint64_t
    2379           0 : WebrtcVideoConduit::MozVideoLatencyAvg()
    2380             : {
    2381           0 :   return mVideoLatencyAvg / sRoundingPadding;
    2382             : }
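
For context on the two functions above: VideoLatencyUpdate() keeps an exponentially
weighted moving average of the latency samples, stored pre-multiplied by sRoundingPadding
so integer division loses less precision, and MozVideoLatencyAvg() divides the padding
back out. A minimal sketch with hypothetical constants (the real sAlphaNum, sAlphaDen and
sRoundingPadding are defined elsewhere in this file):

    // avg' = (padding * sample + alphaNum * avg) / alphaDen
    //      = (1/8) * (padding * sample) + (7/8) * avg   when alphaDen - alphaNum == 1,
    // i.e. each new sample contributes with weight 1/alphaDen.
    static uint64_t EwmaUpdate(uint64_t scaledAvg, uint64_t sample)
    {
      const uint64_t padding = 512, alphaNum = 7, alphaDen = 8;  // example values only
      return (padding * sample + alphaNum * scaledAvg) / alphaDen;
    }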
    2383             : 
    2384             : uint64_t
    2385           0 : WebrtcVideoConduit::CodecPluginID()
    2386             : {
    2387           0 :   if (mSendCodecPlugin) {
    2388           0 :     return mSendCodecPlugin->PluginID();
    2389             :   }
    2390           0 :   if (mRecvCodecPlugin) {
    2391           0 :     return mRecvCodecPlugin->PluginID();
    2392             :   }
    2393             : 
    2394           0 :   return 0;
    2395             : }
    2396             : 
    2397             : bool
    2398           0 : WebrtcVideoConduit::RequiresNewSendStream(const VideoCodecConfig& newConfig) const
    2399             : {
    2400           0 :   return !mCurSendCodecConfig
    2401           0 :     || mCurSendCodecConfig->mName != newConfig.mName
    2402           0 :     || mCurSendCodecConfig->mType != newConfig.mType
    2403           0 :     || mCurSendCodecConfig->RtcpFbNackIsSet("") != newConfig.RtcpFbNackIsSet("")
    2404           0 :     || mCurSendCodecConfig->RtcpFbFECIsSet() != newConfig.RtcpFbFECIsSet()
    2405             : #if 0
    2406             :     // XXX Do we still want/need to do this?
    2407             :     || (newConfig.mName == "H264" &&
    2408             :         !CompatibleH264Config(mEncoderSpecificH264, newConfig))
    2409             : #endif
    2410             :     ;
    2411             : }
    2412             : 
    2413             : void
    2414           0 : WebrtcVideoConduit::VideoEncoderConfigBuilder::SetEncoderSpecificSettings(
    2415             :   rtc::scoped_refptr<webrtc::VideoEncoderConfig::EncoderSpecificSettings> aSettings)
    2416             : {
    2417           0 :   mConfig.encoder_specific_settings = aSettings;
    2418           0 : }
    2419             : 
    2420             : void
    2421           0 : WebrtcVideoConduit::VideoEncoderConfigBuilder::SetVideoStreamFactory(rtc::scoped_refptr<WebrtcVideoConduit::VideoStreamFactory> aFactory)
    2422             : {
    2423           0 :   mConfig.video_stream_factory = aFactory;
    2424           0 : }
    2425             : 
    2426             : void
    2427           0 : WebrtcVideoConduit::VideoEncoderConfigBuilder::SetMinTransmitBitrateBps(
    2428             :   int aXmitMinBps)
    2429             : {
    2430           0 :   mConfig.min_transmit_bitrate_bps = aXmitMinBps;
    2431           0 : }
    2432             : 
    2433             : void
    2434           0 : WebrtcVideoConduit::VideoEncoderConfigBuilder::SetContentType(
    2435             :   webrtc::VideoEncoderConfig::ContentType aContentType)
    2436             : {
    2437           0 :   mConfig.content_type = aContentType;
    2438           0 : }
    2439             : 
    2440             : void
    2441           0 : WebrtcVideoConduit::VideoEncoderConfigBuilder::SetResolutionDivisor(
    2442             :   unsigned char aDivisor)
    2443             : {
    2444           0 :   mConfig.resolution_divisor = aDivisor;
    2445           0 : }
    2446             : 
    2447             : void
    2448           0 : WebrtcVideoConduit::VideoEncoderConfigBuilder::SetMaxEncodings(
    2449             :   size_t aMaxStreams)
    2450             : {
    2451           0 :   mConfig.number_of_streams = aMaxStreams;
    2452           0 : }
    2453             : 
    2454             : void
    2455           0 : WebrtcVideoConduit::VideoEncoderConfigBuilder::AddStream(
    2456             :   webrtc::VideoStream aStream)
    2457             : {
    2458           0 :   mSimulcastStreams.push_back(SimulcastStreamConfig());
    2459           0 :   MOZ_ASSERT(mSimulcastStreams.size() <= mConfig.number_of_streams);
    2460           0 : }
    2461             : 
    2462             : void
    2463           0 : WebrtcVideoConduit::VideoEncoderConfigBuilder::AddStream(
    2464             :   webrtc::VideoStream aStream, const SimulcastStreamConfig& aSimulcastConfig)
    2465             : {
    2466           0 :   mSimulcastStreams.push_back(aSimulcastConfig);
    2467           0 :   MOZ_ASSERT(mSimulcastStreams.size() <= mConfig.number_of_streams);
    2468           0 : }
    2469             : 
    2470             : size_t
    2471           0 : WebrtcVideoConduit::VideoEncoderConfigBuilder::StreamCount() const
    2472             : {
    2473           0 :   return mSimulcastStreams.size();
    2474             : }
    2475             : 
    2476             : void
    2477           0 : WebrtcVideoConduit::VideoEncoderConfigBuilder::ClearStreams()
    2478             : {
    2479           0 :   mSimulcastStreams.clear();
    2480           0 : }
    2481             : 
    2482           9 : } // end namespace

Generated by: LCOV version 1.13