LCOV - code coverage report

Current view: top level - media/webrtc/trunk/webrtc/modules/video_coding/codecs/vp8 - simulcast_encoder_adapter.cc
Test:         output.info
Date:         2017-07-14 16:53:18
Coverage:     Lines: 0 / 238 (0.0 %)    Functions: 0 / 27 (0.0 %)

/*
 *  Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

#include "webrtc/modules/video_coding/codecs/vp8/simulcast_encoder_adapter.h"

#include <algorithm>

// NOTE(ajm): Path provided by gyp.
#include "libyuv/scale.h"  // NOLINT

#include "webrtc/api/video/i420_buffer.h"
#include "webrtc/base/checks.h"
#include "webrtc/modules/video_coding/codecs/vp8/screenshare_layers.h"
#include "webrtc/modules/video_coding/utility/simulcast_rate_allocator.h"
#include "webrtc/system_wrappers/include/clock.h"

namespace {

const unsigned int kDefaultMinQp = 2;
const unsigned int kDefaultMaxQp = 56;
// Max qp for lowest spatial resolution when doing simulcast.
const unsigned int kLowestResMaxQp = 45;

uint32_t SumStreamMaxBitrate(int streams, const webrtc::VideoCodec& codec) {
  uint32_t bitrate_sum = 0;
  for (int i = 0; i < streams; ++i) {
    bitrate_sum += codec.simulcastStream[i].maxBitrate;
  }
  return bitrate_sum;
}

int NumberOfStreams(const webrtc::VideoCodec& codec) {
  int streams =
      codec.numberOfSimulcastStreams < 1 ? 1 : codec.numberOfSimulcastStreams;
  uint32_t simulcast_max_bitrate = SumStreamMaxBitrate(streams, codec);
  if (simulcast_max_bitrate == 0) {
    streams = 1;
  }
  return streams;
}

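// Simulcast requires the top stream to match the codec's own width and height,
// and every stream to keep the same aspect ratio, which is checked below by
// cross-multiplication. For example, a 320x180 substream of a 1280x720 codec
// passes because 1280 * 180 == 720 * 320.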
bool ValidSimulcastResolutions(const webrtc::VideoCodec& codec,
                               int num_streams) {
  if (codec.width != codec.simulcastStream[num_streams - 1].width ||
      codec.height != codec.simulcastStream[num_streams - 1].height) {
    return false;
  }
  for (int i = 0; i < num_streams; ++i) {
    if (codec.width * codec.simulcastStream[i].height !=
        codec.height * codec.simulcastStream[i].width) {
      return false;
    }
  }
  return true;
}

int VerifyCodec(const webrtc::VideoCodec* inst) {
  if (inst == NULL) {
    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
  }
  if (inst->maxFramerate < 1) {
    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
  }
  // Allow zero to represent an unspecified maxBitrate.
  if (inst->maxBitrate > 0 && inst->startBitrate > inst->maxBitrate) {
    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
  }
  if (inst->width <= 1 || inst->height <= 1) {
    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
  }
  if (inst->VP8().feedbackModeOn && inst->numberOfSimulcastStreams > 1) {
    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
  }
  if (inst->VP8().automaticResizeOn && inst->numberOfSimulcastStreams > 1) {
    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
  }
  return WEBRTC_VIDEO_CODEC_OK;
}

// An EncodedImageCallback implementation that forwards on calls to a
// SimulcastEncoderAdapter, but with the stream index it's registered with as
// the first parameter to OnEncodedImage.
class AdapterEncodedImageCallback : public webrtc::EncodedImageCallback {
 public:
  AdapterEncodedImageCallback(webrtc::SimulcastEncoderAdapter* adapter,
                              size_t stream_idx)
      : adapter_(adapter), stream_idx_(stream_idx) {}

  EncodedImageCallback::Result OnEncodedImage(
      const webrtc::EncodedImage& encoded_image,
      const webrtc::CodecSpecificInfo* codec_specific_info,
      const webrtc::RTPFragmentationHeader* fragmentation) override {
    return adapter_->OnEncodedImage(stream_idx_, encoded_image,
                                    codec_specific_info, fragmentation);
  }

 private:
  webrtc::SimulcastEncoderAdapter* const adapter_;
  const size_t stream_idx_;
};
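
// One AdapterEncodedImageCallback is created and registered per sub-encoder in
// InitEncode(); the callbacks are tracked through streaminfos_ and deleted in
// Release().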

// Utility class used to adapt the simulcast id as reported by the temporal
// layers factory, since each sub-encoder will report stream 0.
class TemporalLayersFactoryAdapter : public webrtc::TemporalLayersFactory {
 public:
  TemporalLayersFactoryAdapter(int adapted_simulcast_id,
                               const TemporalLayersFactory& tl_factory)
      : adapted_simulcast_id_(adapted_simulcast_id), tl_factory_(tl_factory) {}
  ~TemporalLayersFactoryAdapter() override {}
  webrtc::TemporalLayers* Create(int simulcast_id,
                                 int temporal_layers,
                                 uint8_t initial_tl0_pic_idx) const override {
    return tl_factory_.Create(adapted_simulcast_id_, temporal_layers,
                              initial_tl0_pic_idx);
  }

  const int adapted_simulcast_id_;
  const TemporalLayersFactory& tl_factory_;
};
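
// For example, the sub-encoder created for simulcast stream 2 reports itself
// as stream 0 and calls Create(0, ...); the adapter ignores that id and
// forwards Create(2, ...) to the wrapped factory.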

}  // namespace

namespace webrtc {

SimulcastEncoderAdapter::SimulcastEncoderAdapter(VideoEncoderFactory* factory)
    : factory_(factory),
      encoded_complete_callback_(nullptr),
      implementation_name_("SimulcastEncoderAdapter") {
  memset(&codec_, 0, sizeof(webrtc::VideoCodec));
}

SimulcastEncoderAdapter::~SimulcastEncoderAdapter() {
  Release();
}

int SimulcastEncoderAdapter::Release() {
  // TODO(pbos): Keep the last encoder instance but call ::Release() on it, then
  // re-use this instance in ::InitEncode(). This means that changing
  // resolutions doesn't require reallocation of the first encoder, but only
  // reinitialization, which makes sense. Then Destroy this instance instead in
  // ~SimulcastEncoderAdapter().
  while (!streaminfos_.empty()) {
    VideoEncoder* encoder = streaminfos_.back().encoder;
    EncodedImageCallback* callback = streaminfos_.back().callback;
    factory_->Destroy(encoder);
    delete callback;
    streaminfos_.pop_back();
  }
  return WEBRTC_VIDEO_CODEC_OK;
}

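// InitEncode() verifies the configuration, lets SimulcastRateAllocator split
// codec_.startBitrate across the configured streams (roughly low-to-high; the
// exact split is up to the allocator), and creates and initializes one
// sub-encoder per stream. A stream whose share of the start bitrate is 0 kbps
// starts with sending disabled and is skipped in Encode() until
// SetRateAllocation() assigns it a non-zero rate.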
int SimulcastEncoderAdapter::InitEncode(const VideoCodec* inst,
                                        int number_of_cores,
                                        size_t max_payload_size) {
  if (number_of_cores < 1) {
    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
  }

  int ret = VerifyCodec(inst);
  if (ret < 0) {
    return ret;
  }

  ret = Release();
  if (ret < 0) {
    return ret;
  }

  int number_of_streams = NumberOfStreams(*inst);
  const bool doing_simulcast = (number_of_streams > 1);

  if (doing_simulcast && !ValidSimulcastResolutions(*inst, number_of_streams)) {
    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
  }

  codec_ = *inst;
  SimulcastRateAllocator rate_allocator(codec_, nullptr);
  BitrateAllocation allocation = rate_allocator.GetAllocation(
      codec_.startBitrate * 1000, codec_.maxFramerate);
  std::vector<uint32_t> start_bitrates;
  for (int i = 0; i < kMaxSimulcastStreams; ++i) {
    uint32_t stream_bitrate = allocation.GetSpatialLayerSum(i) / 1000;
    start_bitrates.push_back(stream_bitrate);
  }

  std::string implementation_name;
  // Create |number_of_streams| encoder instances and initialize them.
  for (int i = 0; i < number_of_streams; ++i) {
    VideoCodec stream_codec;
    uint32_t start_bitrate_kbps = start_bitrates[i];
    if (!doing_simulcast) {
      stream_codec = codec_;
      stream_codec.numberOfSimulcastStreams = 1;
    } else {
      // Cap start bitrate to the min bitrate in order to avoid strange codec
      // behavior. Since sending will be false, this should not matter.
      start_bitrate_kbps =
          std::max(codec_.simulcastStream[i].minBitrate, start_bitrate_kbps);
      bool highest_resolution_stream = (i == (number_of_streams - 1));
      PopulateStreamCodec(&codec_, i, start_bitrate_kbps,
                          highest_resolution_stream, &stream_codec);
    }
    TemporalLayersFactoryAdapter tl_factory_adapter(i,
                                                    *codec_.VP8()->tl_factory);
    stream_codec.VP8()->tl_factory = &tl_factory_adapter;

    // TODO(ronghuawu): Remove once this is handled in VP8EncoderImpl.
    if (stream_codec.qpMax < kDefaultMinQp) {
      stream_codec.qpMax = kDefaultMaxQp;
    }

    VideoEncoder* encoder = factory_->Create();
    ret = encoder->InitEncode(&stream_codec, number_of_cores, max_payload_size);
    if (ret < 0) {
      // Explicitly destroy the current encoder; because we haven't registered a
      // StreamInfo for it yet, Release won't do anything about it.
      factory_->Destroy(encoder);
      Release();
      return ret;
    }
    EncodedImageCallback* callback = new AdapterEncodedImageCallback(this, i);
    encoder->RegisterEncodeCompleteCallback(callback);
    streaminfos_.push_back(StreamInfo(encoder, callback, stream_codec.width,
                                      stream_codec.height,
                                      start_bitrate_kbps > 0));
    if (i != 0)
      implementation_name += ", ";
    implementation_name += streaminfos_[i].encoder->ImplementationName();
  }
  if (doing_simulcast) {
    implementation_name_ =
        "SimulcastEncoderAdapter (" + implementation_name + ")";
  } else {
    implementation_name_ = implementation_name;
  }
  return WEBRTC_VIDEO_CODEC_OK;
}

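// Encode() folds per-stream key frame requests into a single decision (any
// pending request forces a key frame on every active stream), then feeds each
// active sub-encoder either the original frame or, when the stream's
// resolution differs from the input and the frame is a plain I420 buffer, a
// copy downscaled with libyuv::I420Scale.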
int SimulcastEncoderAdapter::Encode(
    const VideoFrame& input_image,
    const CodecSpecificInfo* codec_specific_info,
    const std::vector<FrameType>* frame_types) {
  if (!Initialized()) {
    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  }
  if (encoded_complete_callback_ == NULL) {
    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  }

  // All active streams should generate a key frame if
  // a key frame is requested by any stream.
  bool send_key_frame = false;
  if (frame_types) {
    for (size_t i = 0; i < frame_types->size(); ++i) {
      if (frame_types->at(i) == kVideoFrameKey) {
        send_key_frame = true;
        break;
      }
    }
  }
  for (size_t stream_idx = 0; stream_idx < streaminfos_.size(); ++stream_idx) {
    if (streaminfos_[stream_idx].key_frame_request &&
        streaminfos_[stream_idx].send_stream) {
      send_key_frame = true;
      break;
    }
  }

  int src_width = input_image.width();
  int src_height = input_image.height();
  for (size_t stream_idx = 0; stream_idx < streaminfos_.size(); ++stream_idx) {
    // Don't encode frames in resolutions that we don't intend to send.
    if (!streaminfos_[stream_idx].send_stream)
      continue;

    std::vector<FrameType> stream_frame_types;
    if (send_key_frame) {
      stream_frame_types.push_back(kVideoFrameKey);
      streaminfos_[stream_idx].key_frame_request = false;
    } else {
      stream_frame_types.push_back(kVideoFrameDelta);
    }

    int dst_width = streaminfos_[stream_idx].width;
    int dst_height = streaminfos_[stream_idx].height;
    // If scaling isn't required because the input resolution matches the
    // destination, the input image is empty (e.g. a keyframe request for
    // encoders with internal camera sources), or the source image has a
    // native handle, pass the image on directly. Otherwise, we'll scale it
    // to match what the encoder expects (below).
    // For texture frames, the underlying encoder is expected to be able to
    // correctly sample/scale the source texture.
    // TODO(perkj): ensure that works going forward, and figure out how this
    // affects webrtc:5683.
    if ((dst_width == src_width && dst_height == src_height) ||
        input_image.video_frame_buffer()->native_handle()) {
      int ret = streaminfos_[stream_idx].encoder->Encode(
          input_image, codec_specific_info, &stream_frame_types);
      if (ret != WEBRTC_VIDEO_CODEC_OK) {
        return ret;
      }
    } else {
      // Aligning stride values based on width.
      rtc::scoped_refptr<I420Buffer> dst_buffer =
          I420Buffer::Create(dst_width, dst_height, dst_width,
                             (dst_width + 1) / 2, (dst_width + 1) / 2);
      libyuv::I420Scale(input_image.video_frame_buffer()->DataY(),
                        input_image.video_frame_buffer()->StrideY(),
                        input_image.video_frame_buffer()->DataU(),
                        input_image.video_frame_buffer()->StrideU(),
                        input_image.video_frame_buffer()->DataV(),
                        input_image.video_frame_buffer()->StrideV(),
                        src_width, src_height,
                        dst_buffer->MutableDataY(), dst_buffer->StrideY(),
                        dst_buffer->MutableDataU(), dst_buffer->StrideU(),
                        dst_buffer->MutableDataV(), dst_buffer->StrideV(),
                        dst_width, dst_height,
                        libyuv::kFilterBilinear);

      int ret = streaminfos_[stream_idx].encoder->Encode(
          VideoFrame(dst_buffer, input_image.timestamp(),
                     input_image.render_time_ms(), webrtc::kVideoRotation_0),
          codec_specific_info, &stream_frame_types);
      if (ret != WEBRTC_VIDEO_CODEC_OK) {
        return ret;
      }
    }
  }

  return WEBRTC_VIDEO_CODEC_OK;
}

int SimulcastEncoderAdapter::RegisterEncodeCompleteCallback(
    EncodedImageCallback* callback) {
  encoded_complete_callback_ = callback;
  return WEBRTC_VIDEO_CODEC_OK;
}

int SimulcastEncoderAdapter::SetChannelParameters(uint32_t packet_loss,
                                                  int64_t rtt) {
  for (size_t stream_idx = 0; stream_idx < streaminfos_.size(); ++stream_idx) {
    streaminfos_[stream_idx].encoder->SetChannelParameters(packet_loss, rtt);
  }
  return WEBRTC_VIDEO_CODEC_OK;
}

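// SetRateAllocation() validates the overall rate and then slices the
// allocation per simulcast stream: the temporal-layer rates of stream_idx are
// copied into spatial layer 0 of a fresh BitrateAllocation, since each
// sub-encoder sees itself as a single stream. A stream going from 0 to a
// non-zero rate is flagged for a key frame before it resumes sending.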
int SimulcastEncoderAdapter::SetRateAllocation(const BitrateAllocation& bitrate,
                                               uint32_t new_framerate) {
  if (!Initialized())
    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;

  if (new_framerate < 1)
    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;

  if (codec_.maxBitrate > 0 && bitrate.get_sum_kbps() > codec_.maxBitrate)
    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;

  if (bitrate.get_sum_bps() > 0) {
    // Make sure the bitrate fits the configured min bitrates. 0 is a special
    // value that means paused, though, so leave it alone.
    if (bitrate.get_sum_kbps() < codec_.minBitrate)
      return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;

    if (codec_.numberOfSimulcastStreams > 0 &&
        bitrate.get_sum_kbps() < codec_.simulcastStream[0].minBitrate) {
      return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
    }
  }

  codec_.maxFramerate = new_framerate;

  for (size_t stream_idx = 0; stream_idx < streaminfos_.size(); ++stream_idx) {
    uint32_t stream_bitrate_kbps =
        bitrate.GetSpatialLayerSum(stream_idx) / 1000;

    // Need a key frame if we have not sent this stream before.
    if (stream_bitrate_kbps > 0 && !streaminfos_[stream_idx].send_stream) {
      streaminfos_[stream_idx].key_frame_request = true;
    }
    streaminfos_[stream_idx].send_stream = stream_bitrate_kbps > 0;

    // Slice the temporal layers out of the full allocation and pass it on to
    // the encoder handling the current simulcast stream.
    BitrateAllocation stream_allocation;
    for (int i = 0; i < kMaxTemporalStreams; ++i)
      stream_allocation.SetBitrate(0, i, bitrate.GetBitrate(stream_idx, i));
    streaminfos_[stream_idx].encoder->SetRateAllocation(stream_allocation,
                                                        new_framerate);
  }

  return WEBRTC_VIDEO_CODEC_OK;
}

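// Called via AdapterEncodedImageCallback: stamps the adapter's implementation
// name and the originating simulcast stream index onto the codec-specific
// info, then forwards the image to the registered callback.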
EncodedImageCallback::Result SimulcastEncoderAdapter::OnEncodedImage(
    size_t stream_idx,
    const EncodedImage& encodedImage,
    const CodecSpecificInfo* codecSpecificInfo,
    const RTPFragmentationHeader* fragmentation) {
  CodecSpecificInfo stream_codec_specific = *codecSpecificInfo;
  stream_codec_specific.codec_name = implementation_name_.c_str();
  CodecSpecificInfoVP8* vp8Info = &(stream_codec_specific.codecSpecific.VP8);
  vp8Info->simulcastIdx = stream_idx;

  return encoded_complete_callback_->OnEncodedImage(
      encodedImage, &stream_codec_specific, fragmentation);
}

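// Derives per-stream settings from the top-level configuration. As an
// illustration, a 320x180 low stream would get qpMax = kLowestResMaxQp (it is
// stream 0), complexity = kComplexityHigher (320 * 180 < 352 * 288) and
// denoising disabled, while the highest-resolution stream keeps the default
// complexity and denoising settings.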
void SimulcastEncoderAdapter::PopulateStreamCodec(
    const webrtc::VideoCodec* inst,
    int stream_index,
    uint32_t start_bitrate_kbps,
    bool highest_resolution_stream,
    webrtc::VideoCodec* stream_codec) {
  *stream_codec = *inst;

  // Stream specific settings.
  stream_codec->VP8()->numberOfTemporalLayers =
      inst->simulcastStream[stream_index].numberOfTemporalLayers;
  stream_codec->numberOfSimulcastStreams = 0;
  stream_codec->width = inst->simulcastStream[stream_index].width;
  stream_codec->height = inst->simulcastStream[stream_index].height;
  stream_codec->maxBitrate = inst->simulcastStream[stream_index].maxBitrate;
  stream_codec->minBitrate = inst->simulcastStream[stream_index].minBitrate;
  stream_codec->qpMax = inst->simulcastStream[stream_index].qpMax;
  // Settings that are based on stream/resolution.
  if (stream_index == 0) {
    // Settings for lowest spatial resolutions.
    stream_codec->qpMax = kLowestResMaxQp;
  }
  if (!highest_resolution_stream) {
    // For resolutions below CIF, set the codec |complexity| parameter to
    // kComplexityHigher, which maps to cpu_used = -4.
    int pixels_per_frame = stream_codec->width * stream_codec->height;
    if (pixels_per_frame < 352 * 288) {
      stream_codec->VP8()->complexity = webrtc::kComplexityHigher;
    }
    // Turn off denoising for all streams but the highest resolution.
    stream_codec->VP8()->denoisingOn = false;
  }
  // TODO(ronghuawu): what to do with targetBitrate.

  stream_codec->startBitrate = start_bitrate_kbps;
}

bool SimulcastEncoderAdapter::Initialized() const {
  return !streaminfos_.empty();
}

bool SimulcastEncoderAdapter::SupportsNativeHandle() const {
  // We should not be calling this method before streaminfos_ are configured.
  RTC_DCHECK(!streaminfos_.empty());
  for (const auto& streaminfo : streaminfos_) {
    if (!streaminfo.encoder->SupportsNativeHandle())
      return false;
  }
  return true;
}

VideoEncoder::ScalingSettings SimulcastEncoderAdapter::GetScalingSettings()
    const {
  // Turn off quality scaling for simulcast.
  if (!Initialized() || NumberOfStreams(codec_) != 1)
    return VideoEncoder::ScalingSettings(false);
  return streaminfos_[0].encoder->GetScalingSettings();
}

const char* SimulcastEncoderAdapter::ImplementationName() const {
  return implementation_name_.c_str();
}

}  // namespace webrtc

Generated by: LCOV version 1.13