LCOV - code coverage report
Current view: top level - media/webrtc/trunk/webrtc/modules/video_coding - jitter_buffer.cc (source / functions) Hit Total Coverage
Test: output.info Lines: 0 735 0.0 %
Date: 2017-07-14 16:53:18 Functions: 0 69 0.0 %
Legend: Lines: hit not hit

          Line data    Source code
       1             : /*
       2             :  *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
       3             :  *
       4             :  *  Use of this source code is governed by a BSD-style license
       5             :  *  that can be found in the LICENSE file in the root of the source
       6             :  *  tree. An additional intellectual property rights grant can be found
       7             :  *  in the file PATENTS.  All contributing project authors may
       8             :  *  be found in the AUTHORS file in the root of the source tree.
       9             :  */
      10             : #include "webrtc/modules/video_coding/jitter_buffer.h"
      11             : 
      12             : #include <assert.h>
      13             : 
      14             : #include <algorithm>
      15             : #include <limits>
      16             : #include <utility>
      17             : 
      18             : #include "webrtc/base/checks.h"
      19             : #include "webrtc/base/logging.h"
      20             : #include "webrtc/base/trace_event.h"
      21             : #include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
      22             : #include "webrtc/modules/video_coding/include/video_coding.h"
      23             : #include "webrtc/modules/video_coding/frame_buffer.h"
      24             : #include "webrtc/modules/video_coding/inter_frame_delay.h"
      25             : #include "webrtc/modules/video_coding/internal_defines.h"
      26             : #include "webrtc/modules/video_coding/jitter_buffer_common.h"
      27             : #include "webrtc/modules/video_coding/jitter_estimator.h"
      28             : #include "webrtc/modules/video_coding/packet.h"
      29             : #include "webrtc/system_wrappers/include/clock.h"
      30             : #include "webrtc/system_wrappers/include/critical_section_wrapper.h"
      31             : #include "webrtc/system_wrappers/include/event_wrapper.h"
      32             : #include "webrtc/system_wrappers/include/field_trial.h"
      33             : #include "webrtc/system_wrappers/include/metrics.h"
      34             : 
      35             : namespace webrtc {
      36             : // Interval for updating SS data.
      37             : static const uint32_t kSsCleanupIntervalSec = 60;
      38             : 
      39             : // Use this rtt if no value has been reported.
      40             : static const int64_t kDefaultRtt = 200;
      41             : 
      42             : // Request a keyframe if no continuous frame has been received for this
      43             : // number of milliseconds and NACKs are disabled.
      44             : static const int64_t kMaxDiscontinuousFramesTime = 1000;
      45             : 
      46             : typedef std::pair<uint32_t, VCMFrameBuffer*> FrameListPair;
      47             : 
      48           0 : bool IsKeyFrame(FrameListPair pair) {
      49           0 :   return pair.second->FrameType() == kVideoFrameKey;
      50             : }
      51             : 
      52           0 : bool HasNonEmptyState(FrameListPair pair) {
      53           0 :   return pair.second->GetState() != kStateEmpty;
      54             : }
      55             : 
      56           0 : void FrameList::InsertFrame(VCMFrameBuffer* frame) {
      57           0 :   insert(rbegin().base(), FrameListPair(frame->TimeStamp(), frame));
      58           0 : }
      59             : 
      60             : // Find a Frame which (may) include seq_num
      61             : // Note: if we don't have an end for the frame yet AND there are multiple Frames
      62             : // with the same timestamp being input, in theory you can get packets
      63             : // for a later Frame mixed with an earlier one where there's a reordering.
      64             : // e.g. for <frame 1: 1 2 3> <frame 2: 4 5 6> and we receive
      65             : //          1 2 4 3 5 6
      66             : // or       4 1 2 3 5 6
      67             : // we'll return <frame 1> for packet 4, and at some point it needs to move to
      68             : // <frame 2>.  You can't key off isFirstPacket or kNaluStart because the OOO packet
      69             : // may be 5:
      70             : //          1 5 2 3 4 6
      71             : //          1 5 3 4 2 6 etc
      72             : 
      73             : // This can be done by re-characterizing 4 when <frame 1> becomes complete
      74             : // and we find it doesn't include 4.  Perhaps a better abstraction would be
      75             : // to keep the packets in a single sorted list (per timestamp or not,
      76             : // doesn't really matter), and then on insertion look to see if it's in a
      77             : // complete unit (kNaluComplete or kNaluStart ... kNaluEnd sequence), and
      78             : // remove the set *then*.
      79             : //
      80             : // If we instead limit multiple frames with the same timestamp to
      81             : // kNaluComplete (single-packet) frames (i.e. Mode 0 H264), it's simpler.
      82             : // You do need to be careful to pull off Frames only if they're contiguous
      83             : // in sequence number to the previous frame, but that's normal since you
      84             : // can get 4 5 6 1 2 3
      85             : //
      86             : // Note that you have to be careful reordering still:
      87             : // <frame 1: 1> <frame 2: 2 3 4>
      88             : // and arrival 2 1 3 4
      89             : // means you must not match the frame created for 2 when 1 comes in
      90             : 
      91             : // XXX This is NOT implemented here; we need to redo this
      92             : 
      93           0 : VCMFrameBuffer* FrameList::PopFrame(uint32_t timestamp) {
      94           0 :   FrameList::iterator it = find(timestamp);
      95           0 :   if (it == end())
      96           0 :     return NULL;
      97           0 :   VCMFrameBuffer* frame = it->second;
      98           0 :   erase(it);
      99           0 :   return frame;
     100             : }
     101             : 
     102           0 : VCMFrameBuffer* FrameList::Front() const {
     103           0 :   return begin()->second;
     104             : }
     105             : 
     106           0 : VCMFrameBuffer* FrameList::Back() const {
     107           0 :   return rbegin()->second;
     108             : }
     109             : 
// Drops frames from the front of the list until a key frame is reached (or
// the list is exhausted). Dropped frames are Reset() and handed back to
// |free_frames|. On return, |*key_frame_it| points at the first remaining
// key frame, or end() if none was found. Returns the number of frames
// dropped.
int FrameList::RecycleFramesUntilKeyFrame(FrameList::iterator* key_frame_it,
                                          UnorderedFrameList* free_frames) {
  int drop_count = 0;
  FrameList::iterator it = begin();
  while (!empty()) {
    // Throw at least one frame.
    it->second->Reset();
    free_frames->push_back(it->second);
    // Post-increment: |it| advances to the next entry before the current one
    // is erased, so it stays valid (map erase only invalidates the erased
    // iterator).
    erase(it++);
    ++drop_count;
    if (it != end() && it->second->FrameType() == kVideoFrameKey) {
      *key_frame_it = it;
      return drop_count;
    }
  }
  *key_frame_it = end();
  return drop_count;
}
     128             : 
// Removes frames from the front of the list that are either empty (and can
// update |decoding_state|) or older than the last decoded frame, stopping
// at the first frame that must be kept. Removed frames are returned to
// |free_frames|.
void FrameList::CleanUpOldOrEmptyFrames(VCMDecodingState* decoding_state,
                                        UnorderedFrameList* free_frames) {
  while (!empty()) {
    VCMFrameBuffer* oldest_frame = Front();
    bool remove_frame = false;
    if (oldest_frame->GetState() == kStateEmpty && size() > 1) {
      // This frame is empty, try to update the last decoded state and drop it
      // if successful.
      remove_frame = decoding_state->UpdateEmptyFrame(oldest_frame);
    } else {
      remove_frame = decoding_state->IsOldFrame(oldest_frame);
    }
    if (!remove_frame) {
      break;
    }
    free_frames->push_back(oldest_frame);
    TRACE_EVENT_INSTANT1("webrtc", "JB::OldOrEmptyFrameDropped", "timestamp",
                         oldest_frame->TimeStamp());
    erase(begin());
  }
}
     150             : 
     151           0 : void FrameList::Reset(UnorderedFrameList* free_frames) {
     152           0 :   while (!empty()) {
     153           0 :     begin()->second->Reset();
     154           0 :     free_frames->push_back(begin()->second);
     155           0 :     erase(begin());
     156             :   }
     157           0 : }
     158             : 
     159           0 : bool Vp9SsMap::Insert(const VCMPacket& packet) {
     160           0 :   if (!packet.video_header.codecHeader.VP9.ss_data_available)
     161           0 :     return false;
     162             : 
     163           0 :   ss_map_[packet.timestamp] = packet.video_header.codecHeader.VP9.gof;
     164           0 :   return true;
     165             : }
     166             : 
// Discards all stored scalability structure (SS) data.
void Vp9SsMap::Reset() {
  ss_map_.clear();
}
     170             : 
     171           0 : bool Vp9SsMap::Find(uint32_t timestamp, SsMap::iterator* it_out) {
     172           0 :   bool found = false;
     173           0 :   for (SsMap::iterator it = ss_map_.begin(); it != ss_map_.end(); ++it) {
     174           0 :     if (it->first == timestamp || IsNewerTimestamp(timestamp, it->first)) {
     175           0 :       *it_out = it;
     176           0 :       found = true;
     177             :     }
     178             :   }
     179           0 :   return found;
     180             : }
     181             : 
     182           0 : void Vp9SsMap::RemoveOld(uint32_t timestamp) {
     183           0 :   if (!TimeForCleanup(timestamp))
     184           0 :     return;
     185             : 
     186           0 :   SsMap::iterator it;
     187           0 :   if (!Find(timestamp, &it))
     188           0 :     return;
     189             : 
     190           0 :   ss_map_.erase(ss_map_.begin(), it);
     191           0 :   AdvanceFront(timestamp);
     192             : }
     193             : 
     194           0 : bool Vp9SsMap::TimeForCleanup(uint32_t timestamp) const {
     195           0 :   if (ss_map_.empty() || !IsNewerTimestamp(timestamp, ss_map_.begin()->first))
     196           0 :     return false;
     197             : 
     198           0 :   uint32_t diff = timestamp - ss_map_.begin()->first;
     199           0 :   return diff / kVideoPayloadTypeFrequency >= kSsCleanupIntervalSec;
     200             : }
     201             : 
     202           0 : void Vp9SsMap::AdvanceFront(uint32_t timestamp) {
     203           0 :   RTC_DCHECK(!ss_map_.empty());
     204           0 :   GofInfoVP9 gof = ss_map_.begin()->second;
     205           0 :   ss_map_.erase(ss_map_.begin());
     206           0 :   ss_map_[timestamp] = gof;
     207           0 : }
     208             : 
     209             : // TODO(asapersson): Update according to updates in RTP payload profile.
     210           0 : bool Vp9SsMap::UpdatePacket(VCMPacket* packet) {
     211           0 :   uint8_t gof_idx = packet->video_header.codecHeader.VP9.gof_idx;
     212           0 :   if (gof_idx == kNoGofIdx)
     213           0 :     return false;  // No update needed.
     214             : 
     215           0 :   SsMap::iterator it;
     216           0 :   if (!Find(packet->timestamp, &it))
     217           0 :     return false;  // Corresponding SS not yet received.
     218             : 
     219           0 :   if (gof_idx >= it->second.num_frames_in_gof)
     220           0 :     return false;  // Assume corresponding SS not yet received.
     221             : 
     222           0 :   RTPVideoHeaderVP9* vp9 = &packet->video_header.codecHeader.VP9;
     223           0 :   vp9->temporal_idx = it->second.temporal_idx[gof_idx];
     224           0 :   vp9->temporal_up_switch = it->second.temporal_up_switch[gof_idx];
     225             : 
     226             :   // TODO(asapersson): Set vp9.ref_picture_id[i] and add usage.
     227           0 :   vp9->num_ref_pics = it->second.num_ref_pics[gof_idx];
     228           0 :   for (uint8_t i = 0; i < it->second.num_ref_pics[gof_idx]; ++i) {
     229           0 :     vp9->pid_diff[i] = it->second.pid_diff[gof_idx][i];
     230             :   }
     231           0 :   return true;
     232             : }
     233             : 
     234           0 : void Vp9SsMap::UpdateFrames(FrameList* frames) {
     235           0 :   for (const auto& frame_it : *frames) {
     236             :     uint8_t gof_idx =
     237           0 :         frame_it.second->CodecSpecific()->codecSpecific.VP9.gof_idx;
     238           0 :     if (gof_idx == kNoGofIdx) {
     239           0 :       continue;
     240             :     }
     241           0 :     SsMap::iterator ss_it;
     242           0 :     if (Find(frame_it.second->TimeStamp(), &ss_it)) {
     243           0 :       if (gof_idx >= ss_it->second.num_frames_in_gof) {
     244           0 :         continue;  // Assume corresponding SS not yet received.
     245             :       }
     246           0 :       frame_it.second->SetGofInfo(ss_it->second, gof_idx);
     247             :     }
     248             :   }
     249           0 : }
     250             : 
// Constructs the jitter buffer with kStartNumberOfFrames pre-allocated frame
// buffers in |free_frames_|. |clock| must outlive this object. |event| is
// the wakeup used by NextCompleteFrame(). NOTE(review): |nack_sender| and
// |keyframe_request_sender| are not referenced in this constructor's
// initializer list — presumably stored/used elsewhere; confirm in header.
VCMJitterBuffer::VCMJitterBuffer(Clock* clock,
                                 std::unique_ptr<EventWrapper> event,
                                 NackSender* nack_sender,
                                 KeyFrameRequestSender* keyframe_request_sender)
    : clock_(clock),
      running_(false),
      crit_sect_(CriticalSectionWrapper::CreateCriticalSection()),
      frame_event_(std::move(event)),
      max_number_of_frames_(kStartNumberOfFrames),
      free_frames_(),
      decodable_frames_(),
      incomplete_frames_(),
      last_decoded_state_(),
      first_packet_since_reset_(true),
      stats_callback_(nullptr),
      incoming_frame_rate_(0),
      incoming_frame_count_(0),
      time_last_incoming_frame_count_(0),
      incoming_bit_count_(0),
      incoming_bit_rate_(0),
      num_consecutive_old_packets_(0),
      num_packets_(0),
      num_duplicated_packets_(0),
      num_discarded_packets_(0),
      time_first_packet_ms_(0),
      jitter_estimate_(clock),
      inter_frame_delay_(clock_->TimeInMilliseconds()),
      rtt_ms_(kDefaultRtt),  // Assume kDefaultRtt until UpdateRtt is called.
      nack_mode_(kNoNack),
      low_rtt_nack_threshold_ms_(-1),
      high_rtt_nack_threshold_ms_(-1),
      missing_sequence_numbers_(SequenceNumberLessThan()),
      latest_received_sequence_number_(0),
      max_nack_list_size_(0),
      max_packet_age_to_nack_(0),
      max_incomplete_time_ms_(0),
      decode_error_mode_(kNoErrors),
      average_packets_per_frame_(0.0f),
      frame_counter_(0) {
  // Pre-allocate the initial pool of frame buffers; freed in the destructor.
  for (int i = 0; i < kStartNumberOfFrames; i++)
    free_frames_.push_back(new VCMFrameBuffer());
}
     293             : 
     294           0 : VCMJitterBuffer::~VCMJitterBuffer() {
     295           0 :   Stop();
     296           0 :   for (UnorderedFrameList::iterator it = free_frames_.begin();
     297           0 :        it != free_frames_.end(); ++it) {
     298           0 :     delete *it;
     299             :   }
     300           0 :   for (FrameList::iterator it = incomplete_frames_.begin();
     301           0 :        it != incomplete_frames_.end(); ++it) {
     302           0 :     delete it->second;
     303             :   }
     304           0 :   for (FrameList::iterator it = decodable_frames_.begin();
     305           0 :        it != decodable_frames_.end(); ++it) {
     306           0 :     delete it->second;
     307             :   }
     308           0 :   delete crit_sect_;
     309           0 : }
     310             : 
// Reports discard/duplication percentages and frame-rate statistics through
// the RTC_HISTOGRAM_* macros. No-op unless the buffer is running, has seen
// at least one packet, and has been active for at least
// metrics::kMinRunTimeInSeconds.
void VCMJitterBuffer::UpdateHistograms() {
  if (num_packets_ <= 0 || !running_) {
    return;
  }
  int64_t elapsed_sec =
      (clock_->TimeInMilliseconds() - time_first_packet_ms_) / 1000;
  if (elapsed_sec < metrics::kMinRunTimeInSeconds) {
    return;
  }

  RTC_HISTOGRAM_PERCENTAGE("WebRTC.Video.DiscardedPacketsInPercent",
                           num_discarded_packets_ * 100 / num_packets_);
  RTC_HISTOGRAM_PERCENTAGE("WebRTC.Video.DuplicatedPacketsInPercent",
                           num_duplicated_packets_ * 100 / num_packets_);

  int total_frames =
      receive_statistics_.key_frames + receive_statistics_.delta_frames;
  if (total_frames > 0) {
    // +0.5f rounds to nearest before the int truncation.
    RTC_HISTOGRAM_COUNTS_100(
        "WebRTC.Video.CompleteFramesReceivedPerSecond",
        static_cast<int>((total_frames / elapsed_sec) + 0.5f));
    RTC_HISTOGRAM_COUNTS_1000(
        "WebRTC.Video.KeyFramesReceivedInPermille",
        static_cast<int>(
            (receive_statistics_.key_frames * 1000.0f / total_frames) + 0.5f));
  }
}
     338             : 
// Starts the jitter buffer: resets all statistics, counters, decoding state
// and frame lists, and marks the buffer as running. Safe to call after
// Stop() to restart.
void VCMJitterBuffer::Start() {
  CriticalSectionScoped cs(crit_sect_);
  running_ = true;
  incoming_frame_count_ = 0;
  incoming_frame_rate_ = 0;
  incoming_bit_count_ = 0;
  incoming_bit_rate_ = 0;
  time_last_incoming_frame_count_ = clock_->TimeInMilliseconds();
  receive_statistics_ = FrameCounts();

  num_consecutive_old_packets_ = 0;
  num_packets_ = 0;
  num_duplicated_packets_ = 0;
  num_discarded_packets_ = 0;
  time_first_packet_ms_ = 0;

  // Start in a non-signaled state.
  waiting_for_completion_.frame_size = 0;
  waiting_for_completion_.timestamp = 0;
  waiting_for_completion_.latest_packet_time = -1;
  first_packet_since_reset_ = true;
  rtt_ms_ = kDefaultRtt;
  last_decoded_state_.Reset();

  // Return all queued frames to the free pool.
  decodable_frames_.Reset(&free_frames_);
  incomplete_frames_.Reset(&free_frames_);
}
     366             : 
// Stops the jitter buffer: flushes histogram statistics (while still marked
// running — UpdateHistograms() is a no-op otherwise), clears the decoding
// state, and wakes any thread blocked in NextCompleteFrame().
void VCMJitterBuffer::Stop() {
  CriticalSectionScoped cs(crit_sect_);
  UpdateHistograms();
  running_ = false;
  last_decoded_state_.Reset();

  // Make sure we wake up any threads waiting on these events.
  frame_event_->Set();
}
     376             : 
     377           0 : bool VCMJitterBuffer::Running() const {
     378           0 :   CriticalSectionScoped cs(crit_sect_);
     379           0 :   return running_;
     380             : }
     381             : 
// Empties both frame lists (returning frames to the free pool) and resets
// decoding state, jitter/delay estimates and the NACK list. Unlike Stop(),
// the buffer stays in its current running state.
void VCMJitterBuffer::Flush() {
  CriticalSectionScoped cs(crit_sect_);
  decodable_frames_.Reset(&free_frames_);
  incomplete_frames_.Reset(&free_frames_);
  last_decoded_state_.Reset();  // TODO(mikhal): sync reset.
  num_consecutive_old_packets_ = 0;
  // Also reset the jitter and delay estimates
  jitter_estimate_.Reset();
  inter_frame_delay_.Reset(clock_->TimeInMilliseconds());
  waiting_for_completion_.frame_size = 0;
  waiting_for_completion_.timestamp = 0;
  waiting_for_completion_.latest_packet_time = -1;
  first_packet_since_reset_ = true;
  missing_sequence_numbers_.clear();
}
     397             : 
     398             : // Get received key and delta frames
     399           0 : FrameCounts VCMJitterBuffer::FrameStatistics() const {
     400           0 :   CriticalSectionScoped cs(crit_sect_);
     401           0 :   return receive_statistics_;
     402             : }
     403             : 
     404           0 : int VCMJitterBuffer::num_packets() const {
     405           0 :   CriticalSectionScoped cs(crit_sect_);
     406           0 :   return num_packets_;
     407             : }
     408             : 
     409           0 : int VCMJitterBuffer::num_duplicated_packets() const {
     410           0 :   CriticalSectionScoped cs(crit_sect_);
     411           0 :   return num_duplicated_packets_;
     412             : }
     413             : 
     414           0 : int VCMJitterBuffer::num_discarded_packets() const {
     415           0 :   CriticalSectionScoped cs(crit_sect_);
     416           0 :   return num_discarded_packets_;
     417             : }
     418             : 
// Calculate framerate and bitrate.
// Computes the incoming frame rate (smoothed: average of the current and
// previous measurement) and bit rate over the interval since the last call,
// then resets the interval counters. If called again within one second,
// returns the cached values instead of recomputing.
void VCMJitterBuffer::IncomingRateStatistics(unsigned int* framerate,
                                             unsigned int* bitrate) {
  assert(framerate);
  assert(bitrate);
  CriticalSectionScoped cs(crit_sect_);
  const int64_t now = clock_->TimeInMilliseconds();
  int64_t diff = now - time_last_incoming_frame_count_;
  if (diff < 1000 && incoming_frame_rate_ > 0 && incoming_bit_rate_ > 0) {
    // Make sure we report something even though less than
    // 1 second has passed since last update.
    *framerate = incoming_frame_rate_;
    *bitrate = incoming_bit_rate_;
  } else if (incoming_frame_count_ != 0) {
    // We have received frame(s) since last call to this function

    // Prepare calculations
    if (diff <= 0) {
      // Guard against a non-monotonic or zero elapsed time.
      diff = 1;
    }
    // we add 0.5f for rounding
    float rate = 0.5f + ((incoming_frame_count_ * 1000.0f) / diff);
    if (rate < 1.0f) {
      // Report at least one frame per second while frames are arriving.
      rate = 1.0f;
    }

    // Calculate frame rate
    // Let r be rate.
    // r(0) = 1000*framecount/delta_time.
    // (I.e. frames per second since last calculation.)
    // frame_rate = r(0)/2 + r(-1)/2
    // (I.e. fr/s average this and the previous calculation.)
    *framerate = (incoming_frame_rate_ + static_cast<unsigned int>(rate)) / 2;
    incoming_frame_rate_ = static_cast<unsigned int>(rate);

    // Calculate bit rate
    if (incoming_bit_count_ == 0) {
      *bitrate = 0;
    } else {
      // bits/ms scaled to bits/s: 10 * (100 * bits / ms) == 1000 * bits / ms.
      *bitrate =
          10 * ((100 * incoming_bit_count_) / static_cast<unsigned int>(diff));
    }
    incoming_bit_rate_ = *bitrate;

    // Reset count
    incoming_frame_count_ = 0;
    incoming_bit_count_ = 0;
    time_last_incoming_frame_count_ = now;

  } else {
    // No frames since last call
    time_last_incoming_frame_count_ = clock_->TimeInMilliseconds();
    *framerate = 0;
    *bitrate = 0;
    incoming_frame_rate_ = 0;
    incoming_bit_rate_ = 0;
  }
}
     477             : 
// Returns immediately or a |max_wait_time_ms| ms event hang waiting for a
// complete frame, |max_wait_time_ms| decided by caller.
// Returns the oldest complete decodable frame, or nullptr if none becomes
// available within the wait budget or the buffer is stopped. The frame is
// NOT removed from the buffer (compare PopFrame); the lock is released
// while blocked on |frame_event_| so producers can make progress.
VCMEncodedFrame* VCMJitterBuffer::NextCompleteFrame(uint32_t max_wait_time_ms) {
  crit_sect_->Enter();
  if (!running_) {
    crit_sect_->Leave();
    return nullptr;
  }
  CleanUpOldOrEmptyFrames();

  if (decodable_frames_.empty() ||
      decodable_frames_.Front()->GetState() != kStateComplete) {
    const int64_t end_wait_time_ms =
        clock_->TimeInMilliseconds() + max_wait_time_ms;
    int64_t wait_time_ms = max_wait_time_ms;
    while (wait_time_ms > 0) {
      // Drop the lock while waiting so packets can be inserted and the
      // event signaled by other threads.
      crit_sect_->Leave();
      const EventTypeWrapper ret =
          frame_event_->Wait(static_cast<uint32_t>(wait_time_ms));
      crit_sect_->Enter();
      if (ret == kEventSignaled) {
        // Are we shutting down the jitter buffer?
        if (!running_) {
          crit_sect_->Leave();
          return nullptr;
        }
        // Finding oldest frame ready for decoder.
        CleanUpOldOrEmptyFrames();
        if (decodable_frames_.empty() ||
            decodable_frames_.Front()->GetState() != kStateComplete) {
          // Signaled but still no complete frame: recompute the remaining
          // wait budget and loop.
          wait_time_ms = end_wait_time_ms - clock_->TimeInMilliseconds();
        } else {
          break;
        }
      } else {
        break;
      }
    }
  }
  if (decodable_frames_.empty() ||
      decodable_frames_.Front()->GetState() != kStateComplete) {
    crit_sect_->Leave();
    return nullptr;
  }
  VCMEncodedFrame* encoded_frame = decodable_frames_.Front();
  crit_sect_->Leave();
  return encoded_frame;
}
     526             : 
// Writes to |*timestamp| the timestamp of the oldest frame that may be
// handed to the decoder even if incomplete. Returns false when the buffer
// is stopped, when decoding with errors is disabled (kNoErrors), or when no
// frame satisfies the release conditions below.
bool VCMJitterBuffer::NextMaybeIncompleteTimestamp(uint32_t* timestamp) {
  CriticalSectionScoped cs(crit_sect_);
  if (!running_) {
    return false;
  }
  if (decode_error_mode_ == kNoErrors) {
    // No point to continue, as we are not decoding with errors.
    return false;
  }

  CleanUpOldOrEmptyFrames();

  VCMFrameBuffer* oldest_frame;
  if (decodable_frames_.empty()) {
    // Only consider incomplete frames when NACK is off and more than one is
    // buffered (a newer frame implies the oldest one will not grow further).
    if (nack_mode_ != kNoNack || incomplete_frames_.size() <= 1) {
      return false;
    }
    oldest_frame = incomplete_frames_.Front();
    // Frame will only be removed from buffer if it is complete (or decodable).
    if (oldest_frame->GetState() < kStateComplete) {
      return false;
    }
  } else {
    oldest_frame = decodable_frames_.Front();
    // If we have exactly one frame in the buffer, release it only if it is
    // complete. We know decodable_frames_ is  not empty due to the previous
    // check.
    if (decodable_frames_.size() == 1 && incomplete_frames_.empty() &&
        oldest_frame->GetState() != kStateComplete) {
      return false;
    }
  }

  *timestamp = oldest_frame->TimeStamp();
  return true;
}
     563             : 
// Pulls the frame with |timestamp| out of the jitter buffer (decodable list
// first, then incomplete list), updates the jitter estimate and the last
// decoded state, and returns the frame ready for decoding. Returns NULL when
// the buffer is stopped, no frame with that timestamp exists, or the frame
// ends up with zero length after preparation.
VCMEncodedFrame* VCMJitterBuffer::ExtractAndSetDecode(uint32_t timestamp) {
  CriticalSectionScoped cs(crit_sect_);
  if (!running_) {
    return NULL;
  }
  // Extract the frame with the desired timestamp.
  // This removes the frame, so if you have 2 1 3 4
  // so when 2 comes in, we pull it
  // XXX This is NOT implemented here; we may need to redo this

  VCMFrameBuffer* frame = decodable_frames_.PopFrame(timestamp);
  // Frames from the decodable list are continuous by construction; for a
  // frame taken from the incomplete list we must ask the decoding state.
  bool continuous = true;
  if (!frame) {
    frame = incomplete_frames_.PopFrame(timestamp);
    if (frame)
      continuous = last_decoded_state_.ContinuousFrame(frame);
    else
      return NULL;
  }
  TRACE_EVENT_ASYNC_STEP0("webrtc", "Video", timestamp, "Extract");
  // Frame pulled out from jitter buffer, update the jitter estimate.
  const bool retransmitted = (frame->GetNackCount() > 0);
  if (retransmitted) {
    // Retransmitted frames would bias the delay-based estimate; only count
    // the NACK event, and only while retransmissions are being waited for.
    if (WaitForRetransmissions())
      jitter_estimate_.FrameNacked();
  } else if (frame->Length() > 0) {
    // Ignore retransmitted and empty frames.
    if (waiting_for_completion_.latest_packet_time >= 0) {
      // A previously extracted incomplete frame has now effectively timed
      // out; feed it to the estimator flagged as incomplete.
      UpdateJitterEstimate(waiting_for_completion_, true);
    }
    if (frame->GetState() == kStateComplete) {
      UpdateJitterEstimate(*frame, false);
    } else {
      // Wait for this one to get complete.
      waiting_for_completion_.frame_size = frame->Length();
      waiting_for_completion_.latest_packet_time = frame->LatestPacketTimeMs();
      waiting_for_completion_.timestamp = frame->TimeStamp();
    }
  }

  // The state must be changed to decoding before cleaning up zero sized
  // frames to avoid empty frames being cleaned up and then given to the
  // decoder. Propagates the missing_frame bit.
  frame->PrepareForDecode(continuous);

  // We have a frame - update the last decoded state and nack list.
  last_decoded_state_.SetState(frame);
  DropPacketsFromNackList(last_decoded_state_.sequence_num());

  if ((*frame).IsSessionComplete())
    UpdateAveragePacketsPerFrame(frame->NumPackets());

  if (frame->Length() == 0) {
    // Normally only if MakeDecodable() on an incomplete frame threw it all away
    ReleaseFrame(frame);
    return NULL;
  }
  return frame;
}
     623             : 
     624             : // Release frame when done with decoding. Should never be used to release
     625             : // frames from within the jitter buffer.
     626           0 : void VCMJitterBuffer::ReleaseFrame(VCMEncodedFrame* frame) {
     627           0 :   RTC_CHECK(frame != nullptr);
     628           0 :   CriticalSectionScoped cs(crit_sect_);
     629           0 :   VCMFrameBuffer* frame_buffer = static_cast<VCMFrameBuffer*>(frame);
     630           0 :   RecycleFrameBuffer(frame_buffer);
     631           0 : }
     632             : 
     633             : // Gets frame to use for this timestamp. If no match, get empty frame.
     634           0 : VCMFrameBufferEnum VCMJitterBuffer::GetFrame(const VCMPacket& packet,
     635             :                                              VCMFrameBuffer** frame,
     636             :                                              FrameList** frame_list) {
     637             :   // Handle the 2 1 3 4 case (where 2 3 4 are frame 2 with the timestamp)
     638             :   // from above, for complete nalu's (single-nalus) only.
     639             : 
     640             :   // TODO(jesup) To handle a sequence of fragmented nalus which all are
     641             :   // slices of the same lower-case frame (timestamp), the more complete
     642             :   // solution for FindFrame that uses the seqNum and can move packets
     643             :   // between sessions would be needed.
     644           0 :   *frame = incomplete_frames_.PopFrame(packet.timestamp);
     645           0 :   if (*frame != NULL) {
     646           0 :     *frame_list = &incomplete_frames_;
     647           0 :     return kNoError;
     648             :   }
     649           0 :   *frame = decodable_frames_.PopFrame(packet.timestamp);
     650           0 :   if (*frame != NULL) {
     651           0 :     *frame_list = &decodable_frames_;
     652           0 :     return kNoError;
     653             :   }
     654             : 
     655           0 :   *frame_list = NULL;
     656             :   // No match, return empty frame.
     657           0 :   *frame = GetEmptyFrame();
     658           0 :   if (*frame == NULL) {
     659             :     // No free frame! Try to reclaim some...
     660           0 :     LOG(LS_WARNING) << "Unable to get empty frame; Recycling.";
     661           0 :     bool found_key_frame = RecycleFramesUntilKeyFrame();
     662           0 :     *frame = GetEmptyFrame();
     663           0 :     RTC_CHECK(*frame);
     664           0 :     if (!*frame) {
     665           0 :       LOG(LS_ERROR) << "GetEmptyFrame returned NULL.";
     666           0 :       return kGeneralError;
     667           0 :     } else if (!found_key_frame) {
     668           0 :       RecycleFrameBuffer(*frame);
     669           0 :       return kFlushIndicator;
     670             :     }
     671             :   }
     672           0 :   (*frame)->Reset();
     673           0 :   return kNoError;
     674             : }
     675             : 
     676           0 : int64_t VCMJitterBuffer::LastPacketTime(const VCMEncodedFrame* frame,
     677             :                                         bool* retransmitted) const {
     678           0 :   assert(retransmitted);
     679           0 :   CriticalSectionScoped cs(crit_sect_);
     680             :   const VCMFrameBuffer* frame_buffer =
     681           0 :       static_cast<const VCMFrameBuffer*>(frame);
     682           0 :   *retransmitted = (frame_buffer->GetNackCount() > 0);
     683           0 :   return frame_buffer->LatestPacketTimeMs();
     684             : }
     685             : 
// Inserts |packet| into the frame it belongs to — locating an existing frame
// by timestamp or allocating/recycling an empty one — then updates NACK
// bookkeeping, jitter estimation and the decodable/incomplete frame lists.
// |*retransmitted| is set (only on paths that produce a complete or
// decodable session) to whether the frame has been NACKed. Returns the
// resulting frame-buffer state, or an error/flush indicator.
VCMFrameBufferEnum VCMJitterBuffer::InsertPacket(const VCMPacket& packet,
                                                 bool* retransmitted) {
  CriticalSectionScoped cs(crit_sect_);

  ++num_packets_;
  if (num_packets_ == 1) {
    time_first_packet_ms_ = clock_->TimeInMilliseconds();
  }
  // Does this packet belong to an old frame?
  if (last_decoded_state_.IsOldPacket(&packet)) {
    // Account only for media packets.
    if (packet.sizeBytes > 0) {
      num_discarded_packets_++;
      num_consecutive_old_packets_++;
      if (stats_callback_ != NULL)
        stats_callback_->OnDiscardedPacketsUpdated(num_discarded_packets_);
    }
    // Update last decoded sequence number if the packet arrived late and
    // belongs to a frame with a timestamp equal to the last decoded
    // timestamp.
    last_decoded_state_.UpdateOldPacket(&packet);
    DropPacketsFromNackList(last_decoded_state_.sequence_num());

    // Also see if this old packet made more incomplete frames continuous.
    FindAndInsertContinuousFramesWithState(last_decoded_state_);

    // A long run of only-old packets suggests the receiver is badly out of
    // sync; flush and start over.
    if (num_consecutive_old_packets_ > kMaxConsecutiveOldPackets) {
      LOG(LS_WARNING)
          << num_consecutive_old_packets_
          << " consecutive old packets received. Flushing the jitter buffer.";
      Flush();
      return kFlushIndicator;
    }
    return kOldPacket;
  }

  num_consecutive_old_packets_ = 0;

  VCMFrameBuffer* frame;
  FrameList* frame_list;
  const VCMFrameBufferEnum error = GetFrame(packet, &frame, &frame_list);
  if (error != kNoError)
    return error;

  int64_t now_ms = clock_->TimeInMilliseconds();
  // We are keeping track of the first and latest seq numbers, and
  // the number of wraps to be able to calculate how many packets we expect.
  if (first_packet_since_reset_) {
    // Now it's time to start estimating jitter
    // reset the delay estimate.
    inter_frame_delay_.Reset(now_ms);
  }

  // Empty packets may bias the jitter estimate (lacking size component),
  // therefore don't let empty packet trigger the following updates:
  if (packet.frameType != kEmptyFrame) {
    if (waiting_for_completion_.timestamp == packet.timestamp) {
      // This can get bad if we have a lot of duplicate packets,
      // we will then count some packet multiple times.
      waiting_for_completion_.frame_size += packet.sizeBytes;
      waiting_for_completion_.latest_packet_time = now_ms;
    } else if (waiting_for_completion_.latest_packet_time >= 0 &&
               waiting_for_completion_.latest_packet_time + 2000 <= now_ms) {
      // A packet should never be more than two seconds late
      UpdateJitterEstimate(waiting_for_completion_, true);
      waiting_for_completion_.latest_packet_time = -1;
      waiting_for_completion_.frame_size = 0;
      waiting_for_completion_.timestamp = 0;
    }
  }

  VCMFrameBufferStateEnum previous_state = frame->GetState();
  // Insert packet.
  FrameData frame_data;
  frame_data.rtt_ms = rtt_ms_;
  frame_data.rolling_average_packets_per_frame = average_packets_per_frame_;
  VCMFrameBufferEnum buffer_state =
      frame->InsertPacket(packet, now_ms, decode_error_mode_, frame_data);

  if (previous_state != kStateComplete) {
    TRACE_EVENT_ASYNC_BEGIN1("webrtc", "Video", frame->TimeStamp(), "timestamp",
                             frame->TimeStamp());
  }

  // Positive buffer states indicate the packet was accepted into the frame
  // (see VCMFrameBufferEnum); only then update rate and NACK bookkeeping.
  if (buffer_state > 0) {
    // sizeBytes << 3: bytes to bits for the incoming bitrate counter.
    incoming_bit_count_ += packet.sizeBytes << 3;
    if (first_packet_since_reset_) {
      latest_received_sequence_number_ = packet.seqNum;
      first_packet_since_reset_ = false;
    } else {
      if (IsPacketRetransmitted(packet)) {
        frame->IncrementNackCount();
      }
      // If the NACK list cannot absorb the gap and this is not a key frame,
      // signal the caller to flush.
      if (!UpdateNackList(packet.seqNum) &&
          packet.frameType != kVideoFrameKey) {
        buffer_state = kFlushIndicator;
      }

      latest_received_sequence_number_ =
          LatestSequenceNumber(latest_received_sequence_number_, packet.seqNum);
    }
  }

  // Is the frame already in the decodable list?
  bool continuous = IsContinuous(*frame);
  switch (buffer_state) {
    case kGeneralError:
    case kTimeStampError:
    case kSizeError: {
      // The frame is unusable; return its buffer to the free pool.
      RecycleFrameBuffer(frame);
      break;
    }
    case kCompleteSession: {
      // Count the frame only on its first transition to complete/decodable.
      if (previous_state != kStateDecodable &&
          previous_state != kStateComplete) {
        CountFrame(*frame);
        if (continuous) {
          // Signal that we have a complete session.
          frame_event_->Set();
        }
      }
      FALLTHROUGH();
    }
    // Note: There is no break here - continuing to kDecodableSession.
    case kDecodableSession: {
      *retransmitted = (frame->GetNackCount() > 0);
      if (continuous) {
        decodable_frames_.InsertFrame(frame);
        // This frame may make later incomplete frames continuous too.
        FindAndInsertContinuousFrames(*frame);
      } else {
        incomplete_frames_.InsertFrame(frame);
        // If NACKs are enabled, keyframes are triggered by |GetNackList|.
        if (nack_mode_ == kNoNack &&
            NonContinuousOrIncompleteDuration() >
                90 * kMaxDiscontinuousFramesTime) {
          return kFlushIndicator;
        }
      }
      break;
    }
    case kIncomplete: {
      // An empty frame that merely advances the decoding state can be
      // recycled immediately.
      if (frame->GetState() == kStateEmpty &&
          last_decoded_state_.UpdateEmptyFrame(frame)) {
        RecycleFrameBuffer(frame);
        return kNoError;
      } else {
        incomplete_frames_.InsertFrame(frame);
        // If NACKs are enabled, keyframes are triggered by |GetNackList|.
        if (nack_mode_ == kNoNack &&
            NonContinuousOrIncompleteDuration() >
                90 * kMaxDiscontinuousFramesTime) {
          return kFlushIndicator;
        }
      }
      break;
    }
    case kNoError:
    case kOutOfBoundsPacket:
    case kDuplicatePacket: {
      // Put back the frame where it came from.
      if (frame_list != NULL) {
        frame_list->InsertFrame(frame);
      } else {
        RecycleFrameBuffer(frame);
      }
      // NOTE(review): this counter increments for kNoError and
      // kOutOfBoundsPacket too, not only duplicates — confirm intent.
      ++num_duplicated_packets_;
      break;
    }
    case kFlushIndicator:
      RecycleFrameBuffer(frame);
      return kFlushIndicator;
    default:
      assert(false);
  }
  return buffer_state;
}
     862             : 
     863           0 : bool VCMJitterBuffer::IsContinuousInState(
     864             :     const VCMFrameBuffer& frame,
     865             :     const VCMDecodingState& decoding_state) const {
     866             :   // Is this frame (complete or decodable) and continuous?
     867             :   // kStateDecodable will never be set when decode_error_mode_ is false
     868             :   // as SessionInfo determines this state based on the error mode (and frame
     869             :   // completeness).
     870           0 :   return (frame.GetState() == kStateComplete ||
     871           0 :           frame.GetState() == kStateDecodable) &&
     872           0 :          decoding_state.ContinuousFrame(&frame);
     873             : }
     874             : 
     875           0 : bool VCMJitterBuffer::IsContinuous(const VCMFrameBuffer& frame) const {
     876           0 :   if (IsContinuousInState(frame, last_decoded_state_)) {
     877           0 :     return true;
     878             :   }
     879           0 :   VCMDecodingState decoding_state;
     880           0 :   decoding_state.CopyFrom(last_decoded_state_);
     881           0 :   for (FrameList::const_iterator it = decodable_frames_.begin();
     882           0 :        it != decodable_frames_.end(); ++it) {
     883           0 :     VCMFrameBuffer* decodable_frame = it->second;
     884           0 :     if (IsNewerTimestamp(decodable_frame->TimeStamp(), frame.TimeStamp())) {
     885           0 :       break;
     886             :     }
     887           0 :     decoding_state.SetState(decodable_frame);
     888           0 :     if (IsContinuousInState(frame, decoding_state)) {
     889           0 :       return true;
     890             :     }
     891             :   }
     892           0 :   return false;
     893             : }
     894             : 
     895           0 : void VCMJitterBuffer::FindAndInsertContinuousFrames(
     896             :     const VCMFrameBuffer& new_frame) {
     897           0 :   VCMDecodingState decoding_state;
     898           0 :   decoding_state.CopyFrom(last_decoded_state_);
     899           0 :   decoding_state.SetState(&new_frame);
     900           0 :   FindAndInsertContinuousFramesWithState(decoding_state);
     901           0 : }
     902             : 
// Moves every incomplete frame that is (transitively) continuous with
// |original_decoded_state| over to the decodable list, advancing a working
// copy of the state with each promoted frame.
void VCMJitterBuffer::FindAndInsertContinuousFramesWithState(
    const VCMDecodingState& original_decoded_state) {
  // Copy original_decoded_state so we can move the state forward with each
  // decodable frame we find.
  VCMDecodingState decoding_state;
  decoding_state.CopyFrom(original_decoded_state);

  // When temporal layers are available, we search for a complete or decodable
  // frame until we hit one of the following:
  // 1. Continuous base or sync layer.
  // 2. The end of the list was reached.
  for (FrameList::iterator it = incomplete_frames_.begin();
       it != incomplete_frames_.end();) {
    VCMFrameBuffer* frame = it->second;
    // Skip frames that predate the starting state; they can never be
    // continuations of it.
    if (IsNewerTimestamp(original_decoded_state.time_stamp(),
                         frame->TimeStamp())) {
      ++it;
      continue;
    }
    if (IsContinuousInState(*frame, decoding_state)) {
      // Promote the frame; post-increment keeps the iterator valid across
      // the erase.
      decodable_frames_.InsertFrame(frame);
      incomplete_frames_.erase(it++);
      decoding_state.SetState(frame);
    } else if (frame->TemporalId() <= 0) {
      // A non-continuous base/sync layer blocks everything after it.
      break;
    } else {
      // Higher temporal layers may be skipped; keep searching.
      ++it;
    }
  }
}
     933             : 
     934           0 : uint32_t VCMJitterBuffer::EstimatedJitterMs() {
     935           0 :   CriticalSectionScoped cs(crit_sect_);
     936             :   // Compute RTT multiplier for estimation.
     937             :   // low_rtt_nackThresholdMs_ == -1 means no FEC.
     938           0 :   double rtt_mult = 1.0f;
     939           0 :   if (low_rtt_nack_threshold_ms_ >= 0 &&
     940           0 :       rtt_ms_ >= low_rtt_nack_threshold_ms_) {
     941             :     // For RTTs above low_rtt_nack_threshold_ms_ we don't apply extra delay
     942             :     // when waiting for retransmissions.
     943           0 :     rtt_mult = 0.0f;
     944             :   }
     945           0 :   return jitter_estimate_.GetJitterEstimate(rtt_mult);
     946             : }
     947             : 
     948           0 : void VCMJitterBuffer::UpdateRtt(int64_t rtt_ms) {
     949           0 :   CriticalSectionScoped cs(crit_sect_);
     950           0 :   rtt_ms_ = rtt_ms;
     951           0 :   jitter_estimate_.UpdateRtt(rtt_ms);
     952           0 :   if (!WaitForRetransmissions())
     953           0 :     jitter_estimate_.ResetNackCount();
     954           0 : }
     955             : 
// Configures the NACK mode and its RTT thresholds. Threshold value -1 means
// "unset"; valid combinations are enforced by the asserts below.
void VCMJitterBuffer::SetNackMode(VCMNackMode mode,
                                  int64_t low_rtt_nack_threshold_ms,
                                  int64_t high_rtt_nack_threshold_ms) {
  CriticalSectionScoped cs(crit_sect_);
  nack_mode_ = mode;
  if (mode == kNoNack) {
    // No retransmissions will be requested, so pending NACKs are stale.
    missing_sequence_numbers_.clear();
  }
  assert(low_rtt_nack_threshold_ms >= -1 && high_rtt_nack_threshold_ms >= -1);
  assert(high_rtt_nack_threshold_ms == -1 ||
         low_rtt_nack_threshold_ms <= high_rtt_nack_threshold_ms);
  assert(low_rtt_nack_threshold_ms > -1 || high_rtt_nack_threshold_ms == -1);
  low_rtt_nack_threshold_ms_ = low_rtt_nack_threshold_ms;
  high_rtt_nack_threshold_ms_ = high_rtt_nack_threshold_ms;
  // Don't set a high start rtt if high_rtt_nack_threshold_ms_ is used, to not
  // disable NACK in |kNack| mode.
  if (rtt_ms_ == kDefaultRtt && high_rtt_nack_threshold_ms_ != -1) {
    rtt_ms_ = 0;
  }
  if (!WaitForRetransmissions()) {
    jitter_estimate_.ResetNackCount();
  }
}
     979             : 
     980           0 : void VCMJitterBuffer::SetNackSettings(size_t max_nack_list_size,
     981             :                                       int max_packet_age_to_nack,
     982             :                                       int max_incomplete_time_ms) {
     983           0 :   CriticalSectionScoped cs(crit_sect_);
     984           0 :   assert(max_packet_age_to_nack >= 0);
     985           0 :   assert(max_incomplete_time_ms_ >= 0);
     986           0 :   max_nack_list_size_ = max_nack_list_size;
     987           0 :   max_packet_age_to_nack_ = max_packet_age_to_nack;
     988           0 :   max_incomplete_time_ms_ = max_incomplete_time_ms;
     989           0 : }
     990             : 
     991           0 : VCMNackMode VCMJitterBuffer::nack_mode() const {
     992           0 :   CriticalSectionScoped cs(crit_sect_);
     993           0 :   return nack_mode_;
     994             : }
     995             : 
     996           0 : int VCMJitterBuffer::NonContinuousOrIncompleteDuration() {
     997           0 :   if (incomplete_frames_.empty()) {
     998           0 :     return 0;
     999             :   }
    1000           0 :   uint32_t start_timestamp = incomplete_frames_.Front()->TimeStamp();
    1001           0 :   if (!decodable_frames_.empty()) {
    1002           0 :     start_timestamp = decodable_frames_.Back()->TimeStamp();
    1003             :   }
    1004           0 :   return incomplete_frames_.Back()->TimeStamp() - start_timestamp;
    1005             : }
    1006             : 
    1007           0 : uint16_t VCMJitterBuffer::EstimatedLowSequenceNumber(
    1008             :     const VCMFrameBuffer& frame) const {
    1009           0 :   assert(frame.GetLowSeqNum() >= 0);
    1010           0 :   if (frame.HaveFirstPacket())
    1011           0 :     return frame.GetLowSeqNum();
    1012             : 
    1013             :   // This estimate is not accurate if more than one packet with lower sequence
    1014             :   // number is lost.
    1015           0 :   return frame.GetLowSeqNum() - 1;
    1016             : }
    1017             : 
// Returns the sequence numbers currently missing and worth NACKing. Sets
// |*request_key_frame| instead when NACKing cannot recover the stream:
// no decodable key frame at start-up, an over-full NACK list that recycling
// could not fix, or too long a non-decodable duration with no key frame in
// the incomplete list.
std::vector<uint16_t> VCMJitterBuffer::GetNackList(bool* request_key_frame) {
  CriticalSectionScoped cs(crit_sect_);
  *request_key_frame = false;
  if (nack_mode_ == kNoNack) {
    return std::vector<uint16_t>();
  }
  if (last_decoded_state_.in_initial_state()) {
    // Nothing has been decoded yet: decoding can only start from a key frame
    // whose first packet has arrived.
    VCMFrameBuffer* next_frame = NextFrame();
    const bool first_frame_is_key = next_frame &&
                                    next_frame->FrameType() == kVideoFrameKey &&
                                    next_frame->HaveFirstPacket();
    if (!first_frame_is_key) {
      // Only request a key frame if we have actually received media packets;
      // empty frames alone are no evidence the sender is producing video.
      bool have_non_empty_frame =
          decodable_frames_.end() != find_if(decodable_frames_.begin(),
                                             decodable_frames_.end(),
                                             HasNonEmptyState);
      if (!have_non_empty_frame) {
        have_non_empty_frame =
            incomplete_frames_.end() != find_if(incomplete_frames_.begin(),
                                                incomplete_frames_.end(),
                                                HasNonEmptyState);
      }
      bool found_key_frame = RecycleFramesUntilKeyFrame();
      if (!found_key_frame) {
        *request_key_frame = have_non_empty_frame;
        return std::vector<uint16_t>();
      }
    }
  }
  if (TooLargeNackList()) {
    // Recycling frames may shrink the list; if it fails, fall back to a key
    // frame request.
    *request_key_frame = !HandleTooLargeNackList();
  }
  if (max_incomplete_time_ms_ > 0) {
    int non_continuous_incomplete_duration =
        NonContinuousOrIncompleteDuration();
    // 90 * ms converts milliseconds to RTP ticks — assumes a 90 kHz clock.
    if (non_continuous_incomplete_duration > 90 * max_incomplete_time_ms_) {
      LOG_F(LS_WARNING) << "Too long non-decodable duration: "
                        << non_continuous_incomplete_duration << " > "
                        << 90 * max_incomplete_time_ms_;
      // Look for the newest key frame among the incomplete frames.
      FrameList::reverse_iterator rit = find_if(
          incomplete_frames_.rbegin(), incomplete_frames_.rend(), IsKeyFrame);
      if (rit == incomplete_frames_.rend()) {
        // Request a key frame if we don't have one already.
        *request_key_frame = true;
        return std::vector<uint16_t>();
      } else {
        // Skip to the last key frame. If it's incomplete we will start
        // NACKing it.
        // Note that the estimated low sequence number is correct for VP8
        // streams because only the first packet of a key frame is marked.
        last_decoded_state_.Reset();
        DropPacketsFromNackList(EstimatedLowSequenceNumber(*rit->second));
      }
    }
  }
  std::vector<uint16_t> nack_list(missing_sequence_numbers_.begin(),
                                  missing_sequence_numbers_.end());
  return nack_list;
}
    1077             : 
    1078           0 : void VCMJitterBuffer::SetDecodeErrorMode(VCMDecodeErrorMode error_mode) {
    1079           0 :   CriticalSectionScoped cs(crit_sect_);
    1080           0 :   decode_error_mode_ = error_mode;
    1081           0 : }
    1082             : 
    1083           0 : VCMFrameBuffer* VCMJitterBuffer::NextFrame() const {
    1084           0 :   if (!decodable_frames_.empty())
    1085           0 :     return decodable_frames_.Front();
    1086           0 :   if (!incomplete_frames_.empty())
    1087           0 :     return incomplete_frames_.Front();
    1088           0 :   return NULL;
    1089             : }
    1090             : 
// Updates the missing-sequence-number list given newly received
// |sequence_number|: gaps up to it are added as missing, or it is removed if
// it was itself missing. Returns false when the NACK list can no longer
// recover the stream and a key frame should be requested instead.
bool VCMJitterBuffer::UpdateNackList(uint16_t sequence_number) {
  if (nack_mode_ == kNoNack) {
    return true;
  }
  // Make sure we don't add packets which are already too old to be decoded.
  if (!last_decoded_state_.in_initial_state()) {
    latest_received_sequence_number_ = LatestSequenceNumber(
        latest_received_sequence_number_, last_decoded_state_.sequence_num());
  }
  if (IsNewerSequenceNumber(sequence_number,
                            latest_received_sequence_number_)) {
    // Push any missing sequence numbers to the NACK list.
    // uint16_t arithmetic wraps; IsNewerSequenceNumber handles wrap-around.
    for (uint16_t i = latest_received_sequence_number_ + 1;
         IsNewerSequenceNumber(sequence_number, i); ++i) {
      // Hint: insert at end — the gap numbers arrive in increasing order.
      missing_sequence_numbers_.insert(missing_sequence_numbers_.end(), i);
      TRACE_EVENT_INSTANT1(TRACE_DISABLED_BY_DEFAULT("webrtc_rtp"), "AddNack",
                           "seqnum", i);
    }
    if (TooLargeNackList() && !HandleTooLargeNackList()) {
      LOG(LS_WARNING) << "Requesting key frame due to too large NACK list.";
      return false;
    }
    if (MissingTooOldPacket(sequence_number) &&
        !HandleTooOldPackets(sequence_number)) {
      LOG(LS_WARNING) << "Requesting key frame due to missing too old packets";
      return false;
    }
  } else {
    // An out-of-order (previously missing) packet arrived; stop NACKing it.
    missing_sequence_numbers_.erase(sequence_number);
    TRACE_EVENT_INSTANT1(TRACE_DISABLED_BY_DEFAULT("webrtc_rtp"), "RemoveNack",
                         "seqnum", sequence_number);
  }
  return true;
}
    1125             : 
    1126           0 : bool VCMJitterBuffer::TooLargeNackList() const {
    1127           0 :   return missing_sequence_numbers_.size() > max_nack_list_size_;
    1128             : }
    1129             : 
    1130           0 : bool VCMJitterBuffer::HandleTooLargeNackList() {
    1131             :   // Recycle frames until the NACK list is small enough. It is likely cheaper to
    1132             :   // request a key frame than to retransmit this many missing packets.
    1133           0 :   LOG_F(LS_WARNING) << "NACK list has grown too large: "
    1134           0 :                     << missing_sequence_numbers_.size() << " > "
    1135           0 :                     << max_nack_list_size_;
    1136           0 :   bool key_frame_found = false;
    1137           0 :   while (TooLargeNackList()) {
    1138           0 :     key_frame_found = RecycleFramesUntilKeyFrame();
    1139             :   }
    1140           0 :   return key_frame_found;
    1141             : }
    1142             : 
    1143           0 : bool VCMJitterBuffer::MissingTooOldPacket(
    1144             :     uint16_t latest_sequence_number) const {
    1145           0 :   if (missing_sequence_numbers_.empty()) {
    1146           0 :     return false;
    1147             :   }
    1148             :   const uint16_t age_of_oldest_missing_packet =
    1149           0 :       latest_sequence_number - *missing_sequence_numbers_.begin();
    1150             :   // Recycle frames if the NACK list contains too old sequence numbers as
    1151             :   // the packets may have already been dropped by the sender.
    1152           0 :   return age_of_oldest_missing_packet > max_packet_age_to_nack_;
    1153             : }
    1154             : 
    1155           0 : bool VCMJitterBuffer::HandleTooOldPackets(uint16_t latest_sequence_number) {
    1156           0 :   bool key_frame_found = false;
    1157             :   const uint16_t age_of_oldest_missing_packet =
    1158           0 :       latest_sequence_number - *missing_sequence_numbers_.begin();
    1159           0 :   LOG_F(LS_WARNING) << "NACK list contains too old sequence numbers: "
    1160           0 :                     << age_of_oldest_missing_packet << " > "
    1161           0 :                     << max_packet_age_to_nack_;
    1162           0 :   while (MissingTooOldPacket(latest_sequence_number)) {
    1163           0 :     key_frame_found = RecycleFramesUntilKeyFrame();
    1164             :   }
    1165           0 :   return key_frame_found;
    1166             : }
    1167             : 
    1168           0 : void VCMJitterBuffer::DropPacketsFromNackList(
    1169             :     uint16_t last_decoded_sequence_number) {
    1170             :   // Erase all sequence numbers from the NACK list which we won't need any
    1171             :   // longer.
    1172             :   missing_sequence_numbers_.erase(
    1173             :       missing_sequence_numbers_.begin(),
    1174           0 :       missing_sequence_numbers_.upper_bound(last_decoded_sequence_number));
    1175           0 : }
    1176             : 
    1177           0 : void VCMJitterBuffer::RegisterStatsCallback(
    1178             :     VCMReceiveStatisticsCallback* callback) {
    1179           0 :   CriticalSectionScoped cs(crit_sect_);
    1180           0 :   stats_callback_ = callback;
    1181           0 : }
    1182             : 
    1183           0 : VCMFrameBuffer* VCMJitterBuffer::GetEmptyFrame() {
    1184           0 :   if (free_frames_.empty()) {
    1185           0 :     if (!TryToIncreaseJitterBufferSize()) {
    1186           0 :       return NULL;
    1187             :     }
    1188             :   }
    1189           0 :   VCMFrameBuffer* frame = free_frames_.front();
    1190           0 :   free_frames_.pop_front();
    1191           0 :   return frame;
    1192             : }
    1193             : 
    1194           0 : bool VCMJitterBuffer::TryToIncreaseJitterBufferSize() {
    1195           0 :   if (max_number_of_frames_ >= kMaxNumberOfFrames)
    1196           0 :     return false;
    1197           0 :   free_frames_.push_back(new VCMFrameBuffer());
    1198           0 :   ++max_number_of_frames_;
    1199           0 :   TRACE_COUNTER1("webrtc", "JBMaxFrames", max_number_of_frames_);
    1200           0 :   return true;
    1201             : }
    1202             : 
// Recycle oldest frames up to a key frame, used if jitter buffer is completely
// full.
bool VCMJitterBuffer::RecycleFramesUntilKeyFrame() {
  // First release incomplete frames, and only release decodable frames if there
  // are no incomplete ones.
  FrameList::iterator key_frame_it;
  bool key_frame_found = false;
  int dropped_frames = 0;
  dropped_frames += incomplete_frames_.RecycleFramesUntilKeyFrame(
      &key_frame_it, &free_frames_);
  key_frame_found = key_frame_it != incomplete_frames_.end();
  // If nothing was dropped from the incomplete list, fall back to the
  // decodable list; key_frame_found is then recomputed against that list.
  if (dropped_frames == 0) {
    dropped_frames += decodable_frames_.RecycleFramesUntilKeyFrame(
        &key_frame_it, &free_frames_);
    key_frame_found = key_frame_it != decodable_frames_.end();
  }
  TRACE_EVENT_INSTANT0("webrtc", "JB::RecycleFramesUntilKeyFrame");
  if (key_frame_found) {
    LOG(LS_INFO) << "Found key frame while dropping frames.";
    // Reset last decoded state to make sure the next frame decoded is a key
    // frame, and start NACKing from here.
    last_decoded_state_.Reset();
    // NACKs for packets before the key frame are obsolete now.
    DropPacketsFromNackList(EstimatedLowSequenceNumber(*key_frame_it->second));
  } else if (decodable_frames_.empty()) {
    // All frames dropped. Reset the decoding state and clear missing sequence
    // numbers as we're starting fresh.
    last_decoded_state_.Reset();
    missing_sequence_numbers_.clear();
  }
  return key_frame_found;
}
    1234             : 
    1235             : // Must be called under the critical section |crit_sect_|.
    1236           0 : void VCMJitterBuffer::CountFrame(const VCMFrameBuffer& frame) {
    1237           0 :   incoming_frame_count_++;
    1238             : 
    1239           0 :   if (frame.FrameType() == kVideoFrameKey) {
    1240           0 :     TRACE_EVENT_ASYNC_STEP0("webrtc", "Video", frame.TimeStamp(),
    1241             :                             "KeyComplete");
    1242             :   } else {
    1243           0 :     TRACE_EVENT_ASYNC_STEP0("webrtc", "Video", frame.TimeStamp(),
    1244             :                             "DeltaComplete");
    1245             :   }
    1246             : 
    1247             :   // Update receive statistics. We count all layers, thus when you use layers
    1248             :   // adding all key and delta frames might differ from frame count.
    1249           0 :   if (frame.IsSessionComplete()) {
    1250           0 :     if (frame.FrameType() == kVideoFrameKey) {
    1251           0 :       ++receive_statistics_.key_frames;
    1252           0 :       if (receive_statistics_.key_frames == 1) {
    1253           0 :         LOG(LS_INFO) << "Received first complete key frame";
    1254             :       }
    1255             :     } else {
    1256           0 :       ++receive_statistics_.delta_frames;
    1257             :     }
    1258             : 
    1259           0 :     if (stats_callback_ != NULL)
    1260           0 :       stats_callback_->OnFrameCountsUpdated(receive_statistics_);
    1261             :   }
    1262           0 : }
    1263             : 
// Exponentially-weighted moving average of packets per frame: the first
// sample seeds the average, the next kFastConvergeThreshold frames converge
// fast, and after that a slower steady-state weight is used.
void VCMJitterBuffer::UpdateAveragePacketsPerFrame(int current_number_packets) {
  if (frame_counter_ > kFastConvergeThreshold) {
    // Steady state: small weight on the new sample.
    average_packets_per_frame_ =
        average_packets_per_frame_ * (1 - kNormalConvergeMultiplier) +
        current_number_packets * kNormalConvergeMultiplier;
  } else if (frame_counter_ > 0) {
    // Warm-up: larger weight on the new sample.
    average_packets_per_frame_ =
        average_packets_per_frame_ * (1 - kFastConvergeMultiplier) +
        current_number_packets * kFastConvergeMultiplier;
    frame_counter_++;
  } else {
    // Very first sample seeds the average directly.
    average_packets_per_frame_ = current_number_packets;
    frame_counter_++;
  }
  // frame_counter_ stops advancing once past the fast-converge threshold; it
  // only tracks warm-up progress.
}
    1279             : 
    1280             : // Must be called under the critical section |crit_sect_|.
    1281           0 : void VCMJitterBuffer::CleanUpOldOrEmptyFrames() {
    1282           0 :   decodable_frames_.CleanUpOldOrEmptyFrames(&last_decoded_state_,
    1283           0 :                                             &free_frames_);
    1284           0 :   incomplete_frames_.CleanUpOldOrEmptyFrames(&last_decoded_state_,
    1285           0 :                                              &free_frames_);
    1286           0 :   if (!last_decoded_state_.in_initial_state()) {
    1287           0 :     DropPacketsFromNackList(last_decoded_state_.sequence_num());
    1288             :   }
    1289           0 : }
    1290             : 
    1291             : // Must be called from within |crit_sect_|.
    1292           0 : bool VCMJitterBuffer::IsPacketRetransmitted(const VCMPacket& packet) const {
    1293           0 :   return missing_sequence_numbers_.find(packet.seqNum) !=
    1294           0 :          missing_sequence_numbers_.end();
    1295             : }
    1296             : 
    1297             : // Must be called under the critical section |crit_sect_|. Should never be
    1298             : // called with retransmitted frames, they must be filtered out before this
    1299             : // function is called.
    1300           0 : void VCMJitterBuffer::UpdateJitterEstimate(const VCMJitterSample& sample,
    1301             :                                            bool incomplete_frame) {
    1302           0 :   if (sample.latest_packet_time == -1) {
    1303           0 :     return;
    1304             :   }
    1305           0 :   UpdateJitterEstimate(sample.latest_packet_time, sample.timestamp,
    1306           0 :                        sample.frame_size, incomplete_frame);
    1307             : }
    1308             : 
    1309             : // Must be called under the critical section crit_sect_. Should never be
    1310             : // called with retransmitted frames, they must be filtered out before this
    1311             : // function is called.
    1312           0 : void VCMJitterBuffer::UpdateJitterEstimate(const VCMFrameBuffer& frame,
    1313             :                                            bool incomplete_frame) {
    1314           0 :   if (frame.LatestPacketTimeMs() == -1) {
    1315           0 :     return;
    1316             :   }
    1317             :   // No retransmitted frames should be a part of the jitter
    1318             :   // estimate.
    1319           0 :   UpdateJitterEstimate(frame.LatestPacketTimeMs(), frame.TimeStamp(),
    1320           0 :                        frame.Length(), incomplete_frame);
    1321             : }
    1322             : 
    1323             : // Must be called under the critical section |crit_sect_|. Should never be
    1324             : // called with retransmitted frames, they must be filtered out before this
    1325             : // function is called.
    1326           0 : void VCMJitterBuffer::UpdateJitterEstimate(int64_t latest_packet_time_ms,
    1327             :                                            uint32_t timestamp,
    1328             :                                            unsigned int frame_size,
    1329             :                                            bool incomplete_frame) {
    1330           0 :   if (latest_packet_time_ms == -1) {
    1331           0 :     return;
    1332             :   }
    1333             :   int64_t frame_delay;
    1334           0 :   bool not_reordered = inter_frame_delay_.CalculateDelay(
    1335           0 :       timestamp, &frame_delay, latest_packet_time_ms);
    1336             :   // Filter out frames which have been reordered in time by the network
    1337           0 :   if (not_reordered) {
    1338             :     // Update the jitter estimate with the new samples
    1339           0 :     jitter_estimate_.UpdateEstimate(frame_delay, frame_size, incomplete_frame);
    1340             :   }
    1341             : }
    1342             : 
    1343           0 : bool VCMJitterBuffer::WaitForRetransmissions() {
    1344           0 :   if (nack_mode_ == kNoNack) {
    1345             :     // NACK disabled -> don't wait for retransmissions.
    1346           0 :     return false;
    1347             :   }
    1348             :   // Evaluate if the RTT is higher than |high_rtt_nack_threshold_ms_|, and in
    1349             :   // that case we don't wait for retransmissions.
    1350           0 :   if (high_rtt_nack_threshold_ms_ >= 0 &&
    1351           0 :       rtt_ms_ >= high_rtt_nack_threshold_ms_) {
    1352           0 :     return false;
    1353             :   }
    1354           0 :   return true;
    1355             : }
    1356             : 
    1357           0 : void VCMJitterBuffer::RecycleFrameBuffer(VCMFrameBuffer* frame) {
    1358           0 :   frame->Reset();
    1359           0 :   free_frames_.push_back(frame);
    1360           0 : }
    1361             : 
    1362             : }  // namespace webrtc

Generated by: LCOV version 1.13