LCOV - code coverage report
Current view: top level - media/webrtc/trunk/webrtc/modules/rtp_rtcp/source - rtp_format_h264.cc (source / functions)
Test: output.info
Date: 2017-07-14 16:53:18
Coverage:           Hit    Total    Coverage
  Lines:              0      347       0.0 %
  Functions:          0       23       0.0 %
Legend: Lines: hit | not hit

          Line data    Source code
       1             : /*
       2             :  *  Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
       3             :  *
       4             :  *  Use of this source code is governed by a BSD-style license
       5             :  *  that can be found in the LICENSE file in the root of the source
       6             :  *  tree. An additional intellectual property rights grant can be found
       7             :  *  in the file PATENTS.  All contributing project authors may
       8             :  *  be found in the AUTHORS file in the root of the source tree.
       9             :  */
      10             : 
      11             : #include "webrtc/modules/rtp_rtcp/source/rtp_format_h264.h"
      12             : 
      13             : #include <string.h>
      14             : #include <memory>
      15             : #include <utility>
      16             : #include <vector>
      17             : 
      18             : #include "webrtc/base/checks.h"
      19             : #include "webrtc/base/logging.h"
      20             : #include "webrtc/modules/include/module_common_types.h"
      21             : #include "webrtc/modules/rtp_rtcp/source/byte_io.h"
      22             : #include "webrtc/modules/rtp_rtcp/source/rtp_packet_to_send.h"
      23             : #include "webrtc/common_video/h264/sps_vui_rewriter.h"
      24             : #include "webrtc/common_video/h264/h264_common.h"
      25             : #include "webrtc/common_video/h264/pps_parser.h"
      26             : #include "webrtc/common_video/h264/sps_parser.h"
      27             : #include "webrtc/system_wrappers/include/metrics.h"
      28             : 
      29             : namespace webrtc {
      30             : namespace {
      31             : 
      32             : static const size_t kNalHeaderSize = 1;
      33             : static const size_t kFuAHeaderSize = 2;
      34             : static const size_t kLengthFieldSize = 2;
      35             : static const size_t kStapAHeaderSize = kNalHeaderSize + kLengthFieldSize;
      36             : 
      37             : static const char* kSpsValidHistogramName = "WebRTC.Video.H264.SpsValid";
      38             : enum SpsValidEvent {
      39             :   kReceivedSpsPocOk = 0,
      40             :   kReceivedSpsVuiOk = 1,
      41             :   kReceivedSpsRewritten = 2,
      42             :   kReceivedSpsParseFailure = 3,
      43             :   kSentSpsPocOk = 4,
      44             :   kSentSpsVuiOk = 5,
      45             :   kSentSpsRewritten = 6,
      46             :   kSentSpsParseFailure = 7,
      47             :   kSpsRewrittenMax = 8
      48             : };
      49             : 
      50             : // Bit masks for FU (A and B) indicators.
      51             : enum NalDefs : uint8_t { kFBit = 0x80, kNriMask = 0x60, kTypeMask = 0x1F };
      52             : 
      53             : // Bit masks for FU (A and B) headers.
      54             : enum FuDefs : uint8_t { kSBit = 0x80, kEBit = 0x40, kRBit = 0x20 };
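                     : // For reference, a sketch of the single octet these masks address, laid out
                     : // as in RFC 6184 (the FU indicator reuses the plain NAL header layout):
                     : //
                     : //   NAL header / FU indicator:  |F|N N|T T T T T|   F = forbidden bit,
                     : //                                                   N = NRI, T = type
                     : //   FU header:                  |S|E|R|T T T T T|   S = start, E = end,
                     : //                                                   R = reserved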
      55             : 
      56             : // TODO(pbos): Avoid parsing this here as well as inside the jitter buffer.
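                     : // A sketch of the STAP-A payload this function walks (per RFC 6184), where
                     : // the caller passes nalu_ptr pointing just past the 1-byte STAP-A header:
                     : //
                     : //   | STAP-A hdr | size 1 | NALU 1 ...... | size 2 | NALU 2 ...... | ...
                     : //      1 byte     2 bytes                  2 bytes
                     : //
                     : // The offsets pushed below are relative to the start of the full payload
                     : // and point at the first byte of each contained NALU.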
      57           0 : bool ParseStapAStartOffsets(const uint8_t* nalu_ptr,
      58             :                             size_t length_remaining,
      59             :                             std::vector<size_t>* offsets) {
      60           0 :   size_t offset = 0;
      61           0 :   while (length_remaining > 0) {
      62             :     // Buffer doesn't contain room for additional nalu length.
      63           0 :     if (length_remaining < sizeof(uint16_t))
      64           0 :       return false;
      65           0 :     uint16_t nalu_size = ByteReader<uint16_t>::ReadBigEndian(nalu_ptr);
      66           0 :     nalu_ptr += sizeof(uint16_t);
      67           0 :     length_remaining -= sizeof(uint16_t);
      68           0 :     if (nalu_size > length_remaining)
      69           0 :       return false;
      70           0 :     nalu_ptr += nalu_size;
      71           0 :     length_remaining -= nalu_size;
      72             : 
      73           0 :     offsets->push_back(offset + kStapAHeaderSize);
      74           0 :     offset += kLengthFieldSize + nalu_size;
      75             :   }
      76           0 :   return true;
      77             : }
      78             : 
      79             : }  // namespace
      80             : 
      81           0 : RtpPacketizerH264::RtpPacketizerH264(size_t max_payload_len,
      82           0 :                                      H264PacketizationMode packetization_mode)
      83             :     : max_payload_len_(max_payload_len),
      84           0 :       packetization_mode_(packetization_mode) {
      85             :   // Guard against uninitialized memory in packetization_mode.
      86           0 :   RTC_CHECK(packetization_mode == H264PacketizationMode::NonInterleaved ||
      87           0 :             packetization_mode == H264PacketizationMode::SingleNalUnit);
      88           0 : }
      89             : 
      90           0 : RtpPacketizerH264::~RtpPacketizerH264() {
      91           0 : }
      92             : 
      93           0 : RtpPacketizerH264::Fragment::Fragment(const uint8_t* buffer, size_t length)
      94           0 :     : buffer(buffer), length(length) {}
      95           0 : RtpPacketizerH264::Fragment::Fragment(const Fragment& fragment)
      96           0 :     : buffer(fragment.buffer), length(fragment.length) {}
      97             : 
      98           0 : void RtpPacketizerH264::SetPayloadData(
      99             :     const uint8_t* payload_data,
     100             :     size_t payload_size,
     101             :     const RTPFragmentationHeader* fragmentation) {
     102           0 :   RTC_DCHECK(packets_.empty());
     103           0 :   RTC_DCHECK(input_fragments_.empty());
     104           0 :   RTC_DCHECK(fragmentation);
     105           0 :   for (int i = 0; i < fragmentation->fragmentationVectorSize; ++i) {
     106             :     const uint8_t* buffer =
     107           0 :         &payload_data[fragmentation->fragmentationOffset[i]];
     108           0 :     size_t length = fragmentation->fragmentationLength[i];
     109             : 
     110           0 :     bool updated_sps = false;
     111           0 :     H264::NaluType nalu_type = H264::ParseNaluType(buffer[0]);
     112           0 :     if (nalu_type == H264::NaluType::kSps) {
     113             :       // Check if stream uses picture order count type 0, and if so rewrite it
     114             :       // to enable faster decoding. Streams in that format incur additional
      115             :       // delay because picture order count type 0 allows decode order to differ from render order.
     116             :       // The mechanism used is to rewrite (edit or add) the SPS's VUI to contain
     117             :       // restrictions on the maximum number of reordered pictures. This reduces
     118             :       // latency significantly, though it still adds about a frame of latency to
     119             :       // decoding.
     120             :       // Note that we do this rewriting both here (send side, in order to
     121             :       // protect legacy receive clients) and below in
      122             :       // RtpDepacketizerH264::ParseSingleNalu (receive side, in order to
     123             :       // protect us from unknown or legacy send clients).
     124             : 
     125           0 :       rtc::Optional<SpsParser::SpsState> sps;
     126             : 
     127           0 :       std::unique_ptr<rtc::Buffer> output_buffer(new rtc::Buffer());
     128             :       // Add the type header to the output buffer first, so that the rewriter
     129             :       // can append modified payload on top of that.
     130           0 :       output_buffer->AppendData(buffer[0]);
     131           0 :       SpsVuiRewriter::ParseResult result = SpsVuiRewriter::ParseAndRewriteSps(
     132             :           buffer + H264::kNaluTypeSize, length - H264::kNaluTypeSize, &sps,
     133           0 :           output_buffer.get());
     134             : 
     135           0 :       switch (result) {
     136             :         case SpsVuiRewriter::ParseResult::kVuiRewritten:
     137           0 :           input_fragments_.push_back(
     138           0 :               Fragment(output_buffer->data(), output_buffer->size()));
     139           0 :           input_fragments_.rbegin()->tmp_buffer = std::move(output_buffer);
     140           0 :           updated_sps = true;
     141           0 :           RTC_HISTOGRAM_ENUMERATION(kSpsValidHistogramName,
     142             :                                     SpsValidEvent::kSentSpsRewritten,
     143             :                                     SpsValidEvent::kSpsRewrittenMax);
     144           0 :           break;
     145             :         case SpsVuiRewriter::ParseResult::kPocOk:
     146           0 :           RTC_HISTOGRAM_ENUMERATION(kSpsValidHistogramName,
     147             :                                     SpsValidEvent::kSentSpsPocOk,
     148             :                                     SpsValidEvent::kSpsRewrittenMax);
     149           0 :           break;
     150             :         case SpsVuiRewriter::ParseResult::kVuiOk:
     151           0 :           RTC_HISTOGRAM_ENUMERATION(kSpsValidHistogramName,
     152             :                                     SpsValidEvent::kSentSpsVuiOk,
     153             :                                     SpsValidEvent::kSpsRewrittenMax);
     154           0 :           break;
     155             :         case SpsVuiRewriter::ParseResult::kFailure:
     156           0 :           RTC_HISTOGRAM_ENUMERATION(kSpsValidHistogramName,
     157             :                                     SpsValidEvent::kSentSpsParseFailure,
     158             :                                     SpsValidEvent::kSpsRewrittenMax);
     159           0 :           break;
     160             :       }
     161             :     }
     162             : 
     163           0 :     if (!updated_sps)
     164           0 :       input_fragments_.push_back(Fragment(buffer, length));
     165             :   }
     166           0 :   GeneratePackets();
     167           0 : }
     168             : 
     169           0 : void RtpPacketizerH264::GeneratePackets() {
     170           0 :   for (size_t i = 0; i < input_fragments_.size();) {
     171           0 :     switch (packetization_mode_) {
     172             :       case H264PacketizationMode::SingleNalUnit:
     173           0 :         PacketizeSingleNalu(i);
     174           0 :         ++i;
     175           0 :         break;
     176             :       case H264PacketizationMode::NonInterleaved:
     177           0 :         if (input_fragments_[i].length > max_payload_len_) {
     178           0 :           PacketizeFuA(i);
     179           0 :           ++i;
     180             :         } else {
     181           0 :           i = PacketizeStapA(i);
     182             :         }
     183           0 :         break;
     184             :     }
     185             :   }
     186           0 : }
     187             : 
     188           0 : void RtpPacketizerH264::PacketizeFuA(size_t fragment_index) {
     189             :   // Fragment payload into packets (FU-A).
     190             :   // Strip out the original header and leave room for the FU-A header.
     191           0 :   const Fragment& fragment = input_fragments_[fragment_index];
     192             : 
     193           0 :   size_t fragment_length = fragment.length - kNalHeaderSize;
     194           0 :   size_t offset = kNalHeaderSize;
     195           0 :   size_t bytes_available = max_payload_len_ - kFuAHeaderSize;
     196             :   const size_t num_fragments =
     197           0 :       (fragment_length + (bytes_available - 1)) / bytes_available;
     198             : 
     199           0 :   const size_t avg_size = (fragment_length + num_fragments - 1) / num_fragments;
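                     :   // Illustrative numbers (not taken from the source): with max_payload_len_
                     :   // of 1200 and a 3000-byte NALU, fragment_length is 2999, bytes_available
                     :   // is 1198, num_fragments is 3 and avg_size is 1000, so the loop below
                     :   // emits FU-A payloads of 1000, 1000 and 999 bytes rather than two full
                     :   // packets plus a small tail.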
     200           0 :   while (fragment_length > 0) {
     201           0 :     size_t packet_length = avg_size;
     202           0 :     if (fragment_length < avg_size)
     203           0 :       packet_length = fragment_length;
     204           0 :     packets_.push(PacketUnit(Fragment(fragment.buffer + offset, packet_length),
     205             :                              offset - kNalHeaderSize == 0,
     206             :                              fragment_length == packet_length, false,
     207           0 :                              fragment.buffer[0]));
     208           0 :     offset += packet_length;
     209           0 :     fragment_length -= packet_length;
     210             :   }
     211           0 :   RTC_CHECK_EQ(0, fragment_length);
     212           0 : }
     213             : 
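                     : // A rough worked example of the aggregation bookkeeping below, using assumed
                     : // sizes: with max_payload_len_ of 1200 and NALUs of 700 and 400 bytes, the
                     : // first NALU is counted with no header overhead; before testing the second,
                     : // fragment_headers_length becomes 2 + 1 + 2 = 5 (its own length field plus
                     : // the STAP-A header and the first NALU's length field), 400 + 5 fits in the
                     : // remaining 500 bytes, and the finished STAP-A packet written later by
                     : // NextAggregatePacket is 1 + 2 + 700 + 2 + 400 = 1105 bytes.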
     214           0 : size_t RtpPacketizerH264::PacketizeStapA(size_t fragment_index) {
     215             :   // Aggregate fragments into one packet (STAP-A).
     216           0 :   size_t payload_size_left = max_payload_len_;
     217           0 :   int aggregated_fragments = 0;
     218           0 :   size_t fragment_headers_length = 0;
     219           0 :   const Fragment* fragment = &input_fragments_[fragment_index];
     220           0 :   RTC_CHECK_GE(payload_size_left, fragment->length);
     221           0 :   while (payload_size_left >= fragment->length + fragment_headers_length) {
     222           0 :     RTC_CHECK_GT(fragment->length, 0);
     223           0 :     packets_.push(PacketUnit(*fragment, aggregated_fragments == 0, false, true,
     224           0 :                              fragment->buffer[0]));
     225           0 :     payload_size_left -= fragment->length;
     226           0 :     payload_size_left -= fragment_headers_length;
     227             : 
     228             :     // Next fragment.
     229           0 :     ++fragment_index;
     230           0 :     if (fragment_index == input_fragments_.size())
     231           0 :       break;
     232           0 :     fragment = &input_fragments_[fragment_index];
     233             : 
     234           0 :     fragment_headers_length = kLengthFieldSize;
      235             :     // If we are going to try to aggregate more fragments into this packet,
     236             :     // we need to add the STAP-A NALU header and a length field for the first
     237             :     // NALU of this packet.
     238           0 :     if (aggregated_fragments == 0)
     239           0 :       fragment_headers_length += kNalHeaderSize + kLengthFieldSize;
     240           0 :     ++aggregated_fragments;
     241             :   }
     242           0 :   packets_.back().last_fragment = true;
     243           0 :   return fragment_index;
     244             : }
     245             : 
     246           0 : void RtpPacketizerH264::PacketizeSingleNalu(size_t fragment_index) {
     247             :   // Add a single NALU to the queue, no aggregation.
     248           0 :   size_t payload_size_left = max_payload_len_;
     249           0 :   const Fragment* fragment = &input_fragments_[fragment_index];
     250           0 :   RTC_CHECK_GE(payload_size_left, fragment->length)
     251           0 :       << "Payload size left " << payload_size_left << ", fragment length "
     252           0 :       << fragment->length << ", packetization mode " << packetization_mode_;
     253           0 :   RTC_CHECK_GT(fragment->length, 0u);
     254           0 :   packets_.push(PacketUnit(*fragment, true /* first */, true /* last */,
     255           0 :                            false /* aggregated */, fragment->buffer[0]));
     256           0 : }
     257             : 
     258           0 : bool RtpPacketizerH264::NextPacket(RtpPacketToSend* rtp_packet,
     259             :                                    bool* last_packet) {
     260           0 :   RTC_DCHECK(rtp_packet);
     261           0 :   RTC_DCHECK(last_packet);
     262           0 :   if (packets_.empty()) {
     263           0 :     *last_packet = true;
     264           0 :     return false;
     265             :   }
     266             : 
     267           0 :   PacketUnit packet = packets_.front();
     268           0 :   if (packet.first_fragment && packet.last_fragment) {
     269             :     // Single NAL unit packet.
     270           0 :     size_t bytes_to_send = packet.source_fragment.length;
     271           0 :     uint8_t* buffer = rtp_packet->AllocatePayload(bytes_to_send);
     272           0 :     memcpy(buffer, packet.source_fragment.buffer, bytes_to_send);
     273           0 :     packets_.pop();
     274           0 :     input_fragments_.pop_front();
     275           0 :   } else if (packet.aggregated) {
     276           0 :     RTC_CHECK_EQ(H264PacketizationMode::NonInterleaved, packetization_mode_);
     277           0 :     NextAggregatePacket(rtp_packet);
     278             :   } else {
     279           0 :     RTC_CHECK_EQ(H264PacketizationMode::NonInterleaved, packetization_mode_);
     280           0 :     NextFragmentPacket(rtp_packet);
     281             :   }
     282           0 :   RTC_DCHECK_LE(rtp_packet->payload_size(), max_payload_len_);
     283           0 :   *last_packet = packets_.empty();
     284           0 :   rtp_packet->SetMarker(*last_packet);
     285           0 :   return true;
     286             : }
     287             : 
     288           0 : void RtpPacketizerH264::NextAggregatePacket(RtpPacketToSend* rtp_packet) {
     289           0 :   uint8_t* buffer = rtp_packet->AllocatePayload(max_payload_len_);
     290           0 :   RTC_DCHECK(buffer);
     291           0 :   PacketUnit* packet = &packets_.front();
     292           0 :   RTC_CHECK(packet->first_fragment);
     293             :   // STAP-A NALU header.
     294           0 :   buffer[0] = (packet->header & (kFBit | kNriMask)) | H264::NaluType::kStapA;
     295           0 :   size_t index = kNalHeaderSize;
     296           0 :   while (packet->aggregated) {
     297           0 :     const Fragment& fragment = packet->source_fragment;
     298             :     // Add NAL unit length field.
     299           0 :     ByteWriter<uint16_t>::WriteBigEndian(&buffer[index], fragment.length);
     300           0 :     index += kLengthFieldSize;
     301             :     // Add NAL unit.
     302           0 :     memcpy(&buffer[index], fragment.buffer, fragment.length);
     303           0 :     index += fragment.length;
     304           0 :     packets_.pop();
     305           0 :     input_fragments_.pop_front();
     306           0 :     if (packet->last_fragment)
     307           0 :       break;
     308           0 :     packet = &packets_.front();
     309             :   }
     310           0 :   RTC_CHECK(packet->last_fragment);
     311           0 :   rtp_packet->SetPayloadSize(index);
     312           0 : }
     313             : 
     314           0 : void RtpPacketizerH264::NextFragmentPacket(RtpPacketToSend* rtp_packet) {
     315           0 :   PacketUnit* packet = &packets_.front();
     316             :   // NAL unit fragmented over multiple packets (FU-A).
      317             :   // The original NALU header is not sent as such; its F/NRI bits go into the
      318             :   // FU indicator and its type goes into the FU header of every packet.
     319             :   uint8_t fu_indicator =
     320           0 :       (packet->header & (kFBit | kNriMask)) | H264::NaluType::kFuA;
     321           0 :   uint8_t fu_header = 0;
     322             : 
     323             :   // S | E | R | 5 bit type.
     324           0 :   fu_header |= (packet->first_fragment ? kSBit : 0);
     325           0 :   fu_header |= (packet->last_fragment ? kEBit : 0);
     326           0 :   uint8_t type = packet->header & kTypeMask;
     327           0 :   fu_header |= type;
     328           0 :   const Fragment& fragment = packet->source_fragment;
     329             :   uint8_t* buffer =
     330           0 :       rtp_packet->AllocatePayload(kFuAHeaderSize + fragment.length);
     331           0 :   buffer[0] = fu_indicator;
     332           0 :   buffer[1] = fu_header;
     333           0 :   memcpy(buffer + kFuAHeaderSize, fragment.buffer, fragment.length);
     334           0 :   if (packet->last_fragment)
     335           0 :     input_fragments_.pop_front();
     336           0 :   packets_.pop();
     337           0 : }
     338             : 
     339           0 : ProtectionType RtpPacketizerH264::GetProtectionType() {
     340           0 :   return kProtectedPacket;
     341             : }
     342             : 
     343           0 : StorageType RtpPacketizerH264::GetStorageType(
     344             :     uint32_t retransmission_settings) {
     345           0 :   return kAllowRetransmission;
     346             : }
     347             : 
     348           0 : std::string RtpPacketizerH264::ToString() {
     349           0 :   return "RtpPacketizerH264";
     350             : }
     351             : 
     352           0 : RtpDepacketizerH264::RtpDepacketizerH264() : offset_(0), length_(0) {}
     353           0 : RtpDepacketizerH264::~RtpDepacketizerH264() {}
     354             : 
     355           0 : bool RtpDepacketizerH264::Parse(ParsedPayload* parsed_payload,
     356             :                                 const uint8_t* payload_data,
     357             :                                 size_t payload_data_length) {
     358           0 :   RTC_CHECK(parsed_payload != nullptr);
     359           0 :   if (payload_data_length == 0) {
     360           0 :     LOG(LS_ERROR) << "Empty payload.";
     361           0 :     return false;
     362             :   }
     363             : 
     364           0 :   offset_ = 0;
     365           0 :   length_ = payload_data_length;
     366           0 :   modified_buffer_.reset();
     367             : 
     368           0 :   uint8_t nal_type = payload_data[0] & kTypeMask;
     369           0 :   parsed_payload->type.Video.codecHeader.H264.nalus_length = 0;
     370           0 :   if (nal_type == H264::NaluType::kFuA) {
     371             :     // Fragmented NAL units (FU-A).
     372           0 :     if (!ParseFuaNalu(parsed_payload, payload_data))
     373           0 :       return false;
     374             :   } else {
      375             :     // We handle STAP-A and single NALUs the same way here. The jitter buffer
     376             :     // will depacketize the STAP-A into NAL units later.
     377             :     // TODO(sprang): Parse STAP-A offsets here and store in fragmentation vec.
     378           0 :     if (!ProcessStapAOrSingleNalu(parsed_payload, payload_data))
     379           0 :       return false;
     380             :   }
     381             : 
     382             :   const uint8_t* payload =
     383           0 :       modified_buffer_ ? modified_buffer_->data() : payload_data;
     384             : 
     385           0 :   parsed_payload->payload = payload + offset_;
     386           0 :   parsed_payload->payload_length = length_;
     387           0 :   return true;
     388             : }
     389             : 
     390           0 : bool RtpDepacketizerH264::ProcessStapAOrSingleNalu(
     391             :     ParsedPayload* parsed_payload,
     392             :     const uint8_t* payload_data) {
     393           0 :   parsed_payload->type.Video.width = 0;
     394           0 :   parsed_payload->type.Video.height = 0;
     395           0 :   parsed_payload->type.Video.codec = kRtpVideoH264;
     396           0 :   parsed_payload->type.Video.is_first_packet_in_frame = true;
     397             :   RTPVideoHeaderH264* h264_header =
     398           0 :       &parsed_payload->type.Video.codecHeader.H264;
     399             : 
     400           0 :   const uint8_t* nalu_start = payload_data + kNalHeaderSize;
     401           0 :   const size_t nalu_length = length_ - kNalHeaderSize;
     402           0 :   uint8_t nal_type = payload_data[0] & kTypeMask;
     403           0 :   std::vector<size_t> nalu_start_offsets;
     404           0 :   if (nal_type == H264::NaluType::kStapA) {
     405             :     // Skip the StapA header (StapA NAL type + length).
     406           0 :     if (length_ <= kStapAHeaderSize) {
     407           0 :       LOG(LS_ERROR) << "StapA header truncated.";
     408           0 :       return false;
     409             :     }
     410             : 
     411           0 :     if (!ParseStapAStartOffsets(nalu_start, nalu_length, &nalu_start_offsets)) {
     412           0 :       LOG(LS_ERROR) << "StapA packet with incorrect NALU packet lengths.";
     413           0 :       return false;
     414             :     }
     415             : 
     416           0 :     h264_header->packetization_type = kH264StapA;
     417           0 :     nal_type = payload_data[kStapAHeaderSize] & kTypeMask;
     418             :   } else {
     419           0 :     h264_header->packetization_type = kH264SingleNalu;
     420           0 :     nalu_start_offsets.push_back(0);
     421             :   }
     422           0 :   h264_header->nalu_type = nal_type;
     423           0 :   parsed_payload->frame_type = kVideoFrameDelta;
     424             : 
     425           0 :   nalu_start_offsets.push_back(length_ + kLengthFieldSize);  // End offset.
     426           0 :   for (size_t i = 0; i < nalu_start_offsets.size() - 1; ++i) {
     427           0 :     size_t start_offset = nalu_start_offsets[i];
      428             :     // The end offset is actually the start offset of the next unit, minus the
      429             :     // length field that precedes it, which is not part of this unit.
     430           0 :     size_t end_offset = nalu_start_offsets[i + 1] - kLengthFieldSize;
     431           0 :     if (end_offset - start_offset < H264::kNaluTypeSize) {
     432           0 :       LOG(LS_ERROR) << "STAP-A packet too short";
     433           0 :       return false;
     434             :     }
     435             : 
     436             :     NaluInfo nalu;
     437           0 :     nalu.type = payload_data[start_offset] & kTypeMask;
     438           0 :     nalu.offset = start_offset;
     439           0 :     nalu.size = end_offset - start_offset;
     440           0 :     nalu.sps_id = -1;
     441           0 :     nalu.pps_id = -1;
     442           0 :     start_offset += H264::kNaluTypeSize;
     443             : 
     444           0 :     switch (nalu.type) {
     445             :       case H264::NaluType::kSps: {
      446             :         // Check if VUI is present in the SPS and if it needs to be
      447             :         // modified to avoid excessive decoder latency.
      448             : 
     449             : 
     450             :         // Copy any previous data first (likely just the first header).
     451           0 :         std::unique_ptr<rtc::Buffer> output_buffer(new rtc::Buffer());
     452           0 :         if (start_offset)
     453           0 :           output_buffer->AppendData(payload_data, start_offset);
     454             : 
     455           0 :         rtc::Optional<SpsParser::SpsState> sps;
     456             : 
     457           0 :         SpsVuiRewriter::ParseResult result = SpsVuiRewriter::ParseAndRewriteSps(
     458             :             &payload_data[start_offset], end_offset - start_offset, &sps,
     459           0 :             output_buffer.get());
     460           0 :         switch (result) {
     461             :           case SpsVuiRewriter::ParseResult::kVuiRewritten:
     462           0 :             if (modified_buffer_) {
     463           0 :               LOG(LS_WARNING)
      464             :                   << "More than one H264 SPS NAL unit needing "
     465             :                      "rewriting found within a single STAP-A packet. "
     466           0 :                      "Keeping the first and rewriting the last.";
     467             :             }
     468             : 
     469             :             // Rewrite length field to new SPS size.
     470           0 :             if (h264_header->packetization_type == kH264StapA) {
     471             :               size_t length_field_offset =
     472           0 :                   start_offset - (H264::kNaluTypeSize + kLengthFieldSize);
     473             :               // Stap-A Length includes payload data and type header.
     474             :               size_t rewritten_size =
     475           0 :                   output_buffer->size() - start_offset + H264::kNaluTypeSize;
     476           0 :               ByteWriter<uint16_t>::WriteBigEndian(
     477           0 :                   &(*output_buffer)[length_field_offset], rewritten_size);
     478             :             }
     479             : 
     480             :             // Append rest of packet.
     481           0 :             output_buffer->AppendData(
     482             :                 &payload_data[end_offset],
     483           0 :                 nalu_length + kNalHeaderSize - end_offset);
     484             : 
     485           0 :             modified_buffer_ = std::move(output_buffer);
     486           0 :             length_ = modified_buffer_->size();
     487             : 
     488           0 :             RTC_HISTOGRAM_ENUMERATION(kSpsValidHistogramName,
     489             :                                       SpsValidEvent::kReceivedSpsRewritten,
     490             :                                       SpsValidEvent::kSpsRewrittenMax);
     491           0 :             break;
     492             :           case SpsVuiRewriter::ParseResult::kPocOk:
     493           0 :             RTC_HISTOGRAM_ENUMERATION(kSpsValidHistogramName,
     494             :                                       SpsValidEvent::kReceivedSpsPocOk,
     495             :                                       SpsValidEvent::kSpsRewrittenMax);
     496           0 :             break;
     497             :           case SpsVuiRewriter::ParseResult::kVuiOk:
     498           0 :             RTC_HISTOGRAM_ENUMERATION(kSpsValidHistogramName,
     499             :                                       SpsValidEvent::kReceivedSpsVuiOk,
     500             :                                       SpsValidEvent::kSpsRewrittenMax);
     501           0 :             break;
     502             :           case SpsVuiRewriter::ParseResult::kFailure:
     503           0 :             RTC_HISTOGRAM_ENUMERATION(kSpsValidHistogramName,
     504             :                                       SpsValidEvent::kReceivedSpsParseFailure,
     505             :                                       SpsValidEvent::kSpsRewrittenMax);
     506           0 :             break;
     507             :         }
     508             : 
     509           0 :         if (sps) {
     510           0 :           parsed_payload->type.Video.width = sps->width;
     511           0 :           parsed_payload->type.Video.height = sps->height;
     512           0 :           nalu.sps_id = sps->id;
     513             :         } else {
     514           0 :           LOG(LS_WARNING) << "Failed to parse SPS id from SPS slice.";
     515             :         }
     516           0 :         parsed_payload->frame_type = kVideoFrameKey;
     517           0 :         break;
     518             :       }
     519             :       case H264::NaluType::kPps: {
     520             :         uint32_t pps_id;
     521             :         uint32_t sps_id;
     522           0 :         if (PpsParser::ParsePpsIds(&payload_data[start_offset],
     523             :                                     end_offset - start_offset, &pps_id,
     524             :                                     &sps_id)) {
     525           0 :           nalu.pps_id = pps_id;
     526           0 :           nalu.sps_id = sps_id;
     527             :         } else {
     528           0 :           LOG(LS_WARNING)
     529           0 :               << "Failed to parse PPS id and SPS id from PPS slice.";
     530             :         }
     531           0 :         break;
     532             :       }
     533             :       case H264::NaluType::kIdr:
     534           0 :         parsed_payload->frame_type = kVideoFrameKey;
     535             :         FALLTHROUGH();
     536             :       case H264::NaluType::kSlice: {
     537             :         rtc::Optional<uint32_t> pps_id = PpsParser::ParsePpsIdFromSlice(
     538           0 :             &payload_data[start_offset], end_offset - start_offset);
     539           0 :         if (pps_id) {
     540           0 :           nalu.pps_id = *pps_id;
     541             :         } else {
     542           0 :           LOG(LS_WARNING) << "Failed to parse PPS id from slice of type: "
     543           0 :                           << static_cast<int>(nalu.type);
     544             :         }
     545           0 :         break;
     546             :       }
      547             :       // The NAL unit types below don't contain SPS or PPS ids.
     548             :       case H264::NaluType::kAud:
     549             :       case H264::NaluType::kEndOfSequence:
     550             :       case H264::NaluType::kEndOfStream:
     551             :       case H264::NaluType::kFiller:
     552           0 :         break;
     553             : 
      554             :       // Key frames start with an SPS, PPS, IDR, or Recovery Point SEI.
      555             :       // Recovery Point SEIs are used in AIR and GDR refreshes, which don't
      556             :       // send large I-frames and instead use forms of incremental/continuous refresh.
     557             :       case H264::NaluType::kSei:
     558           0 :         if (nalu.size <= 1) {
      559           0 :           LOG(LS_ERROR) << "SEI packet with incorrect packet length";
     560           0 :           return false; // malformed NAL
     561             :         }
     562           0 :         if (payload_data[start_offset + 1] != H264::SeiType::kSeiRecPt) {
     563             :           // some other form of SEI - not a keyframe
     564           0 :           parsed_payload->frame_type = kVideoFrameDelta;
     565             :         } else {
     566             :           // GDR is like IDR
     567           0 :           parsed_payload->frame_type = kVideoFrameKey;
     568             :         }
     569           0 :         break;
     570             :       case H264::NaluType::kStapA:
     571             :       case H264::NaluType::kFuA:
     572           0 :         LOG(LS_WARNING) << "Unexpected STAP-A or FU-A received.";
     573           0 :         return false;
     574             :     }
     575           0 :     RTPVideoHeaderH264* h264 = &parsed_payload->type.Video.codecHeader.H264;
     576           0 :     if (h264->nalus_length == kMaxNalusPerPacket) {
     577           0 :       LOG(LS_WARNING)
     578           0 :           << "Received packet containing more than " << kMaxNalusPerPacket
      579           0 :           << " NAL units. Will not keep track of SPS and PPS ids for all of them.";
     580             :     } else {
     581           0 :       h264->nalus[h264->nalus_length++] = nalu;
     582             :     }
     583             :   }
     584             : 
     585           0 :   return true;
     586             : }
     587             : 
     588           0 : bool RtpDepacketizerH264::ParseFuaNalu(
     589             :     RtpDepacketizer::ParsedPayload* parsed_payload,
     590             :     const uint8_t* payload_data) {
     591           0 :   if (length_ < kFuAHeaderSize) {
     592           0 :     LOG(LS_ERROR) << "FU-A NAL units truncated.";
     593           0 :     return false;
     594             :   }
     595           0 :   uint8_t fnri = payload_data[0] & (kFBit | kNriMask);
     596           0 :   uint8_t original_nal_type = payload_data[1] & kTypeMask;
     597           0 :   bool first_fragment = (payload_data[1] & kSBit) > 0;
     598             :   NaluInfo nalu;
     599           0 :   nalu.type = original_nal_type;
     600           0 :   nalu.sps_id = -1;
     601           0 :   nalu.pps_id = -1;
     602           0 :   if (first_fragment) {
     603           0 :     offset_ = 0;
     604           0 :     length_ -= kNalHeaderSize;
     605             :     rtc::Optional<uint32_t> pps_id = PpsParser::ParsePpsIdFromSlice(
     606           0 :         payload_data + 2 * kNalHeaderSize, length_ - kNalHeaderSize);
     607           0 :     if (pps_id) {
     608           0 :       nalu.pps_id = *pps_id;
     609             :     } else {
     610           0 :       LOG(LS_WARNING) << "Failed to parse PPS from first fragment of FU-A NAL "
     611           0 :                          "unit with original type: "
     612           0 :                       << static_cast<int>(nalu.type);
     613             :     }
     614           0 :     uint8_t original_nal_header = fnri | original_nal_type;
     615           0 :     modified_buffer_.reset(new rtc::Buffer());
     616           0 :     modified_buffer_->AppendData(payload_data + kNalHeaderSize, length_);
     617           0 :     (*modified_buffer_)[0] = original_nal_header;
     618             :   } else {
     619           0 :     offset_ = kFuAHeaderSize;
     620           0 :     length_ -= kFuAHeaderSize;
     621             :   }
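                     :   // As an illustration (values assumed, not from the source): an FU indicator
                     :   // of 0x7c (NRI = 3, type 28 = FU-A) and an FU header of 0x85 (S = 1,
                     :   // type 5 = IDR) give fnri = 0x60 and original_nal_type = 0x05, so the
                     :   // rebuilt first byte of modified_buffer_ is 0x65, exactly what an
                     :   // unfragmented IDR NALU header would carry.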
     622             : 
     623           0 :   if (original_nal_type == H264::NaluType::kIdr) {
     624           0 :     parsed_payload->frame_type = kVideoFrameKey;
     625             :   } else {
     626           0 :     parsed_payload->frame_type = kVideoFrameDelta;
     627             :   }
     628           0 :   parsed_payload->type.Video.width = 0;
     629           0 :   parsed_payload->type.Video.height = 0;
     630           0 :   parsed_payload->type.Video.codec = kRtpVideoH264;
     631           0 :   parsed_payload->type.Video.is_first_packet_in_frame = first_fragment;
     632           0 :   RTPVideoHeaderH264* h264 = &parsed_payload->type.Video.codecHeader.H264;
     633           0 :   h264->packetization_type = kH264FuA;
     634           0 :   h264->nalu_type = original_nal_type;
     635           0 :   if (first_fragment) {
     636           0 :     h264->nalus[h264->nalus_length] = nalu;
     637           0 :     h264->nalus_length = 1;
     638             :   }
     639           0 :   return true;
     640             : }
     641             : 
     642             : }  // namespace webrtc

Generated by: LCOV version 1.13