LCOV - code coverage report
Current view: top level - dom/media/encoder - VP8TrackEncoder.cpp (source / functions)
Test: output.info
Date: 2017-07-14 16:53:18
Lines:     0 / 353 hit (0.0 %)
Functions: 0 / 19 hit (0.0 %)

          Line data    Source code
       1             : /* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*-*/
       2             : /* This Source Code Form is subject to the terms of the Mozilla Public
       3             :  * License, v. 2.0. If a copy of the MPL was not distributed with this file,
       4             :  * You can obtain one at http://mozilla.org/MPL/2.0/. */
       5             : 
       6             : #include "VP8TrackEncoder.h"
       7             : #include "GeckoProfiler.h"
       8             : #include "LayersLogging.h"
       9             : #include "libyuv.h"
      10             : #include "mozilla/gfx/2D.h"
      11             : #include "prsystem.h"
      12             : #include "VideoSegment.h"
      13             : #include "VideoUtils.h"
      14             : #include "vpx/vp8cx.h"
      15             : #include "vpx/vpx_encoder.h"
      16             : #include "WebMWriter.h"
      17             : #include "mozilla/media/MediaUtils.h"
      18             : 
      19             : namespace mozilla {
      20             : 
      21             : LazyLogModule gVP8TrackEncoderLog("VP8TrackEncoder");
      22             : #define VP8LOG(level, msg, ...) MOZ_LOG(gVP8TrackEncoderLog, \
      23             :                                         level, \
      24             :                                         (msg, ##__VA_ARGS__))
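                      : // The macro forwards printf-style arguments to MOZ_LOG, e.g.
                      : //   VP8LOG(LogLevel::Error, "Failed to set new configuration");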
      25             : 
      26             : #define DEFAULT_BITRATE_BPS 2500000
      27             : 
      28             : using namespace mozilla::gfx;
      29             : using namespace mozilla::layers;
      30             : using namespace mozilla::media;
      31             : 
      32             : static already_AddRefed<SourceSurface>
      33           0 : GetSourceSurface(already_AddRefed<Image> aImg)
      34             : {
      35           0 :   RefPtr<Image> img = aImg;
      36           0 :   if (!img) {
      37           0 :     return nullptr;
      38             :   }
      39             : 
      40           0 :   if (!img->AsGLImage() || NS_IsMainThread()) {
      41           0 :     RefPtr<SourceSurface> surf = img->GetAsSourceSurface();
      42           0 :     return surf.forget();
      43             :   }
      44             : 
      45             :   // GLImage::GetAsSourceSurface() only supports main thread
      46           0 :   RefPtr<SourceSurface> surf;
      47           0 :   RefPtr<Runnable> runnable = NewRunnableFrom([img, &surf]() -> nsresult {
      48           0 :     surf = img->GetAsSourceSurface();
      49           0 :     return NS_OK;
      50           0 :   });
      51             : 
      52           0 :   NS_DispatchToMainThread(runnable, NS_DISPATCH_SYNC);
      53           0 :   return surf.forget();
      54             : }
      55             : 
      56           0 : VP8TrackEncoder::VP8TrackEncoder(TrackRate aTrackRate)
      57             :   : VideoTrackEncoder(aTrackRate)
      58             :   , mEncodedTimestamp(0)
      59           0 :   , mVPXContext(new vpx_codec_ctx_t())
      60           0 :   , mVPXImageWrapper(new vpx_image_t())
      61             : {
      62           0 :   MOZ_COUNT_CTOR(VP8TrackEncoder);
      63           0 : }
      64             : 
      65           0 : VP8TrackEncoder::~VP8TrackEncoder()
      66             : {
      67           0 :   Destroy();
      68           0 :   MOZ_COUNT_DTOR(VP8TrackEncoder);
      69           0 : }
      70             : 
      71             : void
      72           0 : VP8TrackEncoder::Destroy()
      73             : {
      74           0 :   ReentrantMonitorAutoEnter mon(mReentrantMonitor);
      75           0 :   if (mInitialized) {
      76           0 :     vpx_codec_destroy(mVPXContext);
      77             :   }
      78             : 
      79           0 :   if (mVPXImageWrapper) {
      80           0 :     vpx_img_free(mVPXImageWrapper);
      81             :   }
      82           0 :   mInitialized = false;
      83           0 : }
      84             : 
      85             : nsresult
      86           0 : VP8TrackEncoder::Init(int32_t aWidth, int32_t aHeight, int32_t aDisplayWidth,
      87             :                       int32_t aDisplayHeight)
      88             : {
      89           0 :   if (aWidth < 1 || aHeight < 1 || aDisplayWidth < 1 || aDisplayHeight < 1) {
      90           0 :     return NS_ERROR_FAILURE;
      91             :   }
      92             : 
      93           0 :   ReentrantMonitorAutoEnter mon(mReentrantMonitor);
      94           0 :   if (mInitialized) {
      95           0 :     MOZ_ASSERT(false);
      96             :     return NS_ERROR_FAILURE;
      97             :   }
      98             : 
      99             :   // Encoder configuration structure.
     100             :   vpx_codec_enc_cfg_t config;
     101           0 :   nsresult rv = SetConfigurationValues(aWidth, aHeight, aDisplayWidth, aDisplayHeight, config);
     102           0 :   NS_ENSURE_SUCCESS(rv, NS_ERROR_FAILURE);
     103             : 
      104             :   // Create a wrapper for the image, with the data pointers set to NULL; the
      105             :   // actual pointers are set for each frame before encoding. Align is set to
      106             :   // 1, as it is meaningless here (no memory is actually allocated).
     107           0 :   vpx_img_wrap(mVPXImageWrapper, VPX_IMG_FMT_I420,
     108           0 :                mFrameWidth, mFrameHeight, 1, nullptr);
     109             : 
     110           0 :   vpx_codec_flags_t flags = 0;
     111           0 :   flags |= VPX_CODEC_USE_OUTPUT_PARTITION;
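                      :   // With output partitioning enabled, an encoded frame may come back as
                      :   // several packets; GetEncodedPartitions() below appends packet data until
                      :   // it sees a packet without VPX_FRAME_IS_FRAGMENT set.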
     112           0 :   if (vpx_codec_enc_init(mVPXContext, vpx_codec_vp8_cx(), &config, flags)) {
     113           0 :     return NS_ERROR_FAILURE;
     114             :   }
     115             : 
     116           0 :   vpx_codec_control(mVPXContext, VP8E_SET_STATIC_THRESHOLD, 1);
     117           0 :   vpx_codec_control(mVPXContext, VP8E_SET_CPUUSED, -6);
     118           0 :   vpx_codec_control(mVPXContext, VP8E_SET_TOKEN_PARTITIONS,
     119           0 :                     VP8_ONE_TOKENPARTITION);
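                      :   // These controls are typical real-time tuning: the static threshold lets
                      :   // the encoder skip macroblocks that barely changed, VP8E_SET_CPUUSED trades
                      :   // quality for encoding speed, and a single token partition is requested.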
     120             : 
     121           0 :   mInitialized = true;
     122           0 :   mon.NotifyAll();
     123             : 
     124           0 :   return NS_OK;
     125             : }
     126             : 
     127             : nsresult
     128           0 : VP8TrackEncoder::Reconfigure(int32_t aWidth, int32_t aHeight,
     129             :                              int32_t aDisplayWidth, int32_t aDisplayHeight)
     130             : {
      131           0 :   if (aWidth <= 0 || aHeight <= 0 || aDisplayWidth <= 0 || aDisplayHeight <= 0) {
     132           0 :     MOZ_ASSERT(false);
     133             :     return NS_ERROR_FAILURE;
     134             :   }
     135             : 
     136           0 :   ReentrantMonitorAutoEnter mon(mReentrantMonitor);
     137           0 :   if (!mInitialized) {
     138           0 :     MOZ_ASSERT(false);
     139             :     return NS_ERROR_FAILURE;
     140             :   }
     141             : 
     142           0 :   mInitialized = false;
     143             :   // Recreate image wrapper
     144           0 :   vpx_img_free(mVPXImageWrapper);
     145           0 :   vpx_img_wrap(mVPXImageWrapper, VPX_IMG_FMT_I420, aWidth, aHeight, 1, nullptr);
     146             :   // Encoder configuration structure.
     147             :   vpx_codec_enc_cfg_t config;
     148           0 :   nsresult rv = SetConfigurationValues(aWidth, aHeight, aDisplayWidth, aDisplayHeight, config);
     149           0 :   NS_ENSURE_SUCCESS(rv, NS_ERROR_FAILURE);
     150             :   // Set new configuration
     151           0 :   if (vpx_codec_enc_config_set(mVPXContext.get(), &config) != VPX_CODEC_OK) {
     152           0 :     VP8LOG(LogLevel::Error, "Failed to set new configuration");
     153           0 :     return NS_ERROR_FAILURE;
     154             :   }
     155           0 :   mInitialized = true;
     156           0 :   return NS_OK;
     157             : }
     158             : 
     159             : nsresult
     160           0 : VP8TrackEncoder::SetConfigurationValues(int32_t aWidth, int32_t aHeight, int32_t aDisplayWidth,
     161             :                                         int32_t aDisplayHeight, vpx_codec_enc_cfg_t& config)
     162             : {
     163           0 :   mFrameWidth = aWidth;
     164           0 :   mFrameHeight = aHeight;
     165           0 :   mDisplayWidth = aDisplayWidth;
     166           0 :   mDisplayHeight = aDisplayHeight;
     167             : 
     168             :   // Encoder configuration structure.
     169           0 :   memset(&config, 0, sizeof(vpx_codec_enc_cfg_t));
     170           0 :   if (vpx_codec_enc_config_default(vpx_codec_vp8_cx(), &config, 0)) {
     171           0 :     VP8LOG(LogLevel::Error, "Failed to get default configuration");
     172           0 :     return NS_ERROR_FAILURE;
     173             :   }
     174             : 
     175           0 :   config.g_w = mFrameWidth;
     176           0 :   config.g_h = mFrameHeight;
      177             :   // TODO: Maybe we should have a different frame-rate/bitrate pair for each
      178             :   // device, or for each platform?
     179             : 
     180             :   // rc_target_bitrate needs kbit/s
     181           0 :   config.rc_target_bitrate = (mVideoBitrate != 0 ? mVideoBitrate : DEFAULT_BITRATE_BPS)/1000;
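                      :   // e.g. the DEFAULT_BITRATE_BPS of 2500000 bps becomes 2500 kbit/s here.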
     182             : 
     183             :   // Setting the time base of the codec
     184           0 :   config.g_timebase.num = 1;
     185           0 :   config.g_timebase.den = mTrackRate;
     186             : 
     187           0 :   config.g_error_resilient = 0;
     188             : 
      189           0 :   config.g_lag_in_frames = 0; // 0 - no frame lagging
     190             : 
     191           0 :   int32_t number_of_cores = PR_GetNumberOfProcessors();
     192           0 :   if (mFrameWidth * mFrameHeight > 1280 * 960 && number_of_cores >= 6) {
     193           0 :     config.g_threads = 3; // 3 threads for 1080p.
     194           0 :   } else if (mFrameWidth * mFrameHeight > 640 * 480 && number_of_cores >= 3) {
     195           0 :     config.g_threads = 2; // 2 threads for qHD/HD.
     196             :   } else {
     197           0 :     config.g_threads = 1; // 1 thread for VGA or less
     198             :   }
     199             : 
     200             :   // rate control settings
     201           0 :   config.rc_dropframe_thresh = 0;
     202           0 :   config.rc_end_usage = VPX_VBR;
     203           0 :   config.g_pass = VPX_RC_ONE_PASS;
     204             :   // ffmpeg doesn't currently support streams that use resize.
     205             :   // Therefore, for safety, we should turn it off until it does.
     206           0 :   config.rc_resize_allowed = 0;
     207           0 :   config.rc_undershoot_pct = 100;
     208           0 :   config.rc_overshoot_pct = 15;
     209           0 :   config.rc_buf_initial_sz = 500;
     210           0 :   config.rc_buf_optimal_sz = 600;
     211           0 :   config.rc_buf_sz = 1000;
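                      :   // The rc_buf_* sizes above are expressed in milliseconds of data at the
                      :   // target bitrate.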
     212             : 
     213           0 :   config.kf_mode = VPX_KF_AUTO;
     214             :   // Ensure that we can output one I-frame per second.
     215           0 :   config.kf_max_dist = 60;
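                      :   // kf_max_dist counts frames passed to the encoder, so the actual keyframe
                      :   // interval in seconds depends on the incoming frame rate.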
     216             : 
     217           0 :   return NS_OK;
     218             : }
     219             : 
     220             : already_AddRefed<TrackMetadataBase>
     221           0 : VP8TrackEncoder::GetMetadata()
     222             : {
     223           0 :   AUTO_PROFILER_LABEL("VP8TrackEncoder::GetMetadata", OTHER);
     224             :   {
      225             :     // Wait until the encoder is initialized (or encoding is canceled).
     226           0 :     ReentrantMonitorAutoEnter mon(mReentrantMonitor);
     227           0 :     while (!mCanceled && !mInitialized) {
     228           0 :       mon.Wait();
     229             :     }
     230             :   }
     231             : 
     232           0 :   if (mCanceled || mEncodingComplete) {
     233           0 :     return nullptr;
     234             :   }
     235             : 
     236           0 :   RefPtr<VP8Metadata> meta = new VP8Metadata();
     237           0 :   meta->mWidth = mFrameWidth;
     238           0 :   meta->mHeight = mFrameHeight;
     239           0 :   meta->mDisplayWidth = mDisplayWidth;
     240           0 :   meta->mDisplayHeight = mDisplayHeight;
     241             : 
     242           0 :   return meta.forget();
     243             : }
     244             : 
     245             : nsresult
     246           0 : VP8TrackEncoder::GetEncodedPartitions(EncodedFrameContainer& aData)
     247             : {
     248           0 :   vpx_codec_iter_t iter = nullptr;
     249           0 :   EncodedFrame::FrameType frameType = EncodedFrame::VP8_P_FRAME;
     250           0 :   nsTArray<uint8_t> frameData;
     251           0 :   const vpx_codec_cx_pkt_t *pkt = nullptr;
     252           0 :   while ((pkt = vpx_codec_get_cx_data(mVPXContext, &iter)) != nullptr) {
     253           0 :     switch (pkt->kind) {
     254             :       case VPX_CODEC_CX_FRAME_PKT: {
     255             :         // Copy the encoded data from libvpx to frameData
     256           0 :         frameData.AppendElements((uint8_t*)pkt->data.frame.buf,
     257           0 :                                  pkt->data.frame.sz);
     258           0 :         break;
     259             :       }
     260             :       default: {
     261           0 :         break;
     262             :       }
     263             :     }
     264             :     // End of frame
     265           0 :     if ((pkt->data.frame.flags & VPX_FRAME_IS_FRAGMENT) == 0) {
     266           0 :       if (pkt->data.frame.flags & VPX_FRAME_IS_KEY) {
     267           0 :         frameType = EncodedFrame::VP8_I_FRAME;
     268             :       }
     269           0 :       break;
     270             :     }
     271             :   }
     272             : 
     273           0 :   if (!frameData.IsEmpty()) {
     274             :     // Copy the encoded data to aData.
     275           0 :     EncodedFrame* videoData = new EncodedFrame();
     276           0 :     videoData->SetFrameType(frameType);
     277             : 
     278             :     // Convert the timestamp and duration to Usecs.
     279           0 :     CheckedInt64 timestamp = FramesToUsecs(pkt->data.frame.pts, mTrackRate);
     280           0 :     if (!timestamp.isValid()) {
     281           0 :       NS_ERROR("Microsecond timestamp overflow");
     282           0 :       return NS_ERROR_DOM_MEDIA_OVERFLOW_ERR;
     283             :     }
     284           0 :     videoData->SetTimeStamp((uint64_t)timestamp.value());
     285             : 
     286           0 :     mExtractedDuration += pkt->data.frame.duration;
     287           0 :     if (!mExtractedDuration.isValid()) {
     288           0 :       NS_ERROR("Duration overflow");
     289           0 :       return NS_ERROR_DOM_MEDIA_OVERFLOW_ERR;
     290             :     }
     291             : 
     292             :     CheckedInt64 totalDuration =
     293           0 :       FramesToUsecs(mExtractedDuration.value(), mTrackRate);
     294           0 :     if (!totalDuration.isValid()) {
     295           0 :       NS_ERROR("Duration overflow");
     296           0 :       return NS_ERROR_DOM_MEDIA_OVERFLOW_ERR;
     297             :     }
     298             : 
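                      :     // Converting the running total to microseconds and subtracting what was
                      :     // already emitted keeps FramesToUsecs rounding error from accumulating
                      :     // across frames.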
     299           0 :     CheckedInt64 duration = totalDuration - mExtractedDurationUs;
     300           0 :     if (!duration.isValid()) {
     301           0 :       NS_ERROR("Duration overflow");
     302           0 :       return NS_ERROR_DOM_MEDIA_OVERFLOW_ERR;
     303             :     }
     304             : 
     305           0 :     mExtractedDurationUs = totalDuration;
     306           0 :     videoData->SetDuration((uint64_t)duration.value());
     307           0 :     videoData->SwapInFrameData(frameData);
     308           0 :     VP8LOG(LogLevel::Verbose,
     309             :            "GetEncodedPartitions TimeStamp %" PRIu64 ", Duration %" PRIu64 ", FrameType %d",
     310             :            videoData->GetTimeStamp(), videoData->GetDuration(),
     311             :            videoData->GetFrameType());
     312           0 :     aData.AppendEncodedFrame(videoData);
     313             :   }
     314             : 
     315           0 :   return pkt ? NS_OK : NS_ERROR_NOT_AVAILABLE;
     316             : }
     317             : 
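                      : // The helpers below classify the chroma subsampling of a PlanarYCbCrImage by
                      : // its plane sizes: 4:2:0 has half-width, half-height chroma planes, 4:2:2 has
                      : // half-width, full-height chroma planes, and 4:4:4 has full-size chroma planes.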
     318           0 : static bool isYUV420(const PlanarYCbCrImage::Data *aData)
     319             : {
     320           0 :   if (aData->mYSize == aData->mCbCrSize * 2) {
     321           0 :     return true;
     322             :   }
     323           0 :   return false;
     324             : }
     325             : 
     326           0 : static bool isYUV422(const PlanarYCbCrImage::Data *aData)
     327             : {
     328           0 :   if ((aData->mYSize.width == aData->mCbCrSize.width * 2) &&
     329           0 :       (aData->mYSize.height == aData->mCbCrSize.height)) {
     330           0 :     return true;
     331             :   }
     332           0 :   return false;
     333             : }
     334             : 
     335           0 : static bool isYUV444(const PlanarYCbCrImage::Data *aData)
     336             : {
     337           0 :   if (aData->mYSize == aData->mCbCrSize) {
     338           0 :     return true;
     339             :   }
     340           0 :   return false;
     341             : }
     342             : 
     343           0 : nsresult VP8TrackEncoder::PrepareRawFrame(VideoChunk &aChunk)
     344             : {
     345           0 :   RefPtr<Image> img;
     346           0 :   if (aChunk.mFrame.GetForceBlack() || aChunk.IsNull()) {
     347           0 :     if (!mMuteFrame) {
     348           0 :       mMuteFrame = VideoFrame::CreateBlackImage(gfx::IntSize(mFrameWidth, mFrameHeight));
     349             :     }
     350           0 :     if (!mMuteFrame) {
     351           0 :       VP8LOG(LogLevel::Warning, "Failed to allocate black image of size %dx%d",
     352             :              mFrameWidth, mFrameHeight);
     353           0 :       return NS_OK;
     354             :     }
     355           0 :     img = mMuteFrame;
     356             :   } else {
     357           0 :     img = aChunk.mFrame.GetImage();
     358             :   }
     359             : 
     360           0 :   if (img->GetSize() != IntSize(mFrameWidth, mFrameHeight)) {
     361           0 :     VP8LOG(LogLevel::Info,
     362             :            "Dynamic resolution change (was %dx%d, now %dx%d).",
     363             :            mFrameWidth, mFrameHeight, img->GetSize().width, img->GetSize().height);
     364             : 
     365             : 
     366           0 :     gfx::IntSize intrinsicSize = aChunk.mFrame.GetIntrinsicSize();
     367           0 :     gfx::IntSize imgSize = aChunk.mFrame.GetImage()->GetSize();
     368           0 :     if (imgSize <= IntSize(mFrameWidth, mFrameHeight) && // check buffer size instead
     369             :         // If the new size is less than or equal to old,
     370             :         // the existing encoder instance can continue.
     371           0 :         NS_SUCCEEDED(Reconfigure(imgSize.width,
     372             :                                  imgSize.height,
     373             :                                  intrinsicSize.width,
     374             :                                  intrinsicSize.height))) {
     375           0 :       VP8LOG(LogLevel::Info, "Reconfigured VP8 encoder.");
     376             :     } else {
     377             :       // New frame size is larger; re-create the encoder.
     378           0 :       Destroy();
     379           0 :       nsresult rv = Init(imgSize.width,
     380             :                          imgSize.height,
     381             :                          intrinsicSize.width,
     382           0 :                          intrinsicSize.height);
     383           0 :       VP8LOG(LogLevel::Info, "Recreated VP8 encoder.");
     384           0 :       NS_ENSURE_SUCCESS(rv, rv);
     385             :     }
     386             :   }
     387             : 
     388           0 :   ImageFormat format = img->GetFormat();
     389           0 :   if (format == ImageFormat::PLANAR_YCBCR) {
     390           0 :     PlanarYCbCrImage* yuv = static_cast<PlanarYCbCrImage *>(img.get());
     391             : 
     392           0 :     MOZ_RELEASE_ASSERT(yuv);
     393           0 :     if (!yuv->IsValid()) {
     394           0 :       NS_WARNING("PlanarYCbCrImage is not valid");
     395           0 :       return NS_ERROR_FAILURE;
     396             :     }
     397           0 :     const PlanarYCbCrImage::Data *data = yuv->GetData();
     398             : 
     399           0 :     if (isYUV420(data) && !data->mCbSkip) {
     400             :       // 420 planar, no need for conversions
     401           0 :       mVPXImageWrapper->planes[VPX_PLANE_Y] = data->mYChannel;
     402           0 :       mVPXImageWrapper->planes[VPX_PLANE_U] = data->mCbChannel;
     403           0 :       mVPXImageWrapper->planes[VPX_PLANE_V] = data->mCrChannel;
     404           0 :       mVPXImageWrapper->stride[VPX_PLANE_Y] = data->mYStride;
     405           0 :       mVPXImageWrapper->stride[VPX_PLANE_U] = data->mCbCrStride;
     406           0 :       mVPXImageWrapper->stride[VPX_PLANE_V] = data->mCbCrStride;
     407             : 
     408           0 :       return NS_OK;
     409             :     }
     410             :   }
     411             : 
     412             :   // Not 420 planar, have to convert
     413           0 :   uint32_t yPlaneSize = mFrameWidth * mFrameHeight;
     414           0 :   uint32_t halfWidth = (mFrameWidth + 1) / 2;
     415           0 :   uint32_t halfHeight = (mFrameHeight + 1) / 2;
     416           0 :   uint32_t uvPlaneSize = halfWidth * halfHeight;
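                      :   // e.g. for a 640x480 frame: yPlaneSize is 307200 bytes and each chroma
                      :   // plane is 320x240 = 76800 bytes, giving a 460800-byte I420 buffer.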
     417             : 
     418           0 :   if (mI420Frame.Length() != yPlaneSize + uvPlaneSize * 2) {
     419           0 :     mI420Frame.SetLength(yPlaneSize + uvPlaneSize * 2);
     420             :   }
     421             : 
     422           0 :   uint8_t *y = mI420Frame.Elements();
     423           0 :   uint8_t *cb = mI420Frame.Elements() + yPlaneSize;
     424           0 :   uint8_t *cr = mI420Frame.Elements() + yPlaneSize + uvPlaneSize;
     425             : 
     426           0 :   if (format == ImageFormat::PLANAR_YCBCR) {
     427           0 :     PlanarYCbCrImage* yuv = static_cast<PlanarYCbCrImage *>(img.get());
     428             : 
     429           0 :     MOZ_RELEASE_ASSERT(yuv);
     430           0 :     if (!yuv->IsValid()) {
     431           0 :       NS_WARNING("PlanarYCbCrImage is not valid");
     432           0 :       return NS_ERROR_FAILURE;
     433             :     }
     434           0 :     const PlanarYCbCrImage::Data *data = yuv->GetData();
     435             : 
     436             :     int rv;
     437           0 :     std::string yuvFormat;
     438           0 :     if (isYUV420(data) && data->mCbSkip) {
     439             :       // If mCbSkip is set, we assume it's nv12 or nv21.
     440           0 :       if (data->mCbChannel < data->mCrChannel) { // nv12
     441           0 :         rv = libyuv::NV12ToI420(data->mYChannel, data->mYStride,
     442           0 :                                 data->mCbChannel, data->mCbCrStride,
     443             :                                 y, mFrameWidth,
     444             :                                 cb, halfWidth,
     445             :                                 cr, halfWidth,
     446           0 :                                 mFrameWidth, mFrameHeight);
     447           0 :         yuvFormat = "NV12";
     448             :       } else { // nv21
     449           0 :         rv = libyuv::NV21ToI420(data->mYChannel, data->mYStride,
     450           0 :                                 data->mCrChannel, data->mCbCrStride,
     451             :                                 y, mFrameWidth,
     452             :                                 cb, halfWidth,
     453             :                                 cr, halfWidth,
     454           0 :                                 mFrameWidth, mFrameHeight);
     455           0 :         yuvFormat = "NV21";
     456             :       }
     457           0 :     } else if (isYUV444(data) && !data->mCbSkip) {
     458           0 :       rv = libyuv::I444ToI420(data->mYChannel, data->mYStride,
     459           0 :                               data->mCbChannel, data->mCbCrStride,
     460           0 :                               data->mCrChannel, data->mCbCrStride,
     461             :                               y, mFrameWidth,
     462             :                               cb, halfWidth,
     463             :                               cr, halfWidth,
     464           0 :                               mFrameWidth, mFrameHeight);
     465           0 :       yuvFormat = "I444";
     466           0 :     } else if (isYUV422(data) && !data->mCbSkip) {
     467           0 :       rv = libyuv::I422ToI420(data->mYChannel, data->mYStride,
     468           0 :                               data->mCbChannel, data->mCbCrStride,
     469           0 :                               data->mCrChannel, data->mCbCrStride,
     470             :                               y, mFrameWidth,
     471             :                               cb, halfWidth,
     472             :                               cr, halfWidth,
     473           0 :                               mFrameWidth, mFrameHeight);
     474           0 :       yuvFormat = "I422";
     475             :     } else {
     476           0 :       VP8LOG(LogLevel::Error, "Unsupported planar format");
     477           0 :       NS_ASSERTION(false, "Unsupported planar format");
     478           0 :       return NS_ERROR_NOT_IMPLEMENTED;
     479             :     }
     480             : 
     481           0 :     if (rv != 0) {
     482           0 :       VP8LOG(LogLevel::Error, "Converting an %s frame to I420 failed", yuvFormat.c_str());
     483           0 :       return NS_ERROR_FAILURE;
     484             :     }
     485             : 
     486           0 :     VP8LOG(LogLevel::Verbose, "Converted an %s frame to I420", yuvFormat.c_str());
     487             :   } else {
     488             :     // Not YCbCr at all. Try to get access to the raw data and convert.
     489             : 
     490           0 :     RefPtr<SourceSurface> surf = GetSourceSurface(img.forget());
     491           0 :     if (!surf) {
     492           0 :       VP8LOG(LogLevel::Error, "Getting surface from %s image failed", Stringify(format).c_str());
     493           0 :       return NS_ERROR_FAILURE;
     494             :     }
     495             : 
     496           0 :     RefPtr<DataSourceSurface> data = surf->GetDataSurface();
     497           0 :     if (!data) {
     498           0 :       VP8LOG(LogLevel::Error, "Getting data surface from %s image with %s (%s) surface failed",
     499             :              Stringify(format).c_str(), Stringify(surf->GetType()).c_str(),
     500             :              Stringify(surf->GetFormat()).c_str());
     501           0 :       return NS_ERROR_FAILURE;
     502             :     }
     503             : 
     504           0 :     DataSourceSurface::ScopedMap map(data, DataSourceSurface::READ);
     505           0 :     if (!map.IsMapped()) {
     506           0 :       VP8LOG(LogLevel::Error, "Reading DataSourceSurface from %s image with %s (%s) surface failed",
     507             :              Stringify(format).c_str(), Stringify(surf->GetType()).c_str(),
     508             :              Stringify(surf->GetFormat()).c_str());
     509           0 :       return NS_ERROR_FAILURE;
     510             :     }
     511             : 
     512             :     int rv;
     513           0 :     switch (surf->GetFormat()) {
     514             :       case SurfaceFormat::B8G8R8A8:
     515             :       case SurfaceFormat::B8G8R8X8:
     516           0 :         rv = libyuv::ARGBToI420(static_cast<uint8*>(map.GetData()),
     517             :                                 map.GetStride(),
     518             :                                 y, mFrameWidth,
     519             :                                 cb, halfWidth,
     520             :                                 cr, halfWidth,
     521           0 :                                 mFrameWidth, mFrameHeight);
     522           0 :         break;
     523             :       case SurfaceFormat::R5G6B5_UINT16:
     524           0 :         rv = libyuv::RGB565ToI420(static_cast<uint8*>(map.GetData()),
     525             :                                   map.GetStride(),
     526             :                                   y, mFrameWidth,
     527             :                                   cb, halfWidth,
     528             :                                   cr, halfWidth,
     529           0 :                                   mFrameWidth, mFrameHeight);
     530           0 :         break;
     531             :       default:
     532           0 :         VP8LOG(LogLevel::Error, "Unsupported SourceSurface format %s",
     533             :                Stringify(surf->GetFormat()).c_str());
     534           0 :         NS_ASSERTION(false, "Unsupported SourceSurface format");
     535           0 :         return NS_ERROR_NOT_IMPLEMENTED;
     536             :     }
     537             : 
     538           0 :     if (rv != 0) {
     539           0 :       VP8LOG(LogLevel::Error, "%s to I420 conversion failed",
     540             :              Stringify(surf->GetFormat()).c_str());
     541           0 :       return NS_ERROR_FAILURE;
     542             :     }
     543             : 
     544           0 :     VP8LOG(LogLevel::Verbose, "Converted a %s frame to I420",
     545             :              Stringify(surf->GetFormat()).c_str());
     546             :   }
     547             : 
     548           0 :   mVPXImageWrapper->planes[VPX_PLANE_Y] = y;
     549           0 :   mVPXImageWrapper->planes[VPX_PLANE_U] = cb;
     550           0 :   mVPXImageWrapper->planes[VPX_PLANE_V] = cr;
     551           0 :   mVPXImageWrapper->stride[VPX_PLANE_Y] = mFrameWidth;
     552           0 :   mVPXImageWrapper->stride[VPX_PLANE_U] = halfWidth;
     553           0 :   mVPXImageWrapper->stride[VPX_PLANE_V] = halfWidth;
     554             : 
     555           0 :   return NS_OK;
     556             : }
     557             : 
      558             : // These two values are used in GetNextEncodeOperation to determine the
      559             : // EncodeOperation for the next target frame.
     560             : #define I_FRAME_RATIO (0.5)
     561             : #define SKIP_FRAME_RATIO (0.75)
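                      : // e.g. after processing 100ms worth of frames, having spent more than 75ms
                      : // encoding them yields SKIP_FRAME, and more than 50ms forces an I-frame.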
     562             : 
     563             : /**
      564             :  * Compares the time elapsed since the beginning of GetEncodedTrack() with
      565             :  * the duration of the frames already processed from mSourceSegment,
      566             :  * in order to choose the EncodeOperation for the next target frame.
     567             :  */
     568             : VP8TrackEncoder::EncodeOperation
     569           0 : VP8TrackEncoder::GetNextEncodeOperation(TimeDuration aTimeElapsed,
     570             :                                         StreamTime aProcessedDuration)
     571             : {
     572             :   int64_t durationInUsec =
     573           0 :     FramesToUsecs(aProcessedDuration, mTrackRate).value();
     574           0 :   if (aTimeElapsed.ToMicroseconds() > (durationInUsec * SKIP_FRAME_RATIO)) {
     575             :     // The encoder is too slow.
     576             :     // We should skip next frame to consume the mSourceSegment.
     577           0 :     return SKIP_FRAME;
     578           0 :   } else if (aTimeElapsed.ToMicroseconds() > (durationInUsec * I_FRAME_RATIO)) {
     579             :     // The encoder is a little slow.
     580             :     // We force the encoder to encode an I-frame to accelerate.
     581           0 :     return ENCODE_I_FRAME;
     582             :   } else {
     583           0 :     return ENCODE_NORMAL_FRAME;
     584             :   }
     585             : }
     586             : 
     587             : /**
     588             :  * Encoding flow in GetEncodedTrack():
     589             :  * 1: Check the mInitialized state and the packet duration.
     590             :  * 2: Move the data from mRawSegment to mSourceSegment.
     591             :  * 3: Encode the video chunks in mSourceSegment in a for-loop.
     592             :  * 3.1: The duration is taken straight from the video chunk's duration.
      593             :  * 3.2: Set up the video chunk in mVPXImageWrapper via PrepareRawFrame().
      594             :  * 3.3: Pass the frame to the VP8 encoder via vpx_codec_encode().
      595             :  * 3.4: Get the encoded frame from the encoder via GetEncodedPartitions().
      596             :  * 3.5: Set the nextEncodeOperation for the next target frame.
      597             :  *      The heuristic: if the frames we have processed from mSourceSegment
      598             :  *      cover 100ms, we cannot afford to spend more than 100ms encoding
      599             :  *      them.
      600             :  * 4: Remove the encoded chunks from mSourceSegment after the for-loop.
     601             :  */
     602             : nsresult
     603           0 : VP8TrackEncoder::GetEncodedTrack(EncodedFrameContainer& aData)
     604             : {
     605           0 :   AUTO_PROFILER_LABEL("VP8TrackEncoder::GetEncodedTrack", OTHER);
     606             :   bool EOS;
     607             :   {
     608             :     // Move all the samples from mRawSegment to mSourceSegment. We only hold
     609             :     // the monitor in this block.
     610           0 :     ReentrantMonitorAutoEnter mon(mReentrantMonitor);
      611             :     // Wait while the encoder is not initialized, or while there is not enough
      612             :     // raw data, unless this is the end of the stream or encoding was canceled.
     613           0 :     while (!mCanceled && (!mInitialized ||
     614           0 :            (mRawSegment.GetDuration() + mSourceSegment.GetDuration() == 0 &&
     615           0 :             !mEndOfStream))) {
     616           0 :       mon.Wait();
     617             :     }
     618           0 :     if (mCanceled || mEncodingComplete) {
     619           0 :       return NS_ERROR_FAILURE;
     620             :     }
     621           0 :     mSourceSegment.AppendFrom(&mRawSegment);
     622           0 :     EOS = mEndOfStream;
     623             :   }
     624             : 
     625           0 :   StreamTime totalProcessedDuration = 0;
     626           0 :   TimeStamp timebase = TimeStamp::Now();
     627           0 :   EncodeOperation nextEncodeOperation = ENCODE_NORMAL_FRAME;
     628             : 
     629           0 :   for (VideoSegment::ChunkIterator iter(mSourceSegment);
     630           0 :        !iter.IsEnded(); iter.Next()) {
     631           0 :     VideoChunk &chunk = *iter;
     632           0 :     VP8LOG(LogLevel::Verbose, "nextEncodeOperation is %d for frame of duration %" PRId64,
     633             :              nextEncodeOperation, chunk.GetDuration());
     634             : 
     635             :     // Encode frame.
     636           0 :     if (nextEncodeOperation != SKIP_FRAME) {
     637           0 :       nsresult rv = PrepareRawFrame(chunk);
     638           0 :       NS_ENSURE_SUCCESS(rv, NS_ERROR_FAILURE);
     639             : 
     640             :       // Encode the data with VP8 encoder
     641           0 :       int flags = 0;
     642           0 :       if (nextEncodeOperation == ENCODE_I_FRAME) {
     643           0 :         VP8LOG(LogLevel::Warning, "MediaRecorder lagging behind. Encoding keyframe.");
     644           0 :         flags |= VPX_EFLAG_FORCE_KF;
     645             :       }
     646           0 :       if (vpx_codec_encode(mVPXContext, mVPXImageWrapper, mEncodedTimestamp,
     647           0 :                            (unsigned long)chunk.GetDuration(), flags,
     648           0 :                            VPX_DL_REALTIME)) {
     649           0 :         VP8LOG(LogLevel::Error, "vpx_codec_encode failed to encode the frame.");
     650           0 :         return NS_ERROR_FAILURE;
     651             :       }
     652             :       // Get the encoded data from VP8 encoder.
     653           0 :       rv = GetEncodedPartitions(aData);
     654           0 :       NS_ENSURE_SUCCESS(rv, NS_ERROR_FAILURE);
     655             :     } else {
     656             :       // SKIP_FRAME
     657             :       // Extend the duration of the last encoded data in aData
     658             :       // because this frame will be skipped.
     659           0 :       VP8LOG(LogLevel::Warning, "MediaRecorder lagging behind. Skipping a frame.");
     660           0 :       RefPtr<EncodedFrame> last = aData.GetEncodedFrames().LastElement();
     661           0 :       if (last) {
     662           0 :         mExtractedDuration += chunk.mDuration;
     663           0 :         if (!mExtractedDuration.isValid()) {
     664           0 :           NS_ERROR("skipped duration overflow");
     665           0 :           return NS_ERROR_DOM_MEDIA_OVERFLOW_ERR;
     666             :         }
     667             : 
     668           0 :         CheckedInt64 totalDuration = FramesToUsecs(mExtractedDuration.value(), mTrackRate);
     669           0 :         CheckedInt64 skippedDuration = totalDuration - mExtractedDurationUs;
     670           0 :         mExtractedDurationUs = totalDuration;
     671           0 :         if (!skippedDuration.isValid()) {
     672           0 :           NS_ERROR("skipped duration overflow");
     673           0 :           return NS_ERROR_DOM_MEDIA_OVERFLOW_ERR;
     674             :         }
     675           0 :         last->SetDuration(last->GetDuration() +
     676           0 :                           (static_cast<uint64_t>(skippedDuration.value())));
     677             :       }
     678             :     }
     679             : 
     680             :     // Move forward the mEncodedTimestamp.
     681           0 :     mEncodedTimestamp += chunk.GetDuration();
     682           0 :     totalProcessedDuration += chunk.GetDuration();
     683             : 
     684             :     // Check what to do next.
     685           0 :     TimeDuration elapsedTime = TimeStamp::Now() - timebase;
     686             :     nextEncodeOperation = GetNextEncodeOperation(elapsedTime,
     687           0 :                                                  totalProcessedDuration);
     688             :   }
     689             : 
     690             :   // Remove the chunks we have processed.
     691           0 :   mSourceSegment.Clear();
     692             : 
     693             :   // End of stream, pull the rest frames in encoder.
     694           0 :   if (EOS) {
     695           0 :     VP8LOG(LogLevel::Debug, "mEndOfStream is true");
     696           0 :     mEncodingComplete = true;
      697             :     // Bug 1243611: keep calling vpx_codec_encode and vpx_codec_get_cx_data
      698             :     // until vpx_codec_get_cx_data returns null.
     699             : 
     700           0 :     do {
     701           0 :       if (vpx_codec_encode(mVPXContext, nullptr, mEncodedTimestamp,
     702             :                            0, 0, VPX_DL_REALTIME)) {
     703           0 :         return NS_ERROR_FAILURE;
     704             :       }
      705           0 :     } while (NS_SUCCEEDED(GetEncodedPartitions(aData)));
     706             :   }
     707             : 
      708           0 :   return NS_OK;
     709             : }
     710             : 
     711             : } // namespace mozilla
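                      : // A minimal driving sketch (illustrative only; the call sequence and the
                      : // 90 kHz track rate are assumptions, not taken from this file). The real
                      : // owner is the MediaEncoder/TrackEncoder pipeline, which feeds raw frames in
                      : // and pulls encoded data back out roughly like this:
                      : //
                      : //   VP8TrackEncoder encoder(90000);
                      : //   encoder.Init(640, 480, 640, 480);      // frame and display dimensions
                      : //   /* raw VideoSegment data is appended by the owning track encoder */
                      : //   EncodedFrameContainer container;
                      : //   encoder.GetEncodedTrack(container);    // blocks until frames or EOS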

Generated by: LCOV version 1.13