tor-browser

The Tor Browser
git clone https://git.dasho.dev/tor-browser.git
Log | Files | Refs | README | LICENSE

libvpx_vp8_encoder.cc (62634B)


      1 /*
      2 *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
      3 *
      4 *  Use of this source code is governed by a BSD-style license
      5 *  that can be found in the LICENSE file in the root of the source
      6 *  tree. An additional intellectual property rights grant can be found
      7 *  in the file PATENTS.  All contributing project authors may
      8 *  be found in the AUTHORS file in the root of the source tree.
      9 */
     10 
     11 #include "modules/video_coding/codecs/vp8/libvpx_vp8_encoder.h"
     12 
     13 #include <algorithm>
     14 #include <cmath>
     15 #include <cstdint>
     16 #include <cstring>
     17 #include <iterator>
     18 #include <memory>
     19 #include <optional>
     20 #include <string>
     21 #include <utility>
     22 #include <vector>
     23 
     24 #include "absl/algorithm/container.h"
     25 #include "absl/container/inlined_vector.h"
     26 #include "api/environment/environment.h"
     27 #include "api/fec_controller_override.h"
     28 #include "api/field_trials_view.h"
     29 #include "api/scoped_refptr.h"
     30 #include "api/units/time_delta.h"
     31 #include "api/units/timestamp.h"
     32 #include "api/video/encoded_image.h"
     33 #include "api/video/render_resolution.h"
     34 #include "api/video/video_bitrate_allocation.h"
     35 #include "api/video/video_bitrate_allocator.h"
     36 #include "api/video/video_codec_constants.h"
     37 #include "api/video/video_codec_type.h"
     38 #include "api/video/video_frame.h"
     39 #include "api/video/video_frame_buffer.h"
     40 #include "api/video/video_frame_type.h"
     41 #include "api/video_codecs/scalability_mode.h"
     42 #include "api/video_codecs/video_codec.h"
     43 #include "api/video_codecs/video_encoder.h"
     44 #include "api/video_codecs/vp8_frame_buffer_controller.h"
     45 #include "api/video_codecs/vp8_frame_config.h"
     46 #include "api/video_codecs/vp8_temporal_layers_factory.h"
     47 #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h"
     48 #include "modules/video_coding/codecs/interface/common_constants.h"
     49 #include "modules/video_coding/codecs/interface/libvpx_interface.h"
     50 #include "modules/video_coding/codecs/vp8/include/vp8.h"
     51 #include "modules/video_coding/codecs/vp8/vp8_scalability.h"
     52 #include "modules/video_coding/include/video_codec_interface.h"
     53 #include "modules/video_coding/include/video_error_codes.h"
     54 #include "modules/video_coding/utility/corruption_detection_settings_generator.h"
     55 #include "modules/video_coding/utility/simulcast_rate_allocator.h"
     56 #include "modules/video_coding/utility/simulcast_utility.h"
     57 #include "modules/video_coding/utility/vp8_constants.h"
     58 #include "rtc_base/checks.h"
     59 #include "rtc_base/experiments/field_trial_parser.h"
     60 #include "rtc_base/logging.h"
     61 #include "rtc_base/numerics/safe_conversions.h"
     62 #include "rtc_base/trace_event.h"
     63 #include "third_party/libvpx/source/libvpx/vpx/vp8cx.h"
     64 #include "third_party/libvpx/source/libvpx/vpx/vpx_codec.h"
     65 #include "third_party/libvpx/source/libvpx/vpx/vpx_encoder.h"
     66 #include "third_party/libvpx/source/libvpx/vpx/vpx_image.h"
     67 
     68 #if (defined(WEBRTC_ARCH_ARM) || defined(WEBRTC_ARCH_ARM64)) && \
     69    (defined(WEBRTC_ANDROID) || defined(WEBRTC_IOS))
     70 #define MOBILE_ARM
     71 #endif
     72 
     73 namespace webrtc {
     74 namespace {
#if defined(WEBRTC_IOS)
// Field trial (and its parameter key) used to cap the number of encoder
// threads on iOS; read in NumberOfThreads() elsewhere in this file.
constexpr char kVP8IosMaxNumberOfThreadFieldTrial[] =
    "WebRTC-VP8IosMaxNumberOfThread";
constexpr char kVP8IosMaxNumberOfThreadFieldTrialParameter[] = "max_thread";
#endif

namespace variable_framerate_screenshare {
// Lower bound on the adaptive framerate used for screenshare content.
constexpr double kMinFps = 5.0;
// NOTE(review): presumably a rate-undershoot tolerance in percent for the
// variable-framerate screenshare mode — confirm at the use site.
constexpr int kUndershootPct = 30;
}  // namespace variable_framerate_screenshare

// Field trial that forces VPX_ERROR_RESILIENT_PARTITIONS for single-stream
// temporal-layer encodes (see InitEncode()).
constexpr char kVp8ForcePartitionResilience[] =
    "WebRTC-VP8-ForcePartitionResilience";

// QP is obtained from VP8-bitstream for HW, so the QP corresponds to the
// bitstream range of [0, 127] and not the user-level range of [0,63].
constexpr int kLowVp8QpThreshold = 29;
constexpr int kHighVp8QpThreshold = 95;
constexpr int kScreenshareMinQp = 15;

// Token-partition setting passed to libvpx (single partition).
constexpr int kTokenPartitions = VP8_ONE_TOKENPARTITION;
// Byte alignment requested for raw image allocations.
constexpr uint32_t kVp832ByteAlign = 32u;

// RTP timestamp ticks per millisecond (90 kHz clock / 1000).
constexpr int kRtpTicksPerMs = kVideoPayloadTypeFrequency / 1000;

// If internal frame dropping is enabled, force the encoder to output a frame
// on an encode request after this timeout even if this causes some
// bitrate overshoot compared to the nominal target. Otherwise we risk the
// receivers incorrectly identifying the gap as a fault and they may needlessly
// send keyframe requests to recover.
constexpr TimeDelta kDefaultMaxFrameDropInterval = TimeDelta::Seconds(2);

// VP8 denoiser states.
enum denoiserState : uint32_t {
  kDenoiserOff,
  kDenoiserOnYOnly,
  kDenoiserOnYUV,
  kDenoiserOnYUVAggressive,
  // Adaptive mode defaults to kDenoiserOnYUV on key frame, but may switch
  // to kDenoiserOnYUVAggressive based on a computed noise metric.
  kDenoiserOnAdaptive
};
    117 
    118 // Greatest common divisior
    119 int GCD(int a, int b) {
    120  int c = a % b;
    121  while (c != 0) {
    122    a = b;
    123    b = c;
    124    c = a % b;
    125  }
    126  return b;
    127 }
    128 
// Compile-time guards: the local Vp8EncoderConfig temporal-layer array sizes
// are copied verbatim into libvpx's vpx_codec_enc_cfg_t arrays (see
// ApplyVp8EncoderConfigToVpxConfig), so the bounds must match exactly.
static_assert(Vp8EncoderConfig::TemporalLayerConfig::kMaxPeriodicity ==
                  VPX_TS_MAX_PERIODICITY,
              "Vp8EncoderConfig::kMaxPeriodicity must be kept in sync with the "
              "constant in libvpx.");
static_assert(Vp8EncoderConfig::TemporalLayerConfig::kMaxLayers ==
                  VPX_TS_MAX_LAYERS,
              "Vp8EncoderConfig::kMaxLayers must be kept in sync with the "
              "constant in libvpx.");
    137 
    138 // Allow a newer value to override a current value only if the new value
    139 // is set.
    140 template <typename T>
    141 bool MaybeSetNewValue(const std::optional<T>& new_value,
    142                      std::optional<T>* base_value) {
    143  if (new_value.has_value() && new_value != *base_value) {
    144    *base_value = new_value;
    145    return true;
    146  } else {
    147    return false;
    148  }
    149 }
    150 
    151 // Adds configuration from `new_config` to `base_config`. Both configs consist
    152 // of optionals, and only optionals which are set in `new_config` can have
    153 // an effect. (That is, set values in `base_config` cannot be unset.)
    154 // Returns `true` iff any changes were made to `base_config`.
    155 bool MaybeExtendVp8EncoderConfig(const Vp8EncoderConfig& new_config,
    156                                 Vp8EncoderConfig* base_config) {
    157  bool changes_made = false;
    158  changes_made |= MaybeSetNewValue(new_config.temporal_layer_config,
    159                                   &base_config->temporal_layer_config);
    160  changes_made |= MaybeSetNewValue(new_config.rc_target_bitrate,
    161                                   &base_config->rc_target_bitrate);
    162  changes_made |= MaybeSetNewValue(new_config.rc_max_quantizer,
    163                                   &base_config->rc_max_quantizer);
    164  changes_made |= MaybeSetNewValue(new_config.g_error_resilient,
    165                                   &base_config->g_error_resilient);
    166  return changes_made;
    167 }
    168 
    169 void ApplyVp8EncoderConfigToVpxConfig(const Vp8EncoderConfig& encoder_config,
    170                                      vpx_codec_enc_cfg_t* vpx_config) {
    171  if (encoder_config.temporal_layer_config.has_value()) {
    172    const Vp8EncoderConfig::TemporalLayerConfig& ts_config =
    173        encoder_config.temporal_layer_config.value();
    174    vpx_config->ts_number_layers = ts_config.ts_number_layers;
    175    std::copy(ts_config.ts_target_bitrate.begin(),
    176              ts_config.ts_target_bitrate.end(),
    177              std::begin(vpx_config->ts_target_bitrate));
    178    std::copy(ts_config.ts_rate_decimator.begin(),
    179              ts_config.ts_rate_decimator.end(),
    180              std::begin(vpx_config->ts_rate_decimator));
    181    vpx_config->ts_periodicity = ts_config.ts_periodicity;
    182    std::copy(ts_config.ts_layer_id.begin(), ts_config.ts_layer_id.end(),
    183              std::begin(vpx_config->ts_layer_id));
    184  } else {
    185    vpx_config->ts_number_layers = 1;
    186    vpx_config->ts_rate_decimator[0] = 1;
    187    vpx_config->ts_periodicity = 1;
    188    vpx_config->ts_layer_id[0] = 0;
    189  }
    190 
    191  if (encoder_config.rc_target_bitrate.has_value()) {
    192    vpx_config->rc_target_bitrate = encoder_config.rc_target_bitrate.value();
    193  }
    194 
    195  if (encoder_config.rc_max_quantizer.has_value()) {
    196    vpx_config->rc_max_quantizer = encoder_config.rc_max_quantizer.value();
    197  }
    198 
    199  if (encoder_config.g_error_resilient.has_value()) {
    200    vpx_config->g_error_resilient = encoder_config.g_error_resilient.value();
    201  }
    202 }
    203 
    204 bool IsCompatibleVideoFrameBufferType(VideoFrameBuffer::Type left,
    205                                      VideoFrameBuffer::Type right) {
    206  if (left == VideoFrameBuffer::Type::kI420 ||
    207      left == VideoFrameBuffer::Type::kI420A) {
    208    // LibvpxVp8Encoder does not care about the alpha channel, I420A and I420
    209    // are considered compatible.
    210    return right == VideoFrameBuffer::Type::kI420 ||
    211           right == VideoFrameBuffer::Type::kI420A;
    212  }
    213  return left == right;
    214 }
    215 
    216 void SetRawImagePlanes(vpx_image_t* raw_image, VideoFrameBuffer* buffer) {
    217  switch (buffer->type()) {
    218    case VideoFrameBuffer::Type::kI420:
    219    case VideoFrameBuffer::Type::kI420A: {
    220      const I420BufferInterface* i420_buffer = buffer->GetI420();
    221      RTC_DCHECK(i420_buffer);
    222      raw_image->planes[VPX_PLANE_Y] =
    223          const_cast<uint8_t*>(i420_buffer->DataY());
    224      raw_image->planes[VPX_PLANE_U] =
    225          const_cast<uint8_t*>(i420_buffer->DataU());
    226      raw_image->planes[VPX_PLANE_V] =
    227          const_cast<uint8_t*>(i420_buffer->DataV());
    228      raw_image->stride[VPX_PLANE_Y] = i420_buffer->StrideY();
    229      raw_image->stride[VPX_PLANE_U] = i420_buffer->StrideU();
    230      raw_image->stride[VPX_PLANE_V] = i420_buffer->StrideV();
    231      break;
    232    }
    233    case VideoFrameBuffer::Type::kNV12: {
    234      const NV12BufferInterface* nv12_buffer = buffer->GetNV12();
    235      RTC_DCHECK(nv12_buffer);
    236      raw_image->planes[VPX_PLANE_Y] =
    237          const_cast<uint8_t*>(nv12_buffer->DataY());
    238      raw_image->planes[VPX_PLANE_U] =
    239          const_cast<uint8_t*>(nv12_buffer->DataUV());
    240      raw_image->planes[VPX_PLANE_V] = raw_image->planes[VPX_PLANE_U] + 1;
    241      raw_image->stride[VPX_PLANE_Y] = nv12_buffer->StrideY();
    242      raw_image->stride[VPX_PLANE_U] = nv12_buffer->StrideUV();
    243      raw_image->stride[VPX_PLANE_V] = nv12_buffer->StrideUV();
    244      break;
    245    }
    246    default:
    247      RTC_DCHECK_NOTREACHED();
    248  }
    249 }
    250 
    251 // Helper class used to temporarily change the frame drop threshold for an
    252 // encoder. Returns the setting to the previous value when upon destruction.
    253 class FrameDropConfigOverride {
    254 public:
    255  FrameDropConfigOverride(LibvpxInterface* libvpx,
    256                          vpx_codec_ctx_t* encoder,
    257                          vpx_codec_enc_cfg_t* config,
    258                          uint32_t temporary_frame_drop_threshold)
    259      : libvpx_(libvpx),
    260        encoder_(encoder),
    261        config_(config),
    262        original_frame_drop_threshold_(config->rc_dropframe_thresh) {
    263    config_->rc_dropframe_thresh = temporary_frame_drop_threshold;
    264    libvpx_->codec_enc_config_set(encoder_, config_);
    265  }
    266  ~FrameDropConfigOverride() {
    267    config_->rc_dropframe_thresh = original_frame_drop_threshold_;
    268    libvpx_->codec_enc_config_set(encoder_, config_);
    269  }
    270 
    271 private:
    272  LibvpxInterface* const libvpx_;
    273  vpx_codec_ctx_t* const encoder_;
    274  vpx_codec_enc_cfg_t* const config_;
    275  const uint32_t original_frame_drop_threshold_;
    276 };
    277 
    278 std::optional<TimeDelta> ParseFrameDropInterval(
    279    const FieldTrialsView& field_trials) {
    280  FieldTrialFlag disabled = FieldTrialFlag("Disabled");
    281  FieldTrialParameter<TimeDelta> interval("interval",
    282                                          kDefaultMaxFrameDropInterval);
    283  ParseFieldTrial({&disabled, &interval},
    284                  field_trials.Lookup("WebRTC-VP8-MaxFrameInterval"));
    285  if (disabled.Get()) {
    286    // Kill switch set, don't use any max frame interval.
    287    return std::nullopt;
    288  }
    289  return interval.Get();
    290 }
    291 
    292 }  // namespace
    293 
    294 std::unique_ptr<VideoEncoder> CreateVp8Encoder(const Environment& env,
    295                                               Vp8EncoderSettings settings) {
    296  return std::make_unique<LibvpxVp8Encoder>(env, std::move(settings),
    297                                            LibvpxInterface::Create());
    298 }
    299 
    300 vpx_enc_frame_flags_t LibvpxVp8Encoder::EncodeFlags(
    301    const Vp8FrameConfig& references) {
    302  RTC_DCHECK(!references.drop_frame);
    303 
    304  vpx_enc_frame_flags_t flags = 0;
    305 
    306  if ((references.last_buffer_flags &
    307       Vp8FrameConfig::BufferFlags::kReference) == 0)
    308    flags |= VP8_EFLAG_NO_REF_LAST;
    309  if ((references.last_buffer_flags & Vp8FrameConfig::BufferFlags::kUpdate) ==
    310      0)
    311    flags |= VP8_EFLAG_NO_UPD_LAST;
    312  if ((references.golden_buffer_flags &
    313       Vp8FrameConfig::BufferFlags::kReference) == 0)
    314    flags |= VP8_EFLAG_NO_REF_GF;
    315  if ((references.golden_buffer_flags & Vp8FrameConfig::BufferFlags::kUpdate) ==
    316      0)
    317    flags |= VP8_EFLAG_NO_UPD_GF;
    318  if ((references.arf_buffer_flags & Vp8FrameConfig::BufferFlags::kReference) ==
    319      0)
    320    flags |= VP8_EFLAG_NO_REF_ARF;
    321  if ((references.arf_buffer_flags & Vp8FrameConfig::BufferFlags::kUpdate) == 0)
    322    flags |= VP8_EFLAG_NO_UPD_ARF;
    323  if (references.freeze_entropy)
    324    flags |= VP8_EFLAG_NO_UPD_ENTROPY;
    325 
    326  return flags;
    327 }
    328 
// Constructs the encoder shell; actual codec contexts are created in
// InitEncode(). Field-trial-driven settings (frame-drop interval, Android
// threading, PSNR calculation) are resolved once here.
LibvpxVp8Encoder::LibvpxVp8Encoder(const Environment& env,
                                   Vp8EncoderSettings settings,
                                   std::unique_ptr<LibvpxInterface> interface)
    : env_(env),
      libvpx_(std::move(interface)),
      rate_control_settings_(env_.field_trials()),
      resolution_bitrate_limits_(std::move(settings.resolution_bitrate_limits)),
      key_frame_request_(kMaxSimulcastStreams, false),
      last_encoder_output_time_(kMaxSimulcastStreams,
                                Timestamp::MinusInfinity()),
      framerate_controller_(variable_framerate_screenshare::kMinFps),
      encoder_info_override_(env_.field_trials()),
      max_frame_drop_interval_(ParseFrameDropInterval(env_.field_trials())),
      android_specific_threading_settings_(env_.field_trials().IsEnabled(
          "WebRTC-LibvpxVp8Encoder-AndroidSpecificThreadingSettings")),
      calculate_psnr_(
          env.field_trials().IsEnabled("WebRTC-Video-CalculatePsnr")) {
  // TODO(eladalon/ilnik): These reservations might be wasting memory.
  // InitEncode() is resizing to the actual size, which might be smaller.
  raw_images_.reserve(kMaxSimulcastStreams);
  encoded_images_.reserve(kMaxSimulcastStreams);
  send_stream_.reserve(kMaxSimulcastStreams);
  cpu_speed_.assign(kMaxSimulcastStreams, cpu_speed_default_);
  encoders_.reserve(kMaxSimulcastStreams);
  vpx_configs_.reserve(kMaxSimulcastStreams);
  config_overrides_.reserve(kMaxSimulcastStreams);
  downsampling_factors_.reserve(kMaxSimulcastStreams);
}
    357 
// Releases all libvpx contexts and image buffers (see Release()).
LibvpxVp8Encoder::~LibvpxVp8Encoder() {
  Release();
}
    361 
    362 int LibvpxVp8Encoder::Release() {
    363  int ret_val = WEBRTC_VIDEO_CODEC_OK;
    364 
    365  encoded_images_.clear();
    366 
    367  if (inited_) {
    368    for (auto it = encoders_.rbegin(); it != encoders_.rend(); ++it) {
    369      if (libvpx_->codec_destroy(&*it)) {
    370        ret_val = WEBRTC_VIDEO_CODEC_MEMORY;
    371      }
    372    }
    373  }
    374  encoders_.clear();
    375 
    376  vpx_configs_.clear();
    377  config_overrides_.clear();
    378  send_stream_.clear();
    379  cpu_speed_.clear();
    380 
    381  for (auto it = raw_images_.rbegin(); it != raw_images_.rend(); ++it) {
    382    libvpx_->img_free(&*it);
    383  }
    384  raw_images_.clear();
    385 
    386  frame_buffer_controller_.reset();
    387  inited_ = false;
    388  return ret_val;
    389 }
    390 
    391 void LibvpxVp8Encoder::SetRates(const RateControlParameters& parameters) {
    392  if (!inited_) {
    393    RTC_LOG(LS_WARNING) << "SetRates() while not initialize";
    394    return;
    395  }
    396 
    397  if (encoders_[0].err) {
    398    RTC_LOG(LS_WARNING) << "Encoder in error state.";
    399    return;
    400  }
    401 
    402  if (parameters.framerate_fps < 1.0) {
    403    RTC_LOG(LS_WARNING) << "Unsupported framerate (must be >= 1.0): "
    404                        << parameters.framerate_fps;
    405    return;
    406  }
    407 
    408  if (parameters.bitrate.get_sum_bps() == 0) {
    409    // Encoder paused, turn off all encoding.
    410    const int num_streams = static_cast<size_t>(encoders_.size());
    411    for (int i = 0; i < num_streams; ++i)
    412      SetStreamState(false, i);
    413    return;
    414  }
    415 
    416  codec_.maxFramerate = static_cast<uint32_t>(parameters.framerate_fps + 0.5);
    417 
    418  if (encoders_.size() > 1) {
    419    // If we have more than 1 stream, reduce the qp_max for the low resolution
    420    // stream if frame rate is not too low. The trade-off with lower qp_max is
    421    // possibly more dropped frames, so we only do this if the frame rate is
    422    // above some threshold (base temporal layer is down to 1/4 for 3 layers).
    423    // We may want to condition this on bitrate later.
    424    if (rate_control_settings_.Vp8BoostBaseLayerQuality() &&
    425        parameters.framerate_fps > 20.0) {
    426      vpx_configs_[encoders_.size() - 1].rc_max_quantizer = 45;
    427    } else {
    428      // Go back to default value set in InitEncode.
    429      vpx_configs_[encoders_.size() - 1].rc_max_quantizer = qp_max_;
    430    }
    431  }
    432 
    433  for (size_t i = 0; i < encoders_.size(); ++i) {
    434    const size_t stream_idx = encoders_.size() - 1 - i;
    435 
    436    unsigned int target_bitrate_kbps =
    437        parameters.bitrate.GetSpatialLayerSum(stream_idx) / 1000;
    438 
    439    bool send_stream = target_bitrate_kbps > 0;
    440    if (send_stream || encoders_.size() > 1)
    441      SetStreamState(send_stream, stream_idx);
    442 
    443    vpx_configs_[i].rc_target_bitrate = target_bitrate_kbps;
    444    if (send_stream) {
    445      frame_buffer_controller_->OnRatesUpdated(
    446          stream_idx, parameters.bitrate.GetTemporalLayerAllocation(stream_idx),
    447          static_cast<int>(parameters.framerate_fps + 0.5));
    448    }
    449 
    450    UpdateVpxConfiguration(stream_idx);
    451 
    452    vpx_codec_err_t err =
    453        libvpx_->codec_enc_config_set(&encoders_[i], &vpx_configs_[i]);
    454    if (err != VPX_CODEC_OK) {
    455      RTC_LOG(LS_WARNING) << "Error configuring codec, error code: " << err
    456                          << ", details: "
    457                          << libvpx_->codec_error_detail(&encoders_[i]);
    458    }
    459  }
    460 }
    461 
// Forwards the observed packet loss rate to the frame buffer controller,
// which may adjust its referencing strategy.
void LibvpxVp8Encoder::OnPacketLossRateUpdate(float packet_loss_rate) {
  // TODO(bugs.webrtc.org/10431): Replace condition by DCHECK.
  if (frame_buffer_controller_) {
    frame_buffer_controller_->OnPacketLossRateUpdate(packet_loss_rate);
  }
}
    468 
// Forwards the current round-trip time to the frame buffer controller.
void LibvpxVp8Encoder::OnRttUpdate(int64_t rtt_ms) {
  // TODO(bugs.webrtc.org/10431): Replace condition by DCHECK.
  if (frame_buffer_controller_) {
    frame_buffer_controller_->OnRttUpdate(rtt_ms);
  }
}
    475 
// Forwards an RTCP loss notification to the frame buffer controller.
// Guarded because the controller only exists after InitEncode().
void LibvpxVp8Encoder::OnLossNotification(
    const LossNotification& loss_notification) {
  if (frame_buffer_controller_) {
    frame_buffer_controller_->OnLossNotification(loss_notification);
  }
}
    482 
    483 void LibvpxVp8Encoder::SetStreamState(bool send_stream, int stream_idx) {
    484  if (send_stream && !send_stream_[stream_idx]) {
    485    // Need a key frame if we have not sent this stream before.
    486    key_frame_request_[stream_idx] = true;
    487  }
    488  send_stream_[stream_idx] = send_stream;
    489 }
    490 
// Stores the FEC controller override, which is later handed to the frame
// buffer controller factory in InitEncode(). May only be set once.
void LibvpxVp8Encoder::SetFecControllerOverride(
    FecControllerOverride* fec_controller_override) {
  // TODO(bugs.webrtc.org/10769): Update downstream and remove ability to
  // pass nullptr.
  // RTC_DCHECK(fec_controller_override);
  RTC_DCHECK(!fec_controller_override_);
  fec_controller_override_ = fec_controller_override;
}
    499 
    500 // TODO(eladalon): s/inst/codec_settings/g.
    501 int LibvpxVp8Encoder::InitEncode(const VideoCodec* inst,
    502                                 const VideoEncoder::Settings& settings) {
    503  if (inst == nullptr) {
    504    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
    505  }
    506  if (inst->maxFramerate < 1) {
    507    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
    508  }
    509  // allow zero to represent an unspecified maxBitRate
    510  if (inst->maxBitrate > 0 && inst->startBitrate > inst->maxBitrate) {
    511    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
    512  }
    513  if (inst->width < 1 || inst->height < 1) {
    514    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
    515  }
    516  if (settings.number_of_cores < 1) {
    517    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
    518  }
    519 
    520  if (std::optional<ScalabilityMode> scalability_mode =
    521          inst->GetScalabilityMode();
    522      scalability_mode.has_value() &&
    523      !VP8SupportsScalabilityMode(*scalability_mode)) {
    524    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
    525  }
    526 
    527  num_active_streams_ = 0;
    528  for (int i = 0; i < inst->numberOfSimulcastStreams; ++i) {
    529    if (inst->simulcastStream[i].active) {
    530      ++num_active_streams_;
    531    }
    532  }
    533  if (inst->numberOfSimulcastStreams == 0 && inst->active) {
    534    num_active_streams_ = 1;
    535  }
    536 
    537  if (inst->VP8().automaticResizeOn && num_active_streams_ > 1) {
    538    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
    539  }
    540 
    541  // Use the previous pixel format to avoid extra image allocations.
    542  vpx_img_fmt_t pixel_format =
    543      raw_images_.empty() ? VPX_IMG_FMT_I420 : raw_images_[0].fmt;
    544 
    545  int retVal = Release();
    546  if (retVal < 0) {
    547    return retVal;
    548  }
    549 
    550  int number_of_streams = SimulcastUtility::NumberOfSimulcastStreams(*inst);
    551  if (number_of_streams > 1 &&
    552      !SimulcastUtility::ValidSimulcastParameters(*inst, number_of_streams)) {
    553    return WEBRTC_VIDEO_CODEC_ERR_SIMULCAST_PARAMETERS_NOT_SUPPORTED;
    554  }
    555 
    556  RTC_DCHECK(!frame_buffer_controller_);
    557  Vp8TemporalLayersFactory factory;
    558  frame_buffer_controller_ =
    559      factory.Create(env_, *inst, settings, fec_controller_override_);
    560  RTC_DCHECK(frame_buffer_controller_);
    561 
    562  number_of_cores_ = settings.number_of_cores;
    563  timestamp_ = 0;
    564  codec_ = *inst;
    565 
    566  // Code expects simulcastStream resolutions to be correct, make sure they are
    567  // filled even when there are no simulcast layers.
    568  if (codec_.numberOfSimulcastStreams == 0) {
    569    codec_.simulcastStream[0].width = codec_.width;
    570    codec_.simulcastStream[0].height = codec_.height;
    571  }
    572 
    573  encoded_images_.resize(number_of_streams);
    574  encoders_.resize(number_of_streams);
    575  vpx_configs_.resize(number_of_streams);
    576  config_overrides_.resize(number_of_streams);
    577  downsampling_factors_.resize(number_of_streams);
    578  raw_images_.resize(number_of_streams);
    579  send_stream_.resize(number_of_streams);
    580  send_stream_[0] = true;  // For non-simulcast case.
    581  cpu_speed_.resize(number_of_streams);
    582  std::fill(key_frame_request_.begin(), key_frame_request_.end(), false);
    583  std::fill(last_encoder_output_time_.begin(), last_encoder_output_time_.end(),
    584            Timestamp::MinusInfinity());
    585 
    586  int idx = number_of_streams - 1;
    587  for (int i = 0; i < (number_of_streams - 1); ++i, --idx) {
    588    int gcd = GCD(inst->simulcastStream[idx].width,
    589                  inst->simulcastStream[idx - 1].width);
    590    downsampling_factors_[i].num = inst->simulcastStream[idx].width / gcd;
    591    downsampling_factors_[i].den = inst->simulcastStream[idx - 1].width / gcd;
    592    send_stream_[i] = false;
    593  }
    594  if (number_of_streams > 1) {
    595    send_stream_[number_of_streams - 1] = false;
    596    downsampling_factors_[number_of_streams - 1].num = 1;
    597    downsampling_factors_[number_of_streams - 1].den = 1;
    598  }
    599 
    600  // populate encoder configuration with default values
    601  if (libvpx_->codec_enc_config_default(vpx_codec_vp8_cx(), &vpx_configs_[0],
    602                                        0)) {
    603    return WEBRTC_VIDEO_CODEC_ERROR;
    604  }
    605  // setting the time base of the codec
    606  vpx_configs_[0].g_timebase.num = 1;
    607  vpx_configs_[0].g_timebase.den = kVideoPayloadTypeFrequency;
    608  vpx_configs_[0].g_lag_in_frames = 0;  // 0- no frame lagging
    609 
    610  // Set the error resilience mode for temporal layers (but not simulcast).
    611  vpx_configs_[0].g_error_resilient =
    612      (SimulcastUtility::NumberOfTemporalLayers(*inst, 0) > 1)
    613          ? VPX_ERROR_RESILIENT_DEFAULT
    614          : 0;
    615 
    616  // Override the error resilience mode if this is not simulcast, but we are
    617  // using temporal layers.
    618  if (env_.field_trials().IsEnabled(kVp8ForcePartitionResilience) &&
    619      (number_of_streams == 1) &&
    620      (SimulcastUtility::NumberOfTemporalLayers(*inst, 0) > 1)) {
    621    RTC_LOG(LS_INFO) << "Overriding g_error_resilient from "
    622                     << vpx_configs_[0].g_error_resilient << " to "
    623                     << VPX_ERROR_RESILIENT_PARTITIONS;
    624    vpx_configs_[0].g_error_resilient = VPX_ERROR_RESILIENT_PARTITIONS;
    625  }
    626 
    627  // rate control settings
    628  vpx_configs_[0].rc_dropframe_thresh = FrameDropThreshold(0);
    629  vpx_configs_[0].rc_end_usage = VPX_CBR;
    630  vpx_configs_[0].g_pass = VPX_RC_ONE_PASS;
    631  // Handle resizing outside of libvpx.
    632  vpx_configs_[0].rc_resize_allowed = 0;
    633  vpx_configs_[0].rc_min_quantizer =
    634      codec_.mode == VideoCodecMode::kScreensharing ? 12 : 2;
    635  if (inst->qpMax >= vpx_configs_[0].rc_min_quantizer) {
    636    qp_max_ = inst->qpMax;
    637  }
    638  if (rate_control_settings_.LibvpxVp8QpMax()) {
    639    qp_max_ = std::max(rate_control_settings_.LibvpxVp8QpMax().value(),
    640                       static_cast<int>(vpx_configs_[0].rc_min_quantizer));
    641  }
    642  vpx_configs_[0].rc_max_quantizer = qp_max_;
    643  vpx_configs_[0].rc_undershoot_pct = 100;
    644  vpx_configs_[0].rc_overshoot_pct = 15;
    645  vpx_configs_[0].rc_buf_initial_sz = 500;
    646  vpx_configs_[0].rc_buf_optimal_sz = 600;
    647  vpx_configs_[0].rc_buf_sz = 1000;
    648 
    649  // Set the maximum target size of any key-frame.
    650  rc_max_intra_target_ = MaxIntraTarget(vpx_configs_[0].rc_buf_optimal_sz);
    651 
    652  if (inst->VP8().keyFrameInterval > 0) {
    653    vpx_configs_[0].kf_mode = VPX_KF_AUTO;
    654    vpx_configs_[0].kf_max_dist = inst->VP8().keyFrameInterval;
    655  } else {
    656    vpx_configs_[0].kf_mode = VPX_KF_DISABLED;
    657  }
    658 
    659  // Allow the user to set the complexity for the base stream.
    660  switch (inst->GetVideoEncoderComplexity()) {
    661    case VideoCodecComplexity::kComplexityHigh:
    662      cpu_speed_[0] = -5;
    663      break;
    664    case VideoCodecComplexity::kComplexityHigher:
    665      cpu_speed_[0] = -4;
    666      break;
    667    case VideoCodecComplexity::kComplexityMax:
    668      cpu_speed_[0] = -3;
    669      break;
    670    default:
    671      cpu_speed_[0] = -6;
    672      break;
    673  }
    674  cpu_speed_default_ = cpu_speed_[0];
    675  // Set encoding complexity (cpu_speed) based on resolution and/or platform.
    676  cpu_speed_[0] = GetCpuSpeed(inst->width, inst->height);
    677  for (int i = 1; i < number_of_streams; ++i) {
    678    cpu_speed_[i] =
    679        GetCpuSpeed(inst->simulcastStream[number_of_streams - 1 - i].width,
    680                    inst->simulcastStream[number_of_streams - 1 - i].height);
    681  }
    682  vpx_configs_[0].g_w = inst->width;
    683  vpx_configs_[0].g_h = inst->height;
    684 
    685  // Determine number of threads based on the image size and #cores.
    686  // TODO(fbarchard): Consider number of Simulcast layers.
    687  vpx_configs_[0].g_threads = NumberOfThreads(
    688      vpx_configs_[0].g_w, vpx_configs_[0].g_h, settings.number_of_cores);
    689  if (settings.encoder_thread_limit.has_value()) {
    690    RTC_DCHECK_GE(settings.encoder_thread_limit.value(), 1);
    691    vpx_configs_[0].g_threads = std::min(
    692        vpx_configs_[0].g_threads,
    693        static_cast<unsigned int>(settings.encoder_thread_limit.value()));
    694  }
    695 
    696  // Creating a wrapper to the image - setting image data to NULL.
    697  // Actual pointer will be set in encode. Setting align to 1, as it
    698  // is meaningless (no memory allocation is done here).
    699  libvpx_->img_wrap(&raw_images_[0], pixel_format, inst->width, inst->height, 1,
    700                    nullptr);
    701 
    702  // Note the order we use is different from webm, we have lowest resolution
    703  // at position 0 and they have highest resolution at position 0.
    704  const size_t stream_idx_cfg_0 = encoders_.size() - 1;
    705  SimulcastRateAllocator init_allocator(env_, codec_);
    706  VideoBitrateAllocation allocation =
    707      init_allocator.Allocate(VideoBitrateAllocationParameters(
    708          inst->startBitrate * 1000, inst->maxFramerate));
    709  std::vector<uint32_t> stream_bitrates;
    710  for (int i = 0; i == 0 || i < inst->numberOfSimulcastStreams; ++i) {
    711    uint32_t bitrate = allocation.GetSpatialLayerSum(i) / 1000;
    712    stream_bitrates.push_back(bitrate);
    713  }
    714 
    715  vpx_configs_[0].rc_target_bitrate = stream_bitrates[stream_idx_cfg_0];
    716  if (stream_bitrates[stream_idx_cfg_0] > 0) {
    717    uint32_t maxFramerate =
    718        inst->simulcastStream[stream_idx_cfg_0].maxFramerate;
    719    if (!maxFramerate) {
    720      maxFramerate = inst->maxFramerate;
    721    }
    722 
    723    frame_buffer_controller_->OnRatesUpdated(
    724        stream_idx_cfg_0,
    725        allocation.GetTemporalLayerAllocation(stream_idx_cfg_0), maxFramerate);
    726  }
    727  frame_buffer_controller_->SetQpLimits(stream_idx_cfg_0,
    728                                        vpx_configs_[0].rc_min_quantizer,
    729                                        vpx_configs_[0].rc_max_quantizer);
    730  UpdateVpxConfiguration(stream_idx_cfg_0);
    731  vpx_configs_[0].rc_dropframe_thresh = FrameDropThreshold(stream_idx_cfg_0);
    732 
    733  for (size_t i = 1; i < encoders_.size(); ++i) {
    734    const size_t stream_idx = encoders_.size() - 1 - i;
    735    memcpy(&vpx_configs_[i], &vpx_configs_[0], sizeof(vpx_configs_[0]));
    736 
    737    vpx_configs_[i].g_w = inst->simulcastStream[stream_idx].width;
    738    vpx_configs_[i].g_h = inst->simulcastStream[stream_idx].height;
    739 
    740    // Use 1 thread for lower resolutions.
    741    vpx_configs_[i].g_threads = 1;
    742 
    743    vpx_configs_[i].rc_dropframe_thresh = FrameDropThreshold(stream_idx);
    744 
    745    // Setting alignment to 32 - as that ensures at least 16 for all
    746    // planes (32 for Y, 16 for U,V). Libvpx sets the requested stride for
    747    // the y plane, but only half of it to the u and v planes.
    748    libvpx_->img_alloc(
    749        &raw_images_[i], pixel_format, inst->simulcastStream[stream_idx].width,
    750        inst->simulcastStream[stream_idx].height, kVp832ByteAlign);
    751    SetStreamState(stream_bitrates[stream_idx] > 0, stream_idx);
    752    vpx_configs_[i].rc_target_bitrate = stream_bitrates[stream_idx];
    753    if (stream_bitrates[stream_idx] > 0) {
    754      uint32_t maxFramerate = inst->simulcastStream[stream_idx].maxFramerate;
    755      if (!maxFramerate) {
    756        maxFramerate = inst->maxFramerate;
    757      }
    758      frame_buffer_controller_->OnRatesUpdated(
    759          stream_idx, allocation.GetTemporalLayerAllocation(stream_idx),
    760          maxFramerate);
    761    }
    762    frame_buffer_controller_->SetQpLimits(stream_idx,
    763                                          vpx_configs_[i].rc_min_quantizer,
    764                                          vpx_configs_[i].rc_max_quantizer);
    765    UpdateVpxConfiguration(stream_idx);
    766  }
    767 
    768  corruption_detection_settings_generator_ =
    769      std::make_unique<CorruptionDetectionSettingsGenerator>(
    770          CorruptionDetectionSettingsGenerator::ExponentialFunctionParameters{
    771              .scale = 0.006,
    772              .exponent_factor = 0.01857465,
    773              .exponent_offset = -4.26470513},
    774          CorruptionDetectionSettingsGenerator::ErrorThresholds{.luma = 5,
    775                                                                .chroma = 6},
    776          // On large changes, increase error threshold by one and std_dev
    777          // by 2.0. Trigger on qp changes larger than 30, and fade down the
    778          // adjusted value over 4 * num_temporal_layers to allow the base layer
          // to converge somewhat. Set a minimum filter size of 1.25 since some
          // outlier pixels deviate a bit from truth even at very low QP,
          // seemingly by bleeding into neighbours.
    782          CorruptionDetectionSettingsGenerator::TransientParameters{
    783              .max_qp = 127,
    784              .keyframe_threshold_offset = 1,
    785              .keyframe_stddev_offset = 2.0,
    786              .keyframe_offset_duration_frames =
    787                  std::max(1,
    788                           SimulcastUtility::NumberOfTemporalLayers(*inst, 0)) *
    789                  4,
    790              .large_qp_change_threshold = 30,
    791              .std_dev_lower_bound = 1.25});
    792 
    793  return InitAndSetControlSettings();
    794 }
    795 
    796 int LibvpxVp8Encoder::GetCpuSpeed(int width, int height) {
    797 #if defined(MOBILE_ARM) || defined(WEBRTC_ARCH_MIPS)
    798  // On mobile platform, use a lower speed setting for lower resolutions for
    799  // CPUs with 4 or more cores.
    800  RTC_DCHECK_GT(number_of_cores_, 0);
    801 
    802  if (number_of_cores_ <= 3)
    803    return -12;
    804 
    805  if (width * height <= 352 * 288)
    806    return -8;
    807  else if (width * height <= 640 * 480)
    808    return -10;
    809  else
    810    return -12;
    811 #else
    812  // For non-ARM, increase encoding complexity (i.e., use lower speed setting)
    813  // if resolution is below CIF. Otherwise, keep the default/user setting
    814  // (`cpu_speed_default_`) set on InitEncode via VP8().complexity.
    815  if (width * height < 352 * 288)
    816    return (cpu_speed_default_ < -4) ? -4 : cpu_speed_default_;
    817  else
    818    return cpu_speed_default_;
    819 #endif
    820 }
    821 
// Decides how many encoder threads to use, based on resolution, core count,
// and platform. Note the interleaved preprocessor structure below: the
// `if (android_specific_threading_settings_)` brace is opened and closed in
// separate #if blocks, so on Android the mobile heuristic is conditional on
// that flag while on MIPS it applies unconditionally.
int LibvpxVp8Encoder::NumberOfThreads(int width, int height, int cpus) {
#if defined(WEBRTC_ANDROID)
  if (android_specific_threading_settings_) {
#endif
#if defined(WEBRTC_ANDROID) || defined(WEBRTC_ARCH_MIPS)
    // Mobile heuristic: cap at 3 threads for >= 320x180 on 4+ core devices.
    if (width * height >= 320 * 180) {
      if (cpus >= 4) {
        // 3 threads for CPUs with 4 and more cores since most of times only 4
        // cores will be active.
        return 3;
      } else if (cpus == 3 || cpus == 2) {
        return 2;
      } else {
        return 1;
      }
    }
    return 1;
#if defined(WEBRTC_ANDROID)
  }
#endif
#elif defined(WEBRTC_IOS)
  // iOS: a field trial may cap the thread count; when the trial is active,
  // use min(cpus, cap) for anything at or above 320x180.
  std::string trial_string =
      env_.field_trials().Lookup(kVP8IosMaxNumberOfThreadFieldTrial);
  FieldTrialParameter<int> max_thread_number(
      kVP8IosMaxNumberOfThreadFieldTrialParameter, 0);
  ParseFieldTrial({&max_thread_number}, trial_string);
  if (max_thread_number.Get() > 0) {
    if (width * height < 320 * 180) {
      return 1;  // Use single thread for small screens
    }
    // thread number must be less than or equal to the number of CPUs.
    return std::min(cpus, max_thread_number.Get());
  }
#endif  // defined(WEBRTC_IOS)
  // Desktop fallback (also reached on Android when
  // `android_specific_threading_settings_` is false).
  if (width * height >= 1920 * 1080 && cpus > 8) {
    return 8;  // 8 threads for 1080p on high perf machines.
  } else if (width * height > 1280 * 960 && cpus >= 6) {
    // 3 threads for 1080p.
    return 3;
  } else if (width * height > 640 * 480 && cpus >= 3) {
    // Default 2 threads for qHD/HD, but allow 3 if core count is high enough,
    // as this will allow more margin for high-core/low clock machines or if
    // not built with highest optimization.
    if (cpus >= 6) {
      return 3;
    }
    return 2;
  }

  // 1 thread for VGA or less.
  return 1;
}
    874 
    875 int LibvpxVp8Encoder::InitAndSetControlSettings() {
    876  vpx_codec_flags_t flags = 0;
    877  flags |= VPX_CODEC_USE_OUTPUT_PARTITION;
    878 
    879  if (encoders_.size() > 1) {
    880    int error = libvpx_->codec_enc_init_multi(
    881        &encoders_[0], vpx_codec_vp8_cx(), &vpx_configs_[0], encoders_.size(),
    882        flags, &downsampling_factors_[0]);
    883    if (error) {
    884      return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
    885    }
    886  } else {
    887    if (libvpx_->codec_enc_init(&encoders_[0], vpx_codec_vp8_cx(),
    888                                &vpx_configs_[0], flags)) {
    889      return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
    890    }
    891  }
    892  // Enable denoising for the highest resolution stream, and for
    893  // the second highest resolution if we are doing more than 2
    894  // spatial layers/streams.
    895  // TODO(holmer): Investigate possibility of adding a libvpx API
    896  // for getting the denoised frame from the encoder and using that
    897  // when encoding lower resolution streams. Would it work with the
    898  // multi-res encoding feature?
    899 #if defined(MOBILE_ARM) || defined(WEBRTC_ARCH_MIPS)
    900  denoiserState denoiser_state = kDenoiserOnYOnly;
    901 #else
    902  denoiserState denoiser_state = kDenoiserOnAdaptive;
    903 #endif
    904  libvpx_->codec_control(
    905      &encoders_[0], VP8E_SET_NOISE_SENSITIVITY,
    906      codec_.VP8()->denoisingOn ? denoiser_state : kDenoiserOff);
    907  if (encoders_.size() > 2) {
    908    libvpx_->codec_control(
    909        &encoders_[1], VP8E_SET_NOISE_SENSITIVITY,
    910        codec_.VP8()->denoisingOn ? denoiser_state : kDenoiserOff);
    911  }
    912  for (size_t i = 0; i < encoders_.size(); ++i) {
    913    // Allow more screen content to be detected as static.
    914    libvpx_->codec_control(
    915        &(encoders_[i]), VP8E_SET_STATIC_THRESHOLD,
    916        codec_.mode == VideoCodecMode::kScreensharing ? 100u : 1u);
    917    libvpx_->codec_control(&(encoders_[i]), VP8E_SET_CPUUSED, cpu_speed_[i]);
    918    libvpx_->codec_control(
    919        &(encoders_[i]), VP8E_SET_TOKEN_PARTITIONS,
    920        static_cast<vp8e_token_partitions>(kTokenPartitions));
    921    libvpx_->codec_control(&(encoders_[i]), VP8E_SET_MAX_INTRA_BITRATE_PCT,
    922                           rc_max_intra_target_);
    923    // VP8E_SET_SCREEN_CONTENT_MODE 2 = screen content with more aggressive
    924    // rate control (drop frames on large target bitrate overshoot)
    925    libvpx_->codec_control(
    926        &(encoders_[i]), VP8E_SET_SCREEN_CONTENT_MODE,
    927        codec_.mode == VideoCodecMode::kScreensharing ? 2u : 0u);
    928  }
    929  inited_ = true;
    930  return WEBRTC_VIDEO_CODEC_OK;
    931 }
    932 
    933 uint32_t LibvpxVp8Encoder::MaxIntraTarget(uint32_t optimalBuffersize) {
    934  // Set max to the optimal buffer level (normalized by target BR),
    935  // and scaled by a scalePar.
    936  // Max target size = scalePar * optimalBufferSize * targetBR[Kbps].
    937  // This values is presented in percentage of perFrameBw:
    938  // perFrameBw = targetBR[Kbps] * 1000 / frameRate.
    939  // The target in % is as follows:
    940 
    941  float scalePar = 0.5;
    942  uint32_t targetPct = optimalBuffersize * scalePar * codec_.maxFramerate / 10;
    943 
    944  // Don't go below 3 times the per frame bandwidth.
    945  const uint32_t minIntraTh = 300;
    946  return (targetPct < minIntraTh) ? minIntraTh : targetPct;
    947 }
    948 
    949 uint32_t LibvpxVp8Encoder::FrameDropThreshold(size_t spatial_idx) const {
    950  if (!codec_.GetFrameDropEnabled()) {
    951    return 0;
    952  }
    953 
    954  // If temporal layers are used, they get to override the frame dropping
    955  // setting, as eg. ScreenshareLayers does not work as intended with frame
    956  // dropping on and DefaultTemporalLayers will have performance issues with
    957  // frame dropping off.
    958  RTC_DCHECK(frame_buffer_controller_);
    959  RTC_DCHECK_LT(spatial_idx, frame_buffer_controller_->StreamCount());
    960  return frame_buffer_controller_->SupportsEncoderFrameDropping(spatial_idx)
    961             ? 30
    962             : 0;
    963 }
    964 
    965 size_t LibvpxVp8Encoder::SteadyStateSize(int sid, int tid) {
    966  const int encoder_id = encoders_.size() - 1 - sid;
    967  size_t bitrate_bps;
    968  float fps;
    969  if ((SimulcastUtility::IsConferenceModeScreenshare(codec_) && sid == 0) ||
    970      vpx_configs_[encoder_id].ts_number_layers <= 1) {
    971    // In conference screenshare there's no defined per temporal layer bitrate
    972    // and framerate.
    973    bitrate_bps = vpx_configs_[encoder_id].rc_target_bitrate * 1000;
    974    fps = codec_.maxFramerate;
    975  } else {
    976    bitrate_bps = vpx_configs_[encoder_id].ts_target_bitrate[tid] * 1000;
    977    fps = codec_.maxFramerate /
    978          fmax(vpx_configs_[encoder_id].ts_rate_decimator[tid], 1.0);
    979    if (tid > 0) {
    980      // Layer bitrate and fps are counted as a partial sums.
    981      bitrate_bps -= vpx_configs_[encoder_id].ts_target_bitrate[tid - 1] * 1000;
    982      fps = codec_.maxFramerate /
    983            fmax(vpx_configs_[encoder_id].ts_rate_decimator[tid - 1], 1.0);
    984    }
    985  }
    986 
    987  if (fps < 1e-9)
    988    return 0;
    989  return static_cast<size_t>(
    990      bitrate_bps / (8 * fps) *
    991          (100 - variable_framerate_screenshare::kUndershootPct) / 100 +
    992      0.5);
    993 }
    994 
    995 bool LibvpxVp8Encoder::UpdateVpxConfiguration(size_t stream_index) {
    996  RTC_DCHECK(frame_buffer_controller_);
    997 
    998  const size_t config_index = vpx_configs_.size() - 1 - stream_index;
    999 
   1000  RTC_DCHECK_LT(config_index, config_overrides_.size());
   1001  Vp8EncoderConfig* config = &config_overrides_[config_index];
   1002 
   1003  const Vp8EncoderConfig new_config =
   1004      frame_buffer_controller_->UpdateConfiguration(stream_index);
   1005 
   1006  if (new_config.reset_previous_configuration_overrides) {
   1007    *config = new_config;
   1008    return true;
   1009  }
   1010 
   1011  const bool changes_made = MaybeExtendVp8EncoderConfig(new_config, config);
   1012 
   1013  // Note that overrides must be applied even if they haven't changed.
   1014  RTC_DCHECK_LT(config_index, vpx_configs_.size());
   1015  vpx_codec_enc_cfg_t* vpx_config = &vpx_configs_[config_index];
   1016  ApplyVp8EncoderConfigToVpxConfig(*config, vpx_config);
   1017 
   1018  return changes_made;
   1019 }
   1020 
// Encodes `frame` across all configured simulcast encoders. Handles key frame
// requests, steady-state screenshare frame dropping, temporal-layer frame
// configs, and pushes per-stream flags/configs to libvpx before encoding.
// Returns a WEBRTC_VIDEO_CODEC_* status code.
int LibvpxVp8Encoder::Encode(const VideoFrame& frame,
                             const std::vector<VideoFrameType>* frame_types) {
  RTC_DCHECK_EQ(frame.width(), codec_.width);
  RTC_DCHECK_EQ(frame.height(), codec_.height);

  if (!inited_)
    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  if (encoded_complete_callback_ == nullptr)
    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;

  // A key frame is needed if any active (sending) stream has a pending
  // internal request, or if the caller asked for one via `frame_types`.
  bool key_frame_requested = false;
  for (size_t i = 0; i < key_frame_request_.size() && i < send_stream_.size();
       ++i) {
    if (key_frame_request_[i] && send_stream_[i]) {
      key_frame_requested = true;
      break;
    }
  }
  if (!key_frame_requested && frame_types) {
    for (size_t i = 0; i < frame_types->size() && i < send_stream_.size();
         ++i) {
      if ((*frame_types)[i] == VideoFrameType::kVideoFrameKey &&
          send_stream_[i]) {
        key_frame_requested = true;
        break;
      }
    }
  }

  // Check if any encoder risks timing out and force a frame in that case.
  // Each override is an RAII object that temporarily disables frame dropping
  // for one encoder for the scope of this call.
  // NOTE(review): local variable with a member-style trailing underscore.
  std::vector<FrameDropConfigOverride> frame_drop_overrides_;
  if (max_frame_drop_interval_.has_value()) {
    Timestamp now = Timestamp::Micros(frame.timestamp_us());
    for (size_t i = 0; i < send_stream_.size(); ++i) {
      if (send_stream_[i] && FrameDropThreshold(i) > 0 &&
          last_encoder_output_time_[i].IsFinite() &&
          (now - last_encoder_output_time_[i]) >= *max_frame_drop_interval_) {
        RTC_LOG(LS_INFO) << "Forcing frame to avoid timeout for stream " << i;
        size_t encoder_idx = encoders_.size() - 1 - i;
        frame_drop_overrides_.emplace_back(libvpx_.get(),
                                           &encoders_[encoder_idx],
                                           &vpx_configs_[encoder_idx], 0);
      }
    }
  }

  // Steady-state screenshare: if nothing changed in the frame and we've been
  // at steady state for a while, let the framerate controller drop frames —
  // unless an anti-timeout override above forces this frame through.
  if (frame.update_rect().IsEmpty() && num_steady_state_frames_ >= 3 &&
      !key_frame_requested) {
    if (framerate_controller_.DropFrame(frame.rtp_timestamp() /
                                        kRtpTicksPerMs) &&
        frame_drop_overrides_.empty()) {
      return WEBRTC_VIDEO_CODEC_OK;
    }
    framerate_controller_.AddFrame(frame.rtp_timestamp() / kRtpTicksPerMs);
  }

  // Ask the frame buffer controller for each stream's frame config; any
  // stream may escalate to a key frame or request a drop. Retransmission
  // permission is expected to agree across streams (DCHECKed below).
  bool send_key_frame = key_frame_requested;
  bool drop_frame = false;
  bool retransmission_allowed = true;
  Vp8FrameConfig tl_configs[kMaxSimulcastStreams];
  for (size_t i = 0; i < encoders_.size(); ++i) {
    tl_configs[i] =
        frame_buffer_controller_->NextFrameConfig(i, frame.rtp_timestamp());
    send_key_frame |= tl_configs[i].IntraFrame();
    drop_frame |= tl_configs[i].drop_frame;
    RTC_DCHECK(i == 0 ||
               retransmission_allowed == tl_configs[i].retransmission_allowed);
    retransmission_allowed = tl_configs[i].retransmission_allowed;
  }

  // A key frame request overrides any drop decision.
  if (drop_frame && !send_key_frame) {
    return WEBRTC_VIDEO_CODEC_OK;
  }

  vpx_enc_frame_flags_t flags[kMaxSimulcastStreams];
  for (size_t i = 0; i < encoders_.size(); ++i) {
    flags[i] = send_key_frame ? VPX_EFLAG_FORCE_KF : EncodeFlags(tl_configs[i]);
  }

#if defined(WEBRTC_ENCODER_PSNR_STATS) && defined(VPX_EFLAG_CALCULATE_PSNR)
  // Optionally request PSNR stats from libvpx for sampled frames.
  if (calculate_psnr_ && psnr_frame_sampler_.ShouldBeSampled(frame)) {
    for (size_t i = 0; i < encoders_.size(); ++i) {
      flags[i] |= VPX_EFLAG_CALCULATE_PSNR;
    }
  }
#endif

  // Scale and map buffers and set `raw_images_` to hold pointers to the result.
  // Because `raw_images_` are set to hold pointers to the prepared buffers, we
  // need to keep these buffers alive through reference counting until after
  // encoding is complete.
  std::vector<scoped_refptr<VideoFrameBuffer>> prepared_buffers =
      PrepareBuffers(frame.video_frame_buffer());
  if (prepared_buffers.empty()) {
    return WEBRTC_VIDEO_CODEC_ERROR;
  }
  // RAII guard: on scope exit, clear the plane pointers in `raw_images_[0]`
  // (they point into `prepared_buffers_`, which is released with the guard).
  struct CleanUpOnExit {
    explicit CleanUpOnExit(
        vpx_image_t* raw_image,
        std::vector<scoped_refptr<VideoFrameBuffer>> prepared_buffers)
        : raw_image_(raw_image),
          prepared_buffers_(std::move(prepared_buffers)) {}
    ~CleanUpOnExit() {
      raw_image_->planes[VPX_PLANE_Y] = nullptr;
      raw_image_->planes[VPX_PLANE_U] = nullptr;
      raw_image_->planes[VPX_PLANE_V] = nullptr;
    }
    vpx_image_t* raw_image_;
    std::vector<scoped_refptr<VideoFrameBuffer>> prepared_buffers_;
  } clean_up_on_exit(&raw_images_[0], std::move(prepared_buffers));

  if (send_key_frame) {
    // Adapt the size of the key frame when in screenshare with 1 temporal
    // layer.
    if (encoders_.size() == 1 &&
        codec_.mode == VideoCodecMode::kScreensharing &&
        codec_.VP8()->numberOfTemporalLayers <= 1) {
      const uint32_t forceKeyFrameIntraTh = 100;
      libvpx_->codec_control(&(encoders_[0]), VP8E_SET_MAX_INTRA_BITRATE_PCT,
                             forceKeyFrameIntraTh);
    }

    // All pending per-stream key frame requests are satisfied by this frame.
    std::fill(key_frame_request_.begin(), key_frame_request_.end(), false);
  }

  // Set the encoder frame flags and temporal layer_id for each spatial stream.
  // Note that streams are defined starting from lowest resolution at
  // position 0 to highest resolution at position |encoders_.size() - 1|,
  // whereas `encoder_` is from highest to lowest resolution.
  for (size_t i = 0; i < encoders_.size(); ++i) {
    const size_t stream_idx = encoders_.size() - 1 - i;

    if (UpdateVpxConfiguration(stream_idx)) {
      if (libvpx_->codec_enc_config_set(&encoders_[i], &vpx_configs_[i]))
        return WEBRTC_VIDEO_CODEC_ERROR;
    }

    libvpx_->codec_control(&encoders_[i], VP8E_SET_FRAME_FLAGS,
                           static_cast<int>(flags[stream_idx]));
    libvpx_->codec_control(&encoders_[i], VP8E_SET_TEMPORAL_LAYER_ID,
                           tl_configs[i].encoder_layer_id);
  }
  // TODO(holmer): Ideally the duration should be the timestamp diff of this
  // frame and the next frame to be encoded, which we don't have. Instead we
  // would like to use the duration of the previous frame. Unfortunately the
  // rate control seems to be off with that setup. Using the average input
  // frame rate to calculate an average duration for now.
  RTC_DCHECK_GT(codec_.maxFramerate, 0);
  uint32_t duration = kVideoPayloadTypeFrequency / codec_.maxFramerate;

  int error = WEBRTC_VIDEO_CODEC_OK;
  int num_tries = 0;
  // If the first try returns WEBRTC_VIDEO_CODEC_TARGET_BITRATE_OVERSHOOT
  // the frame must be reencoded with the same parameters again because
  // target bitrate is exceeded and encoder state has been reset.
  while (num_tries == 0 ||
         (num_tries == 1 &&
          error == WEBRTC_VIDEO_CODEC_TARGET_BITRATE_OVERSHOOT)) {
    ++num_tries;
    // Note we must pass 0 for `flags` field in encode call below since they are
    // set above in `libvpx_interface_->vpx_codec_control_` function for each
    // encoder/spatial layer.
    error = libvpx_->codec_encode(&encoders_[0], &raw_images_[0], timestamp_,
                                  duration, 0, VPX_DL_REALTIME);
    // Reset specific intra frame thresholds, following the key frame.
    if (send_key_frame) {
      libvpx_->codec_control(&(encoders_[0]), VP8E_SET_MAX_INTRA_BITRATE_PCT,
                             rc_max_intra_target_);
    }
    if (error)
      return WEBRTC_VIDEO_CODEC_ERROR;
    // Examines frame timestamps only.
    error = GetEncodedPartitions(frame, retransmission_allowed);
  }
  // TODO(sprang): Shouldn't we use the frame timestamp instead?
  timestamp_ += duration;
  return error;
}
   1199 
   1200 void LibvpxVp8Encoder::PopulateCodecSpecific(CodecSpecificInfo* codec_specific,
   1201                                             const vpx_codec_cx_pkt_t& pkt,
   1202                                             int stream_idx,
   1203                                             int encoder_idx,
   1204                                             uint32_t timestamp) {
   1205  RTC_DCHECK(codec_specific);
   1206  codec_specific->codecType = kVideoCodecVP8;
   1207  codec_specific->codecSpecific.VP8.keyIdx =
   1208      kNoKeyIdx;  // TODO(hlundin) populate this
   1209  codec_specific->codecSpecific.VP8.nonReference =
   1210      (pkt.data.frame.flags & VPX_FRAME_IS_DROPPABLE) != 0;
   1211 
   1212  int qp = 0;
   1213  vpx_codec_control(&encoders_[encoder_idx], VP8E_GET_LAST_QUANTIZER_64, &qp);
   1214  bool is_keyframe = (pkt.data.frame.flags & VPX_FRAME_IS_KEY) != 0;
   1215  frame_buffer_controller_->OnEncodeDone(stream_idx, timestamp,
   1216                                         encoded_images_[encoder_idx].size(),
   1217                                         is_keyframe, qp, codec_specific);
   1218  if (is_keyframe && codec_specific->template_structure != std::nullopt) {
   1219    // Number of resolutions must match number of spatial layers, VP8 structures
   1220    // expected to use single spatial layer. Templates must be ordered by
   1221    // spatial_id, so assumption there is exactly one spatial layer is same as
   1222    // assumption last template uses spatial_id = 0.
   1223    // This check catches potential scenario where template_structure is shared
   1224    // across multiple vp8 streams and they are distinguished using spatial_id.
   1225    // Assigning single resolution doesn't support such scenario, i.e. assumes
   1226    // vp8 simulcast is sent using multiple ssrcs.
   1227    RTC_DCHECK(!codec_specific->template_structure->templates.empty());
   1228    RTC_DCHECK_EQ(
   1229        codec_specific->template_structure->templates.back().spatial_id, 0);
   1230    codec_specific->template_structure->resolutions = {
   1231        RenderResolution(pkt.data.frame.width[0], pkt.data.frame.height[0])};
   1232  }
   1233  switch (vpx_configs_[encoder_idx].ts_number_layers) {
   1234    case 1:
   1235      codec_specific->scalability_mode = ScalabilityMode::kL1T1;
   1236      break;
   1237    case 2:
   1238      codec_specific->scalability_mode = ScalabilityMode::kL1T2;
   1239      break;
   1240    case 3:
   1241      codec_specific->scalability_mode = ScalabilityMode::kL1T3;
   1242      break;
   1243  }
   1244 }
   1245 
// Drains encoded packets from every libvpx encoder, assembles them into
// `encoded_images_`, and delivers completed frames to the registered
// callback. Returns WEBRTC_VIDEO_CODEC_OK, or
// WEBRTC_VIDEO_CODEC_TARGET_BITRATE_OVERSHOOT when an active stream that
// does not support encoder frame dropping produced no output (caller will
// re-encode).
int LibvpxVp8Encoder::GetEncodedPartitions(const VideoFrame& input_image,
                                           bool retransmission_allowed) {
  // `stream_idx` counts down (lowest resolution last) while `encoder_idx`
  // counts up, mirroring the reversed ordering of the two arrays.
  int stream_idx = static_cast<int>(encoders_.size()) - 1;
  int result = WEBRTC_VIDEO_CODEC_OK;
  for (size_t encoder_idx = 0; encoder_idx < encoders_.size();
       ++encoder_idx, --stream_idx) {
    vpx_codec_iter_t iter = nullptr;
    encoded_images_[encoder_idx].set_size(0);
    encoded_images_[encoder_idx].set_psnr(std::nullopt);
    encoded_images_[encoder_idx]._frameType = VideoFrameType::kVideoFrameDelta;
    CodecSpecificInfo codec_specific;
    const vpx_codec_cx_pkt_t* pkt = nullptr;

    // First pass: sum the frame packet sizes so the output buffer can be
    // allocated exactly once.
    size_t encoded_size = 0;
    while ((pkt = libvpx_->codec_get_cx_data(&encoders_[encoder_idx], &iter)) !=
           nullptr) {
      if (pkt->kind == VPX_CODEC_CX_FRAME_PKT) {
        encoded_size += pkt->data.frame.sz;
      }
    }

    auto buffer = EncodedImageBuffer::Create(encoded_size);

    // Second pass: copy packet payloads into the buffer and collect metadata.
    iter = nullptr;
    size_t encoded_pos = 0;
    while ((pkt = libvpx_->codec_get_cx_data(&encoders_[encoder_idx], &iter)) !=
           nullptr) {
      switch (pkt->kind) {
        case VPX_CODEC_CX_FRAME_PKT: {
          RTC_CHECK_LE(encoded_pos + pkt->data.frame.sz, buffer->size());
          memcpy(&buffer->data()[encoded_pos], pkt->data.frame.buf,
                 pkt->data.frame.sz);
          encoded_pos += pkt->data.frame.sz;
          break;
        }
        case VPX_CODEC_PSNR_PKT:
          // PSNR index: 0: total, 1: Y, 2: U, 3: V
          encoded_images_[encoder_idx].set_psnr(
              EncodedImage::Psnr({.y = pkt->data.psnr.psnr[1],
                                  .u = pkt->data.psnr.psnr[2],
                                  .v = pkt->data.psnr.psnr[3]}));
          break;
        default:
          break;
      }
      // End of frame: a non-fragment frame packet finalizes this encoder's
      // image; remaining packets (if any) are not consumed this iteration.
      if (pkt->kind == VPX_CODEC_CX_FRAME_PKT &&
          (pkt->data.frame.flags & VPX_FRAME_IS_FRAGMENT) == 0) {
        // check if encoded frame is a key frame
        if (pkt->data.frame.flags & VPX_FRAME_IS_KEY) {
          encoded_images_[encoder_idx]._frameType =
              VideoFrameType::kVideoFrameKey;
        }
        encoded_images_[encoder_idx].SetEncodedData(buffer);
        encoded_images_[encoder_idx].set_size(encoded_pos);
        encoded_images_[encoder_idx].SetSimulcastIndex(stream_idx);
        PopulateCodecSpecific(&codec_specific, *pkt, stream_idx, encoder_idx,
                              input_image.rtp_timestamp());
        if (codec_specific.codecSpecific.VP8.temporalIdx != kNoTemporalIdx) {
          encoded_images_[encoder_idx].SetTemporalIndex(
              codec_specific.codecSpecific.VP8.temporalIdx);
        }
        break;
      }
    }
    // Propagate input frame timing/metadata to the encoded image.
    encoded_images_[encoder_idx].SetRtpTimestamp(input_image.rtp_timestamp());
    encoded_images_[encoder_idx].SetPresentationTimestamp(
        input_image.presentation_timestamp());
    encoded_images_[encoder_idx].SetColorSpace(input_image.color_space());
    encoded_images_[encoder_idx].SetRetransmissionAllowed(
        retransmission_allowed);

    if (send_stream_[stream_idx]) {
      if (encoded_images_[encoder_idx].size() > 0) {
        TRACE_COUNTER_ID1("webrtc", "EncodedFrameSize", encoder_idx,
                          encoded_images_[encoder_idx].size());
        encoded_images_[encoder_idx]._encodedHeight =
            codec_.simulcastStream[stream_idx].height;
        encoded_images_[encoder_idx]._encodedWidth =
            codec_.simulcastStream[stream_idx].width;
        int qp_128 = -1;
        libvpx_->codec_control(&encoders_[encoder_idx], VP8E_GET_LAST_QUANTIZER,
                               &qp_128);
        encoded_images_[encoder_idx].qp_ = qp_128;
        // Record output time so Encode() can force frames on timeout.
        last_encoder_output_time_[stream_idx] =
            Timestamp::Micros(input_image.timestamp_us());

        encoded_images_[encoder_idx].set_corruption_detection_filter_settings(
            corruption_detection_settings_generator_->OnFrame(
                encoded_images_[encoder_idx].FrameType() ==
                    VideoFrameType::kVideoFrameKey,
                qp_128));

        encoded_complete_callback_->OnEncodedImage(encoded_images_[encoder_idx],
                                                   &codec_specific);
        // Track steady-state convergence (low QP and small frames) used by
        // the screenshare frame-drop logic in Encode().
        const size_t steady_state_size = SteadyStateSize(
            stream_idx, codec_specific.codecSpecific.VP8.temporalIdx);
        if (qp_128 > kVp8SteadyStateQpThreshold ||
            encoded_images_[encoder_idx].size() > steady_state_size) {
          num_steady_state_frames_ = 0;
        } else {
          ++num_steady_state_frames_;
        }
      } else if (!frame_buffer_controller_->SupportsEncoderFrameDropping(
                     stream_idx)) {
        result = WEBRTC_VIDEO_CODEC_TARGET_BITRATE_OVERSHOOT;
        if (encoded_images_[encoder_idx].size() == 0) {
          // Dropped frame that will be re-encoded.
          frame_buffer_controller_->OnFrameDropped(stream_idx,
                                                   input_image.rtp_timestamp());
        }
      }
    }
  }
  return result;
}
   1362 
// Reports the encoder's capabilities and current configuration to callers.
// Ordering matters below: bitrate limits from `encoder_info_override_` are
// assigned after `resolution_bitrate_limits_`, so the override wins when
// both are present.
VideoEncoder::EncoderInfo LibvpxVp8Encoder::GetEncoderInfo() const {
  EncoderInfo info;
  info.supports_native_handle = false;
  info.implementation_name = "libvpx";
  info.has_trusted_rate_controller =
      rate_control_settings_.LibvpxVp8TrustedRateController();
  info.is_hardware_accelerated = false;
  info.supports_simulcast = true;
  if (!resolution_bitrate_limits_.empty()) {
    info.resolution_bitrate_limits = resolution_bitrate_limits_;
  }
  if (encoder_info_override_.requested_resolution_alignment()) {
    info.requested_resolution_alignment =
        *encoder_info_override_.requested_resolution_alignment();
    info.apply_alignment_to_all_simulcast_layers =
        encoder_info_override_.apply_alignment_to_all_simulcast_layers();
  }
  // Override takes precedence over the locally configured limits above.
  if (!encoder_info_override_.resolution_bitrate_limits().empty()) {
    info.resolution_bitrate_limits =
        encoder_info_override_.resolution_bitrate_limits();
  }

  // QP-driven downscaling is only advertised for a single active stream,
  // when frame dropping is on (or configs not yet set up) and automatic
  // resize was requested in the codec settings.
  const bool enable_scaling =
      num_active_streams_ == 1 &&
      (vpx_configs_.empty() || vpx_configs_[0].rc_dropframe_thresh > 0) &&
      codec_.VP8().automaticResizeOn;

  info.scaling_settings = enable_scaling
                              ? VideoEncoder::ScalingSettings(
                                    kLowVp8QpThreshold, kHighVp8QpThreshold)
                              : VideoEncoder::ScalingSettings::kOff;
  if (rate_control_settings_.LibvpxVp8MinPixels()) {
    info.scaling_settings.min_pixels_per_frame =
        rate_control_settings_.LibvpxVp8MinPixels().value();
  }
  info.preferred_pixel_formats = {VideoFrameBuffer::Type::kI420,
                                  VideoFrameBuffer::Type::kNV12};

  if (inited_) {
    // `encoder_idx` is libvpx index where 0 is highest resolution.
    // `si` is simulcast index, where 0 is lowest resolution.
    for (size_t si = 0, encoder_idx = encoders_.size() - 1;
         si < encoders_.size(); ++si, --encoder_idx) {
      info.fps_allocation[si].clear();
      if ((codec_.numberOfSimulcastStreams > si &&
           !codec_.simulcastStream[si].active) ||
          (si == 0 && SimulcastUtility::IsConferenceModeScreenshare(codec_))) {
        // No defined frame rate fractions if not active or if using
        // ScreenshareLayers, leave vector empty and continue;
        continue;
      }
      if (vpx_configs_[encoder_idx].ts_number_layers <= 1) {
        // Single temporal layer: the full framerate goes to layer 0.
        info.fps_allocation[si].push_back(EncoderInfo::kMaxFramerateFraction);
      } else {
        // Translate each temporal layer's rate decimator into a rounded
        // fraction of the maximum framerate.
        for (size_t ti = 0; ti < vpx_configs_[encoder_idx].ts_number_layers;
             ++ti) {
          RTC_DCHECK_GT(vpx_configs_[encoder_idx].ts_rate_decimator[ti], 0);
          info.fps_allocation[si].push_back(saturated_cast<uint8_t>(
              EncoderInfo::kMaxFramerateFraction /
                  vpx_configs_[encoder_idx].ts_rate_decimator[ti] +
              0.5));
        }
      }
    }

    // raw_images_[0] holds the highest-resolution (input) image.
    info.mapped_resolution =
        VideoEncoder::Resolution(raw_images_[0].d_w, raw_images_[0].d_h);

    if (codec_.mode == VideoCodecMode::kScreensharing) {
      info.min_qp = kScreenshareMinQp;
    }
  }

  return info;
}
   1438 
// Stores a non-owning pointer to the sink that will be handed each encoded
// image (via OnEncodedImage). Always succeeds.
int LibvpxVp8Encoder::RegisterEncodeCompleteCallback(
    EncodedImageCallback* callback) {
  encoded_complete_callback_ = callback;
  return WEBRTC_VIDEO_CODEC_OK;
}
   1444 
   1445 void LibvpxVp8Encoder::MaybeUpdatePixelFormat(vpx_img_fmt fmt) {
   1446  RTC_DCHECK(!raw_images_.empty());
   1447  if (raw_images_[0].fmt == fmt) {
   1448    RTC_DCHECK(std::all_of(
   1449        std::next(raw_images_.begin()), raw_images_.end(),
   1450        [fmt](const vpx_image_t& raw_img) { return raw_img.fmt == fmt; }))
   1451        << "Not all raw images had the right format!";
   1452    return;
   1453  }
   1454  RTC_LOG(LS_INFO) << "Updating vp8 encoder pixel format to "
   1455                   << (fmt == VPX_IMG_FMT_NV12 ? "NV12" : "I420");
   1456  for (size_t i = 0; i < raw_images_.size(); ++i) {
   1457    vpx_image_t& img = raw_images_[i];
   1458    auto d_w = img.d_w;
   1459    auto d_h = img.d_h;
   1460    libvpx_->img_free(&img);
   1461    // First image is wrapping the input frame, the rest are allocated.
   1462    if (i == 0) {
   1463      libvpx_->img_wrap(&img, fmt, d_w, d_h, 1, nullptr);
   1464    } else {
   1465      libvpx_->img_alloc(&img, fmt, d_w, d_h, kVp832ByteAlign);
   1466    }
   1467  }
   1468 }
   1469 
// Maps or converts `buffer` to a format libvpx accepts (I420/NV12, with
// I420A tolerated as I420 plus an ignored alpha plane), wires the result
// into raw_images_[0], and produces downscaled versions for each additional
// simulcast encoder in raw_images_[1..]. Returns the buffers that back
// `raw_images_` so the caller can keep them alive while encoding; returns
// an empty vector on any mapping/conversion failure.
std::vector<scoped_refptr<VideoFrameBuffer>> LibvpxVp8Encoder::PrepareBuffers(
    scoped_refptr<VideoFrameBuffer> buffer) {
  RTC_DCHECK_EQ(buffer->width(), raw_images_[0].d_w);
  RTC_DCHECK_EQ(buffer->height(), raw_images_[0].d_h);
  absl::InlinedVector<VideoFrameBuffer::Type, kMaxPreferredPixelFormats>
      supported_formats = {VideoFrameBuffer::Type::kI420,
                           VideoFrameBuffer::Type::kNV12};

  scoped_refptr<VideoFrameBuffer> mapped_buffer;
  if (buffer->type() != VideoFrameBuffer::Type::kNative) {
    // `buffer` is already mapped.
    mapped_buffer = buffer;
  } else {
    // Attempt to map to one of the supported formats.
    mapped_buffer = buffer->GetMappedFrameBuffer(supported_formats);
  }
  if (!mapped_buffer ||
      (absl::c_find(supported_formats, mapped_buffer->type()) ==
           supported_formats.end() &&
       mapped_buffer->type() != VideoFrameBuffer::Type::kI420A)) {
    // Unknown pixel format or unable to map, convert to I420 and prepare that
    // buffer instead to ensure Scale() is safe to use.
    auto converted_buffer = buffer->ToI420();
    if (!converted_buffer) {
      RTC_LOG(LS_ERROR) << "Failed to convert "
                        << VideoFrameBufferTypeToString(buffer->type())
                        << " image to I420. Can't encode frame.";
      return {};
    }
    RTC_CHECK(converted_buffer->type() == VideoFrameBuffer::Type::kI420 ||
              converted_buffer->type() == VideoFrameBuffer::Type::kI420A);

    // Because `buffer` had to be converted, use `converted_buffer` instead...
    buffer = mapped_buffer = converted_buffer;
  }

  // Maybe update pixel format.
  absl::InlinedVector<VideoFrameBuffer::Type, kMaxPreferredPixelFormats>
      mapped_type = {mapped_buffer->type()};
  switch (mapped_buffer->type()) {
    case VideoFrameBuffer::Type::kI420:
    case VideoFrameBuffer::Type::kI420A:
      MaybeUpdatePixelFormat(VPX_IMG_FMT_I420);
      break;
    case VideoFrameBuffer::Type::kNV12:
      MaybeUpdatePixelFormat(VPX_IMG_FMT_NV12);
      break;
    default:
      RTC_DCHECK_NOTREACHED();
  }

  // Prepare `raw_images_` from `mapped_buffer` and, if simulcast, scaled
  // versions of `buffer`.
  std::vector<scoped_refptr<VideoFrameBuffer>> prepared_buffers;
  SetRawImagePlanes(&raw_images_[0], mapped_buffer.get());
  prepared_buffers.push_back(mapped_buffer);
  for (size_t i = 1; i < encoders_.size(); ++i) {
    // Native buffers should implement optimized scaling and is the preferred
    // buffer to scale. But if the buffer isn't native, it should be cheaper to
    // scale from the previously prepared buffer which is smaller than `buffer`.
    VideoFrameBuffer* buffer_to_scale =
        buffer->type() == VideoFrameBuffer::Type::kNative
            ? buffer.get()
            : prepared_buffers.back().get();

    auto scaled_buffer =
        buffer_to_scale->Scale(raw_images_[i].d_w, raw_images_[i].d_h);
    if (scaled_buffer->type() == VideoFrameBuffer::Type::kNative) {
      // A native scale result still needs mapping to the CPU-accessible
      // format chosen above. DCHECK flags this in debug builds; release
      // builds fail the frame gracefully.
      auto mapped_scaled_buffer =
          scaled_buffer->GetMappedFrameBuffer(mapped_type);
      RTC_DCHECK(mapped_scaled_buffer) << "Unable to map the scaled buffer.";
      if (!mapped_scaled_buffer) {
        RTC_LOG(LS_ERROR) << "Failed to map scaled "
                          << VideoFrameBufferTypeToString(scaled_buffer->type())
                          << " image to "
                          << VideoFrameBufferTypeToString(mapped_buffer->type())
                          << ". Can't encode frame.";
        return {};
      }
      scaled_buffer = mapped_scaled_buffer;
    }
    // All layers must share a compatible pixel layout with raw_images_,
    // which was configured from `mapped_buffer`'s type.
    if (!IsCompatibleVideoFrameBufferType(scaled_buffer->type(),
                                          mapped_buffer->type())) {
      RTC_LOG(LS_ERROR) << "When scaling "
                        << VideoFrameBufferTypeToString(buffer_to_scale->type())
                        << ", the image was unexpectedly converted to "
                        << VideoFrameBufferTypeToString(scaled_buffer->type())
                        << " instead of "
                        << VideoFrameBufferTypeToString(mapped_buffer->type())
                        << ". Can't encode frame.";
      RTC_DCHECK_NOTREACHED()
          << "Scaled buffer type "
          << VideoFrameBufferTypeToString(scaled_buffer->type())
          << " is not compatible with mapped buffer type "
          << VideoFrameBufferTypeToString(mapped_buffer->type());
      return {};
    }
    SetRawImagePlanes(&raw_images_[i], scaled_buffer.get());
    prepared_buffers.push_back(scaled_buffer);
  }
  return prepared_buffers;
}
   1572 
   1573 }  // namespace webrtc