tor-browser

The Tor Browser
git clone https://git.dasho.dev/tor-browser.git
Log | Files | Refs | README | LICENSE

simulcast_encoder_adapter.cc (41209B)


      1 /*
      2 *  Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
      3 *
      4 *  Use of this source code is governed by a BSD-style license
      5 *  that can be found in the LICENSE file in the root of the source
      6 *  tree. An additional intellectual property rights grant can be found
      7 *  in the file PATENTS.  All contributing project authors may
      8 *  be found in the AUTHORS file in the root of the source tree.
      9 */
     10 
     11 #include "media/engine/simulcast_encoder_adapter.h"
     12 
     13 #include <algorithm>
     14 #include <cstdint>
     15 #include <cstdio>
     16 #include <cstring>
     17 #include <iterator>
     18 #include <memory>
     19 #include <numeric>
     20 #include <optional>
     21 #include <string>
     22 #include <tuple>
     23 #include <utility>
     24 #include <vector>
     25 
     26 #include "absl/algorithm/container.h"
     27 #include "absl/base/nullability.h"
     28 #include "api/array_view.h"
     29 #include "api/environment/environment.h"
     30 #include "api/fec_controller_override.h"
     31 #include "api/field_trials_view.h"
     32 #include "api/scoped_refptr.h"
     33 #include "api/sequence_checker.h"
     34 #include "api/units/data_rate.h"
     35 #include "api/units/timestamp.h"
     36 #include "api/video/encoded_image.h"
     37 #include "api/video/video_bitrate_allocation.h"
     38 #include "api/video/video_bitrate_allocator.h"
     39 #include "api/video/video_codec_constants.h"
     40 #include "api/video/video_codec_type.h"
     41 #include "api/video/video_frame.h"
     42 #include "api/video/video_frame_buffer.h"
     43 #include "api/video/video_frame_type.h"
     44 #include "api/video/video_rotation.h"
     45 #include "api/video_codecs/scalability_mode.h"
     46 #include "api/video_codecs/sdp_video_format.h"
     47 #include "api/video_codecs/simulcast_stream.h"
     48 #include "api/video_codecs/video_codec.h"
     49 #include "api/video_codecs/video_encoder.h"
     50 #include "api/video_codecs/video_encoder_factory.h"
     51 #include "api/video_codecs/video_encoder_software_fallback_wrapper.h"
     52 #include "common_video/framerate_controller.h"
     53 #include "media/base/sdp_video_format_utils.h"
     54 #include "modules/video_coding/include/video_error_codes.h"
     55 #include "modules/video_coding/include/video_error_codes_utils.h"
     56 #include "modules/video_coding/utility/simulcast_rate_allocator.h"
     57 #include "rtc_base/checks.h"
     58 #include "rtc_base/experiments/rate_control_settings.h"
     59 #include "rtc_base/logging.h"
     60 #include "rtc_base/strings/str_join.h"
     61 #include "rtc_base/strings/string_builder.h"
     62 
     63 namespace webrtc {
     64 namespace {
     65 
     66 // Max qp for lowest spatial resolution when doing simulcast.
        // NOTE(review): the use site is outside this chunk (presumably when
        // building the lowest stream's codec settings) — confirm in
        // MakeStreamCodec before relying on this description.
     67 const unsigned int kLowestResMaxQp = 45;
     68 
     69 uint32_t SumStreamMaxBitrate(int streams, const VideoCodec& codec) {
     70  uint32_t bitrate_sum = 0;
     71  for (int i = 0; i < streams; ++i) {
     72    bitrate_sum += codec.simulcastStream[i].maxBitrate;
     73  }
     74  return bitrate_sum;
     75 }
     76 
     77 int CountAllStreams(const VideoCodec& codec) {
     78  int total_streams_count =
     79      codec.numberOfSimulcastStreams < 1 ? 1 : codec.numberOfSimulcastStreams;
     80  uint32_t simulcast_max_bitrate =
     81      SumStreamMaxBitrate(total_streams_count, codec);
     82  if (simulcast_max_bitrate == 0) {
     83    total_streams_count = 1;
     84  }
     85  return total_streams_count;
     86 }
     87 
     88 int CountActiveStreams(const VideoCodec& codec) {
     89  if (codec.numberOfSimulcastStreams < 1) {
     90    return 1;
     91  }
     92  int total_streams_count = CountAllStreams(codec);
     93  int active_streams_count = 0;
     94  for (int i = 0; i < total_streams_count; ++i) {
     95    if (codec.simulcastStream[i].active) {
     96      ++active_streams_count;
     97    }
     98  }
     99  return active_streams_count;
    100 }
    101 
    102 int VerifyCodec(const VideoCodec* codec_settings) {
    103  if (codec_settings == nullptr) {
    104    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
    105  }
    106  if (codec_settings->maxFramerate < 1) {
    107    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
    108  }
    109  // allow zero to represent an unspecified maxBitRate
    110  if (codec_settings->maxBitrate > 0 &&
    111      codec_settings->startBitrate > codec_settings->maxBitrate) {
    112    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
    113  }
    114  if (codec_settings->width <= 1 || codec_settings->height <= 1) {
    115    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
    116  }
    117  if (codec_settings->codecType == kVideoCodecVP8 &&
    118      codec_settings->VP8().automaticResizeOn &&
    119      CountActiveStreams(*codec_settings) > 1) {
    120    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
    121  }
    122  return WEBRTC_VIDEO_CODEC_OK;
    123 }
    124 
    125 bool StreamQualityCompare(const SimulcastStream& a, const SimulcastStream& b) {
    126  return std::tie(a.height, a.width, a.maxBitrate, a.maxFramerate) <
    127         std::tie(b.height, b.width, b.maxBitrate, b.maxFramerate);
    128 }
    129 
    130 void GetLowestAndHighestQualityStreamIndixes(
    131    ArrayView<const SimulcastStream> streams,
    132    int* lowest_quality_stream_idx,
    133    int* highest_quality_stream_idx) {
    134  const auto lowest_highest_quality_streams =
    135      absl::c_minmax_element(streams, StreamQualityCompare);
    136  *lowest_quality_stream_idx =
    137      std::distance(streams.begin(), lowest_highest_quality_streams.first);
    138  *highest_quality_stream_idx =
    139      std::distance(streams.begin(), lowest_highest_quality_streams.second);
    140 }
    141 
    142 std::vector<uint32_t> GetStreamStartBitratesKbps(const Environment& env,
    143                                                 const VideoCodec& codec) {
    144  std::vector<uint32_t> start_bitrates;
    145  VideoBitrateAllocation allocation =
    146      SimulcastRateAllocator(env, codec)
    147          .Allocate(VideoBitrateAllocationParameters(codec.startBitrate * 1000,
    148                                                     codec.maxFramerate));
    149 
    150  int total_streams_count = CountAllStreams(codec);
    151  for (int i = 0; i < total_streams_count; ++i) {
    152    uint32_t stream_bitrate = allocation.GetSpatialLayerSum(i) / 1000;
    153    start_bitrates.push_back(stream_bitrate);
    154  }
    155  return start_bitrates;
    156 }
    157 
    158 }  // namespace
    159 
        // Bundles an owned VideoEncoder with cached EncoderInfo snapshots for
        // its primary and fallback implementations and the SDP format it was
        // created for. Takes ownership of `encoder`.
    160 SimulcastEncoderAdapter::EncoderContext::EncoderContext(
    161    std::unique_ptr<VideoEncoder> encoder,
    162    bool prefer_temporal_support,
    163    VideoEncoder::EncoderInfo primary_info,
    164    VideoEncoder::EncoderInfo fallback_info,
    165    SdpVideoFormat video_format)
    166    : encoder_(std::move(encoder)),
    167      prefer_temporal_support_(prefer_temporal_support),
    168      primary_info_(std::move(primary_info)),
    169      fallback_info_(std::move(fallback_info)),
    170      video_format_(std::move(video_format)) {}
    171 
    172 void SimulcastEncoderAdapter::EncoderContext::Release() {
    173  if (encoder_) {
    174    encoder_->Release();
    175    encoder_->RegisterEncodeCompleteCallback(nullptr);
    176  }
    177 }
    178 
        // Per-stream state: the encoder, an optional framerate limiter, the
        // stream's index/resolution and pause/keyframe flags. A non-null
        // `parent` means encode-complete callbacks are intercepted by this
        // StreamContext (so the parent can rewrite the simulcast index);
        // a null `parent` leaves the encoder's callback untouched.
    179 SimulcastEncoderAdapter::StreamContext::StreamContext(
    180    SimulcastEncoderAdapter* parent,
    181    std::unique_ptr<EncoderContext> encoder_context,
    182    std::unique_ptr<FramerateController> framerate_controller,
    183    int stream_idx,
    184    uint16_t width,
    185    uint16_t height,
    186    bool is_paused)
    187    : parent_(parent),
    188      encoder_context_(std::move(encoder_context)),
    189      framerate_controller_(std::move(framerate_controller)),
    190      stream_idx_(stream_idx),
    191      width_(width),
    192      height_(height),
    193      is_keyframe_needed_(false),
    194      is_paused_(is_paused) {
    195  if (parent_) {
    196    encoder_context_->encoder().RegisterEncodeCompleteCallback(this);
    197  }
    198 }
    199 
        // Move constructor. Re-registers `this` as the encode-complete
        // callback when intercepting (non-null parent), since the callback
        // pointer stored in the encoder would otherwise dangle on `rhs`.
    200 SimulcastEncoderAdapter::StreamContext::StreamContext(StreamContext&& rhs)
    201    : parent_(rhs.parent_),
    202      encoder_context_(std::move(rhs.encoder_context_)),
    203      framerate_controller_(std::move(rhs.framerate_controller_)),
    204      stream_idx_(rhs.stream_idx_),
    205      width_(rhs.width_),
    206      height_(rhs.height_),
    207      is_keyframe_needed_(rhs.is_keyframe_needed_),
    208      is_paused_(rhs.is_paused_) {
    209  if (parent_) {
    210    encoder_context_->encoder().RegisterEncodeCompleteCallback(this);
    211  }
    212 }
    213 
    214 SimulcastEncoderAdapter::StreamContext::~StreamContext() {
    215  if (encoder_context_) {
    216    encoder_context_->Release();
    217  }
    218 }
    219 
        // Rvalue-qualified: callable only on an expiring StreamContext.
        // Releases the encoder (detaching its callback) and transfers
        // ownership of the EncoderContext to the caller, leaving
        // `encoder_context_` null so the destructor does nothing.
    220 std::unique_ptr<SimulcastEncoderAdapter::EncoderContext>
    221 SimulcastEncoderAdapter::StreamContext::ReleaseEncoderContext() && {
    222  encoder_context_->Release();
    223  return std::move(encoder_context_);
    224 }
    225 
        // Clears the pending keyframe request and tells the framerate
        // controller to keep this frame (so a requested keyframe is never
        // dropped by the rate limiter).
    226 void SimulcastEncoderAdapter::StreamContext::OnKeyframe(Timestamp timestamp) {
    227  is_keyframe_needed_ = false;
    228  if (framerate_controller_) {
        // us() * 1000 converts the microsecond timestamp to the controller's
        // expected unit (presumably nanoseconds — matches ShouldDropFrame).
    229    framerate_controller_->KeepFrame(timestamp.us() * 1000);
    230  }
    231 }
    232 
    233 bool SimulcastEncoderAdapter::StreamContext::ShouldDropFrame(
    234    Timestamp timestamp) {
    235  if (!framerate_controller_) {
    236    return false;
    237  }
    238  return framerate_controller_->ShouldDropFrame(timestamp.us() * 1000);
    239 }
    240 
        // Encode-complete callback for intercepted (upper) streams: forwards
        // to the parent adapter together with this stream's index so the
        // simulcast index can be stamped onto the encoded image.
    241 EncodedImageCallback::Result
    242 SimulcastEncoderAdapter::StreamContext::OnEncodedImage(
    243    const EncodedImage& encoded_image,
    244    const CodecSpecificInfo* codec_specific_info) {
    245  RTC_CHECK(parent_);  // If null, this method should never be called.
    246  return parent_->OnEncodedImage(stream_idx_, encoded_image,
    247                                 codec_specific_info);
    248 }
    249 
        // Drop notification from the wrapped encoder; forwarded to the parent
        // adapter with this stream's index. Only registered when intercepting.
    250 void SimulcastEncoderAdapter::StreamContext::OnDroppedFrame(
    251    DropReason /*reason*/) {
    252  RTC_CHECK(parent_);  // If null, this method should never be called.
    253  parent_->OnDroppedFrame(stream_idx_);
    254 }
    255 
        // Constructs the adapter. `primary_factory` must be non-null
        // (DCHECKed); `fallback_factory` may be null, in which case no
        // software fallback wrapping is available. All field-trial-derived
        // flags are sampled once here and remain fixed for the adapter's
        // lifetime.
    256 SimulcastEncoderAdapter::SimulcastEncoderAdapter(
    257    const Environment& env,
    258    VideoEncoderFactory* absl_nonnull primary_factory,
    259    VideoEncoderFactory* absl_nullable fallback_factory,
    260    const SdpVideoFormat& format)
    261    : env_(env),
    262      inited_(0),
    263      primary_encoder_factory_(primary_factory),
    264      fallback_encoder_factory_(fallback_factory),
    265      video_format_(format),
    266      total_streams_count_(0),
    267      bypass_mode_(false),
    268      encoded_complete_callback_(nullptr),
    269      boost_base_layer_quality_(
    270          RateControlSettings(env_.field_trials()).Vp8BoostBaseLayerQuality()),
    271      prefer_temporal_support_on_base_layer_(env_.field_trials().IsEnabled(
    272          "WebRTC-Video-PreferTemporalSupportOnBaseLayer")),
    273      per_layer_pli_(SupportsPerLayerPictureLossIndication(format.parameters)),
    274      drop_unaligned_resolution_(!env_.field_trials().IsDisabled(
    275          "WebRTC-SimulcastEncoderAdapter-DropUnalignedResolution")),
    276      encoder_info_override_(env.field_trials()) {
    277  RTC_DCHECK(primary_factory);
    278 
    279  // The adapter is typically created on the worker thread, but operated on
    280  // the encoder task queue.
    281  encoder_queue_.Detach();
    282 }
    283 
        // Destructor. Callers must have invoked Release() first (DCHECKed via
        // !Initialized()); any encoders cached for reuse are freed here.
    284 SimulcastEncoderAdapter::~SimulcastEncoderAdapter() {
    285  RTC_DCHECK_RUN_ON(&encoder_queue_);
    286  RTC_DCHECK(!Initialized());
    287  DestroyStoredEncoders();
    288 }
    289 
        // The adapter deliberately does not forward FEC controller overrides;
        // the call is a no-op.
    290 void SimulcastEncoderAdapter::SetFecControllerOverride(
    291    FecControllerOverride* /*fec_controller_override*/) {
    292  // Ignored.
    293 }
    294 
        // Tears down all stream contexts, parking their encoder instances in
        // `cached_encoder_contexts_` so a subsequent InitEncode() can reuse
        // them, and marks the adapter uninitialized. Always succeeds.
    295 int SimulcastEncoderAdapter::Release() {
    296  RTC_DCHECK_RUN_ON(&encoder_queue_);
    297 
    298  while (!stream_contexts_.empty()) {
    299    // Move the encoder instances and put it on the `cached_encoder_contexts_`
    300    // where it may possibly be reused from (ordering does not matter).
    301    cached_encoder_contexts_.push_front(
    302        std::move(stream_contexts_.back()).ReleaseEncoderContext());
    303    stream_contexts_.pop_back();
    304  }
    305 
    306  bypass_mode_ = false;
    307 
    308  // It's legal to move the encoder to another queue now.
    309  encoder_queue_.Detach();
    310 
    311  inited_.store(0);
    312 
    313  return WEBRTC_VIDEO_CODEC_OK;
    314 }
    315 
    316 int SimulcastEncoderAdapter::InitEncode(
    317    const VideoCodec* codec_settings,
    318    const VideoEncoder::Settings& settings) {
    319  RTC_DCHECK_RUN_ON(&encoder_queue_);
    320 
    321  if (settings.number_of_cores < 1) {
    322    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
    323  }
    324 
    325  int ret = VerifyCodec(codec_settings);
    326  if (ret < 0) {
    327    return ret;
    328  }
    329 
    330  Release();
    331 
    332  codec_ = *codec_settings;
    333  total_streams_count_ = CountAllStreams(*codec_settings);
    334 
    335  bool is_legacy_singlecast = codec_.numberOfSimulcastStreams == 0;
    336  int lowest_quality_stream_idx = 0;
    337  int highest_quality_stream_idx = 0;
    338  if (!is_legacy_singlecast) {
    339    GetLowestAndHighestQualityStreamIndixes(
    340        ArrayView<SimulcastStream>(codec_.simulcastStream,
    341                                   total_streams_count_),
    342        &lowest_quality_stream_idx, &highest_quality_stream_idx);
    343  }
    344 
    345  std::unique_ptr<EncoderContext> encoder_context = FetchOrCreateEncoderContext(
    346      /*is_lowest_quality_stream=*/(
    347          is_legacy_singlecast ||
    348          codec_.simulcastStream[lowest_quality_stream_idx].active),
    349      /*stream_idx=*/is_legacy_singlecast
    350          ? std::nullopt
    351          : std::make_optional(lowest_quality_stream_idx));
    352  if (encoder_context == nullptr) {
    353    return WEBRTC_VIDEO_CODEC_MEMORY;
    354  }
    355 
    356  bool is_mixed_codec = codec_.IsMixedCodec();
    357 
    358  // Two distinct scenarios:
    359  // * Singlecast (total_streams_count == 1) or simulcast with simulcast-capable
    360  //   underlaying encoder implementation if active_streams_count > 1. SEA
    361  //   operates in bypass mode: original settings are passed to the underlaying
    362  //   encoder, frame encode complete callback is not intercepted.
    363  // * Multi-encoder simulcast or singlecast if layers are deactivated
    364  //   (active_streams_count >= 1). SEA creates N=active_streams_count encoders
    365  //   and configures each to produce a single stream.
    366 
    367  int active_streams_count = CountActiveStreams(*codec_settings);
    368  // If we only have a single active layer it is better to create an encoder
    369  // with only one configured layer than creating it with all-but-one disabled
    370  // layers because that way we control scaling.
    371  // The use of the nonstandard x-google-per-layer-pli fmtp parameter also
    372  // forces the use of SEA with separate encoders to support per-layer
    373  // handling of PLIs.
    374  bool separate_encoders_needed =
    375      is_mixed_codec ||
    376      !encoder_context->encoder().GetEncoderInfo().supports_simulcast ||
    377      active_streams_count == 1 || per_layer_pli_;
    378  RTC_LOG(LS_INFO) << "[SEA] InitEncode: total_streams_count: "
    379                   << total_streams_count_
    380                   << ", active_streams_count: " << active_streams_count
    381                   << ", separate_encoders_needed: "
    382                   << (separate_encoders_needed ? "true" : "false");
    383  // Singlecast or simulcast with simulcast-capable underlaying encoder.
    384  if (total_streams_count_ == 1 || !separate_encoders_needed) {
    385    RTC_LOG(LS_INFO) << "[SEA] InitEncode: Single-encoder mode";
    386    int result = encoder_context->encoder().InitEncode(&codec_, settings);
    387    if (result >= 0) {
    388      stream_contexts_.emplace_back(
    389          /*parent=*/nullptr, std::move(encoder_context),
    390          /*framerate_controller=*/nullptr, /*stream_idx=*/0, codec_.width,
    391          codec_.height, /*is_paused=*/active_streams_count == 0);
    392      bypass_mode_ = true;
    393 
    394      DestroyStoredEncoders();
    395      inited_.store(1);
    396      return WEBRTC_VIDEO_CODEC_OK;
    397    }
    398 
    399    encoder_context->Release();
    400    encoder_context->encoder().RegisterEncodeCompleteCallback(
    401        encoded_complete_callback_);
    402    if (total_streams_count_ == 1) {
    403      RTC_LOG(LS_ERROR) << "[SEA] InitEncode: failed with error code: "
    404                        << WebRtcVideoCodecErrorToString(ret);
    405      return ret;
    406    }
    407    RTC_LOG(LS_WARNING) << "[SEA] InitEncode: failed with error code: "
    408                        << WebRtcVideoCodecErrorToString(ret)
    409                        << ". Falling back to multi-encoder mode.";
    410  }
    411 
    412  // Multi-encoder simulcast or singlecast (deactivated layers).
    413  std::vector<uint32_t> stream_start_bitrate_kbps =
    414      GetStreamStartBitratesKbps(env_, codec_);
    415 
    416  for (int stream_idx = 0; stream_idx < total_streams_count_; ++stream_idx) {
    417    if (!is_legacy_singlecast && !codec_.simulcastStream[stream_idx].active) {
    418      continue;
    419    }
    420 
    421    if (encoder_context == nullptr || is_mixed_codec) {
    422      encoder_context = FetchOrCreateEncoderContext(
    423          /*is_lowest_quality_stream=*/stream_idx == lowest_quality_stream_idx,
    424          stream_idx);
    425    }
    426    if (encoder_context == nullptr) {
    427      Release();
    428      return WEBRTC_VIDEO_CODEC_MEMORY;
    429    }
    430 
    431    VideoCodec stream_codec = MakeStreamCodec(
    432        codec_, stream_idx, stream_start_bitrate_kbps[stream_idx],
    433        /*is_lowest_quality_stream=*/stream_idx == lowest_quality_stream_idx,
    434        /*is_highest_quality_stream=*/stream_idx == highest_quality_stream_idx);
    435 
    436    RTC_LOG(LS_INFO) << "[SEA] Multi-encoder mode: initializing stream: "
    437                     << stream_idx << ", active: "
    438                     << (codec_.simulcastStream[stream_idx].active ? "true"
    439                                                                   : "false");
    440    int result = encoder_context->encoder().InitEncode(&stream_codec, settings);
    441    if (result < 0) {
    442      encoder_context.reset();
    443      Release();
    444      RTC_LOG(LS_ERROR) << "[SEA] InitEncode: failed with error code: "
    445                        << WebRtcVideoCodecErrorToString(ret);
    446      return result;
    447    }
    448 
    449    // Intercept frame encode complete callback only for upper streams, where
    450    // we need to set a correct stream index. Set `parent` to nullptr for the
    451    // lowest stream to bypass the callback.
    452    SimulcastEncoderAdapter* parent = stream_idx > 0 ? this : nullptr;
    453 
    454    bool is_paused = stream_start_bitrate_kbps[stream_idx] == 0;
    455    stream_contexts_.emplace_back(
    456        parent, std::move(encoder_context),
    457        std::make_unique<FramerateController>(stream_codec.maxFramerate),
    458        stream_idx, stream_codec.width, stream_codec.height, is_paused);
    459    encoder_context = nullptr;
    460  }
    461 
    462  // To save memory, don't store encoders that we don't use.
    463  DestroyStoredEncoders();
    464 
    465  inited_.store(1);
    466  return WEBRTC_VIDEO_CODEC_OK;
    467 }
    468 
    469 int SimulcastEncoderAdapter::Encode(
    470    const VideoFrame& input_image,
    471    const std::vector<VideoFrameType>* frame_types) {
    472  RTC_DCHECK_RUN_ON(&encoder_queue_);
    473 
    474  if (!Initialized()) {
    475    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
    476  }
    477  if (encoded_complete_callback_ == nullptr) {
    478    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
    479  }
    480 
    481  if (encoder_info_override_.requested_resolution_alignment()) {
    482    const int alignment =
    483        *encoder_info_override_.requested_resolution_alignment();
    484    if (input_image.width() % alignment != 0 ||
    485        input_image.height() % alignment != 0) {
    486      RTC_LOG(LS_WARNING) << "Frame " << input_image.width() << "x"
    487                          << input_image.height() << " not divisible by "
    488                          << alignment;
    489      return drop_unaligned_resolution_ ? WEBRTC_VIDEO_CODEC_NO_OUTPUT
    490                                        : WEBRTC_VIDEO_CODEC_ERROR;
    491    }
    492    if (encoder_info_override_.apply_alignment_to_all_simulcast_layers()) {
    493      for (const auto& layer : stream_contexts_) {
    494        if (layer.width() % alignment != 0 || layer.height() % alignment != 0) {
    495          RTC_LOG(LS_WARNING)
    496              << "Codec " << layer.width() << "x" << layer.height()
    497              << " not divisible by " << alignment;
    498          return drop_unaligned_resolution_ ? WEBRTC_VIDEO_CODEC_NO_OUTPUT
    499                                            : WEBRTC_VIDEO_CODEC_ERROR;
    500        }
    501      }
    502    }
    503  }
    504 
    505  bool is_keyframe_needed = false;
    506  for (const auto& layer : stream_contexts_) {
    507    if (layer.is_keyframe_needed()) {
    508      // This is legacy behavior, generating a keyframe on all layers
    509      // when generating one for a layer that became active for the first time
    510      // or after being disabled.
    511      is_keyframe_needed = true;
    512      break;
    513    }
    514  }
    515 
    516  // Temporary thay may hold the result of texture to i420 buffer conversion.
    517  scoped_refptr<VideoFrameBuffer> src_buffer;
    518  int src_width = input_image.width();
    519  int src_height = input_image.height();
    520 
    521  for (auto& layer : stream_contexts_) {
    522    // Don't encode frames in resolutions that we don't intend to send.
    523    if (layer.is_paused()) {
    524      continue;
    525    }
    526 
    527    // Convert timestamp from RTP 90kHz clock.
    528    const Timestamp frame_timestamp =
    529        Timestamp::Micros((1000 * input_image.rtp_timestamp()) / 90);
    530 
    531    // If adapter is passed through and only one sw encoder does simulcast,
    532    // frame types for all streams should be passed to the encoder unchanged.
    533    // Otherwise a single per-encoder frame type is passed.
    534    std::vector<VideoFrameType> stream_frame_types(
    535        bypass_mode_
    536            ? std::max<unsigned char>(codec_.numberOfSimulcastStreams, 1)
    537            : 1,
    538        VideoFrameType::kVideoFrameDelta);
    539 
    540    bool keyframe_requested = false;
    541    if (is_keyframe_needed) {
    542      std::fill(stream_frame_types.begin(), stream_frame_types.end(),
    543                VideoFrameType::kVideoFrameKey);
    544      keyframe_requested = true;
    545    } else if (frame_types) {
    546      if (bypass_mode_) {
    547        // In bypass mode, we effectively pass on frame_types.
    548        RTC_DCHECK_EQ(frame_types->size(), stream_frame_types.size());
    549        stream_frame_types = *frame_types;
    550        keyframe_requested =
    551            absl::c_any_of(*frame_types, [](const VideoFrameType frame_type) {
    552              return frame_type == VideoFrameType::kVideoFrameKey;
    553            });
    554      } else {
    555        size_t stream_idx = static_cast<size_t>(layer.stream_idx());
    556        if (frame_types->size() >= stream_idx &&
    557            (*frame_types)[stream_idx] == VideoFrameType::kVideoFrameKey) {
    558          stream_frame_types[0] = VideoFrameType::kVideoFrameKey;
    559          keyframe_requested = true;
    560        }
    561      }
    562    }
    563    if (keyframe_requested) {
    564      layer.OnKeyframe(frame_timestamp);
    565    } else if (layer.ShouldDropFrame(frame_timestamp)) {
    566      continue;
    567    }
    568 
    569    // If scaling isn't required, because the input resolution
    570    // matches the destination or the input image is empty (e.g.
    571    // a keyframe request for encoders with internal camera
    572    // sources) or the source image has a native handle, pass the image on
    573    // directly. Otherwise, we'll scale it to match what the encoder expects
    574    // (below).
    575    // For texture frames, the underlying encoder is expected to be able to
    576    // correctly sample/scale the source texture.
    577    // TODO(perkj): ensure that works going forward, and figure out how this
    578    // affects webrtc:5683.
    579    if ((layer.width() == src_width && layer.height() == src_height) ||
    580        (input_image.video_frame_buffer()->type() ==
    581             VideoFrameBuffer::Type::kNative &&
    582         layer.encoder().GetEncoderInfo().supports_native_handle)) {
    583      int ret = layer.encoder().Encode(input_image, &stream_frame_types);
    584      if (ret != WEBRTC_VIDEO_CODEC_OK) {
    585        return ret;
    586      }
    587    } else {
    588      if (src_buffer == nullptr) {
    589        src_buffer = input_image.video_frame_buffer();
    590      }
    591      scoped_refptr<VideoFrameBuffer> dst_buffer =
    592          src_buffer->Scale(layer.width(), layer.height());
    593      if (!dst_buffer) {
    594        RTC_LOG(LS_ERROR) << "Failed to scale video frame";
    595        return WEBRTC_VIDEO_CODEC_ENCODER_FAILURE;
    596      }
    597 
    598      // UpdateRect is not propagated to lower simulcast layers currently.
    599      // TODO(ilnik): Consider scaling UpdateRect together with the buffer.
    600      VideoFrame frame(input_image);
    601      frame.set_video_frame_buffer(dst_buffer);
    602      frame.set_rotation(kVideoRotation_0);
    603      frame.set_update_rect(VideoFrame::UpdateRect{.offset_x = 0,
    604                                                   .offset_y = 0,
    605                                                   .width = frame.width(),
    606                                                   .height = frame.height()});
    607      int ret = layer.encoder().Encode(frame, &stream_frame_types);
    608      if (ret != WEBRTC_VIDEO_CODEC_OK) {
    609        return ret;
    610      }
    611    }
    612  }
    613 
    614  return WEBRTC_VIDEO_CODEC_OK;
    615 }
    616 
        // Stores the output sink. The lowest stream's encoder delivers
        // directly to `callback`; upper streams keep routing through
        // StreamContext::OnEncodedImage so their simulcast index gets set.
    617 int SimulcastEncoderAdapter::RegisterEncodeCompleteCallback(
    618    EncodedImageCallback* callback) {
    619  RTC_DCHECK_RUN_ON(&encoder_queue_);
    620  encoded_complete_callback_ = callback;
    621  if (!stream_contexts_.empty() && stream_contexts_.front().stream_idx() == 0) {
    622    // Bypass frame encode complete callback for the lowest layer since there is
    623    // no need to override frame's spatial index.
    624    stream_contexts_.front().encoder().RegisterEncodeCompleteCallback(callback);
    625  }
    626  return WEBRTC_VIDEO_CODEC_OK;
    627 }
    628 
        // Propagates new rate-control settings to the underlying encoder(s).
        // In bypass mode the full allocation is forwarded unchanged. Otherwise
        // each stream's spatial-layer allocation is re-packed as spatial layer
        // 0 of a per-encoder allocation, link bandwidth is split
        // proportionally, and streams whose allocation drops to zero are
        // paused (a later non-zero allocation re-enables them with a forced
        // keyframe).
    629 void SimulcastEncoderAdapter::SetRates(
    630    const RateControlParameters& parameters) {
    631  RTC_DCHECK_RUN_ON(&encoder_queue_);
    632 
    633  if (!Initialized()) {
    634    RTC_LOG(LS_WARNING) << "SetRates while not initialized";
    635    return;
    636  }
    637 
    638  if (parameters.framerate_fps < 1.0) {
    639    RTC_LOG(LS_WARNING) << "Invalid framerate: " << parameters.framerate_fps;
    640    return;
    641  }
    642 
        // Round-to-nearest when storing the fps as an integer.
    643  codec_.maxFramerate = static_cast<uint32_t>(parameters.framerate_fps + 0.5);
    644 
    645  if (bypass_mode_) {
    646    stream_contexts_.front().encoder().SetRates(parameters);
    647    return;
    648  }
    649 
    650  for (StreamContext& layer_context : stream_contexts_) {
    651    int stream_idx = layer_context.stream_idx();
    652    uint32_t stream_bitrate_kbps =
    653        parameters.bitrate.GetSpatialLayerSum(stream_idx) / 1000;
    654 
    655    // Need a key frame if we have not sent this stream before.
    656    if (stream_bitrate_kbps > 0 && layer_context.is_paused()) {
    657      layer_context.set_is_keyframe_needed();
    658    }
    659    layer_context.set_is_paused(stream_bitrate_kbps == 0);
    660 
    661    // Slice the temporal layers out of the full allocation and pass it on to
    662    // the encoder handling the current simulcast stream.
    663    RateControlParameters stream_parameters = parameters;
    664    stream_parameters.bitrate = VideoBitrateAllocation();
    665    for (int i = 0; i < kMaxTemporalStreams; ++i) {
    666      if (parameters.bitrate.HasBitrate(stream_idx, i)) {
    667        stream_parameters.bitrate.SetBitrate(
    668            0, i, parameters.bitrate.GetBitrate(stream_idx, i));
    669      }
    670    }
    671 
    672    // Assign link allocation proportionally to spatial layer allocation.
    673    if (!parameters.bandwidth_allocation.IsZero() &&
    674        parameters.bitrate.get_sum_bps() > 0) {
    675      stream_parameters.bandwidth_allocation =
    676          DataRate::BitsPerSec((parameters.bandwidth_allocation.bps() *
    677                                stream_parameters.bitrate.get_sum_bps()) /
    678                               parameters.bitrate.get_sum_bps());
    679      // Make sure we don't allocate bandwidth lower than target bitrate.
    680      if (stream_parameters.bandwidth_allocation.bps() <
    681          stream_parameters.bitrate.get_sum_bps()) {
    682        stream_parameters.bandwidth_allocation =
    683            DataRate::BitsPerSec(stream_parameters.bitrate.get_sum_bps());
    684      }
    685    }
    686 
        // Each encoder runs at the slower of the global fps and its own cap.
    687    stream_parameters.framerate_fps = std::min<double>(
    688        parameters.framerate_fps,
    689        layer_context.target_fps().value_or(parameters.framerate_fps));
    690 
    691    layer_context.encoder().SetRates(stream_parameters);
    692  }
    693 }
    694 
    695 void SimulcastEncoderAdapter::OnPacketLossRateUpdate(float packet_loss_rate) {
    696  for (auto& c : stream_contexts_) {
    697    c.encoder().OnPacketLossRateUpdate(packet_loss_rate);
    698  }
    699 }
    700 
    701 void SimulcastEncoderAdapter::OnRttUpdate(int64_t rtt_ms) {
    702  for (auto& c : stream_contexts_) {
    703    c.encoder().OnRttUpdate(rtt_ms);
    704  }
    705 }
    706 
    707 void SimulcastEncoderAdapter::OnLossNotification(
    708    const LossNotification& loss_notification) {
    709  for (auto& c : stream_contexts_) {
    710    c.encoder().OnLossNotification(loss_notification);
    711  }
    712 }
    713 
    714 // TODO(brandtr): Add task checker to this member function, when all encoder
    715 // callbacks are coming in on the encoder queue.
        // Re-stamps an upper stream's encoded image with its simulcast index
        // (and, for mixed codecs, clears the spatial index) before forwarding
        // to the registered sink.
        // NOTE(review): `encoded_complete_callback_` is dereferenced without a
        // null check — callers are expected to have registered a sink before
        // encoding starts.
    716 EncodedImageCallback::Result SimulcastEncoderAdapter::OnEncodedImage(
    717    size_t stream_idx,
    718    const EncodedImage& encodedImage,
    719    const CodecSpecificInfo* codecSpecificInfo) {
    720  EncodedImage stream_image(encodedImage);
    721  CodecSpecificInfo stream_codec_specific = *codecSpecificInfo;
    722 
    723  stream_image.SetSimulcastIndex(stream_idx);
    724 
    725  if (codec_.IsMixedCodec()) {
    726    stream_image.SetSpatialIndex(std::nullopt);
    727  }
    728 
    729  return encoded_complete_callback_->OnEncodedImage(stream_image,
    730                                                    &stream_codec_specific);
    731 }
    732 
// Invoked when the sub-encoder for `stream_idx` drops a frame. Intentionally
// a no-op: drop notifications are currently not forwarded anywhere.
void SimulcastEncoderAdapter::OnDroppedFrame(size_t /* stream_idx */) {
  // Not yet implemented.
}
    736 
    737 bool SimulcastEncoderAdapter::Initialized() const {
    738  return inited_.load() == 1;
    739 }
    740 
    741 void SimulcastEncoderAdapter::DestroyStoredEncoders() {
    742  RTC_DCHECK_RUN_ON(&encoder_queue_);
    743  while (!cached_encoder_contexts_.empty()) {
    744    cached_encoder_contexts_.pop_back();
    745  }
    746 }
    747 
// Returns an encoder context for one simulcast stream, preferring a cached
// instance over creating a new one.
//
// `is_lowest_quality_stream` controls whether temporal-layer support is
// preferred on the base layer (only relevant when a fallback factory
// exists). `stream_idx`, when set, selects that stream's SDP format from
// `codec_.simulcastStream`; otherwise the adapter-wide `video_format_` is
// used. Returns nullptr if neither a primary nor a fallback encoder could
// be created.
std::unique_ptr<SimulcastEncoderAdapter::EncoderContext>
SimulcastEncoderAdapter::FetchOrCreateEncoderContext(
    bool is_lowest_quality_stream,
    std::optional<int> stream_idx) const {
  RTC_DCHECK_RUN_ON(&encoder_queue_);
  if (stream_idx) {
    RTC_CHECK_LT(*stream_idx, codec_.numberOfSimulcastStreams);
  }
  // Per-stream formats may differ in mixed-codec simulcast; fall back to the
  // adapter-wide format when the stream does not specify one.
  SdpVideoFormat video_format =
      stream_idx
          ? codec_.simulcastStream[*stream_idx].format.value_or(video_format_)
          : video_format_;
  bool prefer_temporal_support = fallback_encoder_factory_ != nullptr &&
                                 is_lowest_quality_stream &&
                                 prefer_temporal_support_on_base_layer_;

  // Toggling of `prefer_temporal_support` requires encoder recreation. Find
  // and reuse encoder with desired `prefer_temporal_support` and
  // `video_format`. Otherwise, if there is no such encoder in the cache, create
  // a new instance.
  auto encoder_context_iter =
      std::find_if(cached_encoder_contexts_.begin(),
                   cached_encoder_contexts_.end(), [&](auto& encoder_context) {
                     return encoder_context->prefer_temporal_support() ==
                                prefer_temporal_support &&
                            encoder_context->video_format() == video_format;
                   });

  std::unique_ptr<SimulcastEncoderAdapter::EncoderContext> encoder_context;
  if (encoder_context_iter != cached_encoder_contexts_.end()) {
    // Cache hit: take ownership of the matching context and drop it from the
    // cache.
    encoder_context = std::move(*encoder_context_iter);
    cached_encoder_contexts_.erase(encoder_context_iter);
  } else {
    std::unique_ptr<VideoEncoder> primary_encoder =
        primary_encoder_factory_->Create(env_, video_format);

    std::unique_ptr<VideoEncoder> fallback_encoder;
    if (fallback_encoder_factory_ != nullptr) {
      fallback_encoder = fallback_encoder_factory_->Create(env_, video_format);
    }

    std::unique_ptr<VideoEncoder> encoder;
    VideoEncoder::EncoderInfo primary_info;
    VideoEncoder::EncoderInfo fallback_info;

    if (primary_encoder != nullptr) {
      primary_info = primary_encoder->GetEncoderInfo();
      // When only the primary exists, both info slots describe it.
      fallback_info = primary_info;

      if (fallback_encoder == nullptr) {
        encoder = std::move(primary_encoder);
      } else {
        // Both encoders exist: wrap them so the software fallback can take
        // over if the primary encoder fails.
        encoder = CreateVideoEncoderSoftwareFallbackWrapper(
            env_, std::move(fallback_encoder), std::move(primary_encoder),
            prefer_temporal_support);
      }
    } else if (fallback_encoder != nullptr) {
      // Primary creation failed; run directly on the fallback encoder.
      RTC_LOG(LS_WARNING) << "Failed to create primary " << video_format.name
                          << " encoder. Use fallback encoder.";
      fallback_info = fallback_encoder->GetEncoderInfo();
      primary_info = fallback_info;
      encoder = std::move(fallback_encoder);
    } else {
      RTC_LOG(LS_ERROR) << "Failed to create primary and fallback "
                        << video_format.name << " encoders.";
      return nullptr;
    }

    encoder_context = std::make_unique<SimulcastEncoderAdapter::EncoderContext>(
        std::move(encoder), prefer_temporal_support, primary_info,
        fallback_info, std::move(video_format));
  }

  // Ensure the context delivers encoded frames to this adapter's current
  // callback (registered unconditionally, also for cached contexts).
  encoder_context->encoder().RegisterEncodeCompleteCallback(
      encoded_complete_callback_);
  return encoder_context;
}
    825 
// Builds the VideoCodec configuration for a single simulcast stream: a copy
// of the top-level `codec` with per-stream overrides (resolution, bitrate
// bounds, framerate, QP, temporal layers, scalability mode and
// codec-specific settings) taken from `codec.simulcastStream[stream_idx]`.
// `start_bitrate_kbps` seeds the stream's start bitrate, clamped to at least
// the stream's min bitrate. The lowest/highest-quality flags select
// resolution-dependent tuning.
VideoCodec SimulcastEncoderAdapter::MakeStreamCodec(
    const VideoCodec& codec,
    int stream_idx,
    uint32_t start_bitrate_kbps,
    bool is_lowest_quality_stream,
    bool is_highest_quality_stream) {
  VideoCodec codec_params = codec;
  const SimulcastStream& stream_params = codec.simulcastStream[stream_idx];
  // In mixed-codec simulcast a stream may use a different codec than the
  // top-level one; derive the type from the stream's own format if present.
  webrtc::VideoCodecType codec_type =
      stream_params.format
          ? PayloadStringToCodecType(stream_params.format->name)
          : codec.codecType;

  codec_params.codecType = codec_type;
  // Each sub-encoder encodes exactly one stream.
  codec_params.numberOfSimulcastStreams = 0;
  codec_params.width = stream_params.width;
  codec_params.height = stream_params.height;
  codec_params.maxBitrate = stream_params.maxBitrate;
  codec_params.minBitrate = stream_params.minBitrate;
  codec_params.maxFramerate = stream_params.maxFramerate;
  codec_params.qpMax = stream_params.qpMax;
  codec_params.active = stream_params.active;
  // By default, `scalability_mode` comes from SimulcastStream when
  // SimulcastEncoderAdapter is used. This allows multiple encodings of L1Tx,
  // but SimulcastStream currently does not support multiple spatial layers.
  std::optional<ScalabilityMode> scalability_mode =
      stream_params.GetScalabilityMode();
  // To support the full set of scalability modes in the event that this is the
  // only active encoding, prefer VideoCodec::GetScalabilityMode() if all other
  // encodings are inactive.
  bool only_active_stream = true;
  for (int i = 0; i < codec.numberOfSimulcastStreams; ++i) {
    if (i != stream_idx && codec.simulcastStream[i].active) {
      only_active_stream = false;
      break;
    }
  }
  if (codec.GetScalabilityMode().has_value() && only_active_stream) {
    scalability_mode = codec.GetScalabilityMode();
  }
  if (scalability_mode.has_value()) {
    codec_params.SetScalabilityMode(*scalability_mode);
  }
  // Settings that are based on stream/resolution.
  if (is_lowest_quality_stream) {
    // Settings for lowest spatial resolutions.
    if (codec.mode == VideoCodecMode::kRealtimeVideo &&
        boost_base_layer_quality_) {
      codec_params.qpMax = kLowestResMaxQp;
    }
  }

  // Ensure default codec specifics matches the correct codec type for this
  // stream. This can only differ in mixed-codec simulcast.
  if (codec_type != codec.codecType) {
    switch (codec_type) {
      case kVideoCodecVP8:
        *codec_params.VP8() = VideoEncoder::GetDefaultVp8Settings();
        break;
      case kVideoCodecVP9:
        *codec_params.VP9() = VideoEncoder::GetDefaultVp9Settings();
        break;
      case kVideoCodecH264:
        *codec_params.H264() = VideoEncoder::GetDefaultH264Settings();
        break;
      case kVideoCodecAV1:
        // AV1 has no GetDefault*Settings() helper; zero the struct instead.
        memset(codec_params.AV1(), 0, sizeof(VideoCodecAV1));
        break;
      default:
        break;
    }
  }

  // Codec-type-specific per-stream tuning.
  if (codec_type == kVideoCodecVP8) {
    codec_params.VP8()->numberOfTemporalLayers =
        stream_params.numberOfTemporalLayers;
    if (!is_highest_quality_stream) {
      // For resolutions below CIF, set the codec `complexity` parameter to
      // kComplexityHigher, which maps to cpu_used = -4.
      int pixels_per_frame = codec_params.width * codec_params.height;
      if (pixels_per_frame < 352 * 288) {
        codec_params.SetVideoEncoderComplexity(
            VideoCodecComplexity::kComplexityHigher);
      }
      // Turn off denoising for all streams but the highest resolution.
      codec_params.VP8()->denoisingOn = false;
    }
  } else if (codec_type == kVideoCodecH264) {
    codec_params.H264()->numberOfTemporalLayers =
        stream_params.numberOfTemporalLayers;
  } else if (codec_type == kVideoCodecVP9 && scalability_mode.has_value() &&
             !only_active_stream) {
    // If VP9 simulcast then explicitly set a single spatial layer for each
    // simulcast stream.
    codec_params.VP9()->numberOfSpatialLayers = 1;
    codec_params.VP9()->numberOfTemporalLayers =
        stream_params.GetNumberOfTemporalLayers();
    codec_params.VP9()->interLayerPred = InterLayerPredMode::kOff;
    codec_params.spatialLayers[0] = stream_params;
  }

  // Cap start bitrate to the min bitrate in order to avoid strange codec
  // behavior.
  codec_params.startBitrate =
      std::max(stream_params.minBitrate, start_bitrate_kbps);

  // Legacy screenshare mode is only enabled for the first simulcast layer
  codec_params.legacy_conference_mode =
      codec.legacy_conference_mode && stream_idx == 0;

  return codec_params;
}
    938 
    939 void SimulcastEncoderAdapter::OverrideFromFieldTrial(
    940    VideoEncoder::EncoderInfo* info) const {
    941  if (encoder_info_override_.requested_resolution_alignment()) {
    942    info->requested_resolution_alignment =
    943        std::lcm(info->requested_resolution_alignment,
    944                 *encoder_info_override_.requested_resolution_alignment());
    945    info->apply_alignment_to_all_simulcast_layers =
    946        info->apply_alignment_to_all_simulcast_layers ||
    947        encoder_info_override_.apply_alignment_to_all_simulcast_layers();
    948  }
    949  // Override resolution bitrate limits unless they're set already.
    950  if (info->resolution_bitrate_limits.empty() &&
    951      !encoder_info_override_.resolution_bitrate_limits().empty()) {
    952    info->resolution_bitrate_limits =
    953        encoder_info_override_.resolution_bitrate_limits();
    954  }
    955 }
    956 
// Aggregates the per-stream encoders' EncoderInfo into a single EncoderInfo
// describing the adapter as a whole. With exactly one stream the sub-encoder's
// info is passed through (with field-trial overrides applied); before
// InitEncode a temporary encoder is created and cached just to answer
// alignment queries.
VideoEncoder::EncoderInfo SimulcastEncoderAdapter::GetEncoderInfo() const {
  if (stream_contexts_.size() == 1) {
    // Not using simulcast adapting functionality, just pass through.
    VideoEncoder::EncoderInfo info =
        stream_contexts_.front().encoder().GetEncoderInfo();
    OverrideFromFieldTrial(&info);
    return info;
  }

  VideoEncoder::EncoderInfo encoder_info;
  encoder_info.implementation_name = "SimulcastEncoderAdapter";
  encoder_info.requested_resolution_alignment = 1;
  encoder_info.apply_alignment_to_all_simulcast_layers = false;
  encoder_info.supports_native_handle = true;
  encoder_info.scaling_settings.thresholds = std::nullopt;

  if (stream_contexts_.empty()) {
    // GetEncoderInfo queried before InitEncode. Only alignment info is needed
    // to be filled.
    // Create one encoder and query it.

    std::unique_ptr<SimulcastEncoderAdapter::EncoderContext> encoder_context =
        FetchOrCreateEncoderContext(/*is_lowest_quality_stream=*/true,
                                    std::nullopt);
    if (encoder_context == nullptr) {
      // Could not create any encoder; return the defaults set above.
      return encoder_info;
    }

    const VideoEncoder::EncoderInfo& primary_info =
        encoder_context->PrimaryInfo();
    const VideoEncoder::EncoderInfo& fallback_info =
        encoder_context->FallbackInfo();

    // Combined alignment must satisfy both the primary and the fallback
    // encoder, hence the least common multiple.
    encoder_info.requested_resolution_alignment =
        std::lcm(primary_info.requested_resolution_alignment,
                 fallback_info.requested_resolution_alignment);

    encoder_info.apply_alignment_to_all_simulcast_layers =
        primary_info.apply_alignment_to_all_simulcast_layers ||
        fallback_info.apply_alignment_to_all_simulcast_layers;

    if (!primary_info.supports_simulcast || !fallback_info.supports_simulcast) {
      encoder_info.apply_alignment_to_all_simulcast_layers = true;
    }

    // Keep the freshly created context for later reuse by InitEncode.
    cached_encoder_contexts_.emplace_back(std::move(encoder_context));

    OverrideFromFieldTrial(&encoder_info);
    return encoder_info;
  }

  // Multiple streams: the adapter itself handles scaling, so per-encoder
  // quality scaling is reported as off.
  encoder_info.scaling_settings = VideoEncoder::ScalingSettings::kOff;
  std::vector<std::string> encoder_names;

  for (size_t i = 0; i < stream_contexts_.size(); ++i) {
    VideoEncoder::EncoderInfo encoder_impl_info =
        stream_contexts_[i].encoder().GetEncoderInfo();

    // Encoder name indicates names of all active sub-encoders.
    if (!stream_contexts_[i].is_paused()) {
      encoder_names.push_back(encoder_impl_info.implementation_name);
    }

    // Track the largest mapped resolution (by width) reported by any
    // sub-encoder.
    if (encoder_impl_info.mapped_resolution.has_value() &&
        (!encoder_info.mapped_resolution.has_value() ||
         encoder_info.mapped_resolution->width <
             encoder_impl_info.mapped_resolution->width)) {
      encoder_info.mapped_resolution = encoder_impl_info.mapped_resolution;
    }

    if (i == 0) {
      // First stream seeds the aggregate values.
      encoder_info.supports_native_handle =
          encoder_impl_info.supports_native_handle;
      encoder_info.has_trusted_rate_controller =
          encoder_impl_info.has_trusted_rate_controller;
      encoder_info.is_hardware_accelerated =
          encoder_impl_info.is_hardware_accelerated;
      encoder_info.is_qp_trusted = encoder_impl_info.is_qp_trusted;
    } else {
      // Native handle supported if any encoder supports it.
      encoder_info.supports_native_handle |=
          encoder_impl_info.supports_native_handle;

      // Trusted rate controller only if all encoders have it.
      encoder_info.has_trusted_rate_controller &=
          encoder_impl_info.has_trusted_rate_controller;

      // Uses hardware support if any of the encoders uses it.
      // For example, if we are having issues with down-scaling due to
      // pipelining delay in HW encoders we need higher encoder usage
      // thresholds in CPU adaptation.
      encoder_info.is_hardware_accelerated |=
          encoder_impl_info.is_hardware_accelerated;

      // Treat QP from frame/slice/tile header as average QP only if all
      // encoders report it as average QP.
      encoder_info.is_qp_trusted =
          encoder_info.is_qp_trusted.value_or(true) &&
          encoder_impl_info.is_qp_trusted.value_or(true);
    }
    // Each sub-encoder's base (index 0) fps allocation becomes simulcast
    // layer i's allocation.
    encoder_info.fps_allocation[i] = encoder_impl_info.fps_allocation[0];
    encoder_info.requested_resolution_alignment =
        std::lcm(encoder_info.requested_resolution_alignment,
                 encoder_impl_info.requested_resolution_alignment);
    // request alignment on all layers if any of the encoders may need it, or
    // if any non-top layer encoder requests a non-trivial alignment.
    if (encoder_impl_info.apply_alignment_to_all_simulcast_layers ||
        (encoder_impl_info.requested_resolution_alignment > 1 &&
         (codec_.simulcastStream[i].height < codec_.height ||
          codec_.simulcastStream[i].width < codec_.width))) {
      encoder_info.apply_alignment_to_all_simulcast_layers = true;
    }
  }

  // Append the active sub-encoder names, e.g. "SimulcastEncoderAdapter
  // (libvpx, libvpx)".
  if (!encoder_names.empty()) {
    StringBuilder implementation_name_builder(" (");
    implementation_name_builder << StrJoin(encoder_names, ", ");
    implementation_name_builder << ")";
    encoder_info.implementation_name += implementation_name_builder.Release();
  }

  OverrideFromFieldTrial(&encoder_info);

  return encoder_info;
}
   1082 
   1083 }  // namespace webrtc