tor-browser

The Tor Browser
git clone https://git.dasho.dev/tor-browser.git
Log | Files | Refs | README | LICENSE

encoded_image.h (11390B)


      1 /*
      2 *  Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
      3 *
      4 *  Use of this source code is governed by a BSD-style license
      5 *  that can be found in the LICENSE file in the root of the source
      6 *  tree. An additional intellectual property rights grant can be found
      7 *  in the file PATENTS.  All contributing project authors may
      8 *  be found in the AUTHORS file in the root of the source tree.
      9 */
     10 
     11 #ifndef API_VIDEO_ENCODED_IMAGE_H_
     12 #define API_VIDEO_ENCODED_IMAGE_H_
     13 
     14 #include <stdint.h>
     15 
     16 #include <cstddef>
     17 #include <map>
     18 #include <optional>
     19 #include <utility>
     20 
     21 #include "api/ref_count.h"
     22 #include "api/rtp_packet_infos.h"
     23 #include "api/scoped_refptr.h"
     24 #include "api/units/timestamp.h"
     25 #include "api/video/color_space.h"
     26 #include "api/video/corruption_detection/corruption_detection_filter_settings.h"
     27 #include "api/video/video_codec_constants.h"
     28 #include "api/video/video_content_type.h"
     29 #include "api/video/video_frame_type.h"
     30 #include "api/video/video_rotation.h"
     31 #include "api/video/video_timing.h"
     32 #include "rtc_base/buffer.h"
     33 #include "rtc_base/checks.h"
     34 #include "rtc_base/system/rtc_export.h"
     35 
     36 namespace webrtc {
     37 
// Abstract interface for buffer storage. Intended to support buffers owned by
// external encoders with special release requirements, e.g, java encoders with
// releaseOutputBuffer.
class EncodedImageBufferInterface : public RefCountInterface {
public:
 // Element type, so the class models a read-only byte container.
 using value_type = uint8_t;

 // Pointer to the encoded payload. Lifetime is tied to this buffer object
 // (released via ref-counting, see RefCountInterface).
 virtual const uint8_t* data() const = 0;
 // Number of valid bytes reachable through data().
 virtual size_t size() const = 0;

 // Container-style accessors; enable range-for and standard algorithms over
 // the encoded bytes.
 const uint8_t* begin() const { return data(); }
 const uint8_t* end() const { return data() + size(); }
};
     51 
// Basic implementation of EncodedImageBufferInterface.
class RTC_EXPORT EncodedImageBuffer : public EncodedImageBufferInterface {
public:
 // Creates an empty buffer (delegates to the sized overload with size 0).
 static scoped_refptr<EncodedImageBuffer> Create() { return Create(0); }
 // Creates a buffer of `size` bytes. Contents presumably uninitialized —
 // defined in the .cc; verify before relying on zero-fill.
 static scoped_refptr<EncodedImageBuffer> Create(size_t size);
 // NOTE(review): presumably copies `size` bytes from `data`; definition is
 // out of view in the .cc — confirm ownership semantics there.
 static scoped_refptr<EncodedImageBuffer> Create(const uint8_t* data,
                                                 size_t size);
 // Adopts an existing Buffer (taken by value, so callers can move in).
 static scoped_refptr<EncodedImageBuffer> Create(Buffer buffer);

 const uint8_t* data() const override;
 // Mutable access for in-place writing by encoders.
 uint8_t* data();
 size_t size() const override;
 // Resizes the underlying storage to `t` bytes (implementation in the .cc).
 void Realloc(size_t t);

protected:
 // Constructors are protected: instances are created via Create() so they
 // are always ref-counted through scoped_refptr.
 explicit EncodedImageBuffer(size_t size);
 EncodedImageBuffer(const uint8_t* data, size_t size);
 explicit EncodedImageBuffer(Buffer buffer);

 Buffer buffer_;
};
     73 
     74 // TODO(bug.webrtc.org/9378): This is a legacy api class, which is slowly being
     75 // cleaned up. Direct use of its members is strongly discouraged.
     76 class RTC_EXPORT EncodedImage {
     77 public:
     78  // Peak signal to noise ratio, Y/U/V components.
     79  struct Psnr {
     80    double y = 0.0;
     81    double u = 0.0;
     82    double v = 0.0;
     83  };
     84 
     85  EncodedImage();
     86  EncodedImage(EncodedImage&&);
     87  EncodedImage(const EncodedImage&);
     88 
     89  ~EncodedImage();
     90 
     91  EncodedImage& operator=(EncodedImage&&);
     92  EncodedImage& operator=(const EncodedImage&);
     93 
     94  // Frame capture time in RTP timestamp representation (90kHz).
     95  void SetRtpTimestamp(uint32_t timestamp) { timestamp_rtp_ = timestamp; }
     96  uint32_t RtpTimestamp() const { return timestamp_rtp_; }
     97 
     98  void SetEncodeTime(int64_t encode_start_ms, int64_t encode_finish_ms);
     99 
    100  // Frame capture time in local time.
    101  Timestamp CaptureTime() const;
    102 
    103  // Frame capture time in ntp epoch time, i.e. time since 1st Jan 1900
    104  int64_t NtpTimeMs() const { return ntp_time_ms_; }
    105 
    106  // Every simulcast layer (= encoding) has its own encoder and RTP stream.
    107  // There can be no dependencies between different simulcast layers.
    108  std::optional<int> SimulcastIndex() const { return simulcast_index_; }
    109  void SetSimulcastIndex(std::optional<int> simulcast_index) {
    110    RTC_DCHECK_GE(simulcast_index.value_or(0), 0);
    111    RTC_DCHECK_LT(simulcast_index.value_or(0), kMaxSimulcastStreams);
    112    simulcast_index_ = simulcast_index;
    113  }
    114 
    115  const std::optional<Timestamp>& PresentationTimestamp() const {
    116    return presentation_timestamp_;
    117  }
    118  void SetPresentationTimestamp(
    119      const std::optional<Timestamp>& presentation_timestamp) {
    120    presentation_timestamp_ = presentation_timestamp;
    121  }
    122 
    123  // Encoded images can have dependencies between spatial and/or temporal
    124  // layers, depending on the scalability mode used by the encoder. See diagrams
    125  // at https://w3c.github.io/webrtc-svc/#dependencydiagrams*.
    126  std::optional<int> SpatialIndex() const { return spatial_index_; }
    127  void SetSpatialIndex(std::optional<int> spatial_index) {
    128    RTC_DCHECK_GE(spatial_index.value_or(0), 0);
    129    RTC_DCHECK_LT(spatial_index.value_or(0), kMaxSpatialLayers);
    130    spatial_index_ = spatial_index;
    131  }
    132 
    133  std::optional<int> TemporalIndex() const { return temporal_index_; }
    134  void SetTemporalIndex(std::optional<int> temporal_index) {
    135    RTC_DCHECK_GE(temporal_index_.value_or(0), 0);
    136    RTC_DCHECK_LT(temporal_index_.value_or(0), kMaxTemporalStreams);
    137    temporal_index_ = temporal_index;
    138  }
    139 
    140  // These methods can be used to set/get size of subframe with spatial index
    141  // `spatial_index` on encoded frames that consist of multiple spatial layers.
    142  std::optional<size_t> SpatialLayerFrameSize(int spatial_index) const;
    143  void SetSpatialLayerFrameSize(int spatial_index, size_t size_bytes);
    144 
    145  const webrtc::ColorSpace* ColorSpace() const {
    146    return color_space_ ? &*color_space_ : nullptr;
    147  }
    148  void SetColorSpace(const std::optional<webrtc::ColorSpace>& color_space) {
    149    color_space_ = color_space;
    150  }
    151 
    152  std::optional<VideoPlayoutDelay> PlayoutDelay() const {
    153    return playout_delay_;
    154  }
    155 
    156  void SetPlayoutDelay(std::optional<VideoPlayoutDelay> playout_delay) {
    157    playout_delay_ = playout_delay;
    158  }
    159 
    160  // These methods along with the private member video_frame_tracking_id_ are
    161  // meant for media quality testing purpose only.
    162  std::optional<uint16_t> VideoFrameTrackingId() const {
    163    return video_frame_tracking_id_;
    164  }
    165  void SetVideoFrameTrackingId(std::optional<uint16_t> tracking_id) {
    166    video_frame_tracking_id_ = tracking_id;
    167  }
    168 
    169  const RtpPacketInfos& PacketInfos() const { return packet_infos_; }
    170  void SetPacketInfos(RtpPacketInfos packet_infos) {
    171    packet_infos_ = std::move(packet_infos);
    172  }
    173 
    174  bool RetransmissionAllowed() const { return retransmission_allowed_; }
    175  void SetRetransmissionAllowed(bool retransmission_allowed) {
    176    retransmission_allowed_ = retransmission_allowed;
    177  }
    178 
    179  size_t size() const { return size_; }
    180  void set_size(size_t new_size) {
    181    // Allow set_size(0) even if we have no buffer.
    182    RTC_DCHECK_LE(new_size, new_size == 0 ? 0 : capacity());
    183    size_ = new_size;
    184  }
    185 
    186  void SetEncodedData(scoped_refptr<EncodedImageBufferInterface> encoded_data) {
    187    encoded_data_ = encoded_data;
    188    size_ = encoded_data->size();
    189  }
    190 
    191  void ClearEncodedData() {
    192    encoded_data_ = nullptr;
    193    size_ = 0;
    194  }
    195 
    196  scoped_refptr<EncodedImageBufferInterface> GetEncodedData() const {
    197    return encoded_data_;
    198  }
    199 
    200  const uint8_t* data() const {
    201    return encoded_data_ ? encoded_data_->data() : nullptr;
    202  }
    203 
    204  const uint8_t* begin() const { return data(); }
    205  const uint8_t* end() const { return data() + size(); }
    206 
    207  // Returns whether the encoded image can be considered to be of target
    208  // quality.
    209  [[deprecated]] bool IsAtTargetQuality() const { return at_target_quality_; }
    210 
    211  // Sets that the encoded image can be considered to be of target quality to
    212  // true or false.
    213  [[deprecated]] void SetAtTargetQuality(bool at_target_quality) {
    214    at_target_quality_ = at_target_quality;
    215  }
    216 
    217  // Returns whether the frame that was encoded is a steady-state refresh frame
    218  // intended to improve the visual quality.
    219  bool IsSteadyStateRefreshFrame() const {
    220    return is_steady_state_refresh_frame_;
    221  }
    222 
    223  void SetIsSteadyStateRefreshFrame(bool refresh_frame) {
    224    is_steady_state_refresh_frame_ = refresh_frame;
    225  }
    226 
    227  VideoFrameType FrameType() const { return _frameType; }
    228 
    229  void SetFrameType(VideoFrameType frame_type) { _frameType = frame_type; }
    230  VideoContentType contentType() const { return content_type_; }
    231  VideoRotation rotation() const { return rotation_; }
    232 
    233  std::optional<CorruptionDetectionFilterSettings>
    234  corruption_detection_filter_settings() const {
    235    return corruption_detection_filter_settings_;
    236  }
    237  void set_corruption_detection_filter_settings(
    238      const CorruptionDetectionFilterSettings& settings) {
    239    corruption_detection_filter_settings_ = settings;
    240  }
    241 
    242  uint32_t _encodedWidth = 0;
    243  uint32_t _encodedHeight = 0;
    244  // NTP time of the capture time in local timebase in milliseconds.
    245  // TODO(minyue): make this member private.
    246  int64_t ntp_time_ms_ = 0;
    247  int64_t capture_time_ms_ = 0;
    248  VideoFrameType _frameType = VideoFrameType::kVideoFrameDelta;
    249  VideoRotation rotation_ = kVideoRotation_0;
    250  VideoContentType content_type_ = VideoContentType::UNSPECIFIED;
    251  int qp_ = -1;  // Quantizer value.
    252 
    253  struct Timing {
    254    uint8_t flags = VideoSendTiming::kInvalid;
    255    int64_t encode_start_ms = 0;
    256    int64_t encode_finish_ms = 0;
    257    int64_t packetization_finish_ms = 0;
    258    int64_t pacer_exit_ms = 0;
    259    int64_t network_timestamp_ms = 0;
    260    int64_t network2_timestamp_ms = 0;
    261    int64_t receive_start_ms = 0;
    262    int64_t receive_finish_ms = 0;
    263  } timing_;
    264  EncodedImage::Timing video_timing() const { return timing_; }
    265  EncodedImage::Timing* video_timing_mutable() { return &timing_; }
    266 
    267  std::optional<Psnr> psnr() const { return psnr_; }
    268  void set_psnr(std::optional<Psnr> psnr) { psnr_ = psnr; }
    269 
    270 private:
    271  size_t capacity() const { return encoded_data_ ? encoded_data_->size() : 0; }
    272 
    273  // When set, indicates that all future frames will be constrained with those
    274  // limits until the application indicates a change again.
    275  std::optional<VideoPlayoutDelay> playout_delay_;
    276 
    277  scoped_refptr<EncodedImageBufferInterface> encoded_data_;
    278  size_t size_ = 0;  // Size of encoded frame data.
    279  uint32_t timestamp_rtp_ = 0;
    280  std::optional<int> simulcast_index_;
    281  std::optional<Timestamp> presentation_timestamp_;
    282  std::optional<int> spatial_index_;
    283  std::optional<int> temporal_index_;
    284  std::map<int, size_t> spatial_layer_frame_size_bytes_;
    285  std::optional<webrtc::ColorSpace> color_space_;
    286  // This field is meant for media quality testing purpose only. When enabled it
    287  // carries the VideoFrame id field from the sender to the receiver.
    288  std::optional<uint16_t> video_frame_tracking_id_;
    289  // Information about packets used to assemble this video frame. This is needed
    290  // by `SourceTracker` when the frame is delivered to the RTCRtpReceiver's
    291  // MediaStreamTrack, in order to implement getContributingSources(). See:
    292  // https://w3c.github.io/webrtc-pc/#dom-rtcrtpreceiver-getcontributingsources
    293  RtpPacketInfos packet_infos_;
    294  bool retransmission_allowed_ = true;
    295  // True if the encoded image can be considered to be of target quality.
    296  bool at_target_quality_ = false;
    297  // True if the frame that was encoded is a steady-state refresh frame intended
    298  // to improve the visual quality.
    299  bool is_steady_state_refresh_frame_ = false;
    300 
    301  // Filter settings for corruption detection suggested by the encoder
    302  // implementation, if any. Otherwise generic per-codec-type settings will be
    303  // used.
    304  std::optional<CorruptionDetectionFilterSettings>
    305      corruption_detection_filter_settings_;
    306 
    307  // Encoders may compute PSNR for a frame.
    308  std::optional<Psnr> psnr_;
    309 };
    310 
    311 }  // namespace webrtc
    312 
    313 #endif  // API_VIDEO_ENCODED_IMAGE_H_