tor-browser

The Tor Browser
git clone https://git.dasho.dev/tor-browser.git
Log | Files | Refs | README | LICENSE

SampleIterator.cpp (25203B)


      1 /* This Source Code Form is subject to the terms of the Mozilla Public
      2 * License, v. 2.0. If a copy of the MPL was not distributed with this
      3 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
      4 
      5 #include "SampleIterator.h"
      6 
      7 #include <algorithm>
      8 #include <limits>
      9 
     10 #include "BufferReader.h"
     11 #include "MP4Interval.h"
     12 #include "MP4Metadata.h"
     13 #include "MediaDataDemuxer.h"
     14 #include "SinfParser.h"
     15 #include "mozilla/RefPtr.h"
     16 
     17 using namespace mozilla::media;
     18 
     19 namespace mozilla {
     20 
// Helper answering "is this byte range fully buffered?" for a sequence of
// queries arriving in (roughly) increasing offset order.
class MOZ_STACK_CLASS RangeFinder {
 public:
  // Given that we're processing this in order we don't use a binary search
  // to find the appropriate time range. Instead we search linearly from the
  // last used point.
  explicit RangeFinder(const MediaByteRangeSet& ranges)
      : mRanges(ranges), mIndex(0) {
    // Ranges must be normalised for this to work
  }

  bool Contains(const MediaByteRange& aByteRange);

 private:
  const MediaByteRangeSet& mRanges;  // Borrowed; must outlive this finder.
  size_t mIndex;  // Position of the last range examined, kept across calls.
};
     37 
// Returns true iff aByteRange lies entirely inside one of mRanges.
// mIndex persists between calls so in-order queries resume from the previous
// match instead of rescanning from the start of the set.
bool RangeFinder::Contains(const MediaByteRange& aByteRange) {
  if (mRanges.IsEmpty()) {
    return false;
  }

  // Fast path: the query hits the same range as last time.
  if (mRanges[mIndex].ContainsStrict(aByteRange)) {
    return true;
  }

  if (aByteRange.mStart < mRanges[mIndex].mStart) {
    // Search backwards
    do {
      if (!mIndex) {
        return false;
      }
      --mIndex;
      if (mRanges[mIndex].ContainsStrict(aByteRange)) {
        return true;
      }
    } while (aByteRange.mStart < mRanges[mIndex].mStart);

    return false;
  }

  // Search forwards while the query still extends past the current range.
  while (aByteRange.mEnd > mRanges[mIndex].mEnd) {
    if (mIndex == mRanges.Length() - 1) {
      return false;
    }
    ++mIndex;
    if (mRanges[mIndex].ContainsStrict(aByteRange)) {
      return true;
    }
  }

  // The query falls in a gap between normalised ranges.
  return false;
}
     74 
// Construct an iterator positioned at the first sample of the first moof and
// register it with the index so eviction can rebase mCurrentMoof.
SampleIterator::SampleIterator(MP4SampleIndex* aIndex)
    : mIndex(aIndex), mCurrentMoof(0), mCurrentSample(0) {
  mIndex->RegisterIterator(this);
}
     79 
// Unregister so the index no longer adjusts this iterator on moof eviction.
SampleIterator::~SampleIterator() { mIndex->UnregisterIterator(this); }
     81 
     82 bool SampleIterator::HasNext() { return Get().isOk(); }
     83 
     84 already_AddRefed<MediaRawData> SampleIterator::GetNextHeader() {
     85  auto current = Get();
     86  if (current.isErr()) {
     87    return nullptr;
     88  }
     89  Sample* s = current.unwrap();
     90 
     91  int64_t length = std::numeric_limits<int64_t>::max();
     92  mIndex->mSource->Length(&length);
     93  if (s->mByteRange.mEnd > length) {
     94    // We don't have this complete sample.
     95    return nullptr;
     96  }
     97 
     98  RefPtr<MediaRawData> sample = new MediaRawData();
     99  sample->mTimecode = s->mDecodeTime;
    100  sample->mTime = s->mCompositionRange.start;
    101  sample->mDuration = s->mCompositionRange.Length();
    102  sample->mOffset = s->mByteRange.mStart;
    103  sample->mKeyframe = s->mSync;
    104  Next();
    105  return sample.forget();
    106 }
    107 
// Demux the current sample: validate its byte range, blocking-read its
// payload, attach any crypto metadata (key id, IV, subsample layout) needed
// for decryption, then advance the iterator on success. Returns a MediaResult
// error (rather than null) so callers can distinguish EOS from failures.
Result<already_AddRefed<MediaRawData>, MediaResult> SampleIterator::GetNext() {
  auto current = Get();
  if (current.isErr()) {
    return current.propagateErr();
  }
  Sample* s = current.unwrap();

  // If Length() can't report a length, `length` keeps its int64 max sentinel
  // and the range check below effectively passes.
  int64_t length = std::numeric_limits<int64_t>::max();
  mIndex->mSource->Length(&length);
  if (s->mByteRange.mEnd > length) {
    return Err(MediaResult::Logged(
        NS_ERROR_DOM_MEDIA_RANGE_ERR,
        RESULT_DETAIL("Sample data byte range beyond end of resource"),
        gMediaDemuxerLog));
  }

  RefPtr<MediaRawData> sample = new MediaRawData();
  sample->mTimecode = s->mDecodeTime;
  sample->mTime = s->mCompositionRange.start;
  sample->mDuration = s->mCompositionRange.Length();
  sample->mOffset = s->mByteRange.mStart;
  sample->mKeyframe = s->mSync;

  UniquePtr<MediaRawDataWriter> writer(sample->CreateWriter());
  // Do the blocking read
  if (!writer->SetSize(s->mByteRange.Length())) {
    return Err(MediaResult::Logged(NS_ERROR_OUT_OF_MEMORY, __func__,
                                   gMediaDemuxerLog));
  }

  size_t bytesRead;
  nsresult rv = mIndex->mSource->ReadAt(sample->mOffset, writer->Data(),
                                        sample->Size(), &bytesRead);
  if (NS_FAILED(rv) || bytesRead != sample->Size()) {
    return Err(MediaResult::Logged(
        // Fewer bytes read means end of stream, or the bytes are not
        // available because a network error has occurred.
        // A sample range extending past the end of stream is a bad range.
        NS_FAILED(rv) ? rv : NS_ERROR_DOM_MEDIA_RANGE_ERR,
        RESULT_DETAIL("Sample data read failed"), gMediaDemuxerLog));
  }

  MoofParser* moofParser = mIndex->mMoofParser.get();
  if (!moofParser) {
    // File is not fragmented, we can't have crypto, just early return.
    Next();
    return sample.forget();
  }

  const nsTArray<Moof>& moofs = moofParser->Moofs();
  const Moof* currentMoof = &moofs[mCurrentMoof];
  // We need to check if this moof has init data the CDM expects us to surface.
  // This should happen when handling the first sample, even if that sample
  // isn't encrypted (samples later in the moof may be).
  if (mCurrentSample == 0) {
    if (!currentMoof->mPsshes.IsEmpty()) {
      // This Moof contained crypto init data. Report that. We only report
      // the init data on the Moof's first sample, to avoid reporting it more
      // than once per Moof.
      writer->mCrypto.mInitDatas.AppendElements(currentMoof->mPsshes);
      writer->mCrypto.mInitDataType = u"cenc"_ns;
    }
  }

  auto cryptoSchemeResult = GetEncryptionScheme();
  if (cryptoSchemeResult.isErr()) {
    return Err(MediaResult::Logged(NS_ERROR_DOM_MEDIA_DEMUXER_ERR,
                                   cryptoSchemeResult.unwrapErr(),
                                   gMediaDemuxerLog));
  }
  CryptoScheme cryptoScheme = cryptoSchemeResult.unwrap();
  if (cryptoScheme == CryptoScheme::None) {
    // No crypto to handle, early return.
    Next();
    return sample.forget();
  }

  writer->mCrypto.mCryptoScheme = cryptoScheme;
  MOZ_ASSERT(writer->mCrypto.mCryptoScheme != CryptoScheme::None,
             "Should have early returned if we don't have a crypto scheme!");
  MOZ_ASSERT(writer->mCrypto.mKeyId.IsEmpty(),
             "Sample should not already have a key ID");
  MOZ_ASSERT(writer->mCrypto.mConstantIV.IsEmpty(),
             "Sample should not already have a constant IV");
  const CencSampleEncryptionInfoEntry* sampleInfo = GetSampleEncryptionEntry();
  if (sampleInfo) {
    // Use sample group information if present, this supersedes track level
    // information.
    writer->mCrypto.mKeyId.AppendElements(sampleInfo->mKeyId);
    writer->mCrypto.mIVSize = sampleInfo->mIVSize;
    writer->mCrypto.mCryptByteBlock = sampleInfo->mCryptByteBlock;
    writer->mCrypto.mSkipByteBlock = sampleInfo->mSkipByteBlock;
    // NOTE: mConsantIV is the field's actual (misspelled) name in the entry
    // struct declared elsewhere; do not "fix" it here.
    writer->mCrypto.mConstantIV.AppendElements(sampleInfo->mConsantIV);
  } else {
    // Use the crypto info from track metadata
    writer->mCrypto.mKeyId.AppendElements(moofParser->mSinf.mDefaultKeyID, 16);
    writer->mCrypto.mIVSize = moofParser->mSinf.mDefaultIVSize;
    writer->mCrypto.mCryptByteBlock = moofParser->mSinf.mDefaultCryptByteBlock;
    writer->mCrypto.mSkipByteBlock = moofParser->mSinf.mDefaultSkipByteBlock;
    writer->mCrypto.mConstantIV.AppendElements(
        moofParser->mSinf.mDefaultConstantIV);
  }

  if ((writer->mCrypto.mIVSize == 0 && writer->mCrypto.mConstantIV.IsEmpty()) ||
      (writer->mCrypto.mIVSize != 0 &&
       (s->mCencRange.IsEmpty() && !currentMoof->SencIsValid()))) {
    // If mIVSize == 0, this indicates that a constant IV is in use, thus we
    // should have a non empty constant IV. Alternatively if IV size is non
    // zero, we should have an IV for this sample, which we need to look up
    // in mCencRange (which must then be non empty). If neither of these are
    // true we have bad crypto data, so bail.
    return Err(MediaResult::Logged(NS_ERROR_DOM_MEDIA_DEMUXER_ERR,
                                   RESULT_DETAIL("Crypto IV size inconsistent"),
                                   gMediaDemuxerLog));
  }
  // Retrieve encryption information
  // This information might come from two places: the senc box, or the
  // auxiliary data (indicated by saio and saiz boxes)
  // Try to use senc information first, and fallback to auxiliary data if not
  // present
  if (currentMoof->SencIsValid()) {
    if (writer->mCrypto.mIVSize != s->mIV.Length()) {
      return Err(MediaResult::Logged(
          NS_ERROR_DOM_MEDIA_DEMUXER_ERR,
          RESULT_DETAIL("Inconsistent crypto IV size"), gMediaDemuxerLog));
    }
    writer->mCrypto.mIV = s->mIV;
    writer->mCrypto.mPlainSizes = s->mPlainSizes;
    writer->mCrypto.mEncryptedSizes = s->mEncryptedSizes;
  } else if (!s->mCencRange.IsEmpty()) {
    // The size comes from an 8 bit field
    AutoTArray<uint8_t, 256> cencAuxInfo;
    cencAuxInfo.SetLength(s->mCencRange.Length());
    // Sample Auxiliary Information may be stored anywhere in the file, but
    // encryption is supported in only fragmented mp4, so the offsets are
    // assumed in the traf or a subsequent box.
    rv = mIndex->mSource->ReadAt(s->mCencRange.mStart, cencAuxInfo.Elements(),
                                 cencAuxInfo.Length(), &bytesRead);
    if (NS_FAILED(rv) || bytesRead != cencAuxInfo.Length()) {
      return Err(MediaResult::Logged(
          // Unless pref "eme.mse-only" is set to false, encryption is supported
          // only in MSE, where fewer bytes means end of stream.  mCencRange
          // extending past the end of stream means an error with the range.
          NS_FAILED(rv) ? rv : NS_ERROR_DOM_MEDIA_RANGE_ERR,
          RESULT_DETAIL("cenc Sample Auxiliary Information read failed"),
          gMediaDemuxerLog));
    }
    BufferReader reader(cencAuxInfo);
    // Per-sample IV comes first in CencSampleAuxiliaryDataFormat.
    if (!reader.ReadArray(writer->mCrypto.mIV, writer->mCrypto.mIVSize)) {
      return Err(MediaResult::Logged(
          NS_ERROR_DOM_MEDIA_DEMUXER_ERR,
          RESULT_DETAIL("sample InitializationVector error"),
          gMediaDemuxerLog));
    }

    // Parse the auxiliary information for subsample information
    auto res = reader.ReadU16();
    if (res.isOk() && res.unwrap() > 0) {
      uint16_t count = res.unwrap();

      // Each subsample entry is a 16-bit clear size + 32-bit encrypted size.
      for (size_t i = 0; i < count; i++) {
        auto res_16 = reader.ReadU16();
        auto res_32 = reader.ReadU32();
        if (res_16.isErr() || res_32.isErr()) {
          return Err(MediaResult::Logged(
              NS_ERROR_DOM_MEDIA_DEMUXER_ERR,
              RESULT_DETAIL("cenc subsample_count too large for"
                            "CencSampleAuxiliaryDataFormat"),
              gMediaDemuxerLog));
        }
        writer->mCrypto.mPlainSizes.AppendElement(res_16.unwrap());
        writer->mCrypto.mEncryptedSizes.AppendElement(res_32.unwrap());
      }
    } else {
      // No subsample information means the entire sample is encrypted.
      writer->mCrypto.mPlainSizes.AppendElement(0);
      writer->mCrypto.mEncryptedSizes.AppendElement(sample->Size());
    }
  }

  Next();

  return sample.forget();
}
    292 
    293 SampleDescriptionEntry* SampleIterator::GetSampleDescriptionEntry() {
    294  nsTArray<Moof>& moofs = mIndex->mMoofParser->Moofs();
    295  Moof& currentMoof = moofs[mCurrentMoof];
    296  uint32_t sampleDescriptionIndex =
    297      currentMoof.mTfhd.mDefaultSampleDescriptionIndex;
    298  // Mp4 indices start at 1, shift down 1 so we index our array correctly.
    299  sampleDescriptionIndex--;
    300  FallibleTArray<SampleDescriptionEntry>& sampleDescriptions =
    301      mIndex->mMoofParser->mSampleDescriptions;
    302  if (sampleDescriptionIndex >= sampleDescriptions.Length()) {
    303    // The sample description index is invalid, the mp4 is malformed. Bail out.
    304    return nullptr;
    305  }
    306  return &sampleDescriptions[sampleDescriptionIndex];
    307 }
    308 
// Fetch the sample-group encryption entry (seig) for the current sample, if
// any; the parser may return nullptr when no sample group applies.
const CencSampleEncryptionInfoEntry* SampleIterator::GetSampleEncryptionEntry()
    const {
  return mIndex->mMoofParser->GetSampleEncryptionEntry(mCurrentMoof,
                                                       mCurrentSample);
}
    314 
// Determine which encryption scheme (if any) applies to the current sample by
// cross-checking the sample description entry, the sinf box, and any sample
// encryption group entry. Returns an error message when the metadata is
// inconsistent or references an unsupported scheme.
Result<CryptoScheme, nsCString> SampleIterator::GetEncryptionScheme() {
  // See ISO/IEC 23001-7 for information on the metadata being checked.
  MoofParser* moofParser = mIndex->mMoofParser.get();
  if (!moofParser) {
    // This mp4 isn't fragmented so it can't be encrypted.
    return CryptoScheme::None;
  }

  SampleDescriptionEntry* sampleDescriptionEntry = GetSampleDescriptionEntry();
  if (!sampleDescriptionEntry) {
    // For the file to be valid the tfhd must reference a sample description
    // entry.
    // If we encounter this error often, we may consider using the first
    // sample description entry if the index is out of bounds.
    return mozilla::Err(RESULT_DETAIL(
        "Could not determine encryption scheme due to bad index for sample "
        "description entry."));
  }

  if (!sampleDescriptionEntry->mIsEncryptedEntry) {
    return CryptoScheme::None;
  }

  if (!moofParser->mSinf.IsValid()) {
    // The sample description entry says this sample is encrypted, but we
    // don't have a valid sinf box. This shouldn't happen as the sinf box is
    // part of the sample description entry. Suggests a malformed file, bail.
    return mozilla::Err(RESULT_DETAIL(
        "Could not determine encryption scheme. Sample description entry "
        "indicates encryption, but could not find associated sinf box."));
  }

  const CencSampleEncryptionInfoEntry* sampleInfo = GetSampleEncryptionEntry();
  if (sampleInfo && !sampleInfo->mIsEncrypted) {
    // May not have sample encryption info, but if we do, it should match other
    // metadata.
    return mozilla::Err(RESULT_DETAIL(
        "Could not determine encryption scheme. Sample description entry "
        "indicates encryption, but sample encryption entry indicates sample is "
        "not encrypted. These should be consistent."));
  }

  // Map the sinf's scheme type to the schemes we support (cenc and cbcs).
  if (moofParser->mSinf.mDefaultEncryptionType == AtomType("cenc")) {
    return CryptoScheme::Cenc;
  } else if (moofParser->mSinf.mDefaultEncryptionType == AtomType("cbcs")) {
    return CryptoScheme::Cbcs;
  }
  return mozilla::Err(RESULT_DETAIL(
      "Could not determine encryption scheme. Sample description entry "
      "reports sample is encrypted, but no scheme, or an unsupported scheme "
      "is in use."));
}
    367 
// Return the sample at the iterator's current position without advancing.
// For fragmented files this lazily parses further moofs as needed; an error
// (end-of-stream or the parser's failure code) is returned when no further
// sample is available.
Result<Sample*, nsresult> SampleIterator::Get() {
  if (!mIndex->mMoofParser) {
    // Non-fragmented file: samples live in the flat mIndex table.
    MOZ_ASSERT(!mCurrentMoof);
    if (mCurrentSample >= mIndex->mIndex.Length()) {
      return Err(NS_ERROR_DOM_MEDIA_END_OF_STREAM);
    }
    return &mIndex->mIndex[mCurrentSample];
  }

  nsTArray<Moof>& moofs = mIndex->mMoofParser->Moofs();
  while (true) {
    if (mCurrentMoof == moofs.Length()) {
      // Ran past the parsed moofs; blocking-read the next one from source.
      nsresult rv = mIndex->mMoofParser->BlockingReadNextMoof();
      if (NS_FAILED(rv)) {
        return Err(rv);
      }
      MOZ_ASSERT(mCurrentMoof < moofs.Length());
    }
    if (mCurrentSample < moofs[mCurrentMoof].mIndex.Length()) {
      break;
    }
    // Exhausted this moof (or it is empty); roll over to the next one.
    mCurrentSample = 0;
    ++mCurrentMoof;
  }
  return &moofs[mCurrentMoof].mIndex[mCurrentSample];
}
    394 
    395 void SampleIterator::Next() { ++mCurrentSample; }
    396 
// Position the iterator for aTime: scan forward from the start, remembering
// the last sync sample at-or-before aTime, then land on that sync sample so
// decoding can resume from a keyframe. With SyncSampleMode::First the scan
// stops at the first sync sample encountered.
void SampleIterator::Seek(const TimeUnit& aTime, SyncSampleMode aMode) {
  size_t syncMoof = 0;
  size_t syncSample = 0;
  mCurrentMoof = 0;
  mCurrentSample = 0;
  while (Sample* sample = Get().unwrapOr(nullptr)) {
    if (sample->mCompositionRange.start > aTime) {
      break;
    }
    if (sample->mSync) {
      // Remember the most recent keyframe position seen so far.
      syncMoof = mCurrentMoof;
      syncSample = mCurrentSample;
      if (aMode == SyncSampleMode::First) {
        break;
      }
    }
    if (sample->mCompositionRange.start == aTime) {
      // Exact match; no need to scan further.
      break;
    }
    Next();
  }
  // Rewind to the chosen sync sample.
  mCurrentMoof = syncMoof;
  mCurrentSample = syncSample;
}
    421 
// Return the composition start time of the next sync sample at or after the
// current position, or TimeUnit::Invalid() if none is found.
TimeUnit SampleIterator::GetNextKeyframeTime() {
  // Scan ahead on a copy so this iterator's position is left untouched.
  // NOTE(review): the copy uses the implicit copy constructor and is never
  // registered with mIndex, yet its destructor calls UnregisterIterator —
  // presumably RemoveElement on an absent element is a harmless no-op;
  // confirm against nsTArray semantics.
  SampleIterator itr(*this);
  while (Sample* sample = itr.Get().unwrapOr(nullptr)) {
    if (sample->mSync) {
      return sample->mCompositionRange.start;
    }
    itr.Next();
  }
  return TimeUnit::Invalid();
}
    432 
// Build the sample index for a track. With no sample table (aIndices empty)
// the file is assumed fragmented and a MoofParser is created to index moofs
// lazily. Otherwise a flat Sample table is built from the indices, and — when
// samples are laid out progressively (monotonically increasing offsets) —
// mDataOffset records keyframe-aligned byte/time anchors used to map byte
// ranges to time ranges cheaply.
MP4SampleIndex::MP4SampleIndex(const IndiceWrapper& aIndices,
                               ByteStream* aSource, uint32_t aTrackId,
                               bool aIsAudio, uint32_t aTimeScale)
    : mSource(aSource), mIsAudio(aIsAudio) {
  if (!aIndices.Length()) {
    mMoofParser =
        MakeUnique<MoofParser>(aSource, AsVariant(aTrackId), aIsAudio);
  } else {
    if (!mIndex.SetCapacity(aIndices.Length(), fallible)) {
      // OOM.
      return;
    }
    media::IntervalSet<TimeUnit> intervalTime;
    MediaByteRange intervalRange;
    bool haveSync = false;
    bool progressive = true;
    int64_t lastOffset = 0;
    for (size_t i = 0; i < aIndices.Length(); i++) {
      Indice indice{};
      // NOTE(review): mMoofParser is only created in the other branch above,
      // so it is always null here and timescale is effectively aTimeScale;
      // the ternary looks like dead code — confirm before simplifying.
      int64_t timescale =
          mMoofParser ? AssertedCast<int64_t>(mMoofParser->mMvhd.mTimescale)
                      : aTimeScale;
      if (!aIndices.GetIndice(i, indice)) {
        // Out of index?
        return;
      }
      // Audio is treated as all-sync; leading non-sync video samples are
      // skipped since they can't be decoded without a preceding keyframe.
      if (indice.sync || mIsAudio) {
        haveSync = true;
      }
      if (!haveSync) {
        continue;
      }
      Sample sample;
      sample.mByteRange =
          MediaByteRange(indice.start_offset, indice.end_offset);
      sample.mCompositionRange = MP4Interval<media::TimeUnit>(
          TimeUnit(indice.start_composition, timescale),
          TimeUnit(indice.end_composition, timescale));
      sample.mDecodeTime = TimeUnit(indice.start_decode, timescale);
      sample.mSync = indice.sync || mIsAudio;
      // FIXME: Make this infallible after bug 968520 is done.
      MOZ_ALWAYS_TRUE(mIndex.AppendElement(sample, fallible));
      if (indice.start_offset < lastOffset) {
        NS_WARNING("Chunks in MP4 out of order, expect slow down");
        progressive = false;
      }
      lastOffset = indice.end_offset;

      // Pack audio samples in group of 128.
      if (sample.mSync && progressive && (!mIsAudio || !(i % 128))) {
        if (mDataOffset.Length()) {
          // Close out the previous anchor with the byte/time extents
          // accumulated since it was opened.
          auto& last = mDataOffset.LastElement();
          last.mEndOffset = intervalRange.mEnd;
          NS_ASSERTION(intervalTime.Length() == 1,
                       "Discontinuous samples between keyframes");
          last.mTime.start = intervalTime.GetStart();
          last.mTime.end = intervalTime.GetEnd();
        }
        if (!mDataOffset.AppendElement(
                MP4DataOffset(mIndex.Length() - 1, indice.start_offset),
                fallible)) {
          // OOM.
          return;
        }
        // Start accumulating extents for the new anchor.
        intervalTime = media::IntervalSet<TimeUnit>();
        intervalRange = MediaByteRange();
      }
      intervalTime += media::Interval<TimeUnit>(sample.mCompositionRange.start,
                                                sample.mCompositionRange.end);
      intervalRange = intervalRange.Span(sample.mByteRange);
    }

    if (mDataOffset.Length() && progressive) {
      // Close out the final anchor using the last indice's end offset.
      Indice indice;
      if (!aIndices.GetIndice(aIndices.Length() - 1, indice)) {
        return;
      }
      auto& last = mDataOffset.LastElement();
      last.mEndOffset = indice.end_offset;
      last.mTime =
          MP4Interval<TimeUnit>(intervalTime.GetStart(), intervalTime.GetEnd());
    } else {
      // Non-progressive layout: the anchors are unusable, drop them.
      mDataOffset.Clear();
    }
  }
}
    519 
// Defaulted: members clean up via RAII. Iterators must not outlive the index.
MP4SampleIndex::~MP4SampleIndex() = default;
    521 
// Convenience overload: update the moof index without permitting eviction.
void MP4SampleIndex::UpdateMoofIndex(const MediaByteRangeSet& aByteRanges) {
  UpdateMoofIndex(aByteRanges, false);
}
    525 
// Rebuild the fragmented index from the currently buffered byte ranges.
// When aCanEvict is true and every registered iterator has consumed all
// moofs but the last, earlier moofs are trimmed from the parser and the
// iterators' moof positions are shifted down to match.
void MP4SampleIndex::UpdateMoofIndex(const MediaByteRangeSet& aByteRanges,
                                     bool aCanEvict) {
  if (!mMoofParser) {
    // Non-fragmented file: nothing to rebuild.
    return;
  }
  size_t moofs = mMoofParser->Moofs().Length();
  bool canEvict = aCanEvict && moofs > 1;
  if (canEvict) {
    // Check that we can trim the mMoofParser. We can only do so if all
    // iterators have demuxed all possible samples.
    for (const SampleIterator* iterator : mIterators) {
      // An iterator is safe if it sits exactly at the end of all moofs, or
      // is still within the last moof (which is kept).
      if ((iterator->mCurrentSample == 0 && iterator->mCurrentMoof == moofs) ||
          iterator->mCurrentMoof == moofs - 1) {
        continue;
      }
      canEvict = false;
      break;
    }
  }
  // The parser may clear canEvict if it decided not to trim after all.
  mMoofParser->RebuildFragmentedIndex(aByteRanges, &canEvict);
  if (canEvict) {
    // The moofparser got trimmed. Adjust all registered iterators.
    for (SampleIterator* iterator : mIterators) {
      iterator->mCurrentMoof -= moofs - 1;
    }
  }
}
    553 
// Map buffered byte ranges to the time ranges they make playable. Results are
// memoized against the last queried byte range set. Uses the mDataOffset
// anchors when available (progressive, non-fragmented files); otherwise walks
// the moof/sample indexes with a RangeFinder.
TimeIntervals MP4SampleIndex::ConvertByteRangesToTimeRanges(
    const MediaByteRangeSet& aByteRanges) {
  if (aByteRanges == mLastCachedRanges) {
    return mLastBufferedRanges;
  }
  mLastCachedRanges = aByteRanges;

  if (mDataOffset.Length()) {
    TimeIntervals timeRanges;
    for (const auto& range : aByteRanges) {
      // First anchor starting at or after the range start.
      uint32_t start = mDataOffset.IndexOfFirstElementGt(range.mStart - 1);
      if (!mIsAudio && start == mDataOffset.Length()) {
        continue;
      }
      // First anchor ending beyond the range end.
      uint32_t end = mDataOffset.IndexOfFirstElementGt(
          range.mEnd, MP4DataOffset::EndOffsetComparator());
      if (!mIsAudio && end < start) {
        continue;
      }
      if (mIsAudio && start &&
          range.Intersects(MediaByteRange(mDataOffset[start - 1].mStartOffset,
                                          mDataOffset[start - 1].mEndOffset))) {
        // Check if previous audio data block contains some available samples.
        for (size_t i = mDataOffset[start - 1].mIndex; i < mIndex.Length();
             i++) {
          if (range.ContainsStrict(mIndex[i].mByteRange)) {
            timeRanges += TimeInterval(mIndex[i].mCompositionRange.start,
                                       mIndex[i].mCompositionRange.end);
          }
        }
      }
      if (end > start) {
        // Anchors [start, end) are fully buffered; take their whole spans.
        for (uint32_t i = start; i < end; i++) {
          timeRanges += TimeInterval(mDataOffset[i].mTime.start,
                                     mDataOffset[i].mTime.end);
        }
      }
      if (end < mDataOffset.Length()) {
        // Find samples in partial block contained in the byte range.
        for (size_t i = mDataOffset[end].mIndex;
             i < mIndex.Length() && range.ContainsStrict(mIndex[i].mByteRange);
             i++) {
          timeRanges += TimeInterval(mIndex[i].mCompositionRange.start,
                                     mIndex[i].mCompositionRange.end);
        }
      }
    }
    mLastBufferedRanges = timeRanges;
    return timeRanges;
  }

  RangeFinder rangeFinder(aByteRanges);
  nsTArray<MP4Interval<media::TimeUnit>> timeRanges;
  nsTArray<FallibleTArray<Sample>*> indexes;
  if (mMoofParser) {
    // We take the index out of the moof parser and move it into a local
    // variable so we don't get concurrency issues. It gets freed when we
    // exit this function.
    // NOTE(review): loop index is int while Length() is unsigned — benign in
    // practice, but size_t is used elsewhere in this file; consider aligning.
    for (int i = 0; i < mMoofParser->Moofs().Length(); i++) {
      Moof& moof = mMoofParser->Moofs()[i];

      // We need the entire moof in order to play anything
      if (rangeFinder.Contains(moof.mRange)) {
        if (rangeFinder.Contains(moof.mMdatRange)) {
          MP4Interval<media::TimeUnit>::SemiNormalAppend(timeRanges,
                                                         moof.mTimeRange);
        } else {
          indexes.AppendElement(&moof.mIndex);
        }
      }
    }
  } else {
    indexes.AppendElement(&mIndex);
  }

  bool hasSync = false;
  for (size_t i = 0; i < indexes.Length(); i++) {
    FallibleTArray<Sample>* index = indexes[i];
    for (size_t j = 0; j < index->Length(); j++) {
      const Sample& sample = (*index)[j];
      if (!rangeFinder.Contains(sample.mByteRange)) {
        // We process the index in decode order so we clear hasSync when we hit
        // a range that isn't buffered.
        hasSync = false;
        continue;
      }

      // Only count samples once a keyframe in this buffered run was seen.
      hasSync |= sample.mSync;
      if (!hasSync) {
        continue;
      }

      MP4Interval<media::TimeUnit>::SemiNormalAppend(timeRanges,
                                                     sample.mCompositionRange);
    }
  }

  // This fixes up when the compositon order differs from the byte range order
  nsTArray<MP4Interval<TimeUnit>> timeRangesNormalized;
  MP4Interval<media::TimeUnit>::Normalize(timeRanges, &timeRangesNormalized);
  // convert timeRanges.
  media::TimeIntervals ranges;
  for (size_t i = 0; i < timeRangesNormalized.Length(); i++) {
    ranges += media::TimeInterval(timeRangesNormalized[i].start,
                                  timeRangesNormalized[i].end);
  }
  mLastBufferedRanges = ranges;
  return ranges;
}
    663 
    664 uint64_t MP4SampleIndex::GetEvictionOffset(const TimeUnit& aTime) {
    665  uint64_t offset = std::numeric_limits<uint64_t>::max();
    666  if (mMoofParser) {
    667    // We need to keep the whole moof if we're keeping any of it because the
    668    // parser doesn't keep parsed moofs.
    669    for (int i = 0; i < mMoofParser->Moofs().Length(); i++) {
    670      Moof& moof = mMoofParser->Moofs()[i];
    671 
    672      if (!moof.mTimeRange.Length().IsZero() && moof.mTimeRange.end > aTime) {
    673        offset = std::min(offset, uint64_t(std::min(moof.mRange.mStart,
    674                                                    moof.mMdatRange.mStart)));
    675      }
    676    }
    677  } else {
    678    // We've already parsed and stored the moov so we don't need to keep it.
    679    // All we need to keep is the sample data itself.
    680    for (size_t i = 0; i < mIndex.Length(); i++) {
    681      const Sample& sample = mIndex[i];
    682      if (aTime >= sample.mCompositionRange.end) {
    683        offset = std::min(offset, uint64_t(sample.mByteRange.mEnd));
    684      }
    685    }
    686  }
    687  return offset;
    688 }
    689 
// Track a live iterator so UpdateMoofIndex can rebase it after moof eviction.
void MP4SampleIndex::RegisterIterator(SampleIterator* aIterator) {
  mIterators.AppendElement(aIterator);
}
    693 
// Stop tracking an iterator; called from SampleIterator's destructor.
void MP4SampleIndex::UnregisterIterator(SampleIterator* aIterator) {
  mIterators.RemoveElement(aIterator);
}
    697 
    698 }  // namespace mozilla