tor-browser

The Tor Browser
git clone https://git.dasho.dev/tor-browser.git
Log | Files | Refs | README | LICENSE

AndroidDataEncoder.cpp (18397B)


      1 /* This Source Code Form is subject to the terms of the Mozilla Public
      2 * License, v. 2.0. If a copy of the MPL was not distributed with this
      3 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
      4 
      5 #include "AndroidDataEncoder.h"
      6 
      7 #include "AnnexB.h"
      8 #include "H264.h"
      9 #include "ImageContainer.h"
     10 #include "MediaData.h"
     11 #include "MediaInfo.h"
     12 #include "libyuv/convert_from.h"
     13 #include "mozilla/Logging.h"
     14 #include "nsThreadUtils.h"
     15 
     16 namespace mozilla {
     17 
extern LazyLogModule sPEMLog;
// Debug-level log line tagged with this encoder instance and calling method.
#define AND_ENC_LOG(arg, ...)                \
 MOZ_LOG(sPEMLog, mozilla::LogLevel::Debug, \
         ("AndroidDataEncoder(%p)::%s: " arg, this, __func__, ##__VA_ARGS__))
// Error-level log line tagged with this encoder instance and calling method.
#define AND_ENC_LOGE(arg, ...)               \
 MOZ_LOG(sPEMLog, mozilla::LogLevel::Error, \
         ("AndroidDataEncoder(%p)::%s: " arg, this, __func__, ##__VA_ARGS__))

// If an asynchronous callback has recorded an error in mError, consume it and
// reject the current EncodePromise with it. Used at the top of the Process*
// methods that run on the task queue.
#define REJECT_IF_ERROR()                                                \
 do {                                                                   \
   if (mError) {                                                        \
     auto error = mError.value();                                       \
     mError.reset();                                                    \
     return EncodePromise::CreateAndReject(std::move(error), __func__); \
   }                                                                    \
 } while (0)
     34 
     35 RefPtr<MediaDataEncoder::InitPromise> AndroidDataEncoder::Init() {
     36  // Sanity-check the input size for Android software encoder fails to do it.
     37  if (mConfig.mSize.width == 0 || mConfig.mSize.height == 0) {
     38    return InitPromise::CreateAndReject(NS_ERROR_ILLEGAL_VALUE, __func__);
     39  }
     40 
     41  return InvokeAsync(mTaskQueue, this, __func__,
     42                     &AndroidDataEncoder::ProcessInit);
     43 }
     44 
     45 static const char* MimeTypeOf(CodecType aCodec) {
     46  switch (aCodec) {
     47    case CodecType::H264:
     48      return "video/avc";
     49    case CodecType::VP8:
     50      return "video/x-vnd.on2.vp8";
     51    case CodecType::VP9:
     52      return "video/x-vnd.on2.vp9";
     53    default:
     54      return "";
     55  }
     56 }
     57 
     58 using FormatResult = Result<java::sdk::MediaFormat::LocalRef, MediaResult>;
     59 
     60 FormatResult ToMediaFormat(const EncoderConfig& aConfig) {
     61  nsresult rv = NS_OK;
     62  java::sdk::MediaFormat::LocalRef format;
     63  rv = java::sdk::MediaFormat::CreateVideoFormat(MimeTypeOf(aConfig.mCodec),
     64                                                 aConfig.mSize.width,
     65                                                 aConfig.mSize.height, &format);
     66  NS_ENSURE_SUCCESS(
     67      rv, FormatResult(MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
     68                                   "fail to create Java MediaFormat object")));
     69 
     70  rv =
     71      format->SetInteger(java::sdk::MediaFormat::KEY_BITRATE_MODE, 2 /* CBR */);
     72  NS_ENSURE_SUCCESS(rv, FormatResult(MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
     73                                                 "fail to set bitrate mode")));
     74 
     75  rv = format->SetInteger(java::sdk::MediaFormat::KEY_BIT_RATE,
     76                          AssertedCast<int>(aConfig.mBitrate));
     77  NS_ENSURE_SUCCESS(rv, FormatResult(MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
     78                                                 "fail to set bitrate")));
     79 
     80  // COLOR_FormatYUV420SemiPlanar(NV12) is the most widely supported
     81  // format.
     82  rv = format->SetInteger(java::sdk::MediaFormat::KEY_COLOR_FORMAT, 0x15);
     83  NS_ENSURE_SUCCESS(rv, FormatResult(MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
     84                                                 "fail to set color format")));
     85 
     86  rv = format->SetInteger(java::sdk::MediaFormat::KEY_FRAME_RATE,
     87                          aConfig.mFramerate);
     88  NS_ENSURE_SUCCESS(rv, FormatResult(MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
     89                                                 "fail to set frame rate")));
     90 
     91  // Ensure interval >= 1. A negative value means no key frames are
     92  // requested after the first frame. A zero value means a stream
     93  // containing all key frames is requested.
     94  int32_t intervalInSec = AssertedCast<int32_t>(
     95      std::max<size_t>(1, aConfig.mKeyframeInterval / aConfig.mFramerate));
     96  rv = format->SetInteger(java::sdk::MediaFormat::KEY_I_FRAME_INTERVAL,
     97                          intervalInSec);
     98  NS_ENSURE_SUCCESS(rv,
     99                    FormatResult(MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
    100                                             "fail to set I-frame interval")));
    101 
    102  return format;
    103 }
    104 
// Runs on the task queue: creates the reusable input BufferInfo, converts the
// config into a Java MediaFormat, wires up the native callback bridge and
// instantiates the Java encoder proxy. Rejects on any allocation/JNI failure.
RefPtr<MediaDataEncoder::InitPromise> AndroidDataEncoder::ProcessInit() {
 AssertOnTaskQueue();
 MOZ_ASSERT(!mJavaEncoder);

 // A single BufferInfo object is reused for every input buffer we submit.
 java::sdk::MediaCodec::BufferInfo::LocalRef bufferInfo;
 if (NS_FAILED(java::sdk::MediaCodec::BufferInfo::New(&bufferInfo)) ||
     !bufferInfo) {
   return InitPromise::CreateAndReject(NS_ERROR_OUT_OF_MEMORY, __func__);
 }
 mInputBufferInfo = bufferInfo;

 FormatResult result = ToMediaFormat(mConfig);
 if (result.isErr()) {
   return InitPromise::CreateAndReject(result.unwrapErr(), __func__);
 }
 mFormat = result.unwrap();

 // Register native methods.
 JavaCallbacksSupport::Init();

 mJavaCallbacks = java::CodecProxy::NativeCallbacks::New();
 if (!mJavaCallbacks) {
   return InitPromise::CreateAndReject(
       MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
                   "cannot create Java callback object"),
       __func__);
 }
 // Route Java-side callbacks (output samples, errors) back into this object.
 JavaCallbacksSupport::AttachNative(
     mJavaCallbacks, mozilla::MakeUnique<CallbacksSupport>(this));

 mJavaEncoder = java::CodecProxy::Create(true /* encoder */, mFormat, nullptr,
                                         mJavaCallbacks, u""_ns);
 if (!mJavaEncoder) {
   return InitPromise::CreateAndReject(
       MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
                   "cannot create Java encoder object"),
       __func__);
 }

 mIsHardwareAccelerated = mJavaEncoder->IsHardwareAccelerated();
 mDrainState = DrainState::DRAINABLE;

 return InitPromise::CreateAndResolve(true, __func__);
}
    149 
    150 RefPtr<MediaDataEncoder::EncodePromise> AndroidDataEncoder::Encode(
    151    const MediaData* aSample) {
    152  RefPtr<AndroidDataEncoder> self = this;
    153  MOZ_ASSERT(aSample != nullptr);
    154 
    155  return InvokeAsync(
    156      mTaskQueue, __func__,
    157      [self, sample = RefPtr<MediaData>(const_cast<MediaData*>(aSample))]() {
    158        return self->ProcessEncode({sample});
    159      });
    160 }
    161 
    162 // TODO(Bug 1984936): For realtime mode, resolve the promise after the first
    163 // sample's result is available, then continue processing remaining samples.
    164 // This allows the caller to keep submitting new samples while the encoder
    165 // handles pending ones.
    166 RefPtr<MediaDataEncoder::EncodePromise> AndroidDataEncoder::Encode(
    167    nsTArray<RefPtr<MediaData>>&& aSamples) {
    168  RefPtr<AndroidDataEncoder> self = this;
    169  MOZ_ASSERT(!aSamples.IsEmpty());
    170 
    171  return InvokeAsync(mTaskQueue, __func__,
    172                     [self, samples = std::move(aSamples)]() mutable {
    173                       return self->ProcessEncode(std::move(samples));
    174                     });
    175 }
    176 
// Converts an I420 (planar YUV 4:2:0) video sample into aYUVBuffer laid out
// as NV12 (Y plane followed by interleaved CbCr), matching the
// COLOR_FormatYUV420SemiPlanar the codec was configured with. aStride and
// aYPlaneHeight, when non-zero, override the sample's own geometry (they come
// from MediaCodec's input format). Returns a Java ByteBuffer view over
// aYUVBuffer, or nullptr if libyuv fails.
static jni::ByteBuffer::LocalRef ConvertI420ToNV12Buffer(
   RefPtr<const VideoData>& aSample, RefPtr<MediaByteBuffer>& aYUVBuffer,
   int aStride, int aYPlaneHeight) {
 const layers::PlanarYCbCrImage* image = aSample->mImage->AsPlanarYCbCrImage();
 MOZ_ASSERT(image);
 const layers::PlanarYCbCrData* yuv = image->GetData();
 auto ySize = yuv->YDataSize();
 auto cbcrSize = yuv->CbCrDataSize();
 // If we have a stride or height passed in from the Codec we need to use
 // those.
 auto yStride = aStride != 0 ? aStride : yuv->mYStride;
 auto height = aYPlaneHeight != 0 ? aYPlaneHeight : ySize.height;
 size_t yLength = yStride * height;
 // Total bytes: full Y plane plus the interleaved CbCr plane. The final CbCr
 // row only needs cbcrSize.width * 2 bytes rather than a full stride.
 size_t length =
     yLength + yStride * (cbcrSize.height - 1) + cbcrSize.width * 2;

 // aYUVBuffer is a scratch buffer reused across frames; (re)allocate only
 // when it is missing or too small.
 if (!aYUVBuffer || aYUVBuffer->Capacity() < length) {
   aYUVBuffer = MakeRefPtr<MediaByteBuffer>(length);
   aYUVBuffer->SetLength(length);
 } else {
   MOZ_ASSERT(aYUVBuffer->Length() >= length);
 }

 if (libyuv::I420ToNV12(yuv->mYChannel, yuv->mYStride, yuv->mCbChannel,
                        yuv->mCbCrStride, yuv->mCrChannel, yuv->mCbCrStride,
                        aYUVBuffer->Elements(), yStride,
                        aYUVBuffer->Elements() + yLength, yStride, ySize.width,
                        ySize.height) != 0) {
   return nullptr;
 }

 return jni::ByteBuffer::New(aYUVBuffer->Elements(), aYUVBuffer->Length());
}
    210 
// Runs on the task queue: converts each sample to NV12, feeds it to the Java
// encoder, then resolves with whatever encoded output the asynchronous
// callbacks have produced so far (possibly an empty array — output is async).
RefPtr<MediaDataEncoder::EncodePromise> AndroidDataEncoder::ProcessEncode(
   nsTArray<RefPtr<MediaData>>&& aSamples) {
 AssertOnTaskQueue();

 // Surface any error reported by a callback since the last call.
 REJECT_IF_ERROR();

 // TODO(Bug 1984936): Looping here for large batches is inefficient, as it can
 // take excessive shared memory and file descriptors due to passing both input
 // and output buffers between the content and media codec processes.
 for (auto& s : aSamples) {
   RefPtr<const VideoData> sample(s->As<const VideoData>());
   MOZ_ASSERT(sample);

   // Remembered so ProcessOutput() can stamp a duration on output samples.
   mInputSampleDuration = s->mDuration;

   // Bug 1789846: Check with the Encoder if MediaCodec has a stride or height
   // value to use.
   jni::ByteBuffer::LocalRef buffer = ConvertI420ToNV12Buffer(
       sample, mYUVBuffer, mJavaEncoder->GetInputFormatStride(),
       mJavaEncoder->GetInputFormatYPlaneHeight());
   if (!buffer) {
     return EncodePromise::CreateAndReject(NS_ERROR_ILLEGAL_INPUT, __func__);
   }

   // Keyframe requests are signalled with BUFFER_FLAG_SYNC_FRAME.
   if (s->mKeyframe) {
     mInputBufferInfo->Set(0, AssertedCast<int32_t>(mYUVBuffer->Length()),
                           s->mTime.ToMicroseconds(),
                           java::sdk::MediaCodec::BUFFER_FLAG_SYNC_FRAME);
   } else {
     mInputBufferInfo->Set(0, AssertedCast<int32_t>(mYUVBuffer->Length()),
                           s->mTime.ToMicroseconds(), 0);
   }

   mJavaEncoder->Input(buffer, mInputBufferInfo, nullptr);
 }

 // Hand back anything accumulated by ProcessOutput() so far.
 if (mEncodedData.Length() > 0) {
   EncodedData pending = std::move(mEncodedData);
   return EncodePromise::CreateAndResolve(std::move(pending), __func__);
 }
 return EncodePromise::CreateAndResolve(EncodedData(), __func__);
}
    253 
    254 class AutoRelease final {
    255 public:
    256  AutoRelease(java::CodecProxy::Param aEncoder, java::Sample::Param aSample)
    257      : mEncoder(aEncoder), mSample(aSample) {}
    258 
    259  ~AutoRelease() { mEncoder->ReleaseOutput(mSample, false); }
    260 
    261 private:
    262  java::CodecProxy::GlobalRef mEncoder;
    263  java::Sample::GlobalRef mSample;
    264 };
    265 
    266 static bool IsAVCC(EncoderConfig::CodecSpecific& aCodecSpecific) {
    267  return aCodecSpecific.is<H264Specific>() &&
    268         aCodecSpecific.as<H264Specific>().mFormat == H264BitStreamFormat::AVC;
    269 }
    270 
    271 static RefPtr<MediaByteBuffer> ExtractCodecConfig(
    272    java::SampleBuffer::Param aBuffer, const int32_t aOffset,
    273    const int32_t aSize, const bool aAsAVCC) {
    274  auto config = MakeRefPtr<MediaByteBuffer>(aSize);
    275  config->SetLength(aSize);
    276  jni::ByteBuffer::LocalRef dest =
    277      jni::ByteBuffer::New(config->Elements(), aSize);
    278  aBuffer->WriteToByteBuffer(dest, aOffset, aSize);
    279  if (!aAsAVCC) {
    280    return config;
    281  }
    282  return AnnexB::ExtractExtraDataForAVCC(*config);
    283 }
    284 
    285 void AndroidDataEncoder::ProcessOutput(
    286    java::Sample::GlobalRef&& aSample,
    287    java::SampleBuffer::GlobalRef&& aBuffer) {
    288  if (!mTaskQueue->IsCurrentThreadIn()) {
    289    nsresult rv =
    290        mTaskQueue->Dispatch(NewRunnableMethod<java::Sample::GlobalRef&&,
    291                                               java::SampleBuffer::GlobalRef&&>(
    292            "AndroidDataEncoder::ProcessOutput", this,
    293            &AndroidDataEncoder::ProcessOutput, std::move(aSample),
    294            std::move(aBuffer)));
    295    MOZ_DIAGNOSTIC_ASSERT(NS_SUCCEEDED(rv));
    296    (void)rv;
    297    return;
    298  }
    299  AssertOnTaskQueue();
    300 
    301  if (!mJavaEncoder) {
    302    return;
    303  }
    304 
    305  AutoRelease releaseSample(mJavaEncoder, aSample);
    306 
    307  java::sdk::MediaCodec::BufferInfo::LocalRef info = aSample->Info();
    308  MOZ_ASSERT(info);
    309 
    310  int32_t flags;
    311  bool ok = NS_SUCCEEDED(info->Flags(&flags));
    312  bool isEOS = !!(flags & java::sdk::MediaCodec::BUFFER_FLAG_END_OF_STREAM);
    313 
    314  int32_t offset;
    315  ok &= NS_SUCCEEDED(info->Offset(&offset));
    316 
    317  int32_t size;
    318  ok &= NS_SUCCEEDED(info->Size(&size));
    319 
    320  int64_t presentationTimeUs;
    321  ok &= NS_SUCCEEDED(info->PresentationTimeUs(&presentationTimeUs));
    322 
    323  if (!ok) {
    324    return;
    325  }
    326 
    327  if (size > 0) {
    328    if ((flags & java::sdk::MediaCodec::BUFFER_FLAG_CODEC_CONFIG) != 0) {
    329      auto configData = ExtractCodecConfig(aBuffer, offset, size,
    330                                           IsAVCC(mConfig.mCodecSpecific));
    331      if (configData) {
    332        mConfigData = std::move(configData);
    333      } else {
    334        MOZ_ASSERT_UNREACHABLE("Bad config data!");
    335        Error(MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
    336                          "fail to extract codec config"_ns));
    337      }
    338      return;
    339    }
    340    RefPtr<MediaRawData> output;
    341    if (mConfig.mCodec == CodecType::H264) {
    342      output = GetOutputDataH264(
    343          aBuffer, offset, size,
    344          !!(flags & java::sdk::MediaCodec::BUFFER_FLAG_KEY_FRAME));
    345    } else {
    346      output = GetOutputData(
    347          aBuffer, offset, size,
    348          !!(flags & java::sdk::MediaCodec::BUFFER_FLAG_KEY_FRAME));
    349    }
    350    output->mEOS = isEOS;
    351    output->mTime = media::TimeUnit::FromMicroseconds(presentationTimeUs);
    352    output->mDuration = mInputSampleDuration;
    353    mEncodedData.AppendElement(std::move(output));
    354  }
    355 
    356  if (isEOS) {
    357    mDrainState = DrainState::DRAINED;
    358  }
    359  if (!mDrainPromise.IsEmpty()) {
    360    EncodedData pending = std::move(mEncodedData);
    361    mDrainPromise.Resolve(std::move(pending), __func__);
    362  }
    363 }
    364 
    365 RefPtr<MediaRawData> AndroidDataEncoder::GetOutputData(
    366    java::SampleBuffer::Param aBuffer, const int32_t aOffset,
    367    const int32_t aSize, const bool aIsKeyFrame) {
    368  // Copy frame data from Java buffer.
    369  auto output = MakeRefPtr<MediaRawData>();
    370  UniquePtr<MediaRawDataWriter> writer(output->CreateWriter());
    371  if (!writer->SetSize(aSize)) {
    372    AND_ENC_LOGE("fail to allocate output buffer");
    373    return nullptr;
    374  }
    375 
    376  jni::ByteBuffer::LocalRef buf = jni::ByteBuffer::New(writer->Data(), aSize);
    377  aBuffer->WriteToByteBuffer(buf, aOffset, aSize);
    378  output->mKeyframe = aIsKeyFrame;
    379 
    380  return output;
    381 }
    382 
// AVC/H.264 frame can be in avcC or Annex B and needs extra conversion steps.
// For Annex-B output, the stashed SPS/PPS (mConfigData) is prepended in-band
// before each key frame; for AVCC it is attached as extradata during the
// ConvertSampleToAVCC step instead. Returns nullptr on allocation or
// conversion failure.
RefPtr<MediaRawData> AndroidDataEncoder::GetOutputDataH264(
   java::SampleBuffer::Param aBuffer, const int32_t aOffset,
   const int32_t aSize, const bool aIsKeyFrame) {
 auto output = MakeRefPtr<MediaRawData>();

 size_t prependSize = 0;
 RefPtr<MediaByteBuffer> avccHeader;
 bool asAVCC = IsAVCC(mConfig.mCodecSpecific);
 if (aIsKeyFrame && mConfigData) {
   if (asAVCC) {
     avccHeader = mConfigData;
   } else {
     prependSize = mConfigData->Length();
   }
 }

 UniquePtr<MediaRawDataWriter> writer(output->CreateWriter());
 if (!writer->SetSize(prependSize + aSize)) {
   AND_ENC_LOGE("fail to allocate output buffer");
   return nullptr;
 }

 // In-band config first (Annex B key frames only), then the frame itself.
 if (prependSize > 0) {
   PodCopy(writer->Data(), mConfigData->Elements(), prependSize);
 }

 jni::ByteBuffer::LocalRef buf =
     jni::ByteBuffer::New(writer->Data() + prependSize, aSize);
 aBuffer->WriteToByteBuffer(buf, aOffset, aSize);

 // Rewrites Annex-B start codes to AVCC length prefixes and attaches the
 // extradata header when AVCC output was requested.
 if (asAVCC && !AnnexB::ConvertSampleToAVCC(output, avccHeader)) {
   AND_ENC_LOGE("fail to convert annex-b sample to AVCC");
   return nullptr;
 }

 output->mKeyframe = aIsKeyFrame;

 return output;
}
    423 
    424 RefPtr<MediaDataEncoder::EncodePromise> AndroidDataEncoder::Drain() {
    425  return InvokeAsync(mTaskQueue, this, __func__,
    426                     &AndroidDataEncoder::ProcessDrain);
    427 }
    428 
// Runs on the task queue. Drain state machine (fallthroughs intentional):
//  DRAINABLE -> queue an EOS input buffer, then continue as DRAINING;
//  DRAINING  -> if no output yet, park the caller on mDrainPromise (resolved
//               later by ProcessOutput); otherwise continue as DRAINED;
//  DRAINED   -> resolve immediately with whatever encoded data remains.
RefPtr<MediaDataEncoder::EncodePromise> AndroidDataEncoder::ProcessDrain() {
 AssertOnTaskQueue();
 MOZ_ASSERT(mJavaEncoder);
 MOZ_ASSERT(mDrainPromise.IsEmpty());

 REJECT_IF_ERROR();

 switch (mDrainState) {
   case DrainState::DRAINABLE:
     // An empty input buffer flagged END_OF_STREAM asks the codec to flush.
     mInputBufferInfo->Set(0, 0, -1,
                           java::sdk::MediaCodec::BUFFER_FLAG_END_OF_STREAM);
     mJavaEncoder->Input(nullptr, mInputBufferInfo, nullptr);
     mDrainState = DrainState::DRAINING;
     [[fallthrough]];
   case DrainState::DRAINING:
     if (mEncodedData.IsEmpty()) {
       return mDrainPromise.Ensure(__func__);  // Pending promise.
     }
     [[fallthrough]];
   case DrainState::DRAINED:
     if (mEncodedData.Length() > 0) {
       EncodedData pending = std::move(mEncodedData);
       return EncodePromise::CreateAndResolve(std::move(pending), __func__);
     } else {
       return EncodePromise::CreateAndResolve(EncodedData(), __func__);
     }
 }
}
    457 
    458 RefPtr<ShutdownPromise> AndroidDataEncoder::Shutdown() {
    459  return InvokeAsync(mTaskQueue, this, __func__,
    460                     &AndroidDataEncoder::ProcessShutdown);
    461 }
    462 
// Runs on the task queue: releases the Java encoder proxy, detaches the
// native callback bridge, and drops the cached format. Each step is guarded
// so partial teardown is safe.
RefPtr<ShutdownPromise> AndroidDataEncoder::ProcessShutdown() {
 AssertOnTaskQueue();
 if (mJavaEncoder) {
   mJavaEncoder->Release();
   mJavaEncoder = nullptr;
 }

 if (mJavaCallbacks) {
   // Cancel() stops further callbacks before the native side is disposed.
   JavaCallbacksSupport::GetNative(mJavaCallbacks)->Cancel();
   JavaCallbacksSupport::DisposeNative(mJavaCallbacks);
   mJavaCallbacks = nullptr;
 }

 mFormat = nullptr;

 return ShutdownPromise::CreateAndResolve(true, __func__);
}
    480 
    481 RefPtr<GenericPromise> AndroidDataEncoder::SetBitrate(uint32_t aBitsPerSec) {
    482  RefPtr<AndroidDataEncoder> self(this);
    483  return InvokeAsync(mTaskQueue, __func__, [self, aBitsPerSec]() {
    484    self->mJavaEncoder->SetBitrate(AssertedCast<int>(aBitsPerSec));
    485    return GenericPromise::CreateAndResolve(true, __func__);
    486  });
    487 }
    488 
    489 void AndroidDataEncoder::Error(const MediaResult& aError) {
    490  if (!mTaskQueue->IsCurrentThreadIn()) {
    491    nsresult rv = mTaskQueue->Dispatch(NewRunnableMethod<MediaResult>(
    492        "AndroidDataEncoder::Error", this, &AndroidDataEncoder::Error, aError));
    493    MOZ_DIAGNOSTIC_ASSERT(NS_SUCCEEDED(rv));
    494    (void)rv;
    495    return;
    496  }
    497  AssertOnTaskQueue();
    498 
    499  mError = Some(aError);
    500 }
    501 
// Input-buffer bookkeeping callback from the Java codec; this encoder has
// nothing to do when an input buffer is consumed.
void AndroidDataEncoder::CallbacksSupport::HandleInput(int64_t aTimestamp,
                                                      bool aProcessed) {}
    504 
    505 void AndroidDataEncoder::CallbacksSupport::HandleOutput(
    506    java::Sample::Param aSample, java::SampleBuffer::Param aBuffer) {
    507  MutexAutoLock lock(mMutex);
    508  if (mEncoder) {
    509    mEncoder->ProcessOutput(aSample, aBuffer);
    510  }
    511 }
    512 
// Output-format-change notification from the Java codec; deliberately
// ignored by this encoder.
void AndroidDataEncoder::CallbacksSupport::HandleOutputFormatChanged(
   java::sdk::MediaFormat::Param aFormat) {}
    515 
    516 void AndroidDataEncoder::CallbacksSupport::HandleError(
    517    const MediaResult& aError) {
    518  MutexAutoLock lock(mMutex);
    519  if (mEncoder) {
    520    mEncoder->Error(aError);
    521  }
    522 }
    523 
    524 }  // namespace mozilla
    525 
    526 #undef AND_ENC_LOG
    527 #undef AND_ENC_LOGE