tor-browser

The Tor Browser
git clone https://git.dasho.dev/tor-browser.git
Log | Files | Refs | README | LICENSE

ScriptProcessorNode.cpp (19061B)


      1 /* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
      2 /* vim:set ts=2 sw=2 sts=2 et cindent: */
      3 /* This Source Code Form is subject to the terms of the Mozilla Public
      4 * License, v. 2.0. If a copy of the MPL was not distributed with this
      5 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
      6 
      7 #include "ScriptProcessorNode.h"
      8 
      9 #include <deque>
     10 
     11 #include "AudioBuffer.h"
     12 #include "AudioDestinationNode.h"
     13 #include "AudioNodeEngine.h"
     14 #include "AudioNodeTrack.h"
     15 #include "AudioProcessingEvent.h"
     16 #include "Tracing.h"
     17 #include "mozilla/Mutex.h"
     18 #include "mozilla/PodOperations.h"
     19 #include "mozilla/dom/ScriptProcessorNodeBinding.h"
     20 #include "mozilla/dom/ScriptSettings.h"
     21 #include "nsGlobalWindowInner.h"
     22 
     23 namespace mozilla::dom {
     24 
     25 // The maximum latency, in seconds, that we can live with before dropping
     26 // buffers.
     27 static const float MAX_LATENCY_S = 0.5;
     28 
     29 // This class manages a queue of output buffers shared between
     30 // the main thread and the Media Track Graph thread.
     31 class SharedBuffers final {
     32 private:
     33  class OutputQueue final {
     34   public:
     35    explicit OutputQueue(const char* aName) : mMutex(aName) {}
     36 
     37    size_t SizeOfExcludingThis(MallocSizeOf aMallocSizeOf) const
     38        MOZ_REQUIRES(mMutex) {
     39      mMutex.AssertCurrentThreadOwns();
     40 
     41      size_t amount = 0;
     42      for (size_t i = 0; i < mBufferList.size(); i++) {
     43        amount += mBufferList[i].SizeOfExcludingThis(aMallocSizeOf, false);
     44      }
     45 
     46      return amount;
     47    }
     48 
     49    Mutex& Lock() const MOZ_RETURN_CAPABILITY(mMutex) {
     50      return const_cast<OutputQueue*>(this)->mMutex;
     51    }
     52 
     53    size_t ReadyToConsume() const MOZ_REQUIRES(mMutex) {
     54      // Accessed on both main thread and media graph thread.
     55      mMutex.AssertCurrentThreadOwns();
     56      return mBufferList.size();
     57    }
     58 
     59    // Produce one buffer
     60    AudioChunk& Produce() MOZ_REQUIRES(mMutex) {
     61      mMutex.AssertCurrentThreadOwns();
     62      MOZ_ASSERT(NS_IsMainThread());
     63      mBufferList.push_back(AudioChunk());
     64      return mBufferList.back();
     65    }
     66 
     67    // Consumes one buffer.
     68    AudioChunk Consume() MOZ_REQUIRES(mMutex) {
     69      mMutex.AssertCurrentThreadOwns();
     70      MOZ_ASSERT(!NS_IsMainThread());
     71      MOZ_ASSERT(ReadyToConsume() > 0);
     72      AudioChunk front = mBufferList.front();
     73      mBufferList.pop_front();
     74      return front;
     75    }
     76 
     77    // Empties the buffer queue.
     78    void Clear() MOZ_REQUIRES(mMutex) {
     79      mMutex.AssertCurrentThreadOwns();
     80      mBufferList.clear();
     81    }
     82 
     83   private:
     84    typedef std::deque<AudioChunk> BufferList;
     85 
     86    // Synchronizes access to mBufferList.  Note that it's the responsibility
     87    // of the callers to perform the required locking, and we assert that every
     88    // time we access mBufferList.
     89    Mutex mMutex MOZ_UNANNOTATED;
     90    // The list representing the queue.
     91    BufferList mBufferList;
     92  };
     93 
     94 public:
     95  explicit SharedBuffers(float aSampleRate)
     96      : mOutputQueue("SharedBuffers::outputQueue"),
     97        mDelaySoFar(TRACK_TIME_MAX),
     98        mSampleRate(aSampleRate),
     99        mLatency(0.0),
    100        mDroppingBuffers(false) {}
    101 
    102  size_t SizeOfIncludingThis(MallocSizeOf aMallocSizeOf) const {
    103    size_t amount = aMallocSizeOf(this);
    104 
    105    {
    106      MutexAutoLock lock(mOutputQueue.Lock());
    107      amount += mOutputQueue.SizeOfExcludingThis(aMallocSizeOf);
    108    }
    109 
    110    return amount;
    111  }
    112 
    113  // main thread
    114 
    115  // NotifyNodeIsConnected() may be called even when the state has not
    116  // changed.
    117  void NotifyNodeIsConnected(bool aIsConnected) {
    118    MOZ_ASSERT(NS_IsMainThread());
    119    if (!aIsConnected) {
    120      // Reset main thread state for FinishProducingOutputBuffer().
    121      mLatency = 0.0f;
    122      mLastEventTime = TimeStamp();
    123      mDroppingBuffers = false;
    124      // Don't flush the output buffer here because the graph thread may be
    125      // using it now.  The graph thread will flush when it knows it is
    126      // disconnected.
    127    }
    128    mNodeIsConnected = aIsConnected;
    129  }
    130 
    131  void FinishProducingOutputBuffer(const AudioChunk& aBuffer) {
    132    MOZ_ASSERT(NS_IsMainThread());
    133 
    134    if (!mNodeIsConnected) {
    135      // The output buffer is not used, and mLastEventTime will not be
    136      // initialized until the node is re-connected.
    137      return;
    138    }
    139 
    140    TimeStamp now = TimeStamp::Now();
    141 
    142    if (mLastEventTime.IsNull()) {
    143      mLastEventTime = now;
    144    } else {
    145      // When main thread blocking has built up enough so
    146      // |mLatency > MAX_LATENCY_S|, frame dropping starts. It continues until
    147      // the output buffer is completely empty, at which point the accumulated
    148      // latency is also reset to 0.
    149      // It could happen that the output queue becomes empty before the input
    150      // node has fully caught up. In this case there will be events where
    151      // |(now - mLastEventTime)| is very short, making mLatency negative.
    152      // As this happens and the size of |mLatency| becomes greater than
    153      // MAX_LATENCY_S, frame dropping starts again to maintain an as short
    154      // output queue as possible.
    155      float latency = (now - mLastEventTime).ToSeconds();
    156      float bufferDuration = aBuffer.mDuration / mSampleRate;
    157      mLatency += latency - bufferDuration;
    158      mLastEventTime = now;
    159      if (fabs(mLatency) > MAX_LATENCY_S) {
    160        mDroppingBuffers = true;
    161      }
    162    }
    163 
    164    MutexAutoLock lock(mOutputQueue.Lock());
    165    if (mDroppingBuffers) {
    166      if (mOutputQueue.ReadyToConsume()) {
    167        return;
    168      }
    169      mDroppingBuffers = false;
    170      mLatency = 0;
    171    }
    172 
    173    for (uint32_t offset = 0; offset < aBuffer.mDuration;
    174         offset += WEBAUDIO_BLOCK_SIZE) {
    175      AudioChunk& chunk = mOutputQueue.Produce();
    176      chunk = aBuffer;
    177      chunk.SliceTo(offset, offset + WEBAUDIO_BLOCK_SIZE);
    178    }
    179  }
    180 
    181  // graph thread
    182 
    183  AudioChunk GetOutputBuffer() {
    184    MOZ_ASSERT(!NS_IsMainThread());
    185    AudioChunk buffer;
    186 
    187    {
    188      MutexAutoLock lock(mOutputQueue.Lock());
    189      if (mOutputQueue.ReadyToConsume() > 0) {
    190        if (mDelaySoFar == TRACK_TIME_MAX) {
    191          mDelaySoFar = 0;
    192        }
    193        buffer = mOutputQueue.Consume();
    194      } else {
    195        // If we're out of buffers to consume, just output silence
    196        buffer.SetNull(WEBAUDIO_BLOCK_SIZE);
    197        if (mDelaySoFar != TRACK_TIME_MAX) {
    198          // Remember the delay that we just hit
    199          mDelaySoFar += WEBAUDIO_BLOCK_SIZE;
    200        }
    201      }
    202    }
    203 
    204    return buffer;
    205  }
    206 
    207  TrackTime DelaySoFar() const {
    208    MOZ_ASSERT(!NS_IsMainThread());
    209    return mDelaySoFar == TRACK_TIME_MAX ? 0 : mDelaySoFar;
    210  }
    211 
    212  void Flush() {
    213    MOZ_ASSERT(!NS_IsMainThread());
    214    mDelaySoFar = TRACK_TIME_MAX;
    215    {
    216      MutexAutoLock lock(mOutputQueue.Lock());
    217      mOutputQueue.Clear();
    218    }
    219  }
    220 
    221 private:
    222  OutputQueue mOutputQueue;
    223  // How much delay we've seen so far.  This measures the amount of delay
    224  // caused by the main thread lagging behind in producing output buffers.
    225  // TRACK_TIME_MAX means that we have not received our first buffer yet.
    226  // Graph thread only.
    227  TrackTime mDelaySoFar;
    228  // The samplerate of the context.
    229  const float mSampleRate;
    230  // The remaining members are main thread only.
    231  // This is the latency caused by the buffering. If this grows too high, we
    232  // will drop buffers until it is acceptable.
    233  float mLatency;
    234  // This is the time at which we last produced a buffer, to detect if the main
    235  // thread has been blocked.
    236  TimeStamp mLastEventTime;
    237  // True if we should be dropping buffers.
    238  bool mDroppingBuffers;
    239  // True iff the AudioNode has at least one input or output connected.
    240  bool mNodeIsConnected;
    241 };
    242 
// Graph-thread engine backing a ScriptProcessorNode.  It accumulates input
// audio into a buffer of mBufferSize frames, ships each full buffer to the
// main thread (where the audioprocess event handler runs script against it),
// and plays back whatever output chunks the main thread queued in the
// SharedBuffers.
class ScriptProcessorNodeEngine final : public AudioNodeEngine {
 public:
  ScriptProcessorNodeEngine(ScriptProcessorNode* aNode,
                            AudioDestinationNode* aDestination,
                            uint32_t aBufferSize,
                            uint32_t aNumberOfInputChannels)
      : AudioNodeEngine(aNode),
        mDestination(aDestination->Track()),
        mSharedBuffers(new SharedBuffers(mDestination->mSampleRate)),
        mBufferSize(aBufferSize),
        mInputChannelCount(aNumberOfInputChannels),
        mInputWriteIndex(0) {}

  SharedBuffers* GetSharedBuffers() const { return mSharedBuffers.get(); }

  // Indices understood by SetInt32Parameter().
  enum {
    IS_CONNECTED,
  };

  // Receives parameter updates sent from the main thread via
  // SendInt32ParameterToTrack() (see
  // ScriptProcessorNode::UpdateConnectedStatus()).
  void SetInt32Parameter(uint32_t aIndex, int32_t aParam) override {
    switch (aIndex) {
      case IS_CONNECTED:
        mIsConnected = aParam;
        break;
      default:
        NS_ERROR("Bad Int32Parameter");
    }  // End index switch.
  }

  void ProcessBlock(AudioNodeTrack* aTrack, GraphTime aFrom,
                    const AudioBlock& aInput, AudioBlock* aOutput,
                    bool* aFinished) override {
    TRACE("ScriptProcessorNodeEngine::ProcessBlock");

    // This node is not connected to anything. Per spec, we don't fire the
    // onaudioprocess event. We also want to clear out the input and output
    // buffer queue, and output a null buffer.
    if (!mIsConnected) {
      aOutput->SetNull(WEBAUDIO_BLOCK_SIZE);
      mSharedBuffers->Flush();
      mInputWriteIndex = 0;
      return;
    }

    // The input buffer is allocated lazily when non-null input is received.
    if (!aInput.IsNull() && !mInputBuffer) {
      mInputBuffer = ThreadSharedFloatArrayBufferList::Create(
          mInputChannelCount, mBufferSize, fallible);
      if (mInputBuffer && mInputWriteIndex) {
        // Zero leading for null chunks that were skipped.
        for (uint32_t i = 0; i < mInputChannelCount; ++i) {
          float* channelData = mInputBuffer->GetDataForWrite(i);
          PodZero(channelData, mInputWriteIndex);
        }
      }
    }

    // First, record our input buffer, if its allocation succeeded.
    // (On allocation failure inputChannelCount is 0 and the loop is skipped.)
    uint32_t inputChannelCount = mInputBuffer ? mInputBuffer->GetChannels() : 0;
    for (uint32_t i = 0; i < inputChannelCount; ++i) {
      float* writeData = mInputBuffer->GetDataForWrite(i) + mInputWriteIndex;
      if (aInput.IsNull()) {
        // Null chunk: record silence for this block.
        PodZero(writeData, aInput.GetDuration());
      } else {
        MOZ_ASSERT(aInput.GetDuration() == WEBAUDIO_BLOCK_SIZE, "sanity check");
        MOZ_ASSERT(aInput.ChannelCount() == inputChannelCount);
        AudioBlockCopyChannelWithScale(
            static_cast<const float*>(aInput.mChannelData[i]), aInput.mVolume,
            writeData);
      }
    }
    mInputWriteIndex += aInput.GetDuration();

    // Now, see if we have data to output
    // Note that we need to do this before sending the buffer to the main
    // thread so that our delay time is updated.
    *aOutput = mSharedBuffers->GetOutputBuffer();

    if (mInputWriteIndex >= mBufferSize) {
      // A full input buffer is ready; hand it off and start the next one.
      SendBuffersToMainThread(aTrack, aFrom);
      mInputWriteIndex -= mBufferSize;
    }
  }

  bool IsActive() const override {
    // Could return false when !mIsConnected after all output chunks produced
    // by main thread events calling
    // SharedBuffers::FinishProducingOutputBuffer() have been processed.
    return true;
  }

  size_t SizeOfExcludingThis(MallocSizeOf aMallocSizeOf) const override {
    // Not owned:
    // - mDestination (probably)
    size_t amount = AudioNodeEngine::SizeOfExcludingThis(aMallocSizeOf);
    amount += mSharedBuffers->SizeOfIncludingThis(aMallocSizeOf);
    if (mInputBuffer) {
      amount += mInputBuffer->SizeOfIncludingThis(aMallocSizeOf);
    }

    return amount;
  }

  size_t SizeOfIncludingThis(MallocSizeOf aMallocSizeOf) const override {
    return aMallocSizeOf(this) + SizeOfExcludingThis(aMallocSizeOf);
  }

 private:
  // Posts the just-completed input buffer (ownership transferred) to the main
  // thread, where the audioprocess event is dispatched and the handler's
  // output is queued back into mSharedBuffers.
  void SendBuffersToMainThread(AudioNodeTrack* aTrack, GraphTime aFrom) {
    MOZ_ASSERT(!NS_IsMainThread());

    // we now have a full input buffer ready to be sent to the main thread.
    TrackTime playbackTick = mDestination->GraphTimeToTrackTime(aFrom);
    // Add the duration of the current sample
    playbackTick += WEBAUDIO_BLOCK_SIZE;
    // Add the delay caused by the main thread
    playbackTick += mSharedBuffers->DelaySoFar();
    // Compute the playback time in the coordinate system of the destination
    double playbackTime = mDestination->TrackTimeToSeconds(playbackTick);

    // Main-thread runnable that fires the audioprocess event and forwards
    // the (possibly null) output buffer to the SharedBuffers queue.
    class Command final : public Runnable {
     public:
      Command(AudioNodeTrack* aTrack,
              already_AddRefed<ThreadSharedFloatArrayBufferList> aInputBuffer,
              double aPlaybackTime)
          : mozilla::Runnable("Command"),
            mTrack(aTrack),
            mInputBuffer(aInputBuffer),
            mPlaybackTime(aPlaybackTime) {}

      NS_IMETHOD Run() override {
        auto engine = static_cast<ScriptProcessorNodeEngine*>(mTrack->Engine());
        AudioChunk output;
        // Default to a null (silent) buffer of mBufferSize frames; it is
        // only replaced if the event handler actually set an output buffer.
        output.SetNull(engine->mBufferSize);
        {
          auto node =
              static_cast<ScriptProcessorNode*>(engine->NodeMainThread());
          if (!node) {
            // The node is gone; drop the event on the floor.
            return NS_OK;
          }

          if (node->HasListenersFor(nsGkAtoms::onaudioprocess)) {
            DispatchAudioProcessEvent(node, &output);
          }
          // The node may have been destroyed during event dispatch.
        }

        // Append it to our output buffer queue
        engine->GetSharedBuffers()->FinishProducingOutputBuffer(output);

        return NS_OK;
      }

      // Sets up |output| iff buffers are set in event handlers.
      void DispatchAudioProcessEvent(ScriptProcessorNode* aNode,
                                     AudioChunk* aOutput) {
        AudioContext* context = aNode->Context();
        if (!context) {
          return;
        }

        AutoJSAPI jsapi;
        if (NS_WARN_IF(!jsapi.Init(aNode->GetOwnerWindow()))) {
          return;
        }
        JSContext* cx = jsapi.cx();
        uint32_t inputChannelCount = aNode->ChannelCount();

        // Create the input buffer
        RefPtr<AudioBuffer> inputBuffer;
        if (mInputBuffer) {
          ErrorResult rv;
          inputBuffer = AudioBuffer::Create(
              context->GetOwnerWindow(), inputChannelCount, aNode->BufferSize(),
              context->SampleRate(), mInputBuffer.forget(), rv);
          if (rv.Failed()) {
            rv.SuppressException();
            return;
          }
        }

        // Ask content to produce data in the output buffer
        // Note that we always avoid creating the output buffer here, and we try
        // to avoid creating the input buffer as well.  The AudioProcessingEvent
        // class knows how to lazily create them if needed once the script tries
        // to access them.  Otherwise, we may be able to get away without
        // creating them!
        RefPtr<AudioProcessingEvent> event =
            new AudioProcessingEvent(aNode, nullptr, nullptr);
        event->InitEvent(inputBuffer, inputChannelCount, mPlaybackTime);
        aNode->DispatchTrustedEvent(event);

        // Steal the output buffers if they have been set.
        // Don't create a buffer if it hasn't been used to return output;
        // FinishProducingOutputBuffer() will optimize output = null.
        // GetThreadSharedChannelsForRate() may also return null after OOM.
        if (event->HasOutputBuffer()) {
          ErrorResult rv;
          AudioBuffer* buffer = event->GetOutputBuffer(rv);
          // HasOutputBuffer() returning true means that GetOutputBuffer()
          // will not fail.
          MOZ_ASSERT(!rv.Failed());
          *aOutput = buffer->GetThreadSharedChannelsForRate(cx);
          MOZ_ASSERT(aOutput->IsNull() ||
                         aOutput->mBufferFormat == AUDIO_FORMAT_FLOAT32,
                     "AudioBuffers initialized from JS have float data");
        }
      }

     private:
      RefPtr<AudioNodeTrack> mTrack;
      RefPtr<ThreadSharedFloatArrayBufferList> mInputBuffer;
      double mPlaybackTime;
    };

    // mInputBuffer.forget() transfers the accumulated input to the runnable;
    // a fresh buffer will be lazily allocated on the next non-null input.
    RefPtr<Command> command =
        new Command(aTrack, mInputBuffer.forget(), playbackTime);
    AbstractThread::MainThread()->Dispatch(command.forget());
  }

  friend class ScriptProcessorNode;

  // Track of the context's destination node; used to translate graph time
  // into the destination's playback-time coordinate system.
  RefPtr<AudioNodeTrack> mDestination;
  // Output queue shared with the main thread (see SharedBuffers above).
  UniquePtr<SharedBuffers> mSharedBuffers;
  // Accumulates incoming audio until mBufferSize frames are available.
  // Lazily allocated on first non-null input; null after each hand-off.
  RefPtr<ThreadSharedFloatArrayBufferList> mInputBuffer;
  const uint32_t mBufferSize;
  const uint32_t mInputChannelCount;
  // The write index into the current input buffer
  uint32_t mInputWriteIndex;
  // Graph-thread copy of the node's connected state, updated via the
  // IS_CONNECTED parameter.
  bool mIsConnected = false;
};
    474 
    475 ScriptProcessorNode::ScriptProcessorNode(AudioContext* aContext,
    476                                         uint32_t aBufferSize,
    477                                         uint32_t aNumberOfInputChannels,
    478                                         uint32_t aNumberOfOutputChannels)
    479    : AudioNode(aContext, aNumberOfInputChannels,
    480                mozilla::dom::ChannelCountMode::Explicit,
    481                mozilla::dom::ChannelInterpretation::Speakers),
    482      mBufferSize(aBufferSize ? aBufferSize
    483                              :  // respect what the web developer requested
    484                      4096)      // choose our own buffer size -- 4KB for now
    485      ,
    486      mNumberOfOutputChannels(aNumberOfOutputChannels) {
    487  MOZ_ASSERT(BufferSize() % WEBAUDIO_BLOCK_SIZE == 0, "Invalid buffer size");
    488  ScriptProcessorNodeEngine* engine = new ScriptProcessorNodeEngine(
    489      this, aContext->Destination(), BufferSize(), aNumberOfInputChannels);
    490  mTrack = AudioNodeTrack::Create(
    491      aContext, engine, AudioNodeTrack::NO_TRACK_FLAGS, aContext->Graph());
    492 }
    493 
// Defaulted: no ScriptProcessorNode-specific cleanup is required.
ScriptProcessorNode::~ScriptProcessorNode() = default;
    495 
    496 size_t ScriptProcessorNode::SizeOfExcludingThis(
    497    MallocSizeOf aMallocSizeOf) const {
    498  size_t amount = AudioNode::SizeOfExcludingThis(aMallocSizeOf);
    499  return amount;
    500 }
    501 
    502 size_t ScriptProcessorNode::SizeOfIncludingThis(
    503    MallocSizeOf aMallocSizeOf) const {
    504  return aMallocSizeOf(this) + SizeOfExcludingThis(aMallocSizeOf);
    505 }
    506 
    507 void ScriptProcessorNode::EventListenerAdded(nsAtom* aType) {
    508  AudioNode::EventListenerAdded(aType);
    509  if (aType == nsGkAtoms::onaudioprocess) {
    510    UpdateConnectedStatus();
    511  }
    512 }
    513 
    514 void ScriptProcessorNode::EventListenerRemoved(nsAtom* aType) {
    515  AudioNode::EventListenerRemoved(aType);
    516  if (aType == nsGkAtoms::onaudioprocess && mTrack) {
    517    UpdateConnectedStatus();
    518  }
    519 }
    520 
// Creates the JS reflector via the generated WebIDL binding.
JSObject* ScriptProcessorNode::WrapObject(JSContext* aCx,
                                          JS::Handle<JSObject*> aGivenProto) {
  return ScriptProcessorNode_Binding::Wrap(aCx, this, aGivenProto);
}
    525 
    526 void ScriptProcessorNode::UpdateConnectedStatus() {
    527  bool isConnected =
    528      mHasPhantomInput || !(OutputNodes().IsEmpty() &&
    529                            OutputParams().IsEmpty() && InputNodes().IsEmpty());
    530 
    531  // Events are queued even when there is no listener because a listener
    532  // may be added while events are in the queue.
    533  SendInt32ParameterToTrack(ScriptProcessorNodeEngine::IS_CONNECTED,
    534                            isConnected);
    535 
    536  if (isConnected && HasListenersFor(nsGkAtoms::onaudioprocess)) {
    537    MarkActive();
    538  } else {
    539    MarkInactive();
    540  }
    541 
    542  // MarkInactive above might have released this node, check if it has a track.
    543  if (!mTrack) {
    544    return;
    545  }
    546 
    547  auto engine = static_cast<ScriptProcessorNodeEngine*>(mTrack->Engine());
    548  engine->GetSharedBuffers()->NotifyNodeIsConnected(isConnected);
    549 }
    550 
    551 }  // namespace mozilla::dom