tor-browser

The Tor Browser
git clone https://git.dasho.dev/tor-browser.git
Log | Files | Refs | README | LICENSE

ONNX.webidl (2924B)


      1 /* -*- Mode: IDL; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
      2 /* This Source Code Form is subject to the terms of the Mozilla Public
      3 * License, v. 2.0. If a copy of the MPL was not distributed with this file,
      4 * You can obtain one at http://mozilla.org/MPL/2.0/.
      5 */
      6 
      7 [Func="InferenceSession::InInferenceProcess", Exposed=(DedicatedWorker,Window)]
      8 interface Tensor {
        // A typed multi-dimensional value passed to / returned from an
        // InferenceSession. NOTE(review): the shape of this interface mirrors
        // the Tensor type of the ONNX Runtime JS API — presumed from naming;
        // confirm against the C++ implementation.
        //
        // constructor: `type` names the element type, `data` supplies the
        // elements either as a typed-array view or a plain sequence, and
        // `dims` gives the shape. [Throws]: construction can fail on invalid
        // arguments.
      9  [Throws]
     10  constructor(UTF8String type,
     11              (ArrayBufferView or sequence<any>) data,
     12              sequence<long> dims);
     13 
        // Shape of the tensor. [Cached, Pure]: the reflected sequence is
        // cached by the bindings layer until invalidated.
     14  [Cached, Pure]
     15  attribute sequence<long> dims;
        // Element type name supplied at construction.
     16  readonly attribute UTF8String type;
        // Raw element storage as a typed-array view.
     17  readonly attribute ArrayBufferView data;
        // Where the tensor data currently lives (see TensorDataLocation).
     18  readonly attribute TensorDataLocation location;
        // Resolves with the tensor contents. NOTE(review): `releaseData`
        // presumably frees the internal buffer after the copy — confirm the
        // release semantics in the implementation.
     19  Promise<any> getData(optional boolean releaseData);
        // Frees the underlying storage; the tensor must not be used after.
     20  undefined dispose();
     21 };
     22 
     23 // Tensor Data Location
        // Enumerates where a Tensor's backing data resides. NOTE(review):
        // the string values match the data-location strings of the ONNX
        // Runtime JS API — presumed from naming; confirm.
     24 enum TensorDataLocation {
     25  "none",
     26  "cpu",
     27  "cpu-pinned",
     28  "texture",
     29  "gpu-buffer",
     30  "ml-tensor"
     31 };
     32 
     33 // Input/Output types
        // Feeds (inputs) map input names to Tensors. Fetches (requested
        // outputs) are either a list of output names or a name->Tensor map
        // whose nullable values presumably let the implementation allocate
        // the output buffer (NOTE(review): semantics inferred from the ONNX
        // Runtime JS API — confirm). Results map output names to Tensors.
     34 typedef record<UTF8String, Tensor> InferenceSessionTensorMapType;
     35 typedef record<UTF8String, Tensor?> InferenceSessionNullableTensorMapType;
     36 typedef InferenceSessionTensorMapType InferenceSessionFeedsType;
     37 typedef (sequence<UTF8String> or InferenceSessionNullableTensorMapType) InferenceSessionFetchesType;
     38 typedef InferenceSessionTensorMapType InferenceSessionReturnType;
     39 
        // Per-run options for InferenceSession.run(). NOTE(review): mirrors
        // ONNX Runtime's RunOptions — presumed from naming; confirm.
     40 dictionary InferenceSessionRunOptions {
        // Minimum severity to log for this run (0-4). NOTE(review): defaults
        // to 0 here but to 4 in InferenceSessionSessionOptions — confirm the
        // asymmetry is intentional.
     41  unsigned short logSeverityLevel = 0; // 0 - 4
     42  unsigned long logVerbosityLevel = 0;
        // NOTE(review): ONNX Runtime's RunOptions.terminate defaults to
        // false; a default of true here looks suspicious — verify against
        // the implementation before relying on it.
     43  boolean terminate = true;
        // Tag attached to log entries produced by this run.
     44  UTF8String tag = "";
     45 };
     46 
        // Session-creation options for InferenceSession.create().
        // NOTE(review): mirrors ONNX Runtime's SessionOptions — presumed
        // from naming; confirm individual field semantics against the
        // implementation.
     47 dictionary InferenceSessionSessionOptions {
        // Execution providers to try, in priority order; entry schema is not
        // visible from this file (any).
     48  sequence<any> executionProviders;
        // Thread pool sizes; 0 presumably lets the runtime choose — confirm.
     49  unsigned long intraOpNumThreads = 0;
     50  unsigned long interOpNumThreads = 0;
        // Concrete values for named free (symbolic) dimensions in the model.
     51  record<UTF8String, unsigned long> freeDimensionOverrides;
     52  UTF8String graphOptimizationLevel = "all";
     53  boolean enableCpuMemArena = true;
     54  boolean enableMemPattern = true;
        // Operator scheduling mode; presumably "sequential" or "parallel" —
        // the accepted strings are not visible here.
     55  UTF8String executionMode = "sequential";
        // When non-empty, the optimized model is written to this path.
     56  UTF8String optimizedModelFilePath = "";
     57  boolean enableProfiling = false;
     58  UTF8String profileFilePrefix = "";
     59  UTF8String logId = "";
        // Minimum severity to log (0-4). NOTE(review): defaults to 4 here
        // but to 0 in InferenceSessionRunOptions — confirm this is intended.
     60  unsigned short logSeverityLevel = 4; // 0 - 4
     61  unsigned long logVerbosityLevel = 0;
        // Desired output placement, either one location for all outputs or a
        // per-output-name map.
     62  (TensorDataLocation or record<UTF8String, TensorDataLocation>) preferredOutputLocation;
     63  boolean enableGraphCapture = false;
        // Backend-specific extra options; free-form key/value pairs.
     64  record<UTF8String, any> extra;
     65 };
     66 
     67 [Func="InferenceSession::InInferenceProcess", Exposed=(DedicatedWorker,Window)]
     68 interface InferenceSession {
        // A loaded model that can be run repeatedly with different feeds.
        // Only exposed when InferenceSession::InInferenceProcess() is true
        // (per the [Func] gate above).
        //
        // run(): executes the model on the given name->Tensor feeds and
        // resolves with a name->Tensor result map.
     69  [NewObject]
     70  Promise<InferenceSessionReturnType> run(InferenceSessionFeedsType feeds, optional InferenceSessionRunOptions options = {});
        // create(): loads a model from a URI string or an in-memory
        // Uint8Array buffer and resolves with a ready session.
     71  [NewObject] static Promise<InferenceSession> create((UTF8String or Uint8Array) uriOrBuffer, optional InferenceSessionSessionOptions options = {});
        // Releases the session's resources. [BinaryName=ReleaseSession]:
        // bound to the C++ method ReleaseSession (avoids clashing with
        // refcounting's Release).
     72  [BinaryName=ReleaseSession]
     73  Promise<undefined> release();
        // Profiling control; output is governed by the enableProfiling /
        // profileFilePrefix session options above.
     74  undefined startProfiling();
     75  undefined endProfiling();
        // Model input names. [Cached, Pure]: reflected sequence is cached by
        // the bindings layer.
     76  [Cached, Pure]
     77  readonly attribute sequence<UTF8String> inputNames;
        // Model output names; same caching behavior as inputNames.
     78  [Cached, Pure]
     79  readonly attribute sequence<UTF8String> outputNames;
     80 };