tor-browser

The Tor Browser
git clone https://git.dasho.dev/tor-browser.git
Log | Files | Refs | README | LICENSE

test_smart_assist_engine.js (7549B)


      1 /* Any copyright is dedicated to the Public Domain.
      2 * http://creativecommons.org/publicdomain/zero/1.0/ */
      3 
      4 const { SmartAssistEngine } = ChromeUtils.importESModule(
      5  "moz-src:///browser/components/genai/SmartAssistEngine.sys.mjs"
      6 );
      7 
      8 const { sinon } = ChromeUtils.importESModule(
      9  "resource://testing-common/Sinon.sys.mjs"
     10 );
     11 
// Pref names consulted by SmartAssistEngine (createOpenAIEngine reads
// apiKey/endpoint/model from these); set by tests and cleared on cleanup.
const PREF_API_KEY = "browser.ml.smartAssist.apiKey";
const PREF_ENDPOINT = "browser.ml.smartAssist.endpoint";
const PREF_MODEL = "browser.ml.smartAssist.model";
     16 
     17 // Clean prefs after all tests
     18 registerCleanupFunction(() => {
     19  for (let pref of [PREF_API_KEY, PREF_ENDPOINT, PREF_MODEL]) {
     20    if (Services.prefs.prefHasUserValue(pref)) {
     21      Services.prefs.clearUserPref(pref);
     22    }
     23  }
     24 });
     25 
     26 add_task(async function test_createOpenAIEngine_uses_prefs_and_static_fields() {
     27  Services.prefs.setStringPref(PREF_API_KEY, "test-key-123");
     28  Services.prefs.setStringPref(PREF_ENDPOINT, "https://example.test/v1");
     29  Services.prefs.setStringPref(PREF_MODEL, "gpt-fake");
     30 
     31  const sb = sinon.createSandbox();
     32  try {
     33    const fakeEngine = {
     34      runWithGenerator() {
     35        throw new Error("not used");
     36      },
     37    };
     38    const stub = sb
     39      .stub(SmartAssistEngine, "_createEngine")
     40      .resolves(fakeEngine);
     41 
     42    const engine = await SmartAssistEngine.createOpenAIEngine();
     43 
     44    Assert.strictEqual(
     45      engine,
     46      fakeEngine,
     47      "Should return engine from _createEngine"
     48    );
     49    Assert.ok(stub.calledOnce, "_createEngine should be called once");
     50 
     51    const opts = stub.firstCall.args[0];
     52    Assert.equal(opts.apiKey, "test-key-123", "apiKey should come from pref");
     53    Assert.equal(
     54      opts.baseURL,
     55      "https://example.test/v1",
     56      "baseURL should come from pref"
     57    );
     58    Assert.equal(opts.modelId, "gpt-fake", "modelId should come from pref");
     59  } finally {
     60    sb.restore();
     61  }
     62 });
     63 
     64 add_task(async function test_fetchWithHistory_streams_and_forwards_args() {
     65  const sb = sinon.createSandbox();
     66  try {
     67    let capturedArgs = null;
     68    let capturedStreamOption = null;
     69 
     70    // Fake async generator that yields three text chunks and one empty (ignored)
     71    const fakeEngine = {
     72      runWithGenerator({ streamOptions, args }) {
     73        capturedArgs = args;
     74        capturedStreamOption = streamOptions;
     75        async function* gen() {
     76          yield { text: "Hello" };
     77          yield { text: " from" };
     78          yield { text: " fake engine!" };
     79          yield {}; // ignored by SmartAssistEngine
     80        }
     81        return gen();
     82      },
     83    };
     84 
     85    sb.stub(SmartAssistEngine, "_createEngine").resolves(fakeEngine);
     86    sb.stub(SmartAssistEngine, "_getFxAccountToken").resolves("mock_token");
     87 
     88    const messages = [
     89      { role: "system", content: "You are helpful" },
     90      { role: "user", content: "Hi there" },
     91    ];
     92 
     93    // Collect streamed output
     94    let acc = "";
     95    for await (const t of SmartAssistEngine.fetchWithHistory(messages)) {
     96      acc += t;
     97    }
     98 
     99    Assert.equal(
    100      acc,
    101      "Hello from fake engine!",
    102      "Should concatenate streamed chunks"
    103    );
    104    Assert.deepEqual(
    105      capturedArgs,
    106      messages,
    107      "Should forward messages as args to runWithGenerator()"
    108    );
    109    Assert.deepEqual(
    110      capturedStreamOption.enabled,
    111      true,
    112      "Should enable streaming in runWithGenerator()"
    113    );
    114  } finally {
    115    sb.restore();
    116  }
    117 });
    118 
    119 add_task(
    120  async function test_fetchWithHistory_propagates_engine_creation_rejection() {
    121    const sb = sinon.createSandbox();
    122    try {
    123      const err = new Error("creation failed (generic)");
    124      const stub = sb.stub(SmartAssistEngine, "_createEngine").rejects(err);
    125      sb.stub(SmartAssistEngine, "_getFxAccountToken").resolves("mock_token");
    126      const messages = [{ role: "user", content: "Hi" }];
    127 
    128      // Must CONSUME the async generator to trigger the rejection
    129      const consume = async () => {
    130        for await (const _message of SmartAssistEngine.fetchWithHistory(
    131          messages
    132        )) {
    133          void _message;
    134        }
    135      };
    136 
    137      await Assert.rejects(
    138        consume(),
    139        e => e === err,
    140        "Should propagate the same error thrown by _createEngine"
    141      );
    142      Assert.ok(stub.calledOnce, "_createEngine should be called once");
    143    } finally {
    144      sb.restore();
    145    }
    146  }
    147 );
    148 
    149 add_task(async function test_fetchWithHistory_propagates_stream_error() {
    150  const sb = sinon.createSandbox();
    151  try {
    152    const fakeEngine = {
    153      runWithGenerator() {
    154        async function* gen() {
    155          yield { text: "partial" };
    156          throw new Error("engine stream boom");
    157        }
    158        return gen();
    159      },
    160    };
    161    sb.stub(SmartAssistEngine, "_createEngine").resolves(fakeEngine);
    162    sb.stub(SmartAssistEngine, "_getFxAccountToken").resolves("mock_token");
    163 
    164    const consume = async () => {
    165      let acc = "";
    166      for await (const t of SmartAssistEngine.fetchWithHistory([
    167        { role: "user", content: "x" },
    168      ])) {
    169        acc += t;
    170      }
    171      return acc;
    172    };
    173 
    174    await Assert.rejects(
    175      consume(),
    176      e => /engine stream boom/.test(e.message),
    177      "Should propagate errors thrown during streaming"
    178    );
    179  } finally {
    180    sb.restore();
    181  }
    182 });
    183 
    184 add_task(async function test_getPromptIntent_basic() {
    185  const sb = sinon.createSandbox();
    186  const cases = [
    187    { prompt: "please search for news on firefox", expected: "search" },
    188    { prompt: "Can you FIND me the docs for PageAssist?", expected: "search" }, // case-insensitive
    189    { prompt: "look up the best pizza in SF", expected: "search" },
    190    { prompt: "hello there, how are you?", expected: "chat" },
    191    { prompt: "tell me a joke", expected: "chat" },
    192  ];
    193 
    194  const fakeEngine = {
    195    run({ args: [[query]] }) {
    196      const searchKeywords = [
    197        "search",
    198        "find",
    199        "look",
    200        "query",
    201        "locate",
    202        "explore",
    203      ];
    204      const formattedPrompt = query.toLowerCase();
    205      const isSearch = searchKeywords.some(keyword =>
    206        formattedPrompt.includes(keyword)
    207      );
    208 
    209      // Simulate model confidence scores
    210      if (isSearch) {
    211        return [
    212          { label: "search", score: 0.95 },
    213          { label: "chat", score: 0.05 },
    214        ];
    215      }
    216      return [
    217        { label: "chat", score: 0.95 },
    218        { label: "search", score: 0.05 },
    219      ];
    220    },
    221  };
    222 
    223  sb.stub(SmartAssistEngine, "_createEngine").resolves(fakeEngine);
    224 
    225  for (const { prompt, expected } of cases) {
    226    const intent = await SmartAssistEngine.getPromptIntent(prompt);
    227    Assert.equal(
    228      intent,
    229      expected,
    230      `getPromptIntent("${prompt}") should return "${expected}"`
    231    );
    232  }
    233 
    234  sb.restore();
    235 });
    236 
    237 add_task(async function test_preprocessQuery_removes_question_marks() {
    238  const sb = sinon.createSandbox();
    239 
    240  // Use a minimal fake SmartAssistEngine to test only preprocessing
    241  const engine = Object.create(SmartAssistEngine);
    242 
    243  const cases = [
    244    { input: "hello?", expected: "hello" },
    245    { input: "?prompt", expected: "prompt" },
    246    { input: "multiple???", expected: "multiple" },
    247    { input: "mid?dle", expected: "middle" },
    248    { input: "question? ", expected: "question" },
    249    { input: " no?  spaces? ", expected: "no  spaces" },
    250    { input: "???", expected: "" },
    251    { input: "clean input", expected: "clean input" },
    252  ];
    253 
    254  for (const { input, expected } of cases) {
    255    const result = engine._preprocessQuery(input);
    256    Assert.equal(
    257      result,
    258      expected,
    259      `Expected "${input}" to preprocess to "${expected}", got "${result}"`
    260    );
    261  }
    262 
    263  sb.restore();
    264 });