tor-browser

The Tor Browser
git clone https://git.dasho.dev/tor-browser.git
Log | Files | Refs | README | LICENSE

test_Utils_RemoteSettings.js (10787B)


      1 /* This Source Code Form is subject to the terms of the Mozilla Public
      2 * License, v. 2.0. If a copy of the MPL was not distributed with this
      3 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
      4 
// Module under test: the OpenAI-backed engine wrapper and its feature enum.
const { openAIEngine, MODEL_FEATURES } = ChromeUtils.importESModule(
  "moz-src:///browser/components/aiwindow/models/Utils.sys.mjs"
);

// Sinon is used to sandbox-stub engine creation and the Remote Settings client.
const { sinon } = ChromeUtils.importESModule(
  "resource://testing-common/Sinon.sys.mjs"
);

// User prefs read by the engine's config loader.
const PREF_API_KEY = "browser.aiwindow.apiKey";
const PREF_ENDPOINT = "browser.aiwindow.endpoint";
const PREF_MODEL = "browser.aiwindow.model";

// Placeholder credentials — no network request is ever made in these tests.
const API_KEY = "fake-key";
const ENDPOINT = "https://api.fake-endpoint.com/v1";
     19 
     20 async function loadRemoteSettingsSnapshot() {
     21  const file = do_get_file("ai-window-prompts-remote-settings-snapshot.json");
     22  const data = await IOUtils.readUTF8(file.path);
     23  return JSON.parse(data);
     24 }
     25 
// Parsed snapshot records, loaded once before any task runs and shared
// read-only by every test below.
let REAL_REMOTE_SETTINGS_SNAPSHOT;

add_setup(async function () {
  REAL_REMOTE_SETTINGS_SNAPSHOT = await loadRemoteSettingsSnapshot();
});
     31 
     32 registerCleanupFunction(() => {
     33  for (let pref of [PREF_API_KEY, PREF_ENDPOINT, PREF_MODEL]) {
     34    if (Services.prefs.prefHasUserValue(pref)) {
     35      Services.prefs.clearUserPref(pref);
     36    }
     37  }
     38 });
     39 
     40 add_task(async function test_loadConfig_basic_with_real_snapshot() {
     41  Services.prefs.setStringPref(PREF_API_KEY, API_KEY);
     42  Services.prefs.setStringPref(PREF_ENDPOINT, ENDPOINT);
     43 
     44  const sb = sinon.createSandbox();
     45  try {
     46    const fakeEngine = {
     47      runWithGenerator() {
     48        throw new Error("not used");
     49      },
     50    };
     51    sb.stub(openAIEngine, "_createEngine").resolves(fakeEngine);
     52 
     53    sb.stub(openAIEngine, "getRemoteClient").returns({
     54      get: sb.stub().resolves(REAL_REMOTE_SETTINGS_SNAPSHOT),
     55    });
     56 
     57    const engine = new openAIEngine();
     58 
     59    await engine.loadConfig(MODEL_FEATURES.CHAT);
     60 
     61    Assert.equal(
     62      engine.feature,
     63      MODEL_FEATURES.CHAT,
     64      "Feature should be set correctly"
     65    );
     66    Assert.ok(engine.model, "Model should be loaded from remote settings");
     67 
     68    const config = engine.getConfig(MODEL_FEATURES.CHAT);
     69    Assert.ok(config, "Config should be loaded");
     70    Assert.ok(config.prompts, "Prompts should be loaded from remote settings");
     71    Assert.ok(
     72      config.prompts.includes("browser assistant"),
     73      "Prompts should contain expected content"
     74    );
     75  } finally {
     76    sb.restore();
     77  }
     78 });
     79 
     80 add_task(async function test_loadConfig_with_user_pref_model() {
     81  Services.prefs.setStringPref(PREF_API_KEY, API_KEY);
     82  Services.prefs.setStringPref(PREF_ENDPOINT, ENDPOINT);
     83  Services.prefs.setStringPref(PREF_MODEL, "gpt-oss-120b");
     84 
     85  const sb = sinon.createSandbox();
     86  try {
     87    const fakeEngine = {
     88      runWithGenerator() {
     89        throw new Error("not used");
     90      },
     91    };
     92    sb.stub(openAIEngine, "_createEngine").resolves(fakeEngine);
     93 
     94    sb.stub(openAIEngine, "getRemoteClient").returns({
     95      get: sb.stub().resolves(REAL_REMOTE_SETTINGS_SNAPSHOT),
     96    });
     97 
     98    const engine = new openAIEngine();
     99 
    100    await engine.loadConfig(MODEL_FEATURES.CHAT);
    101 
    102    Assert.equal(
    103      engine.model,
    104      "gpt-oss-120b",
    105      "User pref model should filter to matching configs"
    106    );
    107    const config = engine.getConfig(MODEL_FEATURES.CHAT);
    108    Assert.equal(
    109      config.model,
    110      "gpt-oss-120b",
    111      "Selected config should be for user's preferred model"
    112    );
    113  } finally {
    114    sb.restore();
    115    Services.prefs.clearUserPref(PREF_MODEL);
    116  }
    117 });
    118 
    119 add_task(async function test_loadConfig_no_records() {
    120  Services.prefs.setStringPref(PREF_API_KEY, API_KEY);
    121  Services.prefs.setStringPref(PREF_ENDPOINT, ENDPOINT);
    122 
    123  const sb = sinon.createSandbox();
    124  try {
    125    const fakeEngine = {
    126      runWithGenerator() {
    127        throw new Error("not used");
    128      },
    129    };
    130    sb.stub(openAIEngine, "_createEngine").resolves(fakeEngine);
    131 
    132    sb.stub(openAIEngine, "getRemoteClient").returns({
    133      get: sb.stub().resolves([]),
    134    });
    135 
    136    const engine = new openAIEngine();
    137 
    138    await engine.loadConfig(MODEL_FEATURES.CHAT);
    139 
    140    Assert.equal(
    141      engine.model,
    142      "qwen3-235b-a22b-instruct-2507-maas",
    143      "Should fall back to default model when remote settings returns no records"
    144    );
    145    Assert.equal(
    146      engine.feature,
    147      MODEL_FEATURES.CHAT,
    148      "Should set feature when remote settings returns no records"
    149    );
    150  } finally {
    151    sb.restore();
    152  }
    153 });
    154 
    155 add_task(async function test_loadConfig_filters_by_major_version() {
    156  Services.prefs.setStringPref(PREF_API_KEY, API_KEY);
    157  Services.prefs.setStringPref(PREF_ENDPOINT, ENDPOINT);
    158 
    159  const sb = sinon.createSandbox();
    160  try {
    161    const fakeEngine = {
    162      runWithGenerator() {
    163        throw new Error("not used");
    164      },
    165    };
    166    sb.stub(openAIEngine, "_createEngine").resolves(fakeEngine);
    167 
    168    // Add a v2.0 record to test data
    169    const recordsWithV2 = [
    170      ...REAL_REMOTE_SETTINGS_SNAPSHOT,
    171      {
    172        model: "future-model",
    173        feature: "chat",
    174        prompts: "Future version prompt",
    175        version: "v2.0",
    176        is_default: true,
    177      },
    178    ];
    179 
    180    sb.stub(openAIEngine, "getRemoteClient").returns({
    181      get: sb.stub().resolves(recordsWithV2),
    182    });
    183 
    184    const engine = new openAIEngine();
    185    await engine.loadConfig(MODEL_FEATURES.CHAT);
    186 
    187    const config = engine.getConfig(MODEL_FEATURES.CHAT);
    188    // Should get v1.x, not v2.0
    189    Assert.ok(config.version.startsWith("v1."), "Should select v1.x, not v2.0");
    190  } finally {
    191    sb.restore();
    192  }
    193 });
    194 
    195 add_task(async function test_loadConfig_fallback_when_user_model_not_found() {
    196  Services.prefs.setStringPref(PREF_API_KEY, API_KEY);
    197  Services.prefs.setStringPref(PREF_ENDPOINT, ENDPOINT);
    198  Services.prefs.setStringPref(PREF_MODEL, "nonexistent-model");
    199 
    200  const sb = sinon.createSandbox();
    201  try {
    202    const fakeEngine = {
    203      runWithGenerator() {
    204        throw new Error("not used");
    205      },
    206    };
    207    sb.stub(openAIEngine, "_createEngine").resolves(fakeEngine);
    208 
    209    sb.stub(openAIEngine, "getRemoteClient").returns({
    210      get: sb.stub().resolves(REAL_REMOTE_SETTINGS_SNAPSHOT),
    211    });
    212 
    213    const engine = new openAIEngine();
    214    await engine.loadConfig(MODEL_FEATURES.CHAT);
    215 
    216    // Should fall back to default model
    217    Assert.notEqual(
    218      engine.model,
    219      "nonexistent-model",
    220      "Should not use invalid user model"
    221    );
    222    const config = engine.getConfig(MODEL_FEATURES.CHAT);
    223    Assert.equal(config.is_default, true, "Should fall back to default config");
    224    Assert.equal(
    225      config.model,
    226      engine.model,
    227      "Engine model should match the default config's model"
    228    );
    229    Assert.equal(config.version, "v1.0", "Should use v1.0");
    230  } finally {
    231    sb.restore();
    232    Services.prefs.clearUserPref(PREF_MODEL);
    233  }
    234 });
    235 
    236 add_task(async function test_loadPrompt_from_remote_settings() {
    237  Services.prefs.setStringPref(PREF_API_KEY, API_KEY);
    238  Services.prefs.setStringPref(PREF_ENDPOINT, ENDPOINT);
    239 
    240  const sb = sinon.createSandbox();
    241  try {
    242    const fakeEngine = {
    243      runWithGenerator() {
    244        throw new Error("not used");
    245      },
    246    };
    247    sb.stub(openAIEngine, "_createEngine").resolves(fakeEngine);
    248 
    249    sb.stub(openAIEngine, "getRemoteClient").returns({
    250      get: sb.stub().resolves(REAL_REMOTE_SETTINGS_SNAPSHOT),
    251    });
    252 
    253    const engine = new openAIEngine();
    254    await engine.loadConfig(MODEL_FEATURES.TITLE_GENERATION);
    255 
    256    const prompt = await engine.loadPrompt(MODEL_FEATURES.TITLE_GENERATION);
    257 
    258    Assert.ok(prompt, "Prompt should be loaded from remote settings");
    259    Assert.ok(
    260      prompt.includes("title") || prompt.includes("conversation"),
    261      "Prompt should contain expected content for title generation"
    262    );
    263  } finally {
    264    sb.restore();
    265  }
    266 });
    267 
    268 add_task(async function test_loadPrompt_fallback_to_local() {
    269  Services.prefs.setStringPref(PREF_API_KEY, API_KEY);
    270  Services.prefs.setStringPref(PREF_ENDPOINT, ENDPOINT);
    271 
    272  const sb = sinon.createSandbox();
    273  try {
    274    const fakeEngine = {
    275      runWithGenerator() {
    276        throw new Error("not used");
    277      },
    278    };
    279    sb.stub(openAIEngine, "_createEngine").resolves(fakeEngine);
    280 
    281    sb.stub(openAIEngine, "getRemoteClient").returns({
    282      get: sb.stub().resolves([]),
    283    });
    284 
    285    const engine = new openAIEngine();
    286    await engine.loadConfig(MODEL_FEATURES.TITLE_GENERATION);
    287 
    288    const prompt = await engine.loadPrompt(MODEL_FEATURES.TITLE_GENERATION);
    289 
    290    Assert.ok(prompt, "Prompt should fallback to local prompt");
    291    Assert.ok(
    292      prompt.includes("Generate a concise chat title"),
    293      "Should load local prompt when remote settings has no config"
    294    );
    295  } finally {
    296    sb.restore();
    297  }
    298 });
    299 
    300 add_task(async function test_build_with_feature() {
    301  Services.prefs.setStringPref(PREF_API_KEY, API_KEY);
    302  Services.prefs.setStringPref(PREF_ENDPOINT, ENDPOINT);
    303 
    304  const sb = sinon.createSandbox();
    305  try {
    306    const fakeEngine = {
    307      runWithGenerator() {
    308        throw new Error("not used");
    309      },
    310    };
    311    const createEngineStub = sb
    312      .stub(openAIEngine, "_createEngine")
    313      .resolves(fakeEngine);
    314 
    315    sb.stub(openAIEngine, "getRemoteClient").returns({
    316      get: sb.stub().resolves(REAL_REMOTE_SETTINGS_SNAPSHOT),
    317    });
    318 
    319    const engine = await openAIEngine.build(MODEL_FEATURES.CHAT);
    320 
    321    Assert.ok(engine.engineInstance, "Engine instance should be created");
    322    Assert.equal(engine.feature, MODEL_FEATURES.CHAT, "Feature should be set");
    323    Assert.ok(engine.model, "Model should be loaded from remote settings");
    324 
    325    const opts = createEngineStub.firstCall.args[0];
    326    Assert.ok(opts.modelId, "Model should be passed to engine creation");
    327    Assert.equal(
    328      opts.modelId,
    329      engine.model,
    330      "Model passed to engine should match loaded model"
    331    );
    332  } finally {
    333    sb.restore();
    334  }
    335 });
    336 
    337 add_task(async function test_inference_params_from_config() {
    338  Services.prefs.setStringPref(PREF_API_KEY, API_KEY);
    339  Services.prefs.setStringPref(PREF_ENDPOINT, ENDPOINT);
    340 
    341  const sb = sinon.createSandbox();
    342  try {
    343    const fakeEngine = {
    344      runWithGenerator() {
    345        throw new Error("not used");
    346      },
    347    };
    348    sb.stub(openAIEngine, "_createEngine").resolves(fakeEngine);
    349 
    350    sb.stub(openAIEngine, "getRemoteClient").returns({
    351      get: sb.stub().resolves(REAL_REMOTE_SETTINGS_SNAPSHOT),
    352    });
    353 
    354    const engine = new openAIEngine();
    355    await engine.loadConfig(MODEL_FEATURES.CHAT);
    356 
    357    const config = engine.getConfig(MODEL_FEATURES.CHAT);
    358    Assert.ok(config, "Config should be loaded");
    359 
    360    const inferenceParams = config?.parameters || {};
    361    Assert.equal(
    362      typeof inferenceParams,
    363      "object",
    364      "Inference parameters should be an object"
    365    );
    366    Assert.equal(
    367      inferenceParams.temperature,
    368      1.0,
    369      "Temperature should be loaded from parameters"
    370    );
    371  } finally {
    372    sb.restore();
    373  }
    374 });