commit a7adf02cc574dfce2f855c1adfaabba37efec670
parent f8131c020cce09fd41877ecc56c743d5386325fa
Author: Omar Gonzalez <s9tpepper@apache.org>
Date: Fri, 9 Jan 2026 20:52:30 +0000
Bug 2008658 - fixes messages array for OpenAI API format r=tzhang,ai-frontend-reviewers,ai-models-reviewers
Differential Revision: https://phabricator.services.mozilla.com/D278325
Diffstat:
5 files changed, 69 insertions(+), 19 deletions(-)
diff --git a/browser/components/aiwindow/models/Chat.sys.mjs b/browser/components/aiwindow/models/Chat.sys.mjs
@@ -128,10 +128,14 @@ export const Chat = {
throw new Error(`No such tool: ${name}`);
}
- result = await toolFunc(toolParams);
+ if (Object.keys(toolParams).length) {
+ result = await toolFunc(toolParams);
+ } else {
+ result = await toolFunc();
+ }
// Create special tool call log message to show in the UI log panel
- const content = { tool_call_id: id, body: result };
+ const content = { tool_call_id: id, body: result, name };
conversation.addToolCallMessage(content, currentTurn, toolRoleOpts);
} catch (e) {
result = { error: `Tool execution failed: ${String(e)}` };
diff --git a/browser/components/aiwindow/models/Tools.sys.mjs b/browser/components/aiwindow/models/Tools.sys.mjs
@@ -16,8 +16,9 @@ ChromeUtils.defineESModuleGetters(lazy, {
AIWindow:
"moz-src:///browser/components/aiwindow/ui/modules/AIWindow.sys.mjs",
BrowserWindowTracker: "resource:///modules/BrowserWindowTracker.sys.mjs",
- PageDataService:
- "moz-src:///browser/components/pagedata/PageDataService.sys.mjs",
+ // @todo Bug 2009194
+ // PageDataService:
+ // "moz-src:///browser/components/pagedata/PageDataService.sys.mjs",
});
const GET_OPEN_TABS = "get_open_tabs";
@@ -143,10 +144,18 @@ export async function getOpenTabs(n = 15) {
topTabs.map(async ({ url, title, lastAccessed }) => {
let description = "";
if (url) {
- description =
- lazy.PageDataService.getCached(url)?.description ||
- (await lazy.PageDataService.fetchPageData(url))?.description ||
- "";
+ // @todo Bug 2009194
+ // PageDataService halts code execution even in try/catch
+ //
+ // try {
+ // description =
+ // lazy.PageDataService.getCached(url)?.description ||
+ // (await lazy.PageDataService.fetchPageData(url))?.description ||
+ // "";
+ // } catch (e) {
+ // console.log(e);
+ // description = "";
+ // }
}
return { url, title, description, lastAccessed };
})
diff --git a/browser/components/aiwindow/ui/components/ai-window/ai-window.mjs b/browser/components/aiwindow/ui/components/ai-window/ai-window.mjs
@@ -163,8 +163,11 @@ export class AIWindow extends MozLitElement {
assistantRoleOpts
);
+ const currentMessage = this.#conversation.messages
+ .filter(message => message.role === lazy.MESSAGE_ROLE.ASSISTANT)
+ .at(-1);
+
for await (const chunk of stream) {
- const currentMessage = this.#conversation.messages.at(-1);
currentMessage.content.body += chunk;
this.#updateConversation();
@@ -216,12 +219,13 @@ export class AIWindow extends MozLitElement {
#dispatchMessageToChatContent(message) {
const actor = this.#getAIChatContentActor();
+ const newMessage = { ...message };
if (typeof message.role !== "string") {
- const roleLabel = lazy.getRoleLabel(message.role).toLowerCase();
- message.role = roleLabel;
+ const roleLabel = lazy.getRoleLabel(newMessage.role).toLowerCase();
+ newMessage.role = roleLabel;
}
- return actor.dispatchMessageToChatContent(message);
+ return actor.dispatchMessageToChatContent(newMessage);
}
/**
diff --git a/browser/components/aiwindow/ui/modules/ChatConversation.sys.mjs b/browser/components/aiwindow/ui/modules/ChatConversation.sys.mjs
@@ -130,7 +130,7 @@ export class ChatConversation {
const currentMessages = this?.messages || [];
const ordinal = currentMessages.length ? currentMessages.length + 1 : 1;
- const message_data = {
+ const messageData = {
parentMessageId,
content,
ordinal,
@@ -141,7 +141,7 @@ export class ChatConversation {
...opts,
};
- const newMessage = new ChatMessage(message_data);
+ const newMessage = new ChatMessage(messageData);
this.messages.push(newMessage);
}
@@ -233,6 +233,18 @@ export class ChatConversation {
* @param {URL} pageUrl - The URL of the page when prompt was submitted
*/
async generatePrompt(prompt, pageUrl) {
+ this.#messages = this.#messages.filter(message => {
+ const isRealTimeInjection =
+ message.role === MESSAGE_ROLE.SYSTEM &&
+ message.content.type === SYSTEM_PROMPT_TYPE.REAL_TIME;
+
+ const isInsightsInjection =
+ message.role === MESSAGE_ROLE.SYSTEM &&
+ message.content.type === SYSTEM_PROMPT_TYPE.INSIGHTS;
+
+ return !isRealTimeInjection && !isInsightsInjection;
+ });
+
if (!this.#messages.length) {
// TODO: Bug 2008865
// switch to use remote settings prompt accessed via engine.loadPrompt(feature)
@@ -316,10 +328,23 @@ export class ChatConversation {
);
})
.map(message => {
- return {
+ const msg = {
role: getRoleLabel(message.role).toLowerCase(),
content: message.content?.body ?? message.content,
};
+
+ if (msg.content.tool_calls) {
+ msg.tool_calls = msg.content.tool_calls;
+ msg.content = "";
+ }
+
+ if (msg.role === "tool") {
+ msg.tool_call_id = message.content.tool_call_id;
+ msg.name = message.content.name;
+ msg.content = JSON.stringify(message.content.body);
+ }
+
+ return msg;
});
}
diff --git a/browser/components/aiwindow/ui/test/xpcshell/test_ChatConversation.js b/browser/components/aiwindow/ui/test/xpcshell/test_ChatConversation.js
@@ -483,10 +483,18 @@ add_task(function test_ChatConversation_getMessagesInOpenAiFormat() {
const conversation = new ChatConversation({});
conversation.addSystemMessage("text", "the system prompt");
conversation.addUserMessage("a user's prompt", "https://www.somesite.com");
- conversation.addToolCallMessage({ some: "tool call details" });
+ conversation.addToolCallMessage({
+ tool_call_id: "123",
+ name: "tool_1",
+ body: [1, 2, 3],
+ });
conversation.addAssistantMessage("text", "the llm response");
conversation.addUserMessage("a user's second prompt", "some question");
- conversation.addToolCallMessage({ some: "more tool call details" });
+ conversation.addToolCallMessage({
+ tool_call_id: "456",
+ name: "tool_1",
+ body: [4, 5, 6],
+ });
conversation.addAssistantMessage("text", "the second llm response");
const openAiFormat = conversation.getMessagesInOpenAiFormat();
@@ -494,10 +502,10 @@ add_task(function test_ChatConversation_getMessagesInOpenAiFormat() {
Assert.deepEqual(openAiFormat, [
{ role: "system", content: "the system prompt" },
{ role: "user", content: "a user's prompt" },
- { role: "tool", content: { some: "tool call details" } },
+ { role: "tool", content: "[1,2,3]", name: "tool_1", tool_call_id: "123" },
{ role: "assistant", content: "the llm response" },
{ role: "user", content: "a user's second prompt" },
- { role: "tool", content: { some: "more tool call details" } },
+ { role: "tool", content: "[4,5,6]", name: "tool_1", tool_call_id: "456" },
{ role: "assistant", content: "the second llm response" },
]);
});