commit e26eaecf3958b8685d1fcaa0bd4a86826722bbea
parent 98d5e7ce92a5436a03dacf644345a8dfbbcdcf19
Author: Isaac Ahouma <iahouma@google.com>
Date: Thu, 27 Nov 2025 15:28:25 +0000
Bug 2002708 [wpt PR 56316] - [Prompt API WPTs] Update `inputUsage` to reflect total, a=testonly
Automatic update from web-platform-tests
[Prompt API WPTs] Update `inputUsage` to reflect total
tokens used in the AI Language Model session.
The `inputUsage` field in `AILanguageModelInstanceInfo` and
`ModelExecutionContextInfo` now reports the total number of tokens
consumed by the session, including both the initial prompts and all
subsequent inputs and outputs. This is achieved by tracking the initial
token count in the `Context` and summing it with the tokens from
subsequent interactions.
Bug: 460794950
Change-Id: I5b98e86d6795d4b1463b91b12d3f2e7c6751c8c0
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/7170679
Reviewed-by: Nathan Memmott <memmott@chromium.org>
Reviewed-by: Reilly Grant <reillyg@chromium.org>
Commit-Queue: Isaac Ahouma <iahouma@google.com>
Cr-Commit-Position: refs/heads/main@{#1550722}
--
wpt-commits: b6282e607975976de75d00b41b6d6d783c9a0dd7
wpt-pr: 56316
Diffstat:
1 file changed, 3 insertions(+), 0 deletions(-)
diff --git a/testing/web-platform/tests/ai/language-model/language-model-clone.tentative.https.window.js b/testing/web-platform/tests/ai/language-model/language-model-clone.tentative.https.window.js
@@ -34,4 +34,7 @@ promise_test(async () => {
const clone_result = await cloned_session.prompt(kTestPrompt);
assert_equals(typeof clone_result, 'string');
+ assert_greater_than(
+ cloned_session.inputUsage, session.inputUsage,
+ 'cloned session should have increased inputUsage after prompting.');
});