commit ab5b5007b1240c3a31a9a2397fa03bdc5bcf4196
parent b809a39b5d18756e6a643cb2b4e930c1e1824773
Author: Julian Descottes <jdescottes@mozilla.com>
Date: Wed, 5 Nov 2025 16:17:33 +0000
Bug 1988955 - [wdspec] Update mozilla specific test for max total size with request dataType r=whimboo
Differential Revision: https://phabricator.services.mozilla.com/D267829
Diffstat:
1 file changed, 85 insertions(+), 53 deletions(-)
diff --git a/testing/web-platform/mozilla/tests/webdriver/bidi/network/add_data_collector/max_total_data_size.py b/testing/web-platform/mozilla/tests/webdriver/bidi/network/add_data_collector/max_total_data_size.py
@@ -1,34 +1,89 @@
import pytest
+import pytest_asyncio
from tests.bidi.network import (
+ BEFORE_REQUEST_SENT_EVENT,
PAGE_EMPTY_HTML,
RESPONSE_COMPLETED_EVENT,
)
from webdriver.bidi import error
+MAX_TOTAL_SIZE = 1000
+
+# Prepare various data sizes to test against MAX_TOTAL_SIZE
+big_data = MAX_TOTAL_SIZE - 100
+half_size_data = int(MAX_TOTAL_SIZE / 2)
+max_size_data = MAX_TOTAL_SIZE
+small_data = int(MAX_TOTAL_SIZE / 100)
+too_big_data = MAX_TOTAL_SIZE + 100
+too_big_one_byte_data = MAX_TOTAL_SIZE + 1
+
+
+@pytest_asyncio.fixture
+async def send_request(wait_for_event, inline, fetch, wait_for_future_safe):
+ # This flag is used to support the "request or response" mode.
+ mode_flip = False
+
+ async def _send_request(size, mode):
+ nonlocal mode_flip
+
+ # In request or response mode, alternate between request and response
+ # for every request.
+ if mode == "request or response":
+ mode_flip = not mode_flip
+ data_type = "request" if mode_flip else "response"
+ else:
+ data_type = mode
+
+ data = "".join("A" for i in range(size))
+ if data_type == "request":
+ post_data = data
+ response_data = ""
+ elif data_type == "response":
+ response_data = data
+ post_data = None
+
+ on_response_completed = wait_for_event(RESPONSE_COMPLETED_EVENT)
+ # Note: We use the "js" doctype here to avoid any boilerplate in the inline
+ # response, which would inflate the sizes unexpectedly.
+ await fetch(url=inline(response_data, doctype="js"), post_data=post_data)
+ event = await wait_for_future_safe(on_response_completed)
+
+ # Return both the request id and the data_type for which the data was
+ # actually set.
+ return {"request": event["request"]["request"], "data_type": data_type}
+
+ return _send_request
+
@pytest.mark.capabilities(
{
"moz:firefoxOptions": {
"prefs": {
- "remote.network.maxTotalDataSize": 1000,
+ "remote.network.maxTotalDataSize": MAX_TOTAL_SIZE,
},
},
}
)
+@pytest.mark.parametrize(
+ "mode",
+ [
+ "request",
+ "response",
+ "request or response",
+ ],
+)
@pytest.mark.asyncio
async def test_max_total_data_size(
bidi_session,
- url,
- inline,
setup_network_test,
top_context,
- wait_for_event,
- wait_for_future_safe,
add_data_collector,
- fetch,
+ send_request,
+ mode,
):
await setup_network_test(
events=[
+ BEFORE_REQUEST_SENT_EVENT,
RESPONSE_COMPLETED_EVENT,
]
)
@@ -41,38 +96,19 @@ async def test_max_total_data_size(
# Add a collector, with the same max size as the total size.
await add_data_collector(
- collector_type="blob", data_types=["response"], max_encoded_data_size=1000
+ collector_type="blob",
+ data_types=["request", "response"],
+ max_encoded_data_size=MAX_TOTAL_SIZE,
)
- # Build a url with a response size slightly below the maximum.
- big_response = "".join("A" for i in range(900))
- big_url = inline(big_response, doctype="js")
-
- # Build a url with a small response size.
- small_response = "".join("A" for i in range(10))
- small_url = inline(small_response, doctype="js")
-
- # Build a url with a response size slightly over the maximum.
- too_big_response = "".join("A" for i in range(1100))
- too_big_url = inline(too_big_response, doctype="js")
-
- # Note: We use the "js" doctype here to avoid any boilerplate in the inline
- # response, which would inflate the sizes unexpectedly.
-
- async def send_request(url):
- on_response_completed = wait_for_event(RESPONSE_COMPLETED_EVENT)
- await fetch(url, method="GET")
- event = await wait_for_future_safe(on_response_completed)
- return event["request"]["request"]
-
# Send a request to store the 900 chars (uncompressed) response.
- request_1_big = await send_request(big_url)
+ request_1_big = await send_request(size=big_data, mode=mode)
await assert_request_data_available(request_1_big, bidi_session)
# Send another big request.
# Check a previous request is evicted if more space is needed.
- request_2_big = await send_request(big_url)
+ request_2_big = await send_request(size=big_data, mode=mode)
# Expected: 1->evicted, 2->OK
await assert_request_data_unavailable(request_1_big, bidi_session)
@@ -80,7 +116,7 @@ async def test_max_total_data_size(
# Send a small request for a 10 chars response.
# Check eviction only done if more space is required.
- request_3_small = await send_request(small_url)
+ request_3_small = await send_request(size=small_data, mode=mode)
# Expected: 2->OK, 3->OK
await assert_request_data_available(request_2_big, bidi_session)
@@ -89,7 +125,7 @@ async def test_max_total_data_size(
# Send another big request.
# Check eviction only removes requests as needed (preserves small request if
# enough space is available).
- request_4_big = await send_request(big_url)
+ request_4_big = await send_request(size=big_data, mode=mode)
# Expected: 2->evicted, 3->OK, 4->OK
await assert_request_data_unavailable(request_2_big, bidi_session)
@@ -98,7 +134,7 @@ async def test_max_total_data_size(
# Send another small request.
# This is a preparatory step for the next check.
- request_5_small = await send_request(small_url)
+ request_5_small = await send_request(size=small_data, mode=mode)
# Expected: 3->OK, 4->OK, 5->OK
await assert_request_data_available(request_3_small, bidi_session)
@@ -110,7 +146,7 @@ async def test_max_total_data_size(
# evicted because it arrived before the 4th big request (which is
# mandatory to delete to store the new one).
# But the 5th small request should still be available.
- request_6_big = await send_request(big_url)
+ request_6_big = await send_request(size=big_data, mode=mode)
# Expected: 3->evicted, 4->evicted, 5->OK, 6->OK
await assert_request_data_unavailable(request_3_small, bidi_session)
@@ -121,7 +157,7 @@ async def test_max_total_data_size(
# Send a request which is too big for the collector.
# No other request should be evicted in this case, 5th and 6th requests
# should still be available.
- request_7_too_big = await send_request(too_big_url)
+ request_7_too_big = await send_request(size=too_big_data, mode=mode)
# Expected: 5->OK, 6->OK, 7->no such data
await assert_request_data_available(request_5_small, bidi_session)
@@ -130,14 +166,14 @@ async def test_max_total_data_size(
# case.
with pytest.raises(error.NoSuchNetworkDataException):
await bidi_session.network.get_data(
- request=request_7_too_big,
- data_type="response",
+ request=request_7_too_big["request"],
+ data_type=request_7_too_big["data_type"],
)
# Send a request which is too big by just one byte.
- too_big_one_byte_response = "".join("A" for i in range(1001))
- too_big_one_byte_url = inline(too_big_one_byte_response, doctype="js")
- request_8_too_big_one_byte = await send_request(too_big_one_byte_url)
+ request_8_too_big_one_byte = await send_request(
+ size=too_big_one_byte_data, mode=mode
+ )
# Expected: 5->OK, 6->OK, 8->no such data
await assert_request_data_available(request_5_small, bidi_session)
@@ -146,14 +182,12 @@ async def test_max_total_data_size(
# case.
with pytest.raises(error.NoSuchNetworkDataException):
await bidi_session.network.get_data(
- request=request_8_too_big_one_byte,
- data_type="response",
+ request=request_8_too_big_one_byte["request"],
+ data_type=request_8_too_big_one_byte["data_type"],
)
# Send a request which is exactly the max size.
- max_size_response = "".join("A" for i in range(1000))
- max_size_url = inline(max_size_response, doctype="js")
- request_9_max_size = await send_request(max_size_url)
+ request_9_max_size = await send_request(size=max_size_data, mode=mode)
# Expected: 5->evicted, 6->evicted, 9->OK
await assert_request_data_unavailable(request_5_small, bidi_session)
@@ -161,10 +195,8 @@ async def test_max_total_data_size(
await assert_request_data_available(request_9_max_size, bidi_session)
# Send two requests which add up to the max size.
- half_size_response = "".join("A" for i in range(500))
- half_size_url = inline(half_size_response, doctype="js")
- request_10_half_size = await send_request(half_size_url)
- request_11_half_size = await send_request(half_size_url)
+ request_10_half_size = await send_request(size=half_size_data, mode=mode)
+ request_11_half_size = await send_request(size=half_size_data, mode=mode)
# Expected: 9->evicted, 10->OK, 11->OK
await assert_request_data_unavailable(request_9_max_size, bidi_session)
@@ -174,8 +206,8 @@ async def test_max_total_data_size(
async def assert_request_data_available(request, bidi_session):
data = await bidi_session.network.get_data(
- request=request,
- data_type="response",
+ request=request["request"],
+ data_type=request["data_type"],
)
assert isinstance(data["value"], str)
@@ -183,6 +215,6 @@ async def assert_request_data_available(request, bidi_session):
async def assert_request_data_unavailable(request, bidi_session):
with pytest.raises(error.UnavailableNetworkDataException):
await bidi_session.network.get_data(
- request=request,
- data_type="response",
+ request=request["request"],
+ data_type=request["data_type"],
)