
Commit b1eb793

metcoder95 authored and github-actions[bot] committed
chore: update WPT
1 parent 6211fac commit b1eb793


123 files changed: +4201 / -1664 lines


test/fixtures/wpt/common/get-host-info.sub.js

Lines changed: 1 addition & 0 deletions
@@ -30,6 +30,7 @@ function get_host_info() {
   PORT2: PORT2,
   ORIGINAL_HOST: ORIGINAL_HOST,
   REMOTE_HOST: REMOTE_HOST,
+  NOTSAMESITE_HOST,

   ORIGIN: PROTOCOL + "//" + ORIGINAL_HOST + PORT_ELIDED,
   HTTP_ORIGIN: 'http://' + ORIGINAL_HOST + HTTP_PORT_ELIDED,
Lines changed: 4 additions & 0 deletions
@@ -0,0 +1,4 @@
+features:
+- name: fetch-request-streams
+  files:
+  - request-upload*

test/fixtures/wpt/fetch/api/body/mime-type.any.js

Lines changed: 1 addition & 1 deletion
@@ -87,7 +87,7 @@

 [
   () => new Request("about:blank", { method: "POST", body: new Blob([""], { type: "Text/Plain" }), headers: [["Content-Type", "Text/Html"]] }),
-  () => new Response(new Blob([""], { type: "Text/Plain" }, { headers: [["Content-Type", "Text/Html"]] }))
+  () => new Response(new Blob([""], { type: "Text/Plain" }), { headers: [["Content-Type", "Text/Html"]] })
 ].forEach(bodyContainerCreator => {
   const bodyContainer = bodyContainerCreator();
   const cloned = bodyContainer.clone();
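Note on the fix above (this note is not part of the commit): before the change, the closing parenthesis of the Blob constructor came after the init object, so { headers: [...] } was passed as an extra, ignored argument to new Blob() and the Response was constructed with no init at all. The correction passes the init to new Response(), mirroring the Request case on the previous line. A minimal illustrative sketch of the corrected call shape:

// Blob takes (blobParts, options); any further arguments are ignored.
const body = new Blob([""], { type: "Text/Plain" });
// Response takes (body, init); the headers belong in the init argument.
const res = new Response(body, { headers: [["Content-Type", "Text/Html"]] });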
Lines changed: 4 additions & 0 deletions
@@ -0,0 +1,4 @@
+features:
+- name: fetch-priority
+  files:
+  - request-init-priority.any.js

test/fixtures/wpt/fetch/api/resources/keepalive-helper.js

Lines changed: 1 addition & 1 deletion
@@ -117,7 +117,7 @@ function assertStashedTokenAsync(
 *
 * `unloadIframe` to unload the iframe before verifying stashed token to
 * simulate the situation that unloads after fetching. Note that this test is
-* different from `keepaliveRedirectInUnloadTest()` in that the the latter
+* different from `keepaliveRedirectInUnloadTest()` in that the latter
 * performs fetch() call directly in `unload` event handler, while this test
 * does it in `load`.
 */
Lines changed: 3 additions & 0 deletions
@@ -0,0 +1,3 @@
+These are the tests for the [Compression Dictionary Transport](https://datatracker.ietf.org/doc/draft-ietf-httpbis-compression-dictionary/) standard (currently in IETF draft state, approved for publication). The tests are marked as tentative, pending the publication of the RFC.
+
+The MDN reference is [here](https://developer.mozilla.org/en-US/docs/Web/HTTP/Guides/Compression_dictionary_transport).
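For orientation (this summary is not part of the commit): in this protocol a response advertises itself as a dictionary via a Use-As-Dictionary response header; once stored, later matching requests carry an Available-Dictionary header with the dictionary's hash (plus Dictionary-Id when an id was registered), and the server may answer with Content-Encoding: dcb (dictionary-compressed Brotli) or dcz (dictionary-compressed Zstandard), which the browser decodes using the stored dictionary. A minimal client-side sketch, assuming the WPT helper endpoints used throughout these fixtures (register-dictionary.py, compressed-data.py):

// Sketch only: mirrors the fetch pattern used by the tests in this directory.
// register-dictionary.py replies with Use-As-Dictionary, so its body is
// stored as a shared dictionary for matching same-origin requests.
const dict = await (await fetch('resources/register-dictionary.py')).text();

// A later matching request advertises the stored dictionary via
// Available-Dictionary; the server responds with Content-Encoding: dcb or
// dcz, and the body is decoded transparently against that dictionary.
const data = await (await fetch('resources/compressed-data.py?content_encoding=dcb')).text();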

test/fixtures/wpt/fetch/compression-dictionary/dictionary-clear-site-data-cache.tentative.https.html

Lines changed: 1 addition & 1 deletion
@@ -4,7 +4,7 @@
 <meta name="timeout" content="long"/>
 <script src="/resources/testharness.js"></script>
 <script src="/resources/testharnessreport.js"></script>
-<script src="./resources/compression-dictionary-util.js"></script>
+<script src="./resources/compression-dictionary-util.sub.js"></script>
 </head>
 <body>
 <script>

test/fixtures/wpt/fetch/compression-dictionary/dictionary-clear-site-data-cookies.tentative.https.html

Lines changed: 1 addition & 1 deletion
@@ -4,7 +4,7 @@
 <meta name="timeout" content="long"/>
 <script src="/resources/testharness.js"></script>
 <script src="/resources/testharnessreport.js"></script>
-<script src="./resources/compression-dictionary-util.js"></script>
+<script src="./resources/compression-dictionary-util.sub.js"></script>
 </head>
 <body>
 <script>

test/fixtures/wpt/fetch/compression-dictionary/dictionary-clear-site-data-storage.tentative.https.html

Lines changed: 1 addition & 1 deletion
@@ -4,7 +4,7 @@
 <meta name="timeout" content="long"/>
 <script src="/resources/testharness.js"></script>
 <script src="/resources/testharnessreport.js"></script>
-<script src="./resources/compression-dictionary-util.js"></script>
+<script src="./resources/compression-dictionary-util.sub.js"></script>
 </head>
 <body>
 <script>
Lines changed: 105 additions & 0 deletions
@@ -0,0 +1,105 @@
+<!DOCTYPE html>
+<head>
+<meta charset="utf-8">
+<meta name="timeout" content="long"/>
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+<script src="/common/get-host-info.sub.js"></script>
+<script src="./resources/compression-dictionary-util.sub.js"></script>
+</head>
+<body>
+<script>
+
+// This is a set of tests for the dictionary itself being compressed, both by
+// non-dictionary content encodings and dictionary encodings. The encoding used
+// for the dictionary itself is independent of the encoding used for the data
+// so the test uses different encodings just to make sure that the dictionaries
+// don't carry any encoding-specific dependencies.
+
+compression_dictionary_promise_test(async (t) => {
+  const dictionaryUrl =
+      `${SAME_ORIGIN_RESOURCES_URL}/register-dictionary.py?content_encoding=gzip`;
+  const dict = await (await fetch(dictionaryUrl)).text();
+  assert_equals(dict, kDefaultDictionaryContent);
+  const dictionary_hash = await waitUntilAvailableDictionaryHeader(t, {});
+  assert_equals(dictionary_hash, kDefaultDictionaryHashBase64);
+
+  // Check if the data compressed using the dictionary can be decompressed.
+  const data_url = `${kCompressedDataPath}?content_encoding=dcb`;
+  const data = await (await fetch(data_url)).text();
+  assert_equals(data, kExpectedCompressedData);
+}, 'Decompresion using gzip-encoded dictionary works as expected');
+
+compression_dictionary_promise_test(async (t) => {
+  const dictionaryUrl =
+      `${SAME_ORIGIN_RESOURCES_URL}/register-dictionary.py?content_encoding=br`;
+  const dict = await (await fetch(dictionaryUrl)).text();
+  assert_equals(dict, kDefaultDictionaryContent);
+  const dictionary_hash = await waitUntilAvailableDictionaryHeader(t, {});
+  assert_equals(dictionary_hash, kDefaultDictionaryHashBase64);
+
+  // Check if the data compressed using the dictionary can be decompressed.
+  const data_url = `${kCompressedDataPath}?content_encoding=dcz`;
+  const data = await (await fetch(data_url)).text();
+  assert_equals(data, kExpectedCompressedData);
+}, 'Decompresion using Brotli-encoded dictionary works as expected');
+
+compression_dictionary_promise_test(async (t) => {
+  const dictionaryUrl =
+      `${SAME_ORIGIN_RESOURCES_URL}/register-dictionary.py?content_encoding=zstd`;
+  const dict = await (await fetch(dictionaryUrl)).text();
+  assert_equals(dict, kDefaultDictionaryContent);
+  const dictionary_hash = await waitUntilAvailableDictionaryHeader(t, {});
+  assert_equals(dictionary_hash, kDefaultDictionaryHashBase64);
+
+  // Check if the data compressed using Brotli with the dictionary can be
+  // decompressed (Zstandard decompression of the data is tested separately).
+  const data_url = `${kCompressedDataPath}?content_encoding=dcb`;
+  const data = await (await fetch(data_url)).text();
+  assert_equals(data, kExpectedCompressedData);
+}, 'Decompresion using Zstandard-encoded dictionary works as expected');
+
+compression_dictionary_promise_test(async (t) => {
+  const dictionaryUrl = `${SAME_ORIGIN_RESOURCES_URL}/register-dictionary.py?id=id1`;
+  const dict = await (await fetch(dictionaryUrl)).text();
+  assert_equals(dict, kDefaultDictionaryContent);
+  assert_equals(
+      await waitUntilAvailableDictionaryHeader(t, {}),
+      kDefaultDictionaryHashBase64);
+
+  // Register another dictionary, compressed with dcb using the first dictionary.
+  const compressedDictionaryUrl =
+      `${SAME_ORIGIN_RESOURCES_URL}/register-dictionary.py?content_encoding=dcb&id=id2`;
+  const dict2 = await (await fetch(compressedDictionaryUrl)).text();
+  assert_equals(dict2, kDefaultDictionaryContent);
+  await waitUntilHeader(t, "dictionary-id", {expected_header: '"id2"'});
+
+  // Check if the data compressed using dcz with the updated dictionary works.
+  const data_url = `${SAME_ORIGIN_RESOURCES_URL}/compressed-data.py?content_encoding=dcz`;
+  const data = await (await fetch(data_url)).text();
+  assert_equals(data, kExpectedCompressedData);
+}, 'A dcb dictionary-compressed dictionary can be used as a dictionary for future requests.');
+
+compression_dictionary_promise_test(async (t) => {
+  const dictionaryUrl = `${SAME_ORIGIN_RESOURCES_URL}/register-dictionary.py?id=id1`;
+  const dict = await (await fetch(dictionaryUrl)).text();
+  assert_equals(dict, kDefaultDictionaryContent);
+  assert_equals(
+      await waitUntilAvailableDictionaryHeader(t, {}),
+      kDefaultDictionaryHashBase64);
+
+  // Register another dictionary, compressed with dcz using the first dictionary.
+  const compressedDictionaryUrl =
+      `${SAME_ORIGIN_RESOURCES_URL}/register-dictionary.py?content_encoding=dcz&id=id2`;
+  const dict2 = await (await fetch(compressedDictionaryUrl)).text();
+  assert_equals(dict2, kDefaultDictionaryContent);
+  await waitUntilHeader(t, "dictionary-id", {expected_header: '"id2"'});
+
+  // Check if the data compressed using dcb with the updated dictionary works.
+  const data_url = `${SAME_ORIGIN_RESOURCES_URL}/compressed-data.py?content_encoding=dcb`;
+  const data = await (await fetch(data_url)).text();
+  assert_equals(data, kExpectedCompressedData);
+}, 'A dcz dictionary-compressed dictionary can be used as a dictionary for future requests.');
+
+</script>
+</body>
