Skip to content

Commit da39dfa

Browse files
committed
Add tests and documentation
1 parent dfb5c4c commit da39dfa

File tree

10 files changed

+313
-192
lines changed

10 files changed

+313
-192
lines changed

README.md

Lines changed: 10 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -21,16 +21,24 @@ Our modified version of the wptserve HTTP server implementation can be found in
2121
- Manually check if the server and the tests are working: Visit http://sub.headers.websec.saarland:80/_hp/tests/framing.sub.html
2222
- Automatic testrunners:
2323
- `cd _hp/hp/tools/crawler`
24-
- Android: `poetry run python android_intent.py` (Additional config required)
24+
- Android: `poetry run python android_intent.py` (TODO: Additional config required; solve android_intent and more?!)
2525
- MacOS/Ubuntu: `poetry run python desktop_selenium.py` (For a quick test run: `poetry run python desktop_selenium.py --debug_browsers --resp_type debug --ignore_certs`)
2626
- iPadOS/iOS: `poetry run python desktop_selenium.py --gen_page_runner --page_runner_json urls.json --max_urls_until_restart 10000`, then visit the URLs in that file manually
2727
- TODO: Exact settings of the runs for our experiment:
2828
- TODO: some information about how to exactly reproduce our results?
2929
- TODO: repeat to ensure each test has 5x repetitions (`poetry run python create_repeat ...`)
3030
- ...
31+
- Optional configuration to run headfull browsers on linux server:
32+
```bash
33+
Xvfb :99 -screen 0 1920x1080x24 &
34+
x11vnc -display :99 -bg -shared -forever -passwd abc -xkb -rfbport 5900
35+
export DISPLAY=:99 && fluxbox -log fluxbox.log &
36+
```
3137
- Analysis:
3238
- Run `cd _hp/hp/tools/analysis && poetry run jupyter-lab`
33-
- Open `_hp/hp/tools/analysis/main_analysis_desktop_basic+parsing.ipynb` (Also contains the mobile analysis)
39+
- Open `_hp/hp/tools/analysis/main_analysis_desktop_basic+parsing.ipynb`
40+
- TODO: rename: (Also contains the mobile analysis)
41+
- TODO: check analysis code and improve
3442

3543
## Inventory
3644
- `_hp/`: All test and analysis code for the paper:

_hp/hp/test_internals.py

Lines changed: 43 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,10 +1,10 @@
11
import pytest
22
import json
3-
import httpx
43

5-
from hp.tools.crawler.selenium_run_specific import config, run_specific
6-
from hp.tools.crawler.utils import generate_short_uuid
4+
from hp.tools.crawler.selenium_run_specific import run_specific
5+
from hp.tools.crawler.utils import generate_short_uuid, get_tests, get_resp_ids, get_or_create_browser
76
from hp.tools.create_responses import create_responses
7+
from hp.tools.crawler.desktop_selenium import get_child_processes
88

99
with open("_hp/wpt-config.json", "r") as f:
1010
wpt_config = json.load(f)
@@ -14,10 +14,14 @@ def test_generate_short_uuid():
1414
"""Assert length of generate_short_uuid is correct."""
1515
assert len(generate_short_uuid(3)) == 3
1616

17-
1817
def test_selenium_test_specific():
1918
"""Smoke test for run_specific: function runs without crashing, browser can visit one of our test pages."""
2019
url = f"http://sub.{wpt_config['browser_host']}/_hp/tests/referrer-access-rp.sub.html?resp_type=basic&browser_id=1&label=RP&first_id=199&last_id=199&scheme=http&t_resp_id=199&t_element_relation=iframe_window.open&t_resp_origin=http://sub.{wpt_config['browser_host']}"
20+
config = [
21+
# Browsers (managed by Selenium itself)
22+
# Released 2024-01-23
23+
("firefox", "122", None, ["-headless"], get_or_create_browser("firefox", "122", "Ubuntu 22.04", "headless", "selenium", "")),
24+
]
2125
for browser_name, browser_version, binary_location, arguments, _ in config:
2226
run_specific(url, browser_name, browser_version, binary_location, arguments)
2327

@@ -31,3 +35,38 @@ def test_create_responses():
3135
status_code = 200
3236
create_responses(header_list=header_list, label=label, status_code=status_code, resp_type=resp_type)
3337
assert True
38+
39+
def test_get_tests():
40+
"""Check whether get_tests returns test URLs"""
41+
resp_type = "basic"
42+
browser_id = "-5"
43+
scheme = "http"
44+
max_popups = 2
45+
max_resps = 10
46+
browser_modifier = 2
47+
tests = get_tests(resp_type=resp_type, browser_id=browser_id, scheme=scheme, max_popups=max_popups, max_resps=max_resps, browser_modifier=browser_modifier)
48+
assert len(tests) == 325
49+
50+
def test_get_resp_ids():
51+
"""Check whether get_resp_ids returns valid splits
52+
"""
53+
label = "XFO"
54+
resp_type = "basic"
55+
num_resp_ids = 3
56+
splits = get_resp_ids(label=label, resp_type=resp_type, num_resp_ids=num_resp_ids)
57+
58+
assert len(splits) == 4
59+
60+
def test_get_or_create_browser():
61+
"""Check whether a browser configuration entry can be created
62+
"""
63+
# The unknown browser always has to be ID 1
64+
browser = get_or_create_browser(name="Unknown", version="Unknown", os="Unknown", headless_mode="real", automation_mode="manual", add_info=None)
65+
assert browser == 1
66+
67+
def test_get_child_processes():
68+
"""Check that process 0 has several child processes
69+
"""
70+
process_list = get_child_processes(0)
71+
assert len(process_list) > 5
72+

_hp/hp/tools/crawler/android_intent.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -6,7 +6,7 @@
66

77

88
from hp.tools.crawler.desktop_selenium import CustomErrorTimeout, CustomTimeout
9-
from hp.tools.crawler.utils import get_tests, get_or_create_browser, TIMEOUT
9+
from hp.tools.crawler.utils import get_tests, get_or_create_browser
1010
from multiprocessing import Pool
1111
import psycopg
1212
import uuid
@@ -206,7 +206,7 @@ def main(browser_list, url_dict, repeat_times, num_devices, resp_type, auto_rest
206206

207207
device_ids = get_available_device()
208208
while len(device_ids) > 0:
209-
print('Force stop current emulatos ...')
209+
print('Force stop current emulators ...')
210210
force_stop_emulators()
211211
device_ids = get_available_device()
212212
time.sleep(2)
Lines changed: 12 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,6 @@
1+
"""
2+
Generate URLs to visit that contain all URLs that have to be repeated in a browser (iOS/MacOS)
3+
"""
14
import json
25
from hp.tools.crawler.utils import create_test_page_runner, generate_short_uuid
36
import argparse
@@ -6,17 +9,17 @@
69
parser = argparse.ArgumentParser(description="Convert repeats for pagerunner.")
710
parser.add_argument("--browser_id", type=int, required=True)
811
args = parser.parse_args()
12+
browser_id = str(args.browser_id)
13+
rand_token = generate_short_uuid()
914

1015
with open("../repeat.json", "r") as f:
1116
d = json.load(f)
12-
13-
browser_id = str(args.browser_id)
14-
15-
rand_token = generate_short_uuid()
17+
# Ensure there are some entries for the browser_id in the repeat dict
1618
print(len(d[browser_id]))
17-
18-
safari_special = False
19-
if safari_special:
19+
20+
# Either create one repeat URL or several ones with maximum 1000 URLs per chunk
21+
chunk = False
22+
if chunk:
2023
test_urls = d[browser_id]
2124
url_chunks = [test_urls[i:i + 1000] for i in range(0, len(test_urls), 1000)]
2225
url_list = []
@@ -26,8 +29,8 @@
2629
chunk_id += 1
2730

2831
print(f"URLs to visit: {url_list}")
29-
with open(f"parsing-MaxURLs1000-MaxResps10-MaxPopups100-{rand_token}.json", "w") as f:
32+
with open(f"repeats-MaxURLs1000-{rand_token}.json", "w") as f:
3033
json.dump(url_list, f)
3134
else:
3235
r = create_test_page_runner(browser_id, f"{rand_token}-0", d[browser_id])
33-
print(r)
36+
print(r)

_hp/hp/tools/crawler/demo_receiver.py

Lines changed: 0 additions & 33 deletions
This file was deleted.

0 commit comments

Comments
 (0)