Skip to content
This repository was archived by the owner on Mar 10, 2026. It is now read-only.

Commit 0f4b96d

Browse files
committed
test: Create test for Selenium with bad url
1 parent 56c96c7 commit 0f4b96d

File tree

2 files changed

+39
-0
lines changed

2 files changed

+39
-0
lines changed

.pre-commit-config.yaml

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -33,3 +33,4 @@ repos:
3333
rev: '1.9.2'
3434
hooks:
3535
- id: bandit
36+
args: ["--exclude", "tests"]

tests/core/test_network.py

Lines changed: 38 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,10 @@
11
"""Tests for the network module."""
22

3+
import json
4+
5+
import mdverse_scrapers.core.network as network
36
import mdverse_scrapers.core.toolbox as toolbox
7+
from mdverse_scrapers.core.logger import create_logger
48

59

610
def test_make_http_get_request_with_retries_200():
@@ -38,3 +42,37 @@ def test_make_http_get_request_with_retries_404():
3842
max_attempts=1,
3943
)
4044
assert response is None
45+
46+
47+
def test_get_html_page_with_selenium_good_url():
    """Test the get_html_page_with_selenium function with a valid URL.

    The target page is a figshare file-structure preview whose <pre> tag
    contains a JSON document describing the dataset's directory tree.
    """
    url = "https://figshare.com/ndownloader/files/21988230/preview/21988230/structure.json"
    # Expected directory structure as published on figshare for this record.
    expected_json = {
        "files": [],
        "path": "ROOT",
        "dirs": [
            {
                "files": [
                    {"path": "NIPAM-FF1.3x/NIPAM-64-wat-ch-1.3.top"},
                    {"path": "NIPAM-FF1.3x/NIPAM-64-wat.gro"},
                    {"path": "NIPAM-FF1.3x/md.mdp"},
                    {"path": "NIPAM-FF1.3x/NIPAM-ch-1.3.itp"},
                ],
                "path": "NIPAM-FF1.3x",
                "dirs": [],
            }
        ],
    }
    content = network.get_html_page_with_selenium(url=url, tag="pre")
    assert json.loads(content) == expected_json
68+
69+
70+
def test_get_html_page_with_selenium_bad_url(capsys) -> None:
    """Test the get_html_page_with_selenium function with a bad URL.

    The page never yields the expected <pre> tag, so the helper should
    give up, return None, and report a timeout through its logger.
    """
    bad_url = "https://figshare.com/ndownloader/files/28089615/preview/28089615/structure.json"
    debug_logger = create_logger(level="DEBUG")
    result = network.get_html_page_with_selenium(
        url=bad_url, tag="pre", logger=debug_logger
    )
    assert result is None
    assert "Timeout while retrieving page" in capsys.readouterr().out

0 commit comments

Comments
 (0)