Skip to content

Commit 54ffa11

Browse files
Jannis-Mittenzwei and ArBridgeman
authored and committed
changed docs:links
1 parent 75e2e3d commit 54ffa11

File tree

2 files changed

+90
-32
lines changed

2 files changed

+90
-32
lines changed

doc/conf.py

Lines changed: 3 additions & 3 deletions
Original file line number | Diff line number | Diff line change
@@ -79,6 +79,6 @@
7979
"accent_color": "grass",
8080
}
8181
# -- Configure link checking behavior ----------------------------------------
82-
linkcheck_ignore = [
83-
r'http[s]?://'
84-
]
82+
extra_linkcheck_ignores = os.getenv("SPHINX_EXTRA_LINKCHECK_IGNORES")
83+
linkcheck_ignore = [] if not extra_linkcheck_ignores else extra_linkcheck_ignores.split(",")
84+

exasol/toolbox/nox/_documentation.py

Lines changed: 87 additions & 29 deletions
Original file line number | Diff line number | Diff line change
@@ -1,9 +1,12 @@
11
from __future__ import annotations
22

3+
import json
4+
import os
5+
import re
36
import shutil
47
import subprocess
58
import sys
6-
import requests
9+
import tempfile
710
import webbrowser
811
from itertools import repeat
912
from pathlib import Path
@@ -14,17 +17,19 @@
1417
Tuple,
1518
)
1619

17-
import re
1820
import nox
1921
from nox import Session
22+
from requests import (
23+
get,
24+
head,
25+
)
26+
from requests.exceptions import Timeout
2027

2128
from exasol.toolbox.nox._shared import DOCS_OUTPUT_DIR
2229
from noxconfig import (
2330
PROJECT_CONFIG,
2431
Config,
2532
)
26-
import tempfile
27-
import json
2833

2934

3035
def _build_docs(session: nox.Session, config: Config) -> None:
@@ -103,54 +108,107 @@ def clean_docs(_session: Session) -> None:
103108
@nox.session(name="docs:links", python=False)
104109
def docs_list_links(session: Session) -> None:
105110
"""List all the links within the documentation."""
106-
for path, url in _doc_urls(_doc_files(PROJECT_CONFIG.root)):
107-
session.log(f"Url: {url}, File: {path}")
111+
ignore = [r".*"]
112+
env = os.environ.copy()
113+
env["SPHINX_EXTRA_LINKCHECK_IGNORES"] = ",".join(ignore)
114+
with tempfile.TemporaryDirectory() as path:
115+
tmpdir = Path(path)
116+
sp = subprocess.run(
117+
[
118+
"poetry",
119+
"run",
120+
"--",
121+
"sphinx-build",
122+
"-b",
123+
"linkcheck",
124+
PROJECT_CONFIG.root / "doc",
125+
tmpdir,
126+
],
127+
capture_output=True,
128+
text=True,
129+
env=env,
130+
)
131+
print(sp.returncode)
132+
if sp.returncode >= 2:
133+
print(sp.stderr)
134+
session.error(2)
135+
output = tmpdir / "output.json"
136+
links = output.read_text().split("\n")
137+
file_links = []
138+
for link in links:
139+
if link != "":
140+
line = json.loads(link)
141+
if not line["uri"].startswith("#"):
142+
file_links.append(line)
143+
file_links.sort(key=lambda file: file["filename"])
144+
print(
145+
"\n".join(
146+
f"filename: {f["filename"]} -> uri: {f["uri"]}" for f in file_links
147+
)
148+
)
108149

109150

110151
@nox.session(name="docs:links:check", python=False)
111152
def docs_links_check(session: Session) -> None:
112153
"""Checks whether all links in the documentation are accessible."""
113-
with tempfile.TemporaryDirectory() as tmpdir:
114-
tmpdir = Path(tmpdir)
115-
sp = subprocess.run(["poetry", "run", "--", "sphinx-build", "-b", 'linkcheck', PROJECT_CONFIG.root/"doc", tmpdir], capture_output=True, text=True)
154+
ignore = [r"https?://"]
155+
env = os.environ.copy()
156+
env["SPHINX_EXTRA_LINKCHECK_IGNORES"] = ",".join(ignore)
157+
with tempfile.TemporaryDirectory() as path:
158+
tmpdir = Path(path)
159+
sp = subprocess.run(
160+
[
161+
"poetry",
162+
"run",
163+
"--",
164+
"sphinx-build",
165+
"-b",
166+
"linkcheck",
167+
PROJECT_CONFIG.root / "doc",
168+
tmpdir,
169+
],
170+
capture_output=True,
171+
text=True,
172+
env=env,
173+
)
116174
print(sp.returncode)
117175
if sp.returncode >= 2:
118176
print(sp.stderr)
119177
session.error(2)
120-
output = tmpdir/"output.json"
178+
output = tmpdir / "output.json"
121179
results = output.read_text().split("\n")
122180
reslen = len(results)
123181
resstr = results[-1]
124182
if (reslen == 0) or ((reslen == 1) and (resstr == "")):
125183
return
126184
elif resstr == "":
127185
results.pop()
128-
for line, result in enumerate(results):
186+
for line_nr, result in enumerate(results):
129187
resdict = json.loads(result)
130-
if resdict['status'] == 'ignored' and resdict['uri'].startswith('http'):
188+
if resdict["status"] == "ignored" and resdict["uri"].startswith("http"):
131189
try:
132190
match = re.search(r"https?://[^\s\"\'<>]+", resdict["uri"])
133191
if match:
134-
resdict['uri'] = match.group()
135-
print(f"{line}/{reslen}")
136-
result = requests.head(resdict['uri'], timeout=5)
137-
if result.status_code != 200:
138-
result = requests.get(resdict['uri'], timeout=5, stream=True)
139-
result.close()
140-
if result.status_code >= 400:
141-
resdict['status'] = 'broken'
142-
resdict['code'] = result.status_code
143-
if result.status_code < 400:
144-
resdict['status'] = 'working'
145-
resdict['code'] = result.status_code
146-
except requests.exceptions.Timeout:
147-
resdict['status'] = 'timeout'
148-
results[line] = json.dumps(resdict)
192+
resdict["uri"] = match.group()
193+
print(f"{line_nr}/{reslen}")
194+
request = head(resdict["uri"], timeout=5)
195+
if request.status_code != 200:
196+
request = get(resdict["uri"], timeout=5, stream=True)
197+
request.close()
198+
if request.status_code >= 400:
199+
resdict["status"] = "broken"
200+
resdict["code"] = request.status_code
201+
if request.status_code < 400:
202+
resdict["status"] = "working"
203+
resdict["code"] = request.status_code
204+
except Timeout:
205+
resdict["status"] = "timeout"
206+
results[line_nr] = json.dumps(resdict)
149207
output.write_text("\n".join(f"{r}" for r in results))
150208
errors = []
151209
for result in results:
152-
line = json.loads(result)
153-
if (line["status"] == "broken") or line["status"] == "timeout":
210+
data = json.loads(result)
211+
if (data["status"] == "broken") or data["status"] == "timeout":
154212
errors.append(result)
155213
if errors:
156214
print("Error" + "s" if len(errors) > 1 else "")

0 commit comments

Comments
 (0)