
Commit b497e62

basic incremental worker
1 parent bfa6cfb commit b497e62

5 files changed, +75 -27 lines changed

sphinxdocs/private/sphinx.bzl

Lines changed: 13 additions & 7 deletions
@@ -272,19 +272,22 @@ def _run_sphinx(ctx, format, source_path, inputs, output_prefix, use_persistent_workers
 
     args.add("--show-traceback") # Full tracebacks on error
     run_args.append("--show-traceback")
-    args.add("--builder", format)
-    run_args.extend(("--builder", format))
+    args.add(format, format = "--builder=%s")
+    run_args.append("--builder={}".format(format))
 
-    if ctx.attr._quiet_flag[BuildSettingInfo].value:
-        # Not added to run_args because run_args is for debugging
-        args.add("--quiet") # Suppress stdout informational text
+    ##if ctx.attr._quiet_flag[BuildSettingInfo].value:
+    ##    # Not added to run_args because run_args is for debugging
+    ##    args.add("--quiet") # Suppress stdout informational text
 
     # Build in parallel, if possible
     # Don't add to run_args: parallel building breaks interactive debugging
-    args.add("--jobs", "auto")
+    args.add("--jobs=auto")
 
     if use_persistent_workers:
-        args.add("--doctree-dir", paths.join(output_dir.path, ".doctrees"))
+        # Sphinx normally uses `.doctrees`, but we use underscore so it isn't
+        # hidden by default
+        args.add(paths.join(output_dir.path + "_doctrees"), format = "--doctree-dir=%s")
+
     else:
         args.add("--fresh-env") # Don't try to use cache files. Bazel can't make use of them.
         run_args.append("--fresh-env")
@@ -312,6 +315,9 @@ def _run_sphinx(ctx, format, source_path, inputs, output_prefix, use_persistent_workers
 
     execution_requirements = {}
     if use_persistent_workers:
+        args.add("-v")
+        args.add("-v")
+        args.add("-v")
         args.use_param_file("@%s", use_always = True)
         args.set_param_file_format("multiline")
         execution_requirements["supports-workers"] = "1"
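Setting execution_requirements["supports-workers"] = "1" together with the always-on param file (use_param_file("@%s", use_always = True), multiline format) is what lets Bazel run this action as a persistent worker. Outside worker mode the arguments are read from the @params file; in worker mode Bazel instead sends them per request as newline-delimited JSON on the worker's stdin and reads a JSON reply from stdout. Roughly, with made-up argument and path values for illustration (the field names follow Bazel's worker protocol):

    # One WorkRequest per line on stdin:
    work_request = {
        "arguments": ["--builder=html", "--doctree-dir=.../_doctrees", "srcdir", "outdir"],
        "inputs": [{"path": "docs/index.rst", "digest": "..."}],
        "requestId": 1,
    }
    # One WorkResponse per line on stdout:
    work_response = {
        "requestId": 1,
        "exitCode": 0,
        "output": "warnings/errors to surface in the Bazel log",
    }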

sphinxdocs/private/sphinx_build.py

Lines changed: 39 additions & 19 deletions
@@ -23,17 +23,27 @@
 # parser.add_argument('srcdir')
 # parser.add_argument('outdir')
 parser.add_argument("--persistent_worker", action="store_true")
-parser.add_argument("--doctree-dir")
+##parser.add_argument("--doctree-dir")
 
+logger = logging.getLogger('sphinxdocs-build')
 
 class Worker:
 
     def __init__(self, instream: "typing.TextIO", outstream: "typing.TextIO"):
         self._instream = instream
         self._outstream = outstream
-        self._logger = logging.getLogger("worker")
-        logging.basicConfig(filename='echo.log', encoding='utf-8', level=logging.DEBUG)
-        self._logger.info("starting worker")
+        # Annoying. Sphinx resets its logging config as part of main()
+        # and the Sphinx() app setup/invocation. So any logging we try
+        # to set up here to get info out of Sphinx is meaningless.
+        # -v -v -v will output more logging, but to stderr/stdout, and thus
+        # bazel's worker log file, due to sphinx's logging re-configuration.
+        # one-liner to get the most recent worker log:
+        # find $workerLogDir -type f -printf '%T@ %p\n' | sort -n | tail -1 | awk '{print $2}'
+        logging.basicConfig(
+            ##filename='/tmp/sphinx-builder.log', encoding='utf-8',
+            level=logging.DEBUG
+        )
+        logger.info("starting worker")
         self._current = {}
         self._previous = {}
         self._cache = {}
@@ -45,13 +55,14 @@ def run(self) -> None:
                 try:
                     request = self._get_next_request()
                     if request is None:
-                        self._logger.info("Empty request: exiting")
+                        logger.info("Empty request: exiting")
                         break
                     response = self._process_request(request)
+                    logger.info("response:%s", response)
                     if response:
                         self._send_response(response)
                 except Exception:
-                    self._logger.exception("Unhandled error: request=%s", request)
+                    logger.exception("Unhandled error: request=%s", request)
                     output = (
                         f"Unhandled error:\nRequest: {request}\n"
                         + traceback.format_exc()
@@ -65,7 +76,7 @@ def run(self) -> None:
                         }
                     )
         finally:
-            self._logger.info("Worker shutting down")
+            logger.info("Worker shutting down")
 
     def _get_next_request(self) -> "object | None":
         line = self._instream.readline()
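The loop above runs on newline-delimited JSON: readline() pulls one WorkRequest per line from stdin, and _send_response() writes one WorkResponse per line to stdout. The hunk cuts off at readline(); the rest of that pair presumably looks something like this sketch (illustrative, not the exact lines of this file):

    def _get_next_request(self):
        line = self._instream.readline()
        if not line.strip():
            return None  # empty line / EOF: Bazel wants the worker to shut down
        return json.loads(line)  # one JSON WorkRequest per line

    def _send_response(self, response):
        self._outstream.write(json.dumps(response) + "\n")  # one JSON WorkResponse per line
        self._outstream.flush()  # Bazel waits for the flushed line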
@@ -81,13 +92,14 @@ def inputs(self):
 
     def _update_digest(self, request):
         args, unknown = parser.parse_known_args(request["arguments"])
-        # Make room for the new build's data.
+        # Make room for the new build's data.
         self._previous = self._current
         # Rearrange the new data into a dict to make comparisons easier.
         self._current = {}
         for page in request["inputs"]:
            path = page["path"]
            self._current[path] = page["digest"]
+           logger.info("path mtime: %s", pathlib.Path(path).stat().st_mtime)
         # Compare the content hashes to determine what pages have changed.
         changed_paths = []
         for path in self._current:
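The comparison that fills changed_paths is elided between this hunk and the next; given the {path: digest} maps built above, the intent is presumably a simple dict diff along these lines (a sketch, not the elided code):

    # A page counts as changed if its digest is new, or differs from the digest
    # recorded for the same path in the previous request.
    changed_paths = []
    for path, digest in self._current.items():
        if self._previous.get(path) != digest:
            changed_paths.append(path)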
@@ -104,20 +116,21 @@ def _update_digest(self, request):
         # Normalize the paths into docnames
         digest = []
         for path in changed_paths:
+            logger.info("Changed: %s", path)
             if not path.endswith(".rst"):
                 continue
             srcdir = self.args[0]
             docname = path.replace(srcdir + "/", "")
             docname = docname.replace(".rst", "")
             digest.append(docname)
         args, unknown = parser.parse_known_args(self.args)
-        # Save the digest.
-        doctree_dir = Path(args.doctree_dir)
-        # On a fresh build, _restore_cache() does nothing, so this dir won't exist yet.
-        if not doctree_dir.is_dir():
-            doctree_dir.mkdir(parents=True)
-        with open(doctree_dir / Path("digest.json"), "w") as f:
-            json.dump(digest, f, indent=2)
+        ### Save the digest.
+        ##doctree_dir = Path(args.doctree_dir)
+        ### On a fresh build, _restore_cache() does nothing, so this dir won't exist yet.
+        ##if not doctree_dir.is_dir():
+        ##    doctree_dir.mkdir(parents=True)
+        ##with open(doctree_dir / Path("digest.json"), "w") as f:
+        ##    json.dump(digest, f, indent=2)
 
     def _restore_cache(self):
         for filepath in self._cache:
@@ -138,13 +151,20 @@ def _update_cache(self):
                 self._cache[str(filepath)] = f.read()
 
     def _process_request(self, request: "WorkRequest") -> "WorkResponse | None":
+        logger.info("request:%s", json.dumps(request, sort_keys=True, indent=2))
         if request.get("cancel"):
             return None
         self.args = request["arguments"]
-        self._restore_cache()
-        self._update_digest(request)
-        main(self.args)
-        self._update_cache()
+        ##self._restore_cache()
+        ##self._update_digest(request)
+        logger.info("main: %s", self.args)
+        orig_stdout = sys.stdout
+        sys.stdout = sys.stderr
+        try:
+            main(self.args)
+        finally:
+            sys.stdout = orig_stdout
+        ##self._update_cache()
         response = {
             "requestId": request.get("requestId", 0),
             "exitCode": 0,

sphinxdocs/tests/sphinx_docs/conf.py

Lines changed: 14 additions & 1 deletion
@@ -5,11 +5,24 @@
 
 # -- Project info
 
-project = "Sphinx Docs Test"
+project = "Sphinx Docs Test xx"
 
 extensions = [
     "myst_parser",
 ]
 myst_enable_extensions = [
     "colon_fence",
 ]
+
+import logging
+logger = logging.getLogger('conf')
+
+def on_env_get_outdated(*args, **kwargs):
+    logger.info("env-get-outdated args: %s", args)
+    logger.info("env-get-outdated kwargs: %s", kwargs)
+    return []
+
+
+def setup(app):
+
+    app.connect('env-get-outdated', on_env_get_outdated)
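For context, Sphinx emits env-get-outdated after the environment decides which documents to re-read, and each handler returns extra docnames to add to that set; the hook above only logs its arguments and returns []. A fuller handler, using Sphinx's documented signature, would look roughly like this (the digest-file wiring is hypothetical and not part of this commit):

    def on_env_get_outdated(app, env, added, changed, removed):
        # added/changed/removed are the docname sets Sphinx already plans to handle;
        # returning additional docnames forces those documents to be re-read too.
        logger.info("added=%s changed=%s removed=%s", added, changed, removed)
        extra = []  # e.g. docnames read from the worker's digest.json (hypothetical)
        return extra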
Lines changed: 4 additions & 0 deletions
@@ -0,0 +1,4 @@
+# doc1
+
+hello doc 1
+x
Lines changed: 5 additions & 0 deletions
@@ -0,0 +1,5 @@
+# doc 2
+
+
+hello doc 3
+x
