-
-
Notifications
You must be signed in to change notification settings - Fork 636
feat: add persistent worker for sphinxdocs #2938
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Merged
Merged
Changes from 2 commits
Commits
Show all changes
20 commits
Select commit
Hold shift + click to select a range
b655782
feat: add persistent worker for sphinxdocs
ccfadf8
Merge branch 'main' of https://github.com/bazel-contrib/rules_python …
rickeylev fb6279b
rename tmp to changed paths
rickeylev 782b0d4
rename use_cache to use_persistent_worker
rickeylev 4cf2ca8
cleanup logic to run action
rickeylev bfa6cfb
fix use_persistent_workers type, enable for basic test
rickeylev b497e62
basic incremental worker
rickeylev 3420416
generate info file for extensions to use. also cleanup
rickeylev 34e1cde
cleanup
rickeylev bed4556
cleanup
rickeylev c487060
doc attr
rickeylev d968327
trying to debug rbe
rickeylev 4cea20a
support non-worker invocation when rules try to use a worker invocation
rickeylev 2912daf
fix bad arg
rickeylev 78651ea
always set doctreedir
rickeylev 283e565
enable worker by default so it's used when available
rickeylev 1e7fa2a
fix doc typo
rickeylev ceefdce
rm old use_persistent_Worker arg name
rickeylev f6e1c8b
format files
rickeylev 2520a2d
register config key so it's available
rickeylev File filter
Filter by extension
Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
There are no files selected for viewing
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -1,8 +1,164 @@ | ||
| from pathlib import Path | ||
|
|
||
| import argparse | ||
| import json | ||
| import logging | ||
| import os | ||
| import pathlib | ||
| import sys | ||
| import time | ||
| import traceback | ||
| import typing | ||
|
|
||
| from sphinx.cmd.build import main | ||
|
|
||
|
|
||
# Placeholder type aliases for the Bazel persistent-worker protocol messages.
# The protocol objects are plain dicts parsed from JSON lines; these names
# exist only so the method signatures below are self-describing.
WorkRequest = object
WorkResponse = object
|
|
||
|
|
||
# Command-line parser shared by both invocation modes. `fromfile_prefix_chars`
# lets Bazel pass arguments via an @params file. Positional Sphinx arguments
# (srcdir, outdir, ...) are intentionally NOT declared: callers use
# parse_known_args() and forward the remainder to Sphinx untouched.
parser = argparse.ArgumentParser(fromfile_prefix_chars="@")
# When set, run as a Bazel persistent worker speaking JSON over stdin/stdout.
parser.add_argument("--persistent_worker", action="store_true")
# Directory where Sphinx keeps its pickled doctrees; also where the worker
# writes digest.json describing which docs changed.
parser.add_argument("--doctree-dir")
|
|
||
|
|
||
class Worker:
    """Bazel persistent worker that runs Sphinx builds incrementally.

    Speaks the Bazel persistent-worker JSON protocol over the given streams:
    reads one JSON work request per line from `instream` and writes one JSON
    response per line to `outstream`.

    Between requests it keeps two pieces of state in memory so Sphinx can
    rebuild only what changed:
      * a map of input path -> content digest from the previous request,
        used to compute which documents changed, and
      * a byte-for-byte snapshot of the doctree cache directory, restored
        to disk before each build (Bazel may scrub the output tree between
        actions).
    """

    def __init__(self, instream: "typing.TextIO", outstream: "typing.TextIO"):
        self._instream = instream
        self._outstream = outstream
        self._logger = logging.getLogger("worker")
        # Log to stderr: a persistent worker's stdout must carry only the
        # JSON protocol. (A debug leftover previously logged to a hardcoded
        # "echo.log" file in the working directory.)
        logging.basicConfig(level=logging.DEBUG)
        self._logger.info("starting worker")
        # Input path -> content digest for the request being processed.
        self._current = {}
        # Input path -> content digest from the previous request.
        self._previous = {}
        # Doctree-cache file path -> file bytes, captured after each build.
        self._cache = {}

    def run(self) -> None:
        """Serve requests until stdin closes (Bazel shutting the worker down)."""
        try:
            while True:
                request = None
                try:
                    request = self._get_next_request()
                    if request is None:
                        self._logger.info("Empty request: exiting")
                        break
                    response = self._process_request(request)
                    if response:
                        self._send_response(response)
                except Exception:
                    # Report the failure for this one request instead of
                    # letting the exception kill the whole worker.
                    self._logger.exception("Unhandled error: request=%s", request)
                    output = (
                        f"Unhandled error:\nRequest: {request}\n"
                        + traceback.format_exc()
                    )
                    request_id = 0 if not request else request.get("requestId", 0)
                    self._send_response(
                        {
                            "exitCode": 3,
                            "output": output,
                            "requestId": request_id,
                        }
                    )
        finally:
            self._logger.info("Worker shutting down")

    def _get_next_request(self) -> "object | None":
        """Read one JSON work request; returns None on EOF (worker should exit)."""
        line = self._instream.readline()
        if not line:
            return None
        return json.loads(line)

    @property
    def inputs(self):
        """Digest map (path -> digest) for the request being processed.

        NOTE(review): the original body consisted of two no-op expression
        statements and returned a nonexistent ``self._value`` attribute,
        which always raised AttributeError. Returning the current digest
        map is the apparent intent — confirm against callers.
        """
        return self._current

    def _update_digest(self, request) -> None:
        """Determine which inputs changed since the last request.

        Writes the changed docnames as JSON to <doctree-dir>/digest.json so
        Sphinx extensions can consult it.
        """
        # Make room for the new build's data.
        self._previous = self._current
        # Rearrange the new data into a dict to make comparisons easier.
        self._current = {page["path"]: page["digest"] for page in request["inputs"]}
        # Compare the content hashes to determine what pages have changed.
        changed_paths = [
            path
            for path, digest in self._current.items()
            if path not in self._previous or self._previous[path] != digest
        ]
        # Inputs that disappeared entirely also count as changed.
        changed_paths.extend(
            path for path in self._previous if path not in self._current
        )
        # Normalize the changed .rst paths into Sphinx docnames.
        srcdir = self.args[0]
        digest = []
        for path in changed_paths:
            if not path.endswith(".rst"):
                continue
            docname = path.replace(srcdir + "/", "").replace(".rst", "")
            digest.append(docname)
        # Save the digest next to the doctrees.
        args, _unknown = parser.parse_known_args(self.args)
        doctree_dir = Path(args.doctree_dir)
        # On a fresh build, _restore_cache() does nothing, so the directory
        # won't exist yet.
        doctree_dir.mkdir(parents=True, exist_ok=True)
        with open(doctree_dir / "digest.json", "w") as f:
            json.dump(digest, f, indent=2)

    def _restore_cache(self) -> None:
        """Write the in-memory doctree snapshot back to disk before a build."""
        for filepath, data in self._cache.items():
            path = Path(filepath)
            path.parent.mkdir(parents=True, exist_ok=True)
            path.write_bytes(data)

    def _update_cache(self) -> None:
        """Snapshot the doctree directory into memory after a build."""
        args, _unknown = parser.parse_known_args(self.args)
        self._cache = {}
        for root, _dirs, files in os.walk(args.doctree_dir):
            for filename in files:
                filepath = Path(root) / filename
                self._cache[str(filepath)] = filepath.read_bytes()

    def _process_request(self, request: "WorkRequest") -> "WorkResponse | None":
        """Run one Sphinx build for `request`; returns None for cancellations."""
        if request.get("cancel"):
            return None
        self.args = request["arguments"]
        self._restore_cache()
        self._update_digest(request)
        # Propagate Sphinx's exit code: the original discarded main()'s
        # return value, so failed builds were reported as exitCode 0.
        exit_code = main(self.args)
        self._update_cache()
        return {
            "requestId": request.get("requestId", 0),
            "exitCode": exit_code,
        }

    def _send_response(self, response: "WorkResponse") -> None:
        """Emit one JSON response line for Bazel to consume."""
        self._outstream.write(json.dumps(response) + "\n")
        self._outstream.flush()
|
|
||
|
|
||
if __name__ == "__main__":
    # NOTE(review): a stray `sys.exit(main())` used to sit on the first line
    # of this guard, making the worker dispatch below unreachable — Sphinx
    # always ran in one-shot mode. Removed so --persistent_worker works.
    args, _unknown = parser.parse_known_args()
    if args.persistent_worker:
        # Worker mode: serve Bazel work requests over stdin/stdout until EOF.
        Worker(sys.stdin, sys.stdout).run()
    else:
        # One-shot mode: hand sys.argv straight to sphinx-build.
        sys.exit(main())
Add this suggestion to a batch that can be applied as a single commit.
This suggestion is invalid because no changes were made to the code.
Suggestions cannot be applied while the pull request is closed.
Suggestions cannot be applied while viewing a subset of changes.
Only one suggestion per line can be applied in a batch.
Add this suggestion to a batch that can be applied as a single commit.
Applying suggestions on deleted lines is not supported.
You must change the existing code in this line in order to create a valid suggestion.
Outdated suggestions cannot be applied.
This suggestion has been applied or marked resolved.
Suggestions cannot be applied from pending reviews.
Suggestions cannot be applied on multi-line comments.
Suggestions cannot be applied while the pull request is queued to merge.
Suggestion cannot be applied right now. Please check back later.
Uh oh!
There was an error while loading. Please reload this page.