diff --git a/.github/workflows/builddoc.yml b/.github/workflows/builddoc.yml
deleted file mode 100644
index 83186ad4343..00000000000
--- a/.github/workflows/builddoc.yml
+++ /dev/null
@@ -1,43 +0,0 @@
-name: Render documentation
-
-on:
-  push:
-  pull_request:
-  workflow_dispatch:
-
-permissions:
-  contents: read
-
-concurrency:
-  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
-  cancel-in-progress: true
-
-env:
-  FORCE_COLOR: "1"
-
-jobs:
-  build:
-    runs-on: ubuntu-latest
-
-    steps:
-    - uses: actions/checkout@v3
-    - name: Set up Python
-      uses: actions/setup-python@v4
-      with:
-        python-version: 3
-    - name: Install dependencies
-      run: |
-        sudo apt update
-        sudo apt install -y graphviz
-        python -m pip install --upgrade pip
-        python -m pip install .[docs]
-    - name: Render the documentation
-      run: >
-        sphinx-build
-        -M html ./doc ./build/sphinx
-        -T
-        -W
-        --jobs=auto
-        -n
-        -vvv
-        --keep-going
diff --git a/.github/workflows/coverage.yml b/.github/workflows/coverage.yml
deleted file mode 100644
index 868a8c2c81c..00000000000
--- a/.github/workflows/coverage.yml
+++ /dev/null
@@ -1,40 +0,0 @@
-name: Coverage
-
-on: [push]
-
-permissions:
-  contents: read
-
-env:
-  FORCE_COLOR: "1"
-
-jobs:
-  coverage:
-    runs-on: ubuntu-latest
-    if: github.repository_owner == 'sphinx-doc'
-
-    steps:
-    - uses: actions/checkout@v3
-    - name: Set up Python
-      uses: actions/setup-python@v4
-      with:
-        python-version: 3
-
-    - name: Check Python version
-      run: python --version
-
-    - name: Install graphviz
-      run: sudo apt-get install graphviz
-
-    - name: Install dependencies
-      run: |
-        python -m pip install --upgrade pip
-        python -m pip install .[test] pytest-cov
-
-    - name: Test with pytest
-      run: python -m pytest -vv --cov . --cov-append --cov-config pyproject.toml
-      env:
-        VIRTUALENV_SYSTEM_SITE_PACKAGES: "1"
-
-    - name: codecov
-      uses: codecov/codecov-action@v3
diff --git a/.github/workflows/create-release.yml b/.github/workflows/create-release.yml
deleted file mode 100644
index 12ac825fb71..00000000000
--- a/.github/workflows/create-release.yml
+++ /dev/null
@@ -1,23 +0,0 @@
-name: Create release
-
-on:
-  push:
-    tags:
-      - "v*.*.*"
-
-permissions:
-  contents: read
-
-jobs:
-  create-release:
-    permissions:
-      contents: write  # for softprops/action-gh-release to create GitHub release
-    runs-on: ubuntu-latest
-    steps:
-    - name: Checkout
-      uses: actions/checkout@v3
-    - name: Release
-      uses: softprops/action-gh-release@v1
-      if: startsWith(github.ref, 'refs/tags/')
-      with:
-        body: "Changelog: https://www.sphinx-doc.org/en/master/changes.html"
diff --git a/.github/workflows/latex.yml b/.github/workflows/latex.yml
deleted file mode 100644
index 1eaeccaa99a..00000000000
--- a/.github/workflows/latex.yml
+++ /dev/null
@@ -1,38 +0,0 @@
-name: CI (LaTeX)
-
-on: [push, pull_request]
-
-permissions:
-  contents: read
-
-concurrency:
-  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
-  cancel-in-progress: true
-
-jobs:
-  test:
-    runs-on: ubuntu-latest
-    name: Test on LaTeX image
-    container:
-      image: ghcr.io/sphinx-doc/sphinx-ci
-    env:
-      DO_EPUBCHECK: "1"
-    steps:
-    - name: Alias python3 to python
-      run: ln -s /usr/bin/python3 /usr/bin/python
-    - uses: actions/checkout@v3
-    - name: Check Python version
-      run: python --version
-    - name: Install dependencies
-      run: |
-        python -m pip install --upgrade pip
-        python -m pip install .[test]
-    - name: Test with pytest
-      run: >
-        python
-        -X dev
-        -X warn_default_encoding
-        -m pytest
-        -vv
-        --color yes
-        --durations 25
diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml
deleted file mode 100644
index 963906e822c..00000000000
--- a/.github/workflows/lint.yml
+++ /dev/null
@@ -1,126 +0,0 @@
-name: Lint source code
-
-on: [push, pull_request]
-
-permissions:
-  contents: read
-
-concurrency:
-  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
-  cancel-in-progress: true
-
-env:
-  FORCE_COLOR: "1"
-
-jobs:
-  ruff:
-    runs-on: ubuntu-latest
-
-    steps:
-    - uses: actions/checkout@v3
-    - name: Set up Python
-      uses: actions/setup-python@v4
-      with:
-        python-version: 3
-    - name: Install pip
-      run: python -m pip install --upgrade pip
-
-    - name: Install known good Ruff
-      run: python -m pip install ruff==0.0.261
-    - name: Lint with known good Ruff
-      run: ruff . --diff --format github
-
-    - name: Install latest Ruff
-      run: python -m pip install --upgrade ruff
-    - name: Lint with Ruff
-      continue-on-error: true
-      run: ruff . --diff --format github
-
-  flake8:
-    runs-on: ubuntu-latest
-
-    steps:
-    - uses: actions/checkout@v3
-    - name: Set up Python
-      uses: actions/setup-python@v4
-      with:
-        python-version: 3
-    - name: Install dependencies
-      run: |
-        python -m pip install --upgrade pip
-        python -m pip install --upgrade "flake8>=3.5.0" "flake8-simplify"
-    - name: Lint with flake8
-      run: flake8 .
-
-  isort:
-    runs-on: ubuntu-latest
-
-    steps:
-    - uses: actions/checkout@v3
-    - name: Set up Python
-      uses: actions/setup-python@v4
-      with:
-        python-version: 3
-    - name: Install dependencies
-      run: |
-        python -m pip install --upgrade pip
-        python -m pip install --upgrade isort
-    - name: Lint with isort
-      run: isort --check-only --diff .
-
-  mypy:
-    runs-on: ubuntu-latest
-
-    steps:
-    - uses: actions/checkout@v3
-    - name: Set up Python
-      uses: actions/setup-python@v4
-      with:
-        python-version: 3
-    - name: Install dependencies
-      run: |
-        python -m pip install --upgrade pip
-        python -m pip install --upgrade "mypy>=0.990" docutils-stubs types-requests
-    - name: Type check with mypy
-      run: mypy sphinx/
-
-  docs-lint:
-    runs-on: ubuntu-latest
-
-    steps:
-    - uses: actions/checkout@v3
-    - name: Set up Python
-      uses: actions/setup-python@v4
-      with:
-        python-version: 3
-    - name: Install dependencies
-      run: |
-        python -m pip install --upgrade pip
-        python -m pip install --upgrade sphinx-lint
-    - name: Lint documentation with sphinx-lint
-      run: >
-        sphinx-lint
-        --enable line-too-long
-        --max-line-length 85
-        CHANGES
-        CONTRIBUTING.rst
-        README.rst
-        doc/
-
-  twine:
-    runs-on: ubuntu-latest
-
-    steps:
-    - uses: actions/checkout@v3
-    - name: Set up Python
-      uses: actions/setup-python@v4
-      with:
-        python-version: 3
-    - name: Install dependencies
-      run: |
-        python -m pip install --upgrade pip
-        python -m pip install --upgrade twine build
-    - name: Lint with twine
-      run: |
-        python -m build .
-        twine check dist/*
diff --git a/.github/workflows/lock.yml b/.github/workflows/lock.yml
deleted file mode 100644
index d86f4b36282..00000000000
--- a/.github/workflows/lock.yml
+++ /dev/null
@@ -1,20 +0,0 @@
-name: Lock old threads
-
-on:
-  schedule:
-  - cron: "0 0 * * *"
-
-permissions:
-  issues: write
-  pull-requests: write
-
-jobs:
-  action:
-    if: github.repository_owner == 'sphinx-doc'
-    runs-on: ubuntu-latest
-    steps:
-    - uses: dessant/lock-threads@v3
-      with:
-        github-token: ${{ github.token }}
-        issue-inactive-days: "30"
-        pr-inactive-days: "30"
diff --git a/.github/workflows/nodejs.yml b/.github/workflows/nodejs.yml
deleted file mode 100644
index 184d33211a3..00000000000
--- a/.github/workflows/nodejs.yml
+++ /dev/null
@@ -1,27 +0,0 @@
-name: CI (node.js)
-
-on: [push, pull_request]
-
-permissions:
-  contents: read
-
-concurrency:
-  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
-  cancel-in-progress: true
-
-jobs:
-  build:
-    runs-on: ubuntu-latest
-    env:
-      node-version: "16"
-
-    steps:
-    - uses: actions/checkout@v3
-    - name: Use Node.js ${{ env.node-version }}
-      uses: actions/setup-node@v3
-      with:
-        node-version: ${{ env.node-version }}
-        cache: "npm"
-    - run: npm install
-    - name: Run headless test
-      run: xvfb-run -a npm test
diff --git a/.github/workflows/transifex.yml b/.github/workflows/transifex.yml
deleted file mode 100644
index da99f8c0049..00000000000
--- a/.github/workflows/transifex.yml
+++ /dev/null
@@ -1,75 +0,0 @@
-name: Synchronise translations
-
-on:
-  schedule:
-  # 22:38 GMT, every Sunday. Chosen to be a random time.
-  - cron: "38 22 * * SUN"
-  workflow_dispatch:
-
-permissions:
-  contents: read
-
-jobs:
-  push:
-    if: github.repository_owner == 'sphinx-doc'
-    runs-on: ubuntu-latest
-
-    steps:
-    - uses: actions/checkout@v3
-    - name: Set up Python
-      uses: actions/setup-python@v4
-      with:
-        python-version: 3
-    - name: Install transifex client
-      run: |
-        mkdir -p /tmp/tx_cli && cd $_
-        curl -o- https://raw.githubusercontent.com/transifex/cli/master/install.sh | bash
-      shell: bash
-    - name: Install dependencies
-      run: pip install --upgrade babel jinja2
-    - name: Extract translations from source code
-      run: python utils/babel_runner.py extract
-    - name: Push translations to transifex.com
-      run: |
-        cd sphinx/locale
-        /tmp/tx_cli/tx push --source --use-git-timestamps --workers 10
-      env:
-        TX_TOKEN: ${{ secrets.TX_TOKEN }}
-
-  pull:
-    permissions:
-      contents: write  # for peter-evans/create-pull-request to create branch
-      pull-requests: write  # for peter-evans/create-pull-request to create a PR
-    if: github.repository_owner == 'sphinx-doc'
-    runs-on: ubuntu-latest
-
-    steps:
-    - uses: actions/checkout@v3
-    - name: Set up Python
-      uses: actions/setup-python@v4
-      with:
-        python-version: 3
-    - name: Install transifex client
-      run: |
-        mkdir -p /tmp/tx_cli && cd $_
-        curl -o- https://raw.githubusercontent.com/transifex/cli/master/install.sh | bash
-      shell: bash
-    - name: Install dependencies
-      run: pip install --upgrade babel jinja2
-    - name: Extract translations from source code
-      run: python utils/babel_runner.py extract
-    - name: Pull translations from transifex.com
-      run: |
-        cd sphinx/locale
-        /tmp/tx_cli/tx pull --translations --all --force --use-git-timestamps --workers 10
-      env:
-        TX_TOKEN: ${{ secrets.TX_TOKEN }}
-    - name: Compile message catalogs
-      run: python utils/babel_runner.py compile
-    - name: Create Pull Request
-      uses: peter-evans/create-pull-request@v4
-      with:
-        commit-message: "[internationalisation] Update translations"
-        branch: bot/pull-translations
-        title: "[bot]: Update message catalogues"
-        labels: "internals:internationalisation"
diff --git a/tests/test_build_linkcheck.py b/tests/test_build_linkcheck.py
index 260cf2c4214..a48cebb1e2c 100644
--- a/tests/test_build_linkcheck.py
+++ b/tests/test_build_linkcheck.py
@@ -2,13 +2,13 @@
 
 from __future__ import annotations
 
-import base64
 import http.server
 import json
 import re
 import textwrap
 import time
 import wsgiref.handlers
+from base64 import b64encode
 from datetime import datetime
 from os import path
 from queue import Queue
@@ -27,24 +27,39 @@
 
 
 class DefaultsHandler(http.server.BaseHTTPRequestHandler):
+    protocol_version = "HTTP/1.1"
+
     def do_HEAD(self):
         if self.path[1:].rstrip() == "":
             self.send_response(200, "OK")
+            self.send_header("Content-Length", "0")
             self.end_headers()
         elif self.path[1:].rstrip() == "anchor.html":
             self.send_response(200, "OK")
             self.end_headers()
         else:
             self.send_response(404, "Not Found")
+            self.send_header("Content-Length", "0")
             self.end_headers()
 
     def do_GET(self):
-        self.do_HEAD()
         if self.path[1:].rstrip() == "":
-            self.wfile.write(b"ok\n\n")
+            content = b"ok\n\n"
         elif self.path[1:].rstrip() == "anchor.html":
             doc = '<!DOCTYPE html><html><body><a id="found"></a></body></html>'
-            self.wfile.write(doc.encode('utf-8'))
+            content = doc.encode("utf-8")
+        else:
+            content = b""
+
+        if content:
+            self.send_response(200, "OK")
+            self.send_header("Content-Length", str(len(content)))
+            self.end_headers()
+            self.wfile.write(content)
+        else:
+            self.send_response(404, "Not Found")
+            self.send_header("Content-Length", "0")
+            self.end_headers()
 
 
 @pytest.mark.sphinx('linkcheck', testroot='linkcheck', freshenv=True)
@@ -181,6 +196,8 @@ def test_anchors_ignored(app):
 @pytest.mark.sphinx('linkcheck', testroot='linkcheck-localserver-anchor', freshenv=True)
 def test_raises_for_invalid_status(app):
     class InternalServerErrorHandler(http.server.BaseHTTPRequestHandler):
+        protocol_version = "HTTP/1.1"
+
         def do_GET(self):
             self.send_error(500, "Internal Server Error")
 
@@ -194,16 +211,44 @@ def do_GET(self):
     )
 
 
-def capture_headers_handler(records):
-    class HeadersDumperHandler(http.server.BaseHTTPRequestHandler):
+def custom_handler(valid_credentials=None, success_criteria=lambda _: True):
+    """
+    Returns an HTTP request handler that authenticates the client and then determines
+    an appropriate HTTP response code, based on caller-provided credentials and optional
+    success criteria, respectively.
+    """
+    protocol_version = "HTTP/1.1"
+
+    expected_token = None
+    if valid_credentials:
+        assert len(valid_credentials) == 2, "expected a pair of strings as credentials"
+        expected_token = b64encode(":".join(valid_credentials).encode()).decode("utf-8")
+        del valid_credentials
+
+    class CustomHandler(http.server.BaseHTTPRequestHandler):
+        def authenticated(method):
+            def method_if_authenticated(self):
+                if expected_token is None:
+                    return method(self)
+                elif self.headers["Authorization"] == f"Basic {expected_token}":
+                    return method(self)
+                else:
+                    self.send_response(403, "Forbidden")
+                    self.end_headers()
+
+            return method_if_authenticated
+
+        @authenticated
         def do_HEAD(self):
             self.do_GET()
 
+        @authenticated
         def do_GET(self):
-            self.send_response(200, "OK")
+            response = (200, "OK") if success_criteria(self) else (400, "Bad Request")
+            self.send_response(*response)
             self.end_headers()
-            records.append(self.headers.as_string())
-    return HeadersDumperHandler
+
+    return CustomHandler
 
 
 @pytest.mark.sphinx(
@@ -214,25 +259,26 @@ def do_GET(self):
         (r'.*local.*', ('user1', 'password')),
         (r'.*local.*', ('user2', 'hunter2')),
     ]})
 def test_auth_header_uses_first_match(app):
-    records = []
-    with http_server(capture_headers_handler(records)):
+    with http_server(custom_handler(valid_credentials=("user1", "password"))):
         app.build()
 
-    stdout = "\n".join(records)
-    encoded_auth = base64.b64encode(b'user1:password').decode('ascii')
-    assert f"Authorization: Basic {encoded_auth}\n" in stdout
+    with open(app.outdir / "output.json", encoding="utf-8") as fp:
+        content = json.load(fp)
+
+    assert content["status"] == "working"
 
 
 @pytest.mark.sphinx(
     'linkcheck', testroot='linkcheck-localserver', freshenv=True,
     confoverrides={'linkcheck_auth': [(r'^$', ('user1', 'password'))]})
 def test_auth_header_no_match(app):
-    records = []
-    with http_server(capture_headers_handler(records)):
+    with http_server(custom_handler(valid_credentials=("user1", "password"))):
         app.build()
 
-    stdout = "\n".join(records)
-    assert "Authorization" not in stdout
+    with open(app.outdir / "output.json", encoding="utf-8") as fp:
+        content = json.load(fp)
+
+    assert content["status"] == "broken"
 
 
 @pytest.mark.sphinx(
@@ -246,14 +292,20 @@ def test_auth_header_no_match(app):
     'linkcheck', testroot='linkcheck-localserver', freshenv=True,
     confoverrides={'linkcheck_request_headers': {
         "http://localhost:7777/": {
             "Accept": "text/html",
         },
         "*": {
             "X-Secret": "open sesami",
         },
     }})
 def test_linkcheck_request_headers(app):
-    records = []
-    with http_server(capture_headers_handler(records)):
+    def check_headers(self):
+        if "X-Secret" in self.headers:
+            return False
+        if self.headers["Accept"] != "text/html":
+            return False
+        return True
+
+    with http_server(custom_handler(success_criteria=check_headers)):
         app.build()
 
-    stdout = "\n".join(records)
-    assert "Accept: text/html\n" in stdout
-    assert "X-Secret" not in stdout
-    assert "sesami" not in stdout
+    with open(app.outdir / "output.json", encoding="utf-8") as fp:
+        content = json.load(fp)
+
+    assert content["status"] == "working"
 
 
 @pytest.mark.sphinx(
@@ -263,14 +315,20 @@ def test_linkcheck_request_headers(app):
     'linkcheck', testroot='linkcheck-localserver', freshenv=True,
    confoverrides={'linkcheck_request_headers': {
         "*": {"X-Secret": "open sesami"},
     }})
 def test_linkcheck_request_headers_no_slash(app):
-    records = []
-    with http_server(capture_headers_handler(records)):
+    def check_headers(self):
+        if "X-Secret" in self.headers:
+            return False
+        if self.headers["Accept"] != "application/json":
+            return False
+        return True
+
+    with http_server(custom_handler(success_criteria=check_headers)):
         app.build()
 
-    stdout = "\n".join(records)
-    assert "Accept: application/json\n" in stdout
-    assert "X-Secret" not in stdout
-    assert "sesami" not in stdout
+    with open(app.outdir / "output.json", encoding="utf-8") as fp:
+        content = json.load(fp)
+
+    assert content["status"] == "working"
 
 
 @pytest.mark.sphinx(
@@ -280,22 +338,32 @@ def test_linkcheck_request_headers_no_slash(app):
     'linkcheck', testroot='linkcheck-localserver', freshenv=True,
    confoverrides={'linkcheck_request_headers': {
         "*": {"X-Secret": "open sesami"},
     }})
 def test_linkcheck_request_headers_default(app):
-    records = []
-    with http_server(capture_headers_handler(records)):
+    def check_headers(self):
+        if self.headers["X-Secret"] != "open sesami":
+            return False
+        if self.headers["Accept"] == "application/json":
+            return False
+        return True
+
+    with http_server(custom_handler(success_criteria=check_headers)):
         app.build()
 
-    stdout = "\n".join(records)
-    assert "Accepts: application/json\n" not in stdout
-    assert "X-Secret: open sesami\n" in stdout
+    with open(app.outdir / "output.json", encoding="utf-8") as fp:
+        content = json.load(fp)
+
+    assert content["status"] == "working"
 
 
 def make_redirect_handler(*, support_head):
     class RedirectOnceHandler(http.server.BaseHTTPRequestHandler):
+        protocol_version = "HTTP/1.1"
+
         def do_HEAD(self):
             if support_head:
                 self.do_GET()
             else:
                 self.send_response(405, "Method Not Allowed")
+                self.send_header("Content-Length", "0")
                 self.end_headers()
 
         def do_GET(self):
@@ -304,6 +372,7 @@ def do_GET(self):
         else:
             self.send_response(302, "Found")
             self.send_header("Location", "http://localhost:7777/?redirected=1")
+            self.send_header("Content-Length", "0")
             self.end_headers()
 
     def log_date_time_string(self):
@@ -381,13 +450,19 @@ def test_linkcheck_allowed_redirects(app, warning):
 
 
 class OKHandler(http.server.BaseHTTPRequestHandler):
+    protocol_version = "HTTP/1.1"
+
     def do_HEAD(self):
         self.send_response(200, "OK")
+        self.send_header("Content-Length", "0")
         self.end_headers()
 
     def do_GET(self):
-        self.do_HEAD()
-        self.wfile.write(b"ok\n")
+        content = b"ok\n"
+        self.send_response(200, "OK")
+        self.send_header("Content-Length", str(len(content)))
+        self.end_headers()
+        self.wfile.write(content)
 
 
 @pytest.mark.sphinx('linkcheck', testroot='linkcheck-localserver-https', freshenv=True)
@@ -492,15 +567,21 @@ def test_connect_to_selfsigned_nonexistent_cert_file(app):
 
 
 class InfiniteRedirectOnHeadHandler(http.server.BaseHTTPRequestHandler):
+    protocol_version = "HTTP/1.1"
+
     def do_HEAD(self):
         self.send_response(302, "Found")
         self.send_header("Location", "http://localhost:7777/")
+        self.send_header("Content-Length", "0")
         self.end_headers()
 
     def do_GET(self):
+        content = b"ok\n"
         self.send_response(200, "OK")
+        self.send_header("Content-Length", str(len(content)))
         self.end_headers()
-        self.wfile.write(b"ok\n")
+        self.wfile.write(content)
+        self.close_connection = True  # we don't expect the client to read this response body
 
 
 @pytest.mark.sphinx('linkcheck', testroot='linkcheck-localserver', freshenv=True)
@@ -526,11 +607,14 @@ def test_TooManyRedirects_on_HEAD(app, monkeypatch):
 
 def make_retry_after_handler(responses):
     class RetryAfterHandler(http.server.BaseHTTPRequestHandler):
+        protocol_version = "HTTP/1.1"
+
         def do_HEAD(self):
             status, retry_after = responses.pop(0)
             self.send_response(status)
             if retry_after:
                 self.send_header('Retry-After', retry_after)
+            self.send_header("Content-Length", "0")
             self.end_headers()
 
         def log_date_time_string(self):
@@ -677,11 +761,14 @@ def test_limit_rate_bails_out_after_waiting_max_time(app):
 
 
 class ConnectionResetHandler(http.server.BaseHTTPRequestHandler):
+    protocol_version = "HTTP/1.1"
+
    def do_HEAD(self):
-        self.connection.close()
+        self.close_connection = True
 
     def do_GET(self):
         self.send_response(200, "OK")
+        self.send_header("Content-Length", "0")
         self.end_headers()
 
diff --git a/tests/utils.py b/tests/utils.py
index 429bbd2b2e2..32636b7936c 100644
--- a/tests/utils.py
+++ b/tests/utils.py
@@ -19,7 +19,7 @@ class HttpServerThread(threading.Thread):
     def __init__(self, handler, *args, **kwargs):
         super().__init__(*args, **kwargs)
-        self.server = http.server.HTTPServer(("localhost", 7777), handler)
+        self.server = http.server.ThreadingHTTPServer(("localhost", 7777), handler)
 
     def run(self):
         self.server.serve_forever(poll_interval=0.001)
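Note on the test-suite changes above: every handler now declares protocol_version = "HTTP/1.1" and sends an explicit Content-Length (or closes the connection), because the server created in tests/utils.py becomes a ThreadingHTTPServer that keeps connections alive; the header-capturing handlers are replaced by custom_handler, which validates HTTP Basic credentials and a caller-supplied success criterion on the server side, so the tests assert against linkcheck's output.json instead of captured request headers. The following is a minimal standalone sketch of that server pattern, for illustration only: the handler name, credentials, and request client below are assumptions, not code taken from the diff.

# Standalone sketch (not part of the diff): an HTTP/1.1 handler that always sends
# Content-Length, served by ThreadingHTTPServer, with an HTTP Basic auth check.
# All names here (SketchHandler, EXPECTED_TOKEN, the credentials) are illustrative.
import http.server
import threading
import urllib.request
from base64 import b64encode

EXPECTED_TOKEN = b64encode(b"user1:password").decode("utf-8")


class SketchHandler(http.server.BaseHTTPRequestHandler):
    # HTTP/1.1 keeps the connection open, so every response must declare its length.
    protocol_version = "HTTP/1.1"

    def do_GET(self):
        if self.headers.get("Authorization") != f"Basic {EXPECTED_TOKEN}":
            self.send_response(403, "Forbidden")
            self.send_header("Content-Length", "0")
            self.end_headers()
            return
        body = b"ok\n"
        self.send_response(200, "OK")
        self.send_header("Content-Length", str(len(body)))
        self.end_headers()
        self.wfile.write(body)


if __name__ == "__main__":
    # Serve in a background thread, as the test suite's HttpServerThread does.
    server = http.server.ThreadingHTTPServer(("localhost", 7777), SketchHandler)
    thread = threading.Thread(target=server.serve_forever, daemon=True)
    thread.start()
    request = urllib.request.Request(
        "http://localhost:7777/",
        headers={"Authorization": f"Basic {EXPECTED_TOKEN}"},
    )
    with urllib.request.urlopen(request) as response:
        print(response.status, response.read())  # expected: 200 b'ok\n'
    server.shutdown()
    server.server_close()

Running the sketch prints 200 and the body; if the handler omitted the Content-Length header under HTTP/1.1, the client would wait on the kept-alive connection for more data unless the server closed the connection explicitly, which is why the diff adds the header (or sets close_connection) in every response path.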