Skip to content

Don't run CI steps that require secrets on PRs from fork branch to fork main #9962

Don't run CI steps that require secrets on PRs from fork branch to fork main

Don't run CI steps that require secrets on PRs from fork branch to fork main #9962

Workflow file for this run

name: CI

on:
  push:
    branches:
      - main
    tags:
      - "**"
  # Run on all pull requests; the secret-gated steps below additionally check
  # the head repo, so PRs from forks never get access to secrets.
  pull_request: {}

env:
  COLUMNS: 150
  # Quoted so the version stays a string — unquoted dotted versions are YAML
  # floats (e.g. 3.10 would become 3.1); matches the quoting of UV_FROZEN.
  UV_PYTHON: "3.12"
  UV_FROZEN: "1"

# Default the GITHUB_TOKEN to read-only; jobs opt in to more where needed.
permissions:
  contents: read

jobs:
lint:
name: lint on ${{ matrix.python-version }}
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
# Typecheck both Python 3.10 and 3.13. We've had issues due to not checking against both.
python-version: ["3.10", "3.13"]
env:
PYRIGHT_PYTHON: ${{ matrix.python-version }}
steps:
- uses: actions/checkout@v4
- uses: astral-sh/setup-uv@v5
with:
python-version: ${{ matrix.python-version }}
enable-cache: true
cache-suffix: lint
- name: Install dependencies
run: uv sync --all-extras --all-packages --group lint
- uses: pre-commit/[email protected]
with:
extra_args: --all-files --verbose
env:
SKIP: no-commit-to-branch
- run: uv build --all-packages
- run: ls -lh dist/
  # mypy and lint are a bit slower than other jobs, so we run them separately
  mypy:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - uses: astral-sh/setup-uv@v5
        with:
          enable-cache: true
          cache-suffix: mypy
      - name: Install dependencies
        run: uv sync --no-dev --group lint
      # Delegates to the repo Makefile; see the `typecheck-mypy` target there.
      - run: make typecheck-mypy
  # Build the documentation site and typecheck the docs-site frontend.
  docs:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - uses: astral-sh/setup-uv@v5
        with:
          enable-cache: true
          cache-suffix: docs
      - run: uv sync --group docs
      # always build docs to check it works without insiders packages
      - run: make docs
      # The insiders build needs the PPPR_TOKEN secret, so it only runs for
      # same-repo branches (or main) of pydantic/pydantic-ai — never for PRs
      # from forks, where secrets are unavailable.
      - run: make docs-insiders
        if: (github.event.pull_request.head.repo.full_name == github.repository || github.ref == 'refs/heads/main') && github.repository == 'pydantic/pydantic-ai'
        env:
          PPPR_TOKEN: ${{ secrets.PPPR_TOKEN }}
      - run: tree -sh site
      - uses: actions/setup-node@v4
      - run: npm install
        working-directory: docs-site
      - run: npm run typecheck
        working-directory: docs-site
      # Upload the built site for the deploy-docs / deploy-docs-preview jobs.
      - name: Store docs
        uses: actions/upload-artifact@v4
        with:
          name: site
          path: site
      # check all docs images are tinified, You'll need an API key from https://tinify.com/ to fix this if it fails
      - run: uvx tinicly docs --check
  # Live tests against real model providers — costs real money, hence the
  # explicit PYDANTIC_AI_LIVE_TEST_DANGEROUS opt-in. Restricted to same-repo
  # PRs or pushes on pydantic/pydantic-ai: fork PRs have no secrets, so this
  # job is skipped for them (see `allowed-skips` in the `check` job).
  test-live:
    runs-on: ubuntu-latest
    timeout-minutes: 5
    if: (github.event.pull_request.head.repo.full_name == github.repository || github.event_name == 'push') && github.repository == 'pydantic/pydantic-ai'
    steps:
      - uses: actions/checkout@v4
      - uses: astral-sh/setup-uv@v5
        with:
          enable-cache: true
          cache-suffix: live
      # Local Ollama server with a small model for the Ollama live tests.
      - uses: pydantic/ollama-action@v3
        with:
          model: qwen2:0.5b
      - run: uv sync --only-dev
      - run: >
          uv run
          --package pydantic-ai-slim
          --extra openai
          --extra vertexai
          --extra google
          --extra groq
          --extra anthropic
          --extra mistral
          --extra cohere
          pytest tests/test_live.py -v
          --durations=100
        env:
          PYDANTIC_AI_LIVE_TEST_DANGEROUS: "CHARGE-ME!"
          OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
          GEMINI_API_KEY: ${{ secrets.GEMINI_API_KEY }}
          GOOGLE_SERVICE_ACCOUNT_CONTENT: ${{ secrets.GOOGLE_SERVICE_ACCOUNT_CONTENT }}
          GROQ_API_KEY: ${{ secrets.GROQ_API_KEY }}
          ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }}
          MISTRAL_API_KEY: ${{ secrets.MISTRAL_API_KEY }}
          CO_API_KEY: ${{ secrets.COHERE_API_KEY }}
  # Main test matrix: 4 Python versions x 3 install flavours, with coverage.
  test:
    name: test on ${{ matrix.python-version }} (${{ matrix.install.name }})
    runs-on: ubuntu-latest
    timeout-minutes: 20
    strategy:
      fail-fast: false
      matrix:
        python-version: ["3.10", "3.11", "3.12", "3.13"]
        # Each flavour passes different args to `uv run` below.
        install:
          - name: pydantic-ai-slim
            command: "--package pydantic-ai-slim"
          - name: standard
            command: ""
          - name: all-extras
            command: "--all-extras"
    env:
      CI: true
      # coverage.py process-startup hook: measure coverage in subprocesses too.
      COVERAGE_PROCESS_START: ./pyproject.toml
    steps:
      - uses: actions/checkout@v4
      - uses: astral-sh/setup-uv@v5
        with:
          python-version: ${{ matrix.python-version }}
          enable-cache: true
          cache-suffix: ${{ matrix.install.name }}
      - uses: denoland/setup-deno@v2
        with:
          deno-version: v2.x
      # Per-run coverage data files are written under .coverage/ (see
      # COVERAGE_FILE below) and merged by the `coverage` job.
      - run: mkdir .coverage
      - run: uv sync --only-dev
      # Smoke-test mcp-run-python before the full suite.
      - run: uv run mcp-run-python example --deps=numpy
      - name: cache HuggingFace models
        uses: actions/cache@v4
        with:
          path: ~/.cache/huggingface
          key: hf-${{ runner.os }}-${{ hashFiles('**/uv.lock') }}
          restore-keys: |
            hf-${{ runner.os }}-
      - run: uv run ${{ matrix.install.command }} coverage run -m pytest --durations=100 -n auto --dist=loadgroup
        env:
          COVERAGE_FILE: .coverage/.coverage.${{ matrix.python-version }}-${{ matrix.install.name }}
      - name: store coverage files
        uses: actions/upload-artifact@v4
        with:
          name: coverage-${{ matrix.python-version }}-${{ matrix.install.name }}
          path: .coverage
          include-hidden-files: true
test-lowest-versions:
name: test on ${{ matrix.python-version }} (lowest-versions)
runs-on: ubuntu-latest
timeout-minutes: 20
strategy:
fail-fast: false
matrix:
python-version: ["3.10", "3.11", "3.12", "3.13"]
env:
CI: true
COVERAGE_PROCESS_START: ./pyproject.toml
steps:
- uses: actions/checkout@v4
- uses: astral-sh/setup-uv@v5
with:
python-version: ${{ matrix.python-version }}
enable-cache: true
cache-suffix: lowest-versions
- uses: denoland/setup-deno@v2
with:
deno-version: v2.x
- run: mkdir .coverage
- run: uv sync --group dev
- run: uv run mcp-run-python example --deps=numpy
- name: cache HuggingFace models
uses: actions/cache@v4
with:
path: ~/.cache/huggingface
key: hf-${{ runner.os }}-${{ hashFiles('**/uv.lock') }}
restore-keys: |
hf-${{ runner.os }}-
- run: unset UV_FROZEN
- run: uv run --all-extras --resolution lowest-direct coverage run -m pytest --durations=100 -n auto --dist=loadgroup
env:
COVERAGE_FILE: .coverage/.coverage.${{matrix.python-version}}-lowest-versions
- name: store coverage files
uses: actions/upload-artifact@v4
with:
name: coverage-${{ matrix.python-version }}-lowest-versions
path: .coverage
include-hidden-files: true
  # Import-check the documentation examples on each supported Python >= 3.11.
  test-examples:
    name: test examples on ${{ matrix.python-version }}
    runs-on: ubuntu-latest
    timeout-minutes: 10
    strategy:
      fail-fast: false
      matrix:
        python-version: ["3.11", "3.12", "3.13"]
    env:
      CI: true
    steps:
      - uses: actions/checkout@v4
      - uses: astral-sh/setup-uv@v5
        with:
          python-version: ${{ matrix.python-version }}
          enable-cache: true
          cache-suffix: examples
      - name: cache HuggingFace models
        uses: actions/cache@v4
        with:
          path: ~/.cache/huggingface
          key: hf-${{ runner.os }}-${{ hashFiles('**/uv.lock') }}
          restore-keys: |
            hf-${{ runner.os }}-
      - run: uv run --all-extras python tests/import_examples.py
  # Merge the coverage artifacts from `test` and `test-lowest-versions`,
  # report, and enforce coverage rules; publishes an HTML report artifact.
  coverage:
    runs-on: ubuntu-latest
    needs: [test, test-lowest-versions]
    steps:
      - uses: actions/checkout@v4
        with:
          # needed for diff-cover
          fetch-depth: 0
      # Pull every coverage-* artifact into a single .coverage/ directory.
      - name: get coverage files
        uses: actions/download-artifact@v4
        with:
          merge-multiple: true
          path: .coverage
      - uses: astral-sh/setup-uv@v5
        with:
          enable-cache: true
          cache-suffix: dev
      - run: uv sync --group dev
      # NOTE(review): no COVERAGE_FILE override on `combine` — presumably the
      # data-file location is configured in pyproject.toml; confirm if combine
      # fails to find the .coverage/.coverage.* files.
      - run: uv run coverage combine
      - run: uv run coverage report
      - run: uv run strict-no-cover
        env:
          COVERAGE_FILE: .coverage/.coverage
      - run: uv run coverage html --show-contexts --title "Pydantic AI coverage for ${{ github.sha }}"
      - uses: actions/upload-artifact@v4
        with:
          name: coverage-html
          path: htmlcov
          include-hidden-files: true
  # https://github.com/marketplace/actions/alls-green#why used for branch protection checks
  check:
    if: always()
    needs:
      - lint
      - mypy
      - docs
      - test-live
      - test
      - test-lowest-versions
      - test-examples
      - coverage
    runs-on: ubuntu-latest
    steps:
      - name: Decide whether the needed jobs succeeded or failed
        uses: re-actors/alls-green@release/v1
        with:
          jobs: ${{ toJSON(needs) }}
          # test-live is skipped on fork PRs (see its `if`); that skip must
          # not fail the aggregate check.
          allowed-skips: test-live
  # Deploy the documentation site to production — runs only for version tags.
  deploy-docs:
    needs: [check]
    if: success() && startsWith(github.ref, 'refs/tags/')
    runs-on: ubuntu-latest
    environment:
      name: deploy-docs
      url: https://ai.pydantic.dev
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-node@v4
      - run: npm install
        working-directory: docs-site
      - uses: astral-sh/setup-uv@v5
        with:
          enable-cache: true
          cache-suffix: docs-upload
      # Download the site built by the `docs` job in this same run.
      - uses: actions/download-artifact@v4
        with:
          name: site
          path: site
      - uses: cloudflare/wrangler-action@v3
        with:
          apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
          workingDirectory: docs-site
          command: >
            deploy
            --var GIT_COMMIT_SHA:${{ github.sha }}
            --var GIT_BRANCH:main
      # Push the search index to Algolia.
      - run: uv sync --group docs-upload
      - run: uv run python docs/.hooks/algolia.py upload
        env:
          ALGOLIA_WRITE_API_KEY: ${{ secrets.ALGOLIA_WRITE_API_KEY }}
  # Deploy a docs preview to the Cloudflare `previews` environment — runs
  # only on pushes to main.
  deploy-docs-preview:
    needs: [check]
    if: success() && github.ref == 'refs/heads/main'
    runs-on: ubuntu-latest
    environment:
      name: deploy-docs-preview
    permissions:
      deployments: write
      statuses: write
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-node@v4
      - run: npm install
        working-directory: docs-site
      - uses: astral-sh/setup-uv@v5
        with:
          enable-cache: true
          cache-suffix: deploy-docs-preview
      # Download the site built by the `docs` job in this same run.
      - uses: actions/download-artifact@v4
        with:
          name: site
          path: site
      - uses: cloudflare/wrangler-action@v3
        id: deploy
        with:
          apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
          environment: previews
          workingDirectory: docs-site
          command: >
            deploy
            --var GIT_COMMIT_SHA:${{ github.sha }}
            --var GIT_BRANCH:main
      # Parse the wrangler output for the preview URL and attach it to the
      # commit via the GitHub API.
      - name: Set preview URL
        run: uv run --no-project --with httpx .github/set_docs_main_preview_url.py
        env:
          DEPLOY_OUTPUT: ${{ steps.deploy.outputs.command-output }}
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          REPOSITORY: ${{ github.repository }}
          REF: ${{ github.sha }}
  # TODO(Marcelo): We need to split this into two jobs: `build` and `release`.
  # Build all packages and publish to PyPI — runs only for version tags.
  release:
    needs: [check]
    if: success() && startsWith(github.ref, 'refs/tags/')
    runs-on: ubuntu-latest
    environment:
      name: release
      url: https://pypi.org/project/pydantic-ai/${{ steps.inspect_package.outputs.version }}
    permissions:
      # Required for PyPI trusted publishing (OIDC).
      id-token: write
    outputs:
      # Consumed by the send-tweet job below.
      package-version: ${{ steps.inspect_package.outputs.version }}
    steps:
      - uses: actions/checkout@v4
      - uses: astral-sh/setup-uv@v5
        with:
          enable-cache: true
          cache-suffix: release
      - run: uv build --all-packages
      - name: Inspect package version
        id: inspect_package
        run: |
          uv tool install --with uv-dynamic-versioning hatchling
          version=$(uvx hatchling version)
          echo "version=$version" >> "$GITHUB_OUTPUT"
      - name: Publish to PyPI
        uses: pypa/gh-action-pypi-publish@release/v1
        with:
          skip-existing: true
  # Announce the release on Twitter/X once the PyPI publish succeeded.
  send-tweet:
    name: Send tweet
    needs: [release]
    if: needs.release.result == 'success'
    runs-on: ubuntu-latest
    steps:
      - uses: actions/setup-python@v5
        with:
          python-version: "3.12"
      - name: Install dependencies
        run: pip install tweepy==4.14.0
      # Inline Python (shell: python) that formats TWEET with the released
      # version and posts it via tweepy.
      - name: Send tweet
        shell: python
        run: |
          import os
          import tweepy
          client = tweepy.Client(
              access_token=os.getenv("TWITTER_ACCESS_TOKEN"),
              access_token_secret=os.getenv("TWITTER_ACCESS_TOKEN_SECRET"),
              consumer_key=os.getenv("TWITTER_CONSUMER_KEY"),
              consumer_secret=os.getenv("TWITTER_CONSUMER_SECRET"),
          )
          version = os.getenv("VERSION").strip('"')
          tweet = os.getenv("TWEET").format(version=version)
          client.create_tweet(text=tweet)
        env:
          VERSION: ${{ needs.release.outputs.package-version }}
          TWEET: |
            Pydantic AI version {version} is out! 🎉
            https://github.com/pydantic/pydantic-ai/releases/tag/v{version}
          TWITTER_CONSUMER_KEY: ${{ secrets.TWITTER_CONSUMER_KEY }}
          TWITTER_CONSUMER_SECRET: ${{ secrets.TWITTER_CONSUMER_SECRET }}
          TWITTER_ACCESS_TOKEN: ${{ secrets.TWITTER_ACCESS_TOKEN }}
          TWITTER_ACCESS_TOKEN_SECRET: ${{ secrets.TWITTER_ACCESS_TOKEN_SECRET }}