|
| 1 | +import re |
| 2 | + |
| 3 | +import httpx |
| 4 | +import pytest |
| 5 | +from pytest_mock import MockerFixture |
| 6 | + |
| 7 | +from pydantic_ai.exceptions import UserError |
| 8 | +from pydantic_ai.profiles._json_schema import InlineDefsJsonSchemaTransformer |
| 9 | +from pydantic_ai.profiles.cohere import cohere_model_profile |
| 10 | +from pydantic_ai.profiles.deepseek import deepseek_model_profile |
| 11 | +from pydantic_ai.profiles.grok import grok_model_profile |
| 12 | +from pydantic_ai.profiles.meta import meta_model_profile |
| 13 | +from pydantic_ai.profiles.mistral import mistral_model_profile |
| 14 | +from pydantic_ai.profiles.openai import OpenAIJsonSchemaTransformer, openai_model_profile |
| 15 | + |
| 16 | +from ..conftest import TestEnv, try_import |
| 17 | + |
# Import optional third-party dependencies inside the try_import guard so the
# module still collects when `openai` is not installed.
with try_import() as imports_successful:
    import openai

    from pydantic_ai.providers.github import GitHubProvider

# Skip every test in this module when the optional `openai` extra is missing.
pytestmark = pytest.mark.skipif(not imports_successful(), reason='openai not installed')
| 24 | + |
| 25 | + |
def test_github_provider():
    """A GitHubProvider built from an API key wraps an AsyncOpenAI client
    pointed at the GitHub Models inference endpoint."""
    github = GitHubProvider(api_key='ghp_test_token')
    wrapped = github.client
    assert isinstance(wrapped, openai.AsyncOpenAI)
    assert wrapped.api_key == 'ghp_test_token'
    assert github.name == 'github'
    assert github.base_url == 'https://models.github.ai/inference'
| 32 | + |
| 33 | + |
def test_github_provider_need_api_key(env: TestEnv) -> None:
    """Without `GITHUB_API_KEY` in the environment, construction fails with a
    UserError that tells the caller exactly how to supply the key."""
    env.remove('GITHUB_API_KEY')
    expected_message = (
        'Set the `GITHUB_API_KEY` environment variable or pass it via `GitHubProvider(api_key=...)`'
        ' to use the GitHub Models provider.'
    )
    with pytest.raises(UserError, match=re.escape(expected_message)):
        GitHubProvider()
| 44 | + |
| 45 | + |
def test_github_provider_pass_http_client() -> None:
    """A caller-supplied httpx.AsyncClient becomes the provider's transport."""
    custom_transport = httpx.AsyncClient()
    github = GitHubProvider(api_key='ghp_test_token', http_client=custom_transport)
    # The OpenAI SDK stores the injected httpx client on a private attribute.
    assert github.client._client == custom_transport  # type: ignore[reportPrivateUsage]
| 50 | + |
| 51 | + |
def test_github_pass_openai_client() -> None:
    """A pre-built AsyncOpenAI client is adopted by the provider unchanged."""
    prebuilt = openai.AsyncOpenAI(api_key='ghp_test_token')
    assert GitHubProvider(openai_client=prebuilt).client == prebuilt
| 56 | + |
| 57 | + |
def test_github_provider_model_profile(mocker: MockerFixture):
    """`model_profile` strips any `publisher/` prefix, lowercases the model name,
    and dispatches to the vendor-specific profile function; unknown vendors and
    bare names fall back to the OpenAI profile."""
    provider = GitHubProvider(api_key='ghp_test_token')

    ns = 'pydantic_ai.providers.github'
    # Patch each vendor profile function (wrapping the real one) so we can
    # observe the exact model-name argument the provider dispatches with.
    profile_mocks = {
        vendor: mocker.patch(f'{ns}.{vendor}_model_profile', wraps=real_fn)
        for vendor, real_fn in (
            ('meta', meta_model_profile),
            ('deepseek', deepseek_model_profile),
            ('mistral', mistral_model_profile),
            ('cohere', cohere_model_profile),
            ('grok', grok_model_profile),
            ('openai', openai_model_profile),
        )
    }

    # (requested model, vendor mock expected to be hit, arg it receives, transformer)
    cases = (
        ('meta/Llama-3.2-11B-Vision-Instruct', 'meta', 'llama-3.2-11b-vision-instruct', InlineDefsJsonSchemaTransformer),
        ('meta/Llama-3.1-405B-Instruct', 'meta', 'llama-3.1-405b-instruct', InlineDefsJsonSchemaTransformer),
        ('deepseek/deepseek-coder', 'deepseek', 'deepseek-coder', OpenAIJsonSchemaTransformer),
        ('mistral-ai/mixtral-8x7b-instruct', 'mistral', 'mixtral-8x7b-instruct', OpenAIJsonSchemaTransformer),
        ('cohere/command-r-plus', 'cohere', 'command-r-plus', OpenAIJsonSchemaTransformer),
        ('xai/grok-3-mini', 'grok', 'grok-3-mini', OpenAIJsonSchemaTransformer),
        # Microsoft models and anything unrecognized route to the OpenAI profile.
        ('microsoft/Phi-3.5-mini-instruct', 'openai', 'phi-3.5-mini-instruct', OpenAIJsonSchemaTransformer),
        ('some-unknown-model', 'openai', 'some-unknown-model', OpenAIJsonSchemaTransformer),
        ('unknown-publisher/some-unknown-model', 'openai', 'some-unknown-model', OpenAIJsonSchemaTransformer),
    )

    for requested, vendor, dispatched_name, expected_transformer in cases:
        profile = provider.model_profile(requested)
        profile_mocks[vendor].assert_called_with(dispatched_name)
        assert profile is not None
        assert profile.json_schema_transformer == expected_transformer
0 commit comments