Commit 3caed16

feat: add comprehensive Codex API testing infrastructure
Add complete test coverage for the OpenAI Codex integration, including API endpoints, authentication flows, and proxy service functionality.

**Test Infrastructure Added:**
- OpenAI Codex API mocking fixtures with httpx_mock for external HTTP calls
- Codex-specific test clients with credential mocking
- Response format validation helpers for Codex API compliance
- Test data structures for various Codex request scenarios

**API Endpoint Tests:**
- `/codex/responses` endpoint with success and error scenarios
- `/codex/{session_id}/responses` session-based endpoint testing
- Streaming response handling and SSE format compliance
- Request validation for missing or malformed input

**Authentication Tests:**
- OpenAI OAuth PKCE flow testing with token management
- Credential storage and retrieval file operations
- JWT token validation and expiration handling
- Error scenarios for authentication failures

**Proxy Service Tests:**
- Request transformation between Codex and OpenAI formats
- Streaming-to-non-streaming response conversion logic
- Session ID handling and forwarding
- Error propagation and HTTP status code mapping

**Coverage:** 1031 lines across 7 test files covering Codex functionality, including edge cases, error conditions, and integration points.
1 parent f8991df commit 3caed16

7 files changed: +1031 -0 lines changed

tests/conftest.py

Lines changed: 101 additions & 0 deletions
@@ -45,6 +45,7 @@
     "tests.fixtures.claude_sdk.internal_mocks",
     "tests.fixtures.claude_sdk.client_mocks",
     "tests.fixtures.external_apis.anthropic_api",
+    "tests.fixtures.external_apis.openai_codex_api",
 ]

@@ -964,6 +965,106 @@ def client(app: FastAPI) -> TestClient:
     return TestClient(app)


+# Codex-specific fixtures following Claude patterns
+
+
+@pytest.fixture
+def mock_openai_credentials(isolated_environment: Path) -> dict[str, Any]:
+    """Mock OpenAI credentials for testing."""
+    import time
+    from datetime import UTC, datetime
+
+    # Set expiration to 1 hour from now (future)
+    future_timestamp = int(time.time()) + 3600
+
+    return {
+        "access_token": "test-openai-access-token-12345",
+        "refresh_token": "test-openai-refresh-token-67890",
+        "expires_at": datetime.fromtimestamp(future_timestamp, UTC),
+        "account_id": "test-account-id",
+    }
+
+
+@pytest.fixture
+def client_with_mock_codex(
+    test_settings: Settings,
+    mock_openai_credentials: dict[str, Any],
+    fastapi_app_factory: "FastAPIAppFactory",
+) -> Generator[TestClient, None, None]:
+    """Test client with mocked Codex service (no auth)."""
+    app = fastapi_app_factory.create_app(
+        settings=test_settings,
+        auth_enabled=False,
+    )
+
+    # Mock OpenAI credentials
+    from unittest.mock import patch
+
+    with patch("ccproxy.auth.openai.OpenAITokenManager.load_credentials") as mock_load:
+        from ccproxy.auth.openai import OpenAICredentials
+
+        mock_load.return_value = OpenAICredentials(**mock_openai_credentials)
+
+        yield TestClient(app)
+
+
+@pytest.fixture
+def client_with_mock_codex_streaming(
+    test_settings: Settings,
+    mock_openai_credentials: dict[str, Any],
+    fastapi_app_factory: "FastAPIAppFactory",
+) -> Generator[TestClient, None, None]:
+    """Test client with mocked Codex streaming service (no auth)."""
+    app = fastapi_app_factory.create_app(
+        settings=test_settings,
+        auth_enabled=False,
+    )
+
+    # Mock OpenAI credentials
+    from unittest.mock import patch
+
+    with patch("ccproxy.auth.openai.OpenAITokenManager.load_credentials") as mock_load:
+        from ccproxy.auth.openai import OpenAICredentials
+
+        mock_load.return_value = OpenAICredentials(**mock_openai_credentials)
+
+        yield TestClient(app)
+
+
+@pytest.fixture
+def codex_responses() -> dict[str, Any]:
+    """Load standard Codex API responses for testing.
+
+    Returns a dictionary of mock Codex API responses.
+    """
+    return {
+        "standard_completion": {
+            "id": "codex_01234567890",
+            "object": "codex.response",
+            "created": 1234567890,
+            "model": "gpt-5",
+            "choices": [
+                {
+                    "index": 0,
+                    "message": {
+                        "role": "assistant",
+                        "content": "Hello! How can I help you with coding today?",
+                    },
+                    "finish_reason": "stop",
+                }
+            ],
+            "usage": {"prompt_tokens": 10, "completion_tokens": 12, "total_tokens": 22},
+        },
+        "error_response": {
+            "error": {
+                "type": "invalid_request_error",
+                "message": "Invalid model specified",
+                "code": "invalid_model",
+            }
+        },
+    }
+
+
 # Test Utilities

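A minimal usage sketch of these fixtures (not part of this commit): it combines `client_with_mock_codex` with the external Codex API mock and the new assertion helper from `tests/helpers/assertions.py`. The request payload shape is an assumption for illustration; the `/codex/responses` route comes from the commit message.

```python
# Hypothetical test sketch, not part of this commit: wires the app-level
# client fixture together with the external Codex API mock.
from typing import Any

from fastapi.testclient import TestClient
from pytest_httpx import HTTPXMock

from tests.helpers.assertions import assert_codex_response_format


def test_codex_responses_endpoint(
    client_with_mock_codex: TestClient,
    mock_external_openai_codex_api: HTTPXMock,
) -> None:
    # Request body shape is an illustrative assumption, not taken from the diff.
    response = client_with_mock_codex.post(
        "/codex/responses",
        json={"model": "gpt-5", "input": "Hello"},
    )

    assert response.status_code == 200
    assert_codex_response_format(response.json())
```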
tests/fixtures/external_apis/openai_codex_api.py

Lines changed: 153 additions & 0 deletions
@@ -0,0 +1,153 @@
+"""External OpenAI Codex API mocks using httpx_mock.
+
+These fixtures intercept HTTP calls to chatgpt.com/backend-api/codex for testing ProxyService
+and other components that make direct HTTP requests to external APIs.
+"""
+
+from typing import Any
+
+import pytest
+from pytest_httpx import HTTPXMock
+
+
+@pytest.fixture
+def mock_external_openai_codex_api(
+    httpx_mock: HTTPXMock, codex_responses: dict[str, Any]
+) -> HTTPXMock:
+    """Mock OpenAI Codex API responses for standard completion requests.
+
+    This fixture intercepts HTTP calls to chatgpt.com/backend-api/codex and returns
+    mock responses for testing ProxyService and similar components.
+
+    Mocking Strategy: External HTTP interception via httpx_mock
+    Use Case: Testing HTTP calls to chatgpt.com/backend-api/codex
+    HTTP Calls: Intercepted and mocked
+
+    Args:
+        httpx_mock: HTTPXMock fixture for HTTP interception
+        codex_responses: Response data fixture
+
+    Returns:
+        HTTPXMock configured with Codex API responses
+    """
+    httpx_mock.add_response(
+        url="https://chatgpt.com/backend-api/codex/responses",
+        json=codex_responses["standard_completion"],
+        status_code=200,
+        headers={"content-type": "application/json"},
+    )
+    return httpx_mock
+
+
+@pytest.fixture
+def mock_external_openai_codex_api_streaming(httpx_mock: HTTPXMock) -> HTTPXMock:
+    """Mock OpenAI Codex API streaming responses using SSE format.
+
+    This fixture intercepts HTTP calls to chatgpt.com/backend-api/codex for streaming
+    responses and returns SSE-formatted mock data.
+
+    Mocking Strategy: External HTTP interception via httpx_mock
+    Use Case: Testing streaming HTTP calls to Codex API
+    HTTP Calls: Intercepted and mocked with SSE format
+
+    Args:
+        httpx_mock: HTTPXMock fixture for HTTP interception
+
+    Returns:
+        HTTPXMock configured with streaming Codex responses
+    """
+    streaming_response = """data: {"id":"codex_streaming_123","object":"codex.response.chunk","created":1234567890,"model":"gpt-5","choices":[{"index":0,"delta":{"role":"assistant","content":"Hello"},"finish_reason":null}],"usage":null}
+
+data: {"id":"codex_streaming_123","object":"codex.response.chunk","created":1234567890,"model":"gpt-5","choices":[{"index":0,"delta":{"content":" from"},"finish_reason":null}],"usage":null}
+
+data: {"id":"codex_streaming_123","object":"codex.response.chunk","created":1234567890,"model":"gpt-5","choices":[{"index":0,"delta":{"content":" Codex!"},"finish_reason":"stop"}],"usage":{"prompt_tokens":10,"completion_tokens":3,"total_tokens":13}}
+
+data: [DONE]
+
+"""
+
+    httpx_mock.add_response(
+        url="https://chatgpt.com/backend-api/codex/responses",
+        content=streaming_response.encode(),
+        status_code=200,
+        headers={
+            "content-type": "text/event-stream",
+            "cache-control": "no-cache",
+            "connection": "keep-alive",
+        },
+    )
+    return httpx_mock
+
+
+@pytest.fixture
+def mock_external_openai_codex_api_error(httpx_mock: HTTPXMock) -> HTTPXMock:
+    """Mock OpenAI Codex API error responses.
+
+    This fixture intercepts HTTP calls to chatgpt.com/backend-api/codex and returns
+    error responses for testing error handling.
+
+    Mocking Strategy: External HTTP interception via httpx_mock
+    Use Case: Testing error scenarios for Codex API calls
+    HTTP Calls: Intercepted and mocked with error responses
+
+    Args:
+        httpx_mock: HTTPXMock fixture for HTTP interception
+
+    Returns:
+        HTTPXMock configured with Codex error responses
+    """
+    httpx_mock.add_response(
+        url="https://chatgpt.com/backend-api/codex/responses",
+        json={
+            "error": {
+                "type": "invalid_request_error",
+                "message": "Invalid model specified",
+                "code": "invalid_model",
+            }
+        },
+        status_code=400,
+        headers={"content-type": "application/json"},
+    )
+    return httpx_mock
+
+
+@pytest.fixture
+def mock_external_openai_oauth_api(httpx_mock: HTTPXMock) -> HTTPXMock:
+    """Mock OpenAI OAuth API responses for authentication.
+
+    This fixture intercepts HTTP calls to OpenAI's OAuth endpoints for
+    testing authentication flows.
+
+    Mocking Strategy: External HTTP interception via httpx_mock
+    Use Case: Testing OAuth authentication flows
+    HTTP Calls: Intercepted and mocked
+
+    Args:
+        httpx_mock: HTTPXMock fixture for HTTP interception
+
+    Returns:
+        HTTPXMock configured with OAuth responses
+    """
+    # Mock token endpoint
+    httpx_mock.add_response(
+        url="https://auth0.openai.com/oauth/token",
+        json={
+            "access_token": "test-oauth-access-token-12345",
+            "refresh_token": "test-oauth-refresh-token-67890",
+            "token_type": "Bearer",
+            "expires_in": 3600,
+            "scope": "model.request model.read organization.read",
+        },
+        status_code=200,
+        headers={"content-type": "application/json"},
+    )
+
+    # Mock userinfo endpoint
+    httpx_mock.add_response(
+        url="https://auth0.openai.com/userinfo",
+        json={"sub": "test-user-123", "email": "[email protected]", "name": "Test User"},
+        status_code=200,
+        headers={"content-type": "application/json"},
+    )
+
+    return httpx_mock
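A hypothetical streaming test over these mocks might consume the SSE body like the sketch below. It assumes the httpx-based Starlette TestClient, that `/codex/responses` accepts a `stream` flag in the request body, and that the proxy forwards the upstream SSE events unchanged; none of those details is established by this diff alone.

```python
# Hypothetical test sketch, not part of this commit: consumes the SSE body
# produced by mock_external_openai_codex_api_streaming via the test client.
import json

from fastapi.testclient import TestClient
from pytest_httpx import HTTPXMock


def test_codex_streaming_sse_passthrough(
    client_with_mock_codex_streaming: TestClient,
    mock_external_openai_codex_api_streaming: HTTPXMock,
) -> None:
    # Payload shape and the "stream" flag are illustrative assumptions.
    with client_with_mock_codex_streaming.stream(
        "POST",
        "/codex/responses",
        json={"model": "gpt-5", "input": "Hi", "stream": True},
    ) as response:
        assert response.status_code == 200
        events = [line for line in response.iter_lines() if line.startswith("data: ")]

    # The mock ends the stream with the [DONE] sentinel; earlier events are JSON chunks.
    assert events[-1] == "data: [DONE]"
    first_chunk = json.loads(events[0].removeprefix("data: "))
    assert first_chunk["object"] == "codex.response.chunk"
```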

tests/helpers/assertions.py

Lines changed: 27 additions & 0 deletions
@@ -100,6 +100,33 @@ def assert_sse_format_compliance(chunks: list[str]) -> None:
     )


+def assert_codex_response_format(data: dict[str, Any]) -> None:
+    """Assert that response follows OpenAI Codex API format."""
+    required_fields = ["id", "object", "created", "model", "choices", "usage"]
+    for field in required_fields:
+        assert field in data, f"Missing required field: {field}"
+
+    # Verify types
+    assert isinstance(data["id"], str)
+    assert isinstance(data["object"], str)
+    assert isinstance(data["created"], int)
+    assert isinstance(data["model"], str)
+    assert isinstance(data["choices"], list)
+    assert isinstance(data["usage"], dict)
+
+    # Verify choice structure
+    if data["choices"]:
+        choice = data["choices"][0]
+        assert "index" in choice
+        assert "message" in choice
+        assert "finish_reason" in choice
+
+        # Verify message structure
+        message = choice["message"]
+        assert message["role"] == "assistant"
+        assert "content" in message
+
+
 def assert_health_response_format(
     data: dict[str, Any], status_values: list[str]
 ) -> None:
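For illustration, the new helper can be exercised directly against the `codex_responses` payloads defined in conftest.py; the test names below are hypothetical and assume `tests.helpers.assertions` is importable as a module.

```python
# Hypothetical test sketch, not part of this commit: exercises
# assert_codex_response_format against the conftest fixture payloads.
from typing import Any

import pytest

from tests.helpers.assertions import assert_codex_response_format


def test_standard_completion_payload_is_codex_compliant(
    codex_responses: dict[str, Any],
) -> None:
    # The mock completion carries every required top-level field, so this passes.
    assert_codex_response_format(codex_responses["standard_completion"])


def test_error_payload_fails_format_check(codex_responses: dict[str, Any]) -> None:
    # The error payload has no "id"/"choices"/"usage", so the helper raises.
    with pytest.raises(AssertionError):
        assert_codex_response_format(codex_responses["error_response"])
```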
