Skip to content

Commit 5891559

Browse files
committed
feat(copilot): Premium request should be counted when the user initiates a chat
1 parent 96eaa1c commit 5891559

File tree

3 files changed

+90
-54
lines changed

3 files changed

+90
-54
lines changed

AgentCrew/modules/a2a/server.py

Lines changed: 22 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -23,6 +23,10 @@
2323
InvalidRequestError,
2424
JSONParseError,
2525
InternalError,
26+
SendMessageRequest,
27+
SendStreamingMessageRequest,
28+
GetTaskRequest,
29+
CancelTaskRequest,
2630
)
2731

2832
logger = logging.getLogger(__name__)
@@ -160,13 +164,17 @@ async def process_jsonrpc_request(request: Request):
160164
method = json_rpc_request.root.method
161165
logger.debug(f"Processing method: {method}")
162166

163-
if method == "message/send":
167+
if method == "message/send" and isinstance(
168+
json_rpc_request.root, SendMessageRequest
169+
):
164170
logger.debug("Handling message/send request")
165171
result = await task_manager.on_send_message(json_rpc_request.root)
166172
logger.debug(f"message/send result: {result}")
167173
return JSONResponse(result.model_dump(exclude_none=True))
168174

169-
elif method == "message/stream":
175+
elif method == "message/stream" and isinstance(
176+
json_rpc_request.root, SendStreamingMessageRequest
177+
):
170178
result_stream = task_manager.on_send_message_streaming(
171179
json_rpc_request.root
172180
)
@@ -182,13 +190,17 @@ async def event_generator():
182190

183191
return EventSourceResponse(event_generator())
184192

185-
elif method == "tasks/send":
193+
elif method == "tasks/send" and isinstance(
194+
json_rpc_request.root, SendMessageRequest
195+
):
186196
logger.debug("Handling legacy tasks/send request")
187197
result = await task_manager.on_send_task(json_rpc_request.root)
188198
logger.debug(f"tasks/send result: {result}")
189199
return JSONResponse(result.model_dump(exclude_none=True))
190200

191-
elif method == "tasks/sendSubscribe":
201+
elif method == "tasks/sendSubscribe" and isinstance(
202+
json_rpc_request.root, SendStreamingMessageRequest
203+
):
192204
result_stream = task_manager.on_send_task_subscribe(
193205
json_rpc_request.root
194206
)
@@ -204,11 +216,15 @@ async def event_generator():
204216

205217
return EventSourceResponse(event_generator())
206218

207-
elif method == "tasks/get":
219+
elif method == "tasks/get" and isinstance(
220+
json_rpc_request.root, GetTaskRequest
221+
):
208222
result = await task_manager.on_get_task(json_rpc_request.root)
209223
return JSONResponse(result.model_dump(exclude_none=True))
210224

211-
elif method == "tasks/cancel":
225+
elif method == "tasks/cancel" and isinstance(
226+
json_rpc_request.root, CancelTaskRequest
227+
):
212228
result = await task_manager.on_cancel_task(json_rpc_request.root)
213229
return JSONResponse(result.model_dump(exclude_none=True))
214230

AgentCrew/modules/custom_llm/github_copilot_service.py

Lines changed: 68 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,9 @@
1+
from uuid import uuid4
12
from AgentCrew.modules.custom_llm import CustomLLMService
23
import os
34
from dotenv import load_dotenv
45
from AgentCrew.modules import logger
6+
from datetime import datetime
57

68

79
class GithubCopilotService(CustomLLMService):
@@ -27,4 +29,70 @@ def __init__(self):
2729
self.current_output_tokens = 0
2830
self.temperature = 0.6
2931
self._is_thinking = False
32+
# self._interaction_id = None
3033
logger.info("Initialized Github Copilot Service")
34+
35+
def github_copilot_token_to_open_ai_key(self, copilot_api_key):
36+
"""
37+
Convert GitHub Copilot token to OpenAI key format.
38+
39+
Args:
40+
copilot_api_key: The GitHub Copilot token
41+
42+
Returns:
43+
Updated OpenAI compatible token
44+
"""
45+
openai_api_key = self.client.api_key
46+
47+
if openai_api_key.startswith("ghu") or int(
48+
dict(x.split("=") for x in openai_api_key.split(";"))["exp"]
49+
) < int(datetime.now().timestamp()):
50+
import requests
51+
52+
headers = {
53+
"Authorization": f"Bearer {copilot_api_key}",
54+
"Content-Type": "application/json",
55+
}
56+
if self.extra_headers:
57+
headers.update(self.extra_headers)
58+
res = requests.get(
59+
"https://api.github.com/copilot_internal/v2/token", headers=headers
60+
)
61+
self.client.api_key = res.json()["token"]
62+
63+
async def process_message(self, prompt: str, temperature: float = 0) -> str:
64+
# Check if using GitHub Copilot
65+
if self.base_url:
66+
from urllib.parse import urlparse
67+
68+
parsed_url = urlparse(self.base_url)
69+
host = parsed_url.hostname
70+
if host and host.endswith(".githubcopilot.com"):
71+
self.base_url = self.base_url.rstrip("/")
72+
self.github_copilot_token_to_open_ai_key(self.api_key)
73+
return await super().process_message(prompt, temperature)
74+
75+
async def stream_assistant_response(self, messages):
76+
"""Stream the assistant's response with tool support."""
77+
# Check if using GitHub Copilot
78+
if self.base_url:
79+
from urllib.parse import urlparse
80+
81+
parsed_url = urlparse(self.base_url)
82+
host = parsed_url.hostname
83+
if host and host.endswith(".githubcopilot.com"):
84+
self.base_url = self.base_url.rstrip("/")
85+
self.github_copilot_token_to_open_ai_key(self.api_key)
86+
# if len([m for m in messages if m.get("role") == "assistant"]) == 0:
87+
# self._interaction_id = str(uuid4())
88+
if self.extra_headers:
89+
self.extra_headers["X-Initiator"] = (
90+
"user"
91+
if messages[-1].get("role", "assistant") == "user"
92+
else "agent"
93+
)
94+
self.extra_headers["X-Request-Id"] = str(uuid4())
95+
# if self._interaction_id:
96+
# self.extra_headers["X-Interaction-Id"] = self._interaction_id
97+
98+
return await super().stream_assistant_response(messages)

AgentCrew/modules/custom_llm/service.py

Lines changed: 0 additions & 48 deletions
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,6 @@
44
from mcp.types import ImageContent, TextContent
55
from typing import Dict, Any, List, Optional, Tuple
66
import json
7-
from datetime import datetime
87
from AgentCrew.modules import logger
98

109

@@ -75,16 +74,6 @@ def format_tool_result(
7574

7675
async def process_message(self, prompt: str, temperature: float = 0) -> str:
7776
try:
78-
# Check if using GitHub Copilot
79-
if self.base_url:
80-
from urllib.parse import urlparse
81-
82-
parsed_url = urlparse(self.base_url)
83-
host = parsed_url.hostname
84-
if host and host.endswith(".githubcopilot.com"):
85-
self.base_url = self.base_url.rstrip("/")
86-
self.github_copilot_token_to_open_ai_key(self.api_key)
87-
8877
response = await self.client.chat.completions.create(
8978
model=self.model,
9079
max_tokens=3000,
@@ -132,15 +121,6 @@ async def process_message(self, prompt: str, temperature: float = 0) -> str:
132121

133122
async def stream_assistant_response(self, messages):
134123
"""Stream the assistant's response with tool support."""
135-
# Check if using GitHub Copilot
136-
if self.base_url:
137-
from urllib.parse import urlparse
138-
139-
parsed_url = urlparse(self.base_url)
140-
host = parsed_url.hostname
141-
if host and host.endswith(".githubcopilot.com"):
142-
self.base_url = self.base_url.rstrip("/")
143-
self.github_copilot_token_to_open_ai_key(self.api_key)
144124

145125
stream_params = {
146126
"model": self.model,
@@ -203,34 +183,6 @@ def process_stream_chunk(
203183
else:
204184
return self._process_non_stream_chunk(chunk, assistant_response, tool_uses)
205185

206-
def github_copilot_token_to_open_ai_key(self, copilot_api_key):
207-
"""
208-
Convert GitHub Copilot token to OpenAI key format.
209-
210-
Args:
211-
copilot_api_key: The GitHub Copilot token
212-
213-
Returns:
214-
Updated OpenAI compatible token
215-
"""
216-
openai_api_key = self.client.api_key
217-
218-
if openai_api_key.startswith("ghu") or int(
219-
dict(x.split("=") for x in openai_api_key.split(";"))["exp"]
220-
) < int(datetime.now().timestamp()):
221-
import requests
222-
223-
headers = {
224-
"Authorization": f"Bearer {copilot_api_key}",
225-
"Content-Type": "application/json",
226-
}
227-
if self.extra_headers:
228-
headers.update(self.extra_headers)
229-
res = requests.get(
230-
"https://api.github.com/copilot_internal/v2/token", headers=headers
231-
)
232-
self.client.api_key = res.json()["token"]
233-
234186
def _process_non_stream_chunk(
235187
self, chunk, assistant_response, tool_uses
236188
) -> Tuple[str, List[Dict], int, int, Optional[str], Optional[tuple]]:

0 commit comments

Comments (0)