-
Notifications
You must be signed in to change notification settings - Fork 4
Expand file tree
/
Copy pathsdl_agents.py
More file actions
277 lines (236 loc) · 10.1 KB
/
sdl_agents.py
File metadata and controls
277 lines (236 loc) · 10.1 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
import os
from typing import Dict, Any
import PyPDF2
import autogen
from autogen import (
UserProxyAgent,
AssistantAgent,
ConversableAgent,
register_function,
)
from autogen.coding import LocalCommandLineCodeExecutor
from utils.teachability_filtered import DedupTeachability
from config.settings import OPENAI_API_KEY, anthropic_api_key
from utils.system_messages import code_writer_system_message
import asyncio
import time
class CaptureGroupChatManager(autogen.GroupChatManager):
    """GroupChatManager that records a transcript of non-silent messages.

    Every message routed through ``receive`` (unless ``silent``) is appended
    to ``self.captured_messages`` as ``"<sender name>: <content>"`` for
    later inspection.
    """

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Running transcript of every non-silent, non-empty message seen.
        self.captured_messages = []

    def receive(self, message, sender, request_reply=None, silent=False):
        if not silent:
            sender_name = getattr(sender, "name", "Unknown")
            if isinstance(message, dict) and "content" in message:
                text = message["content"]
            else:
                text = str(message)
            # Skip empty/None content so the transcript stays meaningful.
            if text:
                self.captured_messages.append(f"{sender_name}: {text}")
        return super().receive(message, sender, request_reply, silent)
def get_llm_config(llm_type: str) -> Dict[str, Any]:
    """
    Get LLM configuration based on the selected model type.

    Args:
        llm_type (str): Type of LLM to use. Valid keys are 'gpt4o-mini',
            'gpt4o', 'claude_35', and 'ArgoLLMs'. Any other value silently
            falls back to the 'ArgoLLMs' configuration.

    Returns:
        dict: LLM configuration dictionary for the selected model.
    """
    llm_configs = {
        'gpt4o-mini': {
            "model": "gpt-4o-mini",
            'api_key': OPENAI_API_KEY,
            'temperature': 0,
            # cache_seed pins AutoGen's response cache for reproducible runs.
            "cache_seed": 0,
        },
        'gpt4o': {
            "model": "gpt-4o",
            'api_key': OPENAI_API_KEY,
            'temperature': 0,
            "cache_seed": 0,
        },
        'claude_35': {
            "model": "claude-3-5-sonnet-20240620",
            'api_key': anthropic_api_key,
            'api_type': 'anthropic',
            'temperature': 0,
            "cache_seed": 0,
        },
        'ArgoLLMs': {  # Local client operates only within the organization
            "model": "gpto1preview",
            "model_client_cls": "ArgoModelClient",
            'temperature': 0,
            "cache_seed": 0,
        }
    }
    # Unknown llm_type falls back to the in-house Argo client.
    return llm_configs.get(llm_type, llm_configs['ArgoLLMs'])
def pdf_to_text(pdf_file: str) -> str:
    """
    Extract text from a PDF file.

    Args:
        pdf_file (str): Path to the PDF file

    Returns:
        str: Text of all pages concatenated in page order. Pages with no
             extractable text layer contribute an empty string.
    """
    with open(pdf_file, 'rb') as file:
        pdf_reader = PyPDF2.PdfReader(file)
        # extract_text() may return None for image-only pages; the original
        # `text += page.extract_text()` would raise TypeError in that case.
        # "or ''" guards against that, and join avoids quadratic string +=.
        return "".join(page.extract_text() or "" for page in pdf_reader.pages)
# Custom termination function to detect successful code execution
def is_termination_msg(msg):
    """Return True when the conversation should terminate.

    Terminates on an explicit TERMINATE token, or on a message reporting
    that a code block was executed successfully.
    """
    content = msg.get("content")
    if content is None:
        return False
    # Explicit TERMINATE command from any agent.
    if "TERMINATE" in content:
        return True
    # Executor output announcing a successful code-block run.
    return (">>>>>>>> EXECUTING CODE BLOCK" in content
            and "execution succeeded" in content)
class AutoGenSystem:
    """Multi-agent AutoGen pipeline for writing, reviewing, and executing
    robot-control code.

    Wires an admin (user proxy + local code executor), a code-writer agent,
    a code-reviewer agent, and a PDF-scraper agent into one group chat,
    with an optional teachability (persistent memory) layer on top.
    """

    def __init__(self, llm_type: str, workdir: str, polybot_file_path: str):
        """
        Initialize AutoGen system with specified LLM configuration.

        Args:
            llm_type (str): Type of LLM to use (see get_llm_config for valid keys)
            workdir (str): Working directory where generated code is executed
            polybot_file_path (str): Path to the polybot operations file whose
                text is injected into the writer's and reviewer's system messages
        """
        self.llm_type = llm_type
        self.llm_config = get_llm_config(llm_type)
        self.workdir = workdir
        # Read polybot file once; its full text becomes part of agent prompts.
        with open(polybot_file_path, 'r') as polybot_file:
            self.polybot_file = polybot_file.read()
        # Executor that runs generated code blocks locally inside `workdir`.
        self.executor = LocalCommandLineCodeExecutor(
            timeout=120,
            work_dir=workdir,
        )
        self._setup_agents()
        self._setup_group_chat()
        self._setup_teachability()  # enable this to add teachability
        print("POLYBOT ADMIN TYPE:", type(self.polybot_admin))
        print("CODE WRITER TYPE:", type(self.code_writer_agent))

    def _setup_agents(self):
        """Set up all required agents with the specified LLM configuration."""
        # Code writer: drafts execution code; a human can steer it (ALWAYS).
        self.code_writer_agent = ConversableAgent(
            name="code_writer_agent",
            system_message=code_writer_system_message + self.polybot_file,
            llm_config=self.llm_config,
            code_execution_config=False,
            human_input_mode="ALWAYS",
        )
        # Code review agent.
        # BUG FIX: originally only the first string fragment carried the `f`
        # prefix, so "{self.polybot_file}" in a later fragment reached the LLM
        # as literal text instead of the file contents. The `f` prefix now sits
        # on the fragment that contains the placeholder.
        self.code_review_agent = ConversableAgent(
            name="code_reviewer_agent",
            system_message=(
                "Your task is to review the code provided by the code writer agent and provide feedback on necessary corrections. "
                "Ensure that all required libraries are imported, and only the existing, approved operation functions are used."
                f"The only allowed libraries and operating functions are provided in the {self.polybot_file}."
                "Operate only when code is provided."
            ),
            llm_config=self.llm_config,
            code_execution_config=False,
            human_input_mode="NEVER",
        )
        # PDF scraper agent: answers questions from PDFs via the scrape_pdf tool.
        self.scraper_agent = ConversableAgent(
            name="PDFScraper",
            llm_config=self.llm_config,
            system_message="You are a PDF scrapper and you can scrape any PDF using the tools provided if a PDF is provided for context. "
            "After reading the text you can provide specific answers based on the context of the PDF file. "
            "Returns 'TERMINATE' when the scraping is done.",
            human_input_mode="NEVER",
        )
        # Admin agent: poses the task, executes proposed code, and terminates
        # the chat via is_termination_msg.
        self.polybot_admin = UserProxyAgent(
            name="admin",
            is_termination_msg=is_termination_msg,
            human_input_mode="ALWAYS",
            system_message="admin. You pose the task. Return 'TERMINATE' in the end when everything is over. Always 'TERMINATE' when execution succeeded.",
            llm_config=self.llm_config,
            code_execution_config={"executor": self.executor},
        )
        # Register the PDF scraping tool: the scraper proposes the call,
        # the admin actually executes it.
        register_function(
            pdf_to_text,
            caller=self.scraper_agent,
            executor=self.polybot_admin,
            name="scrape_pdf",
            description="Scrape PDF files and return the content.",
        )

    def _setup_group_chat(self):
        """Set up group chat and the capturing manager."""
        self.groupchat = autogen.GroupChat(
            agents=[self.polybot_admin, self.code_writer_agent, self.code_review_agent, self.scraper_agent],
            messages=[],
            max_round=20,
            select_speaker_auto_llm_config=self.llm_config,
        )
        self.manager = CaptureGroupChatManager(groupchat=self.groupchat, llm_config=self.llm_config)
        print("Group chat manager initialized with agents:")
        for agent in self.groupchat.agents:
            print(" -", agent.name)

    def _setup_teachability(self):
        """Set up teachability (persistent, deduplicated memory) for the agents."""
        self.teachability = DedupTeachability(
            verbosity=0,
            reset_db=False,  # keep memories across runs
            path_to_db_dir=f"./teachability_db_{self.llm_type}",
            recall_threshold=6,
            llm_config=self.llm_config,
            use_cosine=True,
        )
        # Add teachability to agents (the scraper agent is not included).
        for agent in [self.code_writer_agent, self.code_review_agent, self.polybot_admin, self.manager]:
            self.teachability.add_to_agent(agent)

    def initiate_chat(self, prompt: str) -> Any:
        """
        Initiate a chat with the specified prompt.

        Args:
            prompt (str): The prompt to initiate the chat with

        Returns:
            Any: Chat result
        """
        return self.polybot_admin.initiate_chat(
            self.manager,
            message=prompt,
        )

    async def a_initiate_chat(self, message: str):
        """Async variant of initiate_chat; clears prior chat history first."""
        await self.polybot_admin.a_initiate_chat(
            recipient=self.manager,
            message=message,
            clear_history=True,
        )
# Usage example:
if __name__ == "__main__":
    # Demo configuration.
    workdir = "polybot_exec_run"
    polybot_file_path = 'n9_robot_operation_commands.py'
    # Other options: "gpt4o-mini", "claude_35", "ArgoLLMs"
    llm_type = "gpt4o"

    # Initialize the system with desired LLM
    autogen_system = AutoGenSystem(
        llm_type=llm_type,
        workdir=workdir,
        polybot_file_path=polybot_file_path,
    )

    # Example prompts (only one is sent below).
    prompt_1 = """Write the execution code to move the vial with PEDOT:PSS to the clamp holder."""
    prompt_1a = """Write the code to move the vial with polymer A to the clamp."""
    prompt_2 = """Pick up a substrate and move it to the coating station."""
    prompt_2a = """Write the code to pick up a substrate and move it to the coating stage."""
    prompt_3 = """Write the execution code to create a polymer film using only PEDOT:PSS defined as polymer A.
Extract the best range of the film processing conditions from the PEDOT_PSS_manuscript.pdf."""
    prompt_3a = """Write the code to create a polymer film with only PEDOT:PSS defined as polymer A.
Identify the best processing conditions from the paper PEDOT PSS manuscript.pdf”."""

    # Initiate chat with desired prompt
    chat_result = autogen_system.initiate_chat(prompt_3)