-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathapp.py
More file actions
188 lines (159 loc) · 8.02 KB
/
app.py
File metadata and controls
188 lines (159 loc) · 8.02 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
import gradio as gr
import uuid
from src.graph import app as graph_app  # compiled LangGraph workflow (checkpointed, interruptible)
from src.config.logger import get_logger
import os  # NOTE(review): os appears unused in this file — confirm before removing
logger = get_logger("App")  # module-level logger for this UI layer
def process_chat(user_input, history, json_data, thread_id, is_initial):
    """
    Main handler for the Chat UI.

    Drives the LangGraph workflow in two phases:

    1. INITIAL: the user's description is streamed through the graph until
       it interrupts (after the Analyst), producing a JSON blueprint for
       review.
    2. FEEDBACK: subsequent messages either resume the interrupted graph
       (pushing the possibly-edited blueprint back into state) or, if the
       graph already ran to END, start a fresh run carrying the feedback.

    Args:
        user_input: Raw text the user typed into the chat box.
        history: Chatbot history as a list of (user, bot) tuples, or None
            on the very first call.
        json_data: Blueprint currently shown in the JSON tab; written back
            into graph state when resuming an interrupted run.
        thread_id: Per-session id used as the LangGraph checkpoint thread key.
        is_initial: True until the first analysis pass has completed.

    Returns:
        A 7-tuple matching the Gradio outputs, in this exact order:
        (history, blueprint_json, model_path, download_path, is_initial,
         bpy_code, test_report)
    """
    # Lazy %-style args: no string formatting cost when the level is disabled.
    logger.info("Chat Triggered: is_initial=%s, thread_id=%s", is_initial, thread_id)
    config = {"configurable": {"thread_id": thread_id}}

    # Append user message to history immediately for UI feedback
    if history is None:
        history = []
    history.append((user_input, None))  # None for bot response initially

    # 1. INITIAL PHASE: User provides description -> Analyst -> Blueprint
    if is_initial:
        logger.info("Starting initial analysis with: %s...", user_input[:50])
        inputs = {"input_data": user_input, "messages": []}
        try:
            # Run graph until interrupt (after Analyst)
            for event in graph_app.stream(inputs, config=config):
                pass
        except Exception as e:
            err_msg = f"Error during analysis: {str(e)}"
            logger.error(err_msg, exc_info=True)
            history[-1] = (user_input, err_msg)
            # Must return 7 values: history, json, model, file, is_initial, code, test_report
            # is_initial stays True so the user can retry from scratch.
            return history, {}, None, None, True, "", ""

        # Fetch the checkpointed state left behind at the interrupt point.
        snapshot = graph_app.get_state(config)
        vals = snapshot.values
        blueprint = vals.get("json_blueprint", {})
        code = vals.get("bpy_code", "")
        test_report = vals.get("test_report", "")
        bot_msg = "I've analyzed your request. Please review the **Blueprint** on the right.\n\nIf it looks good, type **'Proceed'** or **'Build'**. If you want changes, just tell me (e.g., 'Make it taller')."
        history[-1] = (user_input, bot_msg)
        return (
            history,      # Updated Chat
            blueprint,    # JSON Output
            None,         # 3D Model (None)
            None,         # Download (None)
            False,        # is_initial -> False
            code,         # BPY Code
            test_report   # Quality Report
        )

    # 2. FEEDBACK LOOP: User feedback -> Supervisor -> Analyst/Architect/Coder -> Validator -> Tester
    else:
        logger.info("Resuming with feedback: %s...", user_input[:50])
        snapshot = graph_app.get_state(config)

        # Decide if we are RESUMING or STARTING A NEW RUN
        if not snapshot.next:
            # Graph already completed, start fresh from supervisor with feedback
            logger.info("Graph at END. Starting new run from entry point.")
            stream_input = {"feedback": user_input}
        else:
            # Graph is interrupted (at Analyst or Tester); inject the reviewed
            # blueprint and the new feedback before resuming with a None input.
            logger.info("Graph is interrupted at %s. Resuming.", snapshot.next)
            graph_app.update_state(config, {"json_blueprint": json_data, "feedback": user_input})
            stream_input = None

        # Execute
        try:
            for event in graph_app.stream(stream_input, config=config):
                # We could stream partial status updates to chat here if we wanted
                pass
        except Exception as e:
            err_msg = f"Error during generation: {str(e)}"
            logger.error(err_msg, exc_info=True)
            history[-1] = (user_input, err_msg)
            return history, {}, None, None, False, "", ""

        # Fetch final state
        snapshot = graph_app.get_state(config)
        vals = snapshot.values
        final_stl = vals.get("stl_path")
        errors = vals.get("errors", [])
        final_code = vals.get("bpy_code", "")
        test_report = vals.get("test_report", "")

        if final_stl:
            # Success path: an STL exists; surface any non-fatal issues too.
            msg = "✅ **Generation Complete!**\n\nI've generated the 3D model. You can preview it on the right or download the STL file."
            if errors:
                msg += f"\n\n⚠️ **Note:** There were technical issues: {errors}"
            history[-1] = (user_input, msg)
            return history, vals.get("json_blueprint", {}), final_stl, final_stl, False, final_code, test_report
        elif errors:
            msg = "❌ **Generation Failed**\n\nIssues found:\n" + "\n".join([f"- {e}" for e in errors])
            history[-1] = (user_input, msg)
            return history, vals.get("json_blueprint", {}), None, None, False, final_code, test_report
        else:
            # If the graph is interrupted (meaning we are waiting for user review),
            # tailor the prompt to whichever node the graph is paused before.
            next_nodes = list(snapshot.next) if snapshot.next else []
            if "tester" in next_nodes:
                bot_msg = "✅ **3D Model Ready!**\n\nI've generated the first version. Review the **Quality Report** and **3D Preview**. \n\nIf you want changes, type them here. Otherwise, we're done!"
            elif "supervisor" in next_nodes or "analyst" in next_nodes:
                bot_msg = "I've updated the plan. Review the **Blueprint** and type **'Build'** if it's ready."
            else:
                bot_msg = "Processing complete. Check the tabs for results."
            history[-1] = (user_input, bot_msg)
            return history, vals.get("json_blueprint", {}), None, None, False, final_code, test_report
# UI layout: chat on the left, inspector tabs (3D / JSON / code / report) on the right.
with gr.Blocks(title="3D Designer Agent", theme=gr.themes.Soft(primary_hue="blue", secondary_hue="slate")) as demo:
    gr.Markdown("# 🛠️ Autonomous 3D Designer Agent")

    # Session State
    thread_state = gr.State(lambda: str(uuid.uuid4()))  # fresh LangGraph thread id per session
    is_initial_state = gr.State(True)  # flips to False after the first analysis pass

    with gr.Row(equal_height=True):
        # LEFT COLUMN: Chat Interface
        with gr.Column(scale=1):
            chatbot = gr.Chatbot(
                label="Designer Assistant",
                height=600,
                avatar_images=(None, "https://api.dicebear.com/7.x/bottts/svg?seed=3dagent"),
                bubble_full_width=False
            )
            with gr.Row():
                msg_input = gr.Textbox(
                    show_label=False,
                    placeholder="Describe a 3D object (e.g. 'A simple coffee mug')...",
                    scale=4,
                    container=False
                )
                submit_btn = gr.Button("Send", variant="primary", scale=1)
            gr.Examples(
                examples=["A futuristic chair with 3 legs", "A simple red cube", "A chess pawn"],
                inputs=msg_input
            )

        # RIGHT COLUMN: Inspector (Blueprint & 3D View)
        with gr.Column(scale=1):
            with gr.Tabs():
                with gr.TabItem("3D Preview"):
                    model_output = gr.Model3D(
                        label="Model Preview",
                        clear_color=[0.1, 0.1, 0.1, 1.0],
                        interactive=True,
                        height=400
                    )
                    download_output = gr.File(label="Download Generated STL")
                with gr.TabItem("Blueprint (JSON)"):
                    json_output = gr.JSON(label="Reverse Engineering Plan", height=400)
                with gr.TabItem("Generated Code"):
                    code_output = gr.Code(label="BPY Script", language="python", lines=20)
                with gr.TabItem("Quality Report"):
                    test_output = gr.Markdown(label="Technical Analysis")

    # Event Handlers: the Send button and Enter in the textbox run the exact
    # same pipeline, so wire both triggers from one shared definition (DRY).
    chat_inputs = [msg_input, chatbot, json_output, thread_state, is_initial_state]
    chat_outputs = [chatbot, json_output, model_output, download_output, is_initial_state, code_output, test_output]
    for trigger in (submit_btn.click, msg_input.submit):
        trigger(
            process_chat,
            inputs=chat_inputs,
            outputs=chat_outputs
        ).then(
            lambda: "", None, msg_input  # Clear input box
        )
# Launch the Gradio server only when run as a script (not on import).
if __name__ == "__main__":
    demo.launch()