Skip to content

Commit f2966f0

Browse files
[dev] [Marfuen] mariano/small-ui-ux-fixes (#1579)
* chore: standardize size of panels
* chore: make sure the latest processing message is auto-expanded, not only the first one
* chore: make view mode switch smaller
* chore: use gpt-5-mini and improve test UI
* chore: add confetti animation to TestDialog on success

Co-authored-by: Mariano Fuentes <marfuen98@gmail.com>
1 parent a04341c commit f2966f0

File tree

20 files changed: +521 additions, -149 deletions

apps/app/src/ai/constants.ts

Lines changed: 6 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -5,18 +5,22 @@ export enum Models {
55
AnthropicClaude4Sonnet = 'anthropic/claude-4-sonnet',
66
GoogleGeminiFlash = 'google/gemini-2.5-flash',
77
MoonshotKimiK2 = 'moonshotai/kimi-k2',
8-
OpenAIGPT5 = 'gpt-5',
8+
OpenAIGPT5 = 'openai/gpt-5',
9+
OpenAIGPT5Mini = 'openai/gpt-5-mini',
10+
OpenAIGPT4oMini = 'openai/gpt-4o-mini',
911
XaiGrok3Fast = 'xai/grok-3-fast',
1012
}
1113

12-
export const DEFAULT_MODEL = Models.OpenAIGPT5;
14+
export const DEFAULT_MODEL = Models.OpenAIGPT5Mini;
1315

1416
export const SUPPORTED_MODELS: GatewayModelId[] = [
1517
Models.AmazonNovaPro,
1618
Models.AnthropicClaude4Sonnet,
1719
Models.GoogleGeminiFlash,
1820
Models.MoonshotKimiK2,
1921
Models.OpenAIGPT5,
22+
Models.OpenAIGPT5Mini,
23+
Models.OpenAIGPT4oMini,
2024
Models.XaiGrok3Fast,
2125
];
2226

apps/app/src/ai/gateway.ts

Lines changed: 17 additions & 31 deletions
Original file line numberDiff line numberDiff line change
@@ -1,29 +1,27 @@
1-
import { createGatewayProvider } from '@ai-sdk/gateway'
2-
import { Models } from './constants'
3-
import type { JSONValue } from 'ai'
4-
import type { OpenAIResponsesProviderOptions } from '@ai-sdk/openai'
5-
import type { LanguageModelV2 } from '@ai-sdk/provider'
1+
import { createGatewayProvider } from '@ai-sdk/gateway';
2+
import type { OpenAIResponsesProviderOptions } from '@ai-sdk/openai';
3+
import type { LanguageModelV2 } from '@ai-sdk/provider';
4+
import type { JSONValue } from 'ai';
5+
import { Models } from './constants';
66

77
export async function getAvailableModels() {
8-
const gateway = gatewayInstance()
9-
const response = await gateway.getAvailableModels()
10-
return response.models
11-
.map((model) => ({ id: model.id, name: model.name }))
12-
.concat([{ id: Models.OpenAIGPT5, name: 'GPT-5' }])
8+
const gateway = gatewayInstance();
9+
const response = await gateway.getAvailableModels();
10+
return response.models.map((model) => ({ id: model.id, name: model.name }));
1311
}
1412

1513
export interface ModelOptions {
16-
model: LanguageModelV2
17-
providerOptions?: Record<string, Record<string, JSONValue>>
18-
headers?: Record<string, string>
14+
model: LanguageModelV2;
15+
providerOptions?: Record<string, Record<string, JSONValue>>;
16+
headers?: Record<string, string>;
1917
}
2018

2119
export function getModelOptions(
2220
modelId: string,
23-
options?: { reasoningEffort?: 'minimal' | 'low' | 'medium' }
21+
options?: { reasoningEffort?: 'minimal' | 'low' | 'medium' },
2422
): ModelOptions {
25-
const gateway = gatewayInstance()
26-
if (modelId === Models.OpenAIGPT5) {
23+
const gateway = gatewayInstance();
24+
if (modelId === Models.OpenAIGPT5 || modelId === Models.OpenAIGPT5Mini) {
2725
return {
2826
model: gateway(modelId),
2927
providerOptions: {
@@ -34,28 +32,16 @@ export function getModelOptions(
3432
serviceTier: 'priority',
3533
} satisfies OpenAIResponsesProviderOptions,
3634
},
37-
}
38-
}
39-
40-
if (modelId === Models.AnthropicClaude4Sonnet) {
41-
return {
42-
model: gateway(modelId),
43-
headers: { 'anthropic-beta': 'fine-grained-tool-streaming-2025-05-14' },
44-
providerOptions: {
45-
anthropic: {
46-
cacheControl: { type: 'ephemeral' },
47-
},
48-
},
49-
}
35+
};
5036
}
5137

5238
return {
5339
model: gateway(modelId),
54-
}
40+
};
5541
}
5642

5743
function gatewayInstance() {
5844
return createGatewayProvider({
5945
baseURL: process.env.AI_GATEWAY_BASE_URL,
60-
})
46+
});
6147
}

apps/app/src/app/(app)/[orgId]/tasks/[taskId]/automation/chat.tsx

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -36,7 +36,7 @@ export function Chat({ className, orgId, taskId }: Props) {
3636
if (text.trim()) {
3737
sendMessage(
3838
{ text },
39-
{ body: { modelId: Models.OpenAIGPT5, reasoningEffort: 'medium', orgId, taskId } },
39+
{ body: { modelId: Models.OpenAIGPT5Mini, reasoningEffort: 'medium', orgId, taskId } },
4040
);
4141
setInput('');
4242
}
@@ -51,7 +51,7 @@ export function Chat({ className, orgId, taskId }: Props) {
5151
{
5252
text: `I've added the secret "${secretName}". You can now use it in the automation script.`,
5353
},
54-
{ body: { modelId: Models.OpenAIGPT5, reasoningEffort: 'medium', orgId, taskId } },
54+
{ body: { modelId: Models.OpenAIGPT5Mini, reasoningEffort: 'medium', orgId, taskId } },
5555
);
5656
},
5757
[sendMessage, orgId, taskId],
@@ -69,7 +69,7 @@ export function Chat({ className, orgId, taskId }: Props) {
6969
{
7070
text: `I've provided the following information:\n\n${infoText}\n\nYou can now continue with creating the automation script.`,
7171
},
72-
{ body: { modelId: Models.OpenAIGPT5, reasoningEffort: 'medium', orgId, taskId } },
72+
{ body: { modelId: Models.OpenAIGPT5Mini, reasoningEffort: 'medium', orgId, taskId } },
7373
);
7474
},
7575
[sendMessage, orgId, taskId],

apps/app/src/app/(app)/[orgId]/tasks/[taskId]/automation/components/chat/message.tsx

Lines changed: 10 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -36,7 +36,16 @@ export const Message = memo(function Message({
3636
.filter(({ part }) => part.type === 'reasoning');
3737

3838
useEffect(() => {
39-
// Only auto-expand once or when no selection exists.
39+
// Prefer expanding the latest streaming reasoning part if present.
40+
const latestStreaming = [...reasoningParts]
41+
.reverse()
42+
.find(({ part }) => (part as any)?.state === 'streaming');
43+
if (latestStreaming && latestStreaming.index !== expandedReasoningIndex) {
44+
setExpandedReasoningIndex(latestStreaming.index);
45+
return;
46+
}
47+
48+
// Otherwise, if nothing expanded yet, expand the latest reasoning block.
4049
if (expandedReasoningIndex === null && reasoningParts.length > 0) {
4150
const latestReasoningIndex = reasoningParts[reasoningParts.length - 1].index;
4251
setExpandedReasoningIndex(latestReasoningIndex);

apps/app/src/app/(app)/[orgId]/tasks/[taskId]/automation/components/panels/panels.tsx

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -31,7 +31,7 @@ export function PanelHeader({ className, children }: Props) {
3131
return (
3232
<div
3333
className={cn(
34-
'relative flex items-center px-5 py-3',
34+
'relative flex items-center shrink-0 h-12 px-4',
3535
// Primary-tinted header for better contrast
3636
'bg-primary/5',
3737
// Primary-accented divider

apps/app/src/app/(app)/[orgId]/tasks/[taskId]/automation/components/settings/reasoning-effort.tsx

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -6,7 +6,7 @@ import { useModelId, useReasoningEffort } from './use-settings';
66
export function ReasoningEffort() {
77
const [modelId] = useModelId();
88
const [effort, setEffort] = useReasoningEffort();
9-
if (modelId !== Models.OpenAIGPT5) {
9+
if (modelId !== Models.OpenAIGPT5 && modelId !== Models.OpenAIGPT5Mini) {
1010
return null;
1111
}
1212

(file path missing from capture — appears to be the new confetti-effect component; verify against the commit) Lines changed: 112 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,112 @@
1+
'use client';
2+
3+
import { useEffect, useRef } from 'react';
4+
5+
// Confetti particle class
6+
class Particle {
7+
x: number;
8+
y: number;
9+
vx: number;
10+
vy: number;
11+
size: number;
12+
color: string;
13+
alpha: number;
14+
decay: number;
15+
16+
constructor(x: number, y: number) {
17+
this.x = x;
18+
this.y = y;
19+
this.vx = (Math.random() - 0.5) * 12;
20+
this.vy = Math.random() * -15 - 10;
21+
this.size = Math.random() * 6 + 2;
22+
this.color = ['#10b981', '#22c55e', '#6ee7b7', '#34d399', '#86efac'][
23+
Math.floor(Math.random() * 5)
24+
];
25+
this.alpha = 1;
26+
this.decay = 0.015;
27+
}
28+
29+
update() {
30+
this.x += this.vx;
31+
this.y += this.vy;
32+
this.vy += 0.5; // gravity
33+
this.vx *= 0.99; // air resistance
34+
this.alpha -= this.decay;
35+
// Keep alpha at 0 minimum so particles keep falling
36+
if (this.alpha < 0) this.alpha = 0;
37+
}
38+
39+
draw(ctx: CanvasRenderingContext2D) {
40+
ctx.save();
41+
ctx.globalAlpha = this.alpha;
42+
ctx.fillStyle = this.color;
43+
ctx.fillRect(this.x, this.y, this.size, this.size);
44+
ctx.restore();
45+
}
46+
}
47+
48+
interface ConfettiEffectProps {
49+
trigger: boolean;
50+
particleCount?: number;
51+
}
52+
53+
export function ConfettiEffect({ trigger, particleCount = 100 }: ConfettiEffectProps) {
54+
const canvasRef = useRef<HTMLCanvasElement>(null);
55+
const particlesRef = useRef<Particle[]>([]);
56+
const animationRef = useRef<number>(0);
57+
58+
useEffect(() => {
59+
if (trigger && canvasRef.current) {
60+
const canvas = canvasRef.current;
61+
const ctx = canvas.getContext('2d');
62+
if (!ctx) return;
63+
64+
// Set canvas size
65+
canvas.width = window.innerWidth;
66+
canvas.height = window.innerHeight;
67+
68+
// Create initial particles from dialog center
69+
const centerX = canvas.width / 2;
70+
const centerY = canvas.height / 2 - 100; // Slightly above center
71+
72+
particlesRef.current = Array.from(
73+
{ length: particleCount },
74+
() => new Particle(centerX, centerY),
75+
);
76+
77+
// Animation loop
78+
const animate = () => {
79+
ctx.clearRect(0, 0, canvas.width, canvas.height);
80+
81+
particlesRef.current = particlesRef.current.filter((particle) => {
82+
particle.update();
83+
particle.draw(ctx);
84+
// Only remove particles that have fallen off the bottom of the screen
85+
return particle.y < canvas.height + 50; // +50 to ensure they're fully off-screen
86+
});
87+
88+
if (particlesRef.current.length > 0) {
89+
animationRef.current = requestAnimationFrame(animate);
90+
}
91+
};
92+
93+
animate();
94+
95+
return () => {
96+
if (animationRef.current) {
97+
cancelAnimationFrame(animationRef.current);
98+
}
99+
};
100+
}
101+
}, [trigger, particleCount]);
102+
103+
if (!trigger) return null;
104+
105+
return (
106+
<canvas
107+
ref={canvasRef}
108+
className="fixed inset-0 pointer-events-none z-[100]"
109+
style={{ width: '100vw', height: '100vh' }}
110+
/>
111+
);
112+
}

0 commit comments

Comments (0)