1 change: 1 addition & 0 deletions .review/pr-8274
Submodule pr-8274 added at e46929
17 changes: 14 additions & 3 deletions src/core/task/Task.ts
@@ -1666,9 +1666,20 @@ export class Task extends EventEmitter<TaskEvents> implements TaskLike {
content: [{ type: "text", text: `[new_task completed] Result: ${lastMessage}` }],
})

// Set skipPrevResponseIdOnce to ensure the next API call sends the full conversation
// including the subtask result, not just from before the subtask was created
this.skipPrevResponseIdOnce = true
// For GPT-5 models, we should NOT skip the previous_response_id after subtask completion
// because GPT-5 relies on response continuity to maintain context. When we skip it,
// GPT-5 only receives the latest message without the subtask result context.
// For other models, we skip to ensure the full conversation is sent.
const modelId = this.api.getModel().id
const isGpt5Model = modelId && modelId.startsWith("gpt-5")
Review comment (Contributor Author):
[P1] GPT‑5 detection robustness. Model ids can be prefixed or vary in case across providers. Suggest normalizing and using includes:

Suggested change:
    const modelId = this.api.getModel().id
    const isGpt5Model = typeof modelId === "string" && modelId.toLowerCase().includes("gpt-5")

Also consider centralizing this in a small helper to keep parity with other call sites.
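A minimal sketch of the centralized helper this comment suggests; the function name and file location are assumptions for illustration, not part of the PR:

    // Hypothetical shared helper (e.g. src/shared/modelUtils.ts; name and path are assumptions).
    // Normalizes the id so prefixed or differently-cased ids (e.g. "openrouter/GPT-5-Codex") still match.
    export function isGpt5ModelId(modelId: unknown): boolean {
        return typeof modelId === "string" && modelId.toLowerCase().includes("gpt-5")
    }

    // The call site in completeSubtask would then become:
    // const isGpt5Model = isGpt5ModelId(this.api.getModel().id)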


if (!isGpt5Model) {
// Only skip previous_response_id for non-GPT-5 models
// This ensures the next API call sends the full conversation
// including the subtask result, not just from before the subtask was created
this.skipPrevResponseIdOnce = true
}
// For GPT-5 models, we maintain continuity by keeping the previous_response_id chain intact
} catch (error) {
this.providerRef
.deref()
123 changes: 123 additions & 0 deletions src/core/task/__tests__/Task.spec.ts
@@ -1776,4 +1776,127 @@ describe("Cline", () => {
consoleErrorSpy.mockRestore()
})
})

describe("GPT-5 Subtask Completion", () => {
it("should NOT skip previous_response_id for GPT-5 models after subtask completion", async () => {
Review comment (Contributor Author):
[P2] Completeness. Add a test that, after completeSubtask() with GPT‑5, the subsequent attemptApiRequest() includes metadata.previousResponseId (and omits suppressPreviousResponseId). This validates the continuity chain beyond just the flag.
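A rough sketch of the suggested follow-up test, reusing the GPT-5 parentTask setup from the test below; the createMessage spy target, the metadata argument position, and the attemptApiRequest call shape are assumptions and would need to match the real Task API:

    // Hypothetical continuity test (sketch only); assumes api.createMessage(systemPrompt, messages, metadata)
    // receives the response-chain metadata described above.
    it("includes previousResponseId on the next request after a GPT-5 subtask completes", async () => {
        const createMessageSpy = vi.spyOn(parentTask.api, "createMessage")

        await parentTask.completeSubtask("Subtask completed successfully")

        // Drive a single follow-up request (assumes attemptApiRequest is an async generator).
        await (parentTask as any).attemptApiRequest(-1).next()

        const metadata = createMessageSpy.mock.calls.at(-1)?.[2]
        expect(metadata?.previousResponseId).toBeDefined()
        expect(metadata?.suppressPreviousResponseId).toBeUndefined()
    })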

// Create a mock GPT-5 API configuration
const gpt5ApiConfig = {
apiProvider: "openai-native" as const,
apiModelId: "gpt-5-codex",
openAiNativeApiKey: "test-key",
}

// Create parent task with GPT-5 model
const parentTask = new Task({
provider: mockProvider,
apiConfiguration: gpt5ApiConfig,
task: "parent task",
startTask: false,
})

// Mock the API model to return GPT-5
vi.spyOn(parentTask.api, "getModel").mockReturnValue({
id: "gpt-5-codex",
info: {
contextWindow: 128000,
maxTokens: 4096,
inputPrice: 0.25,
outputPrice: 0.75,
} as any,
})

// Spy on say and addToApiConversationHistory
const saySpy = vi.spyOn(parentTask, "say").mockResolvedValue(undefined)
const addToApiSpy = vi.spyOn(parentTask as any, "addToApiConversationHistory").mockResolvedValue(undefined)

// Call completeSubtask
await parentTask.completeSubtask("Subtask completed successfully")

// Verify the subtask result was added
expect(saySpy).toHaveBeenCalledWith("subtask_result", "Subtask completed successfully")
expect(addToApiSpy).toHaveBeenCalledWith({
role: "user",
content: [{ type: "text", text: "[new_task completed] Result: Subtask completed successfully" }],
})

// Verify skipPrevResponseIdOnce is NOT set for GPT-5 models
expect((parentTask as any).skipPrevResponseIdOnce).toBe(false)
})

it("should skip previous_response_id for non-GPT-5 models after subtask completion", async () => {
// Create a mock non-GPT-5 API configuration
const claudeApiConfig = {
apiProvider: "anthropic" as const,
apiModelId: "claude-3-5-sonnet-20241022",
apiKey: "test-key",
}

// Create parent task with Claude model
const parentTask = new Task({
provider: mockProvider,
apiConfiguration: claudeApiConfig,
task: "parent task",
startTask: false,
})

// Mock the API model to return Claude
vi.spyOn(parentTask.api, "getModel").mockReturnValue({
id: "claude-3-5-sonnet-20241022",
info: {
contextWindow: 200000,
maxTokens: 4096,
inputPrice: 0.25,
outputPrice: 0.75,
} as any,
})

// Spy on say and addToApiConversationHistory
const saySpy = vi.spyOn(parentTask, "say").mockResolvedValue(undefined)
const addToApiSpy = vi.spyOn(parentTask as any, "addToApiConversationHistory").mockResolvedValue(undefined)

// Call completeSubtask
await parentTask.completeSubtask("Subtask completed successfully")

// Verify the subtask result was added
expect(saySpy).toHaveBeenCalledWith("subtask_result", "Subtask completed successfully")
expect(addToApiSpy).toHaveBeenCalledWith({
role: "user",
content: [{ type: "text", text: "[new_task completed] Result: Subtask completed successfully" }],
})

// Verify skipPrevResponseIdOnce IS set for non-GPT-5 models
expect((parentTask as any).skipPrevResponseIdOnce).toBe(true)
})

it("should handle edge case where model ID is undefined", async () => {
// Create task with minimal configuration
const parentTask = new Task({
provider: mockProvider,
apiConfiguration: mockApiConfig,
task: "parent task",
startTask: false,
})

// Mock the API model to return undefined ID
vi.spyOn(parentTask.api, "getModel").mockReturnValue({
id: undefined as any,
info: {
contextWindow: 200000,
maxTokens: 4096,
inputPrice: 0.25,
outputPrice: 0.75,
} as any,
})

// Spy on say and addToApiConversationHistory
const saySpy = vi.spyOn(parentTask, "say").mockResolvedValue(undefined)
const addToApiSpy = vi.spyOn(parentTask as any, "addToApiConversationHistory").mockResolvedValue(undefined)

// Call completeSubtask
await parentTask.completeSubtask("Subtask completed")

// When model ID is undefined, should default to skipping (non-GPT-5 behavior)
expect((parentTask as any).skipPrevResponseIdOnce).toBe(true)
})
})
})
1 change: 1 addition & 0 deletions tmp/pr-8287-Roo-Code
Submodule pr-8287-Roo-Code added at 88a473