Skip to content

Commit 2893f9f

Browse files
committed
feat: Add OpenAI Codex CLI module with AgentAPI web chat UI and task reporting
- Implemented Rust-based OpenAI Codex CLI module for Coder Registry
- Added AgentAPI integration for web-based chat UI interface
- Included full task reporting support for Coder + Tasks UI
- Features: code generation, completion, explanation, and interactive chat

Addresses issue #236 - OpenAI Codex CLI module with task reporting
1 parent 53af6e0 commit 2893f9f

File tree

11 files changed

+1219
-0
lines changed

11 files changed

+1219
-0
lines changed
13.5 KB
Loading

registry/krikera/README.md

Lines changed: 8 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,8 @@
1+
---
2+
display_name: Krishna Ketan Rai
3+
bio: I'm a computer science student
4+
github: krikera
5+
avatar: ./.images/avatar.png
6+
website: https://www.krishnaketanrai.tech/
7+
status: community
8+
---
Lines changed: 106 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,106 @@
1+
---
2+
display_name: "OpenAI Codex CLI"
3+
description: "Rust-based OpenAI Codex CLI with AgentAPI web chat UI and task reporting"
4+
icon: "../../../../.icons/openai.svg"
5+
maintainer_github: "krikera"
6+
verified: false
7+
tags: ["ai", "assistant", "codex", "openai", "rust", "tasks"]
8+
---
9+
10+
# OpenAI Codex CLI
11+
12+
A Rust-based OpenAI Codex CLI tool with AgentAPI web chat UI integration and full task reporting support for Coder + Tasks UI.
13+
14+
## Features
15+
16+
- **Rust-based CLI**: High-performance Rust implementation of OpenAI Codex
17+
- **Web Chat UI**: Interactive web interface through AgentAPI integration
18+
- **Task Reporting**: Full integration with Coder Tasks UI
19+
- **Code Generation**: Generate code from natural language descriptions
20+
- **Code Completion**: Smart code completion and suggestions
21+
- **Code Explanation**: Get explanations for existing code
22+
23+
```tf
24+
module "codex" {
25+
source = "registry.coder.com/krikera/codex/coder"
26+
version = "1.0.0"
27+
agent_id = coder_agent.example.id
28+
}
29+
```
30+
31+
## Examples
32+
33+
### Basic Usage
34+
35+
```tf
36+
module "codex" {
37+
source = "registry.coder.com/krikera/codex/coder"
38+
version = "1.0.0"
39+
agent_id = coder_agent.example.id
40+
}
41+
```
42+
43+
### Custom Configuration
44+
45+
```tf
46+
module "codex" {
47+
source = "registry.coder.com/krikera/codex/coder"
48+
version = "1.0.0"
49+
agent_id = coder_agent.example.id
50+
openai_model = "gpt-4"
51+
temperature = 0.7
52+
max_tokens = 2048
53+
folder = "/home/coder/workspace"
54+
}
55+
```
56+
57+
### With Custom OpenAI API Key
58+
59+
```tf
60+
module "codex" {
61+
source = "registry.coder.com/krikera/codex/coder"
62+
version = "1.0.0"
63+
agent_id = coder_agent.example.id
64+
openai_api_key = var.openai_api_key
65+
}
66+
```
67+
68+
### Advanced Configuration
69+
70+
```tf
71+
module "codex" {
72+
source = "registry.coder.com/krikera/codex/coder"
73+
version = "1.0.0"
74+
agent_id = coder_agent.example.id
75+
openai_model = "gpt-4"
76+
temperature = 0.2
77+
max_tokens = 4096
78+
install_codex = true
79+
codex_version = "latest"
80+
pre_install_script = "apt-get update && apt-get install -y build-essential"
81+
folder = "/workspace"
82+
order = 1
83+
group = "AI Tools"
84+
}
85+
```
86+
87+
### With Task Reporting
88+
89+
```tf
90+
data "coder_parameter" "ai_prompt" {
91+
type = "string"
92+
name = "AI Prompt"
93+
default = ""
94+
description = "Write a prompt for the Codex CLI"
95+
mutable = true
96+
}
97+
98+
module "codex" {
99+
source = "registry.coder.com/krikera/codex/coder"
100+
version = "1.0.0"
101+
agent_id = coder_agent.example.id
102+
openai_api_key = var.openai_api_key
103+
ai_prompt = data.coder_parameter.ai_prompt.value
104+
folder = "/home/coder/projects"
105+
}
106+
```
Lines changed: 164 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,164 @@
1+
import {
2+
test,
3+
afterEach,
4+
expect,
5+
describe,
6+
setDefaultTimeout,
7+
beforeAll,
8+
} from "bun:test";
9+
import { execContainer, runTerraformInit } from "~test";
10+
import {
11+
setupContainer,
12+
loadTestFile,
13+
writeExecutable,
14+
execModuleScript,
15+
expectAgentAPIStarted,
16+
} from "./test-util";
17+
18+
let cleanupFunctions: (() => Promise<void>)[] = [];
19+
20+
const registerCleanup = (cleanup: () => Promise<void>) => {
21+
cleanupFunctions.push(cleanup);
22+
};
23+
24+
afterEach(async () => {
25+
const cleanupFnsCopy = cleanupFunctions.slice().reverse();
26+
cleanupFunctions = [];
27+
for (const cleanup of cleanupFnsCopy) {
28+
try {
29+
await cleanup();
30+
} catch (error) {
31+
console.error("Error during cleanup:", error);
32+
}
33+
}
34+
});
35+
36+
const moduleDir = import.meta.dir;
37+
38+
beforeAll(async () => {
39+
await runTerraformInit(moduleDir);
40+
});
41+
42+
describe("codex", () => {
43+
test("creates codex module with default configuration", async () => {
44+
const { id, coderScript, cleanup } = await setupContainer({
45+
moduleDir,
46+
image: "codercom/enterprise-node:latest",
47+
});
48+
registerCleanup(cleanup);
49+
50+
// Execute the module script to install the mock CLI
51+
const scriptResult = await execModuleScript({
52+
containerId: id,
53+
coderScript,
54+
});
55+
expect(scriptResult.exitCode).toBe(0);
56+
57+
// Test that the module installs correctly
58+
const result = await execContainer(id, ["which", "codex-cli"]);
59+
expect(result.exitCode).toBe(0);
60+
});
61+
62+
test("creates codex module with custom configuration", async () => {
63+
const { id, coderScript, cleanup } = await setupContainer({
64+
moduleDir,
65+
image: "codercom/enterprise-node:latest",
66+
vars: {
67+
openai_model: "gpt-4",
68+
temperature: "0.7",
69+
max_tokens: "2048",
70+
folder: "/workspace",
71+
install_codex: "true",
72+
codex_version: "latest",
73+
order: "1",
74+
group: "AI Tools",
75+
},
76+
});
77+
registerCleanup(cleanup);
78+
79+
// Execute the module script to install the mock CLI
80+
const scriptResult = await execModuleScript({
81+
containerId: id,
82+
coderScript,
83+
});
84+
expect(scriptResult.exitCode).toBe(0);
85+
86+
// Test that the module installs correctly with custom configuration
87+
const result = await execContainer(id, ["which", "codex-cli"]);
88+
expect(result.exitCode).toBe(0);
89+
90+
// Test that configuration is properly set
91+
const configResult = await execContainer(id, ["test", "-f", "/home/coder/.config/codex/config.toml"]);
92+
expect(configResult.exitCode).toBe(0);
93+
});
94+
95+
test("creates codex module with custom API key", async () => {
96+
const { id, coderScript, cleanup } = await setupContainer({
97+
moduleDir,
98+
image: "codercom/enterprise-node:latest",
99+
vars: {
100+
openai_api_key: "sk-test-api-key",
101+
openai_model: "gpt-3.5-turbo",
102+
},
103+
});
104+
registerCleanup(cleanup);
105+
106+
// Execute the module script to install the mock CLI
107+
const scriptResult = await execModuleScript({
108+
containerId: id,
109+
coderScript,
110+
});
111+
expect(scriptResult.exitCode).toBe(0);
112+
113+
// Test that the module installs correctly
114+
const result = await execContainer(id, ["which", "codex-cli"]);
115+
expect(result.exitCode).toBe(0);
116+
});
117+
118+
test("creates codex module with installation disabled", async () => {
119+
const { id, cleanup } = await setupContainer({
120+
moduleDir,
121+
image: "codercom/enterprise-node:latest",
122+
vars: {
123+
install_codex: "false",
124+
},
125+
});
126+
registerCleanup(cleanup);
127+
128+
// Test that codex-cli is not installed when disabled
129+
const result = await execContainer(id, ["which", "codex-cli"]);
130+
expect(result.exitCode).toBe(1);
131+
});
132+
133+
test("validates temperature range", async () => {
134+
// Test with invalid temperature (should fail during terraform plan/apply)
135+
try {
136+
await setupContainer({
137+
moduleDir,
138+
image: "codercom/enterprise-node:latest",
139+
vars: {
140+
temperature: "2.5", // Invalid - should be between 0.0 and 2.0
141+
},
142+
});
143+
expect(true).toBe(false); // Should not reach here
144+
} catch (error) {
145+
expect((error as Error).message).toContain("Temperature must be between 0.0 and 2.0");
146+
}
147+
});
148+
149+
test("validates max_tokens range", async () => {
150+
// Test with invalid max_tokens (should fail during terraform plan/apply)
151+
try {
152+
await setupContainer({
153+
moduleDir,
154+
image: "codercom/enterprise-node:latest",
155+
vars: {
156+
max_tokens: "5000", // Invalid - should be between 1 and 4096
157+
},
158+
});
159+
expect(true).toBe(false); // Should not reach here
160+
} catch (error) {
161+
expect((error as Error).message).toContain("Max tokens must be between 1 and 4096");
162+
}
163+
});
164+
});

0 commit comments

Comments
 (0)