feat: Add OpenAI Codex CLI module with AgentAPI web chat UI and task reporting #244

Open · wants to merge 6 commits into main
Binary file added registry/krikera/.images/avatar.png
8 changes: 8 additions & 0 deletions registry/krikera/README.md
@@ -0,0 +1,8 @@
---
display_name: Krishna Ketan Rai
bio: I'm a computer science student
github: krikera
avatar: ./.images/avatar.png
website: https://www.krishnaketanrai.tech/
status: community
---
97 changes: 97 additions & 0 deletions registry/krikera/modules/codex/README.md
@@ -0,0 +1,97 @@
---
display_name: "OpenAI Codex"
description: "Rust-based OpenAI Codex CLI with AgentAPI web chat UI and task reporting"
icon: "../../../../.icons/claude.svg"
verified: false
tags: ["ai", "agent", "codex", "openai"]
---

# OpenAI Codex

Runs the Rust-based OpenAI Codex CLI in your workspace, with an AgentAPI web chat UI and full task reporting for the Coder Tasks UI.

```tf
module "codex" {
source = "registry.coder.com/krikera/codex/coder"
version = "1.0.0"
agent_id = coder_agent.example.id
}
```

## Examples

### Basic Usage

```tf
module "codex" {
source = "registry.coder.com/krikera/codex/coder"
version = "1.0.0"
agent_id = coder_agent.example.id
}
```

### Custom Configuration

```tf
module "codex" {
source = "registry.coder.com/krikera/codex/coder"
version = "1.0.0"
agent_id = coder_agent.example.id
openai_model = "gpt-4"
temperature = 0.7
max_tokens = 2048
folder = "/home/coder/workspace"
}
```

### With Custom OpenAI API Key

```tf
module "codex" {
source = "registry.coder.com/krikera/codex/coder"
version = "1.0.0"
agent_id = coder_agent.example.id
openai_api_key = var.openai_api_key
}
```
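
This example references `var.openai_api_key`, which the module does not define for you. A minimal sketch of a template-level declaration, assuming the key is supplied at template push time (for example via `TF_VAR_openai_api_key`):

```tf
variable "openai_api_key" {
  type        = string
  description = "OpenAI API key forwarded to the Codex module"
  sensitive   = true # keeps the key out of plan output and logs
}
```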

### Advanced Configuration

```tf
module "codex" {
source = "registry.coder.com/krikera/codex/coder"
version = "1.0.0"
agent_id = coder_agent.example.id
openai_model = "gpt-4"
temperature = 0.2
max_tokens = 4096
install_codex = true
codex_version = "latest"
pre_install_script = "apt-get update && apt-get install -y build-essential"
folder = "/workspace"
order = 1
group = "AI Tools"
}
```
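
For longer setup steps, `pre_install_script` can also be written as a heredoc. A sketch under the assumption that the variable accepts an arbitrary shell snippet, as the single-line example above suggests (the extra package is purely illustrative):

```tf
module "codex" {
  source   = "registry.coder.com/krikera/codex/coder"
  version  = "1.0.0"
  agent_id = coder_agent.example.id

  # Multi-line pre-install script as a heredoc
  pre_install_script = <<-EOT
    apt-get update
    apt-get install -y build-essential curl
  EOT
}
```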

### With Task Reporting

```tf
data "coder_parameter" "ai_prompt" {
type = "string"
name = "AI Prompt"
default = ""
description = "Write a prompt for the Codex CLI"
mutable = true
}

module "codex" {
source = "registry.coder.com/krikera/codex/coder"
version = "1.0.0"
agent_id = coder_agent.example.id
openai_api_key = var.openai_api_key
ai_prompt = data.coder_parameter.ai_prompt.value
folder = "/home/coder/projects"
}
```
164 changes: 164 additions & 0 deletions registry/krikera/modules/codex/main.test.ts
@@ -0,0 +1,164 @@
import {
test,
afterEach,
expect,
describe,
setDefaultTimeout,
beforeAll,
} from "bun:test";
import { execContainer, runTerraformInit } from "~test";
import {
setupContainer,
loadTestFile,
writeExecutable,
execModuleScript,
expectAgentAPIStarted,
} from "./test-util";

let cleanupFunctions: (() => Promise<void>)[] = [];

const registerCleanup = (cleanup: () => Promise<void>) => {
cleanupFunctions.push(cleanup);
};

afterEach(async () => {
const cleanupFnsCopy = cleanupFunctions.slice().reverse();
cleanupFunctions = [];
for (const cleanup of cleanupFnsCopy) {
try {
await cleanup();
} catch (error) {
console.error("Error during cleanup:", error);
}
}
});

const moduleDir = import.meta.dir;

beforeAll(async () => {
await runTerraformInit(moduleDir);
});

describe("codex", () => {
test("creates codex module with default configuration", async () => {
const { id, coderScript, cleanup } = await setupContainer({
moduleDir,
image: "codercom/enterprise-node:latest",
});
registerCleanup(cleanup);

// Execute the module script to install the mock CLI
const scriptResult = await execModuleScript({
containerId: id,
coderScript,
});
expect(scriptResult.exitCode).toBe(0);

// Test that the module installs correctly
const result = await execContainer(id, ["which", "codex-cli"]);
expect(result.exitCode).toBe(0);
});

test("creates codex module with custom configuration", async () => {
const { id, coderScript, cleanup } = await setupContainer({
moduleDir,
image: "codercom/enterprise-node:latest",
vars: {
openai_model: "gpt-4",
temperature: "0.7",
max_tokens: "2048",
folder: "/workspace",
install_codex: "true",
codex_version: "latest",
order: "1",
group: "AI Tools",
},
});
registerCleanup(cleanup);

// Execute the module script to install the mock CLI
const scriptResult = await execModuleScript({
containerId: id,
coderScript,
});
expect(scriptResult.exitCode).toBe(0);

// Test that the module installs correctly with custom configuration
const result = await execContainer(id, ["which", "codex-cli"]);
expect(result.exitCode).toBe(0);

// Test that configuration is properly set
const configResult = await execContainer(id, ["test", "-f", "/home/coder/.config/codex/config.toml"]);
expect(configResult.exitCode).toBe(0);
});

test("creates codex module with custom API key", async () => {
const { id, coderScript, cleanup } = await setupContainer({
moduleDir,
image: "codercom/enterprise-node:latest",
vars: {
openai_api_key: "sk-test-api-key",
openai_model: "gpt-3.5-turbo",
},
});
registerCleanup(cleanup);

// Execute the module script to install the mock CLI
const scriptResult = await execModuleScript({
containerId: id,
coderScript,
});
expect(scriptResult.exitCode).toBe(0);

// Test that the module installs correctly
const result = await execContainer(id, ["which", "codex-cli"]);
expect(result.exitCode).toBe(0);
});

test("creates codex module with installation disabled", async () => {
const { id, cleanup } = await setupContainer({
moduleDir,
image: "codercom/enterprise-node:latest",
vars: {
install_codex: "false",
},
});
registerCleanup(cleanup);

// Test that codex-cli is not installed when disabled
const result = await execContainer(id, ["which", "codex-cli"]);
expect(result.exitCode).toBe(1);
});

test("validates temperature range", async () => {
// Test with invalid temperature (should fail during terraform plan/apply)
try {
await setupContainer({
moduleDir,
image: "codercom/enterprise-node:latest",
vars: {
temperature: "2.5", // Invalid - should be between 0.0 and 2.0
},
});
expect(true).toBe(false); // Should not reach here
} catch (error) {
expect((error as Error).message).toContain("Temperature must be between 0.0 and 2.0");
}
});

test("validates max_tokens range", async () => {
// Test with invalid max_tokens (should fail during terraform plan/apply)
try {
await setupContainer({
moduleDir,
image: "codercom/enterprise-node:latest",
vars: {
max_tokens: "5000", // Invalid - should be between 1 and 4096
},
});
expect(true).toBe(false); // Should not reach here
} catch (error) {
expect((error as Error).message).toContain("Max tokens must be between 1 and 4096");
}
});
});