diff --git a/registry/krikera/.images/avatar.png b/registry/krikera/.images/avatar.png new file mode 100644 index 00000000..cd4fa762 Binary files /dev/null and b/registry/krikera/.images/avatar.png differ diff --git a/registry/krikera/README.md b/registry/krikera/README.md new file mode 100644 index 00000000..ea601851 --- /dev/null +++ b/registry/krikera/README.md @@ -0,0 +1,8 @@ +--- +display_name: Krishna Ketan Rai +bio: I'm a computer science student +github: krikera +avatar: ./.images/avatar.png +website: https://www.krishnaketanrai.tech/ +status: community +--- diff --git a/registry/krikera/modules/codex/README.md b/registry/krikera/modules/codex/README.md new file mode 100644 index 00000000..a04928c0 --- /dev/null +++ b/registry/krikera/modules/codex/README.md @@ -0,0 +1,97 @@ +--- +display_name: "OpenAI Codex" +description: "Rust-based OpenAI Codex CLI with AgentAPI web chat UI and task reporting" +icon: "../../../../.icons/claude.svg" +verified: false +tags: ["ai", "agent", "codex", "openai"] +--- + +# OpenAI Codex + +A Rust-based OpenAI Codex CLI tool with AgentAPI web chat UI integration and full task reporting support for Coder + Tasks UI. + + +```tf +module "codex" { + source = "registry.coder.com/krikera/codex/coder" + version = "1.0.0" + agent_id = coder_agent.example.id +} +``` + +## Examples + +### Basic Usage + +```tf +module "codex" { + source = "registry.coder.com/krikera/codex/coder" + version = "1.0.0" + agent_id = coder_agent.example.id +} +``` + +### Custom Configuration + +```tf +module "codex" { + source = "registry.coder.com/krikera/codex/coder" + version = "1.0.0" + agent_id = coder_agent.example.id + openai_model = "gpt-4" + temperature = 0.7 + max_tokens = 2048 + folder = "/home/coder/workspace" +} +``` + +### With Custom OpenAI API Key + +```tf +module "codex" { + source = "registry.coder.com/krikera/codex/coder" + version = "1.0.0" + agent_id = coder_agent.example.id + openai_api_key = var.openai_api_key +} +``` + +### Advanced Configuration + +```tf +module "codex" { + source = "registry.coder.com/krikera/codex/coder" + version = "1.0.0" + agent_id = coder_agent.example.id + openai_model = "gpt-4" + temperature = 0.2 + max_tokens = 4096 + install_codex = true + codex_version = "latest" + pre_install_script = "apt-get update && apt-get install -y build-essential" + folder = "/workspace" + order = 1 + group = "AI Tools" +} +``` + +### With Task Reporting + +```tf +data "coder_parameter" "ai_prompt" { + type = "string" + name = "AI Prompt" + default = "" + description = "Write a prompt for the Codex CLI" + mutable = true +} + +module "codex" { + source = "registry.coder.com/krikera/codex/coder" + version = "1.0.0" + agent_id = coder_agent.example.id + openai_api_key = var.openai_api_key + ai_prompt = data.coder_parameter.ai_prompt.value + folder = "/home/coder/projects" +} +``` diff --git a/registry/krikera/modules/codex/main.test.ts b/registry/krikera/modules/codex/main.test.ts new file mode 100644 index 00000000..4c41cba9 --- /dev/null +++ b/registry/krikera/modules/codex/main.test.ts @@ -0,0 +1,164 @@ +import { + test, + afterEach, + expect, + describe, + setDefaultTimeout, + beforeAll, +} from "bun:test"; +import { execContainer, runTerraformInit } from "~test"; +import { + setupContainer, + loadTestFile, + writeExecutable, + execModuleScript, + expectAgentAPIStarted, +} from "./test-util"; + +let cleanupFunctions: (() => Promise)[] = []; + +const registerCleanup = (cleanup: () => Promise) => { + cleanupFunctions.push(cleanup); +}; + +afterEach(async () => 
{ + const cleanupFnsCopy = cleanupFunctions.slice().reverse(); + cleanupFunctions = []; + for (const cleanup of cleanupFnsCopy) { + try { + await cleanup(); + } catch (error) { + console.error("Error during cleanup:", error); + } + } +}); + +const moduleDir = import.meta.dir; + +beforeAll(async () => { + await runTerraformInit(moduleDir); +}); + +describe("codex", () => { + test("creates codex module with default configuration", async () => { + const { id, coderScript, cleanup } = await setupContainer({ + moduleDir, + image: "codercom/enterprise-node:latest", + }); + registerCleanup(cleanup); + + // Execute the module script to install the mock CLI + const scriptResult = await execModuleScript({ + containerId: id, + coderScript, + }); + expect(scriptResult.exitCode).toBe(0); + + // Test that the module installs correctly + const result = await execContainer(id, ["which", "codex-cli"]); + expect(result.exitCode).toBe(0); + }); + + test("creates codex module with custom configuration", async () => { + const { id, coderScript, cleanup } = await setupContainer({ + moduleDir, + image: "codercom/enterprise-node:latest", + vars: { + openai_model: "gpt-4", + temperature: "0.7", + max_tokens: "2048", + folder: "/workspace", + install_codex: "true", + codex_version: "latest", + order: "1", + group: "AI Tools", + }, + }); + registerCleanup(cleanup); + + // Execute the module script to install the mock CLI + const scriptResult = await execModuleScript({ + containerId: id, + coderScript, + }); + expect(scriptResult.exitCode).toBe(0); + + // Test that the module installs correctly with custom configuration + const result = await execContainer(id, ["which", "codex-cli"]); + expect(result.exitCode).toBe(0); + + // Test that configuration is properly set + const configResult = await execContainer(id, ["test", "-f", "/home/coder/.config/codex/config.toml"]); + expect(configResult.exitCode).toBe(0); + }); + + test("creates codex module with custom API key", async () => { + const { id, coderScript, cleanup } = await setupContainer({ + moduleDir, + image: "codercom/enterprise-node:latest", + vars: { + openai_api_key: "sk-test-api-key", + openai_model: "gpt-3.5-turbo", + }, + }); + registerCleanup(cleanup); + + // Execute the module script to install the mock CLI + const scriptResult = await execModuleScript({ + containerId: id, + coderScript, + }); + expect(scriptResult.exitCode).toBe(0); + + // Test that the module installs correctly + const result = await execContainer(id, ["which", "codex-cli"]); + expect(result.exitCode).toBe(0); + }); + + test("creates codex module with installation disabled", async () => { + const { id, cleanup } = await setupContainer({ + moduleDir, + image: "codercom/enterprise-node:latest", + vars: { + install_codex: "false", + }, + }); + registerCleanup(cleanup); + + // Test that codex-cli is not installed when disabled + const result = await execContainer(id, ["which", "codex-cli"]); + expect(result.exitCode).toBe(1); + }); + + test("validates temperature range", async () => { + // Test with invalid temperature (should fail during terraform plan/apply) + try { + await setupContainer({ + moduleDir, + image: "codercom/enterprise-node:latest", + vars: { + temperature: "2.5", // Invalid - should be between 0.0 and 2.0 + }, + }); + expect(true).toBe(false); // Should not reach here + } catch (error) { + expect((error as Error).message).toContain("Temperature must be between 0.0 and 2.0"); + } + }); + + test("validates max_tokens range", async () => { + // Test with invalid max_tokens 
(should fail during terraform plan/apply) + try { + await setupContainer({ + moduleDir, + image: "codercom/enterprise-node:latest", + vars: { + max_tokens: "5000", // Invalid - should be between 1 and 4096 + }, + }); + expect(true).toBe(false); // Should not reach here + } catch (error) { + expect((error as Error).message).toContain("Max tokens must be between 1 and 4096"); + } + }); +}); diff --git a/registry/krikera/modules/codex/main.tf b/registry/krikera/modules/codex/main.tf new file mode 100644 index 00000000..86e74027 --- /dev/null +++ b/registry/krikera/modules/codex/main.tf @@ -0,0 +1,187 @@ +terraform { + required_version = ">= 1.0" + + required_providers { + coder = { + source = "coder/coder" + version = ">= 2.7" + } + } +} + +variable "agent_id" { + type = string + description = "The ID of a Coder agent." +} + +variable "openai_api_key" { + type = string + description = "OpenAI API key for Codex access." + sensitive = true + default = "" +} + +variable "openai_model" { + type = string + description = "OpenAI model to use for code generation." + default = "gpt-4" +} + +variable "temperature" { + type = number + description = "Temperature setting for code generation (0.0 to 2.0)." + default = 0.2 + + validation { + condition = var.temperature >= 0.0 && var.temperature <= 2.0 + error_message = "Temperature must be between 0.0 and 2.0." + } +} + +variable "max_tokens" { + type = number + description = "Maximum number of tokens for code generation." + default = 2048 + + validation { + condition = var.max_tokens > 0 && var.max_tokens <= 4096 + error_message = "Max tokens must be between 1 and 4096." + } +} + +variable "folder" { + type = string + description = "The folder to run Codex in." + default = "/home/coder" +} + +variable "install_codex" { + type = bool + description = "Whether to install Codex CLI." + default = true +} + +variable "codex_version" { + type = string + description = "Version of Codex CLI to install." + default = "latest" +} + +variable "pre_install_script" { + type = string + description = "Custom script to run before installing Codex." + default = null +} + +variable "post_install_script" { + type = string + description = "Custom script to run after installing Codex." + default = null +} + +variable "order" { + type = number + description = "The order determines the position of app in the UI presentation." + default = null +} + +variable "group" { + type = string + description = "The name of a group that this app belongs to." + default = "AI Tools" +} + +variable "ai_prompt" { + type = string + description = "Initial AI prompt for task reporting." 
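+  # Typically fed from a coder_parameter so the Coder Tasks UI can seed the first Codex request,
+  # e.g. ai_prompt = data.coder_parameter.ai_prompt.value — see the "With Task Reporting"
+  # example in this module's README.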
+ default = "" +} + +locals { + app_slug = "codex" + module_dir_name = "codex" + icon_url = "../../../../.icons/claude.svg" + + # Configuration for Codex CLI + codex_config = { + openai_model = var.openai_model + temperature = var.temperature + max_tokens = var.max_tokens + openai_api_key = var.openai_api_key + } + + # Install script for Rust-based Codex CLI + install_script = templatefile("${path.module}/scripts/install.sh", { + CODEX_VERSION = var.codex_version + INSTALL_CODEX = var.install_codex + }) + + # Start script for AgentAPI integration + start_script = templatefile("${path.module}/scripts/start.sh", { + OPENAI_API_KEY = var.openai_api_key + OPENAI_MODEL = var.openai_model + TEMPERATURE = var.temperature + MAX_TOKENS = var.max_tokens + FOLDER = var.folder + AI_PROMPT = var.ai_prompt + RED = "\\033[31m" + GREEN = "\\033[32m" + YELLOW = "\\033[33m" + BOLD = "\\033[1m" + NC = "\\033[0m" + }) +} + +# Use the AgentAPI module for web chat UI and task reporting +module "agentapi" { + source = "registry.coder.com/coder/agentapi/coder" + version = "1.0.0" + + agent_id = var.agent_id + web_app_slug = local.app_slug + web_app_order = var.order + web_app_group = var.group + web_app_icon = local.icon_url + web_app_display_name = "Codex CLI" + cli_app_slug = "codex-cli" + cli_app_display_name = "Codex CLI" + cli_app = true + cli_app_icon = local.icon_url + cli_app_order = var.order + cli_app_group = var.group + module_dir_name = local.module_dir_name + folder = var.folder + pre_install_script = var.pre_install_script + post_install_script = var.post_install_script + start_script = local.start_script + install_script = local.install_script +} + +# Create a workspace app for direct CLI access +resource "coder_app" "codex_terminal" { + agent_id = var.agent_id + slug = "codex-terminal" + display_name = "Codex Terminal" + icon = local.icon_url + order = var.order + group = var.group + command = <<-EOT + #!/bin/bash + set -e + + export LANG=en_US.UTF-8 + export LC_ALL=en_US.UTF-8 + + # Set up environment variables + export OPENAI_API_KEY="${var.openai_api_key}" + export OPENAI_MODEL="${var.openai_model}" + export CODEX_TEMPERATURE="${var.temperature}" + export CODEX_MAX_TOKENS="${var.max_tokens}" + + # Change to the workspace directory + cd "${var.folder}" + + # Start interactive Codex CLI session + codex-cli interactive + EOT +} diff --git a/registry/krikera/modules/codex/run.sh b/registry/krikera/modules/codex/run.sh new file mode 100755 index 00000000..6d9dc517 --- /dev/null +++ b/registry/krikera/modules/codex/run.sh @@ -0,0 +1,82 @@ +#!/usr/bin/env bash +set -o errexit +set -o pipefail + +# Colors for output +BOLD='\033[0;1m' +GREEN='\033[0;32m' +YELLOW='\033[1;33m' +RED='\033[0;31m' +NC='\033[0m' # No Color + +printf "${BOLD}๐Ÿฆ€ OpenAI Codex CLI - Rust-based AI Code Assistant${NC}\n\n" + +# Ensure PATH includes ~/.local/bin +export PATH="$HOME/.local/bin:$PATH" + +# Check if codex-cli is installed +if ! command -v codex-cli &> /dev/null; then + printf "${RED}โŒ Codex CLI not found. Please ensure the module is properly installed.${NC}\n" + printf "${YELLOW}๐Ÿ’ก This should have been installed automatically by the AgentAPI module.${NC}\n" + exit 1 +fi + +# Check if we're in a workspace +if [ -z "$CODER_WORKSPACE_NAME" ]; then + printf "${YELLOW}โš ๏ธ Not running in a Coder workspace. 
Some features may be limited.${NC}\n"
+fi
+
+# Display help information
+printf "${BOLD}📚 Codex CLI Commands:${NC}\n"
+printf " • ${GREEN}codex-cli generate${NC} 'description' - Generate code from description\n"
+printf " • ${GREEN}codex-cli complete${NC} 'partial code' - Complete partial code\n"
+printf " • ${GREEN}codex-cli explain${NC} 'code' - Explain existing code\n"
+printf " • ${GREEN}codex-cli review${NC} 'code' - Review code for issues\n"
+printf " • ${GREEN}codex-cli optimize${NC} 'code' - Optimize code performance\n"
+printf " • ${GREEN}codex-cli debug${NC} 'code' - Help debug code issues\n"
+printf " • ${GREEN}codex-cli test${NC} 'code' - Generate test cases\n"
+printf " • ${GREEN}codex-cli interactive${NC} - Start interactive session\n\n"
+
+printf "${BOLD}🌐 Web Interface:${NC}\n"
+printf " • Access the web chat UI through the Codex app in your Coder workspace\n"
+printf " • Use the integrated Tasks UI for task-based code generation\n"
+printf " • All interactions are logged and reportable through Coder's task system\n\n"
+
+printf "${BOLD}🔧 Configuration:${NC}\n"
+printf " • Config file: ${YELLOW}~/.config/codex/config.toml${NC}\n"
+printf " • Environment variables: ${YELLOW}OPENAI_API_KEY, OPENAI_MODEL, etc.${NC}\n\n"
+
+printf "${BOLD}🚀 Quick Start:${NC}\n"
+printf " 1. Set your OpenAI API key: ${YELLOW}export OPENAI_API_KEY='your-key-here'${NC}\n"
+printf " 2. Try: ${YELLOW}codex-cli generate 'create a hello world function in Python'${NC}\n"
+printf " 3. Or start interactive mode: ${YELLOW}codex-cli interactive${NC}\n\n"
+
+# Show version information
+printf "${BOLD}📦 Version Information:${NC}\n"
+codex-cli --version
+printf "\n"
+
+# Show configuration status
+CONFIG_FILE="$HOME/.config/codex/config.toml"
+if [ -f "$CONFIG_FILE" ]; then
+  printf "${GREEN}✅ Configuration file found${NC}\n"
+  printf "${BOLD}🔧 Current settings:${NC}\n"
+  if command -v toml &> /dev/null; then
+    toml get "$CONFIG_FILE" openai.model 2>/dev/null || echo "  Model: (default)"
+    toml get "$CONFIG_FILE" openai.temperature 2>/dev/null || echo "  Temperature: (default)"
+  else
+    printf "  Model: $(grep 'model =' "$CONFIG_FILE" | cut -d'"' -f2 2>/dev/null || echo '(default)')\n"
+    printf "  Temperature: $(grep 'temperature =' "$CONFIG_FILE" | cut -d'=' -f2 | xargs 2>/dev/null || echo '(default)')\n"
+  fi
+else
+  printf "${YELLOW}⚠️ Configuration file not found. Using defaults.${NC}\n"
+fi
+
+# Check API key status
+if [ -n "$OPENAI_API_KEY" ]; then
+  printf "${GREEN}✅ OpenAI API key is set${NC}\n"
+else
+  printf "${YELLOW}⚠️ OpenAI API key not set. Set it with: export OPENAI_API_KEY='your-key'${NC}\n"
+fi
+
+printf "\n${GREEN}🎉 Codex CLI is ready! 
Use the web interface or CLI commands above.${NC}\n" diff --git a/registry/krikera/modules/codex/scripts/install.sh b/registry/krikera/modules/codex/scripts/install.sh new file mode 100755 index 00000000..2a1c7ca9 --- /dev/null +++ b/registry/krikera/modules/codex/scripts/install.sh @@ -0,0 +1,194 @@ +#!/bin/bash +set -o errexit +set -o pipefail + +# Template variables +CODEX_VERSION="${CODEX_VERSION}" +INSTALL_CODEX="${INSTALL_CODEX}" + +# Colors for output +BOLD='\033[0;1m' +GREEN='\033[0;32m' +YELLOW='\033[1;33m' +RED='\033[0;31m' +NC='\033[0m' # No Color + +printf "$${BOLD}๐Ÿฆ€ Installing Rust-based OpenAI Codex CLI...\n\n$${NC}" + +# Skip installation if not requested +if [ "$INSTALL_CODEX" != "true" ]; then + printf "$${YELLOW}โš ๏ธ Codex installation skipped (install_codex = false)$${NC}\n" + exit 0 +fi + +# For testing purposes, check if we should use a mock CLI +if [ -n "$CODEX_TEST_MODE" ] && [ "$CODEX_TEST_MODE" = "true" ]; then + printf "$${YELLOW}๐Ÿงช Test mode detected, using mock Codex CLI$${NC}\n" + + # Create mock codex-cli + mkdir -p "$HOME/.local/bin" + + # Use the mock script from testdata if available + if [ -f "$(dirname "$0")/../testdata/mock-codex-cli.sh" ]; then + cp "$(dirname "$0")/../testdata/mock-codex-cli.sh" "$HOME/.local/bin/codex-cli" + else + # Fallback mock script + cat > "$HOME/.local/bin/codex-cli" << 'MOCK_EOF' +#!/bin/bash +case "$1" in + --version) echo "codex-cli version 1.0.0 (mock)"; exit 0 ;; + *) echo "Mock Codex CLI: $*"; exit 0 ;; +esac +MOCK_EOF + fi + + chmod +x "$HOME/.local/bin/codex-cli" + + # Make sure ~/.local/bin is in PATH + if ! echo "$PATH" | grep -q "$HOME/.local/bin"; then + echo 'export PATH="$HOME/.local/bin:$PATH"' >> "$HOME/.bashrc" + echo 'export PATH="$HOME/.local/bin:$PATH"' >> "$HOME/.zshrc" 2>/dev/null || true + export PATH="$HOME/.local/bin:$PATH" + fi + + printf "$${GREEN}โœ… Mock Codex CLI installed successfully!$${NC}\n" + printf "$${GREEN}๐ŸŽ‰ Test mode installation complete!$${NC}\n" + exit 0 +fi + +# Check if Rust is installed, install if not +if ! command -v rustc &> /dev/null; then + printf "$${YELLOW}๐Ÿ“ฆ Rust not found, installing Rust...$${NC}\n" + curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y + source "$HOME/.cargo/env" + printf "$${GREEN}โœ… Rust installed successfully$${NC}\n\n" +else + printf "$${GREEN}โœ… Rust already installed$${NC}\n\n" +fi + +# Ensure we have the latest stable Rust +rustup update stable +rustup default stable + +# Install required system dependencies +printf "$${BOLD}๐Ÿ“ฆ Installing system dependencies...$${NC}\n" +if command -v apt-get &> /dev/null; then + sudo apt-get update -qq + sudo apt-get install -y -qq \ + build-essential \ + pkg-config \ + libssl-dev \ + libclang-dev \ + curl \ + git \ + ca-certificates +elif command -v yum &> /dev/null; then + sudo yum install -y \ + gcc \ + gcc-c++ \ + make \ + pkgconfig \ + openssl-devel \ + clang-devel \ + curl \ + git \ + ca-certificates +elif command -v apk &> /dev/null; then + sudo apk add --no-cache \ + build-base \ + pkgconfig \ + openssl-dev \ + clang-dev \ + curl \ + git \ + ca-certificates +else + printf "$${RED}โŒ Unsupported package manager. 
Please install build dependencies manually.$${NC}\n" + exit 1 +fi + +printf "$${GREEN}โœ… System dependencies installed$${NC}\n\n" + +# Create codex directory +CODEX_DIR="$HOME/.local/share/codex" +mkdir -p "$CODEX_DIR" +cd "$CODEX_DIR" + +# Clone or update the Codex CLI repository +CODEX_REPO="https://github.com/krikera/codex-cli.git" +if [ -d "codex-cli" ]; then + printf "$${BOLD}๐Ÿ”„ Updating existing Codex CLI...$${NC}\n" + cd codex-cli + git fetch origin + if [ "$CODEX_VERSION" = "latest" ]; then + git checkout main + git pull origin main + else + git checkout "v$CODEX_VERSION" + fi +else + printf "$${BOLD}๐Ÿ“ฅ Cloning Codex CLI repository...$${NC}\n" + if [ "$CODEX_VERSION" = "latest" ]; then + git clone "$CODEX_REPO" codex-cli + else + git clone --branch "v$CODEX_VERSION" "$CODEX_REPO" codex-cli + fi + cd codex-cli +fi + +# Build the Rust project +printf "$${BOLD}๐Ÿ”จ Building Codex CLI (this may take a few minutes)...$${NC}\n" +cargo build --release + +# Install the binary +printf "$${BOLD}๐Ÿ“ฆ Installing Codex CLI...$${NC}\n" +mkdir -p "$HOME/.local/bin" +cp target/release/codex-cli "$HOME/.local/bin/" + +# Make sure ~/.local/bin is in PATH +if ! echo "$PATH" | grep -q "$HOME/.local/bin"; then + echo 'export PATH="$HOME/.local/bin:$PATH"' >> "$HOME/.bashrc" + echo 'export PATH="$HOME/.local/bin:$PATH"' >> "$HOME/.zshrc" 2>/dev/null || true + export PATH="$HOME/.local/bin:$PATH" +fi + +# Create configuration directory +mkdir -p "$HOME/.config/codex" + +# Create default configuration file +cat > "$HOME/.config/codex/config.toml" << EOF +[openai] +model = "gpt-4" +temperature = 0.2 +max_tokens = 2048 + +[codex] +auto_save = true +show_thinking = true +verbose = false + +[ui] +theme = "dark" +highlight_syntax = true +EOF + +printf "$${GREEN}โœ… Codex CLI installed successfully!$${NC}\n\n" + +# Verify installation +if command -v codex-cli &> /dev/null; then + printf "$${GREEN}๐ŸŽ‰ Installation verification successful!$${NC}\n" + printf "$${BOLD}๐Ÿ“ Codex CLI version: $${NC}" + codex-cli --version + printf "\n$${BOLD}๐Ÿ“ Configuration directory: $${NC}$HOME/.config/codex\n" + printf "$${BOLD}๐Ÿ”ง Binary location: $${NC}$HOME/.local/bin/codex-cli\n\n" +else + printf "$${RED}โŒ Installation verification failed. 
Please check the installation.$${NC}\n" + exit 1 +fi + +printf "$${GREEN}๐Ÿš€ Codex CLI is ready to use!$${NC}\n" +printf "$${BOLD}๐Ÿ’ก Usage examples:$${NC}\n" +printf " โ€ข $${YELLOW}codex-cli generate 'create a fibonacci function in Python'$${NC}\n" +printf " โ€ข $${YELLOW}codex-cli complete 'def fibonacci(n):'$${NC}\n" +printf " โ€ข $${YELLOW}codex-cli explain 'explain this code: def fib(n): return n if n <= 1 else fib(n-1) + fib(n-2)'$${NC}\n" +printf " โ€ข $${YELLOW}codex-cli interactive$${NC}\n\n" diff --git a/registry/krikera/modules/codex/scripts/start.sh b/registry/krikera/modules/codex/scripts/start.sh new file mode 100755 index 00000000..f64dd19e --- /dev/null +++ b/registry/krikera/modules/codex/scripts/start.sh @@ -0,0 +1,219 @@ +#!/bin/bash +set -o errexit +set -o pipefail + +# Template variables +OPENAI_API_KEY="${OPENAI_API_KEY}" +OPENAI_MODEL="${OPENAI_MODEL}" +TEMPERATURE="${TEMPERATURE}" +MAX_TOKENS="${MAX_TOKENS}" +FOLDER="${FOLDER}" +AI_PROMPT="${AI_PROMPT}" + +# AgentAPI parameters +USE_AGENTAPI="$${1:-true}" +AGENTAPI_PORT="$${2:-3284}" + +# Colors for output +BOLD='\033[0;1m' +GREEN='\033[0;32m' +YELLOW='\033[1;33m' +RED='\033[0;31m' +NC='\033[0m' # No Color + +printf "$${BOLD}๐Ÿš€ Starting Codex CLI with AgentAPI integration...$${NC}\n\n" + +# Set up environment variables +export OPENAI_API_KEY="$OPENAI_API_KEY" +export OPENAI_MODEL="$OPENAI_MODEL" +export CODEX_TEMPERATURE="$TEMPERATURE" +export CODEX_MAX_TOKENS="$MAX_TOKENS" +export CODEX_FOLDER="$FOLDER" +export CODEX_AI_PROMPT="$AI_PROMPT" + +# Ensure PATH includes ~/.local/bin +export PATH="$HOME/.local/bin:$PATH" + +# Check if codex-cli is installed +if ! command -v codex-cli &> /dev/null; then + printf "$${RED}โŒ Codex CLI not found. Please ensure it's installed.$${NC}\n" + exit 1 +fi + +# Check if OpenAI API key is set +if [ -z "$OPENAI_API_KEY" ]; then + printf "$${YELLOW}โš ๏ธ OpenAI API key not set. 
Using default configuration.$${NC}\n" +fi + +# Update configuration with environment variables +CONFIG_FILE="$HOME/.config/codex/config.toml" +mkdir -p "$HOME/.config/codex" +cat > "$CONFIG_FILE" << EOF +[openai] +model = "$OPENAI_MODEL" +temperature = $TEMPERATURE +max_tokens = $MAX_TOKENS + +[codex] +auto_save = true +show_thinking = true +verbose = false +working_directory = "$FOLDER" + +[ui] +theme = "dark" +highlight_syntax = true + +[agentapi] +enabled = $USE_AGENTAPI +port = $AGENTAPI_PORT +host = "localhost" +EOF + +printf "${GREEN}โœ… Configuration updated${NC}\n" + +# Handle AI prompt for task reporting +if [ -n "$AI_PROMPT" ]; then + printf "${YELLOW}๐Ÿ“ Setting up AI prompt for task reporting...${NC}\n" + echo -n "$AI_PROMPT" > /tmp/codex-prompt.txt + printf "${GREEN}โœ… AI prompt configured${NC}\n" +fi + +# Change to the working directory +cd "$FOLDER" + +# Create AgentAPI bridge script +BRIDGE_SCRIPT="$HOME/.local/bin/codex-agentapi-bridge" +cat > "$BRIDGE_SCRIPT" << 'BRIDGE_EOF' +#!/bin/bash +set -e + +# Environment setup +export PATH="$HOME/.local/bin:$PATH" +export LANG=en_US.UTF-8 +export LC_ALL=en_US.UTF-8 + +# Function to handle different types of requests +handle_request() { + local request_type="$1" + local content="$2" + + case "$request_type" in + "generate") + codex-cli generate "$content" + ;; + "complete") + codex-cli complete "$content" + ;; + "explain") + codex-cli explain "$content" + ;; + "review") + codex-cli review "$content" + ;; + "optimize") + codex-cli optimize "$content" + ;; + "debug") + codex-cli debug "$content" + ;; + "test") + codex-cli test "$content" + ;; + "interactive") + codex-cli interactive + ;; + *) + # Default to generate for unknown request types + codex-cli generate "$content" + ;; + esac +} + +# Main execution +if [ $# -eq 0 ]; then + # No arguments - start interactive mode + handle_request "interactive" "" +else + # Use first argument as command, rest as content + handle_request "$1" "$${*:2}" +fi +BRIDGE_EOF + +chmod +x "$BRIDGE_SCRIPT" + +printf "${GREEN}โœ… AgentAPI bridge configured${NC}\n\n" + +# Test the installation +printf "${BOLD}๐Ÿงช Testing Codex CLI...${NC}\n" +if codex-cli --version >/dev/null 2>&1; then + printf "${GREEN}โœ… Codex CLI is responding correctly${NC}\n" +else + printf "${RED}โŒ Codex CLI test failed${NC}\n" + exit 1 +fi + +printf "\n${GREEN}๐ŸŽ‰ Codex CLI is ready for AgentAPI integration!${NC}\n" +printf "${BOLD}๐Ÿ“š Available commands:${NC}\n" +printf " โ€ข ${YELLOW}generate${NC} - Generate code from description\n" +printf " โ€ข ${YELLOW}complete${NC} - Complete partial code\n" +printf " โ€ข ${YELLOW}explain${NC} - Explain existing code\n" +printf " โ€ข ${YELLOW}review${NC} - Review code for issues\n" +printf " โ€ข ${YELLOW}optimize${NC} - Optimize code performance\n" +printf " โ€ข ${YELLOW}debug${NC} - Help debug code issues\n" +printf " โ€ข ${YELLOW}test${NC} - Generate test cases\n" +printf " โ€ข ${YELLOW}interactive${NC} - Start interactive session\n\n" + +# Start the AgentAPI server +if [ "$USE_AGENTAPI" = "true" ]; then + printf "${BOLD}๐Ÿ”„ Starting AgentAPI server on port $AGENTAPI_PORT...${NC}\n" + + # Create a simple AgentAPI configuration for Codex + cat > "$HOME/.config/codex/agentapi.json" << JSON_EOF +{ + "name": "Codex CLI", + "version": "1.0.0", + "description": "Rust-based OpenAI Codex CLI with AgentAPI integration", + "commands": { + "generate": { + "description": "Generate code from description", + "handler": "codex-agentapi-bridge" + }, + "complete": { + "description": "Complete 
partial code", + "handler": "codex-agentapi-bridge" + }, + "explain": { + "description": "Explain existing code", + "handler": "codex-agentapi-bridge" + }, + "review": { + "description": "Review code for issues", + "handler": "codex-agentapi-bridge" + }, + "optimize": { + "description": "Optimize code performance", + "handler": "codex-agentapi-bridge" + }, + "debug": { + "description": "Help debug code issues", + "handler": "codex-agentapi-bridge" + }, + "test": { + "description": "Generate test cases", + "handler": "codex-agentapi-bridge" + }, + "interactive": { + "description": "Start interactive session", + "handler": "codex-agentapi-bridge" + } + } +} +JSON_EOF + + # Start AgentAPI with our configuration + exec agentapi --config "$HOME/.config/codex/agentapi.json" --port "$AGENTAPI_PORT" --handler "$BRIDGE_SCRIPT" +else + printf "${YELLOW}โš ๏ธ AgentAPI disabled. Running in standalone mode.${NC}\n" + exec codex-cli interactive +fi diff --git a/registry/krikera/modules/codex/scripts/test-util.sh b/registry/krikera/modules/codex/scripts/test-util.sh new file mode 100755 index 00000000..f0f6beb8 --- /dev/null +++ b/registry/krikera/modules/codex/scripts/test-util.sh @@ -0,0 +1,64 @@ +#!/bin/bash +# Test utility for Codex CLI module + +set -e + +# Colors for output +BOLD='\033[0;1m' +GREEN='\033[0;32m' +YELLOW='\033[1;33m' +RED='\033[0;31m' +NC='\033[0m' # No Color + +echo -e "${BOLD}๐Ÿงช Testing Codex CLI Module...${NC}" + +# Test 1: Check if configuration is properly created +echo -e "${YELLOW}Test 1: Configuration creation${NC}" +if [ -f "$HOME/.config/codex/config.toml" ]; then + echo -e "${GREEN}โœ… Configuration file exists${NC}" +else + echo -e "${RED}โŒ Configuration file missing${NC}" + exit 1 +fi + +# Test 2: Check if scripts are executable +echo -e "${YELLOW}Test 2: Script permissions${NC}" +if [ -x "$HOME/.local/bin/codex-agentapi-bridge" ]; then + echo -e "${GREEN}โœ… Bridge script is executable${NC}" +else + echo -e "${RED}โŒ Bridge script missing or not executable${NC}" + exit 1 +fi + +# Test 3: Check if AgentAPI configuration is created +echo -e "${YELLOW}Test 3: AgentAPI configuration${NC}" +if [ -f "$HOME/.config/codex/agentapi.json" ]; then + echo -e "${GREEN}โœ… AgentAPI configuration exists${NC}" +else + echo -e "${RED}โŒ AgentAPI configuration missing${NC}" + exit 1 +fi + +# Test 4: Mock API call test +echo -e "${YELLOW}Test 4: Mock API response${NC}" +if command -v jq &> /dev/null; then + echo '{"type": "generate", "content": "hello world"}' | jq . 
> /tmp/test_input.json + if [ -f /tmp/test_input.json ]; then + echo -e "${GREEN}โœ… JSON parsing works${NC}" + else + echo -e "${RED}โŒ JSON parsing failed${NC}" + exit 1 + fi +else + echo -e "${YELLOW}โš ๏ธ jq not available, skipping JSON test${NC}" +fi + +# Test 5: Environment variable test +echo -e "${YELLOW}Test 5: Environment variables${NC}" +if [ -n "$OPENAI_MODEL" ]; then + echo -e "${GREEN}โœ… OPENAI_MODEL is set to: $OPENAI_MODEL${NC}" +else + echo -e "${YELLOW}โš ๏ธ OPENAI_MODEL not set, using default${NC}" +fi + +echo -e "\n${GREEN}๐ŸŽ‰ All tests passed!${NC}" diff --git a/registry/krikera/modules/codex/test-util.ts b/registry/krikera/modules/codex/test-util.ts new file mode 100644 index 00000000..bf4fceb2 --- /dev/null +++ b/registry/krikera/modules/codex/test-util.ts @@ -0,0 +1,132 @@ +import { + execContainer, + findResourceInstance, + removeContainer, + runContainer, + runTerraformApply, + writeFileContainer, +} from "~test"; +import path from "path"; +import { expect } from "bun:test"; + +export const setupContainer = async ({ + moduleDir, + image, + vars, +}: { + moduleDir: string; + image?: string; + vars?: Record; +}) => { + const state = await runTerraformApply(moduleDir, { + agent_id: "foo", + ...vars, + }); + const coderScript = findResourceInstance(state, "coder_script"); + const id = await runContainer(image ?? "codercom/enterprise-node:latest"); + return { id, coderScript, cleanup: () => removeContainer(id) }; +}; + +export const loadTestFile = async ( + moduleDir: string, + ...relativePath: [string, ...string[]] +) => { + return await Bun.file( + path.join(moduleDir, "testdata", ...relativePath), + ).text(); +}; + +export const writeExecutable = async ({ + containerId, + filePath, + content, +}: { + containerId: string; + filePath: string; + content: string; +}) => { + await writeFileContainer(containerId, filePath, content, { + user: "root", + }); + await execContainer(containerId, ["chmod", "+x", filePath], ["--user", "root"]); +}; + +export const execModuleScript = async ({ + containerId, + coderScript, + userArgs, +}: { + containerId: string; + coderScript: { script: string }; + userArgs?: string[]; +}) => { + const scriptPath = "/tmp/module_script.sh"; + await writeExecutable({ + containerId, + filePath: scriptPath, + content: coderScript.script, + }); + return await execContainer(containerId, [scriptPath, ...(userArgs ?? 
[])]); +}; + +export const expectAgentAPIStarted = async ({ + containerId, + port = 3284, + timeout = 30000, +}: { + containerId: string; + port?: number; + timeout?: number; +}) => { + const startTime = Date.now(); + while (Date.now() - startTime < timeout) { + const result = await execContainer(containerId, [ + "curl", + "-f", + "-s", + "-o", + "/dev/null", + `http://localhost:${port}/status`, + ]); + if (result.exitCode === 0) { + return; + } + await new Promise((resolve) => setTimeout(resolve, 1000)); + } + throw new Error(`AgentAPI did not start within ${timeout}ms`); +}; + +export const expectCodexCLIInstalled = async ({ + containerId, +}: { + containerId: string; +}) => { + const result = await execContainer(containerId, ["which", "codex-cli"]); + expect(result.exitCode).toBe(0); +}; + +export const expectCodexConfigExists = async ({ + containerId, +}: { + containerId: string; +}) => { + const result = await execContainer(containerId, [ + "test", + "-f", + "/home/coder/.config/codex/config.toml", + ]); + expect(result.exitCode).toBe(0); +}; + +export const expectCodexAgentAPIBridgeExists = async ({ + containerId, +}: { + containerId: string; +}) => { + const result = await execContainer(containerId, [ + "test", + "-f", + "/home/coder/.local/bin/codex-agentapi-bridge", + ]); + expect(result.exitCode).toBe(0); +}; diff --git a/registry/krikera/modules/codex/testdata/mock-codex-cli.sh b/registry/krikera/modules/codex/testdata/mock-codex-cli.sh new file mode 100755 index 00000000..cf75222a --- /dev/null +++ b/registry/krikera/modules/codex/testdata/mock-codex-cli.sh @@ -0,0 +1,63 @@ +#!/bin/bash + +# Mock Codex CLI for testing purposes +# This script simulates the behavior of the actual codex-cli for testing + +case "$1" in + "--version") + echo "codex-cli 1.0.0" + ;; + "complete") + # Mock completion response + cat << 'EOF' +{ + "id": "cmpl-test123", + "object": "text_completion", + "created": 1234567890, + "model": "gpt-3.5-turbo-instruct", + "choices": [ + { + "text": "\n\nThis is a mock completion response for testing purposes. The actual CLI would interact with OpenAI's API to generate code completions based on the provided prompt.", + "index": 0, + "finish_reason": "length" + } + ], + "usage": { + "prompt_tokens": 10, + "completion_tokens": 25, + "total_tokens": 35 + } +} +EOF + ;; + "chat") + # Mock chat response + cat << 'EOF' +{ + "id": "chatcmpl-test123", + "object": "chat.completion", + "created": 1234567890, + "model": "gpt-3.5-turbo", + "choices": [ + { + "message": { + "role": "assistant", + "content": "This is a mock chat response for testing. The actual CLI would provide interactive code assistance and explanations." + }, + "index": 0, + "finish_reason": "stop" + } + ], + "usage": { + "prompt_tokens": 15, + "completion_tokens": 20, + "total_tokens": 35 + } +} +EOF + ;; + *) + echo "Mock Codex CLI - Available commands: complete, chat, --version" + echo "This is a test mock. In production, this would be replaced with the actual Rust CLI." + ;; +esac