From 2893f9fe5b6d93c5c0c185ca755102681eaefd05 Mon Sep 17 00:00:00 2001
From: Krishna Ketan Rai
Date: Sun, 20 Jul 2025 20:16:50 +0530
Subject: [PATCH 1/6] feat: Add OpenAI Codex CLI module with AgentAPI web chat
 UI and task reporting

- Implemented Rust-based OpenAI Codex CLI module for Coder Registry
- Added AgentAPI integration for web-based chat UI interface
- Included full task reporting support for Coder + Tasks UI
- Features: code generation, completion, explanation, and interactive chat

Addresses issue #236 - OpenAI Codex CLI module with task reporting
---
 registry/krikera/.images/avatar.png           | Bin 0 -> 13807 bytes
 registry/krikera/README.md                    |   8 +
 registry/krikera/modules/codex/README.md      | 106 +++++++++
 registry/krikera/modules/codex/main.test.ts   | 164 +++++++++++++
 registry/krikera/modules/codex/main.tf        | 187 +++++++++++++++
 registry/krikera/modules/codex/run.sh         |  82 +++++++
 .../krikera/modules/codex/scripts/install.sh  | 194 ++++++++++++++++
 .../krikera/modules/codex/scripts/start.sh    | 219 ++++++++++++++++++
 .../modules/codex/scripts/test-util.sh        |  64 +++++
 registry/krikera/modules/codex/test-util.ts   | 132 +++++++++++
 .../modules/codex/testdata/mock-codex-cli.sh  |  63 +++++
 11 files changed, 1219 insertions(+)
 create mode 100644 registry/krikera/.images/avatar.png
 create mode 100644 registry/krikera/README.md
 create mode 100644 registry/krikera/modules/codex/README.md
 create mode 100644 registry/krikera/modules/codex/main.test.ts
 create mode 100644 registry/krikera/modules/codex/main.tf
 create mode 100755 registry/krikera/modules/codex/run.sh
 create mode 100755 registry/krikera/modules/codex/scripts/install.sh
 create mode 100755 registry/krikera/modules/codex/scripts/start.sh
 create mode 100755 registry/krikera/modules/codex/scripts/test-util.sh
 create mode 100644 registry/krikera/modules/codex/test-util.ts
 create mode 100755 registry/krikera/modules/codex/testdata/mock-codex-cli.sh

diff --git a/registry/krikera/.images/avatar.png b/registry/krikera/.images/avatar.png
new file mode 100644
index 0000000000000000000000000000000000000000..cd4fa762c089c07b32a5992fc1f5d5dd88979a1d
GIT binary patch
literal 13807
[13807 bytes of base85-encoded PNG data omitted]

literal 0
HcmV?d00001

diff --git a/registry/krikera/README.md b/registry/krikera/README.md
new file mode 100644
index 00000000..ea601851
--- /dev/null
+++ b/registry/krikera/README.md
@@ -0,0 +1,8 @@
+---
+display_name: Krishna Ketan Rai
+bio: I'm a computer science student
+github: krikera
+avatar: ./.images/avatar.png
+website: https://www.krishnaketanrai.tech/
+status: community
+---
diff --git a/registry/krikera/modules/codex/README.md b/registry/krikera/modules/codex/README.md
new file mode 100644
index 00000000..a6bc9638
--- /dev/null
+++ b/registry/krikera/modules/codex/README.md
@@ -0,0 +1,106 @@
+---
+display_name: "OpenAI Codex CLI"
+description: "Rust-based OpenAI Codex CLI with AgentAPI web chat UI and task reporting"
+icon: "../../../../.icons/claude.svg"
+maintainer_github: "krikera"
+verified: false
+tags: ["ai", "assistant", "codex", "openai", "rust", "tasks"]
+---
+
+# OpenAI Codex CLI
+
+A Rust-based OpenAI Codex CLI tool with AgentAPI web chat UI integration and full task reporting support for Coder + Tasks UI.
+ +## Features + +- **Rust-based CLI**: High-performance Rust implementation of OpenAI Codex +- **Web Chat UI**: Interactive web interface through AgentAPI integration +- **Task Reporting**: Full integration with Coder Tasks UI +- **Code Generation**: Generate code from natural language descriptions +- **Code Completion**: Smart code completion and suggestions +- **Code Explanation**: Get explanations for existing code + +```tf +module "codex" { + source = "registry.coder.com/krikera/codex/coder" + version = "1.0.0" + agent_id = coder_agent.example.id +} +``` + +## Examples + +### Basic Usage + +```tf +module "codex" { + source = "registry.coder.com/krikera/codex/coder" + version = "1.0.0" + agent_id = coder_agent.example.id +} +``` + +### Custom Configuration + +```tf +module "codex" { + source = "registry.coder.com/krikera/codex/coder" + version = "1.0.0" + agent_id = coder_agent.example.id + openai_model = "gpt-4" + temperature = 0.7 + max_tokens = 2048 + folder = "/home/coder/workspace" +} +``` + +### With Custom OpenAI API Key + +```tf +module "codex" { + source = "registry.coder.com/krikera/codex/coder" + version = "1.0.0" + agent_id = coder_agent.example.id + openai_api_key = var.openai_api_key +} +``` + +### Advanced Configuration + +```tf +module "codex" { + source = "registry.coder.com/krikera/codex/coder" + version = "1.0.0" + agent_id = coder_agent.example.id + openai_model = "gpt-4" + temperature = 0.2 + max_tokens = 4096 + install_codex = true + codex_version = "latest" + pre_install_script = "apt-get update && apt-get install -y build-essential" + folder = "/workspace" + order = 1 + group = "AI Tools" +} +``` + +### With Task Reporting + +```tf +data "coder_parameter" "ai_prompt" { + type = "string" + name = "AI Prompt" + default = "" + description = "Write a prompt for the Codex CLI" + mutable = true +} + +module "codex" { + source = "registry.coder.com/krikera/codex/coder" + version = "1.0.0" + agent_id = coder_agent.example.id + openai_api_key = var.openai_api_key + ai_prompt = data.coder_parameter.ai_prompt.value + folder = "/home/coder/projects" +} +``` diff --git a/registry/krikera/modules/codex/main.test.ts b/registry/krikera/modules/codex/main.test.ts new file mode 100644 index 00000000..4c41cba9 --- /dev/null +++ b/registry/krikera/modules/codex/main.test.ts @@ -0,0 +1,164 @@ +import { + test, + afterEach, + expect, + describe, + setDefaultTimeout, + beforeAll, +} from "bun:test"; +import { execContainer, runTerraformInit } from "~test"; +import { + setupContainer, + loadTestFile, + writeExecutable, + execModuleScript, + expectAgentAPIStarted, +} from "./test-util"; + +let cleanupFunctions: (() => Promise)[] = []; + +const registerCleanup = (cleanup: () => Promise) => { + cleanupFunctions.push(cleanup); +}; + +afterEach(async () => { + const cleanupFnsCopy = cleanupFunctions.slice().reverse(); + cleanupFunctions = []; + for (const cleanup of cleanupFnsCopy) { + try { + await cleanup(); + } catch (error) { + console.error("Error during cleanup:", error); + } + } +}); + +const moduleDir = import.meta.dir; + +beforeAll(async () => { + await runTerraformInit(moduleDir); +}); + +describe("codex", () => { + test("creates codex module with default configuration", async () => { + const { id, coderScript, cleanup } = await setupContainer({ + moduleDir, + image: "codercom/enterprise-node:latest", + }); + registerCleanup(cleanup); + + // Execute the module script to install the mock CLI + const scriptResult = await execModuleScript({ + containerId: id, + coderScript, + }); + 
expect(scriptResult.exitCode).toBe(0); + + // Test that the module installs correctly + const result = await execContainer(id, ["which", "codex-cli"]); + expect(result.exitCode).toBe(0); + }); + + test("creates codex module with custom configuration", async () => { + const { id, coderScript, cleanup } = await setupContainer({ + moduleDir, + image: "codercom/enterprise-node:latest", + vars: { + openai_model: "gpt-4", + temperature: "0.7", + max_tokens: "2048", + folder: "/workspace", + install_codex: "true", + codex_version: "latest", + order: "1", + group: "AI Tools", + }, + }); + registerCleanup(cleanup); + + // Execute the module script to install the mock CLI + const scriptResult = await execModuleScript({ + containerId: id, + coderScript, + }); + expect(scriptResult.exitCode).toBe(0); + + // Test that the module installs correctly with custom configuration + const result = await execContainer(id, ["which", "codex-cli"]); + expect(result.exitCode).toBe(0); + + // Test that configuration is properly set + const configResult = await execContainer(id, ["test", "-f", "/home/coder/.config/codex/config.toml"]); + expect(configResult.exitCode).toBe(0); + }); + + test("creates codex module with custom API key", async () => { + const { id, coderScript, cleanup } = await setupContainer({ + moduleDir, + image: "codercom/enterprise-node:latest", + vars: { + openai_api_key: "sk-test-api-key", + openai_model: "gpt-3.5-turbo", + }, + }); + registerCleanup(cleanup); + + // Execute the module script to install the mock CLI + const scriptResult = await execModuleScript({ + containerId: id, + coderScript, + }); + expect(scriptResult.exitCode).toBe(0); + + // Test that the module installs correctly + const result = await execContainer(id, ["which", "codex-cli"]); + expect(result.exitCode).toBe(0); + }); + + test("creates codex module with installation disabled", async () => { + const { id, cleanup } = await setupContainer({ + moduleDir, + image: "codercom/enterprise-node:latest", + vars: { + install_codex: "false", + }, + }); + registerCleanup(cleanup); + + // Test that codex-cli is not installed when disabled + const result = await execContainer(id, ["which", "codex-cli"]); + expect(result.exitCode).toBe(1); + }); + + test("validates temperature range", async () => { + // Test with invalid temperature (should fail during terraform plan/apply) + try { + await setupContainer({ + moduleDir, + image: "codercom/enterprise-node:latest", + vars: { + temperature: "2.5", // Invalid - should be between 0.0 and 2.0 + }, + }); + expect(true).toBe(false); // Should not reach here + } catch (error) { + expect((error as Error).message).toContain("Temperature must be between 0.0 and 2.0"); + } + }); + + test("validates max_tokens range", async () => { + // Test with invalid max_tokens (should fail during terraform plan/apply) + try { + await setupContainer({ + moduleDir, + image: "codercom/enterprise-node:latest", + vars: { + max_tokens: "5000", // Invalid - should be between 1 and 4096 + }, + }); + expect(true).toBe(false); // Should not reach here + } catch (error) { + expect((error as Error).message).toContain("Max tokens must be between 1 and 4096"); + } + }); +}); diff --git a/registry/krikera/modules/codex/main.tf b/registry/krikera/modules/codex/main.tf new file mode 100644 index 00000000..86e74027 --- /dev/null +++ b/registry/krikera/modules/codex/main.tf @@ -0,0 +1,187 @@ +terraform { + required_version = ">= 1.0" + + required_providers { + coder = { + source = "coder/coder" + version = ">= 2.7" + } + } +} + 
+variable "agent_id" { + type = string + description = "The ID of a Coder agent." +} + +variable "openai_api_key" { + type = string + description = "OpenAI API key for Codex access." + sensitive = true + default = "" +} + +variable "openai_model" { + type = string + description = "OpenAI model to use for code generation." + default = "gpt-4" +} + +variable "temperature" { + type = number + description = "Temperature setting for code generation (0.0 to 2.0)." + default = 0.2 + + validation { + condition = var.temperature >= 0.0 && var.temperature <= 2.0 + error_message = "Temperature must be between 0.0 and 2.0." + } +} + +variable "max_tokens" { + type = number + description = "Maximum number of tokens for code generation." + default = 2048 + + validation { + condition = var.max_tokens > 0 && var.max_tokens <= 4096 + error_message = "Max tokens must be between 1 and 4096." + } +} + +variable "folder" { + type = string + description = "The folder to run Codex in." + default = "/home/coder" +} + +variable "install_codex" { + type = bool + description = "Whether to install Codex CLI." + default = true +} + +variable "codex_version" { + type = string + description = "Version of Codex CLI to install." + default = "latest" +} + +variable "pre_install_script" { + type = string + description = "Custom script to run before installing Codex." + default = null +} + +variable "post_install_script" { + type = string + description = "Custom script to run after installing Codex." + default = null +} + +variable "order" { + type = number + description = "The order determines the position of app in the UI presentation." + default = null +} + +variable "group" { + type = string + description = "The name of a group that this app belongs to." + default = "AI Tools" +} + +variable "ai_prompt" { + type = string + description = "Initial AI prompt for task reporting." 
+ default = "" +} + +locals { + app_slug = "codex" + module_dir_name = "codex" + icon_url = "../../../../.icons/claude.svg" + + # Configuration for Codex CLI + codex_config = { + openai_model = var.openai_model + temperature = var.temperature + max_tokens = var.max_tokens + openai_api_key = var.openai_api_key + } + + # Install script for Rust-based Codex CLI + install_script = templatefile("${path.module}/scripts/install.sh", { + CODEX_VERSION = var.codex_version + INSTALL_CODEX = var.install_codex + }) + + # Start script for AgentAPI integration + start_script = templatefile("${path.module}/scripts/start.sh", { + OPENAI_API_KEY = var.openai_api_key + OPENAI_MODEL = var.openai_model + TEMPERATURE = var.temperature + MAX_TOKENS = var.max_tokens + FOLDER = var.folder + AI_PROMPT = var.ai_prompt + RED = "\\033[31m" + GREEN = "\\033[32m" + YELLOW = "\\033[33m" + BOLD = "\\033[1m" + NC = "\\033[0m" + }) +} + +# Use the AgentAPI module for web chat UI and task reporting +module "agentapi" { + source = "registry.coder.com/coder/agentapi/coder" + version = "1.0.0" + + agent_id = var.agent_id + web_app_slug = local.app_slug + web_app_order = var.order + web_app_group = var.group + web_app_icon = local.icon_url + web_app_display_name = "Codex CLI" + cli_app_slug = "codex-cli" + cli_app_display_name = "Codex CLI" + cli_app = true + cli_app_icon = local.icon_url + cli_app_order = var.order + cli_app_group = var.group + module_dir_name = local.module_dir_name + folder = var.folder + pre_install_script = var.pre_install_script + post_install_script = var.post_install_script + start_script = local.start_script + install_script = local.install_script +} + +# Create a workspace app for direct CLI access +resource "coder_app" "codex_terminal" { + agent_id = var.agent_id + slug = "codex-terminal" + display_name = "Codex Terminal" + icon = local.icon_url + order = var.order + group = var.group + command = <<-EOT + #!/bin/bash + set -e + + export LANG=en_US.UTF-8 + export LC_ALL=en_US.UTF-8 + + # Set up environment variables + export OPENAI_API_KEY="${var.openai_api_key}" + export OPENAI_MODEL="${var.openai_model}" + export CODEX_TEMPERATURE="${var.temperature}" + export CODEX_MAX_TOKENS="${var.max_tokens}" + + # Change to the workspace directory + cd "${var.folder}" + + # Start interactive Codex CLI session + codex-cli interactive + EOT +} diff --git a/registry/krikera/modules/codex/run.sh b/registry/krikera/modules/codex/run.sh new file mode 100755 index 00000000..6d9dc517 --- /dev/null +++ b/registry/krikera/modules/codex/run.sh @@ -0,0 +1,82 @@ +#!/usr/bin/env bash +set -o errexit +set -o pipefail + +# Colors for output +BOLD='\033[0;1m' +GREEN='\033[0;32m' +YELLOW='\033[1;33m' +RED='\033[0;31m' +NC='\033[0m' # No Color + +printf "${BOLD}๐Ÿฆ€ OpenAI Codex CLI - Rust-based AI Code Assistant${NC}\n\n" + +# Ensure PATH includes ~/.local/bin +export PATH="$HOME/.local/bin:$PATH" + +# Check if codex-cli is installed +if ! command -v codex-cli &> /dev/null; then + printf "${RED}โŒ Codex CLI not found. Please ensure the module is properly installed.${NC}\n" + printf "${YELLOW}๐Ÿ’ก This should have been installed automatically by the AgentAPI module.${NC}\n" + exit 1 +fi + +# Check if we're in a workspace +if [ -z "$CODER_WORKSPACE_NAME" ]; then + printf "${YELLOW}โš ๏ธ Not running in a Coder workspace. 
Some features may be limited.${NC}\n" +fi + +# Display help information +printf "${BOLD}๐Ÿ“š Codex CLI Commands:${NC}\n" +printf " โ€ข ${GREEN}codex-cli generate${NC} 'description' - Generate code from description\n" +printf " โ€ข ${GREEN}codex-cli complete${NC} 'partial code' - Complete partial code\n" +printf " โ€ข ${GREEN}codex-cli explain${NC} 'code' - Explain existing code\n" +printf " โ€ข ${GREEN}codex-cli review${NC} 'code' - Review code for issues\n" +printf " โ€ข ${GREEN}codex-cli optimize${NC} 'code' - Optimize code performance\n" +printf " โ€ข ${GREEN}codex-cli debug${NC} 'code' - Help debug code issues\n" +printf " โ€ข ${GREEN}codex-cli test${NC} 'code' - Generate test cases\n" +printf " โ€ข ${GREEN}codex-cli interactive${NC} - Start interactive session\n\n" + +printf "${BOLD}๐ŸŒ Web Interface:${NC}\n" +printf " โ€ข Access the web chat UI through the Codex app in your Coder workspace\n" +printf " โ€ข Use the integrated Tasks UI for task-based code generation\n" +printf " โ€ข All interactions are logged and reportable through Coder's task system\n\n" + +printf "${BOLD}๏ฟฝ Configuration:${NC}\n" +printf " โ€ข Config file: ${YELLOW}~/.config/codex/config.toml${NC}\n" +printf " โ€ข Environment variables: ${YELLOW}OPENAI_API_KEY, OPENAI_MODEL, etc.${NC}\n\n" + +printf "${BOLD}๐Ÿš€ Quick Start:${NC}\n" +printf " 1. Set your OpenAI API key: ${YELLOW}export OPENAI_API_KEY='your-key-here'${NC}\n" +printf " 2. Try: ${YELLOW}codex-cli generate 'create a hello world function in Python'${NC}\n" +printf " 3. Or start interactive mode: ${YELLOW}codex-cli interactive${NC}\n\n" + +# Show version information +printf "${BOLD}๐Ÿ“ฆ Version Information:${NC}\n" +codex-cli --version +printf "\n" + +# Show configuration status +CONFIG_FILE="$HOME/.config/codex/config.toml" +if [ -f "$CONFIG_FILE" ]; then + printf "${GREEN}โœ… Configuration file found${NC}\n" + printf "${BOLD}๐Ÿ”ง Current settings:${NC}\n" + if command -v toml &> /dev/null; then + toml get "$CONFIG_FILE" openai.model 2>/dev/null || echo " Model: (default)" + toml get "$CONFIG_FILE" openai.temperature 2>/dev/null || echo " Temperature: (default)" + else + printf " Model: $(grep 'model =' "$CONFIG_FILE" | cut -d'"' -f2 2>/dev/null || echo '(default)')\n" + printf " Temperature: $(grep 'temperature =' "$CONFIG_FILE" | cut -d'=' -f2 | xargs 2>/dev/null || echo '(default)')\n" + fi +else + printf "${YELLOW}โš ๏ธ Configuration file not found. Using defaults.${NC}\n" +fi + +# Check API key status +if [ -n "$OPENAI_API_KEY" ]; then + printf "${GREEN}โœ… OpenAI API key is set${NC}\n" +else + printf "${YELLOW}โš ๏ธ OpenAI API key not set. Set it with: export OPENAI_API_KEY='your-key'${NC}\n" +fi + +printf "\n${GREEN}๐ŸŽ‰ Codex CLI is ready! 
Use the web interface or CLI commands above.${NC}\n" diff --git a/registry/krikera/modules/codex/scripts/install.sh b/registry/krikera/modules/codex/scripts/install.sh new file mode 100755 index 00000000..2a1c7ca9 --- /dev/null +++ b/registry/krikera/modules/codex/scripts/install.sh @@ -0,0 +1,194 @@ +#!/bin/bash +set -o errexit +set -o pipefail + +# Template variables +CODEX_VERSION="${CODEX_VERSION}" +INSTALL_CODEX="${INSTALL_CODEX}" + +# Colors for output +BOLD='\033[0;1m' +GREEN='\033[0;32m' +YELLOW='\033[1;33m' +RED='\033[0;31m' +NC='\033[0m' # No Color + +printf "$${BOLD}๐Ÿฆ€ Installing Rust-based OpenAI Codex CLI...\n\n$${NC}" + +# Skip installation if not requested +if [ "$INSTALL_CODEX" != "true" ]; then + printf "$${YELLOW}โš ๏ธ Codex installation skipped (install_codex = false)$${NC}\n" + exit 0 +fi + +# For testing purposes, check if we should use a mock CLI +if [ -n "$CODEX_TEST_MODE" ] && [ "$CODEX_TEST_MODE" = "true" ]; then + printf "$${YELLOW}๐Ÿงช Test mode detected, using mock Codex CLI$${NC}\n" + + # Create mock codex-cli + mkdir -p "$HOME/.local/bin" + + # Use the mock script from testdata if available + if [ -f "$(dirname "$0")/../testdata/mock-codex-cli.sh" ]; then + cp "$(dirname "$0")/../testdata/mock-codex-cli.sh" "$HOME/.local/bin/codex-cli" + else + # Fallback mock script + cat > "$HOME/.local/bin/codex-cli" << 'MOCK_EOF' +#!/bin/bash +case "$1" in + --version) echo "codex-cli version 1.0.0 (mock)"; exit 0 ;; + *) echo "Mock Codex CLI: $*"; exit 0 ;; +esac +MOCK_EOF + fi + + chmod +x "$HOME/.local/bin/codex-cli" + + # Make sure ~/.local/bin is in PATH + if ! echo "$PATH" | grep -q "$HOME/.local/bin"; then + echo 'export PATH="$HOME/.local/bin:$PATH"' >> "$HOME/.bashrc" + echo 'export PATH="$HOME/.local/bin:$PATH"' >> "$HOME/.zshrc" 2>/dev/null || true + export PATH="$HOME/.local/bin:$PATH" + fi + + printf "$${GREEN}โœ… Mock Codex CLI installed successfully!$${NC}\n" + printf "$${GREEN}๐ŸŽ‰ Test mode installation complete!$${NC}\n" + exit 0 +fi + +# Check if Rust is installed, install if not +if ! command -v rustc &> /dev/null; then + printf "$${YELLOW}๐Ÿ“ฆ Rust not found, installing Rust...$${NC}\n" + curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y + source "$HOME/.cargo/env" + printf "$${GREEN}โœ… Rust installed successfully$${NC}\n\n" +else + printf "$${GREEN}โœ… Rust already installed$${NC}\n\n" +fi + +# Ensure we have the latest stable Rust +rustup update stable +rustup default stable + +# Install required system dependencies +printf "$${BOLD}๐Ÿ“ฆ Installing system dependencies...$${NC}\n" +if command -v apt-get &> /dev/null; then + sudo apt-get update -qq + sudo apt-get install -y -qq \ + build-essential \ + pkg-config \ + libssl-dev \ + libclang-dev \ + curl \ + git \ + ca-certificates +elif command -v yum &> /dev/null; then + sudo yum install -y \ + gcc \ + gcc-c++ \ + make \ + pkgconfig \ + openssl-devel \ + clang-devel \ + curl \ + git \ + ca-certificates +elif command -v apk &> /dev/null; then + sudo apk add --no-cache \ + build-base \ + pkgconfig \ + openssl-dev \ + clang-dev \ + curl \ + git \ + ca-certificates +else + printf "$${RED}โŒ Unsupported package manager. 
Please install build dependencies manually.$${NC}\n" + exit 1 +fi + +printf "$${GREEN}โœ… System dependencies installed$${NC}\n\n" + +# Create codex directory +CODEX_DIR="$HOME/.local/share/codex" +mkdir -p "$CODEX_DIR" +cd "$CODEX_DIR" + +# Clone or update the Codex CLI repository +CODEX_REPO="https://github.com/krikera/codex-cli.git" +if [ -d "codex-cli" ]; then + printf "$${BOLD}๐Ÿ”„ Updating existing Codex CLI...$${NC}\n" + cd codex-cli + git fetch origin + if [ "$CODEX_VERSION" = "latest" ]; then + git checkout main + git pull origin main + else + git checkout "v$CODEX_VERSION" + fi +else + printf "$${BOLD}๐Ÿ“ฅ Cloning Codex CLI repository...$${NC}\n" + if [ "$CODEX_VERSION" = "latest" ]; then + git clone "$CODEX_REPO" codex-cli + else + git clone --branch "v$CODEX_VERSION" "$CODEX_REPO" codex-cli + fi + cd codex-cli +fi + +# Build the Rust project +printf "$${BOLD}๐Ÿ”จ Building Codex CLI (this may take a few minutes)...$${NC}\n" +cargo build --release + +# Install the binary +printf "$${BOLD}๐Ÿ“ฆ Installing Codex CLI...$${NC}\n" +mkdir -p "$HOME/.local/bin" +cp target/release/codex-cli "$HOME/.local/bin/" + +# Make sure ~/.local/bin is in PATH +if ! echo "$PATH" | grep -q "$HOME/.local/bin"; then + echo 'export PATH="$HOME/.local/bin:$PATH"' >> "$HOME/.bashrc" + echo 'export PATH="$HOME/.local/bin:$PATH"' >> "$HOME/.zshrc" 2>/dev/null || true + export PATH="$HOME/.local/bin:$PATH" +fi + +# Create configuration directory +mkdir -p "$HOME/.config/codex" + +# Create default configuration file +cat > "$HOME/.config/codex/config.toml" << EOF +[openai] +model = "gpt-4" +temperature = 0.2 +max_tokens = 2048 + +[codex] +auto_save = true +show_thinking = true +verbose = false + +[ui] +theme = "dark" +highlight_syntax = true +EOF + +printf "$${GREEN}โœ… Codex CLI installed successfully!$${NC}\n\n" + +# Verify installation +if command -v codex-cli &> /dev/null; then + printf "$${GREEN}๐ŸŽ‰ Installation verification successful!$${NC}\n" + printf "$${BOLD}๐Ÿ“ Codex CLI version: $${NC}" + codex-cli --version + printf "\n$${BOLD}๐Ÿ“ Configuration directory: $${NC}$HOME/.config/codex\n" + printf "$${BOLD}๐Ÿ”ง Binary location: $${NC}$HOME/.local/bin/codex-cli\n\n" +else + printf "$${RED}โŒ Installation verification failed. 
Please check the installation.$${NC}\n" + exit 1 +fi + +printf "$${GREEN}๐Ÿš€ Codex CLI is ready to use!$${NC}\n" +printf "$${BOLD}๐Ÿ’ก Usage examples:$${NC}\n" +printf " โ€ข $${YELLOW}codex-cli generate 'create a fibonacci function in Python'$${NC}\n" +printf " โ€ข $${YELLOW}codex-cli complete 'def fibonacci(n):'$${NC}\n" +printf " โ€ข $${YELLOW}codex-cli explain 'explain this code: def fib(n): return n if n <= 1 else fib(n-1) + fib(n-2)'$${NC}\n" +printf " โ€ข $${YELLOW}codex-cli interactive$${NC}\n\n" diff --git a/registry/krikera/modules/codex/scripts/start.sh b/registry/krikera/modules/codex/scripts/start.sh new file mode 100755 index 00000000..f64dd19e --- /dev/null +++ b/registry/krikera/modules/codex/scripts/start.sh @@ -0,0 +1,219 @@ +#!/bin/bash +set -o errexit +set -o pipefail + +# Template variables +OPENAI_API_KEY="${OPENAI_API_KEY}" +OPENAI_MODEL="${OPENAI_MODEL}" +TEMPERATURE="${TEMPERATURE}" +MAX_TOKENS="${MAX_TOKENS}" +FOLDER="${FOLDER}" +AI_PROMPT="${AI_PROMPT}" + +# AgentAPI parameters +USE_AGENTAPI="$${1:-true}" +AGENTAPI_PORT="$${2:-3284}" + +# Colors for output +BOLD='\033[0;1m' +GREEN='\033[0;32m' +YELLOW='\033[1;33m' +RED='\033[0;31m' +NC='\033[0m' # No Color + +printf "$${BOLD}๐Ÿš€ Starting Codex CLI with AgentAPI integration...$${NC}\n\n" + +# Set up environment variables +export OPENAI_API_KEY="$OPENAI_API_KEY" +export OPENAI_MODEL="$OPENAI_MODEL" +export CODEX_TEMPERATURE="$TEMPERATURE" +export CODEX_MAX_TOKENS="$MAX_TOKENS" +export CODEX_FOLDER="$FOLDER" +export CODEX_AI_PROMPT="$AI_PROMPT" + +# Ensure PATH includes ~/.local/bin +export PATH="$HOME/.local/bin:$PATH" + +# Check if codex-cli is installed +if ! command -v codex-cli &> /dev/null; then + printf "$${RED}โŒ Codex CLI not found. Please ensure it's installed.$${NC}\n" + exit 1 +fi + +# Check if OpenAI API key is set +if [ -z "$OPENAI_API_KEY" ]; then + printf "$${YELLOW}โš ๏ธ OpenAI API key not set. 
Using default configuration.$${NC}\n" +fi + +# Update configuration with environment variables +CONFIG_FILE="$HOME/.config/codex/config.toml" +mkdir -p "$HOME/.config/codex" +cat > "$CONFIG_FILE" << EOF +[openai] +model = "$OPENAI_MODEL" +temperature = $TEMPERATURE +max_tokens = $MAX_TOKENS + +[codex] +auto_save = true +show_thinking = true +verbose = false +working_directory = "$FOLDER" + +[ui] +theme = "dark" +highlight_syntax = true + +[agentapi] +enabled = $USE_AGENTAPI +port = $AGENTAPI_PORT +host = "localhost" +EOF + +printf "${GREEN}โœ… Configuration updated${NC}\n" + +# Handle AI prompt for task reporting +if [ -n "$AI_PROMPT" ]; then + printf "${YELLOW}๐Ÿ“ Setting up AI prompt for task reporting...${NC}\n" + echo -n "$AI_PROMPT" > /tmp/codex-prompt.txt + printf "${GREEN}โœ… AI prompt configured${NC}\n" +fi + +# Change to the working directory +cd "$FOLDER" + +# Create AgentAPI bridge script +BRIDGE_SCRIPT="$HOME/.local/bin/codex-agentapi-bridge" +cat > "$BRIDGE_SCRIPT" << 'BRIDGE_EOF' +#!/bin/bash +set -e + +# Environment setup +export PATH="$HOME/.local/bin:$PATH" +export LANG=en_US.UTF-8 +export LC_ALL=en_US.UTF-8 + +# Function to handle different types of requests +handle_request() { + local request_type="$1" + local content="$2" + + case "$request_type" in + "generate") + codex-cli generate "$content" + ;; + "complete") + codex-cli complete "$content" + ;; + "explain") + codex-cli explain "$content" + ;; + "review") + codex-cli review "$content" + ;; + "optimize") + codex-cli optimize "$content" + ;; + "debug") + codex-cli debug "$content" + ;; + "test") + codex-cli test "$content" + ;; + "interactive") + codex-cli interactive + ;; + *) + # Default to generate for unknown request types + codex-cli generate "$content" + ;; + esac +} + +# Main execution +if [ $# -eq 0 ]; then + # No arguments - start interactive mode + handle_request "interactive" "" +else + # Use first argument as command, rest as content + handle_request "$1" "$${*:2}" +fi +BRIDGE_EOF + +chmod +x "$BRIDGE_SCRIPT" + +printf "${GREEN}โœ… AgentAPI bridge configured${NC}\n\n" + +# Test the installation +printf "${BOLD}๐Ÿงช Testing Codex CLI...${NC}\n" +if codex-cli --version >/dev/null 2>&1; then + printf "${GREEN}โœ… Codex CLI is responding correctly${NC}\n" +else + printf "${RED}โŒ Codex CLI test failed${NC}\n" + exit 1 +fi + +printf "\n${GREEN}๐ŸŽ‰ Codex CLI is ready for AgentAPI integration!${NC}\n" +printf "${BOLD}๐Ÿ“š Available commands:${NC}\n" +printf " โ€ข ${YELLOW}generate${NC} - Generate code from description\n" +printf " โ€ข ${YELLOW}complete${NC} - Complete partial code\n" +printf " โ€ข ${YELLOW}explain${NC} - Explain existing code\n" +printf " โ€ข ${YELLOW}review${NC} - Review code for issues\n" +printf " โ€ข ${YELLOW}optimize${NC} - Optimize code performance\n" +printf " โ€ข ${YELLOW}debug${NC} - Help debug code issues\n" +printf " โ€ข ${YELLOW}test${NC} - Generate test cases\n" +printf " โ€ข ${YELLOW}interactive${NC} - Start interactive session\n\n" + +# Start the AgentAPI server +if [ "$USE_AGENTAPI" = "true" ]; then + printf "${BOLD}๐Ÿ”„ Starting AgentAPI server on port $AGENTAPI_PORT...${NC}\n" + + # Create a simple AgentAPI configuration for Codex + cat > "$HOME/.config/codex/agentapi.json" << JSON_EOF +{ + "name": "Codex CLI", + "version": "1.0.0", + "description": "Rust-based OpenAI Codex CLI with AgentAPI integration", + "commands": { + "generate": { + "description": "Generate code from description", + "handler": "codex-agentapi-bridge" + }, + "complete": { + "description": "Complete 
partial code", + "handler": "codex-agentapi-bridge" + }, + "explain": { + "description": "Explain existing code", + "handler": "codex-agentapi-bridge" + }, + "review": { + "description": "Review code for issues", + "handler": "codex-agentapi-bridge" + }, + "optimize": { + "description": "Optimize code performance", + "handler": "codex-agentapi-bridge" + }, + "debug": { + "description": "Help debug code issues", + "handler": "codex-agentapi-bridge" + }, + "test": { + "description": "Generate test cases", + "handler": "codex-agentapi-bridge" + }, + "interactive": { + "description": "Start interactive session", + "handler": "codex-agentapi-bridge" + } + } +} +JSON_EOF + + # Start AgentAPI with our configuration + exec agentapi --config "$HOME/.config/codex/agentapi.json" --port "$AGENTAPI_PORT" --handler "$BRIDGE_SCRIPT" +else + printf "${YELLOW}โš ๏ธ AgentAPI disabled. Running in standalone mode.${NC}\n" + exec codex-cli interactive +fi diff --git a/registry/krikera/modules/codex/scripts/test-util.sh b/registry/krikera/modules/codex/scripts/test-util.sh new file mode 100755 index 00000000..f0f6beb8 --- /dev/null +++ b/registry/krikera/modules/codex/scripts/test-util.sh @@ -0,0 +1,64 @@ +#!/bin/bash +# Test utility for Codex CLI module + +set -e + +# Colors for output +BOLD='\033[0;1m' +GREEN='\033[0;32m' +YELLOW='\033[1;33m' +RED='\033[0;31m' +NC='\033[0m' # No Color + +echo -e "${BOLD}๐Ÿงช Testing Codex CLI Module...${NC}" + +# Test 1: Check if configuration is properly created +echo -e "${YELLOW}Test 1: Configuration creation${NC}" +if [ -f "$HOME/.config/codex/config.toml" ]; then + echo -e "${GREEN}โœ… Configuration file exists${NC}" +else + echo -e "${RED}โŒ Configuration file missing${NC}" + exit 1 +fi + +# Test 2: Check if scripts are executable +echo -e "${YELLOW}Test 2: Script permissions${NC}" +if [ -x "$HOME/.local/bin/codex-agentapi-bridge" ]; then + echo -e "${GREEN}โœ… Bridge script is executable${NC}" +else + echo -e "${RED}โŒ Bridge script missing or not executable${NC}" + exit 1 +fi + +# Test 3: Check if AgentAPI configuration is created +echo -e "${YELLOW}Test 3: AgentAPI configuration${NC}" +if [ -f "$HOME/.config/codex/agentapi.json" ]; then + echo -e "${GREEN}โœ… AgentAPI configuration exists${NC}" +else + echo -e "${RED}โŒ AgentAPI configuration missing${NC}" + exit 1 +fi + +# Test 4: Mock API call test +echo -e "${YELLOW}Test 4: Mock API response${NC}" +if command -v jq &> /dev/null; then + echo '{"type": "generate", "content": "hello world"}' | jq . 
> /tmp/test_input.json + if [ -f /tmp/test_input.json ]; then + echo -e "${GREEN}โœ… JSON parsing works${NC}" + else + echo -e "${RED}โŒ JSON parsing failed${NC}" + exit 1 + fi +else + echo -e "${YELLOW}โš ๏ธ jq not available, skipping JSON test${NC}" +fi + +# Test 5: Environment variable test +echo -e "${YELLOW}Test 5: Environment variables${NC}" +if [ -n "$OPENAI_MODEL" ]; then + echo -e "${GREEN}โœ… OPENAI_MODEL is set to: $OPENAI_MODEL${NC}" +else + echo -e "${YELLOW}โš ๏ธ OPENAI_MODEL not set, using default${NC}" +fi + +echo -e "\n${GREEN}๐ŸŽ‰ All tests passed!${NC}" diff --git a/registry/krikera/modules/codex/test-util.ts b/registry/krikera/modules/codex/test-util.ts new file mode 100644 index 00000000..bf4fceb2 --- /dev/null +++ b/registry/krikera/modules/codex/test-util.ts @@ -0,0 +1,132 @@ +import { + execContainer, + findResourceInstance, + removeContainer, + runContainer, + runTerraformApply, + writeFileContainer, +} from "~test"; +import path from "path"; +import { expect } from "bun:test"; + +export const setupContainer = async ({ + moduleDir, + image, + vars, +}: { + moduleDir: string; + image?: string; + vars?: Record; +}) => { + const state = await runTerraformApply(moduleDir, { + agent_id: "foo", + ...vars, + }); + const coderScript = findResourceInstance(state, "coder_script"); + const id = await runContainer(image ?? "codercom/enterprise-node:latest"); + return { id, coderScript, cleanup: () => removeContainer(id) }; +}; + +export const loadTestFile = async ( + moduleDir: string, + ...relativePath: [string, ...string[]] +) => { + return await Bun.file( + path.join(moduleDir, "testdata", ...relativePath), + ).text(); +}; + +export const writeExecutable = async ({ + containerId, + filePath, + content, +}: { + containerId: string; + filePath: string; + content: string; +}) => { + await writeFileContainer(containerId, filePath, content, { + user: "root", + }); + await execContainer(containerId, ["chmod", "+x", filePath], ["--user", "root"]); +}; + +export const execModuleScript = async ({ + containerId, + coderScript, + userArgs, +}: { + containerId: string; + coderScript: { script: string }; + userArgs?: string[]; +}) => { + const scriptPath = "/tmp/module_script.sh"; + await writeExecutable({ + containerId, + filePath: scriptPath, + content: coderScript.script, + }); + return await execContainer(containerId, [scriptPath, ...(userArgs ?? 
[])]); +}; + +export const expectAgentAPIStarted = async ({ + containerId, + port = 3284, + timeout = 30000, +}: { + containerId: string; + port?: number; + timeout?: number; +}) => { + const startTime = Date.now(); + while (Date.now() - startTime < timeout) { + const result = await execContainer(containerId, [ + "curl", + "-f", + "-s", + "-o", + "/dev/null", + `http://localhost:${port}/status`, + ]); + if (result.exitCode === 0) { + return; + } + await new Promise((resolve) => setTimeout(resolve, 1000)); + } + throw new Error(`AgentAPI did not start within ${timeout}ms`); +}; + +export const expectCodexCLIInstalled = async ({ + containerId, +}: { + containerId: string; +}) => { + const result = await execContainer(containerId, ["which", "codex-cli"]); + expect(result.exitCode).toBe(0); +}; + +export const expectCodexConfigExists = async ({ + containerId, +}: { + containerId: string; +}) => { + const result = await execContainer(containerId, [ + "test", + "-f", + "/home/coder/.config/codex/config.toml", + ]); + expect(result.exitCode).toBe(0); +}; + +export const expectCodexAgentAPIBridgeExists = async ({ + containerId, +}: { + containerId: string; +}) => { + const result = await execContainer(containerId, [ + "test", + "-f", + "/home/coder/.local/bin/codex-agentapi-bridge", + ]); + expect(result.exitCode).toBe(0); +}; diff --git a/registry/krikera/modules/codex/testdata/mock-codex-cli.sh b/registry/krikera/modules/codex/testdata/mock-codex-cli.sh new file mode 100755 index 00000000..cf75222a --- /dev/null +++ b/registry/krikera/modules/codex/testdata/mock-codex-cli.sh @@ -0,0 +1,63 @@ +#!/bin/bash + +# Mock Codex CLI for testing purposes +# This script simulates the behavior of the actual codex-cli for testing + +case "$1" in + "--version") + echo "codex-cli 1.0.0" + ;; + "complete") + # Mock completion response + cat << 'EOF' +{ + "id": "cmpl-test123", + "object": "text_completion", + "created": 1234567890, + "model": "gpt-3.5-turbo-instruct", + "choices": [ + { + "text": "\n\nThis is a mock completion response for testing purposes. The actual CLI would interact with OpenAI's API to generate code completions based on the provided prompt.", + "index": 0, + "finish_reason": "length" + } + ], + "usage": { + "prompt_tokens": 10, + "completion_tokens": 25, + "total_tokens": 35 + } +} +EOF + ;; + "chat") + # Mock chat response + cat << 'EOF' +{ + "id": "chatcmpl-test123", + "object": "chat.completion", + "created": 1234567890, + "model": "gpt-3.5-turbo", + "choices": [ + { + "message": { + "role": "assistant", + "content": "This is a mock chat response for testing. The actual CLI would provide interactive code assistance and explanations." + }, + "index": 0, + "finish_reason": "stop" + } + ], + "usage": { + "prompt_tokens": 15, + "completion_tokens": 20, + "total_tokens": 35 + } +} +EOF + ;; + *) + echo "Mock Codex CLI - Available commands: complete, chat, --version" + echo "This is a test mock. In production, this would be replaced with the actual Rust CLI." 
+    ;;
+esac

From e2d5c27bbe8b55ada75b8b8d225ccd2f8022f160 Mon Sep 17 00:00:00 2001
From: K <84141602+krikera@users.noreply.github.com>
Date: Mon, 21 Jul 2025 10:38:49 +0530
Subject: [PATCH 2/6] Update registry/krikera/modules/codex/README.md

Co-authored-by: Atif Ali
---
 registry/krikera/modules/codex/README.md | 8 --------
 1 file changed, 8 deletions(-)

diff --git a/registry/krikera/modules/codex/README.md b/registry/krikera/modules/codex/README.md
index a6bc9638..f7367557 100644
--- a/registry/krikera/modules/codex/README.md
+++ b/registry/krikera/modules/codex/README.md
@@ -11,14 +11,6 @@ tags: ["ai", "assistant", "codex", "openai", "rust", "tasks"]
 
 A Rust-based OpenAI Codex CLI tool with AgentAPI web chat UI integration and full task reporting support for Coder + Tasks UI.
 
-## Features
-
-- **Rust-based CLI**: High-performance Rust implementation of OpenAI Codex
-- **Web Chat UI**: Interactive web interface through AgentAPI integration
-- **Task Reporting**: Full integration with Coder Tasks UI
-- **Code Generation**: Generate code from natural language descriptions
-- **Code Completion**: Smart code completion and suggestions
-- **Code Explanation**: Get explanations for existing code
 
 ```tf
 module "codex" {

From 623b0a4bdefa941a66c131dbb27c388d3e114eb8 Mon Sep 17 00:00:00 2001
From: K <84141602+krikera@users.noreply.github.com>
Date: Mon, 21 Jul 2025 10:38:59 +0530
Subject: [PATCH 3/6] Update registry/krikera/modules/codex/README.md

Co-authored-by: Atif Ali
---
 registry/krikera/modules/codex/README.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/registry/krikera/modules/codex/README.md b/registry/krikera/modules/codex/README.md
index f7367557..12d3f281 100644
--- a/registry/krikera/modules/codex/README.md
+++ b/registry/krikera/modules/codex/README.md
@@ -1,5 +1,5 @@
 ---
-display_name: "OpenAI Codex CLI"
+display_name: "OpenAI Codex"
 description: "Rust-based OpenAI Codex CLI with AgentAPI web chat UI and task reporting"
 icon: "../../../../.icons/claude.svg"
 maintainer_github: "krikera"

From 3de374103dbce2f8f8e83fa82f3d7dc439fcd6fe Mon Sep 17 00:00:00 2001
From: K <84141602+krikera@users.noreply.github.com>
Date: Mon, 21 Jul 2025 10:39:08 +0530
Subject: [PATCH 4/6] Update registry/krikera/modules/codex/README.md

Co-authored-by: Atif Ali
---
 registry/krikera/modules/codex/README.md | 1 -
 1 file changed, 1 deletion(-)

diff --git a/registry/krikera/modules/codex/README.md b/registry/krikera/modules/codex/README.md
index 12d3f281..3aaf75ca 100644
--- a/registry/krikera/modules/codex/README.md
+++ b/registry/krikera/modules/codex/README.md
@@ -2,7 +2,6 @@
 display_name: "OpenAI Codex"
 description: "Rust-based OpenAI Codex CLI with AgentAPI web chat UI and task reporting"
 icon: "../../../../.icons/claude.svg"
-maintainer_github: "krikera"
 verified: false
 tags: ["ai", "assistant", "codex", "openai", "rust", "tasks"]
 ---

From 9849f3aea7122d67f289fe8755e090765c2ef379 Mon Sep 17 00:00:00 2001
From: K <84141602+krikera@users.noreply.github.com>
Date: Mon, 21 Jul 2025 10:39:18 +0530
Subject: [PATCH 5/6] Update registry/krikera/modules/codex/README.md

Co-authored-by: Atif Ali
---
 registry/krikera/modules/codex/README.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/registry/krikera/modules/codex/README.md b/registry/krikera/modules/codex/README.md
index 3aaf75ca..aa91680d 100644
--- a/registry/krikera/modules/codex/README.md
+++ b/registry/krikera/modules/codex/README.md
@@ -3,7 +3,7 @@ display_name: "OpenAI Codex"
 description: "Rust-based OpenAI Codex CLI with AgentAPI web chat UI and task reporting"
 icon: "../../../../.icons/claude.svg"
 verified: false
-tags: ["ai", "assistant", "codex", "openai", "rust", "tasks"]
+tags: ["ai", "agent", "codex", "openai"]
 ---
 
 # OpenAI Codex CLI

From 5ea8a76074cb769939fd2e6a505343442d090394 Mon Sep 17 00:00:00 2001
From: K <84141602+krikera@users.noreply.github.com>
Date: Mon, 21 Jul 2025 10:39:26 +0530
Subject: [PATCH 6/6] Update registry/krikera/modules/codex/README.md

Co-authored-by: Atif Ali
---
 registry/krikera/modules/codex/README.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/registry/krikera/modules/codex/README.md b/registry/krikera/modules/codex/README.md
index aa91680d..a04928c0 100644
--- a/registry/krikera/modules/codex/README.md
+++ b/registry/krikera/modules/codex/README.md
@@ -6,7 +6,7 @@ verified: false
 tags: ["ai", "agent", "codex", "openai"]
 ---
 
-# OpenAI Codex CLI
+# OpenAI Codex
 
 A Rust-based OpenAI Codex CLI tool with AgentAPI web chat UI integration and full task reporting support for Coder + Tasks UI.
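For reviewers who want to exercise the task-reporting path end to end, the sketch below shows one way a workspace template could consume this module together with the Tasks "AI Prompt" parameter. It is illustrative only and not part of the patch: the agent resource, the `openai_api_key` variable, and their names are assumptions; only the `module "codex"` block and the `coder_parameter` mirror the README examples above.

```tf
terraform {
  required_providers {
    coder = {
      source  = "coder/coder"
      version = ">= 2.7"
    }
  }
}

# Assumed to be supplied by the template admin (e.g. via a TF_VAR or a secret store).
variable "openai_api_key" {
  type      = string
  sensitive = true
}

# Matches the "With Task Reporting" example in the module README.
data "coder_parameter" "ai_prompt" {
  type        = "string"
  name        = "AI Prompt"
  default     = ""
  description = "Write a prompt for the Codex CLI"
  mutable     = true
}

# Minimal agent; in a real template this would be attached to compute (Docker, VM, etc.).
resource "coder_agent" "main" {
  os   = "linux"
  arch = "amd64"
}

module "codex" {
  source         = "registry.coder.com/krikera/codex/coder"
  version        = "1.0.0"
  agent_id       = coder_agent.main.id
  openai_api_key = var.openai_api_key
  ai_prompt      = data.coder_parameter.ai_prompt.value
  folder         = "/home/coder/projects"
}
```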