Date: Mon, 19 May 2025 17:23:53 -0400
Subject: [PATCH 13/44] docs: add new dynamic parameters information to
parameters doc (#17653)
Co-authored-by: EdwardAngert <17991901+EdwardAngert@users.noreply.github.com>
Co-authored-by: Steven Masley
Co-authored-by: Stephen Kirby
Co-authored-by: Stephen Kirby <58410745+stirby@users.noreply.github.com>
---
.../extending-templates/parameters.md | 547 +++++++++++++++++-
1 file changed, 546 insertions(+), 1 deletion(-)
diff --git a/docs/admin/templates/extending-templates/parameters.md b/docs/admin/templates/extending-templates/parameters.md
index b5e6473ab6b4f..7f216bd3e64f9 100644
--- a/docs/admin/templates/extending-templates/parameters.md
+++ b/docs/admin/templates/extending-templates/parameters.md
@@ -252,7 +252,7 @@ data "coder_parameter" "force_rebuild" {
## Validating parameters
-Coder supports rich parameters with multiple validation modes: min, max,
+Coder supports parameters with multiple validation modes: min, max,
monotonic numbers, and regular expressions.
### Number
@@ -391,3 +391,548 @@ parameters in one of two ways:
```
Or set the [environment variable](../../setup/index.md), `CODER_EXPERIMENTS=auto-fill-parameters`
+
+## Dynamic Parameters
+
+Dynamic Parameters enhances Coder's existing parameter system with real-time validation,
+conditional parameter behavior, and richer input types.
+This feature allows template authors to create more interactive and responsive workspace creation experiences.
+
+### Enable Dynamic Parameters (Early Access)
+
+To use Dynamic Parameters, enable the experiment flag or set the environment variable.
+
+Note that as of v2.22.0, Dynamic Parameters are an unsafe experiment and will not be enabled with the experiment wildcard.
+
+
+
+#### Flag
+
+```shell
+coder server --experiments=dynamic-parameters
+```
+
+#### Env Variable
+
+```shell
+CODER_EXPERIMENTS=dynamic-parameters
+```
+
+
+
+Dynamic Parameters also require version >=2.4.0 of the Coder provider.
+
+Enable the experiment, then include the following at the top of your template:
+
+```terraform
+terraform {
+ required_providers {
+ coder = {
+ source = "coder/coder"
+ version = ">=2.4.0"
+ }
+ }
+}
+```
+
+Once enabled, users can toggle between the experimental and classic interfaces using
+an escape hatch in the workspace creation form.
+
+## Features and Capabilities
+
+Dynamic Parameters introduces three primary enhancements to the standard parameter system:
+
+- **Conditional Parameters**
+
+ - Parameters can respond to changes in other parameters
+ - Show or hide parameters based on other selections
+ - Modify validation rules conditionally
+ - Create branching paths in workspace creation forms
+
+- **Reference User Properties**
+
+ - Read user data at build time from [`coder_workspace_owner`](https://registry.terraform.io/providers/coder/coder/latest/docs/data-sources/workspace_owner)
+ - Conditionally hide parameters based on user's role
+ - Change parameter options based on user groups
+  - Reference the user's name in parameters (see the sketch after this list)
+
+- **Additional Form Inputs**
+
+ - Searchable dropdown lists for easier selection
+ - Multi-select options for choosing multiple items
+ - Secret text inputs for sensitive information
+ - Key-value pair inputs for complex data
+ - Button parameters for toggling sections
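+
+As a minimal sketch of referencing user properties (the parameter name `git_author` is
+illustrative, and the template is assumed to declare the `coder_workspace_owner` data source),
+a parameter default can be pre-filled from the workspace owner's username:
+
+```tf
+data "coder_workspace_owner" "me" {}
+
+data "coder_parameter" "git_author" {
+  name         = "git_author"
+  display_name = "Git Author Name"
+  type         = "string"
+  # Pre-fill the field with the workspace owner's username
+  default      = data.coder_workspace_owner.me.name
+  mutable      = true
+}
+```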
+
+## Available Form Input Types
+
+Dynamic Parameters supports a variety of form types to create rich, interactive user experiences.
+
+You can specify the form type using the `form_type` property.
+Different parameter types support different form types.
+
+The "Options" column in the table below indicates whether the form type requires options to be defined (Yes) or doesn't support/require them (No).
+When required, options are specified using one or more `option` blocks in your parameter definition, where each option has a `name` (displayed to the user) and a `value` (used in your template logic).
+
+| Form Type | Parameter Types | Options | Notes |
+|----------------|--------------------------------------------|---------|------------------------------------------------------------------------------------------------------------------------------|
+| `checkbox` | `bool` | No | A single checkbox for boolean parameters. Default for boolean parameters. |
+| `dropdown` | `string`, `number` | Yes | Searchable dropdown list for choosing a single option from a list. Default for `string` or `number` parameters with options. |
+| `input`        | `string`, `number`                         | No      | Standard single-line text input field. Default for `string` or `number` parameters without options.                          |
+| `key-value` | `string` | No | For entering key-value pairs (as JSON). |
+| `multi-select` | `list(string)` | Yes | Select multiple items from a list with checkboxes. |
+| `password` | `string` | No | Masked input field for sensitive information. |
+| `radio` | `string`, `number`, `bool`, `list(string)` | Yes | Radio buttons for selecting a single option with all choices visible at once. |
+| `slider` | `number` | No | Slider selection with min/max validation for numeric values. |
+| `switch` | `bool` | No | Toggle switch alternative for boolean parameters. |
+| `tag-select`   | `list(string)`                             | No      | Default for `list(string)` parameters without options.                                                                        |
+| `textarea`     | `string`                                   | No      | Multi-line text input field for longer content.                                                                               |
+
+### Form Type Examples
+
+`checkbox`: A single checkbox for boolean values
+
+```tf
+data "coder_parameter" "enable_gpu" {
+ name = "enable_gpu"
+ display_name = "Enable GPU"
+ type = "bool"
+ form_type = "checkbox" # This is the default for boolean parameters
+ default = false
+}
+```
+
+
+
+`dropdown`: A searchable select menu for choosing a single option from a list
+
+```tf
+data "coder_parameter" "region" {
+ name = "region"
+ display_name = "Region"
+ description = "Select a region"
+ type = "string"
+ form_type = "dropdown" # This is the default for string parameters with options
+
+ option {
+ name = "US East"
+ value = "us-east-1"
+ }
+ option {
+ name = "US West"
+ value = "us-west-2"
+ }
+}
+```
+
+
+
+`input`: A standard text input field
+
+```tf
+data "coder_parameter" "custom_domain" {
+ name = "custom_domain"
+ display_name = "Custom Domain"
+ type = "string"
+ form_type = "input" # This is the default for string parameters without options
+ default = ""
+}
+```
+
+
+
+`key-value`: Input for entering key-value pairs
+
+```tf
+data "coder_parameter" "environment_vars" {
+ name = "environment_vars"
+ display_name = "Environment Variables"
+ type = "string"
+ form_type = "key-value"
+ default = jsonencode({"NODE_ENV": "development"})
+}
+```
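+
+Because the `key-value` value is delivered as a JSON string, a common pattern (sketched here
+with an illustrative local name) is to decode it with `jsondecode` before using it elsewhere
+in the template:
+
+```tf
+locals {
+  # Decode the JSON string into a map of environment variables
+  workspace_env = jsondecode(data.coder_parameter.environment_vars.value)
+}
+```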
+
+
+
+`multi-select`: Checkboxes for selecting multiple options from a list
+
+```tf
+data "coder_parameter" "tools" {
+ name = "tools"
+ display_name = "Developer Tools"
+ type = "list(string)"
+ form_type = "multi-select"
+ default = jsonencode(["git", "docker"])
+
+ option {
+ name = "Git"
+ value = "git"
+ }
+ option {
+ name = "Docker"
+ value = "docker"
+ }
+ option {
+ name = "Kubernetes CLI"
+ value = "kubectl"
+ }
+}
+```
+
+
+
+`password`: A text input that masks sensitive information
+
+```tf
+data "coder_parameter" "api_key" {
+ name = "api_key"
+ display_name = "API Key"
+ type = "string"
+ form_type = "password"
+ secret = true
+}
+```
+
+
+
+`radio`: Radio buttons for selecting a single option with high visibility
+
+```tf
+data "coder_parameter" "environment" {
+ name = "environment"
+ display_name = "Environment"
+ type = "string"
+ form_type = "radio"
+ default = "dev"
+
+ option {
+ name = "Development"
+ value = "dev"
+ }
+ option {
+ name = "Staging"
+ value = "staging"
+ }
+}
+```
+
+
+
+`slider`: A slider for selecting numeric values within a range
+
+```tf
+data "coder_parameter" "cpu_cores" {
+ name = "cpu_cores"
+ display_name = "CPU Cores"
+ type = "number"
+ form_type = "slider"
+ default = 2
+ validation {
+ min = 1
+ max = 8
+ }
+}
+```
+
+
+
+`switch`: A toggle switch for boolean values
+
+```tf
+data "coder_parameter" "advanced_mode" {
+ name = "advanced_mode"
+ display_name = "Advanced Mode"
+ type = "bool"
+ form_type = "switch"
+ default = false
+}
+```
+
+
+
+`textarea`: A multi-line text input field for longer content
+
+```tf
+data "coder_parameter" "init_script" {
+ name = "init_script"
+ display_name = "Initialization Script"
+ type = "string"
+ form_type = "textarea"
+ default = "#!/bin/bash\necho 'Hello World'"
+}
+```
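+
+`tag-select`: A freeform tag input, the default for `list(string)` parameters without options.
+A minimal sketch (the parameter name is illustrative):
+
+```tf
+data "coder_parameter" "custom_tags" {
+  name         = "custom_tags"
+  display_name = "Custom Tags"
+  type         = "list(string)"
+  form_type    = "tag-select" # Default for list(string) parameters without options
+  default      = jsonencode(["dev"])
+}
+```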
+
+
+
+## Dynamic Parameter Use Case Examples
+
+Conditional Parameters: Region and Instance Types
+
+This example shows only the instance types that are available in the selected region:
+
+```tf
+data "coder_parameter" "region" {
+ name = "region"
+ display_name = "Region"
+ description = "Select a region for your workspace"
+ type = "string"
+ default = "us-east-1"
+
+ option {
+ name = "US East (N. Virginia)"
+ value = "us-east-1"
+ }
+
+ option {
+ name = "US West (Oregon)"
+ value = "us-west-2"
+ }
+}
+
+data "coder_parameter" "instance_type" {
+ name = "instance_type"
+ display_name = "Instance Type"
+ description = "Select an instance type available in the selected region"
+ type = "string"
+
+ # This option will only appear when us-east-1 is selected
+ dynamic "option" {
+ for_each = data.coder_parameter.region.value == "us-east-1" ? [1] : []
+ content {
+ name = "t3.large (US East)"
+ value = "t3.large"
+ }
+ }
+
+ # This option will only appear when us-west-2 is selected
+ dynamic "option" {
+ for_each = data.coder_parameter.region.value == "us-west-2" ? [1] : []
+ content {
+ name = "t3.medium (US West)"
+ value = "t3.medium"
+ }
+ }
+}
+```
+
+
+
+Advanced Options Toggle
+
+This example shows how to create an advanced options section:
+
+```tf
+data "coder_parameter" "show_advanced" {
+ name = "show_advanced"
+ display_name = "Show Advanced Options"
+ description = "Enable to show advanced configuration options"
+ type = "bool"
+ default = false
+ order = 0
+}
+
+data "coder_parameter" "advanced_setting" {
+ # This parameter is only visible when show_advanced is true
+ count = data.coder_parameter.show_advanced.value ? 1 : 0
+ name = "advanced_setting"
+ display_name = "Advanced Setting"
+ description = "An advanced configuration option"
+ type = "string"
+ default = "default_value"
+ mutable = true
+ order = 1
+}
+```
+
+
+
+Multi-select IDE Options
+
+This example allows selecting multiple IDEs to install:
+
+```tf
+data "coder_parameter" "ides" {
+ name = "ides"
+ display_name = "IDEs to Install"
+ description = "Select which IDEs to install in your workspace"
+ type = "list(string)"
+ default = jsonencode(["vscode"])
+ mutable = true
+ form_type = "multi-select"
+
+ option {
+ name = "VS Code"
+ value = "vscode"
+ icon = "/icon/vscode.png"
+ }
+
+ option {
+ name = "JetBrains IntelliJ"
+ value = "intellij"
+ icon = "/icon/intellij.png"
+ }
+
+ option {
+ name = "JupyterLab"
+ value = "jupyter"
+ icon = "/icon/jupyter.png"
+ }
+}
+```
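+
+As with other `list(string)` parameters, the selected values arrive as a JSON-encoded string;
+a hedged sketch of consuming them (the local name is illustrative) is to decode the value into
+a Terraform list:
+
+```tf
+locals {
+  # Decode the JSON-encoded selection into a list for use elsewhere in the template
+  selected_ides = jsondecode(data.coder_parameter.ides.value)
+}
+```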
+
+
+
+Team-specific Resources
+
+This example filters resources based on user group membership:
+
+```tf
+data "coder_parameter" "instance_type" {
+ name = "instance_type"
+ display_name = "Instance Type"
+ description = "Select an instance type for your workspace"
+ type = "string"
+
+ # Show GPU options only if user belongs to the "data-science" group
+ dynamic "option" {
+ for_each = contains(data.coder_workspace_owner.me.groups, "data-science") ? [1] : []
+ content {
+ name = "p3.2xlarge (GPU)"
+ value = "p3.2xlarge"
+ }
+ }
+
+ # Standard options for all users
+ option {
+ name = "t3.medium (Standard)"
+ value = "t3.medium"
+ }
+}
+```
+
+### Advanced Usage Patterns
+
+Creating Branching Paths
+
+For templates serving multiple teams or use cases, you can create comprehensive branching paths:
+
+```tf
+data "coder_parameter" "environment_type" {
+ name = "environment_type"
+ display_name = "Environment Type"
+ description = "Select your preferred development environment"
+ type = "string"
+ default = "container"
+
+ option {
+ name = "Container"
+ value = "container"
+ }
+
+ option {
+ name = "Virtual Machine"
+ value = "vm"
+ }
+}
+
+# Container-specific parameters
+data "coder_parameter" "container_image" {
+ name = "container_image"
+ display_name = "Container Image"
+ description = "Select a container image for your environment"
+ type = "string"
+ default = "ubuntu:latest"
+
+ # Only show when container environment is selected
+ condition {
+ field = data.coder_parameter.environment_type.name
+ value = "container"
+ }
+
+ option {
+ name = "Ubuntu"
+ value = "ubuntu:latest"
+ }
+
+ option {
+ name = "Python"
+ value = "python:3.9"
+ }
+}
+
+# VM-specific parameters
+data "coder_parameter" "vm_image" {
+ name = "vm_image"
+ display_name = "VM Image"
+ description = "Select a VM image for your environment"
+ type = "string"
+ default = "ubuntu-20.04"
+
+ # Only show when VM environment is selected
+ condition {
+ field = data.coder_parameter.environment_type.name
+ value = "vm"
+ }
+
+ option {
+ name = "Ubuntu 20.04"
+ value = "ubuntu-20.04"
+ }
+
+ option {
+ name = "Debian 11"
+ value = "debian-11"
+ }
+}
+```
+
+
+
+Conditional Validation
+
+Adjust validation rules dynamically based on parameter values:
+
+```tf
+data "coder_parameter" "team" {
+ name = "team"
+ display_name = "Team"
+ type = "string"
+ default = "engineering"
+
+ option {
+ name = "Engineering"
+ value = "engineering"
+ }
+
+ option {
+ name = "Data Science"
+ value = "data-science"
+ }
+}
+
+data "coder_parameter" "cpu_count" {
+ name = "cpu_count"
+ display_name = "CPU Count"
+ type = "number"
+ default = 2
+
+ # Engineering team has lower limits
+ dynamic "validation" {
+ for_each = data.coder_parameter.team.value == "engineering" ? [1] : []
+ content {
+ min = 1
+ max = 4
+ }
+ }
+
+ # Data Science team has higher limits
+ dynamic "validation" {
+ for_each = data.coder_parameter.team.value == "data-science" ? [1] : []
+ content {
+ min = 2
+ max = 8
+ }
+ }
+}
+```
+
+
From cc53c4d1d5dc2a2c4842e2f4d50b80be06e347f9 Mon Sep 17 00:00:00 2001
From: Bruno Quaresma
Date: Mon, 19 May 2025 18:38:38 -0300
Subject: [PATCH 14/44] fix: fix devcontainer port button (#17924)
---
site/src/modules/resources/AgentDevcontainerCard.tsx | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/site/src/modules/resources/AgentDevcontainerCard.tsx b/site/src/modules/resources/AgentDevcontainerCard.tsx
index d9a591625b2f8..543004de5c1e2 100644
--- a/site/src/modules/resources/AgentDevcontainerCard.tsx
+++ b/site/src/modules/resources/AgentDevcontainerCard.tsx
@@ -88,7 +88,7 @@ export const AgentDevcontainerCard: FC = ({
return (
-
+
From 9c000468a1b64d35e3b89c0f7ba5710f3d122ff6 Mon Sep 17 00:00:00 2001
From: Steven Masley
Date: Mon, 19 May 2025 16:59:15 -0500
Subject: [PATCH 15/44] chore: expose use_classic_parameter_flow on workspace
response (#17925)
---
cli/testdata/coder_list_--output_json.golden | 1 +
coderd/apidoc/docs.go | 3 +++
coderd/apidoc/swagger.json | 3 +++
coderd/workspaces.go | 1 +
codersdk/workspaces.go | 1 +
docs/reference/api/schemas.md | 3 +++
docs/reference/api/workspaces.md | 6 ++++++
site/src/api/typesGenerated.ts | 1 +
site/src/testHelpers/entities.ts | 1 +
9 files changed, 20 insertions(+)
diff --git a/cli/testdata/coder_list_--output_json.golden b/cli/testdata/coder_list_--output_json.golden
index 5f293787de719..9cdaa98c3f813 100644
--- a/cli/testdata/coder_list_--output_json.golden
+++ b/cli/testdata/coder_list_--output_json.golden
@@ -15,6 +15,7 @@
"template_allow_user_cancel_workspace_jobs": false,
"template_active_version_id": "============[version ID]============",
"template_require_active_version": false,
+ "template_use_classic_parameter_flow": false,
"latest_build": {
"id": "========[workspace build ID]========",
"created_at": "====[timestamp]=====",
diff --git a/coderd/apidoc/docs.go b/coderd/apidoc/docs.go
index 075f33aeac02f..f59fcd308c655 100644
--- a/coderd/apidoc/docs.go
+++ b/coderd/apidoc/docs.go
@@ -17006,6 +17006,9 @@ const docTemplate = `{
"template_require_active_version": {
"type": "boolean"
},
+ "template_use_classic_parameter_flow": {
+ "type": "boolean"
+ },
"ttl_ms": {
"type": "integer"
},
diff --git a/coderd/apidoc/swagger.json b/coderd/apidoc/swagger.json
index e00ab22232483..25f3c2166755d 100644
--- a/coderd/apidoc/swagger.json
+++ b/coderd/apidoc/swagger.json
@@ -15513,6 +15513,9 @@
"template_require_active_version": {
"type": "boolean"
},
+ "template_use_classic_parameter_flow": {
+ "type": "boolean"
+ },
"ttl_ms": {
"type": "integer"
},
diff --git a/coderd/workspaces.go b/coderd/workspaces.go
index 203c9f8599298..35960d1f95a12 100644
--- a/coderd/workspaces.go
+++ b/coderd/workspaces.go
@@ -2259,6 +2259,7 @@ func convertWorkspace(
TemplateAllowUserCancelWorkspaceJobs: template.AllowUserCancelWorkspaceJobs,
TemplateActiveVersionID: template.ActiveVersionID,
TemplateRequireActiveVersion: template.RequireActiveVersion,
+ TemplateUseClassicParameterFlow: template.UseClassicParameterFlow,
Outdated: workspaceBuild.TemplateVersionID.String() != template.ActiveVersionID.String(),
Name: workspace.Name,
AutostartSchedule: autostartSchedule,
diff --git a/codersdk/workspaces.go b/codersdk/workspaces.go
index 311c4bcba35d4..b39b220ca33b8 100644
--- a/codersdk/workspaces.go
+++ b/codersdk/workspaces.go
@@ -41,6 +41,7 @@ type Workspace struct {
TemplateAllowUserCancelWorkspaceJobs bool `json:"template_allow_user_cancel_workspace_jobs"`
TemplateActiveVersionID uuid.UUID `json:"template_active_version_id" format:"uuid"`
TemplateRequireActiveVersion bool `json:"template_require_active_version"`
+ TemplateUseClassicParameterFlow bool `json:"template_use_classic_parameter_flow"`
LatestBuild WorkspaceBuild `json:"latest_build"`
LatestAppStatus *WorkspaceAppStatus `json:"latest_app_status"`
Outdated bool `json:"outdated"`
diff --git a/docs/reference/api/schemas.md b/docs/reference/api/schemas.md
index 91f70950e989e..b35c35361cb1f 100644
--- a/docs/reference/api/schemas.md
+++ b/docs/reference/api/schemas.md
@@ -8416,6 +8416,7 @@ If the schedule is empty, the user will be updated to use the default schedule.|
"template_id": "c6d67e98-83ea-49f0-8812-e4abae2b68bc",
"template_name": "string",
"template_require_active_version": true,
+ "template_use_classic_parameter_flow": true,
"ttl_ms": 0,
"updated_at": "2019-08-24T14:15:22Z"
}
@@ -8452,6 +8453,7 @@ If the schedule is empty, the user will be updated to use the default schedule.|
| `template_id` | string | false | | |
| `template_name` | string | false | | |
| `template_require_active_version` | boolean | false | | |
+| `template_use_classic_parameter_flow` | boolean | false | | |
| `ttl_ms` | integer | false | | |
| `updated_at` | string | false | | |
@@ -10088,6 +10090,7 @@ If the schedule is empty, the user will be updated to use the default schedule.|
"template_id": "c6d67e98-83ea-49f0-8812-e4abae2b68bc",
"template_name": "string",
"template_require_active_version": true,
+ "template_use_classic_parameter_flow": true,
"ttl_ms": 0,
"updated_at": "2019-08-24T14:15:22Z"
}
diff --git a/docs/reference/api/workspaces.md b/docs/reference/api/workspaces.md
index 49377ec14c6fd..241d80ac05f7d 100644
--- a/docs/reference/api/workspaces.md
+++ b/docs/reference/api/workspaces.md
@@ -296,6 +296,7 @@ of the template will be used.
"template_id": "c6d67e98-83ea-49f0-8812-e4abae2b68bc",
"template_name": "string",
"template_require_active_version": true,
+ "template_use_classic_parameter_flow": true,
"ttl_ms": 0,
"updated_at": "2019-08-24T14:15:22Z"
}
@@ -578,6 +579,7 @@ curl -X GET http://coder-server:8080/api/v2/users/{user}/workspace/{workspacenam
"template_id": "c6d67e98-83ea-49f0-8812-e4abae2b68bc",
"template_name": "string",
"template_require_active_version": true,
+ "template_use_classic_parameter_flow": true,
"ttl_ms": 0,
"updated_at": "2019-08-24T14:15:22Z"
}
@@ -886,6 +888,7 @@ of the template will be used.
"template_id": "c6d67e98-83ea-49f0-8812-e4abae2b68bc",
"template_name": "string",
"template_require_active_version": true,
+ "template_use_classic_parameter_flow": true,
"ttl_ms": 0,
"updated_at": "2019-08-24T14:15:22Z"
}
@@ -1154,6 +1157,7 @@ curl -X GET http://coder-server:8080/api/v2/workspaces \
"template_id": "c6d67e98-83ea-49f0-8812-e4abae2b68bc",
"template_name": "string",
"template_require_active_version": true,
+ "template_use_classic_parameter_flow": true,
"ttl_ms": 0,
"updated_at": "2019-08-24T14:15:22Z"
}
@@ -1437,6 +1441,7 @@ curl -X GET http://coder-server:8080/api/v2/workspaces/{workspace} \
"template_id": "c6d67e98-83ea-49f0-8812-e4abae2b68bc",
"template_name": "string",
"template_require_active_version": true,
+ "template_use_classic_parameter_flow": true,
"ttl_ms": 0,
"updated_at": "2019-08-24T14:15:22Z"
}
@@ -1835,6 +1840,7 @@ curl -X PUT http://coder-server:8080/api/v2/workspaces/{workspace}/dormant \
"template_id": "c6d67e98-83ea-49f0-8812-e4abae2b68bc",
"template_name": "string",
"template_require_active_version": true,
+ "template_use_classic_parameter_flow": true,
"ttl_ms": 0,
"updated_at": "2019-08-24T14:15:22Z"
}
diff --git a/site/src/api/typesGenerated.ts b/site/src/api/typesGenerated.ts
index 68cf0940ad8e1..9a73fc9f3d6bf 100644
--- a/site/src/api/typesGenerated.ts
+++ b/site/src/api/typesGenerated.ts
@@ -3246,6 +3246,7 @@ export interface Workspace {
readonly template_allow_user_cancel_workspace_jobs: boolean;
readonly template_active_version_id: string;
readonly template_require_active_version: boolean;
+ readonly template_use_classic_parameter_flow: boolean;
readonly latest_build: WorkspaceBuild;
readonly latest_app_status: WorkspaceAppStatus | null;
readonly outdated: boolean;
diff --git a/site/src/testHelpers/entities.ts b/site/src/testHelpers/entities.ts
index e09b196a82446..1e8d6f3aa7b0b 100644
--- a/site/src/testHelpers/entities.ts
+++ b/site/src/testHelpers/entities.ts
@@ -1410,6 +1410,7 @@ export const MockWorkspace: TypesGen.Workspace = {
MockTemplate.allow_user_cancel_workspace_jobs,
template_active_version_id: MockTemplate.active_version_id,
template_require_active_version: MockTemplate.require_active_version,
+ template_use_classic_parameter_flow: false,
outdated: false,
owner_id: MockUserOwner.id,
organization_id: MockOrganization.id,
From dc21016151389efc502b951e1f8a27405bf993c9 Mon Sep 17 00:00:00 2001
From: Jaayden Halko
Date: Mon, 19 May 2025 23:20:40 +0100
Subject: [PATCH 16/44] fix: get presets working correctly with dynamic params
(#17923)
This adds a few fixes to get presets working correctly with dynamic
params
1. Changes to preset params need to be rendered and displayed correctly
2. Changes to preset params need to be sent to the websocket
3. Changes to preset params need to be marked as touched so they won't
be automatically changed later because of dynamic defaults. Dynamic
defaults means any default parameter value can be changed by the
websocket response unless edited by the user, set by autofill or set by
a preset.
---
.../DynamicParameter/DynamicParameter.tsx | 11 ++-
.../CreateWorkspacePageExperimental.tsx | 2 +-
.../CreateWorkspacePageViewExperimental.tsx | 73 ++++++++++++++++---
3 files changed, 72 insertions(+), 14 deletions(-)
diff --git a/site/src/modules/workspaces/DynamicParameter/DynamicParameter.tsx b/site/src/modules/workspaces/DynamicParameter/DynamicParameter.tsx
index cbc7852bd14e5..94fa3bc383074 100644
--- a/site/src/modules/workspaces/DynamicParameter/DynamicParameter.tsx
+++ b/site/src/modules/workspaces/DynamicParameter/DynamicParameter.tsx
@@ -222,6 +222,15 @@ const DebouncedParameterField: FC = ({
const onChangeEvent = useEffectEvent(onChange);
// prevDebouncedValueRef is to prevent calling the onChangeEvent on the initial render
const prevDebouncedValueRef = useRef();
+ const prevValueRef = useRef(value);
+
+ // This is necessary in the case of fields being set by preset parameters
+ useEffect(() => {
+ if (value !== undefined && value !== prevValueRef.current) {
+ setLocalValue(value);
+ prevValueRef.current = value;
+ }
+ }, [value]);
useEffect(() => {
if (prevDebouncedValueRef.current !== undefined) {
@@ -458,7 +467,7 @@ const ParameterField: FC = ({
{
onChange(value.toString());
}}
diff --git a/site/src/pages/CreateWorkspacePage/CreateWorkspacePageExperimental.tsx b/site/src/pages/CreateWorkspacePage/CreateWorkspacePageExperimental.tsx
index 8268ded111b59..fbb35c61ee047 100644
--- a/site/src/pages/CreateWorkspacePage/CreateWorkspacePageExperimental.tsx
+++ b/site/src/pages/CreateWorkspacePage/CreateWorkspacePageExperimental.tsx
@@ -101,7 +101,7 @@ const CreateWorkspacePageExperimental: FC = () => {
}
}, []);
- // On sends all initial parameter values to the websocket
+ // On page load, sends all initial parameter values to the websocket
// (including defaults and autofilled from the url)
// This ensures the backend has the complete initial state of the form,
// which is vital for correctly rendering dynamic UI elements where parameter visibility
diff --git a/site/src/pages/CreateWorkspacePage/CreateWorkspacePageViewExperimental.tsx b/site/src/pages/CreateWorkspacePage/CreateWorkspacePageViewExperimental.tsx
index 434cd23fb9a92..630faf8e806d2 100644
--- a/site/src/pages/CreateWorkspacePage/CreateWorkspacePageViewExperimental.tsx
+++ b/site/src/pages/CreateWorkspacePage/CreateWorkspacePageViewExperimental.tsx
@@ -213,6 +213,15 @@ export const CreateWorkspacePageViewExperimental: FC<
setPresetParameterNames(selectedPreset.Parameters.map((p) => p.Name));
+ const currentValues = form.values.rich_parameter_values ?? [];
+
+ const updates: Array<{
+ field: string;
+ fieldValue: TypesGen.WorkspaceBuildParameter;
+ parameter: PreviewParameter;
+ presetValue: string;
+ }> = [];
+
for (const presetParameter of selectedPreset.Parameters) {
const parameterIndex = parameters.findIndex(
(p) => p.name === presetParameter.Name,
@@ -220,32 +229,64 @@ export const CreateWorkspacePageViewExperimental: FC<
if (parameterIndex === -1) continue;
const parameterField = `rich_parameter_values.${parameterIndex}`;
+ const parameter = parameters[parameterIndex];
+ const currentValue = currentValues.find(
+ (p) => p.name === presetParameter.Name,
+ )?.value;
+
+ if (currentValue !== presetParameter.Value) {
+ updates.push({
+ field: parameterField,
+ fieldValue: {
+ name: presetParameter.Name,
+ value: presetParameter.Value,
+ },
+ parameter,
+ presetValue: presetParameter.Value,
+ });
+ }
+ }
- form.setFieldValue(parameterField, {
- name: presetParameter.Name,
- value: presetParameter.Value,
- });
+ if (updates.length > 0) {
+ for (const update of updates) {
+ form.setFieldValue(update.field, update.fieldValue);
+ form.setFieldTouched(update.parameter.name, true);
+ }
+
+ sendDynamicParamsRequest(
+ updates.map((update) => ({
+ parameter: update.parameter,
+ value: update.presetValue,
+ })),
+ );
}
}, [
presetOptions,
selectedPresetIndex,
presets,
form.setFieldValue,
+ form.setFieldTouched,
parameters,
+ form.values.rich_parameter_values,
]);
// send the last user modified parameter and all touched parameters to the websocket
const sendDynamicParamsRequest = (
- parameter: PreviewParameter,
- value: string,
+ parameters: Array<{ parameter: PreviewParameter; value: string }>,
) => {
const formInputs: Record = {};
- formInputs[parameter.name] = value;
- const parameters = form.values.rich_parameter_values ?? [];
+ const formParameters = form.values.rich_parameter_values ?? [];
+
+ for (const { parameter, value } of parameters) {
+ formInputs[parameter.name] = value;
+ }
for (const [fieldName, isTouched] of Object.entries(form.touched)) {
- if (isTouched && fieldName !== parameter.name) {
- const param = parameters.find((p) => p.name === fieldName);
+ if (
+ isTouched &&
+ !parameters.some((p) => p.parameter.name === fieldName)
+ ) {
+ const param = formParameters.find((p) => p.name === fieldName);
if (param?.value) {
formInputs[fieldName] = param.value;
}
@@ -260,12 +301,20 @@ export const CreateWorkspacePageViewExperimental: FC<
parameterField: string,
value: string,
) => {
+ const currentFormValue = form.values.rich_parameter_values?.find(
+ (p) => p.name === parameter.name,
+ )?.value;
+
await form.setFieldValue(parameterField, {
name: parameter.name,
value,
});
- form.setFieldTouched(parameter.name, true);
- sendDynamicParamsRequest(parameter, value);
+
+ // Only send the request if the value has changed from the form value
+ if (currentFormValue !== value) {
+ form.setFieldTouched(parameter.name, true);
+ sendDynamicParamsRequest([{ parameter, value }]);
+ }
};
useSyncFormParameters({
From e5758a12c778e461a71dbab30ee7a07809e15c7c Mon Sep 17 00:00:00 2001
From: Ethan <39577870+ethanndickson@users.noreply.github.com>
Date: Tue, 20 May 2025 14:25:13 +1000
Subject: [PATCH 17/44] fix(site): center `/cli-auth` on firefox (#17929)
`-webkit-fill-available` is not available in Firefox: https://caniuse.com/mdn-css_properties_height_stretch
`-moz-available` doesn't work on `height`, so we have to use `100vh`.
Before:
After:
The existing CSS is retained in browsers that support `-webkit-fill-available`, i.e. chrome:
---
site/src/components/SignInLayout/SignInLayout.tsx | 3 ++-
site/src/pages/CliInstallPage/CliInstallPageView.tsx | 3 ++-
2 files changed, 4 insertions(+), 2 deletions(-)
diff --git a/site/src/components/SignInLayout/SignInLayout.tsx b/site/src/components/SignInLayout/SignInLayout.tsx
index 6a0d4f5865ea1..c557fd3b4c797 100644
--- a/site/src/components/SignInLayout/SignInLayout.tsx
+++ b/site/src/components/SignInLayout/SignInLayout.tsx
@@ -17,7 +17,8 @@ export const SignInLayout: FC = ({ children }) => {
const styles = {
container: {
flex: 1,
- height: "-webkit-fill-available",
+ // Fallback to 100vh
+ height: ["100vh", "-webkit-fill-available"],
display: "flex",
justifyContent: "center",
alignItems: "center",
diff --git a/site/src/pages/CliInstallPage/CliInstallPageView.tsx b/site/src/pages/CliInstallPage/CliInstallPageView.tsx
index 9356cee6153b3..db77abcb28f04 100644
--- a/site/src/pages/CliInstallPage/CliInstallPageView.tsx
+++ b/site/src/pages/CliInstallPage/CliInstallPageView.tsx
@@ -39,7 +39,8 @@ export const CliInstallPageView: FC = ({ origin }) => {
const styles = {
container: {
flex: 1,
- height: "-webkit-fill-available",
+ // Fallback to 100vh
+ height: ["100vh", "-webkit-fill-available"],
display: "flex",
flexDirection: "column",
justifyContent: "center",
From 613117bde29cba74127ebe2e32ceeb46ade06bb5 Mon Sep 17 00:00:00 2001
From: Sas Swart
Date: Tue, 20 May 2025 14:45:26 +0200
Subject: [PATCH 18/44] chore: add presets with prebuilds to our dogfood
template (#17933)
This PR adds a preset with prebuilds for each region to our dogfood
template. Creating a workspace based on a preset should now save time
compared to creating a workspace from scratch
---
dogfood/coder/main.tf | 83 +++++++++++++++++++++++++++++++++++++++++++
1 file changed, 83 insertions(+)
diff --git a/dogfood/coder/main.tf b/dogfood/coder/main.tf
index e21602a26e922..06da4d79c549a 100644
--- a/dogfood/coder/main.tf
+++ b/dogfood/coder/main.tf
@@ -30,6 +30,81 @@ locals {
container_name = "coder-${data.coder_workspace_owner.me.name}-${lower(data.coder_workspace.me.name)}"
}
+data "coder_workspace_preset" "cpt" {
+ name = "Cape Town"
+ parameters = {
+ (data.coder_parameter.region.name) = "za-cpt"
+ (data.coder_parameter.image_type.name) = "codercom/oss-dogfood:latest"
+ (data.coder_parameter.repo_base_dir.name) = "~"
+ (data.coder_parameter.res_mon_memory_threshold.name) = 80
+ (data.coder_parameter.res_mon_volume_threshold.name) = 90
+ (data.coder_parameter.res_mon_volume_path.name) = "/home/coder"
+ }
+ prebuilds {
+ instances = 1
+ }
+}
+
+data "coder_workspace_preset" "pittsburgh" {
+ name = "Pittsburgh"
+ parameters = {
+ (data.coder_parameter.region.name) = "us-pittsburgh"
+ (data.coder_parameter.image_type.name) = "codercom/oss-dogfood:latest"
+ (data.coder_parameter.repo_base_dir.name) = "~"
+ (data.coder_parameter.res_mon_memory_threshold.name) = 80
+ (data.coder_parameter.res_mon_volume_threshold.name) = 90
+ (data.coder_parameter.res_mon_volume_path.name) = "/home/coder"
+ }
+ prebuilds {
+ instances = 2
+ }
+}
+
+data "coder_workspace_preset" "falkenstein" {
+ name = "Falkenstein"
+ parameters = {
+ (data.coder_parameter.region.name) = "eu-helsinki"
+ (data.coder_parameter.image_type.name) = "codercom/oss-dogfood:latest"
+ (data.coder_parameter.repo_base_dir.name) = "~"
+ (data.coder_parameter.res_mon_memory_threshold.name) = 80
+ (data.coder_parameter.res_mon_volume_threshold.name) = 90
+ (data.coder_parameter.res_mon_volume_path.name) = "/home/coder"
+ }
+ prebuilds {
+ instances = 1
+ }
+}
+
+data "coder_workspace_preset" "sydney" {
+ name = "Sydney"
+ parameters = {
+ (data.coder_parameter.region.name) = "ap-sydney"
+ (data.coder_parameter.image_type.name) = "codercom/oss-dogfood:latest"
+ (data.coder_parameter.repo_base_dir.name) = "~"
+ (data.coder_parameter.res_mon_memory_threshold.name) = 80
+ (data.coder_parameter.res_mon_volume_threshold.name) = 90
+ (data.coder_parameter.res_mon_volume_path.name) = "/home/coder"
+ }
+ prebuilds {
+ instances = 1
+ }
+}
+
+data "coder_workspace_preset" "saopaulo" {
+ name = "São Paulo"
+ parameters = {
+ (data.coder_parameter.region.name) = "sa-saopaulo"
+ (data.coder_parameter.image_type.name) = "codercom/oss-dogfood:latest"
+ (data.coder_parameter.repo_base_dir.name) = "~"
+ (data.coder_parameter.res_mon_memory_threshold.name) = 80
+ (data.coder_parameter.res_mon_volume_threshold.name) = 90
+ (data.coder_parameter.res_mon_volume_path.name) = "/home/coder"
+ }
+ prebuilds {
+ instances = 1
+ }
+}
+
data "coder_parameter" "repo_base_dir" {
type = "string"
name = "Coder Repository Base Directory"
@@ -438,6 +513,14 @@ resource "docker_image" "dogfood" {
}
resource "docker_container" "workspace" {
+ lifecycle {
+ // Ignore changes that would invalidate prebuilds
+ ignore_changes = [
+ name,
+ hostname,
+ labels,
+ ]
+ }
count = data.coder_workspace.me.start_count
image = docker_image.dogfood.name
name = local.container_name
From 769c9ee3372c45dea1085eb5c663363cdf14bf65 Mon Sep 17 00:00:00 2001
From: Michael Suchacz <203725896+ibetitsmike@users.noreply.github.com>
Date: Tue, 20 May 2025 15:22:44 +0200
Subject: [PATCH 19/44] feat: cancel stuck pending jobs (#17803)
Closes: #16488
---
cli/server.go | 12 +-
cli/testdata/server-config.yaml.golden | 2 +-
coderd/coderdtest/coderdtest.go | 12 +-
coderd/database/dbauthz/dbauthz.go | 139 +++++----
coderd/database/dbauthz/dbauthz_test.go | 51 ++--
coderd/database/dbmem/dbmem.go | 76 +++--
coderd/database/dbmetrics/querymetrics.go | 28 +-
coderd/database/dbmock/dbmock.go | 59 +++-
coderd/database/querier.go | 7 +-
coderd/database/queries.sql.go | 189 +++++++++----
coderd/database/queries/provisionerjobs.sql | 45 ++-
coderd/httpmw/loggermw/logger.go | 2 +-
coderd/{unhanger => jobreaper}/detector.go | 146 ++++++----
.../{unhanger => jobreaper}/detector_test.go | 264 ++++++++++++++++--
coderd/rbac/authz.go | 2 +-
coderd/rbac/object_gen.go | 2 +
coderd/rbac/policy/policy.go | 4 +-
coderd/rbac/roles.go | 2 +-
coderd/rbac/roles_test.go | 2 +-
codersdk/deployment.go | 8 +-
codersdk/rbacresources_gen.go | 2 +-
provisioner/terraform/serve.go | 8 +-
site/src/api/rbacresourcesGenerated.ts | 2 +
23 files changed, 773 insertions(+), 291 deletions(-)
rename coderd/{unhanger => jobreaper}/detector.go (72%)
rename coderd/{unhanger => jobreaper}/detector_test.go (73%)
diff --git a/cli/server.go b/cli/server.go
index c5532e07e7a81..59993b55771a9 100644
--- a/cli/server.go
+++ b/cli/server.go
@@ -87,6 +87,7 @@ import (
"github.com/coder/coder/v2/coderd/externalauth"
"github.com/coder/coder/v2/coderd/gitsshkey"
"github.com/coder/coder/v2/coderd/httpmw"
+ "github.com/coder/coder/v2/coderd/jobreaper"
"github.com/coder/coder/v2/coderd/notifications"
"github.com/coder/coder/v2/coderd/oauthpki"
"github.com/coder/coder/v2/coderd/prometheusmetrics"
@@ -95,7 +96,6 @@ import (
"github.com/coder/coder/v2/coderd/schedule"
"github.com/coder/coder/v2/coderd/telemetry"
"github.com/coder/coder/v2/coderd/tracing"
- "github.com/coder/coder/v2/coderd/unhanger"
"github.com/coder/coder/v2/coderd/updatecheck"
"github.com/coder/coder/v2/coderd/util/ptr"
"github.com/coder/coder/v2/coderd/util/slice"
@@ -1127,11 +1127,11 @@ func (r *RootCmd) Server(newAPI func(context.Context, *coderd.Options) (*coderd.
ctx, options.Database, options.Pubsub, options.PrometheusRegistry, coderAPI.TemplateScheduleStore, &coderAPI.Auditor, coderAPI.AccessControlStore, logger, autobuildTicker.C, options.NotificationsEnqueuer)
autobuildExecutor.Run()
- hangDetectorTicker := time.NewTicker(vals.JobHangDetectorInterval.Value())
- defer hangDetectorTicker.Stop()
- hangDetector := unhanger.New(ctx, options.Database, options.Pubsub, logger, hangDetectorTicker.C)
- hangDetector.Start()
- defer hangDetector.Close()
+ jobReaperTicker := time.NewTicker(vals.JobReaperDetectorInterval.Value())
+ defer jobReaperTicker.Stop()
+ jobReaper := jobreaper.New(ctx, options.Database, options.Pubsub, logger, jobReaperTicker.C)
+ jobReaper.Start()
+ defer jobReaper.Close()
waitForProvisionerJobs := false
// Currently there is no way to ask the server to shut
diff --git a/cli/testdata/server-config.yaml.golden b/cli/testdata/server-config.yaml.golden
index fc76a6c2ec8a0..9995a7f389130 100644
--- a/cli/testdata/server-config.yaml.golden
+++ b/cli/testdata/server-config.yaml.golden
@@ -183,7 +183,7 @@ networking:
# Interval to poll for scheduled workspace builds.
# (default: 1m0s, type: duration)
autobuildPollInterval: 1m0s
-# Interval to poll for hung jobs and automatically terminate them.
+# Interval to poll for hung and pending jobs and automatically terminate them.
# (default: 1m0s, type: duration)
jobHangDetectorInterval: 1m0s
introspection:
diff --git a/coderd/coderdtest/coderdtest.go b/coderd/coderdtest/coderdtest.go
index b395a2cf2afbe..90a29e0f0d876 100644
--- a/coderd/coderdtest/coderdtest.go
+++ b/coderd/coderdtest/coderdtest.go
@@ -68,6 +68,7 @@ import (
"github.com/coder/coder/v2/coderd/externalauth"
"github.com/coder/coder/v2/coderd/gitsshkey"
"github.com/coder/coder/v2/coderd/httpmw"
+ "github.com/coder/coder/v2/coderd/jobreaper"
"github.com/coder/coder/v2/coderd/notifications"
"github.com/coder/coder/v2/coderd/notifications/notificationstest"
"github.com/coder/coder/v2/coderd/rbac"
@@ -75,7 +76,6 @@ import (
"github.com/coder/coder/v2/coderd/runtimeconfig"
"github.com/coder/coder/v2/coderd/schedule"
"github.com/coder/coder/v2/coderd/telemetry"
- "github.com/coder/coder/v2/coderd/unhanger"
"github.com/coder/coder/v2/coderd/updatecheck"
"github.com/coder/coder/v2/coderd/util/ptr"
"github.com/coder/coder/v2/coderd/webpush"
@@ -368,11 +368,11 @@ func NewOptions(t testing.TB, options *Options) (func(http.Handler), context.Can
).WithStatsChannel(options.AutobuildStats)
lifecycleExecutor.Run()
- hangDetectorTicker := time.NewTicker(options.DeploymentValues.JobHangDetectorInterval.Value())
- defer hangDetectorTicker.Stop()
- hangDetector := unhanger.New(ctx, options.Database, options.Pubsub, options.Logger.Named("unhanger.detector"), hangDetectorTicker.C)
- hangDetector.Start()
- t.Cleanup(hangDetector.Close)
+ jobReaperTicker := time.NewTicker(options.DeploymentValues.JobReaperDetectorInterval.Value())
+ defer jobReaperTicker.Stop()
+ jobReaper := jobreaper.New(ctx, options.Database, options.Pubsub, options.Logger.Named("reaper.detector"), jobReaperTicker.C)
+ jobReaper.Start()
+ t.Cleanup(jobReaper.Close)
if options.TelemetryReporter == nil {
options.TelemetryReporter = telemetry.NewNoop()
diff --git a/coderd/database/dbauthz/dbauthz.go b/coderd/database/dbauthz/dbauthz.go
index 928dee0e30ea3..20afcf66c7867 100644
--- a/coderd/database/dbauthz/dbauthz.go
+++ b/coderd/database/dbauthz/dbauthz.go
@@ -170,10 +170,10 @@ var (
Identifier: rbac.RoleIdentifier{Name: "provisionerd"},
DisplayName: "Provisioner Daemon",
Site: rbac.Permissions(map[string][]policy.Action{
- // TODO: Add ProvisionerJob resource type.
- rbac.ResourceFile.Type: {policy.ActionRead},
- rbac.ResourceSystem.Type: {policy.WildcardSymbol},
- rbac.ResourceTemplate.Type: {policy.ActionRead, policy.ActionUpdate},
+ rbac.ResourceProvisionerJobs.Type: {policy.ActionRead, policy.ActionUpdate, policy.ActionCreate},
+ rbac.ResourceFile.Type: {policy.ActionRead},
+ rbac.ResourceSystem.Type: {policy.WildcardSymbol},
+ rbac.ResourceTemplate.Type: {policy.ActionRead, policy.ActionUpdate},
// Unsure why provisionerd needs update and read personal
rbac.ResourceUser.Type: {policy.ActionRead, policy.ActionReadPersonal, policy.ActionUpdatePersonal},
rbac.ResourceWorkspaceDormant.Type: {policy.ActionDelete, policy.ActionRead, policy.ActionUpdate, policy.ActionWorkspaceStop},
@@ -219,19 +219,20 @@ var (
Scope: rbac.ScopeAll,
}.WithCachedASTValue()
- // See unhanger package.
- subjectHangDetector = rbac.Subject{
- Type: rbac.SubjectTypeHangDetector,
- FriendlyName: "Hang Detector",
+ // See reaper package.
+ subjectJobReaper = rbac.Subject{
+ Type: rbac.SubjectTypeJobReaper,
+ FriendlyName: "Job Reaper",
ID: uuid.Nil.String(),
Roles: rbac.Roles([]rbac.Role{
{
- Identifier: rbac.RoleIdentifier{Name: "hangdetector"},
- DisplayName: "Hang Detector Daemon",
+ Identifier: rbac.RoleIdentifier{Name: "jobreaper"},
+ DisplayName: "Job Reaper Daemon",
Site: rbac.Permissions(map[string][]policy.Action{
- rbac.ResourceSystem.Type: {policy.WildcardSymbol},
- rbac.ResourceTemplate.Type: {policy.ActionRead},
- rbac.ResourceWorkspace.Type: {policy.ActionRead, policy.ActionUpdate},
+ rbac.ResourceSystem.Type: {policy.WildcardSymbol},
+ rbac.ResourceTemplate.Type: {policy.ActionRead},
+ rbac.ResourceWorkspace.Type: {policy.ActionRead, policy.ActionUpdate},
+ rbac.ResourceProvisionerJobs.Type: {policy.ActionRead, policy.ActionUpdate},
}),
Org: map[string][]rbac.Permission{},
User: []rbac.Permission{},
@@ -346,6 +347,7 @@ var (
rbac.ResourceNotificationTemplate.Type: {policy.ActionCreate, policy.ActionUpdate, policy.ActionDelete},
rbac.ResourceCryptoKey.Type: {policy.ActionCreate, policy.ActionUpdate, policy.ActionDelete},
rbac.ResourceFile.Type: {policy.ActionCreate, policy.ActionRead},
+ rbac.ResourceProvisionerJobs.Type: {policy.ActionRead, policy.ActionUpdate, policy.ActionCreate},
}),
Org: map[string][]rbac.Permission{},
User: []rbac.Permission{},
@@ -407,10 +409,10 @@ func AsAutostart(ctx context.Context) context.Context {
return As(ctx, subjectAutostart)
}
-// AsHangDetector returns a context with an actor that has permissions required
-// for unhanger.Detector to function.
-func AsHangDetector(ctx context.Context) context.Context {
- return As(ctx, subjectHangDetector)
+// AsJobReaper returns a context with an actor that has permissions required
+// for reaper.Detector to function.
+func AsJobReaper(ctx context.Context) context.Context {
+ return As(ctx, subjectJobReaper)
}
// AsKeyRotator returns a context with an actor that has permissions required for rotating crypto keys.
@@ -1085,11 +1087,10 @@ func (q *querier) AcquireNotificationMessages(ctx context.Context, arg database.
return q.db.AcquireNotificationMessages(ctx, arg)
}
-// TODO: We need to create a ProvisionerJob resource type
func (q *querier) AcquireProvisionerJob(ctx context.Context, arg database.AcquireProvisionerJobParams) (database.ProvisionerJob, error) {
- // if err := q.authorizeContext(ctx, policy.ActionUpdate, rbac.ResourceSystem); err != nil {
- // return database.ProvisionerJob{}, err
- // }
+ if err := q.authorizeContext(ctx, policy.ActionUpdate, rbac.ResourceProvisionerJobs); err != nil {
+ return database.ProvisionerJob{}, err
+ }
return q.db.AcquireProvisionerJob(ctx, arg)
}
@@ -1912,14 +1913,6 @@ func (q *querier) GetHealthSettings(ctx context.Context) (string, error) {
return q.db.GetHealthSettings(ctx)
}
-// TODO: We need to create a ProvisionerJob resource type
-func (q *querier) GetHungProvisionerJobs(ctx context.Context, hungSince time.Time) ([]database.ProvisionerJob, error) {
- // if err := q.authorizeContext(ctx, policy.ActionCreate, rbac.ResourceSystem); err != nil {
- // return nil, err
- // }
- return q.db.GetHungProvisionerJobs(ctx, hungSince)
-}
-
func (q *querier) GetInboxNotificationByID(ctx context.Context, id uuid.UUID) (database.InboxNotification, error) {
return fetchWithAction(q.log, q.auth, policy.ActionRead, q.db.GetInboxNotificationByID)(ctx, id)
}
@@ -2307,6 +2300,13 @@ func (q *querier) GetProvisionerJobByID(ctx context.Context, id uuid.UUID) (data
return job, nil
}
+func (q *querier) GetProvisionerJobByIDForUpdate(ctx context.Context, id uuid.UUID) (database.ProvisionerJob, error) {
+ if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceProvisionerJobs); err != nil {
+ return database.ProvisionerJob{}, err
+ }
+ return q.db.GetProvisionerJobByIDForUpdate(ctx, id)
+}
+
func (q *querier) GetProvisionerJobTimingsByJobID(ctx context.Context, jobID uuid.UUID) ([]database.ProvisionerJobTiming, error) {
_, err := q.GetProvisionerJobByID(ctx, jobID)
if err != nil {
@@ -2315,31 +2315,49 @@ func (q *querier) GetProvisionerJobTimingsByJobID(ctx context.Context, jobID uui
return q.db.GetProvisionerJobTimingsByJobID(ctx, jobID)
}
-// TODO: We have a ProvisionerJobs resource, but it hasn't been checked for this use-case.
func (q *querier) GetProvisionerJobsByIDs(ctx context.Context, ids []uuid.UUID) ([]database.ProvisionerJob, error) {
- // if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceSystem); err != nil {
- // return nil, err
- // }
- return q.db.GetProvisionerJobsByIDs(ctx, ids)
+ provisionerJobs, err := q.db.GetProvisionerJobsByIDs(ctx, ids)
+ if err != nil {
+ return nil, err
+ }
+ orgIDs := make(map[uuid.UUID]struct{})
+ for _, job := range provisionerJobs {
+ orgIDs[job.OrganizationID] = struct{}{}
+ }
+ for orgID := range orgIDs {
+ if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceProvisionerJobs.InOrg(orgID)); err != nil {
+ return nil, err
+ }
+ }
+ return provisionerJobs, nil
}
-// TODO: We have a ProvisionerJobs resource, but it hasn't been checked for this use-case.
func (q *querier) GetProvisionerJobsByIDsWithQueuePosition(ctx context.Context, ids []uuid.UUID) ([]database.GetProvisionerJobsByIDsWithQueuePositionRow, error) {
+ // TODO: Remove this once we have a proper rbac check for provisioner jobs.
+ // Details in https://github.com/coder/coder/issues/16160
return q.db.GetProvisionerJobsByIDsWithQueuePosition(ctx, ids)
}
func (q *querier) GetProvisionerJobsByOrganizationAndStatusWithQueuePositionAndProvisioner(ctx context.Context, arg database.GetProvisionerJobsByOrganizationAndStatusWithQueuePositionAndProvisionerParams) ([]database.GetProvisionerJobsByOrganizationAndStatusWithQueuePositionAndProvisionerRow, error) {
+ // TODO: Remove this once we have a proper rbac check for provisioner jobs.
+ // Details in https://github.com/coder/coder/issues/16160
return fetchWithPostFilter(q.auth, policy.ActionRead, q.db.GetProvisionerJobsByOrganizationAndStatusWithQueuePositionAndProvisioner)(ctx, arg)
}
-// TODO: We have a ProvisionerJobs resource, but it hasn't been checked for this use-case.
func (q *querier) GetProvisionerJobsCreatedAfter(ctx context.Context, createdAt time.Time) ([]database.ProvisionerJob, error) {
- // if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceSystem); err != nil {
- // return nil, err
- // }
+ if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceProvisionerJobs); err != nil {
+ return nil, err
+ }
return q.db.GetProvisionerJobsCreatedAfter(ctx, createdAt)
}
+func (q *querier) GetProvisionerJobsToBeReaped(ctx context.Context, arg database.GetProvisionerJobsToBeReapedParams) ([]database.ProvisionerJob, error) {
+ if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceProvisionerJobs); err != nil {
+ return nil, err
+ }
+ return q.db.GetProvisionerJobsToBeReaped(ctx, arg)
+}
+
func (q *querier) GetProvisionerKeyByHashedSecret(ctx context.Context, hashedSecret []byte) (database.ProvisionerKey, error) {
return fetch(q.log, q.auth, q.db.GetProvisionerKeyByHashedSecret)(ctx, hashedSecret)
}
@@ -3533,27 +3551,22 @@ func (q *querier) InsertPresetParameters(ctx context.Context, arg database.Inser
return q.db.InsertPresetParameters(ctx, arg)
}
-// TODO: We need to create a ProvisionerJob resource type
func (q *querier) InsertProvisionerJob(ctx context.Context, arg database.InsertProvisionerJobParams) (database.ProvisionerJob, error) {
- // if err := q.authorizeContext(ctx, policy.ActionCreate, rbac.ResourceSystem); err != nil {
- // return database.ProvisionerJob{}, err
- // }
+ // TODO: Remove this once we have a proper rbac check for provisioner jobs.
+ // Details in https://github.com/coder/coder/issues/16160
return q.db.InsertProvisionerJob(ctx, arg)
}
-// TODO: We need to create a ProvisionerJob resource type
func (q *querier) InsertProvisionerJobLogs(ctx context.Context, arg database.InsertProvisionerJobLogsParams) ([]database.ProvisionerJobLog, error) {
- // if err := q.authorizeContext(ctx, policy.ActionCreate, rbac.ResourceSystem); err != nil {
- // return nil, err
- // }
+ // TODO: Remove this once we have a proper rbac check for provisioner jobs.
+ // Details in https://github.com/coder/coder/issues/16160
return q.db.InsertProvisionerJobLogs(ctx, arg)
}
-// TODO: We need to create a ProvisionerJob resource type
func (q *querier) InsertProvisionerJobTimings(ctx context.Context, arg database.InsertProvisionerJobTimingsParams) ([]database.ProvisionerJobTiming, error) {
- // if err := q.authorizeContext(ctx, policy.ActionCreate, rbac.ResourceSystem); err != nil {
- // return nil, err
- // }
+ if err := q.authorizeContext(ctx, policy.ActionUpdate, rbac.ResourceProvisionerJobs); err != nil {
+ return nil, err
+ }
return q.db.InsertProvisionerJobTimings(ctx, arg)
}
@@ -4176,15 +4189,17 @@ func (q *querier) UpdateProvisionerDaemonLastSeenAt(ctx context.Context, arg dat
return q.db.UpdateProvisionerDaemonLastSeenAt(ctx, arg)
}
-// TODO: We need to create a ProvisionerJob resource type
func (q *querier) UpdateProvisionerJobByID(ctx context.Context, arg database.UpdateProvisionerJobByIDParams) error {
- // if err := q.authorizeContext(ctx, policy.ActionUpdate, rbac.ResourceSystem); err != nil {
- // return err
- // }
+ if err := q.authorizeContext(ctx, policy.ActionUpdate, rbac.ResourceProvisionerJobs); err != nil {
+ return err
+ }
return q.db.UpdateProvisionerJobByID(ctx, arg)
}
func (q *querier) UpdateProvisionerJobWithCancelByID(ctx context.Context, arg database.UpdateProvisionerJobWithCancelByIDParams) error {
+ // TODO: Remove this once we have a proper rbac check for provisioner jobs.
+ // Details in https://github.com/coder/coder/issues/16160
+
job, err := q.db.GetProvisionerJobByID(ctx, arg.ID)
if err != nil {
return err
@@ -4251,14 +4266,20 @@ func (q *querier) UpdateProvisionerJobWithCancelByID(ctx context.Context, arg da
return q.db.UpdateProvisionerJobWithCancelByID(ctx, arg)
}
-// TODO: We need to create a ProvisionerJob resource type
func (q *querier) UpdateProvisionerJobWithCompleteByID(ctx context.Context, arg database.UpdateProvisionerJobWithCompleteByIDParams) error {
- // if err := q.authorizeContext(ctx, policy.ActionUpdate, rbac.ResourceSystem); err != nil {
- // return err
- // }
+ if err := q.authorizeContext(ctx, policy.ActionUpdate, rbac.ResourceProvisionerJobs); err != nil {
+ return err
+ }
return q.db.UpdateProvisionerJobWithCompleteByID(ctx, arg)
}
+func (q *querier) UpdateProvisionerJobWithCompleteWithStartedAtByID(ctx context.Context, arg database.UpdateProvisionerJobWithCompleteWithStartedAtByIDParams) error {
+ if err := q.authorizeContext(ctx, policy.ActionUpdate, rbac.ResourceProvisionerJobs); err != nil {
+ return err
+ }
+ return q.db.UpdateProvisionerJobWithCompleteWithStartedAtByID(ctx, arg)
+}
+
func (q *querier) UpdateReplica(ctx context.Context, arg database.UpdateReplicaParams) (database.Replica, error) {
if err := q.authorizeContext(ctx, policy.ActionUpdate, rbac.ResourceSystem); err != nil {
return database.Replica{}, err
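The GetProvisionerJobsByIDs change above authorizes once per distinct organization rather than once per returned job. A minimal sketch of that dedup-then-authorize pattern, with a hypothetical authorize callback standing in for the querier's authorizeContext call (assumes the usual coderd/database and google/uuid imports):

func authorizeJobsPerOrg(
	ctx context.Context,
	jobs []database.ProvisionerJob,
	authorize func(ctx context.Context, orgID uuid.UUID) error,
) error {
	// Check each distinct organization exactly once, no matter how many
	// jobs it owns.
	seen := make(map[uuid.UUID]struct{})
	for _, job := range jobs {
		if _, ok := seen[job.OrganizationID]; ok {
			continue
		}
		seen[job.OrganizationID] = struct{}{}
		if err := authorize(ctx, job.OrganizationID); err != nil {
			return err
		}
	}
	return nil
}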
diff --git a/coderd/database/dbauthz/dbauthz_test.go b/coderd/database/dbauthz/dbauthz_test.go
index a0289f222392b..1e4b4ea879b77 100644
--- a/coderd/database/dbauthz/dbauthz_test.go
+++ b/coderd/database/dbauthz/dbauthz_test.go
@@ -694,9 +694,12 @@ func (s *MethodTestSuite) TestProvisionerJob() {
Asserts(v.RBACObject(tpl), []policy.Action{policy.ActionRead, policy.ActionUpdate}).Returns()
}))
s.Run("GetProvisionerJobsByIDs", s.Subtest(func(db database.Store, check *expects) {
- a := dbgen.ProvisionerJob(s.T(), db, nil, database.ProvisionerJob{})
- b := dbgen.ProvisionerJob(s.T(), db, nil, database.ProvisionerJob{})
- check.Args([]uuid.UUID{a.ID, b.ID}).Asserts().Returns(slice.New(a, b))
+ o := dbgen.Organization(s.T(), db, database.Organization{})
+ a := dbgen.ProvisionerJob(s.T(), db, nil, database.ProvisionerJob{OrganizationID: o.ID})
+ b := dbgen.ProvisionerJob(s.T(), db, nil, database.ProvisionerJob{OrganizationID: o.ID})
+ check.Args([]uuid.UUID{a.ID, b.ID}).
+ Asserts(rbac.ResourceProvisionerJobs.InOrg(o.ID), policy.ActionRead).
+ Returns(slice.New(a, b))
}))
s.Run("GetProvisionerLogsAfterID", s.Subtest(func(db database.Store, check *expects) {
u := dbgen.User(s.T(), db, database.User{})
@@ -3923,9 +3926,8 @@ func (s *MethodTestSuite) TestSystemFunctions() {
check.Args().Asserts(rbac.ResourceSystem, policy.ActionDelete)
}))
s.Run("GetProvisionerJobsCreatedAfter", s.Subtest(func(db database.Store, check *expects) {
- // TODO: add provisioner job resource type
_ = dbgen.ProvisionerJob(s.T(), db, nil, database.ProvisionerJob{CreatedAt: time.Now().Add(-time.Hour)})
- check.Args(time.Now()).Asserts( /*rbac.ResourceSystem, policy.ActionRead*/ )
+ check.Args(time.Now()).Asserts(rbac.ResourceProvisionerJobs, policy.ActionRead)
}))
s.Run("GetTemplateVersionsByIDs", s.Subtest(func(db database.Store, check *expects) {
dbtestutil.DisableForeignKeysAndTriggers(s.T(), db)
@@ -4008,11 +4010,11 @@ func (s *MethodTestSuite) TestSystemFunctions() {
Returns([]database.WorkspaceAgent{agt})
}))
s.Run("GetProvisionerJobsByIDs", s.Subtest(func(db database.Store, check *expects) {
- // TODO: add a ProvisionerJob resource type
- a := dbgen.ProvisionerJob(s.T(), db, nil, database.ProvisionerJob{})
- b := dbgen.ProvisionerJob(s.T(), db, nil, database.ProvisionerJob{})
+ o := dbgen.Organization(s.T(), db, database.Organization{})
+ a := dbgen.ProvisionerJob(s.T(), db, nil, database.ProvisionerJob{OrganizationID: o.ID})
+ b := dbgen.ProvisionerJob(s.T(), db, nil, database.ProvisionerJob{OrganizationID: o.ID})
check.Args([]uuid.UUID{a.ID, b.ID}).
- Asserts( /*rbac.ResourceSystem, policy.ActionRead*/ ).
+ Asserts(rbac.ResourceProvisionerJobs.InOrg(o.ID), policy.ActionRead).
Returns(slice.New(a, b))
}))
s.Run("InsertWorkspaceAgent", s.Subtest(func(db database.Store, check *expects) {
@@ -4048,7 +4050,6 @@ func (s *MethodTestSuite) TestSystemFunctions() {
}).Asserts(rbac.ResourceSystem, policy.ActionUpdate).Returns()
}))
s.Run("AcquireProvisionerJob", s.Subtest(func(db database.Store, check *expects) {
- // TODO: we need to create a ProvisionerJob resource
j := dbgen.ProvisionerJob(s.T(), db, nil, database.ProvisionerJob{
StartedAt: sql.NullTime{Valid: false},
UpdatedAt: time.Now(),
@@ -4058,47 +4059,48 @@ func (s *MethodTestSuite) TestSystemFunctions() {
OrganizationID: j.OrganizationID,
Types: []database.ProvisionerType{j.Provisioner},
ProvisionerTags: must(json.Marshal(j.Tags)),
- }).Asserts( /*rbac.ResourceSystem, policy.ActionUpdate*/ )
+ }).Asserts(rbac.ResourceProvisionerJobs, policy.ActionUpdate)
}))
s.Run("UpdateProvisionerJobWithCompleteByID", s.Subtest(func(db database.Store, check *expects) {
- // TODO: we need to create a ProvisionerJob resource
j := dbgen.ProvisionerJob(s.T(), db, nil, database.ProvisionerJob{})
check.Args(database.UpdateProvisionerJobWithCompleteByIDParams{
ID: j.ID,
- }).Asserts( /*rbac.ResourceSystem, policy.ActionUpdate*/ )
+ }).Asserts(rbac.ResourceProvisionerJobs, policy.ActionUpdate)
+ }))
+ s.Run("UpdateProvisionerJobWithCompleteWithStartedAtByID", s.Subtest(func(db database.Store, check *expects) {
+ j := dbgen.ProvisionerJob(s.T(), db, nil, database.ProvisionerJob{})
+ check.Args(database.UpdateProvisionerJobWithCompleteWithStartedAtByIDParams{
+ ID: j.ID,
+ }).Asserts(rbac.ResourceProvisionerJobs, policy.ActionUpdate)
}))
s.Run("UpdateProvisionerJobByID", s.Subtest(func(db database.Store, check *expects) {
- // TODO: we need to create a ProvisionerJob resource
j := dbgen.ProvisionerJob(s.T(), db, nil, database.ProvisionerJob{})
check.Args(database.UpdateProvisionerJobByIDParams{
ID: j.ID,
UpdatedAt: time.Now(),
- }).Asserts( /*rbac.ResourceSystem, policy.ActionUpdate*/ )
+ }).Asserts(rbac.ResourceProvisionerJobs, policy.ActionUpdate)
}))
s.Run("InsertProvisionerJob", s.Subtest(func(db database.Store, check *expects) {
dbtestutil.DisableForeignKeysAndTriggers(s.T(), db)
- // TODO: we need to create a ProvisionerJob resource
check.Args(database.InsertProvisionerJobParams{
ID: uuid.New(),
Provisioner: database.ProvisionerTypeEcho,
StorageMethod: database.ProvisionerStorageMethodFile,
Type: database.ProvisionerJobTypeWorkspaceBuild,
Input: json.RawMessage("{}"),
- }).Asserts( /*rbac.ResourceSystem, policy.ActionCreate*/ )
+ }).Asserts( /* rbac.ResourceProvisionerJobs, policy.ActionCreate */ )
}))
s.Run("InsertProvisionerJobLogs", s.Subtest(func(db database.Store, check *expects) {
- // TODO: we need to create a ProvisionerJob resource
j := dbgen.ProvisionerJob(s.T(), db, nil, database.ProvisionerJob{})
check.Args(database.InsertProvisionerJobLogsParams{
JobID: j.ID,
- }).Asserts( /*rbac.ResourceSystem, policy.ActionCreate*/ )
+ }).Asserts( /* rbac.ResourceProvisionerJobs, policy.ActionUpdate */ )
}))
s.Run("InsertProvisionerJobTimings", s.Subtest(func(db database.Store, check *expects) {
- // TODO: we need to create a ProvisionerJob resource
j := dbgen.ProvisionerJob(s.T(), db, nil, database.ProvisionerJob{})
check.Args(database.InsertProvisionerJobTimingsParams{
JobID: j.ID,
- }).Asserts( /*rbac.ResourceSystem, policy.ActionCreate*/ )
+ }).Asserts(rbac.ResourceProvisionerJobs, policy.ActionUpdate)
}))
s.Run("UpsertProvisionerDaemon", s.Subtest(func(db database.Store, check *expects) {
dbtestutil.DisableForeignKeysAndTriggers(s.T(), db)
@@ -4234,8 +4236,8 @@ func (s *MethodTestSuite) TestSystemFunctions() {
s.Run("GetFileTemplates", s.Subtest(func(db database.Store, check *expects) {
check.Args(uuid.New()).Asserts(rbac.ResourceSystem, policy.ActionRead)
}))
- s.Run("GetHungProvisionerJobs", s.Subtest(func(db database.Store, check *expects) {
- check.Args(time.Time{}).Asserts()
+ s.Run("GetProvisionerJobsToBeReaped", s.Subtest(func(db database.Store, check *expects) {
+ check.Args(database.GetProvisionerJobsToBeReapedParams{}).Asserts(rbac.ResourceProvisionerJobs, policy.ActionRead)
}))
s.Run("UpsertOAuthSigningKey", s.Subtest(func(db database.Store, check *expects) {
check.Args("foo").Asserts(rbac.ResourceSystem, policy.ActionUpdate)
@@ -4479,6 +4481,9 @@ func (s *MethodTestSuite) TestSystemFunctions() {
VapidPrivateKey: "test",
}).Asserts(rbac.ResourceDeploymentConfig, policy.ActionUpdate)
}))
+ s.Run("GetProvisionerJobByIDForUpdate", s.Subtest(func(db database.Store, check *expects) {
+ check.Args(uuid.New()).Asserts(rbac.ResourceProvisionerJobs, policy.ActionRead).Errors(sql.ErrNoRows)
+ }))
}
func (s *MethodTestSuite) TestNotifications() {
diff --git a/coderd/database/dbmem/dbmem.go b/coderd/database/dbmem/dbmem.go
index 7dec84f8aaeb0..3ab2895876ac5 100644
--- a/coderd/database/dbmem/dbmem.go
+++ b/coderd/database/dbmem/dbmem.go
@@ -8,6 +8,7 @@ import (
"errors"
"fmt"
"math"
+ insecurerand "math/rand" //#nosec // this is only used for shuffling an array to pick random jobs to reap
"reflect"
"regexp"
"slices"
@@ -3707,23 +3708,6 @@ func (q *FakeQuerier) GetHealthSettings(_ context.Context) (string, error) {
return string(q.healthSettings), nil
}
-func (q *FakeQuerier) GetHungProvisionerJobs(_ context.Context, hungSince time.Time) ([]database.ProvisionerJob, error) {
- q.mutex.RLock()
- defer q.mutex.RUnlock()
-
- hungJobs := []database.ProvisionerJob{}
- for _, provisionerJob := range q.provisionerJobs {
- if provisionerJob.StartedAt.Valid && !provisionerJob.CompletedAt.Valid && provisionerJob.UpdatedAt.Before(hungSince) {
- // clone the Tags before appending, since maps are reference types and
- // we don't want the caller to be able to mutate the map we have inside
- // dbmem!
- provisionerJob.Tags = maps.Clone(provisionerJob.Tags)
- hungJobs = append(hungJobs, provisionerJob)
- }
- }
- return hungJobs, nil
-}
-
func (q *FakeQuerier) GetInboxNotificationByID(_ context.Context, id uuid.UUID) (database.InboxNotification, error) {
q.mutex.RLock()
defer q.mutex.RUnlock()
@@ -4642,6 +4626,13 @@ func (q *FakeQuerier) GetProvisionerJobByID(ctx context.Context, id uuid.UUID) (
return q.getProvisionerJobByIDNoLock(ctx, id)
}
+func (q *FakeQuerier) GetProvisionerJobByIDForUpdate(ctx context.Context, id uuid.UUID) (database.ProvisionerJob, error) {
+ q.mutex.RLock()
+ defer q.mutex.RUnlock()
+
+ return q.getProvisionerJobByIDNoLock(ctx, id)
+}
+
func (q *FakeQuerier) GetProvisionerJobTimingsByJobID(_ context.Context, jobID uuid.UUID) ([]database.ProvisionerJobTiming, error) {
q.mutex.RLock()
defer q.mutex.RUnlock()
@@ -4884,6 +4875,33 @@ func (q *FakeQuerier) GetProvisionerJobsCreatedAfter(_ context.Context, after ti
return jobs, nil
}
+func (q *FakeQuerier) GetProvisionerJobsToBeReaped(_ context.Context, arg database.GetProvisionerJobsToBeReapedParams) ([]database.ProvisionerJob, error) {
+ q.mutex.RLock()
+ defer q.mutex.RUnlock()
+ maxJobs := arg.MaxJobs
+
+ hungJobs := []database.ProvisionerJob{}
+ for _, provisionerJob := range q.provisionerJobs {
+ if !provisionerJob.CompletedAt.Valid {
+ if (provisionerJob.StartedAt.Valid && provisionerJob.UpdatedAt.Before(arg.HungSince)) ||
+ (!provisionerJob.StartedAt.Valid && provisionerJob.UpdatedAt.Before(arg.PendingSince)) {
+ // clone the Tags before appending, since maps are reference types and
+ // we don't want the caller to be able to mutate the map we have inside
+ // dbmem!
+ provisionerJob.Tags = maps.Clone(provisionerJob.Tags)
+ hungJobs = append(hungJobs, provisionerJob)
+ if len(hungJobs) >= int(maxJobs) {
+ break
+ }
+ }
+ }
+ }
+ insecurerand.Shuffle(len(hungJobs), func(i, j int) {
+ hungJobs[i], hungJobs[j] = hungJobs[j], hungJobs[i]
+ })
+ return hungJobs, nil
+}
+
func (q *FakeQuerier) GetProvisionerKeyByHashedSecret(_ context.Context, hashedSecret []byte) (database.ProvisionerKey, error) {
q.mutex.RLock()
defer q.mutex.RUnlock()
@@ -10958,6 +10976,30 @@ func (q *FakeQuerier) UpdateProvisionerJobWithCompleteByID(_ context.Context, ar
return sql.ErrNoRows
}
+func (q *FakeQuerier) UpdateProvisionerJobWithCompleteWithStartedAtByID(_ context.Context, arg database.UpdateProvisionerJobWithCompleteWithStartedAtByIDParams) error {
+ if err := validateDatabaseType(arg); err != nil {
+ return err
+ }
+
+ q.mutex.Lock()
+ defer q.mutex.Unlock()
+
+ for index, job := range q.provisionerJobs {
+ if arg.ID != job.ID {
+ continue
+ }
+ job.UpdatedAt = arg.UpdatedAt
+ job.CompletedAt = arg.CompletedAt
+ job.Error = arg.Error
+ job.ErrorCode = arg.ErrorCode
+ job.StartedAt = arg.StartedAt
+ job.JobStatus = provisionerJobStatus(job)
+ q.provisionerJobs[index] = job
+ return nil
+ }
+ return sql.ErrNoRows
+}
+
func (q *FakeQuerier) UpdateReplica(_ context.Context, arg database.UpdateReplicaParams) (database.Replica, error) {
if err := validateDatabaseType(arg); err != nil {
return database.Replica{}, err
diff --git a/coderd/database/dbmetrics/querymetrics.go b/coderd/database/dbmetrics/querymetrics.go
index a5a22aad1a0bf..9122cedbf786c 100644
--- a/coderd/database/dbmetrics/querymetrics.go
+++ b/coderd/database/dbmetrics/querymetrics.go
@@ -865,13 +865,6 @@ func (m queryMetricsStore) GetHealthSettings(ctx context.Context) (string, error
return r0, r1
}
-func (m queryMetricsStore) GetHungProvisionerJobs(ctx context.Context, hungSince time.Time) ([]database.ProvisionerJob, error) {
- start := time.Now()
- jobs, err := m.s.GetHungProvisionerJobs(ctx, hungSince)
- m.queryLatencies.WithLabelValues("GetHungProvisionerJobs").Observe(time.Since(start).Seconds())
- return jobs, err
-}
-
func (m queryMetricsStore) GetInboxNotificationByID(ctx context.Context, id uuid.UUID) (database.InboxNotification, error) {
start := time.Now()
r0, r1 := m.s.GetInboxNotificationByID(ctx, id)
@@ -1194,6 +1187,13 @@ func (m queryMetricsStore) GetProvisionerJobByID(ctx context.Context, id uuid.UU
return job, err
}
+func (m queryMetricsStore) GetProvisionerJobByIDForUpdate(ctx context.Context, id uuid.UUID) (database.ProvisionerJob, error) {
+ start := time.Now()
+ r0, r1 := m.s.GetProvisionerJobByIDForUpdate(ctx, id)
+ m.queryLatencies.WithLabelValues("GetProvisionerJobByIDForUpdate").Observe(time.Since(start).Seconds())
+ return r0, r1
+}
+
func (m queryMetricsStore) GetProvisionerJobTimingsByJobID(ctx context.Context, jobID uuid.UUID) ([]database.ProvisionerJobTiming, error) {
start := time.Now()
r0, r1 := m.s.GetProvisionerJobTimingsByJobID(ctx, jobID)
@@ -1229,6 +1229,13 @@ func (m queryMetricsStore) GetProvisionerJobsCreatedAfter(ctx context.Context, c
return jobs, err
}
+func (m queryMetricsStore) GetProvisionerJobsToBeReaped(ctx context.Context, arg database.GetProvisionerJobsToBeReapedParams) ([]database.ProvisionerJob, error) {
+ start := time.Now()
+ r0, r1 := m.s.GetProvisionerJobsToBeReaped(ctx, arg)
+ m.queryLatencies.WithLabelValues("GetProvisionerJobsToBeReaped").Observe(time.Since(start).Seconds())
+ return r0, r1
+}
+
func (m queryMetricsStore) GetProvisionerKeyByHashedSecret(ctx context.Context, hashedSecret []byte) (database.ProvisionerKey, error) {
start := time.Now()
r0, r1 := m.s.GetProvisionerKeyByHashedSecret(ctx, hashedSecret)
@@ -2706,6 +2713,13 @@ func (m queryMetricsStore) UpdateProvisionerJobWithCompleteByID(ctx context.Cont
return err
}
+func (m queryMetricsStore) UpdateProvisionerJobWithCompleteWithStartedAtByID(ctx context.Context, arg database.UpdateProvisionerJobWithCompleteWithStartedAtByIDParams) error {
+ start := time.Now()
+ r0 := m.s.UpdateProvisionerJobWithCompleteWithStartedAtByID(ctx, arg)
+ m.queryLatencies.WithLabelValues("UpdateProvisionerJobWithCompleteWithStartedAtByID").Observe(time.Since(start).Seconds())
+ return r0
+}
+
func (m queryMetricsStore) UpdateReplica(ctx context.Context, arg database.UpdateReplicaParams) (database.Replica, error) {
start := time.Now()
replica, err := m.s.UpdateReplica(ctx, arg)
diff --git a/coderd/database/dbmock/dbmock.go b/coderd/database/dbmock/dbmock.go
index 0d66dcec11848..e7af9ecd8fee8 100644
--- a/coderd/database/dbmock/dbmock.go
+++ b/coderd/database/dbmock/dbmock.go
@@ -1743,21 +1743,6 @@ func (mr *MockStoreMockRecorder) GetHealthSettings(ctx any) *gomock.Call {
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetHealthSettings", reflect.TypeOf((*MockStore)(nil).GetHealthSettings), ctx)
}
-// GetHungProvisionerJobs mocks base method.
-func (m *MockStore) GetHungProvisionerJobs(ctx context.Context, updatedAt time.Time) ([]database.ProvisionerJob, error) {
- m.ctrl.T.Helper()
- ret := m.ctrl.Call(m, "GetHungProvisionerJobs", ctx, updatedAt)
- ret0, _ := ret[0].([]database.ProvisionerJob)
- ret1, _ := ret[1].(error)
- return ret0, ret1
-}
-
-// GetHungProvisionerJobs indicates an expected call of GetHungProvisionerJobs.
-func (mr *MockStoreMockRecorder) GetHungProvisionerJobs(ctx, updatedAt any) *gomock.Call {
- mr.mock.ctrl.T.Helper()
- return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetHungProvisionerJobs", reflect.TypeOf((*MockStore)(nil).GetHungProvisionerJobs), ctx, updatedAt)
-}
-
// GetInboxNotificationByID mocks base method.
func (m *MockStore) GetInboxNotificationByID(ctx context.Context, id uuid.UUID) (database.InboxNotification, error) {
m.ctrl.T.Helper()
@@ -2448,6 +2433,21 @@ func (mr *MockStoreMockRecorder) GetProvisionerJobByID(ctx, id any) *gomock.Call
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetProvisionerJobByID", reflect.TypeOf((*MockStore)(nil).GetProvisionerJobByID), ctx, id)
}
+// GetProvisionerJobByIDForUpdate mocks base method.
+func (m *MockStore) GetProvisionerJobByIDForUpdate(ctx context.Context, id uuid.UUID) (database.ProvisionerJob, error) {
+ m.ctrl.T.Helper()
+ ret := m.ctrl.Call(m, "GetProvisionerJobByIDForUpdate", ctx, id)
+ ret0, _ := ret[0].(database.ProvisionerJob)
+ ret1, _ := ret[1].(error)
+ return ret0, ret1
+}
+
+// GetProvisionerJobByIDForUpdate indicates an expected call of GetProvisionerJobByIDForUpdate.
+func (mr *MockStoreMockRecorder) GetProvisionerJobByIDForUpdate(ctx, id any) *gomock.Call {
+ mr.mock.ctrl.T.Helper()
+ return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetProvisionerJobByIDForUpdate", reflect.TypeOf((*MockStore)(nil).GetProvisionerJobByIDForUpdate), ctx, id)
+}
+
// GetProvisionerJobTimingsByJobID mocks base method.
func (m *MockStore) GetProvisionerJobTimingsByJobID(ctx context.Context, jobID uuid.UUID) ([]database.ProvisionerJobTiming, error) {
m.ctrl.T.Helper()
@@ -2523,6 +2523,21 @@ func (mr *MockStoreMockRecorder) GetProvisionerJobsCreatedAfter(ctx, createdAt a
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetProvisionerJobsCreatedAfter", reflect.TypeOf((*MockStore)(nil).GetProvisionerJobsCreatedAfter), ctx, createdAt)
}
+// GetProvisionerJobsToBeReaped mocks base method.
+func (m *MockStore) GetProvisionerJobsToBeReaped(ctx context.Context, arg database.GetProvisionerJobsToBeReapedParams) ([]database.ProvisionerJob, error) {
+ m.ctrl.T.Helper()
+ ret := m.ctrl.Call(m, "GetProvisionerJobsToBeReaped", ctx, arg)
+ ret0, _ := ret[0].([]database.ProvisionerJob)
+ ret1, _ := ret[1].(error)
+ return ret0, ret1
+}
+
+// GetProvisionerJobsToBeReaped indicates an expected call of GetProvisionerJobsToBeReaped.
+func (mr *MockStoreMockRecorder) GetProvisionerJobsToBeReaped(ctx, arg any) *gomock.Call {
+ mr.mock.ctrl.T.Helper()
+ return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetProvisionerJobsToBeReaped", reflect.TypeOf((*MockStore)(nil).GetProvisionerJobsToBeReaped), ctx, arg)
+}
+
// GetProvisionerKeyByHashedSecret mocks base method.
func (m *MockStore) GetProvisionerKeyByHashedSecret(ctx context.Context, hashedSecret []byte) (database.ProvisionerKey, error) {
m.ctrl.T.Helper()
@@ -5732,6 +5747,20 @@ func (mr *MockStoreMockRecorder) UpdateProvisionerJobWithCompleteByID(ctx, arg a
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpdateProvisionerJobWithCompleteByID", reflect.TypeOf((*MockStore)(nil).UpdateProvisionerJobWithCompleteByID), ctx, arg)
}
+// UpdateProvisionerJobWithCompleteWithStartedAtByID mocks base method.
+func (m *MockStore) UpdateProvisionerJobWithCompleteWithStartedAtByID(ctx context.Context, arg database.UpdateProvisionerJobWithCompleteWithStartedAtByIDParams) error {
+ m.ctrl.T.Helper()
+ ret := m.ctrl.Call(m, "UpdateProvisionerJobWithCompleteWithStartedAtByID", ctx, arg)
+ ret0, _ := ret[0].(error)
+ return ret0
+}
+
+// UpdateProvisionerJobWithCompleteWithStartedAtByID indicates an expected call of UpdateProvisionerJobWithCompleteWithStartedAtByID.
+func (mr *MockStoreMockRecorder) UpdateProvisionerJobWithCompleteWithStartedAtByID(ctx, arg any) *gomock.Call {
+ mr.mock.ctrl.T.Helper()
+ return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpdateProvisionerJobWithCompleteWithStartedAtByID", reflect.TypeOf((*MockStore)(nil).UpdateProvisionerJobWithCompleteWithStartedAtByID), ctx, arg)
+}
+
// UpdateReplica mocks base method.
func (m *MockStore) UpdateReplica(ctx context.Context, arg database.UpdateReplicaParams) (database.Replica, error) {
m.ctrl.T.Helper()
diff --git a/coderd/database/querier.go b/coderd/database/querier.go
index 81b8d58758ada..78a88426349da 100644
--- a/coderd/database/querier.go
+++ b/coderd/database/querier.go
@@ -196,7 +196,6 @@ type sqlcQuerier interface {
GetGroupMembersCountByGroupID(ctx context.Context, arg GetGroupMembersCountByGroupIDParams) (int64, error)
GetGroups(ctx context.Context, arg GetGroupsParams) ([]GetGroupsRow, error)
GetHealthSettings(ctx context.Context) (string, error)
- GetHungProvisionerJobs(ctx context.Context, updatedAt time.Time) ([]ProvisionerJob, error)
GetInboxNotificationByID(ctx context.Context, id uuid.UUID) (InboxNotification, error)
// Fetches inbox notifications for a user filtered by templates and targets
// param user_id: The user ID
@@ -265,11 +264,16 @@ type sqlcQuerier interface {
// Previous job information.
GetProvisionerDaemonsWithStatusByOrganization(ctx context.Context, arg GetProvisionerDaemonsWithStatusByOrganizationParams) ([]GetProvisionerDaemonsWithStatusByOrganizationRow, error)
GetProvisionerJobByID(ctx context.Context, id uuid.UUID) (ProvisionerJob, error)
+ // Gets a single provisioner job by ID for update.
+ // This is used to securely reap jobs that have been hung/pending for a long time.
+ GetProvisionerJobByIDForUpdate(ctx context.Context, id uuid.UUID) (ProvisionerJob, error)
GetProvisionerJobTimingsByJobID(ctx context.Context, jobID uuid.UUID) ([]ProvisionerJobTiming, error)
GetProvisionerJobsByIDs(ctx context.Context, ids []uuid.UUID) ([]ProvisionerJob, error)
GetProvisionerJobsByIDsWithQueuePosition(ctx context.Context, ids []uuid.UUID) ([]GetProvisionerJobsByIDsWithQueuePositionRow, error)
GetProvisionerJobsByOrganizationAndStatusWithQueuePositionAndProvisioner(ctx context.Context, arg GetProvisionerJobsByOrganizationAndStatusWithQueuePositionAndProvisionerParams) ([]GetProvisionerJobsByOrganizationAndStatusWithQueuePositionAndProvisionerRow, error)
GetProvisionerJobsCreatedAfter(ctx context.Context, createdAt time.Time) ([]ProvisionerJob, error)
+ // To avoid repeatedly attempting to reap the same jobs, we randomly order and limit to @max_jobs.
+ GetProvisionerJobsToBeReaped(ctx context.Context, arg GetProvisionerJobsToBeReapedParams) ([]ProvisionerJob, error)
GetProvisionerKeyByHashedSecret(ctx context.Context, hashedSecret []byte) (ProvisionerKey, error)
GetProvisionerKeyByID(ctx context.Context, id uuid.UUID) (ProvisionerKey, error)
GetProvisionerKeyByName(ctx context.Context, arg GetProvisionerKeyByNameParams) (ProvisionerKey, error)
@@ -567,6 +571,7 @@ type sqlcQuerier interface {
UpdateProvisionerJobByID(ctx context.Context, arg UpdateProvisionerJobByIDParams) error
UpdateProvisionerJobWithCancelByID(ctx context.Context, arg UpdateProvisionerJobWithCancelByIDParams) error
UpdateProvisionerJobWithCompleteByID(ctx context.Context, arg UpdateProvisionerJobWithCompleteByIDParams) error
+ UpdateProvisionerJobWithCompleteWithStartedAtByID(ctx context.Context, arg UpdateProvisionerJobWithCompleteWithStartedAtByIDParams) error
UpdateReplica(ctx context.Context, arg UpdateReplicaParams) (Replica, error)
UpdateTailnetPeerStatusByCoordinator(ctx context.Context, arg UpdateTailnetPeerStatusByCoordinatorParams) error
UpdateTemplateACLByID(ctx context.Context, arg UpdateTemplateACLByIDParams) error
diff --git a/coderd/database/queries.sql.go b/coderd/database/queries.sql.go
index fdb9252bf27ee..b956fc1db5f91 100644
--- a/coderd/database/queries.sql.go
+++ b/coderd/database/queries.sql.go
@@ -7384,71 +7384,57 @@ func (q *sqlQuerier) AcquireProvisionerJob(ctx context.Context, arg AcquireProvi
return i, err
}
-const getHungProvisionerJobs = `-- name: GetHungProvisionerJobs :many
+const getProvisionerJobByID = `-- name: GetProvisionerJobByID :one
SELECT
id, created_at, updated_at, started_at, canceled_at, completed_at, error, organization_id, initiator_id, provisioner, storage_method, type, input, worker_id, file_id, tags, error_code, trace_metadata, job_status
FROM
provisioner_jobs
WHERE
- updated_at < $1
- AND started_at IS NOT NULL
- AND completed_at IS NULL
+ id = $1
`
-func (q *sqlQuerier) GetHungProvisionerJobs(ctx context.Context, updatedAt time.Time) ([]ProvisionerJob, error) {
- rows, err := q.db.QueryContext(ctx, getHungProvisionerJobs, updatedAt)
- if err != nil {
- return nil, err
- }
- defer rows.Close()
- var items []ProvisionerJob
- for rows.Next() {
- var i ProvisionerJob
- if err := rows.Scan(
- &i.ID,
- &i.CreatedAt,
- &i.UpdatedAt,
- &i.StartedAt,
- &i.CanceledAt,
- &i.CompletedAt,
- &i.Error,
- &i.OrganizationID,
- &i.InitiatorID,
- &i.Provisioner,
- &i.StorageMethod,
- &i.Type,
- &i.Input,
- &i.WorkerID,
- &i.FileID,
- &i.Tags,
- &i.ErrorCode,
- &i.TraceMetadata,
- &i.JobStatus,
- ); err != nil {
- return nil, err
- }
- items = append(items, i)
- }
- if err := rows.Close(); err != nil {
- return nil, err
- }
- if err := rows.Err(); err != nil {
- return nil, err
- }
- return items, nil
+func (q *sqlQuerier) GetProvisionerJobByID(ctx context.Context, id uuid.UUID) (ProvisionerJob, error) {
+ row := q.db.QueryRowContext(ctx, getProvisionerJobByID, id)
+ var i ProvisionerJob
+ err := row.Scan(
+ &i.ID,
+ &i.CreatedAt,
+ &i.UpdatedAt,
+ &i.StartedAt,
+ &i.CanceledAt,
+ &i.CompletedAt,
+ &i.Error,
+ &i.OrganizationID,
+ &i.InitiatorID,
+ &i.Provisioner,
+ &i.StorageMethod,
+ &i.Type,
+ &i.Input,
+ &i.WorkerID,
+ &i.FileID,
+ &i.Tags,
+ &i.ErrorCode,
+ &i.TraceMetadata,
+ &i.JobStatus,
+ )
+ return i, err
}
-const getProvisionerJobByID = `-- name: GetProvisionerJobByID :one
+const getProvisionerJobByIDForUpdate = `-- name: GetProvisionerJobByIDForUpdate :one
SELECT
id, created_at, updated_at, started_at, canceled_at, completed_at, error, organization_id, initiator_id, provisioner, storage_method, type, input, worker_id, file_id, tags, error_code, trace_metadata, job_status
FROM
provisioner_jobs
WHERE
id = $1
+FOR UPDATE
+SKIP LOCKED
`
-func (q *sqlQuerier) GetProvisionerJobByID(ctx context.Context, id uuid.UUID) (ProvisionerJob, error) {
- row := q.db.QueryRowContext(ctx, getProvisionerJobByID, id)
+// Gets a single provisioner job by ID for update.
+// This is used to securely reap jobs that have been hung/pending for a long time.
+func (q *sqlQuerier) GetProvisionerJobByIDForUpdate(ctx context.Context, id uuid.UUID) (ProvisionerJob, error) {
+ row := q.db.QueryRowContext(ctx, getProvisionerJobByIDForUpdate, id)
var i ProvisionerJob
err := row.Scan(
&i.ID,
@@ -7913,6 +7899,79 @@ func (q *sqlQuerier) GetProvisionerJobsCreatedAfter(ctx context.Context, created
return items, nil
}
+const getProvisionerJobsToBeReaped = `-- name: GetProvisionerJobsToBeReaped :many
+SELECT
+ id, created_at, updated_at, started_at, canceled_at, completed_at, error, organization_id, initiator_id, provisioner, storage_method, type, input, worker_id, file_id, tags, error_code, trace_metadata, job_status
+FROM
+ provisioner_jobs
+WHERE
+ (
+ -- If the job has not been started before @pending_since, reap it.
+ updated_at < $1
+ AND started_at IS NULL
+ AND completed_at IS NULL
+ )
+ OR
+ (
+ -- If the job has been started but not completed before @hung_since, reap it.
+ updated_at < $2
+ AND started_at IS NOT NULL
+ AND completed_at IS NULL
+ )
+ORDER BY random()
+LIMIT $3
+`
+
+type GetProvisionerJobsToBeReapedParams struct {
+ PendingSince time.Time `db:"pending_since" json:"pending_since"`
+ HungSince time.Time `db:"hung_since" json:"hung_since"`
+ MaxJobs int32 `db:"max_jobs" json:"max_jobs"`
+}
+
+// To avoid repeatedly attempting to reap the same jobs, we randomly order and limit to @max_jobs.
+func (q *sqlQuerier) GetProvisionerJobsToBeReaped(ctx context.Context, arg GetProvisionerJobsToBeReapedParams) ([]ProvisionerJob, error) {
+ rows, err := q.db.QueryContext(ctx, getProvisionerJobsToBeReaped, arg.PendingSince, arg.HungSince, arg.MaxJobs)
+ if err != nil {
+ return nil, err
+ }
+ defer rows.Close()
+ var items []ProvisionerJob
+ for rows.Next() {
+ var i ProvisionerJob
+ if err := rows.Scan(
+ &i.ID,
+ &i.CreatedAt,
+ &i.UpdatedAt,
+ &i.StartedAt,
+ &i.CanceledAt,
+ &i.CompletedAt,
+ &i.Error,
+ &i.OrganizationID,
+ &i.InitiatorID,
+ &i.Provisioner,
+ &i.StorageMethod,
+ &i.Type,
+ &i.Input,
+ &i.WorkerID,
+ &i.FileID,
+ &i.Tags,
+ &i.ErrorCode,
+ &i.TraceMetadata,
+ &i.JobStatus,
+ ); err != nil {
+ return nil, err
+ }
+ items = append(items, i)
+ }
+ if err := rows.Close(); err != nil {
+ return nil, err
+ }
+ if err := rows.Err(); err != nil {
+ return nil, err
+ }
+ return items, nil
+}
+
const insertProvisionerJob = `-- name: InsertProvisionerJob :one
INSERT INTO
provisioner_jobs (
@@ -8121,6 +8180,40 @@ func (q *sqlQuerier) UpdateProvisionerJobWithCompleteByID(ctx context.Context, a
return err
}
+const updateProvisionerJobWithCompleteWithStartedAtByID = `-- name: UpdateProvisionerJobWithCompleteWithStartedAtByID :exec
+UPDATE
+ provisioner_jobs
+SET
+ updated_at = $2,
+ completed_at = $3,
+ error = $4,
+ error_code = $5,
+ started_at = $6
+WHERE
+ id = $1
+`
+
+type UpdateProvisionerJobWithCompleteWithStartedAtByIDParams struct {
+ ID uuid.UUID `db:"id" json:"id"`
+ UpdatedAt time.Time `db:"updated_at" json:"updated_at"`
+ CompletedAt sql.NullTime `db:"completed_at" json:"completed_at"`
+ Error sql.NullString `db:"error" json:"error"`
+ ErrorCode sql.NullString `db:"error_code" json:"error_code"`
+ StartedAt sql.NullTime `db:"started_at" json:"started_at"`
+}
+
+func (q *sqlQuerier) UpdateProvisionerJobWithCompleteWithStartedAtByID(ctx context.Context, arg UpdateProvisionerJobWithCompleteWithStartedAtByIDParams) error {
+ _, err := q.db.ExecContext(ctx, updateProvisionerJobWithCompleteWithStartedAtByID,
+ arg.ID,
+ arg.UpdatedAt,
+ arg.CompletedAt,
+ arg.Error,
+ arg.ErrorCode,
+ arg.StartedAt,
+ )
+ return err
+}
+
const deleteProvisionerKey = `-- name: DeleteProvisionerKey :exec
DELETE FROM
provisioner_keys
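As a usage note: the reaper (coderd/jobreaper/detector.go later in this diff) derives both cutoffs from a single reference time. A hedged sketch of that call shape, using a hypothetical helper in this same database package and the thresholds the reaper defines (30 minutes pending, 5 minutes hung, 10 jobs per pass):

func fetchReapableJobs(ctx context.Context, q *sqlQuerier, now time.Time) ([]ProvisionerJob, error) {
	// Pending jobs are reaped after 30 minutes without an update, running
	// jobs after 5 minutes; at most 10 jobs are returned per pass.
	return q.GetProvisionerJobsToBeReaped(ctx, GetProvisionerJobsToBeReapedParams{
		PendingSince: now.Add(-30 * time.Minute),
		HungSince:    now.Add(-5 * time.Minute),
		MaxJobs:      10,
	})
}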
diff --git a/coderd/database/queries/provisionerjobs.sql b/coderd/database/queries/provisionerjobs.sql
index 2ab7774e660b8..88bacc705601c 100644
--- a/coderd/database/queries/provisionerjobs.sql
+++ b/coderd/database/queries/provisionerjobs.sql
@@ -41,6 +41,18 @@ FROM
WHERE
id = $1;
+-- name: GetProvisionerJobByIDForUpdate :one
+-- Gets a single provisioner job by ID for update.
+-- This is used to securely reap jobs that have been hung/pending for a long time.
+SELECT
+ *
+FROM
+ provisioner_jobs
+WHERE
+ id = $1
+FOR UPDATE
+SKIP LOCKED;
+
-- name: GetProvisionerJobsByIDs :many
SELECT
*
@@ -262,15 +274,40 @@ SET
WHERE
id = $1;
--- name: GetHungProvisionerJobs :many
+-- name: UpdateProvisionerJobWithCompleteWithStartedAtByID :exec
+UPDATE
+ provisioner_jobs
+SET
+ updated_at = $2,
+ completed_at = $3,
+ error = $4,
+ error_code = $5,
+ started_at = $6
+WHERE
+ id = $1;
+
+-- name: GetProvisionerJobsToBeReaped :many
SELECT
*
FROM
provisioner_jobs
WHERE
- updated_at < $1
- AND started_at IS NOT NULL
- AND completed_at IS NULL;
+ (
+ -- If the job has not been started before @pending_since, reap it.
+ updated_at < @pending_since
+ AND started_at IS NULL
+ AND completed_at IS NULL
+ )
+ OR
+ (
+ -- If the job has been started but not completed before @hung_since, reap it.
+ updated_at < @hung_since
+ AND started_at IS NOT NULL
+ AND completed_at IS NULL
+ )
+-- To avoid repeatedly attempting to reap the same jobs, we randomly order and limit to @max_jobs.
+ORDER BY random()
+LIMIT @max_jobs;
-- name: InsertProvisionerJobTimings :many
INSERT INTO provisioner_job_timings (job_id, started_at, ended_at, stage, source, action, resource)
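The WHERE clause above encodes the same eligibility rule as the dbmem fake earlier in this diff. A hypothetical Go restatement (not part of the patch) that is handy when checking parity between the SQL and the in-memory implementation:

func eligibleForReap(job database.ProvisionerJob, pendingSince, hungSince time.Time) bool {
	// Completed jobs are never reaped.
	if job.CompletedAt.Valid {
		return false
	}
	// Jobs that never started use the pending cutoff; started jobs use the
	// hung cutoff. Both compare against the last update time.
	if !job.StartedAt.Valid {
		return job.UpdatedAt.Before(pendingSince)
	}
	return job.UpdatedAt.Before(hungSince)
}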
diff --git a/coderd/httpmw/loggermw/logger.go b/coderd/httpmw/loggermw/logger.go
index 9eeb07a5f10e5..30e5e2d811ad8 100644
--- a/coderd/httpmw/loggermw/logger.go
+++ b/coderd/httpmw/loggermw/logger.go
@@ -132,7 +132,7 @@ var actorLogOrder = []rbac.SubjectType{
rbac.SubjectTypeAutostart,
rbac.SubjectTypeCryptoKeyReader,
rbac.SubjectTypeCryptoKeyRotator,
- rbac.SubjectTypeHangDetector,
+ rbac.SubjectTypeJobReaper,
rbac.SubjectTypeNotifier,
rbac.SubjectTypePrebuildsOrchestrator,
rbac.SubjectTypeProvisionerd,
diff --git a/coderd/unhanger/detector.go b/coderd/jobreaper/detector.go
similarity index 72%
rename from coderd/unhanger/detector.go
rename to coderd/jobreaper/detector.go
index 14383b1839363..ad5774ee6b95d 100644
--- a/coderd/unhanger/detector.go
+++ b/coderd/jobreaper/detector.go
@@ -1,11 +1,10 @@
-package unhanger
+package jobreaper
import (
"context"
"database/sql"
"encoding/json"
- "fmt"
- "math/rand" //#nosec // this is only used for shuffling an array to pick random jobs to unhang
+ "fmt" //#nosec // this is only used for shuffling an array to pick random jobs to unhang
"time"
"golang.org/x/xerrors"
@@ -21,10 +20,14 @@ import (
)
const (
- // HungJobDuration is the duration of time since the last update to a job
- // before it is considered hung.
+ // HungJobDuration is the duration of time since the last update
+ // to a RUNNING job before it is considered hung.
HungJobDuration = 5 * time.Minute
+ // PendingJobDuration is the duration of time since the last update
+ // to a PENDING job before it is considered dead.
+ PendingJobDuration = 30 * time.Minute
+
// HungJobExitTimeout is the duration of time that provisioners should allow
// for a graceful exit upon cancellation due to failing to send an update to
// a job.
@@ -38,16 +41,30 @@ const (
MaxJobsPerRun = 10
)
-// HungJobLogMessages are written to provisioner job logs when a job is hung and
-// terminated.
-var HungJobLogMessages = []string{
- "",
- "====================",
- "Coder: Build has been detected as hung for 5 minutes and will be terminated.",
- "====================",
- "",
+// JobLogMessages are written to provisioner job logs when a job is reaped.
+func JobLogMessages(reapType ReapType, threshold time.Duration) []string {
+ return []string{
+ "",
+ "====================",
+ fmt.Sprintf("Coder: Build has been detected as %s for %.0f minutes and will be terminated.", reapType, threshold.Minutes()),
+ "====================",
+ "",
+ }
+}
+
+type jobToReap struct {
+ ID uuid.UUID
+ Threshold time.Duration
+ Type ReapType
}
+type ReapType string
+
+const (
+ Pending ReapType = "pending"
+ Hung ReapType = "hung"
+)
+
// acquireLockError is returned when the detector fails to acquire a lock and
// cancels the current run.
type acquireLockError struct{}
@@ -93,10 +110,10 @@ type Stats struct {
Error error
}
-// New returns a new hang detector.
+// New returns a new job reaper.
func New(ctx context.Context, db database.Store, pub pubsub.Pubsub, log slog.Logger, tick <-chan time.Time) *Detector {
- //nolint:gocritic // Hang detector has a limited set of permissions.
- ctx, cancel := context.WithCancel(dbauthz.AsHangDetector(ctx))
+ //nolint:gocritic // Job reaper has a limited set of permissions.
+ ctx, cancel := context.WithCancel(dbauthz.AsJobReaper(ctx))
d := &Detector{
ctx: ctx,
cancel: cancel,
@@ -172,34 +189,42 @@ func (d *Detector) run(t time.Time) Stats {
Error: nil,
}
- // Find all provisioner jobs that are currently running but have not
- // received an update in the last 5 minutes.
- jobs, err := d.db.GetHungProvisionerJobs(ctx, t.Add(-HungJobDuration))
+ // Find all provisioner jobs to be reaped
+ jobs, err := d.db.GetProvisionerJobsToBeReaped(ctx, database.GetProvisionerJobsToBeReapedParams{
+ PendingSince: t.Add(-PendingJobDuration),
+ HungSince: t.Add(-HungJobDuration),
+ MaxJobs: MaxJobsPerRun,
+ })
if err != nil {
- stats.Error = xerrors.Errorf("get hung provisioner jobs: %w", err)
+ stats.Error = xerrors.Errorf("get provisioner jobs to be reaped: %w", err)
return stats
}
- // Limit the number of jobs we'll unhang in a single run to avoid
- // timing out.
- if len(jobs) > MaxJobsPerRun {
- // Pick a random subset of the jobs to unhang.
- rand.Shuffle(len(jobs), func(i, j int) {
- jobs[i], jobs[j] = jobs[j], jobs[i]
- })
- jobs = jobs[:MaxJobsPerRun]
- }
+ jobsToReap := make([]*jobToReap, 0, len(jobs))
- // Send a message into the build log for each hung job saying that it
- // has been detected and will be terminated, then mark the job as
- // failed.
for _, job := range jobs {
+ j := &jobToReap{
+ ID: job.ID,
+ }
+ if job.JobStatus == database.ProvisionerJobStatusPending {
+ j.Threshold = PendingJobDuration
+ j.Type = Pending
+ } else {
+ j.Threshold = HungJobDuration
+ j.Type = Hung
+ }
+ jobsToReap = append(jobsToReap, j)
+ }
+
+ // Send a message into the build log for each hung or pending job saying that it
+ // has been detected and will be terminated, then mark the job as failed.
+ for _, job := range jobsToReap {
log := d.log.With(slog.F("job_id", job.ID))
- err := unhangJob(ctx, log, d.db, d.pubsub, job.ID)
+ err := reapJob(ctx, log, d.db, d.pubsub, job)
if err != nil {
if !(xerrors.As(err, &acquireLockError{}) || xerrors.As(err, &jobIneligibleError{})) {
- log.Error(ctx, "error forcefully terminating hung provisioner job", slog.Error(err))
+ log.Error(ctx, "error forcefully terminating provisioner job", slog.F("type", job.Type), slog.Error(err))
}
continue
}
@@ -210,47 +235,34 @@ func (d *Detector) run(t time.Time) Stats {
return stats
}
-func unhangJob(ctx context.Context, log slog.Logger, db database.Store, pub pubsub.Pubsub, jobID uuid.UUID) error {
+func reapJob(ctx context.Context, log slog.Logger, db database.Store, pub pubsub.Pubsub, jobToReap *jobToReap) error {
var lowestLogID int64
err := db.InTx(func(db database.Store) error {
- locked, err := db.TryAcquireLock(ctx, database.GenLockID(fmt.Sprintf("hang-detector:%s", jobID)))
- if err != nil {
- return xerrors.Errorf("acquire lock: %w", err)
- }
- if !locked {
- // This error is ignored.
- return acquireLockError{}
- }
-
// Refetch the job while we hold the lock.
- job, err := db.GetProvisionerJobByID(ctx, jobID)
+ job, err := db.GetProvisionerJobByIDForUpdate(ctx, jobToReap.ID)
if err != nil {
+ if xerrors.Is(err, sql.ErrNoRows) {
+ return acquireLockError{}
+ }
return xerrors.Errorf("get provisioner job: %w", err)
}
- // Check if we should still unhang it.
- if !job.StartedAt.Valid {
- // This shouldn't be possible to hit because the query only selects
- // started and not completed jobs, and a job can't be "un-started".
- return jobIneligibleError{
- Err: xerrors.New("job is not started"),
- }
- }
if job.CompletedAt.Valid {
return jobIneligibleError{
Err: xerrors.Errorf("job is completed (status %s)", job.JobStatus),
}
}
- if job.UpdatedAt.After(time.Now().Add(-HungJobDuration)) {
+ if job.UpdatedAt.After(time.Now().Add(-jobToReap.Threshold)) {
return jobIneligibleError{
Err: xerrors.New("job has been updated recently"),
}
}
log.Warn(
- ctx, "detected hung provisioner job, forcefully terminating",
- "threshold", HungJobDuration,
+ ctx, "forcefully terminating provisioner job",
+ "type", jobToReap.Type,
+ "threshold", jobToReap.Threshold,
)
// First, get the latest logs from the build so we can make sure
@@ -260,7 +272,7 @@ func unhangJob(ctx context.Context, log slog.Logger, db database.Store, pub pubs
CreatedAfter: 0,
})
if err != nil {
- return xerrors.Errorf("get logs for hung job: %w", err)
+ return xerrors.Errorf("get logs for %s job: %w", jobToReap.Type, err)
}
logStage := ""
if len(logs) != 0 {
@@ -280,7 +292,7 @@ func unhangJob(ctx context.Context, log slog.Logger, db database.Store, pub pubs
Output: nil,
}
now := dbtime.Now()
- for i, msg := range HungJobLogMessages {
+ for i, msg := range JobLogMessages(jobToReap.Type, jobToReap.Threshold) {
// Set the created at in a way that ensures each message has
// a unique timestamp so they will be sorted correctly.
insertParams.CreatedAt = append(insertParams.CreatedAt, now.Add(time.Millisecond*time.Duration(i)))
@@ -291,13 +303,22 @@ func unhangJob(ctx context.Context, log slog.Logger, db database.Store, pub pubs
}
newLogs, err := db.InsertProvisionerJobLogs(ctx, insertParams)
if err != nil {
- return xerrors.Errorf("insert logs for hung job: %w", err)
+ return xerrors.Errorf("insert logs for %s job: %w", job.JobStatus, err)
}
lowestLogID = newLogs[0].ID
// Mark the job as failed.
now = dbtime.Now()
- err = db.UpdateProvisionerJobWithCompleteByID(ctx, database.UpdateProvisionerJobWithCompleteByIDParams{
+
+ // If the job was never started (pending), set the StartedAt time to the current
+ // time so that the build duration is correct.
+ if job.JobStatus == database.ProvisionerJobStatusPending {
+ job.StartedAt = sql.NullTime{
+ Time: now,
+ Valid: true,
+ }
+ }
+ err = db.UpdateProvisionerJobWithCompleteWithStartedAtByID(ctx, database.UpdateProvisionerJobWithCompleteWithStartedAtByIDParams{
ID: job.ID,
UpdatedAt: now,
CompletedAt: sql.NullTime{
@@ -305,12 +326,13 @@ func unhangJob(ctx context.Context, log slog.Logger, db database.Store, pub pubs
Valid: true,
},
Error: sql.NullString{
- String: "Coder: Build has been detected as hung for 5 minutes and has been terminated by hang detector.",
+ String: fmt.Sprintf("Coder: Build has been detected as %s for %.0f minutes and has been terminated by the reaper.", jobToReap.Type, jobToReap.Threshold.Minutes()),
Valid: true,
},
ErrorCode: sql.NullString{
Valid: false,
},
+ StartedAt: job.StartedAt,
})
if err != nil {
return xerrors.Errorf("mark job as failed: %w", err)
@@ -364,7 +386,7 @@ func unhangJob(ctx context.Context, log slog.Logger, db database.Store, pub pubs
if err != nil {
return xerrors.Errorf("marshal log notification: %w", err)
}
- err = pub.Publish(provisionersdk.ProvisionerJobLogsNotifyChannel(jobID), data)
+ err = pub.Publish(provisionersdk.ProvisionerJobLogsNotifyChannel(jobToReap.ID), data)
if err != nil {
return xerrors.Errorf("publish log notification: %w", err)
}
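For orientation before the test changes below: the reaper is driven entirely through its public surface (New, Start, a tick channel, an optional stats channel, Close, Wait). A minimal lifecycle sketch mirroring the test setup; runOneReaperPass is a hypothetical helper, and the parameter types are the same database, pubsub, and slog types used above:

func runOneReaperPass(ctx context.Context, db database.Store, ps pubsub.Pubsub, log slog.Logger) jobreaper.Stats {
	tickCh := make(chan time.Time)
	statsCh := make(chan jobreaper.Stats)

	detector := jobreaper.New(ctx, db, ps, log, tickCh).WithStatsChannel(statsCh)
	detector.Start()
	defer detector.Wait()  // defers run LIFO: Close first, then Wait
	defer detector.Close()

	tickCh <- time.Now() // trigger a single detection pass
	return <-statsCh     // Stats.TerminatedJobIDs lists the reaped jobs
}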
diff --git a/coderd/unhanger/detector_test.go b/coderd/jobreaper/detector_test.go
similarity index 73%
rename from coderd/unhanger/detector_test.go
rename to coderd/jobreaper/detector_test.go
index 43eb62bfa884b..28457aeeca3a8 100644
--- a/coderd/unhanger/detector_test.go
+++ b/coderd/jobreaper/detector_test.go
@@ -1,4 +1,4 @@
-package unhanger_test
+package jobreaper_test
import (
"context"
@@ -20,9 +20,9 @@ import (
"github.com/coder/coder/v2/coderd/database/dbauthz"
"github.com/coder/coder/v2/coderd/database/dbgen"
"github.com/coder/coder/v2/coderd/database/dbtestutil"
+ "github.com/coder/coder/v2/coderd/jobreaper"
"github.com/coder/coder/v2/coderd/provisionerdserver"
"github.com/coder/coder/v2/coderd/rbac"
- "github.com/coder/coder/v2/coderd/unhanger"
"github.com/coder/coder/v2/provisionersdk"
"github.com/coder/coder/v2/testutil"
)
@@ -39,10 +39,10 @@ func TestDetectorNoJobs(t *testing.T) {
db, pubsub = dbtestutil.NewDB(t)
log = testutil.Logger(t)
tickCh = make(chan time.Time)
- statsCh = make(chan unhanger.Stats)
+ statsCh = make(chan jobreaper.Stats)
)
- detector := unhanger.New(ctx, wrapDBAuthz(db, log), pubsub, log, tickCh).WithStatsChannel(statsCh)
+ detector := jobreaper.New(ctx, wrapDBAuthz(db, log), pubsub, log, tickCh).WithStatsChannel(statsCh)
detector.Start()
tickCh <- time.Now()
@@ -62,7 +62,7 @@ func TestDetectorNoHungJobs(t *testing.T) {
db, pubsub = dbtestutil.NewDB(t)
log = testutil.Logger(t)
tickCh = make(chan time.Time)
- statsCh = make(chan unhanger.Stats)
+ statsCh = make(chan jobreaper.Stats)
)
// Insert some jobs that are running and haven't been updated in a while,
@@ -89,7 +89,7 @@ func TestDetectorNoHungJobs(t *testing.T) {
})
}
- detector := unhanger.New(ctx, wrapDBAuthz(db, log), pubsub, log, tickCh).WithStatsChannel(statsCh)
+ detector := jobreaper.New(ctx, wrapDBAuthz(db, log), pubsub, log, tickCh).WithStatsChannel(statsCh)
detector.Start()
tickCh <- now
@@ -109,7 +109,7 @@ func TestDetectorHungWorkspaceBuild(t *testing.T) {
db, pubsub = dbtestutil.NewDB(t)
log = testutil.Logger(t)
tickCh = make(chan time.Time)
- statsCh = make(chan unhanger.Stats)
+ statsCh = make(chan jobreaper.Stats)
)
var (
@@ -195,7 +195,7 @@ func TestDetectorHungWorkspaceBuild(t *testing.T) {
t.Log("previous job ID: ", previousWorkspaceBuildJob.ID)
t.Log("current job ID: ", currentWorkspaceBuildJob.ID)
- detector := unhanger.New(ctx, wrapDBAuthz(db, log), pubsub, log, tickCh).WithStatsChannel(statsCh)
+ detector := jobreaper.New(ctx, wrapDBAuthz(db, log), pubsub, log, tickCh).WithStatsChannel(statsCh)
detector.Start()
tickCh <- now
@@ -231,7 +231,7 @@ func TestDetectorHungWorkspaceBuildNoOverrideState(t *testing.T) {
db, pubsub = dbtestutil.NewDB(t)
log = testutil.Logger(t)
tickCh = make(chan time.Time)
- statsCh = make(chan unhanger.Stats)
+ statsCh = make(chan jobreaper.Stats)
)
var (
@@ -318,7 +318,7 @@ func TestDetectorHungWorkspaceBuildNoOverrideState(t *testing.T) {
t.Log("previous job ID: ", previousWorkspaceBuildJob.ID)
t.Log("current job ID: ", currentWorkspaceBuildJob.ID)
- detector := unhanger.New(ctx, wrapDBAuthz(db, log), pubsub, log, tickCh).WithStatsChannel(statsCh)
+ detector := jobreaper.New(ctx, wrapDBAuthz(db, log), pubsub, log, tickCh).WithStatsChannel(statsCh)
detector.Start()
tickCh <- now
@@ -354,7 +354,7 @@ func TestDetectorHungWorkspaceBuildNoOverrideStateIfNoExistingBuild(t *testing.T
db, pubsub = dbtestutil.NewDB(t)
log = testutil.Logger(t)
tickCh = make(chan time.Time)
- statsCh = make(chan unhanger.Stats)
+ statsCh = make(chan jobreaper.Stats)
)
var (
@@ -411,7 +411,7 @@ func TestDetectorHungWorkspaceBuildNoOverrideStateIfNoExistingBuild(t *testing.T
t.Log("current job ID: ", currentWorkspaceBuildJob.ID)
- detector := unhanger.New(ctx, wrapDBAuthz(db, log), pubsub, log, tickCh).WithStatsChannel(statsCh)
+ detector := jobreaper.New(ctx, wrapDBAuthz(db, log), pubsub, log, tickCh).WithStatsChannel(statsCh)
detector.Start()
tickCh <- now
@@ -439,6 +439,100 @@ func TestDetectorHungWorkspaceBuildNoOverrideStateIfNoExistingBuild(t *testing.T
detector.Wait()
}
+func TestDetectorPendingWorkspaceBuildNoOverrideStateIfNoExistingBuild(t *testing.T) {
+ t.Parallel()
+
+ var (
+ ctx = testutil.Context(t, testutil.WaitLong)
+ db, pubsub = dbtestutil.NewDB(t)
+ log = testutil.Logger(t)
+ tickCh = make(chan time.Time)
+ statsCh = make(chan jobreaper.Stats)
+ )
+
+ var (
+ now = time.Now()
+ thirtyFiveMinAgo = now.Add(-time.Minute * 35)
+ org = dbgen.Organization(t, db, database.Organization{})
+ user = dbgen.User(t, db, database.User{})
+ file = dbgen.File(t, db, database.File{})
+ template = dbgen.Template(t, db, database.Template{
+ OrganizationID: org.ID,
+ CreatedBy: user.ID,
+ })
+ templateVersion = dbgen.TemplateVersion(t, db, database.TemplateVersion{
+ OrganizationID: org.ID,
+ TemplateID: uuid.NullUUID{
+ UUID: template.ID,
+ Valid: true,
+ },
+ CreatedBy: user.ID,
+ })
+ workspace = dbgen.Workspace(t, db, database.WorkspaceTable{
+ OwnerID: user.ID,
+ OrganizationID: org.ID,
+ TemplateID: template.ID,
+ })
+
+ // First build.
+ expectedWorkspaceBuildState = []byte(`{"dean":"cool","colin":"also cool"}`)
+ currentWorkspaceBuildJob = dbgen.ProvisionerJob(t, db, pubsub, database.ProvisionerJob{
+ CreatedAt: thirtyFiveMinAgo,
+ UpdatedAt: thirtyFiveMinAgo,
+ StartedAt: sql.NullTime{
+ Time: time.Time{},
+ Valid: false,
+ },
+ OrganizationID: org.ID,
+ InitiatorID: user.ID,
+ Provisioner: database.ProvisionerTypeEcho,
+ StorageMethod: database.ProvisionerStorageMethodFile,
+ FileID: file.ID,
+ Type: database.ProvisionerJobTypeWorkspaceBuild,
+ Input: []byte("{}"),
+ })
+ currentWorkspaceBuild = dbgen.WorkspaceBuild(t, db, database.WorkspaceBuild{
+ WorkspaceID: workspace.ID,
+ TemplateVersionID: templateVersion.ID,
+ BuildNumber: 1,
+ JobID: currentWorkspaceBuildJob.ID,
+ // Should not be overridden.
+ ProvisionerState: expectedWorkspaceBuildState,
+ })
+ )
+
+ t.Log("current job ID: ", currentWorkspaceBuildJob.ID)
+
+ detector := jobreaper.New(ctx, wrapDBAuthz(db, log), pubsub, log, tickCh).WithStatsChannel(statsCh)
+ detector.Start()
+ tickCh <- now
+
+ stats := <-statsCh
+ require.NoError(t, stats.Error)
+ require.Len(t, stats.TerminatedJobIDs, 1)
+ require.Equal(t, currentWorkspaceBuildJob.ID, stats.TerminatedJobIDs[0])
+
+ // Check that the current provisioner job was updated.
+ job, err := db.GetProvisionerJobByID(ctx, currentWorkspaceBuildJob.ID)
+ require.NoError(t, err)
+ require.WithinDuration(t, now, job.UpdatedAt, 30*time.Second)
+ require.True(t, job.CompletedAt.Valid)
+ require.WithinDuration(t, now, job.CompletedAt.Time, 30*time.Second)
+ require.True(t, job.StartedAt.Valid)
+ require.WithinDuration(t, now, job.StartedAt.Time, 30*time.Second)
+ require.True(t, job.Error.Valid)
+ require.Contains(t, job.Error.String, "Build has been detected as pending")
+ require.False(t, job.ErrorCode.Valid)
+
+ // Check that the provisioner state was NOT updated.
+ build, err := db.GetWorkspaceBuildByID(ctx, currentWorkspaceBuild.ID)
+ require.NoError(t, err)
+ require.Equal(t, expectedWorkspaceBuildState, build.ProvisionerState)
+
+ detector.Close()
+ detector.Wait()
+}
+
func TestDetectorHungOtherJobTypes(t *testing.T) {
t.Parallel()
@@ -447,7 +541,7 @@ func TestDetectorHungOtherJobTypes(t *testing.T) {
db, pubsub = dbtestutil.NewDB(t)
log = testutil.Logger(t)
tickCh = make(chan time.Time)
- statsCh = make(chan unhanger.Stats)
+ statsCh = make(chan jobreaper.Stats)
)
var (
@@ -509,7 +603,7 @@ func TestDetectorHungOtherJobTypes(t *testing.T) {
t.Log("template import job ID: ", templateImportJob.ID)
t.Log("template dry-run job ID: ", templateDryRunJob.ID)
- detector := unhanger.New(ctx, wrapDBAuthz(db, log), pubsub, log, tickCh).WithStatsChannel(statsCh)
+ detector := jobreaper.New(ctx, wrapDBAuthz(db, log), pubsub, log, tickCh).WithStatsChannel(statsCh)
detector.Start()
tickCh <- now
@@ -543,6 +637,113 @@ func TestDetectorHungOtherJobTypes(t *testing.T) {
detector.Wait()
}
+func TestDetectorPendingOtherJobTypes(t *testing.T) {
+ t.Parallel()
+
+ var (
+ ctx = testutil.Context(t, testutil.WaitLong)
+ db, pubsub = dbtestutil.NewDB(t)
+ log = testutil.Logger(t)
+ tickCh = make(chan time.Time)
+ statsCh = make(chan jobreaper.Stats)
+ )
+
+ var (
+ now = time.Now()
+ thirtyFiveMinAgo = now.Add(-time.Minute * 35)
+ org = dbgen.Organization(t, db, database.Organization{})
+ user = dbgen.User(t, db, database.User{})
+ file = dbgen.File(t, db, database.File{})
+
+ // Template import job.
+ templateImportJob = dbgen.ProvisionerJob(t, db, pubsub, database.ProvisionerJob{
+ CreatedAt: thirtyFiveMinAgo,
+ UpdatedAt: thirtyFiveMinAgo,
+ StartedAt: sql.NullTime{
+ Time: time.Time{},
+ Valid: false,
+ },
+ OrganizationID: org.ID,
+ InitiatorID: user.ID,
+ Provisioner: database.ProvisionerTypeEcho,
+ StorageMethod: database.ProvisionerStorageMethodFile,
+ FileID: file.ID,
+ Type: database.ProvisionerJobTypeTemplateVersionImport,
+ Input: []byte("{}"),
+ })
+ _ = dbgen.TemplateVersion(t, db, database.TemplateVersion{
+ OrganizationID: org.ID,
+ JobID: templateImportJob.ID,
+ CreatedBy: user.ID,
+ })
+ )
+
+ // Template dry-run job.
+ dryRunVersion := dbgen.TemplateVersion(t, db, database.TemplateVersion{
+ OrganizationID: org.ID,
+ CreatedBy: user.ID,
+ })
+ input, err := json.Marshal(provisionerdserver.TemplateVersionDryRunJob{
+ TemplateVersionID: dryRunVersion.ID,
+ })
+ require.NoError(t, err)
+ templateDryRunJob := dbgen.ProvisionerJob(t, db, pubsub, database.ProvisionerJob{
+ CreatedAt: thirtyFiveMinAgo,
+ UpdatedAt: thirtyFiveMinAgo,
+ StartedAt: sql.NullTime{
+ Time: time.Time{},
+ Valid: false,
+ },
+ OrganizationID: org.ID,
+ InitiatorID: user.ID,
+ Provisioner: database.ProvisionerTypeEcho,
+ StorageMethod: database.ProvisionerStorageMethodFile,
+ FileID: file.ID,
+ Type: database.ProvisionerJobTypeTemplateVersionDryRun,
+ Input: input,
+ })
+
+ t.Log("template import job ID: ", templateImportJob.ID)
+ t.Log("template dry-run job ID: ", templateDryRunJob.ID)
+
+ detector := jobreaper.New(ctx, wrapDBAuthz(db, log), pubsub, log, tickCh).WithStatsChannel(statsCh)
+ detector.Start()
+ tickCh <- now
+
+ stats := <-statsCh
+ require.NoError(t, stats.Error)
+ require.Len(t, stats.TerminatedJobIDs, 2)
+ require.Contains(t, stats.TerminatedJobIDs, templateImportJob.ID)
+ require.Contains(t, stats.TerminatedJobIDs, templateDryRunJob.ID)
+
+ // Check that the template import job was updated.
+ job, err := db.GetProvisionerJobByID(ctx, templateImportJob.ID)
+ require.NoError(t, err)
+ require.WithinDuration(t, now, job.UpdatedAt, 30*time.Second)
+ require.True(t, job.CompletedAt.Valid)
+ require.WithinDuration(t, now, job.CompletedAt.Time, 30*time.Second)
+ require.True(t, job.StartedAt.Valid)
+ require.WithinDuration(t, now, job.StartedAt.Time, 30*time.Second)
+ require.True(t, job.Error.Valid)
+ require.Contains(t, job.Error.String, "Build has been detected as pending")
+ require.False(t, job.ErrorCode.Valid)
+
+ // Check that the template dry-run job was updated.
+ job, err = db.GetProvisionerJobByID(ctx, templateDryRunJob.ID)
+ require.NoError(t, err)
+ require.WithinDuration(t, now, job.UpdatedAt, 30*time.Second)
+ require.True(t, job.CompletedAt.Valid)
+ require.WithinDuration(t, now, job.CompletedAt.Time, 30*time.Second)
+ require.True(t, job.StartedAt.Valid)
+ require.WithinDuration(t, now, job.StartedAt.Time, 30*time.Second)
+ require.True(t, job.Error.Valid)
+ require.Contains(t, job.Error.String, "Build has been detected as pending")
+ require.False(t, job.ErrorCode.Valid)
+
+ detector.Close()
+ detector.Wait()
+}
+
func TestDetectorHungCanceledJob(t *testing.T) {
t.Parallel()
@@ -551,7 +752,7 @@ func TestDetectorHungCanceledJob(t *testing.T) {
db, pubsub = dbtestutil.NewDB(t)
log = testutil.Logger(t)
tickCh = make(chan time.Time)
- statsCh = make(chan unhanger.Stats)
+ statsCh = make(chan jobreaper.Stats)
)
var (
@@ -591,7 +792,7 @@ func TestDetectorHungCanceledJob(t *testing.T) {
t.Log("template import job ID: ", templateImportJob.ID)
- detector := unhanger.New(ctx, wrapDBAuthz(db, log), pubsub, log, tickCh).WithStatsChannel(statsCh)
+ detector := jobreaper.New(ctx, wrapDBAuthz(db, log), pubsub, log, tickCh).WithStatsChannel(statsCh)
detector.Start()
tickCh <- now
@@ -653,7 +854,7 @@ func TestDetectorPushesLogs(t *testing.T) {
db, pubsub = dbtestutil.NewDB(t)
log = testutil.Logger(t)
tickCh = make(chan time.Time)
- statsCh = make(chan unhanger.Stats)
+ statsCh = make(chan jobreaper.Stats)
)
var (
@@ -706,7 +907,7 @@ func TestDetectorPushesLogs(t *testing.T) {
require.Len(t, logs, 10)
}
- detector := unhanger.New(ctx, wrapDBAuthz(db, log), pubsub, log, tickCh).WithStatsChannel(statsCh)
+ detector := jobreaper.New(ctx, wrapDBAuthz(db, log), pubsub, log, tickCh).WithStatsChannel(statsCh)
detector.Start()
// Create pubsub subscription to listen for new log events.
@@ -741,12 +942,19 @@ func TestDetectorPushesLogs(t *testing.T) {
CreatedAfter: after,
})
require.NoError(t, err)
- require.Len(t, logs, len(unhanger.HungJobLogMessages))
+ threshold := jobreaper.HungJobDuration
+ jobType := jobreaper.Hung
+ if templateImportJob.JobStatus == database.ProvisionerJobStatusPending {
+ threshold = jobreaper.PendingJobDuration
+ jobType = jobreaper.Pending
+ }
+ expectedLogs := jobreaper.JobLogMessages(jobType, threshold)
+ require.Len(t, logs, len(expectedLogs))
for i, log := range logs {
assert.Equal(t, database.LogLevelError, log.Level)
assert.Equal(t, c.expectStage, log.Stage)
assert.Equal(t, database.LogSourceProvisionerDaemon, log.Source)
- assert.Equal(t, unhanger.HungJobLogMessages[i], log.Output)
+ assert.Equal(t, expectedLogs[i], log.Output)
}
// Double check the full log count.
@@ -755,7 +963,7 @@ func TestDetectorPushesLogs(t *testing.T) {
CreatedAfter: 0,
})
require.NoError(t, err)
- require.Len(t, logs, c.preLogCount+len(unhanger.HungJobLogMessages))
+ require.Len(t, logs, c.preLogCount+len(expectedLogs))
detector.Close()
detector.Wait()
@@ -771,15 +979,15 @@ func TestDetectorMaxJobsPerRun(t *testing.T) {
db, pubsub = dbtestutil.NewDB(t)
log = testutil.Logger(t)
tickCh = make(chan time.Time)
- statsCh = make(chan unhanger.Stats)
+ statsCh = make(chan jobreaper.Stats)
org = dbgen.Organization(t, db, database.Organization{})
user = dbgen.User(t, db, database.User{})
file = dbgen.File(t, db, database.File{})
)
- // Create unhanger.MaxJobsPerRun + 1 hung jobs.
+ // Create MaxJobsPerRun + 1 hung jobs.
now := time.Now()
- for i := 0; i < unhanger.MaxJobsPerRun+1; i++ {
+ for i := 0; i < jobreaper.MaxJobsPerRun+1; i++ {
pj := dbgen.ProvisionerJob(t, db, pubsub, database.ProvisionerJob{
CreatedAt: now.Add(-time.Hour),
UpdatedAt: now.Add(-time.Hour),
@@ -802,14 +1010,14 @@ func TestDetectorMaxJobsPerRun(t *testing.T) {
})
}
- detector := unhanger.New(ctx, wrapDBAuthz(db, log), pubsub, log, tickCh).WithStatsChannel(statsCh)
+ detector := jobreaper.New(ctx, wrapDBAuthz(db, log), pubsub, log, tickCh).WithStatsChannel(statsCh)
detector.Start()
tickCh <- now
- // Make sure that only unhanger.MaxJobsPerRun jobs are terminated.
+ // Make sure that only MaxJobsPerRun jobs are terminated.
stats := <-statsCh
require.NoError(t, stats.Error)
- require.Len(t, stats.TerminatedJobIDs, unhanger.MaxJobsPerRun)
+ require.Len(t, stats.TerminatedJobIDs, jobreaper.MaxJobsPerRun)
// Run the detector again and make sure that only the remaining job is
// terminated.
@@ -823,7 +1031,7 @@ func TestDetectorMaxJobsPerRun(t *testing.T) {
}
// wrapDBAuthz adds our Authorization/RBAC around the given database store, to
-// ensure the unhanger has the right permissions to do its work.
+// ensure the reaper has the right permissions to do its work.
func wrapDBAuthz(db database.Store, logger slog.Logger) database.Store {
return dbauthz.New(
db,
diff --git a/coderd/rbac/authz.go b/coderd/rbac/authz.go
index d2c6d5d0675be..c63042a2a1363 100644
--- a/coderd/rbac/authz.go
+++ b/coderd/rbac/authz.go
@@ -65,7 +65,7 @@ const (
SubjectTypeUser SubjectType = "user"
SubjectTypeProvisionerd SubjectType = "provisionerd"
SubjectTypeAutostart SubjectType = "autostart"
- SubjectTypeHangDetector SubjectType = "hang_detector"
+ SubjectTypeJobReaper SubjectType = "job_reaper"
SubjectTypeResourceMonitor SubjectType = "resource_monitor"
SubjectTypeCryptoKeyRotator SubjectType = "crypto_key_rotator"
SubjectTypeCryptoKeyReader SubjectType = "crypto_key_reader"
diff --git a/coderd/rbac/object_gen.go b/coderd/rbac/object_gen.go
index 40b7dc87a56f8..ad1a510fd44bd 100644
--- a/coderd/rbac/object_gen.go
+++ b/coderd/rbac/object_gen.go
@@ -234,7 +234,9 @@ var (
// ResourceProvisionerJobs
// Valid Actions
+ // - "ActionCreate" :: create provisioner jobs
// - "ActionRead" :: read provisioner jobs
+ // - "ActionUpdate" :: update provisioner jobs
ResourceProvisionerJobs = Object{
Type: "provisioner_jobs",
}
diff --git a/coderd/rbac/policy/policy.go b/coderd/rbac/policy/policy.go
index 35da0892abfdb..c37e84c48f964 100644
--- a/coderd/rbac/policy/policy.go
+++ b/coderd/rbac/policy/policy.go
@@ -182,7 +182,9 @@ var RBACPermissions = map[string]PermissionDefinition{
},
"provisioner_jobs": {
Actions: map[Action]ActionDefinition{
- ActionRead: actDef("read provisioner jobs"),
+ ActionRead: actDef("read provisioner jobs"),
+ ActionUpdate: actDef("update provisioner jobs"),
+ ActionCreate: actDef("create provisioner jobs"),
},
},
"organization": {
diff --git a/coderd/rbac/roles.go b/coderd/rbac/roles.go
index 56124faee44e2..0b94a74201b16 100644
--- a/coderd/rbac/roles.go
+++ b/coderd/rbac/roles.go
@@ -503,7 +503,7 @@ func ReloadBuiltinRoles(opts *RoleOptions) {
// the ability to create templates and provisioners has
// a lot of overlap.
ResourceProvisionerDaemon.Type: {policy.ActionCreate, policy.ActionRead, policy.ActionUpdate, policy.ActionDelete},
- ResourceProvisionerJobs.Type: {policy.ActionRead},
+ ResourceProvisionerJobs.Type: {policy.ActionRead, policy.ActionUpdate, policy.ActionCreate},
}),
},
User: []Permission{},
diff --git a/coderd/rbac/roles_test.go b/coderd/rbac/roles_test.go
index e90c89914fdec..6d42a01474d1a 100644
--- a/coderd/rbac/roles_test.go
+++ b/coderd/rbac/roles_test.go
@@ -580,7 +580,7 @@ func TestRolePermissions(t *testing.T) {
},
{
Name: "ProvisionerJobs",
- Actions: []policy.Action{policy.ActionRead},
+ Actions: []policy.Action{policy.ActionRead, policy.ActionUpdate, policy.ActionCreate},
Resource: rbac.ResourceProvisionerJobs.InOrg(orgID),
AuthorizeMap: map[bool][]hasAuthSubjects{
true: {owner, orgTemplateAdmin, orgAdmin},
diff --git a/codersdk/deployment.go b/codersdk/deployment.go
index 0741bf9e3844a..39b67feb2c73a 100644
--- a/codersdk/deployment.go
+++ b/codersdk/deployment.go
@@ -345,7 +345,7 @@ type DeploymentValues struct {
// HTTPAddress is a string because it may be set to zero to disable.
HTTPAddress serpent.String `json:"http_address,omitempty" typescript:",notnull"`
AutobuildPollInterval serpent.Duration `json:"autobuild_poll_interval,omitempty"`
- JobHangDetectorInterval serpent.Duration `json:"job_hang_detector_interval,omitempty"`
+ JobReaperDetectorInterval serpent.Duration `json:"job_hang_detector_interval,omitempty"`
DERP DERP `json:"derp,omitempty" typescript:",notnull"`
Prometheus PrometheusConfig `json:"prometheus,omitempty" typescript:",notnull"`
Pprof PprofConfig `json:"pprof,omitempty" typescript:",notnull"`
@@ -1287,13 +1287,13 @@ func (c *DeploymentValues) Options() serpent.OptionSet {
Annotations: serpent.Annotations{}.Mark(annotationFormatDuration, "true"),
},
{
- Name: "Job Hang Detector Interval",
- Description: "Interval to poll for hung jobs and automatically terminate them.",
+ Name: "Job Reaper Detect Interval",
+ Description: "Interval to poll for hung and pending jobs and automatically terminate them.",
Flag: "job-hang-detector-interval",
Env: "CODER_JOB_HANG_DETECTOR_INTERVAL",
Hidden: true,
Default: time.Minute.String(),
- Value: &c.JobHangDetectorInterval,
+ Value: &c.JobReaperDetectorInterval,
YAML: "jobHangDetectorInterval",
Annotations: serpent.Annotations{}.Mark(annotationFormatDuration, "true"),
},
diff --git a/codersdk/rbacresources_gen.go b/codersdk/rbacresources_gen.go
index 54f65767928d6..6157281f21356 100644
--- a/codersdk/rbacresources_gen.go
+++ b/codersdk/rbacresources_gen.go
@@ -90,7 +90,7 @@ var RBACResourceActions = map[RBACResource][]RBACAction{
ResourceOrganization: {ActionCreate, ActionDelete, ActionRead, ActionUpdate},
ResourceOrganizationMember: {ActionCreate, ActionDelete, ActionRead, ActionUpdate},
ResourceProvisionerDaemon: {ActionCreate, ActionDelete, ActionRead, ActionUpdate},
- ResourceProvisionerJobs: {ActionRead},
+ ResourceProvisionerJobs: {ActionCreate, ActionRead, ActionUpdate},
ResourceReplicas: {ActionRead},
ResourceSystem: {ActionCreate, ActionDelete, ActionRead, ActionUpdate},
ResourceTailnetCoordinator: {ActionCreate, ActionDelete, ActionRead, ActionUpdate},
diff --git a/provisioner/terraform/serve.go b/provisioner/terraform/serve.go
index 562946d8ef92e..3e671b0c68e56 100644
--- a/provisioner/terraform/serve.go
+++ b/provisioner/terraform/serve.go
@@ -16,7 +16,7 @@ import (
"cdr.dev/slog"
"github.com/coder/coder/v2/coderd/database"
- "github.com/coder/coder/v2/coderd/unhanger"
+ "github.com/coder/coder/v2/coderd/jobreaper"
"github.com/coder/coder/v2/provisionersdk"
)
@@ -39,9 +39,9 @@ type ServeOptions struct {
//
// This is a no-op on Windows where the process can't be interrupted.
//
- // Default value: 3 minutes (unhanger.HungJobExitTimeout). This value should
+ // Default value: 3 minutes (jobreaper.HungJobExitTimeout). This value should
// be kept less than the value that Coder uses to mark hung jobs as failed,
- // which is 5 minutes (see unhanger package).
+ // which is 5 minutes (see jobreaper package).
ExitTimeout time.Duration
}
@@ -131,7 +131,7 @@ func Serve(ctx context.Context, options *ServeOptions) error {
options.Tracer = trace.NewNoopTracerProvider().Tracer("noop")
}
if options.ExitTimeout == 0 {
- options.ExitTimeout = unhanger.HungJobExitTimeout
+ options.ExitTimeout = jobreaper.HungJobExitTimeout
}
return provisionersdk.Serve(ctx, &server{
execMut: &sync.Mutex{},
diff --git a/site/src/api/rbacresourcesGenerated.ts b/site/src/api/rbacresourcesGenerated.ts
index 079dcb4a87a61..3acb86c079908 100644
--- a/site/src/api/rbacresourcesGenerated.ts
+++ b/site/src/api/rbacresourcesGenerated.ts
@@ -130,7 +130,9 @@ export const RBACResourceActions: Partial<
update: "update a provisioner daemon",
},
provisioner_jobs: {
+ create: "create provisioner jobs",
read: "read provisioner jobs",
+ update: "update provisioner jobs",
},
replicas: {
read: "read replicas",
From 1267c9c4056810adaad72d86ecc25e1e0201caa0 Mon Sep 17 00:00:00 2001
From: Danielle Maywood
Date: Tue, 20 May 2025 16:01:57 +0100
Subject: [PATCH 20/44] fix: ensure reason present for workspace autoupdated
notification (#17935)
Fixes https://github.com/coder/coder/issues/17930
Update the `WorkspaceAutoUpdated` notification to only display the
reason if it is present.
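Below is a minimal, hedged sketch of the fallback pattern this change applies (the function and its signature are stand-ins, not the real coderd API): when the template version message is empty, a placeholder is substituted so the rendered notification never shows a blank field.

```go
package main

import "fmt"

// buildLabels assembles the notification labels; if the template version
// message is empty it falls back to a placeholder, mirroring the change in
// lifecycle_executor.go. The helper itself is illustrative only.
func buildLabels(workspaceName, reason, versionName, versionMessage string) map[string]string {
	if versionMessage == "" {
		versionMessage = "None provided"
	}
	return map[string]string{
		"name":                     workspaceName,
		"initiator":                "autobuild",
		"reason":                   reason,
		"template_version_name":    versionName,
		"template_version_message": versionMessage,
	}
}

func main() {
	labels := buildLabels("dev", "autostart", "v42", "")
	fmt.Println(labels["template_version_message"]) // prints "None provided"
}
```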
---
coderd/autobuild/lifecycle_executor.go | 7 ++++++-
1 file changed, 6 insertions(+), 1 deletion(-)
diff --git a/coderd/autobuild/lifecycle_executor.go b/coderd/autobuild/lifecycle_executor.go
index cc4e48b43544c..eedcc812bb19c 100644
--- a/coderd/autobuild/lifecycle_executor.go
+++ b/coderd/autobuild/lifecycle_executor.go
@@ -349,13 +349,18 @@ func (e *Executor) runOnce(t time.Time) Stats {
nextBuildReason = string(nextBuild.Reason)
}
+ templateVersionMessage := activeTemplateVersion.Message
+ if templateVersionMessage == "" {
+ templateVersionMessage = "None provided"
+ }
+
if _, err := e.notificationsEnqueuer.Enqueue(e.ctx, ws.OwnerID, notifications.TemplateWorkspaceAutoUpdated,
map[string]string{
"name": ws.Name,
"initiator": "autobuild",
"reason": nextBuildReason,
"template_version_name": activeTemplateVersion.Name,
- "template_version_message": activeTemplateVersion.Message,
+ "template_version_message": templateVersionMessage,
}, "autobuild",
// Associate this notification with all the related entities.
ws.ID, ws.OwnerID, ws.TemplateID, ws.OrganizationID,
From 93f17bc73e71d9eb23543bdd4c2ada22ff35a2c8 Mon Sep 17 00:00:00 2001
From: Thomas Kosiewski
Date: Tue, 20 May 2025 17:07:50 +0200
Subject: [PATCH 21/44] fix: remove unnecessary user lookup in agent API calls
(#17934)
# Use workspace.OwnerUsername instead of fetching the owner
This PR optimizes the agent API by using the `workspace.OwnerUsername` field directly instead of making an additional database query to fetch the owner's username. The change removes the need to call `GetUserByID` in the manifest API and workspace agent RPC endpoints.
Previously, when the agent token was scoped without access to user data (`api_key_scope = "no_user_data"`), the agent failed to fetch the manifest due to an RBAC error.
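For context, a hedged sketch of the pattern (stand-in types, not the real `database.Workspace` or handler code): read the owner's username from the workspace row that was already fetched, instead of issuing a second lookup that a restricted API-key scope could reject.

```go
package main

import "fmt"

// Workspace stands in for the fields this patch relies on; the real
// database.Workspace row already carries the owner's username.
type Workspace struct {
	Name          string
	OwnerID       string
	OwnerUsername string
}

// streamName builds the agent stream identifier directly from the workspace
// row, so no GetUserByID call is needed and a "no_user_data" scoped token
// cannot trip an RBAC error here.
func streamName(ws Workspace, agentName string) string {
	return fmt.Sprintf("%s-%s-%s", ws.OwnerUsername, ws.Name, agentName)
}

func main() {
	ws := Workspace{Name: "dev", OwnerID: "user-123", OwnerUsername: "cool-user"}
	fmt.Println(streamName(ws, "main")) // cool-user-dev-main
}
```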
Change-Id: I3b6e7581134e2374b364ee059e3b18ece3d98b41
Signed-off-by: Thomas Kosiewski
---
coderd/agentapi/manifest.go | 11 +-
coderd/agentapi/manifest_test.go | 10 +-
coderd/workspaceagents_test.go | 64 ++++++---
coderd/workspaceagentsrpc.go | 13 +-
coderd/workspaceagentsrpc_test.go | 212 +++++++++++++++++++-----------
flake.nix | 1 +
6 files changed, 194 insertions(+), 117 deletions(-)
diff --git a/coderd/agentapi/manifest.go b/coderd/agentapi/manifest.go
index 66bfe4cb5f94f..855ff4b8acd37 100644
--- a/coderd/agentapi/manifest.go
+++ b/coderd/agentapi/manifest.go
@@ -47,7 +47,6 @@ func (a *ManifestAPI) GetManifest(ctx context.Context, _ *agentproto.GetManifest
scripts []database.WorkspaceAgentScript
metadata []database.WorkspaceAgentMetadatum
workspace database.Workspace
- owner database.User
devcontainers []database.WorkspaceAgentDevcontainer
)
@@ -76,10 +75,6 @@ func (a *ManifestAPI) GetManifest(ctx context.Context, _ *agentproto.GetManifest
if err != nil {
return xerrors.Errorf("getting workspace by id: %w", err)
}
- owner, err = a.Database.GetUserByID(ctx, workspace.OwnerID)
- if err != nil {
- return xerrors.Errorf("getting workspace owner by id: %w", err)
- }
return err
})
eg.Go(func() (err error) {
@@ -98,7 +93,7 @@ func (a *ManifestAPI) GetManifest(ctx context.Context, _ *agentproto.GetManifest
AppSlugOrPort: "{{port}}",
AgentName: workspaceAgent.Name,
WorkspaceName: workspace.Name,
- Username: owner.Username,
+ Username: workspace.OwnerUsername,
}
vscodeProxyURI := vscodeProxyURI(appSlug, a.AccessURL, a.AppHostname)
@@ -115,7 +110,7 @@ func (a *ManifestAPI) GetManifest(ctx context.Context, _ *agentproto.GetManifest
}
}
- apps, err := dbAppsToProto(dbApps, workspaceAgent, owner.Username, workspace)
+ apps, err := dbAppsToProto(dbApps, workspaceAgent, workspace.OwnerUsername, workspace)
if err != nil {
return nil, xerrors.Errorf("converting workspace apps: %w", err)
}
@@ -128,7 +123,7 @@ func (a *ManifestAPI) GetManifest(ctx context.Context, _ *agentproto.GetManifest
return &agentproto.Manifest{
AgentId: workspaceAgent.ID[:],
AgentName: workspaceAgent.Name,
- OwnerUsername: owner.Username,
+ OwnerUsername: workspace.OwnerUsername,
WorkspaceId: workspace.ID[:],
WorkspaceName: workspace.Name,
GitAuthConfigs: gitAuthConfigs,
diff --git a/coderd/agentapi/manifest_test.go b/coderd/agentapi/manifest_test.go
index 9273acb0c40ff..fc46f5fe480f8 100644
--- a/coderd/agentapi/manifest_test.go
+++ b/coderd/agentapi/manifest_test.go
@@ -46,9 +46,10 @@ func TestGetManifest(t *testing.T) {
Username: "cool-user",
}
workspace = database.Workspace{
- ID: uuid.New(),
- OwnerID: owner.ID,
- Name: "cool-workspace",
+ ID: uuid.New(),
+ OwnerID: owner.ID,
+ OwnerUsername: owner.Username,
+ Name: "cool-workspace",
}
agent = database.WorkspaceAgent{
ID: uuid.New(),
@@ -336,7 +337,6 @@ func TestGetManifest(t *testing.T) {
}).Return(metadata, nil)
mDB.EXPECT().GetWorkspaceAgentDevcontainersByAgentID(gomock.Any(), agent.ID).Return(devcontainers, nil)
mDB.EXPECT().GetWorkspaceByID(gomock.Any(), workspace.ID).Return(workspace, nil)
- mDB.EXPECT().GetUserByID(gomock.Any(), workspace.OwnerID).Return(owner, nil)
got, err := api.GetManifest(context.Background(), &agentproto.GetManifestRequest{})
require.NoError(t, err)
@@ -404,7 +404,6 @@ func TestGetManifest(t *testing.T) {
}).Return([]database.WorkspaceAgentMetadatum{}, nil)
mDB.EXPECT().GetWorkspaceAgentDevcontainersByAgentID(gomock.Any(), childAgent.ID).Return([]database.WorkspaceAgentDevcontainer{}, nil)
mDB.EXPECT().GetWorkspaceByID(gomock.Any(), workspace.ID).Return(workspace, nil)
- mDB.EXPECT().GetUserByID(gomock.Any(), workspace.OwnerID).Return(owner, nil)
got, err := api.GetManifest(context.Background(), &agentproto.GetManifestRequest{})
require.NoError(t, err)
@@ -468,7 +467,6 @@ func TestGetManifest(t *testing.T) {
}).Return(metadata, nil)
mDB.EXPECT().GetWorkspaceAgentDevcontainersByAgentID(gomock.Any(), agent.ID).Return(devcontainers, nil)
mDB.EXPECT().GetWorkspaceByID(gomock.Any(), workspace.ID).Return(workspace, nil)
- mDB.EXPECT().GetUserByID(gomock.Any(), workspace.OwnerID).Return(owner, nil)
got, err := api.GetManifest(context.Background(), &agentproto.GetManifestRequest{})
require.NoError(t, err)
diff --git a/coderd/workspaceagents_test.go b/coderd/workspaceagents_test.go
index 27da80b3c579b..f4f3dcdec9f89 100644
--- a/coderd/workspaceagents_test.go
+++ b/coderd/workspaceagents_test.go
@@ -439,25 +439,55 @@ func TestWorkspaceAgentConnectRPC(t *testing.T) {
t.Run("Connect", func(t *testing.T) {
t.Parallel()
- client, db := coderdtest.NewWithDatabase(t, nil)
- user := coderdtest.CreateFirstUser(t, client)
- r := dbfake.WorkspaceBuild(t, db, database.WorkspaceTable{
- OrganizationID: user.OrganizationID,
- OwnerID: user.UserID,
- }).WithAgent().Do()
- _ = agenttest.New(t, client.URL, r.AgentToken)
- resources := coderdtest.AwaitWorkspaceAgents(t, client, r.Workspace.ID)
+ for _, tc := range []struct {
+ name string
+ apiKeyScope rbac.ScopeName
+ }{
+ {
+ name: "empty (backwards compat)",
+ apiKeyScope: "",
+ },
+ {
+ name: "all",
+ apiKeyScope: rbac.ScopeAll,
+ },
+ {
+ name: "no_user_data",
+ apiKeyScope: rbac.ScopeNoUserData,
+ },
+ {
+ name: "application_connect",
+ apiKeyScope: rbac.ScopeApplicationConnect,
+ },
+ } {
+ t.Run(tc.name, func(t *testing.T) {
+ client, db := coderdtest.NewWithDatabase(t, nil)
+ user := coderdtest.CreateFirstUser(t, client)
+ r := dbfake.WorkspaceBuild(t, db, database.WorkspaceTable{
+ OrganizationID: user.OrganizationID,
+ OwnerID: user.UserID,
+ }).WithAgent(func(agents []*proto.Agent) []*proto.Agent {
+ for _, agent := range agents {
+ agent.ApiKeyScope = string(tc.apiKeyScope)
+ }
- ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitLong)
- defer cancel()
+ return agents
+ }).Do()
+ _ = agenttest.New(t, client.URL, r.AgentToken)
+ resources := coderdtest.NewWorkspaceAgentWaiter(t, client, r.Workspace.ID).AgentNames([]string{}).Wait()
- conn, err := workspacesdk.New(client).
- DialAgent(ctx, resources[0].Agents[0].ID, nil)
- require.NoError(t, err)
- defer func() {
- _ = conn.Close()
- }()
- conn.AwaitReachable(ctx)
+ ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitLong)
+ defer cancel()
+
+ conn, err := workspacesdk.New(client).
+ DialAgent(ctx, resources[0].Agents[0].ID, nil)
+ require.NoError(t, err)
+ defer func() {
+ _ = conn.Close()
+ }()
+ conn.AwaitReachable(ctx)
+ })
+ }
})
t.Run("FailNonLatestBuild", func(t *testing.T) {
diff --git a/coderd/workspaceagentsrpc.go b/coderd/workspaceagentsrpc.go
index 43da35410f632..2dcf65bd8c7d5 100644
--- a/coderd/workspaceagentsrpc.go
+++ b/coderd/workspaceagentsrpc.go
@@ -76,17 +76,8 @@ func (api *API) workspaceAgentRPC(rw http.ResponseWriter, r *http.Request) {
return
}
- owner, err := api.Database.GetUserByID(ctx, workspace.OwnerID)
- if err != nil {
- httpapi.Write(ctx, rw, http.StatusBadRequest, codersdk.Response{
- Message: "Internal error fetching user.",
- Detail: err.Error(),
- })
- return
- }
-
logger = logger.With(
- slog.F("owner", owner.Username),
+ slog.F("owner", workspace.OwnerUsername),
slog.F("workspace_name", workspace.Name),
slog.F("agent_name", workspaceAgent.Name),
)
@@ -170,7 +161,7 @@ func (api *API) workspaceAgentRPC(rw http.ResponseWriter, r *http.Request) {
})
streamID := tailnet.StreamID{
- Name: fmt.Sprintf("%s-%s-%s", owner.Username, workspace.Name, workspaceAgent.Name),
+ Name: fmt.Sprintf("%s-%s-%s", workspace.OwnerUsername, workspace.Name, workspaceAgent.Name),
ID: workspaceAgent.ID,
Auth: tailnet.AgentCoordinateeAuth{ID: workspaceAgent.ID},
}
diff --git a/coderd/workspaceagentsrpc_test.go b/coderd/workspaceagentsrpc_test.go
index caea9b39c2f54..5175f80b0b723 100644
--- a/coderd/workspaceagentsrpc_test.go
+++ b/coderd/workspaceagentsrpc_test.go
@@ -13,6 +13,7 @@ import (
"github.com/coder/coder/v2/coderd/database"
"github.com/coder/coder/v2/coderd/database/dbfake"
"github.com/coder/coder/v2/coderd/database/dbtime"
+ "github.com/coder/coder/v2/coderd/rbac"
"github.com/coder/coder/v2/codersdk/agentsdk"
"github.com/coder/coder/v2/provisionersdk/proto"
"github.com/coder/coder/v2/testutil"
@@ -22,89 +23,150 @@ import (
func TestWorkspaceAgentReportStats(t *testing.T) {
t.Parallel()
- tickCh := make(chan time.Time)
- flushCh := make(chan int, 1)
- client, db := coderdtest.NewWithDatabase(t, &coderdtest.Options{
- WorkspaceUsageTrackerFlush: flushCh,
- WorkspaceUsageTrackerTick: tickCh,
- })
- user := coderdtest.CreateFirstUser(t, client)
- r := dbfake.WorkspaceBuild(t, db, database.WorkspaceTable{
- OrganizationID: user.OrganizationID,
- OwnerID: user.UserID,
- LastUsedAt: dbtime.Now().Add(-time.Minute),
- }).WithAgent().Do()
+ for _, tc := range []struct {
+ name string
+ apiKeyScope rbac.ScopeName
+ }{
+ {
+ name: "empty (backwards compat)",
+ apiKeyScope: "",
+ },
+ {
+ name: "all",
+ apiKeyScope: rbac.ScopeAll,
+ },
+ {
+ name: "no_user_data",
+ apiKeyScope: rbac.ScopeNoUserData,
+ },
+ {
+ name: "application_connect",
+ apiKeyScope: rbac.ScopeApplicationConnect,
+ },
+ } {
+ t.Run(tc.name, func(t *testing.T) {
+ t.Parallel()
- ac := agentsdk.New(client.URL)
- ac.SetSessionToken(r.AgentToken)
- conn, err := ac.ConnectRPC(context.Background())
- require.NoError(t, err)
- defer func() {
- _ = conn.Close()
- }()
- agentAPI := agentproto.NewDRPCAgentClient(conn)
+ tickCh := make(chan time.Time)
+ flushCh := make(chan int, 1)
+ client, db := coderdtest.NewWithDatabase(t, &coderdtest.Options{
+ WorkspaceUsageTrackerFlush: flushCh,
+ WorkspaceUsageTrackerTick: tickCh,
+ })
+ user := coderdtest.CreateFirstUser(t, client)
+ r := dbfake.WorkspaceBuild(t, db, database.WorkspaceTable{
+ OrganizationID: user.OrganizationID,
+ OwnerID: user.UserID,
+ LastUsedAt: dbtime.Now().Add(-time.Minute),
+ }).WithAgent(
+ func(agent []*proto.Agent) []*proto.Agent {
+ for _, a := range agent {
+ a.ApiKeyScope = string(tc.apiKeyScope)
+ }
- _, err = agentAPI.UpdateStats(context.Background(), &agentproto.UpdateStatsRequest{
- Stats: &agentproto.Stats{
- ConnectionsByProto: map[string]int64{"TCP": 1},
- ConnectionCount: 1,
- RxPackets: 1,
- RxBytes: 1,
- TxPackets: 1,
- TxBytes: 1,
- SessionCountVscode: 1,
- SessionCountJetbrains: 0,
- SessionCountReconnectingPty: 0,
- SessionCountSsh: 0,
- ConnectionMedianLatencyMs: 10,
- },
- })
- require.NoError(t, err)
+ return agent
+ },
+ ).Do()
+
+ ac := agentsdk.New(client.URL)
+ ac.SetSessionToken(r.AgentToken)
+ conn, err := ac.ConnectRPC(context.Background())
+ require.NoError(t, err)
+ defer func() {
+ _ = conn.Close()
+ }()
+ agentAPI := agentproto.NewDRPCAgentClient(conn)
+
+ _, err = agentAPI.UpdateStats(context.Background(), &agentproto.UpdateStatsRequest{
+ Stats: &agentproto.Stats{
+ ConnectionsByProto: map[string]int64{"TCP": 1},
+ ConnectionCount: 1,
+ RxPackets: 1,
+ RxBytes: 1,
+ TxPackets: 1,
+ TxBytes: 1,
+ SessionCountVscode: 1,
+ SessionCountJetbrains: 0,
+ SessionCountReconnectingPty: 0,
+ SessionCountSsh: 0,
+ ConnectionMedianLatencyMs: 10,
+ },
+ })
+ require.NoError(t, err)
- tickCh <- dbtime.Now()
- count := <-flushCh
- require.Equal(t, 1, count, "expected one flush with one id")
+ tickCh <- dbtime.Now()
+ count := <-flushCh
+ require.Equal(t, 1, count, "expected one flush with one id")
- newWorkspace, err := client.Workspace(context.Background(), r.Workspace.ID)
- require.NoError(t, err)
+ newWorkspace, err := client.Workspace(context.Background(), r.Workspace.ID)
+ require.NoError(t, err)
- assert.True(t,
- newWorkspace.LastUsedAt.After(r.Workspace.LastUsedAt),
- "%s is not after %s", newWorkspace.LastUsedAt, r.Workspace.LastUsedAt,
- )
+ assert.True(t,
+ newWorkspace.LastUsedAt.After(r.Workspace.LastUsedAt),
+ "%s is not after %s", newWorkspace.LastUsedAt, r.Workspace.LastUsedAt,
+ )
+ })
+ }
}
func TestAgentAPI_LargeManifest(t *testing.T) {
t.Parallel()
- ctx := testutil.Context(t, testutil.WaitLong)
- client, store := coderdtest.NewWithDatabase(t, nil)
- adminUser := coderdtest.CreateFirstUser(t, client)
- n := 512000
- longScript := make([]byte, n)
- for i := range longScript {
- longScript[i] = 'q'
+
+ for _, tc := range []struct {
+ name string
+ apiKeyScope rbac.ScopeName
+ }{
+ {
+ name: "empty (backwards compat)",
+ apiKeyScope: "",
+ },
+ {
+ name: "all",
+ apiKeyScope: rbac.ScopeAll,
+ },
+ {
+ name: "no_user_data",
+ apiKeyScope: rbac.ScopeNoUserData,
+ },
+ {
+ name: "application_connect",
+ apiKeyScope: rbac.ScopeApplicationConnect,
+ },
+ } {
+ t.Run(tc.name, func(t *testing.T) {
+ t.Parallel()
+ ctx := testutil.Context(t, testutil.WaitLong)
+ client, store := coderdtest.NewWithDatabase(t, nil)
+ adminUser := coderdtest.CreateFirstUser(t, client)
+ n := 512000
+ longScript := make([]byte, n)
+ for i := range longScript {
+ longScript[i] = 'q'
+ }
+ r := dbfake.WorkspaceBuild(t, store, database.WorkspaceTable{
+ OrganizationID: adminUser.OrganizationID,
+ OwnerID: adminUser.UserID,
+ }).WithAgent(func(agents []*proto.Agent) []*proto.Agent {
+ agents[0].Scripts = []*proto.Script{
+ {
+ Script: string(longScript),
+ },
+ }
+ agents[0].ApiKeyScope = string(tc.apiKeyScope)
+ return agents
+ }).Do()
+ ac := agentsdk.New(client.URL)
+ ac.SetSessionToken(r.AgentToken)
+ conn, err := ac.ConnectRPC(ctx)
+ defer func() {
+ _ = conn.Close()
+ }()
+ require.NoError(t, err)
+ agentAPI := agentproto.NewDRPCAgentClient(conn)
+ manifest, err := agentAPI.GetManifest(ctx, &agentproto.GetManifestRequest{})
+ require.NoError(t, err)
+ require.Len(t, manifest.Scripts, 1)
+ require.Len(t, manifest.Scripts[0].Script, n)
+ })
}
- r := dbfake.WorkspaceBuild(t, store, database.WorkspaceTable{
- OrganizationID: adminUser.OrganizationID,
- OwnerID: adminUser.UserID,
- }).WithAgent(func(agents []*proto.Agent) []*proto.Agent {
- agents[0].Scripts = []*proto.Script{
- {
- Script: string(longScript),
- },
- }
- return agents
- }).Do()
- ac := agentsdk.New(client.URL)
- ac.SetSessionToken(r.AgentToken)
- conn, err := ac.ConnectRPC(ctx)
- defer func() {
- _ = conn.Close()
- }()
- require.NoError(t, err)
- agentAPI := agentproto.NewDRPCAgentClient(conn)
- manifest, err := agentAPI.GetManifest(ctx, &agentproto.GetManifestRequest{})
- require.NoError(t, err)
- require.Len(t, manifest.Scripts, 1)
- require.Len(t, manifest.Scripts[0].Script, n)
}
diff --git a/flake.nix b/flake.nix
index bff207662f913..c0f36c3be6e0f 100644
--- a/flake.nix
+++ b/flake.nix
@@ -141,6 +141,7 @@
kubectl
kubectx
kubernetes-helm
+ lazydocker
lazygit
less
mockgen
From e76d58f2b692e12ba37c0dba22bb2960bf313568 Mon Sep 17 00:00:00 2001
From: Steven Masley
Date: Tue, 20 May 2025 10:09:53 -0500
Subject: [PATCH 22/44] chore: disable parameter validation for dynamic
params for all transitions (#17926)
Dynamic parameters skip parameter validation in coder/coder because
conditional parameters cannot be validated against the static parameter
definitions stored in the database.
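The gating logic is spread across `wsbuilder` and the build handlers in the diff below; as a rough, compressed sketch (stand-in signatures, version parsing omitted), the decision order is: experiment enabled, provisioner version at least 1.6, an explicit per-request opt-in/out if provided, otherwise the template's default flow.

```go
package main

import "fmt"

// provisionerSupportsDynamicParams mirrors the >= 1.6 version gate; the real
// code parses the version string with apiversion.Parse.
func provisionerSupportsDynamicParams(major, minor int) bool {
	return major > 1 || (major == 1 && minor >= 6)
}

// usingDynamicParameters sketches the builder's decision order.
func usingDynamicParameters(experimentOn bool, major, minor int, explicit *bool, classicFlow bool) bool {
	if !experimentOn {
		return false // experiment required
	}
	if !provisionerSupportsDynamicParams(major, minor) {
		return false // old provisioners keep static validation
	}
	if explicit != nil {
		return *explicit // request-level opt-in/out wins
	}
	return !classicFlow // fall back to the template default
}

func main() {
	optIn := true
	fmt.Println(usingDynamicParameters(true, 1, 6, &optIn, false))  // true
	fmt.Println(usingDynamicParameters(false, 1, 6, &optIn, false)) // false: experiment disabled
}
```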
---
cli/server.go | 2 +-
coderd/apidoc/docs.go | 4 +
coderd/apidoc/swagger.json | 4 +
coderd/autobuild/lifecycle_executor.go | 6 +-
coderd/coderdtest/coderdtest.go | 2 +
coderd/parameters.go | 11 +--
coderd/workspacebuilds.go | 17 ++++
coderd/workspaces.go | 4 +-
coderd/wsbuilder/wsbuilder.go | 78 +++++++++++++++--
coderd/wsbuilder/wsbuilder_test.go | 26 ++++++
codersdk/workspaces.go | 4 +
docs/reference/api/builds.md | 1 +
docs/reference/api/schemas.md | 2 +
enterprise/coderd/workspaces_test.go | 113 +++++++++++++++++++++++++
site/src/api/typesGenerated.ts | 1 +
15 files changed, 258 insertions(+), 17 deletions(-)
diff --git a/cli/server.go b/cli/server.go
index 59993b55771a9..1794044bce48f 100644
--- a/cli/server.go
+++ b/cli/server.go
@@ -1124,7 +1124,7 @@ func (r *RootCmd) Server(newAPI func(context.Context, *coderd.Options) (*coderd.
autobuildTicker := time.NewTicker(vals.AutobuildPollInterval.Value())
defer autobuildTicker.Stop()
autobuildExecutor := autobuild.NewExecutor(
- ctx, options.Database, options.Pubsub, options.PrometheusRegistry, coderAPI.TemplateScheduleStore, &coderAPI.Auditor, coderAPI.AccessControlStore, logger, autobuildTicker.C, options.NotificationsEnqueuer)
+ ctx, options.Database, options.Pubsub, options.PrometheusRegistry, coderAPI.TemplateScheduleStore, &coderAPI.Auditor, coderAPI.AccessControlStore, logger, autobuildTicker.C, options.NotificationsEnqueuer, coderAPI.Experiments)
autobuildExecutor.Run()
jobReaperTicker := time.NewTicker(vals.JobReaperDetectorInterval.Value())
diff --git a/coderd/apidoc/docs.go b/coderd/apidoc/docs.go
index f59fcd308c655..95e2cc0f48ac8 100644
--- a/coderd/apidoc/docs.go
+++ b/coderd/apidoc/docs.go
@@ -11998,6 +11998,10 @@ const docTemplate = `{
"dry_run": {
"type": "boolean"
},
+ "enable_dynamic_parameters": {
+ "description": "EnableDynamicParameters skips some of the static parameter checking.\nIt will default to whatever the template has marked as the default experience.\nRequires the \"dynamic-experiment\" to be used.",
+ "type": "boolean"
+ },
"log_level": {
"description": "Log level changes the default logging verbosity of a provider (\"info\" if empty).",
"enum": [
diff --git a/coderd/apidoc/swagger.json b/coderd/apidoc/swagger.json
index 25f3c2166755d..02212d9944415 100644
--- a/coderd/apidoc/swagger.json
+++ b/coderd/apidoc/swagger.json
@@ -10716,6 +10716,10 @@
"dry_run": {
"type": "boolean"
},
+ "enable_dynamic_parameters": {
+ "description": "EnableDynamicParameters skips some of the static parameter checking.\nIt will default to whatever the template has marked as the default experience.\nRequires the \"dynamic-experiment\" to be used.",
+ "type": "boolean"
+ },
"log_level": {
"description": "Log level changes the default logging verbosity of a provider (\"info\" if empty).",
"enum": ["debug"],
diff --git a/coderd/autobuild/lifecycle_executor.go b/coderd/autobuild/lifecycle_executor.go
index eedcc812bb19c..b0cba60111335 100644
--- a/coderd/autobuild/lifecycle_executor.go
+++ b/coderd/autobuild/lifecycle_executor.go
@@ -27,6 +27,7 @@ import (
"github.com/coder/coder/v2/coderd/notifications"
"github.com/coder/coder/v2/coderd/schedule"
"github.com/coder/coder/v2/coderd/wsbuilder"
+ "github.com/coder/coder/v2/codersdk"
)
// Executor automatically starts or stops workspaces.
@@ -43,6 +44,7 @@ type Executor struct {
// NotificationsEnqueuer handles enqueueing notifications for delivery by SMTP, webhook, etc.
notificationsEnqueuer notifications.Enqueuer
reg prometheus.Registerer
+ experiments codersdk.Experiments
metrics executorMetrics
}
@@ -59,7 +61,7 @@ type Stats struct {
}
// New returns a new wsactions executor.
-func NewExecutor(ctx context.Context, db database.Store, ps pubsub.Pubsub, reg prometheus.Registerer, tss *atomic.Pointer[schedule.TemplateScheduleStore], auditor *atomic.Pointer[audit.Auditor], acs *atomic.Pointer[dbauthz.AccessControlStore], log slog.Logger, tick <-chan time.Time, enqueuer notifications.Enqueuer) *Executor {
+func NewExecutor(ctx context.Context, db database.Store, ps pubsub.Pubsub, reg prometheus.Registerer, tss *atomic.Pointer[schedule.TemplateScheduleStore], auditor *atomic.Pointer[audit.Auditor], acs *atomic.Pointer[dbauthz.AccessControlStore], log slog.Logger, tick <-chan time.Time, enqueuer notifications.Enqueuer, exp codersdk.Experiments) *Executor {
factory := promauto.With(reg)
le := &Executor{
//nolint:gocritic // Autostart has a limited set of permissions.
@@ -73,6 +75,7 @@ func NewExecutor(ctx context.Context, db database.Store, ps pubsub.Pubsub, reg p
accessControlStore: acs,
notificationsEnqueuer: enqueuer,
reg: reg,
+ experiments: exp,
metrics: executorMetrics{
autobuildExecutionDuration: factory.NewHistogram(prometheus.HistogramOpts{
Namespace: "coderd",
@@ -258,6 +261,7 @@ func (e *Executor) runOnce(t time.Time) Stats {
builder := wsbuilder.New(ws, nextTransition).
SetLastWorkspaceBuildInTx(&latestBuild).
SetLastWorkspaceBuildJobInTx(&latestJob).
+ Experiments(e.experiments).
Reason(reason)
log.Debug(e.ctx, "auto building workspace", slog.F("transition", nextTransition))
if nextTransition == database.WorkspaceTransitionStart &&
diff --git a/coderd/coderdtest/coderdtest.go b/coderd/coderdtest/coderdtest.go
index 90a29e0f0d876..a8f444c8f632e 100644
--- a/coderd/coderdtest/coderdtest.go
+++ b/coderd/coderdtest/coderdtest.go
@@ -354,6 +354,7 @@ func NewOptions(t testing.TB, options *Options) (func(http.Handler), context.Can
auditor.Store(&options.Auditor)
ctx, cancelFunc := context.WithCancel(context.Background())
+ experiments := coderd.ReadExperiments(*options.Logger, options.DeploymentValues.Experiments)
lifecycleExecutor := autobuild.NewExecutor(
ctx,
options.Database,
@@ -365,6 +366,7 @@ func NewOptions(t testing.TB, options *Options) (func(http.Handler), context.Can
*options.Logger,
options.AutobuildTicker,
options.NotificationsEnqueuer,
+ experiments,
).WithStatsChannel(options.AutobuildStats)
lifecycleExecutor.Run()
diff --git a/coderd/parameters.go b/coderd/parameters.go
index c3fc4ffdeeede..13b1346991c90 100644
--- a/coderd/parameters.go
+++ b/coderd/parameters.go
@@ -12,13 +12,13 @@ import (
"golang.org/x/sync/errgroup"
"golang.org/x/xerrors"
- "github.com/coder/coder/v2/apiversion"
"github.com/coder/coder/v2/coderd/database"
"github.com/coder/coder/v2/coderd/database/dbauthz"
"github.com/coder/coder/v2/coderd/files"
"github.com/coder/coder/v2/coderd/httpapi"
"github.com/coder/coder/v2/coderd/httpmw"
"github.com/coder/coder/v2/coderd/util/ptr"
+ "github.com/coder/coder/v2/coderd/wsbuilder"
"github.com/coder/coder/v2/codersdk"
"github.com/coder/coder/v2/codersdk/wsjson"
sdkproto "github.com/coder/coder/v2/provisionersdk/proto"
@@ -69,13 +69,10 @@ func (api *API) templateVersionDynamicParameters(rw http.ResponseWriter, r *http
return
}
- major, minor, err := apiversion.Parse(tf.ProvisionerdVersion)
- // If the api version is not valid or less than 1.5, we need to use the static parameters
- useStaticParams := err != nil || major < 1 || (major == 1 && minor < 6)
- if useStaticParams {
- api.handleStaticParameters(rw, r, templateVersion.ID)
- } else {
+ if wsbuilder.ProvisionerVersionSupportsDynamicParameters(tf.ProvisionerdVersion) {
api.handleDynamicParameters(rw, r, tf, templateVersion)
+ } else {
+ api.handleStaticParameters(rw, r, templateVersion.ID)
}
}
diff --git a/coderd/workspacebuilds.go b/coderd/workspacebuilds.go
index 719d4e2a48123..08b90b834ccca 100644
--- a/coderd/workspacebuilds.go
+++ b/coderd/workspacebuilds.go
@@ -338,6 +338,7 @@ func (api *API) postWorkspaceBuilds(rw http.ResponseWriter, r *http.Request) {
RichParameterValues(createBuild.RichParameterValues).
LogLevel(string(createBuild.LogLevel)).
DeploymentValues(api.Options.DeploymentValues).
+ Experiments(api.Experiments).
TemplateVersionPresetID(createBuild.TemplateVersionPresetID)
var (
@@ -383,6 +384,22 @@ func (api *API) postWorkspaceBuilds(rw http.ResponseWriter, r *http.Request) {
builder = builder.State(createBuild.ProvisionerState)
}
+ // Only defer to dynamic parameters if the experiment is enabled.
+ if api.Experiments.Enabled(codersdk.ExperimentDynamicParameters) {
+ if createBuild.EnableDynamicParameters != nil {
+ // Explicit opt-in
+ builder = builder.DynamicParameters(*createBuild.EnableDynamicParameters)
+ }
+ } else {
+ if createBuild.EnableDynamicParameters != nil {
+ api.Logger.Warn(ctx, "ignoring dynamic parameter field sent by request, the experiment is not enabled",
+ slog.F("field", *createBuild.EnableDynamicParameters),
+ slog.F("user", apiKey.UserID.String()),
+ slog.F("transition", string(createBuild.Transition)),
+ )
+ }
+ }
+
workspaceBuild, provisionerJob, provisionerDaemons, err = builder.Build(
ctx,
tx,
diff --git a/coderd/workspaces.go b/coderd/workspaces.go
index 35960d1f95a12..fe0c2d3f609a2 100644
--- a/coderd/workspaces.go
+++ b/coderd/workspaces.go
@@ -704,6 +704,8 @@ func createWorkspace(
Reason(database.BuildReasonInitiator).
Initiator(initiatorID).
ActiveVersion().
+ Experiments(api.Experiments).
+ DeploymentValues(api.DeploymentValues).
RichParameterValues(req.RichParameterValues)
if req.TemplateVersionID != uuid.Nil {
builder = builder.VersionID(req.TemplateVersionID)
@@ -716,7 +718,7 @@ func createWorkspace(
}
if req.EnableDynamicParameters && api.Experiments.Enabled(codersdk.ExperimentDynamicParameters) {
- builder = builder.UsingDynamicParameters()
+ builder = builder.DynamicParameters(req.EnableDynamicParameters)
}
workspaceBuild, provisionerJob, provisionerDaemons, err = builder.Build(
diff --git a/coderd/wsbuilder/wsbuilder.go b/coderd/wsbuilder/wsbuilder.go
index 64389b7532066..46035f28dda77 100644
--- a/coderd/wsbuilder/wsbuilder.go
+++ b/coderd/wsbuilder/wsbuilder.go
@@ -13,7 +13,9 @@ import (
"github.com/hashicorp/hcl/v2"
"github.com/hashicorp/hcl/v2/hclsyntax"
+ "github.com/coder/coder/v2/apiversion"
"github.com/coder/coder/v2/coderd/rbac/policy"
+ "github.com/coder/coder/v2/coderd/util/ptr"
"github.com/coder/coder/v2/provisioner/terraform/tfparse"
"github.com/coder/coder/v2/provisionersdk"
sdkproto "github.com/coder/coder/v2/provisionersdk/proto"
@@ -51,9 +53,11 @@ type Builder struct {
state stateTarget
logLevel string
deploymentValues *codersdk.DeploymentValues
+ experiments codersdk.Experiments
- richParameterValues []codersdk.WorkspaceBuildParameter
- dynamicParametersEnabled bool
+ richParameterValues []codersdk.WorkspaceBuildParameter
+ // dynamicParametersEnabled is non-nil if set externally
+ dynamicParametersEnabled *bool
initiator uuid.UUID
reason database.BuildReason
templateVersionPresetID uuid.UUID
@@ -66,6 +70,7 @@ type Builder struct {
template *database.Template
templateVersion *database.TemplateVersion
templateVersionJob *database.ProvisionerJob
+ terraformValues *database.TemplateVersionTerraformValue
templateVersionParameters *[]database.TemplateVersionParameter
templateVersionVariables *[]database.TemplateVersionVariable
templateVersionWorkspaceTags *[]database.TemplateVersionWorkspaceTag
@@ -155,6 +160,14 @@ func (b Builder) DeploymentValues(dv *codersdk.DeploymentValues) Builder {
return b
}
+func (b Builder) Experiments(exp codersdk.Experiments) Builder {
+ // nolint: revive
+ cpy := make(codersdk.Experiments, len(exp))
+ copy(cpy, exp)
+ b.experiments = cpy
+ return b
+}
+
func (b Builder) Initiator(u uuid.UUID) Builder {
// nolint: revive
b.initiator = u
@@ -187,8 +200,9 @@ func (b Builder) MarkPrebuiltWorkspaceClaim() Builder {
return b
}
-func (b Builder) UsingDynamicParameters() Builder {
- b.dynamicParametersEnabled = true
+func (b Builder) DynamicParameters(using bool) Builder {
+ // nolint: revive
+ b.dynamicParametersEnabled = ptr.Ref(using)
return b
}
@@ -516,6 +530,22 @@ func (b *Builder) getTemplateVersionID() (uuid.UUID, error) {
return bld.TemplateVersionID, nil
}
+func (b *Builder) getTemplateTerraformValues() (*database.TemplateVersionTerraformValue, error) {
+ if b.terraformValues != nil {
+ return b.terraformValues, nil
+ }
+ v, err := b.getTemplateVersion()
+ if err != nil {
+ return nil, xerrors.Errorf("get template version so we can get terraform values: %w", err)
+ }
+ vals, err := b.store.GetTemplateVersionTerraformValues(b.ctx, v.ID)
+ if err != nil {
+ return nil, xerrors.Errorf("get template version terraform values %s: %w", v.JobID, err)
+ }
+ b.terraformValues = &vals
+ return b.terraformValues, err
+}
+
func (b *Builder) getLastBuild() (*database.WorkspaceBuild, error) {
if b.lastBuild != nil {
return b.lastBuild, nil
@@ -593,9 +623,10 @@ func (b *Builder) getParameters() (names, values []string, err error) {
return nil, nil, BuildError{http.StatusBadRequest, "Unable to build workspace with unsupported parameters", err}
}
- if b.dynamicParametersEnabled {
- // Dynamic parameters skip all parameter validation.
- // Pass the user's input as is.
+ // Dynamic parameters skip all parameter validation.
+ // Deleting a workspace also should skip parameter validation.
+ // Pass the user's input as is.
+ if b.usingDynamicParameters() {
// TODO: The previous behavior was only to pass param values
// for parameters that exist. Since dynamic params can have
// conditional parameter existence, the static frame of reference
@@ -989,3 +1020,36 @@ func (b *Builder) checkRunningBuild() error {
}
return nil
}
+
+func (b *Builder) usingDynamicParameters() bool {
+ if !b.experiments.Enabled(codersdk.ExperimentDynamicParameters) {
+ // Experiment required
+ return false
+ }
+
+ vals, err := b.getTemplateTerraformValues()
+ if err != nil {
+ return false
+ }
+
+ if !ProvisionerVersionSupportsDynamicParameters(vals.ProvisionerdVersion) {
+ return false
+ }
+
+ if b.dynamicParametersEnabled != nil {
+ return *b.dynamicParametersEnabled
+ }
+
+ tpl, err := b.getTemplate()
+ if err != nil {
+ return false // Let another part of the code get this error
+ }
+ return !tpl.UseClassicParameterFlow
+}
+
+func ProvisionerVersionSupportsDynamicParameters(version string) bool {
+ major, minor, err := apiversion.Parse(version)
+ // If the api version is not valid or less than 1.6, we need to use the static parameters
+ useStaticParams := err != nil || major < 1 || (major == 1 && minor < 6)
+ return !useStaticParams
+}
diff --git a/coderd/wsbuilder/wsbuilder_test.go b/coderd/wsbuilder/wsbuilder_test.go
index 00b7b5f0ae08b..abe5e3fe9b8b7 100644
--- a/coderd/wsbuilder/wsbuilder_test.go
+++ b/coderd/wsbuilder/wsbuilder_test.go
@@ -839,6 +839,32 @@ func TestWorkspaceBuildWithPreset(t *testing.T) {
req.NoError(err)
}
+func TestProvisionerVersionSupportsDynamicParameters(t *testing.T) {
+ t.Parallel()
+
+ for v, dyn := range map[string]bool{
+ "": false,
+ "na": false,
+ "0.0": false,
+ "0.10": false,
+ "1.4": false,
+ "1.5": false,
+ "1.6": true,
+ "1.7": true,
+ "1.8": true,
+ "2.0": true,
+ "2.17": true,
+ "4.0": true,
+ } {
+ t.Run(v, func(t *testing.T) {
+ t.Parallel()
+
+ does := wsbuilder.ProvisionerVersionSupportsDynamicParameters(v)
+ require.Equal(t, dyn, does)
+ })
+ }
+}
+
type txExpect func(mTx *dbmock.MockStore)
func expectDB(t *testing.T, opts ...txExpect) *dbmock.MockStore {
diff --git a/codersdk/workspaces.go b/codersdk/workspaces.go
index b39b220ca33b8..e0f1b9b1e2c2a 100644
--- a/codersdk/workspaces.go
+++ b/codersdk/workspaces.go
@@ -110,6 +110,10 @@ type CreateWorkspaceBuildRequest struct {
LogLevel ProvisionerLogLevel `json:"log_level,omitempty" validate:"omitempty,oneof=debug"`
// TemplateVersionPresetID is the ID of the template version preset to use for the build.
TemplateVersionPresetID uuid.UUID `json:"template_version_preset_id,omitempty" format:"uuid"`
+ // EnableDynamicParameters skips some of the static parameter checking.
+ // It will default to whatever the template has marked as the default experience.
+ // Requires the "dynamic-experiment" to be used.
+ EnableDynamicParameters *bool `json:"enable_dynamic_parameters,omitempty"`
}
type WorkspaceOptions struct {
diff --git a/docs/reference/api/builds.md b/docs/reference/api/builds.md
index 00417c700cdfd..3cfd25f2a6e0f 100644
--- a/docs/reference/api/builds.md
+++ b/docs/reference/api/builds.md
@@ -1731,6 +1731,7 @@ curl -X POST http://coder-server:8080/api/v2/workspaces/{workspace}/builds \
```json
{
"dry_run": true,
+ "enable_dynamic_parameters": true,
"log_level": "debug",
"orphan": true,
"rich_parameter_values": [
diff --git a/docs/reference/api/schemas.md b/docs/reference/api/schemas.md
index b35c35361cb1f..9325d751bc352 100644
--- a/docs/reference/api/schemas.md
+++ b/docs/reference/api/schemas.md
@@ -1917,6 +1917,7 @@ This is required on creation to enable a user-flow of validating a template work
```json
{
"dry_run": true,
+ "enable_dynamic_parameters": true,
"log_level": "debug",
"orphan": true,
"rich_parameter_values": [
@@ -1939,6 +1940,7 @@ This is required on creation to enable a user-flow of validating a template work
| Name | Type | Required | Restrictions | Description |
|------------------------------|-------------------------------------------------------------------------------|----------|--------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
| `dry_run` | boolean | false | | |
+| `enable_dynamic_parameters` | boolean | false | | Enable dynamic parameters skips some of the static parameter checking. It will default to whatever the template has marked as the default experience. Requires the "dynamic-experiment" to be used. |
| `log_level` | [codersdk.ProvisionerLogLevel](#codersdkprovisionerloglevel) | false | | Log level changes the default logging verbosity of a provider ("info" if empty). |
| `orphan` | boolean | false | | Orphan may be set for the Destroy transition. |
| `rich_parameter_values` | array of [codersdk.WorkspaceBuildParameter](#codersdkworkspacebuildparameter) | false | | Rich parameter values are optional. It will write params to the 'workspace' scope. This will overwrite any existing parameters with the same name. This will not delete old params not included in this list. |
diff --git a/enterprise/coderd/workspaces_test.go b/enterprise/coderd/workspaces_test.go
index 7005c93ca36f5..226232f37bf7f 100644
--- a/enterprise/coderd/workspaces_test.go
+++ b/enterprise/coderd/workspaces_test.go
@@ -1659,6 +1659,119 @@ func TestTemplateDoesNotAllowUserAutostop(t *testing.T) {
})
}
+// TestWorkspaceTemplateParamsChange tests a workspace with a parameter that
+// validation changes on apply. The params used in create workspace are invalid
+// according to the static params on import.
+//
+// This is testing that dynamic params defers input validation to terraform.
+// It does not try to do this in coder/coder.
+func TestWorkspaceTemplateParamsChange(t *testing.T) {
+ mainTfTemplate := `
+ terraform {
+ required_providers {
+ coder = {
+ source = "coder/coder"
+ }
+ }
+ }
+ provider "coder" {}
+ data "coder_workspace" "me" {}
+ data "coder_workspace_owner" "me" {}
+
+ data "coder_parameter" "param_min" {
+ name = "param_min"
+ type = "number"
+ default = 10
+ }
+
+ data "coder_parameter" "param" {
+ name = "param"
+ type = "number"
+ default = 12
+ validation {
+ min = data.coder_parameter.param_min.value
+ }
+ }
+ `
+ tfCliConfigPath := downloadProviders(t, mainTfTemplate)
+ t.Setenv("TF_CLI_CONFIG_FILE", tfCliConfigPath)
+
+ logger := slogtest.Make(t, &slogtest.Options{IgnoreErrors: false})
+ dv := coderdtest.DeploymentValues(t)
+ dv.Experiments = []string{string(codersdk.ExperimentDynamicParameters)}
+ client, owner := coderdenttest.New(t, &coderdenttest.Options{
+ Options: &coderdtest.Options{
+ Logger: &logger,
+ // We intentionally do not run a built-in provisioner daemon here.
+ IncludeProvisionerDaemon: false,
+ DeploymentValues: dv,
+ },
+ LicenseOptions: &coderdenttest.LicenseOptions{
+ Features: license.Features{
+ codersdk.FeatureExternalProvisionerDaemons: 1,
+ },
+ },
+ })
+ templateAdmin, _ := coderdtest.CreateAnotherUser(t, client, owner.OrganizationID, rbac.RoleTemplateAdmin())
+ member, memberUser := coderdtest.CreateAnotherUser(t, client, owner.OrganizationID)
+
+ _ = coderdenttest.NewExternalProvisionerDaemonTerraform(t, client, owner.OrganizationID, nil)
+
+ // This can take a while, so set a relatively long timeout.
+ ctx := testutil.Context(t, 2*testutil.WaitSuperLong)
+
+ // Creating a template as a template admin must succeed
+ templateFiles := map[string]string{"main.tf": mainTfTemplate}
+ tarBytes := testutil.CreateTar(t, templateFiles)
+ fi, err := templateAdmin.Upload(ctx, "application/x-tar", bytes.NewReader(tarBytes))
+ require.NoError(t, err, "failed to upload file")
+
+ tv, err := templateAdmin.CreateTemplateVersion(ctx, owner.OrganizationID, codersdk.CreateTemplateVersionRequest{
+ Name: testutil.GetRandomName(t),
+ FileID: fi.ID,
+ StorageMethod: codersdk.ProvisionerStorageMethodFile,
+ Provisioner: codersdk.ProvisionerTypeTerraform,
+ UserVariableValues: []codersdk.VariableValue{},
+ })
+ require.NoError(t, err, "failed to create template version")
+ coderdtest.AwaitTemplateVersionJobCompleted(t, templateAdmin, tv.ID)
+ tpl := coderdtest.CreateTemplate(t, templateAdmin, owner.OrganizationID, tv.ID)
+ require.False(t, tpl.UseClassicParameterFlow, "template to use dynamic parameters")
+
+ // When: we create a workspace build using the above template but with
+ // parameter values that are different from those defined in the template.
+ // The new values are not valid according to the original plan, but become valid once the updated param_min is applied.
+ ws, err := member.CreateUserWorkspace(ctx, memberUser.Username, codersdk.CreateWorkspaceRequest{
+ TemplateID: tpl.ID,
+ Name: coderdtest.RandomUsername(t),
+ RichParameterValues: []codersdk.WorkspaceBuildParameter{
+ {
+ Name: "param_min",
+ Value: "5",
+ },
+ {
+ Name: "param",
+ Value: "7",
+ },
+ },
+ EnableDynamicParameters: true,
+ })
+
+ // Then: the build should succeed. The updated value of param_min should be
+ // used to validate param instead of the value defined in the template.
+ require.NoError(t, err, "failed to create workspace")
+ createBuild := coderdtest.AwaitWorkspaceBuildJobCompleted(t, member, ws.LatestBuild.ID)
+ require.Equal(t, createBuild.Status, codersdk.WorkspaceStatusRunning)
+
+ // Now delete the workspace
+ build, err := member.CreateWorkspaceBuild(ctx, ws.ID, codersdk.CreateWorkspaceBuildRequest{
+ Transition: codersdk.WorkspaceTransitionDelete,
+ })
+ require.NoError(t, err)
+ build = coderdtest.AwaitWorkspaceBuildJobCompleted(t, member, build.ID)
+ require.Equal(t, codersdk.WorkspaceStatusDeleted, build.Status)
+}
+
// TestWorkspaceTagsTerraform tests that a workspace can be created with tags.
// This is an end-to-end-style test, meaning that we actually run the
// real Terraform provisioner and validate that the workspace is created
diff --git a/site/src/api/typesGenerated.ts b/site/src/api/typesGenerated.ts
index 9a73fc9f3d6bf..d367302186870 100644
--- a/site/src/api/typesGenerated.ts
+++ b/site/src/api/typesGenerated.ts
@@ -490,6 +490,7 @@ export interface CreateWorkspaceBuildRequest {
readonly rich_parameter_values?: readonly WorkspaceBuildParameter[];
readonly log_level?: ProvisionerLogLevel;
readonly template_version_preset_id?: string;
+ readonly enable_dynamic_parameters?: boolean;
}
// From codersdk/workspaceproxy.go
From a123900fe86ded9ddf3ac8f9dda3d8355945544a Mon Sep 17 00:00:00 2001
From: Steven Masley
Date: Tue, 20 May 2025 10:45:12 -0500
Subject: [PATCH 23/44] chore: remove coder/preview dependency from codersdk
(#17939)
---
cli/parameterresolver.go | 2 +-
coderd/database/db2sdk/db2sdk.go | 82 ++++++++++++
coderd/parameters.go | 9 +-
coderd/parameters_test.go | 16 +--
codersdk/parameters.go | 118 ++++++++++++++++--
codersdk/templateversions.go | 16 ---
enterprise/coderd/parameters_test.go | 12 +-
go.mod | 8 +-
go.sum | 16 +--
site/src/api/typesGenerated.ts | 96 +++++++++-----
.../CreateWorkspacePageViewExperimental.tsx | 4 +-
11 files changed, 292 insertions(+), 87 deletions(-)
diff --git a/cli/parameterresolver.go b/cli/parameterresolver.go
index 41c61d5315a77..40625331fa6aa 100644
--- a/cli/parameterresolver.go
+++ b/cli/parameterresolver.go
@@ -226,7 +226,7 @@ func (pr *ParameterResolver) resolveWithInput(resolved []codersdk.WorkspaceBuild
if p != nil {
continue
}
- // Parameter has not been resolved yet, so CLI needs to determine if user should input it.
+ // PreviewParameter has not been resolved yet, so CLI needs to determine if user should input it.
firstTimeUse := pr.isFirstTimeUse(tvp.Name)
promptParameterOption := pr.isLastBuildParameterInvalidOption(tvp)
diff --git a/coderd/database/db2sdk/db2sdk.go b/coderd/database/db2sdk/db2sdk.go
index 18d1d8a6ac788..ed258a07820ab 100644
--- a/coderd/database/db2sdk/db2sdk.go
+++ b/coderd/database/db2sdk/db2sdk.go
@@ -12,6 +12,7 @@ import (
"time"
"github.com/google/uuid"
+ "github.com/hashicorp/hcl/v2"
"golang.org/x/xerrors"
"tailscale.com/tailcfg"
@@ -24,6 +25,7 @@ import (
"github.com/coder/coder/v2/codersdk"
"github.com/coder/coder/v2/provisionersdk/proto"
"github.com/coder/coder/v2/tailnet"
+ previewtypes "github.com/coder/preview/types"
)
// List is a helper function to reduce boilerplate when converting slices of
@@ -764,3 +766,83 @@ func Chat(chat database.Chat) codersdk.Chat {
func Chats(chats []database.Chat) []codersdk.Chat {
return List(chats, Chat)
}
+
+func PreviewParameter(param previewtypes.Parameter) codersdk.PreviewParameter {
+ return codersdk.PreviewParameter{
+ PreviewParameterData: codersdk.PreviewParameterData{
+ Name: param.Name,
+ DisplayName: param.DisplayName,
+ Description: param.Description,
+ Type: codersdk.OptionType(param.Type),
+ FormType: codersdk.ParameterFormType(param.FormType),
+ Styling: codersdk.PreviewParameterStyling{
+ Placeholder: param.Styling.Placeholder,
+ Disabled: param.Styling.Disabled,
+ Label: param.Styling.Label,
+ },
+ Mutable: param.Mutable,
+ DefaultValue: PreviewHCLString(param.DefaultValue),
+ Icon: param.Icon,
+ Options: List(param.Options, PreviewParameterOption),
+ Validations: List(param.Validations, PreviewParameterValidation),
+ Required: param.Required,
+ Order: param.Order,
+ Ephemeral: param.Ephemeral,
+ },
+ Value: PreviewHCLString(param.Value),
+ Diagnostics: PreviewDiagnostics(param.Diagnostics),
+ }
+}
+
+func HCLDiagnostics(d hcl.Diagnostics) []codersdk.FriendlyDiagnostic {
+ return PreviewDiagnostics(previewtypes.Diagnostics(d))
+}
+
+func PreviewDiagnostics(d previewtypes.Diagnostics) []codersdk.FriendlyDiagnostic {
+ f := d.FriendlyDiagnostics()
+ return List(f, func(f previewtypes.FriendlyDiagnostic) codersdk.FriendlyDiagnostic {
+ return codersdk.FriendlyDiagnostic{
+ Severity: codersdk.DiagnosticSeverityString(f.Severity),
+ Summary: f.Summary,
+ Detail: f.Detail,
+ Extra: codersdk.DiagnosticExtra{
+ Code: f.Extra.Code,
+ },
+ }
+ })
+}
+
+func PreviewHCLString(h previewtypes.HCLString) codersdk.NullHCLString {
+ n := h.NullHCLString()
+ return codersdk.NullHCLString{
+ Value: n.Value,
+ Valid: n.Valid,
+ }
+}
+
+func PreviewParameterOption(o *previewtypes.ParameterOption) codersdk.PreviewParameterOption {
+ if o == nil {
+ // This should never be sent
+ return codersdk.PreviewParameterOption{}
+ }
+ return codersdk.PreviewParameterOption{
+ Name: o.Name,
+ Description: o.Description,
+ Value: PreviewHCLString(o.Value),
+ Icon: o.Icon,
+ }
+}
+
+func PreviewParameterValidation(v *previewtypes.ParameterValidation) codersdk.PreviewParameterValidation {
+ if v == nil {
+ // This should never be sent
+ return codersdk.PreviewParameterValidation{}
+ }
+ return codersdk.PreviewParameterValidation{
+ Error: v.Error,
+ Regex: v.Regex,
+ Min: v.Min,
+ Max: v.Max,
+ Monotonic: v.Monotonic,
+ }
+}
diff --git a/coderd/parameters.go b/coderd/parameters.go
index 13b1346991c90..1a0c1f92ddbf9 100644
--- a/coderd/parameters.go
+++ b/coderd/parameters.go
@@ -13,6 +13,7 @@ import (
"golang.org/x/xerrors"
"github.com/coder/coder/v2/coderd/database"
+ "github.com/coder/coder/v2/coderd/database/db2sdk"
"github.com/coder/coder/v2/coderd/database/dbauthz"
"github.com/coder/coder/v2/coderd/files"
"github.com/coder/coder/v2/coderd/httpapi"
@@ -286,10 +287,10 @@ func (api *API) handleParameterWebsocket(rw http.ResponseWriter, r *http.Request
result, diagnostics := render(ctx, map[string]string{})
response := codersdk.DynamicParametersResponse{
ID: -1, // Always start with -1.
- Diagnostics: previewtypes.Diagnostics(diagnostics),
+ Diagnostics: db2sdk.HCLDiagnostics(diagnostics),
}
if result != nil {
- response.Parameters = result.Parameters
+ response.Parameters = db2sdk.List(result.Parameters, db2sdk.PreviewParameter)
}
err = stream.Send(response)
if err != nil {
@@ -314,10 +315,10 @@ func (api *API) handleParameterWebsocket(rw http.ResponseWriter, r *http.Request
result, diagnostics := render(ctx, update.Inputs)
response := codersdk.DynamicParametersResponse{
ID: update.ID,
- Diagnostics: previewtypes.Diagnostics(diagnostics),
+ Diagnostics: db2sdk.HCLDiagnostics(diagnostics),
}
if result != nil {
- response.Parameters = result.Parameters
+ response.Parameters = db2sdk.List(result.Parameters, db2sdk.PreviewParameter)
}
err = stream.Send(response)
if err != nil {
diff --git a/coderd/parameters_test.go b/coderd/parameters_test.go
index e7fc77f141efc..8edadc9b7e797 100644
--- a/coderd/parameters_test.go
+++ b/coderd/parameters_test.go
@@ -68,8 +68,8 @@ func TestDynamicParametersOwnerSSHPublicKey(t *testing.T) {
require.Equal(t, -1, preview.ID)
require.Empty(t, preview.Diagnostics)
require.Equal(t, "public_key", preview.Parameters[0].Name)
- require.True(t, preview.Parameters[0].Value.Valid())
- require.Equal(t, sshKey.PublicKey, preview.Parameters[0].Value.Value.AsString())
+ require.True(t, preview.Parameters[0].Value.Valid)
+ require.Equal(t, sshKey.PublicKey, preview.Parameters[0].Value.Value)
}
func TestDynamicParametersWithTerraformValues(t *testing.T) {
@@ -103,8 +103,8 @@ func TestDynamicParametersWithTerraformValues(t *testing.T) {
require.Len(t, preview.Parameters, 1)
require.Equal(t, "jetbrains_ide", preview.Parameters[0].Name)
- require.True(t, preview.Parameters[0].Value.Valid())
- require.Equal(t, "CL", preview.Parameters[0].Value.AsString())
+ require.True(t, preview.Parameters[0].Value.Valid)
+ require.Equal(t, "CL", preview.Parameters[0].Value.Value)
})
// OldProvisioners use the static parameters in the dynamic param flow
@@ -154,8 +154,8 @@ func TestDynamicParametersWithTerraformValues(t *testing.T) {
require.Contains(t, preview.Diagnostics[0].Summary, "required metadata to support dynamic parameters")
require.Len(t, preview.Parameters, 1)
require.Equal(t, "jetbrains_ide", preview.Parameters[0].Name)
- require.True(t, preview.Parameters[0].Value.Valid())
- require.Equal(t, defaultValue, preview.Parameters[0].Value.AsString())
+ require.True(t, preview.Parameters[0].Value.Valid)
+ require.Equal(t, defaultValue, preview.Parameters[0].Value.Value)
// Test some inputs
for _, exp := range []string{defaultValue, "GO", "Invalid", defaultValue} {
@@ -182,8 +182,8 @@ func TestDynamicParametersWithTerraformValues(t *testing.T) {
require.Len(t, preview.Parameters[0].Diagnostics, 0)
}
require.Equal(t, "jetbrains_ide", preview.Parameters[0].Name)
- require.True(t, preview.Parameters[0].Value.Valid())
- require.Equal(t, exp, preview.Parameters[0].Value.AsString())
+ require.True(t, preview.Parameters[0].Value.Valid)
+ require.Equal(t, exp, preview.Parameters[0].Value.Value)
}
})
diff --git a/codersdk/parameters.go b/codersdk/parameters.go
index 881aaf99f573c..d81dc7cf55ca0 100644
--- a/codersdk/parameters.go
+++ b/codersdk/parameters.go
@@ -7,17 +7,121 @@ import (
"github.com/google/uuid"
"github.com/coder/coder/v2/codersdk/wsjson"
- previewtypes "github.com/coder/preview/types"
"github.com/coder/websocket"
)
-// FriendlyDiagnostic is included to guarantee it is generated in the output
-// types. This is used as the type override for `previewtypes.Diagnostic`.
-type FriendlyDiagnostic = previewtypes.FriendlyDiagnostic
+type ParameterFormType string
-// NullHCLString is included to guarantee it is generated in the output
-// types. This is used as the type override for `previewtypes.HCLString`.
-type NullHCLString = previewtypes.NullHCLString
+const (
+ ParameterFormTypeDefault ParameterFormType = ""
+ ParameterFormTypeRadio ParameterFormType = "radio"
+ ParameterFormTypeSlider ParameterFormType = "slider"
+ ParameterFormTypeInput ParameterFormType = "input"
+ ParameterFormTypeDropdown ParameterFormType = "dropdown"
+ ParameterFormTypeCheckbox ParameterFormType = "checkbox"
+ ParameterFormTypeSwitch ParameterFormType = "switch"
+ ParameterFormTypeMultiSelect ParameterFormType = "multi-select"
+ ParameterFormTypeTagSelect ParameterFormType = "tag-select"
+ ParameterFormTypeTextArea ParameterFormType = "textarea"
+ ParameterFormTypeError ParameterFormType = "error"
+)
+
+type OptionType string
+
+const (
+ OptionTypeString OptionType = "string"
+ OptionTypeNumber OptionType = "number"
+ OptionTypeBoolean OptionType = "bool"
+ OptionTypeListString OptionType = "list(string)"
+)
+
+type DiagnosticSeverityString string
+
+const (
+ DiagnosticSeverityError DiagnosticSeverityString = "error"
+ DiagnosticSeverityWarning DiagnosticSeverityString = "warning"
+)
+
+// FriendlyDiagnostic == previewtypes.FriendlyDiagnostic
+// Copied to avoid import deps
+type FriendlyDiagnostic struct {
+ Severity DiagnosticSeverityString `json:"severity"`
+ Summary string `json:"summary"`
+ Detail string `json:"detail"`
+
+ Extra DiagnosticExtra `json:"extra"`
+}
+
+type DiagnosticExtra struct {
+ Code string `json:"code"`
+}
+
+// NullHCLString == `previewtypes.NullHCLString`.
+type NullHCLString struct {
+ Value string `json:"value"`
+ Valid bool `json:"valid"`
+}
+
+type PreviewParameter struct {
+ PreviewParameterData
+ Value NullHCLString `json:"value"`
+ Diagnostics []FriendlyDiagnostic `json:"diagnostics"`
+}
+
+type PreviewParameterData struct {
+ Name string `json:"name"`
+ DisplayName string `json:"display_name"`
+ Description string `json:"description"`
+ Type OptionType `json:"type"`
+ FormType ParameterFormType `json:"form_type"`
+ Styling PreviewParameterStyling `json:"styling"`
+ Mutable bool `json:"mutable"`
+ DefaultValue NullHCLString `json:"default_value"`
+ Icon string `json:"icon"`
+ Options []PreviewParameterOption `json:"options"`
+ Validations []PreviewParameterValidation `json:"validations"`
+ Required bool `json:"required"`
+ // legacy_variable_name was removed (= 14)
+ Order int64 `json:"order"`
+ Ephemeral bool `json:"ephemeral"`
+}
+
+type PreviewParameterStyling struct {
+ Placeholder *string `json:"placeholder,omitempty"`
+ Disabled *bool `json:"disabled,omitempty"`
+ Label *string `json:"label,omitempty"`
+}
+
+type PreviewParameterOption struct {
+ Name string `json:"name"`
+ Description string `json:"description"`
+ Value NullHCLString `json:"value"`
+ Icon string `json:"icon"`
+}
+
+type PreviewParameterValidation struct {
+ Error string `json:"validation_error"`
+
+ // All validation attributes are optional.
+ Regex *string `json:"validation_regex"`
+ Min *int64 `json:"validation_min"`
+ Max *int64 `json:"validation_max"`
+ Monotonic *string `json:"validation_monotonic"`
+}
+
+type DynamicParametersRequest struct {
+ // ID identifies the request. The response contains the same
+ // ID so that the client can match it to the request.
+ ID int `json:"id"`
+ Inputs map[string]string `json:"inputs"`
+}
+
+type DynamicParametersResponse struct {
+ ID int `json:"id"`
+ Diagnostics []FriendlyDiagnostic `json:"diagnostics"`
+ Parameters []PreviewParameter `json:"parameters"`
+ // TODO: Workspace tags
+}
func (c *Client) TemplateVersionDynamicParameters(ctx context.Context, userID, version uuid.UUID) (*wsjson.Stream[DynamicParametersResponse, DynamicParametersRequest], error) {
conn, err := c.Dial(ctx, fmt.Sprintf("/api/v2/users/%s/templateversions/%s/parameters", userID, version), nil)
diff --git a/codersdk/templateversions.go b/codersdk/templateversions.go
index 42b381fadebce..de8bb7b970957 100644
--- a/codersdk/templateversions.go
+++ b/codersdk/templateversions.go
@@ -9,8 +9,6 @@ import (
"time"
"github.com/google/uuid"
-
- previewtypes "github.com/coder/preview/types"
)
type TemplateVersionWarning string
@@ -125,20 +123,6 @@ func (c *Client) CancelTemplateVersion(ctx context.Context, version uuid.UUID) e
return nil
}
-type DynamicParametersRequest struct {
- // ID identifies the request. The response contains the same
- // ID so that the client can match it to the request.
- ID int `json:"id"`
- Inputs map[string]string `json:"inputs"`
-}
-
-type DynamicParametersResponse struct {
- ID int `json:"id"`
- Diagnostics previewtypes.Diagnostics `json:"diagnostics"`
- Parameters []previewtypes.Parameter `json:"parameters"`
- // TODO: Workspace tags
-}
-
// TemplateVersionParameters returns parameters a template version exposes.
func (c *Client) TemplateVersionRichParameters(ctx context.Context, version uuid.UUID) ([]TemplateVersionParameter, error) {
res, err := c.Request(ctx, http.MethodGet, fmt.Sprintf("/api/v2/templateversions/%s/rich-parameters", version), nil)
diff --git a/enterprise/coderd/parameters_test.go b/enterprise/coderd/parameters_test.go
index e6bc564e43da2..76bd5a1eafdbb 100644
--- a/enterprise/coderd/parameters_test.go
+++ b/enterprise/coderd/parameters_test.go
@@ -70,8 +70,8 @@ func TestDynamicParametersOwnerGroups(t *testing.T) {
require.Equal(t, -1, preview.ID)
require.Empty(t, preview.Diagnostics)
require.Equal(t, "group", preview.Parameters[0].Name)
- require.True(t, preview.Parameters[0].Value.Valid())
- require.Equal(t, database.EveryoneGroup, preview.Parameters[0].Value.Value.AsString())
+ require.True(t, preview.Parameters[0].Value.Valid)
+ require.Equal(t, database.EveryoneGroup, preview.Parameters[0].Value.Value)
// Send a new value, and see it reflected
err = stream.Send(codersdk.DynamicParametersRequest{
@@ -83,8 +83,8 @@ func TestDynamicParametersOwnerGroups(t *testing.T) {
require.Equal(t, 1, preview.ID)
require.Empty(t, preview.Diagnostics)
require.Equal(t, "group", preview.Parameters[0].Name)
- require.True(t, preview.Parameters[0].Value.Valid())
- require.Equal(t, group.Name, preview.Parameters[0].Value.Value.AsString())
+ require.True(t, preview.Parameters[0].Value.Valid)
+ require.Equal(t, group.Name, preview.Parameters[0].Value.Value)
// Back to default
err = stream.Send(codersdk.DynamicParametersRequest{
@@ -96,6 +96,6 @@ func TestDynamicParametersOwnerGroups(t *testing.T) {
require.Equal(t, 3, preview.ID)
require.Empty(t, preview.Diagnostics)
require.Equal(t, "group", preview.Parameters[0].Name)
- require.True(t, preview.Parameters[0].Value.Valid())
- require.Equal(t, database.EveryoneGroup, preview.Parameters[0].Value.Value.AsString())
+ require.True(t, preview.Parameters[0].Value.Valid)
+ require.Equal(t, database.EveryoneGroup, preview.Parameters[0].Value.Value)
}
diff --git a/go.mod b/go.mod
index c43feefefee4d..0c6b482b38f4e 100644
--- a/go.mod
+++ b/go.mod
@@ -96,12 +96,12 @@ require (
github.com/chromedp/chromedp v0.13.3
github.com/cli/safeexec v1.0.1
github.com/coder/flog v1.1.0
- github.com/coder/guts v1.3.1-0.20250428170043-ad369017e95b
+ github.com/coder/guts v1.5.0
github.com/coder/pretty v0.0.0-20230908205945-e89ba86370e0
github.com/coder/quartz v0.1.3
github.com/coder/retry v1.5.1
github.com/coder/serpent v0.10.0
- github.com/coder/terraform-provider-coder/v2 v2.4.1
+ github.com/coder/terraform-provider-coder/v2 v2.4.2
github.com/coder/websocket v1.8.13
github.com/coder/wgtunnel v0.1.13-0.20240522110300-ade90dfb2da0
github.com/coreos/go-oidc/v3 v3.14.1
@@ -204,7 +204,7 @@ require (
golang.org/x/sys v0.33.0
golang.org/x/term v0.32.0
golang.org/x/text v0.25.0 // indirect
- golang.org/x/tools v0.32.0
+ golang.org/x/tools v0.33.0
golang.org/x/xerrors v0.0.0-20240903120638-7835f813f4da
google.golang.org/api v0.231.0
google.golang.org/grpc v1.72.0
@@ -485,7 +485,7 @@ require (
require (
github.com/anthropics/anthropic-sdk-go v0.2.0-beta.3
- github.com/coder/preview v0.0.2-0.20250516233606-a1da43489319
+ github.com/coder/preview v0.0.2-0.20250520134327-ac391431027d
github.com/fsnotify/fsnotify v1.9.0
github.com/kylecarbs/aisdk-go v0.0.8
github.com/mark3labs/mcp-go v0.28.0
diff --git a/go.sum b/go.sum
index 9ffd716b334de..0f5638614d275 100644
--- a/go.sum
+++ b/go.sum
@@ -905,14 +905,14 @@ github.com/coder/go-httpstat v0.0.0-20230801153223-321c88088322 h1:m0lPZjlQ7vdVp
github.com/coder/go-httpstat v0.0.0-20230801153223-321c88088322/go.mod h1:rOLFDDVKVFiDqZFXoteXc97YXx7kFi9kYqR+2ETPkLQ=
github.com/coder/go-scim/pkg/v2 v2.0.0-20230221055123-1d63c1222136 h1:0RgB61LcNs24WOxc3PBvygSNTQurm0PYPujJjLLOzs0=
github.com/coder/go-scim/pkg/v2 v2.0.0-20230221055123-1d63c1222136/go.mod h1:VkD1P761nykiq75dz+4iFqIQIZka189tx1BQLOp0Skc=
-github.com/coder/guts v1.3.1-0.20250428170043-ad369017e95b h1:tfLKcE2s6D7YpFk7MUUCDE0Xbbmac+k2GqO8KMjv/Ug=
-github.com/coder/guts v1.3.1-0.20250428170043-ad369017e95b/go.mod h1:31NO4z6MVTOD4WaCLqE/hUAHGgNok9sRbuMc/LZFopI=
+github.com/coder/guts v1.5.0 h1:a94apf7xMf5jDdg1bIHzncbRiTn3+BvBZgrFSDbUnyI=
+github.com/coder/guts v1.5.0/go.mod h1:0Sbv5Kp83u1Nl7MIQiV2zmacJ3o02I341bkWkjWXSUQ=
github.com/coder/pq v1.10.5-0.20240813183442-0c420cb5a048 h1:3jzYUlGH7ZELIH4XggXhnTnP05FCYiAFeQpoN+gNR5I=
github.com/coder/pq v1.10.5-0.20240813183442-0c420cb5a048/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o=
github.com/coder/pretty v0.0.0-20230908205945-e89ba86370e0 h1:3A0ES21Ke+FxEM8CXx9n47SZOKOpgSE1bbJzlE4qPVs=
github.com/coder/pretty v0.0.0-20230908205945-e89ba86370e0/go.mod h1:5UuS2Ts+nTToAMeOjNlnHFkPahrtDkmpydBen/3wgZc=
-github.com/coder/preview v0.0.2-0.20250516233606-a1da43489319 h1:flPwcvOZ9RwENDYcLOnfYEClbKWfFvpQCddODdSS6Co=
-github.com/coder/preview v0.0.2-0.20250516233606-a1da43489319/go.mod h1:GfkwIv5gQLpL01qeGU1/YoxoFtt5trzCqnWZLo77clU=
+github.com/coder/preview v0.0.2-0.20250520134327-ac391431027d h1:MxAAuqcno5hMM45Ihl3KAjVOXbyZyt/+tjSiq9XMTC0=
+github.com/coder/preview v0.0.2-0.20250520134327-ac391431027d/go.mod h1:9bwyhQSVDjcxAWuFFaG6/qBqhaiW5oqF5PEQMhevKLs=
github.com/coder/quartz v0.1.3 h1:hA2nI8uUA2fNN9uhXv2I4xZD4aHkA7oH3g2t03v4xf8=
github.com/coder/quartz v0.1.3/go.mod h1:vsiCc+AHViMKH2CQpGIpFgdHIEQsxwm8yCscqKmzbRA=
github.com/coder/retry v1.5.1 h1:iWu8YnD8YqHs3XwqrqsjoBTAVqT9ml6z9ViJ2wlMiqc=
@@ -925,8 +925,8 @@ github.com/coder/tailscale v1.1.1-0.20250422090654-5090e715905e h1:nope/SZfoLB9M
github.com/coder/tailscale v1.1.1-0.20250422090654-5090e715905e/go.mod h1:1ggFFdHTRjPRu9Yc1yA7nVHBYB50w9Ce7VIXNqcW6Ko=
github.com/coder/terraform-config-inspect v0.0.0-20250107175719-6d06d90c630e h1:JNLPDi2P73laR1oAclY6jWzAbucf70ASAvf5mh2cME0=
github.com/coder/terraform-config-inspect v0.0.0-20250107175719-6d06d90c630e/go.mod h1:Gz/z9Hbn+4KSp8A2FBtNszfLSdT2Tn/uAKGuVqqWmDI=
-github.com/coder/terraform-provider-coder/v2 v2.4.1 h1:+HxLJVENJ+kvGhibQ0jbr8Evi6M857d9691ytxNbv90=
-github.com/coder/terraform-provider-coder/v2 v2.4.1/go.mod h1:2kaBpn5k9ZWtgKq5k4JbkVZG9DzEqR4mJSmpdshcO+s=
+github.com/coder/terraform-provider-coder/v2 v2.4.2 h1:41SJkgwgiA555kwQzGIQcNS3bCm12sVMUmBSa5zGr+A=
+github.com/coder/terraform-provider-coder/v2 v2.4.2/go.mod h1:2kaBpn5k9ZWtgKq5k4JbkVZG9DzEqR4mJSmpdshcO+s=
github.com/coder/trivy v0.0.0-20250409153844-e6b004bc465a h1:yryP7e+IQUAArlycH4hQrjXQ64eRNbxsV5/wuVXHgME=
github.com/coder/trivy v0.0.0-20250409153844-e6b004bc465a/go.mod h1:dDvq9axp3kZsT63gY2Znd1iwzfqDq3kXbQnccIrjRYY=
github.com/coder/websocket v1.8.13 h1:f3QZdXy7uGVz+4uCJy2nTZyM0yTBj8yANEHhqlXZ9FE=
@@ -2412,8 +2412,8 @@ golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU=
golang.org/x/tools v0.7.0/go.mod h1:4pg6aUX35JBAogB10C9AtvVL+qowtN4pT3CGSQex14s=
golang.org/x/tools v0.13.0/go.mod h1:HvlwmtVNQAhOuCjW7xxvovg8wbNq7LwfXh/k7wXUl58=
golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d/go.mod h1:aiJjzUbINMkxbQROHiO6hDPo2LHcIPhhQsa9DLh0yGk=
-golang.org/x/tools v0.32.0 h1:Q7N1vhpkQv7ybVzLFtTjvQya2ewbwNDZzUgfXGqtMWU=
-golang.org/x/tools v0.32.0/go.mod h1:ZxrU41P/wAbZD8EDa6dDCa6XfpkhJ7HFMjHJXfBDu8s=
+golang.org/x/tools v0.33.0 h1:4qz2S3zmRxbGIhDIAgjxvFutSvH5EfnsYrRBj0UI0bc=
+golang.org/x/tools v0.33.0/go.mod h1:CIJMaWEY88juyUfo7UbgPqbC8rU2OqfAV1h2Qp0oMYI=
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
diff --git a/site/src/api/typesGenerated.ts b/site/src/api/typesGenerated.ts
index d367302186870..4e337bd7c65f0 100644
--- a/site/src/api/typesGenerated.ts
+++ b/site/src/api/typesGenerated.ts
@@ -349,7 +349,7 @@ export interface ConvertLoginRequest {
// From codersdk/chat.go
export interface CreateChatMessageRequest {
readonly model: string;
- // embedded anonymous struct, please fix by naming it
+ // external type "github.com/kylecarbs/aisdk-go.Message", to include this type the package must be explicitly included in the parsing
readonly message: unknown;
readonly thinking: boolean;
}
@@ -741,6 +741,19 @@ export interface DeploymentValues {
readonly address?: string;
}
+// From codersdk/parameters.go
+export interface DiagnosticExtra {
+ readonly code: string;
+}
+
+// From codersdk/parameters.go
+export type DiagnosticSeverityString = "error" | "warning";
+
+export const DiagnosticSeverityStrings: DiagnosticSeverityString[] = [
+ "error",
+ "warning",
+];
+
// From codersdk/workspaceagents.go
export type DisplayApp =
| "port_forwarding_helper"
@@ -757,16 +770,16 @@ export const DisplayApps: DisplayApp[] = [
"web_terminal",
];
-// From codersdk/templateversions.go
+// From codersdk/parameters.go
export interface DynamicParametersRequest {
readonly id: number;
readonly inputs: Record<string, string>;
}
-// From codersdk/templateversions.go
+// From codersdk/parameters.go
export interface DynamicParametersResponse {
readonly id: number;
- readonly diagnostics: PreviewDiagnostics;
+ readonly diagnostics: readonly FriendlyDiagnostic[];
readonly parameters: readonly PreviewParameter[];
}
@@ -969,10 +982,10 @@ export const FormatZip = "zip";
// From codersdk/parameters.go
export interface FriendlyDiagnostic {
- readonly severity: PreviewDiagnosticSeverityString;
+ readonly severity: DiagnosticSeverityString;
readonly summary: string;
readonly detail: string;
- readonly extra: PreviewDiagnosticExtra;
+ readonly extra: DiagnosticExtra;
}
// From codersdk/apikey.go
@@ -1596,6 +1609,16 @@ export interface OIDCConfig {
readonly skip_issuer_checks: boolean;
}
+// From codersdk/parameters.go
+export type OptionType = "bool" | "list(string)" | "number" | "string";
+
+export const OptionTypes: OptionType[] = [
+ "bool",
+ "list(string)",
+ "number",
+ "string",
+];
+
// From codersdk/organizations.go
export interface Organization extends MinimalOrganization {
readonly description: string;
@@ -1663,6 +1686,34 @@ export interface Pagination {
readonly offset?: number;
}
+// From codersdk/parameters.go
+export type ParameterFormType =
+ | "checkbox"
+ | ""
+ | "dropdown"
+ | "error"
+ | "input"
+ | "multi-select"
+ | "radio"
+ | "slider"
+ | "switch"
+ | "tag-select"
+ | "textarea";
+
+export const ParameterFormTypes: ParameterFormType[] = [
+ "checkbox",
+ "",
+ "dropdown",
+ "error",
+ "input",
+ "multi-select",
+ "radio",
+ "slider",
+ "switch",
+ "tag-select",
+ "textarea",
+];
+
// From codersdk/idpsync.go
export interface PatchGroupIDPSyncConfigRequest {
readonly field: string;
@@ -1778,33 +1829,19 @@ export interface PresetParameter {
readonly Value: string;
}
-// From types/diagnostics.go
-export interface PreviewDiagnosticExtra {
- readonly code: string;
- // empty interface{} type, falling back to unknown
- readonly Wrapped: unknown;
-}
-
-// From types/diagnostics.go
-export type PreviewDiagnosticSeverityString = string;
-
-// From types/diagnostics.go
-export type PreviewDiagnostics = readonly FriendlyDiagnostic[];
-
-// From types/parameter.go
+// From codersdk/parameters.go
export interface PreviewParameter extends PreviewParameterData {
readonly value: NullHCLString;
- readonly diagnostics: PreviewDiagnostics;
+ readonly diagnostics: readonly FriendlyDiagnostic[];
}
-// From types/parameter.go
+// From codersdk/parameters.go
export interface PreviewParameterData {
readonly name: string;
readonly display_name: string;
readonly description: string;
- readonly type: PreviewParameterType;
- // this is likely an enum in an external package "github.com/coder/terraform-provider-coder/v2/provider.ParameterFormType"
- readonly form_type: string;
+ readonly type: OptionType;
+ readonly form_type: ParameterFormType;
readonly styling: PreviewParameterStyling;
readonly mutable: boolean;
readonly default_value: NullHCLString;
@@ -1816,7 +1853,7 @@ export interface PreviewParameterData {
readonly ephemeral: boolean;
}
-// From types/parameter.go
+// From codersdk/parameters.go
export interface PreviewParameterOption {
readonly name: string;
readonly description: string;
@@ -1824,17 +1861,14 @@ export interface PreviewParameterOption {
readonly icon: string;
}
-// From types/parameter.go
+// From codersdk/parameters.go
export interface PreviewParameterStyling {
readonly placeholder?: string;
readonly disabled?: boolean;
readonly label?: string;
}
-// From types/enum.go
-export type PreviewParameterType = string;
-
-// From types/parameter.go
+// From codersdk/parameters.go
export interface PreviewParameterValidation {
readonly validation_error: string;
readonly validation_regex: string | null;
diff --git a/site/src/pages/CreateWorkspacePage/CreateWorkspacePageViewExperimental.tsx b/site/src/pages/CreateWorkspacePage/CreateWorkspacePageViewExperimental.tsx
index 630faf8e806d2..cb4451b53acd7 100644
--- a/site/src/pages/CreateWorkspacePage/CreateWorkspacePageViewExperimental.tsx
+++ b/site/src/pages/CreateWorkspacePage/CreateWorkspacePageViewExperimental.tsx
@@ -1,5 +1,5 @@
import type * as TypesGen from "api/typesGenerated";
-import type { PreviewDiagnostics, PreviewParameter } from "api/typesGenerated";
+import type { FriendlyDiagnostic, PreviewParameter } from "api/typesGenerated";
import { Alert } from "components/Alert/Alert";
import { ErrorAlert } from "components/Alert/ErrorAlert";
import { Avatar } from "components/Avatar/Avatar";
@@ -51,7 +51,7 @@ export interface CreateWorkspacePageViewExperimentalProps {
creatingWorkspace: boolean;
defaultName?: string | null;
defaultOwner: TypesGen.User;
- diagnostics: PreviewDiagnostics;
+ diagnostics: readonly FriendlyDiagnostic[];
disabledParams?: string[];
error: unknown;
externalAuth: TypesGen.TemplateVersionExternalAuth[];
From b51c902e4859919dba9c30f804cce3642a6735a1 Mon Sep 17 00:00:00 2001
From: Edward Angert
Date: Tue, 20 May 2025 12:46:07 -0400
Subject: [PATCH 24/44] docs: add early access badge to devcontainers admin
(#17937)
[preview](https://coder.com/docs/@dev-container-tweaks/admin/templates/extending-templates/devcontainers)
---------
Co-authored-by: EdwardAngert <17991901+EdwardAngert@users.noreply.github.com>
---
docs/admin/templates/extending-templates/devcontainers.md | 2 ++
docs/manifest.json | 3 ++-
2 files changed, 4 insertions(+), 1 deletion(-)
diff --git a/docs/admin/templates/extending-templates/devcontainers.md b/docs/admin/templates/extending-templates/devcontainers.md
index 4894a012476a1..d4284bf48efde 100644
--- a/docs/admin/templates/extending-templates/devcontainers.md
+++ b/docs/admin/templates/extending-templates/devcontainers.md
@@ -122,3 +122,5 @@ resource "docker_container" "workspace" {
## Next Steps
- [Dev Containers Integration](../../../user-guides/devcontainers/index.md)
+- [Working with Dev Containers](../../../user-guides/devcontainers/working-with-dev-containers.md)
+- [Troubleshooting Dev Containers](../../../user-guides/devcontainers/troubleshooting-dev-containers.md)
diff --git a/docs/manifest.json b/docs/manifest.json
index 3af0cc7505057..6c85934017ebb 100644
--- a/docs/manifest.json
+++ b/docs/manifest.json
@@ -506,7 +506,8 @@
{
"title": "Configure a template for dev containers",
"description": "How to use configure your template for dev containers",
- "path": "./admin/templates/extending-templates/devcontainers.md"
+ "path": "./admin/templates/extending-templates/devcontainers.md",
+ "state": ["early access"]
},
{
"title": "Process Logging",
From 55313cffbccb03246de8c46554e464c40ec77a30 Mon Sep 17 00:00:00 2001
From: Julio <13398285+ggjulio@users.noreply.github.com>
Date: Tue, 20 May 2025 19:19:38 +0200
Subject: [PATCH 25/44] chore: add vsphere icon (#17936)
---
site/src/theme/icons.json | 1 +
site/static/icon/vsphere.svg | 14 ++++++++++++++
2 files changed, 15 insertions(+)
create mode 100644 site/static/icon/vsphere.svg
diff --git a/site/src/theme/icons.json b/site/src/theme/icons.json
index 96f3abb704ef9..8e92dd9a48198 100644
--- a/site/src/theme/icons.json
+++ b/site/src/theme/icons.json
@@ -102,6 +102,7 @@
"typescript.svg",
"ubuntu.svg",
"vault.svg",
+ "vsphere.svg",
"webstorm.svg",
"widgets.svg",
"windsurf.svg",
diff --git a/site/static/icon/vsphere.svg b/site/static/icon/vsphere.svg
new file mode 100644
index 0000000000000..e50dd3ca83c69
--- /dev/null
+++ b/site/static/icon/vsphere.svg
@@ -0,0 +1,14 @@
+
+
+
+
+
+
+
+
From b551a062d7a418ff0c6c83164759f9d055bf0b35 Mon Sep 17 00:00:00 2001
From: Thomas Kosiewski
Date: Tue, 20 May 2025 19:35:19 +0200
Subject: [PATCH 26/44] fix: correct environment variable name for MCP app
status slug (#17948)
Fixed the environment variable name for the app status slug in the Claude MCP configuration, renaming `CODER_MCP_CLAUDE_APP_STATUS_SLUG` to `CODER_MCP_APP_STATUS_SLUG` for consistency with the other MCP environment variables.
Because of the incorrect name, the user-level Claude.md did not include the instructions for reporting progress, so no status reports were received.
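For illustration only, a minimal Go sketch of reading the corrected variable (the real wiring goes through the `serpent` option in `cli/exp_mcp.go` shown in the diff below; the fallback message here is hypothetical):

```go
package main

import (
	"fmt"
	"os"
)

func main() {
	// CODER_MCP_APP_STATUS_SLUG is the corrected name; the old
	// CODER_MCP_CLAUDE_APP_STATUS_SLUG value is no longer read.
	slug := os.Getenv("CODER_MCP_APP_STATUS_SLUG")
	if slug == "" {
		// Without a slug there is nothing to report against.
		fmt.Println("app status slug not configured; skipping status reports")
		return
	}
	fmt.Printf("reporting task status against app %q\n", slug)
}
```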
---
cli/exp_mcp.go | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/cli/exp_mcp.go b/cli/exp_mcp.go
index 6174f0cffbf0e..fb866666daf4a 100644
--- a/cli/exp_mcp.go
+++ b/cli/exp_mcp.go
@@ -255,7 +255,7 @@ func (*RootCmd) mcpConfigureClaudeCode() *serpent.Command {
{
Name: "app-status-slug",
Description: "The app status slug to use when running the Coder MCP server.",
- Env: "CODER_MCP_CLAUDE_APP_STATUS_SLUG",
+ Env: "CODER_MCP_APP_STATUS_SLUG",
Flag: "claude-app-status-slug",
Value: serpent.StringOf(&appStatusSlug),
},
From 1f54c363753c22927db88abaa4d7f0ffa502a6ce Mon Sep 17 00:00:00 2001
From: Edward Angert
Date: Tue, 20 May 2025 15:10:52 -0400
Subject: [PATCH 27/44] docs: rename external-auth heading in setup doc
(#17868)
to help point searchers to the correct doc
[preview](https://coder.com/docs/@setup-ext-auth/admin/setup#continue-your-setup-with-external-authentication)
---------
Co-authored-by: EdwardAngert <17991901+EdwardAngert@users.noreply.github.com>
---
docs/admin/setup/index.md | 2 +-
docs/manifest.json | 2 +-
2 files changed, 2 insertions(+), 2 deletions(-)
diff --git a/docs/admin/setup/index.md b/docs/admin/setup/index.md
index 96000292266e2..1a34920e733e8 100644
--- a/docs/admin/setup/index.md
+++ b/docs/admin/setup/index.md
@@ -140,7 +140,7 @@ To configure Coder behind a corporate proxy, set the environment variables
`HTTP_PROXY` and `HTTPS_PROXY`. Be sure to restart the server. Lowercase values
(e.g. `http_proxy`) are also respected in this case.
-## External Authentication
+## Continue your setup with external authentication
Coder supports external authentication via OAuth2.0. This allows enabling
integrations with Git providers, such as GitHub, GitLab, and Bitbucket.
diff --git a/docs/manifest.json b/docs/manifest.json
index 6c85934017ebb..c191eda07c425 100644
--- a/docs/manifest.json
+++ b/docs/manifest.json
@@ -551,7 +551,7 @@
]
},
{
- "title": "External Auth",
+ "title": "External Authentication",
"description": "Learn how to configure external authentication",
"path": "./admin/external-auth.md",
"icon_path": "./images/icons/plug.svg"
From d2d21898f24e559e910011e800615dc14c19b5fc Mon Sep 17 00:00:00 2001
From: Danny Kopping
Date: Tue, 20 May 2025 22:16:23 +0200
Subject: [PATCH 28/44] chore: reduce `ignore_changes` suggestion scope
(#17947)
We probably shouldn't be suggesting `ignore_changes = all`. Only the
attributes that cause drift in prebuilds should be ignored; everything
else can behave as normal.
---------
Signed-off-by: Danny Kopping
Co-authored-by: Edward Angert
---
.../extending-templates/prebuilt-workspaces.md | 17 +++--------------
1 file changed, 3 insertions(+), 14 deletions(-)
diff --git a/docs/admin/templates/extending-templates/prebuilt-workspaces.md b/docs/admin/templates/extending-templates/prebuilt-workspaces.md
index 3fd82d62d1943..57f3dc0b3109f 100644
--- a/docs/admin/templates/extending-templates/prebuilt-workspaces.md
+++ b/docs/admin/templates/extending-templates/prebuilt-workspaces.md
@@ -142,7 +142,7 @@ To prevent this, add a `lifecycle` block with `ignore_changes`:
```hcl
resource "docker_container" "workspace" {
lifecycle {
- ignore_changes = all
+ ignore_changes = [env, image] # include all fields which caused drift
}
count = data.coder_workspace.me.start_count
@@ -151,19 +151,8 @@ resource "docker_container" "workspace" {
}
```
-For more targeted control, specify which attributes to ignore:
-
-```hcl
-resource "docker_container" "workspace" {
- lifecycle {
- ignore_changes = [name]
- }
-
- count = data.coder_workspace.me.start_count
- name = "coder-${data.coder_workspace_owner.me.name}-${lower(data.coder_workspace.me.name)}"
- ...
-}
-```
+Limit the scope of `ignore_changes` to include only the fields specified in the notification.
+If you include too many fields, Terraform might ignore changes that wouldn't otherwise cause drift.
Learn more about `ignore_changes` in the [Terraform documentation](https://developer.hashicorp.com/terraform/language/meta-arguments/lifecycle#ignore_changes).
From 3e7ff9d9e1c359285a8c39a15947231de6ee74c0 Mon Sep 17 00:00:00 2001
From: Danielle Maywood
Date: Tue, 20 May 2025 21:20:56 +0100
Subject: [PATCH 29/44] chore(coderd/rbac): add `Action{Create,Delete}Agent` to
`ResourceWorkspace` (#17932)
---
coderd/apidoc/docs.go | 4 ++
coderd/apidoc/swagger.json | 4 ++
coderd/database/dbauthz/dbauthz.go | 25 ++++++++--
coderd/database/dbauthz/dbauthz_test.go | 33 ++++++++++++-
coderd/database/dbmem/dbmem.go | 27 +++++++++++
coderd/database/dbmetrics/querymetrics.go | 7 +++
coderd/database/dbmock/dbmock.go | 15 ++++++
coderd/database/querier.go | 1 +
coderd/database/queries.sql.go | 59 +++++++++++++++++++++++
coderd/database/queries/workspaces.sql | 24 +++++++++
coderd/rbac/object_gen.go | 6 +++
coderd/rbac/policy/policy.go | 6 +++
coderd/rbac/roles.go | 16 ++++--
coderd/rbac/roles_test.go | 11 ++++-
codersdk/rbacresources_gen.go | 6 ++-
docs/reference/api/members.md | 10 ++++
docs/reference/api/schemas.md | 2 +
site/src/api/rbacresourcesGenerated.ts | 4 ++
site/src/api/typesGenerated.ts | 4 ++
19 files changed, 253 insertions(+), 11 deletions(-)
diff --git a/coderd/apidoc/docs.go b/coderd/apidoc/docs.go
index 95e2cc0f48ac8..e98197d3b5bb2 100644
--- a/coderd/apidoc/docs.go
+++ b/coderd/apidoc/docs.go
@@ -14901,7 +14901,9 @@ const docTemplate = `{
"application_connect",
"assign",
"create",
+ "create_agent",
"delete",
+ "delete_agent",
"read",
"read_personal",
"ssh",
@@ -14917,7 +14919,9 @@ const docTemplate = `{
"ActionApplicationConnect",
"ActionAssign",
"ActionCreate",
+ "ActionCreateAgent",
"ActionDelete",
+ "ActionDeleteAgent",
"ActionRead",
"ActionReadPersonal",
"ActionSSH",
diff --git a/coderd/apidoc/swagger.json b/coderd/apidoc/swagger.json
index 02212d9944415..fa103f55fbe9f 100644
--- a/coderd/apidoc/swagger.json
+++ b/coderd/apidoc/swagger.json
@@ -13509,7 +13509,9 @@
"application_connect",
"assign",
"create",
+ "create_agent",
"delete",
+ "delete_agent",
"read",
"read_personal",
"ssh",
@@ -13525,7 +13527,9 @@
"ActionApplicationConnect",
"ActionAssign",
"ActionCreate",
+ "ActionCreateAgent",
"ActionDelete",
+ "ActionDeleteAgent",
"ActionRead",
"ActionReadPersonal",
"ActionSSH",
diff --git a/coderd/database/dbauthz/dbauthz.go b/coderd/database/dbauthz/dbauthz.go
index 20afcf66c7867..ab3781452dd2d 100644
--- a/coderd/database/dbauthz/dbauthz.go
+++ b/coderd/database/dbauthz/dbauthz.go
@@ -177,7 +177,7 @@ var (
// Unsure why provisionerd needs update and read personal
rbac.ResourceUser.Type: {policy.ActionRead, policy.ActionReadPersonal, policy.ActionUpdatePersonal},
rbac.ResourceWorkspaceDormant.Type: {policy.ActionDelete, policy.ActionRead, policy.ActionUpdate, policy.ActionWorkspaceStop},
- rbac.ResourceWorkspace.Type: {policy.ActionDelete, policy.ActionRead, policy.ActionUpdate, policy.ActionWorkspaceStart, policy.ActionWorkspaceStop},
+ rbac.ResourceWorkspace.Type: {policy.ActionDelete, policy.ActionRead, policy.ActionUpdate, policy.ActionWorkspaceStart, policy.ActionWorkspaceStop, policy.ActionCreateAgent},
rbac.ResourceApiKey.Type: {policy.WildcardSymbol},
// When org scoped provisioner credentials are implemented,
// this can be reduced to read a specific org.
@@ -339,7 +339,7 @@ var (
rbac.ResourceProvisionerDaemon.Type: {policy.ActionCreate, policy.ActionRead, policy.ActionUpdate},
rbac.ResourceUser.Type: rbac.ResourceUser.AvailableActions(),
rbac.ResourceWorkspaceDormant.Type: {policy.ActionUpdate, policy.ActionDelete, policy.ActionWorkspaceStop},
- rbac.ResourceWorkspace.Type: {policy.ActionUpdate, policy.ActionDelete, policy.ActionWorkspaceStart, policy.ActionWorkspaceStop, policy.ActionSSH},
+ rbac.ResourceWorkspace.Type: {policy.ActionUpdate, policy.ActionDelete, policy.ActionWorkspaceStart, policy.ActionWorkspaceStop, policy.ActionSSH, policy.ActionCreateAgent, policy.ActionDeleteAgent},
rbac.ResourceWorkspaceProxy.Type: {policy.ActionCreate, policy.ActionUpdate, policy.ActionDelete},
rbac.ResourceDeploymentConfig.Type: {policy.ActionCreate, policy.ActionUpdate, policy.ActionDelete},
rbac.ResourceNotificationMessage.Type: {policy.ActionCreate, policy.ActionRead, policy.ActionUpdate, policy.ActionDelete},
@@ -3180,6 +3180,10 @@ func (q *querier) GetWorkspaceByOwnerIDAndName(ctx context.Context, arg database
return fetch(q.log, q.auth, q.db.GetWorkspaceByOwnerIDAndName)(ctx, arg)
}
+func (q *querier) GetWorkspaceByResourceID(ctx context.Context, resourceID uuid.UUID) (database.Workspace, error) {
+ return fetch(q.log, q.auth, q.db.GetWorkspaceByResourceID)(ctx, resourceID)
+}
+
func (q *querier) GetWorkspaceByWorkspaceAppID(ctx context.Context, workspaceAppID uuid.UUID) (database.Workspace, error) {
return fetch(q.log, q.auth, q.db.GetWorkspaceByWorkspaceAppID)(ctx, workspaceAppID)
}
@@ -3713,9 +3717,24 @@ func (q *querier) InsertWorkspace(ctx context.Context, arg database.InsertWorksp
}
func (q *querier) InsertWorkspaceAgent(ctx context.Context, arg database.InsertWorkspaceAgentParams) (database.WorkspaceAgent, error) {
- if err := q.authorizeContext(ctx, policy.ActionCreate, rbac.ResourceSystem); err != nil {
+ // NOTE(DanielleMaywood):
+ // Currently, the only way to link a Resource back to a Workspace is by following this chain:
+ //
+ // WorkspaceResource -> WorkspaceBuild -> Workspace
+ //
+ // It is possible for this function to be called without there existing
+ // a `WorkspaceBuild` to link back to. This means that we want to allow
+ // execution to continue if there isn't a workspace found to allow this
+ // behavior to continue.
+ workspace, err := q.db.GetWorkspaceByResourceID(ctx, arg.ResourceID)
+ if err != nil && !errors.Is(err, sql.ErrNoRows) {
return database.WorkspaceAgent{}, err
}
+
+ if err := q.authorizeContext(ctx, policy.ActionCreateAgent, workspace); err != nil {
+ return database.WorkspaceAgent{}, err
+ }
+
return q.db.InsertWorkspaceAgent(ctx, arg)
}
diff --git a/coderd/database/dbauthz/dbauthz_test.go b/coderd/database/dbauthz/dbauthz_test.go
index 1e4b4ea879b77..e8b90afbc396d 100644
--- a/coderd/database/dbauthz/dbauthz_test.go
+++ b/coderd/database/dbauthz/dbauthz_test.go
@@ -1928,6 +1928,22 @@ func (s *MethodTestSuite) TestWorkspace() {
})
check.Args(ws.ID).Asserts(ws, policy.ActionRead)
}))
+ s.Run("GetWorkspaceByResourceID", s.Subtest(func(db database.Store, check *expects) {
+ u := dbgen.User(s.T(), db, database.User{})
+ o := dbgen.Organization(s.T(), db, database.Organization{})
+ j := dbgen.ProvisionerJob(s.T(), db, nil, database.ProvisionerJob{Type: database.ProvisionerJobTypeWorkspaceBuild})
+ tpl := dbgen.Template(s.T(), db, database.Template{CreatedBy: u.ID, OrganizationID: o.ID})
+ tv := dbgen.TemplateVersion(s.T(), db, database.TemplateVersion{
+ TemplateID: uuid.NullUUID{UUID: tpl.ID, Valid: true},
+ JobID: j.ID,
+ OrganizationID: o.ID,
+ CreatedBy: u.ID,
+ })
+ ws := dbgen.Workspace(s.T(), db, database.WorkspaceTable{OwnerID: u.ID, TemplateID: tpl.ID, OrganizationID: o.ID})
+ _ = dbgen.WorkspaceBuild(s.T(), db, database.WorkspaceBuild{WorkspaceID: ws.ID, JobID: j.ID, TemplateVersionID: tv.ID})
+ res := dbgen.WorkspaceResource(s.T(), db, database.WorkspaceResource{JobID: j.ID})
+ check.Args(res.ID).Asserts(ws, policy.ActionRead)
+ }))
s.Run("GetWorkspaces", s.Subtest(func(_ database.Store, check *expects) {
// No asserts here because SQLFilter.
check.Args(database.GetWorkspacesParams{}).Asserts()
@@ -4018,12 +4034,25 @@ func (s *MethodTestSuite) TestSystemFunctions() {
Returns(slice.New(a, b))
}))
s.Run("InsertWorkspaceAgent", s.Subtest(func(db database.Store, check *expects) {
- dbtestutil.DisableForeignKeysAndTriggers(s.T(), db)
+ u := dbgen.User(s.T(), db, database.User{})
+ o := dbgen.Organization(s.T(), db, database.Organization{})
+ j := dbgen.ProvisionerJob(s.T(), db, nil, database.ProvisionerJob{Type: database.ProvisionerJobTypeWorkspaceBuild})
+ tpl := dbgen.Template(s.T(), db, database.Template{CreatedBy: u.ID, OrganizationID: o.ID})
+ tv := dbgen.TemplateVersion(s.T(), db, database.TemplateVersion{
+ TemplateID: uuid.NullUUID{UUID: tpl.ID, Valid: true},
+ JobID: j.ID,
+ OrganizationID: o.ID,
+ CreatedBy: u.ID,
+ })
+ ws := dbgen.Workspace(s.T(), db, database.WorkspaceTable{OwnerID: u.ID, TemplateID: tpl.ID, OrganizationID: o.ID})
+ _ = dbgen.WorkspaceBuild(s.T(), db, database.WorkspaceBuild{WorkspaceID: ws.ID, JobID: j.ID, TemplateVersionID: tv.ID})
+ res := dbgen.WorkspaceResource(s.T(), db, database.WorkspaceResource{JobID: j.ID})
check.Args(database.InsertWorkspaceAgentParams{
ID: uuid.New(),
+ ResourceID: res.ID,
Name: "dev",
APIKeyScope: database.AgentKeyScopeEnumAll,
- }).Asserts(rbac.ResourceSystem, policy.ActionCreate)
+ }).Asserts(ws, policy.ActionCreateAgent)
}))
s.Run("InsertWorkspaceApp", s.Subtest(func(db database.Store, check *expects) {
dbtestutil.DisableForeignKeysAndTriggers(s.T(), db)
diff --git a/coderd/database/dbmem/dbmem.go b/coderd/database/dbmem/dbmem.go
index 3ab2895876ac5..75c56b9c2324d 100644
--- a/coderd/database/dbmem/dbmem.go
+++ b/coderd/database/dbmem/dbmem.go
@@ -8053,6 +8053,33 @@ func (q *FakeQuerier) GetWorkspaceByOwnerIDAndName(_ context.Context, arg databa
return database.Workspace{}, sql.ErrNoRows
}
+func (q *FakeQuerier) GetWorkspaceByResourceID(ctx context.Context, resourceID uuid.UUID) (database.Workspace, error) {
+ q.mutex.RLock()
+ defer q.mutex.RUnlock()
+
+ for _, resource := range q.workspaceResources {
+ if resource.ID != resourceID {
+ continue
+ }
+
+ for _, build := range q.workspaceBuilds {
+ if build.JobID != resource.JobID {
+ continue
+ }
+
+ for _, workspace := range q.workspaces {
+ if workspace.ID != build.WorkspaceID {
+ continue
+ }
+
+ return q.extendWorkspace(workspace), nil
+ }
+ }
+ }
+
+ return database.Workspace{}, sql.ErrNoRows
+}
+
func (q *FakeQuerier) GetWorkspaceByWorkspaceAppID(_ context.Context, workspaceAppID uuid.UUID) (database.Workspace, error) {
if err := validateDatabaseType(workspaceAppID); err != nil {
return database.Workspace{}, err
diff --git a/coderd/database/dbmetrics/querymetrics.go b/coderd/database/dbmetrics/querymetrics.go
index 9122cedbf786c..47ec185915660 100644
--- a/coderd/database/dbmetrics/querymetrics.go
+++ b/coderd/database/dbmetrics/querymetrics.go
@@ -1887,6 +1887,13 @@ func (m queryMetricsStore) GetWorkspaceByOwnerIDAndName(ctx context.Context, arg
return workspace, err
}
+func (m queryMetricsStore) GetWorkspaceByResourceID(ctx context.Context, resourceID uuid.UUID) (database.Workspace, error) {
+ start := time.Now()
+ r0, r1 := m.s.GetWorkspaceByResourceID(ctx, resourceID)
+ m.queryLatencies.WithLabelValues("GetWorkspaceByResourceID").Observe(time.Since(start).Seconds())
+ return r0, r1
+}
+
func (m queryMetricsStore) GetWorkspaceByWorkspaceAppID(ctx context.Context, workspaceAppID uuid.UUID) (database.Workspace, error) {
start := time.Now()
workspace, err := m.s.GetWorkspaceByWorkspaceAppID(ctx, workspaceAppID)
diff --git a/coderd/database/dbmock/dbmock.go b/coderd/database/dbmock/dbmock.go
index e7af9ecd8fee8..e3a9a14698e42 100644
--- a/coderd/database/dbmock/dbmock.go
+++ b/coderd/database/dbmock/dbmock.go
@@ -3963,6 +3963,21 @@ func (mr *MockStoreMockRecorder) GetWorkspaceByOwnerIDAndName(ctx, arg any) *gom
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetWorkspaceByOwnerIDAndName", reflect.TypeOf((*MockStore)(nil).GetWorkspaceByOwnerIDAndName), ctx, arg)
}
+// GetWorkspaceByResourceID mocks base method.
+func (m *MockStore) GetWorkspaceByResourceID(ctx context.Context, resourceID uuid.UUID) (database.Workspace, error) {
+ m.ctrl.T.Helper()
+ ret := m.ctrl.Call(m, "GetWorkspaceByResourceID", ctx, resourceID)
+ ret0, _ := ret[0].(database.Workspace)
+ ret1, _ := ret[1].(error)
+ return ret0, ret1
+}
+
+// GetWorkspaceByResourceID indicates an expected call of GetWorkspaceByResourceID.
+func (mr *MockStoreMockRecorder) GetWorkspaceByResourceID(ctx, resourceID any) *gomock.Call {
+ mr.mock.ctrl.T.Helper()
+ return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetWorkspaceByResourceID", reflect.TypeOf((*MockStore)(nil).GetWorkspaceByResourceID), ctx, resourceID)
+}
+
// GetWorkspaceByWorkspaceAppID mocks base method.
func (m *MockStore) GetWorkspaceByWorkspaceAppID(ctx context.Context, workspaceAppID uuid.UUID) (database.Workspace, error) {
m.ctrl.T.Helper()
diff --git a/coderd/database/querier.go b/coderd/database/querier.go
index 78a88426349da..d248780397ead 100644
--- a/coderd/database/querier.go
+++ b/coderd/database/querier.go
@@ -422,6 +422,7 @@ type sqlcQuerier interface {
GetWorkspaceByAgentID(ctx context.Context, agentID uuid.UUID) (Workspace, error)
GetWorkspaceByID(ctx context.Context, id uuid.UUID) (Workspace, error)
GetWorkspaceByOwnerIDAndName(ctx context.Context, arg GetWorkspaceByOwnerIDAndNameParams) (Workspace, error)
+ GetWorkspaceByResourceID(ctx context.Context, resourceID uuid.UUID) (Workspace, error)
GetWorkspaceByWorkspaceAppID(ctx context.Context, workspaceAppID uuid.UUID) (Workspace, error)
GetWorkspaceModulesByJobID(ctx context.Context, jobID uuid.UUID) ([]WorkspaceModule, error)
GetWorkspaceModulesCreatedAfter(ctx context.Context, createdAt time.Time) ([]WorkspaceModule, error)
diff --git a/coderd/database/queries.sql.go b/coderd/database/queries.sql.go
index b956fc1db5f91..99a8bf4603b57 100644
--- a/coderd/database/queries.sql.go
+++ b/coderd/database/queries.sql.go
@@ -18143,6 +18143,65 @@ func (q *sqlQuerier) GetWorkspaceByOwnerIDAndName(ctx context.Context, arg GetWo
return i, err
}
+const getWorkspaceByResourceID = `-- name: GetWorkspaceByResourceID :one
+SELECT
+ id, created_at, updated_at, owner_id, organization_id, template_id, deleted, name, autostart_schedule, ttl, last_used_at, dormant_at, deleting_at, automatic_updates, favorite, next_start_at, owner_avatar_url, owner_username, organization_name, organization_display_name, organization_icon, organization_description, template_name, template_display_name, template_icon, template_description
+FROM
+ workspaces_expanded as workspaces
+WHERE
+ workspaces.id = (
+ SELECT
+ workspace_id
+ FROM
+ workspace_builds
+ WHERE
+ workspace_builds.job_id = (
+ SELECT
+ job_id
+ FROM
+ workspace_resources
+ WHERE
+ workspace_resources.id = $1
+ )
+ )
+LIMIT
+ 1
+`
+
+func (q *sqlQuerier) GetWorkspaceByResourceID(ctx context.Context, resourceID uuid.UUID) (Workspace, error) {
+ row := q.db.QueryRowContext(ctx, getWorkspaceByResourceID, resourceID)
+ var i Workspace
+ err := row.Scan(
+ &i.ID,
+ &i.CreatedAt,
+ &i.UpdatedAt,
+ &i.OwnerID,
+ &i.OrganizationID,
+ &i.TemplateID,
+ &i.Deleted,
+ &i.Name,
+ &i.AutostartSchedule,
+ &i.Ttl,
+ &i.LastUsedAt,
+ &i.DormantAt,
+ &i.DeletingAt,
+ &i.AutomaticUpdates,
+ &i.Favorite,
+ &i.NextStartAt,
+ &i.OwnerAvatarUrl,
+ &i.OwnerUsername,
+ &i.OrganizationName,
+ &i.OrganizationDisplayName,
+ &i.OrganizationIcon,
+ &i.OrganizationDescription,
+ &i.TemplateName,
+ &i.TemplateDisplayName,
+ &i.TemplateIcon,
+ &i.TemplateDescription,
+ )
+ return i, err
+}
+
const getWorkspaceByWorkspaceAppID = `-- name: GetWorkspaceByWorkspaceAppID :one
SELECT
id, created_at, updated_at, owner_id, organization_id, template_id, deleted, name, autostart_schedule, ttl, last_used_at, dormant_at, deleting_at, automatic_updates, favorite, next_start_at, owner_avatar_url, owner_username, organization_name, organization_display_name, organization_icon, organization_description, template_name, template_display_name, template_icon, template_description
diff --git a/coderd/database/queries/workspaces.sql b/coderd/database/queries/workspaces.sql
index 4ec74c066fe41..44b7dcbf0387d 100644
--- a/coderd/database/queries/workspaces.sql
+++ b/coderd/database/queries/workspaces.sql
@@ -8,6 +8,30 @@ WHERE
LIMIT
1;
+-- name: GetWorkspaceByResourceID :one
+SELECT
+ *
+FROM
+ workspaces_expanded as workspaces
+WHERE
+ workspaces.id = (
+ SELECT
+ workspace_id
+ FROM
+ workspace_builds
+ WHERE
+ workspace_builds.job_id = (
+ SELECT
+ job_id
+ FROM
+ workspace_resources
+ WHERE
+ workspace_resources.id = @resource_id
+ )
+ )
+LIMIT
+ 1;
+
-- name: GetWorkspaceByWorkspaceAppID :one
SELECT
*
diff --git a/coderd/rbac/object_gen.go b/coderd/rbac/object_gen.go
index ad1a510fd44bd..f19d90894dd55 100644
--- a/coderd/rbac/object_gen.go
+++ b/coderd/rbac/object_gen.go
@@ -308,7 +308,9 @@ var (
// Valid Actions
// - "ActionApplicationConnect" :: connect to workspace apps via browser
// - "ActionCreate" :: create a new workspace
+ // - "ActionCreateAgent" :: create a new workspace agent
// - "ActionDelete" :: delete workspace
+ // - "ActionDeleteAgent" :: delete an existing workspace agent
// - "ActionRead" :: read workspace data to view on the UI
// - "ActionSSH" :: ssh into a given workspace
// - "ActionWorkspaceStart" :: allows starting a workspace
@@ -338,7 +340,9 @@ var (
// Valid Actions
// - "ActionApplicationConnect" :: connect to workspace apps via browser
// - "ActionCreate" :: create a new workspace
+ // - "ActionCreateAgent" :: create a new workspace agent
// - "ActionDelete" :: delete workspace
+ // - "ActionDeleteAgent" :: delete an existing workspace agent
// - "ActionRead" :: read workspace data to view on the UI
// - "ActionSSH" :: ssh into a given workspace
// - "ActionWorkspaceStart" :: allows starting a workspace
@@ -406,7 +410,9 @@ func AllActions() []policy.Action {
policy.ActionApplicationConnect,
policy.ActionAssign,
policy.ActionCreate,
+ policy.ActionCreateAgent,
policy.ActionDelete,
+ policy.ActionDeleteAgent,
policy.ActionRead,
policy.ActionReadPersonal,
policy.ActionSSH,
diff --git a/coderd/rbac/policy/policy.go b/coderd/rbac/policy/policy.go
index c37e84c48f964..160062283f857 100644
--- a/coderd/rbac/policy/policy.go
+++ b/coderd/rbac/policy/policy.go
@@ -24,6 +24,9 @@ const (
ActionReadPersonal Action = "read_personal"
ActionUpdatePersonal Action = "update_personal"
+
+ ActionCreateAgent Action = "create_agent"
+ ActionDeleteAgent Action = "delete_agent"
)
type PermissionDefinition struct {
@@ -67,6 +70,9 @@ var workspaceActions = map[Action]ActionDefinition{
// Running a workspace
ActionSSH: actDef("ssh into a given workspace"),
ActionApplicationConnect: actDef("connect to workspace apps via browser"),
+
+ ActionCreateAgent: actDef("create a new workspace agent"),
+ ActionDeleteAgent: actDef("delete an existing workspace agent"),
}
// RBACPermissions is indexed by the type
diff --git a/coderd/rbac/roles.go b/coderd/rbac/roles.go
index 0b94a74201b16..89f86b567a48d 100644
--- a/coderd/rbac/roles.go
+++ b/coderd/rbac/roles.go
@@ -272,7 +272,7 @@ func ReloadBuiltinRoles(opts *RoleOptions) {
// This adds back in the Workspace permissions.
Permissions(map[string][]policy.Action{
ResourceWorkspace.Type: ownerWorkspaceActions,
- ResourceWorkspaceDormant.Type: {policy.ActionRead, policy.ActionDelete, policy.ActionCreate, policy.ActionUpdate, policy.ActionWorkspaceStop},
+ ResourceWorkspaceDormant.Type: {policy.ActionRead, policy.ActionDelete, policy.ActionCreate, policy.ActionUpdate, policy.ActionWorkspaceStop, policy.ActionCreateAgent, policy.ActionDeleteAgent},
})...),
Org: map[string][]Permission{},
User: []Permission{},
@@ -291,7 +291,7 @@ func ReloadBuiltinRoles(opts *RoleOptions) {
User: append(allPermsExcept(ResourceWorkspaceDormant, ResourceUser, ResourceOrganizationMember),
Permissions(map[string][]policy.Action{
// Reduced permission set on dormant workspaces. No build, ssh, or exec
- ResourceWorkspaceDormant.Type: {policy.ActionRead, policy.ActionDelete, policy.ActionCreate, policy.ActionUpdate, policy.ActionWorkspaceStop},
+ ResourceWorkspaceDormant.Type: {policy.ActionRead, policy.ActionDelete, policy.ActionCreate, policy.ActionUpdate, policy.ActionWorkspaceStop, policy.ActionCreateAgent, policy.ActionDeleteAgent},
// Users cannot do create/update/delete on themselves, but they
// can read their own details.
ResourceUser.Type: {policy.ActionRead, policy.ActionReadPersonal, policy.ActionUpdatePersonal},
@@ -412,7 +412,7 @@ func ReloadBuiltinRoles(opts *RoleOptions) {
Org: map[string][]Permission{
// Org admins should not have workspace exec perms.
organizationID.String(): append(allPermsExcept(ResourceWorkspace, ResourceWorkspaceDormant, ResourceAssignRole), Permissions(map[string][]policy.Action{
- ResourceWorkspaceDormant.Type: {policy.ActionRead, policy.ActionDelete, policy.ActionCreate, policy.ActionUpdate, policy.ActionWorkspaceStop},
+ ResourceWorkspaceDormant.Type: {policy.ActionRead, policy.ActionDelete, policy.ActionCreate, policy.ActionUpdate, policy.ActionWorkspaceStop, policy.ActionCreateAgent, policy.ActionDeleteAgent},
ResourceWorkspace.Type: slice.Omit(ResourceWorkspace.AvailableActions(), policy.ActionApplicationConnect, policy.ActionSSH),
})...),
},
@@ -529,6 +529,16 @@ func ReloadBuiltinRoles(opts *RoleOptions) {
ResourceType: ResourceWorkspace.Type,
Action: policy.ActionDelete,
},
+ {
+ Negate: true,
+ ResourceType: ResourceWorkspace.Type,
+ Action: policy.ActionCreateAgent,
+ },
+ {
+ Negate: true,
+ ResourceType: ResourceWorkspace.Type,
+ Action: policy.ActionDeleteAgent,
+ },
},
},
User: []Permission{},
diff --git a/coderd/rbac/roles_test.go b/coderd/rbac/roles_test.go
index 6d42a01474d1a..4dfbc8fa2ab31 100644
--- a/coderd/rbac/roles_test.go
+++ b/coderd/rbac/roles_test.go
@@ -226,6 +226,15 @@ func TestRolePermissions(t *testing.T) {
false: {setOtherOrg, setOrgNotMe, memberMe, templateAdmin, userAdmin},
},
},
+ {
+ Name: "CreateDeleteWorkspaceAgent",
+ Actions: []policy.Action{policy.ActionCreateAgent, policy.ActionDeleteAgent},
+ Resource: rbac.ResourceWorkspace.WithID(workspaceID).InOrg(orgID).WithOwner(currentUser.String()),
+ AuthorizeMap: map[bool][]hasAuthSubjects{
+ true: {owner, orgMemberMe, orgAdmin},
+ false: {setOtherOrg, memberMe, userAdmin, templateAdmin, orgTemplateAdmin, orgUserAdmin, orgAuditor, orgMemberMeBanWorkspace},
+ },
+ },
{
Name: "Templates",
Actions: []policy.Action{policy.ActionCreate, policy.ActionUpdate, policy.ActionDelete},
@@ -462,7 +471,7 @@ func TestRolePermissions(t *testing.T) {
},
{
Name: "WorkspaceDormant",
- Actions: append(crud, policy.ActionWorkspaceStop),
+ Actions: append(crud, policy.ActionWorkspaceStop, policy.ActionCreateAgent, policy.ActionDeleteAgent),
Resource: rbac.ResourceWorkspaceDormant.WithID(uuid.New()).InOrg(orgID).WithOwner(memberMe.Actor.ID),
AuthorizeMap: map[bool][]hasAuthSubjects{
true: {orgMemberMe, orgAdmin, owner},
diff --git a/codersdk/rbacresources_gen.go b/codersdk/rbacresources_gen.go
index 6157281f21356..95792bb8e2a7b 100644
--- a/codersdk/rbacresources_gen.go
+++ b/codersdk/rbacresources_gen.go
@@ -49,7 +49,9 @@ const (
ActionApplicationConnect RBACAction = "application_connect"
ActionAssign RBACAction = "assign"
ActionCreate RBACAction = "create"
+ ActionCreateAgent RBACAction = "create_agent"
ActionDelete RBACAction = "delete"
+ ActionDeleteAgent RBACAction = "delete_agent"
ActionRead RBACAction = "read"
ActionReadPersonal RBACAction = "read_personal"
ActionSSH RBACAction = "ssh"
@@ -97,9 +99,9 @@ var RBACResourceActions = map[RBACResource][]RBACAction{
ResourceTemplate: {ActionCreate, ActionDelete, ActionRead, ActionUpdate, ActionUse, ActionViewInsights},
ResourceUser: {ActionCreate, ActionDelete, ActionRead, ActionReadPersonal, ActionUpdate, ActionUpdatePersonal},
ResourceWebpushSubscription: {ActionCreate, ActionDelete, ActionRead},
- ResourceWorkspace: {ActionApplicationConnect, ActionCreate, ActionDelete, ActionRead, ActionSSH, ActionWorkspaceStart, ActionWorkspaceStop, ActionUpdate},
+ ResourceWorkspace: {ActionApplicationConnect, ActionCreate, ActionCreateAgent, ActionDelete, ActionDeleteAgent, ActionRead, ActionSSH, ActionWorkspaceStart, ActionWorkspaceStop, ActionUpdate},
ResourceWorkspaceAgentDevcontainers: {ActionCreate},
ResourceWorkspaceAgentResourceMonitor: {ActionCreate, ActionRead, ActionUpdate},
- ResourceWorkspaceDormant: {ActionApplicationConnect, ActionCreate, ActionDelete, ActionRead, ActionSSH, ActionWorkspaceStart, ActionWorkspaceStop, ActionUpdate},
+ ResourceWorkspaceDormant: {ActionApplicationConnect, ActionCreate, ActionCreateAgent, ActionDelete, ActionDeleteAgent, ActionRead, ActionSSH, ActionWorkspaceStart, ActionWorkspaceStop, ActionUpdate},
ResourceWorkspaceProxy: {ActionCreate, ActionDelete, ActionRead, ActionUpdate},
}
diff --git a/docs/reference/api/members.md b/docs/reference/api/members.md
index a58a597d1ea2a..6b5d124753bc0 100644
--- a/docs/reference/api/members.md
+++ b/docs/reference/api/members.md
@@ -169,7 +169,9 @@ Status Code **200**
| `action` | `application_connect` |
| `action` | `assign` |
| `action` | `create` |
+| `action` | `create_agent` |
| `action` | `delete` |
+| `action` | `delete_agent` |
| `action` | `read` |
| `action` | `read_personal` |
| `action` | `ssh` |
@@ -336,7 +338,9 @@ Status Code **200**
| `action` | `application_connect` |
| `action` | `assign` |
| `action` | `create` |
+| `action` | `create_agent` |
| `action` | `delete` |
+| `action` | `delete_agent` |
| `action` | `read` |
| `action` | `read_personal` |
| `action` | `ssh` |
@@ -503,7 +507,9 @@ Status Code **200**
| `action` | `application_connect` |
| `action` | `assign` |
| `action` | `create` |
+| `action` | `create_agent` |
| `action` | `delete` |
+| `action` | `delete_agent` |
| `action` | `read` |
| `action` | `read_personal` |
| `action` | `ssh` |
@@ -639,7 +645,9 @@ Status Code **200**
| `action` | `application_connect` |
| `action` | `assign` |
| `action` | `create` |
+| `action` | `create_agent` |
| `action` | `delete` |
+| `action` | `delete_agent` |
| `action` | `read` |
| `action` | `read_personal` |
| `action` | `ssh` |
@@ -997,7 +1005,9 @@ Status Code **200**
| `action` | `application_connect` |
| `action` | `assign` |
| `action` | `create` |
+| `action` | `create_agent` |
| `action` | `delete` |
+| `action` | `delete_agent` |
| `action` | `read` |
| `action` | `read_personal` |
| `action` | `ssh` |
diff --git a/docs/reference/api/schemas.md b/docs/reference/api/schemas.md
index 9325d751bc352..86cc4644c2685 100644
--- a/docs/reference/api/schemas.md
+++ b/docs/reference/api/schemas.md
@@ -5913,7 +5913,9 @@ Git clone makes use of this by parsing the URL from: 'Username for "https://gith
| `application_connect` |
| `assign` |
| `create` |
+| `create_agent` |
| `delete` |
+| `delete_agent` |
| `read` |
| `read_personal` |
| `ssh` |
diff --git a/site/src/api/rbacresourcesGenerated.ts b/site/src/api/rbacresourcesGenerated.ts
index 3acb86c079908..885f603c1eb82 100644
--- a/site/src/api/rbacresourcesGenerated.ts
+++ b/site/src/api/rbacresourcesGenerated.ts
@@ -173,7 +173,9 @@ export const RBACResourceActions: Partial<
workspace: {
application_connect: "connect to workspace apps via browser",
create: "create a new workspace",
+ create_agent: "create a new workspace agent",
delete: "delete workspace",
+ delete_agent: "delete an existing workspace agent",
read: "read workspace data to view on the UI",
ssh: "ssh into a given workspace",
start: "allows starting a workspace",
@@ -191,7 +193,9 @@ export const RBACResourceActions: Partial<
workspace_dormant: {
application_connect: "connect to workspace apps via browser",
create: "create a new workspace",
+ create_agent: "create a new workspace agent",
delete: "delete workspace",
+ delete_agent: "delete an existing workspace agent",
read: "read workspace data to view on the UI",
ssh: "ssh into a given workspace",
start: "allows starting a workspace",
diff --git a/site/src/api/typesGenerated.ts b/site/src/api/typesGenerated.ts
index 4e337bd7c65f0..35cd006ec6c55 100644
--- a/site/src/api/typesGenerated.ts
+++ b/site/src/api/typesGenerated.ts
@@ -2131,7 +2131,9 @@ export type RBACAction =
| "application_connect"
| "assign"
| "create"
+ | "create_agent"
| "delete"
+ | "delete_agent"
| "read"
| "read_personal"
| "ssh"
@@ -2147,7 +2149,9 @@ export const RBACActions: RBACAction[] = [
"application_connect",
"assign",
"create",
+ "create_agent",
"delete",
+ "delete_agent",
"read",
"read_personal",
"ssh",
From 36224f263f2b7c5f9af1250e6daa8e612b476ff5 Mon Sep 17 00:00:00 2001
From: Bruno Quaresma
Date: Tue, 20 May 2025 22:24:17 -0300
Subject: [PATCH 30/44] chore: replace MUI icons with Lucide icons - 17
(#17957)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
1. ExpandMoreOutlined → ChevronDownIcon
2. Error/ErrorIcon → CircleAlertIcon
3. CheckCircle → CircleCheckIcon
4. Warning → TriangleAlertIcon
---
.../WorkspaceAppStatus/WorkspaceAppStatus.tsx | 12 ++++++------
.../workspaces/WorkspaceTiming/StagesChart.tsx | 6 +++---
site/src/pages/ChatPage/ChatToolInvocation.tsx | 14 ++++++++++----
.../TemplateInsightsPage/IntervalMenu.tsx | 4 ++--
.../TemplateInsightsPage/WeekPicker.tsx | 4 ++--
.../ProvisionerTagsPopover.tsx | 4 ++--
site/src/pages/WorkspacePage/AppStatuses.tsx | 12 ++++++------
7 files changed, 31 insertions(+), 25 deletions(-)
diff --git a/site/src/modules/workspaces/WorkspaceAppStatus/WorkspaceAppStatus.tsx b/site/src/modules/workspaces/WorkspaceAppStatus/WorkspaceAppStatus.tsx
index 412df60d9203e..95123ce8734df 100644
--- a/site/src/modules/workspaces/WorkspaceAppStatus/WorkspaceAppStatus.tsx
+++ b/site/src/modules/workspaces/WorkspaceAppStatus/WorkspaceAppStatus.tsx
@@ -1,10 +1,7 @@
import type { Theme } from "@emotion/react";
import { useTheme } from "@emotion/react";
import AppsIcon from "@mui/icons-material/Apps";
-import CheckCircle from "@mui/icons-material/CheckCircle";
-import ErrorIcon from "@mui/icons-material/Error";
import InsertDriveFile from "@mui/icons-material/InsertDriveFile";
-import Warning from "@mui/icons-material/Warning";
import CircularProgress from "@mui/material/CircularProgress";
import type {
WorkspaceAppStatus as APIWorkspaceAppStatus,
@@ -12,6 +9,9 @@ import type {
WorkspaceAgent,
WorkspaceApp,
} from "api/typesGenerated";
+import { CircleCheckIcon } from "lucide-react";
+import { CircleAlertIcon } from "lucide-react";
+import { TriangleAlertIcon } from "lucide-react";
import { ExternalLinkIcon } from "lucide-react";
import { useAppLink } from "modules/apps/useAppLink";
import type { FC } from "react";
@@ -46,13 +46,13 @@ const getStatusIcon = (theme: Theme, state: APIWorkspaceAppStatus["state"]) => {
const color = getStatusColor(theme, state);
switch (state) {
case "complete":
- return