diff --git a/coderd/provisionerdserver/acquirer_test.go b/coderd/provisionerdserver/acquirer_test.go index a916cb68fba1f..12429bf5ccb3c 100644 --- a/coderd/provisionerdserver/acquirer_test.go +++ b/coderd/provisionerdserver/acquirer_test.go @@ -523,8 +523,8 @@ func TestAcquirer_MatchTags(t *testing.T) { // Generate a table that can be copy-pasted into docs/admin/provisioners.md lines := []string{ "\n", - "| Provisioner Tags | Job Tags | Can Run Job? |", - "|------------------|----------|--------------|", + "| Provisioner Tags | Job Tags | Same Org | Can Run Job? |", + "|------------------|----------|----------|--------------|", } // turn the JSON map into k=v for readability kvs := func(m map[string]string) string { @@ -539,10 +539,14 @@ func TestAcquirer_MatchTags(t *testing.T) { } for _, tt := range testCases { acquire := "✅" + sameOrg := "✅" if !tt.expectAcquire { acquire = "❌" } - s := fmt.Sprintf("| %s | %s | %s |", kvs(tt.acquireJobTags), kvs(tt.provisionerJobTags), acquire) + if tt.unmatchedOrg { + sameOrg = "❌" + } + s := fmt.Sprintf("| %s | %s | %s | %s |", kvs(tt.acquireJobTags), kvs(tt.provisionerJobTags), sameOrg, acquire) lines = append(lines, s) } t.Logf("You can paste this into docs/admin/provisioners.md") diff --git a/docs/admin/provisioners.md b/docs/admin/provisioners.md index b8350f9237e5e..159ef79332de4 100644 --- a/docs/admin/provisioners.md +++ b/docs/admin/provisioners.md @@ -178,7 +178,8 @@ A provisioner can run a given build job if one of the below is true: 1. If a job has any explicit tags, it can only run on a provisioner with those explicit tags (the provisioner could have additional tags). 
-The external provisioner in the above example can run build jobs with tags: +The external provisioner in the above example can run build jobs in the same +organization with tags: - `environment=on_prem` - `datacenter=chicago` @@ -186,7 +187,8 @@ The external provisioner in the above example can run build jobs with tags: However, it will not pick up any build jobs that do not have either of the `environment` or `datacenter` tags set. It will also not pick up any build jobs -from templates with the tag `scope=user` set. +from templates with the tag `scope=user` set, or build jobs from templates in +different organizations. > [!NOTE] If you only run tagged provisioners, you will need to specify a set of > tags that matches at least one provisioner for _all_ template import jobs and @@ -198,34 +200,35 @@ from templates with the tag `scope=user` set. This is illustrated in the below table: -| Provisioner Tags | Job Tags | Can Run Job? | -| ----------------------------------------------------------------- | ---------------------------------------------------------------- | ------------ | -| scope=organization owner= | scope=organization owner= | ✅ | -| scope=organization owner= environment=on-prem | scope=organization owner= environment=on-prem | ✅ | -| scope=organization owner= environment=on-prem datacenter=chicago | scope=organization owner= environment=on-prem | ✅ | -| scope=organization owner= environment=on-prem datacenter=chicago | scope=organization owner= environment=on-prem datacenter=chicago | ✅ | -| scope=user owner=aaa | scope=user owner=aaa | ✅ | -| scope=user owner=aaa environment=on-prem | scope=user owner=aaa | ✅ | -| scope=user owner=aaa environment=on-prem | scope=user owner=aaa environment=on-prem | ✅ | -| scope=user owner=aaa environment=on-prem datacenter=chicago | scope=user owner=aaa environment=on-prem | ✅ | -| scope=user owner=aaa environment=on-prem datacenter=chicago | scope=user owner=aaa environment=on-prem datacenter=chicago | ✅ | -| 
scope=organization owner= | scope=organization owner= environment=on-prem | ❌ | -| scope=organization owner= environment=on-prem | scope=organization owner= | ❌ | -| scope=organization owner= environment=on-prem | scope=organization owner= environment=on-prem datacenter=chicago | ❌ | -| scope=organization owner= environment=on-prem datacenter=new_york | scope=organization owner= environment=on-prem datacenter=chicago | ❌ | -| scope=user owner=aaa | scope=organization owner= | ❌ | -| scope=user owner=aaa | scope=user owner=bbb | ❌ | -| scope=organization owner= | scope=user owner=aaa | ❌ | -| scope=organization owner= | scope=user owner=aaa environment=on-prem | ❌ | -| scope=user owner=aaa | scope=user owner=aaa environment=on-prem | ❌ | -| scope=user owner=aaa environment=on-prem | scope=user owner=aaa environment=on-prem datacenter=chicago | ❌ | -| scope=user owner=aaa environment=on-prem datacenter=chicago | scope=user owner=aaa environment=on-prem datacenter=new_york | ❌ | +| Provisioner Tags | Job Tags | Same Org | Can Run Job? 
| +| ----------------------------------------------------------------- | ---------------------------------------------------------------- | -------- | ------------ | +| scope=organization owner= | scope=organization owner= | ✅ | ✅ | +| scope=organization owner= environment=on-prem | scope=organization owner= environment=on-prem | ✅ | ✅ | +| scope=organization owner= environment=on-prem datacenter=chicago | scope=organization owner= environment=on-prem | ✅ | ✅ | +| scope=organization owner= environment=on-prem datacenter=chicago | scope=organization owner= environment=on-prem datacenter=chicago | ✅ | ✅ | +| scope=user owner=aaa | scope=user owner=aaa | ✅ | ✅ | +| scope=user owner=aaa environment=on-prem | scope=user owner=aaa | ✅ | ✅ | +| scope=user owner=aaa environment=on-prem | scope=user owner=aaa environment=on-prem | ✅ | ✅ | +| scope=user owner=aaa environment=on-prem datacenter=chicago | scope=user owner=aaa environment=on-prem | ✅ | ✅ | +| scope=user owner=aaa environment=on-prem datacenter=chicago | scope=user owner=aaa environment=on-prem datacenter=chicago | ✅ | ✅ | +| scope=organization owner= | scope=organization owner= environment=on-prem | ✅ | ❌ | +| scope=organization owner= environment=on-prem | scope=organization owner= | ✅ | ❌ | +| scope=organization owner= environment=on-prem | scope=organization owner= environment=on-prem datacenter=chicago | ✅ | ❌ | +| scope=organization owner= environment=on-prem datacenter=new_york | scope=organization owner= environment=on-prem datacenter=chicago | ✅ | ❌ | +| scope=user owner=aaa | scope=organization owner= | ✅ | ❌ | +| scope=user owner=aaa | scope=user owner=bbb | ✅ | ❌ | +| scope=organization owner= | scope=user owner=aaa | ✅ | ❌ | +| scope=organization owner= | scope=user owner=aaa environment=on-prem | ✅ | ❌ | +| scope=user owner=aaa | scope=user owner=aaa environment=on-prem | ✅ | ❌ | +| scope=user owner=aaa environment=on-prem | scope=user owner=aaa environment=on-prem datacenter=chicago | ✅ | ❌ | +| 
scope=user owner=aaa environment=on-prem datacenter=chicago | scope=user owner=aaa environment=on-prem datacenter=new_york | ✅ | ❌ | +| scope=organization owner= environment=on-prem | scope=organization owner= environment=on-prem | ❌ | ❌ | > **Note to maintainers:** to generate this table, run the following command and > copy the output: > > ``` -> go test -v -count=1 ./coderd/provisionerserver/ -test.run='^TestAcquirer_MatchTags/GenTable$' +> go test -v -count=1 ./coderd/provisionerdserver/ -test.run='^TestAcquirer_MatchTags/GenTable$' > ``` ## Types of provisioners @@ -288,8 +291,7 @@ will use in concert with the Helm chart for deploying the Coder server. ```sh coder provisioner keys create my-cool-key --org default # Optionally, you can specify tags for the provisioner key: - # coder provisioner keys create my-cool-key --org default --tags location=auh kind=k8s - ``` + # coder provisioner keys create my-cool-key --org default --tag location=auh --tag kind=k8s Successfully created provisioner key kubernetes-key! Save this authentication token, it will not be shown again. @@ -300,25 +302,7 @@ will use in concert with the Helm chart for deploying the Coder server. 1. Store the key in a kubernetes secret: ```sh - kubectl create secret generic coder-provisioner-psk --from-literal=key1=`` - ``` - -1. Modify your Coder `values.yaml` to include - - ```yaml - provisionerDaemon: - keySecretName: "coder-provisioner-keys" - keySecretKey: "key1" - ``` - -1. Redeploy Coder with the new `values.yaml` to roll out the PSK. You can omit - `--version ` to also upgrade Coder to the latest version. - - ```sh - helm upgrade coder coder-v2/coder \ - --namespace coder \ - --version \ - --values values.yaml + kubectl create secret generic coder-provisioner-psk --from-literal=my-cool-key=`` ``` 1. Create a `provisioner-values.yaml` file for the provisioner daemons Helm @@ -331,13 +315,17 @@ will use in concert with the Helm chart for deploying the Coder server. 
value: "https://coder.example.com" replicaCount: 10 provisionerDaemon: + # NOTE: in older versions of the Helm chart (2.17.0 and below), it is required to set this to an empty string. + pskSecretName: "" keySecretName: "coder-provisioner-keys" - keySecretKey: "key1" + keySecretKey: "my-cool-key" ``` This example creates a deployment of 10 provisioner daemons (for 10 - concurrent builds) with the listed tags. For generic provisioners, remove the - tags. + concurrent builds) authenticating using the above key. The daemons will + authenticate using the provisioner key created in the previous step and + acquire jobs matching the tags specified when the provisioner key was + created. The set of tags is inferred automatically from the provisioner key. > Refer to the > [values.yaml](https://github.com/coder/coder/blob/main/helm/provisioner/values.yaml) diff --git a/helm/provisioner/templates/_coder.tpl b/helm/provisioner/templates/_coder.tpl index 9c2b2dece130f..585393a6bf118 100644 --- a/helm/provisioner/templates/_coder.tpl +++ b/helm/provisioner/templates/_coder.tpl @@ -34,22 +34,23 @@ env: value: "0.0.0.0:2112" {{- if and (empty .Values.provisionerDaemon.pskSecretName) (empty .Values.provisionerDaemon.keySecretName) }} {{ fail "Either provisionerDaemon.pskSecretName or provisionerDaemon.keySecretName must be specified." }} -{{- else if and (.Values.provisionerDaemon.pskSecretName) (.Values.provisionerDaemon.keySecretName) }} -{{ fail "Either provisionerDaemon.pskSecretName or provisionerDaemon.keySecretName must be specified, but not both." 
}} -{{- end }} -{{- if .Values.provisionerDaemon.pskSecretName }} -- name: CODER_PROVISIONER_DAEMON_PSK - valueFrom: - secretKeyRef: - name: {{ .Values.provisionerDaemon.pskSecretName | quote }} - key: psk -{{- end }} -{{- if and .Values.provisionerDaemon.keySecretName .Values.provisionerDaemon.keySecretKey }} +{{- else if and .Values.provisionerDaemon.keySecretName .Values.provisionerDaemon.keySecretKey }} + {{- if and (not (empty .Values.provisionerDaemon.pskSecretName)) (ne .Values.provisionerDaemon.pskSecretName "coder-provisioner-psk") }} + {{ fail "Either provisionerDaemon.pskSecretName or provisionerDaemon.keySecretName must be specified, but not both." }} + {{- else if .Values.provisionerDaemon.tags }} + {{ fail "provisionerDaemon.tags may not be specified with provisionerDaemon.keySecretName." }} + {{- end }} - name: CODER_PROVISIONER_DAEMON_KEY valueFrom: secretKeyRef: name: {{ .Values.provisionerDaemon.keySecretName | quote }} key: {{ .Values.provisionerDaemon.keySecretKey | quote }} +{{- else }} +- name: CODER_PROVISIONER_DAEMON_PSK + valueFrom: + secretKeyRef: + name: {{ .Values.provisionerDaemon.pskSecretName | quote }} + key: psk {{- end }} {{- if include "provisioner.tags" . }} - name: CODER_PROVISIONERD_TAGS diff --git a/helm/provisioner/tests/chart_test.go b/helm/provisioner/tests/chart_test.go index ab6d8445e8f61..4bb54e2d787ed 100644 --- a/helm/provisioner/tests/chart_test.go +++ b/helm/provisioner/tests/chart_test.go @@ -56,6 +56,12 @@ var testCases = []testCase{ name: "provisionerd_key", expectedError: "", }, + // Test explicitly for the workaround where setting provisionerDaemon.pskSecretName="" + // was required to use provisioner keys. 
+ { + name: "provisionerd_key_psk_empty_workaround", + expectedError: "", + }, { name: "provisionerd_psk_and_key", expectedError: `Either provisionerDaemon.pskSecretName or provisionerDaemon.keySecretName must be specified, but not both.`, @@ -64,6 +70,10 @@ var testCases = []testCase{ name: "provisionerd_no_psk_or_key", expectedError: `Either provisionerDaemon.pskSecretName or provisionerDaemon.keySecretName must be specified.`, }, + { + name: "provisionerd_key_tags", + expectedError: `provisionerDaemon.tags may not be specified with provisionerDaemon.keySecretName.`, + }, { name: "extra_templates", expectedError: "", diff --git a/helm/provisioner/tests/testdata/provisionerd_key.golden b/helm/provisioner/tests/testdata/provisionerd_key.golden index c4f33f766df43..c4c23ec6da2a3 100644 --- a/helm/provisioner/tests/testdata/provisionerd_key.golden +++ b/helm/provisioner/tests/testdata/provisionerd_key.golden @@ -112,8 +112,6 @@ spec: secretKeyRef: key: provisionerd-key name: coder-provisionerd-key - - name: CODER_PROVISIONERD_TAGS - value: clusterType=k8s,location=auh - name: CODER_URL value: http://coder.default.svc.cluster.local image: ghcr.io/coder/coder:latest diff --git a/helm/provisioner/tests/testdata/provisionerd_key.yaml b/helm/provisioner/tests/testdata/provisionerd_key.yaml index c5ab331a45078..82f786637ee19 100644 --- a/helm/provisioner/tests/testdata/provisionerd_key.yaml +++ b/helm/provisioner/tests/testdata/provisionerd_key.yaml @@ -2,9 +2,5 @@ coder: image: tag: latest provisionerDaemon: - pskSecretName: "" keySecretName: "coder-provisionerd-key" keySecretKey: "provisionerd-key" - tags: - location: auh - clusterType: k8s diff --git a/helm/provisioner/tests/testdata/provisionerd_key_psk_empty_workaround.golden b/helm/provisioner/tests/testdata/provisionerd_key_psk_empty_workaround.golden new file mode 100644 index 0000000000000..c4c23ec6da2a3 --- /dev/null +++ b/helm/provisioner/tests/testdata/provisionerd_key_psk_empty_workaround.golden @@ -0,0 +1,135 
@@ +--- +# Source: coder-provisioner/templates/coder.yaml +apiVersion: v1 +kind: ServiceAccount +metadata: + annotations: {} + labels: + app.kubernetes.io/instance: release-name + app.kubernetes.io/managed-by: Helm + app.kubernetes.io/name: coder-provisioner + app.kubernetes.io/part-of: coder-provisioner + app.kubernetes.io/version: 0.1.0 + helm.sh/chart: coder-provisioner-0.1.0 + name: coder-provisioner +--- +# Source: coder-provisioner/templates/rbac.yaml +apiVersion: rbac.authorization.k8s.io/v1 +kind: Role +metadata: + name: coder-provisioner-workspace-perms +rules: + - apiGroups: [""] + resources: ["pods"] + verbs: + - create + - delete + - deletecollection + - get + - list + - patch + - update + - watch + - apiGroups: [""] + resources: ["persistentvolumeclaims"] + verbs: + - create + - delete + - deletecollection + - get + - list + - patch + - update + - watch + - apiGroups: + - apps + resources: + - deployments + verbs: + - create + - delete + - deletecollection + - get + - list + - patch + - update + - watch +--- +# Source: coder-provisioner/templates/rbac.yaml +apiVersion: rbac.authorization.k8s.io/v1 +kind: RoleBinding +metadata: + name: "coder-provisioner" +subjects: + - kind: ServiceAccount + name: "coder-provisioner" +roleRef: + apiGroup: rbac.authorization.k8s.io + kind: Role + name: coder-provisioner-workspace-perms +--- +# Source: coder-provisioner/templates/coder.yaml +apiVersion: apps/v1 +kind: Deployment +metadata: + annotations: {} + labels: + app.kubernetes.io/instance: release-name + app.kubernetes.io/managed-by: Helm + app.kubernetes.io/name: coder-provisioner + app.kubernetes.io/part-of: coder-provisioner + app.kubernetes.io/version: 0.1.0 + helm.sh/chart: coder-provisioner-0.1.0 + name: coder-provisioner +spec: + replicas: 1 + selector: + matchLabels: + app.kubernetes.io/instance: release-name + app.kubernetes.io/name: coder-provisioner + template: + metadata: + annotations: {} + labels: + app.kubernetes.io/instance: release-name + 
app.kubernetes.io/managed-by: Helm + app.kubernetes.io/name: coder-provisioner + app.kubernetes.io/part-of: coder-provisioner + app.kubernetes.io/version: 0.1.0 + helm.sh/chart: coder-provisioner-0.1.0 + spec: + containers: + - args: + - provisionerd + - start + command: + - /opt/coder + env: + - name: CODER_PROMETHEUS_ADDRESS + value: 0.0.0.0:2112 + - name: CODER_PROVISIONER_DAEMON_KEY + valueFrom: + secretKeyRef: + key: provisionerd-key + name: coder-provisionerd-key + - name: CODER_URL + value: http://coder.default.svc.cluster.local + image: ghcr.io/coder/coder:latest + imagePullPolicy: IfNotPresent + lifecycle: {} + name: coder + ports: null + resources: {} + securityContext: + allowPrivilegeEscalation: false + readOnlyRootFilesystem: null + runAsGroup: 1000 + runAsNonRoot: true + runAsUser: 1000 + seccompProfile: + type: RuntimeDefault + volumeMounts: [] + restartPolicy: Always + serviceAccountName: coder-provisioner + terminationGracePeriodSeconds: 600 + volumes: [] diff --git a/helm/provisioner/tests/testdata/provisionerd_key_psk_empty_workaround.yaml b/helm/provisioner/tests/testdata/provisionerd_key_psk_empty_workaround.yaml new file mode 100644 index 0000000000000..cfa46974c3e9a --- /dev/null +++ b/helm/provisioner/tests/testdata/provisionerd_key_psk_empty_workaround.yaml @@ -0,0 +1,7 @@ +coder: + image: + tag: latest +provisionerDaemon: + pskSecretName: "" + keySecretName: "coder-provisionerd-key" + keySecretKey: "provisionerd-key" diff --git a/helm/provisioner/tests/testdata/provisionerd_key_tags.yaml b/helm/provisioner/tests/testdata/provisionerd_key_tags.yaml new file mode 100644 index 0000000000000..7cb35f0052918 --- /dev/null +++ b/helm/provisioner/tests/testdata/provisionerd_key_tags.yaml @@ -0,0 +1,9 @@ +coder: + image: + tag: latest +provisionerDaemon: + keySecretName: "coder-provisionerd-key" + keySecretKey: "provisionerd-key" + tags: + location: auh + clusterType: k8s diff --git a/helm/provisioner/tests/testdata/provisionerd_no_psk_or_key.yaml 
b/helm/provisioner/tests/testdata/provisionerd_no_psk_or_key.yaml index dbb0eca812de9..4d883a59fcb06 100644 --- a/helm/provisioner/tests/testdata/provisionerd_no_psk_or_key.yaml +++ b/helm/provisioner/tests/testdata/provisionerd_no_psk_or_key.yaml @@ -4,6 +4,3 @@ coder: provisionerDaemon: pskSecretName: "" keySecretName: "" - tags: - location: auh - clusterType: k8s diff --git a/helm/provisioner/tests/testdata/provisionerd_psk.golden b/helm/provisioner/tests/testdata/provisionerd_psk.golden index b641ee0db37cb..c1d9421c3c9dd 100644 --- a/helm/provisioner/tests/testdata/provisionerd_psk.golden +++ b/helm/provisioner/tests/testdata/provisionerd_psk.golden @@ -111,7 +111,7 @@ spec: valueFrom: secretKeyRef: key: psk - name: coder-provisionerd-psk + name: not-the-default-coder-provisioner-psk - name: CODER_PROVISIONERD_TAGS value: clusterType=k8s,location=auh - name: CODER_URL diff --git a/helm/provisioner/tests/testdata/provisionerd_psk.yaml b/helm/provisioner/tests/testdata/provisionerd_psk.yaml index f891b007db539..c53958d4b856b 100644 --- a/helm/provisioner/tests/testdata/provisionerd_psk.yaml +++ b/helm/provisioner/tests/testdata/provisionerd_psk.yaml @@ -2,7 +2,7 @@ coder: image: tag: latest provisionerDaemon: - pskSecretName: "coder-provisionerd-psk" + pskSecretName: "not-the-default-coder-provisioner-psk" tags: location: auh clusterType: k8s diff --git a/helm/provisioner/tests/testdata/provisionerd_psk_and_key.yaml b/helm/provisioner/tests/testdata/provisionerd_psk_and_key.yaml index 530f48807edff..d2da1c370d422 100644 --- a/helm/provisioner/tests/testdata/provisionerd_psk_and_key.yaml +++ b/helm/provisioner/tests/testdata/provisionerd_psk_and_key.yaml @@ -2,7 +2,7 @@ coder: image: tag: latest provisionerDaemon: - pskSecretName: "coder-provisionerd-psk" + pskSecretName: "not-the-default-coder-provisioner-psk" keySecretName: "coder-provisionerd-key" keySecretKey: "provisionerd-key" tags: diff --git a/helm/provisioner/values.yaml b/helm/provisioner/values.yaml 
index 446a4605db677..869ddc876c78b 100644 --- a/helm/provisioner/values.yaml +++ b/helm/provisioner/values.yaml @@ -204,14 +204,23 @@ provisionerDaemon: # provisionerDaemon.keySecretName -- The name of the Kubernetes # secret that contains a provisioner key to use to authenticate with Coder. # See: https://coder.com/docs/admin/provisioners#authentication + # NOTE: it is not permitted to specify both provisionerDaemon.keySecretName + # and provisionerDaemon.pskSecretName. An exception is made for the purposes + # of backwards-compatibility: if provisionerDaemon.pskSecretName is unchanged + # from the default value and provisionerDaemon.keySecretName is set, then + # provisionerDaemon.keySecretName and provisionerDaemon.keySecretKey will take + # precedence over provisionerDaemon.pskSecretName. keySecretName: "" # provisionerDaemon.keySecretKey -- The key of the Kubernetes # secret specified in provisionerDaemon.keySecretName that contains # the provisioner key. Defaults to "key". keySecretKey: "key" - # provisionerDaemon.tags -- Tags to filter provisioner jobs by. + # provisionerDaemon.tags -- If using a PSK, specify the set of provisioner + # job tags for which this provisioner daemon is responsible. # See: https://coder.com/docs/admin/provisioners#provisioner-tags + # NOTE: it is not permitted to specify both provisionerDaemon.tags and + # provisionerDaemon.keySecretName. tags: {} # location: usa