diff --git a/.github/scripts/check_registry_site_health.sh b/.github/scripts/check_registry_site_health.sh
index d1bdea9..0ad742e 100755
--- a/.github/scripts/check_registry_site_health.sh
+++ b/.github/scripts/check_registry_site_health.sh
@@ -4,23 +4,23 @@ set -u
VERBOSE="${VERBOSE:-0}"
if [[ "${VERBOSE}" -ne "0" ]]; then
- set -x
+ set -x
fi
# List of required environment variables
required_vars=(
- "INSTATUS_API_KEY"
- "INSTATUS_PAGE_ID"
- "INSTATUS_COMPONENT_ID"
- "VERCEL_API_KEY"
+ "INSTATUS_API_KEY"
+ "INSTATUS_PAGE_ID"
+ "INSTATUS_COMPONENT_ID"
+ "VERCEL_API_KEY"
)
# Check if each required variable is set
for var in "${required_vars[@]}"; do
- if [[ -z "${!var:-}" ]]; then
- echo "Error: Environment variable '$var' is not set."
- exit 1
- fi
+ if [[ -z "${!var:-}" ]]; then
+ echo "Error: Environment variable '$var' is not set."
+ exit 1
+ fi
done
REGISTRY_BASE_URL="${REGISTRY_BASE_URL:-https://registry.coder.com}"
@@ -31,38 +31,38 @@ declare -a failures=()
# Collect all module directories containing a main.tf file
for path in $(find . -maxdepth 2 -not -path '*/.*' -type f -name main.tf | cut -d '/' -f 2 | sort -u); do
- modules+=("${path}")
+ modules+=("${path}")
done
echo "Checking modules: ${modules[*]}"
# Function to update the component status on Instatus
update_component_status() {
- local component_status=$1
- # see https://instatus.com/help/api/components
- (curl -X PUT "https://api.instatus.com/v1/$INSTATUS_PAGE_ID/components/$INSTATUS_COMPONENT_ID" \
- -H "Authorization: Bearer $INSTATUS_API_KEY" \
- -H "Content-Type: application/json" \
- -d "{\"status\": \"$component_status\"}")
+ local component_status=$1
+ # see https://instatus.com/help/api/components
+ (curl -X PUT "https://api.instatus.com/v1/$INSTATUS_PAGE_ID/components/$INSTATUS_COMPONENT_ID" \
+ -H "Authorization: Bearer $INSTATUS_API_KEY" \
+ -H "Content-Type: application/json" \
+ -d "{\"status\": \"$component_status\"}")
}
# Function to create an incident
create_incident() {
- local incident_name="Degraded Service"
- local message="The following modules are experiencing issues:\n"
- for i in "${!failures[@]}"; do
- message+="$((i + 1)). ${failures[$i]}\n"
- done
-
- component_status="PARTIALOUTAGE"
- if ((${#failures[@]} == ${#modules[@]})); then
- component_status="MAJOROUTAGE"
- fi
- # see https://instatus.com/help/api/incidents
- incident_id=$(curl -s -X POST "https://api.instatus.com/v1/$INSTATUS_PAGE_ID/incidents" \
- -H "Authorization: Bearer $INSTATUS_API_KEY" \
- -H "Content-Type: application/json" \
- -d "{
+ local incident_name="Degraded Service"
+ local message="The following modules are experiencing issues:\n"
+ for i in "${!failures[@]}"; do
+ message+="$((i + 1)). ${failures[$i]}\n"
+ done
+
+ component_status="PARTIALOUTAGE"
+ if ((${#failures[@]} == ${#modules[@]})); then
+ component_status="MAJOROUTAGE"
+ fi
+ # see https://instatus.com/help/api/incidents
+ incident_id=$(curl -s -X POST "https://api.instatus.com/v1/$INSTATUS_PAGE_ID/incidents" \
+ -H "Authorization: Bearer $INSTATUS_API_KEY" \
+ -H "Content-Type: application/json" \
+ -d "{
\"name\": \"$incident_name\",
\"message\": \"$message\",
\"components\": [\"$INSTATUS_COMPONENT_ID\"],
@@ -76,129 +76,129 @@ create_incident() {
]
}" | jq -r '.id')
- echo "Created incident with ID: $incident_id"
+ echo "Created incident with ID: $incident_id"
}
# Function to check for existing unresolved incidents
check_existing_incident() {
- # Fetch the latest incidents with status not equal to "RESOLVED"
- local unresolved_incidents=$(curl -s -X GET "https://api.instatus.com/v1/$INSTATUS_PAGE_ID/incidents" \
- -H "Authorization: Bearer $INSTATUS_API_KEY" \
- -H "Content-Type: application/json" | jq -r '.incidents[] | select(.status != "RESOLVED") | .id')
-
- if [[ -n "$unresolved_incidents" ]]; then
- echo "Unresolved incidents found: $unresolved_incidents"
- return 0 # Indicate that there are unresolved incidents
- else
- echo "No unresolved incidents found."
- return 1 # Indicate that no unresolved incidents exist
- fi
+ # Fetch the latest incidents with status not equal to "RESOLVED"
+ local unresolved_incidents=$(curl -s -X GET "https://api.instatus.com/v1/$INSTATUS_PAGE_ID/incidents" \
+ -H "Authorization: Bearer $INSTATUS_API_KEY" \
+ -H "Content-Type: application/json" | jq -r '.incidents[] | select(.status != "RESOLVED") | .id')
+
+ if [[ -n "$unresolved_incidents" ]]; then
+ echo "Unresolved incidents found: $unresolved_incidents"
+ return 0 # Indicate that there are unresolved incidents
+ else
+ echo "No unresolved incidents found."
+ return 1 # Indicate that no unresolved incidents exist
+ fi
}
force_redeploy_registry() {
- # These are not secret values; safe to just expose directly in script
- local VERCEL_TEAM_SLUG="codercom"
- local VERCEL_TEAM_ID="team_tGkWfhEGGelkkqUUm9nXq17r"
- local VERCEL_APP="registry"
-
- local latest_res
- latest_res=$(
- curl "https://api.vercel.com/v6/deployments?app=$VERCEL_APP&limit=1&slug=$VERCEL_TEAM_SLUG&teamId=$VERCEL_TEAM_ID&target=production&state=BUILDING,INITIALIZING,QUEUED,READY" \
- --fail \
- --silent \
- --header "Authorization: Bearer $VERCEL_API_KEY" \
- --header "Content-Type: application/json"
- )
-
- # If we have zero deployments, something is VERY wrong. Make the whole
- # script exit with a non-zero status code
- local latest_id
- latest_id=$(echo "${latest_res}" | jq -r '.deployments[0].uid')
- if [[ "${latest_id}" = "null" ]]; then
- echo "Unable to pull any previous deployments for redeployment"
- echo "Please redeploy the latest deployment manually in Vercel."
- echo "https://vercel.com/codercom/registry/deployments"
- exit 1
- fi
-
- local latest_date_ts_seconds
- latest_date_ts_seconds=$(echo "${latest_res}" | jq -r '.deployments[0].createdAt/1000|floor')
- local current_date_ts_seconds
- current_date_ts_seconds="$(date +%s)"
- local max_redeploy_interval_seconds=7200 # 2 hours
- if ((current_date_ts_seconds - latest_date_ts_seconds < max_redeploy_interval_seconds)); then
- echo "The registry was deployed less than 2 hours ago."
- echo "Not automatically re-deploying the regitstry."
- echo "A human reading this message should decide if a redeployment is necessary."
- echo "Please check the Vercel dashboard for more information."
- echo "https://vercel.com/codercom/registry/deployments"
- exit 1
- fi
-
- local latest_deployment_state
- latest_deployment_state="$(echo "${latest_res}" | jq -r '.deployments[0].state')"
- if [[ "${latest_deployment_state}" != "READY" ]]; then
- echo "Last deployment was not in READY state. Skipping redeployment."
- echo "A human reading this message should decide if a redeployment is necessary."
- echo "Please check the Vercel dashboard for more information."
- echo "https://vercel.com/codercom/registry/deployments"
- exit 1
- fi
-
- echo "============================================================="
- echo "!!! Redeploying registry with deployment ID: ${latest_id} !!!"
- echo "============================================================="
-
- if ! curl -X POST "https://api.vercel.com/v13/deployments?forceNew=1&skipAutoDetectionConfirmation=1&slug=$VERCEL_TEAM_SLUG&teamId=$VERCEL_TEAM_ID" \
- --fail \
- --header "Authorization: Bearer $VERCEL_API_KEY" \
- --header "Content-Type: application/json" \
- --data-raw "{ \"deploymentId\": \"${latest_id}\", \"name\": \"${VERCEL_APP}\", \"target\": \"production\" }"; then
- echo "DEPLOYMENT FAILED! Please check the Vercel dashboard for more information."
- echo "https://vercel.com/codercom/registry/deployments"
- exit 1
- fi
+ # These are not secret values; safe to just expose directly in script
+ local VERCEL_TEAM_SLUG="codercom"
+ local VERCEL_TEAM_ID="team_tGkWfhEGGelkkqUUm9nXq17r"
+ local VERCEL_APP="registry"
+
+ local latest_res
+ latest_res=$(
+ curl "https://api.vercel.com/v6/deployments?app=$VERCEL_APP&limit=1&slug=$VERCEL_TEAM_SLUG&teamId=$VERCEL_TEAM_ID&target=production&state=BUILDING,INITIALIZING,QUEUED,READY" \
+ --fail \
+ --silent \
+ --header "Authorization: Bearer $VERCEL_API_KEY" \
+ --header "Content-Type: application/json"
+ )
+
+ # If we have zero deployments, something is VERY wrong. Make the whole
+ # script exit with a non-zero status code
+ local latest_id
+ latest_id=$(echo "${latest_res}" | jq -r '.deployments[0].uid')
+ if [[ "${latest_id}" = "null" ]]; then
+ echo "Unable to pull any previous deployments for redeployment"
+ echo "Please redeploy the latest deployment manually in Vercel."
+ echo "https://vercel.com/codercom/registry/deployments"
+ exit 1
+ fi
+
+ local latest_date_ts_seconds
+ latest_date_ts_seconds=$(echo "${latest_res}" | jq -r '.deployments[0].createdAt/1000|floor')
+ local current_date_ts_seconds
+ current_date_ts_seconds="$(date +%s)"
+ local max_redeploy_interval_seconds=7200 # 2 hours
+ if ((current_date_ts_seconds - latest_date_ts_seconds < max_redeploy_interval_seconds)); then
+ echo "The registry was deployed less than 2 hours ago."
+    echo "Not automatically re-deploying the registry."
+ echo "A human reading this message should decide if a redeployment is necessary."
+ echo "Please check the Vercel dashboard for more information."
+ echo "https://vercel.com/codercom/registry/deployments"
+ exit 1
+ fi
+
+ local latest_deployment_state
+ latest_deployment_state="$(echo "${latest_res}" | jq -r '.deployments[0].state')"
+ if [[ "${latest_deployment_state}" != "READY" ]]; then
+ echo "Last deployment was not in READY state. Skipping redeployment."
+ echo "A human reading this message should decide if a redeployment is necessary."
+ echo "Please check the Vercel dashboard for more information."
+ echo "https://vercel.com/codercom/registry/deployments"
+ exit 1
+ fi
+
+ echo "============================================================="
+ echo "!!! Redeploying registry with deployment ID: ${latest_id} !!!"
+ echo "============================================================="
+
+ if ! curl -X POST "https://api.vercel.com/v13/deployments?forceNew=1&skipAutoDetectionConfirmation=1&slug=$VERCEL_TEAM_SLUG&teamId=$VERCEL_TEAM_ID" \
+ --fail \
+ --header "Authorization: Bearer $VERCEL_API_KEY" \
+ --header "Content-Type: application/json" \
+ --data-raw "{ \"deploymentId\": \"${latest_id}\", \"name\": \"${VERCEL_APP}\", \"target\": \"production\" }"; then
+ echo "DEPLOYMENT FAILED! Please check the Vercel dashboard for more information."
+ echo "https://vercel.com/codercom/registry/deployments"
+ exit 1
+ fi
}
# Check each module's accessibility
for module in "${modules[@]}"; do
- # Trim leading/trailing whitespace from module name
- module=$(echo "${module}" | xargs)
- url="${REGISTRY_BASE_URL}/modules/${module}"
- printf "=== Checking module %s at %s\n" "${module}" "${url}"
- status_code=$(curl --output /dev/null --head --silent --fail --location "${url}" --retry 3 --write-out "%{http_code}")
- if ((status_code != 200)); then
- printf "==> FAIL(%s)\n" "${status_code}"
- status=1
- failures+=("${module}")
- else
- printf "==> OK(%s)\n" "${status_code}"
- fi
+ # Trim leading/trailing whitespace from module name
+ module=$(echo "${module}" | xargs)
+ url="${REGISTRY_BASE_URL}/modules/${module}"
+ printf "=== Checking module %s at %s\n" "${module}" "${url}"
+ status_code=$(curl --output /dev/null --head --silent --fail --location "${url}" --retry 3 --write-out "%{http_code}")
+ if ((status_code != 200)); then
+ printf "==> FAIL(%s)\n" "${status_code}"
+ status=1
+ failures+=("${module}")
+ else
+ printf "==> OK(%s)\n" "${status_code}"
+ fi
done
# Determine overall status and update Instatus component
if ((status == 0)); then
- echo "All modules are operational."
- # set to
- update_component_status "OPERATIONAL"
+ echo "All modules are operational."
+  # Set the Instatus component back to operational
+ update_component_status "OPERATIONAL"
else
- echo "The following modules have issues: ${failures[*]}"
- # check if all modules are down
- if ((${#failures[@]} == ${#modules[@]})); then
- update_component_status "MAJOROUTAGE"
- else
- update_component_status "PARTIALOUTAGE"
- fi
-
- # Check if there is an existing incident before creating a new one
- if ! check_existing_incident; then
- create_incident
- fi
-
- # If a module is down, force a reployment to try getting things back online
- # ASAP
- # EDIT: registry.coder.com is no longer hosted on vercel
- #force_redeploy_registry
+ echo "The following modules have issues: ${failures[*]}"
+ # check if all modules are down
+ if ((${#failures[@]} == ${#modules[@]})); then
+ update_component_status "MAJOROUTAGE"
+ else
+ update_component_status "PARTIALOUTAGE"
+ fi
+
+ # Check if there is an existing incident before creating a new one
+ if ! check_existing_incident; then
+ create_incident
+ fi
+
+  # If a module is down, force a redeployment to try getting things back online
+ # ASAP
+ # EDIT: registry.coder.com is no longer hosted on vercel
+ #force_redeploy_registry
fi
exit "${status}"
diff --git a/.github/typos.toml b/.github/typos.toml
new file mode 100644
index 0000000..5889c7d
--- /dev/null
+++ b/.github/typos.toml
@@ -0,0 +1,4 @@
+[default.extend-words]
+muc = "muc" # For Munich location code
+Hashi = "Hashi"
+HashiCorp = "HashiCorp"
\ No newline at end of file
diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml
index 159e8c9..bbbdc06 100644
--- a/.github/workflows/ci.yaml
+++ b/.github/workflows/ci.yaml
@@ -7,20 +7,8 @@ concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: ${{ github.ref != 'refs/heads/main' }}
jobs:
- validate-readme-files:
- runs-on: ubuntu-latest
- steps:
- - name: Check out code
- uses: actions/checkout@v4
- - name: Set up Go
- uses: actions/setup-go@v5
- with:
- go-version: "1.23.2"
- - name: Validate contributors
- run: go build ./scripts/contributors && ./contributors
- - name: Remove build file artifact
- run: rm ./contributors
test-terraform:
+ name: Validate Terraform output
runs-on: ubuntu-latest
steps:
- name: Check out code
@@ -38,5 +26,45 @@ jobs:
bun-version: latest
- name: Install dependencies
run: bun install
- - name: Run tests
+ - name: Run TypeScript tests
run: bun test
+ - name: Run Terraform Validate
+ run: bun terraform-validate
+ validate-style:
+ name: Check for typos and unformatted code
+ runs-on: ubuntu-latest
+ steps:
+ - name: Check out code
+ uses: actions/checkout@v4
+ - name: Install Bun
+ uses: oven-sh/setup-bun@v2
+ with:
+ bun-version: latest
+ # Need Terraform for its formatter
+ - name: Install Terraform
+ uses: coder/coder/.github/actions/setup-tf@main
+ - name: Install dependencies
+ run: bun install
+ - name: Validate formatting
+ run: bun fmt:ci
+ - name: Check for typos
+ uses: crate-ci/typos@v1.32.0
+ with:
+ config: .github/typos.toml
+ validate-readme-files:
+ name: Validate README files
+ runs-on: ubuntu-latest
+ # We want to do some basic README checks first before we try analyzing the
+ # contents
+ needs: validate-style
+ steps:
+ - name: Check out code
+ uses: actions/checkout@v4
+ - name: Set up Go
+ uses: actions/setup-go@v5
+ with:
+ go-version: "1.23.2"
+ - name: Validate contributors
+ run: go build ./cmd/readmevalidation && ./readmevalidation
+ - name: Remove build file artifact
+ run: rm ./readmevalidation
diff --git a/.gitignore b/.gitignore
index 6ee570e..157c642 100644
--- a/.gitignore
+++ b/.gitignore
@@ -135,8 +135,8 @@ dist
.yarn/install-state.gz
.pnp.*
-# Script output
-/contributors
+# Things needed for CI
+/readmevalidation
# Terraform files generated during testing
.terraform*
diff --git a/.icons/devcontainers.svg b/.icons/devcontainers.svg
new file mode 100644
index 0000000..fb0443b
--- /dev/null
+++ b/.icons/devcontainers.svg
@@ -0,0 +1,2 @@
+
+
\ No newline at end of file
diff --git a/.icons/windsurf.svg b/.icons/windsurf.svg
new file mode 100644
index 0000000..2e4e4e4
--- /dev/null
+++ b/.icons/windsurf.svg
@@ -0,0 +1,3 @@
+
diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md
new file mode 100644
index 0000000..81bd2b7
--- /dev/null
+++ b/CODE_OF_CONDUCT.md
@@ -0,0 +1,3 @@
+# Code of Conduct
+
+[Please see our code of conduct on the official Coder website](https://coder.com/docs/contributing/CODE_OF_CONDUCT)
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
new file mode 100644
index 0000000..3b624b7
--- /dev/null
+++ b/CONTRIBUTING.md
@@ -0,0 +1,285 @@
+# Contributing
+
+## Getting started
+
+This repo uses two main runtimes to verify the correctness of a module/template before it is published:
+
+- [Bun](https://bun.sh/) – Used to run tests for each module/template to validate overall functionality and correctness of Terraform output
+- [Go](https://go.dev/) – Used to validate all README files in the directory. The README content is used to populate [the Registry website](https://registry.coder.com).
+
+### Installing Bun
+
+To install Bun, you can run this command on Linux/MacOS:
+
+```shell
+curl -fsSL https://bun.sh/install | bash
+```
+
+Or this command on Windows:
+
+```shell
+powershell -c "irm bun.sh/install.ps1 | iex"
+```
+
+Follow the instructions to ensure that Bun is available globally. Once Bun is installed, install all necessary dependencies from the root of the repo:
+
+Via NPM:
+
+```shell
+npm i
+```
+
+Via PNPM:
+
+```shell
+pnpm i
+```
+
+This repo does not support Yarn.
+
+### Installing Go (optional)
+
+This step can be skipped if you are not working on any of the README validation logic. The validation will still run as part of CI.
+
+[Navigate to the official Go Installation page](https://go.dev/doc/install), and install the correct version for your operating system.
+
+Once Go has been installed, verify the installation via:
+
+```shell
+go version
+```
+
+## Namespaces
+
+All Coder resources are scoped to namespaces placed at the top level of the `/registry` directory. Any modules or templates must be placed inside a namespace to be accepted as a contribution. For example, all modules created by CoderEmployeeBob would be placed under `/registry/coderemployeebob/modules`, with a subdirectory for each individual module the user has published.
+
+If a namespace is already taken, you will need to create a different, unique namespace, but will still be able to choose any display name. (The display name is shown in the Registry website. More info below.)
+
+### Namespace (contributor profile) README files
+
+More information about contributor profile README files can be found below.
+
+### Images
+
+Any images needed for either the main namespace directory or a module/template can be placed in a relative `/images` directory at the top of the namespace directory. (e.g., CoderEmployeeBob can have a `/registry/coderemployeebob/images` directory, that can be referenced by the main README file, as well as a README file in `/registry/coderemployeebob/modules/custom_module/README.md`.) This is to minimize the risk of file name conflicts between different users as they add images to help illustrate parts of their README files.
+
+## Coder modules
+
+### Adding a new module
+
+> [!WARNING]
+> These instructions cannot be followed just yet; the script referenced will be made available shortly. Contributors looking to add modules early will need to create all directories manually.
+
+Once Bun (and possibly Go) have been installed, clone the Coder Registry repository. From there, you can run this script to make it easier to start contributing a new module or template:
+
+```shell
+./new.sh USER_NAMESPACE/NAME_OF_NEW_MODULE
+```
+
+You can also create a module file manually by creating the necessary files and directories.
+
+### The composition of a Coder module
+
+Each Coder Module must contain the following files:
+
+- A `main.tf` file that defines the main Terraform-based functionality
+- A `main.test.ts` file that is used to validate that the module works as expected
+- A `README.md` file containing required information (listed below)
+
+You are free to include any additional files in the module, as needed by the module. For example, the [Windows RDP module](https://github.com/coder/registry/tree/main/registry/coder/modules/windows-rdp) contains additional files for injecting specific functionality into a Coder Workspace.
+
+> [!NOTE]
+> Some legacy modules do not have test files defined just yet. This will be addressed soon.
+
+### The `main.tf` file
+
+This file defines all core Terraform functionality, to be mixed into your Coder workspaces. More information about [Coder's use of Terraform can be found here](https://coder.com/docs/admin/templates/extending-templates/modules), and [general information about the Terraform language can be found in the official documentation](https://developer.hashicorp.com/terraform/docs).
+
+### The structure of a module README
+
+Validation criteria for module README files is listed below.
+
+### Testing a Module
+
+> [!IMPORTANT]
+> It is the responsibility of the module author to implement tests for every new module they wish to contribute. It is expected the author has tested the module locally before opening a PR. Feel free to reference existing test files to get an idea for how to set them up.
+
+All general-purpose test helpers for validating Terraform can be found in the top-level `/testing` directory. The helpers run `terraform apply` on modules that use variables, testing the script output against containers.
+
+When writing a test file, you can import the test utilities via the `~test` import alias:
+
+```ts
+// This works regardless of how deeply-nested your test file is in the file
+// structure
+import {
+ runTerraformApply,
+ runTerraformInit,
+ testRequiredVariables,
+} from "~test";
+```
+
+> [!NOTE]
+> The testing suite must be able to run docker containers with the `--network=host` flag. This typically requires running the tests on Linux as this flag does not apply to Docker Desktop for MacOS or Windows. MacOS users can work around this by using something like [colima](https://github.com/abiosoft/colima) or [Orbstack](https://orbstack.dev/) instead of Docker Desktop.
+
+#### Running tests
+
+You can run all tests by running this command from the root of the Registry directory:
+
+```shell
+bun test
+```
+
+Note that running _all_ tests can take some time, so you likely don't want to be running this command as part of your core development loop.
+
+To run specific tests, you can use the `-t` flag, which accepts a filepath regex:
+
+```shell
+bun test -t '<module name>'
+```
+
+To ensure that the module runs predictably in local development, you can update the Terraform source as follows:
+
+```tf
+module "example" {
+ # You may need to remove the 'version' field, it is incompatible with some sources.
+  source = "git::https://github.com/<USERNAME>/<REPOSITORY>.git//<MODULE_PATH>?ref=<BRANCH>"
+}
+```
+
+## Updating README files
+
+This repo uses Go to validate each README file. If you are working with the README files at all (i.e., creating them, modifying them), it is strongly recommended that you install Go (installation instructions mentioned above), so that the files can be validated locally.
+
+### Validating all README files
+
+To validate all README files throughout the entire repo, you can run the following:
+
+```shell
+go build ./cmd/readmevalidation && ./readmevalidation
+```
+
+The resulting binary is already part of the `.gitignore` file, but you can remove it with:
+
+```shell
+rm ./readmevalidation
+```
+
+### README validation criteria
+
+The following criteria exist for two reasons:
+
+1. Content accessibility
+2. Having content be designed in a way that's easy for the Registry site build step to use
+
+#### General README requirements
+
+- There must be a frontmatter section.
+- There must be exactly one h1 header, and it must be at the very top, directly below the frontmatter.
+- The README body (if it exists) must start with an h1 header. No other content (including GitHub-Flavored Markdown alerts) is allowed to be placed above it.
+- When increasing the level of a header, the header's level must be incremented by one each time.
+- Any `.hcl` code snippets must be labeled as `.tf` snippets instead
+
+ ```txt
+ \`\`\`tf
+ Content
+ \`\`\`
+ ```
+
+#### Namespace (contributor profile) criteria
+
+In addition to the general criteria, all README files must have the following:
+
+- Frontmatter metadata with support for the following fields:
+
+ - `display_name` (required string) – The name to use when displaying your user profile in the Coder Registry site.
+ - `bio` (optional string) – A short description of who you are.
+ - `github` (optional string) – Your GitHub handle.
+ - `avatar_url` (optional string) – A relative/absolute URL pointing to your avatar for the Registry site. It is strongly recommended that you commit avatar images to this repo and reference them via a relative URL.
+ - `linkedin` (optional string) – A URL pointing to your LinkedIn page.
+ - `support_email` (optional string) – An email for users to reach you at if they need help with a published module/template.
+ - `status` (string union) – If defined, this must be one of `"community"`, `"partner"`, or `"official"`. `"community"` should be used for the majority of external contributions. `"partner"` is for companies who have a formal business partnership with Coder. `"official"` should be used only by Coder employees.
+
+- The README body (the content that goes directly below the frontmatter) is allowed to be empty, but if it isn't, it must follow all the rules above.
+
+You are free to customize the body of a contributor profile however you like, adding any number of images or information. Its content will never be rendered in the Registry website.
+
+Additional information can be placed in the README file below the content listed above, using any number of headers.
+
+Additional image/video assets can be placed in the same user namespace directory where that user's main content lives.
+
+#### Module criteria
+
+In addition to the general criteria, all README files must have the following:
+
+- Frontmatter that describes metadata for the module:
+ - `display_name` (required string) – This is the name displayed on the Coder Registry website
+ - `description` (required string) – A short description of the module, which is displayed on the Registry website
+ - `icon` (required string) – A relative/absolute URL pointing to the icon to display for the module in the Coder Registry website.
+ - `verified` (optional boolean) – Indicates whether the module has been officially verified by Coder. Please do not set this without approval from a Coder employee.
+ - `tags` (required string array) – A list of metadata tags to describe the module. Used in the Registry site for search and navigation functionality.
+ - `maintainer_github` (deprecated string) – The name of the creator of the module. This field exists for backwards compatibility with previous versions of the Registry, but going forward, the value will be inferred from the namespace directory.
+ - `partner_github` (deprecated string) - The name of any additional creators for a module. This field exists for backwards compatibility with previous versions of the Registry, but should not ever be used going forward.
+- The following content directly under the h1 header (without another header between them):
+
+ - A description of what the module does
+ - A Terraform snippet for letting other users import the functionality
+
+ ```tf
+ module "cursor" {
+ count = data.coder_workspace.me.start_count
+ source = "registry.coder.com/modules/cursor/coder"
+ version = "1.0.19"
+ agent_id = coder_agent.example.id
+ }
+ ```
+
+Additional information can be placed in the README file below the content listed above, using any number of headers.
+
+Additional image/video assets can be placed in one of two places:
+
+1. In the same user namespace directory where that user's main content lives
+2. If the image is an icon, it can be placed in the top-level `.icons` directory (this is done because a lot of modules will be based off the same products)
+
+## Releases
+
+The release process is automated with these steps:
+
+### 1. Create and merge a new PR
+
+- Create a PR with your module changes
+- Get your PR reviewed, approved, and merged into the `main` branch
+
+### 2. Prepare Release (Maintainer Task)
+
+After merging to `main`, a maintainer will:
+
+- View all modules and their current versions:
+
+ ```shell
+ ./release.sh --list
+ ```
+
+- Determine the next version number based on changes:
+
+ - **Patch version** (1.2.3 → 1.2.4): Bug fixes
+ - **Minor version** (1.2.3 → 1.3.0): New features, adding inputs, deprecating inputs
+ - **Major version** (1.2.3 → 2.0.0): Breaking changes (removing inputs, changing input types)
+
+- Create and push an annotated tag:
+
+ ```shell
+ # Fetch latest changes
+ git fetch origin
+
+ # Create and push tag
+ ./release.sh module-name 1.2.3 --push
+ ```
+
+ The tag format will be: `release/module-name/v1.2.3`
+
+### 3. Publishing to Coder Registry
+
+Our automated processes will handle publishing new data to [registry.coder.com](https://registry.coder.com).
+
+> [!NOTE]
+> Some data in registry.coder.com is fetched on demand from the [coder/modules](https://github.com/coder/modules) repo's `main` branch. This data should update almost immediately after a release, while other changes will take some time to propagate.
diff --git a/README.md b/README.md
index 58621ba..1e44ad9 100644
--- a/README.md
+++ b/README.md
@@ -1,3 +1,50 @@
-# hub
+# Coder Registry
-Publish Coder modules and templates for other developers to use.
+[Registry Site](https://registry.coder.com) • [Coder OSS](https://github.com/coder/coder) • [Coder Docs](https://www.coder.com/docs) • [Official Discord](https://discord.gg/coder)
+
+[](https://github.com/coder/registry/actions/workflows/check_registry_site_health.yaml)
+
+Coder Registry is a community-driven platform for extending your Coder workspaces. Publish reusable Terraform as Coder Modules for users all over the world.
+
+> [!NOTE]
+> The Coder Registry repo will be updated to support Coder Templates in the coming weeks. You can currently find all official templates in the official coder/coder repo, [under the `examples/templates` directory](https://github.com/coder/coder/tree/main/examples/templates).
+
+## Overview
+
+Coder is built on HashiCorp's open-source Terraform language to provide developers an easy, declarative way to define the infrastructure for their remote development environments. Coder-flavored versions of Terraform allow you to mix in reusable Terraform snippets to add integrations with other popular development tools, such as JetBrains, Cursor, or Visual Studio Code.
+
+Simply add the correct import snippet, along with any data dependencies, and your workspace can start using the new functionality immediately.
+
+
+
+More information [about Coder Modules can be found here](https://coder.com/docs/admin/templates/extending-templates/modules), while more information [about Coder Templates can be found here](https://coder.com/docs/admin/templates/creating-templates).
+
+## Getting started
+
+The easiest way to discover new modules and templates is by visiting [the official Coder Registry website](https://registry.coder.com/). The website is a full mirror of the Coder Registry repo, and it is where .tar versions of the various resources can be downloaded from, for use within your Coder deployment.
+
+Note that while Coder has a baseline set of requirements for allowing an external PR to be published, Coder cannot vouch for the validity or functionality of a resource until that resource has been flagged with the `verified` status. [All modules under the Coder namespace](https://github.com/coder/registry/tree/main/registry/coder) are automatically verified.
+
+### Getting started with modules
+
+To get started with a module, navigate to that module's page in either the registry site, or the main repo:
+
+- [The Cursor repo directory](https://github.com/coder/registry/tree/main/registry/coder/modules/cursor)
+- [The Cursor module page on the main website](https://registry.coder.com/modules/cursor)
+
+In both cases, the main README contains a Terraform snippet for integrating the module into your workspace. The snippet for Cursor looks like this:
+
+```tf
+module "cursor" {
+ count = data.coder_workspace.me.start_count
+ source = "registry.coder.com/modules/cursor/coder"
+ version = "1.0.19"
+ agent_id = coder_agent.example.id
+}
+```
+
+Simply include that snippet inside your Coder template, defining any data dependencies referenced, and the next time you create a new workspace, the functionality will be ready for you to use.
+
+## Contributing
+
+We are always accepting new contributions. [Please see our contributing guide for more information.](./CONTRIBUTING.md)
diff --git a/SECURITY.md b/SECURITY.md
new file mode 100644
index 0000000..313692d
--- /dev/null
+++ b/SECURITY.md
@@ -0,0 +1,3 @@
+# Security
+
+[Please see our security policy on the official Coder website](https://coder.com/security/policy)
diff --git a/cmd/readmevalidation/coderresources.go b/cmd/readmevalidation/coderresources.go
new file mode 100644
index 0000000..98a953c
--- /dev/null
+++ b/cmd/readmevalidation/coderresources.go
@@ -0,0 +1,354 @@
+package main
+
+import (
+ "bufio"
+ "errors"
+ "fmt"
+ "log"
+ "net/url"
+ "os"
+ "path"
+ "regexp"
+ "slices"
+ "strings"
+
+ "gopkg.in/yaml.v3"
+)
+
// supportedResourceTypes is the set of Terraform resource categories that the
// validation logic knows how to process. Used both for input validation and
// for locating resource directories on disk.
var supportedResourceTypes = []string{"modules", "templates"}

// coderResourceFrontmatter describes the YAML metadata at the top of a Coder
// resource README file.
type coderResourceFrontmatter struct {
	Description string   `yaml:"description"`
	IconURL     string   `yaml:"icon"`
	DisplayName *string  `yaml:"display_name"`
	Verified    *bool    `yaml:"verified"`
	Tags        []string `yaml:"tags"`
}

// coderResourceReadme represents a README describing a Terraform resource used
// to help create Coder workspaces. As of 2025-04-15, this encapsulates both
// Coder Modules and Coder Templates
type coderResourceReadme struct {
	resourceType string
	filePath     string
	body         string
	frontmatter  coderResourceFrontmatter
}
+
// validateCoderResourceDisplayName checks that an explicitly-provided
// display_name frontmatter value is not an empty string. A nil value is
// allowed because the field is optional.
func validateCoderResourceDisplayName(displayName *string) error {
	if displayName == nil {
		return nil
	}
	if *displayName == "" {
		return errors.New("if defined, display_name must not be empty string")
	}
	return nil
}
+
// validateCoderResourceDescription checks that the required description
// frontmatter field is present and non-empty.
func validateCoderResourceDescription(description string) error {
	if len(description) != 0 {
		return nil
	}
	return errors.New("frontmatter description cannot be empty")
}
+
+func validateCoderResourceIconURL(iconURL string) []error {
+ problems := []error{}
+
+ if iconURL == "" {
+ problems = append(problems, errors.New("icon URL cannot be empty"))
+ return problems
+ }
+
+ isAbsoluteURL := !strings.HasPrefix(iconURL, ".") && !strings.HasPrefix(iconURL, "/")
+ if isAbsoluteURL {
+ if _, err := url.ParseRequestURI(iconURL); err != nil {
+ problems = append(problems, errors.New("absolute icon URL is not correctly formatted"))
+ }
+ if strings.Contains(iconURL, "?") {
+ problems = append(problems, errors.New("icon URLs cannot contain query parameters"))
+ }
+ return problems
+ }
+
+ // Would normally be skittish about having relative paths like this, but it
+ // should be safe because we have guarantees about the structure of the
+ // repo, and where this logic will run
+ isPermittedRelativeURL := strings.HasPrefix(iconURL, "./") ||
+ strings.HasPrefix(iconURL, "/") ||
+ strings.HasPrefix(iconURL, "../../../../.icons")
+ if !isPermittedRelativeURL {
+ problems = append(problems, fmt.Errorf("relative icon URL %q must either be scoped to that module's directory, or the top-level /.icons directory (this can usually be done by starting the path with \"../../../.icons\")", iconURL))
+ }
+
+ return problems
+}
+
// validateCoderResourceTags checks that a tags array was provided (an empty
// array is fine, nil is not), and that every tag survives URL query escaping
// unchanged, since the Registry site embeds tag values in browser URLs for
// its filter controls.
func validateCoderResourceTags(tags []string) error {
	if tags == nil {
		return errors.New("provided tags array is nil")
	}

	// All of these tags are used for the module/template filter controls in the
	// Registry site. Need to make sure they can all be placed in the browser
	// URL without issue
	var invalidTags []string
	for _, tag := range tags {
		if url.QueryEscape(tag) != tag {
			invalidTags = append(invalidTags, tag)
		}
	}
	if len(invalidTags) == 0 {
		return nil
	}
	return fmt.Errorf("found invalid tags (tags that cannot be used for filter state in the Registry website): [%s]", strings.Join(invalidTags, ", "))
}
+
// Todo: This is a holdover from the validation logic used by the Coder Modules
// repo. It gives us some assurance, but realistically, we probably want to
// parse any Terraform code snippets, and make some deeper guarantees about how
// it's structured. Just validating whether it *can* be parsed as Terraform
// would be a big improvement.
//
// terraformVersionRe matches lines that assign a value to a "version" field
// (e.g. `version = "1.0.19"` inside a module block), allowing leading
// whitespace.
var terraformVersionRe = regexp.MustCompile("^\\s*\\bversion\\s+=")
+
// validateCoderResourceReadmeBody runs the general README body checks plus
// resource-specific checks on the section under the h1 header: there must be
// at least one paragraph, exactly one Terraform (```tf) code block that sets
// a "version" field, and no ```hcl code blocks.
func validateCoderResourceReadmeBody(body string) []error {
	trimmed := strings.TrimSpace(body)
	var errs []error
	errs = append(errs, validateReadmeBody(trimmed)...)

	foundParagraph := false
	terraformCodeBlockCount := 0
	foundTerraformVersionRef := false

	lineNum := 0
	isInsideCodeBlock := false
	isInsideTerraform := false

	lineScanner := bufio.NewScanner(strings.NewReader(trimmed))
	for lineScanner.Scan() {
		lineNum++
		nextLine := lineScanner.Text()

		// Code assumes that invalid headers would've already been handled by
		// the base validation function, so we don't need to check deeper if the
		// first line isn't an h1
		if lineNum == 1 {
			if !strings.HasPrefix(nextLine, "# ") {
				break
			}
			continue
		}

		if strings.HasPrefix(nextLine, "```") {
			isInsideCodeBlock = !isInsideCodeBlock
			// On a closing fence isInsideCodeBlock is false, so this also
			// resets the Terraform flag
			isInsideTerraform = isInsideCodeBlock && strings.HasPrefix(nextLine, "```tf")
			if isInsideTerraform {
				terraformCodeBlockCount++
			}
			if strings.HasPrefix(nextLine, "```hcl") {
				errs = append(errs, errors.New("all .hcl language references must be converted to .tf"))
			}
			continue
		}

		if isInsideCodeBlock {
			if isInsideTerraform {
				foundTerraformVersionRef = foundTerraformVersionRef || terraformVersionRe.MatchString(nextLine)
			}
			continue
		}

		// Code assumes that we can treat this case as the end of the "h1
		// section" and don't need to process any further lines
		if lineNum > 1 && strings.HasPrefix(nextLine, "#") {
			break
		}

		// Code assumes that if we've reached this point, the only other options
		// are: (1) empty spaces, (2) paragraphs, (3) HTML, and (4) asset
		// references made via [] syntax
		trimmedLine := strings.TrimSpace(nextLine)
		isParagraph := trimmedLine != "" && !strings.HasPrefix(trimmedLine, "![") && !strings.HasPrefix(trimmedLine, "<")
		foundParagraph = foundParagraph || isParagraph
	}

	if terraformCodeBlockCount == 0 {
		errs = append(errs, errors.New("did not find Terraform code block within h1 section"))
	} else {
		if terraformCodeBlockCount > 1 {
			errs = append(errs, errors.New("cannot have more than one Terraform code block in h1 section"))
		}
		if !foundTerraformVersionRef {
			errs = append(errs, errors.New("did not find Terraform code block that specifies 'version' field"))
		}
	}
	if !foundParagraph {
		errs = append(errs, errors.New("did not find paragraph within h1 section"))
	}
	// An odd number of fences means a code block was left unterminated
	if isInsideCodeBlock {
		errs = append(errs, errors.New("code blocks inside h1 section do not all terminate before end of file"))
	}

	return errs
}
+
+func validateCoderResourceReadme(rm coderResourceReadme) []error {
+ var errs []error
+
+ for _, err := range validateCoderResourceReadmeBody(rm.body) {
+ errs = append(errs, addFilePathToError(rm.filePath, err))
+ }
+
+ if err := validateCoderResourceDisplayName(rm.frontmatter.DisplayName); err != nil {
+ errs = append(errs, addFilePathToError(rm.filePath, err))
+ }
+ if err := validateCoderResourceDescription(rm.frontmatter.Description); err != nil {
+ errs = append(errs, addFilePathToError(rm.filePath, err))
+ }
+ if err := validateCoderResourceTags(rm.frontmatter.Tags); err != nil {
+ errs = append(errs, addFilePathToError(rm.filePath, err))
+ }
+
+ for _, err := range validateCoderResourceIconURL(rm.frontmatter.IconURL) {
+ errs = append(errs, addFilePathToError(rm.filePath, err))
+ }
+
+ return errs
+}
+
+func parseCoderResourceReadme(resourceType string, rm readme) (coderResourceReadme, error) {
+ fm, body, err := separateFrontmatter(rm.rawText)
+ if err != nil {
+ return coderResourceReadme{}, fmt.Errorf("%q: failed to parse frontmatter: %v", rm.filePath, err)
+ }
+
+ yml := coderResourceFrontmatter{}
+ if err := yaml.Unmarshal([]byte(fm), &yml); err != nil {
+ return coderResourceReadme{}, fmt.Errorf("%q: failed to parse: %v", rm.filePath, err)
+ }
+
+ return coderResourceReadme{
+ resourceType: resourceType,
+ filePath: rm.filePath,
+ body: body,
+ frontmatter: yml,
+ }, nil
+}
+
+func parseCoderResourceReadmeFiles(resourceType string, rms []readme) (map[string]coderResourceReadme, error) {
+ resources := map[string]coderResourceReadme{}
+ var yamlParsingErrs []error
+ for _, rm := range rms {
+ p, err := parseCoderResourceReadme(resourceType, rm)
+ if err != nil {
+ yamlParsingErrs = append(yamlParsingErrs, err)
+ continue
+ }
+
+ resources[p.filePath] = p
+ }
+ if len(yamlParsingErrs) != 0 {
+ return nil, validationPhaseError{
+ phase: validationPhaseReadmeParsing,
+ errors: yamlParsingErrs,
+ }
+ }
+
+ yamlValidationErrors := []error{}
+ for _, readme := range resources {
+ errors := validateCoderResourceReadme(readme)
+ if len(errors) > 0 {
+ yamlValidationErrors = append(yamlValidationErrors, errors...)
+ }
+ }
+ if len(yamlValidationErrors) != 0 {
+ return nil, validationPhaseError{
+ phase: validationPhaseReadmeParsing,
+ errors: yamlValidationErrors,
+ }
+ }
+
+ return resources, nil
+}
+
// Todo: Need to beef up this function by grabbing each image/video URL from
// the body's AST
//
// validateCoderResourceRelativeUrls is currently a stub: it accepts every
// input and always reports success. It exists so the validation pipeline's
// cross-referencing phase has a stable call site once the real logic lands.
func validateCoderResourceRelativeUrls(resources map[string]coderResourceReadme) error {
	return nil
}
+
+func aggregateCoderResourceReadmeFiles(resourceType string) ([]readme, error) {
+ registryFiles, err := os.ReadDir(rootRegistryPath)
+ if err != nil {
+ return nil, err
+ }
+
+ var allReadmeFiles []readme
+ var errs []error
+ for _, rf := range registryFiles {
+ if !rf.IsDir() {
+ continue
+ }
+
+ resourceRootPath := path.Join(rootRegistryPath, rf.Name(), resourceType)
+ resourceDirs, err := os.ReadDir(resourceRootPath)
+ if err != nil {
+ if !errors.Is(err, os.ErrNotExist) {
+ errs = append(errs, err)
+ }
+ continue
+ }
+
+ for _, rd := range resourceDirs {
+ if !rd.IsDir() || rd.Name() == ".coder" {
+ continue
+ }
+
+ resourceReadmePath := path.Join(resourceRootPath, rd.Name(), "README.md")
+ rm, err := os.ReadFile(resourceReadmePath)
+ if err != nil {
+ errs = append(errs, err)
+ continue
+ }
+
+ allReadmeFiles = append(allReadmeFiles, readme{
+ filePath: resourceReadmePath,
+ rawText: string(rm),
+ })
+ }
+ }
+
+ if len(errs) != 0 {
+ return nil, validationPhaseError{
+ phase: validationPhaseFileLoad,
+ errors: errs,
+ }
+ }
+ return allReadmeFiles, nil
+}
+
+func validateAllCoderResourceFilesOfType(resourceType string) error {
+ if !slices.Contains(supportedResourceTypes, resourceType) {
+ return fmt.Errorf("resource type %q is not part of supported list [%s]", resourceType, strings.Join(supportedResourceTypes, ", "))
+ }
+
+ allReadmeFiles, err := aggregateCoderResourceReadmeFiles(resourceType)
+ if err != nil {
+ return err
+ }
+
+ log.Printf("Processing %d README files\n", len(allReadmeFiles))
+ resources, err := parseCoderResourceReadmeFiles(resourceType, allReadmeFiles)
+ if err != nil {
+ return err
+ }
+ log.Printf("Processed %d README files as valid Coder resources with type %q", len(resources), resourceType)
+
+ err = validateCoderResourceRelativeUrls(resources)
+ if err != nil {
+ return err
+ }
+ log.Printf("All relative URLs for %s READMEs are valid\n", resourceType)
+ return nil
+}
diff --git a/cmd/readmevalidation/coderresources_test.go b/cmd/readmevalidation/coderresources_test.go
new file mode 100644
index 0000000..71ec75f
--- /dev/null
+++ b/cmd/readmevalidation/coderresources_test.go
@@ -0,0 +1,22 @@
+package main
+
+import (
+ _ "embed"
+ "testing"
+)
+
// testBody holds a known-good README body fixture, embedded at compile time.
//go:embed testSamples/sampleReadmeBody.md
var testBody string

// TestValidateCoderResourceReadmeBody asserts that the embedded sample README
// body passes resource-level body validation with zero errors.
func TestValidateCoderResourceReadmeBody(t *testing.T) {
	t.Parallel()

	t.Run("Parses a valid README body with zero issues", func(t *testing.T) {
		t.Parallel()

		errs := validateCoderResourceReadmeBody(testBody)
		// Report every validation error individually so a failure shows the
		// full list, not just the first problem
		for _, e := range errs {
			t.Error(e)
		}
	})
}
diff --git a/cmd/readmevalidation/contributors.go b/cmd/readmevalidation/contributors.go
new file mode 100644
index 0000000..daee82c
--- /dev/null
+++ b/cmd/readmevalidation/contributors.go
@@ -0,0 +1,336 @@
+package main
+
+import (
+ "errors"
+ "fmt"
+ "log"
+ "net/url"
+ "os"
+ "path"
+ "slices"
+ "strings"
+
+ "gopkg.in/yaml.v3"
+)
+
// validContributorStatuses lists the accepted values for the "status"
// frontmatter field on contributor profiles.
var validContributorStatuses = []string{"official", "partner", "community"}

// contributorProfileFrontmatter describes the YAML metadata at the top of a
// contributor's profile README.
type contributorProfileFrontmatter struct {
	DisplayName       string `yaml:"display_name"`
	Bio               string `yaml:"bio"`
	ContributorStatus string `yaml:"status"`
	// Script assumes that if avatar URL is nil, the Registry site build step
	// will backfill the value with the user's GitHub avatar URL
	AvatarURL    *string `yaml:"avatar"`
	LinkedinURL  *string `yaml:"linkedin"`
	WebsiteURL   *string `yaml:"website"`
	SupportEmail *string `yaml:"support_email"`
}

// contributorProfileReadme pairs a contributor's parsed frontmatter with the
// namespace (directory name) and file path it was loaded from.
type contributorProfileReadme struct {
	frontmatter contributorProfileFrontmatter
	namespace   string
	filePath    string
}
+
// validateContributorDisplayName checks that the required display_name
// frontmatter field is present and non-empty.
func validateContributorDisplayName(displayName string) error {
	if displayName != "" {
		return nil
	}
	return fmt.Errorf("missing display_name")
}
+
// validateContributorLinkedinURL checks that an optional LinkedIn URL is a
// well-formed absolute URL. A nil value is allowed because the field is
// optional.
func validateContributorLinkedinURL(linkedinURL *string) error {
	if linkedinURL == nil {
		return nil
	}

	_, err := url.ParseRequestURI(*linkedinURL)
	if err == nil {
		return nil
	}
	return fmt.Errorf("linkedIn URL %q is not valid: %v", *linkedinURL, err)
}
+
// validateContributorSupportEmail runs structural checks on an optional
// support email address, returning every problem found. A nil value is
// allowed because the field is optional.
func validateContributorSupportEmail(email *string) []error {
	if email == nil {
		return nil
	}

	errs := []error{}

	// Can't 100% validate that this is correct without actually sending
	// an email, and especially with some contributors being individual
	// developers, we don't want to do that on every single run of the CI
	// pipeline. Best we can do is verify the general structure
	username, server, hasAt := strings.Cut(*email, "@")
	if !hasAt {
		return append(errs, fmt.Errorf("email address %q is missing @ symbol", *email))
	}
	if username == "" {
		errs = append(errs, fmt.Errorf("email address %q is missing username", *email))
	}

	domain, tld, hasPeriod := strings.Cut(server, ".")
	if !hasPeriod {
		return append(errs, fmt.Errorf("email address %q is missing period for server segment", *email))
	}
	if domain == "" {
		errs = append(errs, fmt.Errorf("email address %q is missing domain", *email))
	}
	if tld == "" {
		errs = append(errs, fmt.Errorf("email address %q is missing top-level domain", *email))
	}
	if strings.Contains(*email, "?") {
		errs = append(errs, errors.New("email is not allowed to contain query parameters"))
	}

	return errs
}
+
+func validateContributorWebsite(websiteURL *string) error {
+ if websiteURL == nil {
+ return nil
+ }
+
+ if _, err := url.ParseRequestURI(*websiteURL); err != nil {
+ return fmt.Errorf("linkedIn URL %q is not valid: %v", *websiteURL, err)
+ }
+
+ return nil
+}
+
+func validateContributorStatus(status string) error {
+ if !slices.Contains(validContributorStatuses, status) {
+ return fmt.Errorf("contributor status %q is not valid", status)
+ }
+
+ return nil
+}
+
+// Can't validate the image actually leads to a valid resource in a pure
+// function, but can at least catch obvious problems
+func validateContributorAvatarURL(avatarURL *string) []error {
+ if avatarURL == nil {
+ return nil
+ }
+
+ errs := []error{}
+ if *avatarURL == "" {
+ errs = append(errs, errors.New("avatar URL must be omitted or non-empty string"))
+ return errs
+ }
+
+ // Have to use .Parse instead of .ParseRequestURI because this is the
+ // one field that's allowed to be a relative URL
+ if _, err := url.Parse(*avatarURL); err != nil {
+ errs = append(errs, fmt.Errorf("URL %q is not a valid relative or absolute URL", *avatarURL))
+ }
+ if strings.Contains(*avatarURL, "?") {
+ errs = append(errs, errors.New("avatar URL is not allowed to contain search parameters"))
+ }
+
+ matched := false
+ for _, ff := range supportedAvatarFileFormats {
+ matched = strings.HasSuffix(*avatarURL, ff)
+ if matched {
+ break
+ }
+ }
+ if !matched {
+ segments := strings.Split(*avatarURL, ".")
+ fileExtension := segments[len(segments)-1]
+ errs = append(errs, fmt.Errorf("avatar URL '.%s' does not end in a supported file format: [%s]", fileExtension, strings.Join(supportedAvatarFileFormats, ", ")))
+ }
+
+ return errs
+}
+
+func validateContributorReadme(rm contributorProfileReadme) []error {
+ allErrs := []error{}
+
+ if err := validateContributorDisplayName(rm.frontmatter.DisplayName); err != nil {
+ allErrs = append(allErrs, addFilePathToError(rm.filePath, err))
+ }
+ if err := validateContributorLinkedinURL(rm.frontmatter.LinkedinURL); err != nil {
+ allErrs = append(allErrs, addFilePathToError(rm.filePath, err))
+ }
+ if err := validateContributorWebsite(rm.frontmatter.WebsiteURL); err != nil {
+ allErrs = append(allErrs, addFilePathToError(rm.filePath, err))
+ }
+ if err := validateContributorStatus(rm.frontmatter.ContributorStatus); err != nil {
+ allErrs = append(allErrs, addFilePathToError(rm.filePath, err))
+ }
+
+ for _, err := range validateContributorSupportEmail(rm.frontmatter.SupportEmail) {
+ allErrs = append(allErrs, addFilePathToError(rm.filePath, err))
+ }
+ for _, err := range validateContributorAvatarURL(rm.frontmatter.AvatarURL) {
+ allErrs = append(allErrs, addFilePathToError(rm.filePath, err))
+ }
+
+ return allErrs
+}
+
+func parseContributorProfile(rm readme) (contributorProfileReadme, error) {
+ fm, _, err := separateFrontmatter(rm.rawText)
+ if err != nil {
+ return contributorProfileReadme{}, fmt.Errorf("%q: failed to parse frontmatter: %v", rm.filePath, err)
+ }
+
+ yml := contributorProfileFrontmatter{}
+ if err := yaml.Unmarshal([]byte(fm), &yml); err != nil {
+ return contributorProfileReadme{}, fmt.Errorf("%q: failed to parse: %v", rm.filePath, err)
+ }
+
+ return contributorProfileReadme{
+ filePath: rm.filePath,
+ frontmatter: yml,
+ namespace: strings.TrimSuffix(strings.TrimPrefix(rm.filePath, "registry/"), "/README.md"),
+ }, nil
+}
+
+func parseContributorFiles(readmeEntries []readme) (map[string]contributorProfileReadme, error) {
+ profilesByNamespace := map[string]contributorProfileReadme{}
+ yamlParsingErrors := []error{}
+ for _, rm := range readmeEntries {
+ p, err := parseContributorProfile(rm)
+ if err != nil {
+ yamlParsingErrors = append(yamlParsingErrors, err)
+ continue
+ }
+
+ if prev, alreadyExists := profilesByNamespace[p.namespace]; alreadyExists {
+ yamlParsingErrors = append(yamlParsingErrors, fmt.Errorf("%q: namespace %q conflicts with namespace from %q", p.filePath, p.namespace, prev.filePath))
+ continue
+ }
+ profilesByNamespace[p.namespace] = p
+ }
+ if len(yamlParsingErrors) != 0 {
+ return nil, validationPhaseError{
+ phase: validationPhaseReadmeParsing,
+ errors: yamlParsingErrors,
+ }
+ }
+
+ yamlValidationErrors := []error{}
+ for _, p := range profilesByNamespace {
+ errors := validateContributorReadme(p)
+ if len(errors) > 0 {
+ yamlValidationErrors = append(yamlValidationErrors, errors...)
+ continue
+ }
+ }
+ if len(yamlValidationErrors) != 0 {
+ return nil, validationPhaseError{
+ phase: validationPhaseReadmeParsing,
+ errors: yamlValidationErrors,
+ }
+ }
+
+ return profilesByNamespace, nil
+}
+
+func aggregateContributorReadmeFiles() ([]readme, error) {
+ dirEntries, err := os.ReadDir(rootRegistryPath)
+ if err != nil {
+ return nil, err
+ }
+
+ allReadmeFiles := []readme{}
+ errs := []error{}
+ for _, e := range dirEntries {
+ dirPath := path.Join(rootRegistryPath, e.Name())
+ if !e.IsDir() {
+ continue
+ }
+
+ readmePath := path.Join(dirPath, "README.md")
+ rmBytes, err := os.ReadFile(readmePath)
+ if err != nil {
+ errs = append(errs, err)
+ continue
+ }
+ allReadmeFiles = append(allReadmeFiles, readme{
+ filePath: readmePath,
+ rawText: string(rmBytes),
+ })
+ }
+
+ if len(errs) != 0 {
+ return nil, validationPhaseError{
+ phase: validationPhaseFileLoad,
+ errors: errs,
+ }
+ }
+
+ return allReadmeFiles, nil
+}
+
// validateContributorRelativeUrls cross-references each contributor's
// relative avatar URL against the local file system, making sure it stays
// inside the contributor's namespaced directory and points to a real file.
func validateContributorRelativeUrls(contributors map[string]contributorProfileReadme) error {
	// This function only validates relative avatar URLs for now, but it can be
	// beefed up to validate more in the future
	errs := []error{}

	for _, con := range contributors {
		// If the avatar URL is missing, we'll just assume that the Registry
		// site build step will take care of filling in the data properly
		if con.frontmatter.AvatarURL == nil {
			continue
		}

		// Absolute URLs can't be checked against the local file system, so
		// they're skipped here
		isRelativeURL := strings.HasPrefix(*con.frontmatter.AvatarURL, ".") ||
			strings.HasPrefix(*con.frontmatter.AvatarURL, "/")
		if !isRelativeURL {
			continue
		}

		// A ".." prefix would escape the contributor's namespaced directory
		if strings.HasPrefix(*con.frontmatter.AvatarURL, "..") {
			errs = append(errs, fmt.Errorf("%q: relative avatar URLs cannot be placed outside a user's namespaced directory", con.filePath))
			continue
		}

		// Resolve the avatar path relative to the README's directory, then
		// verify a readable file actually exists there
		absolutePath := strings.TrimSuffix(con.filePath, "README.md") +
			*con.frontmatter.AvatarURL
		_, err := os.ReadFile(absolutePath)
		if err != nil {
			errs = append(errs, fmt.Errorf("%q: relative avatar path %q does not point to image in file system", con.filePath, *con.frontmatter.AvatarURL))
		}
	}

	if len(errs) == 0 {
		return nil
	}
	return validationPhaseError{
		phase:  validationPhaseAssetCrossReference,
		errors: errs,
	}
}
+
+func validateAllContributorFiles() error {
+ allReadmeFiles, err := aggregateContributorReadmeFiles()
+ if err != nil {
+ return err
+ }
+
+ log.Printf("Processing %d README files\n", len(allReadmeFiles))
+ contributors, err := parseContributorFiles(allReadmeFiles)
+ if err != nil {
+ return err
+ }
+ log.Printf("Processed %d README files as valid contributor profiles", len(contributors))
+
+ err = validateContributorRelativeUrls(contributors)
+ if err != nil {
+ return err
+ }
+ log.Println("All relative URLs for READMEs are valid")
+
+ log.Printf("Processed all READMEs in the %q directory\n", rootRegistryPath)
+ return nil
+}
diff --git a/cmd/readmevalidation/errors.go b/cmd/readmevalidation/errors.go
new file mode 100644
index 0000000..d9dbb17
--- /dev/null
+++ b/cmd/readmevalidation/errors.go
@@ -0,0 +1,28 @@
+package main
+
+import "fmt"
+
// validationPhaseError represents an error that occurred during a specific
// phase of README validation. It should be used to collect ALL validation
// errors that happened during a specific phase, rather than the first one
// encountered.
type validationPhaseError struct {
	phase  validationPhase
	errors []error
}

// Compile-time assertion that validationPhaseError satisfies the error
// interface via its value-receiver Error method.
var _ error = validationPhaseError{}
+
+func (vpe validationPhaseError) Error() string {
+ msg := fmt.Sprintf("Error during %q phase of README validation:", vpe.phase)
+ for _, e := range vpe.errors {
+ msg += fmt.Sprintf("\n- %v", e)
+ }
+ msg += "\n"
+
+ return msg
+}
+
+func addFilePathToError(filePath string, err error) error {
+ return fmt.Errorf("%q: %v", filePath, err)
+}
diff --git a/cmd/readmevalidation/main.go b/cmd/readmevalidation/main.go
new file mode 100644
index 0000000..6f33f74
--- /dev/null
+++ b/cmd/readmevalidation/main.go
@@ -0,0 +1,44 @@
+// This package validates the README files within the main Registry
+// directory: it checks that the directory has nothing but sub-directories,
+// that each sub-directory has a README.md describing a specific contributor,
+// and that each contributor's module READMEs are well-formed. The contents of
+// these files will be parsed by the Registry site build step, to be displayed
+// in the Registry site's UI.
+package main
+
+import (
+ "fmt"
+ "log"
+ "os"
+)
+
+func main() {
+ log.Println("Starting README validation")
+
+ // If there are fundamental problems with how the repo is structured, we
+ // can't make any guarantees that any further validations will be relevant
+ // or accurate
+ repoErr := validateRepoStructure()
+ if repoErr != nil {
+ log.Println(repoErr)
+ os.Exit(1)
+ }
+
+ var errs []error
+ err := validateAllContributorFiles()
+ if err != nil {
+ errs = append(errs, err)
+ }
+ err = validateAllCoderResourceFilesOfType("modules")
+ if err != nil {
+ errs = append(errs, err)
+ }
+
+ if len(errs) == 0 {
+ log.Printf("Processed all READMEs in the %q directory\n", rootRegistryPath)
+ os.Exit(0)
+ }
+ for _, err := range errs {
+ fmt.Println(err)
+ }
+ os.Exit(1)
+}
diff --git a/cmd/readmevalidation/readmefiles.go b/cmd/readmevalidation/readmefiles.go
new file mode 100644
index 0000000..2967652
--- /dev/null
+++ b/cmd/readmevalidation/readmefiles.go
@@ -0,0 +1,178 @@
+package main
+
+import (
+ "bufio"
+ "errors"
+ "fmt"
+ "regexp"
+ "strings"
+)
+
// rootRegistryPath is the top-level directory that holds all namespaced
// contributor/resource content. The validation logic assumes it runs from the
// repo root.
const rootRegistryPath = "./registry"

// supportedAvatarFileFormats lists the file extensions a contributor avatar
// URL is allowed to end with.
var supportedAvatarFileFormats = []string{".png", ".jpeg", ".jpg", ".gif", ".svg"}

// readme represents a single README file within the repo (usually within the
// top-level "/registry" directory).
type readme struct {
	filePath string
	rawText  string
}
+
// separateFrontmatter attempts to separate a README file's frontmatter content
// from the main README body, returning both values in that order. It does not
// validate whether the structure of the frontmatter is valid (i.e., that it's
// structured as YAML).
func separateFrontmatter(readmeText string) (string, string, error) {
	if readmeText == "" {
		return "", "", errors.New("README is empty")
	}

	const fence = "---"
	frontmatter := ""
	body := ""
	fenceCount := 0

	scanner := bufio.NewScanner(strings.NewReader(strings.TrimSpace(readmeText)))
	for scanner.Scan() {
		line := scanner.Text()
		if fenceCount < 2 && line == fence {
			fenceCount++
			continue
		}
		// Break early if the very first line wasn't a fence, because then we
		// know for certain that the README has problems
		if fenceCount == 0 {
			break
		}

		// It should be safe to trim each line of the frontmatter on a per-line
		// basis, because there shouldn't be any extra meaning attached to the
		// indentation. The same does NOT apply to the README; best we can do is
		// gather all the lines, and then trim around it
		if fenceCount >= 2 {
			body += line + "\n"
		} else {
			frontmatter += strings.TrimSpace(line) + "\n"
		}
	}

	if fenceCount < 2 {
		return "", "", errors.New("README does not have two sets of frontmatter fences")
	}
	if frontmatter == "" {
		return "", "", errors.New("readme has frontmatter fences but no frontmatter content")
	}
	return frontmatter, strings.TrimSpace(body), nil
}
+
+var readmeHeaderRe = regexp.MustCompile("^(#{1,})(\\s*)")
+
+// Todo: This seems to work okay for now, but the really proper way of doing
+// this is by parsing this as an AST, and then checking the resulting nodes
+func validateReadmeBody(body string) []error {
+ trimmed := strings.TrimSpace(body)
+
+ if trimmed == "" {
+ return []error{errors.New("README body is empty")}
+ }
+
+ // If the very first line of the README, there's a risk that the rest of the
+ // validation logic will break, since we don't have many guarantees about
+ // how the README is actually structured
+ if !strings.HasPrefix(trimmed, "# ") {
+ return []error{errors.New("README body must start with ATX-style h1 header (i.e., \"# \")")}
+ }
+
+ var errs []error
+ latestHeaderLevel := 0
+ foundFirstH1 := false
+ isInCodeBlock := false
+
+ lineScanner := bufio.NewScanner(strings.NewReader(trimmed))
+ for lineScanner.Scan() {
+ nextLine := lineScanner.Text()
+
+ // Have to check this because a lot of programming languages support #
+ // comments (including Terraform), and without any context, there's no
+ // way to tell the difference between a markdown header and code comment
+ if strings.HasPrefix(nextLine, "```") {
+ isInCodeBlock = !isInCodeBlock
+ continue
+ }
+ if isInCodeBlock {
+ continue
+ }
+
+ headerGroups := readmeHeaderRe.FindStringSubmatch(nextLine)
+ if headerGroups == nil {
+ continue
+ }
+
+ spaceAfterHeader := headerGroups[2]
+ if spaceAfterHeader == "" {
+ errs = append(errs, errors.New("header does not have space between header characters and main header text"))
+ }
+
+ nextHeaderLevel := len(headerGroups[1])
+ if nextHeaderLevel == 1 && !foundFirstH1 {
+ foundFirstH1 = true
+ latestHeaderLevel = 1
+ continue
+ }
+
+ // If we have obviously invalid headers, it's not really safe to keep
+ // proceeding with the rest of the content
+ if nextHeaderLevel == 1 {
+ errs = append(errs, errors.New("READMEs cannot contain more than h1 header"))
+ break
+ }
+ if nextHeaderLevel > 6 {
+ errs = append(errs, fmt.Errorf("README/HTML files cannot have headers exceed level 6 (found level %d)", nextHeaderLevel))
+ break
+ }
+
+ // This is something we need to enforce for accessibility, not just for
+ // the Registry website, but also when users are viewing the README
+ // files in the GitHub web view
+ if nextHeaderLevel > latestHeaderLevel && nextHeaderLevel != (latestHeaderLevel+1) {
+ errs = append(errs, fmt.Errorf("headers are not allowed to increase more than 1 level at a time"))
+ continue
+ }
+
+ // As long as the above condition passes, there's no problems with
+ // going up a header level or going down 1+ header levels
+ latestHeaderLevel = nextHeaderLevel
+ }
+
+ return errs
+}
+
// validationPhase represents a specific phase during README validation. It is
// expected that each phase is discrete, and errors during one will prevent a
// future phase from starting.
type validationPhase string

// All constants carry an explicit validationPhase type; previously only the
// first one did, leaving the rest as untyped strings.
const (
	// validationPhaseFileStructureValidation indicates when the entire Registry
	// directory is being verified for having all files be placed in the file
	// system as expected.
	validationPhaseFileStructureValidation validationPhase = "File structure validation"

	// validationPhaseFileLoad indicates when README files are being read from
	// the file system
	validationPhaseFileLoad validationPhase = "Filesystem reading"

	// validationPhaseReadmeParsing indicates when a README's frontmatter is
	// being parsed as YAML. This phase does not include YAML validation.
	validationPhaseReadmeParsing validationPhase = "README parsing"

	// validationPhaseReadmeValidation indicates when a README's frontmatter is
	// being validated as proper YAML with expected keys.
	validationPhaseReadmeValidation validationPhase = "README validation"

	// validationPhaseAssetCrossReference indicates when a README's frontmatter
	// is having all its relative URLs be validated for whether they point to
	// valid resources.
	validationPhaseAssetCrossReference validationPhase = "Cross-referencing relative asset URLs"
)
diff --git a/cmd/readmevalidation/repostructure.go b/cmd/readmevalidation/repostructure.go
new file mode 100644
index 0000000..11bd920
--- /dev/null
+++ b/cmd/readmevalidation/repostructure.go
@@ -0,0 +1,142 @@
+package main
+
+import (
+ "errors"
+ "fmt"
+ "os"
+ "path"
+ "slices"
+ "strings"
+)
+
+var supportedUserNameSpaceDirectories = append(supportedResourceTypes[:], ".icons", ".images")
+
+func validateCoderResourceSubdirectory(dirPath string) []error {
+ errs := []error{}
+
+ subDir, err := os.Stat(dirPath)
+ if err != nil {
+ // It's valid for a specific resource directory not to exist. It's just
+ // that if it does exist, it must follow specific rules
+ if !errors.Is(err, os.ErrNotExist) {
+ errs = append(errs, addFilePathToError(dirPath, err))
+ }
+ return errs
+ }
+
+ if !subDir.IsDir() {
+ errs = append(errs, fmt.Errorf("%q: path is not a directory", dirPath))
+ return errs
+ }
+
+ files, err := os.ReadDir(dirPath)
+ if err != nil {
+ errs = append(errs, addFilePathToError(dirPath, err))
+ return errs
+ }
+ for _, f := range files {
+ // The .coder subdirectories are sometimes generated as part of Bun
+ // tests. These subdirectories will never be committed to the repo, but
+ // in the off chance that they don't get cleaned up properly, we want to
+ // skip over them
+ if !f.IsDir() || f.Name() == ".coder" {
+ continue
+ }
+
+ resourceReadmePath := path.Join(dirPath, f.Name(), "README.md")
+ _, err := os.Stat(resourceReadmePath)
+ if err != nil {
+ if errors.Is(err, os.ErrNotExist) {
+ errs = append(errs, fmt.Errorf("%q: 'README.md' does not exist", resourceReadmePath))
+ } else {
+ errs = append(errs, addFilePathToError(resourceReadmePath, err))
+ }
+ }
+
+ mainTerraformPath := path.Join(dirPath, f.Name(), "main.tf")
+ _, err = os.Stat(mainTerraformPath)
+ if err != nil {
+ if errors.Is(err, os.ErrNotExist) {
+ errs = append(errs, fmt.Errorf("%q: 'main.tf' file does not exist", mainTerraformPath))
+ } else {
+ errs = append(errs, addFilePathToError(mainTerraformPath, err))
+ }
+ }
+
+ }
+
+ return errs
+}
+
+func validateRegistryDirectory() []error {
+ userDirs, err := os.ReadDir(rootRegistryPath)
+ if err != nil {
+ return []error{err}
+ }
+
+ allErrs := []error{}
+ for _, d := range userDirs {
+ dirPath := path.Join(rootRegistryPath, d.Name())
+ if !d.IsDir() {
+ allErrs = append(allErrs, fmt.Errorf("detected non-directory file %q at base of main Registry directory", dirPath))
+ continue
+ }
+
+ contributorReadmePath := path.Join(dirPath, "README.md")
+ _, err := os.Stat(contributorReadmePath)
+ if err != nil {
+ allErrs = append(allErrs, err)
+ }
+
+ files, err := os.ReadDir(dirPath)
+ if err != nil {
+ allErrs = append(allErrs, err)
+ continue
+ }
+
+ for _, f := range files {
+ // Todo: Decide if there's anything more formal that we want to
+ // ensure about non-directories scoped to user namespaces
+ if !f.IsDir() {
+ continue
+ }
+
+ segment := f.Name()
+ filePath := path.Join(dirPath, segment)
+
+ if !slices.Contains(supportedUserNameSpaceDirectories, segment) {
+ allErrs = append(allErrs, fmt.Errorf("%q: only these sub-directories are allowed at top of user namespace: [%s]", filePath, strings.Join(supportedUserNameSpaceDirectories, ", ")))
+ continue
+ }
+
+ if slices.Contains(supportedResourceTypes, segment) {
+ errs := validateCoderResourceSubdirectory(filePath)
+ if len(errs) != 0 {
+ allErrs = append(allErrs, errs...)
+ }
+ }
+ }
+ }
+
+ return allErrs
+}
+
+func validateRepoStructure() error {
+ var problems []error
+ if errs := validateRegistryDirectory(); len(errs) != 0 {
+ problems = append(problems, errs...)
+ }
+
+ _, err := os.Stat("./.icons")
+ if err != nil {
+ problems = append(problems, errors.New("missing top-level .icons directory (used for storing reusable Coder resource icons)"))
+ }
+
+ if len(problems) != 0 {
+ return validationPhaseError{
+ phase: validationPhaseFileStructureValidation,
+ errors: problems,
+ }
+ }
+ return nil
+}
diff --git a/cmd/readmevalidation/testSamples/sampleReadmeBody.md b/cmd/readmevalidation/testSamples/sampleReadmeBody.md
new file mode 100644
index 0000000..958fe21
--- /dev/null
+++ b/cmd/readmevalidation/testSamples/sampleReadmeBody.md
@@ -0,0 +1,121 @@
+# Goose
+
+Run the [Goose](https://block.github.io/goose/) agent in your workspace to generate code and perform tasks.
+
+```tf
+module "goose" {
+ source = "registry.coder.com/modules/goose/coder"
+ version = "1.0.31"
+ agent_id = coder_agent.example.id
+ folder = "/home/coder"
+ install_goose = true
+ goose_version = "v1.0.16"
+}
+```
+
+## Prerequisites
+
+- `screen` must be installed in your workspace to run Goose in the background
+- You must add the [Coder Login](https://registry.coder.com/modules/coder-login) module to your template
+
+The `codercom/oss-dogfood:latest` container image can be used for testing on container-based workspaces.
+
+## Examples
+
+Your workspace must have `screen` installed to use this.
+
+### Run in the background and report tasks (Experimental)
+
+> This functionality is in early access as of Coder v2.21 and is still evolving.
+> For now, we recommend testing it in a demo or staging environment,
+> rather than deploying to production
+>
+> Learn more in [the Coder documentation](https://coder.com/docs/tutorials/ai-agents)
+>
+> Join our [Discord channel](https://discord.gg/coder) or
+> [contact us](https://coder.com/contact) to get help or share feedback.
+
+```tf
+module "coder-login" {
+ count = data.coder_workspace.me.start_count
+ source = "registry.coder.com/modules/coder-login/coder"
+ version = "1.0.15"
+ agent_id = coder_agent.example.id
+}
+
+variable "anthropic_api_key" {
+ type = string
+ description = "The Anthropic API key"
+ sensitive = true
+}
+
+data "coder_parameter" "ai_prompt" {
+ type = "string"
+ name = "AI Prompt"
+ default = ""
+ description = "Write a prompt for Goose"
+ mutable = true
+}
+
+# Set the prompt and system prompt for Goose via environment variables
+resource "coder_agent" "main" {
+ # ...
+ env = {
+ GOOSE_SYSTEM_PROMPT = <<-EOT
+ You are a helpful assistant that can help write code.
+
+ Run all long running tasks (e.g. npm run dev) in the background and not in the foreground.
+
+ Periodically check in on background tasks.
+
+ Notify Coder of the status of the task before and after your steps.
+ EOT
+ GOOSE_TASK_PROMPT = data.coder_parameter.ai_prompt.value
+
+ # An API key is required for experiment_auto_configure
+ # See https://block.github.io/goose/docs/getting-started/providers
+ ANTHROPIC_API_KEY = var.anthropic_api_key # or use a coder_parameter
+ }
+}
+
+module "goose" {
+ count = data.coder_workspace.me.start_count
+ source = "registry.coder.com/modules/goose/coder"
+ version = "1.0.31"
+ agent_id = coder_agent.example.id
+ folder = "/home/coder"
+ install_goose = true
+ goose_version = "v1.0.16"
+
+ # Enable experimental features
+ experiment_report_tasks = true
+
+ # Run Goose in the background
+ experiment_use_screen = true
+
+ # Avoid configuring Goose manually
+ experiment_auto_configure = true
+
+ # Required for experiment_auto_configure
+ experiment_goose_provider = "anthropic"
+ experiment_goose_model = "claude-3-5-sonnet-latest"
+}
+```
+
+## Run standalone
+
+Run Goose as a standalone app in your workspace. This will install Goose and run it directly without using screen or any task reporting to the Coder UI.
+
+```tf
+module "goose" {
+ source = "registry.coder.com/modules/goose/coder"
+ version = "1.0.31"
+ agent_id = coder_agent.example.id
+ folder = "/home/coder"
+ install_goose = true
+ goose_version = "v1.0.16"
+
+ # Icon is not available in Coder v2.20 and below, so we'll use a custom icon URL
+ icon = "https://raw.githubusercontent.com/block/goose/refs/heads/main/ui/desktop/src/images/icon.svg"
+}
+```
diff --git a/examples/modules/run.sh b/examples/modules/run.sh
index f50f6ba..a15fcf6 100644
--- a/examples/modules/run.sh
+++ b/examples/modules/run.sh
@@ -11,10 +11,10 @@ BOLD='\033[0;1m'
printf "$${BOLD}Installing MODULE_NAME ...\n\n"
# Add code here
-# Use varibles from the templatefile function in main.tf
+# Use variables from the templatefile function in main.tf
# e.g. LOG_PATH, PORT, etc.
-printf "🥳 Installation comlete!\n\n"
+printf "🥳 Installation complete!\n\n"
printf "👷 Starting MODULE_NAME in background...\n\n"
# Start the app in here
diff --git a/images/coder-agent-bar.png b/images/coder-agent-bar.png
new file mode 100644
index 0000000..b58aa20
Binary files /dev/null and b/images/coder-agent-bar.png differ
diff --git a/package.json b/package.json
index aa3c7e2..733230d 100644
--- a/package.json
+++ b/package.json
@@ -1,9 +1,10 @@
{
- "name": "modules",
+ "name": "registry",
"scripts": {
+ "fmt": "bun x prettier --write **/*.sh **/*.ts **/*.md *.md && terraform fmt -recursive -diff",
+ "fmt:ci": "bun x prettier --check **/*.sh **/*.ts **/*.md *.md && terraform fmt -check -recursive -diff",
+ "terraform-validate": "./scripts/terraform_validate.sh",
"test": "bun test",
- "fmt": "bun x prettier -w **/*.sh .sample/run.sh new.sh **/*.ts **/*.md *.md && terraform fmt **/*.tf .sample/main.tf",
- "fmt:ci": "bun x prettier --check **/*.sh .sample/run.sh new.sh **/*.ts **/*.md *.md && terraform fmt -check **/*.tf .sample/main.tf",
"update-version": "./update-version.sh"
},
"devDependencies": {
diff --git a/registry/coder/modules/claude-code/README.md b/registry/coder/modules/claude-code/README.md
index b693440..a94b318 100644
--- a/registry/coder/modules/claude-code/README.md
+++ b/registry/coder/modules/claude-code/README.md
@@ -14,7 +14,7 @@ Run the [Claude Code](https://docs.anthropic.com/en/docs/agents-and-tools/claude
```tf
module "claude-code" {
source = "registry.coder.com/modules/claude-code/coder"
- version = "1.0.31"
+ version = "1.2.1"
agent_id = coder_agent.example.id
folder = "/home/coder"
install_claude_code = true
@@ -22,10 +22,10 @@ module "claude-code" {
}
```
-### Prerequisites
+## Prerequisites
- Node.js and npm must be installed in your workspace to install Claude Code
-- `screen` must be installed in your workspace to run Claude Code in the background
+- Either `screen` or `tmux` must be installed in your workspace to run Claude Code in the background
- You must add the [Coder Login](https://registry.coder.com/modules/coder-login) module to your template
The `codercom/oss-dogfood:latest` container image can be used for testing on container-based workspaces.
@@ -43,7 +43,7 @@ The `codercom/oss-dogfood:latest` container image can be used for testing on con
> Join our [Discord channel](https://discord.gg/coder) or
> [contact us](https://coder.com/contact) to get help or share feedback.
-Your workspace must have `screen` installed to use this.
+Your workspace must have either `screen` or `tmux` installed to use this.
```tf
variable "anthropic_api_key" {
@@ -71,7 +71,7 @@ data "coder_parameter" "ai_prompt" {
resource "coder_agent" "main" {
# ...
env = {
- CODER_MCP_CLAUDE_API_KEY = var.anthropic_api_key # or use a coder_parameter
+ CODER_MCP_CLAUDE_API_KEY = var.anthropic_api_key # or use a coder_parameter
CODER_MCP_CLAUDE_TASK_PROMPT = data.coder_parameter.ai_prompt.value
CODER_MCP_APP_STATUS_SLUG = "claude-code"
CODER_MCP_CLAUDE_SYSTEM_PROMPT = <<-EOT
@@ -83,14 +83,14 @@ resource "coder_agent" "main" {
module "claude-code" {
count = data.coder_workspace.me.start_count
source = "registry.coder.com/modules/claude-code/coder"
- version = "1.0.31"
+ version = "1.1.0"
agent_id = coder_agent.example.id
folder = "/home/coder"
install_claude_code = true
claude_code_version = "0.2.57"
# Enable experimental features
- experiment_use_screen = true
+ experiment_use_screen = true # Or use experiment_use_tmux = true to use tmux instead
experiment_report_tasks = true
}
```
@@ -102,7 +102,7 @@ Run Claude Code as a standalone app in your workspace. This will install Claude
```tf
module "claude-code" {
source = "registry.coder.com/modules/claude-code/coder"
- version = "1.0.31"
+ version = "1.2.1"
agent_id = coder_agent.example.id
folder = "/home/coder"
install_claude_code = true
diff --git a/registry/coder/modules/claude-code/main.tf b/registry/coder/modules/claude-code/main.tf
index 349af17..cc7b27e 100644
--- a/registry/coder/modules/claude-code/main.tf
+++ b/registry/coder/modules/claude-code/main.tf
@@ -54,12 +54,35 @@ variable "experiment_use_screen" {
default = false
}
+variable "experiment_use_tmux" {
+ type = bool
+ description = "Whether to use tmux instead of screen for running Claude Code in the background."
+ default = false
+}
+
variable "experiment_report_tasks" {
type = bool
description = "Whether to enable task reporting."
default = false
}
+variable "experiment_pre_install_script" {
+ type = string
+ description = "Custom script to run before installing Claude Code."
+ default = null
+}
+
+variable "experiment_post_install_script" {
+ type = string
+ description = "Custom script to run after installing Claude Code."
+ default = null
+}
+
+locals {
+ encoded_pre_install_script = var.experiment_pre_install_script != null ? base64encode(var.experiment_pre_install_script) : ""
+ encoded_post_install_script = var.experiment_post_install_script != null ? base64encode(var.experiment_post_install_script) : ""
+}
+
# Install and Initialize Claude Code
resource "coder_script" "claude_code" {
agent_id = var.agent_id
@@ -74,6 +97,14 @@ resource "coder_script" "claude_code" {
command -v "$1" >/dev/null 2>&1
}
+ # Run pre-install script if provided
+ if [ -n "${local.encoded_pre_install_script}" ]; then
+ echo "Running pre-install script..."
+ echo "${local.encoded_pre_install_script}" | base64 -d > /tmp/pre_install.sh
+ chmod +x /tmp/pre_install.sh
+ /tmp/pre_install.sh
+ fi
+
# Install Claude Code if enabled
if [ "${var.install_claude_code}" = "true" ]; then
if ! command_exists npm; then
@@ -84,11 +115,52 @@ resource "coder_script" "claude_code" {
npm install -g @anthropic-ai/claude-code@${var.claude_code_version}
fi
+ # Run post-install script if provided
+ if [ -n "${local.encoded_post_install_script}" ]; then
+ echo "Running post-install script..."
+ echo "${local.encoded_post_install_script}" | base64 -d > /tmp/post_install.sh
+ chmod +x /tmp/post_install.sh
+ /tmp/post_install.sh
+ fi
+
if [ "${var.experiment_report_tasks}" = "true" ]; then
echo "Configuring Claude Code to report tasks via Coder MCP..."
coder exp mcp configure claude-code ${var.folder}
fi
+ # Handle terminal multiplexer selection (tmux or screen)
+ if [ "${var.experiment_use_tmux}" = "true" ] && [ "${var.experiment_use_screen}" = "true" ]; then
+ echo "Error: Both experiment_use_tmux and experiment_use_screen cannot be true simultaneously."
+ echo "Please set only one of them to true."
+ exit 1
+ fi
+
+ # Run with tmux if enabled
+ if [ "${var.experiment_use_tmux}" = "true" ]; then
+ echo "Running Claude Code in the background with tmux..."
+
+ # Check if tmux is installed
+ if ! command_exists tmux; then
+ echo "Error: tmux is not installed. Please install tmux manually."
+ exit 1
+ fi
+
+ touch "$HOME/.claude-code.log"
+
+ export LANG=en_US.UTF-8
+ export LC_ALL=en_US.UTF-8
+
+ # Create a new tmux session in detached mode
+ tmux new-session -d -s claude-code -c ${var.folder} "claude --dangerously-skip-permissions"
+
+ # Send the prompt to the tmux session if needed
+ if [ -n "$CODER_MCP_CLAUDE_TASK_PROMPT" ]; then
+ tmux send-keys -t claude-code "$CODER_MCP_CLAUDE_TASK_PROMPT"
+ sleep 5
+ tmux send-keys -t claude-code Enter
+ fi
+ fi
+
# Run with screen if enabled
if [ "${var.experiment_use_screen}" = "true" ]; then
echo "Running Claude Code in the background..."
@@ -149,20 +221,27 @@ resource "coder_app" "claude_code" {
#!/bin/bash
set -e
- if [ "${var.experiment_use_screen}" = "true" ]; then
+ export LANG=en_US.UTF-8
+ export LC_ALL=en_US.UTF-8
+
+ if [ "${var.experiment_use_tmux}" = "true" ]; then
+ if tmux has-session -t claude-code 2>/dev/null; then
+ echo "Attaching to existing Claude Code tmux session." | tee -a "$HOME/.claude-code.log"
+ tmux attach-session -t claude-code
+ else
+ echo "Starting a new Claude Code tmux session." | tee -a "$HOME/.claude-code.log"
+ tmux new-session -s claude-code -c ${var.folder} "claude --dangerously-skip-permissions | tee -a \"$HOME/.claude-code.log\"; exec bash"
+ fi
+ elif [ "${var.experiment_use_screen}" = "true" ]; then
if screen -list | grep -q "claude-code"; then
- export LANG=en_US.UTF-8
- export LC_ALL=en_US.UTF-8
- echo "Attaching to existing Claude Code session." | tee -a "$HOME/.claude-code.log"
+ echo "Attaching to existing Claude Code screen session." | tee -a "$HOME/.claude-code.log"
screen -xRR claude-code
else
- echo "Starting a new Claude Code session." | tee -a "$HOME/.claude-code.log"
- screen -S claude-code bash -c 'export LANG=en_US.UTF-8; export LC_ALL=en_US.UTF-8; claude --dangerously-skip-permissions | tee -a "$HOME/.claude-code.log"; exec bash'
+ echo "Starting a new Claude Code screen session." | tee -a "$HOME/.claude-code.log"
+ screen -S claude-code bash -c 'claude --dangerously-skip-permissions | tee -a "$HOME/.claude-code.log"; exec bash'
fi
else
cd ${var.folder}
- export LANG=en_US.UTF-8
- export LC_ALL=en_US.UTF-8
claude
fi
EOT
diff --git a/registry/coder/modules/code-server/README.md b/registry/coder/modules/code-server/README.md
index 30aeff0..9080bdd 100644
--- a/registry/coder/modules/code-server/README.md
+++ b/registry/coder/modules/code-server/README.md
@@ -15,7 +15,7 @@ Automatically install [code-server](https://github.com/coder/code-server) in a w
module "code-server" {
count = data.coder_workspace.me.start_count
source = "registry.coder.com/modules/code-server/coder"
- version = "1.0.31"
+ version = "1.1.0"
agent_id = coder_agent.example.id
}
```
@@ -30,7 +30,7 @@ module "code-server" {
module "code-server" {
count = data.coder_workspace.me.start_count
source = "registry.coder.com/modules/code-server/coder"
- version = "1.0.31"
+ version = "1.1.0"
agent_id = coder_agent.example.id
install_version = "4.8.3"
}
@@ -44,7 +44,7 @@ Install the Dracula theme from [OpenVSX](https://open-vsx.org/):
module "code-server" {
count = data.coder_workspace.me.start_count
source = "registry.coder.com/modules/code-server/coder"
- version = "1.0.31"
+ version = "1.1.0"
agent_id = coder_agent.example.id
extensions = [
"dracula-theme.theme-dracula"
@@ -62,7 +62,7 @@ Configure VS Code's [settings.json](https://code.visualstudio.com/docs/getstarte
module "code-server" {
count = data.coder_workspace.me.start_count
source = "registry.coder.com/modules/code-server/coder"
- version = "1.0.31"
+ version = "1.1.0"
agent_id = coder_agent.example.id
extensions = ["dracula-theme.theme-dracula"]
settings = {
@@ -79,7 +79,7 @@ Just run code-server in the background, don't fetch it from GitHub:
module "code-server" {
count = data.coder_workspace.me.start_count
source = "registry.coder.com/modules/code-server/coder"
- version = "1.0.31"
+ version = "1.1.0"
agent_id = coder_agent.example.id
extensions = ["dracula-theme.theme-dracula", "ms-azuretools.vscode-docker"]
}
@@ -95,7 +95,7 @@ Run an existing copy of code-server if found, otherwise download from GitHub:
module "code-server" {
count = data.coder_workspace.me.start_count
source = "registry.coder.com/modules/code-server/coder"
- version = "1.0.31"
+ version = "1.1.0"
agent_id = coder_agent.example.id
use_cached = true
extensions = ["dracula-theme.theme-dracula", "ms-azuretools.vscode-docker"]
@@ -108,7 +108,7 @@ Just run code-server in the background, don't fetch it from GitHub:
module "code-server" {
count = data.coder_workspace.me.start_count
source = "registry.coder.com/modules/code-server/coder"
- version = "1.0.31"
+ version = "1.1.0"
agent_id = coder_agent.example.id
offline = true
}
diff --git a/registry/coder/modules/code-server/main.tf b/registry/coder/modules/code-server/main.tf
index c80e537..ca4ff3a 100644
--- a/registry/coder/modules/code-server/main.tf
+++ b/registry/coder/modules/code-server/main.tf
@@ -4,7 +4,7 @@ terraform {
required_providers {
coder = {
source = "coder/coder"
- version = ">= 0.17"
+ version = ">= 2.1"
}
}
}
@@ -122,6 +122,20 @@ variable "subdomain" {
default = false
}
+variable "open_in" {
+ type = string
+ description = <<-EOT
+ Determines where the app will be opened. Valid values are `"tab"` and `"slim-window" (default)`.
+ `"tab"` opens in a new tab in the same browser window.
+ `"slim-window"` opens a new browser window without navigation controls.
+ EOT
+ default = "slim-window"
+ validation {
+ condition = contains(["tab", "slim-window"], var.open_in)
+ error_message = "The 'open_in' variable must be one of: 'tab', 'slim-window'."
+ }
+}
+
resource "coder_script" "code-server" {
agent_id = var.agent_id
display_name = "code-server"
@@ -166,6 +180,7 @@ resource "coder_app" "code-server" {
subdomain = var.subdomain
share = var.share
order = var.order
+ open_in = var.open_in
healthcheck {
url = "http://localhost:${var.port}/healthz"
diff --git a/registry/coder/modules/devcontainers-cli/README.md b/registry/coder/modules/devcontainers-cli/README.md
new file mode 100644
index 0000000..ec73696
--- /dev/null
+++ b/registry/coder/modules/devcontainers-cli/README.md
@@ -0,0 +1,22 @@
+---
+display_name: devcontainers-cli
+description: devcontainers-cli module provides an easy way to install @devcontainers/cli into a workspace
+icon: ../../../../.icons/devcontainers.svg
+verified: true
+maintainer_github: coder
+tags: [devcontainers]
+---
+
+# devcontainers-cli
+
+The devcontainers-cli module provides an easy way to install [`@devcontainers/cli`](https://github.com/devcontainers/cli) into a workspace. It can be used within any workspace as it runs only if
+@devcontainers/cli is not installed yet.
+`npm` is required and should be pre-installed in order for the module to work.
+
+```tf
+module "devcontainers-cli" {
+ source = "registry.coder.com/modules/devcontainers-cli/coder"
+ version = "1.0.3"
+ agent_id = coder_agent.example.id
+}
+```
diff --git a/registry/coder/modules/devcontainers-cli/main.test.ts b/registry/coder/modules/devcontainers-cli/main.test.ts
new file mode 100644
index 0000000..6cfe4d0
--- /dev/null
+++ b/registry/coder/modules/devcontainers-cli/main.test.ts
@@ -0,0 +1,144 @@
+import { describe, expect, it } from "bun:test";
+import {
+ execContainer,
+ executeScriptInContainer,
+ findResourceInstance,
+ runContainer,
+ runTerraformApply,
+ runTerraformInit,
+ testRequiredVariables,
+ type TerraformState,
+} from "~test";
+
+const executeScriptInContainerWithPackageManager = async (
+ state: TerraformState,
+ image: string,
+ packageManager: string,
+ shell = "sh",
+): Promise<{
+ exitCode: number;
+ stdout: string[];
+ stderr: string[];
+}> => {
+ const instance = findResourceInstance(state, "coder_script");
+ const id = await runContainer(image);
+
+ // Install the specified package manager
+ if (packageManager === "npm") {
+ await execContainer(id, [shell, "-c", "apk add nodejs npm"]);
+ } else if (packageManager === "pnpm") {
+ await execContainer(id, [
+ shell,
+ "-c",
+ `wget -qO- https://get.pnpm.io/install.sh | ENV="$HOME/.shrc" SHELL="$(which sh)" sh -`,
+ ]);
+ } else if (packageManager === "yarn") {
+ await execContainer(id, [
+ shell,
+ "-c",
+ "apk add nodejs npm && npm install -g yarn",
+ ]);
+ }
+
+ const pathResp = await execContainer(id, [shell, "-c", "echo $PATH"]);
+ const path = pathResp.stdout.trim();
+
+ console.log(path);
+
+ const resp = await execContainer(
+ id,
+ [shell, "-c", instance.script],
+ [
+ "--env",
+ "CODER_SCRIPT_BIN_DIR=/tmp/coder-script-data/bin",
+ "--env",
+ `PATH=${path}:/tmp/coder-script-data/bin`,
+ ],
+ );
+ const stdout = resp.stdout.trim().split("\n");
+ const stderr = resp.stderr.trim().split("\n");
+ return {
+ exitCode: resp.exitCode,
+ stdout,
+ stderr,
+ };
+};
+
+describe("devcontainers-cli", async () => {
+ await runTerraformInit(import.meta.dir);
+
+ testRequiredVariables(import.meta.dir, {
+ agent_id: "some-agent-id",
+ });
+
+ it("misses all package managers", async () => {
+ const state = await runTerraformApply(import.meta.dir, {
+ agent_id: "some-agent-id",
+ });
+ const output = await executeScriptInContainer(state, "docker:dind");
+ expect(output.exitCode).toBe(1);
+ expect(output.stderr).toEqual([
+ "ERROR: No supported package manager (npm, pnpm, yarn) is installed. Please install one first.",
+ ]);
+ }, 15000);
+
+ it("installs devcontainers-cli with npm", async () => {
+ const state = await runTerraformApply(import.meta.dir, {
+ agent_id: "some-agent-id",
+ });
+
+ const output = await executeScriptInContainerWithPackageManager(
+ state,
+ "docker:dind",
+ "npm",
+ );
+ expect(output.exitCode).toBe(0);
+
+ expect(output.stdout[0]).toEqual(
+ "Installing @devcontainers/cli using npm...",
+ );
+ expect(output.stdout[output.stdout.length - 1]).toEqual(
+ "🥳 @devcontainers/cli has been installed into /usr/local/bin/devcontainer!",
+ );
+ }, 15000);
+
+ it("installs devcontainers-cli with yarn", async () => {
+ const state = await runTerraformApply(import.meta.dir, {
+ agent_id: "some-agent-id",
+ });
+
+ const output = await executeScriptInContainerWithPackageManager(
+ state,
+ "docker:dind",
+ "yarn",
+ );
+ expect(output.exitCode).toBe(0);
+
+ expect(output.stdout[0]).toEqual(
+ "Installing @devcontainers/cli using yarn...",
+ );
+ expect(output.stdout[output.stdout.length - 1]).toEqual(
+ "🥳 @devcontainers/cli has been installed into /tmp/coder-script-data/bin/devcontainer!",
+ );
+ }, 15000);
+
+ it("displays warning if docker is not installed", async () => {
+ const state = await runTerraformApply(import.meta.dir, {
+ agent_id: "some-agent-id",
+ });
+
+ const output = await executeScriptInContainerWithPackageManager(
+ state,
+ "alpine",
+ "npm",
+ );
+ expect(output.exitCode).toBe(0);
+
+ expect(output.stdout[0]).toEqual(
+ "WARNING: Docker was not found but is required to use @devcontainers/cli, please make sure it is available.",
+ );
+ expect(output.stdout[output.stdout.length - 1]).toEqual(
+ "🥳 @devcontainers/cli has been installed into /usr/local/bin/devcontainer!",
+ );
+ }, 15000);
+});
diff --git a/registry/coder/modules/devcontainers-cli/main.tf b/registry/coder/modules/devcontainers-cli/main.tf
new file mode 100644
index 0000000..a2aee34
--- /dev/null
+++ b/registry/coder/modules/devcontainers-cli/main.tf
@@ -0,0 +1,23 @@
+terraform {
+ required_version = ">= 1.0"
+
+ required_providers {
+ coder = {
+ source = "coder/coder"
+ version = ">= 0.17"
+ }
+ }
+}
+
+variable "agent_id" {
+ type = string
+ description = "The ID of a Coder agent."
+}
+
+resource "coder_script" "devcontainers-cli" {
+ agent_id = var.agent_id
+ display_name = "devcontainers-cli"
+ icon = "/icon/devcontainers.svg"
+ script = templatefile("${path.module}/run.sh", {})
+ run_on_start = true
+}
diff --git a/registry/coder/modules/devcontainers-cli/run.sh b/registry/coder/modules/devcontainers-cli/run.sh
new file mode 100644
index 0000000..f7bf852
--- /dev/null
+++ b/registry/coder/modules/devcontainers-cli/run.sh
@@ -0,0 +1,56 @@
+#!/usr/bin/env sh
+
+# If @devcontainers/cli is already installed, we can skip
+if command -v devcontainer >/dev/null 2>&1; then
+ echo "🥳 @devcontainers/cli is already installed into $(which devcontainer)!"
+ exit 0
+fi
+
+# Check if docker is installed
+if ! command -v docker >/dev/null 2>&1; then
+ echo "WARNING: Docker was not found but is required to use @devcontainers/cli, please make sure it is available."
+fi
+
+# Determine the package manager to use: npm, pnpm, or yarn
+if command -v yarn >/dev/null 2>&1; then
+ PACKAGE_MANAGER="yarn"
+elif command -v npm >/dev/null 2>&1; then
+ PACKAGE_MANAGER="npm"
+elif command -v pnpm >/dev/null 2>&1; then
+ PACKAGE_MANAGER="pnpm"
+else
+ echo "ERROR: No supported package manager (npm, pnpm, yarn) is installed. Please install one first." 1>&2
+ exit 1
+fi
+
+install() {
+ echo "Installing @devcontainers/cli using $PACKAGE_MANAGER..."
+ if [ "$PACKAGE_MANAGER" = "npm" ]; then
+ npm install -g @devcontainers/cli
+ elif [ "$PACKAGE_MANAGER" = "pnpm" ]; then
+ # Check if PNPM_HOME is set, if not, set it to the script's bin directory
+ # pnpm needs this to be set to install binaries
+ # coder agent ensures this part is part of the PATH
+ # so that the devcontainer command is available
+ if [ -z "$PNPM_HOME" ]; then
+ PNPM_HOME="$CODER_SCRIPT_BIN_DIR"
+    export PNPM_HOME
+ fi
+ pnpm add -g @devcontainers/cli
+ elif [ "$PACKAGE_MANAGER" = "yarn" ]; then
+ yarn global add @devcontainers/cli --prefix "$(dirname "$CODER_SCRIPT_BIN_DIR")"
+ fi
+}
+
+if ! install; then
+ echo "Failed to install @devcontainers/cli" >&2
+ exit 1
+fi
+
+if ! command -v devcontainer >/dev/null 2>&1; then
+ echo "Installation completed but 'devcontainer' command not found in PATH" >&2
+ exit 1
+fi
+
+echo "🥳 @devcontainers/cli has been installed into $(which devcontainer)!"
+exit 0
diff --git a/registry/coder/modules/filebrowser/main.test.ts b/registry/coder/modules/filebrowser/main.test.ts
new file mode 100644
index 0000000..136fa25
--- /dev/null
+++ b/registry/coder/modules/filebrowser/main.test.ts
@@ -0,0 +1,105 @@
+import { describe, expect, it } from "bun:test";
+import {
+ executeScriptInContainer,
+ runTerraformApply,
+ runTerraformInit,
+ type scriptOutput,
+ testRequiredVariables,
+} from "~test";
+
+function testBaseLine(output: scriptOutput) {
+ expect(output.exitCode).toBe(0);
+
+ const expectedLines = [
+ "\u001b[[0;1mInstalling filebrowser ",
+ "🥳 Installation complete! ",
+ "👷 Starting filebrowser in background... ",
+ "📂 Serving /root at http://localhost:13339 ",
+ "📝 Logs at /tmp/filebrowser.log",
+ ];
+
+ // we could use expect(output.stdout).toEqual(expect.arrayContaining(expectedLines)), but when it errors, it doesn't say which line is wrong
+ for (const line of expectedLines) {
+ expect(output.stdout).toContain(line);
+ }
+}
+
+describe("filebrowser", async () => {
+ await runTerraformInit(import.meta.dir);
+
+ testRequiredVariables(import.meta.dir, {
+ agent_id: "foo",
+ });
+
+ it("fails with wrong database_path", async () => {
+ const state = await runTerraformApply(import.meta.dir, {
+ agent_id: "foo",
+ database_path: "nofb",
+ }).catch((e) => {
+ if (!e.message.startsWith("\nError: Invalid value for variable")) {
+ throw e;
+ }
+ });
+ });
+
+ it("runs with default", async () => {
+ const state = await runTerraformApply(import.meta.dir, {
+ agent_id: "foo",
+ });
+
+ const output = await executeScriptInContainer(
+ state,
+ "alpine/curl",
+ "sh",
+ "apk add bash",
+ );
+
+ testBaseLine(output);
+ });
+
+ it("runs with database_path var", async () => {
+ const state = await runTerraformApply(import.meta.dir, {
+ agent_id: "foo",
+ database_path: ".config/filebrowser.db",
+ });
+
+  const output = await executeScriptInContainer(
+ state,
+ "alpine/curl",
+ "sh",
+ "apk add bash",
+ );
+
+ testBaseLine(output);
+ });
+
+ it("runs with folder var", async () => {
+ const state = await runTerraformApply(import.meta.dir, {
+ agent_id: "foo",
+ folder: "/home/coder/project",
+ });
+  const output = await executeScriptInContainer(
+ state,
+ "alpine/curl",
+ "sh",
+ "apk add bash",
+ );
+ });
+
+ it("runs with subdomain=false", async () => {
+ const state = await runTerraformApply(import.meta.dir, {
+ agent_id: "foo",
+ agent_name: "main",
+ subdomain: false,
+ });
+
+  const output = await executeScriptInContainer(
+ state,
+ "alpine/curl",
+ "sh",
+ "apk add bash",
+ );
+
+ testBaseLine(output);
+ });
+});
diff --git a/registry/coder/modules/filebrowser/run.sh b/registry/coder/modules/filebrowser/run.sh
index 84810e4..ffb87f0 100644
--- a/registry/coder/modules/filebrowser/run.sh
+++ b/registry/coder/modules/filebrowser/run.sh
@@ -1,11 +1,13 @@
#!/usr/bin/env bash
+set -euo pipefail
+
BOLD='\033[[0;1m'
printf "$${BOLD}Installing filebrowser \n\n"
# Check if filebrowser is installed
-if ! command -v filebrowser &> /dev/null; then
+if ! command -v filebrowser &>/dev/null; then
curl -fsSL https://raw.githubusercontent.com/filebrowser/get/master/get.sh | bash
fi
@@ -32,6 +34,6 @@ printf "👷 Starting filebrowser in background... \n\n"
printf "📂 Serving $${ROOT_DIR} at http://localhost:${PORT} \n\n"
-filebrowser >> ${LOG_PATH} 2>&1 &
+filebrowser >>${LOG_PATH} 2>&1 &
printf "📝 Logs at ${LOG_PATH} \n\n"
diff --git a/registry/coder/modules/github-upload-public-key/README.md b/registry/coder/modules/github-upload-public-key/README.md
index 3659ade..779d419 100644
--- a/registry/coder/modules/github-upload-public-key/README.md
+++ b/registry/coder/modules/github-upload-public-key/README.md
@@ -20,13 +20,13 @@ module "github-upload-public-key" {
}
```
-# Requirements
+## Requirements
This module requires `curl` and `jq` to be installed inside your workspace.
Github External Auth must be enabled in the workspace for this module to work. The Github app that is configured for external auth must have both read and write permissions to "Git SSH keys" in order to upload the public key. Additionally, a Coder admin must also have the `admin:public_key` scope added to the external auth configuration of the Coder deployment. For example:
-```
+```txt
CODER_EXTERNAL_AUTH_0_ID="USER_DEFINED_ID"
CODER_EXTERNAL_AUTH_0_TYPE=github
CODER_EXTERNAL_AUTH_0_CLIENT_ID=xxxxxx
@@ -36,7 +36,7 @@ CODER_EXTERNAL_AUTH_0_SCOPES="repo,workflow,admin:public_key"
Note that the default scopes if not provided are `repo,workflow`. If the module is failing to complete after updating the external auth configuration, instruct users of the module to "Unlink" and "Link" their Github account in the External Auth user settings page to get the new scopes.
-# Example
+## Example
Using a coder github external auth with a non-default id: (default is `github`)
diff --git a/registry/coder/modules/goose/README.md b/registry/coder/modules/goose/README.md
index 5c1dcb8..55ce4eb 100644
--- a/registry/coder/modules/goose/README.md
+++ b/registry/coder/modules/goose/README.md
@@ -14,7 +14,7 @@ Run the [Goose](https://block.github.io/goose/) agent in your workspace to gener
```tf
module "goose" {
source = "registry.coder.com/modules/goose/coder"
- version = "1.0.31"
+ version = "1.1.0"
agent_id = coder_agent.example.id
folder = "/home/coder"
install_goose = true
@@ -22,7 +22,7 @@ module "goose" {
}
```
-### Prerequisites
+## Prerequisites
- `screen` must be installed in your workspace to run Goose in the background
- You must add the [Coder Login](https://registry.coder.com/modules/coder-login) module to your template
@@ -72,11 +72,11 @@ resource "coder_agent" "main" {
env = {
GOOSE_SYSTEM_PROMPT = <<-EOT
You are a helpful assistant that can help write code.
-
+
Run all long running tasks (e.g. npm run dev) in the background and not in the foreground.
-
+
Periodically check in on background tasks.
-
+
Notify Coder of the status of the task before and after your steps.
EOT
GOOSE_TASK_PROMPT = data.coder_parameter.ai_prompt.value
@@ -90,7 +90,7 @@ resource "coder_agent" "main" {
module "goose" {
count = data.coder_workspace.me.start_count
source = "registry.coder.com/modules/goose/coder"
- version = "1.0.31"
+ version = "1.1.0"
agent_id = coder_agent.example.id
folder = "/home/coder"
install_goose = true
@@ -111,6 +111,36 @@ module "goose" {
}
```
+### Adding Custom Extensions (MCP)
+
+You can extend Goose's capabilities by adding custom extensions. For example, to add the desktop-commander extension:
+
+```tf
+module "goose" {
+ # ... other configuration ...
+
+ experiment_pre_install_script = <<-EOT
+ npm i -g @wonderwhy-er/desktop-commander@latest
+ EOT
+
+ experiment_additional_extensions = <<-EOT
+ desktop-commander:
+ args: []
+ cmd: desktop-commander
+ description: Ideal for background tasks
+ enabled: true
+ envs: {}
+ name: desktop-commander
+ timeout: 300
+ type: stdio
+ EOT
+}
+```
+
+This will add the desktop-commander extension to Goose, allowing it to run commands in the background. The extension will be available in the Goose interface and can be used to run long-running processes like development servers.
+
+Note: The indentation in the heredoc is preserved, so you can write the YAML naturally.
+
## Run standalone
Run Goose as a standalone app in your workspace. This will install Goose and run it directly without using screen or any task reporting to the Coder UI.
@@ -118,7 +148,7 @@ Run Goose as a standalone app in your workspace. This will install Goose and run
```tf
module "goose" {
source = "registry.coder.com/modules/goose/coder"
- version = "1.0.31"
+ version = "1.1.0"
agent_id = coder_agent.example.id
folder = "/home/coder"
install_goose = true
diff --git a/registry/coder/modules/goose/main.tf b/registry/coder/modules/goose/main.tf
index fcb6baa..0043000 100644
--- a/registry/coder/modules/goose/main.tf
+++ b/registry/coder/modules/goose/main.tf
@@ -78,6 +78,60 @@ variable "experiment_goose_model" {
default = null
}
+variable "experiment_pre_install_script" {
+ type = string
+ description = "Custom script to run before installing Goose."
+ default = null
+}
+
+variable "experiment_post_install_script" {
+ type = string
+ description = "Custom script to run after installing Goose."
+ default = null
+}
+
+variable "experiment_additional_extensions" {
+ type = string
+ description = "Additional extensions configuration in YAML format to append to the config."
+ default = null
+}
+
+locals {
+ base_extensions = <<-EOT
+coder:
+ args:
+ - exp
+ - mcp
+ - server
+ cmd: coder
+ description: Report ALL tasks and statuses (in progress, done, failed) you are working on.
+ enabled: true
+ envs:
+ CODER_MCP_APP_STATUS_SLUG: goose
+ name: Coder
+ timeout: 3000
+ type: stdio
+developer:
+ display_name: Developer
+ enabled: true
+ name: developer
+ timeout: 300
+ type: builtin
+EOT
+
+ # Add two spaces to each line of extensions to match YAML structure
+ formatted_base = " ${replace(trimspace(local.base_extensions), "\n", "\n ")}"
+ additional_extensions = var.experiment_additional_extensions != null ? "\n ${replace(trimspace(var.experiment_additional_extensions), "\n", "\n ")}" : ""
+
+ combined_extensions = <<-EOT
+extensions:
+${local.formatted_base}${local.additional_extensions}
+EOT
+
+ encoded_pre_install_script = var.experiment_pre_install_script != null ? base64encode(var.experiment_pre_install_script) : ""
+ encoded_post_install_script = var.experiment_post_install_script != null ? base64encode(var.experiment_post_install_script) : ""
+}
+
# Install and Initialize Goose
resource "coder_script" "goose" {
agent_id = var.agent_id
@@ -92,6 +146,14 @@ resource "coder_script" "goose" {
command -v "$1" >/dev/null 2>&1
}
+ # Run pre-install script if provided
+ if [ -n "${local.encoded_pre_install_script}" ]; then
+ echo "Running pre-install script..."
+ echo "${local.encoded_pre_install_script}" | base64 -d > /tmp/pre_install.sh
+ chmod +x /tmp/pre_install.sh
+ /tmp/pre_install.sh
+ fi
+
# Install Goose if enabled
if [ "${var.install_goose}" = "true" ]; then
if ! command_exists npm; then
@@ -102,6 +164,14 @@ resource "coder_script" "goose" {
RELEASE_TAG=v${var.goose_version} curl -fsSL https://github.com/block/goose/releases/download/stable/download_cli.sh | CONFIGURE=false bash
fi
+ # Run post-install script if provided
+ if [ -n "${local.encoded_post_install_script}" ]; then
+ echo "Running post-install script..."
+ echo "${local.encoded_post_install_script}" | base64 -d > /tmp/post_install.sh
+ chmod +x /tmp/post_install.sh
+ /tmp/post_install.sh
+ fi
+
# Configure Goose if auto-configure is enabled
if [ "${var.experiment_auto_configure}" = "true" ]; then
echo "Configuring Goose..."
@@ -109,29 +179,14 @@ resource "coder_script" "goose" {
cat > "$HOME/.config/goose/config.yaml" << EOL
GOOSE_PROVIDER: ${var.experiment_goose_provider}
GOOSE_MODEL: ${var.experiment_goose_model}
-extensions:
- coder:
- args:
- - exp
- - mcp
- - server
- cmd: coder
- description: Report ALL tasks and statuses (in progress, done, failed) before and after starting
- enabled: true
- envs:
- CODER_MCP_APP_STATUS_SLUG: goose
- name: Coder
- timeout: 3000
- type: stdio
- developer:
- display_name: Developer
- enabled: true
- name: developer
- timeout: 300
- type: builtin
+${trimspace(local.combined_extensions)}
EOL
fi
+ # Write system prompt to config
+ mkdir -p "$HOME/.config/goose"
+ echo "$GOOSE_SYSTEM_PROMPT" > "$HOME/.config/goose/.goosehints"
+
# Run with screen if enabled
if [ "${var.experiment_use_screen}" = "true" ]; then
echo "Running Goose in the background..."
@@ -162,14 +217,28 @@ EOL
export LANG=en_US.UTF-8
export LC_ALL=en_US.UTF-8
- screen -U -dmS goose bash -c '
+ # Determine goose command
+ if command_exists goose; then
+ GOOSE_CMD=goose
+ elif [ -f "$HOME/.local/bin/goose" ]; then
+ GOOSE_CMD="$HOME/.local/bin/goose"
+ else
+ echo "Error: Goose is not installed. Please enable install_goose or install it manually."
+ exit 1
+ fi
+
+ screen -U -dmS goose bash -c "
cd ${var.folder}
- $HOME/.local/bin/goose run --text "$GOOSE_SYSTEM_PROMPT. Your task: $GOOSE_TASK_PROMPT" --interactive | tee -a "$HOME/.goose.log"
- exec bash
- '
+ \"$GOOSE_CMD\" run --text \"Review your goosehints. Every step of the way, report tasks to Coder with proper descriptions and statuses. Your task at hand: $GOOSE_TASK_PROMPT\" --interactive | tee -a \"$HOME/.goose.log\"
+ /bin/bash
+ "
else
# Check if goose is installed before running
- if ! command_exists $HOME/.local/bin/goose; then
+ if command_exists goose; then
+ GOOSE_CMD=goose
+ elif [ -f "$HOME/.local/bin/goose" ]; then
+ GOOSE_CMD="$HOME/.local/bin/goose"
+ else
echo "Error: Goose is not installed. Please enable install_goose or install it manually."
exit 1
fi
@@ -186,21 +255,34 @@ resource "coder_app" "goose" {
#!/bin/bash
set -e
+ # Function to check if a command exists
+ command_exists() {
+ command -v "$1" >/dev/null 2>&1
+ }
+
+ # Determine goose command
+ if command_exists goose; then
+ GOOSE_CMD=goose
+ elif [ -f "$HOME/.local/bin/goose" ]; then
+ GOOSE_CMD="$HOME/.local/bin/goose"
+ else
+ echo "Error: Goose is not installed. Please enable install_goose or install it manually."
+ exit 1
+ fi
+
if [ "${var.experiment_use_screen}" = "true" ]; then
- if screen -list | grep -q "goose"; then
- export LANG=en_US.UTF-8
- export LC_ALL=en_US.UTF-8
- echo "Attaching to existing Goose session." | tee -a "$HOME/.goose.log"
- screen -xRR goose
- else
- echo "Starting a new Goose session." | tee -a "$HOME/.goose.log"
- screen -S goose bash -c 'export LANG=en_US.UTF-8; export LC_ALL=en_US.UTF-8; $HOME/.local/bin/goose run --text "Always report status and instructions to Coder, before and after your steps" --interactive | tee -a "$HOME/.goose.log"; exec bash'
+ # Check if session exists first
+ if ! screen -list | grep -q "goose"; then
+ echo "Error: No existing Goose session found. Please wait for the script to start it."
+ exit 1
fi
+ # Only attach to existing session
+ screen -xRR goose
else
cd ${var.folder}
export LANG=en_US.UTF-8
export LC_ALL=en_US.UTF-8
- $HOME/.local/bin/goose
+ "$GOOSE_CMD" run --text "Review goosehints. Your task: $GOOSE_TASK_PROMPT" --interactive
fi
EOT
icon = var.icon
diff --git a/registry/coder/modules/jetbrains-gateway/README.md b/registry/coder/modules/jetbrains-gateway/README.md
index dbf4dba..e38aae2 100644
--- a/registry/coder/modules/jetbrains-gateway/README.md
+++ b/registry/coder/modules/jetbrains-gateway/README.md
@@ -18,7 +18,7 @@ Consult the [JetBrains documentation](https://www.jetbrains.com/help/idea/prereq
module "jetbrains_gateway" {
count = data.coder_workspace.me.start_count
source = "registry.coder.com/modules/jetbrains-gateway/coder"
- version = "1.0.28"
+ version = "1.1.0"
agent_id = coder_agent.example.id
folder = "/home/coder/example"
jetbrains_ides = ["CL", "GO", "IU", "PY", "WS"]
@@ -26,7 +26,7 @@ module "jetbrains_gateway" {
}
```
-
+
## Examples
@@ -36,7 +36,7 @@ module "jetbrains_gateway" {
module "jetbrains_gateway" {
count = data.coder_workspace.me.start_count
source = "registry.coder.com/modules/jetbrains-gateway/coder"
- version = "1.0.28"
+ version = "1.1.0"
agent_id = coder_agent.example.id
folder = "/home/coder/example"
jetbrains_ides = ["GO", "WS"]
@@ -50,7 +50,7 @@ module "jetbrains_gateway" {
module "jetbrains_gateway" {
count = data.coder_workspace.me.start_count
source = "registry.coder.com/modules/jetbrains-gateway/coder"
- version = "1.0.28"
+ version = "1.1.0"
agent_id = coder_agent.example.id
folder = "/home/coder/example"
jetbrains_ides = ["IU", "PY"]
@@ -65,7 +65,7 @@ module "jetbrains_gateway" {
module "jetbrains_gateway" {
count = data.coder_workspace.me.start_count
source = "registry.coder.com/modules/jetbrains-gateway/coder"
- version = "1.0.28"
+ version = "1.1.0"
agent_id = coder_agent.example.id
folder = "/home/coder/example"
jetbrains_ides = ["IU", "PY"]
@@ -90,7 +90,7 @@ module "jetbrains_gateway" {
module "jetbrains_gateway" {
count = data.coder_workspace.me.start_count
source = "registry.coder.com/modules/jetbrains-gateway/coder"
- version = "1.0.28"
+ version = "1.1.0"
agent_id = coder_agent.example.id
folder = "/home/coder/example"
jetbrains_ides = ["GO", "WS"]
@@ -108,7 +108,7 @@ Due to the highest priority of the `ide_download_link` parameter in the `(jetbra
module "jetbrains_gateway" {
count = data.coder_workspace.me.start_count
source = "registry.coder.com/modules/jetbrains-gateway/coder"
- version = "1.0.28"
+ version = "1.1.0"
agent_id = coder_agent.example.id
folder = "/home/coder/example"
jetbrains_ides = ["GO", "WS"]
diff --git a/registry/coder/modules/jetbrains-gateway/main.tf b/registry/coder/modules/jetbrains-gateway/main.tf
index d197399..502469f 100644
--- a/registry/coder/modules/jetbrains-gateway/main.tf
+++ b/registry/coder/modules/jetbrains-gateway/main.tf
@@ -13,6 +13,16 @@ terraform {
}
}
+variable "arch" {
+ type = string
+ description = "The target architecture of the workspace"
+ default = "amd64"
+ validation {
+ condition = contains(["amd64", "arm64"], var.arch)
+ error_message = "Architecture must be either 'amd64' or 'arm64'."
+ }
+}
+
variable "agent_id" {
type = string
description = "The ID of a Coder agent."
@@ -178,78 +188,100 @@ data "http" "jetbrains_ide_versions" {
}
locals {
+ # AMD64 versions of the images just use the version string, while ARM64
+ # versions append "-aarch64". Eg:
+ #
+ # https://download.jetbrains.com/idea/ideaIU-2025.1.tar.gz
+ # https://download.jetbrains.com/idea/ideaIU-2025.1-aarch64.tar.gz
+ #
+ # We rewrite the data map above dynamically based on the user's architecture parameter.
+ #
+ effective_jetbrains_ide_versions = {
+ for k, v in var.jetbrains_ide_versions : k => {
+ build_number = v.build_number
+ version = var.arch == "arm64" ? "${v.version}-aarch64" : v.version
+ }
+ }
+
+ # When downloading the latest IDE, the download link in the JSON is either:
+ #
+ # linux.download_link
+ # linuxARM64.download_link
+ #
+ download_key = var.arch == "arm64" ? "linuxARM64" : "linux"
+
jetbrains_ides = {
"GO" = {
icon = "/icon/goland.svg",
name = "GoLand",
identifier = "GO",
- build_number = var.jetbrains_ide_versions["GO"].build_number,
- download_link = "${var.download_base_link}/go/goland-${var.jetbrains_ide_versions["GO"].version}.tar.gz"
- version = var.jetbrains_ide_versions["GO"].version
+ build_number = local.effective_jetbrains_ide_versions["GO"].build_number,
+ download_link = "${var.download_base_link}/go/goland-${local.effective_jetbrains_ide_versions["GO"].version}.tar.gz"
+ version = local.effective_jetbrains_ide_versions["GO"].version
},
"WS" = {
icon = "/icon/webstorm.svg",
name = "WebStorm",
identifier = "WS",
- build_number = var.jetbrains_ide_versions["WS"].build_number,
- download_link = "${var.download_base_link}/webstorm/WebStorm-${var.jetbrains_ide_versions["WS"].version}.tar.gz"
- version = var.jetbrains_ide_versions["WS"].version
+ build_number = local.effective_jetbrains_ide_versions["WS"].build_number,
+ download_link = "${var.download_base_link}/webstorm/WebStorm-${local.effective_jetbrains_ide_versions["WS"].version}.tar.gz"
+ version = local.effective_jetbrains_ide_versions["WS"].version
},
"IU" = {
icon = "/icon/intellij.svg",
name = "IntelliJ IDEA Ultimate",
identifier = "IU",
- build_number = var.jetbrains_ide_versions["IU"].build_number,
- download_link = "${var.download_base_link}/idea/ideaIU-${var.jetbrains_ide_versions["IU"].version}.tar.gz"
- version = var.jetbrains_ide_versions["IU"].version
+ build_number = local.effective_jetbrains_ide_versions["IU"].build_number,
+ download_link = "${var.download_base_link}/idea/ideaIU-${local.effective_jetbrains_ide_versions["IU"].version}.tar.gz"
+ version = local.effective_jetbrains_ide_versions["IU"].version
},
"PY" = {
icon = "/icon/pycharm.svg",
name = "PyCharm Professional",
identifier = "PY",
- build_number = var.jetbrains_ide_versions["PY"].build_number,
- download_link = "${var.download_base_link}/python/pycharm-professional-${var.jetbrains_ide_versions["PY"].version}.tar.gz"
- version = var.jetbrains_ide_versions["PY"].version
+ build_number = local.effective_jetbrains_ide_versions["PY"].build_number,
+ download_link = "${var.download_base_link}/python/pycharm-professional-${local.effective_jetbrains_ide_versions["PY"].version}.tar.gz"
+ version = local.effective_jetbrains_ide_versions["PY"].version
},
"CL" = {
icon = "/icon/clion.svg",
name = "CLion",
identifier = "CL",
- build_number = var.jetbrains_ide_versions["CL"].build_number,
- download_link = "${var.download_base_link}/cpp/CLion-${var.jetbrains_ide_versions["CL"].version}.tar.gz"
- version = var.jetbrains_ide_versions["CL"].version
+ build_number = local.effective_jetbrains_ide_versions["CL"].build_number,
+ download_link = "${var.download_base_link}/cpp/CLion-${local.effective_jetbrains_ide_versions["CL"].version}.tar.gz"
+ version = local.effective_jetbrains_ide_versions["CL"].version
},
"PS" = {
icon = "/icon/phpstorm.svg",
name = "PhpStorm",
identifier = "PS",
- build_number = var.jetbrains_ide_versions["PS"].build_number,
- download_link = "${var.download_base_link}/webide/PhpStorm-${var.jetbrains_ide_versions["PS"].version}.tar.gz"
- version = var.jetbrains_ide_versions["PS"].version
+ build_number = local.effective_jetbrains_ide_versions["PS"].build_number,
+ download_link = "${var.download_base_link}/webide/PhpStorm-${local.effective_jetbrains_ide_versions["PS"].version}.tar.gz"
+ version = local.effective_jetbrains_ide_versions["PS"].version
},
"RM" = {
icon = "/icon/rubymine.svg",
name = "RubyMine",
identifier = "RM",
- build_number = var.jetbrains_ide_versions["RM"].build_number,
- download_link = "${var.download_base_link}/ruby/RubyMine-${var.jetbrains_ide_versions["RM"].version}.tar.gz"
- version = var.jetbrains_ide_versions["RM"].version
+ build_number = local.effective_jetbrains_ide_versions["RM"].build_number,
+ download_link = "${var.download_base_link}/ruby/RubyMine-${local.effective_jetbrains_ide_versions["RM"].version}.tar.gz"
+ version = local.effective_jetbrains_ide_versions["RM"].version
},
"RD" = {
icon = "/icon/rider.svg",
name = "Rider",
identifier = "RD",
- build_number = var.jetbrains_ide_versions["RD"].build_number,
- download_link = "${var.download_base_link}/rider/JetBrains.Rider-${var.jetbrains_ide_versions["RD"].version}.tar.gz"
- version = var.jetbrains_ide_versions["RD"].version
+ build_number = local.effective_jetbrains_ide_versions["RD"].build_number,
+ download_link = "${var.download_base_link}/rider/JetBrains.Rider-${local.effective_jetbrains_ide_versions["RD"].version}.tar.gz"
+ version = local.effective_jetbrains_ide_versions["RD"].version
},
"RR" = {
icon = "/icon/rustrover.svg",
name = "RustRover",
identifier = "RR",
- build_number = var.jetbrains_ide_versions["RR"].build_number,
- download_link = "${var.download_base_link}/rustrover/RustRover-${var.jetbrains_ide_versions["RR"].version}.tar.gz"
- version = var.jetbrains_ide_versions["RR"].version
+ build_number = local.effective_jetbrains_ide_versions["RR"].build_number,
+ download_link = "${var.download_base_link}/rustrover/RustRover-${local.effective_jetbrains_ide_versions["RR"].version}.tar.gz"
+ version = local.effective_jetbrains_ide_versions["RR"].version
}
}
@@ -258,7 +290,7 @@ locals {
key = var.latest ? keys(local.json_data)[0] : ""
display_name = local.jetbrains_ides[data.coder_parameter.jetbrains_ide.value].name
identifier = data.coder_parameter.jetbrains_ide.value
- download_link = var.latest ? local.json_data[local.key][0].downloads.linux.link : local.jetbrains_ides[data.coder_parameter.jetbrains_ide.value].download_link
+ download_link = var.latest ? local.json_data[local.key][0].downloads[local.download_key].link : local.jetbrains_ides[data.coder_parameter.jetbrains_ide.value].download_link
build_number = var.latest ? local.json_data[local.key][0].build : local.jetbrains_ides[data.coder_parameter.jetbrains_ide.value].build_number
version = var.latest ? local.json_data[local.key][0].version : var.jetbrains_ide_versions[data.coder_parameter.jetbrains_ide.value].version
}
diff --git a/registry/coder/modules/jupyterlab/README.md b/registry/coder/modules/jupyterlab/README.md
index 64d9c1c..c0c4011 100644
--- a/registry/coder/modules/jupyterlab/README.md
+++ b/registry/coder/modules/jupyterlab/README.md
@@ -17,7 +17,7 @@ A module that adds JupyterLab in your Coder template.
module "jupyterlab" {
count = data.coder_workspace.me.start_count
source = "registry.coder.com/modules/jupyterlab/coder"
- version = "1.0.30"
+ version = "1.0.31"
agent_id = coder_agent.example.id
}
```
diff --git a/registry/coder/modules/jupyterlab/run.sh b/registry/coder/modules/jupyterlab/run.sh
index 2dd34ac..e9a45b5 100644
--- a/registry/coder/modules/jupyterlab/run.sh
+++ b/registry/coder/modules/jupyterlab/run.sh
@@ -3,13 +3,13 @@ INSTALLER=""
check_available_installer() {
# check if pipx is installed
echo "Checking for a supported installer"
- if command -v pipx > /dev/null 2>&1; then
+ if command -v pipx >/dev/null 2>&1; then
echo "pipx is installed"
INSTALLER="pipx"
return
fi
# check if uv is installed
- if command -v uv > /dev/null 2>&1; then
+ if command -v uv >/dev/null 2>&1; then
echo "uv is installed"
INSTALLER="uv"
return
@@ -26,32 +26,33 @@ fi
BOLD='\033[0;1m'
# check if jupyterlab is installed
-if ! command -v jupyter-lab > /dev/null 2>&1; then
+if ! command -v jupyter-lab >/dev/null 2>&1; then
# install jupyterlab
check_available_installer
printf "$${BOLD}Installing jupyterlab!\n"
case $INSTALLER in
- uv)
- uv pip install -q jupyterlab \
- && printf "%s\n" "🥳 jupyterlab has been installed"
- JUPYTERPATH="$HOME/.venv/bin/"
- ;;
- pipx)
- pipx install jupyterlab \
- && printf "%s\n" "🥳 jupyterlab has been installed"
- JUPYTERPATH="$HOME/.local/bin"
- ;;
+ uv)
+ uv pip install -q jupyterlab &&
+ printf "%s\n" "🥳 jupyterlab has been installed"
+ JUPYTER="$HOME/.venv/bin/jupyter-lab"
+ ;;
+ pipx)
+ pipx install jupyterlab &&
+ printf "%s\n" "🥳 jupyterlab has been installed"
+ JUPYTER="$HOME/.local/bin/jupyter-lab"
+ ;;
esac
else
printf "%s\n\n" "🥳 jupyterlab is already installed"
+ JUPYTER=$(command -v jupyter-lab)
fi
printf "👷 Starting jupyterlab in background..."
printf "check logs at ${LOG_PATH}"
-$JUPYTERPATH/jupyter-lab --no-browser \
+$JUPYTER --no-browser \
"$BASE_URL_FLAG" \
--ServerApp.ip='*' \
--ServerApp.port="${PORT}" \
--ServerApp.token='' \
--ServerApp.password='' \
- > "${LOG_PATH}" 2>&1 &
+ >"${LOG_PATH}" 2>&1 &
diff --git a/registry/coder/modules/slackme/README.md b/registry/coder/modules/slackme/README.md
index d28862c..bc2bf2a 100644
--- a/registry/coder/modules/slackme/README.md
+++ b/registry/coder/modules/slackme/README.md
@@ -11,6 +11,16 @@ tags: [helper]
Add the `slackme` command to your workspace that DMs you on Slack when your command finishes running.
+```tf
+module "slackme" {
+ count = data.coder_workspace.me.start_count
+ source = "registry.coder.com/modules/slackme/coder"
+ version = "1.0.2"
+ agent_id = coder_agent.example.id
+ auth_provider_id = "slack"
+}
+```
+
```bash
slackme npm run long-build
```
@@ -54,16 +64,6 @@ slackme npm run long-build
3. Restart your Coder deployment. Any Template can now import the Slack Me module, and `slackme` will be available on the `$PATH`:
- ```tf
- module "slackme" {
- count = data.coder_workspace.me.start_count
- source = "registry.coder.com/modules/slackme/coder"
- version = "1.0.2"
- agent_id = coder_agent.example.id
- auth_provider_id = "slack"
- }
- ```
-
## Examples
### Custom Slack Message
diff --git a/registry/coder/modules/vault-jwt/README.md b/registry/coder/modules/vault-jwt/README.md
index 9837f90..409a835 100644
--- a/registry/coder/modules/vault-jwt/README.md
+++ b/registry/coder/modules/vault-jwt/README.md
@@ -10,16 +10,17 @@ tags: [helper, integration, vault, jwt, oidc]
# Hashicorp Vault Integration (JWT)
-This module lets you authenticate with [Hashicorp Vault](https://www.vaultproject.io/) in your Coder workspaces by reusing the [OIDC](https://coder.com/docs/admin/users/oidc-auth) access token from Coder's OIDC authentication method. This requires configuring the Vault [JWT/OIDC](https://developer.hashicorp.com/vault/docs/auth/jwt#configuration) auth method.
+This module lets you authenticate with [Hashicorp Vault](https://www.vaultproject.io/) in your Coder workspaces by reusing the [OIDC](https://coder.com/docs/admin/users/oidc-auth) access token from Coder's OIDC authentication method, or another source of JWT tokens. This requires configuring the Vault [JWT/OIDC](https://developer.hashicorp.com/vault/docs/auth/jwt#configuration) auth method.
```tf
module "vault" {
- count = data.coder_workspace.me.start_count
- source = "registry.coder.com/modules/vault-jwt/coder"
- version = "1.0.20"
- agent_id = coder_agent.example.id
- vault_addr = "https://vault.example.com"
- vault_jwt_role = "coder" # The Vault role to use for authentication
+ count = data.coder_workspace.me.start_count
+ source = "registry.coder.com/modules/vault-jwt/coder"
+ version = "1.1.0"
+ agent_id = coder_agent.example.id
+ vault_addr = "https://vault.example.com"
+ vault_jwt_role = "coder" # The Vault role to use for authentication
+ vault_jwt_token = "eyJhbGciOiJIUzI1N..." # optional, if not present, defaults to user's oidc authentication token
}
```
@@ -43,7 +44,7 @@ curl -H "X-Vault-Token: ${VAULT_TOKEN}" -X GET "${VAULT_ADDR}/v1/coder/secrets/d
module "vault" {
count = data.coder_workspace.me.start_count
source = "registry.coder.com/modules/vault-jwt/coder"
- version = "1.0.20"
+ version = "1.1.0"
agent_id = coder_agent.example.id
vault_addr = "https://vault.example.com"
vault_jwt_auth_path = "oidc"
@@ -59,7 +60,7 @@ data "coder_workspace_owner" "me" {}
module "vault" {
count = data.coder_workspace.me.start_count
source = "registry.coder.com/modules/vault-jwt/coder"
- version = "1.0.20"
+ version = "1.1.0"
agent_id = coder_agent.example.id
vault_addr = "https://vault.example.com"
vault_jwt_role = data.coder_workspace_owner.me.groups[0]
@@ -72,10 +73,113 @@ module "vault" {
module "vault" {
count = data.coder_workspace.me.start_count
source = "registry.coder.com/modules/vault-jwt/coder"
- version = "1.0.20"
+ version = "1.1.0"
agent_id = coder_agent.example.id
vault_addr = "https://vault.example.com"
vault_jwt_role = "coder" # The Vault role to use for authentication
vault_cli_version = "1.17.5"
}
```
+
+### Use a custom JWT token
+
+```tf
+
+terraform {
+ required_providers {
+ jwt = {
+ source = "geektheripper/jwt"
+ version = "1.1.4"
+ }
+ time = {
+ source = "hashicorp/time"
+ version = "0.11.1"
+ }
+ }
+}
+
+
+resource "jwt_signed_token" "vault" {
+ count = data.coder_workspace.me.start_count
+ algorithm = "RS256"
+ # `openssl genrsa -out key.pem 4096` and `openssl rsa -in key.pem -pubout > pub.pem` to generate keys
+ key = file("key.pem")
+ claims_json = jsonencode({
+ iss = "https://code.example.com"
+ sub = "${data.coder_workspace.me.id}"
+ aud = "https://vault.example.com"
+ iat = provider::time::rfc3339_parse(plantimestamp()).unix
+ # Uncomment to set an expiry on the JWT token(default 3600 seconds).
+ # workspace will need to be restarted to generate a new token if it expires
+ #exp = provider::time::rfc3339_parse(timeadd(timestamp(), "3600s")).unix
+ agent = coder_agent.main.id
+ provisioner = data.coder_provisioner.main.id
+ provisioner_arch = data.coder_provisioner.main.arch
+ provisioner_os = data.coder_provisioner.main.os
+
+ workspace = data.coder_workspace.me.id
+ workspace_url = data.coder_workspace.me.access_url
+ workspace_port = data.coder_workspace.me.access_port
+ workspace_name = data.coder_workspace.me.name
+ template = data.coder_workspace.me.template_id
+ template_name = data.coder_workspace.me.template_name
+ template_version = data.coder_workspace.me.template_version
+ owner = data.coder_workspace_owner.me.id
+ owner_name = data.coder_workspace_owner.me.name
+ owner_email = data.coder_workspace_owner.me.email
+ owner_login_type = data.coder_workspace_owner.me.login_type
+ owner_groups = data.coder_workspace_owner.me.groups
+ })
+}
+
+module "vault" {
+ count = data.coder_workspace.me.start_count
+ source = "registry.coder.com/modules/vault-jwt/coder"
+ version = "1.1.0"
+ agent_id = coder_agent.example.id
+ vault_addr = "https://vault.example.com"
+ vault_jwt_role = "coder" # The Vault role to use for authentication
+ vault_jwt_token = jwt_signed_token.vault[0].token
+}
+```
+
+#### Example Vault JWT role
+
+```shell
+vault write auth/JWT_MOUNT/role/workspace - << EOF
+{
+ "user_claim": "sub",
+ "bound_audiences": "https://vault.example.com",
+ "role_type": "jwt",
+ "ttl": "1h",
+ "claim_mappings": {
+ "owner": "owner",
+ "owner_email": "owner_email",
+ "owner_login_type": "owner_login_type",
+ "owner_name": "owner_name",
+ "provisioner": "provisioner",
+ "provisioner_arch": "provisioner_arch",
+ "provisioner_os": "provisioner_os",
+ "sub": "sub",
+ "template": "template",
+ "template_name": "template_name",
+ "template_version": "template_version",
+ "workspace": "workspace",
+ "workspace_name": "workspace_name",
+ "workspace_id": "workspace_id"
+ }
+}
+EOF
+```
+
+#### Example workspace access Vault policy
+
+```tf
+path "kv/data/app/coder/{{identity.entity.aliases.AUTH_MOUNT_ACCESSOR.metadata.owner_name}}/{{identity.entity.aliases.AUTH_MOUNT_ACCESSOR.metadata.workspace_name}}" {
+ capabilities = ["create", "read", "update", "delete", "list", "subscribe"]
+ subscribe_event_types = ["*"]
+}
+path "kv/metadata/app/coder/{{identity.entity.aliases.AUTH_MOUNT_ACCESSOR.metadata.owner_name}}/{{identity.entity.aliases.AUTH_MOUNT_ACCESSOR.metadata.workspace_name}}" {
+ capabilities = ["create", "read", "update", "delete", "list", "subscribe"]
+ subscribe_event_types = ["*"]
+}
+```
diff --git a/registry/coder/modules/vault-jwt/main.tf b/registry/coder/modules/vault-jwt/main.tf
index adcc34d..17288e0 100644
--- a/registry/coder/modules/vault-jwt/main.tf
+++ b/registry/coder/modules/vault-jwt/main.tf
@@ -20,6 +20,13 @@ variable "vault_addr" {
description = "The address of the Vault server."
}
+variable "vault_jwt_token" {
+ type = string
+ description = "The JWT token used for authentication with Vault."
+ default = null
+ sensitive = true
+}
+
variable "vault_jwt_auth_path" {
type = string
description = "The path to the Vault JWT auth method."
@@ -46,7 +53,7 @@ resource "coder_script" "vault" {
display_name = "Vault (GitHub)"
icon = "/icon/vault.svg"
script = templatefile("${path.module}/run.sh", {
- CODER_OIDC_ACCESS_TOKEN : data.coder_workspace_owner.me.oidc_access_token,
+ CODER_OIDC_ACCESS_TOKEN : var.vault_jwt_token != null ? var.vault_jwt_token : data.coder_workspace_owner.me.oidc_access_token,
VAULT_JWT_AUTH_PATH : var.vault_jwt_auth_path,
VAULT_JWT_ROLE : var.vault_jwt_role,
VAULT_CLI_VERSION : var.vault_cli_version,
diff --git a/registry/coder/modules/vault-jwt/run.sh b/registry/coder/modules/vault-jwt/run.sh
index ef45884..6d47854 100644
--- a/registry/coder/modules/vault-jwt/run.sh
+++ b/registry/coder/modules/vault-jwt/run.sh
@@ -9,11 +9,11 @@ CODER_OIDC_ACCESS_TOKEN=${CODER_OIDC_ACCESS_TOKEN}
fetch() {
dest="$1"
url="$2"
- if command -v curl > /dev/null 2>&1; then
+ if command -v curl >/dev/null 2>&1; then
curl -sSL --fail "$${url}" -o "$${dest}"
- elif command -v wget > /dev/null 2>&1; then
+ elif command -v wget >/dev/null 2>&1; then
wget -O "$${dest}" "$${url}"
- elif command -v busybox > /dev/null 2>&1; then
+ elif command -v busybox >/dev/null 2>&1; then
busybox wget -O "$${dest}" "$${url}"
else
printf "curl, wget, or busybox is not installed. Please install curl or wget in your image.\n"
@@ -22,9 +22,9 @@ fetch() {
}
unzip_safe() {
- if command -v unzip > /dev/null 2>&1; then
+ if command -v unzip >/dev/null 2>&1; then
command unzip "$@"
- elif command -v busybox > /dev/null 2>&1; then
+ elif command -v busybox >/dev/null 2>&1; then
busybox unzip "$@"
else
printf "unzip or busybox is not installed. Please install unzip in your image.\n"
@@ -56,7 +56,7 @@ install() {
# Check if the vault CLI is installed and has the correct version
installation_needed=1
- if command -v vault > /dev/null 2>&1; then
+ if command -v vault >/dev/null 2>&1; then
CURRENT_VERSION=$(vault version | grep -oE '[0-9]+\.[0-9]+\.[0-9]+')
if [ "$${CURRENT_VERSION}" = "$${VAULT_CLI_VERSION}" ]; then
printf "Vault version %s is already installed and up-to-date.\n\n" "$${CURRENT_VERSION}"
@@ -81,7 +81,7 @@ install() {
return 1
fi
rm vault.zip
- if sudo mv vault /usr/local/bin/vault 2> /dev/null; then
+ if sudo mv vault /usr/local/bin/vault 2>/dev/null; then
printf "Vault installed successfully!\n\n"
else
mkdir -p ~/.local/bin
@@ -107,6 +107,6 @@ rm -rf "$TMP"
# Authenticate with Vault
printf "🔑 Authenticating with Vault ...\n\n"
-echo "$${CODER_OIDC_ACCESS_TOKEN}" | vault write auth/"$${VAULT_JWT_AUTH_PATH}"/login role="$${VAULT_JWT_ROLE}" jwt=-
+echo "$${CODER_OIDC_ACCESS_TOKEN}" | vault write -field=token auth/"$${VAULT_JWT_AUTH_PATH}"/login role="$${VAULT_JWT_ROLE}" jwt=- | vault login -
printf "🥳 Vault authentication complete!\n\n"
printf "You can now use Vault CLI to access secrets.\n"
diff --git a/registry/coder/modules/windsurf/README.md b/registry/coder/modules/windsurf/README.md
new file mode 100644
index 0000000..afdb525
--- /dev/null
+++ b/registry/coder/modules/windsurf/README.md
@@ -0,0 +1,37 @@
+---
+display_name: Windsurf Editor
+description: Add a one-click button to launch Windsurf Editor
+icon: ../../../../.icons/windsurf.svg
+maintainer_github: coder
+verified: true
+tags: [ide, windsurf, helper, ai]
+---
+
+# Windsurf Editor
+
+Add a button to open any workspace with a single click in Windsurf Editor.
+
+Uses the [Coder Remote VS Code Extension](https://github.com/coder/vscode-coder).
+
+```tf
+module "windsurf" {
+ count = data.coder_workspace.me.start_count
+ source = "registry.coder.com/modules/windsurf/coder"
+ version = "1.0.0"
+ agent_id = coder_agent.example.id
+}
+```
+
+## Examples
+
+### Open in a specific directory
+
+```tf
+module "windsurf" {
+ count = data.coder_workspace.me.start_count
+ source = "registry.coder.com/modules/windsurf/coder"
+ version = "1.0.0"
+ agent_id = coder_agent.example.id
+ folder = "/home/coder/project"
+}
+```
diff --git a/registry/coder/modules/windsurf/main.test.ts b/registry/coder/modules/windsurf/main.test.ts
new file mode 100644
index 0000000..6b520d3
--- /dev/null
+++ b/registry/coder/modules/windsurf/main.test.ts
@@ -0,0 +1,88 @@
+import { describe, expect, it } from "bun:test";
+import {
+ runTerraformApply,
+ runTerraformInit,
+ testRequiredVariables,
+} from "~test";
+
+describe("windsurf", async () => {
+ await runTerraformInit(import.meta.dir);
+
+ testRequiredVariables(import.meta.dir, {
+ agent_id: "foo",
+ });
+
+ it("default output", async () => {
+ const state = await runTerraformApply(import.meta.dir, {
+ agent_id: "foo",
+ });
+ expect(state.outputs.windsurf_url.value).toBe(
+ "windsurf://coder.coder-remote/open?owner=default&workspace=default&url=https://mydeployment.coder.com&token=$SESSION_TOKEN",
+ );
+
+ const coder_app = state.resources.find(
+ (res) => res.type === "coder_app" && res.name === "windsurf",
+ );
+
+ expect(coder_app).not.toBeNull();
+ expect(coder_app?.instances.length).toBe(1);
+ expect(coder_app?.instances[0].attributes.order).toBeNull();
+ });
+
+ it("adds folder", async () => {
+ const state = await runTerraformApply(import.meta.dir, {
+ agent_id: "foo",
+ folder: "/foo/bar",
+ });
+ expect(state.outputs.windsurf_url.value).toBe(
+ "windsurf://coder.coder-remote/open?owner=default&workspace=default&folder=/foo/bar&url=https://mydeployment.coder.com&token=$SESSION_TOKEN",
+ );
+ });
+
+ it("adds folder and open_recent", async () => {
+ const state = await runTerraformApply(import.meta.dir, {
+ agent_id: "foo",
+ folder: "/foo/bar",
+ open_recent: true,
+ });
+ expect(state.outputs.windsurf_url.value).toBe(
+ "windsurf://coder.coder-remote/open?owner=default&workspace=default&folder=/foo/bar&openRecent&url=https://mydeployment.coder.com&token=$SESSION_TOKEN",
+ );
+ });
+
+ it("adds folder but not open_recent", async () => {
+ const state = await runTerraformApply(import.meta.dir, {
+ agent_id: "foo",
+ folder: "/foo/bar",
+ open_recent: false,
+ });
+ expect(state.outputs.windsurf_url.value).toBe(
+ "windsurf://coder.coder-remote/open?owner=default&workspace=default&folder=/foo/bar&url=https://mydeployment.coder.com&token=$SESSION_TOKEN",
+ );
+ });
+
+ it("adds open_recent", async () => {
+ const state = await runTerraformApply(import.meta.dir, {
+ agent_id: "foo",
+ open_recent: true,
+ });
+ expect(state.outputs.windsurf_url.value).toBe(
+ "windsurf://coder.coder-remote/open?owner=default&workspace=default&openRecent&url=https://mydeployment.coder.com&token=$SESSION_TOKEN",
+ );
+ });
+
+ it("expect order to be set", async () => {
+ const state = await runTerraformApply(import.meta.dir, {
+ agent_id: "foo",
+ order: 22,
+ });
+
+ const coder_app = state.resources.find(
+ (res) => res.type === "coder_app" && res.name === "windsurf",
+ );
+
+ expect(coder_app).not.toBeNull();
+ expect(coder_app?.instances.length).toBe(1);
+ expect(coder_app?.instances[0].attributes.order).toBe(22);
+ });
+});
diff --git a/registry/coder/modules/windsurf/main.tf b/registry/coder/modules/windsurf/main.tf
new file mode 100644
index 0000000..1d836d7
--- /dev/null
+++ b/registry/coder/modules/windsurf/main.tf
@@ -0,0 +1,62 @@
+terraform {
+ required_version = ">= 1.0"
+
+ required_providers {
+ coder = {
+ source = "coder/coder"
+ version = ">= 0.23"
+ }
+ }
+}
+
+variable "agent_id" {
+ type = string
+ description = "The ID of a Coder agent."
+}
+
+variable "folder" {
+ type = string
+  description = "The folder to open in Windsurf Editor."
+ default = ""
+}
+
+variable "open_recent" {
+ type = bool
+ description = "Open the most recent workspace or folder. Falls back to the folder if there is no recent workspace or folder to open."
+ default = false
+}
+
+variable "order" {
+ type = number
+ description = "The order determines the position of app in the UI presentation. The lowest order is shown first and apps with equal order are sorted by name (ascending order)."
+ default = null
+}
+
+data "coder_workspace" "me" {}
+data "coder_workspace_owner" "me" {}
+
+resource "coder_app" "windsurf" {
+ agent_id = var.agent_id
+ external = true
+ icon = "/icon/windsurf.svg"
+ slug = "windsurf"
+ display_name = "Windsurf Editor"
+ order = var.order
+ url = join("", [
+ "windsurf://coder.coder-remote/open",
+ "?owner=",
+ data.coder_workspace_owner.me.name,
+ "&workspace=",
+ data.coder_workspace.me.name,
+ var.folder != "" ? join("", ["&folder=", var.folder]) : "",
+ var.open_recent ? "&openRecent" : "",
+ "&url=",
+ data.coder_workspace.me.access_url,
+ "&token=$SESSION_TOKEN",
+ ])
+}
+
+output "windsurf_url" {
+ value = coder_app.windsurf.url
+ description = "Windsurf Editor URL."
+}
diff --git a/registry/hashicorp/README.md b/registry/hashicorp/README.md
deleted file mode 100644
index 59bbe8a..0000000
--- a/registry/hashicorp/README.md
+++ /dev/null
@@ -1,8 +0,0 @@
----
-display_name: HashiCorp
-bio: HashiCorp, an IBM company, empowers organizations to automate and secure multi-cloud and hybrid environments with The Infrastructure Cloud™. Our suite of Infrastructure Lifecycle Management and Security Lifecycle Management solutions are built on projects with source code freely available at their core. The HashiCorp suite underpins the world's most critical applications, helping enterprises achieve efficiency, security, and scalability at any stage of their cloud journey.
-github: hashicorp
-linkedin: https://www.linkedin.com/company/hashicorp
-website: https://www.hashicorp.com/
-status: partner
----
diff --git a/registry/jfrog/README.md b/registry/jfrog/README.md
deleted file mode 100644
index 8dea670..0000000
--- a/registry/jfrog/README.md
+++ /dev/null
@@ -1,8 +0,0 @@
----
-display_name: Jfrog
-bio: At JFrog, we are making endless software versions a thing of the past, with liquid software that flows continuously and automatically from build all the way through to production.
-github: jfrog
-linkedin: https://www.linkedin.com/company/jfrog-ltd
-website: https://jfrog.com/
-status: partner
----
diff --git a/registry/nataindata/README.md b/registry/nataindata/README.md
index ddc5095..5f29181 100644
--- a/registry/nataindata/README.md
+++ b/registry/nataindata/README.md
@@ -3,5 +3,5 @@ display_name: Nataindata
bio: Data engineer
github: nataindata
website: https://www.nataindata.com
-status: community
+status: partner
---
diff --git a/registry/nataindata/apache-airflow/README.md b/registry/nataindata/modules/apache-airflow/README.md
similarity index 100%
rename from registry/nataindata/apache-airflow/README.md
rename to registry/nataindata/modules/apache-airflow/README.md
diff --git a/registry/nataindata/apache-airflow/main.tf b/registry/nataindata/modules/apache-airflow/main.tf
similarity index 100%
rename from registry/nataindata/apache-airflow/main.tf
rename to registry/nataindata/modules/apache-airflow/main.tf
diff --git a/registry/nataindata/apache-airflow/run.sh b/registry/nataindata/modules/apache-airflow/run.sh
similarity index 100%
rename from registry/nataindata/apache-airflow/run.sh
rename to registry/nataindata/modules/apache-airflow/run.sh
diff --git a/registry/whizus/modules/exoscale-instance-type/main.test.ts b/registry/whizus/modules/exoscale-instance-type/main.test.ts
index 8a63cbf..4b0e59e 100644
--- a/registry/whizus/modules/exoscale-instance-type/main.test.ts
+++ b/registry/whizus/modules/exoscale-instance-type/main.test.ts
@@ -23,13 +23,13 @@ describe("exoscale-instance-type", async () => {
expect(state.outputs.value.value).toBe("gpu3.huge");
});
- it("fails because of wrong categroy definition", async () => {
+ it("fails when default value is provided without any matching category definitions", async () => {
expect(async () => {
await runTerraformApply(import.meta.dir, {
default: "gpu3.huge",
// type_category: ["standard"] is standard
});
- }).toThrow('default value "gpu3.huge" must be defined as one of options');
+ }).toThrow(/value "gpu3.huge" must be defined as one of options/);
});
it("set custom order for coder_parameter", async () => {
diff --git a/scripts/contributors/contributors.go b/scripts/contributors/contributors.go
deleted file mode 100644
index 02823f2..0000000
--- a/scripts/contributors/contributors.go
+++ /dev/null
@@ -1,446 +0,0 @@
-package main
-
-import (
- "bufio"
- "errors"
- "fmt"
- "net/url"
- "os"
- "path"
- "slices"
- "strings"
-
- "gopkg.in/yaml.v3"
-)
-
-const rootRegistryPath = "./registry"
-
-var (
- validContributorStatuses = []string{"official", "partner", "community"}
- supportedAvatarFileFormats = []string{".png", ".jpeg", ".jpg", ".gif", ".svg"}
-)
-
-type readme struct {
- filePath string
- rawText string
-}
-
-type contributorProfileFrontmatter struct {
- DisplayName string `yaml:"display_name"`
- Bio string `yaml:"bio"`
- GithubUsername string `yaml:"github"`
- // Script assumes that if value is nil, the Registry site build step will
- // backfill the value with the user's GitHub avatar URL
- AvatarURL *string `yaml:"avatar"`
- LinkedinURL *string `yaml:"linkedin"`
- WebsiteURL *string `yaml:"website"`
- SupportEmail *string `yaml:"support_email"`
- EmployerGithubUsername *string `yaml:"employer_github"`
- ContributorStatus *string `yaml:"status"`
-}
-
-type contributorProfile struct {
- frontmatter contributorProfileFrontmatter
- filePath string
-}
-
-var _ error = validationPhaseError{}
-
-type validationPhaseError struct {
- phase string
- errors []error
-}
-
-func (vpe validationPhaseError) Error() string {
- validationStrs := []string{}
- for _, e := range vpe.errors {
- validationStrs = append(validationStrs, fmt.Sprintf("- %v", e))
- }
- slices.Sort(validationStrs)
-
- msg := fmt.Sprintf("Error during %q phase of README validation:", vpe.phase)
- msg += strings.Join(validationStrs, "\n")
- msg += "\n"
-
- return msg
-}
-
-func extractFrontmatter(readmeText string) (string, error) {
- if readmeText == "" {
- return "", errors.New("README is empty")
- }
-
- const fence = "---"
- fm := ""
- fenceCount := 0
- lineScanner := bufio.NewScanner(
- strings.NewReader(strings.TrimSpace(readmeText)),
- )
- for lineScanner.Scan() {
- nextLine := lineScanner.Text()
- if fenceCount == 0 && nextLine != fence {
- return "", errors.New("README does not start with frontmatter fence")
- }
-
- if nextLine != fence {
- fm += nextLine + "\n"
- continue
- }
-
- fenceCount++
- if fenceCount >= 2 {
- break
- }
- }
-
- if fenceCount == 1 {
- return "", errors.New("README does not have two sets of frontmatter fences")
- }
- return fm, nil
-}
-
-func validateContributorGithubUsername(githubUsername string) error {
- if githubUsername == "" {
- return errors.New("missing GitHub username")
- }
-
- lower := strings.ToLower(githubUsername)
- if uriSafe := url.PathEscape(lower); uriSafe != lower {
- return fmt.Errorf("gitHub username %q is not a valid URL path segment", githubUsername)
- }
-
- return nil
-}
-
-func validateContributorEmployerGithubUsername(
- employerGithubUsername *string,
- githubUsername string,
-) []error {
- if employerGithubUsername == nil {
- return nil
- }
-
- problems := []error{}
- if *employerGithubUsername == "" {
- problems = append(problems, errors.New("company_github field is defined but has empty value"))
- return problems
- }
-
- lower := strings.ToLower(*employerGithubUsername)
- if uriSafe := url.PathEscape(lower); uriSafe != lower {
- problems = append(problems, fmt.Errorf("gitHub company username %q is not a valid URL path segment", *employerGithubUsername))
- }
-
- if *employerGithubUsername == githubUsername {
- problems = append(problems, fmt.Errorf("cannot list own GitHub name (%q) as employer", githubUsername))
- }
-
- return problems
-}
-
-func validateContributorDisplayName(displayName string) error {
- if displayName == "" {
- return fmt.Errorf("missing display_name")
- }
-
- return nil
-}
-
-func validateContributorLinkedinURL(linkedinURL *string) error {
- if linkedinURL == nil {
- return nil
- }
-
- if _, err := url.ParseRequestURI(*linkedinURL); err != nil {
- return fmt.Errorf("linkedIn URL %q is not valid: %v", *linkedinURL, err)
- }
-
- return nil
-}
-
-func validateContributorSupportEmail(email *string) []error {
- if email == nil {
- return nil
- }
-
- problems := []error{}
-
- // Can't 100% validate that this is correct without actually sending
- // an email, and especially with some contributors being individual
- // developers, we don't want to do that on every single run of the CI
- // pipeline. Best we can do is verify the general structure
- username, server, ok := strings.Cut(*email, "@")
- if !ok {
- problems = append(problems, fmt.Errorf("email address %q is missing @ symbol", *email))
- return problems
- }
-
- if username == "" {
- problems = append(problems, fmt.Errorf("email address %q is missing username", *email))
- }
-
- domain, tld, ok := strings.Cut(server, ".")
- if !ok {
- problems = append(problems, fmt.Errorf("email address %q is missing period for server segment", *email))
- return problems
- }
-
- if domain == "" {
- problems = append(problems, fmt.Errorf("email address %q is missing domain", *email))
- }
- if tld == "" {
- problems = append(problems, fmt.Errorf("email address %q is missing top-level domain", *email))
- }
- if strings.Contains(*email, "?") {
- problems = append(problems, errors.New("email is not allowed to contain query parameters"))
- }
-
- return problems
-}
-
-func validateContributorWebsite(websiteURL *string) error {
- if websiteURL == nil {
- return nil
- }
-
- if _, err := url.ParseRequestURI(*websiteURL); err != nil {
- return fmt.Errorf("linkedIn URL %q is not valid: %v", *websiteURL, err)
- }
-
- return nil
-}
-
-func validateContributorStatus(status *string) error {
- if status == nil {
- return nil
- }
-
- if !slices.Contains(validContributorStatuses, *status) {
- return fmt.Errorf("contributor status %q is not valid", *status)
- }
-
- return nil
-}
-
-// Can't validate the image actually leads to a valid resource in a pure
-// function, but can at least catch obvious problems
-func validateContributorAvatarURL(avatarURL *string) []error {
- if avatarURL == nil {
- return nil
- }
-
- problems := []error{}
- if *avatarURL == "" {
- problems = append(problems, errors.New("avatar URL must be omitted or non-empty string"))
- return problems
- }
-
- // Have to use .Parse instead of .ParseRequestURI because this is the
- // one field that's allowed to be a relative URL
- if _, err := url.Parse(*avatarURL); err != nil {
- problems = append(problems, fmt.Errorf("URL %q is not a valid relative or absolute URL", *avatarURL))
- }
- if strings.Contains(*avatarURL, "?") {
- problems = append(problems, errors.New("avatar URL is not allowed to contain search parameters"))
- }
-
- matched := false
- for _, ff := range supportedAvatarFileFormats {
- matched = strings.HasSuffix(*avatarURL, ff)
- if matched {
- break
- }
- }
- if !matched {
- segments := strings.Split(*avatarURL, ".")
- fileExtension := segments[len(segments)-1]
- problems = append(problems, fmt.Errorf("avatar URL '.%s' does not end in a supported file format: [%s]", fileExtension, strings.Join(supportedAvatarFileFormats, ", ")))
- }
-
- return problems
-}
-
-func addFilePathToError(filePath string, err error) error {
- return fmt.Errorf("%q: %v", filePath, err)
-}
-
-func validateContributorYaml(yml contributorProfile) []error {
- allProblems := []error{}
-
- if err := validateContributorGithubUsername(yml.frontmatter.GithubUsername); err != nil {
- allProblems = append(allProblems, addFilePathToError(yml.filePath, err))
- }
- if err := validateContributorDisplayName(yml.frontmatter.DisplayName); err != nil {
- allProblems = append(allProblems, addFilePathToError(yml.filePath, err))
- }
- if err := validateContributorLinkedinURL(yml.frontmatter.LinkedinURL); err != nil {
- allProblems = append(allProblems, addFilePathToError(yml.filePath, err))
- }
- if err := validateContributorWebsite(yml.frontmatter.WebsiteURL); err != nil {
- allProblems = append(allProblems, addFilePathToError(yml.filePath, err))
- }
- if err := validateContributorStatus(yml.frontmatter.ContributorStatus); err != nil {
- allProblems = append(allProblems, addFilePathToError(yml.filePath, err))
- }
-
- for _, err := range validateContributorEmployerGithubUsername(yml.frontmatter.EmployerGithubUsername, yml.frontmatter.GithubUsername) {
- allProblems = append(allProblems, addFilePathToError(yml.filePath, err))
- }
- for _, err := range validateContributorSupportEmail(yml.frontmatter.SupportEmail) {
- allProblems = append(allProblems, addFilePathToError(yml.filePath, err))
- }
- for _, err := range validateContributorAvatarURL(yml.frontmatter.AvatarURL) {
- allProblems = append(allProblems, addFilePathToError(yml.filePath, err))
- }
-
- return allProblems
-}
-
-func parseContributorProfile(rm readme) (contributorProfile, error) {
- fm, err := extractFrontmatter(rm.rawText)
- if err != nil {
- return contributorProfile{}, fmt.Errorf("%q: failed to parse frontmatter: %v", rm.filePath, err)
- }
-
- yml := contributorProfileFrontmatter{}
- if err := yaml.Unmarshal([]byte(fm), &yml); err != nil {
- return contributorProfile{}, fmt.Errorf("%q: failed to parse: %v", rm.filePath, err)
- }
-
- return contributorProfile{
- filePath: rm.filePath,
- frontmatter: yml,
- }, nil
-}
-
-func parseContributorFiles(readmeEntries []readme) (map[string]contributorProfile, error) {
- profilesByUsername := map[string]contributorProfile{}
- yamlParsingErrors := []error{}
- for _, rm := range readmeEntries {
- p, err := parseContributorProfile(rm)
- if err != nil {
- yamlParsingErrors = append(yamlParsingErrors, err)
- continue
- }
-
- if prev, alreadyExists := profilesByUsername[p.frontmatter.GithubUsername]; alreadyExists {
- yamlParsingErrors = append(yamlParsingErrors, fmt.Errorf("%q: GitHub name %s conflicts with field defined in %q", p.filePath, p.frontmatter.GithubUsername, prev.filePath))
- continue
- }
- profilesByUsername[p.frontmatter.GithubUsername] = p
- }
- if len(yamlParsingErrors) != 0 {
- return nil, validationPhaseError{
- phase: "YAML parsing",
- errors: yamlParsingErrors,
- }
- }
-
- employeeGithubGroups := map[string][]string{}
- yamlValidationErrors := []error{}
- for _, p := range profilesByUsername {
- errors := validateContributorYaml(p)
- if len(errors) > 0 {
- yamlValidationErrors = append(yamlValidationErrors, errors...)
- continue
- }
-
- if p.frontmatter.EmployerGithubUsername != nil {
- employeeGithubGroups[*p.frontmatter.EmployerGithubUsername] = append(
- employeeGithubGroups[*p.frontmatter.EmployerGithubUsername],
- p.frontmatter.GithubUsername,
- )
- }
- }
- for companyName, group := range employeeGithubGroups {
- if _, found := profilesByUsername[companyName]; found {
- continue
- }
- yamlValidationErrors = append(yamlValidationErrors, fmt.Errorf("company %q does not exist in %q directory but is referenced by these profiles: [%s]", companyName, rootRegistryPath, strings.Join(group, ", ")))
- }
- if len(yamlValidationErrors) != 0 {
- return nil, validationPhaseError{
- phase: "Raw YAML Validation",
- errors: yamlValidationErrors,
- }
- }
-
- return profilesByUsername, nil
-}
-
-func aggregateContributorReadmeFiles() ([]readme, error) {
- dirEntries, err := os.ReadDir(rootRegistryPath)
- if err != nil {
- return nil, err
- }
-
- allReadmeFiles := []readme{}
- problems := []error{}
- for _, e := range dirEntries {
- dirPath := path.Join(rootRegistryPath, e.Name())
- if !e.IsDir() {
- problems = append(problems, fmt.Errorf("detected non-directory file %q at base of main Registry directory", dirPath))
- continue
- }
-
- readmePath := path.Join(dirPath, "README.md")
- rmBytes, err := os.ReadFile(readmePath)
- if err != nil {
- problems = append(problems, err)
- continue
- }
- allReadmeFiles = append(allReadmeFiles, readme{
- filePath: readmePath,
- rawText: string(rmBytes),
- })
- }
-
- if len(problems) != 0 {
- return nil, validationPhaseError{
- phase: "FileSystem reading",
- errors: problems,
- }
- }
-
- return allReadmeFiles, nil
-}
-
-func validateRelativeUrls(
- contributors map[string]contributorProfile,
-) error {
- // This function only validates relative avatar URLs for now, but it can be
- // beefed up to validate more in the future
- problems := []error{}
-
- for _, con := range contributors {
- // If the avatar URL is missing, we'll just assume that the Registry
- // site build step will take care of filling in the data properly
- if con.frontmatter.AvatarURL == nil {
- continue
- }
- if isRelativeURL := strings.HasPrefix(*con.frontmatter.AvatarURL, ".") ||
- strings.HasPrefix(*con.frontmatter.AvatarURL, "/"); !isRelativeURL {
- continue
- }
-
- if strings.HasPrefix(*con.frontmatter.AvatarURL, "..") {
- problems = append(problems, fmt.Errorf("%q: relative avatar URLs cannot be placed outside a user's namespaced directory", con.filePath))
- continue
- }
-
- absolutePath := strings.TrimSuffix(con.filePath, "README.md") +
- *con.frontmatter.AvatarURL
- _, err := os.ReadFile(absolutePath)
- if err != nil {
- problems = append(problems, fmt.Errorf("%q: relative avatar path %q does not point to image in file system", con.filePath, *con.frontmatter.AvatarURL))
- }
- }
-
- if len(problems) == 0 {
- return nil
- }
- return validationPhaseError{
- phase: "Relative URL validation",
- errors: problems,
- }
-}
diff --git a/scripts/contributors/main.go b/scripts/contributors/main.go
deleted file mode 100644
index 9091318..0000000
--- a/scripts/contributors/main.go
+++ /dev/null
@@ -1,39 +0,0 @@
-// This package is for validating all contributors within the main Registry
-// directory. It validates that it has nothing but sub-directories, and that
-// each sub-directory has a README.md file. Each of those files must then
-// describe a specific contributor. The contents of these files will be parsed
-// by the Registry site build step, to be displayed in the Registry site's UI.
-package main
-
-import (
- "log"
-)
-
-func main() {
- log.Println("Starting README validation")
- allReadmeFiles, err := aggregateContributorReadmeFiles()
- if err != nil {
- log.Panic(err)
- }
-
- log.Printf("Processing %d README files\n", len(allReadmeFiles))
- contributors, err := parseContributorFiles(allReadmeFiles)
- log.Printf(
- "Processed %d README files as valid contributor profiles",
- len(contributors),
- )
- if err != nil {
- log.Panic(err)
- }
-
- err = validateRelativeUrls(contributors)
- if err != nil {
- log.Panic(err)
- }
- log.Println("All relative URLs for READMEs are valid")
-
- log.Printf(
- "Processed all READMEs in the %q directory\n",
- rootRegistryPath,
- )
-}
diff --git a/scripts/terraform_validate.sh b/scripts/terraform_validate.sh
new file mode 100755
index 0000000..8b22d4a
--- /dev/null
+++ b/scripts/terraform_validate.sh
@@ -0,0 +1,37 @@
+#!/bin/bash
+
+set -euo pipefail
+
+validate_terraform_directory() {
+ local dir="$1"
+ echo "Running \`terraform validate\` in $dir"
+ pushd "$dir"
+ terraform init -upgrade
+ terraform validate
+ popd
+}
+
+main() {
+ # Get the directory of the script
+ local script_dir=$(dirname "$(readlink -f "$0")")
+
+ # Code assumes that registry directory will always be in same position
+ # relative to the main script directory
+ local registry_dir="$script_dir/../registry"
+
+ # Get all subdirectories in the registry directory. Code assumes that
+ # Terraform directories won't begin to appear until three levels deep into
+ # the registry (e.g., registry/coder/modules/coder-login, which will then
+ # have a main.tf file inside it)
+ local subdirs=$(find "$registry_dir" -mindepth 3 -type d | sort)
+
+ for dir in $subdirs; do
+ # Skip over any directories that obviously don't have the necessary
+ # files
+ if test -f "$dir/main.tf"; then
+ validate_terraform_directory "$dir"
+ fi
+ done
+}
+
+main
diff --git a/test/test.ts b/test/test.ts
index ab3727e..4f41318 100644
--- a/test/test.ts
+++ b/test/test.ts
@@ -30,6 +30,12 @@ export const runContainer = async (
return containerID.trim();
};
+export interface scriptOutput {
+ exitCode: number;
+ stdout: string[];
+ stderr: string[];
+}
+
/**
* Finds the only "coder_script" resource in the given state and runs it in a
* container.
@@ -38,13 +44,15 @@ export const executeScriptInContainer = async (
state: TerraformState,
image: string,
shell = "sh",
-): Promise<{
- exitCode: number;
- stdout: string[];
- stderr: string[];
-}> => {
+ before?: string,
+): Promise<scriptOutput> => {
const instance = findResourceInstance(state, "coder_script");
const id = await runContainer(image);
+
+ if (before) {
+ await execContainer(id, [shell, "-c", before]);
+ }
+
const resp = await execContainer(id, [shell, "-c", instance.script]);
const stdout = resp.stdout.trim().split("\n");
const stderr = resp.stderr.trim().split("\n");
@@ -58,12 +66,13 @@ export const executeScriptInContainer = async (
export const execContainer = async (
id: string,
cmd: string[],
+ args?: string[],
): Promise<{
exitCode: number;
stderr: string;
stdout: string;
}> => {
- const proc = spawn(["docker", "exec", id, ...cmd], {
+ const proc = spawn(["docker", "exec", ...(args ?? []), id, ...cmd], {
stderr: "pipe",
stdout: "pipe",
});