diff --git a/cmd/readmevalidation/coderresources.go b/cmd/readmevalidation/coderresources.go new file mode 100644 index 00000000..98a953c0 --- /dev/null +++ b/cmd/readmevalidation/coderresources.go @@ -0,0 +1,354 @@ +package main + +import ( + "bufio" + "errors" + "fmt" + "log" + "net/url" + "os" + "path" + "regexp" + "slices" + "strings" + + "gopkg.in/yaml.v3" +) + +var supportedResourceTypes = []string{"modules", "templates"} + +type coderResourceFrontmatter struct { + Description string `yaml:"description"` + IconURL string `yaml:"icon"` + DisplayName *string `yaml:"display_name"` + Verified *bool `yaml:"verified"` + Tags []string `yaml:"tags"` +} + +// coderResourceReadme represents a README describing a Terraform resource used +// to help create Coder workspaces. As of 2025-04-15, this encapsulates both +// Coder Modules and Coder Templates +type coderResourceReadme struct { + resourceType string + filePath string + body string + frontmatter coderResourceFrontmatter +} + +func validateCoderResourceDisplayName(displayName *string) error { + if displayName != nil && *displayName == "" { + return errors.New("if defined, display_name must not be empty string") + } + return nil +} + +func validateCoderResourceDescription(description string) error { + if description == "" { + return errors.New("frontmatter description cannot be empty") + } + return nil +} + +func validateCoderResourceIconURL(iconURL string) []error { + problems := []error{} + + if iconURL == "" { + problems = append(problems, errors.New("icon URL cannot be empty")) + return problems + } + + isAbsoluteURL := !strings.HasPrefix(iconURL, ".") && !strings.HasPrefix(iconURL, "/") + if isAbsoluteURL { + if _, err := url.ParseRequestURI(iconURL); err != nil { + problems = append(problems, errors.New("absolute icon URL is not correctly formatted")) + } + if strings.Contains(iconURL, "?") { + problems = append(problems, errors.New("icon URLs cannot contain query parameters")) + } + return problems + } + + // Would normally be skittish about having relative paths like this, but it + // should be safe because we have guarantees about the structure of the + // repo, and where this logic will run + isPermittedRelativeURL := strings.HasPrefix(iconURL, "./") || + strings.HasPrefix(iconURL, "/") || + strings.HasPrefix(iconURL, "../../../../.icons") + if !isPermittedRelativeURL { + problems = append(problems, fmt.Errorf("relative icon URL %q must either be scoped to that module's directory, or the top-level /.icons directory (this can usually be done by starting the path with \"../../../.icons\")", iconURL)) + } + + return problems +} + +func validateCoderResourceTags(tags []string) error { + if tags == nil { + return errors.New("provided tags array is nil") + } + if len(tags) == 0 { + return nil + } + + // All of these tags are used for the module/template filter controls in the + // Registry site. Need to make sure they can all be placed in the browser + // URL without issue + invalidTags := []string{} + for _, t := range tags { + if t != url.QueryEscape(t) { + invalidTags = append(invalidTags, t) + } + } + + if len(invalidTags) != 0 { + return fmt.Errorf("found invalid tags (tags that cannot be used for filter state in the Registry website): [%s]", strings.Join(invalidTags, ", ")) + } + return nil +} + +// Todo: This is a holdover from the validation logic used by the Coder Modules +// repo. 
It gives us some assurance, but realistically, we probably want to +// parse any Terraform code snippets, and make some deeper guarantees about how +// it's structured. Just validating whether it *can* be parsed as Terraform +// would be a big improvement. +var terraformVersionRe = regexp.MustCompile("^\\s*\\bversion\\s+=") + +func validateCoderResourceReadmeBody(body string) []error { + trimmed := strings.TrimSpace(body) + var errs []error + errs = append(errs, validateReadmeBody(trimmed)...) + + foundParagraph := false + terraformCodeBlockCount := 0 + foundTerraformVersionRef := false + + lineNum := 0 + isInsideCodeBlock := false + isInsideTerraform := false + + lineScanner := bufio.NewScanner(strings.NewReader(trimmed)) + for lineScanner.Scan() { + lineNum++ + nextLine := lineScanner.Text() + + // Code assumes that invalid headers would've already been handled by + // the base validation function, so we don't need to check deeper if the + // first line isn't an h1 + if lineNum == 1 { + if !strings.HasPrefix(nextLine, "# ") { + break + } + continue + } + + if strings.HasPrefix(nextLine, "```") { + isInsideCodeBlock = !isInsideCodeBlock + isInsideTerraform = isInsideCodeBlock && strings.HasPrefix(nextLine, "```tf") + if isInsideTerraform { + terraformCodeBlockCount++ + } + if strings.HasPrefix(nextLine, "```hcl") { + errs = append(errs, errors.New("all .hcl language references must be converted to .tf")) + } + continue + } + + if isInsideCodeBlock { + if isInsideTerraform { + foundTerraformVersionRef = foundTerraformVersionRef || terraformVersionRe.MatchString(nextLine) + } + continue + } + + // Code assumes that we can treat this case as the end of the "h1 + // section" and don't need to process any further lines + if lineNum > 1 && strings.HasPrefix(nextLine, "#") { + break + } + + // Code assumes that if we've reached this point, the only other options + // are: (1) empty spaces, (2) paragraphs, (3) HTML, and (4) asset + // references made via [] syntax + trimmedLine := strings.TrimSpace(nextLine) + isParagraph := trimmedLine != "" && !strings.HasPrefix(trimmedLine, "![") && !strings.HasPrefix(trimmedLine, "<") + foundParagraph = foundParagraph || isParagraph + } + + if terraformCodeBlockCount == 0 { + errs = append(errs, errors.New("did not find Terraform code block within h1 section")) + } else { + if terraformCodeBlockCount > 1 { + errs = append(errs, errors.New("cannot have more than one Terraform code block in h1 section")) + } + if !foundTerraformVersionRef { + errs = append(errs, errors.New("did not find Terraform code block that specifies 'version' field")) + } + } + if !foundParagraph { + errs = append(errs, errors.New("did not find paragraph within h1 section")) + } + if isInsideCodeBlock { + errs = append(errs, errors.New("code blocks inside h1 section do not all terminate before end of file")) + } + + return errs +} + +func validateCoderResourceReadme(rm coderResourceReadme) []error { + var errs []error + + for _, err := range validateCoderResourceReadmeBody(rm.body) { + errs = append(errs, addFilePathToError(rm.filePath, err)) + } + + if err := validateCoderResourceDisplayName(rm.frontmatter.DisplayName); err != nil { + errs = append(errs, addFilePathToError(rm.filePath, err)) + } + if err := validateCoderResourceDescription(rm.frontmatter.Description); err != nil { + errs = append(errs, addFilePathToError(rm.filePath, err)) + } + if err := validateCoderResourceTags(rm.frontmatter.Tags); err != nil { + errs = append(errs, addFilePathToError(rm.filePath, err)) + } + + for 
_, err := range validateCoderResourceIconURL(rm.frontmatter.IconURL) { + errs = append(errs, addFilePathToError(rm.filePath, err)) + } + + return errs +} + +func parseCoderResourceReadme(resourceType string, rm readme) (coderResourceReadme, error) { + fm, body, err := separateFrontmatter(rm.rawText) + if err != nil { + return coderResourceReadme{}, fmt.Errorf("%q: failed to parse frontmatter: %v", rm.filePath, err) + } + + yml := coderResourceFrontmatter{} + if err := yaml.Unmarshal([]byte(fm), &yml); err != nil { + return coderResourceReadme{}, fmt.Errorf("%q: failed to parse: %v", rm.filePath, err) + } + + return coderResourceReadme{ + resourceType: resourceType, + filePath: rm.filePath, + body: body, + frontmatter: yml, + }, nil +} + +func parseCoderResourceReadmeFiles(resourceType string, rms []readme) (map[string]coderResourceReadme, error) { + resources := map[string]coderResourceReadme{} + var yamlParsingErrs []error + for _, rm := range rms { + p, err := parseCoderResourceReadme(resourceType, rm) + if err != nil { + yamlParsingErrs = append(yamlParsingErrs, err) + continue + } + + resources[p.filePath] = p + } + if len(yamlParsingErrs) != 0 { + return nil, validationPhaseError{ + phase: validationPhaseReadmeParsing, + errors: yamlParsingErrs, + } + } + + yamlValidationErrors := []error{} + for _, readme := range resources { + errors := validateCoderResourceReadme(readme) + if len(errors) > 0 { + yamlValidationErrors = append(yamlValidationErrors, errors...) + } + } + if len(yamlValidationErrors) != 0 { + return nil, validationPhaseError{ + phase: validationPhaseReadmeParsing, + errors: yamlValidationErrors, + } + } + + return resources, nil +} + +// Todo: Need to beef up this function by grabbing each image/video URL from +// the body's AST +func validateCoderResourceRelativeUrls(resources map[string]coderResourceReadme) error { + return nil +} + +func aggregateCoderResourceReadmeFiles(resourceType string) ([]readme, error) { + registryFiles, err := os.ReadDir(rootRegistryPath) + if err != nil { + return nil, err + } + + var allReadmeFiles []readme + var errs []error + for _, rf := range registryFiles { + if !rf.IsDir() { + continue + } + + resourceRootPath := path.Join(rootRegistryPath, rf.Name(), resourceType) + resourceDirs, err := os.ReadDir(resourceRootPath) + if err != nil { + if !errors.Is(err, os.ErrNotExist) { + errs = append(errs, err) + } + continue + } + + for _, rd := range resourceDirs { + if !rd.IsDir() || rd.Name() == ".coder" { + continue + } + + resourceReadmePath := path.Join(resourceRootPath, rd.Name(), "README.md") + rm, err := os.ReadFile(resourceReadmePath) + if err != nil { + errs = append(errs, err) + continue + } + + allReadmeFiles = append(allReadmeFiles, readme{ + filePath: resourceReadmePath, + rawText: string(rm), + }) + } + } + + if len(errs) != 0 { + return nil, validationPhaseError{ + phase: validationPhaseFileLoad, + errors: errs, + } + } + return allReadmeFiles, nil +} + +func validateAllCoderResourceFilesOfType(resourceType string) error { + if !slices.Contains(supportedResourceTypes, resourceType) { + return fmt.Errorf("resource type %q is not part of supported list [%s]", resourceType, strings.Join(supportedResourceTypes, ", ")) + } + + allReadmeFiles, err := aggregateCoderResourceReadmeFiles(resourceType) + if err != nil { + return err + } + + log.Printf("Processing %d README files\n", len(allReadmeFiles)) + resources, err := parseCoderResourceReadmeFiles(resourceType, allReadmeFiles) + if err != nil { + return err + } + log.Printf("Processed 
%d README files as valid Coder resources with type %q", len(resources), resourceType) + + err = validateCoderResourceRelativeUrls(resources) + if err != nil { + return err + } + log.Printf("All relative URLs for %s READMEs are valid\n", resourceType) + return nil +} diff --git a/cmd/readmevalidation/coderresources_test.go b/cmd/readmevalidation/coderresources_test.go new file mode 100644 index 00000000..71ec75f4 --- /dev/null +++ b/cmd/readmevalidation/coderresources_test.go @@ -0,0 +1,22 @@ +package main + +import ( + _ "embed" + "testing" +) + +//go:embed testSamples/sampleReadmeBody.md +var testBody string + +func TestValidateCoderResourceReadmeBody(t *testing.T) { + t.Parallel() + + t.Run("Parses a valid README body with zero issues", func(t *testing.T) { + t.Parallel() + + errs := validateCoderResourceReadmeBody(testBody) + for _, e := range errs { + t.Error(e) + } + }) +} diff --git a/cmd/readmevalidation/contributors.go b/cmd/readmevalidation/contributors.go index 89a2b1af..3ef980ed 100644 --- a/cmd/readmevalidation/contributors.go +++ b/cmd/readmevalidation/contributors.go @@ -27,7 +27,7 @@ type contributorProfileFrontmatter struct { ContributorStatus *string `yaml:"status"` } -type contributorProfile struct { +type contributorProfileReadme struct { frontmatter contributorProfileFrontmatter namespace string filePath string @@ -155,52 +155,52 @@ func validateContributorAvatarURL(avatarURL *string) []error { return errs } -func validateContributorYaml(yml contributorProfile) []error { +func validateContributorReadme(rm contributorProfileReadme) []error { allErrs := []error{} - if err := validateContributorDisplayName(yml.frontmatter.DisplayName); err != nil { - allErrs = append(allErrs, addFilePathToError(yml.filePath, err)) + if err := validateContributorDisplayName(rm.frontmatter.DisplayName); err != nil { + allErrs = append(allErrs, addFilePathToError(rm.filePath, err)) } - if err := validateContributorLinkedinURL(yml.frontmatter.LinkedinURL); err != nil { - allErrs = append(allErrs, addFilePathToError(yml.filePath, err)) + if err := validateContributorLinkedinURL(rm.frontmatter.LinkedinURL); err != nil { + allErrs = append(allErrs, addFilePathToError(rm.filePath, err)) } - if err := validateContributorWebsite(yml.frontmatter.WebsiteURL); err != nil { - allErrs = append(allErrs, addFilePathToError(yml.filePath, err)) + if err := validateContributorWebsite(rm.frontmatter.WebsiteURL); err != nil { + allErrs = append(allErrs, addFilePathToError(rm.filePath, err)) } - if err := validateContributorStatus(yml.frontmatter.ContributorStatus); err != nil { - allErrs = append(allErrs, addFilePathToError(yml.filePath, err)) + if err := validateContributorStatus(rm.frontmatter.ContributorStatus); err != nil { + allErrs = append(allErrs, addFilePathToError(rm.filePath, err)) } - for _, err := range validateContributorSupportEmail(yml.frontmatter.SupportEmail) { - allErrs = append(allErrs, addFilePathToError(yml.filePath, err)) + for _, err := range validateContributorSupportEmail(rm.frontmatter.SupportEmail) { + allErrs = append(allErrs, addFilePathToError(rm.filePath, err)) } - for _, err := range validateContributorAvatarURL(yml.frontmatter.AvatarURL) { - allErrs = append(allErrs, addFilePathToError(yml.filePath, err)) + for _, err := range validateContributorAvatarURL(rm.frontmatter.AvatarURL) { + allErrs = append(allErrs, addFilePathToError(rm.filePath, err)) } return allErrs } -func parseContributorProfile(rm readme) (contributorProfile, error) { +func parseContributorProfile(rm readme) 
(contributorProfileReadme, error) { fm, _, err := separateFrontmatter(rm.rawText) if err != nil { - return contributorProfile{}, fmt.Errorf("%q: failed to parse frontmatter: %v", rm.filePath, err) + return contributorProfileReadme{}, fmt.Errorf("%q: failed to parse frontmatter: %v", rm.filePath, err) } yml := contributorProfileFrontmatter{} if err := yaml.Unmarshal([]byte(fm), &yml); err != nil { - return contributorProfile{}, fmt.Errorf("%q: failed to parse: %v", rm.filePath, err) + return contributorProfileReadme{}, fmt.Errorf("%q: failed to parse: %v", rm.filePath, err) } - return contributorProfile{ + return contributorProfileReadme{ filePath: rm.filePath, frontmatter: yml, namespace: strings.TrimSuffix(strings.TrimPrefix(rm.filePath, "registry/"), "/README.md"), }, nil } -func parseContributorFiles(readmeEntries []readme) (map[string]contributorProfile, error) { - profilesByNamespace := map[string]contributorProfile{} +func parseContributorFiles(readmeEntries []readme) (map[string]contributorProfileReadme, error) { + profilesByNamespace := map[string]contributorProfileReadme{} yamlParsingErrors := []error{} for _, rm := range readmeEntries { p, err := parseContributorProfile(rm) @@ -224,7 +224,7 @@ func parseContributorFiles(readmeEntries []readme) (map[string]contributorProfil yamlValidationErrors := []error{} for _, p := range profilesByNamespace { - errors := validateContributorYaml(p) + errors := validateContributorReadme(p) if len(errors) > 0 { yamlValidationErrors = append(yamlValidationErrors, errors...) continue @@ -276,7 +276,7 @@ func aggregateContributorReadmeFiles() ([]readme, error) { return allReadmeFiles, nil } -func validateContributorRelativeUrls(contributors map[string]contributorProfile) error { +func validateContributorRelativeUrls(contributors map[string]contributorProfileReadme) error { // This function only validates relative avatar URLs for now, but it can be // beefed up to validate more in the future errs := []error{} diff --git a/cmd/readmevalidation/errors.go b/cmd/readmevalidation/errors.go index db13edc5..d9dbb179 100644 --- a/cmd/readmevalidation/errors.go +++ b/cmd/readmevalidation/errors.go @@ -14,7 +14,7 @@ type validationPhaseError struct { var _ error = validationPhaseError{} func (vpe validationPhaseError) Error() string { - msg := fmt.Sprintf("Error during %q phase of README validation:", vpe.phase.String()) + msg := fmt.Sprintf("Error during %q phase of README validation:", vpe.phase) for _, e := range vpe.errors { msg += fmt.Sprintf("\n- %v", e) } diff --git a/cmd/readmevalidation/main.go b/cmd/readmevalidation/main.go index 2c0f452c..6f33f745 100644 --- a/cmd/readmevalidation/main.go +++ b/cmd/readmevalidation/main.go @@ -23,13 +23,18 @@ func main() { os.Exit(1) } - errs := []error{} + var errs []error err := validateAllContributorFiles() if err != nil { errs = append(errs, err) } + err = validateAllCoderResourceFilesOfType("modules") + if err != nil { + errs = append(errs, err) + } if len(errs) == 0 { + log.Printf("Processed all READMEs in the %q directory\n", rootRegistryPath) os.Exit(0) } for _, err := range errs { diff --git a/cmd/readmevalidation/readmefiles.go b/cmd/readmevalidation/readmefiles.go index 69ccf9fa..29676527 100644 --- a/cmd/readmevalidation/readmefiles.go +++ b/cmd/readmevalidation/readmefiles.go @@ -4,6 +4,7 @@ import ( "bufio" "errors" "fmt" + "regexp" "strings" ) @@ -31,9 +32,8 @@ func separateFrontmatter(readmeText string) (string, string, error) { fm := "" body := "" fenceCount := 0 - lineScanner := bufio.NewScanner( - 
strings.NewReader(strings.TrimSpace(readmeText)),
-	)
+
+	lineScanner := bufio.NewScanner(strings.NewReader(strings.TrimSpace(readmeText)))
 	for lineScanner.Scan() {
 		nextLine := lineScanner.Text()
 		if fenceCount < 2 && nextLine == fence {
@@ -66,48 +66,113 @@ func separateFrontmatter(readmeText string) (string, string, error) {
 	return fm, strings.TrimSpace(body), nil
 }
 
+var readmeHeaderRe = regexp.MustCompile("^(#{1,})(\\s*)")
+
+// Todo: This seems to work okay for now, but the proper way to do this is to
+// parse the body as an AST and then check the resulting nodes.
+func validateReadmeBody(body string) []error {
+	trimmed := strings.TrimSpace(body)
+
+	if trimmed == "" {
+		return []error{errors.New("README body is empty")}
+	}
+
+	// If the very first line of the README isn't an h1, there's a risk that the
+	// rest of the validation logic will break, since we don't have many
+	// guarantees about how the README is actually structured
+	if !strings.HasPrefix(trimmed, "# ") {
+		return []error{errors.New("README body must start with an ATX-style h1 header (i.e., \"# \")")}
+	}
+
+	var errs []error
+	latestHeaderLevel := 0
+	foundFirstH1 := false
+	isInCodeBlock := false
+
+	lineScanner := bufio.NewScanner(strings.NewReader(trimmed))
+	for lineScanner.Scan() {
+		nextLine := lineScanner.Text()
+
+		// Have to check this because a lot of programming languages support #
+		// comments (including Terraform), and without any context, there's no
+		// way to tell the difference between a markdown header and a code comment
+		if strings.HasPrefix(nextLine, "```") {
+			isInCodeBlock = !isInCodeBlock
+			continue
+		}
+		if isInCodeBlock {
+			continue
+		}
+
+		headerGroups := readmeHeaderRe.FindStringSubmatch(nextLine)
+		if headerGroups == nil {
+			continue
+		}
+
+		spaceAfterHeader := headerGroups[2]
+		if spaceAfterHeader == "" {
+			errs = append(errs, errors.New("header does not have a space between header characters and main header text"))
+		}
+
+		nextHeaderLevel := len(headerGroups[1])
+		if nextHeaderLevel == 1 && !foundFirstH1 {
+			foundFirstH1 = true
+			latestHeaderLevel = 1
+			continue
+		}
+
+		// If we have obviously invalid headers, it's not really safe to keep
+		// proceeding with the rest of the content
+		if nextHeaderLevel == 1 {
+			errs = append(errs, errors.New("READMEs cannot contain more than one h1 header"))
+			break
+		}
+		if nextHeaderLevel > 6 {
+			errs = append(errs, fmt.Errorf("README/HTML files cannot have headers that exceed level 6 (found level %d)", nextHeaderLevel))
+			break
+		}
+
+		// This is something we need to enforce for accessibility, not just for
+		// the Registry website, but also when users are viewing the README
+		// files in the GitHub web view
+		if nextHeaderLevel > latestHeaderLevel && nextHeaderLevel != (latestHeaderLevel+1) {
+			errs = append(errs, fmt.Errorf("headers are not allowed to increase more than 1 level at a time"))
+			continue
+		}
+
+		// As long as the above condition passes, there are no problems with
+		// going up a header level or going down 1+ header levels
+		latestHeaderLevel = nextHeaderLevel
+	}
+
+	return errs
+}
+
 // validationPhase represents a specific phase during README validation. It is
 // expected that each phase is discrete, and errors during one will prevent a
 // future phase from starting.
-type validationPhase int
+type validationPhase string
 
 const (
 	// validationPhaseFileStructureValidation indicates when the entire Registry
 	// directory is being verified to make sure all files are placed in the file
 	// system as expected.
- validationPhaseFileStructureValidation validationPhase = iota + validationPhaseFileStructureValidation validationPhase = "File structure validation" // validationPhaseFileLoad indicates when README files are being read from // the file system - validationPhaseFileLoad + validationPhaseFileLoad = "Filesystem reading" // validationPhaseReadmeParsing indicates when a README's frontmatter is // being parsed as YAML. This phase does not include YAML validation. - validationPhaseReadmeParsing + validationPhaseReadmeParsing = "README parsing" // validationPhaseReadmeValidation indicates when a README's frontmatter is // being validated as proper YAML with expected keys. - validationPhaseReadmeValidation + validationPhaseReadmeValidation = "README validation" // validationPhaseAssetCrossReference indicates when a README's frontmatter // is having all its relative URLs be validated for whether they point to // valid resources. - validationPhaseAssetCrossReference + validationPhaseAssetCrossReference = "Cross-referencing relative asset URLs" ) - -func (p validationPhase) String() string { - switch p { - case validationPhaseFileStructureValidation: - return "File structure validation" - case validationPhaseFileLoad: - return "Filesystem reading" - case validationPhaseReadmeParsing: - return "README parsing" - case validationPhaseReadmeValidation: - return "README validation" - case validationPhaseAssetCrossReference: - return "Cross-referencing relative asset URLs" - default: - return fmt.Sprintf("Unknown validation phase: %d", p) - } -} diff --git a/cmd/readmevalidation/repostructure.go b/cmd/readmevalidation/repostructure.go index 164547fd..11bd920d 100644 --- a/cmd/readmevalidation/repostructure.go +++ b/cmd/readmevalidation/repostructure.go @@ -9,10 +9,7 @@ import ( "strings" ) -var ( - supportedResourceTypes = []string{"modules", "templates"} - supportedUserNameSpaceDirectories = append(supportedResourceTypes[:], ".icons", ".images") -) +var supportedUserNameSpaceDirectories = append(supportedResourceTypes[:], ".icons", ".images") func validateCoderResourceSubdirectory(dirPath string) []error { errs := []error{} diff --git a/cmd/readmevalidation/testSamples/sampleReadmeBody.md b/cmd/readmevalidation/testSamples/sampleReadmeBody.md new file mode 100644 index 00000000..958fe21c --- /dev/null +++ b/cmd/readmevalidation/testSamples/sampleReadmeBody.md @@ -0,0 +1,121 @@ +# Goose + +Run the [Goose](https://block.github.io/goose/) agent in your workspace to generate code and perform tasks. + +```tf +module "goose" { + source = "registry.coder.com/modules/goose/coder" + version = "1.0.31" + agent_id = coder_agent.example.id + folder = "/home/coder" + install_goose = true + goose_version = "v1.0.16" +} +``` + +## Prerequisites + +- `screen` must be installed in your workspace to run Goose in the background +- You must add the [Coder Login](https://registry.coder.com/modules/coder-login) module to your template + +The `codercom/oss-dogfood:latest` container image can be used for testing on container-based workspaces. + +## Examples + +Your workspace must have `screen` installed to use this. + +### Run in the background and report tasks (Experimental) + +> This functionality is in early access as of Coder v2.21 and is still evolving. 
+> For now, we recommend testing it in a demo or staging environment, +> rather than deploying to production +> +> Learn more in [the Coder documentation](https://coder.com/docs/tutorials/ai-agents) +> +> Join our [Discord channel](https://discord.gg/coder) or +> [contact us](https://coder.com/contact) to get help or share feedback. + +```tf +module "coder-login" { + count = data.coder_workspace.me.start_count + source = "registry.coder.com/modules/coder-login/coder" + version = "1.0.15" + agent_id = coder_agent.example.id +} + +variable "anthropic_api_key" { + type = string + description = "The Anthropic API key" + sensitive = true +} + +data "coder_parameter" "ai_prompt" { + type = "string" + name = "AI Prompt" + default = "" + description = "Write a prompt for Goose" + mutable = true +} + +# Set the prompt and system prompt for Goose via environment variables +resource "coder_agent" "main" { + # ... + env = { + GOOSE_SYSTEM_PROMPT = <<-EOT + You are a helpful assistant that can help write code. + + Run all long running tasks (e.g. npm run dev) in the background and not in the foreground. + + Periodically check in on background tasks. + + Notify Coder of the status of the task before and after your steps. + EOT + GOOSE_TASK_PROMPT = data.coder_parameter.ai_prompt.value + + # An API key is required for experiment_auto_configure + # See https://block.github.io/goose/docs/getting-started/providers + ANTHROPIC_API_KEY = var.anthropic_api_key # or use a coder_parameter + } +} + +module "goose" { + count = data.coder_workspace.me.start_count + source = "registry.coder.com/modules/goose/coder" + version = "1.0.31" + agent_id = coder_agent.example.id + folder = "/home/coder" + install_goose = true + goose_version = "v1.0.16" + + # Enable experimental features + experiment_report_tasks = true + + # Run Goose in the background + experiment_use_screen = true + + # Avoid configuring Goose manually + experiment_auto_configure = true + + # Required for experiment_auto_configure + experiment_goose_provider = "anthropic" + experiment_goose_model = "claude-3-5-sonnet-latest" +} +``` + +## Run standalone + +Run Goose as a standalone app in your workspace. This will install Goose and run it directly without using screen or any task reporting to the Coder UI. 
+ +```tf +module "goose" { + source = "registry.coder.com/modules/goose/coder" + version = "1.0.31" + agent_id = coder_agent.example.id + folder = "/home/coder" + install_goose = true + goose_version = "v1.0.16" + + # Icon is not available in Coder v2.20 and below, so we'll use a custom icon URL + icon = "https://raw.githubusercontent.com/block/goose/refs/heads/main/ui/desktop/src/images/icon.svg" +} +``` diff --git a/registry/coder/modules/claude-code/README.md b/registry/coder/modules/claude-code/README.md index b6934406..38c8b313 100644 --- a/registry/coder/modules/claude-code/README.md +++ b/registry/coder/modules/claude-code/README.md @@ -22,7 +22,7 @@ module "claude-code" { } ``` -### Prerequisites +## Prerequisites - Node.js and npm must be installed in your workspace to install Claude Code - `screen` must be installed in your workspace to run Claude Code in the background diff --git a/registry/coder/modules/github-upload-public-key/README.md b/registry/coder/modules/github-upload-public-key/README.md index 3659aded..779d4197 100644 --- a/registry/coder/modules/github-upload-public-key/README.md +++ b/registry/coder/modules/github-upload-public-key/README.md @@ -20,13 +20,13 @@ module "github-upload-public-key" { } ``` -# Requirements +## Requirements This module requires `curl` and `jq` to be installed inside your workspace. Github External Auth must be enabled in the workspace for this module to work. The Github app that is configured for external auth must have both read and write permissions to "Git SSH keys" in order to upload the public key. Additionally, a Coder admin must also have the `admin:public_key` scope added to the external auth configuration of the Coder deployment. For example: -``` +```txt CODER_EXTERNAL_AUTH_0_ID="USER_DEFINED_ID" CODER_EXTERNAL_AUTH_0_TYPE=github CODER_EXTERNAL_AUTH_0_CLIENT_ID=xxxxxx @@ -36,7 +36,7 @@ CODER_EXTERNAL_AUTH_0_SCOPES="repo,workflow,admin:public_key" Note that the default scopes if not provided are `repo,workflow`. If the module is failing to complete after updating the external auth configuration, instruct users of the module to "Unlink" and "Link" their Github account in the External Auth user settings page to get the new scopes. -# Example +## Example Using a coder github external auth with a non-default id: (default is `github`) diff --git a/registry/coder/modules/goose/README.md b/registry/coder/modules/goose/README.md index 5c1dcb85..e8b844c2 100644 --- a/registry/coder/modules/goose/README.md +++ b/registry/coder/modules/goose/README.md @@ -22,7 +22,7 @@ module "goose" { } ``` -### Prerequisites +## Prerequisites - `screen` must be installed in your workspace to run Goose in the background - You must add the [Coder Login](https://registry.coder.com/modules/coder-login) module to your template diff --git a/registry/coder/modules/slackme/README.md b/registry/coder/modules/slackme/README.md index d28862cf..bc2bf2a4 100644 --- a/registry/coder/modules/slackme/README.md +++ b/registry/coder/modules/slackme/README.md @@ -11,6 +11,16 @@ tags: [helper] Add the `slackme` command to your workspace that DMs you on Slack when your command finishes running. +```tf +module "slackme" { + count = data.coder_workspace.me.start_count + source = "registry.coder.com/modules/slackme/coder" + version = "1.0.2" + agent_id = coder_agent.example.id + auth_provider_id = "slack" +} +``` + ```bash slackme npm run long-build ``` @@ -54,16 +64,6 @@ slackme npm run long-build 3. Restart your Coder deployment. 
Any Template can now import the Slack Me module, and `slackme` will be available on the `$PATH`: - ```tf - module "slackme" { - count = data.coder_workspace.me.start_count - source = "registry.coder.com/modules/slackme/coder" - version = "1.0.2" - agent_id = coder_agent.example.id - auth_provider_id = "slack" - } - ``` - ## Examples ### Custom Slack Message