From 3c848d3797432125085c369fc20395e7d8cb1a10 Mon Sep 17 00:00:00 2001 From: Cian Johnston Date: Fri, 25 Oct 2024 19:05:27 +0100 Subject: [PATCH 01/14] feat(provisioner/terraform/tfparse): implement WorkspaceTagDefaultsFromFile --- provisioner/terraform/tfparse/tfextract.go | 182 ------- provisioner/terraform/tfparse/tfparse.go | 490 ++++++++++++++++++ provisioner/terraform/tfparse/tfparse_test.go | 357 +++++++++++++ 3 files changed, 847 insertions(+), 182 deletions(-) delete mode 100644 provisioner/terraform/tfparse/tfextract.go create mode 100644 provisioner/terraform/tfparse/tfparse.go create mode 100644 provisioner/terraform/tfparse/tfparse_test.go diff --git a/provisioner/terraform/tfparse/tfextract.go b/provisioner/terraform/tfparse/tfextract.go deleted file mode 100644 index ed85732e00d5e..0000000000000 --- a/provisioner/terraform/tfparse/tfextract.go +++ /dev/null @@ -1,182 +0,0 @@ -package tfparse - -import ( - "context" - "encoding/json" - "os" - "slices" - "sort" - "strings" - - "github.com/coder/coder/v2/provisionersdk/proto" - - "github.com/hashicorp/hcl/v2" - "github.com/hashicorp/hcl/v2/hclparse" - "github.com/hashicorp/hcl/v2/hclsyntax" - "github.com/hashicorp/terraform-config-inspect/tfconfig" - "golang.org/x/xerrors" - - "cdr.dev/slog" -) - -// WorkspaceTags extracts tags from coder_workspace_tags data sources defined in module. -func WorkspaceTags(ctx context.Context, logger slog.Logger, module *tfconfig.Module) (map[string]string, error) { - workspaceTags := map[string]string{} - - for _, dataResource := range module.DataResources { - if dataResource.Type != "coder_workspace_tags" { - logger.Debug(ctx, "skip resource as it is not a coder_workspace_tags", "resource_name", dataResource.Name, "resource_type", dataResource.Type) - continue - } - - var file *hcl.File - var diags hcl.Diagnostics - parser := hclparse.NewParser() - - if !strings.HasSuffix(dataResource.Pos.Filename, ".tf") { - logger.Debug(ctx, "only .tf files can be parsed", "filename", dataResource.Pos.Filename) - continue - } - // We know in which HCL file is the data resource defined. - file, diags = parser.ParseHCLFile(dataResource.Pos.Filename) - if diags.HasErrors() { - return nil, xerrors.Errorf("can't parse the resource file: %s", diags.Error()) - } - - // Parse root to find "coder_workspace_tags". - content, _, diags := file.Body.PartialContent(rootTemplateSchema) - if diags.HasErrors() { - return nil, xerrors.Errorf("can't parse the resource file: %s", diags.Error()) - } - - // Iterate over blocks to locate the exact "coder_workspace_tags" data resource. - for _, block := range content.Blocks { - if !slices.Equal(block.Labels, []string{"coder_workspace_tags", dataResource.Name}) { - continue - } - - // Parse "coder_workspace_tags" to find all key-value tags. 
- resContent, _, diags := block.Body.PartialContent(coderWorkspaceTagsSchema) - if diags.HasErrors() { - return nil, xerrors.Errorf(`can't parse the resource coder_workspace_tags: %s`, diags.Error()) - } - - if resContent == nil { - continue // workspace tags are not present - } - - if _, ok := resContent.Attributes["tags"]; !ok { - return nil, xerrors.Errorf(`"tags" attribute is required by coder_workspace_tags`) - } - - expr := resContent.Attributes["tags"].Expr - tagsExpr, ok := expr.(*hclsyntax.ObjectConsExpr) - if !ok { - return nil, xerrors.Errorf(`"tags" attribute is expected to be a key-value map`) - } - - // Parse key-value entries in "coder_workspace_tags" - for _, tagItem := range tagsExpr.Items { - key, err := previewFileContent(tagItem.KeyExpr.Range()) - if err != nil { - return nil, xerrors.Errorf("can't preview the resource file: %v", err) - } - key = strings.Trim(key, `"`) - - value, err := previewFileContent(tagItem.ValueExpr.Range()) - if err != nil { - return nil, xerrors.Errorf("can't preview the resource file: %v", err) - } - - logger.Info(ctx, "workspace tag found", "key", key, "value", value) - - if _, ok := workspaceTags[key]; ok { - return nil, xerrors.Errorf(`workspace tag %q is defined multiple times`, key) - } - workspaceTags[key] = value - } - } - } - return workspaceTags, nil -} - -var rootTemplateSchema = &hcl.BodySchema{ - Blocks: []hcl.BlockHeaderSchema{ - { - Type: "data", - LabelNames: []string{"type", "name"}, - }, - }, -} - -var coderWorkspaceTagsSchema = &hcl.BodySchema{ - Attributes: []hcl.AttributeSchema{ - { - Name: "tags", - }, - }, -} - -func previewFileContent(fileRange hcl.Range) (string, error) { - body, err := os.ReadFile(fileRange.Filename) - if err != nil { - return "", err - } - return string(fileRange.SliceBytes(body)), nil -} - -// LoadTerraformVariables extracts all Terraform variables from module and converts them -// to template variables. The variables are sorted by source position. -func LoadTerraformVariables(module *tfconfig.Module) ([]*proto.TemplateVariable, error) { - // Sort variables by (filename, line) to make the ordering consistent - variables := make([]*tfconfig.Variable, 0, len(module.Variables)) - for _, v := range module.Variables { - variables = append(variables, v) - } - sort.Slice(variables, func(i, j int) bool { - return compareSourcePos(variables[i].Pos, variables[j].Pos) - }) - - var templateVariables []*proto.TemplateVariable - for _, v := range variables { - mv, err := convertTerraformVariable(v) - if err != nil { - return nil, err - } - templateVariables = append(templateVariables, mv) - } - return templateVariables, nil -} - -// convertTerraformVariable converts a Terraform variable to a template-wide variable, processed by Coder. -func convertTerraformVariable(variable *tfconfig.Variable) (*proto.TemplateVariable, error) { - var defaultData string - if variable.Default != nil { - var valid bool - defaultData, valid = variable.Default.(string) - if !valid { - defaultDataRaw, err := json.Marshal(variable.Default) - if err != nil { - return nil, xerrors.Errorf("parse variable %q default: %w", variable.Name, err) - } - defaultData = string(defaultDataRaw) - } - } - - return &proto.TemplateVariable{ - Name: variable.Name, - Description: variable.Description, - Type: variable.Type, - DefaultValue: defaultData, - // variable.Required is always false. Empty string is a valid default value, so it doesn't enforce required to be "true". 
- Required: variable.Default == nil, - Sensitive: variable.Sensitive, - }, nil -} - -func compareSourcePos(x, y tfconfig.SourcePos) bool { - if x.Filename != y.Filename { - return x.Filename < y.Filename - } - return x.Line < y.Line -} diff --git a/provisioner/terraform/tfparse/tfparse.go b/provisioner/terraform/tfparse/tfparse.go new file mode 100644 index 0000000000000..7936fab6ba785 --- /dev/null +++ b/provisioner/terraform/tfparse/tfparse.go @@ -0,0 +1,490 @@ +package tfparse + +import ( + "archive/zip" + "bytes" + "context" + "encoding/json" + "io" + "os" + "slices" + "sort" + "strconv" + "strings" + + "github.com/coder/coder/v2/archive" + "github.com/coder/coder/v2/provisionersdk" + "github.com/coder/coder/v2/provisionersdk/proto" + + "github.com/hashicorp/hcl/v2" + "github.com/hashicorp/hcl/v2/hclparse" + "github.com/hashicorp/hcl/v2/hclsyntax" + "github.com/hashicorp/terraform-config-inspect/tfconfig" + "github.com/zclconf/go-cty/cty" + "golang.org/x/xerrors" + + "cdr.dev/slog" +) + +// NOTE: This is duplicated from coderd but we can't import it here without +// introducing a circular dependency +const maxFileSizeBytes = 10 * (10 << 20) // 10 MB + +// WorkspaceTags extracts tags from coder_workspace_tags data sources defined in module. +// Note that this only returns the lexical values of the data source, and does not +// evaluate variables and such. To do this, see evalProvisionerTags below. +func WorkspaceTags(ctx context.Context, logger slog.Logger, module *tfconfig.Module) (map[string]string, error) { + workspaceTags := map[string]string{} + + for _, dataResource := range module.DataResources { + if dataResource.Type != "coder_workspace_tags" { + logger.Debug(ctx, "skip resource as it is not a coder_workspace_tags", "resource_name", dataResource.Name, "resource_type", dataResource.Type) + continue + } + + var file *hcl.File + var diags hcl.Diagnostics + parser := hclparse.NewParser() + + if !strings.HasSuffix(dataResource.Pos.Filename, ".tf") { + logger.Debug(ctx, "only .tf files can be parsed", "filename", dataResource.Pos.Filename) + continue + } + // We know in which HCL file is the data resource defined. + file, diags = parser.ParseHCLFile(dataResource.Pos.Filename) + if diags.HasErrors() { + return nil, xerrors.Errorf("can't parse the resource file: %s", diags.Error()) + } + + // Parse root to find "coder_workspace_tags". + content, _, diags := file.Body.PartialContent(rootTemplateSchema) + if diags.HasErrors() { + return nil, xerrors.Errorf("can't parse the resource file: %s", diags.Error()) + } + + // Iterate over blocks to locate the exact "coder_workspace_tags" data resource. + for _, block := range content.Blocks { + if !slices.Equal(block.Labels, []string{"coder_workspace_tags", dataResource.Name}) { + continue + } + + // Parse "coder_workspace_tags" to find all key-value tags. 
+ resContent, _, diags := block.Body.PartialContent(coderWorkspaceTagsSchema) + if diags.HasErrors() { + return nil, xerrors.Errorf(`can't parse the resource coder_workspace_tags: %s`, diags.Error()) + } + + if resContent == nil { + continue // workspace tags are not present + } + + if _, ok := resContent.Attributes["tags"]; !ok { + return nil, xerrors.Errorf(`"tags" attribute is required by coder_workspace_tags`) + } + + expr := resContent.Attributes["tags"].Expr + tagsExpr, ok := expr.(*hclsyntax.ObjectConsExpr) + if !ok { + return nil, xerrors.Errorf(`"tags" attribute is expected to be a key-value map`) + } + + // Parse key-value entries in "coder_workspace_tags" + for _, tagItem := range tagsExpr.Items { + key, err := previewFileContent(tagItem.KeyExpr.Range()) + if err != nil { + return nil, xerrors.Errorf("can't preview the resource file: %v", err) + } + key = strings.Trim(key, `"`) + + value, err := previewFileContent(tagItem.ValueExpr.Range()) + if err != nil { + return nil, xerrors.Errorf("can't preview the resource file: %v", err) + } + + logger.Info(ctx, "workspace tag found", "key", key, "value", value) + + if _, ok := workspaceTags[key]; ok { + return nil, xerrors.Errorf(`workspace tag %q is defined multiple times`, key) + } + workspaceTags[key] = value + } + } + } + return workspaceTags, nil +} + +// WorkspaceTagDefaultsFromFile extracts the default values for a `coder_workspace_tags` resource from the given +// +// file. It also ensures that any uses of the `coder_workspace_tags` data source only +// reference the following data types: +// 1. Static variables +// 2. Template variables +// 3. Coder parameters +// Any other data types are not allowed, as their values cannot be known at +// the time of template import. +func WorkspaceTagDefaultsFromFile(ctx context.Context, logger slog.Logger, file []byte, mimetype string) (tags map[string]string, err error) { + module, cleanup, err := loadModuleFromFile(file, mimetype) + if err != nil { + return nil, xerrors.Errorf("load module from file: %w", err) + } + defer func() { + if err := cleanup(); err != nil { + logger.Error(ctx, "failed to clean up", slog.Error(err)) + } + }() + + // This only gets us the expressions. We need to evaluate them. + // Example: var.region -> "us" + tags, err = WorkspaceTags(ctx, logger, module) + if err != nil { + return nil, xerrors.Errorf("extract workspace tags: %w", err) + } + + // To evalute the expressions, we need to load the default values for + // variables and parameters. + varsDefaults, paramsDefaults, err := loadDefaults(ctx, logger, module) + if err != nil { + return nil, xerrors.Errorf("load defaults: %w", err) + } + + // Evaluate the tags expressions given the inputs. + // This will resolve any variables or parameters to their default + // values. + evalTags, err := EvalProvisionerTags(varsDefaults, paramsDefaults, tags) + if err != nil { + return nil, xerrors.Errorf("eval provisioner tags: %w", err) + } + + // Ensure that none of the tag values are empty after evaluation. 
+ for k, v := range evalTags { + if len(strings.TrimSpace(v)) > 0 { + continue + } + return nil, xerrors.Errorf("provisioner tag %q evaluated to an empty value, please set a default value", k) + } + return evalTags, nil +} + +func loadModuleFromFile(file []byte, mimetype string) (module *tfconfig.Module, cleanup func() error, err error) { + // Create a temporary directory + cleanup = func() error { return nil } // no-op cleanup + tmpDir, err := os.MkdirTemp("", "tfparse-*") + if err != nil { + return nil, cleanup, xerrors.Errorf("create temp dir: %w", err) + } + cleanup = func() error { // real cleanup + return os.RemoveAll(tmpDir) + } + + // Untar the file into the temporary directory + var rdr io.Reader + switch mimetype { + case "application/x-tar": + rdr = bytes.NewReader(file) + case "application/zip": + zr, err := zip.NewReader(bytes.NewReader(file), int64(len(file))) + if err != nil { + return nil, cleanup, xerrors.Errorf("read zip file: %w", err) + } + tarBytes, err := archive.CreateTarFromZip(zr, maxFileSizeBytes) + if err != nil { + return nil, cleanup, xerrors.Errorf("convert zip to tar: %w", err) + } + rdr = bytes.NewReader(tarBytes) + default: + return nil, cleanup, xerrors.Errorf("unsupported mimetype: %s", mimetype) + } + + if err := provisionersdk.Untar(tmpDir, rdr); err != nil { + return nil, cleanup, xerrors.Errorf("untar: %w", err) + } + + module, diags := tfconfig.LoadModule(tmpDir) + if diags.HasErrors() { + return nil, cleanup, xerrors.Errorf("load module: %s", diags.Error()) + } + + return module, cleanup, nil +} + +// loadDefaults inspects the given module and returns the default values for +// all variables and coder_parameter data sources referenced there. +func loadDefaults(ctx context.Context, logger slog.Logger, module *tfconfig.Module) (varsDefaults map[string]string, paramsDefaults map[string]string, err error) { + // iterate through module.Variables to get the default values for all + // variables. + varsDefaults = make(map[string]string) + for _, v := range module.Variables { + sv, err := interfaceToString(v.Default) + if err != nil { + return nil, nil, xerrors.Errorf("can't convert variable default value to string: %v", err) + } + varsDefaults[v.Name] = strings.Trim(sv, `"`) + } + + // iterate through module.DataResources to get the default values for all + // coder_parameter data resources. + paramsDefaults = make(map[string]string) + for _, dataResource := range module.DataResources { + if dataResource.Type != "coder_parameter" { + logger.Debug(ctx, "skip resource as it is not a coder_parameter", "resource_name", dataResource.Name, "resource_type", dataResource.Type) + } + + var file *hcl.File + var diags hcl.Diagnostics + parser := hclparse.NewParser() + + if !strings.HasSuffix(dataResource.Pos.Filename, ".tf") { + logger.Debug(ctx, "only .tf files can be parsed", "filename", dataResource.Pos.Filename) + continue + } + + // We know in which HCL file is the data resource defined. + file, diags = parser.ParseHCLFile(dataResource.Pos.Filename) + if diags.HasErrors() { + return nil, nil, xerrors.Errorf("can't parse the resource file %q: %s", dataResource.Pos.Filename, diags.Error()) + } + + // Parse root to find "coder_parameter". + content, _, diags := file.Body.PartialContent(rootTemplateSchema) + if diags.HasErrors() { + return nil, nil, xerrors.Errorf("can't parse the resource file: %s", diags.Error()) + } + + // Iterate over blocks to locate the exact "coder_parameter" data resource. 
+ for _, block := range content.Blocks { + if !slices.Equal(block.Labels, []string{"coder_parameter", dataResource.Name}) { + continue + } + + // Parse "coder_parameter" to find the default value. + resContent, _, diags := block.Body.PartialContent(coderParameterSchema) + if diags.HasErrors() { + return nil, nil, xerrors.Errorf(`can't parse the coder_parameter: %s`, diags.Error()) + } + + if _, ok := resContent.Attributes["default"]; !ok { + paramsDefaults[dataResource.Name] = "" + } else { + expr := resContent.Attributes["default"].Expr + value, err := previewFileContent(expr.Range()) + if err != nil { + return nil, nil, xerrors.Errorf("can't preview the resource file: %v", err) + } + paramsDefaults[dataResource.Name] = strings.Trim(value, `"`) + } + } + } + return varsDefaults, paramsDefaults, nil +} + +// EvalProvisionerTags evaluates the given workspaceTags based on the given +// default values for variables and coder_parameter data sources. +func EvalProvisionerTags(varsDefaults, paramsDefaults, workspaceTags map[string]string) (map[string]string, error) { + // Filter only allowed data sources for preflight check. + // This is not strictly required but provides a friendlier error. + if err := validWorkspaceTagValues(workspaceTags); err != nil { + return nil, err + } + // We only add variables and coder_parameter data sources. Anything else will be + // undefined and will raise a Terraform error. + evalCtx := buildEvalContext(varsDefaults, paramsDefaults) + tags := make(map[string]string) + for workspaceTagKey, workspaceTagValue := range workspaceTags { + expr, diags := hclsyntax.ParseExpression([]byte(workspaceTagValue), "expression.hcl", hcl.InitialPos) + if diags.HasErrors() { + return nil, xerrors.Errorf("failed to parse workspace tag key %q value %q: %s", workspaceTagKey, workspaceTagValue, diags.Error()) + } + + val, diags := expr.Value(evalCtx) + if diags.HasErrors() { + return nil, xerrors.Errorf("failed to evaluate workspace tag key %q value %q: %s", workspaceTagKey, workspaceTagValue, diags.Error()) + } + + // Do not use "val.AsString()" as it can panic + str, err := ctyValueString(val) + if err != nil { + return nil, xerrors.Errorf("failed to marshal workspace tag key %q value %q as string: %s", workspaceTagKey, workspaceTagValue, err) + } + tags[workspaceTagKey] = str + } + return tags, nil +} + +// validWorkspaceTagValues returns an error if any value of the given tags map +// evaluates to a datasource other than "coder_parameter". +// This only serves to provide a friendly error if a user attempts to reference +// a data source other than "coder_parameter" in "coder_workspace_tags". 
+func validWorkspaceTagValues(tags map[string]string) error { + for _, v := range tags { + parts := strings.SplitN(v, ".", 3) + if len(parts) != 3 { + continue + } + if parts[0] == "data" && parts[1] != "coder_parameter" { + return xerrors.Errorf("invalid workspace tag value %q: only the \"coder_parameter\" data source is supported here", v) + } + } + return nil +} + +func buildEvalContext(varDefaults map[string]string, paramDefaults map[string]string) *hcl.EvalContext { + varDefaultsM := map[string]cty.Value{} + for varName, varDefault := range varDefaults { + varDefaultsM[varName] = cty.MapVal(map[string]cty.Value{ + "value": cty.StringVal(varDefault), + }) + } + + paramDefaultsM := map[string]cty.Value{} + for paramName, paramDefault := range paramDefaults { + paramDefaultsM[paramName] = cty.MapVal(map[string]cty.Value{ + "value": cty.StringVal(paramDefault), + }) + } + + evalCtx := &hcl.EvalContext{ + Variables: map[string]cty.Value{}, + } + if len(varDefaultsM) != 0 { + evalCtx.Variables["var"] = cty.MapVal(varDefaultsM) + } + if len(paramDefaultsM) != 0 { + evalCtx.Variables["data"] = cty.MapVal(map[string]cty.Value{ + "coder_parameter": cty.MapVal(paramDefaultsM), + }) + } + + return evalCtx +} + +var rootTemplateSchema = &hcl.BodySchema{ + Blocks: []hcl.BlockHeaderSchema{ + { + Type: "data", + LabelNames: []string{"type", "name"}, + }, + }, +} + +var coderWorkspaceTagsSchema = &hcl.BodySchema{ + Attributes: []hcl.AttributeSchema{ + { + Name: "tags", + }, + }, +} + +var coderParameterSchema = &hcl.BodySchema{ + Attributes: []hcl.AttributeSchema{ + { + Name: "default", + }, + }, +} + +func previewFileContent(fileRange hcl.Range) (string, error) { + body, err := os.ReadFile(fileRange.Filename) + if err != nil { + return "", err + } + return string(fileRange.SliceBytes(body)), nil +} + +// LoadTerraformVariables extracts all Terraform variables from module and converts them +// to template variables. The variables are sorted by source position. +func LoadTerraformVariables(module *tfconfig.Module) ([]*proto.TemplateVariable, error) { + // Sort variables by (filename, line) to make the ordering consistent + variables := make([]*tfconfig.Variable, 0, len(module.Variables)) + for _, v := range module.Variables { + variables = append(variables, v) + } + sort.Slice(variables, func(i, j int) bool { + return compareSourcePos(variables[i].Pos, variables[j].Pos) + }) + + var templateVariables []*proto.TemplateVariable + for _, v := range variables { + mv, err := convertTerraformVariable(v) + if err != nil { + return nil, err + } + templateVariables = append(templateVariables, mv) + } + return templateVariables, nil +} + +// convertTerraformVariable converts a Terraform variable to a template-wide variable, processed by Coder. +func convertTerraformVariable(variable *tfconfig.Variable) (*proto.TemplateVariable, error) { + var defaultData string + if variable.Default != nil { + var valid bool + defaultData, valid = variable.Default.(string) + if !valid { + defaultDataRaw, err := json.Marshal(variable.Default) + if err != nil { + return nil, xerrors.Errorf("parse variable %q default: %w", variable.Name, err) + } + defaultData = string(defaultDataRaw) + } + } + + return &proto.TemplateVariable{ + Name: variable.Name, + Description: variable.Description, + Type: variable.Type, + DefaultValue: defaultData, + // variable.Required is always false. Empty string is a valid default value, so it doesn't enforce required to be "true". 
+ Required: variable.Default == nil, + Sensitive: variable.Sensitive, + }, nil +} + +func compareSourcePos(x, y tfconfig.SourcePos) bool { + if x.Filename != y.Filename { + return x.Filename < y.Filename + } + return x.Line < y.Line +} + +func ctyValueString(val cty.Value) (string, error) { + switch val.Type() { + case cty.Bool: + if val.True() { + return "true", nil + } else { + return "false", nil + } + case cty.Number: + return val.AsBigFloat().String(), nil + case cty.String: + return val.AsString(), nil + // We may also have a map[string]interface{} with key "value". + case cty.Map(cty.String): + valval, ok := val.AsValueMap()["value"] + if !ok { + return "", xerrors.Errorf("map does not have key 'value'") + } + return ctyValueString(valval) + default: + return "", xerrors.Errorf("only primitive types are supported - bool, number, and string") + } +} + +func interfaceToString(i interface{}) (string, error) { + switch v := i.(type) { + case nil: + return "", nil + case string: + return v, nil + case []byte: + return string(v), nil + case int: + return strconv.FormatInt(int64(v), 10), nil + case float64: + return strconv.FormatFloat(v, 'f', -1, 64), nil + case bool: + return strconv.FormatBool(v), nil + default: + return "", xerrors.Errorf("unsupported type %T", v) + } +} diff --git a/provisioner/terraform/tfparse/tfparse_test.go b/provisioner/terraform/tfparse/tfparse_test.go new file mode 100644 index 0000000000000..699f01d45a1d5 --- /dev/null +++ b/provisioner/terraform/tfparse/tfparse_test.go @@ -0,0 +1,357 @@ +package tfparse_test + +import ( + "archive/tar" + "bytes" + "testing" + + "cdr.dev/slog/sloggers/slogtest" + "github.com/coder/coder/v2/archive" + "github.com/coder/coder/v2/provisioner/terraform/tfparse" + "github.com/coder/coder/v2/testutil" + + "github.com/stretchr/testify/require" +) + +func Test_WorkspaceTagDefaultsFromFile(t *testing.T) { + t.Parallel() + + for _, tc := range []struct { + name string + files map[string]string + expectTags map[string]string + expectError string + }{ + { + name: "empty", + files: map[string]string{}, + expectTags: map[string]string{}, + expectError: "", + }, + { + name: "single text file", + files: map[string]string{ + "file.txt": ` + hello world`, + }, + expectTags: map[string]string{}, + expectError: "", + }, + { + name: "main.tf with no workspace_tags", + files: map[string]string{ + "main.tf": ` + provider "foo" {} + resource "foo_bar" "baz" {} + variable "region" { + type = string + default = "us" + } + data "coder_parameter" "az" { + name = "az" + type = "string" + default = "a" + }`, + }, + expectTags: map[string]string{}, + expectError: "", + }, + { + name: "main.tf with empty workspace tags", + files: map[string]string{ + "main.tf": ` + provider "foo" {} + resource "foo_bar" "baz" {} + variable "region" { + type = string + default = "us" + } + data "coder_parameter" "az" { + name = "az" + type = "string" + default = "a" + } + data "coder_workspace_tags" "tags" {}`, + }, + expectTags: map[string]string{}, + expectError: `"tags" attribute is required by coder_workspace_tags`, + }, + { + name: "main.tf with valid workspace tags", + files: map[string]string{ + "main.tf": ` + provider "foo" {} + resource "foo_bar" "baz" {} + variable "region" { + type = string + default = "us" + } + data "coder_parameter" "az" { + name = "az" + type = "string" + default = "a" + } + data "coder_workspace_tags" "tags" { + tags = { + "platform" = "kubernetes", + "cluster" = "${"devel"}${"opers"}" + "region" = var.region + "az" = 
data.coder_parameter.az.value + } + }`, + }, + expectTags: map[string]string{"platform": "kubernetes", "cluster": "developers", "region": "us", "az": "a"}, + expectError: "", + }, + { + name: "main.tf with multiple valid workspace tags", + files: map[string]string{ + "main.tf": ` + provider "foo" {} + resource "foo_bar" "baz" {} + variable "region" { + type = string + default = "us" + } + variable "region2" { + type = string + default = "eu" + } + data "coder_parameter" "az" { + name = "az" + type = "string" + default = "a" + } + data "coder_parameter" "az2" { + name = "az2" + type = "string" + default = "b" + } + data "coder_workspace_tags" "tags" { + tags = { + "platform" = "kubernetes", + "cluster" = "${"devel"}${"opers"}" + "region" = var.region + "az" = data.coder_parameter.az.value + } + } + data "coder_workspace_tags" "more_tags" { + tags = { + "foo" = "bar" + } + }`, + }, + expectTags: map[string]string{"platform": "kubernetes", "cluster": "developers", "region": "us", "az": "a", "foo": "bar"}, + expectError: "", + }, + { + name: "main.tf with missing parameter default value for workspace tags", + files: map[string]string{ + "main.tf": ` + provider "foo" {} + resource "foo_bar" "baz" {} + variable "region" { + type = string + default = "us" + } + data "coder_parameter" "az" { + name = "az" + type = "string" + } + data "coder_workspace_tags" "tags" { + tags = { + "platform" = "kubernetes", + "cluster" = "${"devel"}${"opers"}" + "region" = var.region + "az" = data.coder_parameter.az.value + } + }`, + }, + expectError: `provisioner tag "az" evaluated to an empty value, please set a default value`, + }, + { + name: "main.tf with missing parameter default value outside workspace tags", + files: map[string]string{ + "main.tf": ` + provider "foo" {} + resource "foo_bar" "baz" {} + variable "region" { + type = string + default = "us" + } + data "coder_parameter" "az" { + name = "az" + type = "string" + default = "a" + } + data "coder_parameter" "notaz" { + name = "notaz" + type = "string" + } + data "coder_workspace_tags" "tags" { + tags = { + "platform" = "kubernetes", + "cluster" = "${"devel"}${"opers"}" + "region" = var.region + "az" = data.coder_parameter.az.value + } + }`, + }, + expectTags: map[string]string{"platform": "kubernetes", "cluster": "developers", "region": "us", "az": "a"}, + expectError: ``, + }, + { + name: "main.tf with missing variable default value outside workspace tags", + files: map[string]string{ + "main.tf": ` + provider "foo" {} + resource "foo_bar" "baz" {} + variable "region" { + type = string + default = "us" + } + variable "notregion" { + type = string + } + data "coder_parameter" "az" { + name = "az" + type = "string" + default = "a" + } + data "coder_workspace_tags" "tags" { + tags = { + "platform" = "kubernetes", + "cluster" = "${"devel"}${"opers"}" + "region" = var.region + "az" = data.coder_parameter.az.value + } + }`, + }, + expectTags: map[string]string{"platform": "kubernetes", "cluster": "developers", "region": "us", "az": "a"}, + expectError: ``, + }, + { + name: "main.tf with disallowed data source for workspace tags", + files: map[string]string{ + "main.tf": ` + provider "foo" {} + resource "foo_bar" "baz" { + name = "foobar" + } + variable "region" { + type = string + default = "us" + } + data "coder_parameter" "az" { + name = "az" + type = "string" + default = "a" + } + data "local_file" "hostname" { + filename = "/etc/hostname" + } + data "coder_workspace_tags" "tags" { + tags = { + "platform" = "kubernetes", + "cluster" = 
"${"devel"}${"opers"}" + "region" = var.region + "az" = data.coder_parameter.az.value + "hostname" = data.local_file.hostname.content + } + }`, + }, + expectTags: nil, + expectError: `invalid workspace tag value "data.local_file.hostname.content": only the "coder_parameter" data source is supported here`, + }, + { + name: "main.tf with disallowed resource for workspace tags", + files: map[string]string{ + "main.tf": ` + provider "foo" {} + resource "foo_bar" "baz" { + name = "foobar" + } + variable "region" { + type = string + default = "us" + } + data "coder_parameter" "az" { + name = "az" + type = "string" + default = "a" + } + data "coder_workspace_tags" "tags" { + tags = { + "platform" = "kubernetes", + "cluster" = "${"devel"}${"opers"}" + "region" = var.region + "az" = data.coder_parameter.az.value + "foobarbaz" = foo_bar.baz.name + } + }`, + }, + expectTags: nil, + // TODO: this error isn't great, but it has the desired effect. + expectError: `There is no variable named "foo_bar"`, + }, + } { + tc := tc + t.Run(tc.name+"/tar", func(t *testing.T) { + t.Parallel() + ctx := testutil.Context(t, testutil.WaitShort) + tar := createTar(t, tc.files) + logger := slogtest.Make(t, nil) + tags, err := tfparse.WorkspaceTagDefaultsFromFile(ctx, logger, tar, "application/x-tar") + if tc.expectError != "" { + require.NotNil(t, err) + require.Contains(t, err.Error(), tc.expectError) + } else { + require.NoError(t, err) + require.Equal(t, tc.expectTags, tags) + } + }) + t.Run(tc.name+"/zip", func(t *testing.T) { + t.Parallel() + ctx := testutil.Context(t, testutil.WaitShort) + zip := createZip(t, tc.files) + logger := slogtest.Make(t, nil) + tags, err := tfparse.WorkspaceTagDefaultsFromFile(ctx, logger, zip, "application/zip") + if tc.expectError != "" { + require.Contains(t, err.Error(), tc.expectError) + } else { + require.NoError(t, err) + require.Equal(t, tc.expectTags, tags) + } + }) + } +} + +func createTar(t *testing.T, files map[string]string) []byte { + var buffer bytes.Buffer + writer := tar.NewWriter(&buffer) + for path, content := range files { + err := writer.WriteHeader(&tar.Header{ + Name: path, + Size: int64(len(content)), + Uid: 65534, // nobody + Gid: 65534, // nogroup + Mode: 0o666, // -rw-rw-rw- + }) + require.NoError(t, err) + + _, err = writer.Write([]byte(content)) + require.NoError(t, err) + } + + err := writer.Flush() + require.NoError(t, err) + return buffer.Bytes() +} + +func createZip(t *testing.T, files map[string]string) []byte { + ta := createTar(t, files) + tr := tar.NewReader(bytes.NewReader(ta)) + za, err := archive.CreateZipFromTar(tr, int64(len(ta))) + require.NoError(t, err) + return za +} From 53722d06285acaee351ff06e543426d3b0d1e756 Mon Sep 17 00:00:00 2001 From: Cian Johnston Date: Wed, 6 Nov 2024 17:13:27 +0000 Subject: [PATCH 02/14] undo rename for now --- provisioner/terraform/tfparse/{tfparse.go => tfextract.go} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename provisioner/terraform/tfparse/{tfparse.go => tfextract.go} (100%) diff --git a/provisioner/terraform/tfparse/tfparse.go b/provisioner/terraform/tfparse/tfextract.go similarity index 100% rename from provisioner/terraform/tfparse/tfparse.go rename to provisioner/terraform/tfparse/tfextract.go From 1cda7ae63840cbf45791ec4d52b9eb53e66c68ba Mon Sep 17 00:00:00 2001 From: Cian Johnston Date: Wed, 6 Nov 2024 17:31:56 +0000 Subject: [PATCH 03/14] add benchmark for WorkspaceTagDefaultsFromFile --- provisioner/terraform/tfparse/tfparse_test.go | 71 ++++++++++++++++++- 1 file changed, 69 insertions(+), 
2 deletions(-) diff --git a/provisioner/terraform/tfparse/tfparse_test.go b/provisioner/terraform/tfparse/tfparse_test.go index 699f01d45a1d5..82cf2db216e3b 100644 --- a/provisioner/terraform/tfparse/tfparse_test.go +++ b/provisioner/terraform/tfparse/tfparse_test.go @@ -3,8 +3,13 @@ package tfparse_test import ( "archive/tar" "bytes" + "context" + "io" + "log" "testing" + "cdr.dev/slog" + "cdr.dev/slog/sloggers/sloghuman" "cdr.dev/slog/sloggers/slogtest" "github.com/coder/coder/v2/archive" "github.com/coder/coder/v2/provisioner/terraform/tfparse" @@ -326,7 +331,7 @@ func Test_WorkspaceTagDefaultsFromFile(t *testing.T) { } } -func createTar(t *testing.T, files map[string]string) []byte { +func createTar(t testing.TB, files map[string]string) []byte { var buffer bytes.Buffer writer := tar.NewWriter(&buffer) for path, content := range files { @@ -348,10 +353,72 @@ func createTar(t *testing.T, files map[string]string) []byte { return buffer.Bytes() } -func createZip(t *testing.T, files map[string]string) []byte { +func createZip(t testing.TB, files map[string]string) []byte { ta := createTar(t, files) tr := tar.NewReader(bytes.NewReader(ta)) za, err := archive.CreateZipFromTar(tr, int64(len(ta))) require.NoError(t, err) return za } + +// Current benchmark results before any changes / caching. +// goos: linux +// goarch: amd64 +// pkg: github.com/coder/coder/v2/provisioner/terraform/tfparse +// cpu: AMD EPYC 7502P 32-Core Processor +// BenchmarkWorkspaceTagDefaultsFromFile/Tar-16 766 1493850 ns/op 339935 B/op 2238 allocs/op +// BenchmarkWorkspaceTagDefaultsFromFile/Zip-16 706 1633258 ns/op 389421 B/op 2296 allocs/op +// PASS +func BenchmarkWorkspaceTagDefaultsFromFile(b *testing.B) { + files := map[string]string{ + "main.tf": ` + provider "foo" {} + resource "foo_bar" "baz" {} + variable "region" { + type = string + default = "us" + } + data "coder_parameter" "az" { + name = "az" + type = "string" + default = "a" + } + data "coder_workspace_tags" "tags" { + tags = { + "platform" = "kubernetes", + "cluster" = "${"devel"}${"opers"}" + "region" = var.region + "az" = data.coder_parameter.az.value + } + }`, + } + tarFile := createTar(b, files) + zipFile := createZip(b, files) + logger := discardLogger(b) + b.ResetTimer() + b.Run("Tar", func(b *testing.B) { + ctx := context.Background() + for i := 0; i < b.N; i++ { + _, err := tfparse.WorkspaceTagDefaultsFromFile(ctx, logger, tarFile, "application/x-tar") + if err != nil { + b.Fatal(err) + } + } + }) + + b.Run("Zip", func(b *testing.B) { + ctx := context.Background() + for i := 0; i < b.N; i++ { + _, err := tfparse.WorkspaceTagDefaultsFromFile(ctx, logger, zipFile, "application/zip") + if err != nil { + b.Fatal(err) + } + } + }) +} + +func discardLogger(t testing.TB) slog.Logger { + l := slog.Make(sloghuman.Sink(io.Discard)) + log.SetOutput(slog.Stdlib(context.Background(), l, slog.LevelInfo).Writer()) + return l +} From 1d77ed56202cc9da538ee86d0cb0a87478dc9b79 Mon Sep 17 00:00:00 2001 From: Cian Johnston Date: Thu, 7 Nov 2024 17:24:08 +0000 Subject: [PATCH 04/14] reduce logging --- provisioner/terraform/tfparse/tfextract.go | 19 ++++++++----------- 1 file changed, 8 insertions(+), 11 deletions(-) diff --git a/provisioner/terraform/tfparse/tfextract.go b/provisioner/terraform/tfparse/tfextract.go index 7936fab6ba785..8c0196a2df404 100644 --- a/provisioner/terraform/tfparse/tfextract.go +++ b/provisioner/terraform/tfparse/tfextract.go @@ -33,12 +33,11 @@ const maxFileSizeBytes = 10 * (10 << 20) // 10 MB // WorkspaceTags extracts tags from 
coder_workspace_tags data sources defined in module. // Note that this only returns the lexical values of the data source, and does not // evaluate variables and such. To do this, see evalProvisionerTags below. -func WorkspaceTags(ctx context.Context, logger slog.Logger, module *tfconfig.Module) (map[string]string, error) { +func WorkspaceTags(ctx context.Context, module *tfconfig.Module) (map[string]string, error) { workspaceTags := map[string]string{} for _, dataResource := range module.DataResources { if dataResource.Type != "coder_workspace_tags" { - logger.Debug(ctx, "skip resource as it is not a coder_workspace_tags", "resource_name", dataResource.Name, "resource_type", dataResource.Type) continue } @@ -47,7 +46,6 @@ func WorkspaceTags(ctx context.Context, logger slog.Logger, module *tfconfig.Mod parser := hclparse.NewParser() if !strings.HasSuffix(dataResource.Pos.Filename, ".tf") { - logger.Debug(ctx, "only .tf files can be parsed", "filename", dataResource.Pos.Filename) continue } // We know in which HCL file is the data resource defined. @@ -101,8 +99,6 @@ func WorkspaceTags(ctx context.Context, logger slog.Logger, module *tfconfig.Mod return nil, xerrors.Errorf("can't preview the resource file: %v", err) } - logger.Info(ctx, "workspace tag found", "key", key, "value", value) - if _, ok := workspaceTags[key]; ok { return nil, xerrors.Errorf(`workspace tag %q is defined multiple times`, key) } @@ -135,14 +131,14 @@ func WorkspaceTagDefaultsFromFile(ctx context.Context, logger slog.Logger, file // This only gets us the expressions. We need to evaluate them. // Example: var.region -> "us" - tags, err = WorkspaceTags(ctx, logger, module) + tags, err = WorkspaceTags(ctx, module) if err != nil { return nil, xerrors.Errorf("extract workspace tags: %w", err) } - // To evalute the expressions, we need to load the default values for + // To evaluate the expressions, we need to load the default values for // variables and parameters. - varsDefaults, paramsDefaults, err := loadDefaults(ctx, logger, module) + varsDefaults, paramsDefaults, err := loadDefaults(module) if err != nil { return nil, xerrors.Errorf("load defaults: %w", err) } @@ -162,6 +158,8 @@ func WorkspaceTagDefaultsFromFile(ctx context.Context, logger slog.Logger, file } return nil, xerrors.Errorf("provisioner tag %q evaluated to an empty value, please set a default value", k) } + logger.Info(ctx, "found workspace tags", slog.F("tags", evalTags)) + return evalTags, nil } @@ -209,7 +207,7 @@ func loadModuleFromFile(file []byte, mimetype string) (module *tfconfig.Module, // loadDefaults inspects the given module and returns the default values for // all variables and coder_parameter data sources referenced there. -func loadDefaults(ctx context.Context, logger slog.Logger, module *tfconfig.Module) (varsDefaults map[string]string, paramsDefaults map[string]string, err error) { +func loadDefaults(module *tfconfig.Module) (varsDefaults map[string]string, paramsDefaults map[string]string, err error) { // iterate through module.Variables to get the default values for all // variables. 
varsDefaults = make(map[string]string) @@ -226,7 +224,7 @@ func loadDefaults(ctx context.Context, logger slog.Logger, module *tfconfig.Modu paramsDefaults = make(map[string]string) for _, dataResource := range module.DataResources { if dataResource.Type != "coder_parameter" { - logger.Debug(ctx, "skip resource as it is not a coder_parameter", "resource_name", dataResource.Name, "resource_type", dataResource.Type) + continue } var file *hcl.File @@ -234,7 +232,6 @@ func loadDefaults(ctx context.Context, logger slog.Logger, module *tfconfig.Modu parser := hclparse.NewParser() if !strings.HasSuffix(dataResource.Pos.Filename, ".tf") { - logger.Debug(ctx, "only .tf files can be parsed", "filename", dataResource.Pos.Filename) continue } From cb4be2620aa8d548b471bb0f3e62925cf1fbb972 Mon Sep 17 00:00:00 2001 From: Cian Johnston Date: Thu, 7 Nov 2024 17:24:36 +0000 Subject: [PATCH 05/14] unused-param --- provisioner/terraform/tfparse/tfparse_test.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/provisioner/terraform/tfparse/tfparse_test.go b/provisioner/terraform/tfparse/tfparse_test.go index 82cf2db216e3b..eadc1f62eaa78 100644 --- a/provisioner/terraform/tfparse/tfparse_test.go +++ b/provisioner/terraform/tfparse/tfparse_test.go @@ -417,7 +417,7 @@ func BenchmarkWorkspaceTagDefaultsFromFile(b *testing.B) { }) } -func discardLogger(t testing.TB) slog.Logger { +func discardLogger(_ testing.TB) slog.Logger { l := slog.Make(sloghuman.Sink(io.Discard)) log.SetOutput(slog.Stdlib(context.Background(), l, slog.LevelInfo).Writer()) return l From 37ec3e0957f4cab2bfc5fc209745db6653d4a00a Mon Sep 17 00:00:00 2001 From: Cian Johnston Date: Fri, 8 Nov 2024 10:05:21 +0000 Subject: [PATCH 06/14] RED: add test case for function calls --- provisioner/terraform/tfparse/tfparse_test.go | 29 +++++++++++++++++++ 1 file changed, 29 insertions(+) diff --git a/provisioner/terraform/tfparse/tfparse_test.go b/provisioner/terraform/tfparse/tfparse_test.go index eadc1f62eaa78..159cb5cabe701 100644 --- a/provisioner/terraform/tfparse/tfparse_test.go +++ b/provisioner/terraform/tfparse/tfparse_test.go @@ -299,6 +299,35 @@ func Test_WorkspaceTagDefaultsFromFile(t *testing.T) { // TODO: this error isn't great, but it has the desired effect. 
expectError: `There is no variable named "foo_bar"`, }, + { + name: "main.tf with functions in workspace tags", + files: map[string]string{ + "main.tf": ` + provider "foo" {} + resource "foo_bar" "baz" { + name = "foobar" + } + variable "region" { + type = string + default = "region.us" + } + data "coder_parameter" "az" { + name = "az" + type = "string" + default = "az.a" + } + data "coder_workspace_tags" "tags" { + tags = { + "platform" = "kubernetes", + "cluster" = "${"devel"}${"opers"}" + "region" = try(split(".", var.region)[1], "placeholder") + "az" = try(split(".", data.coder_parameter.az.value)[1], "placeholder") + } + }`, + }, + expectTags: map[string]string{"platform": "kubernetes", "cluster": "developers", "region": "us", "az": "a"}, + expectError: ``, + }, } { tc := tc t.Run(tc.name+"/tar", func(t *testing.T) { From ad2d3a926c442a6847838c19d5b99fa428535288 Mon Sep 17 00:00:00 2001 From: Cian Johnston Date: Fri, 8 Nov 2024 10:44:00 +0000 Subject: [PATCH 07/14] clarify lack of function support --- provisioner/terraform/tfparse/tfextract.go | 5 +++++ provisioner/terraform/tfparse/tfparse_test.go | 4 ++-- 2 files changed, 7 insertions(+), 2 deletions(-) diff --git a/provisioner/terraform/tfparse/tfextract.go b/provisioner/terraform/tfparse/tfextract.go index 8c0196a2df404..20faaca27c01b 100644 --- a/provisioner/terraform/tfparse/tfextract.go +++ b/provisioner/terraform/tfparse/tfextract.go @@ -341,6 +341,11 @@ func buildEvalContext(varDefaults map[string]string, paramDefaults map[string]st evalCtx := &hcl.EvalContext{ Variables: map[string]cty.Value{}, + // NOTE: we do not currently support function execution here. + // The default function map for Terraform is not exposed, so we would essentially + // have to re-implement or copy the entire map or a subset thereof. 
+ // ref: https://github.com/hashicorp/terraform/blob/e044e569c5bc81f82e9a4d7891f37c6fbb0a8a10/internal/lang/functions.go#L54 + Functions: nil, } if len(varDefaultsM) != 0 { evalCtx.Variables["var"] = cty.MapVal(varDefaultsM) diff --git a/provisioner/terraform/tfparse/tfparse_test.go b/provisioner/terraform/tfparse/tfparse_test.go index 159cb5cabe701..09197b59694ca 100644 --- a/provisioner/terraform/tfparse/tfparse_test.go +++ b/provisioner/terraform/tfparse/tfparse_test.go @@ -325,8 +325,8 @@ func Test_WorkspaceTagDefaultsFromFile(t *testing.T) { } }`, }, - expectTags: map[string]string{"platform": "kubernetes", "cluster": "developers", "region": "us", "az": "a"}, - expectError: ``, + expectTags: nil, + expectError: `Function calls not allowed; Functions may not be called here.`, }, } { tc := tc From 14a5901edf81ca22787445d2bb082968b7c2bf72 Mon Sep 17 00:00:00 2001 From: Cian Johnston Date: Mon, 11 Nov 2024 13:19:52 +0000 Subject: [PATCH 08/14] use caching of hcl.NewParser --- provisioner/terraform/parse.go | 2 +- provisioner/terraform/tfparse/tfextract.go | 87 ++++++++++++------- provisioner/terraform/tfparse/tfparse_test.go | 6 +- 3 files changed, 60 insertions(+), 35 deletions(-) diff --git a/provisioner/terraform/parse.go b/provisioner/terraform/parse.go index 86dcec2e4cfeb..c56cb91fa548b 100644 --- a/provisioner/terraform/parse.go +++ b/provisioner/terraform/parse.go @@ -26,7 +26,7 @@ func (s *server) Parse(sess *provisionersdk.Session, _ *proto.ParseRequest, _ <- return provisionersdk.ParseErrorf("load module: %s", formatDiagnostics(sess.WorkDirectory, diags)) } - workspaceTags, err := tfparse.WorkspaceTags(ctx, s.logger, module) + workspaceTags, err := tfparse.WorkspaceTags(module) if err != nil { return provisionersdk.ParseErrorf("can't load workspace tags: %v", err) } diff --git a/provisioner/terraform/tfparse/tfextract.go b/provisioner/terraform/tfparse/tfextract.go index 20faaca27c01b..dfda5eb208db7 100644 --- a/provisioner/terraform/tfparse/tfextract.go +++ b/provisioner/terraform/tfparse/tfextract.go @@ -21,6 +21,7 @@ import ( "github.com/hashicorp/hcl/v2/hclsyntax" "github.com/hashicorp/terraform-config-inspect/tfconfig" "github.com/zclconf/go-cty/cty" + "golang.org/x/exp/maps" "golang.org/x/xerrors" "cdr.dev/slog" @@ -30,11 +31,25 @@ import ( // introducing a circular dependency const maxFileSizeBytes = 10 * (10 << 20) // 10 MB +// hclparse.Parser by default keeps track of all files it has ever parsed +// by filename. By re-using the same parser instance we can avoid repeating +// previous work. +// NOTE: we can only do this if we _just_ use ParseHCLFile(). +// The ParseHCL() method allows specifying the filename directly, which allows +// easily getting previously cached results. Whenever we parse HCL files from disk +// we do so in a unique file path. +// See: provisionersdk/session.go#L51 +var hclFileParser ParseHCLFiler = hclparse.NewParser() + +type ParseHCLFiler interface { + ParseHCLFile(filename string) (*hcl.File, hcl.Diagnostics) +} + // WorkspaceTags extracts tags from coder_workspace_tags data sources defined in module. // Note that this only returns the lexical values of the data source, and does not // evaluate variables and such. To do this, see evalProvisionerTags below. 
-func WorkspaceTags(ctx context.Context, module *tfconfig.Module) (map[string]string, error) { - workspaceTags := map[string]string{} +func WorkspaceTags(module *tfconfig.Module) (map[string]string, error) { + tags := map[string]string{} for _, dataResource := range module.DataResources { if dataResource.Type != "coder_workspace_tags" { @@ -43,13 +58,12 @@ func WorkspaceTags(ctx context.Context, module *tfconfig.Module) (map[string]str var file *hcl.File var diags hcl.Diagnostics - parser := hclparse.NewParser() if !strings.HasSuffix(dataResource.Pos.Filename, ".tf") { continue } // We know in which HCL file is the data resource defined. - file, diags = parser.ParseHCLFile(dataResource.Pos.Filename) + file, diags = hclFileParser.ParseHCLFile(dataResource.Pos.Filename) if diags.HasErrors() { return nil, xerrors.Errorf("can't parse the resource file: %s", diags.Error()) } @@ -99,14 +113,14 @@ func WorkspaceTags(ctx context.Context, module *tfconfig.Module) (map[string]str return nil, xerrors.Errorf("can't preview the resource file: %v", err) } - if _, ok := workspaceTags[key]; ok { + if _, ok := tags[key]; ok { return nil, xerrors.Errorf(`workspace tag %q is defined multiple times`, key) } - workspaceTags[key] = value + tags[key] = value } } } - return workspaceTags, nil + return tags, nil } // WorkspaceTagDefaultsFromFile extracts the default values for a `coder_workspace_tags` resource from the given @@ -131,16 +145,20 @@ func WorkspaceTagDefaultsFromFile(ctx context.Context, logger slog.Logger, file // This only gets us the expressions. We need to evaluate them. // Example: var.region -> "us" - tags, err = WorkspaceTags(ctx, module) + tags, err = WorkspaceTags(module) if err != nil { return nil, xerrors.Errorf("extract workspace tags: %w", err) } // To evaluate the expressions, we need to load the default values for // variables and parameters. - varsDefaults, paramsDefaults, err := loadDefaults(module) + varsDefaults, err := loadVarsDefaults(maps.Values(module.Variables)) + if err != nil { + return nil, xerrors.Errorf("load variable defaults: %w", err) + } + paramsDefaults, err := loadParamsDefaults(maps.Values(module.DataResources)) if err != nil { - return nil, xerrors.Errorf("load defaults: %w", err) + return nil, xerrors.Errorf("load parameter defaults: %w", err) } // Evaluate the tags expressions given the inputs. @@ -205,46 +223,53 @@ func loadModuleFromFile(file []byte, mimetype string) (module *tfconfig.Module, return module, cleanup, nil } -// loadDefaults inspects the given module and returns the default values for -// all variables and coder_parameter data sources referenced there. -func loadDefaults(module *tfconfig.Module) (varsDefaults map[string]string, paramsDefaults map[string]string, err error) { - // iterate through module.Variables to get the default values for all +// loadVarsDefaults returns the default values for all variables passed to it. +func loadVarsDefaults(variables []*tfconfig.Variable) (map[string]string, error) { + // iterate through vars to get the default values for all // variables. 
- varsDefaults = make(map[string]string) - for _, v := range module.Variables { + m := make(map[string]string) + for _, v := range variables { + if v == nil { + continue + } sv, err := interfaceToString(v.Default) if err != nil { - return nil, nil, xerrors.Errorf("can't convert variable default value to string: %v", err) + return nil, xerrors.Errorf("can't convert variable default value to string: %v", err) } - varsDefaults[v.Name] = strings.Trim(sv, `"`) + m[v.Name] = strings.Trim(sv, `"`) } + return m, nil +} + +// loadParamsDefaults returns the default values of all coder_parameter data sources data sources provided. +func loadParamsDefaults(dataSources []*tfconfig.Resource) (map[string]string, error) { + defaultsM := make(map[string]string) + for _, dataResource := range dataSources { + if dataResource == nil { + continue + } - // iterate through module.DataResources to get the default values for all - // coder_parameter data resources. - paramsDefaults = make(map[string]string) - for _, dataResource := range module.DataResources { if dataResource.Type != "coder_parameter" { continue } var file *hcl.File var diags hcl.Diagnostics - parser := hclparse.NewParser() if !strings.HasSuffix(dataResource.Pos.Filename, ".tf") { continue } // We know in which HCL file is the data resource defined. - file, diags = parser.ParseHCLFile(dataResource.Pos.Filename) + file, diags = hclFileParser.ParseHCLFile(dataResource.Pos.Filename) if diags.HasErrors() { - return nil, nil, xerrors.Errorf("can't parse the resource file %q: %s", dataResource.Pos.Filename, diags.Error()) + return nil, xerrors.Errorf("can't parse the resource file %q: %s", dataResource.Pos.Filename, diags.Error()) } // Parse root to find "coder_parameter". content, _, diags := file.Body.PartialContent(rootTemplateSchema) if diags.HasErrors() { - return nil, nil, xerrors.Errorf("can't parse the resource file: %s", diags.Error()) + return nil, xerrors.Errorf("can't parse the resource file: %s", diags.Error()) } // Iterate over blocks to locate the exact "coder_parameter" data resource. @@ -256,22 +281,22 @@ func loadDefaults(module *tfconfig.Module) (varsDefaults map[string]string, para // Parse "coder_parameter" to find the default value. 
resContent, _, diags := block.Body.PartialContent(coderParameterSchema) if diags.HasErrors() { - return nil, nil, xerrors.Errorf(`can't parse the coder_parameter: %s`, diags.Error()) + return nil, xerrors.Errorf(`can't parse the coder_parameter: %s`, diags.Error()) } if _, ok := resContent.Attributes["default"]; !ok { - paramsDefaults[dataResource.Name] = "" + defaultsM[dataResource.Name] = "" } else { expr := resContent.Attributes["default"].Expr value, err := previewFileContent(expr.Range()) if err != nil { - return nil, nil, xerrors.Errorf("can't preview the resource file: %v", err) + return nil, xerrors.Errorf("can't preview the resource file: %v", err) } - paramsDefaults[dataResource.Name] = strings.Trim(value, `"`) + defaultsM[dataResource.Name] = strings.Trim(value, `"`) } } } - return varsDefaults, paramsDefaults, nil + return defaultsM, nil } // EvalProvisionerTags evaluates the given workspaceTags based on the given diff --git a/provisioner/terraform/tfparse/tfparse_test.go b/provisioner/terraform/tfparse/tfparse_test.go index 09197b59694ca..100f920b5a1ea 100644 --- a/provisioner/terraform/tfparse/tfparse_test.go +++ b/provisioner/terraform/tfparse/tfparse_test.go @@ -390,13 +390,13 @@ func createZip(t testing.TB, files map[string]string) []byte { return za } -// Current benchmark results before any changes / caching. +// Last run results: // goos: linux // goarch: amd64 // pkg: github.com/coder/coder/v2/provisioner/terraform/tfparse // cpu: AMD EPYC 7502P 32-Core Processor -// BenchmarkWorkspaceTagDefaultsFromFile/Tar-16 766 1493850 ns/op 339935 B/op 2238 allocs/op -// BenchmarkWorkspaceTagDefaultsFromFile/Zip-16 706 1633258 ns/op 389421 B/op 2296 allocs/op +// BenchmarkWorkspaceTagDefaultsFromFile/Tar-16 1147 1073487 ns/op 200266 B/op 1309 allocs/op +// BenchmarkWorkspaceTagDefaultsFromFile/Zip-16 991 1030536 ns/op 248063 B/op 1364 allocs/op // PASS func BenchmarkWorkspaceTagDefaultsFromFile(b *testing.B) { files := map[string]string{ From bc69d1ec1109c54b58b68ef1cad43dbc4d3725b8 Mon Sep 17 00:00:00 2001 From: Cian Johnston Date: Mon, 11 Nov 2024 13:31:36 +0000 Subject: [PATCH 09/14] avoid too much caching, allow caller to bring their own parser --- provisioner/terraform/parse.go | 2 +- provisioner/terraform/tfparse/tfextract.go | 34 +++++++++---------- provisioner/terraform/tfparse/tfparse_test.go | 4 +-- 3 files changed, 20 insertions(+), 20 deletions(-) diff --git a/provisioner/terraform/parse.go b/provisioner/terraform/parse.go index c56cb91fa548b..51b070423fd6f 100644 --- a/provisioner/terraform/parse.go +++ b/provisioner/terraform/parse.go @@ -26,7 +26,7 @@ func (s *server) Parse(sess *provisionersdk.Session, _ *proto.ParseRequest, _ <- return provisionersdk.ParseErrorf("load module: %s", formatDiagnostics(sess.WorkDirectory, diags)) } - workspaceTags, err := tfparse.WorkspaceTags(module) + workspaceTags, err := tfparse.WorkspaceTags(nil, module) if err != nil { return provisionersdk.ParseErrorf("can't load workspace tags: %v", err) } diff --git a/provisioner/terraform/tfparse/tfextract.go b/provisioner/terraform/tfparse/tfextract.go index dfda5eb208db7..abedf9d112fe6 100644 --- a/provisioner/terraform/tfparse/tfextract.go +++ b/provisioner/terraform/tfparse/tfextract.go @@ -31,24 +31,22 @@ import ( // introducing a circular dependency const maxFileSizeBytes = 10 * (10 << 20) // 10 MB -// hclparse.Parser by default keeps track of all files it has ever parsed -// by filename. By re-using the same parser instance we can avoid repeating -// previous work. 
-// NOTE: we can only do this if we _just_ use ParseHCLFile(). -// The ParseHCL() method allows specifying the filename directly, which allows -// easily getting previously cached results. Whenever we parse HCL files from disk -// we do so in a unique file path. -// See: provisionersdk/session.go#L51 -var hclFileParser ParseHCLFiler = hclparse.NewParser() - -type ParseHCLFiler interface { +// ParseHCLFile() is only method of hclparse.Parser we use in this package. +// hclparse.Parser will by default cache all previous files it has ever +// parsed. While leveraging this caching behavior is nice, we _never_ want +// to end up in a situation where we end up returning stale values. +type Parser interface { ParseHCLFile(filename string) (*hcl.File, hcl.Diagnostics) } // WorkspaceTags extracts tags from coder_workspace_tags data sources defined in module. // Note that this only returns the lexical values of the data source, and does not // evaluate variables and such. To do this, see evalProvisionerTags below. -func WorkspaceTags(module *tfconfig.Module) (map[string]string, error) { +// If the provided Parser is nil, a new instance of hclparse.Parser will be used instead. +func WorkspaceTags(parser Parser, module *tfconfig.Module) (map[string]string, error) { + if parser == nil { + parser = hclparse.NewParser() + } tags := map[string]string{} for _, dataResource := range module.DataResources { @@ -63,7 +61,7 @@ func WorkspaceTags(module *tfconfig.Module) (map[string]string, error) { continue } // We know in which HCL file is the data resource defined. - file, diags = hclFileParser.ParseHCLFile(dataResource.Pos.Filename) + file, diags = parser.ParseHCLFile(dataResource.Pos.Filename) if diags.HasErrors() { return nil, xerrors.Errorf("can't parse the resource file: %s", diags.Error()) } @@ -143,9 +141,11 @@ func WorkspaceTagDefaultsFromFile(ctx context.Context, logger slog.Logger, file } }() + parser := hclparse.NewParser() + // This only gets us the expressions. We need to evaluate them. // Example: var.region -> "us" - tags, err = WorkspaceTags(module) + tags, err = WorkspaceTags(parser, module) if err != nil { return nil, xerrors.Errorf("extract workspace tags: %w", err) } @@ -156,7 +156,7 @@ func WorkspaceTagDefaultsFromFile(ctx context.Context, logger slog.Logger, file if err != nil { return nil, xerrors.Errorf("load variable defaults: %w", err) } - paramsDefaults, err := loadParamsDefaults(maps.Values(module.DataResources)) + paramsDefaults, err := loadParamsDefaults(parser, maps.Values(module.DataResources)) if err != nil { return nil, xerrors.Errorf("load parameter defaults: %w", err) } @@ -242,7 +242,7 @@ func loadVarsDefaults(variables []*tfconfig.Variable) (map[string]string, error) } // loadParamsDefaults returns the default values of all coder_parameter data sources data sources provided. -func loadParamsDefaults(dataSources []*tfconfig.Resource) (map[string]string, error) { +func loadParamsDefaults(parser Parser, dataSources []*tfconfig.Resource) (map[string]string, error) { defaultsM := make(map[string]string) for _, dataResource := range dataSources { if dataResource == nil { @@ -261,7 +261,7 @@ func loadParamsDefaults(dataSources []*tfconfig.Resource) (map[string]string, er } // We know in which HCL file is the data resource defined. 
- file, diags = hclFileParser.ParseHCLFile(dataResource.Pos.Filename) + file, diags = parser.ParseHCLFile(dataResource.Pos.Filename) if diags.HasErrors() { return nil, xerrors.Errorf("can't parse the resource file %q: %s", dataResource.Pos.Filename, diags.Error()) } diff --git a/provisioner/terraform/tfparse/tfparse_test.go b/provisioner/terraform/tfparse/tfparse_test.go index 100f920b5a1ea..c786f0ff8e1e3 100644 --- a/provisioner/terraform/tfparse/tfparse_test.go +++ b/provisioner/terraform/tfparse/tfparse_test.go @@ -395,8 +395,8 @@ func createZip(t testing.TB, files map[string]string) []byte { // goarch: amd64 // pkg: github.com/coder/coder/v2/provisioner/terraform/tfparse // cpu: AMD EPYC 7502P 32-Core Processor -// BenchmarkWorkspaceTagDefaultsFromFile/Tar-16 1147 1073487 ns/op 200266 B/op 1309 allocs/op -// BenchmarkWorkspaceTagDefaultsFromFile/Zip-16 991 1030536 ns/op 248063 B/op 1364 allocs/op +// BenchmarkWorkspaceTagDefaultsFromFile/Tar-16 1077 1081951 ns/op 200754 B/op 1312 allocs/op +// BenchmarkWorkspaceTagDefaultsFromFile/Zip-16 984 1187299 ns/op 249574 B/op 1369 allocs/op // PASS func BenchmarkWorkspaceTagDefaultsFromFile(b *testing.B) { files := map[string]string{ From 3db3e6bfcf4b8b4b44273e8420a090ce5733c4e3 Mon Sep 17 00:00:00 2001 From: Cian Johnston Date: Mon, 11 Nov 2024 13:57:18 +0000 Subject: [PATCH 10/14] bring back logging --- provisioner/terraform/parse.go | 2 +- provisioner/terraform/tfparse/tfextract.go | 22 ++++++----- provisioner/terraform/tfparse/tfparse_test.go | 38 +++++++++++++++++-- 3 files changed, 48 insertions(+), 14 deletions(-) diff --git a/provisioner/terraform/parse.go b/provisioner/terraform/parse.go index 51b070423fd6f..840be1a2728d6 100644 --- a/provisioner/terraform/parse.go +++ b/provisioner/terraform/parse.go @@ -26,7 +26,7 @@ func (s *server) Parse(sess *provisionersdk.Session, _ *proto.ParseRequest, _ <- return provisionersdk.ParseErrorf("load module: %s", formatDiagnostics(sess.WorkDirectory, diags)) } - workspaceTags, err := tfparse.WorkspaceTags(nil, module) + workspaceTags, err := tfparse.WorkspaceTags(ctx, s.logger, nil, module) if err != nil { return provisionersdk.ParseErrorf("can't load workspace tags: %v", err) } diff --git a/provisioner/terraform/tfparse/tfextract.go b/provisioner/terraform/tfparse/tfextract.go index abedf9d112fe6..937fdd954b8c7 100644 --- a/provisioner/terraform/tfparse/tfextract.go +++ b/provisioner/terraform/tfparse/tfextract.go @@ -43,14 +43,15 @@ type Parser interface { // Note that this only returns the lexical values of the data source, and does not // evaluate variables and such. To do this, see evalProvisionerTags below. // If the provided Parser is nil, a new instance of hclparse.Parser will be used instead. 
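// Illustrative sketch, not part of this change: the two calling styles the doc comment
// above describes, using the signature at this point in the series (it becomes a method
// on Parser in a later commit). Assumes the tfparse package context, i.e. the context,
// cdr.dev/slog, hclparse and tfconfig imports, and modules already loaded via
// tfconfig.LoadModule.
func exampleWorkspaceTagsCallers(ctx context.Context, logger slog.Logger, modA, modB *tfconfig.Module) error {
	// One-off call: passing nil lets WorkspaceTags create its own parser internally.
	if _, err := WorkspaceTags(ctx, logger, nil, modA); err != nil {
		return err
	}
	// Caller-provided parser: repeat calls that touch the same .tf files reuse the
	// already-parsed results instead of re-reading them from disk.
	p := hclparse.NewParser()
	if _, err := WorkspaceTags(ctx, logger, p, modA); err != nil {
		return err
	}
	if _, err := WorkspaceTags(ctx, logger, p, modB); err != nil {
		return err
	}
	return nil
}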
-func WorkspaceTags(parser Parser, module *tfconfig.Module) (map[string]string, error) { +func WorkspaceTags(ctx context.Context, logger slog.Logger, parser Parser, module *tfconfig.Module) (map[string]string, error) { if parser == nil { parser = hclparse.NewParser() } tags := map[string]string{} - + var skipped []string for _, dataResource := range module.DataResources { if dataResource.Type != "coder_workspace_tags" { + skipped = append(skipped, strings.Join([]string{"data", dataResource.Type, dataResource.Name}, ".")) continue } @@ -118,6 +119,7 @@ func WorkspaceTags(parser Parser, module *tfconfig.Module) (map[string]string, e } } } + logger.Debug(ctx, "found workspace tags", slog.F("tags", maps.Keys(tags)), slog.F("skipped", skipped)) return tags, nil } @@ -145,18 +147,18 @@ func WorkspaceTagDefaultsFromFile(ctx context.Context, logger slog.Logger, file // This only gets us the expressions. We need to evaluate them. // Example: var.region -> "us" - tags, err = WorkspaceTags(parser, module) + tags, err = WorkspaceTags(ctx, logger, parser, module) if err != nil { return nil, xerrors.Errorf("extract workspace tags: %w", err) } // To evaluate the expressions, we need to load the default values for // variables and parameters. - varsDefaults, err := loadVarsDefaults(maps.Values(module.Variables)) + varsDefaults, err := loadVarsDefaults(ctx, logger, maps.Values(module.Variables)) if err != nil { return nil, xerrors.Errorf("load variable defaults: %w", err) } - paramsDefaults, err := loadParamsDefaults(parser, maps.Values(module.DataResources)) + paramsDefaults, err := loadParamsDefaults(ctx, logger, parser, maps.Values(module.DataResources)) if err != nil { return nil, xerrors.Errorf("load parameter defaults: %w", err) } @@ -176,8 +178,6 @@ func WorkspaceTagDefaultsFromFile(ctx context.Context, logger slog.Logger, file } return nil, xerrors.Errorf("provisioner tag %q evaluated to an empty value, please set a default value", k) } - logger.Info(ctx, "found workspace tags", slog.F("tags", evalTags)) - return evalTags, nil } @@ -224,7 +224,7 @@ func loadModuleFromFile(file []byte, mimetype string) (module *tfconfig.Module, } // loadVarsDefaults returns the default values for all variables passed to it. -func loadVarsDefaults(variables []*tfconfig.Variable) (map[string]string, error) { +func loadVarsDefaults(ctx context.Context, logger slog.Logger, variables []*tfconfig.Variable) (map[string]string, error) { // iterate through vars to get the default values for all // variables. m := make(map[string]string) @@ -238,18 +238,21 @@ func loadVarsDefaults(variables []*tfconfig.Variable) (map[string]string, error) } m[v.Name] = strings.Trim(sv, `"`) } + logger.Debug(ctx, "found default values for variables", slog.F("defaults", m)) return m, nil } // loadParamsDefaults returns the default values of all coder_parameter data sources data sources provided. 
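// Illustrative sketch, not part of this change: a hypothetical parameter block of the
// shape the tests exercise. For input like this, loadParamsDefaults records the literal
// default keyed by the data source name, e.g. {"az": "a"}.
const exampleCoderParameter = `
data "coder_parameter" "az" {
  name    = "az"
  type    = "string"
  default = "a"
}
`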
-func loadParamsDefaults(parser Parser, dataSources []*tfconfig.Resource) (map[string]string, error) { +func loadParamsDefaults(ctx context.Context, logger slog.Logger, parser Parser, dataSources []*tfconfig.Resource) (map[string]string, error) { defaultsM := make(map[string]string) + var skipped []string for _, dataResource := range dataSources { if dataResource == nil { continue } if dataResource.Type != "coder_parameter" { + skipped = append(skipped, strings.Join([]string{"data", dataResource.Type, dataResource.Name}, ".")) continue } @@ -296,6 +299,7 @@ func loadParamsDefaults(parser Parser, dataSources []*tfconfig.Resource) (map[st } } } + logger.Debug(ctx, "found default values for parameters", slog.F("defaults", defaultsM), slog.F("skipped", skipped)) return defaultsM, nil } diff --git a/provisioner/terraform/tfparse/tfparse_test.go b/provisioner/terraform/tfparse/tfparse_test.go index c786f0ff8e1e3..d4ad8020e0b66 100644 --- a/provisioner/terraform/tfparse/tfparse_test.go +++ b/provisioner/terraform/tfparse/tfparse_test.go @@ -52,6 +52,9 @@ func Test_WorkspaceTagDefaultsFromFile(t *testing.T) { type = string default = "us" } + data "base" "ours" { + all = true + } data "coder_parameter" "az" { name = "az" type = "string" @@ -71,6 +74,9 @@ func Test_WorkspaceTagDefaultsFromFile(t *testing.T) { type = string default = "us" } + data "base" "ours" { + all = true + } data "coder_parameter" "az" { name = "az" type = "string" @@ -91,6 +97,9 @@ func Test_WorkspaceTagDefaultsFromFile(t *testing.T) { type = string default = "us" } + data "base" "ours" { + all = true + } data "coder_parameter" "az" { name = "az" type = "string" @@ -122,6 +131,9 @@ func Test_WorkspaceTagDefaultsFromFile(t *testing.T) { type = string default = "eu" } + data "base" "ours" { + all = true + } data "coder_parameter" "az" { name = "az" type = "string" @@ -159,6 +171,9 @@ func Test_WorkspaceTagDefaultsFromFile(t *testing.T) { type = string default = "us" } + data "base" "ours" { + all = true + } data "coder_parameter" "az" { name = "az" type = "string" @@ -184,6 +199,9 @@ func Test_WorkspaceTagDefaultsFromFile(t *testing.T) { type = string default = "us" } + data "base" "ours" { + all = true + } data "coder_parameter" "az" { name = "az" type = "string" @@ -218,6 +236,9 @@ func Test_WorkspaceTagDefaultsFromFile(t *testing.T) { variable "notregion" { type = string } + data "base" "ours" { + all = true + } data "coder_parameter" "az" { name = "az" type = "string" @@ -247,6 +268,9 @@ func Test_WorkspaceTagDefaultsFromFile(t *testing.T) { type = string default = "us" } + data "base" "ours" { + all = true + } data "coder_parameter" "az" { name = "az" type = "string" @@ -280,6 +304,9 @@ func Test_WorkspaceTagDefaultsFromFile(t *testing.T) { type = string default = "us" } + data "base" "ours" { + all = true + } data "coder_parameter" "az" { name = "az" type = "string" @@ -311,6 +338,9 @@ func Test_WorkspaceTagDefaultsFromFile(t *testing.T) { type = string default = "region.us" } + data "base" "ours" { + all = true + } data "coder_parameter" "az" { name = "az" type = "string" @@ -334,7 +364,7 @@ func Test_WorkspaceTagDefaultsFromFile(t *testing.T) { t.Parallel() ctx := testutil.Context(t, testutil.WaitShort) tar := createTar(t, tc.files) - logger := slogtest.Make(t, nil) + logger := slogtest.Make(t, nil).Leveled(slog.LevelDebug) tags, err := tfparse.WorkspaceTagDefaultsFromFile(ctx, logger, tar, "application/x-tar") if tc.expectError != "" { require.NotNil(t, err) @@ -348,7 +378,7 @@ func Test_WorkspaceTagDefaultsFromFile(t 
*testing.T) { t.Parallel() ctx := testutil.Context(t, testutil.WaitShort) zip := createZip(t, tc.files) - logger := slogtest.Make(t, nil) + logger := slogtest.Make(t, nil).Leveled(slog.LevelDebug) tags, err := tfparse.WorkspaceTagDefaultsFromFile(ctx, logger, zip, "application/zip") if tc.expectError != "" { require.Contains(t, err.Error(), tc.expectError) @@ -395,8 +425,8 @@ func createZip(t testing.TB, files map[string]string) []byte { // goarch: amd64 // pkg: github.com/coder/coder/v2/provisioner/terraform/tfparse // cpu: AMD EPYC 7502P 32-Core Processor -// BenchmarkWorkspaceTagDefaultsFromFile/Tar-16 1077 1081951 ns/op 200754 B/op 1312 allocs/op -// BenchmarkWorkspaceTagDefaultsFromFile/Zip-16 984 1187299 ns/op 249574 B/op 1369 allocs/op +// BenchmarkWorkspaceTagDefaultsFromFile/Tar-16 1770 1064920 ns/op 197638 B/op 1312 allocs/op +// BenchmarkWorkspaceTagDefaultsFromFile/Zip-16 954 1197070 ns/op 246819 B/op 1369 allocs/op // PASS func BenchmarkWorkspaceTagDefaultsFromFile(b *testing.B) { files := map[string]string{ From 4ba258993c8bc14a2667d7b97217ee3e7b1b8fb8 Mon Sep 17 00:00:00 2001 From: Cian Johnston Date: Thu, 14 Nov 2024 11:09:04 +0000 Subject: [PATCH 11/14] refactor --- provisioner/terraform/parse.go | 10 +- provisioner/terraform/tfparse/tfextract.go | 217 +++++++++--------- provisioner/terraform/tfparse/tfparse_test.go | 25 +- 3 files changed, 139 insertions(+), 113 deletions(-) diff --git a/provisioner/terraform/parse.go b/provisioner/terraform/parse.go index 840be1a2728d6..18fd7ed87317e 100644 --- a/provisioner/terraform/parse.go +++ b/provisioner/terraform/parse.go @@ -21,17 +21,21 @@ func (s *server) Parse(sess *provisionersdk.Session, _ *proto.ParseRequest, _ <- defer span.End() // Load the module and print any parse errors. - module, diags := tfconfig.LoadModule(sess.WorkDirectory) + // module, diags := tfconfig.LoadModule(sess.WorkDirectory) + // if diags.HasErrors() { + // } + + parser, diags := tfparse.New(sess.WorkDirectory, tfparse.WithLogger(s.logger.Named("tfparse"))) if diags.HasErrors() { return provisionersdk.ParseErrorf("load module: %s", formatDiagnostics(sess.WorkDirectory, diags)) } - workspaceTags, err := tfparse.WorkspaceTags(ctx, s.logger, nil, module) + workspaceTags, err := parser.WorkspaceTags(ctx) if err != nil { return provisionersdk.ParseErrorf("can't load workspace tags: %v", err) } - templateVariables, err := tfparse.LoadTerraformVariables(module) + templateVariables, err := parser.TemplateVariables() if err != nil { return provisionersdk.ParseErrorf("can't load template variables: %v", err) } diff --git a/provisioner/terraform/tfparse/tfextract.go b/provisioner/terraform/tfparse/tfextract.go index 937fdd954b8c7..3807c518cbb73 100644 --- a/provisioner/terraform/tfparse/tfextract.go +++ b/provisioner/terraform/tfparse/tfextract.go @@ -31,25 +31,60 @@ import ( // introducing a circular dependency const maxFileSizeBytes = 10 * (10 << 20) // 10 MB -// ParseHCLFile() is only method of hclparse.Parser we use in this package. -// hclparse.Parser will by default cache all previous files it has ever -// parsed. While leveraging this caching behavior is nice, we _never_ want -// to end up in a situation where we end up returning stale values. -type Parser interface { +// parseHCLFiler is the actual interface of *hclparse.Parser we use +// to parse HCL. This is extracted to an interface so we can more +// easily swap this out for an alternative implementation later on. 
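// Illustrative sketch, not part of this change: because only ParseHCLFile is required,
// the interface below can be satisfied by a small in-memory implementation, which is the
// "swap this out" escape hatch the comment above mentions. stubParser is hypothetical and
// assumes the package's existing hcl import.
type stubParser struct {
	files map[string]*hcl.File
}

func (s stubParser) ParseHCLFile(filename string) (*hcl.File, hcl.Diagnostics) {
	f, ok := s.files[filename]
	if !ok {
		return nil, hcl.Diagnostics{{
			Severity: hcl.DiagError,
			Summary:  "file not preloaded: " + filename,
		}}
	}
	return f, nil
}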
+type parseHCLFiler interface { ParseHCLFile(filename string) (*hcl.File, hcl.Diagnostics) } -// WorkspaceTags extracts tags from coder_workspace_tags data sources defined in module. -// Note that this only returns the lexical values of the data source, and does not -// evaluate variables and such. To do this, see evalProvisionerTags below. -// If the provided Parser is nil, a new instance of hclparse.Parser will be used instead. -func WorkspaceTags(ctx context.Context, logger slog.Logger, parser Parser, module *tfconfig.Module) (map[string]string, error) { - if parser == nil { - parser = hclparse.NewParser() +// Parser parses a Terraform module on disk. +type Parser struct { + logger slog.Logger + underlying parseHCLFiler + module *tfconfig.Module + workdir string +} + +// Option is an option for a new instance of Parser. +type Option func(*Parser) + +// WithLogger sets the logger to be used by Parser +func WithLogger(logger slog.Logger) Option { + return func(p *Parser) { + p.logger = logger + } +} + +// New returns a new instance of Parser, as well as any diagnostics +// encountered while parsing the module. +func New(workdir string, opts ...Option) (*Parser, tfconfig.Diagnostics) { + p := Parser{ + logger: slog.Make(), + underlying: hclparse.NewParser(), + workdir: workdir, + module: nil, + } + for _, o := range opts { + o(&p) + } + + var diags tfconfig.Diagnostics + if p.module == nil { + m, ds := tfconfig.LoadModule(workdir) + diags = ds + p.module = m } + + return &p, diags +} + +// WorkspaceTags looks for all coder_workspace_tags datasource in the module +// and returns the raw values for the tags. Use +func (p *Parser) WorkspaceTags(ctx context.Context) (map[string]string, error) { tags := map[string]string{} var skipped []string - for _, dataResource := range module.DataResources { + for _, dataResource := range p.module.DataResources { if dataResource.Type != "coder_workspace_tags" { skipped = append(skipped, strings.Join([]string{"data", dataResource.Type, dataResource.Name}, ".")) continue @@ -62,7 +97,7 @@ func WorkspaceTags(ctx context.Context, logger slog.Logger, parser Parser, modul continue } // We know in which HCL file is the data resource defined. - file, diags = parser.ParseHCLFile(dataResource.Pos.Filename) + file, diags = p.underlying.ParseHCLFile(dataResource.Pos.Filename) if diags.HasErrors() { return nil, xerrors.Errorf("can't parse the resource file: %s", diags.Error()) } @@ -119,46 +154,25 @@ func WorkspaceTags(ctx context.Context, logger slog.Logger, parser Parser, modul } } } - logger.Debug(ctx, "found workspace tags", slog.F("tags", maps.Keys(tags)), slog.F("skipped", skipped)) + p.logger.Debug(ctx, "found workspace tags", slog.F("tags", maps.Keys(tags)), slog.F("skipped", skipped)) return tags, nil } -// WorkspaceTagDefaultsFromFile extracts the default values for a `coder_workspace_tags` resource from the given -// -// file. It also ensures that any uses of the `coder_workspace_tags` data source only -// reference the following data types: -// 1. Static variables -// 2. Template variables -// 3. Coder parameters -// Any other data types are not allowed, as their values cannot be known at -// the time of template import. 
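// Illustrative sketch, not part of this change: a hypothetical coder_workspace_tags block
// that satisfies the rule stated above, embedded as a Go constant. Each value is either a
// literal, a template variable, or a coder_parameter data source, so it can be resolved at
// template import time.
const exampleAllowedWorkspaceTags = `
data "coder_workspace_tags" "example" {
  tags = {
    "platform" = "kubernetes"                  # static value
    "region"   = var.region                    # template variable
    "az"       = data.coder_parameter.az.value # coder parameter
  }
}
`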
-func WorkspaceTagDefaultsFromFile(ctx context.Context, logger slog.Logger, file []byte, mimetype string) (tags map[string]string, err error) { - module, cleanup, err := loadModuleFromFile(file, mimetype) - if err != nil { - return nil, xerrors.Errorf("load module from file: %w", err) - } - defer func() { - if err := cleanup(); err != nil { - logger.Error(ctx, "failed to clean up", slog.Error(err)) - } - }() - - parser := hclparse.NewParser() - +func (p *Parser) WorkspaceTagDefaults(ctx context.Context) (map[string]string, error) { // This only gets us the expressions. We need to evaluate them. // Example: var.region -> "us" - tags, err = WorkspaceTags(ctx, logger, parser, module) + tags, err := p.WorkspaceTags(ctx) if err != nil { return nil, xerrors.Errorf("extract workspace tags: %w", err) } // To evaluate the expressions, we need to load the default values for // variables and parameters. - varsDefaults, err := loadVarsDefaults(ctx, logger, maps.Values(module.Variables)) + varsDefaults, err := p.VariableDefaults(ctx) if err != nil { return nil, xerrors.Errorf("load variable defaults: %w", err) } - paramsDefaults, err := loadParamsDefaults(ctx, logger, parser, maps.Values(module.DataResources)) + paramsDefaults, err := p.CoderParameterDefaults(ctx) if err != nil { return nil, xerrors.Errorf("load parameter defaults: %w", err) } @@ -166,7 +180,7 @@ func WorkspaceTagDefaultsFromFile(ctx context.Context, logger slog.Logger, file // Evaluate the tags expressions given the inputs. // This will resolve any variables or parameters to their default // values. - evalTags, err := EvalProvisionerTags(varsDefaults, paramsDefaults, tags) + evalTags, err := evaluateWorkspaceTags(varsDefaults, paramsDefaults, tags) if err != nil { return nil, xerrors.Errorf("eval provisioner tags: %w", err) } @@ -181,54 +195,64 @@ func WorkspaceTagDefaultsFromFile(ctx context.Context, logger slog.Logger, file return evalTags, nil } -func loadModuleFromFile(file []byte, mimetype string) (module *tfconfig.Module, cleanup func() error, err error) { - // Create a temporary directory - cleanup = func() error { return nil } // no-op cleanup - tmpDir, err := os.MkdirTemp("", "tfparse-*") - if err != nil { - return nil, cleanup, xerrors.Errorf("create temp dir: %w", err) +// TemplateVariables returns all of the Terraform variables in the module +// as TemplateVariables. +func (p *Parser) TemplateVariables() ([]*proto.TemplateVariable, error) { + // Sort variables by (filename, line) to make the ordering consistent + variables := make([]*tfconfig.Variable, 0, len(p.module.Variables)) + for _, v := range p.module.Variables { + variables = append(variables, v) } - cleanup = func() error { // real cleanup - return os.RemoveAll(tmpDir) + sort.Slice(variables, func(i, j int) bool { + return compareSourcePos(variables[i].Pos, variables[j].Pos) + }) + + var templateVariables []*proto.TemplateVariable + for _, v := range variables { + mv, err := convertTerraformVariable(v) + if err != nil { + return nil, err + } + templateVariables = append(templateVariables, mv) } + return templateVariables, nil +} - // Untar the file into the temporary directory +// WriteArchive is a helper function to write a in-memory archive +// with the given mimetype to disk. Only zip and tar archives +// are currently supported. +func WriteArchive(bs []byte, mimetype string, path string) error { + // Check if we need to convert the file first! 
var rdr io.Reader switch mimetype { case "application/x-tar": - rdr = bytes.NewReader(file) + rdr = bytes.NewReader(bs) case "application/zip": - zr, err := zip.NewReader(bytes.NewReader(file), int64(len(file))) - if err != nil { - return nil, cleanup, xerrors.Errorf("read zip file: %w", err) - } - tarBytes, err := archive.CreateTarFromZip(zr, maxFileSizeBytes) - if err != nil { - return nil, cleanup, xerrors.Errorf("convert zip to tar: %w", err) + if zr, err := zip.NewReader(bytes.NewReader(bs), int64(len(bs))); err != nil { + return xerrors.Errorf("read zip file: %w", err) + } else if tarBytes, err := archive.CreateTarFromZip(zr, maxFileSizeBytes); err != nil { + return xerrors.Errorf("convert zip to tar: %w", err) + } else { + rdr = bytes.NewReader(tarBytes) } - rdr = bytes.NewReader(tarBytes) default: - return nil, cleanup, xerrors.Errorf("unsupported mimetype: %s", mimetype) + return xerrors.Errorf("unsupported mimetype: %s", mimetype) } - if err := provisionersdk.Untar(tmpDir, rdr); err != nil { - return nil, cleanup, xerrors.Errorf("untar: %w", err) - } - - module, diags := tfconfig.LoadModule(tmpDir) - if diags.HasErrors() { - return nil, cleanup, xerrors.Errorf("load module: %s", diags.Error()) + // Untar the file into the temporary directory + if err := provisionersdk.Untar(path, rdr); err != nil { + return xerrors.Errorf("untar: %w", err) } - return module, cleanup, nil + return nil } -// loadVarsDefaults returns the default values for all variables passed to it. -func loadVarsDefaults(ctx context.Context, logger slog.Logger, variables []*tfconfig.Variable) (map[string]string, error) { +// VariableDefaults returns the default values for all variables passed to it. +func (p *Parser) VariableDefaults(ctx context.Context) (map[string]string, error) { // iterate through vars to get the default values for all // variables. m := make(map[string]string) - for _, v := range variables { + for _, v := range p.module.Variables { if v == nil { continue } @@ -238,15 +262,21 @@ func loadVarsDefaults(ctx context.Context, logger slog.Logger, variables []*tfco } m[v.Name] = strings.Trim(sv, `"`) } - logger.Debug(ctx, "found default values for variables", slog.F("defaults", m)) + p.logger.Debug(ctx, "found default values for variables", slog.F("defaults", m)) return m, nil } -// loadParamsDefaults returns the default values of all coder_parameter data sources data sources provided. -func loadParamsDefaults(ctx context.Context, logger slog.Logger, parser Parser, dataSources []*tfconfig.Resource) (map[string]string, error) { +// CoderParameterDefaults returns the default values of all coder_parameter data sources +// in the parsed module. +func (p *Parser) CoderParameterDefaults(ctx context.Context) (map[string]string, error) { defaultsM := make(map[string]string) - var skipped []string - for _, dataResource := range dataSources { + var ( + skipped []string + file *hcl.File + diags hcl.Diagnostics + ) + + for _, dataResource := range p.module.DataResources { if dataResource == nil { continue } @@ -256,15 +286,13 @@ func loadParamsDefaults(ctx context.Context, logger slog.Logger, parser Parser, continue } - var file *hcl.File - var diags hcl.Diagnostics - if !strings.HasSuffix(dataResource.Pos.Filename, ".tf") { continue } // We know in which HCL file is the data resource defined. - file, diags = parser.ParseHCLFile(dataResource.Pos.Filename) + // NOTE: hclparse.Parser will cache multiple successive calls to parse the same file. 
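// Illustrative sketch, not part of this change: a standalone demonstration of the caching
// behaviour referenced by the NOTE above. It assumes a main.tf exists in the working
// directory and that "fmt" and hclparse are imported; diagnostics are ignored for brevity.
func exampleParserCache() {
	p := hclparse.NewParser()
	f1, _ := p.ParseHCLFile("main.tf") // first call reads and parses main.tf from disk
	f2, _ := p.ParseHCLFile("main.tf") // second call is served from the parser's cache
	fmt.Println(f1 == f2) // true: the same *hcl.File is returned both times
}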
+ file, diags = p.underlying.ParseHCLFile(dataResource.Pos.Filename) if diags.HasErrors() { return nil, xerrors.Errorf("can't parse the resource file %q: %s", dataResource.Pos.Filename, diags.Error()) } @@ -299,13 +327,13 @@ func loadParamsDefaults(ctx context.Context, logger slog.Logger, parser Parser, } } } - logger.Debug(ctx, "found default values for parameters", slog.F("defaults", defaultsM), slog.F("skipped", skipped)) + p.logger.Debug(ctx, "found default values for parameters", slog.F("defaults", defaultsM), slog.F("skipped", skipped)) return defaultsM, nil } -// EvalProvisionerTags evaluates the given workspaceTags based on the given +// evaluateWorkspaceTags evaluates the given workspaceTags based on the given // default values for variables and coder_parameter data sources. -func EvalProvisionerTags(varsDefaults, paramsDefaults, workspaceTags map[string]string) (map[string]string, error) { +func evaluateWorkspaceTags(varsDefaults, paramsDefaults, workspaceTags map[string]string) (map[string]string, error) { // Filter only allowed data sources for preflight check. // This is not strictly required but provides a friendlier error. if err := validWorkspaceTagValues(workspaceTags); err != nil { @@ -421,29 +449,6 @@ func previewFileContent(fileRange hcl.Range) (string, error) { return string(fileRange.SliceBytes(body)), nil } -// LoadTerraformVariables extracts all Terraform variables from module and converts them -// to template variables. The variables are sorted by source position. -func LoadTerraformVariables(module *tfconfig.Module) ([]*proto.TemplateVariable, error) { - // Sort variables by (filename, line) to make the ordering consistent - variables := make([]*tfconfig.Variable, 0, len(module.Variables)) - for _, v := range module.Variables { - variables = append(variables, v) - } - sort.Slice(variables, func(i, j int) bool { - return compareSourcePos(variables[i].Pos, variables[j].Pos) - }) - - var templateVariables []*proto.TemplateVariable - for _, v := range variables { - mv, err := convertTerraformVariable(v) - if err != nil { - return nil, err - } - templateVariables = append(templateVariables, mv) - } - return templateVariables, nil -} - // convertTerraformVariable converts a Terraform variable to a template-wide variable, processed by Coder. 
func convertTerraformVariable(variable *tfconfig.Variable) (*proto.TemplateVariable, error) { var defaultData string diff --git a/provisioner/terraform/tfparse/tfparse_test.go b/provisioner/terraform/tfparse/tfparse_test.go index d4ad8020e0b66..b941c1bdbc0a7 100644 --- a/provisioner/terraform/tfparse/tfparse_test.go +++ b/provisioner/terraform/tfparse/tfparse_test.go @@ -365,7 +365,11 @@ func Test_WorkspaceTagDefaultsFromFile(t *testing.T) { ctx := testutil.Context(t, testutil.WaitShort) tar := createTar(t, tc.files) logger := slogtest.Make(t, nil).Leveled(slog.LevelDebug) - tags, err := tfparse.WorkspaceTagDefaultsFromFile(ctx, logger, tar, "application/x-tar") + tmpDir := t.TempDir() + tfparse.WriteArchive(tar, "application/x-tar", tmpDir) + parser, diags := tfparse.New(tmpDir, tfparse.WithLogger(logger)) + require.NoError(t, diags.Err()) + tags, err := parser.WorkspaceTagDefaults(ctx) if tc.expectError != "" { require.NotNil(t, err) require.Contains(t, err.Error(), tc.expectError) @@ -379,8 +383,13 @@ func Test_WorkspaceTagDefaultsFromFile(t *testing.T) { ctx := testutil.Context(t, testutil.WaitShort) zip := createZip(t, tc.files) logger := slogtest.Make(t, nil).Leveled(slog.LevelDebug) - tags, err := tfparse.WorkspaceTagDefaultsFromFile(ctx, logger, zip, "application/zip") + tmpDir := t.TempDir() + tfparse.WriteArchive(zip, "application/zip", tmpDir) + parser, diags := tfparse.New(tmpDir, tfparse.WithLogger(logger)) + require.NoError(t, diags.Err()) + tags, err := parser.WorkspaceTagDefaults(ctx) if tc.expectError != "" { + require.Error(t, err) require.Contains(t, err.Error(), tc.expectError) } else { require.NoError(t, err) @@ -458,7 +467,11 @@ func BenchmarkWorkspaceTagDefaultsFromFile(b *testing.B) { b.Run("Tar", func(b *testing.B) { ctx := context.Background() for i := 0; i < b.N; i++ { - _, err := tfparse.WorkspaceTagDefaultsFromFile(ctx, logger, tarFile, "application/x-tar") + tmpDir := b.TempDir() + tfparse.WriteArchive(tarFile, "application/x-tar", tmpDir) + parser, diags := tfparse.New(tmpDir, tfparse.WithLogger(logger)) + require.NoError(b, diags.Err()) + _, err := parser.WorkspaceTags(ctx) if err != nil { b.Fatal(err) } @@ -468,7 +481,11 @@ func BenchmarkWorkspaceTagDefaultsFromFile(b *testing.B) { b.Run("Zip", func(b *testing.B) { ctx := context.Background() for i := 0; i < b.N; i++ { - _, err := tfparse.WorkspaceTagDefaultsFromFile(ctx, logger, zipFile, "application/zip") + tmpDir := b.TempDir() + tfparse.WriteArchive(zipFile, "application/zip", tmpDir) + parser, diags := tfparse.New(tmpDir, tfparse.WithLogger(logger)) + require.NoError(b, diags.Err()) + _, err := parser.WorkspaceTags(ctx) if err != nil { b.Fatal(err) } From 90d09be8d7537f91cb7289897507704a92316168 Mon Sep 17 00:00:00 2001 From: Cian Johnston Date: Thu, 14 Nov 2024 11:09:44 +0000 Subject: [PATCH 12/14] bench --- provisioner/terraform/tfparse/tfparse_test.go | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/provisioner/terraform/tfparse/tfparse_test.go b/provisioner/terraform/tfparse/tfparse_test.go index b941c1bdbc0a7..a08f9ff76887e 100644 --- a/provisioner/terraform/tfparse/tfparse_test.go +++ b/provisioner/terraform/tfparse/tfparse_test.go @@ -434,8 +434,8 @@ func createZip(t testing.TB, files map[string]string) []byte { // goarch: amd64 // pkg: github.com/coder/coder/v2/provisioner/terraform/tfparse // cpu: AMD EPYC 7502P 32-Core Processor -// BenchmarkWorkspaceTagDefaultsFromFile/Tar-16 1770 1064920 ns/op 197638 B/op 1312 allocs/op -// BenchmarkWorkspaceTagDefaultsFromFile/Zip-16 
954 1197070 ns/op 246819 B/op 1369 allocs/op +// BenchmarkWorkspaceTagDefaultsFromFile/Tar-16 1922 847236 ns/op 176257 B/op 1073 allocs/op +// BenchmarkWorkspaceTagDefaultsFromFile/Zip-16 1273 946910 ns/op 225293 B/op 1130 allocs/op // PASS func BenchmarkWorkspaceTagDefaultsFromFile(b *testing.B) { files := map[string]string{ From a982a749b5870e449a52c16a642a0d1f87d840a8 Mon Sep 17 00:00:00 2001 From: Cian Johnston Date: Thu, 14 Nov 2024 11:32:50 +0000 Subject: [PATCH 13/14] fixup! bench --- provisioner/terraform/parse.go | 4 ---- 1 file changed, 4 deletions(-) diff --git a/provisioner/terraform/parse.go b/provisioner/terraform/parse.go index 18fd7ed87317e..9c60102fc8579 100644 --- a/provisioner/terraform/parse.go +++ b/provisioner/terraform/parse.go @@ -21,10 +21,6 @@ func (s *server) Parse(sess *provisionersdk.Session, _ *proto.ParseRequest, _ <- defer span.End() // Load the module and print any parse errors. - // module, diags := tfconfig.LoadModule(sess.WorkDirectory) - // if diags.HasErrors() { - // } - parser, diags := tfparse.New(sess.WorkDirectory, tfparse.WithLogger(s.logger.Named("tfparse"))) if diags.HasErrors() { return provisionersdk.ParseErrorf("load module: %s", formatDiagnostics(sess.WorkDirectory, diags)) From 1f6ffe18787e1c4dc8dbb38c527b3a87d330a7b8 Mon Sep 17 00:00:00 2001 From: Cian Johnston Date: Thu, 14 Nov 2024 12:11:04 +0000 Subject: [PATCH 14/14] rename again before merge --- provisioner/terraform/tfparse/{tfextract.go => tfparse.go} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename provisioner/terraform/tfparse/{tfextract.go => tfparse.go} (100%) diff --git a/provisioner/terraform/tfparse/tfextract.go b/provisioner/terraform/tfparse/tfparse.go similarity index 100% rename from provisioner/terraform/tfparse/tfextract.go rename to provisioner/terraform/tfparse/tfparse.go
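
Taken together, the series replaces the file-based WorkspaceTagDefaultsFromFile helper with WriteArchive plus a reusable Parser. The sketch below shows how the pieces fit together end to end, mirroring the updated tests; it is illustrative only — the archive contents are a placeholder and error handling is reduced to panics.

package main

import (
	"context"
	"fmt"
	"os"

	"cdr.dev/slog"
	"cdr.dev/slog/sloggers/sloghuman"

	"github.com/coder/coder/v2/provisioner/terraform/tfparse"
)

func main() {
	ctx := context.Background()
	logger := slog.Make(sloghuman.Sink(os.Stderr)).Leveled(slog.LevelDebug)

	// Placeholder: a tar archive of the template files, e.g. as produced by the
	// createTar helper in the tests above.
	var archive []byte

	// Extract the archive to a scratch directory and parse the module from disk.
	tmpDir, err := os.MkdirTemp("", "tfparse-example")
	if err != nil {
		panic(err)
	}
	defer os.RemoveAll(tmpDir)
	if err := tfparse.WriteArchive(archive, "application/x-tar", tmpDir); err != nil {
		panic(err)
	}

	parser, diags := tfparse.New(tmpDir, tfparse.WithLogger(logger.Named("tfparse")))
	if diags.HasErrors() {
		panic(diags.Err())
	}

	// Raw tag expressions, e.g. {"az": "data.coder_parameter.az.value"}.
	tags, err := parser.WorkspaceTags(ctx)
	if err != nil {
		panic(err)
	}

	// Tags evaluated against variable and coder_parameter defaults, e.g. {"az": "a"}.
	defaults, err := parser.WorkspaceTagDefaults(ctx)
	if err != nil {
		panic(err)
	}

	// Template variables, in source order, ready for the provisioner protocol.
	vars, err := parser.TemplateVariables()
	if err != nil {
		panic(err)
	}

	fmt.Println(tags, defaults, len(vars))
}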