diff --git a/provisioner/terraform/parse.go b/provisioner/terraform/parse.go index 86dcec2e4cfeb..9c60102fc8579 100644 --- a/provisioner/terraform/parse.go +++ b/provisioner/terraform/parse.go @@ -21,17 +21,17 @@ func (s *server) Parse(sess *provisionersdk.Session, _ *proto.ParseRequest, _ <- defer span.End() // Load the module and print any parse errors. - module, diags := tfconfig.LoadModule(sess.WorkDirectory) + parser, diags := tfparse.New(sess.WorkDirectory, tfparse.WithLogger(s.logger.Named("tfparse"))) if diags.HasErrors() { return provisionersdk.ParseErrorf("load module: %s", formatDiagnostics(sess.WorkDirectory, diags)) } - workspaceTags, err := tfparse.WorkspaceTags(ctx, s.logger, module) + workspaceTags, err := parser.WorkspaceTags(ctx) if err != nil { return provisionersdk.ParseErrorf("can't load workspace tags: %v", err) } - templateVariables, err := tfparse.LoadTerraformVariables(module) + templateVariables, err := parser.TemplateVariables() if err != nil { return provisionersdk.ParseErrorf("can't load template variables: %v", err) } diff --git a/provisioner/terraform/tfparse/tfextract.go b/provisioner/terraform/tfparse/tfextract.go deleted file mode 100644 index ed85732e00d5e..0000000000000 --- a/provisioner/terraform/tfparse/tfextract.go +++ /dev/null @@ -1,182 +0,0 @@ -package tfparse - -import ( - "context" - "encoding/json" - "os" - "slices" - "sort" - "strings" - - "github.com/coder/coder/v2/provisionersdk/proto" - - "github.com/hashicorp/hcl/v2" - "github.com/hashicorp/hcl/v2/hclparse" - "github.com/hashicorp/hcl/v2/hclsyntax" - "github.com/hashicorp/terraform-config-inspect/tfconfig" - "golang.org/x/xerrors" - - "cdr.dev/slog" -) - -// WorkspaceTags extracts tags from coder_workspace_tags data sources defined in module. 
-func WorkspaceTags(ctx context.Context, logger slog.Logger, module *tfconfig.Module) (map[string]string, error) { - workspaceTags := map[string]string{} - - for _, dataResource := range module.DataResources { - if dataResource.Type != "coder_workspace_tags" { - logger.Debug(ctx, "skip resource as it is not a coder_workspace_tags", "resource_name", dataResource.Name, "resource_type", dataResource.Type) - continue - } - - var file *hcl.File - var diags hcl.Diagnostics - parser := hclparse.NewParser() - - if !strings.HasSuffix(dataResource.Pos.Filename, ".tf") { - logger.Debug(ctx, "only .tf files can be parsed", "filename", dataResource.Pos.Filename) - continue - } - // We know in which HCL file is the data resource defined. - file, diags = parser.ParseHCLFile(dataResource.Pos.Filename) - if diags.HasErrors() { - return nil, xerrors.Errorf("can't parse the resource file: %s", diags.Error()) - } - - // Parse root to find "coder_workspace_tags". - content, _, diags := file.Body.PartialContent(rootTemplateSchema) - if diags.HasErrors() { - return nil, xerrors.Errorf("can't parse the resource file: %s", diags.Error()) - } - - // Iterate over blocks to locate the exact "coder_workspace_tags" data resource. - for _, block := range content.Blocks { - if !slices.Equal(block.Labels, []string{"coder_workspace_tags", dataResource.Name}) { - continue - } - - // Parse "coder_workspace_tags" to find all key-value tags. 
- resContent, _, diags := block.Body.PartialContent(coderWorkspaceTagsSchema) - if diags.HasErrors() { - return nil, xerrors.Errorf(`can't parse the resource coder_workspace_tags: %s`, diags.Error()) - } - - if resContent == nil { - continue // workspace tags are not present - } - - if _, ok := resContent.Attributes["tags"]; !ok { - return nil, xerrors.Errorf(`"tags" attribute is required by coder_workspace_tags`) - } - - expr := resContent.Attributes["tags"].Expr - tagsExpr, ok := expr.(*hclsyntax.ObjectConsExpr) - if !ok { - return nil, xerrors.Errorf(`"tags" attribute is expected to be a key-value map`) - } - - // Parse key-value entries in "coder_workspace_tags" - for _, tagItem := range tagsExpr.Items { - key, err := previewFileContent(tagItem.KeyExpr.Range()) - if err != nil { - return nil, xerrors.Errorf("can't preview the resource file: %v", err) - } - key = strings.Trim(key, `"`) - - value, err := previewFileContent(tagItem.ValueExpr.Range()) - if err != nil { - return nil, xerrors.Errorf("can't preview the resource file: %v", err) - } - - logger.Info(ctx, "workspace tag found", "key", key, "value", value) - - if _, ok := workspaceTags[key]; ok { - return nil, xerrors.Errorf(`workspace tag %q is defined multiple times`, key) - } - workspaceTags[key] = value - } - } - } - return workspaceTags, nil -} - -var rootTemplateSchema = &hcl.BodySchema{ - Blocks: []hcl.BlockHeaderSchema{ - { - Type: "data", - LabelNames: []string{"type", "name"}, - }, - }, -} - -var coderWorkspaceTagsSchema = &hcl.BodySchema{ - Attributes: []hcl.AttributeSchema{ - { - Name: "tags", - }, - }, -} - -func previewFileContent(fileRange hcl.Range) (string, error) { - body, err := os.ReadFile(fileRange.Filename) - if err != nil { - return "", err - } - return string(fileRange.SliceBytes(body)), nil -} - -// LoadTerraformVariables extracts all Terraform variables from module and converts them -// to template variables. The variables are sorted by source position. 
-func LoadTerraformVariables(module *tfconfig.Module) ([]*proto.TemplateVariable, error) { - // Sort variables by (filename, line) to make the ordering consistent - variables := make([]*tfconfig.Variable, 0, len(module.Variables)) - for _, v := range module.Variables { - variables = append(variables, v) - } - sort.Slice(variables, func(i, j int) bool { - return compareSourcePos(variables[i].Pos, variables[j].Pos) - }) - - var templateVariables []*proto.TemplateVariable - for _, v := range variables { - mv, err := convertTerraformVariable(v) - if err != nil { - return nil, err - } - templateVariables = append(templateVariables, mv) - } - return templateVariables, nil -} - -// convertTerraformVariable converts a Terraform variable to a template-wide variable, processed by Coder. -func convertTerraformVariable(variable *tfconfig.Variable) (*proto.TemplateVariable, error) { - var defaultData string - if variable.Default != nil { - var valid bool - defaultData, valid = variable.Default.(string) - if !valid { - defaultDataRaw, err := json.Marshal(variable.Default) - if err != nil { - return nil, xerrors.Errorf("parse variable %q default: %w", variable.Name, err) - } - defaultData = string(defaultDataRaw) - } - } - - return &proto.TemplateVariable{ - Name: variable.Name, - Description: variable.Description, - Type: variable.Type, - DefaultValue: defaultData, - // variable.Required is always false. Empty string is a valid default value, so it doesn't enforce required to be "true". 
- Required: variable.Default == nil, - Sensitive: variable.Sensitive, - }, nil -} - -func compareSourcePos(x, y tfconfig.SourcePos) bool { - if x.Filename != y.Filename { - return x.Filename < y.Filename - } - return x.Line < y.Line -} diff --git a/provisioner/terraform/tfparse/tfparse.go b/provisioner/terraform/tfparse/tfparse.go new file mode 100644 index 0000000000000..3807c518cbb73 --- /dev/null +++ b/provisioner/terraform/tfparse/tfparse.go @@ -0,0 +1,526 @@ +package tfparse + +import ( + "archive/zip" + "bytes" + "context" + "encoding/json" + "io" + "os" + "slices" + "sort" + "strconv" + "strings" + + "github.com/coder/coder/v2/archive" + "github.com/coder/coder/v2/provisionersdk" + "github.com/coder/coder/v2/provisionersdk/proto" + + "github.com/hashicorp/hcl/v2" + "github.com/hashicorp/hcl/v2/hclparse" + "github.com/hashicorp/hcl/v2/hclsyntax" + "github.com/hashicorp/terraform-config-inspect/tfconfig" + "github.com/zclconf/go-cty/cty" + "golang.org/x/exp/maps" + "golang.org/x/xerrors" + + "cdr.dev/slog" +) + +// NOTE: This is duplicated from coderd but we can't import it here without +// introducing a circular dependency +const maxFileSizeBytes = 10 * (10 << 20) // 10 MB + +// parseHCLFiler is the actual interface of *hclparse.Parser we use +// to parse HCL. This is extracted to an interface so we can more +// easily swap this out for an alternative implementation later on. +type parseHCLFiler interface { + ParseHCLFile(filename string) (*hcl.File, hcl.Diagnostics) +} + +// Parser parses a Terraform module on disk. +type Parser struct { + logger slog.Logger + underlying parseHCLFiler + module *tfconfig.Module + workdir string +} + +// Option is an option for a new instance of Parser. 
+type Option func(*Parser) + +// WithLogger sets the logger to be used by Parser +func WithLogger(logger slog.Logger) Option { + return func(p *Parser) { + p.logger = logger + } +} + +// New returns a new instance of Parser, as well as any diagnostics +// encountered while parsing the module. +func New(workdir string, opts ...Option) (*Parser, tfconfig.Diagnostics) { + p := Parser{ + logger: slog.Make(), + underlying: hclparse.NewParser(), + workdir: workdir, + module: nil, + } + for _, o := range opts { + o(&p) + } + + var diags tfconfig.Diagnostics + if p.module == nil { + m, ds := tfconfig.LoadModule(workdir) + diags = ds + p.module = m + } + + return &p, diags +} + +// WorkspaceTags looks for all coder_workspace_tags datasource in the module +// and returns the raw values for the tags. Use +func (p *Parser) WorkspaceTags(ctx context.Context) (map[string]string, error) { + tags := map[string]string{} + var skipped []string + for _, dataResource := range p.module.DataResources { + if dataResource.Type != "coder_workspace_tags" { + skipped = append(skipped, strings.Join([]string{"data", dataResource.Type, dataResource.Name}, ".")) + continue + } + + var file *hcl.File + var diags hcl.Diagnostics + + if !strings.HasSuffix(dataResource.Pos.Filename, ".tf") { + continue + } + // We know in which HCL file is the data resource defined. + file, diags = p.underlying.ParseHCLFile(dataResource.Pos.Filename) + if diags.HasErrors() { + return nil, xerrors.Errorf("can't parse the resource file: %s", diags.Error()) + } + + // Parse root to find "coder_workspace_tags". + content, _, diags := file.Body.PartialContent(rootTemplateSchema) + if diags.HasErrors() { + return nil, xerrors.Errorf("can't parse the resource file: %s", diags.Error()) + } + + // Iterate over blocks to locate the exact "coder_workspace_tags" data resource. 
+ for _, block := range content.Blocks { + if !slices.Equal(block.Labels, []string{"coder_workspace_tags", dataResource.Name}) { + continue + } + + // Parse "coder_workspace_tags" to find all key-value tags. + resContent, _, diags := block.Body.PartialContent(coderWorkspaceTagsSchema) + if diags.HasErrors() { + return nil, xerrors.Errorf(`can't parse the resource coder_workspace_tags: %s`, diags.Error()) + } + + if resContent == nil { + continue // workspace tags are not present + } + + if _, ok := resContent.Attributes["tags"]; !ok { + return nil, xerrors.Errorf(`"tags" attribute is required by coder_workspace_tags`) + } + + expr := resContent.Attributes["tags"].Expr + tagsExpr, ok := expr.(*hclsyntax.ObjectConsExpr) + if !ok { + return nil, xerrors.Errorf(`"tags" attribute is expected to be a key-value map`) + } + + // Parse key-value entries in "coder_workspace_tags" + for _, tagItem := range tagsExpr.Items { + key, err := previewFileContent(tagItem.KeyExpr.Range()) + if err != nil { + return nil, xerrors.Errorf("can't preview the resource file: %v", err) + } + key = strings.Trim(key, `"`) + + value, err := previewFileContent(tagItem.ValueExpr.Range()) + if err != nil { + return nil, xerrors.Errorf("can't preview the resource file: %v", err) + } + + if _, ok := tags[key]; ok { + return nil, xerrors.Errorf(`workspace tag %q is defined multiple times`, key) + } + tags[key] = value + } + } + } + p.logger.Debug(ctx, "found workspace tags", slog.F("tags", maps.Keys(tags)), slog.F("skipped", skipped)) + return tags, nil +} + +func (p *Parser) WorkspaceTagDefaults(ctx context.Context) (map[string]string, error) { + // This only gets us the expressions. We need to evaluate them. + // Example: var.region -> "us" + tags, err := p.WorkspaceTags(ctx) + if err != nil { + return nil, xerrors.Errorf("extract workspace tags: %w", err) + } + + // To evaluate the expressions, we need to load the default values for + // variables and parameters. 
+ varsDefaults, err := p.VariableDefaults(ctx) + if err != nil { + return nil, xerrors.Errorf("load variable defaults: %w", err) + } + paramsDefaults, err := p.CoderParameterDefaults(ctx) + if err != nil { + return nil, xerrors.Errorf("load parameter defaults: %w", err) + } + + // Evaluate the tags expressions given the inputs. + // This will resolve any variables or parameters to their default + // values. + evalTags, err := evaluateWorkspaceTags(varsDefaults, paramsDefaults, tags) + if err != nil { + return nil, xerrors.Errorf("eval provisioner tags: %w", err) + } + + // Ensure that none of the tag values are empty after evaluation. + for k, v := range evalTags { + if len(strings.TrimSpace(v)) > 0 { + continue + } + return nil, xerrors.Errorf("provisioner tag %q evaluated to an empty value, please set a default value", k) + } + return evalTags, nil +} + +// TemplateVariables returns all of the Terraform variables in the module +// as TemplateVariables. +func (p *Parser) TemplateVariables() ([]*proto.TemplateVariable, error) { + // Sort variables by (filename, line) to make the ordering consistent + variables := make([]*tfconfig.Variable, 0, len(p.module.Variables)) + for _, v := range p.module.Variables { + variables = append(variables, v) + } + sort.Slice(variables, func(i, j int) bool { + return compareSourcePos(variables[i].Pos, variables[j].Pos) + }) + + var templateVariables []*proto.TemplateVariable + for _, v := range variables { + mv, err := convertTerraformVariable(v) + if err != nil { + return nil, err + } + templateVariables = append(templateVariables, mv) + } + return templateVariables, nil +} + +// WriteArchive is a helper function to write a in-memory archive +// with the given mimetype to disk. Only zip and tar archives +// are currently supported. +func WriteArchive(bs []byte, mimetype string, path string) error { + // Check if we need to convert the file first! 
+ var rdr io.Reader + switch mimetype { + case "application/x-tar": + rdr = bytes.NewReader(bs) + case "application/zip": + if zr, err := zip.NewReader(bytes.NewReader(bs), int64(len(bs))); err != nil { + return xerrors.Errorf("read zip file: %w", err) + } else if tarBytes, err := archive.CreateTarFromZip(zr, maxFileSizeBytes); err != nil { + return xerrors.Errorf("convert zip to tar: %w", err) + } else { + rdr = bytes.NewReader(tarBytes) + } + default: + return xerrors.Errorf("unsupported mimetype: %s", mimetype) + } + + // Untar the file into the temporary directory + if err := provisionersdk.Untar(path, rdr); err != nil { + return xerrors.Errorf("untar: %w", err) + } + + return nil +} + +// VariableDefaults returns the default values for all variables passed to it. +func (p *Parser) VariableDefaults(ctx context.Context) (map[string]string, error) { + // iterate through vars to get the default values for all + // variables. + m := make(map[string]string) + for _, v := range p.module.Variables { + if v == nil { + continue + } + sv, err := interfaceToString(v.Default) + if err != nil { + return nil, xerrors.Errorf("can't convert variable default value to string: %v", err) + } + m[v.Name] = strings.Trim(sv, `"`) + } + p.logger.Debug(ctx, "found default values for variables", slog.F("defaults", m)) + return m, nil +} + +// CoderParameterDefaults returns the default values of all coder_parameter data sources +// in the parsed module. 
+func (p *Parser) CoderParameterDefaults(ctx context.Context) (map[string]string, error) { + defaultsM := make(map[string]string) + var ( + skipped []string + file *hcl.File + diags hcl.Diagnostics + ) + + for _, dataResource := range p.module.DataResources { + if dataResource == nil { + continue + } + + if dataResource.Type != "coder_parameter" { + skipped = append(skipped, strings.Join([]string{"data", dataResource.Type, dataResource.Name}, ".")) + continue + } + + if !strings.HasSuffix(dataResource.Pos.Filename, ".tf") { + continue + } + + // We know in which HCL file is the data resource defined. + // NOTE: hclparse.Parser will cache multiple successive calls to parse the same file. + file, diags = p.underlying.ParseHCLFile(dataResource.Pos.Filename) + if diags.HasErrors() { + return nil, xerrors.Errorf("can't parse the resource file %q: %s", dataResource.Pos.Filename, diags.Error()) + } + + // Parse root to find "coder_parameter". + content, _, diags := file.Body.PartialContent(rootTemplateSchema) + if diags.HasErrors() { + return nil, xerrors.Errorf("can't parse the resource file: %s", diags.Error()) + } + + // Iterate over blocks to locate the exact "coder_parameter" data resource. + for _, block := range content.Blocks { + if !slices.Equal(block.Labels, []string{"coder_parameter", dataResource.Name}) { + continue + } + + // Parse "coder_parameter" to find the default value. 
+ resContent, _, diags := block.Body.PartialContent(coderParameterSchema) + if diags.HasErrors() { + return nil, xerrors.Errorf(`can't parse the coder_parameter: %s`, diags.Error()) + } + + if _, ok := resContent.Attributes["default"]; !ok { + defaultsM[dataResource.Name] = "" + } else { + expr := resContent.Attributes["default"].Expr + value, err := previewFileContent(expr.Range()) + if err != nil { + return nil, xerrors.Errorf("can't preview the resource file: %v", err) + } + defaultsM[dataResource.Name] = strings.Trim(value, `"`) + } + } + } + p.logger.Debug(ctx, "found default values for parameters", slog.F("defaults", defaultsM), slog.F("skipped", skipped)) + return defaultsM, nil +} + +// evaluateWorkspaceTags evaluates the given workspaceTags based on the given +// default values for variables and coder_parameter data sources. +func evaluateWorkspaceTags(varsDefaults, paramsDefaults, workspaceTags map[string]string) (map[string]string, error) { + // Filter only allowed data sources for preflight check. + // This is not strictly required but provides a friendlier error. + if err := validWorkspaceTagValues(workspaceTags); err != nil { + return nil, err + } + // We only add variables and coder_parameter data sources. Anything else will be + // undefined and will raise a Terraform error. 
+ evalCtx := buildEvalContext(varsDefaults, paramsDefaults) + tags := make(map[string]string) + for workspaceTagKey, workspaceTagValue := range workspaceTags { + expr, diags := hclsyntax.ParseExpression([]byte(workspaceTagValue), "expression.hcl", hcl.InitialPos) + if diags.HasErrors() { + return nil, xerrors.Errorf("failed to parse workspace tag key %q value %q: %s", workspaceTagKey, workspaceTagValue, diags.Error()) + } + + val, diags := expr.Value(evalCtx) + if diags.HasErrors() { + return nil, xerrors.Errorf("failed to evaluate workspace tag key %q value %q: %s", workspaceTagKey, workspaceTagValue, diags.Error()) + } + + // Do not use "val.AsString()" as it can panic + str, err := ctyValueString(val) + if err != nil { + return nil, xerrors.Errorf("failed to marshal workspace tag key %q value %q as string: %s", workspaceTagKey, workspaceTagValue, err) + } + tags[workspaceTagKey] = str + } + return tags, nil +} + +// validWorkspaceTagValues returns an error if any value of the given tags map +// evaluates to a datasource other than "coder_parameter". +// This only serves to provide a friendly error if a user attempts to reference +// a data source other than "coder_parameter" in "coder_workspace_tags". 
+func validWorkspaceTagValues(tags map[string]string) error { + for _, v := range tags { + parts := strings.SplitN(v, ".", 3) + if len(parts) != 3 { + continue + } + if parts[0] == "data" && parts[1] != "coder_parameter" { + return xerrors.Errorf("invalid workspace tag value %q: only the \"coder_parameter\" data source is supported here", v) + } + } + return nil +} + +func buildEvalContext(varDefaults map[string]string, paramDefaults map[string]string) *hcl.EvalContext { + varDefaultsM := map[string]cty.Value{} + for varName, varDefault := range varDefaults { + varDefaultsM[varName] = cty.MapVal(map[string]cty.Value{ + "value": cty.StringVal(varDefault), + }) + } + + paramDefaultsM := map[string]cty.Value{} + for paramName, paramDefault := range paramDefaults { + paramDefaultsM[paramName] = cty.MapVal(map[string]cty.Value{ + "value": cty.StringVal(paramDefault), + }) + } + + evalCtx := &hcl.EvalContext{ + Variables: map[string]cty.Value{}, + // NOTE: we do not currently support function execution here. + // The default function map for Terraform is not exposed, so we would essentially + // have to re-implement or copy the entire map or a subset thereof. 
+ // ref: https://github.com/hashicorp/terraform/blob/e044e569c5bc81f82e9a4d7891f37c6fbb0a8a10/internal/lang/functions.go#L54 + Functions: nil, + } + if len(varDefaultsM) != 0 { + evalCtx.Variables["var"] = cty.MapVal(varDefaultsM) + } + if len(paramDefaultsM) != 0 { + evalCtx.Variables["data"] = cty.MapVal(map[string]cty.Value{ + "coder_parameter": cty.MapVal(paramDefaultsM), + }) + } + + return evalCtx +} + +var rootTemplateSchema = &hcl.BodySchema{ + Blocks: []hcl.BlockHeaderSchema{ + { + Type: "data", + LabelNames: []string{"type", "name"}, + }, + }, +} + +var coderWorkspaceTagsSchema = &hcl.BodySchema{ + Attributes: []hcl.AttributeSchema{ + { + Name: "tags", + }, + }, +} + +var coderParameterSchema = &hcl.BodySchema{ + Attributes: []hcl.AttributeSchema{ + { + Name: "default", + }, + }, +} + +func previewFileContent(fileRange hcl.Range) (string, error) { + body, err := os.ReadFile(fileRange.Filename) + if err != nil { + return "", err + } + return string(fileRange.SliceBytes(body)), nil +} + +// convertTerraformVariable converts a Terraform variable to a template-wide variable, processed by Coder. +func convertTerraformVariable(variable *tfconfig.Variable) (*proto.TemplateVariable, error) { + var defaultData string + if variable.Default != nil { + var valid bool + defaultData, valid = variable.Default.(string) + if !valid { + defaultDataRaw, err := json.Marshal(variable.Default) + if err != nil { + return nil, xerrors.Errorf("parse variable %q default: %w", variable.Name, err) + } + defaultData = string(defaultDataRaw) + } + } + + return &proto.TemplateVariable{ + Name: variable.Name, + Description: variable.Description, + Type: variable.Type, + DefaultValue: defaultData, + // variable.Required is always false. Empty string is a valid default value, so it doesn't enforce required to be "true". 
+ Required: variable.Default == nil, + Sensitive: variable.Sensitive, + }, nil +} + +func compareSourcePos(x, y tfconfig.SourcePos) bool { + if x.Filename != y.Filename { + return x.Filename < y.Filename + } + return x.Line < y.Line +} + +func ctyValueString(val cty.Value) (string, error) { + switch val.Type() { + case cty.Bool: + if val.True() { + return "true", nil + } else { + return "false", nil + } + case cty.Number: + return val.AsBigFloat().String(), nil + case cty.String: + return val.AsString(), nil + // We may also have a map[string]interface{} with key "value". + case cty.Map(cty.String): + valval, ok := val.AsValueMap()["value"] + if !ok { + return "", xerrors.Errorf("map does not have key 'value'") + } + return ctyValueString(valval) + default: + return "", xerrors.Errorf("only primitive types are supported - bool, number, and string") + } +} + +func interfaceToString(i interface{}) (string, error) { + switch v := i.(type) { + case nil: + return "", nil + case string: + return v, nil + case []byte: + return string(v), nil + case int: + return strconv.FormatInt(int64(v), 10), nil + case float64: + return strconv.FormatFloat(v, 'f', -1, 64), nil + case bool: + return strconv.FormatBool(v), nil + default: + return "", xerrors.Errorf("unsupported type %T", v) + } +} diff --git a/provisioner/terraform/tfparse/tfparse_test.go b/provisioner/terraform/tfparse/tfparse_test.go new file mode 100644 index 0000000000000..a08f9ff76887e --- /dev/null +++ b/provisioner/terraform/tfparse/tfparse_test.go @@ -0,0 +1,500 @@ +package tfparse_test + +import ( + "archive/tar" + "bytes" + "context" + "io" + "log" + "testing" + + "cdr.dev/slog" + "cdr.dev/slog/sloggers/sloghuman" + "cdr.dev/slog/sloggers/slogtest" + "github.com/coder/coder/v2/archive" + "github.com/coder/coder/v2/provisioner/terraform/tfparse" + "github.com/coder/coder/v2/testutil" + + "github.com/stretchr/testify/require" +) + +func Test_WorkspaceTagDefaultsFromFile(t *testing.T) { + t.Parallel() + + for _, 
tc := range []struct { + name string + files map[string]string + expectTags map[string]string + expectError string + }{ + { + name: "empty", + files: map[string]string{}, + expectTags: map[string]string{}, + expectError: "", + }, + { + name: "single text file", + files: map[string]string{ + "file.txt": ` + hello world`, + }, + expectTags: map[string]string{}, + expectError: "", + }, + { + name: "main.tf with no workspace_tags", + files: map[string]string{ + "main.tf": ` + provider "foo" {} + resource "foo_bar" "baz" {} + variable "region" { + type = string + default = "us" + } + data "base" "ours" { + all = true + } + data "coder_parameter" "az" { + name = "az" + type = "string" + default = "a" + }`, + }, + expectTags: map[string]string{}, + expectError: "", + }, + { + name: "main.tf with empty workspace tags", + files: map[string]string{ + "main.tf": ` + provider "foo" {} + resource "foo_bar" "baz" {} + variable "region" { + type = string + default = "us" + } + data "base" "ours" { + all = true + } + data "coder_parameter" "az" { + name = "az" + type = "string" + default = "a" + } + data "coder_workspace_tags" "tags" {}`, + }, + expectTags: map[string]string{}, + expectError: `"tags" attribute is required by coder_workspace_tags`, + }, + { + name: "main.tf with valid workspace tags", + files: map[string]string{ + "main.tf": ` + provider "foo" {} + resource "foo_bar" "baz" {} + variable "region" { + type = string + default = "us" + } + data "base" "ours" { + all = true + } + data "coder_parameter" "az" { + name = "az" + type = "string" + default = "a" + } + data "coder_workspace_tags" "tags" { + tags = { + "platform" = "kubernetes", + "cluster" = "${"devel"}${"opers"}" + "region" = var.region + "az" = data.coder_parameter.az.value + } + }`, + }, + expectTags: map[string]string{"platform": "kubernetes", "cluster": "developers", "region": "us", "az": "a"}, + expectError: "", + }, + { + name: "main.tf with multiple valid workspace tags", + files: map[string]string{ + 
"main.tf": ` + provider "foo" {} + resource "foo_bar" "baz" {} + variable "region" { + type = string + default = "us" + } + variable "region2" { + type = string + default = "eu" + } + data "base" "ours" { + all = true + } + data "coder_parameter" "az" { + name = "az" + type = "string" + default = "a" + } + data "coder_parameter" "az2" { + name = "az2" + type = "string" + default = "b" + } + data "coder_workspace_tags" "tags" { + tags = { + "platform" = "kubernetes", + "cluster" = "${"devel"}${"opers"}" + "region" = var.region + "az" = data.coder_parameter.az.value + } + } + data "coder_workspace_tags" "more_tags" { + tags = { + "foo" = "bar" + } + }`, + }, + expectTags: map[string]string{"platform": "kubernetes", "cluster": "developers", "region": "us", "az": "a", "foo": "bar"}, + expectError: "", + }, + { + name: "main.tf with missing parameter default value for workspace tags", + files: map[string]string{ + "main.tf": ` + provider "foo" {} + resource "foo_bar" "baz" {} + variable "region" { + type = string + default = "us" + } + data "base" "ours" { + all = true + } + data "coder_parameter" "az" { + name = "az" + type = "string" + } + data "coder_workspace_tags" "tags" { + tags = { + "platform" = "kubernetes", + "cluster" = "${"devel"}${"opers"}" + "region" = var.region + "az" = data.coder_parameter.az.value + } + }`, + }, + expectError: `provisioner tag "az" evaluated to an empty value, please set a default value`, + }, + { + name: "main.tf with missing parameter default value outside workspace tags", + files: map[string]string{ + "main.tf": ` + provider "foo" {} + resource "foo_bar" "baz" {} + variable "region" { + type = string + default = "us" + } + data "base" "ours" { + all = true + } + data "coder_parameter" "az" { + name = "az" + type = "string" + default = "a" + } + data "coder_parameter" "notaz" { + name = "notaz" + type = "string" + } + data "coder_workspace_tags" "tags" { + tags = { + "platform" = "kubernetes", + "cluster" = "${"devel"}${"opers"}" + 
"region" = var.region + "az" = data.coder_parameter.az.value + } + }`, + }, + expectTags: map[string]string{"platform": "kubernetes", "cluster": "developers", "region": "us", "az": "a"}, + expectError: ``, + }, + { + name: "main.tf with missing variable default value outside workspace tags", + files: map[string]string{ + "main.tf": ` + provider "foo" {} + resource "foo_bar" "baz" {} + variable "region" { + type = string + default = "us" + } + variable "notregion" { + type = string + } + data "base" "ours" { + all = true + } + data "coder_parameter" "az" { + name = "az" + type = "string" + default = "a" + } + data "coder_workspace_tags" "tags" { + tags = { + "platform" = "kubernetes", + "cluster" = "${"devel"}${"opers"}" + "region" = var.region + "az" = data.coder_parameter.az.value + } + }`, + }, + expectTags: map[string]string{"platform": "kubernetes", "cluster": "developers", "region": "us", "az": "a"}, + expectError: ``, + }, + { + name: "main.tf with disallowed data source for workspace tags", + files: map[string]string{ + "main.tf": ` + provider "foo" {} + resource "foo_bar" "baz" { + name = "foobar" + } + variable "region" { + type = string + default = "us" + } + data "base" "ours" { + all = true + } + data "coder_parameter" "az" { + name = "az" + type = "string" + default = "a" + } + data "local_file" "hostname" { + filename = "/etc/hostname" + } + data "coder_workspace_tags" "tags" { + tags = { + "platform" = "kubernetes", + "cluster" = "${"devel"}${"opers"}" + "region" = var.region + "az" = data.coder_parameter.az.value + "hostname" = data.local_file.hostname.content + } + }`, + }, + expectTags: nil, + expectError: `invalid workspace tag value "data.local_file.hostname.content": only the "coder_parameter" data source is supported here`, + }, + { + name: "main.tf with disallowed resource for workspace tags", + files: map[string]string{ + "main.tf": ` + provider "foo" {} + resource "foo_bar" "baz" { + name = "foobar" + } + variable "region" { + type = string 
+ default = "us" + } + data "base" "ours" { + all = true + } + data "coder_parameter" "az" { + name = "az" + type = "string" + default = "a" + } + data "coder_workspace_tags" "tags" { + tags = { + "platform" = "kubernetes", + "cluster" = "${"devel"}${"opers"}" + "region" = var.region + "az" = data.coder_parameter.az.value + "foobarbaz" = foo_bar.baz.name + } + }`, + }, + expectTags: nil, + // TODO: this error isn't great, but it has the desired effect. + expectError: `There is no variable named "foo_bar"`, + }, + { + name: "main.tf with functions in workspace tags", + files: map[string]string{ + "main.tf": ` + provider "foo" {} + resource "foo_bar" "baz" { + name = "foobar" + } + variable "region" { + type = string + default = "region.us" + } + data "base" "ours" { + all = true + } + data "coder_parameter" "az" { + name = "az" + type = "string" + default = "az.a" + } + data "coder_workspace_tags" "tags" { + tags = { + "platform" = "kubernetes", + "cluster" = "${"devel"}${"opers"}" + "region" = try(split(".", var.region)[1], "placeholder") + "az" = try(split(".", data.coder_parameter.az.value)[1], "placeholder") + } + }`, + }, + expectTags: nil, + expectError: `Function calls not allowed; Functions may not be called here.`, + }, + } { + tc := tc + t.Run(tc.name+"/tar", func(t *testing.T) { + t.Parallel() + ctx := testutil.Context(t, testutil.WaitShort) + tar := createTar(t, tc.files) + logger := slogtest.Make(t, nil).Leveled(slog.LevelDebug) + tmpDir := t.TempDir() + tfparse.WriteArchive(tar, "application/x-tar", tmpDir) + parser, diags := tfparse.New(tmpDir, tfparse.WithLogger(logger)) + require.NoError(t, diags.Err()) + tags, err := parser.WorkspaceTagDefaults(ctx) + if tc.expectError != "" { + require.NotNil(t, err) + require.Contains(t, err.Error(), tc.expectError) + } else { + require.NoError(t, err) + require.Equal(t, tc.expectTags, tags) + } + }) + t.Run(tc.name+"/zip", func(t *testing.T) { + t.Parallel() + ctx := testutil.Context(t, testutil.WaitShort) + zip 
:= createZip(t, tc.files) + logger := slogtest.Make(t, nil).Leveled(slog.LevelDebug) + tmpDir := t.TempDir() + tfparse.WriteArchive(zip, "application/zip", tmpDir) + parser, diags := tfparse.New(tmpDir, tfparse.WithLogger(logger)) + require.NoError(t, diags.Err()) + tags, err := parser.WorkspaceTagDefaults(ctx) + if tc.expectError != "" { + require.Error(t, err) + require.Contains(t, err.Error(), tc.expectError) + } else { + require.NoError(t, err) + require.Equal(t, tc.expectTags, tags) + } + }) + } +} + +func createTar(t testing.TB, files map[string]string) []byte { + var buffer bytes.Buffer + writer := tar.NewWriter(&buffer) + for path, content := range files { + err := writer.WriteHeader(&tar.Header{ + Name: path, + Size: int64(len(content)), + Uid: 65534, // nobody + Gid: 65534, // nogroup + Mode: 0o666, // -rw-rw-rw- + }) + require.NoError(t, err) + + _, err = writer.Write([]byte(content)) + require.NoError(t, err) + } + + err := writer.Flush() + require.NoError(t, err) + return buffer.Bytes() +} + +func createZip(t testing.TB, files map[string]string) []byte { + ta := createTar(t, files) + tr := tar.NewReader(bytes.NewReader(ta)) + za, err := archive.CreateZipFromTar(tr, int64(len(ta))) + require.NoError(t, err) + return za +} + +// Last run results: +// goos: linux +// goarch: amd64 +// pkg: github.com/coder/coder/v2/provisioner/terraform/tfparse +// cpu: AMD EPYC 7502P 32-Core Processor +// BenchmarkWorkspaceTagDefaultsFromFile/Tar-16 1922 847236 ns/op 176257 B/op 1073 allocs/op +// BenchmarkWorkspaceTagDefaultsFromFile/Zip-16 1273 946910 ns/op 225293 B/op 1130 allocs/op +// PASS +func BenchmarkWorkspaceTagDefaultsFromFile(b *testing.B) { + files := map[string]string{ + "main.tf": ` + provider "foo" {} + resource "foo_bar" "baz" {} + variable "region" { + type = string + default = "us" + } + data "coder_parameter" "az" { + name = "az" + type = "string" + default = "a" + } + data "coder_workspace_tags" "tags" { + tags = { + "platform" = "kubernetes", + 
"cluster" = "${"devel"}${"opers"}" + "region" = var.region + "az" = data.coder_parameter.az.value + } + }`, + } + tarFile := createTar(b, files) + zipFile := createZip(b, files) + logger := discardLogger(b) + b.ResetTimer() + b.Run("Tar", func(b *testing.B) { + ctx := context.Background() + for i := 0; i < b.N; i++ { + tmpDir := b.TempDir() + tfparse.WriteArchive(tarFile, "application/x-tar", tmpDir) + parser, diags := tfparse.New(tmpDir, tfparse.WithLogger(logger)) + require.NoError(b, diags.Err()) + _, err := parser.WorkspaceTags(ctx) + if err != nil { + b.Fatal(err) + } + } + }) + + b.Run("Zip", func(b *testing.B) { + ctx := context.Background() + for i := 0; i < b.N; i++ { + tmpDir := b.TempDir() + tfparse.WriteArchive(zipFile, "application/zip", tmpDir) + parser, diags := tfparse.New(tmpDir, tfparse.WithLogger(logger)) + require.NoError(b, diags.Err()) + _, err := parser.WorkspaceTags(ctx) + if err != nil { + b.Fatal(err) + } + } + }) +} + +func discardLogger(_ testing.TB) slog.Logger { + l := slog.Make(sloghuman.Sink(io.Discard)) + log.SetOutput(slog.Stdlib(context.Background(), l, slog.LevelInfo).Writer()) + return l +}