Restrict HCL special casing of map[string]interface{} fields

The HCL2 parser needs to apply special parsing tweaks so it can parse
the task config the same way HCL1 did. In particular, it needs to
reinterpret `map[string]interface{}` fields and blocks that appear where
attributes are expected.

This commit restricts the special casing to the Job fields and ignores
the `variables` and `locals` blocks.
Mahmood Ali
2020-11-12 11:35:39 -05:00
parent 809b6b9898
commit a673b8a2bd
5 changed files with 150 additions and 7 deletions
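
For context, the shape this special casing targets is roughly the following (a trimmed sketch, not the real definition; the concrete case is the `Config map[string]interface{}` field on `api.Task`, which the new test below checks):

// In HCL1 the task config is written as a block:
//
//	config {
//	  image = "hashicorp/http-echo"
//	  args  = ["-text", "hello"]
//	}
//
// but it must end up in an untyped Go map, which plain HCL2 struct decoding
// cannot fill from a block. The parser therefore captures the block body and
// decodes it into the map in a separate pass.
type taskSketch struct {
	Name   string
	Driver string
	Config map[string]interface{} // populated by the special-casing pass
}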


@@ -11,11 +11,9 @@ import (
"strings"
"github.com/hashicorp/hcl/v2"
"github.com/hashicorp/hcl/v2/ext/dynblock"
"github.com/hashicorp/hcl/v2/hclsyntax"
hcljson "github.com/hashicorp/hcl/v2/json"
"github.com/hashicorp/nomad/api"
"github.com/hashicorp/nomad/jobspec2/hclutil"
)
func Parse(path string, r io.Reader) (*api.Job, error) {
@@ -104,9 +102,7 @@ func decode(c *jobConfig) error {
return diags
}
body := hclutil.BlocksAsAttrs(file.Body)
body = dynblock.Expand(body, c.EvalContext())
diags = c.decodeBody(body)
diags = c.decodeBody(file.Body)
if diags.HasErrors() {
var str strings.Builder
for i, diag := range diags {
@@ -117,7 +113,9 @@ func decode(c *jobConfig) error {
}
return errors.New(str.String())
}
diags = append(diags, decodeMapInterfaceType(&c, c.EvalContext())...)
diags = append(diags, decodeMapInterfaceType(&c.Job, c.EvalContext())...)
diags = append(diags, decodeMapInterfaceType(&c.Tasks, c.EvalContext())...)
diags = append(diags, decodeMapInterfaceType(&c.Vault, c.EvalContext())...)
return nil
}
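
Consolidated, the decode path after this hunk reads roughly as follows (a sketch, not the exact source; `jobConfig`, `decodeBody`, and `decodeMapInterfaceType` are the package's own identifiers shown in the diff):

// Sketch of decode after this change: the file body is decoded as-is (the
// BlocksAsAttrs/dynblock.Expand transforms move into decodeJob, see the last
// hunk), and the map[string]interface{} special casing runs only over the
// job-related fields, leaving `variables` and `locals` blocks alone.
func decodeSketch(c *jobConfig, file *hcl.File) hcl.Diagnostics {
	diags := c.decodeBody(file.Body)
	if diags.HasErrors() {
		return diags
	}
	diags = append(diags, decodeMapInterfaceType(&c.Job, c.EvalContext())...)
	diags = append(diags, decodeMapInterfaceType(&c.Tasks, c.EvalContext())...)
	diags = append(diags, decodeMapInterfaceType(&c.Vault, c.EvalContext())...)
	return diags
}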


@@ -113,6 +113,10 @@ func normalizeTemplates(templates []*api.Template) {
}
}
func intToPtr(v int) *int {
return &v
}
func int8ToPtr(v int8) *int8 {
return &v
}


@@ -41,6 +41,11 @@ func (w *walker) Map(m reflect.Value) error {
return nil
}
// ignore private map fields
if !m.CanSet() {
return nil
}
for _, k := range m.MapKeys() {
v := m.MapIndex(k)
if attr, ok := v.Interface().(*hcl.Attribute); ok {
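
The `CanSet` guard matters because maps reached through unexported struct fields cannot be modified via reflection. A standalone illustration of that behavior (not the Nomad walker itself):

package main

import (
	"fmt"
	"reflect"
)

// holder mimics a struct the walker might traverse: one exported and one
// unexported map field.
type holder struct {
	Public  map[string]interface{}
	private map[string]interface{}
}

func main() {
	h := holder{
		Public:  map[string]interface{}{"a": 1},
		private: map[string]interface{}{"b": 2},
	}
	v := reflect.ValueOf(&h).Elem()
	// The exported map is settable, so a walker may rewrite its values.
	fmt.Println(v.Field(0).CanSet()) // true
	// The unexported map is not settable, so the walker skips it rather
	// than attempting to write decoded values back into it.
	fmt.Println(v.Field(1).CanSet()) // false
}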


@@ -5,6 +5,7 @@ import (
"os"
"testing"
"github.com/hashicorp/nomad/api"
"github.com/hashicorp/nomad/jobspec"
"github.com/stretchr/testify/require"
)
@@ -469,3 +470,132 @@ job "example" {
})
}
}
func TestParseJob_JobWithFunctionsAndLookups(t *testing.T) {
hcl := `
variable "env" {
description = "target environment for the job"
}
locals {
environments = {
prod = { count = 20, dcs = ["prod-dc1", "prod-dc2"] },
staging = { count = 3, dcs = ["dc1"] },
}
env = lookup(local.environments, var.env, { count = 0, dcs = [] })
}
job "job-webserver" {
datacenters = local.env.dcs
group "group-webserver" {
count = local.env.count
task "server" {
driver = "docker"
config {
image = "hashicorp/http-echo"
args = ["-text", "Hello from ${var.env}"]
}
}
}
}
`
cases := []struct {
env string
expectedJob *api.Job
}{
{
"prod",
&api.Job{
ID: stringToPtr("job-webserver"),
Name: stringToPtr("job-webserver"),
Datacenters: []string{"prod-dc1", "prod-dc2"},
TaskGroups: []*api.TaskGroup{
{
Name: stringToPtr("group-webserver"),
Count: intToPtr(20),
Tasks: []*api.Task{
{
Name: "server",
Driver: "docker",
Config: map[string]interface{}{
"image": "hashicorp/http-echo",
"args": []interface{}{"-text", "Hello from prod"},
},
},
},
},
},
},
},
{
"staging",
&api.Job{
ID: stringToPtr("job-webserver"),
Name: stringToPtr("job-webserver"),
Datacenters: []string{"dc1"},
TaskGroups: []*api.TaskGroup{
{
Name: stringToPtr("group-webserver"),
Count: intToPtr(3),
Tasks: []*api.Task{
{
Name: "server",
Driver: "docker",
Config: map[string]interface{}{
"image": "hashicorp/http-echo",
"args": []interface{}{"-text", "Hello from staging"},
},
},
},
},
},
},
},
{
"unknown",
&api.Job{
ID: stringToPtr("job-webserver"),
Name: stringToPtr("job-webserver"),
Datacenters: []string{},
TaskGroups: []*api.TaskGroup{
{
Name: stringToPtr("group-webserver"),
Count: intToPtr(0),
Tasks: []*api.Task{
{
Name: "server",
Driver: "docker",
Config: map[string]interface{}{
"image": "hashicorp/http-echo",
"args": []interface{}{"-text", "Hello from unknown"},
},
},
},
},
},
},
},
}
for _, c := range cases {
t.Run(c.env, func(t *testing.T) {
found, err := ParseWithConfig(&ParseConfig{
Path: "example.hcl",
Body: []byte(hcl),
AllowFS: false,
ArgVars: []string{"env=" + c.env},
})
require.NoError(t, err)
require.Equal(t, c.expectedJob, found)
})
}
}
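
For reference, the same entry point can be called from outside the package roughly like this (a usage sketch; the import path and the job file name are assumptions):

package main

import (
	"fmt"
	"os"

	"github.com/hashicorp/nomad/jobspec2"
)

func main() {
	// Read a jobspec that uses variables/locals, then parse it with an
	// explicit value for the `env` variable, mirroring the test above.
	src, err := os.ReadFile("example.nomad.hcl")
	if err != nil {
		panic(err)
	}
	job, err := jobspec2.ParseWithConfig(&jobspec2.ParseConfig{
		Path:    "example.nomad.hcl",
		Body:    src,
		AllowFS: false,
		ArgVars: []string{"env=prod"},
	})
	if err != nil {
		panic(err)
	}
	fmt.Println(*job.Name, *job.TaskGroups[0].Count)
}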


@@ -5,7 +5,9 @@ import (
"strings"
"github.com/hashicorp/hcl/v2"
"github.com/hashicorp/hcl/v2/ext/dynblock"
"github.com/hashicorp/nomad/api"
"github.com/hashicorp/nomad/jobspec2/hclutil"
"github.com/zclconf/go-cty/cty"
)
@@ -240,6 +242,9 @@ func (c *jobConfig) decodeJob(content *hcl.BodyContent, ctx *hcl.EvalContext) hc
continue
}
body := hclutil.BlocksAsAttrs(b.Body)
body = dynblock.Expand(body, ctx)
if found != nil {
diags = append(diags, &hcl.Diagnostic{
Severity: hcl.DiagError,
@@ -256,12 +261,13 @@ func (c *jobConfig) decodeJob(content *hcl.BodyContent, ctx *hcl.EvalContext) hc
c.JobID = b.Labels[0]
extra, remain, mdiags := b.Body.PartialContent(&hcl.BodySchema{
extra, remain, mdiags := body.PartialContent(&hcl.BodySchema{
Blocks: []hcl.BlockHeaderSchema{
{Type: "vault"},
{Type: "task", LabelNames: []string{"name"}},
},
})
diags = append(diags, mdiags...)
diags = append(diags, c.decodeTopLevelExtras(extra, ctx)...)
diags = append(diags, hclDecoder.DecodeBody(remain, ctx, c.Job)...)
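
Pulled together, the body transformations this hunk applies to the `job` block before decoding are roughly the following (a sketch, not the exact source; `hclutil.BlocksAsAttrs` is Nomad-internal):

// prepareJobBody shows the order of transformations: HCL1-style blocks are
// reinterpreted as attributes, dynamic blocks are expanded against the eval
// context, and only then is the body split into the vault/task blocks and
// the remaining job attributes.
func prepareJobBody(b *hcl.Block, ctx *hcl.EvalContext) (*hcl.BodyContent, hcl.Body, hcl.Diagnostics) {
	body := hclutil.BlocksAsAttrs(b.Body)
	body = dynblock.Expand(body, ctx)
	return body.PartialContent(&hcl.BodySchema{
		Blocks: []hcl.BlockHeaderSchema{
			{Type: "vault"},
			{Type: "task", LabelNames: []string{"name"}},
		},
	})
}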