E2E: Consul compatibility matrix tests (#18799)

Set up a new test suite that exercises Nomad's compatibility with Consul. This
suite installs all currently supported versions of Consul, spins up a Consul
agent with appropriate configuration, and a Nomad agent running in dev
mode. Then it runs a Connect job against each pair.
This commit is contained in:
Tim Gross
2023-10-24 16:03:53 -04:00
committed by GitHub
parent 8de7af51cb
commit 6c2d5a0fbb
13 changed files with 901 additions and 12 deletions

View File

@@ -35,7 +35,7 @@ on:
- 'website/**'
jobs:
test-e2e:
test-e2e-vault:
runs-on: ${{ endsWith(github.repository, '-enterprise') && fromJSON('["self-hosted", "ondemand", "linux"]') || 'ubuntu-latest' }}
steps:
- uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # v3.5.3
@@ -51,6 +51,22 @@ jobs:
- name: Vault Compatibility
run: make integration-test
- run: make e2e-test
test-e2e-consul:
runs-on: 'ubuntu-22.04' # this job requires sudo, so not currently suitable for self-hosted runners
steps:
- uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # v3.5.3
- name: Git config token
if: endsWith(github.repository, '-enterprise')
run: git config --global url.'https://${{ secrets.ELEVATED_GITHUB_TOKEN }}@github.com'.insteadOf 'https://github.com'
- uses: hashicorp/setup-golang@v1
- name: Consul Compatibility
run: |
make deps
sudo make cni
sudo sed -i 's!Defaults!#Defaults!g' /etc/sudoers
sudo -E env "PATH=$PATH" make integration-test-consul
permissions:
contents: read
id-token: write

View File

@@ -326,6 +326,17 @@ integration-test: dev ## Run Nomad integration tests
-tags "$(GO_TAGS)" \
github.com/hashicorp/nomad/e2e/vaultcompat
# Gated by NOMAD_E2E_CONSULCOMPAT=1; see e2e/consulcompat for the suite itself.
.PHONY: integration-test-consul
integration-test-consul: dev ## Run Nomad integration tests for Consul
	@echo "==> Running Nomad integration test suite for Consul:"
	NOMAD_E2E_CONSULCOMPAT=1 go test \
		-v \
		-race \
		-timeout=900s \
		-count=1 \
		-tags "$(GO_TAGS)" \
		github.com/hashicorp/nomad/e2e/consulcompat
.PHONY: clean
clean: GOPATH=$(shell go env GOPATH)
clean: ## Remove build artifacts
@@ -439,3 +450,9 @@ copywriteheaders:
cd jobspec && $(CURDIR)/scripts/copywrite-exceptions.sh
cd jobspec2 && $(CURDIR)/scripts/copywrite-exceptions.sh
cd demo && $(CURDIR)/scripts/copywrite-exceptions.sh
# Installs the reference CNI plugins that Nomad bridge networking (and thus
# Connect jobs) require. Writes to /opt/cni/bin, hence the root requirement.
.PHONY: cni
cni: ## Install CNI plugins. Run this as root.
	mkdir -p /opt/cni/bin
	curl --fail -LsO "https://github.com/containernetworking/plugins/releases/download/v1.3.0/cni-plugins-linux-amd64-v1.3.0.tgz"
	tar -C /opt/cni/bin -xf cni-plugins-linux-amd64-v1.3.0.tgz

View File

@@ -0,0 +1,43 @@
// Copyright (c) HashiCorp, Inc.
// SPDX-License-Identifier: BUSL-1.1
package consulcompat
import (
"os"
"syscall"
"testing"
"github.com/hashicorp/nomad/client/testutil"
)
const (
	// envTempDir optionally overrides the scratch directory used to cache
	// downloaded Consul binaries between runs.
	envTempDir = "NOMAD_E2E_CONSULCOMPAT_BASEDIR"

	// envGate must be set to "1" or the whole suite is skipped.
	envGate = "NOMAD_E2E_CONSULCOMPAT"
)

// TestConsulCompat is the entry point for the Consul compatibility matrix:
// for each supported Consul version it downloads the build and runs a Connect
// job against it using both the legacy (SI token) and workload identity
// workflows. Requires Linux and root (clients run Docker tasks).
func TestConsulCompat(t *testing.T) {
	if os.Getenv(envGate) != "1" {
		t.Skip(envGate + " is not set; skipping")
	}
	if syscall.Geteuid() != 0 {
		t.Skip("must be run as root so that clients can run Docker tasks")
	}
	testutil.RequireLinux(t)

	t.Run("testConsulVersions", func(t *testing.T) {
		// reuse the operator-provided base dir when set so repeat runs can
		// skip re-downloading binaries; otherwise use a per-test temp dir
		baseDir := os.Getenv(envTempDir)
		if baseDir == "" {
			baseDir = t.TempDir()
		}

		versions := scanConsulVersions(t, getMinimumVersion(t))

		versions.ForEach(func(b build) bool {
			downloadConsulBuild(t, b, baseDir)
			testConsulBuildLegacy(t, b, baseDir)
			testConsulBuild(t, b, baseDir)
			return true
		})
	})
}

5
e2e/consulcompat/doc.go Normal file
View File

@@ -0,0 +1,5 @@
// Copyright (c) HashiCorp, Inc.
// SPDX-License-Identifier: BUSL-1.1
// Package consulcompat contains Consul compatibility tests.
package consulcompat

View File

@@ -0,0 +1,69 @@
// Copyright (c) HashiCorp, Inc.
// SPDX-License-Identifier: BUSL-1.1
# Connect demo job: a "count-api" backend reached by a "count-dashboard"
# frontend through Consul service mesh sidecar proxies.
job "countdash" {
  group "api" {
    network {
      # bridge networking is required for Connect sidecars
      mode = "bridge"
    }

    service {
      name = "count-api"
      port = "9001"

      connect {
        # an empty sidecar_service block gets the default Envoy sidecar
        sidecar_service {}
      }
    }

    task "web" {
      driver = "docker"

      config {
        image = "hashicorpdev/counter-api:v3"

        # don't fail the task if registry auth lookup fails
        auth_soft_fail = true
      }
    }
  }

  group "dashboard" {
    network {
      mode = "bridge"

      # fixed host port so the test environment can reach the dashboard
      port "http" {
        static = 9002
        to     = 9002
      }
    }

    service {
      name = "count-dashboard"
      port = "9002"

      connect {
        sidecar_service {
          proxy {
            # the dashboard reaches count-api via its local sidecar on 8080
            upstreams {
              destination_name = "count-api"
              local_bind_port  = 8080
            }
          }
        }
      }
    }

    task "dashboard" {
      driver = "docker"

      env {
        # NOMAD_UPSTREAM_ADDR_count_api points at the local sidecar bind
        COUNTING_SERVICE_URL = "http://${NOMAD_UPSTREAM_ADDR_count_api}"
      }

      config {
        image          = "hashicorpdev/counter-dashboard:v3"
        auth_soft_fail = true
      }
    }
  }
}

View File

@@ -0,0 +1,48 @@
# Copyright (c) HashiCorp, Inc.
# SPDX-License-Identifier: BUSL-1.1

# Policy for the Nomad agent. Note that this policy will work with Workload
# Identity for Connect jobs, but is more highly-privileged than we need.

# The operator:write permission is required for creating config entries for
# connect ingress gateways. operator ACLs are not namespaced, though the
# config entries they can generate are.
operator = "write"

agent_prefix "" {
  policy = "read"
}

# The acl:write permission is required for minting Consul Service Identity
# tokens for Connect services with Consul CE (which has no namespaces)
acl = "write"

key_prefix "" {
  policy = "read"
}

node_prefix "" {
  policy = "read"
}

service_prefix "" {
  policy = "write"
}

# for use with Consul ENT: mirror the CE rules inside every namespace
namespace_prefix "" {
  acl = "write"

  key_prefix "" {
    policy = "read"
  }

  node_prefix "" {
    policy = "read"
  }

  service_prefix "" {
    policy = "write"
  }
}

View File

@@ -0,0 +1,42 @@
# Copyright (c) HashiCorp, Inc.
# SPDX-License-Identifier: BUSL-1.1

# Policy for the Nomad agent. Note that with this policy we must use Workload
# Identity for Connect jobs, or we'll get "failed to derive SI token" errors
# from the client because the Nomad agent's token doesn't have "acl:write"

# The operator:write permission is required for creating config entries for
# connect ingress gateways. operator ACLs are not namespaced, though the
# config entries they can generate are.
operator = "write"

agent_prefix "" {
  policy = "read"
}

key_prefix "" {
  policy = "read"
}

node_prefix "" {
  policy = "read"
}

service_prefix "" {
  policy = "write"
}

# for use with Consul ENT: mirror the CE rules inside every namespace
namespace_prefix "" {
  key_prefix "" {
    policy = "read"
  }

  node_prefix "" {
    policy = "read"
  }

  service_prefix "" {
    policy = "write"
  }
}

View File

@@ -0,0 +1,13 @@
# Copyright (c) HashiCorp, Inc.
# SPDX-License-Identifier: BUSL-1.1

# Policy without namespaces, for Consul CE. This policy is for Nomad tasks
# using WI so they can read services and KV from Consul when rendering
# templates.
key_prefix "" {
  policy = "read"
}

service_prefix "" {
  policy = "read"
}

View File

@@ -0,0 +1,252 @@
// Copyright (c) HashiCorp, Inc.
// SPDX-License-Identifier: BUSL-1.1
package consulcompat
import (
"fmt"
"os"
"testing"
"time"
consulapi "github.com/hashicorp/consul/api"
"github.com/hashicorp/nomad/helper/uuid"
"github.com/hashicorp/nomad/testutil"
"github.com/shoenig/test/must"
)
// testConsulBuildLegacy exercises one Consul build using the legacy workflow,
// where the Nomad agent holds a highly-privileged Consul token and mints
// Service Identity tokens on behalf of Connect workloads.
func testConsulBuildLegacy(t *testing.T, b build, baseDir string) {
	t.Run("consul-legacy("+b.Version+")", func(t *testing.T) {
		addr, client := startConsul(t, b, baseDir, "")

		// smoke test before we continue
		verifyConsulVersion(t, client, b.Version)

		// we need an ACL policy that allows the Nomad agent to fingerprint
		// Consul, register services, render templates, and mint new SI tokens
		agentToken := setupConsulACLsForServices(t, client,
			"./input/consul-policy-for-nomad-legacy.hcl")

		// we need service intentions so Connect apps can reach each other
		setupConsulServiceIntentions(t, client)

		// note: Nomad needs to be live before we can setupConsul because we
		// need it up to serve the JWKS endpoint
		agentConsulConfig := &testutil.Consul{
			Name:    "default",
			Address: addr,
			Auth:    "",
			Token:   agentToken,
		}

		nomadClient := startNomad(t, agentConsulConfig)
		verifyConsulFingerprint(t, nomadClient, b.Version, "default")
		runConnectJob(t, nomadClient)
	})
}
// testConsulBuild exercises one Consul build using the workload identity (WI)
// workflow: the Nomad agent receives a minimally-privileged token and Connect
// workloads log in to Consul through JWT auth methods backed by Nomad's JWKS
// endpoint.
func testConsulBuild(t *testing.T, b build, baseDir string) {
	t.Run("consul("+b.Version+")", func(t *testing.T) {
		consulHTTPAddr, consulAPI := startConsul(t, b, baseDir, "")

		// smoke test before we continue
		verifyConsulVersion(t, consulAPI, b.Version)

		// we need an ACL policy that only allows the Nomad agent to fingerprint
		// Consul and register itself, and set up service intentions
		consulToken := setupConsulACLsForServices(t, consulAPI,
			"./input/consul-policy-for-nomad.hcl")

		// we need service intentions so Connect apps can reach each other, and
		// an ACL role and policy that tasks will be able to use to render
		// templates
		setupConsulServiceIntentions(t, consulAPI)
		setupConsulACLsForTasks(t, consulAPI, "./input/consul-policy-for-tasks.hcl")

		// note: Nomad needs to be live before we can setup Consul auth methods
		// because we need it up to serve the JWKS endpoint
		consulCfg := &testutil.Consul{
			Name:    "default",
			Address: consulHTTPAddr,
			Auth:    "",
			Token:   consulToken,

			// these names must match the auth methods created later by
			// setupConsulJWTAuthForServices / setupConsulJWTAuthForTasks
			ServiceIdentityAuthMethod: "nomad-workloads",
			ServiceIdentity: &testutil.WorkloadIdentityConfig{
				Audience: []string{"consul.io"},
				TTL:      "1h",
			},
			TaskIdentityAuthMethod: "nomad-tasks",
			TaskIdentity: &testutil.WorkloadIdentityConfig{
				Audience: []string{"consul.io"},
				TTL:      "1h",
			},
		}

		nc := startNomad(t, consulCfg)

		// configure authentication for WI to Consul
		setupConsulJWTAuthForServices(t, consulAPI, nc.Address())
		setupConsulJWTAuthForTasks(t, consulAPI, nc.Address())

		verifyConsulFingerprint(t, nc, b.Version, "default")
		runConnectJob(t, nc)
	})
}
// setupConsulACLsForServices installs a base set of ACL policies and returns a
// token that the Nomad agent can use.
func setupConsulACLsForServices(t *testing.T, consulAPI *consulapi.Client, policyFilePath string) string {
	rules, err := os.ReadFile(policyFilePath)
	must.NoError(t, err, must.Sprintf("could not open policy file %s", policyFilePath))

	// policy without namespaces, for Consul CE. Note that with this policy we
	// must use Workload Identity for Connect jobs, or we'll get "failed to
	// derive SI token" errors from the client because the Nomad agent's token
	// doesn't have "acl:write"
	created, _, err := consulAPI.ACL().PolicyCreate(&consulapi.ACLPolicy{
		Name:        "nomad-cluster-" + uuid.Short(),
		Description: "policy for nomad agent",
		Rules:       string(rules),
	}, nil)
	must.NoError(t, err, must.Sprint("could not write policy to Consul"))

	// mint an agent token attached to that policy
	agentToken, _, err := consulAPI.ACL().TokenCreate(&consulapi.ACLToken{
		Description: "token for Nomad agent",
		Policies: []*consulapi.ACLLink{{
			ID:   created.ID,
			Name: created.Name,
		}},
	}, nil)
	must.NoError(t, err, must.Sprint("could not create token in Consul"))

	return agentToken.SecretID
}
// setupConsulServiceIntentions allows mesh traffic from the dashboard to the
// API so the Connect demo job can function under default-deny.
func setupConsulServiceIntentions(t *testing.T, consulAPI *consulapi.Client) {
	_, err := consulAPI.Connect().IntentionUpsert(&consulapi.Intention{
		SourceName:      "count-dashboard",
		DestinationName: "count-api",
		Action:          "allow",
	}, nil)
	must.NoError(t, err, must.Sprint("could not create intention"))
}
// setupConsulACLsForTasks installs an ACL policy and an ACL role that Nomad
// tasks can assume via workload identity login, letting them read services and
// KV when rendering templates. Unlike setupConsulACLsForServices it returns
// nothing: tasks obtain their own tokens through the auth method binding rule
// created in setupConsulJWTAuthForTasks.
func setupConsulACLsForTasks(t *testing.T, consulAPI *consulapi.Client, policyFilePath string) {
	policyRules, err := os.ReadFile(policyFilePath)
	must.NoError(t, err, must.Sprintf("could not open policy file %s", policyFilePath))

	// policy without namespaces, for Consul CE.
	policy := &consulapi.ACLPolicy{
		Name:        "nomad-tasks-" + uuid.Short(),
		Description: "policy for nomad tasks",
		Rules:       string(policyRules),
	}
	policy, _, err = consulAPI.ACL().PolicyCreate(policy, nil)
	must.NoError(t, err, must.Sprint("could not write policy to Consul"))

	role := &consulapi.ACLRole{
		Name:        "nomad-default", // must match Nomad namespace
		Description: "role for nomad tasks",
		Policies: []*consulapi.ACLLink{{
			ID:   policy.ID,
			Name: policy.Name,
		}},
	}
	_, _, err = consulAPI.ACL().RoleCreate(role, nil)
	// fixed: this message previously said "token", but the call creates a role
	must.NoError(t, err, must.Sprint("could not create role in Consul"))
}
// setupConsulJWTAuthForServices creates the "nomad-workloads" JWT auth method
// plus a binding rule so Connect services can log in to Consul with Nomad
// workload identity tokens, validated against Nomad's JWKS endpoint at the
// given address.
func setupConsulJWTAuthForServices(t *testing.T, consulAPI *consulapi.Client, address string) {
	authConfig := map[string]any{
		"JWKSURL":          fmt.Sprintf("%s/.well-known/jwks.json", address),
		"JWTSupportedAlgs": []string{"RS256"},
		"BoundAudiences":   "consul.io",
		// expose the Nomad identity claims as selectable/bindable values
		"ClaimMappings": map[string]string{
			"nomad_namespace": "nomad_namespace",
			"nomad_job_id":    "nomad_job_id",
			"nomad_task":      "nomad_task",
			"nomad_service":   "nomad_service",
		},
	}

	// note: we can't include NamespaceRules here because Consul CE doesn't
	// support namespaces
	_, _, err := consulAPI.ACL().AuthMethodCreate(&consulapi.ACLAuthMethod{
		Name:          "nomad-workloads",
		Type:          "jwt",
		DisplayName:   "nomad-workloads",
		Description:   "login method for Nomad workload identities (WI)",
		MaxTokenTTL:   time.Hour,
		TokenLocality: "local",
		Config:        authConfig,
	}, nil)
	must.NoError(t, err, must.Sprint("could not create Consul auth method for services"))

	// note: we can't include Namespace here because Consul CE doesn't support
	// namespaces
	rule := &consulapi.ACLBindingRule{
		ID:          "",
		Description: "binding rule for Nomad workload identities (WI) for services",
		AuthMethod:  "nomad-workloads",
		Selector:    "",
		BindType:    "service",
		BindName:    "${value.nomad_namespace}-${value.nomad_service}",
	}
	_, _, err = consulAPI.ACL().BindingRuleCreate(rule, nil)
	must.NoError(t, err, must.Sprint("could not create Consul binding rule"))
}
// setupConsulJWTAuthForTasks creates the "nomad-tasks" JWT auth method plus a
// binding rule that maps logins to the "nomad-<namespace>" ACL role (see
// setupConsulACLsForTasks), validated against Nomad's JWKS endpoint at the
// given address.
func setupConsulJWTAuthForTasks(t *testing.T, consulAPI *consulapi.Client, address string) {
	authConfig := map[string]any{
		"JWKSURL":          fmt.Sprintf("%s/.well-known/jwks.json", address),
		"JWTSupportedAlgs": []string{"RS256"},
		"BoundAudiences":   "consul.io",
		// expose the Nomad identity claims as selectable/bindable values
		"ClaimMappings": map[string]string{
			"nomad_namespace": "nomad_namespace",
			"nomad_job_id":    "nomad_job_id",
			"nomad_task":      "nomad_task",
			"nomad_service":   "nomad_service",
		},
	}

	// note: we can't include NamespaceRules here because Consul CE doesn't
	// support namespaces
	_, _, err := consulAPI.ACL().AuthMethodCreate(&consulapi.ACLAuthMethod{
		Name:          "nomad-tasks",
		Type:          "jwt",
		DisplayName:   "nomad-tasks",
		Description:   "login method for Nomad tasks with workload identity (WI)",
		MaxTokenTTL:   time.Hour,
		TokenLocality: "local",
		Config:        authConfig,
	}, nil)
	must.NoError(t, err, must.Sprint("could not create Consul auth method for tasks"))

	// note: we can't include Namespace here because Consul CE doesn't support
	// namespaces
	rule := &consulapi.ACLBindingRule{
		ID:          "",
		Description: "binding rule for Nomad workload identities (WI) for tasks",
		AuthMethod:  "nomad-tasks",
		Selector:    "",
		BindType:    "role",
		BindName:    "nomad-${value.nomad_namespace}",
	}
	_, _, err = consulAPI.ACL().BindingRuleCreate(rule, nil)
	must.NoError(t, err, must.Sprint("could not create Consul binding rule"))
}

View File

@@ -0,0 +1,160 @@
// Copyright (c) HashiCorp, Inc.
// SPDX-License-Identifier: BUSL-1.1
package consulcompat
import (
"context"
"encoding/json"
"os"
"os/exec"
"path/filepath"
"runtime"
"testing"
"time"
"github.com/hashicorp/go-cleanhttp"
"github.com/hashicorp/go-set/v2"
"github.com/hashicorp/go-version"
"github.com/shoenig/test/must"
)
// TODO: it would be good if we can add the latest non-GA'd beta/release
// candidate version as well; that'll give us some lead time on any breaking
// changes
const (
	// binDir is the subdirectory (under the suite base dir) where Consul
	// binaries are cached, one directory per version.
	binDir = "consul-bins"

	// minConsulVersion is the oldest Consul version included in the matrix.
	minConsulVersion = "1.15.0"

	// environment variable to pick only one Consul version for testing
	exactConsulVersionEnv = "NOMAD_E2E_CONSULCOMPAT_CONSUL_VERSION"
)
// downloadConsulBuild installs the given Consul build under
// baseDir/consul-bins/<version> using the hc-install CLI, skipping the
// download when a binary is already cached there from a previous run.
func downloadConsulBuild(t *testing.T, b build, baseDir string) {
	path := filepath.Join(baseDir, binDir, b.Version)
	must.NoError(t, os.MkdirAll(path, 0755))

	// only treat the download as cached when the binary verifiably exists;
	// previously any unexpected Stat error (e.g. permissions) was mistaken
	// for a cache hit and the download silently skipped
	_, err := os.Stat(filepath.Join(path, "consul"))
	if err == nil {
		t.Log("download: already have consul at", path)
		return
	}
	if !os.IsNotExist(err) {
		t.Fatalf("download: unable to stat cached consul binary: %v", err)
	}

	t.Log("download: installing consul at", path)

	ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
	defer cancel()

	cmd := exec.CommandContext(ctx, "hc-install", "install", "-version", b.Version, "-path", path, "consul")
	bs, err := cmd.CombinedOutput()
	must.NoError(t, err, must.Sprintf("failed to download consul %s: %s", b.Version, string(bs)))
}
// getMinimumVersion parses minConsulVersion, failing the test if the constant
// is not a valid semantic version.
func getMinimumVersion(t *testing.T) *version.Version {
	minimum, err := version.NewVersion(minConsulVersion)
	must.NoError(t, err)
	return minimum
}
// build describes one downloadable Consul artifact from the
// releases.hashicorp.com index.
type build struct {
	Version string `json:"version"`
	OS      string `json:"os"`
	Arch    string `json:"arch"`
	URL     string `json:"url"`
}

// String returns the version, satisfying fmt.Stringer for log output.
func (b build) String() string { return b.Version }

// compare orders two builds by semantic version (negative/zero/positive, as
// with version.Compare). Both versions must parse or version.Must panics.
func (b build) compare(o build) int {
	B := version.Must(version.NewVersion(b.Version))
	O := version.Must(version.NewVersion(o.Version))
	return B.Compare(O)
}
// consulJSON models the subset of the releases.hashicorp.com/consul/index.json
// payload that this suite consumes: a map of version string to its builds.
type consulJSON struct {
	Versions map[string]struct {
		Builds []build `json:"builds"`
	} `json:"versions"`
}
// usable reports whether a Consul version is eligible for the matrix: a final
// GA release (no prerelease or build-metadata suffix) at or above minimum.
func usable(v, minimum *version.Version) bool {
	isFinalRelease := v.Prerelease() == "" && v.Metadata() == ""
	return isFinalRelease && !v.LessThan(minimum)
}
// keep reports whether a build matches the host platform and, when the
// exact-version override environment variable is set, that exact version.
func keep(b build) bool {
	if want := os.Getenv(exactConsulVersionEnv); want != "" && b.Version != want {
		return false
	}
	return b.OS == runtime.GOOS && b.Arch == runtime.GOARCH
}
// A tracker keeps track of the set of patch versions for each minor version.
// The patch versions are stored in a treeset so we can grab the highest patch
// version of each minor version at the end.
type tracker map[int]*set.TreeSet[build]

// add records build b under the minor version of v, lazily creating the
// per-minor treeset on first use.
func (t tracker) add(v *version.Version, b build) {
	y := v.Segments()[1] // minor version

	// create the treeset for this minor version if needed
	if _, exists := t[y]; !exists {
		cmp := func(g, h build) int { return g.compare(h) }
		t[y] = set.NewTreeSet[build](cmp)
	}

	// insert the patch version into the set of patch versions for this minor version
	t[y].Insert(b)
}
// scanConsulVersions downloads the Consul release index and returns the set of
// builds to test: the newest patch release of every minor version at or above
// minimum that matches the host OS/arch (optionally narrowed to a single exact
// version via NOMAD_E2E_CONSULCOMPAT_CONSUL_VERSION; see keep).
func scanConsulVersions(t *testing.T, minimum *version.Version) *set.Set[build] {
	httpClient := cleanhttp.DefaultClient()
	httpClient.Timeout = 1 * time.Minute
	response, err := httpClient.Get("https://releases.hashicorp.com/consul/index.json")
	must.NoError(t, err, must.Sprint("unable to download consul versions index"))

	var payload consulJSON
	must.NoError(t, json.NewDecoder(response.Body).Decode(&payload))
	must.Close(t, response.Body)

	// sort the versions for the Y in each consul version X.Y.Z
	// this only works for consul 1.Y.Z which is fine for now
	track := make(tracker)
	for s, obj := range payload.Versions {
		v, err := version.NewVersion(s)
		must.NoError(t, err, must.Sprint("unable to parse consul version"))
		if !usable(v, minimum) {
			continue
		}
		for _, build := range obj.Builds {
			if keep(build) {
				track.add(v, build)
			}
		}
	}

	// take the latest patch version for each minor version
	result := set.New[build](len(track))
	for _, tree := range track {
		max := tree.Max() // highest patch in this minor (shadows the max builtin; harmless)
		result.Insert(max)
	}
	return result
}

View File

@@ -0,0 +1,102 @@
// Copyright (c) HashiCorp, Inc.
// SPDX-License-Identifier: BUSL-1.1
package consulcompat
import (
"fmt"
"os"
"testing"
"time"
consulapi "github.com/hashicorp/consul/api"
nomadapi "github.com/hashicorp/nomad/api"
"github.com/shoenig/test/must"
"github.com/shoenig/test/wait"
)
// verifyConsulVersion ensures that we've successfully spun up a Consul cluster
// on the expected version (this ensures we don't have stray running Consul from
// previous runs or from the development environment)
func verifyConsulVersion(t *testing.T, consulAPI *consulapi.Client, version string) {
	self, err := consulAPI.Agent().Self()
	must.NoError(t, err)

	// use a checked assertion so a malformed/missing payload fails the test
	// cleanly instead of panicking on the bare type assertion
	vers, ok := self["Config"]["Version"].(string)
	must.True(t, ok, must.Sprint("expected agent Config.Version to be a string"))
	must.Eq(t, version, vers)
}
// verifyConsulFingerprint ensures that we've successfully fingerprinted Consul
// on the single expected client node; the attribute key is unprefixed for the
// "default" cluster and prefixed with the cluster name otherwise.
func verifyConsulFingerprint(t *testing.T, nc *nomadapi.Client, version, clusterName string) {
	stubs, _, err := nc.Nodes().List(nil)
	must.NoError(t, err)
	must.Len(t, 1, stubs)

	node, _, err := nc.Nodes().Info(stubs[0].ID, nil)
	// fixed: this error was previously unchecked, so a failed Info call would
	// nil-panic on node.Attributes below instead of failing cleanly
	must.NoError(t, err)

	if clusterName == "default" {
		must.Eq(t, version, node.Attributes["consul.version"])
	} else {
		must.Eq(t, version, node.Attributes["consul."+clusterName+".version"])
	}
}
// runConnectJob registers the Connect demo job, waits for its evaluation to
// complete with no failed task-group placements, and then waits for both
// allocations (api + dashboard) to reach "running". The job is purged via
// t.Cleanup when the test ends.
func runConnectJob(t *testing.T, nc *nomadapi.Client) {
	b, err := os.ReadFile("./input/connect.nomad.hcl")
	must.NoError(t, err)

	jobs := nc.Jobs()
	job, err := jobs.ParseHCL(string(b), true)
	must.NoError(t, err, must.Sprint("failed to parse job HCL"))

	resp, _, err := jobs.Register(job, nil)
	must.NoError(t, err, must.Sprint("failed to register job"))
	evalID := resp.EvalID
	t.Logf("eval: %s", evalID)

	// wait for the scheduler to finish the evaluation
	// NOTE(review): the 1s overall timeout below is very tight for an eval to
	// complete — confirm it is intentional
	must.Wait(t, wait.InitialSuccess(
		wait.ErrorFunc(func() error {
			eval, _, err := nc.Evaluations().Info(evalID, nil)
			must.NoError(t, err)
			if eval.Status == "complete" {
				// if we have failed allocations it can be difficult to debug in
				// CI, so dump the struct values here so they show up in the
				// logs
				must.MapEmpty(t, eval.FailedTGAllocs,
					must.Sprintf("api=>%#v dash=>%#v",
						eval.FailedTGAllocs["api"], eval.FailedTGAllocs["dashboard"]))
				return nil
			} else {
				return fmt.Errorf("eval is not complete: %s", eval.Status)
			}
		}),
		wait.Timeout(time.Second),
		wait.Gap(100*time.Millisecond),
	))

	// purge the job when the test ends
	// NOTE(review): Deregister is keyed by *job.Name rather than *job.ID —
	// they match for this job, but ID would be the safer handle
	t.Cleanup(func() {
		_, _, err = jobs.Deregister(*job.Name, true, nil)
		must.NoError(t, err, must.Sprint("failed to deregister job"))
	})

	// wait until both allocations are placed and running on the client
	must.Wait(t, wait.InitialSuccess(
		wait.ErrorFunc(func() error {
			allocs, _, err := jobs.Allocations(*job.ID, false, nil)
			if err != nil {
				return err
			}
			if n := len(allocs); n != 2 {
				return fmt.Errorf("expected 2 alloc, got %d", n)
			}
			for _, alloc := range allocs {
				if alloc.ClientStatus != "running" {
					return fmt.Errorf(
						"expected alloc status running, got %s for %s",
						alloc.ClientStatus, alloc.ID)
				}
			}
			return nil
		}),
		wait.Timeout(30*time.Second),
		wait.Gap(1*time.Second),
	))
}

View File

@@ -0,0 +1,117 @@
// Copyright (c) HashiCorp, Inc.
// SPDX-License-Identifier: BUSL-1.1
package consulcompat
import (
"io"
"os"
"path/filepath"
"testing"
consulapi "github.com/hashicorp/consul/api"
consulTestUtil "github.com/hashicorp/consul/sdk/testutil"
nomadapi "github.com/hashicorp/nomad/api"
"github.com/hashicorp/nomad/helper/testlog"
"github.com/hashicorp/nomad/helper/uuid"
"github.com/hashicorp/nomad/testutil"
"github.com/shoenig/test/must"
)
const (
	// consulDataDir is the data-dir subdirectory created beside each cached
	// Consul binary; it is removed again when the test server stops.
	consulDataDir = "consul-data"
)

// startConsul runs a Consul agent for build b with ACLs bootstrapped and
// Connect enabled, returning the agent's HTTP address and an API client
// authenticated with the initial management token. The agent and its data dir
// are torn down via t.Cleanup, so no explicit stop handle is returned. ns sets
// the API client's Consul namespace ("" for Consul CE).
func startConsul(t *testing.T, b build, baseDir, ns string) (string, *consulapi.Client) {

	path := filepath.Join(baseDir, binDir, b.Version)

	// NOTE(review): Chdir/Setenv errors are ignored here — confirm that's
	// acceptable; the PATH prepend is what lets the Consul SDK test harness
	// find the version-specific binary
	cwd, _ := os.Getwd()
	os.Chdir(path)      // so that we can launch Consul from the current directory
	defer os.Chdir(cwd) // return to the test dir so we can find job files

	oldpath := os.Getenv("PATH")
	os.Setenv("PATH", path+":"+oldpath)
	t.Cleanup(func() {
		os.Setenv("PATH", oldpath)
	})

	consulDC1 := "dc1"
	rootToken := uuid.Generate()

	testconsul, err := consulTestUtil.NewTestServerConfigT(t,
		func(c *consulTestUtil.TestServerConfig) {
			c.ACL.Enabled = true
			c.ACL.DefaultPolicy = "deny"
			c.ACL.Tokens = consulTestUtil.TestTokens{
				InitialManagement: rootToken,
			}
			c.Datacenter = consulDC1
			c.DataDir = filepath.Join(baseDir, binDir, b.Version, consulDataDir)
			c.LogLevel = "info"
			c.Connect = map[string]any{"enabled": true}
			c.Server = true
			// keep agent output quiet unless tests run with -v
			if !testing.Verbose() {
				c.Stdout = io.Discard
				c.Stderr = io.Discard
			}
		})
	must.NoError(t, err, must.Sprint("error starting test consul server"))
	t.Cleanup(func() {
		testconsul.Stop()
		os.RemoveAll(filepath.Join(baseDir, binDir, b.Version, consulDataDir))
	})

	// Connect workloads need a leader and an active CA root before they start
	testconsul.WaitForLeader(t)
	testconsul.WaitForActiveCARoot(t)

	// TODO: we should run this entire test suite with mTLS everywhere
	consulClient, err := consulapi.NewClient(&consulapi.Config{
		Address:    testconsul.HTTPAddr,
		Scheme:     "http",
		Datacenter: consulDC1,
		HttpClient: consulapi.DefaultConfig().HttpClient,
		Token:      rootToken,
		Namespace:  ns,
		TLSConfig:  consulapi.TLSConfig{},
	})
	must.NoError(t, err)
	return testconsul.HTTPAddr, consulClient
}
// startNomad runs a Nomad agent in dev mode with bootstrapped ACLs and the
// given Consul configuration, returning an API client authenticated with the
// bootstrap token. The agent is stopped via t.Cleanup.
func startNomad(t *testing.T, consulConfig *testutil.Consul) *nomadapi.Client {
	bootstrapToken := uuid.Generate()

	srv := testutil.NewTestServer(t, func(c *testutil.TestServerConfig) {
		c.DevMode = true
		c.LogLevel = testlog.HCLoggerTestLevel().String()
		c.Consul = consulConfig
		c.ACL = &testutil.ACLConfig{
			Enabled:        true,
			BootstrapToken: bootstrapToken,
		}
		// keep agent output quiet unless tests run with -v
		if !testing.Verbose() {
			c.Stdout = io.Discard
			c.Stderr = io.Discard
		}
	})
	t.Cleanup(srv.Stop)

	// TODO: we should run this entire test suite with mTLS everywhere
	client, err := nomadapi.NewClient(&nomadapi.Config{
		Address:   "http://" + srv.HTTPAddr,
		TLSConfig: &nomadapi.TLSConfig{},
	})
	must.NoError(t, err, must.Sprint("unable to create nomad api client"))
	client.SetSecretID(bootstrapToken)
	return client
}

View File

@@ -51,9 +51,22 @@ type TestServerConfig struct {
// Consul is used to configure the communication with Consul
type Consul struct {
Address string `json:"address,omitempty"`
Auth string `json:"auth,omitempty"`
Token string `json:"token,omitempty"`
Name string `json:"name,omitempty"`
Address string `json:"address,omitempty"`
Auth string `json:"auth,omitempty"`
Token string `json:"token,omitempty"`
ServiceIdentity *WorkloadIdentityConfig `json:"service_identity,omitempty"`
ServiceIdentityAuthMethod string `json:"service_auth_method,omitempty"`
TaskIdentity *WorkloadIdentityConfig `json:"task_identity,omitempty"`
TaskIdentityAuthMethod string `json:"task_auth_method,omitempty"`
}
// WorkloadIdentityConfig is the configuration for default workload identities.
type WorkloadIdentityConfig struct {
	// Audience is the set of "aud" claims stamped into the identity token.
	Audience []string `json:"aud"`
	// Env — presumably exposes the identity token to the workload via an
	// environment variable; confirm against the agent config docs.
	Env bool `json:"env"`
	// File — presumably writes the identity token into the workload's
	// secrets directory; confirm against the agent config docs.
	File bool `json:"file"`
	// TTL is the token lifetime as a duration string (e.g. "1h").
	TTL string `json:"ttl"`
}
// Advertise is used to configure the addresses to advertise
@@ -101,14 +114,6 @@ type ACLConfig struct {
BootstrapToken string `json:"-"` // not in the real config
}
// WorkloadIdentityConfig is the configuration for default workload identities.
type WorkloadIdentityConfig struct {
Audience []string `json:"aud"`
Env bool `json:"env"`
File bool `json:"file"`
TTL string `json:"ttl"`
}
// ServerConfigCallback is a callback function that can be passed to
// NewTestServerConfig to mutate the generated server configuration before the
// test agent starts.
type ServerConfigCallback func(c *TestServerConfig)