e2e: upgrade terraform to 0.12.x (#6489)
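The bulk of this diff is the mechanical Terraform 0.11 → 0.12 syntax migration: interpolation-only strings like "${var.region}" become bare expressions, the template_file vars block becomes a real map attribute (vars = {), list arguments such as depends_on and vpc_security_group_ids take resource references directly, and attribute-style blocks like ebs_block_device = { ... } return to block syntax. A minimal before/after sketch of the pattern (resource, file, and variable names here are illustrative, not taken from the repo):

```
# Terraform 0.11 style (illustrative names): every expression is wrapped in
# an interpolation string, and template vars are written as a nested block.
data "template_file" "example_v011" {
  template = "${file("${path.root}/example.sh")}"

  vars {
    server_count = "${var.server_count}"
  }
}

# Terraform 0.12 style: first-class expressions and an explicit map assignment.
data "template_file" "example_v012" {
  template = file("${path.root}/example.sh")

  vars = {
    server_count = var.server_count
  }
}
```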
@@ -1,25 +1,25 @@
 data "template_file" "user_data_server" {
-  template = "${file("${path.root}/user-data-server.sh")}"
+  template = file("${path.root}/user-data-server.sh")
 
-  vars {
-    server_count = "${var.server_count}"
-    region = "${var.region}"
-    retry_join = "${var.retry_join}"
+  vars = {
+    server_count = var.server_count
+    region = var.region
+    retry_join = var.retry_join
   }
 }
 
 data "template_file" "user_data_client" {
-  template = "${file("${path.root}/user-data-client.sh")}"
-  count = "${var.client_count}"
+  template = file("${path.root}/user-data-client.sh")
+  count = var.client_count
 
-  vars {
-    region = "${var.region}"
-    retry_join = "${var.retry_join}"
+  vars = {
+    region = var.region
+    retry_join = var.retry_join
   }
 }
 
 data "template_file" "nomad_client_config" {
-  template = "${file("${path.root}/configs/client.hcl")}"
+  template = file("${path.root}/configs/client.hcl")
 }
 
 data "template_file" "nomad_server_config" {
@@ -27,30 +27,34 @@ data "template_file" "nomad_server_config" {
 }
 
 resource "aws_instance" "server" {
-  ami = "${data.aws_ami.main.image_id}"
-  instance_type = "${var.instance_type}"
-  key_name = "${module.keys.key_name}"
-  vpc_security_group_ids = ["${aws_security_group.primary.id}"]
-  count = "${var.server_count}"
+  ami = data.aws_ami.main.image_id
+  instance_type = var.instance_type
+  key_name = module.keys.key_name
+  vpc_security_group_ids = [aws_security_group.primary.id]
+  count = var.server_count
 
   # Instance tags
-  tags {
+  tags = {
     Name = "${local.random_name}-server-${count.index}"
     ConsulAutoJoin = "auto-join"
-    SHA = "${var.nomad_sha}"
-    User = "${data.aws_caller_identity.current.arn}"
+    SHA = var.nomad_sha
+    User = data.aws_caller_identity.current.arn
   }
 
-  user_data = "${data.template_file.user_data_server.rendered}"
-  iam_instance_profile = "${aws_iam_instance_profile.instance_profile.name}"
+  user_data = data.template_file.user_data_server.rendered
+  iam_instance_profile = aws_iam_instance_profile.instance_profile.name
 
   provisioner "file" {
-    content = "${file("${path.root}/configs/${var.indexed == false ? "server.hcl" : "indexed/server-${count.index}.hcl"}")}"
+    content = file(
+      "${path.root}/configs/${var.indexed == false ? "server.hcl" : "indexed/server-${count.index}.hcl"}",
+    )
    destination = "/tmp/server.hcl"
 
    connection {
+      host = coalesce(self.public_ip, self.private_ip)
      type = "ssh"
      user = "ubuntu"
-      private_key = "${module.keys.private_key_pem}"
+      private_key = module.keys.private_key_pem
    }
  }
  provisioner "remote-exec" {
@@ -59,45 +63,51 @@ resource "aws_instance" "server" {
    ]
 
    connection {
+      host = coalesce(self.public_ip, self.private_ip)
      type = "ssh"
      user = "ubuntu"
-      private_key = "${module.keys.private_key_pem}"
+      private_key = module.keys.private_key_pem
    }
  }
 }
 
 resource "aws_instance" "client" {
-  ami = "${data.aws_ami.main.image_id}"
-  instance_type = "${var.instance_type}"
-  key_name = "${module.keys.key_name}"
-  vpc_security_group_ids = ["${aws_security_group.primary.id}"]
-  count = "${var.client_count}"
-  depends_on = ["aws_instance.server"]
+  ami = data.aws_ami.main.image_id
+  instance_type = var.instance_type
+  key_name = module.keys.key_name
+  vpc_security_group_ids = [aws_security_group.primary.id]
+  count = var.client_count
+  depends_on = [aws_instance.server]
 
   # Instance tags
-  tags {
+  tags = {
     Name = "${local.random_name}-client-${count.index}"
     ConsulAutoJoin = "auto-join"
-    SHA = "${var.nomad_sha}"
-    User = "${data.aws_caller_identity.current.arn}"
+    SHA = var.nomad_sha
+    User = data.aws_caller_identity.current.arn
   }
 
-  ebs_block_device = {
+  ebs_block_device {
    device_name = "/dev/xvdd"
    volume_type = "gp2"
    volume_size = "50"
    delete_on_termination = "true"
  }
 
-  user_data = "${element(data.template_file.user_data_client.*.rendered, count.index)}"
-  iam_instance_profile = "${aws_iam_instance_profile.instance_profile.name}"
+  user_data = element(data.template_file.user_data_client.*.rendered, count.index)
+  iam_instance_profile = aws_iam_instance_profile.instance_profile.name
 
   provisioner "file" {
-    content = "${file("${path.root}/configs/${var.indexed == false ? "client.hcl" : "indexed/client-${count.index}.hcl"}")}"
+    content = file(
+      "${path.root}/configs/${var.indexed == false ? "client.hcl" : "indexed/client-${count.index}.hcl"}",
    )
    destination = "/tmp/client.hcl"
 
    connection {
+      host = coalesce(self.public_ip, self.private_ip)
      type = "ssh"
      user = "ubuntu"
-      private_key = "${module.keys.private_key_pem}"
+      private_key = module.keys.private_key_pem
    }
  }
 
@@ -107,8 +117,11 @@ resource "aws_instance" "client" {
    ]
 
    connection {
+      host = coalesce(self.public_ip, self.private_ip)
      type = "ssh"
      user = "ubuntu"
-      private_key = "${module.keys.private_key_pem}"
+      private_key = module.keys.private_key_pem
    }
  }
 }
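Beyond unwrapping interpolation, the provisioner connection blocks above gain an explicit host argument, so the connection address is taken from the instance's own attributes instead of relying on 0.11's implicit default. A minimal sketch of that pattern, reusing names that appear in this diff (the resource label and the inline command are illustrative):

```
# Sketch of the upgraded connection block (resource label and echo are illustrative).
# coalesce() returns the first non-empty value, so the public IP is preferred
# and the private IP is the fallback.
resource "aws_instance" "sketch" {
  ami           = data.aws_ami.main.image_id
  instance_type = var.instance_type

  provisioner "remote-exec" {
    inline = ["echo provisioning complete"]

    connection {
      host        = coalesce(self.public_ip, self.private_ip)
      type        = "ssh"
      user        = "ubuntu"
      private_key = module.keys.private_key_pem
    }
  }
}
```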
@@ -1,11 +1,11 @@
 resource "aws_iam_instance_profile" "instance_profile" {
-  name_prefix = "${local.random_name}"
-  role = "${aws_iam_role.instance_role.name}"
+  name_prefix = local.random_name
+  role = aws_iam_role.instance_role.name
 }
 
 resource "aws_iam_role" "instance_role" {
-  name_prefix = "${local.random_name}"
-  assume_role_policy = "${data.aws_iam_policy_document.instance_role.json}"
+  name_prefix = local.random_name
+  assume_role_policy = data.aws_iam_policy_document.instance_role.json
 }
 
 data "aws_iam_policy_document" "instance_role" {
@@ -22,8 +22,8 @@ data "aws_iam_policy_document" "instance_role" {
 
 resource "aws_iam_role_policy" "auto_discover_cluster" {
   name = "auto-discover-cluster"
-  role = "${aws_iam_role.instance_role.id}"
-  policy = "${data.aws_iam_policy_document.auto_discover_cluster.json}"
+  role = aws_iam_role.instance_role.id
+  policy = data.aws_iam_policy_document.auto_discover_cluster.json
 }
 
 # Note: Overloading this instance profile to access
@@ -55,10 +55,11 @@ data "aws_iam_policy_document" "auto_discover_cluster" {
    effect = "Allow"
 
    actions = [
-      "s3:PutObject",
-      "s3:GetObject",
-      "s3:DeleteObject"
+      "s3:PutObject",
+      "s3:GetObject",
+      "s3:DeleteObject",
    ]
    resources = ["arn:aws:s3:::nomad-team-test-binary/*"]
  }
 }
@@ -38,10 +38,11 @@ variable "nomad_sha" {
 }
 
 provider "aws" {
-  region = "${var.region}"
+  region = var.region
 }
 
-resource "random_pet" "e2e" {}
+resource "random_pet" "e2e" {
+}
 
 locals {
   random_name = "${var.name}-${random_pet.e2e.id}"
@@ -49,10 +50,10 @@ locals {
 
 # Generates keys to use for provisioning and access
 module "keys" {
-  name = "${local.random_name}"
-  path = "${path.root}/keys"
-  source = "mitchellh/dynamic-keys/aws"
-  version = "v1.0.0"
+  name = local.random_name
+  path = "${path.root}/keys"
+  source = "mitchellh/dynamic-keys/aws"
+  version = "v2.0.0"
 }
 
 data "aws_ami" "main" {
@@ -68,17 +69,17 @@ data "aws_ami" "main" {
    name = "tag:OS"
    values = ["Ubuntu"]
  }
 
 }
 
-data "aws_caller_identity" "current" {}
+data "aws_caller_identity" "current" {
+}
 
 output "servers" {
-  value = "${aws_instance.server.*.public_ip}"
+  value = aws_instance.server.*.public_ip
 }
 
 output "clients" {
-  value = "${aws_instance.client.*.public_ip}"
+  value = aws_instance.client.*.public_ip
 }
 
 output "message" {
@@ -87,8 +88,8 @@ Your cluster has been provisioned! - To prepare your environment, run the
 following:
 
 ```
-export NOMAD_ADDR=http://${aws_instance.client.0.public_ip}:4646
-export CONSUL_HTTP_ADDR=http://${aws_instance.client.0.public_ip}:8500
+export NOMAD_ADDR=http://${aws_instance.client[0].public_ip}:4646
+export CONSUL_HTTP_ADDR=http://${aws_instance.client[0].public_ip}:8500
 export NOMAD_E2E=1
 ```
 
@@ -100,7 +101,8 @@ go test -v ./e2e
 
 ssh into nodes with:
 ```
-ssh -i keys/${local.random_name}.pem ubuntu@${aws_instance.client.0.public_ip}
+ssh -i keys/${local.random_name}.pem ubuntu@${aws_instance.client[0].public_ip}
 ```
 EOM
+
 }
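The outputs and the provisioning message above also show the 0.12 reference forms: a splat expression such as aws_instance.server.*.public_ip is a first-class list value, and a single instance created with count is addressed as aws_instance.client[0] rather than the 0.11 aws_instance.client.0 attribute path. A small sketch (the output names here are illustrative):

```
# Illustrative outputs showing 0.12 reference syntax for counted resources.
output "server_ips" {
  # Splat expressions are lists in their own right; no "${...}" wrapper needed.
  value = aws_instance.server.*.public_ip
}

output "first_client_ip" {
  # Bracket indexing replaces the 0.11 ".0" attribute form.
  value = aws_instance.client[0].public_ip
}
```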
@@ -3,8 +3,8 @@ data "aws_vpc" "default" {
 }
 
 resource "aws_security_group" "primary" {
-  name = "${local.random_name}"
-  vpc_id = "${data.aws_vpc.default.id}"
+  name = local.random_name
+  vpc_id = data.aws_vpc.default.id
 
  ingress {
    from_port = 22
e2e/terraform/versions.tf (new file, 4 lines)
@@ -0,0 +1,4 @@
+
+terraform {
+  required_version = ">= 0.12"
+}