main.tf
# ---------------------------------------------------------------------------------------------------------------------
# DEPLOY A CONSUL CLUSTER IN AWS
# These templates show an example of how to use the consul-cluster module to deploy Consul in AWS. We deploy two Auto
# Scaling Groups (ASGs): one with a small number of Consul server nodes and one with a larger number of Consul client
# nodes. Note that these templates assume that the AMI you provide via the ami_id input variable is built from
# the examples/consul-ami/consul.json Packer template.
# ---------------------------------------------------------------------------------------------------------------------
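
# For orientation, a typical workflow for this example (a sketch based on the comments in this file and the repo
# layout, not a definitive procedure) is to build the AMI with Packer and then apply this root module:
#
#   packer build examples/consul-ami/consul.json
#   terraform init
#   terraform apply
#
# If ami_id is left unset, the aws_ami data source below falls back to the latest pre-built example AMI.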
# ----------------------------------------------------------------------------------------------------------------------
# REQUIRE A SPECIFIC TERRAFORM VERSION OR HIGHER
# ----------------------------------------------------------------------------------------------------------------------
terraform {
  # This module is now only tested with Terraform 1.0.x. However, to make upgrading easier, we set 0.14.0 as the
  # minimum version, since that release added support for variable validation and the alltrue function. Removing the
  # validation entirely would yield a module compatible with Terraform 0.12.26, which added support for
  # required_providers with source URLs (see the commented sketch below).
  required_version = ">= 0.14.0"
}
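
# A commented sketch of what a pinned provider block could look like if you adopt required_providers with source URLs
# in your own templates. The provider version constraint shown here is illustrative, not a requirement of this repo.
#
# terraform {
#   required_providers {
#     aws = {
#       source  = "hashicorp/aws"
#       version = ">= 3.0"
#     }
#   }
# }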
# ---------------------------------------------------------------------------------------------------------------------
# AUTOMATICALLY LOOK UP THE LATEST PRE-BUILT AMI
# This repo contains a CircleCI job that automatically builds and publishes the latest AMI by building the Packer
# template at /examples/consul-ami upon every new release. The Terraform data source below automatically looks up the
# latest AMI so that a simple "terraform apply" will just work without the user needing to manually build an AMI and
# fill in the right value.
#
# !! WARNING !! These example AMIs are meant only for convenience when initially testing this repo. Do NOT use these
# example AMIs in a production setting, because it is important that you consciously think through the configuration
# you want in your own production AMI. (A terraform.tfvars sketch for pinning your own AMI follows the data source
# below.)
#
# NOTE: This Terraform data source must return at least one AMI result or the entire template will fail. See
# /_ci/publish-amis-in-new-account.md for more information.
# ---------------------------------------------------------------------------------------------------------------------
data "aws_ami" "consul" {
most_recent = true
# If we change the AWS Account in which test are run, update this value.
owners = ["562637147889"]
filter {
name = "virtualization-type"
values = ["hvm"]
}
filter {
name = "is-public"
values = ["true"]
}
filter {
name = "name"
values = ["consul-ubuntu-*"]
}
}
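
# A hypothetical terraform.tfvars showing how you might pin your own AMI instead of relying on the lookup above. The
# variable names are the ones referenced elsewhere in this file; the AMI ID and key pair name are placeholders.
#
# ami_id       = "ami-0123456789abcdef0"
# cluster_name = "my-consul"
# ssh_key_name = "my-key-pair"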
# ---------------------------------------------------------------------------------------------------------------------
# DEPLOY THE CONSUL SERVER NODES
# ---------------------------------------------------------------------------------------------------------------------
module "consul_servers" {
# When using these modules in your own templates, you will need to use a Git URL with a ref attribute that pins you
# to a specific version of the modules, such as the following example:
# source = "git::[email protected]:hashicorp/terraform-aws-consul.git//modules/consul-cluster?ref=v0.0.1"
source = "./modules/consul-cluster"
cluster_name = "${var.cluster_name}-server"
cluster_size = var.num_servers
instance_type = "t2.micro"
spot_price = var.spot_price
# The EC2 Instances will use these tags to automatically discover each other and form a cluster
cluster_tag_key = var.cluster_tag_key
cluster_tag_value = var.cluster_name
ami_id = var.ami_id == null ? data.aws_ami.consul.image_id : var.ami_id
user_data = templatefile("${path.module}/examples/root-example/user-data-server.sh", {
cluster_tag_key = var.cluster_tag_key
cluster_tag_value = var.cluster_name
})
vpc_id = data.aws_vpc.default.id
subnet_ids = data.aws_subnet_ids.default.ids
# If set to true, this allows access to the consul HTTPS API
enable_https_port = var.enable_https_port
# To make testing easier, we allow Consul and SSH requests from any IP address here but in a production
# deployment, we strongly recommend you limit this to the IP address ranges of known, trusted servers inside your VPC.
allowed_ssh_cidr_blocks = ["0.0.0.0/0"]
allowed_inbound_cidr_blocks = ["0.0.0.0/0"]
ssh_key_name = var.ssh_key_name
tags = [
{
key = "Environment"
value = "development"
propagate_at_launch = true
}
]
}
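
# A minimal sketch of how you might surface the server ASG name from this example. It assumes the consul-cluster
# module exposes an "asg_name" output; check modules/consul-cluster/outputs.tf for the exact output names.
#
# output "asg_name_servers" {
#   value = module.consul_servers.asg_name
# }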
# ---------------------------------------------------------------------------------------------------------------------
# DEPLOY THE CONSUL CLIENT NODES
# Note that you do not have to use the consul-cluster module to deploy your clients. We do so simply because it
# provides a convenient way to deploy an Auto Scaling Group with the necessary IAM and security group permissions for
# Consul, but feel free to deploy those clients however you choose (e.g. a single EC2 Instance, a Docker cluster, etc).
# ---------------------------------------------------------------------------------------------------------------------
module "consul_clients" {
# When using these modules in your own templates, you will need to use a Git URL with a ref attribute that pins you
# to a specific version of the modules, such as the following example:
# source = "git::[email protected]:hashicorp/terraform-aws-consul.git//modules/consul-cluster?ref=v0.0.1"
source = "./modules/consul-cluster"
cluster_name = "${var.cluster_name}-client"
cluster_size = var.num_clients
instance_type = "t2.micro"
spot_price = var.spot_price
cluster_tag_key = "consul-clients"
cluster_tag_value = var.cluster_name
ami_id = var.ami_id == null ? data.aws_ami.consul.image_id : var.ami_id
user_data = templatefile("${path.module}/examples/root-example/user-data-client.sh", {
cluster_tag_key = var.cluster_tag_key
cluster_tag_value = var.cluster_name
})
vpc_id = data.aws_vpc.default.id
subnet_ids = data.aws_subnet_ids.default.ids
# To make testing easier, we allow Consul and SSH requests from any IP address here but in a production
# deployment, we strongly recommend you limit this to the IP address ranges of known, trusted servers inside your VPC.
allowed_ssh_cidr_blocks = ["0.0.0.0/0"]
allowed_inbound_cidr_blocks = ["0.0.0.0/0"]
ssh_key_name = var.ssh_key_name
}
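
# As the comment above notes, the open CIDR blocks are for testing convenience only. An illustrative (not prescriptive)
# tightening for production, where the CIDR range below is a placeholder for your own VPC or bastion range:
#
# allowed_ssh_cidr_blocks     = ["10.0.0.0/16"]
# allowed_inbound_cidr_blocks = ["10.0.0.0/16"]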
# ---------------------------------------------------------------------------------------------------------------------
# DEPLOY CONSUL IN THE DEFAULT VPC AND SUBNETS
# Using the default VPC and subnets makes this example easy to run and test, but it means Consul is accessible from the
# public Internet. For a production deployment, we strongly recommend deploying into a custom VPC with private subnets.
# ---------------------------------------------------------------------------------------------------------------------
data "aws_vpc" "default" {
default = var.vpc_id == null ? true : false
id = var.vpc_id
}
data "aws_subnet_ids" "default" {
vpc_id = data.aws_vpc.default.id
}
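
# NOTE: Newer versions of the AWS provider deprecate aws_subnet_ids in favor of the aws_subnets data source, and
# provider 5.x removes aws_subnet_ids entirely. A hedged alternative if you are on a newer provider:
#
# data "aws_subnets" "default" {
#   filter {
#     name   = "vpc-id"
#     values = [data.aws_vpc.default.id]
#   }
# }
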
data "aws_region" "current" {
}