-
Notifications
You must be signed in to change notification settings - Fork 1
/
main.tf
217 lines (185 loc) · 6.07 KB
/
main.tf
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
# Everything below is created in us-west-2. The AMI id and the
# DB-subnet availability zones further down are region-specific,
# so they must change together with this value.
# NOTE(review): consider promoting the region to a variable so the
# stack can be re-deployed elsewhere without editing this file.
provider "aws" {
  region = "us-west-2"
}
# Dedicated VPC for the Airflow stack; every subnet below carves
# its range out of this 10.0.0.0/16 address space.
resource "aws_vpc" "default" {
  cidr_block = "10.0.0.0/16"
}
# Internet gateway for the VPC; the route resource below sends all
# outbound traffic through it.
resource "aws_internet_gateway" "default" {
  vpc_id = "${aws_vpc.default.id}"
}
# Default route on the VPC's main route table: all non-local
# traffic (0.0.0.0/0) goes out via the internet gateway.
resource "aws_route" "internet_access" {
  route_table_id         = "${aws_vpc.default.main_route_table_id}"
  destination_cidr_block = "0.0.0.0/0"
  gateway_id             = "${aws_internet_gateway.default.id}"
}
# Public subnet that hosts both the ELB and the web instance;
# instances launched here receive a public IP automatically.
resource "aws_subnet" "default" {
  vpc_id                  = "${aws_vpc.default.id}"
  cidr_block              = "10.0.1.0/24"
  map_public_ip_on_launch = true
}
# Security group for the load balancer: HTTP in from anywhere,
# unrestricted egress.
resource "aws_security_group" "elb" {
  name        = "airflow_elb"
  description = "ELB allowing airflow http ingress"
  vpc_id      = "${aws_vpc.default.id}"

  # HTTP access from anywhere
  ingress {
    from_port   = 80
    to_port     = 80
    protocol    = "tcp"
    cidr_blocks = ["0.0.0.0/0"]
  }

  # outbound internet access
  egress {
    from_port   = 0
    to_port     = 0
    protocol    = "-1" # all protocols
    cidr_blocks = ["0.0.0.0/0"]
  }
}
# Security group for the web instance: SSH and HTTP in,
# unrestricted egress.
resource "aws_security_group" "default" {
  name        = "airflow_sec_group"
  description = "Used in the terraform"
  vpc_id      = "${aws_vpc.default.id}"

  # SSH access from anywhere
  # NOTE(review): port 22 is open to 0.0.0.0/0 — consider
  # restricting this to a trusted CIDR or a bastion host.
  ingress {
    from_port   = 22
    to_port     = 22
    protocol    = "tcp"
    cidr_blocks = ["0.0.0.0/0"]
  }

  # HTTP access from inside the VPC only (the ELB lives in the
  # same VPC and forwards port 80 to this instance)
  ingress {
    from_port   = 80
    to_port     = 80
    protocol    = "tcp"
    cidr_blocks = ["10.0.0.0/16"]
  }

  # outbound internet access
  egress {
    from_port   = 0
    to_port     = 0
    protocol    = "-1" # all protocols
    cidr_blocks = ["0.0.0.0/0"]
  }
}
# Security group for the RDS instance: MySQL (3306) reachable only
# from inside the VPC.
resource "aws_security_group" "rds" {
  name        = "airflow_db_sec_group"
  description = "Used in the terraform"
  vpc_id      = "${aws_vpc.default.id}"

  # MySQL access from the VPC. (Original author's aside: MySQL was
  # chosen over Postgres because of a Postgres bug around sending
  # version numbers that they hit.)
  ingress {
    from_port   = 3306
    to_port     = 3306
    protocol    = "tcp"
    cidr_blocks = ["10.0.0.0/16"]
  }

  # outbound internet access
  egress {
    from_port   = 0
    to_port     = 0
    protocol    = "-1" # all protocols
    cidr_blocks = ["0.0.0.0/0"]
  }
}
# Classic ELB in the public subnet, forwarding HTTP :80 straight
# through to the single web instance.
# NOTE(review): no health_check block is defined, so the provider
# default applies — consider adding an explicit one.
resource "aws_elb" "web" {
  name            = "airflow-elb"
  subnets         = ["${aws_subnet.default.id}"]
  security_groups = ["${aws_security_group.elb.id}"]
  instances       = ["${aws_instance.web.id}"]

  listener {
    instance_port     = 80
    instance_protocol = "http"
    lb_port           = 80
    lb_protocol       = "http"
  }
}
# Registers the local public key with AWS for SSH access to the
# web instance. file() reads the key at plan time, so the file at
# var.public_key_path must exist on the machine running Terraform.
resource "aws_key_pair" "auth" {
  key_name   = "${var.key_name}"
  public_key = "${file(var.public_key_path)}"
}
# First of the two DB subnets; together with db_2 it gives the RDS
# subnet group coverage in two availability zones.
# NOTE(review): the AZ is hard-coded and only valid while the
# provider region stays us-west-2. Also, db_1 sits in "2b" while
# db_2 sits in "2a" — the suffixes are swapped relative to the
# resource names (harmless, but surprising).
resource "aws_subnet" "db_1" {
  vpc_id            = "${aws_vpc.default.id}"
  cidr_block        = "10.0.2.0/24"
  availability_zone = "us-west-2b"
}
# Second DB subnet, in a different availability zone from db_1 so
# the pair can form a valid RDS subnet group.
# NOTE(review): AZ is hard-coded; only valid in us-west-2.
resource "aws_subnet" "db_2" {
  vpc_id            = "${aws_vpc.default.id}"
  cidr_block        = "10.0.3.0/24"
  availability_zone = "us-west-2a"
}
# Groups the two DB subnets (each in a different AZ) for use by
# the RDS instance below.
resource "aws_db_subnet_group" "default" {
  name        = "airflow_db_subnet_group"
  description = "Our main group of subnets"
  subnet_ids  = ["${aws_subnet.db_1.id}", "${aws_subnet.db_2.id}"]
}
# MySQL RDS instance that backs the Airflow metadata database.
resource "aws_db_instance" "default" {
  # Redundant with vpc_security_group_ids below (which already
  # implies the dependency), but kept to make ordering explicit.
  depends_on        = ["aws_security_group.rds"]
  identifier        = "airflow"
  allocated_storage = "10"
  engine            = "mysql"

  # NOTE(review): MySQL 5.6.35 is long past end-of-life; bump this
  # deliberately — changing it triggers an engine upgrade.
  engine_version = "5.6.35"
  instance_class = "db.t2.micro"
  name           = "airflow"
  username       = "airflow"

  # NOTE: this password is stored in plain text in the Terraform
  # state file — keep the state private.
  password               = "${var.password}"
  vpc_security_group_ids = ["${aws_security_group.rds.id}"]
  db_subnet_group_name   = "${aws_db_subnet_group.default.id}"

  # No final snapshot on destroy: all data is lost when the
  # instance is deleted.
  skip_final_snapshot = "true"
}
# Single EC2 instance that runs the Airflow webserver. Created
# only after the RDS instance exists, because the remote-exec
# provisioner below writes the DB address into airflow.cfg.
resource "aws_instance" "web" {
  depends_on = ["aws_db_instance.default"]

  # The connection block tells our provisioner how to
  # communicate with the resource (instance)
  connection {
    # The default username for our AMI
    user = "ubuntu"

    # Auth uses the private key file directly; the local SSH agent
    # is explicitly disabled.
    agent       = "false"
    private_key = "${file(var.private_key_path)}"
  }

  instance_type = "t2.micro"

  # Lookup the correct AMI based on the region
  # we specified
  # NOTE(review): the AMI id is hard-coded and only resolves in
  # us-west-2 — presumably an Ubuntu image, matching the "ubuntu"
  # login above; confirm before reusing elsewhere.
  ami = "ami-6e1a0117"

  # The name of our SSH keypair we created above.
  key_name = "${aws_key_pair.auth.id}"

  # Our Security group to allow HTTP and SSH access
  vpc_security_group_ids = ["${aws_security_group.default.id}"]

  # We're going to launch into the same subnet as our ELB. In a production
  # environment it's more common to have a separate private subnet for
  # backend instances.
  subnet_id = "${aws_subnet.default.id}"

  # Ship the local airflow.cfg template to the instance; the
  # remote-exec provisioner below substitutes the DB credentials
  # and endpoint into it.
  provisioner "file" {
    source      = "airflow.cfg"
    destination = "/tmp/airflow.cfg"
  }

  # Remote provisioner, run once after creation: install system
  # packages and Apache Airflow via pip, render the shipped config
  # against the RDS endpoint, initialise the Airflow metadata DB
  # and start the webserver (configured to listen on port 80,
  # which the ELB forwards to).
  provisioner "remote-exec" {
    inline = [
      "sudo apt-get -y update",
      "sudo apt-get install -yqq --no-install-recommends python python3-dev libkrb5-dev libsasl2-dev libssl-dev libffi-dev build-essential libblas-dev liblapack-dev libpq-dev git python3-pip python3-requests apt-utils curl netcat locales libmysqlclient-dev mysql-client",
      "sudo sed -i 's/^# en_US.UTF-8 UTF-8$/en_US.UTF-8 UTF-8/g' /etc/locale.gen",
      "sudo locale-gen",
      "sudo update-locale LANG=en_US.UTF-8 LC_ALL=en_US.UTF-8",
      "sudo wget https://bootstrap.pypa.io/get-pip.py -O /tmp/get-pip.py",
      "sudo python3 /tmp/get-pip.py",
      "sudo python3 -m pip install -U pip setuptools wheel",
      "sudo pip install Cython",
      "sudo pip install pytz",
      "sudo pip install pyOpenSSL",
      "sudo pip install ndg-httpsclient",
      "sudo pip install pyasn1",
      "sudo pip install eventlet",
      "sudo pip install apache-airflow[crypto,mysql,jdbc]",
      "sudo sed -i -e 's/MYSQLPASSWORDHERE/${var.password}/g' /tmp/airflow.cfg",
      "sudo sed -i -e 's/MYSQLHOSTHERE/${aws_db_instance.default.address}/g' /tmp/airflow.cfg",

      # Disabled ELB-DNS substitution, kept for reference (the
      # original commented line was also missing the closing quote
      # after the sed expression, fixed here):
      # "sudo sed -i -e 's/ELBVALUEHERE/${aws_elb.web.dns_name}/g' /tmp/airflow.cfg",
      "sudo mkdir /root/airflow",
      "sudo cp /tmp/airflow.cfg /root/airflow",
      "sudo airflow initdb",
      "sudo nohup airflow webserver &"
    ]
  }
}