Commit e55f8c73 authored by Cameron McFarland's avatar Cameron McFarland

Adding launch configs and auto-scaling groups.

parent 113d2f49
......@@ -309,12 +309,356 @@ resource "aws_kinesis_stream" "snowplow_s3loader_bad" {
}
// EC2 Launch Configs
//resource "aws_launch_configuration" "snowplow_collector_launch_config" {
//}
// Resolve the newest Amazon-published Amazon Linux 2 AMI
// (x86_64, HVM virtualization, gp2-backed root volume).
// Used as the base image for all Snowplow launch configurations below.
data "aws_ami" "amazonlinux2" {
  most_recent = true
  owners      = ["137112412989"] # Amazon Images

  // Both filters are combined with a logical AND.
  filter {
    name   = "virtualization-type"
    values = ["hvm"]
  }

  filter {
    name   = "name"
    values = ["amzn2-ami-hvm-*-x86_64-gp2"]
  }
}
// Security Groups
// Launch configuration for the Snowplow Scala Stream Collector tier.
// Boots the latest Amazon Linux 2 AMI, installs Java 8, downloads the
// collector (v0.15.0) from Bintray, writes its HOCON config, and starts
// it in the foreground as the `snowplow` user, publishing raw events to
// the snowplow-raw-good / snowplow-raw-bad Kinesis streams.
resource "aws_launch_configuration" "snowplow_collector_launch_config" {
image_id = "${data.aws_ami.amazonlinux2.id}"
instance_type = "t2.micro"
associate_public_ip_address = "true"
enable_monitoring = "false"
// NOTE(review): instance profile name and security group ID are
// hard-coded; presumably they are managed outside this file — confirm
// they exist in this account/region.
iam_instance_profile = "snowplow-collector-role"
security_groups = [
"sg-0270f6c10c30d8bf3",
]
// cloud-init bootstrap script. Do not edit casually: any byte change to
// this heredoc forces replacement of the launch configuration.
user_data = <<EOUD
#!/bin/bash
## This has, so far, been written to run on Amazon Linux 2 AMI.
## Install Java 1.8
yum -y install java-1.8.0-openjdk
## Set up user, group, and install location
groupadd snowplow
adduser --system --gid snowplow snowplow
mkdir -p /snowplow
mkdir -p /snowplow/config
## Install SnowPlow Kinesis Collector
mkdir -p /tmp/build
cd /tmp/build
wget -q http://dl.bintray.com/snowplow/snowplow-generic/snowplow_scala_stream_collector_kinesis_0.15.0.zip
unzip -d /snowplow/bin snowplow_scala_stream_collector_kinesis_0.15.0.zip
cd /tmp
rm -rf /tmp/build
## Need to copy in a config
cat > /snowplow/config/collector.hocon <<EOF
collector {
interface = "0.0.0.0"
port = "8000"
production = true
p3p {
policyRef = "/w3c/p3p.xml"
CP = "NOI DSP COR NID PSA OUR IND COM NAV STA"
}
crossDomain {
enabled = true
domains = [ "*" ]
secure = true
}
cookie {
enabled = false
expiration = "365 days"
name = "snwplw"
domain = "gitlab.sinter-collect.com"
}
doNotTrackCookie {
enabled = false
name = "COLLECTOR_DO_NOT_TRACK_COOKIE_NAME"
value = "COLLECTOR_DO_NOT_TRACK_COOKIE_VALUE"
}
cookieBounce {
enabled = false
name = "n3pc"
fallbackNetworkUserId = "00000000-0000-4000-A000-000000000000"
forwardedProtocolHeader = "X-Forwarded-Proto"
}
redirectMacro {
enabled = false
}
cors {
accessControlMaxAge = 5 seconds
}
rootResponse {
enabled = false
statusCode = 302
}
prometheusMetrics {
enabled = false
}
streams {
good = "snowplow-raw-good"
bad = "snowplow-raw-bad"
useIpAddressAsPartitionKey = true
sink {
enabled = kinesis
region = "us-east-1"
threadPoolSize = 10
aws {
accessKey = iam
secretKey = iam
}
backoffPolicy {
minBackoff = 10
maxBackoff = 300000
}
}
buffer {
byteLimit = 16384
recordLimit = 1000
timeLimit = 10000
}
}
}
akka {
loglevel = OFF
loggers = ["akka.event.slf4j.Slf4jLogger"]
http.server {
remote-address-header = on
raw-request-uri-header = on
parsing {
max-uri-length = 32768
uri-parsing-mode = relaxed
}
}
}
EOF
chown -R snowplow:snowplow /snowplow
## Star the collector service
su snowplow -g snowplow -c '/usr/bin/java -jar /snowplow/bin/snowplow-stream-collector-kinesis-0.15.0.jar --config /snowplow/config/collector.hocon'
EOUD
// Launch configurations are immutable; build the replacement before
// destroying the old one so the ASG is never left without a config.
lifecycle {
create_before_destroy = true
}
}
// Launch configuration for the Snowplow Stream Enrich tier.
// Boots the latest Amazon Linux 2 AMI, installs Java 8, downloads Stream
// Enrich (v0.21.0) from Bintray, writes the Iglu resolver JSON and the
// HOCON config, and runs the enricher in the foreground as `snowplow`,
// reading snowplow-raw-good and writing snowplow-enriched-good/bad.
resource "aws_launch_configuration" "snowplow_enricher_launch_config" {
image_id = "${data.aws_ami.amazonlinux2.id}"
instance_type = "t2.micro"
associate_public_ip_address = "true"
enable_monitoring = "false"
// NOTE(review): instance profile name and security group ID are
// hard-coded; same security group as the collector tier — confirm
// that is intentional.
iam_instance_profile = "snowplow-enricher-role"
security_groups = [
"sg-0270f6c10c30d8bf3",
]
// cloud-init bootstrap script. Do not edit casually: any byte change to
// this heredoc forces replacement of the launch configuration.
user_data = <<EOUD
#!/bin/bash
## This has, so far, been written to run on Amazon Linux 2 AMI.
## Install Java 1.8
yum -y install java-1.8.0-openjdk
## Set up user, group, and install location
groupadd snowplow
adduser --system --gid snowplow snowplow
mkdir -p /snowplow
mkdir -p /snowplow/config
## Install SnowPlow Kinesis Collector
mkdir -p /tmp/build
cd /tmp/build
wget -q http://dl.bintray.com/snowplow/snowplow-generic/snowplow_stream_enrich_kinesis_0.21.0.zip
unzip -d /snowplow/bin snowplow_stream_enrich_kinesis_0.21.0.zip
cd /tmp
rm -rf /tmp/build
## We need an IGLU Resolver config
cat > /snowplow/config/iglu_resolver.json <<EOJ
{
"schema": "iglu:com.snowplowanalytics.iglu/resolver-config/jsonschema/1-0-1",
"data": {
"cacheSize": 500,
"repositories": [
{
"name": "Iglu Central",
"priority": 0,
"vendorPrefixes": [ "com.snowplowanalytics" ],
"connection": {
"http": {
"uri": "http://iglucentral.com"
}
}
},
{
"name": "Iglu Central - GCP Mirror",
"priority": 1,
"vendorPrefixes": [ "com.snowplowanalytics" ],
"connection": {
"http": {
"uri": "http://mirror01.iglucentral.com"
}
}
}
]
}
}
EOJ
## Need to copy in a config
cat > /snowplow/config/enricher.hocon <<EOF
enrich {
production = true
streams {
in {
raw = "snowplow-raw-good"
}
out {
enriched = "snowplow-enriched-good"
bad = "snowplow-enriched-bad"
partitionKey = "user_ipaddress"
}
sourceSink {
enabled = kinesis
aws {
accessKey = iam
secretKey = iam
}
region = "us-east-1"
maxRecords = 10000
initialPosition = TRIM_HORIZON
backoffPolicy {
minBackoff = 10
maxBackoff = 300000
}
}
buffer {
byteLimit = 16384
recordLimit = 1000
timeLimit = 10000
}
appName = "SnowplowEnrich-gitlab-us-east-1"
}
}
EOF
chown -R snowplow:snowplow /snowplow
## Star the collector service
su snowplow -g snowplow -c '/usr/bin/java -jar /snowplow/bin/snowplow-stream-enrich-kinesis-0.21.0.jar --config /snowplow/config/enricher.hocon --resolver file:/snowplow/config/iglu_resolver.json'
EOUD
// Launch configurations are immutable; build the replacement before
// destroying the old one so the ASG is never left without a config.
lifecycle {
create_before_destroy = true
}
}
// Launch configuration for the Snowplow S3 Loader tier.
// Boots the latest Amazon Linux 2 AMI, installs Java 8 plus LZO tools,
// downloads the S3 Loader (v0.6.0) from Bintray, writes its HOCON config,
// and runs it in the foreground as `snowplow`, sinking the
// snowplow-enriched-good Kinesis stream to S3 (gzip) with failures sent
// to snowplow-s3loader-bad.
resource "aws_launch_configuration" "snowplow_s3loader_launch_config" {
image_id = "${data.aws_ami.amazonlinux2.id}"
instance_type = "t2.micro"
associate_public_ip_address = "true"
enable_monitoring = "false"
// NOTE(review): instance profile name and security group ID are
// hard-coded; same security group as the other tiers — confirm.
iam_instance_profile = "snowplow-s3loader-role"
security_groups = [
"sg-0270f6c10c30d8bf3",
]
// cloud-init bootstrap script. Do not edit casually: any byte change to
// this heredoc forces replacement of the launch configuration.
user_data = <<EOUD
#!/bin/bash
## This has, so far, been written to run on Amazon Linux 2 AMI.
## Install Java 1.8
yum -y install java-1.8.0-openjdk lzop lzo-devel
## Set up user, group, and install location
groupadd snowplow
adduser --system --gid snowplow snowplow
mkdir -p /snowplow
mkdir -p /snowplow/config
## Install SnowPlow Kinesis Collector
mkdir -p /tmp/build
cd /tmp/build
wget -q http://dl.bintray.com/snowplow/snowplow-generic/snowplow_s3_loader_0.6.0.zip
unzip -d /snowplow/bin snowplow_s3_loader_0.6.0.zip
cd /tmp
rm -rf /tmp/build
## Need to copy in a config
cat > /snowplow/config/s3loader.hocon <<EOF
source = "kinesis"
sink = "kinesis"
aws {
accessKey = "iam"
secretKey = "iam"
}
nsq {
channelName = "noidea"
host = "snowplownsq.gitlab.com"
port = 8085
lookupPort = 8090
}
kinesis {
initialPosition = "LATEST"
maxRecords = 5
region = "us-east-1"
appName = "SnowplowS3Loader-gitlab-us-east-1"
}
streams {
inStreamName = "snowplow-enriched-good"
outStreamName = "snowplow-s3loader-bad"
buffer {
byteLimit = 16384
recordLimit = 1000
timeLimit = 10000
}
}
s3 {
region = "us-east-1"
bucket = "gitlab-com-snowplow-test-one"
format = "gzip"
maxTimeout = 120000000
}
EOF
chown -R snowplow:snowplow /snowplow
## Star the collector service
su snowplow -g snowplow -c '/usr/bin/java -jar /snowplow/bin/snowplow-s3-loader-0.6.0.jar --config /snowplow/config/s3loader.hocon'
EOUD
// Launch configurations are immutable; build the replacement before
// destroying the old one so the ASG is never left without a config.
lifecycle {
create_before_destroy = true
}
}
// EC2 Auto Scaling Groups
resource "aws_autoscaling_group" "snowplow_collector_autoscaling_group" {
launch_configuration = "SnowPlowCollector-v3"
launch_configuration = "${aws_launch_configuration.snowplow_collector_launch_config.id}"
max_size = "3"
min_size = "3"
......@@ -332,7 +676,7 @@ resource "aws_autoscaling_group" "snowplow_collector_autoscaling_group" {
}
resource "aws_autoscaling_group" "snowplow_enricher_autoscaling_group" {
launch_configuration = "SnowPlowEnricher-v2"
launch_configuration = "${aws_launch_configuration.snowplow_enricher_launch_config.id}"
max_size = "3"
min_size = "3"
......@@ -350,7 +694,7 @@ resource "aws_autoscaling_group" "snowplow_enricher_autoscaling_group" {
}
resource "aws_autoscaling_group" "snowplow_s3loader_autoscaling_group" {
launch_configuration = "SnowPlowS3Loader-v1"
launch_configuration = "${aws_launch_configuration.snowplow_s3loader_launch_config.id}"
max_size = "1"
min_size = "1"
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment