Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 2 additions & 1 deletion .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@ cypress/videos
cypress/screenshots

# Terraform
deploy/*terraform*
deploy/*/*terraform*
archive.zip


4 changes: 4 additions & 0 deletions deploy/forwardLogs/cloudwatch.tf
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
# Destination log group for the forwarded access logs. Its name is injected
# into the forward-logs Lambda via the logGroupName environment variable
# (see lambda.tf), and the Lambda writes all forwarded events into it.
# NOTE(review): the resource label says "alb" but the name says "cloudfront" —
# confirm which service's access logs actually land here and align the naming.
resource "aws_cloudwatch_log_group" "alb_access_log_group" {
  # 400 days retention; events older than this are expired by CloudWatch.
  name = "${var.environment}-cloudfront-react-log"
  retention_in_days = 400
}
26 changes: 26 additions & 0 deletions deploy/forwardLogs/config/lambda-forward-logs-s3-cloudwatch.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,26 @@
{
"Version": "2012-10-17",
"Statement": [
{
"Effect": "Allow",
"Action": [
"logs:CreateLogGroup",
"logs:CreateLogStream",
"logs:DescribeLogGroups",
"logs:DescribeLogStreams",
"logs:PutLogEvents",
"logs:GetLogEvents",
"logs:FilterLogEvents",
"ec2:CreateNetworkInterface",
"ec2:DescribeNetworkInterfaces",
"ec2:DeleteNetworkInterface"
],
"Resource": "*"
},
{
"Action": ["s3:GetObject"],
"Effect": "Allow",
"Resource": ["*"]
}
]
}
227 changes: 227 additions & 0 deletions deploy/forwardLogs/forward-logs-cloudwatch-lambda/index.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,227 @@
const aws = require('aws-sdk');
const zlib = require('zlib');
const s3 = new aws.S3({ apiVersion: '2006-03-01' });
const cloudWatchLogs = new aws.CloudWatchLogs({
apiVersion: '2014-03-28',
});

const readline = require('readline');
const stream = require('stream');

// CloudWatch log group to forward into; injected through the Lambda
// environment (see the Terraform lambda.tf `environment` block).
// `const` — this binding is never reassigned.
const logGroupName = process.env.logGroupName;
// Destination log stream name; set per-invocation from the Lambda context
// inside the handler, hence `let`.
let logStreamName;

exports.handler = (event, context, callback) => {
logStreamName = context.logStreamName;
console.log('S3 object is:', event.Records[0].s3);
const bucket = event.Records[0].s3.bucket.name;
console.log('Name of S3 bucket is:', bucket);
const key = decodeURIComponent(event.Records[0].s3.object.key.replace(/\+/g, ' '));

const params = {
Bucket: bucket,
Key: key,
};

s3.getObject(params, (err, data) => {
if (err) {
console.log(err);
const message = `Error getting object ${key} from bucket ${bucket}. Make sure they exist and your bucket is in the same region as this function.`;
console.log(message);
callback(message);
} else {
zlib.gunzip(data.Body, function (error, buffer) {
if (error) {
console.log('Error uncompressing data', error);
return;
}

const logData = buffer.toString('ascii');
manageLogGroups(logData);
});
callback(null, data.ContentType);
}
});

function manageLogGroups(logData) {
const describeLogGroupParams = {
logGroupNamePrefix: logGroupName,
};

cloudWatchLogs.describeLogGroups(describeLogGroupParams, function (err, data) {
if (err) {
console.log('Error while describing log group:', err);
createLogGroup(logData);
} else {
if (!data.logGroups[0]) {
console.log('Need to create log group:', data);
createLogGroup(logData);
} else {
console.log('Success while describing log group:', data);
manageLogStreams(logData);
}
}
});
}

function createLogGroup(logData) {
const logGroupParams = {
logGroupName: logGroupName,
};
cloudWatchLogs.createLogGroup(logGroupParams, function (err, data) {
if (err) {
console.log('error while creating log group: ', err, err.stack);
return;
} else {
console.log('Success in creating log group: ', logGroupName);
manageLogStreams(logData);
}
});
}

function manageLogStreams(logData) {
const describeLogStreamsParams = {
logGroupName: logGroupName,
logStreamNamePrefix: logStreamName,
};

cloudWatchLogs.describeLogStreams(describeLogStreamsParams, function (err, data) {
if (err) {
console.log('Error during describe log streams:', err);
createLogStream(logData);
} else {
if (!data.logStreams[0]) {
console.log('Need to create log stream:', data);
createLogStream(logData);
} else {
console.log('Log Stream already defined:', logStreamName);
putLogEvents(data.logStreams[0].uploadSequenceToken, logData);
}
}
});
}

function createLogStream(logData) {
const logStreamParams = {
logGroupName: logGroupName,
logStreamName: logStreamName,
};

cloudWatchLogs.createLogStream(logStreamParams, function (err, data) {
if (err) {
console.log('error while creating log stream: ', err, err.stack);
return;
} else {
console.log('Success in creating log stream: ', logStreamName);
putLogEvents(null, logData);
}
});
}

function putLogEvents(sequenceToken, logData) {
//From http://docs.aws.amazon.com/AmazonCloudWatchLogs/latest/APIReference/API_PutLogEvents.html
const MAX_BATCH_SIZE = 1048576; // maximum size in bytes of Log Events (with overhead) per invocation of PutLogEvents
const MAX_BATCH_COUNT = 10000; // maximum number of Log Events per invocation of PutLogEvents
const LOG_EVENT_OVERHEAD = 26; // bytes of overhead per Log Event

const batches = [];

let batch = [];

let batch_size = 0;

const bufferStream = new stream.PassThrough();
bufferStream.end(logData);

const rl = readline.createInterface({
input: bufferStream,
});

let line_count = 0;

rl.on('line', (line) => {
++line_count;
if (line[0] != '#') {
let timeValue = Date.parse(line.split('\t')[0]);

const event_size = line.length + LOG_EVENT_OVERHEAD;

batch_size += event_size;

if (batch_size >= MAX_BATCH_SIZE || batch.length >= MAX_BATCH_COUNT) {
batches.push(batch);
batch = [];
batch_size = event_size;
}

batch.push({
message: line,
timestamp: timeValue,
});
}
});

rl.on('close', () => {
if (batch.length > 0) {
batches.push(batch);
}
sendBatches(sequenceToken, batches);
});
}

function sendBatches(sequenceToken, batches) {
let count = 0;
let batch_count = 0;

function sendNextBatch(err, nextSequenceToken) {
if (err) {
console.log('Error sending batch: ', err, err.stack);
return;
} else {
const nextBatch = batches.shift();
if (nextBatch) {
++batch_count;
count += nextBatch.length;
sendBatch(nextSequenceToken, nextBatch, sendNextBatch);
} else {
const msg = `Successfully put ${count} events in ${batch_count} batches`;
console.log(msg);
callback(null, msg);
}
}
}

sendNextBatch(null, sequenceToken);
}

function sendBatch(sequenceToken, batch, doNext) {
const putLogEventParams = {
logEvents: batch,
logGroupName: logGroupName,
logStreamName: logStreamName,
};
if (sequenceToken) {
putLogEventParams['sequenceToken'] = sequenceToken;
}

putLogEventParams.logEvents.sort(function (a, b) {
if (a.timestamp > b.timestamp) {
return 1;
}
if (a.timestamp < b.timestamp) {
return -1;
}
return 0;
});

cloudWatchLogs.putLogEvents(putLogEventParams, function (err, data) {
if (err) {
console.log('Error during put log events: ', err, err.stack);
doNext(err, null);
} else {
console.log(`Success in putting ${putLogEventParams.logEvents.length} events`);
doNext(null, data.nextSequenceToken);
}
});
}
};
57 changes: 57 additions & 0 deletions deploy/forwardLogs/iam-role.tf
Original file line number Diff line number Diff line change
@@ -0,0 +1,57 @@
# Execution role assumed by the forward-logs Lambda (lambda.tf attaches it).
resource "aws_iam_role" "lambda_forward_logs_s3_cloudwatch_role" {
  name = "${var.environment}-lambda-forward-logs-s3-cloudwatch-role"

  # Standard Lambda trust policy: only the Lambda service may assume the role.
  # (Comments cannot be placed inside the heredoc — IAM rejects non-JSON.)
  assume_role_policy = <<EOF
{
  "Version": "2012-10-17",
  "Statement": [
    {
      "Effect": "Allow",
      "Principal": {
        "Service": "lambda.amazonaws.com"
      },
      "Action": "sts:AssumeRole"
    }
  ]
}
EOF
  # NOTE(review): merge() with a single map is a no-op — a plain map literal
  # would do; presumably a shared tag map was meant to be merged in. Verify.
  tags = merge({
    Name = "${var.environment}-iam-role"
  },
  )
}

# Inline policy: CloudWatch Logs write access for forwarding, EC2 ENI actions
# (needed when the Lambda runs in a VPC), and S3 GetObject to read the log
# objects. NOTE(review): this duplicates
# config/lambda-forward-logs-s3-cloudwatch.json and both use Resource "*" —
# consider scoping to the specific log group and source bucket.
resource "aws_iam_role_policy" "lambda_forward_logs_s3_cloudwatch_role_policy" {
  name = "${var.environment}-lambda-forward-logs-s3-cloudwatch-policy"
  role = aws_iam_role.lambda_forward_logs_s3_cloudwatch_role.id
  policy = <<EOF
{
  "Version": "2012-10-17",
  "Statement": [
    {
      "Effect": "Allow",
      "Action": [
        "logs:CreateLogGroup",
        "logs:CreateLogStream",
        "logs:DescribeLogGroups",
        "logs:DescribeLogStreams",
        "logs:PutLogEvents",
        "logs:GetLogEvents",
        "logs:FilterLogEvents",
        "ec2:CreateNetworkInterface",
        "ec2:DescribeNetworkInterfaces",
        "ec2:DeleteNetworkInterface"
      ],
      "Resource": "*"
    },
    {
      "Action": [
        "s3:GetObject"
      ],
      "Effect": "Allow",
      "Resource": ["*"]
    }
  ]
}
EOF
}
45 changes: 45 additions & 0 deletions deploy/forwardLogs/lambda.tf
Original file line number Diff line number Diff line change
@@ -0,0 +1,45 @@
# Zip the single-file Lambda source at plan time; the hash below forces a
# redeploy whenever index.js changes.
data "archive_file" "archive_pipe_logs_s3_cloudwatch_lambda" {
  type = "zip"
  source_file = "${path.module}/forward-logs-cloudwatch-lambda/index.js"
  output_path = "${path.module}/forward-logs-cloudwatch-lambda/archive.zip"
}

# Lambda that forwards gzipped S3 log objects into CloudWatch Logs.
resource "aws_lambda_function" "forward_logs_s3_cloudwatch" {
  filename = "${path.module}/forward-logs-cloudwatch-lambda/archive.zip"
  function_name = "${var.environment}-forward-cloudfront-logs"
  role = aws_iam_role.lambda_forward_logs_s3_cloudwatch_role.arn
  handler = "index.handler"
  # NOTE(review): nodejs14.x is deprecated on AWS Lambda; index.js relies on
  # the bundled aws-sdk v2, which newer runtimes do not include — confirm the
  # upgrade path (bundle the SDK or migrate to SDK v3) before bumping this.
  runtime = "nodejs14.x"
  timeout = 30

  source_code_hash = data.archive_file.archive_pipe_logs_s3_cloudwatch_lambda.output_base64sha256

  # X-Ray tracing on every invocation.
  tracing_config {
    mode = "Active"
  }

  environment {
    variables = {
      environment = var.environment
      # Destination log group read by index.js as process.env.logGroupName.
      logGroupName = aws_cloudwatch_log_group.alb_access_log_group.name
    }
  }
}

# Allow the source bucket (and only the expected account) to invoke the Lambda.
resource "aws_lambda_permission" "allow_bucket_forward_logs" {
  statement_id = "AllowExecutionFromS3Bucket"
  action = "lambda:InvokeFunction"
  function_name = aws_lambda_function.forward_logs_s3_cloudwatch.arn
  principal = "s3.amazonaws.com"
  source_arn = var.s3_source_arn_event_notification
  source_account = var.account_id
}

# Fire the Lambda on every object created in the bucket.
# NOTE(review): no filter_prefix/filter_suffix — any object in the bucket
# triggers a forward, not only log files; confirm this is intended.
resource "aws_s3_bucket_notification" "aws-lambda-trigger-alb-cloudfront" {
  depends_on = [aws_lambda_permission.allow_bucket_forward_logs]
  bucket = var.s3_bucket_name
  lambda_function {
    lambda_function_arn = aws_lambda_function.forward_logs_s3_cloudwatch.arn
    events = ["s3:ObjectCreated:*"]
  }
}
23 changes: 23 additions & 0 deletions deploy/forwardLogs/variables.tf
Original file line number Diff line number Diff line change
@@ -0,0 +1,23 @@
# Environment prefix used in all resource names (e.g. "dev", "prod").
variable "environment" {
  type        = string
  description = "Environment name or equivalent for CI/CD and resource naming purpose."
  default     = "dev"
}

# ARN of the S3 bucket whose ObjectCreated notifications may invoke the
# forward-logs Lambda (used as source_arn of the lambda permission).
variable "s3_source_arn_event_notification" {
  type        = string
  # FIX: typo "even notification" -> "event notification".
  description = "The S3 ARN that will trigger the lambda through event notification"
  default     = ""
}

# Name of the S3 bucket the ObjectCreated notification is attached to.
variable "s3_bucket_name" {
  type        = string
  description = "The S3 bucket name"
  default     = ""
}

# AWS account id expected as the source account of the S3 invocation.
variable "account_id" {
  type        = string
  description = "AWS account id that owns the source S3 bucket"
  default     = ""
}
Loading