Skip to content

Commit

Permalink
Merge pull request #28 from schubergphilis/tk/logging
Browse files Browse the repository at this point in the history
feat: new resource `aws_redshift_logging`
  • Loading branch information
thulasirajkomminar authored May 28, 2024
2 parents fe4412a + 4824430 commit 94bdb28
Show file tree
Hide file tree
Showing 6 changed files with 80 additions and 51 deletions.
7 changes: 4 additions & 3 deletions README.md
Original file line number Diff line number Diff line change
@@ -1,7 +1,8 @@
# terraform-aws-mcaf-redshift
Terraform module to setup and manage an AWS Redshift cluster

<!--- BEGIN_TF_DOCS --->
Terraform module to set up and manage an AWS Redshift cluster.

<!-- BEGIN_TF_DOCS -->
## Requirements

| Name | Version |
Expand Down Expand Up @@ -61,4 +62,4 @@ Terraform module to setup and manage an AWS Redshift cluster
| security\_group\_id | The ID of the security group associated with the cluster |
| username | Username for the master DB user |

<!--- END_TF_DOCS --->
<!-- END_TF_DOCS -->
8 changes: 6 additions & 2 deletions examples/README.md → examples/basic/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -2,13 +2,17 @@
<!--- BEGIN_TF_DOCS --->
## Requirements

No requirements.
| Name | Version |
|------|---------|
| terraform | >= 1.3 |
| aws | >= 4.62.0 |
| random | >= 3.4.0 |

## Providers

| Name | Version |
|------|---------|
| random | n/a |
| random | >= 3.4.0 |

## Inputs

Expand Down
10 changes: 7 additions & 3 deletions examples/example.tf → examples/basic/main.tf
Original file line number Diff line number Diff line change
Expand Up @@ -7,13 +7,17 @@ resource "random_string" "redshift_password" {
}

module "redshift" {
source = "../"
source = "../.."
name = "example-1"
cidr_blocks = ["0.0.0.0/0"]
ingress_cidr_blocks = ["0.0.0.0/0"]
database = "example_db"
password = random_string.redshift_password.result
publicly_accessible = true
logging_bucket = "example-redshift-logging-bucket"
username = "root"
tags = { Environment = "test", Stack = "Example" }

logging = {
bucket_name = "example-redshift-logging-bucket"
log_destination_type = "s3"
}
}
13 changes: 13 additions & 0 deletions examples/basic/versions.tf
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
terraform {
  # Minimum Terraform CLI version required by this example.
  required_version = ">= 1.3"

  # Providers used by the basic example; versions match the module's own
  # provider constraints.
  required_providers {
    aws = {
      source  = "hashicorp/aws"
      version = ">= 4.62.0"
    }

    random = {
      source  = "hashicorp/random"
      version = ">= 3.4.0"
    }
  }
}
60 changes: 36 additions & 24 deletions main.tf
Original file line number Diff line number Diff line change
@@ -1,9 +1,11 @@
locals {
elastic_ip = var.publicly_accessible ? aws_eip.default[0].public_ip : null
subnet_group_name = var.subnet_ids == null ? "default" : (var.redshift_subnet_group != null ? var.redshift_subnet_group : var.name)
create_logging_bucket = try(var.logging.create_bucket, false) && try(var.logging.log_destination_type, "") == "s3" ? 1 : 0
elastic_ip = var.publicly_accessible ? aws_eip.default[0].public_ip : null
subnet_group_name = var.subnet_ids == null ? "default" : (var.redshift_subnet_group != null ? var.redshift_subnet_group : var.name)
}

resource "aws_eip" "default" {
#checkov:skip=CKV2_AWS_19:The EIP is created conditionally based on the publicly_accessible variable and attached to the cluster
count = var.publicly_accessible ? 1 : 0
domain = "vpc"
tags = merge(var.tags, { "Name" = "redshift-${var.name}" })
Expand Down Expand Up @@ -90,25 +92,29 @@ resource "aws_redshift_parameter_group" "default" {
}

module "logging_bucket" {
count = var.logging ? 1 : 0
count = local.create_logging_bucket

source = "github.com/schubergphilis/terraform-aws-mcaf-s3?ref=v0.10.0"
name = var.logging_bucket
policy = data.aws_iam_policy_document.logging.json
name = var.logging.bucket_name
force_destroy = var.force_destroy
policy = data.aws_iam_policy_document.logging[0].json
versioning = true
lifecycle_rule = var.logging.bucket_lifecycle_rule
tags = var.tags
lifecycle_rule = var.lifecycle_rule
}

data "aws_iam_policy_document" "logging" {
count = local.create_logging_bucket

statement {
sid = "Put bucket policy needed for Redshift audit logging"
actions = [
"s3:PutObject",
"s3:GetBucketAcl",
]
resources = [
"arn:aws:s3:::${var.logging_bucket}",
"arn:aws:s3:::${var.logging_bucket}/*",
"arn:aws:s3:::${var.logging.bucket_name}",
"arn:aws:s3:::${var.logging.bucket_name}/*",
]
principals {
type = "Service"
Expand All @@ -118,6 +124,7 @@ data "aws_iam_policy_document" "logging" {
}

resource "aws_redshift_cluster" "default" {
#checkov:skip=CKV_AWS_71: Logging is enabled using the aws_redshift_logging resource
cluster_identifier = var.name
database_name = var.database
master_username = var.username
Expand All @@ -129,20 +136,25 @@ resource "aws_redshift_cluster" "default" {
cluster_type = var.cluster_type
elastic_ip = local.elastic_ip
encrypted = true
enhanced_vpc_routing = var.enhanced_vpc_routing
final_snapshot_identifier = var.final_snapshot_identifier
iam_roles = var.iam_roles
kms_key_id = var.kms_key_arn
node_type = var.node_type
number_of_nodes = var.number_of_nodes
publicly_accessible = var.publicly_accessible
skip_final_snapshot = var.skip_final_snapshot
vpc_security_group_ids = [aws_security_group.default.id]
tags = var.tags

logging {
enable = var.logging
bucket_name = module.logging_bucket[0].name
s3_key_prefix = "redshift-audit-logs/"
}
#checkov:skip=CKV_AWS_321:User defined
enhanced_vpc_routing = var.enhanced_vpc_routing
final_snapshot_identifier = var.final_snapshot_identifier
iam_roles = var.iam_roles
kms_key_id = var.kms_key_arn
node_type = var.node_type
number_of_nodes = var.number_of_nodes
publicly_accessible = var.publicly_accessible
skip_final_snapshot = var.skip_final_snapshot
vpc_security_group_ids = [aws_security_group.default.id]
tags = var.tags
}

# Enables audit logging on the cluster via the dedicated aws_redshift_logging
# resource (replaces the deprecated inline `logging {}` block on
# aws_redshift_cluster).
resource "aws_redshift_logging" "default" {
  # Only created when a logging configuration is supplied.
  count = var.logging != null ? 1 : 0

  cluster_identifier = aws_redshift_cluster.default.id
  # Use the module-managed bucket when create_bucket is set, otherwise the
  # caller-supplied bucket name. NOTE(review): when create_bucket is true but
  # log_destination_type is "cloudwatch", module.logging_bucket has count 0 and
  # this index would fail — presumably create_bucket is only meaningful with
  # "s3"; confirm.
  bucket_name          = var.logging.create_bucket ? module.logging_bucket[0].name : var.logging.bucket_name
  log_destination_type = var.logging.log_destination_type
  log_exports          = var.logging.log_exports
  # Key prefix only applies to S3 destinations; null for cloudwatch.
  s3_key_prefix = var.logging.log_destination_type == "s3" ? var.logging.bucket_prefix : null
}
33 changes: 14 additions & 19 deletions variables.tf
Original file line number Diff line number Diff line change
Expand Up @@ -27,12 +27,6 @@ variable "automated_snapshot_retention_period" {
description = "The number of days automated snapshots should be retained"
}

variable "availability_zones" {
type = list(string)
default = []
description = "List of availability zones to deploy Redshift in"
}

variable "cluster_type" {
type = string
default = "single-node"
Expand Down Expand Up @@ -85,21 +79,22 @@ variable "kms_key_arn" {
description = "The ARN for the KMS encryption key to encrypt the Redshift cluster"
}

variable "lifecycle_rule" {
type = any
default = []
description = "List of maps containing lifecycle management configuration settings"
}

variable "logging" {
type = bool
default = true
description = "Enables logging information such as queries and connection attempts"
}
type = object({
bucket_lifecycle_rule = optional(any, [])
bucket_name = optional(string, null)
bucket_prefix = optional(string, "redshift-audit-logs/")
create_bucket = optional(bool, true)
log_destination_type = string
log_exports = optional(list(string), ["connectionlog", "useractivitylog", "userlog"])
})
default = null
description = "Logging configuration"

variable "logging_bucket" {
type = string
description = "Name of the S3 bucket to write logging information to"
validation {
condition = var.logging == null ? true : contains(["s3", "cloudwatch"], var.logging.log_destination_type)
error_message = "Valid values are \"s3\" or \"cloudwatch\"."
}
}

variable "name" {
Expand Down

0 comments on commit 94bdb28

Please sign in to comment.