Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 3 additions & 0 deletions packages/php-storage-driver-snowflake/provisioning/.gitignore
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
composer.lock
vendor
env_export
Original file line number Diff line number Diff line change
@@ -0,0 +1,34 @@
# Local .terraform directories
**/.terraform/*

# .tfstate files
*.tfstate
*.tfstate.*

# Crash log files
crash.log

# Exclude all .tfvars files, which are likely to contain sensitive data, such as
# passwords, private keys, and other secrets. These should not be part of version
# control as they are data points which are potentially sensitive and subject
# to change depending on the environment.
#
*.tfvars

# Ignore override files as they are usually used to override resources locally and so
# are not checked in
override.tf
override.tf.json
*_override.tf
*_override.tf.json

# Include override files you do wish to add to version control using negated pattern
#
# !example_override.tf

# Include tfplan files to ignore the plan output of command: terraform plan -out=tfplan
# example: *tfplan*

# Ignore CLI configuration files
.terraformrc
terraform.rc

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

156 changes: 156 additions & 0 deletions packages/php-storage-driver-snowflake/provisioning/local/aws_s3.tf
Original file line number Diff line number Diff line change
@@ -0,0 +1,156 @@
// File storage: S3 bucket backing file uploads for the local dev stack.
// force_destroy lets `terraform destroy` remove the bucket even when it still
// contains objects — acceptable because this is a disposable dev environment.
resource "aws_s3_bucket" "S3FilesBucket" {
bucket = "${local.serviceName}-s3-files-storage-bucket"

tags = {
Name = "keboola-file-storage"
}

force_destroy = true
}

// CORS rules for the files bucket: any origin may GET/PUT/POST/DELETE with any
// headers; preflight responses are cached for one hour (3600 s).
resource "aws_s3_bucket_cors_configuration" "S3FilesBucketCorsConfiguration" {
bucket = aws_s3_bucket.S3FilesBucket.bucket

cors_rule {
allowed_headers = ["*"]
allowed_methods = [
"GET",
"PUT",
"POST",
"DELETE"
]
allowed_origins = ["*"]
max_age_seconds = 3600
}
}

// Lifecycle rules for the files bucket. Object key prefixes encode retention:
// "exp-<N>" objects expire after N days (exp-2 = 48 hours).
resource "aws_s3_bucket_lifecycle_configuration" "S3FilesBucketLifecycleConfig" {
bucket = aws_s3_bucket.S3FilesBucket.bucket
# exp-180 objects: move to STANDARD_IA after 30 days, GLACIER after 180 days,
# then delete after 270 days.
rule {
id = "After 30 days IA, 180 days to glacier and 270 delete"
filter {
prefix = "exp-180"
}

expiration {
days = 270
}

transition {
storage_class = "STANDARD_IA"
days = 30
}

transition {
storage_class = "GLACIER"
days = 180
}

status = "Enabled"
}

# exp-30 objects are deleted after 30 days.
rule {
id = "Delete after 30 days"
status = "Enabled"

filter {
prefix = "exp-30"
}

expiration {
days = 30
}
}

# exp-15 objects are deleted after 15 days.
rule {
id = "Delete after 15 days"
status = "Enabled"

filter {
prefix = "exp-15"
}

expiration {
days = 15
}
}

# exp-2 objects (short-lived uploads) are deleted after 2 days.
rule {
id = "Delete after 48 hours"
status = "Enabled"

filter {
prefix = "exp-2"
}
expiration {
days = 2
}
}

# Reclaim storage held by multipart uploads abandoned for 7 days.
# NOTE(review): this rule has no filter block; confirm the provider accepts it
# and that it applies bucket-wide as intended.
rule {
id = "Delete incomplete multipart uploads"
status = "Enabled"

abort_incomplete_multipart_upload {
days_after_initiation = 7
}
}
}

// Dedicated IAM user whose access key (below) is used by the service to reach
// the files bucket and STS.
resource "aws_iam_user" "S3FileStorageUser" {
name = "${local.serviceName}-s3-file-storage-user"

path = "/"
}

# Policy granting the file-storage user access to the files bucket.
# NOTE(review): "s3:*" on all objects is broad; presumably acceptable for a
# local dev stack — confirm before reusing elsewhere.
data "aws_iam_policy_document" "S3AccessDocument" {
statement {
effect = "Allow"
actions = [
"s3:*"
]
resources = [
"${aws_s3_bucket.S3FilesBucket.arn}/*"
]
}

# ListBucket / GetBucketLocation are bucket-level actions, so they must target
# the bucket ARN itself rather than the "/*" object ARN above.
statement {
sid = "AllowListingOfUserFolder"
actions = [
"s3:ListBucket",
"s3:GetBucketLocation"
]
effect = "Allow"
resources = [
aws_s3_bucket.S3FilesBucket.arn
]
}
}

# Allows the user to mint federated (temporary, scoped-down) credentials.
# NOTE(review): resources = ["*"] — sts:GetFederationToken generally cannot be
# scoped to a narrower resource; verify this matches your security baseline.
data "aws_iam_policy_document" "STSAccessDocument" {
statement {
effect = "Allow"
actions = [
"sts:GetFederationToken"
]

resources = ["*"]
}
}

# Attach the S3 access policy (inline) to the file-storage user.
resource "aws_iam_user_policy" "S3Access" {
name = "S3Access"
user = aws_iam_user.S3FileStorageUser.name
policy = data.aws_iam_policy_document.S3AccessDocument.json
}

# Attach the STS federation-token policy (inline) to the file-storage user.
resource "aws_iam_user_policy" "STSAccess" {
name = "STSAccess"
user = aws_iam_user.S3FileStorageUser.name
policy = data.aws_iam_policy_document.STSAccessDocument.json
}

# Programmatic credentials for the file-storage user; the secret ends up in
# terraform state — fine for a local dev stack, do not reuse in production.
resource "aws_iam_access_key" "S3FileStorageUserAccessKey" {
user = aws_iam_user.S3FileStorageUser.name
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
#!/usr/bin/env bash
set -Eeuo pipefail

# Exports AWS S3 env variables derived from terraform outputs.
# Arg 1: path to a `terraform output -json` dump (required; fail with usage
# message instead of the cryptic `set -u` unbound-variable error).
TF_OUTPUTS_FILE=${1:?"Usage: $0 <terraform-outputs-json-file>"}

# Run from this script's own directory so ./functions.sh resolves.
cd "$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
# Quote the path — the original unquoted expansion was subject to word
# splitting and globbing on paths containing spaces or wildcards.
source ./functions.sh "$TF_OUTPUTS_FILE"

# output variables
output_var 'AWS_S3_BUCKET' "$(terraform_output 'FileStorageBucket')"
output_var 'AWS_S3_KEY' 'exp-2'

echo ""
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
#!/usr/bin/env bash
set -Eeuo pipefail

# Stub export script: sources the shared helpers; output_var lines go below.
# Arg 1: path to a `terraform output -json` dump (required; fail with usage
# message instead of the cryptic `set -u` unbound-variable error).
TF_OUTPUTS_FILE=${1:?"Usage: $0 <terraform-outputs-json-file>"}

# Run from this script's own directory so ./functions.sh resolves.
cd "$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
# Quote the path — the original unquoted expansion was subject to word
# splitting and globbing.
source ./functions.sh "$TF_OUTPUTS_FILE"

# output variables
Original file line number Diff line number Diff line change
@@ -0,0 +1,31 @@
#!/usr/bin/env bash
set -Eeuo pipefail

# Shared helpers for the export scripts. Sourced with the path to a
# `terraform output -json` dump as the first argument.
TF_OUTPUTS_FILE=$1
# Absolute directory of this file; project root is three levels up from it.
SCRIPT_PATH="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="${SCRIPT_PATH}/../../.."

# Print the raw value of terraform output $1 from $TF_OUTPUTS_FILE.
# Quote the file path — the original unquoted expansion was subject to word
# splitting and globbing on unusual paths.
terraform_output() {
jq ".${1}.value" -r "$TF_OUTPUTS_FILE"
}

# Print the value of terraform output $1 as compact single-line JSON.
# Quote the file path — the original unquoted expansion was subject to word
# splitting and globbing on unusual paths.
terraform_output_json() {
jq ".${1}.value" -r "$TF_OUTPUTS_FILE" | jq -c
}

# Emit NAME="VALUE": the value wrapped in double quotes, newline-terminated.
output_var() {
printf '%s="%s"\n' "$1" "$2"
}

# Emit NAME=VALUE with no quoting around the value, newline-terminated.
output_var_no_ticks() {
printf '%s=%s\n' "$1" "$2"
}

# Emit NAME='VALUE': single quotes so JSON payloads survive shell sourcing.
output_var_json() {
printf "%s='%s'\n" "$1" "$2"
}

# Write content $2 into file $1 (path relative to PROJECT_ROOT), creating any
# missing parent directories first.
output_file() {
local target="${PROJECT_ROOT}/${1}"
mkdir -p "$(dirname "$target")"
printf '%s\n' "$2" > "$target"
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,7 @@
#!/usr/bin/env bash
set -Eeuo pipefail

# Stub export script living one directory below functions.sh.
# Arg 1: path to a `terraform output -json` dump (required; fail with usage
# message instead of the cryptic `set -u` unbound-variable error).
TF_OUTPUTS_FILE=${1:?"Usage: $0 <terraform-outputs-json-file>"}

# Run from this script's own directory; functions.sh lives one level up.
cd "$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
# Quote the path — the original unquoted expansion was subject to word
# splitting and globbing.
source ./../functions.sh "$TF_OUTPUTS_FILE"
20 changes: 20 additions & 0 deletions packages/php-storage-driver-snowflake/provisioning/local/main.tf
Original file line number Diff line number Diff line change
@@ -0,0 +1,20 @@
terraform {
  required_providers {
    aws = {
      source = "hashicorp/aws"
      # aws_s3.tf uses aws_s3_bucket_cors_configuration and
      # aws_s3_bucket_lifecycle_configuration, which only exist in AWS provider
      # v4+; the previous "~> 3.74" constraint could never resolve them.
      version = "~> 4.0"
    }
  }
}

# Prefix prepended to every resource name, so parallel dev stacks don't collide.
variable "name_prefix" {
  type        = string
  description = "Prefix applied to all provisioned resource names."
}

# Base name shared by all resources provisioned for this service.
locals {
serviceName = "${var.name_prefix}-php-storage-driver-snowflake"
}

# Re-exports the name prefix for consumption by the export scripts.
output "KEBOOLA_STORAGE_API__CLIENT_DB_PREFIX" {
  description = "Name prefix, exported as the storage API client DB prefix."
  value       = var.name_prefix
}
Loading
Loading