Add bucket versioning (#1522)
### Feature or Bugfix
- Enhancement


### Detail
- Add bucket versioning to any bucket that does not already have it (a minimal sketch follows this list):
    - Pipeline Artifacts Bucket
    - CloudFront FE Bucket
    - CloudFront User Guide Bucket
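
A minimal CDK sketch of the change applied to these buckets (stack and construct names below are illustrative, not actual resources from this PR). Setting `versioned=True` enables S3 bucket versioning and also satisfies Checkov check `CKV_AWS_21`, which is why the corresponding suppressions are removed from `.checkov.baseline`:

```python
from aws_cdk import RemovalPolicy, Stack
from aws_cdk import aws_s3 as s3
from constructs import Construct


class VersionedBucketExample(Stack):
    """Illustrative only: shows the versioning flag applied across the buckets in this PR."""

    def __init__(self, scope: Construct, construct_id: str, **kwargs) -> None:
        super().__init__(scope, construct_id, **kwargs)

        s3.Bucket(
            self,
            'example-versioned-bucket',  # hypothetical construct id
            block_public_access=s3.BlockPublicAccess.BLOCK_ALL,
            enforce_ssl=True,
            removal_policy=RemovalPolicy.DESTROY,
            versioned=True,  # the flag added by this PR; clears Checkov CKV_AWS_21
        )
```

After deployment, versioning can be confirmed with `aws s3api get-bucket-versioning --bucket <bucket-name>`, which should report `"Status": "Enabled"`.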

### Relates
N/A

### Security
Please answer the questions below briefly where applicable, or write `N/A`. Based on the
[OWASP Top 10](https://owasp.org/Top10/en/).

- Does this PR introduce or modify any input fields or queries - this includes
  fetching data from storage outside the application (e.g. a database, an S3 bucket)?
  - Is the input sanitized?
  - What precautions are you taking before deserializing the data you consume?
  - Is injection prevented by parametrizing queries?
  - Have you ensured no `eval` or similar functions are used?
- Does this PR introduce any functionality or component that requires authorization?
  - How have you ensured it respects the existing AuthN/AuthZ mechanisms?
  - Are you logging failed auth attempts?
- Are you using or adding any cryptographic features?
  - Do you use standard, proven implementations?
  - Are the used keys controlled by the customer? Where are they stored?
- Are you introducing any new policies/roles/users?
  - Have you used the least-privilege principle? How?


By submitting this pull request, I confirm that my contribution is made
under the terms of the Apache 2.0 license.
noah-paige committed Sep 10, 2024
1 parent cf3b457 commit 9b8197c
Showing 4 changed files with 58 additions and 24 deletions.
28 changes: 6 additions & 22 deletions .checkov.baseline
@@ -365,8 +365,7 @@
{
"resource": "AWS::S3::Bucket.dataalldevfrontend64065639",
"check_ids": [
"CKV_AWS_18",
"CKV_AWS_21"
"CKV_AWS_18"
]
},
{
@@ -378,8 +377,7 @@
{
"resource": "AWS::S3::Bucket.dataalldevuserguide5964DC13",
"check_ids": [
"CKV_AWS_18",
"CKV_AWS_21"
"CKV_AWS_18"
]
}
]
@@ -444,12 +442,6 @@
"CKV_AWS_111"
]
},
- {
- "resource": "AWS::IAM::Policy.dataallmaincdkpipelinePipelineRoleDefaultPolicy98FFDB2A",
- "check_ids": [
- "CKV_AWS_111"
- ]
- },
{
"resource": "AWS::Lambda::Function.CustomS3AutoDeleteObjectsCustomResourceProviderHandler9D90184F",
"check_ids": [
@@ -459,10 +451,9 @@
]
},
{
"resource": "AWS::S3::Bucket.dataallmaincdkpipelinePipelineArtifactsBucketF1C6C602",
"resource": "AWS::S3::Bucket.pipelineartifactsbucketE44F7DE9",
"check_ids": [
"CKV_AWS_18",
"CKV_AWS_21"
"CKV_AWS_18"
]
},
{
@@ -633,16 +624,9 @@
]
},
- {
- "resource": "AWS::KMS::Key.thistableArtifactsBucketEncryptionKey127159D3",
- "check_ids": [
- "CKV_AWS_7"
- ]
- },
{
"resource": "AWS::S3::Bucket.thistableArtifactsBucket145BFFDF",
"resource": "AWS::S3::Bucket.thistableartifactsbucketDB1C8C64",
"check_ids": [
"CKV_AWS_18",
"CKV_AWS_21"
"CKV_AWS_18"
]
}
]
@@ -10,6 +10,7 @@
from aws_cdk import aws_codepipeline_actions as codepipeline_actions
from aws_cdk import aws_iam as iam
from aws_cdk import aws_kms as kms
+ from aws_cdk import aws_s3 as s3
from aws_cdk.aws_s3_assets import Asset
from botocore.exceptions import ClientError

@@ -71,6 +72,27 @@ def get_env_team(self, pipeline: DataPipeline) -> EnvironmentGroup:
env = EnvironmentService.get_environment_group(session, pipeline.SamlGroupName, pipeline.environmentUri)
return env

+ def create_pipeline_artifacts_bucket(self, artifact_bucket_base_name: str):
+     artifact_bucket_key = kms.Key(
+         self,
+         f'{artifact_bucket_base_name}-key',
+         removal_policy=RemovalPolicy.DESTROY,
+         alias=f'{artifact_bucket_base_name}-key',
+         enable_key_rotation=True,
+     )
+     artifact_bucket = s3.Bucket(
+         self,
+         f'{artifact_bucket_base_name}-bucket',
+         bucket_name=f'{artifact_bucket_base_name}-bucket',
+         block_public_access=s3.BlockPublicAccess.BLOCK_ALL,
+         removal_policy=RemovalPolicy.DESTROY,
+         versioned=True,
+         encryption_key=artifact_bucket_key,
+         enforce_ssl=True,
+     )
+
+     return artifact_bucket
+
def __init__(self, scope, id, target_uri: str = None, **kwargs):
kwargs.setdefault('tags', {}).update({'utility': 'dataall-data-pipeline'})
super().__init__(
@@ -232,6 +254,9 @@ def __init__(self, scope, id, target_uri: str = None, **kwargs):
id=pipeline.name,
pipeline_name=pipeline.name,
restart_execution_on_update=True,
+ artifact_bucket=self.create_pipeline_artifacts_bucket(
+     artifact_bucket_base_name=f'{pipeline.name}-artifacts'
+ ),
)
self.codepipeline_pipeline = codepipeline_pipeline
self.source_artifact = codepipeline.Artifact()
@@ -301,6 +326,9 @@ def __init__(self, scope, id, target_uri: str = None, **kwargs):
id=f'{pipeline.name}-{env.stage}',
pipeline_name=f'{pipeline.name}-{env.stage}',
restart_execution_on_update=True,
+ artifact_bucket=self.create_pipeline_artifacts_bucket(
+     artifact_bucket_base_name=f'{pipeline.name}-artifacts-{env.stage}'
+ ),
)
self.codepipeline_pipeline = codepipeline_pipeline
self.source_artifact = codepipeline.Artifact()
2 changes: 2 additions & 0 deletions deploy/stacks/cloudfront.py
@@ -97,6 +97,7 @@ def __init__(
removal_policy=RemovalPolicy.DESTROY,
block_public_access=s3.BlockPublicAccess.BLOCK_ALL,
enforce_ssl=True,
+ versioned=True,
object_ownership=s3.ObjectOwnership.OBJECT_WRITER,
)

@@ -387,6 +388,7 @@ def build_static_site(
removal_policy=RemovalPolicy.DESTROY,
block_public_access=s3.BlockPublicAccess.BLOCK_ALL,
enforce_ssl=True,
+ versioned=True,
object_ownership=s3.ObjectOwnership.OBJECT_WRITER,
)

24 changes: 22 additions & 2 deletions deploy/stacks/pipeline.py
@@ -8,6 +8,7 @@
from aws_cdk import aws_codecommit as codecommit
from aws_cdk import aws_ec2 as ec2
from aws_cdk import aws_iam as iam
+ from aws_cdk import aws_kms as kms
from aws_cdk import aws_s3 as s3
from aws_cdk import pipelines
from aws_cdk.aws_codebuild import BuildEnvironmentVariable, BuildEnvironmentVariableType
@@ -118,6 +119,26 @@ def __init__(
)
self.pipeline_bucket.grant_read_write(iam.AccountPrincipal(self.account))

+ self.artifact_bucket_name = f'{self.resource_prefix}-{self.git_branch}-artifacts-{self.account}-{self.region}'
+ self.artifact_bucket_key = kms.Key(
+     self,
+     f'{self.artifact_bucket_name}-key',
+     removal_policy=RemovalPolicy.DESTROY,
+     alias=f'{self.artifact_bucket_name}-key',
+     enable_key_rotation=True,
+ )
+ self.artifact_bucket = s3.Bucket(
+     self,
+     'pipeline-artifacts-bucket',
+     bucket_name=f'{self.resource_prefix}-{self.git_branch}-artifacts-{self.account}-{self.region}',
+     block_public_access=s3.BlockPublicAccess.BLOCK_ALL,
+     removal_policy=RemovalPolicy.DESTROY,
+     versioned=True,
+     encryption_key=self.artifact_bucket_key,
+     enforce_ssl=True,
+     auto_delete_objects=True,
+ )
+
if self.source == 'codestar_connection':
source = CodePipelineSource.connection(
repo_string=repo_string, branch=self.git_branch, connection_arn=repo_connection_arn
@@ -134,6 +155,7 @@
f'{self.resource_prefix}-{self.git_branch}-cdkpipeline',
pipeline_name=f'{self.resource_prefix}-pipeline-{self.git_branch}',
publish_assets_in_parallel=False,
+ artifact_bucket=self.artifact_bucket,
synth=pipelines.CodeBuildStep(
'Synth',
input=source,
@@ -151,8 +173,6 @@
role=self.baseline_codebuild_role.without_policy_updates(),
vpc=self.vpc,
),
- cross_account_keys=True,
- enable_key_rotation=True,
code_build_defaults=pipelines.CodeBuildOptions(
build_environment=codebuild.BuildEnvironment(
environment_variables={
