From 70b9776861a601ddf1064b689f4f0159e1ce6898 Mon Sep 17 00:00:00 2001 From: Dan Xie Date: Mon, 24 Mar 2025 16:09:12 +0800 Subject: [PATCH] New Feature: Add support for the S3 code provider of adf bootstrap and adf deployment map pipelines. --- docs/admin-guide.md | 17 ++ docs/installation-guide.md | 64 ++++ .../initial_commit/adfconfig.yml.j2 | 16 + .../adf-bootstrap/deployment/global.yml | 285 ++++++++++++++++-- .../bootstrap_repository/adf-build/config.py | 9 + .../bootstrap_repository/adf-build/main.py | 46 +++ .../adf-build/tests/stubs/stub_adfconfig.yml | 2 + .../adf-build/tests/test_config.py | 4 + .../adf-build/tests/test_main.py | 12 +- src/template.yml | 246 +++++++++++++-- 10 files changed, 649 insertions(+), 52 deletions(-) diff --git a/docs/admin-guide.md b/docs/admin-guide.md index 731eda722..186b9af49 100644 --- a/docs/admin-guide.md +++ b/docs/admin-guide.md @@ -229,6 +229,23 @@ Config has five components in `main-notification-endpoint`, `scp`, `scm`, temporarily). - target AWS Accounts by tag with no AWS Accounts having that tag assigned (yet). + - `codebase-source`, the value is a map which contains deployment map code source + configuration. if not set, it will use codecommit and handled by ADF. + - `source-type`, the source type deployment map pipeline. Can be + `codecommit` or `s3`, by default it is `codecommit`. + - `s3-bucket-name`, the S3 bucket name where the codebase is stored, + if not set, it will be created automatically with a random name. + (only used when source-type is s3) + - `s3-object-key`, the S3 key where the codebase is stored, by default, + it is set to `aws-deployment-framework-pipelines.zip` + (only used when source-type is s3). + - `s3-use-existing`, determines if the existing S3 bucket should be used. + By default, it is set to `disabled` (only used when source-type is s3). + If you want to specific configuration of your S3 bucket, you could use + the existing S3 bucket. 
Please make sure the existing S3 bucket has the
+    following configuration:
+    - [versioning](https://docs.aws.amazon.com/AmazonS3/latest/userguide/manage-versioning-examples.html)
+    - [S3 notifications](https://docs.aws.amazon.com/AmazonS3/latest/userguide/enable-event-notifications-eventbridge.html)
 - `org` configures settings in case of staged multi-organization ADF deployments.
   - `stage` defines the AWS Organization stage in case of staged multi-
     organization ADF deployments. This is an optional setting. In enterprise-
diff --git a/docs/installation-guide.md b/docs/installation-guide.md
index 237ad23f8..eff21659b 100644
--- a/docs/installation-guide.md
+++ b/docs/installation-guide.md
@@ -546,6 +546,70 @@ Please update the configuration to use in the
 `adfconfig.yml` file instead, as documented in the
 [adfconfig section in the Admin Guide](./admin-guide.md#adfconfig).
 
+#### Parameter SourceType
+
+Optional, default value: (CodeCommit)
+
+Example: `CodeCommit`
+
+**Explanation:**
+An optional string specifying the Source Type of the ADF Bootstrap pipeline.
+Currently, only `S3` and `CodeCommit` are supported.
+
+*This is not required when performing an update between versions of ADF.*
+
+Please note that changing this value will directly change the source provider of
+the ADF Bootstrap pipeline in the management account.
+
+#### Parameter S3SourceBucketName
+
+Optional, default value: (empty)
+
+Example: `aws-deployment-framework-bootstrap-pipeline-`
+
+**Explanation:**
+An optional string specifying the S3 bucket name for the ADF Bootstrap source code.
+This parameter is only applicable when SourceType is set to S3. If left empty while
+SourceType is S3, a new bucket with a random name will be created automatically.
+
+*This is not required when performing an update between versions of ADF.*
+*Only used when SourceType is S3.*
+
+#### Parameter S3SourceObjectKey
+
+Optional, default value: (aws-deployment-framework-bootstrap.zip)
+
+Example: `aws-deployment-framework-bootstrap.zip`
+
+**Explanation:**
+An optional string specifying the S3 object key for the ADF Bootstrap source code.
+This parameter is only applicable when SourceType is set to S3.
+
+*This is not required when performing an update between versions of ADF.*
+*Only used when SourceType is S3.*
+
+#### Parameter S3SourceUseExistingBucket
+
+Optional, default value: (No)
+
+Example: `No`
+
+**Explanation:**
+An optional string specifying whether to use an existing S3 bucket
+for the ADF Bootstrap source code.
+This parameter is only applicable when SourceType is set to S3.
+If set to "Yes", the ADF Bootstrap source will be
+directed to the existing S3 bucket.
+
+Additionally, the bucket must be located in the ADF installation account.
+The S3 bucket must have the following configured:
+
+- [versioning](https://docs.aws.amazon.com/AmazonS3/latest/userguide/manage-versioning-examples.html)
+- [S3 notifications](https://docs.aws.amazon.com/AmazonS3/latest/userguide/enable-event-notifications-eventbridge.html)
+
+*This is not required when performing an update between versions of ADF.*
+*Only used when SourceType is S3.*
+
 #### Parameter LogLevel
 
 Optional, default value: `INFO`
diff --git a/src/lambda_codebase/initial_commit/adfconfig.yml.j2 b/src/lambda_codebase/initial_commit/adfconfig.yml.j2
index 8eef343ad..e937e1255 100644
--- a/src/lambda_codebase/initial_commit/adfconfig.yml.j2
+++ b/src/lambda_codebase/initial_commit/adfconfig.yml.j2
@@ -37,6 +37,22 @@ config:
   allow-empty-target: disabled
   # ^ Needs to be set to "enabled" to activate. Defaults to "disabled" when
   #   not set.
+  # Optional:
+  # codebase-source:
+  # ^ The source of the codebase configuration.
if not set, it will use codecommit + # source-type: codecommit + # ^ The source of the codebase. Can be codecommit or s3, by default it is + # set to codecommit + # s3-bucket-name: aws-deployment-framework-pipelines- + # ^ The S3 bucket name where the codebase is stored, if not set, it will be created + # (only used when source-type is s3) + # s3-object-key: aws-deployment-framework-pipelines.zip + # ^ The S3 key where the codebase is stored, by default, it is set to + # aws-deployment-framework-pipelines.zip (only used when source-type is s3) + # s3-use-existing: disabled + # ^ Determines if the existing S3 bucket should be used. By default, it is set to + # "disabled". (only used when source-type is s3) + # Optional config for multi-organization deployments of ADF: # org: # # Optional: Use this variable to define the AWS Organization in case of diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/global.yml b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/global.yml index 6d6220ad1..8847d5cd1 100644 --- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/global.yml +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/global.yml @@ -35,6 +35,26 @@ Parameters: Type: "AWS::SSM::Parameter::Value" Default: /adf/cross_account_access_role + SourceType: + Type: "AWS::SSM::Parameter::Value" + Description: Deployment Map Source Type + Default: /adf/deployment_maps/codebase-source/source-type + + S3SourceBucketName: + Type: "AWS::SSM::Parameter::Value" + Description: Deployment Map S3 Source Bucket Name + Default: /adf/deployment_maps/codebase-source/s3-source-details/s3-bucket-name + + S3SourceObjectKey: + Type: "AWS::SSM::Parameter::Value" + Description: Deployment Map S3 Source Bucket Object Name + Default: /adf/deployment_maps/codebase-source/s3-source-details/s3-object-key + + S3SourceUseExistingBucket: + Type: 
"AWS::SSM::Parameter::Value" + Description: Use an existing S3 bucket for the Deployment Map S3 Source Bucket, by default a new bucket will be created + Default: /adf/deployment_maps/codebase-source/s3-source-details/s3-use-existing + Image: Description: The Image you wish to use for CodeBuild (defaults to Ubuntu - standard:7.0). Type: String @@ -74,6 +94,22 @@ Parameters: Conditions: ADFTerraformExtensionEnabled: !Equals [!Ref ADFTerraformExtension, "True"] + UseS3Source: !Equals + - !Ref SourceType + - "s3" + UseCodeCommitSource: !Equals + - !Ref SourceType + - "codecommit" + HasS3SourceBucketName: !Not + - !Equals + - !Ref S3SourceBucketName + - "none" + NoExistingS3SourceBucket: !Equals + - !Ref S3SourceUseExistingBucket + - "disabled" + CreateNewS3SourceBucket: !And + - !Condition UseS3Source + - !Condition NoExistingS3SourceBucket Globals: Function: @@ -197,6 +233,68 @@ Resources: IgnorePublicAcls: true RestrictPublicBuckets: true + SourceCodeBucketPolicy: + Type: AWS::S3::BucketPolicy + Condition: CreateNewS3SourceBucket + Properties: + Bucket: !Ref "SourceCodeBucket" + PolicyDocument: + Statement: + - Sid: "DenyInsecureConnections" + Action: + - "s3:*" + Effect: Deny + Condition: + Bool: + aws:SecureTransport: "false" + Resource: + - !Sub arn:${AWS::Partition}:s3:::${SourceCodeBucket} + - !Sub arn:${AWS::Partition}:s3:::${SourceCodeBucket}/* + Principal: + AWS: "*" + - Sid: "DenyInsecureTLS" + Action: + - "s3:*" + Effect: Deny + Condition: + NumericLessThan: + "s3:TlsVersion": "1.2" + Resource: + - !Sub arn:${AWS::Partition}:s3:::${SourceCodeBucket} + - !Sub arn:${AWS::Partition}:s3:::${SourceCodeBucket}/* + Principal: + AWS: "*" + + SourceCodeBucket: + Type: "AWS::S3::Bucket" + Condition: CreateNewS3SourceBucket + DependsOn: BootstrapUpdateDeploymentRole + DeletionPolicy: Retain + UpdateReplacePolicy: Retain + Properties: + AccessControl: BucketOwnerFullControl + BucketName: !If + - HasS3SourceBucketName + - !Ref S3SourceBucketName + - !Ref "AWS::NoValue" 
+ OwnershipControls: + Rules: + - ObjectOwnership: BucketOwnerEnforced + BucketEncryption: + ServerSideEncryptionConfiguration: + - ServerSideEncryptionByDefault: + SSEAlgorithm: AES256 + VersioningConfiguration: + Status: Enabled + NotificationConfiguration: + EventBridgeConfiguration: + EventBridgeEnabled: true + PublicAccessBlockConfiguration: + BlockPublicAcls: true + BlockPublicPolicy: true + IgnorePublicAcls: true + RestrictPublicBuckets: true + PipelineManagementApplication: Type: AWS::Serverless::Application DeletionPolicy: Delete @@ -879,16 +977,36 @@ Resources: Resource: - !Sub arn:${AWS::Partition}:s3:::${PipelineBucket} - !Sub arn:${AWS::Partition}:s3:::${PipelineBucket}/* - - Effect: Allow - Sid: "CodeCommit" - Action: - - codecommit:GetBranch - - codecommit:GetCommit - - codecommit:UploadArchive - - codecommit:GetUploadArchiveStatus - - codecommit:CancelUploadArchive - Resource: - - !GetAtt CodeCommitRepository.Arn + - !If + - UseCodeCommitSource + - Effect: Allow + Sid: "CodeCommit" + Action: + - codecommit:GetBranch + - codecommit:GetCommit + - codecommit:UploadArchive + - codecommit:GetUploadArchiveStatus + - codecommit:CancelUploadArchive + Resource: + - !GetAtt CodeCommitRepository.Arn + - Effect: Allow + Sid: "S3Source" + Action: + - "s3:GetObject" + - "s3:GetObjectVersion" + - "s3:GetBucketVersioning" + - "s3:GetBucketPolicy" + - "s3:ListBucket" + - "s3:ListBucketVersions" + Resource: + - !If + - HasS3SourceBucketName + - !Sub "arn:${AWS::Partition}:s3:::${S3SourceBucketName}" + - !Sub "arn:${AWS::Partition}:s3:::adf-global-base-deployment-*" + - !If + - HasS3SourceBucketName + - !Sub "arn:${AWS::Partition}:s3:::${S3SourceBucketName}/*" + - !Ref "AWS::NoValue" - Effect: Allow Action: - kms:Decrypt @@ -913,21 +1031,41 @@ Resources: RestartExecutionOnUpdate: true Name: "aws-deployment-framework-pipelines" Stages: - - Name: CodeCommit + - Name: Source Actions: - - Name: Source - ActionTypeId: - Category: Source - Owner: AWS - Version: "1" - 
Provider: CodeCommit - OutputArtifacts: - - Name: "Source" - Configuration: - BranchName: !GetAtt DetermineDefaultBranchName.DefaultBranchName - RepositoryName: !GetAtt CodeCommitRepository.Name - PollForSourceChanges: false - RunOrder: 1 + # CodeCommit source action (used when SourceType is CodeCommit) + - !If + - UseCodeCommitSource + - Name: Source + ActionTypeId: + Category: Source + Owner: AWS + Version: "1" + Provider: CodeCommit + OutputArtifacts: + - Name: "Source" + Configuration: + BranchName: !GetAtt DetermineDefaultBranchName.DefaultBranchName + RepositoryName: !GetAtt CodeCommitRepository.Name + PollForSourceChanges: false + RunOrder: 1 + # S3 source action (used when SourceType is S3) + - Name: Source + ActionTypeId: + Category: Source + Owner: AWS + Version: "1" + Provider: S3 + OutputArtifacts: + - Name: "Source" + Configuration: + S3Bucket: !If + - CreateNewS3SourceBucket + - !Ref SourceCodeBucket + - !Ref S3SourceBucketName + S3ObjectKey: !Ref S3SourceObjectKey + PollForSourceChanges: false + RunOrder: 1 - Name: KickoffCreateOrUpdatePipelines Actions: - Name: CreateOrUpdate @@ -1704,6 +1842,7 @@ Resources: PipelineCloudWatchEventRule: Type: AWS::Events::Rule + Condition: UseCodeCommitSource Properties: EventPattern: source: @@ -1725,6 +1864,30 @@ Resources: RoleArn: !GetAtt PipelineCloudWatchEventRole.Arn Id: adf-codepipeline-trigger-pipeline + S3PipelineCloudWatchEventRule: + Type: AWS::Events::Rule + Condition: UseS3Source + Properties: + EventPattern: + source: + - aws.s3 + detail-type: + - "Object Created" + - "Object Copy" + detail: + bucket: + name: !If + - CreateNewS3SourceBucket + - [!Ref SourceCodeBucket] + - [!Ref S3SourceBucketName] + object: + key: + - !Ref S3SourceObjectKey + Targets: + - Arn: !Sub "arn:${AWS::Partition}:codepipeline:${AWS::Region}:${AWS::AccountId}:${CodePipeline}" + RoleArn: !GetAtt PipelineCloudWatchEventRole.Arn + Id: adf-codepipeline-trigger-pipeline + TerraformLockTable: Condition: ADFTerraformExtensionEnabled 
Type: "AWS::DynamoDB::Table" @@ -2051,7 +2214,63 @@ Resources: Resource: - !Sub arn:${AWS::Partition}:states:${AWS::Region}:${AWS::AccountId}:stateMachine:adf-bootstrap-enable-cross-account - !Sub arn:${AWS::Partition}:states:${AWS::Region}:${AWS::AccountId}:execution:adf-bootstrap-enable-cross-account:* - + - Effect: Allow + Sid: "UpdatePipeline" + Action: + - "codepipeline:UpdatePipeline" + - "codepipeline:GetPipeline" + - "codepipeline:StartPipelineExecution" + Resource: + - !Sub arn:${AWS::Partition}:codepipeline:${AWS::Region}:${AWS::AccountId}:aws-deployment-framework-pipelines + - !Sub arn:${AWS::Partition}:codepipeline:${AWS::Region}:${AWS::AccountId}:aws-deployment-framework-pipelines/* + - Effect: Allow + Sid: "UpdateEventRule" + Action: + - "events:DescribeRule" + - "events:EnableRule" + - "events:ListRules" + - "events:PutEvents" + - "events:PutRule" + - "events:PutTargets" + - "events:RemoveTargets" + - "events:DeleteRule" + Resource: + - !Sub arn:${AWS::Partition}:events:${AWS::Region}:${AWS::AccountId}:rule/adf-global-base-deploymen* + - Effect: Allow + Sid: "UpdateS3Policy" + Action: + - "s3:DeleteBucketPolicy" + Resource: "*" + - Sid: "IAMUpdatePolicy" + Effect: "Allow" + Action: + - "iam:PutRolePolicy" + Resource: + - !Sub "arn:${AWS::Partition}:iam::${AWS::AccountId}:role/adf/bootstrap/adf-bootstrap-update-deployment-role" + - !If + - UseS3Source + - Effect: Allow + Sid: "S3Source" + Action: + - "s3:GetObject" + - "s3:GetObjectVersion" + - "s3:GetBucketVersioning" + - "s3:GetBucketPolicy" + - "s3:ListBucket" + - "s3:ListBucketVersions" + - "s3:PutObject" + - "s3:PutObjectAcl" + - "s3:CreateBucket" + Resource: + - !If + - HasS3SourceBucketName + - !Sub "arn:${AWS::Partition}:s3:::${S3SourceBucketName}" + - !Sub "arn:${AWS::Partition}:s3:::adf-global-base-deployment-*" + - !If + - HasS3SourceBucketName + - !Sub "arn:${AWS::Partition}:s3:::${S3SourceBucketName}/*" + - !Ref "AWS::NoValue" + - !Ref "AWS::NoValue" Outputs: ADFVersionNumber: Value: !Ref 
ADFVersion @@ -2100,6 +2319,22 @@ Outputs: Export: Name: "aws-deployment-framework-pipelines-codecommit-ssh-url" + SourceCodeBucketName: + Condition: UseS3Source + Description: "The S3 bucket Name used as source" + Value: !If + - CreateNewS3SourceBucket + - !Ref SourceCodeBucket + - !Ref S3SourceBucketName + + SourceCodeBucketArn: + Condition: UseS3Source + Description: "The ARN of the S3 Source bucket being used" + Value: !If + - CreateNewS3SourceBucket + - !GetAtt SourceCodeBucket.Arn + - !Sub "arn:${AWS::Partition}:s3:::${S3SourceBucketName}" + CodePipelineRoleArn: Description: "The CodePipeline Arn" Value: !GetAtt CodePipelineRole.Arn diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/config.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/config.py index 0b9b99d44..63230f216 100644 --- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/config.py +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/config.py @@ -100,6 +100,15 @@ def _validate(self): if not isinstance(self.target_regions, list): self.target_regions = [self.target_regions] + if self.config.get( + "deployment-maps", {} + ).get("codebase-source", {}).get( + "source-type", "codecommit" + ) not in ['codecommit', 's3']: + raise InvalidConfigError( + "ADF currently only supports codecommit or s3 as deployment map codebase source" + ) from None + def _load_config_file(self): """ Checks for an Org Specific adfconfig.yml (adfconfig.{ORG_ID}.yml) diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/main.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/main.py index a4cd8eddb..0b1c24bc0 100644 --- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/main.py +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/main.py @@ -56,6 +56,10 @@ ) ADF_DEFAULT_SCM_FALLBACK_BRANCH = 'main' ADF_DEFAULT_DEPLOYMENT_MAPS_ALLOW_EMPTY_TARGET = 'disabled' 
+ADF_DEFAULT_DEPLOYMENT_MAPS_CODEBASE_SOURCE_TYPE = 'codecommit' +ADF_DEFAULT_DEPLOYMENT_MAPS_CODEBASE_SOURCE_S3_BUCKET_NAME = 'none' +ADF_DEFAULT_DEPLOYMENT_MAPS_CODEBASE_SOURCE_S3_KEY = 'aws-deployment-framework-pipelines.zip' +ADF_DEFAULT_DEPLOYMENT_MAPS_CODEBASE_SOURCE_S3_USE_EXISTING = 'disabled' ADF_DEFAULT_ORG_STAGE = "none" LOGGER = configure_logger(__name__) @@ -195,6 +199,48 @@ def prepare_deployment_account(sts, deployment_account_id, config): ADF_DEFAULT_DEPLOYMENT_MAPS_ALLOW_EMPTY_TARGET, ) ) + deployment_account_parameter_store.put_parameter( + 'deployment_maps/codebase-source/source-type', + config.config.get('deployment-maps', {}).get( + 'codebase-source', {} + ).get( + 'source-type', + ADF_DEFAULT_DEPLOYMENT_MAPS_CODEBASE_SOURCE_TYPE, + ) + ) + deployment_account_parameter_store.put_parameter( + 'deployment_maps/codebase-source/s3-source-details/s3-bucket-name', + config.config.get('deployment-maps', {}).get( + 'codebase-source', {} + ).get( + 's3-source-details', {} + ).get( + 's3-bucket-name', + ADF_DEFAULT_DEPLOYMENT_MAPS_CODEBASE_SOURCE_S3_BUCKET_NAME, + ) + ) + deployment_account_parameter_store.put_parameter( + 'deployment_maps/codebase-source/s3-source-details/s3-object-key', + config.config.get('deployment-maps', {}).get( + 'codebase-source', {} + ).get( + 's3-source-details', {} + ).get( + 's3-object-key', + ADF_DEFAULT_DEPLOYMENT_MAPS_CODEBASE_SOURCE_S3_KEY, + ) + ) + deployment_account_parameter_store.put_parameter( + 'deployment_maps/codebase-source/s3-source-details/s3-use-existing', + config.config.get('deployment-maps', {}).get( + 'codebase-source', {} + ).get( + 's3-source-details', {} + ).get( + 's3-use-existing', + ADF_DEFAULT_DEPLOYMENT_MAPS_CODEBASE_SOURCE_S3_USE_EXISTING, + ) + ) deployment_account_parameter_store.put_parameter( 'org/stage', config.config.get('org', {}).get( diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/tests/stubs/stub_adfconfig.yml 
b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/tests/stubs/stub_adfconfig.yml index 8945cbf7b..520ed915d 100644 --- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/tests/stubs/stub_adfconfig.yml +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/tests/stubs/stub_adfconfig.yml @@ -23,3 +23,5 @@ config: keep-default-scp: enabled deployment-maps: allow-empty-target: disabled + codebase-source: + source-type: codecommit diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/tests/test_config.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/tests/test_config.py index 3246130a1..267cf669b 100644 --- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/tests/test_config.py +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/tests/test_config.py @@ -68,6 +68,10 @@ def test_raise_validation_length_deployment_target_region(cls): with raises(InvalidConfigError): assert cls._parse_config() +def test_raise_validation_deployment_maps_codebase_source_source_type(cls): + cls.config_contents["config"]["deployment-maps"]["codebase-source"]["source-type"] = "github" + with raises(InvalidConfigError): + assert cls._parse_config() def test_sorted_regions(cls): cls.config_contents["regions"]["deployment-account"] = [ diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/tests/test_main.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/tests/test_main.py index 8d66e5b08..7df22d23c 100644 --- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/tests/test_main.py +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/tests/test_main.py @@ -134,7 +134,7 @@ def test_prepare_deployment_account_defaults(param_store_cls, cls, sts): ) for param_store in parameter_store_list: assert param_store.put_parameter.call_count == ( - 15 if param_store == deploy_param_store else 9 + 19 
if param_store == deploy_param_store else 9 ) param_store.put_parameter.assert_has_calls( [ @@ -161,6 +161,10 @@ def test_prepare_deployment_account_defaults(param_store_cls, cls, sts): deployment_account_id, ), call('deployment_maps/allow_empty_target', 'disabled'), + call('deployment_maps/codebase-source/source-type', 'codecommit'), + call('deployment_maps/codebase-source/s3-source-details/s3-bucket-name', 'none'), + call('deployment_maps/codebase-source/s3-source-details/s3-object-key', 'aws-deployment-framework-pipelines.zip'), + call('deployment_maps/codebase-source/s3-source-details/s3-use-existing', 'disabled'), call('org/stage', 'none'), call('notification_type', 'email'), call('notification_endpoint', 'john@example.com'), @@ -236,7 +240,7 @@ def test_prepare_deployment_account_specific_config(param_store_cls, cls, sts): ) for param_store in parameter_store_list: assert param_store.put_parameter.call_count == ( - 17 if param_store == deploy_param_store else 9 + 21 if param_store == deploy_param_store else 9 ) param_store.put_parameter.assert_has_calls( [ @@ -264,6 +268,10 @@ def test_prepare_deployment_account_specific_config(param_store_cls, cls, sts): deployment_account_id, ), call('deployment_maps/allow_empty_target', 'disabled'), + call('deployment_maps/codebase-source/source-type', 'codecommit'), + call('deployment_maps/codebase-source/s3-source-details/s3-bucket-name', 'none'), + call('deployment_maps/codebase-source/s3-source-details/s3-object-key', 'aws-deployment-framework-pipelines.zip'), + call('deployment_maps/codebase-source/s3-source-details/s3-use-existing', 'disabled'), call('org/stage', 'test-stage'), call('notification_type', 'slack'), call( diff --git a/src/template.yml b/src/template.yml index f1118633c..4d75b1cd5 100644 --- a/src/template.yml +++ b/src/template.yml @@ -159,6 +159,36 @@ Parameters: Default: "1900-12-31T23:59:59Z" AllowedPattern: "\\d{4}-[0-1]\\d-[0-3]\\dT[0-2]\\d:[0-5]\\d:[0-5]\\d([+-][0-2]\\d:[0-5]\\d|Z)" + SourceType: 
+ Type: String + Default: "CodeCommit" + AllowedValues: + - "CodeCommit" + - "S3" + Description: "Source type for the ADF Bootstrap Pipeline (CodeCommit or S3)" + + S3SourceObjectKey: + Type: String + Default: "aws-deployment-framework-bootstrap.zip" + Description: "The S3 object key for the ADF Bootstrap source code (only used when SourceType is S3)" + + S3SourceBucketName: + Type: String + Default: "none" + Description: "The S3 bucket name for the ADF Bootstrap source code (only used when SourceType is S3). If left empty, a new bucket will be created." + + S3SourceUseExistingBucket: + Description: >- + Use an existing S3 bucket for the ADF Bootstrap source code + (only used when SourceType is S3). If set Yes, The source will + direct to the existing S3 bucket. Additionally, the bucket should + be in the ADF installation account. + Type: String + Default: "No" + AllowedValues: + - "Yes" + - "No" + Globals: Function: Architectures: @@ -171,6 +201,22 @@ Conditions: CreateCrossAccountAccessRole: !Equals - !Ref AllowBootstrappingOfManagementAccount - "Yes" + UseS3Source: !Equals + - !Ref SourceType + - "S3" + UseCodeCommitSource: !Equals + - !Ref SourceType + - "CodeCommit" + HasS3SourceBucketName: !Not + - !Equals + - !Ref S3SourceBucketName + - "none" + NoExistingS3SourceBucket: !Equals + - !Ref S3SourceUseExistingBucket + - "No" + CreateNewS3SourceBucket: !And + - !Condition UseS3Source + - !Condition NoExistingS3SourceBucket Resources: BootstrapTemplatesBucketPolicy: @@ -262,6 +308,67 @@ Resources: IgnorePublicAcls: true RestrictPublicBuckets: true + SourceCodeBucketPolicy: + Type: AWS::S3::BucketPolicy + Condition: CreateNewS3SourceBucket + Properties: + Bucket: !Ref "SourceCodeBucket" + PolicyDocument: + Statement: + - Sid: "DenyInsecureConnections" + Action: + - "s3:*" + Effect: Deny + Condition: + Bool: + aws:SecureTransport: "false" + Resource: + - !Sub arn:${AWS::Partition}:s3:::${SourceCodeBucket} + - !Sub arn:${AWS::Partition}:s3:::${SourceCodeBucket}/* + 
Principal: + AWS: "*" + - Sid: "DenyInsecureTLS" + Action: + - "s3:*" + Effect: Deny + Condition: + NumericLessThan: + "s3:TlsVersion": "1.2" + Resource: + - !Sub arn:${AWS::Partition}:s3:::${SourceCodeBucket} + - !Sub arn:${AWS::Partition}:s3:::${SourceCodeBucket}/* + Principal: + AWS: "*" + + SourceCodeBucket: + Type: "AWS::S3::Bucket" + Condition: CreateNewS3SourceBucket + DeletionPolicy: Retain + UpdateReplacePolicy: Retain + Properties: + AccessControl: BucketOwnerFullControl + BucketName: !If + - HasS3SourceBucketName + - !Ref S3SourceBucketName + - !Ref "AWS::NoValue" + OwnershipControls: + Rules: + - ObjectOwnership: BucketOwnerEnforced + BucketEncryption: + ServerSideEncryptionConfiguration: + - ServerSideEncryptionByDefault: + SSEAlgorithm: AES256 + VersioningConfiguration: + Status: Enabled + NotificationConfiguration: + EventBridgeConfiguration: + EventBridgeEnabled: true + PublicAccessBlockConfiguration: + BlockPublicAcls: true + BlockPublicPolicy: true + IgnorePublicAcls: true + RestrictPublicBuckets: true + ### Account processing begin AccountFileProcessingLambdaRole: Type: "AWS::IAM::Role" @@ -1728,21 +1835,41 @@ Resources: RoleArn: !GetAtt BootstrapCodePipelineRole.Arn Name: "aws-deployment-framework-bootstrap-pipeline" Stages: - - Name: CodeCommit + - Name: CodeSource Actions: - - Name: Source - ActionTypeId: - Category: Source - Owner: AWS - Version: "1" - Provider: CodeCommit - OutputArtifacts: - - Name: "TemplateSource" - Configuration: - BranchName: !GetAtt DetermineDefaultBranchName.DefaultBranchName - RepositoryName: !GetAtt CodeCommitRepository.Name - PollForSourceChanges: false - RunOrder: 1 + # CodeCommit source action (used when SourceType is CodeCommit) + - !If + - UseCodeCommitSource + - Name: Source + ActionTypeId: + Category: Source + Owner: AWS + Version: "1" + Provider: CodeCommit + OutputArtifacts: + - Name: "TemplateSource" + Configuration: + BranchName: !GetAtt DetermineDefaultBranchName.DefaultBranchName + RepositoryName: 
!GetAtt CodeCommitRepository.Name + PollForSourceChanges: false + RunOrder: 1 + # S3 source action (used when SourceType is S3) + - Name: Source + ActionTypeId: + Category: Source + Owner: AWS + Version: "1" + Provider: S3 + OutputArtifacts: + - Name: "TemplateSource" + Configuration: + S3Bucket: !If + - CreateNewS3SourceBucket + - !Ref SourceCodeBucket + - !Ref S3SourceBucketName + S3ObjectKey: !Ref S3SourceObjectKey + PollForSourceChanges: false + RunOrder: 1 - Name: EnableBootstrappingViaJumpRole Actions: - Name: EnableBootstrappingViaJumpRole @@ -1824,6 +1951,27 @@ Resources: Resource: - !GetAtt "BootstrapArtifactStorageBucket.Arn" - !Sub "${BootstrapArtifactStorageBucket.Arn}/*" + - !If + - UseS3Source + - Effect: Allow + Sid: "S3Source" + Action: + - "s3:GetObject" + - "s3:GetObjectVersion" + - "s3:GetBucketVersioning" + - "s3:ListBucket" + - "s3:ListBucketVersions" + - "s3:PutObject" + Resource: + - !If + - CreateNewS3SourceBucket + - !GetAtt "SourceCodeBucket.Arn" + - !Sub "arn:${AWS::Partition}:s3:::${S3SourceBucketName}" + - !If + - CreateNewS3SourceBucket + - !Sub "${SourceCodeBucket.Arn}/*" + - !Sub "arn:${AWS::Partition}:s3:::${S3SourceBucketName}/*" + - !Ref "AWS::NoValue" - Effect: Allow Sid: "CodeBuild" Action: @@ -1831,16 +1979,19 @@ Resources: - "codebuild:StartBuild" Resource: - !GetAtt CodeBuildProject.Arn - - Effect: Allow - Sid: "CodeCommit" - Action: - - "codecommit:GetBranch" - - "codecommit:GetCommit" - - "codecommit:UploadArchive" - - "codecommit:GetUploadArchiveStatus" - - "codecommit:CancelUploadArchive" - Resource: - - !GetAtt CodeCommitRepository.Arn + - !If + - UseCodeCommitSource + - Effect: Allow + Sid: "CodeCommit" + Action: + - "codecommit:GetBranch" + - "codecommit:GetCommit" + - "codecommit:UploadArchive" + - "codecommit:GetUploadArchiveStatus" + - "codecommit:CancelUploadArchive" + Resource: + - !GetAtt CodeCommitRepository.Arn + - !Ref "AWS::NoValue" - Effect: Allow Sid: "Lambda" Action: @@ -2627,7 +2778,10 @@ Resources: 
Action: sts:AssumeRole Condition: ArnEquals: - "aws:SourceArn": !Sub "arn:${AWS::Partition}:events:${AWS::Region}:${AWS::AccountId}:rule/adf-bootstrap-pipeline-watch-repo" + "aws:SourceArn": !If + - UseS3Source + - !Sub "arn:${AWS::Partition}:events:${AWS::Region}:${AWS::AccountId}:rule/adf-bootstrap-pipeline-watch-s3" + - !Sub "arn:${AWS::Partition}:events:${AWS::Region}:${AWS::AccountId}:rule/adf-bootstrap-pipeline-watch-repo" Policies: - PolicyName: adf-bootstrap-execute-cwe PolicyDocument: @@ -2639,6 +2793,7 @@ Resources: PipelineCloudWatchEventRule: Type: "AWS::Events::Rule" + Condition: UseCodeCommitSource Properties: Name: "adf-bootstrap-pipeline-watch-repo" EventPattern: @@ -2661,6 +2816,31 @@ Resources: RoleArn: !GetAtt PipelineCloudWatchEventRole.Arn Id: adf-codepipeline-trigger-bootstrap + S3PipelineCloudWatchEventRule: + Type: "AWS::Events::Rule" + Condition: UseS3Source + Properties: + Name: "adf-bootstrap-pipeline-watch-s3" + EventPattern: + source: + - aws.s3 + detail-type: + - "Object Created" + - "Object Copy" + detail: + bucket: + name: !If + - CreateNewS3SourceBucket + - [!Ref SourceCodeBucket] + - [!Ref S3SourceBucketName] + object: + key: + - !Ref S3SourceObjectKey + Targets: + - Arn: !Sub "arn:${AWS::Partition}:codepipeline:${AWS::Region}:${AWS::AccountId}:aws-deployment-framework-bootstrap-pipeline" + RoleArn: !GetAtt PipelineCloudWatchEventRole.Arn + Id: adf-codepipeline-trigger-bootstrap + Outputs: ADFVersionNumber: Value: !FindInMap ["Metadata", "ADF", "Version"] @@ -2678,3 +2858,19 @@ Outputs: Value: !GetAtt CodeCommitRepository.CloneUrlSsh Export: Name: "BaseTemplatesRepoSSHURL" + + SourceCodeBucketName: + Condition: UseS3Source + Description: "The S3 bucket Name used as source" + Value: !If + - CreateNewS3SourceBucket + - !Ref SourceCodeBucket + - !Ref S3SourceBucketName + + SourceCodeBucketArn: + Condition: UseS3Source + Description: "The ARN of the S3 Source bucket being used" + Value: !If + - CreateNewS3SourceBucket + - !GetAtt 
SourceCodeBucket.Arn + - !Sub "arn:${AWS::Partition}:s3:::${S3SourceBucketName}"