From 3a6e08ba63efaecd85709904a8e77b1ec41f15c8 Mon Sep 17 00:00:00 2001
From: Dan Xie
Date: Thu, 6 Feb 2025 23:01:39 +0800
Subject: [PATCH 1/2] feat: 1) Add default S3 bucket name, S3 object key, and
 bucket account id properties to the S3 provider 2) Trigger S3 source
 pipelines via S3 notification events 3) Add a cross-account S3 event rule
 forwarder

---
 docs/providers-guide.md                       | 44 ++++++++--
 .../initial_commit/adfconfig.yml.j2           |  1 +
 .../create_or_update_rule.py                  | 85 +++++++++++++------
 .../adf-bootstrap/global.yml                  | 11 +--
 .../bootstrap_repository/adf-build/main.py    |  9 ++
 .../cdk/cdk_constructs/adf_codepipeline.py    | 43 +++++++---
 .../shared/cdk/cdk_constructs/adf_events.py   | 83 ++++++++++++------
 .../adf-build/shared/python/rule.py           | 30 +++++--
 .../shared/python/schema_validation.py        |  7 +-
 .../adf-build/shared/templates/events-s3.yml  | 62 ++++++++++++++
 .../adf-build/tests/test_main.py              |  6 +-
 11 files changed, 297 insertions(+), 84 deletions(-)
 create mode 100644 src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/templates/events-s3.yml

diff --git a/docs/providers-guide.md b/docs/providers-guide.md
index 7645f6b3c..db32d5606 100644
--- a/docs/providers-guide.md
+++ b/docs/providers-guide.md
@@ -125,15 +125,41 @@
 Please add the required S3 read permissions for the `adf-codecommit-role` by
 granting the `adf-codecommit-role` S3 read permissions in the bucket policy
 of the source bucket.
 
+If the `poll_for_changes` property is set to `False`, ADF monitors the S3
+events `Object Created` and `Object Copy` for the defined `object_key` of the
+defined `bucket_name` and triggers the related pipeline.
+
+The source S3 bucket must have `Bucket Versioning` and `Amazon EventBridge`
+enabled, otherwise the automatic pipeline trigger will not work.
+
+ADF supports a source S3 bucket in an account other than the default
+Deployment account. To make this work, an EventBridge resource policy must be
+added manually to the default event bus in the Deployment account, for
+example:
+
+```
+{
+  "Version": "2012-10-17",
+  "Statement": [{
+    "Sid": "allow_account_to_put_events",
+    "Effect": "Allow",
+    "Principal": {
+      "AWS": "arn:aws:iam::<source-account-id>:root"
+    },
+    "Action": "events:PutEvents",
+    "Resource": "arn:aws:events:eu-central-1:<deployment-account-id>:event-bus/default"
+  }]
+}
+```
+
 Provider type: `s3`.
 
 #### Properties
 
-- *account_id* - *(String)* **(required)**
-  - The AWS Account ID where the source S3 Bucket is located.
-- *bucket_name* - *(String)* **(required)**
+- *account_id* - *(String)* **(optional)**
+  - The AWS Account ID where the source S3 Bucket is located. By default, the
+    Deployment account ID is used.
+- *bucket_name* - *(String)*
   - The Name of the S3 Bucket that will be the source of the pipeline.
-- *object_key* - *(String)* **(required)**
+- *object_key* - *(String)*
   - The Specific Object within the bucket that will trigger the pipeline
     execution.
 - *trigger_on_changes* - *(Boolean)* default: `True`.
@@ -144,7 +170,15 @@
   - **By default**, it will trigger on changes using the polling mechanism of
     CodePipeline. Monitoring the S3 object so it can trigger a release when
     an update took place.
-
+- *poll_for_changes* - *(Boolean)* default: `True`.
+  - Whether CodePipeline should poll the S3 object for changes. As the name
+    implies, when polling for changes it will check the S3 object for
+    updates every minute or so.
+    This will show up as actions in CloudTrail.
+  - **By default**, it will poll for changes. When set to `False`, the
+    pipeline is triggered instead by the S3 notification event that is
+    emitted when the S3 object is updated.
+
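+For example, a deployment map entry using an event-driven S3 source could
+look like the following sketch (the account id, bucket name, object key, and
+target below are illustrative placeholders):
+
+```
+pipelines:
+  - name: sample-s3-pipeline
+    default_providers:
+      source:
+        provider: s3
+        properties:
+          account_id: "111111111111"
+          bucket_name: my-adf-source-bucket
+          object_key: sample-s3-pipeline.zip
+          poll_for_changes: False
+    targets:
+      - "222222222222"
+```
+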
 ### CodeConnections
 
 Use CodeConnections as a source to trigger your pipeline. The source action retrieves
diff --git a/src/lambda_codebase/initial_commit/adfconfig.yml.j2 b/src/lambda_codebase/initial_commit/adfconfig.yml.j2
index 8eef343ad..cfe055ca7 100644
--- a/src/lambda_codebase/initial_commit/adfconfig.yml.j2
+++ b/src/lambda_codebase/initial_commit/adfconfig.yml.j2
@@ -33,6 +33,7 @@ config:
     default-scm-branch: main
     # Optional:
     # default-scm-codecommit-account-id: "123456789012"
+    # default-s3-source-bucket-name: "mys3deploymentbucket"
   deployment-maps:
     allow-empty-target: disabled
     # ^ Needs to be set to "enabled" to activate. Defaults to "disabled" when
diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/lambda_codebase/pipeline_management/create_or_update_rule.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/lambda_codebase/pipeline_management/create_or_update_rule.py
index 15865ed89..d1ffb2b11 100644
--- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/lambda_codebase/pipeline_management/create_or_update_rule.py
+++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/deployment/lambda_codebase/pipeline_management/create_or_update_rule.py
@@ -23,7 +23,8 @@
 CLOUDWATCH = boto3.client("cloudwatch")
 METRICS = ADFMetrics(CLOUDWATCH, "PIPELINE_MANAGEMENT/RULE")
 
-_CACHE = None
+_CACHE_S3 = None
+_CACHE_CODECOMMIT = None
 
 
 def lambda_handler(event, _):
@@ -38,31 +39,52 @@ def lambda_handler(event, _):
     event (dict): The ADF Pipeline Management State Machine
         execution input object.
     """
     # pylint: disable=W0603
     # Global variable here to cache across lambda execution runtimes.
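+    # Separate caches are kept per source provider, so that creating the
+    # forwarding rule for one provider does not suppress creating the other
+    # provider's rule for the same source account.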
-    global _CACHE
-    if not _CACHE:
-        _CACHE = Cache()
+    global _CACHE_S3, _CACHE_CODECOMMIT
+
+    if not _CACHE_S3:
+        _CACHE_S3 = Cache()
+        METRICS.put_metric_data(
+            {"MetricName": "S3CacheInitialized", "Value": 1, "Unit": "Count"}
+        )
+
+    if not _CACHE_CODECOMMIT:
+        _CACHE_CODECOMMIT = Cache()
         METRICS.put_metric_data(
-            {"MetricName": "CacheInitialized", "Value": 1, "Unit": "Count"}
+            {"MetricName": "CodeCommitCacheInitialized", "Value": 1, "Unit": "Count"}
         )
 
     LOGGER.info(event)
     pipeline = event['pipeline_definition']
-    source_provider = (
-        pipeline.get("default_providers", {})
-        .get("source", {})
-        .get("provider", "codecommit")
-    )
-    source_account_id = (
-        pipeline.get("default_providers", {})
-        .get("source", {})
-        .get("properties", {})
-        .get("account_id")
-    )
+    default_source_provider = pipeline.get("default_providers", {}).get("source", {})
+    source_provider = default_source_provider.get("provider", "codecommit")
+    source_provider_properties = default_source_provider.get("properties", {})
+    source_account_id = source_provider_properties.get("account_id")
+    source_bucket_name = source_provider_properties.get("bucket_name")
+    if source_provider == "s3":
+        if not source_account_id:
+            source_account_id = DEPLOYMENT_ACCOUNT_ID
+            pipeline["default_providers"]["source"].setdefault(
+                "properties", {},
+            )["account_id"] = source_account_id
+        if not source_bucket_name:
+            try:
+                parameter_store = ParameterStore(DEPLOYMENT_ACCOUNT_REGION, boto3)
+                default_s3_source_bucket_name = parameter_store.fetch_parameter(
+                    "/adf/scm/default-s3-source-bucket-name"
+                )
+            except ParameterNotFoundError:
+                default_s3_source_bucket_name = os.environ["S3_BUCKET_NAME"]
+                LOGGER.debug(
+                    "default-s3-source-bucket-name not found in SSM, "
+                    "falling back to the S3_BUCKET_NAME environment variable."
+                )
+            pipeline["default_providers"]["source"].setdefault(
+                "properties", {},
+            )["bucket_name"] = default_s3_source_bucket_name
+            source_bucket_name = default_s3_source_bucket_name
+        event_params = {
+            "SourceS3BucketName": source_bucket_name
+        }
+    else:
+        event_params = {}
+
 
     # Resolve codecommit source_account_id in case it is not set
     if source_provider == "codecommit" and not source_account_id:
@@ -98,25 +120,36 @@ def lambda_handler(event, _):
     )
 
     if (
-        source_provider == "codecommit"
-        and source_account_id
+        source_account_id
         and int(source_account_id) != int(DEPLOYMENT_ACCOUNT_ID)
-        and not _CACHE.exists(source_account_id)
+        and (
+            (
+                source_provider == "codecommit"
+                and not _CACHE_CODECOMMIT.exists(source_account_id)
+            ) or (
+                source_provider == "s3"
+                and not _CACHE_S3.exists(source_account_id)
+            )
+        )
     ):
         LOGGER.info(
-            "Source is CodeCommit and the repository is hosted in the %s "
+            "Source is %s and the repository/bucket is hosted in the %s "
            "account instead of the deployment account (%s). 
Creating or " "updating EventBridge forward rule to forward change events " "from the source account to the deployment account in " "EventBridge.", + source_provider, source_account_id, DEPLOYMENT_ACCOUNT_ID, ) - rule = Rule(source_account_id) + + rule = Rule(source_account_id, source_provider, event_params) rule.create_update() - _CACHE.add(source_account_id, True) - METRICS.put_metric_data( - {"MetricName": "CreateOrUpdate", "Value": 1, "Unit": "Count"} - ) + + if source_provider == "codecommit": + _CACHE_CODECOMMIT.add(source_account_id, True) + METRICS.put_metric_data( + {"MetricName": "CodeCommitCreateOrUpdate", "Value": 1, "Unit": "Count"} + ) + elif source_provider == "s3": + _CACHE_S3.add(source_account_id, True) + METRICS.put_metric_data( + {"MetricName": "S3CreateOrUpdate", "Value": 1, "Unit": "Count"} + ) return event diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/global.yml b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/global.yml index 12a9bcee3..5c185c20a 100644 --- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/global.yml +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-bootstrap/global.yml @@ -70,6 +70,7 @@ Resources: - "codecommit:UploadArchive" - "codepipeline:StartPipelineExecution" - "events:PutEvents" + - "s3:Get*" Resource: "*" - Effect: Allow Action: @@ -372,7 +373,7 @@ Resources: - "iam:TagRole" - "iam:UntagRole" Resource: - - !Sub "arn:${AWS::Partition}:iam::${AWS::AccountId}:role/adf/cross-account-events/adf-cc-event-from-${AWS::AccountId}-to-${DeploymentAccountId}" + - !Sub "arn:${AWS::Partition}:iam::${AWS::AccountId}:role/adf/cross-account-events/adf-*-event-from-${AWS::AccountId}-to-${DeploymentAccountId}" - Effect: Allow Sid: "IAMFullPathAndNameOnly" Action: @@ -381,21 +382,21 @@ Resources: - "iam:GetRolePolicy" - "iam:PutRolePolicy" Resource: - - !Sub "arn:${AWS::Partition}:iam::${AWS::AccountId}:role/adf/cross-account-events/adf-cc-event-from-${AWS::AccountId}-to-${DeploymentAccountId}" - - !Sub "arn:${AWS::Partition}:iam::${AWS::AccountId}:role/adf-cc-event-from-${AWS::AccountId}-to-${DeploymentAccountId}" + - !Sub "arn:${AWS::Partition}:iam::${AWS::AccountId}:role/adf/cross-account-events/adf-*-event-from-${AWS::AccountId}-to-${DeploymentAccountId}" + - !Sub "arn:${AWS::Partition}:iam::${AWS::AccountId}:role/adf-*-event-from-${AWS::AccountId}-to-${DeploymentAccountId}" - Effect: Allow Sid: "IAMPassRole" Action: - "iam:PassRole" Resource: - - !Sub "arn:${AWS::Partition}:iam::${AWS::AccountId}:role/adf/cross-account-events/adf-cc-event-from-${AWS::AccountId}-to-${DeploymentAccountId}" + - !Sub "arn:${AWS::Partition}:iam::${AWS::AccountId}:role/adf/cross-account-events/adf-*-event-from-${AWS::AccountId}-to-${DeploymentAccountId}" Condition: StringEquals: 'iam:PassedToService': - "events.amazonaws.com" ArnEquals: 'iam:AssociatedResourceArn': - - !Sub "arn:${AWS::Partition}:events:${AWS::Region}:${AWS::AccountId}:rule/adf-cc-event-from-${AWS::AccountId}-to-${DeploymentAccountId}" + - !Sub "arn:${AWS::Partition}:events:${AWS::Region}:${AWS::AccountId}:rule/adf-*-event-from-${AWS::AccountId}-to-${DeploymentAccountId}" - Effect: Allow Sid: "KMS" Action: diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/main.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/main.py index a22c9375f..67766ca15 100644 --- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/main.py +++ 
b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/main.py
@@ -188,6 +188,15 @@ def prepare_deployment_account(sts, deployment_account_id, config):
             .get('default-scm-codecommit-account-id', deployment_account_id)
         )
     )
+    # Store the default S3 source bucket name for S3 source pipelines.
+    deployment_account_parameter_store.put_parameter(
+        'scm/default-s3-source-bucket-name',
+        (
+            config.config
+            .get('scm', {})
+            .get('default-s3-source-bucket-name', S3_BUCKET_NAME)
+        )
+    )
     deployment_account_parameter_store.put_parameter(
         'deployment_maps/allow_empty_target',
         config.config.get('deployment-maps', {}).get(
diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_constructs/adf_codepipeline.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_constructs/adf_codepipeline.py
index 61a77553e..e200ba1b8 100644
--- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_constructs/adf_codepipeline.py
+++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_constructs/adf_codepipeline.py
@@ -158,14 +158,18 @@ def _generate_configuration(self):
                 .get('default_providers', {})
                 .get('source', {})
                 .get('properties', {})
-                .get('object_key')
+                .get('object_key', f"{self.map_params['name']}.zip")
             ),
             "PollForSourceChanges": (
-                self.map_params
-                .get('default_providers', {})
-                .get('source', {})
-                .get('properties', {})
-                .get('trigger_on_changes', True)
+                (
+                    self.map_params['default_providers']['source']
+                    .get('properties', {})
+                    .get('trigger_on_changes', True)
+                ) and (
+                    self.map_params['default_providers']['source']
+                    .get('properties', {})
+                    .get('poll_for_changes', True)
+                )
             ),
         }
         if self.provider == "S3" and self.category == "Deploy":
@@ -202,7 +206,7 @@ def _generate_configuration(self):
                     .get('default_providers', {})
                     .get('deploy', {})
                     .get('properties', {})
-                    .get('object_key')
+                    .get('object_key', f"{self.map_params['name']}.zip")
                 ))
             ),
             "KMSEncryptionKeyARN": (
@@ -706,7 +710,12 @@ def __init__(
             'pipeline',
             **pipeline_args
         )
-        adf_events.Events(self, 'events', {
+        _provider = (
+            map_params
+            .get('default_providers', {})
+            .get('source', {})
+            .get('provider')
+        )
+        _event_params = {
             "pipeline": (
                 f'arn:{ADF_DEPLOYMENT_PARTITION}:codepipeline:'
                 f'{ADF_DEPLOYMENT_REGION}:{ADF_DEPLOYMENT_ACCOUNT_ID}:'
@@ -753,7 +762,7 @@ def __init__(
                 .get('default_providers', {})
                 .get('source', {})
                 .get('properties', {})
-                .get('poll_for_changes', False)
+                .get('poll_for_changes', _provider == "s3")
             ),
             "trigger_on_changes": (
                 map_params
@@ -763,7 +772,21 @@ def __init__(
                     .get('trigger_on_changes', True)
                 ),
             }
-        })
+        }
+        if _provider == "s3":
+            _event_params["s3_bucket_name"] = (
+                map_params
+                .get('default_providers', {})
+                .get('source', {})
+                .get('properties', {})
+                .get('bucket_name')
+            )
+            _event_params["s3_object_key"] = (
+                map_params
+                .get('default_providers', {})
+                .get('source', {})
+                .get('properties', {})
+                .get('object_key', f"{map_params['name']}.zip")
+            )
+        adf_events.Events(self, 'events', _event_params)
 
     @staticmethod
     def restructure_tags(current_tags):
diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_constructs/adf_events.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_constructs/adf_events.py
index d63ea4247..128bd1052 100644
--- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_constructs/adf_events.py
+++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_constructs/adf_events.py
@@ -33,6 +33,6 @@ def __init__(self, scope: Construct, id: str, params: dict, **kwargs):
         _provider = params.get('source', {}).get('provider')
         _add_trigger_on_changes = (
-            _provider == 'codecommit'
+            _provider in ('codecommit', 's3')
             and _source_account
             and params.get('source', {}).get('trigger_on_changes')
             and not params.get('source', {}).get('poll_for_changes')
@@ -43,38 +43,64 @@ def __init__(self, scope: Construct, id: str, params: dict, **kwargs):
         repo_name = params['source']['repo_name']
 
     if _add_trigger_on_changes:
-        _event = _events.Rule(
-            self,
-            f'trigger_{name}',
-            description=f'Triggers {name} on changes in source CodeCommit repository',
-            event_pattern=_events.EventPattern(
-                resources=[
-                    f'arn:{stack.partition}:codecommit:'
-                    f'{ADF_DEPLOYMENT_REGION}:{account_id}:{repo_name}'
-                ],
-                source=["aws.codecommit"],
-                detail_type=[
-                    'CodeCommit Repository State Change'
-                ],
-                detail={
-                    "event": [
-                        "referenceCreated",
-                        "referenceUpdated"
-                    ],
-                    "referenceType": [
-                        "branch"
-                    ],
-                    "referenceName": [
-                        params['source']['branch']
-                    ]
-                }
-            )
-        )
-        _event.add_target(
-            _targets.CodePipeline(
-                pipeline=_pipeline
-            )
-        )
+        _event = None
+        if _provider == 'codecommit':
+            _event = _events.Rule(
+                self,
+                f'trigger_{name}',
+                description=f'Triggers {name} on changes in source CodeCommit repository',
+                event_pattern=_events.EventPattern(
+                    resources=[
+                        f'arn:{stack.partition}:codecommit:'
+                        f'{ADF_DEPLOYMENT_REGION}:{account_id}:{repo_name}'
+                    ],
+                    source=["aws.codecommit"],
+                    detail_type=[
+                        'CodeCommit Repository State Change'
+                    ],
+                    detail={
+                        "event": [
+                            "referenceCreated",
+                            "referenceUpdated"
+                        ],
+                        "referenceType": [
+                            "branch"
+                        ],
+                        "referenceName": [
+                            params['source']['branch']
+                        ]
+                    }
+                )
+            )
+        elif _provider == 's3':
+            _s3_bucket_name = params['s3_bucket_name']
+            _s3_object_key = params['s3_object_key']
+            _event = _events.Rule(
+                self,
+                f'trigger_{name}',
+                description=(
+                    f'Triggers {name} on changes to {_s3_object_key} '
+                    f'in source S3 bucket {_s3_bucket_name}'
+                ),
+                event_pattern=_events.EventPattern(
+                    source=["aws.s3"],
+                    detail_type=[
+                        "Object Created",
+                        "Object Copy"
+                    ],
+                    detail={
+                        "bucket": {
+                            "name": [_s3_bucket_name]
+                        },
+                        "object": {
+                            "key": [_s3_object_key]
+                        }
+                    }
+                )
+            )
+        if _event:
+            _event.add_target(
+                _targets.CodePipeline(
+                    pipeline=_pipeline
+                )
+            )
 
     if params.get('topic_arn'):
         # pylint: disable=no-value-for-parameter
         _topic = _sns.Topic.from_topic_arn(self, 'topic_arn', params["topic_arn"])
diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/rule.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/rule.py
index 0a7c01b6d..4d9e8829e 100644
--- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/rule.py
+++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/rule.py
@@ -28,9 +28,27 @@
 
 
 class Rule:
-    def __init__(self, source_account_id):
+    def __init__(
+        self,
+        source_account_id: str,
+        source_provider: str,
+        params: dict = None
+    ):
         self.source_account_id = source_account_id
-        self.stack_name = f'adf-event-rule-{source_account_id}-{DEPLOYMENT_ACCOUNT_ID}'
+        self.source_provider = source_provider
+        self.parameters = [
+            {
+                'ParameterKey': param_key,
+                'ParameterValue': param_value
+            }
+            for param_key, param_value in (params or {}).items()
+        ]
+        if source_provider == "codecommit":
+            self.stack_name = (
+                f'adf-event-rule-{source_account_id}-{DEPLOYMENT_ACCOUNT_ID}'
+            )
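+            # Keep the legacy stack and template names for CodeCommit so
+            # that existing forwarder stacks are updated in place rather
+            # than replaced.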
+            self.event_template_name = 'events.yml'
+        else:
+            self.stack_name = (
+                f'adf-event-rule-{source_account_id}'
+                f'-{DEPLOYMENT_ACCOUNT_ID}-{source_provider}'
+            )
+            self.event_template_name = f'events-{source_provider}.yml'
         self.partition = get_partition(DEPLOYMENT_ACCOUNT_REGION)
         # Requirement adf-automation-role to exist on target
         self.role = sts.assume_cross_account_role(
@@ -44,14 +62,15 @@ def __init__(self, source_account_id):
     def create_update(self):
         s3_object_path = s3.build_pathing_style(
             style="path",
-            key="adf-build/templates/events.yml",
+            key=f"adf-build/templates/{self.event_template_name}",
         )
+
         cloudformation = CloudFormation(
             region=SOURCE_ACCOUNT_REGION,
             deployment_account_region=SOURCE_ACCOUNT_REGION,
             role=self.role,
             template_url=s3_object_path,
-            parameters=[],
+            parameters=self.parameters,
             wait=True,
             stack_name=self.stack_name,
             s3=None,
@@ -59,7 +78,8 @@ def create_update(self):
             account_id=DEPLOYMENT_ACCOUNT_ID,
         )
         LOGGER.info(
-            'Ensuring Stack State for Event Rule forwarding from %s to %s',
+            'Ensuring Stack State for %s Event Rule forwarding from %s to %s',
+            self.source_provider,
             self.source_account_id,
             DEPLOYMENT_ACCOUNT_ID,
         )
diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/schema_validation.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/schema_validation.py
index 3289db7f0..04d6b76af 100644
--- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/schema_validation.py
+++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/python/schema_validation.py
@@ -91,9 +91,10 @@
 
 # S3 Source
 S3_SOURCE_PROPS = {
-    "account_id": AWS_ACCOUNT_ID_SCHEMA,
-    "bucket_name": str,
-    "object_key": str,
+    Optional("account_id"): AWS_ACCOUNT_ID_SCHEMA,
+    Optional("bucket_name"): str,
+    Optional("object_key"): str,
+    Optional("poll_for_changes"): bool,
     Optional("trigger_on_changes"): bool,
 }
 S3_SOURCE = {
diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/templates/events-s3.yml b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/templates/events-s3.yml
new file mode 100644
index 000000000..92037a6eb
--- /dev/null
+++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/templates/events-s3.yml
@@ -0,0 +1,62 @@
+# Copyright Amazon.com Inc. or its affiliates.
+# SPDX-License-Identifier: Apache-2.0
+
+Parameters:
+  DeploymentAccountId:
+    Type: "AWS::SSM::Parameter::Value<String>"
+    Description: Deployment Account ID
+    Default: /adf/deployment_account_id
+  SourceS3BucketName:
+    Type: "String"
+    Description: Source S3 Bucket Name
+
+Resources:
+  EventRole:
+    Type: AWS::IAM::Role
+    Properties:
+      Path: /adf/cross-account-events/
+      RoleName: !Sub adf-s3-event-from-${AWS::AccountId}-to-${DeploymentAccountId}
+      AssumeRolePolicyDocument:
+        Version: 2012-10-17
+        Statement:
+          - Effect: Allow
+            Principal:
+              Service:
+                - events.amazonaws.com
+            Action: sts:AssumeRole
+            Condition:
+              ArnEquals:
+                "aws:SourceArn": !Sub "arn:${AWS::Partition}:events:${AWS::Region}:${AWS::AccountId}:rule/adf-s3-event-from-${AWS::AccountId}-to-${DeploymentAccountId}"
+      Policies:
+        - PolicyName: !Sub events-to-${DeploymentAccountId}
+          PolicyDocument:
+            Version: 2012-10-17
+            Statement:
+              - Effect: Allow
+                Action: events:PutEvents
+                Resource:
+                  - !Sub "arn:${AWS::Partition}:events:${AWS::Region}:${DeploymentAccountId}:event-bus/default"
+                Condition:
+                  ForAnyValue:StringEquals:
+                    "events:detail-type":
+                      - "Object Created"
+                      - "Object Copy"
+
+  EventRule:
+    Type: AWS::Events::Rule
+    Properties:
+      Name: !Sub adf-s3-event-from-${AWS::AccountId}-to-${DeploymentAccountId}
+      EventPattern:
+        source:
+          - aws.s3
+        detail-type:
+          - "Object Created"
+          - "Object Copy"
+        detail:
+          bucket:
+            name:
+              - !Ref SourceS3BucketName
+      Targets:
+        - Arn: !Sub arn:${AWS::Partition}:events:${AWS::Region}:${DeploymentAccountId}:event-bus/default
+          RoleArn: !GetAtt EventRole.Arn
+          Id: s3-push-event
diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/tests/test_main.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/tests/test_main.py
index 8d66e5b08..c8ff8b9a5 100644
--- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/tests/test_main.py
+++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/tests/test_main.py
@@ -134,7 +134,7 @@ def test_prepare_deployment_account_defaults(param_store_cls, cls, sts):
     )
     for param_store in parameter_store_list:
         assert param_store.put_parameter.call_count == (
-            15 if param_store == deploy_param_store else 9
+            16 if param_store == deploy_param_store else 9
         )
     param_store.put_parameter.assert_has_calls(
         [
@@ -160,6 +160,7 @@ def test_prepare_deployment_account_defaults(param_store_cls, cls, sts):
                 'scm/default_scm_codecommit_account_id',
                 deployment_account_id,
             ),
+            call('scm/default-s3-source-bucket-name', 'some_bucket'),
             call('deployment_maps/allow_empty_target', 'disabled'),
             call('org/stage', 'none'),
             call('notification_type', 'email'),
@@ -236,7 +237,7 @@ def test_prepare_deployment_account_specific_config(param_store_cls, cls, sts):
     )
     for param_store in parameter_store_list:
         assert param_store.put_parameter.call_count == (
-            17 if param_store == deploy_param_store else 9
+            18 if param_store == deploy_param_store else 9
         )
     param_store.put_parameter.assert_has_calls(
         [
@@ -263,6 +264,7 @@ def test_prepare_deployment_account_specific_config(param_store_cls, cls, sts):
                 'scm/default_scm_codecommit_account_id',
                 deployment_account_id,
             ),
+            call('scm/default-s3-source-bucket-name', 'some_bucket'),
             call('deployment_maps/allow_empty_target', 'disabled'),
             call('org/stage', 'test-stage'),
             call('notification_type', 'slack'),

From 64c8df254e719a5a6085c6eb279fa7a89e903b4e Mon Sep 17 00:00:00 2001
From: Dan Xie
Date: Mon, 17 Feb 2025 22:16:35 +0800
Subject: [PATCH 2/2] Update docs/providers-guide.md for the S3 source
 provider

---
 docs/providers-guide.md | 27 +++++++++++++------------
 1 file changed, 15 insertions(+), 12 deletions(-)

diff --git a/docs/providers-guide.md b/docs/providers-guide.md
index db32d5606..862ce1478 100644
--- a/docs/providers-guide.md
+++ b/docs/providers-guide.md
@@ -125,16 +125,16 @@
 Please add the required S3 read permissions for the `adf-codecommit-role` by
 granting the `adf-codecommit-role` S3 read permissions in the bucket policy
 of the source bucket.
 
-If the `poll_for_changes` property is set to `False`, ADF monitors the S3
+The source S3 bucket must be created manually in advance. Additionally, the
+bucket must have
+[Bucket Versioning](https://docs.aws.amazon.com/AmazonS3/latest/userguide/manage-versioning-examples.html)
+and
+[Amazon EventBridge](https://docs.aws.amazon.com/AmazonS3/latest/userguide/enable-event-notifications-eventbridge.html)
+enabled, otherwise the automatic pipeline trigger will not work.
+
+The S3 provider supports the `poll_for_changes` property. It defaults to
+`True`, but setting it to `False` is recommended. When it is `False`, ADF
+monitors the S3
 events `Object Created` and `Object Copy` for the defined `object_key` of the
 defined `bucket_name` and triggers the related pipeline.
 
-The source S3 bucket must have `Bucket Versioning` and `Amazon EventBridge`
-enabled, otherwise the automatic pipeline trigger will not work.
-
 ADF supports a source S3 bucket in an account other than the default
-Deployment account. To make this work, an EventBridge resource policy must be
-added manually to the default event bus in the Deployment account, for
-example:
+Deployment account. To make this work, an
+[event bus resource policy](https://docs.aws.amazon.com/eventbridge/latest/userguide/eb-event-bus-permissions-manage.html)
+must be added manually to the default event bus in the Deployment account,
+for example:
 
@@ -156,13 +156,16 @@ Provider type: `s3`.
 #### Properties
 
 - *account_id* - *(String)* **(optional)**
-  - The AWS Account ID where the source S3 Bucket is located. By default, the
-    Deployment account ID is used.
-- *bucket_name* - *(String)*
+  - The AWS Account ID where the source S3 Bucket is located.
+  - If not set in the provider properties, the Deployment account ID is used
+    as the default value.
+- *bucket_name* - *(String)* **(optional)**
   - The Name of the S3 Bucket that will be the source of the pipeline.
-- *object_key* - *(String)*
+  - Additionally, a default source bucket name can be set in
+    [adfconfig.yml: config/scm/default-s3-source-bucket-name](./admin-guide.md#adfconfig).
+- *object_key* - *(String)* **(optional)**
   - The Specific Object within the bucket that will trigger the pipeline
-    execution.
-- *trigger_on_changes* - *(Boolean)* default: `True`.
+    execution. Defaults to `<pipeline name>.zip`.
+- *trigger_on_changes* - *(Boolean)* default: `True`. **(optional)**
   - Whether CodePipeline should release a change and trigger the pipeline if
     a change was detected in the S3 object.
   - When set to **False**, you either need to trigger the pipeline manually,
@@ -170,7 +173,7 @@ Provider type: `s3`.
   - **By default**, it will trigger on changes using the polling mechanism of
     CodePipeline. Monitoring the S3 object so it can trigger a release when
     an update took place.
-- *poll_for_changes* - *(Boolean)* default: `True`.
+- *poll_for_changes* - *(Boolean)* default: `True`. **(optional)**
   - Whether CodePipeline should poll the S3 object for changes. As the name
     implies, when polling for changes it will check the S3 object for
     updates every minute or so.
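+
+The bucket requirements described above can be covered with a CloudFormation
+definition along the following lines (a minimal sketch; the bucket name is a
+placeholder):
+
+```
+Resources:
+  SourceBucket:
+    Type: AWS::S3::Bucket
+    Properties:
+      BucketName: my-adf-source-bucket
+      VersioningConfiguration:
+        Status: Enabled
+      NotificationConfiguration:
+        EventBridgeConfiguration:
+          EventBridgeEnabled: true
+```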