From cdf08b1c99c1ecdf9fbb690ad8de38be1733d4c6 Mon Sep 17 00:00:00 2001 From: Flynn Bundy Date: Thu, 6 Jun 2019 17:53:39 +0200 Subject: [PATCH] v0.3.3 This release is only intended as a stepping stone release to 1.0.0 for existing deployments. --- .gitignore | 1 + .pylintrc | 3 +- Makefile | 4 +- src/bootstrap_repository/adf-build/config.py | 2 +- src/bootstrap_repository/adf-build/main.py | 3 + .../adf-build/requirements.txt | 3 +- .../deployment/global.yml | 325 ++++++++------- .../deployment/lambda_codebase/__init__.py | 2 + .../lambda_codebase/initial_commit/handler.py | 27 ++ .../initial_commit/initial_commit.py | 258 ++++++++++++ .../pipelines_repository/.gitignore | 130 ++++++ .../pipelines_repository/README.md | 3 + .../adf-build/LICENSE.txt | 14 + .../adf-build/__init__.py | 2 + .../adf-build/deployment_map.py | 98 +++++ .../adf-build/generate_params.py | 191 +++++++++ .../adf-build/generate_pipelines.py | 153 +++++++ .../adf-build/helpers/package_transform.sh | 31 ++ .../adf-build/pipeline.py | 92 +++++ .../adf-build/requirements.txt | 6 + .../adf-build/resolver.py | 92 +++++ .../pipelines_repository/adf-build/target.py | 110 +++++ .../adf-build/tests/__init__.py | 12 + .../adf-build/tests/stubs/__init__.py | 6 + .../tests/stubs/account_name1_eu-west-1.json | 5 + .../tests/stubs/stub_cfn_global.json | 10 + .../tests/stubs/stub_deployment_map.yml | 35 ++ .../adf-build/tests/stubs/stub_target.py | 76 ++++ .../adf-build/tests/test_deployment_map.py | 63 +++ .../adf-build/tests/test_generate_params.py | 98 +++++ .../adf-build/tests/test_pipeline.py | 68 ++++ .../adf-build/tests/test_target.py | 160 ++++++++ .../example-deployment_map.yml | 9 + .../pipeline_types/cc-buildonly.yml.j2 | 214 ++++++++++ .../pipeline_types/cc-cloudformation.yml.j2 | 354 ++++++++++++++++ .../pipeline_types/cc-s3.yml.j2 | 295 ++++++++++++++ .../pipeline_types/cc-service-catalog.yml.j2 | 285 +++++++++++++ .../github-cloudformation.yml.j2 | 380 ++++++++++++++++++ 
.../pipelines_repository/pytest.ini | 2 + .../initial_commit/requirements.txt | 3 + .../deployment/lambda_codebase/pytest.ini | 2 + .../lambda_codebase/requirements.txt | 0 .../deployment/regional.yml | 26 +- .../example-adfconfig.yml | 31 +- src/bootstrap_repository/global.yml | 108 ++--- src/bootstrap_repository/pytest.ini | 2 +- src/initial/template.yml | 27 +- src/pipelines_repository/adf-build/target.py | 2 +- .../example-deployment_map.yml | 15 +- .../pipeline_types/cc-buildonly.yml.j2 | 70 +--- .../pipeline_types/cc-cloudformation.yml.j2 | 70 +--- .../pipeline_types/cc-s3.yml.j2 | 70 +--- .../pipeline_types/cc-service-catalog.yml.j2 | 70 +--- .../github-cloudformation.yml.j2 | 70 +--- 54 files changed, 3600 insertions(+), 588 deletions(-) create mode 100644 src/bootstrap_repository/deployment/lambda_codebase/initial_commit/handler.py create mode 100644 src/bootstrap_repository/deployment/lambda_codebase/initial_commit/initial_commit.py create mode 100644 src/bootstrap_repository/deployment/lambda_codebase/initial_commit/pipelines_repository/.gitignore create mode 100644 src/bootstrap_repository/deployment/lambda_codebase/initial_commit/pipelines_repository/README.md create mode 100644 src/bootstrap_repository/deployment/lambda_codebase/initial_commit/pipelines_repository/adf-build/LICENSE.txt create mode 100644 src/bootstrap_repository/deployment/lambda_codebase/initial_commit/pipelines_repository/adf-build/__init__.py create mode 100644 src/bootstrap_repository/deployment/lambda_codebase/initial_commit/pipelines_repository/adf-build/deployment_map.py create mode 100644 src/bootstrap_repository/deployment/lambda_codebase/initial_commit/pipelines_repository/adf-build/generate_params.py create mode 100644 src/bootstrap_repository/deployment/lambda_codebase/initial_commit/pipelines_repository/adf-build/generate_pipelines.py create mode 100644 
src/bootstrap_repository/deployment/lambda_codebase/initial_commit/pipelines_repository/adf-build/helpers/package_transform.sh create mode 100644 src/bootstrap_repository/deployment/lambda_codebase/initial_commit/pipelines_repository/adf-build/pipeline.py create mode 100644 src/bootstrap_repository/deployment/lambda_codebase/initial_commit/pipelines_repository/adf-build/requirements.txt create mode 100644 src/bootstrap_repository/deployment/lambda_codebase/initial_commit/pipelines_repository/adf-build/resolver.py create mode 100644 src/bootstrap_repository/deployment/lambda_codebase/initial_commit/pipelines_repository/adf-build/target.py create mode 100644 src/bootstrap_repository/deployment/lambda_codebase/initial_commit/pipelines_repository/adf-build/tests/__init__.py create mode 100644 src/bootstrap_repository/deployment/lambda_codebase/initial_commit/pipelines_repository/adf-build/tests/stubs/__init__.py create mode 100644 src/bootstrap_repository/deployment/lambda_codebase/initial_commit/pipelines_repository/adf-build/tests/stubs/account_name1_eu-west-1.json create mode 100644 src/bootstrap_repository/deployment/lambda_codebase/initial_commit/pipelines_repository/adf-build/tests/stubs/stub_cfn_global.json create mode 100644 src/bootstrap_repository/deployment/lambda_codebase/initial_commit/pipelines_repository/adf-build/tests/stubs/stub_deployment_map.yml create mode 100644 src/bootstrap_repository/deployment/lambda_codebase/initial_commit/pipelines_repository/adf-build/tests/stubs/stub_target.py create mode 100644 src/bootstrap_repository/deployment/lambda_codebase/initial_commit/pipelines_repository/adf-build/tests/test_deployment_map.py create mode 100644 src/bootstrap_repository/deployment/lambda_codebase/initial_commit/pipelines_repository/adf-build/tests/test_generate_params.py create mode 100644 src/bootstrap_repository/deployment/lambda_codebase/initial_commit/pipelines_repository/adf-build/tests/test_pipeline.py create mode 100644 
src/bootstrap_repository/deployment/lambda_codebase/initial_commit/pipelines_repository/adf-build/tests/test_target.py create mode 100644 src/bootstrap_repository/deployment/lambda_codebase/initial_commit/pipelines_repository/example-deployment_map.yml create mode 100644 src/bootstrap_repository/deployment/lambda_codebase/initial_commit/pipelines_repository/pipeline_types/cc-buildonly.yml.j2 create mode 100644 src/bootstrap_repository/deployment/lambda_codebase/initial_commit/pipelines_repository/pipeline_types/cc-cloudformation.yml.j2 create mode 100644 src/bootstrap_repository/deployment/lambda_codebase/initial_commit/pipelines_repository/pipeline_types/cc-s3.yml.j2 create mode 100644 src/bootstrap_repository/deployment/lambda_codebase/initial_commit/pipelines_repository/pipeline_types/cc-service-catalog.yml.j2 create mode 100644 src/bootstrap_repository/deployment/lambda_codebase/initial_commit/pipelines_repository/pipeline_types/github-cloudformation.yml.j2 create mode 100644 src/bootstrap_repository/deployment/lambda_codebase/initial_commit/pipelines_repository/pytest.ini create mode 100644 src/bootstrap_repository/deployment/lambda_codebase/initial_commit/requirements.txt create mode 100644 src/bootstrap_repository/deployment/lambda_codebase/pytest.ini create mode 100644 src/bootstrap_repository/deployment/lambda_codebase/requirements.txt diff --git a/.gitignore b/.gitignore index 0182e121c..037f7e772 100644 --- a/.gitignore +++ b/.gitignore @@ -23,6 +23,7 @@ outfile *.so # Distribution / packaging +package.yaml aws_deployment_framework.egg-info .Python develop-eggs/ diff --git a/.pylintrc b/.pylintrc index d9f46f41f..3793c0aeb 100644 --- a/.pylintrc +++ b/.pylintrc @@ -221,7 +221,8 @@ notes=FIXME,XXX [SIMILARITIES] # Minimum lines number of a similarity. -min-similarity-lines=6 +# Temp 500 until we merge initial_commit into shared codebase. +min-similarity-lines=500 # Ignore comments when computing similarities. 
ignore-comments=yes diff --git a/Makefile b/Makefile index 0501b7901..02dd3692b 100644 --- a/Makefile +++ b/Makefile @@ -5,9 +5,7 @@ test: # Run unit tests pytest src/initial/ -vvv -s -c src/initial/pytest.ini pytest src/bootstrap_repository/ -vvv -s -c src/bootstrap_repository/pytest.ini - pytest src/bootstrap_repository/deployment/lambda_codebase -vvv -s -c src/bootstrap_repository/pytest.ini - pytest src/pipelines_repository/ -vvv -s -c src/pipelines_repository/pytest.ini - + pytest src/bootstrap_repository/deployment/lambda_codebase/initial_commit/pipelines_repository -vvv -s -c src/bootstrap_repository/deployment/lambda_codebase/initial_commit/pipelines_repository/pytest.ini lint: # Linter performs static analysis to catch latent bugs find src/ -iname "*.py" | xargs pylint --rcfile .pylintrc diff --git a/src/bootstrap_repository/adf-build/config.py b/src/bootstrap_repository/adf-build/config.py index 5b2ad3a88..9e8bbfb3c 100644 --- a/src/bootstrap_repository/adf-build/config.py +++ b/src/bootstrap_repository/adf-build/config.py @@ -57,7 +57,7 @@ def _validate(self): self.config.get('main-notification-endpoint') ): raise InvalidConfigError( - 'adf_config.yml is missing required properties. ' + 'adfconfig.yml is missing required properties. ' 'Please see the documentation.' 
) diff --git a/src/bootstrap_repository/adf-build/main.py b/src/bootstrap_repository/adf-build/main.py index d4f7bd4a0..ca9e3a8d5 100644 --- a/src/bootstrap_repository/adf-build/main.py +++ b/src/bootstrap_repository/adf-build/main.py @@ -116,6 +116,9 @@ def prepare_deployment_account(sts, deployment_account_id, config): deployment_account_parameter_store.put_parameter( 'deployment_account_bucket', DEPLOYMENT_ACCOUNT_S3_BUCKET_NAME ) + deployment_account_parameter_store.put_parameter( + 'adf_version', os.environ["ADF_VERSION"] + ) if '@' not in config.notification_endpoint: config.notification_channel = config.notification_endpoint config.notification_endpoint = "arn:aws:lambda:{0}:{1}:function:SendSlackNotification".format( diff --git a/src/bootstrap_repository/adf-build/requirements.txt b/src/bootstrap_repository/adf-build/requirements.txt index cba7cab75..8586c5cae 100644 --- a/src/bootstrap_repository/adf-build/requirements.txt +++ b/src/bootstrap_repository/adf-build/requirements.txt @@ -5,4 +5,5 @@ pytest==3.0.7 mock==2.0.0 pyyaml>=5.1 astroid==2.1.0 -jinja2>=2.10.1 \ No newline at end of file +jinja2>=2.10.1 +aws-sam-cli==0.16.1 \ No newline at end of file diff --git a/src/bootstrap_repository/deployment/global.yml b/src/bootstrap_repository/deployment/global.yml index f788728ac..6e68877f0 100644 --- a/src/bootstrap_repository/deployment/global.yml +++ b/src/bootstrap_repository/deployment/global.yml @@ -5,6 +5,9 @@ AWSTemplateFormatVersion: '2010-09-09' Transform: 'AWS::Serverless-2016-10-31' Description: ADF CloudFormation Template (Global) for Deployment Account Parameters: + ADFVersion: + Type : 'AWS::SSM::Parameter::Value' + Default: adf_version MasterAccountId: Type : 'AWS::SSM::Parameter::Value' Default: master_account_id @@ -70,34 +73,34 @@ Resources: Principal: AWS: !Sub arn:aws:iam::${AWS::AccountId}:root Action: + - "kms:CancelKeyDeletion" - "kms:Create*" + - "kms:Decrypt" + - "kms:Delete*" - "kms:Describe*" + - "kms:DescribeKey" + - 
"kms:Disable*" - "kms:Enable*" - "kms:Encrypt" - - "kms:Decrypt" + - "kms:GenerateDataKey*" + - "kms:Get*" - "kms:List*" - "kms:Put*" - - "kms:Update*" - - "kms:Revoke*" - - "kms:Disable*" - "kms:ReEncrypt*" - - "kms:GenerateDataKey*" - - "kms:DescribeKey" - - "kms:Get*" - - "kms:Delete*" + - "kms:Revoke*" - "kms:ScheduleKeyDeletion" - - "kms:CancelKeyDeletion" + - "kms:Update*" Resource: "*" - Sid: Allow use of the key Effect: Allow Principal: AWS: "*" Action: - - kms:Encrypt - kms:Decrypt - - kms:ReEncrypt* - - kms:GenerateDataKey* - kms:DescribeKey + - kms:Encrypt + - kms:GenerateDataKey* + - kms:ReEncrypt* Resource: "*" Condition: StringEquals: @@ -140,21 +143,21 @@ Resources: - Effect: Allow Sid: "S3" Action: - - s3:PutObject - - s3:GetBucketPolicy - s3:Get* + - s3:GetBucketPolicy - s3:List* + - s3:PutObject Resource: - !Join ['',['arn:aws:s3:::',!Ref PipelineBucket, '/*']] - !Join ['',['arn:aws:s3:::',!Ref PipelineBucket]] - Effect: Allow Sid: "KMS" Action: - - kms:Encrypt - kms:Decrypt - - kms:ReEncrypt* - - kms:GenerateDataKey* - kms:DescribeKey + - kms:Encrypt + - kms:GenerateDataKey* + - kms:ReEncrypt* Resource: !GetAtt KMSKey.Arn - Effect: Allow Action: @@ -169,44 +172,44 @@ Resources: aws:PrincipalOrgID: !Ref OrganizationId - Effect: Allow Action: - - "logs:CreateLogGroup" - - "logs:CreateLogStream" - - "logs:PutLogEvents" - - "codepipeline:*" - "cloudformation:*" - - "ecr:*" - - "s3:Get*" - - "s3:List*" - - "s3:Put*" - "codebuild:*" - "codedeploy:*" - - "ssm:GetParameter" - - "ssm:GetParameters" - - "ssm:GetParametersByPath" - - "ssm:PutParameter" - - "ssm:DeleteParameter" - - "sns:*" - - "iam:PassRole" - - "iam:PutRolePolicy" - - "iam:GetRole" - - "iam:CreateRole" + - "codepipeline:*" + - "ecr:*" + - "events:DeleteRule" + - "events:DescribeRule" + - "events:PutRule" + - "events:PutTargets" + - "events:RemoveTargets" - "iam:CreatePolicy" - "iam:CreatePolicy" - - "iam:DeleteRolePolicy" + - "iam:CreateRole" - "iam:DeleteRole" + - "iam:DeleteRolePolicy" + 
- "iam:GetRole" + - "iam:PassRole" + - "iam:PutRolePolicy" - "lambda:AddPermission" - "lambda:CreateFunction" - - "lambda:GetFunctionConfiguration" - "lambda:DeleteFunction" - "lambda:GetFunction" + - "lambda:GetFunctionConfiguration" - "lambda:RemovePermission" - "lambda:UpdateFunctionCode" - "lambda:UpdateFunctionConfiguration" - - "events:PutRule" - - "events:RemoveTargets" - - "events:DeleteRule" - - "events:DescribeRule" - - "events:PutTargets" + - "logs:CreateLogGroup" + - "logs:CreateLogStream" + - "logs:PutLogEvents" + - "s3:Get*" + - "s3:List*" + - "s3:Put*" + - "sns:*" + - "ssm:DeleteParameter" + - "ssm:GetParameter" + - "ssm:GetParameters" + - "ssm:GetParametersByPath" + - "ssm:PutParameter" Resource: "*" Roles: - !Ref CodeBuildRole @@ -241,19 +244,19 @@ Resources: Sid: "CloudFormation" Action: - cloudformation:* + - iam:PassRole - s3:Get* - - s3:Put* - s3:List* - - iam:PassRole + - s3:Put* Resource: "*" - Effect: Allow Sid: "KMS" Action: - - kms:Encrypt - kms:Decrypt - - kms:ReEncrypt* - - kms:GenerateDataKey* - kms:DescribeKey + - kms:Encrypt + - kms:GenerateDataKey* + - kms:ReEncrypt* Resource: !GetAtt KMSKey.Arn Roles: - !Ref CloudFormationRole @@ -286,53 +289,53 @@ Resources: - Effect: Allow Sid: "CloudFormation" Action: - - "sqs:*" - - "s3:Get*" - - "s3:Put*" - - "s3:List*" - - "s3:Create*" - - "logs:*" - - "ecr:*" + - "apigateway:*" + - "application-autoscaling:*" + - "autoscaling:*" + - "cloudformation:*" + - "cloudtrail:*" - "cloudwatch:*" - - "elasticfilesystem:*" + - "codebuild:*" + - "codecommit:*" + - "codedeploy:*" + - "codepipeline:*" - "cognito-identity:*" + - "config:*" + - "datapipeline:*" + - "dax:*" - "dynamodb:*" + - "ec2:*" + - "ecr:*" + - "elasticbeanstalk:*" + - "elasticfilesystem:*" + - "elasticloadbalancing:*" + - "es:*" - "events:*" - "iam:*" - - "cloudtrail:*" - - "config:*" - - "states:*" + - "iot:*" - "kinesis:*" - - "lambda:*" - - "elasticloadbalancing:*" - - "sns:*" - - "tag:*" - - "kms:List*" - - "kms:Update*" - 
"kms:Create*" - - "kms:Encrypt" - "kms:Decrypt" - - "kms:ReEncrypt*" - - "kms:GenerateDataKey*" - "kms:DescribeKey" - - "ec2:*" - - "iot:*" - - "xray:*" - - "dax:*" - - "application-autoscaling:*" - - "datapipeline:*" - - "apigateway:*" - - "codepipeline:*" - - "codecommit:*" - - "codedeploy:*" - - "codebuild:*" - - "elasticbeanstalk:*" - - "ssm:GetParameters" - - "ssm:GetParameter" + - "kms:Encrypt" + - "kms:GenerateDataKey*" + - "kms:List*" + - "kms:ReEncrypt*" + - "kms:Update*" + - "lambda:*" + - "logs:*" - "opsworks:*" - - "cloudformation:*" - - "es:*" - - "autoscaling:*" + - "s3:Create*" + - "s3:Get*" + - "s3:List*" + - "s3:Put*" + - "sns:*" + - "sqs:*" + - "ssm:GetParameter" + - "ssm:GetParameters" + - "states:*" + - "tag:*" + - "xray:*" Resource: "*" - Effect: Allow Sid: "KMS" @@ -351,8 +354,8 @@ Resources: - s3:GetObject - s3:ListBucket Resource: - - !Join ['',['arn:aws:s3:::',!Ref PipelineBucket, '/*']] - - !Join ['',['arn:aws:s3:::',!Ref PipelineBucket]] + - !Join ['',['arn:aws:s3:::',!Ref PipelineBucket, '/*']] + - !Join ['',['arn:aws:s3:::',!Ref PipelineBucket]] Roles: - !Ref CloudFormationDeploymentRole CodeBuildPolicy: @@ -393,6 +396,8 @@ Resources: EnvironmentVariables: - Name: PYTHONPATH Value: './adf-build/shared/python' + - Name: ACCOUNT_ID + Value: !Ref AWS::AccountId - Name: MASTER_ACCOUNT_ID Value: !Ref MasterAccountId - Name: S3_BUCKET_NAME @@ -505,8 +510,8 @@ Resources: - Effect: Allow Principal: Service: - - events.amazonaws.com - codecommit.amazonaws.com + - events.amazonaws.com - states.amazonaws.com Action: sns:Publish Resource: "*" @@ -522,9 +527,9 @@ Resources: - Effect: Allow Principal: Service: - - codepipeline.amazonaws.com - - codedeploy.amazonaws.com - cloudformation.amazonaws.com + - codedeploy.amazonaws.com + - codepipeline.amazonaws.com - s3.amazonaws.com Action: - sts:AssumeRole @@ -544,65 +549,65 @@ Resources: - Effect: Allow Sid: "CodePipeline" Action: - - codepipeline:* - - iam:ListRoles - - cloudformation:Describe* - 
cloudFormation:List* - - codecommit:* - - codedeploy:* - - codebuild:BatchGetBuilds - - codebuild:StartBuild - - cloudformation:CreateStack - - cloudformation:DeleteStack - - cloudformation:DescribeStacks - - cloudformation:UpdateStack - cloudformation:CreateChangeSet + - cloudformation:CreateStack - cloudformation:DeleteChangeSet + - cloudformation:DeleteStack + - cloudformation:Describe* - cloudformation:DescribeChangeSet + - cloudformation:DescribeStacks - cloudformation:ExecuteChangeSet - cloudformation:SetStackPolicy + - cloudformation:UpdateStack - cloudformation:ValidateTemplate + - codebuild:BatchGetBuilds + - codebuild:StartBuild + - codecommit:* + - codedeploy:* + - codepipeline:* + - iam:CreatePolicy + - iam:CreateRole + - iam:DeleteRole + - iam:DeleteRolePolicy + - iam:GetRole + - iam:ListRoles - iam:PassRole + - iam:PutRolePolicy + - lambda:AddPermission + - lambda:CreateFunction + - lambda:DeleteFunction + - lambda:GetFunction + - lambda:GetFunctionConfiguration + - lambda:InvokeFunction + - lambda:ListFunctions + - lambda:RemovePermission + - lambda:UpdateFunctionCode + - lambda:UpdateFunctionConfiguration - s3:GetObjectVersion - s3:GetObjectVersionAcl - s3:GetObjectVersionTagging - s3:GetReplicationConfiguration - s3:ListBucket - - servicecatalog:ListProvisioningArtifacts + - s3:ReplicateDelete + - s3:ReplicateObject + - s3:ReplicateTags - servicecatalog:CreateProvisioningArtifact - - servicecatalog:DescribeProvisioningArtifact - servicecatalog:DeleteProvisioningArtifact + - servicecatalog:DescribeProvisioningArtifact + - servicecatalog:ListProvisioningArtifacts - servicecatalog:UpdateProduct - - s3:ReplicateObject - - s3:ReplicateDelete - - s3:ReplicateTags - - lambda:AddPermission - - lambda:CreateFunction - - lambda:DeleteFunction - - lambda:InvokeFunction - - lambda:RemovePermission - - lambda:UpdateFunctionCode - - lambda:GetFunctionConfiguration - - lambda:GetFunction - - lambda:ListFunctions - - lambda:UpdateFunctionConfiguration - - 
iam:CreateRole - - iam:CreatePolicy - - iam:GetRole - - iam:DeleteRole - - iam:PutRolePolicy - - iam:DeleteRolePolicy - sns:Publish Resource: - "*" - Effect: Allow Sid: "KMS" Action: - - kms:Encrypt - kms:Decrypt - - kms:ReEncrypt* - - kms:GenerateDataKey* - kms:DescribeKey + - kms:Encrypt + - kms:GenerateDataKey* + - kms:ReEncrypt* Resource: !GetAtt KMSKey.Arn - Action: - sts:AssumeRole @@ -618,10 +623,10 @@ Resources: Sid: "S3" Action: - s3:Get* - - s3:Put* - s3:List* - - s3:ReplicateObject + - s3:Put* - s3:ReplicateDelete + - s3:ReplicateObject - s3:ReplicateTags Resource: - !Sub arn:aws:s3:::${PipelineBucket} @@ -661,7 +666,7 @@ Resources: Environment: Variables: ADF_PIPELINE_PREFIX: !Ref PipelinePrefix - Runtime: python3.6 + Runtime: python3.7 Timeout: 10 EnableCrossAccountAccess: Type: "AWS::Serverless::Function" @@ -678,7 +683,7 @@ Resources: FunctionName: UpdateCrossAccountIAM Handler: enable_cross_account_access.lambda_handler Role: !GetAtt LambdaRole.Arn - Runtime: python3.6 + Runtime: python3.7 Timeout: 900 CheckPipelineStatus: Type: "AWS::Serverless::Function" @@ -694,7 +699,7 @@ Resources: FunctionName: CheckPipelineStatus Handler: update_pipelines.lambda_handler Role: !GetAtt LambdaRole.Arn - Runtime: python3.6 + Runtime: python3.7 Timeout: 120 LambdaRole: Type: "AWS::IAM::Role" @@ -722,27 +727,27 @@ Resources: Resource: "*" - Effect: "Allow" Action: - - "kms:Encrypt" + - "codepipeline:GetPipelineState" + - "codepipeline:ListPipelines" + - "codepipeline:PutJobFailureResult" + - "codepipeline:PutJobSuccessResult" + - "codepipeline:StartPipelineExecution" + - "iam:GetRolePolicy" + - "iam:PutRolePolicy" - "kms:Decrypt" - - "kms:ReEncrypt*" - - "kms:GenerateDataKey*" - "kms:Describe*" - - "kms:List*" + - "kms:Encrypt" + - "kms:GenerateDataKey*" - "kms:Get*" + - "kms:List*" - "kms:PutKeyPolicy" - - "iam:GetRolePolicy" - - "iam:PutRolePolicy" - - "ssm:GetParameters" - - "ssm:GetParameter" + - "kms:ReEncrypt*" - "lambda:GetLayerVersion" - - 
"codepipeline:PutJobSuccessResult" - - "codepipeline:PutJobFailureResult" - - "codepipeline:StartPipelineExecution" - - "codepipeline:ListPipelines" - - "codepipeline:GetPipelineState" - "s3:Get*" - "s3:Put*" - "sns:Publish" + - "ssm:GetParameter" + - "ssm:GetParameters" Resource: "*" - Effect: "Allow" Action: @@ -773,10 +778,10 @@ Resources: - Effect: "Allow" Principal: Service: - - states.amazonaws.com - events.amazonaws.com - lambda.amazonaws.com - sns.amazonaws.com + - states.amazonaws.com Action: "sts:AssumeRole" Path: "/" Policies: @@ -873,7 +878,39 @@ Resources: } } RoleArn: !GetAtt StatesExecutionRole.Arn + InitialCommit: + Type: Custom::InitialCommit + Properties: + ServiceToken: !GetAtt InitialCommitHandler.Arn + RepositoryArn: !GetAtt CodeCommitRepository.Arn + Version: !Ref ADFVersion + DirectoryName: pipelines_repository + InitialCommitHandler: + Type: AWS::Serverless::Function + Properties: + Handler: handler.lambda_handler + CodeUri: lambda_codebase/initial_commit + Description: "ADF Lambda Function - PipelinesCreateInitialCommitFunction" + Policies: + - Version: "2012-10-17" + Statement: + - Effect: Allow + Action: + - codecommit:CreateBranch + - codecommit:CreateCommit + - codecommit:CreatePullRequest + - codecommit:DeleteBranch + - codecommit:GetBranch + - codecommit:GetDifferences + Resource: !GetAtt CodeCommitRepository.Arn + FunctionName: PipelinesCreateInitialCommitFunction + Runtime: python3.7 + Timeout: 300 Outputs: + ADFVersionNumber: + Value: !Ref ADFVersion + Export: + Name: "ADFVersionNumber" SlackLambdaArn: Value: !GetAtt SendSlackNotification.Arn Export: @@ -916,4 +953,4 @@ Outputs: Description: "The CodePipeline Arn" Value: !GetAtt CodePipelineRole.Arn Export: - Name: "CodePipelineRoleArn" \ No newline at end of file + Name: "CodePipelineRoleArn" diff --git a/src/bootstrap_repository/deployment/lambda_codebase/__init__.py b/src/bootstrap_repository/deployment/lambda_codebase/__init__.py index e69de29bb..c90a9b228 100644 --- 
a/src/bootstrap_repository/deployment/lambda_codebase/__init__.py +++ b/src/bootstrap_repository/deployment/lambda_codebase/__init__.py @@ -0,0 +1,2 @@ +# Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: MIT-0 diff --git a/src/bootstrap_repository/deployment/lambda_codebase/initial_commit/handler.py b/src/bootstrap_repository/deployment/lambda_codebase/initial_commit/handler.py new file mode 100644 index 000000000..5e5e4b058 --- /dev/null +++ b/src/bootstrap_repository/deployment/lambda_codebase/initial_commit/handler.py @@ -0,0 +1,27 @@ +""" +The Initial Commit Handler that is called when ADF is installed to commit the initial pipelines repository content +""" + +try: + from initial_commit import lambda_handler # pylint: disable=unused-import +except Exception as err: # pylint: disable=broad-except + from urllib.request import Request, urlopen + import json + + def lambda_handler(event, _context, prior_error=err): + response = dict( + LogicalResourceId=event["LogicalResourceId"], + PhysicalResourceId=event.get("PhysicalResourceId", "NOT_YET_CREATED"), + Status="FAILED", + RequestId=event["RequestId"], + StackId=event["StackId"], + Reason=str(prior_error), + ) + urlopen( + Request( + event["ResponseURL"], + data=json.dumps(response).encode(), + headers={"content-type": ""}, + method="PUT", + ) + ) diff --git a/src/bootstrap_repository/deployment/lambda_codebase/initial_commit/initial_commit.py b/src/bootstrap_repository/deployment/lambda_codebase/initial_commit/initial_commit.py new file mode 100644 index 000000000..1d72f42cb --- /dev/null +++ b/src/bootstrap_repository/deployment/lambda_codebase/initial_commit/initial_commit.py @@ -0,0 +1,258 @@ +""" +The Initial Commit main that is called when ADF is installed to commit the initial pipelines repository content +""" + +from typing import Mapping, Optional, Union, List, Dict, Any, Tuple +from dataclasses import dataclass +from enum import Enum +from pathlib import 
Path +import os +import boto3 +import jinja2 +from cfn_custom_resource import ( # pylint: disable=unused-import + lambda_handler, + create, + update, + delete, +) + + +PhysicalResourceId = str +Data = Mapping[str, str] + +HERE = Path(__file__).parent +NOT_YET_CREATED = "NOT_YET_CREATED" +CC_CLIENT = boto3.client("codecommit") + + +@dataclass +class CustomResourceProperties: + ServiceToken: str + RepositoryArn: str + DirectoryName: str + Version: str + DeploymentAccountRegion: Optional[str] = None + TargetRegions: Optional[List[str]] = None + NotificationEndpoint: Optional[str] = None + NotificationEndpointType: Optional[str] = None + + def __post_init__(self): + if self.NotificationEndpoint: + self.NotificationEndpointType = ( + "email" + if "@" + in self.NotificationEndpoint # pylint:disable=unsupported-membership-test + else "slack" + ) + + +@dataclass +class Event: + RequestType: str + ServiceToken: str + ResponseURL: str + StackId: str + RequestId: str + ResourceType: str + LogicalResourceId: str + ResourceProperties: CustomResourceProperties + + def __post_init__(self): + self.ResourceProperties = CustomResourceProperties( + **self.ResourceProperties # pylint: disable=not-a-mapping + ) + + +class FileMode(Enum): + EXECUTABLE = "EXECUTABLE" + NORMAL = "NORMAL" + SYMLINK = "SYMLINK" + + +@dataclass +class FileToCommit: + filePath: str + fileMode: FileMode + fileContent: bytes + + def as_dict(self) -> Dict[str, Union[str, bytes]]: + return { + "filePath": self.filePath, + "fileMode": self.fileMode.value, + "fileContent": self.fileContent, + } + +@dataclass +class FileToDelete: + filePath: str + + def as_dict(self) -> Dict[str, Union[str, bytes]]: + return { + "filePath": self.filePath + } + +@dataclass +class CreateEvent(Event): + pass + + +@dataclass +class UpdateEvent(Event): + PhysicalResourceId: str + OldResourceProperties: CustomResourceProperties + + def __post_init__(self): + self.ResourceProperties = CustomResourceProperties( + **self.ResourceProperties # 
pylint: disable=not-a-mapping + ) + self.OldResourceProperties = CustomResourceProperties( + **self.OldResourceProperties # pylint: disable=not-a-mapping + ) + +@create() +def create_(event: Mapping[str, Any], _context: Any) -> Tuple[PhysicalResourceId, Data]: + create_event = CreateEvent(**event) + repo_name = repo_arn_to_name(create_event.ResourceProperties.RepositoryArn) + directory = create_event.ResourceProperties.DirectoryName + try: + CC_CLIENT.get_branch( + repositoryName=repo_name, + branchName="master", + ) + return event.get("PhysicalResourceId"), {} + except CC_CLIENT.exceptions.BranchDoesNotExistException: + files_to_commit = get_files_to_commit(directory) + if directory == "bootstrap_repository": + adf_config = create_adf_config_file(create_event.ResourceProperties) + files_to_commit.append(adf_config) + commit_response = CC_CLIENT.create_commit( + repositoryName=repo_name, + branchName="master", + authorName="AWS ADF Builders Team", + email="adf-builders@amazon.com", + commitMessage="Initial automated commit", + putFiles=[f.as_dict() for f in files_to_commit] + ) + return commit_response["commitId"], {} + +@update() +def update_(event: Mapping[str, Any], _context: Any) -> Tuple[PhysicalResourceId, Data]: + update_event = UpdateEvent(**event) + directory = update_event.ResourceProperties.DirectoryName + repo_name = repo_arn_to_name(update_event.ResourceProperties.RepositoryArn) + files_to_delete = get_files_to_delete(repo_name) + files_to_commit = get_files_to_commit(directory) + + commit_id = CC_CLIENT.get_branch( + repositoryName=repo_name, + branchName="master", + )["branch"]["commitId"] + CC_CLIENT.create_branch( + repositoryName=repo_name, + branchName=update_event.ResourceProperties.Version, + commitId=commit_id + ) + try: + CC_CLIENT.create_commit( + repositoryName=repo_name, + branchName=update_event.ResourceProperties.Version, + parentCommitId=commit_id, + authorName='ADF Update PR', + email='adf-builders@amazon.com', + commitMessage='ADF {0} 
Automated Update PR'.format(update_event.ResourceProperties.Version), + putFiles=[f.as_dict() for f in files_to_commit], + deleteFiles=[f.as_dict() for f in files_to_delete] + ) + CC_CLIENT.create_pull_request( + title='ADF {0} Automated Update PR'.format(update_event.ResourceProperties.Version), + description='ADF Version {0} from https://github.com/awslabs/aws-deployment-framework'.format(update_event.ResourceProperties.Version), + targets=[ + { + 'repositoryName': repo_name, + 'sourceReference': update_event.ResourceProperties.Version, + 'destinationReference': 'master' + }, + ] + ) + except (CC_CLIENT.exceptions.FileEntryRequiredException, CC_CLIENT.exceptions.NoChangeException): + print("No changes require commiting") + CC_CLIENT.delete_branch( + repositoryName=repo_name, + branchName=update_event.ResourceProperties.Version + ) + + return event["PhysicalResourceId"], {} + + +@delete() +def delete_(_event, _context): + pass + + +def repo_arn_to_name(repo_arn: str) -> str: + return repo_arn.split(":")[-1] + +def get_files_to_delete(repo_name: str) -> List[FileToDelete]: + differences = CC_CLIENT.get_differences( + repositoryName=repo_name, + afterCommitSpecifier='HEAD' + )['differences'] + + file_paths = [ + Path(file['afterBlob']['path']) + for file in differences + if 'adfconfig.yml' not in file['afterBlob']['path'] + and 'scp.json' not in file['afterBlob']['path'] + and 'global.yml' not in file['afterBlob']['path'] + and 'regional.yml' not in file['afterBlob']['path'] + and 'deployment_map.yml' not in file['afterBlob']['path'] + and '.DS_Store' not in file['afterBlob']['path'] + ] + + # 30: trimming off /var/task/pipeline_repository so we can compare correctly + blobs = [str(filename)[30:] for filename in Path('/var/task/pipeline_repository/').rglob('*')] + + return [ + FileToDelete( + str(entry) + ) + for entry in file_paths + if str(entry) not in blobs + and not entry.is_dir() + ] + +def get_files_to_commit(directoryName: str) -> List[FileToCommit]: + path 
= HERE / directoryName + return [ + FileToCommit( + str(get_relative_name(entry, directoryName)), + FileMode.NORMAL if not os.access(entry, os.X_OK) else FileMode.EXECUTABLE, + entry.read_bytes(), + ) + for entry in path.glob("**/*") + if not entry.is_dir() + ] + + +def get_relative_name(path: Path, directoryName: str) -> Path: + """ + Search for the last occurance of in and return only the trailing part of + + >>> get_relative_name(Path('/foo/test/bar/test/xyz/abc.py') ,'test') + Path('xyz/abc.py') + """ + index = list(reversed(path.parts)).index(directoryName) + return Path(*path.parts[-index:]) + + +def create_adf_config_file(props: CustomResourceProperties) -> FileToCommit: + template = HERE / "adfconfig.yml.j2" + adf_config = ( + jinja2.Template(template.read_text(), undefined=jinja2.StrictUndefined) + .render(vars(props)) + .encode() + ) + + with open("/tmp/adfconfig.yml", "wb") as f: + f.write(adf_config) + return FileToCommit("adfconfig.yml", FileMode.NORMAL, adf_config) diff --git a/src/bootstrap_repository/deployment/lambda_codebase/initial_commit/pipelines_repository/.gitignore b/src/bootstrap_repository/deployment/lambda_codebase/initial_commit/pipelines_repository/.gitignore new file mode 100644 index 000000000..dd5ee5976 --- /dev/null +++ b/src/bootstrap_repository/deployment/lambda_codebase/initial_commit/pipelines_repository/.gitignore @@ -0,0 +1,130 @@ +.vscode +.pyc +.zip +.DS_Store +.pylintrc +cleanup.py +config +config.sec +config.bak +policy.json +pipeline.yml +template-sam.yaml +template-deploy.yml +master-deploy.yml +.pytest_cache +pipelines/ + +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, 
so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +.hypothesis/ +.pytest_cache/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +.python-version + +# celery beat schedule file +celerybeat-schedule + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ \ No newline at end of file diff --git a/src/bootstrap_repository/deployment/lambda_codebase/initial_commit/pipelines_repository/README.md b/src/bootstrap_repository/deployment/lambda_codebase/initial_commit/pipelines_repository/README.md new file mode 100644 index 000000000..50019db8e --- /dev/null +++ b/src/bootstrap_repository/deployment/lambda_codebase/initial_commit/pipelines_repository/README.md @@ -0,0 +1,3 @@ +## Pipelines Repository + +This repository is where you define pipeline type definitions *(in the pipeline_types)* folder and also where you manage your `deployment_map.yml` file from. 
\ No newline at end of file diff --git a/src/bootstrap_repository/deployment/lambda_codebase/initial_commit/pipelines_repository/adf-build/LICENSE.txt b/src/bootstrap_repository/deployment/lambda_codebase/initial_commit/pipelines_repository/adf-build/LICENSE.txt new file mode 100644 index 000000000..f19aaa6d0 --- /dev/null +++ b/src/bootstrap_repository/deployment/lambda_codebase/initial_commit/pipelines_repository/adf-build/LICENSE.txt @@ -0,0 +1,14 @@ +MIT No Attribution + +Permission is hereby granted, free of charge, to any person obtaining a copy of this +software and associated documentation files (the "Software"), to deal in the Software +without restriction, including without limitation the rights to use, copy, modify, +merge, publish, distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, +INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A +PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. \ No newline at end of file diff --git a/src/bootstrap_repository/deployment/lambda_codebase/initial_commit/pipelines_repository/adf-build/__init__.py b/src/bootstrap_repository/deployment/lambda_codebase/initial_commit/pipelines_repository/adf-build/__init__.py new file mode 100644 index 000000000..c90a9b228 --- /dev/null +++ b/src/bootstrap_repository/deployment/lambda_codebase/initial_commit/pipelines_repository/adf-build/__init__.py @@ -0,0 +1,2 @@ +# Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: MIT-0 diff --git a/src/bootstrap_repository/deployment/lambda_codebase/initial_commit/pipelines_repository/adf-build/deployment_map.py b/src/bootstrap_repository/deployment/lambda_codebase/initial_commit/pipelines_repository/adf-build/deployment_map.py new file mode 100644 index 000000000..aaf2cd276 --- /dev/null +++ b/src/bootstrap_repository/deployment/lambda_codebase/initial_commit/pipelines_repository/adf-build/deployment_map.py @@ -0,0 +1,98 @@ +# Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: MIT-0 + +""" +Module used for working with the Deployment Map (yml) file. +""" + +import os +import yaml +import boto3 + +from cloudformation import CloudFormation +from errors import InvalidDeploymentMapError +from logger import configure_logger +LOGGER = configure_logger(__name__) + + +class DeploymentMap: + def __init__( + self, + parameter_store, + pipeline_name_prefix, + map_path=None + ): + self.map_path = map_path or 'deployment_map.yml' + self.parameter_store = parameter_store + self.map_contents = self._get_deployment_map() + self.pipeline_name_prefix = pipeline_name_prefix + self.account_ou_names = {} + self._validate_deployment_map() + + def update_deployment_parameters(self, pipeline): + for account in pipeline.template_dictionary['targets']: + self.account_ou_names.update( + {item['name']: item['path'] for item in account if item['name'] != 'approval'} + ) + + # TODO Ensure this doesn't grow to reach max parameter store size (4092) + self.parameter_store.put_parameter( + "/deployment/{0}/account_ous".format( + pipeline.name + ), + str(self.account_ou_names) + ) + if pipeline.notification_endpoint: + self.parameter_store.put_parameter( + "/notification_endpoint/{0}".format( + pipeline.name + ), + str(pipeline.notification_endpoint) + ) + + def _get_deployment_map(self): + try: + with open(self.map_path, 'r') as stream: + return yaml.load(stream, Loader=yaml.FullLoader) + except 
FileNotFoundError: + LOGGER.exception('Cannot Create Deployment Pipelines as there ' + 'is no deployment_map.yml file in the repository. ' + 'If this is your first time using ADF please see read the user guide' + ) + raise + + def _validate_deployment_map(self): + """ + Validates the deployment map contains valid configuration + """ + try: + for pipeline in self.map_contents["pipelines"]: + for target in pipeline.get("targets", []): + if isinstance(target, dict): + # Prescriptive information on the error should be raised + assert target["path"] + except KeyError: + raise InvalidDeploymentMapError( + "Deployment Map target or regions specification is invalid" + ) + + def clean_stale_resources(self, name): + for parameter in self.parameter_store.fetch_parameters_by_path( + '/deployment/{0}/'.format(name)): + LOGGER.warning( + 'Removing Resources for %s', + parameter.get('Name')) + self.parameter_store.delete_parameter(parameter.get('Name')) + self._clean_stale_stacks(name) + + def _clean_stale_stacks(self, name): + cloudformation = CloudFormation( + region=os.environ['AWS_REGION'], + deployment_account_region=os.environ['AWS_REGION'], + role=boto3, + ) + + cloudformation.delete_stack("{0}-{1}".format( + self.pipeline_name_prefix, + name + )) diff --git a/src/bootstrap_repository/deployment/lambda_codebase/initial_commit/pipelines_repository/adf-build/generate_params.py b/src/bootstrap_repository/deployment/lambda_codebase/initial_commit/pipelines_repository/adf-build/generate_params.py new file mode 100644 index 000000000..30b27e6e9 --- /dev/null +++ b/src/bootstrap_repository/deployment/lambda_codebase/initial_commit/pipelines_repository/adf-build/generate_params.py @@ -0,0 +1,191 @@ +# Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: MIT-0 + +"""This file is pulled into CodeBuild containers + and used to build the parameters for cloudformation stacks based on + param files in the params folder +""" + +import json +import os +import ast +import boto3 + +from resolver import Resolver +from logger import configure_logger +from parameter_store import ParameterStore + +LOGGER = configure_logger(__name__) +DEPLOYMENT_ACCOUNT_REGION = os.environ.get("AWS_REGION", 'us-east-1') +PROJECT_NAME = os.environ.get('PROJECT_NAME') + + +class Parameters: + def __init__(self, build_name, parameter_store, directory=None): + self.cwd = directory or os.getcwd() + self._create_params_folder() + self.global_path = "params/global.json" + self.parameter_store = parameter_store + self.build_name = build_name + self.account_ous = ast.literal_eval( + parameter_store.fetch_parameter( + "/deployment/{0}/account_ous".format(self.build_name) + ) + ) + self.regions = ast.literal_eval( + parameter_store.fetch_parameter( + "/deployment/{0}/regions".format(self.build_name) + ) + ) + + def _create_params_folder(self): + try: + return os.mkdir('{0}/params'.format(self.cwd)) + except FileExistsError: + return None + + def create_parameter_files(self): + global_params = self._parse(self.global_path) + for acc, ou in self.account_ous.items(): + for region in self.regions: + for params in ["{0}_{1}.json".format(acc, region)]: + compare_params = self._compare( + self._parse("{0}/params/{1}.json".format(self.cwd, acc)), + self._parse("{0}/params/{1}".format(self.cwd, params)) + ) + + if not str(ou).isnumeric(): + # Compare account_region final to ou_region + compare_params = self._compare( + self._parse("{0}/params/{1}_{2}.json".format(self.cwd, ou, region)), + compare_params + ) + # Compare account_region final to ou + compare_params = self._compare( + self._parse("{0}/params/{1}.json".format(self.cwd, ou)), + compare_params + ) + # Compare account_region final to deployment_account_region + compare_params = 
self._compare( + self._parse("{0}/params/global_{1}.json".format(self.cwd, region)), + compare_params + ) + # Compare account_region final to global + compare_params = self._compare( + global_params, + compare_params + ) + + if compare_params is not None: + self._update_params(compare_params, params) + + def _parse(self, filename): # pylint: disable=R0201 + """ + Attempt to parse the parameters file and return he default + CloudFormation parameter base object if not found. Returning + Base CloudFormation Parameters here since if the user was using + Any other type (SC, ECS) they would require a parameter file (global.json) + and thus this would not fail. + """ + try: + with open(filename) as file: + return json.load(file) + except FileNotFoundError: + return {'Parameters': {}, 'Tags': {}} + + def _update_params(self, new_params, filename): + """ + Responsible for updating the parameters within the files themself + """ + with open("{0}/params/{1}".format(self.cwd, filename), 'w') as outfile: + json.dump(new_params, outfile) + + def _cfn_param_updater(self, param, comparison_parameters, stage_parameters): + """ + Generic CFN Updater method + """ + resolver = Resolver(self.parameter_store, stage_parameters, comparison_parameters) + + for key, value in comparison_parameters[param].items(): + if str(value).startswith('resolve:'): + if resolver.fetch_parameter_store_value(value, key, param): + continue + if str(value).startswith('import:'): + if resolver.fetch_stack_output(value, key, param): + continue + resolver.update_cfn(key, param) + + for key, value in stage_parameters[param].items(): + if str(value).startswith('resolve:'): + if resolver.fetch_parameter_store_value(value, key, param): + continue + if str(value).startswith('import:'): + if resolver.fetch_stack_output(value, key, param): + continue + + return resolver.__dict__.get('stage_parameters') + + def _compare_cfn(self, comparison_parameters, stage_parameters): + """ + Compares parameter files used for the 
CloudFormation deployment type + """ + if comparison_parameters.get('Parameters'): + stage_parameters = self._cfn_param_updater( + 'Parameters', comparison_parameters, stage_parameters + ) + if comparison_parameters.get('Tags'): + stage_parameters = self._cfn_param_updater( + 'Tags', comparison_parameters, stage_parameters + ) + + return stage_parameters + + def _sc_param_updater(self, comparison_parameters, stage_parameters): + """ + Compares parameter files used for the Service Catalog deployment type + """ + resolver = Resolver(self.parameter_store, stage_parameters, comparison_parameters) + + for key, value in comparison_parameters.items(): + if str(value).startswith('resolve:'): + if resolver.fetch_parameter_store_value(value, key): + continue + if str(value).startswith('import:'): + if resolver.fetch_stack_output(value, key): + continue + resolver.update_sc(key) + + for key, value in stage_parameters.items(): + if str(value).startswith('resolve:'): + if resolver.fetch_parameter_store_value(value, key): + continue + if str(value).startswith('import:'): + if resolver.fetch_stack_output(value, key): + continue + + return resolver.__dict__.get('stage_parameters') + + def _compare(self, comparison_parameters, stage_parameters): + """ + Determine the type of parameter file that should be compared + (currently only SC/CFN) + """ + if comparison_parameters.get('Parameters') or comparison_parameters.get('Tags'): + return self._compare_cfn(comparison_parameters, stage_parameters) + return self._sc_param_updater(comparison_parameters, stage_parameters) + + +def main(): + parameters = Parameters( + PROJECT_NAME, + ParameterStore( + DEPLOYMENT_ACCOUNT_REGION, + boto3 + ) + ) + parameters.create_parameter_files() + + + +if __name__ == '__main__': + main() diff --git a/src/bootstrap_repository/deployment/lambda_codebase/initial_commit/pipelines_repository/adf-build/generate_pipelines.py 
b/src/bootstrap_repository/deployment/lambda_codebase/initial_commit/pipelines_repository/adf-build/generate_pipelines.py new file mode 100644 index 000000000..4c6b74d0d --- /dev/null +++ b/src/bootstrap_repository/deployment/lambda_codebase/initial_commit/pipelines_repository/adf-build/generate_pipelines.py @@ -0,0 +1,153 @@ +# Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: MIT-0 + +"""This file is pulled into CodeBuild containers + and used to build the pipeline cloudformation stacks +""" + +import os +import boto3 + +from s3 import S3 +from pipeline import Pipeline +from target import Target, TargetStructure +from logger import configure_logger +from deployment_map import DeploymentMap +from cloudformation import CloudFormation +from organizations import Organizations +from sts import STS +from parameter_store import ParameterStore + +LOGGER = configure_logger(__name__) +DEPLOYMENT_ACCOUNT_REGION = os.environ.get("AWS_REGION", 'us-east-1') +DEPLOYMENT_ACCOUNT_ID = os.environ["ACCOUNT_ID"] +MASTER_ACCOUNT_ID = os.environ.get("MASTER_ACCOUNT_ID", 'us-east-1') +S3_BUCKET_NAME = os.environ.get("S3_BUCKET_NAME") +TARGET_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '..')) + +def clean(parameter_store, deployment_map): + """ + Function used to remove stale entries in Parameter Store and + Deployment Pipelines that are no longer in the Deployment Map + """ + current_pipeline_parameters = parameter_store.fetch_parameters_by_path( + '/deployment/') + + for parameter in current_pipeline_parameters: + name = parameter.get('Name').split('/')[-2] + if name not in [p.get('name') + for p in deployment_map.map_contents['pipelines']]: + deployment_map.clean_stale_resources(name) + + +def store_regional_parameter_config(pipeline, parameter_store): + """ + Responsible for storing the region information for specific + pipelines. 
These regions are defined in the deployment_map + either as top level regions for a pipeline or stage specific regions + """ + if pipeline.top_level_regions: + parameter_store.put_parameter( + "/deployment/{0}/regions".format( + pipeline.name + ), + str(list(set(pipeline.top_level_regions))) + ) + return + + parameter_store.put_parameter( + "/deployment/{0}/regions".format( + pipeline.name + ), + str(list(set(Pipeline.flatten_list(pipeline.stage_regions)))) + ) + +def upload_pipeline(s3, pipeline): + """ + Responsible for uploading the object (global.yml) to S3 + and returning the URL that can be referenced in the CloudFormation + create_stack call. + """ + s3_object_path = s3.put_object( + "pipelines/{0}/global.yml".format( + pipeline.name), "{0}/{1}/{2}/global.yml".format( + TARGET_DIR, + 'pipelines', + pipeline.name + ) + ) + return s3_object_path + + +def main(): #pylint: disable=R0915 + parameter_store = ParameterStore( + DEPLOYMENT_ACCOUNT_REGION, + boto3 + ) + deployment_map = DeploymentMap( + parameter_store, + os.environ["ADF_PIPELINE_PREFIX"] + ) + s3 = S3( + DEPLOYMENT_ACCOUNT_REGION, + S3_BUCKET_NAME + ) + sts = STS() + role = sts.assume_cross_account_role( + 'arn:aws:iam::{0}:role/{1}'.format( + MASTER_ACCOUNT_ID, + parameter_store.fetch_parameter('cross_account_access_role') + ), 'pipeline' + ) + + organizations = Organizations(role) + clean(parameter_store, deployment_map) + + for p in deployment_map.map_contents.get('pipelines'): + pipeline = Pipeline(p) + + for target in p.get('targets', []): + target_structure = TargetStructure(target) + for step in target_structure.target: + for path in step.get('path'): + regions = step.get( + 'regions', p.get( + 'regions', DEPLOYMENT_ACCOUNT_REGION)) + step_name = step.get('name') + pipeline.stage_regions.append(regions) + pipeline_target = Target( + path, regions, target_structure, organizations, step_name) + pipeline_target.fetch_accounts_for_target() + + pipeline.template_dictionary["targets"].append( + 
target_structure.account_list) + + if DEPLOYMENT_ACCOUNT_REGION not in regions: + pipeline.stage_regions.append(DEPLOYMENT_ACCOUNT_REGION) + + parameters = pipeline.generate_parameters() + pipeline.generate() + deployment_map.update_deployment_parameters(pipeline) + s3_object_path = upload_pipeline(s3, pipeline) + + store_regional_parameter_config(pipeline, parameter_store) + cloudformation = CloudFormation( + region=DEPLOYMENT_ACCOUNT_REGION, + deployment_account_region=DEPLOYMENT_ACCOUNT_REGION, + role=boto3, + template_url=s3_object_path, + parameters=parameters, + wait=True, + stack_name="{0}-{1}".format( + os.environ["ADF_PIPELINE_PREFIX"], + pipeline.name + ), + s3=None, + s3_key_path=None + ) + cloudformation.validate_template() + cloudformation.create_stack() + + +if __name__ == '__main__': + main() diff --git a/src/bootstrap_repository/deployment/lambda_codebase/initial_commit/pipelines_repository/adf-build/helpers/package_transform.sh b/src/bootstrap_repository/deployment/lambda_codebase/initial_commit/pipelines_repository/adf-build/helpers/package_transform.sh new file mode 100644 index 000000000..88140c817 --- /dev/null +++ b/src/bootstrap_repository/deployment/lambda_codebase/initial_commit/pipelines_repository/adf-build/helpers/package_transform.sh @@ -0,0 +1,31 @@ +#!/bin/bash + +set -e + +# This script will package all source code and send it to an S3 bucket in each region +# where the lambda needs to be deployed to. 
+# +# PROJECT_NAME is an environment variable that is passed to the CodeBuild Project +# CODEBUILD_SRC_DIR is an environment variable provided by CodeBuild + +pip install --upgrade awscli aws-sam-cli -q + +# Build our template and its potential dependancies +sam build + +# Get list of regions supported by this application +app_regions=`aws ssm get-parameters --names /deployment/$PROJECT_NAME/regions --with-decryption --output=text --query='Parameters[0].Value'` +# Convert json list to bash list (space delimited regions) +regions="`echo $app_regions | sed -e 's/\[\([^]]*\)\]/\1/g' | sed 's/,/ /g' | sed "s/'//g"`" +for region in $regions +do + # Check if the package command actually needs to be run, only needed if there is a Transform + if grep -q Transform: "$CODEBUILD_SRC_DIR/template.yml"; then + ssm_bucket_name="/cross_region/s3_regional_bucket/$region" + bucket=`aws ssm get-parameters --names $ssm_bucket_name --with-decryption --output=text --query='Parameters[0].Value'` + sam package --s3-bucket $bucket --output-template-file $CODEBUILD_SRC_DIR/template_$region.yml + else + # If package is not needed, just copy the file for each region + cp $CODEBUILD_SRC_DIR/template.yml $CODEBUILD_SRC_DIR/template_$region.yml + fi +done diff --git a/src/bootstrap_repository/deployment/lambda_codebase/initial_commit/pipelines_repository/adf-build/pipeline.py b/src/bootstrap_repository/deployment/lambda_codebase/initial_commit/pipelines_repository/adf-build/pipeline.py new file mode 100644 index 000000000..268eaca86 --- /dev/null +++ b/src/bootstrap_repository/deployment/lambda_codebase/initial_commit/pipelines_repository/adf-build/pipeline.py @@ -0,0 +1,92 @@ +# Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: MIT-0 + +""" +Module used for defining a Pipeline object and the +properties associated with a pipeline. 
+""" + +import os +from jinja2 import Environment, FileSystemLoader + +DEPLOYMENT_ACCOUNT_REGION = os.environ.get("AWS_REGION", 'us-east-1') + +class Pipeline: + def __init__(self, pipeline): + self.name = pipeline.get('name') + self.parameters = pipeline.get('params', []) + self.template_dictionary = {"targets": []} + self.notification_endpoint = self._extract_notification_endpoint() + self.stage_regions = [] + self.top_level_regions = pipeline.get('regions', []) + self.pipeline_type = pipeline.get('type', None) + self.replace_on_failure = pipeline.get('replace_on_failure', '') # Legacy, and will be replaced in 1.0.0 in favour of below 'action' + self.action = pipeline.get('action', '').upper() + self.contains_transform = pipeline.get('contains_transform', '') + + if not isinstance(self.top_level_regions, list): + self.top_level_regions = [self.top_level_regions] + + + def _extract_notification_endpoint(self): + for parameter in self.parameters: + endpoint = parameter.get('NotificationEndpoint') + if endpoint: + return endpoint + return None + + + def generate_parameters(self): + try: + params = [] + # ProjectName should be a hidden param and passed in directly from the + # name of the "pipeline" + params.append({ + 'ParameterKey': str('ProjectName'), + 'ParameterValue': self.name, + }) + for param in self.parameters: + for key, value in param.items(): + params.append({ + 'ParameterKey': str(key), + 'ParameterValue': str(value), + }) + return params + except BaseException: + return [] + + @staticmethod + def flatten_list(k): + result = list() + for i in k: + if isinstance(i, list): + result.extend(Pipeline.flatten_list(i)) + else: + result.append(i) + return sorted(result) + + def _create_pipelines_folder(self): + try: + return os.makedirs("pipelines/{0}".format(self.name)) + except FileExistsError: + return None + + def generate(self): + env = Environment(loader=FileSystemLoader('pipeline_types')) + template = 
env.get_template('./{0}.yml.j2'.format(self.pipeline_type)) + output_template = template.render( + environments=self.template_dictionary, + name=self.name, + notification_endpoint=self.notification_endpoint, + top_level_regions=sorted(self.flatten_list(list(set(self.top_level_regions)))), + regions=sorted(list(set(self.flatten_list(self.stage_regions)))), + deployment_account_region=DEPLOYMENT_ACCOUNT_REGION, + action=self.action or self.replace_on_failure, + contains_transform=self.contains_transform + ) + + self._create_pipelines_folder() + + output_path = "pipelines/{0}/global.yml".format(self.name) + with open(output_path, 'w') as file_handler: + file_handler.write(output_template) diff --git a/src/bootstrap_repository/deployment/lambda_codebase/initial_commit/pipelines_repository/adf-build/requirements.txt b/src/bootstrap_repository/deployment/lambda_codebase/initial_commit/pipelines_repository/adf-build/requirements.txt new file mode 100644 index 000000000..fef3e10ce --- /dev/null +++ b/src/bootstrap_repository/deployment/lambda_codebase/initial_commit/pipelines_repository/adf-build/requirements.txt @@ -0,0 +1,6 @@ +# Install libs here that you might want in AWS CodeBuild +pytest==3.0.7 +mock==2.0.0 +boto3==1.9.89 +pyyaml>=5.1 +jinja2>=2.10.1 \ No newline at end of file diff --git a/src/bootstrap_repository/deployment/lambda_codebase/initial_commit/pipelines_repository/adf-build/resolver.py b/src/bootstrap_repository/deployment/lambda_codebase/initial_commit/pipelines_repository/adf-build/resolver.py new file mode 100644 index 000000000..3dce7fb07 --- /dev/null +++ b/src/bootstrap_repository/deployment/lambda_codebase/initial_commit/pipelines_repository/adf-build/resolver.py @@ -0,0 +1,92 @@ +# Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: MIT-0 + +"""This file is pulled into CodeBuild containers + and used to resolve values from Parameter Store and CloudFormation +""" +import os +import boto3 + +from parameter_store import ParameterStore +from cloudformation import CloudFormation +from sts import STS +from logger import configure_logger + +LOGGER = configure_logger(__name__) + +class Resolver: + def __init__(self, parameter_store, stage_parameters, comparison_parameters): + self.parameter_store = parameter_store + self.stage_parameters = stage_parameters + self.comparison_parameters = comparison_parameters + self.sts = STS() + + def fetch_stack_output(self, value, param, key=None): + try: + [_, account_id, region, stack_name, export] = str(value).split(':') + except ValueError: + raise ValueError( + "{0} is not a valid import string." + "syntax should be import:account_id:region:stack_name:export_key".format(str(value)) + ) + + LOGGER.info("Assuming the role %s", 'arn:aws:iam::{0}:role/{1}'.format( + account_id, + 'adf-cloudformation-deployment-role' + )) + role = self.sts.assume_cross_account_role( + 'arn:aws:iam::{0}:role/{1}'.format( + account_id, + 'adf-cloudformation-deployment-role'), + 'importer' + ) + cloudformation = CloudFormation( + region=region, + deployment_account_region=os.environ["AWS_REGION"], + role=role, + stack_name=stack_name + ) + LOGGER.info("Retrieving value of key %s from %s on %s in %s", export, stack_name, account_id, region) + stack_output = cloudformation.get_stack_output(export) + if not stack_output: + raise Exception("No Key was found on {0} with the name {1}".format(stack_name, export)) + + LOGGER.info("Stack output value is %s", stack_output) + if key: + self.stage_parameters[key][param] = stack_output + return + self.stage_parameters[key] = stack_output + + def fetch_parameter_store_value(self, value, key, param=None): + if str(value).count(':') > 1: + [_, region, value] = value.split(':') + regional_client = ParameterStore(region, 
boto3) + LOGGER.info("Fetching Parameter from %s", value) + if param: + self.stage_parameters[param][key] = regional_client.fetch_parameter( + value + ) + else: + self.stage_parameters[key] = regional_client.fetch_parameter( + value + ) + return True + [_, value] = value.split(':') + LOGGER.info("Fetching Parameter from %s", value) + if param: + self.stage_parameters[param][key] = self.parameter_store.fetch_parameter( + value + ) + else: + self.stage_parameters[key] = self.parameter_store.fetch_parameter( + value + ) + return False + + def update_cfn(self, key, param): + if key not in self.stage_parameters[param]: + self.stage_parameters[param][key] = self.comparison_parameters[param][key] + + def update_sc(self, key): + if key not in self.stage_parameters: + self.stage_parameters[key] = self.comparison_parameters[key] diff --git a/src/bootstrap_repository/deployment/lambda_codebase/initial_commit/pipelines_repository/adf-build/target.py b/src/bootstrap_repository/deployment/lambda_codebase/initial_commit/pipelines_repository/adf-build/target.py new file mode 100644 index 000000000..01a5c97ed --- /dev/null +++ b/src/bootstrap_repository/deployment/lambda_codebase/initial_commit/pipelines_repository/adf-build/target.py @@ -0,0 +1,110 @@ +# Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: MIT-0 + +""" +Module used for working with Targets within the Deployment Map. +Targets are the stages/steps within a Pipeline and can +require mutation depending on their structure. 
+""" + +import re +from errors import InvalidDeploymentMapError, NoAccountsFoundError + + +class TargetStructure: + def __init__(self, target): + self.target = TargetStructure._define_target_type(target) + self.account_list = [] + + @staticmethod + def _define_target_type(target): + if isinstance(target, list): + output = [] + for t in target: + output.append({"path": [t]}) + target = output + if isinstance(target, (int, str)): + target = [{"path": [target]}] + if isinstance(target, dict): + if not isinstance(target.get('path'), list): + target["path"] = [target.get('path')] + + if not isinstance(target, list): + target = [target] + + return target + + +class Target(): + def __init__(self, path, regions, target_structure, organizations, step_name): # imports, exports on hold + self.path = path + self.step_name = step_name or '' + self.regions = [regions] if not isinstance(regions, list) else regions + self.target_structure = target_structure + self.organizations = organizations + + @staticmethod + def _account_is_active(account): + return bool(account.get('Status') == 'ACTIVE') + + def _create_target_info(self, name, account_id): + return { + "name": re.sub(r'[^A-Za-z0-9.@\-_]+', '', name), + "id": account_id, + "path": self.path, + "regions": self.regions, + "step_name": re.sub(r'[^A-Za-z0-9.@\-_]+', '', self.step_name) + } + + def _target_is_approval(self): + self.target_structure.account_list.append( + self._create_target_info( + 'approval', + 'approval' + ) + ) + + def _create_response_object(self, responses): + _accounts = 0 + for response in responses: + _accounts += 1 + if Target._account_is_active(response): + self.target_structure.account_list.append( + self._create_target_info( + response.get('Name'), + str(response.get('Id')) + ) + ) + if _accounts == 0: + raise NoAccountsFoundError("No Accounts found in {0}".format(self.path)) + + def _target_is_account_id(self): + responses = self.organizations.client.describe_account( + AccountId=str(self.path) + 
).get('Account') + self._create_response_object([responses]) + + def _target_is_ou_id(self): + responses = self.organizations.get_accounts_for_parent( + str(self.path) + ) + self._create_response_object(responses) + + def _target_is_ou_path(self): + responses = self.organizations.dir_to_ou(self.path) + self._create_response_object(responses) + + def fetch_accounts_for_target(self): + if self.path == 'approval': + return self._target_is_approval() + + if (str(self.path)).startswith('ou-'): + return self._target_is_ou_id() + + if (str(self.path).isnumeric() and len(str(self.path)) == 12): + return self._target_is_account_id() + + if (str(self.path)).startswith('/'): + return self._target_is_ou_path() + + raise InvalidDeploymentMapError("Unknown defintion for target: {0}".format(self.path)) diff --git a/src/bootstrap_repository/deployment/lambda_codebase/initial_commit/pipelines_repository/adf-build/tests/__init__.py b/src/bootstrap_repository/deployment/lambda_codebase/initial_commit/pipelines_repository/adf-build/tests/__init__.py new file mode 100644 index 000000000..baad39092 --- /dev/null +++ b/src/bootstrap_repository/deployment/lambda_codebase/initial_commit/pipelines_repository/adf-build/tests/__init__.py @@ -0,0 +1,12 @@ +# Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: MIT-0 + +""" +Tests for pipeline_repository +""" + +import sys +import os + +sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), '../../..'))) +sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__)))) diff --git a/src/bootstrap_repository/deployment/lambda_codebase/initial_commit/pipelines_repository/adf-build/tests/stubs/__init__.py b/src/bootstrap_repository/deployment/lambda_codebase/initial_commit/pipelines_repository/adf-build/tests/stubs/__init__.py new file mode 100644 index 000000000..23085f7b9 --- /dev/null +++ b/src/bootstrap_repository/deployment/lambda_codebase/initial_commit/pipelines_repository/adf-build/tests/stubs/__init__.py @@ -0,0 +1,6 @@ +# Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: MIT-0 + +""" +__init__ for stubs +""" diff --git a/src/bootstrap_repository/deployment/lambda_codebase/initial_commit/pipelines_repository/adf-build/tests/stubs/account_name1_eu-west-1.json b/src/bootstrap_repository/deployment/lambda_codebase/initial_commit/pipelines_repository/adf-build/tests/stubs/account_name1_eu-west-1.json new file mode 100644 index 000000000..d4d74bc20 --- /dev/null +++ b/src/bootstrap_repository/deployment/lambda_codebase/initial_commit/pipelines_repository/adf-build/tests/stubs/account_name1_eu-west-1.json @@ -0,0 +1,5 @@ +{ + "Parameters": { + "CostCenter": "free" + } +} diff --git a/src/bootstrap_repository/deployment/lambda_codebase/initial_commit/pipelines_repository/adf-build/tests/stubs/stub_cfn_global.json b/src/bootstrap_repository/deployment/lambda_codebase/initial_commit/pipelines_repository/adf-build/tests/stubs/stub_cfn_global.json new file mode 100644 index 000000000..3746288df --- /dev/null +++ b/src/bootstrap_repository/deployment/lambda_codebase/initial_commit/pipelines_repository/adf-build/tests/stubs/stub_cfn_global.json @@ -0,0 +1,10 @@ +{ + "Parameters": { + "CostCenter": "123", + "Environment": 
"testing" + }, + "Tags" : { + "TagKey" : "123", + "MyKey" : "new_value" + } +} diff --git a/src/bootstrap_repository/deployment/lambda_codebase/initial_commit/pipelines_repository/adf-build/tests/stubs/stub_deployment_map.yml b/src/bootstrap_repository/deployment/lambda_codebase/initial_commit/pipelines_repository/adf-build/tests/stubs/stub_deployment_map.yml new file mode 100644 index 000000000..d97cbf29d --- /dev/null +++ b/src/bootstrap_repository/deployment/lambda_codebase/initial_commit/pipelines_repository/adf-build/tests/stubs/stub_deployment_map.yml @@ -0,0 +1,35 @@ +pipelines: + - name: sample-vpc + type: cc-cloudformation + params: + - SourceAccountId: 11111233321 + - NotificationEndpoint: my_email@email.com + targets: + - path: /banking/testing + regions: eu-west-1 + - approval + - path: /banking/production + regions: us-west-2 + + - name: example-sc + type: cc-service-catalog + targets: + - path: /banking/testing + regions: eu-west-1 + - path: /banking/production + regions: us-west-2 + + - name: some-thing + type: cc-service-catalog + params: + - SourceAccountId: 11111233321 + - ProductId: prod-some_id + - NotificationEndpoint: my_email@email.com + targets: + - 123456789 + - 013456789101 + + - name: my-build + type: cc-buildonly + params: + - SourceAccountId: 123456789101 diff --git a/src/bootstrap_repository/deployment/lambda_codebase/initial_commit/pipelines_repository/adf-build/tests/stubs/stub_target.py b/src/bootstrap_repository/deployment/lambda_codebase/initial_commit/pipelines_repository/adf-build/tests/stubs/stub_target.py new file mode 100644 index 000000000..6239a58e1 --- /dev/null +++ b/src/bootstrap_repository/deployment/lambda_codebase/initial_commit/pipelines_repository/adf-build/tests/stubs/stub_target.py @@ -0,0 +1,76 @@ +# Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: MIT-0 + +# pylint: skip-file + +""" +Stubs used for testing target.py +""" + +target_is_approval = { + 'name': 'approval', + 'id': 'approval', + 'path': '/thing/path', + 'regions': ['region1', 'region2'], + 'step_name': '' +} + +create_target_info_default = { + 'name': 'account_name', + 'id': 12345678910, + 'path': '/thing/path', + 'regions': ['region1', 'region2'], + 'step_name': '' +} + +create_target_info_regex_applied = { + 'name': 'accountname', + 'id': 12345678910, + 'path': '/thing/path', + 'regions': ['region1', 'region2'], + 'step_name': '' +} + +target_output = { + 'name': 'string', + 'id': 'fake', + 'path': '/thing/path', + 'regions': ['region1', 'region2'], + 'step_name': '' +} + +organizations_describe_account = { + 'Account': { + 'Id': 'fake', + 'Arn': 'fake::arn', + 'Email': 'fake@fake.com', + 'Name': 'string', + 'Status': 'ACTIVE', + 'JoinedMethod': 'INVITED', + 'JoinedTimestamp': 2 + } +} + + +def organizations_dir_to_ou(): + yield { + 'Id': 'fake', + 'Arn': 'fake::arn', + 'Email': 'fake@fake.com', + 'Name': 'string', + 'Status': 'ACTIVE', + 'JoinedMethod': 'INVITED', + 'JoinedTimestamp': 2 + } + + +def organizations_list_accounts_for_parent(): + yield { + 'Id': 'fake', + 'Arn': 'fake::arn', + 'Email': 'fake@fake.com', + 'Name': 'string', + 'Status': 'ACTIVE', + 'JoinedMethod': 'CREATED', + 'JoinedTimestamp': 2 + } \ No newline at end of file diff --git a/src/bootstrap_repository/deployment/lambda_codebase/initial_commit/pipelines_repository/adf-build/tests/test_deployment_map.py b/src/bootstrap_repository/deployment/lambda_codebase/initial_commit/pipelines_repository/adf-build/tests/test_deployment_map.py new file mode 100644 index 000000000..38b857c4b --- /dev/null +++ b/src/bootstrap_repository/deployment/lambda_codebase/initial_commit/pipelines_repository/adf-build/tests/test_deployment_map.py @@ -0,0 +1,63 @@ +# Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: MIT-0 + +# pylint: skip-file + +import os +import boto3 + +from errors import InvalidDeploymentMapError +from pytest import fixture, raises +from mock import Mock +from pipeline import Pipeline +from deployment_map import DeploymentMap + + +@fixture +def cls(): + return DeploymentMap( + parameter_store=None, + pipeline_name_prefix='adf', + map_path='{0}/stubs/stub_deployment_map.yml'.format( + os.path.dirname(os.path.realpath(__file__)) + ) + ) + +def test_validate_deployment_map(cls): + assert cls._validate_deployment_map() == None + +def test_validate_deployment_map_invalid_no_content(cls): + cls.map_contents = {} + with raises(InvalidDeploymentMapError): + cls._validate_deployment_map() + +def test_validate_deployment_leading_zero(cls): + cls._validate_deployment_map() + target_pipeline = [i for i in cls.map_contents['pipelines'] if i.get('name') == 'some-thing'][0]['targets'] + assert '013456789101' in target_pipeline + +def test_validate_deployment_map_path_only(cls): + cls.map_contents = {"pipelines": [{"targets": [{"path": "/something"}]}]} + assert cls._validate_deployment_map() == None + +def test_validate_deployment_map_invalid_paths(cls): + cls.map_contents = {"pipelines": [{"targets": [{"paths": "/something", "regions": 'eu-west-1'}]}]} + with raises(InvalidDeploymentMapError): + cls._validate_deployment_map() + +def test_update_deployment_parameters(cls): + cls.parameter_store = Mock() + cls.parameter_store.put_parameter.return_value = None + + pipeline = Pipeline({ + "name": "pipeline", + "params": [{"key": "value"}], + "targets": [], + "pipeline_type": "some_type" + }) + pipeline.template_dictionary = { + "targets": [[{"name": "some_pipeline", "path": "/fake/path"}]] + } + + cls.update_deployment_parameters(pipeline) + assert cls.account_ou_names['some_pipeline'] == '/fake/path' diff --git a/src/bootstrap_repository/deployment/lambda_codebase/initial_commit/pipelines_repository/adf-build/tests/test_generate_params.py 
b/src/bootstrap_repository/deployment/lambda_codebase/initial_commit/pipelines_repository/adf-build/tests/test_generate_params.py new file mode 100644 index 000000000..872826950 --- /dev/null +++ b/src/bootstrap_repository/deployment/lambda_codebase/initial_commit/pipelines_repository/adf-build/tests/test_generate_params.py @@ -0,0 +1,98 @@ +# Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: MIT-0 + +# pylint: skip-file + +import shutil +import os +import boto3 +import sys + +from pytest import fixture +from mock import Mock +from generate_params import Parameters + + +@fixture +def cls(): + parameter_store = Mock() + parameter_store.fetch_parameter.return_value = str({}) + parameters = Parameters( + build_name='some_name', + parameter_store=parameter_store, + directory=os.path.abspath(os.path.join(os.path.dirname(__file__), 'stubs')) + ) + parameters.account_ous = {'account_name1': '/banking/testing', 'account_name2': '/banking/production'} + parameters.regions = ['eu-west-1', 'eu-central-1', 'us-west-2'] + yield parameters + shutil.rmtree('{0}/params'.format(parameters.cwd)) + + +def test_valid_build_name(cls): + assert cls.build_name == 'some_name' + + +def test_params_folder_created(cls): + assert os.path.exists('{0}/params'.format(cls.cwd)) + + +def test_parse(cls): + parse = cls._parse( + '{0}/stub_cfn_global.json'.format(cls.cwd) + ) + assert parse == {'Parameters': {'CostCenter': '123', 'Environment': 'testing'}, 'Tags': {'MyKey': 'new_value', 'TagKey': '123'}} + + +def test_parse_not_found(cls): + parse = cls._parse( + '{0}/nothing.json'.format(cls.cwd) + ) + assert parse == {'Parameters': {}, 'Tags': {}} + + +def test_compare_cfn(cls): + parse = cls._parse( + '{0}/stub_cfn_global.json'.format(cls.cwd) + ) + compare = cls._compare_cfn( + parse, + {'Parameters': {}, 'Tags': {}} + ) + assert compare == parse + + +def test_create_parameter_files(cls): + cls.global_path = 
"{0}/stub_cfn_global.json".format(cls.cwd) + cls.create_parameter_files() + assert os.path.exists("{0}/params/account_name1_eu-west-1.json".format(cls.cwd)) + assert os.path.exists("{0}/params/account_name1_eu-central-1.json".format(cls.cwd)) + assert os.path.exists("{0}/params/account_name1_us-west-2.json".format(cls.cwd)) + assert os.path.exists("{0}/params/account_name2_eu-west-1.json".format(cls.cwd)) + assert os.path.exists("{0}/params/account_name2_eu-central-1.json".format(cls.cwd)) + assert os.path.exists("{0}/params/account_name2_us-west-2.json".format(cls.cwd)) + + +def test_ensure_parameter_default_contents(cls): + cls.global_path = "{0}/stub_cfn_global.json".format(cls.cwd) + cls.create_parameter_files() + + parse = cls._parse( + "{0}/params/account_name1_us-west-2.json".format(cls.cwd) + ) + + assert parse == {'Parameters': {'CostCenter': '123', 'Environment': 'testing'}, 'Tags': {'TagKey': '123', 'MyKey': 'new_value'}} + + +def test_ensure_parameter_specific_contents(cls): + cls.global_path = "{0}/stub_cfn_global.json".format(cls.cwd) + shutil.copy( + "{0}/account_name1_eu-west-1.json".format(cls.cwd), + "{0}/params/account_name1_eu-west-1.json".format(cls.cwd) + ) + cls.create_parameter_files() + + parse = cls._parse( + "{0}/params/account_name1_eu-west-1.json".format(cls.cwd) + ) + + assert parse == {'Parameters': {'CostCenter': 'free', 'Environment': 'testing'}, 'Tags': {'TagKey': '123', 'MyKey': 'new_value'}} diff --git a/src/bootstrap_repository/deployment/lambda_codebase/initial_commit/pipelines_repository/adf-build/tests/test_pipeline.py b/src/bootstrap_repository/deployment/lambda_codebase/initial_commit/pipelines_repository/adf-build/tests/test_pipeline.py new file mode 100644 index 000000000..622c25a52 --- /dev/null +++ b/src/bootstrap_repository/deployment/lambda_codebase/initial_commit/pipelines_repository/adf-build/tests/test_pipeline.py @@ -0,0 +1,68 @@ +# Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: MIT-0 + +# pylint: skip-file + +import os +import sys +import yaml +import boto3 + +from pytest import fixture +from pipeline import Pipeline + + +@fixture +def cls(): + return Pipeline( + pipeline={ + "name": "pipeline", + "params": [{"key": "value"}], + "targets": [], + "type": "cc-cloudformation" + } + ) + + +def test_flatten_list(): + assertions = Pipeline.flatten_list([['val0', 'val1'], ['val2']]) + assert assertions == ['val0', 'val1', 'val2'] + + +def test_pipeline_init_defaults(cls): + assert cls.action == '' + assert cls.notification_endpoint is None + assert cls.contains_transform == '' + + +def test_pipeline_replace_on_failure(): + assertion_pipeline = Pipeline( + pipeline={ + "name": "pipeline", + "params": [{"key": "value"}], + "targets": [], + "type": "cc-cloudformation", + "action": "replace_on_failure" + } + ) + assert assertion_pipeline.action == "REPLACE_ON_FAILURE" + + +def test_pipeline_contains_transform(): + assertion_pipeline = Pipeline( + pipeline={ + "name": "pipeline", + "params": [{"key": "value"}], + "targets": [], + "type": "cc-cloudformation", + "contains_transform": "true" + } + ) + assert assertion_pipeline.contains_transform == "true" + +def test_generate_parameters(cls): + parameters = cls.generate_parameters() + assert parameters == [ + {'ParameterKey': 'ProjectName', 'ParameterValue': 'pipeline'}, + {'ParameterKey': 'key', 'ParameterValue': 'value'} + ] diff --git a/src/bootstrap_repository/deployment/lambda_codebase/initial_commit/pipelines_repository/adf-build/tests/test_target.py b/src/bootstrap_repository/deployment/lambda_codebase/initial_commit/pipelines_repository/adf-build/tests/test_target.py new file mode 100644 index 000000000..724c8d9bd --- /dev/null +++ b/src/bootstrap_repository/deployment/lambda_codebase/initial_commit/pipelines_repository/adf-build/tests/test_target.py @@ -0,0 +1,160 @@ +# Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: MIT-0 + +# pylint: skip-file + +import os +import boto3 +from errors import InvalidDeploymentMapError +from pytest import fixture, raises +from mock import Mock, patch +from stubs import stub_target +from target import Target + + +class MockTargetStructure: + def __init__(self): + self.account_list = [] + + +@fixture +def cls(): + cls = Target( + path='/thing/path', + regions=['region1', 'region2'], + target_structure=MockTargetStructure(), + organizations=None, + step_name=None + ) + return cls + + +def test_target_is_approval(cls): + cls.target_structure.account_list.append( + cls._create_target_info( + 'approval', + 'approval' + ) + ) + assert stub_target.target_is_approval in cls.target_structure.account_list + + +def test_account_is_active(): + assert Target._account_is_active({'Status': 'ACTIVE'}) is True + assert Target._account_is_active({'Status': 'FAKE'}) is False + + +def test_create_target_info_default(cls): + assertion = cls._create_target_info('account_name', 12345678910) + assert assertion == stub_target.create_target_info_default + + +def test_create_target_info_regex(cls): + """ + Testing account name Regex with symbol such as + or space + """ + assertion_plus = cls._create_target_info('account+name', 12345678910) + assertion_space = cls._create_target_info('account name', 12345678910) + assert assertion_plus and assertion_space == stub_target.create_target_info_regex_applied + + +def test_target_is_account_id(cls): + cls.organizations = Mock() + cls.organizations.client.describe_account.return_value = stub_target.organizations_describe_account + cls._target_is_account_id() + + assert len(cls.target_structure.account_list) is 1 + assert stub_target.target_output in cls.target_structure.account_list + + +def test_target_is_ou_id(cls): + cls.organizations = Mock() + cls.organizations.get_accounts_for_parent.return_value = stub_target.organizations_list_accounts_for_parent() + cls._target_is_ou_id() + + assert 
len(cls.target_structure.account_list) is 1 + assert stub_target.target_output in cls.target_structure.account_list + + +def test_target_is_ou_path(cls): + cls.organizations = Mock() + cls.organizations.dir_to_ou.return_value = stub_target.organizations_dir_to_ou() + cls._target_is_ou_path() + + assert stub_target.target_output in cls.target_structure.account_list + assert len(cls.target_structure.account_list) is 1 + + +def test_fetch_accounts_for_target_ou_path(): + cls = Target( + path='/thing/path', + regions=['region1', 'region2'], + target_structure=MockTargetStructure(), + organizations=None, + step_name=None + ) + + with patch.object(cls, '_target_is_ou_path') as mock: + cls.fetch_accounts_for_target() + mock.assert_called_once_with() + + +def test_fetch_accounts_for_target_account_id(): + cls = Target( + path='123456789102', + regions=['region1', 'region2'], + target_structure=MockTargetStructure(), + organizations=None, + step_name=None + ) + with patch.object(cls, '_target_is_account_id') as mock: + cls.fetch_accounts_for_target() + mock.assert_called_once_with() + + +def test_fetch_accounts_for_target_ou_id(): + cls = Target( + path='ou-123fake', + regions=['region1', 'region2'], + target_structure=MockTargetStructure(), + organizations=None, + step_name=None + ) + with patch.object(cls, '_target_is_ou_id') as mock: + cls.fetch_accounts_for_target() + mock.assert_called_once_with() + + +def test_fetch_accounts_for_approval(): + cls = Target( + path='approval', + regions=['region1', 'region2'], + target_structure=MockTargetStructure(), + organizations=None, + step_name=None + ) + with patch.object(cls, '_target_is_approval') as mock: + cls.fetch_accounts_for_target() + mock.assert_called_once_with() + +def test_fetch_account_error(): + cls = Target( + path='some_string', + regions=['region1', 'region2'], + target_structure=MockTargetStructure(), + organizations=Mock(), + step_name=None + ) + with raises(InvalidDeploymentMapError): + 
cls.fetch_accounts_for_target() + +def test_fetch_account_error_invalid_account_id(): + cls = Target( + path='12345678910', #11 digits rather than 12 (invalid account id) + regions=['region1', 'region2'], + target_structure=MockTargetStructure(), + organizations=Mock(), + step_name=None + ) + with raises(InvalidDeploymentMapError): + cls.fetch_accounts_for_target() \ No newline at end of file diff --git a/src/bootstrap_repository/deployment/lambda_codebase/initial_commit/pipelines_repository/example-deployment_map.yml b/src/bootstrap_repository/deployment/lambda_codebase/initial_commit/pipelines_repository/example-deployment_map.yml new file mode 100644 index 000000000..9d40664da --- /dev/null +++ b/src/bootstrap_repository/deployment/lambda_codebase/initial_commit/pipelines_repository/example-deployment_map.yml @@ -0,0 +1,9 @@ +pipelines: + - name: sample-pipeline # The name of your pipeline (This will match the name of your repository) + type: cc-cloudformation # The pipeline_type you wish to use for this pipeline + params: + - SourceAccountId: 111111111111 # The source account that will hold the codebase + - NotificationEndpoint: your@email.com # The Notification (user/team/slack) responsible for this pipeline + targets: # Deployment stages + - /business_unit/testing + - /business_unit/production diff --git a/src/bootstrap_repository/deployment/lambda_codebase/initial_commit/pipelines_repository/pipeline_types/cc-buildonly.yml.j2 b/src/bootstrap_repository/deployment/lambda_codebase/initial_commit/pipelines_repository/pipeline_types/cc-buildonly.yml.j2 new file mode 100644 index 000000000..06d907748 --- /dev/null +++ b/src/bootstrap_repository/deployment/lambda_codebase/initial_commit/pipelines_repository/pipeline_types/cc-buildonly.yml.j2 @@ -0,0 +1,214 @@ +AWSTemplateFormatVersion: '2010-09-09' +Description: ADF CloudFormation Template For CodePipeline - AWS CodeCommit Source and CodeBuild (No Deployment Steps) +Parameters: + ProjectName: + Description: Name 
of the Project (This is automatically passed in by ADF) + Type: String + ScheduleExpression: + Description: The Schedule expression for this pipeline + Type: String + Default: '' + Image: + Description: The Image for CodeBuild to use + Type: String + Default: "aws/codebuild/python:3.7.1" + NotificationEndpoint: + Description: The Email Address / Slack channel notifications will go to for changes related to this pipeline + Type: String + Default: '' + ComputeType: + Description: The ComputeType for CodeBuild + Type: String + Default: "BUILD_GENERAL1_SMALL" + SourceAccountId: + Description: The ID of the Source Account that will hold the CodeCommit Repo + Type: String + BranchName: + Description: Name of the CodeCommit Branch you will use to trigger the pipeline + Type: String + Default: master + RestartExecutionOnUpdate: + Description: If the pipeline will automatically trigger based on update + Type: String + Default: False + S3Bucket{{ deployment_account_region|replace("-", "") }}: + Description: The S3 Bucket for Cross region deployment for {{ deployment_account_region }} + Type: AWS::SSM::Parameter::Value + Default: /cross_region/s3_regional_bucket/{{ deployment_account_region }} + KMSKey{{ deployment_account_region|replace("-", "") }}: + Description: The KMSKey Arn for Cross region deployment for {{ deployment_account_region }} + Type: AWS::SSM::Parameter::Value + Default: /cross_region/kms_arn/{{ deployment_account_region }} +Conditions: + HasSchedule: !Not [!Equals [!Ref ScheduleExpression, '']] +Resources: + PipelineCloudWatchEventRole: + Condition: HasSchedule + Type: AWS::IAM::Role + Properties: + AssumeRolePolicyDocument: + Version: '2012-10-17' + Statement: + - Effect: Allow + Principal: + Service: + - events.amazonaws.com + Action: "sts:AssumeRole" + Path: / + Policies: + - PolicyName: 'execute-pipeline' + PolicyDocument: + Version: '2012-10-17' + Statement: + - Effect: Allow + Action: codepipeline:StartPipelineExecution + Resource: !Sub 
"arn:aws:codepipeline:${AWS::Region}:${AWS::AccountId}:${Pipeline}" + CronCloudwatchEventsRule: + Type: "AWS::Events::Rule" + Condition: HasSchedule + Properties: + Description: !Sub Triggers ${AWS::StackName} on a Schedule + ScheduleExpression: !Ref ScheduleExpression + Targets: + - Arn: !GetAtt Pipeline.Arn + RoleArn: !GetAtt PipelineCloudWatchEventRole.Arn + Id: !Sub "adf-cron-${AWS::StackName}" + BuildProject: + Type: AWS::CodeBuild::Project + Properties: + Name: !Sub "adf-build-${ProjectName}" + Description: !Sub "CodeBuild Project ${ProjectName} created by ADF" + EncryptionKey: !ImportValue KMSArn-{{ deployment_account_region }} + ServiceRole: !ImportValue CodeBuildRoleArn + Artifacts: + Type: CODEPIPELINE + Environment: + Type: linuxContainer + ComputeType: !Ref ComputeType + Image: !Ref Image + EnvironmentVariables: + - Name: PYTHONPATH + Value: './adf-build/shared/python' + - Name: PROJECT_NAME + Value: !Ref ProjectName + - Name: S3_BUCKET_NAME + Value: !ImportValue S3Bucket-{{ deployment_account_region }} + - Name: ACCOUNT_ID + Value: !Ref AWS::AccountId + Source: + Type: CODEPIPELINE + TimeoutInMinutes: 20 + Tags: + - Key: Name + Value: !Ref ProjectName +{% if notification_endpoint %} + PipelineEventRule: + Type: "AWS::Events::Rule" + Properties: + Description: "Trigger notifications based on pipeline state changes" + EventPattern: + source: + - "aws.codepipeline" + detail-type: + - "CodePipeline Pipeline Execution State Change" + detail: + state: + - "FAILED" + - "SUCCEEDED" + pipeline: + - !Ref Pipeline + State: "ENABLED" + Targets: + - Arn: !Ref PipelineSNSTopic + Id: !Sub "${AWS::StackName}-pipeline" +{% if "@" in notification_endpoint %} + InputTransformer: + InputTemplate: '"The pipeline from account has at ."' + InputPathsMap: + pipeline: "$.detail.pipeline" + state: "$.detail.state" + at: "$.time" + account: "$.account" +{% endif %} + PipelineSNSTopic: + Type: AWS::SNS::Topic + Properties: + Subscription: +{% if "@" in notification_endpoint %} + 
- Endpoint: !Ref NotificationEndpoint + Protocol: email +{% else %} + - Endpoint: !ImportValue SendSlackNotificationLambdaArn + Protocol: lambda +{% endif %} + PipelineSNSTopicPolicy: + Type: AWS::SNS::TopicPolicy + Properties: + PolicyDocument: + Id: !Sub "${AWS::StackName}" + Version: '2012-10-17' + Statement: + - Effect: Allow + Principal: + Service: + - events.amazonaws.com + - codecommit.amazonaws.com + - sns.amazonaws.com + Action: sns:Publish + Resource: "*" + Topics: + - !Ref PipelineSNSTopic + LambdaInvokePermission: + Type: AWS::Lambda::Permission + Properties: + Action: lambda:InvokeFunction + Principal: sns.amazonaws.com + SourceArn: !Ref PipelineSNSTopic + FunctionName: 'SendSlackNotification' +{% endif %} + Pipeline: + Type: AWS::CodePipeline::Pipeline + Properties: + RoleArn: !ImportValue CodePipelineRoleArn + Name: !Ref AWS::StackName + RestartExecutionOnUpdate: !Ref RestartExecutionOnUpdate + Stages: + - Name: !Sub Source-${SourceAccountId} + Actions: + - Name: CodeCommit + ActionTypeId: + Category: Source + Owner: AWS + Version: 1 + Provider: CodeCommit + Configuration: + RepositoryName: !Ref ProjectName + BranchName: !Ref BranchName + OutputArtifacts: + - Name: TemplateSource + RunOrder: 1 + RoleArn: !Sub "arn:aws:iam::${SourceAccountId}:role/adf-codecommit-role" #Source Account + - Name: Build + Actions: + - Name: Build + ActionTypeId: + Category: Build + Owner: AWS + Version: 1 + Provider: CodeBuild + Configuration: + ProjectName: !Sub "adf-build-${ProjectName}" + RunOrder: 1 + InputArtifacts: + - Name: TemplateSource + ArtifactStores: + - Region: {{ deployment_account_region }} + ArtifactStore: + EncryptionKey: + Id: !Ref KMSKey{{ deployment_account_region|replace("-", "") }} + Type: KMS + Location: !Ref S3Bucket{{ deployment_account_region|replace("-", "") }} + Type: S3 +Outputs: + PipelineUrl: + Value: !Sub https://console.aws.amazon.com/codepipeline/home?region=${AWS::Region}#/view/${Pipeline} diff --git 
a/src/bootstrap_repository/deployment/lambda_codebase/initial_commit/pipelines_repository/pipeline_types/cc-cloudformation.yml.j2 b/src/bootstrap_repository/deployment/lambda_codebase/initial_commit/pipelines_repository/pipeline_types/cc-cloudformation.yml.j2 new file mode 100644 index 000000000..4fcafcade --- /dev/null +++ b/src/bootstrap_repository/deployment/lambda_codebase/initial_commit/pipelines_repository/pipeline_types/cc-cloudformation.yml.j2 @@ -0,0 +1,354 @@ +AWSTemplateFormatVersion: '2010-09-09' +Description: ADF CloudFormation Template For CodePipeline - AWS CodeCommit Source and CloudFormation Deployment Target +Parameters: + ProjectName: + Description: Name of the Project (This is automatically passed in by ADF) + Type: String + StackPrefix: + Description: Prefix to prepend to the stackname when deployed + Type: String + Default: adf + ScheduleExpression: + Description: The Schedule expression for this pipeline + Type: String + Default: '' + Image: + Description: The Image for CodeBuild to use + Type: String + Default: "aws/codebuild/python:3.7.1" + NotificationEndpoint: + Description: The Email Address / Slack channel notifications will go to for changes related to this pipeline + Type: String + Default: '' + ComputeType: + Description: The ComputeType for CodeBuild + Type: String + Default: "BUILD_GENERAL1_SMALL" + SourceAccountId: + Description: The ID of the Source Account that will hold the CodeCommit Repo + Type: String + BranchName: + Description: Name of the CodeCommit Branch you will use to trigger the pipeline + Type: String + Default: master + RestartExecutionOnUpdate: + Description: If the pipeline will automatically trigger based on update + Type: String + Default: False +{% for region in regions %} + S3Bucket{{ region|replace("-", "") }}: + Description: The S3 Bucket for Cross region deployment for {{ region }} + Type: AWS::SSM::Parameter::Value + Default: /cross_region/s3_regional_bucket/{{ region }} + KMSKey{{ region|replace("-", "") 
}}: + Description: The KMSKey Arn for Cross region deployment for {{ region }} + Type: AWS::SSM::Parameter::Value + Default: /cross_region/kms_arn/{{ region }} +{% endfor %} +Conditions: + HasSchedule: !Not [!Equals [!Ref ScheduleExpression, '']] +Resources: + PipelineCloudWatchEventRole: + Condition: HasSchedule + Type: AWS::IAM::Role + Properties: + AssumeRolePolicyDocument: + Version: '2012-10-17' + Statement: + - Effect: Allow + Principal: + Service: + - events.amazonaws.com + Action: "sts:AssumeRole" + Path: / + Policies: + - PolicyName: 'execute-pipeline' + PolicyDocument: + Version: '2012-10-17' + Statement: + - Effect: Allow + Action: codepipeline:StartPipelineExecution + Resource: !Sub "arn:aws:codepipeline:${AWS::Region}:${AWS::AccountId}:${Pipeline}" + CronCloudwatchEventsRule: + Type: "AWS::Events::Rule" + Condition: HasSchedule + Properties: + Description: !Sub Triggers ${AWS::StackName} on a Schedule + ScheduleExpression: !Ref ScheduleExpression + Targets: + - Arn: !GetAtt Pipeline.Arn + RoleArn: !GetAtt PipelineCloudWatchEventRole.Arn + Id: !Sub "adf-cron-${AWS::StackName}" + BuildProject: + Type: AWS::CodeBuild::Project + Properties: + Name: !Sub "adf-build-${ProjectName}" + Description: !Sub "CodeBuild Project ${ProjectName} created by ADF" + EncryptionKey: !ImportValue KMSArn-{{ deployment_account_region }} + ServiceRole: !ImportValue CodeBuildRoleArn + Artifacts: + Type: CODEPIPELINE + Environment: + Type: linuxContainer + ComputeType: !Ref ComputeType + Image: !Ref Image + EnvironmentVariables: + - Name: PYTHONPATH + Value: './adf-build/shared/python' + - Name: PROJECT_NAME + Value: !Ref ProjectName + - Name: S3_BUCKET_NAME + Value: !ImportValue S3Bucket-{{ deployment_account_region }} + - Name: ACCOUNT_ID + Value: !Ref AWS::AccountId + Source: + Type: CODEPIPELINE + TimeoutInMinutes: 20 + Tags: + - Key: Name + Value: !Ref ProjectName +{% if notification_endpoint %} + PipelineEventRule: + Type: "AWS::Events::Rule" + Properties: + Description: 
"Trigger notifications based on pipeline state changes" + EventPattern: + source: + - "aws.codepipeline" + detail-type: + - "CodePipeline Pipeline Execution State Change" + detail: + state: + - "FAILED" + - "SUCCEEDED" + pipeline: + - !Ref Pipeline + State: "ENABLED" + Targets: + - Arn: !Ref PipelineSNSTopic + Id: !Sub "${AWS::StackName}-pipeline" +{% if "@" in notification_endpoint %} + InputTransformer: + InputTemplate: '"The pipeline from account has at ."' + InputPathsMap: + pipeline: "$.detail.pipeline" + state: "$.detail.state" + at: "$.time" + account: "$.account" +{% endif %} + PipelineSNSTopic: + Type: AWS::SNS::Topic + Properties: + Subscription: +{% if "@" in notification_endpoint %} + - Endpoint: !Ref NotificationEndpoint + Protocol: email +{% else %} + - Endpoint: !ImportValue SendSlackNotificationLambdaArn + Protocol: lambda +{% endif %} + PipelineSNSTopicPolicy: + Type: AWS::SNS::TopicPolicy + Properties: + PolicyDocument: + Id: !Sub "${AWS::StackName}" + Version: '2012-10-17' + Statement: + - Effect: Allow + Principal: + Service: + - events.amazonaws.com + - codecommit.amazonaws.com + - sns.amazonaws.com + Action: sns:Publish + Resource: "*" + Topics: + - !Ref PipelineSNSTopic + LambdaInvokePermission: + Type: AWS::Lambda::Permission + Properties: + Action: lambda:InvokeFunction + Principal: sns.amazonaws.com + SourceArn: !Ref PipelineSNSTopic + FunctionName: 'SendSlackNotification' +{% endif %} + Pipeline: + Type: AWS::CodePipeline::Pipeline + Properties: + RoleArn: !ImportValue CodePipelineRoleArn + Name: !Ref AWS::StackName + RestartExecutionOnUpdate: !Ref RestartExecutionOnUpdate + Stages: + - Name: !Sub Source-${SourceAccountId} + Actions: + - Name: CodeCommit + ActionTypeId: + Category: Source + Owner: AWS + Version: 1 + Provider: CodeCommit + Configuration: + RepositoryName: !Ref ProjectName + BranchName: !Ref BranchName + OutputArtifacts: + - Name: TemplateSource + RunOrder: 1 + RoleArn: !Sub 
"arn:aws:iam::${SourceAccountId}:role/adf-codecommit-role" #Source Account + - Name: Build + Actions: + - Name: Build + ActionTypeId: + Category: Build + Owner: AWS + Version: 1 + Provider: CodeBuild + Configuration: + ProjectName: !Sub "adf-build-${ProjectName}" + RunOrder: 1 + InputArtifacts: + - Name: TemplateSource + OutputArtifacts: + - Name: !Sub "${ProjectName}-build" +{% for target in environments['targets'] %} +{% if target|length > 0 and target[0].get('name') == "approval" %} + - Name: {{ target[0].get('step_name') or "approval-stage-" ~ loop.index }} + Actions: + - Name: {{ target[0].get('step_name') or "Approval"}} + ActionTypeId: + Category: Approval + Owner: AWS + Version: 1 + Provider: Manual + Configuration: + {% if notification_endpoint %} + NotificationArn: !Ref PipelineSNSTopic + {% endif %} + CustomData: !Ref AWS::AccountId + RunOrder: 1 +{% else %} + - Name: {{ target[0].get('step_name') or "deployment-stage-" ~ loop.index }} + Actions: +{% for stage in target %} +{% if top_level_regions == [] %} +{% for region in stage.regions %} +{% if action %} + - Name: {{ stage.name }}-{{ region }}-replace + ActionTypeId: + Category: Deploy + Owner: AWS + Version: 1 + Provider: CloudFormation + Configuration: + ActionMode: {{ action }} + StackName: !Sub "${StackPrefix}-${ProjectName}" + ChangeSetName: !Sub "${StackPrefix}-${ProjectName}" +{% if contains_transform %} + TemplatePath: !Sub "${ProjectName}-build::template_{{ region }}.yml" +{% else %} + TemplatePath: !Sub "${ProjectName}-build::template.yml" +{% endif %} + TemplateConfiguration: !Sub "${ProjectName}-build::params/{{ stage.name }}_{{ region }}.json" + Capabilities: CAPABILITY_NAMED_IAM,CAPABILITY_AUTO_EXPAND + RoleArn: "arn:aws:iam::{{ stage.id }}:role/adf-cloudformation-deployment-role" + InputArtifacts: + - Name: !Sub "${ProjectName}-build" + RunOrder: 1 + Region: {{ region }} + RoleArn: "arn:aws:iam::{{ stage.id }}:role/adf-cloudformation-role" +{% else %} + - Name: {{ stage.name }}-{{ 
region }}-replace + ActionTypeId: + Category: Deploy + Owner: AWS + Version: 1 + Provider: CloudFormation + Configuration: + ActionMode: CHANGE_SET_REPLACE + StackName: !Sub "${StackPrefix}-${ProjectName}" + ChangeSetName: !Sub "${StackPrefix}-${ProjectName}" +{% if contains_transform %} + TemplatePath: !Sub "${ProjectName}-build::template_{{ region }}.yml" +{% else %} + TemplatePath: !Sub "${ProjectName}-build::template.yml" +{% endif %} + TemplateConfiguration: !Sub "${ProjectName}-build::params/{{ stage.name }}_{{ region }}.json" + Capabilities: CAPABILITY_NAMED_IAM + RoleArn: "arn:aws:iam::{{ stage.id }}:role/adf-cloudformation-deployment-role" + InputArtifacts: + - Name: !Sub "${ProjectName}-build" + RunOrder: 1 + Region: {{ region }} + RoleArn: "arn:aws:iam::{{ stage.id }}:role/adf-cloudformation-role" + - Name: {{ stage.name }}-{{ region }}-execute + ActionTypeId: + Category: Deploy + Owner: AWS + Version: 1 + Provider: CloudFormation + Configuration: + ChangeSetName: !Sub "${StackPrefix}-${ProjectName}" + ActionMode: CHANGE_SET_EXECUTE + StackName: !Sub "${StackPrefix}-${ProjectName}" + RoleArn: "arn:aws:iam::{{ stage.id }}:role/adf-cloudformation-deployment-role" + RunOrder: 2 + Region: {{ region }} + RoleArn: "arn:aws:iam::{{ stage.id }}:role/adf-cloudformation-role" +{% endif %} +{% endfor %} +{% else %} +{% for top_level_region in top_level_regions %} + - Name: {{ stage.name }}-{{ top_level_region }}-replace + ActionTypeId: + Category: Deploy + Owner: AWS + Version: 1 + Provider: CloudFormation + Configuration: + ActionMode: CHANGE_SET_REPLACE + StackName: !Sub "${StackPrefix}-${ProjectName}" + ChangeSetName: !Sub "${StackPrefix}-${ProjectName}" +{% if contains_transform %} + TemplatePath: !Sub "${ProjectName}-build::template_{{ region }}.yml" +{% else %} + TemplatePath: !Sub "${ProjectName}-build::template.yml" +{% endif %} + TemplateConfiguration: !Sub "${ProjectName}-build::params/{{ stage.name }}_{{ top_level_region }}.json" + Capabilities: 
CAPABILITY_NAMED_IAM + RoleArn: "arn:aws:iam::{{ stage.id }}:role/adf-cloudformation-deployment-role" + InputArtifacts: + - Name: !Sub "${ProjectName}-build" + RunOrder: 1 + Region: {{ top_level_region }} + RoleArn: "arn:aws:iam::{{ stage.id }}:role/adf-cloudformation-role" + - Name: {{ stage.name }}-{{ top_level_region }}-execute + ActionTypeId: + Category: Deploy + Owner: AWS + Version: 1 + Provider: CloudFormation + Configuration: + ChangeSetName: !Sub "${StackPrefix}-${ProjectName}" + ActionMode: CHANGE_SET_EXECUTE + StackName: !Sub "${StackPrefix}-${ProjectName}" + RoleArn: "arn:aws:iam::{{ stage.id }}:role/adf-cloudformation-deployment-role" + RunOrder: 2 + Region: {{ top_level_region }} + RoleArn: "arn:aws:iam::{{ stage.id }}:role/adf-cloudformation-role" +{% endfor %} +{% endif %} +{% endfor %} +{% endif %} +{% endfor %} + ArtifactStores: +{% for region in regions %} + - Region: {{ region }} + ArtifactStore: + EncryptionKey: + Id: !Ref KMSKey{{ region|replace("-", "") }} + Type: KMS + Location: !Ref S3Bucket{{ region|replace("-", "") }} + Type: S3 +{% endfor %} +Outputs: + PipelineUrl: + Value: !Sub https://console.aws.amazon.com/codepipeline/home?region=${AWS::Region}#/view/${Pipeline} diff --git a/src/bootstrap_repository/deployment/lambda_codebase/initial_commit/pipelines_repository/pipeline_types/cc-s3.yml.j2 b/src/bootstrap_repository/deployment/lambda_codebase/initial_commit/pipelines_repository/pipeline_types/cc-s3.yml.j2 new file mode 100644 index 000000000..b0161fbc2 --- /dev/null +++ b/src/bootstrap_repository/deployment/lambda_codebase/initial_commit/pipelines_repository/pipeline_types/cc-s3.yml.j2 @@ -0,0 +1,295 @@ +AWSTemplateFormatVersion: '2010-09-09' +Description: ADF CloudFormation Template For CodePipeline - AWS CodeCommit Source and S3 Deployment Target +Parameters: + ProjectName: + Description: Name of the Project + Type: String + StackPrefix: + Description: Prefix to prepend to the stackname when deployed + Type: String + Default: adf + 
Image: + Description: The Image for CodeBuild to use + Type: String + Default: "aws/codebuild/python:3.7.1" + ScheduleExpression: + Description: The Schedule expression for this pipeline + Type: String + Default: '' + NotificationEndpoint: + Description: The Email Address / Slack channel notifications will go to for changes related to this pipeline + Type: String + Default: '' + ComputeType: + Description: The ComputeType for CodeBuild + Type: String + Default: "BUILD_GENERAL1_SMALL" + SourceAccountId: + Description: The ID of the Source Account that will hold the CodeCommit Repo + Type: String + BranchName: + Description: Name of the CodeCommit Branch you will use to trigger the pipeline + Type: String + Default: master + RestartExecutionOnUpdate: + Description: If the pipeline will automatically trigger based on update + Type: String + Default: false + TargetBucket: + Description: The Name of the S3 Bucket you wish to deploy to - Since S3 Bucket names are Globally unique you may want use Jinja2 to join the bucket name based on stage name or region in your loops below + Type: String + ObjectKey: + Description: The Object Key you wish to deploy into S3 + Type: String + Extract: + Description: If you want to extract the artifact in S3 (unzip) + Type: String + AllowedValues: [true, false] + Default: false +{% for region in regions %} + S3Bucket{{ region|replace("-", "") }}: + Description: The S3 Bucket for Cross region deployment for {{ region }} + Type: AWS::SSM::Parameter::Value + Default: /cross_region/s3_regional_bucket/{{ region }} + KMSKey{{ region|replace("-", "") }}: + Description: The KMSKey Arn for Cross region deployment for {{ region }} + Type: AWS::SSM::Parameter::Value + Default: /cross_region/kms_arn/{{ region }} +{% endfor %} +Conditions: + HasSchedule: !Not [!Equals [!Ref ScheduleExpression, '']] +Resources: + PipelineCloudWatchEventRole: + Condition: HasSchedule + Type: AWS::IAM::Role + Properties: + AssumeRolePolicyDocument: + Version: '2012-10-17' 
+ Statement: + - Effect: Allow + Principal: + Service: + - events.amazonaws.com + Action: "sts:AssumeRole" + Path: / + Policies: + - PolicyName: 'execute-pipeline' + PolicyDocument: + Version: '2012-10-17' + Statement: + - Effect: Allow + Action: codepipeline:StartPipelineExecution + Resource: !Sub "arn:aws:codepipeline:${AWS::Region}:${AWS::AccountId}:${Pipeline}" + CronCloudwatchEventsRule: + Type: "AWS::Events::Rule" + Condition: HasSchedule + Properties: + Description: !Sub Triggers ${AWS::StackName} on a Schedule + ScheduleExpression: !Ref ScheduleExpression + Targets: + - Arn: !GetAtt Pipeline.Arn + RoleArn: !GetAtt PipelineCloudWatchEventRole.Arn + Id: !Sub "adf-cron-${AWS::StackName}" + BuildProject: + Type: AWS::CodeBuild::Project + Properties: + Name: !Sub "adf-build-${ProjectName}" + Description: !Sub "CodeBuild Project ${ProjectName} created by ADF" + EncryptionKey: !ImportValue KMSArn-{{ deployment_account_region }} + ServiceRole: !ImportValue CodeBuildRoleArn + Artifacts: + Type: CODEPIPELINE + Environment: + Type: linuxContainer + ComputeType: !Ref ComputeType + Image: !Ref Image + EnvironmentVariables: + - Name: PYTHONPATH + Value: './adf-build/shared/python' + - Name: PROJECT_NAME + Value: !Ref ProjectName + - Name: S3_BUCKET_NAME + Value: !ImportValue S3Bucket-{{ deployment_account_region }} + - Name: ACCOUNT_ID + Value: !Ref AWS::AccountId + Source: + Type: CODEPIPELINE + TimeoutInMinutes: 20 + Tags: + - Key: Name + Value: !Ref ProjectName +{% if notification_endpoint %} + PipelineEventRule: + Type: "AWS::Events::Rule" + Properties: + Description: "Trigger notifications based on pipeline state changes" + EventPattern: + source: + - "aws.codepipeline" + detail-type: + - "CodePipeline Pipeline Execution State Change" + detail: + state: + - "FAILED" + - "SUCCEEDED" + pipeline: + - !Ref Pipeline + State: "ENABLED" + Targets: + - Arn: !Ref PipelineSNSTopic + Id: !Sub "${AWS::StackName}-pipeline" +{% if "@" in notification_endpoint %} + 
InputTransformer: + InputTemplate: '"The pipeline from account has at ."' + InputPathsMap: + pipeline: "$.detail.pipeline" + state: "$.detail.state" + at: "$.time" + account: "$.account" +{% endif %} + PipelineSNSTopic: + Type: AWS::SNS::Topic + Properties: + Subscription: +{% if "@" in notification_endpoint %} + - Endpoint: !Ref NotificationEndpoint + Protocol: email +{% else %} + - Endpoint: !ImportValue SendSlackNotificationLambdaArn + Protocol: lambda +{% endif %} + PipelineSNSTopicPolicy: + Type: AWS::SNS::TopicPolicy + Properties: + PolicyDocument: + Id: !Sub "${AWS::StackName}" + Version: '2012-10-17' + Statement: + - Effect: Allow + Principal: + Service: + - events.amazonaws.com + - codecommit.amazonaws.com + - sns.amazonaws.com + Action: sns:Publish + Resource: "*" + Topics: + - !Ref PipelineSNSTopic + LambdaInvokePermission: + Type: AWS::Lambda::Permission + Properties: + Action: lambda:InvokeFunction + Principal: sns.amazonaws.com + SourceArn: !Ref PipelineSNSTopic + FunctionName: 'SendSlackNotification' +{% endif %} + Pipeline: + Type: AWS::CodePipeline::Pipeline + Properties: + RoleArn: !ImportValue CodePipelineRoleArn + Name: !Ref AWS::StackName + RestartExecutionOnUpdate: !Ref RestartExecutionOnUpdate + Stages: + - Name: !Sub Source-${SourceAccountId} + Actions: + - Name: CodeCommit + ActionTypeId: + Category: Source + Owner: AWS + Version: 1 + Provider: CodeCommit + Configuration: + RepositoryName: !Ref ProjectName + BranchName: !Ref BranchName + OutputArtifacts: + - Name: TemplateSource + RunOrder: 1 + RoleArn: !Sub "arn:aws:iam::${SourceAccountId}:role/adf-codecommit-role" #Source Account + - Name: Build + Actions: + - Name: Build + ActionTypeId: + Category: Build + Owner: AWS + Version: 1 + Provider: CodeBuild + Configuration: + ProjectName: !Sub "adf-build-${ProjectName}" + RunOrder: 1 + InputArtifacts: + - Name: TemplateSource + OutputArtifacts: + - Name: !Sub "${ProjectName}-build" +{% for target in environments['targets'] %} +{% if 
target|length > 0 and target[0].get('name') == "approval" %} + - Name: {{ target[0].get('step_name') or "approval-stage-" ~ loop.index }} + Actions: + - Name: {{ target[0].get('step_name') or "Approval"}} + ActionTypeId: + Category: Approval + Owner: AWS + Version: 1 + Provider: Manual + Configuration: + {% if notification_endpoint %} + NotificationArn: !Ref PipelineSNSTopic + {% endif %} + CustomData: !Ref AWS::AccountId + RunOrder: 1 +{% else %} + - Name: {{ target[0].get('step_name') or "deployment-stage-" ~ loop.index }} + Actions: +{% for stage in target %} +{% if top_level_regions == [] %} +{% for region in stage.regions %} + - Name: {{ stage.name }}-{{ region }} + ActionTypeId: + Category: Deploy + Owner: AWS + Provider: S3 + Version: '1' + RunOrder: 1 + Configuration: + BucketName: !Ref TargetBucket + Extract: !Ref Extract + ObjectKey: !Ref ObjectKey + InputArtifacts: + - Name: !Sub "${ProjectName}-build" + Region: {{ region }} + RoleArn: "arn:aws:iam::{{ stage.id }}:role/adf-cloudformation-role" +{% endfor %} +{% else %} +{% for top_level_region in top_level_regions %} + - Name: {{ stage.name }}-{{ top_level_region }} + ActionTypeId: + Category: Deploy + Owner: AWS + Provider: S3 + Version: '1' + RunOrder: 1 + Configuration: + BucketName: !Ref TargetBucket + Extract: !Ref Extract + ObjectKey: !Ref ObjectKey + InputArtifacts: + - Name: !Sub "${ProjectName}-build" + Region: {{ top_level_region }} + RoleArn: "arn:aws:iam::{{ stage.id }}:role/adf-cloudformation-role" +{% endfor %} +{% endif %} +{% endfor %} +{% endif %} +{% endfor %} + ArtifactStores: +{% for region in regions %} + - Region: {{ region }} + ArtifactStore: + EncryptionKey: + Id: !Ref KMSKey{{ region|replace("-", "") }} + Type: KMS + Location: !Ref S3Bucket{{ region|replace("-", "") }} + Type: S3 +{% endfor %} +Outputs: + PipelineUrl: + Value: !Sub https://console.aws.amazon.com/codepipeline/home?region=${AWS::Region}#/view/${Pipeline} \ No newline at end of file diff --git 
a/src/bootstrap_repository/deployment/lambda_codebase/initial_commit/pipelines_repository/pipeline_types/cc-service-catalog.yml.j2 b/src/bootstrap_repository/deployment/lambda_codebase/initial_commit/pipelines_repository/pipeline_types/cc-service-catalog.yml.j2 new file mode 100644 index 000000000..f16fa39a1 --- /dev/null +++ b/src/bootstrap_repository/deployment/lambda_codebase/initial_commit/pipelines_repository/pipeline_types/cc-service-catalog.yml.j2 @@ -0,0 +1,285 @@ +AWSTemplateFormatVersion: '2010-09-09' +Description: ADF CloudFormation Template For CodePipeline - AWS CodeCommit Source and Service Catalog Target +Parameters: + ProjectName: + Description: Name of the Project (This is automatically passed in by ADF) + Type: String + StackPrefix: + Description: Prefix to prepend to the stackname when deployed + Type: String + Default: adf + Image: + Description: The Image for CodeBuild to use + Type: String + Default: "aws/codebuild/python:3.7.1" + ScheduleExpression: + Description: The Schedule expression for this pipeline + Type: String + Default: '' + NotificationEndpoint: + Description: The Email Address / Slack channel notifications will go to for changes related to this pipeline + Type: String + Default: '' + ComputeType: + Description: The ComputeType for CodeBuild + Type: String + Default: "BUILD_GENERAL1_SMALL" + ProductId: + Description: The Product Id + Type: String + SourceAccountId: + Description: The ID of the Source Account that will hold the CodeCommit Repo + Type: String + BranchName: + Description: Name of the CodeCommit Branch you will use to trigger the pipeline + Type: String + Default: master + RestartExecutionOnUpdate: + Description: If the pipeline will automatically trigger based on update + Type: String + Default: False +{% for region in regions %} + S3Bucket{{ region|replace("-", "") }}: + Description: The S3 Bucket for Cross region deployment for {{ region }} + Type: AWS::SSM::Parameter::Value + Default: 
/cross_region/s3_regional_bucket/{{ region }} + KMSKey{{ region|replace("-", "") }}: + Description: The KMSKey Arn for Cross region deployment for {{ region }} + Type: AWS::SSM::Parameter::Value + Default: /cross_region/kms_arn/{{ region }} +{% endfor %} +Conditions: + HasSchedule: !Not [!Equals [!Ref ScheduleExpression, '']] +Resources: + PipelineCloudWatchEventRole: + Condition: HasSchedule + Type: AWS::IAM::Role + Properties: + AssumeRolePolicyDocument: + Version: '2012-10-17' + Statement: + - Effect: Allow + Principal: + Service: + - events.amazonaws.com + Action: "sts:AssumeRole" + Path: / + Policies: + - PolicyName: 'execute-pipeline' + PolicyDocument: + Version: '2012-10-17' + Statement: + - Effect: Allow + Action: codepipeline:StartPipelineExecution + Resource: !Sub "arn:aws:codepipeline:${AWS::Region}:${AWS::AccountId}:${Pipeline}" + CronCloudwatchEventsRule: + Type: "AWS::Events::Rule" + Condition: HasSchedule + Properties: + Description: !Sub Triggers ${AWS::StackName} on a Schedule + ScheduleExpression: !Ref ScheduleExpression + Targets: + - Arn: !GetAtt Pipeline.Arn + RoleArn: !GetAtt PipelineCloudWatchEventRole.Arn + Id: !Sub "adf-cron-${AWS::StackName}" + BuildProject: + Type: AWS::CodeBuild::Project + Properties: + Name: !Sub "adf-build-${ProjectName}" + Description: !Sub "CodeBuild Project ${ProjectName} created by ADF" + EncryptionKey: !ImportValue KMSArn-{{ deployment_account_region }} + ServiceRole: !ImportValue CodeBuildRoleArn + Artifacts: + Type: CODEPIPELINE + Environment: + Type: linuxContainer + ComputeType: !Ref ComputeType + Image: !Ref Image + EnvironmentVariables: + - Name: PYTHONPATH + Value: './adf-build/shared/python' + - Name: PROJECT_NAME + Value: !Ref ProjectName + - Name: S3_BUCKET_NAME + Value: !ImportValue S3Bucket-{{ deployment_account_region }} + - Name: ACCOUNT_ID + Value: !Ref AWS::AccountId + Source: + Type: CODEPIPELINE + TimeoutInMinutes: 20 + Tags: + - Key: Name + Value: !Ref ProjectName +{% if notification_endpoint %} 
+ PipelineEventRule: + Type: "AWS::Events::Rule" + Properties: + Description: "Trigger notifications based on pipeline state changes" + EventPattern: + source: + - "aws.codepipeline" + detail-type: + - "CodePipeline Pipeline Execution State Change" + detail: + state: + - "FAILED" + - "SUCCEEDED" + pipeline: + - !Ref Pipeline + State: "ENABLED" + Targets: + - Arn: !Ref PipelineSNSTopic + Id: !Sub "${AWS::StackName}-pipeline" +{% if "@" in notification_endpoint %} + InputTransformer: + InputTemplate: '"The pipeline from account has at ."' + InputPathsMap: + pipeline: "$.detail.pipeline" + state: "$.detail.state" + at: "$.time" + account: "$.account" +{% endif %} + PipelineSNSTopic: + Type: AWS::SNS::Topic + Properties: + Subscription: +{% if "@" in notification_endpoint %} + - Endpoint: !Ref NotificationEndpoint + Protocol: email +{% else %} + - Endpoint: !ImportValue SendSlackNotificationLambdaArn + Protocol: lambda +{% endif %} + PipelineSNSTopicPolicy: + Type: AWS::SNS::TopicPolicy + Properties: + PolicyDocument: + Id: !Sub "${AWS::StackName}" + Version: '2012-10-17' + Statement: + - Effect: Allow + Principal: + Service: + - events.amazonaws.com + - codecommit.amazonaws.com + - sns.amazonaws.com + Action: sns:Publish + Resource: "*" + Topics: + - !Ref PipelineSNSTopic + LambdaInvokePermission: + Type: AWS::Lambda::Permission + Properties: + Action: lambda:InvokeFunction + Principal: sns.amazonaws.com + SourceArn: !Ref PipelineSNSTopic + FunctionName: 'SendSlackNotification' +{% endif %} + Pipeline: + Type: AWS::CodePipeline::Pipeline + Properties: + RoleArn: !ImportValue CodePipelineRoleArn + Name: !Ref AWS::StackName + RestartExecutionOnUpdate: !Ref RestartExecutionOnUpdate + Stages: + - Name: !Sub Source-${SourceAccountId} + Actions: + - Name: CodeCommit + ActionTypeId: + Category: Source + Owner: AWS + Version: 1 + Provider: CodeCommit + Configuration: + RepositoryName: !Ref ProjectName + BranchName: !Ref BranchName + OutputArtifacts: + - Name: TemplateSource + 
RunOrder: 1 + RoleArn: !Sub "arn:aws:iam::${SourceAccountId}:role/adf-codecommit-role" #Source Account + - Name: Build + Actions: + - Name: Build + ActionTypeId: + Category: Build + Owner: AWS + Version: 1 + Provider: CodeBuild + Configuration: + ProjectName: !Sub "adf-build-${ProjectName}" + RunOrder: 1 + InputArtifacts: + - Name: TemplateSource + OutputArtifacts: + - Name: !Sub "${ProjectName}-build" +{% for target in environments['targets'] %} +{% if target|length > 0 and target[0].get('name') == "approval" %} + - Name: {{ target[0].get('step_name') or "approval-stage-" ~ loop.index }} + Actions: + - Name: {{ target[0].get('step_name') or "Approval"}} + ActionTypeId: + Category: Approval + Owner: AWS + Version: 1 + Provider: Manual + Configuration: + {% if notification_endpoint %} + NotificationArn: !Ref PipelineSNSTopic + {% endif %} + CustomData: !Ref AWS::AccountId + RunOrder: 1 +{% else %} + - Name: {{ target[0].get('step_name') or "deployment-stage-" ~ loop.index }} + Actions: +{% for stage in target %} +{% if top_level_regions == [] %} +{% for region in stage.regions %} + - Name: {{ stage.name }}-{{ region }} + ActionTypeId: + Category: Deploy + Owner: AWS + Provider: ServiceCatalog + Version: '1' + RunOrder: 1 + Configuration: + ConfigurationFilePath: "params/{{ stage.name }}_{{ region }}.json" + ProductId: !Ref ProductId + InputArtifacts: + - Name: !Sub "${ProjectName}-build" + Region: {{ region }} + RoleArn: "arn:aws:iam::{{ stage.id }}:role/adf-cloudformation-role" +{% endfor %} +{% else %} +{% for top_level_region in top_level_regions %} + - Name: {{ stage.name }}-{{ top_level_region }} + ActionTypeId: + Category: Deploy + Owner: AWS + Provider: ServiceCatalog + Version: '1' + RunOrder: 1 + Configuration: + ConfigurationFilePath: "params/{{ stage.name }}_{{ top_level_region }}.json" + ProductId: !Ref ProductId + InputArtifacts: + - Name: !Sub "${ProjectName}-build" + Region: {{ top_level_region }} + RoleArn: "arn:aws:iam::{{ stage.id 
}}:role/adf-cloudformation-role" +{% endfor %} +{% endif %} +{% endfor %} +{% endif %} +{% endfor %} + ArtifactStores: +{% for region in regions %} + - Region: {{ region }} + ArtifactStore: + EncryptionKey: + Id: !Ref KMSKey{{ region|replace("-", "") }} + Type: KMS + Location: !Ref S3Bucket{{ region|replace("-", "") }} + Type: S3 +{% endfor %} +Outputs: + PipelineUrl: + Value: !Sub https://console.aws.amazon.com/codepipeline/home?region=${AWS::Region}#/view/${Pipeline} \ No newline at end of file diff --git a/src/bootstrap_repository/deployment/lambda_codebase/initial_commit/pipelines_repository/pipeline_types/github-cloudformation.yml.j2 b/src/bootstrap_repository/deployment/lambda_codebase/initial_commit/pipelines_repository/pipeline_types/github-cloudformation.yml.j2 new file mode 100644 index 000000000..23198e435 --- /dev/null +++ b/src/bootstrap_repository/deployment/lambda_codebase/initial_commit/pipelines_repository/pipeline_types/github-cloudformation.yml.j2 @@ -0,0 +1,380 @@ +AWSTemplateFormatVersion: '2010-09-09' +Description: ADF CloudFormation Template For CodePipeline - Github Source and CloudFormation Deployment Target +Parameters: + ProjectName: + Description: Name of the Project + Type: String + Image: + Description: The Image for CodeBuild to use + Type: String + Default: "aws/codebuild/python:3.7.1" + StackPrefix: + Description: Prefix to prepend to the stackname when deployed + Type: String + Default: adf + ScheduleExpression: + Description: The Schedule expression for this pipeline + Type: String + Default: '' + NotificationEndpoint: + Description: The Email Address / Slack channel notifications will go to for changes related to this pipeline + Type: String + Default: '' + ComputeType: + Description: The ComputeType for CodeBuild + Type: String + Default: "BUILD_GENERAL1_SMALL" + WebhookSecret: + Description: Webhook Secret from Github + Type: AWS::SSM::Parameter::Value + NoEcho: true + Default: /tokens/webhook/github + OAuthToken: + 
Description: OAuthToken from Github + Type: AWS::SSM::Parameter::Value + NoEcho: true + Default: /tokens/oauth/github + Owner: + Description: The owner of the github Repo + Type: String + BranchName: + Description: Branch to trigger the pipeline from + Type: String + Default: "master" + RestartExecutionOnUpdate: + Description: If the pipeline will automatically trigger based on update + Type: String + Default: False +{% for region in regions %} + S3Bucket{{ region|replace("-", "") }}: + Description: The S3 Bucket for Cross region deployment for {{ region }} + Type: AWS::SSM::Parameter::Value + Default: /cross_region/s3_regional_bucket/{{ region }} + KMSKey{{ region|replace("-", "") }}: + Description: The KMSKey Arn for Cross region deployment for {{ region }} + Type: AWS::SSM::Parameter::Value + Default: /cross_region/kms_arn/{{ region }} +{% endfor %} +Conditions: + HasSchedule: !Not [!Equals [!Ref ScheduleExpression, '']] +Resources: + PipelineCloudWatchEventRole: + Condition: HasSchedule + Type: AWS::IAM::Role + Properties: + AssumeRolePolicyDocument: + Version: '2012-10-17' + Statement: + - Effect: Allow + Principal: + Service: + - events.amazonaws.com + Action: "sts:AssumeRole" + Path: / + Policies: + - PolicyName: 'execute-pipeline' + PolicyDocument: + Version: '2012-10-17' + Statement: + - Effect: Allow + Action: codepipeline:StartPipelineExecution + Resource: !Sub "arn:aws:codepipeline:${AWS::Region}:${AWS::AccountId}:${Pipeline}" + CronCloudwatchEventsRule: + Type: "AWS::Events::Rule" + Condition: HasSchedule + Properties: + Description: !Sub Triggers ${AWS::StackName} on a Schedule + ScheduleExpression: !Ref ScheduleExpression + Targets: + - Arn: !GetAtt Pipeline.Arn + RoleArn: !GetAtt PipelineCloudWatchEventRole.Arn + Id: !Sub "adf-cron-${AWS::StackName}" + Webhook: + Type: 'AWS::CodePipeline::Webhook' + Properties: + AuthenticationConfiguration: + SecretToken: !Ref WebhookSecret + Filters: + - JsonPath: "$.ref" + MatchEquals: !Sub 
"refs/heads/${BranchName}" + Authentication: GITHUB_HMAC + TargetPipeline: !Ref Pipeline + TargetAction: Source + Name: !Sub "adf-webhook-${ProjectName}" + TargetPipelineVersion: !GetAtt Pipeline.Version + RegisterWithThirdParty: 'true' + BuildProject: + Type: AWS::CodeBuild::Project + Properties: + Name: !Sub "adf-build-${ProjectName}" + Description: !Sub "CodeBuild Project ${ProjectName} created by ADF" + EncryptionKey: !ImportValue KMSArn-{{ deployment_account_region }} + ServiceRole: !ImportValue CodeBuildRoleArn + Artifacts: + Type: CODEPIPELINE + Environment: + Type: linuxContainer + ComputeType: !Ref ComputeType + Image: !Ref Image + EnvironmentVariables: + - Name: PYTHONPATH + Value: './adf-build/shared/python' + - Name: PROJECT_NAME + Value: !Ref ProjectName + - Name: S3_BUCKET_NAME + Value: !ImportValue S3Bucket-{{ deployment_account_region }} + - Name: ACCOUNT_ID + Value: !Ref AWS::AccountId + Source: + Type: CODEPIPELINE + TimeoutInMinutes: 20 + Tags: + - Key: Name + Value: !Ref ProjectName +{% if notification_endpoint %} + PipelineEventRule: + Type: "AWS::Events::Rule" + Properties: + Description: "Trigger notifications based on pipeline state changes" + EventPattern: + source: + - "aws.codepipeline" + detail-type: + - "CodePipeline Pipeline Execution State Change" + detail: + state: + - "FAILED" + - "SUCCEEDED" + pipeline: + - !Ref Pipeline + State: "ENABLED" + Targets: + - Arn: !Ref PipelineSNSTopic + Id: !Sub "${AWS::StackName}-pipeline" +{% if "@" in notification_endpoint %} + InputTransformer: + InputTemplate: '"The pipeline from account has at ."' + InputPathsMap: + pipeline: "$.detail.pipeline" + state: "$.detail.state" + at: "$.time" + account: "$.account" +{% endif %} + PipelineSNSTopic: + Type: AWS::SNS::Topic + Properties: + Subscription: +{% if "@" in notification_endpoint %} + - Endpoint: !Ref NotificationEndpoint + Protocol: email +{% else %} + - Endpoint: !ImportValue SendSlackNotificationLambdaArn + Protocol: lambda +{% endif %} + 
PipelineSNSTopicPolicy: + Type: AWS::SNS::TopicPolicy + Properties: + PolicyDocument: + Id: !Sub "${AWS::StackName}" + Version: '2012-10-17' + Statement: + - Effect: Allow + Principal: + Service: + - events.amazonaws.com + - codecommit.amazonaws.com + - sns.amazonaws.com + Action: sns:Publish + Resource: "*" + Topics: + - !Ref PipelineSNSTopic + LambdaInvokePermission: + Type: AWS::Lambda::Permission + Properties: + Action: lambda:InvokeFunction + Principal: sns.amazonaws.com + SourceArn: !Ref PipelineSNSTopic + FunctionName: 'SendSlackNotification' +{% endif %} + Pipeline: + Type: AWS::CodePipeline::Pipeline + Properties: + RoleArn: !ImportValue CodePipelineRoleArn + Name: !Ref AWS::StackName + RestartExecutionOnUpdate: !Ref RestartExecutionOnUpdate + Stages: + - Name: Source-GitHub + Actions: + - Name: Source + ActionTypeId: + Category: Source + Owner: ThirdParty + Version: 1 + Provider: GitHub + Configuration: + Owner: !Ref Owner + Repo: !Ref ProjectName + Branch: !Ref BranchName + OAuthToken: !Ref OAuthToken + PollForSourceChanges: false + OutputArtifacts: + - Name: TemplateSource + RunOrder: 1 + - Name: Build + Actions: + - Name: Build + ActionTypeId: + Category: Build + Owner: AWS + Version: 1 + Provider: CodeBuild + Configuration: + ProjectName: !Sub "adf-build-${ProjectName}" + RunOrder: 1 + InputArtifacts: + - Name: TemplateSource + OutputArtifacts: + - Name: !Sub "${ProjectName}-build" +{% for target in environments['targets'] %} +{% if target|length > 0 and target[0].get('name') == "approval" %} + - Name: {{ target[0].get('step_name') or "approval-stage-" ~ loop.index }} + Actions: + - Name: {{ target[0].get('step_name') or "Approval"}} + ActionTypeId: + Category: Approval + Owner: AWS + Version: 1 + Provider: Manual + Configuration: + {% if notification_endpoint %} + NotificationArn: !Ref PipelineSNSTopic + {% endif %} + CustomData: !Ref AWS::AccountId + RunOrder: 1 +{% else %} + - Name: {{ target[0].get('step_name') or "deployment-stage-" ~ loop.index 
}} + Actions: +{% for stage in target %} +{% if top_level_regions == [] %} +{% for region in stage.regions %} +{% if action %} + - Name: {{ stage.name }}-{{ region }} + ActionTypeId: + Category: Deploy + Owner: AWS + Version: 1 + Provider: CloudFormation + Configuration: + ActionMode: {{ action }} + StackName: !Sub "${StackPrefix}-${ProjectName}" + ChangeSetName: !Sub "${StackPrefix}-${ProjectName}" +{% if contains_transform %} + TemplatePath: !Sub "${ProjectName}-build::template_{{ region }}.yml" +{% else %} + TemplatePath: !Sub "${ProjectName}-build::template.yml" +{% endif %} + TemplateConfiguration: !Sub "${ProjectName}-build::params/{{ stage.name }}_{{ region }}.json" + Capabilities: CAPABILITY_NAMED_IAM,CAPABILITY_AUTO_EXPAND + RoleArn: "arn:aws:iam::{{ stage.id }}:role/adf-cloudformation-deployment-role" + InputArtifacts: + - Name: !Sub "${ProjectName}-build" + RunOrder: 1 + Region: {{ region }} + RoleArn: "arn:aws:iam::{{ stage.id }}:role/adf-cloudformation-role" +{% else %} + - Name: {{ stage.name }}-{{ region }}-replace + ActionTypeId: + Category: Deploy + Owner: AWS + Version: 1 + Provider: CloudFormation + Configuration: + ActionMode: CHANGE_SET_REPLACE + StackName: !Sub "${StackPrefix}-${ProjectName}" + ChangeSetName: !Sub "${StackPrefix}-${ProjectName}" +{% if contains_transform %} + TemplatePath: !Sub "${ProjectName}-build::template_{{ region }}.yml" +{% else %} + TemplatePath: !Sub "${ProjectName}-build::template.yml" +{% endif %} + TemplateConfiguration: !Sub "${ProjectName}-build::params/{{ stage.name }}_{{ region }}.json" + Capabilities: CAPABILITY_NAMED_IAM + RoleArn: "arn:aws:iam::{{ stage.id }}:role/adf-cloudformation-deployment-role" + InputArtifacts: + - Name: !Sub "${ProjectName}-build" + RunOrder: 1 + Region: {{ region }} + RoleArn: "arn:aws:iam::{{ stage.id }}:role/adf-cloudformation-role" + - Name: {{ stage.name }}-{{ region }}-execute + ActionTypeId: + Category: Deploy + Owner: AWS + Version: 1 + Provider: CloudFormation + 
Configuration: + ChangeSetName: !Sub "${StackPrefix}-${ProjectName}" + ActionMode: CHANGE_SET_EXECUTE + StackName: !Sub "${StackPrefix}-${ProjectName}" + RoleArn: "arn:aws:iam::{{ stage.id }}:role/adf-cloudformation-deployment-role" + RunOrder: 2 + Region: {{ region }} + RoleArn: "arn:aws:iam::{{ stage.id }}:role/adf-cloudformation-role" +{% endif %} +{% endfor %} +{% else %} +{% for top_level_region in top_level_regions %} + - Name: {{ stage.name }}-{{ top_level_region }}-replace + ActionTypeId: + Category: Deploy + Owner: AWS + Version: 1 + Provider: CloudFormation + Configuration: + ActionMode: CHANGE_SET_REPLACE + StackName: !Sub "${StackPrefix}-${ProjectName}" + ChangeSetName: !Sub "${StackPrefix}-${ProjectName}" +{% if contains_transform %} + TemplatePath: !Sub "${ProjectName}-build::template_{{ top_level_region }}.yml" +{% else %} + TemplatePath: !Sub "${ProjectName}-build::template.yml" +{% endif %} + TemplateConfiguration: !Sub "${ProjectName}-build::params/{{ stage.name }}_{{ top_level_region }}.json" + Capabilities: CAPABILITY_NAMED_IAM + RoleArn: "arn:aws:iam::{{ stage.id }}:role/adf-cloudformation-deployment-role" + InputArtifacts: + - Name: !Sub "${ProjectName}-build" + RunOrder: 1 + Region: {{ top_level_region }} + RoleArn: "arn:aws:iam::{{ stage.id }}:role/adf-cloudformation-role" + - Name: {{ stage.name }}-{{ top_level_region }}-execute + ActionTypeId: + Category: Deploy + Owner: AWS + Version: 1 + Provider: CloudFormation + Configuration: + ChangeSetName: !Sub "${StackPrefix}-${ProjectName}" + ActionMode: CHANGE_SET_EXECUTE + StackName: !Sub "${StackPrefix}-${ProjectName}" + RoleArn: "arn:aws:iam::{{ stage.id }}:role/adf-cloudformation-deployment-role" + RunOrder: 2 + Region: {{ top_level_region }} + RoleArn: "arn:aws:iam::{{ stage.id }}:role/adf-cloudformation-role" +{% endfor %} +{% endif %} +{% endfor %} +{% endif %} +{% endfor %} + ArtifactStores: +{% for region in regions %} + - Region: {{ region }} + ArtifactStore: + EncryptionKey: + Id: !Ref KMSKey{{ 
region|replace("-", "") }} + Type: KMS + Location: !Ref S3Bucket{{ region|replace("-", "") }} + Type: S3 +{% endfor %} +Outputs: + PipelineUrl: + Value: !Sub https://console.aws.amazon.com/codepipeline/home?region=${AWS::Region}#/view/${Pipeline} diff --git a/src/bootstrap_repository/deployment/lambda_codebase/initial_commit/pipelines_repository/pytest.ini b/src/bootstrap_repository/deployment/lambda_codebase/initial_commit/pipelines_repository/pytest.ini new file mode 100644 index 000000000..754438abb --- /dev/null +++ b/src/bootstrap_repository/deployment/lambda_codebase/initial_commit/pipelines_repository/pytest.ini @@ -0,0 +1,2 @@ +[pytest] +testpaths = adf-build/tests diff --git a/src/bootstrap_repository/deployment/lambda_codebase/initial_commit/requirements.txt b/src/bootstrap_repository/deployment/lambda_codebase/initial_commit/requirements.txt new file mode 100644 index 000000000..b48c6e933 --- /dev/null +++ b/src/bootstrap_repository/deployment/lambda_codebase/initial_commit/requirements.txt @@ -0,0 +1,3 @@ +Jinja2~=2.10.1 +cfn-custom-resource~=1.0.1 +boto3~=1.9.89 \ No newline at end of file diff --git a/src/bootstrap_repository/deployment/lambda_codebase/pytest.ini b/src/bootstrap_repository/deployment/lambda_codebase/pytest.ini new file mode 100644 index 000000000..5ee647716 --- /dev/null +++ b/src/bootstrap_repository/deployment/lambda_codebase/pytest.ini @@ -0,0 +1,2 @@ +[pytest] +testpaths = tests diff --git a/src/bootstrap_repository/deployment/lambda_codebase/requirements.txt b/src/bootstrap_repository/deployment/lambda_codebase/requirements.txt new file mode 100644 index 000000000..e69de29bb diff --git a/src/bootstrap_repository/deployment/regional.yml b/src/bootstrap_repository/deployment/regional.yml index 4f3dfd4ef..5680ca0a2 100644 --- a/src/bootstrap_repository/deployment/regional.yml +++ b/src/bootstrap_repository/deployment/regional.yml @@ -26,9 +26,9 @@ Resources: Statement: - Action: - s3:Get* + - s3:List* - s3:PutObject* - 
s3:PutReplicationConfiguration - - s3:List* Effect: Allow Condition: StringEquals: @@ -52,34 +52,34 @@ Resources: Principal: AWS: !Sub arn:aws:iam::${AWS::AccountId}:root Action: + - "kms:CancelKeyDeletion" - "kms:Create*" + - "kms:Decrypt" + - "kms:Delete*" - "kms:Describe*" + - "kms:DescribeKey" + - "kms:Disable*" - "kms:Enable*" - "kms:Encrypt" - - "kms:Decrypt" + - "kms:GenerateDataKey*" + - "kms:Get*" - "kms:List*" - "kms:Put*" - - "kms:Update*" - - "kms:Revoke*" - - "kms:Disable*" - "kms:ReEncrypt*" - - "kms:GenerateDataKey*" - - "kms:DescribeKey" - - "kms:Get*" - - "kms:Delete*" + - "kms:Revoke*" - "kms:ScheduleKeyDeletion" - - "kms:CancelKeyDeletion" + - "kms:Update*" Resource: "*" - Sid: Allow use of the key Effect: Allow Principal: AWS: "*" Action: - - kms:Encrypt - kms:Decrypt - - kms:ReEncrypt* - - kms:GenerateDataKey* - kms:DescribeKey + - kms:Encrypt + - kms:GenerateDataKey* + - kms:ReEncrypt* Resource: "*" Condition: StringEquals: diff --git a/src/bootstrap_repository/example-adfconfig.yml b/src/bootstrap_repository/example-adfconfig.yml index c11f1e74f..136b10295 100644 --- a/src/bootstrap_repository/example-adfconfig.yml +++ b/src/bootstrap_repository/example-adfconfig.yml @@ -1,18 +1,17 @@ -roles: - cross-account-access: OrganizationAccountAccessRole # The role by ADF to assume cross account access +roles: + cross-account-access: OrganizationAccountAccessRole # The role by ADF to assume cross account access -regions: - deployment-account: eu-central-1 # The region you define as your main deployment region - targets: # A list of regions you wish to bootstrap and also deploy into via pipelines - - us-west-2 - - eu-central-1 +regions: + deployment-account: eu-central-1 # The region you define as your main deployment region + targets: # A list of regions you wish to bootstrap and also deploy into via pipelines (Other than the Deployment Account region) + - eu-west-1 -config: - main-notification-endpoint: - - type: email # slack or email - target: 
jon.doe@email.com # Email/Slack channel who receives notifications for the main bootstrapping pipeline - moves: - - name: to-root - action: safe # Can be safe or remove-base - scp: - keep-default-scp: enabled # determines if the default AWSFullAccess SCP stays attached to all OU's \ No newline at end of file +config: + main-notification-endpoint: + - type: email # slack or email + target: jon.doe@email.com # Email/Slack channel who receives notifications for the main bootstrapping pipeline + moves: + - name: to-root + action: safe # Can be safe or remove-base + scp: + keep-default-scp: enabled # determines if the default AWSFullAccess SCP stays attached to all OU's \ No newline at end of file diff --git a/src/bootstrap_repository/global.yml b/src/bootstrap_repository/global.yml index 800563343..30b498ce9 100644 --- a/src/bootstrap_repository/global.yml +++ b/src/bootstrap_repository/global.yml @@ -45,27 +45,27 @@ Resources: - Effect: Allow Action: - "codecommit:BatchGetRepositories" + - "codecommit:CancelUploadArchive" - "codecommit:Get*" - "codecommit:GitPull" - "codecommit:List*" - - "codecommit:CancelUploadArchive" - "codecommit:UploadArchive" - "codepipeline:StartPipelineExecution" + - "events:PutEvents" - "s3:Get*" - "s3:List*" - "s3:Put*" - - "events:PutEvents" Resource: "*" - Effect: Allow Action: + - "kms:Decrypt" - "kms:Describe*" + - "kms:DescribeKey" - "kms:Encrypt" - - "kms:Decrypt" - - "kms:List*" - - "kms:ReEncrypt*" - "kms:GenerateDataKey*" - - "kms:DescribeKey" - "kms:Get*" + - "kms:List*" + - "kms:ReEncrypt*" Resource: !Ref KMSArn Roles: - !Ref CodeCommitRole @@ -80,25 +80,25 @@ Resources: Sid: "CloudFormation" Action: - cloudformation:* + - codedeploy:* + - iam:PassRole - s3:Get* - s3:List* - s3:Put* - - codedeploy:* - - iam:PassRole - - servicecatalog:ListProvisioningArtifacts - servicecatalog:CreateProvisioningArtifact - - servicecatalog:DescribeProvisioningArtifact - servicecatalog:DeleteProvisioningArtifact + - 
servicecatalog:DescribeProvisioningArtifact + - servicecatalog:ListProvisioningArtifacts - servicecatalog:UpdateProduct Resource: "*" - Effect: Allow Sid: "KMS" Action: - - kms:Encrypt - - kms:Decrypt - - kms:ReEncrypt* - - kms:GenerateDataKey* - - kms:DescribeKey + - kms:Decrypt + - kms:DescribeKey + - kms:Encrypt + - kms:GenerateDataKey* + - kms:ReEncrypt* Resource: !Ref KMSArn Roles: - !Ref CloudFormationRole @@ -152,68 +152,68 @@ Resources: - Effect: Allow Sid: "CloudFormation" Action: - - "sqs:*" - - "s3:*" + - "apigateway:*" + - "application-autoscaling:*" + - "autoscaling:*" + - "cloudformation:*" - "cloudtrail:*" - - "config:*" - - "logs:*" - - "ecr:*" - "cloudwatch:*" + - "codebuild:*" + - "codecommit:*" + - "codedeploy:*" + - "codepipeline:*" - "cognito-identity:*" - "cognito-idp:*" - - "route53:*" + - "config:*" + - "dax:*" - "dynamodb:*" + - "ec2:*" + - "ecr:*" - "ecs:*" + - "elasticbeanstalk:*" + - "elasticloadbalancing:*" + - "es:*" - "events:*" + - "guardduty:*" - "iam:*" - - "states:*" - "kinesis:*" - - "lambda:*" - - "elasticloadbalancing:*" - - "sns:*" - - "tag:*" - - "kms:Describe*" - "kms:Create*" - - "kms:Encrypt" - "kms:Decrypt" + - "kms:Describe*" + - "kms:DescribeKey" + - "kms:Encrypt" + - "kms:GenerateDataKey*" + - "kms:Get*" - "kms:List*" - "kms:Put*" - "kms:ReEncrypt*" - - "kms:GenerateDataKey*" - - "kms:DescribeKey" - - "kms:Get*" - - "ec2:*" - - "xray:*" - - "dax:*" - - "application-autoscaling:*" - - "apigateway:*" - - "codepipeline:*" - - "codecommit:*" - - "codedeploy:*" - - "codebuild:*" - - "elasticbeanstalk:*" - - "servicecatalog:ListProvisioningArtifacts" + - "lambda:*" + - "logs:*" + - "opsworks:*" + - "route53:*" + - "s3:*" - "servicecatalog:CreateProvisioningArtifact" - - "servicecatalog:DescribeProvisioningArtifact" - "servicecatalog:DeleteProvisioningArtifact" + - "servicecatalog:DescribeProvisioningArtifact" + - "servicecatalog:ListProvisioningArtifacts" - "servicecatalog:UpdateProduct" - - "ssm:GetParameters" + - "sns:*" + 
- "sqs:*" - "ssm:GetParameter" - - "opsworks:*" - - "cloudformation:*" - - "es:*" - - "autoscaling:*" - - "guardduty:*" + - "ssm:GetParameters" + - "states:*" + - "tag:*" + - "xray:*" Resource: - "*" - Effect: Allow Sid: "KMS" Action: - - "kms:Encrypt" - - "kms:Decrypt" - - "kms:ReEncrypt*" - - "kms:GenerateDataKey*" - - "kms:DescribeKey" + - "kms:Decrypt" + - "kms:DescribeKey" + - "kms:Encrypt" + - "kms:GenerateDataKey*" + - "kms:ReEncrypt*" Resource: !Ref KMSArn Roles: - !Ref CloudFormationDeploymentRole \ No newline at end of file diff --git a/src/bootstrap_repository/pytest.ini b/src/bootstrap_repository/pytest.ini index bcd30ed83..2092021a2 100644 --- a/src/bootstrap_repository/pytest.ini +++ b/src/bootstrap_repository/pytest.ini @@ -1,3 +1,3 @@ [pytest] testpaths = adf-build/tests deployment/lambda_codebase/tests adf-build/shared/python/tests - +norecursedirs = deployment/lambda_codebase/initial_commit diff --git a/src/initial/template.yml b/src/initial/template.yml index b0a2f1b57..541c7f4db 100644 --- a/src/initial/template.yml +++ b/src/initial/template.yml @@ -140,6 +140,7 @@ Resources: TERMINATION_PROTECTION: !Ref TerminationProtection MASTER_ACCOUNT_ID: !Ref AWS::AccountId ORGANIZATION_ID: !Ref OrganizationId + ADF_VERSION: 0.3.3 FunctionName: StackWaiter Role: !GetAtt LambdaRole.Arn Runtime: python3.6 @@ -159,6 +160,7 @@ Resources: DEPLOYMENT_ACCOUNT_BUCKET: !Ref DeploymentAccountBucket MASTER_ACCOUNT_ID: !Ref AWS::AccountId ORGANIZATION_ID: !Ref OrganizationId + ADF_VERSION: 0.3.3 FunctionName: DetermineEventFunction Role: !GetAtt LambdaRole.Arn Runtime: python3.6 @@ -178,6 +180,7 @@ Resources: DEPLOYMENT_ACCOUNT_BUCKET: !Ref DeploymentAccountBucket MASTER_ACCOUNT_ID: !Ref AWS::AccountId ORGANIZATION_ID: !Ref OrganizationId + ADF_VERSION: 0.3.3 FunctionName: CrossAccountExecuteFunction Role: !GetAtt LambdaRole.Arn Runtime: python3.6 @@ -195,6 +198,7 @@ Resources: S3_BUCKET_NAME: !Ref BootstrapTemplatesBucket TERMINATION_PROTECTION: !Ref 
TerminationProtection MASTER_ACCOUNT_ID: !Ref AWS::AccountId + ADF_VERSION: 0.3.3 FunctionName: RoleStackDeploymentFunction Role: !GetAtt LambdaRole.Arn Runtime: python3.6 @@ -212,6 +216,7 @@ Resources: S3_BUCKET_NAME: !Ref BootstrapTemplatesBucket TERMINATION_PROTECTION: !Ref TerminationProtection MASTER_ACCOUNT_ID: !Ref AWS::AccountId + ADF_VERSION: 0.3.3 FunctionName: MovedToRootActionFunction Role: !GetAtt LambdaRole.Arn Runtime: python3.6 @@ -229,6 +234,7 @@ Resources: S3_BUCKET_NAME: !Ref BootstrapTemplatesBucket TERMINATION_PROTECTION: !Ref TerminationProtection MASTER_ACCOUNT_ID: !Ref AWS::AccountId + ADF_VERSION: 0.3.3 FunctionName: UpdateResourcePoliciesFunction Role: !GetAtt LambdaRole.Arn Runtime: python3.6 @@ -362,7 +368,8 @@ Resources: Type: CODEPIPELINE Environment: ComputeType: !Ref ComputeType - Image: !Ref Image + PrivilegedMode: true + Image: "aws/codebuild/standard:2.0" EnvironmentVariables: - Name: PYTHONPATH Value: './adf-build/shared/python' @@ -374,6 +381,8 @@ Resources: Value: !Ref DeploymentAccountBucket - Name: ORGANIZATION_ID Value: !Ref OrganizationId + - Name: ADF_VERSION + Value: 0.3.3 Type: LINUX_CONTAINER Name: 'aws-deployment-framework-base-templates' ServiceRole: !Ref CodeBuildRole @@ -382,17 +391,19 @@ Resources: version: 0.2 phases: install: + runtime-versions: + python: 3.7 + docker: 18 + pre_build: commands: - # You can add in cfn-lint here also if required for base-templates - apt-get update -qq - - pip install --upgrade pip + - pip install --upgrade pip --quiet - pip install -r adf-build/requirements.txt --upgrade --quiet - pre_build: - commands: - pytest -vvv build: commands: - - aws cloudformation package --template-file $(pwd)/deployment/global.yml --output-template-file deployment/global.yml --s3-prefix deployment --s3-bucket $DEPLOYMENT_ACCOUNT_BUCKET + - sam build -t deployment/global.yml + - sam package --output-template-file deployment/global.yml --s3-prefix deployment --s3-bucket $DEPLOYMENT_ACCOUNT_BUCKET - aws s3 
sync ./adf-build/shared s3://$DEPLOYMENT_ACCOUNT_BUCKET/adf-build/shared --quiet # Shared Modules to be used with AWS CodeBuild - aws s3 sync . s3://$S3_BUCKET --quiet --delete # Base Templates - python adf-build/main.py # Updates config, updates (or creates) base stacks. @@ -593,6 +604,10 @@ Resources: } RoleArn: !GetAtt StatesExecutionRole.Arn Outputs: + ADFVersionNumber: + Value: 0.3.3 + Export: + Name: "ADFVersionNumber" LayerArn: Description: "The Shared modules Lambda Layer Arn" Value: !Ref LambdaLayerVersion diff --git a/src/pipelines_repository/adf-build/target.py b/src/pipelines_repository/adf-build/target.py index b5faf5ed2..01a5c97ed 100644 --- a/src/pipelines_repository/adf-build/target.py +++ b/src/pipelines_repository/adf-build/target.py @@ -53,7 +53,7 @@ def _create_target_info(self, name, account_id): "id": account_id, "path": self.path, "regions": self.regions, - "step_name": self.step_name + "step_name": re.sub(r'[^A-Za-z0-9.@\-_]+', '', self.step_name) } def _target_is_approval(self): diff --git a/src/pipelines_repository/example-deployment_map.yml b/src/pipelines_repository/example-deployment_map.yml index c11e9f611..1327dbbe2 100644 --- a/src/pipelines_repository/example-deployment_map.yml +++ b/src/pipelines_repository/example-deployment_map.yml @@ -1,9 +1,20 @@ pipelines: - - name: sample # The name of your pipeline (This will match the name of your repository) + - name: sample-pipeline # The name of your pipeline (This will match the name of your repository) type: cc-cloudformation # The pipeline_type you wish to use for this pipeline params: - SourceAccountId: 111111111111 # The source account that will hold the codebase - NotificationEndpoint: your@email.com # The Notification (user/team/slack) responsible for this pipeline targets: # Deployment stages - /business_unit/testing - - /business_unit/production \ No newline at end of file + - /business_unit/production + + - name: another-pipeline + type: github-cloudformation # Rather than use 
AWS CodeCommit we can also use Github as a source + params: + - ScheduleExpression: rate(7 days) # Run every 7 days automatically + - Owner: github_username # See Docs for Github integration + - NotificationEndpoint: slack_channel # See Docs for Slack integration for notifications + targets: # Long hand syntax including regions and names for stages + - path: 22222222222 # Paths can be single AWS Accounts or Organization Units + regions: us-east-1 + name: fancy-name \ No newline at end of file diff --git a/src/pipelines_repository/pipeline_types/cc-buildonly.yml.j2 b/src/pipelines_repository/pipeline_types/cc-buildonly.yml.j2 index c703fe385..0b905c9ab 100644 --- a/src/pipelines_repository/pipeline_types/cc-buildonly.yml.j2 +++ b/src/pipelines_repository/pipeline_types/cc-buildonly.yml.j2 @@ -49,76 +49,8 @@ Resources: Description: !Sub Triggers ${AWS::StackName} on a Schedule ScheduleExpression: !Ref ScheduleExpression Targets: - - Arn: !GetAtt CronLambdaFunction.Arn + - Arn: !GetAtt Pipeline.Arn Id: !Sub "adf-cron-${AWS::StackName}" - CronLambdaFunction: - Type: 'AWS::Lambda::Function' - Condition: HasSchedule - Properties: - Code: - ZipFile: !Sub | - import boto3 - def handler(event, _): - client = boto3.client('codepipeline', region_name="${AWS::Region}") - client.start_pipeline_execution(name="${AWS::StackName}") - Handler: 'index.handler' - FunctionName: !Sub "adf-cron-pipeline-${AWS::StackName}" - Description: !Sub "ADF Lambda Function - Cron ${AWS::StackName}" - MemorySize: 128 - Role: !GetAtt CronLambdaRole.Arn - Runtime: 'python3.7' - Timeout: 60 - CronPermissionForEventsToInvokeLambda: - Type: AWS::Lambda::Permission - Condition: HasSchedule - Properties: - FunctionName: !Ref CronLambdaFunction - Action: "lambda:InvokeFunction" - Principal: "events.amazonaws.com" - SourceArn: !GetAtt CronCloudwatchEventsRule.Arn - CronLambdaRole: - Type: "AWS::IAM::Role" - Condition: HasSchedule - Properties: - RoleName: !Sub "adf-cron-lambda-role-${AWS::StackName}" - 
AssumeRolePolicyDocument: - Version: "2012-10-17" - Statement: - - Effect: "Allow" - Principal: - Service: - - "lambda.amazonaws.com" - Action: - - "sts:AssumeRole" - Path: "/" - Policies: - - PolicyName: "adf-lambda-execution-role" - PolicyDocument: - Version: "2012-10-17" - Statement: - - Effect: "Allow" - Resource: !Sub "arn:aws:codepipeline:${AWS::Region}:${AWS::AccountId}:${Pipeline}" - Action: - - "codepipeline:StartPipelineExecution" - - Effect: "Allow" - Resource: "arn:aws:logs:*:*:*" - Action: "logs:*" - - Effect: "Allow" - Resource: "*" - Action: - - "lambda:GetAccountSettings" - - "lambda:ListFunctions" - - "lambda:ListTags" - - "lambda:GetFunction" - - "lambda:GetEventSourceMapping" - - "lambda:ListEventSourceMappings" - - "lambda:DeleteEventSourceMapping" - - "lambda:UpdateEventSourceMapping" - - "lambda:CreateEventSourceMapping" - - "iam:ListRoles" - Condition: - StringEquals: - lambda:FunctionArn: !Sub "arn:aws:lambda:${AWS::Region}:${AWS::AccountId}:function:adf-cron-pipeline-${AWS::StackName}" BuildProject: Type: AWS::CodeBuild::Project Properties: diff --git a/src/pipelines_repository/pipeline_types/cc-cloudformation.yml.j2 b/src/pipelines_repository/pipeline_types/cc-cloudformation.yml.j2 index ca6b3a473..50388451a 100644 --- a/src/pipelines_repository/pipeline_types/cc-cloudformation.yml.j2 +++ b/src/pipelines_repository/pipeline_types/cc-cloudformation.yml.j2 @@ -55,76 +55,8 @@ Resources: Description: !Sub Triggers ${AWS::StackName} on a Schedule ScheduleExpression: !Ref ScheduleExpression Targets: - - Arn: !GetAtt CronLambdaFunction.Arn + - Arn: !GetAtt Pipeline.Arn Id: !Sub "adf-cron-${AWS::StackName}" - CronLambdaFunction: - Type: 'AWS::Lambda::Function' - Condition: HasSchedule - Properties: - Code: - ZipFile: !Sub | - import boto3 - def handler(event, _): - client = boto3.client('codepipeline', region_name="${AWS::Region}") - client.start_pipeline_execution(name="${AWS::StackName}") - Handler: 'index.handler' - FunctionName: !Sub 
"adf-cron-pipeline-${AWS::StackName}" - Description: !Sub "ADF Lambda Function - Cron ${AWS::StackName}" - MemorySize: 128 - Role: !GetAtt CronLambdaRole.Arn - Runtime: 'python3.7' - Timeout: 60 - CronPermissionForEventsToInvokeLambda: - Type: AWS::Lambda::Permission - Condition: HasSchedule - Properties: - FunctionName: !Ref CronLambdaFunction - Action: "lambda:InvokeFunction" - Principal: "events.amazonaws.com" - SourceArn: !GetAtt CronCloudwatchEventsRule.Arn - CronLambdaRole: - Type: "AWS::IAM::Role" - Condition: HasSchedule - Properties: - RoleName: !Sub "adf-cron-lambda-role-${AWS::StackName}" - AssumeRolePolicyDocument: - Version: "2012-10-17" - Statement: - - Effect: "Allow" - Principal: - Service: - - "lambda.amazonaws.com" - Action: - - "sts:AssumeRole" - Path: "/" - Policies: - - PolicyName: "adf-lambda-execution-role" - PolicyDocument: - Version: "2012-10-17" - Statement: - - Effect: "Allow" - Resource: !Sub "arn:aws:codepipeline:${AWS::Region}:${AWS::AccountId}:${Pipeline}" - Action: - - "codepipeline:StartPipelineExecution" - - Effect: "Allow" - Resource: "arn:aws:logs:*:*:*" - Action: "logs:*" - - Effect: "Allow" - Resource: "*" - Action: - - "lambda:GetAccountSettings" - - "lambda:ListFunctions" - - "lambda:ListTags" - - "lambda:GetFunction" - - "lambda:GetEventSourceMapping" - - "lambda:ListEventSourceMappings" - - "lambda:DeleteEventSourceMapping" - - "lambda:UpdateEventSourceMapping" - - "lambda:CreateEventSourceMapping" - - "iam:ListRoles" - Condition: - StringEquals: - lambda:FunctionArn: !Sub "arn:aws:lambda:${AWS::Region}:${AWS::AccountId}:function:adf-cron-pipeline-${AWS::StackName}" BuildProject: Type: AWS::CodeBuild::Project Properties: diff --git a/src/pipelines_repository/pipeline_types/cc-s3.yml.j2 b/src/pipelines_repository/pipeline_types/cc-s3.yml.j2 index dceecc139..2589582b7 100644 --- a/src/pipelines_repository/pipeline_types/cc-s3.yml.j2 +++ b/src/pipelines_repository/pipeline_types/cc-s3.yml.j2 @@ -66,76 +66,8 @@ Resources: 
Description: !Sub Triggers ${AWS::StackName} on a Schedule ScheduleExpression: !Ref ScheduleExpression Targets: - - Arn: !GetAtt CronLambdaFunction.Arn + - Arn: !GetAtt Pipeline.Arn Id: !Sub "adf-cron-${AWS::StackName}" - CronLambdaFunction: - Type: 'AWS::Lambda::Function' - Condition: HasSchedule - Properties: - Code: - ZipFile: !Sub | - import boto3 - def handler(event, _): - client = boto3.client('codepipeline', region_name="${AWS::Region}") - client.start_pipeline_execution(name="${AWS::StackName}") - Handler: 'index.handler' - FunctionName: !Sub "adf-cron-pipeline-${AWS::StackName}" - Description: !Sub "ADF Lambda Function - Cron ${AWS::StackName}" - MemorySize: 128 - Role: !GetAtt CronLambdaRole.Arn - Runtime: 'python3.7' - Timeout: 60 - CronPermissionForEventsToInvokeLambda: - Type: AWS::Lambda::Permission - Condition: HasSchedule - Properties: - FunctionName: !Ref CronLambdaFunction - Action: "lambda:InvokeFunction" - Principal: "events.amazonaws.com" - SourceArn: !GetAtt CronCloudwatchEventsRule.Arn - CronLambdaRole: - Type: "AWS::IAM::Role" - Condition: HasSchedule - Properties: - RoleName: !Sub "adf-cron-lambda-role-${AWS::StackName}" - AssumeRolePolicyDocument: - Version: "2012-10-17" - Statement: - - Effect: "Allow" - Principal: - Service: - - "lambda.amazonaws.com" - Action: - - "sts:AssumeRole" - Path: "/" - Policies: - - PolicyName: "adf-lambda-execution-role" - PolicyDocument: - Version: "2012-10-17" - Statement: - - Effect: "Allow" - Resource: !Sub "arn:aws:codepipeline:${AWS::Region}:${AWS::AccountId}:${Pipeline}" - Action: - - "codepipeline:StartPipelineExecution" - - Effect: "Allow" - Resource: "arn:aws:logs:*:*:*" - Action: "logs:*" - - Effect: "Allow" - Resource: "*" - Action: - - "lambda:GetAccountSettings" - - "lambda:ListFunctions" - - "lambda:ListTags" - - "lambda:GetFunction" - - "lambda:GetEventSourceMapping" - - "lambda:ListEventSourceMappings" - - "lambda:DeleteEventSourceMapping" - - "lambda:UpdateEventSourceMapping" - - 
"lambda:CreateEventSourceMapping" - - "iam:ListRoles" - Condition: - StringEquals: - lambda:FunctionArn: !Sub "arn:aws:lambda:${AWS::Region}:${AWS::AccountId}:function:adf-cron-pipeline-${AWS::StackName}" BuildProject: Type: AWS::CodeBuild::Project Properties: diff --git a/src/pipelines_repository/pipeline_types/cc-service-catalog.yml.j2 b/src/pipelines_repository/pipeline_types/cc-service-catalog.yml.j2 index f5b5d6614..1f2b38f3c 100644 --- a/src/pipelines_repository/pipeline_types/cc-service-catalog.yml.j2 +++ b/src/pipelines_repository/pipeline_types/cc-service-catalog.yml.j2 @@ -58,76 +58,8 @@ Resources: Description: !Sub Triggers ${AWS::StackName} on a Schedule ScheduleExpression: !Ref ScheduleExpression Targets: - - Arn: !GetAtt CronLambdaFunction.Arn + - Arn: !GetAtt Pipeline.Arn Id: !Sub "adf-cron-${AWS::StackName}" - CronLambdaFunction: - Type: 'AWS::Lambda::Function' - Condition: HasSchedule - Properties: - Code: - ZipFile: !Sub | - import boto3 - def handler(event, _): - client = boto3.client('codepipeline', region_name="${AWS::Region}") - client.start_pipeline_execution(name="${AWS::StackName}") - Handler: 'index.handler' - FunctionName: !Sub "adf-cron-pipeline-${AWS::StackName}" - Description: !Sub "ADF Lambda Function - Cron ${AWS::StackName}" - MemorySize: 128 - Role: !GetAtt CronLambdaRole.Arn - Runtime: 'python3.7' - Timeout: 60 - CronPermissionForEventsToInvokeLambda: - Type: AWS::Lambda::Permission - Condition: HasSchedule - Properties: - FunctionName: !Ref CronLambdaFunction - Action: "lambda:InvokeFunction" - Principal: "events.amazonaws.com" - SourceArn: !GetAtt CronCloudwatchEventsRule.Arn - CronLambdaRole: - Type: "AWS::IAM::Role" - Condition: HasSchedule - Properties: - RoleName: !Sub "adf-cron-lambda-role-${AWS::StackName}" - AssumeRolePolicyDocument: - Version: "2012-10-17" - Statement: - - Effect: "Allow" - Principal: - Service: - - "lambda.amazonaws.com" - Action: - - "sts:AssumeRole" - Path: "/" - Policies: - - PolicyName: 
"adf-lambda-execution-role" - PolicyDocument: - Version: "2012-10-17" - Statement: - - Effect: "Allow" - Resource: !Sub "arn:aws:codepipeline:${AWS::Region}:${AWS::AccountId}:${Pipeline}" - Action: - - "codepipeline:StartPipelineExecution" - - Effect: "Allow" - Resource: "arn:aws:logs:*:*:*" - Action: "logs:*" - - Effect: "Allow" - Resource: "*" - Action: - - "lambda:GetAccountSettings" - - "lambda:ListFunctions" - - "lambda:ListTags" - - "lambda:GetFunction" - - "lambda:GetEventSourceMapping" - - "lambda:ListEventSourceMappings" - - "lambda:DeleteEventSourceMapping" - - "lambda:UpdateEventSourceMapping" - - "lambda:CreateEventSourceMapping" - - "iam:ListRoles" - Condition: - StringEquals: - lambda:FunctionArn: !Sub "arn:aws:lambda:${AWS::Region}:${AWS::AccountId}:function:adf-cron-pipeline-${AWS::StackName}" BuildProject: Type: AWS::CodeBuild::Project Properties: diff --git a/src/pipelines_repository/pipeline_types/github-cloudformation.yml.j2 b/src/pipelines_repository/pipeline_types/github-cloudformation.yml.j2 index a774a33b2..14ac9e4c6 100644 --- a/src/pipelines_repository/pipeline_types/github-cloudformation.yml.j2 +++ b/src/pipelines_repository/pipeline_types/github-cloudformation.yml.j2 @@ -65,76 +65,8 @@ Resources: Description: !Sub Triggers ${AWS::StackName} on a Schedule ScheduleExpression: !Ref ScheduleExpression Targets: - - Arn: !GetAtt CronLambdaFunction.Arn + - Arn: !GetAtt Pipeline.Arn Id: !Sub "adf-cron-${AWS::StackName}" - CronLambdaFunction: - Type: 'AWS::Lambda::Function' - Condition: HasSchedule - Properties: - Code: - ZipFile: !Sub | - import boto3 - def handler(event, _): - client = boto3.client('codepipeline', region_name="${AWS::Region}") - client.start_pipeline_execution(name="${AWS::StackName}") - Handler: 'index.handler' - FunctionName: !Sub "adf-cron-pipeline-${AWS::StackName}" - Description: !Sub "ADF Lambda Function - Cron ${AWS::StackName}" - MemorySize: 128 - Role: !GetAtt CronLambdaRole.Arn - Runtime: 'python3.7' - Timeout: 60 - 
CronPermissionForEventsToInvokeLambda: - Type: AWS::Lambda::Permission - Condition: HasSchedule - Properties: - FunctionName: !Ref CronLambdaFunction - Action: "lambda:InvokeFunction" - Principal: "events.amazonaws.com" - SourceArn: !GetAtt CronCloudwatchEventsRule.Arn - CronLambdaRole: - Type: "AWS::IAM::Role" - Condition: HasSchedule - Properties: - RoleName: !Sub "adf-cron-lambda-role-${AWS::StackName}" - AssumeRolePolicyDocument: - Version: "2012-10-17" - Statement: - - Effect: "Allow" - Principal: - Service: - - "lambda.amazonaws.com" - Action: - - "sts:AssumeRole" - Path: "/" - Policies: - - PolicyName: "adf-lambda-execution-role" - PolicyDocument: - Version: "2012-10-17" - Statement: - - Effect: "Allow" - Resource: !Sub "arn:aws:codepipeline:${AWS::Region}:${AWS::AccountId}:${Pipeline}" - Action: - - "codepipeline:StartPipelineExecution" - - Effect: "Allow" - Resource: "arn:aws:logs:*:*:*" - Action: "logs:*" - - Effect: "Allow" - Resource: "*" - Action: - - "lambda:GetAccountSettings" - - "lambda:ListFunctions" - - "lambda:ListTags" - - "lambda:GetFunction" - - "lambda:GetEventSourceMapping" - - "lambda:ListEventSourceMappings" - - "lambda:DeleteEventSourceMapping" - - "lambda:UpdateEventSourceMapping" - - "lambda:CreateEventSourceMapping" - - "iam:ListRoles" - Condition: - StringEquals: - lambda:FunctionArn: !Sub "arn:aws:lambda:${AWS::Region}:${AWS::AccountId}:function:adf-cron-pipeline-${AWS::StackName}" Webhook: Type: 'AWS::CodePipeline::Webhook' Properties: