diff --git a/.coveragerc b/.coveragerc
new file mode 100644
index 00000000..845ab7c5
--- /dev/null
+++ b/.coveragerc
@@ -0,0 +1,13 @@
+# Copyright Amazon.com, Inc. or its affiliates. All rights reserved.
+# SPDX-License-Identifier: Apache-2.0
+#
+[run]
+branch = True
+omit = tests/*
+
+[report]
+show_missing = True
+precision = 1
+exclude_lines =
+ # Required due to conditional mypy imports
+ if TYPE_CHECKING:
diff --git a/.github/ISSUE_TEMPLATE/bug_report_template.md b/.github/ISSUE_TEMPLATE/bug_report_template.md
new file mode 100644
index 00000000..1151cfca
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/bug_report_template.md
@@ -0,0 +1,40 @@
+---
+name: Bug/Issue
+about: Use this to report bugs with AFT.
+labels: bug, pending investigation
+---
+
+**AFT Version:**
+(Can be found in the AFT Management Account in the SSM Parameter `/aft/config/aft/version`)
+
+**Terraform Version & Provider Versions**
+Please provide the outputs of `terraform version` and `terraform providers` from within your AFT environment.
+
+`terraform version`
+```
+{Replace me}
+```
+
+`terraform providers`
+```
+{Replace me}
+```
+
+**Bug Description**
+A clear and concise description of what the bug is.
+
+**To Reproduce**
+Steps to reproduce the behavior:
+1. Go to '...'
+2. Click on '....'
+3. Scroll down to '....'
+4. See error
+
+**Expected behavior**
+A clear and concise description of what you expected to happen.
+
+**Related Logs**
+Provide any related logs or error messages to help explain your problem.
+
+**Additional context**
+Add any other context about the problem here.
diff --git a/.github/ISSUE_TEMPLATE/feature_request_template.md b/.github/ISSUE_TEMPLATE/feature_request_template.md
new file mode 100644
index 00000000..91ee44a3
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/feature_request_template.md
@@ -0,0 +1,17 @@
+---
+name: Feature Request
+about: Suggest ideas or enhancements for AFT.
+labels: enhancement
+---
+
+**Describe the outcome you'd like**
+
+A clear and concise description of what you want to happen.
+
+**Is your feature request related to a problem you are currently experiencing? If so, please describe.**
+
+A clear and concise description of what the problem is.
+
+**Additional context**
+
+Add any other context or screenshots about the feature request here.
diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md
new file mode 100644
index 00000000..7b7fdfb7
--- /dev/null
+++ b/.github/pull_request_template.md
@@ -0,0 +1,7 @@
+# Contributing to the AWS Control Tower Account Factory for Terraform
+
+Thank you for your interest in contributing to the AWS Control Tower Account Factory for Terraform.
+
+At this time, we are not accepting contributions. If contributions are accepted in the future, the AWS Control Tower Account Factory for Terraform is released under the [Apache license](http://aws.amazon.com/apache2.0/) and any code submitted will be released under that license.
+
+If you have a feature request, please create an issue using the Feature Request template. Thank you!
diff --git a/.gitignore b/.gitignore
index 4e09bd7f..5e945389 100644
--- a/.gitignore
+++ b/.gitignore
@@ -190,6 +190,7 @@ cython_debug/
.tflint.hcl
.terraform.lock.hcl
backend.tf
+.terraform
# Local .terraform directories
**/.terraform/*
@@ -225,6 +226,9 @@ override.tf.json
.terraformrc
terraform.rc
+# Always ignore layer build directories
+sources/aft-lambda-layer/build
+
### Windows ###
# Windows thumbnail cache files
Thumbs.db
diff --git a/CODEOWNERS b/CODEOWNERS
index 870af832..8e1cc0d8 100644
--- a/CODEOWNERS
+++ b/CODEOWNERS
@@ -1 +1 @@
-@balltrev @adam-daily @hanafya @tonynv @andrew-glenn
+* @ouyanguf @wiltangg @aviwshah @harrisonhku @JamesActually @dashbat @dhingraa-github @sk-at-amazon @aspiratr-aws
diff --git a/PYTHON_VERSION b/PYTHON_VERSION
new file mode 100644
index 00000000..2c073331
--- /dev/null
+++ b/PYTHON_VERSION
@@ -0,0 +1 @@
+3.11
diff --git a/README.md b/README.md
index 5257ddca..b7dbfe70 100644
--- a/README.md
+++ b/README.md
@@ -54,20 +54,23 @@ for more information.
Now that you have configured and deployed AWS Control Tower Account Factory for Terraform, follow the steps outlined in [Post-deployment steps](https://docs.aws.amazon.com/controltower/latest/userguide/aft-post-deployment.html) and [Provision accounts with AWS Control Tower Account Factory for Terraform](https://docs.aws.amazon.com/controltower/latest/userguide/taf-account-provisioning.html) to begin using your environment.
+## Collection of Operational Metrics
+As of version 1.6.0, AFT collects anonymous operational metrics to help AWS improve the quality and features of the solution. For more information, including how to disable this capability, please see the [documentation here](https://docs.aws.amazon.com/controltower/latest/userguide/aft-operational-metrics.html).
-
+
## Requirements
| Name | Version |
|------|---------|
-| [terraform](#requirement\_terraform) | >= 0.15.0 |
-| [aws](#requirement\_aws) | >= 3.15 |
+| [terraform](#requirement\_terraform) | >= 1.2.0, < 2.0.0 |
+| [aws](#requirement\_aws) | >= 5.11.0, < 6.0.0 |
## Providers
| Name | Version |
|------|---------|
+| [aws](#provider\_aws) | >= 5.11.0, < 6.0.0 |
| [local](#provider\_local) | n/a |
## Modules
@@ -83,11 +86,16 @@ Now that you have configured and deployed AWS Control Tower Account Factory for
| [aft\_iam\_roles](#module\_aft\_iam\_roles) | ./modules/aft-iam-roles | n/a |
| [aft\_lambda\_layer](#module\_aft\_lambda\_layer) | ./modules/aft-lambda-layer | n/a |
| [aft\_ssm\_parameters](#module\_aft\_ssm\_parameters) | ./modules/aft-ssm-parameters | n/a |
+| [packaging](#module\_packaging) | ./modules/aft-archives | n/a |
## Resources
| Name | Type |
|------|------|
+| [aws_partition.current](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/data-sources/partition) | data source |
+| [aws_service.home_region_validation](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/data-sources/service) | data source |
+| [aws_ssm_parameters_by_path.servicecatalog_regional_data](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/data-sources/ssm_parameters_by_path) | data source |
+| [local_file.python_version](https://registry.terraform.io/providers/hashicorp/local/latest/docs/data-sources/file) | data source |
| [local_file.version](https://registry.terraform.io/providers/hashicorp/local/latest/docs/data-sources/file) | data source |
## Inputs
@@ -100,34 +108,46 @@ Now that you have configured and deployed AWS Control Tower Account Factory for
| [account\_provisioning\_customizations\_repo\_name](#input\_account\_provisioning\_customizations\_repo\_name) | Repository name for the account provisioning customizations files. For non-CodeCommit repos, name should be in the format of Org/Repo | `string` | `"aft-account-provisioning-customizations"` | no |
| [account\_request\_repo\_branch](#input\_account\_request\_repo\_branch) | Branch to source account request repo from | `string` | `"main"` | no |
| [account\_request\_repo\_name](#input\_account\_request\_repo\_name) | Repository name for the account request files. For non-CodeCommit repos, name should be in the format of Org/Repo | `string` | `"aft-account-request"` | no |
+| [aft\_backend\_bucket\_access\_logs\_object\_expiration\_days](#input\_aft\_backend\_bucket\_access\_logs\_object\_expiration\_days) | Amount of days to keep the objects stored in the access logs bucket for AFT backend buckets | `number` | `365` | no |
+| [aft\_customer\_private\_subnets](#input\_aft\_customer\_private\_subnets) | A list of private subnets to deploy AFT resources in, if customer is providing an existing VPC. Only supported for new deployments. | `list(string)` | `[]` | no |
+| [aft\_customer\_vpc\_id](#input\_aft\_customer\_vpc\_id) | The VPC ID to deploy AFT resources in, if customer is providing an existing VPC. Only supported for new deployments. | `string` | `null` | no |
+| [aft\_enable\_vpc](#input\_aft\_enable\_vpc) | Flag turning use of VPC on/off for AFT | `bool` | `true` | no |
| [aft\_feature\_cloudtrail\_data\_events](#input\_aft\_feature\_cloudtrail\_data\_events) | Feature flag toggling CloudTrail data events on/off | `bool` | `false` | no |
| [aft\_feature\_delete\_default\_vpcs\_enabled](#input\_aft\_feature\_delete\_default\_vpcs\_enabled) | Feature flag toggling deletion of default VPCs on/off | `bool` | `false` | no |
| [aft\_feature\_enterprise\_support](#input\_aft\_feature\_enterprise\_support) | Feature flag toggling Enterprise Support enrollment on/off | `bool` | `false` | no |
-| [aft\_framework\_repo\_git\_ref](#input\_aft\_framework\_repo\_git\_ref) | Git branch from which the AFT framework should be sourced from | `string` | `"main"` | no |
-| [aft\_framework\_repo\_url](#input\_aft\_framework\_repo\_url) | Git repo URL where the AFT framework should be sourced from | `string` | `"git@github.com:aws-ia/terraform-aws-control_tower_account_factory.git"` | no |
+| [aft\_framework\_repo\_git\_ref](#input\_aft\_framework\_repo\_git\_ref) | Git branch from which the AFT framework should be sourced | `string` | `null` | no |
+| [aft\_framework\_repo\_url](#input\_aft\_framework\_repo\_url) | Git repo URL where the AFT framework should be sourced from | `string` | `"https://github.com/aws-ia/terraform-aws-control_tower_account_factory.git"` | no |
| [aft\_management\_account\_id](#input\_aft\_management\_account\_id) | AFT Management Account ID | `string` | n/a | yes |
+| [aft\_metrics\_reporting](#input\_aft\_metrics\_reporting) | Flag toggling reporting of operational metrics | `bool` | `true` | no |
| [aft\_vpc\_cidr](#input\_aft\_vpc\_cidr) | CIDR Block to allocate to the AFT VPC | `string` | `"192.168.0.0/22"` | no |
+| [aft\_vpc\_endpoints](#input\_aft\_vpc\_endpoints) | Flag turning VPC endpoints on/off for AFT VPC | `bool` | `true` | no |
| [aft\_vpc\_private\_subnet\_01\_cidr](#input\_aft\_vpc\_private\_subnet\_01\_cidr) | CIDR Block to allocate to the Private Subnet 01 | `string` | `"192.168.0.0/24"` | no |
| [aft\_vpc\_private\_subnet\_02\_cidr](#input\_aft\_vpc\_private\_subnet\_02\_cidr) | CIDR Block to allocate to the Private Subnet 02 | `string` | `"192.168.1.0/24"` | no |
| [aft\_vpc\_public\_subnet\_01\_cidr](#input\_aft\_vpc\_public\_subnet\_01\_cidr) | CIDR Block to allocate to the Public Subnet 01 | `string` | `"192.168.2.0/25"` | no |
| [aft\_vpc\_public\_subnet\_02\_cidr](#input\_aft\_vpc\_public\_subnet\_02\_cidr) | CIDR Block to allocate to the Public Subnet 02 | `string` | `"192.168.2.128/25"` | no |
-| [aft\_vpc\_endpoints](#input\_aft\_vpc\_endpoints) | Flag turning VPC endpoints on/off for AFT VPC | `bool` | `true` | no |
| [audit\_account\_id](#input\_audit\_account\_id) | Audit Account Id | `string` | n/a | yes |
+| [backup\_recovery\_point\_retention](#input\_backup\_recovery\_point\_retention) | Number of days to keep backup recovery points in AFT DynamoDB tables. Default = Never Expire | `number` | `null` | no |
| [cloudwatch\_log\_group\_retention](#input\_cloudwatch\_log\_group\_retention) | Amount of days to keep CloudWatch Log Groups for Lambda functions. 0 = Never Expire | `string` | `"0"` | no |
+| [concurrent\_account\_factory\_actions](#input\_concurrent\_account\_factory\_actions) | Maximum number of accounts that can be provisioned in parallel. | `number` | `5` | no |
| [ct\_home\_region](#input\_ct\_home\_region) | The region from which this module will be executed. This MUST be the same region as Control Tower is deployed. | `string` | n/a | yes |
| [ct\_management\_account\_id](#input\_ct\_management\_account\_id) | Control Tower Management Account Id | `string` | n/a | yes |
| [github\_enterprise\_url](#input\_github\_enterprise\_url) | GitHub enterprise URL, if GitHub Enterprise is being used | `string` | `"null"` | no |
+| [gitlab\_selfmanaged\_url](#input\_gitlab\_selfmanaged\_url) | GitLab SelfManaged URL, if GitLab SelfManaged is being used | `string` | `"null"` | no |
+| [global\_codebuild\_timeout](#input\_global\_codebuild\_timeout) | Codebuild build timeout | `number` | `60` | no |
| [global\_customizations\_repo\_branch](#input\_global\_customizations\_repo\_branch) | Branch to source global customizations repo from | `string` | `"main"` | no |
| [global\_customizations\_repo\_name](#input\_global\_customizations\_repo\_name) | Repository name for the global customization files. For non-CodeCommit repos, name should be in the format of Org/Repo | `string` | `"aft-global-customizations"` | no |
| [log\_archive\_account\_id](#input\_log\_archive\_account\_id) | Log Archive Account Id | `string` | n/a | yes |
+| [log\_archive\_bucket\_object\_expiration\_days](#input\_log\_archive\_bucket\_object\_expiration\_days) | Amount of days to keep the objects stored in the AFT logging bucket | `number` | `365` | no |
| [maximum\_concurrent\_customizations](#input\_maximum\_concurrent\_customizations) | Maximum number of customizations/pipelines to run at once | `number` | `5` | no |
+| [tags](#input\_tags) | Map of tags to apply to resources deployed by AFT. | `map(any)` | `null` | no |
| [terraform\_api\_endpoint](#input\_terraform\_api\_endpoint) | API Endpoint for Terraform. Must be in the format of https://xxx.xxx. | `string` | `"https://app.terraform.io/api/v2/"` | no |
| [terraform\_distribution](#input\_terraform\_distribution) | Terraform distribution being used for AFT - valid values are oss, tfc, or tfe | `string` | `"oss"` | no |
| [terraform\_org\_name](#input\_terraform\_org\_name) | Organization name for Terraform Cloud or Enterprise | `string` | `"null"` | no |
+| [terraform\_project\_name](#input\_terraform\_project\_name) | Project name for Terraform Cloud or Enterprise - project must exist before deployment | `string` | `"Default Project"` | no |
| [terraform\_token](#input\_terraform\_token) | Terraform token for Cloud or Enterprise | `string` | `"null"` | no |
-| [terraform\_version](#input\_terraform\_version) | Terraform version being used for AFT | `string` | `"0.15.5"` | no |
-| [tf\_backend\_secondary\_region](#input\_tf\_backend\_secondary\_region) | AFT creates a backend for state tracking for its own state as well as OSS cases. The backend's primary region is the same as the AFT region, but this defines the secondary region to replicate to. | `string` | n/a | yes |
-| [vcs\_provider](#input\_vcs\_provider) | Customer VCS Provider - valid inputs are codecommit, bitbucket, github, or githubenterprise | `string` | `"codecommit"` | no |
+| [terraform\_version](#input\_terraform\_version) | Terraform version being used for AFT | `string` | `"1.6.0"` | no |
+| [tf\_backend\_secondary\_region](#input\_tf\_backend\_secondary\_region) | AFT creates a backend for state tracking for its own state as well as OSS cases. The backend's primary region is the same as the AFT region, but this defines the secondary region to replicate to. | `string` | `""` | no |
+| [vcs\_provider](#input\_vcs\_provider) | Customer VCS Provider - valid inputs are codecommit, bitbucket, github, githubenterprise, gitlab, or gitlabselfmanaged | `string` | `"codecommit"` | no |
## Outputs
@@ -149,10 +169,12 @@ Now that you have configured and deployed AWS Control Tower Account Factory for
| [aft\_vpc\_public\_subnet\_01\_cidr](#output\_aft\_vpc\_public\_subnet\_01\_cidr) | n/a |
| [aft\_vpc\_public\_subnet\_02\_cidr](#output\_aft\_vpc\_public\_subnet\_02\_cidr) | n/a |
| [audit\_account\_id](#output\_audit\_account\_id) | n/a |
+| [backup\_recovery\_point\_retention](#output\_backup\_recovery\_point\_retention) | n/a |
| [cloudwatch\_log\_group\_retention](#output\_cloudwatch\_log\_group\_retention) | n/a |
| [ct\_home\_region](#output\_ct\_home\_region) | n/a |
| [ct\_management\_account\_id](#output\_ct\_management\_account\_id) | n/a |
| [github\_enterprise\_url](#output\_github\_enterprise\_url) | n/a |
+| [gitlab\_selfmanaged\_url](#output\_gitlab\_selfmanaged\_url) | n/a |
| [global\_customizations\_repo\_branch](#output\_global\_customizations\_repo\_branch) | n/a |
| [global\_customizations\_repo\_name](#output\_global\_customizations\_repo\_name) | n/a |
| [log\_archive\_account\_id](#output\_log\_archive\_account\_id) | n/a |
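
The inputs table above now covers several new operational knobs. As a hedged sketch (account IDs, regions, and values are placeholders, not recommendations), a module block exercising them might look like:

```
module "aft" {
  source = "github.com/aws-ia/terraform-aws-control_tower_account_factory"

  # Required Vars (placeholder IDs)
  ct_management_account_id    = "111122223333"
  log_archive_account_id      = "444455556666"
  audit_account_id            = "123456789012"
  aft_management_account_id   = "777788889999"
  ct_home_region              = "us-east-1"
  tf_backend_secondary_region = "us-west-2"

  # New options documented above (example values only)
  aft_metrics_reporting           = false            # opt out of operational metrics
  backup_recovery_point_retention = 30               # days; default null = never expire
  global_codebuild_timeout        = 90               # minutes
  tags                            = { env = "mgmt" } # merged with managed_by = "AFT"
}
```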
diff --git a/SECURITY.md b/SECURITY.md
new file mode 100644
index 00000000..929cbfa6
--- /dev/null
+++ b/SECURITY.md
@@ -0,0 +1,11 @@
+## Reporting Security Issues
+
+Amazon Web Services (AWS) is dedicated to the responsible disclosure of security vulnerabilities.
+
+We kindly ask that you **do not** open a public GitHub issue to report security concerns.
+
+Instead, please submit the issue to the AWS Vulnerability Disclosure Program via [HackerOne](https://hackerone.com/aws_vdp) or send your report via [email](mailto:aws-security@amazon.com).
+
+For more details, visit the [AWS Vulnerability Reporting Page](http://aws.amazon.com/security/vulnerability-reporting/).
+
+Thank you in advance for collaborating with us to help protect our customers.
diff --git a/VERSION b/VERSION
index 31e5c843..63e799cf 100644
--- a/VERSION
+++ b/VERSION
@@ -1 +1 @@
-1.3.3
+1.14.1
diff --git a/data.tf b/data.tf
index cfe508c1..2dc1a41a 100644
--- a/data.tf
+++ b/data.tf
@@ -4,3 +4,25 @@
data "local_file" "version" {
filename = "${path.module}/VERSION"
}
+
+data "local_file" "python_version" {
+ filename = "${path.module}/PYTHON_VERSION"
+}
+
+data "aws_ssm_parameters_by_path" "servicecatalog_regional_data" {
+ count = data.aws_partition.current.partition == "aws" ? 1 : 0
+ path = "/aws/service/global-infrastructure/services/servicecatalog/regions"
+}
+
+data "aws_service" "home_region_validation" {
+ service_id = "controltower"
+ lifecycle {
+ precondition {
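+      # The SSM-derived region list exists only in the commercial partition
+      # (count = 0 elsewhere), so try() falls back to the hardcoded list in locals.tf.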
+      condition     = try(contains(data.aws_ssm_parameters_by_path.servicecatalog_regional_data[0].values, var.ct_home_region), contains(local.service_catalog_regional_availability, var.ct_home_region))
+ error_message = "AFT is not supported on Control Tower home region ${var.ct_home_region}. Refer to https://docs.aws.amazon.com/controltower/latest/userguide/limits.html for more information."
+ }
+ }
+}
+
+data "aws_partition" "current" {
+}
diff --git a/examples/gitlab+tf_oss/main.tf b/examples/gitlab+tf_oss/main.tf
new file mode 100644
index 00000000..eb0e4aea
--- /dev/null
+++ b/examples/gitlab+tf_oss/main.tf
@@ -0,0 +1,19 @@
+# Copyright Amazon.com, Inc. or its affiliates. All rights reserved.
+# SPDX-License-Identifier: Apache-2.0
+#
+module "aft" {
+ source = "github.com/aws-ia/terraform-aws-control_tower_account_factory"
+ # Required Vars
+ ct_management_account_id = "111122223333"
+ log_archive_account_id = "444455556666"
+ audit_account_id = "123456789012"
+ aft_management_account_id = "777788889999"
+ ct_home_region = "us-east-1"
+ tf_backend_secondary_region = "us-west-2"
+ # VCS Vars
+ vcs_provider = "gitlab"
+ account_request_repo_name = "ExampleProject/example-repo-1"
+ global_customizations_repo_name = "ExampleProject/example-repo-2"
+ account_customizations_repo_name = "ExampleProject/example-repo-3"
+ account_provisioning_customizations_repo_name = "ExampleProject/example-repo-4"
+}
diff --git a/examples/gitlabselfmanaged+tf_cloud/main.tf b/examples/gitlabselfmanaged+tf_cloud/main.tf
new file mode 100644
index 00000000..a70861aa
--- /dev/null
+++ b/examples/gitlabselfmanaged+tf_cloud/main.tf
@@ -0,0 +1,24 @@
+# Copyright Amazon.com, Inc. or its affiliates. All rights reserved.
+# SPDX-License-Identifier: Apache-2.0
+#
+module "aft" {
+ source = "github.com/aws-ia/terraform-aws-control_tower_account_factory"
+ # Required Vars
+ ct_management_account_id = "111122223333"
+ log_archive_account_id = "444455556666"
+ audit_account_id = "123456789012"
+ aft_management_account_id = "777788889999"
+ ct_home_region = "us-east-1"
+ tf_backend_secondary_region = "us-west-2"
+ # VCS Vars
+ vcs_provider = "gitlabselfmanaged"
+ gitlab_selfmanaged_url = "https://gitlab.example.com"
+ account_request_repo_name = "ExampleProject/example-repo-1"
+ global_customizations_repo_name = "ExampleProject/example-repo-2"
+ account_customizations_repo_name = "ExampleProject/example-repo-3"
+ account_provisioning_customizations_repo_name = "ExampleProject/example-repo-4"
+ # TF Vars
+ terraform_distribution = "tfc"
+ terraform_token = "EXAMPLE-uoc1c1qsw7poexampleewjeno1pte3rw"
+ terraform_org_name = "ExampleOrg"
+}
diff --git a/examples/multiple-account-customizations/account-customization-dev/terraform/backend.jinja b/examples/multiple-account-customizations/account-customization-dev/terraform/backend.jinja
index 66a9047e..cb7154de 100644
--- a/examples/multiple-account-customizations/account-customization-dev/terraform/backend.jinja
+++ b/examples/multiple-account-customizations/account-customization-dev/terraform/backend.jinja
@@ -3,7 +3,7 @@
{% if tf_distribution_type == "oss" -%}
terraform {
- required_version = ">= 0.15.1"
+ required_version = ">= {{ tf_version }}"
backend "s3" {
region = "{{ region }}"
bucket = "{{ bucket }}"
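
Under the OSS branch, the buildspecs below pass `-D tf_version=$TF_VERSION` when rendering these templates, so the pinned floor now tracks the deployed Terraform version. A sketch of the rendered output with illustrative values (`tf_version=1.6.0`, placeholder region and bucket):

```
terraform {
  required_version = ">= 1.6.0"
  backend "s3" {
    region = "us-east-1"
    bucket = "example-aft-backend-bucket"
    # key, dynamodb_table, and kms_key_id render the same way
  }
}
```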
diff --git a/examples/multiple-account-customizations/account-customization-prod/terraform/backend.jinja b/examples/multiple-account-customizations/account-customization-prod/terraform/backend.jinja
index 66a9047e..cb7154de 100644
--- a/examples/multiple-account-customizations/account-customization-prod/terraform/backend.jinja
+++ b/examples/multiple-account-customizations/account-customization-prod/terraform/backend.jinja
@@ -3,7 +3,7 @@
{% if tf_distribution_type == "oss" -%}
terraform {
- required_version = ">= 0.15.1"
+ required_version = ">= {{ tf_version }}"
backend "s3" {
region = "{{ region }}"
bucket = "{{ bucket }}"
diff --git a/examples/multiple-regions-customization/multiple-regions/terraform/backend.jinja b/examples/multiple-regions-customization/multiple-regions/terraform/backend.jinja
index 66a9047e..cb7154de 100644
--- a/examples/multiple-regions-customization/multiple-regions/terraform/backend.jinja
+++ b/examples/multiple-regions-customization/multiple-regions/terraform/backend.jinja
@@ -3,7 +3,7 @@
{% if tf_distribution_type == "oss" -%}
terraform {
- required_version = ">= 0.15.1"
+ required_version = ">= {{ tf_version }}"
backend "s3" {
region = "{{ region }}"
bucket = "{{ bucket }}"
diff --git a/locals.tf b/locals.tf
index f7e5d4ca..4f231a6a 100644
--- a/locals.tf
+++ b/locals.tf
@@ -3,6 +3,7 @@
#
locals {
aft_version = chomp(trimspace(data.local_file.version.content))
+ aft_framework_repo_git_ref = var.aft_framework_repo_git_ref == null || var.aft_framework_repo_git_ref == "" ? local.aft_version : var.aft_framework_repo_git_ref
aft_account_provisioning_customizations_sfn_name = "aft-account-provisioning-customizations"
aft_account_provisioning_framework_sfn_name = "aft-account-provisioning-framework"
trigger_customizations_sfn_name = "aft-invoke-customizations"
@@ -13,10 +14,18 @@ locals {
account_factory_product_name = "AWS Control Tower Account Factory"
log_archive_bucket_name = "aws-aft-logs"
log_archive_access_logs_bucket_name = "aws-aft-s3-access-logs"
- log_archive_bucket_object_expiration_days = "365"
lambda_layer_codebuild_delay = "420s"
- lambda_layer_python_version = "3.8"
+ lambda_layer_python_version = chomp(trimspace(data.local_file.python_version.content))
+ lambda_runtime_python_version = format("%s%s", "python", chomp(trimspace(data.local_file.python_version.content)))
lambda_layer_name = "aft-common"
+ create_role_lambda_function_name = "aft-account-provisioning-framework-create-aft-execution-role"
+ tag_account_lambda_function_name = "aft-account-provisioning-framework-tag-account"
+ persist_metadata_lambda_function_name = "aft-account-provisioning-framework-persist-metadata"
+ account_metadata_ssm_lambda_function_name = "aft-account-provisioning-framework-account-metadata-ssm"
+ delete_default_vpc_lambda_function_name = "aft-delete-default-vpc"
+ enroll_support_lambda_function_name = "aft-enroll-support"
+ enable_cloudtrail_lambda_function_name = "aft-enable-cloudtrail"
+ aft_tags = merge(var.tags, { managed_by = "AFT" })
ssm_paths = {
aft_tf_aws_customizations_module_url_ssm_path = "/aft/config/aft-pipeline-code-source/repo-url"
aft_tf_aws_customizations_module_git_ref_ssm_path = "/aft/config/aft-pipeline-code-source/repo-git-ref"
@@ -26,4 +35,38 @@ locals {
aft_tf_ddb_table_ssm_path = "/aft/config/oss-backend/table-id"
aft_tf_version_ssm_path = "/aft/config/terraform/version"
}
+ service_catalog_regional_availability = [
+ "ca-central-1",
+ "cn-north-1",
+ "cn-northwest-1",
+ "eu-north-1",
+ "eu-west-2",
+ "eu-west-3",
+ "us-east-1",
+ "us-east-2",
+ "us-gov-west-1",
+ "us-west-2",
+ "af-south-1",
+ "ap-east-1",
+ "ap-south-2",
+ "ap-southeast-2",
+ "ap-southeast-3",
+ "ap-southeast-4",
+ "me-central-1",
+ "me-south-1",
+ "sa-east-1",
+ "us-gov-east-1",
+ "ap-northeast-2",
+ "ap-northeast-3",
+ "ap-south-1",
+ "ap-southeast-1",
+ "eu-central-1",
+ "eu-south-1",
+ "eu-south-2",
+ "eu-west-1",
+ "il-central-1",
+ "us-west-1",
+ "ap-northeast-1",
+ "eu-central-2"
+ ]
}
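
Two of the new locals are simple derivations; with PYTHON_VERSION containing `3.11` and an illustrative `var.tags`, they evaluate as sketched here:

```
# With PYTHON_VERSION = "3.11" and var.tags = { env = "mgmt" }:
#   lambda_runtime_python_version = format("%s%s", "python", "3.11")
#                                 = "python3.11"  # Lambda runtime identifier
#   aft_tags = merge({ env = "mgmt" }, { managed_by = "AFT" })
#            = { env = "mgmt", managed_by = "AFT" }  # AFT tag wins on collision
```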
diff --git a/main.tf b/main.tf
index d38e872e..d21054bb 100644
--- a/main.tf
+++ b/main.tf
@@ -18,11 +18,20 @@ module "aft_account_provisioning_framework" {
aft_failure_sns_topic_arn = module.aft_account_request_framework.aft_failure_sns_topic_arn
aft_common_layer_arn = module.aft_lambda_layer.layer_version_arn
aft_kms_key_arn = module.aft_account_request_framework.aft_kms_key_arn
+ aft_enable_vpc = module.aft_account_request_framework.vpc_deployment
aft_vpc_private_subnets = module.aft_account_request_framework.aft_vpc_private_subnets
aft_vpc_default_sg = module.aft_account_request_framework.aft_vpc_default_sg
cloudwatch_log_group_retention = var.cloudwatch_log_group_retention
provisioning_framework_archive_path = module.packaging.provisioning_framework_archive_path
provisioning_framework_archive_hash = module.packaging.provisioning_framework_archive_hash
+ create_role_lambda_function_name = local.create_role_lambda_function_name
+ tag_account_lambda_function_name = local.tag_account_lambda_function_name
+ persist_metadata_lambda_function_name = local.persist_metadata_lambda_function_name
+ account_metadata_ssm_lambda_function_name = local.account_metadata_ssm_lambda_function_name
+ delete_default_vpc_lambda_function_name = local.delete_default_vpc_lambda_function_name
+ enroll_support_lambda_function_name = local.enroll_support_lambda_function_name
+ enable_cloudtrail_lambda_function_name = local.enable_cloudtrail_lambda_function_name
+ lambda_runtime_python_version = local.lambda_runtime_python_version
}
module "aft_account_request_framework" {
@@ -35,26 +44,32 @@ module "aft_account_request_framework" {
aft_account_provisioning_framework_sfn_name = local.aft_account_provisioning_framework_sfn_name
aft_common_layer_arn = module.aft_lambda_layer.layer_version_arn
cloudwatch_log_group_retention = var.cloudwatch_log_group_retention
+ aft_enable_vpc = var.aft_enable_vpc
aft_vpc_cidr = var.aft_vpc_cidr
aft_vpc_private_subnet_01_cidr = var.aft_vpc_private_subnet_01_cidr
aft_vpc_private_subnet_02_cidr = var.aft_vpc_private_subnet_02_cidr
aft_vpc_public_subnet_01_cidr = var.aft_vpc_public_subnet_01_cidr
aft_vpc_public_subnet_02_cidr = var.aft_vpc_public_subnet_02_cidr
aft_vpc_endpoints = var.aft_vpc_endpoints
+ concurrent_account_factory_actions = var.concurrent_account_factory_actions
request_framework_archive_path = module.packaging.request_framework_archive_path
request_framework_archive_hash = module.packaging.request_framework_archive_hash
+ lambda_runtime_python_version = local.lambda_runtime_python_version
+ backup_recovery_point_retention = var.backup_recovery_point_retention
+ aft_customer_vpc_id = var.aft_customer_vpc_id
+ aft_customer_private_subnets = var.aft_customer_private_subnets
}
-
-
module "aft_backend" {
providers = {
aws.primary_region = aws.aft_management
aws.secondary_region = aws.tf_backend_secondary_region
}
- source = "./modules/aft-backend"
- primary_region = var.ct_home_region
- secondary_region = var.tf_backend_secondary_region
+ source = "./modules/aft-backend"
+ primary_region = var.ct_home_region
+ secondary_region = var.tf_backend_secondary_region
+ aft_management_account_id = var.aft_management_account_id
+ aft_backend_bucket_access_logs_object_expiration_days = var.aft_backend_bucket_access_logs_object_expiration_days
}
module "aft_code_repositories" {
@@ -77,6 +92,7 @@ module "aft_code_repositories" {
account_customizations_repo_name = var.account_customizations_repo_name
global_customizations_repo_name = var.global_customizations_repo_name
github_enterprise_url = var.github_enterprise_url
+ gitlab_selfmanaged_url = var.gitlab_selfmanaged_url
vcs_provider = var.vcs_provider
terraform_distribution = var.terraform_distribution
account_provisioning_customizations_repo_name = var.account_provisioning_customizations_repo_name
@@ -84,6 +100,8 @@ module "aft_code_repositories" {
account_customizations_repo_branch = var.account_customizations_repo_branch
global_customizations_repo_branch = var.global_customizations_repo_branch
log_group_retention = var.cloudwatch_log_group_retention
+ global_codebuild_timeout = var.global_codebuild_timeout
+ aft_enable_vpc = module.aft_account_request_framework.vpc_deployment
}
module "aft_customizations" {
@@ -117,6 +135,9 @@ module "aft_customizations" {
maximum_concurrent_customizations = var.maximum_concurrent_customizations
customizations_archive_path = module.packaging.customizations_archive_path
customizations_archive_hash = module.packaging.customizations_archive_hash
+ global_codebuild_timeout = var.global_codebuild_timeout
+ lambda_runtime_python_version = local.lambda_runtime_python_version
+ aft_enable_vpc = module.aft_account_request_framework.vpc_deployment
}
module "aft_feature_options" {
@@ -129,9 +150,10 @@ module "aft_feature_options" {
source = "./modules/aft-feature-options"
log_archive_access_logs_bucket_name = local.log_archive_access_logs_bucket_name
log_archive_bucket_name = local.log_archive_bucket_name
- log_archive_bucket_object_expiration_days = local.log_archive_bucket_object_expiration_days
+ log_archive_bucket_object_expiration_days = var.log_archive_bucket_object_expiration_days
aft_features_sfn_name = local.aft_features_sfn_name
aft_kms_key_arn = module.aft_account_request_framework.aft_kms_key_arn
+ aft_kms_key_id = module.aft_account_request_framework.aft_kms_key_id
aft_common_layer_arn = module.aft_lambda_layer.layer_version_arn
aft_sns_topic_arn = module.aft_account_request_framework.sns_topic_arn
aft_failure_sns_topic_arn = module.aft_account_request_framework.failure_sns_topic_arn
@@ -141,6 +163,11 @@ module "aft_feature_options" {
cloudwatch_log_group_retention = var.cloudwatch_log_group_retention
feature_options_archive_path = module.packaging.feature_options_archive_path
feature_options_archive_hash = module.packaging.feature_options_archive_hash
+ delete_default_vpc_lambda_function_name = local.delete_default_vpc_lambda_function_name
+ enroll_support_lambda_function_name = local.enroll_support_lambda_function_name
+ enable_cloudtrail_lambda_function_name = local.enable_cloudtrail_lambda_function_name
+ lambda_runtime_python_version = local.lambda_runtime_python_version
+ aft_enable_vpc = module.aft_account_request_framework.vpc_deployment
}
module "aft_iam_roles" {
@@ -162,6 +189,7 @@ module "aft_lambda_layer" {
lambda_layer_name = local.lambda_layer_name
lambda_layer_codebuild_delay = local.lambda_layer_codebuild_delay
lambda_layer_python_version = local.lambda_layer_python_version
+ lambda_runtime_python_version = local.lambda_runtime_python_version
aft_tf_aws_customizations_module_git_ref_ssm_path = local.ssm_paths.aft_tf_aws_customizations_module_git_ref_ssm_path
aft_tf_aws_customizations_module_url_ssm_path = local.ssm_paths.aft_tf_aws_customizations_module_url_ssm_path
aws_region = var.ct_home_region
@@ -172,6 +200,8 @@ module "aft_lambda_layer" {
s3_bucket_name = module.aft_customizations.aft_codepipeline_customizations_bucket_name
builder_archive_path = module.packaging.builder_archive_path
builder_archive_hash = module.packaging.builder_archive_hash
+ cloudwatch_log_group_retention = var.cloudwatch_log_group_retention
+ aft_enable_vpc = module.aft_account_request_framework.vpc_deployment
}
module "aft_ssm_parameters" {
@@ -186,6 +216,7 @@ module "aft_ssm_parameters" {
aft_controltower_events_table_name = module.aft_account_request_framework.controltower_events_table_name
account_factory_product_name = module.aft_account_request_framework.account_factory_product_name
aft_invoke_aft_account_provisioning_framework_function_name = module.aft_account_request_framework.invoke_aft_account_provisioning_framework_lambda_function_name
+ aft_cleanup_resources_function_name = module.aft_account_request_framework.aft_cleanup_resources_function_name
aft_account_provisioning_framework_sfn_name = module.aft_account_request_framework.aft_account_provisioning_framework_sfn_name
aft_sns_topic_arn = module.aft_account_request_framework.sns_topic_arn
aft_failure_sns_topic_arn = module.aft_account_request_framework.failure_sns_topic_arn
@@ -194,15 +225,13 @@ module "aft_ssm_parameters" {
request_processor_function_arn = module.aft_account_request_framework.request_processor_function_arn
control_tower_event_logger_function_arn = module.aft_account_request_framework.control_tower_event_logger_function_arn
invoke_aft_account_provisioning_framework_function_arn = module.aft_account_request_framework.invoke_aft_account_provisioning_framework_function_arn
- validate_request_function_arn = module.aft_account_provisioning_framework.validate_request_function_arn
- get_account_info_function_arn = module.aft_account_provisioning_framework.get_account_info_function_arn
create_role_function_arn = module.aft_account_provisioning_framework.create_role_function_arn
tag_account_function_arn = module.aft_account_provisioning_framework.tag_account_function_arn
persist_metadata_function_arn = module.aft_account_provisioning_framework.persist_metadata_function_arn
aft_customizations_identify_targets_function_arn = module.aft_customizations.aft_customizations_identify_targets_function_arn
aft_customizations_execute_pipeline_function_arn = module.aft_customizations.aft_customizations_execute_pipeline_function_arn
aft_customizations_get_pipeline_executions_function_arn = module.aft_customizations.aft_customizations_get_pipeline_executions_function_arn
- codestar_connection_arn = module.aft_code_repositories.codestar_connection_arn
+ codeconnections_connection_arn = module.aft_code_repositories.codeconnections_connection_arn
aft_log_key_arn = module.aft_feature_options.aws_aft_log_key_arn
aft_logging_bucket_arn = module.aft_feature_options.aws_aft_logs_s3_bucket_arn
aft_config_backend_bucket_id = module.aft_backend.bucket_id
@@ -226,10 +255,11 @@ module "aft_ssm_parameters" {
aft_config_backend_primary_region = var.ct_home_region
aft_config_backend_secondary_region = var.tf_backend_secondary_region
aft_framework_repo_url = var.aft_framework_repo_url
- aft_framework_repo_git_ref = var.aft_framework_repo_git_ref
- terraform_token = var.terraform_token
+ aft_framework_repo_git_ref = local.aft_framework_repo_git_ref
+ terraform_token = var.terraform_token # Null default value #tfsec:ignore:general-secrets-no-plaintext-exposure
terraform_version = var.terraform_version
terraform_org_name = var.terraform_org_name
+ terraform_project_name = var.terraform_project_name
aft_feature_cloudtrail_data_events = var.aft_feature_cloudtrail_data_events
aft_feature_enterprise_support = var.aft_feature_enterprise_support
aft_feature_delete_default_vpcs_enabled = var.aft_feature_delete_default_vpcs_enabled
@@ -241,4 +271,6 @@ module "aft_ssm_parameters" {
account_provisioning_customizations_repo_branch = var.account_provisioning_customizations_repo_branch
maximum_concurrent_customizations = var.maximum_concurrent_customizations
github_enterprise_url = var.github_enterprise_url
+ gitlab_selfmanaged_url = var.gitlab_selfmanaged_url
+ aft_metrics_reporting = var.aft_metrics_reporting
}
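
The `aft_backend` call above relies on two aliased AWS providers. Their declarations are not part of this diff, but they presumably take a shape along these lines (a sketch only, not the module's actual providers file):

```
provider "aws" {
  alias  = "aft_management"
  region = var.ct_home_region
  # assume-role configuration targeting the AFT management account
}

provider "aws" {
  alias  = "tf_backend_secondary_region"
  region = var.tf_backend_secondary_region
  # same account, secondary region for state replication
}
```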
diff --git a/modules/aft-account-provisioning-framework/cloudwatch.tf b/modules/aft-account-provisioning-framework/cloudwatch.tf
new file mode 100644
index 00000000..267f584a
--- /dev/null
+++ b/modules/aft-account-provisioning-framework/cloudwatch.tf
@@ -0,0 +1,42 @@
+# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+# SPDX-License-Identifier: Apache-2.0
+#
+resource "aws_cloudwatch_query_definition" "customization_request_query" {
+ name = "Account Factory for Terraform/Customization Logs by Customization Request ID"
+
+ log_group_names = [
+ "/aws/lambda/${var.create_role_lambda_function_name}",
+ "/aws/lambda/${var.persist_metadata_lambda_function_name}",
+ "/aws/lambda/${var.tag_account_lambda_function_name}",
+ "/aws/lambda/${var.account_metadata_ssm_lambda_function_name}",
+ "/aws/lambda/${var.delete_default_vpc_lambda_function_name}",
+ "/aws/lambda/${var.enroll_support_lambda_function_name}",
+ "/aws/lambda/${var.enable_cloudtrail_lambda_function_name}",
+ ]
+
+  query_string = <<-EOF
+  [... CloudWatch Logs Insights query body truncated in source ...]
+  EOF
+}
[... source diff truncated: the remaining lines of this file and the diff header for modules/aft-code-repositories/buildspecs/ct-aft-account-provisioning-customizations.yml are missing ...]
+      ssh_key_parameter=$(aws ssm get-parameter --name /aft/config/aft-ssh-key --with-decryption 2> /dev/null || echo "None")
if [[ $ssh_key_parameter != "None" ]]; then
ssh_key=$(jq --raw-output ".Parameter.Value" <<< $ssh_key_parameter)
mkdir -p ~/.ssh
@@ -49,21 +49,22 @@ phases:
echo "Installing Terraform"
curl -o terraform_${TF_VERSION}_linux_amd64.zip https://releases.hashicorp.com/terraform/${TF_VERSION}/terraform_${TF_VERSION}_linux_amd64.zip
unzip -o terraform_${TF_VERSION}_linux_amd64.zip && mv terraform /usr/bin
- terraform --version
+ terraform -no-color --version
cd $DEFAULT_PATH/terraform
- for f in *.jinja; do jinja2 $f -D timestamp="$TIMESTAMP" -D tf_distribution_type=$TF_DISTRIBUTION -D region=$TF_BACKEND_REGION -D provider_region=$CT_MGMT_REGION -D bucket=$TF_S3_BUCKET -D key=$TF_S3_KEY -D dynamodb_table=$TF_DDB_TABLE -D kms_key_id=$TF_KMS_KEY_ID -D aft_admin_role_arn=$AFT_EXEC_ROLE_ARN >> ./$(basename $f .jinja).tf; done
+ for f in *.jinja; do jinja2 $f -D timestamp="$TIMESTAMP" -D tf_distribution_type=$TF_DISTRIBUTION -D region=$TF_BACKEND_REGION -D provider_region=$CT_MGMT_REGION -D bucket=$TF_S3_BUCKET -D key=$TF_S3_KEY -D dynamodb_table=$TF_DDB_TABLE -D kms_key_id=$TF_KMS_KEY_ID -D aft_admin_role_arn=$AFT_EXEC_ROLE_ARN -D tf_version=$TF_VERSION >> ./$(basename $f .jinja).tf; done
for f in *.tf; do echo "\n \n"; echo $f; cat $f; done
JSON=$(aws sts assume-role --role-arn ${AFT_ADMIN_ROLE_ARN} --role-session-name ${ROLE_SESSION_NAME})
#Make newly assumed role default session
export AWS_ACCESS_KEY_ID=$(echo ${JSON} | jq --raw-output ".Credentials[\"AccessKeyId\"]")
export AWS_SECRET_ACCESS_KEY=$(echo ${JSON} | jq --raw-output ".Credentials[\"SecretAccessKey\"]")
export AWS_SESSION_TOKEN=$(echo ${JSON} | jq --raw-output ".Credentials[\"SessionToken\"]")
- terraform init
+ terraform init -no-color
else
TF_BACKEND_REGION=$(aws ssm get-parameter --name "/aft/config/oss-backend/primary-region" --query "Parameter.Value" --output text)
TF_ORG_NAME=$(aws ssm get-parameter --name "/aft/config/terraform/org-name" --query "Parameter.Value" --output text)
TF_TOKEN=$(aws ssm get-parameter --name "/aft/config/terraform/token" --with-decryption --query "Parameter.Value" --output text)
TF_ENDPOINT=$(aws ssm get-parameter --name "/aft/config/terraform/api-endpoint" --query "Parameter.Value" --output text)
+ TF_PROJECT_NAME=$(aws ssm get-parameter --name "/aft/config/terraform/project-name" --query "Parameter.Value" --output text)
TF_WORKSPACE_NAME="ct-aft-account-provisioning-customizations"
TF_CONFIG_PATH="./temp_configuration_file.tar.gz"
cd $DEFAULT_PATH/terraform
@@ -71,14 +72,14 @@ phases:
for f in *.tf; do echo "\n \n"; echo $f; cat $f; done
cd $DEFAULT_PATH
tar -czf temp_configuration_file.tar.gz -C terraform --exclude .git --exclude venv .
- python3 $DEFAULT_PATH/aws-aft-core-framework/sources/scripts/workspace_manager.py --operation "deploy" --organization_name $TF_ORG_NAME --workspace_name $TF_WORKSPACE_NAME --assume_role_arn $AFT_ADMIN_ROLE_ARN --assume_role_session_name $ROLE_SESSION_NAME --api_endpoint $TF_ENDPOINT --api_token $TF_TOKEN --terraform_version $TF_VERSION --config_file $TF_CONFIG_PATH
+ python3 $DEFAULT_PATH/aws-aft-core-framework/sources/scripts/workspace_manager.py --operation "deploy" --organization_name $TF_ORG_NAME --workspace_name $TF_WORKSPACE_NAME --assume_role_arn $AFT_ADMIN_ROLE_ARN --assume_role_session_name $ROLE_SESSION_NAME --api_endpoint $TF_ENDPOINT --api_token $TF_TOKEN --terraform_version $TF_VERSION --config_file $TF_CONFIG_PATH --project_name "$TF_PROJECT_NAME"
fi
build:
commands:
- |
if [ $TF_DISTRIBUTION = "oss" ]; then
- terraform apply --auto-approve
+ terraform apply -no-color --auto-approve
fi
post_build:
commands:
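
The TFC/TFE branch above now reads `/aft/config/terraform/project-name` from SSM and forwards it to `workspace_manager.py`. The parameter itself is presumably published by the aft-ssm-parameters module (which receives `terraform_project_name` in main.tf); a hypothetical sketch of its shape:

```
# Hypothetical resource name; the real definition lives in
# modules/aft-ssm-parameters and is not shown in this diff.
resource "aws_ssm_parameter" "terraform_project_name" {
  name  = "/aft/config/terraform/project-name"
  type  = "String"
  value = var.terraform_project_name
}
```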
diff --git a/modules/aft-code-repositories/buildspecs/ct-aft-account-request.yml b/modules/aft-code-repositories/buildspecs/ct-aft-account-request.yml
index 314b84cf..e9ed2321 100644
--- a/modules/aft-code-repositories/buildspecs/ct-aft-account-request.yml
+++ b/modules/aft-code-repositories/buildspecs/ct-aft-account-request.yml
@@ -14,12 +14,12 @@ phases:
- TF_DISTRIBUTION=$(aws ssm get-parameter --name "/aft/config/terraform/distribution" --query "Parameter.Value" --output text)
- CT_MGMT_REGION=$(aws ssm get-parameter --name "/aft/config/ct-management-region" --query "Parameter.Value" --output text)
- AFT_MGMT_ACCOUNT=$(aws sts get-caller-identity --query Account --output text)
- - AFT_EXEC_ROLE_ARN=arn:aws:iam::$AFT_MGMT_ACCOUNT:role/AWSAFTExecution
+ - AFT_EXEC_ROLE_ARN=arn:$AWS_PARTITION:iam::$AFT_MGMT_ACCOUNT:role/AWSAFTExecution
- AFT_ADMIN_ROLE_NAME=$(aws ssm get-parameter --name /aft/resources/iam/aft-administrator-role-name | jq --raw-output ".Parameter.Value")
- - AFT_ADMIN_ROLE_ARN=arn:aws:iam::$AFT_MGMT_ACCOUNT:role/$AFT_ADMIN_ROLE_NAME
+ - AFT_ADMIN_ROLE_ARN=arn:$AWS_PARTITION:iam::$AFT_MGMT_ACCOUNT:role/$AFT_ADMIN_ROLE_NAME
- ROLE_SESSION_NAME=$(aws ssm get-parameter --name /aft/resources/iam/aft-session-name | jq --raw-output ".Parameter.Value")
- |
- ssh_key_parameter=$(aws ssm get-parameter --name /aft/config/aft-ssh-key --with-decryption || echo "None")
+ ssh_key_parameter=$(aws ssm get-parameter --name /aft/config/aft-ssh-key --with-decryption 2> /dev/null || echo "None")
if [[ $ssh_key_parameter != "None" ]]; then
ssh_key=$(jq --raw-output ".Parameter.Value" <<< $ssh_key_parameter)
mkdir -p ~/.ssh
@@ -51,18 +51,19 @@ phases:
unzip -o terraform_${TF_VERSION}_linux_amd64.zip && mv terraform /usr/bin
terraform --version
cd $DEFAULT_PATH/terraform
- for f in *.jinja; do jinja2 $f -D timestamp="$TIMESTAMP" -D tf_distribution_type=$TF_DISTRIBUTION -D provider_region=$CT_MGMT_REGION -D region=$TF_BACKEND_REGION -D bucket=$TF_S3_BUCKET -D key=$TF_S3_KEY -D dynamodb_table=$TF_DDB_TABLE -D kms_key_id=$TF_KMS_KEY_ID -D aft_admin_role_arn=$AFT_EXEC_ROLE_ARN >> ./$(basename $f .jinja).tf; done
+ for f in *.jinja; do jinja2 $f -D timestamp="$TIMESTAMP" -D tf_distribution_type=$TF_DISTRIBUTION -D provider_region=$CT_MGMT_REGION -D region=$TF_BACKEND_REGION -D bucket=$TF_S3_BUCKET -D key=$TF_S3_KEY -D dynamodb_table=$TF_DDB_TABLE -D kms_key_id=$TF_KMS_KEY_ID -D aft_admin_role_arn=$AFT_EXEC_ROLE_ARN -D tf_version=$TF_VERSION >> ./$(basename $f .jinja).tf; done
for f in *.tf; do echo "\n \n"; echo $f; cat $f; done
JSON=$(aws sts assume-role --role-arn ${AFT_ADMIN_ROLE_ARN} --role-session-name ${ROLE_SESSION_NAME})
#Make newly assumed role default session
export AWS_ACCESS_KEY_ID=$(echo ${JSON} | jq --raw-output ".Credentials[\"AccessKeyId\"]")
export AWS_SECRET_ACCESS_KEY=$(echo ${JSON} | jq --raw-output ".Credentials[\"SecretAccessKey\"]")
export AWS_SESSION_TOKEN=$(echo ${JSON} | jq --raw-output ".Credentials[\"SessionToken\"]")
- terraform init
+ terraform init -no-color
else
TF_ORG_NAME=$(aws ssm get-parameter --name "/aft/config/terraform/org-name" --query "Parameter.Value" --output text)
TF_TOKEN=$(aws ssm get-parameter --name "/aft/config/terraform/token" --with-decryption --query "Parameter.Value" --output text)
TF_ENDPOINT=$(aws ssm get-parameter --name "/aft/config/terraform/api-endpoint" --query "Parameter.Value" --output text)
+ TF_PROJECT_NAME=$(aws ssm get-parameter --name "/aft/config/terraform/project-name" --query "Parameter.Value" --output text)
TF_WORKSPACE_NAME="ct-aft-account-request"
TF_CONFIG_PATH="./temp_configuration_file.tar.gz"
cd $DEFAULT_PATH/terraform
@@ -70,14 +71,14 @@ phases:
for f in *.tf; do echo "\n \n"; echo $f; cat $f; done
cd $DEFAULT_PATH
tar -czf temp_configuration_file.tar.gz -C terraform --exclude .git --exclude venv .
- python3 $DEFAULT_PATH/aws-aft-core-framework/sources/scripts/workspace_manager.py --operation "deploy" --organization_name $TF_ORG_NAME --workspace_name $TF_WORKSPACE_NAME --assume_role_arn $AFT_ADMIN_ROLE_ARN --assume_role_session_name $ROLE_SESSION_NAME --api_endpoint $TF_ENDPOINT --api_token $TF_TOKEN --terraform_version $TF_VERSION --config_file $TF_CONFIG_PATH
+ python3 $DEFAULT_PATH/aws-aft-core-framework/sources/scripts/workspace_manager.py --operation "deploy" --organization_name $TF_ORG_NAME --workspace_name $TF_WORKSPACE_NAME --assume_role_arn $AFT_ADMIN_ROLE_ARN --assume_role_session_name $ROLE_SESSION_NAME --api_endpoint $TF_ENDPOINT --api_token $TF_TOKEN --terraform_version $TF_VERSION --config_file $TF_CONFIG_PATH --project_name "$TF_PROJECT_NAME"
fi
build:
commands:
- |
if [ $TF_DISTRIBUTION = "oss" ]; then
- terraform apply --auto-approve
+ terraform apply -no-color --auto-approve
fi
post_build:
commands:
diff --git a/modules/aft-code-repositories/codebuild.tf b/modules/aft-code-repositories/codebuild.tf
index b23a1cff..860deec1 100644
--- a/modules/aft-code-repositories/codebuild.tf
+++ b/modules/aft-code-repositories/codebuild.tf
@@ -1,6 +1,7 @@
# Copyright Amazon.com, Inc. or its affiliates. All rights reserved.
# SPDX-License-Identifier: Apache-2.0
#
+
data "local_file" "account_request_buildspec" {
filename = "${path.module}/buildspecs/ct-aft-account-request.yml"
}
@@ -9,10 +10,10 @@ data "local_file" "account_provisioning_customizations_buildspec" {
}
resource "aws_codebuild_project" "account_request" {
- depends_on = [aws_cloudwatch_log_group.account_request]
+ depends_on = [aws_cloudwatch_log_group.account_request, time_sleep.iam_eventual_consistency]
name = "ct-aft-account-request"
description = "Job to apply Terraform for Account Requests"
- build_timeout = "60"
+ build_timeout = tostring(var.global_codebuild_timeout)
service_role = aws_iam_role.account_request_codebuild_role.arn
encryption_key = var.aft_key_arn
@@ -22,9 +23,14 @@ resource "aws_codebuild_project" "account_request" {
environment {
compute_type = "BUILD_GENERAL1_MEDIUM"
- image = "aws/codebuild/amazonlinux2-x86_64-standard:3.0"
+ image = "aws/codebuild/amazonlinux2-x86_64-standard:5.0"
type = "LINUX_CONTAINER"
image_pull_credentials_type = "CODEBUILD"
+ environment_variable {
+ name = "AWS_PARTITION"
+ value = data.aws_partition.current.partition
+ type = "PLAINTEXT"
+ }
}
logs_config {
@@ -43,19 +49,26 @@ resource "aws_codebuild_project" "account_request" {
buildspec = data.local_file.account_request_buildspec.content
}
- vpc_config {
- vpc_id = var.vpc_id
- subnets = var.subnet_ids
- security_group_ids = var.security_group_ids
+ dynamic "vpc_config" {
+ for_each = var.aft_enable_vpc ? [1] : []
+ content {
+ vpc_id = var.vpc_id
+ subnets = var.subnet_ids
+ security_group_ids = var.security_group_ids
+ }
+ }
+
+ lifecycle {
+ ignore_changes = [project_visibility]
}
}
resource "aws_codebuild_project" "account_provisioning_customizations_pipeline" {
- depends_on = [aws_cloudwatch_log_group.account_request]
+ depends_on = [aws_cloudwatch_log_group.account_request, time_sleep.iam_eventual_consistency]
name = "ct-aft-account-provisioning-customizations"
description = "Deploys the Account Provisioning Customizations terraform project"
- build_timeout = "60"
+ build_timeout = tostring(var.global_codebuild_timeout)
service_role = aws_iam_role.account_provisioning_customizations_codebuild_role.arn
encryption_key = var.aft_key_arn
@@ -65,9 +78,15 @@ resource "aws_codebuild_project" "account_provisioning_customizations_pipeline"
environment {
compute_type = "BUILD_GENERAL1_MEDIUM"
- image = "aws/codebuild/amazonlinux2-x86_64-standard:3.0"
+ image = "aws/codebuild/amazonlinux2-x86_64-standard:5.0"
type = "LINUX_CONTAINER"
image_pull_credentials_type = "CODEBUILD"
+
+ environment_variable {
+ name = "AWS_PARTITION"
+ value = data.aws_partition.current.partition
+ type = "PLAINTEXT"
+ }
}
logs_config {
@@ -86,18 +105,28 @@ resource "aws_codebuild_project" "account_provisioning_customizations_pipeline"
buildspec = data.local_file.account_provisioning_customizations_buildspec.content
}
- vpc_config {
- vpc_id = var.vpc_id
- subnets = var.subnet_ids
- security_group_ids = var.security_group_ids
+ dynamic "vpc_config" {
+ for_each = var.aft_enable_vpc ? [1] : []
+ content {
+ vpc_id = var.vpc_id
+ subnets = var.subnet_ids
+ security_group_ids = var.security_group_ids
+ }
+ }
+
+ lifecycle {
+ ignore_changes = [project_visibility]
}
}
+#tfsec:ignore:aws-cloudwatch-log-group-customer-key
resource "aws_cloudwatch_log_group" "account_request" {
name = "/aws/codebuild/ct-aft-account-request"
retention_in_days = var.log_group_retention
}
+
+#tfsec:ignore:aws-cloudwatch-log-group-customer-key
resource "aws_cloudwatch_log_group" "account_provisioning_customizations" {
name = "/aws/codebuild/ct-aft-account-provisioning-customizations"
retention_in_days = var.log_group_retention
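
The `dynamic "vpc_config"` blocks above use the zero-or-one `for_each` idiom to make VPC attachment conditional; a sketch of its behavior:

```
# for_each = var.aft_enable_vpc ? [1] : []
#   true  -> iterates once, rendering a single vpc_config block
#   false -> iterates zero times, so vpc_config is omitted and the
#            CodeBuild project is created without VPC attachment
```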
diff --git a/modules/aft-code-repositories/codepipeline.tf b/modules/aft-code-repositories/codepipeline.tf
index 3638bdb0..3a35b11f 100644
--- a/modules/aft-code-repositories/codepipeline.tf
+++ b/modules/aft-code-repositories/codepipeline.tf
@@ -6,9 +6,10 @@
##############################################################
resource "aws_codepipeline" "codecommit_account_request" {
- count = local.vcs.is_codecommit ? 1 : 0
- name = "ct-aft-account-request"
- role_arn = aws_iam_role.account_request_codepipeline_role.arn
+ count = local.vcs.is_codecommit ? 1 : 0
+ name = "ct-aft-account-request"
+ role_arn = aws_iam_role.account_request_codepipeline_role.arn
+ pipeline_type = "V2"
artifact_store {
location = var.codepipeline_s3_bucket_name
@@ -107,13 +108,19 @@ resource "aws_cloudwatch_event_target" "account_request" {
}
##############################################################
-# CodeStar - account-request
+# CodeConnections - account-request
##############################################################
-resource "aws_codepipeline" "codestar_account_request" {
- count = local.vcs.is_codecommit ? 0 : 1
- name = "ct-aft-account-request"
- role_arn = aws_iam_role.account_request_codepipeline_role.arn
+moved {
+ from = aws_codepipeline.codestar_account_request
+ to = aws_codepipeline.codeconnections_account_request
+}
+
+resource "aws_codepipeline" "codeconnections_account_request" {
+ count = local.vcs.is_codecommit ? 0 : 1
+ name = "ct-aft-account-request"
+ role_arn = aws_iam_role.account_request_codepipeline_role.arn
+ pipeline_type = "V2"
artifact_store {
location = var.codepipeline_s3_bucket_name
@@ -140,7 +147,7 @@ resource "aws_codepipeline" "codestar_account_request" {
output_artifacts = ["account-request"]
configuration = {
- ConnectionArn = lookup({ github = local.connection_arn.github, bitbucket = local.connection_arn.bitbucket, githubenterprise = local.connection_arn.githubenterprise }, var.vcs_provider)
+ ConnectionArn = lookup({ github = local.connection_arn.github, bitbucket = local.connection_arn.bitbucket, githubenterprise = local.connection_arn.githubenterprise, gitlab = local.connection_arn.gitlab, gitlabselfmanaged = local.connection_arn.gitlabselfmanaged }, var.vcs_provider)
FullRepositoryId = var.account_request_repo_name
BranchName = var.account_request_repo_branch
DetectChanges = true
@@ -176,9 +183,10 @@ resource "aws_codepipeline" "codestar_account_request" {
##############################################################
resource "aws_codepipeline" "codecommit_account_provisioning_customizations" {
- count = local.vcs.is_codecommit ? 1 : 0
- name = "ct-aft-account-provisioning-customizations"
- role_arn = aws_iam_role.account_provisioning_customizations_codepipeline_role.arn
+ count = local.vcs.is_codecommit ? 1 : 0
+ name = "ct-aft-account-provisioning-customizations"
+ role_arn = aws_iam_role.account_provisioning_customizations_codepipeline_role.arn
+ pipeline_type = "V2"
artifact_store {
location = var.codepipeline_s3_bucket_name
@@ -235,16 +243,19 @@ resource "aws_codepipeline" "codecommit_account_provisioning_customizations" {
}
}
-
-
##############################################################
-# CodeStar - account-provisioning-customizations
+# CodeConnections - account-provisioning-customizations
##############################################################
-resource "aws_codepipeline" "codestar_account_provisioning_customizations" {
- count = local.vcs.is_codecommit ? 0 : 1
- name = "ct-aft-account-provisioning-customizations"
- role_arn = aws_iam_role.account_provisioning_customizations_codepipeline_role.arn
+moved {
+ from = aws_codepipeline.codestar_account_provisioning_customizations
+ to = aws_codepipeline.codeconnections_account_provisioning_customizations
+}
+resource "aws_codepipeline" "codeconnections_account_provisioning_customizations" {
+ count = local.vcs.is_codecommit ? 0 : 1
+ name = "ct-aft-account-provisioning-customizations"
+ role_arn = aws_iam_role.account_provisioning_customizations_codepipeline_role.arn
+ pipeline_type = "V2"
artifact_store {
location = var.codepipeline_s3_bucket_name
@@ -271,7 +282,7 @@ resource "aws_codepipeline" "codestar_account_provisioning_customizations" {
output_artifacts = ["account-provisioning-customizations"]
configuration = {
- ConnectionArn = lookup({ github = local.connection_arn.github, bitbucket = local.connection_arn.bitbucket, githubenterprise = local.connection_arn.githubenterprise }, var.vcs_provider)
+ ConnectionArn = lookup({ github = local.connection_arn.github, bitbucket = local.connection_arn.bitbucket, githubenterprise = local.connection_arn.githubenterprise, gitlab = local.connection_arn.gitlab, gitlabselfmanaged = local.connection_arn.gitlabselfmanaged }, var.vcs_provider)
FullRepositoryId = var.account_provisioning_customizations_repo_name
BranchName = var.account_provisioning_customizations_repo_branch
DetectChanges = true
diff --git a/modules/aft-code-repositories/codestar.tf b/modules/aft-code-repositories/codestar.tf
index 6a1cad91..ce8124dc 100644
--- a/modules/aft-code-repositories/codestar.tf
+++ b/modules/aft-code-repositories/codestar.tf
@@ -1,33 +1,66 @@
# Copyright Amazon.com, Inc. or its affiliates. All rights reserved.
# SPDX-License-Identifier: Apache-2.0
#
-resource "aws_codestarconnections_connection" "bitbucket" {
+
+resource "aws_codeconnections_connection" "bitbucket" {
count = local.vcs.is_bitbucket ? 1 : 0
name = "ct-aft-bitbucket-connection"
provider_type = "Bitbucket"
}
-resource "aws_codestarconnections_connection" "github" {
+resource "aws_codeconnections_connection" "github" {
count = local.vcs.is_github ? 1 : 0
name = "ct-aft-github-connection"
provider_type = "GitHub"
}
-resource "aws_codestarconnections_connection" "githubenterprise" {
+resource "aws_codeconnections_connection" "githubenterprise" {
count = local.vcs.is_github_enterprise ? 1 : 0
name = "ct-aft-github-ent-connection"
- host_arn = aws_codestarconnections_host.githubenterprise[0].arn
+ host_arn = aws_codeconnections_host.githubenterprise[0].arn
}
-resource "aws_codestarconnections_host" "githubenterprise" {
+
+resource "aws_codeconnections_host" "githubenterprise" {
count = local.vcs.is_github_enterprise ? 1 : 0
name = "github-enterprise-host"
provider_endpoint = var.github_enterprise_url
provider_type = "GitHubEnterpriseServer"
- vpc_configuration {
- security_group_ids = var.security_group_ids
- subnet_ids = var.subnet_ids
- vpc_id = var.vpc_id
+ dynamic "vpc_configuration" {
+ for_each = var.aft_enable_vpc ? [1] : []
+ content {
+ security_group_ids = var.security_group_ids
+ subnet_ids = var.subnet_ids
+ vpc_id = var.vpc_id
+ }
+ }
+}
+
+resource "aws_codeconnections_connection" "gitlab" {
+ count = local.vcs.is_gitlab ? 1 : 0
+ name = "ct-aft-gitlab-connection"
+ provider_type = "GitLab"
+}
+
+resource "aws_codeconnections_connection" "gitlabselfmanaged" {
+ count = local.vcs.is_gitlab_selfmanaged ? 1 : 0
+ name = "ct-aft-gitlab-selfmgd-connection"
+ host_arn = aws_codeconnections_host.gitlabselfmanaged[0].arn
+}
+
+resource "aws_codeconnections_host" "gitlabselfmanaged" {
+ count = local.vcs.is_gitlab_selfmanaged ? 1 : 0
+ name = "gitlab-selfmanaged-host"
+ provider_endpoint = var.gitlab_selfmanaged_url
+ provider_type = "GitLabSelfManaged"
+
+ dynamic "vpc_configuration" {
+ for_each = var.aft_enable_vpc ? [1] : []
+ content {
+ security_group_ids = var.security_group_ids
+ subnet_ids = var.subnet_ids
+ vpc_id = var.vpc_id
+ }
}
}
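
The `local.vcs.is_*` flags driving these counts are defined elsewhere in the module and are not part of this diff; they are presumably simple equality checks on `var.vcs_provider`, along these lines (an assumption, shown for orientation only):

```
# Hypothetical shape of the flags consumed above.
locals {
  vcs = {
    is_codecommit         = lower(var.vcs_provider) == "codecommit"
    is_bitbucket          = lower(var.vcs_provider) == "bitbucket"
    is_github             = lower(var.vcs_provider) == "github"
    is_github_enterprise  = lower(var.vcs_provider) == "githubenterprise"
    is_gitlab             = lower(var.vcs_provider) == "gitlab"
    is_gitlab_selfmanaged = lower(var.vcs_provider) == "gitlabselfmanaged"
  }
}
```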
diff --git a/modules/aft-code-repositories/data.tf b/modules/aft-code-repositories/data.tf
index c3f273d7..638dfa04 100644
--- a/modules/aft-code-repositories/data.tf
+++ b/modules/aft-code-repositories/data.tf
@@ -1,6 +1,8 @@
# Copyright Amazon.com, Inc. or its affiliates. All rights reserved.
# SPDX-License-Identifier: Apache-2.0
#
+data "aws_partition" "current" {}
+
data "aws_region" "current" {}
data "aws_caller_identity" "current" {}
diff --git a/modules/aft-code-repositories/iam.tf b/modules/aft-code-repositories/iam.tf
index cacb21a2..6807ff0d 100644
--- a/modules/aft-code-repositories/iam.tf
+++ b/modules/aft-code-repositories/iam.tf
@@ -14,6 +14,7 @@ resource "aws_iam_role_policy" "account_request_codepipeline_policy" {
policy = templatefile("${path.module}/iam/role-policies/ct_aft_account_request_codepipeline_policy.tpl", {
aws_s3_bucket_aft_codepipeline_customizations_bucket_arn = var.codepipeline_s3_bucket_arn
+ data_aws_partition_current_partition = data.aws_partition.current.partition
data_aws_region_current_name = data.aws_region.current.name
data_aws_caller_identity_current_account_id = data.aws_caller_identity.current.account_id
data_aws_kms_alias_aft_key_target_key_arn = var.aft_key_arn
@@ -31,6 +32,7 @@ resource "aws_iam_role_policy" "account_provisioning_customizations_codepipeline
policy = templatefile("${path.module}/iam/role-policies/ct_aft_account_provisioning_customizations_codepipeline_policy.tpl", {
aws_s3_bucket_aft_codepipeline_customizations_bucket_arn = var.codepipeline_s3_bucket_arn
+ data_aws_partition_current_partition = data.aws_partition.current.partition
data_aws_region_current_name = data.aws_region.current.name
data_aws_caller_identity_current_account_id = data.aws_caller_identity.current.account_id
data_aws_kms_alias_aft_key_target_key_arn = var.aft_key_arn
@@ -50,6 +52,7 @@ resource "aws_iam_role_policy" "account_provisioning_customizations_codebuild_po
policy = templatefile("${path.module}/iam/role-policies/ct_aft_codebuild_policy.tpl", {
aws_s3_bucket_aft_codepipeline_customizations_bucket_arn = var.codepipeline_s3_bucket_arn
+ data_aws_partition_current_partition = data.aws_partition.current.partition
data_aws_region_current_name = data.aws_region.current.name
data_aws_caller_identity_current_account_id = data.aws_caller_identity.current.account_id
data_aws_kms_alias_aft_key_target_key_arn = var.aft_key_arn
@@ -63,6 +66,7 @@ resource "aws_iam_role_policy" "terraform_oss_backend_account_provisioning_custo
role = aws_iam_role.account_provisioning_customizations_codebuild_role.id
policy = templatefile("${path.module}/iam/role-policies/ct_aft_codebuild_oss_backend_policy.tpl", {
+ data_aws_partition_current_partition = data.aws_partition.current.partition
data_aws_region_current_name = data.aws_region.current.name
data_aws_caller_identity_current_account_id = data.aws_caller_identity.current.account_id
data_aws_dynamo_terraform_oss_backend_table = var.aft_config_backend_table_id
@@ -82,6 +86,7 @@ resource "aws_iam_role_policy" "account_request_codebuild_policy" {
policy = templatefile("${path.module}/iam/role-policies/ct_aft_codebuild_policy.tpl", {
aws_s3_bucket_aft_codepipeline_customizations_bucket_arn = var.codepipeline_s3_bucket_arn
+ data_aws_partition_current_partition = data.aws_partition.current.partition
data_aws_region_current_name = data.aws_region.current.name
data_aws_caller_identity_current_account_id = data.aws_caller_identity.current.account_id
data_aws_kms_alias_aft_key_target_key_arn = var.aft_key_arn
@@ -95,6 +100,7 @@ resource "aws_iam_role_policy" "terraform_oss_backend_account_request_codebuild_
role = aws_iam_role.account_request_codebuild_role.id
policy = templatefile("${path.module}/iam/role-policies/ct_aft_codebuild_oss_backend_policy.tpl", {
+ data_aws_partition_current_partition = data.aws_partition.current.partition
data_aws_region_current_name = data.aws_region.current.name
data_aws_caller_identity_current_account_id = data.aws_caller_identity.current.account_id
data_aws_dynamo_terraform_oss_backend_table = var.aft_config_backend_table_id
@@ -103,6 +109,11 @@ resource "aws_iam_role_policy" "terraform_oss_backend_account_request_codebuild_
})
}
+resource "time_sleep" "iam_eventual_consistency" {
+ depends_on = [aws_iam_role.account_request_codebuild_role]
+ create_duration = "60s"
+}
+
# CloudWatch Events Role
resource "aws_iam_role" "cloudwatch_events_codepipeline_role" {
@@ -117,6 +128,7 @@ resource "aws_iam_role_policy" "cloudwatch_events_codepipeline_role" {
role = aws_iam_role.cloudwatch_events_codepipeline_role[0].id
policy = templatefile("${path.module}/iam/role-policies/ct_aft_cwe_policy.tpl", {
+ data_aws_partition_current_partition = data.aws_partition.current.partition
region = data.aws_region.current.name
account_id = data.aws_caller_identity.current.account_id
account_request_pipeline_name = aws_codepipeline.codecommit_account_request[0].name
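
Editor's note: the `time_sleep` added above is the standard workaround for IAM's eventual consistency — a freshly created role can fail `sts:AssumeRole` or service-linked use for several seconds. A self-contained sketch of the pattern (role name and trust policy are illustrative); note it requires the `hashicorp/time` provider:

```hcl
terraform {
  required_providers {
    aws  = { source = "hashicorp/aws" }
    time = { source = "hashicorp/time" } # time_sleep lives here
  }
}

resource "aws_iam_role" "demo" {
  name = "demo-codebuild-role" # illustrative
  assume_role_policy = jsonencode({
    Version = "2012-10-17"
    Statement = [{
      Effect    = "Allow"
      Action    = "sts:AssumeRole"
      Principal = { Service = "codebuild.amazonaws.com" }
    }]
  })
}

# Consumers depend on the sleep rather than the role, giving IAM time
# to propagate the new role before anything tries to use it.
resource "time_sleep" "demo_iam_propagation" {
  depends_on      = [aws_iam_role.demo]
  create_duration = "60s"
}
```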
diff --git a/modules/aft-code-repositories/iam/role-policies/ct_aft_account_provisioning_customizations_codepipeline_policy.tpl b/modules/aft-code-repositories/iam/role-policies/ct_aft_account_provisioning_customizations_codepipeline_policy.tpl
index 6c75959a..199f937d 100644
--- a/modules/aft-code-repositories/iam/role-policies/ct_aft_account_provisioning_customizations_codepipeline_policy.tpl
+++ b/modules/aft-code-repositories/iam/role-policies/ct_aft_account_provisioning_customizations_codepipeline_policy.tpl
@@ -22,7 +22,7 @@
"codebuild:BatchGetBuilds",
"codebuild:StartBuild"
],
- "Resource": "arn:aws:codebuild:${data_aws_region_current_name}:${data_aws_caller_identity_current_account_id}:*account-provisioning*"
+ "Resource": "arn:${data_aws_partition_current_partition}:codebuild:${data_aws_region_current_name}:${data_aws_caller_identity_current_account_id}:*account-provisioning*"
},
{
"Effect": "Allow",
@@ -35,7 +35,7 @@
"codecommit:GetUploadArchiveStatus",
"codecommit:CancelUploadArchive"
],
- "Resource": "arn:aws:codecommit:${data_aws_region_current_name}:${data_aws_caller_identity_current_account_id}:*account-provisioning*"
+ "Resource": "arn:${data_aws_partition_current_partition}:codecommit:${data_aws_region_current_name}:${data_aws_caller_identity_current_account_id}:*account-provisioning*"
},
{
"Effect": "Allow",
@@ -48,7 +48,10 @@
},
{
"Effect": "Allow",
- "Action": "codestar-connections:UseConnection",
+ "Action": [
+ "codestar-connections:UseConnection",
+ "codeconnections:UseConnection"
+ ],
"Resource": "*"
}
]
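
Editor's note: the statement now allows `UseConnection` in both action namespaces. AWS renamed CodeStar Connections to CodeConnections, and the required IAM action namespace follows the ARN of the connection being used, so allowing both covers connections created under either resource type (that reading of the rename is an inference, not stated in the PR). A sketch of the same statement via `jsonencode`, scoped to connection ARNs in both namespaces rather than `"*"` (an illustration, tighter than the template above):

```hcl
data "aws_partition" "current" {}

locals {
  use_connection_statement = {
    Effect = "Allow"
    Action = [
      "codestar-connections:UseConnection",
      "codeconnections:UseConnection",
    ]
    Resource = [
      "arn:${data.aws_partition.current.partition}:codestar-connections:*:*:connection/*",
      "arn:${data.aws_partition.current.partition}:codeconnections:*:*:connection/*",
    ]
  }
}
```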
diff --git a/modules/aft-code-repositories/iam/role-policies/ct_aft_account_request_codepipeline_policy.tpl b/modules/aft-code-repositories/iam/role-policies/ct_aft_account_request_codepipeline_policy.tpl
index ad56371b..3c749ca3 100644
--- a/modules/aft-code-repositories/iam/role-policies/ct_aft_account_request_codepipeline_policy.tpl
+++ b/modules/aft-code-repositories/iam/role-policies/ct_aft_account_request_codepipeline_policy.tpl
@@ -22,7 +22,7 @@
"codebuild:BatchGetBuilds",
"codebuild:StartBuild"
],
- "Resource": "arn:aws:codebuild:${data_aws_region_current_name}:${data_aws_caller_identity_current_account_id}:*account-request*"
+ "Resource": "arn:${data_aws_partition_current_partition}:codebuild:${data_aws_region_current_name}:${data_aws_caller_identity_current_account_id}:*account-request*"
},
{
"Effect": "Allow",
@@ -35,7 +35,7 @@
"codecommit:GetUploadArchiveStatus",
"codecommit:CancelUploadArchive"
],
- "Resource": "arn:aws:codecommit:${data_aws_region_current_name}:${data_aws_caller_identity_current_account_id}:*account-request*"
+ "Resource": "arn:${data_aws_partition_current_partition}:codecommit:${data_aws_region_current_name}:${data_aws_caller_identity_current_account_id}:*account-request*"
},
{
"Effect": "Allow",
@@ -48,7 +48,10 @@
},
{
"Effect": "Allow",
- "Action": "codestar-connections:UseConnection",
+ "Action": [
+ "codestar-connections:UseConnection",
+ "codeconnections:UseConnection"
+ ],
"Resource": "*"
}
]
diff --git a/modules/aft-code-repositories/iam/role-policies/ct_aft_codebuild_oss_backend_policy.tpl b/modules/aft-code-repositories/iam/role-policies/ct_aft_codebuild_oss_backend_policy.tpl
index d4cd8042..fb022a03 100644
--- a/modules/aft-code-repositories/iam/role-policies/ct_aft_codebuild_oss_backend_policy.tpl
+++ b/modules/aft-code-repositories/iam/role-policies/ct_aft_codebuild_oss_backend_policy.tpl
@@ -7,7 +7,7 @@
"dynamodb:*Item"
],
"Resource": [
- "arn:aws:dynamodb:${data_aws_region_current_name}:${data_aws_caller_identity_current_account_id}:table/${data_aws_dynamo_terraform_oss_backend_table}"
+ "arn:${data_aws_partition_current_partition}:dynamodb:${data_aws_region_current_name}:${data_aws_caller_identity_current_account_id}:table/${data_aws_dynamo_terraform_oss_backend_table}"
]
},
{
@@ -21,8 +21,8 @@
"s3:PutObject"
],
"Resource": [
- "arn:aws:s3:::${aws_s3_bucket_aft_terraform_oss_backend_bucket_id}",
- "arn:aws:s3:::${aws_s3_bucket_aft_terraform_oss_backend_bucket_id}/*"
+ "arn:${data_aws_partition_current_partition}:s3:::${aws_s3_bucket_aft_terraform_oss_backend_bucket_id}",
+ "arn:${data_aws_partition_current_partition}:s3:::${aws_s3_bucket_aft_terraform_oss_backend_bucket_id}/*"
]
},
{
@@ -32,7 +32,7 @@
"kms:Encrypt",
"kms:GenerateDataKey"
],
- "Resource": "arn:aws:kms:${data_aws_region_current_name}:${data_aws_caller_identity_current_account_id}:key/${aws_s3_bucket_aft_terraform_oss_kms_key_id}"
+ "Resource": "arn:${data_aws_partition_current_partition}:kms:${data_aws_region_current_name}:${data_aws_caller_identity_current_account_id}:key/${aws_s3_bucket_aft_terraform_oss_kms_key_id}"
}
]
}
diff --git a/modules/aft-code-repositories/iam/role-policies/ct_aft_codebuild_policy.tpl b/modules/aft-code-repositories/iam/role-policies/ct_aft_codebuild_policy.tpl
index 1668da7e..33ff8419 100644
--- a/modules/aft-code-repositories/iam/role-policies/ct_aft_codebuild_policy.tpl
+++ b/modules/aft-code-repositories/iam/role-policies/ct_aft_codebuild_policy.tpl
@@ -3,7 +3,7 @@
"Statement": [
{
"Effect": "Allow",
- "Resource": "arn:aws:logs:${data_aws_region_current_name}:${data_aws_caller_identity_current_account_id}:log-group:/aws/codebuild/ct-aft*",
+ "Resource": "arn:${data_aws_partition_current_partition}:logs:${data_aws_region_current_name}:${data_aws_caller_identity_current_account_id}:log-group:/aws/codebuild/ct-aft*",
"Action": [
"logs:CreateLogStream",
"logs:PutLogEvents"
@@ -28,7 +28,7 @@
"ec2:CreateNetworkInterfacePermission"
],
"Resource": [
- "arn:aws:ec2:${data_aws_region_current_name}:${data_aws_caller_identity_current_account_id}:network-interface/*"
+ "arn:${data_aws_partition_current_partition}:ec2:${data_aws_region_current_name}:${data_aws_caller_identity_current_account_id}:network-interface/*"
]
},
{
@@ -37,7 +37,7 @@
"dynamodb:*Item"
],
"Resource": [
- "arn:aws:dynamodb:${data_aws_region_current_name}:${data_aws_caller_identity_current_account_id}:table/${data_aws_dynamo_account_request_table}"
+ "arn:${data_aws_partition_current_partition}:dynamodb:${data_aws_region_current_name}:${data_aws_caller_identity_current_account_id}:table/${data_aws_dynamo_account_request_table}"
]
},
{
@@ -71,7 +71,7 @@
"ssm:GetParameter"
],
"Resource": [
- "arn:aws:ssm:${data_aws_region_current_name}:${data_aws_caller_identity_current_account_id}:parameter/aft/*"
+ "arn:${data_aws_partition_current_partition}:ssm:${data_aws_region_current_name}:${data_aws_caller_identity_current_account_id}:parameter/aft/*"
]
},
{
@@ -85,7 +85,7 @@
"codecommit:GetUploadArchiveStatus",
"codecommit:CancelUploadArchive"
],
- "Resource": "arn:aws:codecommit:${data_aws_region_current_name}:${data_aws_caller_identity_current_account_id}:*account-request*"
+ "Resource": "arn:${data_aws_partition_current_partition}:codecommit:${data_aws_region_current_name}:${data_aws_caller_identity_current_account_id}:*account-request*"
},
{
"Effect": "Allow",
@@ -93,7 +93,7 @@
"sts:AssumeRole"
],
"Resource": [
- "arn:aws:iam::${data_aws_caller_identity_current_account_id}:role/AWSAFTAdmin"
+ "arn:${data_aws_partition_current_partition}:iam::${data_aws_caller_identity_current_account_id}:role/AWSAFTAdmin"
]
}
]
diff --git a/modules/aft-code-repositories/iam/role-policies/ct_aft_cwe_policy.tpl b/modules/aft-code-repositories/iam/role-policies/ct_aft_cwe_policy.tpl
index 776eb84d..bcc23907 100644
--- a/modules/aft-code-repositories/iam/role-policies/ct_aft_cwe_policy.tpl
+++ b/modules/aft-code-repositories/iam/role-policies/ct_aft_cwe_policy.tpl
@@ -7,8 +7,8 @@
"codepipeline:StartPipelineExecution"
],
"Resource": [
- "arn:aws:codepipeline:${region}:${account_id}:${account_request_pipeline_name}",
- "arn:aws:codepipeline:${region}:${account_id}:${provisioning_customizations_pipeline_name}"
+ "arn:${data_aws_partition_current_partition}:codepipeline:${region}:${account_id}:${account_request_pipeline_name}",
+ "arn:${data_aws_partition_current_partition}:codepipeline:${region}:${account_id}:${provisioning_customizations_pipeline_name}"
]
}
]
diff --git a/modules/aft-code-repositories/locals.tf b/modules/aft-code-repositories/locals.tf
index 58954802..72606d10 100644
--- a/modules/aft-code-repositories/locals.tf
+++ b/modules/aft-code-repositories/locals.tf
@@ -3,15 +3,19 @@
#
locals {
vcs = {
- is_codecommit = lower(var.vcs_provider) == "codecommit" ? true : false
- is_bitbucket = lower(var.vcs_provider) == "bitbucket" ? true : false
- is_github = lower(var.vcs_provider) == "github" ? true : false
- is_github_enterprise = lower(var.vcs_provider) == "githubenterprise" ? true : false
+ is_codecommit = lower(var.vcs_provider) == "codecommit" ? true : false
+ is_bitbucket = lower(var.vcs_provider) == "bitbucket" ? true : false
+ is_github = lower(var.vcs_provider) == "github" ? true : false
+ is_github_enterprise = lower(var.vcs_provider) == "githubenterprise" ? true : false
+ is_gitlab = lower(var.vcs_provider) == "gitlab" ? true : false
+ is_gitlab_selfmanaged = lower(var.vcs_provider) == "gitlabselfmanaged" ? true : false
}
connection_arn = {
- bitbucket = lower(var.vcs_provider) == "bitbucket" ? aws_codestarconnections_connection.bitbucket[0].arn : ""
- github = lower(var.vcs_provider) == "github" ? aws_codestarconnections_connection.github[0].arn : ""
- githubenterprise = lower(var.vcs_provider) == "githubenterprise" ? aws_codestarconnections_connection.githubenterprise[0].arn : ""
- codecommit = "null"
+ bitbucket = lower(var.vcs_provider) == "bitbucket" ? aws_codeconnections_connection.bitbucket[0].arn : ""
+ github = lower(var.vcs_provider) == "github" ? aws_codeconnections_connection.github[0].arn : ""
+ githubenterprise = lower(var.vcs_provider) == "githubenterprise" ? aws_codeconnections_connection.githubenterprise[0].arn : ""
+ gitlab = lower(var.vcs_provider) == "gitlab" ? aws_codeconnections_connection.gitlab[0].arn : ""
+ gitlabselfmanaged = lower(var.vcs_provider) == "gitlabselfmanaged" ? aws_codeconnections_connection.gitlabselfmanaged[0].arn : ""
+ codecommit = "null"
}
}
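
Editor's note: the flag map repeats `lower(var.vcs_provider)` once per provider. An equivalent, more compact formulation (an editor's sketch, not part of the PR; it assumes the module's existing `vcs_provider` variable) normalizes once and derives the flags with a `for` expression, so adding a provider means touching one list:

```hcl
locals {
  provider_normalized = lower(var.vcs_provider)
  vcs_flags = {
    for p in ["codecommit", "bitbucket", "github", "githubenterprise", "gitlab", "gitlabselfmanaged"] :
    p => local.provider_normalized == p
  }
}
```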
diff --git a/modules/aft-code-repositories/outputs.tf b/modules/aft-code-repositories/outputs.tf
index 476e06cb..26b43359 100644
--- a/modules/aft-code-repositories/outputs.tf
+++ b/modules/aft-code-repositories/outputs.tf
@@ -1,6 +1,6 @@
# Copyright Amazon.com, Inc. or its affiliates. All rights reserved.
# SPDX-License-Identifier: Apache-2.0
#
-output "codestar_connection_arn" {
+output "codeconnections_connection_arn" {
value = lookup(local.connection_arn, var.vcs_provider)
}
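
Editor's note: renaming the output is a breaking change for anything that references the module. A hypothetical consumer (module address assumed) would update like so:

```hcl
output "vcs_connection_arn" {
  value = module.aft_code_repositories.codeconnections_connection_arn # was codestar_connection_arn
}
```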
diff --git a/modules/aft-code-repositories/variables.tf b/modules/aft-code-repositories/variables.tf
index 66f1ea73..9bd1ff64 100644
--- a/modules/aft-code-repositories/variables.tf
+++ b/modules/aft-code-repositories/variables.tf
@@ -6,7 +6,8 @@ variable "vpc_id" {
}
variable "subnet_ids" {
- type = list(string)
+ type = list(string)
+ default = null
}
variable "security_group_ids" {
@@ -29,6 +30,10 @@ variable "github_enterprise_url" {
type = string
}
+variable "gitlab_selfmanaged_url" {
+ type = string
+}
+
variable "account_request_table_name" {
type = string
}
@@ -88,3 +93,11 @@ variable "account_provisioning_customizations_repo_name" {
variable "account_provisioning_customizations_repo_branch" {
type = string
}
+
+variable "global_codebuild_timeout" {
+ type = number
+}
+
+variable "aft_enable_vpc" {
+ type = bool
+}
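
Editor's note: giving `subnet_ids` a `null` default makes it genuinely optional, which pairs with the dynamic `vpc_configuration` blocks that never read it when `aft_enable_vpc` is false. If an explicit contract is wanted, a validation like the sketch below (not in the PR) works; the lazy conditional avoids evaluating `length(null)` when the variable is omitted:

```hcl
variable "subnet_ids" {
  type    = list(string)
  default = null

  validation {
    condition     = var.subnet_ids == null ? true : length(var.subnet_ids) > 0
    error_message = "subnet_ids must be null (VPC support disabled) or contain at least one subnet ID."
  }
}
```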
diff --git a/modules/aft-code-repositories/versions.tf b/modules/aft-code-repositories/versions.tf
index 8e5314f4..62a087c2 100755
--- a/modules/aft-code-repositories/versions.tf
+++ b/modules/aft-code-repositories/versions.tf
@@ -2,12 +2,12 @@
# SPDX-License-Identifier: Apache-2.0
#
terraform {
- required_version = ">= 0.15.1"
+ required_version = ">= 1.0.0"
required_providers {
aws = {
source = "hashicorp/aws"
- version = ">= 3.72, < 4.0.0"
+ version = ">= 5.84.0, < 6.0.0"
}
}
}
diff --git a/modules/aft-customizations/buildspecs/aft-account-customizations-api-helpers.yml b/modules/aft-customizations/buildspecs/aft-account-customizations-api-helpers.yml
deleted file mode 100644
index cdd36c1b..00000000
--- a/modules/aft-customizations/buildspecs/aft-account-customizations-api-helpers.yml
+++ /dev/null
@@ -1,50 +0,0 @@
-# Copyright Amazon.com, Inc. or its affiliates. All rights reserved.
-# SPDX-License-Identifier: Apache-2.0
-#
-version: 0.2
-
-phases:
- pre_build:
- commands:
- - DEFAULT_PATH=$(pwd)
- - AWS_MODULE_SOURCE=$(aws ssm get-parameter --name "/aft/config/aft-pipeline-code-source/repo-url" --query "Parameter.Value" --output text)
- - AWS_MODULE_GIT_REF=$(aws ssm get-parameter --name "/aft/config/aft-pipeline-code-source/repo-git-ref" --query "Parameter.Value" --output text)
- - |
- CUSTOMIZATION=$(aws dynamodb get-item --table-name aft-request-metadata --key "{\"id\": {\"S\": \"$VENDED_ACCOUNT_ID\"}}" --attributes-to-get "account_customizations_name" | jq --raw-output ".Item.account_customizations_name.S")
- - echo $CUSTOMIZATION
- - |
- ssh_key_parameter=$(aws ssm get-parameter --name /aft/config/aft-ssh-key --with-decryption || echo "None")
- if [[ $ssh_key_parameter != "None" ]]; then
- ssh_key=$(jq --raw-output ".Parameter.Value" <<< $ssh_key_parameter)
- mkdir -p ~/.ssh
- echo "Host *" >> ~/.ssh/config
- echo "StrictHostKeyChecking no" >> ~/.ssh/config
- echo "UserKnownHostsFile=/dev/null" >> ~/.ssh/config
- echo "$ssh_key" > ~/.ssh/ssh_key
- echo -e "\n\n" >> ~/.ssh/ssh_key
- chmod 600 ~/.ssh/ssh_key
- eval "$(ssh-agent -s)"
- ssh-add ~/.ssh/ssh_key
- fi
- - git config --global credential.helper '!aws codecommit credential-helper $@'
- - git config --global credential.UseHttpPath true
- - git clone -b $AWS_MODULE_GIT_REF $AWS_MODULE_SOURCE aws-aft-core-framework
- - chmod +x ./aws-aft-core-framework/sources/scripts/creds.sh
- - |
- if [ -d "$CUSTOMIZATION" ]; then
- echo "Found customization" $CUSTOMIZATION
- ./aws-aft-core-framework/sources/scripts/creds.sh --account-id $VENDED_ACCOUNT_ID
- python3 -m venv ./venv
- source ./venv/bin/activate
- pip install -r ./$CUSTOMIZATION/api_helpers/python/requirements.txt
- chmod +x ./$CUSTOMIZATION/api_helpers/$SHELL_SCRIPT
- fi
- build:
- commands:
- - |
- if [ -d "$CUSTOMIZATION" ]; then
- ./$CUSTOMIZATION/api_helpers/$SHELL_SCRIPT
- fi
- post_build:
- commands:
- - echo "Post-Build"
diff --git a/modules/aft-customizations/buildspecs/aft-account-customizations-terraform.yml b/modules/aft-customizations/buildspecs/aft-account-customizations-terraform.yml
index 999caa3d..ce3b88ac 100644
--- a/modules/aft-customizations/buildspecs/aft-account-customizations-terraform.yml
+++ b/modules/aft-customizations/buildspecs/aft-account-customizations-terraform.yml
@@ -4,92 +4,153 @@
version: 0.2
phases:
- pre_build:
+ install:
+ on-failure: ABORT
commands:
+ - set -e
+ # Populate Required Variables
- DEFAULT_PATH=$(pwd)
- TIMESTAMP=$(date '+%Y-%m-%d %H:%M:%S')
- - AWS_MODULE_SOURCE=$(aws ssm get-parameter --name "/aft/config/aft-pipeline-code-source/repo-url" --query "Parameter.Value" --output text)
- - AWS_MODULE_GIT_REF=$(aws ssm get-parameter --name "/aft/config/aft-pipeline-code-source/repo-git-ref" --query "Parameter.Value" --output text)
- TF_VERSION=$(aws ssm get-parameter --name "/aft/config/terraform/version" --query "Parameter.Value" --output text)
- TF_DISTRIBUTION=$(aws ssm get-parameter --name "/aft/config/terraform/distribution" --query "Parameter.Value" --output text)
- CT_MGMT_REGION=$(aws ssm get-parameter --name "/aft/config/ct-management-region" --query "Parameter.Value" --output text)
- AFT_MGMT_ACCOUNT=$(aws ssm get-parameter --name "/aft/account/aft-management/account-id" --query "Parameter.Value" --output text)
- - AFT_EXEC_ROLE_ARN=arn:aws:iam::$AFT_MGMT_ACCOUNT:role/AWSAFTExecution
- - VENDED_EXEC_ROLE_ARN=arn:aws:iam::$VENDED_ACCOUNT_ID:role/AWSAFTExecution
+ - AFT_EXEC_ROLE_ARN=arn:$AWS_PARTITION:iam::$AFT_MGMT_ACCOUNT:role/AWSAFTExecution
+ - VENDED_EXEC_ROLE_ARN=arn:$AWS_PARTITION:iam::$VENDED_ACCOUNT_ID:role/AWSAFTExecution
- AFT_ADMIN_ROLE_NAME=$(aws ssm get-parameter --name /aft/resources/iam/aft-administrator-role-name | jq --raw-output ".Parameter.Value")
- - AFT_ADMIN_ROLE_ARN=arn:aws:iam::$AFT_MGMT_ACCOUNT:role/$AFT_ADMIN_ROLE_NAME
+ - AFT_ADMIN_ROLE_ARN=arn:$AWS_PARTITION:iam::$AFT_MGMT_ACCOUNT:role/$AFT_ADMIN_ROLE_NAME
- ROLE_SESSION_NAME=$(aws ssm get-parameter --name /aft/resources/iam/aft-session-name | jq --raw-output ".Parameter.Value")
- - |
- ssh_key_parameter=$(aws ssm get-parameter --name /aft/config/aft-ssh-key --with-decryption || echo "None")
- if [[ $ssh_key_parameter != "None" ]]; then
- ssh_key=$(jq --raw-output ".Parameter.Value" <<< $ssh_key_parameter)
- mkdir -p ~/.ssh
- echo "Host *" >> ~/.ssh/config
- echo "StrictHostKeyChecking no" >> ~/.ssh/config
- echo "UserKnownHostsFile=/dev/null" >> ~/.ssh/config
- echo "$ssh_key" > ~/.ssh/ssh_key
- echo -e "\n\n" >> ~/.ssh/ssh_key
- chmod 600 ~/.ssh/ssh_key
- eval "$(ssh-agent -s)"
- ssh-add ~/.ssh/ssh_key
- fi
- - git config --global credential.helper '!aws codecommit credential-helper $@'
- - git config --global credential.UseHttpPath true
- - git clone -b $AWS_MODULE_GIT_REF $AWS_MODULE_SOURCE aws-aft-core-framework
- - python3 -m venv ./venv
- - source ./venv/bin/activate
- - pip install jinja2-cli==0.7.0 Jinja2==3.0.1 MarkupSafe==2.0.1 boto3==1.18.56 requests==2.26.0
- |
CUSTOMIZATION=$(aws dynamodb get-item --table-name aft-request-metadata --key "{\"id\": {\"S\": \"$VENDED_ACCOUNT_ID\"}}" --attributes-to-get "account_customizations_name" | jq --raw-output ".Item.account_customizations_name.S")
- - echo $CUSTOMIZATION
+
+ # Check if customization directory exists
- |
- if [ -d "$CUSTOMIZATION" ]; then
+ if [[ ! -z "$CUSTOMIZATION" ]]; then
+ if [[ ! -d "$DEFAULT_PATH/$CUSTOMIZATION" ]]; then
+ echo "Error: ${CUSTOMIZATION} directory does not exist"
+ exit 1
+ fi
+
echo "Found customization" $CUSTOMIZATION
+
+ # Configure Development SSH Key
+ ssh_key_parameter=$(aws ssm get-parameter --name /aft/config/aft-ssh-key --with-decryption 2> /dev/null || echo "None")
+
+ if [[ $ssh_key_parameter != "None" ]]; then
+ ssh_key=$(jq --raw-output ".Parameter.Value" <<< $ssh_key_parameter)
+ mkdir -p ~/.ssh
+ echo "Host *" >> ~/.ssh/config
+ echo "StrictHostKeyChecking no" >> ~/.ssh/config
+ echo "UserKnownHostsFile=/dev/null" >> ~/.ssh/config
+ echo "$ssh_key" > ~/.ssh/ssh_key
+ echo -e "\n\n" >> ~/.ssh/ssh_key
+ chmod 600 ~/.ssh/ssh_key
+ eval "$(ssh-agent -s)"
+ ssh-add ~/.ssh/ssh_key
+ fi
+
+ # Clone AFT
+ AWS_MODULE_SOURCE=$(aws ssm get-parameter --name "/aft/config/aft-pipeline-code-source/repo-url" --query "Parameter.Value" --output text)
+ AWS_MODULE_GIT_REF=$(aws ssm get-parameter --name "/aft/config/aft-pipeline-code-source/repo-git-ref" --query "Parameter.Value" --output text)
+ git config --global credential.helper '!aws codecommit credential-helper $@'
+ git config --global credential.UseHttpPath true
+ git clone --quiet -b $AWS_MODULE_GIT_REF $AWS_MODULE_SOURCE aws-aft-core-framework
+
+ # Install AFT Python Dependencies
+ python3 -m venv $DEFAULT_PATH/aft-venv
+ $DEFAULT_PATH/aft-venv/bin/pip install pip==22.1.2
+ $DEFAULT_PATH/aft-venv/bin/pip install jinja2-cli==0.7.0 Jinja2==3.0.1 MarkupSafe==2.0.1 boto3==1.18.56 requests==2.26.0
+
+ # Install API Helper Python Dependencies
+ python3 -m venv $DEFAULT_PATH/api-helpers-venv
+ $DEFAULT_PATH/api-helpers-venv/bin/pip install -r $DEFAULT_PATH/$CUSTOMIZATION/api_helpers/python/requirements.txt
+
+ # Mark helper scripts as executable
+ chmod +x $DEFAULT_PATH/$CUSTOMIZATION/api_helpers/pre-api-helpers.sh
+ chmod +x $DEFAULT_PATH/$CUSTOMIZATION/api_helpers/post-api-helpers.sh
+
+ # Generate session profiles
+ chmod +x $DEFAULT_PATH/aws-aft-core-framework/sources/scripts/creds.sh
+ $DEFAULT_PATH/aws-aft-core-framework/sources/scripts/creds.sh
+ fi
+
+
+ pre_build:
+ on-failure: ABORT
+ commands:
+ - |
+ if [[ ! -z "$CUSTOMIZATION" ]]; then
+ source $DEFAULT_PATH/api-helpers-venv/bin/activate
+ export AWS_PROFILE=aft-target
+ $DEFAULT_PATH/$CUSTOMIZATION/api_helpers/pre-api-helpers.sh
+ unset AWS_PROFILE
+ fi
+
+ build:
+ on-failure: CONTINUE
+ commands:
+ # Apply Customizations
+ - |
+ if [[ ! -z "$CUSTOMIZATION" ]]; then
+ source $DEFAULT_PATH/aft-venv/bin/activate
if [ $TF_DISTRIBUTION = "oss" ]; then
TF_BACKEND_REGION=$(aws ssm get-parameter --name "/aft/config/oss-backend/primary-region" --query "Parameter.Value" --output text)
TF_KMS_KEY_ID=$(aws ssm get-parameter --name "/aft/config/oss-backend/kms-key-id" --query "Parameter.Value" --output text)
TF_DDB_TABLE=$(aws ssm get-parameter --name "/aft/config/oss-backend/table-id" --query "Parameter.Value" --output text)
TF_S3_BUCKET=$(aws ssm get-parameter --name "/aft/config/oss-backend/bucket-id" --query "Parameter.Value" --output text)
TF_S3_KEY=$VENDED_ACCOUNT_ID-aft-account-customizations/terraform.tfstate
+
cd /tmp
echo "Installing Terraform"
- curl -o terraform_${TF_VERSION}_linux_amd64.zip https://releases.hashicorp.com/terraform/${TF_VERSION}/terraform_${TF_VERSION}_linux_amd64.zip
- unzip -o terraform_${TF_VERSION}_linux_amd64.zip && mv terraform /usr/bin
- terraform --version
+ curl -q -o terraform_${TF_VERSION}_linux_amd64.zip https://releases.hashicorp.com/terraform/${TF_VERSION}/terraform_${TF_VERSION}_linux_amd64.zip
+ mkdir -p /opt/aft/bin
+ unzip -q -o terraform_${TF_VERSION}_linux_amd64.zip
+ mv terraform /opt/aft/bin
+ /opt/aft/bin/terraform -no-color --version
+
cd $DEFAULT_PATH/$CUSTOMIZATION/terraform
- for f in *.jinja; do jinja2 $f -D timestamp="$TIMESTAMP" -D tf_distribution_type=$TF_DISTRIBUTION -D provider_region=$CT_MGMT_REGION -D region=$TF_BACKEND_REGION -D aft_admin_role_arn=$AFT_EXEC_ROLE_ARN -D target_admin_role_arn=$VENDED_EXEC_ROLE_ARN -D bucket=$TF_S3_BUCKET -D key=$TF_S3_KEY -D dynamodb_table=$TF_DDB_TABLE -D kms_key_id=$TF_KMS_KEY_ID >> ./$(basename $f .jinja).tf; done
+ for f in *.jinja; do jinja2 $f -D timestamp="$TIMESTAMP" -D tf_distribution_type=$TF_DISTRIBUTION -D provider_region=$CT_MGMT_REGION -D region=$TF_BACKEND_REGION -D aft_admin_role_arn=$AFT_EXEC_ROLE_ARN -D target_admin_role_arn=$VENDED_EXEC_ROLE_ARN -D bucket=$TF_S3_BUCKET -D key=$TF_S3_KEY -D dynamodb_table=$TF_DDB_TABLE -D kms_key_id=$TF_KMS_KEY_ID -D tf_version=$TF_VERSION >> ./$(basename $f .jinja).tf; done
for f in *.tf; do echo "\n \n"; echo $f; cat $f; done
- JSON=$(aws sts assume-role --role-arn ${AFT_ADMIN_ROLE_ARN} --role-session-name ${ROLE_SESSION_NAME})
- #Make newly assumed role default session
- export AWS_ACCESS_KEY_ID=$(echo ${JSON} | jq --raw-output ".Credentials[\"AccessKeyId\"]")
- export AWS_SECRET_ACCESS_KEY=$(echo ${JSON} | jq --raw-output ".Credentials[\"SecretAccessKey\"]")
- export AWS_SESSION_TOKEN=$(echo ${JSON} | jq --raw-output ".Credentials[\"SessionToken\"]")
- terraform init
+
+ cd $DEFAULT_PATH/$CUSTOMIZATION/terraform
+ export AWS_PROFILE=aft-management-admin
+ /opt/aft/bin/terraform init -no-color
+ /opt/aft/bin/terraform apply -no-color --auto-approve
else
TF_BACKEND_REGION=$(aws ssm get-parameter --name "/aft/config/oss-backend/primary-region" --query "Parameter.Value" --output text)
TF_ORG_NAME=$(aws ssm get-parameter --name "/aft/config/terraform/org-name" --query "Parameter.Value" --output text)
TF_TOKEN=$(aws ssm get-parameter --name "/aft/config/terraform/token" --with-decryption --query "Parameter.Value" --output text)
TF_ENDPOINT=$(aws ssm get-parameter --name "/aft/config/terraform/api-endpoint" --query "Parameter.Value" --output text)
+ TF_PROJECT_NAME=$(aws ssm get-parameter --name "/aft/config/terraform/project-name" --query "Parameter.Value" --output text)
TF_WORKSPACE_NAME=$VENDED_ACCOUNT_ID-aft-account-customizations
TF_CONFIG_PATH="./temp_configuration_file.tar.gz"
+
cd $DEFAULT_PATH/$CUSTOMIZATION/terraform
for f in *.jinja; do jinja2 $f -D timestamp="$TIMESTAMP" -D provider_region=$CT_MGMT_REGION -D tf_distribution_type=$TF_DISTRIBUTION -D aft_admin_role_arn=$AFT_EXEC_ROLE_ARN -D target_admin_role_arn=$VENDED_EXEC_ROLE_ARN -D terraform_org_name=$TF_ORG_NAME -D terraform_workspace_name=$TF_WORKSPACE_NAME >> ./$(basename $f .jinja).tf; done
for f in *.tf; do echo "\n \n"; echo $f; cat $f; done
+
cd $DEFAULT_PATH/$CUSTOMIZATION
tar -czf temp_configuration_file.tar.gz -C terraform --exclude .git --exclude venv .
- python3 $DEFAULT_PATH/aws-aft-core-framework/sources/scripts/workspace_manager.py --operation "deploy" --organization_name $TF_ORG_NAME --workspace_name $TF_WORKSPACE_NAME --assume_role_arn $AFT_ADMIN_ROLE_ARN --assume_role_session_name $ROLE_SESSION_NAME --api_endpoint $TF_ENDPOINT --api_token $TF_TOKEN --terraform_version $TF_VERSION --config_file $TF_CONFIG_PATH
+ python3 $DEFAULT_PATH/aws-aft-core-framework/sources/scripts/workspace_manager.py --operation "deploy" --organization_name $TF_ORG_NAME --workspace_name $TF_WORKSPACE_NAME --assume_role_arn $AFT_ADMIN_ROLE_ARN --assume_role_session_name $ROLE_SESSION_NAME --api_endpoint $TF_ENDPOINT --api_token $TF_TOKEN --terraform_version $TF_VERSION --config_file $TF_CONFIG_PATH --project_name "$TF_PROJECT_NAME"
fi
fi
- build:
+ post_build:
+ on-failure: ABORT
commands:
- - cd $DEFAULT_PATH
- |
- if [ -d "$CUSTOMIZATION" ]; then
- if [ $TF_DISTRIBUTION = "oss" ]; then
- cd $DEFAULT_PATH/$CUSTOMIZATION/terraform
- terraform apply --auto-approve
- fi
+ if [[ ! -z "$CUSTOMIZATION" ]]; then
+ export PYTHONPATH="$DEFAULT_PATH/aws-aft-core-framework/sources/aft-lambda-layer:$PYTHONPATH"
+ export AWS_PROFILE=aft-management
+ python3 $DEFAULT_PATH/aws-aft-core-framework/sources/aft-lambda-layer/aft_common/metrics.py --codebuild-name "aft-account-customizations" --codebuild-status $CODEBUILD_BUILD_SUCCEEDING
+ unset AWS_PROFILE
+ fi
+ - |
+ if [[ $CODEBUILD_BUILD_SUCCEEDING == 0 ]]; then
+ exit 1
+ fi
+ - |
+ if [[ ! -z "$CUSTOMIZATION" ]]; then
+ source $DEFAULT_PATH/api-helpers-venv/bin/activate
+ export AWS_PROFILE=aft-target
+ $DEFAULT_PATH/$CUSTOMIZATION/api_helpers/post-api-helpers.sh
fi
- post_build:
- commands:
- - echo "Post-Build"
diff --git a/modules/aft-customizations/buildspecs/aft-create-pipeline.yml b/modules/aft-customizations/buildspecs/aft-create-pipeline.yml
index 8d542c34..d6b21019 100644
--- a/modules/aft-customizations/buildspecs/aft-create-pipeline.yml
+++ b/modules/aft-customizations/buildspecs/aft-create-pipeline.yml
@@ -4,13 +4,23 @@
version: 0.2
phases:
- pre_build:
+ install:
commands:
+ - set -e
+ # Populate Required Variables
- DEFAULT_PATH=$(pwd)
- - AWS_MODULE_SOURCE=$(aws ssm get-parameter --name $SSM_AWS_MODULE_SOURCE --query "Parameter.Value" --output text)
- - AWS_MODULE_GIT_REF=$(aws ssm get-parameter --name $SSM_AWS_MODULE_GIT_REF --query "Parameter.Value" --output text)
+ - TIMESTAMP=$(date '+%Y-%m-%d %H:%M:%S')
+ - TF_S3_BUCKET=$(aws ssm get-parameter --name $SSM_TF_S3_BUCKET --query "Parameter.Value" --output text)
+ - TF_S3_KEY=$VENDED_ACCOUNT_ID-customizations-pipeline/terraform.tfstate
+ - TF_BACKEND_REGION=$(aws ssm get-parameter --name $SSM_TF_BACKEND_REGION --query "Parameter.Value" --output text)
+ - TF_KMS_KEY_ID=$(aws ssm get-parameter --name $SSM_TF_KMS_KEY_ID --query "Parameter.Value" --output text)
+ - TF_DDB_TABLE=$(aws ssm get-parameter --name $SSM_TF_DDB_TABLE --query "Parameter.Value" --output text)
+ - TF_VERSION=$(aws ssm get-parameter --name $SSM_TF_VERSION --query "Parameter.Value" --output text)
+
+
+ # Configure Development SSH Key
- |
- ssh_key_parameter=$(aws ssm get-parameter --name /aft/config/aft-ssh-key --with-decryption || echo "None")
+ ssh_key_parameter=$(aws ssm get-parameter --name /aft/config/aft-ssh-key --with-decryption 2> /dev/null || echo "None")
if [[ $ssh_key_parameter != "None" ]]; then
ssh_key=$(jq --raw-output ".Parameter.Value" <<< $ssh_key_parameter)
mkdir -p ~/.ssh
@@ -23,34 +33,40 @@ phases:
eval "$(ssh-agent -s)"
ssh-add ~/.ssh/ssh_key
fi
- - TF_S3_BUCKET=$(aws ssm get-parameter --name $SSM_TF_S3_BUCKET --query "Parameter.Value" --output text)
- - TF_S3_KEY=$VENDED_ACCOUNT_ID-customizations-pipeline/terraform.tfstate
- - TF_BACKEND_REGION=$(aws ssm get-parameter --name $SSM_TF_BACKEND_REGION --query "Parameter.Value" --output text)
- - TF_KMS_KEY_ID=$(aws ssm get-parameter --name $SSM_TF_KMS_KEY_ID --query "Parameter.Value" --output text)
- - TF_DDB_TABLE=$(aws ssm get-parameter --name $SSM_TF_DDB_TABLE --query "Parameter.Value" --output text)
- - TF_VERSION=$(aws ssm get-parameter --name $SSM_TF_VERSION --query "Parameter.Value" --output text)
+
+ # Clone AFT
+ - AWS_MODULE_SOURCE=$(aws ssm get-parameter --name $SSM_AWS_MODULE_SOURCE --query "Parameter.Value" --output text)
+ - AWS_MODULE_GIT_REF=$(aws ssm get-parameter --name $SSM_AWS_MODULE_GIT_REF --query "Parameter.Value" --output text)
- git config --global credential.helper '!aws codecommit credential-helper $@'
- git config --global credential.UseHttpPath true
- - git clone -b $AWS_MODULE_GIT_REF $AWS_MODULE_SOURCE aws-aft-core-framework
- - chmod +x ./aws-aft-core-framework/sources/scripts/creds.sh
- - ./aws-aft-core-framework/sources/scripts/creds.sh --aft-mgmt
- - TIMESTAMP=$(date '+%Y-%m-%d %H:%M:%S')
+ - git clone --quiet -b $AWS_MODULE_GIT_REF $AWS_MODULE_SOURCE aws-aft-core-framework
+
+ # Generate session profiles
+ - chmod +x $DEFAULT_PATH/aws-aft-core-framework/sources/scripts/creds.sh
+ - $DEFAULT_PATH/aws-aft-core-framework/sources/scripts/creds.sh
+
+ # Install Terraform
- cd /tmp
- echo "Installing Terraform"
- - curl -o terraform_${TF_VERSION}_linux_amd64.zip https://releases.hashicorp.com/terraform/${TF_VERSION}/terraform_${TF_VERSION}_linux_amd64.zip
- - unzip -o terraform_${TF_VERSION}_linux_amd64.zip && mv terraform /usr/bin
+ - curl -q -o terraform_${TF_VERSION}_linux_amd64.zip https://releases.hashicorp.com/terraform/${TF_VERSION}/terraform_${TF_VERSION}_linux_amd64.zip
+ - unzip -q -o terraform_${TF_VERSION}_linux_amd64.zip && mv terraform /usr/bin
- terraform --version
- - cd $DEFAULT_PATH
- - cd ./aws-aft-core-framework/sources/aft-customizations-common/templates/customizations_pipeline
+
+ # Install Python Dependencies
- python3 -m venv ./venv
- source ./venv/bin/activate
- - pip install jinja2-cli
- - for f in *.jinja; do jinja2 $f -D timestamp="$TIMESTAMP" -D region=$TF_BACKEND_REGION -D bucket=$TF_S3_BUCKET -D key=$TF_S3_KEY -D dynamodb_table=$TF_DDB_TABLE -D kms_key_id=$TF_KMS_KEY_ID >> $(basename $f .jinja).tf; done
+ - pip install pip==22.1.2
+ - pip install jinja2-cli==0.7.0 Jinja2==3.0.1
+
+ pre_build:
+ on-failure: ABORT
+ commands:
+ - cd $DEFAULT_PATH/aws-aft-core-framework/sources/aft-customizations-common/templates/customizations_pipeline
+ - for f in *.jinja; do jinja2 $f -D timestamp="$TIMESTAMP" -D region=$TF_BACKEND_REGION -D bucket=$TF_S3_BUCKET -D key=$TF_S3_KEY -D dynamodb_table=$TF_DDB_TABLE -D kms_key_id=$TF_KMS_KEY_ID -D tf_version=$TF_VERSION >> $(basename $f .jinja).tf; done
- for f in *.tf; do echo "\n \n"; echo $f; cat $f; done
- - terraform init
build:
+ on-failure: ABORT
commands:
- - terraform apply -var="account_id=$VENDED_ACCOUNT_ID" --auto-approve
- post_build:
- commands:
- - echo "Post-Build"
+ - export AWS_PROFILE=aft-management-admin
+ - terraform init -no-color
+ - terraform apply -var="account_id=$VENDED_ACCOUNT_ID" -no-color --auto-approve
diff --git a/modules/aft-customizations/buildspecs/aft-global-customizations-api-helpers.yml b/modules/aft-customizations/buildspecs/aft-global-customizations-api-helpers.yml
deleted file mode 100644
index 75ffe1d5..00000000
--- a/modules/aft-customizations/buildspecs/aft-global-customizations-api-helpers.yml
+++ /dev/null
@@ -1,40 +0,0 @@
-# Copyright Amazon.com, Inc. or its affiliates. All rights reserved.
-# SPDX-License-Identifier: Apache-2.0
-#
-version: 0.2
-
-phases:
- pre_build:
- commands:
- - DEFAULT_PATH=$(pwd)
- - AWS_MODULE_SOURCE=$(aws ssm get-parameter --name "/aft/config/aft-pipeline-code-source/repo-url" --query "Parameter.Value" --output text)
- - AWS_MODULE_GIT_REF=$(aws ssm get-parameter --name "/aft/config/aft-pipeline-code-source/repo-git-ref" --query "Parameter.Value" --output text)
- - |
- ssh_key_parameter=$(aws ssm get-parameter --name /aft/config/aft-ssh-key --with-decryption || echo "None")
- if [[ $ssh_key_parameter != "None" ]]; then
- ssh_key=$(jq --raw-output ".Parameter.Value" <<< $ssh_key_parameter)
- mkdir -p ~/.ssh
- echo "Host *" >> ~/.ssh/config
- echo "StrictHostKeyChecking no" >> ~/.ssh/config
- echo "UserKnownHostsFile=/dev/null" >> ~/.ssh/config
- echo "$ssh_key" > ~/.ssh/ssh_key
- echo -e "\n\n" >> ~/.ssh/ssh_key
- chmod 600 ~/.ssh/ssh_key
- eval "$(ssh-agent -s)"
- ssh-add ~/.ssh/ssh_key
- fi
- - git config --global credential.helper '!aws codecommit credential-helper $@'
- - git config --global credential.UseHttpPath true
- - git clone -b $AWS_MODULE_GIT_REF $AWS_MODULE_SOURCE aws-aft-core-framework
- - chmod +x ./aws-aft-core-framework/sources/scripts/creds.sh
- - ./aws-aft-core-framework/sources/scripts/creds.sh --account-id $VENDED_ACCOUNT_ID
- - python3 -m venv ./venv
- - source ./venv/bin/activate
- - pip install -r ./api_helpers/python/requirements.txt
- - chmod +x ./api_helpers/$SHELL_SCRIPT
- build:
- commands:
- - ./api_helpers/$SHELL_SCRIPT
- post_build:
- commands:
- - echo "Post-Build"
diff --git a/modules/aft-customizations/buildspecs/aft-global-customizations-terraform.yml b/modules/aft-customizations/buildspecs/aft-global-customizations-terraform.yml
index 1e7bb043..6efa25c2 100644
--- a/modules/aft-customizations/buildspecs/aft-global-customizations-terraform.yml
+++ b/modules/aft-customizations/buildspecs/aft-global-customizations-terraform.yml
@@ -4,23 +4,25 @@
version: 0.2
phases:
- pre_build:
+ install:
commands:
+ - set -e
+ # Populate Required Variables
- DEFAULT_PATH=$(pwd)
- TIMESTAMP=$(date '+%Y-%m-%d %H:%M:%S')
- - AWS_MODULE_SOURCE=$(aws ssm get-parameter --name "/aft/config/aft-pipeline-code-source/repo-url" --query "Parameter.Value" --output text)
- - AWS_MODULE_GIT_REF=$(aws ssm get-parameter --name "/aft/config/aft-pipeline-code-source/repo-git-ref" --query "Parameter.Value" --output text)
- TF_VERSION=$(aws ssm get-parameter --name "/aft/config/terraform/version" --query "Parameter.Value" --output text)
- TF_DISTRIBUTION=$(aws ssm get-parameter --name "/aft/config/terraform/distribution" --query "Parameter.Value" --output text)
- CT_MGMT_REGION=$(aws ssm get-parameter --name "/aft/config/ct-management-region" --query "Parameter.Value" --output text)
- AFT_MGMT_ACCOUNT=$(aws ssm get-parameter --name "/aft/account/aft-management/account-id" --query "Parameter.Value" --output text)
- - AFT_EXEC_ROLE_ARN=arn:aws:iam::$AFT_MGMT_ACCOUNT:role/AWSAFTExecution
- - VENDED_EXEC_ROLE_ARN=arn:aws:iam::$VENDED_ACCOUNT_ID:role/AWSAFTExecution
+ - AFT_EXEC_ROLE_ARN=arn:$AWS_PARTITION:iam::$AFT_MGMT_ACCOUNT:role/AWSAFTExecution
+ - VENDED_EXEC_ROLE_ARN=arn:$AWS_PARTITION:iam::$VENDED_ACCOUNT_ID:role/AWSAFTExecution
- AFT_ADMIN_ROLE_NAME=$(aws ssm get-parameter --name /aft/resources/iam/aft-administrator-role-name | jq --raw-output ".Parameter.Value")
- - AFT_ADMIN_ROLE_ARN=arn:aws:iam::$AFT_MGMT_ACCOUNT:role/$AFT_ADMIN_ROLE_NAME
+ - AFT_ADMIN_ROLE_ARN=arn:$AWS_PARTITION:iam::$AFT_MGMT_ACCOUNT:role/$AFT_ADMIN_ROLE_NAME
- ROLE_SESSION_NAME=$(aws ssm get-parameter --name /aft/resources/iam/aft-session-name | jq --raw-output ".Parameter.Value")
+
+ # Configure Development SSH Key
- |
- ssh_key_parameter=$(aws ssm get-parameter --name /aft/config/aft-ssh-key --with-decryption || echo "None")
+ ssh_key_parameter=$(aws ssm get-parameter --name /aft/config/aft-ssh-key --with-decryption 2> /dev/null || echo "None")
if [[ $ssh_key_parameter != "None" ]]; then
ssh_key=$(jq --raw-output ".Parameter.Value" <<< $ssh_key_parameter)
mkdir -p ~/.ssh
@@ -33,12 +35,44 @@ phases:
eval "$(ssh-agent -s)"
ssh-add ~/.ssh/ssh_key
fi
+
+ # Clone AFT
+ - AWS_MODULE_SOURCE=$(aws ssm get-parameter --name "/aft/config/aft-pipeline-code-source/repo-url" --query "Parameter.Value" --output text)
+ - AWS_MODULE_GIT_REF=$(aws ssm get-parameter --name "/aft/config/aft-pipeline-code-source/repo-git-ref" --query "Parameter.Value" --output text)
- git config --global credential.helper '!aws codecommit credential-helper $@'
- git config --global credential.UseHttpPath true
- - git clone -b $AWS_MODULE_GIT_REF $AWS_MODULE_SOURCE aws-aft-core-framework
- - python3 -m venv ./venv
- - source ./venv/bin/activate
- - pip install jinja2-cli==0.7.0 Jinja2==3.0.1 MarkupSafe==2.0.1 boto3==1.18.56 requests==2.26.0
+ - git clone --quiet -b $AWS_MODULE_GIT_REF $AWS_MODULE_SOURCE aws-aft-core-framework
+
+ # Generate session profiles
+ - chmod +x $DEFAULT_PATH/aws-aft-core-framework/sources/scripts/creds.sh
+ - $DEFAULT_PATH/aws-aft-core-framework/sources/scripts/creds.sh
+
+ # Install AFT Python Dependencies
+ - python3 -m venv $DEFAULT_PATH/aft-venv
+ - $DEFAULT_PATH/aft-venv/bin/pip install pip==22.1.2
+ - $DEFAULT_PATH/aft-venv/bin/pip install jinja2-cli==0.7.0 Jinja2==3.0.1 MarkupSafe==2.0.1 boto3==1.18.56 requests==2.26.0
+
+ # Install API Helper Python Dependencies
+ - python3 -m venv $DEFAULT_PATH/api-helpers-venv
+ - $DEFAULT_PATH/api-helpers-venv/bin/pip install -r $DEFAULT_PATH/api_helpers/python/requirements.txt
+
+ # Mark helper scripts as executable
+ - chmod +x $DEFAULT_PATH/api_helpers/pre-api-helpers.sh
+ - chmod +x $DEFAULT_PATH/api_helpers/post-api-helpers.sh
+
+ pre_build:
+ on-failure: ABORT
+ commands:
+ - source $DEFAULT_PATH/api-helpers-venv/bin/activate
+ - export AWS_PROFILE=aft-target
+ - $DEFAULT_PATH/api_helpers/pre-api-helpers.sh
+ - unset AWS_PROFILE
+
+ build:
+ on-failure: CONTINUE
+ commands:
+ # Apply customizations
+ - source $DEFAULT_PATH/aft-venv/bin/activate
- |
if [ $TF_DISTRIBUTION = "oss" ]; then
TF_BACKEND_REGION=$(aws ssm get-parameter --name "/aft/config/oss-backend/primary-region" --query "Parameter.Value" --output text)
@@ -46,25 +80,29 @@ phases:
TF_DDB_TABLE=$(aws ssm get-parameter --name "/aft/config/oss-backend/table-id" --query "Parameter.Value" --output text)
TF_S3_BUCKET=$(aws ssm get-parameter --name "/aft/config/oss-backend/bucket-id" --query "Parameter.Value" --output text)
TF_S3_KEY=$VENDED_ACCOUNT_ID-aft-global-customizations/terraform.tfstate
+
cd /tmp
echo "Installing Terraform"
- curl -o terraform_${TF_VERSION}_linux_amd64.zip https://releases.hashicorp.com/terraform/${TF_VERSION}/terraform_${TF_VERSION}_linux_amd64.zip
- unzip -o terraform_${TF_VERSION}_linux_amd64.zip && mv terraform /usr/bin
- terraform --version
+ curl -q -o terraform_${TF_VERSION}_linux_amd64.zip https://releases.hashicorp.com/terraform/${TF_VERSION}/terraform_${TF_VERSION}_linux_amd64.zip
+ mkdir -p /opt/aft/bin
+ unzip -q -o terraform_${TF_VERSION}_linux_amd64.zip
+ mv terraform /opt/aft/bin
+ /opt/aft/bin/terraform -no-color --version
+
+ # Move back to customization module
cd $DEFAULT_PATH/terraform
- for f in *.jinja; do jinja2 $f -D timestamp="$TIMESTAMP" -D tf_distribution_type=$TF_DISTRIBUTION -D provider_region=$CT_MGMT_REGION -D region=$TF_BACKEND_REGION -D aft_admin_role_arn=$AFT_EXEC_ROLE_ARN -D target_admin_role_arn=$VENDED_EXEC_ROLE_ARN -D bucket=$TF_S3_BUCKET -D key=$TF_S3_KEY -D dynamodb_table=$TF_DDB_TABLE -D kms_key_id=$TF_KMS_KEY_ID >> ./$(basename $f .jinja).tf; done
+ for f in *.jinja; do jinja2 $f -D timestamp="$TIMESTAMP" -D tf_distribution_type=$TF_DISTRIBUTION -D provider_region=$CT_MGMT_REGION -D region=$TF_BACKEND_REGION -D aft_admin_role_arn=$AFT_EXEC_ROLE_ARN -D target_admin_role_arn=$VENDED_EXEC_ROLE_ARN -D bucket=$TF_S3_BUCKET -D key=$TF_S3_KEY -D dynamodb_table=$TF_DDB_TABLE -D kms_key_id=$TF_KMS_KEY_ID -D tf_version=$TF_VERSION >> ./$(basename $f .jinja).tf; done
for f in *.tf; do echo "\n \n"; echo $f; cat $f; done
- JSON=$(aws sts assume-role --role-arn ${AFT_ADMIN_ROLE_ARN} --role-session-name ${ROLE_SESSION_NAME})
- #Make newly assumed role default session
- export AWS_ACCESS_KEY_ID=$(echo ${JSON} | jq --raw-output ".Credentials[\"AccessKeyId\"]")
- export AWS_SECRET_ACCESS_KEY=$(echo ${JSON} | jq --raw-output ".Credentials[\"SecretAccessKey\"]")
- export AWS_SESSION_TOKEN=$(echo ${JSON} | jq --raw-output ".Credentials[\"SessionToken\"]")
- terraform init
+
+ cd $DEFAULT_PATH/terraform
+ export AWS_PROFILE=aft-management-admin
+ /opt/aft/bin/terraform init -no-color
+ /opt/aft/bin/terraform apply -no-color --auto-approve
else
- TF_BACKEND_REGION=$(aws ssm get-parameter --name "/aft/config/oss-backend/primary-region" --query "Parameter.Value" --output text)
TF_ORG_NAME=$(aws ssm get-parameter --name "/aft/config/terraform/org-name" --query "Parameter.Value" --output text)
TF_TOKEN=$(aws ssm get-parameter --name "/aft/config/terraform/token" --with-decryption --query "Parameter.Value" --output text)
TF_ENDPOINT=$(aws ssm get-parameter --name "/aft/config/terraform/api-endpoint" --query "Parameter.Value" --output text)
+ TF_PROJECT_NAME=$(aws ssm get-parameter --name "/aft/config/terraform/project-name" --query "Parameter.Value" --output text)
TF_WORKSPACE_NAME=$VENDED_ACCOUNT_ID-aft-global-customizations
TF_CONFIG_PATH="./temp_configuration_file.tar.gz"
cd $DEFAULT_PATH/terraform
@@ -72,12 +110,20 @@ phases:
for f in *.tf; do echo "\n \n"; echo $f; cat $f; done
cd $DEFAULT_PATH
tar -czf temp_configuration_file.tar.gz -C terraform --exclude .git --exclude venv .
- python3 $DEFAULT_PATH/aws-aft-core-framework/sources/scripts/workspace_manager.py --operation "deploy" --organization_name $TF_ORG_NAME --workspace_name $TF_WORKSPACE_NAME --assume_role_arn $AFT_ADMIN_ROLE_ARN --assume_role_session_name $ROLE_SESSION_NAME --api_endpoint $TF_ENDPOINT --api_token $TF_TOKEN --terraform_version $TF_VERSION --config_file $TF_CONFIG_PATH
+ python3 $DEFAULT_PATH/aws-aft-core-framework/sources/scripts/workspace_manager.py --operation "deploy" --organization_name $TF_ORG_NAME --workspace_name $TF_WORKSPACE_NAME --assume_role_arn $AFT_ADMIN_ROLE_ARN --assume_role_session_name $ROLE_SESSION_NAME --api_endpoint $TF_ENDPOINT --api_token $TF_TOKEN --terraform_version $TF_VERSION --config_file $TF_CONFIG_PATH --project_name "$TF_PROJECT_NAME"
fi
- build:
+ post_build:
+ on-failure: ABORT
commands:
+ - export PYTHONPATH="$DEFAULT_PATH/aws-aft-core-framework/sources/aft-lambda-layer:$PYTHONPATH"
+ - export AWS_PROFILE=aft-management
+ - python3 $DEFAULT_PATH/aws-aft-core-framework/sources/aft-lambda-layer/aft_common/metrics.py --codebuild-name "aft-global-customizations" --codebuild-status $CODEBUILD_BUILD_SUCCEEDING
+ - unset AWS_PROFILE
- |
- if [ $TF_DISTRIBUTION = "oss" ]; then
- terraform apply --auto-approve
+ if [[ $CODEBUILD_BUILD_SUCCEEDING == 0 ]]; then
+ exit 1
fi
+ - source $DEFAULT_PATH/api-helpers-venv/bin/activate
+ - export AWS_PROFILE=aft-target
+ - $DEFAULT_PATH/api_helpers/post-api-helpers.sh
diff --git a/modules/aft-customizations/codebuild.tf b/modules/aft-customizations/codebuild.tf
index a4ce82f8..272a5249 100644
--- a/modules/aft-customizations/codebuild.tf
+++ b/modules/aft-customizations/codebuild.tf
@@ -6,10 +6,10 @@
#####################################################
resource "aws_codebuild_project" "aft_global_customizations_terraform" {
- depends_on = [aws_cloudwatch_log_group.aft_global_customizations_terraform]
+ depends_on = [aws_cloudwatch_log_group.aft_global_customizations_terraform, time_sleep.wait_for_iam_eventual_consistency]
name = "aft-global-customizations-terraform"
description = "Job to apply Terraform provided by the customer global customizations repo"
- build_timeout = "60"
+ build_timeout = tostring(var.global_codebuild_timeout)
service_role = aws_iam_role.aft_codebuild_customizations_role.arn
encryption_key = var.aft_kms_key_arn
@@ -19,9 +19,15 @@ resource "aws_codebuild_project" "aft_global_customizations_terraform" {
environment {
compute_type = "BUILD_GENERAL1_MEDIUM"
- image = "aws/codebuild/amazonlinux2-x86_64-standard:3.0"
+ image = "aws/codebuild/amazonlinux2-x86_64-standard:5.0"
type = "LINUX_CONTAINER"
image_pull_credentials_type = "CODEBUILD"
+
+ environment_variable {
+ name = "AWS_PARTITION"
+ value = data.aws_partition.current.partition
+ type = "PLAINTEXT"
+ }
}
logs_config {
@@ -40,14 +46,30 @@ resource "aws_codebuild_project" "aft_global_customizations_terraform" {
buildspec = data.local_file.aft_global_customizations_terraform.content
}
- vpc_config {
- vpc_id = var.aft_vpc_id
- subnets = var.aft_vpc_private_subnets
- security_group_ids = var.aft_vpc_default_sg
+ dynamic "vpc_config" {
+ for_each = var.aft_enable_vpc ? [1] : []
+ content {
+ vpc_id = var.aft_vpc_id
+ subnets = var.aft_vpc_private_subnets
+ security_group_ids = var.aft_vpc_default_sg
+ }
+
}
+ lifecycle {
+ ignore_changes = [project_visibility]
+ }
+
+}
+
+# Keep this log group so existing logs age out under the configured retention; AFT no longer writes to it
+#tfsec:ignore:aws-cloudwatch-log-group-customer-key
+resource "aws_cloudwatch_log_group" "aft_global_customizations_api_helpers" {
+ name = "/aws/codebuild/aft-global-customizations-api-helpers"
+ retention_in_days = var.cloudwatch_log_group_retention
}
+#tfsec:ignore:aws-cloudwatch-log-group-customer-key
resource "aws_cloudwatch_log_group" "aft_global_customizations_terraform" {
name = "/aws/codebuild/aft-global-customizations-terraform"
retention_in_days = var.cloudwatch_log_group_retention
@@ -58,10 +80,10 @@ resource "aws_cloudwatch_log_group" "aft_global_customizations_terraform" {
#####################################################
resource "aws_codebuild_project" "aft_account_customizations_terraform" {
- depends_on = [aws_cloudwatch_log_group.aft_account_customizations_terraform]
+ depends_on = [aws_cloudwatch_log_group.aft_account_customizations_terraform, time_sleep.wait_for_iam_eventual_consistency]
name = "aft-account-customizations-terraform"
description = "Job to apply Terraform provided by the customer account customizations repo"
- build_timeout = "60"
+ build_timeout = tostring(var.global_codebuild_timeout)
service_role = aws_iam_role.aft_codebuild_customizations_role.arn
encryption_key = var.aft_kms_key_arn
@@ -71,9 +93,14 @@ resource "aws_codebuild_project" "aft_account_customizations_terraform" {
environment {
compute_type = "BUILD_GENERAL1_MEDIUM"
- image = "aws/codebuild/amazonlinux2-x86_64-standard:3.0"
+ image = "aws/codebuild/amazonlinux2-x86_64-standard:5.0"
type = "LINUX_CONTAINER"
image_pull_credentials_type = "CODEBUILD"
+ environment_variable {
+ name = "AWS_PARTITION"
+ value = data.aws_partition.current.partition
+ type = "PLAINTEXT"
+ }
}
logs_config {
@@ -92,133 +119,44 @@ resource "aws_codebuild_project" "aft_account_customizations_terraform" {
buildspec = data.local_file.aft_account_customizations_terraform.content
}
- vpc_config {
- vpc_id = var.aft_vpc_id
- subnets = var.aft_vpc_private_subnets
- security_group_ids = var.aft_vpc_default_sg
- }
-
-}
-
-resource "aws_cloudwatch_log_group" "aft_account_customizations_terraform" {
- name = "/aws/codebuild/aft-account-customizations-terraform"
- retention_in_days = var.cloudwatch_log_group_retention
-}
-
-#####################################################
-# AFT Global Customizations API Helpers
-#####################################################
-
-resource "aws_codebuild_project" "aft_global_customizations_api_helpers" {
- depends_on = [aws_cloudwatch_log_group.aft_global_customizations_api_helpers]
- name = "aft-global-customizations-api-helpers"
- description = "Job to run API helpers provided by the customer AFT Global Module"
- build_timeout = "60"
- service_role = aws_iam_role.aft_codebuild_customizations_role.arn
- encryption_key = var.aft_kms_key_arn
-
- artifacts {
- type = "CODEPIPELINE"
- }
-
- environment {
- compute_type = "BUILD_GENERAL1_MEDIUM"
- image = "aws/codebuild/amazonlinux2-x86_64-standard:3.0"
- type = "LINUX_CONTAINER"
- image_pull_credentials_type = "CODEBUILD"
- }
-
- logs_config {
- cloudwatch_logs {
- group_name = aws_cloudwatch_log_group.aft_global_customizations_api_helpers.name
- }
-
- s3_logs {
- status = "ENABLED"
- location = "${aws_s3_bucket.aft_codepipeline_customizations_bucket.id}/aft-global-customizations-api-helpers-logs"
- }
- }
-
- source {
- type = "CODEPIPELINE"
- buildspec = data.local_file.aft_global_customizations_api_helpers.content
- }
-
- vpc_config {
- vpc_id = var.aft_vpc_id
- subnets = var.aft_vpc_private_subnets
- security_group_ids = var.aft_vpc_default_sg
- }
-
-}
-
-resource "aws_cloudwatch_log_group" "aft_global_customizations_api_helpers" {
- name = "/aws/codebuild/aft-global-customizations-api-helpers"
- retention_in_days = var.cloudwatch_log_group_retention
-}
-
-#####################################################
-# AFT Account Customizations API Helpers
-#####################################################
-
-resource "aws_codebuild_project" "aft_account_customizations_api_helpers" {
- depends_on = [aws_cloudwatch_log_group.aft_account_customizations_api_helpers]
- name = "aft-account-customizations-api-helpers"
- description = "Job to run API helpers provided by the customer AFT Account Module"
- build_timeout = "60"
- service_role = aws_iam_role.aft_codebuild_customizations_role.arn
- encryption_key = var.aft_kms_key_arn
-
- artifacts {
- type = "CODEPIPELINE"
- }
-
- environment {
- compute_type = "BUILD_GENERAL1_MEDIUM"
- image = "aws/codebuild/amazonlinux2-x86_64-standard:3.0"
- type = "LINUX_CONTAINER"
- image_pull_credentials_type = "CODEBUILD"
- }
+ dynamic "vpc_config" {
+ for_each = var.aft_enable_vpc ? [1] : []
- logs_config {
- cloudwatch_logs {
- group_name = aws_cloudwatch_log_group.aft_account_customizations_api_helpers.name
+ content {
+ vpc_id = var.aft_vpc_id
+ subnets = var.aft_vpc_private_subnets
+ security_group_ids = var.aft_vpc_default_sg
}
-
- s3_logs {
- status = "ENABLED"
- location = "${aws_s3_bucket.aft_codepipeline_customizations_bucket.id}/aft-account-customizations-api-helpers-logs"
- }
- }
-
- source {
- type = "CODEPIPELINE"
- buildspec = data.local_file.aft_account_customizations_api_helpers.content
}
- vpc_config {
- vpc_id = var.aft_vpc_id
- subnets = var.aft_vpc_private_subnets
- security_group_ids = var.aft_vpc_default_sg
+ lifecycle {
+ ignore_changes = [project_visibility]
}
}
+# Keep this log group so existing logs age out under the configured retention; AFT no longer writes to it
+#tfsec:ignore:aws-cloudwatch-log-group-customer-key
resource "aws_cloudwatch_log_group" "aft_account_customizations_api_helpers" {
name = "/aws/codebuild/aft-account-customizations-api-helpers"
retention_in_days = var.cloudwatch_log_group_retention
}
+#tfsec:ignore:aws-cloudwatch-log-group-customer-key
+resource "aws_cloudwatch_log_group" "aft_account_customizations_terraform" {
+ name = "/aws/codebuild/aft-account-customizations-terraform"
+ retention_in_days = var.cloudwatch_log_group_retention
+}
#####################################################
# AFT Account Provisioning Framework SFN - aft-create-pipeline
#####################################################
resource "aws_codebuild_project" "aft_create_pipeline" {
- depends_on = [aws_cloudwatch_log_group.aft_create_pipeline]
+ depends_on = [aws_cloudwatch_log_group.aft_create_pipeline, time_sleep.wait_for_iam_eventual_consistency]
name = "aft-create-pipeline"
description = "Job to run Terraform required to create account specific customizations pipeline"
- build_timeout = "60"
+ build_timeout = tostring(var.global_codebuild_timeout)
service_role = aws_iam_role.aft_codebuild_customizations_role.arn
encryption_key = var.aft_kms_key_arn
@@ -228,7 +166,7 @@ resource "aws_codebuild_project" "aft_create_pipeline" {
environment {
compute_type = "BUILD_GENERAL1_MEDIUM"
- image = "aws/codebuild/amazonlinux2-x86_64-standard:3.0"
+ image = "aws/codebuild/amazonlinux2-x86_64-standard:5.0"
type = "LINUX_CONTAINER"
image_pull_credentials_type = "CODEBUILD"
@@ -279,6 +217,12 @@ resource "aws_codebuild_project" "aft_create_pipeline" {
value = var.aft_tf_version_ssm_path
type = "PLAINTEXT"
}
+
+ environment_variable {
+ name = "AWS_PARTITION"
+ value = data.aws_partition.current.partition
+ type = "PLAINTEXT"
+ }
}
logs_config {
@@ -297,14 +241,23 @@ resource "aws_codebuild_project" "aft_create_pipeline" {
buildspec = data.local_file.aft_create_pipeline.content
}
- vpc_config {
- vpc_id = var.aft_vpc_id
- subnets = var.aft_vpc_private_subnets
- security_group_ids = var.aft_vpc_default_sg
+ dynamic "vpc_config" {
+ for_each = var.aft_enable_vpc ? [1] : []
+
+ content {
+ vpc_id = var.aft_vpc_id
+ subnets = var.aft_vpc_private_subnets
+ security_group_ids = var.aft_vpc_default_sg
+ }
+ }
+
+ lifecycle {
+ ignore_changes = [project_visibility]
}
}
+#tfsec:ignore:aws-cloudwatch-log-group-customer-key
resource "aws_cloudwatch_log_group" "aft_create_pipeline" {
name = "/aws/codebuild/aft-create-pipeline"
retention_in_days = var.cloudwatch_log_group_retention
diff --git a/modules/aft-customizations/data.tf b/modules/aft-customizations/data.tf
index 34e4d28c..fbe178d9 100644
--- a/modules/aft-customizations/data.tf
+++ b/modules/aft-customizations/data.tf
@@ -1,6 +1,8 @@
# Copyright Amazon.com, Inc. or its affiliates. All rights reserved.
# SPDX-License-Identifier: Apache-2.0
#
+data "aws_partition" "current" {}
+
data "aws_region" "current" {}
data "aws_caller_identity" "current" {}
@@ -20,18 +22,10 @@ data "local_file" "aft_global_customizations_terraform" {
filename = "${path.module}/buildspecs/aft-global-customizations-terraform.yml"
}
-data "local_file" "aft_global_customizations_api_helpers" {
- filename = "${path.module}/buildspecs/aft-global-customizations-api-helpers.yml"
-}
-
data "local_file" "aft_account_customizations_terraform" {
filename = "${path.module}/buildspecs/aft-account-customizations-terraform.yml"
}
-data "local_file" "aft_account_customizations_api_helpers" {
- filename = "${path.module}/buildspecs/aft-account-customizations-api-helpers.yml"
-}
-
data "local_file" "aft_create_pipeline" {
filename = "${path.module}/buildspecs/aft-create-pipeline.yml"
}
diff --git a/modules/aft-customizations/iam.tf b/modules/aft-customizations/iam.tf
index 638c3789..4616bc80 100644
--- a/modules/aft-customizations/iam.tf
+++ b/modules/aft-customizations/iam.tf
@@ -16,6 +16,7 @@ resource "aws_iam_role_policy" "aft_codepipeline_customizations_policy" {
policy = templatefile("${path.module}/iam/role-policies/aft_codepipeline_customizations_policy.tpl", {
aws_s3_bucket_aft_codepipeline_customizations_bucket_arn = aws_s3_bucket.aft_codepipeline_customizations_bucket.arn
+ data_aws_partition_current_partition = data.aws_partition.current.partition
data_aws_region_current_name = data.aws_region.current.name
data_aws_caller_identity_current_account_id = data.aws_caller_identity.current.account_id
data_aws_kms_alias_aft_key_target_key_arn = var.aft_kms_key_arn
@@ -35,6 +36,7 @@ resource "aws_iam_role_policy" "aft_codebuild_customizations_policy" {
policy = templatefile("${path.module}/iam/role-policies/aft_codebuild_customizations_policy.tpl", {
aws_s3_bucket_aft_codepipeline_customizations_bucket_arn = aws_s3_bucket.aft_codepipeline_customizations_bucket.arn
+ data_aws_partition_current_partition = data.aws_partition.current.partition
data_aws_region_current_name = data.aws_region.current.name
data_aws_caller_identity_current_account_id = data.aws_caller_identity.current.account_id
data_aws_kms_alias_aft_key_target_key_arn = var.aft_kms_key_arn
@@ -42,6 +44,11 @@ resource "aws_iam_role_policy" "aft_codebuild_customizations_policy" {
})
}
+resource "time_sleep" "wait_for_iam_eventual_consistency" {
+ depends_on = [aws_iam_role.aft_codebuild_customizations_role]
+ create_duration = "60s"
+}
+
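The `time_sleep` resource comes from the hashicorp/time provider and works around IAM's eventual consistency: a freshly created role exists in the IAM API before it is reliably usable by CodeBuild. A minimal sketch of the chain, assuming (as the diff does) that 60 seconds is enough propagation time:

```hcl
terraform {
  required_providers {
    time = { source = "hashicorp/time" }
  }
}

data "aws_iam_policy_document" "codebuild_trust" {
  statement {
    actions = ["sts:AssumeRole"]
    principals {
      type        = "Service"
      identifiers = ["codebuild.amazonaws.com"]
    }
  }
}

resource "aws_iam_role" "build" {
  name               = "example-build-role" # hypothetical
  assume_role_policy = data.aws_iam_policy_document.codebuild_trust.json
}

# The timer starts only after the role is created; anything that
# depends_on this sleep sees a role that has had 60s to propagate.
resource "time_sleep" "wait_for_iam" {
  depends_on      = [aws_iam_role.build]
  create_duration = "60s"
}
```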
###################################################################
# Step Functions - Invoke Customizations
###################################################################
@@ -56,8 +63,11 @@ resource "aws_iam_role_policy" "aft_invoke_customizations_sfn" {
role = aws_iam_role.aft_invoke_customizations_sfn.id
policy = templatefile("${path.module}/iam/role-policies/aft_states_invoke_customizations_policy.tpl", {
+ data_aws_partition_current_partition = data.aws_partition.current.partition
data_aws_region_aft-management_name = data.aws_region.aft_management.name
data_aws_caller_identity_aft-management_account_id = data.aws_caller_identity.aft_management.account_id
+ invoke_account_provisioning_sfn_arn = var.invoke_account_provisioning_sfn_arn
})
}
@@ -77,9 +87,14 @@ resource "aws_iam_role_policy" "aft_identify_targets_lambda" {
policy = templatefile("${path.module}/iam/role-policies/aft_identify_targets_lambda.tpl", {
data_aws_caller_identity_current_account_id = data.aws_caller_identity.current.account_id
+ data_aws_partition_current_partition = data.aws_partition.current.partition
data_aws_region_current_name = data.aws_region.current.name
request_metadata_table_name = var.request_metadata_table_name
+ account_request_table_name = var.account_request_table_name
aws_kms_key_aft_arn = var.aft_kms_key_arn
+ aft_sns_topic_arn = var.aft_sns_topic_arn
+ aft_failure_sns_topic_arn = var.aft_failure_sns_topic_arn
+ invoke_account_provisioning_arn = var.invoke_account_provisioning_sfn_arn
})
}
@@ -104,9 +119,12 @@ resource "aws_iam_role_policy" "aft_execute_pipeline_lambda" {
role = aws_iam_role.aft_customizations_execute_pipeline_lambda.id
policy = templatefile("${path.module}/iam/role-policies/aft_execute_pipeline_lambda.tpl", {
+ data_aws_partition_current_partition = data.aws_partition.current.partition
data_aws_region_current_name = data.aws_region.current.name
data_aws_caller_identity_current_account_id = data.aws_caller_identity.current.account_id
aws_kms_key_aft_arn = var.aft_kms_key_arn
+ aft_sns_topic_arn = var.aft_sns_topic_arn
+ aft_failure_sns_topic_arn = var.aft_failure_sns_topic_arn
})
}
@@ -131,9 +149,12 @@ resource "aws_iam_role_policy" "aft_get_pipeline_executions_lambda" {
role = aws_iam_role.aft_customizations_get_pipeline_executions_lambda.id
policy = templatefile("${path.module}/iam/role-policies/aft_get_pipeline_status_lambda.tpl", {
+ data_aws_partition_current_partition = data.aws_partition.current.partition
data_aws_region_current_name = data.aws_region.current.name
data_aws_caller_identity_current_account_id = data.aws_caller_identity.current.account_id
aws_kms_key_aft_arn = var.aft_kms_key_arn
+ aft_sns_topic_arn = var.aft_sns_topic_arn
+ aft_failure_sns_topic_arn = var.aft_failure_sns_topic_arn
})
}
@@ -150,6 +171,7 @@ resource "aws_iam_role_policy" "terraform_oss_backend_codebuild_customizations_p
role = aws_iam_role.aft_codebuild_customizations_role.id
policy = templatefile("${path.module}/iam/role-policies/ct_aft_codebuild_oss_backend_policy.tpl", {
+ data_aws_partition_current_partition = data.aws_partition.current.partition
data_aws_region_current_name = data.aws_region.current.name
data_aws_caller_identity_current_account_id = data.aws_caller_identity.current.account_id
data_aws_dynamo_terraform_oss_backend_table = var.aft_config_backend_table_id
@@ -157,33 +179,3 @@ resource "aws_iam_role_policy" "terraform_oss_backend_codebuild_customizations_p
aws_s3_bucket_aft_terraform_oss_kms_key_id = var.aft_config_backend_kms_key_id
})
}
-
-###################################################################
-# Lambda - Invoke Account Provisioning
-###################################################################
-
-resource "aws_iam_role" "aft_customizations_invoke_account_provisioning_lambda" {
- name = "aft-customizations-invoke-account-provisioning-role"
- assume_role_policy = templatefile("${path.module}/iam/trust-policies/lambda.tpl", { none = "none" })
-}
-
-resource "aws_iam_role_policy" "aft_customizations_invoke_account_provisioning_lambda" {
- name = "aft-customizations-invoke-account-provisioning-policy"
- role = aws_iam_role.aft_customizations_invoke_account_provisioning_lambda.id
-
- policy = templatefile("${path.module}/iam/role-policies/aft_customizations_invoke_account_provisioning.tpl", {
- data_aws_region_current_name = data.aws_region.current.name
- data_aws_caller_identity_current_account_id = data.aws_caller_identity.current.account_id
- data_aws_kms_alias_aft_key_target_key_arn = var.aft_kms_key_arn
- data_aws_dynamo_account_metadata_table = var.request_metadata_table_name
- data_aws_dynamo_account_request_table = var.account_request_table_name
- invoke_account_provisioning_arn = var.invoke_account_provisioning_sfn_arn
- })
-
-}
-
-resource "aws_iam_role_policy_attachment" "aft_customizations_invoke_account_provisioning_lambda" {
- count = length(local.lambda_managed_policies)
- role = aws_iam_role.aft_customizations_invoke_account_provisioning_lambda.name
- policy_arn = local.lambda_managed_policies[count.index]
-}
diff --git a/modules/aft-customizations/iam/role-policies/aft_codebuild_customizations_policy.tpl b/modules/aft-customizations/iam/role-policies/aft_codebuild_customizations_policy.tpl
index 5ec934d1..a5653ab5 100644
--- a/modules/aft-customizations/iam/role-policies/aft_codebuild_customizations_policy.tpl
+++ b/modules/aft-customizations/iam/role-policies/aft_codebuild_customizations_policy.tpl
@@ -3,7 +3,7 @@
"Statement": [
{
"Effect": "Allow",
- "Resource": "arn:aws:logs:${data_aws_region_current_name}:${data_aws_caller_identity_current_account_id}:log-group:/aws/codebuild/aft*",
+ "Resource": "arn:${data_aws_partition_current_partition}:logs:${data_aws_region_current_name}:${data_aws_caller_identity_current_account_id}:log-group:/aws/codebuild/aft*",
"Action": [
"logs:CreateLogStream",
"logs:PutLogEvents"
@@ -28,7 +28,7 @@
"ec2:CreateNetworkInterfacePermission"
],
"Resource": [
- "arn:aws:ec2:${data_aws_region_current_name}:${data_aws_caller_identity_current_account_id}:network-interface/*"
+ "arn:${data_aws_partition_current_partition}:ec2:${data_aws_region_current_name}:${data_aws_caller_identity_current_account_id}:network-interface/*"
]
},
{
@@ -62,7 +62,7 @@
"ssm:GetParameter"
],
"Resource": [
- "arn:aws:ssm:${data_aws_region_current_name}:${data_aws_caller_identity_current_account_id}:parameter/aft/*"
+ "arn:${data_aws_partition_current_partition}:ssm:${data_aws_region_current_name}:${data_aws_caller_identity_current_account_id}:parameter/aft/*"
]
},
{
@@ -76,7 +76,7 @@
"codecommit:GetUploadArchiveStatus",
"codecommit:CancelUploadArchive"
],
- "Resource": "arn:aws:codecommit:${data_aws_region_current_name}:${data_aws_caller_identity_current_account_id}:*customizations*"
+ "Resource": "arn:${data_aws_partition_current_partition}:codecommit:${data_aws_region_current_name}:${data_aws_caller_identity_current_account_id}:*customizations*"
},
{
"Effect": "Allow",
@@ -84,7 +84,7 @@
"dynamodb:*Item"
],
"Resource": [
- "arn:aws:dynamodb:${data_aws_region_current_name}:${data_aws_caller_identity_current_account_id}:table/${data_aws_dynamo_account_metadata_table}"
+ "arn:${data_aws_partition_current_partition}:dynamodb:${data_aws_region_current_name}:${data_aws_caller_identity_current_account_id}:table/${data_aws_dynamo_account_metadata_table}"
]
},
{
@@ -93,7 +93,7 @@
"sts:AssumeRole"
],
"Resource": [
- "arn:aws:iam::${data_aws_caller_identity_current_account_id}:role/AWSAFTAdmin"
+ "arn:${data_aws_partition_current_partition}:iam::${data_aws_caller_identity_current_account_id}:role/AWSAFTAdmin"
]
},
{
@@ -102,7 +102,7 @@
"dynamodb:GetItem"
],
"Resource" : [
- "arn:aws:dynamodb:${data_aws_caller_identity_current_account_id}:${data_aws_caller_identity_current_account_id}:table/aft*"
+ "arn:${data_aws_partition_current_partition}:dynamodb:${data_aws_region_current_name}:${data_aws_caller_identity_current_account_id}:table/aft*"
]
}
]
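The recurring change across these policy templates is replacing the hardcoded `arn:aws:` prefix with a partition looked up at plan time, so the same templates render correctly in the commercial (`aws`), GovCloud (`aws-us-gov`), and China (`aws-cn`) partitions. A minimal sketch of the lookup-and-interpolate pattern:

```hcl
data "aws_partition" "current" {}
data "aws_region" "current" {}
data "aws_caller_identity" "current" {}

locals {
  # Renders as e.g. arn:aws-us-gov:ssm:us-gov-west-1:111122223333:parameter/aft/*
  aft_ssm_parameter_arn = "arn:${data.aws_partition.current.partition}:ssm:${data.aws_region.current.name}:${data.aws_caller_identity.current.account_id}:parameter/aft/*"
}
```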
diff --git a/modules/aft-customizations/iam/role-policies/aft_codepipeline_customizations_policy.tpl b/modules/aft-customizations/iam/role-policies/aft_codepipeline_customizations_policy.tpl
index 0a4dd2c2..fbf0f007 100644
--- a/modules/aft-customizations/iam/role-policies/aft_codepipeline_customizations_policy.tpl
+++ b/modules/aft-customizations/iam/role-policies/aft_codepipeline_customizations_policy.tpl
@@ -2,7 +2,7 @@
"Version": "2012-10-17",
"Statement": [
{
- "Effect":"Allow",
+ "Effect": "Allow",
"Action": [
"s3:Get*",
"s3:List*",
@@ -19,7 +19,7 @@
"codebuild:BatchGetBuilds",
"codebuild:StartBuild"
],
- "Resource": "arn:aws:codebuild:${data_aws_region_current_name}:${data_aws_caller_identity_current_account_id}:*customizations*"
+ "Resource": "arn:${data_aws_partition_current_partition}:codebuild:${data_aws_region_current_name}:${data_aws_caller_identity_current_account_id}:*customizations*"
},
{
"Effect": "Allow",
@@ -32,7 +32,7 @@
"codecommit:GetUploadArchiveStatus",
"codecommit:CancelUploadArchive"
],
- "Resource": "arn:aws:codecommit:${data_aws_region_current_name}:${data_aws_caller_identity_current_account_id}:aft-*"
+ "Resource": "arn:${data_aws_partition_current_partition}:codecommit:${data_aws_region_current_name}:${data_aws_caller_identity_current_account_id}:*"
},
{
"Effect": "Allow",
@@ -43,10 +43,13 @@
],
"Resource": "${data_aws_kms_alias_aft_key_target_key_arn}"
},
- {
- "Effect": "Allow",
- "Action": "codestar-connections:UseConnection",
- "Resource": "*"
- }
+ {
+ "Effect": "Allow",
+ "Action": [
+ "codestar-connections:UseConnection",
+ "codeconnections:UseConnection"
+ ],
+ "Resource": "*"
+ }
]
}
diff --git a/modules/aft-customizations/iam/role-policies/aft_customizations_invoke_account_provisioning.tpl b/modules/aft-customizations/iam/role-policies/aft_customizations_invoke_account_provisioning.tpl
deleted file mode 100644
index 118bf291..00000000
--- a/modules/aft-customizations/iam/role-policies/aft_customizations_invoke_account_provisioning.tpl
+++ /dev/null
@@ -1,39 +0,0 @@
-{
- "Version": "2012-10-17",
- "Statement": [
- {
- "Effect": "Allow",
- "Action": "states:StartExecution",
- "Resource": "${invoke_account_provisioning_arn}"
- },
- {
- "Effect": "Allow",
- "Action": [
- "ssm:GetParameters",
- "ssm:GetParameter"
- ],
- "Resource": [
- "arn:aws:ssm:${data_aws_region_current_name}:${data_aws_caller_identity_current_account_id}:parameter/aft/*"
- ]
- },
- {
- "Effect": "Allow",
- "Action": [
- "dynamodb:GetItem"
- ],
- "Resource": [
- "arn:aws:dynamodb:${data_aws_region_current_name}:${data_aws_caller_identity_current_account_id}:table/${data_aws_dynamo_account_metadata_table}",
- "arn:aws:dynamodb:${data_aws_region_current_name}:${data_aws_caller_identity_current_account_id}:table/${data_aws_dynamo_account_request_table}"
- ]
- },
- {
- "Effect": "Allow",
- "Action": [
- "kms:Decrypt",
- "kms:Encrypt",
- "kms:GenerateDataKey"
- ],
- "Resource": "${data_aws_kms_alias_aft_key_target_key_arn}"
- }
- ]
-}
diff --git a/modules/aft-customizations/iam/role-policies/aft_execute_pipeline_lambda.tpl b/modules/aft-customizations/iam/role-policies/aft_execute_pipeline_lambda.tpl
index c19d305b..b38e2f51 100644
--- a/modules/aft-customizations/iam/role-policies/aft_execute_pipeline_lambda.tpl
+++ b/modules/aft-customizations/iam/role-policies/aft_execute_pipeline_lambda.tpl
@@ -11,8 +11,8 @@
"codepipeline:ListTagsForResource"
],
"Resource": [
- "arn:aws:codepipeline:${data_aws_region_current_name}:${data_aws_caller_identity_current_account_id}:*",
- "arn:aws:ssm:${data_aws_region_current_name}:${data_aws_caller_identity_current_account_id}:parameter/aft/*"
+ "arn:${data_aws_partition_current_partition}:codepipeline:${data_aws_region_current_name}:${data_aws_caller_identity_current_account_id}:*",
+ "arn:${data_aws_partition_current_partition}:ssm:${data_aws_region_current_name}:${data_aws_caller_identity_current_account_id}:parameter/aft/*"
]
},
{
@@ -30,6 +30,16 @@
"Effect": "Allow",
"Action": "sts:GetCallerIdentity",
"Resource": "*"
+ },
+ {
+ "Effect" : "Allow",
+ "Action" : [
+ "sns:Publish"
+ ],
+ "Resource" : [
+ "${aft_sns_topic_arn}",
+ "${aft_failure_sns_topic_arn}"
+ ]
}
]
}
diff --git a/modules/aft-customizations/iam/role-policies/aft_get_pipeline_status_lambda.tpl b/modules/aft-customizations/iam/role-policies/aft_get_pipeline_status_lambda.tpl
index 7df50383..6eb242cf 100644
--- a/modules/aft-customizations/iam/role-policies/aft_get_pipeline_status_lambda.tpl
+++ b/modules/aft-customizations/iam/role-policies/aft_get_pipeline_status_lambda.tpl
@@ -4,7 +4,7 @@
{
"Effect": "Allow",
"Action": "codepipeline:ListPipelineExecutions",
- "Resource": "arn:aws:codepipeline:${data_aws_region_current_name}:${data_aws_caller_identity_current_account_id}:*"
+ "Resource": "arn:${data_aws_partition_current_partition}:codepipeline:${data_aws_region_current_name}:${data_aws_caller_identity_current_account_id}:*"
},
{
"Effect": "Allow",
@@ -21,6 +21,26 @@
"Resource" : [
"${aws_kms_key_aft_arn}"
]
+ },
+ {
+ "Effect" : "Allow",
+ "Action" : [
+ "sns:Publish"
+ ],
+ "Resource" : [
+ "${aft_sns_topic_arn}",
+ "${aft_failure_sns_topic_arn}"
+ ]
+ },
+ {
+ "Effect" : "Allow",
+ "Action" : [
+ "ssm:GetParameter"
+ ],
+ "Resource" : [
+ "arn:${data_aws_partition_current_partition}:ssm:${data_aws_region_current_name}:${data_aws_caller_identity_current_account_id}:parameter/aft/*"
+ ]
}
]
}
diff --git a/modules/aft-customizations/iam/role-policies/aft_identify_targets_lambda.tpl b/modules/aft-customizations/iam/role-policies/aft_identify_targets_lambda.tpl
index cc948ea4..5cdc9b99 100644
--- a/modules/aft-customizations/iam/role-policies/aft_identify_targets_lambda.tpl
+++ b/modules/aft-customizations/iam/role-policies/aft_identify_targets_lambda.tpl
@@ -1,46 +1,58 @@
{
- "Version" : "2012-10-17",
- "Statement" : [
- {
- "Effect" : "Allow",
- "Action" : [
- "sts:AssumeRole"
- ],
- "Resource" : [
- "arn:aws:iam::${data_aws_caller_identity_current_account_id}:role/AWSAFTAdmin"
- ]
- },
- {
- "Effect" : "Allow",
- "Action" : "sts:GetCallerIdentity",
- "Resource" : "*"
- },
- {
- "Effect" : "Allow",
- "Action" : "ssm:GetParameter",
- "Resource" : [
- "arn:aws:ssm:${data_aws_region_current_name}:${data_aws_caller_identity_current_account_id}:parameter/aft/*"
- ]
- },
+ "Version": "2012-10-17",
+ "Statement": [
{
- "Effect" : "Allow",
- "Action" : [
- "dynamodb:Scan"
- ],
- "Resource" : [
- "arn:aws:dynamodb:${data_aws_region_current_name}:${data_aws_caller_identity_current_account_id}:table/${request_metadata_table_name}"
+ "Effect": "Allow",
+ "Action": [
+ "sts:AssumeRole"
+ ],
+ "Resource": [
+ "arn:${data_aws_partition_current_partition}:iam::${data_aws_caller_identity_current_account_id}:role/AWSAFTAdmin"
]
},
- {
- "Effect" : "Allow",
- "Action" : [
- "kms:GenerateDataKey",
- "kms:Encrypt",
- "kms:Decrypt"
- ],
- "Resource" : [
- "${aws_kms_key_aft_arn}"
- ]
- }
- ]
+ {
+ "Effect": "Allow",
+ "Action": "sts:GetCallerIdentity",
+ "Resource": "*"
+ },
+ {
+ "Effect": "Allow",
+ "Action": "ssm:GetParameter",
+ "Resource": [
+ "arn:${data_aws_partition_current_partition}:ssm:${data_aws_region_current_name}:${data_aws_caller_identity_current_account_id}:parameter/aft/*"
+ ]
+ },
+ {
+ "Effect": "Allow",
+ "Action": [
+ "kms:GenerateDataKey",
+ "kms:Encrypt",
+ "kms:Decrypt"
+ ],
+ "Resource": [
+ "${aws_kms_key_aft_arn}"
+ ]
+ },
+ {
+ "Effect": "Allow",
+ "Action": [
+ "dynamodb:GetItem",
+ "dynamodb:Scan"
+ ],
+ "Resource": [
+ "arn:${data_aws_partition_current_partition}:dynamodb:${data_aws_region_current_name}:${data_aws_caller_identity_current_account_id}:table/${request_metadata_table_name}",
+ "arn:${data_aws_partition_current_partition}:dynamodb:${data_aws_region_current_name}:${data_aws_caller_identity_current_account_id}:table/${account_request_table_name}"
+ ]
+ },
+ {
+ "Effect": "Allow",
+ "Action": [
+ "sns:Publish"
+ ],
+ "Resource": [
+ "${aft_sns_topic_arn}",
+ "${aft_failure_sns_topic_arn}"
+ ]
+ }
+ ]
}
diff --git a/modules/aft-customizations/iam/role-policies/aft_states_invoke_customizations_policy.tpl b/modules/aft-customizations/iam/role-policies/aft_states_invoke_customizations_policy.tpl
index 4d11b0c2..a3d7d6a2 100644
--- a/modules/aft-customizations/iam/role-policies/aft_states_invoke_customizations_policy.tpl
+++ b/modules/aft-customizations/iam/role-policies/aft_states_invoke_customizations_policy.tpl
@@ -7,7 +7,7 @@
"lambda:InvokeFunction"
],
"Resource": [
- "arn:aws:lambda:${data_aws_region_aft-management_name}:${data_aws_caller_identity_aft-management_account_id}:function:aft-*"
+ "arn:${data_aws_partition_current_partition}:lambda:${data_aws_region_aft-management_name}:${data_aws_caller_identity_aft-management_account_id}:function:aft-*"
]
},
{
@@ -16,7 +16,37 @@
"sns:Publish"
],
"Resource": [
- "arn:aws:sns:${data_aws_region_aft-management_name}:${data_aws_caller_identity_aft-management_account_id}:aft-*"
+ "arn:${data_aws_partition_current_partition}:sns:${data_aws_region_aft-management_name}:${data_aws_caller_identity_aft-management_account_id}:aft-*"
+ ]
+ },
+ {
+ "Effect": "Allow",
+ "Action": [
+ "events:PutTargets",
+ "events:PutRule",
+ "events:DescribeRule"
+ ],
+ "Resource": [
+ "arn:${data_aws_partition_current_partition}:events:${data_aws_region_aft-management_name}:${data_aws_caller_identity_aft-management_account_id}:*"
+ ]
+ },
+ {
+ "Effect": "Allow",
+ "Action": [
+ "states:StartExecution"
+ ],
+ "Resource": [
+ "arn:${data_aws_partition_current_partition}:states:${data_aws_region_aft-management_name}:${data_aws_caller_identity_aft-management_account_id}:stateMachine:aft-*"
+ ]
+ },
+ {
+ "Effect": "Allow",
+ "Action": [
+ "states:DescribeExecution",
+ "states:StopExecution"
+ ],
+ "Resource": [
+ "arn:${data_aws_partition_current_partition}:states:${data_aws_region_aft-management_name}:${data_aws_caller_identity_aft-management_account_id}:execution:aft-*"
]
}
]
diff --git a/modules/aft-customizations/iam/role-policies/ct_aft_codebuild_oss_backend_policy.tpl b/modules/aft-customizations/iam/role-policies/ct_aft_codebuild_oss_backend_policy.tpl
index 200f3a0b..1d3ea1b0 100644
--- a/modules/aft-customizations/iam/role-policies/ct_aft_codebuild_oss_backend_policy.tpl
+++ b/modules/aft-customizations/iam/role-policies/ct_aft_codebuild_oss_backend_policy.tpl
@@ -7,7 +7,7 @@
"dynamodb:*Item"
],
"Resource": [
- "arn:aws:dynamodb:${data_aws_region_current_name}:${data_aws_caller_identity_current_account_id}:table/${data_aws_dynamo_terraform_oss_backend_table}"
+ "arn:${data_aws_partition_current_partition}:dynamodb:${data_aws_region_current_name}:${data_aws_caller_identity_current_account_id}:table/${data_aws_dynamo_terraform_oss_backend_table}"
]
},
{
@@ -21,8 +21,8 @@
"s3:PutObject"
],
"Resource": [
- "arn:aws:s3:::${aws_s3_bucket_aft_terraform_oss_backend_bucket_id}",
- "arn:aws:s3:::${aws_s3_bucket_aft_terraform_oss_backend_bucket_id}/*"
+ "arn:${data_aws_partition_current_partition}:s3:::${aws_s3_bucket_aft_terraform_oss_backend_bucket_id}",
+ "arn:${data_aws_partition_current_partition}:s3:::${aws_s3_bucket_aft_terraform_oss_backend_bucket_id}/*"
]
},
{
@@ -32,7 +32,7 @@
"kms:Encrypt",
"kms:GenerateDataKey"
],
- "Resource": "arn:aws:kms:${data_aws_region_current_name}:${data_aws_caller_identity_current_account_id}:key/${aws_s3_bucket_aft_terraform_oss_kms_key_id}"
+ "Resource": "arn:${data_aws_partition_current_partition}:kms:${data_aws_region_current_name}:${data_aws_caller_identity_current_account_id}:key/${aws_s3_bucket_aft_terraform_oss_kms_key_id}"
}
]
}
diff --git a/modules/aft-customizations/lambda.tf b/modules/aft-customizations/lambda.tf
index 0780f3c9..d2a8a899 100644
--- a/modules/aft-customizations/lambda.tf
+++ b/modules/aft-customizations/lambda.tf
@@ -3,7 +3,7 @@
#
######## customizations_identify_targets ########
-
+#tfsec:ignore:aws-lambda-enable-tracing
resource "aws_lambda_function" "aft_customizations_identify_targets" {
filename = var.customizations_archive_path
function_name = "aft-customizations-identify-targets"
@@ -13,16 +13,21 @@ resource "aws_lambda_function" "aft_customizations_identify_targets" {
source_code_hash = var.customizations_archive_hash
memory_size = 1024
- runtime = "python3.8"
+ runtime = var.lambda_runtime_python_version
timeout = "300"
layers = [var.aft_common_layer_arn]
- vpc_config {
- subnet_ids = var.aft_vpc_private_subnets
- security_group_ids = var.aft_vpc_default_sg
+ dynamic "vpc_config" {
+ for_each = var.aft_enable_vpc ? [1] : []
+
+ content {
+ subnet_ids = var.aft_vpc_private_subnets
+ security_group_ids = var.aft_vpc_default_sg
+ }
}
}
+#tfsec:ignore:aws-cloudwatch-log-group-customer-key
resource "aws_cloudwatch_log_group" "aft_customizations_identify_targets" {
name = "/aws/lambda/${aws_lambda_function.aft_customizations_identify_targets.function_name}"
retention_in_days = var.cloudwatch_log_group_retention
@@ -30,6 +35,7 @@ resource "aws_cloudwatch_log_group" "aft_customizations_identify_targets" {
######## customizations_execute_pipeline ########
+#tfsec:ignore:aws-lambda-enable-tracing
resource "aws_lambda_function" "aft_customizations_execute_pipeline" {
filename = var.customizations_archive_path
function_name = "aft-customizations-execute-pipeline"
@@ -39,22 +45,28 @@ resource "aws_lambda_function" "aft_customizations_execute_pipeline" {
source_code_hash = var.customizations_archive_hash
memory_size = 1024
- runtime = "python3.8"
+ runtime = var.lambda_runtime_python_version
timeout = "300"
layers = [var.aft_common_layer_arn]
- vpc_config {
- subnet_ids = var.aft_vpc_private_subnets
- security_group_ids = var.aft_vpc_default_sg
+ dynamic "vpc_config" {
+ for_each = var.aft_enable_vpc ? [1] : []
+
+ content {
+ subnet_ids = var.aft_vpc_private_subnets
+ security_group_ids = var.aft_vpc_default_sg
+ }
}
}
+#tfsec:ignore:aws-cloudwatch-log-group-customer-key
resource "aws_cloudwatch_log_group" "aft_execute_pipeline" {
name = "/aws/lambda/${aws_lambda_function.aft_customizations_execute_pipeline.function_name}"
retention_in_days = var.cloudwatch_log_group_retention
}
######## customizations_get_pipeline_executions ########
+#tfsec:ignore:aws-lambda-enable-tracing
resource "aws_lambda_function" "aft_customizations_get_pipeline_executions" {
filename = var.customizations_archive_path
function_name = "aft-customizations-get-pipeline-executions"
@@ -64,43 +76,29 @@ resource "aws_lambda_function" "aft_customizations_get_pipeline_executions" {
source_code_hash = var.customizations_archive_hash
memory_size = 1024
- runtime = "python3.8"
+ runtime = var.lambda_runtime_python_version
timeout = "300"
layers = [var.aft_common_layer_arn]
- vpc_config {
- subnet_ids = var.aft_vpc_private_subnets
- security_group_ids = var.aft_vpc_default_sg
+ dynamic "vpc_config" {
+ for_each = var.aft_enable_vpc ? [1] : []
+
+ content {
+ subnet_ids = var.aft_vpc_private_subnets
+ security_group_ids = var.aft_vpc_default_sg
+ }
}
}
+#tfsec:ignore:aws-cloudwatch-log-group-customer-key
resource "aws_cloudwatch_log_group" "aft_get_pipeline_executions" {
name = "/aws/lambda/${aws_lambda_function.aft_customizations_get_pipeline_executions.function_name}"
retention_in_days = var.cloudwatch_log_group_retention
}
-######## customizations_invoke_account_provisioning ########
-resource "aws_lambda_function" "aft_customizations_invoke_account_provisioning" {
- filename = var.customizations_archive_path
- function_name = "aft-customizations-invoke-account-provisioning"
- description = "Invokes the account-provisioning SFN."
- role = aws_iam_role.aft_customizations_invoke_account_provisioning_lambda.arn
- handler = "aft_customizations_invoke_account_provisioning_framework.lambda_handler"
-
- source_code_hash = var.customizations_archive_hash
- memory_size = 1024
- runtime = "python3.8"
- timeout = "300"
- layers = [var.aft_common_layer_arn]
-
- vpc_config {
- subnet_ids = var.aft_vpc_private_subnets
- security_group_ids = var.aft_vpc_default_sg
- }
-}
-
+#tfsec:ignore:aws-cloudwatch-log-group-customer-key
resource "aws_cloudwatch_log_group" "aft_customizations_invoke_account_provisioning" {
- name = "/aws/lambda/${aws_lambda_function.aft_customizations_invoke_account_provisioning.function_name}"
+ name = "/aws/lambda/aft-customizations-invoke-account-provisioning"
retention_in_days = var.cloudwatch_log_group_retention
}
diff --git a/modules/aft-customizations/s3.tf b/modules/aft-customizations/s3.tf
index 4a996034..e93bbc87 100644
--- a/modules/aft-customizations/s3.tf
+++ b/modules/aft-customizations/s3.tf
@@ -1,20 +1,35 @@
# Copyright Amazon.com, Inc. or its affiliates. All rights reserved.
# SPDX-License-Identifier: Apache-2.0
#
+
+#tfsec:ignore:aws-s3-enable-bucket-logging
resource "aws_s3_bucket" "aft_codepipeline_customizations_bucket" {
bucket = "aft-customizations-pipeline-${data.aws_caller_identity.current.account_id}"
- acl = "private"
+}
- versioning {
- enabled = true
+resource "aws_s3_bucket_public_access_block" "aft-codepipeline-customizations-block-public-access" {
+ bucket = aws_s3_bucket.aft_codepipeline_customizations_bucket.id
+
+ block_public_acls = true
+ block_public_policy = true
+ ignore_public_acls = true
+ restrict_public_buckets = true
+}
+
+resource "aws_s3_bucket_versioning" "aft-codepipeline-customizations-bucket-versioning" {
+ bucket = aws_s3_bucket.aft_codepipeline_customizations_bucket.id
+ versioning_configuration {
+ status = "Enabled"
}
+}
+
+resource "aws_s3_bucket_server_side_encryption_configuration" "aft-codepipeline-customizations-bucket-encryption" {
+ bucket = aws_s3_bucket.aft_codepipeline_customizations_bucket.id
- server_side_encryption_configuration {
- rule {
- apply_server_side_encryption_by_default {
- kms_master_key_id = var.aft_kms_key_id
- sse_algorithm = "aws:kms"
- }
+ rule {
+ apply_server_side_encryption_by_default {
+ kms_master_key_id = var.aft_kms_key_id
+ sse_algorithm = "aws:kms"
}
}
}
diff --git a/modules/aft-customizations/states.tf b/modules/aft-customizations/states.tf
index 393e9b01..c3d7feda 100644
--- a/modules/aft-customizations/states.tf
+++ b/modules/aft-customizations/states.tf
@@ -4,18 +4,24 @@
locals {
state_machine_source = "${path.module}/states/invoke_customizations.asl.json"
replacements_map = {
- identify_targets_function_arn = aws_lambda_function.aft_customizations_identify_targets.arn
- execute_pipeline_function_arn = aws_lambda_function.aft_customizations_execute_pipeline.arn
- get_pipeline_executions_function_arn = aws_lambda_function.aft_customizations_get_pipeline_executions.arn
- invoke_provisioning_framework_function_arn = aws_lambda_function.aft_customizations_invoke_account_provisioning.arn
- maximum_concurrent_customizations = var.maximum_concurrent_customizations
- aft_notification_arn = var.aft_sns_topic_arn
- aft_failure_notification_arn = var.aft_failure_sns_topic_arn
+ current_partition = data.aws_partition.current.partition
+ identify_targets_function_arn = aws_lambda_function.aft_customizations_identify_targets.arn
+ execute_pipeline_function_arn = aws_lambda_function.aft_customizations_execute_pipeline.arn
+ get_pipeline_executions_function_arn = aws_lambda_function.aft_customizations_get_pipeline_executions.arn
+ invoke_account_provisioning_sfn_arn = var.invoke_account_provisioning_sfn_arn
+ maximum_concurrent_customizations = var.maximum_concurrent_customizations
+ aft_notification_arn = var.aft_sns_topic_arn
+ aft_failure_notification_arn = var.aft_failure_sns_topic_arn
}
}
resource "aws_sfn_state_machine" "aft_invoke_customizations_sfn" {
- name = "aft-invoke-customizations"
- role_arn = aws_iam_role.aft_invoke_customizations_sfn.arn
- definition = templatefile(local.state_machine_source, local.replacements_map)
+ name = "aft-invoke-customizations"
+ role_arn = aws_iam_role.aft_invoke_customizations_sfn.arn
+  // Keep the source file valid JSON, then de-quote MaxConcurrency at render time so it is emitted as a number
+ definition = replace(
+ templatefile("${local.state_machine_source}", local.replacements_map),
+ "/\"MaxConcurrency\": \"(\\d+)\"/",
+ "\"MaxConcurrency\": $1"
+ )
}
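`templatefile()` interpolates every variable into the ASL document as a string, which would leave `MaxConcurrency` quoted; Step Functions requires the field to be a number. The regex `replace()` strips the quotes after rendering. The same trick in isolation, with hypothetical names:

```hcl
locals {
  # What templatefile() would produce: the concurrency value arrives quoted.
  rendered = "{\"MaxConcurrency\": \"5\"}"

  # De-quote the captured digits; $1 refers to the regex capture group.
  dequoted = replace(
    local.rendered,
    "/\"MaxConcurrency\": \"(\\d+)\"/",
    "\"MaxConcurrency\": $1"
  )
}

output "definition_fragment" {
  value = local.dequoted # {"MaxConcurrency": 5}
}
```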
diff --git a/modules/aft-customizations/states/invoke_customizations.asl.json b/modules/aft-customizations/states/invoke_customizations.asl.json
index c8757ae3..a9eb0876 100644
--- a/modules/aft-customizations/states/invoke_customizations.asl.json
+++ b/modules/aft-customizations/states/invoke_customizations.asl.json
@@ -14,16 +14,29 @@
]
},
"Invoke Provisioning Framework": {
+ "Type": "Map",
"Next": "Get Pipeline Executions",
- "Type": "Task",
- "Resource": "${invoke_provisioning_framework_function_arn}",
- "ResultPath": "$.invoke_framework",
- "Catch": [
- {
- "ErrorEquals": ["States.ALL"],
- "Next": "Notify Failure"
+ "MaxConcurrency": "${maximum_concurrent_customizations}",
+ "InputPath": "$.targets",
+ "ItemsPath": "$.target_accounts_info",
+ "Parameters": {
+ "info.$": "$$.Map.Item.Value"
+ },
+ "Iterator": {
+ "StartAt": "Invoke Account provisioning Step Function",
+ "States": {
+ "Invoke Account provisioning Step Function": {
+ "Type": "Task",
+ "Resource": "arn:${current_partition}:states:::states:startExecution.sync:2",
+ "Parameters": {
+ "StateMachineArn": "${invoke_account_provisioning_sfn_arn}",
+ "Input.$": "$.info"
+ },
+ "End": true
+ }
}
- ]
+ },
+ "ResultPath": null
},
"Get Pipeline Executions": {
"Next": "Pending Pipeline Executions?",
@@ -78,7 +91,7 @@
},
"Notify Success": {
"Type": "Task",
- "Resource": "arn:aws:states:::sns:publish",
+ "Resource": "arn:${current_partition}:states:::sns:publish",
"Parameters": {
"TopicArn": "${aft_notification_arn}",
"Message.$": "$"
@@ -87,7 +100,7 @@
},
"Notify Failure": {
"Type": "Task",
- "Resource": "arn:aws:states:::sns:publish",
+ "Resource": "arn:${current_partition}:states:::sns:publish",
"Parameters": {
"TopicArn": "${aft_failure_notification_arn}",
"Message.$": "$.Cause"
diff --git a/modules/aft-customizations/variables.tf b/modules/aft-customizations/variables.tf
index f9e62e48..a08aa1aa 100644
--- a/modules/aft-customizations/variables.tf
+++ b/modules/aft-customizations/variables.tf
@@ -9,18 +9,7 @@ variable "aft_kms_key_id" {
type = string
}
-
-
variable "aft_kms_key_arn" {
-
-
-
-
-
-
-
-
-
type = string
}
@@ -89,15 +78,18 @@ variable "aft_config_backend_bucket_id" {
}
variable "aft_vpc_id" {
- type = string
+ type = string
+ default = null
}
variable "aft_vpc_private_subnets" {
- type = list(string)
+ type = list(string)
+ default = null
}
variable "aft_vpc_default_sg" {
- type = list(string)
+ type = list(string)
+ default = null
}
variable "maximum_concurrent_customizations" {
@@ -115,3 +107,15 @@ variable "customizations_archive_path" {
variable "customizations_archive_hash" {
type = string
}
+
+variable "global_codebuild_timeout" {
+ type = number
+}
+
+variable "lambda_runtime_python_version" {
+ type = string
+}
+
+variable "aft_enable_vpc" {
+ type = bool
+}
diff --git a/modules/aft-customizations/versions.tf b/modules/aft-customizations/versions.tf
index 8e5314f4..63c463a5 100755
--- a/modules/aft-customizations/versions.tf
+++ b/modules/aft-customizations/versions.tf
@@ -2,12 +2,12 @@
# SPDX-License-Identifier: Apache-2.0
#
terraform {
- required_version = ">= 0.15.1"
+ required_version = ">= 1.0.0"
required_providers {
aws = {
source = "hashicorp/aws"
- version = ">= 3.72, < 4.0.0"
+ version = ">= 5.11.0, < 6.0.0"
}
}
}
diff --git a/modules/aft-feature-options/data.tf b/modules/aft-feature-options/data.tf
index 118d9cb6..9e993a4b 100644
--- a/modules/aft-feature-options/data.tf
+++ b/modules/aft-feature-options/data.tf
@@ -1,6 +1,8 @@
# Copyright Amazon.com, Inc. or its affiliates. All rights reserved.
# SPDX-License-Identifier: Apache-2.0
#
+data "aws_partition" "current" {}
+
data "aws_region" "current" {
provider = aws.aft_management
}
diff --git a/modules/aft-feature-options/iam.tf b/modules/aft-feature-options/iam.tf
index f50673cd..c9860f3e 100644
--- a/modules/aft-feature-options/iam.tf
+++ b/modules/aft-feature-options/iam.tf
@@ -17,6 +17,7 @@ resource "aws_iam_role_policy" "aft_features_sfn" {
role = aws_iam_role.aft_features_sfn.id
policy = templatefile("${path.module}/iam/role-policies/aft_features_states.tpl", {
+ data_aws_partition_current_partition = data.aws_partition.current.partition
data_aws_region_aft-management_name = data.aws_region.current.name
data_aws_caller_identity_aft-management_account_id = data.aws_caller_identity.current.id
})
@@ -40,8 +41,12 @@ resource "aws_iam_role_policy" "aft_delete_default_vpc_lambda" {
policy = templatefile("${path.module}/iam/role-policies/aft_delete_default_vpc_lambda.tpl", {
data_aws_caller_identity_current_account_id = data.aws_caller_identity.current.account_id
+ data_aws_partition_current_partition = data.aws_partition.current.partition
data_aws_region_current_name = data.aws_region.current.name
aws_kms_key_aft_arn = var.aft_kms_key_arn
+ aws_sns_topic_aft_notifications_arn = var.aft_sns_topic_arn
+ aws_sns_topic_aft_failure_notifications_arn = var.aft_failure_sns_topic_arn
})
}
@@ -70,8 +75,11 @@ resource "aws_iam_role_policy" "aft_enroll_support" {
policy = templatefile("${path.module}/iam/role-policies/aft_enroll_support.tpl", {
data_aws_caller_identity_current_account_id = data.aws_caller_identity.current.account_id
+ data_aws_partition_current_partition = data.aws_partition.current.partition
data_aws_region_current_name = data.aws_region.current.name
aws_kms_key_aft_arn = var.aft_kms_key_arn
+ aws_sns_topic_aft_notifications_arn = var.aft_sns_topic_arn
+ aws_sns_topic_aft_failure_notifications_arn = var.aft_failure_sns_topic_arn
})
}
@@ -100,8 +108,11 @@ resource "aws_iam_role_policy" "aft_enable_cloudtrail" {
policy = templatefile("${path.module}/iam/role-policies/aft_enable_cloudtrail.tpl", {
data_aws_caller_identity_current_account_id = data.aws_caller_identity.current.account_id
+ data_aws_partition_current_partition = data.aws_partition.current.partition
data_aws_region_current_name = data.aws_region.current.name
aws_kms_key_aft_arn = var.aft_kms_key_arn
+ aws_sns_topic_aft_notifications_arn = var.aft_sns_topic_arn
+ aws_sns_topic_aft_failure_notifications_arn = var.aft_failure_sns_topic_arn
})
}
diff --git a/modules/aft-feature-options/iam/role-policies/aft_delete_default_vpc_lambda.tpl b/modules/aft-feature-options/iam/role-policies/aft_delete_default_vpc_lambda.tpl
index 8cb7d71c..a4753a77 100644
--- a/modules/aft-feature-options/iam/role-policies/aft_delete_default_vpc_lambda.tpl
+++ b/modules/aft-feature-options/iam/role-policies/aft_delete_default_vpc_lambda.tpl
@@ -25,7 +25,7 @@
"Effect" : "Allow",
"Action" : "ssm:GetParameter",
"Resource" : [
- "arn:aws:ssm:${data_aws_region_current_name}:${data_aws_caller_identity_current_account_id}:parameter/aft/*"
+ "arn:${data_aws_partition_current_partition}:ssm:${data_aws_region_current_name}:${data_aws_caller_identity_current_account_id}:parameter/aft/*"
]
},
{
@@ -45,13 +45,23 @@
"sts:AssumeRole"
],
"Resource" : [
- "arn:aws:iam::${data_aws_caller_identity_current_account_id}:role/AWSAFTAdmin"
+ "arn:${data_aws_partition_current_partition}:iam::${data_aws_caller_identity_current_account_id}:role/AWSAFTAdmin"
]
},
{
"Effect" : "Allow",
"Action" : "sts:GetCallerIdentity",
"Resource" : "*"
+ },
+ {
+ "Effect" : "Allow",
+ "Action" : [
+ "sns:Publish"
+ ],
+ "Resource" : [
+ "${aws_sns_topic_aft_notifications_arn}",
+ "${aws_sns_topic_aft_failure_notifications_arn}"
+ ]
}
]
}
diff --git a/modules/aft-feature-options/iam/role-policies/aft_enable_cloudtrail.tpl b/modules/aft-feature-options/iam/role-policies/aft_enable_cloudtrail.tpl
index d76eebf0..9313d89f 100644
--- a/modules/aft-feature-options/iam/role-policies/aft_enable_cloudtrail.tpl
+++ b/modules/aft-feature-options/iam/role-policies/aft_enable_cloudtrail.tpl
@@ -5,7 +5,7 @@
"Effect" : "Allow",
"Action" : "ssm:GetParameter",
"Resource" : [
- "arn:aws:ssm:${data_aws_region_current_name}:${data_aws_caller_identity_current_account_id}:parameter/aft/*"
+ "arn:${data_aws_partition_current_partition}:ssm:${data_aws_region_current_name}:${data_aws_caller_identity_current_account_id}:parameter/aft/*"
]
},
{
@@ -25,13 +25,23 @@
"sts:AssumeRole"
],
"Resource" : [
- "arn:aws:iam::${data_aws_caller_identity_current_account_id}:role/AWSAFTAdmin"
+ "arn:${data_aws_partition_current_partition}:iam::${data_aws_caller_identity_current_account_id}:role/AWSAFTAdmin"
]
},
{
"Effect" : "Allow",
"Action" : "sts:GetCallerIdentity",
"Resource" : "*"
+ },
+ {
+ "Effect" : "Allow",
+ "Action" : [
+ "sns:Publish"
+ ],
+ "Resource" : [
+ "${aws_sns_topic_aft_notifications_arn}",
+ "${aws_sns_topic_aft_failure_notifications_arn}"
+ ]
}
]
}
diff --git a/modules/aft-feature-options/iam/role-policies/aft_enroll_support.tpl b/modules/aft-feature-options/iam/role-policies/aft_enroll_support.tpl
index d76eebf0..9313d89f 100644
--- a/modules/aft-feature-options/iam/role-policies/aft_enroll_support.tpl
+++ b/modules/aft-feature-options/iam/role-policies/aft_enroll_support.tpl
@@ -5,7 +5,7 @@
"Effect" : "Allow",
"Action" : "ssm:GetParameter",
"Resource" : [
- "arn:aws:ssm:${data_aws_region_current_name}:${data_aws_caller_identity_current_account_id}:parameter/aft/*"
+ "arn:${data_aws_partition_current_partition}:ssm:${data_aws_region_current_name}:${data_aws_caller_identity_current_account_id}:parameter/aft/*"
]
},
{
@@ -25,13 +25,23 @@
"sts:AssumeRole"
],
"Resource" : [
- "arn:aws:iam::${data_aws_caller_identity_current_account_id}:role/AWSAFTAdmin"
+ "arn:${data_aws_partition_current_partition}:iam::${data_aws_caller_identity_current_account_id}:role/AWSAFTAdmin"
]
},
{
"Effect" : "Allow",
"Action" : "sts:GetCallerIdentity",
"Resource" : "*"
+ },
+ {
+ "Effect" : "Allow",
+ "Action" : [
+ "sns:Publish"
+ ],
+ "Resource" : [
+ "${aws_sns_topic_aft_notifications_arn}",
+ "${aws_sns_topic_aft_failure_notifications_arn}"
+ ]
}
]
}
diff --git a/modules/aft-feature-options/iam/role-policies/aft_features_states.tpl b/modules/aft-feature-options/iam/role-policies/aft_features_states.tpl
index 6c85871d..aa46ef7d 100644
--- a/modules/aft-feature-options/iam/role-policies/aft_features_states.tpl
+++ b/modules/aft-feature-options/iam/role-policies/aft_features_states.tpl
@@ -4,7 +4,7 @@
{
"Effect": "Allow",
"Action": "sns:Publish*",
- "Resource": "arn:aws:sns:${data_aws_region_aft-management_name}:${data_aws_caller_identity_aft-management_account_id}:aft-*"
+ "Resource": "arn:${data_aws_partition_current_partition}:sns:${data_aws_region_aft-management_name}:${data_aws_caller_identity_aft-management_account_id}:aft-*"
},
{
"Effect": "Allow",
@@ -12,7 +12,7 @@
"lambda:InvokeFunction"
],
"Resource": [
- "arn:aws:lambda:${data_aws_region_aft-management_name}:${data_aws_caller_identity_aft-management_account_id}:function:aft-*"
+ "arn:${data_aws_partition_current_partition}:lambda:${data_aws_region_aft-management_name}:${data_aws_caller_identity_aft-management_account_id}:function:aft-*"
]
}
]
diff --git a/modules/aft-feature-options/kms.tf b/modules/aft-feature-options/kms.tf
index 0df86045..61e10224 100644
--- a/modules/aft-feature-options/kms.tf
+++ b/modules/aft-feature-options/kms.tf
@@ -6,7 +6,8 @@ resource "aws_kms_key" "aft_log_key" {
description = "KMS key for encrypt/decrypt log files"
enable_key_rotation = "true"
policy = templatefile("${path.module}/kms/key-policies/log-key.tpl", {
- log_archive_account_id = var.log_archive_account_id
+ log_archive_account_id = var.log_archive_account_id
+ data_aws_partition_current_partition = data.aws_partition.current.partition
})
}
diff --git a/modules/aft-feature-options/kms/key-policies/log-key.tpl b/modules/aft-feature-options/kms/key-policies/log-key.tpl
index 5edccce6..9803f717 100644
--- a/modules/aft-feature-options/kms/key-policies/log-key.tpl
+++ b/modules/aft-feature-options/kms/key-policies/log-key.tpl
@@ -24,7 +24,7 @@
"Sid": "Enable IAM User Permissions",
"Effect": "Allow",
"Principal": {
- "AWS": "arn:aws:iam::${log_archive_account_id}:root"
+ "AWS": "arn:${data_aws_partition_current_partition}:iam::${log_archive_account_id}:root"
},
"Action": "kms:*",
"Resource": "*"
diff --git a/modules/aft-feature-options/lambda.tf b/modules/aft-feature-options/lambda.tf
index a4ae63c8..81cbe345 100644
--- a/modules/aft-feature-options/lambda.tf
+++ b/modules/aft-feature-options/lambda.tf
@@ -2,26 +2,32 @@
# SPDX-License-Identifier: Apache-2.0
#
######## aft_delete_default_vpc ########
+#tfsec:ignore:aws-lambda-enable-tracing
resource "aws_lambda_function" "aft_delete_default_vpc" {
provider = aws.aft_management
filename = var.feature_options_archive_path
- function_name = "aft-delete-default-vpc"
+ function_name = var.delete_default_vpc_lambda_function_name
description = "Deletes default VPCs in all regions. Called from aft-features SFN."
role = aws_iam_role.aft_delete_default_vpc_lambda.arn
handler = "aft_delete_default_vpc.lambda_handler"
source_code_hash = var.feature_options_archive_hash
memory_size = 1024
- runtime = "python3.8"
+ runtime = var.lambda_runtime_python_version
timeout = "300"
layers = [var.aft_common_layer_arn]
- vpc_config {
- subnet_ids = var.aft_vpc_private_subnets
- security_group_ids = var.aft_vpc_default_sg
+ dynamic "vpc_config" {
+ for_each = var.aft_enable_vpc ? [1] : []
+
+ content {
+ subnet_ids = var.aft_vpc_private_subnets
+ security_group_ids = var.aft_vpc_default_sg
+ }
}
}
+#tfsec:ignore:aws-cloudwatch-log-group-customer-key
resource "aws_cloudwatch_log_group" "aft_delete_default_vpc" {
provider = aws.aft_management
name = "/aws/lambda/${aws_lambda_function.aft_delete_default_vpc.function_name}"
@@ -30,26 +36,32 @@ resource "aws_cloudwatch_log_group" "aft_delete_default_vpc" {
######## aft_enroll_support ########
+#tfsec:ignore:aws-lambda-enable-tracing
resource "aws_lambda_function" "aft_enroll_support" {
provider = aws.aft_management
filename = var.feature_options_archive_path
- function_name = "aft-enroll-support"
+ function_name = var.enroll_support_lambda_function_name
description = "Creates request to enroll an account in Enterprise support. Called from aft-features SFN."
role = aws_iam_role.aft_enroll_support.arn
handler = "aft_enroll_support.lambda_handler"
source_code_hash = var.feature_options_archive_hash
memory_size = 1024
- runtime = "python3.8"
+ runtime = var.lambda_runtime_python_version
timeout = "300"
layers = [var.aft_common_layer_arn]
- vpc_config {
- subnet_ids = var.aft_vpc_private_subnets
- security_group_ids = var.aft_vpc_default_sg
+ dynamic "vpc_config" {
+ for_each = var.aft_enable_vpc ? [1] : []
+
+ content {
+ subnet_ids = var.aft_vpc_private_subnets
+ security_group_ids = var.aft_vpc_default_sg
+ }
}
}
+#tfsec:ignore:aws-cloudwatch-log-group-customer-key
resource "aws_cloudwatch_log_group" "aft_enroll_support" {
provider = aws.aft_management
name = "/aws/lambda/${aws_lambda_function.aft_enroll_support.function_name}"
@@ -57,26 +69,32 @@ resource "aws_cloudwatch_log_group" "aft_enroll_support" {
}
######## aft_enable_cloudtrail ########
+#tfsec:ignore:aws-lambda-enable-tracing
resource "aws_lambda_function" "aft_enable_cloudtrail" {
provider = aws.aft_management
filename = var.feature_options_archive_path
- function_name = "aft-enable-cloudtrail"
+ function_name = var.enable_cloudtrail_lambda_function_name
description = "Creates an Org Trail to capture data events. Called from aft-features SFN."
role = aws_iam_role.aft_enable_cloudtrail.arn
handler = "aft_enable_cloudtrail.lambda_handler"
source_code_hash = var.feature_options_archive_hash
memory_size = 1024
- runtime = "python3.8"
+ runtime = var.lambda_runtime_python_version
timeout = "300"
layers = [var.aft_common_layer_arn]
- vpc_config {
- subnet_ids = var.aft_vpc_private_subnets
- security_group_ids = var.aft_vpc_default_sg
+ dynamic "vpc_config" {
+ for_each = var.aft_enable_vpc ? [1] : []
+
+ content {
+ subnet_ids = var.aft_vpc_private_subnets
+ security_group_ids = var.aft_vpc_default_sg
+ }
}
}
+#tfsec:ignore:aws-cloudwatch-log-group-customer-key
resource "aws_cloudwatch_log_group" "aft_enable_cloudtrail" {
provider = aws.aft_management
name = "/aws/lambda/${aws_lambda_function.aft_enable_cloudtrail.function_name}"
diff --git a/modules/aft-feature-options/outputs.tf b/modules/aft-feature-options/outputs.tf
index 7e497347..3eb28351 100644
--- a/modules/aft-feature-options/outputs.tf
+++ b/modules/aft-feature-options/outputs.tf
@@ -8,7 +8,7 @@ output "aws_aft_access_logs_s3_bucket_id" {
output "aws_aft_access_logs_s3_bucket_arn" {
value = aws_s3_bucket.aft_access_logs.arn
- description = "The ARN of the bucket. Will be of format arn:aws:s3:::bucketname."
+ description = "The ARN of the bucket. Will be of format arn:<partition>:s3:::bucketname."
}
output "aws_aft_access_logs_s3_bucket_region" {
@@ -23,7 +23,7 @@ output "aws_aft_logs_s3_bucket_id" {
output "aws_aft_logs_s3_bucket_arn" {
value = aws_s3_bucket.aft_logging_bucket.arn
- description = "The ARN of the bucket. Will be of format arn:aws:s3:::bucketname."
+ description = "The ARN of the bucket. Will be of format arn:<partition>:s3:::bucketname."
}
output "aws_aft_logs_s3_bucket_region" {
diff --git a/modules/aft-feature-options/s3.tf b/modules/aft-feature-options/s3.tf
index b8748fad..e2d113a6 100644
--- a/modules/aft-feature-options/s3.tf
+++ b/modules/aft-feature-options/s3.tf
@@ -8,32 +8,47 @@
resource "aws_s3_bucket" "aft_logging_bucket" {
provider = aws.log_archive
bucket = "${var.log_archive_bucket_name}-${var.log_archive_account_id}-${data.aws_region.current.name}"
+}
- server_side_encryption_configuration {
- rule {
- apply_server_side_encryption_by_default {
- kms_master_key_id = aws_kms_key.aft_log_key.arn
- sse_algorithm = "aws:kms"
- }
- }
+resource "aws_s3_bucket_logging" "aft_logging_bucket_logging" {
+ provider = aws.log_archive
+ bucket = aws_s3_bucket.aft_logging_bucket.id
+ target_bucket = aws_s3_bucket.aft_access_logs.id
+ target_prefix = "log/"
+}
+
+resource "aws_s3_bucket_versioning" "aft_logging_bucket_versioning" {
+ provider = aws.log_archive
+ bucket = aws_s3_bucket.aft_logging_bucket.id
+ versioning_configuration {
+ status = "Enabled"
}
+}
- versioning {
- enabled = true
+resource "aws_s3_bucket_server_side_encryption_configuration" "aft_logging_bucket_encryption" {
+ provider = aws.log_archive
+ bucket = aws_s3_bucket.aft_logging_bucket.id
+
+ rule {
+ apply_server_side_encryption_by_default {
+ kms_master_key_id = aws_kms_key.aft_log_key.arn
+ sse_algorithm = "aws:kms"
+ }
}
+}
- lifecycle_rule {
- enabled = true
+resource "aws_s3_bucket_lifecycle_configuration" "aft_logging_bucket_lifecycle_configuration" {
+ provider = aws.log_archive
+ bucket = aws_s3_bucket.aft_logging_bucket.id
+ rule {
+ status = "Enabled"
+ id = "aft_logging_bucket_lifecycle_configuration_rule"
noncurrent_version_expiration {
- days = var.log_archive_bucket_object_expiration_days
+ noncurrent_days = var.log_archive_bucket_object_expiration_days
}
}
- logging {
- target_bucket = aws_s3_bucket.aft_access_logs.id
- target_prefix = "log/"
- }
}
resource "aws_s3_bucket_policy" "aft_logging_bucket" {
@@ -54,34 +69,58 @@ resource "aws_s3_bucket_public_access_block" "aft_logging_bucket" {
restrict_public_buckets = true
}
-
+#tfsec:ignore:aws-s3-enable-bucket-logging
resource "aws_s3_bucket" "aft_access_logs" {
provider = aws.log_archive
bucket = "${var.log_archive_access_logs_bucket_name}-${var.log_archive_account_id}-${data.aws_region.current.name}"
- acl = "log-delivery-write"
+}
+
+resource "aws_s3_bucket_policy" "aft_access_logs" {
+ provider = aws.log_archive
+ bucket = aws_s3_bucket.aft_access_logs.id
+ policy = templatefile("${path.module}/s3/bucket-policies/aft_access_logs.tpl", {
+ aws_s3_bucket_aft_access_logs_arn = aws_s3_bucket.aft_access_logs.arn
+ aws_s3_bucket_aft_logging_bucket_arn = aws_s3_bucket.aft_logging_bucket.arn
+ log_archive_account_id = var.log_archive_account_id
+ })
+}
- versioning {
- enabled = true
+resource "aws_s3_bucket_versioning" "aft_access_logs_versioning" {
+ provider = aws.log_archive
+ bucket = aws_s3_bucket.aft_access_logs.id
+ versioning_configuration {
+ status = "Enabled"
}
+}
- server_side_encryption_configuration {
- rule {
- apply_server_side_encryption_by_default {
- sse_algorithm = "AES256"
- }
+#tfsec:ignore:aws-s3-encryption-customer-key
+resource "aws_s3_bucket_server_side_encryption_configuration" "aft_access_logs_encryption" {
+ provider = aws.log_archive
+ bucket = aws_s3_bucket.aft_access_logs.id
+
+ rule {
+ apply_server_side_encryption_by_default {
+ sse_algorithm = "AES256"
}
}
+}
- lifecycle_rule {
- enabled = true
- prefix = true
+resource "aws_s3_bucket_lifecycle_configuration" "aft_access_logs_lifecycle_configuration" {
+ provider = aws.log_archive
+ bucket = aws_s3_bucket.aft_access_logs.id
+ rule {
+ status = "Enabled"
+ filter {
+ prefix = "log/"
+ }
+ id = "aft_access_logs_lifecycle_configuration_rule"
noncurrent_version_expiration {
- days = var.log_archive_bucket_object_expiration_days
+ noncurrent_days = var.log_archive_bucket_object_expiration_days
}
}
-}
+}
resource "aws_s3_bucket_public_access_block" "aft_access_logs" {
provider = aws.log_archive
diff --git a/modules/aft-feature-options/s3/bucket-policies/aft_access_logs.tpl b/modules/aft-feature-options/s3/bucket-policies/aft_access_logs.tpl
new file mode 100644
index 00000000..9628a0be
--- /dev/null
+++ b/modules/aft-feature-options/s3/bucket-policies/aft_access_logs.tpl
@@ -0,0 +1,26 @@
+{
+ "Version": "2012-10-17",
+ "Statement": [
+ {
+ "Sid": "Allow PutObject",
+ "Effect": "Allow",
+ "Principal": {
+ "Service": [
+ "logging.s3.amazonaws.com"
+ ]
+ },
+ "Action": "s3:PutObject",
+ "Resource": [
+ "${aws_s3_bucket_aft_access_logs_arn}/*"
+ ],
+ "Condition": {
+ "ArnLike": {
+ "aws:SourceArn": "${aws_s3_bucket_aft_logging_bucket_arn}"
+ },
+ "StringEquals": {
+ "aws:SourceAccount": "${log_archive_account_id}"
+ }
+ }
+ }
+ ]
+}
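This new bucket policy replaces the `log-delivery-write` canned ACL dropped from `s3.tf`: S3 server access logging now delivers through the `logging.s3.amazonaws.com` service principal, constrained to the expected source bucket and account. A sketch of the equivalent wiring expressed inline with `jsonencode`, using hypothetical bucket names:

```hcl
data "aws_caller_identity" "current" {}

resource "aws_s3_bucket" "source" {
  bucket = "example-source-111122223333" # hypothetical
}

resource "aws_s3_bucket" "access_logs" {
  bucket = "example-access-logs-111122223333" # hypothetical
}

resource "aws_s3_bucket_logging" "source" {
  bucket        = aws_s3_bucket.source.id
  target_bucket = aws_s3_bucket.access_logs.id
  target_prefix = "log/"
}

resource "aws_s3_bucket_policy" "access_logs" {
  bucket = aws_s3_bucket.access_logs.id
  policy = jsonencode({
    Version = "2012-10-17"
    Statement = [{
      Sid       = "AllowS3ServerAccessLogDelivery"
      Effect    = "Allow"
      Principal = { Service = "logging.s3.amazonaws.com" }
      Action    = "s3:PutObject"
      Resource  = "${aws_s3_bucket.access_logs.arn}/*"
      Condition = {
        ArnLike      = { "aws:SourceArn" = aws_s3_bucket.source.arn }
        StringEquals = { "aws:SourceAccount" = data.aws_caller_identity.current.account_id }
      }
    }]
  })
}
```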
diff --git a/modules/aft-feature-options/states.tf b/modules/aft-feature-options/states.tf
index 6efe32bb..748899b8 100644
--- a/modules/aft-feature-options/states.tf
+++ b/modules/aft-feature-options/states.tf
@@ -4,6 +4,7 @@
locals {
state_machine_source = "${path.module}/states/aft_features.asl.json"
replacements_map = {
+ current_partition = data.aws_partition.current.partition
aft_delete_default_vpc_function_arn = aws_lambda_function.aft_delete_default_vpc.arn
aft_enroll_support_function_arn = aws_lambda_function.aft_enroll_support.arn
aft_enable_cloudtrail_function_arn = aws_lambda_function.aft_enable_cloudtrail.arn
diff --git a/modules/aft-feature-options/states/aft_features.asl.json b/modules/aft-feature-options/states/aft_features.asl.json
index 73d9b97e..2dfbe57d 100644
--- a/modules/aft-feature-options/states/aft_features.asl.json
+++ b/modules/aft-feature-options/states/aft_features.asl.json
@@ -39,7 +39,7 @@
},
"Notify Success": {
"Type": "Task",
- "Resource": "arn:aws:states:::sns:publish",
+ "Resource": "arn:${current_partition}:states:::sns:publish",
"Parameters": {
"TopicArn": "${aft_notification_arn}",
"Message.$": "$"
@@ -48,7 +48,7 @@
},
"Notify Failure": {
"Type": "Task",
- "Resource": "arn:aws:states:::sns:publish",
+ "Resource": "arn:${current_partition}:states:::sns:publish",
"Parameters": {
"TopicArn": "${aft_failure_notification_arn}",
"Message.$": "$.Cause"
diff --git a/modules/aft-feature-options/variables.tf b/modules/aft-feature-options/variables.tf
index 188ad30a..8e25f2b4 100644
--- a/modules/aft-feature-options/variables.tf
+++ b/modules/aft-feature-options/variables.tf
@@ -2,11 +2,13 @@
# SPDX-License-Identifier: Apache-2.0
#
variable "aft_vpc_private_subnets" {
- type = list(string)
+ type = list(string)
+ default = null
}
variable "aft_vpc_default_sg" {
- type = list(string)
+ type = list(string)
+ default = null
}
variable "aft_common_layer_arn" {
@@ -21,6 +23,10 @@ variable "aft_kms_key_arn" {
type = string
}
+variable "aft_kms_key_id" {
+ type = string
+}
+
variable "aft_sns_topic_arn" {
type = string
}
@@ -38,7 +44,7 @@ variable "log_archive_access_logs_bucket_name" {
}
variable "log_archive_bucket_object_expiration_days" {
- type = string
+ type = number
}
variable "log_archive_account_id" {
@@ -55,3 +61,24 @@ variable "feature_options_archive_path" {
variable "feature_options_archive_hash" {
type = string
}
+
+variable "delete_default_vpc_lambda_function_name" {
+ type = string
+}
+
+variable "enroll_support_lambda_function_name" {
+ type = string
+}
+
+variable "enable_cloudtrail_lambda_function_name" {
+ type = string
+}
+
+variable "lambda_runtime_python_version" {
+ type = string
+}
+
+variable "aft_enable_vpc" {
+ type = bool
+}
diff --git a/modules/aft-feature-options/versions.tf b/modules/aft-feature-options/versions.tf
index 49e3f390..5cc7b4e8 100755
--- a/modules/aft-feature-options/versions.tf
+++ b/modules/aft-feature-options/versions.tf
@@ -2,12 +2,12 @@
# SPDX-License-Identifier: Apache-2.0
#
terraform {
- required_version = ">= 0.15.1"
+ required_version = ">= 1.0.0"
required_providers {
aws = {
source = "hashicorp/aws"
- version = ">= 3.72, < 4.0.0"
+ version = ">= 4.27.0"
configuration_aliases = [aws.ct_management, aws.log_archive, aws.audit, aws.aft_management]
}
}
diff --git a/modules/aft-iam-roles/admin-role/data.tf b/modules/aft-iam-roles/admin-role/data.tf
new file mode 100644
index 00000000..a4416e60
--- /dev/null
+++ b/modules/aft-iam-roles/admin-role/data.tf
@@ -0,0 +1,5 @@
+# Copyright Amazon.com, Inc. or its affiliates. All rights reserved.
+# SPDX-License-Identifier: Apache-2.0
+#
+
+data "aws_partition" "current" {}
diff --git a/modules/aft-iam-roles/admin-role/iam.tf b/modules/aft-iam-roles/admin-role/iam.tf
index d2e07165..0c20d09b 100644
--- a/modules/aft-iam-roles/admin-role/iam.tf
+++ b/modules/aft-iam-roles/admin-role/iam.tf
@@ -11,21 +11,27 @@ variable "role_name" {
variable "trusted_entity" {
+}
+variable "aft_admin_session_arn" {
}
resource "aws_iam_role" "role" {
name = var.role_name
- # Terraform's "jsonencode" function converts a
- # Terraform expression result to valid JSON syntax.
assume_role_policy = templatefile("${path.module}/trust_policy.tpl",
{
- trusted_entity_type = var.trusted_entity_type
- trusted_entity = var.trusted_entity
+ trusted_entity_type = var.trusted_entity_type
+ trusted_entity = var.trusted_entity
+ aft_admin_assumed_role_arn = var.aft_admin_session_arn
+
}
)
+}
- managed_policy_arns = ["arn:aws:iam::aws:policy/AdministratorAccess"]
+resource "aws_iam_role_policy_attachment" "administrator-access-attachment" {
+ role = aws_iam_role.role.name
+ policy_arn = "arn:${data.aws_partition.current.partition}:iam::aws:policy/AdministratorAccess"
}
output "arn" {
diff --git a/modules/aft-iam-roles/admin-role/main.tf b/modules/aft-iam-roles/admin-role/main.tf
index 038cde42..411a5cc2 100644
--- a/modules/aft-iam-roles/admin-role/main.tf
+++ b/modules/aft-iam-roles/admin-role/main.tf
@@ -5,7 +5,7 @@ terraform {
required_providers {
aws = {
source = "hashicorp/aws"
- version = ">= 2.7.0"
+ version = ">= 5.11.0, < 6.0.0"
}
}
}
diff --git a/modules/aft-iam-roles/admin-role/trust_policy.tpl b/modules/aft-iam-roles/admin-role/trust_policy.tpl
index 24c46e8f..89b456c6 100644
--- a/modules/aft-iam-roles/admin-role/trust_policy.tpl
+++ b/modules/aft-iam-roles/admin-role/trust_policy.tpl
@@ -4,7 +4,10 @@
{
"Effect": "Allow",
"Principal": {
- "${trusted_entity_type}": ["${trusted_entity}"]
+ "${trusted_entity_type}": [
+ "${trusted_entity}",
+ "${aft_admin_assumed_role_arn}"
+ ]
},
"Action": "sts:AssumeRole"
}
diff --git a/modules/aft-iam-roles/data.tf b/modules/aft-iam-roles/data.tf
new file mode 100644
index 00000000..f6f8c0f7
--- /dev/null
+++ b/modules/aft-iam-roles/data.tf
@@ -0,0 +1,8 @@
+# Copyright Amazon.com, Inc. or its affiliates. All rights reserved.
+# SPDX-License-Identifier: Apache-2.0
+#
+data "aws_caller_identity" "aft_management" {
+ provider = aws.aft_management
+}
+
+data "aws_partition" "current" {}
diff --git a/modules/aft-iam-roles/iam.tf b/modules/aft-iam-roles/iam.tf
index 3bd59943..da119073 100644
--- a/modules/aft-iam-roles/iam.tf
+++ b/modules/aft-iam-roles/iam.tf
@@ -1,18 +1,14 @@
# Copyright Amazon.com, Inc. or its affiliates. All rights reserved.
# SPDX-License-Identifier: Apache-2.0
#
-data "aws_caller_identity" "aft_management" {
- provider = aws.aft_management
-}
resource "aws_iam_role" "aft_admin_role" {
provider = aws.aft_management
name = "AWSAFTAdmin"
- assume_role_policy = templatefile("${path.module}/iam/aft_admin_role_trust_policy.tpl",
- {
- aft_account_id = data.aws_caller_identity.aft_management.account_id
- }
- )
+ assume_role_policy = templatefile("${path.module}/iam/aft_admin_role_trust_policy.tpl", {
+ aft_account_id = data.aws_caller_identity.aft_management.account_id
+ data_aws_partition_current_partition = data.aws_partition.current.partition
+ })
}
resource "aws_iam_role_policy" "aft_admin_role" {
@@ -20,7 +16,9 @@ resource "aws_iam_role_policy" "aft_admin_role" {
name = "aft_admin_role_policy"
role = aws_iam_role.aft_admin_role.id
- policy = file("${path.module}/iam/aft_admin_role_policy.tpl")
+ policy = templatefile("${path.module}/iam/aft_admin_role_policy.tpl", {
+ data_aws_partition_current_partition = data.aws_partition.current.partition
+ })
}
module "ct_management_exec_role" {
@@ -28,7 +26,9 @@ module "ct_management_exec_role" {
providers = {
aws = aws.ct_management
}
- trusted_entity = aws_iam_role.aft_admin_role.arn
+ trusted_entity = aws_iam_role.aft_admin_role.arn
+ aft_admin_session_arn = local.aft_admin_assumed_role_arn
+
}
module "log_archive_exec_role" {
@@ -36,7 +36,9 @@ module "log_archive_exec_role" {
providers = {
aws = aws.log_archive
}
- trusted_entity = aws_iam_role.aft_admin_role.arn
+ trusted_entity = aws_iam_role.aft_admin_role.arn
+ aft_admin_session_arn = local.aft_admin_assumed_role_arn
+
}
module "audit_exec_role" {
@@ -44,7 +46,9 @@ module "audit_exec_role" {
providers = {
aws = aws.audit
}
- trusted_entity = aws_iam_role.aft_admin_role.arn
+ trusted_entity = aws_iam_role.aft_admin_role.arn
+ aft_admin_session_arn = local.aft_admin_assumed_role_arn
+
}
module "aft_exec_role" {
@@ -52,5 +56,47 @@ module "aft_exec_role" {
providers = {
aws = aws.aft_management
}
- trusted_entity = aws_iam_role.aft_admin_role.arn
+ trusted_entity = aws_iam_role.aft_admin_role.arn
+ aft_admin_session_arn = local.aft_admin_assumed_role_arn
+
+}
+
+
+module "ct_management_service_role" {
+ source = "./service-role"
+ providers = {
+ aws = aws.ct_management
+ }
+ trusted_entity = aws_iam_role.aft_admin_role.arn
+ aft_admin_session_arn = local.aft_admin_assumed_role_arn
+
+}
+
+module "log_archive_service_role" {
+ source = "./service-role"
+ providers = {
+ aws = aws.log_archive
+ }
+ trusted_entity = aws_iam_role.aft_admin_role.arn
+ aft_admin_session_arn = local.aft_admin_assumed_role_arn
+}
+
+module "audit_service_role" {
+ source = "./service-role"
+ providers = {
+ aws = aws.audit
+ }
+ trusted_entity = aws_iam_role.aft_admin_role.arn
+ aft_admin_session_arn = local.aft_admin_assumed_role_arn
+
+}
+
+module "aft_service_role" {
+ source = "./service-role"
+ providers = {
+ aws = aws.aft_management
+ }
+ trusted_entity = aws_iam_role.aft_admin_role.arn
+ aft_admin_session_arn = local.aft_admin_assumed_role_arn
+
}
diff --git a/modules/aft-iam-roles/iam/aft_admin_role_policy.tpl b/modules/aft-iam-roles/iam/aft_admin_role_policy.tpl
index 5ed14533..5bf6091d 100644
--- a/modules/aft-iam-roles/iam/aft_admin_role_policy.tpl
+++ b/modules/aft-iam-roles/iam/aft_admin_role_policy.tpl
@@ -4,7 +4,10 @@
{
"Effect": "Allow",
"Action": "sts:AssumeRole",
- "Resource": "arn:aws:iam::*:role/AWSAFTExecution"
+ "Resource": [
+ "arn:${data_aws_partition_current_partition}:iam::*:role/AWSAFTExecution",
+ "arn:${data_aws_partition_current_partition}:iam::*:role/AWSAFTService"
+ ]
}
]
}
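The admin role policy now permits `sts:AssumeRole` into both `AWSAFTExecution` and the new `AWSAFTService` role in any account of the partition. A hedged sketch of the hub-and-spoke hop this enables, with illustrative account and partition values (the session name matches the one baked into `locals.tf` below):

```python
# Sketch of the assume-role hop permitted by the policy above: a session in
# the AFT management account (holding AWSAFTAdmin) federates into a target
# account's AWSAFTExecution or AWSAFTService role. Values are illustrative.
import boto3

def assume_aft_role(hub: boto3.Session, account_id: str, role_name: str,
                    partition: str = "aws") -> boto3.Session:
    creds = hub.client("sts").assume_role(
        RoleArn=f"arn:{partition}:iam::{account_id}:role/{role_name}",
        RoleSessionName="AWSAFT-Session",
    )["Credentials"]
    return boto3.Session(
        aws_access_key_id=creds["AccessKeyId"],
        aws_secret_access_key=creds["SecretAccessKey"],
        aws_session_token=creds["SessionToken"],
    )

# exec_session = assume_aft_role(boto3.Session(), "111122223333", "AWSAFTExecution")
```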
diff --git a/modules/aft-iam-roles/iam/aft_admin_role_trust_policy.tpl b/modules/aft-iam-roles/iam/aft_admin_role_trust_policy.tpl
index e068bc65..b6f34d9b 100644
--- a/modules/aft-iam-roles/iam/aft_admin_role_trust_policy.tpl
+++ b/modules/aft-iam-roles/iam/aft_admin_role_trust_policy.tpl
@@ -4,7 +4,7 @@
{
"Effect": "Allow",
"Principal": {
- "AWS": "arn:aws:iam::${aft_account_id}:root"
+ "AWS": "arn:${data_aws_partition_current_partition}:iam::${aft_account_id}:root"
},
"Action": "sts:AssumeRole"
}
diff --git a/modules/aft-iam-roles/locals.tf b/modules/aft-iam-roles/locals.tf
new file mode 100644
index 00000000..aacd26df
--- /dev/null
+++ b/modules/aft-iam-roles/locals.tf
@@ -0,0 +1,6 @@
+# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+# SPDX-License-Identifier: Apache-2.0
+
+locals {
+ aft_admin_assumed_role_arn = "arn:${data.aws_partition.current.partition}:sts::${data.aws_caller_identity.aft_management.account_id}:assumed-role/AWSAFTAdmin/AWSAFT-Session"
+}
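Note that the trust policies in this change match on the assumed-role session ARN, not the role ARN: once `AWSAFTAdmin` is assumed with session name `AWSAFT-Session`, STS reports the caller as the `assumed-role` form that this local reconstructs. A short illustration of the two forms (account ID is an example):

```python
# The role ARN you assume vs. the assumed-role ARN STS reports back, which is
# the principal the new trust policies match on. Account ID is an example.
ACCOUNT_ID = "111122223333"

role_arn = f"arn:aws:iam::{ACCOUNT_ID}:role/AWSAFTAdmin"
assumed_role_arn = f"arn:aws:sts::{ACCOUNT_ID}:assumed-role/AWSAFTAdmin/AWSAFT-Session"

# After sts.assume_role(RoleArn=role_arn, RoleSessionName="AWSAFT-Session"),
# get_caller_identity()["Arn"] on the resulting session returns assumed_role_arn.
```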
diff --git a/modules/aft-iam-roles/service-role/data.tf b/modules/aft-iam-roles/service-role/data.tf
new file mode 100644
index 00000000..a4416e60
--- /dev/null
+++ b/modules/aft-iam-roles/service-role/data.tf
@@ -0,0 +1,5 @@
+# Copyright Amazon.com, Inc. or its affiliates. All rights reserved.
+# SPDX-License-Identifier: Apache-2.0
+#
+
+data "aws_partition" "current" {}
diff --git a/modules/aft-iam-roles/service-role/main.tf b/modules/aft-iam-roles/service-role/main.tf
new file mode 100644
index 00000000..35cf06d9
--- /dev/null
+++ b/modules/aft-iam-roles/service-role/main.tf
@@ -0,0 +1,48 @@
+# Copyright Amazon.com, Inc. or its affiliates. All rights reserved.
+# SPDX-License-Identifier: Apache-2.0
+#
+
+terraform {
+ required_providers {
+ aws = {
+ source = "hashicorp/aws"
+ version = ">= 5.11.0, < 6.0.0"
+ }
+ }
+}
+variable "trusted_entity_type" {
+ default = "AWS"
+}
+
+variable "role_name" {
+ default = "AWSAFTService"
+}
+
+variable "trusted_entity" {
+
+}
+variable "aft_admin_session_arn" {
+
+}
+
+resource "aws_iam_role" "role" {
+ name = var.role_name
+
+ assume_role_policy = templatefile("${path.module}/trust_policy.tpl",
+ {
+ trusted_entity_type = var.trusted_entity_type
+ trusted_entity = var.trusted_entity
+ aft_admin_assumed_role_arn = var.aft_admin_session_arn
+
+ }
+ )
+}
+
+resource "aws_iam_role_policy_attachment" "administrator-access-attachment" {
+ role = aws_iam_role.role.name
+ policy_arn = "arn:${data.aws_partition.current.partition}:iam::aws:policy/AdministratorAccess"
+}
+
+output "arn" {
+ value = aws_iam_role.role.arn
+}
diff --git a/modules/aft-iam-roles/service-role/trust_policy.tpl b/modules/aft-iam-roles/service-role/trust_policy.tpl
new file mode 100644
index 00000000..89b456c6
--- /dev/null
+++ b/modules/aft-iam-roles/service-role/trust_policy.tpl
@@ -0,0 +1,15 @@
+{
+ "Version": "2012-10-17",
+ "Statement": [
+ {
+ "Effect": "Allow",
+ "Principal": {
+ "${trusted_entity_type}": [
+ "${trusted_entity}",
+ "${aft_admin_assumed_role_arn}"
+ ]
+ },
+ "Action": "sts:AssumeRole"
+ }
+ ]
+}
diff --git a/modules/aft-iam-roles/versions.tf b/modules/aft-iam-roles/versions.tf
index 49e3f390..5cc7b4e8 100755
--- a/modules/aft-iam-roles/versions.tf
+++ b/modules/aft-iam-roles/versions.tf
@@ -2,12 +2,12 @@
# SPDX-License-Identifier: Apache-2.0
#
terraform {
- required_version = ">= 0.15.1"
+ required_version = ">= 1.0.0"
required_providers {
aws = {
source = "hashicorp/aws"
- version = ">= 3.72, < 4.0.0"
+ version = ">= 4.27.0"
configuration_aliases = [aws.ct_management, aws.log_archive, aws.audit, aws.aft_management]
}
}
diff --git a/modules/aft-lambda-layer/buildspecs/aft-lambda-layer.yml b/modules/aft-lambda-layer/buildspecs/aft-lambda-layer.yml
index 8d6be017..118d53d8 100644
--- a/modules/aft-lambda-layer/buildspecs/aft-lambda-layer.yml
+++ b/modules/aft-lambda-layer/buildspecs/aft-lambda-layer.yml
@@ -14,7 +14,7 @@ phases:
# URL Without Access ID
- URL=$(echo "$AWS_MODULE_SOURCE" | awk '{split($0,a,"@"); print a[2]}')
- |
- ssh_key_parameter=$(aws ssm get-parameter --name /aft/config/aft-ssh-key --with-decryption || echo "None")
+ ssh_key_parameter=$(aws ssm get-parameter --name /aft/config/aft-ssh-key --with-decryption 2> /dev/null || echo "None")
if [[ $ssh_key_parameter != "None" ]]; then
ssh_key=$(jq --raw-output ".Parameter.Value" <<< $ssh_key_parameter)
mkdir -p ~/.ssh
@@ -34,6 +34,7 @@ phases:
- python3 -m pip install virtualenv
- python3 -m venv .venv
- . .venv/bin/activate
+ - python3 -m pip install --upgrade 'setuptools>=70.0.0'
- python3 -m pip install ./aws-aft-core-framework/sources/aft-lambda-layer
build:
commands:
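Redirecting stderr ahead of the `|| echo "None"` fallback keeps the expected ParameterNotFound error out of the build logs when no SSH key is configured. For comparison, the equivalent optional-parameter lookup in boto3 (a sketch, not code from this repo):

```python
# Sketch: the boto3 equivalent of the buildspec's "fetch the SSH key
# parameter or fall back to None" pattern.
from typing import Optional
import boto3

def get_optional_parameter(name: str) -> Optional[str]:
    ssm = boto3.client("ssm")
    try:
        return ssm.get_parameter(Name=name, WithDecryption=True)["Parameter"]["Value"]
    except ssm.exceptions.ParameterNotFound:
        # Mirrors `|| echo "None"`: absence of the key is not an error.
        return None
```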
diff --git a/modules/aft-lambda-layer/codebuild.tf b/modules/aft-lambda-layer/codebuild.tf
index 0b436b47..22432423 100644
--- a/modules/aft-lambda-layer/codebuild.tf
+++ b/modules/aft-lambda-layer/codebuild.tf
@@ -1,6 +1,13 @@
# Copyright Amazon.com, Inc. or its affiliates. All rights reserved.
# SPDX-License-Identifier: Apache-2.0
#
+
+#tfsec:ignore:aws-cloudwatch-log-group-customer-key
+resource "aws_cloudwatch_log_group" "codebuild_loggroup" {
+ name = "/aws/codebuild/${local.common_name}"
+ retention_in_days = var.cloudwatch_log_group_retention
+}
+
resource "aws_codebuild_project" "codebuild" {
name = local.common_name
description = "Codebuild project to create lambda layer ${var.lambda_layer_name}"
@@ -14,7 +21,7 @@ resource "aws_codebuild_project" "codebuild" {
environment {
compute_type = "BUILD_GENERAL1_MEDIUM"
- image = "aws/codebuild/amazonlinux2-x86_64-standard:3.0"
+ image = "aws/codebuild/amazonlinux2-x86_64-standard:5.0"
type = "LINUX_CONTAINER"
image_pull_credentials_type = "CODEBUILD"
@@ -22,22 +29,10 @@ resource "aws_codebuild_project" "codebuild" {
name = "PYTHON_VERSION"
value = var.lambda_layer_python_version
}
- environment_variable {
- name = "LAYER_NAME"
- value = var.lambda_layer_name
- }
environment_variable {
name = "BUCKET_NAME"
value = var.s3_bucket_name
}
- environment_variable {
- name = "EVENT_RULE_NAME"
- value = "${local.common_name}-${replace(var.aft_version, ".", "-")}"
- }
- environment_variable {
- name = "EVENT_TARGET_ID"
- value = local.target_id
- }
environment_variable {
name = "SSM_AWS_MODULE_SOURCE"
value = var.aft_tf_aws_customizations_module_url_ssm_path
@@ -48,11 +43,12 @@ resource "aws_codebuild_project" "codebuild" {
value = var.aft_tf_aws_customizations_module_git_ref_ssm_path
type = "PLAINTEXT"
}
+
}
logs_config {
cloudwatch_logs {
- group_name = local.common_name
+ group_name = aws_cloudwatch_log_group.codebuild_loggroup.name
stream_name = "build-logs"
}
@@ -67,10 +63,17 @@ resource "aws_codebuild_project" "codebuild" {
buildspec = data.local_file.aft_lambda_layer.content
}
- vpc_config {
- vpc_id = var.aft_vpc_id
- subnets = var.aft_vpc_private_subnets
- security_group_ids = var.aft_vpc_default_sg
+ dynamic "vpc_config" {
+ for_each = var.aft_enable_vpc ? [1] : []
+ content {
+ vpc_id = var.aft_vpc_id
+ subnets = var.aft_vpc_private_subnets
+ security_group_ids = var.aft_vpc_default_sg
+ }
+ }
+
+ lifecycle {
+ ignore_changes = [project_visibility]
}
}
diff --git a/modules/aft-lambda-layer/data.tf b/modules/aft-lambda-layer/data.tf
index f6a952ef..1a879801 100644
--- a/modules/aft-lambda-layer/data.tf
+++ b/modules/aft-lambda-layer/data.tf
@@ -1,6 +1,8 @@
# Copyright Amazon.com, Inc. or its affiliates. All rights reserved.
# SPDX-License-Identifier: Apache-2.0
#
+data "aws_partition" "current" {}
+
data "aws_caller_identity" "session" {}
data "local_file" "aft_lambda_layer" {
diff --git a/modules/aft-lambda-layer/iam.tf b/modules/aft-lambda-layer/iam.tf
index 41243fa3..c7778b42 100644
--- a/modules/aft-lambda-layer/iam.tf
+++ b/modules/aft-lambda-layer/iam.tf
@@ -6,34 +6,35 @@ resource "aws_iam_role" "codebuild" {
assume_role_policy = file("${path.module}/iam/trust-policies/codebuild.tpl")
}
-resource "aws_iam_role" "codebuild_invoker_lambda_role" {
- name = "codebuild_invoker_role"
+resource "aws_iam_role" "codebuild_trigger_lambda_role" {
+ name = "codebuild_trigger_role"
assume_role_policy = file("${path.module}/iam/trust-policies/lambda.tpl")
}
resource "aws_iam_role_policy" "codebuild" {
role = aws_iam_role.codebuild.name
policy = templatefile("${path.module}/iam/role-policies/codebuild.tpl", {
+ "data_aws_partition_current_partition" = data.aws_partition.current.partition
"aws_region" = var.aws_region
"account_id" = local.account_id
"layer_name" = var.lambda_layer_name
"s3_bucket_name" = var.s3_bucket_name
- "cloudwatch_event_name" = local.common_name
"data_aws_kms_alias_aft_key_target_key_arn" = var.aft_kms_key_arn
})
}
-resource "aws_iam_role_policy" "codebuild_invoker_policy" {
- role = aws_iam_role.codebuild_invoker_lambda_role.name
- policy = templatefile("${path.module}/iam/role-policies/codebuild-invoker.tpl", {
- "aws_region" = var.aws_region
- "account_id" = local.account_id
- "codebuild_project_name" = aws_codebuild_project.codebuild.name
- "codebuild_invoker_function_name" = local.codebuild_invoker_function_name
+resource "aws_iam_role_policy" "codebuild_trigger_policy" {
+ role = aws_iam_role.codebuild_trigger_lambda_role.name
+ policy = templatefile("${path.module}/iam/role-policies/codebuild-trigger.tpl", {
+ "data_aws_partition_current_partition" = data.aws_partition.current.partition
+ "aws_region" = var.aws_region
+ "account_id" = local.account_id
+ "codebuild_project_name" = aws_codebuild_project.codebuild.name
+ "codebuild_trigger_function_name" = local.codebuild_trigger_function_name
})
}
-resource "aws_iam_role_policy_attachment" "codebuild_invoker_VPC_access" {
- role = aws_iam_role.codebuild_invoker_lambda_role.name
- policy_arn = "arn:aws:iam::aws:policy/service-role/AWSLambdaVPCAccessExecutionRole"
+resource "aws_iam_role_policy_attachment" "codebuild_trigger_VPC_access" {
+ role = aws_iam_role.codebuild_trigger_lambda_role.name
+ policy_arn = "arn:${data.aws_partition.current.partition}:iam::aws:policy/service-role/AWSLambdaVPCAccessExecutionRole"
}
diff --git a/modules/aft-lambda-layer/iam/role-policies/codebuild-invoker.tpl b/modules/aft-lambda-layer/iam/role-policies/codebuild-trigger.tpl
similarity index 51%
rename from modules/aft-lambda-layer/iam/role-policies/codebuild-invoker.tpl
rename to modules/aft-lambda-layer/iam/role-policies/codebuild-trigger.tpl
index 04f4c789..a44254f2 100644
--- a/modules/aft-lambda-layer/iam/role-policies/codebuild-invoker.tpl
+++ b/modules/aft-lambda-layer/iam/role-policies/codebuild-trigger.tpl
@@ -7,14 +7,14 @@
"codebuild:StartBuild"
],
"Effect": "Allow",
- "Resource": "arn:aws:codebuild:${aws_region}:${account_id}:project/${codebuild_project_name}"
+ "Resource": "arn:${data_aws_partition_current_partition}:codebuild:${aws_region}:${account_id}:project/${codebuild_project_name}"
},
{
"Action": [
"logs:CreateLogGroup"
],
"Effect": "Allow",
- "Resource": "arn:aws:logs:${aws_region}:${account_id}:*"
+ "Resource": "arn:${data_aws_partition_current_partition}:logs:${aws_region}:${account_id}:*"
},
{
"Action": [
@@ -22,7 +22,7 @@
"logs:PutLogEvents"
],
"Effect": "Allow",
- "Resource": "arn:aws:logs:${aws_region}:${account_id}:log-group:/aws/lambda/${codebuild_invoker_function_name}:*"
+ "Resource": "arn:${data_aws_partition_current_partition}:logs:${aws_region}:${account_id}:log-group:/aws/lambda/${codebuild_trigger_function_name}:*"
}
]
}
diff --git a/modules/aft-lambda-layer/iam/role-policies/codebuild.tpl b/modules/aft-lambda-layer/iam/role-policies/codebuild.tpl
index c3428e38..8e6b7d92 100644
--- a/modules/aft-lambda-layer/iam/role-policies/codebuild.tpl
+++ b/modules/aft-lambda-layer/iam/role-policies/codebuild.tpl
@@ -8,7 +8,7 @@
"lambda:PublishLayerVersion"
],
"Effect": "Allow",
- "Resource": "arn:aws:lambda:${aws_region}:${account_id}:layer:${layer_name}:*"
+ "Resource": "arn:${data_aws_partition_current_partition}:lambda:${aws_region}:${account_id}:layer:${layer_name}:*"
},
{
"Effect": "Allow",
@@ -43,8 +43,8 @@
"s3:*"
],
"Resource": [
- "arn:aws:s3:::${s3_bucket_name}",
- "arn:aws:s3:::${s3_bucket_name}/*"
+ "arn:${data_aws_partition_current_partition}:s3:::${s3_bucket_name}",
+ "arn:${data_aws_partition_current_partition}:s3:::${s3_bucket_name}/*"
]
},
{
@@ -66,7 +66,7 @@
"ec2:CreateNetworkInterfacePermission"
],
"Resource": [
- "arn:aws:ec2:${aws_region}:${account_id}:network-interface/*"
+ "arn:${data_aws_partition_current_partition}:ec2:${aws_region}:${account_id}:network-interface/*"
]
},
{
@@ -76,7 +76,7 @@
"ssm:GetParameter"
],
"Resource": [
- "arn:aws:ssm:${aws_region}:${account_id}:parameter/aft/*"
+ "arn:${data_aws_partition_current_partition}:ssm:${aws_region}:${account_id}:parameter/aft/*"
]
},
{
diff --git a/modules/aft-lambda-layer/lambda.tf b/modules/aft-lambda-layer/lambda.tf
index 9c92d378..e5f3c8ef 100644
--- a/modules/aft-lambda-layer/lambda.tf
+++ b/modules/aft-lambda-layer/lambda.tf
@@ -1,25 +1,35 @@
# Copyright Amazon.com, Inc. or its affiliates. All rights reserved.
# SPDX-License-Identifier: Apache-2.0
#
-resource "aws_lambda_function" "codebuild_invoker" {
+
+#tfsec:ignore:aws-lambda-enable-tracing
+resource "aws_lambda_function" "codebuild_trigger" {
filename = var.builder_archive_path
- function_name = local.codebuild_invoker_function_name
- description = "AFT Lambda Layer - CodeBuild Invoker"
- role = aws_iam_role.codebuild_invoker_lambda_role.arn
- handler = "codebuild_invoker.lambda_handler"
+ function_name = local.codebuild_trigger_function_name
+ description = "AFT Lambda Layer - CodeBuild Trigger"
+ role = aws_iam_role.codebuild_trigger_lambda_role.arn
+ handler = "codebuild_trigger.lambda_handler"
source_code_hash = var.builder_archive_hash
memory_size = 1024
- runtime = "python3.8"
+ runtime = var.lambda_runtime_python_version
timeout = 900
-
- vpc_config {
- subnet_ids = var.aft_vpc_private_subnets
- security_group_ids = var.aft_vpc_default_sg
+ dynamic "vpc_config" {
+ for_each = var.aft_enable_vpc ? [1] : []
+ content {
+ subnet_ids = var.aft_vpc_private_subnets
+ security_group_ids = var.aft_vpc_default_sg
+ }
}
}
-data "aws_lambda_invocation" "invoke_codebuild_job" {
- function_name = aws_lambda_function.codebuild_invoker.function_name
+#tfsec:ignore:aws-cloudwatch-log-group-customer-key
+resource "aws_cloudwatch_log_group" "codebuild_trigger_loggroup" {
+ name = "/aws/lambda/${aws_lambda_function.codebuild_trigger.function_name}"
+ retention_in_days = var.cloudwatch_log_group_retention
+}
+
+data "aws_lambda_invocation" "trigger_codebuild_job" {
+ function_name = aws_lambda_function.codebuild_trigger.function_name
+ input = <<JSON
diff --git a/sources/aft-lambda-layer/aft_common/account.py b/sources/aft-lambda-layer/aft_common/account.py
deleted file mode 100644
--- a/sources/aft-lambda-layer/aft_common/account.py
+++ /dev/null
- def __init__(self, ct_management_session: Session, account_name: str) -> None:
- self.ct_management_session = ct_management_session
- self.account_name = account_name
-
- @property
- def provisioned_product(self) -> Optional[ProvisionedProductDetailTypeDef]:
- client: ServiceCatalogClient = self.ct_management_session.client(
- "servicecatalog"
- )
- try:
- response: DescribeProvisionedProductOutputTypeDef = (
- client.describe_provisioned_product(Name=self.account_name)
- )
- return response["ProvisionedProductDetail"]
- except client.exceptions.ResourceNotFoundException:
- logger.debug(f"Account with name {self.account_name} does not exists")
- return None
diff --git a/sources/aft-lambda-layer/aft_common/account_provisioning_framework.py b/sources/aft-lambda-layer/aft_common/account_provisioning_framework.py
index 949ffe9b..49c70c91 100644
--- a/sources/aft-lambda-layer/aft_common/account_provisioning_framework.py
+++ b/sources/aft-lambda-layer/aft_common/account_provisioning_framework.py
@@ -2,192 +2,245 @@
# SPDX-License-Identifier: Apache-2.0
#
import json
-import os
+import logging
import time
-from typing import TYPE_CHECKING, Any, Dict, List, Sequence
+from datetime import datetime, timedelta
+from typing import TYPE_CHECKING, Any, Dict, List
import aft_common.aft_utils as utils
-import jsonschema
-from aft_common.types import AftAccountInfo
+import aft_common.constants
+import aft_common.ssm
+from aft_common import ddb
+from aft_common.aft_utils import sanitize_input_for_logging
+from aft_common.auth import AuthClient
+from aft_common.organizations import OrganizationsAgent
from boto3.session import Session
+from botocore.exceptions import ClientError
if TYPE_CHECKING:
+ from mypy_boto3_dynamodb.type_defs import PutItemOutputTableTypeDef
from mypy_boto3_iam import IAMClient, IAMServiceResource
- from mypy_boto3_iam.type_defs import CreateRoleResponseTypeDef
from mypy_boto3_organizations.type_defs import TagTypeDef
else:
+ PutItemOutputTableTypeDef = object
IAMClient = object
CreateRoleResponseTypeDef = object
IAMServiceResource = object
TagTypeDef = object
-logger = utils.get_logger()
+logger = logging.getLogger("aft")
-def get_ct_execution_session(
- aft_management_session: Session, ct_management_session: Session, account_id: str
-) -> Session:
- session_name = utils.get_ssm_parameter_value(
- aft_management_session, utils.SSM_PARAM_AFT_SESSION_NAME
- )
- admin_credentials = utils.get_assume_role_credentials(
- ct_management_session,
- utils.build_role_arn(
- ct_management_session, "AWSControlTowerExecution", account_id
- ),
- session_name,
- )
+AFT_EXEC_ROLE = "AWSAFTExecution"
- return utils.get_boto_session(admin_credentials)
+class ProvisionRoles:
+ SERVICE_ROLE_NAME = "AWSAFTService"
+ EXECUTION_ROLE_NAME = "AWSAFTExecution"
-def create_aft_execution_role(
- account_info: Dict[str, Any], session: Session, ct_management_session: Session
-) -> str:
- logger.info("Function Start - create_aft_execution_role")
- role_name = utils.get_ssm_parameter_value(session, utils.SSM_PARAM_AFT_EXEC_ROLE)
- ct_execution_session = get_ct_execution_session(
- session, ct_management_session, account_info["id"]
- )
- exec_iam_client = ct_execution_session.client("iam")
+ def __init__(self, auth: AuthClient, account_id: str) -> None:
+ self.auth = auth
+ self.target_account_id = account_id
- role_name = role_name.split("/")[-1]
+ temp_session = self.auth.get_ct_management_session()
+ self.partition = utils.get_aws_partition(temp_session)
- try:
- role = exec_iam_client.get_role(RoleName=role_name)
- logger.info("Role Exists. Updating...")
- update_aft_role_trust_policy(session, ct_execution_session, role_name)
- set_role_policy(
- ct_execution_session=ct_execution_session,
- role_name=role_name,
- policy_arn="arn:aws:iam::aws:policy/AdministratorAccess",
+ self.ADMINISTRATOR_ACCESS_MANAGED_POLICY_ARN = (
+ f"arn:{self.partition}:iam::aws:policy/AdministratorAccess"
)
- return role["Role"]["Arn"]
- except exec_iam_client.exceptions.NoSuchEntityException:
- logger.info("Role not found in account. Creating...")
- return create_role_in_account(session, ct_execution_session, role_name)
-
-
-def update_aft_role_trust_policy(
- session: Session, ct_execution_session: Session, role_name: str
-) -> None:
- assume_role_policy_document = get_aft_trust_policy_document(session)
- iam_resource: IAMServiceResource = ct_execution_session.resource("iam")
- role = iam_resource.Role(name=role_name)
- role.AssumeRolePolicy().update(PolicyDocument=assume_role_policy_document)
-
-
-def get_aft_trust_policy_document(session: Session) -> str:
- trust_policy_template = os.path.join(
- os.path.dirname(__file__), "templates/aftmanagement.tpl"
- )
- aft_management_account = utils.get_ssm_parameter_value(
- session, utils.SSM_PARAM_ACCOUNT_AFT_MANAGEMENT_ACCOUNT_ID
- )
- with open(trust_policy_template) as trust_policy_file:
- template = trust_policy_file.read()
- template = template.replace("{AftManagementAccount}", aft_management_account)
- return template
-
-
-def create_role_in_account(
- session: Session,
- ct_execution_session: Session,
- role_name: str,
-) -> str:
- logger.info("Function Start - create_role_in_account")
- assume_role_policy_document = get_aft_trust_policy_document(session=session)
- exec_client: IAMClient = ct_execution_session.client("iam")
- logger.info("Creating Role")
- response: CreateRoleResponseTypeDef = exec_client.create_role(
- RoleName=role_name.split("/")[-1],
- AssumeRolePolicyDocument=assume_role_policy_document,
- Description="AFT Execution Role",
- MaxSessionDuration=3600,
- Tags=[
- {"Key": "managed_by", "Value": "AFT"},
- ],
- )
- role_arn = response["Role"]["Arn"]
- logger.info(response)
- set_role_policy(
- ct_execution_session=ct_execution_session,
- role_name=role_name,
- policy_arn="arn:aws:iam::aws:policy/AdministratorAccess",
- )
- # Adding sleep to account for IAM Role creation eventual consistency
- eventual_consistency_sleep = 60
- logger.info(f"Sleeping for {eventual_consistency_sleep}s to ensure Role exists")
- time.sleep(eventual_consistency_sleep)
-
- return role_arn
+ def generate_aft_trust_policy(self) -> str:
+ return json.dumps(
+ {
+ "Version": "2012-10-17",
+ "Statement": [
+ {
+ "Effect": "Allow",
+ "Principal": {
+ "AWS": [
+ # TODO: Do we still need role/AWSAFTAdmin
+ f"arn:{self.partition}:iam::{self.auth.aft_management_account_id}:role/AWSAFTAdmin",
+ f"arn:{self.partition}:sts::{self.auth.aft_management_account_id}:assumed-role/AWSAFTAdmin/AWSAFT-Session",
+ ]
+ },
+ "Action": "sts:AssumeRole",
+ }
+ ],
+ }
+ )
+ def _deploy_role_in_target_account(
+ self, role_name: str, trust_policy: str, policy_arn: str
+ ) -> None:
+ """
+ Since we're creating the AFT roles in the target account, we must first
+ assume AWSControlTowerExecution there. Because that role only trusts
+ federation from the CT Management account, we pass a hub session that
+ has already been federated into the CT Management account.
+ """
+ ct_mgmt_session = self.auth.get_ct_management_session(
+ role_name=ProvisionRoles.SERVICE_ROLE_NAME
+ )
+ ct_mgmt_acc_id = ct_mgmt_session.client("sts").get_caller_identity()["Account"]
+ if self.target_account_id == ct_mgmt_acc_id:
+ target_account_session = ct_mgmt_session
+ else:
+ target_account_session = self.auth.get_target_account_session(
+ account_id=self.target_account_id,
+ hub_session=ct_mgmt_session,
+ role_name=AuthClient.CONTROL_TOWER_EXECUTION_ROLE_NAME,
+ )
+ self._put_role(
+ target_account_session=target_account_session,
+ role_name=role_name,
+ trust_policy=trust_policy,
+ )
+ self._put_policy_on_role(
+ target_account_session=target_account_session,
+ role_name=role_name,
+ policy_arn=policy_arn,
+ )
-def set_role_policy(
- ct_execution_session: Session, role_name: str, policy_arn: str
-) -> None:
- iam_resource: IAMServiceResource = ct_execution_session.resource("iam")
- role = iam_resource.Role(name=role_name)
- for policy in role.attached_policies.all():
- role.detach_policy(PolicyArn=policy.arn)
- logger.info("Attaching Role Policy")
- role.attach_policy(
- PolicyArn=policy_arn,
- )
- return None
+ def _put_role(
+ self,
+ target_account_session: Session,
+ role_name: str,
+ trust_policy: str,
+ max_attempts: int = 20,
+ delay: int = 5,
+ ) -> None:
+ client: IAMClient = target_account_session.client("iam")
+ if self.role_exists(
+ role_name=role_name, target_account_session=target_account_session
+ ):
+ client.update_assume_role_policy(
+ RoleName=role_name, PolicyDocument=trust_policy
+ )
+ else:
+ client.create_role(
+ RoleName=role_name,
+ AssumeRolePolicyDocument=trust_policy,
+ Description="Role for use with Account Factory for Terraform",
+ MaxSessionDuration=3600,
+ Tags=[{"Key": "managed_by", "Value": "AFT"}],
+ )
+ waiter = client.get_waiter("role_exists")
+ waiter.wait(
+ RoleName=role_name,
+ WaiterConfig={"Delay": delay, "MaxAttempts": max_attempts},
+ )
+
+ @staticmethod
+ def role_exists(role_name: str, target_account_session: Session) -> bool:
+ client: IAMClient = target_account_session.client("iam")
+ try:
+ client.get_role(RoleName=role_name)
+ return True
+
+ except ClientError as error:
+ if error.response["Error"]["Code"] == "NoSuchEntity":
+ return False
+ raise
+
+ def _put_policy_on_role(
+ self,
+ target_account_session: Session,
+ role_name: str,
+ policy_arn: str,
+ delay: int = 5,
+ timeout_in_mins: int = 1,
+ ) -> None:
+ if not self.role_policy_is_attached(
+ role_name=role_name,
+ policy_arn=policy_arn,
+ target_account_session=target_account_session,
+ ):
+ resource: IAMServiceResource = target_account_session.resource("iam")
+ role = resource.Role(role_name)
+ role.attach_policy(PolicyArn=policy_arn)
+ timeout = datetime.utcnow() + timedelta(minutes=timeout_in_mins)
+ while datetime.utcnow() < timeout:
+ time.sleep(delay)
+ if self.role_policy_is_attached(
+ role_name=role_name,
+ policy_arn=policy_arn,
+ target_account_session=target_account_session,
+ ):
+ return None
+ return None
+
+ @staticmethod
+ def role_policy_is_attached(
+ role_name: str, policy_arn: str, target_account_session: Session
+ ) -> bool:
+ logger.info(f"Determining if {policy_arn} is attached to {role_name}")
+ resource: IAMServiceResource = target_account_session.resource("iam")
+ role = resource.Role(role_name)
+ policy_iterator = role.attached_policies.all()
+ policy_arns = [policy.arn for policy in policy_iterator]
+ attached = policy_arn in policy_arns
+ logger.info(
+ f"{policy_arn} is {'attached' if attached else 'detached'} to {role_name}"
+ )
+ return attached
+
+ def _ensure_role_can_be_assumed(
+ self, role_name: str, timeout_in_mins: int = 1, delay: int = 5
+ ) -> None:
+ timeout = datetime.utcnow() + timedelta(minutes=timeout_in_mins)
+ while datetime.utcnow() < timeout:
+ if self._can_assume_role(role_name=role_name):
+ return None
+ time.sleep(delay)
+ raise TimeoutError(
+ f"Could not assume role {role_name} within {timeout_in_mins} minutes"
+ )
+ def _can_assume_role(self, role_name: str) -> bool:
+ try:
+ self.auth.get_target_account_session(
+ account_id=self.target_account_id, role_name=role_name
+ )
+ return True
+ except ClientError:
+ return False
-def get_account_info(
- payload: Dict[str, Any], session: Session, ct_management_session: Session
-) -> AftAccountInfo:
- logger.info("Function Start - get_account_info")
+ def deploy_aws_aft_roles(self) -> None:
+ trust_policy = self.generate_aft_trust_policy()
- account_id = None
+ aft_role_names = [
+ ProvisionRoles.SERVICE_ROLE_NAME,
+ ProvisionRoles.EXECUTION_ROLE_NAME,
+ ]
- # Handle a Control Tower Event
- if "account" in payload["control_tower_event"]:
- if (
- payload["control_tower_event"]["detail"]["eventName"]
- == "CreateManagedAccount"
- ):
- account_id = payload["control_tower_event"]["detail"][
- "serviceEventDetails"
- ]["createManagedAccountStatus"]["account"]["accountId"]
- elif (
- payload["control_tower_event"]["detail"]["eventName"]
- == "UpdateManagedAccount"
- ):
- account_id = payload["control_tower_event"]["detail"][
- "serviceEventDetails"
- ]["updateManagedAccountStatus"]["account"]["accountId"]
- if account_id:
- logger.info(f"Account Id [{account_id}] found in control_tower_event")
- return utils.get_account_info(ct_management_session, account_id)
+ logger.info(f"Deploying roles {', '.join(aft_role_names)}")
+ for role_name in aft_role_names:
+ self._deploy_role_in_target_account(
+ role_name=role_name,
+ trust_policy=trust_policy,
+ policy_arn=self.ADMINISTRATOR_ACCESS_MANAGED_POLICY_ARN,
+ )
+ logger.info(f"Deployed {role_name} role")
- elif "id" in payload["account_request"]:
- email = payload["account_request"]["id"]
- logger.info("Account Email: " + email)
- account_id = utils.get_account_id_from_email(ct_management_session, email)
- return utils.get_account_info(ct_management_session, account_id)
+ for role_name in aft_role_names:
+ self._ensure_role_can_be_assumed(role_name=role_name)
+ logger.info(f"Can assume {role_name} role")
- raise Exception("Account was not found")
+ # Guard for IAM eventual consistency
+ time.sleep(65)
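Taken together, `ProvisionRoles` creates or updates both AFT roles through the Control Tower execution role and then blocks until they are actually assumable, absorbing IAM's eventual consistency. A hypothetical driver (the no-argument `AuthClient` construction is an assumption; see `aft_common.auth` for the real interface):

```python
# Hypothetical usage of the class above; the AuthClient construction is an
# assumption and not taken from this diff.
from aft_common.auth import AuthClient
from aft_common.account_provisioning_framework import ProvisionRoles

auth = AuthClient()  # assumed: wired to the AFT management account
provisioner = ProvisionRoles(auth=auth, account_id="111122223333")

# Deploys AWSAFTService and AWSAFTExecution into the target account, attaches
# AdministratorAccess, then polls until both roles can actually be assumed.
provisioner.deploy_aws_aft_roles()
```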
# From persist-metadata Lambda
def persist_metadata(
payload: Dict[str, Any], account_info: Dict[str, str], session: Session
-) -> Dict[str, Any]:
-
- logger.info("Function Start - persist_metadata")
-
+) -> PutItemOutputTableTypeDef:
account_tags = payload["account_request"]["account_tags"]
account_customizations_name = payload["account_request"][
"account_customizations_name"
]
- metadata_table_name = utils.get_ssm_parameter_value(
- session, utils.SSM_PARAM_AFT_DDB_META_TABLE
+ metadata_table_name = aft_common.ssm.get_ssm_parameter_value(
+ session, aft_common.constants.SSM_PARAM_AFT_DDB_META_TABLE
)
item = {
@@ -206,76 +259,22 @@ def persist_metadata(
logger.info("Writing item to " + metadata_table_name)
logger.info(item)
- response = utils.put_ddb_item(session, metadata_table_name, item)
-
- logger.info(response)
+ response = ddb.put_ddb_item(session, metadata_table_name, item)
+ sanitized_response = sanitize_input_for_logging(response)
+ logger.info(sanitized_response)
return response
-AFT_EXEC_ROLE = "AWSAFTExecution"
-
-SSM_PARAMETER_PATH = "/aft/account-request/custom-fields/"
-
-
-def get_ssm_parameters_names_by_path(session: Session, path: str) -> List[str]:
-
- client = session.client("ssm")
- response = client.get_parameters_by_path(Path=path, Recursive=True)
- logger.debug(response)
-
- parameter_names = []
- for p in response["Parameters"]:
- parameter_names.append(p["Name"])
-
- return parameter_names
-
-
-def delete_ssm_parameters(session: Session, parameters: Sequence[str]) -> None:
-
- if len(parameters) > 0:
- client = session.client("ssm")
- response = client.delete_parameters(Names=parameters)
- logger.info(response)
-
-
-def create_ssm_parameters(session: Session, parameters: Dict[str, str]) -> None:
-
- client = session.client("ssm")
-
- for key, value in parameters.items():
- response = client.put_parameter(
- Name=SSM_PARAMETER_PATH + key, Value=value, Type="String", Overwrite=True
- )
- logger.info(response)
-
-
def tag_account(
payload: Dict[str, Any],
account_info: Dict[str, str],
ct_management_session: Session,
rollback: bool,
) -> None:
- logger.info("Start Function - tag_account")
logger.info(payload)
tags = payload["account_request"]["account_tags"]
tag_list: List[TagTypeDef] = [{"Key": k, "Value": v} for k, v in tags.items()]
- utils.tag_org_resource(
- ct_management_session, account_info["id"], tag_list, rollback
- )
-
-def validate_request(payload: Dict[str, Any]) -> bool:
- logger.info("Function Start - validate_request")
- schema_path = os.path.join(
- os.path.dirname(__file__), "schemas/valid_account_request_schema.json"
- )
- with open(schema_path) as schema_file:
- schema_object = json.load(schema_file)
- logger.info("Schema Loaded:" + json.dumps(schema_object))
- validated = jsonschema.validate(payload, schema_object)
- if validated is None:
- logger.info("Request Validated")
- return True
- else:
- raise Exception("Failure validating request.\n{validated}")
+ orgs_agent = OrganizationsAgent(ct_management_session)
+ orgs_agent.tag_org_resource(account_info["id"], tag_list, rollback)
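`tag_account` now delegates to `OrganizationsAgent.tag_org_resource`, whose implementation is outside this diff. Assuming it reduces to the Organizations `TagResource` API, the non-rollback path is roughly:

```python
# Assumption: a plausible reduction of tag_org_resource (non-rollback path)
# to the Organizations API. The real method lives in aft_common.organizations.
from typing import Dict, List
import boto3

def tag_org_account(ct_management_session: boto3.Session,
                    account_id: str, tags: Dict[str, str]) -> None:
    orgs = ct_management_session.client("organizations")
    tag_list: List[Dict[str, str]] = [{"Key": k, "Value": v} for k, v in tags.items()]
    # TagResource upserts the given keys; it does not strip unrelated tags.
    orgs.tag_resource(ResourceId=account_id, Tags=tag_list)
```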
diff --git a/sources/aft-lambda-layer/aft_common/account_request_framework.py b/sources/aft-lambda-layer/aft_common/account_request_framework.py
index b1c4f5ff..a8b15786 100644
--- a/sources/aft-lambda-layer/aft_common/account_request_framework.py
+++ b/sources/aft-lambda-layer/aft_common/account_request_framework.py
@@ -2,129 +2,68 @@
# SPDX-License-Identifier: Apache-2.0
#
import json
-import sys
+import logging
import uuid
from datetime import datetime
-from functools import partial
-from typing import TYPE_CHECKING, Any, Dict, List, Literal, Mapping, Sequence, cast
+from functools import cached_property, partial
+from typing import TYPE_CHECKING, Any, Dict, List, Optional, Sequence, cast
+import aft_common.constants
+import aft_common.service_catalog
+import aft_common.ssm
from aft_common import aft_utils as utils
-from aft_common.account import Account
+from aft_common import ddb, sqs
+from aft_common.account_provisioning_framework import ProvisionRoles
+from aft_common.aft_types import AftInvokeAccountCustomizationPayload
+from aft_common.auth import AuthClient
+from aft_common.exceptions import (
+ NoAccountFactoryPortfolioFound,
+ ServiceRoleNotAssociated,
+)
+from aft_common.organizations import OrganizationsAgent
from boto3.session import Session
if TYPE_CHECKING:
+ from mypy_boto3_dynamodb.type_defs import PutItemOutputTypeDef
+ from mypy_boto3_servicecatalog import ServiceCatalogClient
from mypy_boto3_servicecatalog.type_defs import (
+ ProvisionedProductAttributeTypeDef,
ProvisionedProductDetailTypeDef,
ProvisioningParameterTypeDef,
ProvisionProductOutputTypeDef,
- SearchProvisionedProductsInputRequestTypeDef,
UpdateProvisioningParameterTypeDef,
)
else:
+ SearchProvisionedProductsOutputTypeDef = object
+ PutItemOutputTypeDef = object
ProvisioningParameterTypeDef = object
ProvisionedProductDetailTypeDef = object
ProvisionProductOutputTypeDef = object
UpdateProvisioningParameterTypeDef = object
+ ProvisionedProductAttributeTypeDef = object
+ ServiceCatalogClient = object
-logger = utils.get_logger()
-
-
-def provisioned_product_exists(record: Dict[str, Any]) -> bool:
- # Go get all my accounts from SC (Not all PPs)
- ct_management_session = utils.get_ct_management_session(aft_mgmt_session=Session())
- account_email = utils.unmarshal_ddb_item(record["dynamodb"]["NewImage"])[
- "control_tower_parameters"
- ]["AccountEmail"]
-
- sc_product_search_filter: Mapping[Literal["SearchQuery"], Sequence[str]] = {
- "SearchQuery": [
- "type:CONTROL_TOWER_ACCOUNT",
- ]
- }
- sc_product_allowed_status = ["AVAILABLE", "TAINTED"]
- sc_client = ct_management_session.client("servicecatalog")
-
- logger.info(
- "Searching Account Factory for account with matching email in healthy status"
- )
-
- # Get products with the required type
- response = sc_client.search_provisioned_products(
- Filters=sc_product_search_filter, PageSize=100
- )
-
- pp_ids = [
- pp["Id"]
- for pp in response["ProvisionedProducts"]
- if pp["Status"] in sc_product_allowed_status
- ]
-
- if email_exists_in_batch(account_email, pp_ids, ct_management_session):
- return True
-
- while response.get("NextPageToken") is not None:
- response = sc_client.search_provisioned_products(
- Filters=sc_product_search_filter,
- PageSize=100,
- PageToken=response["NextPageToken"],
- )
-
- pp_ids = [
- pp["Id"]
- for pp in response["ProvisionedProducts"]
- if pp["Status"] in sc_product_allowed_status
- ]
-
- if email_exists_in_batch(account_email, pp_ids, ct_management_session):
- return True
-
- # We processed all batches of accounts with healthy statuses, and did not find a match
- # It is possible that the account exists, but does not have a healthy status
- logger.info(
- "Did not find account with matching email in healthy status in Account Factory"
- )
-
- return False
-
-
-def email_exists_in_batch(
- target_email: str, pps: List[str], ct_management_session: Session
-) -> bool:
- sc_client = ct_management_session.client("servicecatalog")
- for pp in pps:
- pp_email = sc_client.get_provisioned_product_outputs(
- ProvisionedProductId=pp, OutputKeys=["AccountEmail"]
- )["Outputs"][0]["OutputValue"]
- if target_email == pp_email:
- logger.info("Account email match found; provisioned product exists.")
- return True
- return False
+logger = logging.getLogger("aft")
def insert_msg_into_acc_req_queue(
event_record: Dict[Any, Any], new_account: bool, session: Session
) -> None:
- sqs_queue = utils.get_ssm_parameter_value(
- session, utils.SSM_PARAM_ACCOUNT_REQUEST_QUEUE
+ sqs_queue = aft_common.ssm.get_ssm_parameter_value(
+ session, aft_common.constants.SSM_PARAM_ACCOUNT_REQUEST_QUEUE
)
- sqs_queue = utils.build_sqs_url(session=session, queue_name=sqs_queue)
+ sqs_queue = sqs.build_sqs_url(session=session, queue_name=sqs_queue)
message = build_sqs_message(record=event_record, new_account=new_account)
- utils.send_sqs_message(session=session, sqs_url=sqs_queue, message=message)
-
-
-def delete_account_request(record: Dict[str, Any]) -> bool:
- if record["eventName"] == "REMOVE":
- return True
- return False
+ sqs.send_sqs_message(session=session, sqs_url=sqs_queue, message=message)
def control_tower_param_changed(record: Dict[str, Any]) -> bool:
if record["eventName"] == "MODIFY":
- old_image = utils.unmarshal_ddb_item(record["dynamodb"]["OldImage"])[
+ old_image = ddb.unmarshal_ddb_item(record["dynamodb"]["OldImage"])[
"control_tower_parameters"
]
- new_image = utils.unmarshal_ddb_item(record["dynamodb"]["NewImage"])[
+ new_image = ddb.unmarshal_ddb_item(record["dynamodb"]["NewImage"])[
"control_tower_parameters"
]
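The stream handlers above unmarshal DynamoDB stream images out of the attribute-value wire format before comparing them. `ddb.unmarshal_ddb_item` itself is not shown in this diff; boto3's `TypeDeserializer` is the standard way to implement it, so a sketch under that assumption:

```python
# Assumption: ddb.unmarshal_ddb_item plausibly wraps boto3's TypeDeserializer,
# which turns {"S": "..."}-style stream images into plain Python values.
from typing import Any, Dict
from boto3.dynamodb.types import TypeDeserializer

def unmarshal_ddb_item(item: Dict[str, Any]) -> Dict[str, Any]:
    deserializer = TypeDeserializer()
    return {key: deserializer.deserialize(value) for key, value in item.items()}

image = {"id": {"S": "account@example.com"}, "ddb_event_name": {"S": "MODIFY"}}
print(unmarshal_ddb_item(image))  # {'id': 'account@example.com', 'ddb_event_name': 'MODIFY'}
```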
@@ -138,12 +77,12 @@ def build_sqs_message(record: Dict[str, Any], new_account: bool) -> Dict[str, An
message = {}
operation = "ADD" if new_account else "UPDATE"
- new_image = utils.unmarshal_ddb_item(record["dynamodb"]["NewImage"])
+ new_image = ddb.unmarshal_ddb_item(record["dynamodb"]["NewImage"])
message["operation"] = operation
message["control_tower_parameters"] = new_image["control_tower_parameters"]
if record["eventName"] == "MODIFY":
- old_image = utils.unmarshal_ddb_item(record["dynamodb"]["OldImage"])
+ old_image = ddb.unmarshal_ddb_item(record["dynamodb"]["OldImage"])
message["old_control_tower_parameters"] = old_image["control_tower_parameters"]
logger.info(message)
@@ -151,9 +90,9 @@ def build_sqs_message(record: Dict[str, Any], new_account: bool) -> Dict[str, An
def build_aft_account_provisioning_framework_event(
- record: Dict[str, Any]
+ record: Dict[str, Any],
) -> Dict[str, Any]:
- account_request = utils.unmarshal_ddb_item(record["dynamodb"]["NewImage"])
+ account_request = ddb.unmarshal_ddb_item(record["dynamodb"]["NewImage"])
aft_account_provisioning_framework_event = {
"account_request": account_request,
"control_tower_event": {},
@@ -164,65 +103,63 @@ def build_aft_account_provisioning_framework_event(
def put_audit_record(
session: Session, table: str, image: Dict[str, Any], event_name: str
-) -> Dict[str, Any]:
+) -> PutItemOutputTypeDef:
dynamodb = session.client("dynamodb")
item = image
-
datetime_format = "%Y-%m-%dT%H:%M:%S.%f"
current_time = datetime.now().strftime(datetime_format)
item["timestamp"] = {"S": current_time}
-
item["ddb_event_name"] = {"S": event_name}
-
logger.info("Inserting item into " + table + " table: " + str(item))
+ response = dynamodb.put_item(TableName=table, Item=item)
+ sanitized_response = utils.sanitize_input_for_logging(response)
+ logger.info(sanitized_response)
+ return response
- response: Dict[str, Any] = dynamodb.put_item(TableName=table, Item=item)
- logger.info(response)
+def account_name_or_email_in_use(
+ ct_management_session: Session, account_name: str, account_email: str
+) -> bool:
+ orgs = ct_management_session.client(
+ "organizations", config=utils.get_high_retry_botoconfig()
+ )
+ paginator = orgs.get_paginator("list_accounts")
+ for page in paginator.paginate():
+ for account in page["Accounts"]:
+ if account_name == account["Name"]:
+ logger.error(
+ f"Account Name: {account_name} already used in Organizations"
+ )
+ return True
+ if utils.emails_are_equal(account_email, account["Email"]):
+ logger.error(
+ f"Account Email: {account_email} already used in Organizations"
+ )
+ return True
- return response
+ return False
def new_ct_request_is_valid(session: Session, request: Dict[str, Any]) -> bool:
- logger.info("Validating new CT Account Request")
- org_account_emails = utils.get_org_account_emails(session)
- org_account_names = utils.get_org_account_names(session)
-
ct_parameters = request["control_tower_parameters"]
-
- if ct_parameters["AccountEmail"] not in org_account_emails:
- logger.info("Requested AccountEmail is valid: " + ct_parameters["AccountEmail"])
- if ct_parameters["AccountName"] not in org_account_names:
- logger.info(
- "Valid request - AccountName and AccountEmail not already in use"
- )
- return True
- else:
- logger.info(
- "Invalid Request - AccountName already exists in Organization: "
- + ct_parameters["AccountName"]
- )
- return False
- else:
- logger.info(
- f"Invalid Request - AccountEmail already exists in Organization: {ct_parameters['AccountEmail']}"
- )
- return False
+ return not account_name_or_email_in_use(
+ ct_management_session=session,
+ account_name=ct_parameters["AccountName"],
+ account_email=ct_parameters["AccountEmail"],
+ )
def modify_ct_request_is_valid(request: Dict[str, Any]) -> bool:
- logger.info("Validating modify CT Account Request")
-
old_ct_parameters = request.get("old_control_tower_parameters", {})
new_ct_parameters = request["control_tower_parameters"]
- for i in old_ct_parameters.keys():
- if i != "ManagedOrganizationalUnit":
- if old_ct_parameters[i] != new_ct_parameters[i]:
- logger.info(i + " cannot be modified")
+ for param_name in old_ct_parameters.keys():
+ if param_name != "ManagedOrganizationalUnit":
+ if old_ct_parameters[param_name] != new_ct_parameters[param_name]:
+ logger.error(
+ f"Control Tower parameter {utils.sanitize_input_for_logging(param_name)} cannot be modified"
+ )
return False
-
- logger.info("Modify CT Account Request is Valid")
return True
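The net effect of `modify_ct_request_is_valid` is that `ManagedOrganizationalUnit` is the only Control Tower parameter an update request may change; any other difference fails validation. An illustration, run against the function above:

```python
# Illustration of the rule enforced by modify_ct_request_is_valid above:
# only ManagedOrganizationalUnit may differ between old and new parameters.
request = {
    "old_control_tower_parameters": {
        "AccountEmail": "account@example.com",
        "ManagedOrganizationalUnit": "Sandbox",
    },
    "control_tower_parameters": {
        "AccountEmail": "account@example.com",
        "ManagedOrganizationalUnit": "Workloads",  # OU moves are allowed
    },
}
assert modify_ct_request_is_valid(request) is True

request["control_tower_parameters"]["AccountEmail"] = "other@example.com"
assert modify_ct_request_is_valid(request) is False  # any other change is rejected
```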
@@ -232,13 +169,23 @@ def add_header(request: Any, **kwargs: Any) -> None:
)
+def create_provisioned_product_name(account_name: str) -> str:
+ """
+ Strips leading and trailing whitespace from an Account Name, then
+ replaces the remaining space characters with hyphens.
+ """
+ return account_name.strip().replace(" ", "-")
+
+
def create_new_account(
session: Session, ct_management_session: Session, request: Dict[str, Any]
) -> ProvisionProductOutputTypeDef:
client = ct_management_session.client("servicecatalog")
event_system = client.meta.events
- aft_version = utils.get_ssm_parameter_value(session, "/aft/config/aft/version")
+ aft_version = aft_common.ssm.get_ssm_parameter_value(
+ session, aft_common.constants.SSM_PARAM_ACCOUNT_AFT_VERSION
+ )
header_with_aft_version = partial(add_header, version=aft_version)
event_system.register_first("before-sign.*.*", header_with_aft_version)
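`create_new_account` stamps outgoing Service Catalog calls with the AFT version by registering a handler on botocore's event system before requests are signed. A stripped-down sketch of the same pattern (the header name is illustrative; the real `add_header` body is outside this hunk):

```python
# Sketch of the before-sign hook used above. The header name is an example;
# mutating headers in before-sign makes them part of the signed request.
from functools import partial
import boto3

def add_header(request, version, **kwargs):
    request.headers["X-Example-Aft-Version"] = version  # hypothetical header

client = boto3.client("servicecatalog")
client.meta.events.register_first("before-sign.*.*", partial(add_header, version="1.0.0"))
```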
@@ -248,21 +195,27 @@ def create_new_account(
provisioning_parameters.append({"Key": k, "Value": v})
logger.info(
- "Creating new account leveraging parameters: " + str(provisioning_parameters)
+ "Creating new account leveraging parameters: "
+ + utils.sanitize_input_for_logging(str(provisioning_parameters))
+ )
+ provisioned_product_name = create_provisioned_product_name(
+ account_name=request["control_tower_parameters"]["AccountName"]
)
-
response = client.provision_product(
- ProductId=utils.get_ct_product_id(session, ct_management_session),
- ProvisioningArtifactId=utils.get_ct_provisioning_artifact_id(
+ ProductId=aft_common.service_catalog.get_ct_product_id(
session, ct_management_session
),
- ProvisionedProductName=request["control_tower_parameters"]["AccountName"],
+ ProvisioningArtifactId=aft_common.service_catalog.get_ct_provisioning_artifact_id(
+ session, ct_management_session
+ ),
+ ProvisionedProductName=provisioned_product_name,
ProvisioningParameters=cast(
Sequence[ProvisioningParameterTypeDef], provisioning_parameters
),
ProvisionToken=str(uuid.uuid1()),
)
- logger.info(response)
+ sanitized_response = utils.sanitize_input_for_logging(response)
+ logger.info(sanitized_response)
return response
@@ -272,7 +225,9 @@ def update_existing_account(
client = ct_management_session.client("servicecatalog")
event_system = client.meta.events
- aft_version = utils.get_ssm_parameter_value(session, "/aft/config/aft/version")
+ aft_version = aft_common.ssm.get_ssm_parameter_value(
+ session, aft_common.constants.SSM_PARAM_ACCOUNT_AFT_VERSION
+ )
header_with_aft_version = partial(add_header, version=aft_version)
event_system.register_first("before-sign.*.*", header_with_aft_version)
@@ -280,135 +235,223 @@ def update_existing_account(
for k, v in request["control_tower_parameters"].items():
provisioning_parameters.append({"Key": k, "Value": v})
- # Get all provisioned product IDs for "CONTROL_TOWER_ACCOUNT" type
- provisioned_product_ids: List[ProvisionedProductDetailTypeDef] = []
- scan_response = client.scan_provisioned_products(
- AccessLevelFilter={"Key": "Account", "Value": "self"},
- )
-
- pps = scan_response["ProvisionedProducts"]
- while "NextPageToken" in scan_response:
- scan_response = client.scan_provisioned_products(
- AccessLevelFilter={"Key": "Account", "Value": "self"},
- PageToken=scan_response["NextPageToken"],
- )
- pps.extend(scan_response["ProvisionedProducts"])
-
- for p in pps:
- if p["Type"] == "CONTROL_TOWER_ACCOUNT":
- provisioned_product_ids.append(
- {
- "Id": p["Id"],
- "ProvisioningArtifactId": p["ProvisioningArtifactId"],
- }
+ control_tower_email_parameter = request["control_tower_parameters"]["AccountEmail"]
+ target_product: Optional[ProvisionedProductAttributeTypeDef] = None
+ for batch in aft_common.service_catalog.get_healthy_ct_product_batch(
+ ct_management_session=ct_management_session
+ ):
+ for product in batch:
+ product_outputs_response = client.get_provisioned_product_outputs(
+ ProvisionedProductId=product["Id"],
+ OutputKeys=[
+ "AccountEmail",
+ ],
)
+ provisioned_product_email = product_outputs_response["Outputs"][0][
+ "OutputValue"
+ ]
- for p in provisioned_product_ids:
- product_outputs_response = client.get_provisioned_product_outputs(
- ProvisionedProductId=p["Id"],
- OutputKeys=[
- "AccountEmail",
- ],
- )
-
- if (
- product_outputs_response["Outputs"][0]["OutputValue"]
- == request["control_tower_parameters"]["AccountEmail"]
- ):
- target_product_id = p["Id"]
-
- # check to see if the product still exists and is still active
- if utils.ct_provisioning_artifact_is_active(
- session, ct_management_session, p["ProvisioningArtifactId"]
+ if utils.emails_are_equal(
+ provisioned_product_email, control_tower_email_parameter
):
- target_provisioning_artifact_id = p["ProvisioningArtifactId"]
- else:
- target_provisioning_artifact_id = utils.get_ct_provisioning_artifact_id(
- session, ct_management_session
- )
+ target_product = product
+ break
- logger.info(
- "Modifying existing account leveraging parameters: "
- + str(provisioning_parameters)
- + " with provisioned product ID "
- + target_product_id
- )
- update_response = client.update_provisioned_product(
- ProvisionedProductId=target_product_id,
- ProductId=utils.get_ct_product_id(session, ct_management_session),
- ProvisioningArtifactId=target_provisioning_artifact_id,
- ProvisioningParameters=provisioning_parameters,
- UpdateToken=str(uuid.uuid1()),
+ if target_product is None:
+ raise Exception(
+ f"No healthy provisioned product found for {control_tower_email_parameter}"
+ )
+
+ # check to see if the product still exists and is still active
+ if aft_common.service_catalog.ct_provisioning_artifact_is_active(
+ session=session,
+ ct_management_session=ct_management_session,
+ artifact_id=target_product["ProvisioningArtifactId"],
+ ):
+ target_provisioning_artifact_id = target_product["ProvisioningArtifactId"]
+ else:
+ target_provisioning_artifact_id = (
+ aft_common.service_catalog.get_ct_provisioning_artifact_id(
+ session, ct_management_session
)
- logger.info(update_response)
- break
+ )
+
+ logger.info(
+ "Modifying existing account leveraging parameters: "
+ + utils.sanitize_input_for_logging(str(provisioning_parameters))
+ + " with provisioned product ID "
+ + target_product["Id"]
+ )
+ update_response = client.update_provisioned_product(
+ ProvisionedProductId=target_product["Id"],
+ ProductId=aft_common.service_catalog.get_ct_product_id(
+ session, ct_management_session
+ ),
+ ProvisioningArtifactId=target_provisioning_artifact_id,
+ ProvisioningParameters=provisioning_parameters,
+ UpdateToken=str(uuid.uuid1()),
+ )
+ logger.info(utils.sanitize_input_for_logging(update_response))
-def get_account_request_record(session: Session, id: str) -> Dict[str, Any]:
- table_name = utils.get_ssm_parameter_value(
- session, utils.SSM_PARAM_AFT_DDB_REQ_TABLE
+def get_account_request_record(
+ aft_management_session: Session, request_table_id: str
+) -> Dict[str, Any]:
+ table_name = aft_common.ssm.get_ssm_parameter_value(
+ aft_management_session, aft_common.constants.SSM_PARAM_AFT_DDB_REQ_TABLE
+ )
+ logger.info(
+ "Getting record for id " + request_table_id + " in DDB table " + table_name
)
- dynamodb = session.resource("dynamodb")
- table = dynamodb.Table(table_name)
- logger.info("Getting record for id " + id + " in DDB table " + table_name)
- response = table.get_item(Key={"id": id})
- logger.info(response)
- if "Item" in response:
- logger.info("Record found, returning item")
- logger.info(response["Item"])
- response_item: Dict[str, Any] = response["Item"]
- return response_item
+ item = ddb.get_ddb_item(
+ session=aft_management_session,
+ table_name=table_name,
+ primary_key={"id": request_table_id},
+ )
+ if item:
+ logger.info("Record found")
+ logger.info(utils.sanitize_input_for_logging(item))
+ return item
else:
- logger.info("Record not found in DDB table, exiting")
- sys.exit(1)
+ raise Exception(f"Account {request_table_id} not found in {table_name}")
-def is_customizations_event(event: Dict[str, Any]) -> bool:
- if "account_request" in event.keys():
- return True
- else:
+def build_account_customization_payload(
+ ct_management_session: Session,
+ account_id: str,
+ account_request: Dict[str, Any],
+ control_tower_event: Optional[Dict[str, Any]],
+) -> AftInvokeAccountCustomizationPayload:
+ orgs_agent = OrganizationsAgent(ct_management_session)
+
+ # convert ddb strings into proper data type
+ account_request["account_tags"] = json.loads(account_request["account_tags"])
+ account_info = orgs_agent.get_aft_account_info(account_id=account_id)
+
+ if control_tower_event is None:
+ control_tower_event = {}
+
+ account_customization_payload: AftInvokeAccountCustomizationPayload = {
+ "account_info": {"account": account_info},
+ # Unused by AFT but kept for aft-account-provisioning-customizations backwards compatibility
+ "control_tower_event": control_tower_event,
+ "account_request": account_request,
+ "account_provisioning": {"run_create_pipeline": "true"},
+ "customization_request_id": str(uuid.uuid4()),
+ }
+
+ return account_customization_payload
+
+
+class AccountRequest:
+ ACCOUNT_FACTORY_PORTFOLIO_NAME = "AWS Control Tower Account Factory Portfolio"
+
+ def __init__(self, auth: AuthClient) -> None:
+ self.ct_management_session = auth.get_ct_management_session(
+ role_name=ProvisionRoles.SERVICE_ROLE_NAME
+ )
+ self.ct_management_account_id = auth.get_account_id_from_session(
+ session=self.ct_management_session
+ )
+ self.aft_management_session = auth.get_aft_management_session()
+ self.account_factory_product_id = aft_common.service_catalog.get_ct_product_id(
+ session=self.aft_management_session,
+ ct_management_session=self.ct_management_session,
+ )
+
+ self.partition = utils.get_aws_partition(self.ct_management_session)
+
+ @property
+ def service_role_arn(self) -> str:
+ return f"arn:{self.partition}:iam::{self.ct_management_account_id}:role/{ProvisionRoles.SERVICE_ROLE_NAME}"
+
+ @cached_property
+ def account_factory_portfolio_id(self) -> str:
+ """
+ Paginates through all portfolios and returns the ID of the CT Account
+ Factory Portfolio if it exists; raises NoAccountFactoryPortfolioFound otherwise.
+ """
+ client: ServiceCatalogClient = self.ct_management_session.client(
+ "servicecatalog", config=utils.get_high_retry_botoconfig()
+ )
+ paginator = client.get_paginator("list_portfolios")
+ for response in paginator.paginate():
+ for portfolio in response["PortfolioDetails"]:
+ if (
+ portfolio["DisplayName"]
+ == AccountRequest.ACCOUNT_FACTORY_PORTFOLIO_NAME
+ ):
+ return portfolio["Id"]
+
+ raise NoAccountFactoryPortfolioFound(
+ f"No Portfolio ID found for {AccountRequest.ACCOUNT_FACTORY_PORTFOLIO_NAME}"
+ )
+
+ def associate_aft_service_role_with_account_factory(self) -> None:
+ """
+ Associates the AWSAFTService role with the Control Tower Account Factory Service Catalog portfolio
+ """
+ client = self.ct_management_session.client("servicecatalog")
+ client.associate_principal_with_portfolio(
+ PortfolioId=self.account_factory_portfolio_id,
+ PrincipalARN=self.service_role_arn,
+ PrincipalType="IAM",
+ )
+
+ def validate_service_role_associated_with_account_factory(self) -> None:
+ if not self.service_role_associated_with_account_factory():
+ raise ServiceRoleNotAssociated(
+ f"{ProvisionRoles.SERVICE_ROLE_NAME} Role not associated with portfolio {self.account_factory_portfolio_id}"
+ )
+
+ def service_role_associated_with_account_factory(self) -> bool:
+ client = self.ct_management_session.client(
+ "servicecatalog", config=utils.get_high_retry_botoconfig()
+ )
+ paginator = client.get_paginator("list_principals_for_portfolio")
+ for response in paginator.paginate(
+ PortfolioId=self.account_factory_portfolio_id
+ ):
+ if self.service_role_arn in [
+ principal["PrincipalARN"] for principal in response["Principals"]
+ ]:
+ return True
return False
+ def provisioning_threshold_reached(self, threshold: int) -> bool:
+ client: ServiceCatalogClient = self.ct_management_session.client(
+ "servicecatalog", config=utils.get_high_retry_botoconfig()
+ )
+ logger.info("Checking for account provisioning in progress")
+
+ response = client.scan_provisioned_products(
+ AccessLevelFilter={"Key": "Account", "Value": "self"},
+ )
+ pps = response["ProvisionedProducts"]
+ while "NextPageToken" in response:
+ response = client.scan_provisioned_products(
+ AccessLevelFilter={"Key": "Account", "Value": "self"},
+ PageToken=response["NextPageToken"],
+ )
+ pps.extend(response["ProvisionedProducts"])
-def build_invoke_event(
- session: Session,
- ct_management_session: Session,
- event: Dict[str, Any],
- event_type: str,
-) -> Dict[str, Any]:
- account_id: str = ""
- if event_type == "ControlTower":
- if event["detail"]["eventName"] == "CreateManagedAccount":
- account_id = event["detail"]["serviceEventDetails"][
- "createManagedAccountStatus"
- ]["account"]["accountId"]
- elif event["detail"]["eventName"] == "UpdateManagedAccount":
- account_id = event["detail"]["serviceEventDetails"][
- "updateManagedAccountStatus"
- ]["account"]["accountId"]
- account_email = utils.get_account_email_from_id(
- ct_management_session, account_id
+ return self.products_in_progress_at_threshold(
+ threshold=threshold, provisioned_products=pps
)
- ddb_record = get_account_request_record(session, account_email)
- invoke_event = {"control_tower_event": event, "account_request": ddb_record}
- # convert ddb strings into proper data type for json validation
- account_tags = json.loads(ddb_record["account_tags"])
- invoke_event["account_request"]["account_tags"] = account_tags
- invoke_event["account_provisioning"] = {}
- invoke_event["account_provisioning"]["run_create_pipeline"] = "true"
- logger.info("Invoking SFN with Event - ")
- logger.info(invoke_event)
- return invoke_event
-
- elif event_type == "Customizations":
- invoke_event = event
- # convert ddb strings into proper data type for json validation
- account_tags = json.loads(event["account_request"]["account_tags"])
- invoke_event["account_request"]["account_tags"] = account_tags
- invoke_event["account_provisioning"] = {}
- invoke_event["account_provisioning"]["run_create_pipeline"] = "true"
- logger.info("Invoking SFN with Event - ")
- logger.info(invoke_event)
- return invoke_event
-
- raise Exception("Unsupported event type")
+
+ def products_in_progress_at_threshold(
+ self,
+ threshold: int,
+ provisioned_products: List[ProvisionedProductDetailTypeDef],
+ ) -> bool:
+ in_progress_count = 0
+
+ for product in provisioned_products:
+ if product["ProductId"] != self.account_factory_product_id:
+ continue
+ logger.info("Identified CT Product - " + product["Id"])
+ if product["Status"] in ["UNDER_CHANGE", "PLAN_IN_PROGRESS"]:
+ in_progress_count += 1
+
+ return in_progress_count >= threshold
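# --- Editor's note (illustrative sketch, not part of the diff) ---
# A minimal example of the threshold check above. The product records and the
# product ID are hypothetical fixtures; the import path assumes AccountRequest
# lives in aft_common.account_request_framework, as imports later in this
# change suggest.
from types import SimpleNamespace

from aft_common.account_request_framework import AccountRequest

# Stand-in for `self`, carrying only the attribute the method reads:
fake_request = SimpleNamespace(account_factory_product_id="prod-example")
products = [
    {"Id": "pp-1", "ProductId": "prod-example", "Status": "UNDER_CHANGE"},
    {"Id": "pp-2", "ProductId": "prod-example", "Status": "AVAILABLE"},
    {"Id": "pp-3", "ProductId": "prod-other", "Status": "PLAN_IN_PROGRESS"},
]
# Only pp-1 both matches the Account Factory product and is mid-change, so a
# threshold of 1 is reached while a threshold of 2 is not:
assert AccountRequest.products_in_progress_at_threshold(
    fake_request, threshold=1, provisioned_products=products
)
assert not AccountRequest.products_in_progress_at_threshold(
    fake_request, threshold=2, provisioned_products=products
)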
diff --git a/sources/aft-lambda-layer/aft_common/account_request_record_handler.py b/sources/aft-lambda-layer/aft_common/account_request_record_handler.py
new file mode 100644
index 00000000..e7b08311
--- /dev/null
+++ b/sources/aft-lambda-layer/aft_common/account_request_record_handler.py
@@ -0,0 +1,154 @@
+# Copyright Amazon.com, Inc. or its affiliates. All rights reserved.
+# SPDX-License-Identifier: Apache-2.0
+#
+import json
+import logging
+from typing import Any, Dict
+
+import aft_common.constants
+from aft_common import aft_utils as utils
+from aft_common import ddb, ssm
+from aft_common.account_request_framework import (
+ build_account_customization_payload,
+ build_aft_account_provisioning_framework_event,
+ control_tower_param_changed,
+ insert_msg_into_acc_req_queue,
+)
+from aft_common.auth import AuthClient
+from aft_common.organizations import OrganizationsAgent
+from aft_common.service_catalog import provisioned_product_exists
+from aft_common.shared_account import shared_account_request
+
+logger = logging.getLogger("aft")
+
+
+class AccountRequestRecordHandler:
+ def __init__(self, auth: AuthClient, event: Dict[str, Any]) -> None:
+ AccountRequestRecordHandler._validate_event(event=event)
+ self._aft_management_session = auth.get_aft_management_session()
+ self._ct_management_session = auth.get_ct_management_session()
+ self.record = event["Records"][0]
+ self._old_image = self.record["dynamodb"].get("OldImage")
+ self._new_image = self.record["dynamodb"].get("NewImage")
+ self.control_tower_parameters_updated = self._control_tower_parameters_changed()
+ self.auth = auth
+
+ @property
+ def is_update_action(self) -> bool:
+ return all([self._old_image, self._new_image])
+
+ @property
+ def is_create_action(self) -> bool:
+ return self._old_image is None and self._new_image is not None
+
+ def _get_account_id(self, account_request: Dict[str, Any]) -> str:
+ email = account_request["id"]
+ orgs = OrganizationsAgent(ct_management_session=self._ct_management_session)
+ return orgs.get_account_id_from_email(email=email)
+
+ @staticmethod
+ def _validate_event(event: Dict[str, Any]) -> None:
+ try:
+ if event["Records"][0]["eventSource"] != "aws:dynamodb":
+ raise Exception("Invalid event source")
+ except (KeyError, IndexError):
+ raise Exception("Invalid event structure")
+
+ def handle_remove(self) -> None:
+ account_request = ddb.unmarshal_ddb_item(self._old_image)
+ payload = {"account_request": account_request}
+
+ lambda_name = ssm.get_ssm_parameter_value(
+ self._aft_management_session,
+ aft_common.constants.SSM_PARAM_AFT_CLEANUP_RESOURCES_LAMBDA,
+ )
+ utils.invoke_lambda(
+ self._aft_management_session,
+ lambda_name,
+ json.dumps(payload).encode(),
+ )
+ return None
+
+ def handle_account_request(self, new_account: bool) -> None:
+ insert_msg_into_acc_req_queue(
+ event_record=self.record,
+ new_account=new_account,
+ session=self._aft_management_session,
+ )
+
+ def _control_tower_parameters_changed(self) -> bool:
+ if self.record["eventName"] == "MODIFY" and self.is_update_action:
+ old_image = ddb.unmarshal_ddb_item(self._old_image)[
+ "control_tower_parameters"
+ ]
+ new_image = ddb.unmarshal_ddb_item(self._new_image)[
+ "control_tower_parameters"
+ ]
+ return bool(old_image != new_image)
+ return False
+
+ def handle_customization_request(self) -> None:
+ account_request = ddb.unmarshal_ddb_item(self.record["dynamodb"]["NewImage"])
+ account_id = self._get_account_id(
+ account_request=account_request
+ ) # Fetch from metadata/orgs?
+
+ account_provisioning_payload = build_aft_account_provisioning_framework_event(
+ self.record
+ )
+ account_customization_payload = build_account_customization_payload(
+ ct_management_session=self._ct_management_session,
+ account_id=account_id,
+ account_request=account_request,
+ control_tower_event=account_provisioning_payload,
+ )
+ account_provisioning_stepfunction = ssm.get_ssm_parameter_value(
+ self._aft_management_session, aft_common.constants.SSM_PARAM_AFT_SFN_NAME
+ )
+
+ utils.invoke_step_function(
+ session=self._aft_management_session,
+ sfn_name=account_provisioning_stepfunction,
+ input=json.dumps(account_customization_payload),
+ )
+
+ def process_request(self) -> None:
+ # Removing account from AFT
+ if self.record["eventName"] == "REMOVE":
+ logger.info("Delete account request received")
+ self.handle_remove()
+
+ # Triggering customization for shared account
+ elif not control_tower_param_changed(
+ record=self.record
+ ) and shared_account_request(event_record=self.record, auth=self.auth):
+ logger.info("Customization request received")
+ self.handle_customization_request()
+
+ # Vending new account
+ elif self.is_create_action and not provisioned_product_exists(
+ record=self.record
+ ):
+ logger.info("New account request received")
+ self.handle_account_request(new_account=True)
+
+ # Importing existing CT account into AFT and triggering customization
+ elif (
+ self.is_create_action
+ and provisioned_product_exists(record=self.record)
+ and not self.control_tower_parameters_updated
+ ):
+ logger.info("Customization request received for existing CT account")
+ self.handle_customization_request()
+
+ # Updating CT parameter for existing AFT account
+ elif self.is_update_action and self.control_tower_parameters_updated:
+ logger.info("Control Tower parameter update request received")
+ self.handle_account_request(new_account=False)
+
+ # Triggering customization for existing AFT account
+ elif self.is_update_action and not self.control_tower_parameters_updated:
+ logger.info("Customization request received")
+ self.handle_customization_request()
+ else:
+ raise Exception("Unsupported account request")
diff --git a/sources/aft-lambda-layer/aft_common/types.py b/sources/aft-lambda-layer/aft_common/aft_types.py
similarity index 51%
rename from sources/aft-lambda-layer/aft_common/types.py
rename to sources/aft-lambda-layer/aft_common/aft_types.py
index d302545a..a81c596f 100644
--- a/sources/aft-lambda-layer/aft_common/types.py
+++ b/sources/aft-lambda-layer/aft_common/aft_types.py
@@ -1,7 +1,7 @@
# Copyright Amazon.com, Inc. or its affiliates. All rights reserved.
# SPDX-License-Identifier: Apache-2.0
#
-from typing import Literal, TypedDict
+from typing import Any, Dict, Literal, TypedDict
class AftAccountInfo(TypedDict):
@@ -13,6 +13,13 @@ class AftAccountInfo(TypedDict):
status: str
parent_id: str
parent_type: str
- org_name: str
type: Literal["account"]
vendor: Literal["aws"]
+
+
+class AftInvokeAccountCustomizationPayload(TypedDict):
+ account_info: Dict[Literal["account"], AftAccountInfo]
+ account_request: Dict[str, Any]
+ control_tower_event: Dict[str, Any]
+ account_provisioning: Dict[str, Any]
+ customization_request_id: str
diff --git a/sources/aft-lambda-layer/aft_common/aft_utils.py b/sources/aft-lambda-layer/aft_common/aft_utils.py
index f4d8e6d9..df59ee78 100644
--- a/sources/aft-lambda-layer/aft_common/aft_utils.py
+++ b/sources/aft-lambda-layer/aft_common/aft_utils.py
@@ -1,551 +1,125 @@
# Copyright Amazon.com, Inc. or its affiliates. All rights reserved.
# SPDX-License-Identifier: Apache-2.0
#
-import json
-import os
-import uuid
+import logging
+import random
+import re
+import time
+from functools import wraps
from typing import (
IO,
TYPE_CHECKING,
Any,
+ Callable,
Dict,
- List,
- Literal,
+ Iterable,
Optional,
Sequence,
Union,
- cast,
)
-import attr
-import boto3
-from boto3.dynamodb.types import TypeDeserializer
from boto3.session import Session
+from botocore.config import Config
+from botocore.exceptions import ClientError
from botocore.response import StreamingBody
if TYPE_CHECKING:
from mypy_boto3_lambda import LambdaClient
from mypy_boto3_lambda.type_defs import InvocationResponseTypeDef
- from mypy_boto3_organizations import OrganizationsClient
- from mypy_boto3_organizations.type_defs import TagTypeDef
- from mypy_boto3_servicecatalog import ServiceCatalogClient
- from mypy_boto3_sns import SNSClient
- from mypy_boto3_sns.type_defs import PublishResponseTypeDef
- from mypy_boto3_sqs import SQSClient
- from mypy_boto3_sqs.type_defs import MessageTypeDef, SendMessageResultTypeDef
from mypy_boto3_stepfunctions import SFNClient
from mypy_boto3_stepfunctions.type_defs import StartExecutionOutputTypeDef
from mypy_boto3_sts import STSClient
- from mypy_boto3_sts.type_defs import (
- AssumeRoleRequestRequestTypeDef,
- CredentialsTypeDef,
- )
else:
LambdaClient = object
InvocationResponseTypeDef = object
OrganizationsClient = object
- TagTypeDef = object
ServiceCatalogClient = object
- SNSClient = object
- PublishResponseTypeDef = object
- SQSClient = object
- MessageTypeDef = object
- SendMessageResultTypeDef = object
SFNClient = object
StartExecutionOutputTypeDef = object
STSClient = object
- CredentialsTypeDef = object
-
-from aft_common.types import AftAccountInfo
-
-from .logger import Logger
-
-SSM_PARAM_AFT_DDB_META_TABLE = "/aft/resources/ddb/aft-request-metadata-table-name"
-SSM_PARAM_AFT_SESSION_NAME = "/aft/resources/iam/aft-session-name"
-SSM_PARAM_AFT_ADMIN_ROLE = "/aft/resources/iam/aft-administrator-role-name"
-SSM_PARAM_AFT_EXEC_ROLE = "/aft/resources/iam/aft-execution-role-name"
-SSM_PARAM_SC_PRODUCT_NAME = "/aft/resources/sc/account-factory-product-name"
-SSM_PARAM_SNS_TOPIC_ARN = "/aft/account/aft-management/sns/topic-arn"
-SSM_PARAM_SNS_FAILURE_TOPIC_ARN = "/aft/account/aft-management/sns/failure-topic-arn"
-SSM_PARAM_ACCOUNT_REQUEST_QUEUE = "/aft/resources/sqs/aft-request-queue-name"
-SSM_PARAM_AFT_ACCOUNT_PROVISIONING_FRAMEWORK_LAMBDA = (
- "/aft/resources/lambda/aft-invoke-aft-account-provisioning-framework"
-)
-SSM_PARAM_AFT_EVENTS_TABLE = "/aft/resources/ddb/aft-controltower-events-table-name"
-SSM_PARAM_AFT_SFN_NAME = (
- "/aft/account/aft-management/sfn/aft-account-provisioning-framework-sfn-name"
-)
-SSM_PARAM_AFT_DDB_REQ_TABLE = "/aft/resources/ddb/aft-request-table-name"
-SSM_PARAM_AFT_DDB_AUDIT_TABLE = "/aft/resources/ddb/aft-request-audit-table-name"
-SSM_PARAM_AFT_REQUEST_ACTION_TRIGGER_FUNCTION_ARN = (
- "/aft/resources/lambda/aft-account-request-action-trigger-function-arn"
-)
-SSM_PARAM_AFT_ACCOUNT_REQUEST_AUDIT_TRIGGER_FUNCTION_ARN = (
- "/aft/resources/lambda/aft-account-request-audit-trigger-function-arn"
-)
-SSM_PARAM_AFT_ACCOUNT_REQUEST_PROCESSOR_FUNCTION_ARN = (
- "/aft/resources/lambda/aft-account-request-processor-function-arn"
-)
-SSM_PARAM_AFT_CONTROLTOWER_EVENT_LOGGER_FUNCTION_ARN = (
- "/aft/resources/lambda/aft-controltower-event-logger-function-arn"
-)
-SSM_PARAM_AFT_INVOKE_AFT_ACCOUNT_PROVISIONING_FRAMEWORK_FUNCTION_ARN = (
- "/aft/resources/lambda/aft-invoke-aft-account-provisioning-framework-function-arn"
-)
-SSM_PARAM_AFT_ACCOUNT_PROVISIONING_FRAMEWORK_VALIDATE_REQUEST_FUNCTION_ARN = "/aft/resources/lambda/aft-account-provisioning-framework-validate-request-function-arn"
-SSM_PARAM_AFT_ACCOUNT_PROVISIONING_FRAMEWORK_GET_ACCOUNT_INFO_FUNCTION_ARN = "/aft/resources/lambda/aft-account-provisioning-framework-get-account-info-function-arn"
-SSM_PARAM_AFT_ACCOUNT_PROVISIONING_FRAMEWORK_CREATE_ROLE_FUNCTION_ARN = (
- "/aft/resources/lambda/aft-account-provisioning-framework-create-role-function-arn"
-)
-SSM_PARAM_AFT_ACCOUNT_PROVISIONING_FRAMEWORK_TAG_ACCOUNT_FUNCTION_ARN = (
- "/aft/resources/lambda/aft-account-provisioning-framework-tag-account-function-arn"
-)
-SSM_PARAM_AFT_ACCOUNT_PROVISIONING_FRAMEWORK_PERSIST_METADATA_FUNCTION_ARN = "/aft/resources/lambda/aft-account-provisioning-framework-persist-metadata-function-arn"
-SSM_PARAM_AFT_ACCOUNT_PROVISIONING_FRAMEWORK_NOTIFY_ERROR_FUNCTION_ARN = (
- "/aft/resources/lambda/aft-account-provisioning-framework-notify-error-function-arn"
-)
-SSM_PARAM_AFT_ACCOUNT_PROVISIONING_FRAMEWORK_NOTIFY_SUCCESS_FUNCTION_ARN = "/aft/resources/lambda/aft-account-provisioning-framework-notify-success-function-arn"
-SSM_PARAM_AFT_MAXIMUM_CONCURRENT_CUSTOMIZATIONS = (
- "/aft/config/customizations/maximum_concurrent_customizations"
-)
-SSM_PARAM_FEATURE_CLOUDTRAIL_DATA_EVENTS_ENABLED = (
- "/aft/config/feature/cloudtrail-data-events-enabled"
-)
-SSM_PARAM_FEATURE_ENTERPRISE_SUPPORT_ENABLED = (
- "/aft/config/feature/enterprise-support-enabled"
-)
-SSM_PARAM_FEATURE_DEFAULT_VPCS_ENABLED = (
- "/aft/config/feature/delete-default-vpcs-enabled"
-)
-SSM_PARAM_ACCOUNT_CT_MANAGEMENT_ACCOUNT_ID = "/aft/account/ct-management/account-id"
-SSM_PARAM_ACCOUNT_AUDIT_ACCOUNT_ID = "/aft/account/audit/account-id"
-SSM_PARAM_ACCOUNT_LOG_ARCHIVE_ACCOUNT_ID = "/aft/account/log-archive/account-id"
-SSM_PARAM_ACCOUNT_AFT_MANAGEMENT_ACCOUNT_ID = "/aft/account/aft-management/account-id"
-
-
-# INIT
-def get_logger() -> Logger:
- # initialise logger
- if "log_level" in os.environ.keys():
- log_level = os.environ["log_level"]
- else:
- # presumed local debugging
- log_level = "info"
- logger = Logger(loglevel=log_level)
- logger.info("Logger started.")
- logger.info(str(os.environ))
- return logger
-
-
-logger = get_logger()
-
-
-def get_ssm_parameter_value(session: Session, param: str, decrypt: bool = False) -> str:
- client = session.client("ssm")
- logger.info("Getting SSM Parameter " + param)
-
- response = client.get_parameter(Name=param, WithDecryption=decrypt)
-
- param_value: str = response["Parameter"]["Value"]
- return param_value
-
-
-def put_ddb_item(
- session: Session, table_name: str, item: Dict[str, str]
-) -> Dict[str, Any]:
- dynamodb = session.resource("dynamodb")
- table = dynamodb.Table(table_name)
-
- logger.info("Inserting item into " + table_name + " table: " + str(item))
-
- response: Dict[str, Any] = table.put_item(Item=item)
-
- logger.info(response)
-
- return response
-
-
-def get_account_id_from_email(ct_management_session: Session, email: str) -> str:
- logger.info("begin get_account_by_email")
- accounts = list_accounts(ct_management_session)
- account = [a for a in accounts if a["email"] == email]
- logger.info(account)
- if len(account):
- return account[0]["id"]
- else:
- raise Exception("Account not found for email")
-
-
-def list_accounts(ct_management_session: Session) -> List[AftAccountInfo]:
- client: OrganizationsClient = ct_management_session.client("organizations")
- response = client.list_accounts()
- accounts = response["Accounts"]
- account_info: List[AftAccountInfo] = []
- while "NextToken" in response:
- response = client.list_accounts(NextToken=response["NextToken"])
- accounts.extend(response["Accounts"])
-
- for a in accounts:
- account_info.append(get_account_info(ct_management_session, a["Id"]))
-
- return account_info
-
-
-def get_account_info(ct_management_session: Session, account_id: str) -> AftAccountInfo:
- logger.info(f"Getting details for {account_id}")
-
- client: OrganizationsClient = ct_management_session.client("organizations")
- describe_response = client.describe_account(AccountId=account_id)
-
- account = describe_response["Account"]
- list_response = client.list_parents(ChildId=account["Id"])
- parents = list_response["Parents"]
- parent_id = parents[0]["Id"]
- parent_type = parents[0]["Type"]
- org_name = ""
-
- if parent_type == "ORGANIZATIONAL_UNIT":
- org_details = client.describe_organizational_unit(
- OrganizationalUnitId=parent_id
- )
- org_name = org_details["OrganizationalUnit"]["Name"]
-
- return AftAccountInfo(
- id=account["Id"],
- email=account["Email"],
- name=account["Name"],
- joined_method=account["JoinedMethod"],
- joined_date=str(account["JoinedTimestamp"]),
- status=account["Status"],
- parent_id=parent_id,
- parent_type=parent_type,
- org_name=org_name,
- type="account",
- vendor="aws",
- )
-
-
-def get_assume_role_credentials(
- session: Session,
- role_arn: str,
- session_name: str,
- external_id: Optional[str] = None,
- session_duration: int = 900,
- session_policy: Optional[str] = None,
-) -> CredentialsTypeDef:
- client: STSClient = session.client("sts")
-
- assume_role_params: AssumeRoleRequestRequestTypeDef = {
- "RoleArn": role_arn,
- "RoleSessionName": session_name,
- "DurationSeconds": session_duration,
- }
-
- if external_id:
- assume_role_params.update({"ExternalId": external_id})
-
- if session_policy:
- assume_role_params.update({"Policy": session_policy})
-
- assume_role_response = client.assume_role(**assume_role_params)
-
- credentials = assume_role_response["Credentials"]
- return credentials
-
-
-def build_role_arn(
- session: Session, role_name: str, account_id: Optional[str] = None
-) -> str:
- account_info = get_session_info(session)
- role_arn: str
- if not account_id:
- role_arn = "arn:aws:iam::" + account_info["account"] + ":role/" + role_name
- return role_arn
- else:
- role_arn = "arn:aws:iam::" + account_id + ":role/" + role_name
- return role_arn
-
-
-def get_session_info(session: Session) -> Dict[str, str]:
- client: STSClient = session.client("sts")
- response = client.get_caller_identity()
-
- account_info = {"region": session.region_name, "account": response["Account"]}
-
- return account_info
-
-
-def get_boto_session(credentials: CredentialsTypeDef) -> Session:
- return boto3.session.Session(
- aws_access_key_id=credentials["AccessKeyId"],
- aws_secret_access_key=credentials["SecretAccessKey"],
- aws_session_token=credentials["SessionToken"],
- )
-
-
-def get_ct_management_session(aft_mgmt_session: Session) -> Session:
- ct_mgmt_account = get_ssm_parameter_value(
- aft_mgmt_session, SSM_PARAM_ACCOUNT_CT_MANAGEMENT_ACCOUNT_ID
- )
- administrator_role = get_ssm_parameter_value(
- aft_mgmt_session, SSM_PARAM_AFT_ADMIN_ROLE
- )
- execution_role = get_ssm_parameter_value(aft_mgmt_session, SSM_PARAM_AFT_EXEC_ROLE)
- session_name = get_ssm_parameter_value(aft_mgmt_session, SSM_PARAM_AFT_SESSION_NAME)
-
- # Assume aws-aft-AdministratorRole locally
- local_creds = get_assume_role_credentials(
- aft_mgmt_session,
- build_role_arn(aft_mgmt_session, administrator_role),
- session_name,
- )
- local_assumed_session = get_boto_session(local_creds)
- # Assume AWSAFTExecutionRole in CT management
- ct_mgmt_creds = get_assume_role_credentials(
- local_assumed_session,
- build_role_arn(aft_mgmt_session, execution_role, ct_mgmt_account),
- session_name,
- )
- return get_boto_session(ct_mgmt_creds)
-
-
-def get_aft_admin_role_session(session: Session) -> Session:
- administrator_role = get_ssm_parameter_value(session, SSM_PARAM_AFT_ADMIN_ROLE)
- execution_role = get_ssm_parameter_value(session, SSM_PARAM_AFT_EXEC_ROLE)
- session_name = get_ssm_parameter_value(session, SSM_PARAM_AFT_SESSION_NAME)
-
- # Assume aws-aft-AdministratorRole locally
- local_creds = get_assume_role_credentials(
- session, build_role_arn(session, administrator_role), session_name
- )
-
- return get_boto_session(local_creds)
-
-
-def get_log_archive_session(session: Session) -> Session:
- log_archive_account = get_ssm_parameter_value(
- session, SSM_PARAM_ACCOUNT_LOG_ARCHIVE_ACCOUNT_ID
- )
- administrator_role = get_ssm_parameter_value(session, SSM_PARAM_AFT_ADMIN_ROLE)
- execution_role = get_ssm_parameter_value(session, SSM_PARAM_AFT_EXEC_ROLE)
- session_name = get_ssm_parameter_value(session, SSM_PARAM_AFT_SESSION_NAME)
-
- # Assume aws-aft-AdministratorRole locally
- local_creds = get_assume_role_credentials(
- session, build_role_arn(session, administrator_role), session_name
- )
- local_assumed_session = get_boto_session(local_creds)
- # Assume AWSAFTExecutionRole in CT management
- log_archive_creds = get_assume_role_credentials(
- local_assumed_session,
- build_role_arn(session, execution_role, log_archive_account),
- session_name,
- )
- return get_boto_session(log_archive_creds)
-
-
-def get_ct_product_id(session: Session, ct_management_session: Session) -> str:
- client: ServiceCatalogClient = ct_management_session.client("servicecatalog")
- sc_product_name = get_ssm_parameter_value(session, SSM_PARAM_SC_PRODUCT_NAME)
- logger.info("Getting product ID for " + sc_product_name)
-
- response = client.describe_product_as_admin(Name=sc_product_name)
- product_id: str = response["ProductViewDetail"]["ProductViewSummary"]["ProductId"]
- logger.info(product_id)
- return product_id
-
-
-def get_ct_provisioning_artifact_id(
- session: Session, ct_management_session: Session
-) -> str:
- client: ServiceCatalogClient = ct_management_session.client("servicecatalog")
- sc_product_name = get_ssm_parameter_value(session, SSM_PARAM_SC_PRODUCT_NAME)
- logger.info("Getting provisioning artifact ID for " + sc_product_name)
-
- response = client.describe_product_as_admin(Name=sc_product_name)
- provisioning_artifacts = response["ProvisioningArtifactSummaries"]
- for pa in provisioning_artifacts:
- if ct_provisioning_artifact_is_active(session, ct_management_session, pa["Id"]):
- pa_id: str = pa["Id"]
- logger.info("Using provisioning artifact ID: " + pa_id)
- return pa_id
-
- raise Exception("No Provisioning Artifact ID found")
-
-
-def ct_provisioning_artifact_is_active(
- session: Session, ct_management_session: Session, artifact_id: str
-) -> bool:
- client: ServiceCatalogClient = ct_management_session.client("servicecatalog")
- sc_product_name = get_ssm_parameter_value(session, SSM_PARAM_SC_PRODUCT_NAME)
- logger.info("Checking provisioning artifact ID " + artifact_id)
- try:
- response = client.describe_provisioning_artifact(
- ProductName=sc_product_name, ProvisioningArtifactId=artifact_id
- )
- provisioning_artifact = response["ProvisioningArtifactDetail"]
- except client.exceptions.ResourceNotFoundException:
- logger.info("Provisioning artifact id: " + artifact_id + " does not exist")
- return False
-
- if provisioning_artifact["Active"]:
- logger.info(provisioning_artifact["Id"] + " is active")
- return True
- else:
- logger.info(provisioning_artifact["Id"] + " is NOT active")
- return False
-
-
-def product_provisioning_in_progress(
- ct_management_session: Session, product_id: str
-) -> bool:
- client: ServiceCatalogClient = ct_management_session.client("servicecatalog")
-
- logger.info("Checking for product provisioning in progress")
-
- response = client.scan_provisioned_products(
- AccessLevelFilter={"Key": "Account", "Value": "self"},
- )
- pps = response["ProvisionedProducts"]
- while "NextPageToken" in response:
- response = client.scan_provisioned_products(
- AccessLevelFilter={"Key": "Account", "Value": "self"},
- PageToken=response["NextPageToken"],
- )
- pps.extend(response["ProvisionedProducts"])
-
- for p in pps:
- if p["ProductId"] == product_id:
- logger.info("Identified CT Product - " + p["Id"])
- if p["Status"] in ["UNDER_CHANGE", "PLAN_IN_PROGRESS"]:
- logger.info("Product provisioning in Progress")
- return True
-
- logger.info("No product provisioning in Progress")
- return False
-
-
-def build_sqs_url(session: Session, queue_name: str) -> str:
- account_info = get_session_info(session)
- url = (
- "https://sqs."
- + account_info["region"]
- + ".amazonaws.com/"
- + account_info["account"]
- + "/"
- + queue_name
- )
- return url
-
-
-def receive_sqs_message(session: Session, sqs_queue: str) -> Optional[MessageTypeDef]:
- client: SQSClient = session.client("sqs")
- logger.info("Fetching SQS Messages from " + build_sqs_url(session, sqs_queue))
-
- response = client.receive_message(
- QueueUrl=build_sqs_url(session, sqs_queue),
- MaxNumberOfMessages=1,
- ReceiveRequestAttemptId=str(uuid.uuid1()),
- )
- if "Messages" in response.keys():
- logger.info("There are messages pending processing")
- message = response["Messages"][0]
- logger.info("Message retrieved")
- logger.info(message)
- return message
- else:
- logger.info("There are no messages pending processing")
- return None
-
-
-def get_org_account_emails(ct_management_session: Session) -> List[str]:
- accounts = list_accounts(ct_management_session)
-
- return [a["email"] for a in accounts]
-
-def get_org_account_names(ct_management_session: Session) -> List[str]:
- accounts = list_accounts(ct_management_session)
-
- return [a["name"] for a in accounts]
-
-
-def get_org_ou_names(session: Session) -> List[str]:
- client: OrganizationsClient = session.client("organizations")
- logger.info("Listing roots in the Organization")
- root_id: str = ""
- list_roots_response = client.list_roots()
- roots = list_roots_response["Roots"]
- while "NextToken" in list_roots_response:
- list_roots_response = client.list_roots(
- NextToken=list_roots_response["NextToken"]
+logger = logging.getLogger("aft")
+
+BOTO3_CLIENT_ERROR_THROTTLING_CODES = [
+ "ThrottlingException",
+ "TooManyRequestsException",
+ "RequestLimitExceeded",
+]
+
+
+def resubmit_request_on_boto_throttle(
+ func: Callable[..., Any],
+ max_requests: int = 3,
+ max_sleep_sec: int = 16,
+) -> Callable[..., Any]:
+ """
+ Decorator to automatically resubmit boto3-based API calls on throttling errors.
+
+ On a throttling error, this decorator re-submits the call with up to
+ max_requests fresh requests, NOT counting boto3's built-in per-request
+ retries. Each fresh request is independent of the previous ones, so
+ duplicate work may be performed. As such, this decorator should ONLY be
+ used with read calls or with write calls that are idempotent.
+
+ For example, with a boto3 config of 3 retries (4 attempts per request) and
+ max_requests = 3, a persistently throttled call makes up to 16 calls to
+ the AWS service: the original request plus 3 resubmissions, each attempted
+ up to 4 times.
+ """
+
+ @wraps(func)
+ def wrapper(*args: Any, **kwargs: Any) -> Any:
+ jitter = float(
+ f"{random.random():.3f}" # nosec B311: Not using random numbers in a security context
)
- roots.extend(list_roots_response["Roots"])
-
- for r in roots:
- if r["Name"] == "Root":
- root_id = r["Id"]
- else:
- raise Exception("Root called 'Root' was not found")
+ retry_sleep_sec = min(2 + jitter, max_sleep_sec)
- logger.info("Listing OUs for Root " + root_id)
+ requests = 0
+ while True:
+ try:
+ return func(*args, **kwargs)
+ except ClientError as e:
+ if e.response["Error"]["Code"] in BOTO3_CLIENT_ERROR_THROTTLING_CODES:
+ sanitized_max_requests = sanitize_input_for_logging(max_requests)
+ sanitized_retry_sleep_sec = sanitize_input_for_logging(
+ retry_sleep_sec
+ )
+ if requests >= max_requests:
+ logger.info(
+ f"Exceeded max fresh-request retry attempts ({sanitized_max_requests})"
+ )
+ raise e
+ logger.info(
+ f"Exceeded max boto3 retries on previous request. Retrying with fresh request in {sanitized_retry_sleep_sec} seconds."
+ )
+ requests += 1
+ time.sleep(retry_sleep_sec)
- list_ou_response = client.list_organizational_units_for_parent(ParentId=root_id)
- ous = list_ou_response["OrganizationalUnits"]
+ # Clipped exponential backoff with 0-1sec random jitter
+ retry_sleep_sec = retry_sleep_sec * 2 + jitter
+ retry_sleep_sec = min(retry_sleep_sec, max_sleep_sec)
- logger.info(ous)
+ else:
+ raise e
- ou_names = []
+ except Exception as e:
+ # Raise on all other exceptions
+ raise e
- for o in ous:
- ou_names.append(o["Name"])
+ return wrapper
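# --- Editor's note (illustrative sketch, not part of the diff) ---
# Minimal usage of the decorator above, applied to a side-effect-free read so
# the duplicate-work caveat in the docstring is satisfied.
from typing import Any

import boto3


@resubmit_request_on_boto_throttle
def list_dynamodb_table_names() -> Any:
    # Idempotent read: safe to resubmit on throttling
    return boto3.client("dynamodb").list_tables()["TableNames"]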
- logger.info("OU Names: " + str(ou_names))
- return ou_names
-
-def delete_sqs_message(session: Session, message: MessageTypeDef) -> None:
- client: SQSClient = session.client("sqs")
- sqs_queue = get_ssm_parameter_value(session, SSM_PARAM_ACCOUNT_REQUEST_QUEUE)
- receipt_handle = message["ReceiptHandle"]
- logger.info("Deleting SQS message with handle " + receipt_handle)
- client.delete_message(
- QueueUrl=build_sqs_url(session, sqs_queue), ReceiptHandle=receipt_handle
+def get_high_retry_botoconfig() -> Config:
+ # https://boto3.amazonaws.com/v1/documentation/api/latest/guide/configuration.html#using-the-config-object
+ return Config(
+ retries={
+ "total_max_attempts": 16, # original request + 15 retries
+ "mode": "adaptive", # "Retries with additional client side throttling"
+ }
)
-def unmarshal_ddb_item(
- low_level_data: Dict[
- str,
- Dict[Literal["S", "N", "B", "SS", "NS", "BS", "NULL", "BOOL", "M", "L"], Any],
- ]
-) -> Dict[str, Any]:
- # To go from low-level format to python
-
- deserializer = boto3.dynamodb.types.TypeDeserializer()
- python_data = {k: deserializer.deserialize(v) for k, v in low_level_data.items()}
- return python_data
-
-
-def send_sqs_message(
- session: Session, sqs_url: str, message: Dict[str, Any]
-) -> SendMessageResultTypeDef:
- sqs: SQSClient = session.client("sqs")
- logger.info("Sending SQS message to " + sqs_url)
- logger.info(message)
-
- unique_id = str(uuid.uuid1())
-
- response = sqs.send_message(
- QueueUrl=sqs_url,
- MessageBody=json.dumps(message),
- MessageDeduplicationId=unique_id,
- MessageGroupId=unique_id,
- )
-
- logger.info(response)
-
- return response
+def emails_are_equal(first_email: str, second_email: str) -> bool:
+ return first_email.lower() == second_email.lower()
def invoke_lambda(
@@ -554,32 +128,23 @@ def invoke_lambda(
payload: Union[bytes, IO[bytes], StreamingBody],
) -> InvocationResponseTypeDef:
client: LambdaClient = session.client("lambda")
- logger.info("Invoking AFT Account Provisioning Framework Lambda")
+ sanitized_function_name = sanitize_input_for_logging(function_name)
+ logger.info(f"Invoking Lambda: {sanitized_function_name}")
response = client.invoke(
FunctionName=function_name,
InvocationType="Event",
LogType="Tail",
Payload=payload,
)
- logger.info(response)
+ sanitized_response = sanitize_input_for_logging(response)
+ logger.info(sanitized_response)
return response
-def get_account_email_from_id(ct_management_session: Session, id: str) -> str:
- accounts = list_accounts(ct_management_session)
- logger.info("Getting account email for account id " + id)
- for a in accounts:
- if a["id"] == id:
- email = a["email"]
- logger.info("Account email: " + email)
- return email
- raise Exception("Account ID " + id + " was not found in the Organization")
-
-
def build_sfn_arn(session: Session, sfn_name: str) -> str:
account_info = get_session_info(session)
sfn_arn = (
- "arn:aws:states:"
+ f"arn:{get_aws_partition(session)}:states:"
+ account_info["region"]
+ ":"
+ account_info["account"]
@@ -594,190 +159,55 @@ def invoke_step_function(
) -> StartExecutionOutputTypeDef:
client: SFNClient = session.client("stepfunctions")
sfn_arn = build_sfn_arn(session, sfn_name)
- logger.info("Starting SFN execution of " + sfn_arn)
+ sanitized_sfn_arn = sanitize_input_for_logging(sfn_arn)
+ logger.info("Starting SFN execution of " + sanitized_sfn_arn)
response = client.start_execution(stateMachineArn=sfn_arn, input=input)
- logger.info(response)
+ logger.debug(sanitize_input_for_logging(response))
return response
-def is_controltower_event(event: Dict[str, Any]) -> bool:
- if "source" in event.keys():
- if event["source"] == "aws.controltower":
- logger.info("Event is Control Tower event")
+def is_aft_supported_controltower_event(event: Dict[str, Any]) -> bool:
+ if event.get("source", None) == "aws.controltower":
+ supported_events = ["CreateManagedAccount", "UpdateManagedAccount"]
+ if event.get("detail", {}).get("eventName", None) in supported_events:
+ logger.info("Received AFT supported Control Tower Event")
return True
- else:
- logger.info("Event is NOT Control Tower event")
- return False
- return False
-
-def is_aft_supported_controltower_event(event: Dict[str, Any]) -> bool:
- supported_events = ["CreateManagedAccount", "UpdateManagedAccount"]
- if event["detail"]["eventName"] in supported_events:
- logger.info("Control Tower Event is supported")
- return True
- else:
- logger.info("Control Tower Event is NOT supported")
- return False
+ return False
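# --- Editor's note (illustrative sketch, not part of the diff) ---
# The minimal event shape accepted by the check above; values are placeholders
# reduced from the Control Tower CloudTrail events handled in this change.
assert is_aft_supported_controltower_event(
    {"source": "aws.controltower", "detail": {"eventName": "CreateManagedAccount"}}
)
assert not is_aft_supported_controltower_event({"source": "aws.ec2", "detail": {}})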
-def send_sns_message(
- session: Session, topic: str, sns_message: str, subject: str
-) -> PublishResponseTypeDef:
- logger.info("Sending SNS Message")
- client: SNSClient = session.client("sns")
+def get_session_info(session: Session) -> Dict[str, str]:
+ client: STSClient = session.client("sts")
+ response = client.get_caller_identity()
- response = client.publish(TopicArn=topic, Message=sns_message, Subject=subject)
+ account_info = {"region": session.region_name, "account": response["Account"]}
- logger.info(response)
+ return account_info
- return response
+def get_aws_partition(session: Session, region: Optional[str] = None) -> str:
+ if region is None:
+ region = session.region_name
-def tag_org_resource(
- ct_management_session: Session,
- resource: str,
- tags: Sequence[TagTypeDef],
- rollback: bool = False,
-) -> None:
- client: OrganizationsClient = ct_management_session.client("organizations")
- if rollback:
- current_tags = client.list_tags_for_resource(ResourceId=resource)
- client.untag_resource(ResourceId=resource, TagKeys=[tag["Key"] for tag in tags])
- client.tag_resource(
- ResourceId=resource, Tags=cast(Sequence[TagTypeDef], current_tags)
- )
+ partition = session.get_partition_for_region(region)
+ return partition
- else:
- client.tag_resource(ResourceId=resource, Tags=tags)
+def yield_batches_from_list(
+ input: Sequence[Any], batch_size: int
+) -> Iterable[Sequence[Any]]:
+ if batch_size <= 0:
+ return  # nothing to yield for non-positive batch sizes
-def get_all_aft_account_ids(session: Session) -> List[str]:
- table_name = get_ssm_parameter_value(session, SSM_PARAM_AFT_DDB_META_TABLE)
- dynamodb = session.resource("dynamodb")
- table = dynamodb.Table(table_name)
- logger.info("Scanning DynamoDB table: " + table_name)
+ idx = 0
+ while idx < len(input):
+ yield input[idx : idx + batch_size]
+ idx += batch_size
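# --- Editor's note (illustrative sketch, not part of the diff) ---
# The generator walks the sequence in batch_size slices, ending with a short
# final batch when the length is not a multiple of batch_size:
assert list(yield_batches_from_list([1, 2, 3, 4, 5], batch_size=2)) == [
    [1, 2],
    [3, 4],
    [5],
]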
- items: List[Dict[str, str]] = []
- response = table.scan(ProjectionExpression="id", ConsistentRead=True)
- items.extend(response["Items"])
- while "LastEvaluatedKey" in response:
- logger.debug(
- "Paginated response found, continuing at {}".format(
- response["LastEvaluatedKey"]
- )
- )
- response = table.scan(ExclusiveStartKey=response["LastEvaluatedKey"])
- items.extend(response["Items"])
-
- aft_account_ids = [item["id"] for item in items]
-
- if not aft_account_ids:
- raise Exception("No accounts found in the Account Metadata table")
-
- return aft_account_ids
-
-
-def get_account_ids_in_ous(
- session: Session, ou_names: List[str]
-) -> Optional[List[str]]:
- client: OrganizationsClient = session.client("organizations")
- logger.info("Getting Account IDs in the following OUs: " + str(ou_names))
- ou_ids = []
- account_ids = []
- for n in ou_names:
- ou_ids.append(get_org_ou_id(session, n))
- logger.info("OU IDs: " + str(ou_ids))
- for ou_id in ou_ids:
- if ou_id is not None:
- logger.info("Listing accounts in the OU ID " + ou_id)
-
- response = client.list_children(ParentId=ou_id, ChildType="ACCOUNT")
- children = response["Children"]
- while "NextToken" in response:
- response = client.list_children(
- ParentId=ou_id,
- ChildType="ACCOUNT",
- NextToken=response["NextToken"],
- )
- children.extend(response["Children"])
-
- logger.info(str(children))
-
- for a in children:
- account_ids.append(a["Id"])
- else:
- logger.info("OUs in " + str(ou_names) + " was not found")
- logger.info("Account IDs: " + str(account_ids))
- if len(account_ids) > 0:
- return account_ids
- else:
- return None
-
-
-def get_org_ou_id(session: Session, ou_name: str) -> Optional[str]:
- client: OrganizationsClient = session.client("organizations")
- logger.info("Listing Org Roots")
- list_roots_response = client.list_roots(MaxResults=1)
- logger.info(list_roots_response)
- root_id = list_roots_response["Roots"][0]["Id"]
- logger.info("Root ID is " + root_id)
-
- logger.info("Listing OUs in the Organization")
-
- list_ou_response = client.list_organizational_units_for_parent(ParentId=root_id)
- ous = list_ou_response["OrganizationalUnits"]
- while "NextToken" in list_ou_response:
- list_ou_response = client.list_organizational_units_for_parent(
- ParentId=root_id, NextToken=list_ou_response["NextToken"]
- )
- ous.extend(list_ou_response["OrganizationalUnits"])
-
- logger.info(ous)
-
- for ou in ous:
- if ou["Name"] == ou_name:
- ou_id: str = ou["Id"]
- logger.info("OU ID for " + ou_name + " is " + ou_id)
- return ou_id
-
- return None
-
-
-def get_accounts_by_tags(
- aft_mgmt_session: Session, ct_mgmt_session: Session, tags: List[Dict[str, str]]
-) -> Optional[List[str]]:
- logger.info("Getting Account with tags - " + str(tags))
- # Get all AFT Managed Accounts
- all_accounts = get_all_aft_account_ids(aft_mgmt_session)
- matched_accounts = []
- client: OrganizationsClient = ct_mgmt_session.client("organizations")
- # Loop through AFT accounts, requesting tags
- if all_accounts is None:
- return None
-
- for a in all_accounts:
- account_tags = {}
- response = client.list_tags_for_resource(ResourceId=a)
- # Format tags as a dictionary rather than a list
- for t in response["Tags"]:
- account_tags[t["Key"]] = t["Value"]
- logger.info("Account tags for " + a + ": " + str(account_tags))
- counter = 0
- # Loop through tag filter. Append account to matched_accounts if all tags in filter match/ are present
- for x in tags:
- for k, v in x.items():
- if k in account_tags.keys():
- if account_tags[k] == v:
- counter += 1
- if counter == len(tags):
- logger.info(
- "Account " + a + " MATCHED with tags " + str(tags)
- )
- matched_accounts.append(a)
- logger.info(matched_accounts)
- if len(matched_accounts) > 0:
- return matched_accounts
- else:
- return None
+def sanitize_input_for_logging(input: Any) -> str:
+ """
+ Sanitize input for logging by escaping control characters such as newlines and tabs into their literal string representations.
+ """
+ input_str = str(input)
+ return input_str.encode("unicode_escape").decode()
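# --- Editor's note (illustrative sketch, not part of the diff) ---
# Escaping control characters keeps attacker-controlled input from forging
# extra log lines:
assert sanitize_input_for_logging("line1\nline2\tend") == "line1\\nline2\\tend"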
diff --git a/sources/aft-lambda-layer/aft_common/auth.py b/sources/aft-lambda-layer/aft_common/auth.py
new file mode 100644
index 00000000..f0a62513
--- /dev/null
+++ b/sources/aft-lambda-layer/aft_common/auth.py
@@ -0,0 +1,212 @@
+# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+# SPDX-License-Identifier: Apache-2.0
+#
+import logging
+from functools import cached_property
+from typing import TYPE_CHECKING, Optional
+
+from aft_common.aft_utils import get_aws_partition
+from aft_common.constants import (
+ SSM_PARAM_ACCOUNT_AFT_MANAGEMENT_ACCOUNT_ID,
+ SSM_PARAM_ACCOUNT_CT_MANAGEMENT_ACCOUNT_ID,
+ SSM_PARAM_ACCOUNT_LOG_ARCHIVE_ACCOUNT_ID,
+)
+from aft_common.ssm import get_ssm_parameter_value
+from boto3 import Session
+from botocore.exceptions import ClientError
+
+if TYPE_CHECKING:
+ from mypy_boto3_sts import STSClient
+ from mypy_boto3_sts.type_defs import AssumeRoleRequestRequestTypeDef
+else:
+ STSClient = object
+ AssumeRoleRequestRequestTypeDef = object
+
+logger = logging.getLogger("aft")
+
+
+class AuthClient:
+ SSM_PARAM_AFT_SESSION_NAME = "/aft/resources/iam/aft-session-name"
+ SSM_PARAM_AFT_ADMIN_ROLE_NAME = "/aft/resources/iam/aft-administrator-role-name"
+ SSM_PARAM_AFT_EXEC_ROLE_NAME = "/aft/resources/iam/aft-execution-role-name"
+ CONTROL_TOWER_EXECUTION_ROLE_NAME = "AWSControlTowerExecution"
+
+ def __init__(self, aft_management_session: Optional[Session] = None) -> None:
+ if aft_management_session is None:
+ aft_management_session = Session()
+ if self._is_aft_management_session(session=aft_management_session):
+ self.aft_management_account_id = aft_management_session.client(
+ "sts"
+ ).get_caller_identity()["Account"]
+ self.aft_management_session = aft_management_session
+ else:
+ raise Exception("Unable to federate into AFT Management Account")
+
+ @cached_property
+ def _assume_role_session_name(self) -> str:
+ return get_ssm_parameter_value(
+ session=self.aft_management_session,
+ param=AuthClient.SSM_PARAM_AFT_SESSION_NAME,
+ )
+
+ @staticmethod
+ def _is_aft_management_session(session: Session) -> bool:
+ try:
+ aft_management_account_id = get_ssm_parameter_value(
+ session=session, param=SSM_PARAM_ACCOUNT_AFT_MANAGEMENT_ACCOUNT_ID
+ )
+ caller_account_id = session.client("sts").get_caller_identity()["Account"]
+ return caller_account_id == aft_management_account_id
+
+ except ClientError as error:
+ if error.response["Error"]["Code"] == "ParameterNotFound":
+ return False
+ else:
+ raise error
+
+ @staticmethod
+ def _build_role_arn(partition: str, account_id: str, role_name: str) -> str:
+ return f"arn:{partition}:iam::{account_id}:role/{role_name}"
+
+ @staticmethod
+ def _get_session(
+ session: Session,
+ role_arn: str,
+ assume_role_session_name: str,
+ assume_role_session_duration: int = 900,
+ region: Optional[str] = None,
+ session_policy: Optional[str] = None,
+ external_id: Optional[str] = None,
+ ) -> Session:
+ sts: STSClient = session.client("sts")
+ params: AssumeRoleRequestRequestTypeDef = dict(
+ RoleArn=role_arn,
+ RoleSessionName=assume_role_session_name,
+ DurationSeconds=assume_role_session_duration,
+ )
+
+ if external_id:
+ params.update(dict(ExternalId=external_id))
+ if session_policy:
+ params.update(dict(Policy=session_policy))
+
+ response = sts.assume_role(**params)
+ credentials = response["Credentials"]
+ return Session(
+ aws_access_key_id=credentials["AccessKeyId"],
+ aws_secret_access_key=credentials["SecretAccessKey"],
+ aws_session_token=credentials["SessionToken"],
+ region_name=region if region is not None else session.region_name,
+ )
+
+ @staticmethod
+ def get_account_id_from_session(session: Session) -> str:
+ return session.client("sts").get_caller_identity()["Account"]
+
+ def _get_hub_session(self, session_duration: int = 900) -> Session:
+ """
+ Assumes a hub role, "AWSAFTAdmin" in the AFT Management account
+ which is trusted by all "AWSAFTExecution" roles in all managed accounts
+ """
+ role_name = get_ssm_parameter_value(
+ session=self.aft_management_session,
+ param=AuthClient.SSM_PARAM_AFT_ADMIN_ROLE_NAME,
+ )
+ role_arn = AuthClient._build_role_arn(
+ partition=get_aws_partition(session=self.aft_management_session),
+ account_id=self.aft_management_account_id,
+ role_name=role_name,
+ )
+ return AuthClient._get_session(
+ session=self.aft_management_session,
+ role_arn=role_arn,
+ assume_role_session_name=self._assume_role_session_name,
+ assume_role_session_duration=session_duration,
+ )
+
+ def get_aft_management_session(self) -> Session:
+ return self.aft_management_session
+
+ def get_target_account_session(
+ self,
+ account_id: str,
+ hub_session: Optional[Session] = None,
+ role_name: Optional[str] = None,
+ region: Optional[str] = None,
+ session_duration: int = 900,
+ session_policy: Optional[str] = None,
+ ) -> Session:
+ """
+ Leverages a hub session from AFT Management, and federates to a spoke IAM role within a target account
+ """
+ if hub_session is None:
+ logger.info(
+ "No hub session provided, creating default hub session using AWSAFTAdmin role"
+ )
+ hub_session = self._get_hub_session(session_duration=session_duration)
+
+ hub_caller_identity = hub_session.client("sts").get_caller_identity()
+
+ # Preserve existing behavior: fall back to the default AWSAFTExecution role
+ if role_name is None:
+ logger.info("No role provided, using default AWSAFTExecution role")
+ role_name = get_ssm_parameter_value(
+ session=self.aft_management_session,
+ param=AuthClient.SSM_PARAM_AFT_EXEC_ROLE_NAME,
+ )
+
+ spoke_role_arn = AuthClient._build_role_arn(
+ partition=get_aws_partition(session=self.aft_management_session),
+ account_id=account_id,
+ role_name=role_name,
+ )
+
+ logger.info(
+ f"Generating session using {hub_caller_identity['Arn']} for {spoke_role_arn}"
+ )
+ return AuthClient._get_session(
+ session=hub_session,
+ role_arn=spoke_role_arn,
+ assume_role_session_name=self._assume_role_session_name,
+ assume_role_session_duration=session_duration,
+ region=region,
+ session_policy=session_policy,
+ )
+
+ def get_ct_management_session(
+ self,
+ role_name: Optional[str] = None,
+ region: Optional[str] = None,
+ session_policy: Optional[str] = None,
+ session_duration: int = 900,
+ ) -> Session:
+ account_id = get_ssm_parameter_value(
+ session=self.aft_management_session,
+ param=SSM_PARAM_ACCOUNT_CT_MANAGEMENT_ACCOUNT_ID,
+ )
+ return self.get_target_account_session(
+ account_id=account_id,
+ role_name=role_name,
+ region=region,
+ session_policy=session_policy,
+ session_duration=session_duration,
+ )
+
+ def get_log_archive_session(
+ self,
+ role_name: Optional[str] = None,
+ region: Optional[str] = None,
+ session_policy: Optional[str] = None,
+ session_duration: int = 900,
+ ) -> Session:
+ account_id = get_ssm_parameter_value(
+ session=self.aft_management_session,
+ param=SSM_PARAM_ACCOUNT_LOG_ARCHIVE_ACCOUNT_ID,
+ )
+ return self.get_target_account_session(
+ account_id=account_id,
+ role_name=role_name,
+ region=region,
+ session_policy=session_policy,
+ session_duration=session_duration,
+ )
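# --- Editor's note (illustrative sketch, not part of the diff) ---
# The hub-and-spoke flow AuthClient implements: AFT Management credentials
# assume the AWSAFTAdmin hub role, which then assumes AWSAFTExecution in the
# target account. The account ID is a placeholder; this must run with AFT
# Management Account credentials.
from aft_common.auth import AuthClient

auth = AuthClient()
spoke_session = auth.get_target_account_session(account_id="111122223333")
print(AuthClient.get_account_id_from_session(spoke_session))  # 111122223333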
diff --git a/sources/aft-lambda-layer/aft_common/codepipeline.py b/sources/aft-lambda-layer/aft_common/codepipeline.py
new file mode 100644
index 00000000..eea1c0b5
--- /dev/null
+++ b/sources/aft-lambda-layer/aft_common/codepipeline.py
@@ -0,0 +1,154 @@
+# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+# SPDX-License-Identifier: Apache-2.0
+#
+import logging
+import re
+from typing import Any, List
+
+import aft_common.aft_utils as utils
+from boto3.session import Session
+
+logger = logging.getLogger("aft")
+
+AFT_CUSTOMIZATIONS_PIPELINE_NAME_PATTERN = r"^\d{12}-.*$"  # "<12-digit account id>-..."
+
+
+def get_pipeline_for_account(session: Session, account_id: str) -> str:
+ current_account = session.client("sts").get_caller_identity()["Account"]
+ current_region = session.region_name
+
+ sanitized_account_id = utils.sanitize_input_for_logging(account_id)
+ logger.info("Getting pipeline name for " + sanitized_account_id)
+
+ client = session.client("codepipeline", config=utils.get_high_retry_botoconfig())
+ paginator = client.get_paginator("list_pipelines")
+
+ pipelines = []
+ for page in paginator.paginate():
+ pipelines.extend(page["pipelines"])
+
+ for p in pipelines:
+ name = p["name"]
+ if name.startswith(account_id + "-"):
+ pipeline_arn = (
+ f"arn:{utils.get_aws_partition(session)}:codepipeline:"
+ + current_region
+ + ":"
+ + current_account
+ + ":"
+ + name
+ )
+ response = client.list_tags_for_resource(resourceArn=pipeline_arn)
+ for t in response["tags"]:
+ if t["key"] == "managed_by" and t["value"] == "AFT":
+ pipeline_name: str = p["name"]
+ return pipeline_name
+ raise Exception(
+ "Pipelines for account id " + sanitized_account_id + " was not found"
+ )
+
+
+def pipeline_is_running(session: Session, name: str) -> bool:
+ logger.info("Getting pipeline executions for " + name)
+
+ client = session.client("codepipeline", config=utils.get_high_retry_botoconfig())
+ paginator = client.get_paginator("list_pipeline_executions")
+
+ pipeline_execution_summaries = []
+ for page in paginator.paginate(pipelineName=name):
+ pipeline_execution_summaries.extend(page["pipelineExecutionSummaries"])
+
+ if not pipeline_execution_summaries:
+ # No executions for this pipeline in the last 12 months, so cannot be currently running
+ return False
+
+ latest_execution = sorted(
+ pipeline_execution_summaries, key=lambda i: i["startTime"], reverse=True # type: ignore
+ )[0]
+
+ logger.info(f"Latest Execution: {latest_execution}")
+ return latest_execution["status"] == "InProgress"
+
+
+def execute_pipeline(session: Session, account_id: str) -> None:
+ client = session.client("codepipeline")
+ name = get_pipeline_for_account(session, account_id)
+ if not pipeline_is_running(session, name):
+ logger.info("Executing pipeline - " + name)
+ response = client.start_pipeline_execution(name=name)
+ sanitized_response = utils.sanitize_input_for_logging(response)
+ logger.info(sanitized_response)
+ else:
+ logger.info("Pipeline is currently running")
+
+
+def list_pipelines(session: Session) -> List[Any]:
+ logger.info("Listing Pipelines - ")
+
+ client = session.client("codepipeline", config=utils.get_high_retry_botoconfig())
+ paginator = client.get_paginator("list_pipelines")
+
+ pipelines = []
+ for page in paginator.paginate():
+ pipelines.extend(page["pipelines"])
+
+ pattern = re.compile(AFT_CUSTOMIZATIONS_PIPELINE_NAME_PATTERN)
+ matched_pipelines = []
+ for p in pipelines:
+ if re.match(pattern, p["name"]):
+ matched_pipelines.append(p["name"])
+
+ logger.info("The following pipelines were matched: " + str(matched_pipelines))
+ return matched_pipelines
+
+
+def get_running_pipeline_count(session: Session, pipeline_names: List[str]) -> int:
+ pipeline_counter = 0
+ client = session.client("codepipeline", config=utils.get_high_retry_botoconfig())
+
+ for name in pipeline_names:
+ logger.info("Getting pipeline executions for " + name)
+
+ paginator = client.get_paginator("list_pipeline_executions")
+ pipeline_execution_summaries = []
+ for page in paginator.paginate(pipelineName=name):
+ pipeline_execution_summaries.extend(page["pipelineExecutionSummaries"])
+
+ if not pipeline_execution_summaries:
+ # No executions for this pipeline in the last 12 months
+ continue
+ else:
+ latest_execution = sorted(
+ pipeline_execution_summaries, key=lambda i: i["startTime"], reverse=True # type: ignore
+ )[0]
+ logger.info("Latest Execution: ")
+ logger.info(latest_execution)
+
+ if latest_execution["status"] == "InProgress":
+ pipeline_counter += 1
+
+ logger.info("The number of running pipelines is " + str(pipeline_counter))
+
+ return pipeline_counter
+
+
+def delete_customization_pipeline(
+ aft_management_session: Session, account_id: str
+) -> None:
+ client = aft_management_session.client("codepipeline")
+
+ pipeline_name = get_pipeline_for_account(
+ session=aft_management_session, account_id=account_id
+ )
+ if not pipeline_is_running(session=aft_management_session, name=pipeline_name):
+ client.delete_pipeline(name=pipeline_name)
+ logger.info(
+ f"Deleted customization pipeline for {utils.sanitize_input_for_logging(account_id)}"
+ )
+ else:
+ logger.warning(
+ f"Cannot delete running customization pipeline: {pipeline_name}, skipping"
+ )
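# --- Editor's note (illustrative sketch, not part of the diff) ---
# Tying the helpers above together; the account ID is a placeholder.
# execute_pipeline looks up the AFT-managed "<account-id>-..." pipeline via
# get_pipeline_for_account and starts it only if pipeline_is_running reports
# its latest execution is not InProgress.
from boto3.session import Session

from aft_common import codepipeline

codepipeline.execute_pipeline(Session(), account_id="111122223333")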
diff --git a/sources/aft-lambda-layer/aft_common/constants.py b/sources/aft-lambda-layer/aft_common/constants.py
new file mode 100644
index 00000000..4abf1999
--- /dev/null
+++ b/sources/aft-lambda-layer/aft_common/constants.py
@@ -0,0 +1,68 @@
+# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+# SPDX-License-Identifier: Apache-2.0
+#
+SSM_PARAM_AFT_DDB_META_TABLE = "/aft/resources/ddb/aft-request-metadata-table-name"
+SSM_PARAM_AFT_SESSION_NAME = "/aft/resources/iam/aft-session-name"
+SSM_PARAM_AFT_ADMIN_ROLE = "/aft/resources/iam/aft-administrator-role-name"
+SSM_PARAM_AFT_EXEC_ROLE = "/aft/resources/iam/aft-execution-role-name"
+SSM_PARAM_SC_PRODUCT_NAME = "/aft/resources/sc/account-factory-product-name"
+SSM_PARAM_SNS_TOPIC_ARN = "/aft/account/aft-management/sns/topic-arn"
+SSM_PARAM_SNS_FAILURE_TOPIC_ARN = "/aft/account/aft-management/sns/failure-topic-arn"
+SSM_PARAM_ACCOUNT_REQUEST_QUEUE = "/aft/resources/sqs/aft-request-queue-name"
+SSM_PARAM_AFT_ACCOUNT_PROVISIONING_FRAMEWORK_LAMBDA = (
+ "/aft/resources/lambda/aft-invoke-aft-account-provisioning-framework"
+)
+SSM_PARAM_AFT_CLEANUP_RESOURCES_LAMBDA = "/aft/resources/lambda/aft-cleanup-resources"
+SSM_PARAM_AFT_EVENTS_TABLE = "/aft/resources/ddb/aft-controltower-events-table-name"
+SSM_PARAM_AFT_SFN_NAME = (
+ "/aft/account/aft-management/sfn/aft-account-provisioning-framework-sfn-name"
+)
+SSM_PARAM_AFT_DDB_REQ_TABLE = "/aft/resources/ddb/aft-request-table-name"
+SSM_PARAM_AFT_DDB_AUDIT_TABLE = "/aft/resources/ddb/aft-request-audit-table-name"
+SSM_PARAM_AFT_REQUEST_ACTION_TRIGGER_FUNCTION_ARN = (
+ "/aft/resources/lambda/aft-account-request-action-trigger-function-arn"
+)
+SSM_PARAM_AFT_ACCOUNT_REQUEST_AUDIT_TRIGGER_FUNCTION_ARN = (
+ "/aft/resources/lambda/aft-account-request-audit-trigger-function-arn"
+)
+SSM_PARAM_AFT_ACCOUNT_REQUEST_PROCESSOR_FUNCTION_ARN = (
+ "/aft/resources/lambda/aft-account-request-processor-function-arn"
+)
+SSM_PARAM_AFT_CONTROLTOWER_EVENT_LOGGER_FUNCTION_ARN = (
+ "/aft/resources/lambda/aft-controltower-event-logger-function-arn"
+)
+SSM_PARAM_AFT_INVOKE_AFT_ACCOUNT_PROVISIONING_FRAMEWORK_FUNCTION_ARN = (
+ "/aft/resources/lambda/aft-invoke-aft-account-provisioning-framework-function-arn"
+)
+SSM_PARAM_AFT_ACCOUNT_PROVISIONING_FRAMEWORK_CREATE_ROLE_FUNCTION_ARN = (
+ "/aft/resources/lambda/aft-account-provisioning-framework-create-role-function-arn"
+)
+SSM_PARAM_AFT_ACCOUNT_PROVISIONING_FRAMEWORK_TAG_ACCOUNT_FUNCTION_ARN = (
+ "/aft/resources/lambda/aft-account-provisioning-framework-tag-account-function-arn"
+)
+SSM_PARAM_AFT_ACCOUNT_PROVISIONING_FRAMEWORK_PERSIST_METADATA_FUNCTION_ARN = "/aft/resources/lambda/aft-account-provisioning-framework-persist-metadata-function-arn"
+SSM_PARAM_AFT_ACCOUNT_PROVISIONING_FRAMEWORK_NOTIFY_ERROR_FUNCTION_ARN = (
+ "/aft/resources/lambda/aft-account-provisioning-framework-notify-error-function-arn"
+)
+SSM_PARAM_AFT_ACCOUNT_PROVISIONING_FRAMEWORK_NOTIFY_SUCCESS_FUNCTION_ARN = "/aft/resources/lambda/aft-account-provisioning-framework-notify-success-function-arn"
+SSM_PARAM_AFT_MAXIMUM_CONCURRENT_CUSTOMIZATIONS = (
+ "/aft/config/customizations/maximum_concurrent_customizations"
+)
+SSM_PARAM_FEATURE_CLOUDTRAIL_DATA_EVENTS_ENABLED = (
+ "/aft/config/feature/cloudtrail-data-events-enabled"
+)
+SSM_PARAM_FEATURE_ENTERPRISE_SUPPORT_ENABLED = (
+ "/aft/config/feature/enterprise-support-enabled"
+)
+SSM_PARAM_FEATURE_DEFAULT_VPCS_ENABLED = (
+ "/aft/config/feature/delete-default-vpcs-enabled"
+)
+SSM_PARAM_ACCOUNT_CT_MANAGEMENT_ACCOUNT_ID = "/aft/account/ct-management/account-id"
+SSM_PARAM_ACCOUNT_AUDIT_ACCOUNT_ID = "/aft/account/audit/account-id"
+SSM_PARAM_ACCOUNT_LOG_ARCHIVE_ACCOUNT_ID = "/aft/account/log-archive/account-id"
+SSM_PARAM_ACCOUNT_AFT_MANAGEMENT_ACCOUNT_ID = "/aft/account/aft-management/account-id"
+SSM_PARAM_ACCOUNT_AFT_VERSION = "/aft/config/aft/version"
+SSM_PARAM_ACCOUNT_TERRAFORM_VERSION = "/aft/config/terraform/version"
+SSM_PARAM_AFT_METRICS_REPORTING = "/aft/config/metrics-reporting"
+SSM_PARAM_AFT_METRICS_REPORTING_UUID = "/aft/config/metrics-reporting-uuid"
+SSM_PARAMETER_PATH = "/aft/account-request/custom-fields/"
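+
+# Illustrative usage (assumed call site, not part of this module):
+#   from aft_common.ssm import get_ssm_parameter_value
+#   get_ssm_parameter_value(session, SSM_PARAM_ACCOUNT_AFT_VERSION)
+# Call sites resolve configuration through these names rather than
+# hard-coding parameter paths.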
diff --git a/sources/aft-lambda-layer/aft_common/customizations.py b/sources/aft-lambda-layer/aft_common/customizations.py
index 6216b3f8..2359e743 100644
--- a/sources/aft-lambda-layer/aft_common/customizations.py
+++ b/sources/aft-lambda-layer/aft_common/customizations.py
@@ -2,148 +2,37 @@
# SPDX-License-Identifier: Apache-2.0
#
import json
+import logging
import os
-import re
-from typing import Any, Dict, List
+from typing import TYPE_CHECKING, Any, Dict, List, Optional
-import aft_common.aft_utils as utils
import jsonschema
+from aft_common.aft_utils import get_high_retry_botoconfig, sanitize_input_for_logging
+from aft_common.constants import SSM_PARAM_AFT_DDB_META_TABLE
+from aft_common.organizations import OrganizationsAgent
+from aft_common.ssm import get_ssm_parameter_value
from boto3.session import Session
-CUSTOMIZATIONS_PIPELINE_PATTERN = "^\d\d\d\d\d\d\d\d\d\d\d\d-.*$"
+if TYPE_CHECKING:
+ from mypy_boto3_organizations import OrganizationsClient
+else:
+ OrganizationsClient = object
-AFT_PIPELINE_ACCOUNTS = ["ct-management", "log-archive", "audit"]
+AFT_SHARED_ACCOUNT_NAMES = ["ct-management", "log-archive", "audit"]
-logger = utils.get_logger()
+logger = logging.getLogger("aft")
-def get_pipeline_for_account(session: Session, account: str) -> str:
- current_account = session.client("sts").get_caller_identity()["Account"]
- current_region = session.region_name
- client = session.client("codepipeline")
- logger.info("Getting pipeline name for " + account)
-
- response = client.list_pipelines()
-
- pipelines = response["pipelines"]
- while "nextToken" in response:
- response = client.list_pipelines(nextToken=response["nextToken"])
- pipelines.extend(response["pipelines"])
-
- for p in pipelines:
- name = p["name"]
- if name.startswith(account + "-"):
- pipeline_arn = (
- "arn:aws:codepipeline:"
- + current_region
- + ":"
- + current_account
- + ":"
- + name
- )
- response = client.list_tags_for_resource(resourceArn=pipeline_arn)
- for t in response["tags"]:
- if t["key"] == "managed_by" and t["value"] == "AFT":
- pipeline_name: str = p["name"]
- return pipeline_name
- raise Exception("Pipelines for account id " + account + " was not found")
-
-
-def pipeline_is_running(session: Session, name: str) -> bool:
- client = session.client("codepipeline")
-
- logger.info("Getting pipeline executions for " + name)
-
- response = client.list_pipeline_executions(pipelineName=name)
- pipeline_execution_summaries = response["pipelineExecutionSummaries"]
-
- while "nextToken" in response:
- response = client.list_pipeline_executions(
- pipelineName=name, nextToken=response["nextToken"]
- )
- pipeline_execution_summaries.extend(response["pipelineExecutionSummaries"])
-
- latest_execution = sorted(
- pipeline_execution_summaries, key=lambda i: i["startTime"], reverse=True # type: ignore
- )[0]
- logger.info("Latest Execution: ")
- logger.info(latest_execution)
- if latest_execution["status"] == "InProgress":
- return True
- else:
- return False
-
-
-def execute_pipeline(session: Session, account: str) -> None:
- client = session.client("codepipeline")
- name = get_pipeline_for_account(session, account)
- if not pipeline_is_running(session, name):
- logger.info("Executing pipeline - " + name)
- response = client.start_pipeline_execution(name=name)
- logger.info(response)
- else:
- logger.info("Pipeline is currently running")
-
-
-def list_pipelines(session: Session) -> List[Any]:
- pattern = re.compile(CUSTOMIZATIONS_PIPELINE_PATTERN)
- matched_pipelines = []
- client = session.client("codepipeline")
- logger.info("Listing Pipelines - ")
-
- response = client.list_pipelines()
-
- pipelines = response["pipelines"]
- while "nextToken" in response:
- response = client.list_pipelines(nextToken=response["nextToken"])
- pipelines.extend(response["pipelines"])
-
- for p in pipelines:
- if re.match(pattern, p["name"]):
- matched_pipelines.append(p["name"])
-
- logger.info("The following pipelines were matched: " + str(matched_pipelines))
- return matched_pipelines
-
-
-def get_running_pipeline_count(session: Session, names: List[str]) -> int:
- pipeline_counter = 0
- client = session.client("codepipeline")
-
- for p in names:
- logger.info("Getting pipeline executions for " + p)
-
- response = client.list_pipeline_executions(pipelineName=p)
- pipeline_execution_summaries = response["pipelineExecutionSummaries"]
-
- while "nextToken" in response:
- response = client.list_pipeline_executions(
- pipelineName=p, nextToken=response["nextToken"]
- )
- pipeline_execution_summaries.extend(response["pipelineExecutionSummaries"])
-
- latest_execution = sorted(
- pipeline_execution_summaries, key=lambda i: i["startTime"], reverse=True # type: ignore
- )[0]
- logger.info("Latest Execution: ")
- logger.info(latest_execution)
-
- if latest_execution["status"] == "InProgress":
- pipeline_counter += 1
-
- logger.info("The number of running pipelines is " + str(pipeline_counter))
-
- return pipeline_counter
-
-
-def validate_request(payload: Dict[str, Any]) -> bool:
- logger.info("Function Start - validate_request")
+def validate_identify_targets_request(payload: Dict[str, Any]) -> bool:
+ logger.info("Function Start - validate_identify_targets_request")
schema_path = os.path.join(
os.path.dirname(__file__), "schemas/identify_targets_request_schema.json"
)
with open(schema_path) as schema_file:
schema_object = json.load(schema_file)
- logger.info("Schema Loaded:" + json.dumps(schema_object))
+    logger.info(
+        "Schema Loaded: " + sanitize_input_for_logging(json.dumps(schema_object))
+    )
validated = jsonschema.validate(payload, schema_object)
if validated is None:
logger.info("Request Validated")
@@ -152,10 +41,38 @@ def validate_request(payload: Dict[str, Any]) -> bool:
raise Exception("Failure validating request.\n{validated}")
+def get_all_aft_account_ids(aft_management_session: Session) -> List[str]:
+ table_name = get_ssm_parameter_value(
+ aft_management_session, SSM_PARAM_AFT_DDB_META_TABLE
+ )
+ dynamodb = aft_management_session.resource("dynamodb")
+ table = dynamodb.Table(table_name)
+ logger.info("Scanning DynamoDB table: " + table_name)
+
+ items: List[Dict[str, Any]] = []
+ response = table.scan(ProjectionExpression="id", ConsistentRead=True)
+ items.extend(response["Items"])
+ while "LastEvaluatedKey" in response:
+ logger.debug(
+ "Paginated response found, continuing at {}".format(
+ sanitize_input_for_logging(response["LastEvaluatedKey"])
+ )
+ )
+        response = table.scan(
+            ProjectionExpression="id",
+            ConsistentRead=True,
+            ExclusiveStartKey=response["LastEvaluatedKey"],
+        )
+ items.extend(response["Items"])
+
+ aft_account_ids = [item["id"] for item in items]
+
+ if not aft_account_ids:
+ raise Exception("No accounts found in the Account Metadata table")
+
+ return aft_account_ids
+
+
def filter_non_aft_accounts(
session: Session, account_list: List[str], operation: str = "include"
) -> List[str]:
- aft_accounts = utils.get_all_aft_account_ids(session)
+ aft_accounts = get_all_aft_account_ids(session)
core_accounts = get_core_accounts(session)
logger.info("Running AFT Filter for accounts " + str(account_list))
filtered_accounts = []
@@ -176,21 +93,67 @@ def filter_non_aft_accounts(
return account_list
-def get_core_accounts(session: Session) -> List[str]:
+def get_core_accounts(aft_management_session: Session) -> List[str]:
core_accounts = []
logger.info("Getting core accounts -")
- for a in AFT_PIPELINE_ACCOUNTS:
- id = utils.get_ssm_parameter_value(session, "/aft/account/" + a + "/account-id")
+ for a in AFT_SHARED_ACCOUNT_NAMES:
+ id = get_ssm_parameter_value(
+ aft_management_session, "/aft/account/" + a + "/account-id"
+ )
logger.info("Account ID for " + a + " is " + id)
core_accounts.append(id)
logger.info("Core accounts: " + str(core_accounts))
return core_accounts
+# TODO: Refactor to method of OrganizationsAgent
+def get_accounts_by_tags(
+ aft_mgmt_session: Session, ct_mgmt_session: Session, tags: List[Dict[str, str]]
+) -> Optional[List[str]]:
+ logger.info("Getting Account with tags - " + str(tags))
+ # Get all AFT Managed Accounts
+ all_accounts = get_all_aft_account_ids(aft_mgmt_session)
+ matched_accounts = []
+ client: OrganizationsClient = ct_mgmt_session.client(
+ "organizations", config=get_high_retry_botoconfig()
+ )
+ # Loop through AFT accounts, requesting tags
+ if all_accounts is None:
+ return None
+
+ for a in all_accounts:
+ account_tags = {}
+ response = client.list_tags_for_resource(ResourceId=a)
+ # Format tags as a dictionary rather than a list
+ for t in response["Tags"]:
+ account_tags[t["Key"]] = t["Value"]
+ logger.info("Account tags for " + a + ": " + str(account_tags))
+ counter = 0
+        # Loop through the tag filter; append the account to matched_accounts
+        # only when every tag in the filter is present and matches
+ for x in tags:
+ for k, v in x.items():
+ if k in account_tags.keys():
+ if account_tags[k] == v:
+ counter += 1
+ if counter == len(tags):
+ logger.info(
+ "Account " + a + " MATCHED with tags " + str(tags)
+ )
+ matched_accounts.append(a)
+ logger.info(matched_accounts)
+ if len(matched_accounts) > 0:
+ return matched_accounts
+ else:
+ return None
+
+
def get_included_accounts(
- session: Session, ct_mgmt_session: Session, included: List[Dict[str, Any]]
+ aft_management_session: Session,
+ ct_mgmt_session: Session,
+ orgs_agent: OrganizationsAgent,
+ included: List[Dict[str, Any]],
) -> List[str]:
- all_aft_accounts = utils.get_all_aft_account_ids(session)
+ all_aft_accounts = get_all_aft_account_ids(aft_management_session)
logger.info("All AFT accounts: " + str(all_aft_accounts))
included_accounts = []
for d in included:
@@ -198,17 +161,15 @@ def get_included_accounts(
if all_aft_accounts is not None:
included_accounts.extend(all_aft_accounts)
if d["type"] == "core":
- core_accounts = get_core_accounts(session)
+ core_accounts = get_core_accounts(aft_management_session)
included_accounts.extend(core_accounts)
if d["type"] == "ous":
- ou_accounts = utils.get_account_ids_in_ous(
- ct_mgmt_session, d["target_value"]
+ included_accounts.extend(
+ orgs_agent.get_account_ids_in_ous(ou_names=d["target_value"])
)
- if ou_accounts is not None:
- included_accounts.extend(ou_accounts)
if d["type"] == "tags":
- tag_accounts = utils.get_accounts_by_tags(
- session, ct_mgmt_session, d["target_value"]
+ tag_accounts = get_accounts_by_tags(
+ aft_management_session, ct_mgmt_session, d["target_value"]
)
if tag_accounts is not None:
included_accounts.extend(tag_accounts)
@@ -219,29 +180,32 @@ def get_included_accounts(
logger.info("Included Accounts (pre-AFT filter): " + str(included_accounts))
# Filter non-AFT accounts
- included_accounts = filter_non_aft_accounts(session, included_accounts)
+ included_accounts = filter_non_aft_accounts(
+ aft_management_session, included_accounts
+ )
logger.info("Included Accounts (post-AFT filter): " + str(included_accounts))
return included_accounts
def get_excluded_accounts(
- session: Session, ct_mgmt_session: Session, excluded: List[Dict[str, Any]]
+ aft_management_session: Session,
+ ct_mgmt_session: Session,
+ orgs_agent: OrganizationsAgent,
+ excluded: List[Dict[str, Any]],
) -> List[str]:
excluded_accounts = []
for d in excluded:
if d["type"] == "core":
- core_accounts = get_core_accounts(session)
+ core_accounts = get_core_accounts(aft_management_session)
excluded_accounts.extend(core_accounts)
if d["type"] == "ous":
- ou_accounts = utils.get_account_ids_in_ous(
- ct_mgmt_session, d["target_value"]
+ excluded_accounts.extend(
+ orgs_agent.get_account_ids_in_ous(ou_names=d["target_value"])
)
- if ou_accounts is not None:
- excluded_accounts.extend(ou_accounts)
if d["type"] == "tags":
- tag_accounts = utils.get_accounts_by_tags(
- session, ct_mgmt_session, d["target_value"]
+ tag_accounts = get_accounts_by_tags(
+ aft_management_session, ct_mgmt_session, d["target_value"]
)
if tag_accounts is not None:
excluded_accounts.extend(tag_accounts)
@@ -252,7 +216,9 @@ def get_excluded_accounts(
logger.info("Excluded Accounts (pre-AFT filter): " + str(excluded_accounts))
# Filter non-AFT accounts
- excluded_accounts = filter_non_aft_accounts(session, excluded_accounts, "exclude")
+ excluded_accounts = filter_non_aft_accounts(
+ aft_management_session, excluded_accounts, "exclude"
+ )
logger.info("Excluded Accounts (post-AFT filter): " + str(excluded_accounts))
return excluded_accounts
@@ -266,56 +232,3 @@ def get_target_accounts(
included_accounts.remove(i)
logger.info("TARGET ACCOUNTS: " + str(included_accounts))
return included_accounts
-
-
-def get_account_metadata_record(
- session: Session, table_name: str, account_id: str
-) -> Dict[str, Any]:
- dynamodb = session.resource("dynamodb")
- table = dynamodb.Table(table_name)
- logger.info("Getting account metadata record for " + account_id)
- response = table.get_item(Key={"id": account_id})
- item: Dict[str, Any] = response["Item"]
- logger.info(item)
- return item
-
-
-def get_account_request_record(
- session: Session, table_name: str, email_address: str
-) -> Dict[str, Any]:
- dynamodb = session.resource("dynamodb")
- table = dynamodb.Table(table_name)
- logger.info("Getting account request record for " + email_address)
- response = table.get_item(Key={"id": email_address})
- item: Dict[str, Any] = response["Item"]
- logger.info(item)
- return item
-
-
-def build_invoke_event(account_request_record: Dict[str, Any]) -> Dict[str, Any]:
- logger.info("Building invoke event for " + str(account_request_record))
- account_request_record["account_tags"] = json.loads(
- account_request_record["account_tags"]
- )
- invoke_event: Dict[str, Any]
- invoke_event = {
- "account_request": account_request_record,
- "control_tower_event": {},
- "account_provisioning": {},
- }
- invoke_event["account_provisioning"]["run_create_pipeline"] = "false"
-
- logger.info(str(invoke_event))
- return invoke_event
-
-
-def invoke_account_provisioning_sfn(
- session: Session, sfn_name: str, event: Dict[str, Any]
-) -> None:
- client = session.client("stepfunctions")
- logger.info("Invoking SFN - " + sfn_name)
- response = client.start_execution(
- stateMachineArn=utils.build_sfn_arn(session, sfn_name),
- input=json.dumps(event),
- )
- logger.info(response)
diff --git a/sources/aft-lambda-layer/aft_common/datetime_encoder.py b/sources/aft-lambda-layer/aft_common/datetime_encoder.py
deleted file mode 100644
index 48159792..00000000
--- a/sources/aft-lambda-layer/aft_common/datetime_encoder.py
+++ /dev/null
@@ -1,29 +0,0 @@
-# Copyright Amazon.com, Inc. or its affiliates. All rights reserved.
-# SPDX-License-Identifier: Apache-2.0
-#
-##############################################################################
-# Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. #
-# #
-# Licensed under the Apache License, Version 2.0 (the "License"). #
-# You may not use this file except in compliance #
-# with the License. A copy of the License is located at #
-# #
-# http://www.apache.org/licenses/LICENSE-2.0 #
-# #
-# or in the "license" file accompanying this file. This file is #
-# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY #
-# KIND, express or implied. See the License for the specific language #
-# governing permissions and limitations under the License. #
-##############################################################################
-
-# !/bin/python
-import json
-from datetime import date, datetime
-
-
-class DateTimeEncoder(json.JSONEncoder):
- def default(self, o: object) -> str:
- if isinstance(o, (datetime, date)):
- serial = o.isoformat()
- return serial
- raise TypeError("Type %s not serializable" % type(o))
diff --git a/sources/aft-lambda-layer/aft_common/ddb.py b/sources/aft-lambda-layer/aft_common/ddb.py
new file mode 100644
index 00000000..e0692708
--- /dev/null
+++ b/sources/aft-lambda-layer/aft_common/ddb.py
@@ -0,0 +1,71 @@
+# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+# SPDX-License-Identifier: Apache-2.0
+#
+import logging
+from typing import TYPE_CHECKING, Any, Dict, Optional
+
+from aft_common.aft_utils import sanitize_input_for_logging
+from boto3.dynamodb.types import TypeDeserializer
+from boto3.session import Session
+
+if TYPE_CHECKING:
+ from mypy_boto3_dynamodb.type_defs import (
+ AttributeValueTypeDef,
+ DeleteItemOutputTableTypeDef,
+ GetItemOutputTableTypeDef,
+ PutItemOutputTableTypeDef,
+ )
+else:
+ AttributeValueTypeDef = object
+ GetItemOutputTableTypeDef = object
+ PutItemOutputTableTypeDef = object
+ DeleteItemOutputTableTypeDef = object
+
+logger = logging.getLogger("aft")
+
+
+def get_ddb_item(
+ session: Session, table_name: str, primary_key: Dict[str, Any]
+) -> Optional[Dict[str, Any]]:
+ dynamodb = session.resource("dynamodb")
+ table = dynamodb.Table(table_name)
+
+ logger.info(f"Getting item with key: {primary_key} from table: {table_name}")
+ response = table.get_item(Key=primary_key)
+ return response.get("Item", None)
+
+
+def put_ddb_item(
+ session: Session, table_name: str, item: Dict[str, str]
+) -> PutItemOutputTableTypeDef:
+ dynamodb = session.resource("dynamodb")
+ table = dynamodb.Table(table_name)
+
+ logger.info(f"Inserting item into {table_name} table: {str(item)}")
+ response = table.put_item(Item=item)
+ sanitized_response = sanitize_input_for_logging(response)
+ logger.info(sanitized_response)
+ return response
+
+
+def delete_ddb_item(
+ session: Session, table_name: str, primary_key: Dict[str, Any]
+) -> DeleteItemOutputTableTypeDef:
+ dynamodb = session.resource("dynamodb")
+ table = dynamodb.Table(table_name)
+
+ logger.info(f"Deleting item with key: {primary_key} from: {table_name} table")
+ response = table.delete_item(Key=primary_key)
+ sanitized_response = sanitize_input_for_logging(response)
+ logger.info(sanitized_response)
+ return response
+
+
+def unmarshal_ddb_item(
+ low_level_data: Dict[str, AttributeValueTypeDef],
+) -> Dict[str, Any]:
+ # To go from low-level format to python
+
+ deserializer = TypeDeserializer()
+ python_data = {k: deserializer.deserialize(v) for k, v in low_level_data.items()}
+ return python_data
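+
+
+# Illustrative conversion (assumed values):
+#   unmarshal_ddb_item({"id": {"S": "111122223333"}, "count": {"N": "2"}})
+#   -> {"id": "111122223333", "count": Decimal("2")}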
diff --git a/sources/aft-lambda-layer/aft_common/exceptions.py b/sources/aft-lambda-layer/aft_common/exceptions.py
new file mode 100644
index 00000000..c7d44a12
--- /dev/null
+++ b/sources/aft-lambda-layer/aft_common/exceptions.py
@@ -0,0 +1,9 @@
+# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+# SPDX-License-Identifier: Apache-2.0
+#
+class ServiceRoleNotAssociated(Exception):
+ pass
+
+
+class NoAccountFactoryPortfolioFound(Exception):
+ pass
diff --git a/sources/aft-lambda-layer/aft_common/feature_options.py b/sources/aft-lambda-layer/aft_common/feature_options.py
index 5b8a3359..a7cf069a 100644
--- a/sources/aft-lambda-layer/aft_common/feature_options.py
+++ b/sources/aft-lambda-layer/aft_common/feature_options.py
@@ -1,35 +1,25 @@
# Copyright Amazon.com, Inc. or its affiliates. All rights reserved.
# SPDX-License-Identifier: Apache-2.0
#
-from typing import TYPE_CHECKING
+import logging
+from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple
-import boto3
+import aft_common.aft_utils as utils
+from boto3.session import Session
from botocore.exceptions import ClientError
if TYPE_CHECKING:
+ from mypy_boto3_cloudtrail import CloudTrailClient
from mypy_boto3_ec2 import EC2Client, EC2ServiceResource
else:
EC2Client = object
EC2ServiceResource = object
-
-from typing import TYPE_CHECKING, List
-
-from boto3.session import Session
-
-if TYPE_CHECKING:
- from mypy_boto3_cloudtrail import CloudTrailClient
-else:
CloudTrailClient = object
-from typing import Optional
-
-import aft_common.aft_utils as utils
-import boto3
-
SUPPORT_API_REGION = "us-east-1"
CLOUDTRAIL_TRAIL_NAME = "aws-aft-CustomizationsCloudTrail"
-logger = utils.get_logger()
+logger = logging.getLogger("aft")
def get_aws_regions(client: EC2Client) -> List[str]:
@@ -46,7 +36,8 @@ def get_aws_regions(client: EC2Client) -> List[str]:
def get_default_vpc(client: EC2Client) -> Optional[str]:
logger.info("Getting default VPC")
try:
- response = client.describe_vpcs(
+ describe_vpcs = client.get_paginator("describe_vpcs")
+ for page in describe_vpcs.paginate(
Filters=[
{
"Name": "isDefault",
@@ -55,11 +46,11 @@ def get_default_vpc(client: EC2Client) -> Optional[str]:
],
},
]
- )
- for v in response["Vpcs"]:
- vpc_id: str = v["VpcId"]
- logger.info(vpc_id)
- return vpc_id
+ ):
+ for v in page["Vpcs"]:
+ vpc_id: str = v["VpcId"]
+ logger.info(vpc_id)
+ return vpc_id
return None
except ClientError as e:
region = client.meta.region_name
@@ -170,7 +161,7 @@ def get_vpc_security_groups(resource: EC2ServiceResource, vpc: str) -> List[str]
sgs = []
for s in vpc_resource.security_groups.all():
sgs.append(s.id)
- logger.info("SGs: " + str(sgs))
+ logger.info("SGs: " + utils.sanitize_input_for_logging(sgs))
return sgs
@@ -285,6 +276,24 @@ def get_log_bucket_arns(session: Session) -> List[str]:
response = client.list_buckets()
bucket_arns = []
for b in response["Buckets"]:
- bucket_arns.append("arn:aws:s3:::" + b["Name"] + "/*")
+ bucket_arns.append(
+ f"arn:{utils.get_aws_partition(session)}:s3:::" + b["Name"] + "/*"
+ )
logger.info(str(bucket_arns))
return bucket_arns
+
+
+def get_target_account_and_customization_id_from_event(
+ event: Dict[str, Any],
+) -> Tuple[str, str]:
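+    # Note: despite the function name, the tuple is ordered
+    # (customization_request_id, target_account_id)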
+ request_id = event["customization_request_id"]
+ target_account_id = event.get("account_info", {}).get("account", {}).get("id", "")
+ if not target_account_id or not is_valid_account_id(target_account_id):
+ raise ValueError(
+ f"Event does not contain a valid target account ID: {target_account_id}"
+ )
+ return request_id, target_account_id
+
+
+def is_valid_account_id(account_id: str) -> bool:
+ return account_id.isdigit() and len(account_id) == 12
diff --git a/sources/aft-lambda-layer/aft_common/logger.py b/sources/aft-lambda-layer/aft_common/logger.py
index 6f21fe01..972c51a1 100644
--- a/sources/aft-lambda-layer/aft_common/logger.py
+++ b/sources/aft-lambda-layer/aft_common/logger.py
@@ -16,75 +16,104 @@
import json
import logging
-from typing import Any, Callable
-
-from .datetime_encoder import DateTimeEncoder
-
-
-class Logger(object):
- def __init__(self, loglevel: str = "warning") -> None:
- """Initializes logging"""
- self.config(loglevel=loglevel)
- return
-
- def config(self, loglevel: str = "warning") -> None:
- loglevel = logging.getLevelName(loglevel.upper())
- mainlogger = logging.getLogger()
- mainlogger.setLevel(loglevel)
-
- logfmt = '{"time_stamp": "%(asctime)s", "log_level": "%(levelname)s", "log_message": %(message)s}\n'
- if len(mainlogger.handlers) == 0:
- mainlogger.addHandler(logging.StreamHandler())
- mainlogger.handlers[0].setFormatter(logging.Formatter(logfmt))
- self.log = logging.LoggerAdapter(mainlogger, {})
-
- def _format(self, message: str) -> str:
- """formats log message in json
-
- Args:
- message (str): log message, can be a dict, list, string, or json blob
- """
- try:
- message = json.loads(message)
- except Exception:
- pass
- try:
- return json.dumps(message, indent=4, cls=DateTimeEncoder)
- except Exception:
- return json.dumps(str(message))
-
- def debug(self, message: Any, **kwargs: Any) -> None:
- """wrapper for logging.debug call"""
- self.log.debug(self._format(message), **kwargs)
-
- def info(self, message: Any, **kwargs: Any) -> None:
- ## type: (object, object) -> object
- """wrapper for logging.info call"""
- self.log.info(self._format(message), **kwargs)
-
- def warning(self, message: Any, **kwargs: Any) -> None:
- """wrapper for logging.warning call"""
- self.log.warning(self._format(message), **kwargs)
-
- def error(self, message: Any, **kwargs: Any) -> None:
- """wrapper for logging.error call"""
- self.log.error(self._format(message), **kwargs)
-
- def critical(self, message: Any, **kwargs: Any) -> None:
- """wrapper for logging.critical call"""
- self.log.critical(self._format(message), **kwargs)
-
- def exception(self, message: Any, **kwargs: Any) -> None:
- """wrapper for logging.exception call"""
- self.log.exception(self._format(message), **kwargs)
-
- def log_unhandled_exception(self, message: Any) -> None:
- """log unhandled exception"""
- self.log.exception("Unhandled Exception: {}".format(message))
-
- def log_general_exception(
- self, file: Any, method: Callable[..., Any], exception: Exception
- ) -> None:
- """log general exception"""
- message = {"FILE": file, "METHOD": method, "EXCEPTION": str(exception)}
- self.log.exception(self._format(json.dumps(message)))
+import os
+from datetime import date, datetime
+from json import JSONEncoder
+from typing import TYPE_CHECKING, Any, MutableMapping, Tuple
+
+from botocore.response import StreamingBody
+
+if TYPE_CHECKING:
+ LoggerAdapter = logging.LoggerAdapter[logging.Logger]
+else:
+ from logging import LoggerAdapter
+
+
+_ORIGINAL_LOG_FACTORY = logging.getLogRecordFactory()
+
+
+ACCOUNT_ID_FIELD_NAME = "account_id"
+CUSTOMIZATION_REQUEST_ID_FIELD_NAME = "customization_request_id"
+
+
+def _already_json_encoded(blob: str) -> bool:
+ try:
+ json.loads(blob)
+ return True
+ except (json.JSONDecodeError, TypeError):
+        # json.loads raises TypeError when given a non-string (e.g. a dict or list)
+ return False
+
+
+class _AFTEncoder(JSONEncoder):
+ def default(self, obj: object) -> object:
+ if isinstance(obj, (datetime, date)):
+ return obj.isoformat()
+ elif isinstance(obj, StreamingBody):
+ return obj.read().decode()
+ else:
+ return JSONEncoder.default(self, obj)
+
+
+class _AccountCustomizationAdapter(LoggerAdapter):
+ def process(
+ self, message: str, kwargs: MutableMapping[str, Any]
+ ) -> Tuple[str, MutableMapping[str, Any]]:
+ # Handle optionality
+ if self.extra is None:
+ self.extra = {}
+ log_tracing = {
+ ACCOUNT_ID_FIELD_NAME: self.extra.get(ACCOUNT_ID_FIELD_NAME),
+ CUSTOMIZATION_REQUEST_ID_FIELD_NAME: self.extra.get(
+ CUSTOMIZATION_REQUEST_ID_FIELD_NAME
+ ),
+ "detail": message,
+ }
+ return (
+ json.dumps(log_tracing, cls=_AFTEncoder),
+ kwargs,
+ )
+
+
+def _aft_record_factory(*args: Any, **kwargs: Any) -> logging.LogRecord:
+ record = _ORIGINAL_LOG_FACTORY(*args, **kwargs)
+ if isinstance(record.msg, dict) or not _already_json_encoded(record.msg):
+ record.msg = json.dumps(record.msg, cls=_AFTEncoder)
+ return record
+
+
+def _get_log_level() -> str:
+    # For backwards compatibility: the old implementation defaulted to INFO
+ log_level = os.environ.get("log_level", "info")
+ return log_level.upper()
+
+
+def configure_aft_logger() -> None:
+ fmt = '{"time_stamp": "%(asctime)s", "module": "%(module)s", "log_level": "%(levelname)s", "log_message": %(message)s}'
+ root_logger = logging.getLogger()
+ if root_logger.hasHandlers():
+ console = root_logger.handlers[0]
+ else:
+ console = logging.StreamHandler()
+ root_logger.addHandler(console)
+ console.setFormatter(logging.Formatter(fmt))
+
+ aft_logger = logging.getLogger("aft")
+ aft_logger.setLevel(_get_log_level())
+
+ logging.setLogRecordFactory(_aft_record_factory)
+
+
+def customization_request_logger(
+ aws_account_id: str,
+ customization_request_id: str,
+) -> LoggerAdapter:
+ configure_aft_logger()
+ logger = logging.getLogger("aft.customization")
+ return _AccountCustomizationAdapter(
+ logger,
+ extra={
+ ACCOUNT_ID_FIELD_NAME: aws_account_id,
+ CUSTOMIZATION_REQUEST_ID_FIELD_NAME: customization_request_id,
+ },
+ )
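+
+
+# Illustrative usage (assumed values):
+#   logger = customization_request_logger("111122223333", "req-0001")
+#   logger.info("Starting customization")
+# emits a JSON record carrying account_id and customization_request_id.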
diff --git a/sources/aft-lambda-layer/aft_common/metrics.py b/sources/aft-lambda-layer/aft_common/metrics.py
new file mode 100644
index 00000000..f7062bad
--- /dev/null
+++ b/sources/aft-lambda-layer/aft_common/metrics.py
@@ -0,0 +1,169 @@
+# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+# SPDX-License-Identifier: Apache-2.0
+
+import argparse
+import logging
+from datetime import datetime
+from typing import Any, Dict, Optional, TypedDict
+
+import aft_common.constants
+import requests
+from aft_common import aft_utils as utils
+from aft_common.auth import AuthClient
+from aft_common.ssm import get_ssm_parameter_value
+from boto3.session import Session
+
+logger = logging.getLogger("aft")
+
+
+class MetricsPayloadType(TypedDict):
+ Solution: str
+ TimeStamp: str
+ Version: Optional[str]
+ UUID: Optional[str]
+ Data: Dict[str, Any]
+
+
+class AFTMetrics:
+ def __init__(self) -> None:
+ self.solution_id = "SO0089-aft"
+ self.api_endpoint = "https://metrics.awssolutionsbuilder.com/generic"
+ self.auth = AuthClient()
+
+ def _get_uuid(self, aft_management_session: Session) -> str:
+ uuid = get_ssm_parameter_value(
+ aft_management_session,
+ aft_common.constants.SSM_PARAM_AFT_METRICS_REPORTING_UUID,
+ )
+ return uuid
+
+ def _metrics_reporting_enabled(self, aft_management_session: Session) -> bool:
+ flag = get_ssm_parameter_value(
+ aft_management_session, aft_common.constants.SSM_PARAM_AFT_METRICS_REPORTING
+ )
+
+ if flag.lower() == "true":
+ return True
+ return False
+
+ def _get_aft_deployment_config(
+ self, aft_management_session: Session
+ ) -> Dict[str, str]:
+ config = {}
+
+ config["cloud_trail_enabled"] = get_ssm_parameter_value(
+ aft_management_session,
+ aft_common.constants.SSM_PARAM_FEATURE_CLOUDTRAIL_DATA_EVENTS_ENABLED,
+ )
+ config["enterprise_support_enabled"] = get_ssm_parameter_value(
+ aft_management_session,
+ aft_common.constants.SSM_PARAM_FEATURE_ENTERPRISE_SUPPORT_ENABLED,
+ )
+ config["delete_default_vpc_enabled"] = get_ssm_parameter_value(
+ aft_management_session,
+ aft_common.constants.SSM_PARAM_FEATURE_DEFAULT_VPCS_ENABLED,
+ )
+
+ config["aft_version"] = get_ssm_parameter_value(
+ aft_management_session, aft_common.constants.SSM_PARAM_ACCOUNT_AFT_VERSION
+ )
+
+ config["terraform_version"] = get_ssm_parameter_value(
+ aft_management_session,
+ aft_common.constants.SSM_PARAM_ACCOUNT_TERRAFORM_VERSION,
+ )
+
+ config["region"] = utils.get_session_info(aft_management_session)["region"]
+
+ return config
+
+ def wrap_event_for_api(
+ self, aft_management_session: Session, event: Dict[str, Any]
+ ) -> MetricsPayloadType:
+ payload: MetricsPayloadType = {
+ "Solution": self.solution_id,
+ "TimeStamp": datetime.utcnow().isoformat(timespec="seconds"),
+ "Version": None,
+ "UUID": None,
+ "Data": {},
+ }
+ payload["Solution"] = self.solution_id
+
+ data_body: Dict[str, Any] = {}
+ data_body["event"] = event
+
+ errors = []
+
+ try:
+ payload["Version"] = get_ssm_parameter_value(
+ aft_management_session,
+ aft_common.constants.SSM_PARAM_ACCOUNT_AFT_VERSION,
+ )
+ except Exception as e:
+ payload["Version"] = None
+ errors.append(str(e))
+
+ try:
+ payload["UUID"] = self._get_uuid(aft_management_session)
+ except Exception as e:
+ payload["UUID"] = None
+ errors.append(str(e))
+
+ try:
+ data_body["config"] = self._get_aft_deployment_config(
+ aft_management_session
+ )
+ except Exception as e:
+ data_body["config"] = None
+ errors.append(str(e))
+
+ if not errors:
+ data_body["error"] = None
+ else:
+ data_body["error"] = " | ".join(errors)
+
+ payload["Data"] = data_body
+
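+        # Illustrative final shape (assumed values):
+        # {"Solution": "SO0089-aft", "TimeStamp": "...", "Version": "1.x.x",
+        #  "UUID": "...", "Data": {"event": {...}, "config": {...}, "error": None}}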
+ return payload
+
+ def post_event(self, action: str, status: Optional[str] = None) -> None:
+ aft_management_session = self.auth.get_aft_management_session()
+
+ if self._metrics_reporting_enabled(aft_management_session):
+ event = {"action": action, "status": status}
+
+ payload = self.wrap_event_for_api(aft_management_session, event)
+
+            # Fire-and-forget; a bounded timeout (assumption: 10 seconds)
+            # keeps metrics reporting from blocking the caller
+            requests.post(self.api_endpoint, json=payload, timeout=10)
+
+ return None
+
+
+# Executes when run as a script from the CodeBuild containers
+if __name__ == "__main__":
+ parser = argparse.ArgumentParser(
+ description="Script called from within CodeBuild containers to report back AFT usage metrics"
+ )
+
+ parser.add_argument(
+ "--codebuild-name", type=str, help="Name of the build container"
+ )
+ parser.add_argument(
+ "--codebuild-status",
+ type=int,
+ help="Whether the build succeeded or not (1 or 0)",
+ )
+
+ args = parser.parse_args()
+
+ codebuild_name = args.codebuild_name
+ codebuild_status = "SUCCEEDED" if args.codebuild_status == 1 else "FAILED"
+
+    try:
+        aft_metrics = AFTMetrics()
+        aft_metrics.post_event(action=codebuild_name, status=codebuild_status)
+        logger.info(f"Successfully logged metrics. Action: {codebuild_name}")
+    except Exception as e:
+        logger.info(
+            f"Unable to report metrics. Action: {codebuild_name}; Error: {e}"
+        )
diff --git a/sources/aft-lambda-layer/aft_common/notifications.py b/sources/aft-lambda-layer/aft_common/notifications.py
new file mode 100644
index 00000000..dbfc1e35
--- /dev/null
+++ b/sources/aft-lambda-layer/aft_common/notifications.py
@@ -0,0 +1,52 @@
+# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+# SPDX-License-Identifier: Apache-2.0
+#
+import logging
+from typing import TYPE_CHECKING
+
+from aft_common.aft_utils import sanitize_input_for_logging
+from aft_common.constants import SSM_PARAM_SNS_FAILURE_TOPIC_ARN
+from aft_common.ssm import get_ssm_parameter_value
+from boto3.session import Session
+
+if TYPE_CHECKING:
+ from aws_lambda_powertools.utilities.typing import LambdaContext
+ from mypy_boto3_sns import SNSClient
+ from mypy_boto3_sns.type_defs import PublishResponseTypeDef
+else:
+ PublishResponseTypeDef = object
+ SNSClient = object
+ LambdaContext = object
+
+logger = logging.getLogger("aft")
+
+
+def send_sns_message(
+ session: Session, topic: str, sns_message: str, subject: str
+) -> PublishResponseTypeDef:
+ logger.info("Sending SNS Message")
+ client: SNSClient = session.client("sns")
+ response = client.publish(TopicArn=topic, Message=sns_message, Subject=subject)
+ sanitized_response = sanitize_input_for_logging(response)
+ logger.info(sanitized_response)
+ return response
+
+
+def send_lambda_failure_sns_message(
+ session: Session, message: str, subject: str, context: LambdaContext
+) -> None:
+ msg = f"""An error occurred in the '{context.function_name}' Lambda function.
+For more information, search AWS Request ID '{context.aws_request_id}' in CloudWatch log group '{context.log_group_name}'
+Error Message: {message}"""
+
+ failure_sns_topic = get_ssm_parameter_value(
+ session=session,
+ param=SSM_PARAM_SNS_FAILURE_TOPIC_ARN,
+ )
+ send_sns_message(
+ session=session,
+ topic=failure_sns_topic,
+ sns_message=msg,
+ subject=subject,
+ )
diff --git a/sources/aft-lambda-layer/aft_common/organizations.py b/sources/aft-lambda-layer/aft_common/organizations.py
new file mode 100644
index 00000000..fc612f96
--- /dev/null
+++ b/sources/aft-lambda-layer/aft_common/organizations.py
@@ -0,0 +1,323 @@
+# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+# SPDX-License-Identifier: Apache-2.0
+#
+import logging
+import re
+from copy import deepcopy
+from typing import TYPE_CHECKING, List, Optional, Sequence, Tuple, cast
+
+from aft_common.aft_types import AftAccountInfo
+from aft_common.aft_utils import (
+ emails_are_equal,
+ get_high_retry_botoconfig,
+ resubmit_request_on_boto_throttle,
+)
+from boto3.session import Session
+
+if TYPE_CHECKING:
+ from mypy_boto3_organizations import OrganizationsClient
+ from mypy_boto3_organizations.type_defs import (
+ AccountTypeDef,
+ DescribeAccountResponseTypeDef,
+ OrganizationalUnitTypeDef,
+ ParentTypeDef,
+ TagTypeDef,
+ )
+
+else:
+ DescribeAccountResponseTypeDef = object
+ OrganizationsClient = object
+ TagTypeDef = object
+ OrganizationalUnitTypeDef = object
+ AccountTypeDef = object
+ ParentTypeDef = object
+
+logger = logging.getLogger("aft")
+
+
+class OrganizationsAgent:
+ ROOT_OU = "Root"
+ # https://docs.aws.amazon.com/organizations/latest/APIReference/API_OrganizationalUnit.html
+ # Ex: Sandbox (ou-1234-zxcv)
+ OU_ID_PATTERN = r"\(ou-.*\)"
+ OU_NAME_PATTERN = r".{1,128}"
+ NESTED_OU_NAME_PATTERN = (
+ rf"{OU_NAME_PATTERN}\s{OU_ID_PATTERN}" # space ()
+ )
+
+ def __init__(self, ct_management_session: Session):
+ self.orgs_client: OrganizationsClient = ct_management_session.client(
+ "organizations", config=get_high_retry_botoconfig()
+ )
+
+ # Memoization - cache org query results
+ # Cache is not shared between AFT invocations so staleness due to org updates is unlikely
+ self.org_root_ou_id: Optional[str] = None
+ self.org_ous: Optional[List[OrganizationalUnitTypeDef]] = None
+ self.org_accounts: Optional[List[AccountTypeDef]] = None
+
+ @staticmethod
+ def ou_name_is_nested_format(ou_name: str) -> bool:
+ pattern = re.compile(OrganizationsAgent.NESTED_OU_NAME_PATTERN)
+ if pattern.match(ou_name) is not None:
+ return True
+ return False
+
+ @staticmethod
+ def get_name_and_id_from_nested_ou(
+ nested_ou_name: str,
+ ) -> Optional[Tuple[str, str]]:
+ if not OrganizationsAgent.ou_name_is_nested_format(ou_name=nested_ou_name):
+ return None
+
+ pattern = re.compile(OrganizationsAgent.OU_ID_PATTERN)
+ match = pattern.search(nested_ou_name)
+ if match is None:
+ return None
+ first_id_idx, last_id_idx = match.span()
+
+        # Grab the matched ID from the nested OU string using the span
+ id = nested_ou_name[first_id_idx:last_id_idx]
+ id = id.strip("()")
+
+ # The name is what remains of the nested OU without the ID, minus
+ # the whitespace between the name and ID
+ name = nested_ou_name[: first_id_idx - 1]
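+        # Example (illustrative): "Sandbox (ou-1234-zxcv)" -> ("Sandbox", "ou-1234-zxcv")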
+ return (name, id)
+
+ @staticmethod
+ def get_nested_ou_format_from_name_and_id(ou_name: str, ou_id: str) -> str:
+ return f"{ou_name} ({ou_id})"
+
+ def get_root_ou_id(self) -> str:
+ if self.org_root_ou_id is not None:
+ return self.org_root_ou_id
+
+ # Assumes single-root organizations
+ self.org_root_ou_id = self.orgs_client.list_roots()["Roots"][0]["Id"]
+ return self.org_root_ou_id
+
+ def get_ous_for_root(self) -> List[OrganizationalUnitTypeDef]:
+ return self.get_children_ous_from_parent_id(parent_id=self.get_root_ou_id())
+
+ def get_all_org_accounts(self) -> List[AccountTypeDef]:
+ if self.org_accounts is not None:
+ return self.org_accounts
+
+ paginator = self.orgs_client.get_paginator("list_accounts")
+ accounts = []
+ for page in paginator.paginate():
+ accounts.extend(page["Accounts"])
+
+ self.org_accounts = accounts
+ return self.org_accounts
+
+ def get_all_org_ous(self) -> List[OrganizationalUnitTypeDef]:
+ if self.org_ous is not None:
+ return self.org_ous
+
+ # Including the root OU
+ list_root_response = self.orgs_client.list_roots()
+ root_ou: OrganizationalUnitTypeDef = {
+ "Id": list_root_response["Roots"][0]["Id"],
+ "Arn": list_root_response["Roots"][0]["Arn"],
+ "Name": list_root_response["Roots"][0]["Name"],
+ }
+
+ org_ous = [root_ou]
+
+ # Get the children OUs of the root as the first pass
+ root_children = self.get_ous_for_root()
+ org_ous.extend(root_children)
+
+ # Exclude root to avoid double counting children
+ ous_to_query = deepcopy(root_children)
+
+ # Recursively search all children OUs for further children
+ while len(ous_to_query) > 0:
+ parent_id: str = ous_to_query.pop()["Id"]
+ children_ous = self.get_children_ous_from_parent_id(parent_id=parent_id)
+ org_ous.extend(children_ous)
+ ous_to_query.extend(children_ous)
+
+ self.org_ous = org_ous
+
+ return self.org_ous
+
+ def get_parents_from_account_id(self, account_id: str) -> List[ParentTypeDef]:
+ paginator = self.orgs_client.get_paginator("list_parents")
+ pages = paginator.paginate(ChildId=account_id)
+ parents = []
+ for page in pages:
+ parents.extend(page["Parents"])
+ return parents
+
+ def get_children_ous_from_parent_id(
+ self, parent_id: str
+ ) -> List[OrganizationalUnitTypeDef]:
+ paginator = self.orgs_client.get_paginator(
+ "list_organizational_units_for_parent"
+ )
+ pages = paginator.paginate(ParentId=parent_id)
+ children_ous = []
+ for page in pages:
+ children_ous.extend(page["OrganizationalUnits"])
+ return children_ous
+
+ def get_ou_ids_from_ou_names(self, target_ou_names: List[str]) -> List[str]:
+ ous = self.get_all_org_ous()
+ org_ou_map = {}
+
+ # Convert list of OUs to id->name map for constant time lookups
+ for ou in ous:
+ org_ou_map[ou["Id"]] = ou["Name"]
+
+ # Search the map for every target exactly once
+ matched_ou_ids = []
+ for target_name in target_ou_names:
+ # Only match nested OU targets if both name and ID are the same
+ nested_parsed = OrganizationsAgent.get_name_and_id_from_nested_ou(
+ nested_ou_name=target_name
+ )
+ if nested_parsed is not None: # Nested OU pattern matched!
+ target_name, target_id = nested_parsed
+ if target_id in org_ou_map.keys():
+ if org_ou_map[target_id] == target_name:
+ matched_ou_ids.append(target_id)
+ else:
+ if target_name in org_ou_map.values():
+ target_id = [
+ id for id, name in org_ou_map.items() if target_name == name
+ ][0]
+ matched_ou_ids.append(target_id)
+
+ return matched_ou_ids
+
+ def get_ou_from_account_id(self, account_id: str) -> OrganizationalUnitTypeDef:
+ # NOTE: Assumes single-parent accounts
+ parents = self.get_parents_from_account_id(account_id=account_id)
+ parent = parents[0]
+
+ # Child of Root
+ if parent["Type"] == "ROOT":
+ list_root_response = self.orgs_client.list_roots()
+ # NOTE: Assumes single root structure
+ root_ou: OrganizationalUnitTypeDef = {
+ "Id": list_root_response["Roots"][0]["Id"],
+ "Arn": list_root_response["Roots"][0]["Arn"],
+ "Name": list_root_response["Roots"][0]["Name"],
+ }
+ return root_ou
+
+ # Child of non-Root OU
+ describe_ou_response = self.orgs_client.describe_organizational_unit(
+ OrganizationalUnitId=parent["Id"]
+ )
+ parent_ou: OrganizationalUnitTypeDef = describe_ou_response[
+ "OrganizationalUnit"
+ ]
+ return parent_ou
+
+ def get_accounts_for_ou(self, ou_id: str) -> List[AccountTypeDef]:
+ paginator = self.orgs_client.get_paginator("list_accounts_for_parent")
+ pages = paginator.paginate(ParentId=ou_id)
+ accounts = []
+ for page in pages:
+ accounts.extend(page["Accounts"])
+ return accounts
+
+ def get_account_ids_in_ous(self, ou_names: List[str]) -> List[str]:
+ ou_ids = self.get_ou_ids_from_ou_names(target_ou_names=ou_names)
+ account_ids = []
+ for ou_id in ou_ids:
+ account_ids.extend(
+ [acct["Id"] for acct in self.get_accounts_for_ou(ou_id=ou_id)]
+ )
+ return account_ids
+
+ def account_is_member_of_root(self, account_id: str) -> bool:
+ # Handles (future) multi-parent case
+ account_parents = self.get_parents_from_account_id(account_id=account_id)
+ return any([parent["Type"] == "ROOT" for parent in account_parents])
+
+ def ou_contains_account(self, ou_name: str, account_id: str) -> bool:
+ # NOTE: Assumes single-parent accounts
+ current_ou = self.get_ou_from_account_id(account_id=account_id)
+ if current_ou:
+ if ou_name == current_ou["Name"]:
+ return True
+ return False
+
+ @resubmit_request_on_boto_throttle
+ def tag_org_resource(
+ self,
+ resource: str,
+ tags: Sequence[TagTypeDef],
+ rollback: bool = False,
+ ) -> None:
+ if rollback:
+            # list_tags_for_resource returns a response dict; keep only the tag list
+            current_tags = self.orgs_client.list_tags_for_resource(
+                ResourceId=resource
+            )["Tags"]
+ self.orgs_client.untag_resource(
+ ResourceId=resource, TagKeys=[tag["Key"] for tag in tags]
+ )
+ self.orgs_client.tag_resource(
+ ResourceId=resource, Tags=cast(Sequence[TagTypeDef], current_tags)
+ )
+
+ else:
+ self.orgs_client.tag_resource(ResourceId=resource, Tags=tags)
+
+ def list_tags_for_resource(self, resource: str) -> List[TagTypeDef]:
+ return self.orgs_client.list_tags_for_resource(ResourceId=resource)["Tags"]
+
+ def get_account_email_from_id(self, account_id: str) -> str:
+ response: DescribeAccountResponseTypeDef = self.orgs_client.describe_account(
+ AccountId=account_id
+ )
+ return response["Account"]["Email"]
+
+ def get_account_id_from_email(
+ self, email: str, ou_name: Optional[str] = None
+ ) -> str:
+ if ou_name is not None:
+ # If OU known, search it instead of the entire org; supports nested OU format
+ # NOTE: Be careful using this parameter as the OU in account request is
+ # NOT always equal to the OU an account is currently in (move-OU requests)
+ account_ids_in_ou = self.get_account_ids_in_ous(ou_names=[ou_name])
+ for account_id in account_ids_in_ou:
+ account_email = self.get_account_email_from_id(account_id=account_id)
+ if emails_are_equal(account_email, email):
+ return account_id
+
+ for account in self.get_all_org_accounts():
+ if emails_are_equal(account["Email"], email):
+ return account["Id"]
+
+ raise Exception(f"Account email {email} not found in Organization")
+
+ def get_aft_account_info(self, account_id: str) -> AftAccountInfo:
+ logger.info(f"Getting details for {account_id}")
+
+ describe_response = self.orgs_client.describe_account(AccountId=account_id)
+ account = describe_response["Account"]
+
+ # NOTE: Assumes single-parent accounts
+ parents = self.get_parents_from_account_id(account_id=account_id)
+ parent = parents[0]
+
+ aft_account_info = AftAccountInfo(
+ id=account["Id"],
+ email=account["Email"],
+ name=account["Name"],
+ joined_method=account["JoinedMethod"],
+ joined_date=str(account["JoinedTimestamp"]),
+ status=account["Status"],
+ parent_id=parent["Id"],
+ parent_type=parent["Type"],
+ type="account",
+ vendor="aws",
+ )
+
+ logger.info(f"Account details: {aft_account_info}")
+
+ return aft_account_info
diff --git a/sources/aft-lambda-layer/aft_common/premium_support.py b/sources/aft-lambda-layer/aft_common/premium_support.py
index 387a9b50..8be8b88b 100644
--- a/sources/aft-lambda-layer/aft_common/premium_support.py
+++ b/sources/aft-lambda-layer/aft_common/premium_support.py
@@ -3,7 +3,12 @@
#
from typing import TYPE_CHECKING
+from aft_common.aft_utils import (
+ get_high_retry_botoconfig,
+ resubmit_request_on_boto_throttle,
+)
from boto3.session import Session
+from botocore.config import Config
if TYPE_CHECKING:
from mypy_boto3_support import SupportClient
@@ -13,6 +18,7 @@
SUPPORT_API_REGION = "us-east-1"
+@resubmit_request_on_boto_throttle
def account_enrollment_requested(
ct_management_session: Session, account_id: str
) -> bool:
@@ -22,9 +28,11 @@ def account_enrollment_requested(
"""
submitted_enroll_case_title = f"Add Account {account_id} to Enterprise Support"
- client: SupportClient = ct_management_session.client(
- "support", region_name=SUPPORT_API_REGION
+ # Must use us-east-1 region for Support API
+ botoconfig = Config.merge(
+ get_high_retry_botoconfig(), Config(region_name=SUPPORT_API_REGION)
)
+ client: SupportClient = ct_management_session.client("support", config=botoconfig)
paginator = client.get_paginator("describe_cases")
pages = paginator.paginate(
includeResolvedCases=True,
diff --git a/sources/aft-lambda-layer/aft_common/schemas/valid_account_request_schema.json b/sources/aft-lambda-layer/aft_common/schemas/valid_account_request_schema.json
deleted file mode 100644
index 103cab1d..00000000
--- a/sources/aft-lambda-layer/aft_common/schemas/valid_account_request_schema.json
+++ /dev/null
@@ -1,75 +0,0 @@
-{
- "$schema": "http://json-schema.org/draft-07/schema",
- "type": "object",
- "additionalProperties": false,
- "properties": {
- "$schema": {
- "type": "string"
- },
- "account_request": {
- "type": "object",
- "properties": {
- "control_tower_parameters": {
- "type": "object",
- "properties": {
- "AccountEmail": {
- "type": "string"
- },
- "AccountName": {
- "type": "string"
- },
- "ManagedOrganizationalUnit": {
- "type": "string"
- },
- "SSOUserEmail": {
- "type": "string"
- },
- "SSOUserFirstName": {
- "type": "string"
- },
- "SSOUserLastName": {
- "type": "string"
- }
- },
- "required": [
- "AccountEmail",
- "AccountName",
- "ManagedOrganizationalUnit",
- "SSOUserEmail",
- "SSOUserFirstName",
- "SSOUserLastName"
- ]
- },
- "customer_customizations": {
- "type": "array"
- },
- "account_tags": {
- "type": "object",
- "additionalProperties": true,
- "properties": {}
- }
- },
- "required": [
- "account_tags",
- "control_tower_parameters"
- ]
- },
- "control_tower_event": {
- "type": "object",
- "additionalProperties": true,
- "properties": {
- "source": {
- "type": "string",
- "enum": ["aws.controltower"]
- }
- }
- },
- "test_event": {
- "type": "boolean"
- },
- "account_provisioning": {
- "type": "object"
- }
- },
- "required": [ "account_request", "control_tower_event" ]
-}
diff --git a/sources/aft-lambda-layer/aft_common/service_catalog.py b/sources/aft-lambda-layer/aft_common/service_catalog.py
new file mode 100644
index 00000000..de9c92ec
--- /dev/null
+++ b/sources/aft-lambda-layer/aft_common/service_catalog.py
@@ -0,0 +1,182 @@
+# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+# SPDX-License-Identifier: Apache-2.0
+#
+import logging
+from typing import (
+ TYPE_CHECKING,
+ Any,
+ Dict,
+ Iterable,
+ Iterator,
+ List,
+ Literal,
+ Mapping,
+ Sequence,
+)
+
+from aft_common import aft_utils as utils
+from aft_common import ddb
+from aft_common.account_provisioning_framework import ProvisionRoles
+from aft_common.auth import AuthClient
+from aft_common.constants import SSM_PARAM_SC_PRODUCT_NAME
+from aft_common.ssm import get_ssm_parameter_value
+from boto3.session import Session
+
+if TYPE_CHECKING:
+ from mypy_boto3_servicecatalog import ServiceCatalogClient
+ from mypy_boto3_servicecatalog.type_defs import (
+ ProvisionedProductAttributeTypeDef,
+ SearchProvisionedProductsOutputTypeDef,
+ )
+else:
+ ServiceCatalogClient = object
+ SearchProvisionedProductsOutputTypeDef = object
+ ProvisionedProductAttributeTypeDef = object
+
+logger = logging.getLogger("aft")
+
+
+def get_ct_product_id(session: Session, ct_management_session: Session) -> str:
+ client: ServiceCatalogClient = ct_management_session.client("servicecatalog")
+ sc_product_name = get_ssm_parameter_value(session, SSM_PARAM_SC_PRODUCT_NAME)
+ logger.info("Getting product ID for " + sc_product_name)
+
+ response = client.describe_product_as_admin(Name=sc_product_name)
+ product_id: str = response["ProductViewDetail"]["ProductViewSummary"]["ProductId"]
+ logger.info(product_id)
+ return product_id
+
+
+def ct_provisioning_artifact_is_active(
+ session: Session, ct_management_session: Session, artifact_id: str
+) -> bool:
+ client: ServiceCatalogClient = ct_management_session.client(
+ "servicecatalog", config=utils.get_high_retry_botoconfig()
+ )
+ sc_product_name = get_ssm_parameter_value(session, SSM_PARAM_SC_PRODUCT_NAME)
+ logger.info("Checking provisioning artifact ID " + artifact_id)
+ try:
+ response = client.describe_provisioning_artifact(
+ ProductName=sc_product_name, ProvisioningArtifactId=artifact_id
+ )
+ provisioning_artifact = response["ProvisioningArtifactDetail"]
+ except client.exceptions.ResourceNotFoundException:
+ logger.info("Provisioning artifact id: " + artifact_id + " does not exist")
+ return False
+
+ if provisioning_artifact["Active"]:
+ logger.info(provisioning_artifact["Id"] + " is active")
+ return True
+ else:
+ logger.info(provisioning_artifact["Id"] + " is NOT active")
+ return False
+
+
+def get_ct_provisioning_artifact_id(
+ session: Session, ct_management_session: Session
+) -> str:
+ client: ServiceCatalogClient = ct_management_session.client("servicecatalog")
+ sc_product_name = get_ssm_parameter_value(session, SSM_PARAM_SC_PRODUCT_NAME)
+ logger.info("Getting provisioning artifact ID for " + sc_product_name)
+
+ response = client.describe_product_as_admin(Name=sc_product_name)
+ provisioning_artifacts = response["ProvisioningArtifactSummaries"]
+ for pa in provisioning_artifacts:
+ if ct_provisioning_artifact_is_active(session, ct_management_session, pa["Id"]):
+ pa_id: str = pa["Id"]
+ logger.info("Using provisioning artifact ID: " + pa_id)
+ return pa_id
+
+ raise Exception("No Provisioning Artifact ID found")
+
+
+def get_healthy_ct_product_batch(
+ ct_management_session: Session,
+) -> Iterator[Iterable[ProvisionedProductAttributeTypeDef]]:
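+    # Yields one batch per Service Catalog results page (PageSize=100), already
+    # filtered to healthy products, so callers can stop paging as soon as a
+    # match is found.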
+ sc_product_search_filter: Mapping[Literal["SearchQuery"], Sequence[str]] = {
+ "SearchQuery": [
+ "type:CONTROL_TOWER_ACCOUNT",
+ ]
+ }
+ sc_client = ct_management_session.client(
+ "servicecatalog", config=utils.get_high_retry_botoconfig()
+ )
+ logger.info(
+ "Searching Account Factory for account with matching email in healthy status"
+ )
+ # Get products with the required type
+ response: SearchProvisionedProductsOutputTypeDef = (
+ sc_client.search_provisioned_products(
+ Filters=sc_product_search_filter, PageSize=100
+ )
+ )
+ provisioned_products = response["ProvisionedProducts"]
+ healthy_products: Iterable[ProvisionedProductAttributeTypeDef] = filter(
+ ct_account_product_is_healthy, provisioned_products
+ )
+
+ yield healthy_products
+
+ while response.get("NextPageToken") is not None:
+ response = sc_client.search_provisioned_products(
+ Filters=sc_product_search_filter,
+ PageSize=100,
+ PageToken=response["NextPageToken"],
+ )
+ provisioned_products = response["ProvisionedProducts"]
+ healthy_products = filter(ct_account_product_is_healthy, provisioned_products)
+
+ yield healthy_products
+
+ return
+
+
+def email_exists_in_batch(
+ target_email: str, pps: List[str], ct_management_session: Session
+) -> bool:
+ sc_client = ct_management_session.client(
+ "servicecatalog", config=utils.get_high_retry_botoconfig()
+ )
+ for pp in pps:
+ pp_email = sc_client.get_provisioned_product_outputs(
+ ProvisionedProductId=pp, OutputKeys=["AccountEmail"]
+ )["Outputs"][0]["OutputValue"]
+ if utils.emails_are_equal(target_email, pp_email):
+ logger.info("Account email match found; provisioned product exists.")
+ return True
+ return False
+
+
+def provisioned_product_exists(record: Dict[str, Any]) -> bool:
+ # Go get all my accounts from SC (Not all PPs)
+ auth = AuthClient()
+ ct_management_session = auth.get_ct_management_session(
+ role_name=ProvisionRoles.SERVICE_ROLE_NAME
+ )
+ account_email = ddb.unmarshal_ddb_item(record["dynamodb"]["NewImage"])[
+ "control_tower_parameters"
+ ]["AccountEmail"]
+
+ for batch in get_healthy_ct_product_batch(
+ ct_management_session=ct_management_session
+ ):
+ pp_ids = [product["Id"] for product in batch]
+
+ if email_exists_in_batch(account_email, pp_ids, ct_management_session):
+ return True
+
+ # We processed all batches of accounts with healthy statuses, and did not find a match
+ # It is possible that the account exists, but does not have a healthy status
+ logger.info(
+ "Did not find account with matching email in healthy status in Account Factory"
+ )
+
+ return False
+
+
+def ct_account_product_is_healthy(product: ProvisionedProductAttributeTypeDef) -> bool:
+ aft_sc_product_allowed_status = ["AVAILABLE", "TAINTED"]
+ # If LastSuccessfulProvisioningRecordId does not exist, the account was never successfully provisioned
+ return product["Status"] in aft_sc_product_allowed_status and bool(
+ product.get("LastSuccessfulProvisioningRecordId")
+ )
diff --git a/sources/aft-lambda-layer/aft_common/shared_account.py b/sources/aft-lambda-layer/aft_common/shared_account.py
new file mode 100644
index 00000000..af08995e
--- /dev/null
+++ b/sources/aft-lambda-layer/aft_common/shared_account.py
@@ -0,0 +1,78 @@
+# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+# SPDX-License-Identifier: Apache-2.0
+#
+import logging
+from typing import Any, Dict, List
+
+from aft_common import ddb
+from aft_common.account_provisioning_framework import ProvisionRoles
+from aft_common.aft_utils import emails_are_equal, get_high_retry_botoconfig
+from aft_common.auth import AuthClient
+from aft_common.constants import (
+ SSM_PARAM_ACCOUNT_AUDIT_ACCOUNT_ID,
+ SSM_PARAM_ACCOUNT_CT_MANAGEMENT_ACCOUNT_ID,
+ SSM_PARAM_ACCOUNT_LOG_ARCHIVE_ACCOUNT_ID,
+)
+from aft_common.organizations import OrganizationsAgent
+from aft_common.ssm import get_ssm_parameter_value
+from boto3.session import Session
+
+logger = logging.getLogger("aft")
+
+
+def shared_account_request(event_record: Dict[str, Any], auth: AuthClient) -> bool:
+ ct_params = ddb.unmarshal_ddb_item(event_record["dynamodb"]["NewImage"])[
+ "control_tower_parameters"
+ ]
+ account_email = ct_params["AccountEmail"]
+ account_name = ct_params["AccountName"]
+ request_ou = ct_params["ManagedOrganizationalUnit"]
+ shared_account_ids = get_shared_ids(
+ aft_management_session=auth.get_aft_management_session()
+ )
+ ct_management_session = auth.get_ct_management_session(
+ role_name=ProvisionRoles.SERVICE_ROLE_NAME
+ )
+ orgs_client = ct_management_session.client(
+ "organizations", config=get_high_retry_botoconfig()
+ )
+ for shared_account_id in shared_account_ids:
+ response = orgs_client.describe_account(AccountId=shared_account_id)
+ if (
+ emails_are_equal(response["Account"]["Email"], account_email)
+ and response["Account"]["Name"] == account_name
+ ):
+ orgs_agent = OrganizationsAgent(ct_management_session=ct_management_session)
+ if not orgs_agent.ou_contains_account(
+ ou_name=request_ou, account_id=shared_account_id
+ ):
+ raise ValueError(
+ "Unsupported action: Cannot change OU for a Shared CT account or CT management account"
+ )
+ return True
+ elif (
+ emails_are_equal(response["Account"]["Email"], account_email)
+ and response["Account"]["Name"] != account_name
+ ):
+ raise ValueError(
+ f"Account Email {account_email} is a shared account email, however, the Account Name {account_name} does not match"
+ )
+ elif response["Account"]["Name"] == account_name and not emails_are_equal(
+ response["Account"]["Email"], account_email
+ ):
+ raise ValueError(
+ f"Account Name {account_name} is a shared account Name, however, the Account Email {account_email} does not match"
+ )
+ return False
+
+
+def get_shared_ids(aft_management_session: Session) -> List[str]:
+ shared_account_ssm_params = [
+ SSM_PARAM_ACCOUNT_LOG_ARCHIVE_ACCOUNT_ID,
+ SSM_PARAM_ACCOUNT_AUDIT_ACCOUNT_ID,
+ SSM_PARAM_ACCOUNT_CT_MANAGEMENT_ACCOUNT_ID,
+ ]
+ return [
+ get_ssm_parameter_value(session=aft_management_session, param=ssm_param)
+ for ssm_param in shared_account_ssm_params
+ ]
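+
+
+# Illustrative return value (hypothetical account IDs), ordered log-archive, audit,
+# then CT management: ["111111111111", "222222222222", "333333333333"]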
diff --git a/sources/aft-lambda-layer/aft_common/sqs.py b/sources/aft-lambda-layer/aft_common/sqs.py
new file mode 100644
index 00000000..53204c0e
--- /dev/null
+++ b/sources/aft-lambda-layer/aft_common/sqs.py
@@ -0,0 +1,81 @@
+# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+# SPDX-License-Identifier: Apache-2.0
+#
+import json
+import logging
+import uuid
+from typing import TYPE_CHECKING, Any, Dict, Optional
+
+import aft_common.constants
+import aft_common.ssm
+from aft_common import aft_utils as utils
+from boto3.session import Session
+
+if TYPE_CHECKING:
+ from mypy_boto3_sqs import SQSClient
+ from mypy_boto3_sqs.type_defs import MessageTypeDef, SendMessageResultTypeDef
+else:
+ SQSClient = object
+ MessageTypeDef = object
+ SendMessageResultTypeDef = object
+
+logger = logging.getLogger("aft")
+
+
+def build_sqs_url(session: Session, queue_name: str) -> str:
+ account_info = utils.get_session_info(session)
+ return f'https://sqs.{account_info["region"]}.amazonaws.com/{account_info["account"]}/{queue_name}'
+
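+# Illustrative result, assuming a session in us-east-1 for account 111122223333 and a
+# hypothetical queue name:
+#   build_sqs_url(session, "aft-account-request.fifo")
+#   -> "https://sqs.us-east-1.amazonaws.com/111122223333/aft-account-request.fifo"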
+
+def receive_sqs_message(session: Session, sqs_queue: str) -> Optional[MessageTypeDef]:
+ client: SQSClient = session.client("sqs")
+ sqs_url = build_sqs_url(session, sqs_queue)
+ logger.info(f"Fetching SQS Messages from {sqs_url}")
+
+ response = client.receive_message(
+ QueueUrl=sqs_url,
+ MaxNumberOfMessages=1,
+ ReceiveRequestAttemptId=str(uuid.uuid1()),
+ )
+ if "Messages" in response.keys():
+ logger.info("There are messages pending processing")
+ message = response["Messages"][0]
+ logger.info("Message retrieved")
+ logger.info(utils.sanitize_input_for_logging(message))
+ return message
+ else:
+ logger.info("There are no messages pending processing")
+ return None
+
+
+def delete_sqs_message(session: Session, message: MessageTypeDef) -> None:
+ client: SQSClient = session.client("sqs")
+ sqs_queue = aft_common.ssm.get_ssm_parameter_value(
+ session, aft_common.constants.SSM_PARAM_ACCOUNT_REQUEST_QUEUE
+ )
+ receipt_handle = message["ReceiptHandle"]
+ logger.info("Deleting SQS message with handle " + receipt_handle)
+ client.delete_message(
+ QueueUrl=build_sqs_url(session, sqs_queue), ReceiptHandle=receipt_handle
+ )
+
+
+def send_sqs_message(
+ session: Session, sqs_url: str, message: Dict[str, Any]
+) -> SendMessageResultTypeDef:
+ sqs: SQSClient = session.client("sqs")
+ logger.info("Sending SQS message to " + sqs_url)
+ logger.info(message)
+
+ unique_id = str(uuid.uuid1())
+
+ response = sqs.send_message(
+ QueueUrl=sqs_url,
+ MessageBody=json.dumps(message),
+ MessageDeduplicationId=unique_id,
+ MessageGroupId=unique_id,
+ )
+ sanitized_response = utils.sanitize_input_for_logging(response)
+ logger.info(sanitized_response)
+
+ return response
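+
+
+# Usage note (sketch): MessageDeduplicationId is supported only on FIFO queues, so
+# sqs_url is expected to reference a .fifo queue, e.g. (hypothetical queue name):
+#   send_sqs_message(session, build_sqs_url(session, "aft-account-request.fifo"),
+#                    {"operation": "ADD"})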
diff --git a/sources/aft-lambda-layer/aft_common/ssm.py b/sources/aft-lambda-layer/aft_common/ssm.py
new file mode 100644
index 00000000..87c1c06a
--- /dev/null
+++ b/sources/aft-lambda-layer/aft_common/ssm.py
@@ -0,0 +1,58 @@
+# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+# SPDX-License-Identifier: Apache-2.0
+#
+import logging
+from typing import Dict, List, Sequence
+
+from aft_common.aft_utils import (
+ get_high_retry_botoconfig,
+ resubmit_request_on_boto_throttle,
+ yield_batches_from_list,
+)
+from aft_common.constants import SSM_PARAMETER_PATH
+from boto3.session import Session
+
+logger = logging.getLogger("aft")
+
+
+@resubmit_request_on_boto_throttle
+def put_ssm_parameters(session: Session, parameters: Dict[str, str]) -> None:
+ client = session.client("ssm", config=get_high_retry_botoconfig())
+
+ for key, value in parameters.items():
+        client.put_parameter(
+ Name=SSM_PARAMETER_PATH + key, Value=value, Type="String", Overwrite=True
+ )
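+
+# Illustrative call (hypothetical custom field): put_ssm_parameters(session,
+# {"vpc_cidr": "10.0.0.0/16"}) writes a String parameter named
+# SSM_PARAMETER_PATH + "vpc_cidr", overwriting any existing value.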
+
+
+@resubmit_request_on_boto_throttle
+def get_ssm_parameters_names_by_path(session: Session, path: str) -> List[str]:
+ client = session.client("ssm", config=get_high_retry_botoconfig())
+ paginator = client.get_paginator("get_parameters_by_path")
+ pages = paginator.paginate(Path=path, Recursive=True)
+
+ parameter_names = []
+ for page in pages:
+ parameter_names.extend([param["Name"] for param in page["Parameters"]])
+
+ return parameter_names
+
+
+@resubmit_request_on_boto_throttle
+def delete_ssm_parameters(session: Session, parameters: Sequence[str]) -> None:
+ batches = yield_batches_from_list(
+ parameters, batch_size=10
+ ) # Max batch size for API
+ client = session.client("ssm", config=get_high_retry_botoconfig())
+ for batched_names in batches:
+        client.delete_parameters(Names=batched_names)
+
+
+def get_ssm_parameter_value(session: Session, param: str, decrypt: bool = False) -> str:
+ client = session.client("ssm")
+ logger.info("Getting SSM Parameter " + param)
+
+ response = client.get_parameter(Name=param, WithDecryption=decrypt)
+
+ param_value: str = response["Parameter"]["Value"]
+ return param_value
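+
+
+# Illustrative call (parameter name as used by creds.sh):
+#   get_ssm_parameter_value(session, "/aft/account/aft-management/account-id")
+# returns the parameter's string value, decrypted only when decrypt=True.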
diff --git a/sources/aft-lambda-layer/aft_common/templates/aftmanagement.tpl b/sources/aft-lambda-layer/aft_common/templates/aftmanagement.tpl
deleted file mode 100644
index 14789505..00000000
--- a/sources/aft-lambda-layer/aft_common/templates/aftmanagement.tpl
+++ /dev/null
@@ -1,12 +0,0 @@
-{
- "Version": "2012-10-17",
- "Statement": [
- {
- "Effect": "Allow",
- "Principal": {
- "AWS": "arn:aws:iam::{AftManagementAccount}:assumed-role/AWSAFTAdmin/AWSAFT-Session"
- },
- "Action": "sts:AssumeRole"
- }
- ]
-}
diff --git a/sources/aft-lambda-layer/pyproject.toml b/sources/aft-lambda-layer/pyproject.toml
new file mode 100644
index 00000000..a70da7cb
--- /dev/null
+++ b/sources/aft-lambda-layer/pyproject.toml
@@ -0,0 +1,47 @@
+# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+# SPDX-License-Identifier: Apache-2.0
+
+[build-system]
+requires = [
+ "setuptools >= 70.0.0",
+ "wheel",
+]
+
+
+# url="https://github.com/aws-ia/terraform-aws-control_tower_account_factory"
+
+[project]
+name = "aft-common"
+authors = [
+ {name = "AWS"}
+]
+version = "0.2.0"
+requires-python = ">=3.11"
+description="Common framework for AWS Control Tower Account Factory for Terraform"
+classifiers=[
+ "Programming Language :: Python :: 3.11",
+ "License :: OSI Approved :: MIT License",
+ "Operating System :: OS Independent",
+]
+dependencies = [
+ "boto3 == 1.28.17",
+ "botocore == 1.31.17",
+ "requests == 2.32.2",
+ "jsonschema == 4.3.2",
+ "urllib3 >= 1.26.19"
+]
+
+
+[project.optional-dependencies]
+dev = [
+ "pytest == 7.1.2",
+ "pytest-cov == 4.1.0",
+ "pytest-subtests == 0.8.0",
+ "black == 24.3.0",
+ "isort == 5.10.1",
+ "pre-commit == 2.19.0",
+ "mypy == 0.961",
+ "boto3-stubs[support, stepfunctions, ec2, organizations, servicecatalog, sqs, lambda, sns, sts, cloudtrail, ssm, iam, dynamodb, inspector2] == 1.27.1",
+ "aws_lambda_powertools == 1.25.9",
+ "types-requests == 2.27.5",
+]
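+
+# Illustrative local installs from this directory (not part of any AFT pipeline):
+#   pip install .          # runtime dependencies only
+#   pip install ".[dev]"   # also installs the development tooling above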
diff --git a/sources/aft-lambda-layer/readme.md b/sources/aft-lambda-layer/readme.md
deleted file mode 100644
index f670f952..00000000
--- a/sources/aft-lambda-layer/readme.md
+++ /dev/null
@@ -1,67 +0,0 @@
-# AWS AFT CORE - aft_common python module & Lambda Layer
-
-## Description
-
-Common framework consumed by python modules across AFT.
-
-## Location
-
-The aft_common source codes is located here:
-
-```modules/lambda_layer/aft_common_layer/python/lib/python3.8/site-packages/aft_common```
-
-## Contents
-
-* AWS Logger
-* utils
-
-## Logger
-
-AWS-provided logger which replaces the standard python logging library.
-
-Logs objects as json which is friendlier to read in CloudWatch logs.
-
-## Utils
-
-Utils contains the shared functions for common tasks across AFT.
-
-Additionally, configuration elements stored in SSM are accessible through get_ssm_parameter and a number of constants at the top of the module.
-
-Function list:
-
-* build_role_arn
-* build_sfn_arn
-* build_sqs_url
-* ct_provisioning_artifact_is_active
-* delete_sqs_message
-* get_account
-* get_account_by_email
-* get_account_email_from_id
-* get_account_info
-* get_assume_role_credentials
-* get_boto_session
-* get_ct_execution_session
-* get_ct_management_session
-* get_ct_product_id
-* get_ct_provisioning_artifact_id
-* get_org_account_emails
-* get_org_account_names
-* get_org_accounts
-* get_org_ou_names
-* get_ssm_parameter_value
-* invoke_lambda
-* invoke_step_function
-* is_controltower_event
-* is_aft_supported_controltower_event
-* list_accounts
-* product_provisioning_in_progress
-* put_ddb_item
-* receive_sqs_message
-* send_sns_message
-* send_sqs_message
-* tag_org_resource
-* unmarshal_ddb_item
-
-For more information on the implementation of each of these functions, see the source here:
-
-```modules/lambda_layer/aft_common_layer/python/lib/python3.8/site-packages/aft_common/utils.py```
diff --git a/sources/aft-lambda-layer/setup.py b/sources/aft-lambda-layer/setup.py
index 90ba2284..05879af5 100644
--- a/sources/aft-lambda-layer/setup.py
+++ b/sources/aft-lambda-layer/setup.py
@@ -3,82 +3,7 @@
#
import setuptools
-with open("readme.md", "r", encoding="utf-8") as fh:
- long_description = fh.read()
-
setuptools.setup(
- name="aft-common",
- version="0.1.0",
- author="AWS",
- description="Common framework for AWS Control Tower Account Factory for Terraform",
- long_description=long_description,
- long_description_content_type="text/markdown",
- url="https://github.com/aws-ia/terraform-aws-control_tower_account_factory",
- classifiers=[
- "Programming Language :: Python :: 3.8",
- "License :: OSI Approved :: MIT License",
- "Operating System :: OS Independent",
- ],
packages=setuptools.find_packages(),
- package_data={"aft_common": ["schemas/*.json", "templates/*"]},
- python_requires=">=3.8",
- install_requires=[
- "appdirs==1.4.4",
- "attrs==21.2.0",
- "autopep8==1.5.7",
- "boto3==1.17.92",
- "botocore==1.20.92",
- "cffi==1.14.6",
- "cryptography == 3.4.7",
- "decorator == 5.0.9",
- "distlib == 0.3.2",
- "execnet == 1.9.0",
- "filelock == 3.0.12",
- "git-remote-codecommit == 1.15.1",
- "iniconfig == 1.1.1",
- "jmespath == 0.10.0",
- "jsonpath == 0.82",
- "jsonpath-ng == 1.5.3",
- "lark-parser == 0.10.1",
- "packaging == 20.9",
- "pluggy == 0.13.1",
- "ply == 3.11",
- "py == 1.10.0",
- "pycparser == 2.20",
- "Pygments == 2.10.0",
- "pyparsing == 2.4.7",
- "pyrsistent == 0.17.3",
- "python-dateutil == 2.8.1",
- "python-hcl2 == 3.0.1",
- "python-terraform == 0.10.1",
- "pytz == 2021.1",
- "PyYAML == 5.4.1",
- "regex == 2021.8.3",
- "s3transfer == 0.4.2",
- "six == 1.16.0",
- "toml == 0.10.2",
- "tzlocal == 2.1",
- "urllib3 == 1.26.5",
- "utils == 1.0.1",
- "virtualenv == 20.4.7",
- "wcwidth == 0.2.5",
- "whaaaaat == 0.5.2",
- "Jinja2 == 3.0.3",
- "jsonschema == 4.3.2",
- ],
- extras_require={
- "dev": [
- "pytest == 6.2.4",
- "ipython == 7.30.1",
- "ipdb == 0.13.9",
- "black == 21.12b0",
- "checkov == 2.0.694",
- "tftest == 1.6.1",
- "pre-commit == 2.16.0",
- "pycodestyle == 2.8.0",
- "mypy == 0.930",
- "boto3-stubs[support, stepfunctions, ec2, organizations, servicecatalog, sqs, lambda, sns, sts, cloudtrail, ssm, iam] == 1.20.26",
- "mypy_boto3_builder == 5.5.0",
- ]
- },
+ package_data={"aft_common": ["schemas/*.json"]},
)
diff --git a/sources/scripts/creds.sh b/sources/scripts/creds.sh
index 2679d029..519d26f8 100755
--- a/sources/scripts/creds.sh
+++ b/sources/scripts/creds.sh
@@ -3,146 +3,74 @@
# SPDX-License-Identifier: Apache-2.0
#
-#Default flags to false
-USER_DEFINED_ACCOUNT=false
-AFT=false
-CT=false
-AUDIT=false
-LOG_ARCHIVE=false
-#Ensure at option was specified
-if [ $# -eq 0 ]; then
- echo "";
- echo "No options have been provided.";
- exit;
-fi
+function write_to_credentials {
+ local profile=${1}
+ local credentials=${2}
+ local aws_access_key_id
+    local aws_secret_access_key
+    local aws_session_token
-#Parse options and set flags
-while [ ! $# -eq 0 ]
-do
- case "$1" in
- --account-id)
- USER_DEFINED_ACCOUNT=true
- USER_DEFINED_ACCOUNT_ID=$2
- echo $ACCOUNT_ID
- ;;
- --aft-mgmt)
- AFT=true
- ;;
- --ct-mgmt)
- CT=true
- ;;
- --ct-audit)
- AUDIT=true
- ;;
- --ct-log-archive)
- LOG_ARCHIVE=true
- ;;
- --help)
- echo ""
- echo "creds.sh creates an AWS CLI credential file leveraging AWSAFTExecutionRole for specified accounts"
- echo ""
- echo "** creds.sh should be run from the AFT Management account with a role that can assume aws-aft-AdministratorRole **"
- echo ""
- echo "usage: creds.sh [--account account_id] [--ct-mgmt] [--aft-mgmt]"
- echo ""
- echo "--account-id - Create a default credential profile for the given account number. Profile name: default"
- echo "--aft-mgmt - Create a credential profile for AFT Management account. Profile name: aft-management"
- echo "--ct-mgmt - Create a credential profile for Control Tower Management account. Profile name: ct-management"
- echo "--audit - Create a credential profile for Control Tower Audit account. Profile name: ct-audit"
- echo "--log - Create a credential profile for Control Tower Log Archive account. Profile name: ct-log-archive"
- exit
- ;;
- esac
- shift
-done
+ aws_access_key_id="$(echo "${credentials}" | jq --raw-output ".Credentials[\"AccessKeyId\"]")"
+ aws_secret_access_key="$(echo "${credentials}" | jq --raw-output ".Credentials[\"SecretAccessKey\"]")"
+ aws_session_token="$(echo "${credentials}" | jq --raw-output ".Credentials[\"SessionToken\"]")"
-# Remove Credentials file, if exists
-mkdir -p ~/.aws
-rm -f ~/.aws/credentials
+ aws configure set aws_access_key_id "${aws_access_key_id}" --profile "${profile}"
+ aws configure set aws_secret_access_key "${aws_secret_access_key}" --profile "${profile}"
+ aws configure set aws_session_token "${aws_session_token}" --profile "${profile}"
+}
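+
+# Usage sketch (illustrative): write_to_credentials "aft-target" "${CREDENTIALS}",
+# where CREDENTIALS holds the JSON output of an `aws sts assume-role` call.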
-#Lookup SSM Parameters
-AFT_MGMT_ROLE=$(aws ssm get-parameter --name /aft/resources/iam/aft-administrator-role-name | jq --raw-output ".Parameter.Value")
-AFT_EXECUTION_ROLE=$(aws ssm get-parameter --name /aft/resources/iam/aft-execution-role-name | jq --raw-output ".Parameter.Value")
-ROLE_SESSION_NAME=$(aws ssm get-parameter --name /aft/resources/iam/aft-session-name | jq --raw-output ".Parameter.Value")
-AFT_MGMT_ACCOUNT=$(aws ssm get-parameter --name /aft/account/aft-management/account-id | jq --raw-output ".Parameter.Value")
-CT_MGMT_ACCOUNT=$(aws ssm get-parameter --name /aft/account/ct-management/account-id | jq --raw-output ".Parameter.Value")
-AUDIT_ACCOUNT=$(aws ssm get-parameter --name /aft/account/audit/account-id | jq --raw-output ".Parameter.Value")
-LOG_ARCHIVE_ACCOUNT=$(aws ssm get-parameter --name /aft/account/log-archive/account-id | jq --raw-output ".Parameter.Value")
-# Assume aws-aft-AdministratorRole in AFT Management account
-if $USER_DEFINED_ACCOUNT || $AFT || $CT || $AUDIT || $LOG_ARCHIVE; then
- echo "Assuming ${AFT_MGMT_ROLE} in aft-management account:" ${AFT_MGMT_ACCOUNT}
- echo "aws sts assume-role --role-arn arn:aws:iam::${AFT_MGMT_ACCOUNT}:role/${AFT_MGMT_ROLE} --role-session-name ${ROLE_SESSION_NAME}"
- JSON=$(aws sts assume-role --role-arn arn:aws:iam::${AFT_MGMT_ACCOUNT}:role/${AFT_MGMT_ROLE} --role-session-name ${ROLE_SESSION_NAME})
- #Make newly assumed role default session
- export AWS_ACCESS_KEY_ID=$(echo ${JSON} | jq --raw-output ".Credentials[\"AccessKeyId\"]")
- export AWS_SECRET_ACCESS_KEY=$(echo ${JSON} | jq --raw-output ".Credentials[\"SecretAccessKey\"]")
- export AWS_SESSION_TOKEN=$(echo ${JSON} | jq --raw-output ".Credentials[\"SessionToken\"]")
-fi
+function main {
+ local AFT_MGMT_ROLE
+ local AFT_EXECUTION_ROLE
+ local ROLE_SESSION_NAME
+ local AFT_MGMT_ACCOUNT
+ local CT_MGMT_ACCOUNT
+ local AUDIT_ACCOUNT
+ local LOG_ARCHIVE_ACCOUNT
+ local CREDENTIALS
-if $USER_DEFINED_ACCOUNT; then
-# Assume AWSAFTExecution in User Defined account
- echo "aws sts assume-role --role-arn arn:aws:iam::${USER_DEFINED_ACCOUNT_ID}:role/${AFT_EXECUTION_ROLE} --role-session-name ${ROLE_SESSION_NAME}"
- JSON=$(aws sts assume-role --role-arn arn:aws:iam::${USER_DEFINED_ACCOUNT_ID}:role/${AFT_EXECUTION_ROLE} --role-session-name ${ROLE_SESSION_NAME})
- echo "[default]" >> ~/.aws/credentials
- echo "aws_access_key_id=$(echo ${JSON} | jq --raw-output ".Credentials[\"AccessKeyId\"]")" >> ~/.aws/credentials
- echo "aws_secret_access_key=$(echo ${JSON} | jq --raw-output ".Credentials[\"SecretAccessKey\"]")" >> ~/.aws/credentials
- echo "aws_session_token=$(echo ${JSON} | jq --raw-output ".Credentials[\"SessionToken\"]")" >> ~/.aws/credentials
-fi
+ #Lookup SSM Parameters
+ AFT_MGMT_ROLE=$(aws ssm get-parameter --name /aft/resources/iam/aft-administrator-role-name | jq --raw-output ".Parameter.Value")
+ AFT_EXECUTION_ROLE=$(aws ssm get-parameter --name /aft/resources/iam/aft-execution-role-name | jq --raw-output ".Parameter.Value")
+ ROLE_SESSION_NAME=$(aws ssm get-parameter --name /aft/resources/iam/aft-session-name | jq --raw-output ".Parameter.Value")
+ AFT_MGMT_ACCOUNT=$(aws ssm get-parameter --name /aft/account/aft-management/account-id | jq --raw-output ".Parameter.Value")
+ CT_MGMT_ACCOUNT=$(aws ssm get-parameter --name /aft/account/ct-management/account-id | jq --raw-output ".Parameter.Value")
+ AUDIT_ACCOUNT=$(aws ssm get-parameter --name /aft/account/audit/account-id | jq --raw-output ".Parameter.Value")
+ LOG_ARCHIVE_ACCOUNT=$(aws ssm get-parameter --name /aft/account/log-archive/account-id | jq --raw-output ".Parameter.Value")
-if $AFT; then
-# Assume AWSAFTExecution in AFT Management account
- echo "Assuming ${AFT_EXECUTION_ROLE} in aft-management account:" ${AFT_MGMT_ACCOUNT}
- echo "aws sts assume-role --role-arn arn:aws:iam::${AFT_MGMT_ACCOUNT}:role/${AFT_EXECUTION_ROLE} --role-session-name ${ROLE_SESSION_NAME}"
- JSON=$(aws sts assume-role --role-arn arn:aws:iam::${AFT_MGMT_ACCOUNT}:role/${AFT_EXECUTION_ROLE} --role-session-name ${ROLE_SESSION_NAME})
- # Create credentials file
- echo "[aft-management]" >> ~/.aws/credentials
- echo "aws_access_key_id=$(echo ${JSON} | jq --raw-output ".Credentials[\"AccessKeyId\"]")" >> ~/.aws/credentials
- echo "aws_secret_access_key=$(echo ${JSON} | jq --raw-output ".Credentials[\"SecretAccessKey\"]")" >> ~/.aws/credentials
- echo "aws_session_token=$(echo ${JSON} | jq --raw-output ".Credentials[\"SessionToken\"]")" >> ~/.aws/credentials
-fi
+    # Assume aws-aft-AdministratorRole in the AFT Management account. This is a hub role with permissions to assume the other AFT roles
+ echo "Generating credentials for ${AFT_MGMT_ROLE} in aft-management account: ${AFT_MGMT_ACCOUNT}"
+ CREDENTIALS=$(aws sts assume-role --role-arn "arn:${AWS_PARTITION}:iam::${AFT_MGMT_ACCOUNT}:role/${AFT_MGMT_ROLE}" --role-session-name "${ROLE_SESSION_NAME}")
+ write_to_credentials "aft-management-admin" "${CREDENTIALS}"
-# Assume AWSAFTExecution in CT Management account
-if $CT; then
- echo "Assuming ${AFT_EXECUTION_ROLE} in ct-management account:" ${CT_MGMT_ACCOUNT}
- echo "aws sts assume-role --role-arn arn:aws:iam::${CT_MGMT_ACCOUNT}:role/${AFT_EXECUTION_ROLE} --role-session-name ${ROLE_SESSION_NAME}"
- JSON=$(aws sts assume-role --role-arn arn:aws:iam::${CT_MGMT_ACCOUNT}:role/${AFT_EXECUTION_ROLE} --role-session-name ${ROLE_SESSION_NAME})
- # Create credentials file
- echo "[ct-management]" >> ~/.aws/credentials
- echo "aws_access_key_id=$(echo ${JSON} | jq --raw-output ".Credentials[\"AccessKeyId\"]")" >> ~/.aws/credentials
- echo "aws_secret_access_key=$(echo ${JSON} | jq --raw-output ".Credentials[\"SecretAccessKey\"]")" >> ~/.aws/credentials
- echo "aws_session_token=$(echo ${JSON} | jq --raw-output ".Credentials[\"SessionToken\"]")" >> ~/.aws/credentials
-fi
+ # Assume AWSAFTExecution in User Defined account
+ echo "Generating credentials for ${AFT_EXECUTION_ROLE} in vended account account: ${VENDED_ACCOUNT_ID}"
+ CREDENTIALS=$(aws sts assume-role --role-arn "arn:${AWS_PARTITION}:iam::${VENDED_ACCOUNT_ID}:role/${AFT_EXECUTION_ROLE}" --role-session-name "${ROLE_SESSION_NAME}" --profile aft-management-admin)
+ write_to_credentials "aft-target" "${CREDENTIALS}"
+
+ # Assume AWSAFTExecution in AFT Management account
+ echo "Generating credentials for ${AFT_EXECUTION_ROLE} in aft-management account: ${AFT_MGMT_ACCOUNT}"
+ CREDENTIALS=$(aws sts assume-role --role-arn "arn:${AWS_PARTITION}:iam::${AFT_MGMT_ACCOUNT}:role/${AFT_EXECUTION_ROLE}" --role-session-name "${ROLE_SESSION_NAME}" --profile aft-management-admin)
+ write_to_credentials "aft-management" "${CREDENTIALS}"
+
+ # Assume AWSAFTExecution in CT Management account
+ echo "Generating credentials for ${AFT_EXECUTION_ROLE} in ct-management account: ${CT_MGMT_ACCOUNT}"
+ CREDENTIALS=$(aws sts assume-role --role-arn "arn:${AWS_PARTITION}:iam::${CT_MGMT_ACCOUNT}:role/${AFT_EXECUTION_ROLE}" --role-session-name "${ROLE_SESSION_NAME}" --profile aft-management-admin)
+ write_to_credentials "ct-management" "${CREDENTIALS}"
-# Assume AWSAFTExecution in Audit account
-if $AUDIT; then
- echo "Assuming ${AFT_EXECUTION_ROLE} in Audit account:" ${AUDIT_ACCOUNT}
- echo "aws sts assume-role --role-arn arn:aws:iam::${AUDIT_ACCOUNT}:role/${AFT_EXECUTION_ROLE} --role-session-name ${ROLE_SESSION_NAME}"
- JSON=$(aws sts assume-role --role-arn arn:aws:iam::${AUDIT_ACCOUNT}:role/${AFT_EXECUTION_ROLE} --role-session-name ${ROLE_SESSION_NAME})
+ # Assume AWSAFTExecution in Audit account
+ echo "Generating credentials for ${AFT_EXECUTION_ROLE} in Audit account: ${AUDIT_ACCOUNT}"
+ CREDENTIALS=$(aws sts assume-role --role-arn "arn:${AWS_PARTITION}:iam::${AUDIT_ACCOUNT}:role/${AFT_EXECUTION_ROLE}" --role-session-name "${ROLE_SESSION_NAME}" --profile aft-management-admin)
# Create credentials file
- echo "[ct-audit]" >> ~/.aws/credentials
- echo "aws_access_key_id=$(echo ${JSON} | jq --raw-output ".Credentials[\"AccessKeyId\"]")" >> ~/.aws/credentials
- echo "aws_secret_access_key=$(echo ${JSON} | jq --raw-output ".Credentials[\"SecretAccessKey\"]")" >> ~/.aws/credentials
- echo "aws_session_token=$(echo ${JSON} | jq --raw-output ".Credentials[\"SessionToken\"]")" >> ~/.aws/credentials
-fi
+ write_to_credentials "ct-audit" "${CREDENTIALS}"
-# Assume AWSAFTExecution in Log Archive account
-if $LOG_ARCHIVE; then
- echo "Assuming ${AFT_EXECUTION_ROLE} in Log Archive account:" ${LOG_ARCHIVE_ACCOUNT}
- echo "aws sts assume-role --role-arn arn:aws:iam::${LOG_ARCHIVE_ACCOUNT}:role/${AFT_EXECUTION_ROLE} --role-session-name ${ROLE_SESSION_NAME}"
- JSON=$(aws sts assume-role --role-arn arn:aws:iam::${LOG_ARCHIVE_ACCOUNT}:role/${AFT_EXECUTION_ROLE} --role-session-name ${ROLE_SESSION_NAME})
+ # Assume AWSAFTExecution in Log Archive account
+ echo "Generating credentials for ${AFT_EXECUTION_ROLE} in Log Archive account: ${LOG_ARCHIVE_ACCOUNT}"
+ CREDENTIALS=$(aws sts assume-role --role-arn "arn:${AWS_PARTITION}:iam::${LOG_ARCHIVE_ACCOUNT}:role/${AFT_EXECUTION_ROLE}" --role-session-name "${ROLE_SESSION_NAME}" --profile aft-management-admin)
# Create credentials file
- echo "[ct-log-archive]" >> ~/.aws/credentials
- echo "aws_access_key_id=$(echo ${JSON} | jq --raw-output ".Credentials[\"AccessKeyId\"]")" >> ~/.aws/credentials
- echo "aws_secret_access_key=$(echo ${JSON} | jq --raw-output ".Credentials[\"SecretAccessKey\"]")" >> ~/.aws/credentials
- echo "aws_session_token=$(echo ${JSON} | jq --raw-output ".Credentials[\"SessionToken\"]")" >> ~/.aws/credentials
-fi
+ write_to_credentials "ct-log-archive" "${CREDENTIALS}"
+}
-# Unset env vars if any work was performed
-if $USER_DEFINED_ACCOUNT || $AFT || $CT; then
- unset AWS_ACCESS_KEY_ID
- unset AWS_SECRET_ACCESS_KEY
- unset AWS_SESSION_TOKEN
-fi
-echo "Script execution complete"
+set -e
+main
diff --git a/sources/scripts/terraform_client.py b/sources/scripts/terraform_client.py
index 0718ceda..5fd46f73 100755
--- a/sources/scripts/terraform_client.py
+++ b/sources/scripts/terraform_client.py
@@ -4,6 +4,7 @@
#
import os
import time
+from typing import Any
import requests
@@ -37,8 +38,10 @@ def check_workspace_exists(organization_name, workspace_name, api_token):
return None
-def create_workspace(organization_name, workspace_name, api_token):
+def create_workspace(organization_name, workspace_name, api_token, project_name):
workspace_id = check_workspace_exists(organization_name, workspace_name, api_token)
+ project_id = get_project_id(organization_name, project_name, api_token)
+
if workspace_id:
return workspace_id
else:
@@ -54,12 +57,34 @@ def create_workspace(organization_name, workspace_name, api_token):
"auto-apply": True,
},
"type": "workspaces",
+ "relationships": {
+ "project": {"data": {"type": "projects", "id": project_id}}
+ },
}
}
response = __post(endpoint, headers, payload)
return response["data"]["id"]
+def get_project_id(organization_name, project_name, api_token):
+ endpoint = "{}/organizations/{}/projects".format(
+ TERRAFORM_API_ENDPOINT, organization_name
+ )
+ headers = __build_standard_headers(api_token)
+ response = __get(endpoint, headers)
+ projects = response["data"]
+
+ for project in projects:
+ if project["attributes"]["name"] == project_name:
+ return project["id"]
+
+ raise ValueError(
+ "Project '{}' not found in organization '{}'".format(
+ project_name, organization_name
+ )
+ )
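+
+# Illustrative lookup (hypothetical names): get_project_id("my-org", "AFT", token)
+# returns the ID of the first project whose "name" attribute equals "AFT".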
+
+
def create_configuration_version(workspace_id, api_token):
endpoint = "{}/workspaces/{}/configuration-versions".format(
TERRAFORM_API_ENDPOINT, workspace_id
@@ -169,17 +194,18 @@ def create_destroy_run(workspace_id, api_token):
def delete_workspace(workspace_id, api_token):
endpoint = "{}/workspaces/{}".format(TERRAFORM_API_ENDPOINT, workspace_id)
+ sanitized_workspace_id = __sanitize_input_for_logging(workspace_id)
headers = __build_standard_headers(api_token)
response = __delete(endpoint, headers)
if response is not None:
errors = response["errors"]
if len(errors) == 0:
- print("Successfully deleted workspace {}".format(workspace_id))
+ print("Successfully deleted workspace {}".format(sanitized_workspace_id))
else:
- print("Error occured deleting workspace {}".format(workspace_id))
+ print("Error occured deleting workspace {}".format(sanitized_workspace_id))
print(str(errors))
else:
- print("Successfully deleted workspace {}".format(workspace_id))
+ print("Successfully deleted workspace {}".format(sanitized_workspace_id))
def wait_to_stabilize(entity_type, entity_id, target_states, api_token):
@@ -187,7 +213,11 @@ def wait_to_stabilize(entity_type, entity_id, target_states, api_token):
status = get_action_status(entity_type, entity_id, api_token)
if status in target_states:
break
- print("{} not yet ready. In status {}".format(entity_type, status))
+ print(
+ "{} not yet ready. In status {}".format(
+ entity_type, __sanitize_input_for_logging(status)
+ )
+ )
time.sleep(10)
return status
@@ -258,6 +288,14 @@ def __handle_errors(response):
raise ClientError(status="500", message=str(errors))
+def __sanitize_input_for_logging(input: Any) -> str:
+ """
+    Sanitize the input string by replacing newline and tab characters with their literal string representations.
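+
+    Example (illustrative): a value containing a real newline, "line1\nline2",
+    is returned as the single log-safe line "line1\\nline2".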
+ """
+ input_str = str(input)
+ return input_str.encode("unicode_escape").decode()
+
+
class ClientError(Exception):
def __init__(self, status, message):
self.status = status
diff --git a/sources/scripts/workspace_manager.py b/sources/scripts/workspace_manager.py
index a4afccbe..3754c5c2 100755
--- a/sources/scripts/workspace_manager.py
+++ b/sources/scripts/workspace_manager.py
@@ -13,21 +13,35 @@
def setup_and_run_workspace(
- organization_name, workspace_name, assume_role_arn, role_session_name, api_token
+ organization_name,
+ workspace_name,
+ assume_role_arn,
+ role_session_name,
+ api_token,
+ project_name,
):
-
workspace_id = setup_workspace(
- organization_name, workspace_name, assume_role_arn, role_session_name, api_token
+ organization_name,
+ workspace_name,
+ assume_role_arn,
+ role_session_name,
+ api_token,
+ project_name,
)
run_id = stage_run(workspace_id, assume_role_arn, role_session_name, api_token)
return run_id
def setup_workspace(
- organization_name, workspace_name, assume_role_arn, role_session_name, api_token
+ organization_name,
+ workspace_name,
+ assume_role_arn,
+ role_session_name,
+ api_token,
+ project_name,
):
workspace_id = terraform.create_workspace(
- organization_name, workspace_name, api_token
+ organization_name, workspace_name, api_token, project_name
)
print(
"Successfully created workspace {} with ID {}".format(
@@ -47,7 +61,8 @@ def setup_workspace(
def stage_run(workspace_id, assume_role_arn, role_session_name, api_token):
cv_id, upload_url = terraform.create_configuration_version(workspace_id, api_token)
print("Successfully created a new configuration version: {}".format(cv_id))
- data = open(LOCAL_CONFIGURATION_PATH, "rb")
+ with open(LOCAL_CONFIGURATION_PATH, "rb") as file:
+ data = file.read()
terraform.upload_configuration_content(data, upload_url)
print(
"Successfully uploaded configuration content to upload URL: {}".format(
@@ -255,6 +270,7 @@ def __transform_workspace_vars(workspace_vars):
parser.add_argument("--api_token", type=str, help="Terraform API token")
parser.add_argument("--terraform_version", type=str, help="Terraform Version")
parser.add_argument("--config_file", type=str, help="Terraform Config File")
+ parser.add_argument("--project_name", type=str, help="Name of the TFE project name")
args = parser.parse_args()
@@ -278,4 +294,5 @@ def __transform_workspace_vars(workspace_vars):
args.assume_role_arn,
args.assume_role_session_name,
args.api_token,
+ args.project_name,
)
diff --git a/src/aft_lambda/aft_account_provisioning_framework/aft_account_provisioning_framework_account_metadata_ssm.py b/src/aft_lambda/aft_account_provisioning_framework/aft_account_provisioning_framework_account_metadata_ssm.py
index 26708de9..238ab358 100644
--- a/src/aft_lambda/aft_account_provisioning_framework/aft_account_provisioning_framework_account_metadata_ssm.py
+++ b/src/aft_lambda/aft_account_provisioning_framework/aft_account_provisioning_framework_account_metadata_ssm.py
@@ -3,36 +3,42 @@
#
import inspect
import json
-from typing import Any, Dict, Union
+from typing import TYPE_CHECKING, Any, Dict
-import aft_common.aft_utils as utils
-import boto3
-from aft_common.account_provisioning_framework import (
- AFT_EXEC_ROLE,
- SSM_PARAMETER_PATH,
- create_ssm_parameters,
+from aft_common import aft_utils as utils
+from aft_common import notifications
+from aft_common.account_provisioning_framework import ProvisionRoles
+from aft_common.auth import AuthClient
+from aft_common.constants import SSM_PARAMETER_PATH
+from aft_common.logger import customization_request_logger
+from aft_common.ssm import (
delete_ssm_parameters,
get_ssm_parameters_names_by_path,
+ put_ssm_parameters,
)
-logger = utils.get_logger()
+if TYPE_CHECKING:
+ from aws_lambda_powertools.utilities.typing import LambdaContext
+else:
+ LambdaContext = object
-def lambda_handler(event: Dict[str, Any], context: Union[Dict[str, Any], None]) -> None:
- try:
- account_request = event["payload"]["account_request"]
- custom_fields = json.loads(account_request.get("custom_fields", "{}"))
- target_account_id = event["payload"]["account_info"]["account"]["id"]
-
- local_session = boto3.session.Session()
+def lambda_handler(event: Dict[str, Any], context: LambdaContext) -> None:
+ event_payload = event["payload"]
+ request_id = event_payload["customization_request_id"]
+ target_account_id = event_payload["account_info"]["account"]["id"]
+ account_request = event_payload["account_request"]
+ custom_fields = json.loads(account_request.get("custom_fields", "{}"))
- aft_session = utils.get_aft_admin_role_session(local_session)
- target_account_role_arn = utils.build_role_arn(
- aft_session, AFT_EXEC_ROLE, target_account_id
- )
+ logger = customization_request_logger(
+ aws_account_id=target_account_id, customization_request_id=request_id
+ )
+ auth = AuthClient()
+ try:
# Create the custom field parameters in the AFT home region
- target_region = aft_session.region_name
+ session = auth.get_aft_management_session()
+ target_region = session.region_name
aft_ssm_session_policy = {
"Version": "2012-10-17",
@@ -44,18 +50,23 @@ def lambda_handler(event: Dict[str, Any], context: Union[Dict[str, Any], None])
"ssm:DeleteParameters",
],
"Effect": "Allow",
- "Resource": f"arn:aws:ssm:{target_region}:{target_account_id}:parameter{SSM_PARAMETER_PATH}*",
- }
+ "Resource": f"arn:{utils.get_aws_partition(session)}:ssm:{target_region}:{target_account_id}:parameter{SSM_PARAMETER_PATH}*",
+ },
+ {
+ "Action": [
+ "sts:AssumeRole",
+ ],
+ "Effect": "Allow",
+ "Resource": f"arn:{utils.get_aws_partition(session)}:iam::{target_account_id}:role/${ProvisionRoles.EXECUTION_ROLE_NAME}",
+ },
],
}
- target_account_creds = utils.get_assume_role_credentials(
- session=aft_session,
- role_arn=target_account_role_arn,
- session_name="aft_ssm_metadata",
+ target_account_session = auth.get_target_account_session(
+ account_id=target_account_id,
session_policy=json.dumps(aft_ssm_session_policy),
+ role_name=ProvisionRoles.SERVICE_ROLE_NAME,
)
- target_account_session = utils.get_boto_session(target_account_creds)
params = get_ssm_parameters_names_by_path(
target_account_session, SSM_PARAMETER_PATH
@@ -66,18 +77,24 @@ def lambda_handler(event: Dict[str, Any], context: Union[Dict[str, Any], None])
# Delete SSM parameters which do not exist in new custom fields
params_to_remove = list(existing_keys.difference(new_keys))
- logger.info(message=f"Deleting SSM params: {params_to_remove}")
+ logger.info(f"Deleting SSM params: {params_to_remove}")
delete_ssm_parameters(target_account_session, params_to_remove)
# Update / Add SSM parameters for custom fields provided
- logger.info(message=f"Adding/Updating SSM params: {custom_fields}")
- create_ssm_parameters(target_account_session, custom_fields)
+ logger.info(f"Adding/Updating SSM params: {custom_fields}")
+ put_ssm_parameters(target_account_session, custom_fields)
- except Exception as e:
+ except Exception as error:
+ notifications.send_lambda_failure_sns_message(
+ session=auth.get_aft_management_session(),
+ message=str(error),
+ context=context,
+ subject="AFT account provisioning failed",
+ )
message = {
"FILE": __file__.split("/")[-1],
"METHOD": inspect.stack()[0][3],
- "EXCEPTION": str(e),
+ "EXCEPTION": str(error),
}
logger.exception(message)
raise
diff --git a/src/aft_lambda/aft_account_provisioning_framework/aft_account_provisioning_framework_create_role.py b/src/aft_lambda/aft_account_provisioning_framework/aft_account_provisioning_framework_create_role.py
index f63b525a..66da124a 100644
--- a/src/aft_lambda/aft_account_provisioning_framework/aft_account_provisioning_framework_create_role.py
+++ b/src/aft_lambda/aft_account_provisioning_framework/aft_account_provisioning_framework_create_role.py
@@ -2,42 +2,50 @@
# SPDX-License-Identifier: Apache-2.0
#
import inspect
-from typing import Any, Dict, Union
+from typing import TYPE_CHECKING, Any, Dict
-import aft_common.aft_utils as utils
-import boto3
-from aft_common.account_provisioning_framework import create_aft_execution_role
+from aft_common import notifications
+from aft_common.account_provisioning_framework import ProvisionRoles
+from aft_common.auth import AuthClient
+from aft_common.logger import customization_request_logger
-logger = utils.get_logger()
+if TYPE_CHECKING:
+ from aws_lambda_powertools.utilities.typing import LambdaContext
+else:
+ LambdaContext = object
-def lambda_handler(event: Dict[str, Any], context: Union[Dict[str, Any], None]) -> str:
- try:
- logger.info("AFT Account Provisioning Framework Create Role Handler Start")
-
- payload = event["payload"]
- action = event["action"]
+def lambda_handler(event: Dict[str, Any], context: LambdaContext) -> None:
+ action = event["action"]
+ event_payload = event["payload"]
+ request_id = event_payload["customization_request_id"]
+ target_account_id = event_payload["account_info"]["account"]["id"]
- session = boto3.session.Session()
- ct_management_session = utils.get_ct_management_session(session)
+ logger = customization_request_logger(
+ aws_account_id=target_account_id, customization_request_id=request_id
+ )
- if action == "create_role":
- account_info = payload["account_info"]["account"]
- aft_role = create_aft_execution_role(
- account_info, session, ct_management_session
- )
- return_value: str = aft_role
- return return_value
- else:
+ auth = AuthClient()
+ try:
+ if action != "create_role":
raise Exception(
- "Incorrect Command Passed to Lambda Function. Input: {action}. Expected: 'create_role'"
+ f"Incorrect Command Passed to Lambda Function. Input: {action}. Expected: 'create_role'"
)
-
- except Exception as e:
+ logger.info("Deploying / managing AFT Roles in target account")
+ provisioning = ProvisionRoles(auth=auth, account_id=target_account_id)
+ provisioning.deploy_aws_aft_roles()
+
+ except Exception as error:
+ notifications.send_lambda_failure_sns_message(
+ session=auth.get_aft_management_session(),
+ message=str(error),
+ context=context,
+ subject="AFT account provisioning failed",
+ )
message = {
"FILE": __file__.split("/")[-1],
"METHOD": inspect.stack()[0][3],
- "EXCEPTION": str(e),
+ "EXCEPTION": str(error),
}
logger.exception(message)
raise
diff --git a/src/aft_lambda/aft_account_provisioning_framework/aft_account_provisioning_framework_get_account_info.py b/src/aft_lambda/aft_account_provisioning_framework/aft_account_provisioning_framework_get_account_info.py
deleted file mode 100644
index b31e0236..00000000
--- a/src/aft_lambda/aft_account_provisioning_framework/aft_account_provisioning_framework_get_account_info.py
+++ /dev/null
@@ -1,40 +0,0 @@
-# Copyright Amazon.com, Inc. or its affiliates. All rights reserved.
-# SPDX-License-Identifier: Apache-2.0
-#
-import inspect
-from typing import Any, Dict, Union
-
-import aft_common.aft_utils as utils
-from aft_common.account_provisioning_framework import get_account_info
-from aft_common.types import AftAccountInfo
-from boto3.session import Session
-
-logger = utils.get_logger()
-
-
-def lambda_handler(
- event: Dict[str, Any], context: Union[Dict[str, Any], None]
-) -> AftAccountInfo:
- try:
- logger.info("AFT Account Provisioning Framework Get Account Info Handler Start")
-
- payload = event["payload"]
- action = event["action"]
-
- session = Session()
- ct_management_session = utils.get_ct_management_session(session)
- if action == "get_account_info":
- return get_account_info(payload, session, ct_management_session)
- else:
- raise Exception(
- "Incorrect Command Passed to Lambda Function. Input: {action}. Expected: 'get_account_info'"
- )
-
- except Exception as e:
- message = {
- "FILE": __file__.split("/")[-1],
- "METHOD": inspect.stack()[0][3],
- "EXCEPTION": str(e),
- }
- logger.exception(message)
- raise
diff --git a/src/aft_lambda/aft_account_provisioning_framework/aft_account_provisioning_framework_persist_metadata.py b/src/aft_lambda/aft_account_provisioning_framework/aft_account_provisioning_framework_persist_metadata.py
index 342b47a7..630683b8 100644
--- a/src/aft_lambda/aft_account_provisioning_framework/aft_account_provisioning_framework_persist_metadata.py
+++ b/src/aft_lambda/aft_account_provisioning_framework/aft_account_provisioning_framework_persist_metadata.py
@@ -2,21 +2,35 @@
# SPDX-License-Identifier: Apache-2.0
#
import inspect
-from typing import Any, Dict, Union
+from typing import TYPE_CHECKING, Any, Dict
-import aft_common.aft_utils as utils
+from aft_common import notifications
from aft_common.account_provisioning_framework import persist_metadata
+from aft_common.logger import customization_request_logger
from boto3.session import Session
-logger = utils.get_logger()
+if TYPE_CHECKING:
+ from aws_lambda_powertools.utilities.typing import LambdaContext
+ from mypy_boto3_dynamodb.type_defs import PutItemOutputTableTypeDef
+else:
+ PutItemOutputTableTypeDef = object
+ LambdaContext = object
def lambda_handler(
- event: Dict[str, Any], context: Union[Dict[str, Any], None]
-) -> Dict[str, Any]:
- try:
- logger.info("AFT Account Provisioning Framework Handler Start")
+ event: Dict[str, Any], context: LambdaContext
+) -> PutItemOutputTableTypeDef:
+ action = event["action"]
+ event_payload = event["payload"]
+ request_id = event_payload["customization_request_id"]
+ account_info = event_payload["account_info"]["account"]
+ target_account_id = event_payload["account_info"]["account"]["id"]
+ logger = customization_request_logger(
+ aws_account_id=target_account_id, customization_request_id=request_id
+ )
+ aft_management_session = Session()
+ try:
rollback = None
try:
@@ -25,25 +39,28 @@ def lambda_handler(
except KeyError:
pass
- payload = event["payload"]
- action = event["action"]
-
- session = Session()
-
if action == "persist_metadata":
- account_info = payload["account_info"]["account"]
- update_metadata = persist_metadata(payload, account_info, session)
+ logger.info(f"Managing AFT metadata table entry for target account")
+ update_metadata = persist_metadata(
+ event_payload, account_info, aft_management_session
+ )
return update_metadata
else:
raise Exception(
- "Incorrect Command Passed to Lambda Function. Input: {action}. Expected: 'persist_metadata'"
+ f"Incorrect Command Passed to Lambda Function. Input action: {action}. Expected: 'persist_metadata'"
)
- except Exception as e:
+ except Exception as error:
+ notifications.send_lambda_failure_sns_message(
+ session=aft_management_session,
+ message=str(error),
+ context=context,
+ subject="AFT account provisioning failed",
+ )
message = {
"FILE": __file__.split("/")[-1],
"METHOD": inspect.stack()[0][3],
- "EXCEPTION": str(e),
+ "EXCEPTION": str(error),
}
logger.exception(message)
raise
diff --git a/src/aft_lambda/aft_account_provisioning_framework/aft_account_provisioning_framework_tag_account.py b/src/aft_lambda/aft_account_provisioning_framework/aft_account_provisioning_framework_tag_account.py
index 0cfbf0a5..a030ad0e 100644
--- a/src/aft_lambda/aft_account_provisioning_framework/aft_account_provisioning_framework_tag_account.py
+++ b/src/aft_lambda/aft_account_provisioning_framework/aft_account_provisioning_framework_tag_account.py
@@ -2,19 +2,35 @@
# SPDX-License-Identifier: Apache-2.0
#
import inspect
-from typing import Any, Dict, Union
+from typing import TYPE_CHECKING, Any, Dict
-import aft_common.aft_utils as utils
-from aft_common.account_provisioning_framework import tag_account
+from aft_common import notifications
+from aft_common.account_provisioning_framework import ProvisionRoles, tag_account
+from aft_common.auth import AuthClient
+from aft_common.logger import customization_request_logger
from boto3.session import Session
-logger = utils.get_logger()
+if TYPE_CHECKING:
+ from aws_lambda_powertools.utilities.typing import LambdaContext
+else:
+ LambdaContext = object
-def lambda_handler(event: Dict[str, Any], context: Union[Dict[str, Any], None]) -> None:
- try:
- logger.info("AFT Account Provisioning Framework Handler Start")
+def lambda_handler(event: Dict[str, Any], context: LambdaContext) -> None:
+ action = event["action"]
+ event_payload = event["payload"]
+ request_id = event_payload["customization_request_id"]
+ account_info = event_payload["account_info"]["account"]
+ target_account_id = event_payload["account_info"]["account"]["id"]
+
+ logger = customization_request_logger(
+ aws_account_id=target_account_id, customization_request_id=request_id
+ )
+
+ aft_management_session = Session()
+ auth = AuthClient()
+ try:
rollback = False
try:
if event["rollback"]:
@@ -22,25 +38,29 @@ def lambda_handler(event: Dict[str, Any], context: Union[Dict[str, Any], None])
except KeyError:
pass
- payload = event["payload"]
- action = event["action"]
-
- session = Session()
- ct_management_session = utils.get_ct_management_session(session)
+ ct_management_session = auth.get_ct_management_session(
+ role_name=ProvisionRoles.SERVICE_ROLE_NAME
+ )
if action == "tag_account":
- account_info = payload["account_info"]["account"]
- tag_account(payload, account_info, ct_management_session, rollback)
+ logger.info("Tag account Organization resource")
+ tag_account(event_payload, account_info, ct_management_session, rollback)
else:
raise Exception(
- "Incorrect Command Passed to Lambda Function. Input: {action}. Expected: 'tag_account'"
+ f"Incorrect Command Passed to Lambda Function. Input action: {action}. Expected: 'tag_account'"
)
- except Exception as e:
+ except Exception as error:
+ notifications.send_lambda_failure_sns_message(
+ session=aft_management_session,
+ message=str(error),
+ context=context,
+ subject="AFT account provisioning failed",
+ )
message = {
"FILE": __file__.split("/")[-1],
"METHOD": inspect.stack()[0][3],
- "EXCEPTION": str(e),
+ "EXCEPTION": str(error),
}
logger.exception(message)
raise
diff --git a/src/aft_lambda/aft_account_provisioning_framework/aft_account_provisioning_framework_validate_request.py b/src/aft_lambda/aft_account_provisioning_framework/aft_account_provisioning_framework_validate_request.py
deleted file mode 100644
index 958df9f4..00000000
--- a/src/aft_lambda/aft_account_provisioning_framework/aft_account_provisioning_framework_validate_request.py
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright Amazon.com, Inc. or its affiliates. All rights reserved.
-# SPDX-License-Identifier: Apache-2.0
-#
-import inspect
-from typing import Any, Dict, Union
-
-import aft_common.aft_utils as utils
-from aft_common.account_provisioning_framework import validate_request
-
-logger = utils.get_logger()
-
-
-def lambda_handler(event: Dict[str, Any], context: Union[Dict[str, Any], None]) -> bool:
- try:
- logger.info("AFT Account Provisioning Framework Handler Start")
-
- payload = event["payload"]
- action = event["action"]
-
- if action == "validate":
- request_validated = validate_request(payload)
- return request_validated
- else:
- raise Exception(
- "Incorrect Command Passed to Lambda Function. Input: {action}. Expected: 'validate'"
- )
-
- except Exception as e:
- message = {
- "FILE": __file__.split("/")[-1],
- "METHOD": inspect.stack()[0][3],
- "EXCEPTION": str(e),
- }
- logger.exception(message)
- raise
diff --git a/src/aft_lambda/aft_account_request_framework/aft_account_request_action_trigger.py b/src/aft_lambda/aft_account_request_framework/aft_account_request_action_trigger.py
index c67b43b4..361731a4 100644
--- a/src/aft_lambda/aft_account_request_framework/aft_account_request_action_trigger.py
+++ b/src/aft_lambda/aft_account_request_framework/aft_account_request_action_trigger.py
@@ -2,75 +2,43 @@
# SPDX-License-Identifier: Apache-2.0
#
import inspect
-import json
-from typing import Any, Dict, Union
+import logging
+from typing import TYPE_CHECKING, Any, Dict
-import aft_common.aft_utils as utils
-from aft_common.account_request_framework import (
- build_aft_account_provisioning_framework_event,
- control_tower_param_changed,
- delete_account_request,
- insert_msg_into_acc_req_queue,
- provisioned_product_exists,
-)
-from boto3.session import Session
+from aft_common import notifications
+from aft_common.account_request_record_handler import AccountRequestRecordHandler
+from aft_common.aft_utils import sanitize_input_for_logging
+from aft_common.auth import AuthClient
+from aft_common.logger import configure_aft_logger
-logger = utils.get_logger()
+if TYPE_CHECKING:
+ from aws_lambda_powertools.utilities.typing import LambdaContext
+else:
+ LambdaContext = object
-def lambda_handler(event: Dict[str, Any], context: Union[Dict[str, Any], None]) -> None:
+configure_aft_logger()
+logger = logging.getLogger("aft")
- try:
- logger.info("Lambda_handler Event")
- logger.info(event)
- session = Session()
-
- # validate event
- if "Records" not in event:
- return None
- event_record = event["Records"][0]
- if "eventSource" not in event_record:
- return None
- if event_record["eventSource"] != "aws:dynamodb":
- return None
-
- logger.info("DynamoDB Event Record Received")
- if delete_account_request(event_record):
- # Terraform handles removing the request record from DynamoDB
- # AWS does not support automated deletion of accounts
- logger.info("Delete account request received")
- return None
- new_account = not provisioned_product_exists(event_record)
- control_tower_updates = control_tower_param_changed(event_record)
-
- if new_account:
- logger.info("New account request received")
- insert_msg_into_acc_req_queue(
- event_record=event_record, new_account=True, session=session
- )
- elif not new_account and control_tower_updates:
- logger.info("Modify account request received")
- logger.info("Control Tower Parameter Update Request Received")
- insert_msg_into_acc_req_queue(
- event_record=event_record, new_account=False, session=session
- )
- elif not new_account and not control_tower_updates:
- logger.info("NON-Control Tower Parameter Update Request Received")
- payload = build_aft_account_provisioning_framework_event(event_record)
- lambda_name = utils.get_ssm_parameter_value(
- session,
- utils.SSM_PARAM_AFT_ACCOUNT_PROVISIONING_FRAMEWORK_LAMBDA,
- )
- utils.invoke_lambda(session, lambda_name, json.dumps(payload).encode())
- else:
- raise Exception("Unsupported account request")
-
- except Exception as e:
+def lambda_handler(event: Dict[str, Any], context: LambdaContext) -> None:
+ auth = AuthClient()
+ try:
+ record_handler = AccountRequestRecordHandler(auth=auth, event=event)
+ logger.info(sanitize_input_for_logging(record_handler.record))
+ record_handler.process_request()
+
+ except Exception as error:
+ notifications.send_lambda_failure_sns_message(
+ session=auth.aft_management_session,
+ message=str(error),
+ context=context,
+ subject="AFT account request failed",
+ )
message = {
"FILE": __file__.split("/")[-1],
"METHOD": inspect.stack()[0][3],
- "EXCEPTION": str(e),
+ "EXCEPTION": str(error),
}
logger.exception(message)
raise
diff --git a/src/aft_lambda/aft_account_request_framework/aft_account_request_audit_trigger.py b/src/aft_lambda/aft_account_request_framework/aft_account_request_audit_trigger.py
index 97da27d7..ff534d35 100644
--- a/src/aft_lambda/aft_account_request_framework/aft_account_request_audit_trigger.py
+++ b/src/aft_lambda/aft_account_request_framework/aft_account_request_audit_trigger.py
@@ -2,23 +2,29 @@
# SPDX-License-Identifier: Apache-2.0
#
import inspect
+import logging
import sys
-from typing import Any, Dict, Union
+from typing import TYPE_CHECKING, Any, Dict
-import aft_common.aft_utils as utils
+from aft_common import constants as utils
+from aft_common import notifications, ssm
from aft_common.account_request_framework import put_audit_record
+from aft_common.aft_utils import sanitize_input_for_logging
+from aft_common.logger import configure_aft_logger
from boto3.session import Session
-logger = utils.get_logger()
+if TYPE_CHECKING:
+ from aws_lambda_powertools.utilities.typing import LambdaContext
+else:
+ LambdaContext = object
+configure_aft_logger()
+logger = logging.getLogger("aft")
-def lambda_handler(event: Dict[str, Any], context: Union[Dict[str, Any], None]) -> None:
+def lambda_handler(event: Dict[str, Any], context: LambdaContext) -> None:
+ aft_management_session = Session()
try:
- logger.info("Lambda_handler Event")
- logger.info(event)
- session = Session()
-
# validate event
if "Records" in event:
response = None
@@ -26,8 +32,8 @@ def lambda_handler(event: Dict[str, Any], context: Union[Dict[str, Any], None])
if "eventSource" in event_record:
if event_record["eventSource"] == "aws:dynamodb":
logger.info("DynamoDB Event Record Received")
- table_name = utils.get_ssm_parameter_value(
- session, utils.SSM_PARAM_AFT_DDB_AUDIT_TABLE
+ table_name = ssm.get_ssm_parameter_value(
+ aft_management_session, utils.SSM_PARAM_AFT_DDB_AUDIT_TABLE
)
event_name = event_record["eventName"]
@@ -41,21 +47,31 @@ def lambda_handler(event: Dict[str, Any], context: Union[Dict[str, Any], None])
if event_name in supported_events:
logger.info("Event Name: " + event_name)
response = put_audit_record(
- session, table_name, image_to_record, event_name
+ aft_management_session,
+ table_name,
+ image_to_record,
+ event_name,
)
else:
- logger.info(f"Event Name: {event_name} is unsupported.")
+ logger.info(
+ f"Event Name: {sanitize_input_for_logging(event_name)} is unsupported."
+ )
else:
- logger.info("Non DynamoDB Event Received")
- sys.exit(1)
+ raise Exception("Non DynamoDB Event Received")
else:
logger.info("Unexpected Event Received")
- except Exception as e:
+ except Exception as error:
+ notifications.send_lambda_failure_sns_message(
+ session=aft_management_session,
+ message=str(error),
+ context=context,
+ subject="AFT account request failed",
+ )
message = {
"FILE": __file__.split("/")[-1],
"METHOD": inspect.stack()[0][3],
- "EXCEPTION": str(e),
+ "EXCEPTION": str(error),
}
logger.exception(message)
raise
diff --git a/src/aft_lambda/aft_account_request_framework/aft_account_request_processor.py b/src/aft_lambda/aft_account_request_framework/aft_account_request_processor.py
index f251abf8..7739c601 100644
--- a/src/aft_lambda/aft_account_request_framework/aft_account_request_processor.py
+++ b/src/aft_lambda/aft_account_request_framework/aft_account_request_processor.py
@@ -3,43 +3,68 @@
#
import inspect
import json
-from typing import Any, Dict, Union
+import logging
+import os
+import time
+from typing import TYPE_CHECKING, Any, Dict
-from aft_common import aft_utils as utils
+import aft_common.ssm
+from aft_common import constants as utils
+from aft_common import notifications, sqs
+from aft_common.account_provisioning_framework import ProvisionRoles
from aft_common.account_request_framework import (
+ AccountRequest,
create_new_account,
modify_ct_request_is_valid,
new_ct_request_is_valid,
update_existing_account,
)
+from aft_common.auth import AuthClient
+from aft_common.exceptions import NoAccountFactoryPortfolioFound
+from aft_common.logger import configure_aft_logger
+from aft_common.metrics import AFTMetrics
from boto3.session import Session
-logger = utils.get_logger()
+if TYPE_CHECKING:
+ from aws_lambda_powertools.utilities.typing import LambdaContext
+else:
+ LambdaContext = object
+configure_aft_logger()
+logger = logging.getLogger("aft")
-def lambda_handler(event: Dict[str, Any], context: Union[Dict[str, Any], None]) -> None:
+
+def lambda_handler(event: Dict[str, Any], context: LambdaContext) -> None:
+ aft_management_session = Session()
+ auth = AuthClient()
+ threshold = int(os.environ["AFT_PROVISIONING_CONCURRENCY"])
try:
- logger.info("Lambda_handler Event")
- logger.info(event)
+ account_request = AccountRequest(auth=auth)
+ try:
+ account_request.associate_aft_service_role_with_account_factory()
+ except NoAccountFactoryPortfolioFound:
+ logger.warning(
+ f"Failed to automatically associate {ProvisionRoles.SERVICE_ROLE_NAME} to portfolio {AccountRequest.ACCOUNT_FACTORY_PORTFOLIO_NAME}. Manual intervention may be required"
+ )
- session = Session()
- ct_management_session = utils.get_ct_management_session(session)
+ ct_management_session = auth.get_ct_management_session(
+ role_name=ProvisionRoles.SERVICE_ROLE_NAME
+ )
- if utils.product_provisioning_in_progress(
- ct_management_session,
- utils.get_ct_product_id(session, ct_management_session),
- ):
- logger.info("Exiting due to provisioning in progress")
+ if account_request.provisioning_threshold_reached(threshold=threshold):
+ logger.info("Concurrent account provisioning threshold reached, exiting")
return None
else:
- sqs_message = utils.receive_sqs_message(
- session,
- utils.get_ssm_parameter_value(
- session, utils.SSM_PARAM_ACCOUNT_REQUEST_QUEUE
+ sqs_message = sqs.receive_sqs_message(
+ aft_management_session,
+ aft_common.ssm.get_ssm_parameter_value(
+ aft_management_session, utils.SSM_PARAM_ACCOUNT_REQUEST_QUEUE
),
)
if sqs_message is not None:
+ aft_metrics = AFTMetrics()
+
sqs_body = json.loads(sqs_message["Body"])
ct_request_is_valid = True
if sqs_body["operation"] == "ADD":
@@ -48,27 +73,61 @@ def lambda_handler(event: Dict[str, Any], context: Union[Dict[str, Any], None])
)
if ct_request_is_valid:
response = create_new_account(
- session, ct_management_session, sqs_body
+ session=aft_management_session,
+ ct_management_session=ct_management_session,
+ request=sqs_body,
)
+
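+ # Metrics posting is best-effort: a failed post is logged and never blocks the account request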
+ action = "new-account-creation-invoked"
+ try:
+ aft_metrics.post_event(action=action, status="SUCCEEDED")
+ logger.info(
+ f"Successfully logged metrics. Action: {action}"
+ )
+ except Exception as e:
+ logger.info(
+ f"Unable to report metrics. Action: {action}; Error: {e}"
+ )
+
elif sqs_body["operation"] == "UPDATE":
ct_request_is_valid = modify_ct_request_is_valid(sqs_body)
if ct_request_is_valid:
update_existing_account(
- session, ct_management_session, sqs_body
+ session=aft_management_session,
+ ct_management_session=ct_management_session,
+ request=sqs_body,
)
+
+ action = "existing-account-update-invoked"
+ try:
+ aft_metrics.post_event(action=action, status="SUCCEEDED")
+ logger.info(
+ f"Successfully logged metrics. Action: {action}"
+ )
+ except Exception as e:
+ logger.info(
+ f"Unable to report metrics. Action: {action}; Error: {e}"
+ )
else:
logger.info("Unknown operation received in message")
+ raise RuntimeError("Unknown operation received in message")
- utils.delete_sqs_message(session, sqs_message)
+ sqs.delete_sqs_message(aft_management_session, sqs_message)
if not ct_request_is_valid:
logger.exception("CT Request is not valid")
- assert ct_request_is_valid
+ raise RuntimeError("CT Request is not valid")
- except Exception as e:
+ except Exception as error:
+ notifications.send_lambda_failure_sns_message(
+ session=aft_management_session,
+ message=str(error),
+ context=context,
+ subject="AFT account request failed",
+ )
message = {
"FILE": __file__.split("/")[-1],
"METHOD": inspect.stack()[0][3],
- "EXCEPTION": str(e),
+ "EXCEPTION": str(error),
}
logger.exception(message)
raise
diff --git a/src/aft_lambda/aft_account_request_framework/aft_cleanup_resources.py b/src/aft_lambda/aft_account_request_framework/aft_cleanup_resources.py
new file mode 100644
index 00000000..f2f57926
--- /dev/null
+++ b/src/aft_lambda/aft_account_request_framework/aft_cleanup_resources.py
@@ -0,0 +1,83 @@
+# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+# SPDX-License-Identifier: Apache-2.0
+#
+import inspect
+import logging
+from typing import TYPE_CHECKING, Any, Dict
+
+from aft_common import codepipeline, ddb
+from aft_common.aft_utils import sanitize_input_for_logging
+from aft_common.auth import AuthClient
+from aft_common.constants import SSM_PARAM_AFT_DDB_META_TABLE
+from aft_common.logger import configure_aft_logger
+from aft_common.notifications import send_lambda_failure_sns_message
+from aft_common.organizations import OrganizationsAgent
+from aft_common.ssm import get_ssm_parameter_value
+
+if TYPE_CHECKING:
+ from aws_lambda_powertools.utilities.typing import LambdaContext
+else:
+ LambdaContext = object
+
+
+configure_aft_logger()
+logger = logging.getLogger("aft")
+
+
+def lambda_handler(event: Dict[str, Any], context: LambdaContext) -> None:
+ # Acquire the management session before entering the try block so the
+ # except handler can reuse it for the failure SNS message
+ auth = AuthClient()
+ aft_management_session = auth.get_aft_management_session()
+
+ try:
+
+ account_request = event["account_request"]
+ account_email = account_request[
+ "id"
+ ] # the account email is stored in the "id" field
+
+ logger.info(
+ f"Beginning resource cleanup for {sanitize_input_for_logging(account_email)}"
+ )
+
+ orgs_agent = OrganizationsAgent(
+ ct_management_session=auth.get_ct_management_session()
+ )
+
+ # Can NOT use known-OU optimization here as request OU may be different
+ # from existing OU
+ account_id = orgs_agent.get_account_id_from_email(email=account_email)
+
+ logger.info(f"Deleting account customization pipeline for {account_id}")
+ codepipeline.delete_customization_pipeline(
+ aft_management_session=aft_management_session, account_id=account_id
+ )
+ logger.info(f"Customization pipeline deleted")
+
+ aft_request_metadata_table_name = get_ssm_parameter_value(
+ aft_management_session,
+ SSM_PARAM_AFT_DDB_META_TABLE,
+ )
+
+ logger.info(f"Deleting account metadata record for {account_id}")
+ ddb.delete_ddb_item(
+ session=aft_management_session,
+ table_name=aft_request_metadata_table_name,
+ primary_key={"id": account_id},
+ )
+ logger.info(f"Account metadata record deleted")
+
+ logger.info(f"Cleanup for {account_id} complete ")
+
+ except Exception as error:
+ send_lambda_failure_sns_message(
+ session=aft_management_session,
+ message=str(error),
+ context=context,
+ subject="AFT account request failed",
+ )
+ message = {
+ "FILE": __file__.split("/")[-1],
+ "METHOD": inspect.stack()[0][3],
+ "EXCEPTION": str(error),
+ }
+ logger.exception(message)
+ raise
diff --git a/src/aft_lambda/aft_account_request_framework/aft_controltower_event_logger.py b/src/aft_lambda/aft_account_request_framework/aft_controltower_event_logger.py
index 0b745309..b135db5a 100644
--- a/src/aft_lambda/aft_account_request_framework/aft_controltower_event_logger.py
+++ b/src/aft_lambda/aft_account_request_framework/aft_controltower_event_logger.py
@@ -2,47 +2,51 @@
# SPDX-License-Identifier: Apache-2.0
#
import inspect
-from typing import Any, Dict, Union
+import logging
+from typing import TYPE_CHECKING, Any, Dict
-import aft_common.aft_utils as utils
+import aft_common.ssm
import boto3
+from aft_common import constants as utils
+from aft_common import ddb, notifications
+from aft_common.logger import configure_aft_logger
-logger = utils.get_logger()
+if TYPE_CHECKING:
+ from aws_lambda_powertools.utilities.typing import LambdaContext
+ from mypy_boto3_dynamodb.type_defs import PutItemOutputTableTypeDef
+else:
+ PutItemOutputTableTypeDef = object
+ LambdaContext = object
+
+configure_aft_logger()
+logger = logging.getLogger("aft")
def lambda_handler(
- event: Dict[str, Any], context: Union[Dict[str, Any], None]
-) -> Dict[str, Any]:
+ event: Dict[str, Any], context: LambdaContext
+) -> PutItemOutputTableTypeDef:
+ session = boto3.session.Session()
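+ # Created before the try block so the except handler can reuse it for the failure SNS notification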
try:
- logger.info("Lambda_handler Event")
- logger.info(event)
-
- try:
- session = boto3.session.Session()
-
- response: Dict[str, Any] = utils.put_ddb_item(
- session,
- utils.get_ssm_parameter_value(
- session, utils.SSM_PARAM_AFT_EVENTS_TABLE
- ),
- event,
- )
- return response
-
- except Exception as e:
- message = {
- "FILE": __file__.split("/")[-1],
- "METHOD": inspect.stack()[0][3],
- "EXCEPTION": str(e),
- }
- logger.exception(message)
- raise
-
- except Exception as e:
+ response = ddb.put_ddb_item(
+ session,
+ aft_common.ssm.get_ssm_parameter_value(
+ session, utils.SSM_PARAM_AFT_EVENTS_TABLE
+ ),
+ event,
+ )
+ return response
+
+ except Exception as error:
+ notifications.send_lambda_failure_sns_message(
+ session=session,
+ message=str(error),
+ context=context,
+ subject="AFT Event Logging failed",
+ )
message = {
"FILE": __file__.split("/")[-1],
"METHOD": inspect.stack()[0][3],
- "EXCEPTION": str(e),
+ "EXCEPTION": str(error),
}
logger.exception(message)
raise
diff --git a/src/aft_lambda/aft_account_request_framework/aft_invoke_aft_account_provisioning_framework.py b/src/aft_lambda/aft_account_request_framework/aft_invoke_aft_account_provisioning_framework.py
index 51548bf2..1f067935 100644
--- a/src/aft_lambda/aft_account_request_framework/aft_invoke_aft_account_provisioning_framework.py
+++ b/src/aft_lambda/aft_account_request_framework/aft_invoke_aft_account_provisioning_framework.py
@@ -3,54 +3,114 @@
#
import inspect
import json
-from typing import Any, Dict, Union
+import logging
+from typing import TYPE_CHECKING, Any, Dict
-import aft_common.aft_utils as utils
+from aft_common.account_provisioning_framework import ProvisionRoles
from aft_common.account_request_framework import (
- build_invoke_event,
- is_customizations_event,
+ build_account_customization_payload,
+ get_account_request_record,
)
+from aft_common.aft_utils import (
+ invoke_step_function,
+ is_aft_supported_controltower_event,
+)
+from aft_common.auth import AuthClient
+from aft_common.constants import SSM_PARAM_AFT_SFN_NAME
+from aft_common.logger import configure_aft_logger
+from aft_common.notifications import send_lambda_failure_sns_message
+from aft_common.organizations import OrganizationsAgent
+from aft_common.ssm import get_ssm_parameter_value
from boto3.session import Session
-logger = utils.get_logger()
+if TYPE_CHECKING:
+ from aws_lambda_powertools.utilities.typing import LambdaContext
+else:
+ LambdaContext = object
+
+configure_aft_logger()
+logger = logging.getLogger("aft")
-def lambda_handler(event: Dict[str, Any], context: Union[Dict[str, Any], None]) -> None:
+def lambda_handler(event: Dict[str, Any], context: LambdaContext) -> None:
+ auth = AuthClient()
+ aft_management_session = auth.get_aft_management_session()
+
try:
- logger.info("Lambda_handler Event")
- logger.info(event)
- session = Session()
- ct_management_session = utils.get_ct_management_session(session)
- response = None
- if utils.is_controltower_event(
- event
- ) and utils.is_aft_supported_controltower_event(event):
+ ct_management_session = auth.get_ct_management_session(
+ role_name=ProvisionRoles.SERVICE_ROLE_NAME
+ )
+ orgs_agent = OrganizationsAgent(ct_management_session)
+
+ control_tower_event = (
+ {}
+ ) # Unused by AFT; kept for backwards compatibility so aft-account-provisioning-customizations can still consume it
+ if is_aft_supported_controltower_event(event):
+ control_tower_event = event
+
logger.info("Control Tower Event Detected")
- invoke_event = build_invoke_event(
- session, ct_management_session, event, "ControlTower"
- )
- response = utils.invoke_step_function(
- session,
- utils.get_ssm_parameter_value(session, utils.SSM_PARAM_AFT_SFN_NAME),
- json.dumps(invoke_event),
+
+ # Get account ID from CT event
+ # Different CT events have different data structures - map them for easier access
+ event_name_to_event_detail_key_map = {
+ "CreateManagedAccount": "createManagedAccountStatus",
+ "UpdateManagedAccount": "updateManagedAccountStatus",
+ }
+ event_name = event["detail"]["eventName"]
+ account_id = event["detail"]["serviceEventDetails"][
+ event_name_to_event_detail_key_map[event_name]
+ ]["account"]["accountId"]
+
+ # CT events do not contain email, which is PK of DDB table
+ account_email = orgs_agent.get_account_email_from_id(account_id=account_id)
+ account_request = get_account_request_record(
+ aft_management_session, account_email
)
- elif is_customizations_event(event):
+
+ elif "account_request" in event:
logger.info("Account Customizations Event Detected")
- invoke_event = build_invoke_event(
- session, ct_management_session, event, "Customizations"
- )
- response = utils.invoke_step_function(
- session,
- utils.get_ssm_parameter_value(session, utils.SSM_PARAM_AFT_SFN_NAME),
- json.dumps(invoke_event),
+
+ # Customization-only event does not contain ID
+ # Contains OU, and if OU-move was requested, would be completed
+ # by this step, so can optimize with OU-only search
+ account_request = event["account_request"]
+ account_ou = account_request["control_tower_parameters"][
+ "ManagedOrganizationalUnit"
+ ]
+ account_id = orgs_agent.get_account_id_from_email(
+ email=event["account_request"][
+ "id"
+ ], # `id` field of ddb table is the account email
+ ou_name=account_ou,
)
- logger.info(response)
- except Exception as e:
+ else:
+ raise RuntimeError("Invoked with unrecognized event type")
+
+ account_customization_payload = build_account_customization_payload(
+ ct_management_session=ct_management_session,
+ account_id=account_id,
+ account_request=account_request,
+ control_tower_event=control_tower_event,
+ )
+
+ invoke_step_function(
+ aft_management_session,
+ get_ssm_parameter_value(aft_management_session, SSM_PARAM_AFT_SFN_NAME),
+ json.dumps(account_customization_payload),
+ )
+
+ except Exception as error:
+ send_lambda_failure_sns_message(
+ session=aft_management_session,
+ message=str(error),
+ context=context,
+ subject="AFT account request failed",
+ )
message = {
"FILE": __file__.split("/")[-1],
"METHOD": inspect.stack()[0][3],
- "EXCEPTION": str(e),
+ "EXCEPTION": str(error),
}
logger.exception(message)
raise
diff --git a/src/aft_lambda/aft_builder/codebuild_invoker.py b/src/aft_lambda/aft_builder/codebuild_trigger.py
similarity index 66%
rename from src/aft_lambda/aft_builder/codebuild_invoker.py
rename to src/aft_lambda/aft_builder/codebuild_trigger.py
index e8631821..8ecf00ea 100644
--- a/src/aft_lambda/aft_builder/codebuild_invoker.py
+++ b/src/aft_lambda/aft_builder/codebuild_trigger.py
@@ -4,8 +4,9 @@
import datetime
import inspect
import logging
+import re
import time
-from typing import Any, Dict, TypedDict, Union
+from typing import Any, Dict, TypedDict
from boto3.session import Session
@@ -19,18 +20,20 @@ class LayerBuildStatus(TypedDict):
# This function is directly responsible for building `aft_common` library
# Do not import `aft_common` into this handler!
-def lambda_handler(
- event: Dict[str, Any], context: Union[Dict[str, Any], None]
-) -> LayerBuildStatus:
+def lambda_handler(event: Dict[str, Any], context: Dict[str, Any]) -> LayerBuildStatus:
+ session = Session()
try:
- session = Session()
client = session.client("codebuild")
codebuild_project_name = event["codebuild_project_name"]
job_id = client.start_build(projectName=codebuild_project_name)["build"]["id"]
-
- logger.info(f"Started build project {codebuild_project_name} job {job_id}")
-
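+ # Restrict logged values to [a-zA-Z0-9-_] so event-supplied strings cannot forge log entries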
+ sanitized_codebuild_project_name = re.sub(
+ r"[^a-zA-Z0-9-_]", "", codebuild_project_name
+ )
+ sanitized_job_id = re.sub(r"[^a-zA-Z0-9-_]", "", job_id)
+ logger.info(
+ f"Started build project {sanitized_codebuild_project_name} job {sanitized_job_id}"
+ )
# Wait at least 30 seconds for the build to initialize
time.sleep(30)
@@ -45,20 +48,21 @@ def lambda_handler(
time.sleep(10)
continue
elif job_status == "SUCCEEDED":
- logger.info(f"Build job {job_id} completed successfully")
+ logger.info(f"Build job {sanitized_job_id} completed successfully")
return {"Status": 200}
else:
- logger.info(f"Build {job_id} failed - non-success terminal status")
+ logger.info(
+ f"Build {sanitized_job_id} failed - non-success terminal status"
+ )
raise Exception(f"Build {job_id} failed - non-success terminal status")
-
- logger.info(f"Build {job_id} failed - time out")
+ logger.info(f"Build {sanitized_job_id} failed - time out")
raise Exception(f"Build {job_id} failed - time out")
- except Exception as e:
+ except Exception as error:
message = {
"FILE": __file__.split("/")[-1],
"METHOD": inspect.stack()[0][3],
- "EXCEPTION": str(e),
+ "EXCEPTION": str(error),
}
logger.exception(message)
raise
diff --git a/src/aft_lambda/aft_customizations/aft_customizations_execute_pipeline.py b/src/aft_lambda/aft_customizations/aft_customizations_execute_pipeline.py
index 9509f798..2d1c388e 100644
--- a/src/aft_lambda/aft_customizations/aft_customizations_execute_pipeline.py
+++ b/src/aft_lambda/aft_customizations/aft_customizations_execute_pipeline.py
@@ -2,28 +2,31 @@
# SPDX-License-Identifier: Apache-2.0
#
import inspect
-from typing import Any, Dict, Union
-
-import aft_common.aft_utils as utils
-from aft_common.customizations import execute_pipeline
+import logging
+from typing import TYPE_CHECKING, Any, Dict
+
+import aft_common.ssm
+from aft_common import constants as utils
+from aft_common import notifications
+from aft_common.aft_utils import sanitize_input_for_logging
+from aft_common.codepipeline import execute_pipeline
+from aft_common.logger import configure_aft_logger, customization_request_logger
from boto3.session import Session
-logger = utils.get_logger()
-
+if TYPE_CHECKING:
+ from aws_lambda_powertools.utilities.typing import LambdaContext
+else:
+ LambdaContext = object
-def lambda_handler(
- event: Dict[str, Any], context: Union[Dict[str, Any], None]
-) -> Dict[str, Any]:
+configure_aft_logger()
+logger = logging.getLogger("aft")
- logger.info("Lambda_handler Event")
- logger.info(event)
+def lambda_handler(event: Dict[str, Any], context: LambdaContext) -> Dict[str, Any]:
+ session = Session()
try:
- logger.info("Lambda_handler Event")
- logger.info(event)
- session = Session()
maximum_concurrent_pipelines = int(
- utils.get_ssm_parameter_value(
+ aft_common.ssm.get_ssm_parameter_value(
session, utils.SSM_PARAM_AFT_MAXIMUM_CONCURRENT_CUSTOMIZATIONS
)
)
@@ -32,18 +35,25 @@ def lambda_handler(
pipelines_to_run = maximum_concurrent_pipelines - running_pipelines
accounts = event["targets"]["pending_accounts"]
logger.info("Accounts submitted for execution: " + str(len(accounts)))
- for p in accounts[:pipelines_to_run]:
- execute_pipeline(session, str(p))
- accounts.remove(p)
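+ # Launch at most pipelines_to_run pipelines now; leftover accounts stay in the list and are returned as pending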
+ for account_id in accounts[:pipelines_to_run]:
+ execute_pipeline(session, str(account_id))
+ accounts.remove(account_id)
logger.info("Accounts remaining to be executed - ")
- logger.info(accounts)
+ sanitized_accounts = sanitize_input_for_logging(accounts)
+ logger.info(sanitized_accounts)
return {"number_pending_accounts": len(accounts), "pending_accounts": accounts}
- except Exception as e:
+ except Exception as error:
+ notifications.send_lambda_failure_sns_message(
+ session=session,
+ message=str(error),
+ context=context,
+ subject="Failed to trigger one or more AFT account customization pipelines",
+ )
message = {
"FILE": __file__.split("/")[-1],
"METHOD": inspect.stack()[0][3],
- "EXCEPTION": str(e),
+ "EXCEPTION": str(error),
}
logger.exception(message)
raise
diff --git a/src/aft_lambda/aft_customizations/aft_customizations_get_pipeline_executions.py b/src/aft_lambda/aft_customizations/aft_customizations_get_pipeline_executions.py
index 4342e2af..6d1e63c3 100644
--- a/src/aft_lambda/aft_customizations/aft_customizations_get_pipeline_executions.py
+++ b/src/aft_lambda/aft_customizations/aft_customizations_get_pipeline_executions.py
@@ -2,34 +2,43 @@
# SPDX-License-Identifier: Apache-2.0
#
import inspect
-from typing import Any, Dict, Union
+import logging
+from typing import TYPE_CHECKING, Any, Dict
-import aft_common.aft_utils as utils
-from aft_common.customizations import get_running_pipeline_count, list_pipelines
+from aft_common import aft_utils as utils
+from aft_common import notifications
+from aft_common.codepipeline import get_running_pipeline_count, list_pipelines
+from aft_common.logger import configure_aft_logger
from boto3.session import Session
-logger = utils.get_logger()
+if TYPE_CHECKING:
+ from aws_lambda_powertools.utilities.typing import LambdaContext
+else:
+ LambdaContext = object
+configure_aft_logger()
+logger = logging.getLogger("aft")
-def lambda_handler(
- event: Dict[str, Any], context: Union[Dict[str, Any], None]
-) -> Dict[str, int]:
-
- logger.info("Lambda_handler Event")
- logger.info(event)
+def lambda_handler(event: Dict[str, Any], context: LambdaContext) -> Dict[str, int]:
+ session = Session()
try:
- session = Session()
pipelines = list_pipelines(session)
running_pipelines = get_running_pipeline_count(session, pipelines)
return {"running_pipelines": running_pipelines}
- except Exception as e:
+ except Exception as error:
+ notifications.send_lambda_failure_sns_message(
+ session=session,
+ message=str(error),
+ context=context,
+ subject="Failed to list all AFT account customization pipelines",
+ )
message = {
"FILE": __file__.split("/")[-1],
"METHOD": inspect.stack()[0][3],
- "EXCEPTION": str(e),
+ "EXCEPTION": str(error),
}
logger.exception(message)
raise
diff --git a/src/aft_lambda/aft_customizations/aft_customizations_identify_targets.py b/src/aft_lambda/aft_customizations/aft_customizations_identify_targets.py
index a0e05e3e..afcd6162 100644
--- a/src/aft_lambda/aft_customizations/aft_customizations_identify_targets.py
+++ b/src/aft_lambda/aft_customizations/aft_customizations_identify_targets.py
@@ -2,41 +2,59 @@
# SPDX-License-Identifier: Apache-2.0
#
import inspect
+import logging
-import sys
-from typing import Any, Dict, Union
+from typing import TYPE_CHECKING, Any, Dict
-import aft_common.aft_utils as utils
+from aft_common import notifications
+from aft_common.account_request_framework import (
+ build_account_customization_payload,
+ get_account_request_record,
+)
+from aft_common.aft_utils import sanitize_input_for_logging
+from aft_common.auth import AuthClient
from aft_common.customizations import (
get_excluded_accounts,
get_included_accounts,
get_target_accounts,
- validate_request,
+ validate_identify_targets_request,
)
-from boto3.session import Session
+from aft_common.logger import configure_aft_logger
+from aft_common.organizations import OrganizationsAgent
+from botocore.exceptions import ClientError
-logger = utils.get_logger()
+if TYPE_CHECKING:
+ from aws_lambda_powertools.utilities.typing import LambdaContext
+else:
+ LambdaContext = object
-def lambda_handler(
- event: Dict[str, Any], context: Union[Dict[str, Any], None]
-) -> Dict[str, Any]:
+configure_aft_logger()
+logger = logging.getLogger("aft")
- logger.info("Lambda_handler Event")
- logger.info(event)
+def lambda_handler(event: Dict[str, Any], context: LambdaContext) -> Dict[str, Any]:
+ auth = AuthClient()
+ aft_management_session = auth.get_aft_management_session()
try:
+ ct_mgmt_session = auth.get_ct_management_session()
+
+ # Reuse orgs agent to benefit from memoization, avoid throttling
+ orgs_agent = OrganizationsAgent(ct_mgmt_session)
+
payload = event
- if not validate_request(payload):
- sys.exit(1)
+ if not validate_identify_targets_request(payload):
+ raise ValueError("Invalid 'identify_targets_request' payload")
else:
- session = Session()
- ct_mgmt_session = utils.get_ct_management_session(session)
included_accounts = get_included_accounts(
- session, ct_mgmt_session, payload["include"]
+ aft_management_session, ct_mgmt_session, orgs_agent, payload["include"]
)
if "exclude" in payload.keys():
excluded_accounts = get_excluded_accounts(
- session, ct_mgmt_session, payload["exclude"]
+ aft_management_session,
+ ct_mgmt_session,
+ orgs_agent,
+ payload["exclude"],
)
target_accounts = get_target_accounts(
included_accounts, excluded_accounts
@@ -44,16 +62,55 @@ def lambda_handler(
else:
target_accounts = included_accounts
+ target_account_info = []
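+ # Assemble a customization payload for every target account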
+ # Iterate over a copy: unresolvable accounts are removed from target_accounts below
+ for account_id in list(target_accounts):
+ sanitized_account_id = sanitize_input_for_logging(account_id)
+ logger.info(
+ f"Building customization payload for {sanitized_account_id}"
+ )
+ try:
+ account_email = orgs_agent.get_account_email_from_id(account_id)
+ except ClientError as error:
+ if error.response["Error"]["Code"] == "AccountNotFoundException":
+ logger.info(
+ f"Account with ID {sanitized_account_id} does not exist or is suspended - ignoring"
+ )
+ target_accounts.remove(account_id)
+ continue
+ else:
+ raise error
+
+ account_request = get_account_request_record(
+ aft_management_session=aft_management_session,
+ request_table_id=account_email,
+ )
+ account_payload = build_account_customization_payload(
+ ct_management_session=ct_mgmt_session,
+ account_id=account_id,
+ account_request=account_request,
+ control_tower_event={},
+ )
+ sanitized_payload = sanitize_input_for_logging(account_payload)
+ logger.info(f"Successfully generated payload: {sanitized_payload}")
+ target_account_info.append(account_payload)
+
return {
"number_pending_accounts": len(target_accounts),
"pending_accounts": target_accounts,
+ "target_accounts_info": target_account_info,
}
- except Exception as e:
+ except Exception as error:
+ notifications.send_lambda_failure_sns_message(
+ session=aft_management_session,
+ message=str(error),
+ context=context,
+ subject="Failed to identify targets for AFT account customizations",
+ )
message = {
"FILE": __file__.split("/")[-1],
"METHOD": inspect.stack()[0][3],
- "EXCEPTION": str(e),
+ "EXCEPTION": str(error),
}
logger.exception(message)
raise
diff --git a/src/aft_lambda/aft_customizations/aft_customizations_invoke_account_provisioning_framework.py b/src/aft_lambda/aft_customizations/aft_customizations_invoke_account_provisioning_framework.py
deleted file mode 100644
index 31de7943..00000000
--- a/src/aft_lambda/aft_customizations/aft_customizations_invoke_account_provisioning_framework.py
+++ /dev/null
@@ -1,56 +0,0 @@
-# Copyright Amazon.com, Inc. or its affiliates. All rights reserved.
-# SPDX-License-Identifier: Apache-2.0
-#
-import inspect
-from typing import Any, Dict, Union
-
-import aft_common.aft_utils as utils
-from aft_common.customizations import (
- build_invoke_event,
- get_account_metadata_record,
- get_account_request_record,
- invoke_account_provisioning_sfn,
-)
-from boto3.session import Session
-
-logger = utils.get_logger()
-
-
-def lambda_handler(event: Dict[str, Any], context: Union[Dict[str, Any], None]) -> None:
- try:
- logger.info("Lambda_handler Event")
- logger.info(event)
- session = Session()
-
- pending_account_ids = event["targets"]["pending_accounts"]
- account_metadata_table = utils.get_ssm_parameter_value(
- session, utils.SSM_PARAM_AFT_DDB_META_TABLE
- )
- account_request_table = utils.get_ssm_parameter_value(
- session, utils.SSM_PARAM_AFT_DDB_REQ_TABLE
- )
- provisioning_framework_sfn = utils.get_ssm_parameter_value(
- session, utils.SSM_PARAM_AFT_SFN_NAME
- )
-
- for a in pending_account_ids:
- account_metadata_record = get_account_metadata_record(
- session, account_metadata_table, a
- )
- account_request_email = account_metadata_record["email"]
- account_request_record = get_account_request_record(
- session, account_request_table, account_request_email
- )
- sfn_event = build_invoke_event(account_request_record)
- invoke_account_provisioning_sfn(
- session, provisioning_framework_sfn, sfn_event
- )
-
- except Exception as e:
- message = {
- "FILE": __file__.split("/")[-1],
- "METHOD": inspect.stack()[0][3],
- "EXCEPTION": str(e),
- }
- logger.exception(message)
- raise
diff --git a/src/aft_lambda/aft_feature_options/aft_delete_default_vpc.py b/src/aft_lambda/aft_feature_options/aft_delete_default_vpc.py
index bf2a08ad..ec45cd42 100644
--- a/src/aft_lambda/aft_feature_options/aft_delete_default_vpc.py
+++ b/src/aft_lambda/aft_feature_options/aft_delete_default_vpc.py
@@ -2,10 +2,14 @@
# SPDX-License-Identifier: Apache-2.0
#
import inspect
-from typing import TYPE_CHECKING, Any, Dict, Union
+from typing import TYPE_CHECKING, Any, Dict
-import aft_common.aft_utils as utils
+import aft_common.ssm
import boto3
+from aft_common import constants as utils
+from aft_common import notifications
+from aft_common.account_provisioning_framework import ProvisionRoles
+from aft_common.auth import AuthClient
from aft_common.feature_options import (
delete_acls,
delete_internet_gateways,
@@ -21,63 +25,45 @@
get_vpc_security_groups,
get_vpc_subnets,
)
+from aft_common.logger import customization_request_logger
if TYPE_CHECKING:
+ from aws_lambda_powertools.utilities.typing import LambdaContext
from mypy_boto3_ec2 import EC2Client, EC2ServiceResource
else:
EC2Client = object
EC2ServiceResource = object
+ LambdaContext = object
-logger = utils.get_logger()
+def lambda_handler(event: Dict[str, Any], context: LambdaContext) -> None:
+ request_id = event["customization_request_id"]
+ target_account_id = event["account_info"]["account"]["id"]
-def lambda_handler(event: Dict[str, Any], context: Union[Dict[str, Any], None]) -> None:
- logger.info("Lambda_handler Event")
- logger.info(event)
+ logger = customization_request_logger(
+ aws_account_id=target_account_id, customization_request_id=request_id
+ )
+ auth = AuthClient()
+ aft_session = boto3.session.Session()
try:
- logger.info("Lambda_handler Event")
- logger.info(event)
- aft_session = boto3.session.Session()
- role_arn = utils.build_role_arn(
- aft_session,
- utils.get_ssm_parameter_value(aft_session, utils.SSM_PARAM_AFT_ADMIN_ROLE),
+ target_account_session = auth.get_target_account_session(
+ account_id=target_account_id, role_name=ProvisionRoles.SERVICE_ROLE_NAME
)
- aft_admin_session = utils.get_boto_session(
- utils.get_assume_role_credentials(
- aft_session,
- role_arn,
- utils.get_ssm_parameter_value(
- aft_session, utils.SSM_PARAM_AFT_SESSION_NAME
- ),
- )
- )
- target_account = event["account_info"]["account"]["id"]
- role_arn = utils.build_role_arn(
- aft_session,
- utils.get_ssm_parameter_value(aft_session, utils.SSM_PARAM_AFT_EXEC_ROLE),
- target_account,
- )
- session = utils.get_boto_session(
- utils.get_assume_role_credentials(
- aft_admin_session,
- role_arn,
- utils.get_ssm_parameter_value(
- aft_session, utils.SSM_PARAM_AFT_SESSION_NAME
- ),
- )
- )
- client: EC2Client = session.client("ec2")
+ client: EC2Client = target_account_session.client("ec2")
regions = get_aws_regions(client)
if (
- utils.get_ssm_parameter_value(
+ aft_common.ssm.get_ssm_parameter_value(
aft_session, utils.SSM_PARAM_FEATURE_DEFAULT_VPCS_ENABLED
).lower()
== "true"
):
for region in regions:
- logger.info("Creating boto3 session in " + region)
+ logger.info(
+ "Deleting default VPC for AFT management account in region "
+ + region
+ )
session = boto3.session.Session(region_name=region)
client = session.client("ec2")
vpc = get_default_vpc(client)
@@ -99,11 +85,17 @@ def lambda_handler(event: Dict[str, Any], context: Union[Dict[str, Any], None])
delete_security_groups(client, security_groups)
delete_vpc(client, vpc)
- except Exception as e:
+ except Exception as error:
+ notifications.send_lambda_failure_sns_message(
+ session=aft_session,
+ message=str(error),
+ context=context,
+ subject="AFT: Failed to delete default VPC",
+ )
message = {
"FILE": __file__.split("/")[-1],
"METHOD": inspect.stack()[0][3],
- "EXCEPTION": str(e),
+ "EXCEPTION": str(error),
}
logger.exception(message)
raise
diff --git a/src/aft_lambda/aft_feature_options/aft_enable_cloudtrail.py b/src/aft_lambda/aft_feature_options/aft_enable_cloudtrail.py
index 6a8f25b9..fb9e1d76 100644
--- a/src/aft_lambda/aft_feature_options/aft_enable_cloudtrail.py
+++ b/src/aft_lambda/aft_feature_options/aft_enable_cloudtrail.py
@@ -2,9 +2,13 @@
# SPDX-License-Identifier: Apache-2.0
#
import inspect
-from typing import Any, Dict, Union
+from typing import TYPE_CHECKING, Any, Dict
-import aft_common.aft_utils as utils
+import aft_common.ssm
+from aft_common import constants as utils
+from aft_common import notifications
+from aft_common.account_provisioning_framework import ProvisionRoles
+from aft_common.auth import AuthClient
from aft_common.feature_options import (
create_trail,
event_selectors_exists,
@@ -14,35 +18,51 @@
trail_exists,
trail_is_logging,
)
+from aft_common.logger import customization_request_logger
from boto3.session import Session
-logger = utils.get_logger()
+if TYPE_CHECKING:
+ from aws_lambda_powertools.utilities.typing import LambdaContext
+else:
+ LambdaContext = object
CLOUDTRAIL_TRAIL_NAME = "aws-aft-CustomizationsCloudTrail"
-def lambda_handler(event: Dict[str, Any], context: Union[Dict[str, Any], None]) -> None:
+def lambda_handler(event: Dict[str, Any], context: LambdaContext) -> None:
+ request_id = event["customization_request_id"]
+ target_account_id = event["account_info"]["account"]["id"]
+
+ logger = customization_request_logger(
+ aws_account_id=target_account_id, customization_request_id=request_id
+ )
+
+ auth = AuthClient()
+ aft_session = Session()
+
try:
- logger.info("Lambda_handler Event")
- logger.info(event)
- aft_session = Session()
- ct_session = utils.get_ct_management_session(aft_session)
- log_archive_session = utils.get_log_archive_session(aft_session)
+ ct_session = auth.get_ct_management_session(
+ role_name=ProvisionRoles.SERVICE_ROLE_NAME
+ )
+ log_archive_session = auth.get_log_archive_session(
+ role_name=ProvisionRoles.SERVICE_ROLE_NAME
+ )
# Get SSM Parameters
- cloudtrail_enabled = utils.get_ssm_parameter_value(
+ cloudtrail_enabled = aft_common.ssm.get_ssm_parameter_value(
aft_session, utils.SSM_PARAM_FEATURE_CLOUDTRAIL_DATA_EVENTS_ENABLED
)
- s3_log_bucket_arn = utils.get_ssm_parameter_value(
+ s3_log_bucket_arn = aft_common.ssm.get_ssm_parameter_value(
aft_session, "/aft/account/log-archive/log_bucket_arn"
)
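+ # An S3 bucket ARN has the form arn:aws:s3:::bucket-name, so the name is the segment after ":::"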
s3_bucket_name = s3_log_bucket_arn.split(":::")[1]
- kms_key_arn = utils.get_ssm_parameter_value(
+ kms_key_arn = aft_common.ssm.get_ssm_parameter_value(
aft_session, "/aft/account/log-archive/kms_key_arn"
)
log_bucket_arns = get_log_bucket_arns(log_archive_session)
if cloudtrail_enabled == "true":
+ logger.info("Enabling Cloudtrail")
if not trail_exists(ct_session):
create_trail(ct_session, s3_bucket_name, kms_key_arn)
if not event_selectors_exists(ct_session):
@@ -50,11 +70,17 @@ def lambda_handler(event: Dict[str, Any], context: Union[Dict[str, Any], None])
if not trail_is_logging(ct_session):
start_logging(ct_session)
- except Exception as e:
+ except Exception as error:
+ notifications.send_lambda_failure_sns_message(
+ session=aft_session,
+ message=str(error),
+ context=context,
+ subject="AFT: Failed to enable cloudtrail",
+ )
message = {
"FILE": __file__.split("/")[-1],
"METHOD": inspect.stack()[0][3],
- "EXCEPTION": str(e),
+ "EXCEPTION": str(error),
}
logger.exception(message)
raise
diff --git a/src/aft_lambda/aft_feature_options/aft_enroll_support.py b/src/aft_lambda/aft_feature_options/aft_enroll_support.py
index 37e9bf26..a7e44e55 100644
--- a/src/aft_lambda/aft_feature_options/aft_enroll_support.py
+++ b/src/aft_lambda/aft_feature_options/aft_enroll_support.py
@@ -2,37 +2,65 @@
# SPDX-License-Identifier: Apache-2.0
#
import inspect
-from typing import Any, Dict, Optional
+from typing import TYPE_CHECKING, Any, Dict
-import aft_common.aft_utils as utils
+import aft_common.ssm
+from aft_common import constants as utils
+from aft_common import feature_options, notifications
+from aft_common.account_provisioning_framework import ProvisionRoles
+from aft_common.auth import AuthClient
+from aft_common.logger import customization_request_logger
from aft_common.premium_support import account_enrollment_requested, generate_case
from boto3.session import Session
-logger = utils.get_logger()
+if TYPE_CHECKING:
+ from aws_lambda_powertools.utilities.typing import LambdaContext
+else:
+ LambdaContext = object
-def lambda_handler(event: Dict[str, Any], context: Optional[Dict[str, Any]]) -> None:
+def lambda_handler(event: Dict[str, Any], context: LambdaContext) -> None:
+ request_id = event["customization_request_id"]
+ target_account_id = event["account_info"]["account"]["id"]
+ request_id, target_account_id = (
+ feature_options.get_target_account_and_customization_id_from_event(event=event)
+ )
+
+ logger = customization_request_logger(
+ aws_account_id=target_account_id, customization_request_id=request_id
+ )
+
+ auth = AuthClient()
+ aft_session = Session()
try:
- logger.info("Lambda_handler Event")
- logger.info(event)
- aft_session = Session()
- ct_mgmt_session = utils.get_ct_management_session(aft_session)
- target_account_id = event["account_info"]["account"]["id"]
+ ct_mgmt_session = auth.get_ct_management_session(
+ role_name=ProvisionRoles.SERVICE_ROLE_NAME
+ )
+
if (
- utils.get_ssm_parameter_value(
+ aft_common.ssm.get_ssm_parameter_value(
aft_session, utils.SSM_PARAM_FEATURE_ENTERPRISE_SUPPORT_ENABLED
).lower()
== "true"
):
if not account_enrollment_requested(ct_mgmt_session, target_account_id):
+ logger.info(
+ "Generating support case for enrolling target account into AWS Enterprise Support"
+ )
generate_case(ct_mgmt_session, target_account_id)
- except Exception as e:
+ except Exception as error:
+ notifications.send_lambda_failure_sns_message(
+ session=aft_session,
+ message=str(error),
+ context=context,
+ subject="AFT: Failed to enroll into Enterprise Support",
+ )
message = {
"FILE": __file__.split("/")[-1],
"METHOD": inspect.stack()[0][3],
- "EXCEPTION": str(e),
+ "EXCEPTION": str(error),
}
logger.exception(message)
raise
diff --git a/variables.tf b/variables.tf
index acbcb6dd..fa182a90 100644
--- a/variables.tf
+++ b/variables.tf
@@ -45,12 +45,8 @@ variable "aft_framework_repo_url" {
variable "aft_framework_repo_git_ref" {
description = "Git branch from which the AFT framework should be sourced from"
- default = "main"
+ default = null
type = string
- validation {
- condition = length(var.aft_framework_repo_git_ref) > 0
- error_message = "Variable var: aft_framework_repo_git_ref cannot be empty."
- }
}
variable "aft_management_account_id" {
@@ -66,7 +62,7 @@ variable "ct_home_region" {
description = "The region from which this module will be executed. This MUST be the same region as Control Tower is deployed."
type = string
validation {
- condition = can(regex("(us(-gov)?|ap|ca|cn|eu|sa)-(central|(north|south)?(east|west)?)-\\d", var.ct_home_region))
+ condition = can(regex("(us(-gov)?|ap|ca|cn|eu|sa|me|af|il)-(central|(north|south)?(east|west)?)-\\d", var.ct_home_region))
error_message = "Variable var: region is not valid."
}
}
@@ -81,13 +77,42 @@ variable "cloudwatch_log_group_retention" {
}
}
+variable "backup_recovery_point_retention" {
+ description = "Number of days to keep backup recovery points in AFT DynamoDB tables. Default = Never Expire"
+ type = number
+ default = null
+ validation {
+ condition = var.backup_recovery_point_retention == null ? true : (var.backup_recovery_point_retention >= 1 && var.backup_recovery_point_retention <= 36500)
+ error_message = "Value must be between 1 and 36500."
+ }
+}
+variable "log_archive_bucket_object_expiration_days" {
+ description = "Amount of days to keep the objects stored in the AFT logging bucket"
+ type = number
+ default = 365
+ validation {
+ condition = var.log_archive_bucket_object_expiration_days > 0
+ error_message = "Log_archive_bucket_object_expiration_days must be an integer greater than 0."
+ }
+}
+
+variable "aft_backend_bucket_access_logs_object_expiration_days" {
+ description = "Amount of days to keep the objects stored in the access logs bucket for AFT backend buckets"
+ type = number
+ default = 365
+ validation {
+ condition = var.aft_backend_bucket_access_logs_object_expiration_days > 0
+ error_message = "Aft_backend_bucket_access_logs_object_expiration_days must be an integer greater than 0."
+ }
+}
+
variable "maximum_concurrent_customizations" {
description = "Maximum number of customizations/pipelines to run at once"
type = number
default = 5
validation {
condition = var.maximum_concurrent_customizations > 0
- error_message = "Variable var: maximum_concurrent_customizations must be greater than 0."
+ error_message = "Maximum_concurrent_customizations must be greater than 0."
}
}
@@ -101,6 +126,35 @@ variable "aft_vpc_endpoints" {
}
}
+variable "concurrent_account_factory_actions" {
+ description = "Maximum number of accounts that can be provisioned in parallel."
+ type = number
+ default = 5
+ validation {
+ condition = var.concurrent_account_factory_actions > 0
+ error_message = "Maximum_concurrent_accounts_being_provisioned must be greater than 0."
+ }
+}
+
+variable "global_codebuild_timeout" {
+ type = number
+ description = "Codebuild build timeout"
+ default = 60
+ validation {
+ condition = (
+ var.global_codebuild_timeout >= 5 &&
+ var.global_codebuild_timeout <= 480
+ )
+ error_message = "Codebuild build timeout must be between 5 and 480 minutes."
+ }
+}
+
+variable "tags" {
+ description = "Map of tags to apply to resources deployed by AFT."
+ type = map(any)
+ default = null
+}
+
#########################################
# AFT Feature Flags
#########################################
@@ -140,12 +194,12 @@ variable "aft_feature_delete_default_vpcs_enabled" {
variable "vcs_provider" {
- description = "Customer VCS Provider - valid inputs are codecommit, bitbucket, github, or githubenterprise"
+ description = "Customer VCS Provider - valid inputs are codecommit, bitbucket, github, githubenterprise, gitlab, or gitLab self-managed"
type = string
default = "codecommit"
validation {
- condition = contains(["codecommit", "bitbucket", "github", "githubenterprise"], var.vcs_provider)
- error_message = "Valid values for var: vcs_provider are (codecommit, bitbucket, github, githubenterprise)."
+ condition = contains(["codecommit", "bitbucket", "github", "githubenterprise", "gitlab", "gitlabselfmanaged"], var.vcs_provider)
+ error_message = "Valid values for var: vcs_provider are (codecommit, bitbucket, github, githubenterprise, gitlab, gitlabselfmanaged)."
}
}
@@ -154,7 +208,11 @@ variable "github_enterprise_url" {
type = string
default = "null"
}
-
+variable "gitlab_selfmanaged_url" {
+ description = "GitLab SelfManaged URL, if GitLab SelfManaged is being used"
+ type = string
+ default = "null"
+}
variable "account_request_repo_name" {
description = "Repository name for the account request files. For non-CodeCommit repos, name should be in the format of Org/Repo"
type = string
@@ -242,7 +300,7 @@ variable "account_provisioning_customizations_repo_branch" {
variable "terraform_version" {
description = "Terraform version being used for AFT"
type = string
- default = "0.15.5"
+ default = "1.6.0"
validation {
condition = can(regex("\\bv?\\d+(\\.\\d+)+[\\-\\w]*\\b", var.terraform_version))
error_message = "Invalid value for var: terraform_version."
@@ -260,10 +318,11 @@ variable "terraform_distribution" {
}
variable "tf_backend_secondary_region" {
+ default = ""
type = string
description = "AFT creates a backend for state tracking for its own state as well as OSS cases. The backend's primary region is the same as the AFT region, but this defines the secondary region to replicate to."
validation {
- condition = can(regex("(us(-gov)?|ap|ca|cn|eu|sa)-(central|(north|south)?(east|west)?)-\\d", var.tf_backend_secondary_region))
+ condition = var.tf_backend_secondary_region == "" || can(regex("(us(-gov)?|ap|ca|cn|eu|sa|me|af)-(central|(north|south)?(east|west)?)-\\d", var.tf_backend_secondary_region))
error_message = "Variable var: tf_backend_secondary_region is not valid."
}
}
@@ -272,7 +331,7 @@ variable "tf_backend_secondary_region" {
variable "terraform_token" {
type = string
description = "Terraform token for Cloud or Enterprise"
- default = "null"
+ default = "null" # Non-sensitive default value #tfsec:ignore:general-secrets-no-plaintext-exposure
sensitive = true
validation {
condition = length(var.terraform_token) > 0
@@ -290,6 +349,16 @@ variable "terraform_org_name" {
}
}
+variable "terraform_project_name" {
+ type = string
+ description = "Project name for Terraform Cloud or Enterprise - project must exist before deployment"
+ default = "Default Project"
+ validation {
+ condition = length(var.terraform_project_name) > 0
+ error_message = "Variable var: terraform_project_name cannot be empty."
+ }
+}
+
variable "terraform_api_endpoint" {
description = "API Endpoint for Terraform. Must be in the format of https://xxx.xxx."
type = string
@@ -303,6 +372,15 @@ variable "terraform_api_endpoint" {
#########################################
# AFT VPC Variables
#########################################
+variable "aft_enable_vpc" {
+ description = "Flag turning use of VPC on/off for AFT"
+ type = bool
+ default = true
+ validation {
+ condition = contains([true, false], var.aft_enable_vpc)
+ error_message = "Valid values for var: aft_enable_vpc are (true, false)."
+ }
+}
variable "aft_vpc_cidr" {
type = string
@@ -353,3 +431,29 @@ variable "aft_vpc_public_subnet_02_cidr" {
error_message = "Variable var: aft_vpc_public_subnet_02_cidr value must be a valid network CIDR, x.x.x.x/y."
}
}
+
+variable "aft_customer_vpc_id" {
+ type = string
+ description = "The VPC ID to deploy AFT resources in, if customer is providing an existing VPC. Only supported for new deployments."
+ default = null
+}
+
+variable "aft_customer_private_subnets" {
+ type = list(string)
+ description = "A list of private subnets to deploy AFT resources in, if customer is providing an existing VPC. Only supported for new deployments."
+ default = []
+}
+
+#########################################
+# AFT Metrics Reporting Variables
+#########################################
+
+variable "aft_metrics_reporting" {
+ description = "Flag toggling reporting of operational metrics"
+ type = bool
+ default = true
+ validation {
+ condition = contains([true, false], var.aft_metrics_reporting)
+ error_message = "Valid values for var: aft_metrics_reporting are (true, false)."
+ }
+}
diff --git a/versions.tf b/versions.tf
index 481060d7..723ecd7f 100644
--- a/versions.tf
+++ b/versions.tf
@@ -2,12 +2,12 @@
# SPDX-License-Identifier: Apache-2.0
#
terraform {
- required_version = ">= 0.15.1"
+ required_version = ">= 1.2.0, < 2.0.0"
required_providers {
aws = {
source = "hashicorp/aws"
- version = ">= 3.72, < 4.0.0"
+ version = ">= 5.11.0, < 6.0.0"
configuration_aliases = [aws.ct_management, aws.log_archive, aws.audit, aws.aft_management, aws.tf_backend_secondary_region]
}
}