diff --git a/Pipfile.lock b/Pipfile.lock deleted file mode 100644 index cdd0740..0000000 --- a/Pipfile.lock +++ /dev/null @@ -1,92 +0,0 @@ -{ - "_meta": { - "hash": { - "sha256": "5cb34db64d920f994a218ff5e259e223a4619f61ec2d93307f98ff7102fcd371" - }, - "pipfile-spec": 6, - "requires": { - "python_version": "3.8" - }, - "sources": [ - { - "name": "pypi", - "url": "https://pypi.org/simple", - "verify_ssl": true - } - ] - }, - "default": { - "boto3": { - "hashes": [ - "sha256:726937b33b5bf6036a3889ea6ecad632439d1a53c3dd4497c1c426d348e2dae3", - "sha256:781f5d1bbec6c2fe902e8855c1c8a576845eaa06630e817b0e422afa4788620e" - ], - "index": "pypi", - "version": "==1.14.34" - }, - "botocore": { - "hashes": [ - "sha256:8170f6a236cdfd053275be7b2f1531c90c517d903d38b21d064dc6ba23f20870", - "sha256:ff39ec54893397df3c14dd516aedeaff12bb7c363051d8b9bd5fc86bc3e68231" - ], - "version": "==1.17.34" - }, - "docutils": { - "hashes": [ - "sha256:6c4f696463b79f1fb8ba0c594b63840ebd41f059e92b31957c46b74a4599b6d0", - "sha256:9e4d7ecfc600058e07ba661411a2b7de2fd0fafa17d1a7f7361cd47b1175c827", - "sha256:a2aeea129088da402665e92e0b25b04b073c04b2dce4ab65caaa38b7ce2e1a99" - ], - "markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'", - "version": "==0.15.2" - }, - "jmespath": { - "hashes": [ - "sha256:b85d0567b8666149a93172712e68920734333c0ce7e89b78b3e987f71e5ed4f9", - "sha256:cdf6525904cc597730141d61b36f2e4b8ecc257c420fa2f4549bac2c2d0cb72f" - ], - "markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'", - "version": "==0.10.0" - }, - "python-dateutil": { - "hashes": [ - "sha256:73ebfe9dbf22e832286dafa60473e4cd239f8592f699aa5adaf10050e6e1823c", - "sha256:75bb3f31ea686f1197762692a9ee6a7550b59fc6ca3a1f4b5d7e32fb98e2da2a" - ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", - "version": "==2.8.1" - }, - "pytz": { - "hashes": [ - "sha256:a494d53b6d39c3c6e44c3bec237336e14305e4f29bbf800b599253057fbb79ed", - "sha256:c35965d010ce31b23eeb663ed3cc8c906275d6be1a34393a1d73a41febf4a048" - ], - "index": "pypi", - "version": "==2020.1" - }, - "s3transfer": { - "hashes": [ - "sha256:2482b4259524933a022d59da830f51bd746db62f047d6eb213f2f8855dcb8a13", - "sha256:921a37e2aefc64145e7b73d50c71bb4f26f46e4c9f414dc648c6245ff92cf7db" - ], - "version": "==0.3.3" - }, - "six": { - "hashes": [ - "sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259", - "sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced" - ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", - "version": "==1.15.0" - }, - "urllib3": { - "hashes": [ - "sha256:91056c15fa70756691db97756772bb1eb9678fa585d9184f24534b100dc60f4a", - "sha256:e7983572181f5e1522d9c98453462384ee92a0be7fac5f1413a1e35c56cc0461" - ], - "markers": "python_version != '3.4'", - "version": "==1.25.10" - } - }, - "develop": {} -} diff --git a/delete-default-vpc/delete-default-vpcs.py b/delete-default-vpc/delete-default-vpcs.py index df4d4ab..2f6f76e 100755 --- a/delete-default-vpc/delete-default-vpcs.py +++ b/delete-default-vpc/delete-default-vpcs.py @@ -1,7 +1,7 @@ #!/usr/bin/env python3 import boto3 -from botocore.exceptions import ClientError +from botocore.exceptions import ClientError, ProfileNotFound import logging import os @@ -10,14 +10,24 @@ def main(args, logger): '''Executes the Primary Logic''' - session = boto3.Session(profile_name=args.profile, region_name=args.boto_region) + try: + session = boto3.Session(profile_name=args.profile, 
region_name=args.boto_region) + except ProfileNotFound as e: + logger.critical(f"Profile {args.profile} was not found: {e}") + exit(1) # Get all the Regions for this account all_regions = get_regions(session, args) # processiong regions for region in all_regions: - process_region(args, region, session, logger) + try: + process_region(args, region, session, logger) + except ClientError as e: + if e.response['Error']['Code'] == "RegionDisabledException": + logger.critical(f"Region {region} is not enabled. Skipping...") + else: + raise return @@ -127,8 +137,8 @@ def delete_vpc(vpc,logger,region,debug): logger.debug("Interface:{} attached to {}, VPC:{}, region:{}".format(eni.id,eni.attachment,vpc.id,region)) return else: - logger.info("Deleting default VPC:{}, region:{}".format(vpc.id,region)) if args.actually_do_it: + logger.info("Deleting default VPC:{}, region:{}".format(vpc.id,region)) try: vpc_resources = { # dependency order from https://aws.amazon.com/premiumsupport/knowledge-center/troubleshoot-dependency-error-delete-vpc/ @@ -156,9 +166,8 @@ def delete_vpc(vpc,logger,region,debug): logger.error("VPC:{} can't be delete due to dependency, {}".format(vpc.id, e)) else: raise - logger.info("Successfully deleted default VPC:{}, region:{}".format(vpc.id,region)) - if not args.actually_do_it: + else: logger.info("Would delete default VPC:{}, region:{}".format(vpc.id,region)) def process_region(args, region, session, logger): @@ -242,10 +251,15 @@ def do_args(): logging.getLogger('urllib3').setLevel(logging.WARNING) # create formatter - if args.timestamp: + if args.timestamp and args.profile: + formatter = logging.Formatter(f"%(asctime)s - %(name)s - %(levelname)s - {args.profile} - %(message)s") + elif args.timestamp: formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s') + elif args.profile: + formatter = logging.Formatter(f"%(levelname)s - {args.profile} - %(message)s") else: formatter = logging.Formatter('%(levelname)s - %(message)s') + # add formatter to ch (console handler) ch.setFormatter(formatter) # add ch to logger diff --git a/ebs-block-public-access/README.md b/ebs-block-public-access/README.md new file mode 100644 index 0000000..5ad4ed5 --- /dev/null +++ b/ebs-block-public-access/README.md @@ -0,0 +1,42 @@ +# EBS Block Public Access + +This script will enable Block Public Access for EBS in all regions in your account. + +## Why? + +While there are a few valid use-cases for sharing a hard drive to every AWS customer, those probably don't apply to you. But it is easy to accidentally share an EBS Snapshot and threat actors scan for those regularly. AWS recently accounts [Block Public Access](https://aws.amazon.com/about-aws/whats-new/2023/11/amazon-elastic-block-store-public-access-ebs-snapshots/) for EBS. This script will enable that feature in all regions. + +## What the script does. + +This script iterates through all the regions returned by ec2:DescribeRegions and if get_snapshot_block_public_access_state() is `unblocked` calls enable_snapshot_block_public_access() to enable blocking _all_ sharing. 
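+
+Reduced to the two API calls involved, that loop looks roughly like this (a minimal sketch without the dry-run flag, logging, or error handling that the script itself adds; the profile name is a placeholder):
+
+```python
+import boto3
+
+session = boto3.Session()            # or boto3.Session(profile_name="my-profile")
+ec2 = session.client("ec2", region_name="us-east-1")
+regions = [r["RegionName"] for r in ec2.describe_regions()["Regions"]]
+
+for region in regions:
+    client = session.client("ec2", region_name=region)
+    state = client.get_snapshot_block_public_access_state()["State"]
+    if state == "unblocked":
+        # 'block-all-sharing' blocks public sharing of new and already-shared snapshots
+        client.enable_snapshot_block_public_access(State="block-all-sharing")
+```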
+ +## Usage + +```bash +usage: ebs-block-public-access.py [-h] [--debug] [--error] [--timestamp] + [--region REGION] [--profile PROFILE] + [--actually-do-it] [--disable] + +options: + -h, --help show this help message and exit + --debug print debugging info + --error print error info only + --timestamp Output log with timestamp and toolname + --region REGION Only Process Specified Region + --profile PROFILE Use this CLI profile (instead of default or env credentials) + --actually-do-it Actually Perform the action + --disable Disable Block Public Access rather than enable it. + +``` + +You must specify `--actually-do-it` for the changes to be made. Otherwise the script runs in dry-run mode only. + + +## AWS Docs + +* [Feature Announcement](https://aws.amazon.com/about-aws/whats-new/2023/11/amazon-elastic-block-store-public-access-ebs-snapshots/) +* [EnableSnapshotBlockPublicAccess API](https://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_EnableSnapshotBlockPublicAccess.html) +* [boto3 get_snapshot_block_public_access_state()](https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/ec2/client/get_snapshot_block_public_access_state.html) +* [boto3 enable_snapshot_block_public_access()](https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/ec2/client/enable_snapshot_block_public_access.html) + + diff --git a/ebs-block-public-access/ebs-block-public-access.py b/ebs-block-public-access/ebs-block-public-access.py new file mode 100755 index 0000000..bc58b24 --- /dev/null +++ b/ebs-block-public-access/ebs-block-public-access.py @@ -0,0 +1,136 @@ +#!/usr/bin/env python3 + +import boto3 +from botocore.exceptions import ClientError +import os +import logging +import json +# logger = logging.getLogger() + + +def main(args, logger): + '''Executes the Primary Logic of the Fast Fix''' + + # If they specify a profile use it. 
Otherwise do the normal thing + if args.profile: + session = boto3.Session(profile_name=args.profile) + else: + session = boto3.Session() + + # Get all the Regions for this account + for region in get_regions(session, args): + ec2_client = session.client("ec2", region_name=region) + + + # Then ensure the EBS Encryption is set correctly + status_response = ec2_client.get_snapshot_block_public_access_state() + if status_response['State'] == 'unblocked' and not args.disable: + # Make it true + if args.actually_do_it is True: + logger.info(f"Enabling EBS Block Public Access in {region}") + enable_bpa(ec2_client, region) + + else: + logger.info(f"You Need To Enable EBS Block Public Access in {region}") + elif status_response['State'] != 'unblocked' and args.disable: + # Make it false + if args.actually_do_it is True: + logger.info(f"Disabling EBS Block Public Access in {region}") + disable_bpa(ec2_client, region) + + else: + logger.info(f"Would Disable EBS Block Public Access in {region}") + else: + logger.debug(f"EBS Block Public Access is enabled in {region}") + + +def enable_bpa(ec2_client, region): + '''Actually perform the enabling of block public access''' + response = ec2_client.enable_snapshot_block_public_access(State='block-all-sharing') + if response['State'] == 'block-all-sharing': + return(True) + else: + logger.error(f"Attempt to enable EBS Block Public Access in {region} returned {response}") + return(False) + + +def disable_bpa(ec2_client, region): + '''Actually perform the enabling of default ebs encryption''' + response = ec2_client.disable_snapshot_block_public_access() + if response['State'] == 'unblocked': + return(True) + else: + logger.error(f"Attempt to disable EBS Block Public Access in {region} returned {response}") + return(False) + + +def get_regions(session, args): + '''Return a list of regions with us-east-1 first. 
If --region was specified, return a list wth just that''' + + # If we specifed a region on the CLI, return a list of just that + if args.region: + return([args.region]) + + # otherwise return all the regions, us-east-1 first + ec2 = session.client('ec2', region_name="us-east-1") + response = ec2.describe_regions() + output = ['us-east-1'] + for r in response['Regions']: + # return us-east-1 first, but dont return it twice + if r['RegionName'] == "us-east-1": + continue + output.append(r['RegionName']) + return(output) + + +def do_args(): + import argparse + + parser = argparse.ArgumentParser() + + parser.add_argument("--debug", help="print debugging info", action='store_true') + parser.add_argument("--error", help="print error info only", action='store_true') + parser.add_argument("--timestamp", help="Output log with timestamp and toolname", action='store_true') + parser.add_argument("--region", help="Only Process Specified Region") + parser.add_argument("--profile", help="Use this CLI profile (instead of default or env credentials)") + parser.add_argument("--actually-do-it", help="Actually Perform the action", action='store_true') + parser.add_argument("--disable", help="Disable Block Public Access rather than enable it.", action='store_true') + + args = parser.parse_args() + + return(args) + +if __name__ == '__main__': + + args = do_args() + + # Logging idea stolen from: https://docs.python.org/3/howto/logging.html#configuring-logging + # create console handler and set level to debug + logger = logging.getLogger('enable-ebs-default-encryption') + ch = logging.StreamHandler() + if args.debug: + logger.setLevel(logging.DEBUG) + elif args.error: + logger.setLevel(logging.ERROR) + else: + logger.setLevel(logging.INFO) + + # Silence Boto3 & Friends + logging.getLogger('botocore').setLevel(logging.WARNING) + logging.getLogger('boto3').setLevel(logging.WARNING) + logging.getLogger('urllib3').setLevel(logging.WARNING) + + # create formatter + if args.timestamp: + formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s') + else: + formatter = logging.Formatter('%(levelname)s - %(message)s') + # add formatter to ch + ch.setFormatter(formatter) + # add ch to logger + logger.addHandler(ch) + + try: + main(args, logger) + except KeyboardInterrupt: + exit(1) \ No newline at end of file diff --git a/ebs-encryption/README.md b/ebs-encryption/README.md index 89c8990..1ceacda 100644 --- a/ebs-encryption/README.md +++ b/ebs-encryption/README.md @@ -8,16 +8,18 @@ Encryption-at-rest is a key security best practice. However when creating instan ## What the script does. -Thsi script iterates through all the regions returned by ec2:DescribeRegions and if get_ebs_encryption_by_default() is false calls enable_ebs_encryption_by_default() to enable with a Default AWS Managed Key. +This script iterates through all the regions returned by ec2:DescribeRegions and if get_ebs_encryption_by_default() is false calls enable_ebs_encryption_by_default() to enable with a Default AWS Managed Key. **Warning!!!** Per AWS: *After you enable encryption by default, you can no longer launch instances using instance types that do not support encryption. For more information, see [Supported Instance Types](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/EBSEncryption.html#EBSEncryption_supported_instances).* +**Second Warning!!!** Enabling EBS encryption using the service managed key will prevent you from sharing AMIs outside of the account. 
If you need to share AMIs in your organization, you will want to specify the `--create-org-cmk` flag. This will create a new KMS CMK that is shared to your Org. See below for the key policy it will create: + ## Usage ```bash -usage: enable-ebs-default-encryption.py [-h] [--debug] [--error] [--timestamp] - [--region REGION] [--profile PROFILE] - [--actually-do-it] +usage: enable-ebs-default-encryption.py [-h] [--debug] [--error] [--timestamp] [--region REGION] + [--profile PROFILE] [--actually-do-it] [--disable] + [--create-cmk | --create-org-cmk | --use-cmk-id KEYID] optional arguments: -h, --help show this help message and exit @@ -27,10 +29,63 @@ optional arguments: --region REGION Only Process Specified Region --profile PROFILE Use this CLI profile (instead of default or env credentials) --actually-do-it Actually Perform the action + --disable Disable Default Encryption rather than enable it. + --create-cmk Create an AWS CMK in each region for use with EBS Default Encryption + --create-org-cmk Create an AWS CMK with org-wide permissions in each region + --use-cmk-id KEYID Enable Default Encryption with this existing key_id. + +You can specify KEYID using any of the following: + Key ID. For example, 1234abcd-12ab-34cd-56ef-1234567890ab. + Key alias. For example, alias/ExampleAlias. + Key ARN. For example, arn:aws:kms:us-east-1:012345678910:key/1234abcd-12ab-34cd-56ef-1234567890ab. + Alias ARN. For example, arn:aws:kms:us-east-1:012345678910:alias/ExampleAlias. + +Note: Amazon Web Services authenticates the KMS key asynchronously. Therefore, if you specify an ID, alias, or ARN that is not valid, the action can appear to complete, but eventually fails. + ``` You must specify `--actually-do-it` for the changes to be made. Otherwise the script runs in dry-run mode only. 
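+
+For example, a typical dry run followed by the actual change, using an existing key (the profile name and key alias below are placeholders):
+
+```bash
+# Dry run: report what would change in each region
+./enable-ebs-default-encryption.py --profile my-admin-profile --use-cmk-id alias/ExampleAlias
+
+# Set the default key and enable default encryption in every region
+./enable-ebs-default-encryption.py --profile my-admin-profile --use-cmk-id alias/ExampleAlias --actually-do-it
+```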
+### Org Wide Key Policy +```json +{ + "Version": "2012-10-17", + "Id": "EBS Key Policy For Organization", + "Statement": [ + { + "Sid": "Enable IAM User Permissions", + "Effect": "Allow", + "Principal": { + "AWS": "arn:aws:iam::123456789012:root" + }, + "Action": "kms:*", + "Resource": "*" + }, + { + "Sid": "Allow EBS use of the KMS key for organization", + "Effect": "Allow", + "Principal": { + "AWS": "*" + }, + "Action": [ + "kms:Decrypt", + "kms:DescribeKey", + "kms:Encrypt", + "kms:ReEncrypt*", + "kms:GetKeyPolicy" + ], + "Resource": "*", + "Condition": { + "StringEquals": { + "aws:PrincipalOrgID": "o-xxxxxxx", + "kms:ViaService": "ec2.us-east-1.amazonaws.com" + } + } + } + ] +} +``` + ## AWS Docs diff --git a/ebs-encryption/enable-ebs-default-encryption.py b/ebs-encryption/enable-ebs-default-encryption.py index 6681e8c..2142eb3 100755 --- a/ebs-encryption/enable-ebs-default-encryption.py +++ b/ebs-encryption/enable-ebs-default-encryption.py @@ -4,6 +4,7 @@ from botocore.exceptions import ClientError import os import logging +import json # logger = logging.getLogger() @@ -16,22 +17,199 @@ def main(args, logger): else: session = boto3.Session() + # If necessary, get the org info once + if args.create_org_cmk: + org_client = session.client("organizations", region_name="us-east-1") + org_info = org_client.describe_organization()['Organization'] + sts_client = session.client("sts", region_name="us-east-1") + account_id = sts_client.get_caller_identity()['Account'] + + # Get all the Regions for this account for region in get_regions(session, args): ec2_client = session.client("ec2", region_name=region) + kms_client = session.client("kms", region_name=region) + + # First we must Determine what the key is, and if it needs to change + key_response = ec2_client.get_ebs_default_kms_key_id() + key_id = key_response['KmsKeyId'] + logger.debug(f"Current Default key is {key_id} in {region}") + # At this point, key_id will either be the full ARN, or "alias/aws/ebs" + # So you can pass in a number of things (alias, alias_arn, key_id, but this call always has the key arn) + + if args.KeyId: + # Need to get the actual key _arn_ + new_key_details = get_kms_key_if_exists(kms_client, key_id) + if new_key_details is False: + logger.critical(f"Unable to find key {args.KeyId} in {region}. 
Aborting") + exit(1) + new_key_arn = new_key_details['KeyMetadata']['Arn'] + logger.info(f"Found {args.KeyId} with key arn of {new_key_arn}") + + + elif args.create_cmk is True: + key_alias = 'alias/default-ebs-cmk' + + # First see if we need to create a new key + existing_key = get_kms_key_if_exists(kms_client, key_alias) + if existing_key: + logger.warning(f"KMS Key with alias {key_alias} already exists") + new_key_arn = existing_key['KeyMetadata']['Arn'] + elif args.actually_do_it: + logger.info(f"Creating new KMS Key with alias {key_alias}") + new_key_arn = create_cmk(kms_client, region, key_alias) + else: + logger.info(f"Would create a custom CMK with alias {key_alias}") + new_key_arn = None + + elif args.create_org_cmk is True: + key_alias = 'alias/default-org-ebs-cmk' + + # First see if we need to create a new key + existing_key = get_kms_key_if_exists(kms_client, key_alias) + if existing_key: + logger.warning(f"KMS Key with alias {key_alias} already exists") + new_key_arn = existing_key['KeyMetadata']['Arn'] + elif args.actually_do_it: + logger.info(f"Creating new org-wide KMS Key with alias {key_alias}") + new_key_arn = create_org_cmk(kms_client, org_info, account_id, region, key_alias) + else: + logger.info(f"Would create a custom org-wide CMK with alias {key_alias}") + new_key_arn = None + else: + # If none of the above were specificed, then no change is needed below + new_key_arn = key_id + + # See if the default key needs to be changed + if new_key_arn != key_id: + # we need to change they key + if args.actually_do_it: + logger.info(f"Setting Default Key to {new_key_arn}. Was {key_id}") + ec2_client.modify_ebs_default_kms_key_id(KmsKeyId=new_key_arn) + elif new_key_arn is None: + logger.info(f"Would attempt to set the default EBS Key to the new key. Was {key_id}") + else: + try: + ec2_client.modify_ebs_default_kms_key_id(KmsKeyId=new_key_arn, DryRun=True) + except ClientError as e: + if e.response['Error']['Code'] == "DryRunOperation": + logger.info(f"Would attempt to set Default Key to {new_key_arn}. Was {key_id}") + else: + logger.error(f"DryRun setting Default Key to {new_key_arn} from {key_id} Failed. 
Error: {e}") + else: + # It doesn't + logger.info(f"Default EBS Encryption is currently set to {key_id}") + + + # Then ensure the EBS Encryption is set correctly status_response = ec2_client.get_ebs_encryption_by_default() - if status_response['EbsEncryptionByDefault'] is not True: + if status_response['EbsEncryptionByDefault'] is not True and not args.disable: # Make it true if args.actually_do_it is True: logger.info(f"Enabling Default EBS Encryption in {region}") enable_default_encryption(ec2_client, region) + else: logger.info(f"You Need To Enable Default EBS Encryption in {region}") + elif status_response['EbsEncryptionByDefault'] is True and args.disable: + # Make it false + if args.actually_do_it is True: + logger.info(f"Disabling Default EBS Encryption in {region}") + disable_default_encryption(ec2_client, region) + + else: + logger.info(f"Would Disable Default EBS Encryption in {region}") else: logger.debug(f"Default EBS Encryption is enabled in {region}") +def get_kms_key_if_exists(kms_client, key_id): + try: + key_details = kms_client.describe_key(KeyId=key_id) + return(key_details) + except ClientError as e: + if e.response['Error']['Code'] == "NotFoundException": + return(False) + else: + raise + + +def create_org_cmk(client, org_info, account_id, region, key_alias): + '''Create a new CMK for use with EBS''' + org_id = org_info['Id'] + logger.debug(f"Creating key for {org_id}") + + policy = { + "Version": "2012-10-17", + "Id": "EBS Key Policy For Organization", + "Statement": [ + { + "Sid": "Enable IAM User Permissions", + "Effect": "Allow", + "Principal": { + "AWS": f"arn:aws:iam::{account_id}:root" + }, + "Action": "kms:*", + "Resource": "*" + }, + { + "Sid": "Allow EBS use of the KMS key for organization", + "Effect": "Allow", + "Principal": { + "AWS": "*" + }, + "Action": [ + "kms:Decrypt", + "kms:DescribeKey", + "kms:Encrypt", + "kms:ReEncrypt*", + "kms:GetKeyPolicy" + ], + "Resource": "*", + "Condition": { + "StringEquals": { + "kms:ViaService": f"ec2.{region}.amazonaws.com", + "aws:PrincipalOrgID": org_id + } + } + } + ] + } + + logger.debug(f"Creating key with Policy:\n{json.dumps(policy, indent=2)}") + + response = client.create_key( + Policy=json.dumps(policy), + Description=f"Default EBS Key for {region} Shared across org {org_id}", + Origin='AWS_KMS', + BypassPolicyLockoutSafetyCheck=False + ) + key = response['KeyMetadata'] + client.create_alias( + AliasName=key_alias, + TargetKeyId=key['KeyId'] + ) + print(f"Created Key {key['KeyId']} in {region} with ARN of {key['Arn']}") + return(key['Arn']) + + +def create_cmk(client, region, key_alias): + '''Create a new CMK for use with EBS''' + response = client.create_key( + # Policy='string', + Description=f"Default EBS Key for {region}", + Origin='AWS_KMS', + BypassPolicyLockoutSafetyCheck=False + ) + key = response['KeyMetadata'] + client.create_alias( + AliasName=key_alias, + TargetKeyId=key['KeyId'] + ) + print(f"Created Key {key['KeyId']} in {region} with ARN of {key['Arn']}") + return(key['Arn']) + def enable_default_encryption(ec2_client, region): '''Actually perform the enabling of default ebs encryption''' @@ -43,6 +221,15 @@ def enable_default_encryption(ec2_client, region): return(False) +def disable_default_encryption(ec2_client, region): + '''Actually perform the enabling of default ebs encryption''' + response = ec2_client.disable_ebs_encryption_by_default() + if response['EbsEncryptionByDefault'] is False: + return(True) + else: + logger.error(f"Attempt to disable Default EBS Encryption in {region} returned 
{response}") + return(False) + def get_regions(session, args): '''Return a list of regions with us-east-1 first. If --region was specified, return a list wth just that''' @@ -52,7 +239,7 @@ def get_regions(session, args): return([args.region]) # otherwise return all the regions, us-east-1 first - ec2 = session.client('ec2') + ec2 = session.client('ec2', region_name="us-east-1") response = ec2.describe_regions() output = ['us-east-1'] for r in response['Regions']: @@ -63,16 +250,31 @@ def get_regions(session, args): return(output) - def do_args(): import argparse - parser = argparse.ArgumentParser() + + key_id_message = """You can specify KEYID using any of the following: + Key ID. For example, 1234abcd-12ab-34cd-56ef-1234567890ab. + Key alias. For example, alias/ExampleAlias. + Key ARN. For example, arn:aws:kms:us-east-1:012345678910:key/1234abcd-12ab-34cd-56ef-1234567890ab. + Alias ARN. For example, arn:aws:kms:us-east-1:012345678910:alias/ExampleAlias. + +Note: Amazon Web Services authenticates the KMS key asynchronously. Therefore, if you specify an ID, alias, or ARN that is not valid, the action can appear to complete, but eventually fails.""" + + parser = argparse.ArgumentParser(formatter_class=argparse.RawDescriptionHelpFormatter, epilog=key_id_message) + parser.add_argument("--debug", help="print debugging info", action='store_true') parser.add_argument("--error", help="print error info only", action='store_true') parser.add_argument("--timestamp", help="Output log with timestamp and toolname", action='store_true') parser.add_argument("--region", help="Only Process Specified Region") parser.add_argument("--profile", help="Use this CLI profile (instead of default or env credentials)") parser.add_argument("--actually-do-it", help="Actually Perform the action", action='store_true') + parser.add_argument("--disable", help="Disable Default Encryption rather than enable it.", action='store_true') + + cmk_group = parser.add_mutually_exclusive_group() + cmk_group.add_argument("--create-cmk", help="Create an AWS CMK in each region for use with EBS Default Encryption", action='store_true') + cmk_group.add_argument("--create-org-cmk", help="Create an AWS CMK with org-wide permissions in each region ", action='store_true') + cmk_group.add_argument("--use-cmk-id", dest="KeyId", help="Enable Default Encryption with this existing key_id.") args = parser.parse_args() diff --git a/inactive-iam-users/disable-inactive-keys.py b/inactive-iam-users/disable-inactive-keys.py index bb84be4..2c2357b 100755 --- a/inactive-iam-users/disable-inactive-keys.py +++ b/inactive-iam-users/disable-inactive-keys.py @@ -18,12 +18,15 @@ def main(args, logger): else: session = boto3.Session() + utc=pytz.UTC # We need to normalize the date & timezones + threshold_date = utc.localize(datetime.today() - timedelta(days=int(args.threshold))) + # S3 is a global service and we can use any regional endpoint for this. 
iam_client = session.client("iam") for user in get_all_users(iam_client): username = user['UserName'] - keys = get_users_keys(iam_client, username) + keys = get_users_keys(iam_client, username, threshold_date) if len(keys) == 0: logger.debug(f"User {username} has no active keys") continue @@ -35,23 +38,30 @@ def main(args, logger): if 'AccessKeyLastUsed' not in activity_response : logger.error(f"Did not get AccessKeyLastUsed for user {username} key {key}") continue - if 'LastUsedDate' not in activity_response['AccessKeyLastUsed']: - logger.debug(f"Key {key} for {username} has never been used") + elif 'LastUsedDate' not in activity_response['AccessKeyLastUsed']: + # logger.info(f"Key {key} for {username} has never been used, but is older than {args.threshold} days") + if args.actually_do_it is True: + # otherwise if we're configured to fix + logger.info(f"Disabling Key {key} for {username} - never been used, but is older than {args.threshold} days") + disable_key(iam_client, key, username) + else: + # otherwise just report + logger.info(f"Need to Disable Key {key} for {username} - never been used, but is older than {args.threshold} days") continue - - # Otherwise decide what to do - last_used_date = activity_response['AccessKeyLastUsed']['LastUsedDate'] - utc=pytz.UTC # We need to normalize the date & timezones - if last_used_date > utc.localize(datetime.today() - timedelta(days=int(args.threshold))): - # Then we are good - logger.debug(f"Key {key} ({username}) - last used {last_used_date} is OK") - elif args.actually_do_it is True: - # otherwise if we're configured to fix - logger.info(f"Disabling Key {key} for {username} - Last used {activity_response['AccessKeyLastUsed']['LastUsedDate']} in {activity_response['AccessKeyLastUsed']['Region']} for {activity_response['AccessKeyLastUsed']['ServiceName']}") - disable_key(iam_client, key, username) else: - # otherwise just report - logger.info(f"Need to Disable Key {key} for {username} - Last used {activity_response['AccessKeyLastUsed']['LastUsedDate']} in {activity_response['AccessKeyLastUsed']['Region']} for {activity_response['AccessKeyLastUsed']['ServiceName']}") + # Otherwise decide what to do + last_used_date = activity_response['AccessKeyLastUsed']['LastUsedDate'] + + if last_used_date > threshold_date: + # Then we are good + logger.debug(f"Key {key} ({username}) - last used {last_used_date} is OK") + elif args.actually_do_it is True: + # otherwise if we're configured to fix + logger.info(f"Disabling Key {key} for {username} - Last used {activity_response['AccessKeyLastUsed']['LastUsedDate']} in {activity_response['AccessKeyLastUsed']['Region']} for {activity_response['AccessKeyLastUsed']['ServiceName']}") + disable_key(iam_client, key, username) + else: + # otherwise just report + logger.info(f"Need to Disable Key {key} for {username} - Last used {activity_response['AccessKeyLastUsed']['LastUsedDate']} in {activity_response['AccessKeyLastUsed']['Region']} for {activity_response['AccessKeyLastUsed']['ServiceName']}") @@ -69,12 +79,14 @@ def disable_key(iam_client, key, username): return(False) -def get_users_keys(iam_client, username): +def get_users_keys(iam_client, username, threshold_date): '''Return Active Access keys for username''' keyids = [] response = iam_client.list_access_keys(UserName=username) if 'AccessKeyMetadata' in response: for k in response['AccessKeyMetadata']: + if k['CreateDate'] > threshold_date: + continue if k['Status'] == "Active": keyids.append(k['AccessKeyId']) return(keyids) diff --git 
a/inactive-iam-users/requirements.txt b/inactive-iam-users/requirements.txt new file mode 100644 index 0000000..9850437 --- /dev/null +++ b/inactive-iam-users/requirements.txt @@ -0,0 +1 @@ +pytz \ No newline at end of file diff --git a/org-configure-alternate-contacts/README.md b/org-configure-alternate-contacts/README.md new file mode 100644 index 0000000..452c5f8 --- /dev/null +++ b/org-configure-alternate-contacts/README.md @@ -0,0 +1,65 @@ +# Set Alternate Contacts across the Organization + +This script will update all the [Alternate Contacts](https://docs.aws.amazon.com/accounts/latest/reference/manage-acct-update-contact.html) for all accounts in the organization. Per [the Boto3 Docs](https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/account.html#Account.Client.put_alternate_contact): + +> To use this parameter, the caller must be an identity in the organization's management account or a delegated administrator account, and the specified account ID must be a member account in the same organization. The organization must have all features enabled , and the organization must have trusted access enabled for the Account Management service, and optionally a delegated admin account assigned. + + +## Why? + +AWS will send Security, Billing and operational alerts to the Alternate Contacts enabled on an account in addition to the root email address. These settings allow security teams and finance contacts to also get important notices from AWS + +## What this script does + +This script must be run from the AWS Organizations Management Account!!! + +It will get a list of all accounts in the organization, then it will check to see if there is an Alternate Contact already set. If not it will update the contact. + +You can update all alternate contacts (not just for accounts with no alternate contact set), by specifying the `--override` parameter + +Like all Fast Fix scripts, this script will run in dry-run mode by default. To actually update the alternate contact you must specify `--actually-do-it` + + + +## Usage + +```bash +usage: configure-alternate-contact.py [-h] [--debug] [--error] [--timestamp] + [--actually-do-it] [--override] + --contact-type CONTACT_TYPE + --contact-email CONTACT_EMAIL + --contact-name CONTACT_NAME + --contact-phone CONTACT_PHONE + --contact-title CONTACT_TITLE + +optional arguments: + -h, --help show this help message and exit + --debug print debugging info + --error print error info only + --timestamp Output log with timestamp and toolname + --actually-do-it Actually set the alternate contact + --override Override any existing setting + --contact-type CONTACT_TYPE + Alternate Contact to Set (SECURITY, BILLING, OPERATIONS) + --contact-email CONTACT_EMAIL + Specifies an email address for the alternate contact + --contact-name CONTACT_NAME + Specifies an email address for the alternate contact + --contact-phone CONTACT_PHONE + Specifies a phone number for the alternate contact. + --contact-title CONTACT_TITLE + Specifies a title for the alternate contact. +``` + +You must specify `--actually-do-it` for the changes to be made. Otherwise the script runs in dry-run mode only. 
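+
+For example, a dry run followed by an org-wide update of the SECURITY contact (all contact details below are placeholders):
+
+```bash
+# Dry run: report which accounts have no SECURITY contact set
+./configure-alternate-contact.py --contact-type SECURITY \
+    --contact-name "Security Team" --contact-email security@example.com \
+    --contact-phone "+1-555-0100" --contact-title "Security Operations"
+
+# Set (and, with --override, overwrite) the SECURITY contact on every account
+./configure-alternate-contact.py --contact-type SECURITY \
+    --contact-name "Security Team" --contact-email security@example.com \
+    --contact-phone "+1-555-0100" --contact-title "Security Operations" \
+    --actually-do-it --override
+```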
+ + +## AWS Docs + +* [PutAlternateContact API](https://docs.aws.amazon.com/accounts/latest/reference/API_PutAlternateContact.html) +* [boto3 put_alternate_contact()](https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/account.html#Account.Client.put_alternate_contact) +* [boto3 get_alternate_contact()](https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/account.html#Account.Client.get_alternate_contact) +* [boto3 list_accounts()](https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/organizations.html#Organizations.Client.list_accounts) + + + diff --git a/org-configure-alternate-contacts/configure-alternate-contact.py b/org-configure-alternate-contacts/configure-alternate-contact.py new file mode 100755 index 0000000..b4c7805 --- /dev/null +++ b/org-configure-alternate-contacts/configure-alternate-contact.py @@ -0,0 +1,174 @@ +#!/usr/bin/env python3 + +from botocore.exceptions import ClientError +import boto3 +import datetime +import json +import os +import time + +import logging +logger = logging.getLogger() +logger.setLevel(logging.INFO) +logging.getLogger('botocore').setLevel(logging.WARNING) +logging.getLogger('boto3').setLevel(logging.WARNING) +logging.getLogger('urllib3').setLevel(logging.WARNING) + + +VALID_TYPES=['BILLING', 'SECURITY', 'OPERATIONS'] + + +def main(args, logger): + + if args.contact_type not in VALID_TYPES: + logger.critical(f"Specified Contact Type {args.contact_type} is not one of the valid types: {' '.join(VALID_TYPES)}") + exit(1) + + account_list = get_organization_accounts(args) + logger.info(f"Found {len(account_list)} accounts in this organization") + + client = boto3.client('account') + for a in account_list: + account_id = a['Id'] + # if account_id == "373051592877": + # continue + + current_contact = get_alternate_contact(a, client, args) + logger.debug(f"Account {a['Name']} ({account_id}) has contact type {args.contact_type} of {current_contact}") + if args.actually_do_it and args.override: + update_account_contact(a, client, args) + elif current_contact is None and args.actually_do_it: + update_account_contact(a, client, args) + elif current_contact is None: + logger.info(f"No alternate contact of type {args.contact_type} set for {a['Name']} ({account_id}) ") + else: + logger.info(f"Account {a['Name']} ({account_id}) already has contact type {args.contact_type} set to {current_contact['Name']} - {current_contact['EmailAddress']}") + + +def get_alternate_contact(a, client, args): + try: + if a['Id'] == a['Arn'].split(':')[4]: + response = client.get_alternate_contact(AlternateContactType=args.contact_type) + else: + response = client.get_alternate_contact( + AccountId=a['Id'], + AlternateContactType=args.contact_type + ) + current_contact = response['AlternateContact'] + return(current_contact) + except ClientError as e: + if e.response['Error']['Code'] == "ResourceNotFoundException": + return(None) + else: + raise + + +def update_account_contact(a, client, args): + account_id = a['Id'] + try: + if a['Id'] == a['Arn'].split(':')[4]: + response = client.put_alternate_contact( + AlternateContactType=args.contact_type, + EmailAddress=args.contact_email, + Name=args.contact_name, + PhoneNumber=args.contact_phone, + Title=args.contact_title + ) + else: + response = client.put_alternate_contact( + AccountId=account_id, + AlternateContactType=args.contact_type, + EmailAddress=args.contact_email, + Name=args.contact_name, + PhoneNumber=args.contact_phone, + Title=args.contact_title + ) + 
logger.info(f"Set Alternate Contact {args.contact_type} for {a['Name']} ({account_id}) ") + except ClientError as e: + logger.error(f"Error Setting Alternate Contact Type {args.contact_type} for {account_id}: {e}") + + +def get_organization_accounts(args): + logger.info("Fetching account list...") + org_client = boto3.client('organizations') + try: + + output = [] + response = org_client.list_accounts(MaxResults=20) + while 'NextToken' in response: + output = output + response['Accounts'] + time.sleep(1) + response = org_client.list_accounts(MaxResults=20, NextToken=response['NextToken']) + + output = output + response['Accounts'] + return(output) + except ClientError as e: + if e.response['Error']['Code'] == 'AWSOrganizationsNotInUseException': + # This is a standalone account + logger.critical("This script is intended only for AWS Organizations. Organizations is not fully enabled for this account. Aborting...") + exit(1) + # This is what we get if we're a child in an organization, but not inventorying the payer + elif e.response['Error']['Code'] == 'AccessDeniedException': + logger.critical("This script must be run in the AWS Organizations Management Account. Aborting...") + exit(1) + else: + raise + + +def do_args(): + import argparse + parser = argparse.ArgumentParser() + parser.add_argument("--debug", help="print debugging info", action='store_true') + parser.add_argument("--error", help="print error info only", action='store_true') + parser.add_argument("--timestamp", help="Output log with timestamp and toolname", action='store_true') + parser.add_argument("--actually-do-it", help="Actually set the alternate contact", action='store_true') + parser.add_argument("--override", help="Override any existing setting", action='store_true') + parser.add_argument("--contact-type", help="Alternate Contact to Set (SECURITY, BILLING, OPERATIONS)", required=True) + parser.add_argument("--contact-email", help="Specifies an email address for the alternate contact", required=True) + parser.add_argument("--contact-name", help="Specifies an email address for the alternate contact", required=True) + parser.add_argument("--contact-phone", help="Specifies a phone number for the alternate contact.", required=True) + parser.add_argument("--contact-title", help="Specifies a title for the alternate contact.", required=True) + + + + + args = parser.parse_args() + + return(args) + +if __name__ == '__main__': + + args = do_args() + + # Logging idea stolen from: https://docs.python.org/3/howto/logging.html#configuring-logging + # create console handler and set level to debug + ch = logging.StreamHandler() + if args.error: + logger.setLevel(logging.ERROR) + elif args.debug: + logger.setLevel(logging.DEBUG) + else: + logger.setLevel(logging.INFO) + + # create formatter + if args.timestamp: + formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s') + else: + formatter = logging.Formatter('%(levelname)s - %(message)s') + # add formatter to ch + ch.setFormatter(formatter) + # add ch to logger + logger.addHandler(ch) + + # # Sanity check region + # if args.region: + # os.environ['AWS_DEFAULT_REGION'] = args.region + + # if 'AWS_DEFAULT_REGION' not in os.environ: + # logger.error("AWS_DEFAULT_REGION Not set. 
Aborting...") + # exit(1) + + try: + main(args, logger) + except KeyboardInterrupt: + exit(1) \ No newline at end of file diff --git a/org-delegation/delegate-guardduty.py b/org-delegation/delegate-guardduty.py index 372b597..d25acaa 100755 --- a/org-delegation/delegate-guardduty.py +++ b/org-delegation/delegate-guardduty.py @@ -20,23 +20,26 @@ def main(args, logger): # GuardDuty needs to be enabled Regionally. Gah! for r in get_regions(session, args): - guardduty_client = session.client("guardduty", region_name=r) - response = guardduty_client.list_organization_admin_accounts() - if len(response['AdminAccounts']) > 1: - logger.error(f"too many admin accounts in region {r}. Cannot proceed.") - elif len(response['AdminAccounts']) == 1: - if response['AdminAccounts'][0]['AdminAccountId'] == args.accountId: - logger.debug(f"Account {args.accountId} is already the delegated admin for region {r} and in state {response['AdminAccounts'][0]['AdminStatus']}") + try: + guardduty_client = session.client("guardduty", region_name=r) + response = guardduty_client.list_organization_admin_accounts() + if len(response['AdminAccounts']) > 1: + logger.error(f"too many admin accounts in region {r}. Cannot proceed.") + elif len(response['AdminAccounts']) == 1: + if response['AdminAccounts'][0]['AdminAccountId'] == args.accountId: + logger.debug(f"Account {args.accountId} is already the delegated admin for region {r} and in state {response['AdminAccounts'][0]['AdminStatus']}") + else: + logger.error(f"{response['AdminAccounts'][0]['AdminAccountId']} is already the delegated admin in {r}. Not performing update") + elif args.actually_do_it is True: + try: + logger.info(f"Enablng GuardDuty Delegated Admin to {args.accountId} in region {r}") + guardduty_client.enable_organization_admin_account(AdminAccountId=args.accountId) + except ClientError as e: + logger.critical(e) else: - logger.error(f"{response['AdminAccounts'][0]['AdminAccountId']} is already the delegated admin in {r}. Not performing update") - elif args.actually_do_it is True: - try: - logger.info(f"Enablng GuardDuty Delegated Admin to {args.accountId} in region {r}") - guardduty_client.enable_organization_admin_account(AdminAccountId=args.accountId) - except ClientError as e: - logger.critical(e) - else: - logger.info(f"Would enable GuardDuty Delegated Admin to {args.accountId} in region {r}") + logger.info(f"Would enable GuardDuty Delegated Admin to {args.accountId} in region {r}") + except ClientError as e: + logger.warning(f"Failure in {r}: {e}") def get_regions(session, args): '''Return a list of regions with us-east-1 first. If --region was specified, return a list wth just that''' diff --git a/remove-loginprofile/README.md b/remove-loginprofile/README.md new file mode 100644 index 0000000..ae47af1 --- /dev/null +++ b/remove-loginprofile/README.md @@ -0,0 +1,52 @@ +# Remove Login Profile with No MFA + +This script will disable the ability for an IAM User to login to the AWS Console for all IAM Users that have a console password (LoginProfile) and do _not_ have MFA enabled. The script can optionally exclude users that have used their account in N number of days + + +## Why? + +Enabling Multi-factor-authentication is a common requirement for all privileged accounts. In most all cases IAM Users have privileged access to cloud APIs for the purposes of starting and stopping machines, accessing sensitive data in S3, etc. + +## What the ./remove-loginprofile-no-mfa.py script does. + +This script will first list all IAM Users. 
It will then look to see if the IAM User has a console password (called a LoginProfile). If the user has a LoginProfile, it checks to make sure the User also has MFA Enabled. + +If MFA is not enabled, and --threshold is not set, it will remove the user's console password. + +If --threshold is set, it will check to see if the PasswordLastUsed exists and was not within *threshold* days. If both of those are true it will remove the user's console password. + +If the user never logged in (PasswordLastUsed does not exist), it will ensure the user was not _created_ in the last *threshold* days, and then remove the user's console password. + +This script will *NOT* remove the user's console password unless --actually-do-it is specified. This script will not delete the user, nor will it delete or deactivate the user's Access Keys. **The removal of the user's console password is irreversible.** Once removed, it cannot be reapplied because the password is not known to the AWS account. AWS does not provide an option to disable the user's password. + + + + +## Usage + +```bash +usage: remove-loginprofile-no-mfa.py [-h] [--debug] [--error] [--timestamp] + [--profile PROFILE] [--actually-do-it] + [--threshold THRESHOLD] + +optional arguments: + -h, --help show this help message and exit + --debug print debugging info + --error print error info only + --timestamp Output log with timestamp and toolname + --profile PROFILE Use this CLI profile (instead of default or env credentials) + --actually-do-it Actually Perform the action + --threshold THRESHOLD + Only Disable Login Profile if inactive for this many days +``` + +You must specify `--actually-do-it` for the changes to be made. Otherwise the script runs in dry-run mode only. + + +## AWS Docs + +* [DeleteLoginProfile API](https://docs.aws.amazon.com/IAM/latest/APIReference/API_DeleteLoginProfile.html) +* [boto3 list_users()](https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/iam.html#IAM.Client.list_users) +* [boto3 list_mfa_devices()](https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/iam.html#IAM.Client.list_mfa_devices) +* [boto3 delete_login_profile()](https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/iam.html#IAM.Client.delete_login_profile) +* [boto3 get_login_profile()](https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/iam.html#IAM.Client.get_login_profile) diff --git a/remove-loginprofile/remove-loginprofile-no-mfa.py b/remove-loginprofile/remove-loginprofile-no-mfa.py new file mode 100755 index 0000000..7bfaced --- /dev/null +++ b/remove-loginprofile/remove-loginprofile-no-mfa.py @@ -0,0 +1,180 @@ +#!/usr/bin/env python3 + +import boto3 +from botocore.exceptions import ClientError +import os +import logging +from datetime import datetime, timedelta +import pytz + +utc=pytz.UTC + +def main(args, logger): + '''Executes the Primary Logic of the Fast Fix''' + + # If they specify a profile use it. Otherwise do the normal thing + if args.profile: + session = boto3.Session(profile_name=args.profile) + else: + session = boto3.Session() + + # S3 is a global service and we can use any regional endpoint for this. + iam_client = session.client("iam") + for user in get_all_users(iam_client): + username = user['UserName'] + + # Does this user have a LoginProfile? 
+ login_profile = get_users_login_profile(iam_client, username) + if login_profile is None: + logger.debug(f"User {username} has no LoginProfile") + continue + + # Does this user have an MFA + mfa = get_users_mfa(iam_client, username) + if mfa is not None: + logger.debug(f"User {username} has MFA enabled. No action needed.") + continue + + if not args.threshold: + # If threshold is not specified, we're ready to disable the user. + if args.actually_do_it is True: + # otherwise if we're configured to fix + logger.info(f"Disabling Login for {username} - No threshold specified") + disable_login(iam_client, username) + else: + # otherwise just report + logger.info(f"Need to Disable login for {username} - No threshold specified") + + # Process next user + continue + + # Has this user logged in since --threshold? + if 'PasswordLastUsed' in user: + last_login = user['PasswordLastUsed'] + logger.debug(f"User {username} last logged in {last_login}") + + utc=pytz.UTC # We need to normalize the date & timezones + if last_login > utc.localize(datetime.today() - timedelta(days=int(args.threshold))): + # Then we are good + logger.debug(f"{username} - last login {last_login} is OK") + elif args.actually_do_it is True: + # otherwise if we're configured to fix + logger.info(f"Disabling Login for {username} - Last used {last_login}") + disable_login(iam_client, username) + else: + # otherwise just report + logger.info(f"Need to Disable login for {username} - Last used {last_login}") + else: + # Don't deactivate if the user was _created_ inside the threshold + create_date = user['CreateDate'] + logger.debug(f"User {username} was created {create_date}") + + utc=pytz.UTC # We need to normalize the date & timezones + if create_date > utc.localize(datetime.today() - timedelta(days=int(args.threshold))): + # Then we are good + logger.debug(f"{username} - created {create_date} which is OK") + elif args.actually_do_it is True: + # otherwise if we're configured to fix + logger.info(f"Disabling Login for {username} - Created {create_date}") + disable_login(iam_client, username) + else: + # otherwise just report + logger.info(f"Need to Disable login for {username} - Created {create_date}") + + +def disable_login(iam_client, username): + '''perform the key disable and check the status code''' + response = iam_client.delete_login_profile(UserName=username) + if response['ResponseMetadata']['HTTPStatusCode'] == 200: + return(True) + else: + logger.error(f"Attempt to enable LoginProfile for {username} returned {response}") + return(False) + + +def get_users_mfa(iam_client, username): + '''Return MFA or Virtual MFA Details, or None if no MFA is present''' + try: + response = iam_client.list_mfa_devices(UserName=username) + if len(response['MFADevices']) == 0: + return None + else: + return response['MFADevices'][0] + except ClientError as e: + if e.response['Error']['Code'] == 'NoSuchEntity': + return None + else: + raise + + +def get_users_login_profile(iam_client, username): + '''Return Login Profile details for user, or None if no LoginProfile present''' + try: + response = iam_client.get_login_profile(UserName=username) + except ClientError as e: + if e.response['Error']['Code'] == 'NoSuchEntity': + return None + else: + raise + return(response['LoginProfile']) + + +def get_all_users(iam_client): + '''Return an array of all IAM Users. ''' + users = [] + response = iam_client.list_users() + while 'IsTruncated' in response and response['IsTruncated'] is True: # Gotta Catch 'em all! 
+ users += response['Users'] + response = iam_client.list_users(Marker=response['Marker']) + users += response['Users'] + return(users) + + +def do_args(): + import argparse + parser = argparse.ArgumentParser() + parser.add_argument("--debug", help="print debugging info", action='store_true') + parser.add_argument("--error", help="print error info only", action='store_true') + parser.add_argument("--timestamp", help="Output log with timestamp and toolname", action='store_true') + parser.add_argument("--profile", help="Use this CLI profile (instead of default or env credentials)") + parser.add_argument("--actually-do-it", help="Actually Perform the action", action='store_true') + parser.add_argument("--threshold", help="Only Disable Login Profile if inactive for this many days") + + args = parser.parse_args() + + return(args) + +if __name__ == '__main__': + + args = do_args() + + # Logging idea stolen from: https://docs.python.org/3/howto/logging.html#configuring-logging + # create console handler and set level to debug + logger = logging.getLogger('disable-inactive-keys') + ch = logging.StreamHandler() + if args.debug: + logger.setLevel(logging.DEBUG) + elif args.error: + logger.setLevel(logging.ERROR) + else: + logger.setLevel(logging.INFO) + + # Silence Boto3 & Friends + logging.getLogger('botocore').setLevel(logging.WARNING) + logging.getLogger('boto3').setLevel(logging.WARNING) + logging.getLogger('urllib3').setLevel(logging.WARNING) + + # create formatter + if args.timestamp: + formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s') + else: + formatter = logging.Formatter('%(levelname)s - %(message)s') + # add formatter to ch + ch.setFormatter(formatter) + # add ch to logger + logger.addHandler(ch) + + try: + main(args, logger) + except KeyboardInterrupt: + exit(1) \ No newline at end of file diff --git a/remove-loginprofile/requirements.txt b/remove-loginprofile/requirements.txt new file mode 100644 index 0000000..9850437 --- /dev/null +++ b/remove-loginprofile/requirements.txt @@ -0,0 +1 @@ +pytz \ No newline at end of file diff --git a/requirements.txt b/requirements.txt index 975925f..36eeeae 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,10 +1,10 @@ -i https://pypi.org/simple -boto3==1.14.34 -botocore==1.17.34 +boto3 +botocore docutils==0.15.2; python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3' jmespath==0.10.0; python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3' python-dateutil==2.8.1; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3' pytz==2020.1 s3transfer==0.3.3 six==1.15.0; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3' -urllib3==1.25.10; python_version != '3.4' +urllib3 diff --git a/s3-bucket-default-encryption/enable-s3-bucket-default-encryption.py b/s3-bucket-default-encryption/enable-s3-bucket-default-encryption.py index 098068c..187124c 100755 --- a/s3-bucket-default-encryption/enable-s3-bucket-default-encryption.py +++ b/s3-bucket-default-encryption/enable-s3-bucket-default-encryption.py @@ -43,6 +43,9 @@ def main(args, logger): elif e.response['Error']['Code'] == 'AccessDeniedException': logger.warning(f"Unable to get details of key {bucket}: AccessDenied") continue + elif e.response['Error']['Code'] == 'AccessDenied': + logger.warning(f"Unable to get details of key {bucket}: AccessDenied") + continue else: raise diff --git a/shield/README.md b/shield/README.md new file mode 100644 index 0000000..a649141 --- /dev/null +++ b/shield/README.md @@ 
-0,0 +1,46 @@ +# enable-shield-protection + +This script will enable Shield Advanced Protections on all the resources of the specified type + +## Why? + +AWS Shield Advanced is an enterprise-grade anti-DDOS service. Leveraging AWS's control of the underlying network, and the ability to manage AWS WAF, they can provide a superior anti-DDOS capability than a normal company. + +## What the script does. + +**NOTE:** This script will not run if the AWS Shield Advanced Subscription is not enabled. + +This script will iterate though all AWS Regions and make the CreateProtection call for any unprotected resources of the specified type (Currently: CloudFront and ALB). + + +## Usage + +```bash +usage: enable-shield-protection.py [-h] [--debug] [--error] [--timestamp] + [--region REGION] [--actually-do-it] [--resource-type] + +optional arguments: + -h, --help show this help message and exit + --debug print debugging info + --error print error info only + --timestamp Output log with timestamp and toolname + --region REGION Only Process Specified Region + --profile PROFILE Use this CLI profile (instead of default or env credentials) + --actually-do-it Actually Perform the action + --resource-type {ALB,CloudFront} Type of resource to apply Shield Protections to +``` + +You must specify `--actually-do-it` for the changes to be made. Otherwise the script runs in dry-run mode only. + + +## AWS Docs + +* [Adding AWS Shield Advanced protection to AWS resources](https://docs.aws.amazon.com/waf/latest/developerguide/configure-new-protection.html) +* [CreateProtection API](https://docs.aws.amazon.com/waf/latest/DDOSAPIReference/API_CreateProtection.html) +* [boto3 create_protection()](https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/shield.html#Shield.Client.create_protection) + +Other ReadOnly calls made: +* [boto3 describe_load_balancers()](https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/elbv2.html#ElasticLoadBalancingv2.Client.describe_load_balancers) +* [boto3 list_distributions()](https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/cloudfront.html#CloudFront.Client.list_distributions) +* [boto3 list_protections()](https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/shield.html#Shield.Client.list_protections) +* [boto3 describe_subscription()](https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/shield.html#Shield.Client.describe_subscription) \ No newline at end of file diff --git a/shield/enable-shield-protections.py b/shield/enable-shield-protections.py new file mode 100755 index 0000000..8a5fc1b --- /dev/null +++ b/shield/enable-shield-protections.py @@ -0,0 +1,243 @@ +#!/usr/bin/env python3 + +import boto3 +from botocore.exceptions import ClientError +import os +import logging +import json + + +def main(args, logger): + '''Executes the Primary Logic of the Fast Fix''' + + # If they specify a profile use it. Otherwise do the normal thing + if args.profile: + session = boto3.Session(profile_name=args.profile) + else: + session = boto3.Session() + + # Get all the Regions for this account. CloudFront is only in us-east-1 + if args.resource_type == "CloudFront": + all_regions = ["us-east-1"] + else: + all_regions = get_regions(session, args) + + count = 0 + subscription = get_subscription(session) + if subscription is None: + logger.critical(f"Shield Advanced is not enabled in account {args.profile}. Aborting") + exit(1) + + # Get the list of protected resource. 
+def get_subscription(session):
+    client = session.client("shield")
+    try:
+        subscription = client.describe_subscription()['Subscription']
+        # logger.debug(json.dumps(subscription, indent=2, sort_keys=True, default=str))
+    except ClientError as e:
+        if e.response['Error']['Code'] == "ResourceNotFoundException":
+            subscription = None
+        else:
+            logger.critical(f"Unable to describe the subscription: {e}")
+            exit(1)
+    except Exception as e:
+        logger.critical(f"Unable to describe the subscription: {e}")
+        exit(1)
+    return(subscription)
+
+
+def get_protected_resources(session):
+    '''Return an Array of ARNs that have Shield Advanced Protections already enabled '''
+    # It doesn't matter which region I make this call from
+    shield_client = session.client("shield")
+    protections = []
+    response = shield_client.list_protections()
+    while 'NextToken' in response:
+        protections += response['Protections']
+        response = shield_client.list_protections(NextToken=response['NextToken'])
+    protections += response['Protections']
+
+    arns = []
+    for p in protections:
+        arns.append(p['ResourceArn'])
+    return(arns)
+
+
+def enable_protection(shield_client, arn, name):
+    '''Actually perform the enabling of the Shield protection and checking of the status code'''
+    logger.info(f"Enabling Shield Protection on {arn}")
+    try:
+        response = shield_client.create_protection(Name=name, ResourceArn=arn)
+        if response['ResponseMetadata']['HTTPStatusCode'] == 200:
+            return(True)
+        else:
+            logger.error(f"Attempt to enable shield protection for {arn} returned {response}")
+            return(False)
+    except ClientError as e:
+        raise
+
+
+def get_all_cloudfront(protections, session, region):
+    '''Return a Dict containing all unprotected CF distributions. The Dict Key is the ARN, the Dict value is the name'''
+    output = {}
+    count = 0
+    client = session.client('cloudfront', region_name=region)
+
+
+    response = client.list_distributions(MaxItems="100")
+    if 'Items' not in response['DistributionList']:
+        # Empty CF List.
+        return(output)
+    for cf in response['DistributionList']['Items']:
+        if cf['ARN'] in protections:
+            logger.debug(f"Arn {cf['ARN']} is already protected by Shield Advanced")
+            continue
+        output[cf['ARN']] = f"{cf['DomainName']}-{cf['Id']}"
+    count += len(response['DistributionList']['Items'])
+
+    while 'NextMarker' in response['DistributionList']:
+        response = client.list_distributions(MaxItems="100", Marker=response['DistributionList']['NextMarker'])
+        if 'Items' not in response['DistributionList']:
+            # Empty CF List.
+            return(output)
+        for cf in response['DistributionList']['Items']:
+            if cf['ARN'] in protections:
+                logger.debug(f"Arn {cf['ARN']} is already protected by Shield Advanced")
+                continue
+            output[cf['ARN']] = f"{cf['DomainName']}-{cf['Id']}"
+        count += len(response['DistributionList']['Items'])
+
+
+    logger.info(f"Found {count} Distributions")
+
+    return(output)
+
+
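Both discovery helpers page through results by hand with `NextMarker` loops. boto3 also exposes paginators for `list_distributions` and `describe_load_balancers`; purely as an illustration (the helper name below is hypothetical, and `protections`, `session`, and `region` are assumed to mean the same thing they do in this script), the ALB helper that follows could be sketched as:

```python
def get_unprotected_alb_arns(protections, session, region):
    '''Sketch of get_all_albs() using the boto3 paginator instead of a manual Marker loop.'''
    client = session.client('elbv2', region_name=region)
    output = {}
    for page in client.get_paginator('describe_load_balancers').paginate():
        for lb in page['LoadBalancers']:
            # Only internet-facing Application Load Balancers are candidates for Shield protection here
            if lb['Type'] != 'application' or lb['Scheme'] != 'internet-facing':
                continue
            if lb['LoadBalancerArn'] in protections:
                continue
            output[lb['LoadBalancerArn']] = lb['LoadBalancerName']
    return(output)
```

Either form behaves the same; the manual loops below keep the script free of extra abstractions.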
+def get_all_albs(protections, session, region):
+    '''Return a Dict containing all unprotected ALBs. The Dict Key is the ARN, the Dict value is the name'''
+    output = {}
+    client = session.client('elbv2', region_name=region)
+
+    response = client.describe_load_balancers()
+    for lb in response['LoadBalancers']:
+        if lb['Type'] != 'application':
+            # Don't care
+            continue
+        if lb['Scheme'] != 'internet-facing':
+            # Also Don't care
+            continue
+        if lb['LoadBalancerArn'] in protections:
+            logger.debug(f"Arn {lb['LoadBalancerArn']} is already protected by Shield Advanced")
+            continue
+        output[lb['LoadBalancerArn']] = lb['LoadBalancerName']
+
+    while 'NextMarker' in response:
+        response = client.describe_load_balancers(Marker=response['NextMarker'])
+        for lb in response['LoadBalancers']:
+            if lb['Type'] != 'application':
+                # Don't care
+                continue
+            if lb['Scheme'] != 'internet-facing':
+                # Also Don't care
+                continue
+            if lb['LoadBalancerArn'] in protections:
+                logger.debug(f"Arn {lb['LoadBalancerArn']} is already protected by Shield Advanced")
+                continue
+            output[lb['LoadBalancerArn']] = lb['LoadBalancerName']
+
+    return(output)
+
+
+def get_regions(session, args):
+    '''Return a list of regions with us-east-1 first. If --region was specified, return a list with just that'''
+
+    # If we specified a region on the CLI, return a list of just that
+    if args.region:
+        return([args.region])
+
+    # otherwise return all the regions, us-east-1 first
+    ec2 = session.client('ec2')
+    response = ec2.describe_regions()
+    output = ['us-east-1']
+    for r in response['Regions']:
+        # return us-east-1 first, but don't return it twice
+        if r['RegionName'] == "us-east-1":
+            continue
+        output.append(r['RegionName'])
+    return(output)
+
+
+def do_args():
+    import argparse
+    parser = argparse.ArgumentParser()
+    parser.add_argument("--debug", help="print debugging info", action='store_true')
+    parser.add_argument("--error", help="print error info only", action='store_true')
+    parser.add_argument("--timestamp", help="Output log with timestamp and toolname", action='store_true')
+    parser.add_argument("--region", help="Only Process Specified Region")
+    parser.add_argument("--profile", help="Use this CLI profile (instead of default or env credentials)")
+    parser.add_argument("--actually-do-it", help="Actually Perform the action", action='store_true')
+    parser.add_argument("--resource-type", help="Type of resource to apply Shield Protections to", required=True, choices=['ALB', 'CloudFront'])
+
+    args = parser.parse_args()
+
+    return(args)
+
+if __name__ == '__main__':
+
+    args = do_args()
+
+    # Logging idea stolen from: https://docs.python.org/3/howto/logging.html#configuring-logging
+    # create console handler and set level to debug
+    logger = logging.getLogger('enable-shield-protections')
+    ch = logging.StreamHandler()
+    if args.debug:
+        logger.setLevel(logging.DEBUG)
+    elif args.error:
+        logger.setLevel(logging.ERROR)
+    else:
+        logger.setLevel(logging.INFO)
+
+    # Silence Boto3 & Friends
+    logging.getLogger('botocore').setLevel(logging.WARNING)
+    logging.getLogger('boto3').setLevel(logging.WARNING)
+    logging.getLogger('urllib3').setLevel(logging.WARNING)
+
+    # create formatter
+    if
args.timestamp: + formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s') + else: + formatter = logging.Formatter('%(levelname)s - %(message)s') + # add formatter to ch + ch.setFormatter(formatter) + # add ch to logger + logger.addHandler(ch) + + try: + main(args, logger) + except KeyboardInterrupt: + exit(1) \ No newline at end of file diff --git a/unsubscribe_from_marketing_email/README.md b/unsubscribe_from_marketing_email/README.md new file mode 100644 index 0000000..b690b01 --- /dev/null +++ b/unsubscribe_from_marketing_email/README.md @@ -0,0 +1,17 @@ +# Unsubscribe from Marketing Emails + +AWS will send marketing promotional emails to the root email of all AWS Accounts. If you manage multiple accounts, this can be highly annoying and lead to you filtering email from AWS. Filtering email sent to the root address IS REALLY BAD, since that is also how security issues are sent. + + + +## What the unsubscribe_all_emails.sh script does. + +NOTE: This script needs to be run with profile credentials from the AWS Organizations Admin account (payer account) or from any account used for Delegated Admin (ie GuardDuty, Macie, etc). It requires the command `aws organizations list-accounts` to work. + + +## Usage + +Just run the script. It will extract all the root email addresses for invited accounts, and issue a CURL against AWS's unsubscribe URL. AWS will rate limit you, so I've included a SLEEP. + +## Credit +Credit goes to Ian Mckay ([@iann0036](https://twitter.com/iann0036)) for the idea via [this tweet](https://twitter.com/iann0036/status/1176705462940635136) \ No newline at end of file diff --git a/unsubscribe_from_marketing_email/unsubscribe_all_emails.sh b/unsubscribe_from_marketing_email/unsubscribe_all_emails.sh new file mode 100755 index 0000000..f692bbb --- /dev/null +++ b/unsubscribe_from_marketing_email/unsubscribe_all_emails.sh @@ -0,0 +1,37 @@ +#!/bin/bash +# Copyright 2021 Chris Farris +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Fast fix inspired by this tweet from Ian Mckay - https://twitter.com/iann0036/status/1176705548290535425 + +# In theory, AWS says accounts created via Organizations aren't opt-in to marketing emails. So we filter on Invited only. YMMV. +ROOT_EMAIL_LIST=`aws organizations list-accounts --query "Accounts[?JoinedMethod=='INVITED'].Email" --output text` + +# AWS will redirect you to a CloudFlare captcha page if you fire too many of these against them at once. +# Sleep is the lazy ratelimiter. 
check the unsubscribe.log file to see if you see messages like these which indicate success: +# {"formId":"34006","followUpUrl":"https:\/\/pages.awscloud.com\/PreferenceCenterV4-Unsub-PreferenceCenter.html"} + +SLEEP_TIME=30 + +for email in $ROOT_EMAIL_LIST; do + echo "Unsubscribing $email from AWS Marketing emails" + encoded_email=`echo ${email} | sed s/@/%40/g` + curl -s 'https://pages.awscloud.com/index.php/leadCapture/save2' --data 'FirstName=&LastName=&Email='${encoded_email}'&Company=&Phone=&Country=&preferenceCenterCategory=no&preferenceCenterGettingStarted=no&preferenceCenterOnlineInPersonEvents=no&preferenceCenterMonthlyAWSNewsletter=no&preferenceCenterTrainingandBestPracticeContent=no&preferenceCenterProductandServiceAnnoucements=no&preferenceCenterSurveys=no&PreferenceCenter_AWS_Partner_Events_Co__c=no&preferenceCenterOtherAWSCommunications=no&PreferenceCenter_Language_Preference__c=&Title=&Job_Role__c=&Industry=&Level_of_AWS_Usage__c=&LDR_Solution_Area__c=&Unsubscribed=yes&UnsubscribedReason=I%20already%20get%20email%20from%20another%20account&unsubscribedReasonOther=&useCaseMultiSelect=&zOPFormValidationBotVerification=&Website_Referral_Code__c=&zOPURLTrackingTRKCampaign=&zOPEmailValidationHygiene=validate&zOPURLTrackingSiteCatalystSource=&zOPURLTrackingSiteCatalystChannel=em&zOPURLTrackingSiteCatalystPublisher=aws&formid=34006&lpId=127906&subId=6&munchkinId=112-TZM-766&lpurl=%2F%2Fpages.awscloud.com%2Fcommunication-preferences.html%3Fcr%3D%7Bcreative%7D%26kw%3D%7Bkeyword%7D&cr=&kw=&q=&_mkt_trk=id%3A112-TZM-766%26token%3A_mch-pages.awscloud.com-1634828395353-78149&formVid=34006&mkt_tok=MTEyLVRaTS03NjYAAAGArUL0R1AJrZPQKmPub_MWYJS68FkcdjTMmCy7hrG4hzSnK08MaPDXszkwXYVw1Oo6qVoy3QrDShzVolVitJ6g9eeBa4zvvVPU-rtlT8xTKPwbEN4jyFTC&_mktoReferrer=https%3A%2F%2Fpages.awscloud.com%2Fcommunication-preferences.html%3Fsc_channel%3Dem%26sc_campaign%3DGLOBAL_CR_SU_H2-2021-CCAP-SurveyInvite_10.08.21.03%2520-%2520Survey%2520Invite%25201%2520Email%2520Send%26sc_publisher%3Daws%26sc_medium%3Dem_430081%26sc_content%3Dsurvey%26sc_country%3DUS%26sc_region%3D%3Fparam%3Dunsubscribe%26mkt_tok%3DMTEyLVRaTS03NjYAAAGArUL0R1AJrZPQKmPub_MWYJS68FkcdjTMmCy7hrG4hzSnK08MaPDXszkwXYVw1Oo6qVoy3QrDShzVolVitJ6g9eeBa4zvvVPU-rtlT8xTKPwbEN4jyFTC&checksumFields=FirstName%2CLastName%2CEmail%2CCompany%2CPhone%2CCountry%2CpreferenceCenterCategory%2CpreferenceCenterGettingStarted%2CpreferenceCenterOnlineInPersonEvents%2CpreferenceCenterMonthlyAWSNewsletter%2CpreferenceCenterTrainingandBestPracticeContent%2CpreferenceCenterProductandServiceAnnoucements%2CpreferenceCenterSurveys%2CPreferenceCenter_AWS_Partner_Events_Co__c%2CpreferenceCenterOtherAWSCommunications%2CPreferenceCenter_Language_Preference__c%2CTitle%2CJob_Role__c%2CIndustry%2CLevel_of_AWS_Usage__c&checksum=e60aa8324cf0ac1844446eab8eb95a56c6ef1edd0c7f3c8b134f5bfc0259ee90' >> unsubscribe.log + if [ $? -eq 0 ] ; then + echo "Success. Sleeping $SLEEP_TIME sec" + else + echo "Failure" + fi + sleep $SLEEP_TIME +done \ No newline at end of file diff --git a/vpc-flow-logs/enable-vpc-flowlogs.py b/vpc-flow-logs/enable-vpc-flowlogs.py index 0096fd3..dfcb4b1 100755 --- a/vpc-flow-logs/enable-vpc-flowlogs.py +++ b/vpc-flow-logs/enable-vpc-flowlogs.py @@ -18,7 +18,13 @@ def main(args, logger): # processiong regions for region in all_regions: - process_region(args, region, session, logger) + try: + process_region(args, region, session, logger) + except ClientError as e: + if e.response['Error']['Code'] == "UnauthorizedOperation": + logger.error(f"Failed to process region {region}. 
Denied by SCP?") + else: + raise return @@ -37,14 +43,17 @@ def process_region(args, region, session, logger): if vpcs: # processing VPCs for VpcId in vpcs: - # enable flowlogs if the vpc has eni within it - logger.debug(f" Processing VpcId {VpcId}") - network_interfaces = ec2_client.describe_network_interfaces(Filters=[{'Name':'vpc-id','Values':[VpcId]}])['NetworkInterfaces'] - if network_interfaces: - logger.debug(f" ENI found in VpcId {VpcId}") + if args.process_empty: enable_flowlogs(VpcId, ec2_client, args, region) else: - logger.debug(f" No ENI found in VpcId {VpcId}, skipped.") + # enable flowlogs if the vpc has eni within it + logger.debug(f" Processing VpcId {VpcId}") + network_interfaces = ec2_client.describe_network_interfaces(Filters=[{'Name':'vpc-id','Values':[VpcId]}])['NetworkInterfaces'] + if network_interfaces: + logger.debug(f" ENI found in VpcId {VpcId}") + enable_flowlogs(VpcId, ec2_client, args, region) + else: + logger.debug(f" No ENI found in VpcId {VpcId}, skipped.") else: logger.debug(" No VPCs to enable flow logs in region:{}".format(region)) @@ -186,6 +195,7 @@ def do_args(): parser.add_argument("--profile", help="Use this CLI profile (instead of default or env credentials)") parser.add_argument("--vpc-id", help="Only Process Specified VPC") parser.add_argument("--actually-do-it", help="Actually Perform the action", action='store_true') + parser.add_argument("--process-empty", help="Process empty VPCs too", action='store_true') parser.add_argument("--flowlog-bucket", help="S3 bucket to deposit logs to", required=True) parser.add_argument("--traffic-type", help="The type of traffic to log", default='ALL', choices=['ACCEPT','REJECT','ALL']) parser.add_argument("--force", help="Perform flowlog replacement without prompt", action='store_true')
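The refactored region loop above now routes every VPC — empty or not, depending on `--process-empty` — through `enable_flowlogs()`, whose body is outside this hunk. Purely for context, a minimal sketch of what a VPC-to-S3 flow-log call generally looks like is below; the bucket name and VPC ID are placeholders, and this is not necessarily how the script's own helper is written:

```python
# Hedged sketch of enabling VPC Flow Logs to an S3 bucket; enable_flowlogs() in the real script may differ.
import boto3

ec2_client = boto3.client("ec2", region_name="us-east-1")
response = ec2_client.create_flow_logs(
    ResourceIds=["vpc-0123456789abcdef0"],                  # placeholder VPC ID
    ResourceType="VPC",
    TrafficType="ALL",                                      # the script exposes ACCEPT/REJECT/ALL via --traffic-type
    LogDestinationType="s3",
    LogDestination="arn:aws:s3:::example-flowlog-bucket",   # placeholder; the script takes --flowlog-bucket
)
# create_flow_logs() reports per-resource problems in the response instead of raising
if response.get("Unsuccessful"):
    print(response["Unsuccessful"])
```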