diff --git a/.gitignore b/.gitignore
index d187ea40..6ddf57d6 100644
--- a/.gitignore
+++ b/.gitignore
@@ -14,3 +14,10 @@ packaged.yaml
.terraform.lock.hcl
terraform.tfstate
terraform.tfstate.backup
+
+# macOS
+**/.dccache
+**/.DS_Store
+
+# VSCode
+**/.vscode
diff --git a/deployment/azure-python-deploy-to-all-existing-storage/README.md b/deployment/azure-python-deploy-to-all-existing-storage/README.md
new file mode 100644
index 00000000..bde04898
--- /dev/null
+++ b/deployment/azure-python-deploy-to-all-existing-storage/README.md
@@ -0,0 +1,74 @@
+# Deploy to All Existing Azure Resource
+This script deploys a File Storage Security stack to every storage account except those listed in the `exclude.txt` file. After deployment, the stacks are registered with the Cloud One console.
+
+**Before you deploy**
+
+ * Obtain your Cloud One API Key
+ - Generate API Key: [Cloud One API Key](https://cloudone.trendmicro.com/docs/account-and-user-management/c1-api-key/)
+
+
+
+**1. Clone Repo**
+ - Clone this repository `git clone https://github.com/trendmicro/cloudone-community.git`
+ - After cloning repo:
+```
+    cd cloudone-community/deployment/azure-python-deploy-to-all-existing-storage
+```
+
+**2. Configure the Exclusions text file `exclude.txt`**
+ * Create a new file called `exclude.txt` with names of Azure storage accounts to exclude from FSS deployment.
+ - 1 per line, Example: [exclude.txt](https://github.com/trendmicro/cloudone-community/blob/main/File-Storage-Security/Deployment/python-deploy-to-all-existing/exclude.txt)
+ * For organizations with a large number of storage accounts, a list of Azure storage accounts can be piped into `exclude.txt` using `azure-cli` or `PowerShell`:
+ ```
+ # Bash
+ az storage account list --query "[?tags.AutoDeployFSS != 'True'].name" --output tsv > exclude.txt
+ cat exclude.txt
+
+ # PowerShell
+ Clear-Content -Path exclude.txt
+ Get-AzStorageAccount | Where-Object {$_.tags.AutoDeployFSS -ne 'True'} | Select-Object -Property StorageAccountName | ConvertTo-JSON | Out-File -FilePath exclude.json
+ $json = (Get-Content "exclude.json" -Raw) | ConvertFrom-Json
+ foreach($v in $json.StorageAccountName) {
+ Write-Output "${v}" >> exclude.txt
+ }
+ more exclude.txt
+ ```
+**3. Configuration file**
+
+* Complete the `config.json` configuration file with valid input.
+
+| Fields | Environment Variable | Type | Description | Required? |
+|--------| ---- | ----------- | --------- | --------- |
+| `app_id` | | String | Azure Application ID | Yes |
+| `tenant_id` | | String | Azure Tenant ID | Yes |
+| `subscription_id` | AZURE_SUBSCRIPTION_ID | String | Azure Subscription ID | Yes |
+| `keyvault_uri` | | String | Azure KeyVault URI | Yes |
+|`cloudone.region` | CLOUDONE_REGION | String | Cloud One Region Example: us-1 or ca-1 | Yes |
+| `cloudone.api_key` | CLOUDONE_API_KEY | String | Cloud One File Storage Security API Key. You can create an API Key using these instructions - https://cloudone.trendmicro.com/docs/workload-security/api-cookbook-set-up/#create-an-api-key | Yes |
+| `cloudone.max_storage_stack_per_scanner_stack` | MAX_STORAGE_STACK_PER_SCANNER_STACK | Number | Recommended to set to 50, i.e, for every 50 Storage stacks, 1 Scanner stack would be created. Contact the product team for your usecase requirements. | Yes |
+| `azure_creds.key` | | String | Azure Credentials Key Value | Yes |
+| `azure_creds.secret` | | String | Azure Credentials Secret Value | Yes |
+
+**4. Deploy Tool via the Serverless Framework**
+* Open terminal/cmd:
+ ```
+ serverless deploy -s dev
+ ```
+
+# Additional Notes
+
+### Tags
+
+The script decides whether to deploy a storage stack based on each storage account's tags. **See below for details**:
+
+| Tag | Value | Behavior |
+| -------------- | --------------------- |-------------------------------------------------------------- |
+| [no tag] | [none] | No action |
+| `AutoDeployFSS` | `True` | Storage Stack and Scanner Stack will be deployed |
+| `AutoDeployFSS` | `(any other value)` | Skip |
+| `FSSMonitored` | `yes` | Storage Stack Already Exists, Scanner Stack associated (skip) |
+| `FSSMonitored` | `(any other value)` | Skip |
+
+### Supported FSS regions
+
+Please note this script deploys Scanner Stacks to select Azure locations as listed in the supported document here - [What Azure services and regions are supported?](https://cloudone.trendmicro.com/docs/file-storage-security/supported-azure/). Storage Stacks that are not present in the same Azure locations will be mapped to Scanner Stacks deployed in the same geographyGroup as defined by Azure. Any data transfer cost(s) incurred in this data transfer would be your responsibility.
diff --git a/deployment/azure-python-deploy-to-all-existing-storage/deploy.sh b/deployment/azure-python-deploy-to-all-existing-storage/deploy.sh
new file mode 100755
index 00000000..3e62fb80
--- /dev/null
+++ b/deployment/azure-python-deploy-to-all-existing-storage/deploy.sh
@@ -0,0 +1,30 @@
+#!/usr/bin/env bash
+
+# Function app and storage account names must be unique.
+
+# Variable block
+randomIdentifier=$((RANDOM * RANDOM))
+location="eastus"
+resourceGroup="azure-functions-deployfss-rg-$randomIdentifier"
+tag="function-app-deployfss"
+storage="deployfss$randomIdentifier"
+functionApp="deployfss-serverless-function-$randomIdentifier"
+skuStorage="Standard_LRS"
+functionsVersion="4"
+pythonVersion="3.9" #Allowed values: 3.7, 3.8, and 3.9
+
+# Create a resource group
+echo "Creating $resourceGroup in $location..."
+az group create --name "$resourceGroup" --location "$location" --tags "$tag" || exit 1
+
+# Create an Azure storage account in the resource group.
+echo "Creating $storage"
+az storage account create --name "$storage" --location "$location" --resource-group "$resourceGroup" --sku "$skuStorage" || exit 1
+
+# Create a serverless python function app in the resource group.
+echo "Creating $functionApp"
+az functionapp create --name "$functionApp" --storage-account "$storage" --consumption-plan-location "$location" --resource-group "$resourceGroup" --os-type Linux --runtime python --runtime-version "$pythonVersion" --functions-version "$functionsVersion" || exit 1
+
+# Publish function app; abort if the working directory is missing.
+cd deployToAllExistingStorageAccounts || exit 1
+func azure functionapp publish "$functionApp"
\ No newline at end of file
diff --git a/deployment/azure-python-deploy-to-all-existing-storage/deployToAllExistingStorageAccounts/.funcignore b/deployment/azure-python-deploy-to-all-existing-storage/deployToAllExistingStorageAccounts/.funcignore
new file mode 100644
index 00000000..2eea525d
--- /dev/null
+++ b/deployment/azure-python-deploy-to-all-existing-storage/deployToAllExistingStorageAccounts/.funcignore
@@ -0,0 +1 @@
+.env
\ No newline at end of file
diff --git a/deployment/azure-python-deploy-to-all-existing-storage/deployToAllExistingStorageAccounts/deploymentHandler/Deployer.py b/deployment/azure-python-deploy-to-all-existing-storage/deployToAllExistingStorageAccounts/deploymentHandler/Deployer.py
new file mode 100644
index 00000000..6232897c
--- /dev/null
+++ b/deployment/azure-python-deploy-to-all-existing-storage/deployToAllExistingStorageAccounts/deploymentHandler/Deployer.py
@@ -0,0 +1,95 @@
+"""
+Modified class. Original version can be found at https://raw.githubusercontent.com/Azure-Samples/resource-manager-python-template-deployment/master/deployer.py
+
+Modifications include
+ - uuid7 instead of Haikunator, for a shorter random suffix.
+ - using dynamic regions for deployment
+"""
+
+"""A deployer class to deploy a template on Azure"""
+import os.path
+import json
+from azure.identity import ClientSecretCredential
+from azure.mgmt.resource import ResourceManagementClient
+from azure.mgmt.resource.resources.models import DeploymentMode
+from azure.mgmt.resource.resources.models import Deployment
+from azure.mgmt.resource.resources.models import DeploymentProperties
+
+import keyvault
+
+class Deployer(object):
+    """ Initialize the deployer with a subscription ID and resource group name.
+
+    Service-principal credentials are read from the environment.
+    :raises KeyError: If AZURE_CLIENT_ID, AZURE_CLIENT_SECRET or AZURE_TENANT_ID
+        environment variables are not defined
+    """
+
+ def __init__(self, subscription_id, resource_group_name):
+ self.subscription_id = subscription_id
+ self.resource_group_name = resource_group_name
+ # self.credentials = DefaultAzureCredential(exclude_environment_credential=False)
+
+ # TODO: Store credentials in the Azure Key Vault, instead of environment variables
+ # print("\nClient ID : " + str(keyvault.get_secret_from_keyvault('FSS-AUTODEPLOY-CLIENT-ID')) + "\nClient Secret : " + str(keyvault.get_secret_from_keyvault('FSS-AUTODEPLOY-CLIENT-SECRET')))
+
+ # self.credentials = ServicePrincipalCredentials(
+ # client_id=os.environ['AZURE_CLIENT_ID'],
+ # secret=os.environ['AZURE_CLIENT_SECRET'],
+ # tenant=os.environ['AZURE_TENANT_ID']
+ # )
+ self.credentials = ClientSecretCredential(
+ client_id=os.environ['AZURE_CLIENT_ID'],
+ client_secret=os.environ['AZURE_CLIENT_SECRET'],
+ tenant_id=os.environ['AZURE_TENANT_ID']
+ )
+ self.client = ResourceManagementClient(self.credentials, self.subscription_id)
+
+ def deploy(self, azure_location, stack_type, stack_params={}):
+ """Deploy the template to a resource group."""
+ self.client.resource_groups.create_or_update(
+ self.resource_group_name,
+ {
+ 'location': azure_location
+ }
+ )
+
+ template_file_name = None
+ if stack_type == "scanner":
+ template_file_name = "FSS-Scanner-Stack-Template.json"
+ elif stack_type == "storage":
+ template_file_name = "FSS-Storage-Stack-Template.json"
+
+ template_path = os.path.join(os.path.dirname(__file__), 'templates', template_file_name)
+ with open(template_path, 'r') as template_file_fd:
+ template = json.load(template_file_fd)
+
+ parameters = {}
+ if stack_params:
+ parameters.update(stack_params)
+ parameters = {k: {'value': v} for k, v in parameters.items()}
+
+ deployment_properties = DeploymentProperties(
+ mode = DeploymentMode.incremental,
+ template = template,
+ parameters = parameters
+ )
+
+ # TODO: Tag your deployments so you can keep track
+ deployment_async_operation = self.client.deployments.begin_create_or_update(
+ resource_group_name = self.resource_group_name,
+ deployment_name = self.resource_group_name + '-dep',
+ parameters = Deployment(properties = deployment_properties)
+ )
+ deployment_async_operation.wait()
+
+ deployment_outputs = self.client.deployments.get(
+ resource_group_name = self.resource_group_name,
+ deployment_name = self.resource_group_name + '-dep'
+ )
+
+ return deployment_outputs.properties.outputs
+
+ def destroy(self):
+ """Destroy the given resource group"""
+ self.client.resource_groups.delete(self.resource_group_name)
diff --git a/deployment/azure-python-deploy-to-all-existing-storage/deployToAllExistingStorageAccounts/deploymentHandler/__init__.py b/deployment/azure-python-deploy-to-all-existing-storage/deployToAllExistingStorageAccounts/deploymentHandler/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/deployment/azure-python-deploy-to-all-existing-storage/deployToAllExistingStorageAccounts/deploymentHandler/cloudone_fss_api.py b/deployment/azure-python-deploy-to-all-existing-storage/deployToAllExistingStorageAccounts/deploymentHandler/cloudone_fss_api.py
new file mode 100644
index 00000000..b35ddf8d
--- /dev/null
+++ b/deployment/azure-python-deploy-to-all-existing-storage/deployToAllExistingStorageAccounts/deploymentHandler/cloudone_fss_api.py
@@ -0,0 +1,220 @@
+import json
+import logging
+from http.client import responses
+import urllib3
+# http = urllib3.PoolManager()
+urllib3.disable_warnings()
+http = urllib3.PoolManager(cert_reqs='CERT_NONE', assert_hostname=False)
+
+import utils
+
+def filter_stacks_by_subscription_id(subscription_id, cloudone_fss_stacks_output):
+
+ temp_stacks_output_list = []
+ for stack in cloudone_fss_stacks_output["stacks"]:
+ if utils.get_subscription_id_from_resource_group_id(stack["details"]["resourceGroupID"]) != subscription_id:
+ temp_stacks_output_list.append(stack)
+
+ for stack in temp_stacks_output_list:
+ cloudone_fss_stacks_output["stacks"].remove(stack)
+ return cloudone_fss_stacks_output
+
+def get_scanner_stacks():
+
+ r = None
+ try:
+ region = utils.get_cloudone_region()
+ api_key = utils.get_cloudone_api_key()
+
+ if region and api_key:
+ cloudone_fss_api_url = "https://filestorage.{}.cloudone.trendmicro.com/api".format(region)
+
+ r = http.request(
+ "GET",
+ cloudone_fss_api_url + "/stacks?provider=azure&type=scanner",
+ headers={
+ "Authorization": "ApiKey " + api_key,
+ "Api-Version": "v1",
+ },
+
+ )
+
+ if r.status == 200:
+ return json.loads(r.data)
+ else:
+ logging.error("HTTP Request failure (code: " + str(r.status) + ". Message: " + str(responses[r.status]) + "). Check cloudone section in the config.json file or environment variables [\"CLOUDONE_API_KEY\", \"CLOUDONE_REGION\"] for valid input.")
+ raise Exception("HTTP Request failure (code: " + str(r.status) + ". Message: " + str(responses[r.status]) + "). Check cloudone section in the config.json file or environment variables [\"CLOUDONE_API_KEY\", \"CLOUDONE_REGION\"] for valid input.")
+ except:
+ if r:
+ logging.error("HTTP Request failure (code: " + str(r.status) + ". Message: " + str(responses[r.status]) + "). Check the logs for more information.")
+ raise Exception("HTTP Request failure (code: " + str(r.status) + ". Message: " + str(responses[r.status]) + "). Check the logs for more information.")
+ else:
+ logging.error("HTTP Request failure. Check the logs for more information.")
+ raise Exception("HTTP Request failure. Check the logs for more information.")
+
+def get_storage_stacks():
+
+ r = None
+ try:
+ region = utils.get_cloudone_region()
+ api_key = utils.get_cloudone_api_key()
+
+ if region and api_key:
+ cloudone_fss_api_url = "https://filestorage.{}.cloudone.trendmicro.com/api".format(region)
+
+ r = http.request(
+ "GET",
+ cloudone_fss_api_url + "/stacks?provider=azure&type=storage",
+ headers={
+ "Authorization": "ApiKey " + api_key,
+ "Api-Version": "v1",
+ }
+ )
+
+ if r.status == 200:
+ return json.loads(r.data)
+ else:
+ logging.error("HTTP Request failure (code: " + str(r.status) + ". Message: " + str(responses[r.status]) + "). Check cloudone section in the config.json file or environment variables [\"CLOUDONE_API_KEY\", \"CLOUDONE_REGION\"] for valid input.")
+ raise Exception("HTTP Request failure (code: " + str(r.status) + ". Message: " + str(responses[r.status]) + "). Check cloudone section in the config.json file or environment variables [\"CLOUDONE_API_KEY\", \"CLOUDONE_REGION\"] for valid input.")
+ except:
+ if r:
+ logging.error("HTTP Request failure (code: " + str(r.status) + ". Message: " + str(responses[r.status]) + "). Check the logs for more information.")
+ raise Exception("HTTP Request failure (code: " + str(r.status) + ". Message: " + str(responses[r.status]) + "). Check the logs for more information.")
+ else:
+ logging.error("HTTP Request failure. Check the logs for more information.")
+ raise Exception("HTTP Request failure. Check the logs for more information.")
+
+def map_scanner_stacks_to_azure_locations():
+
+ subscription_id = utils.get_subscription_id()
+
+ existing_scanner_stacks_dict = filter_stacks_by_subscription_id(subscription_id, get_scanner_stacks())
+
+ if existing_scanner_stacks_dict:
+ locationsDict = {}
+ for scanner_stack in existing_scanner_stacks_dict["stacks"]:
+
+ if scanner_stack["status"] == "ok":
+
+ if scanner_stack["details"]["region"] not in locationsDict:
+ locationsDict.update({scanner_stack["details"]["region"]: []})
+
+ locationsDict[scanner_stack["details"]["region"]].append(scanner_stack)
+
+ return locationsDict
+ return None
+
+def get_associated_storage_stacks_to_scanner_stack(scanner_stack_uuid):
+
+ r = None
+ try:
+ region = utils.get_cloudone_region()
+ api_key = utils.get_cloudone_api_key()
+
+ if region and api_key:
+ cloudone_fss_api_url = "https://filestorage.{}.cloudone.trendmicro.com/api".format(region)
+
+ r = http.request(
+ "GET",
+ cloudone_fss_api_url + "/stacks?provider=azure&type=storage&scannerStack=" + scanner_stack_uuid,
+ headers={
+ "Authorization": "ApiKey " + api_key,
+ "Api-Version": "v1",
+ },
+
+ )
+ if r.status == 200:
+ return json.loads(r.data)
+ else:
+ logging.error("HTTP Request failure (code: " + str(r.status) + ". Message: " + str(responses[r.status]) + "). Check cloudone section in the config.json file or environment variables [\"CLOUDONE_API_KEY\", \"CLOUDONE_REGION\"] for valid input.")
+ raise Exception("HTTP Request failure (code: " + str(r.status) + ". Message: " + str(responses[r.status]) + "). Check cloudone section in the config.json file or environment variables [\"CLOUDONE_API_KEY\", \"CLOUDONE_REGION\"] for valid input.")
+ except:
+ if r:
+ logging.error("HTTP Request failure (code: " + str(r.status) + ". Message: " + str(responses[r.status]) + "). Check the logs for more information.")
+ raise Exception("HTTP Request failure (code: " + str(r.status) + ". Message: " + str(responses[r.status]) + "). Check the logs for more information.")
+ else:
+ logging.error("HTTP Request failure. Check the logs for more information.")
+ raise Exception("HTTP Request failure. Check the logs for more information.")
+
+def register_scanner_stack_with_cloudone(resource_group_id, tenant_id):
+
+ r = None
+ try:
+ region = utils.get_cloudone_region()
+ api_key = utils.get_cloudone_api_key()
+
+ if region and api_key:
+ cloudone_fss_api_url = "https://filestorage.{}.cloudone.trendmicro.com/api".format(region)
+
+ request_data = {
+ 'type': 'scanner',
+ 'provider': 'azure',
+ 'details': {
+ 'resourceGroupID': resource_group_id,
+ 'tenantID': tenant_id
+ }
+ }
+
+ r = http.request(
+ "POST",
+ cloudone_fss_api_url + "/stacks",
+ headers={
+ "Authorization": "ApiKey " + api_key,
+ "Api-Version": "v1",
+ },
+ body=json.dumps(request_data)
+ )
+ if r.status == 200:
+ return json.loads(r.data)["stackID"]
+ else:
+ logging.error("HTTP Request failure (code: " + str(r.status) + ". Message: " + str(responses[r.status]) + "). Check cloudone section in the config.json file or environment variables [\"CLOUDONE_API_KEY\", \"CLOUDONE_REGION\"] for valid input.")
+ raise Exception("HTTP Request failure (code: " + str(r.status) + ". Message: " + str(responses[r.status]) + "). Check cloudone section in the config.json file or environment variables [\"CLOUDONE_API_KEY\", \"CLOUDONE_REGION\"] for valid input.")
+ except:
+ if r:
+ logging.error("HTTP Request failure (code: " + str(r.status) + ". Message: " + str(responses[r.status]) + "). Check the logs for more information.")
+ raise Exception("HTTP Request failure (code: " + str(r.status) + ". Message: " + str(responses[r.status]) + "). Check the logs for more information.")
+ else:
+ logging.error("HTTP Request failure. Check the logs for more information.")
+ raise Exception("HTTP Request failure. Check the logs for more information.")
+
+def register_storage_stack_with_cloudone(cloudone_scanner_stack_id, resource_group_id, tenant_id):
+
+ r = None
+ try:
+ region = utils.get_cloudone_region()
+ api_key = utils.get_cloudone_api_key()
+
+ if region and api_key:
+ cloudone_fss_api_url = "https://filestorage.{}.cloudone.trendmicro.com/api".format(region)
+
+ request_data = {
+ 'type': 'storage',
+ 'provider': 'azure',
+ 'scannerStack': cloudone_scanner_stack_id,
+ 'details': {
+ 'resourceGroupID': resource_group_id,
+ 'tenantID': tenant_id
+ }
+ }
+
+ r = http.request(
+ "POST",
+ cloudone_fss_api_url + "/stacks",
+ headers={
+ "Authorization": "ApiKey " + api_key,
+ "Api-Version": "v1",
+ },
+ body=json.dumps(request_data)
+ )
+ if r.status == 200:
+ return json.loads(r.data)
+ else:
+ logging.error("HTTP Request failure (code: " + str(r.status) + ". Message: " + str(responses[r.status]) + "). Check cloudone section in the config.json file or environment variables [\"CLOUDONE_API_KEY\", \"CLOUDONE_REGION\"] for valid input.")
+ raise Exception("HTTP Request failure (code: " + str(r.status) + ". Message: " + str(responses[r.status]) + "). Check cloudone section in the config.json file or environment variables [\"CLOUDONE_API_KEY\", \"CLOUDONE_REGION\"] for valid input.")
+ except:
+ if r:
+ logging.error("HTTP Request failure (code: " + str(r.status) + ". Message: " + str(responses[r.status]) + "). Check the logs for more information.")
+ raise Exception("HTTP Request failure (code: " + str(r.status) + ". Message: " + str(responses[r.status]) + "). Check the logs for more information.")
+ else:
+ logging.error("HTTP Request failure. Check the logs for more information.")
+ raise Exception("HTTP Request failure. Check the logs for more information.")
diff --git a/deployment/azure-python-deploy-to-all-existing-storage/deployToAllExistingStorageAccounts/deploymentHandler/config.json b/deployment/azure-python-deploy-to-all-existing-storage/deployToAllExistingStorageAccounts/deploymentHandler/config.json
new file mode 100644
index 00000000..6c3e3a62
--- /dev/null
+++ b/deployment/azure-python-deploy-to-all-existing-storage/deployToAllExistingStorageAccounts/deploymentHandler/config.json
@@ -0,0 +1,15 @@
+{
+    "app_id": "<AZURE-APP-ID>",
+    "tenant_id": "<AZURE-TENANT-ID>",
+    "subscription_id": "<AZURE-SUBSCRIPTION-ID>",
+    "keyvault_uri": "https://<YOUR-KEYVAULT-NAME>.vault.azure.net/",
+ "cloudone": {
+ "region": "ca-1",
+ "api_key": "",
+ "max_storage_stack_per_scanner_stack": 50
+ },
+ "azure_creds": {
+ "key": "",
+ "secret": ""
+ }
+}
\ No newline at end of file
diff --git a/deployment/azure-python-deploy-to-all-existing-storage/deployToAllExistingStorageAccounts/deploymentHandler/deployment_geographies.py b/deployment/azure-python-deploy-to-all-existing-storage/deployToAllExistingStorageAccounts/deploymentHandler/deployment_geographies.py
new file mode 100644
index 00000000..5ab48b18
--- /dev/null
+++ b/deployment/azure-python-deploy-to-all-existing-storage/deployToAllExistingStorageAccounts/deploymentHandler/deployment_geographies.py
@@ -0,0 +1,179 @@
+import logging
+
+import deployments
+import locations
+import geographies
+import cloudone_fss_api
+import utils
+
+def deploy_geographically(subscription_id, azure_supported_locations_obj_by_geography_groups_dict, fss_supported_regions_list, azure_storage_account_list):
+
+ # Scanner Stack Map
+ scanner_stacks_map_by_geographies_dict = geographies.build_geographies_map_dict()
+
+ # Storage Stacks Map
+ storage_stacks_map_by_geographies_dict = geographies.build_geographies_map_dict()
+
+ # Populate the Scanner stack map by geographies that are registered within this subscription
+ # Inventory of existing FSS scanner stacks in this subscription by Azure location
+ existing_scanner_stacks_by_location = cloudone_fss_api.map_scanner_stacks_to_azure_locations()
+
+ # If scanner stacks exist
+ if existing_scanner_stacks_by_location:
+
+ # Existing scanner stack location by location
+ for existing_scanner_stack_by_location in existing_scanner_stacks_by_location:
+
+ # Get scanner stack geography
+ scanner_stack_geography = geographies.get_geography_group_from_location(existing_scanner_stack_by_location, azure_supported_locations_obj_by_geography_groups_dict)
+
+ # Build a geographical map of scanner stacks
+ scanner_stacks_map_by_geographies_dict[scanner_stack_geography] = existing_scanner_stacks_by_location[existing_scanner_stack_by_location]
+
+ # TODO: Remove any scanner stacks that violate the 50:1 Storage to Scanner stack ratio
+ for scanner_stack_geography in scanner_stacks_map_by_geographies_dict:
+
+ for scanner_stack in scanner_stacks_map_by_geographies_dict[scanner_stack_geography]:
+
+ print(str(scanner_stack))
+
+ # If storage stacks exist
+ if azure_storage_account_list:
+
+ # Existing storage account
+ for storage_account in azure_storage_account_list:
+
+ # Get storage stack geography
+ storage_stack_geography = geographies.get_geography_group_from_location(storage_account["location"], azure_supported_locations_obj_by_geography_groups_dict)
+
+ temp_storage_stacks_by_geographies_list = storage_stacks_map_by_geographies_dict[storage_stack_geography]
+
+ temp_storage_stacks_by_geographies_list.append(storage_account)
+ storage_stacks_map_by_geographies_dict[storage_stack_geography] = temp_storage_stacks_by_geographies_list
+
+ # Populate the Storage stack map by geographies
+ # Iterate storage accounts in Azure
+ for storage_account_geography in storage_stacks_map_by_geographies_dict:
+
+ # Get storage account geography
+ # storage_account_geography = geographies.get_geography_group_from_location(storage_account["location"], azure_supported_locations_obj_by_geography_groups_dict)
+
+ cloudone_scanner_stack_id = scanner_stack_identity_principal_id = scanner_stack_queue_namespace = None
+
+ for scanner_stack_geography in scanner_stacks_map_by_geographies_dict:
+
+ if storage_stacks_map_by_geographies_dict[storage_account_geography]:
+
+ if storage_account_geography == scanner_stack_geography:
+
+ # If a scanner stack exists, then map to storage stack in the geography
+ if scanner_stacks_map_by_geographies_dict[scanner_stack_geography]:
+
+ cloudone_scanner_stack_id = scanner_stacks_map_by_geographies_dict[scanner_stack_geography][0]["stackID"]
+ scanner_stack_identity_principal_id = scanner_stacks_map_by_geographies_dict[scanner_stack_geography][0]["details"]["scannerIdentityPrincipalID"]
+ scanner_stack_queue_namespace = scanner_stacks_map_by_geographies_dict[scanner_stack_geography][0]["details"]["scannerQueueNamespace"]
+
+ for storage_account in storage_stacks_map_by_geographies_dict[storage_account_geography]:
+
+ # Deploy Storage Stack for the storage_account, Associate to previously identified existing scanner stack
+ if cloudone_scanner_stack_id and scanner_stack_identity_principal_id and scanner_stack_queue_namespace:
+
+ # storage_stack_deployment_outputs =
+ deployments.deploy_fss_storage_stack(
+ subscription_id,
+ storage_account,
+ cloudone_scanner_stack_id,
+ scanner_stack_identity_principal_id,
+ scanner_stack_queue_namespace
+ )
+
+ # if storage_stack_deployment_outputs:
+ # print("\tstorage_stack_deployment_outputs - " + str(storage_stack_deployment_outputs))
+
+ else:
+ logging.error("Deployment Failed. The deployment did not create any output(s). Check deployment status for more details on how to troubleshoot this issue.")
+ raise Exception("Deployment Failed. The deployment did not create any output(s). Check deployment status for more details on how to troubleshoot this issue.")
+
+ # If no scanner stacks exist, deploy one
+ else:
+
+ azure_recommended_location = locations.get_azure_recommended_location_by_geography_group(storage_account_geography, azure_supported_locations_obj_by_geography_groups_dict, fss_supported_regions_list)
+
+ # Deploy One Scanner Stack
+ scanner_stack_deployment_outputs = deployments.deploy_fss_scanner_stack(
+ subscription_id,
+ azure_supported_locations_obj_by_geography_groups_dict,
+ azure_recommended_location,
+ fss_supported_regions_list,
+ scanner_stack_name = "fss-scanner-" + storage_account_geography + "-" + utils.trim_location_name(azure_recommended_location) + "-geo-autodeploy"
+ )
+
+ if scanner_stack_deployment_outputs:
+
+ cloudone_scanner_stack_id = scanner_stack_deployment_outputs["cloudOneScannerStackId"]
+ cloudone_scanner_stack_name = str(scanner_stack_deployment_outputs["scannerStackResourceGroupID"]["value"]).split("/")[-1:][0]
+ cloudone_scanner_stack_tenant_id = scanner_stack_deployment_outputs["tenantID"]["value"]
+ cloudone_scanner_stack_resource_group_id = scanner_stack_deployment_outputs["scannerStackResourceGroupID"]["value"]
+ scanner_stack_queue_namespace = scanner_stack_deployment_outputs["scannerQueueNamespace"]["value"]
+ cloudone_scanner_stack_region = scanner_stack_deployment_outputs["cloudOneRegion"]["value"]
+ scanner_stack_identity_principal_id = scanner_stack_deployment_outputs["scannerIdentityPrincipalID"]["value"]
+
+ temp_stack_output_skeleton = {
+ "stackID": cloudone_scanner_stack_id,
+ "name": cloudone_scanner_stack_name,
+ "details": {
+ "tenantID": cloudone_scanner_stack_tenant_id,
+ "resourceGroupID": cloudone_scanner_stack_resource_group_id,
+ "scannerQueueNamespace": scanner_stack_queue_namespace,
+ "region": cloudone_scanner_stack_region,
+ "scannerIdentityPrincipalID": scanner_stack_identity_principal_id
+ },
+ "provider": "azure",
+ "type": "scanner"
+ }
+
+ temp_scanner_stacks_by_geography = None
+ if scanner_stacks_map_by_geographies_dict[scanner_stack_geography]:
+ temp_scanner_stacks_by_geography = scanner_stacks_map_by_geographies_dict[scanner_stack_geography]
+ temp_scanner_stacks_geography_list = []
+ temp_scanner_stacks_geography_list = scanner_stacks_map_by_geographies_dict[scanner_stack_geography]
+ temp_scanner_stacks_geography_list.append(temp_stack_output_skeleton)
+ temp_scanner_stacks_by_geography.update({storage_account_geography: temp_scanner_stacks_geography_list})
+ else:
+ temp_scanner_stacks_by_geography = {}
+ temp_scanner_stacks_by_geography.update({scanner_stack_geography: [temp_stack_output_skeleton]})
+
+ scanner_stacks_map_by_geographies_dict[scanner_stack_geography] = temp_scanner_stacks_by_geography
+
+
+ else:
+ # TODO: In these scenarios, use try...except to throw exceptions
+ logging.error("Deployment Failed. The deployment did not create any output(s). Check deployment status for more details on how to troubleshoot this issue.")
+ raise Exception("Deployment Failed. The deployment did not create any output(s). Check deployment status for more details on how to troubleshoot this issue.")
+
+ # Deploy Storage Stack for the storage_account, Associate to previously identified existing scanner stack
+ if cloudone_scanner_stack_id and scanner_stack_identity_principal_id and scanner_stack_queue_namespace:
+
+ for storage_account in storage_stacks_map_by_geographies_dict[storage_account_geography]:
+
+ # storage_stack_deployment_outputs =
+ deployments.deploy_fss_storage_stack(
+ subscription_id,
+ storage_account,
+ cloudone_scanner_stack_id,
+ scanner_stack_identity_principal_id,
+ scanner_stack_queue_namespace
+ )
+
+ # if storage_stack_deployment_outputs:
+ # print(str(storage_stack_deployment_outputs))
+
+ else:
+ # TODO: In these scenarios, use try...except to throw exceptions
+ logging.error("Deployment Failed. The deployment did not create any output(s). Check deployment status for more details on how to troubleshoot this issue.")
+ raise Exception("Deployment Failed. The deployment did not create any output(s). Check deployment status for more details on how to troubleshoot this issue.")
+
+ else:
+ logging.info("Found a geography mismatch... " + str(scanner_stack_geography) + " ~ " + str(storage_account_geography) + ". Retrying...")
+ else:
+ logging.info("Skipping '" + str(scanner_stack_geography) + "' geography as no new storage stacks are needed in this region... ")
\ No newline at end of file
diff --git a/deployment/azure-python-deploy-to-all-existing-storage/deployToAllExistingStorageAccounts/deploymentHandler/deployment_one_to_one.py b/deployment/azure-python-deploy-to-all-existing-storage/deployToAllExistingStorageAccounts/deploymentHandler/deployment_one_to_one.py
new file mode 100644
index 00000000..af259c9c
--- /dev/null
+++ b/deployment/azure-python-deploy-to-all-existing-storage/deployToAllExistingStorageAccounts/deploymentHandler/deployment_one_to_one.py
@@ -0,0 +1,50 @@
+import logging
+
+import deployments
+import utils
+
+def deploy_one_to_one(subscription_id, azure_supported_locations_obj_by_geography_groups_dict, fss_supported_regions_list, azure_storage_account_list):
+
+ for storage_account in azure_storage_account_list:
+
+ cloudone_scanner_stack_id = scanner_stack_identity_principal_id = scanner_stack_queue_namespace = None
+
+ # Deploy One Scanner Stack
+ scanner_stack_deployment_outputs = deployments.deploy_fss_scanner_stack(
+ subscription_id,
+ azure_supported_locations_obj_by_geography_groups_dict,
+ storage_account["location"],
+ fss_supported_regions_list,
+ azure_storage_account_name = storage_account["name"],
+ scanner_stack_name = "fss-scanner-" + utils.trim_location_name(storage_account["location"]) + "-" + utils.trim_resource_name(storage_account["name"], 12, 12) + "-autodeploy"
+ )
+
+ if scanner_stack_deployment_outputs:
+
+ cloudone_scanner_stack_id = scanner_stack_deployment_outputs["cloudOneScannerStackId"]
+ scanner_stack_identity_principal_id = scanner_stack_deployment_outputs["scannerIdentityPrincipalID"]["value"]
+ scanner_stack_queue_namespace = scanner_stack_deployment_outputs["scannerQueueNamespace"]["value"]
+
+ else:
+ # TODO: In these scenarios, use try...except to throw exceptions
+ logging.error("Deployment Failed. The deployment did not create any output(s). Check deployment status for more details on how to troubleshoot this issue.")
+ raise Exception("Deployment Failed. The deployment did not create any output(s). Check deployment status for more details on how to troubleshoot this issue.")
+
+ # Deploy One Storage Stack, Associate to previously created One Scanner Stack
+ if cloudone_scanner_stack_id and scanner_stack_identity_principal_id and scanner_stack_queue_namespace:
+
+ # storage_stack_deployment_outputs =
+ deployments.deploy_fss_storage_stack(
+ subscription_id,
+ storage_account,
+ cloudone_scanner_stack_id,
+ scanner_stack_identity_principal_id,
+ scanner_stack_queue_namespace
+ )
+
+ # if storage_stack_deployment_outputs:
+ # print("\tstorage_stack_deployment_outputs - " + str(storage_stack_deployment_outputs))
+
+ else:
+ logging.error("Deployment Failed. The deployment did not create any output(s). Check deployment status for more details on how to troubleshoot this issue.")
+ raise Exception("Deployment Failed. The deployment did not create any output(s). Check deployment status for more details on how to troubleshoot this issue.")
\ No newline at end of file
diff --git a/deployment/azure-python-deploy-to-all-existing-storage/deployToAllExistingStorageAccounts/deploymentHandler/deployment_single.py b/deployment/azure-python-deploy-to-all-existing-storage/deployToAllExistingStorageAccounts/deploymentHandler/deployment_single.py
new file mode 100644
index 00000000..2c424b81
--- /dev/null
+++ b/deployment/azure-python-deploy-to-all-existing-storage/deployToAllExistingStorageAccounts/deploymentHandler/deployment_single.py
@@ -0,0 +1,51 @@
+import logging
+
+import deployments
+import cloudone_fss_api
+import utils
+
+def deploy_single(subscription_id, azure_supported_locations_obj_by_geography_groups_dict, fss_supported_regions_list, azure_storage_account_list):
+
+ # Get All Scanner Stacks in this Azure Subscription
+ scanner_stacks_list = cloudone_fss_api.filter_stacks_by_subscription_id(subscription_id, cloudone_fss_api.get_scanner_stacks())
+
+ temp_storage_account_dict = {}
+ for storage_account in azure_storage_account_list:
+
+ if storage_account["location"] not in temp_storage_account_dict:
+ temp_storage_account_dict.update({storage_account["location"]: 1})
+ else:
+ temp_storage_account_dict[storage_account["location"]] = temp_storage_account_dict[storage_account["location"]] + 1
+
+ values_list = list(temp_storage_account_dict.values())
+ max_storage_account_count = max(values_list)
+
+ cloudone_scanner_stack_id = scanner_stack_identity_principal_id = scanner_stack_queue_namespace = None
+
+ # If no Scanner Stack(s) exist in this Azure subscription
+ if not len(scanner_stacks_list["stacks"]):
+
+ # Deploy One Scanner Stack
+ scanner_stack_deployment_outputs = deployments.deploy_fss_scanner_stack(subscription_id, azure_supported_locations_obj_by_geography_groups_dict, azure_location=utils.get_dict_key(temp_storage_account_dict, max_storage_account_count), fss_supported_regions_list=fss_supported_regions_list)
+
+ if scanner_stack_deployment_outputs:
+
+ cloudone_scanner_stack_id = scanner_stack_deployment_outputs["cloudOneScannerStackId"]
+ scanner_stack_identity_principal_id = scanner_stack_deployment_outputs["scannerIdentityPrincipalID"]["value"]
+ scanner_stack_queue_namespace = scanner_stack_deployment_outputs["scannerQueueNamespace"]["value"]
+
+ else:
+ logging.error("Deployment Failed. The deployment did not create any output(s). Check deployment status for more details on how to troubleshoot this issue.")
+ raise Exception("Deployment Failed. The deployment did not create any output(s). Check deployment status for more details on how to troubleshoot this issue.")
+
+ else:
+ # TODO: Choose the Scanner Stack in this subscription with the lowest number of storage stacks
+ cloudone_scanner_stack_id = scanner_stacks_list["stacks"][0]["stackID"]
+ scanner_stack_identity_principal_id = scanner_stacks_list["stacks"][0]["details"]["scannerIdentityPrincipalID"]
+ scanner_stack_queue_namespace = scanner_stacks_list["stacks"][0]["details"]["scannerQueueNamespace"]
+
+ if cloudone_scanner_stack_id and scanner_stack_identity_principal_id and scanner_stack_queue_namespace:
+
+ for storage_account in azure_storage_account_list:
+
+ deployments.deploy_fss_storage_stack(subscription_id, storage_account, cloudone_scanner_stack_id, scanner_stack_identity_principal_id, scanner_stack_queue_namespace)
\ No newline at end of file
diff --git a/deployment/azure-python-deploy-to-all-existing-storage/deployToAllExistingStorageAccounts/deploymentHandler/deployments.py b/deployment/azure-python-deploy-to-all-existing-storage/deployToAllExistingStorageAccounts/deploymentHandler/deployments.py
new file mode 100644
index 00000000..c32fcd2c
--- /dev/null
+++ b/deployment/azure-python-deploy-to-all-existing-storage/deployToAllExistingStorageAccounts/deploymentHandler/deployments.py
@@ -0,0 +1,176 @@
+import logging
+
+import utils
+import locations
+import geographies
+import service_principal
+import cloudone_fss_api
+
+from Deployer import Deployer
+
+def deploy_fss_scanner_stack(subscription_id, azure_supported_locations_obj_by_geography_groups_dict, azure_location, fss_supported_regions_list, azure_storage_account_name=None, scanner_stack_name=None, resource_group_name=None, geography_group_name=None):
+
+ # File Storage Security Scanner Stack deployment templates can be found at https://github.com/trendmicro/cloudone-filestorage-deployment-templates/blob/master/azure/FSS-Scanner-Stack-Template.json or in the ./templates directory
+
+ app_id = str(utils.get_config_from_file("app_id"))
+ cloudone_region = str(utils.get_cloudone_region())
+
+ if app_id and cloudone_region:
+
+ if azure_location not in fss_supported_regions_list:
+
+ logging.info("Azure location (" + azure_location + ") is not part of the FSS supported regions. Choosing the next Azure recommended location in the same geography.")
+ geography_group_name = geographies.get_geography_group_from_location(azure_location, azure_supported_locations_obj_by_geography_groups_dict)
+
+ azure_location = locations.get_azure_recommended_location_by_geography_group(geography_group_name, azure_supported_locations_obj_by_geography_groups_dict, fss_supported_regions_list)
+ logging.info("New Azure location: " + azure_location)
+
+ scanner_stack_name = "fss-scanner-" + utils.trim_location_name(azure_location) + "-" + utils.trim_resource_name(azure_storage_account_name, 12, 12) + "-autodeploy"
+
+ if not geography_group_name:
+ geography_group_name = geographies.get_geography_group_from_location(azure_location, azure_supported_locations_obj_by_geography_groups_dict)
+
+ if not scanner_stack_name:
+ scanner_stack_name = "fss-scanner-" + geography_group_name + "-" + azure_location + "-geo-autodeploy"
+ if not resource_group_name:
+ resource_group_name = scanner_stack_name + "-rg"
+
+ logging.info("Initializing the Deployer class with subscription id: {}, resource group: {} ...".format(subscription_id, resource_group_name))
+
+ service_principal_id = service_principal.get_service_principal_id(app_id)
+
+ scanner_stack_params = {
+ 'FileStorageSecurityServicePrincipalID': service_principal_id,
+ 'CloudOneRegion': cloudone_region,
+ 'StackPackageLocation': 'https://file-storage-security.s3.amazonaws.com',
+ 'Version': 'latest',
+ 'SharedAccessSignature': ''
+ }
+
+ # Initialize the deployer class
+ deployer = Deployer(subscription_id, resource_group_name)
+
+ # Deploy the template
+ logging.info("Beginning the deployment...")
+
+ deployment_outputs = deployer.deploy(azure_location, "scanner", scanner_stack_params)
+
+ cloudone_scanner_stack_id = cloudone_fss_api.register_scanner_stack_with_cloudone(deployment_outputs["scannerStackResourceGroupID"]["value"], deployment_outputs["tenantID"]["value"])
+ deployment_outputs.update({'cloudOneScannerStackId': cloudone_scanner_stack_id})
+
+ logging.info("Done deploying!!")
+
+ return deployment_outputs
+
+def deploy_fss_storage_stack(subscription_id, storage_account, cloudone_scanner_stack_id, scanner_identity_principal_id, scanner_queue_namespace, storage_stack_name=None, resource_group_name=None):
+
+ # File Storage Security Storage Stack deployment template can be found at https://github.com/trendmicro/cloudone-filestorage-deployment-templates/blob/master/azure/FSS-Storage-Stack-Template.json or in the ./templates directory
+
+ app_id = str(utils.get_config_from_file("app_id"))
+ cloudone_region = str(utils.get_cloudone_region())
+
+ if not storage_stack_name:
+ storage_stack_name = "fss-storage-" + utils.trim_location_name(storage_account["location"]) + "-" + utils.trim_resource_name(storage_account["name"], 12, 12) + "-autodeploy"
+
+ if not resource_group_name:
+ resource_group_name = storage_stack_name + "-rg"
+
+ logging.info("Initializing the Deployer class with subscription id: {}, resource group: {}...".format(subscription_id, resource_group_name))
+
+ service_principal_id = service_principal.query_service_principal(app_id)
+
+ # TODO: Check for Azure SDK RBAC Create Service Principal ID
+ if not service_principal_id:
+ service_principal_id = utils.azure_cli_run_command('ad sp create --id ' + app_id)
+
+ logging.info("service_principal_id - " + str(service_principal_id))
+
+ storage_stack_params = {
+ 'FileStorageSecurityServicePrincipalID': service_principal_id,
+ 'CloudOneRegion': cloudone_region,
+ 'ScannerIdentityPrincipalID': scanner_identity_principal_id,
+ 'ScannerQueueNamespace': scanner_queue_namespace,
+ 'BlobStorageAccountResourceID': storage_account["id"],
+ 'BlobSystemTopicExist': 'No',
+ 'BlobSystemTopicName': 'BlobEventTopic-' + utils.trim_resource_name(storage_account["name"], 40, 40),
+ 'UpdateScanResultToBlobMetadata': 'Yes',
+ 'ReportObjectKey': 'No',
+ 'StackPackageLocation': 'https://file-storage-security.s3.amazonaws.com',
+ 'Version': 'latest',
+ 'SharedAccessSignature': ''
+ }
+
+ # Initialize the deployer class
+ deployer = Deployer(subscription_id, resource_group_name)
+
+ logging.info("Beginning the deployment...")
+ # Deploy the template
+ deployment_outputs = deployer.deploy(storage_account["location"], "storage", storage_stack_params)
+
+ cloudone_fss_api.register_storage_stack_with_cloudone(cloudone_scanner_stack_id, deployment_outputs["storageStackResourceGroupID"]["value"], deployment_outputs["tenantID"]["value"])
+
+ logging.info("Done deploying!!")
+
+ return deployment_outputs
+
+def build_geography_dict(azure_supported_locations_obj_by_geography_groups_dict, azure_storage_account_list):
+ # Inventory of existing storage accounts
+ # unique_storage_account_geographies = geographies.get_geographies_from_storage_accounts(azure_storage_account_list, azure_supported_locations_obj_by_geography_groups_dict)
+
+ # Scanner Stack Map
+ scanner_stacks_map_by_geographies_dict = geographies.build_geographies_map_dict()
+
+ # Storage Stacks Map
+ storage_stacks_map_by_geographies_dict = geographies.build_geographies_map_dict()
+
+ # Populate the Scanner stack map by geographies
+ # Inventory of existing FSS scanner stacks by Azure location
+ existing_scanner_stacks_by_location = cloudone_fss_api.map_scanner_stacks_to_azure_locations()
+
+ if existing_scanner_stacks_by_location:
+
+ # logging.info("Scanner Stack Locations: " + str(existing_scanner_stacks_by_location))
+
+ for existing_scanner_stack_by_location in existing_scanner_stacks_by_location:
+
+ scanner_stack_geography = geographies.get_geography_group_from_location(existing_scanner_stack_by_location, azure_supported_locations_obj_by_geography_groups_dict)
+
+ scanner_stacks_map_by_geographies_dict[scanner_stack_geography] = existing_scanner_stacks_by_location[existing_scanner_stack_by_location]
+
+ # Populate the Storage stack map by geographies
+ for storage_account in azure_storage_account_list:
+
+ if existing_scanner_stacks_by_location:
+
+ for existing_scanner_stack_by_location in existing_scanner_stacks_by_location:
+
+ # if "storageStacks" not in existing_scanner_stacks_by_location[existing_scanner_stack_by_location][0].keys():
+
+ # existing_scanner_stacks_by_location[existing_scanner_stack_by_location][0]["storageStacks"] = []
+
+ existing_scanner_stack_geography = geographies.get_geography_group_from_location(existing_scanner_stacks_by_location[existing_scanner_stack_by_location][0]["details"]["region"], azure_supported_locations_obj_by_geography_groups_dict)
+ storage_account_geography = geographies.get_geography_group_from_location(storage_account["location"], azure_supported_locations_obj_by_geography_groups_dict)
+
+ if existing_scanner_stack_geography == storage_account_geography:
+
+ temp_storage_stacks_dict = storage_stacks_map_by_geographies_dict[existing_scanner_stack_geography]
+
+ temp_storage_stacks_dict.append(storage_account)
+
+ storage_stacks_map_by_geographies_dict[existing_scanner_stack_geography] = temp_storage_stacks_dict
+
+ logging.info("Found a match... " + str(existing_scanner_stack_geography) + " = " + str(storage_account_geography))
+
+ else:
+ logging.info("Found a mismatch... " + str(existing_scanner_stack_geography) + " ~ " + str(storage_account_geography))
+ else:
+ storage_account_geography = geographies.get_geography_group_from_location(storage_account["location"], azure_supported_locations_obj_by_geography_groups_dict)
+
+ temp_storage_stacks_dict = storage_stacks_map_by_geographies_dict[storage_account_geography]
+
+ temp_storage_stacks_dict.append(storage_account)
+
+ storage_stacks_map_by_geographies_dict[storage_account_geography] = temp_storage_stacks_dict
+
+
+ return scanner_stacks_map_by_geographies_dict, storage_stacks_map_by_geographies_dict
\ No newline at end of file
diff --git a/deployment/azure-python-deploy-to-all-existing-storage/deployToAllExistingStorageAccounts/deploymentHandler/exclude.txt b/deployment/azure-python-deploy-to-all-existing-storage/deployToAllExistingStorageAccounts/deploymentHandler/exclude.txt
new file mode 100644
index 00000000..b439349a
--- /dev/null
+++ b/deployment/azure-python-deploy-to-all-existing-storage/deployToAllExistingStorageAccounts/deploymentHandler/exclude.txt
@@ -0,0 +1,3 @@
+my-bucket-exclusion-18764878
+another-exclude-bucket-96498703
+this-bucket-should-not-be-monitored-45326538
\ No newline at end of file
diff --git a/deployment/azure-python-deploy-to-all-existing-storage/deployToAllExistingStorageAccounts/deploymentHandler/function.json b/deployment/azure-python-deploy-to-all-existing-storage/deployToAllExistingStorageAccounts/deploymentHandler/function.json
new file mode 100644
index 00000000..44b3ca23
--- /dev/null
+++ b/deployment/azure-python-deploy-to-all-existing-storage/deployToAllExistingStorageAccounts/deploymentHandler/function.json
@@ -0,0 +1,12 @@
+{
+    "scriptFile": "handler.py",
+ "entryPoint": "main",
+ "bindings": [
+ {
+ "schedule": "* 1 * * * *",
+ "name": "deployFssTimer",
+ "type": "timerTrigger",
+ "direction": "in"
+ }
+ ]
+}
\ No newline at end of file
diff --git a/deployment/azure-python-deploy-to-all-existing-storage/deployToAllExistingStorageAccounts/deploymentHandler/geographies.py b/deployment/azure-python-deploy-to-all-existing-storage/deployToAllExistingStorageAccounts/deploymentHandler/geographies.py
new file mode 100644
index 00000000..17564e58
--- /dev/null
+++ b/deployment/azure-python-deploy-to-all-existing-storage/deployToAllExistingStorageAccounts/deploymentHandler/geographies.py
@@ -0,0 +1,40 @@
+import locations
+
+def get_geographies_from_storage_accounts(azure_storage_account_list, azure_supported_locations_obj_by_geography_groups_dict):
+
+ unique_scanner_stack_list = []
+
+ for storage_account in azure_storage_account_list:
+
+ azure_geography_group = get_geography_group_from_location(storage_account["location"], azure_supported_locations_obj_by_geography_groups_dict)
+
+ # logging.info("Storage Account - " + str(azure_geography_group))
+
+ if azure_geography_group not in unique_scanner_stack_list:
+ unique_scanner_stack_list.append(azure_geography_group)
+
+ return unique_scanner_stack_list
+
+def get_geography_group_from_location(azure_location_name, azure_geography_groups_dict): # eastus, { azure_geography_groups_dict ... }
+
+ for azure_geography_group_item in azure_geography_groups_dict:
+ for azure_location in azure_geography_groups_dict[azure_geography_group_item]:
+ if azure_location_name == azure_location["name"]:
+ return azure_geography_group_item
+ return None
+
+def build_geographies_map_dict():
+
+ geography_map_dict = {}
+
+ azure_supported_locations_obj_by_geography_groups_dict = locations.get_azure_supported_locations()
+
+ for azure_geography_group in azure_supported_locations_obj_by_geography_groups_dict:
+ if azure_geography_group not in geography_map_dict:
+ geography_map_dict.update({azure_geography_group: []})
+
+ # Remove any logical Azure Locations in Map Dictionary
+    geography_map_dict.pop("logical", None)
+
+ return geography_map_dict
+
diff --git a/deployment/azure-python-deploy-to-all-existing-storage/deployToAllExistingStorageAccounts/deploymentHandler/handler.py b/deployment/azure-python-deploy-to-all-existing-storage/deployToAllExistingStorageAccounts/deploymentHandler/handler.py
new file mode 100644
index 00000000..632e65c0
--- /dev/null
+++ b/deployment/azure-python-deploy-to-all-existing-storage/deployToAllExistingStorageAccounts/deploymentHandler/handler.py
@@ -0,0 +1,119 @@
+import logging
+
+import utils
+import locations
+import storage_accounts
+import deployment_geographies
+import deployment_one_to_one
+import deployment_single
+
+# TODO: Remove unused code and dependencies
+
+'''
+This script requires an additional text file for storage accounts to exclude, found in the "exclude.txt" file
+Will deploy FSS Storage stack(s) to all existing or new storage accounts, defined by your DEPLOYMENT_MODE environment variable
+All FSS Storage stack(s) will link to FSS Scanner Stack(s), defined by your DEPLOYMENT_MODEL environment variable
+'''
+
+exclusion_file_name = 'exclude.txt'
+DEPLOYMENT_MODES = {
+ 'existing', # Deploy FSS scanning to existing buckets (runs one-time)
+ 'new', # Deploy FSS scanning for new storage accounts (via an event listener)
+}
+DEFAULT_DEPLOYMENT_MODE = 'existing'
+
+DEPLOYMENT_MODELS = {
+ 'geographies', # One per geographyGroup, Default
+ 'one-to-one', # One per Storage Account
+ 'single' # Just One for all Storage Accounts (not recommended for multi-region storage accounts)
+}
+DEFAULT_DEPLOYMENT_MODEL = 'geographies'
+
+FSS_LOOKUP_TAG = 'AutoDeployFSS'
+# TODO: Add tags to FSS deployed stacks/resources
+FSS_MONITORED_TAG = 'FSSMonitored'
+FSS_SUPPORTED_REGIONS = ["centralus", "eastus", "eastus2", "southcentralus", "westus", "westus2", "centralindia", "eastasia", "japaneast", "koreacentral", "southeastasia", "francecentral", "germanywestcentral", "northeurope", "switzerlandnorth", "uksouth", "westeurope", "uaenorth", "brazilsouth"] # List last updated on 2022-07-19T11:32:11-04:00, from https://cloudone.trendmicro.com/docs/file-storage-security/supported-azure/
+
+def main():
+ '''
+ Mind map (Existing Storage Accounts)
+ --------------------------------------
+
+ - getStorageAccounts() with the FSS_LOOKUP_TAG set to True -- DONE
+ - Iterate over each Storage Account and deploy atleast 1 Scanner Stack -- DONE
+ - Iterate over each Storage Account and build Storage Stack and Associate Scanner Stack in the process -- DONE
+ - Display warning on scalability of the Scanner Stack and Azure Service Quotas. Recommend to split into multiple Scanner Stacks by raising a support ticket - TODO
+ - Recommend scanning all existing blobs in the Storage Account(s) that are not scanned by FSS for reasons of compliance and better OpsSec - TODO
+
+ Mind map (New Storage Accounts)
+ ---------------------------------
+
+ - Setup a listener for new Storage Accounts. The listener gets triggered once the creation event occurs
+ - getStorageAccounts() with the new Storage Account name
+ - Build a Storage Stack for the new Storage Account
+ - Identify an existing Scanner Stack that can be used to associate the new Storage Stack
+ - Associate Storage Stack(s) with the Scanner Stack
+ - Display warning on scalability of the Scanner Stack and Azure Service Quotas. Recommend to split into multiple Scanner Stacks by raising a support ticket
+
+ Note: All new blobs in the new Storage Account should be scanned by FSS as and when they are dumped in the Storage Account
+ '''
+
+ subscription_id = utils.get_subscription_id()
+
+ azure_supported_locations_obj_by_geography_groups_dict = locations.get_azure_supported_locations()
+
+ # Get List of Storage Accounts to deploy FSS
+ azure_storage_account_list = []
+
+ deployment_mode = utils.get_deployment_mode_from_env('DEPLOYMENT_MODE', DEPLOYMENT_MODES, DEFAULT_DEPLOYMENT_MODE)
+    logging.info("Using Deployment Mode: %s", str(deployment_mode))
+
+ if deployment_mode == 'existing':
+ azure_storage_account_list = storage_accounts.get_storage_accounts(FSS_LOOKUP_TAG)
+
+ if azure_storage_account_list:
+ azure_storage_account_list = utils.apply_exclusions(exclusion_file_name, azure_storage_account_list)
+ else:
+ logging.error('No Storage Account(s) match the \"' + FSS_LOOKUP_TAG + '\" tag. Exiting ...')
+ raise Exception('No Storage Account(s) match the \"' + FSS_LOOKUP_TAG + '\" tag. Exiting ...')
+
+ if azure_storage_account_list:
+ azure_storage_account_list = utils.remove_storage_accounts_with_cloudone_storage_stacks(azure_storage_account_list)
+ else:
+        logging.error('All Storage Account(s) matching the \"' + FSS_LOOKUP_TAG + '\" tag were excluded or already have storage stacks. Exiting ...')
+        raise Exception('All Storage Account(s) matching the \"' + FSS_LOOKUP_TAG + '\" tag were excluded or already have storage stacks. Exiting ...')
+
+ else: # deployment_mode == 'new'
+ # TODO: Build an event listener to trigger deployment based on Storage Account creation events.
+        logging.warning('Deploying to new storage account based on an event listener is yet to be built into this tool.')
+ raise Exception('Deploying to new storage account based on an event listener is yet to be built into this tool.')
+
+ # Get Deployment Model - geographies, one-to-one or single
+ deployment_model = utils.get_deployment_model_from_env('DEPLOYMENT_MODEL', DEPLOYMENT_MODELS, DEFAULT_DEPLOYMENT_MODEL)
+
+ if deployment_model == 'geographies':
+
+ logging.info("Executing deployments geographically...")
+
+ subscription_id = utils.get_subscription_id()
+
+ deployment_geographies.deploy_geographically(subscription_id, azure_supported_locations_obj_by_geography_groups_dict, FSS_SUPPORTED_REGIONS, azure_storage_account_list)
+
+ elif deployment_model == 'one-to-one':
+
+ logging.info("Executing deployments one by one (1 storage stack <=> 1 scanner stack)...")
+
+ subscription_id = utils.get_subscription_id()
+
+ deployment_one_to_one.deploy_one_to_one(subscription_id, azure_supported_locations_obj_by_geography_groups_dict, FSS_SUPPORTED_REGIONS, azure_storage_account_list)
+
+ elif deployment_model == 'single':
+
+ logging.info("Executing deployments in single (all storage stacks are mapped to 1 scanner stack)...")
+
+ subscription_id = utils.get_subscription_id()
+
+ deployment_single.deploy_single(subscription_id, azure_supported_locations_obj_by_geography_groups_dict, FSS_SUPPORTED_REGIONS, azure_storage_account_list)
+
+if __name__ == "__main__":
+ main()
\ No newline at end of file
diff --git a/deployment/azure-python-deploy-to-all-existing-storage/deployToAllExistingStorageAccounts/deploymentHandler/keyvault.py b/deployment/azure-python-deploy-to-all-existing-storage/deployToAllExistingStorageAccounts/deploymentHandler/keyvault.py
new file mode 100644
index 00000000..7598f530
--- /dev/null
+++ b/deployment/azure-python-deploy-to-all-existing-storage/deployToAllExistingStorageAccounts/deploymentHandler/keyvault.py
@@ -0,0 +1,21 @@
+from azure.identity import DefaultAzureCredential
+from azure.keyvault.secrets import SecretClient
+
+import utils
+
+def get_secret_from_keyvault(secret_key):
+ keyvault_uri = str(utils.get_config_from_file('keyvault_uri'))
+ if keyvault_uri:
+ credential = DefaultAzureCredential()
+ secret_client = SecretClient(vault_url=keyvault_uri, credential=credential)
+ return secret_client.get_secret(secret_key)
+ return None
+
+def put_secret_into_keyvault(secret_key, secret_value):
+ keyvault_uri = str(utils.get_config_from_file('keyvault_uri'))
+ if keyvault_uri:
+ credential = DefaultAzureCredential()
+ secret_client = SecretClient(vault_url=keyvault_uri, credential=credential)
+ secret_client.set_secret(name=secret_key, value=secret_value)
+ return True
+ return None
\ No newline at end of file
diff --git a/deployment/azure-python-deploy-to-all-existing-storage/deployToAllExistingStorageAccounts/deploymentHandler/locations.py b/deployment/azure-python-deploy-to-all-existing-storage/deployToAllExistingStorageAccounts/deploymentHandler/locations.py
new file mode 100644
index 00000000..59c8736b
--- /dev/null
+++ b/deployment/azure-python-deploy-to-all-existing-storage/deployToAllExistingStorageAccounts/deploymentHandler/locations.py
@@ -0,0 +1,76 @@
+from azure.mgmt.subscription import SubscriptionClient
+from azure.identity import ClientSecretCredential
+
+import os
+import utils
+import random
+
+# get_azure_recommended_location_by_geography_group - Pick one Azure recommended location in the geography location.
+def get_azure_recommended_location_by_geography_group(azure_geography_group, azure_geography_groups_dict, fss_supported_regions_list):
+ for azure_geography_group_item in azure_geography_groups_dict:
+ if azure_geography_group == azure_geography_group_item:
+
+ temp_azure_geography_group_list = []
+ for azure_location in azure_geography_groups_dict[azure_geography_group]:
+
+ if azure_location["metadata"]["regionCategory"] and azure_location["metadata"]["regionCategory"] == "Recommended" and azure_location["name"] in fss_supported_regions_list:
+
+ temp_azure_geography_group_list.append(azure_location)
+
+            return random.choice(temp_azure_geography_group_list)["name"] if temp_azure_geography_group_list else None
+
+# get_azure_supported_locations - Lists all supported locations for Azure in the current subscription.
+def get_azure_supported_locations():
+ azure_locations_list = utils.azure_cli_run_command('account list-locations')
+
+ azure_supported_locations_obj_by_geography_groups = {}
+
+ for azure_location in azure_locations_list:
+ if azure_location["metadata"]["geographyGroup"] and utils.trim_spaces(azure_location["metadata"]["geographyGroup"]) not in azure_supported_locations_obj_by_geography_groups.keys():
+ azure_supported_locations_obj_by_geography_groups.update({utils.trim_spaces(azure_location["metadata"]["geographyGroup"]): []})
+ elif not azure_location["metadata"]["geographyGroup"]:
+ azure_supported_locations_obj_by_geography_groups.update({"logical": []})
+
+ for azure_location in azure_locations_list:
+ if azure_location["metadata"]["regionType"] == "Physical":
+ azure_supported_locations_obj_by_geography_groups[utils.trim_spaces(azure_location["metadata"]["geographyGroup"])].append(azure_location)
+ else:
+ azure_supported_locations_obj_by_geography_groups["logical"].append(azure_location)
+
+ return azure_supported_locations_obj_by_geography_groups
+
+# get_azure_location_detail - Lists Azure location detail
+def get_azure_location_detail(azure_location_name):
+ azure_locations_list = utils.azure_cli_run_command('account list-locations')
+
+ for azure_location in azure_locations_list:
+ if azure_location["name"] == azure_location_name:
+ return azure_location
+
+# get_azure_supported_locations_sdk - Lists all supported locations for Azure in the current subscription via Azure SDK.
+def get_azure_supported_locations_sdk():
+
+ # credentials = ServicePrincipalCredentials(
+ # client_id=os.environ['AZURE_CLIENT_ID'],
+ # secret=os.environ['AZURE_CLIENT_SECRET'],
+ # tenant=os.environ['AZURE_TENANT_ID']
+ # )
+
+ credentials = ClientSecretCredential(
+ client_id=os.environ['AZURE_CLIENT_ID'],
+ client_secret=os.environ['AZURE_CLIENT_SECRET'],
+ tenant_id=os.environ['AZURE_TENANT_ID']
+ )
+
+ # credentials = DefaultAzureCredential(exclude_environment_credential=False)
+ subscription_client = SubscriptionClient(credentials, api_version='2021-01-01')
+
+ subscription_id = utils.get_subscription_id()
+
+ azure_locations_iter = subscription_client.subscriptions.list_locations(subscription_id)
+
+ azure_locations_list = []
+ for location in azure_locations_iter:
+ azure_locations_list.append(location.name)
+
+ return azure_locations_list
\ No newline at end of file
diff --git a/deployment/azure-python-deploy-to-all-existing-storage/deployToAllExistingStorageAccounts/deploymentHandler/rbac.py b/deployment/azure-python-deploy-to-all-existing-storage/deployToAllExistingStorageAccounts/deploymentHandler/rbac.py
new file mode 100644
index 00000000..ab7ef4de
--- /dev/null
+++ b/deployment/azure-python-deploy-to-all-existing-storage/deployToAllExistingStorageAccounts/deploymentHandler/rbac.py
@@ -0,0 +1,67 @@
+# import azure.graphrbac.models
+from azure.graphrbac import GraphRbacManagementClient
+from azure.identity import DefaultAzureCredential
+
+import utils
+
+def createServicePrincipal():
+
+ tenant_id = str(utils.get_config_from_file("tenant_id"))
+ app_id = str(utils.get_config_from_file("app_id"))
+ credentials = DefaultAzureCredential(exclude_environment_credential=False)
+
+ if tenant_id and credentials and app_id:
+ graphrbac_client = GraphRbacManagementClient(
+ credentials,
+ tenant_id
+ )
+
+ # graphrbac_client = create_basic_client(
+ # azure.graphrbac.GraphRbacManagementClient,
+ # tenant_id=tenant_id
+ # )
+
+ # # Delete the app if already exists
+ # for app in graphrbac_client.applications.list(filter="displayName eq 'trendmicro_fss_app'"):
+ # graphrbac_client.applications.delete(app.object_id)
+
+ # app = graphrbac_client.applications.create({
+ # 'available_to_other_tenants': False,
+ # 'display_name': 'trendmicro_fss_app',
+ # 'identifier_uris': ['http://pytest_app.org'],
+ # 'app_roles': [{
+ # "allowed_member_types": ["User"],
+ # "description": "Creators can create Surveys",
+ # "display_name": "SurveyCreator",
+ # "id": "1b4f816e-5eaf-48b9-8613-7923830595ad", # Random, but fixed for tests
+ # "is_enabled": True,
+ # "value": "SurveyCreator"
+ # }]
+ # })
+
+ # # Take this opportunity to test get_objects_by_object_ids
+ # objects = graphrbac_client.objects.get_objects_by_object_ids({
+ # 'object_ids': [app.object_id],
+ # 'types': ['Application']
+ # })
+ # objects = list(objects)
+ # assert len(objects) == 1
+ # assert objects[0].display_name == 'pytest_app'
+
+ # apps = list(graphrbac_client.applications.list(
+ # filter="displayName eq 'pytest_app'"
+ # ))
+ # assert len(apps) == 1
+ # assert apps[0].app_roles[0].display_name == "SurveyCreator"
+
+ sp = graphrbac_client.service_principals.create({
+ 'app_id': app_id,
+ 'account_enabled': True
+ })
+
+ print(dir(sp))
+
+ # Testing getting SP id by app ID
+ result = graphrbac_client.applications.get_service_principals_id_by_app_id(app_id)
+
+ print("\n\tResult - \n\t" + str(result))
\ No newline at end of file
diff --git a/deployment/azure-python-deploy-to-all-existing-storage/deployToAllExistingStorageAccounts/deploymentHandler/resource_groups.py b/deployment/azure-python-deploy-to-all-existing-storage/deployToAllExistingStorageAccounts/deploymentHandler/resource_groups.py
new file mode 100644
index 00000000..c5a62b1c
--- /dev/null
+++ b/deployment/azure-python-deploy-to-all-existing-storage/deployToAllExistingStorageAccounts/deploymentHandler/resource_groups.py
@@ -0,0 +1,23 @@
+# import logging
+# from azure.mgmt.resource import ResourceManagementClient
+# from azure.identity import AzureCliCredential
+
+# def create_resource_group(subscription_id, resource_group_name, azure_location):
+
+# # Acquire a credential object using CLI-based authentication.
+# credential = AzureCliCredential()
+
+# # Obtain the management object for resources.
+# resource_client = ResourceManagementClient(credential, subscription_id)
+
+# # Provision the resource group.
+# resource_group = resource_client.resource_groups.create_or_update(
+# resource_group_name,
+# {
+# "location": azure_location
+# }
+# )
+
+# logging.info(f"Provisioned resource group {resource_group.name} in the {resource_group.location} region")
+
+# return resource_group_name
\ No newline at end of file
diff --git a/deployment/azure-python-deploy-to-all-existing-storage/deployToAllExistingStorageAccounts/deploymentHandler/service_principal.py b/deployment/azure-python-deploy-to-all-existing-storage/deployToAllExistingStorageAccounts/deploymentHandler/service_principal.py
new file mode 100644
index 00000000..0f4adfb0
--- /dev/null
+++ b/deployment/azure-python-deploy-to-all-existing-storage/deployToAllExistingStorageAccounts/deploymentHandler/service_principal.py
@@ -0,0 +1,23 @@
+import utils
+
+# query_service_principal: Return the service principal object id ("id" field)
+# for the given application id, or None when no service principal exists.
+def query_service_principal(appId):
+    sp_list = utils.azure_cli_run_command('ad sp list --all')
+    for sp_item in sp_list:
+        if sp_item["appId"] == appId:
+            return sp_item["id"]
+    return None
+
+# get_service_principal_id: Return the service principal object id for the
+# given application id, creating the service principal first if needed.
+def get_service_principal_id(app_id):
+    service_principal_id = query_service_principal(app_id)
+
+    if not service_principal_id:
+        # 'az ad sp create' prints the full service principal JSON object, not
+        # a bare id; extract "id" so callers always receive a string (or None).
+        created_sp = utils.azure_cli_run_command('ad sp create --id ' + app_id)
+        service_principal_id = created_sp["id"] if created_sp else None
+
+    return service_principal_id
diff --git a/deployment/azure-python-deploy-to-all-existing-storage/deployToAllExistingStorageAccounts/deploymentHandler/storage_accounts.py b/deployment/azure-python-deploy-to-all-existing-storage/deployToAllExistingStorageAccounts/deploymentHandler/storage_accounts.py
new file mode 100644
index 00000000..3e13b18b
--- /dev/null
+++ b/deployment/azure-python-deploy-to-all-existing-storage/deployToAllExistingStorageAccounts/deploymentHandler/storage_accounts.py
@@ -0,0 +1,28 @@
+import utils
+import logging
+
+# get_storage_accounts: Provides a list of all Azure Storage Accounts in this
+# subscription whose tags include FSS_LOOKUP_TAG with a truthy value. Each
+# entry carries the account's name, location, tags and resource id.
+def get_storage_accounts(FSS_LOOKUP_TAG):
+
+    storage_accounts_json_response = utils.azure_cli_run_command('storage account list')
+
+    logging.info("Tag Lookup: " + FSS_LOOKUP_TAG)
+
+    azure_storage_account_list = []
+
+    if storage_accounts_json_response:
+        for storage_account in storage_accounts_json_response:
+            tags = storage_account["tags"]
+            # Keep only accounts where the lookup tag exists and is truthy
+            # (same result as the original nested membership + value checks).
+            if tags and tags.get(FSS_LOOKUP_TAG):
+                azure_storage_account_list.append({
+                    "name": storage_account["name"],
+                    "location": storage_account["location"],
+                    "tags": tags,
+                    "id": storage_account["id"]
+                })
+
+    return azure_storage_account_list
diff --git a/deployment/azure-python-deploy-to-all-existing-storage/deployToAllExistingStorageAccounts/deploymentHandler/templates/FSS-Scanner-Stack-Template.json b/deployment/azure-python-deploy-to-all-existing-storage/deployToAllExistingStorageAccounts/deploymentHandler/templates/FSS-Scanner-Stack-Template.json
new file mode 100644
index 00000000..b42b846b
--- /dev/null
+++ b/deployment/azure-python-deploy-to-all-existing-storage/deployToAllExistingStorageAccounts/deploymentHandler/templates/FSS-Scanner-Stack-Template.json
@@ -0,0 +1,609 @@
+{
+ "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#",
+ "contentVersion": "1.0.0.0",
+ "parameters": {
+ "FileStorageSecurityServicePrincipalID": {
+ "type": "string",
+ "metadata": {
+ "description": "The ID of the Service Principal created for Trend Micro's App Registration."
+ },
+ "maxLength": 36
+ },
+ "CloudOneRegion": {
+ "type": "string",
+ "metadata": {
+        "description": "The region of the Trend Micro Cloud One services.\nFor more information, see supported Cloud One regions."
+
+ },
+ "defaultValue": "us-1"
+ },
+ "StackPackageLocation": {
+ "type": "string",
+ "defaultValue": "https://file-storage-security.s3.amazonaws.com",
+ "metadata": {
+ "description": "Warning: Do not modify the field. Modifications may cause your deployment to fail."
+ }
+ },
+ "Version": {
+ "type": "string",
+ "defaultValue": "latest",
+ "metadata": {
+ "description": "Warning: Do not modify the field. Modifications may cause your deployment to fail."
+ }
+ },
+ "SharedAccessSignature": {
+ "type": "secureString",
+ "defaultValue": "",
+ "metadata": {
+ "description": "Warning: Do not modify the field. Modifications may cause your deployment to fail."
+ }
+ }
+ },
+ "variables": {
+ "functionPackageLinkPrefix": "[concat(parameters('Version'), '/functions')]",
+ "unifiedSAS": "[if(empty(parameters('SharedAccessSignature')), '', if(startsWith(parameters('SharedAccessSignature'), '?'), parameters('SharedAccessSignature'), concat('?', parameters('SharedAccessSignature'))))]",
+ "scannerFunctionPackageLink": "[concat(parameters('StackPackageLocation'), '/', variables('functionPackageLinkPrefix'), '/azure-scanner.zip', variables('unifiedSAS'))]",
+
+ "stackUID": "[uniqueString(resourceGroup().id)]",
+ "scannerFunctionAppName": "[concat('tmsf0', variables('stackUID'))]",
+ "scannerFunctionAppAPIVersion": "2018-11-01",
+ "scannerServerFarmName": "[concat('tmssf0', variables('stackUID'))]",
+ "scannerStorageAccountName": "[concat('tmsa0', toLower(variables('stackUID')))]",
+ "location": "[resourceGroup().location]",
+ "scannerApplicationInsightsName": "[concat('tmai0', toLower(variables('stackUID')))]",
+ "scannerApplicationInsightsID": "[guid(concat('tmai0', toLower(variables('stackUID'))))]",
+ "scannerPatternRootFolder": "data",
+ "scannerPatternPath": "[concat(variables('scannerPatternRootFolder'), '/patterns')]",
+ "scannerQueueNamespaceName": "[concat('tmsbq0', toLower(variables('stackUID')))]",
+ "scannerQueueName": "scanner_queue",
+ "scannerBusQueueName": "[concat(variables('scannerQueueNamespaceName'), '/', variables('scannerQueueName'))]",
+ "scannerQueueSASKeyName": "RootManageSharedAccessKey",
+ "scannerQueueAuthRuleResourceId": "[resourceId('Microsoft.ServiceBus/namespaces/authorizationRules', variables('scannerQueueNamespaceName'), variables('scannerQueueSASKeyName'))]",
+ "scannerQueueAPIVersion": "2017-04-01",
+ "scannerLicenseKeyVaultName": "[concat('tmsl0', variables('stackUID'))]",
+ "scannerLicenseSecretName": "scanner-license",
+ "scanResultTopicName": "scan-result-topic",
+ "resourceManagementRoleName": "[guid(variables('stackUID'), 'resourceManagementRoleName')]",
+ "resourceManagementRoleAssignmentUID": "[guid(variables('stackUID'), 'resourceManagementRoleAssignment', parameters('FileStorageSecurityServicePrincipalID'))]",
+ "resourceManagementRoleDefinitionName": "[concat('FSS-ResourceManagementRole-', variables('resourceManagementRoleName'))]",
+ "scannerQueueManagementRoleName": "[guid(variables('stackUID'), 'scannerQueueManagementRoleName')]",
+ "scannerQueueManagementRoleAssignmentUID": "[guid(variables('stackUID'), 'scannerQueueManagementRoleAssignment', parameters('FileStorageSecurityServicePrincipalID'))]",
+ "scannerQueueManagementRoleDefinitionName": "[concat('FSS-ScannerQueueManagementRole-', variables('scannerQueueManagementRoleName'))]",
+ "scannerLicenseManagementRoleName": "[guid(variables('stackUID'), 'scannerLicenseManagementRoleName')]",
+ "scannerLicenseManagementRoleAssignmentUID": "[guid(variables('stackUID'), 'scannerLicenseManagementRoleAssignment', parameters('FileStorageSecurityServicePrincipalID'))]",
+ "scannerLicenseManagementRoleDefinitionName": "[concat('FSS-ScannerLicenseManagementRole-', variables('scannerLicenseManagementRoleName'))]",
+ "resourcesReaderRoleAssignmentUID": "[guid(resourceGroup().id, 'ResourcesReaderRoleAssignment')]",
+ "resourcesReaderRoleID": "acdd72a7-3385-48ef-bd42-f606fba81ae7",
+ "scannerLicenseReaderRoleAssignmentUID": "[guid(resourceGroup().id, 'scannerLicenseReaderRoleAssignment')]",
+ "keyVaultSecretsUserRoleID": "4633458b-17de-408a-b874-0445c86b69e6",
+ "functionsWorkerProcessCount": "1",
+ "fssAPIEndpoint": "[format('https://filestorage.{0}.cloudone.trendmicro.com/api/', parameters('CloudOneRegion'))]"
+ },
+ "resources": [
+ {
+ "type": "Microsoft.Authorization/roleAssignments",
+ "apiVersion": "2018-09-01-preview",
+ "name": "[variables('resourcesReaderRoleAssignmentUID')]",
+ "dependsOn": [
+ "[variables('scannerFunctionAppName')]"
+ ],
+ "scope": "[concat('Microsoft.Web/sites/', variables('scannerFunctionAppName'))]",
+ "properties": {
+ "roleDefinitionId": "[resourceId('Microsoft.Authorization/roleDefinitions', variables('resourcesReaderRoleID'))]",
+ "principalId": "[reference(resourceId('Microsoft.Web/sites', variables('scannerFunctionAppName')), variables('scannerFunctionAppAPIVersion'), 'full').identity.principalId]",
+ "principalType": "ServicePrincipal"
+ }
+ },
+ {
+ "type": "Microsoft.ServiceBus/namespaces",
+ "apiVersion": "2018-01-01-preview",
+ "name": "[variables('scannerQueueNamespaceName')]",
+ "location": "[variables('location')]",
+ "sku": {
+ "name": "Basic",
+ "tier": "Basic"
+ },
+ "properties": {
+ "zoneRedundant": false
+ },
+ "resources": [
+ {
+ "type": "Microsoft.ServiceBus/namespaces/queues",
+ "apiVersion": "2018-01-01-preview",
+ "name": "[variables('scannerBusQueueName')]",
+ "location": "[variables('location')]",
+ "dependsOn": [
+ "[resourceId('Microsoft.ServiceBus/namespaces', variables('scannerQueueNamespaceName'))]"
+ ],
+ "properties": {
+ "lockDuration": "PT3M",
+ "maxSizeInMegabytes": 1024,
+ "requiresDuplicateDetection": false,
+ "requiresSession": false,
+ "defaultMessageTimeToLive": "PT1H",
+ "deadLetteringOnMessageExpiration": true,
+ "enableBatchedOperations": false,
+ "maxDeliveryCount": 3,
+ "status": "Active",
+ "enablePartitioning": false,
+ "enableExpress": false
+ }
+ }
+ ]
+ },
+ {
+ "name": "[variables('scannerApplicationInsightsName')]",
+ "type": "microsoft.insights/components",
+ "kind": "web",
+ "location": "[variables('location')]",
+ "apiVersion": "2015-05-01",
+ "properties": {
+ "Application_Type": "web",
+ "ApplicationId": "[variables('scannerApplicationInsightsID')]"
+ }
+ },
+ {
+ "type": "Microsoft.Storage/storageAccounts",
+ "apiVersion": "2019-06-01",
+ "name": "[variables('scannerStorageAccountName')]",
+ "location": "[variables('location')]",
+ "sku": {
+ "name": "Standard_LRS"
+ },
+ "kind": "Storage",
+ "properties": {
+ "minimumTlsVersion": "TLS1_2"
+ }
+ },
+ {
+ "type": "Microsoft.Resources/deploymentScripts",
+ "apiVersion": "2020-10-01",
+ "name": "downloadPatterns",
+ "location": "[resourceGroup().location]",
+ "kind": "AzureCLI",
+ "dependsOn": [
+ "[variables('scannerFunctionAppName')]"
+ ],
+ "properties": {
+ "forceUpdateTag": "20210331",
+ "azCliVersion": "2.3.1",
+ "storageAccountSettings": {
+ "storageAccountName": "[variables('scannerStorageAccountName')]",
+ "storageAccountKey": "[listKeys(resourceId('Microsoft.Storage/storageAccounts', variables('scannerStorageAccountName')), '2019-06-01').keys[0].value]"
+ },
+ "environmentVariables": [
+ {
+ "name": "AZURE_STORAGE_ACCOUNT",
+ "value": "[variables('scannerStorageAccountName')]"
+ },
+ {
+ "name": "AZURE_STORAGE_KEY",
+ "value": "[listKeys(resourceId('Microsoft.Storage/storageAccounts', variables('scannerStorageAccountName')), '2019-06-01').keys[0].value]"
+ },
+ {
+ "name": "RESOURCE_GROUP_NAME",
+ "value": "[resourceGroup().name]"
+ },
+ {
+ "name": "FUNCTION_APP_NAME",
+ "value": "[variables('scannerFunctionAppName')]"
+ },
+ {
+ "name": "PATTERN_ROOT_FOLDER",
+ "value": "[variables('scannerPatternRootFolder')]"
+ },
+ {
+ "name": "PATTERN_PATH",
+ "value": "[variables('scannerPatternPath')]"
+ }
+ ],
+ "scriptContent": "set -x \n az storage directory create --share-name $FUNCTION_APP_NAME --name $PATTERN_ROOT_FOLDER \n az storage directory create --share-name $FUNCTION_APP_NAME --name $PATTERN_PATH \n wget https://file-storage-security.s3.amazonaws.com/latest/patterns/pattern-icrc-269.zip -O pattern.zip \n unzip pattern.zip \n az storage file upload-batch --destination $FUNCTION_APP_NAME/$PATTERN_PATH --source ./patterns",
+ "timeout": "PT10M",
+ "cleanupPreference": "OnSuccess",
+ "retentionInterval": "P1D"
+ }
+ },
+ {
+ "type": "Microsoft.Web/serverfarms",
+ "apiVersion": "2020-12-01",
+ "name": "[variables('scannerServerFarmName')]",
+ "kind": "linux",
+ "properties": {
+ "reserved": true
+ },
+ "location": "[variables('location')]",
+ "sku": {
+ "name": "EP1",
+ "tier": "ElasticPremium",
+ "size": "EP1",
+ "family": "EP"
+ }
+ },
+ {
+ "type": "Microsoft.Web/sites",
+ "apiVersion": "[variables('scannerFunctionAppAPIVersion')]",
+ "name": "[variables('scannerFunctionAppName')]",
+ "location": "[variables('location')]",
+ "kind": "functionapp,linux",
+ "identity": {
+ "type": "SystemAssigned"
+ },
+ "dependsOn": [
+ "[resourceId('Microsoft.Web/serverfarms', variables('scannerServerFarmName'))]",
+ "[resourceId('Microsoft.Storage/storageAccounts', variables('scannerStorageAccountName'))]",
+ "[resourceId('Microsoft.ServiceBus/namespaces/', variables('scannerQueueNamespaceName'))]"
+ ],
+ "properties": {
+ "enabled": true,
+ "hostNameSslStates": [
+ {
+ "name": "[concat(variables('scannerFunctionAppName'), '.azurewebsites.net')]",
+ "sslState": "Disabled",
+ "hostType": "Standard"
+ },
+ {
+ "name": "[concat(variables('scannerFunctionAppName'), '.scm.azurewebsites.net')]",
+ "sslState": "Disabled",
+ "hostType": "Repository"
+ }
+ ],
+ "serverFarmId": "[resourceId('Microsoft.Web/serverfarms', variables('scannerServerFarmName'))]",
+ "reserved": true,
+ "isXenon": false,
+ "hyperV": false,
+ "siteConfig": {
+ "appSettings": [
+ {
+ "name": "FUNCTIONS_EXTENSION_VERSION",
+ "value": "~3"
+ },
+ {
+ "name": "FUNCTIONS_WORKER_RUNTIME",
+ "value": "python"
+ },
+ {
+ "name": "APPINSIGHTS_INSTRUMENTATIONKEY",
+ "value": "[reference(resourceId('microsoft.insights/components', variables('scannerApplicationInsightsName')), '2020-02-02-preview').InstrumentationKey]"
+ },
+ {
+ "name": "APPLICATIONINSIGHTS_CONNECTION_STRING",
+ "value": "[reference(resourceId('microsoft.insights/components', variables('scannerApplicationInsightsName')), '2020-02-02-preview').ConnectionString]"
+ },
+ {
+ "name": "AzureWebJobsStorage",
+ "value": "[concat('DefaultEndpointsProtocol=https;AccountName=',variables('scannerStorageAccountName'),';AccountKey=',listKeys(resourceId('Microsoft.Storage/storageAccounts', variables('scannerStorageAccountName')), '2019-06-01').keys[0].value)]"
+ },
+ {
+ "name": "WEBSITE_CONTENTAZUREFILECONNECTIONSTRING",
+ "value": "[concat('DefaultEndpointsProtocol=https;AccountName=',variables('scannerStorageAccountName'),';AccountKey=',listKeys(resourceId('Microsoft.Storage/storageAccounts', variables('scannerStorageAccountName')), '2019-06-01').keys[0].value)]"
+ },
+ {
+ "name": "WEBSITE_CONTENTSHARE",
+ "value": "[toLower(variables('scannerFunctionAppName'))]"
+ },
+ {
+ "name": "WEBSITE_RUN_FROM_PACKAGE",
+ "value": "[variables('scannerFunctionPackageLink')]"
+ },
+ {
+ "name": "LD_LIBRARY_PATH",
+ "value": "/home/site/wwwroot:/home/site/wwwroot/lib"
+ },
+ {
+ "name": "PATTERN_PATH",
+ "value": "[concat('/home/', variables('scannerPatternPath'))]"
+ },
+ {
+ "name": "PATTERN_VERSION",
+ "value": ""
+ },
+ {
+ "name": "PATTERN_UPDATE_SCHEDULE",
+ "value": "01:00:00"
+ },
+ {
+ "name": "LICENSE",
+ "value": "[concat('@Microsoft.KeyVault(VaultName=', variables('scannerLicenseKeyVaultName'), ';SecretName=', variables('scannerLicenseSecretName'), ')')]"
+ },
+ {
+ "name": "BUS_QUEUE_CONNECTION_STR",
+ "value": "[listkeys(variables('scannerQueueAuthRuleResourceId'), variables('scannerQueueAPIVersion')).primaryConnectionString]"
+ },
+ {
+ "name": "SCANNER_QUEUE_NAME",
+ "value": "[variables('scannerQueueName')]"
+ },
+ {
+ "name": "SCAN_RESULT_TOPIC_NAME",
+ "value": "[variables('scanResultTopicName')]"
+ },
+ {
+ "name": "SUBSCRIPTION_ID",
+ "value": "[subscription().subscriptionId]"
+ },
+ {
+ "name": "SUBSCRIPTION_NAME",
+ "value": "[subscription().displayName]"
+ },
+ {
+ "name": "TENANT_ID",
+ "value": "[subscription().tenantId]"
+ },
+ {
+ "name": "CLOUD_ONE_REGION",
+ "value": "[parameters('CloudOneRegion')]"
+ },
+ {
+ "name": "FUNCTIONS_WORKER_PROCESS_COUNT",
+ "value": "[variables('functionsWorkerProcessCount')]"
+ },
+ {
+ "name": "FSS_API_ENDPOINT",
+ "value": "[variables('fssAPIEndpoint')]"
+ }
+ ],
+ "use32BitWorkerProcess": false,
+ "linuxFxVersion": "PYTHON|3.8"
+ },
+ "scmSiteAlsoStopped": false,
+ "clientAffinityEnabled": false,
+ "clientCertEnabled": false,
+ "hostNamesDisabled": false,
+ "containerSize": 1536,
+ "dailyMemoryTimeQuota": 0,
+ "httpsOnly": true,
+ "redundancyMode": "None"
+ }
+ },
+ {
+ "type": "Microsoft.Logic/workflows",
+ "apiVersion": "2019-05-01",
+ "name": "scannerWarmUpLogicApp",
+ "location": "[variables('location')]",
+ "properties": {
+ "state": "Enabled",
+ "definition": {
+ "$schema": "https://schema.management.azure.com/providers/Microsoft.Logic/schemas/2016-06-01/workflowdefinition.json#",
+ "contentVersion": "1.0.0.0",
+ "parameters": {},
+ "triggers": {
+ "Recurrence": {
+ "recurrence": {
+ "frequency": "Minute",
+ "interval": 10
+ },
+ "type": "Recurrence"
+ }
+ },
+ "actions": {
+ "HTTP": {
+ "type": "Http",
+ "inputs": {
+ "method": "GET",
+ "uri": "[concat('https://', reference(variables('scannerFunctionAppName')).defaultHostName)]"
+ }
+ }
+ },
+ "outputs": {}
+ },
+ "parameters": {}
+ }
+ },
+ {
+ "type": "Microsoft.KeyVault/vaults",
+ "apiVersion": "2019-09-01",
+ "name": "[variables('scannerLicenseKeyVaultName')]",
+ "location": "[variables('location')]",
+ "properties": {
+ "enabledForDeployment": false,
+ "enabledForDiskEncryption": false,
+ "enabledForTemplateDeployment": false,
+ "enableRbacAuthorization": true,
+ "tenantId": "[subscription().tenantId]",
+ "sku": {
+ "name": "Standard",
+ "family": "A"
+ },
+ "networkAcls": {
+ "defaultAction": "Allow",
+ "bypass": "AzureServices"
+ }
+ }
+ },
+ {
+ "type": "Microsoft.Authorization/roleAssignments",
+ "apiVersion": "2018-09-01-preview",
+ "name": "[variables('scannerLicenseReaderRoleAssignmentUID')]",
+ "dependsOn": [
+ "[resourceId('Microsoft.KeyVault/vaults', variables('scannerLicenseKeyVaultName'))]"
+ ],
+ "scope": "[resourceId('Microsoft.KeyVault/vaults', variables('scannerLicenseKeyVaultName'))]",
+ "properties": {
+ "roleDefinitionId": "[resourceId('Microsoft.Authorization/roleDefinitions', variables('keyVaultSecretsUserRoleID'))]",
+ "principalId": "[reference(resourceId('Microsoft.Web/sites', variables('scannerFunctionAppName')), variables('scannerFunctionAppAPIVersion'), 'full').identity.principalId]",
+ "principalType": "ServicePrincipal"
+ }
+ },
+ {
+ "type": "Microsoft.Authorization/roleDefinitions",
+ "apiVersion": "2018-07-01",
+ "name": "[variables('resourceManagementRoleName')]",
+ "properties": {
+ "roleName": "[variables('resourceManagementRoleDefinitionName')]",
+ "description": "FSS resource management role",
+ "type": "customRole",
+ "isCustom": true,
+ "permissions": [
+ {
+ "actions": [
+ "Microsoft.Resources/subscriptions/resourceGroups/read",
+ "Microsoft.Resources/subscriptions/resourcegroups/deployments/read",
+ "Microsoft.Resources/subscriptions/resourcegroups/deployments/operations/read",
+ "Microsoft.Resources/subscriptions/resourcegroups/deployments/operationstatuses/read",
+ "Microsoft.Resources/subscriptions/resourcegroups/resources/read",
+
+ "Microsoft.Web/sites/Read",
+ "Microsoft.Web/sites/config/list/action",
+ "Microsoft.Web/sites/config/Read",
+ "Microsoft.Web/sites/config/Write",
+ "Microsoft.Web/sites/functions/read",
+ "Microsoft.Web/sites/functions/write",
+ "Microsoft.Web/sites/host/sync/action",
+ "Microsoft.Web/sites/host/listsyncstatus/action",
+ "Microsoft.Web/sites/metrics/read",
+ "Microsoft.Web/sites/restart/Action",
+ "Microsoft.Web/sites/syncfunctiontriggers/action",
+ "Microsoft.Web/sites/usages/read",
+
+ "Microsoft.Authorization/roleDefinitions/read",
+ "Microsoft.Authorization/roleAssignments/read",
+
+ "Microsoft.ManagedIdentity/userAssignedIdentities/read",
+
+ "Microsoft.Resources/deployments/read",
+ "Microsoft.Resources/deployments/whatIf/action",
+ "Microsoft.Resources/deployments/exportTemplate/action",
+ "Microsoft.Resources/deployments/operations/read",
+ "Microsoft.Resources/deployments/operationstatuses/read",
+
+ "Microsoft.Resources/deploymentScripts/read",
+ "Microsoft.Resources/deploymentScripts/logs/read",
+
+ "Microsoft.Storage/storageAccounts/read",
+
+ "Microsoft.Insights/Components/Read",
+ "Microsoft.Insights/Components/Query/Read",
+ "Microsoft.Insights/Logs/Read",
+
+ "Microsoft.ServiceBus/namespaces/read",
+ "Microsoft.ServiceBus/namespaces/queues/read"
+ ]
+ }
+ ],
+ "assignableScopes": [
+ "[resourceGroup().id]"
+ ]
+ }
+ },
+ {
+ "type": "Microsoft.Authorization/roleAssignments",
+ "apiVersion": "2018-09-01-preview",
+ "name": "[variables('resourceManagementRoleAssignmentUID')]",
+ "condition": "[not(empty(parameters('FileStorageSecurityServicePrincipalID')))]",
+ "dependsOn": ["[variables('resourceManagementRoleName')]"],
+ "properties": {
+ "roleDefinitionId": "[resourceId('Microsoft.Authorization/roleDefinitions', variables('resourceManagementRoleName'))]",
+ "principalId": "[parameters('FileStorageSecurityServicePrincipalID')]",
+ "principalType": "ServicePrincipal"
+ }
+ },
+ {
+ "type": "Microsoft.Authorization/roleDefinitions",
+ "apiVersion": "2018-07-01",
+ "name": "[variables('scannerQueueManagementRoleName')]",
+ "properties": {
+ "roleName": "[variables('scannerQueueManagementRoleDefinitionName')]",
+ "description": "FSS scanner queue management role",
+ "type": "customRole",
+ "isCustom": true,
+ "permissions": [
+ {
+ "actions": [
+ "Microsoft.Authorization/roleAssignments/write",
+ "Microsoft.Authorization/roleAssignments/delete"
+ ]
+ }
+ ],
+ "assignableScopes": [
+ "[resourceGroup().id]"
+ ]
+ }
+ },
+ {
+ "type": "Microsoft.Authorization/roleAssignments",
+ "apiVersion": "2018-09-01-preview",
+ "name": "[variables('scannerQueueManagementRoleAssignmentUID')]",
+ "condition": "[not(empty(parameters('FileStorageSecurityServicePrincipalID')))]",
+ "dependsOn": [
+ "[variables('scannerQueueManagementRoleName')]",
+ "[resourceId('Microsoft.ServiceBus/namespaces/queues', variables('scannerQueueNamespaceName'), variables('scannerQueueName'))]"
+ ],
+ "scope": "[concat('Microsoft.ServiceBus/namespaces/', variables('scannerQueueNamespaceName'), '/queues/', variables('scannerQueueName'))]",
+ "properties": {
+ "roleDefinitionId": "[resourceId('Microsoft.Authorization/roleDefinitions', variables('scannerQueueManagementRoleName'))]",
+ "principalId": "[parameters('FileStorageSecurityServicePrincipalID')]",
+ "principalType": "ServicePrincipal"
+ }
+ },
+ {
+ "type": "Microsoft.Authorization/roleDefinitions",
+ "apiVersion": "2018-07-01",
+ "name": "[variables('scannerLicenseManagementRoleName')]",
+ "properties": {
+ "roleName": "[variables('scannerLicenseManagementRoleDefinitionName')]",
+ "description": "FSS scanner license key vault management role",
+ "type": "customRole",
+ "isCustom": true,
+ "permissions": [
+ {
+ "dataActions": [
+ "Microsoft.KeyVault/vaults/secrets/setSecret/action"
+ ]
+ }
+ ],
+ "assignableScopes": [
+ "[resourceGroup().id]"
+ ]
+ }
+ },
+ {
+ "type": "Microsoft.Authorization/roleAssignments",
+ "apiVersion": "2018-09-01-preview",
+ "name": "[variables('scannerLicenseManagementRoleAssignmentUID')]",
+ "condition": "[not(empty(parameters('FileStorageSecurityServicePrincipalID')))]",
+ "dependsOn": [
+ "[variables('scannerLicenseManagementRoleName')]",
+ "[resourceId('Microsoft.KeyVault/vaults', variables('scannerLicenseKeyVaultName'))]"
+ ],
+ "scope": "[resourceId('Microsoft.KeyVault/vaults', variables('scannerLicenseKeyVaultName'))]",
+ "properties": {
+ "roleDefinitionId": "[resourceId('Microsoft.Authorization/roleDefinitions', variables('scannerLicenseManagementRoleName'))]",
+ "principalId": "[parameters('FileStorageSecurityServicePrincipalID')]",
+ "principalType": "ServicePrincipal"
+ }
+ }
+ ],
+ "outputs": {
+ "TenantID": {
+ "type": "string",
+ "value": "[subscription().tenantId]"
+ },
+ "ScannerStackResourceGroupID": {
+ "type": "string",
+ "value": "[resourceGroup().id]"
+ },
+ "ScannerQueueNamespace": {
+ "type": "string",
+ "value": "[variables('scannerQueueNamespaceName')]"
+ },
+ "ScannerIdentityPrincipalID": {
+ "type": "string",
+ "value": "[reference(resourceId('Microsoft.Web/sites', variables('scannerFunctionAppName')), variables('scannerFunctionAppAPIVersion'), 'full').identity.principalId]"
+ },
+ "CloudOneRegion": {
+ "type": "string",
+ "value": "[parameters('CloudOneRegion')]"
+ },
+ "ScannerResourceID": {
+ "type": "string",
+ "value": "[resourceId('Microsoft.Web/sites', variables('scannerFunctionAppName'))]"
+ },
+ "ScannerQueueResourceID": {
+ "type": "string",
+ "value": "[resourceId('Microsoft.ServiceBus/namespaces/queues', variables('scannerQueueNamespaceName'), variables('scannerQueueName'))]"
+ },
+ "ScannerLicenseKeyVaultURI": {
+ "type": "string",
+ "value": "[reference(resourceId('Microsoft.KeyVault/vaults', variables('scannerLicenseKeyVaultName'))).vaultUri]"
+ }
+ }
+}
\ No newline at end of file
diff --git a/deployment/azure-python-deploy-to-all-existing-storage/deployToAllExistingStorageAccounts/deploymentHandler/templates/FSS-Storage-Stack-Template.json b/deployment/azure-python-deploy-to-all-existing-storage/deployToAllExistingStorageAccounts/deploymentHandler/templates/FSS-Storage-Stack-Template.json
new file mode 100644
index 00000000..985854a0
--- /dev/null
+++ b/deployment/azure-python-deploy-to-all-existing-storage/deployToAllExistingStorageAccounts/deploymentHandler/templates/FSS-Storage-Stack-Template.json
@@ -0,0 +1,837 @@
+{
+ "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#",
+ "contentVersion": "1.0.0.0",
+ "parameters": {
+ "FileStorageSecurityServicePrincipalID": {
+ "type": "string",
+ "metadata": {
+ "description": "The ID of the Service Principal created for Trend Micro's App Registration."
+ },
+ "maxLength": 36
+ },
+ "CloudOneRegion": {
+ "type": "string",
+ "metadata": {
+        "description": "The region of the Trend Micro Cloud One services. For more information, see supported Cloud One regions."
+ },
+ "defaultValue": "us-1"
+ },
+ "ScannerIdentityPrincipalID": {
+ "type": "String",
+ "metadata": {
+ "description": "The principal ID of the managed identity assigned to the Scanner Function."
+ }
+ },
+ "ScannerQueueNamespace": {
+ "type": "String",
+ "metadata": {
+ "description": "The Scanner Queue's namespace name."
+ }
+ },
+ "BlobStorageAccountResourceID": {
+ "type": "String",
+ "metadata": {
+        "description": "The resource ID of Storage Account to scan. The ID can be found in the Storage Account's Endpoints page. For example, /subscriptions/1234abcd-3c6d-4347-9019-123456789012/resourceGroups/storage-resource-group/providers/Microsoft.Storage/storageAccounts/protectingstorageaccount"
+ }
+ },
+ "BlobSystemTopicExist": {
+ "allowedValues": [
+ "Yes",
+ "No"
+ ],
+ "metadata": {
+ "description": "Select Yes if the system topic of the protecting storage account is already created. Select No, to deploy a new system topic."
+ },
+ "defaultValue": "No",
+ "type": "String"
+ },
+ "BlobSystemTopicName": {
+ "type": "String",
+ "defaultValue": "BlobEventTopic",
+ "metadata": {
+ "description": "The name of the existing Event Grid System Topic associated to the Blob Storage Account or the Event Grid System Topic to be created and associated."
+ }
+ },
+ "UpdateScanResultToBlobMetadata": {
+ "allowedValues": [
+ "Yes",
+ "No"
+ ],
+ "type": "String",
+ "defaultValue": "Yes",
+ "metadata": {
+ "description": "Post scan action function requires Microsoft.Storage/storageAccounts/blobServices/containers/blobs/write permission to update blob metadata. Select Yes to update the blob metadata and index tag with the scan result. Select No to update only the blob index tag."
+ }
+ },
+ "ReportObjectKey": {
+ "allowedValues": [
+ "Yes",
+ "No"
+ ],
+ "type": "String",
+ "defaultValue": "No",
+ "metadata": {
+ "description": "Select Yes to report the object keys of the scanned objects to File Storage Security backend services. File Storage Security can then display the object keys of the malicious objects in the response of events API."
+ }
+ },
+ "StackPackageLocation": {
+ "type": "string",
+ "defaultValue": "https://file-storage-security.s3.amazonaws.com",
+ "metadata": {
+ "description": "Warning: Do not modify the field. Modifications may cause your deployment to fail."
+ }
+ },
+ "Version": {
+ "type": "string",
+ "defaultValue": "latest",
+ "metadata": {
+ "description": "Warning: Do not modify the field. Modifications may cause your deployment to fail."
+ }
+ },
+ "SharedAccessSignature": {
+ "type": "secureString",
+ "defaultValue": "",
+ "metadata": {
+ "description": "Warning: Do not modify the field. Modifications may cause your deployment to fail."
+ }
+ }
+ },
+ "variables": {
+ "functionPackageLinkPrefix": "[concat(parameters('Version'), '/functions')]",
+ "unifiedSAS": "[if(empty(parameters('SharedAccessSignature')), '', if(startsWith(parameters('SharedAccessSignature'), '?'), parameters('SharedAccessSignature'), concat('?', parameters('SharedAccessSignature'))))]",
+ "blobListenerFunctionPackageLink": "[concat(parameters('StackPackageLocation'), '/', variables('functionPackageLinkPrefix'), '/azure-listener.zip', variables('unifiedSAS'))]",
+ "actionTagFunctionPackageLink": "[concat(parameters('StackPackageLocation'), '/', variables('functionPackageLinkPrefix'), '/azure-action-tag.zip', variables('unifiedSAS'))]",
+
+ "stackUID": "[uniqueString(resourceGroup().id)]",
+ "blobStorageAccountResourceIdSplits": "[split(parameters('BlobStorageAccountResourceID'), '/')]",
+ "blobSubscriptionID": "[variables('blobStorageAccountResourceIdSplits')[2]]",
+ "blobResourceGroupName": "[variables('blobStorageAccountResourceIdSplits')[4]]",
+ "blobAccountName": "[variables('blobStorageAccountResourceIdSplits')[8]]",
+ "blobStorageAccountResourceID": "[resourceId(variables('blobSubscriptionID'), variables('blobResourceGroupName'), 'Microsoft.Storage/storageAccounts', variables('blobAccountName'))]",
+ "blobListenerAppName": "[concat('tmbl0', variables('stackUID'))]",
+ "blobListenerAppAPIVersion": "2018-11-01",
+ "blobListenerApplicationInsightsName": "[concat('tmblai0', variables('stackUID'))]",
+ "blobListenerApplicationInsightsID": "[guid(variables('blobListenerApplicationInsightsName'))]",
+ "blobListenerServerFarmName": "[concat('tmblsf0', variables('stackUID'))]",
+ "blobListenerStorageAccountName": "[concat('tmblsa0', variables('stackUID'))]",
+ "blobEventsDeadLetterContainer": "blob-events-dead-letter",
+ "actionTagAppName": "[concat('tmat0', variables('stackUID'))]",
+ "actionTagAppAPIVersion": "2018-11-01",
+ "actionTagApplicationInsightsName": "[concat('tmatai0', variables('stackUID'))]",
+ "actionTagApplicationInsightsID": "[guid(variables('actionTagApplicationInsightsName'))]",
+ "actionTagServerFarmName": "[concat('tmatsf0', variables('stackUID'))]",
+ "actionTagStorageAccountName": "[concat('tmatsa0', variables('stackUID'))]",
+ "location": "[resourceGroup().location]",
+ "serviceBusDataSenderRoleID": "69a216fc-b8fb-44d8-bc22-1f3c2cd27a39",
+ "scannerQueueNamespace": "[concat(parameters('ScannerQueueNamespace'), '.servicebus.windows.net')]",
+ "blobStorageAccountAPIVersion": "2019-06-01",
+ "scanResultTopicAPIVersion": "2017-04-01",
+ "scanResultTopicNamespaceName": "[concat('tmsrt0', variables('stackUID'))]",
+ "scanResultTopicNamespace": "[concat(variables('scanResultTopicNamespaceName'), '.servicebus.windows.net')]",
+ "scanResultTopicName": "scan-result-topic",
+ "scanResultTopicSubscriptionName": "scan-result",
+ "scanResultSASKeyName": "RootManageSharedAccessKey",
+ "scanResultAuthRuleResourceID": "[resourceId('Microsoft.ServiceBus/namespaces/authorizationRules', variables('scanResultTopicNamespaceName'), variables('scanResultSASKeyName'))]",
+ "blobActionTagRoleDefinitionName": "[guid(variables('stackUID'), 'blobActionTagRoleName')]",
+ "blobListenerRoleDefinitionName": "[guid(variables('stackUID'), 'blobListenerRoleName')]",
+ "blobSystemTopicSubscriptionName": "[concat(variables('blobListenerAppName'), '-trigger')]",
+ "blobActionTagRoleAssignmentUID": "[guid(resourceGroup().id, 'BlobActionTagRoleAssignment')]",
+ "blobListenerRoleAssignmentUID": "[guid(resourceGroup().id, 'BlobListenerRoleAssignment')]",
+ "blobActionTagRoleName": "[concat('FSS-BlobActionTagRole-', uniqueString(string(variables('blobActionTagDataActions')), variables('stackUID')))]",
+ "blobListenerRoleName": "[concat('FSS-BlobListenerRole-', uniqueString(string(variables('blobListenerActions')), string(variables('blobListenerDataActions')), variables('stackUID')))]",
+ "blobSystemTopicSubscriptionID": "[concat('/subscriptions/', variables('blobSubscriptionID'), '/resourcegroups/', variables('blobResourceGroupName'), '/providers/Microsoft.EventGrid/systemTopics/', parameters('BlobSystemTopicName'), '/eventSubscriptions/', variables('blobSystemTopicSubscriptionName'))]",
+ "managementRoleName": "[guid(variables('stackUID'), 'ManagementRole')]",
+ "managementRoleAssignmentUID": "[guid(variables('stackUID'), 'ManagementRoleAssignment', parameters('FileStorageSecurityServicePrincipalID'))]",
+ "functionsWorkerProcessCount": "10",
+ "blobListenerActions": [
+ "Microsoft.Storage/storageAccounts/blobServices/generateUserDelegationKey/action"
+ ],
+ "blobListenerDataActions": [
+ "Microsoft.Storage/storageAccounts/blobServices/containers/blobs/read"
+ ],
+ "blobIndexTagDataActions": [
+ "Microsoft.Storage/storageAccounts/blobServices/containers/blobs/tags/read",
+ "Microsoft.Storage/storageAccounts/blobServices/containers/blobs/tags/write"
+ ],
+ "blobDataActions": [
+ "Microsoft.Storage/storageAccounts/blobServices/containers/blobs/tags/read",
+ "Microsoft.Storage/storageAccounts/blobServices/containers/blobs/tags/write",
+ "Microsoft.Storage/storageAccounts/blobServices/containers/blobs/read",
+ "Microsoft.Storage/storageAccounts/blobServices/containers/blobs/write"
+ ],
+ "blobActionTagDataActions": "[if(equals(parameters('UpdateScanResultToBlobMetadata'), 'Yes'), variables('blobDataActions'), variables('blobIndexTagDataActions'))]",
+ "updateScanResultToBlobMetadata": "[if(equals(parameters('UpdateScanResultToBlobMetadata'), 'Yes'), 'True', 'False')]",
+ "reportObjectKey": "[if(equals(parameters('ReportObjectKey'), 'Yes'), 'True', 'False')]",
+ "functionsWarmupLogicAppName": "functionsWarmUpLogicApp"
+ },
+ "resources": [
+ {
+ "apiVersion": "2019-05-01",
+ "name": "StorageBlobRoleAssignment",
+ "type": "Microsoft.Resources/deployments",
+ "resourceGroup": "[variables('blobResourceGroupName')]",
+ "subscriptionId": "[variables('blobSubscriptionID')]",
+ "properties": {
+ "mode": "Incremental",
+ "template": {
+ "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#",
+ "contentVersion": "1.0.0.0",
+ "parameters": {},
+ "variables": {},
+ "resources": [
+ {
+ "type": "Microsoft.Authorization/roleDefinitions",
+ "apiVersion": "2018-07-01",
+ "name": "[variables('blobActionTagRoleDefinitionName')]",
+ "properties": {
+ "roleName": "[variables('blobActionTagRoleName')]",
+ "description": "The Role has Blob Writer permission",
+ "type": "customRole",
+ "isCustom": true,
+ "permissions": [
+ {
+ "dataActions": "[variables('blobActionTagDataActions')]"
+ }
+ ],
+ "assignableScopes": [
+ "[parameters('BlobStorageAccountResourceID')]"
+ ]
+ }
+ },
+ {
+ "type": "Microsoft.Authorization/roleDefinitions",
+ "apiVersion": "2018-07-01",
+ "name": "[variables('blobListenerRoleDefinitionName')]",
+ "properties": {
+ "roleName": "[variables('blobListenerRoleName')]",
+ "description": "The Role has Blob read and generate delegation key permission",
+ "type": "customRole",
+ "isCustom": true,
+ "permissions": [
+ {
+ "actions": "[variables('blobListenerActions')]",
+ "dataActions": "[variables('blobListenerDataActions')]"
+ }
+ ],
+ "assignableScopes": [
+ "[parameters('BlobStorageAccountResourceID')]"
+ ]
+ }
+ },
+ {
+ "type": "Microsoft.Authorization/roleAssignments",
+ "apiVersion": "2018-09-01-preview",
+ "name": "[variables('blobListenerRoleAssignmentUID')]",
+ "scope": "[concat('Microsoft.Storage/storageAccounts/', variables('blobAccountName'))]",
+ "dependsOn": [
+ "[variables('blobListenerRoleDefinitionName')]"
+ ],
+ "properties": {
+ "roleDefinitionId": "[resourceId('Microsoft.Authorization/roleDefinitions', variables('blobListenerRoleDefinitionName'))]",
+ "principalId": "[reference(resourceId('Microsoft.Web/sites', variables('blobListenerAppName')), variables('blobListenerAppAPIVersion'), 'full').identity.principalId]",
+ "principalType": "ServicePrincipal"
+ }
+ },
+ {
+ "type": "Microsoft.Authorization/roleAssignments",
+ "apiVersion": "2018-09-01-preview",
+ "name": "[variables('blobActionTagRoleAssignmentUID')]",
+ "scope": "[concat('Microsoft.Storage/storageAccounts/', variables('blobAccountName'))]",
+ "dependsOn": [
+ "[variables('blobActionTagRoleDefinitionName')]"
+ ],
+ "properties": {
+ "roleDefinitionId": "[resourceId('Microsoft.Authorization/roleDefinitions', variables('blobActionTagRoleDefinitionName'))]",
+ "principalId": "[reference(resourceId('Microsoft.Web/sites', variables('actionTagAppName')), variables('actionTagAppAPIVersion'), 'full').identity.principalId]",
+ "principalType": "ServicePrincipal"
+ }
+ }
+ ]
+ },
+ "parameters": {}
+ }
+ },
+ {
+ "apiVersion": "[variables('scanResultTopicAPIVersion')]",
+ "name": "[variables('scanResultTopicNamespaceName')]",
+ "type": "Microsoft.ServiceBus/Namespaces",
+ "location": "[variables('location')]",
+ "kind": "Messaging",
+ "sku": {
+ "name": "Standard"
+ },
+ "resources": [
+ {
+ "apiVersion": "[variables('scanResultTopicAPIVersion')]",
+ "name": "[variables('scanResultTopicName')]",
+ "type": "Topics",
+ "dependsOn": [
+ "[resourceId('Microsoft.ServiceBus/namespaces/', variables('scanResultTopicNamespaceName'))]"
+ ],
+ "properties": {
+ "path": "[variables('scanResultTopicName')]"
+ },
+ "resources": [
+ {
+ "apiVersion": "[variables('scanResultTopicAPIVersion')]",
+ "name": "[variables('scanResultTopicSubscriptionName')]",
+ "type": "Subscriptions",
+ "dependsOn": [
+ "[variables('scanResultTopicName')]"
+ ],
+ "properties": {
+ "lockDuration": "PT30S",
+ "defaultMessageTimeToLive": "PT1H",
+ "maxDeliveryCount": 20,
+ "deadLetteringOnFilterEvaluationExceptions": true,
+ "deadLetteringOnMessageExpiration": true
+ }
+ }
+ ]
+ }
+ ]
+ },
+ {
+ "type": "Microsoft.Authorization/roleAssignments",
+ "apiVersion": "2018-09-01-preview",
+ "name": "[guid(resourceGroup().id, parameters('ScannerIdentityPrincipalID'), 'ServiceBusDataSenderRoleAssignment')]",
+ "dependsOn": [
+ "[resourceId('Microsoft.ServiceBus/namespaces/Topics', variables('scanResultTopicNamespaceName'), variables('scanResultTopicName'))]"
+ ],
+ "scope": "[concat('Microsoft.ServiceBus/namespaces/', variables('scanResultTopicNamespaceName'), '/topics/', variables('scanResultTopicName'))]",
+ "properties": {
+ "roleDefinitionId": "[resourceId('Microsoft.Authorization/roleDefinitions', variables('serviceBusDataSenderRoleID'))]",
+ "principalId": "[parameters('ScannerIdentityPrincipalID')]",
+ "principalType": "ServicePrincipal"
+ }
+ },
+ {
+ "name": "[variables('blobListenerApplicationInsightsName')]",
+ "type": "microsoft.insights/components",
+ "location": "[variables('location')]",
+ "apiVersion": "2014-08-01",
+ "properties": {
+ "Application_Type": "web",
+ "ApplicationId": "[variables('blobListenerApplicationInsightsID')]"
+ }
+ },
+ {
+ "type": "Microsoft.Storage/storageAccounts",
+ "apiVersion": "2021-04-01",
+ "name": "[variables('blobListenerStorageAccountName')]",
+ "location": "[variables('location')]",
+ "sku": {
+ "name": "Standard_LRS"
+ },
+ "kind": "Storage",
+ "properties": {
+ "minimumTlsVersion": "TLS1_2"
+ },
+ "resources": [
+ {
+ "type": "blobServices/containers",
+ "apiVersion": "2021-04-01",
+ "name": "[concat('default/', variables('blobEventsDeadLetterContainer'))]",
+ "dependsOn": [
+ "[variables('blobListenerStorageAccountName')]"
+ ],
+ "properties": {
+ "publicAccess": "None"
+ }
+ }
+ ]
+ },
+ {
+ "type": "Microsoft.Web/serverfarms",
+ "apiVersion": "2020-06-01",
+ "name": "[variables('blobListenerServerFarmName')]",
+ "kind": "linux",
+ "properties": {
+ "reserved": true
+ },
+ "location": "[variables('location')]",
+ "sku": {
+ "name": "Y1",
+ "tier": "Dynamic",
+ "size": "Y1",
+ "family": "Y"
+ }
+ },
+ {
+ "type": "Microsoft.Web/sites",
+ "apiVersion": "[variables('blobListenerAppAPIVersion')]",
+ "name": "[variables('blobListenerAppName')]",
+ "location": "[variables('location')]",
+ "kind": "functionapp,linux",
+ "identity": {
+ "type": "SystemAssigned"
+ },
+ "dependsOn": [
+ "[resourceId('Microsoft.Web/serverfarms', variables('blobListenerServerFarmName'))]",
+ "[resourceId('Microsoft.Storage/storageAccounts', variables('blobListenerStorageAccountName'))]"
+ ],
+ "properties": {
+ "enabled": true,
+ "hostNameSslStates": [
+ {
+ "name": "[concat(variables('blobListenerAppName'), '.azurewebsites.net')]",
+ "sslState": "Disabled",
+ "hostType": "Standard"
+ },
+ {
+ "name": "[concat(variables('blobListenerAppName'), '.scm.azurewebsites.net')]",
+ "sslState": "Disabled",
+ "hostType": "Repository"
+ }
+ ],
+ "serverFarmId": "[resourceId('Microsoft.Web/serverfarms', variables('blobListenerServerFarmName'))]",
+ "reserved": true,
+ "isXenon": false,
+ "hyperV": false,
+ "siteConfig": {
+ "appSettings": [
+ {
+ "name": "FUNCTIONS_EXTENSION_VERSION",
+ "value": "~3"
+ },
+ {
+ "name": "FUNCTIONS_WORKER_RUNTIME",
+ "value": "python"
+ },
+ {
+ "name": "APPINSIGHTS_INSTRUMENTATIONKEY",
+ "value": "[reference(resourceId('microsoft.insights/components', variables('blobListenerApplicationInsightsName')), '2020-02-02-preview').InstrumentationKey]"
+ },
+ {
+ "name": "APPLICATIONINSIGHTS_CONNECTION_STRING",
+ "value": "[reference(resourceId('microsoft.insights/components', variables('blobListenerApplicationInsightsName')), '2020-02-02-preview').ConnectionString]"
+ },
+ {
+ "name": "AzureWebJobsStorage",
+ "value": "[concat('DefaultEndpointsProtocol=https;AccountName=',variables('blobListenerStorageAccountName'),';AccountKey=',listKeys(resourceId('Microsoft.Storage/storageAccounts', variables('blobListenerStorageAccountName')), '2019-06-01').keys[0].value)]"
+ },
+ {
+ "name": "WEBSITE_CONTENTAZUREFILECONNECTIONSTRING",
+ "value": "[concat('DefaultEndpointsProtocol=https;AccountName=',variables('blobListenerStorageAccountName'),';AccountKey=',listKeys(resourceId('Microsoft.Storage/storageAccounts', variables('blobListenerStorageAccountName')), '2019-06-01').keys[0].value)]"
+ },
+ {
+ "name": "WEBSITE_CONTENTSHARE",
+ "value": "[toLower(variables('blobListenerAppName'))]"
+ },
+ {
+ "name": "WEBSITE_RUN_FROM_PACKAGE",
+ "value": "[variables('blobListenerFunctionPackageLink')]"
+ },
+ {
+ "name": "SCANNER_QUEUE_NAMESPACE",
+ "value": "[variables('scannerQueueNamespace')]"
+ },
+ {
+ "name": "SCAN_RESULT_TOPIC_NAMESPACE",
+ "value": "[variables('scanResultTopicNamespace')]"
+ },
+ {
+ "name": "STORAGE_STACK_NAME",
+ "value": "[resourceGroup().name]"
+ },
+ {
+ "name": "FUNCTIONS_WORKER_PROCESS_COUNT",
+ "value": "[variables('functionsWorkerProcessCount')]"
+ },
+ {
+ "name": "REPORT_OBJECT_KEY",
+ "value": "[variables('reportObjectKey')]"
+ }
+ ],
+ "use32BitWorkerProcess": false,
+ "linuxFxVersion": "PYTHON|3.8"
+ },
+ "scmSiteAlsoStopped": false,
+ "clientAffinityEnabled": false,
+ "clientCertEnabled": false,
+ "hostNamesDisabled": false,
+ "containerSize": 1536,
+ "dailyMemoryTimeQuota": 0,
+ "httpsOnly": true,
+ "redundancyMode": "None"
+ }
+ },
+ {
+ "name": "[variables('actionTagApplicationInsightsName')]",
+ "type": "microsoft.insights/components",
+ "location": "[variables('location')]",
+ "apiVersion": "2014-08-01",
+ "properties": {
+ "Application_Type": "web",
+ "ApplicationId": "[variables('actionTagApplicationInsightsID')]"
+ }
+ },
+ {
+ "type": "Microsoft.Storage/storageAccounts",
+ "apiVersion": "2019-06-01",
+ "name": "[variables('actionTagStorageAccountName')]",
+ "location": "[variables('location')]",
+ "sku": {
+ "name": "Standard_LRS"
+ },
+ "kind": "Storage",
+ "properties": {
+ "minimumTlsVersion": "TLS1_2"
+ }
+ },
+ {
+ "type": "Microsoft.Web/serverfarms",
+ "apiVersion": "2020-06-01",
+ "name": "[variables('actionTagServerFarmName')]",
+ "kind": "linux",
+ "properties": {
+ "reserved": true
+ },
+ "location": "[variables('location')]",
+ "sku": {
+ "name": "Y1",
+ "tier": "Dynamic",
+ "size": "Y1",
+ "family": "Y"
+ }
+ },
+ {
+ "type": "Microsoft.Web/sites",
+ "apiVersion": "[variables('actionTagAppAPIVersion')]",
+ "name": "[variables('actionTagAppName')]",
+ "location": "[variables('location')]",
+ "kind": "functionapp,linux",
+ "identity": {
+ "type": "SystemAssigned"
+ },
+ "dependsOn": [
+ "[resourceId('Microsoft.Web/serverfarms', variables('actionTagServerFarmName'))]",
+ "[resourceId('Microsoft.Storage/storageAccounts', variables('actionTagStorageAccountName'))]"
+ ],
+ "properties": {
+ "enabled": true,
+ "hostNameSslStates": [
+ {
+ "name": "[concat(variables('actionTagAppName'), '.azurewebsites.net')]",
+ "sslState": "Disabled",
+ "hostType": "Standard"
+ },
+ {
+ "name": "[concat(variables('actionTagAppName'), '.scm.azurewebsites.net')]",
+ "sslState": "Disabled",
+ "hostType": "Repository"
+ }
+ ],
+ "serverFarmId": "[resourceId('Microsoft.Web/serverfarms', variables('actionTagServerFarmName'))]",
+ "reserved": true,
+ "isXenon": false,
+ "hyperV": false,
+ "siteConfig": {
+ "appSettings": [
+ {
+ "name": "FUNCTIONS_EXTENSION_VERSION",
+ "value": "~3"
+ },
+ {
+ "name": "FUNCTIONS_WORKER_RUNTIME",
+ "value": "python"
+ },
+ {
+ "name": "APPINSIGHTS_INSTRUMENTATIONKEY",
+ "value": "[reference(resourceId('microsoft.insights/components', variables('actionTagApplicationInsightsName')), '2020-02-02-preview').InstrumentationKey]"
+ },
+ {
+ "name": "APPLICATIONINSIGHTS_CONNECTION_STRING",
+ "value": "[reference(resourceId('microsoft.insights/components', variables('actionTagApplicationInsightsName')), '2020-02-02-preview').ConnectionString]"
+ },
+ {
+ "name": "AzureWebJobsStorage",
+ "value": "[concat('DefaultEndpointsProtocol=https;AccountName=',variables('actionTagStorageAccountName'),';AccountKey=',listKeys(resourceId('Microsoft.Storage/storageAccounts', variables('actionTagStorageAccountName')), '2019-06-01').keys[0].value)]"
+ },
+ {
+ "name": "WEBSITE_CONTENTAZUREFILECONNECTIONSTRING",
+ "value": "[concat('DefaultEndpointsProtocol=https;AccountName=',variables('actionTagStorageAccountName'),';AccountKey=',listKeys(resourceId('Microsoft.Storage/storageAccounts', variables('actionTagStorageAccountName')), '2019-06-01').keys[0].value)]"
+ },
+ {
+ "name": "WEBSITE_CONTENTSHARE",
+ "value": "[toLower(variables('actionTagAppName'))]"
+ },
+ {
+ "name": "WEBSITE_RUN_FROM_PACKAGE",
+ "value": "[variables('actionTagFunctionPackageLink')]"
+ },
+ {
+ "name": "BUS_TOPIC_CONNECTION_STR",
+ "value": "[listkeys(variables('scanResultAuthRuleResourceID'), variables('scanResultTopicAPIVersion')).primaryConnectionString]"
+ },
+ {
+ "name": "SCAN_RESULT_TOPIC_NAME",
+ "value": "[variables('scanResultTopicName')]"
+ },
+ {
+ "name": "SCAN_RESULT_SUBSCRIPTION_NAME",
+ "value": "[variables('scanResultTopicSubscriptionName')]"
+ },
+ {
+ "name": "FUNCTIONS_WORKER_PROCESS_COUNT",
+ "value": "[variables('functionsWorkerProcessCount')]"
+ },
+ {
+ "name": "UPDATE_SCAN_RESULT_TO_BLOB_METADATA",
+ "value": "[variables('updateScanResultToBlobMetadata')]"
+ }
+ ],
+ "use32BitWorkerProcess": false,
+ "linuxFxVersion": "PYTHON|3.8"
+ },
+ "scmSiteAlsoStopped": false,
+ "clientAffinityEnabled": false,
+ "clientCertEnabled": false,
+ "hostNamesDisabled": false,
+ "containerSize": 1536,
+ "dailyMemoryTimeQuota": 0,
+ "httpsOnly": true,
+ "redundancyMode": "None"
+ }
+ },
+ {
+ "type": "Microsoft.Logic/workflows",
+ "apiVersion": "2019-05-01",
+ "name": "[variables('functionsWarmupLogicAppName')]",
+ "location": "[variables('location')]",
+ "properties": {
+ "state": "Enabled",
+ "definition": {
+ "$schema": "https://schema.management.azure.com/providers/Microsoft.Logic/schemas/2016-06-01/workflowdefinition.json#",
+ "contentVersion": "1.0.0.0",
+ "parameters": {},
+ "triggers": {
+ "Recurrence": {
+ "recurrence": {
+ "frequency": "Minute",
+ "interval": 10
+ },
+ "type": "Recurrence"
+ }
+ },
+ "actions": {
+ "getActionTagEndpoint": {
+ "type": "Http",
+ "inputs": {
+ "method": "GET",
+ "uri": "[concat('https://', reference(variables('actionTagAppName')).defaultHostName)]"
+ }
+ },
+ "getBlobListenerEndpoint": {
+ "type": "Http",
+ "inputs": {
+ "method": "GET",
+ "uri": "[concat('https://', reference(variables('blobListenerAppName')).defaultHostName)]"
+ }
+ }
+ },
+ "outputs": {}
+ },
+ "parameters": {}
+ }
+ },
+ {
+ "apiVersion": "2017-05-10",
+ "name": "BlobSystemTopicTemplate",
+ "dependsOn": [
+ "[resourceId('Microsoft.Logic/workflows', variables('functionsWarmupLogicAppName'))]"
+ ],
+ "type": "Microsoft.Resources/deployments",
+ "resourceGroup": "[variables('blobResourceGroupName')]",
+ "subscriptionId": "[variables('blobSubscriptionID')]",
+ "properties": {
+ "mode": "Incremental",
+ "template": {
+ "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#",
+ "contentVersion": "1.0.0.0",
+ "parameters": {},
+ "variables": {},
+ "resources": [
+ {
+ "condition": "[equals(parameters('BlobSystemTopicExist'), 'No')]",
+ "type": "Microsoft.EventGrid/systemTopics",
+ "apiVersion": "2020-04-01-preview",
+ "name": "[parameters('BlobSystemTopicName')]",
+ "location": "[reference(parameters('BlobStorageAccountResourceID'), variables('blobStorageAccountAPIVersion'), 'Full').location]",
+ "properties": {
+ "source": "[parameters('BlobStorageAccountResourceID')]",
+ "topicType": "Microsoft.Storage.StorageAccounts"
+ }
+ },
+ {
+ "type": "Microsoft.EventGrid/systemTopics/eventSubscriptions",
+ "apiVersion": "2020-04-01-preview",
+ "name": "[concat(parameters('BlobSystemTopicName'), '/', variables('blobSystemTopicSubscriptionName'))]",
+ "dependsOn": [
+ "[parameters('BlobSystemTopicName')]"
+ ],
+ "properties": {
+ "destination": {
+ "properties": {
+ "resourceId": "[resourceId('Microsoft.Web/sites/functions', variables('blobListenerAppName'), 'BlobListener')]",
+ "maxEventsPerBatch": 1,
+ "preferredBatchSizeInKilobytes": 64
+ },
+ "endpointType": "AzureFunction"
+ },
+ "filter": {
+ "includedEventTypes": [
+ "Microsoft.Storage.BlobCreated",
+ "Microsoft.Storage.BlobRenamed"
+ ]
+ },
+ "eventDeliverySchema": "EventGridSchema",
+ "retryPolicy": {
+ "maxDeliveryAttempts": 5,
+ "eventTimeToLiveInMinutes": 1440
+ },
+ "deadletterdestination": {
+ "endpointType": "StorageBlob",
+ "properties": {
+ "resourceId": "[resourceId('Microsoft.Storage/storageAccounts', variables('blobListenerStorageAccountName'))]",
+ "blobContainerName": "[variables('blobEventsDeadLetterContainer')]"
+ }
+ }
+ }
+ }
+ ]
+ },
+ "parameters": {}
+ }
+ },
+ {
+ "type": "Microsoft.Authorization/roleDefinitions",
+ "apiVersion": "2018-07-01",
+ "name": "[variables('managementRoleName')]",
+ "properties": {
+ "roleName": "[concat('FSSManagementRole-', variables('managementRoleName'))]",
+ "description": "FSS management role",
+ "type": "customRole",
+ "isCustom": true,
+ "permissions": [
+ {
+ "actions": [
+ "Microsoft.Resources/subscriptions/resourceGroups/read",
+ "Microsoft.Resources/subscriptions/resourcegroups/deployments/read",
+ "Microsoft.Resources/subscriptions/resourcegroups/deployments/operations/read",
+ "Microsoft.Resources/subscriptions/resourcegroups/deployments/operationstatuses/read",
+ "Microsoft.Resources/subscriptions/resourcegroups/resources/read",
+
+ "Microsoft.Web/sites/Read",
+ "Microsoft.Web/sites/config/list/action",
+ "Microsoft.Web/sites/config/Read",
+ "Microsoft.Web/sites/config/Write",
+ "Microsoft.Web/sites/functions/read",
+ "Microsoft.Web/sites/functions/write",
+ "Microsoft.Web/sites/host/sync/action",
+ "Microsoft.Web/sites/host/listsyncstatus/action",
+ "Microsoft.Web/sites/metrics/read",
+ "Microsoft.Web/sites/restart/Action",
+ "Microsoft.Web/sites/syncfunctiontriggers/action",
+ "Microsoft.Web/sites/usages/read",
+
+ "Microsoft.Authorization/roleDefinitions/read",
+ "Microsoft.Authorization/roleAssignments/read",
+
+ "Microsoft.ManagedIdentity/userAssignedIdentities/read",
+
+ "Microsoft.Resources/deployments/read",
+ "Microsoft.Resources/deployments/whatIf/action",
+ "Microsoft.Resources/deployments/exportTemplate/action",
+ "Microsoft.Resources/deployments/operations/read",
+ "Microsoft.Resources/deployments/operationstatuses/read",
+
+ "Microsoft.Storage/storageAccounts/read",
+
+ "Microsoft.Insights/Components/Read",
+ "Microsoft.Insights/Components/Query/Read",
+ "Microsoft.Insights/Logs/Read"
+ ]
+ }
+ ],
+ "assignableScopes": [
+ "[resourceGroup().id]"
+ ]
+ }
+ },
+ {
+ "type": "Microsoft.Authorization/roleAssignments",
+ "apiVersion": "2018-09-01-preview",
+ "name": "[variables('managementRoleAssignmentUID')]",
+ "condition": "[not(empty(parameters('FileStorageSecurityServicePrincipalID')))]",
+ "dependsOn": [ "[variables('managementRoleName')]" ],
+ "properties": {
+ "roleDefinitionId": "[resourceId('Microsoft.Authorization/roleDefinitions', variables('managementRoleName'))]",
+ "principalId": "[parameters('FileStorageSecurityServicePrincipalID')]",
+ "principalType": "ServicePrincipal"
+ }
+ }
+ ],
+ "outputs": {
+ "TenantID": {
+ "type": "string",
+ "value": "[subscription().tenantId]"
+ },
+ "StorageStackResourceGroupID": {
+ "type": "string",
+ "value": "[resourceGroup().id]"
+ },
+ "CloudOneRegion": {
+ "type": "string",
+ "value": "[parameters('CloudOneRegion')]"
+ },
+ "BlobStorageAccountResourceID": {
+ "type": "string",
+ "value": "[variables('blobStorageAccountResourceID')]"
+ },
+ "ScanResultTopicResourceID": {
+ "type": "string",
+ "value": "[resourceId('Microsoft.ServiceBus/namespaces/topics', variables('scanResultTopicNamespaceName'), variables('scanResultTopicName'))]"
+ },
+ "BlobListenerResourceID": {
+ "type": "string",
+ "value": "[resourceId('Microsoft.Web/sites', variables('blobListenerAppName'))]"
+ },
+ "BlobListenerIdentityPrincipalID": {
+ "type": "string",
+ "value": "[reference(resourceId('Microsoft.Web/sites', variables('blobListenerAppName')), variables('blobListenerAppAPIVersion'), 'full').identity.principalId]"
+ },
+ "PostScanActionTagResourceID": {
+ "type": "string",
+ "value": "[resourceId('Microsoft.Web/sites', variables('actionTagAppName'))]"
+ },
+ "BlobActionTagRoleDefinitionName": {
+ "type": "string",
+ "value": "[variables('blobActionTagRoleDefinitionName')]"
+ },
+ "BlobListenerRoleDefinitionName": {
+ "type": "string",
+ "value": "[variables('blobListenerRoleDefinitionName')]"
+ },
+ "BlobActionTagRoleAssignmentID": {
+ "type": "string",
+ "value": "[concat(variables('blobStorageAccountResourceID'), '/providers/Microsoft.Authorization/roleAssignments/', variables('blobActionTagRoleAssignmentUID'))]"
+ },
+ "BlobListenerRoleAssignmentID": {
+ "type": "string",
+ "value": "[concat(variables('blobStorageAccountResourceID'), '/providers/Microsoft.Authorization/roleAssignments/', variables('blobListenerRoleAssignmentUID'))]"
+ },
+ "BlobSystemTopicSubscriptionID": {
+ "type": "string",
+ "value": "[variables('blobSystemTopicSubscriptionID')]"
+ },
+ "BlobSystemTopicDeadLetterStorageID": {
+ "type": "string",
+ "value": "[resourceId('Microsoft.Storage/storageAccounts/blobServices/containers',variables('blobListenerStorageAccountName'), 'default', variables('blobEventsDeadLetterContainer'))]"
+ },
+ "BlobScanResultSubscriptionDeadLetterQueueID": {
+ "type": "string",
+ "value": "[concat(resourceId('Microsoft.ServiceBus/Namespaces/Topics/Subscriptions', variables('scanResultTopicNamespaceName'), variables('scanResultTopicName'), variables('scanResultTopicSubscriptionName')), '/$DeadLetterQueue')]"
+ }
+ }
+}
\ No newline at end of file
diff --git a/deployment/azure-python-deploy-to-all-existing-storage/deployToAllExistingStorageAccounts/deploymentHandler/utils.py b/deployment/azure-python-deploy-to-all-existing-storage/deployToAllExistingStorageAccounts/deploymentHandler/utils.py
new file mode 100644
index 00000000..0ff841ce
--- /dev/null
+++ b/deployment/azure-python-deploy-to-all-existing-storage/deployToAllExistingStorageAccounts/deploymentHandler/utils.py
@@ -0,0 +1,216 @@
+import os
+import json
+import tempfile
+import logging
+
+from azure.cli.core import get_default_cli
+# from munch import DefaultMunch
+
+import cloudone_fss_api
+import locations
+
+# compose_tags: Adds the FSSMonitored tag to the Storage Account(s) that are monitored by Trend Micro File Storage Security
+def compose_tags(existing_tags, FSS_MONITORED_TAG):
+ return {
+ **existing_tags,
+ **{
+ f'{FSS_MONITORED_TAG}': True
+ }
+ }
+
+# get_deployment_mode_from_env: Gets the deployment mode this script is executed with
+def get_deployment_mode_from_env(mode_key, DEPLOYMENT_MODES, DEFAULT_DEPLOYMENT_MODE):
+
+ # Default mode is 'existing' storage accounts only
+ mode = os.environ.get(mode_key, 'existing').lower()
+ return mode if mode in DEPLOYMENT_MODES else DEFAULT_DEPLOYMENT_MODE
+
+# get_deployment_model_from_env: Gets the deployment model this script is executed with
+def get_deployment_model_from_env(model_key, DEPLOYMENT_MODELS, DEFAULT_DEPLOYMENT_MODEL):
+
+ # Default model is 'geographies'
+ model = os.environ.get(model_key, 'geographies').lower()
+ return model if model in DEPLOYMENT_MODELS else DEFAULT_DEPLOYMENT_MODEL
+
+# def get_blob_account_url(file_url):
+# return '/'.join(file_url.split('/')[0:3])
+
+def azure_cli_run_command(command):
+    # Run an Azure CLI command in-process via azure.cli.core and return the
+    # parsed result object; raises Exception when the CLI reports an error.
+    # NOTE(review): naive whitespace split — an argument containing spaces
+    # (e.g. a quoted tag value) would be broken apart; confirm callers only
+    # pass simple commands.
+    args = command.split()
+    # CLI text output is redirected into a throwaway temp file; the return
+    # value is taken from cli.result instead of the captured text.
+    temp = tempfile.TemporaryFile(mode="w")
+
+    cli = get_default_cli()
+    cli.invoke(args, None, temp)
+
+    # temp.seek(0)
+    # data = temp.read().strip()
+    temp.close()
+
+    if cli.result.result:
+        return cli.result.result
+    elif cli.result.error:
+        logging.error(cli.result.error)
+        raise Exception(cli.result.error)
+    return None
+
+# apply_exclusions - get list of storage accounts to exclude from deployment
+def apply_exclusions(filename, azure_storage_account_list):
+ if not os.path.isfile(filename):
+ logging.error("No file for exclusions. File 'exclude.txt' not found.\n")
+ raise Exception("No file for exclusions. File 'exclude.txt' not found.\n")
+ else:
+ content = []
+ with open(filename) as f:
+ content = f.read().splitlines()
+
+ temp_list = []
+ for storage_account_name in content:
+ for storage_account in azure_storage_account_list:
+ if storage_account["name"] == storage_account_name:
+ temp_list.append(storage_account)
+
+ if len(temp_list):
+ temp_list_names = ""
+ for item in temp_list:
+ temp_list_names += str(item["name"]) + ", "
+
+ logging.info('Excluding ' + str(len(temp_list)) + ' storage accounts [' + temp_list_names + '] as per the contents in exclude.txt')
+ else:
+ logging.info('Excluding ' + str(len(temp_list)) + ' storage accounts from the deployment')
+
+ for item in temp_list:
+ azure_storage_account_list.remove(item)
+
+ return azure_storage_account_list
+
+# # Convert all Dict keys into a list for set-issubset checks
+# def get_all_keys(dict):
+# for key, value in dict.items():
+# yield key
+# if isinstance(value, dict):
+# yield from get_all_keys(value)
+
+def get_config_from_file(config_key):
+
+ with open('config.json', 'r+') as f:
+ json_object = json.loads(f.read())
+
+ # # Accessing Dict with Object notation for complex queries, using Munch
+ # if "." in config_key:
+ # # d = AttrDict(json_object)
+ # # print(dir(d))
+ # # print(d.)
+ # # return d
+
+ # d = DefaultMunch.fromDict(json_object)
+ # print("Munch: " + str(d[config_key]))
+ # return d[config_key]
+
+ if json_object[config_key]:
+ return json_object[config_key]
+ return None
+
+def get_cloudone_region():
+ cloudone_config = get_config_from_file('cloudone')
+ if cloudone_config and "region" in cloudone_config.keys():
+ if cloudone_config['region']:
+ return str(cloudone_config['region'])
+ if 'CLOUDONE_REGION' in os.environ.keys():
+ return os.environ.get('CLOUDONE_REGION')
+ logging.error("Missing Cloud One Region. Check \"CLOUDONE_REGION\" env. variable or \"cloudone.region\" in the config.json file.")
+ raise Exception("Missing Cloud One Region. Check \"CLOUDONE_REGION\" env. variable or \"cloudone.region\" in the config.json file.")
+
+def get_cloudone_api_key():
+ cloudone_config = get_config_from_file('cloudone')
+ if cloudone_config and "api_key" in cloudone_config.keys():
+ if cloudone_config['api_key']:
+ return str(cloudone_config['api_key'])
+ if 'CLOUDONE_API_KEY' in os.environ.keys():
+ return os.environ.get('CLOUDONE_API_KEY')
+ logging.error("Missing Cloud One API Key. Check \"CLOUDONE_API_KEY\" env. variable or \"cloudone.api_key\" in the config.json file.")
+ raise Exception("Missing Cloud One API Key. Check \"CLOUDONE_API_KEY\" env. variable or \"cloudone.api_key\" in the config.json file.")
+
+def get_cloudone_max_storage_to_scanner_count():
+ cloudone_config = get_config_from_file('cloudone')
+ if cloudone_config and "max_storage_stack_per_scanner_stack" in cloudone_config.keys():
+ if cloudone_config['max_storage_stack_per_scanner_stack']:
+ return str(cloudone_config['max_storage_stack_per_scanner_stack'])
+ if 'MAX_STORAGE_STACK_PER_SCANNER_STACK' in os.environ.keys():
+ return os.environ.get('MAX_STORAGE_STACK_PER_SCANNER_STACK')
+ return 50 # Recommended value for the number of Storage Stack(s) per Scanner Stack
+
+def get_subscription_id():
+ azure_subscription_id = str(get_config_from_file('subscription_id'))
+ if azure_subscription_id:
+ return azure_subscription_id
+ if 'AZURE_SUBSCRIPTION_ID' in os.environ.keys():
+ return os.environ.get('AZURE_SUBSCRIPTION_ID')
+ logging.error("Missing Azure Subscription ID. Check \"AZURE_SUBSCRIPTION_ID\" env. variable or \"subscription_id\" in the config.json file.")
+ raise Exception("Missing Azure Subscription ID. Check \"AZURE_SUBSCRIPTION_ID\" env. variable or \"subscription_id\" in the config.json file.")
+
+def get_subscription_id_from_resource_group_id(resource_group_id):
+
+ return resource_group_id.split('/')[2]
+
+def remove_storage_accounts_with_cloudone_storage_stacks(azure_storage_account_list):
+
+ cloudone_storage_stack_list = cloudone_fss_api.get_storage_stacks()
+
+ if cloudone_storage_stack_list:
+
+ temp_list = []
+ for storage_account in azure_storage_account_list:
+
+ for storage_stack in cloudone_storage_stack_list["stacks"]:
+
+ if storage_account["name"] == storage_stack["storage"]:
+
+ temp_list.append(storage_account)
+
+ for storage_account in temp_list:
+
+ azure_storage_account_list.remove(storage_account)
+
+ return azure_storage_account_list
+
+# function to return dict key for any value
+def get_dict_key(value_dict, val):
+ if value_dict:
+ for key, value in value_dict.items():
+ if val == value:
+ return key
+ return None
+
+def trim_resource_name(resource_name, start_trim_count=0, end_trim_count=0):
+ if len(resource_name) > (start_trim_count + end_trim_count):
+ return resource_name[:start_trim_count] + resource_name[-end_trim_count:]
+ return resource_name.lower()
+
+def trim_location_name(azure_location_name):
+    # Build a short lower-case abbreviation for an Azure location name longer
+    # than 6 characters: the first letter of every word of the location's
+    # display name except the last, followed by the first 3 characters of the
+    # last word (e.g. a display name "Australia Central 2" -> "ac2").
+    # Names of 6 characters or fewer are returned unchanged, case preserved.
+    # NOTE(review): relies on locations.get_azure_location_detail() returning
+    # a dict with a "displayName" key — confirm against that module.
+    if len(azure_location_name) > 6:
+        azure_location_display_name = locations.get_azure_location_detail(azure_location_name)["displayName"]
+        string_output = None
+        temp_list = azure_location_display_name.split(" ")
+        # Collect the first letter of every word except the last.
+        for item in temp_list[:len(temp_list)-1]:
+            if not string_output:
+                string_output = str(item[0])
+            else:
+                string_output = string_output + str(item[0])
+        # Append the first 3 characters of the last word.
+        string_output = string_output + str(temp_list[-1:][0][:3])
+        return string_output.lower()
+    return azure_location_name
+
+def trim_spaces(string_value):
+ if " " in string_value:
+ string_output = None
+ temp_list = string_value.split(" ")
+ for item in temp_list[:len(temp_list)-1]:
+ if not string_output:
+ string_output = str(item[0])
+ else:
+ string_output = string_output + str(item[0])
+ string_output = string_output + str(temp_list[-1:][0])
+ return string_output.lower()
+ elif len(string_value) > 6:
+ return string_value[:3].lower()
+ return string_value.lower()
\ No newline at end of file
diff --git a/deployment/azure-python-deploy-to-all-existing-storage/deployToAllExistingStorageAccounts/host.json b/deployment/azure-python-deploy-to-all-existing-storage/deployToAllExistingStorageAccounts/host.json
new file mode 100644
index 00000000..bf296b39
--- /dev/null
+++ b/deployment/azure-python-deploy-to-all-existing-storage/deployToAllExistingStorageAccounts/host.json
@@ -0,0 +1,23 @@
+{
+ "version": "2.0",
+ "logging": {
+ "fileLoggingMode": "always",
+ "logLevel": {
+ "default": "Trace",
+ "Host.Results": "Trace",
+ "Function": "Trace",
+ "Host.Aggregator": "Trace"
+ },
+ "applicationInsights": {
+ "samplingSettings": {
+ "isEnabled": true,
+ "excludedTypes": "Request"
+ }
+ }
+ },
+ "extensionBundle": {
+ "id": "Microsoft.Azure.Functions.ExtensionBundle",
+ "version": "[2.*, 3.0.0)"
+ },
+ "functionTimeout": "00:02:00"
+}
\ No newline at end of file
diff --git a/deployment/azure-python-deploy-to-all-existing-storage/deployToAllExistingStorageAccounts/local.settings.json b/deployment/azure-python-deploy-to-all-existing-storage/deployToAllExistingStorageAccounts/local.settings.json
new file mode 100644
index 00000000..64b69e85
--- /dev/null
+++ b/deployment/azure-python-deploy-to-all-existing-storage/deployToAllExistingStorageAccounts/local.settings.json
@@ -0,0 +1,11 @@
+{
+ "IsEncrypted": false,
+ "Values": {
+    "AzureWebJobsStorage": "",
+ "FUNCTIONS_WORKER_RUNTIME": "python",
+ "BUS_TOPIC_CONNECTION_STRING": "",
+ "FUNCTIONS_EXTENSION_VERSION": "~4",
+    "APPINSIGHTS_INSTRUMENTATIONKEY": ""
+ },
+ "ConnectionStrings": {}
+}
\ No newline at end of file
diff --git a/deployment/azure-python-deploy-to-all-existing-storage/deployToAllExistingStorageAccounts/requirements.txt b/deployment/azure-python-deploy-to-all-existing-storage/deployToAllExistingStorageAccounts/requirements.txt
new file mode 100644
index 00000000..0ecdf22a
--- /dev/null
+++ b/deployment/azure-python-deploy-to-all-existing-storage/deployToAllExistingStorageAccounts/requirements.txt
@@ -0,0 +1,13 @@
+# Do not include azure-functions-worker as it may conflict with the Azure Functions platform
+azure-cli==2.37.0
+azure-functions==1.7.2
+azure-storage-blob==12.12.0
+azure-identity==1.10.0 ## azure-cli satisfies this requirement
+azure-mgmt-resource==21.1.0b1 ## azure-cli satisfies this requirement
+azure-mgmt-subscription==3.0.0
+azure-mgmt-storage==20.0.0
+azure-keyvault-secrets==4.4.0
+urllib3==1.26.9
+argparse==1.4.0
+uuid7==0.1.0
+munch==2.5.0
\ No newline at end of file