diff --git a/.github/boring-cyborg.yml b/.github/boring-cyborg.yml index 6c79a082a0aab0..7a5554511fede7 100644 --- a/.github/boring-cyborg.yml +++ b/.github/boring-cyborg.yml @@ -25,10 +25,7 @@ labelPRBasedOnFilePath: - providers/alibaba/** provider:amazon: - - providers/src/airflow/providers/amazon/**/* - - providers/tests/amazon/aws/**/* - - docs/apache-airflow-providers-amazon/**/* - - providers/tests/system/amazon/aws/**/* + - providers/amazon/** provider:apache-beam: - providers/apache/beam/** diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 4282071b3c5229..8aeaf49d7990db 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -625,8 +625,8 @@ repos: ^airflow/operators/__init__.py$| ^providers/common/sql/tests/provider_tests/common/sql/operators/test_sql_execute.py$| ^providers/fab/src/airflow/providers/fab/www/static/css/bootstrap-theme.css$| - ^providers/src/airflow/providers/amazon/aws/hooks/emr.py$| - ^providers/src/airflow/providers/amazon/aws/operators/emr.py$| + ^providers/amazon/src/airflow/providers/amazon/aws/hooks/emr.py$| + ^providers/amazon/src/airflow/providers/amazon/aws/operators/emr.py$| ^providers/apache/cassandra/src/airflow/providers/apache/cassandra/hooks/cassandra.py$| ^providers/apache/hive/src/airflow/providers/apache/hive/operators/hive_stats.py$| ^providers/apache/hive/src/airflow/providers/apache/hive/transfers/vertica_to_hive.py$| diff --git a/dev/breeze/tests/test_packages.py b/dev/breeze/tests/test_packages.py index c91e9485b3a210..74956e7e89da54 100644 --- a/dev/breeze/tests/test_packages.py +++ b/dev/breeze/tests/test_packages.py @@ -37,7 +37,6 @@ get_old_source_providers_package_path, get_pip_package_name, get_provider_info_dict, - get_provider_jinja_context, get_provider_requirements, get_removed_provider_ids, get_short_package_name, @@ -379,38 +378,5 @@ def test_get_provider_info_dict(): assert len(provider_info_dict["notifications"]) > 2 assert len(provider_info_dict["secrets-backends"]) > 1 
assert len(provider_info_dict["logging"]) > 1 - assert len(provider_info_dict["additional-extras"]) > 3 assert len(provider_info_dict["config"].keys()) > 1 assert len(provider_info_dict["executors"]) > 0 - - -# TODO(potiuk) - remove when all providers are new-style -def test_old_provider_jinja_context(): - provider_info = get_provider_info_dict("amazon") - version = provider_info["versions"][0] - context = get_provider_jinja_context( - provider_id="amazon", current_release_version=version, version_suffix="rc1" - ) - expected = { - "PROVIDER_ID": "amazon", - "PACKAGE_PIP_NAME": "apache-airflow-providers-amazon", - "PACKAGE_DIST_PREFIX": "apache_airflow_providers_amazon", - "FULL_PACKAGE_NAME": "airflow.providers.amazon", - "RELEASE": version, - "RELEASE_NO_LEADING_ZEROS": version, - "VERSION_SUFFIX": ".rc1", - "PROVIDER_DESCRIPTION": "Amazon integration (including `Amazon Web Services (AWS) `__).\n", - "CHANGELOG_RELATIVE_PATH": "../../providers/src/airflow/providers/amazon", - "SUPPORTED_PYTHON_VERSIONS": ["3.9", "3.10", "3.11", "3.12"], - "PLUGINS": [], - "MIN_AIRFLOW_VERSION": "2.9.0", - "PROVIDER_REMOVED": False, - "PROVIDER_INFO": provider_info, - } - - for key, value in expected.items(): - assert context[key] == value - assert """"google" = [ - "apache-airflow-providers-google", -]""" in context["EXTRAS_REQUIREMENTS"] - assert len(context["PIP_REQUIREMENTS"]) > 10 diff --git a/dev/breeze/tests/test_pytest_args_for_test_types.py b/dev/breeze/tests/test_pytest_args_for_test_types.py index d13f70467d8ce8..e289a3ba965a25 100644 --- a/dev/breeze/tests/test_pytest_args_for_test_types.py +++ b/dev/breeze/tests/test_pytest_args_for_test_types.py @@ -88,7 +88,7 @@ def _all_new_providers() -> list[str]: ( GroupOfTests.PROVIDERS, "Providers[amazon]", - ["providers/tests/amazon"], + ["providers/amazon/tests"], ), ( GroupOfTests.PROVIDERS, @@ -99,7 +99,7 @@ def _all_new_providers() -> list[str]: GroupOfTests.PROVIDERS, "Providers[amazon,google,apache.hive]", [ - 
"providers/tests/amazon", + "providers/amazon/tests", "providers/google/tests", "providers/apache/hive/tests", ], @@ -114,7 +114,6 @@ def _all_new_providers() -> list[str]: if provider not in ["amazon", "google", "microsoft/azure"] ], "providers/tests", - "--ignore=providers/tests/amazon", ], ), ( @@ -245,14 +244,14 @@ def test_pytest_args_for_missing_provider(): GroupOfTests.PROVIDERS, "Providers[amazon]", [ - "providers/tests/amazon", + "providers/amazon/tests", ], ), ( GroupOfTests.PROVIDERS, "Providers[amazon] Providers[google]", [ - "providers/tests/amazon", + "providers/amazon/tests", "providers/google/tests", ], ), @@ -278,9 +277,7 @@ def test_pytest_args_for_missing_provider(): if provider not in ["amazon", "google"] ], "providers/tests", - *[ - "providers/google/tests" - ], # Once amazon is migrated to the new structure, amazon needs to be added to the list here. + *["providers/amazon/tests", "providers/google/tests"], ], ), ( diff --git a/dev/breeze/tests/test_selective_checks.py b/dev/breeze/tests/test_selective_checks.py index d52b9699fdbf5b..3eaaf96b7d3cff 100644 --- a/dev/breeze/tests/test_selective_checks.py +++ b/dev/breeze/tests/test_selective_checks.py @@ -717,7 +717,7 @@ def assert_outputs_are_printed(expected_outputs: dict[str, str], stderr: str): ) ), pytest.param( - ("providers/src/airflow/providers/amazon/__init__.py",), + ("providers/amazon/src/airflow/providers/amazon/__init__.py",), { "selected-providers-list-as-string": "amazon apache.hive cncf.kubernetes " "common.compat common.sql exasol ftp google http imap microsoft.azure " @@ -769,7 +769,7 @@ def assert_outputs_are_printed(expected_outputs: dict[str, str], stderr: str): id="Providers tests run without amazon tests if no amazon file changed", ), pytest.param( - ("providers/src/airflow/providers/amazon/file.py",), + ("providers/amazon/src/airflow/providers/amazon/file.py",), { "selected-providers-list-as-string": "amazon apache.hive cncf.kubernetes " "common.compat common.sql exasol 
ftp google http imap microsoft.azure " diff --git a/docs/.gitignore b/docs/.gitignore index 85bd9936ad69b7..009cbfc28a9d51 100644 --- a/docs/.gitignore +++ b/docs/.gitignore @@ -2,6 +2,7 @@ # Eventually when we swtich to individually build docs for each provider, we should remove this altogether apache-airflow-providers-airbyte apache-airflow-providers-alibaba +apache-airflow-providers-amazon apache-airflow-providers-apache-beam apache-airflow-providers-apache-cassandra apache-airflow-providers-apache-drill diff --git a/docs/apache-airflow-providers-amazon/changelog.rst b/docs/apache-airflow-providers-amazon/changelog.rst deleted file mode 100644 index 8138c8db39b24c..00000000000000 --- a/docs/apache-airflow-providers-amazon/changelog.rst +++ /dev/null @@ -1,25 +0,0 @@ - - .. Licensed to the Apache Software Foundation (ASF) under one - or more contributor license agreements. See the NOTICE file - distributed with this work for additional information - regarding copyright ownership. The ASF licenses this file - to you under the Apache License, Version 2.0 (the - "License"); you may not use this file except in compliance - with the License. You may obtain a copy of the License at - - .. http://www.apache.org/licenses/LICENSE-2.0 - - .. Unless required by applicable law or agreed to in writing, - software distributed under the License is distributed on an - "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - KIND, either express or implied. See the License for the - specific language governing permissions and limitations - under the License. - - .. NOTE! THIS FILE IS AUTOMATICALLY GENERATED AND WILL BE - OVERWRITTEN WHEN PREPARING PACKAGES. - - .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE - `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY - -.. 
include:: ../../providers/src/airflow/providers/amazon/CHANGELOG.rst diff --git a/providers/amazon/README.rst b/providers/amazon/README.rst new file mode 100644 index 00000000000000..6407f6df924e29 --- /dev/null +++ b/providers/amazon/README.rst @@ -0,0 +1,106 @@ + + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + + .. NOTE! THIS FILE IS AUTOMATICALLY GENERATED AND WILL BE OVERWRITTEN! + + .. IF YOU WANT TO MODIFY TEMPLATE FOR THIS FILE, YOU SHOULD MODIFY THE TEMPLATE + `PROVIDER_README_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY + + +Package ``apache-airflow-providers-amazon`` + +Release: ``9.2.0`` + + +Amazon integration (including `Amazon Web Services (AWS) `__). + + +Provider package +---------------- + +This is a provider package for ``amazon`` provider. All classes for this provider package +are in ``airflow.providers.amazon`` python package. + +You can find package information and changelog for the provider +in the `documentation `_. 
+ +Installation +------------ + +You can install this package on top of an existing Airflow 2 installation (see ``Requirements`` below +for the minimum Airflow version supported) via +``pip install apache-airflow-providers-amazon`` + +The package supports the following python versions: 3.9,3.10,3.11,3.12 + +Requirements +------------ + +========================================== ====================== +PIP package Version required +========================================== ====================== +``apache-airflow`` ``>=2.9.0`` +``apache-airflow-providers-common-compat`` ``>=1.3.0`` +``apache-airflow-providers-common-sql`` ``>=1.20.0`` +``apache-airflow-providers-http`` +``boto3`` ``>=1.34.90`` +``botocore`` ``>=1.34.90`` +``inflection`` ``>=0.5.1`` +``watchtower`` ``>=3.0.0,!=3.3.0,<4`` +``jsonpath_ng`` ``>=1.5.3`` +``redshift_connector`` ``>=2.0.918`` +``asgiref`` ``>=2.3.0`` +``PyAthena`` ``>=3.0.10`` +``jmespath`` ``>=0.7.0`` +``python3-saml`` ``>=1.16.0`` +========================================== ====================== + +Cross provider package dependencies +----------------------------------- + +Those are dependencies that might be needed in order to use all the features of the package. +You need to install the specified provider packages in order to use them. + +You can install such cross-provider dependencies when installing from PyPI. For example: + +.. 
code-block:: bash + + pip install apache-airflow-providers-amazon[apache.hive] + + +====================================================================================================================== =================== +Dependent package Extra +====================================================================================================================== =================== +`apache-airflow-providers-apache-hive `_ ``apache.hive`` +`apache-airflow-providers-cncf-kubernetes `_ ``cncf.kubernetes`` +`apache-airflow-providers-common-compat `_ ``common.compat`` +`apache-airflow-providers-common-sql `_ ``common.sql`` +`apache-airflow-providers-exasol `_ ``exasol`` +`apache-airflow-providers-ftp `_ ``ftp`` +`apache-airflow-providers-google `_ ``google`` +`apache-airflow-providers-http `_ ``http`` +`apache-airflow-providers-imap `_ ``imap`` +`apache-airflow-providers-microsoft-azure `_ ``microsoft.azure`` +`apache-airflow-providers-mongo `_ ``mongo`` +`apache-airflow-providers-openlineage `_ ``openlineage`` +`apache-airflow-providers-salesforce `_ ``salesforce`` +`apache-airflow-providers-ssh `_ ``ssh`` +====================================================================================================================== =================== + +The changelog for the provider package can be found in the +`changelog `_. 
diff --git a/providers/src/airflow/providers/amazon/.latest-doc-only-change.txt b/providers/amazon/docs/.latest-doc-only-change.txt similarity index 100% rename from providers/src/airflow/providers/amazon/.latest-doc-only-change.txt rename to providers/amazon/docs/.latest-doc-only-change.txt diff --git a/docs/apache-airflow-providers-amazon/_partials/generic_parameters.rst b/providers/amazon/docs/_partials/generic_parameters.rst similarity index 100% rename from docs/apache-airflow-providers-amazon/_partials/generic_parameters.rst rename to providers/amazon/docs/_partials/generic_parameters.rst diff --git a/docs/apache-airflow-providers-amazon/_partials/prerequisite_tasks.rst b/providers/amazon/docs/_partials/prerequisite_tasks.rst similarity index 100% rename from docs/apache-airflow-providers-amazon/_partials/prerequisite_tasks.rst rename to providers/amazon/docs/_partials/prerequisite_tasks.rst diff --git a/docs/apache-airflow-providers-amazon/auth-manager/index.rst b/providers/amazon/docs/auth-manager/index.rst similarity index 100% rename from docs/apache-airflow-providers-amazon/auth-manager/index.rst rename to providers/amazon/docs/auth-manager/index.rst diff --git a/docs/apache-airflow-providers-amazon/auth-manager/manage/index.rst b/providers/amazon/docs/auth-manager/manage/index.rst similarity index 100% rename from docs/apache-airflow-providers-amazon/auth-manager/manage/index.rst rename to providers/amazon/docs/auth-manager/manage/index.rst diff --git a/docs/apache-airflow-providers-amazon/auth-manager/setup/amazon-verified-permissions.rst b/providers/amazon/docs/auth-manager/setup/amazon-verified-permissions.rst similarity index 100% rename from docs/apache-airflow-providers-amazon/auth-manager/setup/amazon-verified-permissions.rst rename to providers/amazon/docs/auth-manager/setup/amazon-verified-permissions.rst diff --git a/docs/apache-airflow-providers-amazon/auth-manager/setup/config.rst b/providers/amazon/docs/auth-manager/setup/config.rst 
similarity index 100% rename from docs/apache-airflow-providers-amazon/auth-manager/setup/config.rst rename to providers/amazon/docs/auth-manager/setup/config.rst diff --git a/docs/apache-airflow-providers-amazon/auth-manager/setup/identity-center.rst b/providers/amazon/docs/auth-manager/setup/identity-center.rst similarity index 100% rename from docs/apache-airflow-providers-amazon/auth-manager/setup/identity-center.rst rename to providers/amazon/docs/auth-manager/setup/identity-center.rst diff --git a/providers/src/airflow/providers/amazon/aws/links/ec2.py b/providers/amazon/docs/aws/links/ec2.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/links/ec2.py rename to providers/amazon/docs/aws/links/ec2.py diff --git a/providers/src/airflow/providers/amazon/CHANGELOG.rst b/providers/amazon/docs/changelog.rst similarity index 100% rename from providers/src/airflow/providers/amazon/CHANGELOG.rst rename to providers/amazon/docs/changelog.rst diff --git a/docs/apache-airflow-providers-amazon/cli-ref.rst b/providers/amazon/docs/cli-ref.rst similarity index 100% rename from docs/apache-airflow-providers-amazon/cli-ref.rst rename to providers/amazon/docs/cli-ref.rst diff --git a/docs/apache-airflow-providers-amazon/commits.rst b/providers/amazon/docs/commits.rst similarity index 100% rename from docs/apache-airflow-providers-amazon/commits.rst rename to providers/amazon/docs/commits.rst diff --git a/docs/apache-airflow-providers-amazon/configurations-ref.rst b/providers/amazon/docs/configurations-ref.rst similarity index 100% rename from docs/apache-airflow-providers-amazon/configurations-ref.rst rename to providers/amazon/docs/configurations-ref.rst diff --git a/docs/apache-airflow-providers-amazon/connections/athena.rst b/providers/amazon/docs/connections/athena.rst similarity index 100% rename from docs/apache-airflow-providers-amazon/connections/athena.rst rename to providers/amazon/docs/connections/athena.rst diff --git 
a/docs/apache-airflow-providers-amazon/connections/aws.rst b/providers/amazon/docs/connections/aws.rst similarity index 100% rename from docs/apache-airflow-providers-amazon/connections/aws.rst rename to providers/amazon/docs/connections/aws.rst diff --git a/docs/apache-airflow-providers-amazon/connections/chime.rst b/providers/amazon/docs/connections/chime.rst similarity index 100% rename from docs/apache-airflow-providers-amazon/connections/chime.rst rename to providers/amazon/docs/connections/chime.rst diff --git a/docs/apache-airflow-providers-amazon/connections/emr.rst b/providers/amazon/docs/connections/emr.rst similarity index 100% rename from docs/apache-airflow-providers-amazon/connections/emr.rst rename to providers/amazon/docs/connections/emr.rst diff --git a/docs/apache-airflow-providers-amazon/connections/index.rst b/providers/amazon/docs/connections/index.rst similarity index 100% rename from docs/apache-airflow-providers-amazon/connections/index.rst rename to providers/amazon/docs/connections/index.rst diff --git a/docs/apache-airflow-providers-amazon/connections/redshift.rst b/providers/amazon/docs/connections/redshift.rst similarity index 100% rename from docs/apache-airflow-providers-amazon/connections/redshift.rst rename to providers/amazon/docs/connections/redshift.rst diff --git a/docs/apache-airflow-providers-amazon/deferrable.rst b/providers/amazon/docs/deferrable.rst similarity index 100% rename from docs/apache-airflow-providers-amazon/deferrable.rst rename to providers/amazon/docs/deferrable.rst diff --git a/docs/apache-airflow-providers-amazon/example-dags.rst b/providers/amazon/docs/example-dags.rst similarity index 94% rename from docs/apache-airflow-providers-amazon/example-dags.rst rename to providers/amazon/docs/example-dags.rst index 2762597cb4ba36..05f862102c0a3c 100644 --- a/docs/apache-airflow-providers-amazon/example-dags.rst +++ b/providers/amazon/docs/example-dags.rst @@ -20,4 +20,4 @@ Example DAGs You can learn how to use 
Amazon AWS integrations by analyzing the source code of the example DAGs: -* `Amazon AWS `__ +* `Amazon AWS `__ diff --git a/docs/apache-airflow-providers-amazon/executors/batch-executor.rst b/providers/amazon/docs/executors/batch-executor.rst similarity index 100% rename from docs/apache-airflow-providers-amazon/executors/batch-executor.rst rename to providers/amazon/docs/executors/batch-executor.rst diff --git a/docs/apache-airflow-providers-amazon/executors/ecs-executor.rst b/providers/amazon/docs/executors/ecs-executor.rst similarity index 100% rename from docs/apache-airflow-providers-amazon/executors/ecs-executor.rst rename to providers/amazon/docs/executors/ecs-executor.rst diff --git a/docs/apache-airflow-providers-amazon/executors/general.rst b/providers/amazon/docs/executors/general.rst similarity index 100% rename from docs/apache-airflow-providers-amazon/executors/general.rst rename to providers/amazon/docs/executors/general.rst diff --git a/docs/apache-airflow-providers-amazon/executors/index.rst b/providers/amazon/docs/executors/index.rst similarity index 100% rename from docs/apache-airflow-providers-amazon/executors/index.rst rename to providers/amazon/docs/executors/index.rst diff --git a/docs/apache-airflow-providers-amazon/img/aws-base-conn-airflow.png b/providers/amazon/docs/img/aws-base-conn-airflow.png similarity index 100% rename from docs/apache-airflow-providers-amazon/img/aws-base-conn-airflow.png rename to providers/amazon/docs/img/aws-base-conn-airflow.png diff --git a/docs/apache-airflow-providers-amazon/img/aws-secrets-manager-gcp.png b/providers/amazon/docs/img/aws-secrets-manager-gcp.png similarity index 100% rename from docs/apache-airflow-providers-amazon/img/aws-secrets-manager-gcp.png rename to providers/amazon/docs/img/aws-secrets-manager-gcp.png diff --git a/docs/apache-airflow-providers-amazon/img/aws-secrets-manager-json.png b/providers/amazon/docs/img/aws-secrets-manager-json.png similarity index 100% rename from 
docs/apache-airflow-providers-amazon/img/aws-secrets-manager-json.png rename to providers/amazon/docs/img/aws-secrets-manager-json.png diff --git a/docs/apache-airflow-providers-amazon/img/aws-secrets-manager-uri.png b/providers/amazon/docs/img/aws-secrets-manager-uri.png similarity index 100% rename from docs/apache-airflow-providers-amazon/img/aws-secrets-manager-uri.png rename to providers/amazon/docs/img/aws-secrets-manager-uri.png diff --git a/docs/apache-airflow-providers-amazon/img/aws-web-identity-federation-gcp.png b/providers/amazon/docs/img/aws-web-identity-federation-gcp.png similarity index 100% rename from docs/apache-airflow-providers-amazon/img/aws-web-identity-federation-gcp.png rename to providers/amazon/docs/img/aws-web-identity-federation-gcp.png diff --git a/docs/apache-airflow-providers-amazon/img/diagram_auth_manager_architecture.md5sum b/providers/amazon/docs/img/diagram_auth_manager_architecture.md5sum similarity index 100% rename from docs/apache-airflow-providers-amazon/img/diagram_auth_manager_architecture.md5sum rename to providers/amazon/docs/img/diagram_auth_manager_architecture.md5sum diff --git a/docs/apache-airflow-providers-amazon/img/diagram_auth_manager_architecture.png b/providers/amazon/docs/img/diagram_auth_manager_architecture.png similarity index 100% rename from docs/apache-airflow-providers-amazon/img/diagram_auth_manager_architecture.png rename to providers/amazon/docs/img/diagram_auth_manager_architecture.png diff --git a/docs/apache-airflow-providers-amazon/img/diagram_auth_manager_architecture.py b/providers/amazon/docs/img/diagram_auth_manager_architecture.py similarity index 100% rename from docs/apache-airflow-providers-amazon/img/diagram_auth_manager_architecture.py rename to providers/amazon/docs/img/diagram_auth_manager_architecture.py diff --git a/docs/apache-airflow-providers-amazon/img/icons/avp.png b/providers/amazon/docs/img/icons/avp.png similarity index 100% rename from 
docs/apache-airflow-providers-amazon/img/icons/avp.png rename to providers/amazon/docs/img/icons/avp.png diff --git a/docs/apache-airflow-providers-amazon/img/icons/idc.png b/providers/amazon/docs/img/icons/idc.png similarity index 100% rename from docs/apache-airflow-providers-amazon/img/icons/idc.png rename to providers/amazon/docs/img/icons/idc.png diff --git a/docs/apache-airflow-providers-amazon/index.rst b/providers/amazon/docs/index.rst similarity index 100% rename from docs/apache-airflow-providers-amazon/index.rst rename to providers/amazon/docs/index.rst diff --git a/docs/apache-airflow-providers-amazon/installing-providers-from-sources.rst b/providers/amazon/docs/installing-providers-from-sources.rst similarity index 100% rename from docs/apache-airflow-providers-amazon/installing-providers-from-sources.rst rename to providers/amazon/docs/installing-providers-from-sources.rst diff --git a/docs/integration-logos/aws/AWS-Batch_light-bg@4x.png b/providers/amazon/docs/integration-logos/AWS-Batch_light-bg@4x.png similarity index 100% rename from docs/integration-logos/aws/AWS-Batch_light-bg@4x.png rename to providers/amazon/docs/integration-logos/AWS-Batch_light-bg@4x.png diff --git a/docs/integration-logos/aws/AWS-Cloud-alt_light-bg@4x.png b/providers/amazon/docs/integration-logos/AWS-Cloud-alt_light-bg@4x.png similarity index 100% rename from docs/integration-logos/aws/AWS-Cloud-alt_light-bg@4x.png rename to providers/amazon/docs/integration-logos/AWS-Cloud-alt_light-bg@4x.png diff --git a/docs/integration-logos/aws/AWS-CloudFormation_light-bg@4x.png b/providers/amazon/docs/integration-logos/AWS-CloudFormation_light-bg@4x.png similarity index 100% rename from docs/integration-logos/aws/AWS-CloudFormation_light-bg@4x.png rename to providers/amazon/docs/integration-logos/AWS-CloudFormation_light-bg@4x.png diff --git a/docs/integration-logos/aws/AWS-DataSync_light-bg@4x.png b/providers/amazon/docs/integration-logos/AWS-DataSync_light-bg@4x.png similarity index 
100% rename from docs/integration-logos/aws/AWS-DataSync_light-bg@4x.png rename to providers/amazon/docs/integration-logos/AWS-DataSync_light-bg@4x.png diff --git a/docs/integration-logos/aws/AWS-Database-Migration-Service_64@5x.png b/providers/amazon/docs/integration-logos/AWS-Database-Migration-Service_64@5x.png similarity index 100% rename from docs/integration-logos/aws/AWS-Database-Migration-Service_64@5x.png rename to providers/amazon/docs/integration-logos/AWS-Database-Migration-Service_64@5x.png diff --git a/docs/integration-logos/aws/AWS-Glue-DataBrew_64.png b/providers/amazon/docs/integration-logos/AWS-Glue-DataBrew_64.png similarity index 100% rename from docs/integration-logos/aws/AWS-Glue-DataBrew_64.png rename to providers/amazon/docs/integration-logos/AWS-Glue-DataBrew_64.png diff --git a/docs/integration-logos/aws/AWS-Glue_light-bg@4x.png b/providers/amazon/docs/integration-logos/AWS-Glue_light-bg@4x.png similarity index 100% rename from docs/integration-logos/aws/AWS-Glue_light-bg@4x.png rename to providers/amazon/docs/integration-logos/AWS-Glue_light-bg@4x.png diff --git a/docs/integration-logos/aws/AWS-Lambda_light-bg@4x.png b/providers/amazon/docs/integration-logos/AWS-Lambda_light-bg@4x.png similarity index 100% rename from docs/integration-logos/aws/AWS-Lambda_light-bg@4x.png rename to providers/amazon/docs/integration-logos/AWS-Lambda_light-bg@4x.png diff --git a/docs/integration-logos/aws/AWS-STS_light-bg@4x.png b/providers/amazon/docs/integration-logos/AWS-STS_light-bg@4x.png similarity index 100% rename from docs/integration-logos/aws/AWS-STS_light-bg@4x.png rename to providers/amazon/docs/integration-logos/AWS-STS_light-bg@4x.png diff --git a/docs/integration-logos/aws/AWS-Secrets-Manager_light-bg@4x.png b/providers/amazon/docs/integration-logos/AWS-Secrets-Manager_light-bg@4x.png similarity index 100% rename from docs/integration-logos/aws/AWS-Secrets-Manager_light-bg@4x.png rename to 
providers/amazon/docs/integration-logos/AWS-Secrets-Manager_light-bg@4x.png diff --git a/docs/integration-logos/aws/AWS-Step-Functions_light-bg@4x.png b/providers/amazon/docs/integration-logos/AWS-Step-Functions_light-bg@4x.png similarity index 100% rename from docs/integration-logos/aws/AWS-Step-Functions_light-bg@4x.png rename to providers/amazon/docs/integration-logos/AWS-Step-Functions_light-bg@4x.png diff --git a/docs/integration-logos/aws/AWS-Systems-Manager_light-bg@4x.png b/providers/amazon/docs/integration-logos/AWS-Systems-Manager_light-bg@4x.png similarity index 100% rename from docs/integration-logos/aws/AWS-Systems-Manager_light-bg@4x.png rename to providers/amazon/docs/integration-logos/AWS-Systems-Manager_light-bg@4x.png diff --git a/docs/integration-logos/aws/Amazon-Athena_light-bg@4x.png b/providers/amazon/docs/integration-logos/Amazon-Athena_light-bg@4x.png similarity index 100% rename from docs/integration-logos/aws/Amazon-Athena_light-bg@4x.png rename to providers/amazon/docs/integration-logos/Amazon-Athena_light-bg@4x.png diff --git a/docs/integration-logos/aws/Amazon-Bedrock_light-bg@4x.png b/providers/amazon/docs/integration-logos/Amazon-Bedrock_light-bg@4x.png similarity index 100% rename from docs/integration-logos/aws/Amazon-Bedrock_light-bg@4x.png rename to providers/amazon/docs/integration-logos/Amazon-Bedrock_light-bg@4x.png diff --git a/docs/integration-logos/aws/Amazon-Chime-light-bg.png b/providers/amazon/docs/integration-logos/Amazon-Chime-light-bg.png similarity index 100% rename from docs/integration-logos/aws/Amazon-Chime-light-bg.png rename to providers/amazon/docs/integration-logos/Amazon-Chime-light-bg.png diff --git a/docs/integration-logos/aws/Amazon-CloudWatch_light-bg@4x.png b/providers/amazon/docs/integration-logos/Amazon-CloudWatch_light-bg@4x.png similarity index 100% rename from docs/integration-logos/aws/Amazon-CloudWatch_light-bg@4x.png rename to 
providers/amazon/docs/integration-logos/Amazon-CloudWatch_light-bg@4x.png diff --git a/docs/integration-logos/aws/Amazon-Comprehend_light-bg@4x.png b/providers/amazon/docs/integration-logos/Amazon-Comprehend_light-bg@4x.png similarity index 100% rename from docs/integration-logos/aws/Amazon-Comprehend_light-bg@4x.png rename to providers/amazon/docs/integration-logos/Amazon-Comprehend_light-bg@4x.png diff --git a/docs/integration-logos/aws/Amazon-DynamoDB_light-bg@4x.png b/providers/amazon/docs/integration-logos/Amazon-DynamoDB_light-bg@4x.png similarity index 100% rename from docs/integration-logos/aws/Amazon-DynamoDB_light-bg@4x.png rename to providers/amazon/docs/integration-logos/Amazon-DynamoDB_light-bg@4x.png diff --git a/docs/integration-logos/aws/Amazon-EC2_light-bg@4x.png b/providers/amazon/docs/integration-logos/Amazon-EC2_light-bg@4x.png similarity index 100% rename from docs/integration-logos/aws/Amazon-EC2_light-bg@4x.png rename to providers/amazon/docs/integration-logos/Amazon-EC2_light-bg@4x.png diff --git a/docs/integration-logos/aws/Amazon-EMR_light-bg@4x.png b/providers/amazon/docs/integration-logos/Amazon-EMR_light-bg@4x.png similarity index 100% rename from docs/integration-logos/aws/Amazon-EMR_light-bg@4x.png rename to providers/amazon/docs/integration-logos/Amazon-EMR_light-bg@4x.png diff --git a/docs/integration-logos/aws/Amazon-ElastiCache_light-bg@4x.png b/providers/amazon/docs/integration-logos/Amazon-ElastiCache_light-bg@4x.png similarity index 100% rename from docs/integration-logos/aws/Amazon-ElastiCache_light-bg@4x.png rename to providers/amazon/docs/integration-logos/Amazon-ElastiCache_light-bg@4x.png diff --git a/docs/integration-logos/aws/Amazon-Elastic-Container-Registry_light-bg@4x.png b/providers/amazon/docs/integration-logos/Amazon-Elastic-Container-Registry_light-bg@4x.png similarity index 100% rename from docs/integration-logos/aws/Amazon-Elastic-Container-Registry_light-bg@4x.png rename to 
providers/amazon/docs/integration-logos/Amazon-Elastic-Container-Registry_light-bg@4x.png diff --git a/docs/integration-logos/aws/Amazon-Elastic-Container-Service_light-bg@4x.png b/providers/amazon/docs/integration-logos/Amazon-Elastic-Container-Service_light-bg@4x.png similarity index 100% rename from docs/integration-logos/aws/Amazon-Elastic-Container-Service_light-bg@4x.png rename to providers/amazon/docs/integration-logos/Amazon-Elastic-Container-Service_light-bg@4x.png diff --git a/docs/integration-logos/aws/Amazon-Elastic-Kubernetes-Service_light-bg@4x.png b/providers/amazon/docs/integration-logos/Amazon-Elastic-Kubernetes-Service_light-bg@4x.png similarity index 100% rename from docs/integration-logos/aws/Amazon-Elastic-Kubernetes-Service_light-bg@4x.png rename to providers/amazon/docs/integration-logos/Amazon-Elastic-Kubernetes-Service_light-bg@4x.png diff --git a/docs/integration-logos/aws/Amazon-EventBridge_64.png b/providers/amazon/docs/integration-logos/Amazon-EventBridge_64.png similarity index 100% rename from docs/integration-logos/aws/Amazon-EventBridge_64.png rename to providers/amazon/docs/integration-logos/Amazon-EventBridge_64.png diff --git a/docs/integration-logos/aws/Amazon-Kinesis-Analytics_light-bg@4x.png b/providers/amazon/docs/integration-logos/Amazon-Kinesis-Analytics_light-bg@4x.png similarity index 100% rename from docs/integration-logos/aws/Amazon-Kinesis-Analytics_light-bg@4x.png rename to providers/amazon/docs/integration-logos/Amazon-Kinesis-Analytics_light-bg@4x.png diff --git a/docs/integration-logos/aws/Amazon-Kinesis-Data-Firehose_light-bg@4x.png b/providers/amazon/docs/integration-logos/Amazon-Kinesis-Data-Firehose_light-bg@4x.png similarity index 100% rename from docs/integration-logos/aws/Amazon-Kinesis-Data-Firehose_light-bg@4x.png rename to providers/amazon/docs/integration-logos/Amazon-Kinesis-Data-Firehose_light-bg@4x.png diff --git a/docs/integration-logos/aws/Amazon-Neptune_64.png 
b/providers/amazon/docs/integration-logos/Amazon-Neptune_64.png similarity index 100% rename from docs/integration-logos/aws/Amazon-Neptune_64.png rename to providers/amazon/docs/integration-logos/Amazon-Neptune_64.png diff --git a/docs/integration-logos/aws/Amazon-OpenSearch_light-bg@4x.png b/providers/amazon/docs/integration-logos/Amazon-OpenSearch_light-bg@4x.png similarity index 100% rename from docs/integration-logos/aws/Amazon-OpenSearch_light-bg@4x.png rename to providers/amazon/docs/integration-logos/Amazon-OpenSearch_light-bg@4x.png diff --git a/docs/integration-logos/aws/Amazon-Quicksight_light-bg@4x.png b/providers/amazon/docs/integration-logos/Amazon-Quicksight_light-bg@4x.png similarity index 100% rename from docs/integration-logos/aws/Amazon-Quicksight_light-bg@4x.png rename to providers/amazon/docs/integration-logos/Amazon-Quicksight_light-bg@4x.png diff --git a/docs/integration-logos/aws/Amazon-RDS_light-bg@4x.png b/providers/amazon/docs/integration-logos/Amazon-RDS_light-bg@4x.png similarity index 100% rename from docs/integration-logos/aws/Amazon-RDS_light-bg@4x.png rename to providers/amazon/docs/integration-logos/Amazon-RDS_light-bg@4x.png diff --git a/docs/integration-logos/aws/Amazon-Redshift_light-bg@4x.png b/providers/amazon/docs/integration-logos/Amazon-Redshift_light-bg@4x.png similarity index 100% rename from docs/integration-logos/aws/Amazon-Redshift_light-bg@4x.png rename to providers/amazon/docs/integration-logos/Amazon-Redshift_light-bg@4x.png diff --git a/docs/integration-logos/aws/Amazon-S3-Glacier_light-bg@4x.png b/providers/amazon/docs/integration-logos/Amazon-S3-Glacier_light-bg@4x.png similarity index 100% rename from docs/integration-logos/aws/Amazon-S3-Glacier_light-bg@4x.png rename to providers/amazon/docs/integration-logos/Amazon-S3-Glacier_light-bg@4x.png diff --git a/docs/integration-logos/aws/Amazon-SageMaker_light-bg@4x.png b/providers/amazon/docs/integration-logos/Amazon-SageMaker_light-bg@4x.png similarity index 100% 
rename from docs/integration-logos/aws/Amazon-SageMaker_light-bg@4x.png rename to providers/amazon/docs/integration-logos/Amazon-SageMaker_light-bg@4x.png diff --git a/docs/integration-logos/aws/Amazon-Simple-Email-Service-SES_light-bg@4x.png b/providers/amazon/docs/integration-logos/Amazon-Simple-Email-Service-SES_light-bg@4x.png similarity index 100% rename from docs/integration-logos/aws/Amazon-Simple-Email-Service-SES_light-bg@4x.png rename to providers/amazon/docs/integration-logos/Amazon-Simple-Email-Service-SES_light-bg@4x.png diff --git a/docs/integration-logos/aws/Amazon-Simple-Notification-Service-SNS_light-bg@4x.png b/providers/amazon/docs/integration-logos/Amazon-Simple-Notification-Service-SNS_light-bg@4x.png similarity index 100% rename from docs/integration-logos/aws/Amazon-Simple-Notification-Service-SNS_light-bg@4x.png rename to providers/amazon/docs/integration-logos/Amazon-Simple-Notification-Service-SNS_light-bg@4x.png diff --git a/docs/integration-logos/aws/Amazon-Simple-Queue-Service-SQS_light-bg@4x.png b/providers/amazon/docs/integration-logos/Amazon-Simple-Queue-Service-SQS_light-bg@4x.png similarity index 100% rename from docs/integration-logos/aws/Amazon-Simple-Queue-Service-SQS_light-bg@4x.png rename to providers/amazon/docs/integration-logos/Amazon-Simple-Queue-Service-SQS_light-bg@4x.png diff --git a/docs/integration-logos/aws/Amazon-Simple-Storage-Service-S3_light-bg@4x.png b/providers/amazon/docs/integration-logos/Amazon-Simple-Storage-Service-S3_light-bg@4x.png similarity index 100% rename from docs/integration-logos/aws/Amazon-Simple-Storage-Service-S3_light-bg@4x.png rename to providers/amazon/docs/integration-logos/Amazon-Simple-Storage-Service-S3_light-bg@4x.png diff --git a/docs/integration-logos/aws/Amazon-Verified-Permissions.png b/providers/amazon/docs/integration-logos/Amazon-Verified-Permissions.png similarity index 100% rename from docs/integration-logos/aws/Amazon-Verified-Permissions.png rename to 
providers/amazon/docs/integration-logos/Amazon-Verified-Permissions.png diff --git a/docs/integration-logos/aws/Amazon_AppFlow_light.png b/providers/amazon/docs/integration-logos/Amazon_AppFlow_light.png similarity index 100% rename from docs/integration-logos/aws/Amazon_AppFlow_light.png rename to providers/amazon/docs/integration-logos/Amazon_AppFlow_light.png diff --git a/docs/apache-airflow-providers-amazon/logging/cloud-watch-task-handlers.rst b/providers/amazon/docs/logging/cloud-watch-task-handlers.rst similarity index 100% rename from docs/apache-airflow-providers-amazon/logging/cloud-watch-task-handlers.rst rename to providers/amazon/docs/logging/cloud-watch-task-handlers.rst diff --git a/docs/apache-airflow-providers-amazon/logging/index.rst b/providers/amazon/docs/logging/index.rst similarity index 100% rename from docs/apache-airflow-providers-amazon/logging/index.rst rename to providers/amazon/docs/logging/index.rst diff --git a/docs/apache-airflow-providers-amazon/logging/s3-task-handler.rst b/providers/amazon/docs/logging/s3-task-handler.rst similarity index 100% rename from docs/apache-airflow-providers-amazon/logging/s3-task-handler.rst rename to providers/amazon/docs/logging/s3-task-handler.rst diff --git a/docs/apache-airflow-providers-amazon/notifications/chime_notifier_howto_guide.rst b/providers/amazon/docs/notifications/chime_notifier_howto_guide.rst similarity index 100% rename from docs/apache-airflow-providers-amazon/notifications/chime_notifier_howto_guide.rst rename to providers/amazon/docs/notifications/chime_notifier_howto_guide.rst diff --git a/docs/apache-airflow-providers-amazon/notifications/index.rst b/providers/amazon/docs/notifications/index.rst similarity index 100% rename from docs/apache-airflow-providers-amazon/notifications/index.rst rename to providers/amazon/docs/notifications/index.rst diff --git a/docs/apache-airflow-providers-amazon/notifications/sns.rst b/providers/amazon/docs/notifications/sns.rst similarity index 
100% rename from docs/apache-airflow-providers-amazon/notifications/sns.rst rename to providers/amazon/docs/notifications/sns.rst diff --git a/docs/apache-airflow-providers-amazon/notifications/sqs.rst b/providers/amazon/docs/notifications/sqs.rst similarity index 100% rename from docs/apache-airflow-providers-amazon/notifications/sqs.rst rename to providers/amazon/docs/notifications/sqs.rst diff --git a/docs/apache-airflow-providers-amazon/operators/appflow.rst b/providers/amazon/docs/operators/appflow.rst similarity index 89% rename from docs/apache-airflow-providers-amazon/operators/appflow.rst rename to providers/amazon/docs/operators/appflow.rst index d54d79df75cbb0..08db8bcad30dd6 100644 --- a/docs/apache-airflow-providers-amazon/operators/appflow.rst +++ b/providers/amazon/docs/operators/appflow.rst @@ -49,7 +49,7 @@ Run Flow To run an AppFlow flow keeping as is, use: :class:`~airflow.providers.amazon.aws.operators.appflow.AppflowRunOperator`. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_appflow_run.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_appflow_run.py :language: python :dedent: 4 :start-after: [START howto_operator_appflow_run] @@ -66,7 +66,7 @@ Run Flow Full To run an AppFlow flow removing all filters, use: :class:`~airflow.providers.amazon.aws.operators.appflow.AppflowRunFullOperator`. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_appflow.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_appflow.py :language: python :dedent: 4 :start-after: [START howto_operator_appflow_run_full] @@ -83,7 +83,7 @@ Run Flow Daily To run an AppFlow flow filtering daily records, use: :class:`~airflow.providers.amazon.aws.operators.appflow.AppflowRunDailyOperator`. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_appflow.py +.. 
exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_appflow.py :language: python :dedent: 4 :start-after: [START howto_operator_appflow_run_daily] @@ -100,7 +100,7 @@ Run Flow Before To run an AppFlow flow filtering future records and selecting the past ones, use: :class:`~airflow.providers.amazon.aws.operators.appflow.AppflowRunBeforeOperator`. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_appflow.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_appflow.py :language: python :dedent: 4 :start-after: [START howto_operator_appflow_run_before] @@ -117,7 +117,7 @@ Run Flow After To run an AppFlow flow filtering past records and selecting the future ones, use: :class:`~airflow.providers.amazon.aws.operators.appflow.AppflowRunAfterOperator`. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_appflow.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_appflow.py :language: python :dedent: 4 :start-after: [START howto_operator_appflow_run_after] @@ -134,7 +134,7 @@ Skipping Tasks For Empty Runs To skip tasks when some AppFlow run return zero records, use: :class:`~airflow.providers.amazon.aws.operators.appflow.AppflowRecordsShortCircuitOperator`. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_appflow.py +.. 
exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_appflow.py :language: python :dedent: 4 :start-after: [START howto_operator_appflow_shortcircuit] diff --git a/docs/apache-airflow-providers-amazon/operators/athena/athena_boto.rst b/providers/amazon/docs/operators/athena/athena_boto.rst similarity index 91% rename from docs/apache-airflow-providers-amazon/operators/athena/athena_boto.rst rename to providers/amazon/docs/operators/athena/athena_boto.rst index 295da63f9170a3..36f9d3073c6a6f 100644 --- a/docs/apache-airflow-providers-amazon/operators/athena/athena_boto.rst +++ b/providers/amazon/docs/operators/athena/athena_boto.rst @@ -48,9 +48,9 @@ to run a query in Amazon Athena. In the following example, we query an existing Athena table and send the results to an existing Amazon S3 bucket. For more examples of how to use this operator, please -see the `Sample DAG `__. +see the `Sample DAG `__. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_athena.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_athena.py :language: python :start-after: [START howto_operator_athena] :dedent: 4 @@ -67,7 +67,7 @@ Wait on Amazon Athena query results Use the :class:`~airflow.providers.amazon.aws.sensors.athena.AthenaSensor` to wait for the results of a query in Amazon Athena. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_athena.py +.. 
exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_athena.py :language: python :start-after: [START howto_sensor_athena] :dedent: 4 diff --git a/docs/apache-airflow-providers-amazon/operators/athena/athena_sql.rst b/providers/amazon/docs/operators/athena/athena_sql.rst similarity index 100% rename from docs/apache-airflow-providers-amazon/operators/athena/athena_sql.rst rename to providers/amazon/docs/operators/athena/athena_sql.rst diff --git a/docs/apache-airflow-providers-amazon/operators/athena/index.rst b/providers/amazon/docs/operators/athena/index.rst similarity index 100% rename from docs/apache-airflow-providers-amazon/operators/athena/index.rst rename to providers/amazon/docs/operators/athena/index.rst diff --git a/docs/apache-airflow-providers-amazon/operators/batch.rst b/providers/amazon/docs/operators/batch.rst similarity index 90% rename from docs/apache-airflow-providers-amazon/operators/batch.rst rename to providers/amazon/docs/operators/batch.rst index efb71fafe80430..504283ad1d1f27 100644 --- a/docs/apache-airflow-providers-amazon/operators/batch.rst +++ b/providers/amazon/docs/operators/batch.rst @@ -40,7 +40,7 @@ Submit a new AWS Batch job To submit a new AWS Batch job and monitor it until it reaches a terminal state you can use :class:`~airflow.providers.amazon.aws.operators.batch.BatchOperator`. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_batch.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_batch.py :language: python :dedent: 4 :start-after: [START howto_operator_batch] @@ -54,7 +54,7 @@ Create an AWS Batch compute environment To create a new AWS Batch compute environment you can use :class:`~airflow.providers.amazon.aws.operators.batch.BatchCreateComputeEnvironmentOperator`. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_batch.py +.. 
exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_batch.py :language: python :dedent: 4 :start-after: [START howto_operator_batch_create_compute_environment] @@ -71,7 +71,7 @@ Wait on an AWS Batch job state To wait on the state of an AWS Batch Job until it reaches a terminal state you can use :class:`~airflow.providers.amazon.aws.sensors.batch.BatchSensor`. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_batch.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_batch.py :language: python :dedent: 4 :start-after: [START howto_sensor_batch] @@ -94,7 +94,7 @@ Wait on an AWS Batch compute environment status To wait on the status of an AWS Batch compute environment until it reaches a terminal status you can use :class:`~airflow.providers.amazon.aws.sensors.batch.BatchComputeEnvironmentSensor`. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_batch.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_batch.py :language: python :dedent: 4 :start-after: [START howto_sensor_batch_compute_environment] @@ -108,7 +108,7 @@ Wait on an AWS Batch job queue status To wait on the status of an AWS Batch job queue until it reaches a terminal status you can use :class:`~airflow.providers.amazon.aws.sensors.batch.BatchJobQueueSensor`. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_batch.py +.. 
exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_batch.py :language: python :dedent: 4 :start-after: [START howto_sensor_batch_job_queue] diff --git a/docs/apache-airflow-providers-amazon/operators/bedrock.rst b/providers/amazon/docs/operators/bedrock.rst similarity index 86% rename from docs/apache-airflow-providers-amazon/operators/bedrock.rst rename to providers/amazon/docs/operators/bedrock.rst index 6a2af21f2ff918..4054b333ea1d1e 100644 --- a/docs/apache-airflow-providers-amazon/operators/bedrock.rst +++ b/providers/amazon/docs/operators/bedrock.rst @@ -52,7 +52,7 @@ for details on the different formats, see For example, to invoke a Meta Llama model you would use: -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_bedrock.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_bedrock.py :language: python :dedent: 4 :start-after: [START howto_operator_invoke_llama_model] @@ -60,7 +60,7 @@ For example, to invoke a Meta Llama model you would use: To invoke an Amazon Titan model you would use: -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_bedrock.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_bedrock.py :language: python :dedent: 4 :start-after: [START howto_operator_invoke_titan_model] @@ -68,7 +68,7 @@ To invoke an Amazon Titan model you would use: To invoke a Claude V2 model using the Completions API you would use: -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_bedrock_retrieve_and_generate.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_bedrock_retrieve_and_generate.py :language: python :dedent: 4 :start-after: [START howto_operator_invoke_claude_model] @@ -90,7 +90,7 @@ and the training/validation data size. To monitor the state of the job, you can or the :class:`~airflow.providers.amazon.aws.triggers.BedrockCustomizeModelCompletedTrigger` Trigger. -.. 
exampleinclude:: /../../providers/tests/system/amazon/aws/example_bedrock.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_bedrock.py :language: python :dedent: 4 :start-after: [START howto_operator_customize_model] @@ -112,7 +112,7 @@ or the :class:`~airflow.providers.amazon.aws.triggers.BedrockProvisionModelThrou Trigger. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_bedrock.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_bedrock.py :language: python :dedent: 4 :start-after: [START howto_operator_provision_throughput] @@ -129,7 +129,7 @@ To create an Amazon Bedrock Knowledge Base, you can use For more information on which models support embedding data into a vector store, see https://docs.aws.amazon.com/bedrock/latest/userguide/knowledge-base-supported.html -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_bedrock_retrieve_and_generate.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_bedrock_retrieve_and_generate.py :language: python :dedent: 4 :start-after: [START howto_operator_bedrock_create_knowledge_base] @@ -142,7 +142,7 @@ Delete an Amazon Bedrock Knowledge Base Deleting a Knowledge Base is a simple boto API call and can be done in a TaskFlow task like the example below. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_bedrock_retrieve_and_generate.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_bedrock_retrieve_and_generate.py :language: python :start-after: [START howto_operator_bedrock_delete_knowledge_base] :end-before: [END howto_operator_bedrock_delete_knowledge_base] @@ -155,7 +155,7 @@ Create an Amazon Bedrock Data Source To create an Amazon Bedrock Data Source, you can use :class:`~airflow.providers.amazon.aws.operators.bedrock.BedrockCreateDataSourceOperator`. -.. 
exampleinclude:: /../../providers/tests/system/amazon/aws/example_bedrock_retrieve_and_generate.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_bedrock_retrieve_and_generate.py :language: python :dedent: 4 :start-after: [START howto_operator_bedrock_create_data_source] @@ -168,7 +168,7 @@ Delete an Amazon Bedrock Data Source Deleting a Data Source is a simple boto API call and can be done in a TaskFlow task like the example below. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_bedrock_retrieve_and_generate.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_bedrock_retrieve_and_generate.py :language: python :start-after: [START howto_operator_bedrock_delete_data_source] :end-before: [END howto_operator_bedrock_delete_data_source] @@ -181,7 +181,7 @@ Ingest data into an Amazon Bedrock Data Source To add data from an Amazon S3 bucket into an Amazon Bedrock Data Source, you can use :class:`~airflow.providers.amazon.aws.operators.bedrock.BedrockIngestDataOperator`. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_bedrock_retrieve_and_generate.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_bedrock_retrieve_and_generate.py :language: python :dedent: 4 :start-after: [START howto_operator_bedrock_ingest_data] @@ -201,7 +201,7 @@ would like to pass the results through an LLM in order to generate a text respon For more information on which models support retrieving information from a knowledge base, see https://docs.aws.amazon.com/bedrock/latest/userguide/knowledge-base-supported.html -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_bedrock_retrieve_and_generate.py +.. 
exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_bedrock_retrieve_and_generate.py :language: python :dedent: 4 :start-after: [START howto_operator_bedrock_retrieve] @@ -223,7 +223,7 @@ NOTE: Support for "external sources" was added in boto 1.34.90 Example using an Amazon Bedrock Knowledge Base: -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_bedrock_retrieve_and_generate.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_bedrock_retrieve_and_generate.py :language: python :dedent: 4 :start-after: [START howto_operator_bedrock_knowledge_base_rag] @@ -231,7 +231,7 @@ Example using an Amazon Bedrock Knowledge Base: Example using a PDF file in an Amazon S3 Bucket: -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_bedrock_retrieve_and_generate.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_bedrock_retrieve_and_generate.py :language: python :dedent: 4 :start-after: [START howto_operator_bedrock_external_sources_rag] @@ -249,7 +249,7 @@ Wait for an Amazon Bedrock customize model job To wait on the state of an Amazon Bedrock customize model job until it reaches a terminal state you can use :class:`~airflow.providers.amazon.aws.sensors.bedrock.BedrockCustomizeModelCompletedSensor` -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_bedrock.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_bedrock.py :language: python :dedent: 4 :start-after: [START howto_sensor_customize_model] @@ -264,7 +264,7 @@ To wait on the state of an Amazon Bedrock provision model throughput job until i terminal state you can use :class:`~airflow.providers.amazon.aws.sensors.bedrock.BedrockProvisionModelThroughputCompletedSensor` -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_bedrock.py +.. 
exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_bedrock.py :language: python :dedent: 4 :start-after: [START howto_sensor_provision_throughput] @@ -278,7 +278,7 @@ Wait for an Amazon Bedrock Knowledge Base To wait on the state of an Amazon Bedrock Knowledge Base until it reaches a terminal state you can use :class:`~airflow.providers.amazon.aws.sensors.bedrock.BedrockKnowledgeBaseActiveSensor` -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_bedrock_retrieve_and_generate.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_bedrock_retrieve_and_generate.py :language: python :dedent: 4 :start-after: [START howto_sensor_bedrock_knowledge_base_active] @@ -292,7 +292,7 @@ Wait for an Amazon Bedrock ingestion job to finish To wait on the state of an Amazon Bedrock data ingestion job until it reaches a terminal state you can use :class:`~airflow.providers.amazon.aws.sensors.bedrock.BedrockIngestionJobSensor` -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_bedrock_retrieve_and_generate.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_bedrock_retrieve_and_generate.py :language: python :dedent: 4 :start-after: [START howto_sensor_bedrock_ingest_data] diff --git a/docs/apache-airflow-providers-amazon/operators/cloudformation.rst b/providers/amazon/docs/operators/cloudformation.rst similarity index 91% rename from docs/apache-airflow-providers-amazon/operators/cloudformation.rst rename to providers/amazon/docs/operators/cloudformation.rst index 40ab46bcf732ee..27af8304ec7039 100644 --- a/docs/apache-airflow-providers-amazon/operators/cloudformation.rst +++ b/providers/amazon/docs/operators/cloudformation.rst @@ -47,7 +47,7 @@ Create an AWS CloudFormation stack To create a new AWS CloudFormation stack use :class:`~airflow.providers.amazon.aws.operators.cloud_formation.CloudFormationCreateStackOperator`. -.. 
exampleinclude:: /../../providers/tests/system/amazon/aws/example_cloudformation.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_cloudformation.py :language: python :dedent: 4 :start-after: [START howto_operator_cloudformation_create_stack] @@ -61,7 +61,7 @@ Delete an AWS CloudFormation stack To delete an AWS CloudFormation stack you can use :class:`~airflow.providers.amazon.aws.operators.cloud_formation.CloudFormationDeleteStackOperator`. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_cloudformation.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_cloudformation.py :language: python :dedent: 4 :start-after: [START howto_operator_cloudformation_delete_stack] @@ -78,7 +78,7 @@ Wait on an AWS CloudFormation stack creation state To wait on the state of an AWS CloudFormation stack creation until it reaches a terminal state you can use :class:`~airflow.providers.amazon.aws.sensors.cloud_formation.CloudFormationCreateStackSensor` -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_cloudformation.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_cloudformation.py :language: python :dedent: 4 :start-after: [START howto_sensor_cloudformation_create_stack] @@ -92,7 +92,7 @@ Wait on an AWS CloudFormation stack deletion state To wait on the state of an AWS CloudFormation stack deletion until it reaches a terminal state you can use use :class:`~airflow.providers.amazon.aws.sensors.cloud_formation.CloudFormationDeleteStackSensor` -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_cloudformation.py +.. 
exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_cloudformation.py :language: python :dedent: 4 :start-after: [START howto_sensor_cloudformation_delete_stack] diff --git a/docs/apache-airflow-providers-amazon/operators/comprehend.rst b/providers/amazon/docs/operators/comprehend.rst similarity index 90% rename from docs/apache-airflow-providers-amazon/operators/comprehend.rst rename to providers/amazon/docs/operators/comprehend.rst index c273643651bbd4..f00e4b3e26bdc6 100644 --- a/docs/apache-airflow-providers-amazon/operators/comprehend.rst +++ b/providers/amazon/docs/operators/comprehend.rst @@ -44,7 +44,7 @@ Create an Amazon Comprehend Start PII Entities Detection Job To create an Amazon Comprehend Start PII Entities Detection Job, you can use :class:`~airflow.providers.amazon.aws.operators.comprehend.ComprehendStartPiiEntitiesDetectionJobOperator`. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_comprehend.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_comprehend.py :language: python :dedent: 4 :start-after: [START howto_operator_start_pii_entities_detection_job] @@ -58,7 +58,7 @@ Create an Amazon Comprehend Document Classifier To create an Amazon Comprehend Document Classifier, you can use :class:`~airflow.providers.amazon.aws.operators.comprehend.ComprehendCreateDocumentClassifierOperator`. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_comprehend_document_classifier.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_comprehend_document_classifier.py :language: python :dedent: 4 :start-after: [START howto_operator_create_document_classifier] @@ -76,7 +76,7 @@ To wait on the state of an Amazon Comprehend Start PII Entities Detection Job un state you can use :class:`~airflow.providers.amazon.aws.sensors.comprehend.ComprehendStartPiiEntitiesDetectionJobCompletedSensor`. -.. 
exampleinclude:: /../../providers/tests/system/amazon/aws/example_comprehend.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_comprehend.py :language: python :dedent: 4 :start-after: [START howto_sensor_start_pii_entities_detection_job] @@ -91,7 +91,7 @@ To wait on the state of an Amazon Comprehend Document Classifier until it reache state you can use :class:`~airflow.providers.amazon.aws.sensors.comprehend.ComprehendCreateDocumentClassifierCompletedSensor`. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_comprehend_document_classifier.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_comprehend_document_classifier.py :language: python :dedent: 4 :start-after: [START howto_sensor_create_document_classifier] diff --git a/docs/apache-airflow-providers-amazon/operators/datasync.rst b/providers/amazon/docs/operators/datasync.rst similarity index 94% rename from docs/apache-airflow-providers-amazon/operators/datasync.rst rename to providers/amazon/docs/operators/datasync.rst index 26db89022b5de3..e8b297f8956a35 100644 --- a/docs/apache-airflow-providers-amazon/operators/datasync.rst +++ b/providers/amazon/docs/operators/datasync.rst @@ -64,7 +64,7 @@ Execute a task To execute a specific task, you can pass the ``task_arn`` to the operator. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_datasync.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_datasync.py :language: python :dedent: 4 :start-after: [START howto_operator_datasync_specific_task] @@ -78,7 +78,7 @@ If one task is found, this one will be executed. If more than one task is found, the operator will raise an Exception. To avoid this, you can set ``allow_random_task_choice`` to ``True`` to randomly choose from candidate tasks. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_datasync.py +.. 
exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_datasync.py :language: python :dedent: 4 :start-after: [START howto_operator_datasync_search_task] @@ -97,7 +97,7 @@ existing Task was found. If these are left to their default value (None) then no Also, because ``delete_task_after_execution`` is set to ``True``, the task will be deleted from AWS DataSync after it completes successfully. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_datasync.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_datasync.py :language: python :dedent: 4 :start-after: [START howto_operator_datasync_create_task] diff --git a/docs/apache-airflow-providers-amazon/operators/dms.rst b/providers/amazon/docs/operators/dms.rst similarity index 86% rename from docs/apache-airflow-providers-amazon/operators/dms.rst rename to providers/amazon/docs/operators/dms.rst index fc2a7da2dedcbe..f5d216353c64ec 100644 --- a/docs/apache-airflow-providers-amazon/operators/dms.rst +++ b/providers/amazon/docs/operators/dms.rst @@ -52,7 +52,7 @@ Create a replication task To create a replication task you can use :class:`~airflow.providers.amazon.aws.operators.dms.DmsCreateTaskOperator`. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_dms.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_dms.py :language: python :dedent: 4 :start-after: [START howto_operator_dms_create_task] @@ -66,7 +66,7 @@ Start a replication task To start a replication task you can use :class:`~airflow.providers.amazon.aws.operators.dms.DmsStartTaskOperator`. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_dms.py +.. 
exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_dms.py :language: python :dedent: 4 :start-after: [START howto_operator_dms_start_task] @@ -80,7 +80,7 @@ Get details of replication tasks To retrieve the details for a list of replication tasks you can use :class:`~airflow.providers.amazon.aws.operators.dms.DmsDescribeTasksOperator`. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_dms.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_dms.py :language: python :dedent: 4 :start-after: [START howto_operator_dms_describe_tasks] @@ -94,7 +94,7 @@ Stop a replication task To stop a replication task you can use :class:`~airflow.providers.amazon.aws.operators.dms.DmsStopTaskOperator`. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_dms.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_dms.py :language: python :dedent: 4 :start-after: [START howto_operator_dms_stop_task] @@ -108,7 +108,7 @@ Delete a replication task To delete a replication task you can use :class:`~airflow.providers.amazon.aws.operators.dms.DmsDeleteTaskOperator`. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_dms.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_dms.py :language: python :dedent: 4 :start-after: [START howto_operator_dms_delete_task] @@ -122,7 +122,7 @@ Create a serverless replication config To create a serverless replication config use :class:`~airflow.providers.amazon.aws.operators.dms.DmsCreateReplicationConfigOperator`. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_dms_serverless.py +.. 
exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_dms_serverless.py :language: python :dedent: 4 :start-after: [START howto_operator_dms_create_replication_config] @@ -136,7 +136,7 @@ Describe a serverless replication config To describe a serverless replication config use :class:`~airflow.providers.amazon.aws.operators.dms.DmsDescribeReplicationConfigsOperator`. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_dms_serverless.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_dms_serverless.py :language: python :dedent: 4 :start-after: [START howto_operator_dms_describe_replication_config] @@ -150,7 +150,7 @@ Start a serverless replication To start a serverless replication use :class:`~airflow.providers.amazon.aws.operators.dms.DmsStartReplicationOperator`. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_dms_serverless.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_dms_serverless.py :language: python :dedent: 4 :start-after: [START howto_operator_dms_serverless_start_replication] @@ -164,7 +164,7 @@ Stop a serverless replication To stop a serverless replication use :class:`~airflow.providers.amazon.aws.operators.dms.DmsStopReplicationOperator`. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_dms_serverless.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_dms_serverless.py :language: python :dedent: 4 :start-after: [START howto_operator_dms_serverless_stop_replication] @@ -178,7 +178,7 @@ Get the status of a serverless replication To get the status of a serverless replication use :class:`~airflow.providers.amazon.aws.operators.dms.DmsDescribeReplicationsOperator`. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_dms_serverless.py +.. 
exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_dms_serverless.py :language: python :dedent: 4 :start-after: [START howto_operator_dms_serverless_describe_replication] @@ -192,7 +192,7 @@ Delete a serverless replication configuration To delete a serverless replication config use :class:`~airflow.providers.amazon.aws.operators.dms.DmsDeleteReplicationConfigOperator`. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_dms_serverless.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_dms_serverless.py :language: python :dedent: 4 :start-after: [START howto_operator_dms_serverless_delete_replication_config] @@ -211,7 +211,7 @@ Wait for a replication task to complete To check the state of a replication task until it is completed, you can use :class:`~airflow.providers.amazon.aws.sensors.dms.DmsTaskCompletedSensor`. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_dms.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_dms.py :language: python :dedent: 4 :start-after: [START howto_sensor_dms_task_completed] diff --git a/docs/apache-airflow-providers-amazon/operators/dynamodb.rst b/providers/amazon/docs/operators/dynamodb.rst similarity index 93% rename from docs/apache-airflow-providers-amazon/operators/dynamodb.rst rename to providers/amazon/docs/operators/dynamodb.rst index aab6626109d0e5..8eb569ea87d511 100644 --- a/docs/apache-airflow-providers-amazon/operators/dynamodb.rst +++ b/providers/amazon/docs/operators/dynamodb.rst @@ -51,7 +51,7 @@ Wait for a Single Attribute Value Match: This example shows how to use ``DynamoDBValueSensor`` to wait for a specific attribute/value pair in a DynamoDB item. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_dynamodb.py +.. 
exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_dynamodb.py :language: python :start-after: [START howto_sensor_dynamodb_value] :dedent: 4 @@ -62,7 +62,7 @@ Wait for Any Value from a List of Attribute Values: In this example, the sensor waits for the DynamoDB item to have an attribute that matches any value from a provided list. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_dynamodb.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_dynamodb.py :language: python :start-after: [START howto_sensor_dynamodb_any_value] :dedent: 4 diff --git a/docs/apache-airflow-providers-amazon/operators/ec2.rst b/providers/amazon/docs/operators/ec2.rst similarity index 88% rename from docs/apache-airflow-providers-amazon/operators/ec2.rst rename to providers/amazon/docs/operators/ec2.rst index 4cfeb17d246512..2d4e07b258b1b9 100644 --- a/docs/apache-airflow-providers-amazon/operators/ec2.rst +++ b/providers/amazon/docs/operators/ec2.rst @@ -38,7 +38,7 @@ Start an Amazon EC2 instance To start an Amazon EC2 instance you can use :class:`~airflow.providers.amazon.aws.operators.ec2.EC2StartInstanceOperator`. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_ec2.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_ec2.py :language: python :dedent: 4 :start-after: [START howto_operator_ec2_start_instance] @@ -52,7 +52,7 @@ Stop an Amazon EC2 instance To stop an Amazon EC2 instance you can use :class:`~airflow.providers.amazon.aws.operators.ec2.EC2StopInstanceOperator`. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_ec2.py +.. 
exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_ec2.py :language: python :dedent: 4 :start-after: [START howto_operator_ec2_stop_instance] @@ -66,7 +66,7 @@ Create and start an Amazon EC2 instance To create and start an Amazon EC2 instance you can use :class:`~airflow.providers.amazon.aws.operators.ec2.EC2CreateInstanceOperator`. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_ec2.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_ec2.py :language: python :dedent: 4 :start-after: [START howto_operator_ec2_create_instance] @@ -80,7 +80,7 @@ Terminate an Amazon EC2 instance To terminate an Amazon EC2 instance you can use :class:`~airflow.providers.amazon.aws.operators.ec2.EC2TerminateInstanceOperator`. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_ec2.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_ec2.py :language: python :dedent: 4 :start-after: [START howto_operator_ec2_terminate_instance] @@ -94,7 +94,7 @@ Reboot an Amazon EC2 instance To reboot an Amazon EC2 instance you can use :class:`~airflow.providers.amazon.aws.operators.ec2.EC2RebootInstanceOperator`. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_ec2.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_ec2.py :language: python :dedent: 4 :start-after: [START howto_operator_ec2_reboot_instance] @@ -108,7 +108,7 @@ Hibernate an Amazon EC2 instance To hibernate an Amazon EC2 instance you can use :class:`~airflow.providers.amazon.aws.operators.ec2.EC2HibernateInstanceOperator`. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_ec2.py +.. 
exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_ec2.py :language: python :dedent: 4 :start-after: [START howto_operator_ec2_hibernate_instance] @@ -125,7 +125,7 @@ Wait on an Amazon EC2 instance state To check the state of an Amazon EC2 instance and wait until it reaches the target state you can use :class:`~airflow.providers.amazon.aws.sensors.ec2.EC2InstanceStateSensor`. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_ec2.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_ec2.py :language: python :dedent: 4 :start-after: [START howto_sensor_ec2_instance_state] diff --git a/docs/apache-airflow-providers-amazon/operators/ecs.rst b/providers/amazon/docs/operators/ecs.rst similarity index 91% rename from docs/apache-airflow-providers-amazon/operators/ecs.rst rename to providers/amazon/docs/operators/ecs.rst index 8cb02d539e7dee..22a9ec51374b9d 100644 --- a/docs/apache-airflow-providers-amazon/operators/ecs.rst +++ b/providers/amazon/docs/operators/ecs.rst @@ -48,7 +48,7 @@ To create an Amazon ECS cluster you can use All optional parameters to be passed to the Create Cluster API should be passed in the 'create_cluster_kwargs' dict. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_ecs.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_ecs.py :language: python :dedent: 4 :start-after: [START howto_operator_ecs_create_cluster] @@ -63,7 +63,7 @@ To delete an Amazon ECS cluster you can use :class:`~airflow.providers.amazon.aws.operators.ecs.EcsDeleteClusterOperator`. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_ecs.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_ecs.py :language: python :dedent: 4 :start-after: [START howto_operator_ecs_delete_cluster] @@ -81,7 +81,7 @@ All optional parameters to be passed to the Register Task Definition API should passed in the 'register_task_kwargs' dict. 
-.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_ecs.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_ecs.py :language: python :dedent: 4 :start-after: [START howto_operator_ecs_register_task_definition] @@ -96,7 +96,7 @@ To deregister a task definition you can use :class:`~airflow.providers.amazon.aws.operators.ecs.EcsDeregisterTaskDefinitionOperator`. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_ecs.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_ecs.py :language: python :dedent: 4 :start-after: [START howto_operator_ecs_deregister_task_definition] @@ -125,14 +125,14 @@ The parameters you need to configure for this Operator will depend upon which `` * If you are using EC2 as the compute resources in your ECS Cluster, set the parameter to EC2. * If you have integrated external resources in your ECS Cluster, for example using ECS Anywhere, and want to run your containers on those external resources, set the parameter to EXTERNAL. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_ecs.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_ecs.py :language: python :dedent: 4 :start-after: [START howto_operator_ecs_run_task] :end-before: [END howto_operator_ecs_run_task] -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_ecs_fargate.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_ecs_fargate.py :language: python :dedent: 4 :start-after: [START howto_operator_ecs] @@ -145,7 +145,7 @@ To stream logs to AWS CloudWatch, you need to define the parameters below. Using the example above, we would add these additional parameters to enable logging to CloudWatch. You need to ensure that you have the appropriate level of permissions (see next section). -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_ecs.py +.. 
exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_ecs.py :language: python :dedent: 8 :start-after: [START howto_awslogs_ecs] @@ -228,7 +228,7 @@ the failure reason if a failed state is provided and that state is reached before the target state. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_ecs.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_ecs.py :language: python :dedent: 4 :start-after: [START howto_sensor_ecs_cluster_state] @@ -248,7 +248,7 @@ to change that. Raises an AirflowException with the failure reason if the faile is reached before the target state. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_ecs.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_ecs.py :language: python :dedent: 4 :start-after: [START howto_sensor_ecs_task_definition_state] @@ -267,7 +267,7 @@ both can be overridden with provided values. Raises an AirflowException with the failure reason if a failed state is provided and that state is reached before the target state. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_ecs_fargate.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_ecs_fargate.py :language: python :dedent: 4 :start-after: [START howto_sensor_ecs_task_state] diff --git a/docs/apache-airflow-providers-amazon/operators/eks.rst b/providers/amazon/docs/operators/eks.rst similarity index 87% rename from docs/apache-airflow-providers-amazon/operators/eks.rst rename to providers/amazon/docs/operators/eks.rst index aa774b90959579..28940bba2089f6 100644 --- a/docs/apache-airflow-providers-amazon/operators/eks.rst +++ b/providers/amazon/docs/operators/eks.rst @@ -46,7 +46,7 @@ Note: An AWS IAM role with the following permissions is required: ``eks.amazonaws.com`` must be added to the Trusted Relationships ``AmazonEKSClusterPolicy`` IAM Policy must be attached -.. 
exampleinclude:: /../../providers/tests/system/amazon/aws/example_eks_with_nodegroups.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_eks_with_nodegroups.py :language: python :dedent: 4 :start-after: [START howto_operator_eks_create_cluster] @@ -65,7 +65,7 @@ Note: An AWS IAM role with the following permissions is required: ``AmazonEKSClusterPolicy`` IAM Policy must be attached ``AmazonEKSWorkerNodePolicy`` IAM Policy must be attached -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_eks_with_nodegroup_in_one_step.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_eks_with_nodegroup_in_one_step.py :language: python :dedent: 4 :start-after: [START howto_operator_eks_create_cluster_with_nodegroup] @@ -85,7 +85,7 @@ Note: An AWS IAM role with the following permissions is required: ``AmazonEKSClusterPolicy`` IAM Policy must be attached ``AmazonEKSWorkerNodePolicy`` IAM Policy must be attached -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_eks_with_fargate_in_one_step.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_eks_with_fargate_in_one_step.py :language: python :dedent: 4 :start-after: [START howto_operator_eks_create_cluster_with_fargate_profile] @@ -100,7 +100,7 @@ To delete an existing Amazon EKS Cluster you can use :class:`~airflow.providers.amazon.aws.operators.eks.EksDeleteClusterOperator`. You can also run this operator in deferrable mode by setting ``deferrable`` param to ``True``. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_eks_with_nodegroups.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_eks_with_nodegroups.py :language: python :dedent: 4 :start-after: [START howto_operator_eks_delete_cluster] @@ -110,7 +110,7 @@ Note: If the cluster has any attached resources, such as an Amazon EKS Nodegroup Fargate profile, the cluster can not be deleted. 
Using the ``force`` parameter will attempt to delete any attached resources first. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_eks_with_nodegroup_in_one_step.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_eks_with_nodegroup_in_one_step.py :language: python :dedent: 4 :start-after: [START howto_operator_eks_force_delete_cluster] @@ -130,7 +130,7 @@ Note: An AWS IAM role with the following permissions is required: ``AmazonEC2ContainerRegistryReadOnly`` IAM Policy must be attached ``AmazonEKSWorkerNodePolicy`` IAM Policy must be attached -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_eks_with_nodegroups.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_eks_with_nodegroups.py :language: python :dedent: 4 :start-after: [START howto_operator_eks_create_nodegroup] @@ -145,7 +145,7 @@ To delete an existing Amazon EKS managed node group you can use :class:`~airflow.providers.amazon.aws.operators.eks.EksDeleteNodegroupOperator`. You can also run this operator in deferrable mode by setting ``deferrable`` param to ``True``. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_eks_with_nodegroups.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_eks_with_nodegroups.py :language: python :dedent: 4 :start-after: [START howto_operator_eks_delete_nodegroup] @@ -164,7 +164,7 @@ Note: An AWS IAM role with the following permissions is required: ``AmazonEC2ContainerRegistryReadOnly`` IAM Policy must be attached ``AmazonEKSWorkerNodePolicy`` IAM Policy must be attached -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_eks_with_fargate_profile.py +.. 
exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_eks_with_fargate_profile.py :language: python :dedent: 4 :start-after: [START howto_operator_eks_create_fargate_profile] @@ -178,7 +178,7 @@ Delete an AWS Fargate Profile To delete an existing AWS Fargate Profile you can use :class:`~airflow.providers.amazon.aws.operators.eks.EksDeleteFargateProfileOperator`. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_eks_with_fargate_profile.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_eks_with_fargate_profile.py :language: python :dedent: 4 :start-after: [START howto_operator_eks_delete_fargate_profile] @@ -194,7 +194,7 @@ To run a pod on an existing Amazon EKS Cluster, you can use Note: An Amazon EKS Cluster with underlying compute infrastructure is required. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_eks_with_nodegroups.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_eks_with_nodegroups.py :language: python :dedent: 4 :start-after: [START howto_operator_eks_pod_operator] @@ -211,7 +211,7 @@ Wait on an Amazon EKS cluster state To check the state of an Amazon EKS Cluster until it reaches the target state or another terminal state you can use :class:`~airflow.providers.amazon.aws.sensors.eks.EksClusterStateSensor`. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_eks_with_nodegroups.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_eks_with_nodegroups.py :language: python :dedent: 4 :start-after: [START howto_sensor_eks_cluster] @@ -225,7 +225,7 @@ Wait on an Amazon EKS managed node group state To check the state of an Amazon EKS managed node group until it reaches the target state or another terminal state you can use :class:`~airflow.providers.amazon.aws.sensors.eks.EksNodegroupStateSensor`. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_eks_with_nodegroups.py +.. 
exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_eks_with_nodegroups.py :language: python :dedent: 4 :start-after: [START howto_sensor_eks_nodegroup] @@ -239,7 +239,7 @@ Wait on an AWS Fargate profile state To check the state of an AWS Fargate profile until it reaches the target state or another terminal state you can use :class:`~airflow.providers.amazon.aws.sensors.eks.EksFargateProfileSensor`. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_eks_with_fargate_profile.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_eks_with_fargate_profile.py :language: python :dedent: 4 :start-after: [START howto_sensor_eks_fargate] diff --git a/docs/apache-airflow-providers-amazon/operators/emr/emr.rst b/providers/amazon/docs/operators/emr/emr.rst similarity index 91% rename from docs/apache-airflow-providers-amazon/operators/emr/emr.rst rename to providers/amazon/docs/operators/emr/emr.rst index a92837eac35266..0ace939fad867f 100644 --- a/docs/apache-airflow-providers-amazon/operators/emr/emr.rst +++ b/providers/amazon/docs/operators/emr/emr.rst @@ -66,7 +66,7 @@ JobFlow configuration To create a job flow on EMR, you need to specify the configuration for the EMR cluster: -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_emr.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_emr.py :language: python :start-after: [START howto_operator_emr_steps_config] :end-before: [END howto_operator_emr_steps_config] @@ -89,7 +89,7 @@ Create the Job Flow In the following code we are creating a new job flow using the configuration as explained above. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_emr.py +.. 
exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_emr.py :language: python :dedent: 4 :start-after: [START howto_operator_emr_create_job_flow] @@ -107,7 +107,7 @@ Using ``deferrable`` mode will release worker slots and leads to efficient utili resources within Airflow cluster.However this mode will need the Airflow triggerer to be available in your deployment. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_emr.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_emr.py :language: python :dedent: 4 :start-after: [START howto_operator_emr_add_steps] @@ -125,7 +125,7 @@ Using ``deferrable`` mode will release worker slots and leads to efficient utili resources within Airflow cluster.However this mode will need the Airflow triggerer to be available in your deployment. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_emr.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_emr.py :language: python :dedent: 4 :start-after: [START howto_operator_emr_terminate_job_flow] @@ -139,7 +139,7 @@ Modify Amazon EMR container To modify an existing EMR container you can use :class:`~airflow.providers.amazon.aws.sensors.emr.EmrContainerSensor`. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_emr.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_emr.py :language: python :dedent: 4 :start-after: [START howto_operator_emr_modify_cluster] @@ -153,7 +153,7 @@ Start an EMR notebook execution You can use :class:`~airflow.providers.amazon.aws.operators.emr.EmrStartNotebookExecutionOperator` to start a notebook execution on an existing notebook attached to a running cluster. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_emr_notebook_execution.py +.. 
exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_emr_notebook_execution.py :language: python :dedent: 4 :start-after: [START howto_operator_emr_start_notebook_execution] @@ -167,7 +167,7 @@ Stop an EMR notebook execution You can use :class:`~airflow.providers.amazon.aws.operators.emr.EmrStopNotebookExecutionOperator` to stop a running notebook execution. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_emr_notebook_execution.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_emr_notebook_execution.py :language: python :dedent: 4 :start-after: [START howto_operator_emr_stop_notebook_execution] @@ -184,7 +184,7 @@ Wait on an EMR notebook execution state To monitor the state of an EMR notebook execution you can use :class:`~airflow.providers.amazon.aws.sensors.emr.EmrNotebookExecutionSensor`. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_emr_notebook_execution.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_emr_notebook_execution.py :language: python :dedent: 4 :start-after: [START howto_sensor_emr_notebook_execution] @@ -198,7 +198,7 @@ Wait on an Amazon EMR job flow state To monitor the state of an EMR job flow you can use :class:`~airflow.providers.amazon.aws.sensors.emr.EmrJobFlowSensor`. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_emr.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_emr.py :language: python :dedent: 4 :start-after: [START howto_sensor_emr_job_flow] @@ -212,7 +212,7 @@ Wait on an Amazon EMR step state To monitor the state of an EMR job step you can use :class:`~airflow.providers.amazon.aws.sensors.emr.EmrStepSensor`. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_emr.py +.. 
exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_emr.py :language: python :dedent: 4 :start-after: [START howto_sensor_emr_step] diff --git a/docs/apache-airflow-providers-amazon/operators/emr/emr_eks.rst b/providers/amazon/docs/operators/emr/emr_eks.rst similarity index 93% rename from docs/apache-airflow-providers-amazon/operators/emr/emr_eks.rst rename to providers/amazon/docs/operators/emr/emr_eks.rst index 122b22fa048411..eaaf906a9af622 100644 --- a/docs/apache-airflow-providers-amazon/operators/emr/emr_eks.rst +++ b/providers/amazon/docs/operators/emr/emr_eks.rst @@ -48,7 +48,7 @@ the eks cluster that you would like to use , and an eks namespace. Refer to the `EMR on EKS Development guide `__ for more details. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_emr_eks.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_emr_eks.py :language: python :start-after: [START howto_operator_emr_eks_create_cluster] :end-before: [END howto_operator_emr_eks_create_cluster] @@ -81,7 +81,7 @@ and ``monitoringConfiguration`` to send logs to the ``/aws/emr-eks-spark`` log g Refer to the `EMR on EKS guide `__ for more details on job configuration. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_emr_eks.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_emr_eks.py :language: python :dedent: 4 :start-after: [START howto_operator_emr_eks_config] @@ -92,7 +92,7 @@ can store them in a connection or provide them in the DAG. Your AWS region shoul in the ``aws_default`` connection as ``{"region_name": "us-east-1"}`` or a custom connection name that gets passed to the operator with the ``aws_conn_id`` parameter. The operator returns the Job ID of the job run. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_emr_eks.py +.. 
exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_emr_eks.py :language: python :dedent: 4 :start-after: [START howto_operator_emr_container] @@ -110,7 +110,7 @@ Wait on an Amazon EMR virtual cluster job To wait on the status of an Amazon EMR virtual cluster job to reach a terminal state, you can use :class:`~airflow.providers.amazon.aws.sensors.emr.EmrContainerSensor` -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_emr_eks.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_emr_eks.py :language: python :dedent: 4 :start-after: [START howto_sensor_emr_container] diff --git a/docs/apache-airflow-providers-amazon/operators/emr/emr_serverless.rst b/providers/amazon/docs/operators/emr/emr_serverless.rst similarity index 91% rename from docs/apache-airflow-providers-amazon/operators/emr/emr_serverless.rst rename to providers/amazon/docs/operators/emr/emr_serverless.rst index 28c7bb4720a09b..3c339231cfbd4b 100644 --- a/docs/apache-airflow-providers-amazon/operators/emr/emr_serverless.rst +++ b/providers/amazon/docs/operators/emr/emr_serverless.rst @@ -43,7 +43,7 @@ create a new EMR Serverless Application. This operator can be run in deferrable mode by passing ``deferrable=True`` as a parameter. This requires the aiobotocore module to be installed. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_emr_serverless.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_emr_serverless.py :language: python :dedent: 4 :start-after: [START howto_operator_emr_serverless_create_application] @@ -59,7 +59,7 @@ start an EMR Serverless Job. This operator can be run in deferrable mode by passing ``deferrable=True`` as a parameter. This requires the aiobotocore module to be installed. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_emr_serverless.py +.. 
exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_emr_serverless.py :language: python :dedent: 4 :start-after: [START howto_operator_emr_serverless_start_job] @@ -93,7 +93,7 @@ stop an EMR Serverless Application. This operator can be run in deferrable mode by passing ``deferrable=True`` as a parameter. This requires the aiobotocore module to be installed. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_emr_serverless.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_emr_serverless.py :language: python :dedent: 4 :start-after: [START howto_operator_emr_serverless_stop_application] @@ -109,7 +109,7 @@ delete an EMR Serverless Application. This operator can be run in deferrable mode by passing ``deferrable=True`` as a parameter. This requires the aiobotocore module to be installed. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_emr_serverless.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_emr_serverless.py :language: python :dedent: 4 :start-after: [START howto_operator_emr_serverless_delete_application] @@ -126,7 +126,7 @@ Wait on an EMR Serverless Job state To monitor the state of an EMR Serverless Job you can use :class:`~airflow.providers.amazon.aws.sensors.emr.EmrServerlessJobSensor`. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_emr_serverless.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_emr_serverless.py :language: python :dedent: 4 :start-after: [START howto_sensor_emr_serverless_job] @@ -140,7 +140,7 @@ Wait on an EMR Serverless Application state To monitor the state of an EMR Serverless Application you can use :class:`~airflow.providers.amazon.aws.sensors.emr.EmrServerlessApplicationSensor`. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_emr_serverless.py +.. 
exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_emr_serverless.py :language: python :dedent: 4 :start-after: [START howto_sensor_emr_serverless_application] diff --git a/docs/apache-airflow-providers-amazon/operators/emr/index.rst b/providers/amazon/docs/operators/emr/index.rst similarity index 100% rename from docs/apache-airflow-providers-amazon/operators/emr/index.rst rename to providers/amazon/docs/operators/emr/index.rst diff --git a/docs/apache-airflow-providers-amazon/operators/eventbridge.rst b/providers/amazon/docs/operators/eventbridge.rst similarity index 90% rename from docs/apache-airflow-providers-amazon/operators/eventbridge.rst rename to providers/amazon/docs/operators/eventbridge.rst index a39c8841492556..4df3a1ebb557a4 100644 --- a/docs/apache-airflow-providers-amazon/operators/eventbridge.rst +++ b/providers/amazon/docs/operators/eventbridge.rst @@ -48,7 +48,7 @@ Send events to Amazon EventBridge To send custom events to Amazon EventBridge, use :class:`~airflow.providers.amazon.aws.operators.eventbridge.EventBridgePutEventsOperator`. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_eventbridge.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_eventbridge.py :language: python :dedent: 4 :start-after: [START howto_operator_eventbridge_put_events] @@ -63,7 +63,7 @@ Create or update a rule on Amazon EventBridge To create or update a rule on EventBridge, use :class:`~airflow.providers.amazon.aws.operators.eventbridge.EventBridgePutRuleOperator`. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_eventbridge.py +.. 
exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_eventbridge.py :language: python :dedent: 4 :start-after: [START howto_operator_eventbridge_put_rule] @@ -78,7 +78,7 @@ Enable a rule on Amazon EventBridge To enable an existing rule on EventBridge, use :class:`~airflow.providers.amazon.aws.operators.eventbridge.EventBridgeEnableRuleOperator`. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_eventbridge.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_eventbridge.py :language: python :dedent: 4 :start-after: [START howto_operator_eventbridge_enable_rule] @@ -93,7 +93,7 @@ Disable a rule on Amazon EventBridge To disable an existing rule on EventBridge, use :class:`~airflow.providers.amazon.aws.operators.eventbridge.EventBridgeDisableRuleOperator`. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_eventbridge.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_eventbridge.py :language: python :dedent: 4 :start-after: [START howto_operator_eventbridge_disable_rule] diff --git a/docs/apache-airflow-providers-amazon/operators/glue.rst b/providers/amazon/docs/operators/glue.rst similarity index 88% rename from docs/apache-airflow-providers-amazon/operators/glue.rst rename to providers/amazon/docs/operators/glue.rst index 88b4d3374dbf3c..cd12541f115d16 100644 --- a/docs/apache-airflow-providers-amazon/operators/glue.rst +++ b/providers/amazon/docs/operators/glue.rst @@ -46,7 +46,7 @@ AWS Glue Crawlers allow you to easily extract data from various data sources. To create a new AWS Glue Crawler or run an existing one you can use :class:`~airflow.providers.amazon.aws.operators.glue_crawler.GlueCrawlerOperator`. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_glue.py +.. 
exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_glue.py :language: python :dedent: 4 :start-after: [START howto_operator_glue_crawler] @@ -64,7 +64,7 @@ Submit an AWS Glue job To submit a new AWS Glue job you can use :class:`~airflow.providers.amazon.aws.operators.glue.GlueJobOperator`. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_glue.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_glue.py :language: python :dedent: 4 :start-after: [START howto_operator_glue] @@ -84,7 +84,7 @@ of your data so that you can make good business decisions. To create a new AWS Glue Data Quality ruleset or update an existing one you can use :class:`~airflow.providers.amazon.aws.operators.glue.GlueDataQualityOperator`. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_glue_data_quality.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_glue_data_quality.py :language: python :dedent: 4 :start-after: [START howto_operator_glue_data_quality_operator] @@ -98,7 +98,7 @@ Start a AWS Glue Data Quality Evaluation Run To start a AWS Glue Data Quality ruleset evaluation run you can use :class:`~airflow.providers.amazon.aws.operators.glue.GlueDataQualityRuleSetEvaluationRunOperator`. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_glue_data_quality.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_glue_data_quality.py :language: python :dedent: 4 :start-after: [START howto_operator_glue_data_quality_ruleset_evaluation_run_operator] @@ -112,7 +112,7 @@ Start a AWS Glue Data Quality Recommendation Run To start a AWS Glue Data Quality rule recommendation run you can use :class:`~airflow.providers.amazon.aws.operators.glue.GlueDataQualityRuleRecommendationRunOperator`. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_glue_data_quality_with_recommendation.py +.. 
exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_glue_data_quality_with_recommendation.py :language: python :dedent: 4 :start-after: [START howto_operator_glue_data_quality_rule_recommendation_run] @@ -129,7 +129,7 @@ Wait on an AWS Glue crawler state To wait on the state of an AWS Glue crawler execution until it reaches a terminal state you can use :class:`~airflow.providers.amazon.aws.sensors.glue_crawler.GlueCrawlerSensor`. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_glue.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_glue.py :language: python :dedent: 4 :start-after: [START howto_sensor_glue_crawler] @@ -143,7 +143,7 @@ Wait on an AWS Glue job state To wait on the state of an AWS Glue Job until it reaches a terminal state you can use :class:`~airflow.providers.amazon.aws.sensors.glue.GlueJobSensor` -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_glue.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_glue.py :language: python :dedent: 4 :start-after: [START howto_sensor_glue] @@ -157,7 +157,7 @@ Wait on an AWS Glue Data Quality Evaluation Run To wait on the state of an AWS Glue Data Quality RuleSet Evaluation Run until it reaches a terminal state you can use :class:`~airflow.providers.amazon.aws.sensors.glue.GlueDataQualityRuleSetEvaluationRunSensor` -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_glue_data_quality.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_glue_data_quality.py :language: python :dedent: 4 :start-after: [START howto_sensor_glue_data_quality_ruleset_evaluation_run] @@ -171,7 +171,7 @@ Wait on an AWS Glue Data Quality Recommendation Run To wait on the state of an AWS Glue Data Quality recommendation run until it reaches a terminal state you can use :class:`~airflow.providers.amazon.aws.sensors.glue.GlueDataQualityRuleRecommendationRunSensor` -.. 
exampleinclude:: /../../providers/tests/system/amazon/aws/example_glue_data_quality_with_recommendation.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_glue_data_quality_with_recommendation.py :language: python :dedent: 4 :start-after: [START howto_sensor_glue_data_quality_rule_recommendation_run] @@ -185,7 +185,7 @@ Wait on an AWS Glue Catalog Partition To wait for a partition to show up in AWS Glue Catalog until it reaches a terminal state you can use :class:`~airflow.providers.amazon.aws.sensors.glue_catalog_partition.GlueCatalogPartitionSensor` -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_glue.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_glue.py :language: python :dedent: 4 :start-after: [START howto_sensor_glue_catalog_partition] diff --git a/docs/apache-airflow-providers-amazon/operators/glue_databrew.rst b/providers/amazon/docs/operators/glue_databrew.rst similarity index 96% rename from docs/apache-airflow-providers-amazon/operators/glue_databrew.rst rename to providers/amazon/docs/operators/glue_databrew.rst index 2286a5146a59dd..054d98d1d8e4bc 100644 --- a/docs/apache-airflow-providers-amazon/operators/glue_databrew.rst +++ b/providers/amazon/docs/operators/glue_databrew.rst @@ -46,7 +46,7 @@ Start an AWS Glue DataBrew job To submit a new AWS Glue DataBrew job you can use :class:`~airflow.providers.amazon.aws.operators.glue_databrew.GlueDataBrewStartJobOperator`. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_glue_databrew.py +.. 
exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_glue_databrew.py :language: python :dedent: 4 :start-after: [START howto_operator_glue_databrew_start] diff --git a/docs/apache-airflow-providers-amazon/operators/index.rst b/providers/amazon/docs/operators/index.rst similarity index 100% rename from docs/apache-airflow-providers-amazon/operators/index.rst rename to providers/amazon/docs/operators/index.rst diff --git a/docs/apache-airflow-providers-amazon/operators/kinesis_analytics.rst b/providers/amazon/docs/operators/kinesis_analytics.rst similarity index 90% rename from docs/apache-airflow-providers-amazon/operators/kinesis_analytics.rst rename to providers/amazon/docs/operators/kinesis_analytics.rst index dc351fc1c3401f..9dd55ede1c347e 100644 --- a/docs/apache-airflow-providers-amazon/operators/kinesis_analytics.rst +++ b/providers/amazon/docs/operators/kinesis_analytics.rst @@ -44,7 +44,7 @@ Create an Amazon Managed Service for Apache Flink Application To create an Amazon Managed Service for Apache Flink application, you can use :class:`~airflow.providers.amazon.aws.operators.kinesis_analytics.KinesisAnalyticsV2CreateApplicationOperator`. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_kinesis_analytics.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_kinesis_analytics.py :language: python :dedent: 4 :start-after: [START howto_operator_create_application] @@ -58,7 +58,7 @@ Start an Amazon Managed Service for Apache Flink Application To start an Amazon Managed Service for Apache Flink application, you can use :class:`~airflow.providers.amazon.aws.operators.kinesis_analytics.KinesisAnalyticsV2StartApplicationOperator`. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_kinesis_analytics.py +.. 
exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_kinesis_analytics.py :language: python :dedent: 4 :start-after: [START howto_operator_start_application] @@ -72,7 +72,7 @@ Stop an Amazon Managed Service for Apache Flink Application To stop an Amazon Managed Service for Apache Flink application, you can use :class:`~airflow.providers.amazon.aws.operators.kinesis_analytics.KinesisAnalyticsV2StopApplicationOperator`. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_kinesis_analytics.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_kinesis_analytics.py :language: python :dedent: 4 :start-after: [START howto_operator_stop_application] @@ -89,7 +89,7 @@ Wait for an Amazon Managed Service for Apache Flink Application to start To wait on the state of an Amazon Managed Service for Apache Flink Application to start you can use :class:`~airflow.providers.amazon.aws.sensors.kinesis_analytics.KinesisAnalyticsV2StartApplicationCompletedSensor`. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_kinesis_analytics.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_kinesis_analytics.py :language: python :dedent: 4 :start-after: [START howto_sensor_start_application] @@ -103,7 +103,7 @@ Wait for an Amazon Managed Service for Apache Flink Application to stop To wait on the state of an Amazon Managed Service for Apache Flink Application to stop you can use :class:`~airflow.providers.amazon.aws.sensors.kinesis_analytics.KinesisAnalyticsV2StopApplicationCompletedSensor`. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_kinesis_analytics.py +.. 
exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_kinesis_analytics.py :language: python :dedent: 4 :start-after: [START howto_sensor_stop_application] diff --git a/docs/apache-airflow-providers-amazon/operators/lambda.rst b/providers/amazon/docs/operators/lambda.rst similarity index 95% rename from docs/apache-airflow-providers-amazon/operators/lambda.rst rename to providers/amazon/docs/operators/lambda.rst index 3a9f1a01324511..93df72f9f51287 100644 --- a/docs/apache-airflow-providers-amazon/operators/lambda.rst +++ b/providers/amazon/docs/operators/lambda.rst @@ -48,7 +48,7 @@ To create an AWS lambda function you can use This operator can be run in deferrable mode by passing ``deferrable=True`` as a parameter. This requires the aiobotocore module to be installed. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_lambda.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_lambda.py :language: python :dedent: 4 :start-after: [START howto_operator_create_lambda_function] @@ -111,7 +111,7 @@ To invoke an AWS lambda function you can use The only way is `configuring destinations for asynchronous invocation `_ and sensing destination. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_lambda.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_lambda.py :language: python :dedent: 4 :start-after: [START howto_operator_invoke_lambda_function] @@ -128,7 +128,7 @@ Wait on an AWS Lambda function deployment state To check the deployment state of an AWS Lambda function until it reaches the target state or another terminal state you can use :class:`~airflow.providers.amazon.aws.sensors.lambda_function.LambdaFunctionStateSensor`. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_lambda.py +.. 
exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_lambda.py :language: python :dedent: 4 :start-after: [START howto_sensor_lambda_function_state] diff --git a/docs/apache-airflow-providers-amazon/operators/neptune.rst b/providers/amazon/docs/operators/neptune.rst similarity index 93% rename from docs/apache-airflow-providers-amazon/operators/neptune.rst rename to providers/amazon/docs/operators/neptune.rst index 7b9204e1c02792..b8041260529e28 100644 --- a/docs/apache-airflow-providers-amazon/operators/neptune.rst +++ b/providers/amazon/docs/operators/neptune.rst @@ -49,7 +49,7 @@ the aiobotocore module to be installed. .. note:: This operator only starts an existing Neptune database cluster, it does not create a cluster. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_neptune.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_neptune.py :language: python :dedent: 4 :start-after: [START howto_operator_start_neptune_cluster] @@ -65,7 +65,7 @@ To stop a running Neptune database cluster, you can use This operator can be run in deferrable mode by passing ``deferrable=True`` as a parameter. This requires the aiobotocore module to be installed. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_neptune.py +.. 
exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_neptune.py :language: python :dedent: 4 :start-after: [START howto_operator_stop_neptune_cluster] diff --git a/docs/apache-airflow-providers-amazon/operators/opensearchserverless.rst b/providers/amazon/docs/operators/opensearchserverless.rst similarity index 95% rename from docs/apache-airflow-providers-amazon/operators/opensearchserverless.rst rename to providers/amazon/docs/operators/opensearchserverless.rst index 4c6f3686558125..b135a349fb0217 100644 --- a/docs/apache-airflow-providers-amazon/operators/opensearchserverless.rst +++ b/providers/amazon/docs/operators/opensearchserverless.rst @@ -46,7 +46,7 @@ Wait for an Amazon OpenSearch Serverless Collection to become active To wait on the state of an Amazon Bedrock customize model job until it reaches a terminal state you can use :class:`~airflow.providers.amazon.aws.sensors.bedrock.OpenSearchServerlessCollectionActiveSensor` -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_bedrock_retrieve_and_generate.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_bedrock_retrieve_and_generate.py :language: python :dedent: 4 :start-after: [START howto_sensor_opensearch_collection_active] diff --git a/docs/apache-airflow-providers-amazon/operators/quicksight.rst b/providers/amazon/docs/operators/quicksight.rst similarity index 93% rename from docs/apache-airflow-providers-amazon/operators/quicksight.rst rename to providers/amazon/docs/operators/quicksight.rst index 4ae412da6e3aef..9f08d4e388cf2b 100644 --- a/docs/apache-airflow-providers-amazon/operators/quicksight.rst +++ b/providers/amazon/docs/operators/quicksight.rst @@ -46,7 +46,7 @@ Amazon QuickSight create ingestion The ``QuickSightCreateIngestionOperator`` creates and starts a new SPICE ingestion for a dataset. The operator also refreshes existing SPICE datasets. -.. 
exampleinclude:: /../../providers/tests/system/amazon/aws/example_quicksight.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_quicksight.py :language: python :dedent: 4 :start-after: [START howto_operator_quicksight_create_ingestion] @@ -62,7 +62,7 @@ Amazon QuickSight ingestion sensor The ``QuickSightSensor`` waits for an Amazon QuickSight create ingestion until it reaches a terminal state. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_quicksight.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_quicksight.py :language: python :dedent: 4 :start-after: [START howto_sensor_quicksight] diff --git a/docs/apache-airflow-providers-amazon/operators/rds.rst b/providers/amazon/docs/operators/rds.rst similarity index 87% rename from docs/apache-airflow-providers-amazon/operators/rds.rst rename to providers/amazon/docs/operators/rds.rst index 9b06c1a048305e..d2f6082e9a95b7 100644 --- a/docs/apache-airflow-providers-amazon/operators/rds.rst +++ b/providers/amazon/docs/operators/rds.rst @@ -41,7 +41,7 @@ To create a snapshot of an Amazon RDS database instance or cluster you can use :class:`~airflow.providers.amazon.aws.operators.rds.RDSCreateDBSnapshotOperator`. The source database instance must be in the ``available`` or ``storage-optimization`` state. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_rds_snapshot.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_rds_snapshot.py :language: python :dedent: 4 :start-after: [START howto_operator_rds_create_db_snapshot] @@ -56,7 +56,7 @@ To copy a snapshot of an Amazon RDS database instance or cluster you can use :class:`~airflow.providers.amazon.aws.operators.rds.RDSCopyDBSnapshotOperator`. The source database snapshot must be in the ``available`` state. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_rds_snapshot.py +.. 
exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_rds_snapshot.py :language: python :dedent: 4 :start-after: [START howto_operator_rds_copy_snapshot] @@ -71,7 +71,7 @@ To delete a snapshot of an Amazon RDS database instance or cluster you can use :class:`~airflow.providers.amazon.aws.operators.rds.RDSDeleteDBSnapshotOperator`. The database snapshot must be in the ``available`` state to be deleted. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_rds_snapshot.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_rds_snapshot.py :language: python :dedent: 4 :start-after: [START howto_operator_rds_delete_snapshot] @@ -86,7 +86,7 @@ To export an Amazon RDS snapshot to Amazon S3 you can use :class:`~airflow.providers.amazon.aws.operators.rds.RDSStartExportTaskOperator`. The provided IAM role must have access to the S3 bucket. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_rds_export.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_rds_export.py :language: python :dedent: 4 :start-after: [START howto_operator_rds_start_export_task] @@ -101,7 +101,7 @@ To cancel an Amazon RDS export task to S3 you can use :class:`~airflow.providers.amazon.aws.operators.rds.RDSCancelExportTaskOperator`. Any data that has already been written to the S3 bucket isn't removed. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_rds_export.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_rds_export.py :language: python :dedent: 4 :start-after: [START howto_operator_rds_cancel_export] @@ -118,7 +118,7 @@ This action requires an Amazon SNS topic Amazon Resource Name (ARN). Amazon RDS event notification is only available for not encrypted SNS topics. If you specify an encrypted SNS topic, event notifications are not sent for the topic. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_rds_event.py +.. 
exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_rds_event.py :language: python :dedent: 4 :start-after: [START howto_operator_rds_create_event_subscription] @@ -132,7 +132,7 @@ Unsubscribe to an Amazon RDS event notification To delete an Amazon RDS event subscription you can use :class:`~airflow.providers.amazon.aws.operators.rds.RDSDeleteEventSubscriptionOperator`. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_rds_event.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_rds_event.py :language: python :dedent: 4 :start-after: [START howto_operator_rds_delete_event_subscription] @@ -147,7 +147,7 @@ To create a AWS DB instance you can use :class:`~airflow.providers.amazon.aws.operators.rds.RdsCreateDbInstanceOperator`. You can also run this operator in deferrable mode by setting ``deferrable`` param to ``True``. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_rds_instance.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_rds_instance.py :language: python :dedent: 4 :start-after: [START howto_operator_rds_create_db_instance] @@ -162,7 +162,7 @@ To delete a AWS DB instance you can use :class:`~airflow.providers.amazon.aws.operators.rds.RDSDeleteDbInstanceOperator`. You can also run this operator in deferrable mode by setting ``deferrable`` param to ``True``. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_rds_instance.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_rds_instance.py :language: python :dedent: 4 :start-after: [START howto_operator_rds_delete_db_instance] @@ -176,7 +176,7 @@ Start a database instance or cluster To start an Amazon RDS DB instance or cluster you can use :class:`~airflow.providers.amazon.aws.operators.rds.RdsStartDbOperator`. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_rds_instance.py +.. 
exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_rds_instance.py :language: python :dedent: 4 :start-after: [START howto_operator_rds_start_db] @@ -191,7 +191,7 @@ Stop a database instance or cluster To stop an Amazon RDS DB instance or cluster you can use :class:`~airflow.providers.amazon.aws.operators.rds.RdsStopDbOperator`. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_rds_instance.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_rds_instance.py :language: python :dedent: 4 :start-after: [START howto_operator_rds_stop_db] @@ -209,7 +209,7 @@ To wait for an Amazon RDS instance or cluster to reach a specific status you can :class:`~airflow.providers.amazon.aws.sensors.rds.RdsDbSensor`. By default, the sensor waits for a database instance to reach the ``available`` state. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_rds_instance.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_rds_instance.py :language: python :dedent: 4 :start-after: [START howto_sensor_rds_instance] @@ -225,7 +225,7 @@ To wait for an Amazon RDS snapshot with specific statuses you can use :class:`~airflow.providers.amazon.aws.sensors.rds.RdsSnapshotExistenceSensor`. By default, the sensor waits for the existence of a snapshot with status ``available``. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_rds_snapshot.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_rds_snapshot.py :language: python :dedent: 4 :start-after: [START howto_sensor_rds_snapshot_existence] @@ -241,7 +241,7 @@ To wait a for an Amazon RDS snapshot export task with specific statuses you can :class:`~airflow.providers.amazon.aws.sensors.rds.RdsExportTaskExistenceSensor`. By default, the sensor waits for the existence of a snapshot with status ``available``. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_rds_export.py +.. 
exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_rds_export.py :language: python :dedent: 4 :start-after: [START howto_sensor_rds_export_task_existence] diff --git a/docs/apache-airflow-providers-amazon/operators/redshift/index.rst b/providers/amazon/docs/operators/redshift/index.rst similarity index 100% rename from docs/apache-airflow-providers-amazon/operators/redshift/index.rst rename to providers/amazon/docs/operators/redshift/index.rst diff --git a/docs/apache-airflow-providers-amazon/operators/redshift/redshift_cluster.rst b/providers/amazon/docs/operators/redshift/redshift_cluster.rst similarity index 89% rename from docs/apache-airflow-providers-amazon/operators/redshift/redshift_cluster.rst rename to providers/amazon/docs/operators/redshift/redshift_cluster.rst index bc710b56eecc75..3369c333650a63 100644 --- a/docs/apache-airflow-providers-amazon/operators/redshift/redshift_cluster.rst +++ b/providers/amazon/docs/operators/redshift/redshift_cluster.rst @@ -40,7 +40,7 @@ Create an Amazon Redshift cluster To create an Amazon Redshift Cluster with the specified parameters you can use :class:`~airflow.providers.amazon.aws.operators.redshift_cluster.RedshiftCreateClusterOperator`. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_redshift.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_redshift.py :language: python :dedent: 4 :start-after: [START howto_operator_redshift_cluster] @@ -56,7 +56,7 @@ To resume a 'paused' Amazon Redshift cluster you can use You can also run this operator in deferrable mode by setting ``deferrable`` param to ``True``. This will ensure that the task is deferred from the Airflow worker slot and polling for the task status happens on the trigger. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_redshift.py +.. 
exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_redshift.py :language: python :dedent: 4 :start-after: [START howto_operator_redshift_resume_cluster] @@ -71,7 +71,7 @@ To pause an ``available`` Amazon Redshift cluster you can use :class:`RedshiftPauseClusterOperator `. You can also run this operator in deferrable mode by setting ``deferrable`` param to ``True`` -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_redshift.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_redshift.py :language: python :dedent: 4 :start-after: [START howto_operator_redshift_pause_cluster] @@ -85,7 +85,7 @@ Create an Amazon Redshift cluster snapshot To create Amazon Redshift cluster snapshot you can use :class:`RedshiftCreateClusterSnapshotOperator ` -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_redshift.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_redshift.py :language: python :dedent: 4 :start-after: [START howto_operator_redshift_create_cluster_snapshot] @@ -99,7 +99,7 @@ Delete an Amazon Redshift cluster snapshot To delete Amazon Redshift cluster snapshot you can use :class:`RedshiftDeleteClusterSnapshotOperator ` -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_redshift.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_redshift.py :language: python :dedent: 4 :start-after: [START howto_operator_redshift_delete_cluster_snapshot] @@ -114,7 +114,7 @@ To delete an Amazon Redshift cluster you can use :class:`RedshiftDeleteClusterOperator `. You can also run this operator in deferrable mode by setting ``deferrable`` param to ``True`` -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_redshift.py +.. 
exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_redshift.py :language: python :dedent: 4 :start-after: [START howto_operator_redshift_delete_cluster] @@ -131,7 +131,7 @@ Wait on an Amazon Redshift cluster state To check the state of an Amazon Redshift Cluster until it reaches the target state or another terminal state you can use :class:`~airflow.providers.amazon.aws.sensors.redshift_cluster.RedshiftClusterSensor`. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_redshift.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_redshift.py :language: python :dedent: 4 :start-after: [START howto_sensor_redshift_cluster] diff --git a/docs/apache-airflow-providers-amazon/operators/redshift/redshift_data.rst b/providers/amazon/docs/operators/redshift/redshift_data.rst similarity index 93% rename from docs/apache-airflow-providers-amazon/operators/redshift/redshift_data.rst rename to providers/amazon/docs/operators/redshift/redshift_data.rst index 762eced74fa683..aa472e61ec2450 100644 --- a/docs/apache-airflow-providers-amazon/operators/redshift/redshift_data.rst +++ b/providers/amazon/docs/operators/redshift/redshift_data.rst @@ -48,7 +48,7 @@ statements against an Amazon Redshift cluster. This differs from ``RedshiftSQLOperator`` in that it allows users to query and retrieve data via the AWS API and avoid the necessity of a Postgres connection. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_redshift.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_redshift.py :language: python :dedent: 4 :start-after: [START howto_operator_redshift_data] @@ -60,7 +60,7 @@ Reuse a session when executing multiple statements Specify the ``session_keep_alive_seconds`` parameter on an upstream task. In a downstream task, get the session ID from the XCom and pass it to the ``session_id`` parameter. This is useful when you work with temporary tables. -.. 
exampleinclude:: /../../providers/tests/system/amazon/aws/example_redshift.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_redshift.py :language: python :dedent: 4 :start-after: [START howto_operator_redshift_data_session_reuse] diff --git a/docs/apache-airflow-providers-amazon/operators/redshift/redshift_sql.rst b/providers/amazon/docs/operators/redshift/redshift_sql.rst similarity index 100% rename from docs/apache-airflow-providers-amazon/operators/redshift/redshift_sql.rst rename to providers/amazon/docs/operators/redshift/redshift_sql.rst diff --git a/docs/apache-airflow-providers-amazon/operators/s3/glacier.rst b/providers/amazon/docs/operators/s3/glacier.rst similarity index 91% rename from docs/apache-airflow-providers-amazon/operators/s3/glacier.rst rename to providers/amazon/docs/operators/s3/glacier.rst index ab5b6acbe704cc..2f4fefa37d73fd 100644 --- a/docs/apache-airflow-providers-amazon/operators/s3/glacier.rst +++ b/providers/amazon/docs/operators/s3/glacier.rst @@ -45,7 +45,7 @@ use :class:`~airflow.providers.amazon.aws.transfers.glacier_to_gcs.GlacierCreate This Operator returns a dictionary of information related to the initiated job such as *jobId*, which is required for subsequent tasks. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_glacier_to_gcs.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_glacier_to_gcs.py :language: python :dedent: 4 :start-after: [START howto_operator_glacier_create_job] @@ -59,7 +59,7 @@ Upload archive to an Amazon Glacier To add an archive to an Amazon S3 Glacier vault use :class:`~airflow.providers.amazon.aws.transfers.glacier_to_gcs.GlacierUploadArchiveOperator` -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_glacier_to_gcs.py +.. 
exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_glacier_to_gcs.py :language: python :dedent: 4 :start-after: [START howto_operator_glacier_upload_archive] @@ -76,7 +76,7 @@ Wait on an Amazon Glacier job state To wait on the status of an Amazon Glacier Job to reach a terminal state use :class:`~airflow.providers.amazon.aws.sensors.glacier.GlacierJobOperationSensor` -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_glacier_to_gcs.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_glacier_to_gcs.py :language: python :dedent: 4 :start-after: [START howto_sensor_glacier_job_operation] diff --git a/docs/apache-airflow-providers-amazon/operators/s3/index.rst b/providers/amazon/docs/operators/s3/index.rst similarity index 100% rename from docs/apache-airflow-providers-amazon/operators/s3/index.rst rename to providers/amazon/docs/operators/s3/index.rst diff --git a/docs/apache-airflow-providers-amazon/operators/s3/s3.rst b/providers/amazon/docs/operators/s3/s3.rst similarity index 86% rename from docs/apache-airflow-providers-amazon/operators/s3/s3.rst rename to providers/amazon/docs/operators/s3/s3.rst index ff48c1d5d15b88..0ba34180156d80 100644 --- a/docs/apache-airflow-providers-amazon/operators/s3/s3.rst +++ b/providers/amazon/docs/operators/s3/s3.rst @@ -38,7 +38,7 @@ Create an Amazon S3 bucket To create an Amazon S3 bucket you can use :class:`~airflow.providers.amazon.aws.operators.s3.S3CreateBucketOperator`. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_s3.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_s3.py :language: python :dedent: 4 :start-after: [START howto_operator_s3_create_bucket] @@ -52,7 +52,7 @@ Delete an Amazon S3 bucket To delete an Amazon S3 bucket you can use :class:`~airflow.providers.amazon.aws.operators.s3.S3DeleteBucketOperator`. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_s3.py +.. 
exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_s3.py :language: python :dedent: 4 :start-after: [START howto_operator_s3_delete_bucket] @@ -66,7 +66,7 @@ Set the tags for an Amazon S3 bucket To set the tags for an Amazon S3 bucket you can use :class:`~airflow.providers.amazon.aws.operators.s3.S3PutBucketTaggingOperator`. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_s3.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_s3.py :language: python :dedent: 4 :start-after: [START howto_operator_s3_put_bucket_tagging] @@ -80,7 +80,7 @@ Get the tag of an Amazon S3 bucket To get the tag set associated with an Amazon S3 bucket you can use :class:`~airflow.providers.amazon.aws.operators.s3.S3GetBucketTaggingOperator`. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_s3.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_s3.py :language: python :dedent: 4 :start-after: [START howto_operator_s3_get_bucket_tagging] @@ -94,7 +94,7 @@ Delete the tags of an Amazon S3 bucket To delete the tags of an Amazon S3 bucket you can use :class:`~airflow.providers.amazon.aws.operators.s3.S3DeleteBucketTaggingOperator`. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_s3.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_s3.py :language: python :dedent: 4 :start-after: [START howto_operator_s3_delete_bucket_tagging] @@ -108,7 +108,7 @@ Create an Amazon S3 object To create a new (or replace) Amazon S3 object you can use :class:`~airflow.providers.amazon.aws.operators.s3.S3CreateObjectOperator`. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_s3.py +.. 
exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_s3.py :language: python :dedent: 4 :start-after: [START howto_operator_s3_create_object] @@ -123,7 +123,7 @@ To copy an Amazon S3 object from one bucket to another you can use :class:`~airflow.providers.amazon.aws.operators.s3.S3CopyObjectOperator`. The Amazon S3 connection used here needs to have access to both source and destination bucket/key. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_s3.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_s3.py :language: python :dedent: 4 :start-after: [START howto_operator_s3_copy_object] @@ -137,7 +137,7 @@ Delete Amazon S3 objects To delete one or multiple Amazon S3 objects you can use :class:`~airflow.providers.amazon.aws.operators.s3.S3DeleteObjectsOperator`. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_s3.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_s3.py :language: python :dedent: 4 :start-after: [START howto_operator_s3_delete_objects] @@ -153,7 +153,7 @@ To transform the data from one Amazon S3 object and save it to another object yo You can also apply an optional `Amazon S3 Select expression `_ to select the data you want to retrieve from ``source_s3_key`` using ``select_expression``. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_s3.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_s3.py :language: python :dedent: 4 :start-after: [START howto_operator_s3_file_transform] @@ -169,7 +169,7 @@ To list all Amazon S3 prefixes within an Amazon S3 bucket you can use See `here `__ for more information about Amazon S3 prefixes. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_s3.py +.. 
exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_s3.py :language: python :dedent: 4 :start-after: [START howto_operator_s3_list_prefixes] @@ -184,7 +184,7 @@ To list all Amazon S3 objects within an Amazon S3 bucket you can use :class:`~airflow.providers.amazon.aws.operators.s3.S3ListOperator`. You can specify a ``prefix`` to filter the objects whose name begins with such prefix. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_s3.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_s3.py :language: python :dedent: 4 :start-after: [START howto_operator_s3_list] @@ -208,7 +208,7 @@ Please keep in mind, especially when used to check a large volume of keys, that To check one file: -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_s3.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_s3.py :language: python :dedent: 4 :start-after: [START howto_sensor_s3_key_single_key] @@ -216,7 +216,7 @@ To check one file: To check multiple files: -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_s3.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_s3.py :language: python :dedent: 4 :start-after: [START howto_sensor_s3_key_multiple_keys] @@ -224,7 +224,7 @@ To check multiple files: To check a file with regular expression: -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_s3.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_s3.py :language: python :dedent: 4 :start-after: [START howto_sensor_s3_key_regex] @@ -244,13 +244,13 @@ multiple files can match one key. The list of matched S3 object attributes conta [{"Size": int}] -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_s3.py +.. 
exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_s3.py :language: python :dedent: 4 :start-after: [START howto_sensor_s3_key_function_definition] :end-before: [END howto_sensor_s3_key_function_definition] -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_s3.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_s3.py :language: python :dedent: 4 :start-after: [START howto_sensor_s3_key_function] @@ -262,7 +262,7 @@ the triggerer asynchronously. Note that this will need triggerer to be available To check one file: -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_s3.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_s3.py :language: python :dedent: 4 :start-after: [START howto_sensor_s3_key_single_key_deferrable] @@ -270,7 +270,7 @@ To check one file: To check multiple files: -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_s3.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_s3.py :language: python :dedent: 4 :start-after: [START howto_sensor_s3_key_multiple_keys_deferrable] @@ -278,7 +278,7 @@ To check multiple files: To check a file with regular expression: -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_s3.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_s3.py :language: python :dedent: 4 :start-after: [START howto_sensor_s3_key_regex_deferrable] @@ -295,7 +295,7 @@ the inactivity period has passed with no increase in the number of objects you c Note, this sensor will not behave correctly in reschedule mode, as the state of the listed objects in the Amazon S3 bucket will be lost between rescheduled invocations. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_s3.py +.. 
exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_s3.py :language: python :dedent: 4 :start-after: [START howto_sensor_s3_keys_unchanged] diff --git a/docs/apache-airflow-providers-amazon/operators/sagemaker.rst b/providers/amazon/docs/operators/sagemaker.rst similarity index 86% rename from docs/apache-airflow-providers-amazon/operators/sagemaker.rst rename to providers/amazon/docs/operators/sagemaker.rst index 03e5a7a921c270..b961bad61b744a 100644 --- a/docs/apache-airflow-providers-amazon/operators/sagemaker.rst +++ b/providers/amazon/docs/operators/sagemaker.rst @@ -42,7 +42,7 @@ Create an Amazon SageMaker processing job To create an Amazon Sagemaker processing job to sanitize your dataset you can use :class:`~airflow.providers.amazon.aws.operators.sagemaker.SageMakerProcessingOperator`. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_sagemaker.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_sagemaker.py :language: python :dedent: 4 :start-after: [START howto_operator_sagemaker_processing] @@ -56,7 +56,7 @@ Create an Amazon SageMaker training job To create an Amazon Sagemaker training job you can use :class:`~airflow.providers.amazon.aws.operators.sagemaker.SageMakerTrainingOperator`. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_sagemaker.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_sagemaker.py :language: python :dedent: 4 :start-after: [START howto_operator_sagemaker_training] @@ -70,7 +70,7 @@ Create an Amazon SageMaker model To create an Amazon Sagemaker model you can use :class:`~airflow.providers.amazon.aws.operators.sagemaker.SageMakerModelOperator`. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_sagemaker.py +.. 
exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_sagemaker.py :language: python :dedent: 4 :start-after: [START howto_operator_sagemaker_model] @@ -84,7 +84,7 @@ Start a hyperparameter tuning job To start a hyperparameter tuning job for an Amazon Sagemaker model you can use :class:`~airflow.providers.amazon.aws.operators.sagemaker.SageMakerTuningOperator`. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_sagemaker.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_sagemaker.py :language: python :dedent: 4 :start-after: [START howto_operator_sagemaker_tuning] @@ -98,7 +98,7 @@ Delete an Amazon SageMaker model To delete an Amazon Sagemaker model you can use :class:`~airflow.providers.amazon.aws.operators.sagemaker.SageMakerDeleteModelOperator`. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_sagemaker.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_sagemaker.py :language: python :dedent: 4 :start-after: [START howto_operator_sagemaker_delete_model] @@ -112,7 +112,7 @@ Create an Amazon SageMaker transform job To create an Amazon Sagemaker transform job you can use :class:`~airflow.providers.amazon.aws.operators.sagemaker.SageMakerTransformOperator`. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_sagemaker.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_sagemaker.py :language: python :dedent: 4 :start-after: [START howto_operator_sagemaker_transform] @@ -126,7 +126,7 @@ Create an Amazon SageMaker endpoint config job To create an Amazon Sagemaker endpoint config job you can use :class:`~airflow.providers.amazon.aws.operators.sagemaker.SageMakerEndpointConfigOperator`. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_sagemaker_endpoint.py +.. 
exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_sagemaker_endpoint.py :language: python :dedent: 4 :start-after: [START howto_operator_sagemaker_endpoint_config] @@ -140,7 +140,7 @@ Create an Amazon SageMaker endpoint job To create an Amazon Sagemaker endpoint you can use :class:`~airflow.providers.amazon.aws.operators.sagemaker.SageMakerEndpointOperator`. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_sagemaker_endpoint.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_sagemaker_endpoint.py :language: python :dedent: 4 :start-after: [START howto_operator_sagemaker_endpoint] @@ -154,7 +154,7 @@ Start an Amazon SageMaker pipeline execution To trigger an execution run for an already-defined Amazon Sagemaker pipeline, you can use :class:`~airflow.providers.amazon.aws.operators.sagemaker.SageMakerStartPipelineOperator`. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_sagemaker_pipeline.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_sagemaker_pipeline.py :language: python :dedent: 4 :start-after: [START howto_operator_sagemaker_start_pipeline] @@ -168,7 +168,7 @@ Stop an Amazon SageMaker pipeline execution To stop an Amazon Sagemaker pipeline execution that is currently running, you can use :class:`~airflow.providers.amazon.aws.operators.sagemaker.SageMakerStopPipelineOperator`. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_sagemaker_pipeline.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_sagemaker_pipeline.py :language: python :dedent: 4 :start-after: [START howto_operator_sagemaker_stop_pipeline] @@ -186,7 +186,7 @@ It consists of an inference specification that defines the inference image to us A model package group is a collection of model packages. You can use this operator to add a new version and model package to the group for every DAG run. -.. 
exampleinclude:: /../../providers/tests/system/amazon/aws/example_sagemaker.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_sagemaker.py :language: python :dedent: 4 :start-after: [START howto_operator_sagemaker_register] @@ -202,7 +202,7 @@ An AutoML experiment will take some input data in CSV and the column it should l and train models on it without needing human supervision. The output is placed in an S3 bucket, and automatically deployed if configured for it. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_sagemaker.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_sagemaker.py :language: python :dedent: 4 :start-after: [START howto_operator_sagemaker_auto_ml] @@ -216,7 +216,7 @@ Create an Experiment for later use To create a SageMaker experiment, you can use :class:`~airflow.providers.amazon.aws.operators.sagemaker.SageMakerCreateExperimentOperator`. This creates an experiment so that it's ready to be associated with processing, training and transform jobs. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_sagemaker.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_sagemaker.py :language: python :dedent: 4 :start-after: [START howto_operator_sagemaker_experiment] @@ -230,7 +230,7 @@ Create a SageMaker Notebook Instance To create a SageMaker Notebook Instance , you can use :class:`~airflow.providers.amazon.aws.operators.sagemaker.SageMakerCreateNotebookOperator`. This creates a SageMaker Notebook Instance ready to run Jupyter notebooks. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_sagemaker_notebook.py +.. 
exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_sagemaker_notebook.py :language: python :dedent: 4 :start-after: [START howto_operator_sagemaker_notebook_create] @@ -244,7 +244,7 @@ Stop a SageMaker Notebook Instance To terminate SageMaker Notebook Instance , you can use :class:`~airflow.providers.amazon.aws.operators.sagemaker.SageMakerStopNotebookOperator`. This terminates the ML compute instance and disconnects the ML storage volume. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_sagemaker_notebook.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_sagemaker_notebook.py :language: python :dedent: 4 :start-after: [START howto_operator_sagemaker_notebook_stop] @@ -258,7 +258,7 @@ Start a SageMaker Notebook Instance To launch a SageMaker Notebook Instance and re-attach an ML storage volume, you can use :class:`~airflow.providers.amazon.aws.operators.sagemaker.SageMakerStartNotebookOperator`. This launches a new ML compute instance with the latest version of the libraries and attached your ML storage volume. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_sagemaker_notebook.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_sagemaker_notebook.py :language: python :dedent: 4 :start-after: [START howto_operator_sagemaker_notebook_start] @@ -273,7 +273,7 @@ Delete a SageMaker Notebook Instance To delete a SageMaker Notebook Instance, you can use :class:`~airflow.providers.amazon.aws.operators.sagemaker.SageMakerDeleteNotebookOperator`. This terminates the instance and deletes the ML storage volume and network interface associated with the instance. The instance must be stopped before it can be deleted. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_sagemaker_notebook.py +.. 
exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_sagemaker_notebook.py :language: python :dedent: 4 :start-after: [START howto_operator_sagemaker_notebook_delete] @@ -290,7 +290,7 @@ Wait on an Amazon SageMaker training job state To check the state of an Amazon Sagemaker training job until it reaches a terminal state you can use :class:`~airflow.providers.amazon.aws.sensors.sagemaker.SageMakerTrainingSensor`. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_sagemaker.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_sagemaker.py :language: python :dedent: 4 :start-after: [START howto_sensor_sagemaker_training] @@ -304,7 +304,7 @@ Wait on an Amazon SageMaker transform job state To check the state of an Amazon Sagemaker transform job until it reaches a terminal state you can use :class:`~airflow.providers.amazon.aws.operators.sagemaker.SageMakerTransformOperator`. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_sagemaker.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_sagemaker.py :language: python :dedent: 4 :start-after: [START howto_sensor_sagemaker_transform] @@ -318,7 +318,7 @@ Wait on an Amazon SageMaker tuning job state To check the state of an Amazon Sagemaker hyperparameter tuning job until it reaches a terminal state you can use :class:`~airflow.providers.amazon.aws.sensors.sagemaker.SageMakerTuningSensor`. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_sagemaker.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_sagemaker.py :language: python :dedent: 4 :start-after: [START howto_sensor_sagemaker_tuning] @@ -332,7 +332,7 @@ Wait on an Amazon SageMaker endpoint state To check the state of an Amazon Sagemaker endpoint until it reaches a terminal state you can use :class:`~airflow.providers.amazon.aws.sensors.sagemaker.SageMakerEndpointSensor`. -.. 
exampleinclude:: /../../providers/tests/system/amazon/aws/example_sagemaker_endpoint.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_sagemaker_endpoint.py :language: python :dedent: 4 :start-after: [START howto_sensor_sagemaker_endpoint] @@ -346,7 +346,7 @@ Wait on an Amazon SageMaker pipeline execution state To check the state of an Amazon Sagemaker pipeline execution until it reaches a terminal state you can use :class:`~airflow.providers.amazon.aws.sensors.sagemaker.SageMakerPipelineSensor`. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_sagemaker_pipeline.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_sagemaker_pipeline.py :language: python :dedent: 4 :start-after: [START howto_sensor_sagemaker_pipeline] @@ -360,7 +360,7 @@ Wait on an Amazon SageMaker AutoML experiment state To check the state of an Amazon Sagemaker AutoML job until it reaches a terminal state you can use :class:`~airflow.providers.amazon.aws.sensors.sagemaker.SageMakerAutoMLSensor`. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_sagemaker.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_sagemaker.py :language: python :dedent: 4 :start-after: [START howto_operator_sagemaker_auto_ml] @@ -374,7 +374,7 @@ Wait on an Amazon SageMaker processing job state To check the state of an Amazon Sagemaker processing job until it reaches a terminal state you can use :class:`~airflow.providers.amazon.aws.sensors.sagemaker.SageMakerProcessingSensor`. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_sagemaker.py +.. 
exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_sagemaker.py :language: python :dedent: 4 :start-after: [START howto_sensor_sagemaker_processing] diff --git a/docs/apache-airflow-providers-amazon/operators/sns.rst b/providers/amazon/docs/operators/sns.rst similarity index 96% rename from docs/apache-airflow-providers-amazon/operators/sns.rst rename to providers/amazon/docs/operators/sns.rst index f4eb699e52801e..3055ab210c64d1 100644 --- a/docs/apache-airflow-providers-amazon/operators/sns.rst +++ b/providers/amazon/docs/operators/sns.rst @@ -48,7 +48,7 @@ Publish a message to an existing SNS topic To publish a message to an Amazon SNS Topic you can use :class:`~airflow.providers.amazon.aws.operators.sns.SnsPublishOperator`. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_sns.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_sns.py :language: python :dedent: 4 :start-after: [START howto_operator_sns_publish_operator] diff --git a/docs/apache-airflow-providers-amazon/operators/sqs.rst b/providers/amazon/docs/operators/sqs.rst similarity index 94% rename from docs/apache-airflow-providers-amazon/operators/sqs.rst rename to providers/amazon/docs/operators/sqs.rst index 3eb0087079c38f..c314aa282e9594 100644 --- a/docs/apache-airflow-providers-amazon/operators/sqs.rst +++ b/providers/amazon/docs/operators/sqs.rst @@ -50,7 +50,7 @@ To publish a message to an Amazon SQS queue you can use the In the following example, the task ``publish_to_queue`` publishes a message containing the task instance and the execution date to a queue with a default name of ``Airflow-Example-Queue``. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_sqs.py +.. 
exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_sqs.py :language: python :dedent: 4 :start-after: [START howto_operator_sqs] @@ -68,7 +68,7 @@ To read messages from an Amazon SQS queue until exhausted use the :class:`~airflow.providers.amazon.aws.sensors.sqs.SqsSensor` This sensor can also be run in deferrable mode by setting ``deferrable`` param to ``True``. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_sqs.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_sqs.py :language: python :dedent: 4 :start-after: [START howto_sensor_sqs] diff --git a/docs/apache-airflow-providers-amazon/operators/step_functions.rst b/providers/amazon/docs/operators/step_functions.rst similarity index 91% rename from docs/apache-airflow-providers-amazon/operators/step_functions.rst rename to providers/amazon/docs/operators/step_functions.rst index a83eafa6ae7ada..5f075724d8c165 100644 --- a/docs/apache-airflow-providers-amazon/operators/step_functions.rst +++ b/providers/amazon/docs/operators/step_functions.rst @@ -45,7 +45,7 @@ To start a new AWS Step Functions state machine execution you can use :class:`~airflow.providers.amazon.aws.operators.step_function.StepFunctionStartExecutionOperator`. You can also run this operator in deferrable mode by setting ``deferrable`` param to ``True``. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_step_functions.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_step_functions.py :language: python :dedent: 4 :start-after: [START howto_operator_step_function_start_execution] @@ -59,7 +59,7 @@ Get an AWS Step Functions execution output To fetch the output from an AWS Step Function state machine execution you can use :class:`~airflow.providers.amazon.aws.operators.step_function.StepFunctionGetExecutionOutputOperator`. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_step_functions.py +.. 
exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_step_functions.py :language: python :dedent: 4 :start-after: [START howto_operator_step_function_get_execution_output] @@ -76,7 +76,7 @@ Wait on an AWS Step Functions state machine execution state To wait on the state of an AWS Step Function state machine execution until it reaches a terminal state you can use :class:`~airflow.providers.amazon.aws.sensors.step_function.StepFunctionExecutionSensor`. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_step_functions.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_step_functions.py :language: python :dedent: 4 :start-after: [START howto_sensor_step_function_execution] diff --git a/docs/apache-airflow-providers-amazon/redirects.txt b/providers/amazon/docs/redirects.txt similarity index 100% rename from docs/apache-airflow-providers-amazon/redirects.txt rename to providers/amazon/docs/redirects.txt diff --git a/docs/apache-airflow-providers-amazon/secrets-backends/aws-secrets-manager.rst b/providers/amazon/docs/secrets-backends/aws-secrets-manager.rst similarity index 100% rename from docs/apache-airflow-providers-amazon/secrets-backends/aws-secrets-manager.rst rename to providers/amazon/docs/secrets-backends/aws-secrets-manager.rst diff --git a/docs/apache-airflow-providers-amazon/secrets-backends/aws-ssm-parameter-store.rst b/providers/amazon/docs/secrets-backends/aws-ssm-parameter-store.rst similarity index 97% rename from docs/apache-airflow-providers-amazon/secrets-backends/aws-ssm-parameter-store.rst rename to providers/amazon/docs/secrets-backends/aws-ssm-parameter-store.rst index bb4d2be625e4ea..a4e4d618fe51fa 100644 --- a/docs/apache-airflow-providers-amazon/secrets-backends/aws-ssm-parameter-store.rst +++ b/providers/amazon/docs/secrets-backends/aws-ssm-parameter-store.rst @@ -109,7 +109,7 @@ for example when using HTTP / HTTPS or SPARK, you may need URI's that will look 
http://https%3A%2F%2Fexample.com - spark://spark%3A%2F%2Fspark-master-0.spark-master.spark:7077 + spark://spark%3A%2F%2Fspark-main-0.spark-main.spark:7077 This is a known situation, where schema and protocol parts of the URI are independent and in some cases, need to be specified explicitly. @@ -124,7 +124,7 @@ The same connections could be represented in AWS SSM Parameter Store as a JSON O {"conn_type": "http", "host": "https://example.com"} - {"conn_type": "spark", "host": "spark://spark-master-0.spark-master.spark", "port": 7077} + {"conn_type": "spark", "host": "spark://spark-main-0.spark-main.spark", "port": 7077} Storing and Retrieving Variables diff --git a/docs/apache-airflow-providers-amazon/secrets-backends/index.rst b/providers/amazon/docs/secrets-backends/index.rst similarity index 100% rename from docs/apache-airflow-providers-amazon/secrets-backends/index.rst rename to providers/amazon/docs/secrets-backends/index.rst diff --git a/docs/apache-airflow-providers-amazon/security.rst b/providers/amazon/docs/security.rst similarity index 100% rename from docs/apache-airflow-providers-amazon/security.rst rename to providers/amazon/docs/security.rst diff --git a/docs/apache-airflow-providers-amazon/transfer/azure_blob_to_s3.rst b/providers/amazon/docs/transfer/azure_blob_to_s3.rst similarity index 95% rename from docs/apache-airflow-providers-amazon/transfer/azure_blob_to_s3.rst rename to providers/amazon/docs/transfer/azure_blob_to_s3.rst index cf68049f95165f..c2c363c9f62e6e 100644 --- a/docs/apache-airflow-providers-amazon/transfer/azure_blob_to_s3.rst +++ b/providers/amazon/docs/transfer/azure_blob_to_s3.rst @@ -39,7 +39,7 @@ To copy data from an Azure Blob Storage container to an Amazon S3 bucket you can Example usage: -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_azure_blob_to_s3.py +.. 
exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_azure_blob_to_s3.py :language: python :dedent: 4 :start-after: [START howto_transfer_azure_blob_to_s3] diff --git a/docs/apache-airflow-providers-amazon/transfer/dynamodb_to_s3.rst b/providers/amazon/docs/transfer/dynamodb_to_s3.rst similarity index 89% rename from docs/apache-airflow-providers-amazon/transfer/dynamodb_to_s3.rst rename to providers/amazon/docs/transfer/dynamodb_to_s3.rst index c8d18f43c3c2ed..04e0cd8a63c627 100644 --- a/docs/apache-airflow-providers-amazon/transfer/dynamodb_to_s3.rst +++ b/providers/amazon/docs/transfer/dynamodb_to_s3.rst @@ -48,7 +48,7 @@ To get more information visit: Example usage: -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_dynamodb_to_s3.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_dynamodb_to_s3.py :language: python :dedent: 4 :start-after: [START howto_transfer_dynamodb_to_s3] @@ -57,7 +57,7 @@ Example usage: To parallelize the replication, users can create multiple ``DynamoDBToS3Operator`` tasks using the ``TotalSegments`` parameter. For instance to replicate with parallelism of 2, create two tasks: -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_dynamodb_to_s3.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_dynamodb_to_s3.py :language: python :dedent: 4 :start-after: [START howto_transfer_dynamodb_to_s3_segmented] @@ -67,7 +67,7 @@ Users can also pass in ``point_in_time_export`` boolean param to ``DynamoDBToS3O Full export example usage: -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_dynamodb_to_s3.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_dynamodb_to_s3.py :language: python :dedent: 4 :start-after: [START howto_transfer_dynamodb_to_s3_in_some_point_in_time_full_export] @@ -75,7 +75,7 @@ Full export example usage: Incremental export example usage: -.. 
exampleinclude:: /../../providers/tests/system/amazon/aws/example_dynamodb_to_s3.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_dynamodb_to_s3.py :language: python :dedent: 4 :start-after: [START howto_transfer_dynamodb_to_s3_in_some_point_in_time_incremental_export] diff --git a/docs/apache-airflow-providers-amazon/transfer/ftp_to_s3.rst b/providers/amazon/docs/transfer/ftp_to_s3.rst similarity index 95% rename from docs/apache-airflow-providers-amazon/transfer/ftp_to_s3.rst rename to providers/amazon/docs/transfer/ftp_to_s3.rst index 13166a753232ee..a04dba6d9b0225 100644 --- a/docs/apache-airflow-providers-amazon/transfer/ftp_to_s3.rst +++ b/providers/amazon/docs/transfer/ftp_to_s3.rst @@ -41,7 +41,7 @@ To get more information about this operator visit: Example usage: -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_ftp_to_s3.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_ftp_to_s3.py :language: python :dedent: 4 :start-after: [START howto_transfer_ftp_to_s3] diff --git a/docs/apache-airflow-providers-amazon/transfer/gcs_to_s3.rst b/providers/amazon/docs/transfer/gcs_to_s3.rst similarity index 95% rename from docs/apache-airflow-providers-amazon/transfer/gcs_to_s3.rst rename to providers/amazon/docs/transfer/gcs_to_s3.rst index eb032d813c1c1e..2ac2c1a219f723 100644 --- a/docs/apache-airflow-providers-amazon/transfer/gcs_to_s3.rst +++ b/providers/amazon/docs/transfer/gcs_to_s3.rst @@ -39,7 +39,7 @@ To copy data from a Google Cloud Storage bucket to an Amazon S3 bucket you can u Example usage: -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_gcs_to_s3.py +.. 
exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_gcs_to_s3.py :language: python :dedent: 4 :start-after: [START howto_transfer_gcs_to_s3] diff --git a/docs/apache-airflow-providers-amazon/transfer/glacier_to_gcs.rst b/providers/amazon/docs/transfer/glacier_to_gcs.rst similarity index 95% rename from docs/apache-airflow-providers-amazon/transfer/glacier_to_gcs.rst rename to providers/amazon/docs/transfer/glacier_to_gcs.rst index 775fda6491c4f4..1dff86787d53d9 100644 --- a/docs/apache-airflow-providers-amazon/transfer/glacier_to_gcs.rst +++ b/providers/amazon/docs/transfer/glacier_to_gcs.rst @@ -38,7 +38,7 @@ Amazon S3 Glacier To GCS transfer operator To transfer data from an Amazon Glacier vault to Google Cloud Storage you can use :class:`~airflow.providers.amazon.aws.transfers.glacier_to_gcs.GlacierToGCSOperator` -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_glacier_to_gcs.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_glacier_to_gcs.py :language: python :dedent: 4 :start-after: [START howto_transfer_glacier_to_gcs] diff --git a/docs/apache-airflow-providers-amazon/transfer/google_api_to_s3.rst b/providers/amazon/docs/transfer/google_api_to_s3.rst similarity index 91% rename from docs/apache-airflow-providers-amazon/transfer/google_api_to_s3.rst rename to providers/amazon/docs/transfer/google_api_to_s3.rst index a1d2c8ea99bfb1..ad54b9a56d2e82 100644 --- a/docs/apache-airflow-providers-amazon/transfer/google_api_to_s3.rst +++ b/providers/amazon/docs/transfer/google_api_to_s3.rst @@ -38,7 +38,7 @@ Google Sheets to Amazon S3 transfer operator This example loads data from Google Sheets and save it to an Amazon S3 file. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_google_api_sheets_to_s3.py +.. 
exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_google_api_sheets_to_s3.py :language: python :dedent: 4 :start-after: [START howto_transfer_google_api_sheets_to_s3] @@ -57,7 +57,7 @@ It searches for up to 50 videos (due to pagination) in a given time range (``YOUTUBE_VIDEO_PUBLISHED_AFTER``, ``YOUTUBE_VIDEO_PUBLISHED_BEFORE``) on a YouTube channel (``YOUTUBE_CHANNEL_ID``) saves the response in Amazon S3 and also pushes the data to xcom. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_google_api_youtube_to_s3.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_google_api_youtube_to_s3.py :language: python :dedent: 4 :start-after: [START howto_transfer_google_api_youtube_search_to_s3] @@ -66,7 +66,7 @@ saves the response in Amazon S3 and also pushes the data to xcom. It passes over the YouTube IDs to the next request which then gets the information (``YOUTUBE_VIDEO_FIELDS``) for the requested videos and saves them in Amazon S3 (``S3_BUCKET_NAME``). -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_google_api_youtube_to_s3.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_google_api_youtube_to_s3.py :language: python :dedent: 4 :start-after: [START howto_transfer_google_api_youtube_list_to_s3] diff --git a/docs/apache-airflow-providers-amazon/transfer/hive_to_dynamodb.rst b/providers/amazon/docs/transfer/hive_to_dynamodb.rst similarity index 95% rename from docs/apache-airflow-providers-amazon/transfer/hive_to_dynamodb.rst rename to providers/amazon/docs/transfer/hive_to_dynamodb.rst index a13cf012b4b971..914fe8bb8c76d2 100644 --- a/docs/apache-airflow-providers-amazon/transfer/hive_to_dynamodb.rst +++ b/providers/amazon/docs/transfer/hive_to_dynamodb.rst @@ -44,7 +44,7 @@ To get more information visit: Example usage: -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_hive_to_dynamodb.py +.. 
exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_hive_to_dynamodb.py :language: python :dedent: 4 :start-after: [START howto_transfer_hive_to_dynamodb] diff --git a/docs/apache-airflow-providers-amazon/transfer/http_to_s3.rst b/providers/amazon/docs/transfer/http_to_s3.rst similarity index 95% rename from docs/apache-airflow-providers-amazon/transfer/http_to_s3.rst rename to providers/amazon/docs/transfer/http_to_s3.rst index d28c4508aae1e8..e9e668145026da 100644 --- a/docs/apache-airflow-providers-amazon/transfer/http_to_s3.rst +++ b/providers/amazon/docs/transfer/http_to_s3.rst @@ -41,7 +41,7 @@ To get more information about this operator visit: Example usage: -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_http_to_s3.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_http_to_s3.py :language: python :dedent: 4 :start-after: [START howto_transfer_http_to_s3] diff --git a/docs/apache-airflow-providers-amazon/transfer/imap_attachment_to_s3.rst b/providers/amazon/docs/transfer/imap_attachment_to_s3.rst similarity index 94% rename from docs/apache-airflow-providers-amazon/transfer/imap_attachment_to_s3.rst rename to providers/amazon/docs/transfer/imap_attachment_to_s3.rst index cb6f46d338eff9..80e8b03737142e 100644 --- a/docs/apache-airflow-providers-amazon/transfer/imap_attachment_to_s3.rst +++ b/providers/amazon/docs/transfer/imap_attachment_to_s3.rst @@ -38,7 +38,7 @@ Imap Attachment To Amazon S3 transfer operator To save an email attachment via IMAP protocol from an email server to an Amazon S3 Bucket you can use :class:`~airflow.providers.amazon.aws.transfers.imap_attachment_to_s3.ImapAttachmentToS3Operator` -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_imap_attachment_to_s3.py +.. 
exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_imap_attachment_to_s3.py :language: python :dedent: 4 :start-after: [START howto_transfer_imap_attachment_to_s3] diff --git a/docs/apache-airflow-providers-amazon/transfer/index.rst b/providers/amazon/docs/transfer/index.rst similarity index 100% rename from docs/apache-airflow-providers-amazon/transfer/index.rst rename to providers/amazon/docs/transfer/index.rst diff --git a/docs/apache-airflow-providers-amazon/transfer/local_to_s3.rst b/providers/amazon/docs/transfer/local_to_s3.rst similarity index 95% rename from docs/apache-airflow-providers-amazon/transfer/local_to_s3.rst rename to providers/amazon/docs/transfer/local_to_s3.rst index d1ca167844464d..3862c83db67602 100644 --- a/docs/apache-airflow-providers-amazon/transfer/local_to_s3.rst +++ b/providers/amazon/docs/transfer/local_to_s3.rst @@ -42,7 +42,7 @@ To get more information about this operator visit: Example usage: -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_local_to_s3.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_local_to_s3.py :language: python :dedent: 4 :start-after: [START howto_transfer_local_to_s3] diff --git a/docs/apache-airflow-providers-amazon/transfer/mongo_to_s3.rst b/providers/amazon/docs/transfer/mongo_to_s3.rst similarity index 95% rename from docs/apache-airflow-providers-amazon/transfer/mongo_to_s3.rst rename to providers/amazon/docs/transfer/mongo_to_s3.rst index a9d8a441e670e8..c7cddc73131be5 100644 --- a/docs/apache-airflow-providers-amazon/transfer/mongo_to_s3.rst +++ b/providers/amazon/docs/transfer/mongo_to_s3.rst @@ -43,7 +43,7 @@ To get more information about this operator visit: Example usage: -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_mongo_to_s3.py +.. 
exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_mongo_to_s3.py :language: python :dedent: 4 :start-after: [START howto_transfer_mongo_to_s3] diff --git a/docs/apache-airflow-providers-amazon/transfer/redshift_to_s3.rst b/providers/amazon/docs/transfer/redshift_to_s3.rst similarity index 95% rename from docs/apache-airflow-providers-amazon/transfer/redshift_to_s3.rst rename to providers/amazon/docs/transfer/redshift_to_s3.rst index f2b32d35c52d45..8ef4f828de2970 100644 --- a/docs/apache-airflow-providers-amazon/transfer/redshift_to_s3.rst +++ b/providers/amazon/docs/transfer/redshift_to_s3.rst @@ -42,7 +42,7 @@ To get more information about this operator visit: Example usage: -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_redshift_s3_transfers.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_redshift_s3_transfers.py :language: python :dedent: 4 :start-after: [START howto_transfer_redshift_to_s3] diff --git a/docs/apache-airflow-providers-amazon/transfer/s3_to_dynamodb.rst b/providers/amazon/docs/transfer/s3_to_dynamodb.rst similarity index 93% rename from docs/apache-airflow-providers-amazon/transfer/s3_to_dynamodb.rst rename to providers/amazon/docs/transfer/s3_to_dynamodb.rst index 002d47728c24c3..335d12819d369c 100644 --- a/docs/apache-airflow-providers-amazon/transfer/s3_to_dynamodb.rst +++ b/providers/amazon/docs/transfer/s3_to_dynamodb.rst @@ -48,7 +48,7 @@ To get more information visit: Example usage: -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_s3_to_dynamodb.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_s3_to_dynamodb.py :language: python :dedent: 4 :start-after: [START howto_transfer_s3_to_dynamodb] @@ -57,7 +57,7 @@ Example usage: To load S3 data into an existing DynamoDB table use: -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_s3_to_dynamodb.py +.. 
exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_s3_to_dynamodb.py :language: python :dedent: 4 :start-after: [START howto_transfer_s3_to_dynamodb_existing_table] diff --git a/docs/apache-airflow-providers-amazon/transfer/s3_to_ftp.rst b/providers/amazon/docs/transfer/s3_to_ftp.rst similarity index 95% rename from docs/apache-airflow-providers-amazon/transfer/s3_to_ftp.rst rename to providers/amazon/docs/transfer/s3_to_ftp.rst index ecb33de0aa5f28..fb10cadacf113e 100644 --- a/docs/apache-airflow-providers-amazon/transfer/s3_to_ftp.rst +++ b/providers/amazon/docs/transfer/s3_to_ftp.rst @@ -42,7 +42,7 @@ To get more information about this operator visit: Example usage: -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_s3_to_ftp.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_s3_to_ftp.py :language: python :dedent: 4 :start-after: [START howto_transfer_s3_to_ftp] diff --git a/docs/apache-airflow-providers-amazon/transfer/s3_to_redshift.rst b/providers/amazon/docs/transfer/s3_to_redshift.rst similarity index 92% rename from docs/apache-airflow-providers-amazon/transfer/s3_to_redshift.rst rename to providers/amazon/docs/transfer/s3_to_redshift.rst index bcef5b272ac7a5..1b30579ae79377 100644 --- a/docs/apache-airflow-providers-amazon/transfer/s3_to_redshift.rst +++ b/providers/amazon/docs/transfer/s3_to_redshift.rst @@ -42,7 +42,7 @@ To get more information about this operator visit: Example usage: -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_redshift_s3_transfers.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_redshift_s3_transfers.py :language: python :dedent: 4 :start-after: [START howto_transfer_s3_to_redshift] @@ -50,7 +50,7 @@ Example usage: Example of ingesting multiple keys: -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_redshift_s3_transfers.py +.. 
exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_redshift_s3_transfers.py :language: python :dedent: 4 :start-after: [START howto_transfer_s3_to_redshift_multiple_keys] diff --git a/docs/apache-airflow-providers-amazon/transfer/s3_to_sftp.rst b/providers/amazon/docs/transfer/s3_to_sftp.rst similarity index 95% rename from docs/apache-airflow-providers-amazon/transfer/s3_to_sftp.rst rename to providers/amazon/docs/transfer/s3_to_sftp.rst index 56391b634c7697..316a9619374075 100644 --- a/docs/apache-airflow-providers-amazon/transfer/s3_to_sftp.rst +++ b/providers/amazon/docs/transfer/s3_to_sftp.rst @@ -43,7 +43,7 @@ To get more information about this operator visit: Example usage: -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_s3_to_sftp.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_s3_to_sftp.py :language: python :dedent: 4 :start-after: [START howto_transfer_s3_to_sftp] diff --git a/docs/apache-airflow-providers-amazon/transfer/s3_to_sql.rst b/providers/amazon/docs/transfer/s3_to_sql.rst similarity index 91% rename from docs/apache-airflow-providers-amazon/transfer/s3_to_sql.rst rename to providers/amazon/docs/transfer/s3_to_sql.rst index 90899d6a399d38..d9f310e3813b56 100644 --- a/docs/apache-airflow-providers-amazon/transfer/s3_to_sql.rst +++ b/providers/amazon/docs/transfer/s3_to_sql.rst @@ -43,7 +43,7 @@ To get more information about this operator visit: Example usage with a parser for a csv file. This parser loads the file into memory and returns a list of rows: -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_s3_to_sql.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_s3_to_sql.py :language: python :dedent: 4 :start-after: [START howto_transfer_s3_to_sql] @@ -52,7 +52,7 @@ file into memory and returns a list of rows: Example usage with a parser function that returns a generator. -.. 
exampleinclude:: /../../providers/tests/system/amazon/aws/example_s3_to_sql.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_s3_to_sql.py :language: python :dedent: 4 :start-after: [START howto_transfer_s3_to_sql_generator] diff --git a/docs/apache-airflow-providers-amazon/transfer/salesforce_to_s3.rst b/providers/amazon/docs/transfer/salesforce_to_s3.rst similarity index 95% rename from docs/apache-airflow-providers-amazon/transfer/salesforce_to_s3.rst rename to providers/amazon/docs/transfer/salesforce_to_s3.rst index b0a41c16699b0f..5fed4ae8409134 100644 --- a/docs/apache-airflow-providers-amazon/transfer/salesforce_to_s3.rst +++ b/providers/amazon/docs/transfer/salesforce_to_s3.rst @@ -38,7 +38,7 @@ Extract data from Salesforce to Amazon S3 transfer operator The following example demonstrates a use case of extracting account data from a Salesforce instance and upload to an Amazon S3 bucket. -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_salesforce_to_s3.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_salesforce_to_s3.py :language: python :dedent: 4 :start-after: [START howto_transfer_salesforce_to_s3] diff --git a/docs/apache-airflow-providers-amazon/transfer/sftp_to_s3.rst b/providers/amazon/docs/transfer/sftp_to_s3.rst similarity index 95% rename from docs/apache-airflow-providers-amazon/transfer/sftp_to_s3.rst rename to providers/amazon/docs/transfer/sftp_to_s3.rst index 821bf211097b45..df089e81f1f199 100644 --- a/docs/apache-airflow-providers-amazon/transfer/sftp_to_s3.rst +++ b/providers/amazon/docs/transfer/sftp_to_s3.rst @@ -42,7 +42,7 @@ To get more information about this operator visit: Example usage: -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_sftp_to_s3.py +.. 
exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_sftp_to_s3.py :language: python :dedent: 4 :start-after: [START howto_transfer_sftp_to_s3] diff --git a/docs/apache-airflow-providers-amazon/transfer/sql_to_s3.rst b/providers/amazon/docs/transfer/sql_to_s3.rst similarity index 93% rename from docs/apache-airflow-providers-amazon/transfer/sql_to_s3.rst rename to providers/amazon/docs/transfer/sql_to_s3.rst index 5e088f3a127595..058402c7ea4ca7 100644 --- a/docs/apache-airflow-providers-amazon/transfer/sql_to_s3.rst +++ b/providers/amazon/docs/transfer/sql_to_s3.rst @@ -44,7 +44,7 @@ To get more information about this operator visit: Example usage: -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_sql_to_s3.py +.. exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_sql_to_s3.py :language: python :dedent: 4 :start-after: [START howto_transfer_sql_to_s3] @@ -57,7 +57,7 @@ We can group the data in the table by passing the ``groupby_kwargs`` param. This Example usage: -.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_sql_to_s3.py +.. 
exampleinclude:: /../../providers/amazon/tests/system/amazon/aws/example_sql_to_s3.py :language: python :dedent: 4 :start-after: [START howto_transfer_sql_to_s3_with_groupby_param] diff --git a/providers/src/airflow/providers/amazon/provider.yaml b/providers/amazon/provider.yaml similarity index 90% rename from providers/src/airflow/providers/amazon/provider.yaml rename to providers/amazon/provider.yaml index 5192052800079b..3310ec331e2de9 100644 --- a/providers/src/airflow/providers/amazon/provider.yaml +++ b/providers/amazon/provider.yaml @@ -93,95 +93,37 @@ versions: - 1.1.0 - 1.0.0 -dependencies: - - apache-airflow>=2.9.0 - - apache-airflow-providers-common-compat>=1.3.0 - - apache-airflow-providers-common-sql>=1.20.0 - - apache-airflow-providers-http - # We should update minimum version of boto3 and here regularly to avoid `pip` backtracking with the number - # of candidates to consider. Make sure to configure boto3 version here as well as in all the tools below - # in the `devel-dependencies` section to be the same minimum version. - - boto3>=1.34.90 - - botocore>=1.34.90 - - inflection>=0.5.1 - # Allow a wider range of watchtower versions for flexibility among users - - watchtower>=3.0.0,!=3.3.0,<4 - - jsonpath_ng>=1.5.3 - - redshift_connector>=2.0.918 - - asgiref>=2.3.0 - - PyAthena>=3.0.10 - - jmespath>=0.7.0 - - python3-saml>=1.16.0 - -additional-extras: - - name: pandas - dependencies: - # In pandas 2.2 minimal version of the sqlalchemy is 2.0 - # https://pandas.pydata.org/docs/whatsnew/v2.2.0.html#increased-minimum-versions-for-dependencies - # However Airflow not fully supports it yet: https://github.com/apache/airflow/issues/28723 - # In addition FAB also limit sqlalchemy to < 2.0 - - pandas>=2.1.2,<2.2 - - - # There is conflict between boto3 and aiobotocore dependency botocore. 
- # TODO: We can remove it once boto3 and aiobotocore both have compatible botocore version or - # boto3 have native async support and we move away from aio aiobotocore - - name: aiobotocore - dependencies: - - aiobotocore[boto3]>=2.13.0 - - name: cncf.kubernetes - dependencies: - - apache-airflow-providers-cncf-kubernetes>=7.2.0 - - name: s3fs - dependencies: - - s3fs>=2023.10.0 - - name: python3-saml - dependencies: - - python3-saml>=1.16.0 - -devel-dependencies: - - aiobotocore>=2.13.0 - - aws_xray_sdk>=2.12.0 - - moto[cloudformation,glue]>=5.0.0 - - mypy-boto3-appflow>=1.35.39 - - mypy-boto3-rds>=1.34.90 - - mypy-boto3-redshift-data>=1.34.0 - - mypy-boto3-s3>=1.34.90 - - s3fs>=2023.10.0 - - openapi-schema-validator>=0.6.2 - - openapi-spec-validator>=0.7.1 - integrations: - integration-name: Amazon Athena external-doc-url: https://aws.amazon.com/athena/ - logo: /integration-logos/aws/Amazon-Athena_light-bg@4x.png + logo: /docs/integration-logos/Amazon-Athena_light-bg@4x.png how-to-guide: - /docs/apache-airflow-providers-amazon/operators/athena/athena_boto.rst - /docs/apache-airflow-providers-amazon/operators/athena/athena_sql.rst tags: [aws] - integration-name: Amazon Bedrock external-doc-url: https://aws.amazon.com/bedrock/ - logo: /integration-logos/aws/Amazon-Bedrock_light-bg@4x.png + logo: /docs/integration-logos/Amazon-Bedrock_light-bg@4x.png how-to-guide: - /docs/apache-airflow-providers-amazon/operators/bedrock.rst tags: [aws] - integration-name: Amazon Chime external-doc-url: https://aws.amazon.com/chime/ - logo: /integration-logos/aws/Amazon-Chime-light-bg.png + logo: /docs/integration-logos/Amazon-Chime-light-bg.png tags: [aws] - integration-name: Amazon CloudFormation external-doc-url: https://aws.amazon.com/cloudformation/ - logo: /integration-logos/aws/AWS-CloudFormation_light-bg@4x.png + logo: /docs/integration-logos/AWS-CloudFormation_light-bg@4x.png how-to-guide: - /docs/apache-airflow-providers-amazon/operators/cloudformation.rst tags: [aws] - 
integration-name: Amazon CloudWatch Logs external-doc-url: https://aws.amazon.com/cloudwatch/ - logo: /integration-logos/aws/Amazon-CloudWatch_light-bg@4x.png + logo: /docs/integration-logos/Amazon-CloudWatch_light-bg@4x.png tags: [aws] - integration-name: Amazon Comprehend external-doc-url: https://aws.amazon.com/comprehend/ - logo: /integration-logos/aws/Amazon-Comprehend_light-bg@4x.png + logo: /docs/integration-logos/Amazon-Comprehend_light-bg@4x.png how-to-guide: - /docs/apache-airflow-providers-amazon/operators/comprehend.rst tags: [aws] @@ -192,188 +134,188 @@ integrations: tags: [aws] - integration-name: Amazon DynamoDB external-doc-url: https://aws.amazon.com/dynamodb/ - logo: /integration-logos/aws/Amazon-DynamoDB_light-bg@4x.png + logo: /docs/integration-logos/Amazon-DynamoDB_light-bg@4x.png how-to-guide: - /docs/apache-airflow-providers-amazon/operators/dynamodb.rst tags: [aws] - integration-name: Amazon EC2 external-doc-url: https://aws.amazon.com/ec2/ - logo: /integration-logos/aws/Amazon-EC2_light-bg@4x.png + logo: /docs/integration-logos/Amazon-EC2_light-bg@4x.png how-to-guide: - /docs/apache-airflow-providers-amazon/operators/ec2.rst tags: [aws] - integration-name: Amazon Elastic Container Registry (ECR) external-doc-url: https://aws.amazon.com/ecr/ - logo: /integration-logos/aws/Amazon-Elastic-Container-Registry_light-bg@4x.png + logo: /docs/integration-logos/Amazon-Elastic-Container-Registry_light-bg@4x.png tags: [aws] - integration-name: Amazon ECS external-doc-url: https://aws.amazon.com/ecs/ - logo: /integration-logos/aws/Amazon-Elastic-Container-Service_light-bg@4x.png + logo: /docs/integration-logos/Amazon-Elastic-Container-Service_light-bg@4x.png tags: [aws] - integration-name: Amazon Elastic Kubernetes Service (EKS) external-doc-url: https://aws.amazon.com/eks/ - logo: /integration-logos/aws/Amazon-Elastic-Kubernetes-Service_light-bg@4x.png + logo: /docs/integration-logos/Amazon-Elastic-Kubernetes-Service_light-bg@4x.png how-to-guide: - 
/docs/apache-airflow-providers-amazon/operators/eks.rst tags: [aws] - integration-name: Amazon ElastiCache external-doc-url: https://aws.amazon.com/elasticache/redis// - logo: /integration-logos/aws/Amazon-ElastiCache_light-bg@4x.png + logo: /docs/integration-logos/Amazon-ElastiCache_light-bg@4x.png tags: [aws] - integration-name: Amazon EMR external-doc-url: https://aws.amazon.com/emr/ how-to-guide: - /docs/apache-airflow-providers-amazon/operators/emr/emr.rst - logo: /integration-logos/aws/Amazon-EMR_light-bg@4x.png + logo: /docs/integration-logos/Amazon-EMR_light-bg@4x.png tags: [aws] - integration-name: Amazon EMR on EKS external-doc-url: https://docs.aws.amazon.com/emr/latest/EMR-on-EKS-DevelopmentGuide/emr-eks.html how-to-guide: - /docs/apache-airflow-providers-amazon/operators/emr/emr_eks.rst - logo: /integration-logos/aws/Amazon-EMR_light-bg@4x.png + logo: /docs/integration-logos/Amazon-EMR_light-bg@4x.png tags: [aws] - integration-name: Amazon EMR Serverless external-doc-url: https://docs.aws.amazon.com/emr/latest/EMR-Serverless-UserGuide/emr-serverless.html how-to-guide: - /docs/apache-airflow-providers-amazon/operators/emr/emr_serverless.rst - logo: /integration-logos/aws/Amazon-EMR_light-bg@4x.png + logo: /docs/integration-logos/Amazon-EMR_light-bg@4x.png tags: [aws] - integration-name: Amazon EventBridge external-doc-url: https://docs.aws.amazon.com/eventbridge/latest/APIReference/Welcome.html how-to-guide: - /docs/apache-airflow-providers-amazon/operators/eventbridge.rst - logo: /integration-logos/aws/Amazon-EventBridge_64.png + logo: /docs/integration-logos/Amazon-EventBridge_64.png tags: [aws] - integration-name: Amazon Glacier external-doc-url: https://aws.amazon.com/glacier/ - logo: /integration-logos/aws/Amazon-S3-Glacier_light-bg@4x.png + logo: /docs/integration-logos/Amazon-S3-Glacier_light-bg@4x.png how-to-guide: - /docs/apache-airflow-providers-amazon/operators/s3/glacier.rst tags: [aws] - integration-name: Amazon Kinesis Data Firehose 
external-doc-url: https://aws.amazon.com/kinesis/data-firehose/ - logo: /integration-logos/aws/Amazon-Kinesis-Data-Firehose_light-bg@4x.png + logo: /docs/integration-logos/Amazon-Kinesis-Data-Firehose_light-bg@4x.png tags: [aws] - integration-name: Amazon Managed Service for Apache Flink how-to-guide: - /docs/apache-airflow-providers-amazon/operators/kinesis_analytics.rst external-doc-url: https://aws.amazon.com/managed-service-apache-flink/ - logo: /integration-logos/aws/Amazon-Kinesis-Analytics_light-bg@4x.png + logo: /docs/integration-logos/Amazon-Kinesis-Analytics_light-bg@4x.png tags: [aws] - integration-name: Amazon OpenSearch Serverless how-to-guide: - /docs/apache-airflow-providers-amazon/operators/opensearchserverless.rst external-doc-url: https://aws.amazon.com/opensearchserverless/ - logo: /integration-logos/aws/Amazon-OpenSearch_light-bg@4x.png + logo: /docs/integration-logos/Amazon-OpenSearch_light-bg@4x.png tags: [aws] - integration-name: Amazon RDS external-doc-url: https://aws.amazon.com/rds/ - logo: /integration-logos/aws/Amazon-RDS_light-bg@4x.png + logo: /docs/integration-logos/Amazon-RDS_light-bg@4x.png how-to-guide: - /docs/apache-airflow-providers-amazon/operators/rds.rst tags: [aws] - integration-name: Amazon Redshift external-doc-url: https://aws.amazon.com/redshift/ - logo: /integration-logos/aws/Amazon-Redshift_light-bg@4x.png + logo: /docs/integration-logos/Amazon-Redshift_light-bg@4x.png how-to-guide: - /docs/apache-airflow-providers-amazon/operators/redshift/redshift_sql.rst - /docs/apache-airflow-providers-amazon/operators/redshift/redshift_cluster.rst tags: [aws] - integration-name: Amazon Redshift Data external-doc-url: https://aws.amazon.com/redshift/ - logo: /integration-logos/aws/Amazon-Redshift_light-bg@4x.png + logo: /docs/integration-logos/Amazon-Redshift_light-bg@4x.png how-to-guide: - /docs/apache-airflow-providers-amazon/operators/redshift/redshift_data.rst tags: [aws] - integration-name: Amazon SageMaker external-doc-url: 
https://aws.amazon.com/sagemaker/ - logo: /integration-logos/aws/Amazon-SageMaker_light-bg@4x.png + logo: /docs/integration-logos/Amazon-SageMaker_light-bg@4x.png how-to-guide: - /docs/apache-airflow-providers-amazon/operators/sagemaker.rst tags: [aws] - integration-name: Amazon SecretsManager external-doc-url: https://aws.amazon.com/secrets-manager/ - logo: /integration-logos/aws/AWS-Secrets-Manager_light-bg@4x.png + logo: /docs/integration-logos/AWS-Secrets-Manager_light-bg@4x.png tags: [aws] - integration-name: Amazon Simple Email Service (SES) external-doc-url: https://aws.amazon.com/ses/ - logo: /integration-logos/aws/Amazon-Simple-Email-Service-SES_light-bg@4x.png + logo: /docs/integration-logos/Amazon-Simple-Email-Service-SES_light-bg@4x.png how-to-guide: - /docs/apache-airflow-providers-amazon/operators/ecs.rst tags: [aws] - integration-name: Amazon Simple Notification Service (SNS) external-doc-url: https://aws.amazon.com/sns/ - logo: /integration-logos/aws/Amazon-Simple-Notification-Service-SNS_light-bg@4x.png + logo: /docs/integration-logos/Amazon-Simple-Notification-Service-SNS_light-bg@4x.png how-to-guide: - /docs/apache-airflow-providers-amazon/operators/sns.rst tags: [aws] - integration-name: Amazon Simple Queue Service (SQS) external-doc-url: https://aws.amazon.com/sqs/ - logo: /integration-logos/aws/Amazon-Simple-Queue-Service-SQS_light-bg@4x.png + logo: /docs/integration-logos/Amazon-Simple-Queue-Service-SQS_light-bg@4x.png how-to-guide: - /docs/apache-airflow-providers-amazon/operators/sqs.rst tags: [aws] - integration-name: Amazon Simple Storage Service (S3) external-doc-url: https://aws.amazon.com/s3/ - logo: /integration-logos/aws/Amazon-Simple-Storage-Service-S3_light-bg@4x.png + logo: /docs/integration-logos/Amazon-Simple-Storage-Service-S3_light-bg@4x.png how-to-guide: - /docs/apache-airflow-providers-amazon/operators/s3/s3.rst tags: [aws] - integration-name: Amazon Systems Manager (SSM) external-doc-url: 
https://aws.amazon.com/systems-manager/ - logo: /integration-logos/aws/AWS-Systems-Manager_light-bg@4x.png + logo: /docs/integration-logos/AWS-Systems-Manager_light-bg@4x.png tags: [aws] - integration-name: Amazon Web Services external-doc-url: https://aws.amazon.com/ - logo: /integration-logos/aws/AWS-Cloud-alt_light-bg@4x.png + logo: /docs/integration-logos/AWS-Cloud-alt_light-bg@4x.png tags: [aws] - integration-name: AWS Batch external-doc-url: https://aws.amazon.com/batch/ - logo: /integration-logos/aws/AWS-Batch_light-bg@4x.png + logo: /docs/integration-logos/AWS-Batch_light-bg@4x.png how-to-guide: - /docs/apache-airflow-providers-amazon/operators/batch.rst tags: [aws] - integration-name: AWS DataSync external-doc-url: https://aws.amazon.com/datasync/ - logo: /integration-logos/aws/AWS-DataSync_light-bg@4x.png + logo: /docs/integration-logos/AWS-DataSync_light-bg@4x.png tags: [aws] - integration-name: AWS Glue external-doc-url: https://aws.amazon.com/glue/ - logo: /integration-logos/aws/AWS-Glue_light-bg@4x.png + logo: /docs/integration-logos/AWS-Glue_light-bg@4x.png how-to-guide: - /docs/apache-airflow-providers-amazon/operators/glue.rst tags: [aws] - integration-name: AWS Lambda external-doc-url: https://aws.amazon.com/lambda/ - logo: /integration-logos/aws/AWS-Lambda_light-bg@4x.png + logo: /docs/integration-logos/AWS-Lambda_light-bg@4x.png how-to-guide: - /docs/apache-airflow-providers-amazon/operators/lambda.rst tags: [aws] - integration-name: AWS Step Functions external-doc-url: https://aws.amazon.com/step-functions/ - logo: /integration-logos/aws/AWS-Step-Functions_light-bg@4x.png + logo: /docs/integration-logos/AWS-Step-Functions_light-bg@4x.png how-to-guide: - /docs/apache-airflow-providers-amazon/operators/step_functions.rst tags: [aws] - integration-name: AWS Database Migration Service external-doc-url: https://aws.amazon.com/dms/ - logo: /integration-logos/aws/AWS-Database-Migration-Service_64@5x.png + logo: 
/docs/integration-logos/AWS-Database-Migration-Service_64@5x.png how-to-guide: - /docs/apache-airflow-providers-amazon/operators/dms.rst tags: [aws] - integration-name: Amazon QuickSight external-doc-url: https://aws.amazon.com/quicksight/ - logo: /integration-logos/aws/Amazon-Quicksight_light-bg@4x.png + logo: /docs/integration-logos/Amazon-Quicksight_light-bg@4x.png how-to-guide: - /docs/apache-airflow-providers-amazon/operators/quicksight.rst tags: [aws] - integration-name: AWS Security Token Service (STS) external-doc-url: https://docs.aws.amazon.com/STS/latest/APIReference/welcome.html - logo: /integration-logos/aws/AWS-STS_light-bg@4x.png + logo: /docs/integration-logos/AWS-STS_light-bg@4x.png tags: [aws] - integration-name: Amazon Appflow external-doc-url: https://docs.aws.amazon.com/appflow/1.0/APIReference/Welcome.html - logo: /integration-logos/aws/Amazon_AppFlow_light.png + logo: /docs/integration-logos/Amazon_AppFlow_light.png how-to-guide: - /docs/apache-airflow-providers-amazon/operators/appflow.rst tags: [aws] @@ -381,15 +323,15 @@ integrations: external-doc-url: https://docs.aws.amazon.com/databrew/latest/dg/what-is.html how-to-guide: - /docs/apache-airflow-providers-amazon/operators/glue_databrew.rst - logo: /integration-logos/aws/AWS-Glue-DataBrew_64.png + logo: /docs/integration-logos/AWS-Glue-DataBrew_64.png tags: [aws] - integration-name: Amazon Verified Permissions external-doc-url: https://aws.amazon.com/verified-permissions/ - logo: /integration-logos/aws/Amazon-Verified-Permissions.png + logo: /docs/integration-logos/Amazon-Verified-Permissions.png tags: [aws] - integration-name: Amazon Neptune external-doc-url: https://aws.amazon.com/neptune/ - logo: /integration-logos/aws/Amazon-Neptune_64.png + logo: /docs/integration-logos/Amazon-Neptune_64.png how-to-guide: - /docs/apache-airflow-providers-amazon/operators/neptune.rst tags: [aws] diff --git a/providers/amazon/pyproject.toml b/providers/amazon/pyproject.toml new file mode 100644 index 
00000000000000..ce4140f3975ed9 --- /dev/null +++ b/providers/amazon/pyproject.toml @@ -0,0 +1,166 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +# NOTE! THIS FILE IS AUTOMATICALLY GENERATED AND WILL BE OVERWRITTEN! + +# IF YOU WANT TO MODIFY THIS FILE EXCEPT DEPENDENCIES, YOU SHOULD MODIFY THE TEMPLATE +# `pyproject_TEMPLATE.toml.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY +[build-system] +requires = ["flit_core==3.10.1"] +build-backend = "flit_core.buildapi" + +[project] +name = "apache-airflow-providers-amazon" +version = "9.2.0" +description = "Provider package apache-airflow-providers-amazon for Apache Airflow" +readme = "README.rst" +authors = [ + {name="Apache Software Foundation", email="dev@airflow.apache.org"}, +] +maintainers = [ + {name="Apache Software Foundation", email="dev@airflow.apache.org"}, +] +keywords = [ "airflow-provider", "amazon", "airflow", "integration" ] +classifiers = [ + "Development Status :: 5 - Production/Stable", + "Environment :: Console", + "Environment :: Web Environment", + "Intended Audience :: Developers", + "Intended Audience :: System Administrators", + "Framework :: Apache Airflow", + "Framework :: Apache Airflow :: Provider", + "License :: OSI Approved :: 
Apache Software License", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Topic :: System :: Monitoring", +] +requires-python = "~=3.9" + +# The dependencies should be modified in place in the generated file +# Any change in the dependencies is preserved when the file is regenerated +dependencies = [ + "apache-airflow>=2.9.0", + "apache-airflow-providers-common-compat>=1.3.0", + "apache-airflow-providers-common-sql>=1.20.0", + "apache-airflow-providers-http", + # We should update minimum version of boto3 and here regularly to avoid `pip` backtracking with the number + # of candidates to consider. Make sure to configure boto3 version here as well as in all the tools below + # in the `devel-dependencies` section to be the same minimum version. + "boto3>=1.34.90", + "botocore>=1.34.90", + "inflection>=0.5.1", + # Allow a wider range of watchtower versions for flexibility among users + "watchtower>=3.0.0,!=3.3.0,<4", + "jsonpath_ng>=1.5.3", + "redshift_connector>=2.0.918", + "asgiref>=2.3.0", + "PyAthena>=3.0.10", + "jmespath>=0.7.0", + "python3-saml>=1.16.0", +] + +# The optional dependencies should be modified in place in the generated file +# Any change in the dependencies is preserved when the file is regenerated +[project.optional-dependencies] +"pandas" = [ + # In pandas 2.2 minimal version of the sqlalchemy is 2.0 + # https://pandas.pydata.org/docs/whatsnew/v2.2.0.html#increased-minimum-versions-for-dependencies + # However Airflow not fully supports it yet: https://github.com/apache/airflow/issues/28723 + # In addition FAB also limit sqlalchemy to < 2.0 + "pandas>=2.1.2,<2.2", +] +# There is conflict between boto3 and aiobotocore dependency botocore. 
+# TODO: We can remove it once boto3 and aiobotocore both have compatible botocore version or +# boto3 have native async support and we move away from aio aiobotocore +"aiobotocore" = [ + "aiobotocore[boto3]>=2.13.0", +] +"cncf.kubernetes" = [ + "apache-airflow-providers-cncf-kubernetes>=7.2.0", +] +"s3fs" = [ + "s3fs>=2023.10.0", +] +"python3-saml" = [ + "python3-saml>=1.16.0", +] +"apache.hive" = [ + "apache-airflow-providers-apache-hive" +] +"exasol" = [ + "apache-airflow-providers-exasol" +] +"ftp" = [ + "apache-airflow-providers-ftp" +] +"google" = [ + "apache-airflow-providers-google" +] +"imap" = [ + "apache-airflow-providers-imap" +] +"microsoft.azure" = [ + "apache-airflow-providers-microsoft-azure" +] +"mongo" = [ + "apache-airflow-providers-mongo" +] +"openlineage" = [ + "apache-airflow-providers-openlineage" +] +"salesforce" = [ + "apache-airflow-providers-salesforce" +] +"ssh" = [ + "apache-airflow-providers-ssh" +] + +# The dependency groups should be modified in place in the generated file +# Any change in the dependencies is preserved when the file is regenerated +[dependency-groups] +dev = [ + "aiobotocore>=2.13.0", + "aws_xray_sdk>=2.12.0", + "moto[cloudformation,glue]>=5.0.0", + "mypy-boto3-appflow>=1.35.39", + "mypy-boto3-rds>=1.34.90", + "mypy-boto3-redshift-data>=1.34.0", + "mypy-boto3-s3>=1.34.90", + "s3fs>=2023.10.0", + "openapi-schema-validator>=0.6.2", + "openapi-spec-validator>=0.7.1", +] + +[project.urls] +"Documentation" = "https://airflow.apache.org/docs/apache-airflow-providers-amazon/9.2.0" +"Changelog" = "https://airflow.apache.org/docs/apache-airflow-providers-amazon/9.2.0/changelog.html" +"Bug Tracker" = "https://github.com/apache/airflow/issues" +"Source Code" = "https://github.com/apache/airflow" +"Slack Chat" = "https://s.apache.org/airflow-slack" +"Twitter" = "https://x.com/ApacheAirflow" +"YouTube" = "https://www.youtube.com/channel/UCSXwxpWZQ7XZ1WL3wqevChA/" + +[project.entry-points."apache_airflow_provider"] +provider_info 
= "airflow.providers.amazon.get_provider_info:get_provider_info" + +[tool.flit.module] +name = "airflow.providers.amazon" + +[tool.pytest.ini_options] +addopts = "--ignore=tests/system/" diff --git a/providers/amazon/src/airflow/providers/amazon/LICENSE b/providers/amazon/src/airflow/providers/amazon/LICENSE new file mode 100644 index 00000000000000..11069edd79019f --- /dev/null +++ b/providers/amazon/src/airflow/providers/amazon/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + +TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + +1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + +2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + +3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + +4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + +5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + +6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + +7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + +8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + +9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + +END OF TERMS AND CONDITIONS + +APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ +Copyright [yyyy] [name of copyright owner] + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. diff --git a/providers/src/airflow/providers/amazon/__init__.py b/providers/amazon/src/airflow/providers/amazon/__init__.py similarity index 100% rename from providers/src/airflow/providers/amazon/__init__.py rename to providers/amazon/src/airflow/providers/amazon/__init__.py diff --git a/providers/src/airflow/providers/amazon/aws/__init__.py b/providers/amazon/src/airflow/providers/amazon/aws/__init__.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/__init__.py rename to providers/amazon/src/airflow/providers/amazon/aws/__init__.py diff --git a/providers/src/airflow/providers/amazon/aws/assets/__init__.py b/providers/amazon/src/airflow/providers/amazon/aws/assets/__init__.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/assets/__init__.py rename to providers/amazon/src/airflow/providers/amazon/aws/assets/__init__.py diff --git a/providers/src/airflow/providers/amazon/aws/assets/s3.py b/providers/amazon/src/airflow/providers/amazon/aws/assets/s3.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/assets/s3.py rename to providers/amazon/src/airflow/providers/amazon/aws/assets/s3.py diff --git a/providers/src/airflow/providers/amazon/aws/auth_manager/__init__.py b/providers/amazon/src/airflow/providers/amazon/aws/auth_manager/__init__.py similarity index 100% rename from 
providers/src/airflow/providers/amazon/aws/auth_manager/__init__.py rename to providers/amazon/src/airflow/providers/amazon/aws/auth_manager/__init__.py diff --git a/providers/src/airflow/providers/amazon/aws/auth_manager/avp/__init__.py b/providers/amazon/src/airflow/providers/amazon/aws/auth_manager/avp/__init__.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/auth_manager/avp/__init__.py rename to providers/amazon/src/airflow/providers/amazon/aws/auth_manager/avp/__init__.py diff --git a/providers/src/airflow/providers/amazon/aws/auth_manager/avp/entities.py b/providers/amazon/src/airflow/providers/amazon/aws/auth_manager/avp/entities.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/auth_manager/avp/entities.py rename to providers/amazon/src/airflow/providers/amazon/aws/auth_manager/avp/entities.py diff --git a/providers/src/airflow/providers/amazon/aws/auth_manager/avp/facade.py b/providers/amazon/src/airflow/providers/amazon/aws/auth_manager/avp/facade.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/auth_manager/avp/facade.py rename to providers/amazon/src/airflow/providers/amazon/aws/auth_manager/avp/facade.py diff --git a/providers/src/airflow/providers/amazon/aws/auth_manager/avp/schema.json b/providers/amazon/src/airflow/providers/amazon/aws/auth_manager/avp/schema.json similarity index 100% rename from providers/src/airflow/providers/amazon/aws/auth_manager/avp/schema.json rename to providers/amazon/src/airflow/providers/amazon/aws/auth_manager/avp/schema.json diff --git a/providers/src/airflow/providers/amazon/aws/auth_manager/aws_auth_manager.py b/providers/amazon/src/airflow/providers/amazon/aws/auth_manager/aws_auth_manager.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/auth_manager/aws_auth_manager.py rename to providers/amazon/src/airflow/providers/amazon/aws/auth_manager/aws_auth_manager.py diff --git 
a/providers/src/airflow/providers/amazon/aws/auth_manager/cli/__init__.py b/providers/amazon/src/airflow/providers/amazon/aws/auth_manager/cli/__init__.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/auth_manager/cli/__init__.py rename to providers/amazon/src/airflow/providers/amazon/aws/auth_manager/cli/__init__.py diff --git a/providers/src/airflow/providers/amazon/aws/auth_manager/cli/avp_commands.py b/providers/amazon/src/airflow/providers/amazon/aws/auth_manager/cli/avp_commands.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/auth_manager/cli/avp_commands.py rename to providers/amazon/src/airflow/providers/amazon/aws/auth_manager/cli/avp_commands.py diff --git a/providers/src/airflow/providers/amazon/aws/auth_manager/cli/definition.py b/providers/amazon/src/airflow/providers/amazon/aws/auth_manager/cli/definition.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/auth_manager/cli/definition.py rename to providers/amazon/src/airflow/providers/amazon/aws/auth_manager/cli/definition.py diff --git a/providers/src/airflow/providers/amazon/aws/auth_manager/constants.py b/providers/amazon/src/airflow/providers/amazon/aws/auth_manager/constants.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/auth_manager/constants.py rename to providers/amazon/src/airflow/providers/amazon/aws/auth_manager/constants.py diff --git a/providers/src/airflow/providers/amazon/aws/auth_manager/router/__init__.py b/providers/amazon/src/airflow/providers/amazon/aws/auth_manager/router/__init__.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/auth_manager/router/__init__.py rename to providers/amazon/src/airflow/providers/amazon/aws/auth_manager/router/__init__.py diff --git a/providers/src/airflow/providers/amazon/aws/auth_manager/router/login.py b/providers/amazon/src/airflow/providers/amazon/aws/auth_manager/router/login.py similarity 
index 100% rename from providers/src/airflow/providers/amazon/aws/auth_manager/router/login.py rename to providers/amazon/src/airflow/providers/amazon/aws/auth_manager/router/login.py diff --git a/providers/src/airflow/providers/amazon/aws/auth_manager/security_manager/__init__.py b/providers/amazon/src/airflow/providers/amazon/aws/auth_manager/security_manager/__init__.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/auth_manager/security_manager/__init__.py rename to providers/amazon/src/airflow/providers/amazon/aws/auth_manager/security_manager/__init__.py diff --git a/providers/src/airflow/providers/amazon/aws/auth_manager/security_manager/aws_security_manager_override.py b/providers/amazon/src/airflow/providers/amazon/aws/auth_manager/security_manager/aws_security_manager_override.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/auth_manager/security_manager/aws_security_manager_override.py rename to providers/amazon/src/airflow/providers/amazon/aws/auth_manager/security_manager/aws_security_manager_override.py diff --git a/providers/src/airflow/providers/amazon/aws/auth_manager/user.py b/providers/amazon/src/airflow/providers/amazon/aws/auth_manager/user.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/auth_manager/user.py rename to providers/amazon/src/airflow/providers/amazon/aws/auth_manager/user.py diff --git a/providers/src/airflow/providers/amazon/aws/exceptions.py b/providers/amazon/src/airflow/providers/amazon/aws/exceptions.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/exceptions.py rename to providers/amazon/src/airflow/providers/amazon/aws/exceptions.py diff --git a/providers/src/airflow/providers/amazon/aws/executors/Dockerfile b/providers/amazon/src/airflow/providers/amazon/aws/executors/Dockerfile similarity index 100% rename from providers/src/airflow/providers/amazon/aws/executors/Dockerfile rename to 
providers/amazon/src/airflow/providers/amazon/aws/executors/Dockerfile diff --git a/providers/src/airflow/providers/amazon/aws/executors/__init__.py b/providers/amazon/src/airflow/providers/amazon/aws/executors/__init__.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/executors/__init__.py rename to providers/amazon/src/airflow/providers/amazon/aws/executors/__init__.py diff --git a/providers/src/airflow/providers/amazon/aws/executors/batch/__init__.py b/providers/amazon/src/airflow/providers/amazon/aws/executors/batch/__init__.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/executors/batch/__init__.py rename to providers/amazon/src/airflow/providers/amazon/aws/executors/batch/__init__.py diff --git a/providers/src/airflow/providers/amazon/aws/executors/batch/batch_executor.py b/providers/amazon/src/airflow/providers/amazon/aws/executors/batch/batch_executor.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/executors/batch/batch_executor.py rename to providers/amazon/src/airflow/providers/amazon/aws/executors/batch/batch_executor.py diff --git a/providers/src/airflow/providers/amazon/aws/executors/batch/batch_executor_config.py b/providers/amazon/src/airflow/providers/amazon/aws/executors/batch/batch_executor_config.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/executors/batch/batch_executor_config.py rename to providers/amazon/src/airflow/providers/amazon/aws/executors/batch/batch_executor_config.py diff --git a/providers/src/airflow/providers/amazon/aws/executors/batch/boto_schema.py b/providers/amazon/src/airflow/providers/amazon/aws/executors/batch/boto_schema.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/executors/batch/boto_schema.py rename to providers/amazon/src/airflow/providers/amazon/aws/executors/batch/boto_schema.py diff --git 
a/providers/src/airflow/providers/amazon/aws/executors/batch/utils.py b/providers/amazon/src/airflow/providers/amazon/aws/executors/batch/utils.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/executors/batch/utils.py rename to providers/amazon/src/airflow/providers/amazon/aws/executors/batch/utils.py diff --git a/providers/src/airflow/providers/amazon/aws/executors/ecs/__init__.py b/providers/amazon/src/airflow/providers/amazon/aws/executors/ecs/__init__.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/executors/ecs/__init__.py rename to providers/amazon/src/airflow/providers/amazon/aws/executors/ecs/__init__.py diff --git a/providers/src/airflow/providers/amazon/aws/executors/ecs/boto_schema.py b/providers/amazon/src/airflow/providers/amazon/aws/executors/ecs/boto_schema.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/executors/ecs/boto_schema.py rename to providers/amazon/src/airflow/providers/amazon/aws/executors/ecs/boto_schema.py diff --git a/providers/src/airflow/providers/amazon/aws/executors/ecs/ecs_executor.py b/providers/amazon/src/airflow/providers/amazon/aws/executors/ecs/ecs_executor.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/executors/ecs/ecs_executor.py rename to providers/amazon/src/airflow/providers/amazon/aws/executors/ecs/ecs_executor.py diff --git a/providers/src/airflow/providers/amazon/aws/executors/ecs/ecs_executor_config.py b/providers/amazon/src/airflow/providers/amazon/aws/executors/ecs/ecs_executor_config.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/executors/ecs/ecs_executor_config.py rename to providers/amazon/src/airflow/providers/amazon/aws/executors/ecs/ecs_executor_config.py diff --git a/providers/src/airflow/providers/amazon/aws/executors/ecs/utils.py b/providers/amazon/src/airflow/providers/amazon/aws/executors/ecs/utils.py similarity index 100% rename from 
providers/src/airflow/providers/amazon/aws/executors/ecs/utils.py rename to providers/amazon/src/airflow/providers/amazon/aws/executors/ecs/utils.py diff --git a/providers/src/airflow/providers/amazon/aws/executors/utils/__init__.py b/providers/amazon/src/airflow/providers/amazon/aws/executors/utils/__init__.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/executors/utils/__init__.py rename to providers/amazon/src/airflow/providers/amazon/aws/executors/utils/__init__.py diff --git a/providers/src/airflow/providers/amazon/aws/executors/utils/base_config_keys.py b/providers/amazon/src/airflow/providers/amazon/aws/executors/utils/base_config_keys.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/executors/utils/base_config_keys.py rename to providers/amazon/src/airflow/providers/amazon/aws/executors/utils/base_config_keys.py diff --git a/providers/src/airflow/providers/amazon/aws/executors/utils/exponential_backoff_retry.py b/providers/amazon/src/airflow/providers/amazon/aws/executors/utils/exponential_backoff_retry.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/executors/utils/exponential_backoff_retry.py rename to providers/amazon/src/airflow/providers/amazon/aws/executors/utils/exponential_backoff_retry.py diff --git a/providers/src/airflow/providers/amazon/aws/fs/__init__.py b/providers/amazon/src/airflow/providers/amazon/aws/fs/__init__.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/fs/__init__.py rename to providers/amazon/src/airflow/providers/amazon/aws/fs/__init__.py diff --git a/providers/src/airflow/providers/amazon/aws/fs/s3.py b/providers/amazon/src/airflow/providers/amazon/aws/fs/s3.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/fs/s3.py rename to providers/amazon/src/airflow/providers/amazon/aws/fs/s3.py diff --git a/providers/src/airflow/providers/amazon/aws/hooks/__init__.py 
b/providers/amazon/src/airflow/providers/amazon/aws/hooks/__init__.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/hooks/__init__.py rename to providers/amazon/src/airflow/providers/amazon/aws/hooks/__init__.py diff --git a/providers/src/airflow/providers/amazon/aws/hooks/appflow.py b/providers/amazon/src/airflow/providers/amazon/aws/hooks/appflow.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/hooks/appflow.py rename to providers/amazon/src/airflow/providers/amazon/aws/hooks/appflow.py diff --git a/providers/src/airflow/providers/amazon/aws/hooks/athena.py b/providers/amazon/src/airflow/providers/amazon/aws/hooks/athena.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/hooks/athena.py rename to providers/amazon/src/airflow/providers/amazon/aws/hooks/athena.py diff --git a/providers/src/airflow/providers/amazon/aws/hooks/athena_sql.py b/providers/amazon/src/airflow/providers/amazon/aws/hooks/athena_sql.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/hooks/athena_sql.py rename to providers/amazon/src/airflow/providers/amazon/aws/hooks/athena_sql.py diff --git a/providers/src/airflow/providers/amazon/aws/hooks/base_aws.py b/providers/amazon/src/airflow/providers/amazon/aws/hooks/base_aws.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/hooks/base_aws.py rename to providers/amazon/src/airflow/providers/amazon/aws/hooks/base_aws.py diff --git a/providers/src/airflow/providers/amazon/aws/hooks/batch_client.py b/providers/amazon/src/airflow/providers/amazon/aws/hooks/batch_client.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/hooks/batch_client.py rename to providers/amazon/src/airflow/providers/amazon/aws/hooks/batch_client.py diff --git a/providers/src/airflow/providers/amazon/aws/hooks/batch_waiters.json b/providers/amazon/src/airflow/providers/amazon/aws/hooks/batch_waiters.json 
similarity index 100% rename from providers/src/airflow/providers/amazon/aws/hooks/batch_waiters.json rename to providers/amazon/src/airflow/providers/amazon/aws/hooks/batch_waiters.json diff --git a/providers/src/airflow/providers/amazon/aws/hooks/batch_waiters.py b/providers/amazon/src/airflow/providers/amazon/aws/hooks/batch_waiters.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/hooks/batch_waiters.py rename to providers/amazon/src/airflow/providers/amazon/aws/hooks/batch_waiters.py diff --git a/providers/src/airflow/providers/amazon/aws/hooks/bedrock.py b/providers/amazon/src/airflow/providers/amazon/aws/hooks/bedrock.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/hooks/bedrock.py rename to providers/amazon/src/airflow/providers/amazon/aws/hooks/bedrock.py diff --git a/providers/src/airflow/providers/amazon/aws/hooks/chime.py b/providers/amazon/src/airflow/providers/amazon/aws/hooks/chime.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/hooks/chime.py rename to providers/amazon/src/airflow/providers/amazon/aws/hooks/chime.py diff --git a/providers/src/airflow/providers/amazon/aws/hooks/cloud_formation.py b/providers/amazon/src/airflow/providers/amazon/aws/hooks/cloud_formation.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/hooks/cloud_formation.py rename to providers/amazon/src/airflow/providers/amazon/aws/hooks/cloud_formation.py diff --git a/providers/src/airflow/providers/amazon/aws/hooks/comprehend.py b/providers/amazon/src/airflow/providers/amazon/aws/hooks/comprehend.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/hooks/comprehend.py rename to providers/amazon/src/airflow/providers/amazon/aws/hooks/comprehend.py diff --git a/providers/src/airflow/providers/amazon/aws/hooks/datasync.py b/providers/amazon/src/airflow/providers/amazon/aws/hooks/datasync.py similarity index 100% rename from 
providers/src/airflow/providers/amazon/aws/hooks/datasync.py rename to providers/amazon/src/airflow/providers/amazon/aws/hooks/datasync.py diff --git a/providers/src/airflow/providers/amazon/aws/hooks/dms.py b/providers/amazon/src/airflow/providers/amazon/aws/hooks/dms.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/hooks/dms.py rename to providers/amazon/src/airflow/providers/amazon/aws/hooks/dms.py diff --git a/providers/src/airflow/providers/amazon/aws/hooks/dynamodb.py b/providers/amazon/src/airflow/providers/amazon/aws/hooks/dynamodb.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/hooks/dynamodb.py rename to providers/amazon/src/airflow/providers/amazon/aws/hooks/dynamodb.py diff --git a/providers/src/airflow/providers/amazon/aws/hooks/ec2.py b/providers/amazon/src/airflow/providers/amazon/aws/hooks/ec2.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/hooks/ec2.py rename to providers/amazon/src/airflow/providers/amazon/aws/hooks/ec2.py diff --git a/providers/src/airflow/providers/amazon/aws/hooks/ecr.py b/providers/amazon/src/airflow/providers/amazon/aws/hooks/ecr.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/hooks/ecr.py rename to providers/amazon/src/airflow/providers/amazon/aws/hooks/ecr.py diff --git a/providers/src/airflow/providers/amazon/aws/hooks/ecs.py b/providers/amazon/src/airflow/providers/amazon/aws/hooks/ecs.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/hooks/ecs.py rename to providers/amazon/src/airflow/providers/amazon/aws/hooks/ecs.py diff --git a/providers/src/airflow/providers/amazon/aws/hooks/eks.py b/providers/amazon/src/airflow/providers/amazon/aws/hooks/eks.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/hooks/eks.py rename to providers/amazon/src/airflow/providers/amazon/aws/hooks/eks.py diff --git 
a/providers/src/airflow/providers/amazon/aws/hooks/elasticache_replication_group.py b/providers/amazon/src/airflow/providers/amazon/aws/hooks/elasticache_replication_group.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/hooks/elasticache_replication_group.py rename to providers/amazon/src/airflow/providers/amazon/aws/hooks/elasticache_replication_group.py diff --git a/providers/src/airflow/providers/amazon/aws/hooks/emr.py b/providers/amazon/src/airflow/providers/amazon/aws/hooks/emr.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/hooks/emr.py rename to providers/amazon/src/airflow/providers/amazon/aws/hooks/emr.py diff --git a/providers/src/airflow/providers/amazon/aws/hooks/eventbridge.py b/providers/amazon/src/airflow/providers/amazon/aws/hooks/eventbridge.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/hooks/eventbridge.py rename to providers/amazon/src/airflow/providers/amazon/aws/hooks/eventbridge.py diff --git a/providers/src/airflow/providers/amazon/aws/hooks/glacier.py b/providers/amazon/src/airflow/providers/amazon/aws/hooks/glacier.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/hooks/glacier.py rename to providers/amazon/src/airflow/providers/amazon/aws/hooks/glacier.py diff --git a/providers/src/airflow/providers/amazon/aws/hooks/glue.py b/providers/amazon/src/airflow/providers/amazon/aws/hooks/glue.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/hooks/glue.py rename to providers/amazon/src/airflow/providers/amazon/aws/hooks/glue.py diff --git a/providers/src/airflow/providers/amazon/aws/hooks/glue_catalog.py b/providers/amazon/src/airflow/providers/amazon/aws/hooks/glue_catalog.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/hooks/glue_catalog.py rename to providers/amazon/src/airflow/providers/amazon/aws/hooks/glue_catalog.py diff --git 
a/providers/src/airflow/providers/amazon/aws/hooks/glue_crawler.py b/providers/amazon/src/airflow/providers/amazon/aws/hooks/glue_crawler.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/hooks/glue_crawler.py rename to providers/amazon/src/airflow/providers/amazon/aws/hooks/glue_crawler.py diff --git a/providers/src/airflow/providers/amazon/aws/hooks/glue_databrew.py b/providers/amazon/src/airflow/providers/amazon/aws/hooks/glue_databrew.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/hooks/glue_databrew.py rename to providers/amazon/src/airflow/providers/amazon/aws/hooks/glue_databrew.py diff --git a/providers/src/airflow/providers/amazon/aws/hooks/kinesis.py b/providers/amazon/src/airflow/providers/amazon/aws/hooks/kinesis.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/hooks/kinesis.py rename to providers/amazon/src/airflow/providers/amazon/aws/hooks/kinesis.py diff --git a/providers/src/airflow/providers/amazon/aws/hooks/kinesis_analytics.py b/providers/amazon/src/airflow/providers/amazon/aws/hooks/kinesis_analytics.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/hooks/kinesis_analytics.py rename to providers/amazon/src/airflow/providers/amazon/aws/hooks/kinesis_analytics.py diff --git a/providers/src/airflow/providers/amazon/aws/hooks/lambda_function.py b/providers/amazon/src/airflow/providers/amazon/aws/hooks/lambda_function.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/hooks/lambda_function.py rename to providers/amazon/src/airflow/providers/amazon/aws/hooks/lambda_function.py diff --git a/providers/src/airflow/providers/amazon/aws/hooks/logs.py b/providers/amazon/src/airflow/providers/amazon/aws/hooks/logs.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/hooks/logs.py rename to providers/amazon/src/airflow/providers/amazon/aws/hooks/logs.py diff --git 
a/providers/src/airflow/providers/amazon/aws/hooks/neptune.py b/providers/amazon/src/airflow/providers/amazon/aws/hooks/neptune.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/hooks/neptune.py rename to providers/amazon/src/airflow/providers/amazon/aws/hooks/neptune.py diff --git a/providers/src/airflow/providers/amazon/aws/hooks/opensearch_serverless.py b/providers/amazon/src/airflow/providers/amazon/aws/hooks/opensearch_serverless.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/hooks/opensearch_serverless.py rename to providers/amazon/src/airflow/providers/amazon/aws/hooks/opensearch_serverless.py diff --git a/providers/src/airflow/providers/amazon/aws/hooks/quicksight.py b/providers/amazon/src/airflow/providers/amazon/aws/hooks/quicksight.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/hooks/quicksight.py rename to providers/amazon/src/airflow/providers/amazon/aws/hooks/quicksight.py diff --git a/providers/src/airflow/providers/amazon/aws/hooks/rds.py b/providers/amazon/src/airflow/providers/amazon/aws/hooks/rds.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/hooks/rds.py rename to providers/amazon/src/airflow/providers/amazon/aws/hooks/rds.py diff --git a/providers/src/airflow/providers/amazon/aws/hooks/redshift_cluster.py b/providers/amazon/src/airflow/providers/amazon/aws/hooks/redshift_cluster.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/hooks/redshift_cluster.py rename to providers/amazon/src/airflow/providers/amazon/aws/hooks/redshift_cluster.py diff --git a/providers/src/airflow/providers/amazon/aws/hooks/redshift_data.py b/providers/amazon/src/airflow/providers/amazon/aws/hooks/redshift_data.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/hooks/redshift_data.py rename to providers/amazon/src/airflow/providers/amazon/aws/hooks/redshift_data.py diff --git 
a/providers/src/airflow/providers/amazon/aws/hooks/redshift_sql.py b/providers/amazon/src/airflow/providers/amazon/aws/hooks/redshift_sql.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/hooks/redshift_sql.py rename to providers/amazon/src/airflow/providers/amazon/aws/hooks/redshift_sql.py diff --git a/providers/src/airflow/providers/amazon/aws/hooks/s3.py b/providers/amazon/src/airflow/providers/amazon/aws/hooks/s3.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/hooks/s3.py rename to providers/amazon/src/airflow/providers/amazon/aws/hooks/s3.py diff --git a/providers/src/airflow/providers/amazon/aws/hooks/sagemaker.py b/providers/amazon/src/airflow/providers/amazon/aws/hooks/sagemaker.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/hooks/sagemaker.py rename to providers/amazon/src/airflow/providers/amazon/aws/hooks/sagemaker.py diff --git a/providers/src/airflow/providers/amazon/aws/hooks/secrets_manager.py b/providers/amazon/src/airflow/providers/amazon/aws/hooks/secrets_manager.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/hooks/secrets_manager.py rename to providers/amazon/src/airflow/providers/amazon/aws/hooks/secrets_manager.py diff --git a/providers/src/airflow/providers/amazon/aws/hooks/ses.py b/providers/amazon/src/airflow/providers/amazon/aws/hooks/ses.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/hooks/ses.py rename to providers/amazon/src/airflow/providers/amazon/aws/hooks/ses.py diff --git a/providers/src/airflow/providers/amazon/aws/hooks/sns.py b/providers/amazon/src/airflow/providers/amazon/aws/hooks/sns.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/hooks/sns.py rename to providers/amazon/src/airflow/providers/amazon/aws/hooks/sns.py diff --git a/providers/src/airflow/providers/amazon/aws/hooks/sqs.py 
b/providers/amazon/src/airflow/providers/amazon/aws/hooks/sqs.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/hooks/sqs.py rename to providers/amazon/src/airflow/providers/amazon/aws/hooks/sqs.py diff --git a/providers/src/airflow/providers/amazon/aws/hooks/ssm.py b/providers/amazon/src/airflow/providers/amazon/aws/hooks/ssm.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/hooks/ssm.py rename to providers/amazon/src/airflow/providers/amazon/aws/hooks/ssm.py diff --git a/providers/src/airflow/providers/amazon/aws/hooks/step_function.py b/providers/amazon/src/airflow/providers/amazon/aws/hooks/step_function.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/hooks/step_function.py rename to providers/amazon/src/airflow/providers/amazon/aws/hooks/step_function.py diff --git a/providers/src/airflow/providers/amazon/aws/hooks/sts.py b/providers/amazon/src/airflow/providers/amazon/aws/hooks/sts.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/hooks/sts.py rename to providers/amazon/src/airflow/providers/amazon/aws/hooks/sts.py diff --git a/providers/src/airflow/providers/amazon/aws/hooks/verified_permissions.py b/providers/amazon/src/airflow/providers/amazon/aws/hooks/verified_permissions.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/hooks/verified_permissions.py rename to providers/amazon/src/airflow/providers/amazon/aws/hooks/verified_permissions.py diff --git a/providers/src/airflow/providers/amazon/aws/links/__init__.py b/providers/amazon/src/airflow/providers/amazon/aws/links/__init__.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/links/__init__.py rename to providers/amazon/src/airflow/providers/amazon/aws/links/__init__.py diff --git a/providers/src/airflow/providers/amazon/aws/links/athena.py b/providers/amazon/src/airflow/providers/amazon/aws/links/athena.py similarity index 
100% rename from providers/src/airflow/providers/amazon/aws/links/athena.py rename to providers/amazon/src/airflow/providers/amazon/aws/links/athena.py diff --git a/providers/src/airflow/providers/amazon/aws/links/base_aws.py b/providers/amazon/src/airflow/providers/amazon/aws/links/base_aws.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/links/base_aws.py rename to providers/amazon/src/airflow/providers/amazon/aws/links/base_aws.py diff --git a/providers/src/airflow/providers/amazon/aws/links/batch.py b/providers/amazon/src/airflow/providers/amazon/aws/links/batch.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/links/batch.py rename to providers/amazon/src/airflow/providers/amazon/aws/links/batch.py diff --git a/providers/src/airflow/providers/amazon/aws/links/comprehend.py b/providers/amazon/src/airflow/providers/amazon/aws/links/comprehend.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/links/comprehend.py rename to providers/amazon/src/airflow/providers/amazon/aws/links/comprehend.py diff --git a/providers/src/airflow/providers/amazon/aws/links/datasync.py b/providers/amazon/src/airflow/providers/amazon/aws/links/datasync.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/links/datasync.py rename to providers/amazon/src/airflow/providers/amazon/aws/links/datasync.py diff --git a/providers/amazon/src/airflow/providers/amazon/aws/links/ec2.py b/providers/amazon/src/airflow/providers/amazon/aws/links/ec2.py new file mode 100644 index 00000000000000..38a23956cddbbc --- /dev/null +++ b/providers/amazon/src/airflow/providers/amazon/aws/links/ec2.py @@ -0,0 +1,46 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +from __future__ import annotations + +from airflow.providers.amazon.aws.links.base_aws import BASE_AWS_CONSOLE_LINK, BaseAwsLink + + +class EC2InstanceLink(BaseAwsLink): + """Helper class for constructing Amazon EC2 instance links.""" + + name = "Instance" + key = "_instance_id" + format_str = ( + BASE_AWS_CONSOLE_LINK + "/ec2/home?region={region_name}#InstanceDetails:instanceId={instance_id}" + ) + + +class EC2InstanceDashboardLink(BaseAwsLink): + """ + Helper class for constructing Amazon EC2 console links. + + This is useful for displaying the list of EC2 instances, rather + than a single instance. 
+ """ + + name = "EC2 Instances" + key = "_instance_dashboard" + format_str = BASE_AWS_CONSOLE_LINK + "/ec2/home?region={region_name}#Instances:instanceId=:{instance_ids}" + + @staticmethod + def format_instance_id_filter(instance_ids: list[str]) -> str: + return ",:".join(instance_ids) diff --git a/providers/src/airflow/providers/amazon/aws/links/emr.py b/providers/amazon/src/airflow/providers/amazon/aws/links/emr.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/links/emr.py rename to providers/amazon/src/airflow/providers/amazon/aws/links/emr.py diff --git a/providers/src/airflow/providers/amazon/aws/links/glue.py b/providers/amazon/src/airflow/providers/amazon/aws/links/glue.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/links/glue.py rename to providers/amazon/src/airflow/providers/amazon/aws/links/glue.py diff --git a/providers/src/airflow/providers/amazon/aws/links/logs.py b/providers/amazon/src/airflow/providers/amazon/aws/links/logs.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/links/logs.py rename to providers/amazon/src/airflow/providers/amazon/aws/links/logs.py diff --git a/providers/src/airflow/providers/amazon/aws/links/sagemaker.py b/providers/amazon/src/airflow/providers/amazon/aws/links/sagemaker.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/links/sagemaker.py rename to providers/amazon/src/airflow/providers/amazon/aws/links/sagemaker.py diff --git a/providers/src/airflow/providers/amazon/aws/links/step_function.py b/providers/amazon/src/airflow/providers/amazon/aws/links/step_function.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/links/step_function.py rename to providers/amazon/src/airflow/providers/amazon/aws/links/step_function.py diff --git a/providers/src/airflow/providers/amazon/aws/log/__init__.py b/providers/amazon/src/airflow/providers/amazon/aws/log/__init__.py similarity 
index 100% rename from providers/src/airflow/providers/amazon/aws/log/__init__.py rename to providers/amazon/src/airflow/providers/amazon/aws/log/__init__.py diff --git a/providers/src/airflow/providers/amazon/aws/log/cloudwatch_task_handler.py b/providers/amazon/src/airflow/providers/amazon/aws/log/cloudwatch_task_handler.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/log/cloudwatch_task_handler.py rename to providers/amazon/src/airflow/providers/amazon/aws/log/cloudwatch_task_handler.py diff --git a/providers/src/airflow/providers/amazon/aws/log/s3_task_handler.py b/providers/amazon/src/airflow/providers/amazon/aws/log/s3_task_handler.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/log/s3_task_handler.py rename to providers/amazon/src/airflow/providers/amazon/aws/log/s3_task_handler.py diff --git a/providers/src/airflow/providers/amazon/aws/notifications/__init__.py b/providers/amazon/src/airflow/providers/amazon/aws/notifications/__init__.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/notifications/__init__.py rename to providers/amazon/src/airflow/providers/amazon/aws/notifications/__init__.py diff --git a/providers/src/airflow/providers/amazon/aws/notifications/chime.py b/providers/amazon/src/airflow/providers/amazon/aws/notifications/chime.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/notifications/chime.py rename to providers/amazon/src/airflow/providers/amazon/aws/notifications/chime.py diff --git a/providers/src/airflow/providers/amazon/aws/notifications/sns.py b/providers/amazon/src/airflow/providers/amazon/aws/notifications/sns.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/notifications/sns.py rename to providers/amazon/src/airflow/providers/amazon/aws/notifications/sns.py diff --git a/providers/src/airflow/providers/amazon/aws/notifications/sqs.py 
b/providers/amazon/src/airflow/providers/amazon/aws/notifications/sqs.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/notifications/sqs.py rename to providers/amazon/src/airflow/providers/amazon/aws/notifications/sqs.py diff --git a/providers/src/airflow/providers/amazon/aws/operators/__init__.py b/providers/amazon/src/airflow/providers/amazon/aws/operators/__init__.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/operators/__init__.py rename to providers/amazon/src/airflow/providers/amazon/aws/operators/__init__.py diff --git a/providers/src/airflow/providers/amazon/aws/operators/appflow.py b/providers/amazon/src/airflow/providers/amazon/aws/operators/appflow.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/operators/appflow.py rename to providers/amazon/src/airflow/providers/amazon/aws/operators/appflow.py diff --git a/providers/src/airflow/providers/amazon/aws/operators/athena.py b/providers/amazon/src/airflow/providers/amazon/aws/operators/athena.py similarity index 97% rename from providers/src/airflow/providers/amazon/aws/operators/athena.py rename to providers/amazon/src/airflow/providers/amazon/aws/operators/athena.py index 62c1e318d02385..3d152e513d6468 100644 --- a/providers/src/airflow/providers/amazon/aws/operators/athena.py +++ b/providers/amazon/src/airflow/providers/amazon/aws/operators/athena.py @@ -177,14 +177,16 @@ def execute(self, context: Context) -> str | None: return self.query_execution_id def execute_complete(self, context: Context, event: dict[str, Any] | None = None) -> str: - event = validate_execute_complete_event(event) + validated_event = validate_execute_complete_event(event) - if event["status"] != "success": - raise AirflowException(f"Error while waiting for operation on cluster to complete: {event}") + if validated_event["status"] != "success": + raise AirflowException( + f"Error while waiting for operation on cluster to complete: 
{validated_event}" + ) # Save query_execution_id to be later used by listeners - self.query_execution_id = event["value"] - return event["value"] + self.query_execution_id = validated_event["value"] + return validated_event["value"] def on_kill(self) -> None: """Cancel the submitted Amazon Athena query.""" diff --git a/providers/src/airflow/providers/amazon/aws/operators/base_aws.py b/providers/amazon/src/airflow/providers/amazon/aws/operators/base_aws.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/operators/base_aws.py rename to providers/amazon/src/airflow/providers/amazon/aws/operators/base_aws.py diff --git a/providers/src/airflow/providers/amazon/aws/operators/batch.py b/providers/amazon/src/airflow/providers/amazon/aws/operators/batch.py similarity index 97% rename from providers/src/airflow/providers/amazon/aws/operators/batch.py rename to providers/amazon/src/airflow/providers/amazon/aws/operators/batch.py index e69508d89319f3..a41d1ec98f4a27 100644 --- a/providers/src/airflow/providers/amazon/aws/operators/batch.py +++ b/providers/amazon/src/airflow/providers/amazon/aws/operators/batch.py @@ -267,13 +267,13 @@ def execute(self, context: Context) -> str | None: return self.job_id def execute_complete(self, context: Context, event: dict[str, Any] | None = None) -> str: - event = validate_execute_complete_event(event) + validated_event = validate_execute_complete_event(event) - if event["status"] != "success": - raise AirflowException(f"Error while running job: {event}") + if validated_event["status"] != "success": + raise AirflowException(f"Error while running job: {validated_event}") self.log.info("Job completed.") - return event["job_id"] + return validated_event["job_id"] def on_kill(self): response = self.hook.client.terminate_job(jobId=self.job_id, reason="Task killed by the user") @@ -540,8 +540,10 @@ def execute(self, context: Context): return arn def execute_complete(self, context: Context, event: dict[str, Any] | 
None = None) -> str: - event = validate_execute_complete_event(event) + validated_event = validate_execute_complete_event(event) - if event["status"] != "success": - raise AirflowException(f"Error while waiting for the compute environment to be ready: {event}") - return event["value"] + if validated_event["status"] != "success": + raise AirflowException( + f"Error while waiting for the compute environment to be ready: {validated_event}" + ) + return validated_event["value"] diff --git a/providers/src/airflow/providers/amazon/aws/operators/bedrock.py b/providers/amazon/src/airflow/providers/amazon/aws/operators/bedrock.py similarity index 97% rename from providers/src/airflow/providers/amazon/aws/operators/bedrock.py rename to providers/amazon/src/airflow/providers/amazon/aws/operators/bedrock.py index ef4d78129d473e..ac8e86b92c807d 100644 --- a/providers/src/airflow/providers/amazon/aws/operators/bedrock.py +++ b/providers/amazon/src/airflow/providers/amazon/aws/operators/bedrock.py @@ -198,13 +198,13 @@ def __init__( self.valid_action_if_job_exists: set[str] = {"timestamp", "fail"} def execute_complete(self, context: Context, event: dict[str, Any] | None = None) -> str: - event = validate_execute_complete_event(event) + validated_event = validate_execute_complete_event(event) - if event["status"] != "success": - raise AirflowException(f"Error while running job: {event}") + if validated_event["status"] != "success": + raise AirflowException(f"Error while running job: {validated_event}") self.log.info("Bedrock model customization job `%s` complete.", self.job_name) - return self.hook.conn.get_model_customization_job(jobIdentifier=event["job_name"])["jobArn"] + return self.hook.conn.get_model_customization_job(jobIdentifier=validated_event["job_name"])["jobArn"] def execute(self, context: Context) -> dict: response = {} @@ -353,13 +353,15 @@ def execute(self, context: Context) -> str: return provisioned_model_id def execute_complete(self, context: Context, event: 
dict[str, Any] | None = None) -> str: - event = validate_execute_complete_event(event) + validated_event = validate_execute_complete_event(event) - if event["status"] != "success": - raise AirflowException(f"Error while running job: {event}") + if validated_event["status"] != "success": + raise AirflowException(f"Error while running job: {validated_event}") - self.log.info("Bedrock provisioned throughput job `%s` complete.", event["provisioned_model_id"]) - return event["provisioned_model_id"] + self.log.info( + "Bedrock provisioned throughput job `%s` complete.", validated_event["provisioned_model_id"] + ) + return validated_event["provisioned_model_id"] class BedrockCreateKnowledgeBaseOperator(AwsBaseOperator[BedrockAgentHook]): @@ -449,13 +451,13 @@ def __init__( self.deferrable = deferrable def execute_complete(self, context: Context, event: dict[str, Any] | None = None) -> str: - event = validate_execute_complete_event(event) + validated_event = validate_execute_complete_event(event) - if event["status"] != "success": - raise AirflowException(f"Error while running job: {event}") + if validated_event["status"] != "success": + raise AirflowException(f"Error while running job: {validated_event}") self.log.info("Bedrock knowledge base creation job `%s` complete.", self.name) - return event["knowledge_base_id"] + return validated_event["knowledge_base_id"] def execute(self, context: Context) -> str: def _create_kb(): @@ -634,14 +636,14 @@ def __init__( self.deferrable = deferrable def execute_complete(self, context: Context, event: dict[str, Any] | None = None) -> str: - event = validate_execute_complete_event(event) + validated_event = validate_execute_complete_event(event) - if event["status"] != "success": - raise AirflowException(f"Error while running ingestion job: {event}") + if validated_event["status"] != "success": + raise AirflowException(f"Error while running ingestion job: {validated_event}") - self.log.info("Bedrock ingestion job `%s` complete.", 
event["ingestion_job_id"]) + self.log.info("Bedrock ingestion job `%s` complete.", validated_event["ingestion_job_id"]) - return event["ingestion_job_id"] + return validated_event["ingestion_job_id"] def execute(self, context: Context) -> str: ingestion_job_id = self.hook.conn.start_ingestion_job( diff --git a/providers/src/airflow/providers/amazon/aws/operators/cloud_formation.py b/providers/amazon/src/airflow/providers/amazon/aws/operators/cloud_formation.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/operators/cloud_formation.py rename to providers/amazon/src/airflow/providers/amazon/aws/operators/cloud_formation.py diff --git a/providers/src/airflow/providers/amazon/aws/operators/comprehend.py b/providers/amazon/src/airflow/providers/amazon/aws/operators/comprehend.py similarity index 95% rename from providers/src/airflow/providers/amazon/aws/operators/comprehend.py rename to providers/amazon/src/airflow/providers/amazon/aws/operators/comprehend.py index acee6878e46fa2..e8bc64973c79d0 100644 --- a/providers/src/airflow/providers/amazon/aws/operators/comprehend.py +++ b/providers/amazon/src/airflow/providers/amazon/aws/operators/comprehend.py @@ -207,12 +207,12 @@ def execute(self, context: Context) -> str: return job_id def execute_complete(self, context: Context, event: dict[str, Any] | None = None) -> str: - event = validate_execute_complete_event(event) - if event["status"] != "success": - raise AirflowException("Error while running job: %s", event) + validated_event = validate_execute_complete_event(event) + if validated_event["status"] != "success": + raise AirflowException("Error while running job: %s", validated_event) - self.log.info("Comprehend pii entities detection job `%s` complete.", event["job_id"]) - return event["job_id"] + self.log.info("Comprehend pii entities detection job `%s` complete.", validated_event["job_id"]) + return validated_event["job_id"] class 
ComprehendCreateDocumentClassifierOperator(AwsBaseOperator[ComprehendHook]): @@ -363,14 +363,19 @@ def execute(self, context: Context) -> str: return document_classifier_arn def execute_complete(self, context: Context, event: dict[str, Any] | None = None) -> str: - event = validate_execute_complete_event(event) - if event["status"] != "success": - raise AirflowException("Error while running comprehend create document classifier: %s", event) + validated_event = validate_execute_complete_event(event) + if validated_event["status"] != "success": + raise AirflowException( + "Error while running comprehend create document classifier: %s", validated_event + ) self.hook.validate_document_classifier_training_status( - document_classifier_arn=event["document_classifier_arn"], fail_on_warnings=self.fail_on_warnings + document_classifier_arn=validated_event["document_classifier_arn"], + fail_on_warnings=self.fail_on_warnings, ) - self.log.info("Comprehend document classifier `%s` complete.", event["document_classifier_arn"]) + self.log.info( + "Comprehend document classifier `%s` complete.", validated_event["document_classifier_arn"] + ) - return event["document_classifier_arn"] + return validated_event["document_classifier_arn"] diff --git a/providers/src/airflow/providers/amazon/aws/operators/datasync.py b/providers/amazon/src/airflow/providers/amazon/aws/operators/datasync.py similarity index 99% rename from providers/src/airflow/providers/amazon/aws/operators/datasync.py rename to providers/amazon/src/airflow/providers/amazon/aws/operators/datasync.py index 7b2b7282efca73..83a6e15de13301 100644 --- a/providers/src/airflow/providers/amazon/aws/operators/datasync.py +++ b/providers/amazon/src/airflow/providers/amazon/aws/operators/datasync.py @@ -362,7 +362,7 @@ def _execute_datasync_task(self, context: Context) -> None: aws_domain=DataSyncTaskExecutionLink.get_aws_domain(self.hook.conn_partition), region_name=self.hook.conn_region_name, 
task_id=self.task_arn.split("/")[-1], - task_execution_id=self.task_execution_arn.split("/")[-1], + task_execution_id=self.task_execution_arn.split("/")[-1], # type: ignore[union-attr] ) DataSyncTaskExecutionLink.persist( context=context, @@ -370,7 +370,7 @@ def _execute_datasync_task(self, context: Context) -> None: region_name=self.hook.conn_region_name, aws_partition=self.hook.conn_partition, task_id=self.task_arn.split("/")[-1], - task_execution_id=self.task_execution_arn.split("/")[-1], + task_execution_id=self.task_execution_arn.split("/")[-1], # type: ignore[union-attr] ) self.log.info("You can view this DataSync task execution at %s", execution_url) diff --git a/providers/src/airflow/providers/amazon/aws/operators/dms.py b/providers/amazon/src/airflow/providers/amazon/aws/operators/dms.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/operators/dms.py rename to providers/amazon/src/airflow/providers/amazon/aws/operators/dms.py diff --git a/providers/src/airflow/providers/amazon/aws/operators/ec2.py b/providers/amazon/src/airflow/providers/amazon/aws/operators/ec2.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/operators/ec2.py rename to providers/amazon/src/airflow/providers/amazon/aws/operators/ec2.py diff --git a/providers/src/airflow/providers/amazon/aws/operators/ecs.py b/providers/amazon/src/airflow/providers/amazon/aws/operators/ecs.py similarity index 98% rename from providers/src/airflow/providers/amazon/aws/operators/ecs.py rename to providers/amazon/src/airflow/providers/amazon/aws/operators/ecs.py index bf59d1b1b7cdf8..bb2c7532362495 100644 --- a/providers/src/airflow/providers/amazon/aws/operators/ecs.py +++ b/providers/amazon/src/airflow/providers/amazon/aws/operators/ecs.py @@ -571,12 +571,12 @@ def execute(self, context): return None def execute_complete(self, context: Context, event: dict[str, Any] | None = None) -> str | None: - event = validate_execute_complete_event(event) + 
validated_event = validate_execute_complete_event(event) - if event["status"] != "success": - raise AirflowException(f"Error in task execution: {event}") - self.arn = event["task_arn"] # restore arn to its updated value, needed for next steps - self.cluster = event["cluster"] + if validated_event["status"] != "success": + raise AirflowException(f"Error in task execution: {validated_event}") + self.arn = validated_event["task_arn"] # restore arn to its updated value, needed for next steps + self.cluster = validated_event["cluster"] self._after_execution() if self._aws_logs_enabled(): # same behavior as non-deferrable mode, return last line of logs of the task. diff --git a/providers/src/airflow/providers/amazon/aws/operators/eks.py b/providers/amazon/src/airflow/providers/amazon/aws/operators/eks.py similarity index 98% rename from providers/src/airflow/providers/amazon/aws/operators/eks.py rename to providers/amazon/src/airflow/providers/amazon/aws/operators/eks.py index 6ce0dd154305c1..f1acec9915ffbe 100644 --- a/providers/src/airflow/providers/amazon/aws/operators/eks.py +++ b/providers/amazon/src/airflow/providers/amazon/aws/operators/eks.py @@ -415,11 +415,11 @@ def execute_failed(self, context: Context, event: dict[str, Any] | None = None) raise AirflowException("Error creating cluster") def execute_complete(self, context: Context, event: dict[str, Any] | None = None) -> None: - event = validate_execute_complete_event(event) + validated_event = validate_execute_complete_event(event) resource = "fargate profile" if self.compute == "fargate" else self.compute - if event["status"] != "success": - raise AirflowException(f"Error creating {resource}: {event}") + if validated_event["status"] != "success": + raise AirflowException(f"Error creating {resource}: {validated_event}") self.log.info("%s created successfully", resource) @@ -543,10 +543,10 @@ def execute(self, context: Context): ) def execute_complete(self, context: Context, event: dict[str, Any] | None = 
None) -> None: - event = validate_execute_complete_event(event) + validated_event = validate_execute_complete_event(event) - if event["status"] != "success": - raise AirflowException(f"Error creating nodegroup: {event}") + if validated_event["status"] != "success": + raise AirflowException(f"Error creating nodegroup: {validated_event}") class EksCreateFargateProfileOperator(BaseOperator): @@ -655,10 +655,10 @@ def execute(self, context: Context): ) def execute_complete(self, context: Context, event: dict[str, Any] | None = None) -> None: - event = validate_execute_complete_event(event) + validated_event = validate_execute_complete_event(event) - if event["status"] != "success": - raise AirflowException(f"Error creating Fargate profile: {event}") + if validated_event["status"] != "success": + raise AirflowException(f"Error creating Fargate profile: {validated_event}") self.log.info("Fargate profile created successfully") @@ -789,9 +789,9 @@ def delete_any_fargate_profiles(self, eks_hook) -> None: self.log.info(SUCCESS_MSG.format(compute=FARGATE_FULL_NAME)) def execute_complete(self, context: Context, event: dict[str, Any] | None = None) -> None: - event = validate_execute_complete_event(event) + validated_event = validate_execute_complete_event(event) - if event["status"] == "success": + if validated_event["status"] == "success": self.log.info("Cluster deleted successfully.") @@ -880,10 +880,10 @@ def execute(self, context: Context): ) def execute_complete(self, context: Context, event: dict[str, Any] | None = None) -> None: - event = validate_execute_complete_event(event) + validated_event = validate_execute_complete_event(event) - if event["status"] != "success": - raise AirflowException(f"Error deleting nodegroup: {event}") + if validated_event["status"] != "success": + raise AirflowException(f"Error deleting nodegroup: {validated_event}") class EksDeleteFargateProfileOperator(BaseOperator): @@ -974,10 +974,10 @@ def execute(self, context: Context): ) def 
execute_complete(self, context: Context, event: dict[str, Any] | None = None) -> None: - event = validate_execute_complete_event(event) + validated_event = validate_execute_complete_event(event) - if event["status"] != "success": - raise AirflowException(f"Error deleting Fargate profile: {event}") + if validated_event["status"] != "success": + raise AirflowException(f"Error deleting Fargate profile: {validated_event}") self.log.info("Fargate profile deleted successfully") diff --git a/providers/src/airflow/providers/amazon/aws/operators/emr.py b/providers/amazon/src/airflow/providers/amazon/aws/operators/emr.py similarity index 97% rename from providers/src/airflow/providers/amazon/aws/operators/emr.py rename to providers/amazon/src/airflow/providers/amazon/aws/operators/emr.py index d7680baf61a7f6..2d3fcfedef400e 100644 --- a/providers/src/airflow/providers/amazon/aws/operators/emr.py +++ b/providers/amazon/src/airflow/providers/amazon/aws/operators/emr.py @@ -199,13 +199,13 @@ def execute(self, context: Context) -> list[str]: return step_ids def execute_complete(self, context: Context, event: dict[str, Any] | None = None) -> str: - event = validate_execute_complete_event(event) + validated_event = validate_execute_complete_event(event) - if event["status"] != "success": - raise AirflowException(f"Error while running steps: {event}") + if validated_event["status"] != "success": + raise AirflowException(f"Error while running steps: {validated_event}") self.log.info("Steps completed successfully") - return event["value"] + return validated_event["value"] class EmrStartNotebookExecutionOperator(BaseOperator): @@ -591,12 +591,12 @@ def check_failure(self, query_status): ) def execute_complete(self, context: Context, event: dict[str, Any] | None = None) -> str: - event = validate_execute_complete_event(event) + validated_event = validate_execute_complete_event(event) - if event["status"] != "success": - raise AirflowException(f"Error while running job: {event}") + if 
validated_event["status"] != "success": + raise AirflowException(f"Error while running job: {validated_event}") - return event["job_id"] + return validated_event["job_id"] def on_kill(self) -> None: """Cancel the submitted job run.""" @@ -769,13 +769,13 @@ def execute(self, context: Context) -> str | None: return self._job_flow_id def execute_complete(self, context: Context, event: dict[str, Any] | None = None) -> str: - event = validate_execute_complete_event(event) + validated_event = validate_execute_complete_event(event) - if event["status"] != "success": - raise AirflowException(f"Error creating jobFlow: {event}") + if validated_event["status"] != "success": + raise AirflowException(f"Error creating jobFlow: {validated_event}") self.log.info("JobFlow created successfully") - return event["job_flow_id"] + return validated_event["job_flow_id"] def on_kill(self) -> None: """Terminate the EMR cluster (job flow) unless TerminationProtected is enabled on the cluster.""" @@ -944,10 +944,10 @@ def execute(self, context: Context) -> None: ) def execute_complete(self, context: Context, event: dict[str, Any] | None = None) -> None: - event = validate_execute_complete_event(event) + validated_event = validate_execute_complete_event(event) - if event["status"] != "success": - raise AirflowException(f"Error terminating JobFlow: {event}") + if validated_event["status"] != "success": + raise AirflowException(f"Error terminating JobFlow: {validated_event}") self.log.info("Jobflow terminated successfully.") @@ -1086,13 +1086,13 @@ def start_application_deferred(self, context: Context, event: dict[str, Any] | N ) def execute_complete(self, context: Context, event: dict[str, Any] | None = None) -> None: - event = validate_execute_complete_event(event) + validated_event = validate_execute_complete_event(event) - if event["status"] != "success": - raise AirflowException(f"Trigger error: Application failed to start, event is {event}") + if validated_event["status"] != "success": + 
raise AirflowException(f"Trigger error: Application failed to start, event is {validated_event}") - self.log.info("Application %s started", event["application_id"]) - return event["application_id"] + self.log.info("Application %s started", validated_event["application_id"]) + return validated_event["application_id"] class EmrServerlessStartJobOperator(BaseOperator): @@ -1273,11 +1273,11 @@ def execute(self, context: Context, event: dict[str, Any] | None = None) -> str return self.job_id def execute_complete(self, context: Context, event: dict[str, Any] | None = None) -> None: - event = validate_execute_complete_event(event) + validated_event = validate_execute_complete_event(event) - if event["status"] == "success": + if validated_event["status"] == "success": self.log.info("Serverless job completed") - return event["job_id"] + return validated_event["job_id"] def on_kill(self) -> None: """ @@ -1547,9 +1547,9 @@ def stop_application(self, context: Context, event: dict[str, Any] | None = None ) def execute_complete(self, context: Context, event: dict[str, Any] | None = None) -> None: - event = validate_execute_complete_event(event) + validated_event = validate_execute_complete_event(event) - if event["status"] == "success": + if validated_event["status"] == "success": self.log.info("EMR serverless application %s stopped successfully", self.application_id) @@ -1651,7 +1651,7 @@ def execute(self, context: Context) -> None: self.log.info("EMR serverless application deleted") def execute_complete(self, context: Context, event: dict[str, Any] | None = None) -> None: - event = validate_execute_complete_event(event) + validated_event = validate_execute_complete_event(event) - if event["status"] == "success": + if validated_event["status"] == "success": self.log.info("EMR serverless application %s deleted successfully", self.application_id) diff --git a/providers/src/airflow/providers/amazon/aws/operators/eventbridge.py 
b/providers/amazon/src/airflow/providers/amazon/aws/operators/eventbridge.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/operators/eventbridge.py rename to providers/amazon/src/airflow/providers/amazon/aws/operators/eventbridge.py diff --git a/providers/src/airflow/providers/amazon/aws/operators/glacier.py b/providers/amazon/src/airflow/providers/amazon/aws/operators/glacier.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/operators/glacier.py rename to providers/amazon/src/airflow/providers/amazon/aws/operators/glacier.py diff --git a/providers/src/airflow/providers/amazon/aws/operators/glue.py b/providers/amazon/src/airflow/providers/amazon/aws/operators/glue.py similarity index 97% rename from providers/src/airflow/providers/amazon/aws/operators/glue.py rename to providers/amazon/src/airflow/providers/amazon/aws/operators/glue.py index 8770b0fc675d2e..d02b43d7ea0531 100644 --- a/providers/src/airflow/providers/amazon/aws/operators/glue.py +++ b/providers/amazon/src/airflow/providers/amazon/aws/operators/glue.py @@ -244,11 +244,11 @@ def execute(self, context: Context): return self._job_run_id def execute_complete(self, context: Context, event: dict[str, Any] | None = None) -> str: - event = validate_execute_complete_event(event) + validated_event = validate_execute_complete_event(event) - if event["status"] != "success": - raise AirflowException(f"Error in glue job: {event}") - return event["value"] + if validated_event["status"] != "success": + raise AirflowException(f"Error in glue job: {validated_event}") + return validated_event["value"] def on_kill(self): """Cancel the running AWS Glue Job.""" @@ -502,18 +502,18 @@ def execute(self, context: Context) -> str: return evaluation_run_id def execute_complete(self, context: Context, event: dict[str, Any] | None = None) -> str: - event = validate_execute_complete_event(event) + validated_event = validate_execute_complete_event(event) - if 
event["status"] != "success": - raise AirflowException(f"Error: AWS Glue data quality ruleset evaluation run: {event}") + if validated_event["status"] != "success": + raise AirflowException(f"Error: AWS Glue data quality ruleset evaluation run: {validated_event}") self.hook.validate_evaluation_run_results( - evaluation_run_id=event["evaluation_run_id"], + evaluation_run_id=validated_event["evaluation_run_id"], show_results=self.show_results, verify_result_status=self.verify_result_status, ) - return event["evaluation_run_id"] + return validated_event["evaluation_run_id"] class GlueDataQualityRuleRecommendationRunOperator(AwsBaseOperator[GlueDataQualityHook]): @@ -650,12 +650,12 @@ def execute(self, context: Context) -> str: return recommendation_run_id def execute_complete(self, context: Context, event: dict[str, Any] | None = None) -> str: - event = validate_execute_complete_event(event) + validated_event = validate_execute_complete_event(event) - if event["status"] != "success": - raise AirflowException(f"Error: AWS Glue data quality rule recommendation run: {event}") + if validated_event["status"] != "success": + raise AirflowException(f"Error: AWS Glue data quality rule recommendation run: {validated_event}") if self.show_results: - self.hook.log_recommendation_results(run_id=event["recommendation_run_id"]) + self.hook.log_recommendation_results(run_id=validated_event["recommendation_run_id"]) - return event["recommendation_run_id"] + return validated_event["recommendation_run_id"] diff --git a/providers/src/airflow/providers/amazon/aws/operators/glue_crawler.py b/providers/amazon/src/airflow/providers/amazon/aws/operators/glue_crawler.py similarity index 96% rename from providers/src/airflow/providers/amazon/aws/operators/glue_crawler.py rename to providers/amazon/src/airflow/providers/amazon/aws/operators/glue_crawler.py index 4414026a815a03..d662d86d69ac1b 100644 --- a/providers/src/airflow/providers/amazon/aws/operators/glue_crawler.py +++ 
b/providers/amazon/src/airflow/providers/amazon/aws/operators/glue_crawler.py @@ -117,8 +117,8 @@ def execute(self, context: Context) -> str: return crawler_name def execute_complete(self, context: Context, event: dict[str, Any] | None = None) -> str: - event = validate_execute_complete_event(event) + validated_event = validate_execute_complete_event(event) - if event["status"] != "success": - raise AirflowException(f"Error in glue crawl: {event}") + if validated_event["status"] != "success": + raise AirflowException(f"Error in glue crawl: {validated_event}") return self.config["Name"] diff --git a/providers/src/airflow/providers/amazon/aws/operators/glue_databrew.py b/providers/amazon/src/airflow/providers/amazon/aws/operators/glue_databrew.py similarity index 95% rename from providers/src/airflow/providers/amazon/aws/operators/glue_databrew.py rename to providers/amazon/src/airflow/providers/amazon/aws/operators/glue_databrew.py index 2bdea28f3a3dc1..c02f69e781e062 100644 --- a/providers/src/airflow/providers/amazon/aws/operators/glue_databrew.py +++ b/providers/amazon/src/airflow/providers/amazon/aws/operators/glue_databrew.py @@ -129,13 +129,13 @@ def execute(self, context: Context): return {"run_id": run_id} def execute_complete(self, context: Context, event: dict[str, Any] | None = None) -> dict[str, str]: - event = validate_execute_complete_event(event) + validated_event = validate_execute_complete_event(event) - if event["status"] != "success": - raise AirflowException("Error while running AWS Glue DataBrew job: %s", event) + if validated_event["status"] != "success": + raise AirflowException("Error while running AWS Glue DataBrew job: %s", validated_event) - run_id = event.get("run_id", "") - status = event.get("status", "") + run_id = validated_event.get("run_id", "") + status = validated_event.get("status", "") self.log.info("AWS Glue DataBrew runID: %s completed with status: %s", run_id, status) diff --git 
a/providers/src/airflow/providers/amazon/aws/operators/kinesis_analytics.py b/providers/amazon/src/airflow/providers/amazon/aws/operators/kinesis_analytics.py similarity index 96% rename from providers/src/airflow/providers/amazon/aws/operators/kinesis_analytics.py rename to providers/amazon/src/airflow/providers/amazon/aws/operators/kinesis_analytics.py index 16003e0b9f322a..17848ae35e60de 100644 --- a/providers/src/airflow/providers/amazon/aws/operators/kinesis_analytics.py +++ b/providers/amazon/src/airflow/providers/amazon/aws/operators/kinesis_analytics.py @@ -215,20 +215,20 @@ def execute(self, context: Context) -> dict[str, Any]: return {"ApplicationARN": describe_response["ApplicationDetail"]["ApplicationARN"]} def execute_complete(self, context: Context, event: dict[str, Any] | None = None) -> dict[str, Any]: - event = validate_execute_complete_event(event) + validated_event = validate_execute_complete_event(event) - if event["status"] != "success": + if validated_event["status"] != "success": raise AirflowException( - "Error while starting AWS Managed Service for Apache Flink application: %s", event + "Error while starting AWS Managed Service for Apache Flink application: %s", validated_event ) response = self.hook.conn.describe_application( - ApplicationName=event["application_name"], + ApplicationName=validated_event["application_name"], ) self.log.info( "AWS Managed Service for Apache Flink application %s started successfully.", - event["application_name"], + validated_event["application_name"], ) return {"ApplicationARN": response["ApplicationDetail"]["ApplicationARN"]} @@ -332,18 +332,18 @@ def execute(self, context: Context) -> dict[str, Any]: return {"ApplicationARN": describe_response["ApplicationDetail"]["ApplicationARN"]} def execute_complete(self, context: Context, event: dict[str, Any] | None = None) -> dict[str, Any]: - event = validate_execute_complete_event(event) + validated_event = validate_execute_complete_event(event) - if 
event["status"] != "success": + if validated_event["status"] != "success": raise AirflowException("Error while stopping AWS Managed Service for Apache Flink application") response = self.hook.conn.describe_application( - ApplicationName=event["application_name"], + ApplicationName=validated_event["application_name"], ) self.log.info( "AWS Managed Service for Apache Flink application %s stopped successfully.", - event["application_name"], + validated_event["application_name"], ) return {"ApplicationARN": response["ApplicationDetail"]["ApplicationARN"]} diff --git a/providers/src/airflow/providers/amazon/aws/operators/lambda_function.py b/providers/amazon/src/airflow/providers/amazon/aws/operators/lambda_function.py similarity index 97% rename from providers/src/airflow/providers/amazon/aws/operators/lambda_function.py rename to providers/amazon/src/airflow/providers/amazon/aws/operators/lambda_function.py index 54bab6e6aa818f..7b4860a8ab40bb 100644 --- a/providers/src/airflow/providers/amazon/aws/operators/lambda_function.py +++ b/providers/amazon/src/airflow/providers/amazon/aws/operators/lambda_function.py @@ -145,13 +145,13 @@ def execute(self, context: Context): return response.get("FunctionArn") def execute_complete(self, context: Context, event: dict[str, Any] | None = None) -> str: - event = validate_execute_complete_event(event) + validated_event = validate_execute_complete_event(event) - if not event or event["status"] != "success": - raise AirflowException(f"Trigger error: event is {event}") + if not validated_event or validated_event["status"] != "success": + raise AirflowException(f"Trigger error: event is {validated_event}") self.log.info("Lambda function created successfully") - return event["function_arn"] + return validated_event["function_arn"] class LambdaInvokeFunctionOperator(AwsBaseOperator[LambdaHook]): diff --git a/providers/src/airflow/providers/amazon/aws/operators/neptune.py 
b/providers/amazon/src/airflow/providers/amazon/aws/operators/neptune.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/operators/neptune.py rename to providers/amazon/src/airflow/providers/amazon/aws/operators/neptune.py diff --git a/providers/src/airflow/providers/amazon/aws/operators/quicksight.py b/providers/amazon/src/airflow/providers/amazon/aws/operators/quicksight.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/operators/quicksight.py rename to providers/amazon/src/airflow/providers/amazon/aws/operators/quicksight.py diff --git a/providers/src/airflow/providers/amazon/aws/operators/rds.py b/providers/amazon/src/airflow/providers/amazon/aws/operators/rds.py similarity index 97% rename from providers/src/airflow/providers/amazon/aws/operators/rds.py rename to providers/amazon/src/airflow/providers/amazon/aws/operators/rds.py index 0ef2462b04f33b..18f227d85ae7fa 100644 --- a/providers/src/airflow/providers/amazon/aws/operators/rds.py +++ b/providers/amazon/src/airflow/providers/amazon/aws/operators/rds.py @@ -627,12 +627,12 @@ def execute(self, context: Context) -> str: return json.dumps(create_db_instance, default=str) def execute_complete(self, context: Context, event: dict[str, Any] | None = None) -> str: - event = validate_execute_complete_event(event) + validated_event = validate_execute_complete_event(event) - if event["status"] != "success": - raise AirflowException(f"DB instance creation failed: {event}") + if validated_event["status"] != "success": + raise AirflowException(f"DB instance creation failed: {validated_event}") - return json.dumps(event["response"], default=str) + return json.dumps(validated_event["response"], default=str) class RdsDeleteDbInstanceOperator(RdsBaseOperator): @@ -712,12 +712,12 @@ def execute(self, context: Context) -> str: return json.dumps(delete_db_instance, default=str) def execute_complete(self, context: Context, event: dict[str, Any] | None = None) -> str: - 
event = validate_execute_complete_event(event) + validated_event = validate_execute_complete_event(event) - if event["status"] != "success": - raise AirflowException(f"DB instance deletion failed: {event}") + if validated_event["status"] != "success": + raise AirflowException(f"DB instance deletion failed: {validated_event}") - return json.dumps(event["response"], default=str) + return json.dumps(validated_event["response"], default=str) class RdsStartDbOperator(RdsBaseOperator): @@ -779,12 +779,12 @@ def execute(self, context: Context) -> str: return json.dumps(start_db_response, default=str) def execute_complete(self, context: Context, event: dict[str, Any] | None = None) -> str: - event = validate_execute_complete_event(event) + validated_event = validate_execute_complete_event(event) - if event["status"] != "success": - raise AirflowException(f"Failed to start DB: {event}") + if validated_event["status"] != "success": + raise AirflowException(f"Failed to start DB: {validated_event}") - return json.dumps(event["response"], default=str) + return json.dumps(validated_event["response"], default=str) def _start_db(self): self.log.info("Starting DB %s '%s'", self.db_type.value, self.db_identifier) @@ -891,12 +891,12 @@ def execute(self, context: Context) -> str: return json.dumps(stop_db_response, default=str) def execute_complete(self, context: Context, event: dict[str, Any] | None = None) -> str: - event = validate_execute_complete_event(event) + validated_event = validate_execute_complete_event(event) - if event["status"] != "success": - raise AirflowException(f"Failed to start DB: {event}") + if validated_event["status"] != "success": + raise AirflowException(f"Failed to start DB: {validated_event}") - return json.dumps(event["response"], default=str) + return json.dumps(validated_event["response"], default=str) def _stop_db(self): self.log.info("Stopping DB %s '%s'", self.db_type.value, self.db_identifier) diff --git 
a/providers/src/airflow/providers/amazon/aws/operators/redshift_cluster.py b/providers/amazon/src/airflow/providers/amazon/aws/operators/redshift_cluster.py similarity index 97% rename from providers/src/airflow/providers/amazon/aws/operators/redshift_cluster.py rename to providers/amazon/src/airflow/providers/amazon/aws/operators/redshift_cluster.py index 397e3995a0cdbe..7f2f686cb30303 100644 --- a/providers/src/airflow/providers/amazon/aws/operators/redshift_cluster.py +++ b/providers/amazon/src/airflow/providers/amazon/aws/operators/redshift_cluster.py @@ -321,10 +321,10 @@ def execute(self, context: Context): self.log.info(cluster) def execute_complete(self, context: Context, event: dict[str, Any] | None = None) -> None: - event = validate_execute_complete_event(event) + validated_event = validate_execute_complete_event(event) - if event["status"] != "success": - raise AirflowException(f"Error creating cluster: {event}") + if validated_event["status"] != "success": + raise AirflowException(f"Error creating cluster: {validated_event}") class RedshiftCreateClusterSnapshotOperator(BaseOperator): @@ -417,10 +417,10 @@ def execute(self, context: Context) -> Any: ) def execute_complete(self, context: Context, event: dict[str, Any] | None = None) -> None: - event = validate_execute_complete_event(event) + validated_event = validate_execute_complete_event(event) - if event["status"] != "success": - raise AirflowException(f"Error creating snapshot: {event}") + if validated_event["status"] != "success": + raise AirflowException(f"Error creating snapshot: {validated_event}") self.log.info("Cluster snapshot created.") @@ -577,10 +577,10 @@ def execute(self, context: Context): ) def execute_complete(self, context: Context, event: dict[str, Any] | None = None) -> None: - event = validate_execute_complete_event(event) + validated_event = validate_execute_complete_event(event) - if event["status"] != "success": - raise AirflowException(f"Error resuming cluster: {event}") + if 
validated_event["status"] != "success": + raise AirflowException(f"Error resuming cluster: {validated_event}") self.log.info("Resumed cluster successfully") @@ -683,10 +683,10 @@ def execute(self, context: Context): ) def execute_complete(self, context: Context, event: dict[str, Any] | None = None) -> None: - event = validate_execute_complete_event(event) + validated_event = validate_execute_complete_event(event) - if event["status"] != "success": - raise AirflowException(f"Error pausing cluster: {event}") + if validated_event["status"] != "success": + raise AirflowException(f"Error pausing cluster: {validated_event}") self.log.info("Paused cluster successfully") @@ -792,9 +792,9 @@ def execute(self, context: Context): ) def execute_complete(self, context: Context, event: dict[str, Any] | None = None) -> None: - event = validate_execute_complete_event(event) + validated_event = validate_execute_complete_event(event) - if event["status"] != "success": - raise AirflowException(f"Error deleting cluster: {event}") + if validated_event["status"] != "success": + raise AirflowException(f"Error deleting cluster: {validated_event}") self.log.info("Cluster deleted successfully") diff --git a/providers/src/airflow/providers/amazon/aws/operators/redshift_data.py b/providers/amazon/src/airflow/providers/amazon/aws/operators/redshift_data.py similarity index 97% rename from providers/src/airflow/providers/amazon/aws/operators/redshift_data.py rename to providers/amazon/src/airflow/providers/amazon/aws/operators/redshift_data.py index 0813918854464e..334834d16d3b49 100644 --- a/providers/src/airflow/providers/amazon/aws/operators/redshift_data.py +++ b/providers/amazon/src/airflow/providers/amazon/aws/operators/redshift_data.py @@ -185,13 +185,13 @@ def execute(self, context: Context) -> list[GetStatementResultResponseTypeDef] | def execute_complete( self, context: Context, event: dict[str, Any] | None = None ) -> list[GetStatementResultResponseTypeDef] | list[str]: - event = 
validate_execute_complete_event(event) + validated_event = validate_execute_complete_event(event) - if event["status"] == "error": - msg = f"context: {context}, error message: {event['message']}" + if validated_event["status"] == "error": + msg = f"context: {context}, error message: {validated_event['message']}" raise AirflowException(msg) - statement_id = event["statement_id"] + statement_id = validated_event["statement_id"] if not statement_id: raise AirflowException("statement_id should not be empty.") diff --git a/providers/src/airflow/providers/amazon/aws/operators/s3.py b/providers/amazon/src/airflow/providers/amazon/aws/operators/s3.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/operators/s3.py rename to providers/amazon/src/airflow/providers/amazon/aws/operators/s3.py diff --git a/providers/src/airflow/providers/amazon/aws/operators/sagemaker.py b/providers/amazon/src/airflow/providers/amazon/aws/operators/sagemaker.py similarity index 98% rename from providers/src/airflow/providers/amazon/aws/operators/sagemaker.py rename to providers/amazon/src/airflow/providers/amazon/aws/operators/sagemaker.py index 981c79f1960fa6..dc68f13bb36b50 100644 --- a/providers/src/airflow/providers/amazon/aws/operators/sagemaker.py +++ b/providers/amazon/src/airflow/providers/amazon/aws/operators/sagemaker.py @@ -352,13 +352,13 @@ def execute(self, context: Context) -> dict: return {"Processing": self.serialized_job} def execute_complete(self, context: Context, event: dict[str, Any] | None = None) -> dict[str, dict]: - event = validate_execute_complete_event(event) + validated_event = validate_execute_complete_event(event) - if event["status"] != "success": - raise AirflowException(f"Error while running job: {event}") + if validated_event["status"] != "success": + raise AirflowException(f"Error while running job: {validated_event}") - self.log.info(event["message"]) - self.serialized_job = 
serialize(self.hook.describe_processing_job(event["job_name"])) + self.log.info(validated_event["message"]) + self.serialized_job = serialize(self.hook.describe_processing_job(validated_event["job_name"])) self.log.info("%s completed successfully.", self.task_id) return {"Processing": self.serialized_job} @@ -605,12 +605,12 @@ def execute(self, context: Context) -> dict: } def execute_complete(self, context: Context, event: dict[str, Any] | None = None) -> dict[str, dict]: - event = validate_execute_complete_event(event) + validated_event = validate_execute_complete_event(event) - if event["status"] != "success": - raise AirflowException(f"Error while running job: {event}") + if validated_event["status"] != "success": + raise AirflowException(f"Error while running job: {validated_event}") - response = self.hook.describe_endpoint(event["job_name"]) + response = self.hook.describe_endpoint(validated_event["job_name"]) return { "EndpointConfig": serialize(self.hook.describe_endpoint_config(response["EndpointConfigName"])), "Endpoint": serialize(self.hook.describe_endpoint(response["EndpointName"])), @@ -827,10 +827,10 @@ def _check_if_model_exists(self, model_name: str, describe_func: Callable[[str], return self._check_if_resource_exists(model_name, "model", describe_func) def execute_complete(self, context: Context, event: dict[str, Any] | None = None) -> dict[str, dict]: - event = validate_execute_complete_event(event) + validated_event = validate_execute_complete_event(event) - self.log.info(event["message"]) - return self.serialize_result(event["job_name"]) + self.log.info(validated_event["message"]) + return self.serialize_result(validated_event["job_name"]) def serialize_result(self, job_name: str) -> dict[str, dict]: job_description = self.hook.describe_transform_job(job_name) @@ -999,11 +999,11 @@ def execute(self, context: Context) -> dict: return {"Tuning": serialize(description)} def execute_complete(self, context: Context, event: dict[str, Any] | None = 
None) -> dict[str, dict]: - event = validate_execute_complete_event(event) + validated_event = validate_execute_complete_event(event) - if event["status"] != "success": - raise AirflowException(f"Error while running job: {event}") - return {"Tuning": serialize(self.hook.describe_tuning_job(event["job_name"]))} + if validated_event["status"] != "success": + raise AirflowException(f"Error while running job: {validated_event}") + return {"Tuning": serialize(self.hook.describe_tuning_job(validated_event["job_name"]))} class SageMakerModelOperator(SageMakerBaseOperator): @@ -1211,13 +1211,13 @@ def execute(self, context: Context) -> dict: return self.serialize_result(self.config["TrainingJobName"]) def execute_complete(self, context: Context, event: dict[str, Any] | None = None) -> dict[str, dict]: - event = validate_execute_complete_event(event) + validated_event = validate_execute_complete_event(event) - if event["status"] != "success": - raise AirflowException(f"Error while running job: {event}") + if validated_event["status"] != "success": + raise AirflowException(f"Error while running job: {validated_event}") - self.log.info(event["message"]) - return self.serialize_result(event["job_name"]) + self.log.info(validated_event["message"]) + return self.serialize_result(validated_event["job_name"]) def serialize_result(self, job_name: str) -> dict[str, dict]: self.serialized_training_data = serialize(self.hook.describe_training_job(job_name)) @@ -1353,11 +1353,11 @@ def execute(self, context: Context) -> str: return arn def execute_complete(self, context: Context, event: dict[str, Any] | None = None) -> str: - event = validate_execute_complete_event(event) + validated_event = validate_execute_complete_event(event) - if event["status"] != "success": - raise AirflowException(f"Failure during pipeline execution: {event}") - return event["value"] + if validated_event["status"] != "success": + raise AirflowException(f"Failure during pipeline execution: {validated_event}") + 
return validated_event["value"] class SageMakerStopPipelineOperator(SageMakerBaseOperator): @@ -1448,10 +1448,10 @@ def execute(self, context: Context) -> str: return status def execute_complete(self, context: Context, event: dict[str, Any] | None = None) -> str: - event = validate_execute_complete_event(event) + validated_event = validate_execute_complete_event(event) - if event["status"] != "success": - raise AirflowException(f"Failure during pipeline execution: {event}") + if validated_event["status"] != "success": + raise AirflowException(f"Failure during pipeline execution: {validated_event}") # theoretically we should do a `describe` call to know this, # but if we reach this point, this is the only possible status diff --git a/providers/src/airflow/providers/amazon/aws/operators/sns.py b/providers/amazon/src/airflow/providers/amazon/aws/operators/sns.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/operators/sns.py rename to providers/amazon/src/airflow/providers/amazon/aws/operators/sns.py diff --git a/providers/src/airflow/providers/amazon/aws/operators/sqs.py b/providers/amazon/src/airflow/providers/amazon/aws/operators/sqs.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/operators/sqs.py rename to providers/amazon/src/airflow/providers/amazon/aws/operators/sqs.py diff --git a/providers/src/airflow/providers/amazon/aws/operators/step_function.py b/providers/amazon/src/airflow/providers/amazon/aws/operators/step_function.py similarity index 97% rename from providers/src/airflow/providers/amazon/aws/operators/step_function.py rename to providers/amazon/src/airflow/providers/amazon/aws/operators/step_function.py index 7556555443c602..923e32aaee3e74 100644 --- a/providers/src/airflow/providers/amazon/aws/operators/step_function.py +++ b/providers/amazon/src/airflow/providers/amazon/aws/operators/step_function.py @@ -145,13 +145,13 @@ def execute(self, context: Context): return execution_arn def 
execute_complete(self, context: Context, event: dict[str, Any] | None = None) -> None: - event = validate_execute_complete_event(event) + validated_event = validate_execute_complete_event(event) - if event["status"] != "success": - raise AirflowException(f"Trigger error: event is {event}") + if validated_event["status"] != "success": + raise AirflowException(f"Trigger error: event is {validated_event}") self.log.info("State Machine execution completed successfully") - return event["execution_arn"] + return validated_event["execution_arn"] class StepFunctionGetExecutionOutputOperator(AwsBaseOperator[StepFunctionHook]): diff --git a/providers/src/airflow/providers/amazon/aws/secrets/__init__.py b/providers/amazon/src/airflow/providers/amazon/aws/secrets/__init__.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/secrets/__init__.py rename to providers/amazon/src/airflow/providers/amazon/aws/secrets/__init__.py diff --git a/providers/src/airflow/providers/amazon/aws/secrets/secrets_manager.py b/providers/amazon/src/airflow/providers/amazon/aws/secrets/secrets_manager.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/secrets/secrets_manager.py rename to providers/amazon/src/airflow/providers/amazon/aws/secrets/secrets_manager.py diff --git a/providers/src/airflow/providers/amazon/aws/secrets/systems_manager.py b/providers/amazon/src/airflow/providers/amazon/aws/secrets/systems_manager.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/secrets/systems_manager.py rename to providers/amazon/src/airflow/providers/amazon/aws/secrets/systems_manager.py diff --git a/providers/src/airflow/providers/amazon/aws/sensors/__init__.py b/providers/amazon/src/airflow/providers/amazon/aws/sensors/__init__.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/sensors/__init__.py rename to providers/amazon/src/airflow/providers/amazon/aws/sensors/__init__.py diff --git 
a/providers/src/airflow/providers/amazon/aws/sensors/athena.py b/providers/amazon/src/airflow/providers/amazon/aws/sensors/athena.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/sensors/athena.py rename to providers/amazon/src/airflow/providers/amazon/aws/sensors/athena.py diff --git a/providers/src/airflow/providers/amazon/aws/sensors/base_aws.py b/providers/amazon/src/airflow/providers/amazon/aws/sensors/base_aws.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/sensors/base_aws.py rename to providers/amazon/src/airflow/providers/amazon/aws/sensors/base_aws.py diff --git a/providers/src/airflow/providers/amazon/aws/sensors/batch.py b/providers/amazon/src/airflow/providers/amazon/aws/sensors/batch.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/sensors/batch.py rename to providers/amazon/src/airflow/providers/amazon/aws/sensors/batch.py diff --git a/providers/src/airflow/providers/amazon/aws/sensors/bedrock.py b/providers/amazon/src/airflow/providers/amazon/aws/sensors/bedrock.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/sensors/bedrock.py rename to providers/amazon/src/airflow/providers/amazon/aws/sensors/bedrock.py diff --git a/providers/src/airflow/providers/amazon/aws/sensors/cloud_formation.py b/providers/amazon/src/airflow/providers/amazon/aws/sensors/cloud_formation.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/sensors/cloud_formation.py rename to providers/amazon/src/airflow/providers/amazon/aws/sensors/cloud_formation.py diff --git a/providers/src/airflow/providers/amazon/aws/sensors/comprehend.py b/providers/amazon/src/airflow/providers/amazon/aws/sensors/comprehend.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/sensors/comprehend.py rename to providers/amazon/src/airflow/providers/amazon/aws/sensors/comprehend.py diff --git 
a/providers/src/airflow/providers/amazon/aws/sensors/dms.py b/providers/amazon/src/airflow/providers/amazon/aws/sensors/dms.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/sensors/dms.py rename to providers/amazon/src/airflow/providers/amazon/aws/sensors/dms.py diff --git a/providers/src/airflow/providers/amazon/aws/sensors/dynamodb.py b/providers/amazon/src/airflow/providers/amazon/aws/sensors/dynamodb.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/sensors/dynamodb.py rename to providers/amazon/src/airflow/providers/amazon/aws/sensors/dynamodb.py diff --git a/providers/src/airflow/providers/amazon/aws/sensors/ec2.py b/providers/amazon/src/airflow/providers/amazon/aws/sensors/ec2.py similarity index 95% rename from providers/src/airflow/providers/amazon/aws/sensors/ec2.py rename to providers/amazon/src/airflow/providers/amazon/aws/sensors/ec2.py index fa2b638e729661..910cf9f4a8fcd1 100644 --- a/providers/src/airflow/providers/amazon/aws/sensors/ec2.py +++ b/providers/amazon/src/airflow/providers/amazon/aws/sensors/ec2.py @@ -95,7 +95,7 @@ def poke(self, context: Context): return instance_state == self.target_state def execute_complete(self, context: Context, event: dict[str, Any] | None = None) -> None: - event = validate_execute_complete_event(event) + validated_event = validate_execute_complete_event(event) - if event["status"] != "success": - raise AirflowException(f"Error: {event}") + if validated_event["status"] != "success": + raise AirflowException(f"Error: {validated_event}") diff --git a/providers/src/airflow/providers/amazon/aws/sensors/ecs.py b/providers/amazon/src/airflow/providers/amazon/aws/sensors/ecs.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/sensors/ecs.py rename to providers/amazon/src/airflow/providers/amazon/aws/sensors/ecs.py diff --git a/providers/src/airflow/providers/amazon/aws/sensors/eks.py 
b/providers/amazon/src/airflow/providers/amazon/aws/sensors/eks.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/sensors/eks.py rename to providers/amazon/src/airflow/providers/amazon/aws/sensors/eks.py diff --git a/providers/src/airflow/providers/amazon/aws/sensors/emr.py b/providers/amazon/src/airflow/providers/amazon/aws/sensors/emr.py similarity index 97% rename from providers/src/airflow/providers/amazon/aws/sensors/emr.py rename to providers/amazon/src/airflow/providers/amazon/aws/sensors/emr.py index 1a7a2572d45c61..d35385b35ddb05 100644 --- a/providers/src/airflow/providers/amazon/aws/sensors/emr.py +++ b/providers/amazon/src/airflow/providers/amazon/aws/sensors/emr.py @@ -346,10 +346,10 @@ def execute(self, context: Context): ) def execute_complete(self, context: Context, event: dict[str, Any] | None = None) -> None: - event = validate_execute_complete_event(event) + validated_event = validate_execute_complete_event(event) - if event["status"] != "success": - raise AirflowException(f"Error while running job: {event}") + if validated_event["status"] != "success": + raise AirflowException(f"Error while running job: {validated_event}") self.log.info("Job completed.") @@ -535,10 +535,10 @@ def execute(self, context: Context) -> None: ) def execute_complete(self, context: Context, event: dict[str, Any] | None = None) -> None: - event = validate_execute_complete_event(event) + validated_event = validate_execute_complete_event(event) - if event["status"] != "success": - raise AirflowException(f"Error while running job: {event}") + if validated_event["status"] != "success": + raise AirflowException(f"Error while running job: {validated_event}") self.log.info("Job completed.") @@ -664,9 +664,9 @@ def execute(self, context: Context) -> None: ) def execute_complete(self, context: Context, event: dict[str, Any] | None = None) -> None: - event = validate_execute_complete_event(event) + validated_event = 
validate_execute_complete_event(event) - if event["status"] != "success": - raise AirflowException(f"Error while running job: {event}") + if validated_event["status"] != "success": + raise AirflowException(f"Error while running job: {validated_event}") self.log.info("Job %s completed.", self.job_flow_id) diff --git a/providers/src/airflow/providers/amazon/aws/sensors/glacier.py b/providers/amazon/src/airflow/providers/amazon/aws/sensors/glacier.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/sensors/glacier.py rename to providers/amazon/src/airflow/providers/amazon/aws/sensors/glacier.py diff --git a/providers/src/airflow/providers/amazon/aws/sensors/glue.py b/providers/amazon/src/airflow/providers/amazon/aws/sensors/glue.py similarity index 97% rename from providers/src/airflow/providers/amazon/aws/sensors/glue.py rename to providers/amazon/src/airflow/providers/amazon/aws/sensors/glue.py index 41d25b5ae41b12..79c83c88c1d579 100644 --- a/providers/src/airflow/providers/amazon/aws/sensors/glue.py +++ b/providers/amazon/src/airflow/providers/amazon/aws/sensors/glue.py @@ -174,14 +174,14 @@ def execute(self, context: Context) -> Any: super().execute(context=context) def execute_complete(self, context: Context, event: dict[str, Any] | None = None) -> None: - event = validate_execute_complete_event(event) + validated_event = validate_execute_complete_event(event) - if event["status"] != "success": - message = f"Error: AWS Glue data quality ruleset evaluation run: {event}" + if validated_event["status"] != "success": + message = f"Error: AWS Glue data quality ruleset evaluation run: {validated_event}" raise AirflowException(message) self.hook.validate_evaluation_run_results( - evaluation_run_id=event["evaluation_run_id"], + evaluation_run_id=validated_event["evaluation_run_id"], show_results=self.show_results, verify_result_status=self.verify_result_status, ) @@ -295,10 +295,10 @@ def execute(self, context: Context) -> Any: 
super().execute(context=context) def execute_complete(self, context: Context, event: dict[str, Any] | None = None) -> None: - event = validate_execute_complete_event(event) + validated_event = validate_execute_complete_event(event) - if event["status"] != "success": - message = f"Error: AWS Glue data quality recommendation run: {event}" + if validated_event["status"] != "success": + message = f"Error: AWS Glue data quality recommendation run: {validated_event}" raise AirflowException(message) if self.show_results: diff --git a/providers/src/airflow/providers/amazon/aws/sensors/glue_catalog_partition.py b/providers/amazon/src/airflow/providers/amazon/aws/sensors/glue_catalog_partition.py similarity index 96% rename from providers/src/airflow/providers/amazon/aws/sensors/glue_catalog_partition.py rename to providers/amazon/src/airflow/providers/amazon/aws/sensors/glue_catalog_partition.py index 2e93a56cedd870..acbcc4d88b603c 100644 --- a/providers/src/airflow/providers/amazon/aws/sensors/glue_catalog_partition.py +++ b/providers/amazon/src/airflow/providers/amazon/aws/sensors/glue_catalog_partition.py @@ -123,8 +123,8 @@ def poke(self, context: Context): return self.hook.check_for_partition(self.database_name, self.table_name, self.expression) def execute_complete(self, context: Context, event: dict | None = None) -> None: - event = validate_execute_complete_event(event) + validated_event = validate_execute_complete_event(event) - if event["status"] != "success": - raise AirflowException(f"Trigger error: event is {event}") + if validated_event["status"] != "success": + raise AirflowException(f"Trigger error: event is {validated_event}") self.log.info("Partition exists in the Glue Catalog") diff --git a/providers/src/airflow/providers/amazon/aws/sensors/glue_crawler.py b/providers/amazon/src/airflow/providers/amazon/aws/sensors/glue_crawler.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/sensors/glue_crawler.py rename to 
providers/amazon/src/airflow/providers/amazon/aws/sensors/glue_crawler.py diff --git a/providers/src/airflow/providers/amazon/aws/sensors/kinesis_analytics.py b/providers/amazon/src/airflow/providers/amazon/aws/sensors/kinesis_analytics.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/sensors/kinesis_analytics.py rename to providers/amazon/src/airflow/providers/amazon/aws/sensors/kinesis_analytics.py diff --git a/providers/src/airflow/providers/amazon/aws/sensors/lambda_function.py b/providers/amazon/src/airflow/providers/amazon/aws/sensors/lambda_function.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/sensors/lambda_function.py rename to providers/amazon/src/airflow/providers/amazon/aws/sensors/lambda_function.py diff --git a/providers/src/airflow/providers/amazon/aws/sensors/opensearch_serverless.py b/providers/amazon/src/airflow/providers/amazon/aws/sensors/opensearch_serverless.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/sensors/opensearch_serverless.py rename to providers/amazon/src/airflow/providers/amazon/aws/sensors/opensearch_serverless.py diff --git a/providers/src/airflow/providers/amazon/aws/sensors/quicksight.py b/providers/amazon/src/airflow/providers/amazon/aws/sensors/quicksight.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/sensors/quicksight.py rename to providers/amazon/src/airflow/providers/amazon/aws/sensors/quicksight.py diff --git a/providers/src/airflow/providers/amazon/aws/sensors/rds.py b/providers/amazon/src/airflow/providers/amazon/aws/sensors/rds.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/sensors/rds.py rename to providers/amazon/src/airflow/providers/amazon/aws/sensors/rds.py diff --git a/providers/src/airflow/providers/amazon/aws/sensors/redshift_cluster.py b/providers/amazon/src/airflow/providers/amazon/aws/sensors/redshift_cluster.py similarity index 94% rename from 
providers/src/airflow/providers/amazon/aws/sensors/redshift_cluster.py rename to providers/amazon/src/airflow/providers/amazon/aws/sensors/redshift_cluster.py index 2f86e4dc0bab33..c6d356b4db2168 100644 --- a/providers/src/airflow/providers/amazon/aws/sensors/redshift_cluster.py +++ b/providers/amazon/src/airflow/providers/amazon/aws/sensors/redshift_cluster.py @@ -88,11 +88,11 @@ def execute(self, context: Context) -> None: ) def execute_complete(self, context: Context, event: dict[str, Any] | None = None) -> None: - event = validate_execute_complete_event(event) + validated_event = validate_execute_complete_event(event) - status = event["status"] + status = validated_event["status"] if status == "error": - raise AirflowException(f"{event['status']}: {event['message']}") + raise AirflowException(f"{validated_event['status']}: {validated_event['message']}") elif status == "success": self.log.info("%s completed successfully.", self.task_id) self.log.info("Cluster Identifier %s is in %s state", self.cluster_identifier, self.target_status) diff --git a/providers/src/airflow/providers/amazon/aws/sensors/s3.py b/providers/amazon/src/airflow/providers/amazon/aws/sensors/s3.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/sensors/s3.py rename to providers/amazon/src/airflow/providers/amazon/aws/sensors/s3.py diff --git a/providers/src/airflow/providers/amazon/aws/sensors/sagemaker.py b/providers/amazon/src/airflow/providers/amazon/aws/sensors/sagemaker.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/sensors/sagemaker.py rename to providers/amazon/src/airflow/providers/amazon/aws/sensors/sagemaker.py diff --git a/providers/src/airflow/providers/amazon/aws/sensors/sqs.py b/providers/amazon/src/airflow/providers/amazon/aws/sensors/sqs.py similarity index 97% rename from providers/src/airflow/providers/amazon/aws/sensors/sqs.py rename to providers/amazon/src/airflow/providers/amazon/aws/sensors/sqs.py index 
016e4fbd6c6d6e..e5d70ed9e1a7b3 100644 --- a/providers/src/airflow/providers/amazon/aws/sensors/sqs.py +++ b/providers/amazon/src/airflow/providers/amazon/aws/sensors/sqs.py @@ -157,11 +157,11 @@ def execute(self, context: Context) -> Any: super().execute(context=context) def execute_complete(self, context: Context, event: dict | None = None) -> None: - event = validate_execute_complete_event(event) + validated_event = validate_execute_complete_event(event) - if event["status"] != "success": - raise AirflowException(f"Trigger error: event is {event}") - context["ti"].xcom_push(key="messages", value=event["message_batch"]) + if validated_event["status"] != "success": + raise AirflowException(f"Trigger error: event is {validated_event}") + context["ti"].xcom_push(key="messages", value=validated_event["message_batch"]) def poll_sqs(self, sqs_conn: BaseAwsConnection) -> Collection: """ diff --git a/providers/src/airflow/providers/amazon/aws/sensors/step_function.py b/providers/amazon/src/airflow/providers/amazon/aws/sensors/step_function.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/sensors/step_function.py rename to providers/amazon/src/airflow/providers/amazon/aws/sensors/step_function.py diff --git a/providers/src/airflow/providers/amazon/aws/transfers/__init__.py b/providers/amazon/src/airflow/providers/amazon/aws/transfers/__init__.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/transfers/__init__.py rename to providers/amazon/src/airflow/providers/amazon/aws/transfers/__init__.py diff --git a/providers/src/airflow/providers/amazon/aws/transfers/azure_blob_to_s3.py b/providers/amazon/src/airflow/providers/amazon/aws/transfers/azure_blob_to_s3.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/transfers/azure_blob_to_s3.py rename to providers/amazon/src/airflow/providers/amazon/aws/transfers/azure_blob_to_s3.py diff --git 
a/providers/src/airflow/providers/amazon/aws/transfers/base.py b/providers/amazon/src/airflow/providers/amazon/aws/transfers/base.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/transfers/base.py rename to providers/amazon/src/airflow/providers/amazon/aws/transfers/base.py diff --git a/providers/src/airflow/providers/amazon/aws/transfers/dynamodb_to_s3.py b/providers/amazon/src/airflow/providers/amazon/aws/transfers/dynamodb_to_s3.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/transfers/dynamodb_to_s3.py rename to providers/amazon/src/airflow/providers/amazon/aws/transfers/dynamodb_to_s3.py diff --git a/providers/src/airflow/providers/amazon/aws/transfers/exasol_to_s3.py b/providers/amazon/src/airflow/providers/amazon/aws/transfers/exasol_to_s3.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/transfers/exasol_to_s3.py rename to providers/amazon/src/airflow/providers/amazon/aws/transfers/exasol_to_s3.py diff --git a/providers/src/airflow/providers/amazon/aws/transfers/ftp_to_s3.py b/providers/amazon/src/airflow/providers/amazon/aws/transfers/ftp_to_s3.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/transfers/ftp_to_s3.py rename to providers/amazon/src/airflow/providers/amazon/aws/transfers/ftp_to_s3.py diff --git a/providers/src/airflow/providers/amazon/aws/transfers/gcs_to_s3.py b/providers/amazon/src/airflow/providers/amazon/aws/transfers/gcs_to_s3.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/transfers/gcs_to_s3.py rename to providers/amazon/src/airflow/providers/amazon/aws/transfers/gcs_to_s3.py diff --git a/providers/src/airflow/providers/amazon/aws/transfers/glacier_to_gcs.py b/providers/amazon/src/airflow/providers/amazon/aws/transfers/glacier_to_gcs.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/transfers/glacier_to_gcs.py rename to 
providers/amazon/src/airflow/providers/amazon/aws/transfers/glacier_to_gcs.py diff --git a/providers/src/airflow/providers/amazon/aws/transfers/google_api_to_s3.py b/providers/amazon/src/airflow/providers/amazon/aws/transfers/google_api_to_s3.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/transfers/google_api_to_s3.py rename to providers/amazon/src/airflow/providers/amazon/aws/transfers/google_api_to_s3.py diff --git a/providers/src/airflow/providers/amazon/aws/transfers/hive_to_dynamodb.py b/providers/amazon/src/airflow/providers/amazon/aws/transfers/hive_to_dynamodb.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/transfers/hive_to_dynamodb.py rename to providers/amazon/src/airflow/providers/amazon/aws/transfers/hive_to_dynamodb.py diff --git a/providers/src/airflow/providers/amazon/aws/transfers/http_to_s3.py b/providers/amazon/src/airflow/providers/amazon/aws/transfers/http_to_s3.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/transfers/http_to_s3.py rename to providers/amazon/src/airflow/providers/amazon/aws/transfers/http_to_s3.py diff --git a/providers/src/airflow/providers/amazon/aws/transfers/imap_attachment_to_s3.py b/providers/amazon/src/airflow/providers/amazon/aws/transfers/imap_attachment_to_s3.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/transfers/imap_attachment_to_s3.py rename to providers/amazon/src/airflow/providers/amazon/aws/transfers/imap_attachment_to_s3.py diff --git a/providers/src/airflow/providers/amazon/aws/transfers/local_to_s3.py b/providers/amazon/src/airflow/providers/amazon/aws/transfers/local_to_s3.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/transfers/local_to_s3.py rename to providers/amazon/src/airflow/providers/amazon/aws/transfers/local_to_s3.py diff --git a/providers/src/airflow/providers/amazon/aws/transfers/mongo_to_s3.py 
b/providers/amazon/src/airflow/providers/amazon/aws/transfers/mongo_to_s3.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/transfers/mongo_to_s3.py rename to providers/amazon/src/airflow/providers/amazon/aws/transfers/mongo_to_s3.py diff --git a/providers/src/airflow/providers/amazon/aws/transfers/redshift_to_s3.py b/providers/amazon/src/airflow/providers/amazon/aws/transfers/redshift_to_s3.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/transfers/redshift_to_s3.py rename to providers/amazon/src/airflow/providers/amazon/aws/transfers/redshift_to_s3.py diff --git a/providers/src/airflow/providers/amazon/aws/transfers/s3_to_dynamodb.py b/providers/amazon/src/airflow/providers/amazon/aws/transfers/s3_to_dynamodb.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/transfers/s3_to_dynamodb.py rename to providers/amazon/src/airflow/providers/amazon/aws/transfers/s3_to_dynamodb.py diff --git a/providers/src/airflow/providers/amazon/aws/transfers/s3_to_ftp.py b/providers/amazon/src/airflow/providers/amazon/aws/transfers/s3_to_ftp.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/transfers/s3_to_ftp.py rename to providers/amazon/src/airflow/providers/amazon/aws/transfers/s3_to_ftp.py diff --git a/providers/src/airflow/providers/amazon/aws/transfers/s3_to_redshift.py b/providers/amazon/src/airflow/providers/amazon/aws/transfers/s3_to_redshift.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/transfers/s3_to_redshift.py rename to providers/amazon/src/airflow/providers/amazon/aws/transfers/s3_to_redshift.py diff --git a/providers/src/airflow/providers/amazon/aws/transfers/s3_to_sftp.py b/providers/amazon/src/airflow/providers/amazon/aws/transfers/s3_to_sftp.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/transfers/s3_to_sftp.py rename to 
providers/amazon/src/airflow/providers/amazon/aws/transfers/s3_to_sftp.py diff --git a/providers/src/airflow/providers/amazon/aws/transfers/s3_to_sql.py b/providers/amazon/src/airflow/providers/amazon/aws/transfers/s3_to_sql.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/transfers/s3_to_sql.py rename to providers/amazon/src/airflow/providers/amazon/aws/transfers/s3_to_sql.py diff --git a/providers/src/airflow/providers/amazon/aws/transfers/salesforce_to_s3.py b/providers/amazon/src/airflow/providers/amazon/aws/transfers/salesforce_to_s3.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/transfers/salesforce_to_s3.py rename to providers/amazon/src/airflow/providers/amazon/aws/transfers/salesforce_to_s3.py diff --git a/providers/src/airflow/providers/amazon/aws/transfers/sftp_to_s3.py b/providers/amazon/src/airflow/providers/amazon/aws/transfers/sftp_to_s3.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/transfers/sftp_to_s3.py rename to providers/amazon/src/airflow/providers/amazon/aws/transfers/sftp_to_s3.py diff --git a/providers/src/airflow/providers/amazon/aws/transfers/sql_to_s3.py b/providers/amazon/src/airflow/providers/amazon/aws/transfers/sql_to_s3.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/transfers/sql_to_s3.py rename to providers/amazon/src/airflow/providers/amazon/aws/transfers/sql_to_s3.py diff --git a/providers/src/airflow/providers/amazon/aws/triggers/README.md b/providers/amazon/src/airflow/providers/amazon/aws/triggers/README.md similarity index 100% rename from providers/src/airflow/providers/amazon/aws/triggers/README.md rename to providers/amazon/src/airflow/providers/amazon/aws/triggers/README.md diff --git a/providers/src/airflow/providers/amazon/aws/triggers/__init__.py b/providers/amazon/src/airflow/providers/amazon/aws/triggers/__init__.py similarity index 100% rename from 
providers/src/airflow/providers/amazon/aws/triggers/__init__.py rename to providers/amazon/src/airflow/providers/amazon/aws/triggers/__init__.py diff --git a/providers/src/airflow/providers/amazon/aws/triggers/athena.py b/providers/amazon/src/airflow/providers/amazon/aws/triggers/athena.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/triggers/athena.py rename to providers/amazon/src/airflow/providers/amazon/aws/triggers/athena.py diff --git a/providers/src/airflow/providers/amazon/aws/triggers/base.py b/providers/amazon/src/airflow/providers/amazon/aws/triggers/base.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/triggers/base.py rename to providers/amazon/src/airflow/providers/amazon/aws/triggers/base.py diff --git a/providers/src/airflow/providers/amazon/aws/triggers/batch.py b/providers/amazon/src/airflow/providers/amazon/aws/triggers/batch.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/triggers/batch.py rename to providers/amazon/src/airflow/providers/amazon/aws/triggers/batch.py diff --git a/providers/src/airflow/providers/amazon/aws/triggers/bedrock.py b/providers/amazon/src/airflow/providers/amazon/aws/triggers/bedrock.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/triggers/bedrock.py rename to providers/amazon/src/airflow/providers/amazon/aws/triggers/bedrock.py diff --git a/providers/src/airflow/providers/amazon/aws/triggers/comprehend.py b/providers/amazon/src/airflow/providers/amazon/aws/triggers/comprehend.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/triggers/comprehend.py rename to providers/amazon/src/airflow/providers/amazon/aws/triggers/comprehend.py diff --git a/providers/src/airflow/providers/amazon/aws/triggers/dms.py b/providers/amazon/src/airflow/providers/amazon/aws/triggers/dms.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/triggers/dms.py rename 
to providers/amazon/src/airflow/providers/amazon/aws/triggers/dms.py diff --git a/providers/src/airflow/providers/amazon/aws/triggers/ec2.py b/providers/amazon/src/airflow/providers/amazon/aws/triggers/ec2.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/triggers/ec2.py rename to providers/amazon/src/airflow/providers/amazon/aws/triggers/ec2.py diff --git a/providers/src/airflow/providers/amazon/aws/triggers/ecs.py b/providers/amazon/src/airflow/providers/amazon/aws/triggers/ecs.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/triggers/ecs.py rename to providers/amazon/src/airflow/providers/amazon/aws/triggers/ecs.py diff --git a/providers/src/airflow/providers/amazon/aws/triggers/eks.py b/providers/amazon/src/airflow/providers/amazon/aws/triggers/eks.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/triggers/eks.py rename to providers/amazon/src/airflow/providers/amazon/aws/triggers/eks.py diff --git a/providers/src/airflow/providers/amazon/aws/triggers/emr.py b/providers/amazon/src/airflow/providers/amazon/aws/triggers/emr.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/triggers/emr.py rename to providers/amazon/src/airflow/providers/amazon/aws/triggers/emr.py diff --git a/providers/src/airflow/providers/amazon/aws/triggers/glue.py b/providers/amazon/src/airflow/providers/amazon/aws/triggers/glue.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/triggers/glue.py rename to providers/amazon/src/airflow/providers/amazon/aws/triggers/glue.py diff --git a/providers/src/airflow/providers/amazon/aws/triggers/glue_crawler.py b/providers/amazon/src/airflow/providers/amazon/aws/triggers/glue_crawler.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/triggers/glue_crawler.py rename to providers/amazon/src/airflow/providers/amazon/aws/triggers/glue_crawler.py diff --git 
a/providers/src/airflow/providers/amazon/aws/triggers/glue_databrew.py b/providers/amazon/src/airflow/providers/amazon/aws/triggers/glue_databrew.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/triggers/glue_databrew.py rename to providers/amazon/src/airflow/providers/amazon/aws/triggers/glue_databrew.py diff --git a/providers/src/airflow/providers/amazon/aws/triggers/kinesis_analytics.py b/providers/amazon/src/airflow/providers/amazon/aws/triggers/kinesis_analytics.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/triggers/kinesis_analytics.py rename to providers/amazon/src/airflow/providers/amazon/aws/triggers/kinesis_analytics.py diff --git a/providers/src/airflow/providers/amazon/aws/triggers/lambda_function.py b/providers/amazon/src/airflow/providers/amazon/aws/triggers/lambda_function.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/triggers/lambda_function.py rename to providers/amazon/src/airflow/providers/amazon/aws/triggers/lambda_function.py diff --git a/providers/src/airflow/providers/amazon/aws/triggers/neptune.py b/providers/amazon/src/airflow/providers/amazon/aws/triggers/neptune.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/triggers/neptune.py rename to providers/amazon/src/airflow/providers/amazon/aws/triggers/neptune.py diff --git a/providers/src/airflow/providers/amazon/aws/triggers/opensearch_serverless.py b/providers/amazon/src/airflow/providers/amazon/aws/triggers/opensearch_serverless.py similarity index 95% rename from providers/src/airflow/providers/amazon/aws/triggers/opensearch_serverless.py rename to providers/amazon/src/airflow/providers/amazon/aws/triggers/opensearch_serverless.py index bf94c83bf5f8a9..fa4966d0a124a6 100644 --- a/providers/src/airflow/providers/amazon/aws/triggers/opensearch_serverless.py +++ b/providers/amazon/src/airflow/providers/amazon/aws/triggers/opensearch_serverless.py @@ -53,7 +53,8 @@ 
def __init__( super().__init__( serialized_fields={"collection_id": collection_id, "collection_name": collection_name}, waiter_name="collection_available", - waiter_args={"ids": [collection_id]} if collection_id else {"names": [collection_name]}, + # waiter_args is a dict[str, Any], allow a possible list of None (it is caught above) + waiter_args={"ids": [collection_id]} if collection_id else {"names": [collection_name]}, # type: ignore[list-item] failure_message="OpenSearch Serverless Collection creation failed.", status_message="Status of OpenSearch Serverless Collection is", status_queries=["status"], diff --git a/providers/src/airflow/providers/amazon/aws/triggers/rds.py b/providers/amazon/src/airflow/providers/amazon/aws/triggers/rds.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/triggers/rds.py rename to providers/amazon/src/airflow/providers/amazon/aws/triggers/rds.py diff --git a/providers/src/airflow/providers/amazon/aws/triggers/redshift_cluster.py b/providers/amazon/src/airflow/providers/amazon/aws/triggers/redshift_cluster.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/triggers/redshift_cluster.py rename to providers/amazon/src/airflow/providers/amazon/aws/triggers/redshift_cluster.py diff --git a/providers/src/airflow/providers/amazon/aws/triggers/redshift_data.py b/providers/amazon/src/airflow/providers/amazon/aws/triggers/redshift_data.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/triggers/redshift_data.py rename to providers/amazon/src/airflow/providers/amazon/aws/triggers/redshift_data.py diff --git a/providers/src/airflow/providers/amazon/aws/triggers/s3.py b/providers/amazon/src/airflow/providers/amazon/aws/triggers/s3.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/triggers/s3.py rename to providers/amazon/src/airflow/providers/amazon/aws/triggers/s3.py diff --git 
a/providers/src/airflow/providers/amazon/aws/triggers/sagemaker.py b/providers/amazon/src/airflow/providers/amazon/aws/triggers/sagemaker.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/triggers/sagemaker.py rename to providers/amazon/src/airflow/providers/amazon/aws/triggers/sagemaker.py diff --git a/providers/src/airflow/providers/amazon/aws/triggers/sqs.py b/providers/amazon/src/airflow/providers/amazon/aws/triggers/sqs.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/triggers/sqs.py rename to providers/amazon/src/airflow/providers/amazon/aws/triggers/sqs.py diff --git a/providers/src/airflow/providers/amazon/aws/triggers/step_function.py b/providers/amazon/src/airflow/providers/amazon/aws/triggers/step_function.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/triggers/step_function.py rename to providers/amazon/src/airflow/providers/amazon/aws/triggers/step_function.py diff --git a/providers/src/airflow/providers/amazon/aws/utils/__init__.py b/providers/amazon/src/airflow/providers/amazon/aws/utils/__init__.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/utils/__init__.py rename to providers/amazon/src/airflow/providers/amazon/aws/utils/__init__.py diff --git a/providers/src/airflow/providers/amazon/aws/utils/connection_wrapper.py b/providers/amazon/src/airflow/providers/amazon/aws/utils/connection_wrapper.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/utils/connection_wrapper.py rename to providers/amazon/src/airflow/providers/amazon/aws/utils/connection_wrapper.py diff --git a/providers/src/airflow/providers/amazon/aws/utils/eks_get_token.py b/providers/amazon/src/airflow/providers/amazon/aws/utils/eks_get_token.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/utils/eks_get_token.py rename to providers/amazon/src/airflow/providers/amazon/aws/utils/eks_get_token.py diff 
--git a/providers/src/airflow/providers/amazon/aws/utils/emailer.py b/providers/amazon/src/airflow/providers/amazon/aws/utils/emailer.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/utils/emailer.py rename to providers/amazon/src/airflow/providers/amazon/aws/utils/emailer.py diff --git a/providers/src/airflow/providers/amazon/aws/utils/identifiers.py b/providers/amazon/src/airflow/providers/amazon/aws/utils/identifiers.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/utils/identifiers.py rename to providers/amazon/src/airflow/providers/amazon/aws/utils/identifiers.py diff --git a/providers/src/airflow/providers/amazon/aws/utils/mixins.py b/providers/amazon/src/airflow/providers/amazon/aws/utils/mixins.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/utils/mixins.py rename to providers/amazon/src/airflow/providers/amazon/aws/utils/mixins.py diff --git a/providers/src/airflow/providers/amazon/aws/utils/openlineage.py b/providers/amazon/src/airflow/providers/amazon/aws/utils/openlineage.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/utils/openlineage.py rename to providers/amazon/src/airflow/providers/amazon/aws/utils/openlineage.py diff --git a/providers/src/airflow/providers/amazon/aws/utils/rds.py b/providers/amazon/src/airflow/providers/amazon/aws/utils/rds.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/utils/rds.py rename to providers/amazon/src/airflow/providers/amazon/aws/utils/rds.py diff --git a/providers/src/airflow/providers/amazon/aws/utils/redshift.py b/providers/amazon/src/airflow/providers/amazon/aws/utils/redshift.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/utils/redshift.py rename to providers/amazon/src/airflow/providers/amazon/aws/utils/redshift.py diff --git a/providers/src/airflow/providers/amazon/aws/utils/sagemaker.py 
b/providers/amazon/src/airflow/providers/amazon/aws/utils/sagemaker.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/utils/sagemaker.py rename to providers/amazon/src/airflow/providers/amazon/aws/utils/sagemaker.py diff --git a/providers/src/airflow/providers/amazon/aws/utils/sqs.py b/providers/amazon/src/airflow/providers/amazon/aws/utils/sqs.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/utils/sqs.py rename to providers/amazon/src/airflow/providers/amazon/aws/utils/sqs.py diff --git a/providers/src/airflow/providers/amazon/aws/utils/suppress.py b/providers/amazon/src/airflow/providers/amazon/aws/utils/suppress.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/utils/suppress.py rename to providers/amazon/src/airflow/providers/amazon/aws/utils/suppress.py diff --git a/providers/src/airflow/providers/amazon/aws/utils/tags.py b/providers/amazon/src/airflow/providers/amazon/aws/utils/tags.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/utils/tags.py rename to providers/amazon/src/airflow/providers/amazon/aws/utils/tags.py diff --git a/providers/src/airflow/providers/amazon/aws/utils/task_log_fetcher.py b/providers/amazon/src/airflow/providers/amazon/aws/utils/task_log_fetcher.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/utils/task_log_fetcher.py rename to providers/amazon/src/airflow/providers/amazon/aws/utils/task_log_fetcher.py diff --git a/providers/src/airflow/providers/amazon/aws/utils/waiter.py b/providers/amazon/src/airflow/providers/amazon/aws/utils/waiter.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/utils/waiter.py rename to providers/amazon/src/airflow/providers/amazon/aws/utils/waiter.py diff --git a/providers/src/airflow/providers/amazon/aws/utils/waiter_with_logging.py b/providers/amazon/src/airflow/providers/amazon/aws/utils/waiter_with_logging.py similarity 
index 100% rename from providers/src/airflow/providers/amazon/aws/utils/waiter_with_logging.py rename to providers/amazon/src/airflow/providers/amazon/aws/utils/waiter_with_logging.py diff --git a/providers/src/airflow/providers/amazon/aws/waiters/README.md b/providers/amazon/src/airflow/providers/amazon/aws/waiters/README.md similarity index 100% rename from providers/src/airflow/providers/amazon/aws/waiters/README.md rename to providers/amazon/src/airflow/providers/amazon/aws/waiters/README.md diff --git a/providers/src/airflow/providers/amazon/aws/waiters/__init__.py b/providers/amazon/src/airflow/providers/amazon/aws/waiters/__init__.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/waiters/__init__.py rename to providers/amazon/src/airflow/providers/amazon/aws/waiters/__init__.py diff --git a/providers/src/airflow/providers/amazon/aws/waiters/appflow.json b/providers/amazon/src/airflow/providers/amazon/aws/waiters/appflow.json similarity index 100% rename from providers/src/airflow/providers/amazon/aws/waiters/appflow.json rename to providers/amazon/src/airflow/providers/amazon/aws/waiters/appflow.json diff --git a/providers/src/airflow/providers/amazon/aws/waiters/athena.json b/providers/amazon/src/airflow/providers/amazon/aws/waiters/athena.json similarity index 100% rename from providers/src/airflow/providers/amazon/aws/waiters/athena.json rename to providers/amazon/src/airflow/providers/amazon/aws/waiters/athena.json diff --git a/providers/src/airflow/providers/amazon/aws/waiters/base_waiter.py b/providers/amazon/src/airflow/providers/amazon/aws/waiters/base_waiter.py similarity index 100% rename from providers/src/airflow/providers/amazon/aws/waiters/base_waiter.py rename to providers/amazon/src/airflow/providers/amazon/aws/waiters/base_waiter.py diff --git a/providers/src/airflow/providers/amazon/aws/waiters/batch.json b/providers/amazon/src/airflow/providers/amazon/aws/waiters/batch.json similarity index 100% rename from 
providers/src/airflow/providers/amazon/aws/waiters/batch.json rename to providers/amazon/src/airflow/providers/amazon/aws/waiters/batch.json diff --git a/providers/src/airflow/providers/amazon/aws/waiters/bedrock-agent.json b/providers/amazon/src/airflow/providers/amazon/aws/waiters/bedrock-agent.json similarity index 100% rename from providers/src/airflow/providers/amazon/aws/waiters/bedrock-agent.json rename to providers/amazon/src/airflow/providers/amazon/aws/waiters/bedrock-agent.json diff --git a/providers/src/airflow/providers/amazon/aws/waiters/bedrock.json b/providers/amazon/src/airflow/providers/amazon/aws/waiters/bedrock.json similarity index 100% rename from providers/src/airflow/providers/amazon/aws/waiters/bedrock.json rename to providers/amazon/src/airflow/providers/amazon/aws/waiters/bedrock.json diff --git a/providers/src/airflow/providers/amazon/aws/waiters/comprehend.json b/providers/amazon/src/airflow/providers/amazon/aws/waiters/comprehend.json similarity index 100% rename from providers/src/airflow/providers/amazon/aws/waiters/comprehend.json rename to providers/amazon/src/airflow/providers/amazon/aws/waiters/comprehend.json diff --git a/providers/src/airflow/providers/amazon/aws/waiters/databrew.json b/providers/amazon/src/airflow/providers/amazon/aws/waiters/databrew.json similarity index 100% rename from providers/src/airflow/providers/amazon/aws/waiters/databrew.json rename to providers/amazon/src/airflow/providers/amazon/aws/waiters/databrew.json diff --git a/providers/src/airflow/providers/amazon/aws/waiters/dms.json b/providers/amazon/src/airflow/providers/amazon/aws/waiters/dms.json similarity index 100% rename from providers/src/airflow/providers/amazon/aws/waiters/dms.json rename to providers/amazon/src/airflow/providers/amazon/aws/waiters/dms.json diff --git a/providers/src/airflow/providers/amazon/aws/waiters/dynamodb.json b/providers/amazon/src/airflow/providers/amazon/aws/waiters/dynamodb.json similarity index 100% rename from 
providers/src/airflow/providers/amazon/aws/waiters/dynamodb.json rename to providers/amazon/src/airflow/providers/amazon/aws/waiters/dynamodb.json diff --git a/providers/src/airflow/providers/amazon/aws/waiters/ecs.json b/providers/amazon/src/airflow/providers/amazon/aws/waiters/ecs.json similarity index 100% rename from providers/src/airflow/providers/amazon/aws/waiters/ecs.json rename to providers/amazon/src/airflow/providers/amazon/aws/waiters/ecs.json diff --git a/providers/src/airflow/providers/amazon/aws/waiters/eks.json b/providers/amazon/src/airflow/providers/amazon/aws/waiters/eks.json similarity index 100% rename from providers/src/airflow/providers/amazon/aws/waiters/eks.json rename to providers/amazon/src/airflow/providers/amazon/aws/waiters/eks.json diff --git a/providers/src/airflow/providers/amazon/aws/waiters/emr-containers.json b/providers/amazon/src/airflow/providers/amazon/aws/waiters/emr-containers.json similarity index 100% rename from providers/src/airflow/providers/amazon/aws/waiters/emr-containers.json rename to providers/amazon/src/airflow/providers/amazon/aws/waiters/emr-containers.json diff --git a/providers/src/airflow/providers/amazon/aws/waiters/emr-serverless.json b/providers/amazon/src/airflow/providers/amazon/aws/waiters/emr-serverless.json similarity index 100% rename from providers/src/airflow/providers/amazon/aws/waiters/emr-serverless.json rename to providers/amazon/src/airflow/providers/amazon/aws/waiters/emr-serverless.json diff --git a/providers/src/airflow/providers/amazon/aws/waiters/emr.json b/providers/amazon/src/airflow/providers/amazon/aws/waiters/emr.json similarity index 100% rename from providers/src/airflow/providers/amazon/aws/waiters/emr.json rename to providers/amazon/src/airflow/providers/amazon/aws/waiters/emr.json diff --git a/providers/src/airflow/providers/amazon/aws/waiters/glue.json b/providers/amazon/src/airflow/providers/amazon/aws/waiters/glue.json similarity index 100% rename from 
providers/src/airflow/providers/amazon/aws/waiters/glue.json rename to providers/amazon/src/airflow/providers/amazon/aws/waiters/glue.json diff --git a/providers/src/airflow/providers/amazon/aws/waiters/kinesisanalyticsv2.json b/providers/amazon/src/airflow/providers/amazon/aws/waiters/kinesisanalyticsv2.json similarity index 100% rename from providers/src/airflow/providers/amazon/aws/waiters/kinesisanalyticsv2.json rename to providers/amazon/src/airflow/providers/amazon/aws/waiters/kinesisanalyticsv2.json diff --git a/providers/src/airflow/providers/amazon/aws/waiters/neptune.json b/providers/amazon/src/airflow/providers/amazon/aws/waiters/neptune.json similarity index 100% rename from providers/src/airflow/providers/amazon/aws/waiters/neptune.json rename to providers/amazon/src/airflow/providers/amazon/aws/waiters/neptune.json diff --git a/providers/src/airflow/providers/amazon/aws/waiters/opensearchserverless.json b/providers/amazon/src/airflow/providers/amazon/aws/waiters/opensearchserverless.json similarity index 100% rename from providers/src/airflow/providers/amazon/aws/waiters/opensearchserverless.json rename to providers/amazon/src/airflow/providers/amazon/aws/waiters/opensearchserverless.json diff --git a/providers/src/airflow/providers/amazon/aws/waiters/rds.json b/providers/amazon/src/airflow/providers/amazon/aws/waiters/rds.json similarity index 100% rename from providers/src/airflow/providers/amazon/aws/waiters/rds.json rename to providers/amazon/src/airflow/providers/amazon/aws/waiters/rds.json diff --git a/providers/src/airflow/providers/amazon/aws/waiters/redshift.json b/providers/amazon/src/airflow/providers/amazon/aws/waiters/redshift.json similarity index 100% rename from providers/src/airflow/providers/amazon/aws/waiters/redshift.json rename to providers/amazon/src/airflow/providers/amazon/aws/waiters/redshift.json diff --git a/providers/src/airflow/providers/amazon/aws/waiters/sagemaker.json 
b/providers/amazon/src/airflow/providers/amazon/aws/waiters/sagemaker.json similarity index 100% rename from providers/src/airflow/providers/amazon/aws/waiters/sagemaker.json rename to providers/amazon/src/airflow/providers/amazon/aws/waiters/sagemaker.json diff --git a/providers/src/airflow/providers/amazon/aws/waiters/stepfunctions.json b/providers/amazon/src/airflow/providers/amazon/aws/waiters/stepfunctions.json similarity index 100% rename from providers/src/airflow/providers/amazon/aws/waiters/stepfunctions.json rename to providers/amazon/src/airflow/providers/amazon/aws/waiters/stepfunctions.json diff --git a/providers/amazon/src/airflow/providers/amazon/get_provider_info.py b/providers/amazon/src/airflow/providers/amazon/get_provider_info.py new file mode 100644 index 00000000000000..217f618c8667c8 --- /dev/null +++ b/providers/amazon/src/airflow/providers/amazon/get_provider_info.py @@ -0,0 +1,1374 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +# NOTE! THIS FILE IS AUTOMATICALLY GENERATED AND WILL BE OVERWRITTEN! 
+# +# IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE +# `get_provider_info_TEMPLATE.py.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY + + +def get_provider_info(): + return { + "package-name": "apache-airflow-providers-amazon", + "name": "Amazon", + "description": "Amazon integration (including `Amazon Web Services (AWS) `__).\n", + "state": "ready", + "source-date-epoch": 1734527035, + "versions": [ + "9.2.0", + "9.1.0", + "9.0.0", + "8.29.0", + "8.28.0", + "8.27.0", + "8.26.0", + "8.25.0", + "8.24.0", + "8.23.0", + "8.22.0", + "8.21.0", + "8.20.0", + "8.19.0", + "8.18.0", + "8.17.0", + "8.16.0", + "8.15.0", + "8.14.0", + "8.13.0", + "8.12.0", + "8.11.0", + "8.10.0", + "8.9.0", + "8.8.0", + "8.7.1", + "8.7.0", + "8.6.0", + "8.5.1", + "8.5.0", + "8.4.0", + "8.3.1", + "8.3.0", + "8.2.0", + "8.1.0", + "8.0.0", + "7.4.1", + "7.4.0", + "7.3.0", + "7.2.1", + "7.2.0", + "7.1.0", + "7.0.0", + "6.2.0", + "6.1.0", + "6.0.0", + "5.1.0", + "5.0.0", + "4.1.0", + "4.0.0", + "3.4.0", + "3.3.0", + "3.2.0", + "3.1.1", + "3.0.0", + "2.6.0", + "2.5.0", + "2.4.0", + "2.3.0", + "2.2.0", + "2.1.0", + "2.0.0", + "1.4.0", + "1.3.0", + "1.2.0", + "1.1.0", + "1.0.0", + ], + "integrations": [ + { + "integration-name": "Amazon Athena", + "external-doc-url": "https://aws.amazon.com/athena/", + "logo": "/docs/integration-logos/Amazon-Athena_light-bg@4x.png", + "how-to-guide": [ + "/docs/apache-airflow-providers-amazon/operators/athena/athena_boto.rst", + "/docs/apache-airflow-providers-amazon/operators/athena/athena_sql.rst", + ], + "tags": ["aws"], + }, + { + "integration-name": "Amazon Bedrock", + "external-doc-url": "https://aws.amazon.com/bedrock/", + "logo": "/docs/integration-logos/Amazon-Bedrock_light-bg@4x.png", + "how-to-guide": ["/docs/apache-airflow-providers-amazon/operators/bedrock.rst"], + "tags": ["aws"], + }, + { + "integration-name": "Amazon Chime", + "external-doc-url": "https://aws.amazon.com/chime/", + "logo": 
"/docs/integration-logos/Amazon-Chime-light-bg.png", + "tags": ["aws"], + }, + { + "integration-name": "Amazon CloudFormation", + "external-doc-url": "https://aws.amazon.com/cloudformation/", + "logo": "/docs/integration-logos/AWS-CloudFormation_light-bg@4x.png", + "how-to-guide": ["/docs/apache-airflow-providers-amazon/operators/cloudformation.rst"], + "tags": ["aws"], + }, + { + "integration-name": "Amazon CloudWatch Logs", + "external-doc-url": "https://aws.amazon.com/cloudwatch/", + "logo": "/docs/integration-logos/Amazon-CloudWatch_light-bg@4x.png", + "tags": ["aws"], + }, + { + "integration-name": "Amazon Comprehend", + "external-doc-url": "https://aws.amazon.com/comprehend/", + "logo": "/docs/integration-logos/Amazon-Comprehend_light-bg@4x.png", + "how-to-guide": ["/docs/apache-airflow-providers-amazon/operators/comprehend.rst"], + "tags": ["aws"], + }, + { + "integration-name": "Amazon DataSync", + "external-doc-url": "https://aws.amazon.com/datasync/", + "how-to-guide": ["/docs/apache-airflow-providers-amazon/operators/datasync.rst"], + "tags": ["aws"], + }, + { + "integration-name": "Amazon DynamoDB", + "external-doc-url": "https://aws.amazon.com/dynamodb/", + "logo": "/docs/integration-logos/Amazon-DynamoDB_light-bg@4x.png", + "how-to-guide": ["/docs/apache-airflow-providers-amazon/operators/dynamodb.rst"], + "tags": ["aws"], + }, + { + "integration-name": "Amazon EC2", + "external-doc-url": "https://aws.amazon.com/ec2/", + "logo": "/docs/integration-logos/Amazon-EC2_light-bg@4x.png", + "how-to-guide": ["/docs/apache-airflow-providers-amazon/operators/ec2.rst"], + "tags": ["aws"], + }, + { + "integration-name": "Amazon Elastic Container Registry (ECR)", + "external-doc-url": "https://aws.amazon.com/ecr/", + "logo": "/docs/integration-logos/Amazon-Elastic-Container-Registry_light-bg@4x.png", + "tags": ["aws"], + }, + { + "integration-name": "Amazon ECS", + "external-doc-url": "https://aws.amazon.com/ecs/", + "logo": 
"/docs/integration-logos/Amazon-Elastic-Container-Service_light-bg@4x.png", + "tags": ["aws"], + }, + { + "integration-name": "Amazon Elastic Kubernetes Service (EKS)", + "external-doc-url": "https://aws.amazon.com/eks/", + "logo": "/docs/integration-logos/Amazon-Elastic-Kubernetes-Service_light-bg@4x.png", + "how-to-guide": ["/docs/apache-airflow-providers-amazon/operators/eks.rst"], + "tags": ["aws"], + }, + { + "integration-name": "Amazon ElastiCache", + "external-doc-url": "https://aws.amazon.com/elasticache/redis//", + "logo": "/docs/integration-logos/Amazon-ElastiCache_light-bg@4x.png", + "tags": ["aws"], + }, + { + "integration-name": "Amazon EMR", + "external-doc-url": "https://aws.amazon.com/emr/", + "how-to-guide": ["/docs/apache-airflow-providers-amazon/operators/emr/emr.rst"], + "logo": "/docs/integration-logos/Amazon-EMR_light-bg@4x.png", + "tags": ["aws"], + }, + { + "integration-name": "Amazon EMR on EKS", + "external-doc-url": "https://docs.aws.amazon.com/emr/latest/EMR-on-EKS-DevelopmentGuide/emr-eks.html", + "how-to-guide": ["/docs/apache-airflow-providers-amazon/operators/emr/emr_eks.rst"], + "logo": "/docs/integration-logos/Amazon-EMR_light-bg@4x.png", + "tags": ["aws"], + }, + { + "integration-name": "Amazon EMR Serverless", + "external-doc-url": "https://docs.aws.amazon.com/emr/latest/EMR-Serverless-UserGuide/emr-serverless.html", + "how-to-guide": ["/docs/apache-airflow-providers-amazon/operators/emr/emr_serverless.rst"], + "logo": "/docs/integration-logos/Amazon-EMR_light-bg@4x.png", + "tags": ["aws"], + }, + { + "integration-name": "Amazon EventBridge", + "external-doc-url": "https://docs.aws.amazon.com/eventbridge/latest/APIReference/Welcome.html", + "how-to-guide": ["/docs/apache-airflow-providers-amazon/operators/eventbridge.rst"], + "logo": "/docs/integration-logos/Amazon-EventBridge_64.png", + "tags": ["aws"], + }, + { + "integration-name": "Amazon Glacier", + "external-doc-url": "https://aws.amazon.com/glacier/", + "logo": 
"/docs/integration-logos/Amazon-S3-Glacier_light-bg@4x.png", + "how-to-guide": ["/docs/apache-airflow-providers-amazon/operators/s3/glacier.rst"], + "tags": ["aws"], + }, + { + "integration-name": "Amazon Kinesis Data Firehose", + "external-doc-url": "https://aws.amazon.com/kinesis/data-firehose/", + "logo": "/docs/integration-logos/Amazon-Kinesis-Data-Firehose_light-bg@4x.png", + "tags": ["aws"], + }, + { + "integration-name": "Amazon Managed Service for Apache Flink", + "how-to-guide": ["/docs/apache-airflow-providers-amazon/operators/kinesis_analytics.rst"], + "external-doc-url": "https://aws.amazon.com/managed-service-apache-flink/", + "logo": "/docs/integration-logos/Amazon-Kinesis-Analytics_light-bg@4x.png", + "tags": ["aws"], + }, + { + "integration-name": "Amazon OpenSearch Serverless", + "how-to-guide": ["/docs/apache-airflow-providers-amazon/operators/opensearchserverless.rst"], + "external-doc-url": "https://aws.amazon.com/opensearchserverless/", + "logo": "/docs/integration-logos/Amazon-OpenSearch_light-bg@4x.png", + "tags": ["aws"], + }, + { + "integration-name": "Amazon RDS", + "external-doc-url": "https://aws.amazon.com/rds/", + "logo": "/docs/integration-logos/Amazon-RDS_light-bg@4x.png", + "how-to-guide": ["/docs/apache-airflow-providers-amazon/operators/rds.rst"], + "tags": ["aws"], + }, + { + "integration-name": "Amazon Redshift", + "external-doc-url": "https://aws.amazon.com/redshift/", + "logo": "/docs/integration-logos/Amazon-Redshift_light-bg@4x.png", + "how-to-guide": [ + "/docs/apache-airflow-providers-amazon/operators/redshift/redshift_sql.rst", + "/docs/apache-airflow-providers-amazon/operators/redshift/redshift_cluster.rst", + ], + "tags": ["aws"], + }, + { + "integration-name": "Amazon Redshift Data", + "external-doc-url": "https://aws.amazon.com/redshift/", + "logo": "/docs/integration-logos/Amazon-Redshift_light-bg@4x.png", + "how-to-guide": [ + "/docs/apache-airflow-providers-amazon/operators/redshift/redshift_data.rst" + ], + 
"tags": ["aws"], + }, + { + "integration-name": "Amazon SageMaker", + "external-doc-url": "https://aws.amazon.com/sagemaker/", + "logo": "/docs/integration-logos/Amazon-SageMaker_light-bg@4x.png", + "how-to-guide": ["/docs/apache-airflow-providers-amazon/operators/sagemaker.rst"], + "tags": ["aws"], + }, + { + "integration-name": "Amazon SecretsManager", + "external-doc-url": "https://aws.amazon.com/secrets-manager/", + "logo": "/docs/integration-logos/AWS-Secrets-Manager_light-bg@4x.png", + "tags": ["aws"], + }, + { + "integration-name": "Amazon Simple Email Service (SES)", + "external-doc-url": "https://aws.amazon.com/ses/", + "logo": "/docs/integration-logos/Amazon-Simple-Email-Service-SES_light-bg@4x.png", + "how-to-guide": ["/docs/apache-airflow-providers-amazon/operators/ecs.rst"], + "tags": ["aws"], + }, + { + "integration-name": "Amazon Simple Notification Service (SNS)", + "external-doc-url": "https://aws.amazon.com/sns/", + "logo": "/docs/integration-logos/Amazon-Simple-Notification-Service-SNS_light-bg@4x.png", + "how-to-guide": ["/docs/apache-airflow-providers-amazon/operators/sns.rst"], + "tags": ["aws"], + }, + { + "integration-name": "Amazon Simple Queue Service (SQS)", + "external-doc-url": "https://aws.amazon.com/sqs/", + "logo": "/docs/integration-logos/Amazon-Simple-Queue-Service-SQS_light-bg@4x.png", + "how-to-guide": ["/docs/apache-airflow-providers-amazon/operators/sqs.rst"], + "tags": ["aws"], + }, + { + "integration-name": "Amazon Simple Storage Service (S3)", + "external-doc-url": "https://aws.amazon.com/s3/", + "logo": "/docs/integration-logos/Amazon-Simple-Storage-Service-S3_light-bg@4x.png", + "how-to-guide": ["/docs/apache-airflow-providers-amazon/operators/s3/s3.rst"], + "tags": ["aws"], + }, + { + "integration-name": "Amazon Systems Manager (SSM)", + "external-doc-url": "https://aws.amazon.com/systems-manager/", + "logo": "/docs/integration-logos/AWS-Systems-Manager_light-bg@4x.png", + "tags": ["aws"], + }, + { + "integration-name": 
"Amazon Web Services", + "external-doc-url": "https://aws.amazon.com/", + "logo": "/docs/integration-logos/AWS-Cloud-alt_light-bg@4x.png", + "tags": ["aws"], + }, + { + "integration-name": "AWS Batch", + "external-doc-url": "https://aws.amazon.com/batch/", + "logo": "/docs/integration-logos/AWS-Batch_light-bg@4x.png", + "how-to-guide": ["/docs/apache-airflow-providers-amazon/operators/batch.rst"], + "tags": ["aws"], + }, + { + "integration-name": "AWS DataSync", + "external-doc-url": "https://aws.amazon.com/datasync/", + "logo": "/docs/integration-logos/AWS-DataSync_light-bg@4x.png", + "tags": ["aws"], + }, + { + "integration-name": "AWS Glue", + "external-doc-url": "https://aws.amazon.com/glue/", + "logo": "/docs/integration-logos/AWS-Glue_light-bg@4x.png", + "how-to-guide": ["/docs/apache-airflow-providers-amazon/operators/glue.rst"], + "tags": ["aws"], + }, + { + "integration-name": "AWS Lambda", + "external-doc-url": "https://aws.amazon.com/lambda/", + "logo": "/docs/integration-logos/AWS-Lambda_light-bg@4x.png", + "how-to-guide": ["/docs/apache-airflow-providers-amazon/operators/lambda.rst"], + "tags": ["aws"], + }, + { + "integration-name": "AWS Step Functions", + "external-doc-url": "https://aws.amazon.com/step-functions/", + "logo": "/docs/integration-logos/AWS-Step-Functions_light-bg@4x.png", + "how-to-guide": ["/docs/apache-airflow-providers-amazon/operators/step_functions.rst"], + "tags": ["aws"], + }, + { + "integration-name": "AWS Database Migration Service", + "external-doc-url": "https://aws.amazon.com/dms/", + "logo": "/docs/integration-logos/AWS-Database-Migration-Service_64@5x.png", + "how-to-guide": ["/docs/apache-airflow-providers-amazon/operators/dms.rst"], + "tags": ["aws"], + }, + { + "integration-name": "Amazon QuickSight", + "external-doc-url": "https://aws.amazon.com/quicksight/", + "logo": "/docs/integration-logos/Amazon-Quicksight_light-bg@4x.png", + "how-to-guide": ["/docs/apache-airflow-providers-amazon/operators/quicksight.rst"], + 
"tags": ["aws"], + }, + { + "integration-name": "AWS Security Token Service (STS)", + "external-doc-url": "https://docs.aws.amazon.com/STS/latest/APIReference/welcome.html", + "logo": "/docs/integration-logos/AWS-STS_light-bg@4x.png", + "tags": ["aws"], + }, + { + "integration-name": "Amazon Appflow", + "external-doc-url": "https://docs.aws.amazon.com/appflow/1.0/APIReference/Welcome.html", + "logo": "/docs/integration-logos/Amazon_AppFlow_light.png", + "how-to-guide": ["/docs/apache-airflow-providers-amazon/operators/appflow.rst"], + "tags": ["aws"], + }, + { + "integration-name": "AWS Glue DataBrew", + "external-doc-url": "https://docs.aws.amazon.com/databrew/latest/dg/what-is.html", + "how-to-guide": ["/docs/apache-airflow-providers-amazon/operators/glue_databrew.rst"], + "logo": "/docs/integration-logos/AWS-Glue-DataBrew_64.png", + "tags": ["aws"], + }, + { + "integration-name": "Amazon Verified Permissions", + "external-doc-url": "https://aws.amazon.com/verified-permissions/", + "logo": "/docs/integration-logos/Amazon-Verified-Permissions.png", + "tags": ["aws"], + }, + { + "integration-name": "Amazon Neptune", + "external-doc-url": "https://aws.amazon.com/neptune/", + "logo": "/docs/integration-logos/Amazon-Neptune_64.png", + "how-to-guide": ["/docs/apache-airflow-providers-amazon/operators/neptune.rst"], + "tags": ["aws"], + }, + ], + "operators": [ + { + "integration-name": "Amazon Athena", + "python-modules": ["airflow.providers.amazon.aws.operators.athena"], + }, + { + "integration-name": "Amazon Web Services", + "python-modules": ["airflow.providers.amazon.aws.operators.base_aws"], + }, + { + "integration-name": "AWS Batch", + "python-modules": ["airflow.providers.amazon.aws.operators.batch"], + }, + { + "integration-name": "Amazon Bedrock", + "python-modules": ["airflow.providers.amazon.aws.operators.bedrock"], + }, + { + "integration-name": "Amazon CloudFormation", + "python-modules": ["airflow.providers.amazon.aws.operators.cloud_formation"], + }, + { 
+ "integration-name": "Amazon Comprehend", + "python-modules": ["airflow.providers.amazon.aws.operators.comprehend"], + }, + { + "integration-name": "Amazon DataSync", + "python-modules": ["airflow.providers.amazon.aws.operators.datasync"], + }, + { + "integration-name": "AWS Database Migration Service", + "python-modules": ["airflow.providers.amazon.aws.operators.dms"], + }, + { + "integration-name": "Amazon EC2", + "python-modules": ["airflow.providers.amazon.aws.operators.ec2"], + }, + { + "integration-name": "Amazon ECS", + "python-modules": ["airflow.providers.amazon.aws.operators.ecs"], + }, + { + "integration-name": "Amazon Elastic Kubernetes Service (EKS)", + "python-modules": ["airflow.providers.amazon.aws.operators.eks"], + }, + { + "integration-name": "Amazon EMR", + "python-modules": ["airflow.providers.amazon.aws.operators.emr"], + }, + { + "integration-name": "Amazon EMR on EKS", + "python-modules": ["airflow.providers.amazon.aws.operators.emr"], + }, + { + "integration-name": "Amazon EventBridge", + "python-modules": ["airflow.providers.amazon.aws.operators.eventbridge"], + }, + { + "integration-name": "Amazon Glacier", + "python-modules": ["airflow.providers.amazon.aws.operators.glacier"], + }, + { + "integration-name": "AWS Glue", + "python-modules": [ + "airflow.providers.amazon.aws.operators.glue", + "airflow.providers.amazon.aws.operators.glue_crawler", + ], + }, + { + "integration-name": "AWS Lambda", + "python-modules": ["airflow.providers.amazon.aws.operators.lambda_function"], + }, + { + "integration-name": "Amazon Managed Service for Apache Flink", + "python-modules": ["airflow.providers.amazon.aws.operators.kinesis_analytics"], + }, + { + "integration-name": "Amazon Simple Storage Service (S3)", + "python-modules": ["airflow.providers.amazon.aws.operators.s3"], + }, + { + "integration-name": "Amazon SageMaker", + "python-modules": ["airflow.providers.amazon.aws.operators.sagemaker"], + }, + { + "integration-name": "Amazon Simple 
Notification Service (SNS)", + "python-modules": ["airflow.providers.amazon.aws.operators.sns"], + }, + { + "integration-name": "Amazon Simple Queue Service (SQS)", + "python-modules": ["airflow.providers.amazon.aws.operators.sqs"], + }, + { + "integration-name": "AWS Step Functions", + "python-modules": ["airflow.providers.amazon.aws.operators.step_function"], + }, + { + "integration-name": "Amazon RDS", + "python-modules": ["airflow.providers.amazon.aws.operators.rds"], + }, + { + "integration-name": "Amazon Redshift", + "python-modules": [ + "airflow.providers.amazon.aws.operators.redshift_cluster", + "airflow.providers.amazon.aws.operators.redshift_data", + ], + }, + { + "integration-name": "Amazon QuickSight", + "python-modules": ["airflow.providers.amazon.aws.operators.quicksight"], + }, + { + "integration-name": "Amazon Appflow", + "python-modules": ["airflow.providers.amazon.aws.operators.appflow"], + }, + { + "integration-name": "AWS Glue DataBrew", + "python-modules": ["airflow.providers.amazon.aws.operators.glue_databrew"], + }, + { + "integration-name": "Amazon Neptune", + "python-modules": ["airflow.providers.amazon.aws.operators.neptune"], + }, + ], + "sensors": [ + { + "integration-name": "Amazon Athena", + "python-modules": ["airflow.providers.amazon.aws.sensors.athena"], + }, + { + "integration-name": "Amazon Web Services", + "python-modules": ["airflow.providers.amazon.aws.sensors.base_aws"], + }, + { + "integration-name": "AWS Batch", + "python-modules": ["airflow.providers.amazon.aws.sensors.batch"], + }, + { + "integration-name": "Amazon Bedrock", + "python-modules": ["airflow.providers.amazon.aws.sensors.bedrock"], + }, + { + "integration-name": "Amazon CloudFormation", + "python-modules": ["airflow.providers.amazon.aws.sensors.cloud_formation"], + }, + { + "integration-name": "Amazon Comprehend", + "python-modules": ["airflow.providers.amazon.aws.sensors.comprehend"], + }, + { + "integration-name": "AWS Database Migration Service", + 
"python-modules": ["airflow.providers.amazon.aws.sensors.dms"], + }, + { + "integration-name": "Amazon DynamoDB", + "python-modules": ["airflow.providers.amazon.aws.sensors.dynamodb"], + }, + { + "integration-name": "Amazon EC2", + "python-modules": ["airflow.providers.amazon.aws.sensors.ec2"], + }, + { + "integration-name": "Amazon ECS", + "python-modules": ["airflow.providers.amazon.aws.sensors.ecs"], + }, + { + "integration-name": "Amazon Elastic Kubernetes Service (EKS)", + "python-modules": ["airflow.providers.amazon.aws.sensors.eks"], + }, + { + "integration-name": "Amazon EMR", + "python-modules": ["airflow.providers.amazon.aws.sensors.emr"], + }, + { + "integration-name": "Amazon EMR on EKS", + "python-modules": ["airflow.providers.amazon.aws.sensors.emr"], + }, + { + "integration-name": "Amazon Glacier", + "python-modules": ["airflow.providers.amazon.aws.sensors.glacier"], + }, + { + "integration-name": "AWS Glue", + "python-modules": [ + "airflow.providers.amazon.aws.sensors.glue", + "airflow.providers.amazon.aws.sensors.glue_crawler", + "airflow.providers.amazon.aws.sensors.glue_catalog_partition", + ], + }, + { + "integration-name": "AWS Lambda", + "python-modules": ["airflow.providers.amazon.aws.sensors.lambda_function"], + }, + { + "integration-name": "Amazon Managed Service for Apache Flink", + "python-modules": ["airflow.providers.amazon.aws.sensors.kinesis_analytics"], + }, + { + "integration-name": "Amazon OpenSearch Serverless", + "python-modules": ["airflow.providers.amazon.aws.sensors.opensearch_serverless"], + }, + { + "integration-name": "Amazon RDS", + "python-modules": ["airflow.providers.amazon.aws.sensors.rds"], + }, + { + "integration-name": "Amazon Redshift", + "python-modules": ["airflow.providers.amazon.aws.sensors.redshift_cluster"], + }, + { + "integration-name": "Amazon Simple Storage Service (S3)", + "python-modules": ["airflow.providers.amazon.aws.sensors.s3"], + }, + { + "integration-name": "Amazon SageMaker", + 
"python-modules": ["airflow.providers.amazon.aws.sensors.sagemaker"], + }, + { + "integration-name": "Amazon Simple Queue Service (SQS)", + "python-modules": ["airflow.providers.amazon.aws.sensors.sqs"], + }, + { + "integration-name": "AWS Step Functions", + "python-modules": ["airflow.providers.amazon.aws.sensors.step_function"], + }, + { + "integration-name": "Amazon QuickSight", + "python-modules": ["airflow.providers.amazon.aws.sensors.quicksight"], + }, + ], + "asset-uris": [ + { + "schemes": ["s3"], + "handler": "airflow.providers.amazon.aws.assets.s3.sanitize_uri", + "to_openlineage_converter": "airflow.providers.amazon.aws.assets.s3.convert_asset_to_openlineage", + "factory": "airflow.providers.amazon.aws.assets.s3.create_asset", + } + ], + "dataset-uris": [ + { + "schemes": ["s3"], + "handler": "airflow.providers.amazon.aws.assets.s3.sanitize_uri", + "to_openlineage_converter": "airflow.providers.amazon.aws.assets.s3.convert_asset_to_openlineage", + "factory": "airflow.providers.amazon.aws.assets.s3.create_asset", + } + ], + "filesystems": ["airflow.providers.amazon.aws.fs.s3"], + "hooks": [ + { + "integration-name": "Amazon Athena", + "python-modules": [ + "airflow.providers.amazon.aws.hooks.athena", + "airflow.providers.amazon.aws.hooks.athena_sql", + ], + }, + { + "integration-name": "Amazon Bedrock", + "python-modules": ["airflow.providers.amazon.aws.hooks.bedrock"], + }, + { + "integration-name": "Amazon Chime", + "python-modules": ["airflow.providers.amazon.aws.hooks.chime"], + }, + { + "integration-name": "Amazon Comprehend", + "python-modules": ["airflow.providers.amazon.aws.hooks.comprehend"], + }, + { + "integration-name": "Amazon DynamoDB", + "python-modules": ["airflow.providers.amazon.aws.hooks.dynamodb"], + }, + { + "integration-name": "Amazon Web Services", + "python-modules": ["airflow.providers.amazon.aws.hooks.base_aws"], + }, + { + "integration-name": "AWS Batch", + "python-modules": [ + "airflow.providers.amazon.aws.hooks.batch_client", 
+ "airflow.providers.amazon.aws.hooks.batch_waiters", + ], + }, + { + "integration-name": "Amazon CloudFormation", + "python-modules": ["airflow.providers.amazon.aws.hooks.cloud_formation"], + }, + { + "integration-name": "Amazon DataSync", + "python-modules": ["airflow.providers.amazon.aws.hooks.datasync"], + }, + { + "integration-name": "AWS Database Migration Service", + "python-modules": ["airflow.providers.amazon.aws.hooks.dms"], + }, + {"integration-name": "Amazon EC2", "python-modules": ["airflow.providers.amazon.aws.hooks.ec2"]}, + { + "integration-name": "Amazon Elastic Container Registry (ECR)", + "python-modules": ["airflow.providers.amazon.aws.hooks.ecr"], + }, + {"integration-name": "Amazon ECS", "python-modules": ["airflow.providers.amazon.aws.hooks.ecs"]}, + { + "integration-name": "Amazon ElastiCache", + "python-modules": ["airflow.providers.amazon.aws.hooks.elasticache_replication_group"], + }, + { + "integration-name": "Amazon Elastic Kubernetes Service (EKS)", + "python-modules": ["airflow.providers.amazon.aws.hooks.eks"], + }, + {"integration-name": "Amazon EMR", "python-modules": ["airflow.providers.amazon.aws.hooks.emr"]}, + { + "integration-name": "Amazon EMR on EKS", + "python-modules": ["airflow.providers.amazon.aws.hooks.emr"], + }, + { + "integration-name": "Amazon EventBridge", + "python-modules": ["airflow.providers.amazon.aws.hooks.eventbridge"], + }, + { + "integration-name": "Amazon Glacier", + "python-modules": ["airflow.providers.amazon.aws.hooks.glacier"], + }, + { + "integration-name": "AWS Glue", + "python-modules": [ + "airflow.providers.amazon.aws.hooks.glue", + "airflow.providers.amazon.aws.hooks.glue_crawler", + "airflow.providers.amazon.aws.hooks.glue_catalog", + ], + }, + { + "integration-name": "Amazon Kinesis Data Firehose", + "python-modules": ["airflow.providers.amazon.aws.hooks.kinesis"], + }, + { + "integration-name": "AWS Lambda", + "python-modules": ["airflow.providers.amazon.aws.hooks.lambda_function"], + }, + { + 
"integration-name": "Amazon Managed Service for Apache Flink", + "python-modules": ["airflow.providers.amazon.aws.hooks.kinesis_analytics"], + }, + { + "integration-name": "Amazon CloudWatch Logs", + "python-modules": ["airflow.providers.amazon.aws.hooks.logs"], + }, + { + "integration-name": "Amazon OpenSearch Serverless", + "python-modules": ["airflow.providers.amazon.aws.hooks.opensearch_serverless"], + }, + {"integration-name": "Amazon RDS", "python-modules": ["airflow.providers.amazon.aws.hooks.rds"]}, + { + "integration-name": "Amazon Redshift", + "python-modules": [ + "airflow.providers.amazon.aws.hooks.redshift_sql", + "airflow.providers.amazon.aws.hooks.redshift_cluster", + "airflow.providers.amazon.aws.hooks.redshift_data", + ], + }, + { + "integration-name": "Amazon Simple Storage Service (S3)", + "python-modules": ["airflow.providers.amazon.aws.hooks.s3"], + }, + { + "integration-name": "Amazon SageMaker", + "python-modules": ["airflow.providers.amazon.aws.hooks.sagemaker"], + }, + { + "integration-name": "Amazon Simple Email Service (SES)", + "python-modules": ["airflow.providers.amazon.aws.hooks.ses"], + }, + { + "integration-name": "Amazon Systems Manager (SSM)", + "python-modules": ["airflow.providers.amazon.aws.hooks.ssm"], + }, + { + "integration-name": "Amazon SecretsManager", + "python-modules": ["airflow.providers.amazon.aws.hooks.secrets_manager"], + }, + { + "integration-name": "Amazon Simple Notification Service (SNS)", + "python-modules": ["airflow.providers.amazon.aws.hooks.sns"], + }, + { + "integration-name": "Amazon Simple Queue Service (SQS)", + "python-modules": ["airflow.providers.amazon.aws.hooks.sqs"], + }, + { + "integration-name": "AWS Step Functions", + "python-modules": ["airflow.providers.amazon.aws.hooks.step_function"], + }, + { + "integration-name": "Amazon QuickSight", + "python-modules": ["airflow.providers.amazon.aws.hooks.quicksight"], + }, + { + "integration-name": "AWS Security Token Service (STS)", + 
"python-modules": ["airflow.providers.amazon.aws.hooks.sts"], + }, + { + "integration-name": "Amazon Appflow", + "python-modules": ["airflow.providers.amazon.aws.hooks.appflow"], + }, + { + "integration-name": "AWS Glue DataBrew", + "python-modules": ["airflow.providers.amazon.aws.hooks.glue_databrew"], + }, + { + "integration-name": "Amazon Verified Permissions", + "python-modules": ["airflow.providers.amazon.aws.hooks.verified_permissions"], + }, + { + "integration-name": "Amazon Neptune", + "python-modules": ["airflow.providers.amazon.aws.hooks.neptune"], + }, + ], + "triggers": [ + { + "integration-name": "Amazon Web Services", + "python-modules": ["airflow.providers.amazon.aws.triggers.base"], + }, + { + "integration-name": "Amazon Athena", + "python-modules": ["airflow.providers.amazon.aws.triggers.athena"], + }, + { + "integration-name": "AWS Batch", + "python-modules": ["airflow.providers.amazon.aws.triggers.batch"], + }, + { + "integration-name": "Amazon Bedrock", + "python-modules": ["airflow.providers.amazon.aws.triggers.bedrock"], + }, + { + "integration-name": "Amazon Comprehend", + "python-modules": ["airflow.providers.amazon.aws.triggers.comprehend"], + }, + { + "integration-name": "Amazon EC2", + "python-modules": ["airflow.providers.amazon.aws.triggers.ec2"], + }, + { + "integration-name": "AWS Lambda", + "python-modules": ["airflow.providers.amazon.aws.triggers.lambda_function"], + }, + { + "integration-name": "Amazon Managed Service for Apache Flink", + "python-modules": ["airflow.providers.amazon.aws.triggers.kinesis_analytics"], + }, + { + "integration-name": "Amazon OpenSearch Serverless", + "python-modules": ["airflow.providers.amazon.aws.triggers.opensearch_serverless"], + }, + { + "integration-name": "Amazon Redshift", + "python-modules": [ + "airflow.providers.amazon.aws.triggers.redshift_cluster", + "airflow.providers.amazon.aws.triggers.redshift_data", + ], + }, + { + "integration-name": "Amazon SageMaker", + "python-modules": 
["airflow.providers.amazon.aws.triggers.sagemaker"], + }, + { + "integration-name": "AWS Glue", + "python-modules": [ + "airflow.providers.amazon.aws.triggers.glue", + "airflow.providers.amazon.aws.triggers.glue_crawler", + ], + }, + { + "integration-name": "Amazon Simple Storage Service (S3)", + "python-modules": ["airflow.providers.amazon.aws.triggers.s3"], + }, + { + "integration-name": "Amazon EMR", + "python-modules": ["airflow.providers.amazon.aws.triggers.emr"], + }, + { + "integration-name": "Amazon Elastic Kubernetes Service (EKS)", + "python-modules": ["airflow.providers.amazon.aws.triggers.eks"], + }, + { + "integration-name": "Amazon ECS", + "python-modules": ["airflow.providers.amazon.aws.triggers.ecs"], + }, + { + "integration-name": "Amazon RDS", + "python-modules": ["airflow.providers.amazon.aws.triggers.rds"], + }, + { + "integration-name": "AWS Step Functions", + "python-modules": ["airflow.providers.amazon.aws.triggers.step_function"], + }, + { + "integration-name": "Amazon Simple Queue Service (SQS)", + "python-modules": ["airflow.providers.amazon.aws.triggers.sqs"], + }, + { + "integration-name": "AWS Glue DataBrew", + "python-modules": ["airflow.providers.amazon.aws.triggers.glue_databrew"], + }, + { + "integration-name": "Amazon Neptune", + "python-modules": ["airflow.providers.amazon.aws.triggers.neptune"], + }, + { + "integration-name": "AWS Database Migration Service", + "python-modules": ["airflow.providers.amazon.aws.triggers.dms"], + }, + ], + "transfers": [ + { + "source-integration-name": "Amazon DynamoDB", + "target-integration-name": "Amazon Simple Storage Service (S3)", + "how-to-guide": "/docs/apache-airflow-providers-amazon/transfer/dynamodb_to_s3.rst", + "python-module": "airflow.providers.amazon.aws.transfers.dynamodb_to_s3", + }, + { + "source-integration-name": "Google Cloud Storage (GCS)", + "target-integration-name": "Amazon Simple Storage Service (S3)", + "how-to-guide": 
"/docs/apache-airflow-providers-amazon/transfer/gcs_to_s3.rst", + "python-module": "airflow.providers.amazon.aws.transfers.gcs_to_s3", + }, + { + "source-integration-name": "Amazon Glacier", + "target-integration-name": "Google Cloud Storage (GCS)", + "how-to-guide": "/docs/apache-airflow-providers-amazon/transfer/glacier_to_gcs.rst", + "python-module": "airflow.providers.amazon.aws.transfers.glacier_to_gcs", + }, + { + "source-integration-name": "Google", + "target-integration-name": "Amazon Simple Storage Service (S3)", + "how-to-guide": "/docs/apache-airflow-providers-amazon/transfer/google_api_to_s3.rst", + "python-module": "airflow.providers.amazon.aws.transfers.google_api_to_s3", + }, + { + "source-integration-name": "Apache Hive", + "target-integration-name": "Amazon DynamoDB", + "how-to-guide": "/docs/apache-airflow-providers-amazon/transfer/hive_to_dynamodb.rst", + "python-module": "airflow.providers.amazon.aws.transfers.hive_to_dynamodb", + }, + { + "source-integration-name": "Hypertext Transfer Protocol (HTTP)", + "target-integration-name": "Amazon Simple Storage Service (S3)", + "how-to-guide": "/docs/apache-airflow-providers-amazon/transfer/http_to_s3.rst", + "python-module": "airflow.providers.amazon.aws.transfers.http_to_s3", + }, + { + "source-integration-name": "Internet Message Access Protocol (IMAP)", + "target-integration-name": "Amazon Simple Storage Service (S3)", + "how-to-guide": "/docs/apache-airflow-providers-amazon/transfer/imap_attachment_to_s3.rst", + "python-module": "airflow.providers.amazon.aws.transfers.imap_attachment_to_s3", + }, + { + "source-integration-name": "MongoDB", + "target-integration-name": "Amazon Simple Storage Service (S3)", + "how-to-guide": "/docs/apache-airflow-providers-amazon/transfer/mongo_to_s3.rst", + "python-module": "airflow.providers.amazon.aws.transfers.mongo_to_s3", + }, + { + "source-integration-name": "Amazon Redshift", + "target-integration-name": "Amazon Simple Storage Service (S3)", + 
"how-to-guide": "/docs/apache-airflow-providers-amazon/transfer/redshift_to_s3.rst", + "python-module": "airflow.providers.amazon.aws.transfers.redshift_to_s3", + }, + { + "source-integration-name": "Amazon Simple Storage Service (S3)", + "target-integration-name": "Amazon Redshift", + "how-to-guide": "/docs/apache-airflow-providers-amazon/transfer/s3_to_redshift.rst", + "python-module": "airflow.providers.amazon.aws.transfers.s3_to_redshift", + }, + { + "source-integration-name": "Amazon Simple Storage Service (S3)", + "target-integration-name": "SSH File Transfer Protocol (SFTP)", + "how-to-guide": "/docs/apache-airflow-providers-amazon/transfer/s3_to_sftp.rst", + "python-module": "airflow.providers.amazon.aws.transfers.s3_to_sftp", + }, + { + "source-integration-name": "SSH File Transfer Protocol (SFTP)", + "target-integration-name": "Amazon Simple Storage Service (S3)", + "how-to-guide": "/docs/apache-airflow-providers-amazon/transfer/sftp_to_s3.rst", + "python-module": "airflow.providers.amazon.aws.transfers.sftp_to_s3", + }, + { + "source-integration-name": "Amazon Simple Storage Service (S3)", + "target-integration-name": "File Transfer Protocol (FTP)", + "how-to-guide": "/docs/apache-airflow-providers-amazon/transfer/s3_to_ftp.rst", + "python-module": "airflow.providers.amazon.aws.transfers.s3_to_ftp", + }, + { + "source-integration-name": "Exasol", + "target-integration-name": "Amazon Simple Storage Service (S3)", + "python-module": "airflow.providers.amazon.aws.transfers.exasol_to_s3", + }, + { + "source-integration-name": "File Transfer Protocol (FTP)", + "target-integration-name": "Amazon Simple Storage Service (S3)", + "how-to-guide": "/docs/apache-airflow-providers-amazon/transfer/ftp_to_s3.rst", + "python-module": "airflow.providers.amazon.aws.transfers.ftp_to_s3", + }, + { + "source-integration-name": "Salesforce", + "target-integration-name": "Amazon Simple Storage Service (S3)", + "how-to-guide": 
"/docs/apache-airflow-providers-amazon/transfer/salesforce_to_s3.rst", + "python-module": "airflow.providers.amazon.aws.transfers.salesforce_to_s3", + }, + { + "source-integration-name": "Local", + "target-integration-name": "Amazon Simple Storage Service (S3)", + "how-to-guide": "/docs/apache-airflow-providers-amazon/transfer/local_to_s3.rst", + "python-module": "airflow.providers.amazon.aws.transfers.local_to_s3", + }, + { + "source-integration-name": "Common SQL", + "target-integration-name": "Amazon Simple Storage Service (S3)", + "how-to-guide": "/docs/apache-airflow-providers-amazon/transfer/sql_to_s3.rst", + "python-module": "airflow.providers.amazon.aws.transfers.sql_to_s3", + }, + { + "source-integration-name": "Amazon Simple Storage Service (S3)", + "target-integration-name": "Common SQL", + "how-to-guide": "/docs/apache-airflow-providers-amazon/transfer/s3_to_sql.rst", + "python-module": "airflow.providers.amazon.aws.transfers.s3_to_sql", + }, + { + "source-integration-name": "Amazon Web Services", + "target-integration-name": "Amazon Web Services", + "python-module": "airflow.providers.amazon.aws.transfers.base", + }, + { + "source-integration-name": "Microsoft Azure Blob Storage", + "target-integration-name": "Amazon Simple Storage Service (S3)", + "how-to-guide": "/docs/apache-airflow-providers-amazon/transfer/azure_blob_to_s3.rst", + "python-module": "airflow.providers.amazon.aws.transfers.azure_blob_to_s3", + }, + { + "source-integration-name": "Amazon Simple Storage Service (S3)", + "target-integration-name": "Amazon DynamoDB", + "how-to-guide": "/docs/apache-airflow-providers-amazon/transfer/s3_to_dynamodb.rst", + "python-module": "airflow.providers.amazon.aws.transfers.s3_to_dynamodb", + }, + ], + "extra-links": [ + "airflow.providers.amazon.aws.links.athena.AthenaQueryResultsLink", + "airflow.providers.amazon.aws.links.batch.BatchJobDefinitionLink", + "airflow.providers.amazon.aws.links.batch.BatchJobDetailsLink", + 
"airflow.providers.amazon.aws.links.batch.BatchJobQueueLink", + "airflow.providers.amazon.aws.links.emr.EmrClusterLink", + "airflow.providers.amazon.aws.links.emr.EmrLogsLink", + "airflow.providers.amazon.aws.links.emr.EmrServerlessCloudWatchLogsLink", + "airflow.providers.amazon.aws.links.emr.EmrServerlessDashboardLink", + "airflow.providers.amazon.aws.links.emr.EmrServerlessLogsLink", + "airflow.providers.amazon.aws.links.emr.EmrServerlessS3LogsLink", + "airflow.providers.amazon.aws.links.glue.GlueJobRunDetailsLink", + "airflow.providers.amazon.aws.links.logs.CloudWatchEventsLink", + "airflow.providers.amazon.aws.links.sagemaker.SageMakerTransformJobLink", + "airflow.providers.amazon.aws.links.step_function.StateMachineDetailsLink", + "airflow.providers.amazon.aws.links.step_function.StateMachineExecutionsDetailsLink", + "airflow.providers.amazon.aws.links.comprehend.ComprehendPiiEntitiesDetectionLink", + "airflow.providers.amazon.aws.links.comprehend.ComprehendDocumentClassifierLink", + "airflow.providers.amazon.aws.links.datasync.DataSyncTaskLink", + "airflow.providers.amazon.aws.links.datasync.DataSyncTaskExecutionLink", + "airflow.providers.amazon.aws.links.ec2.EC2InstanceLink", + "airflow.providers.amazon.aws.links.ec2.EC2InstanceDashboardLink", + ], + "connection-types": [ + { + "hook-class-name": "airflow.providers.amazon.aws.hooks.base_aws.AwsGenericHook", + "connection-type": "aws", + }, + { + "hook-class-name": "airflow.providers.amazon.aws.hooks.chime.ChimeWebhookHook", + "connection-type": "chime", + }, + {"hook-class-name": "airflow.providers.amazon.aws.hooks.emr.EmrHook", "connection-type": "emr"}, + { + "hook-class-name": "airflow.providers.amazon.aws.hooks.redshift_sql.RedshiftSQLHook", + "connection-type": "redshift", + }, + { + "hook-class-name": "airflow.providers.amazon.aws.hooks.athena_sql.AthenaSQLHook", + "connection-type": "athena", + }, + ], + "notifications": [ + "airflow.providers.amazon.aws.notifications.chime.ChimeNotifier", + 
"airflow.providers.amazon.aws.notifications.sns.SnsNotifier", + "airflow.providers.amazon.aws.notifications.sqs.SqsNotifier", + ], + "secrets-backends": [ + "airflow.providers.amazon.aws.secrets.secrets_manager.SecretsManagerBackend", + "airflow.providers.amazon.aws.secrets.systems_manager.SystemsManagerParameterStoreBackend", + ], + "logging": [ + "airflow.providers.amazon.aws.log.s3_task_handler.S3TaskHandler", + "airflow.providers.amazon.aws.log.cloudwatch_task_handler.CloudwatchTaskHandler", + ], + "config": { + "aws": { + "description": "This section contains settings for Amazon Web Services (AWS) integration.", + "options": { + "session_factory": { + "description": "Full import path to the class which implements a custom session factory for\n``boto3.session.Session``. For more details please have a look at\n:ref:`howto/connection:aws:session-factory`.\n", + "default": None, + "example": "my_company.aws.MyCustomSessionFactory", + "type": "string", + "version_added": "3.1.1", + }, + "cloudwatch_task_handler_json_serializer": { + "description": "By default, when logging non-string messages, all non-json objects are logged as `null`.\nExcept `datetime` objects which are ISO formatted. Users can optionally use a `repr` serializer or\nprovide their own JSON serializer for any non-JSON-serializable objects in the logged message.\n\n* `airflow.providers.amazon.aws.log.cloudwatch_task_handler.json_serialize` uses `repr` (be aware\n there is the potential of logging sensitive data depending on the `repr` method of logged objects)\n* `airflow.providers.amazon.aws.log.cloudwatch_task_handler.json_serialize_legacy` uses `null`.\n\nIf a custom serializer is provided, it must adhere to `Callable[[Any], str | None]`, where `None`\nserializes to `null` (e.g. `def my_serializer(o: Any) -> str | None`). 
Since this is on the logging\npath and it's possible there's an exception being handled, special care should be taken to fail\ngracefully without raising a new exception inside of your serializer.\n", + "type": "string", + "version_added": "8.7.2", + "example": "airflow.providers.amazon.aws.log.cloudwatch_task_handler.json_serialize", + "default": "airflow.providers.amazon.aws.log.cloudwatch_task_handler.json_serialize_legacy", + }, + }, + }, + "aws_batch_executor": { + "description": "This section only applies if you are using the AwsBatchExecutor in\nAirflow's ``[core]`` configuration.\nFor more information on any of these execution parameters, see the link below:\nhttps://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/batch.html#Batch.Client.submit_job\nFor boto3 credential management, see\nhttps://boto3.amazonaws.com/v1/documentation/api/latest/guide/configuration.html\n", + "options": { + "conn_id": { + "description": "The Airflow connection (i.e. credentials) used by the Batch executor to make API calls to AWS Batch.\n", + "version_added": "8.11", + "type": "string", + "example": "aws_default", + "default": "aws_default", + }, + "region_name": { + "description": "The name of the AWS Region where Amazon Batch is configured. Required.\n", + "version_added": "8.11", + "type": "string", + "example": "us-east-1", + "default": None, + }, + "max_submit_job_attempts": { + "description": "The maximum number of times the Batch Executor should attempt to run a Batch Job.\n", + "version_added": "8.11", + "type": "integer", + "example": "3", + "default": "3", + }, + "check_health_on_startup": { + "description": "Whether or not to check the Batch Executor health on startup.\n", + "version_added": "8.11", + "type": "boolean", + "example": "True", + "default": "True", + }, + "job_name": { + "description": "The name of the job submitted to AWS Batch. 
It can be up to 128 letters long.\nThe first character must be alphanumeric, can contain uppercase and lowercase\nletters, numbers, hyphens (-), and underscores (_).\n", + "version_added": "8.11", + "type": "string", + "example": "airflow-batch-executor-job", + "default": None, + }, + "job_queue": { + "description": "The job queue where the job is submitted. You can specify either the name\nor the Amazon Resource Name (ARN) of the queue.\n", + "version_added": "8.11", + "type": "string", + "example": "airflow-batch-executor-job-queue", + "default": None, + }, + "job_definition": { + "description": "The job definition used by the job. You can specify either the name\nor the Amazon Resource Name (ARN) of the job definition with or\nwithout the revision. If the revision is not specified, then the\nlatest active revision is used.\n", + "version_added": "8.11", + "type": "string", + "example": "airflow-batch-executor-job-definition", + "default": None, + }, + "submit_job_kwargs": { + "description": "Additional parameters to pass to the submit_job method of the\nAWS Batch client.\n", + "version_added": "8.11", + "type": "string", + "example": '{"Tags": [{"Key": "key", "Value": "value"}]}', + "default": None, + }, + }, + }, + "aws_ecs_executor": { + "description": "This section only applies if you are using the AwsEcsExecutor in\nAirflow's ``[core]`` configuration.\nFor more information on any of these execution parameters, see the link below:\nhttps://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/ecs/client/run_task.html\nFor boto3 credential management, see\nhttps://boto3.amazonaws.com/v1/documentation/api/latest/guide/configuration.html\n", + "options": { + "conn_id": { + "description": "The Airflow connection (i.e. 
credentials) used by the ECS executor to make API calls to AWS ECS.\n", + "version_added": "8.10", + "type": "string", + "example": "aws_default", + "default": "aws_default", + }, + "region_name": { + "description": "The name of the AWS Region where Amazon ECS is configured. Required.\n", + "version_added": "8.10", + "type": "string", + "example": "us-east-1", + "default": None, + }, + "assign_public_ip": { + "description": "Whether to assign a public IP address to the containers launched by the ECS executor.\nFor more info see url to Boto3 docs above.\n", + "version_added": "8.10", + "type": "boolean", + "example": "True", + "default": "False", + }, + "cluster": { + "description": "Name of the Amazon ECS Cluster. Required.\n", + "version_added": "8.10", + "type": "string", + "example": "ecs_executor_cluster", + "default": None, + }, + "capacity_provider_strategy": { + "description": "The capacity provider strategy to use for the task.\n\nIf a Capacity Provider Strategy is specified, the Launch Type parameter must be omitted. If\nno Capacity Provider Strategy or Launch Type is specified, the Default CapacityProvider Strategy\nfor the cluster is used, if present.\n\nWhen you use cluster auto scaling, you must specify Capacity Provider Strategy and not Launch Type.\n", + "version_added": "8.17", + "type": "string", + "example": "[{'capacityProvider': 'cp1', 'weight': 5}, {'capacityProvider': 'cp2', 'weight': 1}]", + "default": None, + }, + "container_name": { + "description": "Name of the container that will be used to execute Airflow tasks via the ECS executor.\nThe container should be specified in the ECS Task Definition and will receive an airflow\nCLI command as an additional parameter to its entrypoint. For more info see url to Boto3\ndocs above. Required.\n", + "version_added": "8.10", + "type": "string", + "example": "ecs_executor_container", + "default": None, + }, + "launch_type": { + "description": "Launch type can either be 'FARGATE' OR 'EC2'. 
For more info see url to\nBoto3 docs above.\n\nIf a Launch Type is specified, the Capacity Provider Strategy parameter must be omitted. If\nno Capacity Provider Strategy or Launch Type is specified, the Default Capacity Provider Strategy\nfor the cluster is used, if present.\n\nIf the launch type is EC2, the executor will attempt to place tasks on\nempty EC2 instances. If there are no EC2 instances available, no task\nis placed and this function will be called again in the next heart-beat.\n\nIf the launch type is FARGATE, this will run the tasks on new AWS Fargate\ninstances.\n", + "version_added": "8.10", + "type": "string", + "example": "FARGATE", + "default": None, + }, + "platform_version": { + "description": "The platform version the task uses. A platform version is only specified\nfor tasks hosted on Fargate. If one isn't specified, the LATEST platform\nversion is used.\n", + "version_added": "8.10", + "type": "string", + "example": "1.4.0", + "default": "LATEST", + }, + "security_groups": { + "description": "The comma-separated IDs of the security groups associated with the task. If you\ndon't specify a security group, the default security group for the VPC is used.\nThere's a limit of 5 security groups. For more info see url to Boto3 docs above.\n", + "version_added": "8.10", + "type": "string", + "example": "sg-XXXX,sg-YYYY", + "default": None, + }, + "subnets": { + "description": "The comma-separated IDs of the subnets associated with the task or service.\nThere's a limit of 16 subnets. For more info see url to Boto3 docs above.\n", + "version_added": "8.10", + "type": "string", + "example": "subnet-XXXXXXXX,subnet-YYYYYYYY", + "default": None, + }, + "task_definition": { + "description": "The family and revision (family:revision) or full ARN of the task definition\nto run. 
If a revision isn't specified, the latest ACTIVE revision is used.\nFor more info see url to Boto3 docs above.\n", + "version_added": "8.10", + "type": "string", + "example": "executor_task_definition:LATEST", + "default": None, + }, + "max_run_task_attempts": { + "description": "The maximum number of times the ECS Executor should attempt to run a task.\n", + "version_added": "8.10", + "type": "integer", + "example": "3", + "default": "3", + }, + "run_task_kwargs": { + "description": "A JSON string containing arguments to provide the ECS `run_task` API (see url above).\n", + "version_added": "8.10", + "type": "string", + "example": '{"tags": {"key": "schema", "value": "1.0"}}', + "default": None, + }, + "check_health_on_startup": { + "description": "Whether or not to check the ECS Executor health on startup.\n", + "version_added": "8.11", + "type": "boolean", + "example": "True", + "default": "True", + }, + }, + }, + "aws_auth_manager": { + "description": "This section only applies if you are using the AwsAuthManager. In other words, if you set\n``[core] auth_manager = airflow.providers.amazon.aws.auth_manager.aws_auth_manager.AwsAuthManager`` in\nAirflow's configuration.\n", + "options": { + "enable": { + "description": "AWS auth manager is not ready to be used. Turn on this flag to use it anyway.\nDo that at your own risk since the AWS auth manager is not in an usable state.\n", + "version_added": "8.12.0", + "type": "boolean", + "example": "True", + "default": "False", + }, + "conn_id": { + "description": "The Airflow connection (i.e. credentials) used by the AWS auth manager to make API calls to AWS\nIdentity Center and Amazon Verified Permissions.\n", + "version_added": "8.12.0", + "type": "string", + "example": "aws_default", + "default": "aws_default", + }, + "region_name": { + "description": "The name of the AWS Region where Amazon Verified Permissions is configured. 
Required.\n", + "version_added": "8.10", + "type": "string", + "example": "us-east-1", + "default": None, + }, + "saml_metadata_url": { + "description": "SAML metadata XML file provided by AWS Identity Center.\nThis URL can be found in the AWS Identity Center console. Required.\n", + "version_added": "8.12.0", + "type": "string", + "example": "https://portal.sso..amazonaws.com/saml/metadata/XXXXXXXXXX", + "default": None, + }, + "avp_policy_store_id": { + "description": "Amazon Verified Permissions' policy store ID where all the policies defining user permissions\nin Airflow are stored. Required.\n", + "version_added": "8.12.0", + "type": "string", + "example": None, + "default": None, + }, + }, + }, + }, + "executors": ["airflow.providers.amazon.aws.executors.ecs.ecs_executor.AwsEcsExecutor"], + "dependencies": [ + "apache-airflow>=2.9.0", + "apache-airflow-providers-common-compat>=1.3.0", + "apache-airflow-providers-common-sql>=1.20.0", + "apache-airflow-providers-http", + "boto3>=1.34.90", + "botocore>=1.34.90", + "inflection>=0.5.1", + "watchtower>=3.0.0,!=3.3.0,<4", + "jsonpath_ng>=1.5.3", + "redshift_connector>=2.0.918", + "asgiref>=2.3.0", + "PyAthena>=3.0.10", + "jmespath>=0.7.0", + "python3-saml>=1.16.0", + ], + "optional-dependencies": { + "pandas": ["pandas>=2.1.2,<2.2"], + "aiobotocore": ["aiobotocore[boto3]>=2.13.0"], + "cncf.kubernetes": ["apache-airflow-providers-cncf-kubernetes>=7.2.0"], + "s3fs": ["s3fs>=2023.10.0"], + "python3-saml": ["python3-saml>=1.16.0"], + "apache.hive": ["apache-airflow-providers-apache-hive"], + "exasol": ["apache-airflow-providers-exasol"], + "ftp": ["apache-airflow-providers-ftp"], + "google": ["apache-airflow-providers-google"], + "imap": ["apache-airflow-providers-imap"], + "microsoft.azure": ["apache-airflow-providers-microsoft-azure"], + "mongo": ["apache-airflow-providers-mongo"], + "openlineage": ["apache-airflow-providers-openlineage"], + "salesforce": ["apache-airflow-providers-salesforce"], + "ssh": 
["apache-airflow-providers-ssh"], + }, + "devel-dependencies": [ + "aiobotocore>=2.13.0", + "aws_xray_sdk>=2.12.0", + "moto[cloudformation,glue]>=5.0.0", + "mypy-boto3-appflow>=1.35.39", + "mypy-boto3-rds>=1.34.90", + "mypy-boto3-redshift-data>=1.34.0", + "mypy-boto3-s3>=1.34.90", + "s3fs>=2023.10.0", + "openapi-schema-validator>=0.6.2", + "openapi-spec-validator>=0.7.1", + ], + } diff --git a/providers/src/airflow/providers/amazon/version_compat.py b/providers/amazon/src/airflow/providers/amazon/version_compat.py similarity index 100% rename from providers/src/airflow/providers/amazon/version_compat.py rename to providers/amazon/src/airflow/providers/amazon/version_compat.py diff --git a/providers/amazon/tests/conftest.py b/providers/amazon/tests/conftest.py new file mode 100644 index 00000000000000..068fe6bbf5ae9a --- /dev/null +++ b/providers/amazon/tests/conftest.py @@ -0,0 +1,32 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+from __future__ import annotations + +import pathlib + +import pytest + +pytest_plugins = "tests_common.pytest_plugin" + + +@pytest.hookimpl(tryfirst=True) +def pytest_configure(config: pytest.Config) -> None: + deprecations_ignore_path = pathlib.Path(__file__).parent.joinpath("deprecations_ignore.yml") + dep_path = [deprecations_ignore_path] if deprecations_ignore_path.exists() else [] + config.inicfg["airflow_deprecations_ignore"] = ( + config.inicfg.get("airflow_deprecations_ignore", []) + dep_path # type: ignore[assignment,operator] + ) diff --git a/providers/tests/system/amazon/aws/tests/__init__.py b/providers/amazon/tests/provider_tests/__init__.py similarity index 90% rename from providers/tests/system/amazon/aws/tests/__init__.py rename to providers/amazon/tests/provider_tests/__init__.py index 13a83393a9124b..e8fd22856438c4 100644 --- a/providers/tests/system/amazon/aws/tests/__init__.py +++ b/providers/amazon/tests/provider_tests/__init__.py @@ -14,3 +14,4 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. 
+__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/providers/tests/amazon/__init__.py b/providers/amazon/tests/provider_tests/amazon/__init__.py similarity index 100% rename from providers/tests/amazon/__init__.py rename to providers/amazon/tests/provider_tests/amazon/__init__.py diff --git a/providers/tests/amazon/aws/.gitignore b/providers/amazon/tests/provider_tests/amazon/aws/.gitignore similarity index 100% rename from providers/tests/amazon/aws/.gitignore rename to providers/amazon/tests/provider_tests/amazon/aws/.gitignore diff --git a/providers/tests/amazon/aws/__init__.py b/providers/amazon/tests/provider_tests/amazon/aws/__init__.py similarity index 100% rename from providers/tests/amazon/aws/__init__.py rename to providers/amazon/tests/provider_tests/amazon/aws/__init__.py diff --git a/providers/tests/amazon/aws/assets/__init__.py b/providers/amazon/tests/provider_tests/amazon/aws/assets/__init__.py similarity index 100% rename from providers/tests/amazon/aws/assets/__init__.py rename to providers/amazon/tests/provider_tests/amazon/aws/assets/__init__.py diff --git a/providers/tests/amazon/aws/assets/test_s3.py b/providers/amazon/tests/provider_tests/amazon/aws/assets/test_s3.py similarity index 100% rename from providers/tests/amazon/aws/assets/test_s3.py rename to providers/amazon/tests/provider_tests/amazon/aws/assets/test_s3.py diff --git a/providers/tests/amazon/aws/auth_manager/__init__.py b/providers/amazon/tests/provider_tests/amazon/aws/auth_manager/__init__.py similarity index 100% rename from providers/tests/amazon/aws/auth_manager/__init__.py rename to providers/amazon/tests/provider_tests/amazon/aws/auth_manager/__init__.py diff --git a/providers/tests/amazon/aws/auth_manager/avp/__init__.py b/providers/amazon/tests/provider_tests/amazon/aws/auth_manager/avp/__init__.py similarity index 100% rename from providers/tests/amazon/aws/auth_manager/avp/__init__.py rename to 
providers/amazon/tests/provider_tests/amazon/aws/auth_manager/avp/__init__.py diff --git a/providers/tests/amazon/aws/auth_manager/avp/test_entities.py b/providers/amazon/tests/provider_tests/amazon/aws/auth_manager/avp/test_entities.py similarity index 100% rename from providers/tests/amazon/aws/auth_manager/avp/test_entities.py rename to providers/amazon/tests/provider_tests/amazon/aws/auth_manager/avp/test_entities.py diff --git a/providers/tests/amazon/aws/auth_manager/avp/test_facade.py b/providers/amazon/tests/provider_tests/amazon/aws/auth_manager/avp/test_facade.py similarity index 92% rename from providers/tests/amazon/aws/auth_manager/avp/test_facade.py rename to providers/amazon/tests/provider_tests/amazon/aws/auth_manager/avp/test_facade.py index 124f4cc0f10af6..57f350830ca01b 100644 --- a/providers/tests/amazon/aws/auth_manager/avp/test_facade.py +++ b/providers/amazon/tests/provider_tests/amazon/aws/auth_manager/avp/test_facade.py @@ -29,6 +29,7 @@ from airflow.utils.helpers import prune_dict from tests_common.test_utils.config import conf_vars +from tests_common.test_utils.version_compat import AIRFLOW_V_3_0_PLUS if TYPE_CHECKING: from airflow.auth.managers.base_auth_manager import ResourceMethod @@ -300,10 +301,16 @@ def test_get_batch_is_authorized_single_result_unsuccessful(self, facade): user=test_user, ) + @pytest.mark.skipif(not AIRFLOW_V_3_0_PLUS, reason="Paths changed in 3.0") def test_is_policy_store_schema_up_to_date_when_schema_up_to_date(self, facade, providers_src_folder): - schema_path = providers_src_folder.joinpath( - "airflow", "providers", "amazon", "aws", "auth_manager", "avp", "schema.json" - ).resolve() + if AIRFLOW_V_3_0_PLUS: + schema_path = providers_src_folder.joinpath( + "amazon", "src", "airflow", "providers", "amazon", "aws", "auth_manager", "avp", "schema.json" + ).resolve() + else: + schema_path = providers_src_folder.joinpath( + "airflow", "providers", "amazon", "aws", "auth_manager", "avp", "schema.json" + ).resolve() 
with open(schema_path) as schema_file: avp_response = {"schema": schema_file.read()} mock_get_schema = Mock(return_value=avp_response) @@ -312,9 +319,14 @@ def test_is_policy_store_schema_up_to_date_when_schema_up_to_date(self, facade, assert facade.is_policy_store_schema_up_to_date() def test_is_policy_store_schema_up_to_date_when_schema_is_modified(self, facade, providers_src_folder): - schema_path = providers_src_folder.joinpath( - "airflow", "providers", "amazon", "aws", "auth_manager", "avp", "schema.json" - ).resolve() + if AIRFLOW_V_3_0_PLUS: + schema_path = providers_src_folder.joinpath( + "amazon", "src", "airflow", "providers", "amazon", "aws", "auth_manager", "avp", "schema.json" + ).resolve() + else: + schema_path = providers_src_folder.joinpath( + "airflow", "providers", "amazon", "aws", "auth_manager", "avp", "schema.json" + ).resolve() with open(schema_path) as schema_file: schema = json.loads(schema_file.read()) schema["new_field"] = "new_value" diff --git a/providers/tests/amazon/aws/auth_manager/cli/__init__.py b/providers/amazon/tests/provider_tests/amazon/aws/auth_manager/cli/__init__.py similarity index 100% rename from providers/tests/amazon/aws/auth_manager/cli/__init__.py rename to providers/amazon/tests/provider_tests/amazon/aws/auth_manager/cli/__init__.py diff --git a/providers/tests/amazon/aws/auth_manager/cli/test_avp_commands.py b/providers/amazon/tests/provider_tests/amazon/aws/auth_manager/cli/test_avp_commands.py similarity index 100% rename from providers/tests/amazon/aws/auth_manager/cli/test_avp_commands.py rename to providers/amazon/tests/provider_tests/amazon/aws/auth_manager/cli/test_avp_commands.py diff --git a/providers/tests/amazon/aws/auth_manager/cli/test_definition.py b/providers/amazon/tests/provider_tests/amazon/aws/auth_manager/cli/test_definition.py similarity index 100% rename from providers/tests/amazon/aws/auth_manager/cli/test_definition.py rename to 
providers/amazon/tests/provider_tests/amazon/aws/auth_manager/cli/test_definition.py diff --git a/providers/tests/amazon/aws/auth_manager/router/test_login.py b/providers/amazon/tests/provider_tests/amazon/aws/auth_manager/router/test_login.py similarity index 100% rename from providers/tests/amazon/aws/auth_manager/router/test_login.py rename to providers/amazon/tests/provider_tests/amazon/aws/auth_manager/router/test_login.py diff --git a/providers/tests/amazon/aws/auth_manager/router/__init__.py b/providers/amazon/tests/provider_tests/amazon/aws/auth_manager/security_manager/__init__.py similarity index 100% rename from providers/tests/amazon/aws/auth_manager/router/__init__.py rename to providers/amazon/tests/provider_tests/amazon/aws/auth_manager/security_manager/__init__.py diff --git a/providers/tests/amazon/aws/auth_manager/security_manager/test_aws_security_manager_override.py b/providers/amazon/tests/provider_tests/amazon/aws/auth_manager/security_manager/test_aws_security_manager_override.py similarity index 99% rename from providers/tests/amazon/aws/auth_manager/security_manager/test_aws_security_manager_override.py rename to providers/amazon/tests/provider_tests/amazon/aws/auth_manager/security_manager/test_aws_security_manager_override.py index a05df04e9485cc..ad75afc2ac50bb 100644 --- a/providers/tests/amazon/aws/auth_manager/security_manager/test_aws_security_manager_override.py +++ b/providers/amazon/tests/provider_tests/amazon/aws/auth_manager/security_manager/test_aws_security_manager_override.py @@ -21,6 +21,8 @@ import pytest from flask import Flask +from airflow.www.extensions.init_appbuilder import init_appbuilder + from tests_common.test_utils.compat import ignore_provider_compatibility_error python3_saml = pytest.importorskip("python3-saml") @@ -29,7 +31,6 @@ from airflow.providers.amazon.aws.auth_manager.security_manager.aws_security_manager_override import ( AwsSecurityManagerOverride, ) -from airflow.www.extensions.init_appbuilder import 
init_appbuilder @pytest.fixture diff --git a/providers/tests/amazon/aws/auth_manager/test_aws_auth_manager.py b/providers/amazon/tests/provider_tests/amazon/aws/auth_manager/test_aws_auth_manager.py similarity index 100% rename from providers/tests/amazon/aws/auth_manager/test_aws_auth_manager.py rename to providers/amazon/tests/provider_tests/amazon/aws/auth_manager/test_aws_auth_manager.py diff --git a/providers/tests/amazon/aws/auth_manager/test_constants.py b/providers/amazon/tests/provider_tests/amazon/aws/auth_manager/test_constants.py similarity index 100% rename from providers/tests/amazon/aws/auth_manager/test_constants.py rename to providers/amazon/tests/provider_tests/amazon/aws/auth_manager/test_constants.py diff --git a/providers/tests/amazon/aws/auth_manager/test_user.py b/providers/amazon/tests/provider_tests/amazon/aws/auth_manager/test_user.py similarity index 100% rename from providers/tests/amazon/aws/auth_manager/test_user.py rename to providers/amazon/tests/provider_tests/amazon/aws/auth_manager/test_user.py diff --git a/providers/tests/amazon/aws/auth_manager/security_manager/__init__.py b/providers/amazon/tests/provider_tests/amazon/aws/config_templates/__init__.py similarity index 100% rename from providers/tests/amazon/aws/auth_manager/security_manager/__init__.py rename to providers/amazon/tests/provider_tests/amazon/aws/config_templates/__init__.py diff --git a/providers/tests/amazon/aws/config_templates/args.json b/providers/amazon/tests/provider_tests/amazon/aws/config_templates/args.json similarity index 100% rename from providers/tests/amazon/aws/config_templates/args.json rename to providers/amazon/tests/provider_tests/amazon/aws/config_templates/args.json diff --git a/providers/tests/amazon/aws/config_templates/job.j2.json b/providers/amazon/tests/provider_tests/amazon/aws/config_templates/job.j2.json similarity index 100% rename from providers/tests/amazon/aws/config_templates/job.j2.json rename to 
providers/amazon/tests/provider_tests/amazon/aws/config_templates/job.j2.json diff --git a/providers/tests/amazon/aws/config_templates/steps.j2.json b/providers/amazon/tests/provider_tests/amazon/aws/config_templates/steps.j2.json similarity index 100% rename from providers/tests/amazon/aws/config_templates/steps.j2.json rename to providers/amazon/tests/provider_tests/amazon/aws/config_templates/steps.j2.json diff --git a/providers/tests/amazon/aws/config_templates/steps.json b/providers/amazon/tests/provider_tests/amazon/aws/config_templates/steps.json similarity index 100% rename from providers/tests/amazon/aws/config_templates/steps.json rename to providers/amazon/tests/provider_tests/amazon/aws/config_templates/steps.json diff --git a/providers/tests/amazon/aws/config_templates/__init__.py b/providers/amazon/tests/provider_tests/amazon/aws/deferrable/__init__.py similarity index 100% rename from providers/tests/amazon/aws/config_templates/__init__.py rename to providers/amazon/tests/provider_tests/amazon/aws/deferrable/__init__.py diff --git a/providers/tests/amazon/aws/deferrable/__init__.py b/providers/amazon/tests/provider_tests/amazon/aws/deferrable/hooks/__init__.py similarity index 100% rename from providers/tests/amazon/aws/deferrable/__init__.py rename to providers/amazon/tests/provider_tests/amazon/aws/deferrable/hooks/__init__.py diff --git a/providers/tests/amazon/aws/deferrable/hooks/__init__.py b/providers/amazon/tests/provider_tests/amazon/aws/executors/__init__.py similarity index 100% rename from providers/tests/amazon/aws/deferrable/hooks/__init__.py rename to providers/amazon/tests/provider_tests/amazon/aws/executors/__init__.py diff --git a/providers/tests/amazon/aws/executors/__init__.py b/providers/amazon/tests/provider_tests/amazon/aws/executors/batch/__init__.py similarity index 100% rename from providers/tests/amazon/aws/executors/__init__.py rename to providers/amazon/tests/provider_tests/amazon/aws/executors/batch/__init__.py diff 
--git a/providers/tests/amazon/aws/executors/batch/test_batch_executor.py b/providers/amazon/tests/provider_tests/amazon/aws/executors/batch/test_batch_executor.py similarity index 100% rename from providers/tests/amazon/aws/executors/batch/test_batch_executor.py rename to providers/amazon/tests/provider_tests/amazon/aws/executors/batch/test_batch_executor.py diff --git a/providers/tests/amazon/aws/executors/batch/__init__.py b/providers/amazon/tests/provider_tests/amazon/aws/executors/ecs/__init__.py similarity index 100% rename from providers/tests/amazon/aws/executors/batch/__init__.py rename to providers/amazon/tests/provider_tests/amazon/aws/executors/ecs/__init__.py diff --git a/providers/tests/amazon/aws/executors/ecs/test_ecs_executor.py b/providers/amazon/tests/provider_tests/amazon/aws/executors/ecs/test_ecs_executor.py similarity index 100% rename from providers/tests/amazon/aws/executors/ecs/test_ecs_executor.py rename to providers/amazon/tests/provider_tests/amazon/aws/executors/ecs/test_ecs_executor.py diff --git a/providers/tests/amazon/aws/executors/ecs/__init__.py b/providers/amazon/tests/provider_tests/amazon/aws/executors/utils/__init__.py similarity index 100% rename from providers/tests/amazon/aws/executors/ecs/__init__.py rename to providers/amazon/tests/provider_tests/amazon/aws/executors/utils/__init__.py diff --git a/providers/tests/amazon/aws/executors/utils/test_exponential_backoff_retry.py b/providers/amazon/tests/provider_tests/amazon/aws/executors/utils/test_exponential_backoff_retry.py similarity index 100% rename from providers/tests/amazon/aws/executors/utils/test_exponential_backoff_retry.py rename to providers/amazon/tests/provider_tests/amazon/aws/executors/utils/test_exponential_backoff_retry.py diff --git a/providers/tests/amazon/aws/executors/utils/__init__.py b/providers/amazon/tests/provider_tests/amazon/aws/fs/__init__.py similarity index 100% rename from providers/tests/amazon/aws/executors/utils/__init__.py rename to 
providers/amazon/tests/provider_tests/amazon/aws/fs/__init__.py diff --git a/providers/tests/amazon/aws/fs/test_s3.py b/providers/amazon/tests/provider_tests/amazon/aws/fs/test_s3.py similarity index 100% rename from providers/tests/amazon/aws/fs/test_s3.py rename to providers/amazon/tests/provider_tests/amazon/aws/fs/test_s3.py diff --git a/providers/tests/amazon/aws/fs/__init__.py b/providers/amazon/tests/provider_tests/amazon/aws/hooks/__init__.py similarity index 100% rename from providers/tests/amazon/aws/fs/__init__.py rename to providers/amazon/tests/provider_tests/amazon/aws/hooks/__init__.py diff --git a/providers/tests/amazon/aws/hooks/test_appflow.py b/providers/amazon/tests/provider_tests/amazon/aws/hooks/test_appflow.py similarity index 100% rename from providers/tests/amazon/aws/hooks/test_appflow.py rename to providers/amazon/tests/provider_tests/amazon/aws/hooks/test_appflow.py diff --git a/providers/tests/amazon/aws/hooks/test_athena.py b/providers/amazon/tests/provider_tests/amazon/aws/hooks/test_athena.py similarity index 100% rename from providers/tests/amazon/aws/hooks/test_athena.py rename to providers/amazon/tests/provider_tests/amazon/aws/hooks/test_athena.py diff --git a/providers/tests/amazon/aws/hooks/test_athena_sql.py b/providers/amazon/tests/provider_tests/amazon/aws/hooks/test_athena_sql.py similarity index 100% rename from providers/tests/amazon/aws/hooks/test_athena_sql.py rename to providers/amazon/tests/provider_tests/amazon/aws/hooks/test_athena_sql.py diff --git a/providers/tests/amazon/aws/hooks/test_base_aws.py b/providers/amazon/tests/provider_tests/amazon/aws/hooks/test_base_aws.py similarity index 99% rename from providers/tests/amazon/aws/hooks/test_base_aws.py rename to providers/amazon/tests/provider_tests/amazon/aws/hooks/test_base_aws.py index 86487ece6f88c0..c48f608a8c50fb 100644 --- a/providers/tests/amazon/aws/hooks/test_base_aws.py +++ b/providers/amazon/tests/provider_tests/amazon/aws/hooks/test_base_aws.py @@ 
-1,4 +1,3 @@ -# # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information @@ -141,7 +140,7 @@ def mock_conn(request): class TestSessionFactory: @conf_vars( - {("aws", "session_factory"): "providers.tests.amazon.aws.hooks.test_base_aws.CustomSessionFactory"} + {("aws", "session_factory"): "provider_tests.amazon.aws.hooks.test_base_aws.CustomSessionFactory"} ) def test_resolve_session_factory_class(self): cls = resolve_session_factory() @@ -207,13 +206,11 @@ def test_resolve_botocore_config(self, botocore_config, conn_botocore_config): @pytest.mark.parametrize("region_name", ["eu-central-1", None]) @mock.patch("boto3.session.Session", new_callable=mock.PropertyMock, return_value=MOCK_BOTO3_SESSION) - def test_create_session_boto3_credential_strategy(self, mock_boto3_session, region_name, caplog): + def test_create_session_boto3_credential_strategy(self, mock_boto3_session, region_name): sf = BaseSessionFactory(conn=AwsConnectionWrapper(conn=None), region_name=region_name, config=None) session = sf.create_session() mock_boto3_session.assert_called_once_with(region_name=region_name) assert session == MOCK_BOTO3_SESSION - logging_message = "No connection ID provided. 
Fallback on boto3 credential strategy" - assert any(logging_message in log_text for log_text in caplog.messages) @pytest.mark.parametrize("region_name", ["eu-central-1", None]) @pytest.mark.parametrize("profile_name", ["default", None]) diff --git a/providers/tests/amazon/aws/hooks/test_batch_client.py b/providers/amazon/tests/provider_tests/amazon/aws/hooks/test_batch_client.py similarity index 100% rename from providers/tests/amazon/aws/hooks/test_batch_client.py rename to providers/amazon/tests/provider_tests/amazon/aws/hooks/test_batch_client.py diff --git a/providers/tests/amazon/aws/hooks/test_batch_waiters.py b/providers/amazon/tests/provider_tests/amazon/aws/hooks/test_batch_waiters.py similarity index 100% rename from providers/tests/amazon/aws/hooks/test_batch_waiters.py rename to providers/amazon/tests/provider_tests/amazon/aws/hooks/test_batch_waiters.py diff --git a/providers/tests/amazon/aws/hooks/test_bedrock.py b/providers/amazon/tests/provider_tests/amazon/aws/hooks/test_bedrock.py similarity index 100% rename from providers/tests/amazon/aws/hooks/test_bedrock.py rename to providers/amazon/tests/provider_tests/amazon/aws/hooks/test_bedrock.py diff --git a/providers/tests/amazon/aws/hooks/test_chime.py b/providers/amazon/tests/provider_tests/amazon/aws/hooks/test_chime.py similarity index 100% rename from providers/tests/amazon/aws/hooks/test_chime.py rename to providers/amazon/tests/provider_tests/amazon/aws/hooks/test_chime.py diff --git a/providers/tests/amazon/aws/hooks/test_cloud_formation.py b/providers/amazon/tests/provider_tests/amazon/aws/hooks/test_cloud_formation.py similarity index 100% rename from providers/tests/amazon/aws/hooks/test_cloud_formation.py rename to providers/amazon/tests/provider_tests/amazon/aws/hooks/test_cloud_formation.py diff --git a/providers/tests/amazon/aws/hooks/test_comprehend.py b/providers/amazon/tests/provider_tests/amazon/aws/hooks/test_comprehend.py similarity index 100% rename from 
providers/tests/amazon/aws/hooks/test_comprehend.py rename to providers/amazon/tests/provider_tests/amazon/aws/hooks/test_comprehend.py diff --git a/providers/tests/amazon/aws/hooks/test_datasync.py b/providers/amazon/tests/provider_tests/amazon/aws/hooks/test_datasync.py similarity index 100% rename from providers/tests/amazon/aws/hooks/test_datasync.py rename to providers/amazon/tests/provider_tests/amazon/aws/hooks/test_datasync.py diff --git a/providers/tests/amazon/aws/hooks/test_dms.py b/providers/amazon/tests/provider_tests/amazon/aws/hooks/test_dms.py similarity index 100% rename from providers/tests/amazon/aws/hooks/test_dms.py rename to providers/amazon/tests/provider_tests/amazon/aws/hooks/test_dms.py diff --git a/providers/tests/amazon/aws/hooks/test_dynamodb.py b/providers/amazon/tests/provider_tests/amazon/aws/hooks/test_dynamodb.py similarity index 100% rename from providers/tests/amazon/aws/hooks/test_dynamodb.py rename to providers/amazon/tests/provider_tests/amazon/aws/hooks/test_dynamodb.py diff --git a/providers/tests/amazon/aws/hooks/test_ec2.py b/providers/amazon/tests/provider_tests/amazon/aws/hooks/test_ec2.py similarity index 100% rename from providers/tests/amazon/aws/hooks/test_ec2.py rename to providers/amazon/tests/provider_tests/amazon/aws/hooks/test_ec2.py diff --git a/providers/tests/amazon/aws/hooks/test_ecr.py b/providers/amazon/tests/provider_tests/amazon/aws/hooks/test_ecr.py similarity index 100% rename from providers/tests/amazon/aws/hooks/test_ecr.py rename to providers/amazon/tests/provider_tests/amazon/aws/hooks/test_ecr.py diff --git a/providers/tests/amazon/aws/hooks/test_ecs.py b/providers/amazon/tests/provider_tests/amazon/aws/hooks/test_ecs.py similarity index 100% rename from providers/tests/amazon/aws/hooks/test_ecs.py rename to providers/amazon/tests/provider_tests/amazon/aws/hooks/test_ecs.py diff --git a/providers/tests/amazon/aws/hooks/test_eks.py 
b/providers/amazon/tests/provider_tests/amazon/aws/hooks/test_eks.py similarity index 99% rename from providers/tests/amazon/aws/hooks/test_eks.py rename to providers/amazon/tests/provider_tests/amazon/aws/hooks/test_eks.py index a5b1471098480f..59505f86849b02 100644 --- a/providers/tests/amazon/aws/hooks/test_eks.py +++ b/providers/amazon/tests/provider_tests/amazon/aws/hooks/test_eks.py @@ -53,8 +53,7 @@ ) from airflow.providers.amazon.aws.hooks.eks import COMMAND, EksHook - -from providers.tests.amazon.aws.utils.eks_test_constants import ( +from provider_tests.amazon.aws.utils.eks_test_constants import ( DEFAULT_CONN_ID, DEFAULT_NAMESPACE, DISK_SIZE, @@ -83,7 +82,7 @@ RegExTemplates, ResponseAttributes, ) -from providers.tests.amazon.aws.utils.eks_test_utils import ( +from provider_tests.amazon.aws.utils.eks_test_utils import ( attributes_to_test, generate_clusters, generate_dict, diff --git a/providers/tests/amazon/aws/hooks/test_elasticache_replication_group.py b/providers/amazon/tests/provider_tests/amazon/aws/hooks/test_elasticache_replication_group.py similarity index 100% rename from providers/tests/amazon/aws/hooks/test_elasticache_replication_group.py rename to providers/amazon/tests/provider_tests/amazon/aws/hooks/test_elasticache_replication_group.py diff --git a/providers/tests/amazon/aws/hooks/test_emr.py b/providers/amazon/tests/provider_tests/amazon/aws/hooks/test_emr.py similarity index 98% rename from providers/tests/amazon/aws/hooks/test_emr.py rename to providers/amazon/tests/provider_tests/amazon/aws/hooks/test_emr.py index aaa572cc5e4f2c..e686554914acb0 100644 --- a/providers/tests/amazon/aws/hooks/test_emr.py +++ b/providers/amazon/tests/provider_tests/amazon/aws/hooks/test_emr.py
@@ -174,7 +174,9 @@ def test_create_job_flow_extra_args(self): warnings.simplefilter("error") if sys.version_info >= (3, 12): # Botocore generates deprecation warning on Python 3.12 connected with utcnow use - warnings.filterwarnings("ignore", message=r".*datetime.utcnow.*", category=DeprecationWarning) + warnings.filterwarnings( + "ignore", message=r".*datetime.utcnow.*", category=DeprecationWarning + ) cluster = hook.create_job_flow( {"Name": "test_cluster", "ReleaseLabel": "", "AmiVersion": "3.2", "Instances": {}} ) diff --git a/providers/tests/amazon/aws/hooks/test_emr_containers.py b/providers/amazon/tests/provider_tests/amazon/aws/hooks/test_emr_containers.py similarity index 100% rename from providers/tests/amazon/aws/hooks/test_emr_containers.py rename to providers/amazon/tests/provider_tests/amazon/aws/hooks/test_emr_containers.py diff --git a/providers/tests/amazon/aws/hooks/test_emr_serverless.py b/providers/amazon/tests/provider_tests/amazon/aws/hooks/test_emr_serverless.py similarity index 100% rename from providers/tests/amazon/aws/hooks/test_emr_serverless.py rename to providers/amazon/tests/provider_tests/amazon/aws/hooks/test_emr_serverless.py diff --git a/providers/tests/amazon/aws/hooks/test_eventbridge.py b/providers/amazon/tests/provider_tests/amazon/aws/hooks/test_eventbridge.py similarity index 100% rename from providers/tests/amazon/aws/hooks/test_eventbridge.py rename to providers/amazon/tests/provider_tests/amazon/aws/hooks/test_eventbridge.py diff --git a/providers/tests/amazon/aws/hooks/test_glacier.py b/providers/amazon/tests/provider_tests/amazon/aws/hooks/test_glacier.py similarity index 100% rename from providers/tests/amazon/aws/hooks/test_glacier.py rename to providers/amazon/tests/provider_tests/amazon/aws/hooks/test_glacier.py diff --git a/providers/tests/amazon/aws/hooks/test_glue.py b/providers/amazon/tests/provider_tests/amazon/aws/hooks/test_glue.py similarity index 100% rename from
providers/tests/amazon/aws/hooks/test_glue.py rename to providers/amazon/tests/provider_tests/amazon/aws/hooks/test_glue.py diff --git a/providers/tests/amazon/aws/hooks/test_glue_catalog.py b/providers/amazon/tests/provider_tests/amazon/aws/hooks/test_glue_catalog.py similarity index 100% rename from providers/tests/amazon/aws/hooks/test_glue_catalog.py rename to providers/amazon/tests/provider_tests/amazon/aws/hooks/test_glue_catalog.py diff --git a/providers/tests/amazon/aws/hooks/test_glue_crawler.py b/providers/amazon/tests/provider_tests/amazon/aws/hooks/test_glue_crawler.py similarity index 100% rename from providers/tests/amazon/aws/hooks/test_glue_crawler.py rename to providers/amazon/tests/provider_tests/amazon/aws/hooks/test_glue_crawler.py diff --git a/providers/tests/amazon/aws/hooks/test_glue_databrew.py b/providers/amazon/tests/provider_tests/amazon/aws/hooks/test_glue_databrew.py similarity index 100% rename from providers/tests/amazon/aws/hooks/test_glue_databrew.py rename to providers/amazon/tests/provider_tests/amazon/aws/hooks/test_glue_databrew.py diff --git a/providers/tests/amazon/aws/hooks/test_hooks_signature.py b/providers/amazon/tests/provider_tests/amazon/aws/hooks/test_hooks_signature.py similarity index 100% rename from providers/tests/amazon/aws/hooks/test_hooks_signature.py rename to providers/amazon/tests/provider_tests/amazon/aws/hooks/test_hooks_signature.py diff --git a/providers/tests/amazon/aws/hooks/test_kinesis.py b/providers/amazon/tests/provider_tests/amazon/aws/hooks/test_kinesis.py similarity index 100% rename from providers/tests/amazon/aws/hooks/test_kinesis.py rename to providers/amazon/tests/provider_tests/amazon/aws/hooks/test_kinesis.py diff --git a/providers/tests/amazon/aws/hooks/test_kinesis_analytics.py b/providers/amazon/tests/provider_tests/amazon/aws/hooks/test_kinesis_analytics.py similarity index 100% rename from providers/tests/amazon/aws/hooks/test_kinesis_analytics.py rename to 
providers/amazon/tests/provider_tests/amazon/aws/hooks/test_kinesis_analytics.py diff --git a/providers/tests/amazon/aws/hooks/test_lambda_function.py b/providers/amazon/tests/provider_tests/amazon/aws/hooks/test_lambda_function.py similarity index 100% rename from providers/tests/amazon/aws/hooks/test_lambda_function.py rename to providers/amazon/tests/provider_tests/amazon/aws/hooks/test_lambda_function.py diff --git a/providers/tests/amazon/aws/hooks/test_logs.py b/providers/amazon/tests/provider_tests/amazon/aws/hooks/test_logs.py similarity index 100% rename from providers/tests/amazon/aws/hooks/test_logs.py rename to providers/amazon/tests/provider_tests/amazon/aws/hooks/test_logs.py diff --git a/providers/tests/amazon/aws/hooks/test_neptune.py b/providers/amazon/tests/provider_tests/amazon/aws/hooks/test_neptune.py similarity index 100% rename from providers/tests/amazon/aws/hooks/test_neptune.py rename to providers/amazon/tests/provider_tests/amazon/aws/hooks/test_neptune.py diff --git a/providers/tests/amazon/aws/hooks/test_opensearch_serverless.py b/providers/amazon/tests/provider_tests/amazon/aws/hooks/test_opensearch_serverless.py similarity index 100% rename from providers/tests/amazon/aws/hooks/test_opensearch_serverless.py rename to providers/amazon/tests/provider_tests/amazon/aws/hooks/test_opensearch_serverless.py diff --git a/providers/tests/amazon/aws/hooks/test_quicksight.py b/providers/amazon/tests/provider_tests/amazon/aws/hooks/test_quicksight.py similarity index 100% rename from providers/tests/amazon/aws/hooks/test_quicksight.py rename to providers/amazon/tests/provider_tests/amazon/aws/hooks/test_quicksight.py diff --git a/providers/tests/amazon/aws/hooks/test_rds.py b/providers/amazon/tests/provider_tests/amazon/aws/hooks/test_rds.py similarity index 100% rename from providers/tests/amazon/aws/hooks/test_rds.py rename to providers/amazon/tests/provider_tests/amazon/aws/hooks/test_rds.py diff --git 
a/providers/tests/amazon/aws/hooks/test_redshift_cluster.py b/providers/amazon/tests/provider_tests/amazon/aws/hooks/test_redshift_cluster.py similarity index 100% rename from providers/tests/amazon/aws/hooks/test_redshift_cluster.py rename to providers/amazon/tests/provider_tests/amazon/aws/hooks/test_redshift_cluster.py diff --git a/providers/tests/amazon/aws/hooks/test_redshift_data.py b/providers/amazon/tests/provider_tests/amazon/aws/hooks/test_redshift_data.py similarity index 100% rename from providers/tests/amazon/aws/hooks/test_redshift_data.py rename to providers/amazon/tests/provider_tests/amazon/aws/hooks/test_redshift_data.py diff --git a/providers/tests/amazon/aws/hooks/test_redshift_sql.py b/providers/amazon/tests/provider_tests/amazon/aws/hooks/test_redshift_sql.py similarity index 100% rename from providers/tests/amazon/aws/hooks/test_redshift_sql.py rename to providers/amazon/tests/provider_tests/amazon/aws/hooks/test_redshift_sql.py diff --git a/providers/tests/amazon/aws/hooks/test_s3.py b/providers/amazon/tests/provider_tests/amazon/aws/hooks/test_s3.py similarity index 100% rename from providers/tests/amazon/aws/hooks/test_s3.py rename to providers/amazon/tests/provider_tests/amazon/aws/hooks/test_s3.py diff --git a/providers/tests/amazon/aws/hooks/test_sagemaker.py b/providers/amazon/tests/provider_tests/amazon/aws/hooks/test_sagemaker.py similarity index 100% rename from providers/tests/amazon/aws/hooks/test_sagemaker.py rename to providers/amazon/tests/provider_tests/amazon/aws/hooks/test_sagemaker.py diff --git a/providers/tests/amazon/aws/hooks/test_secrets_manager.py b/providers/amazon/tests/provider_tests/amazon/aws/hooks/test_secrets_manager.py similarity index 100% rename from providers/tests/amazon/aws/hooks/test_secrets_manager.py rename to providers/amazon/tests/provider_tests/amazon/aws/hooks/test_secrets_manager.py diff --git a/providers/tests/amazon/aws/hooks/test_ses.py 
b/providers/amazon/tests/provider_tests/amazon/aws/hooks/test_ses.py similarity index 100% rename from providers/tests/amazon/aws/hooks/test_ses.py rename to providers/amazon/tests/provider_tests/amazon/aws/hooks/test_ses.py diff --git a/providers/tests/amazon/aws/hooks/test_sns.py b/providers/amazon/tests/provider_tests/amazon/aws/hooks/test_sns.py similarity index 100% rename from providers/tests/amazon/aws/hooks/test_sns.py rename to providers/amazon/tests/provider_tests/amazon/aws/hooks/test_sns.py diff --git a/providers/tests/amazon/aws/hooks/test_sqs.py b/providers/amazon/tests/provider_tests/amazon/aws/hooks/test_sqs.py similarity index 100% rename from providers/tests/amazon/aws/hooks/test_sqs.py rename to providers/amazon/tests/provider_tests/amazon/aws/hooks/test_sqs.py diff --git a/providers/tests/amazon/aws/hooks/test_ssm.py b/providers/amazon/tests/provider_tests/amazon/aws/hooks/test_ssm.py similarity index 100% rename from providers/tests/amazon/aws/hooks/test_ssm.py rename to providers/amazon/tests/provider_tests/amazon/aws/hooks/test_ssm.py diff --git a/providers/tests/amazon/aws/hooks/test_step_function.py b/providers/amazon/tests/provider_tests/amazon/aws/hooks/test_step_function.py similarity index 100% rename from providers/tests/amazon/aws/hooks/test_step_function.py rename to providers/amazon/tests/provider_tests/amazon/aws/hooks/test_step_function.py diff --git a/providers/tests/amazon/aws/hooks/test_sts.py b/providers/amazon/tests/provider_tests/amazon/aws/hooks/test_sts.py similarity index 100% rename from providers/tests/amazon/aws/hooks/test_sts.py rename to providers/amazon/tests/provider_tests/amazon/aws/hooks/test_sts.py diff --git a/providers/tests/amazon/aws/hooks/test_verified_permissions.py b/providers/amazon/tests/provider_tests/amazon/aws/hooks/test_verified_permissions.py similarity index 100% rename from providers/tests/amazon/aws/hooks/test_verified_permissions.py rename to 
providers/amazon/tests/provider_tests/amazon/aws/hooks/test_verified_permissions.py diff --git a/providers/tests/amazon/aws/hooks/__init__.py b/providers/amazon/tests/provider_tests/amazon/aws/infrastructure/__init__.py similarity index 100% rename from providers/tests/amazon/aws/hooks/__init__.py rename to providers/amazon/tests/provider_tests/amazon/aws/infrastructure/__init__.py diff --git a/providers/tests/amazon/aws/infrastructure/__init__.py b/providers/amazon/tests/provider_tests/amazon/aws/infrastructure/example_s3_to_redshift/__init__.py similarity index 100% rename from providers/tests/amazon/aws/infrastructure/__init__.py rename to providers/amazon/tests/provider_tests/amazon/aws/infrastructure/example_s3_to_redshift/__init__.py diff --git a/providers/tests/amazon/aws/infrastructure/example_s3_to_redshift/outputs.tf b/providers/amazon/tests/provider_tests/amazon/aws/infrastructure/example_s3_to_redshift/outputs.tf similarity index 100% rename from providers/tests/amazon/aws/infrastructure/example_s3_to_redshift/outputs.tf rename to providers/amazon/tests/provider_tests/amazon/aws/infrastructure/example_s3_to_redshift/outputs.tf diff --git a/providers/tests/amazon/aws/infrastructure/example_s3_to_redshift/resources.tf b/providers/amazon/tests/provider_tests/amazon/aws/infrastructure/example_s3_to_redshift/resources.tf similarity index 100% rename from providers/tests/amazon/aws/infrastructure/example_s3_to_redshift/resources.tf rename to providers/amazon/tests/provider_tests/amazon/aws/infrastructure/example_s3_to_redshift/resources.tf diff --git a/providers/tests/amazon/aws/infrastructure/example_s3_to_redshift/variables.tf b/providers/amazon/tests/provider_tests/amazon/aws/infrastructure/example_s3_to_redshift/variables.tf similarity index 100% rename from providers/tests/amazon/aws/infrastructure/example_s3_to_redshift/variables.tf rename to providers/amazon/tests/provider_tests/amazon/aws/infrastructure/example_s3_to_redshift/variables.tf diff --git 
a/providers/tests/amazon/aws/infrastructure/example_s3_to_redshift/__init__.py b/providers/amazon/tests/provider_tests/amazon/aws/links/__init__.py similarity index 100% rename from providers/tests/amazon/aws/infrastructure/example_s3_to_redshift/__init__.py rename to providers/amazon/tests/provider_tests/amazon/aws/links/__init__.py diff --git a/providers/tests/amazon/aws/links/test_athena.py b/providers/amazon/tests/provider_tests/amazon/aws/links/test_athena.py similarity index 94% rename from providers/tests/amazon/aws/links/test_athena.py rename to providers/amazon/tests/provider_tests/amazon/aws/links/test_athena.py index 2da3f3fa441d18..77c0ed7f7f3e5f 100644 --- a/providers/tests/amazon/aws/links/test_athena.py +++ b/providers/amazon/tests/provider_tests/amazon/aws/links/test_athena.py @@ -17,8 +17,7 @@ from __future__ import annotations from airflow.providers.amazon.aws.links.athena import AthenaQueryResultsLink - -from providers.tests.amazon.aws.links.test_base_aws import BaseAwsLinksTestCase +from provider_tests.amazon.aws.links.test_base_aws import BaseAwsLinksTestCase class TestAthenaQueryResultsLink(BaseAwsLinksTestCase): diff --git a/providers/tests/amazon/aws/links/test_base_aws.py b/providers/amazon/tests/provider_tests/amazon/aws/links/test_base_aws.py similarity index 100% rename from providers/tests/amazon/aws/links/test_base_aws.py rename to providers/amazon/tests/provider_tests/amazon/aws/links/test_base_aws.py diff --git a/providers/tests/amazon/aws/links/test_batch.py b/providers/amazon/tests/provider_tests/amazon/aws/links/test_batch.py similarity index 96% rename from providers/tests/amazon/aws/links/test_batch.py rename to providers/amazon/tests/provider_tests/amazon/aws/links/test_batch.py index eafe49260c9b47..29f666297f9277 100644 --- a/providers/tests/amazon/aws/links/test_batch.py +++ b/providers/amazon/tests/provider_tests/amazon/aws/links/test_batch.py @@ -21,8 +21,7 @@ BatchJobDetailsLink, BatchJobQueueLink, ) - -from 
providers.tests.amazon.aws.links.test_base_aws import BaseAwsLinksTestCase +from provider_tests.amazon.aws.links.test_base_aws import BaseAwsLinksTestCase class TestBatchJobDefinitionLink(BaseAwsLinksTestCase): diff --git a/providers/tests/amazon/aws/links/test_comprehend.py b/providers/amazon/tests/provider_tests/amazon/aws/links/test_comprehend.py similarity index 96% rename from providers/tests/amazon/aws/links/test_comprehend.py rename to providers/amazon/tests/provider_tests/amazon/aws/links/test_comprehend.py index 4e6a4e958474d8..054326b431cd18 100644 --- a/providers/tests/amazon/aws/links/test_comprehend.py +++ b/providers/amazon/tests/provider_tests/amazon/aws/links/test_comprehend.py @@ -20,8 +20,7 @@ ComprehendDocumentClassifierLink, ComprehendPiiEntitiesDetectionLink, ) - -from providers.tests.amazon.aws.links.test_base_aws import BaseAwsLinksTestCase +from provider_tests.amazon.aws.links.test_base_aws import BaseAwsLinksTestCase class TestComprehendPiiEntitiesDetectionLink(BaseAwsLinksTestCase): diff --git a/providers/tests/amazon/aws/links/test_datasync.py b/providers/amazon/tests/provider_tests/amazon/aws/links/test_datasync.py similarity index 95% rename from providers/tests/amazon/aws/links/test_datasync.py rename to providers/amazon/tests/provider_tests/amazon/aws/links/test_datasync.py index 9ff1610ac30c87..87a872e7fb91d2 100644 --- a/providers/tests/amazon/aws/links/test_datasync.py +++ b/providers/amazon/tests/provider_tests/amazon/aws/links/test_datasync.py @@ -17,8 +17,7 @@ from __future__ import annotations from airflow.providers.amazon.aws.links.datasync import DataSyncTaskExecutionLink, DataSyncTaskLink - -from providers.tests.amazon.aws.links.test_base_aws import BaseAwsLinksTestCase +from provider_tests.amazon.aws.links.test_base_aws import BaseAwsLinksTestCase TASK_ID = "task-0b36221bf94ad2bdd" EXECUTION_ID = "exec-00000000000000004" diff --git a/providers/tests/amazon/aws/links/test_ec2.py 
b/providers/amazon/tests/provider_tests/amazon/aws/links/test_ec2.py similarity index 96% rename from providers/tests/amazon/aws/links/test_ec2.py rename to providers/amazon/tests/provider_tests/amazon/aws/links/test_ec2.py index 922b12275e5aa0..5f8368a23ab63b 100644 --- a/providers/tests/amazon/aws/links/test_ec2.py +++ b/providers/amazon/tests/provider_tests/amazon/aws/links/test_ec2.py @@ -17,8 +17,7 @@ from __future__ import annotations from airflow.providers.amazon.aws.links.ec2 import EC2InstanceDashboardLink, EC2InstanceLink - -from providers.tests.amazon.aws.links.test_base_aws import BaseAwsLinksTestCase +from provider_tests.amazon.aws.links.test_base_aws import BaseAwsLinksTestCase class TestEC2InstanceLink(BaseAwsLinksTestCase): diff --git a/providers/tests/amazon/aws/links/test_emr.py b/providers/amazon/tests/provider_tests/amazon/aws/links/test_emr.py similarity index 99% rename from providers/tests/amazon/aws/links/test_emr.py rename to providers/amazon/tests/provider_tests/amazon/aws/links/test_emr.py index 7510b2a2f50d30..16fd17f0deee76 100644 --- a/providers/tests/amazon/aws/links/test_emr.py +++ b/providers/amazon/tests/provider_tests/amazon/aws/links/test_emr.py @@ -32,8 +32,7 @@ get_log_uri, get_serverless_dashboard_url, ) - -from providers.tests.amazon.aws.links.test_base_aws import BaseAwsLinksTestCase +from provider_tests.amazon.aws.links.test_base_aws import BaseAwsLinksTestCase class TestEmrClusterLink(BaseAwsLinksTestCase): diff --git a/providers/tests/amazon/aws/links/test_glue.py b/providers/amazon/tests/provider_tests/amazon/aws/links/test_glue.py similarity index 94% rename from providers/tests/amazon/aws/links/test_glue.py rename to providers/amazon/tests/provider_tests/amazon/aws/links/test_glue.py index 772ac5ee9c7739..5f3371d364e131 100644 --- a/providers/tests/amazon/aws/links/test_glue.py +++ b/providers/amazon/tests/provider_tests/amazon/aws/links/test_glue.py @@ -17,8 +17,7 @@ from __future__ import annotations from 
airflow.providers.amazon.aws.links.glue import GlueJobRunDetailsLink - -from providers.tests.amazon.aws.links.test_base_aws import BaseAwsLinksTestCase +from provider_tests.amazon.aws.links.test_base_aws import BaseAwsLinksTestCase class TestGlueJobRunDetailsLink(BaseAwsLinksTestCase): diff --git a/providers/tests/amazon/aws/links/test_logs.py b/providers/amazon/tests/provider_tests/amazon/aws/links/test_logs.py similarity index 95% rename from providers/tests/amazon/aws/links/test_logs.py rename to providers/amazon/tests/provider_tests/amazon/aws/links/test_logs.py index b596e7517eafb8..4195b59d88907a 100644 --- a/providers/tests/amazon/aws/links/test_logs.py +++ b/providers/amazon/tests/provider_tests/amazon/aws/links/test_logs.py @@ -17,8 +17,7 @@ from __future__ import annotations from airflow.providers.amazon.aws.links.logs import CloudWatchEventsLink - -from providers.tests.amazon.aws.links.test_base_aws import BaseAwsLinksTestCase +from provider_tests.amazon.aws.links.test_base_aws import BaseAwsLinksTestCase class TestCloudWatchEventsLink(BaseAwsLinksTestCase): diff --git a/providers/tests/amazon/aws/links/test_sagemaker.py b/providers/amazon/tests/provider_tests/amazon/aws/links/test_sagemaker.py similarity index 94% rename from providers/tests/amazon/aws/links/test_sagemaker.py rename to providers/amazon/tests/provider_tests/amazon/aws/links/test_sagemaker.py index d656b3559bc253..f3076addc1b3f8 100644 --- a/providers/tests/amazon/aws/links/test_sagemaker.py +++ b/providers/amazon/tests/provider_tests/amazon/aws/links/test_sagemaker.py @@ -17,8 +17,7 @@ from __future__ import annotations from airflow.providers.amazon.aws.links.sagemaker import SageMakerTransformJobLink - -from providers.tests.amazon.aws.links.test_base_aws import BaseAwsLinksTestCase +from provider_tests.amazon.aws.links.test_base_aws import BaseAwsLinksTestCase class TestSageMakerTransformDetailsLink(BaseAwsLinksTestCase): diff --git 
a/providers/tests/amazon/aws/links/test_step_function.py b/providers/amazon/tests/provider_tests/amazon/aws/links/test_step_function.py similarity index 96% rename from providers/tests/amazon/aws/links/test_step_function.py rename to providers/amazon/tests/provider_tests/amazon/aws/links/test_step_function.py index fab1205689dc97..e646d565182281 100644 --- a/providers/tests/amazon/aws/links/test_step_function.py +++ b/providers/amazon/tests/provider_tests/amazon/aws/links/test_step_function.py @@ -22,8 +22,7 @@ StateMachineDetailsLink, StateMachineExecutionsDetailsLink, ) - -from providers.tests.amazon.aws.links.test_base_aws import BaseAwsLinksTestCase +from provider_tests.amazon.aws.links.test_base_aws import BaseAwsLinksTestCase class TestStateMachineDetailsLink(BaseAwsLinksTestCase): diff --git a/providers/tests/amazon/aws/links/__init__.py b/providers/amazon/tests/provider_tests/amazon/aws/log/__init__.py similarity index 100% rename from providers/tests/amazon/aws/links/__init__.py rename to providers/amazon/tests/provider_tests/amazon/aws/log/__init__.py diff --git a/providers/tests/amazon/aws/log/test_cloudwatch_task_handler.py b/providers/amazon/tests/provider_tests/amazon/aws/log/test_cloudwatch_task_handler.py similarity index 100% rename from providers/tests/amazon/aws/log/test_cloudwatch_task_handler.py rename to providers/amazon/tests/provider_tests/amazon/aws/log/test_cloudwatch_task_handler.py diff --git a/providers/tests/amazon/aws/log/test_s3_task_handler.py b/providers/amazon/tests/provider_tests/amazon/aws/log/test_s3_task_handler.py similarity index 100% rename from providers/tests/amazon/aws/log/test_s3_task_handler.py rename to providers/amazon/tests/provider_tests/amazon/aws/log/test_s3_task_handler.py diff --git a/providers/tests/amazon/aws/log/__init__.py b/providers/amazon/tests/provider_tests/amazon/aws/notifications/__init__.py similarity index 100% rename from providers/tests/amazon/aws/log/__init__.py rename to 
providers/amazon/tests/provider_tests/amazon/aws/notifications/__init__.py diff --git a/providers/tests/amazon/aws/notifications/test_chime.py b/providers/amazon/tests/provider_tests/amazon/aws/notifications/test_chime.py similarity index 100% rename from providers/tests/amazon/aws/notifications/test_chime.py rename to providers/amazon/tests/provider_tests/amazon/aws/notifications/test_chime.py diff --git a/providers/tests/amazon/aws/notifications/test_sns.py b/providers/amazon/tests/provider_tests/amazon/aws/notifications/test_sns.py similarity index 100% rename from providers/tests/amazon/aws/notifications/test_sns.py rename to providers/amazon/tests/provider_tests/amazon/aws/notifications/test_sns.py diff --git a/providers/tests/amazon/aws/notifications/test_sqs.py b/providers/amazon/tests/provider_tests/amazon/aws/notifications/test_sqs.py similarity index 100% rename from providers/tests/amazon/aws/notifications/test_sqs.py rename to providers/amazon/tests/provider_tests/amazon/aws/notifications/test_sqs.py diff --git a/providers/tests/amazon/aws/notifications/__init__.py b/providers/amazon/tests/provider_tests/amazon/aws/operators/__init__.py similarity index 100% rename from providers/tests/amazon/aws/notifications/__init__.py rename to providers/amazon/tests/provider_tests/amazon/aws/operators/__init__.py diff --git a/providers/tests/amazon/aws/operators/athena_metadata.json b/providers/amazon/tests/provider_tests/amazon/aws/operators/athena_metadata.json similarity index 100% rename from providers/tests/amazon/aws/operators/athena_metadata.json rename to providers/amazon/tests/provider_tests/amazon/aws/operators/athena_metadata.json diff --git a/providers/tests/amazon/aws/operators/test_appflow.py b/providers/amazon/tests/provider_tests/amazon/aws/operators/test_appflow.py similarity index 100% rename from providers/tests/amazon/aws/operators/test_appflow.py rename to providers/amazon/tests/provider_tests/amazon/aws/operators/test_appflow.py diff --git 
a/providers/tests/amazon/aws/operators/test_athena.py b/providers/amazon/tests/provider_tests/amazon/aws/operators/test_athena.py similarity index 99% rename from providers/tests/amazon/aws/operators/test_athena.py rename to providers/amazon/tests/provider_tests/amazon/aws/operators/test_athena.py index 3ce9487c4155e0..235f6e03d2eb93 100644 --- a/providers/tests/amazon/aws/operators/test_athena.py +++ b/providers/amazon/tests/provider_tests/amazon/aws/operators/test_athena.py @@ -41,8 +41,8 @@ from airflow.utils.state import DagRunState from airflow.utils.timezone import datetime from airflow.utils.types import DagRunType +from provider_tests.amazon.aws.utils.test_template_fields import validate_template_fields -from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields from tests_common.test_utils.version_compat import AIRFLOW_V_3_0_PLUS TEST_DAG_ID = "unit_tests" diff --git a/providers/tests/amazon/aws/operators/test_base_aws.py b/providers/amazon/tests/provider_tests/amazon/aws/operators/test_base_aws.py similarity index 100% rename from providers/tests/amazon/aws/operators/test_base_aws.py rename to providers/amazon/tests/provider_tests/amazon/aws/operators/test_base_aws.py diff --git a/providers/tests/amazon/aws/operators/test_batch.py b/providers/amazon/tests/provider_tests/amazon/aws/operators/test_batch.py similarity index 100% rename from providers/tests/amazon/aws/operators/test_batch.py rename to providers/amazon/tests/provider_tests/amazon/aws/operators/test_batch.py diff --git a/providers/tests/amazon/aws/operators/test_bedrock.py b/providers/amazon/tests/provider_tests/amazon/aws/operators/test_bedrock.py similarity index 99% rename from providers/tests/amazon/aws/operators/test_bedrock.py rename to providers/amazon/tests/provider_tests/amazon/aws/operators/test_bedrock.py index a92b71b432b8ce..d63f6444bcf660 100644 --- a/providers/tests/amazon/aws/operators/test_bedrock.py +++ 
b/providers/amazon/tests/provider_tests/amazon/aws/operators/test_bedrock.py @@ -36,8 +36,7 @@ BedrockInvokeModelOperator, BedrockRaGOperator, ) - -from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields +from provider_tests.amazon.aws.utils.test_template_fields import validate_template_fields if TYPE_CHECKING: from airflow.providers.amazon.aws.hooks.base_aws import BaseAwsConnection diff --git a/providers/tests/amazon/aws/operators/test_cloud_formation.py b/providers/amazon/tests/provider_tests/amazon/aws/operators/test_cloud_formation.py similarity index 98% rename from providers/tests/amazon/aws/operators/test_cloud_formation.py rename to providers/amazon/tests/provider_tests/amazon/aws/operators/test_cloud_formation.py index 47b1659b426f06..f9389dbf2accaf 100644 --- a/providers/tests/amazon/aws/operators/test_cloud_formation.py +++ b/providers/amazon/tests/provider_tests/amazon/aws/operators/test_cloud_formation.py @@ -28,8 +28,7 @@ CloudFormationDeleteStackOperator, ) from airflow.utils import timezone - -from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields +from provider_tests.amazon.aws.utils.test_template_fields import validate_template_fields DEFAULT_DATE = timezone.datetime(2019, 1, 1) DEFAULT_ARGS = {"owner": "airflow", "start_date": DEFAULT_DATE} diff --git a/providers/tests/amazon/aws/operators/test_comprehend.py b/providers/amazon/tests/provider_tests/amazon/aws/operators/test_comprehend.py similarity index 99% rename from providers/tests/amazon/aws/operators/test_comprehend.py rename to providers/amazon/tests/provider_tests/amazon/aws/operators/test_comprehend.py index 9a4ef2d2f74dbf..b97e7b7b0eec39 100644 --- a/providers/tests/amazon/aws/operators/test_comprehend.py +++ b/providers/amazon/tests/provider_tests/amazon/aws/operators/test_comprehend.py @@ -30,8 +30,7 @@ ComprehendStartPiiEntitiesDetectionJobOperator, ) from airflow.utils.types import NOTSET - -from 
providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields +from provider_tests.amazon.aws.utils.test_template_fields import validate_template_fields if TYPE_CHECKING: from airflow.providers.amazon.aws.hooks.base_aws import BaseAwsConnection diff --git a/providers/tests/amazon/aws/operators/test_datasync.py b/providers/amazon/tests/provider_tests/amazon/aws/operators/test_datasync.py similarity index 99% rename from providers/tests/amazon/aws/operators/test_datasync.py rename to providers/amazon/tests/provider_tests/amazon/aws/operators/test_datasync.py index 81c8ea7445dbf1..d26b71a2e043f6 100644 --- a/providers/tests/amazon/aws/operators/test_datasync.py +++ b/providers/amazon/tests/provider_tests/amazon/aws/operators/test_datasync.py @@ -31,8 +31,8 @@ from airflow.utils.state import DagRunState from airflow.utils.timezone import datetime from airflow.utils.types import DagRunType +from provider_tests.amazon.aws.utils.test_template_fields import validate_template_fields -from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields from tests_common.test_utils.version_compat import AIRFLOW_V_3_0_PLUS TEST_DAG_ID = "unit_tests" diff --git a/providers/tests/amazon/aws/operators/test_dms.py b/providers/amazon/tests/provider_tests/amazon/aws/operators/test_dms.py similarity index 99% rename from providers/tests/amazon/aws/operators/test_dms.py rename to providers/amazon/tests/provider_tests/amazon/aws/operators/test_dms.py index f128ab75f2b204..7a4eee540126b2 100644 --- a/providers/tests/amazon/aws/operators/test_dms.py +++ b/providers/amazon/tests/provider_tests/amazon/aws/operators/test_dms.py @@ -47,8 +47,8 @@ from airflow.utils import timezone from airflow.utils.state import DagRunState from airflow.utils.types import DagRunType +from provider_tests.amazon.aws.utils.test_template_fields import validate_template_fields -from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields 
from tests_common.test_utils.version_compat import AIRFLOW_V_3_0_PLUS TASK_ARN = "test_arn" diff --git a/providers/tests/amazon/aws/operators/test_ec2.py b/providers/amazon/tests/provider_tests/amazon/aws/operators/test_ec2.py similarity index 99% rename from providers/tests/amazon/aws/operators/test_ec2.py rename to providers/amazon/tests/provider_tests/amazon/aws/operators/test_ec2.py index b4b576c6567f6c..849eecfdaae0e7 100644 --- a/providers/tests/amazon/aws/operators/test_ec2.py +++ b/providers/amazon/tests/provider_tests/amazon/aws/operators/test_ec2.py @@ -30,8 +30,7 @@ EC2StopInstanceOperator, EC2TerminateInstanceOperator, ) - -from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields +from provider_tests.amazon.aws.utils.test_template_fields import validate_template_fields class BaseEc2TestClass: diff --git a/providers/tests/amazon/aws/operators/test_ecs.py b/providers/amazon/tests/provider_tests/amazon/aws/operators/test_ecs.py similarity index 99% rename from providers/tests/amazon/aws/operators/test_ecs.py rename to providers/amazon/tests/provider_tests/amazon/aws/operators/test_ecs.py index 824ba291f0b716..b4420fbe8c5bab 100644 --- a/providers/tests/amazon/aws/operators/test_ecs.py +++ b/providers/amazon/tests/provider_tests/amazon/aws/operators/test_ecs.py @@ -38,8 +38,7 @@ from airflow.providers.amazon.aws.triggers.ecs import TaskDoneTrigger from airflow.providers.amazon.aws.utils.task_log_fetcher import AwsTaskLogFetcher from airflow.utils.types import NOTSET - -from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields +from provider_tests.amazon.aws.utils.test_template_fields import validate_template_fields CLUSTER_NAME = "test_cluster" CONTAINER_NAME = "e1ed7aac-d9b2-4315-8726-d2432bf11868" diff --git a/providers/tests/amazon/aws/operators/test_eks.py b/providers/amazon/tests/provider_tests/amazon/aws/operators/test_eks.py similarity index 98% rename from 
providers/tests/amazon/aws/operators/test_eks.py rename to providers/amazon/tests/provider_tests/amazon/aws/operators/test_eks.py index 1bdaa3f8434b1d..76295170069cfd 100644 --- a/providers/tests/amazon/aws/operators/test_eks.py +++ b/providers/amazon/tests/provider_tests/amazon/aws/operators/test_eks.py @@ -40,8 +40,7 @@ EksDeleteFargateProfileTrigger, ) from airflow.providers.cncf.kubernetes.utils.pod_manager import OnFinishAction - -from providers.tests.amazon.aws.utils.eks_test_constants import ( +from provider_tests.amazon.aws.utils.eks_test_constants import ( NODEROLE_ARN, POD_EXECUTION_ROLE_ARN, RESOURCES_VPC_CONFIG, @@ -50,9 +49,9 @@ SUBNET_IDS, TASK_ID, ) -from providers.tests.amazon.aws.utils.eks_test_utils import convert_keys -from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields -from providers.tests.amazon.aws.utils.test_waiter import assert_expected_waiter_type +from provider_tests.amazon.aws.utils.eks_test_utils import convert_keys +from provider_tests.amazon.aws.utils.test_template_fields import validate_template_fields +from provider_tests.amazon.aws.utils.test_waiter import assert_expected_waiter_type CLUSTER_NAME = "cluster1" NODEGROUP_NAME = "nodegroup1" diff --git a/providers/tests/amazon/aws/operators/test_emr_add_steps.py b/providers/amazon/tests/provider_tests/amazon/aws/operators/test_emr_add_steps.py similarity index 99% rename from providers/tests/amazon/aws/operators/test_emr_add_steps.py rename to providers/amazon/tests/provider_tests/amazon/aws/operators/test_emr_add_steps.py index c30f0b6d4183b4..8393dbb0099042 100644 --- a/providers/tests/amazon/aws/operators/test_emr_add_steps.py +++ b/providers/amazon/tests/provider_tests/amazon/aws/operators/test_emr_add_steps.py @@ -32,8 +32,8 @@ from airflow.utils import timezone from airflow.utils.state import DagRunState from airflow.utils.types import DagRunType +from provider_tests.amazon.aws.utils.test_template_fields import validate_template_fields 
-from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields from tests_common.test_utils.version_compat import AIRFLOW_V_3_0_PLUS DEFAULT_DATE = timezone.datetime(2017, 1, 1) diff --git a/providers/tests/amazon/aws/operators/test_emr_containers.py b/providers/amazon/tests/provider_tests/amazon/aws/operators/test_emr_containers.py similarity index 98% rename from providers/tests/amazon/aws/operators/test_emr_containers.py rename to providers/amazon/tests/provider_tests/amazon/aws/operators/test_emr_containers.py index b31364bb02f50e..a3243559d20130 100644 --- a/providers/tests/amazon/aws/operators/test_emr_containers.py +++ b/providers/amazon/tests/provider_tests/amazon/aws/operators/test_emr_containers.py @@ -25,8 +25,7 @@ from airflow.providers.amazon.aws.hooks.emr import EmrContainerHook from airflow.providers.amazon.aws.operators.emr import EmrContainerOperator, EmrEksCreateClusterOperator from airflow.providers.amazon.aws.triggers.emr import EmrContainerTrigger - -from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields +from provider_tests.amazon.aws.utils.test_template_fields import validate_template_fields SUBMIT_JOB_SUCCESS_RETURN = { "ResponseMetadata": {"HTTPStatusCode": 200}, diff --git a/providers/tests/amazon/aws/operators/test_emr_create_job_flow.py b/providers/amazon/tests/provider_tests/amazon/aws/operators/test_emr_create_job_flow.py similarity index 97% rename from providers/tests/amazon/aws/operators/test_emr_create_job_flow.py rename to providers/amazon/tests/provider_tests/amazon/aws/operators/test_emr_create_job_flow.py index 8131477e5e460b..3e34b8d025ea3c 100644 --- a/providers/tests/amazon/aws/operators/test_emr_create_job_flow.py +++ b/providers/amazon/tests/provider_tests/amazon/aws/operators/test_emr_create_job_flow.py @@ -34,9 +34,9 @@ from airflow.utils import timezone from airflow.utils.state import DagRunState from airflow.utils.types import DagRunType +from 
provider_tests.amazon.aws.utils.test_template_fields import validate_template_fields +from provider_tests.amazon.aws.utils.test_waiter import assert_expected_waiter_type -from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields -from providers.tests.amazon.aws.utils.test_waiter import assert_expected_waiter_type from tests_common.test_utils.version_compat import AIRFLOW_V_3_0_PLUS TASK_ID = "test_task" diff --git a/providers/tests/amazon/aws/operators/test_emr_modify_cluster.py b/providers/amazon/tests/provider_tests/amazon/aws/operators/test_emr_modify_cluster.py similarity index 96% rename from providers/tests/amazon/aws/operators/test_emr_modify_cluster.py rename to providers/amazon/tests/provider_tests/amazon/aws/operators/test_emr_modify_cluster.py index 4c7aae9b4c2728..b9929d172812f0 100644 --- a/providers/tests/amazon/aws/operators/test_emr_modify_cluster.py +++ b/providers/amazon/tests/provider_tests/amazon/aws/operators/test_emr_modify_cluster.py @@ -25,8 +25,7 @@ from airflow.models.dag import DAG from airflow.providers.amazon.aws.operators.emr import EmrModifyClusterOperator from airflow.utils import timezone - -from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields +from provider_tests.amazon.aws.utils.test_template_fields import validate_template_fields DEFAULT_DATE = timezone.datetime(2017, 1, 1) MODIFY_CLUSTER_SUCCESS_RETURN = {"ResponseMetadata": {"HTTPStatusCode": 200}, "StepConcurrencyLevel": 1} diff --git a/providers/tests/amazon/aws/operators/test_emr_notebook_execution.py b/providers/amazon/tests/provider_tests/amazon/aws/operators/test_emr_notebook_execution.py similarity index 98% rename from providers/tests/amazon/aws/operators/test_emr_notebook_execution.py rename to providers/amazon/tests/provider_tests/amazon/aws/operators/test_emr_notebook_execution.py index 42ce47ea6b6992..ec92ffb6307d6f 100644 --- a/providers/tests/amazon/aws/operators/test_emr_notebook_execution.py 
+++ b/providers/amazon/tests/provider_tests/amazon/aws/operators/test_emr_notebook_execution.py @@ -28,9 +28,8 @@ EmrStartNotebookExecutionOperator, EmrStopNotebookExecutionOperator, ) - -from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields -from providers.tests.amazon.aws.utils.test_waiter import assert_expected_waiter_type +from provider_tests.amazon.aws.utils.test_template_fields import validate_template_fields +from provider_tests.amazon.aws.utils.test_waiter import assert_expected_waiter_type PARAMS = { "EditorId": "test_editor", diff --git a/providers/tests/amazon/aws/operators/test_emr_serverless.py b/providers/amazon/tests/provider_tests/amazon/aws/operators/test_emr_serverless.py similarity index 99% rename from providers/tests/amazon/aws/operators/test_emr_serverless.py rename to providers/amazon/tests/provider_tests/amazon/aws/operators/test_emr_serverless.py index bde6e9895f5c9a..a06dc918b9d0f2 100644 --- a/providers/tests/amazon/aws/operators/test_emr_serverless.py +++ b/providers/amazon/tests/provider_tests/amazon/aws/operators/test_emr_serverless.py @@ -32,8 +32,7 @@ EmrServerlessStopApplicationOperator, ) from airflow.utils.types import NOTSET - -from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields +from provider_tests.amazon.aws.utils.test_template_fields import validate_template_fields if TYPE_CHECKING: from unittest.mock import MagicMock diff --git a/providers/tests/amazon/aws/operators/test_emr_terminate_job_flow.py b/providers/amazon/tests/provider_tests/amazon/aws/operators/test_emr_terminate_job_flow.py similarity index 96% rename from providers/tests/amazon/aws/operators/test_emr_terminate_job_flow.py rename to providers/amazon/tests/provider_tests/amazon/aws/operators/test_emr_terminate_job_flow.py index 6ce3fb29cd6b23..689a12dab54541 100644 --- a/providers/tests/amazon/aws/operators/test_emr_terminate_job_flow.py +++ 
b/providers/amazon/tests/provider_tests/amazon/aws/operators/test_emr_terminate_job_flow.py @@ -24,8 +24,7 @@ from airflow.exceptions import TaskDeferred from airflow.providers.amazon.aws.operators.emr import EmrTerminateJobFlowOperator from airflow.providers.amazon.aws.triggers.emr import EmrTerminateJobFlowTrigger - -from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields +from provider_tests.amazon.aws.utils.test_template_fields import validate_template_fields TERMINATE_SUCCESS_RETURN = {"ResponseMetadata": {"HTTPStatusCode": 200}} diff --git a/providers/tests/amazon/aws/operators/test_eventbridge.py b/providers/amazon/tests/provider_tests/amazon/aws/operators/test_eventbridge.py similarity index 98% rename from providers/tests/amazon/aws/operators/test_eventbridge.py rename to providers/amazon/tests/provider_tests/amazon/aws/operators/test_eventbridge.py index 129edf9ff6247e..84b68b3a298298 100644 --- a/providers/tests/amazon/aws/operators/test_eventbridge.py +++ b/providers/amazon/tests/provider_tests/amazon/aws/operators/test_eventbridge.py @@ -29,8 +29,7 @@ EventBridgePutEventsOperator, EventBridgePutRuleOperator, ) - -from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields +from provider_tests.amazon.aws.utils.test_template_fields import validate_template_fields if TYPE_CHECKING: from unittest.mock import MagicMock diff --git a/providers/tests/amazon/aws/operators/test_glacier.py b/providers/amazon/tests/provider_tests/amazon/aws/operators/test_glacier.py similarity index 97% rename from providers/tests/amazon/aws/operators/test_glacier.py rename to providers/amazon/tests/provider_tests/amazon/aws/operators/test_glacier.py index e9802ba6ee2287..94bade9346204f 100644 --- a/providers/tests/amazon/aws/operators/test_glacier.py +++ b/providers/amazon/tests/provider_tests/amazon/aws/operators/test_glacier.py @@ -26,8 +26,7 @@ GlacierCreateJobOperator, GlacierUploadArchiveOperator, ) - -from 
providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields +from provider_tests.amazon.aws.utils.test_template_fields import validate_template_fields if TYPE_CHECKING: from airflow.providers.amazon.aws.operators.base_aws import AwsBaseOperator diff --git a/providers/tests/amazon/aws/operators/test_glue.py b/providers/amazon/tests/provider_tests/amazon/aws/operators/test_glue.py similarity index 99% rename from providers/tests/amazon/aws/operators/test_glue.py rename to providers/amazon/tests/provider_tests/amazon/aws/operators/test_glue.py index 755c6a18ec1e68..661bd3f5b5519f 100644 --- a/providers/tests/amazon/aws/operators/test_glue.py +++ b/providers/amazon/tests/provider_tests/amazon/aws/operators/test_glue.py @@ -35,8 +35,7 @@ GlueDataQualityRuleSetEvaluationRunOperator, GlueJobOperator, ) - -from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields +from provider_tests.amazon.aws.utils.test_template_fields import validate_template_fields if TYPE_CHECKING: from airflow.models import TaskInstance diff --git a/providers/tests/amazon/aws/operators/test_glue_crawler.py b/providers/amazon/tests/provider_tests/amazon/aws/operators/test_glue_crawler.py similarity index 98% rename from providers/tests/amazon/aws/operators/test_glue_crawler.py rename to providers/amazon/tests/provider_tests/amazon/aws/operators/test_glue_crawler.py index 0a8d0a34b874d8..0dbc3e552d3b0c 100644 --- a/providers/tests/amazon/aws/operators/test_glue_crawler.py +++ b/providers/amazon/tests/provider_tests/amazon/aws/operators/test_glue_crawler.py @@ -26,8 +26,7 @@ from airflow.providers.amazon.aws.hooks.glue_crawler import GlueCrawlerHook from airflow.providers.amazon.aws.hooks.sts import StsHook from airflow.providers.amazon.aws.operators.glue_crawler import GlueCrawlerOperator - -from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields +from provider_tests.amazon.aws.utils.test_template_fields 
import validate_template_fields if TYPE_CHECKING: from airflow.providers.amazon.aws.hooks.base_aws import BaseAwsConnection diff --git a/providers/tests/amazon/aws/operators/test_glue_databrew.py b/providers/amazon/tests/provider_tests/amazon/aws/operators/test_glue_databrew.py similarity index 97% rename from providers/tests/amazon/aws/operators/test_glue_databrew.py rename to providers/amazon/tests/provider_tests/amazon/aws/operators/test_glue_databrew.py index 3f227791506099..cf86415acd5c6d 100644 --- a/providers/tests/amazon/aws/operators/test_glue_databrew.py +++ b/providers/amazon/tests/provider_tests/amazon/aws/operators/test_glue_databrew.py @@ -25,8 +25,7 @@ from airflow.providers.amazon.aws.hooks.glue_databrew import GlueDataBrewHook from airflow.providers.amazon.aws.operators.glue_databrew import GlueDataBrewStartJobOperator - -from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields +from provider_tests.amazon.aws.utils.test_template_fields import validate_template_fields JOB_NAME = "test_job" diff --git a/providers/tests/amazon/aws/operators/test_kinesis_analytics.py b/providers/amazon/tests/provider_tests/amazon/aws/operators/test_kinesis_analytics.py similarity index 99% rename from providers/tests/amazon/aws/operators/test_kinesis_analytics.py rename to providers/amazon/tests/provider_tests/amazon/aws/operators/test_kinesis_analytics.py index 2cbdbe4c786d75..50025a2f338329 100644 --- a/providers/tests/amazon/aws/operators/test_kinesis_analytics.py +++ b/providers/amazon/tests/provider_tests/amazon/aws/operators/test_kinesis_analytics.py @@ -31,8 +31,7 @@ KinesisAnalyticsV2StartApplicationOperator, KinesisAnalyticsV2StopApplicationOperator, ) - -from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields +from provider_tests.amazon.aws.utils.test_template_fields import validate_template_fields if TYPE_CHECKING: from airflow.providers.amazon.aws.hooks.base_aws import BaseAwsConnection 
diff --git a/providers/tests/amazon/aws/operators/test_lambda_function.py b/providers/amazon/tests/provider_tests/amazon/aws/operators/test_lambda_function.py similarity index 99% rename from providers/tests/amazon/aws/operators/test_lambda_function.py rename to providers/amazon/tests/provider_tests/amazon/aws/operators/test_lambda_function.py index 7ec081969103a3..f558905a208805 100644 --- a/providers/tests/amazon/aws/operators/test_lambda_function.py +++ b/providers/amazon/tests/provider_tests/amazon/aws/operators/test_lambda_function.py @@ -29,8 +29,7 @@ LambdaCreateFunctionOperator, LambdaInvokeFunctionOperator, ) - -from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields +from provider_tests.amazon.aws.utils.test_template_fields import validate_template_fields FUNCTION_NAME = "function_name" PAYLOADS = [ diff --git a/providers/tests/amazon/aws/operators/test_neptune.py b/providers/amazon/tests/provider_tests/amazon/aws/operators/test_neptune.py similarity index 99% rename from providers/tests/amazon/aws/operators/test_neptune.py rename to providers/amazon/tests/provider_tests/amazon/aws/operators/test_neptune.py index 6508bf9101eb40..ba5f57a5f8d62c 100644 --- a/providers/tests/amazon/aws/operators/test_neptune.py +++ b/providers/amazon/tests/provider_tests/amazon/aws/operators/test_neptune.py @@ -30,8 +30,7 @@ NeptuneStartDbClusterOperator, NeptuneStopDbClusterOperator, ) - -from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields +from provider_tests.amazon.aws.utils.test_template_fields import validate_template_fields CLUSTER_ID = "test_cluster" diff --git a/providers/tests/amazon/aws/operators/test_quicksight.py b/providers/amazon/tests/provider_tests/amazon/aws/operators/test_quicksight.py similarity index 97% rename from providers/tests/amazon/aws/operators/test_quicksight.py rename to providers/amazon/tests/provider_tests/amazon/aws/operators/test_quicksight.py index 
063c5fc44a4640..304818221b0304 100644 --- a/providers/tests/amazon/aws/operators/test_quicksight.py +++ b/providers/amazon/tests/provider_tests/amazon/aws/operators/test_quicksight.py @@ -21,8 +21,7 @@ from airflow.providers.amazon.aws.hooks.quicksight import QuickSightHook from airflow.providers.amazon.aws.operators.quicksight import QuickSightCreateIngestionOperator - -from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields +from provider_tests.amazon.aws.utils.test_template_fields import validate_template_fields DATA_SET_ID = "DemoDataSet" INGESTION_ID = "DemoDataSet_Ingestion" diff --git a/providers/tests/amazon/aws/operators/test_rds.py b/providers/amazon/tests/provider_tests/amazon/aws/operators/test_rds.py similarity index 99% rename from providers/tests/amazon/aws/operators/test_rds.py rename to providers/amazon/tests/provider_tests/amazon/aws/operators/test_rds.py index 3d0fbcb84096d5..dcad8d460f9740 100644 --- a/providers/tests/amazon/aws/operators/test_rds.py +++ b/providers/amazon/tests/provider_tests/amazon/aws/operators/test_rds.py @@ -44,8 +44,7 @@ ) from airflow.providers.amazon.aws.triggers.rds import RdsDbAvailableTrigger, RdsDbStoppedTrigger from airflow.utils import timezone - -from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields +from provider_tests.amazon.aws.utils.test_template_fields import validate_template_fields if TYPE_CHECKING: from airflow.providers.amazon.aws.hooks.base_aws import AwsGenericHook diff --git a/providers/tests/amazon/aws/operators/test_redshift_cluster.py b/providers/amazon/tests/provider_tests/amazon/aws/operators/test_redshift_cluster.py similarity index 99% rename from providers/tests/amazon/aws/operators/test_redshift_cluster.py rename to providers/amazon/tests/provider_tests/amazon/aws/operators/test_redshift_cluster.py index e67bff3390c08f..c5e1d0f721aa83 100644 --- a/providers/tests/amazon/aws/operators/test_redshift_cluster.py +++ 
b/providers/amazon/tests/provider_tests/amazon/aws/operators/test_redshift_cluster.py @@ -38,8 +38,7 @@ RedshiftPauseClusterTrigger, RedshiftResumeClusterTrigger, ) - -from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields +from provider_tests.amazon.aws.utils.test_template_fields import validate_template_fields class TestRedshiftCreateClusterOperator: diff --git a/providers/tests/amazon/aws/operators/test_redshift_data.py b/providers/amazon/tests/provider_tests/amazon/aws/operators/test_redshift_data.py similarity index 99% rename from providers/tests/amazon/aws/operators/test_redshift_data.py rename to providers/amazon/tests/provider_tests/amazon/aws/operators/test_redshift_data.py index da9a486df43af8..e68956e49cc63a 100644 --- a/providers/tests/amazon/aws/operators/test_redshift_data.py +++ b/providers/amazon/tests/provider_tests/amazon/aws/operators/test_redshift_data.py @@ -25,8 +25,7 @@ from airflow.providers.amazon.aws.hooks.redshift_data import QueryExecutionOutput from airflow.providers.amazon.aws.operators.redshift_data import RedshiftDataOperator from airflow.providers.amazon.aws.triggers.redshift_data import RedshiftDataTrigger - -from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields +from provider_tests.amazon.aws.utils.test_template_fields import validate_template_fields CONN_ID = "aws_conn_test" TASK_ID = "task_id" diff --git a/providers/tests/amazon/aws/operators/test_redshift_sql.py b/providers/amazon/tests/provider_tests/amazon/aws/operators/test_redshift_sql.py similarity index 100% rename from providers/tests/amazon/aws/operators/test_redshift_sql.py rename to providers/amazon/tests/provider_tests/amazon/aws/operators/test_redshift_sql.py diff --git a/providers/tests/amazon/aws/operators/test_s3.py b/providers/amazon/tests/provider_tests/amazon/aws/operators/test_s3.py similarity index 99% rename from providers/tests/amazon/aws/operators/test_s3.py rename to 
providers/amazon/tests/provider_tests/amazon/aws/operators/test_s3.py index ab8508ec5f8012..242c15b2358655 100644 --- a/providers/tests/amazon/aws/operators/test_s3.py +++ b/providers/amazon/tests/provider_tests/amazon/aws/operators/test_s3.py @@ -58,8 +58,7 @@ from airflow.utils.state import DagRunState from airflow.utils.timezone import datetime, utcnow from airflow.utils.types import DagRunType - -from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields +from provider_tests.amazon.aws.utils.test_template_fields import validate_template_fields BUCKET_NAME = os.environ.get("BUCKET_NAME", "test-airflow-bucket") S3_KEY = "test-airflow-key" diff --git a/providers/tests/amazon/aws/operators/test_sagemaker_base.py b/providers/amazon/tests/provider_tests/amazon/aws/operators/test_sagemaker_base.py similarity index 99% rename from providers/tests/amazon/aws/operators/test_sagemaker_base.py rename to providers/amazon/tests/provider_tests/amazon/aws/operators/test_sagemaker_base.py index 408bc3dc57968d..77335eed224614 100644 --- a/providers/tests/amazon/aws/operators/test_sagemaker_base.py +++ b/providers/amazon/tests/provider_tests/amazon/aws/operators/test_sagemaker_base.py @@ -33,8 +33,8 @@ from airflow.utils import timezone from airflow.utils.state import DagRunState from airflow.utils.types import DagRunType +from provider_tests.amazon.aws.utils.test_template_fields import validate_template_fields -from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields from tests_common.test_utils.version_compat import AIRFLOW_V_3_0_PLUS CONFIG: dict = { diff --git a/providers/tests/amazon/aws/operators/test_sagemaker_endpoint.py b/providers/amazon/tests/provider_tests/amazon/aws/operators/test_sagemaker_endpoint.py similarity index 98% rename from providers/tests/amazon/aws/operators/test_sagemaker_endpoint.py rename to providers/amazon/tests/provider_tests/amazon/aws/operators/test_sagemaker_endpoint.py index 
c24cf39e79c3a9..121d6a1a633995 100644 --- a/providers/tests/amazon/aws/operators/test_sagemaker_endpoint.py +++ b/providers/amazon/tests/provider_tests/amazon/aws/operators/test_sagemaker_endpoint.py @@ -27,8 +27,7 @@ from airflow.providers.amazon.aws.operators import sagemaker from airflow.providers.amazon.aws.operators.sagemaker import SageMakerEndpointOperator from airflow.providers.amazon.aws.triggers.sagemaker import SageMakerTrigger - -from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields +from provider_tests.amazon.aws.utils.test_template_fields import validate_template_fields CREATE_MODEL_PARAMS: dict = { "ModelName": "model_name", diff --git a/providers/tests/amazon/aws/operators/test_sagemaker_endpoint_config.py b/providers/amazon/tests/provider_tests/amazon/aws/operators/test_sagemaker_endpoint_config.py similarity index 97% rename from providers/tests/amazon/aws/operators/test_sagemaker_endpoint_config.py rename to providers/amazon/tests/provider_tests/amazon/aws/operators/test_sagemaker_endpoint_config.py index c62721fcf3fe98..83033818a551bc 100644 --- a/providers/tests/amazon/aws/operators/test_sagemaker_endpoint_config.py +++ b/providers/amazon/tests/provider_tests/amazon/aws/operators/test_sagemaker_endpoint_config.py @@ -25,8 +25,7 @@ from airflow.providers.amazon.aws.hooks.sagemaker import SageMakerHook from airflow.providers.amazon.aws.operators import sagemaker from airflow.providers.amazon.aws.operators.sagemaker import SageMakerEndpointConfigOperator - -from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields +from provider_tests.amazon.aws.utils.test_template_fields import validate_template_fields CREATE_ENDPOINT_CONFIG_PARAMS: dict = { "EndpointConfigName": "config_name", diff --git a/providers/tests/amazon/aws/operators/test_sagemaker_model.py b/providers/amazon/tests/provider_tests/amazon/aws/operators/test_sagemaker_model.py similarity index 98% rename from 
providers/tests/amazon/aws/operators/test_sagemaker_model.py rename to providers/amazon/tests/provider_tests/amazon/aws/operators/test_sagemaker_model.py index 095c6990ce51a1..38649881063650 100644 --- a/providers/tests/amazon/aws/operators/test_sagemaker_model.py +++ b/providers/amazon/tests/provider_tests/amazon/aws/operators/test_sagemaker_model.py @@ -31,8 +31,7 @@ SageMakerModelOperator, SageMakerRegisterModelVersionOperator, ) - -from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields +from provider_tests.amazon.aws.utils.test_template_fields import validate_template_fields CREATE_MODEL_PARAMS: dict = { "ModelName": "model_name", diff --git a/providers/tests/amazon/aws/operators/test_sagemaker_notebook.py b/providers/amazon/tests/provider_tests/amazon/aws/operators/test_sagemaker_notebook.py similarity index 98% rename from providers/tests/amazon/aws/operators/test_sagemaker_notebook.py rename to providers/amazon/tests/provider_tests/amazon/aws/operators/test_sagemaker_notebook.py index f4cbb0335d14d2..ed62fa59163cd0 100644 --- a/providers/tests/amazon/aws/operators/test_sagemaker_notebook.py +++ b/providers/amazon/tests/provider_tests/amazon/aws/operators/test_sagemaker_notebook.py @@ -30,8 +30,7 @@ SageMakerStartNoteBookOperator, SageMakerStopNotebookOperator, ) - -from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields +from provider_tests.amazon.aws.utils.test_template_fields import validate_template_fields INSTANCE_NAME = "notebook" INSTANCE_TYPE = "ml.t3.medium" diff --git a/providers/tests/amazon/aws/operators/test_sagemaker_pipeline.py b/providers/amazon/tests/provider_tests/amazon/aws/operators/test_sagemaker_pipeline.py similarity index 97% rename from providers/tests/amazon/aws/operators/test_sagemaker_pipeline.py rename to providers/amazon/tests/provider_tests/amazon/aws/operators/test_sagemaker_pipeline.py index 0a2d4d00ad85dc..1b17d198389c7c 100644 --- 
a/providers/tests/amazon/aws/operators/test_sagemaker_pipeline.py +++ b/providers/amazon/tests/provider_tests/amazon/aws/operators/test_sagemaker_pipeline.py @@ -29,8 +29,7 @@ SageMakerStopPipelineOperator, ) from airflow.providers.amazon.aws.triggers.sagemaker import SageMakerPipelineTrigger - -from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields +from provider_tests.amazon.aws.utils.test_template_fields import validate_template_fields if TYPE_CHECKING: from unittest.mock import MagicMock diff --git a/providers/tests/amazon/aws/operators/test_sagemaker_processing.py b/providers/amazon/tests/provider_tests/amazon/aws/operators/test_sagemaker_processing.py similarity index 99% rename from providers/tests/amazon/aws/operators/test_sagemaker_processing.py rename to providers/amazon/tests/provider_tests/amazon/aws/operators/test_sagemaker_processing.py index 898aa68a65100c..25d282dfb47cee 100644 --- a/providers/tests/amazon/aws/operators/test_sagemaker_processing.py +++ b/providers/amazon/tests/provider_tests/amazon/aws/operators/test_sagemaker_processing.py @@ -31,8 +31,7 @@ from airflow.providers.amazon.aws.triggers.sagemaker import SageMakerTrigger from airflow.providers.common.compat.openlineage.facet import Dataset from airflow.providers.openlineage.extractors import OperatorLineage - -from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields +from provider_tests.amazon.aws.utils.test_template_fields import validate_template_fields CREATE_PROCESSING_PARAMS: dict = { "AppSpecification": { diff --git a/providers/tests/amazon/aws/operators/test_sagemaker_training.py b/providers/amazon/tests/provider_tests/amazon/aws/operators/test_sagemaker_training.py similarity index 99% rename from providers/tests/amazon/aws/operators/test_sagemaker_training.py rename to providers/amazon/tests/provider_tests/amazon/aws/operators/test_sagemaker_training.py index f37a41967b9ccf..aaa2ebf15131e2 100644 --- 
a/providers/tests/amazon/aws/operators/test_sagemaker_training.py +++ b/providers/amazon/tests/provider_tests/amazon/aws/operators/test_sagemaker_training.py @@ -31,8 +31,7 @@ ) from airflow.providers.common.compat.openlineage.facet import Dataset from airflow.providers.openlineage.extractors import OperatorLineage - -from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields +from provider_tests.amazon.aws.utils.test_template_fields import validate_template_fields EXPECTED_INTEGER_FIELDS: list[list[str]] = [ ["ResourceConfig", "InstanceCount"], diff --git a/providers/tests/amazon/aws/operators/test_sagemaker_transform.py b/providers/amazon/tests/provider_tests/amazon/aws/operators/test_sagemaker_transform.py similarity index 99% rename from providers/tests/amazon/aws/operators/test_sagemaker_transform.py rename to providers/amazon/tests/provider_tests/amazon/aws/operators/test_sagemaker_transform.py index 76f89cb710eda1..d61f4b67b14441 100644 --- a/providers/tests/amazon/aws/operators/test_sagemaker_transform.py +++ b/providers/amazon/tests/provider_tests/amazon/aws/operators/test_sagemaker_transform.py @@ -31,8 +31,7 @@ from airflow.providers.amazon.aws.triggers.sagemaker import SageMakerTrigger from airflow.providers.common.compat.openlineage.facet import Dataset from airflow.providers.openlineage.extractors import OperatorLineage - -from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields +from provider_tests.amazon.aws.utils.test_template_fields import validate_template_fields EXPECTED_INTEGER_FIELDS: list[list[str]] = [ ["Transform", "TransformResources", "InstanceCount"], diff --git a/providers/tests/amazon/aws/operators/test_sagemaker_tuning.py b/providers/amazon/tests/provider_tests/amazon/aws/operators/test_sagemaker_tuning.py similarity index 98% rename from providers/tests/amazon/aws/operators/test_sagemaker_tuning.py rename to 
providers/amazon/tests/provider_tests/amazon/aws/operators/test_sagemaker_tuning.py index 9048b19b0ec714..c09181ed30b3c3 100644 --- a/providers/tests/amazon/aws/operators/test_sagemaker_tuning.py +++ b/providers/amazon/tests/provider_tests/amazon/aws/operators/test_sagemaker_tuning.py @@ -26,8 +26,7 @@ from airflow.providers.amazon.aws.operators import sagemaker from airflow.providers.amazon.aws.operators.sagemaker import SageMakerTuningOperator from airflow.providers.amazon.aws.triggers.sagemaker import SageMakerTrigger - -from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields +from provider_tests.amazon.aws.utils.test_template_fields import validate_template_fields EXPECTED_INTEGER_FIELDS: list[list[str]] = [ ["HyperParameterTuningJobConfig", "ResourceLimits", "MaxNumberOfTrainingJobs"], diff --git a/providers/tests/amazon/aws/operators/test_sns.py b/providers/amazon/tests/provider_tests/amazon/aws/operators/test_sns.py similarity index 96% rename from providers/tests/amazon/aws/operators/test_sns.py rename to providers/amazon/tests/provider_tests/amazon/aws/operators/test_sns.py index 62dbeaeb062944..ab13c2d6524d51 100644 --- a/providers/tests/amazon/aws/operators/test_sns.py +++ b/providers/amazon/tests/provider_tests/amazon/aws/operators/test_sns.py @@ -22,8 +22,7 @@ import pytest from airflow.providers.amazon.aws.operators.sns import SnsPublishOperator - -from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields +from provider_tests.amazon.aws.utils.test_template_fields import validate_template_fields TASK_ID = "sns_publish_job" AWS_CONN_ID = "custom_aws_conn" diff --git a/providers/tests/amazon/aws/operators/test_sqs.py b/providers/amazon/tests/provider_tests/amazon/aws/operators/test_sqs.py similarity index 98% rename from providers/tests/amazon/aws/operators/test_sqs.py rename to providers/amazon/tests/provider_tests/amazon/aws/operators/test_sqs.py index 6c39f5fef72ee7..93e0ed89b2b9ff 
100644 --- a/providers/tests/amazon/aws/operators/test_sqs.py +++ b/providers/amazon/tests/provider_tests/amazon/aws/operators/test_sqs.py @@ -25,8 +25,7 @@ from airflow.providers.amazon.aws.hooks.sqs import SqsHook from airflow.providers.amazon.aws.operators.sqs import SqsPublishOperator - -from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields +from provider_tests.amazon.aws.utils.test_template_fields import validate_template_fields REGION_NAME = "eu-west-1" QUEUE_NAME = "test-queue" diff --git a/providers/tests/amazon/aws/operators/test_step_function.py b/providers/amazon/tests/provider_tests/amazon/aws/operators/test_step_function.py similarity index 99% rename from providers/tests/amazon/aws/operators/test_step_function.py rename to providers/amazon/tests/provider_tests/amazon/aws/operators/test_step_function.py index 4450a6a40d0268..ec5a72269fb07e 100644 --- a/providers/tests/amazon/aws/operators/test_step_function.py +++ b/providers/amazon/tests/provider_tests/amazon/aws/operators/test_step_function.py @@ -26,8 +26,7 @@ StepFunctionGetExecutionOutputOperator, StepFunctionStartExecutionOperator, ) - -from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields +from provider_tests.amazon.aws.utils.test_template_fields import validate_template_fields EXECUTION_ARN = ( "arn:aws:states:us-east-1:123456789012:execution:" diff --git a/providers/tests/amazon/aws/operators/__init__.py b/providers/amazon/tests/provider_tests/amazon/aws/secrets/__init__.py similarity index 100% rename from providers/tests/amazon/aws/operators/__init__.py rename to providers/amazon/tests/provider_tests/amazon/aws/secrets/__init__.py diff --git a/providers/tests/amazon/aws/secrets/test_secrets_manager.py b/providers/amazon/tests/provider_tests/amazon/aws/secrets/test_secrets_manager.py similarity index 100% rename from providers/tests/amazon/aws/secrets/test_secrets_manager.py rename to 
providers/amazon/tests/provider_tests/amazon/aws/secrets/test_secrets_manager.py diff --git a/providers/tests/amazon/aws/secrets/test_systems_manager.py b/providers/amazon/tests/provider_tests/amazon/aws/secrets/test_systems_manager.py similarity index 100% rename from providers/tests/amazon/aws/secrets/test_systems_manager.py rename to providers/amazon/tests/provider_tests/amazon/aws/secrets/test_systems_manager.py diff --git a/providers/tests/amazon/aws/secrets/__init__.py b/providers/amazon/tests/provider_tests/amazon/aws/sensors/__init__.py similarity index 100% rename from providers/tests/amazon/aws/secrets/__init__.py rename to providers/amazon/tests/provider_tests/amazon/aws/sensors/__init__.py diff --git a/providers/tests/amazon/aws/sensors/test_athena.py b/providers/amazon/tests/provider_tests/amazon/aws/sensors/test_athena.py similarity index 100% rename from providers/tests/amazon/aws/sensors/test_athena.py rename to providers/amazon/tests/provider_tests/amazon/aws/sensors/test_athena.py diff --git a/providers/tests/amazon/aws/sensors/test_base_aws.py b/providers/amazon/tests/provider_tests/amazon/aws/sensors/test_base_aws.py similarity index 100% rename from providers/tests/amazon/aws/sensors/test_base_aws.py rename to providers/amazon/tests/provider_tests/amazon/aws/sensors/test_base_aws.py diff --git a/providers/tests/amazon/aws/sensors/test_batch.py b/providers/amazon/tests/provider_tests/amazon/aws/sensors/test_batch.py similarity index 100% rename from providers/tests/amazon/aws/sensors/test_batch.py rename to providers/amazon/tests/provider_tests/amazon/aws/sensors/test_batch.py diff --git a/providers/tests/amazon/aws/sensors/test_bedrock.py b/providers/amazon/tests/provider_tests/amazon/aws/sensors/test_bedrock.py similarity index 100% rename from providers/tests/amazon/aws/sensors/test_bedrock.py rename to providers/amazon/tests/provider_tests/amazon/aws/sensors/test_bedrock.py diff --git 
a/providers/tests/amazon/aws/sensors/test_cloud_formation.py b/providers/amazon/tests/provider_tests/amazon/aws/sensors/test_cloud_formation.py similarity index 100% rename from providers/tests/amazon/aws/sensors/test_cloud_formation.py rename to providers/amazon/tests/provider_tests/amazon/aws/sensors/test_cloud_formation.py diff --git a/providers/tests/amazon/aws/sensors/test_comprehend.py b/providers/amazon/tests/provider_tests/amazon/aws/sensors/test_comprehend.py similarity index 100% rename from providers/tests/amazon/aws/sensors/test_comprehend.py rename to providers/amazon/tests/provider_tests/amazon/aws/sensors/test_comprehend.py diff --git a/providers/tests/amazon/aws/sensors/test_dms.py b/providers/amazon/tests/provider_tests/amazon/aws/sensors/test_dms.py similarity index 100% rename from providers/tests/amazon/aws/sensors/test_dms.py rename to providers/amazon/tests/provider_tests/amazon/aws/sensors/test_dms.py diff --git a/providers/tests/amazon/aws/sensors/test_dynamodb.py b/providers/amazon/tests/provider_tests/amazon/aws/sensors/test_dynamodb.py similarity index 100% rename from providers/tests/amazon/aws/sensors/test_dynamodb.py rename to providers/amazon/tests/provider_tests/amazon/aws/sensors/test_dynamodb.py diff --git a/providers/tests/amazon/aws/sensors/test_ec2.py b/providers/amazon/tests/provider_tests/amazon/aws/sensors/test_ec2.py similarity index 100% rename from providers/tests/amazon/aws/sensors/test_ec2.py rename to providers/amazon/tests/provider_tests/amazon/aws/sensors/test_ec2.py diff --git a/providers/tests/amazon/aws/sensors/test_ecs.py b/providers/amazon/tests/provider_tests/amazon/aws/sensors/test_ecs.py similarity index 100% rename from providers/tests/amazon/aws/sensors/test_ecs.py rename to providers/amazon/tests/provider_tests/amazon/aws/sensors/test_ecs.py diff --git a/providers/tests/amazon/aws/sensors/test_eks.py b/providers/amazon/tests/provider_tests/amazon/aws/sensors/test_eks.py similarity index 100% rename from 
providers/tests/amazon/aws/sensors/test_eks.py rename to providers/amazon/tests/provider_tests/amazon/aws/sensors/test_eks.py diff --git a/providers/tests/amazon/aws/sensors/test_emr_base.py b/providers/amazon/tests/provider_tests/amazon/aws/sensors/test_emr_base.py similarity index 100% rename from providers/tests/amazon/aws/sensors/test_emr_base.py rename to providers/amazon/tests/provider_tests/amazon/aws/sensors/test_emr_base.py diff --git a/providers/tests/amazon/aws/sensors/test_emr_containers.py b/providers/amazon/tests/provider_tests/amazon/aws/sensors/test_emr_containers.py similarity index 100% rename from providers/tests/amazon/aws/sensors/test_emr_containers.py rename to providers/amazon/tests/provider_tests/amazon/aws/sensors/test_emr_containers.py diff --git a/providers/tests/amazon/aws/sensors/test_emr_job_flow.py b/providers/amazon/tests/provider_tests/amazon/aws/sensors/test_emr_job_flow.py similarity index 100% rename from providers/tests/amazon/aws/sensors/test_emr_job_flow.py rename to providers/amazon/tests/provider_tests/amazon/aws/sensors/test_emr_job_flow.py diff --git a/providers/tests/amazon/aws/sensors/test_emr_notebook_execution.py b/providers/amazon/tests/provider_tests/amazon/aws/sensors/test_emr_notebook_execution.py similarity index 100% rename from providers/tests/amazon/aws/sensors/test_emr_notebook_execution.py rename to providers/amazon/tests/provider_tests/amazon/aws/sensors/test_emr_notebook_execution.py diff --git a/providers/tests/amazon/aws/sensors/test_emr_serverless_application.py b/providers/amazon/tests/provider_tests/amazon/aws/sensors/test_emr_serverless_application.py similarity index 100% rename from providers/tests/amazon/aws/sensors/test_emr_serverless_application.py rename to providers/amazon/tests/provider_tests/amazon/aws/sensors/test_emr_serverless_application.py diff --git a/providers/tests/amazon/aws/sensors/test_emr_serverless_job.py 
b/providers/amazon/tests/provider_tests/amazon/aws/sensors/test_emr_serverless_job.py similarity index 100% rename from providers/tests/amazon/aws/sensors/test_emr_serverless_job.py rename to providers/amazon/tests/provider_tests/amazon/aws/sensors/test_emr_serverless_job.py diff --git a/providers/tests/amazon/aws/sensors/test_emr_step.py b/providers/amazon/tests/provider_tests/amazon/aws/sensors/test_emr_step.py similarity index 100% rename from providers/tests/amazon/aws/sensors/test_emr_step.py rename to providers/amazon/tests/provider_tests/amazon/aws/sensors/test_emr_step.py diff --git a/providers/tests/amazon/aws/sensors/test_glacier.py b/providers/amazon/tests/provider_tests/amazon/aws/sensors/test_glacier.py similarity index 100% rename from providers/tests/amazon/aws/sensors/test_glacier.py rename to providers/amazon/tests/provider_tests/amazon/aws/sensors/test_glacier.py diff --git a/providers/tests/amazon/aws/sensors/test_glue.py b/providers/amazon/tests/provider_tests/amazon/aws/sensors/test_glue.py similarity index 100% rename from providers/tests/amazon/aws/sensors/test_glue.py rename to providers/amazon/tests/provider_tests/amazon/aws/sensors/test_glue.py diff --git a/providers/tests/amazon/aws/sensors/test_glue_catalog_partition.py b/providers/amazon/tests/provider_tests/amazon/aws/sensors/test_glue_catalog_partition.py similarity index 100% rename from providers/tests/amazon/aws/sensors/test_glue_catalog_partition.py rename to providers/amazon/tests/provider_tests/amazon/aws/sensors/test_glue_catalog_partition.py diff --git a/providers/tests/amazon/aws/sensors/test_glue_crawler.py b/providers/amazon/tests/provider_tests/amazon/aws/sensors/test_glue_crawler.py similarity index 100% rename from providers/tests/amazon/aws/sensors/test_glue_crawler.py rename to providers/amazon/tests/provider_tests/amazon/aws/sensors/test_glue_crawler.py diff --git a/providers/tests/amazon/aws/sensors/test_glue_data_quality.py 
b/providers/amazon/tests/provider_tests/amazon/aws/sensors/test_glue_data_quality.py similarity index 100% rename from providers/tests/amazon/aws/sensors/test_glue_data_quality.py rename to providers/amazon/tests/provider_tests/amazon/aws/sensors/test_glue_data_quality.py diff --git a/providers/tests/amazon/aws/sensors/test_kinesis_analytics.py b/providers/amazon/tests/provider_tests/amazon/aws/sensors/test_kinesis_analytics.py similarity index 100% rename from providers/tests/amazon/aws/sensors/test_kinesis_analytics.py rename to providers/amazon/tests/provider_tests/amazon/aws/sensors/test_kinesis_analytics.py diff --git a/providers/tests/amazon/aws/sensors/test_lambda_function.py b/providers/amazon/tests/provider_tests/amazon/aws/sensors/test_lambda_function.py similarity index 100% rename from providers/tests/amazon/aws/sensors/test_lambda_function.py rename to providers/amazon/tests/provider_tests/amazon/aws/sensors/test_lambda_function.py diff --git a/providers/tests/amazon/aws/sensors/test_opensearch_serverless.py b/providers/amazon/tests/provider_tests/amazon/aws/sensors/test_opensearch_serverless.py similarity index 100% rename from providers/tests/amazon/aws/sensors/test_opensearch_serverless.py rename to providers/amazon/tests/provider_tests/amazon/aws/sensors/test_opensearch_serverless.py diff --git a/providers/tests/amazon/aws/sensors/test_quicksight.py b/providers/amazon/tests/provider_tests/amazon/aws/sensors/test_quicksight.py similarity index 100% rename from providers/tests/amazon/aws/sensors/test_quicksight.py rename to providers/amazon/tests/provider_tests/amazon/aws/sensors/test_quicksight.py diff --git a/providers/tests/amazon/aws/sensors/test_rds.py b/providers/amazon/tests/provider_tests/amazon/aws/sensors/test_rds.py similarity index 100% rename from providers/tests/amazon/aws/sensors/test_rds.py rename to providers/amazon/tests/provider_tests/amazon/aws/sensors/test_rds.py diff --git 
a/providers/tests/amazon/aws/sensors/test_redshift_cluster.py b/providers/amazon/tests/provider_tests/amazon/aws/sensors/test_redshift_cluster.py similarity index 100% rename from providers/tests/amazon/aws/sensors/test_redshift_cluster.py rename to providers/amazon/tests/provider_tests/amazon/aws/sensors/test_redshift_cluster.py diff --git a/providers/tests/amazon/aws/sensors/test_s3.py b/providers/amazon/tests/provider_tests/amazon/aws/sensors/test_s3.py similarity index 100% rename from providers/tests/amazon/aws/sensors/test_s3.py rename to providers/amazon/tests/provider_tests/amazon/aws/sensors/test_s3.py diff --git a/providers/tests/amazon/aws/sensors/test_sagemaker_automl.py b/providers/amazon/tests/provider_tests/amazon/aws/sensors/test_sagemaker_automl.py similarity index 100% rename from providers/tests/amazon/aws/sensors/test_sagemaker_automl.py rename to providers/amazon/tests/provider_tests/amazon/aws/sensors/test_sagemaker_automl.py diff --git a/providers/tests/amazon/aws/sensors/test_sagemaker_base.py b/providers/amazon/tests/provider_tests/amazon/aws/sensors/test_sagemaker_base.py similarity index 100% rename from providers/tests/amazon/aws/sensors/test_sagemaker_base.py rename to providers/amazon/tests/provider_tests/amazon/aws/sensors/test_sagemaker_base.py diff --git a/providers/tests/amazon/aws/sensors/test_sagemaker_endpoint.py b/providers/amazon/tests/provider_tests/amazon/aws/sensors/test_sagemaker_endpoint.py similarity index 100% rename from providers/tests/amazon/aws/sensors/test_sagemaker_endpoint.py rename to providers/amazon/tests/provider_tests/amazon/aws/sensors/test_sagemaker_endpoint.py diff --git a/providers/tests/amazon/aws/sensors/test_sagemaker_pipeline.py b/providers/amazon/tests/provider_tests/amazon/aws/sensors/test_sagemaker_pipeline.py similarity index 100% rename from providers/tests/amazon/aws/sensors/test_sagemaker_pipeline.py rename to providers/amazon/tests/provider_tests/amazon/aws/sensors/test_sagemaker_pipeline.py 
diff --git a/providers/tests/amazon/aws/sensors/test_sagemaker_processing.py b/providers/amazon/tests/provider_tests/amazon/aws/sensors/test_sagemaker_processing.py similarity index 100% rename from providers/tests/amazon/aws/sensors/test_sagemaker_processing.py rename to providers/amazon/tests/provider_tests/amazon/aws/sensors/test_sagemaker_processing.py diff --git a/providers/tests/amazon/aws/sensors/test_sagemaker_training.py b/providers/amazon/tests/provider_tests/amazon/aws/sensors/test_sagemaker_training.py similarity index 100% rename from providers/tests/amazon/aws/sensors/test_sagemaker_training.py rename to providers/amazon/tests/provider_tests/amazon/aws/sensors/test_sagemaker_training.py diff --git a/providers/tests/amazon/aws/sensors/test_sagemaker_transform.py b/providers/amazon/tests/provider_tests/amazon/aws/sensors/test_sagemaker_transform.py similarity index 100% rename from providers/tests/amazon/aws/sensors/test_sagemaker_transform.py rename to providers/amazon/tests/provider_tests/amazon/aws/sensors/test_sagemaker_transform.py diff --git a/providers/tests/amazon/aws/sensors/test_sagemaker_tuning.py b/providers/amazon/tests/provider_tests/amazon/aws/sensors/test_sagemaker_tuning.py similarity index 100% rename from providers/tests/amazon/aws/sensors/test_sagemaker_tuning.py rename to providers/amazon/tests/provider_tests/amazon/aws/sensors/test_sagemaker_tuning.py diff --git a/providers/tests/amazon/aws/sensors/test_sqs.py b/providers/amazon/tests/provider_tests/amazon/aws/sensors/test_sqs.py similarity index 100% rename from providers/tests/amazon/aws/sensors/test_sqs.py rename to providers/amazon/tests/provider_tests/amazon/aws/sensors/test_sqs.py diff --git a/providers/tests/amazon/aws/sensors/test_step_function.py b/providers/amazon/tests/provider_tests/amazon/aws/sensors/test_step_function.py similarity index 100% rename from providers/tests/amazon/aws/sensors/test_step_function.py rename to 
providers/amazon/tests/provider_tests/amazon/aws/sensors/test_step_function.py diff --git a/providers/tests/amazon/aws/sensors/__init__.py b/providers/amazon/tests/provider_tests/amazon/aws/system/__init__.py similarity index 100% rename from providers/tests/amazon/aws/sensors/__init__.py rename to providers/amazon/tests/provider_tests/amazon/aws/system/__init__.py diff --git a/providers/tests/amazon/aws/system/__init__.py b/providers/amazon/tests/provider_tests/amazon/aws/system/utils/__init__.py similarity index 100% rename from providers/tests/amazon/aws/system/__init__.py rename to providers/amazon/tests/provider_tests/amazon/aws/system/utils/__init__.py diff --git a/providers/tests/amazon/aws/system/utils/test_helpers.py b/providers/amazon/tests/provider_tests/amazon/aws/system/utils/test_helpers.py similarity index 97% rename from providers/tests/amazon/aws/system/utils/test_helpers.py rename to providers/amazon/tests/provider_tests/amazon/aws/system/utils/test_helpers.py index 7f625d8433181b..1ee2cf0f3edfb6 100644 --- a/providers/tests/amazon/aws/system/utils/test_helpers.py +++ b/providers/amazon/tests/provider_tests/amazon/aws/system/utils/test_helpers.py @@ -28,9 +28,8 @@ import pytest from moto import mock_aws - -from providers.tests.system.amazon.aws import utils -from providers.tests.system.amazon.aws.utils import ( +from providers.amazon.tests.system.amazon.aws import utils +from providers.amazon.tests.system.amazon.aws.utils import ( DEFAULT_ENV_ID_LEN, DEFAULT_ENV_ID_PREFIX, ENV_ID_ENVIRON_KEY, diff --git a/providers/tests/amazon/aws/system/utils/__init__.py b/providers/amazon/tests/provider_tests/amazon/aws/transfers/__init__.py similarity index 100% rename from providers/tests/amazon/aws/system/utils/__init__.py rename to providers/amazon/tests/provider_tests/amazon/aws/transfers/__init__.py diff --git a/providers/tests/amazon/aws/transfers/test_azure_blob_to_s3.py 
b/providers/amazon/tests/provider_tests/amazon/aws/transfers/test_azure_blob_to_s3.py similarity index 100% rename from providers/tests/amazon/aws/transfers/test_azure_blob_to_s3.py rename to providers/amazon/tests/provider_tests/amazon/aws/transfers/test_azure_blob_to_s3.py diff --git a/providers/tests/amazon/aws/transfers/test_base.py b/providers/amazon/tests/provider_tests/amazon/aws/transfers/test_base.py similarity index 100% rename from providers/tests/amazon/aws/transfers/test_base.py rename to providers/amazon/tests/provider_tests/amazon/aws/transfers/test_base.py diff --git a/providers/tests/amazon/aws/transfers/test_dynamodb_to_s3.py b/providers/amazon/tests/provider_tests/amazon/aws/transfers/test_dynamodb_to_s3.py similarity index 100% rename from providers/tests/amazon/aws/transfers/test_dynamodb_to_s3.py rename to providers/amazon/tests/provider_tests/amazon/aws/transfers/test_dynamodb_to_s3.py diff --git a/providers/tests/amazon/aws/transfers/test_exasol_to_s3.py b/providers/amazon/tests/provider_tests/amazon/aws/transfers/test_exasol_to_s3.py similarity index 100% rename from providers/tests/amazon/aws/transfers/test_exasol_to_s3.py rename to providers/amazon/tests/provider_tests/amazon/aws/transfers/test_exasol_to_s3.py diff --git a/providers/tests/amazon/aws/transfers/test_ftp_to_s3.py b/providers/amazon/tests/provider_tests/amazon/aws/transfers/test_ftp_to_s3.py similarity index 100% rename from providers/tests/amazon/aws/transfers/test_ftp_to_s3.py rename to providers/amazon/tests/provider_tests/amazon/aws/transfers/test_ftp_to_s3.py diff --git a/providers/tests/amazon/aws/transfers/test_gcs_to_s3.py b/providers/amazon/tests/provider_tests/amazon/aws/transfers/test_gcs_to_s3.py similarity index 100% rename from providers/tests/amazon/aws/transfers/test_gcs_to_s3.py rename to providers/amazon/tests/provider_tests/amazon/aws/transfers/test_gcs_to_s3.py diff --git a/providers/tests/amazon/aws/transfers/test_glacier_to_gcs.py 
b/providers/amazon/tests/provider_tests/amazon/aws/transfers/test_glacier_to_gcs.py similarity index 100% rename from providers/tests/amazon/aws/transfers/test_glacier_to_gcs.py rename to providers/amazon/tests/provider_tests/amazon/aws/transfers/test_glacier_to_gcs.py diff --git a/providers/tests/amazon/aws/transfers/test_google_api_to_s3.py b/providers/amazon/tests/provider_tests/amazon/aws/transfers/test_google_api_to_s3.py similarity index 100% rename from providers/tests/amazon/aws/transfers/test_google_api_to_s3.py rename to providers/amazon/tests/provider_tests/amazon/aws/transfers/test_google_api_to_s3.py diff --git a/providers/tests/amazon/aws/transfers/test_hive_to_dynamodb.py b/providers/amazon/tests/provider_tests/amazon/aws/transfers/test_hive_to_dynamodb.py similarity index 100% rename from providers/tests/amazon/aws/transfers/test_hive_to_dynamodb.py rename to providers/amazon/tests/provider_tests/amazon/aws/transfers/test_hive_to_dynamodb.py diff --git a/providers/tests/amazon/aws/transfers/test_http_to_s3.py b/providers/amazon/tests/provider_tests/amazon/aws/transfers/test_http_to_s3.py similarity index 100% rename from providers/tests/amazon/aws/transfers/test_http_to_s3.py rename to providers/amazon/tests/provider_tests/amazon/aws/transfers/test_http_to_s3.py diff --git a/providers/tests/amazon/aws/transfers/test_imap_attachment_to_s3.py b/providers/amazon/tests/provider_tests/amazon/aws/transfers/test_imap_attachment_to_s3.py similarity index 100% rename from providers/tests/amazon/aws/transfers/test_imap_attachment_to_s3.py rename to providers/amazon/tests/provider_tests/amazon/aws/transfers/test_imap_attachment_to_s3.py diff --git a/providers/tests/amazon/aws/transfers/test_local_to_s3.py b/providers/amazon/tests/provider_tests/amazon/aws/transfers/test_local_to_s3.py similarity index 100% rename from providers/tests/amazon/aws/transfers/test_local_to_s3.py rename to 
providers/amazon/tests/provider_tests/amazon/aws/transfers/test_local_to_s3.py diff --git a/providers/tests/amazon/aws/transfers/test_mongo_to_s3.py b/providers/amazon/tests/provider_tests/amazon/aws/transfers/test_mongo_to_s3.py similarity index 100% rename from providers/tests/amazon/aws/transfers/test_mongo_to_s3.py rename to providers/amazon/tests/provider_tests/amazon/aws/transfers/test_mongo_to_s3.py diff --git a/providers/tests/amazon/aws/transfers/test_redshift_to_s3.py b/providers/amazon/tests/provider_tests/amazon/aws/transfers/test_redshift_to_s3.py similarity index 100% rename from providers/tests/amazon/aws/transfers/test_redshift_to_s3.py rename to providers/amazon/tests/provider_tests/amazon/aws/transfers/test_redshift_to_s3.py diff --git a/providers/tests/amazon/aws/transfers/test_s3_to_dynamodb.py b/providers/amazon/tests/provider_tests/amazon/aws/transfers/test_s3_to_dynamodb.py similarity index 100% rename from providers/tests/amazon/aws/transfers/test_s3_to_dynamodb.py rename to providers/amazon/tests/provider_tests/amazon/aws/transfers/test_s3_to_dynamodb.py diff --git a/providers/tests/amazon/aws/transfers/test_s3_to_ftp.py b/providers/amazon/tests/provider_tests/amazon/aws/transfers/test_s3_to_ftp.py similarity index 100% rename from providers/tests/amazon/aws/transfers/test_s3_to_ftp.py rename to providers/amazon/tests/provider_tests/amazon/aws/transfers/test_s3_to_ftp.py diff --git a/providers/tests/amazon/aws/transfers/test_s3_to_redshift.py b/providers/amazon/tests/provider_tests/amazon/aws/transfers/test_s3_to_redshift.py similarity index 100% rename from providers/tests/amazon/aws/transfers/test_s3_to_redshift.py rename to providers/amazon/tests/provider_tests/amazon/aws/transfers/test_s3_to_redshift.py diff --git a/providers/tests/amazon/aws/transfers/test_s3_to_sftp.py b/providers/amazon/tests/provider_tests/amazon/aws/transfers/test_s3_to_sftp.py similarity index 100% rename from 
providers/tests/amazon/aws/transfers/test_s3_to_sftp.py rename to providers/amazon/tests/provider_tests/amazon/aws/transfers/test_s3_to_sftp.py diff --git a/providers/tests/amazon/aws/transfers/test_s3_to_sql.py b/providers/amazon/tests/provider_tests/amazon/aws/transfers/test_s3_to_sql.py similarity index 100% rename from providers/tests/amazon/aws/transfers/test_s3_to_sql.py rename to providers/amazon/tests/provider_tests/amazon/aws/transfers/test_s3_to_sql.py diff --git a/providers/tests/amazon/aws/transfers/test_salesforce_to_s3.py b/providers/amazon/tests/provider_tests/amazon/aws/transfers/test_salesforce_to_s3.py similarity index 100% rename from providers/tests/amazon/aws/transfers/test_salesforce_to_s3.py rename to providers/amazon/tests/provider_tests/amazon/aws/transfers/test_salesforce_to_s3.py diff --git a/providers/tests/amazon/aws/transfers/test_sftp_to_s3.py b/providers/amazon/tests/provider_tests/amazon/aws/transfers/test_sftp_to_s3.py similarity index 100% rename from providers/tests/amazon/aws/transfers/test_sftp_to_s3.py rename to providers/amazon/tests/provider_tests/amazon/aws/transfers/test_sftp_to_s3.py diff --git a/providers/tests/amazon/aws/transfers/test_sql_to_s3.py b/providers/amazon/tests/provider_tests/amazon/aws/transfers/test_sql_to_s3.py similarity index 100% rename from providers/tests/amazon/aws/transfers/test_sql_to_s3.py rename to providers/amazon/tests/provider_tests/amazon/aws/transfers/test_sql_to_s3.py diff --git a/providers/tests/amazon/aws/transfers/__init__.py b/providers/amazon/tests/provider_tests/amazon/aws/triggers/__init__.py similarity index 100% rename from providers/tests/amazon/aws/transfers/__init__.py rename to providers/amazon/tests/provider_tests/amazon/aws/triggers/__init__.py diff --git a/providers/tests/amazon/aws/triggers/test_athena.py b/providers/amazon/tests/provider_tests/amazon/aws/triggers/test_athena.py similarity index 100% rename from providers/tests/amazon/aws/triggers/test_athena.py rename to 
providers/amazon/tests/provider_tests/amazon/aws/triggers/test_athena.py diff --git a/providers/tests/amazon/aws/triggers/test_base.py b/providers/amazon/tests/provider_tests/amazon/aws/triggers/test_base.py similarity index 100% rename from providers/tests/amazon/aws/triggers/test_base.py rename to providers/amazon/tests/provider_tests/amazon/aws/triggers/test_base.py diff --git a/providers/tests/amazon/aws/triggers/test_batch.py b/providers/amazon/tests/provider_tests/amazon/aws/triggers/test_batch.py similarity index 100% rename from providers/tests/amazon/aws/triggers/test_batch.py rename to providers/amazon/tests/provider_tests/amazon/aws/triggers/test_batch.py diff --git a/providers/tests/amazon/aws/triggers/test_bedrock.py b/providers/amazon/tests/provider_tests/amazon/aws/triggers/test_bedrock.py similarity index 98% rename from providers/tests/amazon/aws/triggers/test_bedrock.py rename to providers/amazon/tests/provider_tests/amazon/aws/triggers/test_bedrock.py index a619d39b1307cf..faed8770776379 100644 --- a/providers/tests/amazon/aws/triggers/test_bedrock.py +++ b/providers/amazon/tests/provider_tests/amazon/aws/triggers/test_bedrock.py @@ -29,8 +29,7 @@ BedrockProvisionModelThroughputCompletedTrigger, ) from airflow.triggers.base import TriggerEvent - -from providers.tests.amazon.aws.utils.test_waiter import assert_expected_waiter_type +from provider_tests.amazon.aws.utils.test_waiter import assert_expected_waiter_type BASE_TRIGGER_CLASSPATH = "airflow.providers.amazon.aws.triggers.bedrock." 
diff --git a/providers/tests/amazon/aws/triggers/test_comprehend.py b/providers/amazon/tests/provider_tests/amazon/aws/triggers/test_comprehend.py similarity index 98% rename from providers/tests/amazon/aws/triggers/test_comprehend.py rename to providers/amazon/tests/provider_tests/amazon/aws/triggers/test_comprehend.py index c7a53e2d70fcec..e5d5ff8f74f705 100644 --- a/providers/tests/amazon/aws/triggers/test_comprehend.py +++ b/providers/amazon/tests/provider_tests/amazon/aws/triggers/test_comprehend.py @@ -27,8 +27,7 @@ ComprehendPiiEntitiesDetectionJobCompletedTrigger, ) from airflow.triggers.base import TriggerEvent - -from providers.tests.amazon.aws.utils.test_waiter import assert_expected_waiter_type +from provider_tests.amazon.aws.utils.test_waiter import assert_expected_waiter_type BASE_TRIGGER_CLASSPATH = "airflow.providers.amazon.aws.triggers.comprehend." diff --git a/providers/tests/amazon/aws/triggers/test_dms.py b/providers/amazon/tests/provider_tests/amazon/aws/triggers/test_dms.py similarity index 98% rename from providers/tests/amazon/aws/triggers/test_dms.py rename to providers/amazon/tests/provider_tests/amazon/aws/triggers/test_dms.py index f1dcd99e18fc74..77001c48e23e01 100644 --- a/providers/tests/amazon/aws/triggers/test_dms.py +++ b/providers/amazon/tests/provider_tests/amazon/aws/triggers/test_dms.py @@ -30,8 +30,7 @@ DmsReplicationTerminalStatusTrigger, ) from airflow.triggers.base import TriggerEvent - -from providers.tests.amazon.aws.utils.test_waiter import assert_expected_waiter_type +from provider_tests.amazon.aws.utils.test_waiter import assert_expected_waiter_type BASE_TRIGGER_CLASSPATH = "airflow.providers.amazon.aws.triggers.dms." 
diff --git a/providers/tests/amazon/aws/triggers/test_ec2.py b/providers/amazon/tests/provider_tests/amazon/aws/triggers/test_ec2.py similarity index 100% rename from providers/tests/amazon/aws/triggers/test_ec2.py rename to providers/amazon/tests/provider_tests/amazon/aws/triggers/test_ec2.py diff --git a/providers/tests/amazon/aws/triggers/test_ecs.py b/providers/amazon/tests/provider_tests/amazon/aws/triggers/test_ecs.py similarity index 100% rename from providers/tests/amazon/aws/triggers/test_ecs.py rename to providers/amazon/tests/provider_tests/amazon/aws/triggers/test_ecs.py diff --git a/providers/tests/amazon/aws/triggers/test_eks.py b/providers/amazon/tests/provider_tests/amazon/aws/triggers/test_eks.py similarity index 100% rename from providers/tests/amazon/aws/triggers/test_eks.py rename to providers/amazon/tests/provider_tests/amazon/aws/triggers/test_eks.py diff --git a/providers/tests/amazon/aws/triggers/test_emr.py b/providers/amazon/tests/provider_tests/amazon/aws/triggers/test_emr.py similarity index 100% rename from providers/tests/amazon/aws/triggers/test_emr.py rename to providers/amazon/tests/provider_tests/amazon/aws/triggers/test_emr.py diff --git a/providers/tests/amazon/aws/triggers/test_glue.py b/providers/amazon/tests/provider_tests/amazon/aws/triggers/test_glue.py similarity index 98% rename from providers/tests/amazon/aws/triggers/test_glue.py rename to providers/amazon/tests/provider_tests/amazon/aws/triggers/test_glue.py index 2f4830bb8689c4..96aac077b1b5ea 100644 --- a/providers/tests/amazon/aws/triggers/test_glue.py +++ b/providers/amazon/tests/provider_tests/amazon/aws/triggers/test_glue.py @@ -32,8 +32,7 @@ GlueJobCompleteTrigger, ) from airflow.triggers.base import TriggerEvent - -from providers.tests.amazon.aws.utils.test_waiter import assert_expected_waiter_type +from provider_tests.amazon.aws.utils.test_waiter import assert_expected_waiter_type BASE_TRIGGER_CLASSPATH = "airflow.providers.amazon.aws.triggers.glue." 
diff --git a/providers/tests/amazon/aws/triggers/test_glue_crawler.py b/providers/amazon/tests/provider_tests/amazon/aws/triggers/test_glue_crawler.py similarity index 96% rename from providers/tests/amazon/aws/triggers/test_glue_crawler.py rename to providers/amazon/tests/provider_tests/amazon/aws/triggers/test_glue_crawler.py index fadc14fa0f2719..584e1671c4c906 100644 --- a/providers/tests/amazon/aws/triggers/test_glue_crawler.py +++ b/providers/amazon/tests/provider_tests/amazon/aws/triggers/test_glue_crawler.py @@ -24,8 +24,7 @@ from airflow.providers.amazon.aws.hooks.glue_crawler import GlueCrawlerHook from airflow.providers.amazon.aws.triggers.glue_crawler import GlueCrawlerCompleteTrigger from airflow.triggers.base import TriggerEvent - -from providers.tests.amazon.aws.utils.test_waiter import assert_expected_waiter_type +from provider_tests.amazon.aws.utils.test_waiter import assert_expected_waiter_type class TestGlueCrawlerCompleteTrigger: diff --git a/providers/tests/amazon/aws/triggers/test_glue_databrew.py b/providers/amazon/tests/provider_tests/amazon/aws/triggers/test_glue_databrew.py similarity index 100% rename from providers/tests/amazon/aws/triggers/test_glue_databrew.py rename to providers/amazon/tests/provider_tests/amazon/aws/triggers/test_glue_databrew.py diff --git a/providers/tests/amazon/aws/triggers/test_kinesis_analytics.py b/providers/amazon/tests/provider_tests/amazon/aws/triggers/test_kinesis_analytics.py similarity index 97% rename from providers/tests/amazon/aws/triggers/test_kinesis_analytics.py rename to providers/amazon/tests/provider_tests/amazon/aws/triggers/test_kinesis_analytics.py index 6a0f3215450605..08da972daf00c7 100644 --- a/providers/tests/amazon/aws/triggers/test_kinesis_analytics.py +++ b/providers/amazon/tests/provider_tests/amazon/aws/triggers/test_kinesis_analytics.py @@ -26,8 +26,7 @@ KinesisAnalyticsV2ApplicationOperationCompleteTrigger, ) from airflow.triggers.base import TriggerEvent - -from 
providers.tests.amazon.aws.utils.test_waiter import assert_expected_waiter_type +from provider_tests.amazon.aws.utils.test_waiter import assert_expected_waiter_type BASE_TRIGGER_CLASSPATH = "airflow.providers.amazon.aws.triggers.kinesis_analytics." diff --git a/providers/tests/amazon/aws/triggers/test_lambda_function.py b/providers/amazon/tests/provider_tests/amazon/aws/triggers/test_lambda_function.py similarity index 100% rename from providers/tests/amazon/aws/triggers/test_lambda_function.py rename to providers/amazon/tests/provider_tests/amazon/aws/triggers/test_lambda_function.py diff --git a/providers/tests/amazon/aws/triggers/test_neptune.py b/providers/amazon/tests/provider_tests/amazon/aws/triggers/test_neptune.py similarity index 100% rename from providers/tests/amazon/aws/triggers/test_neptune.py rename to providers/amazon/tests/provider_tests/amazon/aws/triggers/test_neptune.py diff --git a/providers/tests/amazon/aws/triggers/test_opensearch_serverless.py b/providers/amazon/tests/provider_tests/amazon/aws/triggers/test_opensearch_serverless.py similarity index 97% rename from providers/tests/amazon/aws/triggers/test_opensearch_serverless.py rename to providers/amazon/tests/provider_tests/amazon/aws/triggers/test_opensearch_serverless.py index 429b406072c589..044fc0876c297d 100644 --- a/providers/tests/amazon/aws/triggers/test_opensearch_serverless.py +++ b/providers/amazon/tests/provider_tests/amazon/aws/triggers/test_opensearch_serverless.py @@ -27,8 +27,7 @@ ) from airflow.triggers.base import TriggerEvent from airflow.utils.helpers import prune_dict - -from providers.tests.amazon.aws.triggers.test_base import TestAwsBaseWaiterTrigger +from provider_tests.amazon.aws.triggers.test_base import TestAwsBaseWaiterTrigger BASE_TRIGGER_CLASSPATH = "airflow.providers.amazon.aws.triggers.opensearch_serverless." 
diff --git a/providers/tests/amazon/aws/triggers/test_rds.py b/providers/amazon/tests/provider_tests/amazon/aws/triggers/test_rds.py similarity index 100% rename from providers/tests/amazon/aws/triggers/test_rds.py rename to providers/amazon/tests/provider_tests/amazon/aws/triggers/test_rds.py diff --git a/providers/tests/amazon/aws/triggers/test_redshift_cluster.py b/providers/amazon/tests/provider_tests/amazon/aws/triggers/test_redshift_cluster.py similarity index 100% rename from providers/tests/amazon/aws/triggers/test_redshift_cluster.py rename to providers/amazon/tests/provider_tests/amazon/aws/triggers/test_redshift_cluster.py diff --git a/providers/tests/amazon/aws/triggers/test_redshift_data.py b/providers/amazon/tests/provider_tests/amazon/aws/triggers/test_redshift_data.py similarity index 100% rename from providers/tests/amazon/aws/triggers/test_redshift_data.py rename to providers/amazon/tests/provider_tests/amazon/aws/triggers/test_redshift_data.py diff --git a/providers/tests/amazon/aws/triggers/test_s3.py b/providers/amazon/tests/provider_tests/amazon/aws/triggers/test_s3.py similarity index 100% rename from providers/tests/amazon/aws/triggers/test_s3.py rename to providers/amazon/tests/provider_tests/amazon/aws/triggers/test_s3.py diff --git a/providers/tests/amazon/aws/triggers/test_sagemaker.py b/providers/amazon/tests/provider_tests/amazon/aws/triggers/test_sagemaker.py similarity index 100% rename from providers/tests/amazon/aws/triggers/test_sagemaker.py rename to providers/amazon/tests/provider_tests/amazon/aws/triggers/test_sagemaker.py diff --git a/providers/tests/amazon/aws/triggers/test_serialization.py b/providers/amazon/tests/provider_tests/amazon/aws/triggers/test_serialization.py similarity index 100% rename from providers/tests/amazon/aws/triggers/test_serialization.py rename to providers/amazon/tests/provider_tests/amazon/aws/triggers/test_serialization.py diff --git a/providers/tests/amazon/aws/triggers/test_sqs.py 
b/providers/amazon/tests/provider_tests/amazon/aws/triggers/test_sqs.py similarity index 100% rename from providers/tests/amazon/aws/triggers/test_sqs.py rename to providers/amazon/tests/provider_tests/amazon/aws/triggers/test_sqs.py diff --git a/providers/tests/amazon/aws/triggers/__init__.py b/providers/amazon/tests/provider_tests/amazon/aws/utils/__init__.py similarity index 100% rename from providers/tests/amazon/aws/triggers/__init__.py rename to providers/amazon/tests/provider_tests/amazon/aws/utils/__init__.py diff --git a/providers/tests/amazon/aws/utils/eks_test_constants.py b/providers/amazon/tests/provider_tests/amazon/aws/utils/eks_test_constants.py similarity index 100% rename from providers/tests/amazon/aws/utils/eks_test_constants.py rename to providers/amazon/tests/provider_tests/amazon/aws/utils/eks_test_constants.py diff --git a/providers/tests/amazon/aws/utils/eks_test_utils.py b/providers/amazon/tests/provider_tests/amazon/aws/utils/eks_test_utils.py similarity index 99% rename from providers/tests/amazon/aws/utils/eks_test_utils.py rename to providers/amazon/tests/provider_tests/amazon/aws/utils/eks_test_utils.py index a9938ec0dad7ad..8e32d2bc34b5fd 100644 --- a/providers/tests/amazon/aws/utils/eks_test_utils.py +++ b/providers/amazon/tests/provider_tests/amazon/aws/utils/eks_test_utils.py @@ -22,7 +22,7 @@ from re import Pattern from typing import TYPE_CHECKING, Union -from providers.tests.amazon.aws.utils.eks_test_constants import ( +from provider_tests.amazon.aws.utils.eks_test_constants import ( STATUS, ClusterAttributes, ClusterInputs, diff --git a/providers/tests/amazon/aws/utils/test_connection_wrapper.py b/providers/amazon/tests/provider_tests/amazon/aws/utils/test_connection_wrapper.py similarity index 100% rename from providers/tests/amazon/aws/utils/test_connection_wrapper.py rename to providers/amazon/tests/provider_tests/amazon/aws/utils/test_connection_wrapper.py diff --git a/providers/tests/amazon/aws/utils/test_eks_get_token.py 
b/providers/amazon/tests/provider_tests/amazon/aws/utils/test_eks_get_token.py similarity index 83% rename from providers/tests/amazon/aws/utils/test_eks_get_token.py rename to providers/amazon/tests/provider_tests/amazon/aws/utils/test_eks_get_token.py index 1d6300e45851ee..4efb7cd34947e9 100644 --- a/providers/tests/amazon/aws/utils/test_eks_get_token.py +++ b/providers/amazon/tests/provider_tests/amazon/aws/utils/test_eks_get_token.py @@ -25,6 +25,8 @@ import pytest import time_machine +from tests_common.test_utils.version_compat import AIRFLOW_V_3_0_PLUS + class TestGetEksToken: @mock.patch("airflow.providers.amazon.aws.hooks.eks.EksHook") @@ -47,7 +49,9 @@ class TestGetEksToken: ], [ [ - "airflow.providers.amazon.aws.utils.eks_get_token", + "airflow.providers.amazon.src.airflow.providers.amazon.aws.utils.eks_get_token" + if AIRFLOW_V_3_0_PLUS + else "airflow.providers.amazon.aws.utils.eks_get_token", "--region-name", "test-region", "--cluster-name", @@ -71,7 +75,13 @@ def test_run(self, mock_eks_hook, args, expected_aws_conn_id, expected_region_na with mock.patch("sys.argv", args), contextlib.redirect_stdout(StringIO()) as temp_stdout: os.chdir(providers_src_folder) # We are not using run_module because of https://github.com/pytest-dev/pytest/issues/9007 - runpy.run_path("airflow/providers/amazon/aws/utils/eks_get_token.py", run_name="__main__") + if AIRFLOW_V_3_0_PLUS: + runpy.run_path( + "providers/amazon/src/airflow/providers/amazon/aws/utils/eks_get_token.py", + run_name="__main__", + ) + else: + runpy.run_path("airflow/providers/amazon/aws/utils/eks_get_token.py", run_name="__main__") output = temp_stdout.getvalue() token = "token: k8s-aws-v1.aHR0cDovL2V4YW1wbGUuY29t" expected_token = output.split(",")[1].strip() diff --git a/providers/tests/amazon/aws/utils/test_emailer.py b/providers/amazon/tests/provider_tests/amazon/aws/utils/test_emailer.py similarity index 100% rename from providers/tests/amazon/aws/utils/test_emailer.py rename to 
providers/amazon/tests/provider_tests/amazon/aws/utils/test_emailer.py diff --git a/providers/tests/amazon/aws/utils/test_identifiers.py b/providers/amazon/tests/provider_tests/amazon/aws/utils/test_identifiers.py similarity index 100% rename from providers/tests/amazon/aws/utils/test_identifiers.py rename to providers/amazon/tests/provider_tests/amazon/aws/utils/test_identifiers.py diff --git a/providers/tests/amazon/aws/utils/test_mixins.py b/providers/amazon/tests/provider_tests/amazon/aws/utils/test_mixins.py similarity index 100% rename from providers/tests/amazon/aws/utils/test_mixins.py rename to providers/amazon/tests/provider_tests/amazon/aws/utils/test_mixins.py diff --git a/providers/tests/amazon/aws/utils/test_openlineage.py b/providers/amazon/tests/provider_tests/amazon/aws/utils/test_openlineage.py similarity index 100% rename from providers/tests/amazon/aws/utils/test_openlineage.py rename to providers/amazon/tests/provider_tests/amazon/aws/utils/test_openlineage.py diff --git a/providers/tests/amazon/aws/utils/test_redshift.py b/providers/amazon/tests/provider_tests/amazon/aws/utils/test_redshift.py similarity index 100% rename from providers/tests/amazon/aws/utils/test_redshift.py rename to providers/amazon/tests/provider_tests/amazon/aws/utils/test_redshift.py diff --git a/providers/tests/amazon/aws/utils/test_sqs.py b/providers/amazon/tests/provider_tests/amazon/aws/utils/test_sqs.py similarity index 100% rename from providers/tests/amazon/aws/utils/test_sqs.py rename to providers/amazon/tests/provider_tests/amazon/aws/utils/test_sqs.py diff --git a/providers/tests/amazon/aws/utils/test_suppress.py b/providers/amazon/tests/provider_tests/amazon/aws/utils/test_suppress.py similarity index 100% rename from providers/tests/amazon/aws/utils/test_suppress.py rename to providers/amazon/tests/provider_tests/amazon/aws/utils/test_suppress.py diff --git a/providers/tests/amazon/aws/utils/test_tags.py 
b/providers/amazon/tests/provider_tests/amazon/aws/utils/test_tags.py similarity index 100% rename from providers/tests/amazon/aws/utils/test_tags.py rename to providers/amazon/tests/provider_tests/amazon/aws/utils/test_tags.py diff --git a/providers/tests/amazon/aws/utils/test_task_log_fetcher.py b/providers/amazon/tests/provider_tests/amazon/aws/utils/test_task_log_fetcher.py similarity index 100% rename from providers/tests/amazon/aws/utils/test_task_log_fetcher.py rename to providers/amazon/tests/provider_tests/amazon/aws/utils/test_task_log_fetcher.py diff --git a/providers/tests/amazon/aws/utils/test_template_fields.py b/providers/amazon/tests/provider_tests/amazon/aws/utils/test_template_fields.py similarity index 100% rename from providers/tests/amazon/aws/utils/test_template_fields.py rename to providers/amazon/tests/provider_tests/amazon/aws/utils/test_template_fields.py diff --git a/providers/tests/amazon/aws/utils/test_utils.py b/providers/amazon/tests/provider_tests/amazon/aws/utils/test_utils.py similarity index 100% rename from providers/tests/amazon/aws/utils/test_utils.py rename to providers/amazon/tests/provider_tests/amazon/aws/utils/test_utils.py diff --git a/providers/tests/amazon/aws/utils/test_waiter.py b/providers/amazon/tests/provider_tests/amazon/aws/utils/test_waiter.py similarity index 100% rename from providers/tests/amazon/aws/utils/test_waiter.py rename to providers/amazon/tests/provider_tests/amazon/aws/utils/test_waiter.py diff --git a/providers/tests/amazon/aws/utils/test_waiter_with_logging.py b/providers/amazon/tests/provider_tests/amazon/aws/utils/test_waiter_with_logging.py similarity index 81% rename from providers/tests/amazon/aws/utils/test_waiter_with_logging.py rename to providers/amazon/tests/provider_tests/amazon/aws/utils/test_waiter_with_logging.py index 4e621e75a5698a..aa52920535975e 100644 --- a/providers/tests/amazon/aws/utils/test_waiter_with_logging.py +++ 
b/providers/amazon/tests/provider_tests/amazon/aws/utils/test_waiter_with_logging.py @@ -17,7 +17,6 @@ # under the License. from __future__ import annotations -import logging from typing import Any from unittest import mock from unittest.mock import AsyncMock @@ -39,7 +38,7 @@ def generate_response(state: str) -> dict[str, Any]: class TestWaiter: @mock.patch("time.sleep") - def test_wait(self, mock_sleep, caplog): + def test_wait(self, mock_sleep): mock_sleep.return_value = True mock_waiter = mock.MagicMock() error = WaiterError( @@ -66,20 +65,9 @@ def test_wait(self, mock_sleep, caplog): ) assert mock_waiter.wait.call_count == 3 mock_sleep.assert_called_with(123) - assert ( - caplog.record_tuples - == [ - ( - "airflow.providers.amazon.aws.utils.waiter_with_logging", - logging.INFO, - "test status message: Pending", - ) - ] - * 2 - ) @pytest.mark.asyncio - async def test_async_wait(self, caplog): + async def test_async_wait(self): mock_waiter = mock.MagicMock() error = WaiterError( name="test_waiter", @@ -106,7 +94,6 @@ async def test_async_wait(self, caplog): }, ) assert mock_waiter.wait.call_count == 3 - assert caplog.messages == ["test status message: Pending", "test status message: Pending"] @pytest.mark.asyncio async def test_async_wait_with_unknown_failure(self): @@ -144,7 +131,7 @@ async def test_async_wait_with_unknown_failure(self): assert mock_waiter.wait.call_count == 1 @mock.patch("time.sleep") - def test_wait_max_attempts_exceeded(self, mock_sleep, caplog): + def test_wait_max_attempts_exceeded(self, mock_sleep): mock_sleep.return_value = True mock_waiter = mock.MagicMock() error = WaiterError( @@ -173,20 +160,9 @@ def test_wait_max_attempts_exceeded(self, mock_sleep, caplog): assert mock_waiter.wait.call_count == 2 mock_sleep.assert_called_with(123) - assert ( - caplog.record_tuples - == [ - ( - "airflow.providers.amazon.aws.utils.waiter_with_logging", - logging.INFO, - "test status message: Pending", - ) - ] - * 2 - ) @mock.patch("time.sleep") - def 
test_wait_with_failure(self, mock_sleep, caplog): + def test_wait_with_failure(self, mock_sleep): mock_sleep.return_value = True mock_waiter = mock.MagicMock() error = WaiterError( @@ -220,7 +196,6 @@ def test_wait_with_failure(self, mock_sleep, caplog): }, ) assert mock_waiter.wait.call_count == 4 - assert caplog.messages == ["test status message: Pending"] * 3 + ["test failure message: Failure"] @mock.patch("time.sleep") def test_wait_with_unknown_failure(self, mock_sleep): @@ -259,7 +234,7 @@ def test_wait_with_unknown_failure(self, mock_sleep): assert mock_waiter.wait.call_count == 1 @mock.patch("time.sleep") - def test_wait_with_list_response(self, mock_sleep, caplog): + def test_wait_with_list_response(self, mock_sleep): mock_sleep.return_value = True mock_waiter = mock.MagicMock() error = WaiterError( @@ -295,20 +270,9 @@ def test_wait_with_list_response(self, mock_sleep, caplog): ) mock_waiter.wait.call_count == 3 mock_sleep.assert_called_with(123) - assert ( - caplog.record_tuples - == [ - ( - "airflow.providers.amazon.aws.utils.waiter_with_logging", - logging.INFO, - "test status message: Pending", - ) - ] - * 2 - ) @mock.patch("time.sleep") - def test_wait_with_incorrect_args(self, mock_sleep, caplog): + def test_wait_with_incorrect_args(self, mock_sleep): mock_sleep.return_value = True mock_waiter = mock.MagicMock() error = WaiterError( @@ -344,20 +308,9 @@ def test_wait_with_incorrect_args(self, mock_sleep, caplog): ) assert mock_waiter.wait.call_count == 3 mock_sleep.assert_called_with(123) - assert ( - caplog.record_tuples - == [ - ( - "airflow.providers.amazon.aws.utils.waiter_with_logging", - logging.INFO, - "test status message: ", - ) - ] - * 2 - ) @mock.patch("time.sleep") - def test_wait_with_multiple_args(self, mock_sleep, caplog): + def test_wait_with_multiple_args(self, mock_sleep): mock_sleep.return_value = True mock_waiter = mock.MagicMock() error = WaiterError( @@ -385,20 +338,9 @@ def test_wait_with_multiple_args(self, mock_sleep, 
caplog): ) assert mock_waiter.wait.call_count == 3 mock_sleep.assert_called_with(123) - assert ( - caplog.record_tuples - == [ - ( - "airflow.providers.amazon.aws.utils.waiter_with_logging", - logging.INFO, - "test status message: Pending - test_details - test_name", - ) - ] - * 2 - ) @mock.patch.object(_LazyStatusFormatter, "__str__") - def test_status_formatting_not_done_if_higher_log_level(self, status_format_mock: mock.MagicMock, caplog): + def test_status_formatting_not_done_if_higher_log_level(self, status_format_mock: mock.MagicMock): mock_waiter = mock.MagicMock() error = WaiterError( name="test_waiter", @@ -407,16 +349,14 @@ def test_status_formatting_not_done_if_higher_log_level(self, status_format_mock ) mock_waiter.wait.side_effect = [error, error, True] - with caplog.at_level(level=logging.WARNING): - wait( - waiter=mock_waiter, - waiter_delay=0, - waiter_max_attempts=456, - args={"test_arg": "test_value"}, - failure_message="test failure message", - status_message="test status message", - status_args=["Status.State"], - ) + wait( + waiter=mock_waiter, + waiter_delay=0, + waiter_max_attempts=456, + args={"test_arg": "test_value"}, + failure_message="test failure message", + status_message="test status message", + status_args=["Status.State"], + ) - assert len(caplog.messages) == 0 status_format_mock.assert_not_called() diff --git a/providers/tests/amazon/aws/utils/__init__.py b/providers/amazon/tests/provider_tests/amazon/aws/waiters/__init__.py similarity index 100% rename from providers/tests/amazon/aws/utils/__init__.py rename to providers/amazon/tests/provider_tests/amazon/aws/waiters/__init__.py diff --git a/providers/tests/amazon/aws/waiters/test.json b/providers/amazon/tests/provider_tests/amazon/aws/waiters/test.json similarity index 100% rename from providers/tests/amazon/aws/waiters/test.json rename to providers/amazon/tests/provider_tests/amazon/aws/waiters/test.json diff --git a/providers/tests/amazon/aws/waiters/test_batch.py 
b/providers/amazon/tests/provider_tests/amazon/aws/waiters/test_batch.py similarity index 100% rename from providers/tests/amazon/aws/waiters/test_batch.py rename to providers/amazon/tests/provider_tests/amazon/aws/waiters/test_batch.py diff --git a/providers/tests/amazon/aws/waiters/test_bedrock.py b/providers/amazon/tests/provider_tests/amazon/aws/waiters/test_bedrock.py similarity index 100% rename from providers/tests/amazon/aws/waiters/test_bedrock.py rename to providers/amazon/tests/provider_tests/amazon/aws/waiters/test_bedrock.py diff --git a/providers/tests/amazon/aws/waiters/test_bedrock_agent.py b/providers/amazon/tests/provider_tests/amazon/aws/waiters/test_bedrock_agent.py similarity index 100% rename from providers/tests/amazon/aws/waiters/test_bedrock_agent.py rename to providers/amazon/tests/provider_tests/amazon/aws/waiters/test_bedrock_agent.py diff --git a/providers/tests/amazon/aws/waiters/test_comprehend.py b/providers/amazon/tests/provider_tests/amazon/aws/waiters/test_comprehend.py similarity index 100% rename from providers/tests/amazon/aws/waiters/test_comprehend.py rename to providers/amazon/tests/provider_tests/amazon/aws/waiters/test_comprehend.py diff --git a/providers/tests/amazon/aws/waiters/test_custom_waiters.py b/providers/amazon/tests/provider_tests/amazon/aws/waiters/test_custom_waiters.py similarity index 100% rename from providers/tests/amazon/aws/waiters/test_custom_waiters.py rename to providers/amazon/tests/provider_tests/amazon/aws/waiters/test_custom_waiters.py diff --git a/providers/tests/amazon/aws/waiters/test_dms.py b/providers/amazon/tests/provider_tests/amazon/aws/waiters/test_dms.py similarity index 100% rename from providers/tests/amazon/aws/waiters/test_dms.py rename to providers/amazon/tests/provider_tests/amazon/aws/waiters/test_dms.py diff --git a/providers/tests/amazon/aws/waiters/test_dynamo.py b/providers/amazon/tests/provider_tests/amazon/aws/waiters/test_dynamo.py similarity index 100% rename from 
providers/tests/amazon/aws/waiters/test_dynamo.py rename to providers/amazon/tests/provider_tests/amazon/aws/waiters/test_dynamo.py diff --git a/providers/tests/amazon/aws/waiters/test_ecs.py b/providers/amazon/tests/provider_tests/amazon/aws/waiters/test_ecs.py similarity index 100% rename from providers/tests/amazon/aws/waiters/test_ecs.py rename to providers/amazon/tests/provider_tests/amazon/aws/waiters/test_ecs.py diff --git a/providers/tests/amazon/aws/waiters/test_eks.py b/providers/amazon/tests/provider_tests/amazon/aws/waiters/test_eks.py similarity index 96% rename from providers/tests/amazon/aws/waiters/test_eks.py rename to providers/amazon/tests/provider_tests/amazon/aws/waiters/test_eks.py index 6a3aa8cf0273af..caaf8620530ed3 100644 --- a/providers/tests/amazon/aws/waiters/test_eks.py +++ b/providers/amazon/tests/provider_tests/amazon/aws/waiters/test_eks.py @@ -22,8 +22,7 @@ from moto import mock_aws from airflow.providers.amazon.aws.hooks.eks import EksHook - -from providers.tests.amazon.aws.waiters.test_custom_waiters import assert_all_match +from provider_tests.amazon.aws.waiters.test_custom_waiters import assert_all_match class TestCustomEKSServiceWaiters: diff --git a/providers/tests/amazon/aws/waiters/test_emr.py b/providers/amazon/tests/provider_tests/amazon/aws/waiters/test_emr.py similarity index 100% rename from providers/tests/amazon/aws/waiters/test_emr.py rename to providers/amazon/tests/provider_tests/amazon/aws/waiters/test_emr.py diff --git a/providers/tests/amazon/aws/waiters/test_glue.py b/providers/amazon/tests/provider_tests/amazon/aws/waiters/test_glue.py similarity index 100% rename from providers/tests/amazon/aws/waiters/test_glue.py rename to providers/amazon/tests/provider_tests/amazon/aws/waiters/test_glue.py diff --git a/providers/tests/amazon/aws/waiters/test_glue_databrew.py b/providers/amazon/tests/provider_tests/amazon/aws/waiters/test_glue_databrew.py similarity index 100% rename from 
providers/tests/amazon/aws/waiters/test_glue_databrew.py rename to providers/amazon/tests/provider_tests/amazon/aws/waiters/test_glue_databrew.py diff --git a/providers/tests/amazon/aws/waiters/test_kinesis_analytics.py b/providers/amazon/tests/provider_tests/amazon/aws/waiters/test_kinesis_analytics.py similarity index 100% rename from providers/tests/amazon/aws/waiters/test_kinesis_analytics.py rename to providers/amazon/tests/provider_tests/amazon/aws/waiters/test_kinesis_analytics.py diff --git a/providers/tests/amazon/aws/waiters/test_neptune.py b/providers/amazon/tests/provider_tests/amazon/aws/waiters/test_neptune.py similarity index 100% rename from providers/tests/amazon/aws/waiters/test_neptune.py rename to providers/amazon/tests/provider_tests/amazon/aws/waiters/test_neptune.py diff --git a/providers/tests/amazon/aws/waiters/test_opensearch_serverless.py b/providers/amazon/tests/provider_tests/amazon/aws/waiters/test_opensearch_serverless.py similarity index 100% rename from providers/tests/amazon/aws/waiters/test_opensearch_serverless.py rename to providers/amazon/tests/provider_tests/amazon/aws/waiters/test_opensearch_serverless.py diff --git a/providers/tests/amazon/conftest.py b/providers/amazon/tests/provider_tests/amazon/conftest.py similarity index 100% rename from providers/tests/amazon/conftest.py rename to providers/amazon/tests/provider_tests/amazon/conftest.py diff --git a/providers/tests/system/amazon/CONTRIBUTING.md b/providers/amazon/tests/system/amazon/CONTRIBUTING.md similarity index 98% rename from providers/tests/system/amazon/CONTRIBUTING.md rename to providers/amazon/tests/system/amazon/CONTRIBUTING.md index 4abdf410dd63d4..ebdc23c4623534 100644 --- a/providers/tests/system/amazon/CONTRIBUTING.md +++ b/providers/amazon/tests/system/amazon/CONTRIBUTING.md @@ -144,7 +144,7 @@ Some examples of when this should be used are: ## Location and Naming -All system tests for the Amazon provider package should be in 
`tests/system/providers/amazon/aws/`. +All system tests for the Amazon provider package should be in `amazon/tests/system/amazon/aws/`. If there is only one system test for a given service, the module name should be `example_{service}`. For example, `example_athena.py`. If more than one module is required, the names should be descriptive. For example, `example_redshift_cluster.py` and `example_redshift_sql.py`. diff --git a/providers/tests/system/amazon/README.md b/providers/amazon/tests/system/amazon/README.md similarity index 100% rename from providers/tests/system/amazon/README.md rename to providers/amazon/tests/system/amazon/README.md diff --git a/providers/tests/system/amazon/__init__.py b/providers/amazon/tests/system/amazon/__init__.py similarity index 100% rename from providers/tests/system/amazon/__init__.py rename to providers/amazon/tests/system/amazon/__init__.py diff --git a/providers/tests/system/amazon/aws/__init__.py b/providers/amazon/tests/system/amazon/aws/__init__.py similarity index 100% rename from providers/tests/system/amazon/aws/__init__.py rename to providers/amazon/tests/system/amazon/aws/__init__.py diff --git a/providers/tests/system/amazon/aws/example_appflow.py b/providers/amazon/tests/system/amazon/aws/example_appflow.py similarity index 97% rename from providers/tests/system/amazon/aws/example_appflow.py rename to providers/amazon/tests/system/amazon/aws/example_appflow.py index 262b6cb803d447..33ef42fbcf1102 100644 --- a/providers/tests/system/amazon/aws/example_appflow.py +++ b/providers/amazon/tests/system/amazon/aws/example_appflow.py @@ -18,6 +18,8 @@ from datetime import datetime +from providers.amazon.tests.system.amazon.aws.utils import SystemTestContextBuilder + from airflow.models.baseoperator import chain from airflow.models.dag import DAG from airflow.providers.amazon.aws.operators.appflow import ( @@ -29,8 +31,6 @@ ) from airflow.providers.standard.operators.bash import BashOperator -from 
providers.tests.system.amazon.aws.utils import SystemTestContextBuilder - sys_test_context_task = SystemTestContextBuilder().build() DAG_ID = "example_appflow" diff --git a/providers/tests/system/amazon/aws/example_appflow_run.py b/providers/amazon/tests/system/amazon/aws/example_appflow_run.py similarity index 98% rename from providers/tests/system/amazon/aws/example_appflow_run.py rename to providers/amazon/tests/system/amazon/aws/example_appflow_run.py index 106fda1bb7a426..c372da03c35ae2 100644 --- a/providers/tests/system/amazon/aws/example_appflow_run.py +++ b/providers/amazon/tests/system/amazon/aws/example_appflow_run.py @@ -20,6 +20,7 @@ from datetime import datetime import boto3 +from providers.amazon.tests.system.amazon.aws.utils import SystemTestContextBuilder from airflow.decorators import task from airflow.models.baseoperator import chain @@ -34,8 +35,6 @@ ) from airflow.utils.trigger_rule import TriggerRule -from providers.tests.system.amazon.aws.utils import SystemTestContextBuilder - sys_test_context_task = SystemTestContextBuilder().build() DAG_ID = "example_appflow_run" diff --git a/providers/tests/system/amazon/aws/example_athena.py b/providers/amazon/tests/system/amazon/aws/example_athena.py similarity index 98% rename from providers/tests/system/amazon/aws/example_athena.py rename to providers/amazon/tests/system/amazon/aws/example_athena.py index 201e017d12a64e..bd26cbb5a8f8f0 100644 --- a/providers/tests/system/amazon/aws/example_athena.py +++ b/providers/amazon/tests/system/amazon/aws/example_athena.py @@ -19,6 +19,7 @@ from datetime import datetime import boto3 +from providers.amazon.tests.system.amazon.aws.utils import SystemTestContextBuilder from airflow.decorators import task from airflow.models.baseoperator import chain @@ -33,8 +34,6 @@ from airflow.providers.amazon.aws.sensors.athena import AthenaSensor from airflow.utils.trigger_rule import TriggerRule -from providers.tests.system.amazon.aws.utils import SystemTestContextBuilder - 
sys_test_context_task = SystemTestContextBuilder().build() DAG_ID = "example_athena" diff --git a/providers/tests/system/amazon/aws/example_azure_blob_to_s3.py b/providers/amazon/tests/system/amazon/aws/example_azure_blob_to_s3.py similarity index 97% rename from providers/tests/system/amazon/aws/example_azure_blob_to_s3.py rename to providers/amazon/tests/system/amazon/aws/example_azure_blob_to_s3.py index 3834365622d30f..6e5338298ccad3 100644 --- a/providers/tests/system/amazon/aws/example_azure_blob_to_s3.py +++ b/providers/amazon/tests/system/amazon/aws/example_azure_blob_to_s3.py @@ -18,14 +18,14 @@ from datetime import datetime +from providers.amazon.tests.system.amazon.aws.utils import SystemTestContextBuilder + from airflow.models.baseoperator import chain from airflow.models.dag import DAG from airflow.providers.amazon.aws.operators.s3 import S3CreateBucketOperator, S3DeleteBucketOperator from airflow.providers.amazon.aws.transfers.azure_blob_to_s3 import AzureBlobStorageToS3Operator from airflow.utils.trigger_rule import TriggerRule -from providers.tests.system.amazon.aws.utils import SystemTestContextBuilder - sys_test_context_task = SystemTestContextBuilder().build() DAG_ID = "example_azure_blob_to_s3" diff --git a/providers/tests/system/amazon/aws/example_batch.py b/providers/amazon/tests/system/amazon/aws/example_batch.py similarity index 99% rename from providers/tests/system/amazon/aws/example_batch.py rename to providers/amazon/tests/system/amazon/aws/example_batch.py index 828815a2e1c86a..e512180221d166 100644 --- a/providers/tests/system/amazon/aws/example_batch.py +++ b/providers/amazon/tests/system/amazon/aws/example_batch.py @@ -20,6 +20,12 @@ from datetime import datetime import boto3 +from providers.amazon.tests.system.amazon.aws.utils import ( + ENV_ID_KEY, + SystemTestContextBuilder, + prune_logs, + split_string, +) from airflow.decorators import task from airflow.models.baseoperator import chain @@ -33,13 +39,6 @@ ) from 
airflow.utils.trigger_rule import TriggerRule -from providers.tests.system.amazon.aws.utils import ( - ENV_ID_KEY, - SystemTestContextBuilder, - prune_logs, - split_string, -) - log = logging.getLogger(__name__) DAG_ID = "example_batch" diff --git a/providers/tests/system/amazon/aws/example_bedrock.py b/providers/amazon/tests/system/amazon/aws/example_bedrock.py similarity index 98% rename from providers/tests/system/amazon/aws/example_bedrock.py rename to providers/amazon/tests/system/amazon/aws/example_bedrock.py index 1bc050ffd51095..9a0898e41b442d 100644 --- a/providers/tests/system/amazon/aws/example_bedrock.py +++ b/providers/amazon/tests/system/amazon/aws/example_bedrock.py @@ -21,6 +21,7 @@ from os import environ import boto3 +from providers.amazon.tests.system.amazon.aws.utils import SystemTestContextBuilder from airflow.decorators import task, task_group from airflow.models.baseoperator import chain @@ -44,8 +45,6 @@ from airflow.utils.edgemodifier import Label from airflow.utils.trigger_rule import TriggerRule -from providers.tests.system.amazon.aws.utils import SystemTestContextBuilder - # Externally fetched variables: ROLE_ARN_KEY = "ROLE_ARN" sys_test_context_task = SystemTestContextBuilder().add_variable(ROLE_ARN_KEY).build() diff --git a/providers/tests/system/amazon/aws/example_bedrock_retrieve_and_generate.py b/providers/amazon/tests/system/amazon/aws/example_bedrock_retrieve_and_generate.py similarity index 99% rename from providers/tests/system/amazon/aws/example_bedrock_retrieve_and_generate.py rename to providers/amazon/tests/system/amazon/aws/example_bedrock_retrieve_and_generate.py index 86e9a1932704bf..cec67ec58423fb 100644 --- a/providers/tests/system/amazon/aws/example_bedrock_retrieve_and_generate.py +++ b/providers/amazon/tests/system/amazon/aws/example_bedrock_retrieve_and_generate.py @@ -32,6 +32,7 @@ OpenSearch, RequestsHttpConnection, ) +from providers.amazon.tests.system.amazon.aws.utils import SystemTestContextBuilder from airflow 
import DAG from airflow.decorators import task, task_group @@ -61,8 +62,6 @@ from airflow.utils.edgemodifier import Label from airflow.utils.trigger_rule import TriggerRule -from providers.tests.system.amazon.aws.utils import SystemTestContextBuilder - ####################################################################### # NOTE: # Access to the following foundation model must be requested via diff --git a/providers/tests/system/amazon/aws/example_cloudformation.py b/providers/amazon/tests/system/amazon/aws/example_cloudformation.py similarity index 97% rename from providers/tests/system/amazon/aws/example_cloudformation.py rename to providers/amazon/tests/system/amazon/aws/example_cloudformation.py index 7553f5d9bd0a36..3066ca2e7e850f 100644 --- a/providers/tests/system/amazon/aws/example_cloudformation.py +++ b/providers/amazon/tests/system/amazon/aws/example_cloudformation.py @@ -19,6 +19,8 @@ import json from datetime import datetime +from providers.amazon.tests.system.amazon.aws.utils import SystemTestContextBuilder + from airflow.models.baseoperator import chain from airflow.models.dag import DAG from airflow.providers.amazon.aws.operators.cloud_formation import ( @@ -31,8 +33,6 @@ ) from airflow.utils.trigger_rule import TriggerRule -from providers.tests.system.amazon.aws.utils import SystemTestContextBuilder - sys_test_context_task = SystemTestContextBuilder().build() DAG_ID = "example_cloudformation" diff --git a/providers/tests/system/amazon/aws/example_comprehend.py b/providers/amazon/tests/system/amazon/aws/example_comprehend.py similarity index 98% rename from providers/tests/system/amazon/aws/example_comprehend.py rename to providers/amazon/tests/system/amazon/aws/example_comprehend.py index a4e3bdd4852ef8..75d21b4cd09819 100644 --- a/providers/tests/system/amazon/aws/example_comprehend.py +++ b/providers/amazon/tests/system/amazon/aws/example_comprehend.py @@ -19,6 +19,8 @@ import json from datetime import datetime +from 
providers.amazon.tests.system.amazon.aws.utils import SystemTestContextBuilder + from airflow import DAG from airflow.decorators import task_group from airflow.models.baseoperator import chain @@ -33,8 +35,6 @@ ) from airflow.utils.trigger_rule import TriggerRule -from providers.tests.system.amazon.aws.utils import SystemTestContextBuilder - ROLE_ARN_KEY = "ROLE_ARN" sys_test_context_task = SystemTestContextBuilder().add_variable(ROLE_ARN_KEY).build() diff --git a/providers/tests/system/amazon/aws/example_comprehend_document_classifier.py b/providers/amazon/tests/system/amazon/aws/example_comprehend_document_classifier.py similarity index 98% rename from providers/tests/system/amazon/aws/example_comprehend_document_classifier.py rename to providers/amazon/tests/system/amazon/aws/example_comprehend_document_classifier.py index b0bf41209785cb..10dc8edadaca9b 100644 --- a/providers/tests/system/amazon/aws/example_comprehend_document_classifier.py +++ b/providers/amazon/tests/system/amazon/aws/example_comprehend_document_classifier.py @@ -18,6 +18,8 @@ from datetime import datetime +from providers.amazon.tests.system.amazon.aws.utils import SystemTestContextBuilder + from airflow import DAG from airflow.decorators import task, task_group from airflow.models.baseoperator import chain @@ -36,8 +38,6 @@ ) from airflow.utils.trigger_rule import TriggerRule -from providers.tests.system.amazon.aws.utils import SystemTestContextBuilder - ROLE_ARN_KEY = "ROLE_ARN" BUCKET_NAME_KEY = "BUCKET_NAME" BUCKET_KEY_DISCHARGE_KEY = "BUCKET_KEY_DISCHARGE" diff --git a/providers/tests/system/amazon/aws/example_datasync.py b/providers/amazon/tests/system/amazon/aws/example_datasync.py similarity index 98% rename from providers/tests/system/amazon/aws/example_datasync.py rename to providers/amazon/tests/system/amazon/aws/example_datasync.py index 8d25fcf9c39898..60457b2c24cc93 100644 --- a/providers/tests/system/amazon/aws/example_datasync.py +++ 
b/providers/amazon/tests/system/amazon/aws/example_datasync.py @@ -19,6 +19,7 @@ from datetime import datetime import boto3 +from providers.amazon.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder from airflow.decorators import task from airflow.models.baseoperator import chain @@ -27,8 +28,6 @@ from airflow.providers.amazon.aws.operators.s3 import S3CreateBucketOperator, S3DeleteBucketOperator from airflow.utils.trigger_rule import TriggerRule -from providers.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder - DAG_ID = "example_datasync" # Externally fetched variables: diff --git a/providers/tests/system/amazon/aws/example_dms.py b/providers/amazon/tests/system/amazon/aws/example_dms.py similarity index 98% rename from providers/tests/system/amazon/aws/example_dms.py rename to providers/amazon/tests/system/amazon/aws/example_dms.py index 5d15aa0c4ecb0d..2f52584f210166 100644 --- a/providers/tests/system/amazon/aws/example_dms.py +++ b/providers/amazon/tests/system/amazon/aws/example_dms.py @@ -27,6 +27,8 @@ from typing import cast import boto3 +from providers.amazon.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder +from providers.amazon.tests.system.amazon.aws.utils.ec2 import get_default_vpc_id from sqlalchemy import Column, MetaData, String, Table, create_engine from airflow.decorators import task @@ -47,9 +49,6 @@ from airflow.providers.amazon.aws.sensors.dms import DmsTaskBaseSensor, DmsTaskCompletedSensor from airflow.utils.trigger_rule import TriggerRule -from providers.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder -from providers.tests.system.amazon.aws.utils.ec2 import get_default_vpc_id - DAG_ID = "example_dms" ROLE_ARN_KEY = "ROLE_ARN" diff --git a/providers/tests/system/amazon/aws/example_dms_serverless.py b/providers/amazon/tests/system/amazon/aws/example_dms_serverless.py similarity index 98% rename from 
providers/tests/system/amazon/aws/example_dms_serverless.py rename to providers/amazon/tests/system/amazon/aws/example_dms_serverless.py index 4fca4f688a5987..e645e39e872012 100644 --- a/providers/tests/system/amazon/aws/example_dms_serverless.py +++ b/providers/amazon/tests/system/amazon/aws/example_dms_serverless.py @@ -26,6 +26,8 @@ from datetime import datetime import boto3 +from providers.amazon.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder +from providers.amazon.tests.system.amazon.aws.utils.ec2 import get_default_vpc_id from sqlalchemy import Column, MetaData, String, Table, create_engine from airflow.decorators import task @@ -46,9 +48,6 @@ from airflow.providers.amazon.aws.operators.s3 import S3CreateBucketOperator, S3DeleteBucketOperator from airflow.utils.trigger_rule import TriggerRule -from providers.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder -from providers.tests.system.amazon.aws.utils.ec2 import get_default_vpc_id - """ This example demonstrates how to use the DMS operators to create a serverless replication task to replicate data from a PostgreSQL database to Amazon S3. 
diff --git a/providers/tests/system/amazon/aws/example_dynamodb.py b/providers/amazon/tests/system/amazon/aws/example_dynamodb.py similarity index 97% rename from providers/tests/system/amazon/aws/example_dynamodb.py rename to providers/amazon/tests/system/amazon/aws/example_dynamodb.py index 3cfa15ecaa36e5..f7c08f55ef913d 100644 --- a/providers/tests/system/amazon/aws/example_dynamodb.py +++ b/providers/amazon/tests/system/amazon/aws/example_dynamodb.py @@ -19,6 +19,7 @@ from datetime import datetime import boto3 +from providers.amazon.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder from airflow.decorators import task from airflow.models.baseoperator import chain @@ -26,8 +27,6 @@ from airflow.providers.amazon.aws.sensors.dynamodb import DynamoDBValueSensor from airflow.utils.trigger_rule import TriggerRule -from providers.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder - # TODO: FIXME The argument types here seems somewhat tricky to fix # mypy: disable-error-code="arg-type" diff --git a/providers/tests/system/amazon/aws/example_dynamodb_to_s3.py b/providers/amazon/tests/system/amazon/aws/example_dynamodb_to_s3.py similarity index 99% rename from providers/tests/system/amazon/aws/example_dynamodb_to_s3.py rename to providers/amazon/tests/system/amazon/aws/example_dynamodb_to_s3.py index 641962aa636d70..58439869031d5b 100644 --- a/providers/tests/system/amazon/aws/example_dynamodb_to_s3.py +++ b/providers/amazon/tests/system/amazon/aws/example_dynamodb_to_s3.py @@ -21,6 +21,7 @@ import boto3 import tenacity +from providers.amazon.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder from tenacity import before_log, before_sleep_log from airflow.decorators import task, task_group @@ -32,8 +33,6 @@ from airflow.utils.edgemodifier import Label from airflow.utils.trigger_rule import TriggerRule -from providers.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder - log = 
logging.getLogger(__name__) DAG_ID = "example_dynamodb_to_s3" diff --git a/providers/tests/system/amazon/aws/example_ec2.py b/providers/amazon/tests/system/amazon/aws/example_ec2.py similarity index 98% rename from providers/tests/system/amazon/aws/example_ec2.py rename to providers/amazon/tests/system/amazon/aws/example_ec2.py index 750ec19c27779b..ca366f8bf6c83d 100644 --- a/providers/tests/system/amazon/aws/example_ec2.py +++ b/providers/amazon/tests/system/amazon/aws/example_ec2.py @@ -20,6 +20,7 @@ from operator import itemgetter import boto3 +from providers.amazon.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder from airflow.decorators import task from airflow.models.baseoperator import chain @@ -35,8 +36,6 @@ from airflow.providers.amazon.aws.sensors.ec2 import EC2InstanceStateSensor from airflow.utils.trigger_rule import TriggerRule -from providers.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder - DAG_ID = "example_ec2" sys_test_context_task = SystemTestContextBuilder().build() diff --git a/providers/tests/system/amazon/aws/example_ecs.py b/providers/amazon/tests/system/amazon/aws/example_ecs.py similarity index 98% rename from providers/tests/system/amazon/aws/example_ecs.py rename to providers/amazon/tests/system/amazon/aws/example_ecs.py index 0f9118ae34af35..e8fec4600cae5a 100644 --- a/providers/tests/system/amazon/aws/example_ecs.py +++ b/providers/amazon/tests/system/amazon/aws/example_ecs.py @@ -19,6 +19,7 @@ from datetime import datetime import boto3 +from providers.amazon.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder from airflow.decorators import task from airflow.models.baseoperator import chain @@ -37,8 +38,6 @@ ) from airflow.utils.trigger_rule import TriggerRule -from providers.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder - DAG_ID = "example_ecs" # Externally fetched variables: diff --git 
a/providers/tests/system/amazon/aws/example_ecs_fargate.py b/providers/amazon/tests/system/amazon/aws/example_ecs_fargate.py similarity index 98% rename from providers/tests/system/amazon/aws/example_ecs_fargate.py rename to providers/amazon/tests/system/amazon/aws/example_ecs_fargate.py index df3afbce7df71a..5204e96b1f1a0a 100644 --- a/providers/tests/system/amazon/aws/example_ecs_fargate.py +++ b/providers/amazon/tests/system/amazon/aws/example_ecs_fargate.py @@ -19,6 +19,7 @@ from datetime import datetime import boto3 +from providers.amazon.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder from airflow.decorators import task from airflow.models.baseoperator import chain @@ -28,8 +29,6 @@ from airflow.providers.amazon.aws.sensors.ecs import EcsTaskStateSensor from airflow.utils.trigger_rule import TriggerRule -from providers.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder - DAG_ID = "example_ecs_fargate" # Externally fetched variables: diff --git a/providers/tests/system/amazon/aws/example_eks_templated.py b/providers/amazon/tests/system/amazon/aws/example_eks_templated.py similarity index 98% rename from providers/tests/system/amazon/aws/example_eks_templated.py rename to providers/amazon/tests/system/amazon/aws/example_eks_templated.py index de4d5e621ef0ad..eab98d80f19665 100644 --- a/providers/tests/system/amazon/aws/example_eks_templated.py +++ b/providers/amazon/tests/system/amazon/aws/example_eks_templated.py @@ -18,6 +18,8 @@ from datetime import datetime +from providers.amazon.tests.system.amazon.aws.utils import SystemTestContextBuilder + from airflow.models.baseoperator import chain from airflow.models.dag import DAG from airflow.providers.amazon.aws.hooks.eks import ClusterStates, NodegroupStates @@ -30,8 +32,6 @@ ) from airflow.providers.amazon.aws.sensors.eks import EksClusterStateSensor, EksNodegroupStateSensor -from providers.tests.system.amazon.aws.utils import SystemTestContextBuilder - 
sys_test_context_task = SystemTestContextBuilder().build() DAG_ID = "example_eks_templated" diff --git a/providers/tests/system/amazon/aws/example_eks_with_fargate_in_one_step.py b/providers/amazon/tests/system/amazon/aws/example_eks_with_fargate_in_one_step.py similarity index 96% rename from providers/tests/system/amazon/aws/example_eks_with_fargate_in_one_step.py rename to providers/amazon/tests/system/amazon/aws/example_eks_with_fargate_in_one_step.py index a267463c11f3df..003cd34f7ae867 100644 --- a/providers/tests/system/amazon/aws/example_eks_with_fargate_in_one_step.py +++ b/providers/amazon/tests/system/amazon/aws/example_eks_with_fargate_in_one_step.py @@ -18,6 +18,9 @@ from datetime import datetime +from providers.amazon.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder +from providers.amazon.tests.system.amazon.aws.utils.k8s import get_describe_pod_operator + from airflow.models.baseoperator import chain from airflow.models.dag import DAG from airflow.providers.amazon.aws.hooks.eks import ClusterStates, FargateProfileStates @@ -29,9 +32,6 @@ from airflow.providers.amazon.aws.sensors.eks import EksClusterStateSensor, EksFargateProfileStateSensor from airflow.utils.trigger_rule import TriggerRule -from providers.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder -from providers.tests.system.amazon.aws.utils.k8s import get_describe_pod_operator - DAG_ID = "example_eks_with_fargate_in_one_step" # Externally fetched variables diff --git a/providers/tests/system/amazon/aws/example_eks_with_fargate_profile.py b/providers/amazon/tests/system/amazon/aws/example_eks_with_fargate_profile.py similarity index 97% rename from providers/tests/system/amazon/aws/example_eks_with_fargate_profile.py rename to providers/amazon/tests/system/amazon/aws/example_eks_with_fargate_profile.py index 1348b352cc51f8..0887b3494c30dc 100644 --- a/providers/tests/system/amazon/aws/example_eks_with_fargate_profile.py +++ 
b/providers/amazon/tests/system/amazon/aws/example_eks_with_fargate_profile.py @@ -18,6 +18,9 @@ from datetime import datetime +from providers.amazon.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder +from providers.amazon.tests.system.amazon.aws.utils.k8s import get_describe_pod_operator + from airflow.models.baseoperator import chain from airflow.models.dag import DAG from airflow.providers.amazon.aws.hooks.eks import ClusterStates, FargateProfileStates @@ -31,9 +34,6 @@ from airflow.providers.amazon.aws.sensors.eks import EksClusterStateSensor, EksFargateProfileStateSensor from airflow.utils.trigger_rule import TriggerRule -from providers.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder -from providers.tests.system.amazon.aws.utils.k8s import get_describe_pod_operator - DAG_ID = "example_eks_with_fargate_profile" # Externally fetched variables: diff --git a/providers/tests/system/amazon/aws/example_eks_with_nodegroup_in_one_step.py b/providers/amazon/tests/system/amazon/aws/example_eks_with_nodegroup_in_one_step.py similarity index 97% rename from providers/tests/system/amazon/aws/example_eks_with_nodegroup_in_one_step.py rename to providers/amazon/tests/system/amazon/aws/example_eks_with_nodegroup_in_one_step.py index 07538d94d17b3a..3b11019c38221e 100644 --- a/providers/tests/system/amazon/aws/example_eks_with_nodegroup_in_one_step.py +++ b/providers/amazon/tests/system/amazon/aws/example_eks_with_nodegroup_in_one_step.py @@ -19,6 +19,8 @@ from datetime import datetime import boto3 +from providers.amazon.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder +from providers.amazon.tests.system.amazon.aws.utils.k8s import get_describe_pod_operator from airflow.decorators import task from airflow.models.baseoperator import chain @@ -32,9 +34,6 @@ from airflow.providers.amazon.aws.sensors.eks import EksClusterStateSensor, EksNodegroupStateSensor from airflow.utils.trigger_rule import TriggerRule 
-from providers.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder -from providers.tests.system.amazon.aws.utils.k8s import get_describe_pod_operator - DAG_ID = "example_eks_with_nodegroup_in_one_step" # Externally fetched variables: diff --git a/providers/tests/system/amazon/aws/example_eks_with_nodegroups.py b/providers/amazon/tests/system/amazon/aws/example_eks_with_nodegroups.py similarity index 97% rename from providers/tests/system/amazon/aws/example_eks_with_nodegroups.py rename to providers/amazon/tests/system/amazon/aws/example_eks_with_nodegroups.py index 98448767d97452..1151b15991dc15 100644 --- a/providers/tests/system/amazon/aws/example_eks_with_nodegroups.py +++ b/providers/amazon/tests/system/amazon/aws/example_eks_with_nodegroups.py @@ -19,6 +19,8 @@ from datetime import datetime import boto3 +from providers.amazon.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder +from providers.amazon.tests.system.amazon.aws.utils.k8s import get_describe_pod_operator from airflow.decorators import task from airflow.models.baseoperator import chain @@ -34,9 +36,6 @@ from airflow.providers.amazon.aws.sensors.eks import EksClusterStateSensor, EksNodegroupStateSensor from airflow.utils.trigger_rule import TriggerRule -from providers.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder -from providers.tests.system.amazon.aws.utils.k8s import get_describe_pod_operator - DAG_ID = "example_eks_with_nodegroups" # Externally fetched variables: diff --git a/providers/tests/system/amazon/aws/example_emr.py b/providers/amazon/tests/system/amazon/aws/example_emr.py similarity index 98% rename from providers/tests/system/amazon/aws/example_emr.py rename to providers/amazon/tests/system/amazon/aws/example_emr.py index 9a29667b5d0988..62dc8d2128d9b3 100644 --- a/providers/tests/system/amazon/aws/example_emr.py +++ b/providers/amazon/tests/system/amazon/aws/example_emr.py @@ -23,6 +23,7 @@ from typing import Any 
import boto3 +from providers.amazon.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder from airflow.decorators import task from airflow.models.baseoperator import chain @@ -37,8 +38,6 @@ from airflow.providers.amazon.aws.sensors.emr import EmrJobFlowSensor, EmrStepSensor from airflow.utils.trigger_rule import TriggerRule -from providers.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder - DAG_ID = "example_emr" CONFIG_NAME = "EMR Runtime Role Security Configuration" EXECUTION_ROLE_ARN_KEY = "EXECUTION_ROLE_ARN" diff --git a/providers/tests/system/amazon/aws/example_emr_eks.py b/providers/amazon/tests/system/amazon/aws/example_emr_eks.py similarity index 99% rename from providers/tests/system/amazon/aws/example_emr_eks.py rename to providers/amazon/tests/system/amazon/aws/example_emr_eks.py index cafaf09ed74737..4254f8a0c6ba1d 100644 --- a/providers/tests/system/amazon/aws/example_emr_eks.py +++ b/providers/amazon/tests/system/amazon/aws/example_emr_eks.py @@ -21,6 +21,7 @@ from datetime import datetime import boto3 +from providers.amazon.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder from airflow.decorators import task from airflow.models.baseoperator import chain @@ -37,8 +38,6 @@ from airflow.providers.amazon.aws.sensors.emr import EmrContainerSensor from airflow.utils.trigger_rule import TriggerRule -from providers.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder - DAG_ID = "example_emr_eks" # Externally fetched variables diff --git a/providers/tests/system/amazon/aws/example_emr_notebook_execution.py b/providers/amazon/tests/system/amazon/aws/example_emr_notebook_execution.py similarity index 97% rename from providers/tests/system/amazon/aws/example_emr_notebook_execution.py rename to providers/amazon/tests/system/amazon/aws/example_emr_notebook_execution.py index 681e97048aa582..f145a10debd791 100644 --- 
a/providers/tests/system/amazon/aws/example_emr_notebook_execution.py +++ b/providers/amazon/tests/system/amazon/aws/example_emr_notebook_execution.py @@ -20,6 +20,8 @@ from datetime import datetime +from providers.amazon.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder + from airflow.models.baseoperator import chain from airflow.models.dag import DAG from airflow.providers.amazon.aws.operators.emr import ( @@ -28,8 +30,6 @@ ) from airflow.providers.amazon.aws.sensors.emr import EmrNotebookExecutionSensor -from providers.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder - DAG_ID = "example_emr_notebook" # Externally fetched variables: EDITOR_ID_KEY = "EDITOR_ID" diff --git a/providers/tests/system/amazon/aws/example_emr_serverless.py b/providers/amazon/tests/system/amazon/aws/example_emr_serverless.py similarity index 98% rename from providers/tests/system/amazon/aws/example_emr_serverless.py rename to providers/amazon/tests/system/amazon/aws/example_emr_serverless.py index 9059f0e1b1e027..f15889fedbb91c 100644 --- a/providers/tests/system/amazon/aws/example_emr_serverless.py +++ b/providers/amazon/tests/system/amazon/aws/example_emr_serverless.py @@ -19,6 +19,7 @@ from datetime import datetime import boto3 +from providers.amazon.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder from airflow.models.baseoperator import chain from airflow.models.dag import DAG @@ -33,8 +34,6 @@ from airflow.providers.amazon.aws.sensors.emr import EmrServerlessApplicationSensor, EmrServerlessJobSensor from airflow.utils.trigger_rule import TriggerRule -from providers.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder - DAG_ID = "example_emr_serverless" # Externally fetched variables: diff --git a/providers/tests/system/amazon/aws/example_eventbridge.py b/providers/amazon/tests/system/amazon/aws/example_eventbridge.py similarity index 96% rename from 
providers/tests/system/amazon/aws/example_eventbridge.py rename to providers/amazon/tests/system/amazon/aws/example_eventbridge.py index 6f31fc9ce37d8e..cd6d7905b0d157 100644 --- a/providers/tests/system/amazon/aws/example_eventbridge.py +++ b/providers/amazon/tests/system/amazon/aws/example_eventbridge.py @@ -18,6 +18,8 @@ from datetime import datetime +from providers.amazon.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder + from airflow.models.baseoperator import chain from airflow.models.dag import DAG from airflow.providers.amazon.aws.operators.eventbridge import ( @@ -27,8 +29,6 @@ EventBridgePutRuleOperator, ) -from providers.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder - DAG_ID = "example_eventbridge" ENTRIES = [ { diff --git a/providers/tests/system/amazon/aws/example_ftp_to_s3.py b/providers/amazon/tests/system/amazon/aws/example_ftp_to_s3.py similarity index 96% rename from providers/tests/system/amazon/aws/example_ftp_to_s3.py rename to providers/amazon/tests/system/amazon/aws/example_ftp_to_s3.py index b3e0d2ccf43f54..34373229f9ae6a 100644 --- a/providers/tests/system/amazon/aws/example_ftp_to_s3.py +++ b/providers/amazon/tests/system/amazon/aws/example_ftp_to_s3.py @@ -18,14 +18,14 @@ from datetime import datetime +from providers.amazon.tests.system.amazon.aws.utils import SystemTestContextBuilder + from airflow.models.baseoperator import chain from airflow.models.dag import DAG from airflow.providers.amazon.aws.operators.s3 import S3CreateBucketOperator, S3DeleteBucketOperator from airflow.providers.amazon.aws.transfers.ftp_to_s3 import FTPToS3Operator from airflow.utils.trigger_rule import TriggerRule -from providers.tests.system.amazon.aws.utils import SystemTestContextBuilder - sys_test_context_task = SystemTestContextBuilder().build() DAG_ID = "example_ftp_to_s3" diff --git a/providers/tests/system/amazon/aws/example_gcs_to_s3.py b/providers/amazon/tests/system/amazon/aws/example_gcs_to_s3.py 
similarity index 97% rename from providers/tests/system/amazon/aws/example_gcs_to_s3.py rename to providers/amazon/tests/system/amazon/aws/example_gcs_to_s3.py index 5257b440808e98..96b73671e044be 100644 --- a/providers/tests/system/amazon/aws/example_gcs_to_s3.py +++ b/providers/amazon/tests/system/amazon/aws/example_gcs_to_s3.py @@ -18,6 +18,8 @@ from datetime import datetime +from providers.amazon.tests.system.amazon.aws.utils import SystemTestContextBuilder + from airflow.decorators import task from airflow.models.baseoperator import chain from airflow.models.dag import DAG @@ -33,8 +35,6 @@ ) from airflow.utils.trigger_rule import TriggerRule -from providers.tests.system.amazon.aws.utils import SystemTestContextBuilder - # Externally fetched variables: GCP_PROJECT_ID = "GCP_PROJECT_ID" diff --git a/providers/tests/system/amazon/aws/example_glacier_to_gcs.py b/providers/amazon/tests/system/amazon/aws/example_glacier_to_gcs.py similarity index 97% rename from providers/tests/system/amazon/aws/example_glacier_to_gcs.py rename to providers/amazon/tests/system/amazon/aws/example_glacier_to_gcs.py index 9281d3c92e1a04..4da7f5d7be7cc9 100644 --- a/providers/tests/system/amazon/aws/example_glacier_to_gcs.py +++ b/providers/amazon/tests/system/amazon/aws/example_glacier_to_gcs.py @@ -19,6 +19,7 @@ from datetime import datetime import boto3 +from providers.amazon.tests.system.amazon.aws.utils import SystemTestContextBuilder from airflow.decorators import task from airflow.models.baseoperator import chain @@ -31,8 +32,6 @@ from airflow.providers.amazon.aws.transfers.glacier_to_gcs import GlacierToGCSOperator from airflow.utils.trigger_rule import TriggerRule -from providers.tests.system.amazon.aws.utils import SystemTestContextBuilder - sys_test_context_task = SystemTestContextBuilder().build() DAG_ID = "example_glacier_to_gcs" diff --git a/providers/tests/system/amazon/aws/example_glue.py b/providers/amazon/tests/system/amazon/aws/example_glue.py similarity index 98% 
rename from providers/tests/system/amazon/aws/example_glue.py rename to providers/amazon/tests/system/amazon/aws/example_glue.py index 3d042c79d44cb3..a7fa18eaa7942a 100644 --- a/providers/tests/system/amazon/aws/example_glue.py +++ b/providers/amazon/tests/system/amazon/aws/example_glue.py @@ -20,6 +20,7 @@ from typing import TYPE_CHECKING import boto3 +from providers.amazon.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder, prune_logs from airflow.decorators import task from airflow.models.baseoperator import chain @@ -36,8 +37,6 @@ from airflow.providers.amazon.aws.sensors.glue_crawler import GlueCrawlerSensor from airflow.utils.trigger_rule import TriggerRule -from providers.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder, prune_logs - if TYPE_CHECKING: from botocore.client import BaseClient diff --git a/providers/tests/system/amazon/aws/example_glue_data_quality.py b/providers/amazon/tests/system/amazon/aws/example_glue_data_quality.py similarity index 98% rename from providers/tests/system/amazon/aws/example_glue_data_quality.py rename to providers/amazon/tests/system/amazon/aws/example_glue_data_quality.py index 3c1bdb378b5dc9..941d7232ae1657 100644 --- a/providers/tests/system/amazon/aws/example_glue_data_quality.py +++ b/providers/amazon/tests/system/amazon/aws/example_glue_data_quality.py @@ -18,6 +18,8 @@ from datetime import datetime +from providers.amazon.tests.system.amazon.aws.utils import SystemTestContextBuilder + from airflow import DAG from airflow.decorators import task, task_group from airflow.models.baseoperator import chain @@ -35,8 +37,6 @@ from airflow.providers.amazon.aws.sensors.glue import GlueDataQualityRuleSetEvaluationRunSensor from airflow.utils.trigger_rule import TriggerRule -from providers.tests.system.amazon.aws.utils import SystemTestContextBuilder - ROLE_ARN_KEY = "ROLE_ARN" sys_test_context_task = SystemTestContextBuilder().add_variable(ROLE_ARN_KEY).build() diff --git 
a/providers/tests/system/amazon/aws/example_glue_data_quality_with_recommendation.py b/providers/amazon/tests/system/amazon/aws/example_glue_data_quality_with_recommendation.py similarity index 98% rename from providers/tests/system/amazon/aws/example_glue_data_quality_with_recommendation.py rename to providers/amazon/tests/system/amazon/aws/example_glue_data_quality_with_recommendation.py index a0dd4ebba7b56b..6757dd2e615f6d 100644 --- a/providers/tests/system/amazon/aws/example_glue_data_quality_with_recommendation.py +++ b/providers/amazon/tests/system/amazon/aws/example_glue_data_quality_with_recommendation.py @@ -18,6 +18,8 @@ from datetime import datetime +from providers.amazon.tests.system.amazon.aws.utils import SystemTestContextBuilder + from airflow import DAG from airflow.decorators import task, task_group from airflow.models.baseoperator import chain @@ -38,8 +40,6 @@ ) from airflow.utils.trigger_rule import TriggerRule -from providers.tests.system.amazon.aws.utils import SystemTestContextBuilder - ROLE_ARN_KEY = "ROLE_ARN" sys_test_context_task = SystemTestContextBuilder().add_variable(ROLE_ARN_KEY).build() diff --git a/providers/tests/system/amazon/aws/example_glue_databrew.py b/providers/amazon/tests/system/amazon/aws/example_glue_databrew.py similarity index 98% rename from providers/tests/system/amazon/aws/example_glue_databrew.py rename to providers/amazon/tests/system/amazon/aws/example_glue_databrew.py index 95799666f46e46..2460d762e8984c 100644 --- a/providers/tests/system/amazon/aws/example_glue_databrew.py +++ b/providers/amazon/tests/system/amazon/aws/example_glue_databrew.py @@ -18,6 +18,7 @@ import boto3 import pendulum +from providers.amazon.tests.system.amazon.aws.utils import SystemTestContextBuilder from airflow.decorators import task from airflow.models.baseoperator import chain @@ -32,8 +33,6 @@ ) from airflow.utils.trigger_rule import TriggerRule -from providers.tests.system.amazon.aws.utils import SystemTestContextBuilder - DAG_ID 
= "example_glue_databrew" # Externally fetched variables: diff --git a/providers/tests/system/amazon/aws/example_google_api_sheets_to_s3.py b/providers/amazon/tests/system/amazon/aws/example_google_api_sheets_to_s3.py similarity index 97% rename from providers/tests/system/amazon/aws/example_google_api_sheets_to_s3.py rename to providers/amazon/tests/system/amazon/aws/example_google_api_sheets_to_s3.py index 24a4eba54d59ba..38db6f178b27a1 100644 --- a/providers/tests/system/amazon/aws/example_google_api_sheets_to_s3.py +++ b/providers/amazon/tests/system/amazon/aws/example_google_api_sheets_to_s3.py @@ -24,14 +24,14 @@ import os from datetime import datetime +from providers.amazon.tests.system.amazon.aws.utils import SystemTestContextBuilder + from airflow.models.baseoperator import chain from airflow.models.dag import DAG from airflow.providers.amazon.aws.operators.s3 import S3CreateBucketOperator, S3DeleteBucketOperator from airflow.providers.amazon.aws.transfers.google_api_to_s3 import GoogleApiToS3Operator from airflow.utils.trigger_rule import TriggerRule -from providers.tests.system.amazon.aws.utils import SystemTestContextBuilder - sys_test_context_task = SystemTestContextBuilder().build() DAG_ID = "example_google_api_sheets_to_s3" diff --git a/providers/tests/system/amazon/aws/example_google_api_youtube_to_s3.py b/providers/amazon/tests/system/amazon/aws/example_google_api_youtube_to_s3.py similarity index 98% rename from providers/tests/system/amazon/aws/example_google_api_youtube_to_s3.py rename to providers/amazon/tests/system/amazon/aws/example_google_api_youtube_to_s3.py index 3d5ff917bfc0a9..bc4afe636b14fe 100644 --- a/providers/tests/system/amazon/aws/example_google_api_youtube_to_s3.py +++ b/providers/amazon/tests/system/amazon/aws/example_google_api_youtube_to_s3.py @@ -52,6 +52,7 @@ from datetime import datetime import boto3 +from providers.amazon.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder from airflow import 
settings from airflow.decorators import task @@ -62,8 +63,6 @@ from airflow.providers.amazon.aws.transfers.google_api_to_s3 import GoogleApiToS3Operator from airflow.utils.trigger_rule import TriggerRule -from providers.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder - DAG_ID = "example_google_api_youtube_to_s3" YOUTUBE_CHANNEL_ID = "UCSXwxpWZQ7XZ1WL3wqevChA" diff --git a/providers/tests/system/amazon/aws/example_hive_to_dynamodb.py b/providers/amazon/tests/system/amazon/aws/example_hive_to_dynamodb.py similarity index 98% rename from providers/tests/system/amazon/aws/example_hive_to_dynamodb.py rename to providers/amazon/tests/system/amazon/aws/example_hive_to_dynamodb.py index 0c0978361bab7a..fe0d5ecd27fc3d 100644 --- a/providers/tests/system/amazon/aws/example_hive_to_dynamodb.py +++ b/providers/amazon/tests/system/amazon/aws/example_hive_to_dynamodb.py @@ -24,6 +24,8 @@ from datetime import datetime +from providers.amazon.tests.system.amazon.aws.utils import SystemTestContextBuilder + from airflow.decorators import task from airflow.models import Connection from airflow.models.baseoperator import chain @@ -33,8 +35,6 @@ from airflow.utils import db from airflow.utils.trigger_rule import TriggerRule -from providers.tests.system.amazon.aws.utils import SystemTestContextBuilder - DAG_ID = "example_hive_to_dynamodb" # Externally fetched variables: diff --git a/providers/tests/system/amazon/aws/example_http_to_s3.py b/providers/amazon/tests/system/amazon/aws/example_http_to_s3.py similarity index 97% rename from providers/tests/system/amazon/aws/example_http_to_s3.py rename to providers/amazon/tests/system/amazon/aws/example_http_to_s3.py index 612d68ef36e0ce..51ac0ce2480b40 100644 --- a/providers/tests/system/amazon/aws/example_http_to_s3.py +++ b/providers/amazon/tests/system/amazon/aws/example_http_to_s3.py @@ -18,6 +18,8 @@ from datetime import datetime +from providers.amazon.tests.system.amazon.aws.utils import SystemTestContextBuilder 
+ from airflow import settings from airflow.decorators import task from airflow.models.baseoperator import chain @@ -28,8 +30,6 @@ from airflow.providers.standard.operators.bash import BashOperator from airflow.utils.trigger_rule import TriggerRule -from providers.tests.system.amazon.aws.utils import SystemTestContextBuilder - sys_test_context_task = SystemTestContextBuilder().build() DAG_ID = "example_http_to_s3" diff --git a/providers/tests/system/amazon/aws/example_imap_attachment_to_s3.py b/providers/amazon/tests/system/amazon/aws/example_imap_attachment_to_s3.py similarity index 97% rename from providers/tests/system/amazon/aws/example_imap_attachment_to_s3.py rename to providers/amazon/tests/system/amazon/aws/example_imap_attachment_to_s3.py index c376778b0df3a9..94ecf88ef010b6 100644 --- a/providers/tests/system/amazon/aws/example_imap_attachment_to_s3.py +++ b/providers/amazon/tests/system/amazon/aws/example_imap_attachment_to_s3.py @@ -23,14 +23,14 @@ from datetime import datetime +from providers.amazon.tests.system.amazon.aws.utils import SystemTestContextBuilder + from airflow.models.baseoperator import chain from airflow.models.dag import DAG from airflow.providers.amazon.aws.operators.s3 import S3CreateBucketOperator, S3DeleteBucketOperator from airflow.providers.amazon.aws.transfers.imap_attachment_to_s3 import ImapAttachmentToS3Operator from airflow.utils.trigger_rule import TriggerRule -from providers.tests.system.amazon.aws.utils import SystemTestContextBuilder - DAG_ID = "example_imap_attachment_to_s3" # Externally fetched variables: diff --git a/providers/tests/system/amazon/aws/example_kinesis_analytics.py b/providers/amazon/tests/system/amazon/aws/example_kinesis_analytics.py similarity index 99% rename from providers/tests/system/amazon/aws/example_kinesis_analytics.py rename to providers/amazon/tests/system/amazon/aws/example_kinesis_analytics.py index b4d4566fc76001..f49862084ac1bc 100644 --- 
a/providers/tests/system/amazon/aws/example_kinesis_analytics.py +++ b/providers/amazon/tests/system/amazon/aws/example_kinesis_analytics.py @@ -22,6 +22,7 @@ from datetime import datetime import boto3 +from providers.amazon.tests.system.amazon.aws.utils import SystemTestContextBuilder from airflow import DAG, settings from airflow.decorators import task, task_group @@ -44,8 +45,6 @@ from airflow.providers.amazon.aws.transfers.http_to_s3 import HttpToS3Operator from airflow.utils.trigger_rule import TriggerRule -from providers.tests.system.amazon.aws.utils import SystemTestContextBuilder - ROLE_ARN_KEY = "ROLE_ARN" sys_test_context_task = SystemTestContextBuilder().add_variable(ROLE_ARN_KEY).build() diff --git a/providers/tests/system/amazon/aws/example_lambda.py b/providers/amazon/tests/system/amazon/aws/example_lambda.py similarity index 97% rename from providers/tests/system/amazon/aws/example_lambda.py rename to providers/amazon/tests/system/amazon/aws/example_lambda.py index 767fae86387fd4..9a1a4d23cfa55f 100644 --- a/providers/tests/system/amazon/aws/example_lambda.py +++ b/providers/amazon/tests/system/amazon/aws/example_lambda.py @@ -22,6 +22,7 @@ from io import BytesIO import boto3 +from providers.amazon.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder, prune_logs from airflow.decorators import task from airflow.models.baseoperator import chain @@ -33,8 +34,6 @@ from airflow.providers.amazon.aws.sensors.lambda_function import LambdaFunctionStateSensor from airflow.utils.trigger_rule import TriggerRule -from providers.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder, prune_logs - DAG_ID = "example_lambda" # Externally fetched variables: diff --git a/providers/tests/system/amazon/aws/example_local_to_s3.py b/providers/amazon/tests/system/amazon/aws/example_local_to_s3.py similarity index 97% rename from providers/tests/system/amazon/aws/example_local_to_s3.py rename to 
providers/amazon/tests/system/amazon/aws/example_local_to_s3.py index cdd97489744338..69498f00553540 100644 --- a/providers/tests/system/amazon/aws/example_local_to_s3.py +++ b/providers/amazon/tests/system/amazon/aws/example_local_to_s3.py @@ -19,6 +19,8 @@ import os from datetime import datetime +from providers.amazon.tests.system.amazon.aws.utils import SystemTestContextBuilder + from airflow.decorators import task from airflow.models.baseoperator import chain from airflow.models.dag import DAG @@ -26,8 +28,6 @@ from airflow.providers.amazon.aws.transfers.local_to_s3 import LocalFilesystemToS3Operator from airflow.utils.trigger_rule import TriggerRule -from providers.tests.system.amazon.aws.utils import SystemTestContextBuilder - sys_test_context_task = SystemTestContextBuilder().build() diff --git a/providers/tests/system/amazon/aws/example_mongo_to_s3.py b/providers/amazon/tests/system/amazon/aws/example_mongo_to_s3.py similarity index 97% rename from providers/tests/system/amazon/aws/example_mongo_to_s3.py rename to providers/amazon/tests/system/amazon/aws/example_mongo_to_s3.py index 80e02510e3bade..ddfb20e2e84c46 100644 --- a/providers/tests/system/amazon/aws/example_mongo_to_s3.py +++ b/providers/amazon/tests/system/amazon/aws/example_mongo_to_s3.py @@ -16,6 +16,8 @@ # under the License. 
from __future__ import annotations +from providers.amazon.tests.system.amazon.aws.utils import SystemTestContextBuilder + from airflow.models.baseoperator import chain from airflow.models.dag import DAG from airflow.providers.amazon.aws.operators.s3 import S3CreateBucketOperator, S3DeleteBucketOperator @@ -23,8 +25,6 @@ from airflow.utils.timezone import datetime from airflow.utils.trigger_rule import TriggerRule -from providers.tests.system.amazon.aws.utils import SystemTestContextBuilder - DAG_ID = "example_mongo_to_s3" # Externally fetched variables: diff --git a/providers/tests/system/amazon/aws/example_neptune.py b/providers/amazon/tests/system/amazon/aws/example_neptune.py similarity index 97% rename from providers/tests/system/amazon/aws/example_neptune.py rename to providers/amazon/tests/system/amazon/aws/example_neptune.py index 7682a65b65d6df..4807635c2b3ded 100644 --- a/providers/tests/system/amazon/aws/example_neptune.py +++ b/providers/amazon/tests/system/amazon/aws/example_neptune.py @@ -18,6 +18,8 @@ from datetime import datetime +from providers.amazon.tests.system.amazon.aws.utils import SystemTestContextBuilder + from airflow.decorators import task from airflow.models.baseoperator import chain from airflow.models.dag import DAG @@ -27,8 +29,6 @@ NeptuneStopDbClusterOperator, ) -from providers.tests.system.amazon.aws.utils import SystemTestContextBuilder - DAG_ID = "example_neptune" sys_test_context_task = SystemTestContextBuilder().build() diff --git a/providers/tests/system/amazon/aws/example_quicksight.py b/providers/amazon/tests/system/amazon/aws/example_quicksight.py similarity index 98% rename from providers/tests/system/amazon/aws/example_quicksight.py rename to providers/amazon/tests/system/amazon/aws/example_quicksight.py index eb53ffe60e012d..8f5e2036d3a2c9 100644 --- a/providers/tests/system/amazon/aws/example_quicksight.py +++ b/providers/amazon/tests/system/amazon/aws/example_quicksight.py @@ -21,6 +21,7 @@ from datetime import datetime 
import boto3 +from providers.amazon.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder from airflow.decorators import task from airflow.models.baseoperator import chain @@ -34,8 +35,6 @@ from airflow.providers.amazon.aws.sensors.quicksight import QuickSightSensor from airflow.utils.trigger_rule import TriggerRule -from providers.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder - """ Prerequisites: 1. The account which runs this test must manually be activated in Quicksight here: diff --git a/providers/tests/system/amazon/aws/example_rds_event.py b/providers/amazon/tests/system/amazon/aws/example_rds_event.py similarity index 97% rename from providers/tests/system/amazon/aws/example_rds_event.py rename to providers/amazon/tests/system/amazon/aws/example_rds_event.py index 58ab603326b963..79bd27c680f433 100644 --- a/providers/tests/system/amazon/aws/example_rds_event.py +++ b/providers/amazon/tests/system/amazon/aws/example_rds_event.py @@ -20,6 +20,7 @@ from datetime import datetime import boto3 +from providers.amazon.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder from airflow.decorators import task from airflow.models.baseoperator import chain @@ -32,8 +33,6 @@ ) from airflow.utils.trigger_rule import TriggerRule -from providers.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder - DAG_ID = "example_rds_event" sys_test_context_task = SystemTestContextBuilder().build() diff --git a/providers/tests/system/amazon/aws/example_rds_export.py b/providers/amazon/tests/system/amazon/aws/example_rds_export.py similarity index 98% rename from providers/tests/system/amazon/aws/example_rds_export.py rename to providers/amazon/tests/system/amazon/aws/example_rds_export.py index 756105d8027401..c30096944f3a04 100644 --- a/providers/tests/system/amazon/aws/example_rds_export.py +++ b/providers/amazon/tests/system/amazon/aws/example_rds_export.py @@ -18,6 +18,8 @@ from datetime import 
datetime +from providers.amazon.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder + from airflow.decorators import task from airflow.models.baseoperator import chain from airflow.models.dag import DAG @@ -34,8 +36,6 @@ from airflow.providers.amazon.aws.sensors.rds import RdsExportTaskExistenceSensor, RdsSnapshotExistenceSensor from airflow.utils.trigger_rule import TriggerRule -from providers.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder - DAG_ID = "example_rds_export" # Externally fetched variables: diff --git a/providers/tests/system/amazon/aws/example_rds_instance.py b/providers/amazon/tests/system/amazon/aws/example_rds_instance.py similarity index 97% rename from providers/tests/system/amazon/aws/example_rds_instance.py rename to providers/amazon/tests/system/amazon/aws/example_rds_instance.py index b2a9e7482e7d0b..32e5304142e26d 100644 --- a/providers/tests/system/amazon/aws/example_rds_instance.py +++ b/providers/amazon/tests/system/amazon/aws/example_rds_instance.py @@ -18,6 +18,8 @@ from datetime import datetime +from providers.amazon.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder + from airflow.models.baseoperator import chain from airflow.models.dag import DAG from airflow.providers.amazon.aws.operators.rds import ( @@ -29,8 +31,6 @@ from airflow.providers.amazon.aws.sensors.rds import RdsDbSensor from airflow.utils.trigger_rule import TriggerRule -from providers.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder - sys_test_context_task = SystemTestContextBuilder().build() DAG_ID = "example_rds_instance" diff --git a/providers/tests/system/amazon/aws/example_rds_snapshot.py b/providers/amazon/tests/system/amazon/aws/example_rds_snapshot.py similarity index 98% rename from providers/tests/system/amazon/aws/example_rds_snapshot.py rename to providers/amazon/tests/system/amazon/aws/example_rds_snapshot.py index b6b0c468e589b7..d4cdc1ec9d8878 100644 --- 
a/providers/tests/system/amazon/aws/example_rds_snapshot.py +++ b/providers/amazon/tests/system/amazon/aws/example_rds_snapshot.py @@ -18,6 +18,8 @@ from datetime import datetime +from providers.amazon.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder + from airflow.models.baseoperator import chain from airflow.models.dag import DAG from airflow.providers.amazon.aws.operators.rds import ( @@ -30,8 +32,6 @@ from airflow.providers.amazon.aws.sensors.rds import RdsSnapshotExistenceSensor from airflow.utils.trigger_rule import TriggerRule -from providers.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder - DAG_ID = "example_rds_snapshot" sys_test_context_task = SystemTestContextBuilder().build() diff --git a/providers/tests/system/amazon/aws/example_redshift.py b/providers/amazon/tests/system/amazon/aws/example_redshift.py similarity index 98% rename from providers/tests/system/amazon/aws/example_redshift.py rename to providers/amazon/tests/system/amazon/aws/example_redshift.py index 1d8784e4176cdb..1b3d139c711b92 100644 --- a/providers/tests/system/amazon/aws/example_redshift.py +++ b/providers/amazon/tests/system/amazon/aws/example_redshift.py @@ -20,6 +20,8 @@ from datetime import datetime +from providers.amazon.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder + from airflow.models.baseoperator import chain from airflow.models.dag import DAG from airflow.providers.amazon.aws.operators.redshift_cluster import ( @@ -34,8 +36,6 @@ from airflow.providers.amazon.aws.sensors.redshift_cluster import RedshiftClusterSensor from airflow.utils.trigger_rule import TriggerRule -from providers.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder - DAG_ID = "example_redshift" # Externally fetched variables: diff --git a/providers/tests/system/amazon/aws/example_redshift_s3_transfers.py b/providers/amazon/tests/system/amazon/aws/example_redshift_s3_transfers.py similarity index 99% rename 
from providers/tests/system/amazon/aws/example_redshift_s3_transfers.py rename to providers/amazon/tests/system/amazon/aws/example_redshift_s3_transfers.py index 536f9b97157dce..b9f337282dab7b 100644 --- a/providers/tests/system/amazon/aws/example_redshift_s3_transfers.py +++ b/providers/amazon/tests/system/amazon/aws/example_redshift_s3_transfers.py @@ -18,6 +18,8 @@ from datetime import datetime +from providers.amazon.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder + from airflow.models.baseoperator import chain from airflow.models.dag import DAG from airflow.providers.amazon.aws.operators.redshift_cluster import ( @@ -36,8 +38,6 @@ from airflow.providers.amazon.aws.transfers.s3_to_redshift import S3ToRedshiftOperator from airflow.utils.trigger_rule import TriggerRule -from providers.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder - DAG_ID = "example_redshift_to_s3" # Externally fetched variables: diff --git a/providers/tests/system/amazon/aws/example_s3.py b/providers/amazon/tests/system/amazon/aws/example_s3.py similarity index 99% rename from providers/tests/system/amazon/aws/example_s3.py rename to providers/amazon/tests/system/amazon/aws/example_s3.py index 30ca003d6ccb32..a4202cdb7b1c5c 100644 --- a/providers/tests/system/amazon/aws/example_s3.py +++ b/providers/amazon/tests/system/amazon/aws/example_s3.py @@ -18,6 +18,8 @@ from datetime import datetime +from providers.amazon.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder + from airflow.models.baseoperator import chain from airflow.models.dag import DAG from airflow.providers.amazon.aws.operators.s3 import ( @@ -37,8 +39,6 @@ from airflow.providers.standard.operators.python import BranchPythonOperator from airflow.utils.trigger_rule import TriggerRule -from providers.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder - DAG_ID = "example_s3" sys_test_context_task = SystemTestContextBuilder().build() diff 
--git a/providers/tests/system/amazon/aws/example_s3_to_dynamodb.py b/providers/amazon/tests/system/amazon/aws/example_s3_to_dynamodb.py similarity index 98% rename from providers/tests/system/amazon/aws/example_s3_to_dynamodb.py rename to providers/amazon/tests/system/amazon/aws/example_s3_to_dynamodb.py index 4bc4fa6b9393e1..b79829cb4714f3 100644 --- a/providers/tests/system/amazon/aws/example_s3_to_dynamodb.py +++ b/providers/amazon/tests/system/amazon/aws/example_s3_to_dynamodb.py @@ -20,6 +20,7 @@ from datetime import datetime import boto3 +from providers.amazon.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder from airflow.decorators import task from airflow.models.baseoperator import chain @@ -32,8 +33,6 @@ from airflow.providers.amazon.aws.transfers.s3_to_dynamodb import S3ToDynamoDBOperator from airflow.utils.trigger_rule import TriggerRule -from providers.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder - log = logging.getLogger(__name__) DAG_ID = "example_s3_to_dynamodb" diff --git a/providers/tests/system/amazon/aws/example_s3_to_ftp.py b/providers/amazon/tests/system/amazon/aws/example_s3_to_ftp.py similarity index 96% rename from providers/tests/system/amazon/aws/example_s3_to_ftp.py rename to providers/amazon/tests/system/amazon/aws/example_s3_to_ftp.py index 4dd134f95c3f81..a723a306644eda 100644 --- a/providers/tests/system/amazon/aws/example_s3_to_ftp.py +++ b/providers/amazon/tests/system/amazon/aws/example_s3_to_ftp.py @@ -18,14 +18,14 @@ from datetime import datetime +from providers.amazon.tests.system.amazon.aws.utils import SystemTestContextBuilder + from airflow.models.baseoperator import chain from airflow.models.dag import DAG from airflow.providers.amazon.aws.operators.s3 import S3CreateBucketOperator, S3DeleteBucketOperator from airflow.providers.amazon.aws.transfers.s3_to_ftp import S3ToFTPOperator from airflow.utils.trigger_rule import TriggerRule -from 
providers.tests.system.amazon.aws.utils import SystemTestContextBuilder - sys_test_context_task = SystemTestContextBuilder().build() DAG_ID = "example_s3_to_ftp" diff --git a/providers/tests/system/amazon/aws/example_s3_to_sftp.py b/providers/amazon/tests/system/amazon/aws/example_s3_to_sftp.py similarity index 96% rename from providers/tests/system/amazon/aws/example_s3_to_sftp.py rename to providers/amazon/tests/system/amazon/aws/example_s3_to_sftp.py index dc7d010628679e..74e04698e4e626 100644 --- a/providers/tests/system/amazon/aws/example_s3_to_sftp.py +++ b/providers/amazon/tests/system/amazon/aws/example_s3_to_sftp.py @@ -18,14 +18,14 @@ from datetime import datetime +from providers.amazon.tests.system.amazon.aws.utils import SystemTestContextBuilder + from airflow.models.baseoperator import chain from airflow.models.dag import DAG from airflow.providers.amazon.aws.operators.s3 import S3CreateBucketOperator, S3DeleteBucketOperator from airflow.providers.amazon.aws.transfers.s3_to_sftp import S3ToSFTPOperator from airflow.utils.trigger_rule import TriggerRule -from providers.tests.system.amazon.aws.utils import SystemTestContextBuilder - sys_test_context_task = SystemTestContextBuilder().build() DAG_ID = "example_s3_to_sftp" diff --git a/providers/tests/system/amazon/aws/example_s3_to_sql.py b/providers/amazon/tests/system/amazon/aws/example_s3_to_sql.py similarity index 98% rename from providers/tests/system/amazon/aws/example_s3_to_sql.py rename to providers/amazon/tests/system/amazon/aws/example_s3_to_sql.py index 6ca4f8aa9fe9f1..8b2c988a3c0ec5 100644 --- a/providers/tests/system/amazon/aws/example_s3_to_sql.py +++ b/providers/amazon/tests/system/amazon/aws/example_s3_to_sql.py @@ -18,6 +18,8 @@ from datetime import datetime +from providers.amazon.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder + from airflow import settings from airflow.decorators import task from airflow.models import Connection @@ -40,7 +42,6 @@ from 
airflow.providers.common.sql.operators.sql import SQLTableCheckOperator from airflow.utils.trigger_rule import TriggerRule -from providers.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder from tests_common.test_utils.watcher import watcher # Externally fetched variables: diff --git a/providers/tests/system/amazon/aws/example_sagemaker.py b/providers/amazon/tests/system/amazon/aws/example_sagemaker.py similarity index 99% rename from providers/tests/system/amazon/aws/example_sagemaker.py rename to providers/amazon/tests/system/amazon/aws/example_sagemaker.py index ca0c92ed462887..214cdf5ea48cd1 100644 --- a/providers/tests/system/amazon/aws/example_sagemaker.py +++ b/providers/amazon/tests/system/amazon/aws/example_sagemaker.py @@ -24,6 +24,7 @@ from textwrap import dedent import boto3 +from providers.amazon.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder, prune_logs from airflow.decorators import task from airflow.models.baseoperator import chain @@ -54,8 +55,6 @@ from airflow.providers.standard.operators.python import get_current_context from airflow.utils.trigger_rule import TriggerRule -from providers.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder, prune_logs - logger = logging.getLogger(__name__) DAG_ID = "example_sagemaker" diff --git a/providers/tests/system/amazon/aws/example_sagemaker_endpoint.py b/providers/amazon/tests/system/amazon/aws/example_sagemaker_endpoint.py similarity index 98% rename from providers/tests/system/amazon/aws/example_sagemaker_endpoint.py rename to providers/amazon/tests/system/amazon/aws/example_sagemaker_endpoint.py index 165e5d5edd444b..e0aafda6ceac35 100644 --- a/providers/tests/system/amazon/aws/example_sagemaker_endpoint.py +++ b/providers/amazon/tests/system/amazon/aws/example_sagemaker_endpoint.py @@ -20,6 +20,7 @@ from datetime import datetime import boto3 +from providers.amazon.tests.system.amazon.aws.utils import ENV_ID_KEY, 
SystemTestContextBuilder, prune_logs from airflow.decorators import task from airflow.models.baseoperator import chain @@ -39,8 +40,6 @@ from airflow.providers.amazon.aws.sensors.sagemaker import SageMakerEndpointSensor from airflow.utils.trigger_rule import TriggerRule -from providers.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder, prune_logs - DAG_ID = "example_sagemaker_endpoint" # Externally fetched variables: diff --git a/providers/tests/system/amazon/aws/example_sagemaker_notebook.py b/providers/amazon/tests/system/amazon/aws/example_sagemaker_notebook.py similarity index 97% rename from providers/tests/system/amazon/aws/example_sagemaker_notebook.py rename to providers/amazon/tests/system/amazon/aws/example_sagemaker_notebook.py index e989be7a92e5fe..c370111034ef4f 100644 --- a/providers/tests/system/amazon/aws/example_sagemaker_notebook.py +++ b/providers/amazon/tests/system/amazon/aws/example_sagemaker_notebook.py @@ -18,6 +18,8 @@ from datetime import datetime +from providers.amazon.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder + from airflow.models.baseoperator import chain from airflow.models.dag import DAG from airflow.providers.amazon.aws.operators.sagemaker import ( @@ -27,8 +29,6 @@ SageMakerStopNotebookOperator, ) -from providers.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder - DAG_ID = "example_sagemaker_notebook" # Externally fetched variables: diff --git a/providers/tests/system/amazon/aws/example_sagemaker_pipeline.py b/providers/amazon/tests/system/amazon/aws/example_sagemaker_pipeline.py similarity index 98% rename from providers/tests/system/amazon/aws/example_sagemaker_pipeline.py rename to providers/amazon/tests/system/amazon/aws/example_sagemaker_pipeline.py index 9c0c9d7284837f..d66b62b4a80cc5 100644 --- a/providers/tests/system/amazon/aws/example_sagemaker_pipeline.py +++ b/providers/amazon/tests/system/amazon/aws/example_sagemaker_pipeline.py @@ -19,6 
+19,8 @@ from datetime import datetime import boto3 +from providers.amazon.tests.system.amazon.aws.example_sagemaker import delete_experiments +from providers.amazon.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder from airflow.decorators import task from airflow.models.baseoperator import chain @@ -32,9 +34,6 @@ ) from airflow.utils.trigger_rule import TriggerRule -from providers.tests.system.amazon.aws.example_sagemaker import delete_experiments -from providers.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder - DAG_ID = "example_sagemaker_pipeline" # Externally fetched variables: diff --git a/providers/tests/system/amazon/aws/example_salesforce_to_s3.py b/providers/amazon/tests/system/amazon/aws/example_salesforce_to_s3.py similarity index 97% rename from providers/tests/system/amazon/aws/example_salesforce_to_s3.py rename to providers/amazon/tests/system/amazon/aws/example_salesforce_to_s3.py index a533c8c3fbbf3a..b53cf8d00a6d5e 100644 --- a/providers/tests/system/amazon/aws/example_salesforce_to_s3.py +++ b/providers/amazon/tests/system/amazon/aws/example_salesforce_to_s3.py @@ -23,14 +23,14 @@ from datetime import datetime +from providers.amazon.tests.system.amazon.aws.utils import SystemTestContextBuilder + from airflow.models.baseoperator import chain from airflow.models.dag import DAG from airflow.providers.amazon.aws.operators.s3 import S3CreateBucketOperator, S3DeleteBucketOperator from airflow.providers.amazon.aws.transfers.salesforce_to_s3 import SalesforceToS3Operator from airflow.utils.trigger_rule import TriggerRule -from providers.tests.system.amazon.aws.utils import SystemTestContextBuilder - sys_test_context_task = SystemTestContextBuilder().build() DAG_ID = "example_salesforce_to_s3" diff --git a/providers/tests/system/amazon/aws/example_sftp_to_s3.py b/providers/amazon/tests/system/amazon/aws/example_sftp_to_s3.py similarity index 96% rename from 
providers/tests/system/amazon/aws/example_sftp_to_s3.py rename to providers/amazon/tests/system/amazon/aws/example_sftp_to_s3.py index 765a68c9946f5a..551bbf8efcf5b3 100644 --- a/providers/tests/system/amazon/aws/example_sftp_to_s3.py +++ b/providers/amazon/tests/system/amazon/aws/example_sftp_to_s3.py @@ -18,14 +18,14 @@ from datetime import datetime +from providers.amazon.tests.system.amazon.aws.utils import SystemTestContextBuilder + from airflow.models.baseoperator import chain from airflow.models.dag import DAG from airflow.providers.amazon.aws.operators.s3 import S3CreateBucketOperator, S3DeleteBucketOperator from airflow.providers.amazon.aws.transfers.sftp_to_s3 import SFTPToS3Operator from airflow.utils.trigger_rule import TriggerRule -from providers.tests.system.amazon.aws.utils import SystemTestContextBuilder - sys_test_context_task = SystemTestContextBuilder().build() DAG_ID = "example_sftp_to_s3" diff --git a/providers/tests/system/amazon/aws/example_sns.py b/providers/amazon/tests/system/amazon/aws/example_sns.py similarity index 96% rename from providers/tests/system/amazon/aws/example_sns.py rename to providers/amazon/tests/system/amazon/aws/example_sns.py index 06756a244ae9d7..cf00f109b16da0 100644 --- a/providers/tests/system/amazon/aws/example_sns.py +++ b/providers/amazon/tests/system/amazon/aws/example_sns.py @@ -19,6 +19,7 @@ from datetime import datetime import boto3 +from providers.amazon.tests.system.amazon.aws.utils import SystemTestContextBuilder from airflow.decorators import task from airflow.models.baseoperator import chain @@ -26,8 +27,6 @@ from airflow.providers.amazon.aws.operators.sns import SnsPublishOperator from airflow.utils.trigger_rule import TriggerRule -from providers.tests.system.amazon.aws.utils import SystemTestContextBuilder - sys_test_context_task = SystemTestContextBuilder().build() DAG_ID = "example_sns" diff --git a/providers/tests/system/amazon/aws/example_sql_to_s3.py 
b/providers/amazon/tests/system/amazon/aws/example_sql_to_s3.py similarity index 98% rename from providers/tests/system/amazon/aws/example_sql_to_s3.py rename to providers/amazon/tests/system/amazon/aws/example_sql_to_s3.py index 4cb8a2e7b9be26..2d830876ed666c 100644 --- a/providers/tests/system/amazon/aws/example_sql_to_s3.py +++ b/providers/amazon/tests/system/amazon/aws/example_sql_to_s3.py @@ -20,6 +20,8 @@ from datetime import datetime +from providers.amazon.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder + from airflow import settings from airflow.decorators import task from airflow.models import Connection @@ -36,8 +38,6 @@ from airflow.providers.amazon.aws.transfers.sql_to_s3 import SqlToS3Operator from airflow.utils.trigger_rule import TriggerRule -from providers.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder - DAG_ID = "example_sql_to_s3" # Externally fetched variables: diff --git a/providers/tests/system/amazon/aws/example_sqs.py b/providers/amazon/tests/system/amazon/aws/example_sqs.py similarity index 97% rename from providers/tests/system/amazon/aws/example_sqs.py rename to providers/amazon/tests/system/amazon/aws/example_sqs.py index f14cf3b7a713cb..2fc8971a3e089b 100644 --- a/providers/tests/system/amazon/aws/example_sqs.py +++ b/providers/amazon/tests/system/amazon/aws/example_sqs.py @@ -18,6 +18,8 @@ from datetime import datetime +from providers.amazon.tests.system.amazon.aws.utils import SystemTestContextBuilder + from airflow.decorators import task from airflow.models.baseoperator import chain from airflow.models.dag import DAG @@ -26,8 +28,6 @@ from airflow.providers.amazon.aws.sensors.sqs import SqsSensor from airflow.utils.trigger_rule import TriggerRule -from providers.tests.system.amazon.aws.utils import SystemTestContextBuilder - sys_test_context_task = SystemTestContextBuilder().build() DAG_ID = "example_sqs" diff --git a/providers/tests/system/amazon/aws/example_step_functions.py 
b/providers/amazon/tests/system/amazon/aws/example_step_functions.py similarity index 97% rename from providers/tests/system/amazon/aws/example_step_functions.py rename to providers/amazon/tests/system/amazon/aws/example_step_functions.py index eb3a399c69a93e..8274af020708a5 100644 --- a/providers/tests/system/amazon/aws/example_step_functions.py +++ b/providers/amazon/tests/system/amazon/aws/example_step_functions.py @@ -19,6 +19,8 @@ import json from datetime import datetime +from providers.amazon.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder + from airflow.decorators import task from airflow.models.baseoperator import chain from airflow.models.dag import DAG @@ -29,8 +31,6 @@ ) from airflow.providers.amazon.aws.sensors.step_function import StepFunctionExecutionSensor -from providers.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder - DAG_ID = "example_step_functions" # Externally fetched variables: diff --git a/providers/tests/amazon/aws/waiters/__init__.py b/providers/amazon/tests/system/amazon/aws/tests/__init__.py similarity index 100% rename from providers/tests/amazon/aws/waiters/__init__.py rename to providers/amazon/tests/system/amazon/aws/tests/__init__.py diff --git a/providers/tests/system/amazon/aws/tests/test_aws_auth_manager.py b/providers/amazon/tests/system/amazon/aws/tests/test_aws_auth_manager.py similarity index 98% rename from providers/tests/system/amazon/aws/tests/test_aws_auth_manager.py rename to providers/amazon/tests/system/amazon/aws/tests/test_aws_auth_manager.py index 58d9a05c686eb8..077e91f9152d6d 100644 --- a/providers/tests/system/amazon/aws/tests/test_aws_auth_manager.py +++ b/providers/amazon/tests/system/amazon/aws/tests/test_aws_auth_manager.py @@ -23,10 +23,10 @@ import pytest from fastapi.testclient import TestClient from onelogin.saml2.idp_metadata_parser import OneLogin_Saml2_IdPMetadataParser +from providers.amazon.tests.system.amazon.aws.utils import set_env_id from 
airflow.api_fastapi.app import create_app -from providers.tests.system.amazon.aws.utils import set_env_id from tests_common.test_utils.config import conf_vars SAML_METADATA_URL = "/saml/metadata" diff --git a/providers/tests/system/amazon/aws/utils/__init__.py b/providers/amazon/tests/system/amazon/aws/utils/__init__.py similarity index 100% rename from providers/tests/system/amazon/aws/utils/__init__.py rename to providers/amazon/tests/system/amazon/aws/utils/__init__.py diff --git a/providers/tests/system/amazon/aws/utils/ec2.py b/providers/amazon/tests/system/amazon/aws/utils/ec2.py similarity index 100% rename from providers/tests/system/amazon/aws/utils/ec2.py rename to providers/amazon/tests/system/amazon/aws/utils/ec2.py diff --git a/providers/tests/system/amazon/aws/utils/k8s.py b/providers/amazon/tests/system/amazon/aws/utils/k8s.py similarity index 100% rename from providers/tests/system/amazon/aws/utils/k8s.py rename to providers/amazon/tests/system/amazon/aws/utils/k8s.py diff --git a/providers/microsoft/azure/tests/system/microsoft/azure/example_s3_to_wasb.py b/providers/microsoft/azure/tests/system/microsoft/azure/example_s3_to_wasb.py index 699da911d08460..094f8c0795f2cb 100644 --- a/providers/microsoft/azure/tests/system/microsoft/azure/example_s3_to_wasb.py +++ b/providers/microsoft/azure/tests/system/microsoft/azure/example_s3_to_wasb.py @@ -29,8 +29,7 @@ ) from airflow.providers.microsoft.azure.transfers.s3_to_wasb import S3ToAzureBlobStorageOperator from airflow.utils.trigger_rule import TriggerRule - -from providers.tests.system.amazon.aws.utils import SystemTestContextBuilder +from providers.amazon.tests.system.amazon.aws.utils import SystemTestContextBuilder sys_test_context_task = SystemTestContextBuilder().build() diff --git a/pyproject.toml b/pyproject.toml index 736f65ab4844d1..547402de6476cd 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -641,6 +641,7 @@ dev = [ "local-providers", "apache-airflow-providers-airbyte", 
"apache-airflow-providers-alibaba", + "apache-airflow-providers-amazon", "apache-airflow-providers-apache-beam", "apache-airflow-providers-apache-cassandra", "apache-airflow-providers-apache-drill", @@ -740,6 +741,7 @@ dev = [ local-providers = { workspace = true } apache-airflow-providers-airbyte = {workspace = true} apache-airflow-providers-alibaba = { workspace = true } +apache-airflow-providers-amazon = { workspace = true } apache-airflow-providers-apache-beam = { workspace = true } apache-airflow-providers-apache-cassandra = { workspace = true } apache-airflow-providers-apache-drill = { workspace = true } @@ -837,6 +839,7 @@ members = [ "providers", "providers/airbyte", "providers/alibaba", + "providers/amazon", "providers/apache/beam", "providers/apache/cassandra", "providers/apache/drill", diff --git a/scripts/ci/docker-compose/remove-sources.yml b/scripts/ci/docker-compose/remove-sources.yml index 1b1533cd2e5f72..c6ac6a9b110556 100644 --- a/scripts/ci/docker-compose/remove-sources.yml +++ b/scripts/ci/docker-compose/remove-sources.yml @@ -34,6 +34,7 @@ services: # START automatically generated volumes by generate-volumes-for-sources pre-commit - ../../../empty:/opt/airflow/providers/airbyte/src - ../../../empty:/opt/airflow/providers/alibaba/src + - ../../../empty:/opt/airflow/providers/amazon/src - ../../../empty:/opt/airflow/providers/apache/beam/src - ../../../empty:/opt/airflow/providers/apache/cassandra/src - ../../../empty:/opt/airflow/providers/apache/drill/src diff --git a/scripts/ci/docker-compose/tests-sources.yml b/scripts/ci/docker-compose/tests-sources.yml index 7399a8d8fa5575..95a32769b97aba 100644 --- a/scripts/ci/docker-compose/tests-sources.yml +++ b/scripts/ci/docker-compose/tests-sources.yml @@ -41,6 +41,7 @@ services: # START automatically generated volumes by generate-volumes-for-sources pre-commit - ../../../providers/airbyte/tests:/opt/airflow/providers/airbyte/tests - 
../../../providers/alibaba/tests:/opt/airflow/providers/alibaba/tests + - ../../../providers/amazon/tests:/opt/airflow/providers/amazon/tests - ../../../providers/apache/beam/tests:/opt/airflow/providers/apache/beam/tests - ../../../providers/apache/cassandra/tests:/opt/airflow/providers/apache/cassandra/tests - ../../../providers/apache/drill/tests:/opt/airflow/providers/apache/drill/tests diff --git a/tests/serialization/test_dag_serialization.py b/tests/serialization/test_dag_serialization.py index 281bcc73c346bf..23cf5033f42a65 100644 --- a/tests/serialization/test_dag_serialization.py +++ b/tests/serialization/test_dag_serialization.py @@ -345,6 +345,9 @@ def collect_dags(dag_folder=None): "airflow/example_dags", "providers/src/airflow/providers/*/example_dags", # TODO: Remove once AIP-47 is completed "providers/src/airflow/providers/*/*/example_dags", # TODO: Remove once AIP-47 is completed + # For now include amazon directly because they have many dags and are all serializing without error + "providers/amazon/tests/system/*/*/", + # TODO: Remove once all providers are migrated "providers/tests/system/*/", "providers/tests/system/*/*/", ] diff --git a/tests_common/pytest_plugin.py b/tests_common/pytest_plugin.py index 8b2ee9e6646a3c..150b48f7459ba9 100644 --- a/tests_common/pytest_plugin.py +++ b/tests_common/pytest_plugin.py @@ -1580,7 +1580,7 @@ def _disable_redact(request: pytest.FixtureRequest, mocker): def providers_src_folder() -> Path: import airflow.providers - return Path(airflow.providers.__path__[0]).parents[1] + return Path(airflow.providers.__path__[0]).parents[2] @pytest.fixture