diff --git a/.bash_completion b/.bash_completion
new file mode 100755
index 0000000000000..16a8b89ce7ab6
--- /dev/null
+++ b/.bash_completion
@@ -0,0 +1,22 @@
+#!/usr/bin/env bash
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+for BCFILE in "${HOME}"/.bash_completion.d/* ; do
+ # shellcheck disable=SC1090
+ . "${BCFILE}"
+done
diff --git a/.bash_completion.d/run-tests-complete b/.bash_completion.d/run-tests-complete
new file mode 120000
index 0000000000000..133d41f740a87
--- /dev/null
+++ b/.bash_completion.d/run-tests-complete
@@ -0,0 +1 @@
+../run-tests-complete
\ No newline at end of file
diff --git a/.dockerignore b/.dockerignore
index 90f8eb0d2083b..25c579b0478a9 100644
--- a/.dockerignore
+++ b/.dockerignore
@@ -50,6 +50,11 @@
# So please do not uncomment this line ;)
# !README.md
+# Run tests command with bash completion
+!.bash_completion
+!run-tests
+!run-tests-complete
+
# Setup/version configuration
!setup.cfg
!setup.py
diff --git a/.gitignore b/.gitignore
index 02814a5a90f2a..e69b79e3f572d 100644
--- a/.gitignore
+++ b/.gitignore
@@ -13,7 +13,10 @@ unittests.db
# Airflow temporary artifacts
airflow/git_version
airflow/www/static/coverage/
+airflow/www/static/dist
airflow/www_rbac/static/coverage/
+airflow/www_rbac/static/dist/
+
logs/
airflow-webserver.pid
@@ -57,7 +60,6 @@ pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
-.tox/
.coverage
.coverage.*
.cache
@@ -78,6 +80,7 @@ local_settings.py
# Flask stuff:
instance/
.webassets-cache
+/webserver_config.py
# Scrapy stuff:
.scrapy
@@ -145,7 +148,6 @@ scripts/ci/kubernetes/kube/.generated/airflow.yaml
*.entry.js
node_modules
npm-debug.log*
-static/dist
derby.log
metastore_db
@@ -163,3 +165,9 @@ dmypy.json
# Needed for CI Dockerfile build system
.build
/tmp
+
+/hive_scratch_dir/
+/.bash_aliases
+/.bash_history
+/.inputrc
+log.txt*
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 41f3caa0d4e08..23969a31d6549 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -95,6 +95,7 @@ repos:
name: Add licence for shell files
exclude: ^\.github/.*$|^airflow/_vendor/.*$
types: [shell]
+ files: ^breeze$|^breeze-complete$
args:
- --comment-style
- "|#|"
@@ -165,6 +166,7 @@ repos:
language: docker_image
entry: koalaman/shellcheck:stable -x -a
types: [shell]
+ files: ^breeze$|^breeze-complete$
- id: lint-dockerfile
name: Lint dockerfile
language: system
diff --git a/BREEZE.rst b/BREEZE.rst
new file mode 100644
index 0000000000000..f66ee8b94d34f
--- /dev/null
+++ b/BREEZE.rst
@@ -0,0 +1,736 @@
+ .. Licensed to the Apache Software Foundation (ASF) under one
+ or more contributor license agreements. See the NOTICE file
+ distributed with this work for additional information
+ regarding copyright ownership. The ASF licenses this file
+ to you under the Apache License, Version 2.0 (the
+ "License"); you may not use this file except in compliance
+ with the License. You may obtain a copy of the License at
+
+ .. http://www.apache.org/licenses/LICENSE-2.0
+
+ .. Unless required by applicable law or agreed to in writing,
+ software distributed under the License is distributed on an
+ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ KIND, either express or implied. See the License for the
+ specific language governing permissions and limitations
+ under the License.
+
+.. image:: images/AirflowBreeze_logo.png
+ :align: center
+ :alt: Airflow Breeze Logo
+
+
+Table of Contents
+=================
+
+* `Airflow Breeze <#airflow-breeze>`_
+* `Installation <#installation>`_
+* `Setting up autocomplete <#setting-up-autocomplete>`_
+* `Using the Airflow Breeze environment <#using-the-airflow-breeze-environment>`_
+ - `Entering the environment <#entering-the-environment>`_
+ - `Running tests in Airflow Breeze <#running-tests-in-airflow-breeze>`_
+ - `Debugging with ipdb <#debugging-with-ipdb>`_
+ - `Airflow directory structure in Docker <#airflow-directory-structure-inside-docker>`_
+ - `Port forwarding <#port-forwarding>`_
+* `Using your host IDE <#using-your-host-ide>`_
+ - `Configuring local virtualenv <#configuring-local-virtualenv>`_
+ - `Running unit tests via IDE <#running-unit-tests-via-ide>`_
+ - `Debugging Airflow Breeze Tests in IDE <#debugging-airflow-breeze-tests-in-ide>`_
+* `Running commands via Airflow Breeze <#running-commands-via-airflow-breeze>`_
+ - `Running static code checks <#running-static-code-checks>`_
+ - `Building the documentation <#building-the-documentation>`_
+ - `Running tests <#running-tests>`_
+ - `Running commands inside Docker <#running-commands-inside-docker>`_
+ - `Running Docker Compose commands <#running-docker-compose-commands>`_
+ - `Convenience scripts <#convenience-scripts>`_
+* `Keeping images up-to-date <#keeping-images-up-to-date>`_
+ - `Updating dependencies <#updating-dependencies>`_
+ - `Pulling the images <#pulling-the-images>`_
+* `Airflow Breeze flags <#airflow-breeze-flags>`_
+
+Airflow Breeze
+==============
+
+Airflow Breeze is an easy-to-use integration test environment managed via
+`Docker Compose <https://docs.docker.com/compose/>`_.
+The environment is easy to use locally, and it is also used by Airflow's Travis CI tests.
+
+It's called **Airflow Breeze** as in "It's a *Breeze* to develop Airflow".
+
+The advantages and disadvantages of using the environment vs. other ways of testing Airflow
+are described in `CONTRIBUTING.md <CONTRIBUTING.md>`_.
+
+Here is a short 10-minute video about Airflow Breeze:
+
+.. image:: http://img.youtube.com/vi/ffKFHV6f3PQ/0.jpg
+ :width: 480px
+ :height: 360px
+ :scale: 100 %
+ :alt: Airflow Breeze Simplified Development Workflow
+ :align: center
+ :target: http://www.youtube.com/watch?v=ffKFHV6f3PQ
+
+
+Installation
+============
+
+Prerequisites for the installation:
+
+
+*
+ If you are on MacOS you need gnu ``getopt`` and ``gstat`` to get Airflow Breeze running. Typically
+ you need to run ``brew install gnu-getopt coreutils`` and then follow the instructions (you need
+ to link the gnu getopt version so that it comes first on the PATH).
+
+*
+ Latest stable Docker Community Edition installed and on the PATH. It should be
+ configured to be able to run ``docker`` commands directly and not only via the root user
+
+
+ * your user should be in the ``docker`` group.
+ See the `Docker installation guide <https://docs.docker.com/install/>`_
+
+*
+ Latest stable Docker Compose installed and on the PATH. It should be
+ configured to be able to run the ``docker-compose`` command.
+ See the `Docker Compose installation guide <https://docs.docker.com/compose/install/>`_
+
+
+Your entry point for Airflow Breeze is the `./breeze <./breeze>`_
+script. You can run it with the ``-h`` option to see the list of available flags.
+If you have only one airflow repository checked out, you can add it to your PATH
+and run ``breeze`` from any directory without the ``./`` prefix.
+
+See `Airflow Breeze flags <#airflow-breeze-flags>`_ for details.
+
+The first time you run the `./breeze <./breeze>`_ script, it will pull and build local versions of the docker images.
+It will pull the latest Airflow CI images from the `Apache Airflow DockerHub <https://hub.docker.com/r/apache/airflow>`_
+and use them, together with the latest sources from your repository, to build your local docker images.
+From then on, ``breeze`` uses md5sum calculation and Docker caching mechanisms to rebuild only what is needed.
+Airflow Breeze will detect if Docker images need to be rebuilt and ask you for confirmation.
+
+Setting up autocomplete
+=======================
+
+The ``breeze`` command comes with built-in bash/zsh autocomplete. When you start typing
+the `./breeze <./breeze>`_ command, you can use <TAB> to show all the available switches
+and to get autocompletion on typical values of parameters that you can use.
+
+You can set up autocomplete automatically by running this command (``-a`` is a shortcut for ``--setup-autocomplete``):
+
+.. code-block:: bash
+
+ ./breeze --setup-autocomplete
+
+
+Autocomplete will start working once you re-enter the shell.
+
+Zsh autocompletion is currently limited to flags only. Bash autocompletion also completes
+flag values (for example the python version or a static check name).
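+
+If you prefer to manage your shell configuration yourself, sourcing the ``breeze-complete``
+script from your rc file should have a similar effect - a minimal sketch, assuming airflow is
+checked out at ``~/airflow``:
+
+.. code-block:: bash
+
+    echo "source ~/airflow/breeze-complete" >> ~/.bashrc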
+
+
+Using the Airflow Breeze environment
+====================================
+
+Entering the environment
+------------------------
+
+You enter the integration test environment by running the `./breeze <./breeze>`_ script.
+
+You can specify the python version, the backend and the environment to use for testing - so that you can
+recreate the same environments as we have in the matrix builds in Travis CI. The defaults when you
+run the environment are reasonable (python 3.6, sqlite, docker).
+
+What happens next is that the appropriate docker images are pulled, local sources are used to build a local
+version of the image, and you are dropped into a bash shell of the airflow container -
+with all necessary dependencies started up. Note that the first run (per python version) might take up to 10 minutes
+on a fast connection to start. Subsequent runs should be much faster.
+
+.. code-block:: bash
+
+ ./breeze
+
+You can choose the optional flags you need with `./breeze <./breeze>`_.
+
+For example, you could choose to run python 3.6 tests with mysql as the backend in the docker
+environment by running:
+
+.. code-block:: bash
+
+ ./breeze --python 3.6 --backend mysql --env docker
+
+The choices you make are persisted in the ``./.build/`` cache directory, so that the next time you use the
+`./breeze <./breeze>`_ script it will use the values used previously. This way you do not
+have to specify them when you run the script. You can delete the ``./.build/`` directory in case you want to
+restore the default settings.
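+
+The persisted values are plain one-line dot-files, so you can inspect or reset them directly
+(file names below are assumed from the cache layout):
+
+.. code-block:: bash
+
+    cat ./.build/.PYTHON_VERSION   # e.g. 3.6
+    rm -rf ./.build/               # forget all persisted choices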
+
+Relevant sources of airflow are mounted inside the ``airflow-testing`` container that you enter,
+which means that you can continue editing your changes on the host in your favourite IDE and have them
+visible in docker immediately, ready to test without rebuilding images. This can be disabled by specifying
+the ``--skip-mounting-source-volume`` flag when running breeze, in which case you will have the sources
+embedded in the container - and changes to those sources will not be persistent.
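+
+For example, to enter the environment with the sources embedded in the image instead of mounted:
+
+.. code-block:: bash
+
+    ./breeze --skip-mounting-source-volume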
+
+Once you enter the environment, you are dropped into a bash shell and can run tests immediately.
+
+Running tests in Airflow Breeze
+-------------------------------
+
+Once you enter the Airflow Breeze environment, you can simply run
+``run-tests`` at will. Note that if you want to pass extra parameters to nosetests,
+you should pass them after ``--``.
+
+For example, in order to just execute the "core" unit tests, run the following:
+
+.. code-block:: bash
+
+ run-tests tests.core:CoreTest -- -s --logging-level=DEBUG
+
+or a single test method:
+
+.. code-block:: bash
+
+ run-tests tests.core:CoreTest.test_check_operators -- -s --logging-level=DEBUG
+
+
+The tests will run ``airflow db reset`` and ``airflow db init`` the first time you
+run tests in a running container, so you can count on the database being initialized.
+
+All subsequent test executions within the same container will run without database
+initialisation.
+
+You can also optionally add the ``--with-db-init`` flag if you want to re-initialize
+the database:
+
+.. code-block:: bash
+
+ run-tests --with-db-init tests.core:CoreTest.test_check_operators -- -s --logging-level=DEBUG
+
+Debugging with ipdb
+-------------------
+
+You can debug any code you run in the container using the ``ipdb`` debugger if you prefer console debugging.
+It is as easy as copying and pasting this line into your code:
+
+.. code-block:: python
+
+ import ipdb; ipdb.set_trace()
+
+Once you hit the line, you will be dropped into an interactive ipdb debugger with colors
+and auto-completion to guide your debugging. This works from the console where you started your program.
+Note that in case of ``nosetests`` you need to provide the ``--nocapture`` flag to prevent nosetests
+from capturing the stdout of your process.
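+
+For example, to see the ipdb prompt while running a single test (a sketch combining the
+flags shown above):
+
+.. code-block:: bash
+
+    run-tests tests.core:CoreTest.test_check_operators -- -s --nocapture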
+
+Airflow directory structure inside Docker
+-----------------------------------------
+
+When you are in the container, note that the following directories are used:
+
+.. code-block:: text
+
+ /opt/airflow - here sources of Airflow are mounted from the host (AIRFLOW_SOURCES)
+ /root/airflow - all the "dynamic" Airflow files are created here: (AIRFLOW_HOME)
+ airflow.db - sqlite database in case sqlite is used
+ dags - folder where non-test dags are stored (test dags are in /opt/airflow/tests/dags)
+ logs - logs from airflow executions are created there
+ unittest.cfg - unit test configuration generated when entering the environment
+ webserver_config.py - webserver configuration generated when running airflow in the container
+
+Note that when run in your local environment, the ``/root/airflow/logs`` folder is actually mounted from your
+``logs`` directory in the airflow sources, so all logs created in the container are automatically visible on the host
+as well. Every time you enter the container, the logs directory is cleaned so that logs do not accumulate.
+
+Port forwarding
+---------------
+
+When you run Airflow Breeze, the following ports are automatically forwarded:
+
+* 28080 -> forwarded to airflow webserver -> airflow-testing:8080
+* 25433 -> forwarded to postgres database -> postgres:5432
+* 23306 -> forwarded to mysql database -> mysql:3306
+
+You can connect to those ports/databases using:
+
+* Webserver: http://127.0.0.1:28080
+* Postgres: ``jdbc:postgresql://127.0.0.1:25433/airflow?user=postgres&password=airflow``
+* MySQL: ``jdbc:mysql://localhost:23306/airflow?user=root``
+
+Note that you need to start the webserver manually with the ``airflow webserver`` command if you want to connect
+to the webserver (you can use ``tmux`` to multiplex terminals).
+
+For the databases, you need to run ``airflow db reset`` at least once after you start Airflow Breeze to get
+the database/tables created. You can connect to the databases with your IDE or any other database client:
+
+.. image:: images/database_view.png
+ :align: center
+ :alt: Database view
+
+You can change the host port numbers used by setting the appropriate environment variables:
+
+* WEBSERVER_HOST_PORT
+* POSTGRES_HOST_PORT
+* MYSQL_HOST_PORT
+
+When you set those variables, the new ports will be in effect the next time you enter the environment.
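+
+For example (pick any free ports):
+
+.. code-block:: bash
+
+    export WEBSERVER_HOST_PORT=28081
+    export POSTGRES_HOST_PORT=25434
+    export MYSQL_HOST_PORT=23307
+    ./breeze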
+
+Using your host IDE
+===================
+
+Configuring local virtualenv
+----------------------------
+
+In order to use your host IDE (for example IntelliJ's PyCharm/Idea) you need to have virtual environments
+set up. Ideally you should have virtualenvs for all python versions that Airflow supports (3.5, 3.6, 3.7).
+You can create the virtualenvs using ``virtualenvwrapper`` - that will allow you to easily switch between
+them using the ``workon`` command and manage your virtual environments more easily.
+
+Typically you can create the environment with:
+
+.. code-block:: bash
+
+ mkvirtualenv <ENV_NAME> --python=python<VERSION>
+
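+For example, for python 3.6 (the virtualenv name here is just an example):
+
+.. code-block:: bash
+
+    mkvirtualenv airflow36 --python=python3.6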
+
+After the virtualenv is created, you must initialize it. Simply enter the environment
+(using ``workon``) and once you are in it run:
+
+.. code-block:: bash
+
+ ./breeze --initialize-local-virtualenv
+
+Once initialization is done, you should select the virtualenv you initialized as the project's default
+virtualenv in your IDE.
+
+Running unit tests via IDE
+--------------------------
+
+After setting it up, you can use the usual "Run Test" option of the IDE, with all the
+autocomplete and documentation support from the IDE, and you can debug and click through
+the sources of Airflow - which is very helpful during development. Usually you can also run most
+of the unit tests (those that do not require prerequisites) directly from the IDE.
+
+Running unit tests from the IDE is as simple as:
+
+.. image:: images/running_unittests.png
+ :align: center
+ :alt: Running unit tests
+
+Some of the core tests use dags defined in the ``tests/dags`` folder - those tests should have
+``AIRFLOW__CORE__UNIT_TEST_MODE`` set to True. You can set it in your test configuration:
+
+.. image:: images/airflow_unit_test_mode.png
+ :align: center
+ :alt: Airflow Unit test mode
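+
+The same setting can also be exported as an environment variable when running tests from a
+terminal, following the standard Airflow configuration convention:
+
+.. code-block:: bash
+
+    export AIRFLOW__CORE__UNIT_TEST_MODE=True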
+
+
+You cannot run all the tests this way - only unit tests that do not require external dependencies
+such as postgres/mysql/hadoop etc. For those tests, you should use
+`Running tests in Airflow Breeze <#running-tests-in-airflow-breeze>`_. You can
+still use your IDE to debug those tests as explained in the next chapter.
+
+Debugging Airflow Breeze Tests in IDE
+-------------------------------------
+
+When you run example DAGs, even if you run them using unit tests from within the IDE, they are run in a separate
+container. This makes it a little harder to use the IDE's built-in debuggers.
+Fortunately, for IntelliJ/PyCharm it is fairly easy using the remote debugging feature (note that remote
+debugging is only available in the paid versions of IntelliJ/PyCharm).
+
+You can read a general description `about remote debugging
+<https://www.jetbrains.com/help/pycharm/remote-debugging-with-product.html>`_.
+
+You can set up your remote debugging session as follows:
+
+.. image:: images/setup_remote_debugging.png
+ :align: center
+ :alt: Setup remote debugging
+
+Note that if you are on ``MacOS`` you have to use the real IP address of your host rather than the default
+localhost, because on MacOS the container runs in a virtual machine with a different IP address.
+
+You also have to remember to configure source code mapping in the remote debugging configuration to map
+your local sources to the ``/opt/airflow`` location of the sources within the container.
+
+.. image:: images/source_code_mapping_ide.png
+ :align: center
+ :alt: Source code mapping
+
+
+Running commands via Airflow Breeze
+===================================
+
+Running static code checks
+--------------------------
+
+If you wish to run static code checks inside the Docker environment, you can do it via the
+``-S``, ``--static-check`` flags or ``-F``, ``--static-check-all-files``. The former runs the appropriate
+checks only for the files changed and staged locally, the latter runs them on all files. Running pylint
+for all files can take a lot of time on MacOS due to the slow filesystem used by Docker for Mac.
+If you want to add arguments, pass them after ``--`` as extra arguments.
+You cannot pass the ``--files`` flag if you selected the ``--static-check-all-files`` option.
+
+You can see the list of available static checks via the ``--help`` flag or use autocomplete. Most notably,
+the ``all`` static check runs all configured static checks. Also, since pylint checks take a lot of time, you can
+run the special ``all-but-pylint`` check which skips pylint.
+
+Run the mypy check on currently staged changes:
+
+.. code-block:: bash
+
+ ./breeze --static-check mypy
+
+Run the mypy check on all files:
+
+.. code-block:: bash
+
+ ./breeze --static-check-all-files mypy
+
+Run the flake8 check for the tests/core.py file with verbose output:
+
+.. code-block:: bash
+
+ ./breeze --static-check flake8 -- --files tests/core.py --verbose
+
+Run the mypy check for the tests/hooks/test_druid_hook.py file:
+
+.. code-block:: bash
+
+ ./breeze --static-check mypy -- --files tests/hooks/test_druid_hook.py
+
+Run all static checks on currently staged files:
+
+.. code-block:: bash
+
+ ./breeze --static-check all
+
+Run all static checks on all files:
+
+.. code-block:: bash
+
+ ./breeze --static-check-all-files all
+
+Run all static checks except pylint on all files:
+
+.. code-block:: bash
+
+ ./breeze --static-check-all-files all-but-pylint
+
+Run pylint checks for all changed files:
+
+.. code-block:: bash
+
+ ./breeze --static-check pylint
+
+Run pylint checks for selected files:
+
+.. code-block:: bash
+
+ ./breeze --static-check pylint -- --files airflow/configuration.py
+
+
+Run pylint checks for all files:
+
+.. code-block:: bash
+
+ ./breeze --static-check-all-files pylint
+
+
+The ``licenses`` check is also run via a separate script and a separate docker image containing the
+Apache RAT verification tool that checks for Apache-compatibility of licences within the codebase.
+It does not take pre-commit parameters as extra args.
+
+.. code-block:: bash
+
+ ./breeze --static-check-all-files licenses
+
+Building the documentation
+--------------------------
+
+The documentation is built using the ``-O``, ``--build-docs`` command:
+
+.. code-block:: bash
+
+ ./breeze --build-docs
+
+
+Results of the build can be found in the ``docs/_build`` folder. Errors during documentation generation
+often come from the docstrings of auto-api generated classes. During the docs build, auto-api generated
+files are stored in the ``docs/_api`` folder - so that in case of problems with the documentation you can
+find where the problems originated from.
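+
+For example, to build the docs and preview the result in a browser (the html output path assumes
+the default sphinx layout):
+
+.. code-block:: bash
+
+    ./breeze --build-docs
+    open docs/_build/html/index.html    # use xdg-open on Linux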
+
+Running tests
+-------------
+
+If you wish to only run tests and not drop into the shell, you can do so by providing the
+``-t``, ``--test-target`` flag. You can add extra nosetests flags after ``--`` on the command line.
+
+.. code-block:: bash
+
+ ./breeze --test-target tests/hooks/test_druid_hook.py -- --logging-level=DEBUG
+
+You can run the whole test suite with the special '.' test target:
+
+.. code-block:: bash
+
+ ./breeze --test-target .
+
+You can also specify individual tests or groups of tests:
+
+.. code-block:: bash
+
+ ./breeze --test-target tests.core:CoreTest
+
+Running commands inside Docker
+------------------------------
+
+If you wish to run other commands/executables inside the Docker environment, you can do it via the
+``-x``, ``--execute-command`` flag. Note that if you want to add arguments, you should specify them
+together with the command surrounded with " or ', or pass them after ``--`` as extra arguments.
+
+.. code-block:: bash
+
+ ./breeze --execute-command "ls -la"
+
+.. code-block:: bash
+
+ ./breeze --execute-command ls -- --la
+
+
+Running Docker Compose commands
+-------------------------------
+
+If you wish to run a docker-compose command (such as help, pull etc.), you can do it via the
+``-d``, ``--docker-compose`` flag. Note that if you want to add extra arguments, you should specify them
+after ``--`` as extra arguments.
+
+.. code-block:: bash
+
+ ./breeze --docker-compose pull -- --ignore-pull-failures
+
+Convenience scripts
+-------------------
+
+Once you run ``./breeze``, you can also execute various actions via the generated convenience scripts:
+
+.. code-block:: text
+
+ Enter the environment : ./.build/cmd_run
+ Run command in the environment : ./.build/cmd_run "[command with args]" [bash options]
+ Run tests in the environment : ./.build/test_run [test-target] [nosetest options]
+ Run Docker compose command : ./.build/dc [help/pull/...] [docker-compose options]
+
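+For example, to pull the images via the generated docker-compose script, or to run a single test
+target without re-entering breeze (script names as listed above):
+
+.. code-block:: bash
+
+    ./.build/dc pull
+    ./.build/test_run tests.core:CoreTest -- -s --logging-level=DEBUG
+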
+Keeping images up-to-date
+=========================
+
+Updating dependencies
+---------------------
+
+If you change apt dependencies in the Dockerfile, change setup.py, or add new apt or npm
+dependencies, you have two options for updating the dependencies.
+
+
+*
+ You can install dependencies inside the container using 'sudo apt install', 'pip install' or 'npm install'
+ (in the airflow/www folder) respectively. This is useful if you want to test something quickly while in the
+ container - see the sketch after this list. However, those changes are not persistent - they will disappear once you
+ exit the container (except npm dependencies in case your sources are mounted to the container). Therefore,
+ if you want to persist a new dependency, you have to follow up with the second option.
+
+*
+ You can add the dependencies to the Dockerfile, setup.py or package.json and rebuild the image. This
+ should happen automatically if you modify setup.py, package.json or the Dockerfile itself.
+ After you exit the container and re-run `./breeze <./breeze>`_, Breeze detects changes in dependencies,
+ asks you to confirm rebuilding the image, and proceeds to rebuild it if you confirm (or skips it
+ if you do not). After rebuilding is done, it drops you into the shell. You can also provide the
+ ``--build-only`` flag to only rebuild the images and not enter the shell.
+
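+For example, a quick non-persistent check inside the container might look like this (package
+names are examples only):
+
+.. code-block:: bash
+
+    sudo apt install -y libsnappy-dev   # disappears when you exit the container
+    pip install python-snappy
+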
+Note that during development, changing apt-get dependencies closer to the top of the Dockerfile
+will invalidate the cache for most of the image, and it will take a long time for breeze to rebuild it.
+Therefore it is recommended practice to add new dependencies closer to the bottom of the
+Dockerfile during development (to get the new dependencies incrementally added) and only move them to the
+top when you are close to finalising the PR and merging the change. It is OK at development time to add separate
+``apt-get install`` commands similar to those that are already there (but remember to move newly added
+dependencies to the appropriate ``apt-get install`` command which is already in the Dockerfile).
+
+Pulling the images
+------------------
+
+Sometimes the image on DockerHub is rebuilt from scratch. This happens for example when there is a
+security update of the python version that all the images are based on. In this case it is much faster to
+pull the latest images rather than rebuild them from scratch. Airflow Breeze will detect such a case and
+ask you to confirm pulling and building the image; if you answer OK, it will do so.
+You can also provide the ``--force-pull-images`` flag to force pulling the latest images from DockerHub.
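+
+For example:
+
+.. code-block:: bash
+
+    ./breeze --force-pull-images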
+
+Airflow Breeze flags
+====================
+
+These are the current flags of the `./breeze <./breeze>`_ script:
+
+.. code-block:: text
+
+ Usage: breeze [FLAGS] \
+ [-k]|[-S <STATIC_CHECK>]|[-F <STATIC_CHECK>]|[-O]|[-e]|[-a]|[-b]|[-t <TEST_TARGET>]|[-x <COMMAND>]|[-d <DOCKER_COMPOSE_COMMAND>] \
+ -- <EXTRA_ARGS>
+
+ The swiss-army-knife tool for Airflow testing. It allows you to perform various test tasks:
+
+ * Enter interactive environment when no command flags are specified (default behaviour)
+ * Stop the interactive environment with -k, --stop-environment command
+ * Run static checks - either for currently staged change or for all files with
+ -S, --static-check or -F, --static-check-all-files command
+ * Build documentation with -O, --build-docs command
+ * Initialize local virtualenv with -e, --initialize-local-virtualenv command
+ * Setup autocomplete for itself with -a, --setup-autocomplete command
+ * Build docker image with -b, --build-only command
+ * Run the test target specified with -t, --test-target command
+ * Execute arbitrary command in the test environment with -x, --execute-command command
+ * Execute arbitrary docker-compose command with -d, --docker-compose command
+
+ ** Commands
+
+ By default the script enters the integration test environment and drops you into a bash shell,
+ but you can also choose one of the commands to run specific actions instead:
+
+ -k, --stop-environment
+ Bring down running docker compose environment. When you start the environment, the docker
+ containers will continue running so that startup time is shorter. But they take quite a lot of
+ memory and CPU. This command stops all running containers from the environment.
+
+ -O, --build-docs
+ Build documentation.
+
+ -S, --static-check
+ Run selected static checks for currently changed files. You should specify static check that
+ you would like to run or 'all' to run all checks. One of
+ [ all all-but-pylint check-hooks-apply check-merge-conflict check-executables-have-shebangs check-xml detect-private-key doctoc end-of-file-fixer flake8 forbid-tabs insert-license check-apache-license lint-dockerfile mixed-line-ending mypy pylint shellcheck].
+ You can pass extra arguments including options to the pre-commit framework as
+ <EXTRA_ARGS> passed after --. For example:
+
+ './breeze --static-check mypy' or
+ './breeze --static-check mypy -- --files tests/core.py'
+
+ You can see all the options by adding --help EXTRA_ARG:
+
+ './breeze --static-check mypy -- --help'
+
+ -F, --static-check-all-files
+ Run selected static checks for all applicable files. You should specify static check that
+ you would like to run or 'all' to run all checks. One of
+ [ all all-but-pylint check-hooks-apply check-merge-conflict check-executables-have-shebangs check-xml detect-private-key doctoc end-of-file-fixer flake8 forbid-tabs insert-license check-apache-license lint-dockerfile mixed-line-ending mypy pylint shellcheck].
+ You can pass extra arguments including options to the pre-commit framework as
+ <EXTRA_ARGS> passed after --. For example:
+
+ './breeze --static-check-all-files mypy' or
+ './breeze --static-check-all-files mypy -- --verbose'
+
+ You can see all the options by adding --help EXTRA_ARG:
+
+ './breeze --static-check-all-files mypy -- --help'
+
+ -e, --initialize-local-virtualenv
+ Initializes locally created virtualenv installing all dependencies of Airflow.
+ This local virtualenv can be used to aid autocompletion and IDE support as
+ well as run unit tests directly from the IDE. You need to have virtualenv
+ activated before running this command.
+
+ -a, --setup-autocomplete
+ Sets up autocomplete for breeze commands. Once you do it, you need to re-enter the bash
+ shell; when typing, the breeze command will provide autocomplete for parameters and values.
+
+ -b, --build-only
+ Only build docker images but do not enter the airflow-testing docker container.
+
+ -t, --test-target
+ Run the specified unit test target. There might be multiple
+ targets specified, separated with commas. The <EXTRA_ARGS> passed after -- are treated
+ as additional options passed to nosetests. For example:
+
+ './breeze --test-target tests.core -- --logging-level=DEBUG'
+
+ -x, --execute-command
+ Run chosen command instead of entering the environment. The command is run using
+ 'bash -c "<command>"'. If you need to pass arguments to your command, you need
+ to pass them together with the command surrounded with " or '. Alternatively you can pass them as
+ <EXTRA_ARGS> passed after --. For example:
+
+ './breeze --execute-command "ls -la"' or
+ './breeze --execute-command ls -- --la'
+
+ -d, --docker-compose
+ Run docker-compose command instead of entering the environment. Use 'help' command
+ to see available commands. The <EXTRA_ARGS> passed after -- are treated
+ as additional options passed to docker-compose. For example:
+
+ './breeze --docker-compose pull -- --ignore-pull-failures'
+
+ ** General flags
+
+ -h, --help
+ Shows this help message.
+
+ -P, --python
+ Python version used for the image. This is always major/minor version.
+ One of [ 3.5 3.6 3.7 ]. Default is the python3 or python on the path.
+
+ -E, --env
+ Environment to use for tests. It determines which types of tests can be run.
+ One of [ docker kubernetes ]. Default: docker
+
+ -B, --backend
+ Backend to use for tests - it determines which database is used.
+ One of [ sqlite mysql postgres ]. Default: sqlite
+
+ -K, --kubernetes-version
+ Kubernetes version - only used in case of 'kubernetes' environment.
+ One of [ v1.13.0 ]. Default: v1.13.0
+
+ -M, --kubernetes-mode
+ Kubernetes mode - only used in case of 'kubernetes' environment.
+ One of [ persistent_mode git_mode ]. Default: git_mode
+
+ -s, --skip-mounting-source-volume
+ Skips mounting local volume with sources - you get exactly what is in the
+ docker image rather than your current local sources of airflow.
+
+ -v, --verbose
+ Show verbose information about executed commands (enabled by default for running tests)
+
+ -y, --assume-yes
+ Assume 'yes' answer to all questions.
+
+ -C, --toggle-suppress-cheatsheet
+ Toggles on/off cheatsheet displayed before starting bash shell
+
+ -A, --toggle-suppress-asciiart
+ Toggles on/off asciiart displayed before starting bash shell
+
+ ** Dockerfile management flags
+
+ -D, --dockerhub-user
+ DockerHub user used to pull, push and build images. Default: apache.
+
+ -H, --dockerhub-repo
+ DockerHub repository used to pull, push, build images. Default: airflow.
+
+ -r, --force-rebuild-images
+ Forces rebuilding of the local docker images. The images are rebuilt
+ automatically for the first time or when changes are detected in
+ package-related files, but you can force it using this flag.
+
+ -R, --force-rebuild-images-clean
+ Force rebuild images without cache. This will remove the pulled or built images
+ and start building images from scratch. This might take a long time.
+
+ -p, --force-pull-images
+ Forces pulling of images from DockerHub before building to populate cache. The
+ images are pulled by default only the first time you run the
+ environment; later, the locally built images are used as cache.
+
+ -u, --push-images
+ After rebuilding - uploads the images to DockerHub.
+ It is useful in case you use your own DockerHub user to store images and you want
+ to build them locally. Note that you need to use 'docker login' before you upload images.
+
+ -c, --cleanup-images
+ Cleanup your local docker cache of the airflow docker images. This will not reclaim space in
+ the docker cache. You need to run 'docker system prune' (optionally with --all) to reclaim that space.
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 6ce483c0f9609..6e2d8bbd98bf2 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -188,6 +188,17 @@ After creating the virtualenv, run this command to create the Airflow sqlite dat
airflow db init
```
+This can be automated if you do it within a virtualenv.
+The [./breeze](./breeze) script has a flag
+(-e or --initialize-local-virtualenv) that automatically installs dependencies
+in the virtualenv you are working in and resets the sqlite database as described above.
+
+After the virtualenv is created, you must initialize it. Simply enter the environment
+(using `workon`) and once you are in it run:
+```
+./breeze --initialize-local-virtualenv
+```
+
Once initialization is done, you should select the virtualenv you initialized as the
project's default virtualenv in your IDE and run tests efficiently.
@@ -319,18 +330,31 @@ If you are on Linux:
## Using the Docker Compose environment
-### Entering bash shell in Docker Compose environment
+Airflow has a super-easy-to-use integration test environment managed via
+[Docker Compose](https://docs.docker.com/compose/) and used by Airflow's Travis CI tests.
-Default environment settings (python 3.6, sqlite backend, docker environment)
-```bash
- ./scripts/ci/local_ci_enter_environment.sh
-```
+It's called **Airflow Breeze** as in "_It's a breeze to develop Airflow_".
-Overriding default environment settings:
+All details about using and running Airflow Breeze can be found in [BREEZE.rst](BREEZE.rst)
-```bash
-PYTHON_VERSION=3.5 BACKEND=postgres ENV=docker ./scripts/ci/local_ci_enter_environment.sh
-```
+The advantage of the Airflow Breeze integration test environment is that it is a full environment
+including external components - mysql database, hadoop, mongo, cassandra, redis etc. Some of the tests in
+Airflow require those external components. The integration test environment provides a preconfigured
+environment where all those services are running and can be used by tests automatically.
+
+Another advantage is that the Airflow Breeze environment is pretty much the same
+as the one used in [Travis CI](https://travis-ci.com/) automated builds, so if the tests run in
+your local environment they will most likely work on Travis as well.
+
+The disadvantage of Airflow Breeze is that it is fairly complex and requires time to set up, although
+the setup is fully automated. Another disadvantage is that it takes a lot of space in your local Docker cache.
+There is a separate environment for each python version and airflow version, and each set of images takes
+around 3GB in total. Building and preparing the environment by default uses pre-built images from DockerHub
+(which requires time to download and extract those GBs of images) and takes less than 10 minutes per python
+version to build.
+
+Note that those images are not supposed to be used in production environments. They are optimised
+for repeatability of tests, maintainability and speed of building rather than performance.
+
### Running individual tests within the container
diff --git a/Dockerfile b/Dockerfile
index d0dd139f1d614..248bb8bcb2ab9 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -362,6 +362,32 @@ RUN if [[ -n "${ADDITIONAL_PYTHON_DEPS}" ]]; then \
COPY --chown=airflow:airflow ./scripts/docker/entrypoint.sh /entrypoint.sh
+ARG APT_DEPS_IMAGE="airflow-apt-deps-ci-slim"
+ENV APT_DEPS_IMAGE=${APT_DEPS_IMAGE}
+
+# Generate list of all tests to aid auto-complete of the run-tests command
+RUN \
+ if [[ "${APT_DEPS_IMAGE}" == "airflow-apt-deps-ci" ]]; then \
+ gosu "${AIRFLOW_USER}" nosetests --collect-only --with-xunit \
+ --xunit-file="${HOME}/all_tests.xml" && \
+ gosu "${AIRFLOW_USER}" python "${AIRFLOW_SOURCES}/tests/test_utils/get_all_tests.py" \
+ "${HOME}/all_tests.xml" >"${HOME}/all_tests.txt"; \
+ fi
+
+COPY .bash_completion run-tests-complete run-tests ${HOME}/
+
+RUN \
+ if [[ "${APT_DEPS_IMAGE}" == "airflow-apt-deps-ci" ]]; then \
+ echo ". ${HOME}/.bash_completion" >> "${HOME}/.bashrc"; \
+ fi
+
+RUN \
+ if [[ "${APT_DEPS_IMAGE}" == "airflow-apt-deps-ci" ]]; then \
+ chmod +x "${HOME}/run-tests-complete" "${HOME}/run-tests" && \
+ chown "${AIRFLOW_USER}.${AIRFLOW_USER}" "${HOME}/.bashrc" \
+ "${HOME}/run-tests-complete" "${HOME}/run-tests"; \
+ fi
+
USER ${AIRFLOW_USER}
WORKDIR ${AIRFLOW_SOURCES}
diff --git a/UPDATING.md b/UPDATING.md
index b6503a2081d7d..daac8adf23b59 100644
--- a/UPDATING.md
+++ b/UPDATING.md
@@ -636,7 +636,7 @@ FAB has built-in authentication support for DB, OAuth, OpenID, LDAP, and REMOTE_
For any other authentication type (OAuth, OpenID, LDAP, REMOTE_USER), see the [Authentication section of FAB docs](http://flask-appbuilder.readthedocs.io/en/latest/security.html#authentication-methods) for how to configure variables in webserver_config.py file.
-Once you modify your config file, run `airflow initdb` to generate new tables for RBAC support (these tables will have the prefix `ab_`).
+Once you modify your config file, run `airflow db init` to generate new tables for RBAC support (these tables will have the prefix `ab_`).
#### Creating an Admin Account
diff --git a/airflow/gcp/operators/vision.py b/airflow/gcp/operators/vision.py
index 47c8a28eaedd7..59a3693d46682 100644
--- a/airflow/gcp/operators/vision.py
+++ b/airflow/gcp/operators/vision.py
@@ -20,7 +20,6 @@
This module contains a Google Cloud Vision operator.
"""
-
from copy import deepcopy
from typing import Union, List, Dict, Any, Sequence, Tuple, Optional
diff --git a/breeze b/breeze
new file mode 100755
index 0000000000000..abf099bb459e6
--- /dev/null
+++ b/breeze
@@ -0,0 +1,1101 @@
+#!/usr/bin/env bash
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+# Bash sanity settings (error on exit, complain for undefined vars, error when pipe fails)
+set -euo pipefail
+
+MY_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+
+export AIRFLOW_SOURCES="${MY_DIR}"
+
+# Directory where all CI scripts are located
+export SCRIPTS_CI_DIR="${MY_DIR}/scripts/ci"
+
+# shellcheck source=scripts/ci/_utils.sh
+. "${SCRIPTS_CI_DIR}/_utils.sh"
+
+# shellcheck source=hooks/_default_branch.sh
+. "${MY_DIR}/hooks/_default_branch.sh"
+
+basic_sanity_checks
+
+script_start
+
+# Sets width of the screen
+SEPARATOR_WIDTH="$(tput cols)"
+
+# Name of the script
+CMDNAME="$(basename -- "$0")"
+
+# Update short and long options in the breeze-complete script
+# This way autocomplete will work automagically with all options
+# shellcheck source=breeze-complete
+. "${MY_DIR}/breeze-complete"
+
+# Whether to actually run docker compose with the command set given
+ENTER_ENVIRONMENT="true"
+
+
+# Whether to cleanup local image
+CLEANUP_IMAGES="false"
+
+# Skips mounting local Airflow sources
+SKIP_MOUNTING_LOCAL_SOURCES="false"
+
+# If set, we initialize local virtualenv and install all dependencies
+INITIALIZE_LOCAL_VIRTUALENV=false
+
+# If set, we setup autocomplete for breeze
+SETUP_AUTOCOMPLETE=false
+
+# Holds chosen command if the -x flag is used.
+RUN_COMMAND=""
+
+# Holds the test target if the -t flag is used.
+TEST_TARGET=""
+
+# Holds docker compose command if the -d flag is used.
+DOCKER_COMPOSE_COMMAND=""
+
+# If true, the docker images are rebuilt locally.
+export AIRFLOW_CONTAINER_DOCKER_BUILD_NEEDED="false"
+
+# By default we only pull images if we do not have them locally.
+# This can be overridden by -p flag
+export AIRFLOW_CONTAINER_FORCE_PULL_IMAGES="false"
+
+# We use docker image caches by default to speed up the builds
+export AIRFLOW_CONTAINER_USE_DOCKER_CACHE=${AIRFLOW_CONTAINER_USE_DOCKER_CACHE:="true"}
+
+# By default we do not push images. This can be overridden by -u flag.
+export AIRFLOW_CONTAINER_PUSH_IMAGES=${AIRFLOW_CONTAINER_PUSH_IMAGES:="false"}
+
+# For local builds we fix file permissions only for setup-related files
+export AIRFLOW_FIX_PERMISSIONS=${AIRFLOW_FIX_PERMISSIONS:="setup"}
+
+# Skip building slim image locally - we only need full CI image
+export AIRFLOW_CONTAINER_SKIP_CI_SLIM_IMAGE="true"
+
+# Do not skip building the full CI image locally - breeze needs the full CI image
+export AIRFLOW_CONTAINER_SKIP_CI_IMAGE="false"
+
+# Branch name of the base image used (usually master or v1-10-test or v1-10-stable)
+export AIRFLOW_CONTAINER_BRANCH_NAME=${AIRFLOW_CONTAINER_BRANCH_NAME:=${DEFAULT_BRANCH}}
+
+# Determine version of the Airflow from version.py
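+# (version.py defines the 'version' variable; the heredoc below appends a print
+# statement to it and pipes the combined text into python)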
+AIRFLOW_VERSION=$(cat airflow/version.py - << EOF | python
+print(version.replace("+",""))
+EOF
+)
+export AIRFLOW_VERSION
+
+# Verbosity in running ci scripts
+export AIRFLOW_CI_VERBOSE="false"
+
+# Enter environment by default, rather than run tests or bash command or docker compose or static checks
+export RUN_TESTS="false"
+export RUN_DOCKER_COMPOSE="false"
+export RUN_IN_BASH="false"
+export RUN_STATIC_CHECKS="false"
+export RUN_BUILD_DOCS="false"
+
+export FORCE_BUILD=${FORCE_BUILD:="false"}
+
+# if set to true, rebuild is done without asking user
+export SKIP_BUILD_CHECK="false"
+
+# Files determining whether asciiart/cheatsheet are suppressed
+
+SUPPRESS_CHEATSHEET_FILE="${MY_DIR}/.suppress_cheatsheet"
+SUPPRESS_ASCIIART_FILE="${MY_DIR}/.suppress_asciiart"
+
+export WEBSERVER_HOST_PORT=${WEBSERVER_HOST_PORT:="28080"}
+export POSTGRES_HOST_PORT=${POSTGRES_HOST_PORT:="25433"}
+export MYSQL_HOST_PORT=${MYSQL_HOST_PORT:="23306"}
+
+function print_badge {
+    if [[ ! -f "${SUPPRESS_ASCIIART_FILE}" ]]; then
+        # (ASCII-art badge output elided)
+        :
+    fi
+}
+
+# Writes one of the convenience scripts that run docker-compose with the current settings.
+# NOTE: the function name and parameters here are an assumption - reconstructed to match
+# the generated file body below.
+function prepare_command_file {
+    local FILE="${1}"
+    local CMD="${2}"
+    local TESTS="${3}"
+    local EXPANSION="${4}"
+    cat << EOF > "${FILE}"
+#!/usr/bin/env bash
+cd "\$(pwd)" || exit
+export DOCKERHUB_USER=${DOCKERHUB_USER}
+export DOCKERHUB_REPO=${DOCKERHUB_REPO}
+export COMPOSE_FILE="${COMPOSE_FILE}"
+export PYTHON_VERSION="${PYTHON_VERSION}"
+export BACKEND="${BACKEND}"
+export ENV="${ENV}"
+export KUBERNETES_VERSION="${KUBERNETES_VERSION}"
+export KUBERNETES_MODE="${KUBERNETES_MODE}"
+export AIRFLOW_VERSION="${AIRFLOW_VERSION}"
+export RUN_TESTS="${TESTS}"
+export WEBSERVER_HOST_PORT="${WEBSERVER_HOST_PORT}"
+export POSTGRES_HOST_PORT="${POSTGRES_HOST_PORT}"
+export MYSQL_HOST_PORT="${MYSQL_HOST_PORT}"
+docker-compose --log-level INFO ${CMD}\$${EXPANSION}"
+EOF
+ chmod u+x "${FILE}"
+}
+
+# Default values
+
+_BREEZE_DEFAULT_ENV="docker"
+_BREEZE_DEFAULT_BACKEND="sqlite"
+_BREEZE_DEFAULT_KUBERNETES_VERSION="v1.13.0"
+_BREEZE_DEFAULT_KUBERNETES_MODE="git_mode"
+
+usage() {
+ echo """
+
+Usage: ${CMDNAME} [FLAGS] \\
+  [-k]|[-S <STATIC_CHECK>]|[-F <STATIC_CHECK>]|[-O]|[-e]|[-a]|[-b]|[-t <TEST_TARGET>]|[-x <COMMAND>]|[-d <DOCKER_COMPOSE_COMMAND>] \\
+  -- <EXTRA_ARGS>
+
+The swiss-army-knife tool for Airflow testing. It allows you to perform various test tasks:
+
+ * Enter interactive environment when no command flags are specified (default behaviour)
+ * Stop the interactive environment with -k, --stop-environment command
+ * Run static checks - either for currently staged change or for all files with
+ -S, --static-check or -F, --static-check-all-files command
+ * Build documentation with -O, --build-docs command
+ * Initialize local virtualenv with -e, --initialize-local-virtualenv command
+ * Setup autocomplete for itself with -a, --setup-autocomplete command
+ * Build docker image with -b, --build-only command
+ * Run the test target specified with -t, --test-target command
+ * Execute arbitrary command in the test environment with -x, --execute-command command
+ * Execute arbitrary docker-compose command with -d, --docker-compose command
+
+** Commands
+
+ By default the script enters the integration test environment and drops you into a bash shell,
+ but you can also choose one of the commands to run specific actions instead:
+
+-k, --stop-environment
+ Bring down running docker compose environment. When you start the environment, the docker
+ containers will continue running so that startup time is shorter. But they take quite a lot of
+ memory and CPU. This command stops all running containers from the environment.
+
+-O, --build-docs
+ Build documentation.
+
+-S, --static-check
+ Run selected static checks for currently changed files. You should specify static check that
+ you would like to run or 'all' to run all checks. One of
+ [${_BREEZE_ALLOWED_STATIC_CHECKS:=}].
+ You can pass extra arguments including options to the pre-commit framework as
+ <EXTRA_ARGS> passed after --. For example:
+
+ '${0} --static-check mypy' or
+ '${0} --static-check mypy -- --files tests/core.py'
+
+ You can see all the options by adding --help EXTRA_ARG:
+
+ '${0} --static-check mypy -- --help'
+
+-F, --static-check-all-files
+ Run selected static checks for all applicable files. You should specify static check that
+ you would like to run or 'all' to run all checks. One of
+ [${_BREEZE_ALLOWED_STATIC_CHECKS:=}].
+ You can pass extra arguments including options to the pre-commit framework as
+ <EXTRA_ARGS> passed after --. For example:
+
+ '${0} --static-check-all-files mypy' or
+ '${0} --static-check-all-files mypy -- --verbose'
+
+ You can see all the options by adding --help EXTRA_ARG:
+
+ '${0} --static-check-all-files mypy -- --help'
+
+-e, --initialize-local-virtualenv
+ Initializes locally created virtualenv installing all dependencies of Airflow.
+ This local virtualenv can be used to aid autocompletion and IDE support as
+ well as run unit tests directly from the IDE. You need to have virtualenv
+ activated before running this command.
+
+-a, --setup-autocomplete
+ Sets up autocomplete for breeze commands. Once you do it, you need to re-enter the bash
+ shell; when typing, the breeze command will provide autocomplete for parameters and values.
+
+-b, --build-only
+ Only build docker images but do not enter the airflow-testing docker container.
+
+-t, --test-target
+ Run the specified unit test target. There might be multiple
+ targets specified, separated with commas. The <EXTRA_ARGS> passed after -- are treated
+ as additional options passed to nosetests. For example:
+
+ '${0} --test-target tests.core -- --logging-level=DEBUG'
+
+-x, --execute-command
+ Run chosen command instead of entering the environment. The command is run using
+ 'bash -c \"<command>\"'. If you need to pass arguments to your command, you need
+ to pass them together with the command surrounded with \" or '. Alternatively you can pass them as
+ <EXTRA_ARGS> passed after --. For example:
+
+ '${0} --execute-command \"ls -la\"' or
+ '${0} --execute-command ls -- --la'
+
+-d, --docker-compose
+ Run docker-compose command instead of entering the environment. Use 'help' command
+ to see available commands. The <EXTRA_ARGS> passed after -- are treated
+ as additional options passed to docker-compose. For example:
+
+ '${0} --docker-compose pull -- --ignore-pull-failures'
+
+** General flags
+
+-h, --help
+ Shows this help message.
+
+-P, --python
+ Python version used for the image. This is always major/minor version.
+ One of [${_BREEZE_ALLOWED_PYTHON_VERSIONS:=}]. Default is the python3 or python on the path.
+
+-E, --env
+ Environment to use for tests. It determines which types of tests can be run.
+ One of [${_BREEZE_ALLOWED_ENVS:=}]. Default: ${_BREEZE_DEFAULT_ENV:=}
+
+-B, --backend
+ Backend to use for tests - it determines which database is used.
+ One of [${_BREEZE_ALLOWED_BACKENDS:=}]. Default: ${_BREEZE_DEFAULT_BACKEND:=}
+
+-K, --kubernetes-version
+ Kubernetes version - only used in case of 'kubernetes' environment.
+ One of [${_BREEZE_ALLOWED_KUBERNETES_VERSIONS:=}]. Default: ${_BREEZE_DEFAULT_KUBERNETES_VERSION:=}
+
+-M, --kubernetes-mode
+ Kubernetes mode - only used in case of 'kubernetes' environment.
+ One of [${_BREEZE_ALLOWED_KUBERNETES_MODES:=}]. Default: ${_BREEZE_DEFAULT_KUBERNETES_MODE:=}
+
+-s, --skip-mounting-source-volume
+ Skips mounting local volume with sources - you get exactly what is in the
+ docker image rather than your current local sources of airflow.
+
+-v, --verbose
+ Show verbose information about executed commands (enabled by default for running tests)
+
+-y, --assume-yes
+ Assume 'yes' answer to all questions.
+
+-C, --toggle-suppress-cheatsheet
+ Toggles on/off cheatsheet displayed before starting bash shell
+
+-A, --toggle-suppress-asciiart
+ Toggles on/off asciiart displayed before starting bash shell
+
+** Dockerfile management flags
+
+-D, --dockerhub-user
+ DockerHub user used to pull, push and build images. Default: ${_BREEZE_DEFAULT_DOCKERHUB_USER:=}.
+
+-H, --dockerhub-repo
+ DockerHub repository used to pull, push, build images. Default: ${_BREEZE_DEFAULT_DOCKERHUB_REPO:=}.
+
+-r, --force-rebuild-images
+ Forces rebuilding of the local docker images. The images are rebuilt
+ automatically for the first time or when changes are detected in
+ package-related files, but you can force it using this flag.
+
+-R, --force-rebuild-images-clean
+ Force rebuild images without cache. This will remove the pulled or built images
+ and start building images from scratch. This might take a long time.
+
+-p, --force-pull-images
+ Forces pulling of images from DockerHub before building to populate cache. The
+ images are pulled by default only the first time you run the
+ environment; later, the locally built images are used as cache.
+
+-u, --push-images
+ After rebuilding - uploads the images to DockerHub.
+ It is useful in case you use your own DockerHub user to store images and you want
+ to build them locally. Note that you need to use 'docker login' before you upload images.
+
+-c, --cleanup-images
+ Cleanup your local docker cache of the airflow docker images. This will not reclaim space in
+ the docker cache. You need to run 'docker system prune' (optionally with --all) to reclaim that space.
+
+"""
+}
+
+#################### Parsing options/arguments
+if ! PARAMS=$(getopt \
+ -o "${_BREEZE_GETOPT_SHORT_OPTIONS:=}" \
+ -l "${_BREEZE_GETOPT_LONG_OPTIONS:=}" \
+ --name "$CMDNAME" -- "$@")
+then
+ usage
+ exit 1
+fi
+
+eval set -- "${PARAMS}"
+unset PARAMS
+
+# Parse Flags.
+# Please update short and long options in the breeze-complete script
+# This way autocomplete will work out-of-the-box
+while true
+do
+ case "${1}" in
+ -h|--help)
+ usage;
+ exit 0 ;;
+ -P|--python)
+ export PYTHON_VERSION="${2}";
+ echo
+ echo "Python version: ${PYTHON_VERSION}"
+ echo
+ shift 2 ;;
+ -E|--env)
+ export ENV="${2}";
+ echo
+ echo "Environment: ${ENV}"
+ echo
+ shift 2 ;;
+ -B|--backend)
+ export BACKEND="${2}";
+ echo
+ echo "Backend: ${BACKEND}"
+ echo
+ shift 2 ;;
+ -K|--kubernetes-version)
+ export KUBERNETES_VERSION="${2}";
+ echo
+ echo "Kubernetes version: ${KUBERNETES_VERSION}"
+ echo
+ shift 2 ;;
+ -M|--kubernetes-mode)
+ export KUBERNETES_MODE="${2}";
+ echo
+ echo "Kubernetes mode: ${KUBERNETES_MODE}"
+ echo
+ shift 2 ;;
+ -s|--skip-mounting-local-sources)
+ SKIP_MOUNTING_LOCAL_SOURCES="true"
+ echo "Skip mounting local sources: ${SKIP_MOUNTING_LOCAL_SOURCES}"
+ echo
+ shift ;;
+ -b|--build-only)
+ ENTER_ENVIRONMENT="false"
+ SKIP_BUILD_CHECK="true"
+ AIRFLOW_CONTAINER_DOCKER_BUILD_NEEDED="true"
+ FORCE_BUILD="true"
+ echo "Only build. Do not enter airflow-testing container"
+ echo
+ shift ;;
+ -v|--verbose)
+ AIRFLOW_CI_VERBOSE="true"
+ echo "Verbose output"
+ echo
+ shift ;;
+ -y|--assume-yes)
+ export ASSUME_YES="true"
+ echo "Assuming 'yes' answer to all questions."
+ echo
+ shift ;;
+ -C|--toggle-suppress-cheatsheet)
+ if [[ -f "${SUPPRESS_CHEATSHEET_FILE}" ]]; then
+ rm -f "${SUPPRESS_CHEATSHEET_FILE}"
+ else
+ touch "${SUPPRESS_CHEATSHEET_FILE}"
+ fi
+ echo "Toggle suppress cheatsheet"
+ echo
+ shift ;;
+ -A|--toggle-suppress-asciiart)
+ if [[ -f "${SUPPRESS_ASCIIART_FILE}" ]]; then
+ rm -f "${SUPPRESS_ASCIIART_FILE}"
+ else
+ touch "${SUPPRESS_ASCIIART_FILE}"
+ fi
+ echo "Toggle suppress asciiart"
+ echo
+ shift ;;
+ -r|--force-rebuild-images)
+ echo
+ echo "Force rebuild images"
+ echo
+ AIRFLOW_CONTAINER_DOCKER_BUILD_NEEDED="true"
+ SKIP_BUILD_CHECK="true"
+ FORCE_BUILD="true"
+ shift ;;
+ -R|--force-rebuild-images-clean)
+ echo
+ echo "Clean rebuild of images without cache"
+ echo
+ export AIRFLOW_CONTAINER_USE_DOCKER_CACHE=false
+ export AIRFLOW_CONTAINER_USE_PULLED_IMAGES_CACHE=false
+ AIRFLOW_CONTAINER_DOCKER_BUILD_NEEDED="true"
+ SKIP_BUILD_CHECK="true"
+ FORCE_BUILD="true"
+ CLEANUP_IMAGES="true"
+ shift ;;
+ -p|--force-pull-images)
+ echo
+ echo "Force pulling images before build. Uses pulled images as cache."
+ echo
+ export AIRFLOW_CONTAINER_FORCE_PULL_IMAGES="true"
+ AIRFLOW_CONTAINER_DOCKER_BUILD_NEEDED="true"
+ SKIP_BUILD_CHECK="true"
+ shift ;;
+ -u|--push-images)
+ if [[ "${AIRFLOW_FIX_PERMISSIONS}" != "all" ]]; then
+ echo >&2
+ echo >&2 "ERROR: Disable fix permissions when pushing"
+ echo >&2
+ echo >&2 "You cannot push images if you have AIRFLOW_FIX_PERMISSIONS set to other value than 'all'"
+ echo >&2 "Your docker context is most likely wrong in this case"
+ echo >&2 "You need to set AIRFLOW_FIX_PERMISSIONS to false"
+ echo >&2 "And run the build again"
+ echo >&2
+ exit 1
+ fi
+ echo
+ echo "Pushing images to DockerHub"
+ echo
+ export AIRFLOW_CONTAINER_PUSH_IMAGES="true"
+ AIRFLOW_CONTAINER_DOCKER_BUILD_NEEDED="true"
+ SKIP_BUILD_CHECK="true"
+ shift ;;
+ -c|--cleanup-images)
+ echo
+ echo "Cleanup the images"
+ echo
+ CLEANUP_IMAGES=true
+ shift ;;
+ -D|--dockerhub-user)
+ export DOCKERHUB_USER="${2}"
+ echo
+ echo "Dockerhub user ${DOCKERHUB_USER}"
+ echo
+ shift 2 ;;
+ -H|--dockerhub-repo)
+ export DOCKERHUB_REPO="${2}"
+ echo
+ echo "Dockerhub repo ${DOCKERHUB_REPO}"
+ echo
+ shift 2 ;;
+ -e|--initialize-local-virtualenv)
+ echo
+ echo Initializing local virtualenv
+ echo
+ INITIALIZE_LOCAL_VIRTUALENV="true"
+ SETUP_AUTOCOMPLETE="false"
+ ENTER_ENVIRONMENT="false"
+ shift ;;
+ -a|--setup-autocomplete)
+ echo
+ echo Setting up autocomplete
+ echo
+ INITIALIZE_LOCAL_VIRTUALENV="false"
+ SETUP_AUTOCOMPLETE="true"
+ ENTER_ENVIRONMENT="false"
+ shift ;;
+ -t|--test-target)
+ export TEST_TARGET="${2}"
+ export RUN_IN_BASH="false"
+ export RUN_TESTS="true"
+ export RUN_DOCKER_COMPOSE="false"
+ export RUN_STATIC_CHECKS="false"
+ export RUN_BUILD_DOCS="false"
+ shift 2 ;;
+ -d|--docker-compose)
+ export DOCKER_COMPOSE_COMMAND="${2}"
+ export RUN_IN_BASH="false"
+ export RUN_TESTS="false"
+ export RUN_DOCKER_COMPOSE="true"
+ export RUN_STATIC_CHECKS="false"
+ export RUN_BUILD_DOCS="false"
+ shift 2 ;;
+ -k|--stop-environment)
+ export DOCKER_COMPOSE_COMMAND="down"
+ export RUN_IN_BASH="false"
+ export RUN_TESTS="false"
+ export RUN_BUILD_DOCS="false"
+ export RUN_DOCKER_COMPOSE="true"
+ export RUN_STATIC_CHECKS="false"
+ shift ;;
+ -x|--execute-command)
+ export RUN_COMMAND="${2}"
+ export RUN_IN_BASH="true"
+ export RUN_TESTS="false"
+ export RUN_BUILD_DOCS="false"
+ export RUN_DOCKER_COMPOSE="false"
+ export RUN_STATIC_CHECKS="false"
+ shift 2 ;;
+ -S|--static-check )
+ export ENTER_ENVIRONMENT="false"
+ export RUN_TESTS="false"
+ export RUN_DOCKER_COMPOSE="false"
+ export RUN_STATIC_CHECKS="true"
+ export RUN_BUILD_DOCS="false"
+ export STATIC_CHECK="${2}"
+ export EXTRA_STATIC_CHECK_OPTIONS=("--show-diff-on-failure")
+ export STATIC_CHECK_ALL_FILES="false"
+ shift 2 ;;
+ -F|--static-check-all-files)
+ export ENTER_ENVIRONMENT="false"
+ export RUN_TESTS="false"
+ export RUN_DOCKER_COMPOSE="false"
+ export RUN_STATIC_CHECKS="true"
+ export RUN_BUILD_DOCS="false"
+ export STATIC_CHECK="${2}"
+ export STATIC_CHECK_ALL_FILES="true"
+ export EXTRA_STATIC_CHECK_OPTIONS=("--all-files" "--show-diff-on-failure")
+ shift 2 ;;
+ -O|--build-docs)
+ export ENTER_ENVIRONMENT="false"
+ export RUN_TESTS="false"
+ export RUN_DOCKER_COMPOSE="false"
+ export RUN_STATIC_CHECKS="false"
+ export RUN_BUILD_DOCS="true"
+ shift ;;
+ --)
+ shift ;
+ break ;;
+ *)
+ usage
+ echo >&2
+ echo >&2 "ERROR: Unknown argument ${1}"
+ echo >&2
+ exit 1
+ ;;
+ esac
+done
+
+echo
+printf '=%.0s' $(seq "${SEPARATOR_WIDTH}")
+echo
+
+if ! PYTHON_BIN=$(command -v python3); then
+ if ! PYTHON_BIN=$(command -v python); then
+ echo >&2
+ echo >&2 "Error: You must have python3 (preferred) or python in your PATH"
+ echo >&2
+ exit 1
+ fi
+fi
+
+BUILD_CACHE_DIR="${MY_DIR}/.build"
+
+mkdir -pv "${BUILD_CACHE_DIR}"
+
+CMDNAME="$(basename -- "$0")"
+
+function save_to_file {
+ # shellcheck disable=SC2005
+ echo "$(eval echo "\$$1")" > "${BUILD_CACHE_DIR}/.$1"
+}
+
+function read_from_file {
+ cat "${BUILD_CACHE_DIR}/.$1" 2>/dev/null || true
+}
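+
+# Example: with BACKEND=mysql, "save_to_file BACKEND" writes "mysql" into
+# .build/.BACKEND, and "read_from_file BACKEND" restores it on the next run
+# (printing nothing if the file does not exist yet)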
+
+export PYTHON_VERSION="${PYTHON_VERSION:=$(read_from_file PYTHON_VERSION)}"
+export PYTHON_VERSION=${PYTHON_VERSION:=$("${PYTHON_BIN}" -c \
+'import sys; print("%s.%s" % (sys.version_info.major, sys.version_info.minor))')}
+
+export ENV="${ENV:=$(read_from_file ENV)}"
+export BACKEND="${BACKEND:=$(read_from_file BACKEND)}"
+export KUBERNETES_VERSION="${KUBERNETES_VERSION:=$(read_from_file KUBERNETES_VERSION)}"
+export KUBERNETES_MODE="${KUBERNETES_MODE:=$(read_from_file KUBERNETES_MODE)}"
+
+# Here you read the DockerHub user/account that you use
+# You can populate your own images in DockerHub this way and work with them
+# You can override it with the "-D" option and it will be stored in the .build directory
+export DOCKERHUB_USER="${DOCKERHUB_USER:=$(read_from_file DOCKERHUB_USER)}"
+export DOCKERHUB_USER="${DOCKERHUB_USER:=${_BREEZE_DEFAULT_DOCKERHUB_USER}}"
+
+# Here you read the DockerHub repo that you use
+# You can populate your own images in DockerHub this way and work with them
+# You can override it with the "-H" option and it will be stored in the .build directory
+export DOCKERHUB_REPO="${DOCKERHUB_REPO:=$(read_from_file DOCKERHUB_REPO)}"
+export DOCKERHUB_REPO="${DOCKERHUB_REPO:=${_BREEZE_DEFAULT_DOCKERHUB_REPO}}"
+
+# Default environment for tests
+export ENV=${ENV:-${_BREEZE_DEFAULT_ENV}}
+
+# Default backend for tests
+export BACKEND=${BACKEND:-${_BREEZE_DEFAULT_BACKEND}}
+
+# Default version of Kubernetes to use
+export KUBERNETES_VERSION=${KUBERNETES_VERSION:=${_BREEZE_DEFAULT_KUBERNETES_VERSION}}
+# Default mode of Kubernetes to use
+export KUBERNETES_MODE=${KUBERNETES_MODE:=${_BREEZE_DEFAULT_KUBERNETES_MODE}}
+
+#################### Check python version ##########################################
+if [[ ${_BREEZE_ALLOWED_PYTHON_VERSIONS:=} != *" ${PYTHON_VERSION} "* ]]; then
+ echo >&2
+ echo >&2 "ERROR: Allowed Python versions: [${_BREEZE_ALLOWED_PYTHON_VERSIONS}]. Is: '${PYTHON_VERSION}'."
+ echo >&2
+ echo >&2 "Switch to virtualenv with the supported python version or specify python with --python flag."
+ echo >&2
+ exit 1
+fi
+
+#################### Check environments ##########################################
+if [[ ${_BREEZE_ALLOWED_ENVS:=} != *" ${ENV} "* ]]; then
+ echo >&2
+ echo >&2 "ERROR: Allowed environments are [${_BREEZE_ALLOWED_ENVS}]. Used: '${ENV}'"
+ echo >&2
+ exit 1
+fi
+
+#################### Check backends ##########################################
+if [[ ${_BREEZE_ALLOWED_BACKENDS:=} != *" ${BACKEND} "* ]]; then
+ echo >&2
+ echo >&2 "ERROR: Allowed backends are [${_BREEZE_ALLOWED_BACKENDS}]. Used: '${BACKEND}'"
+ echo >&2
+ exit 1
+fi
+
+#################### Check Kubernetes versions ##########################################
+if [[ ${_BREEZE_ALLOWED_KUBERNETES_VERSIONS} != *" ${KUBERNETES_VERSION} "* ]]; then
+ echo >&2
+ echo >&2 "ERROR: Allowed kubernetes versions" \
+ "are [${_BREEZE_ALLOWED_KUBERNETES_VERSIONS}]. Used: '${KUBERNETES_VERSION}'"
+ echo >&2
+ exit 1
+fi
+
+#################### Check Kubernetes modes ##########################################
+if [[ ${_BREEZE_ALLOWED_KUBERNETES_MODES} != *" ${KUBERNETES_MODE} "* ]]; then
+ echo >&2
+ echo >&2 "ERROR: Allowed kubernetes modes" \
+ "are [${_BREEZE_ALLOWED_KUBERNETES_MODES}]. Used: '${KUBERNETES_MODE}'"
+ echo >&2
+ exit 1
+fi
+
+
+# Those files are mounted into container when run locally
+# .bash_history is preserved and you can modify .bash_aliases and .inputrc
+# according to your liking
+touch "${MY_DIR}/.bash_history"
+touch "${MY_DIR}/.bash_aliases"
+touch "${MY_DIR}/.inputrc"
+
+save_to_file BACKEND
+save_to_file ENV
+save_to_file KUBERNETES_VERSION
+save_to_file KUBERNETES_MODE
+save_to_file PYTHON_VERSION
+save_to_file DOCKERHUB_USER
+save_to_file DOCKERHUB_REPO
+
+#################### Cleanup image if requested ########################################
+if [[ "${CLEANUP_IMAGES}" == "true" ]]; then
+ export AIRFLOW_CONTAINER_CLEANUP_IMAGES=true
+ "${MY_DIR}/scripts/ci/local_ci_cleanup.sh"
+ exit 0
+fi
+
+#################### Initializes local virtualenv ########################################
+if [[ ${INITIALIZE_LOCAL_VIRTUALENV} == "true" ]]; then
+ # Check if we are in virtualenv
+ set +e
+ echo -e "import sys\nif not hasattr(sys,'real_prefix'):\n sys.exit(1)" | "${PYTHON_BIN}"
+ RES=$?
+ set -e
+ if [[ ${RES} != "0" ]]; then
+ echo >&2
+ echo >&2 "ERROR: Initializing local virtualenv only works when you have virtualenv activated"
+ echo >&2
+ echo >&2 "Please enter your local virtualenv before (for example using 'workon') "
+ echo >&2
+ exit 1
+ else
+ # If no Airflow Home defined - fallback to ${HOME}/airflow
+ AIRFLOW_HOME_DIR=${AIRFLOW_HOME:=${HOME}/airflow}
+ echo
+ echo "Initializing the virtualenv: $(command -v python)!"
+ echo
+ echo "This will wipe out ${AIRFLOW_HOME_DIR} and reset all the databases!"
+ echo
+ "${MY_DIR}/confirm" "Proceeding with the initialization"
+ echo
+ pushd "${MY_DIR}"
+ SYSTEM=$(uname -s)
+ echo "#######################################################################"
+ echo " If you have trouble installing all dependencies you might need to run:"
+ echo
+ if [[ ${SYSTEM} == "Darwin" ]]; then
+ echo " brew install sqlite mysql postgresql"
+ else
+ echo " sudo apt-get install openssl sqlite libmysqlclient-dev libmysqld-dev postgresql --confirm"
+ fi
+ echo
+ echo "#######################################################################"
+ pip install -e ".[devel]"
+ popd
+ echo
+ echo "Wiping and recreating ${AIRFLOW_HOME_DIR}"
+ echo
+ rm -rvf "${AIRFLOW_HOME_DIR}"
+ mkdir -p "${AIRFLOW_HOME_DIR}"
+ echo
+ echo "Resetting AIRFLOW sqlite database"
+ echo
+ unset AIRFLOW__CORE__UNIT_TEST_MODE
+ airflow db reset -y
+ echo
+ echo "Resetting AIRFLOW sqlite unit test database"
+ echo
+ export AIRFLOW__CORE__UNIT_TEST_MODE=True
+ airflow db reset -y
+ exit 0
+ fi
+fi
+
+
+#################### Sets up autocomplete for breeze commands ########################################
+if [[ ${SETUP_AUTOCOMPLETE} == "true" ]]; then
+ echo
+ echo "Installing bash/zsh completion for local user"
+ echo "Note that completion for zsh is just limited to flags - without their values"
+ echo
+ set +e
+ grep ".bash_completion.d" "${HOME}/.bashrc" >/dev/null 2>&1
+ RES=$?
+ set -e
+ if [[ "${RES}" == "0" ]]; then
+ echo >&2
+ echo >&2 "ERROR: Bash completion already setup before."
+ echo >&2
+ exit 1
+ fi
+ "${MY_DIR}/confirm" "This will create ~/.bash_completion.d/ directory and modify ~/.bashrc and ~/.zshrc file"
+ echo
+ echo
+ mkdir -pv ~/.bash_completion.d
+ ln -sf "${MY_DIR}/breeze-complete" "${HOME}/.bash_completion.d/"
+ touch ~/.bashrc
+ cat >>~/.bashrc <<"EOF"
+for BCFILE in ~/.bash_completion.d/* ; do
+ . ${BCFILE}
+done
+EOF
+ cat >>~/.zshrc <<"EOF"
+autoload compinit && compinit
+autoload bashcompinit && bashcompinit
+source ~/.bash_completion.d/breeze-complete
+EOF
+ if [[ "${OSTYPE}" == "darwin"* ]]; then
+ # For MacOS we have to handle the special case where the Terminal app DOES NOT source .bashrc
+ # by default but sources .bash_profile instead :(
+ # See https://scriptingosx.com/2017/04/about-bash_profile-and-bashrc-on-macos/
+ set +e
+ grep ".bashrc" "${HOME}/.bash_profile"
+ RES=$?
+ set -e
+ if [[ "${RES}" == "0" ]]; then
+ echo " Seems you already source .bashrc in your .bash_profile so not adding it."
+ else
+ "${MY_DIR}/confirm" "This will modify ~/.bash_profile and source .bashrc from it"
+ echo
+ echo
+ cat >>~/.bash_profile <<"EOF"
+if [ -r ~/.bashrc ]; then
+ source ~/.bashrc
+fi
+EOF
+ fi
+ fi
+ echo
+ echo
+ echo "Breeze bash completion installed to ~/.bash_completion.d/breeze-complete"
+ echo
+ echo
+ echo "Please re-enter bash or run '. ~/.bash_completion.d/breeze-complete'"
+ echo
+ exit 0
+fi
+
+MAIN_DOCKER_COMPOSE_FILE=${SCRIPTS_CI_DIR}/docker-compose.yml
+KUBERNETES_DOCKER_COMPOSE_FILE=${SCRIPTS_CI_DIR}/docker-compose-kubernetes.yml
+BACKEND_DOCKER_COMPOSE_FILE=${SCRIPTS_CI_DIR}/docker-compose-${BACKEND}.yml
+LOCAL_DOCKER_COMPOSE_FILE=${SCRIPTS_CI_DIR}/docker-compose-local.yml
+
+COMPOSE_FILE=${MAIN_DOCKER_COMPOSE_FILE}:${BACKEND_DOCKER_COMPOSE_FILE}
+
+if [[ "${ENV}" == "kubernetes" ]]; then
+ COMPOSE_FILE=${COMPOSE_FILE}:${KUBERNETES_DOCKER_COMPOSE_FILE}
+fi
+
+
+if [[ "${SKIP_MOUNTING_LOCAL_SOURCES}" != "true" ]]; then
+ COMPOSE_FILE=${COMPOSE_FILE}:${LOCAL_DOCKER_COMPOSE_FILE}
+fi
+
+export COMPOSE_FILE
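+
+# Example of the resulting value (assuming mysql backend and mounted local sources):
+#   COMPOSE_FILE=<ci dir>/docker-compose.yml:<ci dir>/docker-compose-mysql.yml:<ci dir>/docker-compose-local.yml
+# docker-compose merges all the files listed in COMPOSE_FILE into one effective configuration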
+
+CI_ENTRYPOINT_FILE="/opt/airflow/scripts/ci/in_container/entrypoint_ci.sh"
+
+DC_RUN_COMMAND="run --service-ports --rm airflow-testing \"${CI_ENTRYPOINT_FILE} "
+DC_RUN_COMMAND_ONLY_AT="run --no-deps --service-ports --rm airflow-testing \"${CI_ENTRYPOINT_FILE} "
+
+LAST_DC_RUN_FILE="cmd_run"
+LAST_DC_RUN_ONLY_AT_FILE="cmd_only_at_run"
+LAST_DC_TEST_FILE="test_run"
+LAST_DC_FILE="dc"
+
+# Prepare script for "run command"
+prepare_command_file "${BUILD_CACHE_DIR}/${LAST_DC_RUN_FILE}" "${DC_RUN_COMMAND}" "false" '*'
+
+# Prepare script for "run command" in the airflow-testing container only
+prepare_command_file "${BUILD_CACHE_DIR}/${LAST_DC_RUN_ONLY_AT_FILE}" "${DC_RUN_COMMAND_ONLY_AT}" "false" '*'
+
+# Prepare script for "run test"
+prepare_command_file "${BUILD_CACHE_DIR}/${LAST_DC_TEST_FILE}" "${DC_RUN_COMMAND}" "true" '*'
+
+# Prepare script for "run docker compose command"
+prepare_command_file "${BUILD_CACHE_DIR}/${LAST_DC_FILE}" '"' "false"
+
+rebuild_ci_image_if_needed
+rebuild_ci_slim_image_if_needed
+rebuild_checklicence_image_if_needed
+
+export AIRFLOW_CONTAINER_DOCKER_IMAGE=\
+${DOCKERHUB_USER}/${DOCKERHUB_REPO}:${AIRFLOW_CONTAINER_BRANCH_NAME}-python${PYTHON_VERSION}-ci
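+
+# With the default user/repo and a hypothetical branch name this resolves to
+# an image such as: apache/airflow:master-python3.6-ci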
+
+printf '=%.0s' $(seq "${SEPARATOR_WIDTH}")
+
+if [[ "${TEST_TARGET}" == "." ]]; then
+ TEST_TARGET=""
+fi
+
+print_badge
+
+function print_line {
+ printf '#%.0s' $(seq "${SEPARATOR_WIDTH}")
+}
+
+if [[ ! -f ${SUPPRESS_CHEATSHEET_FILE} ]]; then
+ echo
+ echo
+ print_line
+ echo
+ echo " Airflow Breeze CHEATSHEET"
+ echo
+ print_line
+ echo
+ echo
+ print_line
+ echo
+ echo " Quick scripts:"
+ echo " * Enter the full environment : ${BUILD_CACHE_DIR}/${LAST_DC_RUN_FILE}"
+ echo " * Run command in full environment : ${BUILD_CACHE_DIR}/${LAST_DC_RUN_FILE} "\
+ "[command with args] [bash options]"
+ echo " * Run command airflow-testing only : ${BUILD_CACHE_DIR}/${LAST_DC_RUN_ONLY_AT_FILE} "\
+ "[command with args] [bash options]"
+ echo " * Run tests in the full environment : ${BUILD_CACHE_DIR}/${LAST_DC_TEST_FILE} "\
+ "[test target] [nosetest options]"
+ echo " * Run Docker compose command : ${BUILD_CACHE_DIR}/${LAST_DC_FILE} "\
+ "[docker compose command] [docker-compose options]"
+ echo
+
+ set +e
+ if ! command -v breeze >/dev/null; then
+ print_line
+ echo
+ echo " Adding breeze to your path:"
+ echo " When you exit the environment, you can add sources of airflow to the path - you can"
+ echo " run breeze or the scripts above from any directory by calling 'breeze' commands directly"
+ echo
+ echo " export PATH=\${PATH}:\"${MY_DIR}\""
+ echo
+ fi
+ set -e
+ print_line
+
+ echo
+ echo " Port forwarding:"
+ echo
+ echo " Ports are forwarded to the running docker containers for webserver and database"
+ echo " * ${WEBSERVER_HOST_PORT} -> forwarded to airflow webserver -> airflow-testing:8080"
+ echo " * ${POSTGRES_HOST_PORT} -> forwarded to postgres database -> postgres:5432"
+ echo " * ${MYSQL_HOST_PORT} -> forwarded to mysql database -> mysql:3306"
+ echo
+ echo " Here are links to those services that you can use on host:"
+ echo " * Webserver: http://127.0.0.1:28080"
+ echo " * Postgres: jdbc:postgresql://127.0.0.1:25433/airflow?user=postgres&password=airflow"
+ echo " * Mysql: jdbc:mysql://localhost:23306/airflow?user=root"
+ echo
+else
+ echo
+fi
+
+# shellcheck disable=SC2034 # Unused variables left for comp_breeze usage
+if ! typeset -f "_comp_breeze" > /dev/null; then
+ print_line
+ echo
+ echo " You can setup autocomplete by running '$0 --setup-autocomplete'"
+ echo
+ echo
+fi
+print_line
+echo
+echo " You can toggle ascii/cheatsheet by adding this flag:"
+echo " * --toggle-suppress-cheatsheet"
+echo " * --toggle-suppress-asciiart"
+echo
+print_line
+echo
+echo
+echo
+echo
+
+if [[ ${ENTER_ENVIRONMENT} == "true" ]]; then
+ if [[ ${RUN_TESTS} == "true" ]]; then
+ "${BUILD_CACHE_DIR}/${LAST_DC_TEST_FILE}" "\"${TEST_TARGET}\"" "$@"
+ elif [[ ${RUN_DOCKER_COMPOSE} == "true" ]]; then
+ "${BUILD_CACHE_DIR}/${LAST_DC_FILE}" "${DOCKER_COMPOSE_COMMAND}" "$@"
+ elif [[ ${RUN_IN_BASH} == "true" ]]; then
+ "${BUILD_CACHE_DIR}/${LAST_DC_RUN_FILE}" "${RUN_COMMAND}" "$@"
+ else
+ "${BUILD_CACHE_DIR}/${LAST_DC_RUN_FILE}"
+ fi
+else
+ if [[ ${RUN_STATIC_CHECKS} == "true" ]]; then
+ echo
+ echo "Making sure pre-commit is installed"
+ echo
+ if command -v pip3 >/dev/null; then
+ PIP_BIN=pip3
+ elif command -v pip >/dev/null; then
+ PIP_BIN=pip
+ else
+ echo >&2
+ echo >&2 "ERROR: You need to have pip or pip3 in your PATH"
+ echo >&2
+ exit 1
+ fi
+ "${PIP_BIN}" install --upgrade pre-commit >/dev/null 2>&1
+ # Add ~/.local/bin to the path in case pip is run outside of virtualenv
+ export PATH="${PATH}":~/.local/bin
+ if [[ ${STATIC_CHECK} == "all" ]]; then
+ echo
+ echo "Running: pre-commit run" "${EXTRA_STATIC_CHECK_OPTIONS[@]}" "$@"
+ echo
+ pre-commit run "${EXTRA_STATIC_CHECK_OPTIONS[@]}" "$@"
+ elif [[ ${STATIC_CHECK} == "all-but-pylint" ]]; then
+ echo
+ echo "Setting SKIP=pylint. Running: pre-commit run" "${EXTRA_STATIC_CHECK_OPTIONS[@]}" "$@"
+ echo
+ echo
+ SKIP=pylint pre-commit run "${EXTRA_STATIC_CHECK_OPTIONS[@]}" "$@"
+ else
+ echo
+ echo "Running: pre-commit run" "${STATIC_CHECK}" "${EXTRA_STATIC_CHECK_OPTIONS[@]}" "$@"
+ echo
+ pre-commit run "${STATIC_CHECK}" "${EXTRA_STATIC_CHECK_OPTIONS[@]}" "$@"
+ fi
+ elif [[ ${RUN_BUILD_DOCS} == "true" ]]; then
+ run_docs
+ fi
+fi
+
+script_end
diff --git a/breeze-complete b/breeze-complete
new file mode 100644
index 0000000000000..a6699fd83d57e
--- /dev/null
+++ b/breeze-complete
@@ -0,0 +1,143 @@
+#!/usr/bin/env bash
+
+_BREEZE_ALLOWED_PYTHON_VERSIONS=" 3.5 3.6 3.7 "
+_BREEZE_ALLOWED_ENVS=" docker kubernetes "
+_BREEZE_ALLOWED_BACKENDS=" sqlite mysql postgres "
+_BREEZE_ALLOWED_KUBERNETES_VERSIONS=" v1.13.0 "
+_BREEZE_ALLOWED_KUBERNETES_MODES=" persistent_mode git_mode "
+_BREEZE_ALLOWED_STATIC_CHECKS=" all all-but-pylint check-hooks-apply check-merge-conflict check-executables-have-shebangs check-xml detect-private-key doctoc end-of-file-fixer flake8 forbid-tabs insert-license check-apache-license lint-dockerfile mixed-line-ending mypy pylint shellcheck"
+_BREEZE_DEFAULT_DOCKERHUB_USER="apache"
+_BREEZE_DEFAULT_DOCKERHUB_REPO="airflow"
+
+_BREEZE_SHORT_OPTIONS="
+h P: E: B: K: M:
+s b O
+v y C A
+r R p u
+c D: H: e a
+t: d: k x: S: F:
+"
+
+_BREEZE_LONG_OPTIONS="
+help python: env: backend: kubernetes-version: kubernetes-mode:
+skip-mounting-local-sources build-only build-docs
+verbose assume-yes toggle-suppress-cheatsheet toggle-suppress-asciiart
+force-rebuild-images force-rebuild-images-clean force-pull-images push-images
+cleanup-images dockerhub-user: dockerhub-repo: initialize-local-virtualenv setup-autocomplete
+test-target: docker-compose: stop-environment execute-command: static-check: static-check-all-files:
+"
+
+# Note: on OSX, bash (3.2) has no associative arrays, so we have to fake them with space-delimited strings
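+#
+# A minimal sketch of the idea (hypothetical values):
+#   _BREEZE_KNOWN_VALUES=" sqlite mysql postgres "                # the "value" for one key
+#   _listcontains_breeze "${_BREEZE_KNOWN_VALUES}" "mysql" && echo "known value"
+# _listcontains_breeze (defined below) loops over the words of its first
+# argument and succeeds when one of them equals the second argument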
+
+_BREEZE_KNOWN_VALUES=""
+
+function _get_known_values_breeze {
+ case "$1" in
+ -P | --python )
+ _BREEZE_KNOWN_VALUES=${_BREEZE_ALLOWED_PYTHON_VERSIONS} ;;
+ -E | --env )
+ _BREEZE_KNOWN_VALUES=${_BREEZE_ALLOWED_ENVS} ;;
+ -B | --backend )
+ _BREEZE_KNOWN_VALUES=${_BREEZE_ALLOWED_BACKENDS} ;;
+ -K | --kubernetes-version )
+ _BREEZE_KNOWN_VALUES=${_BREEZE_ALLOWED_KUBERNETES_VERSIONS} ;;
+ -M | --kubernetes-mode )
+ _BREEZE_KNOWN_VALUES=${_BREEZE_ALLOWED_KUBERNETES_MODES} ;;
+ -S | --static-check )
+ _BREEZE_KNOWN_VALUES=${_BREEZE_ALLOWED_STATIC_CHECKS} ;;
+ -F | --static-check-all-files )
+ _BREEZE_KNOWN_VALUES=${_BREEZE_ALLOWED_STATIC_CHECKS} ;;
+ -d | --docker-compose )
+ # shellcheck disable=SC2034
+ if typeset -f "_docker_compose" > /dev/null; then
+ _docker_compose
+ fi
+ _BREEZE_KNOWN_VALUES="" ;;
+ -D | --dockerhub-user )
+ _BREEZE_KNOWN_VALUES="${_BREEZE_DEFAULT_DOCKERHUB_USER}" ;;
+ -H | --dockerhub-repo )
+ _BREEZE_KNOWN_VALUES="${_BREEZE_DEFAULT_DOCKERHUB_REPO}" ;;
+ *)
+ _BREEZE_KNOWN_VALUES=""
+ esac
+}
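+
+# Example: when completing "breeze --backend <TAB>", this function is called
+# with "--backend" and compgen below then offers: sqlite mysql postgres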
+
+_BREEZE_GETOPT_SHORT_OPTIONS=""
+_BREEZE_GETOPT_LONG_OPTIONS=""
+
+function _build_options_breeze {
+ local SEPARATOR=""
+ local OPTION
+
+ for OPTION in ${_BREEZE_SHORT_OPTIONS}
+ do
+ _BREEZE_GETOPT_SHORT_OPTIONS="${_BREEZE_GETOPT_SHORT_OPTIONS}${SEPARATOR}${OPTION}"
+ SEPARATOR=","
+ done
+
+ SEPARATOR=""
+ for OPTION in ${_BREEZE_LONG_OPTIONS}
+ do
+ _BREEZE_GETOPT_LONG_OPTIONS="${_BREEZE_GETOPT_LONG_OPTIONS}${SEPARATOR}${OPTION}"
+ SEPARATOR=","
+ done
+}
+
+function _listcontains_breeze {
+ local WORD
+ for WORD in $1; do
+ [[ ${WORD} = "$2" ]] && return 0
+ done
+ return 1
+}
+
+# A completion function for breeze
+function _comp_breeze {
+ local ALL_OPTIONS=""
+ local EXTRA_ARG_OPTIONS=""
+ local OPTION
+ local GETOPT_OPTION
+ local LAST_COMMAND_PREFIX
+ local PREVIOUS_COMMAND
+
+ for OPTION in ${_BREEZE_SHORT_OPTIONS}
+ do
+ LAST_CHAR="${OPTION:$((${#OPTION}-1)):1}"
+ GETOPT_OPTION='-'${OPTION//:/}
+ if [[ "${LAST_CHAR}" == ":" ]]; then
+ EXTRA_ARG_OPTIONS="${EXTRA_ARG_OPTIONS} ${GETOPT_OPTION}"
+ fi
+ ALL_OPTIONS="${ALL_OPTIONS} ${GETOPT_OPTION}"
+ done
+ for OPTION in ${_BREEZE_LONG_OPTIONS}
+ do
+ LAST_CHAR="${OPTION:$((${#OPTION}-1)):1}"
+ GETOPT_OPTION='--'${OPTION//:/}
+ ALL_OPTIONS="${ALL_OPTIONS} ${GETOPT_OPTION}"
+ if [[ "${LAST_CHAR}" == ":" ]]; then
+ EXTRA_ARG_OPTIONS="${EXTRA_ARG_OPTIONS} ${GETOPT_OPTION}"
+ fi
+ done
+
+ LAST_COMMAND_PREFIX="${COMP_WORDS[${#COMP_WORDS[@]}-1]}"
+ if [[ ${#COMP_WORDS[@]} -gt 1 ]]; then
+ PREVIOUS_COMMAND="${COMP_WORDS[${#COMP_WORDS[@]}-2]}"
+ else
+ PREVIOUS_COMMAND=""
+ fi
+
+ if _listcontains_breeze "${EXTRA_ARG_OPTIONS}" "${PREVIOUS_COMMAND}"; then
+ COMPREPLY=()
+ _get_known_values_breeze "${PREVIOUS_COMMAND}"
+ while IFS='' read -r LINE; do COMPREPLY+=("$LINE"); done \
+ < <(compgen -W "${_BREEZE_KNOWN_VALUES}" -- "${LAST_COMMAND_PREFIX}")
+ else
+ COMPREPLY=()
+ while IFS='' read -r LINE; do COMPREPLY+=("$LINE"); done \
+ < <(compgen -W "${ALL_OPTIONS}" -- "${LAST_COMMAND_PREFIX}")
+ fi
+}
+
+_build_options_breeze
+
+complete -F _comp_breeze breeze
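+
+# To try the completion manually in the current shell (a sketch):
+#   source ./breeze-complete
+#   breeze --back<TAB>       # completes the flag name to --backend
+#   breeze --backend <TAB>   # offers the known values: sqlite mysql postgres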
diff --git a/hooks/build b/hooks/build
index 71eca5bc80a53..778b93959017a 100755
--- a/hooks/build
+++ b/hooks/build
@@ -196,7 +196,7 @@ echo
if [[ ! ${LONG_PYTHON_VERSION} =~ python[2-3]\.[0-9]+ ]]; then
echo >&2
- echo >&2 "ERROR! Python version extracted from IMAGE_NAME does not match the pythonX.Y format"
+ echo >&2 "ERROR: Python version extracted from IMAGE_NAME does not match the pythonX.Y format"
echo >&2
echo >&2 "The IMAGE_NAME format should be '-pythonX.Y'"
echo >&2
@@ -211,7 +211,7 @@ echo
if [[ ! ${LOCAL_BASE_IMAGE_NAME} == ${DOCKERHUB_USER}/${DOCKERHUB_REPO}* ]]; then
echo >&2
- echo >&2 "ERROR! The ${LOCAL_BASE_IMAGE_NAME} does not start with ${DOCKERHUB_USER}/${DOCKERHUB_REPO}"
+ echo >&2 "ERROR: The ${LOCAL_BASE_IMAGE_NAME} does not start with ${DOCKERHUB_USER}/${DOCKERHUB_REPO}"
echo >&2
exit 1
fi
diff --git a/images/AirflowBreeze_logo.png b/images/AirflowBreeze_logo.png
new file mode 100644
index 0000000000000..fe418dfe6babf
Binary files /dev/null and b/images/AirflowBreeze_logo.png differ
diff --git a/images/airflow_unit_test_mode.png b/images/airflow_unit_test_mode.png
new file mode 100644
index 0000000000000..f4f42b82e4f94
Binary files /dev/null and b/images/airflow_unit_test_mode.png differ
diff --git a/images/database_view.png b/images/database_view.png
new file mode 100644
index 0000000000000..5719868d625f4
Binary files /dev/null and b/images/database_view.png differ
diff --git a/images/running_unittests.png b/images/running_unittests.png
new file mode 100644
index 0000000000000..963ee77b857f2
Binary files /dev/null and b/images/running_unittests.png differ
diff --git a/images/setup_remote_debugging.png b/images/setup_remote_debugging.png
new file mode 100644
index 0000000000000..4d5a5d4f30575
Binary files /dev/null and b/images/setup_remote_debugging.png differ
diff --git a/images/source_code_mapping_ide.png b/images/source_code_mapping_ide.png
new file mode 100644
index 0000000000000..3e55e7f50a5db
Binary files /dev/null and b/images/source_code_mapping_ide.png differ
diff --git a/run-tests b/run-tests
index 71f4668af7569..5be7f7fe1db76 100755
--- a/run-tests
+++ b/run-tests
@@ -23,19 +23,36 @@ set -euo pipefail
CMDNAME="$(basename -- "$0")"
-AIRFLOW_ROOT="$(cd "${MY_DIR}" && pwd)"
-export AIRFLOW__CORE__DAGS_FOLDER="S{AIRFLOW_ROOT}/tests/dags"
+if [[ ${AIRFLOW_CI_VERBOSE:="false"} == "true" ]]; then
+ set -x
+fi
-# environment
-export AIRFLOW_HOME=${AIRFLOW_HOME:=${HOME}}
+if [[ -z "${AIRFLOW__CORE__SQL_ALCHEMY_CONN:=}" ]]; then
+ echo "AIRFLOW__CORE__SQL_ALCHEMY_CONN not set - using default" >&2
+fi
+
+# Short and long options are defined once in the run-tests-complete script and sourced here
+# This way autocomplete will work automagically with all options
+# shellcheck source=run-tests-complete
+. "${MY_DIR}/run-tests-complete"
-echo "Airflow home: ${AIRFLOW_HOME}"
+# environment
+export AIRFLOW_HOME=${AIRFLOW_HOME:=${HOME}}
+AIRFLOW_ROOT="$(cd "${MY_DIR}" && pwd)"
+export AIRFLOW_ROOT
+export AIRFLOW__CORE__DAGS_FOLDER="S{AIRFLOW_ROOT}/tests/dags"
export AIRFLOW__CORE__UNIT_TEST_MODE=True
# add test/test_utils to PYTHONPATH TODO: Do we need that ??? Looks fishy.
export PYTHONPATH=${PYTHONPATH:=}:${AIRFLOW_ROOT}/tests/test_utils
+echo Airflow home: "${AIRFLOW_HOME}"
+echo Airflow root: "${AIRFLOW_ROOT}"
+echo Home of the user: "${HOME}"
+
usage() {
echo """
@@ -51,6 +68,13 @@ Flags:
-i, --with-db-init
Forces database initialization before tests
+-x, --with-xunit
+ Dumps result of the tests to Xunit file
+
+-f, --xunit-file
+ The file where xunit results should be dumped. Default if not specified
+ is ${AIRFLOW_ROOT}/logs/all_tests.xml
+
-s, --nocapture
Don't capture stdout when running the tests. This is useful if you are
debugging with ipdb and want to drop into console with it
@@ -69,12 +93,11 @@ echo
#################### Parsing options/arguments
if ! PARAMS=$(getopt \
- -o "h i s v" \
- -l "help with-db-init nocapture verbose" \
- --name "${CMDNAME}" -- "$@")
+ -o "${_RUN_TESTS_GETOPT_SHORT_OPTIONS:=}" \
+ -l "${_RUN_TESTS_GETOPT_LONG_OPTIONS:=}" \
+ --name "$CMDNAME" -- "$@")
then
usage
- exit 1
fi
eval set -- "${PARAMS}"
@@ -82,7 +105,9 @@ unset PARAMS
WITH_DB_INIT="false"
NOCAPTURE="false"
-VERBOSE="false"
+WITH_XUNIT="false"
+XUNIT_FILE="${AIRFLOW_ROOT}/logs/all_tests.xml"
+VERBOSE="${AIRFLOW_CI_VERBOSE}"
# Parse Flags.
# Please update short and long options in the run-tests-complete script
@@ -99,9 +124,15 @@ do
-s|--nocapture)
NOCAPTURE="true"
shift ;;
+ -x|--with-xunit)
+ WITH_XUNIT="true"
+ shift ;;
-v|--verbose)
VERBOSE="true"
shift;;
+ -f|--xunit-file)
+ XUNIT_FILE="$2"
+ shift; shift ;;
--)
shift ;
break ;;
@@ -160,6 +191,13 @@ if [[ "${#NOSE_ARGS[@]}" == "0" ]]; then
"--logging-level=DEBUG")
fi
+if [[ "${WITH_XUNIT}" == "true" ]]; then
+ echo
+ echo "Dumping results to ${XUNIT_FILE}"
+ echo
+ NOSE_ARGS+=("--with-xunit" "--xunit-file=${XUNIT_FILE}")
+fi
+
if [[ "${NOCAPTURE}" == "true" ]]; then
echo
echo "Stop capturing stdout"
@@ -178,4 +216,3 @@ echo
echo "Starting the tests with arguments: ${NOSE_ARGS[*]}"
echo
nosetests "${NOSE_ARGS[@]}"
-set -u
diff --git a/run-tests-complete b/run-tests-complete
new file mode 100644
index 0000000000000..56f4a12b54b30
--- /dev/null
+++ b/run-tests-complete
@@ -0,0 +1,125 @@
+#!/usr/bin/env bash
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+_RUN_TESTS_SHORT_OPTIONS="
+h i x s v f:"
+
+_RUN_TESTS_LONG_OPTIONS="
+help with-db-init with-xunit nocapture verbose xunit-file:
+"
+
+# Note: on OSX, bash (3.2) has no associative arrays, so we have to fake them with space-delimited strings
+
+_RUN_TESTS_KNOWN_VALUES=""
+
+function _get_known_values_run_tests {
+ case "$1" in
+ *)
+ _RUN_TESTS_KNOWN_VALUES=""
+ esac
+}
+
+_RUN_TESTS_GETOPT_SHORT_OPTIONS=""
+_RUN_TESTS_GETOPT_LONG_OPTIONS=""
+
+function _build_options_run_tests {
+ local SEPARATOR=""
+ local OPTION
+
+ for OPTION in ${_RUN_TESTS_SHORT_OPTIONS}
+ do
+ _RUN_TESTS_GETOPT_SHORT_OPTIONS="${_RUN_TESTS_GETOPT_SHORT_OPTIONS}${SEPARATOR}${OPTION}"
+ SEPARATOR=","
+ done
+
+ SEPARATOR=""
+ for OPTION in ${_RUN_TESTS_LONG_OPTIONS}
+ do
+ _RUN_TESTS_GETOPT_LONG_OPTIONS="${_RUN_TESTS_GETOPT_LONG_OPTIONS}${SEPARATOR}${OPTION}"
+ SEPARATOR=","
+ done
+}
+
+function _listcontains_run_tests {
+ local WORD
+ for WORD in $1; do
+ [[ ${WORD} = "$2" ]] && return 0
+ done
+ return 1
+}
+
+# A completion function for run_tests
+function _comp_run_tests {
+ local ALL_OPTIONS=""
+ local EXTRA_ARG_OPTIONS=""
+ local OPTION
+ local GETOPT_OPTION
+ local LAST_COMMAND_PREFIX
+ local PREVIOUS_COMMAND
+ local ALL_TESTS_FILE="${HOME}/all_tests.txt"
+
+ for OPTION in ${_RUN_TESTS_SHORT_OPTIONS}
+ do
+ LAST_CHAR="${OPTION:$((${#OPTION}-1)):1}"
+ GETOPT_OPTION='-'${OPTION//:/}
+ if [[ "${LAST_CHAR}" == ":" ]]; then
+ EXTRA_ARG_OPTIONS="${EXTRA_ARG_OPTIONS} ${GETOPT_OPTION}"
+ fi
+ ALL_OPTIONS="${ALL_OPTIONS} ${GETOPT_OPTION}"
+ done
+ for OPTION in ${_RUN_TESTS_LONG_OPTIONS}
+ do
+ LAST_CHAR="${OPTION:$((${#OPTION}-1)):1}"
+ GETOPT_OPTION='--'${OPTION//:/}
+ ALL_OPTIONS="${ALL_OPTIONS} ${GETOPT_OPTION}"
+ if [[ "${LAST_CHAR}" == ":" ]]; then
+ EXTRA_ARG_OPTIONS="${EXTRA_ARG_OPTIONS} ${GETOPT_OPTION}"
+ fi
+ done
+
+ LAST_COMMAND_PREFIX="${COMP_WORDS[${#COMP_WORDS[@]}-1]}"
+ if [[ ${#COMP_WORDS[@]} -gt 1 ]]; then
+ PREVIOUS_COMMAND="${COMP_WORDS[${#COMP_WORDS[@]}-2]}"
+ else
+ PREVIOUS_COMMAND=""
+ fi
+
+ if _listcontains_run_tests "${EXTRA_ARG_OPTIONS}" "${PREVIOUS_COMMAND}"; then
+ COMPREPLY=()
+ _get_known_values_run_tests "${PREVIOUS_COMMAND}"
+ while IFS='' read -r LINE; do COMPREPLY+=("$LINE"); done \
+ < <(compgen -W "${_RUN_TESTS_KNOWN_VALUES}" -- "${LAST_COMMAND_PREFIX}")
+ else
+ COMPREPLY=()
+ while IFS='' read -r LINE; do COMPREPLY+=("$LINE"); done \
+ < <(compgen -W "${ALL_OPTIONS}" -- "${LAST_COMMAND_PREFIX}")
+
+ if [[ -f ${ALL_TESTS_FILE} ]]; then
+ while IFS='' read -r LINE; do COMPREPLY+=("$LINE"); done \
+ < <(compgen -W "$(cat "${ALL_TESTS_FILE}")" -- "${LAST_COMMAND_PREFIX}")
+ fi
+ fi
+}
+
+_build_options_run_tests
+
+# allow completion to contain colon (see http://tiswww.case.edu/php/chet/bash/FAQ)
+COMP_WORDBREAKS=${COMP_WORDBREAKS//:}
+
+complete -F _comp_run_tests run-tests
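+
+# With colons allowed, completion can also offer nose-style test targets such as
+# "tests.core:TestCore.test_check" (read from ~/all_tests.txt when that file exists)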
diff --git a/scripts/ci/_utils.sh b/scripts/ci/_utils.sh
index d13794c231eaf..a41738a112063 100644
--- a/scripts/ci/_utils.sh
+++ b/scripts/ci/_utils.sh
@@ -17,8 +17,9 @@
# under the License.
# Assume all the scripts are sourcing the _utils.sh from the scripts/ci directory
-# and MY_DIR variable is set to this directory
-AIRFLOW_SOURCES=$(cd "${MY_DIR}/../../" && pwd)
+# and the MY_DIR variable is set to this directory. It can be overridden, however.
+
+AIRFLOW_SOURCES=${AIRFLOW_SOURCES:=$(cd "${MY_DIR}/../../" && pwd)}
export AIRFLOW_SOURCES
BUILD_CACHE_DIR="${AIRFLOW_SOURCES}/.build"
@@ -56,6 +57,22 @@ export PYTHONDONTWRITEBYTECODE=${PYTHONDONTWRITEBYTECODE:="true"}
# Default branch name for triggered builds is the one configured in hooks/_default_branch.sh
export AIRFLOW_CONTAINER_BRANCH_NAME=${AIRFLOW_CONTAINER_BRANCH_NAME:=${DEFAULT_BRANCH}}
+PYTHON_VERSION=${PYTHON_VERSION:=$(python -c \
+ 'import sys; print("%s.%s" % (sys.version_info.major, sys.version_info.minor))')}
+export PYTHON_VERSION
+
+if [[ ${PYTHON_VERSION} == 2.* ]]; then
+ echo >&2
+ echo >&2 " Warning: You have python 2 as the default on your path"
+ echo >&2 " Switching to python 3"
+ echo >&2
+ PYTHON_VERSION=$(python3 -c \
+ 'import sys; print("%s.%s" % (sys.version_info.major, sys.version_info.minor))')
+ export PYTHON_VERSION
+fi
+
+export PYTHON_BINARY=${PYTHON_BINARY:=python${PYTHON_VERSION}}
+
#
# Sets mounting of host volumes to container for static checks
# unless AIRFLOW_MOUNT_HOST_VOLUMES_FOR_STATIC_CHECKS is not true
@@ -151,7 +168,7 @@ function check_file_md5sum {
echo "${MD5SUM}" > "${MD5SUM_FILE_NEW}"
local RET_CODE=0
if [[ ! -f "${MD5SUM_FILE}" ]]; then
- print_info "Missing md5sum for ${FILE}"
+ print_info "Missing md5sum for ${FILE#${AIRFLOW_SOURCES}} (${MD5SUM_FILE#${AIRFLOW_SOURCES}})"
RET_CODE=1
else
diff "${MD5SUM_FILE_NEW}" "${MD5SUM_FILE}" >/dev/null
@@ -319,15 +336,17 @@ function force_python_3_5() {
function confirm_image_rebuild() {
set +e
- "${MY_DIR}/../../confirm" "The image ${THE_IMAGE_TYPE} might need to be rebuild."
+ "${AIRFLOW_SOURCES}/confirm" "${ACTION} the image ${THE_IMAGE_TYPE}."
RES=$?
set -e
if [[ ${RES} == "1" ]]; then
SKIP_REBUILD="true"
+ # Assume No also to subsequent questions
+ export ASSUME_NO_TO_ALL_QUESTIONS="true"
elif [[ ${RES} == "2" ]]; then
echo >&2
echo >&2 "#############################################"
- echo >&2 " ERROR! The image require rebuilding. "
+ echo >&2 " ERROR: ${ACTION} the image stopped. "
echo >&2 "#############################################"
echo >&2
echo >&2 " You should re-run your command with REBUILD=true environment variable set"
@@ -343,7 +362,7 @@ function confirm_image_rebuild() {
echo >&2
exit 1
else
- # Assume Yes also for subsequent questions
+ # Assume Yes also to subsequent questions
export ASSUME_YES_TO_ALL_QUESTIONS="true"
fi
}
@@ -366,13 +385,19 @@ EOF
)
export AIRFLOW_VERSION
- if [[ -f "${BUILD_CACHE_DIR}/.built_${THE_IMAGE_TYPE}_${PYTHON_VERSION}" ]]; then
+ if [[ ${AIRFLOW_CONTAINER_CLEANUP_IMAGES:="false"} == "true" ]]; then
+ print_info
+ print_info "Clean up ${THE_IMAGE_TYPE}"
print_info
- print_info "Image ${THE_IMAGE_TYPE} built locally - skip force-pulling them"
+ export AIRFLOW_CONTAINER_FORCE_PULL_IMAGES="false"
+ export AIRFLOW_CONTAINER_DOCKER_BUILD_NEEDED="true"
+ elif [[ -f "${BUILD_CACHE_DIR}/.built_${THE_IMAGE_TYPE}_${PYTHON_VERSION}" ]]; then
+ print_info
+ print_info "Image ${THE_IMAGE_TYPE} built locally - skip force-pulling"
print_info
else
print_info
- print_info "Image ${THE_IMAGE_TYPE} not built locally - force pulling them first"
+ print_info "Image ${THE_IMAGE_TYPE} not built locally - force pulling"
print_info
export AIRFLOW_CONTAINER_FORCE_PULL_IMAGES="true"
export AIRFLOW_CONTAINER_DOCKER_BUILD_NEEDED="true"
@@ -382,18 +407,23 @@ EOF
check_if_docker_build_is_needed
if [[ "${AIRFLOW_CONTAINER_DOCKER_BUILD_NEEDED}" == "true" ]]; then
SKIP_REBUILD="false"
+ if [[ ${AIRFLOW_CONTAINER_CLEANUP_IMAGES} == "true" ]]; then
+ export ACTION="Cleaning"
+ else
+ export ACTION="Rebuilding"
+ fi
if [[ ${CI:=} != "true" ]]; then
confirm_image_rebuild
fi
if [[ ${SKIP_REBUILD} != "true" ]]; then
print_info
- print_info "Rebuilding image"
+ print_info "${ACTION} image: ${THE_IMAGE_TYPE}"
print_info
- # shellcheck source=../../hooks/build
+ # shellcheck source=hooks/build
./hooks/build | tee -a "${OUTPUT_LOG}"
update_all_md5_files
print_info
- print_info "Image rebuilt"
+ print_info "${ACTION} image completed: ${THE_IMAGE_TYPE}"
print_info
fi
else
@@ -406,7 +436,7 @@ EOF
#
# Rebuilds the slim image for static checks if needed. In order to speed it up, it's built without NPM
#
-function rebuild_image_if_needed_for_static_checks() {
+function rebuild_ci_slim_image_if_needed() {
export AIRFLOW_CONTAINER_SKIP_SLIM_CI_IMAGE="false"
export AIRFLOW_CONTAINER_SKIP_CI_IMAGE="true"
export AIRFLOW_CONTAINER_SKIP_CHECKLICENCE_IMAGE="true"
@@ -417,14 +447,30 @@ function rebuild_image_if_needed_for_static_checks() {
rebuild_image_if_needed
- AIRFLOW_SLIM_CI_IMAGE=$(cat "${BUILD_CACHE_DIR}/.AIRFLOW_SLIM_CI_IMAGE")
+ AIRFLOW_SLIM_CI_IMAGE=$(cat "${BUILD_CACHE_DIR}/.AIRFLOW_SLIM_CI_IMAGE" 2>/dev/null) || true
export AIRFLOW_SLIM_CI_IMAGE
}
+#
+# Cleans up the CI slim image
+#
+function cleanup_ci_slim_image() {
+ export AIRFLOW_CONTAINER_SKIP_SLIM_CI_IMAGE="false"
+ export AIRFLOW_CONTAINER_SKIP_CI_IMAGE="true"
+ export AIRFLOW_CONTAINER_SKIP_CHECKLICENCE_IMAGE="true"
+ export AIRFLOW_CONTAINER_CLEANUP_IMAGES="true"
+
+ export PYTHON_VERSION=3.5 # Always use python version 3.5 for static checks
+
+ export THE_IMAGE_TYPE="SLIM_CI"
+
+ rebuild_image_if_needed
+}
+
#
# Rebuilds the CI image for tests if needed.
#
-function rebuild_image_if_needed_for_tests() {
+function rebuild_ci_image_if_needed() {
export AIRFLOW_CONTAINER_SKIP_SLIM_CI_IMAGE="true"
export AIRFLOW_CONTAINER_SKIP_CHECKLICENCE_IMAGE="true"
export AIRFLOW_CONTAINER_SKIP_CI_IMAGE="false"
@@ -433,14 +479,29 @@ function rebuild_image_if_needed_for_tests() {
rebuild_image_if_needed
- AIRFLOW_CI_IMAGE=$(cat "${BUILD_CACHE_DIR}/.AIRFLOW_CI_IMAGE")
+ AIRFLOW_CI_IMAGE=$(cat "${BUILD_CACHE_DIR}/.AIRFLOW_CI_IMAGE" 2>/dev/null) || true
export AIRFLOW_CI_IMAGE
}
+
+#
+# Cleans up the CI image
+#
+function cleanup_ci_image() {
+ export AIRFLOW_CONTAINER_SKIP_SLIM_CI_IMAGE="true"
+ export AIRFLOW_CONTAINER_SKIP_CI_IMAGE="false"
+ export AIRFLOW_CONTAINER_SKIP_CHECKLICENCE_IMAGE="true"
+ export AIRFLOW_CONTAINER_CLEANUP_IMAGES="true"
+
+ export THE_IMAGE_TYPE="CI"
+
+ rebuild_image_if_needed
+}
+
#
# Rebuilds the image for licence checks if needed.
#
-function rebuild_image_if_needed_for_checklicence() {
+function rebuild_checklicence_image_if_needed() {
export AIRFLOW_CONTAINER_SKIP_SLIM_CI_IMAGE="true"
export AIRFLOW_CONTAINER_SKIP_CHECKLICENCE_IMAGE="false"
export AIRFLOW_CONTAINER_SKIP_CI_IMAGE="true"
@@ -449,10 +510,24 @@ function rebuild_image_if_needed_for_checklicence() {
rebuild_image_if_needed
- AIRFLOW_CHECKLICENCE_IMAGE=$(cat "${BUILD_CACHE_DIR}/.AIRFLOW_CHECKLICENCE_IMAGE")
+ AIRFLOW_CHECKLICENCE_IMAGE=$(cat "${BUILD_CACHE_DIR}/.AIRFLOW_CHECKLICENCE_IMAGE" 2>/dev/null) || true
export AIRFLOW_CHECKLICENCE_IMAGE
}
+#
+# Cleans up the checklicence image
+#
+function cleanup_checklicence_image() {
+ export AIRFLOW_CONTAINER_SKIP_SLIM_CI_IMAGE="true"
+ export AIRFLOW_CONTAINER_SKIP_CI_IMAGE="true"
+ export AIRFLOW_CONTAINER_SKIP_CHECKLICENCE_IMAGE="false"
+ export AIRFLOW_CONTAINER_CLEANUP_IMAGES="true"
+
+ export THE_IMAGE_TYPE="CHECKLICENCE"
+
+ rebuild_image_if_needed
+}
+
#
# Starts the script. If the VERBOSE variable is set to true, it enables verbose output of executed commands
# Also prints some useful diagnostics information at start of the script
@@ -496,7 +571,7 @@ function script_end {
function go_to_airflow_sources {
print_info
- pushd "${MY_DIR}/../../" &>/dev/null || exit 1
+ pushd "${AIRFLOW_SOURCES}" &>/dev/null || exit 1
print_info
print_info "Running in host in $(pwd)"
print_info
@@ -561,7 +636,7 @@ function run_check_license() {
--env HOST_USER_ID="$(id -ur)" \
--env HOST_GROUP_ID="$(id -gr)" \
--rm \
- "${AIRFLOW_CI_IMAGE}"
+ "${AIRFLOW_CHECKLICENCE_IMAGE}"
}
function run_mypy() {
diff --git a/scripts/ci/ci_before_install.sh b/scripts/ci/ci_before_install.sh
index 21ebf4dec4a03..19be859e9463d 100755
--- a/scripts/ci/ci_before_install.sh
+++ b/scripts/ci/ci_before_install.sh
@@ -33,11 +33,11 @@ export AIRFLOW_CONTAINER_FORCE_PULL_IMAGES="true"
docker system prune --all --force
if [[ ${TRAVIS_JOB_NAME} == "Tests"* ]]; then
- rebuild_image_if_needed_for_tests
+ rebuild_ci_image_if_needed
elif [[ ${TRAVIS_JOB_NAME} == "Check lic"* ]]; then
- rebuild_image_if_needed_for_checklicence
+ rebuild_checklicence_image_if_needed
else
- rebuild_image_if_needed_for_static_checks
+ rebuild_ci_slim_image_if_needed
fi
KUBERNETES_VERSION=${KUBERNETES_VERSION:=""}
diff --git a/scripts/ci/ci_build.sh b/scripts/ci/ci_build.sh
index 8177695b00e76..7378c6959fa36 100755
--- a/scripts/ci/ci_build.sh
+++ b/scripts/ci/ci_build.sh
@@ -32,6 +32,6 @@ force_python_3_5
script_start
-rebuild_image_if_needed_for_static_checks
+rebuild_ci_slim_image_if_needed
script_end
diff --git a/scripts/ci/ci_check_license.sh b/scripts/ci/ci_check_license.sh
index 5f7959fe95d9b..0f377493f4328 100755
--- a/scripts/ci/ci_check_license.sh
+++ b/scripts/ci/ci_check_license.sh
@@ -33,14 +33,8 @@ force_python_3_5
script_start
-rebuild_image_if_needed_for_checklicence
-
-docker run "${AIRFLOW_CONTAINER_EXTRA_DOCKER_FLAGS[@]}" -t \
- --env AIRFLOW_CI_VERBOSE="${VERBOSE}" \
- --env AIRFLOW_CI_SILENT \
- --env HOST_USER_ID="$(id -ur)" \
- --env HOST_GROUP_ID="$(id -gr)" \
- --rm \
- "${AIRFLOW_CHECKLICENCE_IMAGE}"
+rebuild_checklicence_image_if_needed
+
+run_check_license
script_end
diff --git a/scripts/ci/ci_docs.sh b/scripts/ci/ci_docs.sh
index 7a96b6e88f66d..91be390de3c43 100755
--- a/scripts/ci/ci_docs.sh
+++ b/scripts/ci/ci_docs.sh
@@ -32,7 +32,7 @@ force_python_3_5
script_start
-rebuild_image_if_needed_for_static_checks
+rebuild_ci_slim_image_if_needed
run_docs
diff --git a/scripts/ci/ci_flake8.sh b/scripts/ci/ci_flake8.sh
index 62078e6bd61f8..a8ada59956858 100755
--- a/scripts/ci/ci_flake8.sh
+++ b/scripts/ci/ci_flake8.sh
@@ -32,7 +32,7 @@ force_python_3_5
script_start
-rebuild_image_if_needed_for_static_checks
+rebuild_ci_slim_image_if_needed
run_flake8 "$@"
diff --git a/scripts/ci/ci_mypy.sh b/scripts/ci/ci_mypy.sh
index 0ed8c9ce6717e..7d77777a271c8 100755
--- a/scripts/ci/ci_mypy.sh
+++ b/scripts/ci/ci_mypy.sh
@@ -32,7 +32,7 @@ force_python_3_5
script_start
-rebuild_image_if_needed_for_static_checks
+rebuild_ci_slim_image_if_needed
run_mypy "$@"
diff --git a/scripts/ci/ci_pylint_main.sh b/scripts/ci/ci_pylint_main.sh
index 9e104a7b3b28e..e4fc4b7eb9d2d 100755
--- a/scripts/ci/ci_pylint_main.sh
+++ b/scripts/ci/ci_pylint_main.sh
@@ -32,7 +32,7 @@ force_python_3_5
script_start
-rebuild_image_if_needed_for_static_checks
+rebuild_ci_slim_image_if_needed
if [[ "${#@}" != "0" ]]; then
filter_out_files_from_pylint_todo_list "$@"
diff --git a/scripts/ci/ci_pylint_tests.sh b/scripts/ci/ci_pylint_tests.sh
index a906ae934453b..2d09973609fb9 100755
--- a/scripts/ci/ci_pylint_tests.sh
+++ b/scripts/ci/ci_pylint_tests.sh
@@ -32,7 +32,7 @@ force_python_3_5
script_start
-rebuild_image_if_needed_for_static_checks
+rebuild_ci_slim_image_if_needed
if [[ "${#@}" != "0" ]]; then
filter_out_files_from_pylint_todo_list "$@"
diff --git a/scripts/ci/ci_run_airflow_testing.sh b/scripts/ci/ci_run_airflow_testing.sh
index f6328b6ce9b46..e959031a61467 100755
--- a/scripts/ci/ci_run_airflow_testing.sh
+++ b/scripts/ci/ci_run_airflow_testing.sh
@@ -28,7 +28,7 @@ basic_sanity_checks
script_start
-rebuild_image_if_needed_for_tests
+rebuild_ci_image_if_needed
# Test environment
export BACKEND=${BACKEND:="sqlite"}
@@ -97,7 +97,7 @@ elif [[ "${ENV}" == "bare" ]]; then
run --no-deps airflow-testing /opt/airflow/scripts/ci/in_container/entrypoint_ci.sh;
else
echo >&2
- echo >&2 "ERROR! The ENV variable should be one of [docker, kubernetes, bare] and is '${ENV}'"
+ echo >&2 "ERROR: The ENV variable should be one of [docker, kubernetes, bare] and is '${ENV}'"
echo >&2
fi
set -u
diff --git a/scripts/ci/ci_run_all_static_tests.sh b/scripts/ci/ci_run_all_static_tests.sh
index fabc8d5ff77f5..8bd43929f804f 100755
--- a/scripts/ci/ci_run_all_static_tests.sh
+++ b/scripts/ci/ci_run_all_static_tests.sh
@@ -32,9 +32,9 @@ force_python_3_5
script_start
-rebuild_image_if_needed_for_static_checks
+rebuild_ci_slim_image_if_needed
-rebuild_image_if_needed_for_checklicence
+rebuild_checklicence_image_if_needed
pre-commit run --all-files --show-diff-on-failure
diff --git a/scripts/ci/ci_run_all_static_tests_except_pylint.sh b/scripts/ci/ci_run_all_static_tests_except_pylint.sh
index 276c945604e64..d4c6d8e246aa7 100755
--- a/scripts/ci/ci_run_all_static_tests_except_pylint.sh
+++ b/scripts/ci/ci_run_all_static_tests_except_pylint.sh
@@ -32,9 +32,9 @@ force_python_3_5
script_start
-rebuild_image_if_needed_for_static_checks
+rebuild_ci_slim_image_if_needed
-rebuild_image_if_needed_for_checklicence
+rebuild_checklicence_image_if_needed
SKIP=pylint pre-commit run --all-files --show-diff-on-failure
diff --git a/scripts/ci/ci_run_all_static_tests_except_pylint_licence.sh b/scripts/ci/ci_run_all_static_tests_except_pylint_licence.sh
index 81ea21c351417..e61e9e840554e 100755
--- a/scripts/ci/ci_run_all_static_tests_except_pylint_licence.sh
+++ b/scripts/ci/ci_run_all_static_tests_except_pylint_licence.sh
@@ -32,7 +32,7 @@ force_python_3_5
script_start
-rebuild_image_if_needed_for_static_checks
+rebuild_checklicence_image_if_needed
SKIP=pylint,check-apache-license pre-commit run --all-files --show-diff-on-failure
diff --git a/scripts/ci/ci_run_all_static_tests_pylint.sh b/scripts/ci/ci_run_all_static_tests_pylint.sh
index b4980b34aa98b..fbd21aeac7b86 100755
--- a/scripts/ci/ci_run_all_static_tests_pylint.sh
+++ b/scripts/ci/ci_run_all_static_tests_pylint.sh
@@ -32,7 +32,7 @@ force_python_3_5
script_start
-rebuild_image_if_needed_for_static_checks
+rebuild_ci_slim_image_if_needed
pre-commit run pylint --all-files --show-diff-on-failure
diff --git a/scripts/ci/docker-compose-local.yml b/scripts/ci/docker-compose-local.yml
index 053b7b044aca4..01be4c7b65b4c 100644
--- a/scripts/ci/docker-compose-local.yml
+++ b/scripts/ci/docker-compose-local.yml
@@ -17,6 +17,12 @@
---
version: "2.2"
services:
+ mysql:
+ ports:
+ - "${MYSQL_HOST_PORT}:3306"
+ postgres:
+ ports:
+ - "${POSTGRES_HOST_PORT}:5432"
airflow-testing:
# We need to mount files and directories individually because some files
# such as apache_airflow.egg-info should not be mounted from the host
diff --git a/scripts/ci/in_container/entrypoint_ci.sh b/scripts/ci/in_container/entrypoint_ci.sh
index 836ee7d84a984..c2214ee653df5 100755
--- a/scripts/ci/in_container/entrypoint_ci.sh
+++ b/scripts/ci/in_container/entrypoint_ci.sh
@@ -55,6 +55,29 @@ echo "Airflow sources: ${AIRFLOW_SOURCES}"
echo "Airflow core SQL connection: ${AIRFLOW__CORE__SQL_ALCHEMY_CONN:=}"
echo
+CLEAN_FILES=${CLEAN_FILES:=false}
+
+if [[ ! -d "${AIRFLOW_SOURCES}/airflow/www/node_modules" && "${CLEAN_FILES}" == "false" ]]; then
+ echo
+ echo "Installing NPM modules as they are not yet installed (sources are mounted from the host)"
+ echo
+ pushd "${AIRFLOW_SOURCES}/airflow/www/"
+ npm ci
+ echo
+ popd
+fi
+if [[ ! -d "${AIRFLOW_SOURCES}/airflow/www/static/dist" && ${CLEAN_FILES} == "false" ]]; then
+ pushd "${AIRFLOW_SOURCES}/airflow/www/"
+ echo
+ echo "Building production version of javascript files (sources are mounted from the host)"
+ echo
+ echo
+ npm run prod
+ echo
+ echo
+ popd
+fi
+
ARGS=( "$@" )
RUN_TESTS=${RUN_TESTS:="true"}
@@ -167,7 +190,7 @@ if [[ "${ENV}" == "docker" ]]; then
if [[ ${RES_1} != 0 || ${RES_2} != 0 || ${RES_3} != 0 ]]; then
echo
- echo "ERROR! There was a problem communicating with kerberos"
+ echo "ERROR: There was a problem communicating with kerberos"
echo "Errors produced by kadmin commands are in : ${AIRFLOW_HOME}/logs/kadmin*.log"
echo
echo "Action! Please restart the environment!"
diff --git a/scripts/ci/local_ci_build.sh b/scripts/ci/local_ci_build.sh
index 34b4cb9b54ec7..1fb2bae1dbcdf 100755
--- a/scripts/ci/local_ci_build.sh
+++ b/scripts/ci/local_ci_build.sh
@@ -30,10 +30,10 @@ basic_sanity_checks
script_start
-rebuild_image_if_needed_for_tests
+rebuild_ci_image_if_needed
-rebuild_image_if_needed_for_static_checks
+rebuild_ci_slim_image_if_needed
-rebuild_image_if_needed_for_checklicence
+rebuild_checklicence_image_if_needed
script_end
diff --git a/scripts/ci/local_ci_cleanup.sh b/scripts/ci/local_ci_cleanup.sh
new file mode 100755
index 0000000000000..388731b9619bd
--- /dev/null
+++ b/scripts/ci/local_ci_cleanup.sh
@@ -0,0 +1,39 @@
+#!/usr/bin/env bash
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+#
+# Cleans up all the CI images (CI, slim CI and checklicence)
+#
+set -euo pipefail
+
+MY_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+
+# shellcheck source=scripts/ci/_utils.sh
+. "${MY_DIR}/_utils.sh"
+
+basic_sanity_checks
+
+script_start
+
+cleanup_ci_image
+
+cleanup_ci_slim_image
+
+cleanup_checklicence_image
+
+script_end
diff --git a/scripts/ci/local_ci_pull_and_build.sh b/scripts/ci/local_ci_pull_and_build.sh
index 33840dc65657f..363a2513ed06f 100755
--- a/scripts/ci/local_ci_pull_and_build.sh
+++ b/scripts/ci/local_ci_pull_and_build.sh
@@ -34,10 +34,10 @@ export AIRFLOW_CONTAINER_FORCE_PULL_IMAGES="true"
export AIRFLOW_CONTAINER_SKIP_LATEST_PYTHON_PULL="false"
export ASSUME_YES_TO_ALL_QUESTIONS="true"
-rebuild_image_if_needed_for_tests
+rebuild_ci_image_if_needed
-rebuild_image_if_needed_for_static_checks
+rebuild_ci_slim_image_if_needed
-rebuild_image_if_needed_for_checklicence
+rebuild_checklicence_image_if_needed
script_end
diff --git a/scripts/ci/local_ci_run_airflow_testing.sh b/scripts/ci/local_ci_run_airflow_testing.sh
index f5f18c4b5e6a5..c71d3c857d13e 100755
--- a/scripts/ci/local_ci_run_airflow_testing.sh
+++ b/scripts/ci/local_ci_run_airflow_testing.sh
@@ -21,7 +21,6 @@ MY_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
export RUN_TESTS="true"
export MOUNT_LOCAL_SOURCES="true"
-export PYTHON_VERSION=${PYTHON_VERSION:="3.6"}
export VERBOSE=${VERBOSE:="false"}
# shellcheck source=./ci_run_airflow_testing.sh
diff --git a/tests/test_utils/get_all_tests.py b/tests/test_utils/get_all_tests.py
new file mode 100644
index 0000000000000..a4347a2c330b6
--- /dev/null
+++ b/tests/test_utils/get_all_tests.py
@@ -0,0 +1,74 @@
+#!/usr/bin/env python
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+"""
+Gets all test cases from the xunit file.
+"""
+import sys
+from xml.etree import ElementTree
+
+
+def last_replace(s, old, new, number_of_occurrences):
+ """
+ Replaces the last n occurrences of the old string with the new one within the provided string
+ :param s: string in which the occurrences are replaced
+ :param old: old string
+ :param new: new string
+ :param number_of_occurrences: how many occurrences should be replaced
+ :return: string with last n occurrences replaced
+ """
+ list_of_components = s.rsplit(old, number_of_occurrences)
+ return new.join(list_of_components)
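+
+# Example (hypothetical values):
+#   last_replace("tests.core.TestCore", ".", ":", 1) == "tests.core:TestCore"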
+
+
+def print_all_cases(xunit_test_file_path):
+ """
+ Prints all test cases read from the xunit test file
+ :param xunit_test_file_path: path of the xunit file
+ :return: None
+ """
+ with open(xunit_test_file_path, "r") as file:
+ text = file.read()
+
+ root = ElementTree.fromstring(text)
+
+ test_cases = root.findall('.//testcase')
+ classes = set()
+ modules = set()
+
+ for test_case in test_cases:
+ the_module = '.'.join(test_case.get('classname').split('.')[:-1])
+ the_class = last_replace(test_case.get('classname'), ".", ":", 1)
+ test_method = test_case.get('name')
+ modules.add(the_module)
+ classes.add(the_class)
+ print(the_class + "." + test_method)
+
+ for the_class in classes:
+ print(the_class)
+
+ for the_module in modules:
+ print(the_module)
+
+
+if __name__ == '__main__':
+ if len(sys.argv) < 2:
+ print("Please provide name of xml unit file as first parameter")
+ exit(0)
+ file_name = sys.argv[1]
+ print_all_cases(file_name)