diff --git a/.github/actions/checkout_target_commit/action.yml b/.github/actions/checkout_target_commit/action.yml deleted file mode 100644 index e95e8b86254a0..0000000000000 --- a/.github/actions/checkout_target_commit/action.yml +++ /dev/null @@ -1,81 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. -# ---- -name: 'Checkout target commit' -description: > - Checks out target commit with the exception of .github scripts directories that come from the target branch -inputs: - target-commit-sha: - description: 'SHA of the target commit to checkout' - required: true - pull-request-target: - description: 'Whether the workflow is a pull request target workflow' - required: true - is-committer-build: - description: 'Whether the build is done by a committer' - required: true -runs: - using: "composite" - steps: - - name: "Checkout target commit" - uses: actions/checkout@v4 - with: - ref: ${{ inputs.target-commit-sha }} - persist-credentials: false - #################################################################################################### - # BE VERY CAREFUL HERE! THIS LINE AND THE END OF THE WARNING. 
IN PULL REQUEST TARGET WORKFLOW - # WE CHECK OUT THE TARGET COMMIT ABOVE TO BE ABLE TO BUILD THE IMAGE FROM SOURCES FROM THE - # INCOMING PR, RATHER THAN FROM TARGET BRANCH. THIS IS A SECURITY RISK, BECAUSE THE PR - # CAN CONTAIN ANY CODE AND WE EXECUTE IT HERE. THEREFORE, WE NEED TO BE VERY CAREFUL WHAT WE - # DO HERE. WE SHOULD NOT EXECUTE ANY CODE THAT COMES FROM THE PR. WE SHOULD NOT RUN ANY BREEZE - # COMMAND NOR SCRIPTS NOR COMPOSITE ACTIONS. WE SHOULD ONLY RUN CODE THAT IS EMBEDDED DIRECTLY IN - # THIS WORKFLOW - BECAUSE THIS IS THE ONLY CODE THAT WE CAN TRUST. - #################################################################################################### - - name: Checkout target branch to 'target-airflow' folder to use ci/scripts and breeze from there. - uses: actions/checkout@v4 - with: - path: "target-airflow" - ref: ${{ github.base_ref }} - persist-credentials: false - if: inputs.pull-request-target == 'true' && inputs.is-committer-build != 'true' - - name: > - Replace "scripts/ci", "dev", ".github/actions" and ".github/workflows" with the target branch - so that the those directories are not coming from the PR - shell: bash - run: | - echo - echo -e "\033[33m Replace scripts, dev, actions with target branch for non-committer builds!\033[0m" - echo - rm -rfv "scripts/ci" - rm -rfv "dev" - rm -rfv ".github/actions" - rm -rfv ".github/workflows" - rm -v ".dockerignore" || true - mv -v "target-airflow/scripts/ci" "scripts" - mv -v "target-airflow/dev" "." - mv -v "target-airflow/.github/actions" "target-airflow/.github/workflows" ".github" - mv -v "target-airflow/.dockerignore" ".dockerignore" || true - if: inputs.pull-request-target == 'true' && inputs.is-committer-build != 'true' - #################################################################################################### - # AFTER IT'S SAFE. THE `dev`, `scripts/ci` AND `.github/actions` and `.dockerignore` ARE NOW COMING - # FROM THE BASE_REF - WHICH IS THE TARGET BRANCH OF THE PR. 
WE CAN TRUST THAT THOSE SCRIPTS ARE - # SAFE TO RUN AND CODE AVAILABLE IN THE DOCKER BUILD PHASE IS CONTROLLED BY THE `.dockerignore`. - # ALL THE REST OF THE CODE COMES FROM THE PR, AND FOR EXAMPLE THE CODE IN THE `Dockerfile.ci` CAN - # BE RUN SAFELY AS PART OF DOCKER BUILD. BECAUSE IT RUNS INSIDE THE DOCKER CONTAINER AND IT IS - # ISOLATED FROM THE RUNNER. - #################################################################################################### diff --git a/.github/actions/prepare_all_ci_images/action.yml b/.github/actions/prepare_all_ci_images/action.yml new file mode 100644 index 0000000000000..893c1094295ee --- /dev/null +++ b/.github/actions/prepare_all_ci_images/action.yml @@ -0,0 +1,94 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# +--- +name: 'Prepare all images' +description: 'Recreates current python image from artifacts' +inputs: + pull-image-type: + description: 'Which image type to prepare' + default: "CI" + python-versions-list-as-string: + description: 'Stringified array of all Python versions to test - separated by spaces.' 
+ required: true + platform: + description: 'Platform for the build - linux/amd64 or linux/arm64' + required: true +outputs: + host-python-version: + description: Python version used in host + value: ${{ steps.breeze.outputs.host-python-version }} +runs: + using: "composite" + steps: + - name: "Cleanup docker" + run: ./scripts/ci/cleanup_docker.sh + shell: bash + # TODO: Currently we cannot loop through the list of python versions and have dynamic list of + # tasks. Instead we hardcode all possible python versions and they - but + # this should be implemented in stash action as list of keys to download + - name: "Restore CI docker images ${{ inputs.platform }}-3.8" + uses: ./.github/actions/prepare_single_image + with: + pull-image-type: ${{ inputs.pull-image-type }} + platform: ${{ inputs.platform }} + python: "3.8" + python-versions-list-as-string: ${{ inputs.python-versions-list-as-string }} + - name: "Restore CI docker images ${{ inputs.platform }}-3.9" + uses: ./.github/actions/prepare_single_image + with: + pull-image-type: ${{ inputs.pull-image-type }} + platform: ${{ inputs.platform }} + python: "3.9" + python-versions-list-as-string: ${{ inputs.python-versions-list-as-string }} + - name: "Restore CI docker images ${{ inputs.platform }}-3.10" + uses: ./.github/actions/prepare_single_image + with: + pull-image-type: ${{ inputs.pull-image-type }} + platform: ${{ inputs.platform }} + python: "3.10" + python-versions-list-as-string: ${{ inputs.python-versions-list-as-string }} + - name: "Restore CI docker images ${{ inputs.platform }}-3.11" + uses: ./.github/actions/prepare_single_image + with: + pull-image-type: ${{ inputs.pull-image-type }} + platform: ${{ inputs.platform }} + python: "3.11" + python-versions-list-as-string: ${{ inputs.python-versions-list-as-string }} + - name: "Restore CI docker images ${{ inputs.platform }}-3.12" + uses: ./.github/actions/prepare_single_image + with: + pull-image-type: ${{ inputs.pull-image-type }} + platform: ${{ 
inputs.platform }} + python: "3.12" + python-versions-list-as-string: ${{ inputs.python-versions-list-as-string }} + - name: "Load CI image ${{ inputs.platform }}:${{ inputs.python-versions-list-as-string }}" + run: | + for PYTHON in ${{ inputs.python-versions-list-as-string }}; do + breeze ci-image load --platform ${{ inputs.platform }} --python ${PYTHON} + rm -rf /tmp/-*${PYTHON}.tar + done + shell: bash + if: inputs.pull-image-type == 'CI' + - name: "Load PROD image ${{ inputs.platform }}${{ env.PYTHON_MAJOR_MINOR_VERSION }}" + run: | + for PYTHON in ${{ inputs.python-versions-list-as-string }}; do + breeze ci-image load --platform ${{ inputs.platform }} --python ${PYTHON} + rm -rf /tmp/-*${PYTHON}.tar + done + shell: bash + if: inputs.pull-image-type == 'PROD' diff --git a/.github/actions/prepare_breeze_and_image/action.yml b/.github/actions/prepare_breeze_and_image/action.yml index 41aa17092d589..ca06266c6ef90 100644 --- a/.github/actions/prepare_breeze_and_image/action.yml +++ b/.github/actions/prepare_breeze_and_image/action.yml @@ -16,12 +16,15 @@ # under the License. 
# --- -name: 'Prepare breeze && current python image' -description: 'Installs breeze and pulls current python image' +name: 'Prepare breeze && current image (CI or PROD)' +description: 'Installs breeze and recreates current python image from artifact' inputs: pull-image-type: - description: 'Which image to pull' - default: CI + description: 'Which image type to prepare' + default: "CI" + platform: + description: 'Platform for the build - linux/amd64 or linux/arm64' + required: true outputs: host-python-version: description: Python version used in host @@ -29,17 +32,29 @@ outputs: runs: using: "composite" steps: + - name: "Cleanup docker" + run: ./scripts/ci/cleanup_docker.sh + shell: bash - name: "Install Breeze" uses: ./.github/actions/breeze id: breeze - - name: Login to ghcr.io - shell: bash - run: echo "${{ env.GITHUB_TOKEN }}" | docker login ghcr.io -u ${{ github.actor }} --password-stdin - - name: Pull CI image ${{ env.PYTHON_MAJOR_MINOR_VERSION }}:${{ env.IMAGE_TAG }} + - name: "Restore CI docker image ${{ inputs.platform }}-${{ env.PYTHON_MAJOR_MINOR_VERSION }}" + uses: apache/infrastructure-actions/stash/restore@c94b890bbedc2fc61466d28e6bd9966bc6c6643c + with: + key: "ci-image-save-${{ inputs.platform }}-${{ env.PYTHON_MAJOR_MINOR_VERSION }}" + path: "/tmp/" + if: inputs.pull-image-type == 'CI' + - name: "Load CI image ${{ inputs.platform }}${{ env.PYTHON_MAJOR_MINOR_VERSION }}" + run: breeze ci-image load --platform ${{ inputs.platform }} shell: bash - run: breeze ci-image pull --tag-as-latest if: inputs.pull-image-type == 'CI' - - name: Pull PROD image ${{ env.PYTHON_MAJOR_MINOR_VERSION }}:${{ env.IMAGE_TAG }} + - name: "Restore PROD docker image ${{ inputs.platform }}-${{ env.PYTHON_MAJOR_MINOR_VERSION }}" + uses: apache/infrastructure-actions/stash/restore@c94b890bbedc2fc61466d28e6bd9966bc6c6643c + with: + key: "prod-image-save-${{ inputs.platform }}-${{ env.PYTHON_MAJOR_MINOR_VERSION }}" + path: "/tmp/" + if: inputs.pull-image-type == 'PROD' + - name: 
"Load PROD image ${{ inputs.platform }}${{ env.PYTHON_MAJOR_MINOR_VERSION }}" + run: breeze prod-image load --platform ${{ inputs.platform }} shell: bash - run: breeze prod-image pull --tag-as-latest if: inputs.pull-image-type == 'PROD' diff --git a/.github/actions/prepare_single_image/action.yml b/.github/actions/prepare_single_image/action.yml new file mode 100644 index 0000000000000..011564b8e4ebc --- /dev/null +++ b/.github/actions/prepare_single_image/action.yml @@ -0,0 +1,51 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# +--- +name: 'Prepare single images' +description: 'Recreates current python image from artifacts' +inputs: + python: + description: 'Python version for image to prepare' + required: true + python-versions-list-as-string: + description: 'Stringified array of all Python versions to prepare - separated by spaces.' 
+ required: true + platform: + description: 'Platform for the build - linux/amd64 or linux/arm64' + required: true +outputs: + host-python-version: + description: Python version used in host + value: ${{ steps.breeze.outputs.host-python-version }} +runs: + using: "composite" + steps: + - name: "Restore CI docker images ${{ inputs.platform }}-${{ inputs.python }}" + uses: apache/infrastructure-actions/stash/restore@c94b890bbedc2fc61466d28e6bd9966bc6c6643c + with: + key: "ci-image-save-${{ inputs.platform }}-${{ inputs.python }}" + path: "/tmp/" + if: contains(inputs.python-versions-list-as-string, inputs.python) + - name: "Load CI image ${{ inputs.platform }}${{ env.PYTHON_MAJOR_MINOR_VERSION }}" + run: breeze ci-image load --platform "${{ inputs.platform }}" --python "${{ inputs.python }}" + shell: bash + if: contains(inputs.python-versions-list-as-string, inputs.python) + - name: "Remove saved image ${{ inputs.platform }}-${{ inputs.python }}" + run: rm -f /tmp/ci-image-save-*-${{ inputs.python }}* + shell: bash + if: contains(inputs.python-versions-list-as-string, inputs.python) diff --git a/.github/workflows/additional-ci-image-checks.yml b/.github/workflows/additional-ci-image-checks.yml index 8a3b46e70d37d..40196a6e04296 100644 --- a/.github/workflows/additional-ci-image-checks.yml +++ b/.github/workflows/additional-ci-image-checks.yml @@ -32,10 +32,6 @@ on: # yamllint disable-line rule:truthy description: "The array of labels (in json form) determining self-hosted runners." required: true type: string - image-tag: - description: "Tag to set for the image" - required: true - type: string python-versions: description: "The list of python versions (stringified JSON array) to run the tests on." required: true @@ -103,8 +99,6 @@ jobs: contents: read # This write is only given here for `push` events from "apache/airflow" repo. It is not given for PRs # from forks. This is to prevent malicious PRs from creating images in the "apache/airflow" repo. 
- # For regular build for PRS this "build-prod-images" workflow will be skipped anyway by the - # "in-workflow-build" condition packages: write secrets: inherit with: @@ -159,7 +153,7 @@ jobs: # # There is no point in running this one in "canary" run, because the above step is doing the # # same build anyway. # build-ci-arm-images: -# name: Build CI ARM images (in-workflow) +# name: Build CI ARM images # uses: ./.github/workflows/ci-image-build.yml # permissions: # contents: read @@ -169,7 +163,6 @@ jobs: # push-image: "false" # runs-on-as-json-public: ${{ inputs.runs-on-as-json-public }} # runs-on-as-json-self-hosted: ${{ inputs.runs-on-as-json-self-hosted }} -# image-tag: ${{ inputs.image-tag }} # python-versions: ${{ inputs.python-versions }} # platform: "linux/arm64" # branch: ${{ inputs.branch }} diff --git a/.github/workflows/additional-prod-image-tests.yml b/.github/workflows/additional-prod-image-tests.yml index 5ffd2001e0e26..b6d9e25a2b802 100644 --- a/.github/workflows/additional-prod-image-tests.yml +++ b/.github/workflows/additional-prod-image-tests.yml @@ -32,10 +32,6 @@ on: # yamllint disable-line rule:truthy description: "Branch used to construct constraints URL from." 
required: true type: string - image-tag: - description: "Tag to set for the image" - required: true - type: string upgrade-to-newer-dependencies: description: "Whether to upgrade to newer dependencies (true/false)" required: true @@ -70,7 +66,6 @@ jobs: default-python-version: ${{ inputs.default-python-version }} branch: ${{ inputs.default-branch }} use-uv: "false" - image-tag: ${{ inputs.image-tag }} build-provider-packages: ${{ inputs.default-branch == 'main' }} upgrade-to-newer-dependencies: ${{ inputs.upgrade-to-newer-dependencies }} chicken-egg-providers: ${{ inputs.chicken-egg-providers }} @@ -88,7 +83,6 @@ jobs: default-python-version: ${{ inputs.default-python-version }} branch: ${{ inputs.default-branch }} use-uv: "false" - image-tag: ${{ inputs.image-tag }} build-provider-packages: ${{ inputs.default-branch == 'main' }} upgrade-to-newer-dependencies: ${{ inputs.upgrade-to-newer-dependencies }} chicken-egg-providers: ${{ inputs.chicken-egg-providers }} @@ -122,11 +116,10 @@ jobs: - name: Login to ghcr.io shell: bash run: echo "${{ env.GITHUB_TOKEN }}" | docker login ghcr.io -u ${{ github.actor }} --password-stdin - - name: Pull PROD image ${{ inputs.default-python-version}}:${{ inputs.image-tag }} - run: breeze prod-image pull --tag-as-latest + - name: Pull PROD image ${{ inputs.default-python-version}} + run: breeze prod-image pull env: PYTHON_MAJOR_MINOR_VERSION: "${{ inputs.default-python-version }}" - IMAGE_TAG: "${{ inputs.image-tag }}" - name: "Setup python" uses: actions/setup-python@v5 with: @@ -138,7 +131,7 @@ jobs: cd ./docker_tests && \ python -m pip install -r requirements.txt && \ TEST_IMAGE=\"ghcr.io/${{ github.repository }}/${{ inputs.default-branch }}\ - /prod/python${{ inputs.default-python-version }}:${{ inputs.image-tag }}\" \ + /prod/python${{ inputs.default-python-version }}\" \ python -m pytest test_examples_of_prod_image_building.py -n auto --color=yes" test-docker-compose-quick-start: @@ -146,7 +139,6 @@ jobs: name: "Docker-compose 
quick start with PROD image verifying" runs-on: ${{ fromJSON(inputs.runs-on-as-json-public) }} env: - IMAGE_TAG: "${{ inputs.image-tag }}" PYTHON_MAJOR_MINOR_VERSION: "${{ inputs.default-python-version }}" GITHUB_REPOSITORY: ${{ github.repository }} GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} @@ -161,14 +153,10 @@ jobs: with: fetch-depth: 2 persist-credentials: false - - name: "Cleanup docker" - run: ./scripts/ci/cleanup_docker.sh - - name: "Install Breeze" - uses: ./.github/actions/breeze - - name: Login to ghcr.io - shell: bash - run: echo "${{ env.GITHUB_TOKEN }}" | docker login ghcr.io -u ${{ github.actor }} --password-stdin - - name: "Pull image ${{ inputs.default-python-version}}:${{ inputs.image-tag }}" - run: breeze prod-image pull --tag-as-latest + - name: "Prepare breeze & CI image: ${{ inputs.default-python-version}}" + uses: ./.github/actions/prepare_breeze_and_image + with: + platform: "linux/amd64" + id: breeze - name: "Test docker-compose quick start" run: breeze testing docker-compose-tests diff --git a/.github/workflows/basic-tests.yml b/.github/workflows/basic-tests.yml index c8ba85969f5e3..47f80f05b7ac7 100644 --- a/.github/workflows/basic-tests.yml +++ b/.github/workflows/basic-tests.yml @@ -288,7 +288,6 @@ jobs: runs-on: ${{ fromJSON(inputs.runs-on-as-json-public) }} env: PYTHON_MAJOR_MINOR_VERSION: "${{ inputs.default-python-version }}" - IMAGE_TAG: ${{ inputs.image-tag }} GITHUB_REPOSITORY: ${{ github.repository }} GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} GITHUB_USERNAME: ${{ github.actor }} diff --git a/.github/workflows/build-images.yml b/.github/workflows/build-images.yml deleted file mode 100644 index 9135dcb9d9e94..0000000000000 --- a/.github/workflows/build-images.yml +++ /dev/null @@ -1,264 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. 
The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. -# ---- -name: Build Images -run-name: > - Build images for ${{ github.event.pull_request.title }} ${{ github.event.pull_request._links.html.href }} -on: # yamllint disable-line rule:truthy - pull_request_target: - branches: - - main - - v2-10-stable - - v2-10-test - - providers-[a-z]+-?[a-z]*/v[0-9]+-[0-9]+ -permissions: - # all other permissions are set to none - contents: read - pull-requests: read - packages: read -env: - ANSWER: "yes" - # You can override CONSTRAINTS_GITHUB_REPOSITORY by setting secret in your repo but by default the - # Airflow one is going to be used - CONSTRAINTS_GITHUB_REPOSITORY: >- - ${{ secrets.CONSTRAINTS_GITHUB_REPOSITORY != '' && - secrets.CONSTRAINTS_GITHUB_REPOSITORY || 'apache/airflow' }} - # This token is WRITE one - pull_request_target type of events always have the WRITE token - DB_RESET: "true" - GITHUB_REPOSITORY: ${{ github.repository }} - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - GITHUB_USERNAME: ${{ github.actor }} - IMAGE_TAG: "${{ github.event.pull_request.head.sha || github.sha }}" - INCLUDE_SUCCESS_OUTPUTS: "true" - USE_SUDO: "true" - VERBOSE: "true" - -concurrency: - group: build-${{ github.event.pull_request.number || github.ref }} - cancel-in-progress: true - -jobs: - build-info: - timeout-minutes: 10 - name: Build Info - # At build-info stage we do not yet have outputs so we need to hard-code the runs-on to public runners - runs-on: 
["ubuntu-22.04"] - env: - TARGET_BRANCH: ${{ github.event.pull_request.base.ref }} - outputs: - image-tag: ${{ github.event.pull_request.head.sha || github.sha }} - python-versions: ${{ steps.selective-checks.outputs.python-versions }} - python-versions-list-as-string: ${{ steps.selective-checks.outputs.python-versions-list-as-string }} - default-python-version: ${{ steps.selective-checks.outputs.default-python-version }} - upgrade-to-newer-dependencies: ${{ steps.selective-checks.outputs.upgrade-to-newer-dependencies }} - run-tests: ${{ steps.selective-checks.outputs.run-tests }} - run-kubernetes-tests: ${{ steps.selective-checks.outputs.run-kubernetes-tests }} - ci-image-build: ${{ steps.selective-checks.outputs.ci-image-build }} - prod-image-build: ${{ steps.selective-checks.outputs.prod-image-build }} - docker-cache: ${{ steps.selective-checks.outputs.docker-cache }} - default-branch: ${{ steps.selective-checks.outputs.default-branch }} - disable-airflow-repo-cache: ${{ steps.selective-checks.outputs.disable-airflow-repo-cache }} - force-pip: ${{ steps.selective-checks.outputs.force-pip }} - constraints-branch: ${{ steps.selective-checks.outputs.default-constraints-branch }} - runs-on-as-json-default: ${{ steps.selective-checks.outputs.runs-on-as-json-default }} - runs-on-as-json-public: ${{ steps.selective-checks.outputs.runs-on-as-json-public }} - runs-on-as-json-self-hosted: ${{ steps.selective-checks.outputs.runs-on-as-json-self-hosted }} - is-self-hosted-runner: ${{ steps.selective-checks.outputs.is-self-hosted-runner }} - is-committer-build: ${{ steps.selective-checks.outputs.is-committer-build }} - is-airflow-runner: ${{ steps.selective-checks.outputs.is-airflow-runner }} - is-amd-runner: ${{ steps.selective-checks.outputs.is-amd-runner }} - is-arm-runner: ${{ steps.selective-checks.outputs.is-arm-runner }} - is-vm-runner: ${{ steps.selective-checks.outputs.is-vm-runner }} - is-k8s-runner: ${{ steps.selective-checks.outputs.is-k8s-runner }} - 
chicken-egg-providers: ${{ steps.selective-checks.outputs.chicken-egg-providers }} - target-commit-sha: "${{steps.discover-pr-merge-commit.outputs.target-commit-sha || - github.event.pull_request.head.sha || - github.sha - }}" - if: github.repository == 'apache/airflow' - steps: - - name: Cleanup repo - shell: bash - run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" - - name: Discover PR merge commit - id: discover-pr-merge-commit - run: | - # Sometimes target-commit-sha cannot be - TARGET_COMMIT_SHA="$(gh api '${{ github.event.pull_request.url }}' --jq .merge_commit_sha)" - if [[ ${TARGET_COMMIT_SHA} == "" ]]; then - # Sometimes retrieving the merge commit SHA from PR fails. We retry it once. Otherwise we - # fall-back to github.event.pull_request.head.sha - echo - echo "Could not retrieve merge commit SHA from PR, waiting for 3 seconds and retrying." - echo - sleep 3 - TARGET_COMMIT_SHA="$(gh api '${{ github.event.pull_request.url }}' --jq .merge_commit_sha)" - if [[ ${TARGET_COMMIT_SHA} == "" ]]; then - echo - echo "Could not retrieve merge commit SHA from PR, falling back to PR head SHA." - echo - TARGET_COMMIT_SHA="${{ github.event.pull_request.head.sha }}" - fi - fi - echo "TARGET_COMMIT_SHA=${TARGET_COMMIT_SHA}" - echo "TARGET_COMMIT_SHA=${TARGET_COMMIT_SHA}" >> ${GITHUB_ENV} - echo "target-commit-sha=${TARGET_COMMIT_SHA}" >> ${GITHUB_OUTPUT} - if: github.event_name == 'pull_request_target' - # The labels in the event aren't updated when re-triggering the job, So lets hit the API to get - # up-to-date values - - name: Get latest PR labels - id: get-latest-pr-labels - run: | - echo -n "pull-request-labels=" >> ${GITHUB_OUTPUT} - gh api graphql --paginate -F node_id=${{github.event.pull_request.node_id}} -f query=' - query($node_id: ID!, $endCursor: String) { - node(id:$node_id) { - ... 
on PullRequest { - labels(first: 100, after: $endCursor) { - nodes { name } - pageInfo { hasNextPage endCursor } - } - } - } - }' --jq '.data.node.labels.nodes[]' | jq --slurp -c '[.[].name]' >> ${GITHUB_OUTPUT} - if: github.event_name == 'pull_request_target' - - uses: actions/checkout@v4 - with: - ref: ${{ env.TARGET_COMMIT_SHA }} - persist-credentials: false - fetch-depth: 2 - #################################################################################################### - # WE ONLY DO THAT CHECKOUT ABOVE TO RETRIEVE THE TARGET COMMIT AND IT'S PARENT. DO NOT RUN ANY CODE - # RIGHT AFTER THAT AS WE ARE GOING TO RESTORE THE TARGET BRANCH CODE IN THE NEXT STEP. - #################################################################################################### - - name: Checkout target branch to use ci/scripts and breeze from there. - uses: actions/checkout@v4 - with: - ref: ${{ github.base_ref }} - persist-credentials: false - #################################################################################################### - # HERE EVERYTHING IS PERFECTLY SAFE TO RUN. AT THIS POINT WE HAVE THE TARGET BRANCH CHECKED OUT - # AND WE CAN RUN ANY CODE FROM IT. WE CAN RUN BREEZE COMMANDS, WE CAN RUN SCRIPTS, WE CAN RUN - # COMPOSITE ACTIONS. WE CAN RUN ANYTHING THAT IS IN THE TARGET BRANCH AND THERE IS NO RISK THAT - # CODE WILL BE RUN FROM THE PR. - #################################################################################################### - - name: Cleanup docker - run: ./scripts/ci/cleanup_docker.sh - - name: Setup python - uses: actions/setup-python@v5 - with: - python-version: "3.9" - - name: Install Breeze - uses: ./.github/actions/breeze - #################################################################################################### - # WE RUN SELECTIVE CHECKS HERE USING THE TARGET COMMIT AND ITS PARENT TO BE ABLE TO COMPARE THEM - # AND SEE WHAT HAS CHANGED IN THE PR. 
THE CODE IS STILL RUN FROM THE TARGET BRANCH, SO IT IS SAFE - # TO RUN IT, WE ONLY PASS TARGET_COMMIT_SHA SO THAT SELECTIVE CHECKS CAN SEE WHAT'S COMING IN THE PR - #################################################################################################### - - name: Selective checks - id: selective-checks - env: - PR_LABELS: "${{ steps.get-latest-pr-labels.outputs.pull-request-labels }}" - COMMIT_REF: "${{ env.TARGET_COMMIT_SHA }}" - VERBOSE: "false" - AIRFLOW_SOURCES_ROOT: "${{ github.workspace }}" - run: breeze ci selective-check 2>> ${GITHUB_OUTPUT} - - name: env - run: printenv - env: - PR_LABELS: ${{ steps.get-latest-pr-labels.outputs.pull-request-labels }} - GITHUB_CONTEXT: ${{ toJson(github) }} - - - build-ci-images: - name: Build CI images - permissions: - contents: read - packages: write - secrets: inherit - needs: [build-info] - uses: ./.github/workflows/ci-image-build.yml - # Only run this it if the PR comes from fork, otherwise build will be done "in-PR-workflow" - if: | - needs.build-info.outputs.ci-image-build == 'true' && - github.event.pull_request.head.repo.full_name != 'apache/airflow' - with: - runs-on-as-json-public: ${{ needs.build-info.outputs.runs-on-as-json-public }} - runs-on-as-json-self-hosted: ${{ needs.build-info.outputs.runs-on-as-json-self-hosted }} - do-build: ${{ needs.build-info.outputs.ci-image-build }} - target-commit-sha: ${{ needs.build-info.outputs.target-commit-sha }} - pull-request-target: "true" - is-committer-build: ${{ needs.build-info.outputs.is-committer-build }} - push-image: "true" - use-uv: ${{ needs.build-info.outputs.force-pip == 'true' && 'false' || 'true' }} - image-tag: ${{ needs.build-info.outputs.image-tag }} - platform: "linux/amd64" - python-versions: ${{ needs.build-info.outputs.python-versions }} - branch: ${{ needs.build-info.outputs.default-branch }} - constraints-branch: ${{ needs.build-info.outputs.constraints-branch }} - upgrade-to-newer-dependencies: ${{ 
needs.build-info.outputs.upgrade-to-newer-dependencies }} - docker-cache: ${{ needs.build-info.outputs.docker-cache }} - disable-airflow-repo-cache: ${{ needs.build-info.outputs.disable-airflow-repo-cache }} - - - generate-constraints: - name: Generate constraints - needs: [build-info, build-ci-images] - uses: ./.github/workflows/generate-constraints.yml - with: - runs-on-as-json-public: ${{ needs.build-info.outputs.runs-on-as-json-public }} - python-versions-list-as-string: ${{ needs.build-info.outputs.python-versions-list-as-string }} - # For regular PRs we do not need "no providers" constraints - they are only needed in canary builds - generate-no-providers-constraints: "false" - image-tag: ${{ needs.build-info.outputs.image-tag }} - chicken-egg-providers: ${{ needs.build-info.outputs.chicken-egg-providers }} - debug-resources: ${{ needs.build-info.outputs.debug-resources }} - - build-prod-images: - name: Build PROD images - permissions: - contents: read - packages: write - secrets: inherit - needs: [build-info, generate-constraints] - uses: ./.github/workflows/prod-image-build.yml - # Only run this it if the PR comes from fork, otherwise build will be done "in-PR-workflow" - if: | - needs.build-info.outputs.prod-image-build == 'true' && - github.event.pull_request.head.repo.full_name != 'apache/airflow' - with: - runs-on-as-json-public: ${{ needs.build-info.outputs.runs-on-as-json-public }} - build-type: "Regular" - do-build: ${{ needs.build-info.outputs.ci-image-build }} - upload-package-artifact: "true" - target-commit-sha: ${{ needs.build-info.outputs.target-commit-sha }} - pull-request-target: "true" - is-committer-build: ${{ needs.build-info.outputs.is-committer-build }} - push-image: "true" - use-uv: ${{ needs.build-info.outputs.force-pip == 'true' && 'false' || 'true' }} - image-tag: ${{ needs.build-info.outputs.image-tag }} - platform: linux/amd64 - python-versions: ${{ needs.build-info.outputs.python-versions }} - default-python-version: ${{ 
needs.build-info.outputs.default-python-version }} - branch: ${{ needs.build-info.outputs.default-branch }} - constraints-branch: ${{ needs.build-info.outputs.constraints-branch }} - build-provider-packages: ${{ needs.build-info.outputs.default-branch == 'main' }} - upgrade-to-newer-dependencies: ${{ needs.build-info.outputs.upgrade-to-newer-dependencies }} - chicken-egg-providers: ${{ needs.build-info.outputs.chicken-egg-providers }} - docker-cache: ${{ needs.build-info.outputs.docker-cache }} - disable-airflow-repo-cache: ${{ needs.build-info.outputs.disable-airflow-repo-cache }} diff --git a/.github/workflows/ci-image-build.yml b/.github/workflows/ci-image-build.yml index b8e2feac1755f..8334005743c67 100644 --- a/.github/workflows/ci-image-build.yml +++ b/.github/workflows/ci-image-build.yml @@ -28,13 +28,6 @@ on: # yamllint disable-line rule:truthy description: "The array of labels (in json form) determining self-hosted runners." required: true type: string - do-build: - description: > - Whether to actually do the build (true/false). If set to false, the build is done - already in pull-request-target workflow, so we skip it here. 
- required: false - default: "true" - type: string target-commit-sha: description: "The commit SHA to checkout for the build" required: false @@ -51,7 +44,7 @@ on: # yamllint disable-line rule:truthy default: "false" type: string platform: - description: "Platform for the build - 'linux/amd64' or 'linux/arm64'" + description: "Platform for the build - 'amd64' or 'arm64'" required: true type: string push-image: @@ -59,6 +52,11 @@ on: # yamllint disable-line rule:truthy required: false default: "true" type: string + upload-image-artifact: + description: "Whether to upload docker image artifact" + required: false + default: "false" + type: string debian-version: description: "Base Debian distribution to use for the build (bookworm)" type: string @@ -71,10 +69,6 @@ on: # yamllint disable-line rule:truthy description: "Whether to use uv to build the image (true/false)" required: true type: string - image-tag: - description: "Tag to set for the image" - required: true - type: string python-versions: description: "JSON-formatted array of Python versions to build images from" required: true @@ -105,13 +99,9 @@ jobs: fail-fast: true matrix: # yamllint disable-line rule:line-length - python-version: ${{ inputs.do-build == 'true' && fromJSON(inputs.python-versions) || fromJSON('[""]') }} + python-version: ${{ fromJSON(inputs.python-versions) || fromJSON('[""]') }} timeout-minutes: 110 - name: "\ -${{ inputs.do-build == 'true' && 'Build' || 'Skip building' }} \ -CI ${{ inputs.platform }} image\ -${{ matrix.python-version }}${{ inputs.do-build == 'true' && ':' || '' }}\ -${{ inputs.do-build == 'true' && inputs.image-tag || '' }}" + name: "Build CI ${{ inputs.platform }} image ${{ matrix.python-version }}" # The ARM images need to be built using self-hosted runners as ARM macos public runners # do not yet allow us to run docker effectively and fast. 
# https://github.com/actions/runner-images/issues/9254#issuecomment-1917916016 @@ -122,7 +112,7 @@ ${{ inputs.do-build == 'true' && inputs.image-tag || '' }}" # adding space before (with >) apparently turns the `runs-on` processed line into a string "Array" # instead of an array of strings. # yamllint disable-line rule:line-length - runs-on: ${{ (inputs.platform == 'linux/amd64') && fromJSON(inputs.runs-on-as-json-public) || fromJSON(inputs.runs-on-as-json-self-hosted) }} + runs-on: ${{ (inputs.platform == 'amd64') && fromJSON(inputs.runs-on-as-json-public) || fromJSON(inputs.runs-on-as-json-self-hosted) }} env: BACKEND: sqlite DEFAULT_BRANCH: ${{ inputs.branch }} @@ -137,41 +127,21 @@ ${{ inputs.do-build == 'true' && inputs.image-tag || '' }}" - name: "Cleanup repo" shell: bash run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" - if: inputs.do-build == 'true' - name: "Checkout target branch" uses: actions/checkout@v4 with: persist-credentials: false - - name: "Checkout target commit" - uses: ./.github/actions/checkout_target_commit - if: inputs.do-build == 'true' - with: - target-commit-sha: ${{ inputs.target-commit-sha }} - pull-request-target: ${{ inputs.pull-request-target }} - is-committer-build: ${{ inputs.is-committer-build }} - name: "Cleanup docker" run: ./scripts/ci/cleanup_docker.sh - if: inputs.do-build == 'true' - name: "Install Breeze" uses: ./.github/actions/breeze - if: inputs.do-build == 'true' - - name: "Regenerate dependencies in case they were modified manually so that we can build an image" - shell: bash - run: | - pip install rich>=12.4.4 pyyaml - python scripts/ci/pre_commit/update_providers_dependencies.py - if: inputs.do-build == 'true' && inputs.upgrade-to-newer-dependencies != 'false' - - name: "Start ARM instance" - run: ./scripts/ci/images/ci_start_arm_instance_and_connect_to_docker.sh - if: inputs.do-build == 'true' && inputs.platform == 'linux/arm64' - name: Login to ghcr.io run: echo "${{ 
env.GITHUB_TOKEN }}" | docker login ghcr.io -u ${{ github.actor }} --password-stdin - if: inputs.do-build == 'true' - name: > Build ${{ inputs.push-image == 'true' && ' & push ' || '' }} - ${{ inputs.platform }}:${{ matrix.python-version }}:${{ inputs.image-tag }} + ${{ inputs.platform }}:${{ matrix.python-version }} run: > - breeze ci-image build --builder airflow_cache --tag-as-latest --image-tag "${{ inputs.image-tag }}" + breeze ci-image build --builder airflow_cache --python "${{ matrix.python-version }}" --platform "${{ inputs.platform }}" env: DOCKER_CACHE: ${{ inputs.docker-cache }} @@ -189,7 +159,14 @@ ${{ inputs.do-build == 'true' && inputs.image-tag || '' }}" GITHUB_USERNAME: ${{ github.actor }} PUSH: ${{ inputs.push-image }} VERBOSE: "true" - if: inputs.do-build == 'true' - - name: "Stop ARM instance" - run: ./scripts/ci/images/ci_stop_arm_instance.sh - if: always() && inputs.do-build == 'true' && inputs.platform == 'linux/arm64' + - name: "Export CI docker image ${{ matrix.python-version }}" + run: breeze ci-image save --python "${{ matrix.python-version }}" --platform "${{ inputs.platform }}" + if: inputs.upload-image-artifact == 'true' + - name: "Stash CI docker image ${{ matrix.python-version }}" + uses: apache/infrastructure-actions/stash/save@c94b890bbedc2fc61466d28e6bd9966bc6c6643c + with: + key: "ci-image-save-${{ inputs.platform }}-${{ matrix.python-version }}" + path: "/tmp/ci-image-save-*-${{ matrix.python-version }}.tar" + if-no-files-found: 'error' + retention-days: 2 + if: inputs.upload-image-artifact == 'true' diff --git a/.github/workflows/ci-image-checks.yml b/.github/workflows/ci-image-checks.yml index 63598755c32d0..b5eb98e136f91 100644 --- a/.github/workflows/ci-image-checks.yml +++ b/.github/workflows/ci-image-checks.yml @@ -28,10 +28,6 @@ on: # yamllint disable-line rule:truthy description: "The array of labels (in json form) determining the labels used for docs build." 
required: true type: string - image-tag: - description: "Tag to set for the image" - required: true - type: string needs-mypy: description: "Whether to run mypy checks (true/false)" required: true @@ -117,7 +113,6 @@ jobs: env: PYTHON_MAJOR_MINOR_VERSION: "${{ inputs.default-python-version }}" UPGRADE_TO_NEWER_DEPENDENCIES: "${{ inputs.upgrade-to-newer-dependencies }}" - IMAGE_TAG: ${{ inputs.image-tag }} GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} if: inputs.basic-checks-only == 'false' && inputs.latest-versions-only != 'true' steps: @@ -134,10 +129,10 @@ jobs: python-version: ${{ inputs.default-python-version }} cache: 'pip' cache-dependency-path: ./dev/breeze/pyproject.toml - - name: "Cleanup docker" - run: ./scripts/ci/cleanup_docker.sh - - name: "Prepare breeze & CI image: ${{ inputs.default-python-version}}:${{ inputs.image-tag }}" + - name: "Prepare breeze & CI image: ${{ inputs.default-python-version}}" uses: ./.github/actions/prepare_breeze_and_image + with: + platform: "linux/amd64" id: breeze - name: "Install pre-commit" uses: ./.github/actions/install-pre-commit @@ -165,7 +160,6 @@ jobs: mypy-check: ${{ fromJSON(inputs.mypy-checks) }} env: PYTHON_MAJOR_MINOR_VERSION: "${{inputs.default-python-version}}" - IMAGE_TAG: "${{ inputs.image-tag }}" GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} steps: - name: "Cleanup repo" @@ -175,10 +169,10 @@ jobs: uses: actions/checkout@v4 with: persist-credentials: false - - name: "Cleanup docker" - run: ./scripts/ci/cleanup_docker.sh - - name: "Prepare breeze & CI image: ${{ inputs.default-python-version }}:${{ inputs.image-tag }}" + - name: "Prepare breeze & CI image: ${{ inputs.default-python-version}}" uses: ./.github/actions/prepare_breeze_and_image + with: + platform: "linux/amd64" id: breeze - name: "Install pre-commit" uses: ./.github/actions/install-pre-commit @@ -208,7 +202,6 @@ jobs: GITHUB_REPOSITORY: ${{ github.repository }} GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} GITHUB_USERNAME: ${{ github.actor }} - IMAGE_TAG: 
"${{ inputs.image-tag }}" INCLUDE_NOT_READY_PROVIDERS: "true" INCLUDE_SUCCESS_OUTPUTS: "${{ inputs.include-success-outputs }}" PYTHON_MAJOR_MINOR_VERSION: "${{ inputs.default-python-version }}" @@ -221,10 +214,10 @@ jobs: uses: actions/checkout@v4 with: persist-credentials: false - - name: "Cleanup docker" - run: ./scripts/ci/cleanup_docker.sh - - name: "Prepare breeze & CI image: ${{ inputs.default-python-version }}:${{ inputs.image-tag }}" + - name: "Prepare breeze & CI image: ${{ inputs.default-python-version}}" uses: ./.github/actions/prepare_breeze_and_image + with: + platform: "linux/amd64" - uses: actions/cache@v4 id: cache-doc-inventories with: @@ -254,7 +247,6 @@ jobs: GITHUB_REPOSITORY: ${{ github.repository }} GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} GITHUB_USERNAME: ${{ github.actor }} - IMAGE_TAG: "${{ inputs.image-tag }}" INCLUDE_NOT_READY_PROVIDERS: "true" INCLUDE_SUCCESS_OUTPUTS: "${{ inputs.include-success-outputs }}" PYTHON_MAJOR_MINOR_VERSION: "${{ inputs.default-python-version }}" @@ -283,8 +275,10 @@ jobs: run: > git clone https://github.com/apache/airflow-site.git /mnt/airflow-site/airflow-site && echo "AIRFLOW_SITE_DIRECTORY=/mnt/airflow-site/airflow-site" >> "$GITHUB_ENV" - - name: "Prepare breeze & CI image: ${{ inputs.default-python-version }}:${{ inputs.image-tag }}" + - name: "Prepare breeze & CI image: ${{ inputs.default-python-version}}" uses: ./.github/actions/prepare_breeze_and_image + with: + platform: "linux/amd64" - name: "Publish docs" run: > breeze release-management publish-docs --override-versioned --run-in-parallel @@ -331,7 +325,6 @@ jobs: GITHUB_REPOSITORY: ${{ github.repository }} GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} GITHUB_USERNAME: ${{ github.actor }} - IMAGE_TAG: "${{ inputs.image-tag }}" JOB_ID: "python-api-client-tests" PYTHON_MAJOR_MINOR_VERSION: "${{ inputs.default-python-version }}" VERBOSE: "true" @@ -353,8 +346,10 @@ jobs: fetch-depth: 1 persist-credentials: false path: ./airflow-client-python - - name: 
"Prepare breeze & CI image: ${{inputs.default-python-version}}:${{inputs.image-tag}}" + - name: "Prepare breeze & CI image: ${{ inputs.default-python-version}}" uses: ./.github/actions/prepare_breeze_and_image + with: + platform: "linux/amd64" - name: "Generate airflow python client" run: > breeze release-management prepare-python-client --package-format both diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 09cc3328dd8a7..4420152c8c8b8 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -32,7 +32,7 @@ on: # yamllint disable-line rule:truthy - providers-[a-z]+-?[a-z]*/v[0-9]+-[0-9]+ workflow_dispatch: permissions: - # All other permissions are set to none + # All other permissions are set to none by default contents: read # Technically read access while waiting for images should be more than enough. However, # there is a bug in GitHub Actions/Packages and in case private repositories are used, you get a permission @@ -44,7 +44,6 @@ env: GITHUB_REPOSITORY: ${{ github.repository }} GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} GITHUB_USERNAME: ${{ github.actor }} - IMAGE_TAG: "${{ github.event.pull_request.head.sha || github.sha }}" SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }} VERBOSE: "true" @@ -64,7 +63,6 @@ jobs: all-python-versions-list-as-string: >- ${{ steps.selective-checks.outputs.all-python-versions-list-as-string }} basic-checks-only: ${{ steps.selective-checks.outputs.basic-checks-only }} - build-job-description: ${{ steps.source-run-info.outputs.build-job-description }} canary-run: ${{ steps.source-run-info.outputs.canary-run }} chicken-egg-providers: ${{ steps.selective-checks.outputs.chicken-egg-providers }} ci-image-build: ${{ steps.selective-checks.outputs.ci-image-build }} @@ -88,8 +86,6 @@ jobs: full-tests-needed: ${{ steps.selective-checks.outputs.full-tests-needed }} has-migrations: ${{ steps.selective-checks.outputs.has-migrations }} helm-test-packages: ${{ steps.selective-checks.outputs.helm-test-packages 
}} - image-tag: ${{ github.event.pull_request.head.sha || github.sha }} - in-workflow-build: ${{ steps.source-run-info.outputs.in-workflow-build }} include-success-outputs: ${{ steps.selective-checks.outputs.include-success-outputs }} individual-providers-test-types-list-as-string: >- ${{ steps.selective-checks.outputs.individual-providers-test-types-list-as-string }} @@ -99,6 +95,7 @@ jobs: is-k8s-runner: ${{ steps.selective-checks.outputs.is-k8s-runner }} is-self-hosted-runner: ${{ steps.selective-checks.outputs.is-self-hosted-runner }} is-vm-runner: ${{ steps.selective-checks.outputs.is-vm-runner }} + kubernetes-combos: ${{ steps.selective-checks.outputs.kubernetes-combos }} kubernetes-combos-list-as-string: >- ${{ steps.selective-checks.outputs.kubernetes-combos-list-as-string }} kubernetes-versions-list-as-string: >- @@ -197,25 +194,21 @@ jobs: canary-run: ${{needs.build-info.outputs.canary-run}} latest-versions-only: ${{needs.build-info.outputs.latest-versions-only}} build-ci-images: - name: > - ${{ needs.build-info.outputs.in-workflow-build == 'true' && 'Build' || 'Skip building' }} - CI images in-workflow + name: Build CI images needs: [build-info] uses: ./.github/workflows/ci-image-build.yml permissions: contents: read # This write is only given here for `push` events from "apache/airflow" repo. It is not given for PRs # from forks. This is to prevent malicious PRs from creating images in the "apache/airflow" repo. 
- # For regular build for PRS this "build-prod-images" workflow will be skipped anyway by the - # "in-workflow-build" condition packages: write secrets: inherit with: runs-on-as-json-public: ${{ needs.build-info.outputs.runs-on-as-json-public }} runs-on-as-json-self-hosted: ${{ needs.build-info.outputs.runs-on-as-json-self-hosted }} - do-build: ${{ needs.build-info.outputs.in-workflow-build }} - image-tag: ${{ needs.build-info.outputs.image-tag }} platform: "linux/amd64" + push-image: "false" + upload-image-artifact: "true" python-versions: ${{ needs.build-info.outputs.python-versions }} branch: ${{ needs.build-info.outputs.default-branch }} use-uv: ${{ needs.build-info.outputs.force-pip == 'true' && 'false' || 'true' }} @@ -224,54 +217,15 @@ jobs: docker-cache: ${{ needs.build-info.outputs.docker-cache }} disable-airflow-repo-cache: ${{ needs.build-info.outputs.disable-airflow-repo-cache }} - wait-for-ci-images: - timeout-minutes: 120 - name: "Wait for CI images" - runs-on: ${{ fromJSON(needs.build-info.outputs.runs-on-as-json-public) }} - needs: [build-info, build-ci-images] - if: needs.build-info.outputs.ci-image-build == 'true' - env: - BACKEND: sqlite - # Force more parallelism for pull even on public images - PARALLELISM: 6 - INCLUDE_SUCCESS_OUTPUTS: "${{needs.build-info.outputs.include-success-outputs}}" - steps: - - name: "Cleanup repo" - shell: bash - run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" - if: needs.build-info.outputs.in-workflow-build == 'false' - - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v4 - with: - persist-credentials: false - if: needs.build-info.outputs.in-workflow-build == 'false' - - name: "Cleanup docker" - run: ./scripts/ci/cleanup_docker.sh - if: needs.build-info.outputs.in-workflow-build == 'false' - - name: "Install Breeze" - uses: ./.github/actions/breeze - if: needs.build-info.outputs.in-workflow-build == 'false' - - name: Login to ghcr.io - run: 
echo "${{ env.GITHUB_TOKEN }}" | docker login ghcr.io -u ${{ github.actor }} --password-stdin - if: needs.build-info.outputs.in-workflow-build == 'false' - - name: Wait for CI images ${{ env.PYTHON_VERSIONS }}:${{ needs.build-info.outputs.image-tag }} - id: wait-for-images - run: breeze ci-image pull --run-in-parallel --wait-for-image --tag-as-latest - env: - PYTHON_VERSIONS: ${{ needs.build-info.outputs.python-versions-list-as-string }} - DEBUG_RESOURCES: ${{needs.build-info.outputs.debug-resources}} - if: needs.build-info.outputs.in-workflow-build == 'false' - additional-ci-image-checks: name: "Additional CI image checks" - needs: [build-info, wait-for-ci-images] + needs: [build-info, build-ci-images] uses: ./.github/workflows/additional-ci-image-checks.yml if: needs.build-info.outputs.canary-run == 'true' with: runs-on-as-json-default: ${{ needs.build-info.outputs.runs-on-as-json-default }} runs-on-as-json-public: ${{ needs.build-info.outputs.runs-on-as-json-public }} runs-on-as-json-self-hosted: ${{ needs.build-info.outputs.runs-on-as-json-self-hosted }} - image-tag: ${{ needs.build-info.outputs.image-tag }} python-versions: ${{ needs.build-info.outputs.python-versions }} branch: ${{ needs.build-info.outputs.default-branch }} constraints-branch: ${{ needs.build-info.outputs.default-constraints-branch }} @@ -289,7 +243,7 @@ jobs: generate-constraints: name: "Generate constraints" - needs: [build-info, wait-for-ci-images] + needs: [build-info, build-ci-images] uses: ./.github/workflows/generate-constraints.yml if: > needs.build-info.outputs.ci-image-build == 'true' && @@ -300,19 +254,17 @@ jobs: # generate no providers constraints only in canary builds - they take quite some time to generate # they are not needed for regular builds, they are only needed to update constraints in canaries generate-no-providers-constraints: ${{ needs.build-info.outputs.canary-run }} - image-tag: ${{ needs.build-info.outputs.image-tag }} chicken-egg-providers: ${{ 
needs.build-info.outputs.chicken-egg-providers }} debug-resources: ${{ needs.build-info.outputs.debug-resources }} ci-image-checks: name: "CI image checks" - needs: [build-info, wait-for-ci-images] + needs: [build-info, build-ci-images] uses: ./.github/workflows/ci-image-checks.yml secrets: inherit with: runs-on-as-json-default: ${{ needs.build-info.outputs.runs-on-as-json-default }} runs-on-as-json-docs-build: ${{ needs.build-info.outputs.runs-on-as-json-docs-build }} - image-tag: ${{ needs.build-info.outputs.image-tag }} needs-mypy: ${{ needs.build-info.outputs.needs-mypy }} mypy-checks: ${{ needs.build-info.outputs.mypy-checks }} python-versions-list-as-string: ${{ needs.build-info.outputs.python-versions-list-as-string }} @@ -336,7 +288,7 @@ jobs: providers: name: "Provider packages tests" uses: ./.github/workflows/test-provider-packages.yml - needs: [build-info, wait-for-ci-images] + needs: [build-info, build-ci-images] permissions: contents: read packages: read @@ -346,7 +298,6 @@ jobs: needs.build-info.outputs.latest-versions-only != 'true' with: runs-on-as-json-default: ${{ needs.build-info.outputs.runs-on-as-json-default }} - image-tag: ${{ needs.build-info.outputs.image-tag }} canary-run: ${{ needs.build-info.outputs.canary-run }} default-python-version: ${{ needs.build-info.outputs.default-python-version }} upgrade-to-newer-dependencies: ${{ needs.build-info.outputs.upgrade-to-newer-dependencies }} @@ -360,7 +311,7 @@ jobs: tests-helm: name: "Helm tests" uses: ./.github/workflows/helm-tests.yml - needs: [build-info, wait-for-ci-images] + needs: [build-info, build-ci-images] permissions: contents: read packages: read @@ -369,7 +320,6 @@ jobs: runs-on-as-json-default: ${{ needs.build-info.outputs.runs-on-as-json-default }} runs-on-as-json-public: ${{ needs.build-info.outputs.runs-on-as-json-public }} helm-test-packages: ${{ needs.build-info.outputs.helm-test-packages }} - image-tag: ${{ needs.build-info.outputs.image-tag }} default-python-version: ${{ 
needs.build-info.outputs.default-python-version }} if: > needs.build-info.outputs.needs-helm-tests == 'true' && @@ -379,7 +329,7 @@ jobs: tests-postgres: name: "Postgres tests" uses: ./.github/workflows/run-unit-tests.yml - needs: [build-info, wait-for-ci-images] + needs: [build-info, build-ci-images] permissions: contents: read packages: read @@ -390,7 +340,6 @@ jobs: test-name: "Postgres" test-scope: "DB" test-groups: ${{ needs.build-info.outputs.test-groups }} - image-tag: ${{ needs.build-info.outputs.image-tag }} python-versions: ${{ needs.build-info.outputs.python-versions }} backend-versions: ${{ needs.build-info.outputs.postgres-versions }} excluded-providers-as-string: ${{ needs.build-info.outputs.excluded-providers-as-string }} @@ -406,7 +355,7 @@ jobs: tests-mysql: name: "MySQL tests" uses: ./.github/workflows/run-unit-tests.yml - needs: [build-info, wait-for-ci-images] + needs: [build-info, build-ci-images] permissions: contents: read packages: read @@ -417,7 +366,6 @@ jobs: test-name: "MySQL" test-scope: "DB" test-groups: ${{ needs.build-info.outputs.test-groups }} - image-tag: ${{ needs.build-info.outputs.image-tag }} python-versions: ${{ needs.build-info.outputs.python-versions }} backend-versions: ${{ needs.build-info.outputs.mysql-versions }} excluded-providers-as-string: ${{ needs.build-info.outputs.excluded-providers-as-string }} @@ -433,7 +381,7 @@ jobs: tests-sqlite: name: "Sqlite tests" uses: ./.github/workflows/run-unit-tests.yml - needs: [build-info, wait-for-ci-images] + needs: [build-info, build-ci-images] permissions: contents: read packages: read @@ -445,7 +393,6 @@ jobs: test-name-separator: "" test-scope: "DB" test-groups: ${{ needs.build-info.outputs.test-groups }} - image-tag: ${{ needs.build-info.outputs.image-tag }} python-versions: ${{ needs.build-info.outputs.python-versions }} # No versions for sqlite backend-versions: "['']" @@ -462,7 +409,7 @@ jobs: tests-non-db: name: "Non-DB tests" uses: ./.github/workflows/run-unit-tests.yml 
- needs: [build-info, wait-for-ci-images] + needs: [build-info, build-ci-images] permissions: contents: read packages: read @@ -474,7 +421,6 @@ jobs: test-name-separator: "" test-scope: "Non-DB" test-groups: ${{ needs.build-info.outputs.test-groups }} - image-tag: ${{ needs.build-info.outputs.image-tag }} python-versions: ${{ needs.build-info.outputs.python-versions }} # No versions for non-db backend-versions: "['']" @@ -490,7 +436,7 @@ jobs: tests-special: name: "Special tests" uses: ./.github/workflows/special-tests.yml - needs: [build-info, wait-for-ci-images] + needs: [build-info, build-ci-images] permissions: contents: read packages: read @@ -504,7 +450,6 @@ jobs: test-groups: ${{ needs.build-info.outputs.test-groups }} default-branch: ${{ needs.build-info.outputs.default-branch }} runs-on-as-json-default: ${{ needs.build-info.outputs.runs-on-as-json-default }} - image-tag: ${{ needs.build-info.outputs.image-tag }} core-test-types-list-as-string: ${{ needs.build-info.outputs.core-test-types-list-as-string }} providers-test-types-list-as-string: ${{ needs.build-info.outputs.providers-test-types-list-as-string }} run-coverage: ${{ needs.build-info.outputs.run-coverage }} @@ -519,7 +464,7 @@ jobs: tests-integration-system: name: Integration and System Tests - needs: [build-info, wait-for-ci-images] + needs: [build-info, build-ci-images] uses: ./.github/workflows/integration-system-tests.yml permissions: contents: read @@ -527,7 +472,6 @@ jobs: secrets: inherit with: runs-on-as-json-public: ${{ needs.build-info.outputs.runs-on-as-json-public }} - image-tag: ${{ needs.build-info.outputs.image-tag }} testable-core-integrations: ${{ needs.build-info.outputs.testable-core-integrations }} testable-providers-integrations: ${{ needs.build-info.outputs.testable-providers-integrations }} run-system-tests: ${{ needs.build-info.outputs.run-tests }} @@ -541,7 +485,7 @@ jobs: tests-with-lowest-direct-resolution: name: "Lowest direct dependency providers tests" - needs: 
[build-info, wait-for-ci-images] + needs: [build-info, build-ci-images] uses: ./.github/workflows/run-unit-tests.yml permissions: contents: read @@ -556,7 +500,6 @@ jobs: test-scope: "All" test-groups: ${{ needs.build-info.outputs.test-groups }} backend: "postgres" - image-tag: ${{ needs.build-info.outputs.image-tag }} python-versions: ${{ needs.build-info.outputs.python-versions }} backend-versions: "['${{ needs.build-info.outputs.default-postgres-version }}']" excluded-providers-as-string: ${{ needs.build-info.outputs.excluded-providers-as-string }} @@ -570,30 +513,25 @@ jobs: monitor-delay-time-in-seconds: 120 build-prod-images: - name: > - ${{ needs.build-info.outputs.in-workflow-build == 'true' && 'Build' || 'Skip building' }} - PROD images in-workflow + name: Build PROD images needs: [build-info, build-ci-images, generate-constraints] uses: ./.github/workflows/prod-image-build.yml permissions: contents: read # This write is only given here for `push` events from "apache/airflow" repo. It is not given for PRs # from forks. This is to prevent malicious PRs from creating images in the "apache/airflow" repo. 
- # For regular build for PRS this "build-prod-images" workflow will be skipped anyway by the - # "in-workflow-build" condition packages: write secrets: inherit with: runs-on-as-json-public: ${{ needs.build-info.outputs.runs-on-as-json-public }} build-type: "Regular" - do-build: ${{ needs.build-info.outputs.in-workflow-build }} - upload-package-artifact: "true" - image-tag: ${{ needs.build-info.outputs.image-tag }} platform: "linux/amd64" + push-image: "false" + upload-image-artifact: "true" + upload-package-artifact: "true" python-versions: ${{ needs.build-info.outputs.python-versions }} default-python-version: ${{ needs.build-info.outputs.default-python-version }} branch: ${{ needs.build-info.outputs.default-branch }} - push-image: "true" use-uv: ${{ needs.build-info.outputs.force-pip == 'true' && 'false' || 'true' }} build-provider-packages: ${{ needs.build-info.outputs.default-branch == 'main' }} upgrade-to-newer-dependencies: ${{ needs.build-info.outputs.upgrade-to-newer-dependencies }} @@ -602,58 +540,14 @@ jobs: docker-cache: ${{ needs.build-info.outputs.docker-cache }} disable-airflow-repo-cache: ${{ needs.build-info.outputs.disable-airflow-repo-cache }} - wait-for-prod-images: - timeout-minutes: 80 - name: "Wait for PROD images" - runs-on: ${{ fromJSON(needs.build-info.outputs.runs-on-as-json-public) }} - needs: [build-info, wait-for-ci-images, build-prod-images] - if: needs.build-info.outputs.prod-image-build == 'true' - env: - BACKEND: sqlite - PYTHON_MAJOR_MINOR_VERSION: "${{needs.build-info.outputs.default-python-version}}" - # Force more parallelism for pull on public images - PARALLELISM: 6 - INCLUDE_SUCCESS_OUTPUTS: "${{needs.build-info.outputs.include-success-outputs}}" - IMAGE_TAG: ${{ needs.build-info.outputs.image-tag }} - steps: - - name: "Cleanup repo" - shell: bash - run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" - if: needs.build-info.outputs.in-workflow-build == 'false' - - name: "Checkout ${{ 
github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v4 - with: - persist-credentials: false - if: needs.build-info.outputs.in-workflow-build == 'false' - - name: "Cleanup docker" - run: ./scripts/ci/cleanup_docker.sh - if: needs.build-info.outputs.in-workflow-build == 'false' - - name: "Install Breeze" - uses: ./.github/actions/breeze - if: needs.build-info.outputs.in-workflow-build == 'false' - - name: Login to ghcr.io - run: echo "${{ env.GITHUB_TOKEN }}" | docker login ghcr.io -u ${{ github.actor }} --password-stdin - if: needs.build-info.outputs.in-workflow-build == 'false' - - name: Wait for PROD images ${{ env.PYTHON_VERSIONS }}:${{ needs.build-info.outputs.image-tag }} - # We wait for the images to be available either from "build-images.yml' run as pull_request_target - # or from build-prod-images (or build-prod-images-release-branch) above. - # We are utilising single job to wait for all images because this job merely waits - # For the images to be available. - run: breeze prod-image pull --wait-for-image --run-in-parallel - env: - PYTHON_VERSIONS: ${{ needs.build-info.outputs.python-versions-list-as-string }} - DEBUG_RESOURCES: ${{ needs.build-info.outputs.debug-resources }} - if: needs.build-info.outputs.in-workflow-build == 'false' - additional-prod-image-tests: name: "Additional PROD image tests" - needs: [build-info, wait-for-prod-images, generate-constraints] + needs: [build-info, build-prod-images, generate-constraints] uses: ./.github/workflows/additional-prod-image-tests.yml with: runs-on-as-json-public: ${{ needs.build-info.outputs.runs-on-as-json-public }} default-branch: ${{ needs.build-info.outputs.default-branch }} constraints-branch: ${{ needs.build-info.outputs.default-constraints-branch }} - image-tag: ${{ needs.build-info.outputs.image-tag }} upgrade-to-newer-dependencies: ${{ needs.build-info.outputs.upgrade-to-newer-dependencies }} chicken-egg-providers: ${{ needs.build-info.outputs.chicken-egg-providers }} docker-cache: ${{ 
needs.build-info.outputs.docker-cache }} @@ -665,20 +559,18 @@ jobs: tests-kubernetes: name: "Kubernetes tests" uses: ./.github/workflows/k8s-tests.yml - needs: [build-info, wait-for-prod-images] + needs: [build-info, build-prod-images] permissions: contents: read packages: read secrets: inherit with: runs-on-as-json-default: ${{ needs.build-info.outputs.runs-on-as-json-default }} - image-tag: ${{ needs.build-info.outputs.image-tag }} python-versions-list-as-string: ${{ needs.build-info.outputs.python-versions-list-as-string }} - kubernetes-versions-list-as-string: ${{ needs.build-info.outputs.kubernetes-versions-list-as-string }} - kubernetes-combos-list-as-string: ${{ needs.build-info.outputs.kubernetes-combos-list-as-string }} include-success-outputs: ${{ needs.build-info.outputs.include-success-outputs }} use-uv: ${{ needs.build-info.outputs.force-pip == 'true' && 'false' || 'true' }} debug-resources: ${{ needs.build-info.outputs.debug-resources }} + kubernetes-combos: ${{ needs.build-info.outputs.kubernetes-combos }} if: > ( needs.build-info.outputs.run-kubernetes-tests == 'true' || needs.build-info.outputs.needs-helm-tests == 'true') @@ -686,14 +578,13 @@ jobs: tests-task-sdk: name: "Task SDK tests" uses: ./.github/workflows/task-sdk-tests.yml - needs: [build-info, wait-for-ci-images] + needs: [build-info, build-ci-images] permissions: contents: read packages: read secrets: inherit with: runs-on-as-json-default: ${{ needs.build-info.outputs.runs-on-as-json-default }} - image-tag: ${{ needs.build-info.outputs.image-tag }} default-python-version: ${{ needs.build-info.outputs.default-python-version }} python-versions: ${{ needs.build-info.outputs.python-versions }} run-task-sdk-tests: ${{ needs.build-info.outputs.run-task-sdk-tests }} @@ -711,8 +602,6 @@ jobs: needs: - build-info - generate-constraints - - wait-for-ci-images - - wait-for-prod-images - ci-image-checks - tests-sqlite - tests-mysql @@ -723,13 +612,11 @@ jobs: with: runs-on-as-json-public: ${{ 
needs.build-info.outputs.runs-on-as-json-public }} runs-on-as-json-self-hosted: ${{ needs.build-info.outputs.runs-on-as-json-self-hosted }} - image-tag: ${{ needs.build-info.outputs.image-tag }} python-versions: ${{ needs.build-info.outputs.python-versions }} python-versions-list-as-string: ${{ needs.build-info.outputs.python-versions-list-as-string }} branch: ${{ needs.build-info.outputs.default-branch }} constraints-branch: ${{ needs.build-info.outputs.default-constraints-branch }} default-python-version: ${{ needs.build-info.outputs.default-python-version }} - in-workflow-build: ${{ needs.build-info.outputs.in-workflow-build }} upgrade-to-newer-dependencies: ${{ needs.build-info.outputs.upgrade-to-newer-dependencies }} include-success-outputs: ${{ needs.build-info.outputs.include-success-outputs }} docker-cache: ${{ needs.build-info.outputs.docker-cache }} diff --git a/.github/workflows/finalize-tests.yml b/.github/workflows/finalize-tests.yml index 6f9bc74168b42..b8fd240235f10 100644 --- a/.github/workflows/finalize-tests.yml +++ b/.github/workflows/finalize-tests.yml @@ -28,10 +28,6 @@ on: # yamllint disable-line rule:truthy description: "The array of labels (in json form) determining self-hosted runners." 
required: true type: string - image-tag: - description: "Tag to set for the image" - required: true - type: string python-versions: description: "JSON-formatted array of Python versions to test" required: true @@ -52,10 +48,6 @@ on: # yamllint disable-line rule:truthy description: "Which version of python should be used by default" required: true type: string - in-workflow-build: - description: "Whether the build is executed as part of the workflow (true/false)" - required: true - type: string upgrade-to-newer-dependencies: description: "Whether to upgrade to newer dependencies (true/false)" required: true @@ -87,7 +79,6 @@ jobs: env: DEBUG_RESOURCES: ${{ inputs.debug-resources}} PYTHON_VERSIONS: ${{ inputs.python-versions-list-as-string }} - IMAGE_TAG: ${{ inputs.image-tag }} GITHUB_REPOSITORY: ${{ github.repository }} GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} GITHUB_USERNAME: ${{ github.actor }} diff --git a/.github/workflows/generate-constraints.yml b/.github/workflows/generate-constraints.yml index d6e536dfd091a..d73a1d37dc760 100644 --- a/.github/workflows/generate-constraints.yml +++ b/.github/workflows/generate-constraints.yml @@ -32,10 +32,6 @@ on: # yamllint disable-line rule:truthy description: "Whether to generate constraints without providers (true/false)" required: true type: string - image-tag: - description: "Tag to set for the image" - required: true - type: string chicken-egg-providers: description: "Space-separated list of providers that should be installed from context files" required: true @@ -57,7 +53,6 @@ jobs: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} GITHUB_USERNAME: ${{ github.actor }} INCLUDE_SUCCESS_OUTPUTS: "true" - IMAGE_TAG: ${{ inputs.image-tag }} PYTHON_VERSIONS: ${{ inputs.python-versions-list-as-string }} VERBOSE: "true" VERSION_SUFFIX_FOR_PYPI: "dev0" @@ -69,21 +64,15 @@ jobs: uses: actions/checkout@v4 with: persist-credentials: false - - name: "Cleanup docker" - run: ./scripts/ci/cleanup_docker.sh - name: "Install Breeze" uses: 
./.github/actions/breeze - - name: Login to ghcr.io - run: echo "${{ env.GITHUB_TOKEN }}" | docker login ghcr.io -u ${{ github.actor }} --password-stdin - - name: "\ - Pull CI images \ - ${{ inputs.python-versions-list-as-string }}:\ - ${{ inputs.image-tag }}" - run: breeze ci-image pull --run-in-parallel --tag-as-latest - - name: " - Verify CI images \ - ${{ inputs.python-versions-list-as-string }}:\ - ${{ inputs.image-tag }}" + id: breeze + - name: "Prepare CI images: ${{ inputs.python-versions-list-as-string}}" + uses: ./.github/actions/prepare_all_images + with: + platform: "linux/amd64" + python-versions-list-as-string: ${{ inputs.python-versions-list-as-string }} + - name: "Verify CI images ${{ inputs.python-versions-list-as-string }}" run: breeze ci-image verify --run-in-parallel - name: "Source constraints" shell: bash diff --git a/.github/workflows/helm-tests.yml b/.github/workflows/helm-tests.yml index 4c1ec1023fc90..a54de11a4a933 100644 --- a/.github/workflows/helm-tests.yml +++ b/.github/workflows/helm-tests.yml @@ -32,10 +32,6 @@ on: # yamllint disable-line rule:truthy description: "Stringified JSON array of helm test packages to test" required: true type: string - image-tag: - description: "Tag to set for the image" - required: true - type: string default-python-version: description: "Which version of python should be used by default" required: true @@ -57,7 +53,6 @@ jobs: DB_RESET: "false" JOB_ID: "helm-tests" USE_XDIST: "true" - IMAGE_TAG: "${{ inputs.image-tag }}" GITHUB_REPOSITORY: ${{ github.repository }} GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} GITHUB_USERNAME: ${{ github.actor }} @@ -70,10 +65,10 @@ jobs: uses: actions/checkout@v4 with: persist-credentials: false - - name: "Cleanup docker" - run: ./scripts/ci/cleanup_docker.sh - - name: "Prepare breeze & CI image: ${{inputs.default-python-version}}:${{inputs.image-tag}}" + - name: "Prepare breeze & CI image: ${{ inputs.default-python-version}}" uses: ./.github/actions/prepare_breeze_and_image 
+ with: + platform: "linux/amd64" - name: "Helm Unit Tests: ${{ matrix.helm-test-package }}" run: breeze testing helm-tests --test-type "${{ matrix.helm-test-package }}" diff --git a/.github/workflows/integration-system-tests.yml b/.github/workflows/integration-system-tests.yml index 7fde2ae968363..67bc0320a5ee0 100644 --- a/.github/workflows/integration-system-tests.yml +++ b/.github/workflows/integration-system-tests.yml @@ -24,10 +24,6 @@ on: # yamllint disable-line rule:truthy description: "The array of labels (in json form) determining public runners." required: true type: string - image-tag: - description: "Tag to set for the image" - required: true - type: string testable-core-integrations: description: "The list of testable core integrations as JSON array." required: true @@ -75,7 +71,6 @@ jobs: matrix: integration: ${{ fromJSON(inputs.testable-core-integrations) }} env: - IMAGE_TAG: "${{ inputs.image-tag }}" BACKEND: "postgres" BACKEND_VERSION: ${{ inputs.default-postgres-version }}" PYTHON_MAJOR_MINOR_VERSION: "${{ inputs.default-python-version }}" @@ -95,10 +90,10 @@ jobs: uses: actions/checkout@v4 with: persist-credentials: false - - name: "Cleanup docker" - run: ./scripts/ci/cleanup_docker.sh - - name: "Prepare breeze & CI image: ${{ inputs.default-python-version }}:${{ inputs.image-tag }}" + - name: "Prepare breeze & CI image: ${{ inputs.default-python-version}}" uses: ./.github/actions/prepare_breeze_and_image + with: + platform: "linux/amd64" - name: "Integration: core ${{ matrix.integration }}" # yamllint disable rule:line-length run: ./scripts/ci/testing/run_integration_tests_with_retry.sh core "${{ matrix.integration }}" @@ -121,7 +116,6 @@ jobs: matrix: integration: ${{ fromJSON(inputs.testable-providers-integrations) }} env: - IMAGE_TAG: "${{ inputs.image-tag }}" BACKEND: "postgres" BACKEND_VERSION: ${{ inputs.default-postgres-version }}" PYTHON_MAJOR_MINOR_VERSION: "${{ inputs.default-python-version }}" @@ -141,10 +135,10 @@ jobs: uses: 
actions/checkout@v4 with: persist-credentials: false - - name: "Cleanup docker" - run: ./scripts/ci/cleanup_docker.sh - - name: "Prepare breeze & CI image: ${{ inputs.default-python-version }}:${{ inputs.image-tag }}" + - name: "Prepare breeze & CI image: ${{ inputs.default-python-version}}" uses: ./.github/actions/prepare_breeze_and_image + with: + platform: "linux/amd64" - name: "Integration: providers ${{ matrix.integration }}" run: ./scripts/ci/testing/run_integration_tests_with_retry.sh providers "${{ matrix.integration }}" - name: "Post Tests success" @@ -162,7 +156,6 @@ jobs: name: "System Tests" runs-on: ${{ fromJSON(inputs.runs-on-as-json-public) }} env: - IMAGE_TAG: "${{ inputs.image-tag }}" BACKEND: "postgres" BACKEND_VERSION: ${{ inputs.default-postgres-version }}" PYTHON_MAJOR_MINOR_VERSION: "${{ inputs.default-python-version }}" @@ -182,10 +175,10 @@ jobs: uses: actions/checkout@v4 with: persist-credentials: false - - name: "Cleanup docker" - run: ./scripts/ci/cleanup_docker.sh - - name: "Prepare breeze & CI image: ${{ inputs.default-python-version }}:${{ inputs.image-tag }}" + - name: "Prepare breeze & CI image: ${{ inputs.default-python-version}}" uses: ./.github/actions/prepare_breeze_and_image + with: + platform: "linux/amd64" - name: "System Tests" run: > ./scripts/ci/testing/run_system_tests.sh diff --git a/.github/workflows/k8s-tests.yml b/.github/workflows/k8s-tests.yml index 3b3e067038db9..26fc00b87d8da 100644 --- a/.github/workflows/k8s-tests.yml +++ b/.github/workflows/k8s-tests.yml @@ -24,20 +24,12 @@ on: # yamllint disable-line rule:truthy description: "The array of labels (in json form) determining default runner used for the build." 
required: true type: string - image-tag: - description: "Tag to set for the image" - required: true - type: string python-versions-list-as-string: description: "List of Python versions to test: space separated string" required: true type: string - kubernetes-versions-list-as-string: - description: "List of Kubernetes versions to test" - required: true - type: string - kubernetes-combos-list-as-string: - description: "List of combinations of Kubernetes and Python versions to test: space separated string" + kubernetes-combos: + description: "Array of combinations of Kubernetes and Python versions to test" required: true type: string include-success-outputs: @@ -55,19 +47,17 @@ on: # yamllint disable-line rule:truthy jobs: tests-kubernetes: timeout-minutes: 240 - name: "\ - K8S System:${{ matrix.executor }} - ${{ matrix.use-standard-naming }} - \ - ${{ inputs.kubernetes-versions-list-as-string }}" + name: "K8S System:${{ matrix.executor }}-${{ matrix.kubernetes-combo }}-${{ matrix.use-standard-naming }}" runs-on: ${{ fromJSON(inputs.runs-on-as-json-default) }} strategy: matrix: executor: [KubernetesExecutor, CeleryExecutor, LocalExecutor] use-standard-naming: [true, false] + kubernetes-combo: ${{ fromJSON(inputs.kubernetes-combos) }} fail-fast: false env: DEBUG_RESOURCES: ${{ inputs.debug-resources }} INCLUDE_SUCCESS_OUTPUTS: ${{ inputs.include-success-outputs }} - IMAGE_TAG: ${{ inputs.image-tag }} GITHUB_REPOSITORY: ${{ github.repository }} GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} GITHUB_USERNAME: ${{ github.actor }} @@ -76,23 +66,25 @@ jobs: - name: "Cleanup repo" shell: bash run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" + - name: "Prepare PYTHON_MAJOR_MINOR_VERSION and KUBERNETES_VERSION" + id: prepare-versions + run: | + echo "PYTHON_MAJOR_MINOR_VERSION=${{ matrix.kubernetes-combo }}" | sed 's/=[^-]*-/=/' >> $GITHUB_ENV + echo "KUBERNETES_VERSION=${{ matrix.kubernetes-combo }}" | sed 's/-.*//' >> $GITHUB_ENV - name:
"Checkout ${{ github.ref }} ( ${{ github.sha }} )" uses: actions/checkout@v4 with: persist-credentials: false - - name: "Cleanup docker" - run: ./scripts/ci/cleanup_docker.sh - name: "Install Breeze" uses: ./.github/actions/breeze id: breeze - - name: Login to ghcr.io - run: echo "${{ env.GITHUB_TOKEN }}" | docker login ghcr.io -u ${{ github.actor }} --password-stdin - - name: Pull PROD images ${{ inputs.python-versions-list-as-string }}:${{ inputs.image-tag }} - run: breeze prod-image pull --run-in-parallel --tag-as-latest - env: - PYTHON_VERSIONS: ${{ inputs.python-versions-list-as-string }} - # Force more parallelism for pull even on public images - PARALLELISM: 6 + - name: Restore PROD image ${{ env.PYTHON_MAJOR_MINOR_VERSION }} + uses: apache/infrastructure-actions/stash/restore@c94b890bbedc2fc61466d28e6bd9966bc6c6643c + with: + key: "prod-image-save-${{ inputs.platform }}-${{ env.PYTHON_MAJOR_MINOR_VERSION }}" + path: "/tmp/" + - name: Import PROD image ${{ env.PYTHON_MAJOR_MINOR_VERSION }} + run: breeze prod-image load --platform "${{ inputs.platform }}" - name: "Cache bin folder with tools for kubernetes testing" uses: actions/cache@v4 with: @@ -103,26 +95,34 @@ jobs: - name: "Switch breeze to use uv" run: breeze setup config --use-uv if: inputs.use-uv == 'true' - - name: Run complete K8S tests ${{ inputs.kubernetes-combos-list-as-string }} - run: breeze k8s run-complete-tests --run-in-parallel --upgrade --no-copy-local-sources + - name: "\ + Run complete K8S tests ${{ matrix.executor }}-${{ env.PYTHON_MAJOR_MINOR_VERSION }}-\ + ${{env.KUBERNETES_VERSION}}-${{ matrix.use-standard-naming }}" + run: breeze k8s run-complete-tests --upgrade --no-copy-local-sources env: - PYTHON_VERSIONS: ${{ inputs.python-versions-list-as-string }} - KUBERNETES_VERSIONS: ${{ inputs.kubernetes-versions-list-as-string }} EXECUTOR: ${{ matrix.executor }} USE_STANDARD_NAMING: ${{ matrix.use-standard-naming }} VERBOSE: "false" - - name: Upload KinD logs on failure ${{ 
inputs.kubernetes-combos-list-as-string }} + - name: "\ + Upload KinD logs on failure ${{ matrix.executor }}-${{ matrix.kubernetes-combo }}-\ + ${{ matrix.use-standard-naming }}" uses: actions/upload-artifact@v4 if: failure() || cancelled() with: - name: kind-logs-${{ matrix.executor }}-${{ matrix.use-standard-naming }} + name: "\ + kind-logs-${{ matrix.kubernetes-combo }}-${{ matrix.executor }}-\ + ${{ matrix.use-standard-naming }}" path: /tmp/kind_logs_* retention-days: 7 - - name: Upload test resource logs on failure ${{ inputs.kubernetes-combos-list-as-string }} + - name: "\ + Upload test resource logs on failure ${{ matrix.executor }}-${{ matrix.kubernetes-combo }}-\ + ${{ matrix.use-standard-naming }}" uses: actions/upload-artifact@v4 if: failure() || cancelled() with: - name: k8s-test-resources-${{ matrix.executor }}-${{ matrix.use-standard-naming }} + name: "\ + k8s-test-resources-${{ matrix.kubernetes-combo }}-${{ matrix.executor }}-\ + ${{ matrix.use-standard-naming }}" path: /tmp/k8s_test_resources_* retention-days: 7 - name: "Delete clusters just in case they are left" diff --git a/.github/workflows/prod-image-build.yml b/.github/workflows/prod-image-build.yml index df4f24981ff30..baa0867aaf822 100644 --- a/.github/workflows/prod-image-build.yml +++ b/.github/workflows/prod-image-build.yml @@ -30,13 +30,6 @@ on: # yamllint disable-line rule:truthy variations. required: true type: string - do-build: - description: > - Whether to actually do the build (true/false). If set to false, the build is done - already in pull-request-target workflow, so we skip it here. - required: false - default: "true" - type: string upload-package-artifact: description: > Whether to upload package artifacts (true/false). 
If false, the job will rely on artifacts prepared @@ -62,6 +55,11 @@ on: # yamllint disable-line rule:truthy description: "Whether to push image to the registry (true/false)" required: true type: string + upload-image-artifact: + description: "Whether to upload docker image artifact" + required: false + default: "false" + type: string debian-version: description: "Base Debian distribution to use for the build (bookworm)" type: string @@ -74,10 +72,6 @@ on: # yamllint disable-line rule:truthy description: "Whether to use uv to build the image (true/false)" required: true type: string - image-tag: - description: "Tag to set for the image" - required: true - type: string python-versions: description: "JSON-formatted array of Python versions to build images from" required: true @@ -87,7 +81,7 @@ on: # yamllint disable-line rule:truthy required: true type: string platform: - description: "Platform for the build - 'linux/amd64' or 'linux/arm64'" + description: "Platform for the build - 'amd64' or 'arm64'" required: true type: string branch: @@ -121,7 +115,7 @@ on: # yamllint disable-line rule:truthy jobs: build-prod-packages: - name: "${{ inputs.do-build == 'true' && 'Build' || 'Skip building' }} Airflow and provider packages" + name: "Build Airflow and provider packages" timeout-minutes: 10 runs-on: ${{ fromJSON(inputs.runs-on-as-json-public) }} env: @@ -131,32 +125,25 @@ jobs: - name: "Cleanup repo" shell: bash run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" - if: inputs.do-build == 'true' && inputs.upload-package-artifact == 'true' + if: inputs.upload-package-artifact == 'true' - name: "Checkout target branch" uses: actions/checkout@v4 with: persist-credentials: false - - name: "Checkout target commit" - uses: ./.github/actions/checkout_target_commit - with: - target-commit-sha: ${{ inputs.target-commit-sha }} - pull-request-target: ${{ inputs.pull-request-target }} - is-committer-build: ${{ inputs.is-committer-build }} - if: 
inputs.do-build == 'true' && inputs.upload-package-artifact == 'true' - name: "Cleanup docker" run: ./scripts/ci/cleanup_docker.sh - if: inputs.do-build == 'true' && inputs.upload-package-artifact == 'true' + if: inputs.upload-package-artifact == 'true' - uses: actions/setup-python@v5 with: python-version: "${{ inputs.default-python-version }}" - if: inputs.do-build == 'true' && inputs.upload-package-artifact == 'true' + if: inputs.upload-package-artifact == 'true' - name: "Cleanup dist and context file" shell: bash run: rm -fv ./dist/* ./docker-context-files/* - if: inputs.do-build == 'true' && inputs.upload-package-artifact == 'true' + if: inputs.upload-package-artifact == 'true' - name: "Install Breeze" uses: ./.github/actions/breeze - if: inputs.do-build == 'true' && inputs.upload-package-artifact == 'true' + if: inputs.upload-package-artifact == 'true' - name: "Prepare providers packages" shell: bash run: > @@ -164,7 +151,6 @@ jobs: --package-list-file ./prod_image_installed_providers.txt --package-format wheel if: > - inputs.do-build == 'true' && inputs.upload-package-artifact == 'true' && inputs.build-provider-packages == 'true' - name: "Prepare chicken-eggs provider packages" @@ -173,19 +159,18 @@ jobs: breeze release-management prepare-provider-packages --package-format wheel ${{ inputs.chicken-egg-providers }} if: > - inputs.do-build == 'true' && inputs.upload-package-artifact == 'true' && inputs.chicken-egg-providers != '' - name: "Prepare airflow package" shell: bash run: > breeze release-management prepare-airflow-package --package-format wheel - if: inputs.do-build == 'true' && inputs.upload-package-artifact == 'true' + if: inputs.upload-package-artifact == 'true' - name: "Prepare task-sdk package" shell: bash run: > breeze release-management prepare-task-sdk-package --package-format wheel - if: inputs.do-build == 'true' && inputs.upload-package-artifact == 'true' + if: inputs.upload-package-artifact == 'true' - name: "Upload prepared packages as 
artifacts" uses: actions/upload-artifact@v4 with: @@ -193,20 +178,15 @@ jobs: path: ./dist retention-days: 7 if-no-files-found: error - if: inputs.do-build == 'true' && inputs.upload-package-artifact == 'true' + if: inputs.upload-package-artifact == 'true' build-prod-images: strategy: fail-fast: false matrix: - # yamllint disable-line rule:line-length - python-version: ${{ inputs.do-build == 'true' && fromJSON(inputs.python-versions) || fromJSON('[""]') }} + python-version: ${{ fromJSON(inputs.python-versions) || fromJSON('[""]') }} timeout-minutes: 80 - name: "\ -${{ inputs.do-build == 'true' && 'Build' || 'Skip building' }} \ -PROD ${{ inputs.build-type }} image\ -${{ matrix.python-version }}${{ inputs.do-build == 'true' && ':' || '' }}\ -${{ inputs.do-build == 'true' && inputs.image-tag || '' }}" + name: "Build PROD ${{ inputs.build-type }} image ${{ matrix.python-version }}" runs-on: ${{ fromJSON(inputs.runs-on-as-json-public) }} needs: - build-prod-packages @@ -231,54 +211,34 @@ ${{ inputs.do-build == 'true' && inputs.image-tag || '' }}" - name: "Cleanup repo" shell: bash run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" - if: inputs.do-build == 'true' - name: "Checkout target branch" uses: actions/checkout@v4 with: persist-credentials: false - - name: "Checkout target commit" - uses: ./.github/actions/checkout_target_commit - with: - target-commit-sha: ${{ inputs.target-commit-sha }} - pull-request-target: ${{ inputs.pull-request-target }} - is-committer-build: ${{ inputs.is-committer-build }} - if: inputs.do-build == 'true' - name: "Cleanup docker" run: ./scripts/ci/cleanup_docker.sh - if: inputs.do-build == 'true' - name: "Install Breeze" uses: ./.github/actions/breeze - if: inputs.do-build == 'true' - - name: "Regenerate dependencies in case they was modified manually so that we can build an image" - shell: bash - run: | - pip install rich>=12.4.4 pyyaml - python scripts/ci/pre_commit/update_providers_dependencies.py 
- if: inputs.do-build == 'true' && inputs.upgrade-to-newer-dependencies != 'false' - name: "Cleanup dist and context file" shell: bash run: rm -fv ./dist/* ./docker-context-files/* - if: inputs.do-build == 'true' - name: "Download packages prepared as artifacts" uses: actions/download-artifact@v4 with: name: prod-packages path: ./docker-context-files - if: inputs.do-build == 'true' - name: "Download constraints" uses: actions/download-artifact@v4 with: name: constraints path: ./docker-context-files - if: inputs.do-build == 'true' - name: Login to ghcr.io shell: bash run: echo "${{ env.GITHUB_TOKEN }}" | docker login ghcr.io -u ${{ github.actor }} --password-stdin - if: inputs.do-build == 'true' - - name: "Build PROD images w/ source providers ${{ matrix.python-version }}:${{ inputs.image-tag }}" + - name: "Build PROD images w/ source providers ${{ matrix.python-version }}" shell: bash run: > - breeze prod-image build --tag-as-latest --image-tag "${{ inputs.image-tag }}" + breeze prod-image build --commit-sha "${{ github.sha }}" --install-packages-from-context --airflow-constraints-mode constraints-source-providers --use-constraints-for-context-packages --python "${{ matrix.python-version }}" @@ -290,12 +250,12 @@ ${{ inputs.do-build == 'true' && inputs.image-tag || '' }}" INSTALL_MYSQL_CLIENT_TYPE: ${{ inputs.install-mysql-client-type }} UPGRADE_TO_NEWER_DEPENDENCIES: ${{ inputs.upgrade-to-newer-dependencies }} INCLUDE_NOT_READY_PROVIDERS: "true" - if: inputs.do-build == 'true' && inputs.build-provider-packages == 'true' - - name: "Build PROD images with PyPi providers ${{ matrix.python-version }}:${{ inputs.image-tag }}" + if: inputs.build-provider-packages == 'true' + - name: "Build PROD images with PyPi providers ${{ matrix.python-version }}" shell: bash run: > - breeze prod-image build --builder airflow_cache --tag-as-latest - --image-tag "${{ inputs.image-tag }}" --commit-sha "${{ github.sha }}" + breeze prod-image build --builder airflow_cache + --commit-sha 
"${{ github.sha }}" --install-packages-from-context --airflow-constraints-mode constraints --use-constraints-for-context-packages --python "${{ matrix.python-version }}" env: @@ -306,9 +266,18 @@ ${{ inputs.do-build == 'true' && inputs.image-tag || '' }}" INSTALL_MYSQL_CLIENT_TYPE: ${{ inputs.install-mysql-client-type }} UPGRADE_TO_NEWER_DEPENDENCIES: ${{ inputs.upgrade-to-newer-dependencies }} INCLUDE_NOT_READY_PROVIDERS: "true" - if: inputs.do-build == 'true' && inputs.build-provider-packages != 'true' - - name: Verify PROD image ${{ matrix.python-version }}:${{ inputs.image-tag }} + if: inputs.build-provider-packages != 'true' + - name: Verify PROD image ${{ matrix.python-version }} + run: breeze prod-image verify --python "${{ matrix.python-version }}" + - name: "Export PROD docker image ${{ matrix.python-version }}" run: > - breeze prod-image verify --image-tag "${{ inputs.image-tag }}" - --python "${{ matrix.python-version }}" - if: inputs.do-build == 'true' + breeze prod-image save --python "${{ matrix.python-version }}" --platform "${{ inputs.platform }}" + if: inputs.upload-image-artifact == 'true' + - name: "Stash PROD docker image ${{ matrix.python-version }}" + uses: apache/infrastructure-actions/stash/save@c94b890bbedc2fc61466d28e6bd9966bc6c6643c + with: + key: "prod-image-save-${{ inputs.platform }}-${{ matrix.python-version }}" + path: "/tmp/prod-image-save-*-${{ matrix.python-version }}.tar" + if-no-files-found: 'error' + retention-days: 2 + if: inputs.upload-image-artifact == 'true' diff --git a/.github/workflows/prod-image-extra-checks.yml b/.github/workflows/prod-image-extra-checks.yml index bb63faef7b243..33ebc4700dad3 100644 --- a/.github/workflows/prod-image-extra-checks.yml +++ b/.github/workflows/prod-image-extra-checks.yml @@ -40,9 +40,6 @@ on: # yamllint disable-line rule:truthy description: "Whether to use uv to build the image (true/false)" required: true type: string - image-tag: - required: true - type: string build-provider-packages: 
description: "Whether to build provider packages (true/false). If false providers are from PyPI" required: true @@ -74,7 +71,6 @@ jobs: runs-on-as-json-public: ${{ inputs.runs-on-as-json-public }} build-type: "MySQL Client" upload-package-artifact: "false" - image-tag: mysql-${{ inputs.image-tag }} install-mysql-client-type: "mysql" python-versions: ${{ inputs.python-versions }} default-python-version: ${{ inputs.default-python-version }} @@ -98,7 +94,6 @@ jobs: runs-on-as-json-public: ${{ inputs.runs-on-as-json-public }} build-type: "pip" upload-package-artifact: "false" - image-tag: mysql-${{ inputs.image-tag }} install-mysql-client-type: "mysql" python-versions: ${{ inputs.python-versions }} default-python-version: ${{ inputs.default-python-version }} diff --git a/.github/workflows/push-image-cache.yml b/.github/workflows/push-image-cache.yml index 10a33275ad3f3..8712ee7d7ba47 100644 --- a/.github/workflows/push-image-cache.yml +++ b/.github/workflows/push-image-cache.yml @@ -49,7 +49,7 @@ on: # yamllint disable-line rule:truthy type: string default: "mariadb" platform: - description: "Platform for the build - 'linux/amd64' or 'linux/arm64'" + description: "Platform for the build - 'linux/amd64' or 'linux/arm64'" required: true type: string python-versions: @@ -87,7 +87,7 @@ jobs: # adding space before (with >) apparently turns the `runs-on` processed line into a string "Array" # instead of an array of strings.
# yamllint disable-line rule:line-length - runs-on: ${{ (inputs.platform == 'linux/amd64') && fromJSON(inputs.runs-on-as-json-public) || fromJSON(inputs.runs-on-as-json-self-hosted) }} + runs-on: ${{ (inputs.platform == 'linux/amd64') && fromJSON(inputs.runs-on-as-json-public) || fromJSON(inputs.runs-on-as-json-self-hosted) }} strategy: fail-fast: false matrix: @@ -128,7 +128,7 @@ jobs: uses: ./.github/actions/breeze - name: "Start ARM instance" run: ./scripts/ci/images/ci_start_arm_instance_and_connect_to_docker.sh - if: inputs.platform == 'linux/arm64' + if: inputs.platform == 'linux/arm64' - name: Login to ghcr.io run: echo "${{ env.GITHUB_TOKEN }}" | docker login ghcr.io -u ${{ github.actor }} --password-stdin - name: "Push CI ${{ inputs.cache-type }} cache: ${{ matrix.python }} ${{ inputs.platform }}" @@ -137,12 +137,12 @@ jobs: --platform "${{ inputs.platform }}" --python ${{ matrix.python }} - name: "Stop ARM instance" run: ./scripts/ci/images/ci_stop_arm_instance.sh - if: always() && inputs.platform == 'linux/arm64' + if: always() && inputs.platform == 'linux/arm64' - name: "Push CI latest images: ${{ matrix.python }} (linux/amd64 only)" run: > breeze ci-image build --builder airflow_cache --push --python "${{ matrix.python }}" --platform "${{ inputs.platform }}" - if: inputs.push-latest-images == 'true' && inputs.platform == 'linux/amd64' + if: inputs.push-latest-images == 'true' && inputs.platform == 'linux/amd64' push-prod-image-cache: name: "Push PROD ${{ inputs.cache-type }}:${{ matrix.python }} image cache" @@ -150,7 +150,7 @@ jobs: # adding space before (with >) apparently turns the `runs-on` processed line into a string "Array" # instead of an array of strings.
# yamllint disable-line rule:line-length - runs-on: ${{ (inputs.platform == 'linux/amd64') && fromJSON(inputs.runs-on-as-json-public) || fromJSON(inputs.runs-on-as-json-self-hosted) }} + runs-on: ${{ (inputs.platform == 'linux/amd64') && fromJSON(inputs.runs-on-as-json-public) || fromJSON(inputs.runs-on-as-json-self-hosted) }} strategy: fail-fast: false matrix: @@ -198,7 +198,7 @@ jobs: path: ./docker-context-files - name: "Start ARM instance" run: ./scripts/ci/images/ci_start_arm_instance_and_connect_to_docker.sh - if: inputs.platform == 'linux/arm64' + if: inputs.platform == 'linux/arm64' - name: Login to ghcr.io run: echo "${{ env.GITHUB_TOKEN }}" | docker login ghcr.io -u ${{ github.actor }} --password-stdin - name: "Push PROD ${{ inputs.cache-type }} cache: ${{ matrix.python-version }} ${{ inputs.platform }}" @@ -209,7 +209,7 @@ jobs: --python ${{ matrix.python }} - name: "Stop ARM instance" run: ./scripts/ci/images/ci_stop_arm_instance.sh - if: always() && inputs.platform == 'linux/arm64' + if: always() && inputs.platform == 'linux/arm64' # We only push "AMD" images as it is really only needed for any kind of automated builds in CI # and currently there is not an easy way to make multi-platform image from two separate builds # and we can do it after we stopped the ARM instance as it is not needed anymore @@ -217,4 +217,4 @@ run: > breeze prod-image build --builder airflow_cache --install-packages-from-context --push --platform "${{ inputs.platform }}" - if: inputs.push-latest-images == 'true' && inputs.platform == 'linux/amd64' + if: inputs.push-latest-images == 'true' && inputs.platform == 'linux/amd64' diff --git a/.github/workflows/release_dockerhub_image.yml b/.github/workflows/release_dockerhub_image.yml index 5ce1585131f76..46705c6a106fa 100644 --- a/.github/workflows/release_dockerhub_image.yml +++ b/.github/workflows/release_dockerhub_image.yml @@ -47,7 +47,7 @@ jobs: defaultPythonVersion: ${{ steps.selective-checks.outputs.default-python-version }}
chicken-egg-providers: ${{ steps.selective-checks.outputs.chicken-egg-providers }} skipLatest: ${{ github.event.inputs.skipLatest == '' && ' ' || '--skip-latest' }} - limitPlatform: ${{ github.repository == 'apache/airflow' && ' ' || '--limit-platform linux/amd64' }} + limitPlatform: ${{ github.repository == 'apache/airflow' && ' ' || '--limit-platform linux/amd64' }} env: GITHUB_CONTEXT: ${{ toJson(github) }} VERBOSE: true diff --git a/.github/workflows/run-unit-tests.yml b/.github/workflows/run-unit-tests.yml index 6b491f6bff4ab..3a449a375a8c7 100644 --- a/.github/workflows/run-unit-tests.yml +++ b/.github/workflows/run-unit-tests.yml @@ -45,10 +45,6 @@ on: # yamllint disable-line rule:truthy required: false default: ":" type: string - image-tag: - description: "Tag to set for the image" - required: true - type: string python-versions: description: "The list of python versions (stringified JSON array) to run the tests on." required: true @@ -144,7 +140,6 @@ jobs: GITHUB_REPOSITORY: ${{ github.repository }} GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} GITHUB_USERNAME: ${{ github.actor }} - IMAGE_TAG: "${{ inputs.image-tag }}" INCLUDE_SUCCESS_OUTPUTS: ${{ inputs.include-success-outputs }} # yamllint disable rule:line-length JOB_ID: "${{ matrix.test-group }}-${{ inputs.test-scope }}-${{ inputs.test-name }}-${{inputs.backend}}-${{ matrix.backend-version }}-${{ matrix.python-version }}" @@ -163,10 +158,10 @@ jobs: uses: actions/checkout@v4 with: persist-credentials: false - - name: "Cleanup docker" - run: ./scripts/ci/cleanup_docker.sh - - name: "Prepare breeze & CI image: ${{matrix.python-version}}:${{ inputs.image-tag }}" + - name: "Prepare breeze & CI image: ${{ matrix.python-version}}" uses: ./.github/actions/prepare_breeze_and_image + with: + platform: "linux/amd64" - name: > Migration Tests: ${{ matrix.python-version }}:${{ env.PARALLEL_TEST_TYPES }} uses: ./.github/actions/migration_tests diff --git a/.github/workflows/special-tests.yml
b/.github/workflows/special-tests.yml index decc7271b728b..d416d55575fb9 100644 --- a/.github/workflows/special-tests.yml +++ b/.github/workflows/special-tests.yml @@ -32,10 +32,6 @@ on: # yamllint disable-line rule:truthy description: "The json representing list of test test groups to run" required: true type: string - image-tag: - description: "Tag to set for the image" - required: true - type: string core-test-types-list-as-string: description: "The list of core test types to run separated by spaces" required: true @@ -96,7 +92,6 @@ jobs: test-scope: "DB" test-groups: ${{ inputs.test-groups }} backend: "postgres" - image-tag: ${{ inputs.image-tag }} python-versions: "['${{ inputs.default-python-version }}']" backend-versions: "['${{ inputs.default-postgres-version }}']" excluded-providers-as-string: ${{ inputs.excluded-providers-as-string }} @@ -120,7 +115,6 @@ jobs: test-scope: "All" test-groups: ${{ inputs.test-groups }} backend: "postgres" - image-tag: ${{ inputs.image-tag }} python-versions: "['${{ inputs.default-python-version }}']" backend-versions: "['${{ inputs.default-postgres-version }}']" excluded-providers-as-string: ${{ inputs.excluded-providers-as-string }} @@ -145,7 +139,6 @@ jobs: test-scope: "All" test-groups: ${{ inputs.test-groups }} backend: "postgres" - image-tag: ${{ inputs.image-tag }} python-versions: "['${{ inputs.default-python-version }}']" backend-versions: "['${{ inputs.default-postgres-version }}']" excluded-providers-as-string: ${{ inputs.excluded-providers-as-string }} @@ -169,7 +162,6 @@ jobs: test-scope: "Quarantined" test-groups: ${{ inputs.test-groups }} backend: "postgres" - image-tag: ${{ inputs.image-tag }} python-versions: "['${{ inputs.default-python-version }}']" backend-versions: "['${{ inputs.default-postgres-version }}']" excluded-providers-as-string: ${{ inputs.excluded-providers-as-string }} @@ -193,7 +185,6 @@ jobs: test-scope: "ARM collection" test-groups: ${{ inputs.test-groups }} backend: "postgres" - image-tag: 
${{ inputs.image-tag }} python-versions: "['${{ inputs.default-python-version }}']" backend-versions: "['${{ inputs.default-postgres-version }}']" excluded-providers-as-string: ${{ inputs.excluded-providers-as-string }} @@ -218,7 +209,6 @@ jobs: test-scope: "System" test-groups: ${{ inputs.test-groups }} backend: "postgres" - image-tag: ${{ inputs.image-tag }} python-versions: "['${{ inputs.default-python-version }}']" backend-versions: "['${{ inputs.default-postgres-version }}']" excluded-providers-as-string: ${{ inputs.excluded-providers-as-string }} diff --git a/.github/workflows/task-sdk-tests.yml b/.github/workflows/task-sdk-tests.yml index acc9872e6ed96..9baabed3b545c 100644 --- a/.github/workflows/task-sdk-tests.yml +++ b/.github/workflows/task-sdk-tests.yml @@ -24,10 +24,6 @@ on: # yamllint disable-line rule:truthy description: "The array of labels (in json form) determining default runner used for the build." required: true type: string - image-tag: - description: "Tag to set for the image" - required: true - type: string default-python-version: description: "Which version of python should be used by default" required: true @@ -53,7 +49,6 @@ jobs: GITHUB_REPOSITORY: ${{ github.repository }} GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} GITHUB_USERNAME: ${{ github.actor }} - IMAGE_TAG: "${{ inputs.image-tag }}" INCLUDE_NOT_READY_PROVIDERS: "true" PYTHON_MAJOR_MINOR_VERSION: "${{ inputs.default-python-version }}" VERBOSE: "true" @@ -66,10 +61,10 @@ jobs: uses: actions/checkout@v4 with: persist-credentials: false - - name: "Cleanup docker" - run: ./scripts/ci/cleanup_docker.sh - - name: "Prepare breeze & CI image: ${{ matrix.python-version }}:${{ inputs.image-tag }}" + - name: "Prepare breeze & CI image: ${{ matrix.python-version }}" uses: ./.github/actions/prepare_breeze_and_image + with: + platform: "linux/amd64" - name: "Cleanup dist files" run: rm -fv ./dist/* - name: "Prepare Task SDK packages: wheel" diff --git a/.github/workflows/test-provider-packages.yml 
b/.github/workflows/test-provider-packages.yml index 08715af6b58ba..8e8099cc907dd 100644 --- a/.github/workflows/test-provider-packages.yml +++ b/.github/workflows/test-provider-packages.yml @@ -24,10 +24,6 @@ on: # yamllint disable-line rule:truthy description: "The array of labels (in json form) determining default runner used for the build." required: true type: string - image-tag: - description: "Tag to set for the image" - required: true - type: string canary-run: description: "Whether this is a canary run" required: true @@ -75,7 +71,6 @@ jobs: GITHUB_REPOSITORY: ${{ github.repository }} GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} GITHUB_USERNAME: ${{ github.actor }} - IMAGE_TAG: "${{ inputs.image-tag }}" INCLUDE_NOT_READY_PROVIDERS: "true" PYTHON_MAJOR_MINOR_VERSION: "${{ inputs.default-python-version }}" VERBOSE: "true" @@ -87,11 +82,10 @@ jobs: uses: actions/checkout@v4 with: persist-credentials: false - - name: "Cleanup docker" - run: ./scripts/ci/cleanup_docker.sh - - name: > - Prepare breeze & CI image: ${{ inputs.default-python-version }}:${{ inputs.image-tag }} + - name: "Prepare breeze & CI image: ${{ inputs.default-python-version}}" uses: ./.github/actions/prepare_breeze_and_image + with: + platform: "linux/amd64" - name: "Cleanup dist files" run: rm -fv ./dist/* - name: "Prepare provider documentation" @@ -161,7 +155,6 @@ jobs: GITHUB_REPOSITORY: ${{ github.repository }} GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} GITHUB_USERNAME: ${{ github.actor }} - IMAGE_TAG: "${{ inputs.image-tag }}" INCLUDE_NOT_READY_PROVIDERS: "true" PYTHON_MAJOR_MINOR_VERSION: "${{ inputs.default-python-version }}" VERSION_SUFFIX_FOR_PYPI: "dev0" @@ -176,10 +169,10 @@ jobs: uses: actions/checkout@v4 with: persist-credentials: false - - name: "Cleanup docker" - run: ./scripts/ci/cleanup_docker.sh - - name: "Prepare breeze & CI image: ${{ matrix.python-version }}:${{ inputs.image-tag }}" + - name: "Prepare breeze & CI image: ${{ matrix.default-python-version}}" uses: 
./.github/actions/prepare_breeze_and_image + with: + platform: "linux/amd64" - name: "Cleanup dist files" run: rm -fv ./dist/* - name: "Prepare provider packages: wheel" diff --git a/Dockerfile b/Dockerfile index fe49db186479d..f32fbef633bc4 100644 --- a/Dockerfile +++ b/Dockerfile @@ -422,85 +422,6 @@ common::show_packaging_tool_version_and_location common::install_packaging_tools EOF -# The content below is automatically copied from scripts/docker/install_airflow_dependencies_from_branch_tip.sh -COPY <<"EOF" /install_airflow_dependencies_from_branch_tip.sh -#!/usr/bin/env bash - -. "$( dirname "${BASH_SOURCE[0]}" )/common.sh" - -: "${AIRFLOW_REPO:?Should be set}" -: "${AIRFLOW_BRANCH:?Should be set}" -: "${INSTALL_MYSQL_CLIENT:?Should be true or false}" -: "${INSTALL_POSTGRES_CLIENT:?Should be true or false}" - -function install_airflow_dependencies_from_branch_tip() { - echo - echo "${COLOR_BLUE}Installing airflow from ${AIRFLOW_BRANCH}. It is used to cache dependencies${COLOR_RESET}" - echo - if [[ ${INSTALL_MYSQL_CLIENT} != "true" ]]; then - AIRFLOW_EXTRAS=${AIRFLOW_EXTRAS/mysql,} - fi - if [[ ${INSTALL_POSTGRES_CLIENT} != "true" ]]; then - AIRFLOW_EXTRAS=${AIRFLOW_EXTRAS/postgres,} - fi - local TEMP_AIRFLOW_DIR - TEMP_AIRFLOW_DIR=$(mktemp -d) - # Install latest set of dependencies - without constraints. This is to download a "base" set of - # dependencies that we can cache and reuse when installing airflow using constraints and latest - # pyproject.toml in the next step (when we install regular airflow). 
- set -x - curl -fsSL "https://github.com/${AIRFLOW_REPO}/archive/${AIRFLOW_BRANCH}.tar.gz" | \ - tar xz -C "${TEMP_AIRFLOW_DIR}" --strip 1 - # Make sure editable dependencies are calculated when devel-ci dependencies are installed - ${PACKAGING_TOOL_CMD} install ${EXTRA_INSTALL_FLAGS} ${ADDITIONAL_PIP_INSTALL_FLAGS} \ - --editable "${TEMP_AIRFLOW_DIR}[${AIRFLOW_EXTRAS}]" - set +x - common::install_packaging_tools - set -x - echo "${COLOR_BLUE}Uninstalling providers. Dependencies remain${COLOR_RESET}" - # Uninstall airflow and providers to keep only the dependencies. In the future when - # planned https://github.com/pypa/pip/issues/11440 is implemented in pip we might be able to use this - # flag and skip the remove step. - pip freeze | grep apache-airflow-providers | xargs ${PACKAGING_TOOL_CMD} uninstall ${EXTRA_UNINSTALL_FLAGS} || true - set +x - echo - echo "${COLOR_BLUE}Uninstalling just airflow. Dependencies remain. Now target airflow can be reinstalled using mostly cached dependencies${COLOR_RESET}" - echo - set +x - ${PACKAGING_TOOL_CMD} uninstall ${EXTRA_UNINSTALL_FLAGS} apache-airflow - rm -rf "${TEMP_AIRFLOW_DIR}" - set -x - # If you want to make sure dependency is removed from cache in your PR when you removed it from - # pyproject.toml - please add your dependency here as a list of strings - # for example: - # DEPENDENCIES_TO_REMOVE=("package_a" "package_b") - # Once your PR is merged, you should make a follow-up PR to remove it from this list - # and increase the AIRFLOW_CI_BUILD_EPOCH in Dockerfile.ci to make sure your cache is rebuilt. - local DEPENDENCIES_TO_REMOVE - # IMPORTANT!! 
Make sure to increase AIRFLOW_CI_BUILD_EPOCH in Dockerfile.ci when you remove a dependency from that list - DEPENDENCIES_TO_REMOVE=() - if [[ "${DEPENDENCIES_TO_REMOVE[*]}" != "" ]]; then - echo - echo "${COLOR_BLUE}Uninstalling just removed dependencies (temporary until cache refreshes)${COLOR_RESET}" - echo "${COLOR_BLUE}Dependencies to uninstall: ${DEPENDENCIES_TO_REMOVE[*]}${COLOR_RESET}" - echo - set +x - ${PACKAGING_TOOL_CMD} uninstall "${DEPENDENCIES_TO_REMOVE[@]}" || true - set -x - # make sure that the dependency is not needed by something else - pip check - fi -} - -common::get_colors -common::get_packaging_tool -common::get_airflow_version_specification -common::get_constraints_location -common::show_packaging_tool_version_and_location - -install_airflow_dependencies_from_branch_tip -EOF - # The content below is automatically copied from scripts/docker/common.sh COPY <<"EOF" /common.sh #!/usr/bin/env bash @@ -524,8 +445,6 @@ function common::get_packaging_tool() { ## IMPORTANT: IF YOU MODIFY THIS FUNCTION YOU SHOULD ALSO MODIFY CORRESPONDING FUNCTION IN ## `scripts/in_container/_in_container_utils.sh` - local PYTHON_BIN - PYTHON_BIN=$(which python) if [[ ${AIRFLOW_USE_UV} == "true" ]]; then echo echo "${COLOR_BLUE}Using 'uv' to install Airflow${COLOR_RESET}" @@ -533,8 +452,8 @@ function common::get_packaging_tool() { export PACKAGING_TOOL="uv" export PACKAGING_TOOL_CMD="uv pip" if [[ -z ${VIRTUAL_ENV=} ]]; then - export EXTRA_INSTALL_FLAGS="--python ${PYTHON_BIN}" - export EXTRA_UNINSTALL_FLAGS="--python ${PYTHON_BIN}" + export EXTRA_INSTALL_FLAGS="--system" + export EXTRA_UNINSTALL_FLAGS="--system" else export EXTRA_INSTALL_FLAGS="" export EXTRA_UNINSTALL_FLAGS="" @@ -900,18 +819,12 @@ function install_airflow() { # Determine the installation_command_flags based on AIRFLOW_INSTALLATION_METHOD method local installation_command_flags if [[ ${AIRFLOW_INSTALLATION_METHOD} == "." 
]]; then - # We need _a_ file in there otherwise the editable install doesn't include anything in the .pth file - mkdir -p ./providers/src/airflow/providers/ - touch ./providers/src/airflow/providers/__init__.py - - # Similarly we need _a_ file for task_sdk too - mkdir -p ./task_sdk/src/airflow/sdk/ - echo '__version__ = "0.0.0dev0"' > ./task_sdk/src/airflow/sdk/__init__.py - - trap 'rm -f ./providers/src/airflow/providers/__init__.py ./task_sdk/src/airflow/__init__.py 2>/dev/null' EXIT - # When installing from sources - we always use `--editable` mode - installation_command_flags="--editable .[${AIRFLOW_EXTRAS}]${AIRFLOW_VERSION_SPECIFICATION} --editable ./providers --editable ./task_sdk" + installation_command_flags="--editable .[${AIRFLOW_EXTRAS}]${AIRFLOW_VERSION_SPECIFICATION} --editable ./task_sdk" + while IFS= read -r -d '' pyproject_toml_file; do + project_folder=$(dirname ${pyproject_toml_file}) + installation_command_flags="${installation_command_flags} --editable ${project_folder}" + done < <(find "providers" -name "pyproject.toml" -print0) elif [[ ${AIRFLOW_INSTALLATION_METHOD} == "apache-airflow" ]]; then installation_command_flags="apache-airflow[${AIRFLOW_EXTRAS}]${AIRFLOW_VERSION_SPECIFICATION}" elif [[ ${AIRFLOW_INSTALLATION_METHOD} == apache-airflow\ @\ * ]]; then @@ -1407,7 +1320,8 @@ ARG PYTHON_BASE_IMAGE ENV PYTHON_BASE_IMAGE=${PYTHON_BASE_IMAGE} \ DEBIAN_FRONTEND=noninteractive LANGUAGE=C.UTF-8 LANG=C.UTF-8 LC_ALL=C.UTF-8 \ LC_CTYPE=C.UTF-8 LC_MESSAGES=C.UTF-8 \ - PIP_CACHE_DIR=/tmp/.cache/pip + PIP_CACHE_DIR=/tmp/.cache/pip \ + UV_CACHE_DIR=/tmp/.cache/uv ARG DEV_APT_DEPS="" ARG ADDITIONAL_DEV_APT_DEPS="" @@ -1473,9 +1387,6 @@ ARG DEFAULT_CONSTRAINTS_BRANCH="constraints-main" # By default PIP has progress bar but you can disable it. ARG PIP_PROGRESS_BAR -# By default we do not use pre-cached packages, but in CI/Breeze environment we override this to speed up -# builds in case pyproject.toml changed. 
This is pure optimisation of CI/Breeze builds. -ARG AIRFLOW_PRE_CACHED_PIP_PACKAGES="false" # This is airflow version that is put in the label of the image build ARG AIRFLOW_VERSION # By default latest released version of airflow is installed (when empty) but this value can be overridden @@ -1513,7 +1424,6 @@ ENV AIRFLOW_PIP_VERSION=${AIRFLOW_PIP_VERSION} \ AIRFLOW_UV_VERSION=${AIRFLOW_UV_VERSION} \ UV_HTTP_TIMEOUT=${UV_HTTP_TIMEOUT} \ AIRFLOW_USE_UV=${AIRFLOW_USE_UV} \ - AIRFLOW_PRE_CACHED_PIP_PACKAGES=${AIRFLOW_PRE_CACHED_PIP_PACKAGES} \ AIRFLOW_VERSION=${AIRFLOW_VERSION} \ AIRFLOW_INSTALLATION_METHOD=${AIRFLOW_INSTALLATION_METHOD} \ AIRFLOW_VERSION_SPECIFICATION=${AIRFLOW_VERSION_SPECIFICATION} \ @@ -1538,8 +1448,7 @@ ENV AIRFLOW_PIP_VERSION=${AIRFLOW_PIP_VERSION} \ # Copy all scripts required for installation - changing any of those should lead to # rebuilding from here -COPY --from=scripts common.sh install_packaging_tools.sh \ - install_airflow_dependencies_from_branch_tip.sh create_prod_venv.sh /scripts/docker/ +COPY --from=scripts common.sh install_packaging_tools.sh create_prod_venv.sh /scripts/docker/ # We can set this value to true in case we want to install .whl/.tar.gz packages placed in the # docker-context-files folder. 
This can be done for both additional packages you want to install @@ -1569,13 +1478,7 @@ ENV AIRFLOW_CI_BUILD_EPOCH=${AIRFLOW_CI_BUILD_EPOCH} # By default PIP installs everything to ~/.local and it's also treated as VIRTUALENV ENV VIRTUAL_ENV="${AIRFLOW_USER_HOME_DIR}/.local" -RUN bash /scripts/docker/install_packaging_tools.sh; \ - bash /scripts/docker/create_prod_venv.sh; \ - if [[ ${AIRFLOW_PRE_CACHED_PIP_PACKAGES} == "true" && \ - ${INSTALL_PACKAGES_FROM_CONTEXT} == "false" && \ - ${UPGRADE_INVALIDATION_STRING} == "" ]]; then \ - bash /scripts/docker/install_airflow_dependencies_from_branch_tip.sh; \ - fi +RUN bash /scripts/docker/install_packaging_tools.sh; bash /scripts/docker/create_prod_venv.sh COPY --chown=airflow:0 ${AIRFLOW_SOURCES_FROM} ${AIRFLOW_SOURCES_TO} @@ -1599,10 +1502,10 @@ COPY --from=scripts install_from_docker_context_files.sh install_airflow.sh \ # an incorrect architecture. ARG TARGETARCH # Value to be able to easily change cache id and therefore use a bare new cache -ARG PIP_CACHE_EPOCH="9" +ARG DEPENDENCY_CACHE_EPOCH="9" # hadolint ignore=SC2086, SC2010, DL3042 -RUN --mount=type=cache,id=$PYTHON_BASE_IMAGE-$AIRFLOW_PIP_VERSION-$TARGETARCH-$PIP_CACHE_EPOCH,target=/tmp/.cache/pip,uid=${AIRFLOW_UID} \ +RUN --mount=type=cache,id=prod-$TARGETARCH-$DEPENDENCY_CACHE_EPOCH,target=/tmp/.cache/,uid=${AIRFLOW_UID} \ if [[ ${INSTALL_PACKAGES_FROM_CONTEXT} == "true" ]]; then \ bash /scripts/docker/install_from_docker_context_files.sh; \ fi; \ @@ -1622,7 +1525,7 @@ RUN --mount=type=cache,id=$PYTHON_BASE_IMAGE-$AIRFLOW_PIP_VERSION-$TARGETARCH-$P # during the build additionally to whatever has been installed so far. 
It is recommended that # the requirements.txt contains only dependencies with == version specification # hadolint ignore=DL3042 -RUN --mount=type=cache,id=additional-requirements-$PYTHON_BASE_IMAGE-$AIRFLOW_PIP_VERSION-$TARGETARCH-$PIP_CACHE_EPOCH,target=/tmp/.cache/pip,uid=${AIRFLOW_UID} \ +RUN --mount=type=cache,id=prod-$TARGETARCH-$DEPENDENCY_CACHE_EPOCH,target=/tmp/.cache/,uid=${AIRFLOW_UID} \ if [[ -f /docker-context-files/requirements.txt ]]; then \ pip install -r /docker-context-files/requirements.txt; \ fi @@ -1650,7 +1553,9 @@ ARG PYTHON_BASE_IMAGE ENV PYTHON_BASE_IMAGE=${PYTHON_BASE_IMAGE} \ # Make sure noninteractive debian install is used and language variables set DEBIAN_FRONTEND=noninteractive LANGUAGE=C.UTF-8 LANG=C.UTF-8 LC_ALL=C.UTF-8 \ - LC_CTYPE=C.UTF-8 LC_MESSAGES=C.UTF-8 LD_LIBRARY_PATH=/usr/local/lib + LC_CTYPE=C.UTF-8 LC_MESSAGES=C.UTF-8 LD_LIBRARY_PATH=/usr/local/lib \ + PIP_CACHE_DIR=/tmp/.cache/pip \ + UV_CACHE_DIR=/tmp/.cache/uv ARG RUNTIME_APT_DEPS="" ARG ADDITIONAL_RUNTIME_APT_DEPS="" diff --git a/Dockerfile.ci b/Dockerfile.ci index 7c0b529d4711f..74d8eb59435da 100644 --- a/Dockerfile.ci +++ b/Dockerfile.ci @@ -363,85 +363,6 @@ common::show_packaging_tool_version_and_location common::install_packaging_tools EOF -# The content below is automatically copied from scripts/docker/install_airflow_dependencies_from_branch_tip.sh -COPY <<"EOF" /install_airflow_dependencies_from_branch_tip.sh -#!/usr/bin/env bash - -. "$( dirname "${BASH_SOURCE[0]}" )/common.sh" - -: "${AIRFLOW_REPO:?Should be set}" -: "${AIRFLOW_BRANCH:?Should be set}" -: "${INSTALL_MYSQL_CLIENT:?Should be true or false}" -: "${INSTALL_POSTGRES_CLIENT:?Should be true or false}" - -function install_airflow_dependencies_from_branch_tip() { - echo - echo "${COLOR_BLUE}Installing airflow from ${AIRFLOW_BRANCH}. 
It is used to cache dependencies${COLOR_RESET}" - echo - if [[ ${INSTALL_MYSQL_CLIENT} != "true" ]]; then - AIRFLOW_EXTRAS=${AIRFLOW_EXTRAS/mysql,} - fi - if [[ ${INSTALL_POSTGRES_CLIENT} != "true" ]]; then - AIRFLOW_EXTRAS=${AIRFLOW_EXTRAS/postgres,} - fi - local TEMP_AIRFLOW_DIR - TEMP_AIRFLOW_DIR=$(mktemp -d) - # Install latest set of dependencies - without constraints. This is to download a "base" set of - # dependencies that we can cache and reuse when installing airflow using constraints and latest - # pyproject.toml in the next step (when we install regular airflow). - set -x - curl -fsSL "https://github.com/${AIRFLOW_REPO}/archive/${AIRFLOW_BRANCH}.tar.gz" | \ - tar xz -C "${TEMP_AIRFLOW_DIR}" --strip 1 - # Make sure editable dependencies are calculated when devel-ci dependencies are installed - ${PACKAGING_TOOL_CMD} install ${EXTRA_INSTALL_FLAGS} ${ADDITIONAL_PIP_INSTALL_FLAGS} \ - --editable "${TEMP_AIRFLOW_DIR}[${AIRFLOW_EXTRAS}]" - set +x - common::install_packaging_tools - set -x - echo "${COLOR_BLUE}Uninstalling providers. Dependencies remain${COLOR_RESET}" - # Uninstall airflow and providers to keep only the dependencies. In the future when - # planned https://github.com/pypa/pip/issues/11440 is implemented in pip we might be able to use this - # flag and skip the remove step. - pip freeze | grep apache-airflow-providers | xargs ${PACKAGING_TOOL_CMD} uninstall ${EXTRA_UNINSTALL_FLAGS} || true - set +x - echo - echo "${COLOR_BLUE}Uninstalling just airflow. Dependencies remain. 
Now target airflow can be reinstalled using mostly cached dependencies${COLOR_RESET}" - echo - set +x - ${PACKAGING_TOOL_CMD} uninstall ${EXTRA_UNINSTALL_FLAGS} apache-airflow - rm -rf "${TEMP_AIRFLOW_DIR}" - set -x - # If you want to make sure dependency is removed from cache in your PR when you removed it from - # pyproject.toml - please add your dependency here as a list of strings - # for example: - # DEPENDENCIES_TO_REMOVE=("package_a" "package_b") - # Once your PR is merged, you should make a follow-up PR to remove it from this list - # and increase the AIRFLOW_CI_BUILD_EPOCH in Dockerfile.ci to make sure your cache is rebuilt. - local DEPENDENCIES_TO_REMOVE - # IMPORTANT!! Make sure to increase AIRFLOW_CI_BUILD_EPOCH in Dockerfile.ci when you remove a dependency from that list - DEPENDENCIES_TO_REMOVE=() - if [[ "${DEPENDENCIES_TO_REMOVE[*]}" != "" ]]; then - echo - echo "${COLOR_BLUE}Uninstalling just removed dependencies (temporary until cache refreshes)${COLOR_RESET}" - echo "${COLOR_BLUE}Dependencies to uninstall: ${DEPENDENCIES_TO_REMOVE[*]}${COLOR_RESET}" - echo - set +x - ${PACKAGING_TOOL_CMD} uninstall "${DEPENDENCIES_TO_REMOVE[@]}" || true - set -x - # make sure that the dependency is not needed by something else - pip check - fi -} - -common::get_colors -common::get_packaging_tool -common::get_airflow_version_specification -common::get_constraints_location -common::show_packaging_tool_version_and_location - -install_airflow_dependencies_from_branch_tip -EOF - # The content below is automatically copied from scripts/docker/common.sh COPY <<"EOF" /common.sh #!/usr/bin/env bash @@ -465,8 +386,6 @@ function common::get_packaging_tool() { ## IMPORTANT: IF YOU MODIFY THIS FUNCTION YOU SHOULD ALSO MODIFY CORRESPONDING FUNCTION IN ## `scripts/in_container/_in_container_utils.sh` - local PYTHON_BIN - PYTHON_BIN=$(which python) if [[ ${AIRFLOW_USE_UV} == "true" ]]; then echo echo "${COLOR_BLUE}Using 'uv' to install Airflow${COLOR_RESET}" @@ -474,8 +393,8 @@ 
function common::get_packaging_tool() { export PACKAGING_TOOL="uv" export PACKAGING_TOOL_CMD="uv pip" if [[ -z ${VIRTUAL_ENV=} ]]; then - export EXTRA_INSTALL_FLAGS="--python ${PYTHON_BIN}" - export EXTRA_UNINSTALL_FLAGS="--python ${PYTHON_BIN}" + export EXTRA_INSTALL_FLAGS="--system" + export EXTRA_UNINSTALL_FLAGS="--system" else export EXTRA_INSTALL_FLAGS="" export EXTRA_UNINSTALL_FLAGS="" @@ -670,18 +589,12 @@ function install_airflow() { # Determine the installation_command_flags based on AIRFLOW_INSTALLATION_METHOD method local installation_command_flags if [[ ${AIRFLOW_INSTALLATION_METHOD} == "." ]]; then - # We need _a_ file in there otherwise the editable install doesn't include anything in the .pth file - mkdir -p ./providers/src/airflow/providers/ - touch ./providers/src/airflow/providers/__init__.py - - # Similarly we need _a_ file for task_sdk too - mkdir -p ./task_sdk/src/airflow/sdk/ - echo '__version__ = "0.0.0dev0"' > ./task_sdk/src/airflow/sdk/__init__.py - - trap 'rm -f ./providers/src/airflow/providers/__init__.py ./task_sdk/src/airflow/__init__.py 2>/dev/null' EXIT - # When installing from sources - we always use `--editable` mode - installation_command_flags="--editable .[${AIRFLOW_EXTRAS}]${AIRFLOW_VERSION_SPECIFICATION} --editable ./providers --editable ./task_sdk" + installation_command_flags="--editable .[${AIRFLOW_EXTRAS}]${AIRFLOW_VERSION_SPECIFICATION} --editable ./task_sdk" + while IFS= read -r -d '' pyproject_toml_file; do + project_folder=$(dirname ${pyproject_toml_file}) + installation_command_flags="${installation_command_flags} --editable ${project_folder}" + done < <(find "providers" -name "pyproject.toml" -print0) elif [[ ${AIRFLOW_INSTALLATION_METHOD} == "apache-airflow" ]]; then installation_command_flags="apache-airflow[${AIRFLOW_EXTRAS}]${AIRFLOW_VERSION_SPECIFICATION}" elif [[ ${AIRFLOW_INSTALLATION_METHOD} == apache-airflow\ @\ * ]]; then @@ -1202,7 +1115,10 @@ ENV PYTHON_BASE_IMAGE=${PYTHON_BASE_IMAGE} \ 
DEPENDENCIES_EPOCH_NUMBER=${DEPENDENCIES_EPOCH_NUMBER} \ INSTALL_MYSQL_CLIENT="true" \ INSTALL_MSSQL_CLIENT="true" \ - INSTALL_POSTGRES_CLIENT="true" + INSTALL_POSTGRES_CLIENT="true" \ + PIP_CACHE_DIR=/root/.cache/pip \ + UV_CACHE_DIR=/root/.cache/uv + RUN echo "Base image version: ${PYTHON_BASE_IMAGE}" @@ -1282,12 +1198,7 @@ ARG DEFAULT_CONSTRAINTS_BRANCH="constraints-main" # By changing the epoch we can force reinstalling Airflow and pip all dependencies # It can also be overwritten manually by setting the AIRFLOW_CI_BUILD_EPOCH environment variable. ARG AIRFLOW_CI_BUILD_EPOCH="10" -ARG AIRFLOW_PRE_CACHED_PIP_PACKAGES="true" # Setup PIP -# By default PIP install run without cache to make image smaller -ARG PIP_NO_CACHE_DIR="true" -# By default UV install run without cache to make image smaller -ARG UV_NO_CACHE="true" ARG UV_HTTP_TIMEOUT="300" # By default PIP has progress bar but you can disable it. ARG PIP_PROGRESS_BAR="on" @@ -1315,7 +1226,6 @@ ENV AIRFLOW_REPO=${AIRFLOW_REPO}\ AIRFLOW_CONSTRAINTS_LOCATION=${AIRFLOW_CONSTRAINTS_LOCATION} \ DEFAULT_CONSTRAINTS_BRANCH=${DEFAULT_CONSTRAINTS_BRANCH} \ AIRFLOW_CI_BUILD_EPOCH=${AIRFLOW_CI_BUILD_EPOCH} \ - AIRFLOW_PRE_CACHED_PIP_PACKAGES=${AIRFLOW_PRE_CACHED_PIP_PACKAGES} \ AIRFLOW_VERSION=${AIRFLOW_VERSION} \ AIRFLOW_PIP_VERSION=${AIRFLOW_PIP_VERSION} \ AIRFLOW_UV_VERSION=${AIRFLOW_UV_VERSION} \ @@ -1327,9 +1237,7 @@ ENV AIRFLOW_REPO=${AIRFLOW_REPO}\ INSTALL_POSTGRES_CLIENT="true" \ AIRFLOW_INSTALLATION_METHOD="." 
\ AIRFLOW_VERSION_SPECIFICATION="" \ - PIP_NO_CACHE_DIR=${PIP_NO_CACHE_DIR} \ PIP_PROGRESS_BAR=${PIP_PROGRESS_BAR} \ - UV_NO_CACHE=${UV_NO_CACHE} \ ADDITIONAL_PIP_INSTALL_FLAGS=${ADDITIONAL_PIP_INSTALL_FLAGS} \ CASS_DRIVER_BUILD_CONCURRENCY=${CASS_DRIVER_BUILD_CONCURRENCY} \ CASS_DRIVER_NO_CYTHON=${CASS_DRIVER_NO_CYTHON} @@ -1338,25 +1246,10 @@ RUN echo "Airflow version: ${AIRFLOW_VERSION}" # Copy all scripts required for installation - changing any of those should lead to # rebuilding from here -COPY --from=scripts install_packaging_tools.sh install_airflow_dependencies_from_branch_tip.sh \ - common.sh /scripts/docker/ +COPY --from=scripts common.sh install_packaging_tools.sh install_additional_dependencies.sh /scripts/docker/ # We are first creating a venv where all python packages and .so binaries needed by those are # installed. -# In case of CI builds we want to pre-install main version of airflow dependencies so that -# We do not have to always reinstall it from the scratch. -# And is automatically reinstalled from the scratch every time patch release of python gets released -# The Airflow and providers are uninstalled, only dependencies remain. -# the cache is only used when "upgrade to newer dependencies" is not set to automatically -# account for removed dependencies (we do not install them in the first place) -# -# We are installing from branch tip without fixing UV or PIP version - in order to avoid rebuilding the -# base cache layer every time the UV or PIP version changes. 
-RUN bash /scripts/docker/install_packaging_tools.sh; \ - if [[ ${AIRFLOW_PRE_CACHED_PIP_PACKAGES} == "true" ]]; then \ - bash /scripts/docker/install_airflow_dependencies_from_branch_tip.sh; \ - fi - # Here we fix the versions so all subsequent commands will use the versions # from the sources @@ -1372,31 +1265,33 @@ ARG AIRFLOW_PRE_COMMIT_UV_VERSION="4.1.4" ENV AIRFLOW_PIP_VERSION=${AIRFLOW_PIP_VERSION} \ AIRFLOW_UV_VERSION=${AIRFLOW_UV_VERSION} \ + # This is needed since we are using cache mounted from the host + UV_LINK_MODE=copy \ AIRFLOW_PRE_COMMIT_VERSION=${AIRFLOW_PRE_COMMIT_VERSION} # The PATH is needed for PIPX to find the tools installed ENV PATH="/root/.local/bin:${PATH}" +# Useful for creating a cache id based on the underlying architecture, preventing the use of cached python packages from +# an incorrect architecture. +ARG TARGETARCH +# Value to be able to easily change cache id and therefore use a bare new cache +ARG DEPENDENCY_CACHE_EPOCH="0" + # Install useful command line tools in their own virtualenv so that they do not clash with # dependencies installed in Airflow also reinstall PIP and UV to make sure they are installed # in the version specified above -RUN bash /scripts/docker/install_packaging_tools.sh - -# Airflow sources change frequently but dependency configuration won't change that often -# We copy pyproject.toml and other files needed to perform setup of dependencies -# So in case pyproject.toml changes we can install latest dependencies required. 
-COPY pyproject.toml ${AIRFLOW_SOURCES}/pyproject.toml -COPY providers/pyproject.toml ${AIRFLOW_SOURCES}/providers/pyproject.toml -COPY task_sdk/pyproject.toml ${AIRFLOW_SOURCES}/task_sdk/pyproject.toml -COPY task_sdk/README.md ${AIRFLOW_SOURCES}/task_sdk/README.md -COPY airflow/__init__.py ${AIRFLOW_SOURCES}/airflow/ -COPY tests_common/ ${AIRFLOW_SOURCES}/tests_common/ -COPY generated/* ${AIRFLOW_SOURCES}/generated/ -COPY constraints/* ${AIRFLOW_SOURCES}/constraints/ -COPY LICENSE ${AIRFLOW_SOURCES}/LICENSE -COPY hatch_build.py ${AIRFLOW_SOURCES}/ +RUN --mount=type=cache,id=ci-$TARGETARCH-$DEPENDENCY_CACHE_EPOCH,target=/root/.cache/ \ + bash /scripts/docker/install_packaging_tools.sh + COPY --from=scripts install_airflow.sh /scripts/docker/ +# We can copy everything here. The Context is filtered by dockerignore. This makes sure we are not +# copying over stuff that is accidentally generated or that we do not need (such as egg-info) +# if you want to add something that is missing and you expect to see it in the image you can +# add it with ! in .dockerignore next to the airflow, test etc. directories there +COPY . ${AIRFLOW_SOURCES}/ + # Those are additional constraints that are needed for some extras but we do not want to # force them on the main Airflow package. Currently we need no extra limits as PIP 23.1+ has much better # dependency resolution and we do not need to limit the versions of the dependencies @@ -1415,36 +1310,30 @@ ENV EAGER_UPGRADE_ADDITIONAL_REQUIREMENTS=${EAGER_UPGRADE_ADDITIONAL_REQUIREMENT # Usually we will install versions based on the dependencies in pyproject.toml and upgraded only if needed. 
# But in cron job we will install latest versions matching pyproject.toml to see if there is no breaking change # and push the constraints if everything is successful -RUN bash /scripts/docker/install_airflow.sh - -COPY --from=scripts entrypoint_ci.sh /entrypoint -COPY --from=scripts entrypoint_exec.sh /entrypoint-exec -RUN chmod a+x /entrypoint /entrypoint-exec +RUN --mount=type=cache,id=ci-$TARGETARCH-$DEPENDENCY_CACHE_EPOCH,target=/root/.cache/ bash /scripts/docker/install_airflow.sh COPY --from=scripts install_packaging_tools.sh install_additional_dependencies.sh /scripts/docker/ -# Additional python deps to install ARG ADDITIONAL_PYTHON_DEPS="" -RUN bash /scripts/docker/install_packaging_tools.sh; \ +ENV ADDITIONAL_PYTHON_DEPS=${ADDITIONAL_PYTHON_DEPS} + +RUN --mount=type=cache,id=ci-$TARGETARCH-$DEPENDENCY_CACHE_EPOCH,target=/root/.cache/ \ + bash /scripts/docker/install_packaging_tools.sh; \ if [[ -n "${ADDITIONAL_PYTHON_DEPS}" ]]; then \ bash /scripts/docker/install_additional_dependencies.sh; \ fi -# Install autocomplete for airflow -RUN if command -v airflow; then \ - register-python-argcomplete airflow >> ~/.bashrc ; \ - fi - -# Install autocomplete for Kubectl -RUN echo "source /etc/bash_completion" >> ~/.bashrc +COPY --from=scripts entrypoint_ci.sh /entrypoint +COPY --from=scripts entrypoint_exec.sh /entrypoint-exec +RUN chmod a+x /entrypoint /entrypoint-exec -# We can copy everything here. The Context is filtered by dockerignore. This makes sure we are not -# copying over stuff that is accidentally generated or that we do not need (such as egg-info) -# if you want to add something that is missing and you expect to see it in the image you can -# add it with ! in .dockerignore next to the airflow, test etc. directories there -COPY . 
${AIRFLOW_SOURCES}/ +# Install autocomplete for airflow and kubectl +RUN if command -v airflow; then \ + register-python-argcomplete airflow >> ~/.bashrc ; \ + fi; \ + echo "source /etc/bash_completion" >> ~/.bashrc WORKDIR ${AIRFLOW_SOURCES} diff --git a/dev/breeze/doc/06_managing_docker_images.rst b/dev/breeze/doc/06_managing_docker_images.rst index bb4c4f9e06f62..84e4e77a010f1 100644 --- a/dev/breeze/doc/06_managing_docker_images.rst +++ b/dev/breeze/doc/06_managing_docker_images.rst @@ -76,7 +76,7 @@ These are all available flags of ``pull`` command: Verifying CI image .................. -Finally, you can verify CI image by running tests - either with the pulled/built images or +You can verify CI image by running tests - either with the pulled/built images or with an arbitrary image. These are all available flags of ``verify`` command: @@ -86,6 +86,41 @@ These are all available flags of ``verify`` command: :width: 100% :alt: Breeze ci-image verify +Loading and saving CI image +........................... + +You can load and save PROD image - for example to transfer it to another machine or to load an image +that has been built in our CI. + +These are all available flags of ``save`` command: + +.. image:: ./images/output_ci-image_save.svg + :target: https://raw.githubusercontent.com/apache/airflow/main/dev/breeze/images/output_ci-image_save.svg + :width: 100% + :alt: Breeze ci-image save + +These are all available flags of ``load`` command: + +.. image:: ./images/output_ci-image_load.svg + :target: https://raw.githubusercontent.com/apache/airflow/main/dev/breeze/images/output_ci-image_load.svg + :width: 100% + :alt: Breeze ci-image load + +Images for every build from our CI are uploaded as artifacts to the +GitHub Action run (in summary) and can be downloaded from there for 2 days in order to reproduce the complete +environment used during the tests and loaded to the local Docker registry (note that you have +to use the same platform as the CI run). 
+ +You will find the artifacts for each image in the summary of the CI run. The artifacts are named +``ci-image-docker-export---_merge``. Those are compressed zip files that +contain the ".tar" image that should be used with ``--image-file`` flag of the load method. Make sure to +use the same ``--python`` version as the image was built with. + +.. image:: ./images/image_artifacts.png + :target: https://raw.githubusercontent.com/apache/airflow/main/dev/breeze/images/output_ci-image_load.svg + :width: 100% + :alt: Breeze image artifacts + PROD Image tasks ---------------- @@ -170,7 +205,7 @@ These are all available flags of ``pull-prod-image`` command: Verifying PROD image .................... -Finally, you can verify PROD image by running tests - either with the pulled/built images or +You can verify PROD image by running tests - either with the pulled/built images or with an arbitrary image. These are all available flags of ``verify-prod-image`` command: @@ -180,6 +215,31 @@ These are all available flags of ``verify-prod-image`` command: :width: 100% :alt: Breeze prod-image verify +Loading and saving PROD image +............................. + +You can load and save PROD image - for example to transfer it to another machine or to load an image +that has been built in our CI. + +These are all available flags of ``save`` command: + +.. image:: ./images/output_prod-image_save.svg + :target: https://raw.githubusercontent.com/apache/airflow/main/dev/breeze/images/output_prod-image_save.svg + :width: 100% + :alt: Breeze prod-image save + +These are all available flags of ``load`` command: + +.. 
image:: ./images/output-prod-image_load.svg + :target: https://raw.githubusercontent.com/apache/airflow/main/dev/breeze/images/output_prod-image_load.svg + :width: 100% + :alt: Breeze prod-image load + +Similarly as in case of CI images, Images for every build from our CI are uploaded as artifacts to the +GitHub Action run (in summary) and can be downloaded from there for 2 days in order to reproduce the complete +environment used during the tests and loaded to the local Docker registry (note that you have +to use the same platform as the CI run). + ------ Next step: Follow the `Breeze maintenance tasks <07_breeze_maintenance_tasks.rst>`_ to learn about tasks that diff --git a/dev/breeze/doc/ci/02_images.md b/dev/breeze/doc/ci/02_images.md index eb3af6ae6ce87..263b37d62b82b 100644 --- a/dev/breeze/doc/ci/02_images.md +++ b/dev/breeze/doc/ci/02_images.md @@ -329,22 +329,14 @@ new version of base Python is released. However, occasionally, you might need to rebuild images locally and push them directly to the registries to refresh them. -Every developer can also pull and run images being result of a specific +Every contributor can also pull and run images being result of a specific CI run in GitHub Actions. This is a powerful tool that allows to reproduce CI failures locally, enter the images and fix them much -faster. It is enough to pass `--image-tag` and the registry and Breeze -will download and execute commands using the same image that was used -during the CI tests. +faster. It is enough to download and uncompress the artifact that stores the +image and run ``breeze ci-image load -i `` to load the +image and mark the image as refreshed in the local cache. -For example this command will run the same Python 3.9 image as was used -in build identified with 9a621eaa394c0a0a336f8e1b31b35eff4e4ee86e commit -SHA with enabled rabbitmq integration. 
- -``` bash -breeze --image-tag 9a621eaa394c0a0a336f8e1b31b35eff4e4ee86e --python 3.9 --integration rabbitmq -``` - -You can see more details and examples in[Breeze](../README.rst) +You can see more details and examples in [Breeze](../06_managing_docker_images.rst) # Customizing the CI image @@ -427,8 +419,6 @@ can be used for CI images: | `PYTHON_MAJOR_MINOR_VERSION` | `3.9` | major/minor version of Python (should match base image) | | `DEPENDENCIES_EPOCH_NUMBER` | `2` | increasing this number will reinstall all apt dependencies | | `ADDITIONAL_PIP_INSTALL_FLAGS` | | additional `pip` flags passed to the installation commands (except when reinstalling `pip` itself) | -| `PIP_NO_CACHE_DIR` | `true` | if true, then no pip cache will be stored | -| `UV_NO_CACHE` | `true` | if true, then no uv cache will be stored | | `HOME` | `/root` | Home directory of the root user (CI image has root user as default) | | `AIRFLOW_HOME` | `/root/airflow` | Airflow's HOME (that's where logs and sqlite databases are stored) | | `AIRFLOW_SOURCES` | `/opt/airflow` | Mounted sources of Airflow | @@ -439,7 +429,7 @@ can be used for CI images: | `AIRFLOW_CONSTRAINTS_REFERENCE` | | reference (branch or tag) from GitHub repository from which constraints are used. By default it is set to `constraints-main` but can be `constraints-2-X`. | | `AIRFLOW_EXTRAS` | `all` | extras to install | | `UPGRADE_INVALIDATION_STRING` | | If set to any random value the dependencies are upgraded to newer versions. In CI it is set to build id. | -| `AIRFLOW_PRE_CACHED_PIP_PACKAGES` | `true` | Allows to pre-cache airflow PIP packages from the GitHub of Apache Airflow This allows to optimize iterations for Image builds and speeds up CI jobs. 
| | `ADDITIONAL_AIRFLOW_EXTRAS` | | additional extras to install | | `ADDITIONAL_PYTHON_DEPS` | | additional Python dependencies to install | | `DEV_APT_COMMAND` | | Dev apt command executed before dev deps are installed in the first part of image | diff --git a/dev/breeze/doc/ci/05_workflows.md b/dev/breeze/doc/ci/05_workflows.md index 130774a730cb6..63c94871c5d9d 100644 --- a/dev/breeze/doc/ci/05_workflows.md +++ b/dev/breeze/doc/ci/05_workflows.md @@ -206,9 +206,9 @@ code. | Build info | Prints detailed information about the build | Yes | Yes | Yes | Yes | | Push early cache & images | Pushes early cache/images to GitHub Registry | | Yes | | | | Check that image builds quickly | Checks that image builds quickly | | Yes | | Yes | -| Build CI images | Builds images in-workflow (not in the build images) | | Yes | Yes (1) | Yes (4) | +| Build CI images | Builds images | | Yes | Yes (1) | Yes (4) | | Generate constraints/CI verify | Generate constraints for the build and verify CI image | Yes (2) | Yes (2) | Yes (2) | Yes (2) | -| Build PROD images | Builds images in-workflow (not in the build images) | | Yes | Yes (1) | Yes (4) | +| Build PROD images | Builds images | | Yes | Yes (1) | Yes (4) | | Run breeze tests | Run unit tests for Breeze | Yes | Yes | Yes | Yes | | Test OpenAPI client gen | Tests if OpenAPIClient continues to generate | Yes | Yes | Yes | Yes | | React WWW tests | React UI tests for new Airflow UI | Yes | Yes | Yes | Yes | diff --git a/dev/breeze/doc/ci/08_running_ci_locally.md b/dev/breeze/doc/ci/08_running_ci_locally.md index 4fd0a7c993799..7300c8fde0b84 100644 --- a/dev/breeze/doc/ci/08_running_ci_locally.md +++ b/dev/breeze/doc/ci/08_running_ci_locally.md @@ -36,17 +36,13 @@ checks in CI and locally, but another part is the CI environment which is replicated locally with Breeze. 
You can read more about Breeze in -[README.rst](../README.rst) but in essence it is a script -that allows you to re-create CI environment in your local development -instance and interact with it. In its basic form, when you do -development you can run all the same tests that will be run in CI - but +[README.rst](../README.rst) but in essence it is a Python wrapper around +docker commands that allows you (among others) to re-create CI environment +in your local development instance and interact with it. +In its basic form, when you do development you can run all the same +tests that will be run in CI - but locally, before you submit them as PR. Another use case where Breeze is -useful is when tests fail on CI. You can take the full `COMMIT_SHA` of -the failed build pass it as `--image-tag` parameter of Breeze and it -will download the very same version of image that was used in CI and run -it locally. This way, you can very easily reproduce any failed test that -happens in CI - even if you do not check out the sources connected with -the run. +useful is when tests fail on CI. All our CI jobs are executed via `breeze` commands. You can replicate exactly what our CI is doing by running the sequence of corresponding @@ -62,39 +58,16 @@ exactly what our CI is doing by running the sequence of corresponding In the output of the CI jobs, you will find both - the flags passed and environment variables set. +Every contributor can also pull and run images being result of a specific +CI run in GitHub Actions. This is a powerful tool that allows to +reproduce CI failures locally, enter the images and fix them much +faster. It is enough to download and uncompress the artifact that stores the +image and run ``breeze ci-image load -i <file> --python <python>`` +to load the image and mark the image as refreshed in the local cache. 
+ You can read more about it in [Breeze](../README.rst) and [Testing](../../../../contributing-docs/09_testing.rst) -Since we store images from every CI run, you should be able easily -reproduce any of the CI tests problems locally. You can do it by pulling -and using the right image and running it with the right docker command, -For example knowing that the CI job was for commit -`cd27124534b46c9688a1d89e75fcd137ab5137e3`: - -``` bash -docker pull ghcr.io/apache/airflow/main/ci/python3.9:cd27124534b46c9688a1d89e75fcd137ab5137e3 - -docker run -it ghcr.io/apache/airflow/main/ci/python3.9:cd27124534b46c9688a1d89e75fcd137ab5137e3 -``` - -But you usually need to pass more variables and complex setup if you -want to connect to a database or enable some integrations. Therefore it -is easiest to use [Breeze](../README.rst) for that. For -example if you need to reproduce a MySQL environment in python 3.9 -environment you can run: - -``` bash -breeze --image-tag cd27124534b46c9688a1d89e75fcd137ab5137e3 --python 3.9 --backend mysql -``` - -You will be dropped into a shell with the exact version that was used -during the CI run and you will be able to run pytest tests manually, -easily reproducing the environment that was used in CI. Note that in -this case, you do not need to checkout the sources that were used for -that run - they are already part of the image - but remember that any -changes you make in those sources are lost when you leave the image as -the sources are not mapped from your host machine. - Depending whether the scripts are run locally via [Breeze](../README.rst) or whether they are run in `Build Images` or `Tests` workflows they can take different values. @@ -127,9 +100,9 @@ passed to `breeze shell` command. 
# Upgrade to newer dependencies -By default we are using a tested set of dependency constraints stored in separated "orphan" branches of the airflow repository +By default, we are using a tested set of dependency constraints stored in separated "orphan" branches of the airflow repository ("constraints-main, "constraints-2-0") but when this flag is set to anything but false (for example random value), -they are not used used and "eager" upgrade strategy is used when installing dependencies. We set it to true in case of direct +they are not used and "eager" upgrade strategy is used when installing dependencies. We set it to true in case of direct pushes (merges) to main and scheduled builds so that the constraints are tested. In those builds, in case we determine that the tests pass we automatically push latest set of "tested" constraints to the repository. Setting the value to random value is best way to assure that constraints are upgraded even if there is no change to pyproject.toml diff --git a/dev/breeze/doc/images/image_artifacts.png b/dev/breeze/doc/images/image_artifacts.png new file mode 100644 index 0000000000000..485a6a2c9cf10 Binary files /dev/null and b/dev/breeze/doc/images/image_artifacts.png differ diff --git a/dev/breeze/doc/images/output_ci-image.svg b/dev/breeze/doc/images/output_ci-image.svg index 8d7c7893f55c8..6fc5b425c5c7a 100644 --- a/dev/breeze/doc/images/output_ci-image.svg +++ b/dev/breeze/doc/images/output_ci-image.svg @@ -1,4 +1,4 @@ - +