diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 413d5d43e338..c1dd57cfe06c 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -101,95 +101,6 @@ jobs: if: steps.cache.outputs.cache-hit != 'true' && matrix.cache == 'true' run: rm -rf ~/.m2/repository/io/trino/trino-* - artifact-checks: - needs: path-filters - if: github.event_name != 'pull_request' || needs.path-filters.outputs.non_docs == 'true' - runs-on: ubuntu-latest - timeout-minutes: 45 - steps: - - uses: actions/checkout@v5 - with: - fetch-depth: 0 # checkout all commits, as the build result depends on `git describe` equivalent - ref: | - ${{ github.event_name == 'repository_dispatch' && - github.event.client_payload.pull_request.head.sha == github.event.client_payload.slash_command.args.named.sha && - format('refs/pull/{0}/head', github.event.client_payload.pull_request.number) || '' }} - - uses: ./.github/actions/setup - timeout-minutes: 15 - with: - cache: 'restore' - cleanup-node: true - - name: Maven Install - run: | - export MAVEN_OPTS="${MAVEN_INSTALL_OPTS}" - $MAVEN clean install ${MAVEN_FAST_INSTALL} -pl '!:trino-docs' - - name: Test JDBC shading - # Run only integration tests to verify JDBC driver shading - run: | - export MAVEN_OPTS="${MAVEN_INSTALL_OPTS}" - $MAVEN failsafe:integration-test failsafe:verify -B --strict-checksums -P ci -pl :trino-jdbc - - name: Clean Maven Output - run: $MAVEN clean -pl '!:trino-server,!:trino-cli' - - uses: docker/setup-qemu-action@29109295f81e9208d7d86ff1c6c12d2833863392 # v3 - with: - platforms: arm64,ppc64le - - name: Build and Test Docker Image - run: core/docker/build.sh - - check-commits-dispatcher: - needs: path-filters - if: github.event_name == 'pull_request' && needs.path-filters.outputs.non_docs == 'true' - runs-on: ubuntu-latest - outputs: - matrix: ${{ steps.set-matrix.outputs.matrix }} - steps: - - uses: actions/checkout@v5 - with: - fetch-depth: 0 # checkout all commits to be able to determine merge base - - name: Block illegal commits - uses: trinodb/github-actions/block-commits@c2991972560c5219d9ae5fb68c0c9d687ffcdd10 - with: - action-merge: fail - action-fixup: none - - name: Set matrix (dispatch commit checks) - id: set-matrix - run: | - # Make sure the PR branch contains the compile-commit composite job - if git merge-base --is-ancestor $( git rev-list HEAD -- .github/actions/compile-commit/action.yml | tail -n 1 ) ${{ github.event.pull_request.head.sha }} - then - # The HEAD commit of the PR can be safely ignored since it's already compiled in other jobs - # This is achieved by adding a tilde (~) after the HEAD sha - git log --reverse --pretty=format:'%H,%T,"%s"' refs/remotes/origin/${{ github.event.pull_request.base.ref }}..${{ github.event.pull_request.head.sha }}~ | ./.github/bin/prepare-check-commits-matrix.py > commit-matrix.json - else - echo -n '' > commit-matrix.json - fi - - echo "Commit matrix: $(jq '.' commit-matrix.json)" - echo "matrix=$(jq -c '.' 
commit-matrix.json)" >> $GITHUB_OUTPUT - - check-commit: - needs: check-commits-dispatcher - runs-on: ubuntu-latest - timeout-minutes: 20 - if: github.event_name == 'pull_request' && needs.check-commits-dispatcher.outputs.matrix != '' - strategy: - fail-fast: false - matrix: ${{ fromJson(needs.check-commits-dispatcher.outputs.matrix) }} - steps: - - uses: actions/checkout@v5 - if: matrix.commit != '' - with: - fetch-depth: 0 # checkout all commits to be able to determine merge base - ref: ${{ matrix.commit }} - # This composite job must be entirely standalone, and checked out from the correct commit before being executed. - # It can't accept any parameters defined in this workflow, because the values of those parameters would always be taken from - # PR HEAD since that is the commit the workflow is started for. This could lead to problems if those parameters were changed - # in the middle of a PR branch. - - uses: ./.github/actions/compile-commit - if: matrix.commit != '' - with: - base_ref: ${{ github.event.pull_request.base.ref }} - error-prone-checks: needs: path-filters if: github.event_name != 'pull_request' || needs.path-filters.outputs.non_docs == 'true' @@ -220,195 +131,6 @@ jobs: $MAVEN ${MAVEN_TEST} -T 1C clean compile test-compile -DskipTests -Dair.check.skip-all=true ${MAVEN_GIB} -Dgib.buildUpstream=never -P errorprone-compiler \ -pl '!:trino-docs,!:trino-server' - test-jdbc-compatibility: - needs: path-filters - if: github.event_name != 'pull_request' || needs.path-filters.outputs.non_docs == 'true' - runs-on: ubuntu-latest - timeout-minutes: 30 - steps: - - uses: actions/checkout@v5 - with: - fetch-depth: 0 # checkout tags so version in Manifest is set properly - ref: | - ${{ github.event_name == 'repository_dispatch' && - github.event.client_payload.pull_request.head.sha == github.event.client_payload.slash_command.args.named.sha && - format('refs/pull/{0}/head', github.event.client_payload.pull_request.number) || '' }} - - uses: ./.github/actions/setup - timeout-minutes: 15 - with: - cache: restore - cleanup-node: 'true' - - name: Maven Install - run: | - export MAVEN_OPTS="${MAVEN_INSTALL_OPTS}" - $MAVEN clean install ${MAVEN_FAST_INSTALL} ${MAVEN_GIB} -Dgib.logImpactedTo=gib-impacted.log -pl '!:trino-docs,!:trino-server' - - name: Test old JDBC vs current server - id: tests-old - run: | - if [ ! -f gib-impacted.log ] || grep -q testing/trino-test-jdbc-compatibility-old-driver gib-impacted.log; then - testing/trino-test-jdbc-compatibility-old-driver/bin/run_tests.sh - fi - - name: Test current JDBC vs old server - id: tests-current - if: always() - run: | - if [ ! 
-f gib-impacted.log ] || grep -q testing/trino-test-jdbc-compatibility-old-server gib-impacted.log; then - $MAVEN test ${MAVEN_TEST} -pl :trino-test-jdbc-compatibility-old-server - fi - - name: Upload test results - uses: ./.github/actions/process-test-results - if: always() - with: - has-failed-tests: ${{ steps.tests-old.outcome == 'failure' || steps.tests-current.outcome == 'failure' }} - upload-heap-dump: ${{ env.SECRETS_PRESENT == '' && github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name != github.repository }} - - hive-tests: - needs: path-filters - if: github.event_name != 'pull_request' || needs.path-filters.outputs.non_docs == 'true' - runs-on: ubuntu-latest - strategy: - fail-fast: false - matrix: - config: - - config-hdp3 - # TODO: config-apache-hive3 - timeout-minutes: 60 - steps: - - uses: actions/checkout@v5 - with: - fetch-depth: 0 # checkout all commits to be able to determine merge base for GIB - ref: | - ${{ github.event_name == 'repository_dispatch' && - github.event.client_payload.pull_request.head.sha == github.event.client_payload.slash_command.args.named.sha && - format('refs/pull/{0}/head', github.event.client_payload.pull_request.number) || '' }} - - uses: ./.github/actions/setup - timeout-minutes: 15 - with: - cache: restore - - name: Install Hive Module - run: | - export MAVEN_OPTS="${MAVEN_INSTALL_OPTS}" - $MAVEN clean install ${MAVEN_FAST_INSTALL} ${MAVEN_GIB} -Dgib.logImpactedTo=gib-impacted.log -am -pl :trino-hive - - name: Run Hive AWS Tests - id: tests - env: - AWS_ACCESS_KEY_ID: ${{ vars.TRINO_AWS_ACCESS_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.TRINO_AWS_SECRET_ACCESS_KEY }} - AWS_REGION: ${{ vars.TRINO_AWS_REGION }} - S3_BUCKET: ${{ vars.TRINO_S3_BUCKET }} - S3_BUCKET_ENDPOINT: "s3.${{ vars.TRINO_AWS_REGION }}.amazonaws.com" - run: | - if [ "${AWS_ACCESS_KEY_ID}" != "" ] && ( [ ! 
-f gib-impacted.log ] || grep -q plugin/trino-hive gib-impacted.log ); then - $MAVEN test ${MAVEN_TEST} -pl :trino-hive -P aws-tests - fi - - name: Upload test results - uses: ./.github/actions/process-test-results - if: always() - with: - has-failed-tests: ${{ steps.tests.outcome == 'failure' }} - upload-heap-dump: ${{ env.SECRETS_PRESENT == '' && github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name != github.repository }} - - name: Update PR check - uses: ./.github/actions/update-check - if: >- - failure() && - github.event_name == 'repository_dispatch' && - github.event.client_payload.slash_command.args.named.sha != '' && - github.event.client_payload.pull_request.head.sha == github.event.client_payload.slash_command.args.named.sha - with: - pull_request_number: ${{ github.event.client_payload.pull_request.number }} - check_name: ${{ github.job }} (${{ matrix.config }}) with secrets - conclusion: ${{ job.status }} - github_token: ${{ secrets.GITHUB_TOKEN }} - - test-other-modules: - needs: path-filters - if: github.event_name != 'pull_request' || needs.path-filters.outputs.non_docs == 'true' - runs-on: ubuntu-latest - timeout-minutes: 60 - steps: - - uses: actions/checkout@v5 - with: - fetch-depth: 0 # checkout all commits to be able to determine merge base for GIB - ref: | - ${{ github.event_name == 'repository_dispatch' && - github.event.client_payload.pull_request.head.sha == github.event.client_payload.slash_command.args.named.sha && - format('refs/pull/{0}/head', github.event.client_payload.pull_request.number) || '' }} - - uses: ./.github/actions/setup - timeout-minutes: 15 - with: - cache: restore - cleanup-node: true - - name: Maven Install - run: | - export MAVEN_OPTS="${MAVEN_INSTALL_OPTS}" - $MAVEN clean install ${MAVEN_FAST_INSTALL} ${MAVEN_GIB} -pl '!:trino-docs,!:trino-server' - - name: Maven Tests - id: tests - run: | - $MAVEN test ${MAVEN_TEST} -pl ' - !:trino-base-jdbc, - !:trino-bigquery, - !:trino-cassandra, - !:trino-clickhouse, - !:trino-delta-lake, - !:trino-docs, - !:trino-druid, - !:trino-duckdb, - !:trino-elasticsearch, - !:trino-exasol, - !:trino-faker, - !:trino-faulttolerant-tests, - !:trino-filesystem, - !:trino-filesystem-alluxio, - !:trino-filesystem-cache-alluxio, - !:trino-filesystem-azure, - !:trino-filesystem-gcs, - !:trino-filesystem-manager, - !:trino-filesystem-s3, - !:trino-geospatial, - !:trino-google-sheets, - !:trino-hdfs, - !:trino-hive, - !:trino-hive-formats, - !:trino-hudi, - !:trino-iceberg, - !:trino-ignite, - !:trino-jdbc, - !:trino-kafka, - !:trino-lakehouse, - !:trino-main, - !:trino-mariadb, - !:trino-memory, - !:trino-mongodb, - !:trino-mysql, - !:trino-openlineage, - !:trino-opensearch, - !:trino-oracle, - !:trino-orc, - !:trino-parquet, - !:trino-pinot, - !:trino-postgresql, - !:trino-redis, - !:trino-redshift, - !:trino-resource-group-managers, - !:trino-server-core, - !:trino-server, - !:trino-singlestore, - !:trino-snowflake, - !:trino-spi, - !:trino-sqlserver, - !:trino-test-jdbc-compatibility-old-server, - !:trino-tests, - !:trino-thrift, - !:trino-vertica, - !:trino-web-ui' - - name: Upload test results - uses: ./.github/actions/process-test-results - if: always() - with: - has-failed-tests: ${{ steps.tests.outcome == 'failure' }} - upload-heap-dump: ${{ env.SECRETS_PRESENT == '' && github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name != github.repository }} - build-test-matrix: needs: path-filters if: github.event_name != 'pull_request' || 
needs.path-filters.outputs.non_docs == 'true' @@ -447,81 +169,8 @@ jobs: touch gib-impacted.log cat < .github/test-matrix.yaml include: - - modules: - - client/trino-jdbc - - core/trino-spi - - core/trino-web-ui - - modules: - - plugin/trino-base-jdbc - - plugin/trino-faker - - plugin/trino-geospatial - - plugin/trino-memory - - plugin/trino-openlineage - - plugin/trino-thrift - - modules: - - lib/trino-orc - - lib/trino-parquet - - modules: - - lib/trino-filesystem - - lib/trino-filesystem-azure - - lib/trino-filesystem-alluxio - - lib/trino-filesystem-cache-alluxio - - lib/trino-filesystem-gcs - - lib/trino-filesystem-manager - - lib/trino-filesystem-s3 - - lib/trino-hdfs - - lib/trino-hive-formats - - { modules: core/trino-main } - - { modules: lib/trino-filesystem-azure, profile: cloud-tests } - - { modules: lib/trino-filesystem-gcs, profile: cloud-tests } - - { modules: lib/trino-filesystem-s3, profile: cloud-tests } - - { modules: lib/trino-hdfs, profile: cloud-tests } - - { modules: plugin/trino-bigquery } - - { modules: plugin/trino-bigquery, profile: cloud-tests-2 } - - { modules: plugin/trino-cassandra } - - { modules: plugin/trino-clickhouse } - - { modules: plugin/trino-delta-lake } - - { modules: plugin/trino-delta-lake, profile: cloud-tests } - - { modules: plugin/trino-delta-lake, profile: fte-tests } - - { modules: plugin/trino-druid } - - { modules: plugin/trino-duckdb } - - { modules: plugin/trino-elasticsearch } - - { modules: plugin/trino-exasol } - - { modules: plugin/trino-google-sheets } - - { modules: plugin/trino-hive } - - { modules: plugin/trino-hive, profile: fte-tests } - - { modules: plugin/trino-hive, profile: test-parquet } - - { modules: plugin/trino-hudi } - - { modules: plugin/trino-iceberg } - - { modules: plugin/trino-iceberg, profile: cloud-tests } - - { modules: plugin/trino-iceberg, profile: fte-tests } - - { modules: plugin/trino-iceberg, profile: minio-and-avro } - - { modules: plugin/trino-ignite } - - { modules: plugin/trino-kafka } - - { modules: plugin/trino-lakehouse } - - { modules: plugin/trino-mariadb } - - { modules: plugin/trino-mongodb } - - { modules: plugin/trino-mysql } - - { modules: plugin/trino-openlineage } - - { modules: plugin/trino-opensearch } - - { modules: plugin/trino-oracle } - - { modules: plugin/trino-pinot } - - { modules: plugin/trino-postgresql } - - { modules: plugin/trino-redis } - - { modules: plugin/trino-redshift } - - { modules: plugin/trino-redshift, profile: cloud-tests } - - { modules: plugin/trino-redshift, profile: fte-tests } - - { modules: plugin/trino-resource-group-managers } - - { modules: plugin/trino-singlestore } - - { modules: plugin/trino-snowflake } - - { modules: plugin/trino-snowflake, profile: cloud-tests } - - { modules: plugin/trino-sqlserver } - - { modules: plugin/trino-vertica } - - { modules: testing/trino-faulttolerant-tests, profile: default } - - { modules: testing/trino-faulttolerant-tests, profile: test-fault-tolerant-delta } - - { modules: testing/trino-faulttolerant-tests, profile: test-fault-tolerant-hive } - - { modules: testing/trino-faulttolerant-tests, profile: test-fault-tolerant-iceberg } - - { modules: testing/trino-tests } + - { modules: plugin/trino-teradata } + - { modules: plugin/trino-teradata, profile: clearscape-tests } EOF ./.github/bin/build-matrix-from-impacted.py -v -i gib-impacted.log -m .github/test-matrix.yaml -o matrix.json echo "Matrix: $(jq '.' 
matrix.json)" @@ -534,7 +183,7 @@ jobs: strategy: fail-fast: false matrix: ${{ fromJson(needs.build-test-matrix.outputs.matrix) }} - timeout-minutes: 60 + timeout-minutes: 120 steps: - uses: actions/checkout@v5 with: @@ -566,6 +215,7 @@ jobs: && ! (contains(matrix.modules, 'trino-filesystem-gcs') && contains(matrix.profile, 'cloud-tests')) && ! (contains(matrix.modules, 'trino-filesystem-s3') && contains(matrix.profile, 'cloud-tests')) && ! (contains(matrix.modules, 'trino-hdfs') && contains(matrix.profile, 'cloud-tests')) + && ! (contains(matrix.modules, 'trino-teradata') && contains(matrix.profile, 'clearscape-tests')) run: $MAVEN test ${MAVEN_TEST} -pl ${{ matrix.modules }} ${{ matrix.profile != '' && format('-P {0}', matrix.profile) || '' }} # Additional tests for selected modules - name: HDFS file system cache isolated JVM tests @@ -791,6 +441,15 @@ jobs: # Cancelled workflows may have left the ephemeral cluster running if: always() run: .github/bin/redshift/delete-aws-redshift.sh + - name: Teradata Tests + id: tests-teradata + env: + CLEARSCAPE_TOKEN: ${{ secrets.CLEARSCAPE_TOKEN }} + CLEARSCAPE_PASSWORD: ${{ secrets.CLEARSCAPE_PASSWORD }} + CLEARSCAPE_REGION: ${{ vars.CLEARSCAPE_REGION }} + if: matrix.modules == 'plugin/trino-teradata' && contains(matrix.profile, 'clearscape-tests') && (env.CLEARSCAPE_TOKEN != '' || env.CLEARSCAPE_PASSWORD != '') + run: | + $MAVEN test ${MAVEN_TEST} -pl :trino-teradata -Pclearscape-tests - name: Sanitize artifact name if: always() run: | @@ -834,308 +493,15 @@ jobs: conclusion: ${{ job.status }} github_token: ${{ secrets.GITHUB_TOKEN }} - build-pt: - needs: path-filters - if: github.event_name != 'pull_request' || needs.path-filters.outputs.non_docs == 'true' - runs-on: ubuntu-latest - outputs: - matrix: ${{ steps.set-matrix.outputs.matrix }} - product-tests-changed: ${{ steps.filter.outputs.product-tests }} - steps: - - uses: actions/checkout@v5 - with: - fetch-depth: 0 # checkout all commits to be able to determine merge base for GIB - ref: | - ${{ github.event_name == 'repository_dispatch' && - github.event.client_payload.pull_request.head.sha == github.event.client_payload.slash_command.args.named.sha && - format('refs/pull/{0}/head', github.event.client_payload.pull_request.number) || '' }} - - uses: ./.github/actions/setup - timeout-minutes: 15 - with: - cache: restore - cleanup-node: true - - uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3 - id: filter - with: - filters: | - product-tests: - - 'testing/trino-product-tests*/**' - - 'testing/trino-testing-services/**' - # run all tests when there are any changes in the trino-server Maven module - # because it doesn't define it's Trino dependencies and - # it relies on the Provisio plugin to find the right artifacts - - 'core/trino-server/**' - - '.github/**' - - name: Maven Install - run: | - export MAVEN_OPTS="${MAVEN_INSTALL_OPTS}" - $MAVEN clean install ${MAVEN_FAST_INSTALL} -pl '!:trino-docs' - - name: Map impacted plugins to features - run: | - export MAVEN_OPTS="${MAVEN_INSTALL_OPTS}" - # build a list of impacted modules, ignoring modules that cannot affect either product tests or Trino - $MAVEN validate ${MAVEN_FAST_INSTALL} ${MAVEN_GIB} -Dgib.logImpactedTo=gib-impacted.log -pl '!:trino-docs,!:trino-tests,!:trino-faulttolerant-tests' - # GIB doesn't run on master, so make sure the file always exist - touch gib-impacted.log - testing/trino-plugin-reader/target/trino-plugin-reader-*-executable.jar -i gib-impacted.log -p 
core/trino-server/target/trino-server-*-hardlinks/plugin > impacted-features.log - echo "Impacted plugin features:" - cat impacted-features.log - - name: Product tests artifact - uses: actions/upload-artifact@v5 - with: - name: product tests and server tarball - path: | - core/trino-server/target/*.tar.gz - impacted-features.log - testing/trino-product-tests-launcher/target/*.jar - testing/trino-product-tests/target/*-executable.jar - client/trino-cli/target/*-executable.jar - retention-days: 1 - - id: prepare-matrix-template - run: | - cat < .github/test-pt-matrix.yaml - config: - - default - suite: - - suite-1 - - suite-2 - - suite-3 - # suite-4 does not exist - - suite-5 - - suite-6-non-generic - - suite-7-non-generic - - suite-hive-transactional - - suite-azure - - suite-delta-lake-databricks122 - - suite-delta-lake-databricks133 - - suite-delta-lake-databricks143 - - suite-delta-lake-databricks154 - - suite-delta-lake-databricks164 - - suite-exasol - - suite-ranger - - suite-gcs - - suite-hive4 - - suite-clients - - suite-functions - - suite-tpch - - suite-tpcds - - suite-storage-formats-detailed - - suite-parquet - - suite-oauth2 - - suite-ldap - - suite-loki - - suite-compatibility - - suite-all-connectors-smoke - - suite-delta-lake-oss - - suite-kafka - - suite-cassandra - - suite-clickhouse - - suite-mysql - - suite-iceberg - - suite-snowflake - - suite-hudi - - suite-ignite - exclude: - - suite: suite-azure - ignore exclusion if: >- - ${{ env.CI_SKIP_SECRETS_PRESENCE_CHECKS != '' || - vars.AZURE_ABFS_HIERARCHICAL_CONTAINER != '' || - vars.AZURE_ABFS_HIERARCHICAL_ACCOUNT != '' || - secrets.AZURE_ABFS_HIERARCHICAL_ACCESS_KEY != '' }} - - - suite: suite-gcs - ignore exclusion if: >- - ${{ env.CI_SKIP_SECRETS_PRESENCE_CHECKS != '' || secrets.GCP_CREDENTIALS_KEY != '' }} - - - suite: suite-delta-lake-databricks122 - ignore exclusion if: >- - ${{ env.CI_SKIP_SECRETS_PRESENCE_CHECKS != '' || secrets.DATABRICKS_TOKEN != '' }} - - suite: suite-delta-lake-databricks133 - ignore exclusion if: >- - ${{ env.CI_SKIP_SECRETS_PRESENCE_CHECKS != '' || secrets.DATABRICKS_TOKEN != '' }} - - suite: suite-delta-lake-databricks143 - ignore exclusion if: >- - ${{ env.CI_SKIP_SECRETS_PRESENCE_CHECKS != '' || secrets.DATABRICKS_TOKEN != '' }} - - suite: suite-delta-lake-databricks154 - ignore exclusion if: >- - ${{ env.CI_SKIP_SECRETS_PRESENCE_CHECKS != '' || secrets.DATABRICKS_TOKEN != '' }} - - suite: suite-delta-lake-databricks164 - ignore exclusion if: >- - ${{ env.CI_SKIP_SECRETS_PRESENCE_CHECKS != '' || secrets.DATABRICKS_TOKEN != '' }} - - suite: suite-snowflake - ignore exclusion if: >- - ${{ env.CI_SKIP_SECRETS_PRESENCE_CHECKS != '' || secrets.SNOWFLAKE_PASSWORD != '' }} - - ignore exclusion if: - # Do not use this property outside of the matrix configuration. - # - # This is added to all matrix entries so they may be conditionally - # excluded by adding them to the excludes list with a GHA expression - # for this property. - # - If the expression evaluates to true, it will never match the a - # actual value of the property, and will therefore not be excluded. - # - If the expression evaluates to false, it will match the actual - # value of the property, and the exclusion will apply normally. - - "false" - include: - # this suite is designed specifically for apache-hive3. TODO remove the suite once we can run all regular tests on apache-hive3. 
- - config: apache-hive3 - suite: suite-hms-only - EOF - - name: Build PT matrix (all) - if: | - github.event_name != 'pull_request' || - steps.filter.outputs.product-tests == 'true' || - contains(github.event.pull_request.labels.*.name, 'tests:all') || - contains(github.event.pull_request.labels.*.name, 'tests:all-product') - run: | - # converts entire YAML file into JSON - no filtering since we want all PTs to run - ./.github/bin/build-pt-matrix-from-impacted-connectors.py -v -m .github/test-pt-matrix.yaml -o matrix.json - - name: Build PT matrix (impacted-features) - if: | - github.event_name == 'pull_request' && - steps.filter.outputs.product-tests == 'false' && - !contains(github.event.pull_request.labels.*.name, 'tests:all') && - !contains(github.event.pull_request.labels.*.name, 'product-tests:all') - # all these envs are required to be set by some product test environments - env: - ABFS_CONTAINER: "" - ABFS_ACCOUNT: "" - ABFS_ACCESS_KEY: "" - S3_BUCKET: "" - AWS_REGION: "" - TRINO_AWS_ACCESS_KEY_ID: "" - TRINO_AWS_SECRET_ACCESS_KEY: "" - DATABRICKS_122_JDBC_URL: "" - DATABRICKS_133_JDBC_URL: "" - DATABRICKS_143_JDBC_URL: "" - DATABRICKS_154_JDBC_URL: "" - DATABRICKS_164_JDBC_URL: "" - DATABRICKS_LOGIN: "" - DATABRICKS_TOKEN: "" - GCP_CREDENTIALS_KEY: "" - GCP_STORAGE_BUCKET: "" - SNOWFLAKE_URL: "" - SNOWFLAKE_USER: "" - SNOWFLAKE_PASSWORD: "" - SNOWFLAKE_DATABASE: "" - SNOWFLAKE_ROLE: "" - SNOWFLAKE_WAREHOUSE: "" - TESTCONTAINERS_NEVER_PULL: true - run: | - # converts filtered YAML file into JSON - ./.github/bin/build-pt-matrix-from-impacted-connectors.py -v -m .github/test-pt-matrix.yaml -i impacted-features.log -o matrix.json - - id: set-matrix - run: | - echo "Matrix: $(jq '.' matrix.json)" - echo "matrix=$(cat matrix.json)" >> $GITHUB_OUTPUT - - pt: - runs-on: 'ubuntu-latest' - # explicitly define the name to avoid adding the value of the `ignore exclusion if` matrix item - name: pt (${{ matrix.config }}, ${{ matrix.suite }}, ${{ matrix.jdk }}) - if: needs.build-pt.outputs.matrix != '{}' - strategy: - fail-fast: false - matrix: ${{ fromJson(needs.build-pt.outputs.matrix) }} - # PT Launcher's timeout defaults to 2h, add some margin - timeout-minutes: 130 - needs: build-pt - steps: - - uses: actions/checkout@v5 - with: - fetch-depth: 0 # checkout all commits, as the build result depends on `git describe` equivalent - ref: | - ${{ github.event_name == 'repository_dispatch' && - github.event.client_payload.pull_request.head.sha == github.event.client_payload.slash_command.args.named.sha && - format('refs/pull/{0}/head', github.event.client_payload.pull_request.number) || '' }} - - uses: ./.github/actions/setup - timeout-minutes: 15 - with: - # The job doesn't build anything, so the ~/.m2/repository cache isn't useful - cache: 'false' - - name: Product tests artifact - uses: actions/download-artifact@v6 - with: - name: product tests and server tarball - - name: Fix artifact permissions - run: | - find . 
-type f -name \*-executable.jar -exec chmod 0777 {} \; - - name: Enable impact analysis - if: | - needs.build-pt.outputs.product-tests-changed == 'false' && - github.event_name == 'pull_request' && - !contains(github.event.pull_request.labels.*.name, 'tests:all') && - !contains(github.event.pull_request.labels.*.name, 'tests:all-product') - run: echo "PTL_OPTS=--impacted-features impacted-features.log" >> $GITHUB_ENV - - name: Product Tests - id: tests - env: - ABFS_CONTAINER: ${{ vars.AZURE_ABFS_HIERARCHICAL_CONTAINER }} - ABFS_ACCOUNT: ${{ vars.AZURE_ABFS_HIERARCHICAL_ACCOUNT }} - ABFS_ACCESS_KEY: ${{ secrets.AZURE_ABFS_HIERARCHICAL_ACCESS_KEY }} - S3_BUCKET: ${{ vars.TRINO_S3_BUCKET }} - AWS_REGION: ${{ vars.TRINO_AWS_REGION }} - TRINO_AWS_ACCESS_KEY_ID: ${{ vars.TRINO_AWS_ACCESS_KEY_ID }} - TRINO_AWS_SECRET_ACCESS_KEY: ${{ secrets.TRINO_AWS_SECRET_ACCESS_KEY }} - DATABRICKS_122_JDBC_URL: ${{ vars.DATABRICKS_122_JDBC_URL }} - DATABRICKS_133_JDBC_URL: ${{ vars.DATABRICKS_133_JDBC_URL }} - DATABRICKS_143_JDBC_URL: ${{ vars.DATABRICKS_143_JDBC_URL }} - DATABRICKS_154_JDBC_URL: ${{ vars.DATABRICKS_154_JDBC_URL }} - DATABRICKS_164_JDBC_URL: ${{ vars.DATABRICKS_164_JDBC_URL }} - DATABRICKS_LOGIN: token - DATABRICKS_TOKEN: ${{ secrets.DATABRICKS_TOKEN }} - GCP_CREDENTIALS_KEY: ${{ secrets.GCP_CREDENTIALS_KEY }} - GCP_STORAGE_BUCKET: ${{ vars.GCP_STORAGE_BUCKET }} - SNOWFLAKE_URL: ${{ vars.SNOWFLAKE_URL }} - SNOWFLAKE_USER: ${{ vars.SNOWFLAKE_USER }} - SNOWFLAKE_PASSWORD: ${{ secrets.SNOWFLAKE_PASSWORD }} - SNOWFLAKE_DATABASE: ${{ vars.SNOWFLAKE_DATABASE }} - SNOWFLAKE_ROLE: ${{ vars.SNOWFLAKE_ROLE }} - SNOWFLAKE_WAREHOUSE: ${{ vars.SNOWFLAKE_WAREHOUSE }} - run: | - exec testing/trino-product-tests-launcher/target/trino-product-tests-launcher-*-executable.jar suite run \ - --suite ${{ matrix.suite }} \ - --config config-${{ matrix.config }} \ - ${PTL_OPTS:-} \ - --bind=off --logs-dir logs/ --timeout 2h - - name: Upload test results - uses: ./.github/actions/process-test-results - if: always() - with: - artifact-name: pt (${{ matrix.config }}, ${{ matrix.suite }}, ${{ matrix.jdk }}) - has-failed-tests: ${{ steps.tests.outcome == 'failure' }} - upload-heap-dump: ${{ env.SECRETS_PRESENT == '' && github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name != github.repository }} - - name: Update PR check - uses: ./.github/actions/update-check - if: >- - failure() && - github.event_name == 'repository_dispatch' && - github.event.client_payload.slash_command.args.named.sha != '' && - github.event.client_payload.pull_request.head.sha == github.event.client_payload.slash_command.args.named.sha - with: - pull_request_number: ${{ github.event.client_payload.pull_request.number }} - check_name: ${{ github.job }} with secrets - conclusion: ${{ job.status }} - github_token: ${{ secrets.GITHUB_TOKEN }} - build-success: if: ${{ always() }} # if `failure()` would not work for cancellations, `!success()` would not work for skipped jobs runs-on: ubuntu-latest needs: - - artifact-checks - - build-pt - build-test-matrix - - check-commit - - check-commits-dispatcher - error-prone-checks - - hive-tests - maven-checks - path-filters - - pt - test - - test-jdbc-compatibility - - test-other-modules steps: - name: "Check results" run: | diff --git a/client/trino-cli/src/main/java/io/trino/cli/ClientOptions.java b/client/trino-cli/src/main/java/io/trino/cli/ClientOptions.java index f7c10aaba44b..f9c239587c9d 100644 --- a/client/trino-cli/src/main/java/io/trino/cli/ClientOptions.java +++ 
b/client/trino-cli/src/main/java/io/trino/cli/ClientOptions.java @@ -47,6 +47,7 @@ import static com.google.common.base.Strings.nullToEmpty; import static io.trino.cli.TerminalUtils.getTerminal; import static io.trino.client.KerberosUtil.defaultCredentialCachePath; +import static io.trino.client.ProtocolHeaders.TRINO_HEADERS; import static io.trino.client.uri.PropertyName.ACCESS_TOKEN; import static io.trino.client.uri.PropertyName.CATALOG; import static io.trino.client.uri.PropertyName.CLIENT_INFO; @@ -56,6 +57,7 @@ import static io.trino.client.uri.PropertyName.EXTERNAL_AUTHENTICATION; import static io.trino.client.uri.PropertyName.EXTERNAL_AUTHENTICATION_REDIRECT_HANDLERS; import static io.trino.client.uri.PropertyName.EXTRA_CREDENTIALS; +import static io.trino.client.uri.PropertyName.EXTRA_HEADERS; import static io.trino.client.uri.PropertyName.HTTP_LOGGING_LEVEL; import static io.trino.client.uri.PropertyName.HTTP_PROXY; import static io.trino.client.uri.PropertyName.KERBEROS_CONFIG_PATH; @@ -204,6 +206,10 @@ public class ClientOptions @Option(names = "--client-tags", paramLabel = "", description = "Client tags", converter = ClientTagsConverter.class) public Optional> clientTags; + @PropertyMapping(EXTRA_HEADERS) + @Option(names = "--extra-header", paramLabel = "
", description = "Additional HTTP header to add to HTTP requests (property can be used multiple times; format is key=value)") + public final List extraHeaders = new ArrayList<>(); + @PropertyMapping(TRACE_TOKEN) @Option(names = "--trace-token", paramLabel = "", description = "Trace token") public Optional traceToken; @@ -422,6 +428,9 @@ public TrinoUri getTrinoUri(Map restrictedProperties) if (!sessionProperties.isEmpty()) { builder.setSessionProperties(toProperties(sessionProperties)); } + if (!extraHeaders.isEmpty()) { + builder.setExtraHeaders(toExtraHeaders(extraHeaders)); + } if (!resourceEstimates.isEmpty()) { builder.setResourceEstimates(toResourceEstimates(resourceEstimates)); } @@ -498,6 +507,15 @@ public static URI parseServer(String server) } } + public static Map toExtraHeaders(List extraHeaders) + { + ImmutableMap.Builder builder = ImmutableMap.builder(); + for (ExtraHeader extraHeader : extraHeaders) { + builder.put(extraHeader.getHeader(), extraHeader.getValue()); + } + return builder.buildOrThrow(); + } + private static Map toProperties(List sessionProperties) { ImmutableMap.Builder builder = ImmutableMap.builder(); @@ -605,6 +623,59 @@ public int hashCode() } } + public static final class ExtraHeader + { + private final String header; + private final String value; + + public ExtraHeader(String headerAndValue) + { + List nameValue = NAME_VALUE_SPLITTER.splitToList(headerAndValue); + checkArgument(nameValue.size() == 2, "Header and value: %s", headerAndValue); + this.header = nameValue.get(0); + this.value = nameValue.get(1); + + checkArgument(!TRINO_HEADERS.isProtocolHeader(header), "Header '%s' is a protocol header and cannot be set as an extra header", header); + checkArgument(!header.isEmpty(), "Header name is empty"); + checkArgument(!value.isEmpty(), "Header value is empty"); + } + + public ExtraHeader(String header, String value) + { + this.header = header; + this.value = value; + } + + public String getHeader() + { + return header; + } + + public String getValue() + { + return value; + } + + @Override + public boolean equals(Object o) + { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ExtraHeader other = (ExtraHeader) o; + return Objects.equals(header, other.header) && Objects.equals(value, other.value); + } + + @Override + public int hashCode() + { + return Objects.hash(header, value); + } + } + public static final class ClientSessionProperty { private static final Splitter NAME_SPLITTER = Splitter.on('.'); diff --git a/client/trino-cli/src/main/java/io/trino/cli/Trino.java b/client/trino-cli/src/main/java/io/trino/cli/Trino.java index d7b07f6fc11c..d31cc764ae4e 100644 --- a/client/trino-cli/src/main/java/io/trino/cli/Trino.java +++ b/client/trino-cli/src/main/java/io/trino/cli/Trino.java @@ -20,6 +20,7 @@ import io.trino.cli.ClientOptions.ClientExtraCredential; import io.trino.cli.ClientOptions.ClientResourceEstimate; import io.trino.cli.ClientOptions.ClientSessionProperty; +import io.trino.cli.ClientOptions.ExtraHeader; import org.jline.utils.AttributedStringBuilder; import org.jline.utils.AttributedStyle; import picocli.CommandLine; @@ -62,6 +63,7 @@ public static CommandLine createCommandLine(Object command) .registerConverter(ClientResourceEstimate.class, ClientResourceEstimate::new) .registerConverter(ClientSessionProperty.class, ClientSessionProperty::new) .registerConverter(ClientExtraCredential.class, ClientExtraCredential::new) + .registerConverter(ExtraHeader.class, ExtraHeader::new) 
.registerConverter(HostAndPort.class, HostAndPort::fromString) .registerConverter(Duration.class, Duration::valueOf) .setResourceBundle(new TrinoResourceBundle()) diff --git a/client/trino-cli/src/test/java/io/trino/cli/TestClientOptions.java b/client/trino-cli/src/test/java/io/trino/cli/TestClientOptions.java index 6ebeaf2c91a7..ce6d21c95f7b 100644 --- a/client/trino-cli/src/test/java/io/trino/cli/TestClientOptions.java +++ b/client/trino-cli/src/test/java/io/trino/cli/TestClientOptions.java @@ -246,6 +246,20 @@ public void testExtraCredentials() new ClientOptions.ClientExtraCredential("test.token.bar", "bar"))); } + @Test + public void testExtraHeaders() + { + Console console = createConsole("--extra-header", "X-Trino-Routing-Group=foo", "--extra-header", "x-foo=bar"); + ClientOptions options = console.clientOptions; + assertThat(options.extraHeaders).isEqualTo(ImmutableList.of( + new ClientOptions.ExtraHeader("X-Trino-Routing-Group", "foo"), + new ClientOptions.ExtraHeader("x-foo", "bar"))); + + assertThatThrownBy(() -> createConsole("--extra-header", "X-Trino-User=Forbidden")) + .hasCauseInstanceOf(IllegalArgumentException.class) + .hasMessageContaining("Header 'X-Trino-User' is a protocol header and cannot be set as an extra header"); + } + @Test public void testSessionProperties() { diff --git a/client/trino-client/src/main/java/io/trino/client/ClientSession.java b/client/trino-client/src/main/java/io/trino/client/ClientSession.java index b768934700ef..e4e4f75b5bed 100644 --- a/client/trino-client/src/main/java/io/trino/client/ClientSession.java +++ b/client/trino-client/src/main/java/io/trino/client/ClientSession.java @@ -51,6 +51,7 @@ public class ClientSession private final ZoneId timeZone; private final Locale locale; private final Map resourceEstimates; + private final Map extraHeaders; private final Map properties; private final Map preparedStatements; private final Map roles; @@ -87,6 +88,7 @@ private ClientSession( String source, Optional traceToken, Set clientTags, + Map extraHeaders, String clientInfo, Optional catalog, Optional schema, @@ -112,6 +114,7 @@ private ClientSession( this.source = requireNonNull(source, "source is null"); this.traceToken = requireNonNull(traceToken, "traceToken is null"); this.clientTags = ImmutableSet.copyOf(requireNonNull(clientTags, "clientTags is null")); + this.extraHeaders = ImmutableMap.copyOf(requireNonNull(extraHeaders, "extraHeaders is null")); this.clientInfo = clientInfo; this.catalog = catalog; this.schema = schema; @@ -198,6 +201,11 @@ public Set getClientTags() return clientTags; } + public Map getExtraHeaders() + { + return extraHeaders; + } + public String getClientInfo() { return clientInfo; @@ -295,6 +303,7 @@ public String toString() .add("sessionUser", sessionUser) .add("authorizationUser", authorizationUser) .add("clientTags", clientTags) + .add("extraHeaders", extraHeaders) .add("clientInfo", clientInfo) .add("catalog", catalog) .add("schema", schema) @@ -324,6 +333,7 @@ public static final class Builder private String source; private Optional traceToken = Optional.empty(); private Set clientTags = ImmutableSet.of(); + private Map extraHeaders = ImmutableMap.of(); private String clientInfo; private String catalog; private String schema; @@ -354,6 +364,7 @@ private Builder(ClientSession clientSession) source = clientSession.getSource(); traceToken = clientSession.getTraceToken(); clientTags = clientSession.getClientTags(); + extraHeaders = clientSession.getExtraHeaders(); clientInfo = clientSession.getClientInfo(); catalog 
= clientSession.getCatalog().orElse(null); schema = clientSession.getSchema().orElse(null); @@ -419,6 +430,12 @@ public Builder clientTags(Set<String> clientTags) return this; } + public Builder extraHeaders(Map<String, String> extraHeaders) + { + this.extraHeaders = extraHeaders; + return this; + } + public Builder clientInfo(String clientInfo) { this.clientInfo = clientInfo; @@ -526,6 +543,7 @@ public ClientSession build() source, traceToken, clientTags, + extraHeaders, clientInfo, Optional.ofNullable(catalog), Optional.ofNullable(schema), diff --git a/client/trino-client/src/main/java/io/trino/client/OkHttpUtil.java b/client/trino-client/src/main/java/io/trino/client/OkHttpUtil.java index 0553e1217f3f..e897ad143b1f 100644 --- a/client/trino-client/src/main/java/io/trino/client/OkHttpUtil.java +++ b/client/trino-client/src/main/java/io/trino/client/OkHttpUtil.java @@ -55,6 +55,7 @@ import java.security.cert.X509Certificate; import java.util.Arrays; import java.util.List; +import java.util.Map; import java.util.Optional; import java.util.concurrent.TimeUnit; @@ -109,6 +110,17 @@ public static Interceptor tokenAuth(String accessToken) .build()); } + public static Interceptor extraHeaders(Map<String, String> extraHeaders) + { + requireNonNull(extraHeaders, "extraHeaders is null"); + + return chain -> { + okhttp3.Request.Builder builder = chain.request().newBuilder(); + extraHeaders.forEach(builder::addHeader); + return chain.proceed(builder.build()); + }; + } + public static void setupTimeouts(OkHttpClient.Builder clientBuilder, int timeout, TimeUnit unit) { clientBuilder diff --git a/client/trino-client/src/main/java/io/trino/client/ProtocolHeaders.java b/client/trino-client/src/main/java/io/trino/client/ProtocolHeaders.java index 015ea8953981..01003bd0919c 100644 --- a/client/trino-client/src/main/java/io/trino/client/ProtocolHeaders.java +++ b/client/trino-client/src/main/java/io/trino/client/ProtocolHeaders.java @@ -194,6 +194,16 @@ private ProtocolHeaders(String name) responseOriginalRole = RESPONSE_SET_ORIGINAL_ROLES.withProtocolName(name); } + public boolean isProtocolHeader(String headerName) + { + for (Headers header : Headers.values()) { + if (header.withProtocolName(name).equalsIgnoreCase(headerName)) { + return true; + } + } + return false; + } + public String getProtocolName() { return name; diff --git a/client/trino-client/src/main/java/io/trino/client/uri/ConnectionProperties.java b/client/trino-client/src/main/java/io/trino/client/uri/ConnectionProperties.java index 4da7f640c16d..efc2baad33be 100644 --- a/client/trino-client/src/main/java/io/trino/client/uri/ConnectionProperties.java +++ b/client/trino-client/src/main/java/io/trino/client/uri/ConnectionProperties.java @@ -16,6 +16,7 @@ import com.google.common.base.CharMatcher; import com.google.common.base.Joiner; import com.google.common.base.Splitter; +import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.net.HostAndPort; import io.airlift.units.Duration; @@ -44,6 +45,7 @@ import static com.google.common.collect.Streams.stream; import static io.trino.client.ClientSelectedRole.Type.ALL; import static io.trino.client.ClientSelectedRole.Type.NONE; +import static io.trino.client.ProtocolHeaders.TRINO_HEADERS; import static io.trino.client.uri.AbstractConnectionProperty.Validator; import static io.trino.client.uri.AbstractConnectionProperty.validator; import static io.trino.client.uri.ConnectionProperties.SslVerificationMode.FULL; @@ -99,6 +101,7 @@ enum SslVerificationMode public static final 
ConnectionProperty<Map<String, String>> EXTRA_CREDENTIALS = new ExtraCredentials(); public static final ConnectionProperty<String> CLIENT_INFO = new ClientInfo(); public static final ConnectionProperty<Set<String>> CLIENT_TAGS = new ClientTags(); + public static final ConnectionProperty<Map<String, String>> EXTRA_HEADERS = new ExtraHeaders(); public static final ConnectionProperty<String> TRACE_TOKEN = new TraceToken(); public static final ConnectionProperty<Map<String, String>> SESSION_PROPERTIES = new SessionProperties(); public static final ConnectionProperty<String> SOURCE = new Source(); @@ -139,6 +142,7 @@ enum SslVerificationMode .add(EXTERNAL_AUTHENTICATION_TIMEOUT) .add(EXTERNAL_AUTHENTICATION_TOKEN_CACHE) .add(EXTRA_CREDENTIALS) + .add(EXTRA_HEADERS) .add(HOSTNAME_IN_CERTIFICATE) .add(HTTP_LOGGING_LEVEL) .add(HTTP_PROXY) @@ -789,6 +793,42 @@ public static String toString(Map<String, String> values) } } + private static class ExtraHeaders + extends AbstractConnectionProperty<Map<String, String>> + { + private static final Validator<Properties> VALIDATE_EXTRA_HEADER = validator( + ExtraHeaders::isNotReservedHeader, + format("Connection property %s cannot override any of the Trino protocol headers", PropertyName.EXTRA_HEADERS)); + + public ExtraHeaders() + { + super(PropertyName.EXTRA_HEADERS, NOT_REQUIRED, VALIDATE_EXTRA_HEADER, converter(ExtraHeaders::parseExtraHeaders, ExtraHeaders::toString)); + } + + // Extra headers consist of a list of header name-value pairs. + // E.g., `jdbc:trino://example.net:8080/?extraHeaders=X-Trino-Route:foo;X-Trino-Custom:bar` will send + // HTTP headers `X-Trino-Route=foo` and `X-Trino-Custom=bar`. + // These headers must not conflict with Trino protocol headers. + public static Map<String, String> parseExtraHeaders(String extraHeadersString) + { + return new MapPropertyParser(PropertyName.EXTRA_HEADERS.toString()).parse(extraHeadersString); + } + + public static String toString(Map<String, String> values) + { + return values.entrySet().stream() + .map(entry -> entry.getKey() + ":" + entry.getValue()) + .collect(Collectors.joining(";")); + } + + private static boolean isNotReservedHeader(Properties properties) + { + Map<String, String> extraHeaders = EXTRA_HEADERS.getValueOrDefault(properties, ImmutableMap.of()); + return extraHeaders.keySet().stream() + .noneMatch(TRINO_HEADERS::isProtocolHeader); + } + } + private static class SessionProperties extends AbstractConnectionProperty<Map<String, String>> { diff --git a/client/trino-client/src/main/java/io/trino/client/uri/HttpClientFactory.java b/client/trino-client/src/main/java/io/trino/client/uri/HttpClientFactory.java index b882792c146d..05b62ee23371 100644 --- a/client/trino-client/src/main/java/io/trino/client/uri/HttpClientFactory.java +++ b/client/trino-client/src/main/java/io/trino/client/uri/HttpClientFactory.java @@ -30,6 +30,7 @@ import static io.trino.client.KerberosUtil.defaultCredentialCachePath; import static io.trino.client.OkHttpUtil.basicAuth; +import static io.trino.client.OkHttpUtil.extraHeaders; import static io.trino.client.OkHttpUtil.setupAlternateHostnameVerification; import static io.trino.client.OkHttpUtil.setupCookieJar; import static io.trino.client.OkHttpUtil.setupHttpLogging; @@ -87,6 +88,10 @@ public static OkHttpClient.Builder toHttpClientBuilder(TrinoUri uri, String user builder.addNetworkInterceptor(tokenAuth(uri.getAccessToken().get())); } + if (!uri.getExtraHeaders().isEmpty()) { + builder.addNetworkInterceptor(extraHeaders(uri.getExtraHeaders())); + } + if (uri.isExternalAuthenticationEnabled()) { if (!uri.isUseSecureConnection()) { throw new RuntimeException("TLS/SSL required for authentication using external authorization"); diff --git 
a/client/trino-client/src/main/java/io/trino/client/uri/PropertyName.java b/client/trino-client/src/main/java/io/trino/client/uri/PropertyName.java index 1a7de8011bb3..ddf43e596b87 100644 --- a/client/trino-client/src/main/java/io/trino/client/uri/PropertyName.java +++ b/client/trino-client/src/main/java/io/trino/client/uri/PropertyName.java @@ -31,6 +31,7 @@ public enum PropertyName CLIENT_INFO("clientInfo"), CLIENT_TAGS("clientTags"), DISABLE_COMPRESSION("disableCompression"), + EXTRA_HEADERS("extraHeaders"), DNS_RESOLVER("dnsResolver"), DNS_RESOLVER_CONTEXT("dnsResolverContext"), ENCODING("encoding"), diff --git a/client/trino-client/src/main/java/io/trino/client/uri/TrinoUri.java b/client/trino-client/src/main/java/io/trino/client/uri/TrinoUri.java index 15e4e9b3f0b5..40c782c2d42b 100644 --- a/client/trino-client/src/main/java/io/trino/client/uri/TrinoUri.java +++ b/client/trino-client/src/main/java/io/trino/client/uri/TrinoUri.java @@ -61,6 +61,7 @@ import static io.trino.client.uri.ConnectionProperties.EXTERNAL_AUTHENTICATION_TIMEOUT; import static io.trino.client.uri.ConnectionProperties.EXTERNAL_AUTHENTICATION_TOKEN_CACHE; import static io.trino.client.uri.ConnectionProperties.EXTRA_CREDENTIALS; +import static io.trino.client.uri.ConnectionProperties.EXTRA_HEADERS; import static io.trino.client.uri.ConnectionProperties.HOSTNAME_IN_CERTIFICATE; import static io.trino.client.uri.ConnectionProperties.HTTP_LOGGING_LEVEL; import static io.trino.client.uri.ConnectionProperties.HTTP_PROXY; @@ -244,6 +245,11 @@ public Map getSessionProperties() return resolveWithDefault(SESSION_PROPERTIES, ImmutableMap.of()); } + public Map getExtraHeaders() + { + return resolveWithDefault(EXTRA_HEADERS, ImmutableMap.of()); + } + public Optional getSource() { return resolveOptional(SOURCE); @@ -521,6 +527,7 @@ public ClientSession.Builder toClientSessionBuilder() .timeZone(getTimeZone()) .locale(getLocale()) .properties(getSessionProperties()) + .extraHeaders(getExtraHeaders()) .credentials(getExtraCredentials()) .transactionId(null) .resourceEstimates(getResourceEstimates()) @@ -1009,6 +1016,11 @@ public Builder setSessionProperties(Map sessionProperties) return setProperty(SESSION_PROPERTIES, requireNonNull(sessionProperties, "sessionProperties is null")); } + public Builder setExtraHeaders(Map extraHeaders) + { + return setProperty(EXTRA_HEADERS, requireNonNull(extraHeaders, "extraHeaders is null")); + } + public Builder setSource(String source) { return setProperty(SOURCE, requireNonNull(source, "source is null")); diff --git a/core/trino-main/src/main/java/io/trino/dispatcher/FailedDispatchQuery.java b/core/trino-main/src/main/java/io/trino/dispatcher/FailedDispatchQuery.java index e0b78689f27c..3591bb365b16 100644 --- a/core/trino-main/src/main/java/io/trino/dispatcher/FailedDispatchQuery.java +++ b/core/trino-main/src/main/java/io/trino/dispatcher/FailedDispatchQuery.java @@ -249,6 +249,7 @@ private static QueryInfo immediateFailureQueryInfo( ImmutableList.of(), ImmutableSet.of(), Optional.empty(), + Optional.empty(), ImmutableList.of(), ImmutableList.of(), true, diff --git a/core/trino-main/src/main/java/io/trino/event/QueryMonitor.java b/core/trino-main/src/main/java/io/trino/event/QueryMonitor.java index 20b71e3e13c8..6af43c61e778 100644 --- a/core/trino-main/src/main/java/io/trino/event/QueryMonitor.java +++ b/core/trino-main/src/main/java/io/trino/event/QueryMonitor.java @@ -250,6 +250,7 @@ public void queryImmediateFailureEvent(BasicQueryInfo queryInfo, ExecutionFailur 
queryInfo.getQueryType(), queryInfo.getRetryPolicy()), new QueryIOMetadata(ImmutableList.of(), Optional.empty()), + Optional.empty(), createQueryFailureInfo(failure, Optional.empty()), ImmutableList.of(), queryInfo.getQueryStats().getCreateTime(), @@ -276,6 +277,7 @@ public void queryCompletedEvent(QueryInfo queryInfo) queryInfo.getQueryType(), queryInfo.getRetryPolicy()), getQueryIOMetadata(queryInfo), + queryInfo.getSelectColumnsLineageInfo(), createQueryFailureInfo(queryInfo.getFailureInfo(), queryInfo.getStages()), queryInfo.getWarnings(), queryStats.getCreateTime(), diff --git a/core/trino-main/src/main/java/io/trino/exchange/ExchangeManagerConfig.java b/core/trino-main/src/main/java/io/trino/exchange/ExchangeManagerConfig.java new file mode 100644 index 000000000000..b8b13f60afd5 --- /dev/null +++ b/core/trino-main/src/main/java/io/trino/exchange/ExchangeManagerConfig.java @@ -0,0 +1,37 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.trino.exchange; + +import io.airlift.configuration.Config; +import io.airlift.configuration.validation.FileExists; + +import java.io.File; +import java.util.Optional; + +public class ExchangeManagerConfig +{ + private Optional exchangeManagerConfigFile = Optional.empty(); + + public Optional<@FileExists File> getExchangeManagerConfigFile() + { + return exchangeManagerConfigFile; + } + + @Config("exchange-manager.config-file") + public ExchangeManagerConfig setExchangeManagerConfigFile(File exchangeManagerConfigFile) + { + this.exchangeManagerConfigFile = Optional.ofNullable(exchangeManagerConfigFile); + return this; + } +} diff --git a/core/trino-main/src/main/java/io/trino/exchange/ExchangeManagerModule.java b/core/trino-main/src/main/java/io/trino/exchange/ExchangeManagerModule.java index e05fe95d2de9..79bcdd537e83 100644 --- a/core/trino-main/src/main/java/io/trino/exchange/ExchangeManagerModule.java +++ b/core/trino-main/src/main/java/io/trino/exchange/ExchangeManagerModule.java @@ -17,12 +17,15 @@ import com.google.inject.Module; import com.google.inject.Scopes; +import static io.airlift.configuration.ConfigBinder.configBinder; + public class ExchangeManagerModule implements Module { @Override public void configure(Binder binder) { + configBinder(binder).bindConfig(ExchangeManagerConfig.class); binder.bind(ExchangeManagerRegistry.class).in(Scopes.SINGLETON); } } diff --git a/core/trino-main/src/main/java/io/trino/exchange/ExchangeManagerRegistry.java b/core/trino-main/src/main/java/io/trino/exchange/ExchangeManagerRegistry.java index ed66b2149930..241978fe74cc 100644 --- a/core/trino-main/src/main/java/io/trino/exchange/ExchangeManagerRegistry.java +++ b/core/trino-main/src/main/java/io/trino/exchange/ExchangeManagerRegistry.java @@ -29,6 +29,7 @@ import java.io.UncheckedIOException; import java.util.HashMap; import java.util.Map; +import java.util.Optional; import java.util.concurrent.ConcurrentHashMap; import static com.google.common.base.Preconditions.checkArgument; @@ -52,16 +53,19 @@ public class 
ExchangeManagerRegistry private volatile ExchangeManager exchangeManager; private final SecretsResolver secretsResolver; + private final Optional configFile; @Inject public ExchangeManagerRegistry( OpenTelemetry openTelemetry, Tracer tracer, - SecretsResolver secretsResolver) + SecretsResolver secretsResolver, + ExchangeManagerConfig config) { this.openTelemetry = requireNonNull(openTelemetry, "openTelemetry is null"); this.tracer = requireNonNull(tracer, "tracer is null"); this.secretsResolver = requireNonNull(secretsResolver, "secretsResolver is null"); + this.configFile = config.getExchangeManagerConfigFile(); } public void addExchangeManagerFactory(ExchangeManagerFactory factory) @@ -74,13 +78,14 @@ public void addExchangeManagerFactory(ExchangeManagerFactory factory) public void loadExchangeManager() { - if (!CONFIG_FILE.exists()) { + File configFile = this.configFile.orElse(CONFIG_FILE); + if (!configFile.exists()) { return; } - Map properties = loadProperties(CONFIG_FILE); + Map properties = loadProperties(configFile); String name = properties.remove(EXCHANGE_MANAGER_NAME_PROPERTY); - checkArgument(!isNullOrEmpty(name), "Exchange manager configuration %s does not contain %s", CONFIG_FILE, EXCHANGE_MANAGER_NAME_PROPERTY); + checkArgument(!isNullOrEmpty(name), "Exchange manager configuration %s does not contain %s", configFile, EXCHANGE_MANAGER_NAME_PROPERTY); loadExchangeManager(name, properties); } diff --git a/core/trino-main/src/main/java/io/trino/execution/QueryInfo.java b/core/trino-main/src/main/java/io/trino/execution/QueryInfo.java index efd508c6582b..eef3dbecae65 100644 --- a/core/trino-main/src/main/java/io/trino/execution/QueryInfo.java +++ b/core/trino-main/src/main/java/io/trino/execution/QueryInfo.java @@ -26,6 +26,7 @@ import io.trino.spi.ErrorType; import io.trino.spi.QueryId; import io.trino.spi.TrinoWarning; +import io.trino.spi.eventlistener.ColumnLineageInfo; import io.trino.spi.eventlistener.RoutineInfo; import io.trino.spi.eventlistener.TableInfo; import io.trino.spi.resourcegroups.QueryType; @@ -80,6 +81,7 @@ public class QueryInfo private final List warnings; private final Set inputs; private final Optional output; + private final Optional> selectColumnsLineageInfo; private final boolean finalQueryInfo; private final Optional resourceGroupId; private final Optional queryType; @@ -117,6 +119,7 @@ public QueryInfo( @JsonProperty("warnings") List warnings, @JsonProperty("inputs") Set inputs, @JsonProperty("output") Optional output, + @JsonProperty("selectColumnsLineageInfo") Optional> selectColumnsLineageInfo, @JsonProperty("referencedTables") List referencedTables, @JsonProperty("routines") List routines, @JsonProperty("finalQueryInfo") boolean finalQueryInfo, @@ -154,6 +157,7 @@ public QueryInfo( requireNonNull(queryType, "queryType is null"); requireNonNull(retryPolicy, "retryPolicy is null"); requireNonNull(version, "version is null"); + requireNonNull(selectColumnsLineageInfo, "selectColumnsLineageInfo is null"); this.queryId = queryId; this.session = session; @@ -193,6 +197,13 @@ public QueryInfo( this.retryPolicy = retryPolicy; this.pruned = pruned; this.version = version; + this.selectColumnsLineageInfo = selectColumnsLineageInfo.map(ImmutableList::copyOf); + } + + @JsonProperty + public Optional> getSelectColumnsLineageInfo() + { + return selectColumnsLineageInfo; } @JsonProperty diff --git a/core/trino-main/src/main/java/io/trino/execution/QueryStateMachine.java b/core/trino-main/src/main/java/io/trino/execution/QueryStateMachine.java index 
634f4c39de92..ac1229151baf 100644 --- a/core/trino-main/src/main/java/io/trino/execution/QueryStateMachine.java +++ b/core/trino-main/src/main/java/io/trino/execution/QueryStateMachine.java @@ -49,6 +49,7 @@ import io.trino.spi.QueryId; import io.trino.spi.TrinoException; import io.trino.spi.TrinoWarning; +import io.trino.spi.eventlistener.ColumnLineageInfo; import io.trino.spi.eventlistener.RoutineInfo; import io.trino.spi.eventlistener.StageGcStatistics; import io.trino.spi.eventlistener.TableInfo; @@ -111,6 +112,7 @@ import static io.trino.operator.RetryPolicy.TASK; import static io.trino.server.DynamicFilterService.DynamicFiltersStats; import static io.trino.spi.StandardErrorCode.NOT_FOUND; +import static io.trino.spi.StandardErrorCode.TRANSACTION_ALREADY_ABORTED; import static io.trino.spi.StandardErrorCode.USER_CANCELED; import static io.trino.spi.connector.StandardWarningCode.SPOOLING_NOT_SUPPORTED; import static io.trino.spi.resourcegroups.QueryType.SELECT; @@ -182,6 +184,7 @@ public class QueryStateMachine private final AtomicReference> inputs = new AtomicReference<>(ImmutableSet.of()); private final AtomicReference> output = new AtomicReference<>(Optional.empty()); + private final AtomicReference>> selectColumnsLineageInfo = new AtomicReference<>(Optional.empty()); private final AtomicReference> referencedTables = new AtomicReference<>(ImmutableList.of()); private final AtomicReference> routines = new AtomicReference<>(ImmutableList.of()); private final AtomicReference> catalogMetadataMetrics = new AtomicReference<>(ImmutableMap.of()); @@ -417,7 +420,9 @@ private void collectCatalogMetadataMetrics() // the transaction can be committed or aborted concurrently, after the check is done. } catch (RuntimeException e) { - QUERY_STATE_LOG.error(e, "Error collecting query catalog metadata metrics: %s", queryId); + if (!(e instanceof TrinoException trinoException && TRANSACTION_ALREADY_ABORTED.toErrorCode().equals(trinoException.getErrorCode()))) { + QUERY_STATE_LOG.error(e, "Error collecting query catalog metadata metrics: %s", queryId); + } } } @@ -693,6 +698,7 @@ QueryInfo getQueryInfo(Optional stages) warningCollector.getWarnings(), inputs.get(), output.get(), + selectColumnsLineageInfo.get(), referencedTables.get(), routines.get(), finalInfo, @@ -1029,6 +1035,12 @@ public void setOutput(Optional output) this.output.set(output); } + public void setSelectColumnsLineageInfo(Optional> selectOutputColumnsLineage) + { + requireNonNull(selectOutputColumnsLineage, "selectOutputColumnsLineage is null"); + this.selectColumnsLineageInfo.set(selectOutputColumnsLineage); + } + public void setReferencedTables(List tables) { requireNonNull(tables, "tables is null"); @@ -1499,6 +1511,7 @@ public static QueryInfo pruneQueryInfo(QueryInfo queryInfo, NodeVersion version) queryInfo.getWarnings(), queryInfo.getInputs(), queryInfo.getOutput(), + Optional.empty(), queryInfo.getReferencedTables(), queryInfo.getRoutines(), queryInfo.isFinalQueryInfo(), diff --git a/core/trino-main/src/main/java/io/trino/execution/SqlQueryExecution.java b/core/trino-main/src/main/java/io/trino/execution/SqlQueryExecution.java index 223b69c364c1..ff7106d19062 100644 --- a/core/trino-main/src/main/java/io/trino/execution/SqlQueryExecution.java +++ b/core/trino-main/src/main/java/io/trino/execution/SqlQueryExecution.java @@ -289,6 +289,7 @@ private static Analysis analyze( stateMachine.setUpdateType(analysis.getUpdateType()); stateMachine.setReferencedTables(analysis.getReferencedTables()); 
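
Aside: the new selectColumnsLineageInfo plumbing above flows Analysis → QueryStateMachine → QueryInfo → QueryCompletedEvent. A minimal sketch of a consumer, assuming the SPI side of this change (not shown in this excerpt) adds a matching getSelectColumnsLineageInfo() accessor to QueryCompletedEvent; LineageLoggingListener is hypothetical:

```java
import io.trino.spi.eventlistener.EventListener;
import io.trino.spi.eventlistener.QueryCompletedEvent;

// Hypothetical listener; only queryCompleted is overridden, the rest of the
// EventListener interface keeps its default no-op implementations.
public class LineageLoggingListener
        implements EventListener
{
    @Override
    public void queryCompleted(QueryCompletedEvent event)
    {
        // Assumed accessor: the diff passes queryInfo.getSelectColumnsLineageInfo()
        // into the QueryCompletedEvent constructor, so an Optional<List<ColumnLineageInfo>>
        // getter on the event is implied but not visible in this hunk.
        event.getSelectColumnsLineageInfo()
                .ifPresent(lineage -> lineage.forEach(info -> System.out.println(info)));
    }
}
```
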
stateMachine.setRoutines(analysis.getRoutines()); + stateMachine.setSelectColumnsLineageInfo(analysis.getSelectColumnsLineageInfo()); stateMachine.endAnalysis(); diff --git a/core/trino-main/src/main/java/io/trino/operator/MarkDistinctHash.java b/core/trino-main/src/main/java/io/trino/operator/MarkDistinctHash.java index 2ea8a11cddc2..bcf96d10ae16 100644 --- a/core/trino-main/src/main/java/io/trino/operator/MarkDistinctHash.java +++ b/core/trino-main/src/main/java/io/trino/operator/MarkDistinctHash.java @@ -42,6 +42,11 @@ private MarkDistinctHash(MarkDistinctHash other) nextDistinctId = other.nextDistinctId; } + public long getGroupCount() + { + return groupByHash.getGroupCount(); + } + public long getEstimatedSize() { return groupByHash.getEstimatedSize(); diff --git a/core/trino-main/src/main/java/io/trino/operator/aggregation/ApproximateLongPercentileAggregations.java b/core/trino-main/src/main/java/io/trino/operator/aggregation/ApproximateLongPercentileAggregations.java index f42ece1598b1..8a44bb37e499 100644 --- a/core/trino-main/src/main/java/io/trino/operator/aggregation/ApproximateLongPercentileAggregations.java +++ b/core/trino-main/src/main/java/io/trino/operator/aggregation/ApproximateLongPercentileAggregations.java @@ -70,7 +70,7 @@ public static void output(@AggregationState TDigestAndPercentileState state, Blo public static double toDoubleExact(long value) { double doubleValue = (double) value; - checkCondition((long) doubleValue == value, INVALID_FUNCTION_ARGUMENT, () -> String.format("no exact double representation for long: %s", value)); + checkCondition((long) doubleValue == value, INVALID_FUNCTION_ARGUMENT, "no exact double representation for long: %s", value); return doubleValue; } } diff --git a/core/trino-main/src/main/java/io/trino/operator/aggregation/DistinctWindowAccumulator.java b/core/trino-main/src/main/java/io/trino/operator/aggregation/DistinctWindowAccumulator.java index 59bf70c1d3c5..2a8567a2196f 100644 --- a/core/trino-main/src/main/java/io/trino/operator/aggregation/DistinctWindowAccumulator.java +++ b/core/trino-main/src/main/java/io/trino/operator/aggregation/DistinctWindowAccumulator.java @@ -26,7 +26,6 @@ import io.trino.spi.block.Block; import io.trino.spi.block.BlockBuilder; import io.trino.spi.block.RunLengthEncodedBlock; -import io.trino.spi.block.ValueBlock; import io.trino.spi.function.WindowAccumulator; import io.trino.spi.function.WindowIndex; import io.trino.spi.type.Type; @@ -36,6 +35,7 @@ import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkState; import static io.trino.spi.type.BooleanType.BOOLEAN; +import static java.lang.Math.toIntExact; import static java.util.Objects.requireNonNull; public class DistinctWindowAccumulator @@ -117,8 +117,7 @@ public void addInput(WindowIndex index, int startPosition, int endPosition) pageBuilder.reset(); } for (int channel = 0; channel < argumentChannels.size(); channel++) { - ValueBlock value = index.getSingleValueBlock(channel, position).getSingleValueBlock(0); - pageBuilder.getBlockBuilder(channel).append(value, 0); + index.appendTo(channel, position, pageBuilder.getBlockBuilder(channel)); } pageBuilder.declarePosition(); } @@ -126,39 +125,46 @@ public void addInput(WindowIndex index, int startPosition, int endPosition) private void indexCurrentPage(Page page) { + long initialGroupCount = hash.getGroupCount(); Work<Block> work = hash.markDistinctRows(page); checkState(work.process()); Block distinctMask = work.getResult(); int positionCount
= distinctMask.getPositionCount(); checkArgument(positionCount == page.getPositionCount(), "Page position count does not match distinct mask position count"); - PagesIndex pagesIndex = pagesIndexFactory.newPagesIndex(argumentTypes, positionCount); + + int distinctPositions = toIntExact(hash.getGroupCount() - initialGroupCount); + if (distinctPositions == 0) { + return; + } + PagesIndex pagesIndex = pagesIndexFactory.newPagesIndex(argumentTypes, distinctPositions); if (distinctMask instanceof RunLengthEncodedBlock) { - if (test(distinctMask, 0)) { - // all positions selected - pagesIndex.addPage(page); - } + // all positions selected + checkState(test(distinctMask, 0), "all positions must be distinct"); + pagesIndex.addPage(page); } else { - PageBuilder filteredPageBuilder = new PageBuilder(argumentTypes); + int[] selectedPositions = new int[distinctPositions]; + int selectedIndex = 0; for (int position = 0; position < positionCount; position++) { - if (!test(distinctMask, position)) { - continue; + if (test(distinctMask, position)) { + selectedPositions[selectedIndex++] = position; } - for (int channel = 0; channel < argumentChannels.size(); channel++) { - Block block = page.getBlock(channel); - filteredPageBuilder.getBlockBuilder(channel).append(block.getUnderlyingValueBlock(), block.getUnderlyingValuePosition(position)); - } - filteredPageBuilder.declarePosition(); } - pagesIndex.addPage(filteredPageBuilder.build()); + checkState(selectedIndex == selectedPositions.length, "Invalid positions in distinct mask"); + + Block[] filteredBlocks = new Block[argumentChannels.size()]; + for (int channel = 0; channel < argumentChannels.size(); channel++) { + filteredBlocks[channel] = page.getBlock(channel).copyPositions(selectedPositions, 0, selectedPositions.length); + } + pagesIndex.addPage(new Page(selectedPositions.length, filteredBlocks)); } int selectedPositionsCount = pagesIndex.getPositionCount(); - if (selectedPositionsCount > 0) { - PagesWindowIndex selectedWindowIndex = new PagesWindowIndex(pagesIndex, 0, selectedPositionsCount); - delegate.addInput(selectedWindowIndex, 0, selectedPositionsCount - 1); - } + checkState(selectedPositionsCount == distinctPositions, "unexpected pagesIndex positions: %s <> %s", selectedPositionsCount, distinctPositions); + + PagesWindowIndex selectedWindowIndex = new PagesWindowIndex(pagesIndex, 0, selectedPositionsCount); + delegate.addInput(selectedWindowIndex, 0, selectedPositionsCount - 1); } private static boolean test(Block block, int position) diff --git a/core/trino-main/src/main/java/io/trino/operator/aggregation/OrderedWindowAccumulator.java b/core/trino-main/src/main/java/io/trino/operator/aggregation/OrderedWindowAccumulator.java index 6c5ceb7ebe80..591ff0f0f65c 100644 --- a/core/trino-main/src/main/java/io/trino/operator/aggregation/OrderedWindowAccumulator.java +++ b/core/trino-main/src/main/java/io/trino/operator/aggregation/OrderedWindowAccumulator.java @@ -19,7 +19,6 @@ import io.trino.operator.window.PagesWindowIndex; import io.trino.spi.PageBuilder; import io.trino.spi.block.BlockBuilder; -import io.trino.spi.block.ValueBlock; import io.trino.spi.connector.SortOrder; import io.trino.spi.function.WindowAccumulator; import io.trino.spi.function.WindowIndex; @@ -112,8 +111,7 @@ public void addInput(WindowIndex index, int startPosition, int endPosition) indexCurrentPage(); } for (int channel = 0; channel < argumentTypes.size(); channel++) { - ValueBlock value = index.getSingleValueBlock(channel, position).getSingleValueBlock(0); - 
pageBuilder.getBlockBuilder(channel).append(value, 0); + index.appendTo(channel, position, pageBuilder.getBlockBuilder(channel)); } pageBuilder.declarePosition(); } diff --git a/core/trino-main/src/main/java/io/trino/operator/scalar/ArrayCombinationsFunction.java b/core/trino-main/src/main/java/io/trino/operator/scalar/ArrayCombinationsFunction.java index 6ea939faabe5..689447160644 100644 --- a/core/trino-main/src/main/java/io/trino/operator/scalar/ArrayCombinationsFunction.java +++ b/core/trino-main/src/main/java/io/trino/operator/scalar/ArrayCombinationsFunction.java @@ -56,8 +56,8 @@ public static Block combinations( { int arrayLength = array.getPositionCount(); int combinationLength = toIntExact(n); - checkCondition(combinationLength >= 0, INVALID_FUNCTION_ARGUMENT, () -> String.format("combination size must not be negative: %s", combinationLength)); - checkCondition(combinationLength <= MAX_COMBINATION_LENGTH, INVALID_FUNCTION_ARGUMENT, () -> String.format("combination size must not exceed %s: %s", MAX_COMBINATION_LENGTH, combinationLength)); + checkCondition(combinationLength >= 0, INVALID_FUNCTION_ARGUMENT, "combination size must not be negative: %s", combinationLength); + checkCondition(combinationLength <= MAX_COMBINATION_LENGTH, INVALID_FUNCTION_ARGUMENT, "combination size must not exceed %s: %s", MAX_COMBINATION_LENGTH, combinationLength); ArrayType arrayType = new ArrayType(elementType); if (combinationLength > arrayLength) { diff --git a/core/trino-main/src/main/java/io/trino/operator/scalar/ArrayExceptFunction.java b/core/trino-main/src/main/java/io/trino/operator/scalar/ArrayExceptFunction.java index a54b06396e60..5e21e0028046 100644 --- a/core/trino-main/src/main/java/io/trino/operator/scalar/ArrayExceptFunction.java +++ b/core/trino-main/src/main/java/io/trino/operator/scalar/ArrayExceptFunction.java @@ -14,8 +14,6 @@ package io.trino.operator.scalar; import io.trino.spi.block.Block; -import io.trino.spi.block.BlockBuilder; -import io.trino.spi.block.ValueBlock; import io.trino.spi.function.Convention; import io.trino.spi.function.Description; import io.trino.spi.function.OperatorDependency; @@ -25,11 +23,13 @@ import io.trino.spi.type.Type; import io.trino.type.BlockTypeOperators.BlockPositionHashCode; import io.trino.type.BlockTypeOperators.BlockPositionIsIdentical; +import it.unimi.dsi.fastutil.ints.IntArrayList; import static io.trino.spi.function.InvocationConvention.InvocationArgumentConvention.BLOCK_POSITION; import static io.trino.spi.function.InvocationConvention.InvocationReturnConvention.FAIL_ON_NULL; import static io.trino.spi.function.OperatorType.HASH_CODE; import static io.trino.spi.function.OperatorType.IDENTICAL; +import static java.lang.Math.min; @ScalarFunction("array_except") @Description("Returns an array of elements that are in the first array but not the second, without duplicates.") @@ -63,14 +63,12 @@ public static Block except( for (int i = 0; i < rightPositionCount; i++) { set.add(rightArray, i); } - BlockBuilder distinctElementBlockBuilder = type.createBlockBuilder(null, leftPositionCount); - ValueBlock leftValueBlock = leftArray.getUnderlyingValueBlock(); + IntArrayList distinctPositions = new IntArrayList(min(64, leftPositionCount)); for (int i = 0; i < leftPositionCount; i++) { - int leftPosition = leftArray.getUnderlyingValuePosition(i); - if (set.add(leftValueBlock, leftPosition)) { - distinctElementBlockBuilder.append(leftValueBlock, leftPosition); + if (set.add(leftArray, i)) { + distinctPositions.add(i); } } - return 
distinctElementBlockBuilder.build(); + return leftArray.copyPositions(distinctPositions.elements(), 0, distinctPositions.size()); } } diff --git a/core/trino-main/src/main/java/io/trino/operator/scalar/ArrayRemoveFunction.java b/core/trino-main/src/main/java/io/trino/operator/scalar/ArrayRemoveFunction.java index 5bf1c193c22c..bf5550e94b4a 100644 --- a/core/trino-main/src/main/java/io/trino/operator/scalar/ArrayRemoveFunction.java +++ b/core/trino-main/src/main/java/io/trino/operator/scalar/ArrayRemoveFunction.java @@ -15,20 +15,16 @@ import io.trino.spi.TrinoException; import io.trino.spi.block.Block; -import io.trino.spi.block.BufferedArrayValueBuilder; -import io.trino.spi.block.ValueBlock; import io.trino.spi.function.Convention; import io.trino.spi.function.Description; import io.trino.spi.function.OperatorDependency; import io.trino.spi.function.ScalarFunction; import io.trino.spi.function.SqlType; import io.trino.spi.function.TypeParameter; -import io.trino.spi.type.ArrayType; import io.trino.spi.type.Type; +import it.unimi.dsi.fastutil.ints.IntArrayList; import java.lang.invoke.MethodHandle; -import java.util.ArrayList; -import java.util.List; import static io.trino.spi.StandardErrorCode.NOT_SUPPORTED; import static io.trino.spi.function.InvocationConvention.InvocationArgumentConvention.NEVER_NULL; @@ -36,22 +32,17 @@ import static io.trino.spi.function.OperatorType.EQUAL; import static io.trino.spi.type.TypeUtils.readNativeValue; import static io.trino.util.Failures.internalError; +import static java.lang.Math.min; @ScalarFunction("array_remove") @Description("Remove specified values from the given array") public final class ArrayRemoveFunction { - private final BufferedArrayValueBuilder arrayValueBuilder; - - @TypeParameter("E") - public ArrayRemoveFunction(@TypeParameter("E") Type elementType) - { - arrayValueBuilder = BufferedArrayValueBuilder.createBuffered(new ArrayType(elementType)); - } + private ArrayRemoveFunction() {} @TypeParameter("E") @SqlType("array(E)") - public Block remove( + public static Block remove( @OperatorDependency( operator = EQUAL, argumentTypes = {"E", "E"}, convention = @Convention(arguments = {NEVER_NULL, NEVER_NULL}, result = NULLABLE_RETURN)) MethodHandle equalsFunction, @TypeParameter("E") Type type, @SqlType("array(E)") Block array, @SqlType("E") Object value) { - List<Integer> positions = new ArrayList<>(); + IntArrayList positions = new IntArrayList(min(64, array.getPositionCount())); for (int i = 0; i < array.getPositionCount(); i++) { Object element = readNativeValue(type, array, i); @@ -88,11 +79,6 @@ public Block remove( return array; } - return arrayValueBuilder.build(positions.size(), elementBuilder -> { - ValueBlock valueBlock = array.getUnderlyingValueBlock(); - for (int position : positions) { - elementBuilder.append(valueBlock, array.getUnderlyingValuePosition(position)); - } - }); + return array.copyPositions(positions.elements(), 0, positions.size()); } } diff --git a/core/trino-main/src/main/java/io/trino/operator/scalar/ArrayReverseFunction.java b/core/trino-main/src/main/java/io/trino/operator/scalar/ArrayReverseFunction.java index d74574cd64ab..8f1f976f36d4 100644 --- a/core/trino-main/src/main/java/io/trino/operator/scalar/ArrayReverseFunction.java +++ b/core/trino-main/src/main/java/io/trino/operator/scalar/ArrayReverseFunction.java @@ -14,30 +14,20 @@ package io.trino.operator.scalar; import io.trino.spi.block.Block; -import io.trino.spi.block.BufferedArrayValueBuilder; -import io.trino.spi.block.ValueBlock; import io.trino.spi.function.Description; import io.trino.spi.function.ScalarFunction; import io.trino.spi.function.SqlType; import
io.trino.spi.function.TypeParameter; -import io.trino.spi.type.ArrayType; -import io.trino.spi.type.Type; @ScalarFunction("reverse") @Description("Returns an array which has the reversed order of the given array.") public final class ArrayReverseFunction { - private final BufferedArrayValueBuilder arrayValueBuilder; - - @TypeParameter("E") - public ArrayReverseFunction(@TypeParameter("E") Type elementType) - { - arrayValueBuilder = BufferedArrayValueBuilder.createBuffered(new ArrayType(elementType)); - } + private ArrayReverseFunction() {} @TypeParameter("E") @SqlType("array(E)") - public Block reverse(@SqlType("array(E)") Block block) + public static Block reverse(@SqlType("array(E)") Block block) { int arrayLength = block.getPositionCount(); @@ -45,11 +35,10 @@ public Block reverse(@SqlType("array(E)") Block block) return block; } - return arrayValueBuilder.build(arrayLength, elementBuilder -> { - ValueBlock valueBlock = block.getUnderlyingValueBlock(); - for (int i = arrayLength - 1; i >= 0; i--) { - elementBuilder.append(valueBlock, block.getUnderlyingValuePosition(i)); - } - }); + int[] positions = new int[arrayLength]; + for (int i = 0; i < arrayLength; i++) { + positions[i] = arrayLength - i - 1; + } + return block.copyPositions(positions, 0, arrayLength); } } diff --git a/core/trino-main/src/main/java/io/trino/operator/scalar/ArrayShuffleFunction.java b/core/trino-main/src/main/java/io/trino/operator/scalar/ArrayShuffleFunction.java index a954134bfb39..48dae6cad23c 100644 --- a/core/trino-main/src/main/java/io/trino/operator/scalar/ArrayShuffleFunction.java +++ b/core/trino-main/src/main/java/io/trino/operator/scalar/ArrayShuffleFunction.java @@ -14,14 +14,10 @@ package io.trino.operator.scalar; import io.trino.spi.block.Block; -import io.trino.spi.block.BufferedArrayValueBuilder; -import io.trino.spi.block.ValueBlock; import io.trino.spi.function.Description; import io.trino.spi.function.ScalarFunction; import io.trino.spi.function.SqlType; import io.trino.spi.function.TypeParameter; -import io.trino.spi.type.ArrayType; -import io.trino.spi.type.Type; import java.util.concurrent.ThreadLocalRandom; @@ -29,24 +25,14 @@ @Description("Generates a random permutation of the given array.") public final class ArrayShuffleFunction { - private final BufferedArrayValueBuilder arrayValueBuilder; - private static final int INITIAL_LENGTH = 128; - private int[] positions = new int[INITIAL_LENGTH]; - - @TypeParameter("E") - public ArrayShuffleFunction(@TypeParameter("E") Type elementType) - { - arrayValueBuilder = BufferedArrayValueBuilder.createBuffered(new ArrayType(elementType)); - } + private ArrayShuffleFunction() {} @TypeParameter("E") @SqlType("array(E)") - public Block shuffle(@SqlType("array(E)") Block block) + public static Block shuffle(@SqlType("array(E)") Block block) { int length = block.getPositionCount(); - if (positions.length < length) { - positions = new int[length]; - } + int[] positions = new int[length]; for (int i = 0; i < length; i++) { positions[i] = i; } @@ -60,11 +46,6 @@ public Block shuffle(@SqlType("array(E)") Block block) positions[index] = swap; } - return arrayValueBuilder.build(length, elementBuilder -> { - ValueBlock valueBlock = block.getUnderlyingValueBlock(); - for (int i = 0; i < length; i++) { - elementBuilder.append(valueBlock, block.getUnderlyingValuePosition(positions[i])); - } - }); + return block.copyPositions(positions, 0, length); } } diff --git a/core/trino-main/src/main/java/io/trino/operator/scalar/ArraySortComparatorFunction.java 
b/core/trino-main/src/main/java/io/trino/operator/scalar/ArraySortComparatorFunction.java index 9f90e27003c2..5520e0f9ab08 100644 --- a/core/trino-main/src/main/java/io/trino/operator/scalar/ArraySortComparatorFunction.java +++ b/core/trino-main/src/main/java/io/trino/operator/scalar/ArraySortComparatorFunction.java @@ -16,16 +16,12 @@ import com.google.common.primitives.Ints; import io.trino.spi.TrinoException; import io.trino.spi.block.Block; -import io.trino.spi.block.BufferedArrayValueBuilder; -import io.trino.spi.block.ValueBlock; import io.trino.spi.function.Convention; import io.trino.spi.function.Description; import io.trino.spi.function.OperatorDependency; import io.trino.spi.function.ScalarFunction; import io.trino.spi.function.SqlType; import io.trino.spi.function.TypeParameter; -import io.trino.spi.type.ArrayType; -import io.trino.spi.type.Type; import io.trino.sql.gen.lambda.LambdaFunctionInterface; import java.lang.invoke.MethodHandle; @@ -43,25 +39,20 @@ @Description("Sorts the given array with a lambda comparator.") public final class ArraySortComparatorFunction { - private final BufferedArrayValueBuilder arrayValueBuilder; - private static final int INITIAL_LENGTH = 128; - private List<Integer> positions = Ints.asList(new int[INITIAL_LENGTH]); - - @TypeParameter("T") - public ArraySortComparatorFunction(@TypeParameter("T") Type elementType) - { - arrayValueBuilder = BufferedArrayValueBuilder.createBuffered(new ArrayType(elementType)); - } + private ArraySortComparatorFunction() {} @TypeParameter("T") @SqlType("array(T)") - public Block sort( + public static Block sort( @OperatorDependency(operator = READ_VALUE, argumentTypes = "T", convention = @Convention(arguments = BLOCK_POSITION_NOT_NULL, result = FAIL_ON_NULL)) MethodHandle readValue, @SqlType("array(T)") Block block, @SqlType("function(T, T, integer)") ComparatorObjectLambda function) { int arrayLength = block.getPositionCount(); - initPositionsList(arrayLength); + int[] positions = new int[arrayLength]; + for (int i = 0; i < arrayLength; i++) { + positions[i] = i; + } Comparator<Object> comparator = (x, y) -> { try { @@ -75,29 +66,14 @@ public Block sort( } }; - sortPositions(arrayLength, comparator); + sortPositions(positions, comparator); - return arrayValueBuilder.build(arrayLength, elementBuilder -> { - ValueBlock valueBlock = block.getUnderlyingValueBlock(); - for (int i = 0; i < arrayLength; i++) { - elementBuilder.append(valueBlock, block.getUnderlyingValuePosition(positions.get(i))); - } - }); - } - - private void initPositionsList(int arrayLength) - { - if (positions.size() < arrayLength) { - positions = Ints.asList(new int[arrayLength]); - } - for (int i = 0; i < arrayLength; i++) { - positions.set(i, i); - } + return block.copyPositions(positions, 0, arrayLength); } - private void sortPositions(int arrayLength, Comparator<Object> comparator) + private static void sortPositions(int[] positions, Comparator<Object> comparator) { - List<Integer> list = positions.subList(0, arrayLength); + List<Integer> list = Ints.asList(positions); try { list.sort(comparator); diff --git a/core/trino-main/src/main/java/io/trino/operator/scalar/ArraySortFunction.java b/core/trino-main/src/main/java/io/trino/operator/scalar/ArraySortFunction.java index 2bb7cb771ba3..89b968441043 100644 --- a/core/trino-main/src/main/java/io/trino/operator/scalar/ArraySortFunction.java +++ b/core/trino-main/src/main/java/io/trino/operator/scalar/ArraySortFunction.java @@ -14,18 +14,14 @@ package io.trino.operator.scalar; import io.trino.spi.block.Block; -import
io.trino.spi.block.BufferedArrayValueBuilder; -import io.trino.spi.block.ValueBlock; import io.trino.spi.function.Convention; import io.trino.spi.function.Description; import io.trino.spi.function.OperatorDependency; import io.trino.spi.function.ScalarFunction; import io.trino.spi.function.SqlType; import io.trino.spi.function.TypeParameter; -import io.trino.spi.type.ArrayType; -import io.trino.spi.type.Type; import io.trino.type.BlockTypeOperators.BlockPositionComparison; -import it.unimi.dsi.fastutil.ints.IntArrayList; +import it.unimi.dsi.fastutil.ints.IntArrays; import static io.trino.spi.function.InvocationConvention.InvocationArgumentConvention.BLOCK_POSITION_NOT_NULL; import static io.trino.spi.function.InvocationConvention.InvocationReturnConvention.FAIL_ON_NULL; @@ -36,19 +32,12 @@ public final class ArraySortFunction { public static final String NAME = "array_sort"; - private final BufferedArrayValueBuilder arrayValueBuilder; - private static final int INITIAL_LENGTH = 128; - private final IntArrayList positions = new IntArrayList(INITIAL_LENGTH); - @TypeParameter("E") - public ArraySortFunction(@TypeParameter("E") Type elementType) - { - arrayValueBuilder = BufferedArrayValueBuilder.createBuffered(new ArrayType(elementType)); - } + private ArraySortFunction() {} @TypeParameter("E") @SqlType("array(E)") - public Block sort( + public static Block sort( @OperatorDependency( operator = COMPARISON_UNORDERED_LAST, argumentTypes = {"E", "E"}, @@ -56,12 +45,12 @@ public Block sort( @SqlType("array(E)") Block block) { int arrayLength = block.getPositionCount(); - positions.clear(); + int[] positions = new int[arrayLength]; for (int i = 0; i < arrayLength; i++) { - positions.add(i); + positions[i] = i; } - positions.subList(0, arrayLength).sort((left, right) -> { + IntArrays.stableSort(positions, (left, right) -> { boolean nullLeft = block.isNull(left); boolean nullRight = block.isNull(right); if (nullLeft && nullRight) { @@ -77,11 +66,6 @@ public Block sort( return (int) comparisonOperator.compare(block, left, block, right); }); - return arrayValueBuilder.build(arrayLength, elementBuilder -> { - ValueBlock valueBlock = block.getUnderlyingValueBlock(); - for (int i = 0; i < arrayLength; i++) { - elementBuilder.append(valueBlock, block.getUnderlyingValuePosition(positions.getInt(i))); - } - }); + return block.copyPositions(positions, 0, arrayLength); } } diff --git a/core/trino-main/src/main/java/io/trino/operator/scalar/ArrayTrimFunction.java b/core/trino-main/src/main/java/io/trino/operator/scalar/ArrayTrimFunction.java index d7b5a6436239..9b253f1ba8be 100644 --- a/core/trino-main/src/main/java/io/trino/operator/scalar/ArrayTrimFunction.java +++ b/core/trino-main/src/main/java/io/trino/operator/scalar/ArrayTrimFunction.java @@ -38,8 +38,8 @@ public static Block trim( @SqlType("array(E)") Block array, @SqlType(StandardTypes.BIGINT) long size) { - checkCondition(size >= 0, INVALID_FUNCTION_ARGUMENT, () -> String.format("size must not be negative: %s", size)); - checkCondition(size <= array.getPositionCount(), INVALID_FUNCTION_ARGUMENT, () -> String.format("size must not exceed array cardinality %s: %s", array.getPositionCount(), size)); + checkCondition(size >= 0, INVALID_FUNCTION_ARGUMENT, "size must not be negative: %s", size); + checkCondition(size <= array.getPositionCount(), INVALID_FUNCTION_ARGUMENT, "size must not exceed array cardinality %s: %s", array.getPositionCount(), size); return array.getRegion(0, toIntExact(array.getPositionCount() - size)); } diff --git 
a/core/trino-main/src/main/java/io/trino/operator/scalar/QuantileDigestFunctions.java b/core/trino-main/src/main/java/io/trino/operator/scalar/QuantileDigestFunctions.java index 790c72ab2010..32afef473b84 100644 --- a/core/trino-main/src/main/java/io/trino/operator/scalar/QuantileDigestFunctions.java +++ b/core/trino-main/src/main/java/io/trino/operator/scalar/QuantileDigestFunctions.java @@ -141,13 +141,13 @@ public static Block valuesAtQuantilesBigint(@SqlType("qdigest(bigint)") Slice in public static double verifyAccuracy(double accuracy) { - checkCondition(accuracy > 0 && accuracy < 1, INVALID_FUNCTION_ARGUMENT, () -> String.format("Percentile accuracy must be exclusively between 0 and 1, was %s", accuracy)); + checkCondition(accuracy > 0 && accuracy < 1, INVALID_FUNCTION_ARGUMENT, "Percentile accuracy must be exclusively between 0 and 1, was %s", accuracy); return accuracy; } public static long verifyWeight(long weight) { - checkCondition(weight > 0, INVALID_FUNCTION_ARGUMENT, () -> String.format("Percentile weight must be > 0, was %s", weight)); + checkCondition(weight > 0, INVALID_FUNCTION_ARGUMENT, "Percentile weight must be > 0, was %s", weight); return weight; } } diff --git a/core/trino-main/src/main/java/io/trino/operator/scalar/TDigestFunctions.java b/core/trino-main/src/main/java/io/trino/operator/scalar/TDigestFunctions.java index 0686bee47a68..eff8de151f4f 100644 --- a/core/trino-main/src/main/java/io/trino/operator/scalar/TDigestFunctions.java +++ b/core/trino-main/src/main/java/io/trino/operator/scalar/TDigestFunctions.java @@ -61,13 +61,13 @@ public static Block valuesAtQuantiles(@SqlType(StandardTypes.TDIGEST) TDigest in public static void verifyValue(double value) { - checkCondition(Double.isFinite(value), INVALID_FUNCTION_ARGUMENT, () -> String.format("value must be finite; was %s", value)); + checkCondition(Double.isFinite(value), INVALID_FUNCTION_ARGUMENT, "value must be finite; was %s", value); } public static double verifyWeight(double weight) { - checkCondition(Double.isFinite(weight), INVALID_FUNCTION_ARGUMENT, () -> String.format("weight must be finite, was %s", weight)); - checkCondition(weight >= 1, INVALID_FUNCTION_ARGUMENT, () -> String.format("weight must be >= 1, was %s", weight)); + checkCondition(Double.isFinite(weight), INVALID_FUNCTION_ARGUMENT, "weight must be finite, was %s", weight); + checkCondition(weight >= 1, INVALID_FUNCTION_ARGUMENT, "weight must be >= 1, was %s", weight); return weight; } } diff --git a/core/trino-main/src/main/java/io/trino/server/InternalAuthenticationManager.java b/core/trino-main/src/main/java/io/trino/server/InternalAuthenticationManager.java index 7676fcd65751..600b085aa7df 100644 --- a/core/trino-main/src/main/java/io/trino/server/InternalAuthenticationManager.java +++ b/core/trino-main/src/main/java/io/trino/server/InternalAuthenticationManager.java @@ -43,6 +43,7 @@ import static io.trino.server.security.jwt.JwtUtil.newJwtBuilder; import static io.trino.server.security.jwt.JwtUtil.newJwtParserBuilder; import static jakarta.ws.rs.core.MediaType.TEXT_PLAIN_TYPE; +import static jakarta.ws.rs.core.Response.Status.SERVICE_UNAVAILABLE; import static jakarta.ws.rs.core.Response.Status.UNAUTHORIZED; import static java.nio.charset.StandardCharsets.UTF_8; import static java.time.temporal.ChronoUnit.MINUTES; @@ -62,11 +63,12 @@ public class InternalAuthenticationManager private final String nodeId; private final JwtParser jwtParser; private final AtomicReference currentToken; + private final StartupStatus startupStatus; 
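// For context on the checkCondition migration in the hunks above, a hedged before/after sketch.
// The old overload took a Supplier and allocated a capturing lambda on every call, even when the
// condition held; the new primitive-typed overloads defer all formatting and boxing to the failure path:
//
//     // before: lambda capture allocated per call
//     checkCondition(weight > 0, INVALID_FUNCTION_ARGUMENT, () -> String.format("Percentile weight must be > 0, was %s", weight));
//     // after: resolves to checkCondition(boolean, ErrorCodeSupplier, String, long); nothing allocated on success
//     checkCondition(weight > 0, INVALID_FUNCTION_ARGUMENT, "Percentile weight must be > 0, was %s", weight);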
@Inject - public InternalAuthenticationManager(InternalCommunicationConfig internalCommunicationConfig, SecurityConfig securityConfig, NodeInfo nodeInfo) + public InternalAuthenticationManager(InternalCommunicationConfig internalCommunicationConfig, SecurityConfig securityConfig, NodeInfo nodeInfo, StartupStatus startupStatus) { - this(getSharedSecret(internalCommunicationConfig, nodeInfo, !securityConfig.getAuthenticationTypes().equals(ImmutableList.of("insecure"))), nodeInfo.getNodeId()); + this(getSharedSecret(internalCommunicationConfig, nodeInfo, !securityConfig.getAuthenticationTypes().equals(ImmutableList.of("insecure"))), nodeInfo.getNodeId(), startupStatus); } private static String getSharedSecret(InternalCommunicationConfig internalCommunicationConfig, NodeInfo nodeInfo, boolean authenticationEnabled) @@ -86,10 +88,11 @@ private static String getSharedSecret(InternalCommunicationConfig internalCommun return internalCommunicationConfig.getSharedSecret().orElseGet(nodeInfo::getEnvironment); } - public InternalAuthenticationManager(String sharedSecret, String nodeId) + public InternalAuthenticationManager(String sharedSecret, String nodeId, StartupStatus startupStatus) { requireNonNull(sharedSecret, "sharedSecret is null"); requireNonNull(nodeId, "nodeId is null"); + this.startupStatus = requireNonNull(startupStatus, "startupStatus is null"); this.hmac = hmacShaKeyFor(Hashing.sha256().hashString(sharedSecret, UTF_8).asBytes()); this.nodeId = nodeId; this.jwtParser = newJwtParserBuilder().verifyWith(hmac).build(); @@ -118,6 +121,13 @@ public void handleInternalRequest(ContainerRequestContext request) throw new RuntimeException("Authentication error", e); } + if (!startupStatus.isStartupComplete()) { + request.abortWith(Response.status(SERVICE_UNAVAILABLE) + .type(TEXT_PLAIN_TYPE.toString()) + .entity("Trino server is still initializing") + .build()); + } + Identity identity = Identity.forUser("<internal>") .withPrincipal(new InternalPrincipal(subject)) .build(); diff --git a/core/trino-main/src/main/java/io/trino/server/testing/TestingTrinoServer.java b/core/trino-main/src/main/java/io/trino/server/testing/TestingTrinoServer.java index 8e5d603f5874..e36e3e00ec02 100644 --- a/core/trino-main/src/main/java/io/trino/server/testing/TestingTrinoServer.java +++ b/core/trino-main/src/main/java/io/trino/server/testing/TestingTrinoServer.java @@ -50,6 +50,7 @@ import io.trino.dispatcher.DispatchManager; import io.trino.eventlistener.EventListenerConfig; import io.trino.eventlistener.EventListenerManager; +import io.trino.exchange.ExchangeManagerConfig; import io.trino.exchange.ExchangeManagerRegistry; import io.trino.execution.FailureInjector; import io.trino.execution.FailureInjector.InjectedFailureType; @@ -315,6 +316,7 @@ private TestingTrinoServer( .addBinding() .to(TracingServletFilter.class); binder.bind(EventListenerConfig.class).in(Scopes.SINGLETON); + binder.bind(ExchangeManagerConfig.class).in(Scopes.SINGLETON); binder.bind(AccessControlConfig.class).in(Scopes.SINGLETON); binder.bind(TestingAccessControlManager.class).in(Scopes.SINGLETON); binder.bind(TestingGroupProvider.class).in(Scopes.SINGLETON); diff --git a/core/trino-main/src/main/java/io/trino/sql/analyzer/Analysis.java b/core/trino-main/src/main/java/io/trino/sql/analyzer/Analysis.java index 77d5f3cc4de9..d507f84b6b42 100644 --- a/core/trino-main/src/main/java/io/trino/sql/analyzer/Analysis.java +++ b/core/trino-main/src/main/java/io/trino/sql/analyzer/Analysis.java @@ -43,6 +43,7 @@ import
io.trino.spi.eventlistener.BaseViewReferenceInfo; import io.trino.spi.eventlistener.ColumnDetail; import io.trino.spi.eventlistener.ColumnInfo; +import io.trino.spi.eventlistener.ColumnLineageInfo; import io.trino.spi.eventlistener.ColumnMaskReferenceInfo; import io.trino.spi.eventlistener.MaterializedViewReferenceInfo; import io.trino.spi.eventlistener.RoutineInfo; @@ -100,6 +101,7 @@ import java.util.ArrayDeque; import java.util.ArrayList; import java.util.Collection; +import java.util.Comparator; import java.util.Deque; import java.util.HashSet; import java.util.LinkedHashMap; @@ -276,6 +278,31 @@ public Analysis(@Nullable Statement root, Map<NodeRef<Parameter>, Expression> pa this.queryType = requireNonNull(queryType, "queryType is null"); } + public Optional<List<ColumnLineageInfo>> getSelectColumnsLineageInfo() + { + // This single check should handle all cases where we don't want to produce lineage info: + // - EXPLAIN ✓ + // - INSERT/UPDATE/DELETE/MERGE ✓ + // - ALTER TABLE ADD COLUMN ✓ + // - SET COLUMN TYPE or any other DDL ✓ + if (!(root instanceof Query)) { + return Optional.empty(); + } + + RelationType rootRelation = getOutputDescriptor(); + List<ColumnLineageInfo> lineageInfo = rootRelation.getVisibleFields().stream() + // sort output fields by their index to ensure consistent ordering of lineage info + .sorted(Comparator.comparingInt(rootRelation::indexOf)) + .map(field -> new ColumnLineageInfo( + field.getName().orElse(""), + getSourceColumns(field) + .stream() + .map(SourceColumn::getColumnDetail) + .collect(toImmutableSet()))) + .collect(toImmutableList()); + return lineageInfo.isEmpty() ? Optional.empty() : Optional.of(lineageInfo); + } + public Statement getStatement() { return root; diff --git a/core/trino-main/src/main/java/io/trino/sql/planner/PlanOptimizers.java b/core/trino-main/src/main/java/io/trino/sql/planner/PlanOptimizers.java index 83978f1f3e05..138f6dff9b8c 100644 --- a/core/trino-main/src/main/java/io/trino/sql/planner/PlanOptimizers.java +++ b/core/trino-main/src/main/java/io/trino/sql/planner/PlanOptimizers.java @@ -963,6 +963,7 @@ public PlanOptimizers( .add(new PushFilterIntoValues(plannerContext)) .add(new ReplaceJoinOverConstantWithProject()) .add(new RemoveRedundantPredicateAboveTableScan(plannerContext)) + .add(new RemoveEmptyUnionBranches()) .build())); // Remove unsupported dynamic filters introduced by PredicatePushdown.
Also, cleanup dynamic filters removed by // PushPredicateIntoTableScan and RemoveRedundantPredicateAboveTableScan due to those rules replacing table scans with empty ValuesNode diff --git a/core/trino-main/src/main/java/io/trino/testing/PlanTester.java b/core/trino-main/src/main/java/io/trino/testing/PlanTester.java index e41574a6fa7c..6e11be75e1b9 100644 --- a/core/trino-main/src/main/java/io/trino/testing/PlanTester.java +++ b/core/trino-main/src/main/java/io/trino/testing/PlanTester.java @@ -60,6 +60,7 @@ import io.trino.cost.TaskCountEstimator; import io.trino.eventlistener.EventListenerConfig; import io.trino.eventlistener.EventListenerManager; +import io.trino.exchange.ExchangeManagerConfig; import io.trino.exchange.ExchangeManagerRegistry; import io.trino.execution.DynamicFilterConfig; import io.trino.execution.NodeTaskMap; @@ -474,7 +475,7 @@ private PlanTester(Session defaultSession, int nodeCountForStats) ImmutableSet.of(), ImmutableSet.of(new ExcludeColumnsFunction())); - exchangeManagerRegistry = new ExchangeManagerRegistry(noop(), noopTracer(), secretsResolver); + exchangeManagerRegistry = new ExchangeManagerRegistry(noop(), noopTracer(), secretsResolver, new ExchangeManagerConfig()); spoolingManagerRegistry = new SpoolingManagerRegistry( new InternalNode("nodeId", URI.create("http://localhost:8080"), NodeVersion.UNKNOWN, false), new ServerConfig(), diff --git a/core/trino-main/src/main/java/io/trino/type/DecimalCasts.java b/core/trino-main/src/main/java/io/trino/type/DecimalCasts.java index b111c7704b42..9ef4358c4fc6 100644 --- a/core/trino-main/src/main/java/io/trino/type/DecimalCasts.java +++ b/core/trino-main/src/main/java/io/trino/type/DecimalCasts.java @@ -573,7 +573,7 @@ public static Int128 jsonToLongDecimal(Slice json, long precision, long scale, I try (JsonParser parser = createJsonParser(JSON_FACTORY, json)) { parser.nextToken(); Int128 result = currentTokenAsLongDecimal(parser, intPrecision(precision), DecimalConversions.intScale(scale)); - checkCondition(parser.nextToken() == null, INVALID_CAST_ARGUMENT, () -> String.format("Cannot cast input json to DECIMAL(%s,%s)", precision, scale)); // check no trailing token + checkCondition(parser.nextToken() == null, INVALID_CAST_ARGUMENT, "Cannot cast input json to DECIMAL(%s,%s)", precision, scale); // check no trailing token return result; } catch (IOException | NumberFormatException | JsonCastException e) { @@ -587,7 +587,7 @@ public static Long jsonToShortDecimal(Slice json, long precision, long scale, lo try (JsonParser parser = createJsonParser(JSON_FACTORY, json)) { parser.nextToken(); Long result = currentTokenAsShortDecimal(parser, intPrecision(precision), DecimalConversions.intScale(scale)); - checkCondition(parser.nextToken() == null, INVALID_CAST_ARGUMENT, () -> String.format("Cannot cast input json to DECIMAL(%s,%s)", precision, scale)); // check no trailing token + checkCondition(parser.nextToken() == null, INVALID_CAST_ARGUMENT, "Cannot cast input json to DECIMAL(%s,%s)", precision, scale); // check no trailing token return result; } catch (IOException | NumberFormatException | JsonCastException e) { diff --git a/core/trino-main/src/main/java/io/trino/type/IpAddressType.java b/core/trino-main/src/main/java/io/trino/type/IpAddressType.java index aee1f0953355..d2de5a29b2cc 100644 --- a/core/trino-main/src/main/java/io/trino/type/IpAddressType.java +++ b/core/trino-main/src/main/java/io/trino/type/IpAddressType.java @@ -189,10 +189,10 @@ private static void readFlatToBlock( @ScalarOperator(READ_VALUE) private 
static void writeFlat( Slice value, - byte[] fixedSizeSlice, - int fixedSizeOffset, - byte[] unusedVariableSizeSlice, - int unusedVariableSizeOffset) + @FlatFixed byte[] fixedSizeSlice, + @FlatFixedOffset int fixedSizeOffset, + @FlatVariableWidth byte[] unusedVariableSizeSlice, + @FlatVariableOffset int unusedVariableSizeOffset) { value.getBytes(0, fixedSizeSlice, fixedSizeOffset, INT128_BYTES); } @@ -217,6 +217,22 @@ private static boolean equalOperator(@BlockPosition Int128ArrayBlock leftBlock, rightBlock.getInt128Low(rightPosition)); } + @ScalarOperator(EQUAL) + private static boolean equalOperator( + @FlatFixed byte[] leftFixedSizeSlice, + @FlatFixedOffset int leftFixedSizeOffset, + @FlatVariableWidth byte[] unusedVariableSizeSlice, + @FlatVariableOffset int unusedVariableSizeOffset, + @BlockPosition Int128ArrayBlock rightBlock, + @BlockIndex int rightPosition) + { + return equal( + (long) LONG_HANDLE.get(leftFixedSizeSlice, leftFixedSizeOffset), + (long) LONG_HANDLE.get(leftFixedSizeSlice, leftFixedSizeOffset + SIZE_OF_LONG), + rightBlock.getInt128High(rightPosition), + rightBlock.getInt128Low(rightPosition)); + } + private static boolean equal(long leftLow, long leftHigh, long rightLow, long rightHigh) { return leftLow == rightLow && leftHigh == rightHigh; @@ -234,6 +250,18 @@ private static long xxHash64Operator(@BlockPosition Int128ArrayBlock block, @Blo return xxHash64(block.getInt128High(position), block.getInt128Low(position)); } + @ScalarOperator(XX_HASH_64) + private static long xxHash64Operator( + @FlatFixed byte[] fixedSizeSlice, + @FlatFixedOffset int fixedSizeOffset, + @FlatVariableWidth byte[] unusedVariableSizeSlice, + @FlatVariableOffset int unusedVariableSizeOffset) + { + return xxHash64( + (long) LONG_HANDLE.get(fixedSizeSlice, fixedSizeOffset), + (long) LONG_HANDLE.get(fixedSizeSlice, fixedSizeOffset + SIZE_OF_LONG)); + } + private static long xxHash64(long low, long high) { return XxHash64.hash(low) ^ XxHash64.hash(high); diff --git a/core/trino-main/src/main/java/io/trino/type/LikeFunctions.java b/core/trino-main/src/main/java/io/trino/type/LikeFunctions.java index 9c50c720b309..381afe1bb425 100644 --- a/core/trino-main/src/main/java/io/trino/type/LikeFunctions.java +++ b/core/trino-main/src/main/java/io/trino/type/LikeFunctions.java @@ -151,6 +151,6 @@ private static Optional getEscapeCharacter(Optional escape) private static void checkEscape(boolean condition) { - checkCondition(condition, INVALID_FUNCTION_ARGUMENT, "Escape character must be followed by '%%', '_' or the escape character itself"); + checkCondition(condition, INVALID_FUNCTION_ARGUMENT, "Escape character must be followed by '%', '_' or the escape character itself"); } } diff --git a/core/trino-main/src/main/java/io/trino/util/Failures.java b/core/trino-main/src/main/java/io/trino/util/Failures.java index 8bef07c3606b..38d13dacbc36 100644 --- a/core/trino-main/src/main/java/io/trino/util/Failures.java +++ b/core/trino-main/src/main/java/io/trino/util/Failures.java @@ -13,9 +13,9 @@ */ package io.trino.util; -import com.google.common.base.Supplier; import com.google.common.collect.ImmutableList; import com.google.errorprone.annotations.FormatMethod; +import com.google.errorprone.annotations.FormatString; import io.trino.client.ErrorLocation; import io.trino.execution.ExecutionFailureInfo; import io.trino.execution.Failure; @@ -54,24 +54,119 @@ public final class Failures private Failures() {} - public static ExecutionFailureInfo toFailure(Throwable failure) + public static void checkCondition(boolean 
condition, ErrorCodeSupplier errorCode, String message) { - return toFailure(failure, newIdentityHashSet()); + if (!condition) { + throw new TrinoException(errorCode, message); + } + } + + @FormatMethod + public static void checkCondition(boolean condition, ErrorCodeSupplier errorCode, @FormatString String formatString, Object argument) + { + if (!condition) { + throw new TrinoException(errorCode, format(formatString, argument)); + } + } + + @FormatMethod + public static void checkCondition(boolean condition, ErrorCodeSupplier errorCode, @FormatString String formatString, Object argumentOne, Object argumentTwo) + { + if (!condition) { + throw new TrinoException(errorCode, format(formatString, argumentOne, argumentTwo)); + } + } + + /** + * @deprecated This overload can result in performance issues due to the varargs array creation and primitive boxing, consider adding an overload that + * matches the specific argument types you're passing instead of using this method. + */ + @Deprecated + @FormatMethod + public static void checkCondition(boolean condition, ErrorCodeSupplier errorCode, @FormatString String formatString, Object... args) + { + if (!condition) { + throw new TrinoException(errorCode, format(formatString, args)); + } + } + + @FormatMethod + public static void checkCondition(boolean condition, ErrorCodeSupplier errorCode, @FormatString String formatString, int argument) + { + if (!condition) { + throw new TrinoException(errorCode, format(formatString, argument)); + } + } + + @FormatMethod + public static void checkCondition(boolean condition, ErrorCodeSupplier errorCode, @FormatString String formatString, int argumentOne, int argumentTwo) + { + if (!condition) { + throw new TrinoException(errorCode, format(formatString, argumentOne, argumentTwo)); + } + } + + @FormatMethod + public static void checkCondition(boolean condition, ErrorCodeSupplier errorCode, @FormatString String formatString, long argument) + { + if (!condition) { + throw new TrinoException(errorCode, format(formatString, argument)); + } + } + + @FormatMethod + public static void checkCondition(boolean condition, ErrorCodeSupplier errorCode, @FormatString String formatString, long argumentOne, long argumentTwo) + { + if (!condition) { + throw new TrinoException(errorCode, format(formatString, argumentOne, argumentTwo)); + } + } + + @FormatMethod + public static void checkCondition(boolean condition, ErrorCodeSupplier errorCode, @FormatString String formatString, float argument) + { + if (!condition) { + throw new TrinoException(errorCode, format(formatString, argument)); + } + } + + @FormatMethod + public static void checkCondition(boolean condition, ErrorCodeSupplier errorCode, @FormatString String formatString, float argumentOne, float argumentTwo) + { + if (!condition) { + throw new TrinoException(errorCode, format(formatString, argumentOne, argumentTwo)); + } + } + + @FormatMethod + public static void checkCondition(boolean condition, ErrorCodeSupplier errorCode, @FormatString String formatString, double argument) + { + if (!condition) { + throw new TrinoException(errorCode, format(formatString, argument)); + } } @FormatMethod - public static void checkCondition(boolean condition, ErrorCodeSupplier errorCode, String formatString, Object... 
args) + public static void checkCondition(boolean condition, ErrorCodeSupplier errorCode, @FormatString String formatString, double argumentOne, double argumentTwo) { - checkCondition(condition, errorCode, () -> format(formatString, args)); + if (!condition) { + throw new TrinoException(errorCode, format(formatString, argumentOne, argumentTwo)); + } } - public static void checkCondition(boolean condition, ErrorCodeSupplier errorCode, Supplier<String> errorMessage) + @FormatMethod + public static void checkCondition(boolean condition, ErrorCodeSupplier errorCode, @FormatString String formatString, double argumentOne, double argumentTwo, double argumentThree) { if (!condition) { - throw new TrinoException(errorCode, errorMessage.get()); + throw new TrinoException(errorCode, format(formatString, argumentOne, argumentTwo, argumentThree)); } } + public static ExecutionFailureInfo toFailure(Throwable failure) + { + return toFailure(failure, newIdentityHashSet()); + } + public static List<ExecutionFailureInfo> toFailures(Collection<? extends Throwable> failures) { return failures.stream() diff --git a/core/trino-main/src/test/java/io/trino/exchange/TestExchangeConfig.java b/core/trino-main/src/test/java/io/trino/exchange/TestExchangeConfig.java new file mode 100644 index 000000000000..70f83fe0e45b --- /dev/null +++ b/core/trino-main/src/test/java/io/trino/exchange/TestExchangeConfig.java @@ -0,0 +1,50 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ +package io.trino.exchange; + +import com.google.common.collect.ImmutableMap; +import org.junit.jupiter.api.Test; + +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.Map; + +import static io.airlift.configuration.testing.ConfigAssertions.assertFullMapping; +import static io.airlift.configuration.testing.ConfigAssertions.assertRecordedDefaults; +import static io.airlift.configuration.testing.ConfigAssertions.recordDefaults; + +final class TestExchangeConfig +{ + @Test + void testDefaults() + { + assertRecordedDefaults(recordDefaults(ExchangeManagerConfig.class) + .setExchangeManagerConfigFile(null)); + } + + @Test + void testExplicitPropertyMappings() + throws IOException + { + Path exchangeConfig = Files.createTempFile(null, null); + + Map<String, String> properties = ImmutableMap.of("exchange-manager.config-file", exchangeConfig.toString()); + + ExchangeManagerConfig expected = new ExchangeManagerConfig() + .setExchangeManagerConfigFile(exchangeConfig.toFile()); + + assertFullMapping(properties, expected); + } +} diff --git a/core/trino-main/src/test/java/io/trino/exchange/TestLazyExchangeDataSource.java b/core/trino-main/src/test/java/io/trino/exchange/TestLazyExchangeDataSource.java index 39a8823e7c5b..cfd8fa3099c8 100644 --- a/core/trino-main/src/test/java/io/trino/exchange/TestLazyExchangeDataSource.java +++ b/core/trino-main/src/test/java/io/trino/exchange/TestLazyExchangeDataSource.java @@ -45,7 +45,7 @@ public void testIsBlockedCancellationIsolationInInitializationPhase() throw new UnsupportedOperationException(); }, RetryPolicy.NONE, - new ExchangeManagerRegistry(OpenTelemetry.noop(), Tracing.noopTracer(), new SecretsResolver(ImmutableMap.of())))) { + new ExchangeManagerRegistry(OpenTelemetry.noop(), Tracing.noopTracer(), new SecretsResolver(ImmutableMap.of()), new ExchangeManagerConfig()))) { ListenableFuture<Void> first = source.isBlocked(); ListenableFuture<Void> second = source.isBlocked(); assertThat(first) diff --git a/core/trino-main/src/test/java/io/trino/execution/BaseTestSqlTaskManager.java b/core/trino-main/src/test/java/io/trino/execution/BaseTestSqlTaskManager.java index ae128f8d1b13..73fb044c4738 100644 --- a/core/trino-main/src/test/java/io/trino/execution/BaseTestSqlTaskManager.java +++ b/core/trino-main/src/test/java/io/trino/execution/BaseTestSqlTaskManager.java @@ -29,6 +29,7 @@ import io.trino.connector.CatalogHandle; import io.trino.connector.ConnectorServices; import io.trino.connector.ConnectorServicesProvider; +import io.trino.exchange.ExchangeManagerConfig; import io.trino.exchange.ExchangeManagerRegistry; import io.trino.execution.buffer.BufferResult; import io.trino.execution.buffer.BufferState; @@ -337,7 +338,7 @@ private SqlTaskManager createSqlTaskManager(TaskManagerConfig taskManagerConfig, new NodeSpillConfig(), new TestingGcMonitor(), noopTracer(), - new ExchangeManagerRegistry(OpenTelemetry.noop(), Tracing.noopTracer(), new SecretsResolver(ImmutableMap.of()))); + new ExchangeManagerRegistry(OpenTelemetry.noop(), Tracing.noopTracer(), new SecretsResolver(ImmutableMap.of()), new ExchangeManagerConfig())); } private TaskInfo createTask(SqlTaskManager sqlTaskManager, TaskId taskId, Set<SplitAssignment> splits, OutputBuffers outputBuffers) diff --git a/core/trino-main/src/test/java/io/trino/execution/MockManagedQueryExecution.java b/core/trino-main/src/test/java/io/trino/execution/MockManagedQueryExecution.java index cf25e47851f6..bd005dc007c1 100644 --- a/core/trino-main/src/test/java/io/trino/execution/MockManagedQueryExecution.java +++
b/core/trino-main/src/test/java/io/trino/execution/MockManagedQueryExecution.java @@ -289,6 +289,7 @@ public QueryInfo getFullQueryInfo() ImmutableList.of(), ImmutableSet.of(), Optional.empty(), + Optional.empty(), ImmutableList.of(), ImmutableList.of(), state.isDone(), diff --git a/core/trino-main/src/test/java/io/trino/execution/MockRemoteTaskFactory.java b/core/trino-main/src/test/java/io/trino/execution/MockRemoteTaskFactory.java index 8f73d91515c3..2c0d9fab675e 100644 --- a/core/trino-main/src/test/java/io/trino/execution/MockRemoteTaskFactory.java +++ b/core/trino-main/src/test/java/io/trino/execution/MockRemoteTaskFactory.java @@ -31,6 +31,7 @@ import io.opentelemetry.api.trace.Span; import io.trino.Session; import io.trino.cost.StatsAndCosts; +import io.trino.exchange.ExchangeManagerConfig; import io.trino.exchange.ExchangeManagerRegistry; import io.trino.execution.NodeTaskMap.PartitionedSplitCountTracker; import io.trino.execution.buffer.LazyOutputBuffer; @@ -235,7 +236,7 @@ public MockRemoteTask( DataSize.ofBytes(1), () -> new SimpleLocalMemoryContext(newSimpleAggregatedMemoryContext(), "test"), () -> {}, - new ExchangeManagerRegistry(OpenTelemetry.noop(), Tracing.noopTracer(), new SecretsResolver(ImmutableMap.of()))); + new ExchangeManagerRegistry(OpenTelemetry.noop(), Tracing.noopTracer(), new SecretsResolver(ImmutableMap.of()), new ExchangeManagerConfig())); this.fragment = requireNonNull(fragment, "fragment is null"); this.nodeId = requireNonNull(nodeId, "nodeId is null"); diff --git a/core/trino-main/src/test/java/io/trino/execution/TaskTestUtils.java b/core/trino-main/src/test/java/io/trino/execution/TaskTestUtils.java index 618453c7a53f..3b7aa748e125 100644 --- a/core/trino-main/src/test/java/io/trino/execution/TaskTestUtils.java +++ b/core/trino-main/src/test/java/io/trino/execution/TaskTestUtils.java @@ -22,6 +22,7 @@ import io.trino.connector.CatalogHandle; import io.trino.connector.CatalogServiceProvider; import io.trino.cost.StatsAndCosts; +import io.trino.exchange.ExchangeManagerConfig; import io.trino.exchange.ExchangeManagerRegistry; import io.trino.execution.BaseTestSqlTaskManager.MockDirectExchangeClientSupplier; import io.trino.execution.buffer.OutputBuffers; @@ -180,7 +181,7 @@ public static LocalExecutionPlanner createTestingPlanner() blockTypeOperators, PLANNER_CONTEXT.getTypeOperators(), new TableExecuteContextManager(), - new ExchangeManagerRegistry(noop(), noopTracer(), new SecretsResolver(ImmutableMap.of())), + new ExchangeManagerRegistry(noop(), noopTracer(), new SecretsResolver(ImmutableMap.of()), new ExchangeManagerConfig()), new NodeVersion("test"), new CompilerConfig()); } diff --git a/core/trino-main/src/test/java/io/trino/execution/TestMemoryRevokingScheduler.java b/core/trino-main/src/test/java/io/trino/execution/TestMemoryRevokingScheduler.java index f6213664e96b..c96e93a49c26 100644 --- a/core/trino-main/src/test/java/io/trino/execution/TestMemoryRevokingScheduler.java +++ b/core/trino-main/src/test/java/io/trino/execution/TestMemoryRevokingScheduler.java @@ -25,6 +25,7 @@ import io.airlift.tracing.Tracing; import io.airlift.units.DataSize; import io.opentelemetry.api.OpenTelemetry; +import io.trino.exchange.ExchangeManagerConfig; import io.trino.exchange.ExchangeManagerRegistry; import io.trino.execution.buffer.PipelinedOutputBuffers; import io.trino.execution.executor.TaskExecutor; @@ -282,7 +283,7 @@ private SqlTask newSqlTask(QueryId queryId) sqlTask -> {}, DataSize.of(32, MEGABYTE), DataSize.of(200, MEGABYTE), - new 
ExchangeManagerRegistry(OpenTelemetry.noop(), Tracing.noopTracer(), new SecretsResolver(ImmutableMap.of())), + new ExchangeManagerRegistry(OpenTelemetry.noop(), Tracing.noopTracer(), new SecretsResolver(ImmutableMap.of()), new ExchangeManagerConfig()), new CounterStat()); } diff --git a/core/trino-main/src/test/java/io/trino/execution/TestQueryInfo.java b/core/trino-main/src/test/java/io/trino/execution/TestQueryInfo.java index 7558f25e5f39..46e7a4f5ec53 100644 --- a/core/trino-main/src/test/java/io/trino/execution/TestQueryInfo.java +++ b/core/trino-main/src/test/java/io/trino/execution/TestQueryInfo.java @@ -241,6 +241,7 @@ private static QueryInfo createQueryInfo(Optional<StagesInfo> stagesInfo) ImmutableList.of(new TrinoWarning(new WarningCode(1, "name"), "message")), ImmutableSet.of(new Input(Optional.of("connectorName"), "catalog", new CatalogVersion("default"), "schema", "table", Optional.empty(), ImmutableList.of(new Column("name", "type")), new PlanFragmentId("id"), new PlanNodeId("1"))), Optional.empty(), + Optional.empty(), ImmutableList.of(), ImmutableList.of(), true, diff --git a/core/trino-main/src/test/java/io/trino/execution/TestSqlTask.java b/core/trino-main/src/test/java/io/trino/execution/TestSqlTask.java index 793c0f3f36ab..0fb3248bf145 100644 --- a/core/trino-main/src/test/java/io/trino/execution/TestSqlTask.java +++ b/core/trino-main/src/test/java/io/trino/execution/TestSqlTask.java @@ -27,6 +27,7 @@ import io.airlift.units.Duration; import io.opentelemetry.api.OpenTelemetry; import io.opentelemetry.api.trace.Span; +import io.trino.exchange.ExchangeManagerConfig; import io.trino.exchange.ExchangeManagerRegistry; import io.trino.execution.buffer.BufferResult; import io.trino.execution.buffer.BufferState; @@ -454,7 +455,7 @@ private SqlTask createInitialTask() sqlTask -> {}, DataSize.of(32, MEGABYTE), DataSize.of(200, MEGABYTE), - new ExchangeManagerRegistry(OpenTelemetry.noop(), Tracing.noopTracer(), new SecretsResolver(ImmutableMap.of())), + new ExchangeManagerRegistry(OpenTelemetry.noop(), Tracing.noopTracer(), new SecretsResolver(ImmutableMap.of()), new ExchangeManagerConfig()), new CounterStat()); } } diff --git a/core/trino-main/src/test/java/io/trino/execution/TestSqlTaskManagerRaceWithCatalogPrune.java b/core/trino-main/src/test/java/io/trino/execution/TestSqlTaskManagerRaceWithCatalogPrune.java index 012c4798eb96..952582a5904d 100644 --- a/core/trino-main/src/test/java/io/trino/execution/TestSqlTaskManagerRaceWithCatalogPrune.java +++ b/core/trino-main/src/test/java/io/trino/execution/TestSqlTaskManagerRaceWithCatalogPrune.java @@ -36,6 +36,7 @@ import io.trino.connector.MockConnectorFactory; import io.trino.connector.TestingLocalCatalogPruneTask; import io.trino.connector.WorkerDynamicCatalogManager; +import io.trino.exchange.ExchangeManagerConfig; import io.trino.exchange.ExchangeManagerRegistry; import io.trino.execution.buffer.PipelinedOutputBuffers; import io.trino.execution.executor.RunningSplitInfo; @@ -270,7 +271,7 @@ private static SqlTaskManager getWorkerTaskManagerWithConnectorServiceProvider(C new NodeSpillConfig(), new TestingGcMonitor(), noopTracer(), - new ExchangeManagerRegistry(OpenTelemetry.noop(), Tracing.noopTracer(), new SecretsResolver(ImmutableMap.of())), + new ExchangeManagerRegistry(OpenTelemetry.noop(), Tracing.noopTracer(), new SecretsResolver(ImmutableMap.of()), new ExchangeManagerConfig()), ignore -> true); } diff --git a/core/trino-main/src/test/java/io/trino/execution/TestTaskExecutorStuckSplits.java
b/core/trino-main/src/test/java/io/trino/execution/TestTaskExecutorStuckSplits.java index 585bb9c33fdd..65d3e752926d 100644 --- a/core/trino-main/src/test/java/io/trino/execution/TestTaskExecutorStuckSplits.java +++ b/core/trino-main/src/test/java/io/trino/execution/TestTaskExecutorStuckSplits.java @@ -30,6 +30,7 @@ import io.trino.connector.CatalogHandle; import io.trino.connector.ConnectorServices; import io.trino.connector.ConnectorServicesProvider; +import io.trino.exchange.ExchangeManagerConfig; import io.trino.exchange.ExchangeManagerRegistry; import io.trino.execution.executor.TaskExecutor; import io.trino.execution.executor.TaskHandle; @@ -138,7 +139,7 @@ private SqlTaskManager createSqlTaskManager( new NodeSpillConfig(), new TestingGcMonitor(), noopTracer(), - new ExchangeManagerRegistry(OpenTelemetry.noop(), Tracing.noopTracer(), new SecretsResolver(ImmutableMap.of())), + new ExchangeManagerRegistry(OpenTelemetry.noop(), Tracing.noopTracer(), new SecretsResolver(ImmutableMap.of()), new ExchangeManagerConfig()), stuckSplitStackTracePredicate); } diff --git a/core/trino-main/src/test/java/io/trino/execution/scheduler/faulttolerant/TestBinPackingNodeAllocator.java b/core/trino-main/src/test/java/io/trino/execution/scheduler/faulttolerant/TestBinPackingNodeAllocator.java index 079e1e169bbe..f5de85da004b 100644 --- a/core/trino-main/src/test/java/io/trino/execution/scheduler/faulttolerant/TestBinPackingNodeAllocator.java +++ b/core/trino-main/src/test/java/io/trino/execution/scheduler/faulttolerant/TestBinPackingNodeAllocator.java @@ -983,7 +983,7 @@ public void testChangeMemoryRequirement() } @Test - @Timeout(value = TEST_TIMEOUT + 3000, unit = MILLISECONDS) + @Timeout(value = TEST_TIMEOUT + 5000, unit = MILLISECONDS) public void testFailover() { TestingInternalNodeManager nodeManager = TestingInternalNodeManager.createDefault(NODE_1, NODE_2); diff --git a/core/trino-main/src/test/java/io/trino/failuredetector/TestHeartbeatFailureDetector.java b/core/trino-main/src/test/java/io/trino/failuredetector/TestHeartbeatFailureDetector.java index dc98f6878164..8126fc062ac1 100644 --- a/core/trino-main/src/test/java/io/trino/failuredetector/TestHeartbeatFailureDetector.java +++ b/core/trino-main/src/test/java/io/trino/failuredetector/TestHeartbeatFailureDetector.java @@ -17,6 +17,7 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.google.inject.Injector; import com.google.inject.Key; +import com.google.inject.Scopes; import io.airlift.bootstrap.Bootstrap; import io.airlift.discovery.client.ServiceSelector; import io.airlift.discovery.client.testing.TestingDiscoveryModule; @@ -29,6 +30,7 @@ import io.trino.execution.QueryManagerConfig; import io.trino.failuredetector.HeartbeatFailureDetector.Stats; import io.trino.server.InternalCommunicationConfig; +import io.trino.server.StartupStatus; import io.trino.server.security.SecurityConfig; import jakarta.ws.rs.GET; import jakarta.ws.rs.Path; @@ -66,6 +68,7 @@ public void testExcludesCurrentNode() // Jersey with jetty 9 requires at least one resource // todo add a dummy resource to airlift jaxrs in this case jaxrsBinder(binder).bind(FooResource.class); + binder.bind(StartupStatus.class).in(Scopes.SINGLETON); }); Injector injector = app @@ -73,6 +76,9 @@ public void testExcludesCurrentNode() .quiet() .initialize(); + StartupStatus startupStatus = injector.getInstance(StartupStatus.class); + startupStatus.startupComplete(); + ServiceSelector selector = injector.getInstance(Key.get(ServiceSelector.class, serviceType("trino"))); 
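+ // Note (editorial gloss on the change above): the selection is performed only after startupComplete(), since the node's announcement appears to be gated on startup status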
assertThat(selector.selectAllServices()).hasSize(1); diff --git a/core/trino-main/src/test/java/io/trino/node/TestAnnounceNodeInventory.java b/core/trino-main/src/test/java/io/trino/node/TestAnnounceNodeInventory.java index 042c4e3fd41e..7cd7aa7cf3ac 100644 --- a/core/trino-main/src/test/java/io/trino/node/TestAnnounceNodeInventory.java +++ b/core/trino-main/src/test/java/io/trino/node/TestAnnounceNodeInventory.java @@ -16,6 +16,7 @@ import com.google.common.collect.ImmutableList; import com.google.inject.Injector; import com.google.inject.Module; +import com.google.inject.Scopes; import io.airlift.bootstrap.Bootstrap; import io.airlift.bootstrap.LifeCycleManager; import io.airlift.http.client.HttpClient; @@ -26,6 +27,7 @@ import io.airlift.json.JsonModule; import io.airlift.node.testing.TestingNodeModule; import io.trino.client.NodeVersion; +import io.trino.server.StartupStatus; import io.trino.server.security.SecurityConfig; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.Test; @@ -172,6 +174,7 @@ private static AnnouncementServer createAnnounceServer() URI.create("https://example.com:1234"), new NodeVersion("test-version"), true)); + binder.bind(StartupStatus.class).in(Scopes.SINGLETON); }); Injector injector = new Bootstrap(modules.build()) .doNotInitializeLogging() @@ -180,6 +183,8 @@ private static AnnouncementServer createAnnounceServer() AnnounceNodeInventory nodeInventory = injector.getInstance(AnnounceNodeInventory.class); URI serverUri = injector.getInstance(HttpServerInfo.class).getHttpUri(); + StartupStatus startupStatus = injector.getInstance(StartupStatus.class); + startupStatus.startupComplete(); return new AnnouncementServer( nodeInventory, serverUri, diff --git a/core/trino-main/src/test/java/io/trino/operator/TestDeduplicatingDirectExchangeBuffer.java b/core/trino-main/src/test/java/io/trino/operator/TestDeduplicatingDirectExchangeBuffer.java index f0fa2746fa61..f18e8edb9b40 100644 --- a/core/trino-main/src/test/java/io/trino/operator/TestDeduplicatingDirectExchangeBuffer.java +++ b/core/trino-main/src/test/java/io/trino/operator/TestDeduplicatingDirectExchangeBuffer.java @@ -26,6 +26,7 @@ import io.airlift.units.DataSize; import io.opentelemetry.api.OpenTelemetry; import io.opentelemetry.api.trace.Span; +import io.trino.exchange.ExchangeManagerConfig; import io.trino.exchange.ExchangeManagerRegistry; import io.trino.execution.StageId; import io.trino.execution.TaskId; @@ -68,7 +69,7 @@ public class TestDeduplicatingDirectExchangeBuffer @BeforeAll public void beforeClass() { - exchangeManagerRegistry = new ExchangeManagerRegistry(OpenTelemetry.noop(), Tracing.noopTracer(), new SecretsResolver(ImmutableMap.of())); + exchangeManagerRegistry = new ExchangeManagerRegistry(OpenTelemetry.noop(), Tracing.noopTracer(), new SecretsResolver(ImmutableMap.of()), new ExchangeManagerConfig()); exchangeManagerRegistry.addExchangeManagerFactory(new FileSystemExchangeManagerFactory()); exchangeManagerRegistry.loadExchangeManager("filesystem", ImmutableMap.of( "exchange.base-directories", System.getProperty("java.io.tmpdir") + "/trino-local-file-system-exchange-manager")); @@ -449,7 +450,7 @@ public void testExchangeManagerNotConfigured() directExecutor(), DataSize.of(100, BYTE), RetryPolicy.QUERY, - new ExchangeManagerRegistry(OpenTelemetry.noop(), Tracing.noopTracer(), new SecretsResolver(ImmutableMap.of())), + new ExchangeManagerRegistry(OpenTelemetry.noop(), Tracing.noopTracer(), new SecretsResolver(ImmutableMap.of()), new ExchangeManagerConfig()), new 
QueryId("query"), Span.getInvalid(), createRandomExchangeId())) { @@ -473,7 +474,7 @@ public void testExchangeManagerNotConfigured() directExecutor(), DataSize.of(100, BYTE), RetryPolicy.QUERY, - new ExchangeManagerRegistry(OpenTelemetry.noop(), Tracing.noopTracer(), new SecretsResolver(ImmutableMap.of())), + new ExchangeManagerRegistry(OpenTelemetry.noop(), Tracing.noopTracer(), new SecretsResolver(ImmutableMap.of()), new ExchangeManagerConfig()), new QueryId("query"), Span.getInvalid(), createRandomExchangeId())) { diff --git a/core/trino-main/src/test/java/io/trino/operator/TestDirectExchangeClient.java b/core/trino-main/src/test/java/io/trino/operator/TestDirectExchangeClient.java index bd818f42136c..902154b74010 100644 --- a/core/trino-main/src/test/java/io/trino/operator/TestDirectExchangeClient.java +++ b/core/trino-main/src/test/java/io/trino/operator/TestDirectExchangeClient.java @@ -34,6 +34,7 @@ import io.opentelemetry.api.trace.Span; import io.trino.FeaturesConfig.DataIntegrityVerification; import io.trino.block.BlockAssertions; +import io.trino.exchange.ExchangeManagerConfig; import io.trino.exchange.ExchangeManagerRegistry; import io.trino.execution.StageId; import io.trino.execution.TaskId; @@ -492,7 +493,7 @@ public void testDeduplicationTaskFailure() scheduler, DataSize.of(1, Unit.MEGABYTE), RetryPolicy.QUERY, - new ExchangeManagerRegistry(OpenTelemetry.noop(), Tracing.noopTracer(), new SecretsResolver(ImmutableMap.of())), + new ExchangeManagerRegistry(OpenTelemetry.noop(), Tracing.noopTracer(), new SecretsResolver(ImmutableMap.of()), new ExchangeManagerConfig()), new QueryId("query"), Span.getInvalid(), createRandomExchangeId()); @@ -553,7 +554,7 @@ public void testDeduplication() scheduler, DataSize.of(1, Unit.KILOBYTE), RetryPolicy.QUERY, - new ExchangeManagerRegistry(OpenTelemetry.noop(), Tracing.noopTracer(), new SecretsResolver(ImmutableMap.of())), + new ExchangeManagerRegistry(OpenTelemetry.noop(), Tracing.noopTracer(), new SecretsResolver(ImmutableMap.of()), new ExchangeManagerConfig()), new QueryId("query"), Span.getInvalid(), createRandomExchangeId()), diff --git a/core/trino-main/src/test/java/io/trino/operator/TestExchangeOperator.java b/core/trino-main/src/test/java/io/trino/operator/TestExchangeOperator.java index f0b1f1d24796..b875542800b7 100644 --- a/core/trino-main/src/test/java/io/trino/operator/TestExchangeOperator.java +++ b/core/trino-main/src/test/java/io/trino/operator/TestExchangeOperator.java @@ -27,6 +27,7 @@ import io.opentelemetry.api.OpenTelemetry; import io.trino.FeaturesConfig.DataIntegrityVerification; import io.trino.exchange.DirectExchangeInput; +import io.trino.exchange.ExchangeManagerConfig; import io.trino.exchange.ExchangeManagerRegistry; import io.trino.execution.StageId; import io.trino.execution.TaskId; @@ -270,7 +271,7 @@ private SourceOperator createExchangeOperator() directExchangeClientSupplier, SERDE_FACTORY, RetryPolicy.NONE, - new ExchangeManagerRegistry(OpenTelemetry.noop(), Tracing.noopTracer(), new SecretsResolver(ImmutableMap.of()))); + new ExchangeManagerRegistry(OpenTelemetry.noop(), Tracing.noopTracer(), new SecretsResolver(ImmutableMap.of()), new ExchangeManagerConfig())); DriverContext driverContext = createTaskContext(scheduler, scheduledExecutor, TEST_SESSION) .addPipelineContext(0, true, true, false) diff --git a/core/trino-main/src/test/java/io/trino/operator/TestMergeOperator.java b/core/trino-main/src/test/java/io/trino/operator/TestMergeOperator.java index 5a5d8a922ea2..401caa0422f7 100644 --- 
a/core/trino-main/src/test/java/io/trino/operator/TestMergeOperator.java +++ b/core/trino-main/src/test/java/io/trino/operator/TestMergeOperator.java @@ -27,6 +27,7 @@ import io.opentelemetry.api.OpenTelemetry; import io.trino.FeaturesConfig; import io.trino.exchange.DirectExchangeInput; +import io.trino.exchange.ExchangeManagerConfig; import io.trino.exchange.ExchangeManagerRegistry; import io.trino.execution.StageId; import io.trino.execution.TaskId; @@ -101,7 +102,7 @@ public void setUp() httpClient, new HttpClientConfig(), executor, - new ExchangeManagerRegistry(OpenTelemetry.noop(), Tracing.noopTracer(), new SecretsResolver(ImmutableMap.of()))); + new ExchangeManagerRegistry(OpenTelemetry.noop(), Tracing.noopTracer(), new SecretsResolver(ImmutableMap.of()), new ExchangeManagerConfig())); orderingCompiler = new OrderingCompiler(new TypeOperators()); } diff --git a/core/trino-main/src/test/java/io/trino/operator/scalar/BenchmarkArraySort.java b/core/trino-main/src/test/java/io/trino/operator/scalar/BenchmarkArraySort.java index 6afaa846e059..a5a3ba0fddfb 100644 --- a/core/trino-main/src/test/java/io/trino/operator/scalar/BenchmarkArraySort.java +++ b/core/trino-main/src/test/java/io/trino/operator/scalar/BenchmarkArraySort.java @@ -14,28 +14,19 @@ package io.trino.operator.scalar; import com.google.common.collect.ImmutableList; -import com.google.common.primitives.Ints; import io.airlift.slice.Slices; -import io.trino.metadata.InternalFunctionBundle; import io.trino.metadata.TestingFunctionResolution; import io.trino.operator.DriverYieldSignal; import io.trino.operator.project.PageProcessor; import io.trino.spi.Page; import io.trino.spi.block.ArrayBlockBuilder; import io.trino.spi.block.Block; -import io.trino.spi.block.BlockBuilder; -import io.trino.spi.block.ValueBlock; import io.trino.spi.connector.SourcePage; -import io.trino.spi.function.ScalarFunction; -import io.trino.spi.function.SqlType; import io.trino.spi.type.ArrayType; import io.trino.spi.type.Type; -import io.trino.spi.type.TypeOperators; import io.trino.sql.gen.ExpressionCompiler; import io.trino.sql.relational.CallExpression; import io.trino.sql.relational.RowExpression; -import io.trino.type.BlockTypeOperators; -import io.trino.type.BlockTypeOperators.BlockPositionComparison; import org.openjdk.jmh.annotations.Benchmark; import org.openjdk.jmh.annotations.BenchmarkMode; import org.openjdk.jmh.annotations.Fork; @@ -43,7 +34,6 @@ import org.openjdk.jmh.annotations.Mode; import org.openjdk.jmh.annotations.OperationsPerInvocation; import org.openjdk.jmh.annotations.OutputTimeUnit; -import org.openjdk.jmh.annotations.Param; import org.openjdk.jmh.annotations.Scope; import org.openjdk.jmh.annotations.Setup; import org.openjdk.jmh.annotations.State; @@ -96,16 +86,13 @@ public List> arraySort(BenchmarkData data) @State(Scope.Thread) public static class BenchmarkData { - @Param({"array_sort", "old_array_sort"}) - private String name = "array_sort"; - private Page page; private PageProcessor pageProcessor; @Setup public void setup() { - TestingFunctionResolution functionResolution = new TestingFunctionResolution(InternalFunctionBundle.extractFunctions(BenchmarkArraySort.class)); + TestingFunctionResolution functionResolution = new TestingFunctionResolution(); ExpressionCompiler compiler = functionResolution.getExpressionCompiler(); ImmutableList.Builder<RowExpression> projectionsBuilder = ImmutableList.builder(); Block[] blocks = new Block[TYPES.size()]; @@ -113,7 +100,7 @@ public void setup() Type elementType = TYPES.get(i); ArrayType 
arrayType = new ArrayType(elementType); projectionsBuilder.add(new CallExpression( - functionResolution.resolveFunction(name, fromTypes(arrayType)), + functionResolution.resolveFunction("array_sort", fromTypes(arrayType)), ImmutableList.of(field(i, arrayType)))); blocks[i] = createChannel(POSITIONS, ARRAY_SIZE, arrayType); } @@ -165,29 +152,4 @@ public static void main(String[] args) benchmark(BenchmarkArraySort.class).run(); } - - private static final BlockPositionComparison VARCHAR_COMPARISON = new BlockTypeOperators(new TypeOperators()).getComparisonUnorderedLastOperator(VARCHAR); - - @ScalarFunction - @SqlType("array(varchar)") - public static Block oldArraySort(@SqlType("array(varchar)") Block block) - { - List<Integer> positions = Ints.asList(new int[block.getPositionCount()]); - for (int i = 0; i < block.getPositionCount(); i++) { - positions.set(i, i); - } - - positions.sort((p1, p2) -> { - //TODO: This could be quite slow, it should use parametric equals - return (int) VARCHAR_COMPARISON.compare(block, p1, block, p2); - }); - - BlockBuilder blockBuilder = VARCHAR.createBlockBuilder(null, block.getPositionCount()); - ValueBlock valueBlock = block.getUnderlyingValueBlock(); - for (int position : positions) { - blockBuilder.append(valueBlock, block.getUnderlyingValuePosition(position)); - } - - return blockBuilder.build(); - } } diff --git a/core/trino-main/src/test/java/io/trino/security/TestAccessControlConfig.java b/core/trino-main/src/test/java/io/trino/security/TestAccessControlConfig.java index 471e1a89d268..aba3b84f5774 100644 --- a/core/trino-main/src/test/java/io/trino/security/TestAccessControlConfig.java +++ b/core/trino-main/src/test/java/io/trino/security/TestAccessControlConfig.java @@ -17,6 +17,7 @@ import com.google.common.collect.ImmutableMap; import io.airlift.configuration.testing.ConfigAssertions; import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.io.TempDir; import java.io.IOException; import java.nio.file.Files; @@ -33,11 +34,11 @@ public void testDefaults() } @Test - public void testExplicitPropertyMappings() + public void testExplicitPropertyMappings(@TempDir Path tempDir) throws IOException { - Path config1 = Files.createTempFile(null, null); - Path config2 = Files.createTempFile(null, null); + Path config1 = Files.createTempFile(tempDir, null, null); + Path config2 = Files.createTempFile(tempDir, null, null); Map<String, String> properties = ImmutableMap.of("access-control.config-files", config1.toString() + "," + config2.toString()); diff --git a/core/trino-main/src/test/java/io/trino/server/TestBasicQueryInfo.java b/core/trino-main/src/test/java/io/trino/server/TestBasicQueryInfo.java index 76367e1fc91e..11bda0191bfd 100644 --- a/core/trino-main/src/test/java/io/trino/server/TestBasicQueryInfo.java +++ b/core/trino-main/src/test/java/io/trino/server/TestBasicQueryInfo.java @@ -157,6 +157,7 @@ public void testConstructor() ImmutableList.of(), ImmutableSet.of(), Optional.empty(), + Optional.empty(), ImmutableList.of(), ImmutableList.of(), false, diff --git a/core/trino-main/src/test/java/io/trino/server/TestQueryStateInfo.java b/core/trino-main/src/test/java/io/trino/server/TestQueryStateInfo.java index 061293dd9ee9..a9b777fca000 100644 --- a/core/trino-main/src/test/java/io/trino/server/TestQueryStateInfo.java +++ b/core/trino-main/src/test/java/io/trino/server/TestQueryStateInfo.java @@ -202,6 +202,7 @@ private QueryInfo createQueryInfo(String queryId, QueryState state, String query ImmutableList.of(), ImmutableSet.of(), Optional.empty(), + Optional.empty(), 
ImmutableList.of(), ImmutableList.of(), false, diff --git a/core/trino-main/src/test/java/io/trino/sql/analyzer/TestAnalyzer.java b/core/trino-main/src/test/java/io/trino/sql/analyzer/TestAnalyzer.java index dedb7341b975..0a5767cde0eb 100644 --- a/core/trino-main/src/test/java/io/trino/sql/analyzer/TestAnalyzer.java +++ b/core/trino-main/src/test/java/io/trino/sql/analyzer/TestAnalyzer.java @@ -78,6 +78,8 @@ import io.trino.spi.connector.ConnectorTableMetadata; import io.trino.spi.connector.ConnectorTransactionHandle; import io.trino.spi.connector.SchemaTableName; +import io.trino.spi.eventlistener.ColumnDetail; +import io.trino.spi.eventlistener.ColumnLineageInfo; import io.trino.spi.security.Identity; import io.trino.spi.session.PropertyMetadata; import io.trino.spi.transaction.IsolationLevel; @@ -7432,6 +7434,369 @@ public void testDisallowAggregationFunctionInUnnest() .hasMessage("line 1:46: UNNEST cannot contain aggregations, window functions or grouping operations: [COUNT(t.a)]"); } + @Test + public void testSelectColumnsLineageInfo() + { + String sql = "SELECT a, b + 1 AS b1, 'C' as literal, a + b FROM (VALUES (1, 2)) t(a, b)"; + + Analysis analysis = analyze(sql); + + Optional<List<ColumnLineageInfo>> optionalLineageInfo = analysis.getSelectColumnsLineageInfo(); + assertThat(optionalLineageInfo).isPresent(); + List<ColumnLineageInfo> lineageInfo = optionalLineageInfo.get(); + assertThat(lineageInfo.size()).isEqualTo(4); + + // Check first column lineage + ColumnLineageInfo colA = lineageInfo.getFirst(); + assertThat(colA.name()).isEqualTo("a"); + assertThat(colA.sourceColumns()).isEmpty(); // 'a' is a direct value from the VALUES clause + + // Check second column lineage + ColumnLineageInfo colB1 = lineageInfo.get(1); + assertThat(colB1.name()).isEqualTo("b1"); + assertThat(colB1.sourceColumns()).isEmpty(); // 'b1' is derived from 'b + 1', which is a direct value from the VALUES clause + + // Check third column lineage + ColumnLineageInfo colLiteral = lineageInfo.get(2); + assertThat(colLiteral.name()).isEqualTo("literal"); + assertThat(colLiteral.sourceColumns()).isEmpty(); // 'literal' is a literal value + + // Check fourth column lineage + ColumnLineageInfo colAB = lineageInfo.get(3); + assertThat(colAB.name()).isEmpty(); // anonymous + assertThat(colAB.sourceColumns()).isEmpty(); // 'a + b' is derived from the values in the VALUES clause + } + + @Test + public void testSelectColumnsLineageInfoAggregateFunction() + { + String sql = "SELECT SUM(a) FROM t1 WHERE b > 1"; + + Analysis analysis = analyze(sql); + + Optional<List<ColumnLineageInfo>> optionalLineageInfo = analysis.getSelectColumnsLineageInfo(); + assertThat(optionalLineageInfo).isPresent(); + List<ColumnLineageInfo> lineageInfo = optionalLineageInfo.get(); + assertThat(lineageInfo.size()).isEqualTo(1); + + // Check the column lineage + ColumnLineageInfo colCount = lineageInfo.getFirst(); + assertThat(colCount.name()).isEmpty(); // anonymous + assertThat(colCount.sourceColumns()).containsExactlyInAnyOrder( + new ColumnDetail("tpch", "s1", "t1", "a")); + } + + @Test + public void testSelectColumnsLineageInfoWithUnion() + { + String sql = "SELECT c AS unionized FROM t1 UNION SELECT b FROM t2 UNION SELECT a FROM t3"; + + Analysis analysis = analyze(sql); + + Optional<List<ColumnLineageInfo>> optionalLineageInfo = analysis.getSelectColumnsLineageInfo(); + assertThat(optionalLineageInfo).isPresent(); + List<ColumnLineageInfo> lineageInfo = optionalLineageInfo.get(); + assertThat(lineageInfo.size()).isEqualTo(1); + + // Check the column lineage + ColumnLineageInfo colA = lineageInfo.getFirst(); + assertThat(colA.name()).isEqualTo("unionized"); + 
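// UNION output lineage merges the corresponding source column from every branch: t1.c, t2.b, and t3.a + 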
assertThat(colA.sourceColumns()).containsExactlyInAnyOrder( + new ColumnDetail("tpch", "s1", "t1", "c"), + new ColumnDetail("tpch", "s1", "t2", "b"), + new ColumnDetail("tpch", "s1", "t3", "a")); + } + + @Test + public void testSelectColumnsLineageInfoWithClause() + { + String sql = "WITH cte AS (SELECT a FROM t1)\n" + "SELECT a FROM cte UNION SELECT b FROM t2"; + + Analysis analysis = analyze(sql); + + Optional<List<ColumnLineageInfo>> optionalLineageInfo = analysis.getSelectColumnsLineageInfo(); + assertThat(optionalLineageInfo).isPresent(); + List<ColumnLineageInfo> lineageInfo = optionalLineageInfo.get(); + assertThat(lineageInfo.size()).isEqualTo(1); + + // Check the column lineage + ColumnLineageInfo colA = lineageInfo.getFirst(); + assertThat(colA.name()).isEqualTo("a"); + // The source columns should include both 'a' from t1 and 'b' from t2 + assertThat(colA.sourceColumns()).containsExactlyInAnyOrder( + new ColumnDetail("tpch", "s1", "t1", "a"), + new ColumnDetail("tpch", "s1", "t2", "b")); + } + + @Test + public void testSelectColumnsLineageInfoWithSubquery() + { + String sql = "SELECT (SELECT max(a)+min(b) FROM t2) AS min_max FROM t1 UNION SELECT max(a) FROM t3"; + + Analysis analysis = analyze(sql); + + Optional<List<ColumnLineageInfo>> optionalLineageInfo = analysis.getSelectColumnsLineageInfo(); + assertThat(optionalLineageInfo).isPresent(); + List<ColumnLineageInfo> lineageInfo = optionalLineageInfo.get(); + assertThat(lineageInfo.size()).isEqualTo(1); + + // Check the column lineage + ColumnLineageInfo colA = lineageInfo.getFirst(); + assertThat(colA.name()).isEqualTo("min_max"); + // The source columns should include both 'a' and 'b' from t2 in the subquery and t3.a from the union + assertThat(colA.sourceColumns()).containsExactlyInAnyOrder( + new ColumnDetail("tpch", "s1", "t2", "a"), + new ColumnDetail("tpch", "s1", "t2", "b"), + new ColumnDetail("tpch", "s1", "t3", "a")); + } + + @Test + public void testSelectColumnsLineageInfoNestedSet() + { + String sql = "SELECT a FROM t1 UNION (SELECT b FROM t2 INTERSECT SELECT b FROM t3)"; + + Analysis analysis = analyze(sql); + + Optional<List<ColumnLineageInfo>> optionalLineageInfo = analysis.getSelectColumnsLineageInfo(); + assertThat(optionalLineageInfo).isPresent(); + List<ColumnLineageInfo> lineageInfo = optionalLineageInfo.get(); + assertThat(lineageInfo.size()).isEqualTo(1); + + // Check the column lineage + ColumnLineageInfo colA = lineageInfo.getFirst(); + assertThat(colA.name()).isEqualTo("a"); + // The source columns should include 'a' from t1 and 'b' from both branches of the nested INTERSECT (t2 and t3) + assertThat(colA.sourceColumns()).containsExactlyInAnyOrder( + new ColumnDetail("tpch", "s1", "t1", "a"), + new ColumnDetail("tpch", "s1", "t2", "b"), + new ColumnDetail("tpch", "s1", "t3", "b")); + } + + @Test + public void testSelectColumnsLineageInfoRecursive() + { + String sql = "WITH RECURSIVE a(x) AS (SELECT a FROM t1) SELECT * FROM a"; + + Analysis analysis = analyze(sql); + + Optional<List<ColumnLineageInfo>> optionalLineageInfo = analysis.getSelectColumnsLineageInfo(); + assertThat(optionalLineageInfo).isPresent(); + List<ColumnLineageInfo> lineageInfo = optionalLineageInfo.get(); + assertThat(lineageInfo.size()).isEqualTo(1); + + // Check the column lineage + ColumnLineageInfo colA = lineageInfo.getFirst(); + assertThat(colA.name()).isEqualTo("x"); + // The source column should include 'a' from t1 + assertThat(colA.sourceColumns()).containsExactlyInAnyOrder( + new ColumnDetail("tpch", "s1", "t1", "a")); + } + + @Test + public void testSelectColumnsLineageInfoRowFromJoin() + { + String sql = "SELECT ROW(t1.a, t2.b) AS row_field FROM t1 JOIN t2 ON t1.a = t2.a"; + + Analysis analysis = analyze(sql); + + Optional<List<ColumnLineageInfo>> optionalLineageInfo 
= analysis.getSelectColumnsLineageInfo(); + assertThat(optionalLineageInfo).isPresent(); + List<ColumnLineageInfo> lineageInfo = optionalLineageInfo.get(); + assertThat(lineageInfo.size()).isEqualTo(1); + + // Check the column lineage for the ROW field + ColumnLineageInfo colRow = lineageInfo.getFirst(); + assertThat(colRow.name()).isEqualTo("row_field"); + // The ROW constructor should track lineage from both tables in the JOIN + assertThat(colRow.sourceColumns()).containsExactlyInAnyOrder( + new ColumnDetail("tpch", "s1", "t1", "a"), + new ColumnDetail("tpch", "s1", "t2", "b")); + } + + @Test + public void testSelectColumnsLineageInfoWithExplain() + { + String sql = "EXPLAIN SELECT a FROM t1"; + + Analysis analysis = analyze(sql); + + Optional<List<ColumnLineageInfo>> optionalLineageInfo = analysis.getSelectColumnsLineageInfo(); + assertThat(optionalLineageInfo).isEmpty(); + } + + @Test + public void testSelectColumnsLineageInfoWithInsert() + { + String sql = "INSERT INTO t1 SELECT a, b, a, b FROM t2"; + + Analysis analysis = analyze(sql); + + Optional<List<ColumnLineageInfo>> optionalLineageInfo = analysis.getSelectColumnsLineageInfo(); + assertThat(optionalLineageInfo).isEmpty(); + } + + @Test + public void testSelectColumnsLineageInfoWithAlterTable() + { + String sql = "ALTER TABLE t1 ADD COLUMN c bigint"; + + Analysis analysis = analyze(sql); + + Optional<List<ColumnLineageInfo>> optionalLineageInfo = analysis.getSelectColumnsLineageInfo(); + assertThat(optionalLineageInfo).isEmpty(); + } + + @Test + public void testSelectColumnsLineageInfoWithSetColumnType() + { + String sql = "ALTER TABLE t1 ALTER COLUMN a SET DATA TYPE varchar"; + + Analysis analysis = analyze(sql); + + Optional<List<ColumnLineageInfo>> optionalLineageInfo = analysis.getSelectColumnsLineageInfo(); + assertThat(optionalLineageInfo).isEmpty(); + } + + @Test + public void testSelectColumnsLineageInfoWithSession() + { + String sql = "WITH SESSION a = 1 SELECT a FROM t1"; + + Analysis analysis = analyze(sql); + + Optional<List<ColumnLineageInfo>> optionalLineageInfo = analysis.getSelectColumnsLineageInfo(); + assertThat(optionalLineageInfo).isPresent(); + List<ColumnLineageInfo> lineageInfo = optionalLineageInfo.get(); + assertThat(lineageInfo).hasSize(1); + assertThat(lineageInfo.get(0).sourceColumns()).containsExactly( + new ColumnDetail("tpch", "s1", "t1", "a")); + } + + @Test + public void testSelectColumnsLineageInfoWithFunction() + { + String sql = "WITH FUNCTION my_abs(x bigint) RETURNS bigint RETURN abs(x) SELECT my_abs(a) FROM t1"; + + Analysis analysis = analyze(sql); + + Optional<List<ColumnLineageInfo>> optionalLineageInfo = analysis.getSelectColumnsLineageInfo(); + assertThat(optionalLineageInfo).isPresent(); + List<ColumnLineageInfo> lineageInfo = optionalLineageInfo.get(); + assertThat(lineageInfo).hasSize(1); + assertThat(lineageInfo.get(0).sourceColumns()).containsExactly( + new ColumnDetail("tpch", "s1", "t1", "a")); + } + + @Test + public void testSelectColumnsLineageInfoWithNamedQuery() + { + String sql = "WITH cte AS (SELECT a FROM t1) SELECT a FROM cte"; + + Analysis analysis = analyze(sql); + + Optional<List<ColumnLineageInfo>> optionalLineageInfo = analysis.getSelectColumnsLineageInfo(); + assertThat(optionalLineageInfo).isPresent(); + List<ColumnLineageInfo> lineageInfo = optionalLineageInfo.get(); + assertThat(lineageInfo).hasSize(1); + assertThat(lineageInfo.get(0).sourceColumns()).containsExactly( + new ColumnDetail("tpch", "s1", "t1", "a")); + } + + @Test + public void testSelectColumnsLineageInfoWithTable() + { + String sql = "TABLE t1"; + + Analysis analysis = analyze(sql); + + Optional<List<ColumnLineageInfo>> optionalLineageInfo = analysis.getSelectColumnsLineageInfo(); + assertThat(optionalLineageInfo).isPresent(); + List<ColumnLineageInfo> lineageInfo = 
optionalLineageInfo.get(); + assertThat(lineageInfo).hasSize(4); + assertThat(lineageInfo.get(0).sourceColumns()).containsExactly( + new ColumnDetail("tpch", "s1", "t1", "a")); + assertThat(lineageInfo.get(1).sourceColumns()).containsExactly( + new ColumnDetail("tpch", "s1", "t1", "b")); + assertThat(lineageInfo.get(2).sourceColumns()).containsExactly( + new ColumnDetail("tpch", "s1", "t1", "c")); + assertThat(lineageInfo.get(3).sourceColumns()).containsExactly( + new ColumnDetail("tpch", "s1", "t1", "d")); + } + + @Test + public void testSelectColumnsLineageInfoWithValues() + { + String sql = "VALUES (1, 2)"; + + Analysis analysis = analyze(sql); + + Optional<List<ColumnLineageInfo>> optionalLineageInfo = analysis.getSelectColumnsLineageInfo(); + assertThat(optionalLineageInfo).isPresent(); + List<ColumnLineageInfo> lineageInfo = optionalLineageInfo.get(); + assertThat(lineageInfo).hasSize(2); + // VALUES rows have no source columns + assertThat(lineageInfo.get(0).sourceColumns()).isEmpty(); + assertThat(lineageInfo.get(1).sourceColumns()).isEmpty(); + } + + @Test + public void testSelectColumnsLineageInfoWithParentheses() + { + String sql = "(SELECT a FROM t1)"; + + Analysis analysis = analyze(sql); + + Optional<List<ColumnLineageInfo>> optionalLineageInfo = analysis.getSelectColumnsLineageInfo(); + assertThat(optionalLineageInfo).isPresent(); + List<ColumnLineageInfo> lineageInfo = optionalLineageInfo.get(); + assertThat(lineageInfo).hasSize(1); + assertThat(lineageInfo.get(0).sourceColumns()).containsExactly( + new ColumnDetail("tpch", "s1", "t1", "a")); + } + + @Test + public void testSelectColumnsLineageInfoWithIntersect() + { + String sql = "SELECT a FROM t1 INTERSECT SELECT a FROM t2"; + + Analysis analysis = analyze(sql); + + Optional<List<ColumnLineageInfo>> optionalLineageInfo = analysis.getSelectColumnsLineageInfo(); + assertThat(optionalLineageInfo).isPresent(); + List<ColumnLineageInfo> lineageInfo = optionalLineageInfo.get(); + assertThat(lineageInfo).hasSize(1); + assertThat(lineageInfo.get(0).sourceColumns()).containsExactlyInAnyOrder( + new ColumnDetail("tpch", "s1", "t1", "a"), + new ColumnDetail("tpch", "s1", "t2", "a")); + } + + @Test + public void testSelectColumnsLineageInfoWithExplainAnalyze() + { + String sql = "EXPLAIN ANALYZE SELECT a FROM t1"; + + Analysis analysis = analyze(sql); + + Optional<List<ColumnLineageInfo>> optionalLineageInfo = analysis.getSelectColumnsLineageInfo(); + assertThat(optionalLineageInfo).isEmpty(); + } + + @Test + public void testSelectColumnsLineageInfoWithShow() + { + String sql = "SHOW SCHEMAS"; + + Analysis analysis = analyze(sql); + + Optional<List<ColumnLineageInfo>> optionalLineageInfo = analysis.getSelectColumnsLineageInfo(); + assertThat(optionalLineageInfo).isPresent(); + List<ColumnLineageInfo> lineageInfo = optionalLineageInfo.get(); + assertThat(lineageInfo).hasSize(1); + assertThat(lineageInfo.get(0).name()).isEqualTo("Schema"); + assertThat(lineageInfo.get(0).sourceColumns()).hasSize(1); + assertThat(lineageInfo.get(0).sourceColumns()).contains( + new ColumnDetail("tpch", "information_schema", "schemata", "schema_name")); + } + @BeforeAll public void setup() { diff --git a/core/trino-main/src/test/java/io/trino/sql/planner/TestLogicalPlanner.java b/core/trino-main/src/test/java/io/trino/sql/planner/TestLogicalPlanner.java index 870207ff9311..c09536c763f5 100644 --- a/core/trino-main/src/test/java/io/trino/sql/planner/TestLogicalPlanner.java +++ b/core/trino-main/src/test/java/io/trino/sql/planner/TestLogicalPlanner.java @@ -1319,6 +1319,22 @@ public void testCorrelatedDistinctGroupedAggregationRewriteToLeftOuterJoin() ImmutableMap.of("o_orderkey", "orderkey", "o_orderstatus", "orderstatus", "o_custkey", 
"custkey"))))))))))))))); } + @Test + public void testRemoveEmptyUnionBranch() + { + assertThat(countOfMatchingNodes( + plan(""" + SELECT * + FROM ( + SELECT n.name, CAST(null AS varchar) AS comment FROM nation n WHERE n.nationkey <= 3 + UNION ALL + SELECT r.name, r.comment FROM region r + ) + WHERE comment IN (SELECT r.comment FROM region r) + """), + ValuesNode.class::isInstance)).isEqualTo(0); + } + @Test public void testRemovesTrivialFilters() { diff --git a/core/trino-server-main/pom.xml b/core/trino-server-main/pom.xml index 6f33bfc568fa..6d9d3fa9a3e1 100644 --- a/core/trino-server-main/pom.xml +++ b/core/trino-server-main/pom.xml @@ -16,7 +16,7 @@ true - 8 + 11 diff --git a/core/trino-server-main/src/main/java/io/trino/server/TrinoServer.java b/core/trino-server-main/src/main/java/io/trino/server/TrinoServer.java index 58626c837c5b..6c13a4f25c2e 100644 --- a/core/trino-server-main/src/main/java/io/trino/server/TrinoServer.java +++ b/core/trino-server-main/src/main/java/io/trino/server/TrinoServer.java @@ -13,11 +13,7 @@ */ package io.trino.server; -import com.google.common.base.StandardSystemProperty; -import com.google.common.primitives.Ints; - import static com.google.common.base.MoreObjects.firstNonNull; -import static com.google.common.base.Strings.nullToEmpty; public final class TrinoServer { @@ -25,15 +21,13 @@ private TrinoServer() {} public static void main(String[] args) { - String javaVersion = nullToEmpty(StandardSystemProperty.JAVA_VERSION.value()); - String majorVersion = javaVersion.split("\\D", 2)[0]; - Integer major = Ints.tryParse(majorVersion); - if (major == null || major < 22) { + Runtime.Version javaVersion = Runtime.version(); + if (javaVersion.feature() < 22) { System.err.printf("ERROR: Trino requires Java 22+ (found %s)%n", javaVersion); System.exit(100); } - String version = TrinoServer.class.getPackage().getImplementationVersion(); - new Server().start(firstNonNull(version, "unknown")); + String trinoVersion = TrinoServer.class.getPackage().getImplementationVersion(); + new Server().start(firstNonNull(trinoVersion, "unknown")); } } diff --git a/core/trino-spi/pom.xml b/core/trino-spi/pom.xml index 09396d9b9930..93f84a83f4cc 100644 --- a/core/trino-spi/pom.xml +++ b/core/trino-spi/pom.xml @@ -216,40 +216,6 @@ - - true - java.method.noLongerDefault - method void io.trino.spi.connector.Connector::shutdown() - method void io.trino.spi.connector.Connector::shutdown() - Require connector to implement shutdown to prevent leaks - - - true - java.method.nowAbstract - method void io.trino.spi.connector.Connector::shutdown() - method void io.trino.spi.connector.Connector::shutdown() - Require connector to implement shutdown to prevent leaks - - - java.annotation.removed - method java.lang.String io.trino.spi.QueryId::toString() - method java.lang.String io.trino.spi.QueryId::toString() - @com.fasterxml.jackson.annotation.JsonValue - QueryId converted to a record - - - true - java.class.kindChanged - class io.trino.spi.QueryId - class io.trino.spi.QueryId - QueryId converted to a record - - - true - java.method.removed - method void io.trino.spi.resourcegroups.SelectionCriteria::<init>(boolean, java.lang.String, java.util.Set<java.lang.String>, java.util.Optional<java.lang.String>, java.util.Set<java.lang.String>, io.trino.spi.session.ResourceEstimates, java.util.Optional<java.lang.String>) - Remove a deprecated constructor - true java.method.varargOverloadsOnlyDifferInVarargParameter @@ -292,46 +258,6 @@ method 
io.opentelemetry.api.common.Value<java.util.List<io.opentelemetry.api.common.Value<?>>> io.opentelemetry.api.common.Value<T>::of(io.opentelemetry.api.common.Value<?>[]) Revapi now detects new API changes to vararg args - - true - java.method.visibilityIncreased - method boolean[] io.trino.spi.block.RowBlock::getRawRowIsNull() - method boolean[] io.trino.spi.block.RowBlock::getRawRowIsNull() - package - public - Allow direct access to isNull mask on RowBlock for performance critical sections - - - java.method.returnTypeChanged - method void io.trino.spi.connector.ConnectorMetadata::executeTableExecute(io.trino.spi.connector.ConnectorSession, io.trino.spi.connector.ConnectorTableExecuteHandle) - method java.util.Map<java.lang.String, java.lang.Long> io.trino.spi.connector.ConnectorMetadata::executeTableExecute(io.trino.spi.connector.ConnectorSession, io.trino.spi.connector.ConnectorTableExecuteHandle) - - - java.method.removed - method int io.trino.spi.PageSorter::decodePageIndex(long) - - - java.method.removed - method int io.trino.spi.PageSorter::decodePositionIndex(long) - - - java.method.returnTypeChanged - method long[] io.trino.spi.PageSorter::sort(java.util.List<io.trino.spi.type.Type>, java.util.List<io.trino.spi.Page>, java.util.List<java.lang.Integer>, java.util.List<io.trino.spi.connector.SortOrder>, int) - method java.util.Iterator<io.trino.spi.Page> io.trino.spi.PageSorter::sort(java.util.List<io.trino.spi.type.Type>, java.util.List<io.trino.spi.Page>, java.util.List<java.lang.Integer>, java.util.List<io.trino.spi.connector.SortOrder>, int) - - - java.method.removed - method double io.trino.spi.metrics.Distribution<T>::getPercentile(double) - - - java.method.addedToInterface - method double[] io.trino.spi.metrics.Distribution<T>::getPercentiles(double[]) - - - java.method.numberOfParametersChanged - method void io.trino.spi.eventlistener.QueryStatistics::<init>(java.time.Duration, java.time.Duration, java.time.Duration, java.time.Duration, java.util.Optional<java.time.Duration>, java.util.Optional<java.time.Duration>, java.util.Optional<java.time.Duration>, java.util.Optional<java.time.Duration>, java.util.Optional<java.time.Duration>, java.util.Optional<java.time.Duration>, java.util.Optional<java.time.Duration>, java.util.Optional<java.time.Duration>, java.util.Optional<java.time.Duration>, java.util.Optional<java.time.Duration>, java.util.Optional<java.time.Duration>, java.util.Optional<java.time.Duration>, java.util.Optional<java.time.Duration>, long, long, long, long, long, long, long, long, long, long, long, long, long, long, double, double, java.util.List<io.trino.spi.eventlistener.StageGcStatistics>, int, boolean, java.util.List<io.trino.spi.eventlistener.StageCpuDistribution>, java.util.List<io.trino.spi.eventlistener.StageOutputBufferUtilization>, java.util.List<io.trino.spi.eventlistener.StageOutputBufferMetrics>, java.util.List<io.trino.spi.eventlistener.StageTaskStatistics>, java.util.List<io.trino.spi.eventlistener.DynamicFilterDomainStatistics>, java.util.function.Supplier<java.util.List<java.lang.String>>, java.util.List<io.trino.spi.eventlistener.QueryPlanOptimizerStatistics>, java.util.Optional<java.lang.String>) - method void io.trino.spi.eventlistener.QueryStatistics::<init>(java.time.Duration, java.time.Duration, java.time.Duration, java.time.Duration, java.util.Optional<java.time.Duration>, java.util.Optional<java.time.Duration>, java.util.Optional<java.time.Duration>, java.util.Optional<java.time.Duration>, java.util.Optional<java.time.Duration>, 
java.util.Optional<java.time.Duration>, java.util.Optional<java.time.Duration>, java.util.Optional<java.time.Duration>, java.util.Optional<java.time.Duration>, java.util.Optional<java.time.Duration>, java.util.Optional<java.time.Duration>, java.util.Optional<java.time.Duration>, java.util.Optional<java.time.Duration>, long, long, long, long, long, long, long, long, long, long, long, long, long, long, double, double, java.util.List<io.trino.spi.eventlistener.StageGcStatistics>, int, boolean, java.util.List<io.trino.spi.eventlistener.StageCpuDistribution>, java.util.List<io.trino.spi.eventlistener.StageOutputBufferUtilization>, java.util.List<io.trino.spi.eventlistener.StageOutputBufferMetrics>, java.util.List<io.trino.spi.eventlistener.StageTaskStatistics>, java.util.List<io.trino.spi.eventlistener.DynamicFilterDomainStatistics>, java.util.function.Supplier<java.util.List<java.lang.String>>, java.util.List<io.trino.spi.eventlistener.QueryPlanOptimizerStatistics>, java.util.Map<java.lang.String, io.trino.spi.metrics.Metrics>, java.util.Optional<java.lang.String>) - java.method.numberOfParametersChanged method void io.trino.spi.eventlistener.QueryStatistics::<init>(java.time.Duration, java.time.Duration, java.time.Duration, java.time.Duration, java.util.Optional<java.time.Duration>, java.util.Optional<java.time.Duration>, java.util.Optional<java.time.Duration>, java.util.Optional<java.time.Duration>, java.util.Optional<java.time.Duration>, java.util.Optional<java.time.Duration>, java.util.Optional<java.time.Duration>, java.util.Optional<java.time.Duration>, java.util.Optional<java.time.Duration>, java.util.Optional<java.time.Duration>, java.util.Optional<java.time.Duration>, java.util.Optional<java.time.Duration>, java.util.Optional<java.time.Duration>, long, long, long, long, long, long, long, long, long, long, long, long, long, long, double, double, java.util.List<io.trino.spi.eventlistener.StageGcStatistics>, int, boolean, java.util.List<io.trino.spi.eventlistener.StageCpuDistribution>, java.util.List<io.trino.spi.eventlistener.StageOutputBufferUtilization>, java.util.List<io.trino.spi.eventlistener.StageOutputBufferMetrics>, java.util.List<io.trino.spi.eventlistener.StageTaskStatistics>, java.util.List<io.trino.spi.eventlistener.DynamicFilterDomainStatistics>, java.util.function.Supplier<java.util.List<java.lang.String>>, java.util.List<io.trino.spi.eventlistener.QueryPlanOptimizerStatistics>, java.util.Map<java.lang.String, io.trino.spi.metrics.Metrics>, java.util.Optional<java.lang.String>) diff --git a/core/trino-spi/src/main/java/io/trino/spi/block/ArrayBlockBuilder.java b/core/trino-spi/src/main/java/io/trino/spi/block/ArrayBlockBuilder.java index 53879c21bf27..38e0c2bb7039 100644 --- a/core/trino-spi/src/main/java/io/trino/spi/block/ArrayBlockBuilder.java +++ b/core/trino-spi/src/main/java/io/trino/spi/block/ArrayBlockBuilder.java @@ -119,6 +119,45 @@ public void buildEntry(ArrayValueBuilder builder) currentEntryOpened = false; } + public ArrayEntryBuilder buildEntry() + { + return new ArrayEntryBuilderImplementation(); + } + + private class ArrayEntryBuilderImplementation + implements ArrayEntryBuilder + { + private boolean entryBuilt; + + public ArrayEntryBuilderImplementation() + { + if (currentEntryOpened) { + throw new IllegalStateException("Expected current entry to be closed but was opened"); + } + currentEntryOpened = true; + } + + @Override + public BlockBuilder getElementBuilder() + { + if (entryBuilt || !currentEntryOpened) { + throw new IllegalStateException("Entry has 
already been built"); + } + return values; + } + + @Override + public void build() + { + if (entryBuilt || !currentEntryOpened) { + throw new IllegalStateException("Entry has already been built"); + } + entryBuilt = true; + entryAdded(false); + currentEntryOpened = false; + } + } + @Override public void append(ValueBlock block, int position) { diff --git a/core/trino-spi/src/main/java/io/trino/spi/block/ArrayEntryBuilder.java b/core/trino-spi/src/main/java/io/trino/spi/block/ArrayEntryBuilder.java new file mode 100644 index 000000000000..c37adce7f855 --- /dev/null +++ b/core/trino-spi/src/main/java/io/trino/spi/block/ArrayEntryBuilder.java @@ -0,0 +1,29 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.trino.spi.block; + +public interface ArrayEntryBuilder +{ + /** + * Get the element BlockBuilder. + * The block builder must not be retained or used after build() is called. + */ + BlockBuilder getElementBuilder(); + + /** + * Finalize the entry the elements have been appended to the element builder. + * This method MUST be called exactly once. + */ + void build(); +} diff --git a/core/trino-spi/src/main/java/io/trino/spi/block/RowBlockBuilder.java b/core/trino-spi/src/main/java/io/trino/spi/block/RowBlockBuilder.java index 003e95f069d6..35ecf22e8e1e 100644 --- a/core/trino-spi/src/main/java/io/trino/spi/block/RowBlockBuilder.java +++ b/core/trino-spi/src/main/java/io/trino/spi/block/RowBlockBuilder.java @@ -113,6 +113,45 @@ public void buildEntry(RowValueBuilder builder) currentEntryOpened = false; } + public RowEntryBuilder buildEntry() + { + return new RowEntryBuilderImplementation(); + } + + private class RowEntryBuilderImplementation + implements RowEntryBuilder + { + private boolean entryBuilt; + + public RowEntryBuilderImplementation() + { + if (currentEntryOpened) { + throw new IllegalStateException("Expected current entry to be closed but was opened"); + } + currentEntryOpened = true; + } + + @Override + public BlockBuilder getFieldBuilder(int fieldId) + { + if (entryBuilt || !currentEntryOpened) { + throw new IllegalStateException("Entry has already been built"); + } + return fieldBlockBuilders[fieldId]; + } + + @Override + public void build() + { + if (entryBuilt || !currentEntryOpened) { + throw new IllegalStateException("Entry has already been built"); + } + entryBuilt = true; + entryAdded(false); + currentEntryOpened = false; + } + } + @Override public void append(ValueBlock block, int position) { diff --git a/core/trino-spi/src/main/java/io/trino/spi/block/RowEntryBuilder.java b/core/trino-spi/src/main/java/io/trino/spi/block/RowEntryBuilder.java new file mode 100644 index 000000000000..db80682498bf --- /dev/null +++ b/core/trino-spi/src/main/java/io/trino/spi/block/RowEntryBuilder.java @@ -0,0 +1,30 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.trino.spi.block; + +public interface RowEntryBuilder +{ + /** + * Get the BlockBuilder for a specific field in the row. + * Only a single value should be appended to the returned BlockBuilder before calling build(). + * The block builder must not be retained or used after build() is called. + */ + BlockBuilder getFieldBuilder(int fieldId); + + /** + * Finalize the entry after ALL field values have been appended to the field builders. + * This method MUST be called exactly once after all field builders have been used. + */ + void build(); +} diff --git a/core/trino-spi/src/main/java/io/trino/spi/eventlistener/ColumnLineageInfo.java b/core/trino-spi/src/main/java/io/trino/spi/eventlistener/ColumnLineageInfo.java new file mode 100644 index 000000000000..6d08268317a6 --- /dev/null +++ b/core/trino-spi/src/main/java/io/trino/spi/eventlistener/ColumnLineageInfo.java @@ -0,0 +1,39 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package io.trino.spi.eventlistener; + +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonProperty; + +import java.util.Set; + +import static java.util.Objects.requireNonNull; + +/** + * This record is JSON serializable for storing column lineage information for select queries. 
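+ * The name is the output column label (empty for anonymous columns); sourceColumns identifies the catalog, schema, table, and column each output value derives from.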
+ */ +public record ColumnLineageInfo( + @JsonProperty String name, + @JsonProperty Set<ColumnDetail> sourceColumns) +{ + @JsonCreator + public ColumnLineageInfo(String name, Set<ColumnDetail> sourceColumns) + { + requireNonNull(name, "name is null"); + requireNonNull(sourceColumns, "sourceColumns is null"); + this.name = name; + this.sourceColumns = Set.copyOf(sourceColumns); + } +} diff --git a/core/trino-spi/src/main/java/io/trino/spi/eventlistener/QueryCompletedEvent.java b/core/trino-spi/src/main/java/io/trino/spi/eventlistener/QueryCompletedEvent.java index 0dbee9098b77..65b924cbba52 100644 --- a/core/trino-spi/src/main/java/io/trino/spi/eventlistener/QueryCompletedEvent.java +++ b/core/trino-spi/src/main/java/io/trino/spi/eventlistener/QueryCompletedEvent.java @@ -34,6 +34,7 @@ public class QueryCompletedEvent private final QueryStatistics statistics; private final QueryContext context; private final QueryIOMetadata ioMetadata; + private final Optional<List<ColumnLineageInfo>> selectColumnsLineageInfo; private final Optional<QueryFailureInfo> failureInfo; private final List<TrinoWarning> warnings; @@ -48,6 +49,7 @@ public QueryCompletedEvent( QueryStatistics statistics, QueryContext context, QueryIOMetadata ioMetadata, + Optional<List<ColumnLineageInfo>> selectColumnsLineageInfo, Optional<QueryFailureInfo> failureInfo, List<TrinoWarning> warnings, Instant createTime, @@ -58,6 +60,7 @@ public QueryCompletedEvent( this.statistics = requireNonNull(statistics, "statistics is null"); this.context = requireNonNull(context, "context is null"); this.ioMetadata = requireNonNull(ioMetadata, "ioMetadata is null"); + this.selectColumnsLineageInfo = requireNonNull(selectColumnsLineageInfo, "selectColumnsLineageInfo is null"); this.failureInfo = requireNonNull(failureInfo, "failureInfo is null"); this.warnings = requireNonNull(warnings, "warnings is null"); this.createTime = requireNonNull(createTime, "createTime is null"); @@ -65,6 +68,16 @@ public QueryCompletedEvent( this.endTime = requireNonNull(endTime, "endTime is null"); } + /** + * Returns column lineage information for select queries, if available. + * The list is sorted by column index in the select clause. + */ + @JsonProperty + public Optional<List<ColumnLineageInfo>> getSelectColumnsLineageInfo() + { + return selectColumnsLineageInfo; + } + @JsonProperty public QueryMetadata getMetadata() { diff --git a/core/trino-spi/src/main/java/io/trino/spi/expression/StandardFunctions.java b/core/trino-spi/src/main/java/io/trino/spi/expression/StandardFunctions.java index cf52f4288b42..86ec5935f4ba 100644 --- a/core/trino-spi/src/main/java/io/trino/spi/expression/StandardFunctions.java +++ b/core/trino-spi/src/main/java/io/trino/spi/expression/StandardFunctions.java @@ -52,6 +52,9 @@ private StandardFunctions() {} public static final FunctionName LESS_THAN_OR_EQUAL_OPERATOR_FUNCTION_NAME = new FunctionName("$less_than_or_equal"); public static final FunctionName GREATER_THAN_OPERATOR_FUNCTION_NAME = new FunctionName("$greater_than"); public static final FunctionName GREATER_THAN_OR_EQUAL_OPERATOR_FUNCTION_NAME = new FunctionName("$greater_than_or_equal"); + /** + * $identical function is equivalent to the SQL operator "IS NOT DISTINCT FROM". 
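+ * Unlike the $equal operator, it treats two NULL arguments as identical, so it always yields true or false, never unknown.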
+ */ public static final FunctionName IDENTICAL_OPERATOR_FUNCTION_NAME = new FunctionName("$identical"); /** diff --git a/core/trino-spi/src/main/java/io/trino/spi/type/AbstractIntType.java b/core/trino-spi/src/main/java/io/trino/spi/type/AbstractIntType.java index b17f6e3fd96e..5274086b47ac 100644 --- a/core/trino-spi/src/main/java/io/trino/spi/type/AbstractIntType.java +++ b/core/trino-spi/src/main/java/io/trino/spi/type/AbstractIntType.java @@ -165,10 +165,10 @@ private static long readFlat( @ScalarOperator(READ_VALUE) private static void writeFlat( long value, - byte[] fixedSizeSlice, - int fixedSizeOffset, - byte[] unusedVariableSizeSlice, - int unusedVariableSizeOffset) + @FlatFixed byte[] fixedSizeSlice, + @FlatFixedOffset int fixedSizeOffset, + @FlatVariableWidth byte[] unusedVariableSizeSlice, + @FlatVariableOffset int unusedVariableSizeOffset) { INT_HANDLE.set(fixedSizeSlice, fixedSizeOffset, (int) value); } diff --git a/core/trino-spi/src/main/java/io/trino/spi/type/AbstractLongType.java b/core/trino-spi/src/main/java/io/trino/spi/type/AbstractLongType.java index fc82b2081ee1..dd2ae4ce569b 100644 --- a/core/trino-spi/src/main/java/io/trino/spi/type/AbstractLongType.java +++ b/core/trino-spi/src/main/java/io/trino/spi/type/AbstractLongType.java @@ -143,10 +143,10 @@ private static long readFlat( @ScalarOperator(READ_VALUE) private static void writeFlat( long value, - byte[] fixedSizeSlice, - int fixedSizeOffset, - byte[] unusedVariableSizeSlice, - int unusedVariableSizeOffset) + @FlatFixed byte[] fixedSizeSlice, + @FlatFixedOffset int fixedSizeOffset, + @FlatVariableWidth byte[] unusedVariableSizeSlice, + @FlatVariableOffset int unusedVariableSizeOffset) { LONG_HANDLE.set(fixedSizeSlice, fixedSizeOffset, value); } diff --git a/core/trino-spi/src/main/java/io/trino/spi/type/AbstractVariableWidthType.java b/core/trino-spi/src/main/java/io/trino/spi/type/AbstractVariableWidthType.java index 095fbec38bdc..67c20d0dda50 100644 --- a/core/trino-spi/src/main/java/io/trino/spi/type/AbstractVariableWidthType.java +++ b/core/trino-spi/src/main/java/io/trino/spi/type/AbstractVariableWidthType.java @@ -201,10 +201,10 @@ private static void readFlatToBlock( @ScalarOperator(READ_VALUE) private static void writeFlatFromStack( Slice value, - byte[] fixedSizeSlice, - int fixedSizeOffset, - byte[] variableSizeSlice, - int variableSizeOffset) + @FlatFixed byte[] fixedSizeSlice, + @FlatFixedOffset int fixedSizeOffset, + @FlatVariableWidth byte[] variableSizeSlice, + @FlatVariableOffset int variableSizeOffset) { int length = value.length(); writeFlatVariableLength(length, fixedSizeSlice, fixedSizeOffset); @@ -225,10 +225,10 @@ private static void writeFlatFromStack( private static void writeFlatFromBlock( @BlockPosition VariableWidthBlock block, @BlockIndex int position, - byte[] fixedSizeSlice, - int fixedSizeOffset, - byte[] variableSizeSlice, - int variableSizeOffset) + @FlatFixed byte[] fixedSizeSlice, + @FlatFixedOffset int fixedSizeOffset, + @FlatVariableWidth byte[] variableSizeSlice, + @FlatVariableOffset int variableSizeOffset) { Slice rawSlice = block.getRawSlice(); int rawSliceOffset = block.getRawSliceOffset(position); diff --git a/core/trino-spi/src/main/java/io/trino/spi/type/BooleanType.java b/core/trino-spi/src/main/java/io/trino/spi/type/BooleanType.java index 0fb118304ce6..5ab89395b56b 100644 --- a/core/trino-spi/src/main/java/io/trino/spi/type/BooleanType.java +++ b/core/trino-spi/src/main/java/io/trino/spi/type/BooleanType.java @@ -176,10 +176,10 @@ private static boolean readFlat( 
@ScalarOperator(READ_VALUE) private static void writeFlat( boolean value, - byte[] fixedSizeSlice, - int fixedSizeOffset, - byte[] unusedVariableSizeSlice, - int unusedVariableSizeOffset) + @FlatFixed byte[] fixedSizeSlice, + @FlatFixedOffset int fixedSizeOffset, + @FlatVariableWidth byte[] unusedVariableSizeSlice, + @FlatVariableOffset int unusedVariableSizeOffset) { fixedSizeSlice[fixedSizeOffset] = (byte) (value ? 1 : 0); } diff --git a/core/trino-spi/src/main/java/io/trino/spi/type/DoubleType.java b/core/trino-spi/src/main/java/io/trino/spi/type/DoubleType.java index af4dbc34f425..a53d7ad9e9d7 100644 --- a/core/trino-spi/src/main/java/io/trino/spi/type/DoubleType.java +++ b/core/trino-spi/src/main/java/io/trino/spi/type/DoubleType.java @@ -174,10 +174,10 @@ private static double readFlat( @ScalarOperator(READ_VALUE) private static void writeFlat( double value, - byte[] fixedSizeSlice, - int fixedSizeOffset, - byte[] unusedVariableSizeSlice, - int unusedVariableSizeOffset) + @FlatFixed byte[] fixedSizeSlice, + @FlatFixedOffset int fixedSizeOffset, + @FlatVariableWidth byte[] unusedVariableSizeSlice, + @FlatVariableOffset int unusedVariableSizeOffset) { DOUBLE_HANDLE.set(fixedSizeSlice, fixedSizeOffset, value); } diff --git a/core/trino-spi/src/main/java/io/trino/spi/type/LongDecimalType.java b/core/trino-spi/src/main/java/io/trino/spi/type/LongDecimalType.java index a5c91a7cdb06..0150acfb9329 100644 --- a/core/trino-spi/src/main/java/io/trino/spi/type/LongDecimalType.java +++ b/core/trino-spi/src/main/java/io/trino/spi/type/LongDecimalType.java @@ -152,10 +152,10 @@ private static void readFlatToBlock( @ScalarOperator(READ_VALUE) private static void writeFlat( Int128 value, - byte[] fixedSizeSlice, - int fixedSizeOffset, - byte[] unusedVariableSizeSlice, - int unusedVariableSizeOffset) + @FlatFixed byte[] fixedSizeSlice, + @FlatFixedOffset int fixedSizeOffset, + @FlatVariableWidth byte[] unusedVariableSizeSlice, + @FlatVariableOffset int unusedVariableSizeOffset) { LONG_HANDLE.set(fixedSizeSlice, fixedSizeOffset, value.getHigh()); LONG_HANDLE.set(fixedSizeSlice, fixedSizeOffset + SIZE_OF_LONG, value.getLow()); @@ -165,10 +165,10 @@ private static void writeFlat( private static void writeBlockToFlat( @BlockPosition Int128ArrayBlock block, @BlockIndex int position, - byte[] fixedSizeSlice, - int fixedSizeOffset, - byte[] unusedVariableSizeSlice, - int unusedVariableSizeOffset) + @FlatFixed byte[] fixedSizeSlice, + @FlatFixedOffset int fixedSizeOffset, + @FlatVariableWidth byte[] unusedVariableSizeSlice, + @FlatVariableOffset int unusedVariableSizeOffset) { LONG_HANDLE.set(fixedSizeSlice, fixedSizeOffset, block.getInt128High(position)); LONG_HANDLE.set(fixedSizeSlice, fixedSizeOffset + SIZE_OF_LONG, block.getInt128Low(position)); @@ -187,6 +187,19 @@ private static boolean equalOperator(@BlockPosition Int128ArrayBlock leftBlock, leftBlock.getInt128Low(leftPosition) == rightBlock.getInt128Low(rightPosition); } + @ScalarOperator(EQUAL) + private static boolean equalOperator( + @FlatFixed byte[] leftFixedSizeSlice, + @FlatFixedOffset int leftFixedSizeOffset, + @FlatVariableWidth byte[] unusedVariableSizeSlice, + @FlatVariableOffset int unusedVariableSizeOffset, + @BlockPosition Int128ArrayBlock rightBlock, + @BlockIndex int rightPosition) + { + return ((long) LONG_HANDLE.get(leftFixedSizeSlice, leftFixedSizeOffset)) == rightBlock.getInt128High(rightPosition) + && ((long) LONG_HANDLE.get(leftFixedSizeSlice, leftFixedSizeOffset + SIZE_OF_LONG)) == rightBlock.getInt128Low(rightPosition); + } + 
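// The flat variant above reads the decimal's high and low longs straight from the fixed-size slice, so equality against a block position needs no intermediate Int128. + 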
@ScalarOperator(XX_HASH_64) private static long xxHash64Operator(Int128 value) { @@ -199,6 +212,18 @@ private static long xxHash64Operator(@BlockPosition Int128ArrayBlock block, @Blo return xxHash64(block.getInt128High(position), block.getInt128Low(position)); } + @ScalarOperator(XX_HASH_64) + private static long xxHash64Operator( + @FlatFixed byte[] fixedSizeSlice, + @FlatFixedOffset int fixedSizeOffset, + @FlatVariableWidth byte[] unusedVariableSizeSlice, + @FlatVariableOffset int unusedVariableSizeOffset) + { + return xxHash64( + (long) LONG_HANDLE.get(fixedSizeSlice, fixedSizeOffset), + (long) LONG_HANDLE.get(fixedSizeSlice, fixedSizeOffset + SIZE_OF_LONG)); + } + private static long xxHash64(long high, long low) { return XxHash64.hash(high) ^ XxHash64.hash(low); diff --git a/core/trino-spi/src/main/java/io/trino/spi/type/LongTimeWithTimeZoneType.java b/core/trino-spi/src/main/java/io/trino/spi/type/LongTimeWithTimeZoneType.java index 95ff1097d40f..9b8ee9d23b0a 100644 --- a/core/trino-spi/src/main/java/io/trino/spi/type/LongTimeWithTimeZoneType.java +++ b/core/trino-spi/src/main/java/io/trino/spi/type/LongTimeWithTimeZoneType.java @@ -170,10 +170,10 @@ private static void readFlatToBlock( @ScalarOperator(READ_VALUE) private static void writeFlat( LongTimeWithTimeZone value, - byte[] fixedSizeSlice, - int fixedSizeOffset, - byte[] unusedVariableSizeSlice, - int unusedVariableSizeOffset) + @FlatFixed byte[] fixedSizeSlice, + @FlatFixedOffset int fixedSizeOffset, + @FlatVariableWidth byte[] unusedVariableSizeSlice, + @FlatVariableOffset int unusedVariableSizeOffset) { LONG_HANDLE.set(fixedSizeSlice, fixedSizeOffset, value.getPicoseconds()); INT_HANDLE.set(fixedSizeSlice, fixedSizeOffset + SIZE_OF_LONG, value.getOffsetMinutes()); @@ -183,10 +183,10 @@ private static void writeFlat( private static void writeBlockFlat( @BlockPosition Fixed12Block block, @BlockIndex int position, - byte[] fixedSizeSlice, - int fixedSizeOffset, - byte[] unusedVariableSizeSlice, - int unusedVariableSizeOffset) + @FlatFixed byte[] fixedSizeSlice, + @FlatFixedOffset int fixedSizeOffset, + @FlatVariableWidth byte[] unusedVariableSizeSlice, + @FlatVariableOffset int unusedVariableSizeOffset) { LONG_HANDLE.set(fixedSizeSlice, fixedSizeOffset, getPicos(block, position)); INT_HANDLE.set(fixedSizeSlice, fixedSizeOffset + SIZE_OF_LONG, getOffsetMinutes(block, position)); @@ -212,6 +212,22 @@ private static boolean equalOperator(@BlockPosition Fixed12Block leftBlock, @Blo getOffsetMinutes(rightBlock, rightPosition)); } + @ScalarOperator(EQUAL) + private static boolean equalOperator( + @FlatFixed byte[] fixedSizeSlice, + @FlatFixedOffset int fixedSizeOffset, + @FlatVariableWidth byte[] unusedVariableSizeSlice, + @FlatVariableOffset int unusedVariableSizeOffset, + @BlockPosition Fixed12Block rightBlock, + @BlockIndex int rightPosition) + { + return equal( + (long) LONG_HANDLE.get(fixedSizeSlice, fixedSizeOffset), + (int) INT_HANDLE.get(fixedSizeSlice, fixedSizeOffset + SIZE_OF_LONG), + getPicos(rightBlock, rightPosition), + getOffsetMinutes(rightBlock, rightPosition)); + } + private static boolean equal(long leftPicos, int leftOffsetMinutes, long rightPicos, int rightOffsetMinutes) { return normalizePicos(leftPicos, leftOffsetMinutes) == normalizePicos(rightPicos, rightOffsetMinutes); @@ -229,6 +245,16 @@ private static long hashCodeOperator(@BlockPosition Fixed12Block block, @BlockIn return hashCodeOperator(getPicos(block, position), getOffsetMinutes(block, position)); } + @ScalarOperator(HASH_CODE) + private static long 
hashCodeOperator( + @FlatFixed byte[] fixedSizeSlice, + @FlatFixedOffset int fixedSizeOffset, + @FlatVariableWidth byte[] unusedVariableSizeSlice, + @FlatVariableOffset int unusedVariableSizeOffset) + { + return hashCodeOperator((long) LONG_HANDLE.get(fixedSizeSlice, fixedSizeOffset), (int) INT_HANDLE.get(fixedSizeSlice, fixedSizeOffset + SIZE_OF_LONG)); + } + private static long hashCodeOperator(long picos, int offsetMinutes) { return AbstractLongType.hash(normalizePicos(picos, offsetMinutes)); @@ -246,6 +272,18 @@ private static long xxHash64Operator(@BlockPosition Fixed12Block block, @BlockIn return xxHash64(getPicos(block, position), getOffsetMinutes(block, position)); } + @ScalarOperator(XX_HASH_64) + private static long xxHash64Operator( + @FlatFixed byte[] fixedSizeSlice, + @FlatFixedOffset int fixedSizeOffset, + @FlatVariableWidth byte[] unusedVariableSizeSlice, + @FlatVariableOffset int unusedVariableSizeOffset) + { + return xxHash64( + (long) LONG_HANDLE.get(fixedSizeSlice, fixedSizeOffset), + (int) INT_HANDLE.get(fixedSizeSlice, fixedSizeOffset + SIZE_OF_LONG)); + } + private static long xxHash64(long picos, int offsetMinutes) { return XxHash64.hash(normalizePicos(picos, offsetMinutes)); diff --git a/core/trino-spi/src/main/java/io/trino/spi/type/LongTimestampType.java b/core/trino-spi/src/main/java/io/trino/spi/type/LongTimestampType.java index 5f23f18ae49b..b3d740419bbf 100644 --- a/core/trino-spi/src/main/java/io/trino/spi/type/LongTimestampType.java +++ b/core/trino-spi/src/main/java/io/trino/spi/type/LongTimestampType.java @@ -188,10 +188,10 @@ private static void readFlatToBlock( @ScalarOperator(READ_VALUE) private static void writeFlat( LongTimestamp value, - byte[] fixedSizeSlice, - int fixedSizeOffset, - byte[] unusedVariableSizeSlice, - int unusedVariableSizeOffset) + @FlatFixed byte[] fixedSizeSlice, + @FlatFixedOffset int fixedSizeOffset, + @FlatVariableWidth byte[] unusedVariableSizeSlice, + @FlatVariableOffset int unusedVariableSizeOffset) { LONG_HANDLE.set(fixedSizeSlice, fixedSizeOffset, value.getEpochMicros()); INT_HANDLE.set(fixedSizeSlice, fixedSizeOffset + SIZE_OF_LONG, value.getPicosOfMicro()); @@ -201,10 +201,10 @@ private static void writeFlat( private static void writeBlockFlat( @BlockPosition Fixed12Block block, @BlockIndex int position, - byte[] fixedSizeSlice, - int fixedSizeOffset, - byte[] unusedVariableSizeSlice, - int unusedVariableSizeOffset) + @FlatFixed byte[] fixedSizeSlice, + @FlatFixedOffset int fixedSizeOffset, + @FlatVariableWidth byte[] unusedVariableSizeSlice, + @FlatVariableOffset int unusedVariableSizeOffset) { LONG_HANDLE.set(fixedSizeSlice, fixedSizeOffset, getEpochMicros(block, position)); INT_HANDLE.set(fixedSizeSlice, fixedSizeOffset + SIZE_OF_LONG, getFraction(block, position)); @@ -230,6 +230,22 @@ private static boolean equalOperator(@BlockPosition Fixed12Block leftBlock, @Blo getFraction(rightBlock, rightPosition)); } + @ScalarOperator(EQUAL) + private static boolean equalOperator( + @FlatFixed byte[] leftFixedSizeSlice, + @FlatFixedOffset int leftFixedSizeOffset, + @FlatVariableWidth byte[] unusedVariableSizeSlice, + @FlatVariableOffset int unusedVariableSizeOffset, + @BlockPosition Fixed12Block rightBlock, + @BlockIndex int rightPosition) + { + return equal( + (long) LONG_HANDLE.get(leftFixedSizeSlice, leftFixedSizeOffset), + (int) INT_HANDLE.get(leftFixedSizeSlice, leftFixedSizeOffset + SIZE_OF_LONG), + getEpochMicros(rightBlock, rightPosition), + getFraction(rightBlock, rightPosition)); + } + private static boolean 
equal(long leftEpochMicros, int leftFraction, long rightEpochMicros, int rightFraction) { return leftEpochMicros == rightEpochMicros && leftFraction == rightFraction; @@ -249,6 +265,18 @@ private static long xxHash64Operator(@BlockPosition Fixed12Block block, @BlockIn getFraction(block, position)); } + @ScalarOperator(XX_HASH_64) + private static long xxHash64Operator( + @FlatFixed byte[] leftFixedSizeSlice, + @FlatFixedOffset int leftFixedSizeOffset, + @FlatVariableWidth byte[] unusedVariableSizeSlice, + @FlatVariableOffset int unusedVariableSizeOffset) + { + return xxHash64( + (long) LONG_HANDLE.get(leftFixedSizeSlice, leftFixedSizeOffset), + (int) INT_HANDLE.get(leftFixedSizeSlice, leftFixedSizeOffset + SIZE_OF_LONG)); + } + private static long xxHash64(long epochMicros, int fraction) { return XxHash64.hash(epochMicros) ^ XxHash64.hash(fraction); diff --git a/core/trino-spi/src/main/java/io/trino/spi/type/LongTimestampWithTimeZoneType.java b/core/trino-spi/src/main/java/io/trino/spi/type/LongTimestampWithTimeZoneType.java index 0690f343bbc3..51b494b8ea6a 100644 --- a/core/trino-spi/src/main/java/io/trino/spi/type/LongTimestampWithTimeZoneType.java +++ b/core/trino-spi/src/main/java/io/trino/spi/type/LongTimestampWithTimeZoneType.java @@ -231,10 +231,10 @@ private static void readFlatToBlock( @ScalarOperator(READ_VALUE) private static void writeFlat( LongTimestampWithTimeZone value, - byte[] fixedSizeSlice, - int fixedSizeOffset, - byte[] unusedVariableSizeSlice, - int unusedVariableSizeOffset) + @FlatFixed byte[] fixedSizeSlice, + @FlatFixedOffset int fixedSizeOffset, + @FlatVariableWidth byte[] unusedVariableSizeSlice, + @FlatVariableOffset int unusedVariableSizeOffset) { LONG_HANDLE.set(fixedSizeSlice, fixedSizeOffset, packDateTimeWithZone(value.getEpochMillis(), value.getTimeZoneKey())); INT_HANDLE.set(fixedSizeSlice, fixedSizeOffset + SIZE_OF_LONG, value.getPicosOfMilli()); @@ -244,10 +244,10 @@ private static void writeFlat( private static void writeBlockFlat( @BlockPosition Fixed12Block block, @BlockIndex int position, - byte[] fixedSizeSlice, - int fixedSizeOffset, - byte[] unusedVariableSizeSlice, - int unusedVariableSizeOffset) + @FlatFixed byte[] fixedSizeSlice, + @FlatFixedOffset int fixedSizeOffset, + @FlatVariableWidth byte[] unusedVariableSizeSlice, + @FlatVariableOffset int unusedVariableSizeOffset) { LONG_HANDLE.set(fixedSizeSlice, fixedSizeOffset, getPackedEpochMillis(block, position)); INT_HANDLE.set(fixedSizeSlice, fixedSizeOffset + SIZE_OF_LONG, getPicosOfMilli(block, position)); @@ -273,6 +273,22 @@ private static boolean equalOperator(@BlockPosition Fixed12Block leftBlock, @Blo getPicosOfMilli(rightBlock, rightPosition)); } + @ScalarOperator(EQUAL) + private static boolean equalOperator( + @FlatFixed byte[] leftFixedSizeSlice, + @FlatFixedOffset int leftFixedSizeOffset, + @FlatVariableWidth byte[] unusedVariableSizeSlice, + @FlatVariableOffset int unusedVariableSizeOffset, + @BlockPosition Fixed12Block rightBlock, + @BlockIndex int rightPosition) + { + return equal( + unpackMillisUtc((long) LONG_HANDLE.get(leftFixedSizeSlice, leftFixedSizeOffset)), + (int) INT_HANDLE.get(leftFixedSizeSlice, leftFixedSizeOffset + SIZE_OF_LONG), + getEpochMillis(rightBlock, rightPosition), + getPicosOfMilli(rightBlock, rightPosition)); + } + private static boolean equal(long leftEpochMillis, int leftPicosOfMilli, long rightEpochMillis, int rightPicosOfMilli) { return leftEpochMillis == rightEpochMillis && @@ -293,6 +309,18 @@ private static long xxHash64Operator(@BlockPosition 
Fixed12Block block, @BlockIn getPicosOfMilli(block, position)); } + @ScalarOperator(XX_HASH_64) + private static long xxHash64Operator( + @FlatFixed byte[] fixedSizeSlice, + @FlatFixedOffset int fixedSizeOffset, + @FlatVariableWidth byte[] unusedVariableSizeSlice, + @FlatVariableOffset int unusedVariableSizeOffset) + { + return xxHash64( + unpackMillisUtc((long) LONG_HANDLE.get(fixedSizeSlice, fixedSizeOffset)), + (int) INT_HANDLE.get(fixedSizeSlice, fixedSizeOffset + SIZE_OF_LONG)); + } + private static long xxHash64(long epochMillis, int picosOfMilli) { return XxHash64.hash(epochMillis) ^ XxHash64.hash(picosOfMilli); diff --git a/core/trino-spi/src/main/java/io/trino/spi/type/RealType.java b/core/trino-spi/src/main/java/io/trino/spi/type/RealType.java index b2687c4eefd6..1f353b6d4b9b 100644 --- a/core/trino-spi/src/main/java/io/trino/spi/type/RealType.java +++ b/core/trino-spi/src/main/java/io/trino/spi/type/RealType.java @@ -130,10 +130,10 @@ private static long readFlat( @ScalarOperator(READ_VALUE) private static void writeFlat( long value, - byte[] fixedSizeSlice, - int fixedSizeOffset, - byte[] unusedVariableSizeSlice, - int unusedVariableSizeOffset) + @FlatFixed byte[] fixedSizeSlice, + @FlatFixedOffset int fixedSizeOffset, + @FlatVariableWidth byte[] unusedVariableSizeSlice, + @FlatVariableOffset int unusedVariableSizeOffset) { INT_HANDLE.set(fixedSizeSlice, fixedSizeOffset, (int) value); } diff --git a/core/trino-spi/src/main/java/io/trino/spi/type/ShortDecimalType.java b/core/trino-spi/src/main/java/io/trino/spi/type/ShortDecimalType.java index 577bdf371813..6fe8e1b9f037 100644 --- a/core/trino-spi/src/main/java/io/trino/spi/type/ShortDecimalType.java +++ b/core/trino-spi/src/main/java/io/trino/spi/type/ShortDecimalType.java @@ -195,14 +195,26 @@ private static long readFlat( @ScalarOperator(READ_VALUE) private static void writeFlat( long value, - byte[] fixedSizeSlice, - int fixedSizeOffset, - byte[] unusedVariableSizeSlice, - int unusedVariableSizeOffset) + @FlatFixed byte[] fixedSizeSlice, + @FlatFixedOffset int fixedSizeOffset, + @FlatVariableWidth byte[] unusedVariableSizeSlice, + @FlatVariableOffset int unusedVariableSizeOffset) { LONG_HANDLE.set(fixedSizeSlice, fixedSizeOffset, value); } + @ScalarOperator(EQUAL) + private static boolean equalOperator( + @FlatFixed byte[] leftFixedSizeSlice, + @FlatFixedOffset int leftFixedSizeOffset, + @FlatVariableWidth byte[] unusedVariableSizeSlice, + @FlatVariableOffset int unusedVariableSizeOffset, + @BlockPosition LongArrayBlock rightBlock, + @BlockIndex int rightPosition) + { + return equalOperator((long) LONG_HANDLE.get(leftFixedSizeSlice, leftFixedSizeOffset), rightBlock.getLong(rightPosition)); + } + @ScalarOperator(EQUAL) private static boolean equalOperator(long left, long right) { diff --git a/core/trino-spi/src/main/java/io/trino/spi/type/ShortTimeWithTimeZoneType.java b/core/trino-spi/src/main/java/io/trino/spi/type/ShortTimeWithTimeZoneType.java index d8adb098af4d..865078bea023 100644 --- a/core/trino-spi/src/main/java/io/trino/spi/type/ShortTimeWithTimeZoneType.java +++ b/core/trino-spi/src/main/java/io/trino/spi/type/ShortTimeWithTimeZoneType.java @@ -146,10 +146,10 @@ private static long readFlat( @ScalarOperator(READ_VALUE) private static void writeFlat( long value, - byte[] fixedSizeSlice, - int fixedSizeOffset, - byte[] unusedVariableSizeSlice, - int unusedVariableSizeOffset) + @FlatFixed byte[] fixedSizeSlice, + @FlatFixedOffset int fixedSizeOffset, + @FlatVariableWidth byte[] unusedVariableSizeSlice, + 
@FlatVariableOffset int unusedVariableSizeOffset) { LONG_HANDLE.set(fixedSizeSlice, fixedSizeOffset, value); } diff --git a/core/trino-spi/src/main/java/io/trino/spi/type/ShortTimestampType.java b/core/trino-spi/src/main/java/io/trino/spi/type/ShortTimestampType.java index c3d270c040c3..af333b9ca415 100644 --- a/core/trino-spi/src/main/java/io/trino/spi/type/ShortTimestampType.java +++ b/core/trino-spi/src/main/java/io/trino/spi/type/ShortTimestampType.java @@ -181,10 +181,10 @@ private static long readFlat( @ScalarOperator(READ_VALUE) private static void writeFlat( long value, - byte[] fixedSizeSlice, - int fixedSizeOffset, - byte[] unusedVariableSizeSlice, - int unusedVariableSizeOffset) + @FlatFixed byte[] fixedSizeSlice, + @FlatFixedOffset int fixedSizeOffset, + @FlatVariableWidth byte[] unusedVariableSizeSlice, + @FlatVariableOffset int unusedVariableSizeOffset) { LONG_HANDLE.set(fixedSizeSlice, fixedSizeOffset, value); } diff --git a/core/trino-spi/src/main/java/io/trino/spi/type/TimeType.java b/core/trino-spi/src/main/java/io/trino/spi/type/TimeType.java index de246e5a6829..3a782cd2a518 100644 --- a/core/trino-spi/src/main/java/io/trino/spi/type/TimeType.java +++ b/core/trino-spi/src/main/java/io/trino/spi/type/TimeType.java @@ -144,10 +144,10 @@ private static long readFlat( @ScalarOperator(READ_VALUE) private static void writeFlat( long value, - byte[] fixedSizeSlice, - int fixedSizeOffset, - byte[] unusedVariableSizeSlice, - int unusedVariableSizeOffset) + @FlatFixed byte[] fixedSizeSlice, + @FlatFixedOffset int fixedSizeOffset, + @FlatVariableWidth byte[] unusedVariableSizeSlice, + @FlatVariableOffset int unusedVariableSizeOffset) { LONG_HANDLE.set(fixedSizeSlice, fixedSizeOffset, value); } diff --git a/core/trino-spi/src/main/java/io/trino/spi/type/UuidType.java b/core/trino-spi/src/main/java/io/trino/spi/type/UuidType.java index 25ea659d6730..c5c6a44386a3 100644 --- a/core/trino-spi/src/main/java/io/trino/spi/type/UuidType.java +++ b/core/trino-spi/src/main/java/io/trino/spi/type/UuidType.java @@ -192,10 +192,10 @@ private static Slice readFlat( @ScalarOperator(READ_VALUE) private static void writeFlat( Slice sourceSlice, - byte[] fixedSizeSlice, - int fixedSizeOffset, - byte[] unusedVariableSizeSlice, - int unusedVariableSizeOffset) + @FlatFixed byte[] fixedSizeSlice, + @FlatFixedOffset int fixedSizeOffset, + @FlatVariableWidth byte[] unusedVariableSizeSlice, + @FlatVariableOffset int unusedVariableSizeOffset) { sourceSlice.getBytes(0, fixedSizeSlice, fixedSizeOffset, INT128_BYTES); } @@ -233,6 +233,22 @@ private static boolean equalOperator(@BlockPosition Int128ArrayBlock leftBlock, rightBlock.getInt128Low(rightPosition)); } + @ScalarOperator(EQUAL) + private static boolean equalOperator( + @FlatFixed byte[] leftFixedSizeSlice, + @FlatFixedOffset int leftFixedSizeOffset, + @FlatVariableWidth byte[] unusedVariableSizeSlice, + @FlatVariableOffset int unusedVariableSizeOffset, + @BlockPosition Int128ArrayBlock rightBlock, + @BlockIndex int rightPosition) + { + return equal( + (long) LONG_HANDLE.get(leftFixedSizeSlice, leftFixedSizeOffset), + (long) LONG_HANDLE.get(leftFixedSizeSlice, leftFixedSizeOffset + SIZE_OF_LONG), + rightBlock.getInt128High(rightPosition), + rightBlock.getInt128Low(rightPosition)); + } + private static boolean equal(long leftLow, long leftHigh, long rightLow, long rightHigh) { return leftLow == rightLow && leftHigh == rightHigh; @@ -250,6 +266,18 @@ private static long xxHash64Operator(@BlockPosition Int128ArrayBlock block, @Blo return 
xxHash64(block.getInt128High(position), block.getInt128Low(position)); } + @ScalarOperator(XX_HASH_64) + private static long xxHash64Operator( + @FlatFixed byte[] fixedSizeSlice, + @FlatFixedOffset int fixedSizeOffset, + @FlatVariableWidth byte[] unusedVariableSizeSlice, + @FlatVariableOffset int unusedVariableSizeOffset) + { + return xxHash64( + (long) LONG_HANDLE.get(fixedSizeSlice, fixedSizeOffset), + (long) LONG_HANDLE.get(fixedSizeSlice, fixedSizeOffset + SIZE_OF_LONG)); + } + private static long xxHash64(long low, long high) { return XxHash64.hash(low) ^ XxHash64.hash(high); diff --git a/core/trino-spi/src/test/java/io/trino/spi/block/TestArrayBlockBuilder.java b/core/trino-spi/src/test/java/io/trino/spi/block/TestArrayBlockBuilder.java index 689504f8f927..86c2d967d9f4 100644 --- a/core/trino-spi/src/test/java/io/trino/spi/block/TestArrayBlockBuilder.java +++ b/core/trino-spi/src/test/java/io/trino/spi/block/TestArrayBlockBuilder.java @@ -94,6 +94,56 @@ public void testBuilderProducesNullRleForNullRows() assertIsAllNulls(blockBuilder().appendNull().appendNull().build(), 2); } + @Test + void buildEntry() + { + List<List<String>> values = getTestValues(); + assertThat(values) + .hasSize(5) + .doesNotHaveDuplicates() + .doesNotContainNull(); + + ArrayBlockBuilder blockBuilder = (ArrayBlockBuilder) createBlockBuilder(); + for (List<String> array : values) { + blockBuilder.buildEntry(elementBuilder -> { + for (String element : array) { + if (element == null) { + elementBuilder.appendNull(); + } + else { + VARCHAR.writeString(elementBuilder, element); + } + } + }); + } + assertThat(blockToValues(blockBuilder.buildValueBlock())).isEqualTo(values); + + blockBuilder = (ArrayBlockBuilder) createBlockBuilder(); + for (List<String> array : values) { + ArrayEntryBuilder arrayEntryBuilder = blockBuilder.buildEntry(); + for (String element : array) { + if (element == null) { + arrayEntryBuilder.getElementBuilder().appendNull(); + } + else { + VARCHAR.writeString(arrayEntryBuilder.getElementBuilder(), element); + } + } + arrayEntryBuilder.build(); + } + assertThat(blockToValues(blockBuilder.buildValueBlock())).isEqualTo(values); + + blockBuilder = (ArrayBlockBuilder) createBlockBuilder(); + blockBuilder.buildEntry(); + assertThatThrownBy(blockBuilder::buildEntry).isInstanceOf(IllegalStateException.class); + + blockBuilder = (ArrayBlockBuilder) createBlockBuilder(); + ArrayEntryBuilder multipleEntryBuilder = blockBuilder.buildEntry(); + multipleEntryBuilder.build(); + assertThatThrownBy(multipleEntryBuilder::getElementBuilder).isInstanceOf(IllegalStateException.class); + assertThatThrownBy(multipleEntryBuilder::build).isInstanceOf(IllegalStateException.class); + } + private static BlockBuilder blockBuilder() { return new ArrayBlockBuilder(BIGINT, null, 10); diff --git a/core/trino-spi/src/test/java/io/trino/spi/block/TestRowBlockBuilder.java b/core/trino-spi/src/test/java/io/trino/spi/block/TestRowBlockBuilder.java index 5a0ff445388f..b5d0087087d7 100644 --- a/core/trino-spi/src/test/java/io/trino/spi/block/TestRowBlockBuilder.java +++ b/core/trino-spi/src/test/java/io/trino/spi/block/TestRowBlockBuilder.java @@ -24,6 +24,7 @@ import static io.trino.spi.type.IntegerType.INTEGER; import static io.trino.spi.type.VarcharType.VARCHAR; import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; public class TestRowBlockBuilder extends AbstractTestBlockBuilder<TestRow> @@ -41,6 +42,62 @@ public void testBuilderProducesNullRleForNullRows()
assertIsAllNulls(blockBuilder().appendNull().appendNull().build(), 2); } + @Test + void buildEntry() + { + List<TestRow> values = getTestValues(); + assertThat(values) + .hasSize(5) + .doesNotHaveDuplicates() + .doesNotContainNull() + .doesNotContain(getUnusedTestValue()); + + RowBlockBuilder blockBuilder = new RowBlockBuilder(List.of(VARCHAR, INTEGER, BOOLEAN), null, 1); + for (TestRow row : values) { + blockBuilder.buildEntry(fieldBuilders -> { + if (row.name() == null) { + fieldBuilders.getFirst().appendNull(); + } + else { + VARCHAR.writeString(fieldBuilders.getFirst(), row.name()); + } + INTEGER.writeLong(fieldBuilders.get(1), row.number()); + BOOLEAN.writeBoolean(fieldBuilders.get(2), row.flag()); + }); + } + assertThat(blockToValues(blockBuilder.buildValueBlock())).isEqualTo(values); + + blockBuilder = new RowBlockBuilder(List.of(VARCHAR, INTEGER, BOOLEAN), null, 1); + for (TestRow row : values) { + RowEntryBuilder rowEntryBuilder = blockBuilder.buildEntry(); + if (row.name() == null) { + rowEntryBuilder.getFieldBuilder(0).appendNull(); + } + else { + VARCHAR.writeString(rowEntryBuilder.getFieldBuilder(0), row.name()); + } + INTEGER.writeLong(rowEntryBuilder.getFieldBuilder(1), row.number()); + BOOLEAN.writeBoolean(rowEntryBuilder.getFieldBuilder(2), row.flag()); + rowEntryBuilder.build(); + } + assertThat(blockToValues(blockBuilder.buildValueBlock())).isEqualTo(values); + + blockBuilder = new RowBlockBuilder(List.of(VARCHAR, INTEGER, BOOLEAN), null, 1); + blockBuilder.buildEntry(); + assertThatThrownBy(blockBuilder::buildEntry).isInstanceOf(IllegalStateException.class); + + blockBuilder = new RowBlockBuilder(List.of(VARCHAR, INTEGER, BOOLEAN), null, 1); + RowEntryBuilder incompleteEntryBuilder = blockBuilder.buildEntry(); + assertThatThrownBy(incompleteEntryBuilder::build).isInstanceOf(IllegalStateException.class); + + blockBuilder = new RowBlockBuilder(List.of(BOOLEAN), null, 1); + RowEntryBuilder multipleEntryBuilder = blockBuilder.buildEntry(); + BOOLEAN.writeBoolean(multipleEntryBuilder.getFieldBuilder(0), true); + multipleEntryBuilder.build(); + assertThatThrownBy(() -> multipleEntryBuilder.getFieldBuilder(0)).isInstanceOf(IllegalStateException.class); + assertThatThrownBy(multipleEntryBuilder::build).isInstanceOf(IllegalStateException.class); + } + private static BlockBuilder blockBuilder() { return new RowBlockBuilder(ImmutableList.of(BIGINT), null, 10); diff --git a/docs/src/main/sphinx/admin/fault-tolerant-execution.md b/docs/src/main/sphinx/admin/fault-tolerant-execution.md index 528ed5ce9c43..c42505509593 100644 --- a/docs/src/main/sphinx/admin/fault-tolerant-execution.md +++ b/docs/src/main/sphinx/admin/fault-tolerant-execution.md @@ -397,6 +397,11 @@ all worker nodes. In this file, set the `exchange-manager.name` configuration property to `filesystem` or `hdfs`, and set additional configuration properties as needed for your storage solution. +You can also specify the location of the exchange manager configuration file +in `config.properties` with the `exchange-manager.config-file` property. +When this property is set, Trino loads the exchange manager configuration +from the specified path instead of the default `etc/exchange-manager.properties`. + The following table lists the available configuration properties for `exchange-manager.properties`, their default values, and which file systems the property may be configured for:
- - Any +* - `exchange.max-page-storage-size` + - Max storage size of a page written to a sink, including the page itself + and its size. + - `16MB` + - Any * - `exchange.sink-buffer-pool-min-size` - The minimum buffer pool size for an exchange sink. The larger the buffer pool size, the larger the write parallelism and memory usage. @@ -426,7 +436,7 @@ the property may be configured for: - Any * - `exchange.sink-max-file-size` - Max [data size](prop-type-data-size) of files written by exchange sinks. - - ``1GB`` + - `1GB` - Any * - `exchange.source-concurrent-readers` - Number of concurrent readers to read from spooling storage. The larger the @@ -620,7 +630,7 @@ The following `exchange-manager.properties` configuration example specifies Allu as the spooling storage destination. ```properties -exchange-manager.name=alluxio +exchange-manager.name=filesystem exchange.base-directories=alluxio://alluxio-master:19998/exchange-spooling-directory exchange.alluxio.site-file-path=/path/to/alluxio-site.properties ``` diff --git a/docs/src/main/sphinx/client/cli.md b/docs/src/main/sphinx/client/cli.md index d70341af0012..2e5f85d3bc12 100644 --- a/docs/src/main/sphinx/client/cli.md +++ b/docs/src/main/sphinx/client/cli.md @@ -163,6 +163,9 @@ mode: EMACS editors. Defaults to `EMACS`. * - `--extra-credential` - Extra credentials (property can be used multiple times; format is key=value) +* - `--extra-header` + - HTTP header to add to the authenticated HTTP requests + (property can be used multiple times; format is key=value). * - `--http-proxy` - Configures the URL of the HTTP proxy to connect to Trino. * - `--history-file` diff --git a/docs/src/main/sphinx/client/jdbc.md b/docs/src/main/sphinx/client/jdbc.md index 0058ef72d895..2ce05df9c2fb 100644 --- a/docs/src/main/sphinx/client/jdbc.md +++ b/docs/src/main/sphinx/client/jdbc.md @@ -235,6 +235,12 @@ may not be specified using both methods. list of key-value pairs. For example, `abc:xyz;example.foo:bar` sets the system property `abc` to the value `xyz` and the `foo` property for catalog `example` to the value `bar`. +* - `extraHeaders` + - HTTP headers to add to the authenticated HTTP requests, specified as a + list of key-value pairs. For example, `X-Trino-Foo:xyz;X-Trino-Bar:bar` + sends the `X-Trino-Foo` header with the value `xyz` and the `X-Trino-Bar` + header with the value `bar`. Protocol headers such as `X-Trino-User` cannot be + overridden using this parameter. * - `externalAuthentication` - Set to true if you want to use external authentication via [](/security/oauth2). Use a local web browser to authenticate with an diff --git a/docs/src/main/sphinx/connector.md b/docs/src/main/sphinx/connector.md index a954b30cf059..1a77556400c3 100644 --- a/docs/src/main/sphinx/connector.md +++ b/docs/src/main/sphinx/connector.md @@ -42,6 +42,7 @@ SingleStore Snowflake SQL Server System +Teradata Thrift TPC-DS TPC-H diff --git a/docs/src/main/sphinx/connector/delta-lake.md b/docs/src/main/sphinx/connector/delta-lake.md index 858c762cf341..220c98f14c5d 100644 --- a/docs/src/main/sphinx/connector/delta-lake.md +++ b/docs/src/main/sphinx/connector/delta-lake.md @@ -13,7 +13,7 @@ data. To connect to Databricks Delta Lake, you need: - Tables written by Databricks Runtime 7.3 LTS, 9.1 LTS, 10.4 LTS, 11.3 LTS, - 12.2 LTS, 13.3 LTS, 14.3 LTS, 15.4 LTS and 16.4 LTS are supported. + 12.2 LTS, 13.3 LTS, 14.3 LTS, 15.4 LTS, 16.4 LTS and 17.3 LTS are supported. - Deployments using AWS, HDFS, Azure Storage, and Google Cloud Storage (GCS) are fully supported. 
- Network access from the coordinator and workers to the Delta Lake storage. diff --git a/docs/src/main/sphinx/connector/teradata.md b/docs/src/main/sphinx/connector/teradata.md new file mode 100644 index 000000000000..51bc4fb08026 --- /dev/null +++ b/docs/src/main/sphinx/connector/teradata.md @@ -0,0 +1,177 @@ +# Teradata connector + +```{raw} html + +``` + +The Teradata connector allows querying tables in an external +[Teradata](https://www.teradata.com/) database. This can be used to join +data between different systems like Teradata and Hive, or between different Teradata instances. + +## Requirements + +To connect to Teradata, you need: + +- Teradata Database +- Network access from the Trino coordinator and workers to Teradata. Port + 1025 is the default port. + +## Configuration + +To configure the Teradata connector, create a catalog properties file in +`etc/catalog` named, for example, `teradata.properties`, to mount the Teradata +connector as the `teradata` catalog. Create the file with the following +contents, replacing the connection properties as appropriate for your setup: + +```properties +connector.name=teradata +connection-url=jdbc:teradata://example.teradata.com/CHARSET=UTF8,TMODE=ANSI,LOGMECH=TD2 +connection-user=*** +connection-password=*** ``` + +The `connection-url` defines the connection information and parameters to pass +to the Teradata JDBC driver. The supported parameters for the URL are +available in the +[Teradata JDBC documentation](https://teradata-docs.s3.amazonaws.com/doc/connectivity/jdbc/reference/current/jdbcug_chapter_2.html#BABJIHBJ). +For example, the following `connection-url` configures character encoding, +transaction mode, and authentication. + +```properties +connection-url=jdbc:teradata://example.teradata.com/CHARSET=UTF8,TMODE=ANSI,LOGMECH=TD2 +``` + +The `connection-user` and `connection-password` are typically required and +determine the user credentials for the connection, often a service user. + +### Connection security + +If you have TLS configured with a globally-trusted certificate installed on +your data source, you can enable TLS between your cluster and the data +source by appending parameters to the JDBC connection string set in the +`connection-url` catalog configuration property. + +For example, to specify `SSLMODE`: + +```properties +connection-url=jdbc:teradata://example.teradata.com/SSLMODE=REQUIRED +``` + +For more information on TLS configuration options, see the +Teradata [JDBC documentation](https://teradata-docs.s3.amazonaws.com/doc/connectivity/jdbc/reference/current/jdbcug_chapter_2.html#URL_SSLMODE). + +```{include} jdbc-authentication.fragment +``` + +### Multiple Teradata databases + +You can have as many catalogs as you need, so if you have additional Teradata +databases, simply add another properties file to `etc/catalog` with a different +name, making sure it ends in `.properties`. +For example, if you name the property file `sales.properties`, Trino creates a +catalog named `sales` using the configured connector. + +## Type mapping + +Because Trino and Teradata each support types that the other does not, this +connector {ref}`modifies some types <type-mapping-overview>` when reading data. +Refer to the following sections for type mapping when reading data from +Teradata to Trino.
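As a quick, hedged illustration ahead of the mapping table below, the following sketch uses the Trino JDBC driver to print how the columns of a Teradata table surface in Trino. It assumes a coordinator at `localhost:8080` without authentication, the `teradata` catalog configured as above, and a `sales.orders` table on the Teradata side; all of those names are illustrative.

```java
// Hypothetical usage sketch, not part of this documentation.
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

public final class DescribeTeradataTable
{
    public static void main(String[] args)
            throws Exception
    {
        String url = "jdbc:trino://localhost:8080/teradata/sales";
        try (Connection connection = DriverManager.getConnection(url, "admin", null);
                Statement statement = connection.createStatement();
                ResultSet resultSet = statement.executeQuery("DESCRIBE orders")) {
            while (resultSet.next()) {
                // Column name and the Trino type the Teradata type was mapped to
                System.out.println(resultSet.getString("Column") + " -> " + resultSet.getString("Type"));
            }
        }
    }
}
```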
+ +### Teradata type to Trino type mapping + +The connector maps Teradata types to the corresponding Trino types according to +the following table: + +:::{list-table} Teradata type to Trino type mapping +:widths: 40, 40, 20 +:header-rows: 1 + +* - Teradata type + - Trino type + - Notes +* - `TINYINT` + - `TINYINT` + - +* - `SMALLINT` + - `SMALLINT` + - +* - `INTEGER` + - `INTEGER` + - +* - `BIGINT` + - `BIGINT` + - +* - `REAL` + - `DOUBLE` + - +* - `DOUBLE` + - `DOUBLE` + - +* - `FLOAT` + - `DOUBLE` + - +* - `NUMBER(p, s)` + - `DECIMAL(p, s)` + - +* - `NUMERIC(p, s)` + - `DECIMAL(p, s)` + - +* - `DECIMAL(p, s)` + - `DECIMAL(p, s)` + - +* - `CHAR(n)` + - `CHAR(n)` + - +* - `CHARACTER(n)` + - `CHAR(n)` + - +* - `VARCHAR(n)` + - `VARCHAR(n)` + - +* - `DATE` + - `DATE` + - +::: + +No other types are supported. + +```{include} jdbc-type-mapping.fragment +``` + +## Querying Teradata + +The Teradata connector provides a schema for every Teradata database. You can +see the available Teradata databases by running `SHOW SCHEMAS`: + +``` +SHOW SCHEMAS FROM teradata; +``` + +If you have a Teradata database named `sales`, you can view the tables in this +database by running `SHOW TABLES`: + +``` +SHOW TABLES FROM teradata.sales; +``` + +You can see a list of the columns in the `orders` table in the `sales` database +using either of the following: + +``` +DESCRIBE teradata.sales.orders; +SHOW COLUMNS FROM teradata.sales.orders; +``` + +Finally, you can access the `orders` table in the `sales` database: + +``` +SELECT * FROM teradata.sales.orders; +``` + +## SQL support + +The connector provides read access to data and metadata in +a Teradata database. The connector supports the {ref}`globally available +<sql-globally-available>` and {ref}`read operation <sql-read-operations>` +statements. diff --git a/docs/src/main/sphinx/language/types.md b/docs/src/main/sphinx/language/types.md index 72194a358705..68e63693c83a 100644 --- a/docs/src/main/sphinx/language/types.md +++ b/docs/src/main/sphinx/language/types.md @@ -332,7 +332,8 @@ More information in [](/functions/array). ### `MAP` A map between the given component types. A map is a collection of key-value -pairs, where each key is associated with a single value. +pairs, where each key is associated with a single value. Map keys are required, +while map values can be null. Example: `MAP(ARRAY['foo', 'bar'], ARRAY[1, 2])` diff --git a/docs/src/main/sphinx/object-storage/metastores.md b/docs/src/main/sphinx/object-storage/metastores.md index 5fec3303d28e..8966b6fec56f 100644 --- a/docs/src/main/sphinx/object-storage/metastores.md +++ b/docs/src/main/sphinx/object-storage/metastores.md @@ -509,6 +509,9 @@ following properties: * - `iceberg.rest-catalog.oauth2.token-refresh-enabled` - Controls whether a token should be refreshed if information about its expiration time is available. Defaults to `true` +* - `iceberg.rest-catalog.oauth2.token-exchange-enabled` + - Controls whether to use the token exchange flow to acquire new tokens. + Defaults to `true` * - `iceberg.rest-catalog.vended-credentials-enabled` - Use credentials provided by the REST backend for file system access. Defaults to `false`.
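To make the `MAP` semantics documented above for `language/types.md` concrete, here is a hedged JDBC sketch: map values may be null, map keys may not. It assumes a coordinator at `localhost:8080` without authentication; all names are illustrative.

```java
// Illustrative sketch only, not part of this change set.
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.Map;

public final class MapNullability
{
    public static void main(String[] args)
            throws Exception
    {
        try (Connection connection = DriverManager.getConnection("jdbc:trino://localhost:8080", "admin", null);
                Statement statement = connection.createStatement()) {
            // A null map value is allowed
            try (ResultSet resultSet = statement.executeQuery(
                    "SELECT MAP(ARRAY['foo', 'bar'], ARRAY[1, CAST(NULL AS INTEGER)]) AS m")) {
                resultSet.next();
                Map<?, ?> map = (Map<?, ?>) resultSet.getObject("m");
                System.out.println(map); // prints {foo=1, bar=null}
            }
            // A null map key is rejected at query execution time
            try {
                statement.executeQuery("SELECT MAP(ARRAY[CAST(NULL AS VARCHAR)], ARRAY[1])");
            }
            catch (SQLException e) {
                System.out.println("null key rejected: " + e.getMessage());
            }
        }
    }
}
```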
diff --git a/lib/trino-filesystem-azure/src/main/java/io/trino/filesystem/azure/AzureFileSystem.java b/lib/trino-filesystem-azure/src/main/java/io/trino/filesystem/azure/AzureFileSystem.java index 3455edd1d70c..ae2515b3ddac 100644 --- a/lib/trino-filesystem-azure/src/main/java/io/trino/filesystem/azure/AzureFileSystem.java +++ b/lib/trino-filesystem-azure/src/main/java/io/trino/filesystem/azure/AzureFileSystem.java @@ -26,6 +26,7 @@ import com.azure.storage.blob.models.UserDelegationKey; import com.azure.storage.blob.sas.BlobSasPermission; import com.azure.storage.blob.sas.BlobServiceSasSignatureValues; +import com.azure.storage.blob.specialized.BlockBlobClient; import com.azure.storage.common.sas.SasProtocol; import com.azure.storage.file.datalake.DataLakeDirectoryClient; import com.azure.storage.file.datalake.DataLakeFileClient; @@ -602,10 +603,10 @@ private boolean isHierarchicalNamespaceEnabled(AzureLocation location) throws IOException { try { - return createBlobContainerClient(location, Optional.empty()) - .getServiceClient() - .getAccountInfo() - .isHierarchicalNamespaceEnabled(); + BlockBlobClient blockBlobClient = createBlobContainerClient(location, Optional.empty()) + .getBlobClient("/") + .getBlockBlobClient(); + return blockBlobClient.exists(); } catch (RuntimeException e) { throw new IOException("Checking whether hierarchical namespace is enabled for the location %s failed".formatted(location), e); diff --git a/lib/trino-filesystem-cache-alluxio/src/main/java/io/trino/filesystem/alluxio/AlluxioInputStream.java b/lib/trino-filesystem-cache-alluxio/src/main/java/io/trino/filesystem/alluxio/AlluxioInputStream.java index 04c5bc862429..a2d12ee229a2 100644 --- a/lib/trino-filesystem-cache-alluxio/src/main/java/io/trino/filesystem/alluxio/AlluxioInputStream.java +++ b/lib/trino-filesystem-cache-alluxio/src/main/java/io/trino/filesystem/alluxio/AlluxioInputStream.java @@ -27,7 +27,6 @@ import java.io.IOException; import static com.google.common.base.Verify.verify; -import static com.google.common.primitives.Ints.saturatedCast; import static io.trino.filesystem.tracing.CacheSystemAttributes.CACHE_FILE_LOCATION; import static io.trino.filesystem.tracing.CacheSystemAttributes.CACHE_FILE_READ_POSITION; import static io.trino.filesystem.tracing.CacheSystemAttributes.CACHE_FILE_READ_SIZE; @@ -70,9 +69,10 @@ public AlluxioInputStream(Tracer tracer, TrinoInputFile inputFile, String key, U public int available() throws IOException { + // Not needed per contract, but it makes complying with AbstractTestTrinoFileSystem expectations easier. + // It's easier to just check "is open?" in available() than to refactor that test.
ensureOpen(); - - return saturatedCast(fileLength - position); + return super.available(); } @Override diff --git a/lib/trino-filesystem-gcs/src/main/java/io/trino/filesystem/gcs/GcsInputStream.java b/lib/trino-filesystem-gcs/src/main/java/io/trino/filesystem/gcs/GcsInputStream.java index e32376dcfc4d..28180983116e 100644 --- a/lib/trino-filesystem-gcs/src/main/java/io/trino/filesystem/gcs/GcsInputStream.java +++ b/lib/trino-filesystem-gcs/src/main/java/io/trino/filesystem/gcs/GcsInputStream.java @@ -15,7 +15,6 @@ import com.google.cloud.ReadChannel; import com.google.cloud.storage.Blob; -import com.google.common.primitives.Ints; import io.trino.filesystem.TrinoInputStream; import io.trino.filesystem.encryption.EncryptionKey; @@ -63,9 +62,10 @@ public GcsInputStream(GcsLocation location, Blob blob, int readBlockSizeBytes, O public int available() throws IOException { + // Not needed per contract, but it makes complying with AbstractTestTrinoFileSystem expectations easier. + // It's easier to just check "is open?" in available() than to refactor that test. ensureOpen(); - repositionStream(); - return Ints.saturatedCast(fileSize - currentPosition); + return super.available(); } @Override diff --git a/lib/trino-filesystem/src/main/java/io/trino/filesystem/local/LocalInputStream.java b/lib/trino-filesystem/src/main/java/io/trino/filesystem/local/LocalInputStream.java index 1688f4ff4ab4..785b9cd1c33e 100644 --- a/lib/trino-filesystem/src/main/java/io/trino/filesystem/local/LocalInputStream.java +++ b/lib/trino-filesystem/src/main/java/io/trino/filesystem/local/LocalInputStream.java @@ -13,7 +13,6 @@ */ package io.trino.filesystem.local; -import com.google.common.primitives.Ints; import io.trino.filesystem.Location; import io.trino.filesystem.TrinoInputStream; @@ -53,7 +52,7 @@ public int available() throws IOException { ensureOpen(); - return Ints.saturatedCast(fileLength - position); + return input.available(); } @Override diff --git a/lib/trino-plugin-toolkit/src/main/java/io/trino/plugin/base/filter/UtcConstraintExtractor.java b/lib/trino-plugin-toolkit/src/main/java/io/trino/plugin/base/filter/UtcConstraintExtractor.java index 32d44c961dcd..fcd561e6c5fc 100644 --- a/lib/trino-plugin-toolkit/src/main/java/io/trino/plugin/base/filter/UtcConstraintExtractor.java +++ b/lib/trino-plugin-toolkit/src/main/java/io/trino/plugin/base/filter/UtcConstraintExtractor.java @@ -248,7 +248,7 @@ private static Optional<Domain> createDomain(FunctionName functionName, Type typ return Optional.of(Domain.create(ValueSet.ofRanges(Range.greaterThanOrEqual(type, startOfDate)), false)); } if (functionName.equals(IDENTICAL_OPERATOR_FUNCTION_NAME)) { - return Optional.of(Domain.create(ValueSet.ofRanges(Range.range(type, startOfDate, true, startOfNextDate, false)), true)); + return Optional.of(Domain.create(ValueSet.ofRanges(Range.range(type, startOfDate, true, startOfNextDate, false)), false)); } return Optional.empty(); @@ -363,7 +363,7 @@ private static Optional<Domain> unwrapDateTruncInComparison(String unit, Functio if (!constantAtPeriodStart) { return Optional.of(Domain.none(type)); } - return Optional.of(Domain.create(ValueSet.ofRanges(Range.range(type, start, true, end, false)), true)); + return Optional.of(Domain.create(ValueSet.ofRanges(Range.range(type, start, true, end, false)), false)); } if (functionName.equals(LESS_THAN_OPERATOR_FUNCTION_NAME)) { if (constantAtPeriodStart) { diff --git a/lib/trino-plugin-toolkit/src/main/java/io/trino/plugin/base/security/FileBasedSystemAccessControl.java
b/lib/trino-plugin-toolkit/src/main/java/io/trino/plugin/base/security/FileBasedSystemAccessControl.java index c6ac33b94726..ac068af75edb 100644 --- a/lib/trino-plugin-toolkit/src/main/java/io/trino/plugin/base/security/FileBasedSystemAccessControl.java +++ b/lib/trino-plugin-toolkit/src/main/java/io/trino/plugin/base/security/FileBasedSystemAccessControl.java @@ -236,6 +236,7 @@ public SystemAccessControl create(Map config, SystemAccessContro requireNonNull(config, "config is null"); Bootstrap bootstrap = new Bootstrap( + "io.trino.bootstrap.access." + getName(), binder -> configBinder(binder).bindConfig(FileBasedAccessControlConfig.class), new FileBasedSystemAccessControlModule()); diff --git a/lib/trino-plugin-toolkit/src/test/java/io/trino/plugin/base/filter/TestUtcConstraintExtractor.java b/lib/trino-plugin-toolkit/src/test/java/io/trino/plugin/base/filter/TestUtcConstraintExtractor.java index bc50d011cca5..45c0a5f35176 100644 --- a/lib/trino-plugin-toolkit/src/test/java/io/trino/plugin/base/filter/TestUtcConstraintExtractor.java +++ b/lib/trino-plugin-toolkit/src/test/java/io/trino/plugin/base/filter/TestUtcConstraintExtractor.java @@ -146,7 +146,7 @@ public void testExtractTimestampTzMillisDateComparison() Map.of(timestampTzColumnSymbol, columnHandle)))) .isEqualTo(TupleDomain.withColumnDomains(Map.of( columnHandle, - Domain.create(ValueSet.ofRanges(Range.range(columnType, startOfDateUtc, true, startOfNextDateUtc, false)), true)))); + Domain.create(ValueSet.ofRanges(Range.range(columnType, startOfDateUtc, true, startOfNextDateUtc, false)), false)))); } /** @@ -216,7 +216,7 @@ public void testExtractTimestampTzMicrosDateComparison() Map.of(timestampTzColumnSymbol, columnHandle)))) .isEqualTo(TupleDomain.withColumnDomains(Map.of( columnHandle, - Domain.create(ValueSet.ofRanges(Range.range(columnType, startOfDateUtc, true, startOfNextDateUtc, false)), true)))); + Domain.create(ValueSet.ofRanges(Range.range(columnType, startOfDateUtc, true, startOfNextDateUtc, false)), false)))); } /** @@ -302,7 +302,7 @@ public void testExtractDateTruncTimestampTzMillisComparison() Map.of(timestampTzColumnSymbol, columnHandle)))) .isEqualTo(TupleDomain.withColumnDomains(Map.of( columnHandle, - Domain.create(ValueSet.ofRanges(Range.range(columnType, startOfDateUtc, true, startOfNextDateUtc, false)), true)))); + Domain.create(ValueSet.ofRanges(Range.range(columnType, startOfDateUtc, true, startOfNextDateUtc, false)), false)))); } /** @@ -388,7 +388,7 @@ public void testExtractDateTruncTimestampTzMicrosComparison() Map.of(timestampTzColumnSymbol, columnHandle)))) .isEqualTo(TupleDomain.withColumnDomains(Map.of( columnHandle, - Domain.create(ValueSet.ofRanges(Range.range(columnType, startOfDateUtc, true, startOfNextDateUtc, false)), true)))); + Domain.create(ValueSet.ofRanges(Range.range(columnType, startOfDateUtc, true, startOfNextDateUtc, false)), false)))); } /** @@ -461,7 +461,7 @@ public void testExtractYearTimestampTzMicrosComparison() Map.of(timestampTzColumnSymbol, columnHandle)))) .isEqualTo(TupleDomain.withColumnDomains(Map.of( columnHandle, - Domain.create(ValueSet.ofRanges(Range.range(columnType, startOfYearUtc, true, startOfNextDateUtc, false)), true)))); + Domain.create(ValueSet.ofRanges(Range.range(columnType, startOfYearUtc, true, startOfNextDateUtc, false)), false)))); } /** @@ -534,7 +534,7 @@ public void testExtractYearTimestampTzMillisComparison() Map.of(timestampTzColumnSymbol, columnHandle)))) .isEqualTo(TupleDomain.withColumnDomains(Map.of( columnHandle, - 
Domain.create(ValueSet.ofRanges(Range.range(columnType, startOfYearUtc, true, startOfNextDateUtc, false)), true)))); + Domain.create(ValueSet.ofRanges(Range.range(columnType, startOfYearUtc, true, startOfNextDateUtc, false)), false)))); } @Test diff --git a/plugin/trino-delta-lake/src/main/java/io/trino/plugin/deltalake/transactionlog/DeltaLakeParquetStatisticsUtils.java b/plugin/trino-delta-lake/src/main/java/io/trino/plugin/deltalake/transactionlog/DeltaLakeParquetStatisticsUtils.java index 371f2afe69ec..8b7f4cd7a1c6 100644 --- a/plugin/trino-delta-lake/src/main/java/io/trino/plugin/deltalake/transactionlog/DeltaLakeParquetStatisticsUtils.java +++ b/plugin/trino-delta-lake/src/main/java/io/trino/plugin/deltalake/transactionlog/DeltaLakeParquetStatisticsUtils.java @@ -252,7 +252,7 @@ public static Map<String, Object> jsonEncodeMax(Map<String, Optional<Statistics private static Map<String, Object> jsonEncode(Map<String, Optional<Statistics<?>>> stats, Map<String, Type> typeForColumn, BiFunction<Type, Statistics<?>, Optional<Object>> accessor) { Map<String, Optional<Object>> allStats = stats.entrySet().stream() - .filter(entry -> entry.getValue() != null && entry.getValue().isPresent() && !entry.getValue().get().isEmpty()) + .filter(entry -> entry.getValue() != null && entry.getValue().isPresent() && !entry.getValue().get().isEmpty() && typeForColumn.containsKey(entry.getKey())) .collect(toImmutableMap(Map.Entry::getKey, entry -> accessor.apply(typeForColumn.get(entry.getKey()), entry.getValue().get()))); return allStats.entrySet().stream() diff --git a/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/TestDeltaLakeConnectorTest.java b/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/TestDeltaLakeConnectorTest.java index 87abce343958..4bd7021b5be5 100644 --- a/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/TestDeltaLakeConnectorTest.java +++ b/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/TestDeltaLakeConnectorTest.java @@ -815,6 +815,13 @@ public void testTimestampWithTimeZoneOptimization() .isFullyPushedDown() .returnsEmptyResult(); + assertThat(query("SELECT * FROM " + tableName + " WHERE year(part) IS DISTINCT FROM 2006")) + .isNotFullyPushedDown(FilterNode.class); + + assertThat(query("SELECT * FROM " + tableName + " WHERE year(part) IS NOT DISTINCT FROM 2006")) + .isFullyPushedDown() + .returnsEmptyResult(); + assertUpdate("DROP TABLE " + tableName); } diff --git a/plugin/trino-example-http/src/main/java/io/trino/plugin/example/ExampleConnectorFactory.java b/plugin/trino-example-http/src/main/java/io/trino/plugin/example/ExampleConnectorFactory.java index 0e4faef478f1..c605d3f3216a 100644 --- a/plugin/trino-example-http/src/main/java/io/trino/plugin/example/ExampleConnectorFactory.java +++ b/plugin/trino-example-http/src/main/java/io/trino/plugin/example/ExampleConnectorFactory.java @@ -44,6 +44,7 @@ public Connector create(String catalogName, Map<String, String> requiredConfig, // A plugin is not required to use Guice; it is just very convenient Bootstrap app = new Bootstrap( + "io.trino.bootstrap.catalog." 
+ catalogName, new JsonModule(), new TypeDeserializerModule(), new ConnectorContextModule(catalogName, context), diff --git a/plugin/trino-functions-python/pom.xml b/plugin/trino-functions-python/pom.xml index c670ac15fbe0..fbd8b0504791 100644 --- a/plugin/trino-functions-python/pom.xml +++ b/plugin/trino-functions-python/pom.xml @@ -18,7 +18,7 @@ com.dylibso.chicory bom - 1.5.3 + 1.6.0 pom import diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HiveMetadata.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HiveMetadata.java index dd3a1f168b65..8143fd392dea 100644 --- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HiveMetadata.java +++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HiveMetadata.java @@ -181,6 +181,7 @@ import static io.trino.metastore.PrincipalPrivileges.NO_PRIVILEGES; import static io.trino.metastore.PrincipalPrivileges.fromHivePrivilegeInfos; import static io.trino.metastore.StatisticsUpdateMode.MERGE_INCREMENTAL; +import static io.trino.metastore.StatisticsUpdateMode.OVERWRITE_ALL; import static io.trino.metastore.StorageFormat.VIEW_STORAGE_FORMAT; import static io.trino.metastore.type.Category.PRIMITIVE; import static io.trino.parquet.writer.ParquetWriter.SUPPORTED_BLOOM_FILTER_TYPES; @@ -2360,7 +2361,8 @@ else if (partitionUpdate.getUpdateMode() == APPEND) { partitionValues, partitionUpdate.getWritePath(), partitionUpdate.getFileNames(), - partitionStatistics)); + partitionStatistics, + MERGE_INCREMENTAL)); } else if (partitionUpdate.getUpdateMode() == NEW || partitionUpdate.getUpdateMode() == OVERWRITE) { // insert into new partition or overwrite existing partition @@ -2385,6 +2387,13 @@ else if (partitionUpdate.getUpdateMode() == NEW || partitionUpdate.getUpdateMode TrinoFileSystem fileSystem = fileSystemFactory.create(session); cleanExtraOutputFiles(fileSystem, session.getQueryId(), partitionUpdate.getTargetPath(), ImmutableSet.copyOf(partitionUpdate.getFileNames())); } + partitionUpdateInfosBuilder.add( + new PartitionUpdateInfo( + partitionValues, + partitionUpdate.getWritePath(), + partitionUpdate.getFileNames(), + partitionStatistics, + OVERWRITE_ALL)); } else { metastore.dropPartition(session, handle.getSchemaName(), handle.getTableName(), partition.getValues(), true); @@ -2690,7 +2699,8 @@ private void finishOptimize(ConnectorSession session, ConnectorTableExecuteHandl partitionValues, partitionUpdate.getWritePath(), partitionUpdate.getFileNames(), - PartitionStatistics.empty())); + PartitionStatistics.empty(), + MERGE_INCREMENTAL)); } } diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/SemiTransactionalHiveMetastore.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/SemiTransactionalHiveMetastore.java index 31ef18e7c9a8..346947b0faa8 100644 --- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/SemiTransactionalHiveMetastore.java +++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/SemiTransactionalHiveMetastore.java @@ -171,6 +171,8 @@ public class SemiTransactionalHiveMetastore AcidOperation.INSERT, ActionType.INSERT_EXISTING, AcidOperation.MERGE, ActionType.MERGE); + private static final boolean SHOULD_MERGE_STATISTICS = true; + private final HiveMetastore delegate; private final TypeManager typeManager; private final boolean partitionProjectionEnabled; @@ -911,7 +913,7 @@ public synchronized void addPartition( if (oldPartitionAction == null) { partitionActionsOfTable.put( partition.getValues(), - new Action<>(ActionType.ADD, new 
PartitionAndMore(partition, currentLocation, files, statistics, statistics, cleanExtraOutputFilesOnCommit), session.getIdentity(), session.getQueryId())); + new Action<>(ActionType.ADD, new PartitionAndMore(partition, currentLocation, files, statistics, statistics, SHOULD_MERGE_STATISTICS, cleanExtraOutputFilesOnCommit), session.getIdentity(), session.getQueryId())); return; } switch (oldPartitionAction.type()) { @@ -921,7 +923,7 @@ public synchronized void addPartition( } partitionActionsOfTable.put( partition.getValues(), - new Action<>(ActionType.ALTER, new PartitionAndMore(partition, currentLocation, files, statistics, statistics, cleanExtraOutputFilesOnCommit), session.getIdentity(), session.getQueryId())); + new Action<>(ActionType.ALTER, new PartitionAndMore(partition, currentLocation, files, statistics, statistics, SHOULD_MERGE_STATISTICS, cleanExtraOutputFilesOnCommit), session.getIdentity(), session.getQueryId())); } case ADD, ALTER, INSERT_EXISTING, MERGE -> throw new TrinoException(ALREADY_EXISTS, format("Partition already exists for table '%s.%s': %s", databaseName, tableName, partition.getValues())); @@ -1003,8 +1005,9 @@ public synchronized void finishInsertIntoExistingPartitions( partition, partitionInfo.currentLocation(), Optional.of(partitionInfo.fileNames()), - MERGE_INCREMENTAL.updatePartitionStatistics(currentStatistics, partitionInfo.statisticsUpdate()), + partitionInfo.statisticsUpdateMode().updatePartitionStatistics(currentStatistics, partitionInfo.statisticsUpdate()), partitionInfo.statisticsUpdate(), + partitionInfo.statisticsUpdateMode() != OVERWRITE_ALL, cleanExtraOutputFilesOnCommit), session.getIdentity(), session.getQueryId())); @@ -1989,7 +1992,7 @@ private void prepareInsertExistingPartition(ConnectorIdentity identity, String q partition.getSchemaTableName(), Optional.of(getPartitionName(partition.getDatabaseName(), partition.getTableName(), partition.getValues())), partitionAndMore.statisticsUpdate(), - true)); + partitionAndMore.mergeStatistic())); } private void executeCleanupTasksForAbort(Collection<DeclaredIntentionToWrite> declaredIntentionsToWrite) @@ -2840,6 +2843,7 @@ private record PartitionAndMore( Optional<List<String>> fileNames, PartitionStatistics statistics, PartitionStatistics statisticsUpdate, + boolean mergeStatistic, boolean cleanExtraOutputFilesOnCommit) { private PartitionAndMore @@ -3362,7 +3366,7 @@ public static void cleanExtraOutputFiles(TrinoFileSystem fileSystem, String quer } } - public record PartitionUpdateInfo(List<String> partitionValues, Location currentLocation, List<String> fileNames, PartitionStatistics statisticsUpdate) + public record PartitionUpdateInfo(List<String> partitionValues, Location currentLocation, List<String> fileNames, PartitionStatistics statisticsUpdate, StatisticsUpdateMode statisticsUpdateMode) { public PartitionUpdateInfo { @@ -3370,6 +3374,7 @@ public record PartitionUpdateInfo(List<String> partitionValues, Location current requireNonNull(currentLocation, "currentLocation is null"); requireNonNull(fileNames, "fileNames is null"); requireNonNull(statisticsUpdate, "statisticsUpdate is null"); + requireNonNull(statisticsUpdateMode, "statisticsUpdateMode is null"); } } } diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/thrift/UgiBasedMetastoreClientFactory.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/thrift/UgiBasedMetastoreClientFactory.java index 5408300b951d..000230b322c7 100644 --- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/thrift/UgiBasedMetastoreClientFactory.java +++ 
b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/thrift/UgiBasedMetastoreClientFactory.java @@ -18,8 +18,6 @@ import io.trino.spi.security.ConnectorIdentity; import org.apache.thrift.TException; -import java.io.Closeable; -import java.io.IOException; import java.util.Optional; import static java.util.Objects.requireNonNull; @@ -65,12 +63,9 @@ private static void setMetastoreUserOrClose(ThriftMetastoreClient client, String } catch (Throwable t) { // close client and suppress any error from close - try (Closeable ignored = client) { + try (var _ = client) { throw t; } - catch (IOException e) { - // impossible; will be suppressed - } } } } diff --git a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/BaseHiveConnectorTest.java b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/BaseHiveConnectorTest.java index 5ddfcde36773..6e1191873047 100644 --- a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/BaseHiveConnectorTest.java +++ b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/BaseHiveConnectorTest.java @@ -794,10 +794,11 @@ public void testSchemaAuthorizationForRole() // make sure role-grants only work on existing roles assertQueryFails(admin, "ALTER SCHEMA test_schema_authorization_role SET AUTHORIZATION ROLE nonexisting_role", ".*?Role 'nonexisting_role' does not exist in catalog 'hive'"); - assertUpdate(admin, "CREATE ROLE authorized_users IN hive"); - assertUpdate(admin, "GRANT authorized_users TO user IN hive"); + String role = "authorized_users" + randomNameSuffix(); + assertUpdate(admin, "CREATE ROLE " + role + " IN hive"); + assertUpdate(admin, "GRANT " + role + " TO user IN hive"); - assertUpdate(admin, "ALTER SCHEMA test_schema_authorization_role SET AUTHORIZATION ROLE authorized_users"); + assertUpdate(admin, "ALTER SCHEMA test_schema_authorization_role SET AUTHORIZATION ROLE " + role); Session user = testSessionBuilder() .setCatalog(getSession().getCatalog()) @@ -825,7 +826,7 @@ public void testSchemaAuthorizationForRole() assertUpdate(user, "DROP TABLE test_schema_authorization_role.test"); assertUpdate(user, "DROP SCHEMA test_schema_authorization_role"); - assertUpdate(admin, "DROP ROLE authorized_users IN hive"); + assertUpdate(admin, "DROP ROLE " + role + " IN hive"); } @Test @@ -908,11 +909,12 @@ public void testCreateSchemaWithAuthorizationForRole() .build()) .build(); - assertUpdate(admin, "CREATE ROLE authorized_users IN hive"); - assertUpdate(admin, "GRANT authorized_users TO user IN hive"); + String role = "authorized_users" + randomNameSuffix(); + assertUpdate(admin, "CREATE ROLE " + role + " IN hive"); + assertUpdate(admin, "GRANT " + role + " TO user IN hive"); assertQueryFails(admin, "CREATE SCHEMA test_createschema_authorization_role AUTHORIZATION ROLE nonexisting_role", ".*?Role 'nonexisting_role' does not exist in catalog 'hive'"); - assertUpdate(admin, "CREATE SCHEMA test_createschema_authorization_role AUTHORIZATION ROLE authorized_users"); + assertUpdate(admin, "CREATE SCHEMA test_createschema_authorization_role AUTHORIZATION ROLE " + role); assertUpdate(user, "CREATE TABLE test_createschema_authorization_role.test (x bigint)"); // "user" without the role enabled cannot create new tables @@ -926,7 +928,7 @@ public void testCreateSchemaWithAuthorizationForRole() assertUpdate(user, "DROP TABLE test_createschema_authorization_role.test"); assertUpdate(user, "DROP SCHEMA test_createschema_authorization_role"); - assertUpdate(admin, "DROP ROLE authorized_users IN hive"); + assertUpdate(admin, "DROP ROLE " + role + " IN hive"); } 
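The role-name changes above swap fixed identifiers for unique ones. The sketch below is illustrative only and is not the real io.trino.testing.TestingNames implementation; it just shows the shape of a randomNameSuffix-style helper. Appending a random suffix to shared names such as roles keeps repeated or concurrent test runs from colliding on global catalog state.

```java
// Hypothetical helper sketch, names are illustrative.
import java.util.concurrent.ThreadLocalRandom;

final class TestingNamesSketch
{
    private TestingNamesSketch() {}

    static String randomNameSuffix()
    {
        // Base-36 keeps the suffix short and valid inside SQL identifiers
        return Long.toString(ThreadLocalRandom.current().nextLong(1_000_000_000L), 36);
    }

    public static void main(String[] args)
    {
        System.out.println("authorized_users" + randomNameSuffix()); // e.g. authorized_users5h3k2x
    }
}
```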
@Test diff --git a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/BaseTestHiveOnDataLake.java b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/BaseTestHiveOnDataLake.java index c86c8fcfe34e..cd72a458381a 100644 --- a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/BaseTestHiveOnDataLake.java +++ b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/BaseTestHiveOnDataLake.java @@ -50,6 +50,7 @@ import java.util.List; import java.util.Map; import java.util.Optional; +import java.util.OptionalLong; import java.util.TimeZone; import java.util.stream.Collectors; @@ -281,6 +282,80 @@ public void testSyncPartitionOnBucketRoot() assertUpdate("DROP TABLE " + fullyQualifiedTestTableName); } + @Test + public void testUpdateStatisticInsertOverwritePartitionedTable() + { + String partitionValue = "0"; + Session session = Session.builder(getQueryRunner().getDefaultSession()) + .setCatalogSessionProperty("hive", "insert_existing_partitions_behavior", "OVERWRITE") + .setCatalogSessionProperty("hive", "collect_column_statistics_on_write", "true") + .build(); + String tableName = "test_statistic" + randomNameSuffix(); + String testTable = getFullyQualifiedTestTableName(tableName); + computeActual(getCreateTableStatement( + testTable, + "partitioned_by=ARRAY['regionkey']")); + copyTpchNationToTable(session, testTable); + Table hiveTable = metastoreClient.getTable(HIVE_TEST_SCHEMA, tableName).orElseThrow(); + Partition partition = metastoreClient.getPartition(hiveTable, List.of(partitionValue)).orElseThrow(); + Map<String, Map<String, HiveColumnStatistics>> partitionStatistics = metastoreClient.getPartitionColumnStatistics( + HIVE_TEST_SCHEMA, + tableName, + ImmutableSet.of("regionkey=0"), + partition.getColumns().stream().map(Column::getName).collect(toSet())); + assertThat(partitionStatistics.get("regionkey=0").get("nationkey").getIntegerStatistics().isPresent()).isTrue(); + assertThat(partitionStatistics.get("regionkey=0").get("nationkey").getIntegerStatistics().get().getMin()).isEqualTo(OptionalLong.of(0)); + assertThat(partitionStatistics.get("regionkey=0").get("nationkey").getIntegerStatistics().get().getMax()).isEqualTo(OptionalLong.of(16)); + + assertUpdate(session, "INSERT INTO " + testTable + "(name, comment, nationkey, regionkey) values ('name1', 'comment1', 20, 0)", 1); + + partitionStatistics = metastoreClient.getPartitionColumnStatistics( + HIVE_TEST_SCHEMA, + tableName, + ImmutableSet.of("regionkey=0"), + partition.getColumns().stream().map(Column::getName).collect(toSet())); + assertThat(partitionStatistics.get("regionkey=0").get("nationkey").getIntegerStatistics().isPresent()).isTrue(); + assertThat(partitionStatistics.get("regionkey=0").get("nationkey").getIntegerStatistics().get().getMin()).isEqualTo(OptionalLong.of(20)); + assertThat(partitionStatistics.get("regionkey=0").get("nationkey").getIntegerStatistics().get().getMax()).isEqualTo(OptionalLong.of(20)); + } + + @Test + public void testUpdateStatisticInsertAppendPartitionedTable() + { + String partitionValue = "0"; + Session session = Session.builder(getQueryRunner().getDefaultSession()) + .setCatalogSessionProperty("hive", "insert_existing_partitions_behavior", "APPEND") + .setCatalogSessionProperty("hive", "collect_column_statistics_on_write", "true") + .build(); + String tableName = "test_statistic" + randomNameSuffix(); + String testTable = getFullyQualifiedTestTableName(tableName); + computeActual(session, getCreateTableStatement( + testTable, + "partitioned_by=ARRAY['regionkey']")); + copyTpchNationToTable(session, testTable); + Table hiveTable = metastoreClient.getTable(HIVE_TEST_SCHEMA, tableName).orElseThrow(); + Partition partition = metastoreClient.getPartition(hiveTable, List.of(partitionValue)).orElseThrow(); + Map<String, Map<String, HiveColumnStatistics>> partitionStatistics = metastoreClient.getPartitionColumnStatistics( + HIVE_TEST_SCHEMA, + tableName, + ImmutableSet.of("regionkey=0"), + partition.getColumns().stream().map(Column::getName).collect(toSet())); + assertThat(partitionStatistics.get("regionkey=0").get("nationkey").getIntegerStatistics().isPresent()).isTrue(); + assertThat(partitionStatistics.get("regionkey=0").get("nationkey").getIntegerStatistics().get().getMin()).isEqualTo(OptionalLong.of(0)); + assertThat(partitionStatistics.get("regionkey=0").get("nationkey").getIntegerStatistics().get().getMax()).isEqualTo(OptionalLong.of(16)); + + computeActual(session, "INSERT INTO " + testTable + "(name, comment, nationkey, regionkey) values ('name1', 'comment1', 20, 0)"); + + partitionStatistics = metastoreClient.getPartitionColumnStatistics( + HIVE_TEST_SCHEMA, + tableName, + ImmutableSet.of("regionkey=0"), + partition.getColumns().stream().map(Column::getName).collect(toSet())); + assertThat(partitionStatistics.get("regionkey=0").get("nationkey").getIntegerStatistics().isPresent()).isTrue(); + assertThat(partitionStatistics.get("regionkey=0").get("nationkey").getIntegerStatistics().get().getMin()).isEqualTo(OptionalLong.of(0)); + assertThat(partitionStatistics.get("regionkey=0").get("nationkey").getIntegerStatistics().get().getMax()).isEqualTo(OptionalLong.of(20)); + } + @Test public void testSyncPartitionCaseSensitivePathVariation() { @@ -2470,9 +2545,13 @@ protected String getCreateTableStatement(String tableName, List<String> properti tableName); } - protected void copyTpchNationToTable(String testTable) + protected void copyTpchNationToTable(String testTable) { + copyTpchNationToTable(getSession(), testTable); + } + + protected void copyTpchNationToTable(Session session, String testTable) { - computeActual(format("INSERT INTO " + testTable + " SELECT name, comment, nationkey, regionkey FROM tpch.tiny.nation")); + computeActual(session, format("INSERT INTO " + testTable + " SELECT name, comment, nationkey, regionkey FROM tpch.tiny.nation")); } private void testWriteWithFileSize(String testTable, int scaleFactorInThousands, long fileSizeRangeStart, long fileSizeRangeEnd) diff --git a/plugin/trino-http-event-listener/src/test/java/io/trino/plugin/httpquery/TestHttpEventListener.java b/plugin/trino-http-event-listener/src/test/java/io/trino/plugin/httpquery/TestHttpEventListener.java index 16d09edf1b5a..e7adae58a2fc 100644 --- a/plugin/trino-http-event-listener/src/test/java/io/trino/plugin/httpquery/TestHttpEventListener.java +++ b/plugin/trino-http-event-listener/src/test/java/io/trino/plugin/httpquery/TestHttpEventListener.java @@ -194,6 +194,7 @@ final class TestHttpEventListener queryContext, queryIOMetadata, Optional.empty(), + Optional.empty(), Collections.emptyList(), Instant.now(), Instant.now(), diff --git a/plugin/trino-http-server-event-listener/src/test/java/io/trino/plugin/httpquery/TestHttpServerEventListener.java b/plugin/trino-http-server-event-listener/src/test/java/io/trino/plugin/httpquery/TestHttpServerEventListener.java index 3d3d22abbb98..30027506d6b3 100644 --- a/plugin/trino-http-server-event-listener/src/test/java/io/trino/plugin/httpquery/TestHttpServerEventListener.java +++ b/plugin/trino-http-server-event-listener/src/test/java/io/trino/plugin/httpquery/TestHttpServerEventListener.java @@ -177,6 +177,7 @@ final class 
TestHttpServerEventListener queryContext, queryIOMetadata, Optional.empty(), + Optional.empty(), Collections.emptyList(), Instant.now(), Instant.now(), @@ -188,6 +189,7 @@ final class TestHttpServerEventListener queryContext, queryIOMetadata, Optional.empty(), + Optional.empty(), Collections.emptyList(), Instant.now(), Instant.now(), diff --git a/plugin/trino-iceberg/pom.xml b/plugin/trino-iceberg/pom.xml index 6c13eaef0a05..26269bd5aced 100644 --- a/plugin/trino-iceberg/pom.xml +++ b/plugin/trino-iceberg/pom.xml @@ -16,7 +16,7 @@ - 0.105.6 + 0.105.7 diff --git a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergMetadata.java b/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergMetadata.java index 390bc22ffe10..298128734ff8 100644 --- a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergMetadata.java +++ b/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergMetadata.java @@ -1522,7 +1522,7 @@ public Optional finishInsert( commitUpdate(appendFiles, session, "insert"); if (isS3Tables(icebergTable.location())) { - log.debug("S3 Tables does not support statistics: %s", table.name()); + log.debug("S3 Tables do not support statistics: %s", table.name()); } else if (!computedStatistics.isEmpty()) { long newSnapshotId = icebergTable.currentSnapshot().snapshotId(); @@ -2038,14 +2038,20 @@ private void executeDropExtendedStats(ConnectorSession session, IcebergTableExec { checkArgument(executeHandle.procedureHandle() instanceof IcebergDropExtendedStatsHandle, "Unexpected procedure handle %s", executeHandle.procedureHandle()); - Table icebergTable = catalog.loadTable(session, executeHandle.schemaTableName()); - beginTransaction(icebergTable); - UpdateStatistics updateStatistics = transaction.updateStatistics(); - for (StatisticsFile statisticsFile : icebergTable.statisticsFiles()) { - updateStatistics.removeStatistics(statisticsFile.snapshotId()); + try { + Table icebergTable = catalog.loadTable(session, executeHandle.schemaTableName()); + beginTransaction(icebergTable); + UpdateStatistics updateStatistics = transaction.updateStatistics(); + for (StatisticsFile statisticsFile : icebergTable.statisticsFiles()) { + updateStatistics.removeStatistics(statisticsFile.snapshotId()); + } + updateStatistics.commit(); + commitTransaction(transaction, "drop extended stats"); } - updateStatistics.commit(); - commitTransaction(transaction, "drop extended stats"); + catch (NotFoundException e) { + throw new TrinoException(ICEBERG_INVALID_METADATA, e); + } + transaction = null; } @@ -2054,8 +2060,13 @@ private void executeRollbackToSnapshot(ConnectorSession session, IcebergTableExe checkArgument(executeHandle.procedureHandle() instanceof IcebergRollbackToSnapshotHandle, "Unexpected procedure handle %s", executeHandle.procedureHandle()); long snapshotId = ((IcebergRollbackToSnapshotHandle) executeHandle.procedureHandle()).snapshotId(); - Table icebergTable = catalog.loadTable(session, executeHandle.schemaTableName()); - icebergTable.manageSnapshots().setCurrentSnapshot(snapshotId).commit(); + try { + Table icebergTable = catalog.loadTable(session, executeHandle.schemaTableName()); + icebergTable.manageSnapshots().setCurrentSnapshot(snapshotId).commit(); + } + catch (NotFoundException e) { + throw new TrinoException(ICEBERG_INVALID_METADATA, e); + } } private void executeExpireSnapshots(ConnectorSession session, IcebergTableExecuteHandle executeHandle) @@ -2074,10 +2085,15 @@ private void executeExpireSnapshots(ConnectorSession session, IcebergTableExecut 
IcebergSessionProperties.EXPIRE_SNAPSHOTS_MIN_RETENTION); // ForwardingFileIo handles bulk operations so no separate function implementation is needed - table.expireSnapshots() - .expireOlderThan(session.getStart().toEpochMilli() - retention.toMillis()) - .planWith(icebergScanExecutor) - .commit(); + try { + table.expireSnapshots() + .expireOlderThan(session.getStart().toEpochMilli() - retention.toMillis()) + .planWith(icebergScanExecutor) + .commit(); + } + catch (NotFoundException e) { + throw new TrinoException(ICEBERG_INVALID_METADATA, e); + } } private static void validateTableExecuteParameters( @@ -2963,7 +2979,7 @@ public ConnectorTableHandle beginStatisticsCollection(ConnectorSession session, IcebergTableHandle handle = (IcebergTableHandle) tableHandle; Table icebergTable = catalog.loadTable(session, handle.getSchemaTableName()); if (isS3Tables(icebergTable.location())) { - throw new TrinoException(NOT_SUPPORTED, "S3 Tables does not support analyze"); + throw new TrinoException(NOT_SUPPORTED, "S3 Tables do not support analyze"); } beginTransaction(icebergTable); return handle; diff --git a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/catalog/rest/OAuth2SecurityConfig.java b/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/catalog/rest/OAuth2SecurityConfig.java index ccb5675d0e66..66da457f7904 100644 --- a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/catalog/rest/OAuth2SecurityConfig.java +++ b/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/catalog/rest/OAuth2SecurityConfig.java @@ -29,6 +29,7 @@ public class OAuth2SecurityConfig private String token; private URI serverUri; private boolean tokenRefreshEnabled = OAuth2Properties.TOKEN_REFRESH_ENABLED_DEFAULT; + private boolean tokenExchangeEnabled = OAuth2Properties.TOKEN_EXCHANGE_ENABLED_DEFAULT; public Optional getCredential() { @@ -97,6 +98,19 @@ public OAuth2SecurityConfig setTokenRefreshEnabled(boolean tokenRefreshEnabled) return this; } + public boolean isTokenExchangeEnabled() + { + return tokenExchangeEnabled; + } + + @Config("iceberg.rest-catalog.oauth2.token-exchange-enabled") + @ConfigDescription("Controls whether to use the token exchange flow to acquire new tokens") + public OAuth2SecurityConfig setTokenExchangeEnabled(boolean tokenExchangeEnabled) + { + this.tokenExchangeEnabled = tokenExchangeEnabled; + return this; + } + @AssertTrue(message = "OAuth2 requires a credential or token") public boolean credentialOrTokenPresent() { diff --git a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/catalog/rest/OAuth2SecurityProperties.java b/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/catalog/rest/OAuth2SecurityProperties.java index 5a568306e8af..5e2c9de49267 100644 --- a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/catalog/rest/OAuth2SecurityProperties.java +++ b/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/catalog/rest/OAuth2SecurityProperties.java @@ -45,6 +45,7 @@ public OAuth2SecurityProperties(OAuth2SecurityConfig securityConfig) securityConfig.getServerUri().ifPresent( value -> propertiesBuilder.put(OAuth2Properties.OAUTH2_SERVER_URI, value.toString())); propertiesBuilder.put(OAuth2Properties.TOKEN_REFRESH_ENABLED, String.valueOf(securityConfig.isTokenRefreshEnabled())); + propertiesBuilder.put(OAuth2Properties.TOKEN_EXCHANGE_ENABLED, String.valueOf(securityConfig.isTokenExchangeEnabled())); this.securityProperties = propertiesBuilder.buildOrThrow(); } diff --git 
a/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/BaseIcebergConnectorTest.java b/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/BaseIcebergConnectorTest.java index b1c373b253bb..d4222d8343c9 100644 --- a/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/BaseIcebergConnectorTest.java +++ b/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/BaseIcebergConnectorTest.java @@ -3321,6 +3321,10 @@ else if (format == AVRO) { // year() assertThat(query("SELECT * FROM test_year_transform_timestamptz WHERE year(d) = 2015")) .isFullyPushedDown(); + assertThat(query("SELECT * FROM test_year_transform_timestamptz WHERE year(d) IS DISTINCT FROM 2015")) + .isNotFullyPushedDown(FilterNode.class); + assertThat(query("SELECT * FROM test_year_transform_timestamptz WHERE year(d) IS NOT DISTINCT FROM 2015")) + .isFullyPushedDown(); // date_trunc assertThat(query("SELECT * FROM test_year_transform_timestamptz WHERE date_trunc('year', d) = TIMESTAMP '2015-01-01 00:00:00.000000 UTC'")) diff --git a/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/catalog/rest/TestOAuth2SecurityConfig.java b/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/catalog/rest/TestOAuth2SecurityConfig.java index 6152afeb7249..5ad16dc3999d 100644 --- a/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/catalog/rest/TestOAuth2SecurityConfig.java +++ b/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/catalog/rest/TestOAuth2SecurityConfig.java @@ -35,7 +35,8 @@ public void testDefaults() .setToken(null) .setScope(null) .setServerUri(null) - .setTokenRefreshEnabled(OAuth2Properties.TOKEN_REFRESH_ENABLED_DEFAULT)); + .setTokenRefreshEnabled(OAuth2Properties.TOKEN_REFRESH_ENABLED_DEFAULT) + .setTokenExchangeEnabled(OAuth2Properties.TOKEN_EXCHANGE_ENABLED_DEFAULT)); } @Test @@ -47,6 +48,7 @@ public void testExplicitPropertyMappings() .put("iceberg.rest-catalog.oauth2.scope", "scope") .put("iceberg.rest-catalog.oauth2.server-uri", "http://localhost:8080/realms/iceberg/protocol/openid-connect/token") .put("iceberg.rest-catalog.oauth2.token-refresh-enabled", "false") + .put("iceberg.rest-catalog.oauth2.token-exchange-enabled", "false") .buildOrThrow(); OAuth2SecurityConfig expected = new OAuth2SecurityConfig() @@ -54,7 +56,8 @@ public void testExplicitPropertyMappings() .setToken("token") .setScope("scope") .setServerUri(URI.create("http://localhost:8080/realms/iceberg/protocol/openid-connect/token")) - .setTokenRefreshEnabled(false); + .setTokenRefreshEnabled(false) + .setTokenExchangeEnabled(false); assertThat(expected.credentialOrTokenPresent()).isTrue(); assertThat(expected.scopePresentOnlyWithCredential()).isFalse(); assertFullMapping(properties, expected); diff --git a/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/containers/NessieContainer.java b/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/containers/NessieContainer.java index 9def923963ac..1e18138749f5 100644 --- a/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/containers/NessieContainer.java +++ b/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/containers/NessieContainer.java @@ -28,7 +28,7 @@ public class NessieContainer { private static final Logger log = Logger.get(NessieContainer.class); - public static final String DEFAULT_IMAGE = "ghcr.io/projectnessie/nessie:0.105.6"; + public static final String DEFAULT_IMAGE = "ghcr.io/projectnessie/nessie:0.105.7"; public static final String DEFAULT_HOST_NAME = "nessie"; public static final String VERSION_STORE_TYPE = 
"IN_MEMORY"; diff --git a/plugin/trino-iceberg/src/test/java/org/apache/iceberg/rest/DelegatingRestSessionCatalog.java b/plugin/trino-iceberg/src/test/java/org/apache/iceberg/rest/DelegatingRestSessionCatalog.java index d2955084e08b..0d0bdfa6fb72 100644 --- a/plugin/trino-iceberg/src/test/java/org/apache/iceberg/rest/DelegatingRestSessionCatalog.java +++ b/plugin/trino-iceberg/src/test/java/org/apache/iceberg/rest/DelegatingRestSessionCatalog.java @@ -15,6 +15,7 @@ import io.airlift.http.server.HttpServerConfig; import io.airlift.http.server.HttpServerInfo; +import io.airlift.http.server.ServerFeature; import io.airlift.http.server.testing.TestingHttpServer; import io.airlift.node.NodeInfo; import org.apache.iceberg.catalog.Catalog; @@ -66,7 +67,10 @@ public TestingHttpServer testServer() HttpServerInfo httpServerInfo = new HttpServerInfo(config, nodeInfo); RestCatalogServlet servlet = new RestCatalogServlet(adapter); - return new TestingHttpServer(httpServerInfo, nodeInfo, config, servlet, false, true, false); + return new TestingHttpServer(httpServerInfo, nodeInfo, config, servlet, ServerFeature.builder() + // Required due to URIs like: HEAD /v1/namespaces/level_1%1Flevel_2 + .withLegacyUriCompliance(true) + .build()); } public static Builder builder() diff --git a/plugin/trino-kafka-event-listener/src/test/java/io/trino/plugin/eventlistener/kafka/TestUtils.java b/plugin/trino-kafka-event-listener/src/test/java/io/trino/plugin/eventlistener/kafka/TestUtils.java index c3aaf9bf6636..d1440a7f8727 100644 --- a/plugin/trino-kafka-event-listener/src/test/java/io/trino/plugin/eventlistener/kafka/TestUtils.java +++ b/plugin/trino-kafka-event-listener/src/test/java/io/trino/plugin/eventlistener/kafka/TestUtils.java @@ -168,6 +168,7 @@ private TestUtils() {} queryStatistics, queryContext, queryIOMetadata, + Optional.empty(), queryFailureInfo, List.of(new TrinoWarning(new WarningCode(101, "TestCode"), "Test error message")), Instant.now(), diff --git a/plugin/trino-memory/src/main/java/io/trino/plugin/memory/MemoryMetadata.java b/plugin/trino-memory/src/main/java/io/trino/plugin/memory/MemoryMetadata.java index 70e6bb8c652f..bdb93d8721df 100644 --- a/plugin/trino-memory/src/main/java/io/trino/plugin/memory/MemoryMetadata.java +++ b/plugin/trino-memory/src/main/java/io/trino/plugin/memory/MemoryMetadata.java @@ -165,32 +165,40 @@ public synchronized void renameSchema(ConnectorSession session, String source, S } schemas.add(target); - for (Map.Entry table : tableIds.entrySet()) { + Map newTableIds = new HashMap<>(); + for (Iterator> iterator = tableIds.entrySet().iterator(); iterator.hasNext(); ) { + Map.Entry table = iterator.next(); if (table.getKey().getSchemaName().equals(source)) { - tableIds.remove(table.getKey()); - tableIds.put(new SchemaTableName(target, table.getKey().getTableName()), table.getValue()); + iterator.remove(); + newTableIds.put(new SchemaTableName(target, table.getKey().getTableName()), table.getValue()); } } + tableIds.putAll(newTableIds); - for (TableInfo table : tables.values()) { - if (table.schemaName().equals(source)) { - tables.put(table.id(), new TableInfo(table.id(), target, table.tableName(), table.columns(), false, table.dataFragments(), table.comment())); - } - } + tables.replaceAll((tableId, table) -> + table.schemaName().equals(source) + ? 
new TableInfo(tableId, target, table.tableName(), table.columns(), table.truncated(), table.dataFragments(), table.comment()) + : table); - for (Map.Entry view : views.entrySet()) { + Map newViews = new HashMap<>(); + for (Iterator> iterator = views.entrySet().iterator(); iterator.hasNext(); ) { + Map.Entry view = iterator.next(); if (view.getKey().getSchemaName().equals(source)) { - views.remove(view.getKey()); - views.put(new SchemaTableName(target, view.getKey().getTableName()), view.getValue()); + iterator.remove(); + newViews.put(new SchemaTableName(target, view.getKey().getTableName()), view.getValue()); } } + views.putAll(newViews); - for (Map.Entry> function : functions.entrySet()) { + Map> newFunctions = new HashMap<>(); + for (Iterator>> iterator = functions.entrySet().iterator(); iterator.hasNext(); ) { + Map.Entry> function = iterator.next(); if (function.getKey().getSchemaName().equals(source)) { - functions.remove(function.getKey()); - functions.put(new SchemaFunctionName(target, function.getKey().getFunctionName()), function.getValue()); + iterator.remove(); + newFunctions.put(new SchemaFunctionName(target, function.getKey().getFunctionName()), function.getValue()); } } + functions.putAll(newFunctions); } @GuardedBy("this") diff --git a/plugin/trino-memory/src/test/java/io/trino/plugin/memory/TestMemoryMetadata.java b/plugin/trino-memory/src/test/java/io/trino/plugin/memory/TestMemoryMetadata.java index 0b6dc3b1d624..69142ec1a88f 100644 --- a/plugin/trino-memory/src/test/java/io/trino/plugin/memory/TestMemoryMetadata.java +++ b/plugin/trino-memory/src/test/java/io/trino/plugin/memory/TestMemoryMetadata.java @@ -32,7 +32,10 @@ import java.util.List; import java.util.Map; import java.util.Optional; +import java.util.Set; +import java.util.stream.IntStream; +import static com.google.common.collect.ImmutableSet.toImmutableSet; import static io.trino.spi.StandardErrorCode.ALREADY_EXISTS; import static io.trino.spi.StandardErrorCode.NOT_FOUND; import static io.trino.spi.connector.RetryMode.NO_RETRIES; @@ -344,6 +347,33 @@ public void testRenameTable() assertThat(metadata.listTables(SESSION, Optional.of("test_different_schema"))).isEqualTo(ImmutableList.of(differentSchemaTableName)); } + @Test + public void testRenameSchema() + { + Set tableNames = IntStream.range(1, 10) + .mapToObj(idx -> new SchemaTableName("test_schema_to_be_renamed", "test_table_" + idx)) + .collect(toImmutableSet()); + MemoryMetadata metadata = createMetadata(); + metadata.createSchema(SESSION, "test_schema_to_be_renamed", ImmutableMap.of(), new TrinoPrincipal(USER, SESSION.getUser())); + tableNames.forEach(tableName -> { + ConnectorOutputTableHandle table = metadata.beginCreateTable( + SESSION, + new ConnectorTableMetadata(tableName, ImmutableList.of(), ImmutableMap.of()), + Optional.empty(), + NO_RETRIES, + false); + metadata.finishCreateTable(SESSION, table, ImmutableList.of(), ImmutableList.of()); + }); + + // rename schema + Set renamedTableNames = tableNames.stream() + .map(tableName -> new SchemaTableName("test_schema", tableName.getTableName())) + .collect(toImmutableSet()); + metadata.renameSchema(SESSION, "test_schema_to_be_renamed", "test_schema"); + assertThat(metadata.listTables(SESSION, Optional.of("test_schema_to_be_renamed"))).isEmpty(); + assertThat(metadata.listTables(SESSION, Optional.of("test_schema"))).containsAll(renamedTableNames); + } + private static void assertNoTables(MemoryMetadata metadata) { assertThat(metadata.listTables(SESSION, Optional.empty())) diff --git 
a/plugin/trino-mysql-event-listener/src/test/java/io/trino/plugin/eventlistener/mysql/TestMysqlEventListener.java b/plugin/trino-mysql-event-listener/src/test/java/io/trino/plugin/eventlistener/mysql/TestMysqlEventListener.java index 5529017456db..14cd76435cdd 100644 --- a/plugin/trino-mysql-event-listener/src/test/java/io/trino/plugin/eventlistener/mysql/TestMysqlEventListener.java +++ b/plugin/trino-mysql-event-listener/src/test/java/io/trino/plugin/eventlistener/mysql/TestMysqlEventListener.java @@ -232,6 +232,7 @@ final class TestMysqlEventListener FULL_QUERY_STATISTICS, FULL_QUERY_CONTEXT, FULL_QUERY_IO_METADATA, + Optional.empty(), Optional.of(FULL_FAILURE_INFO), List.of(new TrinoWarning( StandardWarningCode.TOO_MANY_STAGES, @@ -348,6 +349,7 @@ final class TestMysqlEventListener MINIMAL_QUERY_CONTEXT, MINIMAL_QUERY_IO_METADATA, Optional.empty(), + Optional.empty(), List.of(), Instant.now(), Instant.now(), diff --git a/plugin/trino-openlineage/pom.xml b/plugin/trino-openlineage/pom.xml index ade4f992b089..fe738939e9d5 100644 --- a/plugin/trino-openlineage/pom.xml +++ b/plugin/trino-openlineage/pom.xml @@ -63,7 +63,7 @@ io.openlineage openlineage-java - 1.39.0 + 1.40.0 diff --git a/plugin/trino-openlineage/src/test/java/io/trino/plugin/openlineage/TestOpenLineageEventsFromQueries.java b/plugin/trino-openlineage/src/test/java/io/trino/plugin/openlineage/TestOpenLineageEventsFromQueries.java index 61f778f700e4..fd0e618c0483 100644 --- a/plugin/trino-openlineage/src/test/java/io/trino/plugin/openlineage/TestOpenLineageEventsFromQueries.java +++ b/plugin/trino-openlineage/src/test/java/io/trino/plugin/openlineage/TestOpenLineageEventsFromQueries.java @@ -88,10 +88,10 @@ public void assertCreateTableAsSelectFromTable(String queryId, String query, Str completedEvent.getInputs().getFirst(), "tpch.tiny.nation", ImmutableList.of( - openLineage.newSchemaDatasetFacetFields("nationkey", null, null, null), - openLineage.newSchemaDatasetFacetFields("regionkey", null, null, null), - openLineage.newSchemaDatasetFacetFields("name", null, null, null), - openLineage.newSchemaDatasetFacetFields("comment", null, null, null))); + openLineage.newSchemaDatasetFacetFields("nationkey", null, null, null, null), + openLineage.newSchemaDatasetFacetFields("regionkey", null, null, null, null), + openLineage.newSchemaDatasetFacetFields("name", null, null, null, null), + openLineage.newSchemaDatasetFacetFields("comment", null, null, null, null))); assertThat(completedEvent.getOutputs()) .hasSize(1); @@ -130,10 +130,10 @@ public void assertCreateTableAsSelectFromTable(String queryId, String query, Str openLineage.newInputField(OPEN_LINEAGE_NAMESPACE, "tpch.tiny.nation", "name", null), openLineage.newInputField(OPEN_LINEAGE_NAMESPACE, "tpch.tiny.nation", "comment", null)); List expectedSchemaFields = List.of( - openLineage.newSchemaDatasetFacetFields("nationkey", "bigint", null, null), - openLineage.newSchemaDatasetFacetFields("name", "varchar(25)", null, null), - openLineage.newSchemaDatasetFacetFields("regionkey", "bigint", null, null), - openLineage.newSchemaDatasetFacetFields("comment", "varchar(152)", null, null)); + openLineage.newSchemaDatasetFacetFields("nationkey", "bigint", null, null, null), + openLineage.newSchemaDatasetFacetFields("name", "varchar(25)", null, null, null), + openLineage.newSchemaDatasetFacetFields("regionkey", "bigint", null, null, null), + openLineage.newSchemaDatasetFacetFields("comment", "varchar(152)", null, null, null)); assertCompletedEventOutput( 
completedEvent.getOutputs().getFirst(), fullTableName, @@ -185,10 +185,10 @@ public void assertCreateTableAsSelectFromView( completeCreateTableEvent.getInputs().getFirst(), format("marquez.default.%s", viewName), ImmutableList.of( - openLineage.newSchemaDatasetFacetFields("nationkey", null, null, null), - openLineage.newSchemaDatasetFacetFields("regionkey", null, null, null), - openLineage.newSchemaDatasetFacetFields("name", null, null, null), - openLineage.newSchemaDatasetFacetFields("comment", null, null, null))); + openLineage.newSchemaDatasetFacetFields("nationkey", null, null, null, null), + openLineage.newSchemaDatasetFacetFields("regionkey", null, null, null, null), + openLineage.newSchemaDatasetFacetFields("name", null, null, null, null), + openLineage.newSchemaDatasetFacetFields("comment", null, null, null, null))); assertThat(completeCreateTableEvent.getOutputs()) .hasSize(1); @@ -227,10 +227,10 @@ public void assertCreateTableAsSelectFromView( openLineage.newInputField(OPEN_LINEAGE_NAMESPACE, "tpch.tiny.nation", "name", null), openLineage.newInputField(OPEN_LINEAGE_NAMESPACE, "tpch.tiny.nation", "comment", null)); List expectedSchemaFields = List.of( - openLineage.newSchemaDatasetFacetFields("nationkey", "bigint", null, null), - openLineage.newSchemaDatasetFacetFields("name", "varchar(25)", null, null), - openLineage.newSchemaDatasetFacetFields("regionkey", "bigint", null, null), - openLineage.newSchemaDatasetFacetFields("comment", "varchar(152)", null, null)); + openLineage.newSchemaDatasetFacetFields("nationkey", "bigint", null, null, null), + openLineage.newSchemaDatasetFacetFields("name", "varchar(25)", null, null, null), + openLineage.newSchemaDatasetFacetFields("regionkey", "bigint", null, null, null), + openLineage.newSchemaDatasetFacetFields("comment", "varchar(152)", null, null, null)); assertCompletedEventOutput( completeCreateTableEvent.getOutputs().getFirst(), fullTableName, @@ -268,21 +268,21 @@ public void assertCreateTableWithJoin(String createTableQueryId, String createTa completedEvent.getInputs().getFirst(), "tpch.tiny.nation", ImmutableList.of( - openLineage.newSchemaDatasetFacetFields("nationkey", null, null, null), - openLineage.newSchemaDatasetFacetFields("name", null, null, null))); + openLineage.newSchemaDatasetFacetFields("nationkey", null, null, null, null), + openLineage.newSchemaDatasetFacetFields("name", null, null, null, null))); assertCompletedEventInput( completedEvent.getInputs().get(1), "tpch.sf1.customer", ImmutableList.of( - openLineage.newSchemaDatasetFacetFields("nationkey", null, null, null), - openLineage.newSchemaDatasetFacetFields("custkey", null, null, null))); + openLineage.newSchemaDatasetFacetFields("nationkey", null, null, null, null), + openLineage.newSchemaDatasetFacetFields("custkey", null, null, null, null))); assertCompletedEventInput( completedEvent.getInputs().get(2), "tpch.tiny.orders", ImmutableList.of( - openLineage.newSchemaDatasetFacetFields("custkey", null, null, null), - openLineage.newSchemaDatasetFacetFields("totalprice", null, null, null), - openLineage.newSchemaDatasetFacetFields("orderdate", null, null, null))); + openLineage.newSchemaDatasetFacetFields("custkey", null, null, null, null), + openLineage.newSchemaDatasetFacetFields("totalprice", null, null, null, null), + openLineage.newSchemaDatasetFacetFields("orderdate", null, null, null, null))); assertThat(completedEvent.getOutputs()) .hasSize(1); @@ -320,10 +320,10 @@ public void assertCreateTableWithJoin(String createTableQueryId, String createTa 
openLineage.newInputField(OPEN_LINEAGE_NAMESPACE, "tpch.tiny.nation", "nationkey", null), openLineage.newInputField(OPEN_LINEAGE_NAMESPACE, "tpch.tiny.nation", "name", null)); List expectedSchemaFields = ImmutableList.of( - openLineage.newSchemaDatasetFacetFields("nation", "varchar(25)", null, null), - openLineage.newSchemaDatasetFacetFields("order_count", "bigint", null, null), - openLineage.newSchemaDatasetFacetFields("total_revenue", "double", null, null), - openLineage.newSchemaDatasetFacetFields("avg_order_value", "double", null, null)); + openLineage.newSchemaDatasetFacetFields("nation", "varchar(25)", null, null, null), + openLineage.newSchemaDatasetFacetFields("order_count", "bigint", null, null, null), + openLineage.newSchemaDatasetFacetFields("total_revenue", "double", null, null, null), + openLineage.newSchemaDatasetFacetFields("avg_order_value", "double", null, null, null)); assertCompletedEventOutput( completedEvent.getOutputs().getFirst(), "marquez.default.test_create_table_with_join", @@ -359,22 +359,22 @@ public void assertCreateTableWithCTE(String createTableQueryId, String createTab completedEvent.getInputs().getFirst(), "tpcds.tiny.store_sales", ImmutableList.of( - openLineage.newSchemaDatasetFacetFields("ss_sold_date_sk", null, null, null), - openLineage.newSchemaDatasetFacetFields("ss_store_sk", null, null, null), - openLineage.newSchemaDatasetFacetFields("ss_sales_price", null, null, null))); + openLineage.newSchemaDatasetFacetFields("ss_sold_date_sk", null, null, null, null), + openLineage.newSchemaDatasetFacetFields("ss_store_sk", null, null, null, null), + openLineage.newSchemaDatasetFacetFields("ss_sales_price", null, null, null, null))); assertCompletedEventInput( completedEvent.getInputs().get(1), "tpcds.tiny.date_dim", ImmutableList.of( - openLineage.newSchemaDatasetFacetFields("d_moy", null, null, null), - openLineage.newSchemaDatasetFacetFields("d_date_sk", null, null, null), - openLineage.newSchemaDatasetFacetFields("d_year", null, null, null))); + openLineage.newSchemaDatasetFacetFields("d_moy", null, null, null, null), + openLineage.newSchemaDatasetFacetFields("d_date_sk", null, null, null, null), + openLineage.newSchemaDatasetFacetFields("d_year", null, null, null, null))); assertCompletedEventInput( completedEvent.getInputs().get(2), "tpcds.tiny.store", ImmutableList.of( - openLineage.newSchemaDatasetFacetFields("s_store_name", null, null, null), - openLineage.newSchemaDatasetFacetFields("s_store_sk", null, null, null))); + openLineage.newSchemaDatasetFacetFields("s_store_name", null, null, null, null), + openLineage.newSchemaDatasetFacetFields("s_store_sk", null, null, null, null))); assertThat(completedEvent.getOutputs()) .hasSize(1); @@ -448,12 +448,12 @@ public void assertCreateTableWithCTE(String createTableQueryId, String createTab openLineage.newInputField(OPEN_LINEAGE_NAMESPACE, "tpcds.sf0.01.store", "s_store_sk", null), openLineage.newInputField(OPEN_LINEAGE_NAMESPACE, "tpcds.sf0.01.store", "s_store_name", null)); List expectedSchemaFields = ImmutableList.of( - openLineage.newSchemaDatasetFacetFields("d_year", "integer", null, null), - openLineage.newSchemaDatasetFacetFields("d_moy", "integer", null, null), - openLineage.newSchemaDatasetFacetFields("year_month", "varchar", null, null), - openLineage.newSchemaDatasetFacetFields("s_store_name", "varchar(50)", null, null), - openLineage.newSchemaDatasetFacetFields("monthly_total", "decimal(38,2)", null, null), - openLineage.newSchemaDatasetFacetFields("store_rank", "bigint", null, null)); + 
openLineage.newSchemaDatasetFacetFields("d_year", "integer", null, null, null), + openLineage.newSchemaDatasetFacetFields("d_moy", "integer", null, null, null), + openLineage.newSchemaDatasetFacetFields("year_month", "varchar", null, null, null), + openLineage.newSchemaDatasetFacetFields("s_store_name", "varchar(50)", null, null, null), + openLineage.newSchemaDatasetFacetFields("monthly_total", "decimal(38,2)", null, null, null), + openLineage.newSchemaDatasetFacetFields("store_rank", "bigint", null, null, null)); assertCompletedEventOutput( completedEvent.getOutputs().getFirst(), "marquez.default.monthly_store_rankings", @@ -489,30 +489,30 @@ public void assertCreateTableWithSubquery(String createTableQueryId, String crea completedEvent.getInputs().getFirst(), "tpch.tiny.supplier", ImmutableList.of( - openLineage.newSchemaDatasetFacetFields("nationkey", null, null, null), - openLineage.newSchemaDatasetFacetFields("address", null, null, null), - openLineage.newSchemaDatasetFacetFields("phone", null, null, null), - openLineage.newSchemaDatasetFacetFields("name", null, null, null), - openLineage.newSchemaDatasetFacetFields("suppkey", null, null, null))); + openLineage.newSchemaDatasetFacetFields("nationkey", null, null, null, null), + openLineage.newSchemaDatasetFacetFields("address", null, null, null, null), + openLineage.newSchemaDatasetFacetFields("phone", null, null, null, null), + openLineage.newSchemaDatasetFacetFields("name", null, null, null, null), + openLineage.newSchemaDatasetFacetFields("suppkey", null, null, null, null))); assertCompletedEventInput( completedEvent.getInputs().get(1), "tpch.tiny.nation", ImmutableList.of( - openLineage.newSchemaDatasetFacetFields("nationkey", null, null, null), - openLineage.newSchemaDatasetFacetFields("name", null, null, null))); + openLineage.newSchemaDatasetFacetFields("nationkey", null, null, null, null), + openLineage.newSchemaDatasetFacetFields("name", null, null, null, null))); assertCompletedEventInput( completedEvent.getInputs().get(2), "tpch.tiny.lineitem", ImmutableList.of( - openLineage.newSchemaDatasetFacetFields("orderkey", null, null, null), - openLineage.newSchemaDatasetFacetFields("quantity", null, null, null), - openLineage.newSchemaDatasetFacetFields("suppkey", null, null, null))); + openLineage.newSchemaDatasetFacetFields("orderkey", null, null, null, null), + openLineage.newSchemaDatasetFacetFields("quantity", null, null, null, null), + openLineage.newSchemaDatasetFacetFields("suppkey", null, null, null, null))); assertCompletedEventInput( completedEvent.getInputs().get(3), "tpch.tiny.orders", ImmutableList.of( - openLineage.newSchemaDatasetFacetFields("orderkey", null, null, null), - openLineage.newSchemaDatasetFacetFields("orderdate", null, null, null))); + openLineage.newSchemaDatasetFacetFields("orderkey", null, null, null, null), + openLineage.newSchemaDatasetFacetFields("orderdate", null, null, null, null))); assertThat(completedEvent.getOutputs()) .hasSize(1); @@ -566,11 +566,11 @@ public void assertCreateTableWithSubquery(String createTableQueryId, String crea openLineage.newInputField(OPEN_LINEAGE_NAMESPACE, "tpch.tiny.nation", "nationkey", null), openLineage.newInputField(OPEN_LINEAGE_NAMESPACE, "tpch.tiny.nation", "name", null)); List expectedSchemaFields = ImmutableList.of( - openLineage.newSchemaDatasetFacetFields("suppkey", "bigint", null, null), - openLineage.newSchemaDatasetFacetFields("name", "varchar(25)", null, null), - openLineage.newSchemaDatasetFacetFields("address", "varchar(40)", null, null), - 
openLineage.newSchemaDatasetFacetFields("phone", "varchar(15)", null, null), - openLineage.newSchemaDatasetFacetFields("nation_name", "varchar(25)", null, null)); + openLineage.newSchemaDatasetFacetFields("suppkey", "bigint", null, null, null), + openLineage.newSchemaDatasetFacetFields("name", "varchar(25)", null, null, null), + openLineage.newSchemaDatasetFacetFields("address", "varchar(40)", null, null, null), + openLineage.newSchemaDatasetFacetFields("phone", "varchar(15)", null, null, null), + openLineage.newSchemaDatasetFacetFields("nation_name", "varchar(25)", null, null, null)); assertCompletedEventOutput( completedEvent.getOutputs().getFirst(), "marquez.default.active_suppliers", @@ -606,20 +606,20 @@ public void assertCreateTableWithUnion(String createTableQueryId, String createT completedEvent.getInputs().getFirst(), "tpch.tiny.orders", ImmutableList.of( - openLineage.newSchemaDatasetFacetFields("totalprice", null, null, null), - openLineage.newSchemaDatasetFacetFields("orderdate", null, null, null))); + openLineage.newSchemaDatasetFacetFields("totalprice", null, null, null, null), + openLineage.newSchemaDatasetFacetFields("orderdate", null, null, null, null))); assertCompletedEventInput( completedEvent.getInputs().get(1), "tpcds.tiny.store_sales", ImmutableList.of( - openLineage.newSchemaDatasetFacetFields("ss_sold_date_sk", null, null, null), - openLineage.newSchemaDatasetFacetFields("ss_sales_price", null, null, null))); + openLineage.newSchemaDatasetFacetFields("ss_sold_date_sk", null, null, null, null), + openLineage.newSchemaDatasetFacetFields("ss_sales_price", null, null, null, null))); assertCompletedEventInput( completedEvent.getInputs().get(2), "tpcds.tiny.date_dim", ImmutableList.of( - openLineage.newSchemaDatasetFacetFields("d_date_sk", null, null, null), - openLineage.newSchemaDatasetFacetFields("d_year", null, null, null))); + openLineage.newSchemaDatasetFacetFields("d_date_sk", null, null, null, null), + openLineage.newSchemaDatasetFacetFields("d_year", null, null, null, null))); assertThat(completedEvent.getOutputs()) .hasSize(1); @@ -667,11 +667,11 @@ public void assertCreateTableWithUnion(String createTableQueryId, String createT openLineage.newInputField(OPEN_LINEAGE_NAMESPACE, "tpcds.sf0.01.date_dim", "d_date_sk", null), openLineage.newInputField(OPEN_LINEAGE_NAMESPACE, "tpcds.sf0.01.date_dim", "d_year", null)); List expectedSchemaFields = ImmutableList.of( - openLineage.newSchemaDatasetFacetFields("dataset", "varchar(6)", null, null), - openLineage.newSchemaDatasetFacetFields("metric_type", "varchar(15)", null, null), - openLineage.newSchemaDatasetFacetFields("record_count", "bigint", null, null), - openLineage.newSchemaDatasetFacetFields("total_value", "double", null, null), - openLineage.newSchemaDatasetFacetFields("avg_value", "double", null, null)); + openLineage.newSchemaDatasetFacetFields("dataset", "varchar(6)", null, null, null), + openLineage.newSchemaDatasetFacetFields("metric_type", "varchar(15)", null, null, null), + openLineage.newSchemaDatasetFacetFields("record_count", "bigint", null, null, null), + openLineage.newSchemaDatasetFacetFields("total_value", "double", null, null, null), + openLineage.newSchemaDatasetFacetFields("avg_value", "double", null, null, null)); assertCompletedEventOutput( completedEvent.getOutputs().getFirst(), fullTableName, @@ -717,10 +717,10 @@ void assertInsertIntoTable( completeCreateTableEvent.getInputs().getFirst(), "tpch.tiny.nation", ImmutableList.of( - openLineage.newSchemaDatasetFacetFields("nationkey", null, 
null, null), - openLineage.newSchemaDatasetFacetFields("regionkey", null, null, null), - openLineage.newSchemaDatasetFacetFields("name", null, null, null), - openLineage.newSchemaDatasetFacetFields("comment", null, null, null))); + openLineage.newSchemaDatasetFacetFields("nationkey", null, null, null, null), + openLineage.newSchemaDatasetFacetFields("regionkey", null, null, null, null), + openLineage.newSchemaDatasetFacetFields("name", null, null, null, null), + openLineage.newSchemaDatasetFacetFields("comment", null, null, null, null))); assertThat(completeCreateTableEvent.getOutputs()) .hasSize(1); @@ -759,10 +759,10 @@ void assertInsertIntoTable( openLineage.newInputField(OPEN_LINEAGE_NAMESPACE, "tpch.tiny.nation", "name", null), openLineage.newInputField(OPEN_LINEAGE_NAMESPACE, "tpch.tiny.nation", "comment", null)); List expectedCreateTableSchemaFields = List.of( - openLineage.newSchemaDatasetFacetFields("nationkey", "bigint", null, null), - openLineage.newSchemaDatasetFacetFields("name", "varchar(25)", null, null), - openLineage.newSchemaDatasetFacetFields("regionkey", "bigint", null, null), - openLineage.newSchemaDatasetFacetFields("comment", "varchar(152)", null, null)); + openLineage.newSchemaDatasetFacetFields("nationkey", "bigint", null, null, null), + openLineage.newSchemaDatasetFacetFields("name", "varchar(25)", null, null, null), + openLineage.newSchemaDatasetFacetFields("regionkey", "bigint", null, null, null), + openLineage.newSchemaDatasetFacetFields("comment", "varchar(152)", null, null, null)); assertCompletedEventOutput( completeCreateTableEvent.getOutputs().getFirst(), fullTableName, @@ -781,10 +781,10 @@ void assertInsertIntoTable( completeInsertIntoTableEvent.getInputs().getFirst(), "tpch.tiny.nation", ImmutableList.of( - openLineage.newSchemaDatasetFacetFields("nationkey", null, null, null), - openLineage.newSchemaDatasetFacetFields("regionkey", null, null, null), - openLineage.newSchemaDatasetFacetFields("name", null, null, null), - openLineage.newSchemaDatasetFacetFields("comment", null, null, null))); + openLineage.newSchemaDatasetFacetFields("nationkey", null, null, null, null), + openLineage.newSchemaDatasetFacetFields("regionkey", null, null, null, null), + openLineage.newSchemaDatasetFacetFields("name", null, null, null, null), + openLineage.newSchemaDatasetFacetFields("comment", null, null, null, null))); assertThat(completeInsertIntoTableEvent.getOutputs()) .hasSize(1); @@ -823,10 +823,10 @@ void assertInsertIntoTable( openLineage.newInputField(OPEN_LINEAGE_NAMESPACE, "tpch.tiny.nation", "name", null), openLineage.newInputField(OPEN_LINEAGE_NAMESPACE, "tpch.tiny.nation", "comment", null)); List expectedInsertIntoTableSchemaFields = List.of( - openLineage.newSchemaDatasetFacetFields("nationkey", "bigint", null, null), - openLineage.newSchemaDatasetFacetFields("name", "varchar(25)", null, null), - openLineage.newSchemaDatasetFacetFields("regionkey", "bigint", null, null), - openLineage.newSchemaDatasetFacetFields("comment", "varchar(152)", null, null)); + openLineage.newSchemaDatasetFacetFields("nationkey", "bigint", null, null, null), + openLineage.newSchemaDatasetFacetFields("name", "varchar(25)", null, null, null), + openLineage.newSchemaDatasetFacetFields("regionkey", "bigint", null, null, null), + openLineage.newSchemaDatasetFacetFields("comment", "varchar(152)", null, null, null)); assertCompletedEventOutput( completeInsertIntoTableEvent.getOutputs().getFirst(), fullTableName, @@ -882,10 +882,10 @@ void assertDeleteFromTable( 
completeCreateTableEvent.getInputs().getFirst(), "tpch.tiny.customer", ImmutableList.of( - openLineage.newSchemaDatasetFacetFields("mktsegment", null, null, null), - openLineage.newSchemaDatasetFacetFields("nationkey", null, null, null), - openLineage.newSchemaDatasetFacetFields("custkey", null, null, null), - openLineage.newSchemaDatasetFacetFields("name", null, null, null))); + openLineage.newSchemaDatasetFacetFields("mktsegment", null, null, null, null), + openLineage.newSchemaDatasetFacetFields("nationkey", null, null, null, null), + openLineage.newSchemaDatasetFacetFields("custkey", null, null, null, null), + openLineage.newSchemaDatasetFacetFields("name", null, null, null, null))); assertThat(completeCreateTableEvent.getOutputs()) .hasSize(1); @@ -924,10 +924,10 @@ void assertDeleteFromTable( openLineage.newInputField(OPEN_LINEAGE_NAMESPACE, "tpch.tiny.customer", "nationkey", null), openLineage.newInputField(OPEN_LINEAGE_NAMESPACE, "tpch.tiny.customer", "name", null)); List expectedCreateTableSchemaFields = List.of( - openLineage.newSchemaDatasetFacetFields("custkey", "bigint", null, null), - openLineage.newSchemaDatasetFacetFields("name", "varchar(25)", null, null), - openLineage.newSchemaDatasetFacetFields("mktsegment", "varchar(10)", null, null), - openLineage.newSchemaDatasetFacetFields("nationkey", "bigint", null, null)); + openLineage.newSchemaDatasetFacetFields("custkey", "bigint", null, null, null), + openLineage.newSchemaDatasetFacetFields("name", "varchar(25)", null, null, null), + openLineage.newSchemaDatasetFacetFields("mktsegment", "varchar(10)", null, null, null), + openLineage.newSchemaDatasetFacetFields("nationkey", "bigint", null, null, null)); assertCompletedEventOutput( completeCreateTableEvent.getOutputs().getFirst(), fullTableName, @@ -946,8 +946,8 @@ void assertDeleteFromTable( completeDeleteFromTableEvent.getInputs().getFirst(), "tpch.tiny.customer", ImmutableList.of( - openLineage.newSchemaDatasetFacetFields("custkey", null, null, null), - openLineage.newSchemaDatasetFacetFields("acctbal", null, null, null))); + openLineage.newSchemaDatasetFacetFields("custkey", null, null, null, null), + openLineage.newSchemaDatasetFacetFields("acctbal", null, null, null, null))); assertThat(completeDeleteFromTableEvent.getOutputs()) .hasSize(1); @@ -1011,10 +1011,10 @@ void assertMergeIntoTable( completeCreateTableEvent.getInputs().getFirst(), "tpch.tiny.customer", ImmutableList.of( - openLineage.newSchemaDatasetFacetFields("mktsegment", null, null, null), - openLineage.newSchemaDatasetFacetFields("nationkey", null, null, null), - openLineage.newSchemaDatasetFacetFields("custkey", null, null, null), - openLineage.newSchemaDatasetFacetFields("name", null, null, null))); + openLineage.newSchemaDatasetFacetFields("mktsegment", null, null, null, null), + openLineage.newSchemaDatasetFacetFields("nationkey", null, null, null, null), + openLineage.newSchemaDatasetFacetFields("custkey", null, null, null, null), + openLineage.newSchemaDatasetFacetFields("name", null, null, null, null))); assertThat(completeCreateTableEvent.getOutputs()) .hasSize(1); @@ -1053,10 +1053,10 @@ void assertMergeIntoTable( openLineage.newInputField(OPEN_LINEAGE_NAMESPACE, "tpch.tiny.customer", "nationkey", null), openLineage.newInputField(OPEN_LINEAGE_NAMESPACE, "tpch.tiny.customer", "name", null)); List expectedCreateTableSchemaFields = List.of( - openLineage.newSchemaDatasetFacetFields("custkey", "bigint", null, null), - openLineage.newSchemaDatasetFacetFields("name", "varchar(25)", null, null), - 
openLineage.newSchemaDatasetFacetFields("mktsegment", "varchar(10)", null, null), - openLineage.newSchemaDatasetFacetFields("nationkey", "bigint", null, null)); + openLineage.newSchemaDatasetFacetFields("custkey", "bigint", null, null, null), + openLineage.newSchemaDatasetFacetFields("name", "varchar(25)", null, null, null), + openLineage.newSchemaDatasetFacetFields("mktsegment", "varchar(10)", null, null, null), + openLineage.newSchemaDatasetFacetFields("nationkey", "bigint", null, null, null)); assertCompletedEventOutput( completeCreateTableEvent.getOutputs().getFirst(), fullTableName, @@ -1075,10 +1075,10 @@ void assertMergeIntoTable( completeMergeIntoTableEvent.getInputs().getFirst(), "tpch.tiny.customer", ImmutableList.of( - openLineage.newSchemaDatasetFacetFields("mktsegment", null, null, null), - openLineage.newSchemaDatasetFacetFields("nationkey", null, null, null), - openLineage.newSchemaDatasetFacetFields("custkey", null, null, null), - openLineage.newSchemaDatasetFacetFields("name", null, null, null))); + openLineage.newSchemaDatasetFacetFields("mktsegment", null, null, null, null), + openLineage.newSchemaDatasetFacetFields("nationkey", null, null, null, null), + openLineage.newSchemaDatasetFacetFields("custkey", null, null, null, null), + openLineage.newSchemaDatasetFacetFields("name", null, null, null, null))); assertThat(completeMergeIntoTableEvent.getOutputs()) .hasSize(1); @@ -1096,7 +1096,7 @@ void assertMergeIntoTable( openLineage.newInputField(OPEN_LINEAGE_NAMESPACE, "tpch.tiny.customer", "nationkey", null), openLineage.newInputField(OPEN_LINEAGE_NAMESPACE, "tpch.tiny.customer", "name", null)); List expectedMergeIntoTableSchemaFields = List.of( - openLineage.newSchemaDatasetFacetFields("name", "varchar(25)", null, null)); + openLineage.newSchemaDatasetFacetFields("name", "varchar(25)", null, null, null)); assertCompletedEventOutput( completeMergeIntoTableEvent.getOutputs().getFirst(), fullTableName, diff --git a/plugin/trino-openlineage/src/test/java/io/trino/plugin/openlineage/TrinoEventData.java b/plugin/trino-openlineage/src/test/java/io/trino/plugin/openlineage/TrinoEventData.java index 9a9948e75dd7..2429b8e17bd8 100644 --- a/plugin/trino-openlineage/src/test/java/io/trino/plugin/openlineage/TrinoEventData.java +++ b/plugin/trino-openlineage/src/test/java/io/trino/plugin/openlineage/TrinoEventData.java @@ -148,6 +148,7 @@ private TrinoEventData() queryContext, queryIOMetadata, Optional.empty(), + Optional.empty(), Collections.emptyList(), Instant.parse("2025-04-28T11:23:55.384424Z"), Instant.parse("2025-04-28T11:24:16.256207Z"), diff --git a/plugin/trino-password-authenticators/src/main/java/io/trino/plugin/password/file/FileAuthenticatorFactory.java b/plugin/trino-password-authenticators/src/main/java/io/trino/plugin/password/file/FileAuthenticatorFactory.java index 630eb6077514..a5368eef0eaf 100644 --- a/plugin/trino-password-authenticators/src/main/java/io/trino/plugin/password/file/FileAuthenticatorFactory.java +++ b/plugin/trino-password-authenticators/src/main/java/io/trino/plugin/password/file/FileAuthenticatorFactory.java @@ -36,6 +36,7 @@ public String getName() public PasswordAuthenticator create(Map config) { Bootstrap app = new Bootstrap( + "io.trino.bootstrap.auth." 
+ getName(), binder -> { configBinder(binder).bindConfig(FileConfig.class); binder.bind(FileAuthenticator.class).in(Scopes.SINGLETON); diff --git a/plugin/trino-password-authenticators/src/main/java/io/trino/plugin/password/file/FileGroupProviderFactory.java b/plugin/trino-password-authenticators/src/main/java/io/trino/plugin/password/file/FileGroupProviderFactory.java index dc8efb2e55f3..4e34509f89a0 100644 --- a/plugin/trino-password-authenticators/src/main/java/io/trino/plugin/password/file/FileGroupProviderFactory.java +++ b/plugin/trino-password-authenticators/src/main/java/io/trino/plugin/password/file/FileGroupProviderFactory.java @@ -36,6 +36,7 @@ public String getName() public GroupProvider create(Map config) { Bootstrap app = new Bootstrap( + "io.trino.bootstrap.groups." + getName(), binder -> { configBinder(binder).bindConfig(FileGroupConfig.class); binder.bind(FileGroupProvider.class).in(Scopes.SINGLETON); diff --git a/plugin/trino-password-authenticators/src/main/java/io/trino/plugin/password/ldap/LdapAuthenticatorFactory.java b/plugin/trino-password-authenticators/src/main/java/io/trino/plugin/password/ldap/LdapAuthenticatorFactory.java index a298713b1ad5..2db9c6338910 100644 --- a/plugin/trino-password-authenticators/src/main/java/io/trino/plugin/password/ldap/LdapAuthenticatorFactory.java +++ b/plugin/trino-password-authenticators/src/main/java/io/trino/plugin/password/ldap/LdapAuthenticatorFactory.java @@ -37,6 +37,7 @@ public String getName() public PasswordAuthenticator create(Map config) { Bootstrap app = new Bootstrap( + "io.trino.bootstrap.auth." + getName(), new LdapClientModule(), binder -> { configBinder(binder).bindConfig(LdapAuthenticatorConfig.class); diff --git a/plugin/trino-password-authenticators/src/main/java/io/trino/plugin/password/salesforce/SalesforceAuthenticatorFactory.java b/plugin/trino-password-authenticators/src/main/java/io/trino/plugin/password/salesforce/SalesforceAuthenticatorFactory.java index 46460de74f5d..5088802eab3e 100644 --- a/plugin/trino-password-authenticators/src/main/java/io/trino/plugin/password/salesforce/SalesforceAuthenticatorFactory.java +++ b/plugin/trino-password-authenticators/src/main/java/io/trino/plugin/password/salesforce/SalesforceAuthenticatorFactory.java @@ -37,6 +37,7 @@ public String getName() public PasswordAuthenticator create(Map config) { Bootstrap app = new Bootstrap( + "io.trino.bootstrap.auth." 
+ getName(), binder -> { configBinder(binder).bindConfig(SalesforceConfig.class); binder.bind(SalesforceBasicAuthenticator.class).in(Scopes.SINGLETON); diff --git a/plugin/trino-pinot/pom.xml b/plugin/trino-pinot/pom.xml index 4ecc5c3b2e29..4d564d7cbe47 100644 --- a/plugin/trino-pinot/pom.xml +++ b/plugin/trino-pinot/pom.xml @@ -26,7 +26,7 @@ net.openhft chronicle-core - 2.27ea8 + 2.27ea9 net.openhft diff --git a/plugin/trino-ranger/pom.xml b/plugin/trino-ranger/pom.xml index f938343174e2..2d8d9c846098 100644 --- a/plugin/trino-ranger/pom.xml +++ b/plugin/trino-ranger/pom.xml @@ -126,7 +126,7 @@ org.apache.hadoop.thirdparty hadoop-shaded-guava - 1.4.0 + 1.5.0 runtime @@ -324,6 +324,29 @@ + + org.basepom.maven + duplicate-finder-maven-plugin + + + + + + org.jspecify + jspecify + + + org.apache.hadoop.thirdparty + hadoop-shaded-guava + + + + org.jspecify.annotations + + + + + diff --git a/plugin/trino-redshift/src/main/java/io/trino/plugin/redshift/RedshiftClient.java b/plugin/trino-redshift/src/main/java/io/trino/plugin/redshift/RedshiftClient.java index a1e0dd0bb802..ddc3ec052d12 100644 --- a/plugin/trino-redshift/src/main/java/io/trino/plugin/redshift/RedshiftClient.java +++ b/plugin/trino-redshift/src/main/java/io/trino/plugin/redshift/RedshiftClient.java @@ -677,6 +677,10 @@ public Optional toColumnMapping(ConnectorSession session, Connect throw new TrinoException(REDSHIFT_INVALID_TYPE, "column size not present"); } int length = type.requiredColumnSize(); + if (length == -1) { + // CHARACTER VARYING returns -1. Treat the type as varchar(0) for the empty string. + length = 0; + } return Optional.of(varcharColumnMapping( length < VarcharType.MAX_LENGTH ? createVarcharType(length) diff --git a/plugin/trino-redshift/src/test/java/io/trino/plugin/redshift/TestingRedshiftServer.java b/plugin/trino-redshift/src/test/java/io/trino/plugin/redshift/TestingRedshiftServer.java index a1b4579b0432..53176f548301 100644 --- a/plugin/trino-redshift/src/test/java/io/trino/plugin/redshift/TestingRedshiftServer.java +++ b/plugin/trino-redshift/src/test/java/io/trino/plugin/redshift/TestingRedshiftServer.java @@ -39,7 +39,7 @@ private TestingRedshiftServer() {} public static void executeInRedshiftWithRetry(String sql) { Failsafe.with(RetryPolicy.builder() - .handleIf(e -> e.getMessage().matches(".* concurrent transaction .*")) + .handleIf(TestingRedshiftServer::isExceptionRecoverable) .withDelay(Duration.ofSeconds(10)) .withMaxRetries(3) .build()) @@ -62,4 +62,13 @@ public static T executeWithRedshift(HandleCallback create(Map config, ResourceGroupConfigurationManagerContext context) { Bootstrap app = new Bootstrap( + "io.trino.bootstrap.resource-group." 
+ getName(), new JsonModule(), new FileResourceGroupsModule(), binder -> binder.bind(ClusterMemoryPoolManager.class).toInstance(context.getMemoryPoolManager())); diff --git a/plugin/trino-resource-group-managers/src/main/java/io/trino/plugin/resourcegroups/db/DbResourceGroupConfigurationManagerFactory.java b/plugin/trino-resource-group-managers/src/main/java/io/trino/plugin/resourcegroups/db/DbResourceGroupConfigurationManagerFactory.java index 7d2c0625f017..8eb01821392d 100644 --- a/plugin/trino-resource-group-managers/src/main/java/io/trino/plugin/resourcegroups/db/DbResourceGroupConfigurationManagerFactory.java +++ b/plugin/trino-resource-group-managers/src/main/java/io/trino/plugin/resourcegroups/db/DbResourceGroupConfigurationManagerFactory.java @@ -43,6 +43,7 @@ public ResourceGroupConfigurationManager create(Map config, R { FlywayMigration.migrate(new ConfigurationFactory(replaceEnvironmentVariables(config)).build(DbResourceGroupConfig.class)); Bootstrap app = new Bootstrap( + "io.trino.bootstrap.resource-group." + getName(), new MBeanModule(), new MBeanServerModule(), new JsonModule(), diff --git a/plugin/trino-spooling-filesystem/src/main/java/io/trino/spooling/filesystem/PartitionedFileSystemLayout.java b/plugin/trino-spooling-filesystem/src/main/java/io/trino/spooling/filesystem/PartitionedFileSystemLayout.java index c2bcef59c015..4a28436bcec3 100644 --- a/plugin/trino-spooling-filesystem/src/main/java/io/trino/spooling/filesystem/PartitionedFileSystemLayout.java +++ b/plugin/trino-spooling-filesystem/src/main/java/io/trino/spooling/filesystem/PartitionedFileSystemLayout.java @@ -17,6 +17,7 @@ import com.google.common.collect.ImmutableList; import com.google.common.hash.HashCode; import com.google.common.hash.Hashing; +import com.google.inject.Inject; import io.trino.filesystem.Location; import java.util.List; @@ -28,6 +29,7 @@ public class PartitionedFileSystemLayout { private final int partitions; + @Inject public PartitionedFileSystemLayout(PartitionedLayoutConfig layoutConfig) { this.partitions = requireNonNull(layoutConfig, "layoutConfig is null").getPartitions(); diff --git a/plugin/trino-sqlserver/src/test/java/io/trino/plugin/sqlserver/TestSqlServerConnectorTest.java b/plugin/trino-sqlserver/src/test/java/io/trino/plugin/sqlserver/TestSqlServerConnectorTest.java index a116e59f19bb..e6f3ac63b669 100644 --- a/plugin/trino-sqlserver/src/test/java/io/trino/plugin/sqlserver/TestSqlServerConnectorTest.java +++ b/plugin/trino-sqlserver/src/test/java/io/trino/plugin/sqlserver/TestSqlServerConnectorTest.java @@ -292,4 +292,12 @@ public void testSelectInformationSchemaColumns() // Isolate this test to avoid problem described in https://github.com/trinodb/trino/issues/10846 executeExclusively(super::testSelectInformationSchemaColumns); } + + @Test + @Override + public void testBulkColumnListingOptions() + { + // Isolate this test to avoid problem described in https://github.com/trinodb/trino/issues/10846 + executeExclusively(super::testBulkColumnListingOptions); + } } diff --git a/plugin/trino-teradata/README.md b/plugin/trino-teradata/README.md new file mode 100644 index 000000000000..ee45e8f3b908 --- /dev/null +++ b/plugin/trino-teradata/README.md @@ -0,0 +1,42 @@ +# Teradata Connector Developer Notes + +The Teradata connector module has both unit tests and integration tests. +The integration tests require access to a [Teradata ClearScape Analytics™ Experience](https://clearscape.teradata.com/sign-in). +You can follow the steps below to run the integration tests locally. 
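+ +In short, the process boils down to exporting two ClearScape credentials and invoking Maven; the sections below walk through each step. As a quick pre-flight check, a sketch like the following (assuming a POSIX shell, run from the Trino repository root; variable names are the ones defined under Prerequisites below) fails fast when the credentials are missing: + +``` +# Sketch only: abort early if the ClearScape credentials are not exported. +: "${CLEARSCAPE_TOKEN:?export CLEARSCAPE_TOKEN before running the tests}" +: "${CLEARSCAPE_PASSWORD:?export CLEARSCAPE_PASSWORD before running the tests}" +# The Teradata database password must be at least 8 characters long. +[ "${#CLEARSCAPE_PASSWORD}" -ge 8 ] || { echo "CLEARSCAPE_PASSWORD is too short" >&2; exit 1; } +```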
+ +## Prerequisites + +#### 1. Create a new ClearScape Analytics™ Experience account + +If you don't already have one, sign up at: + +[Teradata ClearScape Analytics™ Experience](https://www.teradata.com/getting-started/demos/clearscape-analytics) + +#### 2. Login + +Sign in with your new account at: + +[ClearScape Analytics™ Experience Login](https://clearscape.teradata.com/sign-in) + +#### 3. Collect the API Token + +Use the **Copy API Token** button in the UI to retrieve your token. + +#### 4. Define the following environment variables + +⚠️ **Note:** The Teradata database password must be **at least 8 characters long**. + +``` +export CLEARSCAPE_TOKEN= +export CLEARSCAPE_PASSWORD= +``` + +## Running Integration Tests + +Once the environment variables are set, run the integration tests with: + +⚠️ **Note:** Run the following command from the Trino parent directory. + + ``` + ./mvnw clean install -pl :trino-teradata +``` diff --git a/plugin/trino-teradata/pom.xml b/plugin/trino-teradata/pom.xml new file mode 100644 index 000000000000..5594c56a130c --- /dev/null +++ b/plugin/trino-teradata/pom.xml @@ -0,0 +1,326 @@ + + + 4.0.0 + + io.trino + trino-root + 479-SNAPSHOT + ../../pom.xml + + + trino-teradata + trino-plugin + ${project.artifactId} + Trino - Teradata connector + + + true + true + + + + + com.google.inject + guice + classes + + + + io.airlift + configuration + + + + io.trino + trino-base-jdbc + + + + io.trino + trino-plugin-toolkit + + + + jakarta.validation + jakarta.validation-api + + + + com.fasterxml.jackson.core + jackson-annotations + provided + + + + io.airlift + slice + provided + + + + io.opentelemetry + opentelemetry-api + provided + + + + io.opentelemetry + opentelemetry-api-incubator + provided + + + + io.opentelemetry + opentelemetry-context + provided + + + + io.trino + trino-spi + provided + + + + org.openjdk.jol + jol-core + provided + + + + com.teradata.jdbc + terajdbc + 20.00.00.49 + runtime + + + + com.fasterxml.jackson.core + jackson-core + test + + + + com.fasterxml.jackson.core + jackson-databind + test + + + + com.google.errorprone + error_prone_annotations + test + true + + + + com.google.guava + guava + test + + + + io.airlift + concurrent + test + + + + io.airlift + configuration-testing + test + + + + io.airlift + json + test + + + + io.airlift + junit-extensions + test + + + + io.airlift + log + test + + + + io.airlift + log-manager + test + + + + io.airlift + testing + test + + + + io.airlift + tracing + test + + + + io.airlift + units + test + + + + io.trino + trino-base-jdbc + test-jar + test + + + + io.trino + trino-exchange-filesystem + test + + + + io.trino + trino-exchange-filesystem + test-jar + test + + + + io.trino + trino-jmx + test + + + + io.trino + trino-main + test + + + + io.trino + trino-main + test-jar + test + + + + io.trino + trino-parser + test + + + + io.trino + trino-plugin-toolkit + test-jar + test + + + + io.trino + trino-testing + test + + + + io.trino + trino-testing-containers + test + + + + io.trino + trino-testing-services + test + + + + io.trino + trino-tpch + test + + + + io.trino.tpch + tpch + test + + + + org.assertj + assertj-core + test + + + + org.jetbrains + annotations + test + + + + org.junit.jupiter + junit-jupiter-api + test + + + + org.junit.jupiter + junit-jupiter-engine + test + + + + + + default + + true + + + + + org.apache.maven.plugins + maven-surefire-plugin + + + **/TestTeradataConnectorTest.java + **/TestTeradataTypeMapping.java + + + + + + + + + clearscape-tests + + false + + + + + 
org.apache.maven.plugins + maven-surefire-plugin + + + **/TestTeradataConnectorTest.java + **/TestTeradataTypeMapping.java + + + + + + + + diff --git a/plugin/trino-teradata/src/main/java/io/trino/plugin/teradata/TeradataClient.java b/plugin/trino-teradata/src/main/java/io/trino/plugin/teradata/TeradataClient.java new file mode 100644 index 000000000000..8d5bacc685fd --- /dev/null +++ b/plugin/trino-teradata/src/main/java/io/trino/plugin/teradata/TeradataClient.java @@ -0,0 +1,382 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.trino.plugin.teradata; + +import com.google.common.collect.ImmutableMap; +import com.google.inject.Inject; +import io.trino.plugin.base.mapping.IdentifierMapping; +import io.trino.plugin.jdbc.BaseJdbcClient; +import io.trino.plugin.jdbc.BaseJdbcConfig; +import io.trino.plugin.jdbc.CaseSensitivity; +import io.trino.plugin.jdbc.ColumnMapping; +import io.trino.plugin.jdbc.ConnectionFactory; +import io.trino.plugin.jdbc.JdbcColumnHandle; +import io.trino.plugin.jdbc.JdbcOutputTableHandle; +import io.trino.plugin.jdbc.JdbcTableHandle; +import io.trino.plugin.jdbc.JdbcTypeHandle; +import io.trino.plugin.jdbc.PredicatePushdownController; +import io.trino.plugin.jdbc.QueryBuilder; +import io.trino.plugin.jdbc.RemoteTableName; +import io.trino.plugin.jdbc.WriteMapping; +import io.trino.plugin.jdbc.logging.RemoteQueryModifier; +import io.trino.spi.TrinoException; +import io.trino.spi.connector.ColumnMetadata; +import io.trino.spi.connector.ColumnPosition; +import io.trino.spi.connector.ConnectorSession; +import io.trino.spi.connector.SchemaTableName; +import io.trino.spi.type.CharType; +import io.trino.spi.type.DecimalType; +import io.trino.spi.type.Decimals; +import io.trino.spi.type.Type; +import io.trino.spi.type.VarcharType; + +import java.sql.Connection; +import java.sql.DatabaseMetaData; +import java.sql.PreparedStatement; +import java.sql.ResultSetMetaData; +import java.sql.SQLException; +import java.sql.Types; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.OptionalLong; + +import static io.trino.plugin.jdbc.CaseSensitivity.CASE_INSENSITIVE; +import static io.trino.plugin.jdbc.CaseSensitivity.CASE_SENSITIVE; +import static io.trino.plugin.jdbc.JdbcErrorCode.JDBC_ERROR; +import static io.trino.plugin.jdbc.PredicatePushdownController.CASE_INSENSITIVE_CHARACTER_PUSHDOWN; +import static io.trino.plugin.jdbc.PredicatePushdownController.FULL_PUSHDOWN; +import static io.trino.plugin.jdbc.StandardColumnMappings.bigintColumnMapping; +import static io.trino.plugin.jdbc.StandardColumnMappings.bigintWriteFunction; +import static io.trino.plugin.jdbc.StandardColumnMappings.charReadFunction; +import static io.trino.plugin.jdbc.StandardColumnMappings.charWriteFunction; +import static io.trino.plugin.jdbc.StandardColumnMappings.dateColumnMappingUsingLocalDate; +import static io.trino.plugin.jdbc.StandardColumnMappings.dateWriteFunctionUsingLocalDate; +import static 
io.trino.plugin.jdbc.StandardColumnMappings.decimalColumnMapping; +import static io.trino.plugin.jdbc.StandardColumnMappings.doubleColumnMapping; +import static io.trino.plugin.jdbc.StandardColumnMappings.doubleWriteFunction; +import static io.trino.plugin.jdbc.StandardColumnMappings.integerColumnMapping; +import static io.trino.plugin.jdbc.StandardColumnMappings.integerWriteFunction; +import static io.trino.plugin.jdbc.StandardColumnMappings.longDecimalWriteFunction; +import static io.trino.plugin.jdbc.StandardColumnMappings.realWriteFunction; +import static io.trino.plugin.jdbc.StandardColumnMappings.shortDecimalWriteFunction; +import static io.trino.plugin.jdbc.StandardColumnMappings.smallintColumnMapping; +import static io.trino.plugin.jdbc.StandardColumnMappings.smallintWriteFunction; +import static io.trino.plugin.jdbc.StandardColumnMappings.tinyintColumnMapping; +import static io.trino.plugin.jdbc.StandardColumnMappings.tinyintWriteFunction; +import static io.trino.plugin.jdbc.StandardColumnMappings.varcharReadFunction; +import static io.trino.plugin.jdbc.StandardColumnMappings.varcharWriteFunction; +import static io.trino.plugin.jdbc.TypeHandlingJdbcSessionProperties.getUnsupportedTypeHandling; +import static io.trino.plugin.jdbc.UnsupportedTypeHandling.CONVERT_TO_VARCHAR; +import static io.trino.spi.StandardErrorCode.NOT_SUPPORTED; +import static io.trino.spi.type.BigintType.BIGINT; +import static io.trino.spi.type.CharType.createCharType; +import static io.trino.spi.type.DateType.DATE; +import static io.trino.spi.type.DecimalType.createDecimalType; +import static io.trino.spi.type.DoubleType.DOUBLE; +import static io.trino.spi.type.IntegerType.INTEGER; +import static io.trino.spi.type.RealType.REAL; +import static io.trino.spi.type.SmallintType.SMALLINT; +import static io.trino.spi.type.TinyintType.TINYINT; +import static io.trino.spi.type.VarcharType.createUnboundedVarcharType; +import static io.trino.spi.type.VarcharType.createVarcharType; +import static java.lang.String.format; + +public class TeradataClient + extends BaseJdbcClient +{ + private static final PredicatePushdownController TERADATA_STRING_PUSHDOWN = FULL_PUSHDOWN; + private final TeradataConfig.TeradataCaseSensitivity teradataJDBCCaseSensitivity; + + @Inject + public TeradataClient( + BaseJdbcConfig config, + TeradataConfig teradataConfig, + ConnectionFactory connectionFactory, + QueryBuilder queryBuilder, + IdentifierMapping identifierMapping, + RemoteQueryModifier remoteQueryModifier) + { + super("\"", connectionFactory, queryBuilder, config.getJdbcTypesMappedToVarchar(), identifierMapping, remoteQueryModifier, true); + this.teradataJDBCCaseSensitivity = teradataConfig.getTeradataCaseSensitivity(); + } + + private static ColumnMapping charColumnMapping(int charLength, boolean isCaseSensitive) + { + if (charLength > CharType.MAX_LENGTH) { + return varcharColumnMapping(charLength, isCaseSensitive); + } + CharType charType = createCharType(charLength); + return ColumnMapping.sliceMapping( + charType, + charReadFunction(charType), + charWriteFunction(), + isCaseSensitive ? TERADATA_STRING_PUSHDOWN : CASE_INSENSITIVE_CHARACTER_PUSHDOWN); + } + + private static ColumnMapping varcharColumnMapping(int varcharLength, boolean isCaseSensitive) + { + VarcharType varcharType = varcharLength <= VarcharType.MAX_LENGTH + ? createVarcharType(varcharLength) + : createUnboundedVarcharType(); + return ColumnMapping.sliceMapping( + varcharType, + varcharReadFunction(varcharType), + varcharWriteFunction(), + isCaseSensitive ? 
TERADATA_STRING_PUSHDOWN : CASE_INSENSITIVE_CHARACTER_PUSHDOWN); + } + + private boolean deriveCaseSensitivity(CaseSensitivity caseSensitivity) + { + return switch (teradataJDBCCaseSensitivity) { + case CASE_INSENSITIVE -> false; + case CASE_SENSITIVE -> true; + default -> caseSensitivity != null; + }; + } + + @Override + protected void createSchema(ConnectorSession session, Connection connection, String remoteSchemaName) + { + execute(session, format( + "CREATE DATABASE %s AS PERMANENT = 60000000, SPOOL = 120000000", + quoted(remoteSchemaName))); + } + + @Override + protected void copyTableSchema(ConnectorSession session, Connection connection, String catalogName, String schemaName, String tableName, String newTableName, + List columnNames) + { + String tableCopyFormat = "CREATE TABLE %s AS ( SELECT * FROM %s ) WITH DATA"; + String sql = format( + tableCopyFormat, + quoted(catalogName, schemaName, newTableName), + quoted(catalogName, schemaName, tableName)); + try { + execute(session, connection, sql); + } + catch (SQLException e) { + throw new TrinoException(JDBC_ERROR, e); + } + } + + @Override + protected void verifySchemaName(DatabaseMetaData databaseMetadata, String schemaName) + throws SQLException + { + int schemaNameLimit = databaseMetadata.getMaxSchemaNameLength(); + if (schemaName.length() > schemaNameLimit) { + throw new TrinoException(NOT_SUPPORTED, format("Schema name must be shorter than or equal to '%s' characters but got '%s'", schemaNameLimit, schemaName.length())); + } + } + + @Override + protected void verifyTableName(DatabaseMetaData databaseMetadata, String tableName) + throws SQLException + { + if (tableName.length() > databaseMetadata.getMaxTableNameLength()) { + throw new TrinoException(NOT_SUPPORTED, format("Table name must be shorter than or equal to '%s' characters but got '%s'", databaseMetadata.getMaxTableNameLength(), + tableName.length())); + } + } + + @Override + protected void verifyColumnName(DatabaseMetaData databaseMetadata, String columnName) + throws SQLException + { + if (columnName.length() > databaseMetadata.getMaxColumnNameLength()) { + throw new TrinoException(NOT_SUPPORTED, format("Column name must be shorter than or equal to '%s' characters but got '%s': '%s'", + databaseMetadata.getMaxColumnNameLength(), columnName.length(), columnName)); + } + } + + @Override + protected void dropSchema(ConnectorSession session, Connection connection, String remoteSchemaName, boolean cascade) + throws SQLException + { + if (cascade) { + throw new TrinoException(NOT_SUPPORTED, "This connector does not support dropping schemas with CASCADE option"); + } + String dropSchema = "DROP DATABASE " + quoted(remoteSchemaName); + execute(session, connection, dropSchema); + } + + @Override + public void renameSchema(ConnectorSession session, String schemaName, String newSchemaName) + { + throw new TrinoException(NOT_SUPPORTED, "This connector does not support renaming schema"); + } + + @Override + public OptionalLong delete(ConnectorSession session, JdbcTableHandle handle) + { + throw new TrinoException(NOT_SUPPORTED, "This connector does not support modifying table rows"); + } + + @Override + public void truncateTable(ConnectorSession session, JdbcTableHandle handle) + { + throw new TrinoException(NOT_SUPPORTED, "This connector does not support truncating tables"); + } + + @Override + public void dropColumn(ConnectorSession session, JdbcTableHandle handle, JdbcColumnHandle column) + { + throw new TrinoException(NOT_SUPPORTED, "This connector does not support dropping 
columns"); + } + + @Override + public void renameColumn(ConnectorSession session, JdbcTableHandle handle, JdbcColumnHandle jdbcColumn, String newColumnName) + { + throw new TrinoException(NOT_SUPPORTED, "This connector does not support renaming columns"); + } + + @Override + public void renameTable(ConnectorSession session, JdbcTableHandle handle, SchemaTableName newTableName) + { + throw new TrinoException(NOT_SUPPORTED, "This connector does not support renaming tables"); + } + + @Override + public JdbcOutputTableHandle beginInsertTable(ConnectorSession session, JdbcTableHandle tableHandle, List columns) + { + throw new TrinoException(NOT_SUPPORTED, "This connector does not support inserts"); + } + + @Override + public void setColumnType(ConnectorSession session, JdbcTableHandle handle, JdbcColumnHandle column, Type type) + { + throw new TrinoException(NOT_SUPPORTED, "This connector does not support setting column types"); + } + + @Override + public void addColumn(ConnectorSession session, JdbcTableHandle handle, ColumnMetadata column, ColumnPosition position) + { + throw new TrinoException(NOT_SUPPORTED, "This connector does not support add column operations"); + } + + @Override + public void dropNotNullConstraint(ConnectorSession session, JdbcTableHandle handle, JdbcColumnHandle column) + { + throw new TrinoException(NOT_SUPPORTED, "This connector does not support dropping a not null constraint"); + } + + @Override + protected Map getCaseSensitivityForColumns(ConnectorSession session, Connection connection, SchemaTableName schemaTableName, + RemoteTableName remoteTableName) + { + // try to use result set metadata from select * from table to populate the mapping + try { + HashMap caseMap = new HashMap<>(); + String sql = format("select * from %s.%s where 0=1", schemaTableName.getSchemaName(), schemaTableName.getTableName()); + PreparedStatement pstmt = connection.prepareStatement(sql); + ResultSetMetaData rsmd = pstmt.getMetaData(); + int columnCount = rsmd.getColumnCount(); + for (int i = 1; i <= columnCount; i++) { + caseMap.put(rsmd.getColumnName(i), rsmd.isCaseSensitive(i) ? 
CASE_SENSITIVE : CASE_INSENSITIVE); + } + pstmt.close(); + return caseMap; + } + catch (SQLException e) { + // behavior of base jdbc + return ImmutableMap.of(); + } + } + + @Override + public Optional toColumnMapping(ConnectorSession session, Connection connection, JdbcTypeHandle typeHandle) + { + // this method should ultimately encompass all the expected teradata data types + Optional mapping = getForcedMappingToVarchar(typeHandle); + if (mapping.isPresent()) { + return mapping; + } + + switch (typeHandle.jdbcType()) { + case Types.TINYINT: + return Optional.of(tinyintColumnMapping()); + case Types.SMALLINT: + return Optional.of(smallintColumnMapping()); + case Types.INTEGER: + return Optional.of(integerColumnMapping()); + case Types.BIGINT: + return Optional.of(bigintColumnMapping()); + case Types.REAL: + case Types.DOUBLE: + case Types.FLOAT: + // teradata float is 64 bit + // trino double is 64 bit + // teradata float / real / double precision all map to jdbc type float + return Optional.of(doubleColumnMapping()); + case Types.NUMERIC: + case Types.DECIMAL: + return numberMapping(typeHandle); + case Types.CHAR: + return Optional.of(charColumnMapping(typeHandle.requiredColumnSize(), deriveCaseSensitivity(typeHandle.caseSensitivity().orElse(null)))); + case Types.VARCHAR: + // see prior note on trino case sensitivity + return Optional.of(varcharColumnMapping(typeHandle.requiredColumnSize(), deriveCaseSensitivity(typeHandle.caseSensitivity().orElse(null)))); + case Types.DATE: + return Optional.of(dateColumnMappingUsingLocalDate()); + } + + if (getUnsupportedTypeHandling(session) == CONVERT_TO_VARCHAR) { + return mapToUnboundedVarchar(typeHandle); + } + + return Optional.empty(); + } + + private Optional numberMapping(JdbcTypeHandle typeHandle) + { + int precision = typeHandle.requiredColumnSize(); + int scale = typeHandle.requiredDecimalDigits(); + if (precision > Decimals.MAX_PRECISION) { + // this will trigger for number(*) as precision is 40 + return Optional.of(decimalColumnMapping(createDecimalType(Decimals.MAX_PRECISION, scale))); + } + return Optional.of(decimalColumnMapping(createDecimalType(precision, scale))); + } + + @Override + public WriteMapping toWriteMapping(ConnectorSession session, Type type) + { + return switch (type) { + case Type typeInstance when typeInstance == TINYINT -> WriteMapping.longMapping("smallint", tinyintWriteFunction()); + case Type typeInstance when typeInstance == SMALLINT -> WriteMapping.longMapping("smallint", smallintWriteFunction()); + case Type typeInstance when typeInstance == INTEGER -> WriteMapping.longMapping("integer", integerWriteFunction()); + case Type typeInstance when typeInstance == BIGINT -> WriteMapping.longMapping("bigint", bigintWriteFunction()); + case Type typeInstance when typeInstance == REAL -> WriteMapping.longMapping("FLOAT", realWriteFunction()); + case Type typeInstance when typeInstance == DOUBLE -> WriteMapping.doubleMapping("double precision", doubleWriteFunction()); + case Type typeInstance when typeInstance == DATE -> WriteMapping.longMapping("date", dateWriteFunctionUsingLocalDate()); + case DecimalType decimalTypeInstance -> { + String dataType = String.format("decimal(%s, %s)", decimalTypeInstance.getPrecision(), decimalTypeInstance.getScale()); + if (decimalTypeInstance.isShort()) { + yield WriteMapping.longMapping(dataType, shortDecimalWriteFunction(decimalTypeInstance)); + } + yield WriteMapping.objectMapping(dataType, longDecimalWriteFunction(decimalTypeInstance)); + } + case CharType charTypeInstance -> 
WriteMapping.sliceMapping("char(" + charTypeInstance.getLength() + ")", charWriteFunction()); + case VarcharType varcharTypeInstance -> { + String dataType = varcharTypeInstance.isUnbounded() + ? "clob" + : "varchar(" + varcharTypeInstance.getBoundedLength() + ")"; + yield WriteMapping.sliceMapping(dataType, varcharWriteFunction()); + } + default -> throw new TrinoException(NOT_SUPPORTED, "Unsupported column type: " + type.getDisplayName()); + }; + } +} diff --git a/plugin/trino-teradata/src/main/java/io/trino/plugin/teradata/TeradataClientModule.java b/plugin/trino-teradata/src/main/java/io/trino/plugin/teradata/TeradataClientModule.java new file mode 100644 index 000000000000..081f9807b59b --- /dev/null +++ b/plugin/trino-teradata/src/main/java/io/trino/plugin/teradata/TeradataClientModule.java @@ -0,0 +1,63 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.trino.plugin.teradata; + +import com.google.inject.Binder; +import com.google.inject.Provides; +import com.google.inject.Scopes; +import com.google.inject.Singleton; +import io.airlift.configuration.AbstractConfigurationAwareModule; +import io.opentelemetry.api.OpenTelemetry; +import io.trino.plugin.jdbc.BaseJdbcConfig; +import io.trino.plugin.jdbc.ConnectionFactory; +import io.trino.plugin.jdbc.DriverConnectionFactory; +import io.trino.plugin.jdbc.ForBaseJdbc; +import io.trino.plugin.jdbc.JdbcClient; +import io.trino.plugin.jdbc.JdbcJoinPushdownSupportModule; +import io.trino.plugin.jdbc.JdbcStatisticsConfig; +import io.trino.plugin.jdbc.credential.CredentialProvider; + +import java.sql.Driver; +import java.sql.DriverManager; +import java.sql.SQLException; +import java.util.Properties; + +import static io.airlift.configuration.ConfigBinder.configBinder; + +public class TeradataClientModule + extends AbstractConfigurationAwareModule +{ + @Provides + @Singleton + @ForBaseJdbc + public static ConnectionFactory getConnectionFactory(BaseJdbcConfig config, CredentialProvider credentialProvider, OpenTelemetry openTelemetry) + throws SQLException + { + Driver driver = DriverManager.getDriver(config.getConnectionUrl()); + Properties connectionProperties = new Properties(); + connectionProperties.setProperty("LOGMECH", "TD2"); + return DriverConnectionFactory.builder(driver, config.getConnectionUrl(), credentialProvider) + .setConnectionProperties(connectionProperties) + .setOpenTelemetry(openTelemetry).build(); + } + + @Override + public void setup(Binder binder) + { + configBinder(binder).bindConfig(TeradataConfig.class); + binder.bind(JdbcClient.class).annotatedWith(ForBaseJdbc.class).to(TeradataClient.class).in(Scopes.SINGLETON); + configBinder(binder).bindConfig(JdbcStatisticsConfig.class); + install(new JdbcJoinPushdownSupportModule()); + } +} diff --git a/plugin/trino-teradata/src/main/java/io/trino/plugin/teradata/TeradataConfig.java b/plugin/trino-teradata/src/main/java/io/trino/plugin/teradata/TeradataConfig.java new file mode 100644 index 000000000000..e0450da835ab --- /dev/null +++ 
b/plugin/trino-teradata/src/main/java/io/trino/plugin/teradata/TeradataConfig.java @@ -0,0 +1,42 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.trino.plugin.teradata; + +import io.airlift.configuration.Config; +import io.airlift.configuration.ConfigDescription; +import jakarta.validation.constraints.NotNull; + +public class TeradataConfig +{ + private TeradataCaseSensitivity teradataCaseSensitivity = TeradataCaseSensitivity.CASE_SENSITIVE; + + @NotNull + public TeradataCaseSensitivity getTeradataCaseSensitivity() + { + return teradataCaseSensitivity; + } + + @Config("teradata.case-sensitivity") + @ConfigDescription("How char/varchar columns' case sensitivity will be exposed to Trino (default: CASE_SENSITIVE). Possible values: CASE_INSENSITIVE, CASE_SENSITIVE, AS_DEFINED.") + public TeradataConfig setTeradataCaseSensitivity(TeradataCaseSensitivity teradataCaseSensitivity) + { + this.teradataCaseSensitivity = teradataCaseSensitivity; + return this; + } + + public enum TeradataCaseSensitivity + { + CASE_INSENSITIVE, CASE_SENSITIVE, AS_DEFINED + } +} diff --git a/plugin/trino-teradata/src/main/java/io/trino/plugin/teradata/TeradataPlugin.java b/plugin/trino-teradata/src/main/java/io/trino/plugin/teradata/TeradataPlugin.java new file mode 100644 index 000000000000..d11110edfbed --- /dev/null +++ b/plugin/trino-teradata/src/main/java/io/trino/plugin/teradata/TeradataPlugin.java @@ -0,0 +1,25 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.trino.plugin.teradata; + +import io.trino.plugin.jdbc.JdbcPlugin; + +public class TeradataPlugin + extends JdbcPlugin +{ + public TeradataPlugin() + { + super("teradata", TeradataClientModule::new); + } +} diff --git a/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/TestTeradataConfig.java b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/TestTeradataConfig.java new file mode 100644 index 000000000000..a9a57054ef96 --- /dev/null +++ b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/TestTeradataConfig.java @@ -0,0 +1,47 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.trino.plugin.teradata; + +import com.google.common.collect.ImmutableMap; +import io.trino.plugin.teradata.TeradataConfig.TeradataCaseSensitivity; +import org.junit.jupiter.api.Test; + +import java.util.Map; + +import static io.airlift.configuration.testing.ConfigAssertions.assertFullMapping; +import static io.airlift.configuration.testing.ConfigAssertions.assertRecordedDefaults; +import static io.airlift.configuration.testing.ConfigAssertions.recordDefaults; + +public class TestTeradataConfig +{ + @Test + public void testDefaults() + { + assertRecordedDefaults(recordDefaults(TeradataConfig.class) + .setTeradataCaseSensitivity(TeradataCaseSensitivity.CASE_SENSITIVE)); + } + + @Test + public void testExplicitPropertyMappings() + { + Map properties = ImmutableMap.builder() + .put("teradata.case-sensitivity", "as-defined") + .buildOrThrow(); + + TeradataConfig expected = new TeradataConfig() + .setTeradataCaseSensitivity(TeradataCaseSensitivity.AS_DEFINED); + + assertFullMapping(properties, expected); + } +} diff --git a/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/TestTeradataPlugin.java b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/TestTeradataPlugin.java new file mode 100644 index 000000000000..6139e48af624 --- /dev/null +++ b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/TestTeradataPlugin.java @@ -0,0 +1,42 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package io.trino.plugin.teradata; + +import io.trino.plugin.jdbc.JdbcConnectorFactory; +import io.trino.spi.connector.ConnectorFactory; +import io.trino.testing.TestingConnectorContext; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; + +import java.util.Map; + +import static com.google.common.collect.Iterables.getOnlyElement; +import static org.assertj.core.api.Assertions.assertThat; + +public class TestTeradataPlugin +{ + @Test + public void testCreateConnector() + { + TeradataPlugin plugin = new TeradataPlugin(); + ConnectorFactory factory = getOnlyElement(plugin.getConnectorFactories()); + Assertions.assertNotNull(factory); + assertThat(factory).isInstanceOf(JdbcConnectorFactory.class); + factory.create("test", + Map.of( + "connection-url", "jdbc:teradata://test/"), + new TestingConnectorContext()) + .shutdown(); + } +} diff --git a/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/AuthenticationConfig.java b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/AuthenticationConfig.java new file mode 100644 index 000000000000..605faaf3d0cb --- /dev/null +++ b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/AuthenticationConfig.java @@ -0,0 +1,24 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.trino.plugin.teradata.integration; + +public record AuthenticationConfig( + String userName, + String password) +{ + public AuthenticationConfig() + { + this(null, null); + } +} diff --git a/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/DatabaseConfig.java b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/DatabaseConfig.java new file mode 100644 index 000000000000..0cc9e8ed1168 --- /dev/null +++ b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/DatabaseConfig.java @@ -0,0 +1,156 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package io.trino.plugin.teradata.integration; + +import java.util.Map; + +public class DatabaseConfig +{ + private final String jdbcUrl; + private final String hostName; + private final String databaseName; + private final boolean useClearScape; + private final AuthenticationConfig authConfig; + private final String clearScapeEnvName; + private final Map jdbcProperties; + + private DatabaseConfig(Builder builder) + { + this.jdbcUrl = builder.jdbcUrl; + this.hostName = builder.hostName; + this.databaseName = builder.databaseName; + this.useClearScape = builder.useClearScape; + this.authConfig = builder.authConfig; + this.clearScapeEnvName = builder.clearScapeEnvName; + this.jdbcProperties = builder.jdbcProperties; + } + + public static Builder builder() + { + return new Builder(); + } + + public Builder toBuilder() + { + return builder() + .jdbcUrl(this.jdbcUrl) + .hostName(this.hostName) + .databaseName(this.databaseName) + .useClearScape(this.useClearScape) + .authConfig(this.authConfig) + .clearScapeEnvName(this.clearScapeEnvName) + .jdbcProperties(this.jdbcProperties); + } + + public String getJdbcUrl() + { + return jdbcUrl; + } + + public String getDatabaseName() + { + return databaseName; + } + + public boolean isUseClearScape() + { + return useClearScape; + } + + public AuthenticationConfig getAuthConfig() + { + return authConfig; + } + + public String getClearScapeEnvName() + { + return clearScapeEnvName; + } + + public Map getJdbcProperties() + { + return jdbcProperties; + } + + public String getHostName() + { + return hostName; + } + + public String getTMode() + { + if (jdbcProperties != null && jdbcProperties.containsKey("TMODE")) { + return jdbcProperties.get("TMODE"); + } + return "ANSI"; + } + + public static class Builder + { + private String jdbcUrl; + private String hostName; + private String databaseName = "trino"; + private boolean useClearScape; + private AuthenticationConfig authConfig = new AuthenticationConfig(); + private String clearScapeEnvName; + private Map jdbcProperties; + + public Builder jdbcUrl(String jdbcUrl) + { + this.jdbcUrl = jdbcUrl; + return this; + } + + public Builder databaseName(String databaseName) + { + this.databaseName = databaseName; + return this; + } + + public Builder useClearScape(boolean useClearScape) + { + this.useClearScape = useClearScape; + return this; + } + + public Builder authConfig(AuthenticationConfig authConfig) + { + this.authConfig = authConfig; + return this; + } + + public Builder clearScapeEnvName(String clearScapeEnvName) + { + this.clearScapeEnvName = clearScapeEnvName; + return this; + } + + public Builder jdbcProperties(Map jdbcProperties) + { + this.jdbcProperties = jdbcProperties; + return this; + } + + public Builder hostName(String hostName) + { + this.hostName = hostName; + return this; + } + + public DatabaseConfig build() + { + return new DatabaseConfig(this); + } + } +} diff --git a/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/DatabaseConfigFactory.java b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/DatabaseConfigFactory.java new file mode 100644 index 000000000000..24be04ec2d81 --- /dev/null +++ b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/DatabaseConfigFactory.java @@ -0,0 +1,67 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.trino.plugin.teradata.integration; + +import java.util.HashMap; +import java.util.Map; + +import static io.trino.testing.SystemEnvironmentUtils.isEnvSet; +import static io.trino.testing.SystemEnvironmentUtils.requireEnv; + +public class DatabaseConfigFactory +{ + private DatabaseConfigFactory() {} + + public static DatabaseConfig create(String envName) + { + String userName; + String password; + String hostName = null; + + if (isEnvSet("CLEARSCAPE_TOKEN")) { + userName = TeradataTestConstants.ENV_CLEARSCAPE_USERNAME; + password = requireEnv("CLEARSCAPE_PASSWORD"); + } + else { + userName = requireEnv("TERADATA_USERNAME"); + password = requireEnv("TERADATA_PASSWORD"); + hostName = requireEnv("TERADATA_HOSTNAME"); + } + + String databaseName = envName.replace("-", "_"); + + AuthenticationConfig authConfig = createAuthConfig(userName, password); + return DatabaseConfig.builder() + .hostName(hostName) + .databaseName(databaseName) + .useClearScape(hostName == null) + .authConfig(authConfig) + .clearScapeEnvName(envName) + .jdbcProperties(getJdbcProperties()) + .build(); + } + + public static Map getJdbcProperties() + { + Map propsMap = new HashMap<>(); + propsMap.put("TMODE", "ANSI"); + propsMap.put("CHARSET", "UTF8"); + return propsMap; + } + + private static AuthenticationConfig createAuthConfig(String username, String password) + { + return new AuthenticationConfig(username, password); + } +} diff --git a/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/TeradataQueryRunner.java b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/TeradataQueryRunner.java new file mode 100644 index 000000000000..31f1b807e091 --- /dev/null +++ b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/TeradataQueryRunner.java @@ -0,0 +1,125 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package io.trino.plugin.teradata.integration; + +import com.google.common.collect.ImmutableList; +import com.google.errorprone.annotations.CanIgnoreReturnValue; +import io.airlift.log.Level; +import io.airlift.log.Logger; +import io.airlift.log.Logging; +import io.trino.Session; +import io.trino.metadata.QualifiedObjectName; +import io.trino.plugin.teradata.TeradataPlugin; +import io.trino.plugin.tpch.TpchPlugin; +import io.trino.testing.DistributedQueryRunner; +import io.trino.testing.QueryRunner; +import io.trino.tpch.TpchTable; +import org.intellij.lang.annotations.Language; + +import java.util.List; +import java.util.Locale; + +import static io.trino.plugin.tpch.TpchMetadata.TINY_SCHEMA_NAME; +import static io.trino.testing.TestingSession.testSessionBuilder; +import static java.util.Objects.requireNonNull; +import static org.assertj.core.api.Assertions.assertThat; + +public final class TeradataQueryRunner +{ + private TeradataQueryRunner() {} + + public static Builder builder(TestingTeradataServer server) + { + return new Builder(server); + } + + public static void main(String[] args) + throws Exception + { + Logging logger = Logging.initialize(); + logger.setLevel("io.trino.plugin.teradata", Level.DEBUG); + logger.setLevel("io.trino", Level.INFO); + TestingTeradataServer server = new TestingTeradataServer("TeradataQueryRunner", false); + QueryRunner queryRunner = builder(server).addCoordinatorProperty("http-server.http.port", "8080").setInitialTables(TpchTable.getTables()).build(); + + Logger log = Logger.get(TeradataQueryRunner.class); + log.info("======== SERVER STARTED ========"); + log.info("\n====\n%s\n====", queryRunner.getCoordinator().getBaseUrl()); + } + + public static class Builder + extends DistributedQueryRunner.Builder<Builder> + { + private final TestingTeradataServer server; + private List<TpchTable<?>> initialTables = ImmutableList.of(); + + protected Builder(TestingTeradataServer server) + { + super(testSessionBuilder().setCatalog("teradata").setSchema(server.getDatabaseName()).build()); + this.server = requireNonNull(server, "server is null"); + } + + public void copyTable(QueryRunner queryRunner, QualifiedObjectName table, Session session) + { + @Language("SQL") String sql = String.format("CREATE TABLE %s AS SELECT * FROM %s", table.objectName(), table); + queryRunner.execute(session, sql); + assertThat(queryRunner.execute(session, "SELECT count(*) FROM " + table.objectName()).getOnlyValue()).as("Table is not loaded properly: %s", table.objectName()).isEqualTo(queryRunner.execute(session, "SELECT count(*) FROM " + table).getOnlyValue()); + } + + public void copyTpchTables(QueryRunner queryRunner, String sourceCatalog, String sourceSchema, Session session, Iterable<TpchTable<?>> tables) + { + for (TpchTable<?> table : tables) { + copyTable(queryRunner, sourceCatalog, sourceSchema, table.getTableName().toLowerCase(Locale.ENGLISH), session); + } + } + + public void copyTpchTables(QueryRunner queryRunner, String sourceCatalog, String sourceSchema, Iterable<TpchTable<?>> tables) + { + copyTpchTables(queryRunner, sourceCatalog, sourceSchema, queryRunner.getDefaultSession(), tables); + } + + public void copyTable(QueryRunner queryRunner, String sourceCatalog, String sourceSchema, String sourceTable, Session session) + { + QualifiedObjectName table = new QualifiedObjectName(sourceCatalog, sourceSchema, sourceTable); + if (!server.isTableExists(sourceTable)) { + copyTable(queryRunner, table, session); + } + } + + @CanIgnoreReturnValue + 
public Builder setInitialTables(Iterable> initialTables) + { + this.initialTables = ImmutableList.copyOf(requireNonNull(initialTables, "initialTables is null")); + return this; + } + + @Override + public DistributedQueryRunner build() + throws Exception + { + super.setAdditionalSetup(runner -> { + runner.installPlugin(new TpchPlugin()); + runner.createCatalog("tpch", "tpch"); + + runner.installPlugin(new TeradataPlugin()); + runner.createCatalog("teradata", "teradata", server.getCatalogProperties()); + + copyTpchTables(runner, "tpch", TINY_SCHEMA_NAME, initialTables); + }); + return super.build(); + } + } +} diff --git a/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/TeradataTestConstants.java b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/TeradataTestConstants.java new file mode 100644 index 000000000000..45d0232bb3e0 --- /dev/null +++ b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/TeradataTestConstants.java @@ -0,0 +1,20 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.trino.plugin.teradata.integration; + +public interface TeradataTestConstants +{ + String ENV_CLEARSCAPE_URL = "https://api.clearscape.teradata.com"; + String ENV_CLEARSCAPE_USERNAME = "demo_user"; +} diff --git a/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/TestTeradataConnectorTest.java b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/TestTeradataConnectorTest.java new file mode 100644 index 000000000000..da8fa79d3d3d --- /dev/null +++ b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/TestTeradataConnectorTest.java @@ -0,0 +1,479 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package io.trino.plugin.teradata.integration; + +import io.trino.Session; +import io.trino.plugin.jdbc.BaseJdbcConnectorTest; +import io.trino.sql.query.QueryAssertions; +import io.trino.testing.QueryRunner; +import io.trino.testing.TestingConnectorBehavior; +import io.trino.testing.TestingNames; +import io.trino.testing.assertions.TrinoExceptionAssert; +import io.trino.testing.sql.SqlExecutor; +import io.trino.testing.sql.TestTable; +import org.assertj.core.api.AssertProvider; +import org.intellij.lang.annotations.Language; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.Test; + +import java.util.List; +import java.util.Optional; +import java.util.OptionalInt; +import java.util.function.Consumer; + +import static io.trino.plugin.teradata.integration.clearscape.ClearScapeEnvironmentUtils.generateUniqueEnvName; +import static io.trino.testing.TestingNames.randomNameSuffix; +import static java.lang.String.format; +import static java.util.Objects.requireNonNull; +import static org.assertj.core.api.Assertions.assertThat; +import static org.junit.jupiter.api.Assumptions.abort; + +final class TestTeradataConnectorTest + extends BaseJdbcConnectorTest +{ + private static final int TERADATA_OBJECT_NAME_LIMIT = 128; + + private TestingTeradataServer database; + + private static void verifyResultOrFailure(AssertProvider<QueryAssertions.QueryAssert> queryAssertProvider, Consumer<QueryAssertions.QueryAssert> verifyResults, + Consumer<TrinoExceptionAssert> verifyFailure) + { + requireNonNull(verifyResults, "verifyResults is null"); + requireNonNull(verifyFailure, "verifyFailure is null"); + QueryAssertions.QueryAssert queryAssert = assertThat(queryAssertProvider); + try { + queryAssert.succeeds(); + } + catch (AssertionError ignored) { + // The query failed; hand the failure to the caller instead of asserting on results + verifyFailure.accept(queryAssert.failure()); + return; + } + verifyResults.accept(queryAssert); + } + + @Override + protected SqlExecutor onRemoteDatabase() + { + return database; + } + + @Override + protected QueryRunner createQueryRunner() + throws Exception + { + database = closeAfterClass(new TestingTeradataServer(generateUniqueEnvName(getClass()), true)); + // Register this specific instance for this test class + return TeradataQueryRunner.builder(database).setInitialTables(REQUIRED_TPCH_TABLES).build(); + } + + @Override + protected boolean hasBehavior(TestingConnectorBehavior connectorBehavior) + { + return switch (connectorBehavior) { + case SUPPORTS_ADD_COLUMN, + SUPPORTS_AGGREGATION_PUSHDOWN, + SUPPORTS_COMMENT_ON_COLUMN, + SUPPORTS_COMMENT_ON_TABLE, + SUPPORTS_CREATE_MATERIALIZED_VIEW, + SUPPORTS_CREATE_TABLE_WITH_COLUMN_COMMENT, + SUPPORTS_CREATE_TABLE_WITH_TABLE_COMMENT, + SUPPORTS_CREATE_VIEW, + SUPPORTS_DELETE, + SUPPORTS_DEREFERENCE_PUSHDOWN, + SUPPORTS_DROP_COLUMN, + SUPPORTS_DROP_SCHEMA_CASCADE, + SUPPORTS_INSERT, + SUPPORTS_JOIN_PUSHDOWN, + SUPPORTS_JOIN_PUSHDOWN_WITH_DISTINCT_FROM, + SUPPORTS_JOIN_PUSHDOWN_WITH_VARCHAR_INEQUALITY, + SUPPORTS_LIMIT_PUSHDOWN, + SUPPORTS_MAP_TYPE, + SUPPORTS_MERGE, + SUPPORTS_NATIVE_QUERY, + SUPPORTS_NEGATIVE_DATE, + SUPPORTS_PREDICATE_ARITHMETIC_EXPRESSION_PUSHDOWN, + SUPPORTS_PREDICATE_EXPRESSION_PUSHDOWN, + SUPPORTS_PREDICATE_PUSHDOWN, + SUPPORTS_PREDICATE_PUSHDOWN_WITH_VARCHAR_INEQUALITY, + SUPPORTS_RENAME_COLUMN, + SUPPORTS_RENAME_SCHEMA, + SUPPORTS_RENAME_TABLE, + SUPPORTS_ROW_LEVEL_DELETE, + SUPPORTS_ROW_TYPE, + SUPPORTS_SET_COLUMN_TYPE, + SUPPORTS_TOPN_PUSHDOWN, + SUPPORTS_TOPN_PUSHDOWN_WITH_VARCHAR, + SUPPORTS_TRUNCATE, + SUPPORTS_UPDATE -> false; + case SUPPORTS_CREATE_SCHEMA, + SUPPORTS_CREATE_TABLE -> true; + default -> super.hasBehavior(connectorBehavior); + }; + } + + @AfterAll + public void cleanupTestDatabase() + { + database = null; + } + + @Override + protected OptionalInt 
maxSchemaNameLength() + { + return OptionalInt.of(TERADATA_OBJECT_NAME_LIMIT); + } + + @Override // Override because the expected error message is different + protected void verifySchemaNameLengthFailurePermissible(Throwable e) + { + assertThat(e).hasMessage(format("Schema name must be shorter than or equal to '%s' characters but got '%s'", TERADATA_OBJECT_NAME_LIMIT, TERADATA_OBJECT_NAME_LIMIT + 1)); + } + + @Override // Override because Teradata Object name limit is 128 characters + protected OptionalInt maxColumnNameLength() + { + return OptionalInt.of(TERADATA_OBJECT_NAME_LIMIT); + } + + @Override // Override because the expected error message is different + protected void verifyColumnNameLengthFailurePermissible(Throwable e) + { + assertThat(e).hasMessageMatching(format("Column name must be shorter than or equal to '%s' characters but got '%s': '.*'", TERADATA_OBJECT_NAME_LIMIT, + TERADATA_OBJECT_NAME_LIMIT + 1)); + } + + @Override // Override to skip the data mapping smoke test + @Test + public void testDataMappingSmokeTest() + { + skipTestUnless(false); + } + + @Override // Override because Teradata Table name limit is 128 characters + protected OptionalInt maxTableNameLength() + { + return OptionalInt.of(TERADATA_OBJECT_NAME_LIMIT); + } + + @Override // Override because the expected error message is different + protected void verifyTableNameLengthFailurePermissible(Throwable e) + { + assertThat(e).hasMessageMatching(format("Table name must be shorter than or equal to '%s' characters but got '%s'", TERADATA_OBJECT_NAME_LIMIT, + TERADATA_OBJECT_NAME_LIMIT + 1)); + } + + @Override // Overriding this test case as Teradata defines varchar with a length. + @Test + public void testVarcharCastToDateInPredicate() + { + String tableName = "varchar_as_date_pred"; + try (TestTable table = newTrinoTable(tableName, "(a varchar(50))", List.of("'999-09-09'", "'1005-09-09'", "'2005-06-06'", "'2005-06-6'", "'2005-6-06'", "'2005-6-6'", "' " + + "2005-06-06'", "'2005-06-06 '", "' +2005-06-06'", "'02005-06-06'", "'2005-09-06'", "'2005-09-6'", "'2005-9-06'", "'2005-9-6'", "' 2005-09-06'", "'2005-09-06 '", + "' +2005-09-06'", "'02005-09-06'", "'2005-09-09'", "'2005-09-9'", "'2005-9-09'", "'2005-9-9'", "' 2005-09-09'", "'2005-09-09 '", "' +2005-09-09'", "'02005-09-09" + + "'", "'2005-09-10'", "'2005-9-10'", "' 2005-09-10'", "'2005-09-10 '", "' +2005-09-10'", "'02005-09-10'", "'2005-09-20'", "'2005-9-20'", "' 2005-09-20'", + "'2005-09-20 '", "' +2005-09-20'", "'02005-09-20'", "'9999-09-09'", "'99999-09-09'"))) { + for (String date : List.of("2005-09-06", "2005-09-09", "2005-09-10")) { + for (String operator : List.of("=", "<=", "<", ">", ">=", "!=", "IS DISTINCT FROM", "IS NOT DISTINCT FROM")) { + assertThat(query("SELECT a FROM %s WHERE CAST(a AS date) %s DATE '%s'".formatted(table.getName(), operator, date))).hasCorrectResultsRegardlessOfPushdown(); + } + } + } + try (TestTable table = newTrinoTable(tableName, "(a varchar(50))", List.of("'2005-06-bad-date'", "'2005-09-10'"))) { + assertThat(query("SELECT a FROM %s WHERE CAST(a AS date) < DATE '2005-09-10'".formatted(table.getName()))).failure().hasMessage("Value cannot be cast to date: " + + "2005-06-bad-date"); + verifyResultOrFailure(query("SELECT a FROM %s WHERE CAST(a AS date) = DATE '2005-09-10'".formatted(table.getName())), + queryAssert -> queryAssert.skippingTypesCheck().matches("VALUES '2005-09-10'"), failureAssert -> failureAssert.hasMessage("Value cannot be cast to date: " + + "2005-06-bad-date")); + } + try (TestTable table = 
newTrinoTable(tableName, "(a varchar(50))", List.of("'2005-09-10'"))) { + // 2005-09-01, when written as 2005-09-1, is a prefix of an existing data point: 2005-09-10 + assertThat(query("SELECT a FROM %s WHERE CAST(a AS date) != DATE '2005-09-01'".formatted(table.getName()))).skippingTypesCheck().matches("VALUES '2005-09-10'"); + } + } + + // Tests CREATE TABLE AS SELECT functionality with Teradata syntax + // Overridden to handle Teradata's specific "WITH DATA" syntax for table creation + @Override + @Test + public void testCreateTableAsSelect() + { + String tableName = "test_ctas" + randomNameSuffix(); + assertUpdate("CREATE TABLE IF NOT EXISTS " + tableName + " AS SELECT name, regionkey FROM nation", "SELECT count(*) FROM nation"); + assertTableColumnNames(tableName, "name", "regionkey"); + assertThat(getTableComment(tableName)).isNull(); + assertUpdate("DROP TABLE " + tableName); + + // Some connectors support CREATE TABLE AS but not the ordinary CREATE TABLE. Let's test CTAS IF NOT EXISTS with a table that is guaranteed to exist. + assertUpdate("CREATE TABLE IF NOT EXISTS nation AS SELECT nationkey, regionkey FROM nation", 0); + assertTableColumnNames("nation", "nationkey", "name", "regionkey", "comment"); + + assertCreateTableAsSelect("SELECT nationkey, name, regionkey FROM nation", "SELECT count(*) FROM nation"); + + assertCreateTableAsSelect("SELECT mktsegment, sum(acctbal) x FROM customer GROUP BY mktsegment", "SELECT count(DISTINCT mktsegment) FROM customer"); + + assertCreateTableAsSelect("SELECT count(*) x FROM nation JOIN region ON nation.regionkey = region.regionkey", "SELECT 1"); + + assertCreateTableAsSelect("SELECT nationkey FROM nation ORDER BY nationkey LIMIT 10", "SELECT 10"); + + // Tests for CREATE TABLE with UNION ALL: exercises PushTableWriteThroughUnion optimizer + + assertCreateTableAsSelect("SELECT name, nationkey, regionkey FROM nation WHERE nationkey % 2 = 0 UNION ALL " + "SELECT name, nationkey, regionkey FROM nation WHERE " + + "nationkey % 2 = 1", "SELECT name, nationkey, regionkey FROM nation", "SELECT count(*) FROM nation"); + + assertCreateTableAsSelect(Session.builder(getSession()).setSystemProperty("redistribute_writes", "true").build(), "SELECT CAST(nationkey AS BIGINT) nationkey, regionkey " + + "FROM nation UNION ALL " + "SELECT 1234567890, 123", "SELECT nationkey, regionkey FROM nation UNION ALL " + "SELECT 1234567890, 123", "SELECT count(*) + 1 FROM " + + "nation"); + + assertCreateTableAsSelect(Session.builder(getSession()).setSystemProperty("redistribute_writes", "false").build(), "SELECT CAST(nationkey AS BIGINT) nationkey, regionkey" + + " FROM nation UNION ALL " + "SELECT 1234567890, 123", "SELECT nationkey, regionkey FROM nation UNION ALL " + "SELECT 1234567890, 123", "SELECT count(*) + 1 FROM " + + "nation"); + + tableName = "test_ctas" + randomNameSuffix(); + assertThat(query("EXPLAIN ANALYZE CREATE TABLE " + tableName + " AS SELECT name FROM nation")).succeeds(); + assertThat(query("SELECT * from " + tableName)).matches("SELECT name FROM nation"); + assertUpdate("DROP TABLE " + tableName); + } + + @Override // Overriding this test case as Teradata does not support negative dates. + @Test + public void testDateYearOfEraPredicate() + { + assertQuery("SELECT orderdate FROM orders WHERE orderdate = DATE '1997-09-14'", "VALUES DATE '1997-09-14'"); + } + + @Override // Override this test case as Teradata has different syntax for creating tables with AS SELECT statement. 
+ @Test + public void verifySupportsRowLevelUpdateDeclaration() + { + String testTableName = "test_supports_update"; + try (TestTable table = newTrinoTable(testTableName, "AS ( SELECT * FROM nation) WITH DATA")) { + assertQueryFails("UPDATE " + table.getName() + " SET nationkey = nationkey * 100 WHERE regionkey = 2", "This connector does not support modifying table rows"); + } + } + + @Override // Overriding this test case as Teradata does not support multi-row (k, v) AS VALUES lists in INSERT statements + @Test + public void testCharVarcharComparison() + { + String testTableName = "test_char_varchar"; + try (TestTable table = newTrinoTable(testTableName, "(k int, v char(3))", List.of("-1, CAST(NULL AS char(3))", "3, CAST(' ' AS char(3))", "6, CAST('x ' AS char(3))"))) { + assertQuery("SELECT k, v FROM " + table.getName() + " WHERE v = CAST(' ' AS varchar(2))", "VALUES (3, ' ')"); + assertQuery("SELECT k, v FROM " + table.getName() + " WHERE v = CAST(' ' AS varchar(4))", "VALUES (3, ' ')"); + assertQuery("SELECT k, v FROM " + table.getName() + " WHERE v = CAST('x ' AS varchar(2))", "VALUES (6, 'x ')"); + } + } + + @Override // Overriding this test case as Teradata does not support multi-row (k, v) AS VALUES lists in INSERT statements + @Test + public void testVarcharCharComparison() + { + try (TestTable table = newTrinoTable("test_varchar_char", "(k int, v char(3))", List.of("-1, CAST(NULL AS varchar(3))", "0, CAST('' AS varchar(3))", "1, CAST(' ' AS" + " varchar(3))", "2, CAST(' ' AS varchar(3))", "3, CAST(' ' AS varchar(3))", "4, CAST('x' AS varchar(3))", "5, CAST('x ' AS varchar(3))", + "6, CAST('x ' AS " + "varchar(3))"))) { + // Teradata's CHAR type automatically pads values with spaces to the defined length + assertQuery("SELECT k, v FROM " + table.getName() + " WHERE v = CAST(' ' AS char(2))", "VALUES (0, ' '), (1, ' '), (2, ' '), (3, ' ')"); + assertQuery("SELECT k, v FROM " + table.getName() + " WHERE v = CAST('x ' AS char(2))", "VALUES (4, 'x '), (5, 'x '), (6, 'x ')"); + } + }
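+ + // For example, in testVarcharCharComparison above, row 4 inserts varchar 'x' into the char(3) column, where it is stored space-padded to length 3, + // which is why rows 4, 5, and 6 all satisfy the comparison against the char(2) literal 'x '.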
+ case "boolean", "tinyint", "date", "real", "double", "varchar", "time", "time(6)", "timestamp", "timestamp(6)", "varbinary", "timestamp(3) with time zone", + "timestamp(6) with time zone", "U&'a \\000a newline'" -> Optional.empty(); + default -> Optional.of(dataMappingTestSetup); + }; + } + + @Override + @Test + public void testTimestampWithTimeZoneCastToDatePredicate() + { + abort("Skipping as connector does not support Timestamp with Time Zone data type"); + } + + @Override + @Test + public void testTimestampWithTimeZoneCastToTimestampPredicate() + { + abort("Skipping as connector does not support Timestamp with Time Zone data type"); + } + + @Override + @Test + public void testRenameSchema() + { + abort("Skipping as connector does not support RENAME SCHEMA"); + } + + @Override + @Test + public void testColumnName() + { + abort("Skipping as connector does not support column level write operations"); + } + + @Override + @Test + public void testCreateTableAsSelectWithUnicode() + { + abort("Skipping as connector does not support creating table with UNICODE characters"); + } + + @Override + @Test + public void testUpdateNotNullColumn() + { + abort("Skipping as connector does not support insert operations"); + } + + @Override + @Test + public void testWriteBatchSizeSessionProperty() + { + abort("Skipping as connector does not support insert operations"); + } + + @Override + @Test + public void testInsertWithoutTemporaryTable() + { + abort("Skipping as connector does not support insert operations"); + } + + @Override + @Test + public void testWriteTaskParallelismSessionProperty() + { + abort("Skipping as connector does not support insert operations"); + } + + @Override + @Test + public void testInsertIntoNotNullColumn() + { + abort("Skipping as connector does not support insert operations"); + } + + @Override + @Test + public void testDropSchemaCascade() + { + abort("Skipping as connector does not support dropping schemas with CASCADE option"); + } + + @Override + @Test + public void testAddColumn() + { + abort("Skipping as connector does not support column level write operations"); + } + + @Override + @Test + public void testDropNonEmptySchemaWithTable() + { + abort("Skipping as connector does not support drop schemas"); + } + + @Override + @Test + public void verifySupportsUpdateDeclaration() + { + abort("Skipping as connector does not support update operations"); + } + + @Override + @Test + public void testDropNotNullConstraint() + { + abort("Skipping as connector does not support dropping a not null constraint"); + } + + @Override + @Test + public void testExecuteProcedureWithInvalidQuery() + { + abort("Skipping as connector does not support execute procedure"); + } + + @Override + @Test + public void testCreateTableAsSelectNegativeDate() + { + abort("Skipping as connector does not support creating table with negative date"); + } + + // Creates CTAS queries with proper session and row count validation + // Overridden to use Teradata's "WITH DATA" syntax for CREATE TABLE AS SELECT statements + @Override + protected void assertCreateTableAsSelect(Session session, String query, String expectedQuery, String rowCountQuery) + { + String table = "test_ctas_" + TestingNames.randomNameSuffix(); + assertUpdate(session, "CREATE TABLE " + table + " AS ( " + query + ") WITH DATA", rowCountQuery); + assertQuery(session, "SELECT * FROM " + table, expectedQuery); + assertUpdate(session, "DROP TABLE " + table); + assertThat(getQueryRunner().tableExists(session, table)).isFalse(); + } + + // Creates new 
+ + // Creates new Trino test tables with proper schema handling + // Overridden to handle Teradata's schema.table naming format and table creation syntax + @Override + protected TestTable newTrinoTable(String namePrefix, @Language("SQL") String tableDefinition, List<String> rowsToInsert) + { + String tableName; + + // Check if namePrefix already contains schema (contains a dot) + if (namePrefix.contains(".")) { + // namePrefix already has schema.tablename format + tableName = namePrefix; + } + else { + // Append current schema to namePrefix + String schemaName = getSession().getSchema().orElseThrow(); + tableName = schemaName + "." + namePrefix; + } + return new TestTable(database, tableName, tableDefinition, rowsToInsert); + } + + @Test + public void testTeradataNumberDataType() + { + try (TestTable table = newTrinoTable("test_number", "(id INTEGER, " + "number_col NUMBER(10,2), " + "number_default NUMBER, " + "number_large NUMBER(38,10))", List.of( + "1, CAST(12345.67 AS NUMBER(10,2)), CAST(999999999999999 AS NUMBER), CAST(1234567890123456789012345678.1234567890 AS NUMBER(38,10))", "2, CAST(-99999.99 AS " + "NUMBER(10,2)), CAST(-123456789012345 AS NUMBER), CAST(-9999999999999999999999999999.9999999999 AS NUMBER(38,10))", + "3, CAST(0.00 AS NUMBER(10,2)), CAST" + "(0 AS NUMBER), CAST(0.0000000000 AS NUMBER(38,10))"))) { + assertThat(query(format("SELECT number_col FROM %s WHERE id = 1", table.getName()))).matches("VALUES CAST(12345.67 AS DECIMAL(10,2))"); + assertThat(query(format("SELECT number_default FROM %s WHERE id = 1", table.getName()))).matches("VALUES CAST(999999999999999 AS DECIMAL(38,0))"); + assertThat(query(format("SELECT number_large FROM %s WHERE id = 1", table.getName()))).matches("VALUES CAST(1234567890123456789012345678.1234567890 AS DECIMAL(38,10)" + ")"); + assertThat(query(format("SELECT number_col FROM %s WHERE id = 2", table.getName()))).matches("VALUES CAST(-99999.99 AS DECIMAL(10,2))"); + assertThat(query(format("SELECT number_col FROM %s WHERE id = 3", table.getName()))).matches("VALUES CAST(0.00 AS DECIMAL(10,2))"); + } + } + + @Test + public void testTeradataCharacterDataType() + { + try (TestTable table = newTrinoTable("test_character", "(id INTEGER, " + "char_col CHARACTER(5), " + "char_default CHARACTER, " + "char_large CHARACTER(100))", List.of( + "1, CAST('HELLO' AS CHARACTER(5)), CAST('A' AS CHARACTER), CAST('TERADATA' AS CHARACTER(100))", + "2, CAST('WORLD' AS CHARACTER(5)), CAST('B' AS CHARACTER), CAST" + "('CHARACTER' AS CHARACTER(100))", "3, CAST('' AS CHARACTER(5)), CAST('C' AS CHARACTER), CAST" + + "('' AS CHARACTER(100))"))) { + assertThat(query(format("SELECT char_col FROM %s WHERE id = 1", table.getName()))).matches("VALUES CAST('HELLO' AS CHAR(5))"); + assertThat(query(format("SELECT char_default FROM %s WHERE id = 1", table.getName()))).matches("VALUES CAST('A' AS CHAR(1))"); + assertThat(query(format("SELECT char_large FROM %s WHERE id = 1", table.getName()))).matches("VALUES CAST('TERADATA' AS CHAR(100))"); + assertThat(query(format("SELECT char_col FROM %s WHERE id = 3", table.getName()))).matches("VALUES CAST('' AS CHAR(5))"); + } + } +} diff --git a/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/TestTeradataTypeMapping.java b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/TestTeradataTypeMapping.java new file mode 100644 index 000000000000..69023fbb5b13 --- /dev/null +++ b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/TestTeradataTypeMapping.java @@ -0,0 +1,281 @@ +/* + * Licensed
under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.trino.plugin.teradata.integration; + +import io.trino.testing.AbstractTestQueryFramework; +import io.trino.testing.QueryRunner; +import io.trino.testing.datatype.CreateAndInsertDataSetup; +import io.trino.testing.datatype.DataSetup; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.Test; + +import java.sql.SQLException; + +import static io.trino.plugin.teradata.integration.clearscape.ClearScapeEnvironmentUtils.generateUniqueEnvName; +import static io.trino.spi.type.BigintType.BIGINT; +import static io.trino.spi.type.CharType.createCharType; +import static io.trino.spi.type.DateType.DATE; +import static io.trino.spi.type.DecimalType.createDecimalType; +import static io.trino.spi.type.DoubleType.DOUBLE; +import static io.trino.spi.type.IntegerType.INTEGER; +import static io.trino.spi.type.SmallintType.SMALLINT; +import static io.trino.spi.type.TinyintType.TINYINT; +import static io.trino.spi.type.VarcharType.createVarcharType; +import static io.trino.testing.datatype.SqlDataTypeTest.create; +import static org.assertj.core.api.AssertionsForClassTypes.assertThatThrownBy; + +final class TestTeradataTypeMapping + extends AbstractTestQueryFramework +{ + private TestingTeradataServer database; + + @Override + protected QueryRunner createQueryRunner() + throws Exception + { + database = closeAfterClass(new TestingTeradataServer(generateUniqueEnvName(getClass()), true)); + // Register this specific instance for this test class + return TeradataQueryRunner.builder(database).build(); + } + + @AfterAll + void cleanupTestClass() + { + database = null; + } + + @Test + void testByteint() + { + create() + .addRoundTrip("byteint", "0", TINYINT, "CAST(0 AS TINYINT)") + .addRoundTrip("byteint", "127", TINYINT, "CAST(127 AS TINYINT)") + .addRoundTrip("byteint", "-128", TINYINT, "CAST(-128 AS TINYINT)") + .addRoundTrip("byteint", "null", TINYINT, "CAST(null AS TINYINT)") + .execute(getQueryRunner(), teradataJDBCCreateAndInsert("byteint")); + } + + @Test + void testSmallint() + { + create() + .addRoundTrip("smallint", "0", SMALLINT, "CAST(0 AS SMALLINT)") + .addRoundTrip("smallint", "32767", SMALLINT, "CAST(32767 AS SMALLINT)") + .addRoundTrip("smallint", "-32768", SMALLINT, "CAST(-32768 AS SMALLINT)") + .addRoundTrip("smallint", "null", SMALLINT, "CAST(null AS SMALLINT)") + .execute(getQueryRunner(), teradataJDBCCreateAndInsert("smallint")); + } + + @Test + void testInteger() + { + create() + .addRoundTrip("integer", "0", INTEGER, "0") + .addRoundTrip("integer", "2147483647", INTEGER, "2147483647") + .addRoundTrip("integer", "-2147483648", INTEGER, "-2147483648") + .addRoundTrip("integer", "NULL", INTEGER, "CAST(NULL AS INTEGER)") + .execute(getQueryRunner(), teradataJDBCCreateAndInsert("integer")); + } + + @Test + void testBigint() + { + create() + .addRoundTrip("bigint", "0", BIGINT, "CAST(0 AS BIGINT)") + .addRoundTrip("bigint", "9223372036854775807", BIGINT, "9223372036854775807") + .addRoundTrip("bigint", "-9223372036854775808", 
BIGINT, "-9223372036854775808") + .addRoundTrip("bigint", "NULL", BIGINT, "CAST(NULL AS BIGINT)") + .execute(getQueryRunner(), teradataJDBCCreateAndInsert("bigint")); + } + + @Test + void testFloat() + { + create() + .addRoundTrip("float", "0", DOUBLE, "CAST(0 AS DOUBLE)") + .addRoundTrip("real", "0", DOUBLE, "CAST(0 AS DOUBLE)") + .addRoundTrip("double precision", "0", DOUBLE, "CAST(0 AS DOUBLE)") + .addRoundTrip("float", "1.797e308", DOUBLE, "1.797e308") + .addRoundTrip("real", "1.797e308", DOUBLE, "1.797e308") + .addRoundTrip("double precision", "1.797e308", DOUBLE, "1.797e308") + .addRoundTrip("float", "2.226e-308", DOUBLE, "2.226e-308") + .addRoundTrip("real", "2.226e-308", DOUBLE, "2.226e-308") + .addRoundTrip("double precision", "2.226e-308", DOUBLE, "2.226e-308") + .addRoundTrip("float", "NULL", DOUBLE, "CAST(NULL AS DOUBLE)") + .addRoundTrip("real", "NULL", DOUBLE, "CAST(NULL AS DOUBLE)") + .addRoundTrip("double precision", "NULL", DOUBLE, "CAST(NULL AS DOUBLE)") + .execute(getQueryRunner(), teradataJDBCCreateAndInsert("float")); + } + + @Test + void testDecimal() + { + create() + .addRoundTrip("decimal(3, 0)", "0", createDecimalType(3, 0), "CAST('0' AS decimal(3, 0))") + .addRoundTrip("numeric(3, 0)", "0", createDecimalType(3, 0), "CAST('0' AS decimal(3, 0))") + .addRoundTrip("decimal(3, 1)", "0.0", createDecimalType(3, 1), "CAST('0.0' AS decimal(3, 1))") + .addRoundTrip("numeric(3, 1)", "0.0", createDecimalType(3, 1), "CAST('0.0' AS decimal(3, 1))") + .addRoundTrip("decimal(1, 0)", "1", createDecimalType(1, 0), "CAST('1' AS decimal(1, 0))") + .addRoundTrip("numeric(1, 0)", "1", createDecimalType(1, 0), "CAST('1' AS decimal(1, 0))") + .addRoundTrip("decimal(1, 0)", "-1", createDecimalType(1, 0), "CAST('-1' AS decimal(1, 0))") + .addRoundTrip("numeric(1, 0)", "-1", createDecimalType(1, 0), "CAST('-1' AS decimal(1, 0))") + .addRoundTrip("decimal(3, 0)", "1", createDecimalType(3, 0), "CAST('1' AS decimal(3, 0))") + .addRoundTrip("numeric(3, 0)", "1", createDecimalType(3, 0), "CAST('1' AS decimal(3, 0))") + .addRoundTrip("decimal(3, 0)", "-1", createDecimalType(3, 0), "CAST('-1' AS decimal(3, 0))") + .addRoundTrip("numeric(3, 0)", "-1", createDecimalType(3, 0), "CAST('-1' AS decimal(3, 0))") + .addRoundTrip("decimal(3, 0)", "123", createDecimalType(3, 0), "CAST('123' AS decimal(3, 0))") + .addRoundTrip("numeric(3, 0)", "123", createDecimalType(3, 0), "CAST('123' AS decimal(3, 0))") + .addRoundTrip("decimal(3, 0)", "-123", createDecimalType(3, 0), "CAST('-123' AS decimal(3, 0))") + .addRoundTrip("numeric(3, 0)", "-123", createDecimalType(3, 0), "CAST('-123' AS decimal(3, 0))") + .addRoundTrip("decimal(3, 1)", "10.0", createDecimalType(3, 1), "CAST('10.0' AS decimal(3, 1))") + .addRoundTrip("numeric(3, 1)", "10.0", createDecimalType(3, 1), "CAST('10.0' AS decimal(3, 1))") + .addRoundTrip("decimal(3, 1)", "12.3", createDecimalType(3, 1), "CAST('12.3' AS decimal(3, 1))") + .addRoundTrip("numeric(3, 1)", "12.3", createDecimalType(3, 1), "CAST('12.3' AS decimal(3, 1))") + .addRoundTrip("decimal(3, 1)", "-12.3", createDecimalType(3, 1), "CAST('-12.3' AS decimal(3, 1))") + .addRoundTrip("numeric(3, 1)", "-12.3", createDecimalType(3, 1), "CAST('-12.3' AS decimal(3, 1))") + .addRoundTrip("decimal(38, 0)", "12345678901234567890123456789012345678", createDecimalType(38, 0), "CAST('12345678901234567890123456789012345678' AS decimal(38, 0))") + .addRoundTrip("numeric(38, 0)", "12345678901234567890123456789012345678", createDecimalType(38, 0), "CAST('12345678901234567890123456789012345678' AS 
decimal(38, 0))") + .addRoundTrip("decimal(38, 0)", "-12345678901234567890123456789012345678", createDecimalType(38, 0), "CAST('-12345678901234567890123456789012345678' AS decimal(38, 0))") + .addRoundTrip("numeric(38, 0)", "-12345678901234567890123456789012345678", createDecimalType(38, 0), "CAST('-12345678901234567890123456789012345678' AS decimal(38, 0))") + .addRoundTrip("decimal(1, 0)", "null", createDecimalType(1, 0), "CAST(null AS decimal(1, 0))") + .execute(getQueryRunner(), teradataJDBCCreateAndInsert("decimal")); + } + + @Test + void testNumber() + { + create() + .addRoundTrip("numeric(3)", "0", createDecimalType(3, 0), "CAST('0' AS decimal(3, 0))") + .addRoundTrip("number(5,2)", "0", createDecimalType(5, 2), "CAST('0' AS decimal(5, 2))") + .addRoundTrip("number(38)", "0", createDecimalType(38, 0), "CAST('0' AS decimal(38, 0))") + .addRoundTrip("number(38,2)", "123456789012345678901234567890123456.78", createDecimalType(38, 2), "CAST('123456789012345678901234567890123456.78' AS decimal(38, 2))") + .addRoundTrip("numeric(38)", "12345678901234567890123456789012345678", createDecimalType(38, 0), "CAST('12345678901234567890123456789012345678' AS decimal(38, 0))") + .addRoundTrip("numeric(3)", "null", createDecimalType(3, 0), "CAST(null AS decimal(3, 0))") + .execute(getQueryRunner(), teradataJDBCCreateAndInsert("number")); + } + + @Test + void testChar() + { + create() + .addRoundTrip("char(3)", "''", createCharType(3), "CAST('' AS char(3))") + .addRoundTrip("char(3)", "' '", createCharType(3), "CAST(' ' AS char(3))") + .addRoundTrip("char(3)", "' '", createCharType(3), "CAST(' ' AS char(3))") + .addRoundTrip("char(3)", "' '", createCharType(3), "CAST(' ' AS char(3))") + .addRoundTrip("char(3)", "'A'", createCharType(3), "CAST('A' AS char(3))") + .addRoundTrip("char(3)", "'A '", createCharType(3), "CAST('A ' AS char(3))") + .addRoundTrip("char(3)", "' B '", createCharType(3), "CAST(' B ' AS char(3))") + .addRoundTrip("char(3)", "' C'", createCharType(3), "CAST(' C' AS char(3))") + .addRoundTrip("char(3)", "'AB'", createCharType(3), "CAST('AB' AS char(3))") + .addRoundTrip("char(3)", "'ABC'", createCharType(3), "CAST('ABC' AS char(3))") + .addRoundTrip("char(3)", "'A C'", createCharType(3), "CAST('A C' AS char(3))") + .addRoundTrip("char(3)", "' BC'", createCharType(3), "CAST(' BC' AS char(3))") + .addRoundTrip("char(3)", "null", createCharType(3), "CAST(null AS char(3))") + .execute(getQueryRunner(), teradataJDBCCreateAndInsert("char")); + String tmode = database.getTMode(); + if (tmode.equals("TERA")) { + // truncation + create() + .addRoundTrip("char(3)", "'ABCD'", createCharType(3), "CAST('ABCD' AS char(3))") + .execute(getQueryRunner(), teradataJDBCCreateAndInsert("chart")); + } + else { + // Error on truncation + assertThatThrownBy(() -> + create() + .addRoundTrip("char(3)", "'ABCD'", createCharType(3), "CAST('ABCD' AS char(3))") + .execute(getQueryRunner(), teradataJDBCCreateAndInsert("chart"))) + .isInstanceOf(RuntimeException.class) + .hasCauseInstanceOf(SQLException.class) + .cause() + .hasMessageContaining("Right truncation of string data"); + } + // max-size + create() + .addRoundTrip("char(64000)", "'max'", createCharType(64000), "CAST('max' AS char(64000))") + .execute(getQueryRunner(), teradataJDBCCreateAndInsert("charl")); + } + + @Test + void testVarchar() + { + create() + .addRoundTrip("varchar(32)", "''", createVarcharType(32), "CAST('' AS varchar(32))") + .addRoundTrip("varchar(32)", "' '", createVarcharType(32), "CAST(' ' AS varchar(32))") + 
.addRoundTrip("varchar(32)", "' '", createVarcharType(32), "CAST(' ' AS varchar(32))") + .addRoundTrip("varchar(32)", "' '", createVarcharType(32), "CAST(' ' AS varchar(32))") + .addRoundTrip("varchar(32)", "' '", createVarcharType(32), "CAST(' ' AS varchar(32))") + .addRoundTrip("varchar(32)", "'A'", createVarcharType(32), "CAST('A' AS varchar(32))") + .addRoundTrip("varchar(32)", "'A '", createVarcharType(32), "CAST('A ' AS varchar(32))") + .addRoundTrip("varchar(32)", "' B '", createVarcharType(32), "CAST(' B ' AS varchar(32))") + .addRoundTrip("varchar(32)", "' C'", createVarcharType(32), "CAST(' C' AS varchar(32))") + .addRoundTrip("varchar(32)", "'AB'", createVarcharType(32), "CAST('AB' AS varchar(32))") + .addRoundTrip("varchar(32)", "'ABC'", createVarcharType(32), "CAST('ABC' AS varchar(32))") + .addRoundTrip("varchar(32)", "'A C'", createVarcharType(32), "CAST('A C' AS varchar(32))") + .addRoundTrip("varchar(32)", "' BC'", createVarcharType(32), "CAST(' BC' AS varchar(32))") + .addRoundTrip("varchar(32)", "null", createVarcharType(32), "CAST(null AS varchar(32))") + .execute(getQueryRunner(), teradataJDBCCreateAndInsert("varchar")); + String teraMode = database.getTMode(); + if (teraMode.equals("TERA")) { + // truncation + create() + .addRoundTrip("varchar(3)", "'ABCD'", createVarcharType(3), "CAST('ABCD' AS varchar(3))") + .execute(getQueryRunner(), teradataJDBCCreateAndInsert("varchart")); + } + else { + // Error on truncation + assertThatThrownBy(() -> + create() + .addRoundTrip("varchar(3)", "'ABCD'", createVarcharType(3), "CAST('ABCD' AS varchar(3))") + .execute(getQueryRunner(), teradataJDBCCreateAndInsert("varchart"))) + .isInstanceOf(RuntimeException.class) + .hasCauseInstanceOf(SQLException.class) + .cause() + .hasMessageContaining("Right truncation of string data"); + } + // max-size + create() + .addRoundTrip("long varchar", "'max'", createVarcharType(64000), "CAST('max' AS varchar(64000))") + .execute(getQueryRunner(), teradataJDBCCreateAndInsert("varcharl")); + } + + @Test + void testDate() + { + create() + .addRoundTrip("date", "DATE '0001-01-01'", DATE, "DATE '0001-01-01'") + .addRoundTrip("date", "DATE '0012-12-12'", DATE, "DATE '0012-12-12'") + .addRoundTrip("date", "DATE '1500-01-01'", DATE, "DATE '1500-01-01'") + .addRoundTrip("date", "DATE '1582-10-04'", DATE, "DATE '1582-10-04'") + .addRoundTrip("date", "DATE '1582-10-15'", DATE, "DATE '1582-10-15'") + .addRoundTrip("date", "DATE '1952-04-03'", DATE, "DATE '1952-04-03'") + .addRoundTrip("date", "DATE '1970-01-01'", DATE, "DATE '1970-01-01'") + .addRoundTrip("date", "DATE '1970-02-03'", DATE, "DATE '1970-02-03'") + .addRoundTrip("date", "DATE '1970-01-01'", DATE, "DATE '1970-01-01'") + .addRoundTrip("date", "DATE '1983-04-01'", DATE, "DATE '1983-04-01'") + .addRoundTrip("date", "DATE '1983-10-01'", DATE, "DATE '1983-10-01'") + .addRoundTrip("date", "DATE '2017-07-01'", DATE, "DATE '2017-07-01'") + .addRoundTrip("date", "DATE '2017-01-01'", DATE, "DATE '2017-01-01'") + .addRoundTrip("date", "DATE '2024-02-29'", DATE, "DATE '2024-02-29'") + .addRoundTrip("date", "DATE '9999-12-30'", DATE, "DATE '9999-12-30'") + .addRoundTrip("date", "NULL", DATE, "CAST(NULL AS DATE)") + .execute(getQueryRunner(), teradataJDBCCreateAndInsert("date")); + } + + private DataSetup teradataJDBCCreateAndInsert(String tableNamePrefix) + { + String prefix = String.format("%s.%s", database.getDatabaseName(), tableNamePrefix); + return new CreateAndInsertDataSetup(database, prefix); + } +} diff --git 
a/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/TestingTeradataServer.java b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/TestingTeradataServer.java new file mode 100644 index 000000000000..076c91e9f733 --- /dev/null +++ b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/TestingTeradataServer.java @@ -0,0 +1,387 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.trino.plugin.teradata.integration; + +import io.trino.plugin.teradata.integration.clearscape.ClearScapeSetup; +import io.trino.plugin.teradata.integration.clearscape.Model; +import io.trino.testing.sql.SqlExecutor; + +import java.sql.Connection; +import java.sql.DriverManager; +import java.sql.PreparedStatement; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.Statement; +import java.util.HashMap; +import java.util.Map; +import java.util.Properties; +import java.util.Random; + +import static io.trino.testing.SystemEnvironmentUtils.isEnvSet; +import static io.trino.testing.SystemEnvironmentUtils.requireEnv; + +public final class TestingTeradataServer + implements AutoCloseable, SqlExecutor +{ + private static final int MAX_RETRIES = 5; + private static final long BASE_RETRY_DELAY_MS = 1500L; + private static final long MAX_RETRY_DELAY_MS = 10_000L; + private static final Random RANDOM = new Random(); + + private volatile Connection connection; + private DatabaseConfig config; + private ClearScapeSetup clearScapeSetup; + + public TestingTeradataServer(String envName, boolean destroyEnv) + { + config = DatabaseConfigFactory.create(envName); + String hostName = config.getHostName(); + + // Initialize ClearScape Instance and get hostname from ClearScape API when used + if (config.isUseClearScape()) { + if (isEnvSet("CLEARSCAPE_DESTROY_ENV")) { + destroyEnv = Boolean.parseBoolean(requireEnv("CLEARSCAPE_DESTROY_ENV")); + } + clearScapeSetup = new ClearScapeSetup( + requireEnv("CLEARSCAPE_TOKEN"), + requireEnv("CLEARSCAPE_PASSWORD"), + config.getClearScapeEnvName(), + destroyEnv, + requireEnv("CLEARSCAPE_REGION")); + Model model = clearScapeSetup.initialize(); + hostName = model.getHostName(); + } + String jdbcUrl = buildJdbcUrl(hostName); + config = config.toBuilder() + .hostName(hostName) + .jdbcUrl(jdbcUrl) + .build(); + // Recreate the connection with retries to handle transient ClearScape socket or connection closure issues. 
+ connection = createConnectionWithRetries(); + createTestDatabaseIfAbsent(); + } + + private static Properties buildConnectionProperties(AuthenticationConfig auth) + { + Properties props = new Properties(); + props.setProperty("logmech", "TD2"); + props.setProperty("username", auth.userName()); + props.setProperty("password", auth.password()); + return props; + } + + private static long computeBackoffDelay(int attempt) + { + // Exponential backoff with jitter: the base delay doubles on each attempt, up to one extra base delay of random jitter is added, + // and the result is clamped to the range [BASE_RETRY_DELAY_MS, MAX_RETRY_DELAY_MS] + long base = BASE_RETRY_DELAY_MS * (1L << Math.max(0, attempt - 1)); + long jitter = (long) (RANDOM.nextDouble() * BASE_RETRY_DELAY_MS); + long delay = Math.min(base + jitter, MAX_RETRY_DELAY_MS); + return Math.max(delay, BASE_RETRY_DELAY_MS); + }
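+ + // For example, ignoring jitter: attempt 1 waits ~1.5s, attempt 2 ~3s, attempt 3 ~6s, and attempt 4 onwards is capped at MAX_RETRY_DELAY_MS (10s).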
+ + private static void sleepUnchecked(long millis) + { + try { + Thread.sleep(millis); + } + catch (InterruptedException ie) { + Thread.currentThread().interrupt(); + throw new RuntimeException("Interrupted during retry wait", ie); + } + } + + public Map<String, String> getCatalogProperties() + { + Map<String, String> properties = new HashMap<>(); + properties.put("connection-url", config.getJdbcUrl()); + + AuthenticationConfig auth = config.getAuthConfig(); + properties.put("connection-user", auth.userName()); + properties.put("connection-password", auth.password()); + + return properties; + } + + public void createTestDatabaseIfAbsent() + { + executeWithRetry(() -> { + if (!schemaExists(config.getDatabaseName())) { + execute(String.format("CREATE DATABASE \"%s\" AS PERM=100e6;", config.getDatabaseName())); + } + }); + } + + public void dropTestDatabaseIfExists() + { + executeWithRetry(() -> { + if (schemaExists(config.getDatabaseName())) { + execute(String.format("DELETE DATABASE \"%s\"", config.getDatabaseName())); + execute(String.format("DROP DATABASE \"%s\"", config.getDatabaseName())); + } + }); + } + + public boolean isTableExists(String tableName) + { + ensureConnection(); + String query = "SELECT count(1) FROM DBC.TablesV WHERE DataBaseName = ? AND TableName = ?"; + try (PreparedStatement stmt = connection.prepareStatement(query)) { + stmt.setString(1, config.getDatabaseName()); + stmt.setString(2, tableName); + try (ResultSet rs = stmt.executeQuery()) { + return rs.next() && rs.getInt(1) > 0; + } + } + catch (SQLException e) { + if (isConnectionException(e)) { + connection = createConnectionWithRetries(); + try (PreparedStatement stmt = connection.prepareStatement(query)) { + stmt.setString(1, config.getDatabaseName()); + stmt.setString(2, tableName); + try (ResultSet rs = stmt.executeQuery()) { + return rs.next() && rs.getInt(1) > 0; + } + } + catch (SQLException ex) { + throw new RuntimeException("Failed to check table existence: " + ex.getMessage(), ex); + } + } + throw new RuntimeException("Failed to check table existence: " + e.getMessage(), e); + } + } + + @Override + public void execute(String sql) + { + executeWithRetry(() -> doExecute(sql)); + } + + public String getDatabaseName() + { + return config.getDatabaseName(); + } + + public String getTMode() + { + return config.getTMode(); + } + + @Override + public void close() + { + try { + dropTestDatabaseIfExists(); + } + finally { + try { + if (connection != null && !connection.isClosed()) { + connection.close(); + } + } + catch (SQLException ignored) { + } + connection = null; + if (clearScapeSetup != null) { + try { + clearScapeSetup.cleanup(); + } + catch (Exception ignored) { + } + } + } + } + + @Override + public boolean supportsMultiRowInsert() + { + return false; + } + + private String buildJdbcUrl(String hostName) + { + String baseUrl = String.format("jdbc:teradata://%s/", hostName); + String propertiesString = buildPropertiesString(); + return propertiesString.isEmpty() ? baseUrl : baseUrl + propertiesString; + } + + private String buildPropertiesString() + { + Map<String, String> properties = config.getJdbcProperties(); + if (properties == null || properties.isEmpty()) { + return ""; + } + return properties.entrySet() + .stream() + .map(entry -> entry.getKey() + "=" + entry.getValue()) + .collect(java.util.stream.Collectors.joining(",")); + } + + private Connection createConnection() + { + try { + Class.forName("com.teradata.jdbc.TeraDriver"); + Properties props = buildConnectionProperties(config.getAuthConfig()); + return DriverManager.getConnection(config.getJdbcUrl(), props); + } + catch (SQLException | ClassNotFoundException e) { + throw new RuntimeException("Failed to create database connection", e); + } + } + + private Connection createConnectionWithRetries() + { + int attempt = 0; + while (true) { + try { + return createConnection(); + } + catch (RuntimeException e) { + attempt++; + if (attempt >= MAX_RETRIES) { + throw new RuntimeException("Failed to create database connection after retries", e); + } + long delay = computeBackoffDelay(attempt); + sleepUnchecked(delay); + } + } + } + + private void doExecute(String sql) + { + ensureConnection(); + try (Statement stmt = connection.createStatement()) { + if (config.getDatabaseName() != null && schemaExists(config.getDatabaseName())) { + stmt.execute(String.format("DATABASE \"%s\"", config.getDatabaseName())); + } + stmt.execute(sql); + } + catch (SQLException e) { + throw new RuntimeException("SQL execution failed: " + sql, e); + } + } + + private synchronized void ensureConnection() + { + try { + if (connection == null || connection.isClosed()) { + connection = createConnectionWithRetries(); + } + } + catch (SQLException e) { + connection = createConnectionWithRetries(); + } + } + + private boolean schemaExists(String
schemaName) + { + ensureConnection(); + String query = "SELECT COUNT(1) FROM DBC.DatabasesV WHERE DatabaseName = ?"; + try (PreparedStatement stmt = connection.prepareStatement(query)) { + stmt.setString(1, schemaName); + try (ResultSet rs = stmt.executeQuery()) { + return rs.next() && rs.getInt(1) > 0; + } + } + catch (SQLException e) { + if (isConnectionException(e)) { + connection = createConnectionWithRetries(); + try (PreparedStatement stmt = connection.prepareStatement(query)) { + stmt.setString(1, schemaName); + try (ResultSet rs = stmt.executeQuery()) { + return rs.next() && rs.getInt(1) > 0; + } + } + catch (SQLException ex) { + throw new RuntimeException("Failed to check schema existence", ex); + } + } + throw new RuntimeException("Failed to check schema existence", e); + } + } + + private boolean isTeradataError3598(Throwable t) + { + if (t == null) { + return false; + } + Throwable root = t; + while (root.getCause() != null && !(root instanceof SQLException)) { + root = root.getCause(); + } + if (root instanceof SQLException sqlEx) { + try { + if (sqlEx.getErrorCode() == 3598) { + return true; + } + } + catch (Exception ignored) { + } + } + return false; + } + + private boolean isConnectionException(SQLException e) + { + if (e == null) { + return false; + } + try { + int code = e.getErrorCode(); + if (code == 1095 || code == 804) { // 1095 == closed connection, 804 socket communication failure + return true; + } + } + catch (Exception ignored) { + } + + try { + return connection == null || connection.isClosed(); + } + catch (SQLException ignored) { + } + + return false; + } + + private void executeWithRetry(Runnable operation) + { + int attempt = 0; + + while (true) { + try { + operation.run(); + return; + } + catch (RuntimeException e) { + attempt++; + Throwable cause = e.getCause(); + + // Connection-related: recreate connection and retry + if (cause instanceof SQLException sqlEx && isConnectionException(sqlEx) && attempt < MAX_RETRIES) { + connection = createConnectionWithRetries(); + sleepUnchecked(computeBackoffDelay(attempt)); + continue; + } + + // Teradata transient concurrency error 3598: backoff & retry + if (isTeradataError3598(e) && attempt < MAX_RETRIES) { + long delay = computeBackoffDelay(attempt); + sleepUnchecked(delay); + continue; + } + throw e; + } + } + } +} diff --git a/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/clearscape/BaseException.java b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/clearscape/BaseException.java new file mode 100644 index 000000000000..dd7a3292a3a8 --- /dev/null +++ b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/clearscape/BaseException.java @@ -0,0 +1,31 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package io.trino.plugin.teradata.integration.clearscape; + +public class BaseException + extends RuntimeException +{ + private final int statusCode; + + public BaseException(int statusCode, String body) + { + super(body); + this.statusCode = statusCode; + } + + public int getStatusCode() + { + return statusCode; + } +} diff --git a/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/clearscape/ClearScapeEnvironmentUtils.java b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/clearscape/ClearScapeEnvironmentUtils.java new file mode 100644 index 000000000000..94d9c0b9e1e5 --- /dev/null +++ b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/clearscape/ClearScapeEnvironmentUtils.java @@ -0,0 +1,37 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.trino.plugin.teradata.integration.clearscape; + +import java.util.concurrent.ThreadLocalRandom; + +import static java.util.Locale.ENGLISH; + +public final class ClearScapeEnvironmentUtils +{ + private static final int MAX_ENV_NAME_LENGTH = 40; // Adjust based on ClearScape limits + + private ClearScapeEnvironmentUtils() {}
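+ + // For example, for a test class named TestTeradataTypeMapping this yields something like + // "testteradatatypemapping-<base-36 suffix>", truncated to MAX_ENV_NAME_LENGTH characters when needed.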
+ + public static String generateUniqueEnvName(Class<?> testClass) + { + String className = testClass.getSimpleName().toLowerCase(ENGLISH); + String suffix = Long.toString(ThreadLocalRandom.current().nextLong(Long.MAX_VALUE), 36); + String envName = className + "-" + suffix; + // Truncate if too long + if (envName.length() > MAX_ENV_NAME_LENGTH) { + envName = envName.substring(0, MAX_ENV_NAME_LENGTH); + } + return envName; + } +} diff --git a/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/clearscape/ClearScapeManager.java b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/clearscape/ClearScapeManager.java new file mode 100644 index 000000000000..64c9bcf48d3d --- /dev/null +++ b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/clearscape/ClearScapeManager.java @@ -0,0 +1,143 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.trino.plugin.teradata.integration.clearscape; + +import io.airlift.log.Logger; +import io.trino.plugin.teradata.integration.TeradataTestConstants; + +import java.net.URISyntaxException; +import java.util.regex.Pattern; + +public class ClearScapeManager +{ + private static final Logger log = Logger.get(ClearScapeManager.class); + private static final Pattern ALLOWED_URL_PATTERN = + Pattern.compile("^(https?://)(www\\.)?api\\.clearscape\\.teradata\\.com.*"); + private Model model; + + private boolean isValidUrl(String url) + { + return ALLOWED_URL_PATTERN.matcher(url).matches(); + } + + private TeradataHttpClient getTeradataHttpClient() + throws URISyntaxException + { + String envUrl = TeradataTestConstants.ENV_CLEARSCAPE_URL; + if (isValidUrl(envUrl)) { + return new TeradataHttpClient(envUrl); + } + else { + throw new URISyntaxException(envUrl, "Provide valid environment URL"); + } + } + + public void init(Model model) + { + this.model = model; + } + + public void setup() + { + createAndStartClearScapeInstance(); + } + + public void stop() + { + stopClearScapeInstance(); + } + + public void teardown() + { + shutdownAndDestroyClearScapeInstance(); + } + + private void createAndStartClearScapeInstance() + { + try { + TeradataHttpClient teradataHttpClient = getTeradataHttpClient(); + + String token = this.model.getToken(); + String name = this.model.getEnvName(); + EnvironmentResponse response = null; + try { + response = teradataHttpClient.getEnvironment(new GetEnvironmentRequest(name), token); + } + catch (BaseException be) { + log.info("Environment %s is not available. %s", name, be.getMessage()); + } + + if (response == null || response.ip() == null) { + CreateEnvironmentRequest request = new CreateEnvironmentRequest( + name, + model.getRegion(), + model.getPassword()); + response = teradataHttpClient.createEnvironment(request, token).get(); + } + else if (response.state() == EnvironmentResponse.State.STOPPED) { + EnvironmentRequest request = new EnvironmentRequest(name, new OperationRequest("start")); + teradataHttpClient.startEnvironment(request, token); + } + if (response != null) { + model.setHostName(response.ip()); + } + } + catch (Exception e) { + throw new RuntimeException("Failed to create and start ClearScape instance", e); + } + } + + private void stopClearScapeInstance() + { + try { + TeradataHttpClient teradataHttpClient = getTeradataHttpClient(); + String token = this.model.getToken(); + String name = this.model.getEnvName(); + + EnvironmentResponse response = null; + try { + response = teradataHttpClient.getEnvironment(new GetEnvironmentRequest(name), token); + } + catch (BaseException be) { + log.info("Environment %s is not available. %s", name, be.getMessage()); + } + if (response != null && + response.ip() != null && + response.state() == EnvironmentResponse.State.RUNNING) { + EnvironmentRequest request = new EnvironmentRequest(name, new OperationRequest("stop")); + teradataHttpClient.stopEnvironment(request, token); + } + } + catch (Exception e) { + throw new RuntimeException("Failed to stop ClearScape instance", e); + } + } + + private void shutdownAndDestroyClearScapeInstance() + { + try { + TeradataHttpClient teradataHttpClient = getTeradataHttpClient(); + String token = this.model.getToken(); + DeleteEnvironmentRequest request = new DeleteEnvironmentRequest(this.model.getEnvName()); + teradataHttpClient.deleteEnvironment(request, token).get(); + } + catch (BaseException be) { + log.info("Environment %s is not available. 
Error - %s", + this.model.getEnvName(), be.getMessage()); + } + catch (Exception e) { + throw new RuntimeException("Failed to shutdown and destroy ClearScape instance", e); + } + } +} diff --git a/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/clearscape/ClearScapeSetup.java b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/clearscape/ClearScapeSetup.java new file mode 100644 index 000000000000..7690ef186f79 --- /dev/null +++ b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/clearscape/ClearScapeSetup.java @@ -0,0 +1,84 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.trino.plugin.teradata.integration.clearscape; + +import io.trino.plugin.teradata.integration.TeradataTestConstants; + +import static java.util.Objects.requireNonNull; + +public class ClearScapeSetup +{ + private final String token; + private final String password; + private final String envName; + private final String region; + private final boolean destroyEnv; + private ClearScapeManager manager; + + public ClearScapeSetup( + String token, + String password, + String envName, + boolean destroyEnv, + String region) + { + requireNonNull(token, "token is null"); + requireNonNull(password, "password is null"); + requireNonNull(envName, "envName is null"); + requireNonNull(region, "region is null"); + this.token = token; + this.password = password; + this.envName = envName; + this.region = region; + this.destroyEnv = destroyEnv; + } + + public Model initialize() + { + try { + manager = new ClearScapeManager(); + Model model = createModel(); + manager.init(model); + manager.setup(); + return model; + } + catch (Exception e) { + throw new RuntimeException("Failed to initialize ClearScape environment: " + envName, e); + } + } + + private Model createModel() + { + Model model = new Model(); + model.setEnvName(envName); + model.setUserName(TeradataTestConstants.ENV_CLEARSCAPE_USERNAME); + model.setPassword(password); + model.setDatabaseName(TeradataTestConstants.ENV_CLEARSCAPE_USERNAME); + model.setToken(token); + model.setRegion(region); + return model; + } + + public void cleanup() + { + if (manager == null) { + return; + } + if (destroyEnv) { + manager.teardown(); + return; + } + manager.stop(); + } +} diff --git a/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/clearscape/CreateEnvironmentRequest.java b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/clearscape/CreateEnvironmentRequest.java new file mode 100644 index 000000000000..e80aeced517b --- /dev/null +++ b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/clearscape/CreateEnvironmentRequest.java @@ -0,0 +1,20 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.trino.plugin.teradata.integration.clearscape; + +public record CreateEnvironmentRequest( + String name, + String region, + String password +) {} diff --git a/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/clearscape/DeleteEnvironmentRequest.java b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/clearscape/DeleteEnvironmentRequest.java new file mode 100644 index 000000000000..7c806aa8920c --- /dev/null +++ b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/clearscape/DeleteEnvironmentRequest.java @@ -0,0 +1,18 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.trino.plugin.teradata.integration.clearscape; + +public record DeleteEnvironmentRequest( + String name +) {} diff --git a/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/clearscape/EnvironmentRequest.java b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/clearscape/EnvironmentRequest.java new file mode 100644 index 000000000000..b165da10cd52 --- /dev/null +++ b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/clearscape/EnvironmentRequest.java @@ -0,0 +1,19 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.trino.plugin.teradata.integration.clearscape; + +public record EnvironmentRequest( + String name, + OperationRequest request +) {} diff --git a/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/clearscape/EnvironmentResponse.java b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/clearscape/EnvironmentResponse.java new file mode 100644 index 000000000000..03233b8403ee --- /dev/null +++ b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/clearscape/EnvironmentResponse.java @@ -0,0 +1,37 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.trino.plugin.teradata.integration.clearscape; + +import static java.util.Locale.ENGLISH; +import static java.util.Objects.requireNonNull; + +public record EnvironmentResponse( + State state, + String region, + String name, + String ip) +{ + public EnvironmentResponse { + requireNonNull(state, "state must not be null"); + requireNonNull(region, "region must not be null"); + requireNonNull(name, "name must not be null"); + region = region.toUpperCase(ENGLISH); + } + + public enum State + { + RUNNING, + STOPPED, + } +} diff --git a/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/clearscape/Error4xxException.java b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/clearscape/Error4xxException.java new file mode 100644 index 000000000000..be90c6f28e6b --- /dev/null +++ b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/clearscape/Error4xxException.java @@ -0,0 +1,23 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.trino.plugin.teradata.integration.clearscape; + +public class Error4xxException + extends BaseException +{ + public Error4xxException(int statusCode, String body) + { + super(statusCode, body); + } +} diff --git a/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/clearscape/Error5xxException.java b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/clearscape/Error5xxException.java new file mode 100644 index 000000000000..20d4afb8b441 --- /dev/null +++ b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/clearscape/Error5xxException.java @@ -0,0 +1,23 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+package io.trino.plugin.teradata.integration.clearscape;
+
+public class Error5xxException
+        extends BaseException
+{
+    public Error5xxException(int statusCode, String body)
+    {
+        super(statusCode, body);
+    }
+}
diff --git a/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/clearscape/GetEnvironmentRequest.java b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/clearscape/GetEnvironmentRequest.java
new file mode 100644
index 000000000000..a35e9d4c0b8b
--- /dev/null
+++ b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/clearscape/GetEnvironmentRequest.java
@@ -0,0 +1,18 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.plugin.teradata.integration.clearscape;
+
+public record GetEnvironmentRequest(
+        String name
+) {}
diff --git a/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/clearscape/Model.java b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/clearscape/Model.java
new file mode 100644
index 000000000000..605c60c3a191
--- /dev/null
+++ b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/clearscape/Model.java
@@ -0,0 +1,85 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.plugin.teradata.integration.clearscape;
+
+public class Model
+{
+    String envName;
+    String hostName;
+    String userName;
+    String password;
+    String databaseName;
+    String token;
+    String region;
+
+    public String getEnvName()
+    {
+        return envName;
+    }
+
+    public void setEnvName(String envName)
+    {
+        this.envName = envName;
+    }
+
+    public String getHostName()
+    {
+        return hostName;
+    }
+
+    public void setHostName(String hostName)
+    {
+        this.hostName = hostName;
+    }
+
+    public void setUserName(String userName)
+    {
+        this.userName = userName;
+    }
+
+    public String getPassword()
+    {
+        return password;
+    }
+
+    public void setPassword(String password)
+    {
+        this.password = password;
+    }
+
+    public void setDatabaseName(String databaseName)
+    {
+        this.databaseName = databaseName;
+    }
+
+    public String getToken()
+    {
+        return token;
+    }
+
+    public void setToken(String token)
+    {
+        this.token = token;
+    }
+
+    public String getRegion()
+    {
+        return region;
+    }
+
+    public void setRegion(String region)
+    {
+        this.region = region;
+    }
+}
diff --git a/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/clearscape/OperationRequest.java b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/clearscape/OperationRequest.java
new file mode 100644
index 000000000000..db7611a8d5e3
--- /dev/null
+++ b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/clearscape/OperationRequest.java
@@ -0,0 +1,17 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.plugin.teradata.integration.clearscape;
+
+public record OperationRequest(
+        String operation) {}
diff --git a/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/clearscape/TeradataHttpClient.java b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/clearscape/TeradataHttpClient.java
new file mode 100644
index 000000000000..77115b6192f3
--- /dev/null
+++ b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/clearscape/TeradataHttpClient.java
@@ -0,0 +1,172 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.plugin.teradata.integration.clearscape;
+
+import com.fasterxml.jackson.core.type.TypeReference;
+import com.fasterxml.jackson.databind.DeserializationFeature;
+import com.fasterxml.jackson.databind.MapperFeature;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.json.JsonMapper;
+
+import java.io.IOException;
+import java.io.UncheckedIOException;
+import java.net.URI;
+import java.net.http.HttpClient;
+import java.net.http.HttpRequest;
+import java.net.http.HttpResponse;
+import java.util.concurrent.CompletableFuture;
+
+import static com.google.common.net.HttpHeaders.AUTHORIZATION;
+import static com.google.common.net.HttpHeaders.CONTENT_TYPE;
+
+public class TeradataHttpClient
+{
+    private static final String APPLICATION_JSON = "application/json";
+    private static final String BEARER = "Bearer ";
+
+    private final String baseUrl;
+    private final HttpClient httpClient;
+    private final ObjectMapper objectMapper;
+
+    public TeradataHttpClient(String baseUrl)
+    {
+        this(HttpClient.newBuilder().version(HttpClient.Version.HTTP_1_1).build(), baseUrl);
+    }
+
+    public TeradataHttpClient(
+            HttpClient httpClient,
+            String baseUrl)
+    {
+        this.httpClient = httpClient;
+        this.baseUrl = baseUrl;
+        this.objectMapper = JsonMapper.builder()
+                .configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false)
+                .configure(MapperFeature.ALLOW_FINAL_FIELDS_AS_MUTATORS, false)
+                .build();
+    }
+
+    // Creating an environment is a blocking operation by default, and it takes ~1.5 min to finish
+    public CompletableFuture<EnvironmentResponse> createEnvironment(CreateEnvironmentRequest createEnvironmentRequest,
+            String token)
+    {
+        var requestBody = handleCheckedException(() -> objectMapper.writeValueAsString(createEnvironmentRequest));
+        var httpRequest = HttpRequest.newBuilder(URI.create(baseUrl.concat("/environments")))
+                .headers(
+                        AUTHORIZATION, BEARER + token,
+                        CONTENT_TYPE, APPLICATION_JSON)
+                .POST(HttpRequest.BodyPublishers.ofString(requestBody))
+                .build();
+        return httpClient.sendAsync(httpRequest, HttpResponse.BodyHandlers.ofString())
+                .thenApply(httpResponse -> handleHttpResponse(httpResponse, new TypeReference<>() {}));
+    }
+
+    public EnvironmentResponse getEnvironment(GetEnvironmentRequest getEnvironmentRequest, String token)
+    {
+        var httpRequest = HttpRequest.newBuilder(URI.create(baseUrl
+                        .concat("/environments/")
+                        .concat(getEnvironmentRequest.name())))
+                .headers(AUTHORIZATION, BEARER + token)
+                .GET()
+                .build();
+        var httpResponse =
+                handleCheckedException(() -> httpClient.send(httpRequest, HttpResponse.BodyHandlers.ofString()));
+        return handleHttpResponse(httpResponse, new TypeReference<>() {});
+    }
+
+    // Deleting an environment is a blocking operation by default, and it takes ~1.5 min to finish
+    public CompletableFuture<Void> deleteEnvironment(DeleteEnvironmentRequest deleteEnvironmentRequest, String token)
+    {
+        var httpRequest = HttpRequest.newBuilder(URI.create(baseUrl
+                        .concat("/environments/")
+                        .concat(deleteEnvironmentRequest.name())))
+                .headers(AUTHORIZATION, BEARER + token)
+                .DELETE()
+                .build();
+
+        return httpClient.sendAsync(httpRequest, HttpResponse.BodyHandlers.ofString())
+                .thenApply(httpResponse -> handleHttpResponse(httpResponse, new TypeReference<>() {}));
+    }
+
+    public void startEnvironment(EnvironmentRequest environmentRequest, String token)
+    {
+        var requestBody = handleCheckedException(() -> objectMapper.writeValueAsString(environmentRequest.request()));
+        patchEnvironment(environmentRequest.name(), token, requestBody);
+    }
+
+    public void stopEnvironment(EnvironmentRequest environmentRequest, String token)
+    {
+        var requestBody = handleCheckedException(() -> objectMapper.writeValueAsString(environmentRequest.request()));
+        patchEnvironment(environmentRequest.name(), token, requestBody);
+    }
+
+    // Sends a synchronous PATCH carrying the start/stop operation payload; non-2xx responses fail in handleHttpResponse
+    private void patchEnvironment(String name, String token, String jsonPayload)
+    {
+        HttpRequest.BodyPublisher publisher = HttpRequest.BodyPublishers.ofString(jsonPayload);
+        var httpRequest = HttpRequest.newBuilder(URI.create(baseUrl
+                        .concat("/environments/")
+                        .concat(name)))
+                .headers(
+                        AUTHORIZATION, BEARER + token,
+                        CONTENT_TYPE, APPLICATION_JSON)
+                .method("PATCH", publisher)
+                .build();
+        var httpResponse =
+                handleCheckedException(() -> httpClient.send(httpRequest, HttpResponse.BodyHandlers.ofString()));
+        handleHttpResponse(httpResponse, new TypeReference<Void>() {});
+    }
+
+    private <T> T handleHttpResponse(HttpResponse<String> httpResponse, TypeReference<T> typeReference)
+    {
+        var body = httpResponse.body();
+        if (httpResponse.statusCode() >= 200 && httpResponse.statusCode() <= 299) {
+            return handleCheckedException(() -> {
+                if (typeReference.getType().getTypeName().equals(Void.class.getTypeName())) {
+                    return null;
+                }
+                else {
+                    return objectMapper.readValue(body, typeReference);
+                }
+            });
+        }
+        else if (httpResponse.statusCode() >= 400 && httpResponse.statusCode() <= 499) {
+            throw new Error4xxException(httpResponse.statusCode(), body);
+        }
+        else if (httpResponse.statusCode() >= 500 && httpResponse.statusCode() <= 599) {
+            throw new Error5xxException(httpResponse.statusCode(), body);
+        }
+        else {
+            throw new BaseException(httpResponse.statusCode(), body);
+        }
+    }
+
+    private static <T> T handleCheckedException(CheckedSupplier<T> checkedSupplier)
+    {
+        try {
+            return checkedSupplier.get();
+        }
+        catch (IOException e) {
+            throw new UncheckedIOException(e);
+        }
+        catch (InterruptedException e) {
+            Thread.currentThread().interrupt();
+            throw new RuntimeException(e);
+        }
+    }
+
+    @FunctionalInterface
+    private interface CheckedSupplier<T>
+    {
+        T get()
+                throws IOException, InterruptedException;
+    }
+}
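Not part of the patch, but for reviewers unfamiliar with the ClearScape flow, here is a minimal usage sketch of the client above. The base URL, the token variable, and the constructor shapes of `CreateEnvironmentRequest`, `EnvironmentRequest`, and `DeleteEnvironmentRequest` are assumptions; only `GetEnvironmentRequest`'s single `name` component is visible in this diff, so check the other record definitions added in this PR before copying.

```java
import io.trino.plugin.teradata.integration.clearscape.*;

public class ClearScapeExample
{
    public static void main(String[] args)
    {
        TeradataHttpClient client = new TeradataHttpClient("https://api.clearscape.teradata.com/v1"); // assumed endpoint
        String token = System.getenv("CLEARSCAPE_TOKEN"); // hypothetical variable name

        // Blocks for ~1.5 min on the service side; join() surfaces Error4xxException and
        // Error5xxException (thrown in handleHttpResponse) as CompletionException causes.
        EnvironmentResponse environment = client
                .createEnvironment(new CreateEnvironmentRequest("trino-ci", "password", "us-west"), token) // hypothetical component order
                .join();

        // Start/stop are synchronous PATCHes carrying an OperationRequest payload
        client.stopEnvironment(new EnvironmentRequest("trino-ci", new OperationRequest("stop")), token); // assumed components
        client.startEnvironment(new EnvironmentRequest("trino-ci", new OperationRequest("start")), token);

        // Polling uses the GET endpoint
        System.out.println(client.getEnvironment(new GetEnvironmentRequest("trino-ci"), token));

        // Cleanup returns CompletableFuture<Void>
        client.deleteEnvironment(new DeleteEnvironmentRequest("trino-ci"), token).join();
    }
}
```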
diff --git a/pom.xml b/pom.xml
index 30bae5b7326d..421a360b72ef 100644
--- a/pom.xml
+++ b/pom.xml
@@ -5,7 +5,7 @@ io.airlift airbase - 324 + 328 io.trino
@@ -113,6 +113,7 @@ plugin/trino-snowflake plugin/trino-spooling-filesystem plugin/trino-sqlserver + plugin/trino-teradata plugin/trino-teradata-functions plugin/trino-thrift plugin/trino-thrift-api
@@ -182,7 +183,7 @@ ${air.test.jvm.additional-arguments.default} - 372 + 377 2.9.6 4.13.2 1.12.1
@@ -191,7 +192,7 @@ 7.8.0 119 1.24 - 11.15.0 + 11.17.0 1.15.4 v22.14.0 11.2.0
@@ -201,7 +202,7 @@ 5.18.1 0.13.0 1.20.0 - 4.1.0 + 4.1.1 5.3.0 3.16.2 1.16.0
@@ -228,7 +229,7 @@ com.azure azure-sdk-bom - 1.3.0 + 1.3.2 pom import
@@ -284,7 +285,7 @@ org.jdbi jdbi3-bom - 3.49.6 + 3.50.0 pom import
@@ -292,7 +293,7 @@ org.testcontainers testcontainers-bom - 2.0.1 + 2.0.2 pom import
@@ -300,7 +301,7 @@ software.amazon.awssdk bom - 2.37.3 + 2.38.5 pom import
@@ -308,7 +309,7 @@ com.adobe.testing s3mock-testcontainers - 4.9.1 + 4.10.0
@@ -423,7 +424,7 @@ com.github.docker-java docker-java-api - 3.6.0 + 3.7.0
@@ -441,7 +442,7 @@ com.google.auto.value auto-value-annotations - 1.11.0 + 1.11.1
@@ -521,13 +522,13 @@ com.nimbusds nimbus-jose-jwt - 10.5 + 10.6 com.nimbusds oauth2-oidc-sdk - 11.30 + 11.30.1
@@ -575,19 +576,19 @@ commons-cli commons-cli - 1.10.0 + 1.11.0 commons-codec commons-codec - 1.19.0 + 1.20.0 commons-io commons-io - 2.20.0 + 2.21.0
@@ -913,13 +914,13 @@ io.projectreactor reactor-core - 3.7.12 + 3.8.0 io.projectreactor.netty reactor-netty-core - 1.2.11 + 1.3.0
@@ -2233,7 +2234,7 @@ org.checkerframework checker-qual - 3.51.1 + 3.52.0
@@ -2830,6 +2831,33 @@ iso3166_3-codes.properties + + + + org.glassfish.jersey.containers + jersey-container-servlet + + + org.glassfish.jersey.core + jersey-client + + + org.glassfish.jersey.core + jersey-common + + + org.glassfish.jersey.core + jersey-server + + + org.glassfish.jersey.inject + jersey-hk2 + + + + about.html + +
diff --git a/testing/trino-product-tests-groups/src/main/java/io/trino/tests/product/TestGroups.java b/testing/trino-product-tests-groups/src/main/java/io/trino/tests/product/TestGroups.java
index 06ec00ebac65..b7df57f43ba4 100644
--- a/testing/trino-product-tests-groups/src/main/java/io/trino/tests/product/TestGroups.java
+++ b/testing/trino-product-tests-groups/src/main/java/io/trino/tests/product/TestGroups.java
@@ -99,8 +99,9 @@
     public static final String DELTA_LAKE_DATABRICKS_133 = "delta-lake-databricks-133";
     public static final String DELTA_LAKE_DATABRICKS_143 = "delta-lake-databricks-143";
     public static final String DELTA_LAKE_DATABRICKS_154 = "delta-lake-databricks-154";
+    public static final String DELTA_LAKE_DATABRICKS_164 = "delta-lake-databricks-164";
     // TODO: Remove it once we support generatedColumns, particularly for writes in Delta Lake
-    public static final String DELTA_LAKE_EXCLUDE_164 = "delta-lake-exclude-164";
+    public static final String DELTA_LAKE_EXCLUDE_173 = "delta-lake-exclude-173";
     public static final String DELTA_LAKE_ALLUXIO_CACHING = "delta-lake-alluxio-caching";
     public static final String HUDI = "hudi";
     public static final String PARQUET = "parquet";
diff --git a/testing/trino-product-tests-launcher/pom.xml b/testing/trino-product-tests-launcher/pom.xml
index cb28a6abebc8..273ded812806 100644
--- a/testing/trino-product-tests-launcher/pom.xml
+++ b/testing/trino-product-tests-launcher/pom.xml
@@ -260,7 +260,7 @@ com.databricks databricks-jdbc - 3.0.3 + 3.0.4 jar ${project.build.directory} databricks-jdbc.jar
diff --git a/testing/trino-product-tests-launcher/src/main/java/io/trino/tests/product/launcher/env/environment/EnvSinglenodeDeltaLakeDatabricks173.java b/testing/trino-product-tests-launcher/src/main/java/io/trino/tests/product/launcher/env/environment/EnvSinglenodeDeltaLakeDatabricks173.java
new file mode 100644
index 000000000000..885577d3dce7
--- /dev/null
+++ b/testing/trino-product-tests-launcher/src/main/java/io/trino/tests/product/launcher/env/environment/EnvSinglenodeDeltaLakeDatabricks173.java
@@ -0,0 +1,38 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.tests.product.launcher.env.environment;
+
+import com.google.inject.Inject;
+import io.trino.tests.product.launcher.docker.DockerFiles;
+import io.trino.tests.product.launcher.env.common.Standard;
+import io.trino.tests.product.launcher.env.common.TestsEnvironment;
+
+import static io.trino.testing.SystemEnvironmentUtils.requireEnv;
+
+@TestsEnvironment
+public class EnvSinglenodeDeltaLakeDatabricks173
+        extends AbstractSinglenodeDeltaLakeDatabricks
+{
+    @Inject
+    public EnvSinglenodeDeltaLakeDatabricks173(Standard standard, DockerFiles dockerFiles)
+    {
+        super(standard, dockerFiles);
+    }
+
+    @Override
+    String databricksTestJdbcUrl()
+    {
+        return requireEnv("DATABRICKS_173_JDBC_URL");
+    }
+}
diff --git a/testing/trino-product-tests-launcher/src/main/java/io/trino/tests/product/launcher/env/environment/EnvSinglenodeSparkIcebergNessie.java b/testing/trino-product-tests-launcher/src/main/java/io/trino/tests/product/launcher/env/environment/EnvSinglenodeSparkIcebergNessie.java
index 3fd8d6b89766..a4f9672b5dde 100644
--- a/testing/trino-product-tests-launcher/src/main/java/io/trino/tests/product/launcher/env/environment/EnvSinglenodeSparkIcebergNessie.java
+++ b/testing/trino-product-tests-launcher/src/main/java/io/trino/tests/product/launcher/env/environment/EnvSinglenodeSparkIcebergNessie.java
@@ -43,7 +43,7 @@ public class EnvSinglenodeSparkIcebergNessie
     private static final int SPARK_THRIFT_PORT = 10213;
     private static final int NESSIE_PORT = 19120;
-    private static final String NESSIE_VERSION = "0.105.6";
+    private static final String NESSIE_VERSION = "0.105.7";
     private static final String SPARK = "spark";
 
     private final DockerFiles dockerFiles;
diff --git a/testing/trino-product-tests-launcher/src/main/java/io/trino/tests/product/launcher/suite/suites/SuiteDeltaLakeDatabricks164.java b/testing/trino-product-tests-launcher/src/main/java/io/trino/tests/product/launcher/suite/suites/SuiteDeltaLakeDatabricks164.java
index c2dce4fa8fbe..f975e8ba2170 100644
--- a/testing/trino-product-tests-launcher/src/main/java/io/trino/tests/product/launcher/suite/suites/SuiteDeltaLakeDatabricks164.java
+++ b/testing/trino-product-tests-launcher/src/main/java/io/trino/tests/product/launcher/suite/suites/SuiteDeltaLakeDatabricks164.java
@@ -22,8 +22,7 @@ import java.util.List;
 
 import static io.trino.tests.product.TestGroups.CONFIGURED_FEATURES;
-import static io.trino.tests.product.TestGroups.DELTA_LAKE_DATABRICKS;
-import static io.trino.tests.product.TestGroups.DELTA_LAKE_EXCLUDE_164;
+import static io.trino.tests.product.TestGroups.DELTA_LAKE_DATABRICKS_164;
 import static io.trino.tests.product.launcher.suite.SuiteTestRun.testOnEnvironment;
 
 public class SuiteDeltaLakeDatabricks164
@@ -34,8 +33,7 @@ public List<SuiteTestRun> getTestRuns(EnvironmentConfig config)
     {
         return ImmutableList.of(
                 testOnEnvironment(EnvSinglenodeDeltaLakeDatabricks164.class)
-                        .withGroups(CONFIGURED_FEATURES, DELTA_LAKE_DATABRICKS)
-                        .withExcludedGroups(DELTA_LAKE_EXCLUDE_164)
+                        .withGroups(CONFIGURED_FEATURES, DELTA_LAKE_DATABRICKS_164)
                         .withExcludedTests(getExcludedTests())
                         .build());
     }
diff --git a/testing/trino-product-tests-launcher/src/main/java/io/trino/tests/product/launcher/suite/suites/SuiteDeltaLakeDatabricks173.java b/testing/trino-product-tests-launcher/src/main/java/io/trino/tests/product/launcher/suite/suites/SuiteDeltaLakeDatabricks173.java
new file mode 100644
index 000000000000..2686cc56a27f
--- /dev/null
+++ b/testing/trino-product-tests-launcher/src/main/java/io/trino/tests/product/launcher/suite/suites/SuiteDeltaLakeDatabricks173.java
@@ -0,0 +1,42 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.tests.product.launcher.suite.suites;
+
+import com.google.common.collect.ImmutableList;
+import io.trino.tests.product.launcher.env.EnvironmentConfig;
+import io.trino.tests.product.launcher.env.environment.EnvSinglenodeDeltaLakeDatabricks173;
+import io.trino.tests.product.launcher.suite.SuiteDeltaLakeDatabricks;
+import io.trino.tests.product.launcher.suite.SuiteTestRun;
+
+import java.util.List;
+
+import static io.trino.tests.product.TestGroups.CONFIGURED_FEATURES;
+import static io.trino.tests.product.TestGroups.DELTA_LAKE_DATABRICKS;
+import static io.trino.tests.product.TestGroups.DELTA_LAKE_EXCLUDE_173;
+import static io.trino.tests.product.launcher.suite.SuiteTestRun.testOnEnvironment;
+
+public class SuiteDeltaLakeDatabricks173
+        extends SuiteDeltaLakeDatabricks
+{
+    @Override
+    public List<SuiteTestRun> getTestRuns(EnvironmentConfig config)
+    {
+        return ImmutableList.of(
+                testOnEnvironment(EnvSinglenodeDeltaLakeDatabricks173.class)
+                        .withGroups(CONFIGURED_FEATURES, DELTA_LAKE_DATABRICKS)
+                        .withExcludedGroups(DELTA_LAKE_EXCLUDE_173)
+                        .withExcludedTests(getExcludedTests())
+                        .build());
+    }
+}
diff --git a/testing/trino-product-tests/src/main/java/io/trino/tests/product/deltalake/TestDeltaLakeCaseInsensitiveMapping.java b/testing/trino-product-tests/src/main/java/io/trino/tests/product/deltalake/TestDeltaLakeCaseInsensitiveMapping.java
index aed9b029fd31..998f7f4aab4d 100644
--- a/testing/trino-product-tests/src/main/java/io/trino/tests/product/deltalake/TestDeltaLakeCaseInsensitiveMapping.java
+++ b/testing/trino-product-tests/src/main/java/io/trino/tests/product/deltalake/TestDeltaLakeCaseInsensitiveMapping.java
@@ -27,6 +27,7 @@
 import static io.trino.tests.product.TestGroups.DELTA_LAKE_DATABRICKS_133;
 import static io.trino.tests.product.TestGroups.DELTA_LAKE_DATABRICKS_143;
 import static io.trino.tests.product.TestGroups.DELTA_LAKE_DATABRICKS_154;
+import static io.trino.tests.product.TestGroups.DELTA_LAKE_DATABRICKS_164;
 import static io.trino.tests.product.TestGroups.DELTA_LAKE_OSS;
 import static io.trino.tests.product.TestGroups.PROFILE_SPECIFIC_TESTS;
 import static io.trino.tests.product.deltalake.util.DatabricksVersion.DATABRICKS_143_RUNTIME_VERSION;
@@ -73,7 +74,7 @@ public void testNonLowercaseColumnNames()
         }
     }
 
-    @Test(groups = {DELTA_LAKE_DATABRICKS, DELTA_LAKE_DATABRICKS_133, DELTA_LAKE_DATABRICKS_143, DELTA_LAKE_DATABRICKS_154, DELTA_LAKE_OSS, PROFILE_SPECIFIC_TESTS})
+    @Test(groups = {DELTA_LAKE_DATABRICKS, DELTA_LAKE_DATABRICKS_133, DELTA_LAKE_DATABRICKS_143, DELTA_LAKE_DATABRICKS_154, DELTA_LAKE_DATABRICKS_164, DELTA_LAKE_OSS, PROFILE_SPECIFIC_TESTS})
     @Flaky(issue = DATABRICKS_COMMUNICATION_FAILURE_ISSUE, match = DATABRICKS_COMMUNICATION_FAILURE_MATCH)
     public void testNonLowercaseFieldNames()
     {
diff --git a/testing/trino-product-tests/src/main/java/io/trino/tests/product/deltalake/TestDeltaLakeChangeDataFeedCompatibility.java b/testing/trino-product-tests/src/main/java/io/trino/tests/product/deltalake/TestDeltaLakeChangeDataFeedCompatibility.java
index 23d68e51b0c8..79216ea0a9dc 100644
--- a/testing/trino-product-tests/src/main/java/io/trino/tests/product/deltalake/TestDeltaLakeChangeDataFeedCompatibility.java
+++ b/testing/trino-product-tests/src/main/java/io/trino/tests/product/deltalake/TestDeltaLakeChangeDataFeedCompatibility.java
@@ -35,6 +35,7 @@
 import static io.trino.tests.product.TestGroups.DELTA_LAKE_DATABRICKS_133;
 import static io.trino.tests.product.TestGroups.DELTA_LAKE_DATABRICKS_143;
 import static io.trino.tests.product.TestGroups.DELTA_LAKE_DATABRICKS_154;
+import static io.trino.tests.product.TestGroups.DELTA_LAKE_DATABRICKS_164;
 import static io.trino.tests.product.TestGroups.DELTA_LAKE_OSS;
 import static io.trino.tests.product.TestGroups.PROFILE_SPECIFIC_TESTS;
 import static io.trino.tests.product.deltalake.S3ClientFactory.createS3Client;
@@ -539,7 +540,7 @@ public void testMergeDeleteIntoTableWithCdfEnabled(String columnMappingMode)
         }
     }
 
-    @Test(groups = {DELTA_LAKE_DATABRICKS, DELTA_LAKE_DATABRICKS_133, DELTA_LAKE_DATABRICKS_143, DELTA_LAKE_DATABRICKS_154, DELTA_LAKE_OSS, PROFILE_SPECIFIC_TESTS})
+    @Test(groups = {DELTA_LAKE_DATABRICKS, DELTA_LAKE_DATABRICKS_133, DELTA_LAKE_DATABRICKS_143, DELTA_LAKE_DATABRICKS_154, DELTA_LAKE_DATABRICKS_164, DELTA_LAKE_OSS, PROFILE_SPECIFIC_TESTS})
     @Flaky(issue = DATABRICKS_COMMUNICATION_FAILURE_ISSUE, match = DATABRICKS_COMMUNICATION_FAILURE_MATCH)
     public void testMergeMixedDeleteAndUpdateIntoTableWithCdfEnabled()
     {
diff --git a/testing/trino-product-tests/src/main/java/io/trino/tests/product/deltalake/TestDeltaLakeCheckpointsCompatibility.java b/testing/trino-product-tests/src/main/java/io/trino/tests/product/deltalake/TestDeltaLakeCheckpointsCompatibility.java
index 310ba55ebcc1..8d78a6e53ee5 100644
--- a/testing/trino-product-tests/src/main/java/io/trino/tests/product/deltalake/TestDeltaLakeCheckpointsCompatibility.java
+++ b/testing/trino-product-tests/src/main/java/io/trino/tests/product/deltalake/TestDeltaLakeCheckpointsCompatibility.java
@@ -38,6 +38,7 @@
 import static io.trino.tests.product.TestGroups.DELTA_LAKE_DATABRICKS_133;
 import static io.trino.tests.product.TestGroups.DELTA_LAKE_DATABRICKS_143;
 import static io.trino.tests.product.TestGroups.DELTA_LAKE_DATABRICKS_154;
+import static io.trino.tests.product.TestGroups.DELTA_LAKE_DATABRICKS_164;
 import static io.trino.tests.product.TestGroups.DELTA_LAKE_OSS;
 import static io.trino.tests.product.TestGroups.PROFILE_SPECIFIC_TESTS;
 import static io.trino.tests.product.deltalake.S3ClientFactory.createS3Client;
@@ -276,7 +277,7 @@ private void trinoUsesCheckpointInterval(String deltaTableProperties)
         }
     }
 
-    @Test(groups = {DELTA_LAKE_DATABRICKS, DELTA_LAKE_DATABRICKS_133, DELTA_LAKE_DATABRICKS_143, DELTA_LAKE_DATABRICKS_154, PROFILE_SPECIFIC_TESTS})
+    @Test(groups = {DELTA_LAKE_DATABRICKS, DELTA_LAKE_DATABRICKS_133, DELTA_LAKE_DATABRICKS_143, DELTA_LAKE_DATABRICKS_154, DELTA_LAKE_DATABRICKS_164, PROFILE_SPECIFIC_TESTS})
     @Flaky(issue = DATABRICKS_COMMUNICATION_FAILURE_ISSUE, match = DATABRICKS_COMMUNICATION_FAILURE_MATCH)
     public void testDatabricksUsesCheckpointInterval()
     {
diff --git a/testing/trino-product-tests/src/main/java/io/trino/tests/product/deltalake/TestDeltaLakeColumnMappingMode.java b/testing/trino-product-tests/src/main/java/io/trino/tests/product/deltalake/TestDeltaLakeColumnMappingMode.java
index 2eb76bdc4053..42997a176cf0 100644
--- a/testing/trino-product-tests/src/main/java/io/trino/tests/product/deltalake/TestDeltaLakeColumnMappingMode.java
+++ b/testing/trino-product-tests/src/main/java/io/trino/tests/product/deltalake/TestDeltaLakeColumnMappingMode.java
@@ -30,7 +30,8 @@
 import static io.trino.tests.product.TestGroups.DELTA_LAKE_DATABRICKS_133;
 import static io.trino.tests.product.TestGroups.DELTA_LAKE_DATABRICKS_143;
 import static io.trino.tests.product.TestGroups.DELTA_LAKE_DATABRICKS_154;
-import static io.trino.tests.product.TestGroups.DELTA_LAKE_EXCLUDE_164;
+import static io.trino.tests.product.TestGroups.DELTA_LAKE_DATABRICKS_164;
+import static io.trino.tests.product.TestGroups.DELTA_LAKE_EXCLUDE_173;
 import static io.trino.tests.product.TestGroups.DELTA_LAKE_OSS;
 import static io.trino.tests.product.TestGroups.PROFILE_SPECIFIC_TESTS;
 import static io.trino.tests.product.deltalake.util.DeltaLakeTestUtils.DATABRICKS_COMMUNICATION_FAILURE_ISSUE;
@@ -53,7 +54,7 @@
 public class TestDeltaLakeColumnMappingMode
         extends BaseTestDeltaLakeS3Storage
 {
-    @Test(groups = {DELTA_LAKE_DATABRICKS, DELTA_LAKE_DATABRICKS_133, DELTA_LAKE_DATABRICKS_143, DELTA_LAKE_DATABRICKS_154, DELTA_LAKE_OSS, PROFILE_SPECIFIC_TESTS})
+    @Test(groups = {DELTA_LAKE_DATABRICKS, DELTA_LAKE_DATABRICKS_133, DELTA_LAKE_DATABRICKS_143, DELTA_LAKE_DATABRICKS_154, DELTA_LAKE_DATABRICKS_164, DELTA_LAKE_OSS, PROFILE_SPECIFIC_TESTS})
     @Flaky(issue = DATABRICKS_COMMUNICATION_FAILURE_ISSUE, match = DATABRICKS_COMMUNICATION_FAILURE_MATCH)
     public void testColumnMappingModeNone()
     {
@@ -240,9 +241,9 @@ public void testTrinoColumnMappingMode(String mode)
                 ")"));
     }
 
-    // When setting the table property `delta.columnMapping.mode` on Databricks 16.4, it will enable the `delta.feature.generatedColumns`
+    // Setting the table property `delta.columnMapping.mode` on Databricks 16.x and later enables the `delta.feature.generatedColumns`
     // feature, which is not supported by Trino.
-    @Test(groups = {DELTA_LAKE_DATABRICKS, DELTA_LAKE_EXCLUDE_164, DELTA_LAKE_OSS, PROFILE_SPECIFIC_TESTS}, dataProvider = "columnMappingDataProvider")
+    @Test(groups = {DELTA_LAKE_DATABRICKS, DELTA_LAKE_EXCLUDE_173, DELTA_LAKE_OSS, PROFILE_SPECIFIC_TESTS}, dataProvider = "columnMappingDataProvider")
     @Flaky(issue = DATABRICKS_COMMUNICATION_FAILURE_ISSUE, match = DATABRICKS_COMMUNICATION_FAILURE_MATCH)
     public void testDeltaColumnMappingMode(String mode)
     {
diff --git a/testing/trino-product-tests/src/main/java/io/trino/tests/product/deltalake/TestDeltaLakeCreateTableAsSelectCompatibility.java b/testing/trino-product-tests/src/main/java/io/trino/tests/product/deltalake/TestDeltaLakeCreateTableAsSelectCompatibility.java
index 70daa3f31a01..c4608bc0cb73 100644
--- a/testing/trino-product-tests/src/main/java/io/trino/tests/product/deltalake/TestDeltaLakeCreateTableAsSelectCompatibility.java
+++ b/testing/trino-product-tests/src/main/java/io/trino/tests/product/deltalake/TestDeltaLakeCreateTableAsSelectCompatibility.java
@@ -28,6 +28,7 @@
 import static io.trino.tests.product.TestGroups.DELTA_LAKE_DATABRICKS_133;
 import static io.trino.tests.product.TestGroups.DELTA_LAKE_DATABRICKS_143;
 import static io.trino.tests.product.TestGroups.DELTA_LAKE_DATABRICKS_154;
+import static io.trino.tests.product.TestGroups.DELTA_LAKE_DATABRICKS_164;
 import static io.trino.tests.product.TestGroups.DELTA_LAKE_OSS;
 import static io.trino.tests.product.TestGroups.PROFILE_SPECIFIC_TESTS;
 import static io.trino.tests.product.deltalake.util.DeltaLakeTestUtils.DATABRICKS_COMMUNICATION_FAILURE_ISSUE;
@@ -42,7 +43,7 @@
 public class TestDeltaLakeCreateTableAsSelectCompatibility
         extends BaseTestDeltaLakeS3Storage
 {
-    @Test(groups = {DELTA_LAKE_DATABRICKS, DELTA_LAKE_DATABRICKS_133, DELTA_LAKE_DATABRICKS_143, DELTA_LAKE_DATABRICKS_154, PROFILE_SPECIFIC_TESTS})
+    @Test(groups = {DELTA_LAKE_DATABRICKS, DELTA_LAKE_DATABRICKS_133, DELTA_LAKE_DATABRICKS_143, DELTA_LAKE_DATABRICKS_154, DELTA_LAKE_DATABRICKS_164, PROFILE_SPECIFIC_TESTS})
     @Flaky(issue = DATABRICKS_COMMUNICATION_FAILURE_ISSUE, match = DATABRICKS_COMMUNICATION_FAILURE_MATCH)
     public void testTrinoTypesWithDatabricks()
     {
diff --git a/testing/trino-product-tests/src/main/java/io/trino/tests/product/deltalake/TestDeltaLakeInsertCompatibility.java b/testing/trino-product-tests/src/main/java/io/trino/tests/product/deltalake/TestDeltaLakeInsertCompatibility.java
index 3afdaf184e00..d8d3e4b3436d 100644
--- a/testing/trino-product-tests/src/main/java/io/trino/tests/product/deltalake/TestDeltaLakeInsertCompatibility.java
+++ b/testing/trino-product-tests/src/main/java/io/trino/tests/product/deltalake/TestDeltaLakeInsertCompatibility.java
@@ -31,6 +31,7 @@
 import static io.trino.tests.product.TestGroups.DELTA_LAKE_DATABRICKS_133;
 import static io.trino.tests.product.TestGroups.DELTA_LAKE_DATABRICKS_143;
 import static io.trino.tests.product.TestGroups.DELTA_LAKE_DATABRICKS_154;
+import static io.trino.tests.product.TestGroups.DELTA_LAKE_DATABRICKS_164;
 import static io.trino.tests.product.TestGroups.DELTA_LAKE_OSS;
 import static io.trino.tests.product.TestGroups.PROFILE_SPECIFIC_TESTS;
 import static io.trino.tests.product.deltalake.util.DeltaLakeTestUtils.DATABRICKS_COMMUNICATION_FAILURE_ISSUE;
@@ -44,7 +45,7 @@
 public class TestDeltaLakeInsertCompatibility
         extends BaseTestDeltaLakeS3Storage
 {
-    @Test(groups = {DELTA_LAKE_DATABRICKS, DELTA_LAKE_DATABRICKS_133, DELTA_LAKE_DATABRICKS_143, DELTA_LAKE_DATABRICKS_154, DELTA_LAKE_OSS, PROFILE_SPECIFIC_TESTS})
+    @Test(groups = {DELTA_LAKE_DATABRICKS, DELTA_LAKE_DATABRICKS_133, DELTA_LAKE_DATABRICKS_143, DELTA_LAKE_DATABRICKS_154, DELTA_LAKE_DATABRICKS_164, DELTA_LAKE_OSS, PROFILE_SPECIFIC_TESTS})
     @Flaky(issue = DATABRICKS_COMMUNICATION_FAILURE_ISSUE, match = DATABRICKS_COMMUNICATION_FAILURE_MATCH)
     public void testInsertCompatibility()
     {
@@ -80,7 +81,7 @@ public void testInsertCompatibility()
         }
     }
 
-    @Test(groups = {DELTA_LAKE_DATABRICKS, DELTA_LAKE_DATABRICKS_133, DELTA_LAKE_DATABRICKS_143, DELTA_LAKE_DATABRICKS_154, PROFILE_SPECIFIC_TESTS})
+    @Test(groups = {DELTA_LAKE_DATABRICKS, DELTA_LAKE_DATABRICKS_133, DELTA_LAKE_DATABRICKS_143, DELTA_LAKE_DATABRICKS_154, DELTA_LAKE_DATABRICKS_164, PROFILE_SPECIFIC_TESTS})
     @Flaky(issue = DATABRICKS_COMMUNICATION_FAILURE_ISSUE, match = DATABRICKS_COMMUNICATION_FAILURE_MATCH)
     public void testPartitionedInsertCompatibility()
     {
diff --git a/testing/trino-product-tests/src/main/java/io/trino/tests/product/deltalake/TestDeltaLakeSelectCompatibility.java b/testing/trino-product-tests/src/main/java/io/trino/tests/product/deltalake/TestDeltaLakeSelectCompatibility.java
index eda37f51851d..bd4ce693d225 100644
--- a/testing/trino-product-tests/src/main/java/io/trino/tests/product/deltalake/TestDeltaLakeSelectCompatibility.java
+++ b/testing/trino-product-tests/src/main/java/io/trino/tests/product/deltalake/TestDeltaLakeSelectCompatibility.java
@@ -27,6 +27,7 @@
 import static io.trino.tests.product.TestGroups.DELTA_LAKE_DATABRICKS_133;
 import static io.trino.tests.product.TestGroups.DELTA_LAKE_DATABRICKS_143;
 import static io.trino.tests.product.TestGroups.DELTA_LAKE_DATABRICKS_154;
+import static io.trino.tests.product.TestGroups.DELTA_LAKE_DATABRICKS_164;
 import static io.trino.tests.product.TestGroups.DELTA_LAKE_OSS;
 import static io.trino.tests.product.TestGroups.PROFILE_SPECIFIC_TESTS;
 import static io.trino.tests.product.deltalake.util.DeltaLakeTestUtils.DATABRICKS_COMMUNICATION_FAILURE_ISSUE;
@@ -40,7 +41,7 @@
 public class TestDeltaLakeSelectCompatibility
         extends BaseTestDeltaLakeS3Storage
 {
-    @Test(groups = {DELTA_LAKE_DATABRICKS, DELTA_LAKE_DATABRICKS_133, DELTA_LAKE_DATABRICKS_143, DELTA_LAKE_DATABRICKS_154, DELTA_LAKE_OSS, PROFILE_SPECIFIC_TESTS})
+    @Test(groups = {DELTA_LAKE_DATABRICKS, DELTA_LAKE_DATABRICKS_133, DELTA_LAKE_DATABRICKS_143, DELTA_LAKE_DATABRICKS_154, DELTA_LAKE_DATABRICKS_164, DELTA_LAKE_OSS, PROFILE_SPECIFIC_TESTS})
     @Flaky(issue = DATABRICKS_COMMUNICATION_FAILURE_ISSUE, match = DATABRICKS_COMMUNICATION_FAILURE_MATCH)
     public void testPartitionedSelectSpecialCharacters()
     {
diff --git a/testing/trino-testing-services/src/main/java/io/trino/testing/services/junit/ReportOverriddenMethods.java b/testing/trino-testing-services/src/main/java/io/trino/testing/services/junit/ReportOverriddenMethods.java
new file mode 100644
index 000000000000..fb0f930da29f
--- /dev/null
+++ b/testing/trino-testing-services/src/main/java/io/trino/testing/services/junit/ReportOverriddenMethods.java
@@ -0,0 +1,71 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.testing.services.junit;
+
+import org.junit.platform.engine.support.descriptor.MethodSource;
+import org.junit.platform.launcher.TestExecutionListener;
+import org.junit.platform.launcher.TestIdentifier;
+import org.junit.platform.launcher.TestPlan;
+
+import java.lang.reflect.Modifier;
+import java.util.Arrays;
+import java.util.List;
+import java.util.stream.Stream;
+
+import static com.google.common.base.Throwables.getStackTraceAsString;
+import static io.trino.testing.services.junit.Listeners.reportListenerFailure;
+import static java.lang.String.format;
+
+public class ReportOverriddenMethods
+        implements TestExecutionListener
+{
+    @Override
+    public void testPlanExecutionStarted(TestPlan testPlan)
+    {
+        try {
+            testPlan.accept(new TestPlan.Visitor()
+            {
+                @Override
+                public void visit(TestIdentifier testIdentifier)
+                {
+                    testIdentifier.getSource().ifPresent(source -> {
+                        if (source instanceof MethodSource methodSource) {
+                            if (!Modifier.isPublic(methodSource.getJavaMethod().getModifiers()) &&
+                                    !Modifier.isProtected(methodSource.getJavaMethod().getModifiers())) {
+                                List<Class<?>> declaringClasses = Stream.<Class<?>>iterate(methodSource.getJavaClass(), clazz -> clazz.getSuperclass() != null, Class::getSuperclass)
+                                        .filter(clazz ->
+                                                Arrays.stream(clazz.getDeclaredMethods())
+                                                        .anyMatch(method -> method.getName().equals(methodSource.getJavaMethod().getName())))
+                                        .toList();
+                                if (declaringClasses.size() > 1) {
+                                    throw new IllegalStateException(format(
+                                            """
+                                            Method %s is not public. Similar methods are defined by %s. \
+                                            When tests are non-public, they do not @Override in Java sense, but they still interact with each other in JUnit, \
+                                            and only one of declared tests gets executed. \
+                                            This leads to tests being silently skipped without any source-level indication.""",
+                                            methodSource.getJavaMethod(),
+                                            declaringClasses));
+                                }
+                            }
+                        }
+                    });
+                }
+            });
+        }
+        catch (RuntimeException | Error e) {
+            reportListenerFailure(getClass(), "%s", getStackTraceAsString(e));
+        }
+    }
+}
diff --git a/testing/trino-testing-services/src/main/resources/META-INF/services/org.junit.platform.launcher.TestExecutionListener b/testing/trino-testing-services/src/main/resources/META-INF/services/org.junit.platform.launcher.TestExecutionListener
index 618d40bf5a0a..670508fdd433 100644
--- a/testing/trino-testing-services/src/main/resources/META-INF/services/org.junit.platform.launcher.TestExecutionListener
+++ b/testing/trino-testing-services/src/main/resources/META-INF/services/org.junit.platform.launcher.TestExecutionListener
@@ -1 +1,2 @@
 io.trino.testing.services.junit.LogTestDurationListener
+io.trino.testing.services.junit.ReportOverriddenMethods
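Not part of the patch: a compact illustration of the hazard this listener guards against, using hypothetical test classes. Imagine the two classes living in different packages, so the package-private method below does not override in the Java sense, yet both methods land under the same name in JUnit's discovery.

```java
import org.junit.jupiter.api.Test;

// Hypothetical base class, e.g. in package io.trino.testing
abstract class BaseQueryTest
{
    @Test
    void testInsert() // package-private, so a cross-package subclass can shadow it
    {
        // base coverage
    }
}

// Hypothetical subclass, e.g. in package io.trino.plugin.example
class TestExampleQueries
        extends BaseQueryTest
{
    @Test
    void testInsert() // same signature; no @Override applies, the compiler stays silent
    {
        // JUnit executes only one of the two methods, so the other test is
        // silently skipped -- exactly the condition the listener reports
    }
}
```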
diff --git a/testing/trino-tests/src/test/java/io/trino/execution/TestEventListenerBasic.java b/testing/trino-tests/src/test/java/io/trino/execution/TestEventListenerBasic.java
index 6147a1e41719..a8d18e0435c1 100644
--- a/testing/trino-tests/src/test/java/io/trino/execution/TestEventListenerBasic.java
+++ b/testing/trino-tests/src/test/java/io/trino/execution/TestEventListenerBasic.java
@@ -41,6 +41,7 @@
 import io.trino.spi.connector.SchemaTableName;
 import io.trino.spi.eventlistener.ColumnDetail;
 import io.trino.spi.eventlistener.ColumnInfo;
+import io.trino.spi.eventlistener.ColumnLineageInfo;
 import io.trino.spi.eventlistener.OutputColumnMetadata;
 import io.trino.spi.eventlistener.QueryCompletedEvent;
 import io.trino.spi.eventlistener.QueryCreatedEvent;
@@ -1557,6 +1558,30 @@ public void testAllImmediateFailureEventsPresent()
         }
     }
 
+    @Test
+    public void testSelectColumnsLineageInfo()
+            throws Exception
+    {
+        String sql = "SELECT nationkey AS test_nationkey, name AS test_name, 'anonymous_literal', 'named_literal' AS named FROM nation";
+        QueryEvents queryEvents = runQueryAndWaitForEvents(sql).getQueryEvents();
+        QueryCompletedEvent event = queryEvents.getQueryCompletedEvent();
+        assertThat(event.getSelectColumnsLineageInfo()).isPresent();
+        List<ColumnLineageInfo> selectColumnsLineageInfo = event.getSelectColumnsLineageInfo().get();
+        assertThat(selectColumnsLineageInfo).hasSize(4);
+
+        assertThat(selectColumnsLineageInfo.getFirst().name()).isEqualTo("test_nationkey");
+        assertThat(selectColumnsLineageInfo.getFirst().sourceColumns()).containsExactly(new ColumnDetail("tpch", "tiny", "nation", "nationkey"));
+
+        assertThat(selectColumnsLineageInfo.get(1).name()).isEqualTo("test_name");
+        assertThat(selectColumnsLineageInfo.get(1).sourceColumns()).containsExactly(new ColumnDetail("tpch", "tiny", "nation", "name"));
+
+        assertThat(selectColumnsLineageInfo.get(2).name()).isEqualTo("");
+        assertThat(selectColumnsLineageInfo.get(2).sourceColumns()).isEmpty();
+
+        assertThat(selectColumnsLineageInfo.get(3).name()).isEqualTo("named");
+        assertThat(selectColumnsLineageInfo.get(3).sourceColumns()).isEmpty();
+    }
+
     private void assertLineage(String baseQuery, Set<String> inputTables, OutputColumnMetadata... outputColumnMetadata)
             throws Exception
     {
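Not part of the patch: a sketch of how an `EventListener` implementation might consume the new lineage accessor, relying only on the methods exercised by the test above (`getSelectColumnsLineageInfo()`, `ColumnLineageInfo.name()` and `sourceColumns()`); the listener class itself is hypothetical.

```java
import io.trino.spi.eventlistener.ColumnLineageInfo;
import io.trino.spi.eventlistener.EventListener;
import io.trino.spi.eventlistener.QueryCompletedEvent;

public class LineageLoggingListener
        implements EventListener
{
    @Override
    public void queryCompleted(QueryCompletedEvent event)
    {
        event.getSelectColumnsLineageInfo().ifPresent(columns -> {
            for (ColumnLineageInfo column : columns) {
                // Literal projections carry no source columns, as the test above asserts
                System.out.printf("output column %s <- %s%n", column.name(), column.sourceColumns());
            }
        });
    }
}
```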
diff --git a/testing/trino-tests/src/test/java/io/trino/execution/resourcegroups/db/H2ResourceGroupConfigurationManagerFactory.java b/testing/trino-tests/src/test/java/io/trino/execution/resourcegroups/db/H2ResourceGroupConfigurationManagerFactory.java
index c53fc1141ac7..f6360eefa407 100644
--- a/testing/trino-tests/src/test/java/io/trino/execution/resourcegroups/db/H2ResourceGroupConfigurationManagerFactory.java
+++ b/testing/trino-tests/src/test/java/io/trino/execution/resourcegroups/db/H2ResourceGroupConfigurationManagerFactory.java
@@ -50,6 +50,7 @@ public ResourceGroupConfigurationManager<?> create(Map<String, String> config, ResourceGroupConfigurationManagerContext context)
     {
         try (ThreadContextClassLoader _ = new ThreadContextClassLoader(classLoader)) {
             Bootstrap app = new Bootstrap(
+                    "io.trino.bootstrap.resource-group." + getName(),
                     new JsonModule(),
                     new H2ResourceGroupsModule(),
                     new NodeModule(),
diff --git a/testing/trino-tests/src/test/java/io/trino/server/protocol/TestJsonSpooledPartitionedLayoutQueries.java b/testing/trino-tests/src/test/java/io/trino/server/protocol/TestJsonSpooledPartitionedLayoutQueries.java
new file mode 100644
index 000000000000..bf629fdcee69
--- /dev/null
+++ b/testing/trino-tests/src/test/java/io/trino/server/protocol/TestJsonSpooledPartitionedLayoutQueries.java
@@ -0,0 +1,40 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.server.protocol;
+
+import com.google.common.collect.ImmutableMap;
+
+import java.util.Map;
+
+public class TestJsonSpooledPartitionedLayoutQueries
+        extends AbstractSpooledQueryDataDistributedQueries
+{
+    @Override
+    protected String encoding()
+    {
+        return "json";
+    }
+
+    @Override
+    protected Map<String, String> spoolingFileSystemConfig()
+    {
+        return ImmutableMap.of("fs.layout", "partitioned");
+    }
+
+    @Override
+    protected Map<String, String> spoolingConfig()
+    {
+        return ImmutableMap.of("protocol.spooling.inlining.enabled", "false");
+    }
+}
diff --git a/testing/trino-tests/src/test/java/io/trino/tests/TestWorkerRestart.java b/testing/trino-tests/src/test/java/io/trino/tests/TestWorkerRestart.java
index 6aa5ca146e1d..dc91f69c1960 100644
--- a/testing/trino-tests/src/test/java/io/trino/tests/TestWorkerRestart.java
+++ b/testing/trino-tests/src/test/java/io/trino/tests/TestWorkerRestart.java
@@ -101,7 +101,8 @@ public void testRestartDuringQuery()
                 .cause().hasMessageFindingMatch("^Expected response code from \\S+ to be 200, but was 500" +
                         "|Expected response from \\S+ is empty" +
                         "|Error fetching \\S+: Expected response code to be 200, but was 500" +
-                        "|Could not communicate with the remote task. The node may have crashed or be under too much load");
+                        "|Could not communicate with the remote task. The node may have crashed or be under too much load" +
+                        "|Error fetching \\S+: Content-Type header is not set");
 
         // Ensure that the restarted worker is able to serve queries.
         assertThat((long) queryRunner.execute("SELECT count(*) FROM tpch.tiny.lineitem").getOnlyValue())