diff --git a/.github/ISSUE_TEMPLATE/bug.yml b/.github/ISSUE_TEMPLATE/bug.yml new file mode 100644 index 000000000000..6d7964de2882 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/bug.yml @@ -0,0 +1,165 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +name: Bug Report +description: File a bug report +title: "[Bug]: " +labels: ["bug", "awaiting triage"] +assignees: + - octocat +body: + - type: markdown + attributes: + value: | + Thanks for taking the time to fill out this bug report! + - type: textarea + id: what-happened + attributes: + label: What happened? + description: Please describe your issue, including the version of Beam and any other languages or tools being used. + validations: + required: true + - type: dropdown + id: priority + attributes: + label: Issue Priority + description: What priority is this bug? 
+ options: + - "Priority: 0" + - "Priority: 1" + - "Priority: 2" + - "Priority: 3" + validations: + required: true + - type: dropdown + id: component + attributes: + label: Issue Component + options: + - "Component: beam-community" + - "Component: beam-model" + - "Component: beam-playground" + - "Component: benchmarking-py" + - "Component: build-system" + - "Component: community-metrics" + - "Component: cross-language" + - "Component: dependencies" + - "Component: dsl-dataframe" + - "Component: dsl-euphoria" + - "Component: dsl-sql" + - "Component: dsl-sql-zetasql" + - "Component: examples-java" + - "Component: examples-python" + - "Component: extensions-ideas" + - "Component: extensions-java-gcp" + - "Component: extensions-java-join-library" + - "Component: extensions-java-json" + - "Component: extensions-java-kyro" + - "Component: extensions-java-protobuf" + - "Component: extensions-java-sketching" + - "Component: extensions-java-sorter" + - "Component: gcp-quota" + - "Component: infrastructure" + - "Component: io-common" + - "Component: io-go-gcp" + - "Component: io-ideas" + - "Component: io-java-amqp" + - "Component: io-java-avro" + - "Component: io-java-aws" + - "Component: io-java-azure" + - "Component: io-java-cassandra" + - "Component: io-java-cdap" + - "Component: io-java-clickhouse" + - "Component: io-java-debezium" + - "Component: io-java-elasticsearch" + - "Component: io-java-files" + - "Component: io-java-gcp" + - "Component: io-java-hadoop-file-system" + - "Component: io-java-hadoop-format" + - "Component: io-java-hbase" + - "Component: io-java-hcatalog" + - "Component: io-java-healthcare" + - "Component: io-java-influxdb" + - "Component: io-java-jdbc" + - "Component: io-java-jms" + - "Component: io-java-kafka" + - "Component: io-java-kinesis" + - "Component: io-java-kudu" + - "Component: io-java-mongodb" + - "Component: io-java-mqtt" + - "Component: io-java-parquet" + - "Component: io-java-pulsar" + - "Component: io-java-rabbitmq" + - "Component: 
io-java-redis" + - "Component: io-java-snowflake" + - "Component: io-java-solr" + - "Component: io-java-splunk" + - "Component: io-java-text" + - "Component: io-java-tfrecord" + - "Component: io-java-tika" + - "Component: io-java-utilities" + - "Component: io-java-xml" + - "Component: io-py-avro" + - "Component: io-py-aws" + - "Component: io-py-common" + - "Component: io-py-files" + - "Component: io-py-gcp" + - "Component: io-py-hadoop" + - "Component: io-py-ideas" + - "Component: io-py-kafka" + - "Component: io-py-mongodb" + - "Component: io-py-parquet" + - "Component: io-py-tfrecord" + - "Component: io-py-vcf" + - "Component: java-fn-execution" + - "Component: jobserver" + - "Component: katas" + - "Component: project-management" + - "Component: release" + - "Component: runner-apex" + - "Component: runner-core" + - "Component: runner-dataflow" + - "Component: runner-direct" + - "Component: runner-extensions-metrics" + - "Component: runner-flink" + - "Component: runner-gearpump" + - "Component: runner-ideas" + - "Component: runner-jet" + - "Component: runner-jstorm" + - "Component: runner-mapreduce" + - "Component: runner-py-direct" + - "Component: runner-py-interactive" + - "Component: runner-ray" + - "Component: runner-samza" + - "Component: runner-spark" + - "Component: runner-tez" + - "Component: runner-twister2" + - "Component: runner-universal" + - "Component: sdk-go" + - "Component: sdk-ideas" + - "Component: sdk-java-core" + - "Component: sdk-java-harness" + - "Component: sdk-py-core" + - "Component: sdk-py-harness" + - "Component: sdk-typescript" + - "Component: test-failures" + - "Component: testing" + - "Component: testing-nexmark" + - "Component: testing-tpcds" + - "Component: website" + validations: + required: true diff --git a/.github/ISSUE_TEMPLATE/feature.yml b/.github/ISSUE_TEMPLATE/feature.yml new file mode 100644 index 000000000000..c3358138c378 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/feature.yml @@ -0,0 +1,165 @@ +# +# Licensed to the Apache 
Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +name: Feature Request +description: File a feature request +title: "[Feature Request]: " +labels: ["new feature", "awaiting triage"] +assignees: + - octocat +body: + - type: markdown + attributes: + value: | + Thanks for taking the time to fill out this feature request! + - type: textarea + id: what-happened + attributes: + label: What would you like to happen? + description: Please describe your issue + validations: + required: true + - type: dropdown + id: priority + attributes: + label: Issue Priority + description: What priority is this bug? 
+ options: + - "Priority: 0" + - "Priority: 1" + - "Priority: 2" + - "Priority: 3" + validations: + required: true + - type: dropdown + id: component + attributes: + label: Issue Component + options: + - "Component: beam-community" + - "Component: beam-model" + - "Component: beam-playground" + - "Component: benchmarking-py" + - "Component: build-system" + - "Component: community-metrics" + - "Component: cross-language" + - "Component: dependencies" + - "Component: dsl-dataframe" + - "Component: dsl-euphoria" + - "Component: dsl-sql" + - "Component: dsl-sql-zetasql" + - "Component: examples-java" + - "Component: examples-python" + - "Component: extensions-ideas" + - "Component: extensions-java-gcp" + - "Component: extensions-java-join-library" + - "Component: extensions-java-json" + - "Component: extensions-java-kyro" + - "Component: extensions-java-protobuf" + - "Component: extensions-java-sketching" + - "Component: extensions-java-sorter" + - "Component: gcp-quota" + - "Component: infrastructure" + - "Component: io-common" + - "Component: io-go-gcp" + - "Component: io-ideas" + - "Component: io-java-amqp" + - "Component: io-java-avro" + - "Component: io-java-aws" + - "Component: io-java-azure" + - "Component: io-java-cassandra" + - "Component: io-java-cdap" + - "Component: io-java-clickhouse" + - "Component: io-java-debezium" + - "Component: io-java-elasticsearch" + - "Component: io-java-files" + - "Component: io-java-gcp" + - "Component: io-java-hadoop-file-system" + - "Component: io-java-hadoop-format" + - "Component: io-java-hbase" + - "Component: io-java-hcatalog" + - "Component: io-java-healthcare" + - "Component: io-java-influxdb" + - "Component: io-java-jdbc" + - "Component: io-java-jms" + - "Component: io-java-kafka" + - "Component: io-java-kinesis" + - "Component: io-java-kudu" + - "Component: io-java-mongodb" + - "Component: io-java-mqtt" + - "Component: io-java-parquet" + - "Component: io-java-pulsar" + - "Component: io-java-rabbitmq" + - "Component: 
io-java-redis" + - "Component: io-java-snowflake" + - "Component: io-java-solr" + - "Component: io-java-splunk" + - "Component: io-java-text" + - "Component: io-java-tfrecord" + - "Component: io-java-tika" + - "Component: io-java-utilities" + - "Component: io-java-xml" + - "Component: io-py-avro" + - "Component: io-py-aws" + - "Component: io-py-common" + - "Component: io-py-files" + - "Component: io-py-gcp" + - "Component: io-py-hadoop" + - "Component: io-py-ideas" + - "Component: io-py-kafka" + - "Component: io-py-mongodb" + - "Component: io-py-parquet" + - "Component: io-py-tfrecord" + - "Component: io-py-vcf" + - "Component: java-fn-execution" + - "Component: jobserver" + - "Component: katas" + - "Component: project-management" + - "Component: release" + - "Component: runner-apex" + - "Component: runner-core" + - "Component: runner-dataflow" + - "Component: runner-direct" + - "Component: runner-extensions-metrics" + - "Component: runner-flink" + - "Component: runner-gearpump" + - "Component: runner-ideas" + - "Component: runner-jet" + - "Component: runner-jstorm" + - "Component: runner-mapreduce" + - "Component: runner-py-direct" + - "Component: runner-py-interactive" + - "Component: runner-ray" + - "Component: runner-samza" + - "Component: runner-spark" + - "Component: runner-tez" + - "Component: runner-twister2" + - "Component: runner-universal" + - "Component: sdk-go" + - "Component: sdk-ideas" + - "Component: sdk-java-core" + - "Component: sdk-java-harness" + - "Component: sdk-py-core" + - "Component: sdk-py-harness" + - "Component: sdk-typescript" + - "Component: test-failures" + - "Component: testing" + - "Component: testing-nexmark" + - "Component: testing-tpcds" + - "Component: website" + validations: + required: true \ No newline at end of file diff --git a/.github/ISSUE_TEMPLATE/task.yml b/.github/ISSUE_TEMPLATE/task.yml new file mode 100644 index 000000000000..04518db8f2e4 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/task.yml @@ -0,0 +1,165 @@ +# +# 
Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +name: Task +description: File a task +title: "[Task]: " +labels: ["task", "awaiting triage"] +assignees: + - octocat +body: + - type: markdown + attributes: + value: | + Thanks for taking the time to fill out this task! + - type: textarea + id: what-happened + attributes: + label: What needs to happen? + description: Please describe your issue + validations: + required: true + - type: dropdown + id: priority + attributes: + label: Issue Priority + description: What priority is this bug? 
+ options: + - "Priority: 0" + - "Priority: 1" + - "Priority: 2" + - "Priority: 3" + validations: + required: true + - type: dropdown + id: component + attributes: + label: Issue Component + options: + - "Component: beam-community" + - "Component: beam-model" + - "Component: beam-playground" + - "Component: benchmarking-py" + - "Component: build-system" + - "Component: community-metrics" + - "Component: cross-language" + - "Component: dependencies" + - "Component: dsl-dataframe" + - "Component: dsl-euphoria" + - "Component: dsl-sql" + - "Component: dsl-sql-zetasql" + - "Component: examples-java" + - "Component: examples-python" + - "Component: extensions-ideas" + - "Component: extensions-java-gcp" + - "Component: extensions-java-join-library" + - "Component: extensions-java-json" + - "Component: extensions-java-kyro" + - "Component: extensions-java-protobuf" + - "Component: extensions-java-sketching" + - "Component: extensions-java-sorter" + - "Component: gcp-quota" + - "Component: infrastructure" + - "Component: io-common" + - "Component: io-go-gcp" + - "Component: io-ideas" + - "Component: io-java-amqp" + - "Component: io-java-avro" + - "Component: io-java-aws" + - "Component: io-java-azure" + - "Component: io-java-cassandra" + - "Component: io-java-cdap" + - "Component: io-java-clickhouse" + - "Component: io-java-debezium" + - "Component: io-java-elasticsearch" + - "Component: io-java-files" + - "Component: io-java-gcp" + - "Component: io-java-hadoop-file-system" + - "Component: io-java-hadoop-format" + - "Component: io-java-hbase" + - "Component: io-java-hcatalog" + - "Component: io-java-healthcare" + - "Component: io-java-influxdb" + - "Component: io-java-jdbc" + - "Component: io-java-jms" + - "Component: io-java-kafka" + - "Component: io-java-kinesis" + - "Component: io-java-kudu" + - "Component: io-java-mongodb" + - "Component: io-java-mqtt" + - "Component: io-java-parquet" + - "Component: io-java-pulsar" + - "Component: io-java-rabbitmq" + - "Component: 
io-java-redis" + - "Component: io-java-snowflake" + - "Component: io-java-solr" + - "Component: io-java-splunk" + - "Component: io-java-text" + - "Component: io-java-tfrecord" + - "Component: io-java-tika" + - "Component: io-java-utilities" + - "Component: io-java-xml" + - "Component: io-py-avro" + - "Component: io-py-aws" + - "Component: io-py-common" + - "Component: io-py-files" + - "Component: io-py-gcp" + - "Component: io-py-hadoop" + - "Component: io-py-ideas" + - "Component: io-py-kafka" + - "Component: io-py-mongodb" + - "Component: io-py-parquet" + - "Component: io-py-tfrecord" + - "Component: io-py-vcf" + - "Component: java-fn-execution" + - "Component: jobserver" + - "Component: katas" + - "Component: project-management" + - "Component: release" + - "Component: runner-apex" + - "Component: runner-core" + - "Component: runner-dataflow" + - "Component: runner-direct" + - "Component: runner-extensions-metrics" + - "Component: runner-flink" + - "Component: runner-gearpump" + - "Component: runner-ideas" + - "Component: runner-jet" + - "Component: runner-jstorm" + - "Component: runner-mapreduce" + - "Component: runner-py-direct" + - "Component: runner-py-interactive" + - "Component: runner-ray" + - "Component: runner-samza" + - "Component: runner-spark" + - "Component: runner-tez" + - "Component: runner-twister2" + - "Component: runner-universal" + - "Component: sdk-go" + - "Component: sdk-ideas" + - "Component: sdk-java-core" + - "Component: sdk-java-harness" + - "Component: sdk-py-core" + - "Component: sdk-py-harness" + - "Component: sdk-typescript" + - "Component: test-failures" + - "Component: testing" + - "Component: testing-nexmark" + - "Component: testing-tpcds" + - "Component: website" + validations: + required: true \ No newline at end of file diff --git a/.github/autolabeler.yml b/.github/autolabeler.yml index 29277521928a..afaaaa2c9fc4 100644 --- a/.github/autolabeler.yml +++ b/.github/autolabeler.yml @@ -21,7 +21,7 @@ build: ["assembly.xml", 
"build.gradle.kts", "buildSrc/**/*", ".gitattributes", ".github/**/*", ".gitignore", "gradle/**/*", ".mailmap", "ownership/**/*", "release/**/*", "sdks/java/build-tools/**/*", "settings.gradle.kts"] docker: ["runners/flink/job-server-container/**/*", "runners/spark/job-server/container/**/*", "sdks/go/container/**/*", "sdks/java/container/**/*", "sdks/python/container/**/*"] examples: ["examples/**/*", "sdks/go/examples/**/*", "sdks/python/apache_beam/examples/**/*"] -go: ["sdks/go/**/*"] +go: ["sdks/go/**/*", "sdks/go.mod", "sdks/go.sum"] infra: [".test-infra/**/*"] java: ["examples/java/**/*", "sdks/java/**/*"] kotlin: ["examples/kotlin/**/*"] diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 000000000000..0641e6f267bb --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,29 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +version: 2 +updates: + - package-ecosystem: "gomod" + directory: "/sdks" # Location of package manifests + schedule: + interval: "daily" + - package-ecosystem: "pip" + directory: "/sdks/python" # Location of package manifests + schedule: + interval: "daily" + - package-ecosystem: "gradle" + directory: "/" # Location of package manifests + schedule: + interval: "daily" diff --git a/.github/workflows/go_tests.yml b/.github/workflows/go_tests.yml index 8829faecde0a..12970dab4e9e 100644 --- a/.github/workflows/go_tests.yml +++ b/.github/workflows/go_tests.yml @@ -28,7 +28,7 @@ on: pull_request: branches: ['master', 'release-*'] tags: 'v*' - paths: ['sdks/go/pkg/**'] + paths: ['sdks/go/pkg/**', 'sdks/go.mod', 'sdks/go.sum'] jobs: build: diff --git a/.test-infra/jenkins/job_LoadTests_CoGBK_Dataflow_V2_Java11.groovy b/.test-infra/jenkins/job_LoadTests_CoGBK_Dataflow_V2_Java11.groovy index f96bd35b8253..bd4b97c07e59 100644 --- a/.test-infra/jenkins/job_LoadTests_CoGBK_Dataflow_V2_Java11.groovy +++ b/.test-infra/jenkins/job_LoadTests_CoGBK_Dataflow_V2_Java11.groovy @@ -23,8 +23,6 @@ import PhraseTriggeringPostCommitBuilder import CronJobBuilder import InfluxDBCredentialsHelper -// TODO(BEAM-14229): remove forceNumInitialBundles once source issue resolved. 
- def loadTestConfigurations = { mode, isStreaming -> [ [ @@ -49,7 +47,6 @@ def loadTestConfigurations = { mode, isStreaming -> "numRecords": 20000000, "keySizeBytes": 10, "valueSizeBytes": 90, - "forceNumInitialBundles": 1, "numHotKeys": 1 } """.trim().replaceAll("\\s", ""), @@ -58,7 +55,6 @@ def loadTestConfigurations = { mode, isStreaming -> "numRecords": 2000000, "keySizeBytes": 10, "valueSizeBytes": 90, - "forceNumInitialBundles": 1, "numHotKeys": 1000 } """.trim().replaceAll("\\s", ""), @@ -90,7 +86,6 @@ def loadTestConfigurations = { mode, isStreaming -> "numRecords": 20000000, "keySizeBytes": 10, "valueSizeBytes": 90, - "forceNumInitialBundles": 1, "numHotKeys": 5 } """.trim().replaceAll("\\s", ""), @@ -99,7 +94,6 @@ def loadTestConfigurations = { mode, isStreaming -> "numRecords": 2000000, "keySizeBytes": 10, "valueSizeBytes": 90, - "forceNumInitialBundles": 1, "numHotKeys": 1000 } """.trim().replaceAll("\\s", ""), @@ -132,7 +126,6 @@ def loadTestConfigurations = { mode, isStreaming -> "numRecords": 2000000, "keySizeBytes": 10, "valueSizeBytes": 90, - "forceNumInitialBundles": 1, "numHotKeys": 200000 } """.trim().replaceAll("\\s", ""), @@ -141,7 +134,6 @@ def loadTestConfigurations = { mode, isStreaming -> "numRecords": 2000000, "keySizeBytes": 10, "valueSizeBytes": 90, - "forceNumInitialBundles": 1, "numHotKeys": 1000 } """.trim().replaceAll("\\s", ""), @@ -174,7 +166,6 @@ def loadTestConfigurations = { mode, isStreaming -> "numRecords": 2000000, "keySizeBytes": 10, "valueSizeBytes": 90, - "forceNumInitialBundles": 1, "numHotKeys": 1000 } """.trim().replaceAll("\\s", ""), @@ -183,7 +174,6 @@ def loadTestConfigurations = { mode, isStreaming -> "numRecords": 2000000, "keySizeBytes": 10, "valueSizeBytes": 90, - "forceNumInitialBundles": 1, "numHotKeys": 1000 } """.trim().replaceAll("\\s", ""), diff --git a/.test-infra/jenkins/job_LoadTests_CoGBK_Dataflow_V2_Java17.groovy b/.test-infra/jenkins/job_LoadTests_CoGBK_Dataflow_V2_Java17.groovy index 
72646fab8bee..0978753f40e6 100644 --- a/.test-infra/jenkins/job_LoadTests_CoGBK_Dataflow_V2_Java17.groovy +++ b/.test-infra/jenkins/job_LoadTests_CoGBK_Dataflow_V2_Java17.groovy @@ -23,8 +23,6 @@ import PhraseTriggeringPostCommitBuilder import CronJobBuilder import InfluxDBCredentialsHelper -// TODO(BEAM-14229): remove forceNumInitialBundles once source issue resolved. - def loadTestConfigurations = { mode, isStreaming -> [ [ @@ -49,7 +47,6 @@ def loadTestConfigurations = { mode, isStreaming -> "numRecords": 20000000, "keySizeBytes": 10, "valueSizeBytes": 90, - "forceNumInitialBundles": 1, "numHotKeys": 1 } """.trim().replaceAll("\\s", ""), @@ -58,7 +55,6 @@ def loadTestConfigurations = { mode, isStreaming -> "numRecords": 2000000, "keySizeBytes": 10, "valueSizeBytes": 90, - "forceNumInitialBundles": 1, "numHotKeys": 1000 } """.trim().replaceAll("\\s", ""), @@ -90,7 +86,6 @@ def loadTestConfigurations = { mode, isStreaming -> "numRecords": 20000000, "keySizeBytes": 10, "valueSizeBytes": 90, - "forceNumInitialBundles": 1, "numHotKeys": 5 } """.trim().replaceAll("\\s", ""), @@ -99,7 +94,6 @@ def loadTestConfigurations = { mode, isStreaming -> "numRecords": 2000000, "keySizeBytes": 10, "valueSizeBytes": 90, - "forceNumInitialBundles": 1, "numHotKeys": 1000 } """.trim().replaceAll("\\s", ""), @@ -132,7 +126,6 @@ def loadTestConfigurations = { mode, isStreaming -> "numRecords": 2000000, "keySizeBytes": 10, "valueSizeBytes": 90, - "forceNumInitialBundles": 1, "numHotKeys": 200000 } """.trim().replaceAll("\\s", ""), @@ -141,7 +134,6 @@ def loadTestConfigurations = { mode, isStreaming -> "numRecords": 2000000, "keySizeBytes": 10, "valueSizeBytes": 90, - "forceNumInitialBundles": 1, "numHotKeys": 1000 } """.trim().replaceAll("\\s", ""), @@ -174,7 +166,6 @@ def loadTestConfigurations = { mode, isStreaming -> "numRecords": 2000000, "keySizeBytes": 10, "valueSizeBytes": 90, - "forceNumInitialBundles": 1, "numHotKeys": 1000 } """.trim().replaceAll("\\s", ""), @@ -183,7 +174,6 
@@ def loadTestConfigurations = { mode, isStreaming -> "numRecords": 2000000, "keySizeBytes": 10, "valueSizeBytes": 90, - "forceNumInitialBundles": 1, "numHotKeys": 1000 } """.trim().replaceAll("\\s", ""), diff --git a/.test-infra/jenkins/job_LoadTests_GBK_Dataflow_V2_Java11.groovy b/.test-infra/jenkins/job_LoadTests_GBK_Dataflow_V2_Java11.groovy index 246ed4f0eaf7..30c9dda37634 100644 --- a/.test-infra/jenkins/job_LoadTests_GBK_Dataflow_V2_Java11.groovy +++ b/.test-infra/jenkins/job_LoadTests_GBK_Dataflow_V2_Java11.groovy @@ -23,8 +23,6 @@ import PhraseTriggeringPostCommitBuilder import CronJobBuilder import InfluxDBCredentialsHelper -// TODO(BEAM-14229): remove forceNumInitialBundles once source issue resolved. - def loadTestConfigurations = { mode, isStreaming -> [ [ @@ -48,8 +46,7 @@ def loadTestConfigurations = { mode, isStreaming -> { "numRecords": 200000000, "keySizeBytes": 1, - "valueSizeBytes": 9, - "forceNumInitialBundles": 1 + "valueSizeBytes": 9 } """.trim().replaceAll("\\s", ""), fanout : 1, @@ -80,8 +77,7 @@ def loadTestConfigurations = { mode, isStreaming -> { "numRecords": 20000000, "keySizeBytes": 10, - "valueSizeBytes": 90, - "forceNumInitialBundles": 1 + "valueSizeBytes": 90 } """.trim().replaceAll("\\s", ""), fanout : 1, @@ -113,8 +109,7 @@ def loadTestConfigurations = { mode, isStreaming -> { "numRecords": 20000, "keySizeBytes": 10000, - "valueSizeBytes": 90000, - "forceNumInitialBundles": 1 + "valueSizeBytes": 90000 } """.trim().replaceAll("\\s", ""), fanout : 1, @@ -146,8 +141,7 @@ def loadTestConfigurations = { mode, isStreaming -> { "numRecords": 5000000, "keySizeBytes": 10, - "valueSizeBytes": 90, - "forceNumInitialBundles": 1 + "valueSizeBytes": 90 } """.trim().replaceAll("\\s", ""), fanout : 4, @@ -178,8 +172,7 @@ def loadTestConfigurations = { mode, isStreaming -> { "numRecords": 2500000, "keySizeBytes": 10, - "valueSizeBytes": 90, - "forceNumInitialBundles": 1 + "valueSizeBytes": 90 } """.trim().replaceAll("\\s", ""), fanout : 8, @@ 
-211,7 +204,6 @@ def loadTestConfigurations = { mode, isStreaming -> "numRecords": 20000000, "keySizeBytes": 10, "valueSizeBytes": 90, - "forceNumInitialBundles": 1, "numHotKeys": 200, "hotKeyFraction": 1 } @@ -245,7 +237,6 @@ def loadTestConfigurations = { mode, isStreaming -> "numRecords": 20000000, "keySizeBytes": 10, "valueSizeBytes": 90, - "forceNumInitialBundles": 1, "numHotKeys": 10, "hotKeyFraction": 1 } diff --git a/.test-infra/jenkins/job_LoadTests_GBK_Dataflow_V2_Java17.groovy b/.test-infra/jenkins/job_LoadTests_GBK_Dataflow_V2_Java17.groovy index f015d828870c..ca897e7eec10 100644 --- a/.test-infra/jenkins/job_LoadTests_GBK_Dataflow_V2_Java17.groovy +++ b/.test-infra/jenkins/job_LoadTests_GBK_Dataflow_V2_Java17.groovy @@ -23,8 +23,6 @@ import PhraseTriggeringPostCommitBuilder import CronJobBuilder import InfluxDBCredentialsHelper -// TODO(BEAM-14229): remove forceNumInitialBundles once source issue resolved. - def loadTestConfigurations = { mode, isStreaming -> [ [ @@ -48,8 +46,7 @@ def loadTestConfigurations = { mode, isStreaming -> { "numRecords": 200000000, "keySizeBytes": 1, - "valueSizeBytes": 9, - "forceNumInitialBundles": 1 + "valueSizeBytes": 9 } """.trim().replaceAll("\\s", ""), fanout : 1, @@ -80,8 +77,7 @@ def loadTestConfigurations = { mode, isStreaming -> { "numRecords": 20000000, "keySizeBytes": 10, - "valueSizeBytes": 90, - "forceNumInitialBundles": 1 + "valueSizeBytes": 90 } """.trim().replaceAll("\\s", ""), fanout : 1, @@ -113,8 +109,7 @@ def loadTestConfigurations = { mode, isStreaming -> { "numRecords": 20000, "keySizeBytes": 10000, - "valueSizeBytes": 90000, - "forceNumInitialBundles": 1 + "valueSizeBytes": 90000 } """.trim().replaceAll("\\s", ""), fanout : 1, @@ -146,8 +141,7 @@ def loadTestConfigurations = { mode, isStreaming -> { "numRecords": 5000000, "keySizeBytes": 10, - "valueSizeBytes": 90, - "forceNumInitialBundles": 1 + "valueSizeBytes": 90 } """.trim().replaceAll("\\s", ""), fanout : 4, @@ -178,8 +172,7 @@ def 
loadTestConfigurations = { mode, isStreaming -> { "numRecords": 2500000, "keySizeBytes": 10, - "valueSizeBytes": 90, - "forceNumInitialBundles": 1 + "valueSizeBytes": 90 } """.trim().replaceAll("\\s", ""), fanout : 8, @@ -211,7 +204,6 @@ def loadTestConfigurations = { mode, isStreaming -> "numRecords": 20000000, "keySizeBytes": 10, "valueSizeBytes": 90, - "forceNumInitialBundles": 1, "numHotKeys": 200, "hotKeyFraction": 1 } @@ -245,7 +237,6 @@ def loadTestConfigurations = { mode, isStreaming -> "numRecords": 20000000, "keySizeBytes": 10, "valueSizeBytes": 90, - "forceNumInitialBundles": 1, "numHotKeys": 10, "hotKeyFraction": 1 } diff --git a/.test-infra/jenkins/job_LoadTests_ParDo_Dataflow_V2_Java11.groovy b/.test-infra/jenkins/job_LoadTests_ParDo_Dataflow_V2_Java11.groovy index 86f5e6fe732b..0a1cf8cfde53 100644 --- a/.test-infra/jenkins/job_LoadTests_ParDo_Dataflow_V2_Java11.groovy +++ b/.test-infra/jenkins/job_LoadTests_ParDo_Dataflow_V2_Java11.groovy @@ -23,8 +23,6 @@ import PhraseTriggeringPostCommitBuilder import CronJobBuilder import InfluxDBCredentialsHelper -// TODO(BEAM-14229): remove forceNumInitialBundles once source issue resolved. 
- def commonLoadTestConfig = { jobType, isStreaming -> [ [ @@ -48,8 +46,7 @@ def commonLoadTestConfig = { jobType, isStreaming -> { "numRecords": 20000000, "keySizeBytes": 10, - "valueSizeBytes": 90, - "forceNumInitialBundles": 1 + "valueSizeBytes": 90 } """.trim().replaceAll("\\s", ""), iterations : 10, @@ -81,8 +78,7 @@ def commonLoadTestConfig = { jobType, isStreaming -> { "numRecords": 20000000, "keySizeBytes": 10, - "valueSizeBytes": 90, - "forceNumInitialBundles": 1 + "valueSizeBytes": 90 } """.trim().replaceAll("\\s", ""), iterations : 200, @@ -115,8 +111,7 @@ def commonLoadTestConfig = { jobType, isStreaming -> { "numRecords": 20000000, "keySizeBytes": 10, - "valueSizeBytes": 90, - "forceNumInitialBundles": 1 + "valueSizeBytes": 90 } """.trim().replaceAll("\\s", ""), iterations : 1, @@ -149,8 +144,7 @@ def commonLoadTestConfig = { jobType, isStreaming -> { "numRecords": 20000000, "keySizeBytes": 10, - "valueSizeBytes": 90, - "forceNumInitialBundles": 1 + "valueSizeBytes": 90 } """.trim().replaceAll("\\s", ""), iterations : 1, diff --git a/.test-infra/jenkins/job_LoadTests_ParDo_Dataflow_V2_Java17.groovy b/.test-infra/jenkins/job_LoadTests_ParDo_Dataflow_V2_Java17.groovy index ac40f39e62cb..f09787225b96 100644 --- a/.test-infra/jenkins/job_LoadTests_ParDo_Dataflow_V2_Java17.groovy +++ b/.test-infra/jenkins/job_LoadTests_ParDo_Dataflow_V2_Java17.groovy @@ -23,8 +23,6 @@ import PhraseTriggeringPostCommitBuilder import CronJobBuilder import InfluxDBCredentialsHelper -// TODO(BEAM-14229): remove forceNumInitialBundles once source issue resolved. 
- def commonLoadTestConfig = { jobType, isStreaming -> [ [ @@ -48,8 +46,7 @@ def commonLoadTestConfig = { jobType, isStreaming -> { "numRecords": 20000000, "keySizeBytes": 10, - "valueSizeBytes": 90, - "forceNumInitialBundles": 1 + "valueSizeBytes": 90 } """.trim().replaceAll("\\s", ""), iterations : 10, @@ -81,8 +78,7 @@ def commonLoadTestConfig = { jobType, isStreaming -> { "numRecords": 20000000, "keySizeBytes": 10, - "valueSizeBytes": 90, - "forceNumInitialBundles": 1 + "valueSizeBytes": 90 } """.trim().replaceAll("\\s", ""), iterations : 200, @@ -115,8 +111,7 @@ def commonLoadTestConfig = { jobType, isStreaming -> { "numRecords": 20000000, "keySizeBytes": 10, - "valueSizeBytes": 90, - "forceNumInitialBundles": 1 + "valueSizeBytes": 90 } """.trim().replaceAll("\\s", ""), iterations : 1, @@ -149,8 +144,7 @@ def commonLoadTestConfig = { jobType, isStreaming -> { "numRecords": 20000000, "keySizeBytes": 10, - "valueSizeBytes": 90, - "forceNumInitialBundles": 1 + "valueSizeBytes": 90 } """.trim().replaceAll("\\s", ""), iterations : 1, diff --git a/.test-infra/jenkins/job_LoadTests_SideInput_Go.groovy b/.test-infra/jenkins/job_LoadTests_SideInput_Go.groovy index 225bbc799989..55809ac7a103 100644 --- a/.test-infra/jenkins/job_LoadTests_SideInput_Go.groovy +++ b/.test-infra/jenkins/job_LoadTests_SideInput_Go.groovy @@ -29,7 +29,7 @@ String now = new Date().format('MMddHHmmss', TimeZone.getTimeZone('UTC')) def batchScenarios = { [ [ - title : 'SideInput Go Load test: 400mb-1kb-10workers-1window-first-iterable', + title : 'SideInput Go Load test: 10gb-1kb-10workers-1window-first-iterable', test : 'sideinput', runner : CommonTestProperties.Runner.DATAFLOW, pipelineOptions: [ @@ -41,7 +41,7 @@ def batchScenarios = { influx_namespace : 'dataflow', influx_measurement : 'go_batch_sideinput_3', input_options : '\'{' + - '"num_records": 400000,' + + '"num_records": 10000000,' + '"key_size": 100,' + '"value_size": 900}\'', access_percentage: 1, @@ -52,7 +52,7 @@ def 
batchScenarios = { ] ], [ - title : 'SideInput Go Load test: 400mb-1kb-10workers-1window-iterable', + title : 'SideInput Go Load test: 10gb-1kb-10workers-1window-iterable', test : 'sideinput', runner : CommonTestProperties.Runner.DATAFLOW, pipelineOptions: [ @@ -64,7 +64,7 @@ def batchScenarios = { influx_namespace : 'dataflow', influx_measurement : 'go_batch_sideinput_4', input_options : '\'{' + - '"num_records": 400000,' + + '"num_records": 10000000,' + '"key_size": 100,' + '"value_size": 900}\'', num_workers : 10, diff --git a/.test-infra/jenkins/job_PreCommit_Go.groovy b/.test-infra/jenkins/job_PreCommit_Go.groovy index db9db2744235..73b6cf4c9384 100644 --- a/.test-infra/jenkins/job_PreCommit_Go.groovy +++ b/.test-infra/jenkins/job_PreCommit_Go.groovy @@ -24,7 +24,7 @@ PrecommitJobBuilder builder = new PrecommitJobBuilder( gradleTask: ':goPreCommit', triggerPathPatterns: [ '^model/.*$', - '^sdks/go/.*$', + '^sdks/go.*$', '^release/.*$', ] ) diff --git a/.test-infra/jenkins/job_PreCommit_Go_Portable.groovy b/.test-infra/jenkins/job_PreCommit_Go_Portable.groovy index 3e0e198d2a8d..12c762e5eb37 100644 --- a/.test-infra/jenkins/job_PreCommit_Go_Portable.groovy +++ b/.test-infra/jenkins/job_PreCommit_Go_Portable.groovy @@ -24,7 +24,7 @@ PrecommitJobBuilder builder = new PrecommitJobBuilder( gradleTask: ':goPortablePreCommit', triggerPathPatterns: [ '^model/.*$', - '^sdks/go/.*$', + '^sdks/go.*$', '^release/.*$', ] ) diff --git a/.test-infra/metrics/grafana/dashboards/perftests_metrics/SideInput_Load_Tests.json b/.test-infra/metrics/grafana/dashboards/perftests_metrics/SideInput_Load_Tests.json index 62616951c808..cee6bb705733 100644 --- a/.test-infra/metrics/grafana/dashboards/perftests_metrics/SideInput_Load_Tests.json +++ b/.test-infra/metrics/grafana/dashboards/perftests_metrics/SideInput_Load_Tests.json @@ -21,7 +21,7 @@ "links": [], "panels": [ { - "content": "The following options should be used by default:\n* key size: 100B\n* value size: 900B\n* number of 
workers: 10\n* size of the window (if fixed windows are used): 1 second\n\n[Jenkins job definition (Python, Dataflow)](https://github.com/apache/beam/blob/master/.test-infra/jenkins/job_LoadTests_SideInput_Python.groovy) [Jenkins job definition (Go, Flink)](https://github.com/apache/beam/tree/master/.test-infra/jenkins/job_LoadTests_SideInput_Flink_Go.groovy) [Jenkins job definition (Go, Dataflow)](https://github.com/apache/beam/blob/master/.test-infra/jenkins/job_LoadTests_SideInput_Go.groovy)\n\nUntil the issue [BEAM-11427](https://issues.apache.org/jira/browse/BEAM-11427) in Go SDK is resolved, sideinput iteration test have 400MB, instead of 10GB.", + "content": "Jenkins job definition (Python, Dataflow)](https://github.com/apache/beam/blob/master/.test-infra/jenkins/job_LoadTests_SideInput_Python.groovy) [Jenkins job definition (Go, Flink)](https://github.com/apache/beam/tree/master/.test-infra/jenkins/job_LoadTests_SideInput_Flink_Go.groovy) [Jenkins job definition (Go, Dataflow)](https://github.com/apache/beam/blob/master/.test-infra/jenkins/job_LoadTests_SideInput_Go.groovy)", "datasource": null, "gridPos": { "h": 8, diff --git a/CHANGES.md b/CHANGES.md index 5064e86fc57d..65610a9a7fcd 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -63,6 +63,8 @@ ## New Features / Improvements * X feature added (Java/Python) ([BEAM-X](https://issues.apache.org/jira/browse/BEAM-X)). +* Go SDK users can now use generic registration functions to optimize their DoFn execution. ([BEAM-14347](https://issues.apache.org/jira/browse/BEAM-14347)) +* Go SDK users may now write self-checkpointing Splittable DoFns to read from streaming sources. ([BEAM-11104](https://issues.apache.org/jira/browse/BEAM-11104)) ## Breaking Changes @@ -99,7 +101,6 @@ * 'Manage Clusters' JupyterLab extension added for users to configure usage of Dataproc clusters managed by Interactive Beam (Python) ([BEAM-14130](https://issues.apache.org/jira/browse/BEAM-14130)). 
* Pipeline drain support added for Go SDK ([BEAM-11106](https://issues.apache.org/jira/browse/BEAM-11106)). **Note: this feature is not yet fully validated and should be treated as experimental in this release.** -* Go SDK users may now write self-checkpointing Splittable DoFns to read from streaming sources. **Note: this feature is not yet fully validated and should be treated as experimental in this release.** ([BEAM-11104](https://issues.apache.org/jira/browse/BEAM-11104)) ## Breaking Changes @@ -163,6 +164,10 @@ * This caused unnecessarily long pre-processing times before job submission for large complex pipelines. * Fix `pyarrow` version parsing (Python)([BEAM-14235](https://issues.apache.org/jira/browse/BEAM-14235)) +## Known Issues + +* Some pipelines that use Java SpannerIO may raise a NPE when the project ID is not specified ([BEAM-14405](https://issues.apache.org/jira/browse/BEAM-14405)) + # [2.37.0] - 2022-03-04 ## Highlights @@ -195,6 +200,9 @@ ## Known Issues +* On rare occations, Python Datastore source may swallow some exceptions. Users are adviced to upgrade to Beam 2.38.0 or later ([BEAM-14282](https://issues.apache.org/jira/browse/BEAM-14282)) +* On rare occations, Python GCS source may swallow some exceptions. Users are adviced to upgrade to Beam 2.38.0 or later ([BEAM-14282](https://issues.apache.org/jira/browse/BEAM-14282)) + # [2.36.0] - 2022-02-07 ## I/Os @@ -224,6 +232,9 @@ * Users may encounter an unexpected java.lang.ArithmeticException when outputting a timestamp for an element further than allowedSkew from an allowed DoFN skew set to a value more than Integer.MAX_VALUE. +* On rare occations, Python Datastore source may swallow some exceptions. Users are adviced to upgrade to Beam 2.38.0 or later ([BEAM-14282](https://issues.apache.org/jira/browse/BEAM-14282)) +* On rare occations, Python GCS source may swallow some exceptions. 
Users are adviced to upgrade to Beam 2.38.0 or later ([BEAM-14282](https://issues.apache.org/jira/browse/BEAM-14282)) +* On rare occations, Java SpannerIO source may swallow some exceptions. Users are adviced to upgrade to Beam 2.37.0 or later ([BEAM-14005](https://issues.apache.org/jira/browse/BEAM-14005)) # [2.35.0] - 2021-12-29 @@ -277,6 +288,9 @@ ## Known Issues * Users of beam-sdks-java-io-hcatalog (and beam-sdks-java-extensions-sql-hcatalog) must take care to override the transitive log4j dependency when they add a hive dependency ([BEAM-13499](https://issues.apache.org/jira/browse/BEAM-13499)). +* On rare occations, Python Datastore source may swallow some exceptions. Users are adviced to upgrade to Beam 2.38.0 or later ([BEAM-14282](https://issues.apache.org/jira/browse/BEAM-14282)) +* On rare occations, Python GCS source may swallow some exceptions. Users are adviced to upgrade to Beam 2.38.0 or later ([BEAM-14282](https://issues.apache.org/jira/browse/BEAM-14282)) +* On rare occations, Java SpannerIO source may swallow some exceptions. Users are adviced to upgrade to Beam 2.37.0 or later ([BEAM-14005](https://issues.apache.org/jira/browse/BEAM-14005)) # [2.34.0] - 2021-11-11 @@ -313,6 +327,12 @@ * Fixed error when importing the DataFrame API with pandas 1.0.x installed ([BEAM-12945](https://issues.apache.org/jira/browse/BEAM-12945)). * Fixed top.SmallestPerKey implementation in the Go SDK ([BEAM-12946](https://issues.apache.org/jira/browse/BEAM-12946)). +## Known Issues + +* On rare occations, Python Datastore source may swallow some exceptions. Users are adviced to upgrade to Beam 2.38.0 or later ([BEAM-14282](https://issues.apache.org/jira/browse/BEAM-14282)) +* On rare occations, Python GCS source may swallow some exceptions. Users are adviced to upgrade to Beam 2.38.0 or later ([BEAM-14282](https://issues.apache.org/jira/browse/BEAM-14282)) +* On rare occations, Java SpannerIO source may swallow some exceptions. 
Users are adviced to upgrade to Beam 2.37.0 or later ([BEAM-14005](https://issues.apache.org/jira/browse/BEAM-14005)) + # [2.33.0] - 2021-10-07 ## Highlights @@ -361,6 +381,7 @@ * Spark 2.x users will need to update Spark's Jackson runtime dependencies (`spark.jackson.version`) to at least version 2.9.2, due to Beam updating its dependencies. * Go SDK jobs may produce "Failed to deduce Step from MonitoringInfo" messages following successful job execution. The messages are benign and don't indicate job failure. These are due to not yet handling PCollection metrics. +* On rare occations, Python GCS source may swallow some exceptions. Users are adviced to upgrade to Beam 2.38.0 or later ([BEAM-14282](https://issues.apache.org/jira/browse/BEAM-14282)) # [2.32.0] - 2021-08-25 @@ -416,6 +437,9 @@ * Fixed race condition in RabbitMqIO causing duplicate acks (Java) ([BEAM-6516](https://issues.apache.org/jira/browse/BEAM-6516))) +## Known Issues +* On rare occations, Python GCS source may swallow some exceptions. 
Users are adviced to upgrade to Beam 2.38.0 or later ([BEAM-14282](https://issues.apache.org/jira/browse/BEAM-14282)) + # [2.31.0] - 2021-07-08 ## I/Os diff --git a/buildSrc/src/main/groovy/org/apache/beam/gradle/BeamModulePlugin.groovy b/buildSrc/src/main/groovy/org/apache/beam/gradle/BeamModulePlugin.groovy index 4bb05439ab1b..994eacd39d97 100644 --- a/buildSrc/src/main/groovy/org/apache/beam/gradle/BeamModulePlugin.groovy +++ b/buildSrc/src/main/groovy/org/apache/beam/gradle/BeamModulePlugin.groovy @@ -481,7 +481,7 @@ class BeamModulePlugin implements Plugin { def powermock_version = "2.0.9" // Try to keep protobuf_version consistent with the protobuf version in google_cloud_platform_libraries_bom def protobuf_version = "3.19.4" - def quickcheck_version = "0.8" + def quickcheck_version = "1.0" def sbe_tool_version = "1.25.1" def slf4j_version = "1.7.30" def spark2_version = "2.4.8" @@ -538,8 +538,10 @@ class BeamModulePlugin implements Plugin { cassandra_driver_core : "com.datastax.cassandra:cassandra-driver-core:$cassandra_driver_version", cassandra_driver_mapping : "com.datastax.cassandra:cassandra-driver-mapping:$cassandra_driver_version", cdap_api : "io.cdap.cdap:cdap-api:$cdap_version", + cdap_api_commons : "io.cdap.cdap:cdap-api-common:$cdap_version", cdap_common : "io.cdap.cdap:cdap-common:$cdap_version", cdap_etl_api : "io.cdap.cdap:cdap-etl-api:$cdap_version", + cdap_etl_api_spark : "io.cdap.cdap:cdap-etl-api-spark:$cdap_version", cdap_plugin_service_now : "io.cdap.plugin:servicenow-plugins:1.1.0", checker_qual : "org.checkerframework:checker-qual:$checkerframework_version", classgraph : "io.github.classgraph:classgraph:$classgraph_version", @@ -693,6 +695,7 @@ class BeamModulePlugin implements Plugin { spark3_sql : "org.apache.spark:spark-sql_2.12:$spark3_version", spark3_streaming : "org.apache.spark:spark-streaming_2.12:$spark3_version", stax2_api : "org.codehaus.woodstox:stax2-api:4.2.1", + tephra : "org.apache.tephra:tephra-api:0.15.0-incubating", 
testcontainers_base : "org.testcontainers:testcontainers:$testcontainers_version", testcontainers_clickhouse : "org.testcontainers:clickhouse:$testcontainers_version", testcontainers_elasticsearch : "org.testcontainers:elasticsearch:$testcontainers_version", diff --git a/learning/katas/python/Windowing/Adding Timestamp/ParDo/task.py b/learning/katas/python/Windowing/Adding Timestamp/ParDo/task.py index 95430204ec6e..07ebcd2549d6 100644 --- a/learning/katas/python/Windowing/Adding Timestamp/ParDo/task.py +++ b/learning/katas/python/Windowing/Adding Timestamp/ParDo/task.py @@ -16,7 +16,7 @@ # beam-playground: # name: WindowingParDo -# description: Task from katas to assign each element a timestamp based on the the `Event.timestamp`. +# description: Task from katas to assign each element a timestamp based on the `Event.timestamp`. # multifile: false # context_line: 34 # categories: diff --git a/model/pipeline/src/main/proto/org/apache/beam/model/pipeline/v1/beam_runner_api.proto b/model/pipeline/src/main/proto/org/apache/beam/model/pipeline/v1/beam_runner_api.proto index 54d328cce82c..a19229a7857f 100644 --- a/model/pipeline/src/main/proto/org/apache/beam/model/pipeline/v1/beam_runner_api.proto +++ b/model/pipeline/src/main/proto/org/apache/beam/model/pipeline/v1/beam_runner_api.proto @@ -1046,12 +1046,8 @@ message StandardCoders { // Nullable types in container types (ArrayType, MapType) per the // encoding described for general Nullable types below. // - // Well known logical types: - // beam:logical_type:micros_instant:v1 - // - Representation type: ROW - // - A timestamp without a timezone where seconds + micros represents the - // amount of time since the epoch. - // + // Logical types understood by all SDKs should be defined in schema.proto. + // Example of well known logical types: // beam:logical_type:schema:v1 // - Representation type: BYTES // - A Beam Schema stored as a serialized proto. 
diff --git a/model/pipeline/src/main/proto/org/apache/beam/model/pipeline/v1/schema.proto b/model/pipeline/src/main/proto/org/apache/beam/model/pipeline/v1/schema.proto index b26fc8fef8d6..3a6a79a6e2ea 100644 --- a/model/pipeline/src/main/proto/org/apache/beam/model/pipeline/v1/schema.proto +++ b/model/pipeline/src/main/proto/org/apache/beam/model/pipeline/v1/schema.proto @@ -31,6 +31,8 @@ option go_package = "github.com/apache/beam/sdks/v2/go/pkg/beam/model/pipeline_v option java_package = "org.apache.beam.model.pipeline.v1"; option java_outer_classname = "SchemaApi"; +import "org/apache/beam/model/pipeline/v1/beam_runner_api.proto"; + message Schema { // List of fields for this schema. Two fields may not share a name. repeated Field fields = 1; @@ -110,6 +112,27 @@ message LogicalType { FieldValue argument = 5; } +// Universally defined Logical types for Row schemas. +// These logical types are supposed to be understood by all SDKs. +message LogicalTypes { + enum Enum { + // A URN for Python Callable logical type + // - Representation type: STRING + // - Language type: In Python SDK, PythonCallableWithSource. + // In any other SDKs, a wrapper object for a string which + // can be evaluated to a Python Callable object. + PYTHON_CALLABLE = 0 [(org.apache.beam.model.pipeline.v1.beam_urn) = + "beam:logical_type:python_callable:v1"]; + + // A URN for MicrosInstant type + // - Representation type: ROW + // - A timestamp without a timezone where seconds + micros represents the + // amount of time since the epoch. + MICROS_INSTANT = 1 [(org.apache.beam.model.pipeline.v1.beam_urn) = + "beam:logical_type:micros_instant:v1"]; + } +} + message Option { // REQUIRED. Identifier for the option. 
string name = 1; diff --git a/release/src/main/scripts/mass_comment.py b/release/src/main/scripts/mass_comment.py index ffced67aadc5..64d272aa01b3 100644 --- a/release/src/main/scripts/mass_comment.py +++ b/release/src/main/scripts/mass_comment.py @@ -89,7 +89,6 @@ "Run PostCommit_Java_Dataflow", "Run PostCommit_Java_DataflowV2", "Run PostCommit_Java_Hadoop_Versions", - "Run Python 3.6 PostCommit", "Run Python 3.7 PostCommit", "Run Python 3.8 PostCommit", "Run Python 3.9 PostCommit", diff --git a/runners/spark/spark_runner.gradle b/runners/spark/spark_runner.gradle index acbe1806c8ee..138b823a77e7 100644 --- a/runners/spark/spark_runner.gradle +++ b/runners/spark/spark_runner.gradle @@ -94,7 +94,6 @@ if (copySourceBase) { } test { - systemProperty "beam.spark.test.reuseSparkContext", "true" systemProperty "spark.sql.shuffle.partitions", "4" systemProperty "spark.ui.enabled", "false" systemProperty "spark.ui.showConsoleProgress", "false" @@ -113,17 +112,14 @@ test { jvmArgs System.getProperty("beamSurefireArgline") } - // Only one SparkContext may be running in a JVM (SPARK-2243) - forkEvery 1 maxParallelForks 4 useJUnit { excludeCategories "org.apache.beam.runners.spark.StreamingTest" excludeCategories "org.apache.beam.runners.spark.UsesCheckpointRecovery" } - filter { - // BEAM-11653 MetricsSinkTest is failing with Spark 3 - excludeTestsMatching 'org.apache.beam.runners.spark.aggregators.metrics.sink.SparkMetricsSinkTest' - } + + // easily re-run all tests (to deal with flaky tests / SparkContext leaks) + if(project.hasProperty("rerun-tests")) { outputs.upToDateWhen {false} } } dependencies { @@ -291,10 +287,6 @@ def validatesRunnerStreaming = tasks.register("validatesRunnerStreaming", Test) useJUnit { includeCategories 'org.apache.beam.runners.spark.StreamingTest' } - filter { - // BEAM-11653 MetricsSinkTest is failing with Spark 3 - excludeTestsMatching 'org.apache.beam.runners.spark.aggregators.metrics.sink.SparkMetricsSinkTest' - } } 
tasks.register("validatesStructuredStreamingRunnerBatch", Test) { diff --git a/runners/spark/src/main/java/org/apache/beam/runners/spark/SparkContextOptions.java b/runners/spark/src/main/java/org/apache/beam/runners/spark/SparkContextOptions.java index 13ae67878eb2..39caee7e6ba7 100644 --- a/runners/spark/src/main/java/org/apache/beam/runners/spark/SparkContextOptions.java +++ b/runners/spark/src/main/java/org/apache/beam/runners/spark/SparkContextOptions.java @@ -37,6 +37,12 @@ * which link to Spark dependencies, won't be scanned by {@link PipelineOptions} reflective * instantiation. Note that {@link SparkContextOptions} is not registered with {@link * SparkRunnerRegistrar}. + * + *

Note: It's recommended to use {@link + * org.apache.beam.runners.spark.translation.SparkContextFactory#setProvidedSparkContext(JavaSparkContext)} + * instead of {@link SparkContextOptions#setProvidedSparkContext(JavaSparkContext)} for testing. + * When using @{@link org.apache.beam.sdk.testing.TestPipeline} any provided {@link + * JavaSparkContext} via {@link SparkContextOptions} is dropped. */ public interface SparkContextOptions extends SparkPipelineOptions { diff --git a/runners/spark/src/main/java/org/apache/beam/runners/spark/SparkRunnerDebugger.java b/runners/spark/src/main/java/org/apache/beam/runners/spark/SparkRunnerDebugger.java index 65f83f5e8195..0474ca580a44 100644 --- a/runners/spark/src/main/java/org/apache/beam/runners/spark/SparkRunnerDebugger.java +++ b/runners/spark/src/main/java/org/apache/beam/runners/spark/SparkRunnerDebugger.java @@ -21,6 +21,7 @@ import java.util.concurrent.TimeoutException; import org.apache.beam.runners.core.construction.SplittableParDo; import org.apache.beam.runners.spark.translation.EvaluationContext; +import org.apache.beam.runners.spark.translation.SparkContextFactory; import org.apache.beam.runners.spark.translation.SparkPipelineTranslator; import org.apache.beam.runners.spark.translation.TransformTranslator; import org.apache.beam.runners.spark.translation.streaming.StreamingTransformTranslator; @@ -86,7 +87,8 @@ public SparkPipelineResult run(Pipeline pipeline) { SplittableParDo.convertReadBasedSplittableDoFnsToPrimitiveReadsIfNecessary(pipeline); } - JavaSparkContext jsc = new JavaSparkContext("local[1]", "Debug_Pipeline"); + JavaSparkContext jsc = + SparkContextFactory.getSparkContext(pipeline.getOptions().as(SparkPipelineOptions.class)); JavaStreamingContext jssc = new JavaStreamingContext(jsc, new org.apache.spark.streaming.Duration(1000)); @@ -107,7 +109,7 @@ public SparkPipelineResult run(Pipeline pipeline) { pipeline.traverseTopologically(visitor); - jsc.stop(); + SparkContextFactory.stopSparkContext(jsc); 
String debugString = visitor.getDebugString(); LOG.info("Translated Native Spark pipeline:\n" + debugString); diff --git a/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/SparkContextFactory.java b/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/SparkContextFactory.java index 61cf3afed9ca..9f9465ccde8f 100644 --- a/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/SparkContextFactory.java +++ b/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/SparkContextFactory.java @@ -17,6 +17,9 @@ */ package org.apache.beam.runners.spark.translation; +import static org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Preconditions.checkNotNull; + +import javax.annotation.Nullable; import org.apache.beam.runners.spark.SparkContextOptions; import org.apache.beam.runners.spark.SparkPipelineOptions; import org.apache.beam.runners.spark.coders.SparkRunnerKryoRegistrator; @@ -25,80 +28,121 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -/** The Spark context factory. */ -@SuppressWarnings({ - "nullness" // TODO(https://issues.apache.org/jira/browse/BEAM-10402) -}) public final class SparkContextFactory { private static final Logger LOG = LoggerFactory.getLogger(SparkContextFactory.class); /** * If the property {@code beam.spark.test.reuseSparkContext} is set to {@code true} then the Spark * context will be reused for beam pipelines. This property should only be enabled for tests. + * + * @deprecated This will leak your SparkContext, any attempt to create a new SparkContext later + * will fail. Please use {@link #setProvidedSparkContext(JavaSparkContext)} / {@link + * #clearProvidedSparkContext()} instead to properly control the lifecycle of your context. 
+ * Alternatively you may also provide a SparkContext using {@link + * SparkContextOptions#setUsesProvidedSparkContext(boolean)} together with {@link + * SparkContextOptions#setProvidedSparkContext(JavaSparkContext)} and close that one + * appropriately. Tests of this module should use {@code SparkContextRule}. */ + @Deprecated public static final String TEST_REUSE_SPARK_CONTEXT = "beam.spark.test.reuseSparkContext"; // Spark allows only one context for JVM so this can be static. - private static JavaSparkContext sparkContext; - private static String sparkMaster; - private static boolean usesProvidedSparkContext; + private static @Nullable JavaSparkContext sparkContext; + + // Remember spark master if TEST_REUSE_SPARK_CONTEXT is enabled. + private static @Nullable String reusableSparkMaster; + + // SparkContext is provided by the user instead of simply reused using TEST_REUSE_SPARK_CONTEXT + private static boolean hasProvidedSparkContext; private SparkContextFactory() {} + /** + * Set an externally managed {@link JavaSparkContext} that will be used if {@link + * SparkContextOptions#getUsesProvidedSparkContext()} is set to {@code true}. + * + *

A Spark context can also be provided using {@link + * SparkContextOptions#setProvidedSparkContext(JavaSparkContext)}. However, it will be dropped + * during serialization potentially leading to confusing behavior. This is particularly the case + * when used in tests with {@link org.apache.beam.sdk.testing.TestPipeline}. + */ + public static synchronized void setProvidedSparkContext(JavaSparkContext providedSparkContext) { + sparkContext = checkNotNull(providedSparkContext); + hasProvidedSparkContext = true; + reusableSparkMaster = null; + } + + public static synchronized void clearProvidedSparkContext() { + hasProvidedSparkContext = false; + sparkContext = null; + } + public static synchronized JavaSparkContext getSparkContext(SparkPipelineOptions options) { SparkContextOptions contextOptions = options.as(SparkContextOptions.class); - usesProvidedSparkContext = contextOptions.getUsesProvidedSparkContext(); - // reuse should be ignored if the context is provided. - if (Boolean.getBoolean(TEST_REUSE_SPARK_CONTEXT) && !usesProvidedSparkContext) { - - // if the context is null or stopped for some reason, re-create it. - if (sparkContext == null || sparkContext.sc().isStopped()) { - sparkContext = createSparkContext(contextOptions); - sparkMaster = options.getSparkMaster(); - } else if (!options.getSparkMaster().equals(sparkMaster)) { - throw new IllegalArgumentException( + if (contextOptions.getUsesProvidedSparkContext()) { + JavaSparkContext jsc = contextOptions.getProvidedSparkContext(); + if (jsc != null) { + setProvidedSparkContext(jsc); + } else if (hasProvidedSparkContext) { + jsc = sparkContext; + } + if (jsc == null) { + throw new IllegalStateException( + "No Spark context was provided. 
Use SparkContextFactor.setProvidedSparkContext to do so."); + } else if (jsc.sc().isStopped()) { + LOG.error("The provided Spark context " + jsc + " was already stopped."); + throw new IllegalStateException("The provided Spark context was already stopped"); + } + LOG.info("Using a provided Spark Context"); + return jsc; + } else if (Boolean.getBoolean(TEST_REUSE_SPARK_CONTEXT)) { + // This is highly discouraged as it leaks the SparkContext without any way to close it. + // Attempting to create any new SparkContext later will fail. + // If the context is null or stopped for some reason, re-create it. + @Nullable JavaSparkContext jsc = sparkContext; + if (jsc == null || jsc.sc().isStopped()) { + sparkContext = jsc = createSparkContext(contextOptions); + reusableSparkMaster = options.getSparkMaster(); + hasProvidedSparkContext = false; + } else if (hasProvidedSparkContext) { + throw new IllegalStateException( + "Usage of provided Spark context is disabled in SparkPipelineOptions."); + } else if (!options.getSparkMaster().equals(reusableSparkMaster)) { + throw new IllegalStateException( String.format( "Cannot reuse spark context " + "with different spark master URL. 
Existing: %s, requested: %s.", - sparkMaster, options.getSparkMaster())); + reusableSparkMaster, options.getSparkMaster())); } - return sparkContext; + return jsc; } else { - return createSparkContext(contextOptions); + JavaSparkContext jsc = createSparkContext(contextOptions); + clearProvidedSparkContext(); // any provided context can't be valid anymore + return jsc; } } public static synchronized void stopSparkContext(JavaSparkContext context) { - if (!Boolean.getBoolean(TEST_REUSE_SPARK_CONTEXT) && !usesProvidedSparkContext) { + if (!Boolean.getBoolean(TEST_REUSE_SPARK_CONTEXT) && !hasProvidedSparkContext) { context.stop(); } } - private static JavaSparkContext createSparkContext(SparkContextOptions contextOptions) { - if (usesProvidedSparkContext) { - LOG.info("Using a provided Spark Context"); - JavaSparkContext jsc = contextOptions.getProvidedSparkContext(); - if (jsc == null || jsc.sc().isStopped()) { - LOG.error("The provided Spark context " + jsc + " was not created or was stopped"); - throw new RuntimeException("The provided Spark context was not created or was stopped"); - } - return jsc; - } else { - LOG.info("Creating a brand new Spark Context."); - SparkConf conf = new SparkConf(); - if (!conf.contains("spark.master")) { - // set master if not set. - conf.setMaster(contextOptions.getSparkMaster()); - } - - if (contextOptions.getFilesToStage() != null && !contextOptions.getFilesToStage().isEmpty()) { - conf.setJars(contextOptions.getFilesToStage().toArray(new String[0])); - } + private static JavaSparkContext createSparkContext(SparkPipelineOptions options) { + LOG.info("Creating a brand new Spark Context."); + SparkConf conf = new SparkConf(); + if (!conf.contains("spark.master")) { + // set master if not set. + conf.setMaster(options.getSparkMaster()); + } - conf.setAppName(contextOptions.getAppName()); - // register immutable collections serializers because the SDK uses them. 
- conf.set("spark.kryo.registrator", SparkRunnerKryoRegistrator.class.getName()); - return new JavaSparkContext(conf); + if (options.getFilesToStage() != null && !options.getFilesToStage().isEmpty()) { + conf.setJars(options.getFilesToStage().toArray(new String[0])); } + + conf.setAppName(options.getAppName()); + // register immutable collections serializers because the SDK uses them. + conf.set("spark.kryo.registrator", SparkRunnerKryoRegistrator.class.getName()); + return new JavaSparkContext(conf); } } diff --git a/runners/spark/src/test/java/org/apache/beam/runners/spark/CacheTest.java b/runners/spark/src/test/java/org/apache/beam/runners/spark/CacheTest.java index 8209d4302717..861e13a4208f 100644 --- a/runners/spark/src/test/java/org/apache/beam/runners/spark/CacheTest.java +++ b/runners/spark/src/test/java/org/apache/beam/runners/spark/CacheTest.java @@ -25,11 +25,9 @@ import java.util.List; import org.apache.beam.runners.spark.translation.Dataset; import org.apache.beam.runners.spark.translation.EvaluationContext; -import org.apache.beam.runners.spark.translation.SparkContextFactory; import org.apache.beam.runners.spark.translation.TransformTranslator; import org.apache.beam.sdk.Pipeline; import org.apache.beam.sdk.coders.Coder; -import org.apache.beam.sdk.options.PipelineOptionsFactory; import org.apache.beam.sdk.transforms.Count; import org.apache.beam.sdk.transforms.Create; import org.apache.beam.sdk.transforms.Create.Values; @@ -39,7 +37,7 @@ import org.apache.beam.sdk.transforms.View; import org.apache.beam.sdk.values.PCollection; import org.apache.beam.sdk.values.PCollectionView; -import org.apache.spark.api.java.JavaSparkContext; +import org.junit.ClassRule; import org.junit.Test; /** Tests of {@link Dataset#cache(String, Coder)}} scenarios. 
*/ @@ -48,13 +46,15 @@ }) public class CacheTest { + @ClassRule public static SparkContextRule contextRule = new SparkContextRule(); + /** * Test checks how the cache candidates map is populated by the runner when evaluating the * pipeline. */ @Test public void cacheCandidatesUpdaterTest() { - SparkPipelineOptions options = createOptions(); + SparkPipelineOptions options = contextRule.createPipelineOptions(); Pipeline pipeline = Pipeline.create(options); PCollection pCollection = pipeline.apply(Create.of("foo", "bar")); @@ -80,8 +80,8 @@ public void processElement(ProcessContext processContext) { }) .withSideInputs(view)); - JavaSparkContext jsc = SparkContextFactory.getSparkContext(options); - EvaluationContext ctxt = new EvaluationContext(jsc, pipeline, options); + EvaluationContext ctxt = + new EvaluationContext(contextRule.getSparkContext(), pipeline, options); SparkRunner.CacheVisitor cacheVisitor = new SparkRunner.CacheVisitor(new TransformTranslator.Translator(), ctxt); pipeline.traverseTopologically(cacheVisitor); @@ -91,15 +91,15 @@ public void processElement(ProcessContext processContext) { @Test public void shouldCacheTest() { - SparkPipelineOptions options = createOptions(); + SparkPipelineOptions options = contextRule.createPipelineOptions(); options.setCacheDisabled(true); Pipeline pipeline = Pipeline.create(options); Values valuesTransform = Create.of("foo", "bar"); PCollection pCollection = mock(PCollection.class); - JavaSparkContext jsc = SparkContextFactory.getSparkContext(options); - EvaluationContext ctxt = new EvaluationContext(jsc, pipeline, options); + EvaluationContext ctxt = + new EvaluationContext(contextRule.getSparkContext(), pipeline, options); ctxt.getCacheCandidates().put(pCollection, 2L); assertFalse(ctxt.shouldCache(valuesTransform, pCollection)); @@ -110,11 +110,4 @@ public void shouldCacheTest() { GroupByKey gbkTransform = GroupByKey.create(); assertFalse(ctxt.shouldCache(gbkTransform, pCollection)); } - - private 
SparkPipelineOptions createOptions() { - SparkPipelineOptions options = - PipelineOptionsFactory.create().as(TestSparkPipelineOptions.class); - options.setRunner(TestSparkRunner.class); - return options; - } } diff --git a/runners/spark/src/test/java/org/apache/beam/runners/spark/GlobalWatermarkHolderTest.java b/runners/spark/src/test/java/org/apache/beam/runners/spark/GlobalWatermarkHolderTest.java index 7bcff9875db6..a4dc6afd9c45 100644 --- a/runners/spark/src/test/java/org/apache/beam/runners/spark/GlobalWatermarkHolderTest.java +++ b/runners/spark/src/test/java/org/apache/beam/runners/spark/GlobalWatermarkHolderTest.java @@ -20,13 +20,12 @@ import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.core.IsEqual.equalTo; -import org.apache.beam.runners.spark.translation.SparkContextFactory; import org.apache.beam.runners.spark.util.GlobalWatermarkHolder; import org.apache.beam.runners.spark.util.GlobalWatermarkHolder.SparkWatermarks; -import org.apache.beam.sdk.options.PipelineOptionsFactory; import org.apache.beam.sdk.testing.RegexMatcher; import org.joda.time.Duration; import org.joda.time.Instant; +import org.junit.ClassRule; import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; @@ -34,27 +33,21 @@ /** A test suite for the propagation of watermarks in the Spark runner. */ public class GlobalWatermarkHolderTest { + // Watermark holder requires valid SparkEnv + @ClassRule public static SparkContextRule contextRule = new SparkContextRule(); + @Rule public ClearWatermarksRule clearWatermarksRule = new ClearWatermarksRule(); @Rule public ExpectedException thrown = ExpectedException.none(); - @Rule public ReuseSparkContextRule reuseContext = ReuseSparkContextRule.yes(); - - // only needed in-order to get context from the SparkContextFactory. 
- private static final SparkPipelineOptions options = - PipelineOptionsFactory.create().as(SparkPipelineOptions.class); - private static final String INSTANT_PATTERN = "[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{3}Z"; @Test public void testLowHighWatermarksAdvance() { - Instant instant = new Instant(0); // low == high. - SparkContextFactory.getSparkContext(options); - GlobalWatermarkHolder.add( 1, new SparkWatermarks( @@ -98,7 +91,7 @@ public void testLowHighWatermarksAdvance() { @Test public void testSynchronizedTimeMonotonic() { Instant instant = new Instant(0); - SparkContextFactory.getSparkContext(options); + GlobalWatermarkHolder.add( 1, new SparkWatermarks( @@ -119,7 +112,7 @@ public void testSynchronizedTimeMonotonic() { @Test public void testMultiSource() { Instant instant = new Instant(0); - SparkContextFactory.getSparkContext(options); + GlobalWatermarkHolder.add( 1, new SparkWatermarks( diff --git a/runners/spark/src/test/java/org/apache/beam/runners/spark/ProvidedSparkContextTest.java b/runners/spark/src/test/java/org/apache/beam/runners/spark/ProvidedSparkContextTest.java index 4a57ade09cb5..0ef6bb0e078c 100644 --- a/runners/spark/src/test/java/org/apache/beam/runners/spark/ProvidedSparkContextTest.java +++ b/runners/spark/src/test/java/org/apache/beam/runners/spark/ProvidedSparkContextTest.java @@ -18,13 +18,12 @@ package org.apache.beam.runners.spark; import static org.junit.Assert.assertFalse; -import static org.junit.Assert.fail; +import static org.junit.Assert.assertThrows; import org.apache.beam.runners.spark.examples.WordCount; import org.apache.beam.sdk.Pipeline; import org.apache.beam.sdk.PipelineResult; import org.apache.beam.sdk.coders.StringUtf8Coder; -import org.apache.beam.sdk.options.PipelineOptionsFactory; import org.apache.beam.sdk.testing.PAssert; import org.apache.beam.sdk.testing.TestPipeline; import org.apache.beam.sdk.transforms.Create; @@ -32,10 +31,18 @@ import org.apache.beam.sdk.values.PCollection; import 
org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableList; import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableSet; -import org.apache.spark.api.java.JavaSparkContext; +import org.junit.ClassRule; +import org.junit.FixMethodOrder; import org.junit.Test; +import org.junit.runners.MethodSorters; -/** Provided Spark Context tests. */ +/** + * Provided Spark Context tests. + * + *

Note: These tests are run sequentially ordered by their name to reuse the Spark context and + * speed up testing. + */ +@FixMethodOrder(MethodSorters.NAME_ASCENDING) public class ProvidedSparkContextTest { private static final String[] WORDS_ARRAY = { "hi there", "hi", "hi sue bob", @@ -47,72 +54,50 @@ public class ProvidedSparkContextTest { private static final String PROVIDED_CONTEXT_EXCEPTION = "The provided Spark context was not created or was stopped"; + @ClassRule + public static SparkContextOptionsRule contextOptionsRule = new SparkContextOptionsRule(); + /** Provide a context and call pipeline run. */ @Test - public void testWithProvidedContext() throws Exception { - JavaSparkContext jsc = new JavaSparkContext("local[*]", "Existing_Context"); - testWithValidProvidedContext(jsc); + public void testAWithProvidedContext() throws Exception { + Pipeline p = createPipeline(); + PipelineResult result = p.run(); // Run test from pipeline + result.waitUntilFinish(); + TestPipeline.verifyPAssertsSucceeded(p, result); // A provided context must not be stopped after execution - assertFalse(jsc.sc().isStopped()); - jsc.stop(); + assertFalse(contextOptionsRule.getSparkContext().sc().isStopped()); } - /** Provide a context and call pipeline run. */ + /** A SparkRunner with a stopped provided Spark context cannot run pipelines. */ @Test - public void testWithNullContext() throws Exception { - testWithInvalidContext(null); + public void testBWithStoppedProvidedContext() { + // Stop the provided Spark context + contextOptionsRule.getSparkContext().sc().stop(); + assertThrows( + PROVIDED_CONTEXT_EXCEPTION, + RuntimeException.class, + () -> createPipeline().run().waitUntilFinish()); } - /** A SparkRunner with a stopped provided Spark context cannot run pipelines. */ + /** A SparkRunner with a null provided Spark context cannot run pipelines. 
*/ @Test - public void testWithStoppedProvidedContext() throws Exception { - JavaSparkContext jsc = new JavaSparkContext("local[*]", "Existing_Context"); - // Stop the provided Spark context directly - jsc.stop(); - testWithInvalidContext(jsc); + public void testCWithNullContext() { + contextOptionsRule.getOptions().setProvidedSparkContext(null); + assertThrows( + PROVIDED_CONTEXT_EXCEPTION, + RuntimeException.class, + () -> createPipeline().run().waitUntilFinish()); } - private void testWithValidProvidedContext(JavaSparkContext jsc) throws Exception { - SparkContextOptions options = getSparkContextOptions(jsc); - - Pipeline p = Pipeline.create(options); + private Pipeline createPipeline() { + Pipeline p = Pipeline.create(contextOptionsRule.getOptions()); PCollection inputWords = p.apply(Create.of(WORDS).withCoder(StringUtf8Coder.of())); PCollection output = inputWords .apply(new WordCount.CountWords()) .apply(MapElements.via(new WordCount.FormatAsTextFn())); - - PAssert.that(output).containsInAnyOrder(EXPECTED_COUNT_SET); - // Run test from pipeline - PipelineResult result = p.run(); - - TestPipeline.verifyPAssertsSucceeded(p, result); - } - - private void testWithInvalidContext(JavaSparkContext jsc) { - SparkContextOptions options = getSparkContextOptions(jsc); - - Pipeline p = Pipeline.create(options); - PCollection inputWords = p.apply(Create.of(WORDS).withCoder(StringUtf8Coder.of())); - inputWords - .apply(new WordCount.CountWords()) - .apply(MapElements.via(new WordCount.FormatAsTextFn())); - - try { - p.run().waitUntilFinish(); - fail("Should throw an exception when The provided Spark context is null or stopped"); - } catch (RuntimeException e) { - assert e.getMessage().contains(PROVIDED_CONTEXT_EXCEPTION); - } - } - - private static SparkContextOptions getSparkContextOptions(JavaSparkContext jsc) { - final SparkContextOptions options = PipelineOptionsFactory.as(SparkContextOptions.class); - options.setRunner(TestSparkRunner.class); - 
options.setUsesProvidedSparkContext(true); - options.setProvidedSparkContext(jsc); - options.setEnableSparkMetricSinks(false); - return options; + PAssert.that(output).containsInAnyOrder(EXPECTED_COUNT_SET); + return p; } } diff --git a/runners/spark/src/test/java/org/apache/beam/runners/spark/ReuseSparkContextRule.java b/runners/spark/src/test/java/org/apache/beam/runners/spark/SparkContextOptionsRule.java similarity index 58% rename from runners/spark/src/test/java/org/apache/beam/runners/spark/ReuseSparkContextRule.java rename to runners/spark/src/test/java/org/apache/beam/runners/spark/SparkContextOptionsRule.java index 54b77448f78a..2f424cd7ca40 100644 --- a/runners/spark/src/test/java/org/apache/beam/runners/spark/ReuseSparkContextRule.java +++ b/runners/spark/src/test/java/org/apache/beam/runners/spark/SparkContextOptionsRule.java @@ -17,28 +17,27 @@ */ package org.apache.beam.runners.spark; -import org.apache.beam.runners.spark.translation.SparkContextFactory; -import org.junit.rules.ExternalResource; +import javax.annotation.Nullable; +import org.apache.beam.sdk.values.KV; -/** Explicitly set {@link org.apache.spark.SparkContext} to be reused (or not) in tests. */ -public class ReuseSparkContextRule extends ExternalResource { +public class SparkContextOptionsRule extends SparkContextRule { - private final boolean reuse; + private @Nullable SparkContextOptions contextOptions = null; - private ReuseSparkContextRule(boolean reuse) { - this.reuse = reuse; - } - - public static ReuseSparkContextRule no() { - return new ReuseSparkContextRule(false); - } - - public static ReuseSparkContextRule yes() { - return new ReuseSparkContextRule(true); + public SparkContextOptionsRule(KV... 
sparkConfig) { + super(sparkConfig); } @Override protected void before() throws Throwable { - System.setProperty(SparkContextFactory.TEST_REUSE_SPARK_CONTEXT, Boolean.toString(reuse)); + super.before(); + contextOptions = createPipelineOptions(); + } + + public SparkContextOptions getOptions() { + if (contextOptions == null) { + throw new IllegalStateException("SparkContextOptions not available"); + } + return contextOptions; } } diff --git a/runners/spark/src/test/java/org/apache/beam/runners/spark/SparkContextRule.java b/runners/spark/src/test/java/org/apache/beam/runners/spark/SparkContextRule.java new file mode 100644 index 000000000000..caa7d8f6814b --- /dev/null +++ b/runners/spark/src/test/java/org/apache/beam/runners/spark/SparkContextRule.java @@ -0,0 +1,90 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.beam.runners.spark; + +import static java.util.stream.Collectors.toMap; + +import java.io.Serializable; +import java.util.Arrays; +import java.util.Map; +import javax.annotation.Nullable; +import org.apache.beam.runners.spark.translation.SparkContextFactory; +import org.apache.beam.sdk.options.PipelineOptions; +import org.apache.beam.sdk.testing.TestPipeline; +import org.apache.beam.sdk.values.KV; +import org.apache.spark.SparkConf; +import org.apache.spark.api.java.JavaSparkContext; +import org.junit.rules.ExternalResource; +import org.junit.runner.Description; +import org.junit.runners.model.Statement; + +public class SparkContextRule extends ExternalResource implements Serializable { + private transient SparkConf sparkConf; + private transient @Nullable JavaSparkContext sparkContext = null; + + public SparkContextRule(String sparkMaster, Map sparkConfig) { + sparkConf = new SparkConf(); + sparkConfig.forEach(sparkConf::set); + sparkConf.setMaster(sparkMaster); + } + + public SparkContextRule(KV... sparkConfig) { + this("local", sparkConfig); + } + + public SparkContextRule(String sparkMaster, KV... 
sparkConfig) { + this(sparkMaster, Arrays.stream(sparkConfig).collect(toMap(KV::getKey, KV::getValue))); + } + + public JavaSparkContext getSparkContext() { + if (sparkContext == null) { + throw new IllegalStateException("SparkContext not available"); + } + return sparkContext; + } + + public SparkContextOptions createPipelineOptions() { + return configure(TestPipeline.testingPipelineOptions()); + } + + public SparkContextOptions configure(PipelineOptions opts) { + SparkContextOptions ctxOpts = opts.as(SparkContextOptions.class); + ctxOpts.setUsesProvidedSparkContext(true); + ctxOpts.setProvidedSparkContext(getSparkContext()); + return ctxOpts; + } + + @Override + public Statement apply(Statement base, Description description) { + sparkConf.setAppName(description.getDisplayName()); + return super.apply(base, description); + } + + @Override + protected void before() throws Throwable { + sparkContext = new JavaSparkContext(sparkConf); + SparkContextFactory.setProvidedSparkContext(sparkContext); + } + + @Override + protected void after() { + SparkContextFactory.clearProvidedSparkContext(); + getSparkContext().stop(); + sparkContext = null; + } +} diff --git a/runners/spark/src/test/java/org/apache/beam/runners/spark/SparkPipelineStateTest.java b/runners/spark/src/test/java/org/apache/beam/runners/spark/SparkPipelineStateTest.java index b48f553d8fc5..61e111234331 100644 --- a/runners/spark/src/test/java/org/apache/beam/runners/spark/SparkPipelineStateTest.java +++ b/runners/spark/src/test/java/org/apache/beam/runners/spark/SparkPipelineStateTest.java @@ -20,190 +20,137 @@ import static org.hamcrest.CoreMatchers.instanceOf; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.is; -import static org.junit.Assert.fail; +import static org.joda.time.Duration.millis; +import static org.junit.Assert.assertThrows; import java.io.Serializable; +import javax.annotation.Nullable; import org.apache.beam.runners.spark.io.CreateStream; import 
org.apache.beam.sdk.Pipeline; +import org.apache.beam.sdk.Pipeline.PipelineExecutionException; import org.apache.beam.sdk.PipelineResult; import org.apache.beam.sdk.coders.StringUtf8Coder; -import org.apache.beam.sdk.options.PipelineOptionsFactory; import org.apache.beam.sdk.transforms.Create; -import org.apache.beam.sdk.transforms.DoFn; import org.apache.beam.sdk.transforms.MapElements; import org.apache.beam.sdk.transforms.PTransform; -import org.apache.beam.sdk.transforms.ParDo; import org.apache.beam.sdk.transforms.SimpleFunction; import org.apache.beam.sdk.values.PBegin; import org.apache.beam.sdk.values.PCollection; -import org.joda.time.Duration; -import org.junit.Rule; +import org.junit.ClassRule; import org.junit.Test; -import org.junit.rules.TestName; /** This suite tests that various scenarios result in proper states of the pipeline. */ public class SparkPipelineStateTest implements Serializable { - private static class MyCustomException extends RuntimeException { + @ClassRule public static SparkContextRule contextRule = new SparkContextRule(); - MyCustomException(final String message) { + private static class CustomException extends RuntimeException { + CustomException(final String message) { super(message); } } - private final transient SparkPipelineOptions options = - PipelineOptionsFactory.as(SparkPipelineOptions.class); - - @Rule public transient TestName testName = new TestName(); - - private static final String FAILED_THE_BATCH_INTENTIONALLY = "Failed the batch intentionally"; - - private ParDo.SingleOutput printParDo(final String prefix) { - return ParDo.of( - new DoFn() { - - @ProcessElement - public void processElement(final ProcessContext c) { - System.out.println(prefix + " " + c.element()); - } - }); - } - - private PTransform> getValues(final SparkPipelineOptions options) { - final boolean doNotSyncWithWatermark = false; - return options.isStreaming() - ? 
CreateStream.of(StringUtf8Coder.of(), Duration.millis(1), doNotSyncWithWatermark) - .nextBatch("one", "two") - : Create.of("one", "two"); + private static class FailAlways extends SimpleFunction { + @Override + public String apply(final String input) { + throw new CustomException(FAILED_THE_BATCH_INTENTIONALLY); + } } - private SparkPipelineOptions getStreamingOptions() { - options.setRunner(SparkRunner.class); - options.setStreaming(true); - return options; - } + private static final String FAILED_THE_BATCH_INTENTIONALLY = "Failed the batch intentionally"; - private SparkPipelineOptions getBatchOptions() { + private Pipeline createPipeline( + boolean isStreaming, @Nullable SimpleFunction mapFun) { + SparkContextOptions options = contextRule.createPipelineOptions(); options.setRunner(SparkRunner.class); - options.setStreaming(false); // explicit because options is reused throughout the test. - return options; - } + options.setStreaming(isStreaming); - private Pipeline getPipeline(final SparkPipelineOptions options) { - - final Pipeline pipeline = Pipeline.create(options); - final String name = testName.getMethodName() + "(isStreaming=" + options.isStreaming() + ")"; - - pipeline.apply(getValues(options)).setCoder(StringUtf8Coder.of()).apply(printParDo(name)); + Pipeline pipeline = Pipeline.create(options); + PTransform> values = + isStreaming + ? 
CreateStream.of(StringUtf8Coder.of(), millis(1), false).nextBatch("one", "two") + : Create.of("one", "two"); + PCollection collection = pipeline.apply(values).setCoder(StringUtf8Coder.of()); + if (mapFun != null) { + collection.apply(MapElements.via(mapFun)); + } return pipeline; } - private void testFailedPipeline(final SparkPipelineOptions options) throws Exception { - - SparkPipelineResult result = null; - - try { - final Pipeline pipeline = Pipeline.create(options); - pipeline - .apply(getValues(options)) - .setCoder(StringUtf8Coder.of()) - .apply( - MapElements.via( - new SimpleFunction() { - - @Override - public String apply(final String input) { - throw new MyCustomException(FAILED_THE_BATCH_INTENTIONALLY); - } - })); - - result = (SparkPipelineResult) pipeline.run(); - result.waitUntilFinish(); - } catch (final Exception e) { - assertThat(e, instanceOf(Pipeline.PipelineExecutionException.class)); - assertThat(e.getCause(), instanceOf(MyCustomException.class)); - assertThat(e.getCause().getMessage(), is(FAILED_THE_BATCH_INTENTIONALLY)); - assertThat(result.getState(), is(PipelineResult.State.FAILED)); - result.cancel(); - return; - } + private void testFailedPipeline(boolean isStreaming) throws Exception { + Pipeline pipeline = createPipeline(isStreaming, new FailAlways()); + SparkPipelineResult result = (SparkPipelineResult) pipeline.run(); - fail("An injected failure did not affect the pipeline as expected."); + PipelineExecutionException e = + assertThrows(PipelineExecutionException.class, () -> result.waitUntilFinish()); + assertThat(e.getCause(), instanceOf(CustomException.class)); + assertThat(e.getCause().getMessage(), is(FAILED_THE_BATCH_INTENTIONALLY)); + assertThat(result.getState(), is(PipelineResult.State.FAILED)); + result.cancel(); } - private void testTimeoutPipeline(final SparkPipelineOptions options) throws Exception { - - final Pipeline pipeline = getPipeline(options); - - final SparkPipelineResult result = (SparkPipelineResult) 
pipeline.run(); - - result.waitUntilFinish(Duration.millis(1)); + private void testWaitUntilFinishedTimeout(boolean isStreaming) throws Exception { + Pipeline pipeline = createPipeline(isStreaming, null); + SparkPipelineResult result = (SparkPipelineResult) pipeline.run(); + result.waitUntilFinish(millis(1)); + // Wait timed out, pipeline is still running assertThat(result.getState(), is(PipelineResult.State.RUNNING)); - result.cancel(); } - private void testCanceledPipeline(final SparkPipelineOptions options) throws Exception { - - final Pipeline pipeline = getPipeline(options); - - final SparkPipelineResult result = (SparkPipelineResult) pipeline.run(); - + private void testCanceledPipeline(boolean isStreaming) throws Exception { + Pipeline pipeline = createPipeline(isStreaming, null); + SparkPipelineResult result = (SparkPipelineResult) pipeline.run(); result.cancel(); - assertThat(result.getState(), is(PipelineResult.State.CANCELLED)); } - private void testRunningPipeline(final SparkPipelineOptions options) throws Exception { - - final Pipeline pipeline = getPipeline(options); - - final SparkPipelineResult result = (SparkPipelineResult) pipeline.run(); - + private void testRunningPipeline(boolean isStreaming) throws Exception { + Pipeline pipeline = createPipeline(isStreaming, null); + SparkPipelineResult result = (SparkPipelineResult) pipeline.run(); assertThat(result.getState(), is(PipelineResult.State.RUNNING)); - result.cancel(); } @Test public void testStreamingPipelineRunningState() throws Exception { - testRunningPipeline(getStreamingOptions()); + testRunningPipeline(true); } @Test public void testBatchPipelineRunningState() throws Exception { - testRunningPipeline(getBatchOptions()); + testRunningPipeline(false); } @Test public void testStreamingPipelineCanceledState() throws Exception { - testCanceledPipeline(getStreamingOptions()); + testCanceledPipeline(true); } @Test public void testBatchPipelineCanceledState() throws Exception { - 
testCanceledPipeline(getBatchOptions()); + testCanceledPipeline(false); } @Test public void testStreamingPipelineFailedState() throws Exception { - testFailedPipeline(getStreamingOptions()); + testFailedPipeline(true); } @Test public void testBatchPipelineFailedState() throws Exception { - testFailedPipeline(getBatchOptions()); + testFailedPipeline(false); } @Test - public void testStreamingPipelineTimeoutState() throws Exception { - testTimeoutPipeline(getStreamingOptions()); + public void testStreamingPipelineWaitTimeout() throws Exception { + testWaitUntilFinishedTimeout(true); } @Test - public void testBatchPipelineTimeoutState() throws Exception { - testTimeoutPipeline(getBatchOptions()); + public void testBatchPipelineWaitTimeout() throws Exception { + testWaitUntilFinishedTimeout(false); } } diff --git a/runners/spark/src/test/java/org/apache/beam/runners/spark/SparkRunnerDebuggerTest.java b/runners/spark/src/test/java/org/apache/beam/runners/spark/SparkRunnerDebuggerTest.java index c9bb83dd0c34..91ef5a426401 100644 --- a/runners/spark/src/test/java/org/apache/beam/runners/spark/SparkRunnerDebuggerTest.java +++ b/runners/spark/src/test/java/org/apache/beam/runners/spark/SparkRunnerDebuggerTest.java @@ -49,6 +49,7 @@ import org.apache.kafka.common.serialization.StringSerializer; import org.hamcrest.Matchers; import org.joda.time.Duration; +import org.junit.ClassRule; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; @@ -57,11 +58,12 @@ @RunWith(JUnit4.class) public class SparkRunnerDebuggerTest { + @ClassRule public static SparkContextRule contextRule = new SparkContextRule("local[1]"); + @Test public void debugBatchPipeline() { - PipelineOptions options = PipelineOptionsFactory.create().as(TestSparkPipelineOptions.class); + PipelineOptions options = contextRule.configure(PipelineOptionsFactory.create()); options.setRunner(SparkRunnerDebugger.class); - Pipeline pipeline = Pipeline.create(options); PCollection lines = @@ 
-105,11 +107,9 @@ public void debugBatchPipeline() { @Test public void debugStreamingPipeline() { - TestSparkPipelineOptions options = - PipelineOptionsFactory.create().as(TestSparkPipelineOptions.class); - options.setForceStreaming(true); + PipelineOptions options = contextRule.configure(PipelineOptionsFactory.create()); options.setRunner(SparkRunnerDebugger.class); - + options.as(TestSparkPipelineOptions.class).setForceStreaming(true); Pipeline pipeline = Pipeline.create(options); KafkaIO.Read read = diff --git a/runners/spark/src/test/java/org/apache/beam/runners/spark/aggregators/metrics/sink/SparkMetricsSinkTest.java b/runners/spark/src/test/java/org/apache/beam/runners/spark/aggregators/metrics/sink/SparkMetricsSinkTest.java index 7439ebfeb726..f21168336d02 100644 --- a/runners/spark/src/test/java/org/apache/beam/runners/spark/aggregators/metrics/sink/SparkMetricsSinkTest.java +++ b/runners/spark/src/test/java/org/apache/beam/runners/spark/aggregators/metrics/sink/SparkMetricsSinkTest.java @@ -21,7 +21,7 @@ import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.nullValue; -import org.apache.beam.runners.spark.ReuseSparkContextRule; +import org.apache.beam.runners.spark.SparkContextRule; import org.apache.beam.runners.spark.SparkPipelineOptions; import org.apache.beam.runners.spark.StreamingTest; import org.apache.beam.runners.spark.examples.WordCount; @@ -39,6 +39,7 @@ import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableSet; import org.joda.time.Duration; import org.joda.time.Instant; +import org.junit.ClassRule; import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; @@ -49,9 +50,13 @@ * streaming modes. 
*/ public class SparkMetricsSinkTest { + @ClassRule public static SparkContextRule contextRule = new SparkContextRule(); + @Rule public ExternalResource inMemoryMetricsSink = new InMemoryMetricsSinkRule(); - @Rule public final TestPipeline pipeline = TestPipeline.create(); - @Rule public final transient ReuseSparkContextRule noContextResue = ReuseSparkContextRule.no(); + + @Rule + public final TestPipeline pipeline = + TestPipeline.fromOptions(contextRule.createPipelineOptions()); private static final ImmutableList WORDS = ImmutableList.of("hi there", "hi", "hi sue bob", "hi sue", "", "bob hi"); @@ -68,7 +73,7 @@ public void testInBatchMode() throws Exception { .apply(new WordCount.CountWords()) .apply(MapElements.via(new WordCount.FormatAsTextFn())); PAssert.that(output).containsInAnyOrder(EXPECTED_COUNTS); - pipeline.run(); + pipeline.run().waitUntilFinish(); assertThat(InMemoryMetrics.valueOf("emptyLines"), is(1d)); } diff --git a/runners/spark/src/test/java/org/apache/beam/runners/spark/coders/SparkRunnerKryoRegistratorTest.java b/runners/spark/src/test/java/org/apache/beam/runners/spark/coders/SparkRunnerKryoRegistratorTest.java index 390b127871a4..fcc7fee27063 100644 --- a/runners/spark/src/test/java/org/apache/beam/runners/spark/coders/SparkRunnerKryoRegistratorTest.java +++ b/runners/spark/src/test/java/org/apache/beam/runners/spark/coders/SparkRunnerKryoRegistratorTest.java @@ -17,96 +17,84 @@ */ package org.apache.beam.runners.spark.coders; +import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; import com.esotericsoftware.kryo.Kryo; import com.esotericsoftware.kryo.Registration; -import org.apache.beam.runners.spark.SparkContextOptions; -import org.apache.beam.runners.spark.SparkPipelineOptions; -import org.apache.beam.runners.spark.TestSparkPipelineOptions; -import org.apache.beam.runners.spark.TestSparkRunner; +import org.apache.beam.runners.spark.SparkContextRule; +import 
org.apache.beam.runners.spark.coders.SparkRunnerKryoRegistratorTest.Others.TestKryoRegistrator; import org.apache.beam.runners.spark.io.MicrobatchSource; import org.apache.beam.sdk.Pipeline; -import org.apache.beam.sdk.options.PipelineOptionsFactory; import org.apache.beam.sdk.transforms.Create; -import org.apache.spark.SparkConf; -import org.apache.spark.api.java.JavaSparkContext; +import org.apache.beam.sdk.values.KV; +import org.junit.ClassRule; import org.junit.Test; +import org.junit.experimental.runners.Enclosed; +import org.junit.runner.RunWith; -/** Testing of beam registrar. */ +/** + * Testing of beam registrar. Note: There can only be one Spark context at a time. For that reason + * tests requiring a different context have to be forked using separate test classes. + */ @SuppressWarnings({ "rawtypes" // TODO(https://issues.apache.org/jira/browse/BEAM-10556) }) +@RunWith(Enclosed.class) public class SparkRunnerKryoRegistratorTest { - @Test - public void testKryoRegistration() { - SparkConf conf = new SparkConf(); - conf.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer"); - conf.set("spark.kryo.registrator", WrapperKryoRegistrator.class.getName()); - runSimplePipelineWithSparkContext(conf); - assertTrue( - "WrapperKryoRegistrator wasn't initiated, probably KryoSerializer is not set", - WrapperKryoRegistrator.wasInitiated); - } - - @Test - public void testDefaultSerializerNotCallingKryo() { - SparkConf conf = new SparkConf(); - conf.set("spark.kryo.registrator", KryoRegistratorIsNotCalled.class.getName()); - runSimplePipelineWithSparkContext(conf); - } + public static class WithKryoSerializer { - private void runSimplePipelineWithSparkContext(SparkConf conf) { - SparkPipelineOptions options = - PipelineOptionsFactory.create().as(TestSparkPipelineOptions.class); - options.setRunner(TestSparkRunner.class); + @ClassRule + public static SparkContextRule contextRule = + new SparkContextRule( + KV.of("spark.serializer", 
"org.apache.spark.serializer.KryoSerializer"), + KV.of("spark.kryo.registrator", TestKryoRegistrator.class.getName())); - conf.set("spark.master", "local"); - conf.setAppName("test"); - - JavaSparkContext javaSparkContext = new JavaSparkContext(conf); - options.setUsesProvidedSparkContext(true); - options.as(SparkContextOptions.class).setProvidedSparkContext(javaSparkContext); - Pipeline p = Pipeline.create(options); - p.apply(Create.of("a")); // some operation to trigger pipeline construction - p.run().waitUntilFinish(); - javaSparkContext.stop(); + @Test + public void testKryoRegistration() { + TestKryoRegistrator.wasInitiated = false; + runSimplePipelineWithSparkContextOptions(contextRule); + assertTrue(TestKryoRegistrator.wasInitiated); + } } - /** - * A {@link SparkRunnerKryoRegistrator} that fails if called. Use only for test purposes. Needs to - * be public for serialization. - */ - public static class KryoRegistratorIsNotCalled extends SparkRunnerKryoRegistrator { + public static class WithoutKryoSerializer { + @ClassRule + public static SparkContextRule contextRule = + new SparkContextRule(KV.of("spark.kryo.registrator", TestKryoRegistrator.class.getName())); - @Override - public void registerClasses(Kryo kryo) { - fail( - "Default spark.serializer is JavaSerializer" - + " so spark.kryo.registrator shouldn't be called"); + @Test + public void testDefaultSerializerNotCallingKryo() { + TestKryoRegistrator.wasInitiated = false; + runSimplePipelineWithSparkContextOptions(contextRule); + assertFalse(TestKryoRegistrator.wasInitiated); } } - /** - * A {@link SparkRunnerKryoRegistrator} that registers an internal class to validate - * KryoSerialization resolution. Use only for test purposes. Needs to be public for serialization. 
- */ - public static class WrapperKryoRegistrator extends SparkRunnerKryoRegistrator { + // Hide TestKryoRegistrator from the Enclosed JUnit runner + interface Others { + class TestKryoRegistrator extends SparkRunnerKryoRegistrator { - static boolean wasInitiated = false; + static boolean wasInitiated = false; - public WrapperKryoRegistrator() { - wasInitiated = true; - } + public TestKryoRegistrator() { + wasInitiated = true; + } - @Override - public void registerClasses(Kryo kryo) { - super.registerClasses(kryo); - Registration registration = kryo.getRegistration(MicrobatchSource.class); - com.esotericsoftware.kryo.Serializer kryoSerializer = registration.getSerializer(); - assertTrue(kryoSerializer instanceof StatelessJavaSerializer); + @Override + public void registerClasses(Kryo kryo) { + super.registerClasses(kryo); + // verify serializer for MicrobatchSource + Registration registration = kryo.getRegistration(MicrobatchSource.class); + assertTrue(registration.getSerializer() instanceof StatelessJavaSerializer); + } } } + + private static void runSimplePipelineWithSparkContextOptions(SparkContextRule context) { + Pipeline p = Pipeline.create(context.createPipelineOptions()); + p.apply(Create.of("a")); // some operation to trigger pipeline construction + p.run().waitUntilFinish(); + } } diff --git a/runners/spark/src/test/java/org/apache/beam/runners/spark/metrics/SparkMetricsPusherTest.java b/runners/spark/src/test/java/org/apache/beam/runners/spark/metrics/SparkMetricsPusherTest.java index bc4f7507ca05..aa7ab616ecd8 100644 --- a/runners/spark/src/test/java/org/apache/beam/runners/spark/metrics/SparkMetricsPusherTest.java +++ b/runners/spark/src/test/java/org/apache/beam/runners/spark/metrics/SparkMetricsPusherTest.java @@ -21,7 +21,6 @@ import static org.hamcrest.Matchers.is; import org.apache.beam.runners.core.metrics.TestMetricsSink; -import org.apache.beam.runners.spark.ReuseSparkContextRule; import org.apache.beam.runners.spark.SparkPipelineOptions; 
import org.apache.beam.runners.spark.StreamingTest; import org.apache.beam.runners.spark.io.CreateStream; @@ -52,8 +51,6 @@ public class SparkMetricsPusherTest { private static final Logger LOG = LoggerFactory.getLogger(SparkMetricsPusherTest.class); private static final String COUNTER_NAME = "counter"; - @Rule public final transient ReuseSparkContextRule noContextResue = ReuseSparkContextRule.no(); - @Rule public final TestPipeline pipeline = TestPipeline.create(); private Duration batchDuration() { diff --git a/runners/spark/src/test/java/org/apache/beam/runners/spark/structuredstreaming/SparkSessionRule.java b/runners/spark/src/test/java/org/apache/beam/runners/spark/structuredstreaming/SparkSessionRule.java new file mode 100644 index 000000000000..f68df83ac07d --- /dev/null +++ b/runners/spark/src/test/java/org/apache/beam/runners/spark/structuredstreaming/SparkSessionRule.java @@ -0,0 +1,73 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.beam.runners.spark.structuredstreaming; + +import static java.util.stream.Collectors.toMap; + +import java.io.Serializable; +import java.util.Arrays; +import java.util.Map; +import javax.annotation.Nullable; +import org.apache.beam.sdk.values.KV; +import org.apache.spark.sql.SparkSession; +import org.junit.rules.ExternalResource; +import org.junit.runner.Description; +import org.junit.runners.model.Statement; + +public class SparkSessionRule extends ExternalResource implements Serializable { + private transient SparkSession.Builder builder; + private transient @Nullable SparkSession session = null; + + public SparkSessionRule(String sparkMaster, Map sparkConfig) { + builder = SparkSession.builder(); + sparkConfig.forEach(builder::config); + builder.master(sparkMaster); + } + + public SparkSessionRule(KV... sparkConfig) { + this("local", sparkConfig); + } + + public SparkSessionRule(String sparkMaster, KV... sparkConfig) { + this(sparkMaster, Arrays.stream(sparkConfig).collect(toMap(KV::getKey, KV::getValue))); + } + + public SparkSession getSession() { + if (session == null) { + throw new IllegalStateException("SparkSession not available"); + } + return session; + } + + @Override + public Statement apply(Statement base, Description description) { + builder.appName(description.getDisplayName()); + return super.apply(base, description); + } + + @Override + protected void before() throws Throwable { + session = builder.getOrCreate(); + } + + @Override + protected void after() { + getSession().stop(); + session = null; + } +} diff --git a/runners/spark/src/test/java/org/apache/beam/runners/spark/structuredstreaming/translation/helpers/EncoderHelpersTest.java b/runners/spark/src/test/java/org/apache/beam/runners/spark/structuredstreaming/translation/helpers/EncoderHelpersTest.java index 54db4fae1c24..3151a5fe956f 100644 --- a/runners/spark/src/test/java/org/apache/beam/runners/spark/structuredstreaming/translation/helpers/EncoderHelpersTest.java 
+++ b/runners/spark/src/test/java/org/apache/beam/runners/spark/structuredstreaming/translation/helpers/EncoderHelpersTest.java @@ -21,9 +21,10 @@ import java.util.Arrays; import java.util.List; +import org.apache.beam.runners.spark.structuredstreaming.SparkSessionRule; import org.apache.beam.sdk.coders.VarIntCoder; import org.apache.spark.sql.Dataset; -import org.apache.spark.sql.SparkSession; +import org.junit.ClassRule; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; @@ -32,16 +33,15 @@ @RunWith(JUnit4.class) public class EncoderHelpersTest { + @ClassRule public static SparkSessionRule sessionRule = new SparkSessionRule(); + @Test public void beamCoderToSparkEncoderTest() { - SparkSession sparkSession = - SparkSession.builder() - .appName("beamCoderToSparkEncoderTest") - .master("local[4]") - .getOrCreate(); List data = Arrays.asList(1, 2, 3); Dataset dataset = - sparkSession.createDataset(data, EncoderHelpers.fromBeamCoder(VarIntCoder.of())); + sessionRule + .getSession() + .createDataset(data, EncoderHelpers.fromBeamCoder(VarIntCoder.of())); assertEquals(data, dataset.collectAsList()); } } diff --git a/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/streaming/CreateStreamTest.java b/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/streaming/CreateStreamTest.java index 2a40b45136a9..8fde97456227 100644 --- a/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/streaming/CreateStreamTest.java +++ b/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/streaming/CreateStreamTest.java @@ -29,7 +29,6 @@ import java.io.IOException; import java.io.Serializable; import java.util.concurrent.atomic.AtomicInteger; -import org.apache.beam.runners.spark.ReuseSparkContextRule; import org.apache.beam.runners.spark.SparkPipelineOptions; import org.apache.beam.runners.spark.StreamingTest; import org.apache.beam.runners.spark.io.CreateStream; @@ -86,7 +85,6 @@ 
public class CreateStreamTest implements Serializable { @Rule public final transient TestPipeline p = TestPipeline.create(); - @Rule public final transient ReuseSparkContextRule noContextResue = ReuseSparkContextRule.no(); @Rule public final transient ExpectedException thrown = ExpectedException.none(); @Test diff --git a/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/streaming/ResumeFromCheckpointStreamingTest.java b/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/streaming/ResumeFromCheckpointStreamingTest.java index 6c107c474b66..e7f45d99e513 100644 --- a/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/streaming/ResumeFromCheckpointStreamingTest.java +++ b/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/streaming/ResumeFromCheckpointStreamingTest.java @@ -30,7 +30,6 @@ import java.util.Map; import java.util.Properties; import java.util.concurrent.TimeUnit; -import org.apache.beam.runners.spark.ReuseSparkContextRule; import org.apache.beam.runners.spark.SparkPipelineResult; import org.apache.beam.runners.spark.TestSparkPipelineOptions; import org.apache.beam.runners.spark.TestSparkRunner; @@ -84,7 +83,6 @@ import org.junit.AfterClass; import org.junit.Before; import org.junit.BeforeClass; -import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TemporaryFolder; @@ -112,8 +110,6 @@ public class ResumeFromCheckpointStreamingTest implements Serializable { private transient TemporaryFolder temporaryFolder; - @Rule public final transient ReuseSparkContextRule noContextReuse = ReuseSparkContextRule.no(); - @BeforeClass public static void setup() throws IOException { EMBEDDED_ZOOKEEPER.startup(); diff --git a/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/streaming/SparkCoGroupByKeyStreamingTest.java 
b/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/streaming/SparkCoGroupByKeyStreamingTest.java index 407b07ac0d6d..fc4e427e2f30 100644 --- a/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/streaming/SparkCoGroupByKeyStreamingTest.java +++ b/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/streaming/SparkCoGroupByKeyStreamingTest.java @@ -22,7 +22,6 @@ import static org.junit.Assert.assertEquals; import static org.junit.Assert.fail; -import org.apache.beam.runners.spark.ReuseSparkContextRule; import org.apache.beam.runners.spark.SparkPipelineOptions; import org.apache.beam.runners.spark.StreamingTest; import org.apache.beam.runners.spark.io.CreateStream; @@ -53,8 +52,6 @@ public class SparkCoGroupByKeyStreamingTest { private static final TupleTag INPUT1_TAG = new TupleTag<>("input1"); private static final TupleTag INPUT2_TAG = new TupleTag<>("input2"); - @Rule public final transient ReuseSparkContextRule noContextResue = ReuseSparkContextRule.no(); - @Rule public final TestPipeline pipeline = TestPipeline.create(); private Duration batchDuration() { diff --git a/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/streaming/TrackStreamingSourcesTest.java b/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/streaming/TrackStreamingSourcesTest.java index 5ede41aaedaf..79bc8a0a71a2 100644 --- a/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/streaming/TrackStreamingSourcesTest.java +++ b/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/streaming/TrackStreamingSourcesTest.java @@ -22,17 +22,15 @@ import static org.hamcrest.core.IsEqual.equalTo; import java.util.List; -import org.apache.beam.runners.spark.ReuseSparkContextRule; +import org.apache.beam.runners.spark.SparkContextRule; import org.apache.beam.runners.spark.SparkPipelineOptions; import org.apache.beam.runners.spark.SparkRunner; import 
org.apache.beam.runners.spark.io.CreateStream; import org.apache.beam.runners.spark.translation.Dataset; import org.apache.beam.runners.spark.translation.EvaluationContext; -import org.apache.beam.runners.spark.translation.SparkContextFactory; import org.apache.beam.runners.spark.translation.TransformTranslator; import org.apache.beam.sdk.Pipeline; import org.apache.beam.sdk.coders.VarIntCoder; -import org.apache.beam.sdk.options.PipelineOptionsFactory; import org.apache.beam.sdk.runners.AppliedPTransform; import org.apache.beam.sdk.runners.TransformHierarchy; import org.apache.beam.sdk.transforms.DoFn; @@ -46,7 +44,7 @@ import org.apache.spark.streaming.api.java.JavaStreamingContext; import org.joda.time.Duration; import org.junit.Before; -import org.junit.Rule; +import org.junit.ClassRule; import org.junit.Test; /** @@ -58,10 +56,7 @@ }) public class TrackStreamingSourcesTest { - @Rule public ReuseSparkContextRule reuseContext = ReuseSparkContextRule.yes(); - - private static final transient SparkPipelineOptions options = - PipelineOptionsFactory.create().as(SparkPipelineOptions.class); + @ClassRule public static SparkContextRule sparkContext = new SparkContextRule(); @Before public void before() { @@ -70,8 +65,9 @@ public void before() { @Test public void testTrackSingle() { + SparkPipelineOptions options = sparkContext.createPipelineOptions(); options.setRunner(SparkRunner.class); - JavaSparkContext jsc = SparkContextFactory.getSparkContext(options); + JavaSparkContext jsc = sparkContext.getSparkContext(); JavaStreamingContext jssc = new JavaStreamingContext( jsc, new org.apache.spark.streaming.Duration(options.getBatchIntervalMillis())); @@ -90,8 +86,9 @@ public void testTrackSingle() { @Test public void testTrackFlattened() { + SparkPipelineOptions options = sparkContext.createPipelineOptions(); options.setRunner(SparkRunner.class); - JavaSparkContext jsc = SparkContextFactory.getSparkContext(options); + JavaSparkContext jsc = sparkContext.getSparkContext(); 
JavaStreamingContext jssc = new JavaStreamingContext( jsc, new org.apache.spark.streaming.Duration(options.getBatchIntervalMillis())); @@ -135,7 +132,7 @@ private StreamingSourceTracker( Pipeline pipeline, Class transformClassToAssert, Integer... expected) { - this.ctxt = new EvaluationContext(jssc.sparkContext(), pipeline, options, jssc); + this.ctxt = new EvaluationContext(jssc.sparkContext(), pipeline, pipeline.getOptions(), jssc); this.evaluator = new SparkRunner.Evaluator( new StreamingTransformTranslator.Translator(new TransformTranslator.Translator()), diff --git a/sdks/go.mod b/sdks/go.mod index 4a91dff296b3..b0533c3cda99 100644 --- a/sdks/go.mod +++ b/sdks/go.mod @@ -23,64 +23,65 @@ module github.com/apache/beam/sdks/v2 go 1.18 require ( - cloud.google.com/go/bigquery v1.28.0 + cloud.google.com/go/bigquery v1.32.0 cloud.google.com/go/datastore v1.6.0 - cloud.google.com/go/pubsub v1.18.0 - cloud.google.com/go/storage v1.21.0 + cloud.google.com/go/pubsub v1.21.1 + cloud.google.com/go/storage v1.22.0 github.com/docker/go-connections v0.4.0 github.com/go-sql-driver/mysql v1.6.0 github.com/golang/protobuf v1.5.2 // TODO(danoliveira): Fully replace this with google.golang.org/protobuf - github.com/google/go-cmp v0.5.7 + github.com/google/go-cmp v0.5.8 github.com/google/uuid v1.3.0 - github.com/lib/pq v1.10.4 + github.com/lib/pq v1.10.5 github.com/linkedin/goavro v2.1.0+incompatible github.com/nightlyone/lockfile v1.0.0 github.com/proullon/ramsql v0.0.0-20211120092837-c8d0a408b939 - github.com/spf13/cobra v1.3.0 - github.com/testcontainers/testcontainers-go v0.12.0 + github.com/spf13/cobra v1.4.0 + github.com/testcontainers/testcontainers-go v0.13.0 github.com/xitongsys/parquet-go v1.6.2 github.com/xitongsys/parquet-go-source v0.0.0-20220315005136-aec0fe3e777c - golang.org/x/net v0.0.0-20220225172249-27dd8689420f - golang.org/x/oauth2 v0.0.0-20220223155221-ee480838109b - golang.org/x/sys v0.0.0-20220227234510-4e6760a101f9 + golang.org/x/net 
v0.0.0-20220412020605-290c469a71a5 + golang.org/x/oauth2 v0.0.0-20220411215720-9780585627b5 + golang.org/x/sys v0.0.0-20220412211240-33da011f77ad golang.org/x/text v0.3.7 - google.golang.org/api v0.70.0 - google.golang.org/genproto v0.0.0-20220302033224-9aa15565e42a - google.golang.org/grpc v1.44.0 - google.golang.org/protobuf v1.27.1 + google.golang.org/api v0.76.0 + google.golang.org/genproto v0.0.0-20220426171045-31bebdecfb46 + google.golang.org/grpc v1.45.0 + google.golang.org/protobuf v1.28.0 gopkg.in/retry.v1 v1.0.3 gopkg.in/yaml.v2 v2.4.0 ) require ( cloud.google.com/go v0.100.2 // indirect - cloud.google.com/go/compute v1.5.0 // indirect - cloud.google.com/go/iam v0.2.0 // indirect - github.com/Azure/go-ansiterm v0.0.0-20170929234023-d6e3b3328b78 // indirect - github.com/Microsoft/go-winio v0.4.17-0.20210211115548-6eac466e5fa3 // indirect - github.com/Microsoft/hcsshim v0.8.16 // indirect + cloud.google.com/go/compute v1.6.0 // indirect + cloud.google.com/go/iam v0.3.0 // indirect + github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1 // indirect + github.com/Microsoft/go-winio v0.4.17 // indirect + github.com/Microsoft/hcsshim v0.8.23 // indirect github.com/apache/arrow/go/arrow v0.0.0-20200730104253-651201b0f516 // indirect github.com/apache/thrift v0.14.2 // indirect - github.com/cenkalti/backoff v2.2.1+incompatible // indirect - github.com/containerd/cgroups v0.0.0-20210114181951-8a68de567b68 // indirect - github.com/containerd/containerd v1.5.0-beta.4 // indirect + github.com/cenkalti/backoff/v4 v4.1.2 // indirect + github.com/containerd/cgroups v1.0.1 // indirect + github.com/containerd/containerd v1.5.9 // indirect github.com/docker/distribution v2.7.1+incompatible // indirect github.com/docker/docker v20.10.11+incompatible // indirect github.com/docker/go-units v0.4.0 // indirect github.com/gogo/protobuf v1.3.2 // indirect github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect github.com/golang/snappy v0.0.4 // 
indirect - github.com/googleapis/gax-go/v2 v2.1.1 // indirect + github.com/googleapis/gax-go/v2 v2.3.0 // indirect + github.com/googleapis/go-type-adapters v1.0.0 // indirect github.com/inconshreveable/mousetrap v1.0.0 // indirect github.com/klauspost/compress v1.13.1 // indirect github.com/kr/text v0.2.0 // indirect github.com/magiconair/properties v1.8.5 // indirect github.com/moby/sys/mount v0.2.0 // indirect github.com/moby/sys/mountinfo v0.5.0 // indirect - github.com/moby/term v0.0.0-20201216013528-df9cb8a40635 // indirect + github.com/moby/term v0.0.0-20210619224110-3f7ff695adc6 // indirect github.com/morikuni/aec v0.0.0-20170113033406-39771216ff4c // indirect github.com/opencontainers/go-digest v1.0.0 // indirect - github.com/opencontainers/image-spec v1.0.1 // indirect + github.com/opencontainers/image-spec v1.0.2 // indirect github.com/opencontainers/runc v1.0.2 // indirect github.com/pierrec/lz4/v4 v4.1.8 // indirect github.com/pkg/errors v0.9.1 // indirect @@ -88,7 +89,8 @@ require ( github.com/spf13/pflag v1.0.5 // indirect go.opencensus.io v0.23.0 // indirect golang.org/x/sync v0.0.0-20210220032951-036812b2e83c // indirect - golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 // indirect + golang.org/x/xerrors v0.0.0-20220411194840-2f41105eb62f // indirect google.golang.org/appengine v1.6.7 // indirect gopkg.in/linkedin/goavro.v1 v1.0.5 // indirect + gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b // indirect ) diff --git a/sdks/go.sum b/sdks/go.sum index 3b53065cec7d..2e621336b67c 100644 --- a/sdks/go.sum +++ b/sdks/go.sum @@ -26,7 +26,6 @@ cloud.google.com/go v0.90.0/go.mod h1:kRX0mNRHe0e2rC6oNakvwQqzyDmg57xJ+SZU1eT2aD cloud.google.com/go v0.93.3/go.mod h1:8utlLll2EF5XMAV15woO4lSbWQlk8rer9aLOfLh7+YI= cloud.google.com/go v0.94.1/go.mod h1:qAlAugsXlC+JWO+Bke5vCtc9ONxjQT3drlTTnAplMW4= cloud.google.com/go v0.97.0/go.mod h1:GF7l59pYBVlXQIBLx3a761cZ41F9bBH3JUlihCt2Udc= -cloud.google.com/go v0.98.0/go.mod 
h1:ua6Ush4NALrHk5QXDWnjvZHN93OuF0HfuEPq9I1X0cM= cloud.google.com/go v0.99.0/go.mod h1:w0Xx2nLzqWJPuozYQX+hFfCSI8WioryfRDzkoI/Y2ZA= cloud.google.com/go v0.100.1/go.mod h1:fs4QogzfH5n2pBXBP9vRiU+eCny7lD2vmFZy79Iuw1U= cloud.google.com/go v0.100.2 h1:t9Iw5QH5v4XtlEQaCtUY7x6sCABps8sW0acw7e2WQ6Y= @@ -37,46 +36,44 @@ cloud.google.com/go/bigquery v1.4.0/go.mod h1:S8dzgnTigyfTmLBfrtrhyYhwRxG72rYxvf cloud.google.com/go/bigquery v1.5.0/go.mod h1:snEHRnqQbz117VIFhE8bmtwIDY80NLUZUMb4Nv6dBIg= cloud.google.com/go/bigquery v1.7.0/go.mod h1://okPTzCYNXSlb24MZs83e2Do+h+VXtc4gLoIoXIAPc= cloud.google.com/go/bigquery v1.8.0/go.mod h1:J5hqkt3O0uAFnINi6JXValWIb1v0goeZM77hZzJN/fQ= -cloud.google.com/go/bigquery v1.28.0 h1:xmLwUenH57OZKR6MZQGapBaMY8t7XvzgWm8RjiIXmIo= -cloud.google.com/go/bigquery v1.28.0/go.mod h1:/Lo9aP2BX/WDiOvHiXX/UQWH9vLDFRABeyqFA+fjkqE= +cloud.google.com/go/bigquery v1.32.0 h1:0OMQYCp03Ff9B5OeVY8GGUlOC99s93bjM+c5xS0H5gs= +cloud.google.com/go/bigquery v1.32.0/go.mod h1:hAfV1647X+/fGUqeVVdKW+HfYtT5UCjOZsuOydOSH4M= cloud.google.com/go/compute v0.1.0/go.mod h1:GAesmwr110a34z04OlxYkATPBEfVhkymfTBXtfbBFow= -cloud.google.com/go/compute v1.2.0/go.mod h1:xlogom/6gr8RJGBe7nT2eGsQYAFUbbv8dbC29qE3Xmw= cloud.google.com/go/compute v1.3.0/go.mod h1:cCZiE1NHEtai4wiufUhW8I8S1JKkAnhnQJWM7YD99wM= -cloud.google.com/go/compute v1.5.0 h1:b1zWmYuuHz7gO9kDcM/EpHGr06UgsYNRpNJzI2kFiLM= cloud.google.com/go/compute v1.5.0/go.mod h1:9SMHyhJlzhlkJqrPAc839t2BZFTSk6Jdj6mkzQJeu0M= -cloud.google.com/go/datacatalog v1.1.0 h1:sXyBbqz2Y+9hIOqEUepAA2OpUIgOts2oe92EScwYxEg= -cloud.google.com/go/datacatalog v1.1.0/go.mod h1:XiA5mWWnIFIcwFmsZGLOZRyX4AhXdh2SYpcQJMmkHiA= +cloud.google.com/go/compute v1.6.0 h1:XdQIN5mdPTSBVwSIVDuY5e8ZzVAccsHvD3qTEz4zIps= +cloud.google.com/go/compute v1.6.0/go.mod h1:T29tfhtVbq1wvAPo0E3+7vhgmkOYeXjhFvz/FMzPu0s= +cloud.google.com/go/datacatalog v1.3.0 h1:3llKXv7cC1acsWjvWmG0NQQkYVSVgunMSfVk7h6zz8Q= +cloud.google.com/go/datacatalog v1.3.0/go.mod 
h1:g9svFY6tuR+j+hrTw3J2dNcmI0dzmSiyOzm8kpLq0a0= cloud.google.com/go/datastore v1.0.0/go.mod h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE= cloud.google.com/go/datastore v1.1.0/go.mod h1:umbIZjpQpHh4hmRpGhH4tLFup+FVzqBi1b3c64qFpCk= cloud.google.com/go/datastore v1.6.0 h1:wZaHIqu1tebvGRYhVgcfNX6jN2q638OGO23JyJckxuI= cloud.google.com/go/datastore v1.6.0/go.mod h1:q3ZJj1GMQRdU0OCv5XXpCqfLqHHZnI5zcumkvuYDmHI= -cloud.google.com/go/firestore v1.6.1/go.mod h1:asNXNOzBdyVQmEU+ggO8UPodTkEVFW5Qx+rwHnAz+EY= cloud.google.com/go/iam v0.1.0/go.mod h1:vcUNEa0pEm0qRVpmWepWaFMIAI8/hjB9mO8rNCJtF6c= -cloud.google.com/go/iam v0.1.1/go.mod h1:CKqrcnI/suGpybEHxZ7BMehL0oA4LpdyJdUlTl9jVMw= -cloud.google.com/go/iam v0.2.0 h1:Ouq6qif4mZdXkb3SiFMpxvu0JQJB1Yid9TsZ23N6hg8= -cloud.google.com/go/iam v0.2.0/go.mod h1:BCK88+tmjAwnZYfOSizmKCTSFjJHCa18t3DpdGEY13Y= -cloud.google.com/go/kms v1.1.0 h1:1yc4rLqCkVDS9Zvc7m+3mJ47kw0Uo5Q5+sMjcmUVUeM= -cloud.google.com/go/kms v1.1.0/go.mod h1:WdbppnCDMDpOvoYBMn1+gNmOeEoZYqAv+HeuKARGCXI= +cloud.google.com/go/iam v0.3.0 h1:exkAomrVUuzx9kWFI1wm3KI0uoDeUFPB4kKGzx6x+Gc= +cloud.google.com/go/iam v0.3.0/go.mod h1:XzJPvDayI+9zsASAFO68Hk07u3z+f+JrT2xXNdp4bnY= +cloud.google.com/go/kms v1.4.0 h1:iElbfoE61VeLhnZcGOltqL8HIly8Nhbe5t6JlH9GXjo= +cloud.google.com/go/kms v1.4.0/go.mod h1:fajBHndQ+6ubNw6Ss2sSd+SWvjL26RNo/dr7uxsnnOA= cloud.google.com/go/pubsub v1.0.1/go.mod h1:R0Gpsv3s54REJCy4fxDixWD93lHJMoZTyQ2kNxGRt3I= cloud.google.com/go/pubsub v1.1.0/go.mod h1:EwwdRX2sKPjnvnqCa270oGRyludottCI76h+R3AArQw= cloud.google.com/go/pubsub v1.2.0/go.mod h1:jhfEVHT8odbXTkndysNHCcx0awwzvfOlguIAii9o8iA= cloud.google.com/go/pubsub v1.3.1/go.mod h1:i+ucay31+CNRpDW4Lu78I4xXG+O1r/MAHgjpRVR+TSU= -cloud.google.com/go/pubsub v1.18.0 h1:f5HKj3RCujL2zm2cT/Op1mHG1bIDj2fYQ2NDbiAuNAU= -cloud.google.com/go/pubsub v1.18.0/go.mod h1:Vg6zS1lnXBFiQuHMntX4Id4mKIdsVRjKED4nCVMdMJ8= +cloud.google.com/go/pubsub v1.21.1 h1:ghu6wlm6WouITmmuwkxGG+6vNRXDaPdAjqLcRdsw3EQ= +cloud.google.com/go/pubsub 
v1.21.1/go.mod h1:u3XGeMBOBCIQLcxNzy14Svz88ZFS8vI250uDgIAQDSQ= cloud.google.com/go/storage v1.0.0/go.mod h1:IhtSnM/ZTZV8YYJWCY8RULGVqBDmpoyjwiyrjsg+URw= cloud.google.com/go/storage v1.5.0/go.mod h1:tpKbwo567HUNpVclU5sGELwQWBDZ8gh0ZeosJ0Rtdos= cloud.google.com/go/storage v1.6.0/go.mod h1:N7U0C8pVQ/+NIKOBQyamJIeKQKkZ+mxpohlUTyfDhBk= cloud.google.com/go/storage v1.8.0/go.mod h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RXyy7KQOVs= cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9ullr3+Kg0= -cloud.google.com/go/storage v1.18.2/go.mod h1:AiIj7BWXyhO5gGVmYJ+S8tbkCx3yb0IMjua8Aw4naVM= -cloud.google.com/go/storage v1.21.0 h1:HwnT2u2D309SFDHQII6m18HlrCi3jAXhUMTLOWXYH14= -cloud.google.com/go/storage v1.21.0/go.mod h1:XmRlxkgPjlBONznT2dDUU/5XlpU2OjMnKuqnZI01LAA= +cloud.google.com/go/storage v1.22.0 h1:NUV0NNp9nkBuW66BFRLuMgldN60C57ET3dhbwLIYio8= +cloud.google.com/go/storage v1.22.0/go.mod h1:GbaLEoMqbVm6sx3Z0R++gSiBlgMv6yUi2q1DeGFKQgE= dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU= github.com/Azure/azure-pipeline-go v0.2.3/go.mod h1:x841ezTBIMG6O3lAcl8ATHnsOPVl2bqk7S3ta6S6u4k= github.com/Azure/azure-sdk-for-go v16.2.1+incompatible/go.mod h1:9XXNKU+eRnpl9moKnB4QOLf1HestfXbmab5FXxiDBjc= github.com/Azure/azure-storage-blob-go v0.14.0/go.mod h1:SMqIBi+SuiQH32bvyjngEewEeXoPfKMgWlBDaYf6fck= -github.com/Azure/go-ansiterm v0.0.0-20170929234023-d6e3b3328b78 h1:w+iIsaOQNcT7OZ575w+acHgRric5iCyQh+xv+KJ4HB8= github.com/Azure/go-ansiterm v0.0.0-20170929234023-d6e3b3328b78/go.mod h1:LmzpDX56iTiv29bbRTIsUNlaFfuhWRQBWjQdVyAevI8= +github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1 h1:UQHMgLO+TxOElx5B5HZ4hJQsoJ/PvUvKRhJHDQXO8P8= +github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1/go.mod h1:xomTg63KZ2rFqZQzSB4Vz2SUXa1BpHTVz9L5PTmPC4E= github.com/Azure/go-autorest v10.8.1+incompatible/go.mod h1:r+4oMnoxhatjLLJ6zxSWATqVooLgysK6ZNox3g/xq24= github.com/Azure/go-autorest 
v14.2.0+incompatible/go.mod h1:r+4oMnoxhatjLLJ6zxSWATqVooLgysK6ZNox3g/xq24= github.com/Azure/go-autorest/autorest v0.11.1/go.mod h1:JFgpikqFJ/MleTTxwepExTKnFUKKszPS8UavbQYUMuw= @@ -91,24 +88,24 @@ github.com/Azure/go-autorest/logger v0.2.1/go.mod h1:T9E3cAhj2VqvPOtCYAvby9aBXkZ github.com/Azure/go-autorest/tracing v0.6.0/go.mod h1:+vhtPC754Xsa23ID7GlGsrdKBpUA79WCAKPPZVC2DeU= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= -github.com/DataDog/datadog-go v3.2.0+incompatible/go.mod h1:LButxg5PwREeZtORoXG3tL4fMGNddJ+vMq1mwgfaqoQ= -github.com/Flaque/filet v0.0.0-20201012163910-45f684403088 h1:PnnQln5IGbhLeJOi6hVs+lCeF+B1dRfFKPGXUAez0Ww= -github.com/Flaque/filet v0.0.0-20201012163910-45f684403088/go.mod h1:TK+jB3mBs+8ZMWhU5BqZKnZWJ1MrLo8etNVg51ueTBo= github.com/Microsoft/go-winio v0.4.11/go.mod h1:VhR8bwka0BXejwEJY73c50VrPtXAaKcyvVC4A4RozmA= github.com/Microsoft/go-winio v0.4.14/go.mod h1:qXqCSQ3Xa7+6tgxaGTIe4Kpcdsi+P8jBhyzoq1bpyYA= github.com/Microsoft/go-winio v0.4.15-0.20190919025122-fc70bd9a86b5/go.mod h1:tTuCMEN+UleMWgg9dVx4Hu52b1bJo+59jBh3ajtinzw= github.com/Microsoft/go-winio v0.4.16-0.20201130162521-d1ffc52c7331/go.mod h1:XB6nPKklQyQ7GC9LdcBEcBl8PF76WugXOPRXwdLnMv0= github.com/Microsoft/go-winio v0.4.16/go.mod h1:XB6nPKklQyQ7GC9LdcBEcBl8PF76WugXOPRXwdLnMv0= -github.com/Microsoft/go-winio v0.4.17-0.20210211115548-6eac466e5fa3 h1:mw6pDQqv38/WGF1cO/jF5t/jyAJ2yi7CmtFLLO5tGFI= github.com/Microsoft/go-winio v0.4.17-0.20210211115548-6eac466e5fa3/go.mod h1:JPGBdM1cNvN/6ISo+n8V5iA4v8pBzdOpzfwIujj1a84= +github.com/Microsoft/go-winio v0.4.17-0.20210324224401-5516f17a5958/go.mod h1:JPGBdM1cNvN/6ISo+n8V5iA4v8pBzdOpzfwIujj1a84= +github.com/Microsoft/go-winio v0.4.17 h1:iT12IBVClFevaf8PuVyi3UmZOVh4OqnaLxDTW2O6j3w= +github.com/Microsoft/go-winio v0.4.17/go.mod h1:JPGBdM1cNvN/6ISo+n8V5iA4v8pBzdOpzfwIujj1a84= 
github.com/Microsoft/hcsshim v0.8.6/go.mod h1:Op3hHsoHPAvb6lceZHDtd9OkTew38wNoXnJs8iY7rUg= github.com/Microsoft/hcsshim v0.8.7-0.20190325164909-8abdbb8205e4/go.mod h1:Op3hHsoHPAvb6lceZHDtd9OkTew38wNoXnJs8iY7rUg= github.com/Microsoft/hcsshim v0.8.7/go.mod h1:OHd7sQqRFrYd3RmSgbgji+ctCwkbq2wbEYNSzOYtcBQ= github.com/Microsoft/hcsshim v0.8.9/go.mod h1:5692vkUqntj1idxauYlpoINNKeqCiG6Sg38RRsjT5y8= github.com/Microsoft/hcsshim v0.8.14/go.mod h1:NtVKoYxQuTLx6gEq0L96c9Ju4JbRJ4nY2ow3VK6a9Lg= github.com/Microsoft/hcsshim v0.8.15/go.mod h1:x38A4YbHbdxJtc0sF6oIz+RG0npwSCAvn69iY6URG00= -github.com/Microsoft/hcsshim v0.8.16 h1:8/auA4LFIZFTGrqfKhGBSXwM6/4X1fHa/xniyEHu8ac= github.com/Microsoft/hcsshim v0.8.16/go.mod h1:o5/SZqmR7x9JNKsW3pu+nqHm0MF8vbA+VxGOoXdC600= +github.com/Microsoft/hcsshim v0.8.23 h1:47MSwtKGXet80aIn+7h4YI6fwPmwIghAnsx2aOUrG2M= +github.com/Microsoft/hcsshim v0.8.23/go.mod h1:4zegtUJth7lAvFyc6cH2gGQ5B3OFQim01nnU2M8jKDg= github.com/Microsoft/hcsshim/test v0.0.0-20201218223536-d3e5debf77da/go.mod h1:5hlzMzRKMLyo42nCZ9oml8AdTlq/0cvIaBv6tK1RehU= github.com/Microsoft/hcsshim/test v0.0.0-20210227013316-43a75bb4edd3/go.mod h1:mw7qgWloBUl75W/gVH3cQszUg1+gUITj7D6NY7ywVnY= github.com/NYTimes/gziphandler v0.0.0-20170623195520-56545f4a5d46/go.mod h1:3wb06e3pkSAbeQ52E9H9iFoQsEEwGN64994WTCIhntQ= @@ -127,11 +124,7 @@ github.com/apache/arrow/go/arrow v0.0.0-20200730104253-651201b0f516/go.mod h1:QN github.com/apache/thrift v0.0.0-20181112125854-24918abba929/go.mod h1:cp2SuWMxlEZw2r+iP2GNCdIi4C1qmUzdZFSVb+bacwQ= github.com/apache/thrift v0.14.2 h1:hY4rAyg7Eqbb27GB6gkhUKrRAuc8xRjlNtJq+LseKeY= github.com/apache/thrift v0.14.2/go.mod h1:cp2SuWMxlEZw2r+iP2GNCdIi4C1qmUzdZFSVb+bacwQ= -github.com/armon/circbuf v0.0.0-20150827004946-bbbad097214e/go.mod h1:3U/XgcO3hCbHZ8TKRvWD2dDTCfh9M9ya+I9JpbB7O8o= -github.com/armon/go-metrics v0.0.0-20180917152333-f0300d1749da/go.mod h1:Q73ZrmVTwzkszR9V5SSuryQ31EELlFMUz1kKyl939pY= -github.com/armon/go-metrics v0.3.10/go.mod 
h1:4O98XIr/9W0sxpJ8UaYkvjk10Iff7SnFrb4QAOwNTFc= -github.com/armon/go-radix v0.0.0-20180808171621-7fddfc383310/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8= -github.com/armon/go-radix v1.0.0/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8= +github.com/armon/consul-api v0.0.0-20180202201655-eb2c6b5be1b6/go.mod h1:grANhF5doyWs3UAsr3K4I6qtAmlQcZDesFNEHPZAzj8= github.com/asaskevich/govalidator v0.0.0-20190424111038-f61b66f89f4a/go.mod h1:lB+ZfQJz7igIIfQNfa7Ml4HSf2uFQQRzpGGRXenZAgY= github.com/aws/aws-sdk-go v1.15.11/go.mod h1:mFuSZ37Z9YOHbQEwBWztmVzqXrEkub65tZoCYDt7FT0= github.com/aws/aws-sdk-go v1.30.19/go.mod h1:5zCpMtNQVjRREroY7sYe8lOMRSxkhG6MZveU8YkpAk0= @@ -163,13 +156,12 @@ github.com/buger/jsonparser v0.0.0-20180808090653-f4dd9f5a6b44/go.mod h1:bbYlZJ7 github.com/bugsnag/bugsnag-go v0.0.0-20141110184014-b1d153021fcd/go.mod h1:2oa8nejYd4cQ/b0hMIopN0lCRxU0bueqREvZLWFrtK8= github.com/bugsnag/osext v0.0.0-20130617224835-0dd3f918b21b/go.mod h1:obH5gd0BsqsP2LwDJ9aOkm/6J86V6lyAXCoQWGw3K50= github.com/bugsnag/panicwrap v0.0.0-20151223152923-e2c28503fcd0/go.mod h1:D/8v3kj0zr8ZAKg1AQ6crr+5VwKN5eIywRkfhyM/+dE= -github.com/cenkalti/backoff v2.2.1+incompatible h1:tNowT99t7UNflLxfYYSlKYsBpXdEet03Pg2g16Swow4= -github.com/cenkalti/backoff v2.2.1+incompatible/go.mod h1:90ReRw6GdpyfrHakVjL/QHaoyV4aDUVVkXQJJJ3NXXM= +github.com/cenkalti/backoff/v4 v4.1.1/go.mod h1:scbssz8iZGpm3xbr14ovlUdkxfGXNInqkPWOWmG2CLw= +github.com/cenkalti/backoff/v4 v4.1.2 h1:6Yo7N8UP2K6LWZnW94DLVSSrbobcWdVzAYOisuDPIFo= +github.com/cenkalti/backoff/v4 v4.1.2/go.mod h1:scbssz8iZGpm3xbr14ovlUdkxfGXNInqkPWOWmG2CLw= github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= -github.com/census-instrumentation/opencensus-proto v0.3.0/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc= github.com/cespare/xxhash/v2 v2.1.1/go.mod 
h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= -github.com/cespare/xxhash/v2 v2.1.2/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= github.com/checkpoint-restore/go-criu/v4 v4.1.0/go.mod h1:xUQBLp4RLc5zJtWY++yjOoMoB5lihDt7fai+75m+rGw= github.com/checkpoint-restore/go-criu/v5 v5.0.0/go.mod h1:cfwC0EG7HMUenopBsUf9d89JlCLQIfgVcNsNN0t6T2M= github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI= @@ -178,9 +170,8 @@ github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMn github.com/cilium/ebpf v0.0.0-20200110133405-4032b1d8aae3/go.mod h1:MA5e5Lr8slmEg9bt0VpxxWqJlO4iwu3FBdHUzV7wQVg= github.com/cilium/ebpf v0.0.0-20200702112145-1c8d4c9ef775/go.mod h1:7cR51M8ViRLIdUjrmSXlK9pkrsDlLHbO8jiB8X8JnOc= github.com/cilium/ebpf v0.2.0/go.mod h1:To2CFviqOWL/M0gIMsvSMlqe7em/l1ALkX1PyjrX2Qs= +github.com/cilium/ebpf v0.4.0/go.mod h1:4tRaxcgiL706VnOzHOdBlY8IEAIdxINsQBcU4xJJXRs= github.com/cilium/ebpf v0.6.2/go.mod h1:4tRaxcgiL706VnOzHOdBlY8IEAIdxINsQBcU4xJJXRs= -github.com/circonus-labs/circonus-gometrics v2.3.1+incompatible/go.mod h1:nmEj6Dob7S7YxXgwXpfOuvO54S+tGdZdw9fuRZt25Ag= -github.com/circonus-labs/circonusllhist v0.1.3/go.mod h1:kMXHVDlOchFAehlya5ePtbp5jckzBHf4XRpQvBOLI+I= github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc= github.com/cncf/udpa/go v0.0.0-20200629203442-efcf912fb354/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= @@ -189,23 +180,24 @@ github.com/cncf/udpa/go v0.0.0-20210930031921-04548b0d99d4/go.mod h1:6pvJx4me5XP github.com/cncf/xds/go v0.0.0-20210312221358-fbca930ec8ed/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= github.com/cncf/xds/go v0.0.0-20210805033703-aa0b78936158/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= github.com/cncf/xds/go v0.0.0-20210922020428-25de7278fc84/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= 
-github.com/cncf/xds/go v0.0.0-20211001041855-01bcc9b48dfe/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= github.com/cncf/xds/go v0.0.0-20211011173535-cb28da3451f1/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= -github.com/cncf/xds/go v0.0.0-20211130200136-a8f946100490/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= github.com/cockroachdb/datadriven v0.0.0-20190809214429-80d97fb3cbaa/go.mod h1:zn76sxSg3SzpJ0PPJaLDCu+Bu0Lg3sKTORVIj19EIF8= github.com/colinmarc/hdfs/v2 v2.1.1/go.mod h1:M3x+k8UKKmxtFu++uAZ0OtDU8jR3jnaZIAc6yK4Ue0c= github.com/containerd/aufs v0.0.0-20200908144142-dab0cbea06f4/go.mod h1:nukgQABAEopAHvB6j7cnP5zJ+/3aVcE7hCYqvIwAHyE= github.com/containerd/aufs v0.0.0-20201003224125-76a6863f2989/go.mod h1:AkGGQs9NM2vtYHaUen+NljV0/baGCAPELGm2q9ZXpWU= github.com/containerd/aufs v0.0.0-20210316121734-20793ff83c97/go.mod h1:kL5kd6KM5TzQjR79jljyi4olc1Vrx6XBlcyj3gNv2PU= +github.com/containerd/aufs v1.0.0/go.mod h1:kL5kd6KM5TzQjR79jljyi4olc1Vrx6XBlcyj3gNv2PU= github.com/containerd/btrfs v0.0.0-20201111183144-404b9149801e/go.mod h1:jg2QkJcsabfHugurUvvPhS3E08Oxiuh5W/g1ybB4e0E= github.com/containerd/btrfs v0.0.0-20210316141732-918d888fb676/go.mod h1:zMcX3qkXTAi9GI50+0HOeuV8LU2ryCE/V2vG/ZBiTss= +github.com/containerd/btrfs v1.0.0/go.mod h1:zMcX3qkXTAi9GI50+0HOeuV8LU2ryCE/V2vG/ZBiTss= github.com/containerd/cgroups v0.0.0-20190717030353-c4b9ac5c7601/go.mod h1:X9rLEHIqSf/wfK8NsPqxJmeZgW4pcfzdXITDrUSJ6uI= github.com/containerd/cgroups v0.0.0-20190919134610-bf292b21730f/go.mod h1:OApqhQ4XNSNC13gXIwDjhOQxjWa/NxkwZXJ1EvqT0ko= github.com/containerd/cgroups v0.0.0-20200531161412-0dbf7f05ba59/go.mod h1:pA0z1pT8KYB3TCXK/ocprsh7MAkoW8bZVzPdih9snmM= github.com/containerd/cgroups v0.0.0-20200710171044-318312a37340/go.mod h1:s5q4SojHctfxANBDvMeIaIovkq29IP48TKAxnhYRxvo= github.com/containerd/cgroups v0.0.0-20200824123100-0b889c03f102/go.mod h1:s5q4SojHctfxANBDvMeIaIovkq29IP48TKAxnhYRxvo= -github.com/containerd/cgroups v0.0.0-20210114181951-8a68de567b68 
h1:hkGVFjz+plgr5UfxZUTPFbUFIF/Km6/s+RVRIRHLrrY= github.com/containerd/cgroups v0.0.0-20210114181951-8a68de567b68/go.mod h1:ZJeTFisyysqgcCdecO57Dj79RfL0LNeGiFUqLYQRYLE= +github.com/containerd/cgroups v1.0.1 h1:iJnMvco9XGvKUvNQkv88bE4uJXxRQH18efbKo9w5vHQ= +github.com/containerd/cgroups v1.0.1/go.mod h1:0SJrPIenamHDcZhEcJMNBB85rHcUsw4f25ZfBiPYRkU= github.com/containerd/console v0.0.0-20180822173158-c12b1e7919c1/go.mod h1:Tj/on1eG8kiEhd0+fhSDzsPAFESxzBBvdyEgyryXffw= github.com/containerd/console v0.0.0-20181022165439-0650fd9eeb50/go.mod h1:Tj/on1eG8kiEhd0+fhSDzsPAFESxzBBvdyEgyryXffw= github.com/containerd/console v0.0.0-20191206165004-02ecf6a7291e/go.mod h1:8Pf4gM6VEbTNRIT26AyyU7hxdQU3MvAvxVI0sc00XBE= @@ -219,49 +211,68 @@ github.com/containerd/containerd v1.3.2/go.mod h1:bC6axHOhabU15QhwfG7w5PipXdVtMX github.com/containerd/containerd v1.4.0-beta.2.0.20200729163537-40b22ef07410/go.mod h1:bC6axHOhabU15QhwfG7w5PipXdVtMXFTttgp+kVtyUA= github.com/containerd/containerd v1.4.1/go.mod h1:bC6axHOhabU15QhwfG7w5PipXdVtMXFTttgp+kVtyUA= github.com/containerd/containerd v1.4.3/go.mod h1:bC6axHOhabU15QhwfG7w5PipXdVtMXFTttgp+kVtyUA= +github.com/containerd/containerd v1.4.9/go.mod h1:bC6axHOhabU15QhwfG7w5PipXdVtMXFTttgp+kVtyUA= github.com/containerd/containerd v1.5.0-beta.1/go.mod h1:5HfvG1V2FsKesEGQ17k5/T7V960Tmcumvqn8Mc+pCYQ= github.com/containerd/containerd v1.5.0-beta.3/go.mod h1:/wr9AVtEM7x9c+n0+stptlo/uBBoBORwEx6ardVcmKU= -github.com/containerd/containerd v1.5.0-beta.4 h1:zjz4MOAOFgdBlwid2nNUlJ3YLpVi/97L36lfMYJex60= github.com/containerd/containerd v1.5.0-beta.4/go.mod h1:GmdgZd2zA2GYIBZ0w09ZvgqEq8EfBp/m3lcVZIvPHhI= +github.com/containerd/containerd v1.5.0-rc.0/go.mod h1:V/IXoMqNGgBlabz3tHD2TWDoTJseu1FGOKuoA4nNb2s= +github.com/containerd/containerd v1.5.9 h1:rs6Xg1gtIxaeyG+Smsb/0xaSDu1VgFhOCKBXxMxbsF4= +github.com/containerd/containerd v1.5.9/go.mod h1:fvQqCfadDGga5HZyn3j4+dx56qj2I9YwBrlSdalvJYQ= github.com/containerd/continuity v0.0.0-20190426062206-aaeac12a7ffc/go.mod 
h1:GL3xCUCBDV3CZiTSEKksMWbLE66hEyuu9qyDOOqM47Y= github.com/containerd/continuity v0.0.0-20190815185530-f2a389ac0a02/go.mod h1:GL3xCUCBDV3CZiTSEKksMWbLE66hEyuu9qyDOOqM47Y= github.com/containerd/continuity v0.0.0-20191127005431-f65d91d395eb/go.mod h1:GL3xCUCBDV3CZiTSEKksMWbLE66hEyuu9qyDOOqM47Y= github.com/containerd/continuity v0.0.0-20200710164510-efbc4488d8fe/go.mod h1:cECdGN1O8G9bgKTlLhuPJimka6Xb/Gg7vYzCTNVxhvo= github.com/containerd/continuity v0.0.0-20201208142359-180525291bb7/go.mod h1:kR3BEg7bDFaEddKm54WSmrol1fKWDU1nKYkgrcgZT7Y= -github.com/containerd/continuity v0.0.0-20210208174643-50096c924a4e h1:6JKvHHt396/qabvMhnhUZvWaHZzfVfldxE60TK8YLhg= github.com/containerd/continuity v0.0.0-20210208174643-50096c924a4e/go.mod h1:EXlVlkqNba9rJe3j7w3Xa924itAMLgZH4UD/Q4PExuQ= +github.com/containerd/continuity v0.1.0 h1:UFRRY5JemiAhPZrr/uE0n8fMTLcZsUvySPr1+D7pgr8= +github.com/containerd/continuity v0.1.0/go.mod h1:ICJu0PwR54nI0yPEnJ6jcS+J7CZAUXrLh8lPo2knzsM= github.com/containerd/fifo v0.0.0-20180307165137-3d5202aec260/go.mod h1:ODA38xgv3Kuk8dQz2ZQXpnv/UZZUHUCL7pnLehbXgQI= github.com/containerd/fifo v0.0.0-20190226154929-a9fb20d87448/go.mod h1:ODA38xgv3Kuk8dQz2ZQXpnv/UZZUHUCL7pnLehbXgQI= github.com/containerd/fifo v0.0.0-20200410184934-f15a3290365b/go.mod h1:jPQ2IAeZRCYxpS/Cm1495vGFww6ecHmMk1YJH2Q5ln0= github.com/containerd/fifo v0.0.0-20201026212402-0724c46b320c/go.mod h1:jPQ2IAeZRCYxpS/Cm1495vGFww6ecHmMk1YJH2Q5ln0= github.com/containerd/fifo v0.0.0-20210316144830-115abcc95a1d/go.mod h1:ocF/ME1SX5b1AOlWi9r677YJmCPSwwWnQ9O123vzpE4= +github.com/containerd/fifo v1.0.0/go.mod h1:ocF/ME1SX5b1AOlWi9r677YJmCPSwwWnQ9O123vzpE4= github.com/containerd/go-cni v1.0.1/go.mod h1:+vUpYxKvAF72G9i1WoDOiPGRtQpqsNW/ZHtSlv++smU= +github.com/containerd/go-cni v1.0.2/go.mod h1:nrNABBHzu0ZwCug9Ije8hL2xBCYh/pjfMb1aZGrrohk= github.com/containerd/go-runc v0.0.0-20180907222934-5a6d9f37cfa3/go.mod h1:IV7qH3hrUgRmyYrtgEeGWJfWbgcHL9CSRruz2Vqcph0= github.com/containerd/go-runc 
v0.0.0-20190911050354-e029b79d8cda/go.mod h1:IV7qH3hrUgRmyYrtgEeGWJfWbgcHL9CSRruz2Vqcph0= github.com/containerd/go-runc v0.0.0-20200220073739-7016d3ce2328/go.mod h1:PpyHrqVs8FTi9vpyHwPwiNEGaACDxT/N/pLcvMSRA9g= github.com/containerd/go-runc v0.0.0-20201020171139-16b287bc67d0/go.mod h1:cNU0ZbCgCQVZK4lgG3P+9tn9/PaJNmoDXPpoJhDR+Ok= +github.com/containerd/go-runc v1.0.0/go.mod h1:cNU0ZbCgCQVZK4lgG3P+9tn9/PaJNmoDXPpoJhDR+Ok= github.com/containerd/imgcrypt v1.0.1/go.mod h1:mdd8cEPW7TPgNG4FpuP3sGBiQ7Yi/zak9TYCG3juvb0= github.com/containerd/imgcrypt v1.0.4-0.20210301171431-0ae5c75f59ba/go.mod h1:6TNsg0ctmizkrOgXRNQjAPFWpMYRWuiB6dSF4Pfa5SA= github.com/containerd/imgcrypt v1.1.1-0.20210312161619-7ed62a527887/go.mod h1:5AZJNI6sLHJljKuI9IHnw1pWqo/F0nGDOuR9zgTs7ow= +github.com/containerd/imgcrypt v1.1.1/go.mod h1:xpLnwiQmEUJPvQoAapeb2SNCxz7Xr6PJrXQb0Dpc4ms= github.com/containerd/nri v0.0.0-20201007170849-eb1350a75164/go.mod h1:+2wGSDGFYfE5+So4M5syatU0N0f0LbWpuqyMi4/BE8c= github.com/containerd/nri v0.0.0-20210316161719-dbaa18c31c14/go.mod h1:lmxnXF6oMkbqs39FiCt1s0R2HSMhcLel9vNL3m4AaeY= +github.com/containerd/nri v0.1.0/go.mod h1:lmxnXF6oMkbqs39FiCt1s0R2HSMhcLel9vNL3m4AaeY= github.com/containerd/ttrpc v0.0.0-20190828154514-0e0f228740de/go.mod h1:PvCDdDGpgqzQIzDW1TphrGLssLDZp2GuS+X5DkEJB8o= github.com/containerd/ttrpc v0.0.0-20190828172938-92c8520ef9f8/go.mod h1:PvCDdDGpgqzQIzDW1TphrGLssLDZp2GuS+X5DkEJB8o= github.com/containerd/ttrpc v0.0.0-20191028202541-4f1b8fe65a5c/go.mod h1:LPm1u0xBw8r8NOKoOdNMeVHSawSsltak+Ihv+etqsE8= github.com/containerd/ttrpc v1.0.1/go.mod h1:UAxOpgT9ziI0gJrmKvgcZivgxOp8iFPSk8httJEt98Y= github.com/containerd/ttrpc v1.0.2/go.mod h1:UAxOpgT9ziI0gJrmKvgcZivgxOp8iFPSk8httJEt98Y= +github.com/containerd/ttrpc v1.1.0/go.mod h1:XX4ZTnoOId4HklF4edwc4DcqskFZuvXB1Evzy5KFQpQ= github.com/containerd/typeurl v0.0.0-20180627222232-a93fcdb778cd/go.mod h1:Cm3kwCdlkCfMSHURc+r6fwoGH6/F1hH3S4sg0rLFWPc= github.com/containerd/typeurl v0.0.0-20190911142611-5eb25027c9fd/go.mod 
h1:GeKYzf2pQcqv7tJ0AoCuuhtnqhva5LNU3U+OyKxxJpk= github.com/containerd/typeurl v1.0.1/go.mod h1:TB1hUtrpaiO88KEK56ijojHS1+NeF0izUACaJW2mdXg= +github.com/containerd/typeurl v1.0.2/go.mod h1:9trJWW2sRlGub4wZJRTW83VtbOLS6hwcDZXTn6oPz9s= github.com/containerd/zfs v0.0.0-20200918131355-0a33824f23a2/go.mod h1:8IgZOBdv8fAgXddBT4dBXJPtxyRsejFIpXoklgxgEjw= github.com/containerd/zfs v0.0.0-20210301145711-11e8f1707f62/go.mod h1:A9zfAbMlQwE+/is6hi0Xw8ktpL+6glmqZYtevJgaB8Y= github.com/containerd/zfs v0.0.0-20210315114300-dde8f0fda960/go.mod h1:m+m51S1DvAP6r3FcmYCp54bQ34pyOwTieQDNRIRHsFY= +github.com/containerd/zfs v0.0.0-20210324211415-d5c4544f0433/go.mod h1:m+m51S1DvAP6r3FcmYCp54bQ34pyOwTieQDNRIRHsFY= +github.com/containerd/zfs v1.0.0/go.mod h1:m+m51S1DvAP6r3FcmYCp54bQ34pyOwTieQDNRIRHsFY= github.com/containernetworking/cni v0.7.1/go.mod h1:LGwApLUm2FpoOfxTDEeq8T9ipbpZ61X79hmU3w8FmsY= github.com/containernetworking/cni v0.8.0/go.mod h1:LGwApLUm2FpoOfxTDEeq8T9ipbpZ61X79hmU3w8FmsY= +github.com/containernetworking/cni v0.8.1/go.mod h1:LGwApLUm2FpoOfxTDEeq8T9ipbpZ61X79hmU3w8FmsY= github.com/containernetworking/plugins v0.8.6/go.mod h1:qnw5mN19D8fIwkqW7oHHYDHVlzhJpcY6TQxn/fUyDDM= +github.com/containernetworking/plugins v0.9.1/go.mod h1:xP/idU2ldlzN6m4p5LmGiwRDjeJr6FLK6vuiUwoH7P8= github.com/containers/ocicrypt v1.0.1/go.mod h1:MeJDzk1RJHv89LjsH0Sp5KTY3ZYkjXO/C+bKAeWFIrc= github.com/containers/ocicrypt v1.1.0/go.mod h1:b8AOe0YR67uU8OqfVNcznfFpAzu3rdgUV4GP9qXPfu4= +github.com/containers/ocicrypt v1.1.1/go.mod h1:Dm55fwWm1YZAjYRaJ94z2mfZikIyIN4B0oB3dj3jFxY= +github.com/coreos/bbolt v1.3.2/go.mod h1:iRUV2dpdMOn7Bo10OQBFzIJO9kkE559Wcmn+qkEiiKk= +github.com/coreos/etcd v3.3.10+incompatible/go.mod h1:uF7uidLiAD3TWHmW31ZFd/JWoc32PjwdhPthX9715RE= github.com/coreos/go-iptables v0.4.5/go.mod h1:/mVI274lEDI2ns62jHCDnCyBF9Iwsmekav8Dbxlm1MU= +github.com/coreos/go-iptables v0.5.0/go.mod h1:/mVI274lEDI2ns62jHCDnCyBF9Iwsmekav8Dbxlm1MU= github.com/coreos/go-oidc v2.1.0+incompatible/go.mod 
h1:CgnwVTmzoESiwO9qyAFEMiHoZ1nMCKZlZ9V6mm3/LKc= github.com/coreos/go-semver v0.2.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk= github.com/coreos/go-semver v0.3.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk= @@ -291,7 +302,9 @@ github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSs github.com/denverdino/aliyungo v0.0.0-20190125010748-a747050bb1ba/go.mod h1:dV8lFg6daOBZbT6/BDGIz6Y3WFGn8juu6G+CQ6LHtl0= github.com/dgrijalva/jwt-go v0.0.0-20170104182250-a601269ab70c/go.mod h1:E3ru+11k8xSBh+hMPgOLZmtrrCbhqsmaPHjLKYnJCaQ= github.com/dgrijalva/jwt-go v3.2.0+incompatible/go.mod h1:E3ru+11k8xSBh+hMPgOLZmtrrCbhqsmaPHjLKYnJCaQ= +github.com/dgryski/go-sip13 v0.0.0-20181026042036-e10d5fee7954/go.mod h1:vAd38F8PWV+bWy6jNmig1y/TA+kYO4g3RSRF0IAv0no= github.com/dnaeon/go-vcr v1.0.1/go.mod h1:aBB1+wY4s93YsC3HHjMBMrwTj2R9FHDzUr9KyGc8n1E= +github.com/dnephin/pflag v1.0.7/go.mod h1:uxE91IoWURlOiTUIA8Mq5ZZkAv3dPUfZNaT80Zm7OQE= github.com/docker/distribution v0.0.0-20190905152932-14b96e55d84c/go.mod h1:0+TTO4EOBfRPhZXAeF1Vu+W3hHZ8eLp8PgKVZlcvtFY= github.com/docker/distribution v2.7.1-0.20190205005809-0d3efadf0154+incompatible/go.mod h1:J2gT2udsDAN96Uj4KfcMRqY0/ypR+oyYUYmja8H+y+w= github.com/docker/distribution v2.7.1+incompatible h1:a5mlkVzth6W5A4fOsS3D2EO5BUmsJpcB+cRlLU7cSug= @@ -322,20 +335,16 @@ github.com/envoyproxy/go-control-plane v0.9.9-0.20201210154907-fd9021fe5dad/go.m github.com/envoyproxy/go-control-plane v0.9.9-0.20210217033140-668b12f5399d/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= github.com/envoyproxy/go-control-plane v0.9.9-0.20210512163311-63b5d3c536b0/go.mod h1:hliV/p42l8fGbc6Y9bQ70uLwIvmJyVE5k4iMKlh8wCQ= github.com/envoyproxy/go-control-plane v0.9.10-0.20210907150352-cf90f659a021/go.mod h1:AFq3mo9L8Lqqiid3OhADV3RfLJnjiw63cSpi+fDTRC0= -github.com/envoyproxy/go-control-plane v0.10.1/go.mod h1:AY7fTTXNdv/aJ2O5jwpxAPOWUZ7hQAEvzN5Pf27BkQQ= github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod 
h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= -github.com/envoyproxy/protoc-gen-validate v0.6.2/go.mod h1:2t7qjJNvHPx8IjnBOzl9E9/baC+qXE/TeeyBRzgJDws= github.com/evanphx/json-patch v4.9.0+incompatible/go.mod h1:50XU6AFN0ol/bzJsmQLiYLvXMP4fmwYFNcr97nuDLSk= github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4= -github.com/fatih/color v1.9.0/go.mod h1:eQcE1qtQxscV5RaZvpXrrb8Drkc3/DdQ+uUYCNjL+zU= -github.com/fatih/color v1.13.0/go.mod h1:kLAiJbzzSOZDVNGyDpeOxJ47H46qBXwg5ILebYFFOfk= +github.com/fatih/color v1.10.0/go.mod h1:ELkj/draVOlAH/xkhN6mQ50Qd0MPOk5AAr3maGEBuJM= github.com/form3tech-oss/jwt-go v3.2.2+incompatible/go.mod h1:pbq4aXjuKjdthFRnoDwaVPLA+WlJuPGy+QneDUgJi2k= github.com/frankban/quicktest v1.2.2/go.mod h1:Qh/WofXFeiAFII1aEBu529AtJo6Zg2VHscnEsbBnJ20= github.com/frankban/quicktest v1.11.3 h1:8sXhOn0uLys67V8EsXLc6eszDs8VXWxL3iRvebPhedY= github.com/frankban/quicktest v1.11.3/go.mod h1:wRf/ReqHper53s+kmmSZizM8NamnL3IM0I9ntUbOk+k= github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo= github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ= -github.com/fsnotify/fsnotify v1.5.1/go.mod h1:T3375wBYaZdLLcVNkcVbzGHY7f1l/uK5T5Ai1i3InKU= github.com/fullsailor/pkcs7 v0.0.0-20190404230743-d7302db945fa/go.mod h1:KnogPXtdwXqoenmZCw6S+25EAm2MkxbG0deNDu4cbSA= github.com/garyburd/redigo v0.0.0-20150301180006-535138d7bcd7/go.mod h1:NR3MbYisc3/PwhQ00EMzDiPmrwpPxAn5GI05/YaO1SY= github.com/ghodss/yaml v0.0.0-20150909031657-73d445a93680/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= @@ -381,6 +390,7 @@ github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q= github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= github.com/golang/groupcache v0.0.0-20160516000752-02826c3e7903/go.mod 
h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/groupcache v0.0.0-20190129154638-5b532d6fd5ef/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= @@ -435,8 +445,9 @@ github.com/google/go-cmp v0.5.3/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/ github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.7 h1:81/ik6ipDQS2aGcBfIN5dHDB36BwrStyeAQquSYCV4o= github.com/google/go-cmp v0.5.7/go.mod h1:n+brtR0CgQNWTVd5ZUFpTBC8YFBDLK/h/bpaJ8/DtOE= +github.com/google/go-cmp v0.5.8 h1:e6P7q2lk1O+qJJb4BtCQXlK8vWEO8V1ZeuEdJNOqZyg= +github.com/google/go-cmp v0.5.8/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= github.com/google/gofuzz v1.1.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= github.com/google/martian v2.1.0+incompatible h1:/CP5g8u/VJHijgedC/Legn3BAbAaWPgecwXBIDzw5no= @@ -460,6 +471,7 @@ github.com/google/pprof v0.0.0-20210601050228-01bbb1931b22/go.mod h1:kpwsk12EmLe github.com/google/pprof v0.0.0-20210609004039-a478d1d731e9/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/pprof v0.0.0-20210720184732-4bb14d4b1be1/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= +github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510/go.mod 
h1:pupxD2MaaD3pAXIBCelhxNneeOaAeabZDe5s4K6zSpQ= github.com/google/uuid v1.0.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/google/uuid v1.1.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= @@ -469,61 +481,43 @@ github.com/google/uuid v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+ github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg= github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk= github.com/googleapis/gax-go/v2 v2.1.0/go.mod h1:Q3nei7sK6ybPYH7twZdmQpAd1MKb7pfu6SK+H1/DsU0= -github.com/googleapis/gax-go/v2 v2.1.1 h1:dp3bWCh+PPO1zjRRiCSczJav13sBvG4UhNyVTa1KqdU= github.com/googleapis/gax-go/v2 v2.1.1/go.mod h1:hddJymUZASv3XPyGkUpKj8pPO47Rmb0eJc8R6ouapiM= +github.com/googleapis/gax-go/v2 v2.2.0/go.mod h1:as02EH8zWkzwUoLbBaFeQ+arQaj/OthfcblKl4IGNaM= +github.com/googleapis/gax-go/v2 v2.3.0 h1:nRJtk3y8Fm770D42QV6T90ZnvFZyk7agSo3Q+Z9p3WI= +github.com/googleapis/gax-go/v2 v2.3.0/go.mod h1:b8LNqSzNabLiUpXKkY7HAR5jr6bIT99EXz9pXxye9YM= github.com/googleapis/gnostic v0.4.1/go.mod h1:LRhVm6pbyptWbWbuZ38d1eyptfvIytN3ir6b65WBswg= +github.com/googleapis/go-type-adapters v1.0.0 h1:9XdMn+d/G57qq1s8dNc5IesGCXHf6V2HZ2JwRxfA2tA= +github.com/googleapis/go-type-adapters v1.0.0/go.mod h1:zHW75FOG2aur7gAO2B+MLby+cLsWGBF62rFAi7WjWO4= github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY= github.com/gorilla/handlers v0.0.0-20150720190736-60c7bfde3e33/go.mod h1:Qkdc/uu4tH4g6mTK6auzZ766c4CA0Ng8+o/OAirnOIQ= github.com/gorilla/mux v1.7.2 h1:zoNxOV7WjqXptQOVngLmcSQgXmgk4NMz1HibBchjl/I= github.com/gorilla/mux v1.7.2/go.mod h1:1lud6UwP+6orDFRuTfBEV8e9/aOM/c4fVVCaMa2zaAs= github.com/gorilla/websocket v0.0.0-20170926233335-4201258b820c/go.mod h1:E7qHFY5m1UJ88s3WnNqhKjPHQ0heANvMoAMk2YaljkQ= +github.com/gorilla/websocket v1.4.0/go.mod 
h1:E7qHFY5m1UJ88s3WnNqhKjPHQ0heANvMoAMk2YaljkQ= github.com/gorilla/websocket v1.4.2/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE= github.com/gregjones/httpcache v0.0.0-20180305231024-9cad4c3443a7/go.mod h1:FecbI9+v66THATjSRHfNgh1IVFe/9kFxbXtjV0ctIMA= +github.com/grpc-ecosystem/go-grpc-middleware v1.0.0/go.mod h1:FiyG127CGDf3tlThmgyCl78X/SZQqEOJBCDaAfeWzPs= github.com/grpc-ecosystem/go-grpc-middleware v1.0.1-0.20190118093823-f849b5445de4/go.mod h1:FiyG127CGDf3tlThmgyCl78X/SZQqEOJBCDaAfeWzPs= github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0/go.mod h1:8NvIoxWQoOIhqOTXgfV/d3M/q6VIi02HzZEHgUlZvzk= +github.com/grpc-ecosystem/grpc-gateway v1.9.0/go.mod h1:vNeuVxBJEsws4ogUvrchl83t/GYV9WGTSLVdBhOQFDY= github.com/grpc-ecosystem/grpc-gateway v1.9.5/go.mod h1:vNeuVxBJEsws4ogUvrchl83t/GYV9WGTSLVdBhOQFDY= github.com/grpc-ecosystem/grpc-gateway v1.16.0/go.mod h1:BDjrQk3hbvj6Nolgz8mAMFbcEtjT1g+wF4CSlocrBnw= -github.com/hashicorp/consul/api v1.11.0/go.mod h1:XjsvQN+RJGWI2TWy1/kqaE16HrR2J/FWgkYjdZQsX9M= -github.com/hashicorp/consul/sdk v0.8.0/go.mod h1:GBvyrGALthsZObzUGsfgHZQDXjg4lOjagTIwIR1vPms= github.com/hashicorp/errwrap v0.0.0-20141028054710-7554cd9344ce/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= -github.com/hashicorp/go-cleanhttp v0.5.0/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80= -github.com/hashicorp/go-cleanhttp v0.5.1/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80= -github.com/hashicorp/go-cleanhttp v0.5.2/go.mod h1:kO/YDlP8L1346E6Sodw+PrpBSV4/SoxCXGY6BqNFT48= -github.com/hashicorp/go-hclog v0.12.0/go.mod h1:whpDNt7SSdeAju8AWKIWsul05p54N/39EeqMAyrmvFQ= -github.com/hashicorp/go-hclog v1.0.0/go.mod h1:whpDNt7SSdeAju8AWKIWsul05p54N/39EeqMAyrmvFQ= -github.com/hashicorp/go-immutable-radix v1.0.0/go.mod h1:0y9vanUI8NX6FsYoO3zeMjhV/C5i9g4Q3DwcSNZ4P60= -github.com/hashicorp/go-immutable-radix v1.3.1/go.mod 
h1:0y9vanUI8NX6FsYoO3zeMjhV/C5i9g4Q3DwcSNZ4P60= -github.com/hashicorp/go-msgpack v0.5.3/go.mod h1:ahLV/dePpqEmjfWmKiqvPkv/twdG7iPBM1vqhUKIvfM= github.com/hashicorp/go-multierror v0.0.0-20161216184304-ed905158d874/go.mod h1:JMRHfdO9jKNzS/+BTlxCjKNQHg/jZAft8U7LloJvN7I= github.com/hashicorp/go-multierror v1.0.0/go.mod h1:dHtQlpGsu+cZNNAkkCN/P3hoUDHhCYQXV3UM06sGGrk= -github.com/hashicorp/go-multierror v1.1.0/go.mod h1:spPvp8C1qA32ftKqdAHm4hHTbPw+vmowP0z+KUhOZdA= -github.com/hashicorp/go-retryablehttp v0.5.3/go.mod h1:9B5zBasrRhHXnJnui7y6sL7es7NDiJgTc6Er0maI1Xs= -github.com/hashicorp/go-rootcerts v1.0.2/go.mod h1:pqUvnprVnM5bf7AOirdbb01K4ccR319Vf4pU3K5EGc8= -github.com/hashicorp/go-sockaddr v1.0.0/go.mod h1:7Xibr9yA9JjQq1JpNB2Vw7kxv8xerXegt+ozgdvDeDU= -github.com/hashicorp/go-syslog v1.0.0/go.mod h1:qPfqrKkXGihmCqbJM2mZgkZGvKG1dFdvsLplgctolz4= github.com/hashicorp/go-uuid v0.0.0-20180228145832-27454136f036/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= -github.com/hashicorp/go-uuid v1.0.0/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= -github.com/hashicorp/go-uuid v1.0.1/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= -github.com/hashicorp/golang-lru v0.5.4/go.mod h1:iADmTwqILo4mZ8BN3D2Q6+9jd8WM5uGBxy+E8yxSoD4= github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ= -github.com/hashicorp/logutils v1.0.0/go.mod h1:QIAnNjmIWmVIIkWDTG1z5v++HQmx9WQRO+LraFDTW64= -github.com/hashicorp/mdns v1.0.1/go.mod h1:4gW7WsVCke5TE7EPeYliwHlRUyBtfCwuFwuMg2DmyNY= -github.com/hashicorp/mdns v1.0.4/go.mod h1:mtBihi+LeNXGtG8L9dX59gAEa12BDtBQSp4v/YAJqrc= -github.com/hashicorp/memberlist v0.2.2/go.mod h1:MS2lj3INKhZjWNqd3N0m3J+Jxf3DAOnAH9VT3Sh9MUE= -github.com/hashicorp/memberlist v0.3.0/go.mod h1:MS2lj3INKhZjWNqd3N0m3J+Jxf3DAOnAH9VT3Sh9MUE= 
-github.com/hashicorp/serf v0.9.5/go.mod h1:UWDWwZeL5cuWDJdl0C6wrvrUwEqtQ4ZKBKKENpqIUyk= -github.com/hashicorp/serf v0.9.6/go.mod h1:TXZNMjZQijwlDvp+r0b63xZ45H7JmCmgg4gpTwn9UV4= github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU= -github.com/iancoleman/strcase v0.2.0/go.mod h1:iwCmte+B7n89clKwxIoIXy/HfoL7AsD47ZCWhYzw7ho= github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= github.com/imdario/mergo v0.3.5/go.mod h1:2EnlNZ0deacrJVfApfmtdGgDfMuh/nq6Ok1EcJh5FfA= github.com/imdario/mergo v0.3.8/go.mod h1:2EnlNZ0deacrJVfApfmtdGgDfMuh/nq6Ok1EcJh5FfA= github.com/imdario/mergo v0.3.10/go.mod h1:jmQim1M+e3UYxmgPu/WyfjB3N3VflVyUjjjwH0dnCYA= github.com/imdario/mergo v0.3.11/go.mod h1:jmQim1M+e3UYxmgPu/WyfjB3N3VflVyUjjjwH0dnCYA= +github.com/imdario/mergo v0.3.12/go.mod h1:jmQim1M+e3UYxmgPu/WyfjB3N3VflVyUjjjwH0dnCYA= github.com/inconshreveable/mousetrap v1.0.0 h1:Z8tu5sraLXCXIcARxBp/8cbvlwVa7Z1NHg9XEKhtSvM= github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8= github.com/j-keck/arping v0.0.0-20160618110441-2cf9dc699c56/go.mod h1:ymszkNOg6tORTn+6F6j+Jc8TOr5osrynvN6ivFWZ2GA= @@ -534,12 +528,10 @@ github.com/jmespath/go-jmespath v0.3.0/go.mod h1:9QtRXoHjLGCJ5IBSaohpXITPlowMeeY github.com/jmespath/go-jmespath v0.4.0/go.mod h1:T8mJZnbsbmF+m6zOOFylbeCJqk5+pHWvzYPziyZiYoo= github.com/jmespath/go-jmespath/internal/testify v1.5.1/go.mod h1:L3OGu8Wl2/fWfCI6z80xFu9LTZmf1ZRjMHUOPmWr69U= github.com/jonboulle/clockwork v0.1.0/go.mod h1:Ii8DK3G1RaLaWxj9trq07+26W01tbo22gdxWY5EU2bo= +github.com/jonboulle/clockwork v0.2.2/go.mod h1:Pkfl5aHPm1nk2H9h0bjmnJD/BcgbGXUBGnn1kMkgxc8= github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU= github.com/json-iterator/go v1.1.7/go.mod 
h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= -github.com/json-iterator/go v1.1.9/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= github.com/json-iterator/go v1.1.10/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= -github.com/json-iterator/go v1.1.11/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= -github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo= github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU= github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk= github.com/jtolds/gls v4.20.0+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU= @@ -550,12 +542,12 @@ github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= github.com/klauspost/compress v1.9.7/go.mod h1:RyIbtBH6LamlWaDj8nUwkbUhJ87Yi3uG0guNDohfE1A= github.com/klauspost/compress v1.11.3/go.mod h1:aoV0uJVorq1K+umq18yTdKaF57EivdYsUV+/s2qKfXs= +github.com/klauspost/compress v1.11.13/go.mod h1:aoV0uJVorq1K+umq18yTdKaF57EivdYsUV+/s2qKfXs= github.com/klauspost/compress v1.13.1 h1:wXr2uRxZTJXHLly6qhJabee5JqIhTRoLBhDOA74hDEQ= github.com/klauspost/compress v1.13.1/go.mod h1:8dP1Hq4DHOhN9w426knH3Rhby4rFm6D8eO+e+Dq5Gzg= github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= github.com/konsorten/go-windows-terminal-sequences v1.0.2/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= github.com/konsorten/go-windows-terminal-sequences v1.0.3/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= -github.com/kr/fs v0.1.0/go.mod h1:FFnZGqtBN9Gxj7eW1uZ42v5BccTP0vu6NEaFoC2HwRg= github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc= github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= 
github.com/kr/pretty v0.2.0/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= @@ -567,11 +559,11 @@ github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= github.com/lib/pq v1.0.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= -github.com/lib/pq v1.10.4 h1:SO9z7FRPzA03QhHKJrH5BXA6HU1rS4V2nIVrrNC1iYk= -github.com/lib/pq v1.10.4/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= +github.com/lib/pq v1.10.5 h1:J+gdV2cUmX7ZqL2B0lFcW0m+egaHC2V3lpO8nWxyYiQ= +github.com/lib/pq v1.10.5/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= github.com/linkedin/goavro v2.1.0+incompatible h1:DV2aUlj2xZiuxQyvag8Dy7zjY69ENjS66bWkSfdpddY= github.com/linkedin/goavro v2.1.0+incompatible/go.mod h1:bBCwI2eGYpUI/4820s67MElg9tdeLbINjLjiM2xZFYM= -github.com/lyft/protoc-gen-star v0.5.3/go.mod h1:V0xaHgaf5oCCqmcxYcWiDfTiKsZsRc87/1qhoTACD8w= +github.com/magiconair/properties v1.8.0/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ= github.com/magiconair/properties v1.8.5 h1:b6kJs+EmPFMYGkow9GiUyCyOvIwYetYJ3fSaWak/Gls= github.com/magiconair/properties v1.8.5/go.mod h1:y3VJvCyxH9uVvJTWEGAELF3aiYNyPKd5NZ3oSwXrF60= github.com/mailru/easyjson v0.0.0-20190614124828-94de47d64c63/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc= @@ -579,34 +571,20 @@ github.com/mailru/easyjson v0.0.0-20190626092158-b2ccc519800e/go.mod h1:C1wdFJiN github.com/mailru/easyjson v0.7.0/go.mod h1:KAzv3t3aY1NaHWoQz1+4F1ccyAH66Jk7yos7ldAVICs= github.com/marstr/guid v1.1.0/go.mod h1:74gB1z2wpxxInTG6yaqA7KrtM0NZ+RbrcqDvYHefzho= github.com/mattn/go-colorable v0.0.9/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU= -github.com/mattn/go-colorable v0.1.4/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE= -github.com/mattn/go-colorable v0.1.6/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= 
-github.com/mattn/go-colorable v0.1.9/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= -github.com/mattn/go-colorable v0.1.12/go.mod h1:u5H1YNBxpqRaxsYJYSkiCWKzEfiAb1Gb520KVy5xxl4= +github.com/mattn/go-colorable v0.1.8/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= github.com/mattn/go-ieproxy v0.0.1/go.mod h1:pYabZ6IHcRpFh7vIaLfK7rdcWgFEb3SFJ6/gNWuh88E= -github.com/mattn/go-isatty v0.0.3/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4= github.com/mattn/go-isatty v0.0.4/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4= -github.com/mattn/go-isatty v0.0.8/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= -github.com/mattn/go-isatty v0.0.10/go.mod h1:qgIWMr58cqv1PHHyhnkY9lrL7etaEgOFcMEpPG5Rm84= -github.com/mattn/go-isatty v0.0.11/go.mod h1:PhnuNfih5lzO57/f3n+odYbM4JtupLOxQOAqxQCu2WE= github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU= -github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94= github.com/mattn/go-runewidth v0.0.2/go.mod h1:LwmH8dsx7+W8Uxz3IHJYH5QSwggIsqBzpuz5H//U1FU= github.com/mattn/go-shellwords v1.0.3/go.mod h1:3xCvwCdWdlDJUrvuMn7Wuy9eWs4pE8vqg+NOMyg4B2o= github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0= github.com/matttproud/golang_protobuf_extensions v1.0.2-0.20181231171920-c182affec369/go.mod h1:BSXmuO+STAnVfrANrmjBb36TMTDstsz7MSK+HVaYKv4= -github.com/miekg/dns v1.0.14/go.mod h1:W1PPwlIAgtquWBMBEV9nkV9Cazfe8ScdGz/Lj7v3Nrg= -github.com/miekg/dns v1.1.26/go.mod h1:bPDLeHnStXmXAq1m/Ch/hvfNHr14JKNPMBo3VZKjuso= -github.com/miekg/dns v1.1.41/go.mod h1:p6aan82bvRIyn+zDIv9xYNUpwa73JcSh9BKwknJysuI= github.com/miekg/pkcs11 v1.0.3/go.mod h1:XsNlhZGX73bx86s2hdc/FuaLm2CPZJemRLMA+WTFxgs= github.com/mistifyio/go-zfs v2.1.2-0.20190413222219-f784269be439+incompatible/go.mod h1:8AuVvqP/mXw1px98n46wfvcGfQ4ci2FwoAjKYxuo3Z4= -github.com/mitchellh/cli v1.1.0/go.mod 
h1:xcISNoH86gajksDmfB23e/pu+B+GeFRMYmoHXxx3xhI= github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= -github.com/mitchellh/go-testing-interface v1.0.0/go.mod h1:kRemZodwjscx+RGhAo8eIhFbs2+BFgRtFPeD/KE+zxI= -github.com/mitchellh/mapstructure v0.0.0-20160808181253-ca63d7c062ee/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= -github.com/mitchellh/mapstructure v1.4.3/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= github.com/mitchellh/osext v0.0.0-20151018003038-5e2d6d41470f/go.mod h1:OkQIRizQZAeMln+1tSwduZz7+Af5oFlKirV/MSYes2A= +github.com/moby/locker v1.0.1/go.mod h1:S7SDdo5zpBK84bzzVlKr2V0hz+7x9hWbYC/kq7oQppc= github.com/moby/sys/mount v0.2.0 h1:WhCW5B355jtxndN5ovugJlMFJawbUODuW8fSnEH6SSM= github.com/moby/sys/mount v0.2.0/go.mod h1:aAivFE2LB3W4bACsUXChRHQ0qKWsetY4Y9V7sxOougM= github.com/moby/sys/mountinfo v0.4.0/go.mod h1:rEr8tzG/lsIZHBtN/JjGG+LMYx9eXgW2JI+6q0qou+A= @@ -615,13 +593,12 @@ github.com/moby/sys/mountinfo v0.5.0 h1:2Ks8/r6lopsxWi9m58nlwjaeSzUX9iiL1vj5qB/9 github.com/moby/sys/mountinfo v0.5.0/go.mod h1:3bMD3Rg+zkqx8MRYPi7Pyb0Ie97QEBmdxbhnCLlSvSU= github.com/moby/sys/symlink v0.1.0/go.mod h1:GGDODQmbFOjFsXvfLVn3+ZRxkch54RkSiGqsZeMYowQ= github.com/moby/term v0.0.0-20200312100748-672ec06f55cd/go.mod h1:DdlQx2hp0Ss5/fLikoLlEeIYiATotOjgB//nb973jeo= -github.com/moby/term v0.0.0-20201216013528-df9cb8a40635 h1:rzf0wL0CHVc8CEsgyygG0Mn9CNCCPZqOPaz8RiiHYQk= -github.com/moby/term v0.0.0-20201216013528-df9cb8a40635/go.mod h1:FBS0z0QWA44HXygs7VXDUOGoN/1TV3RuWkLO04am3wc= +github.com/moby/term v0.0.0-20210619224110-3f7ff695adc6 h1:dcztxKSvZ4Id8iPpHERQBbIJfabdt4wUm5qy3wOL2Zc= +github.com/moby/term v0.0.0-20210619224110-3f7ff695adc6/go.mod h1:E2VnQOmVuvZB6UYnnDB0qG5Nq/1tD9acaOpo6xmt0Kw= github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= 
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= -github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk= github.com/morikuni/aec v0.0.0-20170113033406-39771216ff4c h1:nXxl5PrvVm2L/wCy8dQu6DMTwH4oIuGN8GJDAlqDdVE= github.com/morikuni/aec v0.0.0-20170113033406-39771216ff4c/go.mod h1:BbKIizmSmc5MMPqRYbxO4ZU0S0+P200+tUnFx7PXmsc= github.com/mrunalp/fileutils v0.5.0/go.mod h1:M1WthSahJixYnrXQl/DFQuteStB1weuxD2QJNHXfbSQ= @@ -633,6 +610,8 @@ github.com/ncw/swift v1.0.47/go.mod h1:23YIA4yWVnGwv2dQlN4bB7egfYX6YLn0Yo/S6zZO/ github.com/ncw/swift v1.0.52/go.mod h1:23YIA4yWVnGwv2dQlN4bB7egfYX6YLn0Yo/S6zZO/ZM= github.com/nightlyone/lockfile v1.0.0 h1:RHep2cFKK4PonZJDdEl4GmkabuhbsRMgk/k3uAmxBiA= github.com/nightlyone/lockfile v1.0.0/go.mod h1:rywoIealpdNse2r832aiD9jRk8ErCatROs6LzC841CI= +github.com/nxadm/tail v1.4.4/go.mod h1:kenIhsEOeOJmVchQTgglprH7qJGnHDVpk1VPCcaMI8A= +github.com/oklog/ulid v1.3.1/go.mod h1:CirwcVhetQ6Lv90oh/F+FBtV6XMibvdAFo93nm5qn4U= github.com/olekukonko/tablewriter v0.0.0-20170122224234-a0225b3f23b5/go.mod h1:vsDQFd/mU46D+Z4whnwzcISnGGzXWMclvtLoiIKAKIo= github.com/onsi/ginkgo v0.0.0-20151202141238-7f8ab55aaf3b/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= github.com/onsi/ginkgo v0.0.0-20170829012221-11459a886d9c/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= @@ -640,10 +619,12 @@ github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+W github.com/onsi/ginkgo v1.10.1/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= github.com/onsi/ginkgo v1.10.3/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= github.com/onsi/ginkgo v1.11.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= 
+github.com/onsi/ginkgo v1.12.1/go.mod h1:zj2OWP4+oCPe1qIXoGWkgMRwljMUYCdkwsT2108oapk= github.com/onsi/gomega v0.0.0-20151007035656-2152b45fa28a/go.mod h1:C1qb7wdrVGGVU+Z6iS04AVkA3Q65CEZX59MT0QO5uiA= github.com/onsi/gomega v0.0.0-20170829124025-dcabb60a477c/go.mod h1:C1qb7wdrVGGVU+Z6iS04AVkA3Q65CEZX59MT0QO5uiA= github.com/onsi/gomega v1.7.0/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY= github.com/onsi/gomega v1.7.1/go.mod h1:XdKZgCCFLUoM/7CFJVPcG8C1xQ1AJ0vpAezJrB7JYyY= +github.com/onsi/gomega v1.10.3/go.mod h1:V9xEwhxec5O8UDM77eCW8vLymOMltsqPVYWrpDsH8xc= github.com/opencontainers/go-digest v0.0.0-20170106003457-a6d0ee40d420/go.mod h1:cMLVZDEM3+U2I4VmLI6N8jQYUd2OVphdqWwCJHrFt2s= github.com/opencontainers/go-digest v0.0.0-20180430190053-c9281466c8b2/go.mod h1:cMLVZDEM3+U2I4VmLI6N8jQYUd2OVphdqWwCJHrFt2s= github.com/opencontainers/go-digest v1.0.0-rc1/go.mod h1:cMLVZDEM3+U2I4VmLI6N8jQYUd2OVphdqWwCJHrFt2s= @@ -651,8 +632,9 @@ github.com/opencontainers/go-digest v1.0.0-rc1.0.20180430190053-c9281466c8b2/go. 
github.com/opencontainers/go-digest v1.0.0 h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8Oi/yOhh5U= github.com/opencontainers/go-digest v1.0.0/go.mod h1:0JzlMkj0TRzQZfJkVvzbP0HBR3IKzErnv2BNG4W4MAM= github.com/opencontainers/image-spec v1.0.0/go.mod h1:BtxoFyWECRxE4U/7sNtV5W15zMzWCbyJoFRP3s7yZA0= -github.com/opencontainers/image-spec v1.0.1 h1:JMemWkRwHx4Zj+fVxWoMCFm/8sYGGrUVojFA6h/TRcI= github.com/opencontainers/image-spec v1.0.1/go.mod h1:BtxoFyWECRxE4U/7sNtV5W15zMzWCbyJoFRP3s7yZA0= +github.com/opencontainers/image-spec v1.0.2 h1:9yCKha/T5XdGtO0q9Q9a6T5NUCsTn/DrBg0D7ufOcFM= +github.com/opencontainers/image-spec v1.0.2/go.mod h1:BtxoFyWECRxE4U/7sNtV5W15zMzWCbyJoFRP3s7yZA0= github.com/opencontainers/runc v0.0.0-20190115041553-12f6a991201f/go.mod h1:qT5XzbpPznkRYVz/mWwUaVBUv2rmF59PVA73FjuZG0U= github.com/opencontainers/runc v0.1.1/go.mod h1:qT5XzbpPznkRYVz/mWwUaVBUv2rmF59PVA73FjuZG0U= github.com/opencontainers/runc v1.0.0-rc8.0.20190926000215-3e425f80a8c9/go.mod h1:qT5XzbpPznkRYVz/mWwUaVBUv2rmF59PVA73FjuZG0U= @@ -670,10 +652,9 @@ github.com/opencontainers/runtime-tools v0.0.0-20181011054405-1d69bd0f9c39/go.mo github.com/opencontainers/selinux v1.6.0/go.mod h1:VVGKuOLlE7v4PJyT6h7mNWvq1rzqiriPsEqVhc+svHE= github.com/opencontainers/selinux v1.8.0/go.mod h1:RScLhm78qiWa2gbVCcGkC7tCGdgk3ogry1nUQF8Evvo= github.com/opencontainers/selinux v1.8.2/go.mod h1:MUIHuUEvKB1wtJjQdOyYRgOnLD2xAPP8dBsCoU0KuF8= -github.com/pascaldekloe/goe v0.0.0-20180627143212-57f6aae5913c/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc= -github.com/pascaldekloe/goe v0.1.0/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc= github.com/pborman/getopt v0.0.0-20180729010549-6fdd0a2c7117/go.mod h1:85jBQOZwpVEaDAr341tbn15RS4fCAsIst0qp7i8ex1o= -github.com/pelletier/go-toml v1.9.4/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c= +github.com/pelletier/go-toml v1.2.0/go.mod h1:5z9KED0ma1S8pY6P1sdut58dfprrGBbd/94hg7ilaic= +github.com/pelletier/go-toml v1.8.1/go.mod 
h1:T2/BmBdy8dvIRq1a/8aqjN41wvWlN4lrapLU/GW4pbc= github.com/peterbourgon/diskv v2.0.1+incompatible/go.mod h1:uqqh8zWWbv1HBMNONnaR/tNboyR3/BZd58JJSHlUSCU= github.com/pierrec/lz4/v4 v4.1.8 h1:ieHkV+i2BRzngO4Wd/3HGowuZStgq6QkPsD1eolNAO4= github.com/pierrec/lz4/v4 v4.1.8/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4= @@ -682,17 +663,14 @@ github.com/pkg/errors v0.8.1-0.20171018195549-f15c970de5b7/go.mod h1:bwawxfHBFNV github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= -github.com/pkg/sftp v1.10.1/go.mod h1:lYOWFsE0bwd1+KfKJaKeuokY15vzFx25BLbzYYoAxZI= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= -github.com/posener/complete v1.1.1/go.mod h1:em0nMJCgc9GFtwrmVmEMR/ZL6WyhyjMBndrE9hABlRI= -github.com/posener/complete v1.2.3/go.mod h1:WZIdtGGp+qx0sLrYKtIRAruyNpv6hFCicSgv7Sy7s/s= github.com/pquerna/cachecontrol v0.0.0-20171018203845-0dec1b30a021/go.mod h1:prYjPmNq4d1NPVmpShWobRqXY3q7Vp+80DqgxxUrUIA= github.com/prometheus/client_golang v0.0.0-20180209125602-c332b6f63c06/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw= github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw= +github.com/prometheus/client_golang v0.9.3/go.mod h1:/TN21ttK/J9q6uSwhBd54HahCDft0ttaMvbicHlPoso= github.com/prometheus/client_golang v1.0.0/go.mod h1:db9x61etRT2tGnBNRi70OPL5FsnadC4Ky3P0J6CfImo= github.com/prometheus/client_golang v1.1.0/go.mod h1:I1FGZT9+L76gKKOs5djB6ezCbFQP1xR9D75/vuwEF3g= -github.com/prometheus/client_golang v1.4.0/go.mod h1:e9GMxYsXl05ICDXkRhurwBS4Q3OK1iX/F2sw+iXX5zU= github.com/prometheus/client_golang v1.7.1/go.mod h1:PY5Wy2awLA44sXw4AOSfFBetzPP4j5+D6mVACh+pe2M= github.com/prometheus/client_model 
v0.0.0-20171117100541-99fa1f4be8e5/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo= github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo= @@ -700,12 +678,14 @@ github.com/prometheus/client_model v0.0.0-20190129233127-fd36f4220a90/go.mod h1: github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= github.com/prometheus/client_model v0.2.0/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= github.com/prometheus/common v0.0.0-20180110214958-89604d197083/go.mod h1:daVV7qP5qjZbuso7PdcryaAu0sAZbrN9i7WWcTMWvro= +github.com/prometheus/common v0.0.0-20181113130724-41aa239b4cce/go.mod h1:daVV7qP5qjZbuso7PdcryaAu0sAZbrN9i7WWcTMWvro= +github.com/prometheus/common v0.4.0/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4= github.com/prometheus/common v0.4.1/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4= github.com/prometheus/common v0.6.0/go.mod h1:eBmuwkDJBwy6iBfxCBob6t6dR6ENT/y+J+Zk0j9GMYc= -github.com/prometheus/common v0.9.1/go.mod h1:yhUN8i9wzaXS3w1O07YhxHEBxD+W35wd8bs7vj7HSQ4= github.com/prometheus/common v0.10.0/go.mod h1:Tlit/dnDKsSWFlCLTWaA1cyBgKHSMdTB80sz/V91rCo= github.com/prometheus/procfs v0.0.0-20180125133057-cb4147076ac7/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk= github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk= +github.com/prometheus/procfs v0.0.0-20190507164030-5867b95ac084/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA= github.com/prometheus/procfs v0.0.0-20190522114515-bc1a522cf7b1/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA= github.com/prometheus/procfs v0.0.2/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA= github.com/prometheus/procfs v0.0.3/go.mod h1:4A/X28fw3Fc593LaREMrKMqOKvUAntwMDaekg4FpcdQ= @@ -714,6 +694,7 @@ github.com/prometheus/procfs v0.0.8/go.mod h1:7Qr8sr6344vo1JqZ6HhLceV9o3AJ1Ff+Gx 
github.com/prometheus/procfs v0.1.3/go.mod h1:lV6e/gmhEcM9IjHGsFOCxxuZ+z1YqCvr4OA4YeYWdaU= github.com/prometheus/procfs v0.2.0/go.mod h1:lV6e/gmhEcM9IjHGsFOCxxuZ+z1YqCvr4OA4YeYWdaU= github.com/prometheus/procfs v0.6.0/go.mod h1:cz+aTbrPOrUb4q7XlbU9ygM+/jj0fzG6c1xBZuNvfVA= +github.com/prometheus/tsdb v0.7.1/go.mod h1:qhTCs0VvXwvX/y3TZrWD7rabWM+ijKTux40TwIPHuXU= github.com/proullon/ramsql v0.0.0-20211120092837-c8d0a408b939 h1:mtMU7aT8cTAyNL3O4RyOfe/OOUxwCN525SIbKQoUvw0= github.com/proullon/ramsql v0.0.0-20211120092837-c8d0a408b939/go.mod h1:jG8oAQG0ZPHPyxg5QlMERS31airDC+ZuqiAe8DUvFVo= github.com/rogpeppe/clock v0.0.0-20190514195947-2896927a307a h1:3QH7VyOaaiUHNrA9Se4YQIRkDTCw1EJls9xTUCaCeRM= @@ -723,11 +704,8 @@ github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6L github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= -github.com/ryanuber/columnize v0.0.0-20160712163229-9b3edd62028f/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts= github.com/safchain/ethtool v0.0.0-20190326074333-42ed695e3de8/go.mod h1:Z0q5wiBQGYcxhMZ6gUqHn6pYNLypFAvaL3UvgZLR0U4= -github.com/sagikazarmark/crypt v0.3.0/go.mod h1:uD/D+6UF4SrIR1uGEv7bBNkNqLGqUr43MRiaGWX1Nig= github.com/satori/go.uuid v1.2.0/go.mod h1:dA0hQrYB0VpLJoorglMZABFdXlWrHn1NEOzdhQKdks0= -github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529/go.mod h1:DxrIzT+xaE7yg65j358z/aeFdxmN0P9QXhEzd20vsDc= github.com/seccomp/libseccomp-golang v0.9.1/go.mod h1:GbW5+tmTXfcxTToHLXlScSlAvWlF4P2Ca7zGrPiEpWo= github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc= github.com/sirupsen/logrus v1.0.4-0.20170822132746-89742aefa4b2/go.mod h1:pMByvHTf9Beacp5x1UXfOR9xyW/9antXMhjMPG0dEzc= @@ -743,23 +721,22 @@ 
github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1 github.com/smartystreets/goconvey v0.0.0-20190330032615-68dc04aab96a/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA= github.com/soheilhy/cmux v0.1.4/go.mod h1:IM3LyeVVIOuxMH7sFAkER9+bJ4dT7Ms6E4xg4kGIyLM= github.com/spaolacci/murmur3 v0.0.0-20180118202830-f09979ecbc72/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA= +github.com/spf13/afero v1.1.2/go.mod h1:j4pytiNVoe2o6bmDsKpLACNPDBIoEAkihy7loJ1B0CQ= github.com/spf13/afero v1.2.2/go.mod h1:9ZxEEn6pIJ8Rxe320qSDBk6AsU0r9pR7Q4OcevTdifk= -github.com/spf13/afero v1.3.3/go.mod h1:5KUK8ByomD5Ti5Artl0RtHeI5pTF7MIDuXL3yY520V4= -github.com/spf13/afero v1.6.0 h1:xoax2sJ2DT8S8xA2paPFjDCScCNeWsg75VG0DLRreiY= -github.com/spf13/afero v1.6.0/go.mod h1:Ai8FlHk4v/PARR026UzYexafAt9roJ7LcLMAmO6Z93I= -github.com/spf13/cast v1.4.1/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE= +github.com/spf13/cast v1.3.0/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE= github.com/spf13/cobra v0.0.2-0.20171109065643-2da4a54c5cee/go.mod h1:1l0Ry5zgKvJasoi3XT1TypsSe7PqH0Sj9dhYf7v3XqQ= github.com/spf13/cobra v0.0.3/go.mod h1:1l0Ry5zgKvJasoi3XT1TypsSe7PqH0Sj9dhYf7v3XqQ= -github.com/spf13/cobra v1.3.0 h1:R7cSvGu+Vv+qX0gW5R/85dx2kmmJT5z5NM8ifdYjdn0= -github.com/spf13/cobra v1.3.0/go.mod h1:BrRVncBjOJa/eUcVVm9CE+oC6as8k+VYr4NY7WCi9V4= -github.com/spf13/jwalterweatherman v1.1.0/go.mod h1:aNWZUN0dPAAO/Ljvb5BEdw96iTZ0EXowPYD95IqWIGo= +github.com/spf13/cobra v1.0.0/go.mod h1:/6GTrnGXV9HjY+aR4k0oJ5tcvakLuG6EuKReYlHNrgE= +github.com/spf13/cobra v1.4.0 h1:y+wJpx64xcgO1V+RcnwW0LEHxTKRi2ZDPSBjWnrg88Q= +github.com/spf13/cobra v1.4.0/go.mod h1:Wo4iy3BUC+X2Fybo0PDqwJIv3dNRiZLHQymsfxlB84g= +github.com/spf13/jwalterweatherman v1.0.0/go.mod h1:cQK4TGJAtQXfYWX+Ddv3mKDzgVb68N+wFjFa4jdeBTo= github.com/spf13/pflag v0.0.0-20170130214245-9ff6c6923cff/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= github.com/spf13/pflag v1.0.1-0.20171106142849-4c012f6dcd95/go.mod 
h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= github.com/spf13/pflag v1.0.1/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= github.com/spf13/pflag v1.0.3/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA= github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= -github.com/spf13/viper v1.10.0/go.mod h1:SoyBPwAtKDzypXNDFKN5kzH7ppppbGZtls1UpIy5AsM= +github.com/spf13/viper v1.4.0/go.mod h1:PTJ7Z/lr49W6bUbkmS1V3by4uWynFiR9p7+dSq/yZzE= github.com/stefanberger/go-pkcs11uri v0.0.0-20201008174630-78d3cae3a980/go.mod h1:AO3tvPzVZ/ayst6UlUKUv6rcPQInYe3IknH3jYhAKu8= github.com/stretchr/objx v0.0.0-20180129172003-8a3f7159479f/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= @@ -774,24 +751,25 @@ github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5 github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5CcY= github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= -github.com/subosito/gotenv v1.2.0/go.mod h1:N0PQaV/YGNqwC0u51sEeR/aUtSLEXKX9iv69rRypqCw= github.com/syndtr/gocapability v0.0.0-20170704070218-db04d3cc01c8/go.mod h1:hkRG7XYTFWNJGYcbNJQlaLq0fg1yr4J4t/NcTQtrfww= github.com/syndtr/gocapability v0.0.0-20180916011248-d98352740cb2/go.mod h1:hkRG7XYTFWNJGYcbNJQlaLq0fg1yr4J4t/NcTQtrfww= github.com/syndtr/gocapability v0.0.0-20200815063812-42c35b437635/go.mod h1:hkRG7XYTFWNJGYcbNJQlaLq0fg1yr4J4t/NcTQtrfww= github.com/tchap/go-patricia v2.2.6+incompatible/go.mod h1:bmLyhP68RS6kStMGxByiQ23RP/odRBOTVjwp2cDyi6I= -github.com/testcontainers/testcontainers-go v0.12.0 h1:SK0NryGHIx7aifF6YqReORL18aGAA4bsDPtikDVCEyg= -github.com/testcontainers/testcontainers-go v0.12.0/go.mod 
h1:SIndOQXZng0IW8iWU1Js0ynrfZ8xcxrTtDfF6rD2pxs= +github.com/testcontainers/testcontainers-go v0.13.0 h1:OUujSlEGsXVo/ykPVZk3KanBNGN0TYb/7oKIPVn15JA= +github.com/testcontainers/testcontainers-go v0.13.0/go.mod h1:z1abufU633Eb/FmSBTzV6ntZAC1eZBYPtaFsn4nPuDk= github.com/tmc/grpc-websocket-proxy v0.0.0-20170815181823-89b8d40f7ca8/go.mod h1:ncp9v5uamzpCO7NfCPTXjqaC+bZgJeR0sMTm6dMHP7U= github.com/tmc/grpc-websocket-proxy v0.0.0-20190109142713-0ad062ec5ee5/go.mod h1:ncp9v5uamzpCO7NfCPTXjqaC+bZgJeR0sMTm6dMHP7U= -github.com/tv42/httpunix v0.0.0-20150427012821-b75d8614f926/go.mod h1:9ESjWnEqriFuLhtthL60Sar/7RFoluCcXsuvEwTV5KM= +github.com/ugorji/go v1.1.4/go.mod h1:uQMGLiO92mf5W77hV/PUCpI3pbzQx3CRekS0kk+RGrc= github.com/urfave/cli v0.0.0-20171014202726-7bc6a0acffa5/go.mod h1:70zkFmudgCuE/ngEzBv17Jvp/497gISqfk5gWijbERA= github.com/urfave/cli v1.20.0/go.mod h1:70zkFmudgCuE/ngEzBv17Jvp/497gISqfk5gWijbERA= github.com/urfave/cli v1.22.1/go.mod h1:Gos4lmkARVdJ6EkW0WaNv/tZAAMe9V7XWyB60NtXRu0= github.com/urfave/cli v1.22.2/go.mod h1:Gos4lmkARVdJ6EkW0WaNv/tZAAMe9V7XWyB60NtXRu0= github.com/vishvananda/netlink v0.0.0-20181108222139-023a6dafdcdf/go.mod h1:+SR5DhBJrl6ZM7CoCKvpw5BKroDKQ+PJqOg65H/2ktk= github.com/vishvananda/netlink v1.1.0/go.mod h1:cTgwzPIzzgDAYoQrMm0EdrjRUBkTqKYppBueQtXaqoE= +github.com/vishvananda/netlink v1.1.1-0.20201029203352-d40f9887b852/go.mod h1:twkDnbuQxJYemMlGd4JFIcuhgX83tXhKS2B/PRMpOho= github.com/vishvananda/netns v0.0.0-20180720170159-13995c7128cc/go.mod h1:ZjcWmFBXmLKZu9Nxj3WKYEafiSqer2rnvPr0en9UNpI= github.com/vishvananda/netns v0.0.0-20191106174202-0a2b9b5464df/go.mod h1:JP3t17pCcGlemwknint6hfoeCVQrEMVwxRLRjXpq+BU= +github.com/vishvananda/netns v0.0.0-20200728191858-db3c7e526aae/go.mod h1:DD4vA1DwXk04H54A1oHXtwZmA0grkVMdPxx/VGLCah0= github.com/willf/bitset v1.1.11-0.20200630133818-d5bec3311243/go.mod h1:RjeCKbqT1RxIR/KWY6phxZiaY1IyutSBfGjNPySAYV4= github.com/willf/bitset v1.1.11/go.mod h1:83CECat5yLh5zVOf4P1ErAgKA5UDvKtgyUABdr3+MjI= 
github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU= @@ -805,6 +783,7 @@ github.com/xitongsys/parquet-go-source v0.0.0-20190524061010-2b72cbee77d5/go.mod github.com/xitongsys/parquet-go-source v0.0.0-20200817004010-026bad9b25d0/go.mod h1:HYhIKsdns7xz80OgkbgJYrtQY7FjHWHKH6cvN7+czGE= github.com/xitongsys/parquet-go-source v0.0.0-20220315005136-aec0fe3e777c h1:UDtocVeACpnwauljUbeHD9UOjjcvF5kLUHruww7VT9A= github.com/xitongsys/parquet-go-source v0.0.0-20220315005136-aec0fe3e777c/go.mod h1:qLb2Itmdcp7KPa5KZKvhE9U1q5bYSOmgeOckF/H2rQA= +github.com/xordataexchange/crypt v0.0.3-0.20170626215501-b2862e3d0a77/go.mod h1:aYKd//L2LvnjZzWKhF00oedf4jCCReLcmhLdhm1A27Q= github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= @@ -813,12 +792,10 @@ github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1 github.com/yvasiyarov/go-metrics v0.0.0-20140926110328-57bccd1ccd43/go.mod h1:aX5oPXxHm3bOH+xeAttToC8pqch2ScQN/JoXYupl6xs= github.com/yvasiyarov/gorelic v0.0.0-20141212073537-a9bba5b9ab50/go.mod h1:NUSPSUX/bi6SeDMUh6brw0nXpxHnc96TguQh0+r/ssA= github.com/yvasiyarov/newrelic_platform_go v0.0.0-20140908184405-b21fdbd4370f/go.mod h1:GlGEuHIJweS1mbCqG+7vt2nvWLzLLnRHbXz5JKd/Qbg= +go.etcd.io/bbolt v1.3.2/go.mod h1:IbVyRI1SCnLcuJnV2u8VeU0CEYM7e686BmAb1XKL+uU= go.etcd.io/bbolt v1.3.3/go.mod h1:IbVyRI1SCnLcuJnV2u8VeU0CEYM7e686BmAb1XKL+uU= go.etcd.io/bbolt v1.3.5/go.mod h1:G5EMThwa9y8QZGBClrRx5EY+Yw9kAhnjy3bSjsnlVTQ= go.etcd.io/etcd v0.5.0-alpha.5.0.20200910180754-dd1b699fc489/go.mod h1:yVHk9ub3CSBatqGNg7GRmsnfLWtoW60w4eDYfh7vHDg= -go.etcd.io/etcd/api/v3 v3.5.1/go.mod h1:cbVKeC6lCfl7j/8jBhAK6aIYO9XOjdptoxU/nLQcPvs= -go.etcd.io/etcd/client/pkg/v3 v3.5.1/go.mod 
h1:IJHfcCEKxYu1Os13ZdwCwIUTUVGYTSAM3YSwc9/Ac1g= -go.etcd.io/etcd/client/v2 v2.305.1/go.mod h1:pMEacxZW7o8pg4CrFE7pquyCJJzZvkvdD2RibOCCCGs= go.mozilla.org/pkcs7 v0.0.0-20200128120323-432b2356ecb1/go.mod h1:SNgMg+EgDFwmvSmLRTNKC5fegJjB7v23qTQ0XLGUNHk= go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU= go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8= @@ -831,29 +808,23 @@ go.opencensus.io v0.23.0/go.mod h1:XItmlyltB5F7CS4xOC1DcqMoFqwtC6OG2xF7mCv7P7E= go.opentelemetry.io/proto/otlp v0.7.0/go.mod h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqeYNgFYFoEGnI= go.uber.org/atomic v1.3.2/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= go.uber.org/atomic v1.4.0/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= -go.uber.org/atomic v1.7.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc= go.uber.org/multierr v1.1.0/go.mod h1:wR5kodmAFQ0UK8QlbwjlSNy0Z68gJhDJUG5sjR94q/0= -go.uber.org/multierr v1.6.0/go.mod h1:cdWPpRnG4AhwMwsgIHip0KRBQjJy5kYEpYjJxpXp9iU= go.uber.org/zap v1.10.0/go.mod h1:vwi/ZaCAaUcBkycHslxD9B2zi4UTXhF60s6SWpuDF0Q= -go.uber.org/zap v1.17.0/go.mod h1:MXVU+bhUf/A7Xi2HNOnopQOrmycQ5Ih87HtOu4q5SSo= golang.org/x/crypto v0.0.0-20171113213409-9f005a07e0d3/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= golang.org/x/crypto v0.0.0-20180723164146-c126467f60eb/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= golang.org/x/crypto v0.0.0-20181009213950-7c1a557ab941/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= -golang.org/x/crypto v0.0.0-20181029021203-45a5f77698d3/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto 
v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20190611184440-5c40567a22f8/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20190701094942-4def268fd1a4/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= -golang.org/x/crypto v0.0.0-20190820162420-60c769a6c586/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= -golang.org/x/crypto v0.0.0-20190923035154-9ee001bba392/go.mod h1:/lpIB1dKB+9EgE3H3cr1v9wB50oz8l4C4h62xy7jSTY= golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20200728195943-123391ffb6de/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20201002170205-7f63de1d35b0/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/crypto v0.0.0-20210322153248-0c34fe9e7dc2/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4= golang.org/x/crypto v0.0.0-20210513164829-c07d793c2f9a/go.mod h1:P+XmwS30IXTQdn5tA2iutPOUgjI07+tq3H3K9MVA1s8= -golang.org/x/crypto v0.0.0-20210817164053-32db794688a5/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= @@ -889,12 +860,10 @@ golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.4.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= -golang.org/x/mod v0.5.0/go.mod 
h1:5OXOZSfqPIIbmVBIIKWRFfZjPR0E5r58TLhUjH0a2Ro= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20181011144130-49bb7cea24b1/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20181023162649-9b4f9f5ad519/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20181220203305-927f97764cc3/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= @@ -912,7 +881,6 @@ golang.org/x/net v0.0.0-20190628185345-da137c7871d7/go.mod h1:z5CRVTTTmAJ677TzLL golang.org/x/net v0.0.0-20190724013045-ca1201d0de80/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20190813141303-74dc4d7220e7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20190827160401-ba9fcec4b297/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20190923162816-aa69164e4478/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20191004110552-13f9640d40b9/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20191112182307-2180aed22343/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20191209160850-c0dbc17a3553/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= @@ -929,6 +897,7 @@ golang.org/x/net v0.0.0-20200520182314-0ba52f642ac2/go.mod h1:qpuaurCH72eLCgpAm/ golang.org/x/net v0.0.0-20200625001655-4c5254603344/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= golang.org/x/net 
v0.0.0-20200707034311-ab3426394381/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= golang.org/x/net v0.0.0-20200822124328-c89045814202/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= +golang.org/x/net v0.0.0-20201006153459-a7d1128ccaa0/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= golang.org/x/net v0.0.0-20201031054903-ff519b6c9102/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= golang.org/x/net v0.0.0-20201110031124-69a78807bb2b/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= @@ -938,13 +907,13 @@ golang.org/x/net v0.0.0-20210119194325-5f4716e94777/go.mod h1:m0MpNAwzfU5UDzcl9v golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20210316092652-d523dce5a7f4/go.mod h1:RBQZq4jEuRlivfhVLdyRGr576XBO4/greRjx4P4O3yc= golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM= -golang.org/x/net v0.0.0-20210410081132-afb366fc7cd1/go.mod h1:9tjilg8BloeKEkVJvy7fQ90B1CfIiPueXVOjqfkSzI8= golang.org/x/net v0.0.0-20210503060351-7fd8e65b6420/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= -golang.org/x/net v0.0.0-20210813160813-60bc85c4be6d/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20211108170745-6635138e15ea/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20220127200216-cd36cc0744dd/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= -golang.org/x/net v0.0.0-20220225172249-27dd8689420f h1:oA4XRj0qtSt8Yo1Zms0CUlsT3KG69V2UGQWPBxujDmc= golang.org/x/net v0.0.0-20220225172249-27dd8689420f/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= +golang.org/x/net v0.0.0-20220325170049-de3da57026de/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= +golang.org/x/net v0.0.0-20220412020605-290c469a71a5 
h1:bRb386wvrE+oBNdF1d/Xh9mQrfQ4ecYhW5qJ5GvTGT4= +golang.org/x/net v0.0.0-20220412020605-290c469a71a5/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= @@ -960,10 +929,11 @@ golang.org/x/oauth2 v0.0.0-20210514164344-f6687ab2804c/go.mod h1:KelEdhl1UZF7XfJ golang.org/x/oauth2 v0.0.0-20210628180205-a41e5a781914/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= golang.org/x/oauth2 v0.0.0-20210805134026-6f1e6394065a/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= golang.org/x/oauth2 v0.0.0-20210819190943-2bc19b11175f/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/oauth2 v0.0.0-20211005180243-6b3c2da341f1/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= golang.org/x/oauth2 v0.0.0-20211104180415-d3ed0bb246c8/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/oauth2 v0.0.0-20220223155221-ee480838109b h1:clP8eMhB30EHdc0bd2Twtq6kgU7yl5ub2cQLSdrv1Dg= golang.org/x/oauth2 v0.0.0-20220223155221-ee480838109b/go.mod h1:DAh4E804XQdzx2j+YRIaUnCqCV2RuMz24cGBJ5QYIrc= +golang.org/x/oauth2 v0.0.0-20220309155454-6242fa91716a/go.mod h1:DAh4E804XQdzx2j+YRIaUnCqCV2RuMz24cGBJ5QYIrc= +golang.org/x/oauth2 v0.0.0-20220411215720-9780585627b5 h1:OSnWWcOd/CtWQC2cYSBgbTSJv3ciqd8r54ySIW2y3RE= +golang.org/x/oauth2 v0.0.0-20220411215720-9780585627b5/go.mod h1:DAh4E804XQdzx2j+YRIaUnCqCV2RuMz24cGBJ5QYIrc= golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod 
h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= @@ -976,15 +946,12 @@ golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJ golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20210220032951-036812b2e83c h1:5KslGYwFpkhGh+Q16bwMP3cOontH8FOep7tGV86Y7SQ= golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20181026203630-95b1ffbd15a5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20181107165924-66b7b1311ac8/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= @@ -1001,12 +968,10 @@ golang.org/x/sys v0.0.0-20190726091711-fc99dfbffb4e/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20190801041406-cbf593c0f2f3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys 
v0.0.0-20190812073006-9eafafc0a87e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190826190057-c7b8b68b1456/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190904154756-749cb33beabd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190916202348-b4ddaad3f8a3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190922100055-0a153f010e69/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190924154521-2837fb4f24fe/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191001151750-bb3f8db39f24/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191005200804-aed5e4c7ecf9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20191008105621-543471e840be/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191022100944-742c48ecaeb7/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191112214154-59a1497f0cea/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= @@ -1023,6 +988,7 @@ golang.org/x/sys v0.0.0-20200122134326-e047566fdf82/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20200124204421-9fbb57f87de9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200202164722-d101bd2416d5/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200212091648-12a6c2dcc1e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200217220822-9197077df867/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200302150141-5c8b2ff67527/go.mod 
h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= @@ -1037,13 +1003,13 @@ golang.org/x/sys v0.0.0-20200728102440-3e129f6d46b1/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20200803210538-64077c9b5642/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200817155316-9781c653f443/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200828194041-157a740278f4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200831180312-196b9ba8737a/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200905004654-be1d3432aa8f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200909081042-eff7692f9009/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200916030750-2334cc1a136f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200922070232-aee5d888a860/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20201112073958-5cba982894dd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201117170446-d9b008d0a637/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20201201145000-ef89a241ccb3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20201202213521-69691e467435/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= @@ -1051,13 +1017,11 @@ golang.org/x/sys v0.0.0-20210104204734-6f8348627aad/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys 
v0.0.0-20210124154548-22da62e12c0c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210220050731-9a76102bfb43/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210303074136-134d130e1a04/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210305230114-8fe3ee5dd75b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210315160823-c6e025ad8005/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210320140829-1e4c9ba3b0c4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210324051608-47abb6519492/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210403161142-5e06dd20ab57/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210426230700-d19ff857e887/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= @@ -1067,23 +1031,18 @@ golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBc golang.org/x/sys v0.0.0-20210616094352-59db8d763f22/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210806184541-e5e7981a1069/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210816183151-1e6c022a8912/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210823070655-63515b42dcdf/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210908233432-aa78b53d3365/go.mod 
h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210917161153-d61c044b1678/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210927094055-39ccf1dd6fa6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20211007075335-d3039528d8ac/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20211025201205-69cdffdb9359/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20211109184856-51b60fd695b3/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20211124211545-fe61309f8881/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20211205182925-97ca703d548d/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20211210111614-af8b64212486/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20211216021012-1d35b9e2eb4e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220114195835-da31bd327af9/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220128215802-99c3d69c2c27/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220209214540-3681064d5158/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220227234510-4e6760a101f9 h1:nhht2DYV/Sn3qOayu8lM+cU1ii9sTLUeBQwQQfUHtrs= golang.org/x/sys v0.0.0-20220227234510-4e6760a101f9/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220328115105-d36c6a25d886/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220412211240-33da011f77ad h1:ntjMns5wyP/fN65tdBD4g8J5w8n015+iIIs9rtjXkY0= +golang.org/x/sys v0.0.0-20220412211240-33da011f77ad/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term 
v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= @@ -1101,8 +1060,8 @@ golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxb golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20200630173020-3af7569d3a1e/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= -golang.org/x/time v0.0.0-20211116232009-f0f3c7e86c11 h1:GZokNIeuVkl3aZHJchRrr13WCsols02MLUcz1U9is6M= -golang.org/x/time v0.0.0-20211116232009-f0f3c7e86c11/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.0.0-20220411224347-583f2d630306 h1:+gHMid33q6pen7kv9xvT+JRinntgeXO2AeZVd0AWD3w= +golang.org/x/time v0.0.0-20220411224347-583f2d630306/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/tools v0.0.0-20180221164845-07fd8470d635/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20181030221726-6c7e314b6563/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= @@ -1121,7 +1080,6 @@ golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgw golang.org/x/tools v0.0.0-20190624222133-a101b041ded4/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= golang.org/x/tools v0.0.0-20190628153133-6cdbf07be9d0/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= golang.org/x/tools v0.0.0-20190816200558-6889da9d5479/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20190907020128-2ca718005c18/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20190911174233-4f2ddba30aff/go.mod 
h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191012152004-8de300cfc20a/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191113191852-77e3bb0ad9e7/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= @@ -1165,8 +1123,9 @@ golang.org/x/tools v0.1.5/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 h1:go1bK/D/BFZV2I8cIQd1NKEZ+0owSTG1fDTci4IqFcE= golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20220411194840-2f41105eb62f h1:GGU+dLjvlC3qDwqYgL6UgRmHXhOOgns0bZu2Ty5mm6U= +golang.org/x/xerrors v0.0.0-20220411194840-2f41105eb62f/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= google.golang.org/api v0.0.0-20160322025152-9bf6e6e569ff/go.mod h1:4mhQ8q/RsB7i+udVvVy5NUi08OU8ZlA0gRVgrF7VFY0= google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE= google.golang.org/api v0.7.0/go.mod h1:WtwebWUNSVBH/HAw79HIFXZNqEvBhG+Ra+ax0hx3E3M= @@ -1197,17 +1156,14 @@ google.golang.org/api v0.54.0/go.mod h1:7C4bFFOvVDGXjfDTAsgGwDgAxRDeQ4X8NvUedIt6 google.golang.org/api v0.55.0/go.mod h1:38yMfeP1kfjsl8isn0tliTjIb1rJXcQi4UXlbqivdVE= google.golang.org/api v0.56.0/go.mod h1:38yMfeP1kfjsl8isn0tliTjIb1rJXcQi4UXlbqivdVE= google.golang.org/api v0.57.0/go.mod h1:dVPlbZyBo2/OjBpmvNdpn2GRm6rPy75jyU7bmhdrMgI= -google.golang.org/api v0.58.0/go.mod h1:cAbP2FsxoGVNwtgNAmmn3y5G1TWAiVYRmg4yku3lv+E= -google.golang.org/api v0.59.0/go.mod h1:sT2boj7M9YJxZzgeZqXogmhfmRWDtPzT31xkieUbuZU= google.golang.org/api v0.61.0/go.mod 
h1:xQRti5UdCmoCEqFxcz93fTl338AVqDgyaDRuOZ3hg9I= -google.golang.org/api v0.62.0/go.mod h1:dKmwPCydfsad4qCH08MSdgWjfHOyfpd4VtDGgRFdavw= google.golang.org/api v0.63.0/go.mod h1:gs4ij2ffTRXwuzzgJl/56BdwJaA194ijkfn++9tDuPo= -google.golang.org/api v0.64.0/go.mod h1:931CdxA8Rm4t6zqTFGSsgwbAEZ2+GMYurbndwSimebM= -google.golang.org/api v0.66.0/go.mod h1:I1dmXYpX7HGwz/ejRxwQp2qj5bFAz93HiCU1C1oYd9M= google.golang.org/api v0.67.0/go.mod h1:ShHKP8E60yPsKNw/w8w+VYaj9H6buA5UqDp8dhbQZ6g= -google.golang.org/api v0.69.0/go.mod h1:boanBiw+h5c3s+tBPgEzLDRHfFLWV0qXxRHz3ws7C80= -google.golang.org/api v0.70.0 h1:67zQnAE0T2rB0A3CwLSas0K+SbVzSxP+zTLkQLexeiw= google.golang.org/api v0.70.0/go.mod h1:Bs4ZM2HGifEvXwd50TtW70ovgJffJYw2oRCOFU/SkfA= +google.golang.org/api v0.71.0/go.mod h1:4PyU6e6JogV1f9eA4voyrTY2batOLdgZ5qZ5HOCc4j8= +google.golang.org/api v0.74.0/go.mod h1:ZpfMZOVRMywNyvJFeqL9HRWBgAuRfSjJFpe9QtRRyDs= +google.golang.org/api v0.76.0 h1:UkZl25bR1FHNqtK/EKs3vCdpZtUO6gea3YElTwc8pQg= +google.golang.org/api v0.76.0/go.mod h1:pU9QmyHLnzlpar1Mjt4IbapUCy8J+6HD6GeELN69ljA= google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= @@ -1259,6 +1215,7 @@ google.golang.org/genproto v0.0.0-20210222152913-aa3ee6e6a81c/go.mod h1:FWY/as6D google.golang.org/genproto v0.0.0-20210303154014-9728d6b83eeb/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20210310155132-4ce2db91004e/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20210319143718-93e7006c17a6/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210329143202-679c6ae281ee/go.mod h1:9lPAdzaEmUacj36I+k7YKbEc5CXzPIeORRgDAUOu28A= google.golang.org/genproto v0.0.0-20210402141018-6c239bbf2bb1/go.mod 
h1:9lPAdzaEmUacj36I+k7YKbEc5CXzPIeORRgDAUOu28A= google.golang.org/genproto v0.0.0-20210513213006-bf773b8c8384/go.mod h1:P3QM42oQyzQSnHPnZ/vqoCdDmzH28fzWByN9asMeM8A= google.golang.org/genproto v0.0.0-20210602131652-f16073e35f0c/go.mod h1:UODoCrxHCcBojKKwX1terBiRUaqAsFqJiF615XL43r0= @@ -1275,30 +1232,24 @@ google.golang.org/genproto v0.0.0-20210828152312-66f60bf46e71/go.mod h1:eFjDcFEc google.golang.org/genproto v0.0.0-20210831024726-fe130286e0e2/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY= google.golang.org/genproto v0.0.0-20210903162649-d08c68adba83/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY= google.golang.org/genproto v0.0.0-20210909211513-a8c4777a87af/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY= -google.golang.org/genproto v0.0.0-20210917145530-b395a37504d4/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY= google.golang.org/genproto v0.0.0-20210924002016-3dee208752a0/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= -google.golang.org/genproto v0.0.0-20211008145708-270636b82663/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= -google.golang.org/genproto v0.0.0-20211016002631-37fc39342514/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= -google.golang.org/genproto v0.0.0-20211018162055-cf77aa76bad2/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= -google.golang.org/genproto v0.0.0-20211028162531-8db9c33dc351/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= google.golang.org/genproto v0.0.0-20211118181313-81c1377c94b1/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= -google.golang.org/genproto v0.0.0-20211129164237-f09f9a12af12/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= -google.golang.org/genproto v0.0.0-20211203200212-54befc351ae9/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= google.golang.org/genproto v0.0.0-20211206160659-862468c7d6e0/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= google.golang.org/genproto v0.0.0-20211208223120-3a66f561d7aa/go.mod 
h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= google.golang.org/genproto v0.0.0-20211221195035-429b39de9b1c/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= -google.golang.org/genproto v0.0.0-20211223182754-3ac035c7e7cb/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= -google.golang.org/genproto v0.0.0-20220111164026-67b88f271998/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= -google.golang.org/genproto v0.0.0-20220114231437-d2e6a121cae0/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= google.golang.org/genproto v0.0.0-20220126215142-9970aeb2e350/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= -google.golang.org/genproto v0.0.0-20220201184016-50beb8ab5c44/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= google.golang.org/genproto v0.0.0-20220207164111-0872dc986b00/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= -google.golang.org/genproto v0.0.0-20220211171837-173942840c17/go.mod h1:kGP+zUP2Ddo0ayMi4YuN7C3WZyJvGLZRh8Z5wnAqvEI= -google.golang.org/genproto v0.0.0-20220216160803-4663080d8bc8/go.mod h1:kGP+zUP2Ddo0ayMi4YuN7C3WZyJvGLZRh8Z5wnAqvEI= google.golang.org/genproto v0.0.0-20220218161850-94dd64e39d7c/go.mod h1:kGP+zUP2Ddo0ayMi4YuN7C3WZyJvGLZRh8Z5wnAqvEI= google.golang.org/genproto v0.0.0-20220222213610-43724f9ea8cf/go.mod h1:kGP+zUP2Ddo0ayMi4YuN7C3WZyJvGLZRh8Z5wnAqvEI= -google.golang.org/genproto v0.0.0-20220302033224-9aa15565e42a h1:uqouglH745GoGeZ1YFZbPBiu961tgi/9Qm5jaorajjQ= -google.golang.org/genproto v0.0.0-20220302033224-9aa15565e42a/go.mod h1:kGP+zUP2Ddo0ayMi4YuN7C3WZyJvGLZRh8Z5wnAqvEI= +google.golang.org/genproto v0.0.0-20220304144024-325a89244dc8/go.mod h1:kGP+zUP2Ddo0ayMi4YuN7C3WZyJvGLZRh8Z5wnAqvEI= +google.golang.org/genproto v0.0.0-20220310185008-1973136f34c6/go.mod h1:kGP+zUP2Ddo0ayMi4YuN7C3WZyJvGLZRh8Z5wnAqvEI= +google.golang.org/genproto v0.0.0-20220324131243-acbaeb5b85eb/go.mod h1:hAL49I2IFola2sVEjAn7MEwsja0xp51I0tlGAf9hz4E= +google.golang.org/genproto v0.0.0-20220405205423-9d709892a2bf/go.mod 
h1:8w6bsBMX6yCPbAVTeqQHvzxW0EIFigd5lZyahWgyfDo= +google.golang.org/genproto v0.0.0-20220407144326-9054f6ed7bac/go.mod h1:8w6bsBMX6yCPbAVTeqQHvzxW0EIFigd5lZyahWgyfDo= +google.golang.org/genproto v0.0.0-20220413183235-5e96e2839df9/go.mod h1:8w6bsBMX6yCPbAVTeqQHvzxW0EIFigd5lZyahWgyfDo= +google.golang.org/genproto v0.0.0-20220414192740-2d67ff6cf2b4/go.mod h1:8w6bsBMX6yCPbAVTeqQHvzxW0EIFigd5lZyahWgyfDo= +google.golang.org/genproto v0.0.0-20220426171045-31bebdecfb46 h1:G1IeWbjrqEq9ChWxEuRPJu6laA67+XgTFHVSAvepr38= +google.golang.org/genproto v0.0.0-20220426171045-31bebdecfb46/go.mod h1:8w6bsBMX6yCPbAVTeqQHvzxW0EIFigd5lZyahWgyfDo= google.golang.org/grpc v0.0.0-20160317175043-d3ddb4469d5a/go.mod h1:yo6s7OP7yaDglbqo1J04qKzAhqBH6lvTonzMVmEdcZw= google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38= @@ -1329,9 +1280,9 @@ google.golang.org/grpc v1.39.0/go.mod h1:PImNr+rS9TWYb2O4/emRugxiyHZ5JyHW5F+RPnD google.golang.org/grpc v1.39.1/go.mod h1:PImNr+rS9TWYb2O4/emRugxiyHZ5JyHW5F+RPnDzfrE= google.golang.org/grpc v1.40.0/go.mod h1:ogyxbiOoUXAkP+4+xa6PZSE9DZgIHtSpzjDTB9KAK34= google.golang.org/grpc v1.40.1/go.mod h1:ogyxbiOoUXAkP+4+xa6PZSE9DZgIHtSpzjDTB9KAK34= -google.golang.org/grpc v1.42.0/go.mod h1:k+4IHHFw41K8+bbowsex27ge2rCb65oeWqe4jJ590SU= -google.golang.org/grpc v1.44.0 h1:weqSxi/TMs1SqFRMHCtBgXRs8k3X39QIDEZ0pRcttUg= google.golang.org/grpc v1.44.0/go.mod h1:k+4IHHFw41K8+bbowsex27ge2rCb65oeWqe4jJ590SU= +google.golang.org/grpc v1.45.0 h1:NEpgUqV3Z+ZjkqMsxMg11IaDrXY4RY6CQukSGK0uI1M= +google.golang.org/grpc v1.45.0/go.mod h1:lN7owxKUQEqMfSyQikvvk5tf/6zMPsrK+ONuO11+0rQ= google.golang.org/grpc/cmd/protoc-gen-go-grpc v1.1.0/go.mod h1:6Kw0yEErY5E/yWrBtf03jp27GLLJujG4z/JK95pnjjw= google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod 
h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= @@ -1345,8 +1296,9 @@ google.golang.org/protobuf v1.24.0/go.mod h1:r/3tXBNzIEhYS9I1OUVjXDlt8tc493IdKGj google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c= google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= -google.golang.org/protobuf v1.27.1 h1:SnqbnDw1V7RiZcXPx5MEeqPv2s79L9i7BJUlG/+RurQ= google.golang.org/protobuf v1.27.1/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= +google.golang.org/protobuf v1.28.0 h1:w43yiav+6bVFTBQFZX0r7ipe9JQ1QsbMgHwbBziscLw= +google.golang.org/protobuf v1.28.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= gopkg.in/airbrake/gobrake.v2 v2.0.9/go.mod h1:/h5ZAUhDkGaJfjzjKLSjv6zCL6O0LLBxU4K+aSYdM/U= gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= @@ -1360,7 +1312,6 @@ gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys= gopkg.in/gemnasium/logrus-airbrake-hook.v2 v2.1.2/go.mod h1:Xk6kEKp8OKb+X14hQBKWaSkCsqBpgog8nAV2xsGOxlo= gopkg.in/inf.v0 v0.9.1/go.mod h1:cWUDdTG/fYaXco+Dcufb5Vnc6Gp2YChqWtbxRZE0mXw= -gopkg.in/ini.v1 v1.66.2/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= gopkg.in/jcmturner/aescts.v1 v1.0.1/go.mod h1:nsR8qBOg+OucoIW+WMhB3GspUQXq9XorLnQb9XtvcOo= gopkg.in/jcmturner/dnsutils.v1 v1.0.1/go.mod h1:m3v+5svpVOhtFAP/wSz+yzh4Mc0Fg7eRhxkJMWSIz9Q= gopkg.in/jcmturner/goidentity.v3 v3.0.0/go.mod h1:oG2kH0IvSYNIu80dVAyu/yoefjq1mNfM5bm88whjWx4= @@ -1392,6 +1343,7 @@ gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C gotest.tools v2.2.0+incompatible h1:VsBPFP1AI068pPrMxtb/S8Zkgf9xEmTLJjfM+P5UIEo= gotest.tools 
v2.2.0+incompatible/go.mod h1:DsYFclhRJ6vuDpmuTbkuFWG+y2sxOXAzmJt81HFBacw= gotest.tools v2.2.0+incompatible/go.mod h1:DsYFclhRJ6vuDpmuTbkuFWG+y2sxOXAzmJt81HFBacw= +gotest.tools/gotestsum v1.7.0/go.mod h1:V1m4Jw3eBerhI/A6qCxUE07RnCg7ACkKj9BYcAm09V8= gotest.tools/v3 v3.0.2/go.mod h1:3SzNCllyD9/Y+b5r9JIKQ474KzkZyqLqEfYqMsX94Bk= gotest.tools/v3 v3.0.2/go.mod h1:3SzNCllyD9/Y+b5r9JIKQ474KzkZyqLqEfYqMsX94Bk= gotest.tools/v3 v3.0.3 h1:4AuOwCGf4lLR9u3YOe2awrHygurzhO/HeQ6laiA6Sx0= @@ -1404,12 +1356,24 @@ honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt honnef.co/go/tools v0.0.1-2020.1.3/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= honnef.co/go/tools v0.0.1-2020.1.4/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= k8s.io/api v0.20.1/go.mod h1:KqwcCVogGxQY3nBlRpwt+wpAMF/KjaCc7RpywacvqUo= +k8s.io/api v0.20.4/go.mod h1:++lNL1AJMkDymriNniQsWRkMDzRaX2Y/POTUi8yvqYQ= +k8s.io/api v0.20.6/go.mod h1:X9e8Qag6JV/bL5G6bU8sdVRltWKmdHsFUGS3eVndqE8= k8s.io/apimachinery v0.20.1/go.mod h1:WlLqWAHZGg07AeltaI0MV5uk1Omp8xaN0JGLY6gkRpU= +k8s.io/apimachinery v0.20.4/go.mod h1:WlLqWAHZGg07AeltaI0MV5uk1Omp8xaN0JGLY6gkRpU= +k8s.io/apimachinery v0.20.6/go.mod h1:ejZXtW1Ra6V1O5H8xPBGz+T3+4gfkTCeExAHKU57MAc= k8s.io/apiserver v0.20.1/go.mod h1:ro5QHeQkgMS7ZGpvf4tSMx6bBOgPfE+f52KwvXfScaU= +k8s.io/apiserver v0.20.4/go.mod h1:Mc80thBKOyy7tbvFtB4kJv1kbdD0eIH8k8vianJcbFM= +k8s.io/apiserver v0.20.6/go.mod h1:QIJXNt6i6JB+0YQRNcS0hdRHJlMhflFmsBDeSgT1r8Q= k8s.io/client-go v0.20.1/go.mod h1:/zcHdt1TeWSd5HoUe6elJmHSQ6uLLgp4bIJHVEuy+/Y= +k8s.io/client-go v0.20.4/go.mod h1:LiMv25ND1gLUdBeYxBIwKpkSC5IsozMMmOOeSJboP+k= +k8s.io/client-go v0.20.6/go.mod h1:nNQMnOvEUEsOzRRFIIkdmYOjAZrC8bgq0ExboWSU1I0= k8s.io/component-base v0.20.1/go.mod h1:guxkoJnNoh8LNrbtiQOlyp2Y2XFCZQmrcg2n/DeYNLk= +k8s.io/component-base v0.20.4/go.mod h1:t4p9EdiagbVCJKrQ1RsA5/V4rFQNDfRlevJajlGwgjI= +k8s.io/component-base v0.20.6/go.mod h1:6f1MPBAeI+mvuts3sIdtpjljHWBQ2cIy38oBIWMYnrM= k8s.io/cri-api 
v0.17.3/go.mod h1:X1sbHmuXhwaHs9xxYffLqJogVsnI+f6cPRcgPel7ywM= k8s.io/cri-api v0.20.1/go.mod h1:2JRbKt+BFLTjtrILYVqQK5jqhI+XNdF6UiGMgczeBCI= +k8s.io/cri-api v0.20.4/go.mod h1:2JRbKt+BFLTjtrILYVqQK5jqhI+XNdF6UiGMgczeBCI= +k8s.io/cri-api v0.20.6/go.mod h1:ew44AjNXwyn1s0U4xCKGodU7J1HzBeZ1MpGrpa5r8Yc= k8s.io/gengo v0.0.0-20200413195148-3a45101e95ac/go.mod h1:ezvh/TsK7cY6rbqRK0oQQ8IAqLxYwwyPxAX1Pzy0ii0= k8s.io/klog/v2 v2.0.0/go.mod h1:PBfzABfn139FHAV07az/IF9Wp1bkk3vpT2XSJ76fSDE= k8s.io/klog/v2 v2.4.0/go.mod h1:Od+F08eJP+W3HUb4pSrPpgp9DGU4GzlpG/TmITuYh/Y= @@ -1420,6 +1384,8 @@ rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8 rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0= rsc.io/sampler v1.3.0/go.mod h1:T1hPZKmBbMNahiBKFy5HrXp6adAjACjK9JXDnKaTXpA= sigs.k8s.io/apiserver-network-proxy/konnectivity-client v0.0.14/go.mod h1:LEScyzhFmoF5pso/YSeBstl57mOzx9xlU9n85RGrDQg= +sigs.k8s.io/apiserver-network-proxy/konnectivity-client v0.0.15/go.mod h1:LEScyzhFmoF5pso/YSeBstl57mOzx9xlU9n85RGrDQg= sigs.k8s.io/structured-merge-diff/v4 v4.0.2/go.mod h1:bJZC9H9iH24zzfZ/41RGcq60oK1F7G282QMXDPYydCw= +sigs.k8s.io/structured-merge-diff/v4 v4.0.3/go.mod h1:bJZC9H9iH24zzfZ/41RGcq60oK1F7G282QMXDPYydCw= sigs.k8s.io/yaml v1.1.0/go.mod h1:UJmg0vDUVViEyp3mgSv9WPwZCDxu4rQW1olrI1uml+o= sigs.k8s.io/yaml v1.2.0/go.mod h1:yfXDCHCao9+ENCvLSE62v9VSji2MKu5jeNfTrofGhJc= diff --git a/sdks/go/pkg/beam/coder.go b/sdks/go/pkg/beam/coder.go index fe6be4728538..a6e1441e8c0b 100644 --- a/sdks/go/pkg/beam/coder.go +++ b/sdks/go/pkg/beam/coder.go @@ -374,11 +374,3 @@ func schemaDec(t reflect.Type, in []byte) (T, error) { } return val, nil } - -func newSchemaCoder(t reflect.Type) (*coder.CustomCoder, error) { - c, err := coder.NewCustomCoder("schema", t, schemaEnc, schemaDec) - if err != nil { - return nil, errors.Wrapf(err, "invalid coder") - } - return c, nil -} diff --git a/sdks/go/pkg/beam/core/graph/fn.go b/sdks/go/pkg/beam/core/graph/fn.go index 
931458d922d2..0dc19e176665 100644 --- a/sdks/go/pkg/beam/core/graph/fn.go +++ b/sdks/go/pkg/beam/core/graph/fn.go @@ -122,7 +122,6 @@ func NewFn(fn interface{}) (*Fn, error) { } methods[name] = f } - return &Fn{Recv: fn, methods: methods, annotations: annotations}, nil } // TODO(lostluck): Consider moving this into the reflectx package. for i := 0; i < val.Type().NumMethod(); i++ { @@ -133,6 +132,9 @@ func NewFn(fn interface{}) (*Fn, error) { if m.Name == "String" { continue // skip: harmless } + if _, ok := methods[m.Name]; ok { + continue // skip : already wrapped + } // CAVEAT(herohde) 5/22/2017: The type val.Type.Method.Type is not // the same as val.Method.Type: the former has the explicit receiver. diff --git a/sdks/go/pkg/beam/core/graph/fn_test.go b/sdks/go/pkg/beam/core/graph/fn_test.go index 5f0e274b0840..9333db347342 100644 --- a/sdks/go/pkg/beam/core/graph/fn_test.go +++ b/sdks/go/pkg/beam/core/graph/fn_test.go @@ -13,6 +13,8 @@ // See the License for the specific language governing permissions and // limitations under the License. +//lint:file-ignore U1000 unused functions/types are for tests + package graph import ( diff --git a/sdks/go/pkg/beam/core/runtime/exec/emit.go b/sdks/go/pkg/beam/core/runtime/exec/emit.go index e24b6925ed7b..c008b0b412d0 100644 --- a/sdks/go/pkg/beam/core/runtime/exec/emit.go +++ b/sdks/go/pkg/beam/core/runtime/exec/emit.go @@ -35,8 +35,9 @@ type ReusableEmitter interface { Value() interface{} } -// ReusableEmitter is a resettable value needed to hold the implicit context and -// emit event time. It also has the ability to have a watermark estimator attached. +// ReusableTimestampObservingWatermarkEmitter is a resettable value needed to hold +// the implicit context and emit event time. It also has the ability to have a +// watermark estimator attached. 
type ReusableTimestampObservingWatermarkEmitter interface { ReusableEmitter AttachEstimator(est *sdf.WatermarkEstimator) diff --git a/sdks/go/pkg/beam/core/runtime/exec/sdf.go b/sdks/go/pkg/beam/core/runtime/exec/sdf.go index f5dd6a7431bc..8ca57625573f 100644 --- a/sdks/go/pkg/beam/core/runtime/exec/sdf.go +++ b/sdks/go/pkg/beam/core/runtime/exec/sdf.go @@ -665,15 +665,20 @@ func (n *ProcessSizedElementsAndRestrictions) singleWindowSplit(f float64, pWeSt return []*FullValue{}, []*FullValue{}, nil } - pfv, err := n.newSplitResult(p, n.elm.Windows, pWeState) - if err != nil { - return nil, nil, err + var primaryResult []*FullValue + if p != nil { + pfv, err := n.newSplitResult(p, n.elm.Windows, pWeState) + if err != nil { + return nil, nil, err + } + primaryResult = append(primaryResult, pfv) } + rfv, err := n.newSplitResult(r, n.elm.Windows, rWeState) if err != nil { return nil, nil, err } - return []*FullValue{pfv}, []*FullValue{rfv}, nil + return primaryResult, []*FullValue{rfv}, nil } // multiWindowSplit is intended for splitting multi-window elements in diff --git a/sdks/go/pkg/beam/core/runtime/genx/genx_test.go b/sdks/go/pkg/beam/core/runtime/genx/genx_test.go index 24f96a5bc7a9..17b690381570 100644 --- a/sdks/go/pkg/beam/core/runtime/genx/genx_test.go +++ b/sdks/go/pkg/beam/core/runtime/genx/genx_test.go @@ -13,6 +13,8 @@ // See the License for the specific language governing permissions and // limitations under the License. 
+//lint:file-ignore U1000 unused functions/types are for tests + package genx import ( diff --git a/sdks/go/pkg/beam/core/runtime/graphx/schema/logicaltypes.go b/sdks/go/pkg/beam/core/runtime/graphx/schema/logicaltypes.go index 42bda4f79b6f..cea0cbbf76eb 100644 --- a/sdks/go/pkg/beam/core/runtime/graphx/schema/logicaltypes.go +++ b/sdks/go/pkg/beam/core/runtime/graphx/schema/logicaltypes.go @@ -124,7 +124,6 @@ type LogicalType struct { identifier string goT, storageT, argT reflect.Type argV reflect.Value - toStorage, toGo func(value reflect.Value) reflect.Value } // ID is a unique identifier for the logical type. diff --git a/sdks/go/pkg/beam/core/runtime/graphx/schema/schema_test.go b/sdks/go/pkg/beam/core/runtime/graphx/schema/schema_test.go index ef66499de4ec..5ef2f707280a 100644 --- a/sdks/go/pkg/beam/core/runtime/graphx/schema/schema_test.go +++ b/sdks/go/pkg/beam/core/runtime/graphx/schema/schema_test.go @@ -13,6 +13,8 @@ // See the License for the specific language governing permissions and // limitations under the License. +//lint:file-ignore U1000 unused functions/types are for tests + package schema import ( diff --git a/sdks/go/pkg/beam/core/runtime/graphx/serialize_test.go b/sdks/go/pkg/beam/core/runtime/graphx/serialize_test.go index b1a3369e768e..cd0e6dda994f 100644 --- a/sdks/go/pkg/beam/core/runtime/graphx/serialize_test.go +++ b/sdks/go/pkg/beam/core/runtime/graphx/serialize_test.go @@ -13,6 +13,8 @@ // See the License for the specific language governing permissions and // limitations under the License. 
+//lint:file-ignore U1000 unused functions/types are for tests + package graphx import ( diff --git a/sdks/go/pkg/beam/core/runtime/graphx/translate.go b/sdks/go/pkg/beam/core/runtime/graphx/translate.go index b28820966a8a..d5c7a31b3e4c 100644 --- a/sdks/go/pkg/beam/core/runtime/graphx/translate.go +++ b/sdks/go/pkg/beam/core/runtime/graphx/translate.go @@ -1248,12 +1248,6 @@ func mustEncodeMultiEdgeBase64(edge *graph.MultiEdge) (string, error) { }), nil } -// makeBytesKeyedCoder returns KV<[]byte,A,> for any coder, -// even if the coder is already a KV coder. -func makeBytesKeyedCoder(c *coder.Coder) *coder.Coder { - return coder.NewKV([]*coder.Coder{coder.NewBytes(), c}) -} - func edgeID(edge *graph.MultiEdge) string { return fmt.Sprintf("e%v", edge.ID()) } diff --git a/sdks/go/pkg/beam/core/runtime/graphx/xlang_test.go b/sdks/go/pkg/beam/core/runtime/graphx/xlang_test.go index a94fce634e69..7b68a7707682 100644 --- a/sdks/go/pkg/beam/core/runtime/graphx/xlang_test.go +++ b/sdks/go/pkg/beam/core/runtime/graphx/xlang_test.go @@ -145,12 +145,6 @@ func newComponents(ts []string) *pipepb.Components { return components } -func expectPanic(t *testing.T, err string) { - if r := recover(); r == nil { - t.Errorf("expected panic; %v", err) - } -} - func TestExpandedTransform(t *testing.T) { t.Run("Correct PTransform", func(t *testing.T) { want := newTransform("x") diff --git a/sdks/go/pkg/beam/core/runtime/types_test.go b/sdks/go/pkg/beam/core/runtime/types_test.go index ed9b9e9606ae..b301df6da14f 100644 --- a/sdks/go/pkg/beam/core/runtime/types_test.go +++ b/sdks/go/pkg/beam/core/runtime/types_test.go @@ -13,6 +13,8 @@ // See the License for the specific language governing permissions and // limitations under the License. 
+//lint:file-ignore U1000 unused functions/types are for tests + package runtime import ( diff --git a/sdks/go/pkg/beam/io/synthetic/source.go b/sdks/go/pkg/beam/io/synthetic/source.go index 822c416f6969..d80a89c931c5 100644 --- a/sdks/go/pkg/beam/io/synthetic/source.go +++ b/sdks/go/pkg/beam/io/synthetic/source.go @@ -27,17 +27,17 @@ import ( "encoding/json" "fmt" "math/rand" - "reflect" "time" "github.com/apache/beam/sdks/v2/go/pkg/beam" "github.com/apache/beam/sdks/v2/go/pkg/beam/core/sdf" "github.com/apache/beam/sdks/v2/go/pkg/beam/io/rtrackers/offsetrange" + "github.com/apache/beam/sdks/v2/go/pkg/beam/register" ) func init() { - beam.RegisterType(reflect.TypeOf((*sourceFn)(nil)).Elem()) - beam.RegisterType(reflect.TypeOf((*SourceConfig)(nil)).Elem()) + register.DoFn3x1[*sdf.LockRTracker, SourceConfig, func([]byte, []byte), error]((*sourceFn)(nil)) + register.Emitter2[[]byte, []byte]() } // Source creates a synthetic source transform that emits randomly diff --git a/sdks/go/pkg/beam/io/synthetic/step.go b/sdks/go/pkg/beam/io/synthetic/step.go index 3691fec9b4fe..d800f5a054d3 100644 --- a/sdks/go/pkg/beam/io/synthetic/step.go +++ b/sdks/go/pkg/beam/io/synthetic/step.go @@ -18,18 +18,19 @@ package synthetic import ( "fmt" "math/rand" - "reflect" "time" "github.com/apache/beam/sdks/v2/go/pkg/beam/core/sdf" + "github.com/apache/beam/sdks/v2/go/pkg/beam/register" "github.com/apache/beam/sdks/v2/go/pkg/beam" "github.com/apache/beam/sdks/v2/go/pkg/beam/io/rtrackers/offsetrange" ) func init() { - beam.RegisterType(reflect.TypeOf((*stepFn)(nil)).Elem()) - beam.RegisterType(reflect.TypeOf((*sdfStepFn)(nil)).Elem()) + register.DoFn3x0[[]byte, []byte, func([]byte, []byte)]((*stepFn)(nil)) + register.DoFn4x0[*sdf.LockRTracker, []byte, []byte, func([]byte, []byte)]((*sdfStepFn)(nil)) + register.Emitter2[[]byte, []byte]() } // Step creates a synthetic step transform that receives KV<[]byte, []byte> diff --git a/sdks/go/pkg/beam/io/xlang/jdbcio/jdbc.go 
b/sdks/go/pkg/beam/io/xlang/jdbcio/jdbc.go index 6938c51c2018..a027e00c5db0 100644 --- a/sdks/go/pkg/beam/io/xlang/jdbcio/jdbc.go +++ b/sdks/go/pkg/beam/io/xlang/jdbcio/jdbc.go @@ -69,8 +69,6 @@ var defaultClasspaths = map[string][]string{ "com.mysql.jdbc.Driver": []string{"mysql:mysql-connector-java:8.0.28"}, } -var autoStartupAddress string = xlangx.UseAutomatedJavaExpansionService(serviceGradleTarget) - // jdbcConfigSchema is the config schema as per the expected corss language payload // for JDBC IO read and write transform. type jdbcConfigSchema struct { diff --git a/sdks/go/pkg/beam/register/register.go b/sdks/go/pkg/beam/register/register.go index caeb687bcea0..6cf4cfa9efc6 100644 --- a/sdks/go/pkg/beam/register/register.go +++ b/sdks/go/pkg/beam/register/register.go @@ -2250,12 +2250,23 @@ func registerDoFn0x0StructWrappersAndFuncs(doFn genericDoFn0x0) { reflectx.RegisterStructWrapper(reflect.TypeOf(doFn).Elem(), wrapperFn) } -// DoFn0x0 registers your DoFn to optimize execution at runtime. +// DoFn0x0 registers your structural DoFn to optimize execution at runtime. func DoFn0x0(doFn genericDoFn0x0) { registerDoFnTypes(doFn) registerDoFn0x0StructWrappersAndFuncs(doFn) } +// Function0x0 registers your functional DoFn to optimize execution at runtime. 
+func Function0x0(doFn func()) { + runtime.RegisterFunction(doFn) + registerMethodTypes(reflect.TypeOf(doFn)) + caller := func(fn interface{}) reflectx.Func { + f := fn.(func()) + return &caller0x0{fn: f} + } + reflectx.RegisterFunc(reflect.TypeOf((*func())(nil)).Elem(), caller) +} + type genericDoFn1x0[I0 any] interface { ProcessElement(i0 I0) } @@ -2277,8 +2288,8 @@ func (c *caller1x0[I0]) Call(args []interface{}) []interface{} { return []interface{}{} } -func (c *caller1x0[I0]) Call1x0(arg0 I0) { - c.fn(arg0) +func (c *caller1x0[I0]) Call1x0(arg0 interface{}) { + c.fn(arg0.(I0)) } func registerDoFn1x0StructWrappersAndFuncs[I0 any](doFn genericDoFn1x0[I0]) { @@ -2320,13 +2331,25 @@ func registerDoFn1x0StructWrappersAndFuncs[I0 any](doFn genericDoFn1x0[I0]) { reflectx.RegisterStructWrapper(reflect.TypeOf(doFn).Elem(), wrapperFn) } -// DoFn1x0[I0 any] registers your DoFn to optimize execution at runtime. +// DoFn1x0[I0 any] registers your structural DoFn to optimize execution at runtime. // DoFn input and output parameter types should be provided in order as the generic constraints. func DoFn1x0[I0 any](doFn genericDoFn1x0[I0]) { registerDoFnTypes(doFn) registerDoFn1x0StructWrappersAndFuncs[I0](doFn) } +// Function1x0[I0 any] registers your functional DoFn to optimize execution at runtime. +// Function input and output parameter types should be provided in order as the generic constraints. 
+func Function1x0[I0 any](doFn func(I0)) { + runtime.RegisterFunction(doFn) + registerMethodTypes(reflect.TypeOf(doFn)) + caller := func(fn interface{}) reflectx.Func { + f := fn.(func(I0)) + return &caller1x0[I0]{fn: f} + } + reflectx.RegisterFunc(reflect.TypeOf((*func(I0))(nil)).Elem(), caller) +} + type genericDoFn2x0[I0, I1 any] interface { ProcessElement(i0 I0, i1 I1) } @@ -2348,8 +2371,8 @@ func (c *caller2x0[I0, I1]) Call(args []interface{}) []interface{} { return []interface{}{} } -func (c *caller2x0[I0, I1]) Call2x0(arg0 I0, arg1 I1) { - c.fn(arg0, arg1) +func (c *caller2x0[I0, I1]) Call2x0(arg0 interface{}, arg1 interface{}) { + c.fn(arg0.(I0), arg1.(I1)) } func registerDoFn2x0StructWrappersAndFuncs[I0, I1 any](doFn genericDoFn2x0[I0, I1]) { @@ -2391,13 +2414,25 @@ func registerDoFn2x0StructWrappersAndFuncs[I0, I1 any](doFn genericDoFn2x0[I0, I reflectx.RegisterStructWrapper(reflect.TypeOf(doFn).Elem(), wrapperFn) } -// DoFn2x0[I0, I1 any] registers your DoFn to optimize execution at runtime. +// DoFn2x0[I0, I1 any] registers your structural DoFn to optimize execution at runtime. // DoFn input and output parameter types should be provided in order as the generic constraints. func DoFn2x0[I0, I1 any](doFn genericDoFn2x0[I0, I1]) { registerDoFnTypes(doFn) registerDoFn2x0StructWrappersAndFuncs[I0, I1](doFn) } +// Function2x0[I0, I1 any] registers your functional DoFn to optimize execution at runtime. +// Function input and output parameter types should be provided in order as the generic constraints. 
+func Function2x0[I0, I1 any](doFn func(I0, I1)) { + runtime.RegisterFunction(doFn) + registerMethodTypes(reflect.TypeOf(doFn)) + caller := func(fn interface{}) reflectx.Func { + f := fn.(func(I0, I1)) + return &caller2x0[I0, I1]{fn: f} + } + reflectx.RegisterFunc(reflect.TypeOf((*func(I0, I1))(nil)).Elem(), caller) +} + type genericDoFn3x0[I0, I1, I2 any] interface { ProcessElement(i0 I0, i1 I1, i2 I2) } @@ -2419,8 +2454,8 @@ func (c *caller3x0[I0, I1, I2]) Call(args []interface{}) []interface{} { return []interface{}{} } -func (c *caller3x0[I0, I1, I2]) Call3x0(arg0 I0, arg1 I1, arg2 I2) { - c.fn(arg0, arg1, arg2) +func (c *caller3x0[I0, I1, I2]) Call3x0(arg0 interface{}, arg1 interface{}, arg2 interface{}) { + c.fn(arg0.(I0), arg1.(I1), arg2.(I2)) } func registerDoFn3x0StructWrappersAndFuncs[I0, I1, I2 any](doFn genericDoFn3x0[I0, I1, I2]) { @@ -2462,13 +2497,25 @@ func registerDoFn3x0StructWrappersAndFuncs[I0, I1, I2 any](doFn genericDoFn3x0[I reflectx.RegisterStructWrapper(reflect.TypeOf(doFn).Elem(), wrapperFn) } -// DoFn3x0[I0, I1, I2 any] registers your DoFn to optimize execution at runtime. +// DoFn3x0[I0, I1, I2 any] registers your structural DoFn to optimize execution at runtime. // DoFn input and output parameter types should be provided in order as the generic constraints. func DoFn3x0[I0, I1, I2 any](doFn genericDoFn3x0[I0, I1, I2]) { registerDoFnTypes(doFn) registerDoFn3x0StructWrappersAndFuncs[I0, I1, I2](doFn) } +// Function3x0[I0, I1, I2 any] registers your functional DoFn to optimize execution at runtime. +// Function input and output parameter types should be provided in order as the generic constraints. 
+func Function3x0[I0, I1, I2 any](doFn func(I0, I1, I2)) { + runtime.RegisterFunction(doFn) + registerMethodTypes(reflect.TypeOf(doFn)) + caller := func(fn interface{}) reflectx.Func { + f := fn.(func(I0, I1, I2)) + return &caller3x0[I0, I1, I2]{fn: f} + } + reflectx.RegisterFunc(reflect.TypeOf((*func(I0, I1, I2))(nil)).Elem(), caller) +} + type genericDoFn4x0[I0, I1, I2, I3 any] interface { ProcessElement(i0 I0, i1 I1, i2 I2, i3 I3) } @@ -2490,8 +2537,8 @@ func (c *caller4x0[I0, I1, I2, I3]) Call(args []interface{}) []interface{} { return []interface{}{} } -func (c *caller4x0[I0, I1, I2, I3]) Call4x0(arg0 I0, arg1 I1, arg2 I2, arg3 I3) { - c.fn(arg0, arg1, arg2, arg3) +func (c *caller4x0[I0, I1, I2, I3]) Call4x0(arg0 interface{}, arg1 interface{}, arg2 interface{}, arg3 interface{}) { + c.fn(arg0.(I0), arg1.(I1), arg2.(I2), arg3.(I3)) } func registerDoFn4x0StructWrappersAndFuncs[I0, I1, I2, I3 any](doFn genericDoFn4x0[I0, I1, I2, I3]) { @@ -2533,13 +2580,25 @@ func registerDoFn4x0StructWrappersAndFuncs[I0, I1, I2, I3 any](doFn genericDoFn4 reflectx.RegisterStructWrapper(reflect.TypeOf(doFn).Elem(), wrapperFn) } -// DoFn4x0[I0, I1, I2, I3 any] registers your DoFn to optimize execution at runtime. +// DoFn4x0[I0, I1, I2, I3 any] registers your structural DoFn to optimize execution at runtime. // DoFn input and output parameter types should be provided in order as the generic constraints. func DoFn4x0[I0, I1, I2, I3 any](doFn genericDoFn4x0[I0, I1, I2, I3]) { registerDoFnTypes(doFn) registerDoFn4x0StructWrappersAndFuncs[I0, I1, I2, I3](doFn) } +// Function4x0[I0, I1, I2, I3 any] registers your functional DoFn to optimize execution at runtime. +// Function input and output parameter types should be provided in order as the generic constraints. 
+func Function4x0[I0, I1, I2, I3 any](doFn func(I0, I1, I2, I3)) { + runtime.RegisterFunction(doFn) + registerMethodTypes(reflect.TypeOf(doFn)) + caller := func(fn interface{}) reflectx.Func { + f := fn.(func(I0, I1, I2, I3)) + return &caller4x0[I0, I1, I2, I3]{fn: f} + } + reflectx.RegisterFunc(reflect.TypeOf((*func(I0, I1, I2, I3))(nil)).Elem(), caller) +} + type genericDoFn5x0[I0, I1, I2, I3, I4 any] interface { ProcessElement(i0 I0, i1 I1, i2 I2, i3 I3, i4 I4) } @@ -2561,8 +2620,8 @@ func (c *caller5x0[I0, I1, I2, I3, I4]) Call(args []interface{}) []interface{} { return []interface{}{} } -func (c *caller5x0[I0, I1, I2, I3, I4]) Call5x0(arg0 I0, arg1 I1, arg2 I2, arg3 I3, arg4 I4) { - c.fn(arg0, arg1, arg2, arg3, arg4) +func (c *caller5x0[I0, I1, I2, I3, I4]) Call5x0(arg0 interface{}, arg1 interface{}, arg2 interface{}, arg3 interface{}, arg4 interface{}) { + c.fn(arg0.(I0), arg1.(I1), arg2.(I2), arg3.(I3), arg4.(I4)) } func registerDoFn5x0StructWrappersAndFuncs[I0, I1, I2, I3, I4 any](doFn genericDoFn5x0[I0, I1, I2, I3, I4]) { @@ -2604,13 +2663,25 @@ func registerDoFn5x0StructWrappersAndFuncs[I0, I1, I2, I3, I4 any](doFn genericD reflectx.RegisterStructWrapper(reflect.TypeOf(doFn).Elem(), wrapperFn) } -// DoFn5x0[I0, I1, I2, I3, I4 any] registers your DoFn to optimize execution at runtime. +// DoFn5x0[I0, I1, I2, I3, I4 any] registers your structural DoFn to optimize execution at runtime. // DoFn input and output parameter types should be provided in order as the generic constraints. func DoFn5x0[I0, I1, I2, I3, I4 any](doFn genericDoFn5x0[I0, I1, I2, I3, I4]) { registerDoFnTypes(doFn) registerDoFn5x0StructWrappersAndFuncs[I0, I1, I2, I3, I4](doFn) } +// Function5x0[I0, I1, I2, I3, I4 any] registers your functional DoFn to optimize execution at runtime. +// Function input and output parameter types should be provided in order as the generic constraints. 
+func Function5x0[I0, I1, I2, I3, I4 any](doFn func(I0, I1, I2, I3, I4)) { + runtime.RegisterFunction(doFn) + registerMethodTypes(reflect.TypeOf(doFn)) + caller := func(fn interface{}) reflectx.Func { + f := fn.(func(I0, I1, I2, I3, I4)) + return &caller5x0[I0, I1, I2, I3, I4]{fn: f} + } + reflectx.RegisterFunc(reflect.TypeOf((*func(I0, I1, I2, I3, I4))(nil)).Elem(), caller) +} + type genericDoFn6x0[I0, I1, I2, I3, I4, I5 any] interface { ProcessElement(i0 I0, i1 I1, i2 I2, i3 I3, i4 I4, i5 I5) } @@ -2632,8 +2703,8 @@ func (c *caller6x0[I0, I1, I2, I3, I4, I5]) Call(args []interface{}) []interface return []interface{}{} } -func (c *caller6x0[I0, I1, I2, I3, I4, I5]) Call6x0(arg0 I0, arg1 I1, arg2 I2, arg3 I3, arg4 I4, arg5 I5) { - c.fn(arg0, arg1, arg2, arg3, arg4, arg5) +func (c *caller6x0[I0, I1, I2, I3, I4, I5]) Call6x0(arg0 interface{}, arg1 interface{}, arg2 interface{}, arg3 interface{}, arg4 interface{}, arg5 interface{}) { + c.fn(arg0.(I0), arg1.(I1), arg2.(I2), arg3.(I3), arg4.(I4), arg5.(I5)) } func registerDoFn6x0StructWrappersAndFuncs[I0, I1, I2, I3, I4, I5 any](doFn genericDoFn6x0[I0, I1, I2, I3, I4, I5]) { @@ -2675,13 +2746,25 @@ func registerDoFn6x0StructWrappersAndFuncs[I0, I1, I2, I3, I4, I5 any](doFn gene reflectx.RegisterStructWrapper(reflect.TypeOf(doFn).Elem(), wrapperFn) } -// DoFn6x0[I0, I1, I2, I3, I4, I5 any] registers your DoFn to optimize execution at runtime. +// DoFn6x0[I0, I1, I2, I3, I4, I5 any] registers your structural DoFn to optimize execution at runtime. // DoFn input and output parameter types should be provided in order as the generic constraints. func DoFn6x0[I0, I1, I2, I3, I4, I5 any](doFn genericDoFn6x0[I0, I1, I2, I3, I4, I5]) { registerDoFnTypes(doFn) registerDoFn6x0StructWrappersAndFuncs[I0, I1, I2, I3, I4, I5](doFn) } +// Function6x0[I0, I1, I2, I3, I4, I5 any] registers your functional DoFn to optimize execution at runtime. 
+// Function input and output parameter types should be provided in order as the generic constraints. +func Function6x0[I0, I1, I2, I3, I4, I5 any](doFn func(I0, I1, I2, I3, I4, I5)) { + runtime.RegisterFunction(doFn) + registerMethodTypes(reflect.TypeOf(doFn)) + caller := func(fn interface{}) reflectx.Func { + f := fn.(func(I0, I1, I2, I3, I4, I5)) + return &caller6x0[I0, I1, I2, I3, I4, I5]{fn: f} + } + reflectx.RegisterFunc(reflect.TypeOf((*func(I0, I1, I2, I3, I4, I5))(nil)).Elem(), caller) +} + type genericDoFn7x0[I0, I1, I2, I3, I4, I5, I6 any] interface { ProcessElement(i0 I0, i1 I1, i2 I2, i3 I3, i4 I4, i5 I5, i6 I6) } @@ -2703,8 +2786,8 @@ func (c *caller7x0[I0, I1, I2, I3, I4, I5, I6]) Call(args []interface{}) []inter return []interface{}{} } -func (c *caller7x0[I0, I1, I2, I3, I4, I5, I6]) Call7x0(arg0 I0, arg1 I1, arg2 I2, arg3 I3, arg4 I4, arg5 I5, arg6 I6) { - c.fn(arg0, arg1, arg2, arg3, arg4, arg5, arg6) +func (c *caller7x0[I0, I1, I2, I3, I4, I5, I6]) Call7x0(arg0 interface{}, arg1 interface{}, arg2 interface{}, arg3 interface{}, arg4 interface{}, arg5 interface{}, arg6 interface{}) { + c.fn(arg0.(I0), arg1.(I1), arg2.(I2), arg3.(I3), arg4.(I4), arg5.(I5), arg6.(I6)) } func registerDoFn7x0StructWrappersAndFuncs[I0, I1, I2, I3, I4, I5, I6 any](doFn genericDoFn7x0[I0, I1, I2, I3, I4, I5, I6]) { @@ -2746,13 +2829,25 @@ func registerDoFn7x0StructWrappersAndFuncs[I0, I1, I2, I3, I4, I5, I6 any](doFn reflectx.RegisterStructWrapper(reflect.TypeOf(doFn).Elem(), wrapperFn) } -// DoFn7x0[I0, I1, I2, I3, I4, I5, I6 any] registers your DoFn to optimize execution at runtime. +// DoFn7x0[I0, I1, I2, I3, I4, I5, I6 any] registers your structural DoFn to optimize execution at runtime. // DoFn input and output parameter types should be provided in order as the generic constraints. 
func DoFn7x0[I0, I1, I2, I3, I4, I5, I6 any](doFn genericDoFn7x0[I0, I1, I2, I3, I4, I5, I6]) { registerDoFnTypes(doFn) registerDoFn7x0StructWrappersAndFuncs[I0, I1, I2, I3, I4, I5, I6](doFn) } +// Function7x0[I0, I1, I2, I3, I4, I5, I6 any] registers your functional DoFn to optimize execution at runtime. +// Function input and output parameter types should be provided in order as the generic constraints. +func Function7x0[I0, I1, I2, I3, I4, I5, I6 any](doFn func(I0, I1, I2, I3, I4, I5, I6)) { + runtime.RegisterFunction(doFn) + registerMethodTypes(reflect.TypeOf(doFn)) + caller := func(fn interface{}) reflectx.Func { + f := fn.(func(I0, I1, I2, I3, I4, I5, I6)) + return &caller7x0[I0, I1, I2, I3, I4, I5, I6]{fn: f} + } + reflectx.RegisterFunc(reflect.TypeOf((*func(I0, I1, I2, I3, I4, I5, I6))(nil)).Elem(), caller) +} + type genericDoFn8x0[I0, I1, I2, I3, I4, I5, I6, I7 any] interface { ProcessElement(i0 I0, i1 I1, i2 I2, i3 I3, i4 I4, i5 I5, i6 I6, i7 I7) } @@ -2774,8 +2869,8 @@ func (c *caller8x0[I0, I1, I2, I3, I4, I5, I6, I7]) Call(args []interface{}) []i return []interface{}{} } -func (c *caller8x0[I0, I1, I2, I3, I4, I5, I6, I7]) Call8x0(arg0 I0, arg1 I1, arg2 I2, arg3 I3, arg4 I4, arg5 I5, arg6 I6, arg7 I7) { - c.fn(arg0, arg1, arg2, arg3, arg4, arg5, arg6, arg7) +func (c *caller8x0[I0, I1, I2, I3, I4, I5, I6, I7]) Call8x0(arg0 interface{}, arg1 interface{}, arg2 interface{}, arg3 interface{}, arg4 interface{}, arg5 interface{}, arg6 interface{}, arg7 interface{}) { + c.fn(arg0.(I0), arg1.(I1), arg2.(I2), arg3.(I3), arg4.(I4), arg5.(I5), arg6.(I6), arg7.(I7)) } func registerDoFn8x0StructWrappersAndFuncs[I0, I1, I2, I3, I4, I5, I6, I7 any](doFn genericDoFn8x0[I0, I1, I2, I3, I4, I5, I6, I7]) { @@ -2817,13 +2912,25 @@ func registerDoFn8x0StructWrappersAndFuncs[I0, I1, I2, I3, I4, I5, I6, I7 any](d reflectx.RegisterStructWrapper(reflect.TypeOf(doFn).Elem(), wrapperFn) } -// DoFn8x0[I0, I1, I2, I3, I4, I5, I6, I7 any] registers your DoFn to optimize execution at 
runtime. +// DoFn8x0[I0, I1, I2, I3, I4, I5, I6, I7 any] registers your structural DoFn to optimize execution at runtime. // DoFn input and output parameter types should be provided in order as the generic constraints. func DoFn8x0[I0, I1, I2, I3, I4, I5, I6, I7 any](doFn genericDoFn8x0[I0, I1, I2, I3, I4, I5, I6, I7]) { registerDoFnTypes(doFn) registerDoFn8x0StructWrappersAndFuncs[I0, I1, I2, I3, I4, I5, I6, I7](doFn) } +// Function8x0[I0, I1, I2, I3, I4, I5, I6, I7 any] registers your functional DoFn to optimize execution at runtime. +// Function input and output parameter types should be provided in order as the generic constraints. +func Function8x0[I0, I1, I2, I3, I4, I5, I6, I7 any](doFn func(I0, I1, I2, I3, I4, I5, I6, I7)) { + runtime.RegisterFunction(doFn) + registerMethodTypes(reflect.TypeOf(doFn)) + caller := func(fn interface{}) reflectx.Func { + f := fn.(func(I0, I1, I2, I3, I4, I5, I6, I7)) + return &caller8x0[I0, I1, I2, I3, I4, I5, I6, I7]{fn: f} + } + reflectx.RegisterFunc(reflect.TypeOf((*func(I0, I1, I2, I3, I4, I5, I6, I7))(nil)).Elem(), caller) +} + type genericDoFn9x0[I0, I1, I2, I3, I4, I5, I6, I7, I8 any] interface { ProcessElement(i0 I0, i1 I1, i2 I2, i3 I3, i4 I4, i5 I5, i6 I6, i7 I7, i8 I8) } @@ -2845,8 +2952,8 @@ func (c *caller9x0[I0, I1, I2, I3, I4, I5, I6, I7, I8]) Call(args []interface{}) return []interface{}{} } -func (c *caller9x0[I0, I1, I2, I3, I4, I5, I6, I7, I8]) Call9x0(arg0 I0, arg1 I1, arg2 I2, arg3 I3, arg4 I4, arg5 I5, arg6 I6, arg7 I7, arg8 I8) { - c.fn(arg0, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8) +func (c *caller9x0[I0, I1, I2, I3, I4, I5, I6, I7, I8]) Call9x0(arg0 interface{}, arg1 interface{}, arg2 interface{}, arg3 interface{}, arg4 interface{}, arg5 interface{}, arg6 interface{}, arg7 interface{}, arg8 interface{}) { + c.fn(arg0.(I0), arg1.(I1), arg2.(I2), arg3.(I3), arg4.(I4), arg5.(I5), arg6.(I6), arg7.(I7), arg8.(I8)) } func registerDoFn9x0StructWrappersAndFuncs[I0, I1, I2, I3, I4, I5, I6, I7, I8 
any](doFn genericDoFn9x0[I0, I1, I2, I3, I4, I5, I6, I7, I8]) { @@ -2888,13 +2995,25 @@ func registerDoFn9x0StructWrappersAndFuncs[I0, I1, I2, I3, I4, I5, I6, I7, I8 an reflectx.RegisterStructWrapper(reflect.TypeOf(doFn).Elem(), wrapperFn) } -// DoFn9x0[I0, I1, I2, I3, I4, I5, I6, I7, I8 any] registers your DoFn to optimize execution at runtime. +// DoFn9x0[I0, I1, I2, I3, I4, I5, I6, I7, I8 any] registers your structural DoFn to optimize execution at runtime. // DoFn input and output parameter types should be provided in order as the generic constraints. func DoFn9x0[I0, I1, I2, I3, I4, I5, I6, I7, I8 any](doFn genericDoFn9x0[I0, I1, I2, I3, I4, I5, I6, I7, I8]) { registerDoFnTypes(doFn) registerDoFn9x0StructWrappersAndFuncs[I0, I1, I2, I3, I4, I5, I6, I7, I8](doFn) } +// Function9x0[I0, I1, I2, I3, I4, I5, I6, I7, I8 any] registers your functional DoFn to optimize execution at runtime. +// Function input and output parameter types should be provided in order as the generic constraints. 
+func Function9x0[I0, I1, I2, I3, I4, I5, I6, I7, I8 any](doFn func(I0, I1, I2, I3, I4, I5, I6, I7, I8)) { + runtime.RegisterFunction(doFn) + registerMethodTypes(reflect.TypeOf(doFn)) + caller := func(fn interface{}) reflectx.Func { + f := fn.(func(I0, I1, I2, I3, I4, I5, I6, I7, I8)) + return &caller9x0[I0, I1, I2, I3, I4, I5, I6, I7, I8]{fn: f} + } + reflectx.RegisterFunc(reflect.TypeOf((*func(I0, I1, I2, I3, I4, I5, I6, I7, I8))(nil)).Elem(), caller) +} + type genericDoFn10x0[I0, I1, I2, I3, I4, I5, I6, I7, I8, I9 any] interface { ProcessElement(i0 I0, i1 I1, i2 I2, i3 I3, i4 I4, i5 I5, i6 I6, i7 I7, i8 I8, i9 I9) } @@ -2916,8 +3035,8 @@ func (c *caller10x0[I0, I1, I2, I3, I4, I5, I6, I7, I8, I9]) Call(args []interfa return []interface{}{} } -func (c *caller10x0[I0, I1, I2, I3, I4, I5, I6, I7, I8, I9]) Call10x0(arg0 I0, arg1 I1, arg2 I2, arg3 I3, arg4 I4, arg5 I5, arg6 I6, arg7 I7, arg8 I8, arg9 I9) { - c.fn(arg0, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9) +func (c *caller10x0[I0, I1, I2, I3, I4, I5, I6, I7, I8, I9]) Call10x0(arg0 interface{}, arg1 interface{}, arg2 interface{}, arg3 interface{}, arg4 interface{}, arg5 interface{}, arg6 interface{}, arg7 interface{}, arg8 interface{}, arg9 interface{}) { + c.fn(arg0.(I0), arg1.(I1), arg2.(I2), arg3.(I3), arg4.(I4), arg5.(I5), arg6.(I6), arg7.(I7), arg8.(I8), arg9.(I9)) } func registerDoFn10x0StructWrappersAndFuncs[I0, I1, I2, I3, I4, I5, I6, I7, I8, I9 any](doFn genericDoFn10x0[I0, I1, I2, I3, I4, I5, I6, I7, I8, I9]) { @@ -2959,13 +3078,25 @@ func registerDoFn10x0StructWrappersAndFuncs[I0, I1, I2, I3, I4, I5, I6, I7, I8, reflectx.RegisterStructWrapper(reflect.TypeOf(doFn).Elem(), wrapperFn) } -// DoFn10x0[I0, I1, I2, I3, I4, I5, I6, I7, I8, I9 any] registers your DoFn to optimize execution at runtime. +// DoFn10x0[I0, I1, I2, I3, I4, I5, I6, I7, I8, I9 any] registers your structural DoFn to optimize execution at runtime. 
// DoFn input and output parameter types should be provided in order as the generic constraints. func DoFn10x0[I0, I1, I2, I3, I4, I5, I6, I7, I8, I9 any](doFn genericDoFn10x0[I0, I1, I2, I3, I4, I5, I6, I7, I8, I9]) { registerDoFnTypes(doFn) registerDoFn10x0StructWrappersAndFuncs[I0, I1, I2, I3, I4, I5, I6, I7, I8, I9](doFn) } +// Function10x0[I0, I1, I2, I3, I4, I5, I6, I7, I8, I9 any] registers your functional DoFn to optimize execution at runtime. +// Function input and output parameter types should be provided in order as the generic constraints. +func Function10x0[I0, I1, I2, I3, I4, I5, I6, I7, I8, I9 any](doFn func(I0, I1, I2, I3, I4, I5, I6, I7, I8, I9)) { + runtime.RegisterFunction(doFn) + registerMethodTypes(reflect.TypeOf(doFn)) + caller := func(fn interface{}) reflectx.Func { + f := fn.(func(I0, I1, I2, I3, I4, I5, I6, I7, I8, I9)) + return &caller10x0[I0, I1, I2, I3, I4, I5, I6, I7, I8, I9]{fn: f} + } + reflectx.RegisterFunc(reflect.TypeOf((*func(I0, I1, I2, I3, I4, I5, I6, I7, I8, I9))(nil)).Elem(), caller) +} + type genericDoFn0x1[R0 any] interface { ProcessElement() R0 } @@ -3030,13 +3161,25 @@ func registerDoFn0x1StructWrappersAndFuncs[R0 any](doFn genericDoFn0x1[R0]) { reflectx.RegisterStructWrapper(reflect.TypeOf(doFn).Elem(), wrapperFn) } -// DoFn0x1[R0 any] registers your DoFn to optimize execution at runtime. +// DoFn0x1[R0 any] registers your structural DoFn to optimize execution at runtime. // DoFn input and output parameter types should be provided in order as the generic constraints. func DoFn0x1[R0 any](doFn genericDoFn0x1[R0]) { registerDoFnTypes(doFn) registerDoFn0x1StructWrappersAndFuncs[R0](doFn) } +// Function0x1[R0 any] registers your functional DoFn to optimize execution at runtime. +// Function input and output parameter types should be provided in order as the generic constraints. 
+func Function0x1[R0 any](doFn func() R0) { + runtime.RegisterFunction(doFn) + registerMethodTypes(reflect.TypeOf(doFn)) + caller := func(fn interface{}) reflectx.Func { + f := fn.(func() R0) + return &caller0x1[R0]{fn: f} + } + reflectx.RegisterFunc(reflect.TypeOf((*func() R0)(nil)).Elem(), caller) +} + type genericDoFn1x1[I0, R0 any] interface { ProcessElement(i0 I0) R0 } @@ -3058,8 +3201,8 @@ func (c *caller1x1[I0, R0]) Call(args []interface{}) []interface{} { return []interface{}{out0} } -func (c *caller1x1[I0, R0]) Call1x1(arg0 I0) interface{} { - return c.fn(arg0) +func (c *caller1x1[I0, R0]) Call1x1(arg0 interface{}) interface{} { + return c.fn(arg0.(I0)) } func registerDoFn1x1StructWrappersAndFuncs[I0, R0 any](doFn genericDoFn1x1[I0, R0]) { @@ -3101,13 +3244,25 @@ func registerDoFn1x1StructWrappersAndFuncs[I0, R0 any](doFn genericDoFn1x1[I0, R reflectx.RegisterStructWrapper(reflect.TypeOf(doFn).Elem(), wrapperFn) } -// DoFn1x1[I0, R0 any] registers your DoFn to optimize execution at runtime. +// DoFn1x1[I0, R0 any] registers your structural DoFn to optimize execution at runtime. // DoFn input and output parameter types should be provided in order as the generic constraints. func DoFn1x1[I0, R0 any](doFn genericDoFn1x1[I0, R0]) { registerDoFnTypes(doFn) registerDoFn1x1StructWrappersAndFuncs[I0, R0](doFn) } +// Function1x1[I0, R0 any] registers your functional DoFn to optimize execution at runtime. +// Function input and output parameter types should be provided in order as the generic constraints. 
+func Function1x1[I0, R0 any](doFn func(I0) R0) { + runtime.RegisterFunction(doFn) + registerMethodTypes(reflect.TypeOf(doFn)) + caller := func(fn interface{}) reflectx.Func { + f := fn.(func(I0) R0) + return &caller1x1[I0, R0]{fn: f} + } + reflectx.RegisterFunc(reflect.TypeOf((*func(I0) R0)(nil)).Elem(), caller) +} + type genericDoFn2x1[I0, I1, R0 any] interface { ProcessElement(i0 I0, i1 I1) R0 } @@ -3129,8 +3284,8 @@ func (c *caller2x1[I0, I1, R0]) Call(args []interface{}) []interface{} { return []interface{}{out0} } -func (c *caller2x1[I0, I1, R0]) Call2x1(arg0 I0, arg1 I1) interface{} { - return c.fn(arg0, arg1) +func (c *caller2x1[I0, I1, R0]) Call2x1(arg0 interface{}, arg1 interface{}) interface{} { + return c.fn(arg0.(I0), arg1.(I1)) } func registerDoFn2x1StructWrappersAndFuncs[I0, I1, R0 any](doFn genericDoFn2x1[I0, I1, R0]) { @@ -3172,13 +3327,25 @@ func registerDoFn2x1StructWrappersAndFuncs[I0, I1, R0 any](doFn genericDoFn2x1[I reflectx.RegisterStructWrapper(reflect.TypeOf(doFn).Elem(), wrapperFn) } -// DoFn2x1[I0, I1, R0 any] registers your DoFn to optimize execution at runtime. +// DoFn2x1[I0, I1, R0 any] registers your structural DoFn to optimize execution at runtime. // DoFn input and output parameter types should be provided in order as the generic constraints. func DoFn2x1[I0, I1, R0 any](doFn genericDoFn2x1[I0, I1, R0]) { registerDoFnTypes(doFn) registerDoFn2x1StructWrappersAndFuncs[I0, I1, R0](doFn) } +// Function2x1[I0, I1, R0 any] registers your functional DoFn to optimize execution at runtime. +// Function input and output parameter types should be provided in order as the generic constraints. 
+func Function2x1[I0, I1, R0 any](doFn func(I0, I1) R0) { + runtime.RegisterFunction(doFn) + registerMethodTypes(reflect.TypeOf(doFn)) + caller := func(fn interface{}) reflectx.Func { + f := fn.(func(I0, I1) R0) + return &caller2x1[I0, I1, R0]{fn: f} + } + reflectx.RegisterFunc(reflect.TypeOf((*func(I0, I1) R0)(nil)).Elem(), caller) +} + type genericDoFn3x1[I0, I1, I2, R0 any] interface { ProcessElement(i0 I0, i1 I1, i2 I2) R0 } @@ -3200,8 +3367,8 @@ func (c *caller3x1[I0, I1, I2, R0]) Call(args []interface{}) []interface{} { return []interface{}{out0} } -func (c *caller3x1[I0, I1, I2, R0]) Call3x1(arg0 I0, arg1 I1, arg2 I2) interface{} { - return c.fn(arg0, arg1, arg2) +func (c *caller3x1[I0, I1, I2, R0]) Call3x1(arg0 interface{}, arg1 interface{}, arg2 interface{}) interface{} { + return c.fn(arg0.(I0), arg1.(I1), arg2.(I2)) } func registerDoFn3x1StructWrappersAndFuncs[I0, I1, I2, R0 any](doFn genericDoFn3x1[I0, I1, I2, R0]) { @@ -3243,13 +3410,25 @@ func registerDoFn3x1StructWrappersAndFuncs[I0, I1, I2, R0 any](doFn genericDoFn3 reflectx.RegisterStructWrapper(reflect.TypeOf(doFn).Elem(), wrapperFn) } -// DoFn3x1[I0, I1, I2, R0 any] registers your DoFn to optimize execution at runtime. +// DoFn3x1[I0, I1, I2, R0 any] registers your structural DoFn to optimize execution at runtime. // DoFn input and output parameter types should be provided in order as the generic constraints. func DoFn3x1[I0, I1, I2, R0 any](doFn genericDoFn3x1[I0, I1, I2, R0]) { registerDoFnTypes(doFn) registerDoFn3x1StructWrappersAndFuncs[I0, I1, I2, R0](doFn) } +// Function3x1[I0, I1, I2, R0 any] registers your functional DoFn to optimize execution at runtime. +// Function input and output parameter types should be provided in order as the generic constraints. 
+func Function3x1[I0, I1, I2, R0 any](doFn func(I0, I1, I2) R0) { + runtime.RegisterFunction(doFn) + registerMethodTypes(reflect.TypeOf(doFn)) + caller := func(fn interface{}) reflectx.Func { + f := fn.(func(I0, I1, I2) R0) + return &caller3x1[I0, I1, I2, R0]{fn: f} + } + reflectx.RegisterFunc(reflect.TypeOf((*func(I0, I1, I2) R0)(nil)).Elem(), caller) +} + type genericDoFn4x1[I0, I1, I2, I3, R0 any] interface { ProcessElement(i0 I0, i1 I1, i2 I2, i3 I3) R0 } @@ -3271,8 +3450,8 @@ func (c *caller4x1[I0, I1, I2, I3, R0]) Call(args []interface{}) []interface{} { return []interface{}{out0} } -func (c *caller4x1[I0, I1, I2, I3, R0]) Call4x1(arg0 I0, arg1 I1, arg2 I2, arg3 I3) interface{} { - return c.fn(arg0, arg1, arg2, arg3) +func (c *caller4x1[I0, I1, I2, I3, R0]) Call4x1(arg0 interface{}, arg1 interface{}, arg2 interface{}, arg3 interface{}) interface{} { + return c.fn(arg0.(I0), arg1.(I1), arg2.(I2), arg3.(I3)) } func registerDoFn4x1StructWrappersAndFuncs[I0, I1, I2, I3, R0 any](doFn genericDoFn4x1[I0, I1, I2, I3, R0]) { @@ -3314,13 +3493,25 @@ func registerDoFn4x1StructWrappersAndFuncs[I0, I1, I2, I3, R0 any](doFn genericD reflectx.RegisterStructWrapper(reflect.TypeOf(doFn).Elem(), wrapperFn) } -// DoFn4x1[I0, I1, I2, I3, R0 any] registers your DoFn to optimize execution at runtime. +// DoFn4x1[I0, I1, I2, I3, R0 any] registers your structural DoFn to optimize execution at runtime. // DoFn input and output parameter types should be provided in order as the generic constraints. func DoFn4x1[I0, I1, I2, I3, R0 any](doFn genericDoFn4x1[I0, I1, I2, I3, R0]) { registerDoFnTypes(doFn) registerDoFn4x1StructWrappersAndFuncs[I0, I1, I2, I3, R0](doFn) } +// Function4x1[I0, I1, I2, I3, R0 any] registers your functional DoFn to optimize execution at runtime. +// Function input and output parameter types should be provided in order as the generic constraints. 
+func Function4x1[I0, I1, I2, I3, R0 any](doFn func(I0, I1, I2, I3) R0) { + runtime.RegisterFunction(doFn) + registerMethodTypes(reflect.TypeOf(doFn)) + caller := func(fn interface{}) reflectx.Func { + f := fn.(func(I0, I1, I2, I3) R0) + return &caller4x1[I0, I1, I2, I3, R0]{fn: f} + } + reflectx.RegisterFunc(reflect.TypeOf((*func(I0, I1, I2, I3) R0)(nil)).Elem(), caller) +} + type genericDoFn5x1[I0, I1, I2, I3, I4, R0 any] interface { ProcessElement(i0 I0, i1 I1, i2 I2, i3 I3, i4 I4) R0 } @@ -3342,8 +3533,8 @@ func (c *caller5x1[I0, I1, I2, I3, I4, R0]) Call(args []interface{}) []interface return []interface{}{out0} } -func (c *caller5x1[I0, I1, I2, I3, I4, R0]) Call5x1(arg0 I0, arg1 I1, arg2 I2, arg3 I3, arg4 I4) interface{} { - return c.fn(arg0, arg1, arg2, arg3, arg4) +func (c *caller5x1[I0, I1, I2, I3, I4, R0]) Call5x1(arg0 interface{}, arg1 interface{}, arg2 interface{}, arg3 interface{}, arg4 interface{}) interface{} { + return c.fn(arg0.(I0), arg1.(I1), arg2.(I2), arg3.(I3), arg4.(I4)) } func registerDoFn5x1StructWrappersAndFuncs[I0, I1, I2, I3, I4, R0 any](doFn genericDoFn5x1[I0, I1, I2, I3, I4, R0]) { @@ -3385,13 +3576,25 @@ func registerDoFn5x1StructWrappersAndFuncs[I0, I1, I2, I3, I4, R0 any](doFn gene reflectx.RegisterStructWrapper(reflect.TypeOf(doFn).Elem(), wrapperFn) } -// DoFn5x1[I0, I1, I2, I3, I4, R0 any] registers your DoFn to optimize execution at runtime. +// DoFn5x1[I0, I1, I2, I3, I4, R0 any] registers your structural DoFn to optimize execution at runtime. // DoFn input and output parameter types should be provided in order as the generic constraints. func DoFn5x1[I0, I1, I2, I3, I4, R0 any](doFn genericDoFn5x1[I0, I1, I2, I3, I4, R0]) { registerDoFnTypes(doFn) registerDoFn5x1StructWrappersAndFuncs[I0, I1, I2, I3, I4, R0](doFn) } +// Function5x1[I0, I1, I2, I3, I4, R0 any] registers your functional DoFn to optimize execution at runtime. +// Function input and output parameter types should be provided in order as the generic constraints. 
+func Function5x1[I0, I1, I2, I3, I4, R0 any](doFn func(I0, I1, I2, I3, I4) R0) { + runtime.RegisterFunction(doFn) + registerMethodTypes(reflect.TypeOf(doFn)) + caller := func(fn interface{}) reflectx.Func { + f := fn.(func(I0, I1, I2, I3, I4) R0) + return &caller5x1[I0, I1, I2, I3, I4, R0]{fn: f} + } + reflectx.RegisterFunc(reflect.TypeOf((*func(I0, I1, I2, I3, I4) R0)(nil)).Elem(), caller) +} + type genericDoFn6x1[I0, I1, I2, I3, I4, I5, R0 any] interface { ProcessElement(i0 I0, i1 I1, i2 I2, i3 I3, i4 I4, i5 I5) R0 } @@ -3413,8 +3616,8 @@ func (c *caller6x1[I0, I1, I2, I3, I4, I5, R0]) Call(args []interface{}) []inter return []interface{}{out0} } -func (c *caller6x1[I0, I1, I2, I3, I4, I5, R0]) Call6x1(arg0 I0, arg1 I1, arg2 I2, arg3 I3, arg4 I4, arg5 I5) interface{} { - return c.fn(arg0, arg1, arg2, arg3, arg4, arg5) +func (c *caller6x1[I0, I1, I2, I3, I4, I5, R0]) Call6x1(arg0 interface{}, arg1 interface{}, arg2 interface{}, arg3 interface{}, arg4 interface{}, arg5 interface{}) interface{} { + return c.fn(arg0.(I0), arg1.(I1), arg2.(I2), arg3.(I3), arg4.(I4), arg5.(I5)) } func registerDoFn6x1StructWrappersAndFuncs[I0, I1, I2, I3, I4, I5, R0 any](doFn genericDoFn6x1[I0, I1, I2, I3, I4, I5, R0]) { @@ -3456,13 +3659,25 @@ func registerDoFn6x1StructWrappersAndFuncs[I0, I1, I2, I3, I4, I5, R0 any](doFn reflectx.RegisterStructWrapper(reflect.TypeOf(doFn).Elem(), wrapperFn) } -// DoFn6x1[I0, I1, I2, I3, I4, I5, R0 any] registers your DoFn to optimize execution at runtime. +// DoFn6x1[I0, I1, I2, I3, I4, I5, R0 any] registers your structural DoFn to optimize execution at runtime. // DoFn input and output parameter types should be provided in order as the generic constraints. 
func DoFn6x1[I0, I1, I2, I3, I4, I5, R0 any](doFn genericDoFn6x1[I0, I1, I2, I3, I4, I5, R0]) { registerDoFnTypes(doFn) registerDoFn6x1StructWrappersAndFuncs[I0, I1, I2, I3, I4, I5, R0](doFn) } +// Function6x1[I0, I1, I2, I3, I4, I5, R0 any] registers your functional DoFn to optimize execution at runtime. +// Function input and output parameter types should be provided in order as the generic constraints. +func Function6x1[I0, I1, I2, I3, I4, I5, R0 any](doFn func(I0, I1, I2, I3, I4, I5) R0) { + runtime.RegisterFunction(doFn) + registerMethodTypes(reflect.TypeOf(doFn)) + caller := func(fn interface{}) reflectx.Func { + f := fn.(func(I0, I1, I2, I3, I4, I5) R0) + return &caller6x1[I0, I1, I2, I3, I4, I5, R0]{fn: f} + } + reflectx.RegisterFunc(reflect.TypeOf((*func(I0, I1, I2, I3, I4, I5) R0)(nil)).Elem(), caller) +} + type genericDoFn7x1[I0, I1, I2, I3, I4, I5, I6, R0 any] interface { ProcessElement(i0 I0, i1 I1, i2 I2, i3 I3, i4 I4, i5 I5, i6 I6) R0 } @@ -3484,8 +3699,8 @@ func (c *caller7x1[I0, I1, I2, I3, I4, I5, I6, R0]) Call(args []interface{}) []i return []interface{}{out0} } -func (c *caller7x1[I0, I1, I2, I3, I4, I5, I6, R0]) Call7x1(arg0 I0, arg1 I1, arg2 I2, arg3 I3, arg4 I4, arg5 I5, arg6 I6) interface{} { - return c.fn(arg0, arg1, arg2, arg3, arg4, arg5, arg6) +func (c *caller7x1[I0, I1, I2, I3, I4, I5, I6, R0]) Call7x1(arg0 interface{}, arg1 interface{}, arg2 interface{}, arg3 interface{}, arg4 interface{}, arg5 interface{}, arg6 interface{}) interface{} { + return c.fn(arg0.(I0), arg1.(I1), arg2.(I2), arg3.(I3), arg4.(I4), arg5.(I5), arg6.(I6)) } func registerDoFn7x1StructWrappersAndFuncs[I0, I1, I2, I3, I4, I5, I6, R0 any](doFn genericDoFn7x1[I0, I1, I2, I3, I4, I5, I6, R0]) { @@ -3527,13 +3742,25 @@ func registerDoFn7x1StructWrappersAndFuncs[I0, I1, I2, I3, I4, I5, I6, R0 any](d reflectx.RegisterStructWrapper(reflect.TypeOf(doFn).Elem(), wrapperFn) } -// DoFn7x1[I0, I1, I2, I3, I4, I5, I6, R0 any] registers your DoFn to optimize execution at runtime. 
+// DoFn7x1[I0, I1, I2, I3, I4, I5, I6, R0 any] registers your structural DoFn to optimize execution at runtime. // DoFn input and output parameter types should be provided in order as the generic constraints. func DoFn7x1[I0, I1, I2, I3, I4, I5, I6, R0 any](doFn genericDoFn7x1[I0, I1, I2, I3, I4, I5, I6, R0]) { registerDoFnTypes(doFn) registerDoFn7x1StructWrappersAndFuncs[I0, I1, I2, I3, I4, I5, I6, R0](doFn) } +// Function7x1[I0, I1, I2, I3, I4, I5, I6, R0 any] registers your functional DoFn to optimize execution at runtime. +// Function input and output parameter types should be provided in order as the generic constraints. +func Function7x1[I0, I1, I2, I3, I4, I5, I6, R0 any](doFn func(I0, I1, I2, I3, I4, I5, I6) R0) { + runtime.RegisterFunction(doFn) + registerMethodTypes(reflect.TypeOf(doFn)) + caller := func(fn interface{}) reflectx.Func { + f := fn.(func(I0, I1, I2, I3, I4, I5, I6) R0) + return &caller7x1[I0, I1, I2, I3, I4, I5, I6, R0]{fn: f} + } + reflectx.RegisterFunc(reflect.TypeOf((*func(I0, I1, I2, I3, I4, I5, I6) R0)(nil)).Elem(), caller) +} + type genericDoFn8x1[I0, I1, I2, I3, I4, I5, I6, I7, R0 any] interface { ProcessElement(i0 I0, i1 I1, i2 I2, i3 I3, i4 I4, i5 I5, i6 I6, i7 I7) R0 } @@ -3555,8 +3782,8 @@ func (c *caller8x1[I0, I1, I2, I3, I4, I5, I6, I7, R0]) Call(args []interface{}) return []interface{}{out0} } -func (c *caller8x1[I0, I1, I2, I3, I4, I5, I6, I7, R0]) Call8x1(arg0 I0, arg1 I1, arg2 I2, arg3 I3, arg4 I4, arg5 I5, arg6 I6, arg7 I7) interface{} { - return c.fn(arg0, arg1, arg2, arg3, arg4, arg5, arg6, arg7) +func (c *caller8x1[I0, I1, I2, I3, I4, I5, I6, I7, R0]) Call8x1(arg0 interface{}, arg1 interface{}, arg2 interface{}, arg3 interface{}, arg4 interface{}, arg5 interface{}, arg6 interface{}, arg7 interface{}) interface{} { + return c.fn(arg0.(I0), arg1.(I1), arg2.(I2), arg3.(I3), arg4.(I4), arg5.(I5), arg6.(I6), arg7.(I7)) } func registerDoFn8x1StructWrappersAndFuncs[I0, I1, I2, I3, I4, I5, I6, I7, R0 any](doFn 
genericDoFn8x1[I0, I1, I2, I3, I4, I5, I6, I7, R0]) { @@ -3598,13 +3825,25 @@ func registerDoFn8x1StructWrappersAndFuncs[I0, I1, I2, I3, I4, I5, I6, I7, R0 an reflectx.RegisterStructWrapper(reflect.TypeOf(doFn).Elem(), wrapperFn) } -// DoFn8x1[I0, I1, I2, I3, I4, I5, I6, I7, R0 any] registers your DoFn to optimize execution at runtime. +// DoFn8x1[I0, I1, I2, I3, I4, I5, I6, I7, R0 any] registers your structural DoFn to optimize execution at runtime. // DoFn input and output parameter types should be provided in order as the generic constraints. func DoFn8x1[I0, I1, I2, I3, I4, I5, I6, I7, R0 any](doFn genericDoFn8x1[I0, I1, I2, I3, I4, I5, I6, I7, R0]) { registerDoFnTypes(doFn) registerDoFn8x1StructWrappersAndFuncs[I0, I1, I2, I3, I4, I5, I6, I7, R0](doFn) } +// Function8x1[I0, I1, I2, I3, I4, I5, I6, I7, R0 any] registers your functional DoFn to optimize execution at runtime. +// Function input and output parameter types should be provided in order as the generic constraints. +func Function8x1[I0, I1, I2, I3, I4, I5, I6, I7, R0 any](doFn func(I0, I1, I2, I3, I4, I5, I6, I7) R0) { + runtime.RegisterFunction(doFn) + registerMethodTypes(reflect.TypeOf(doFn)) + caller := func(fn interface{}) reflectx.Func { + f := fn.(func(I0, I1, I2, I3, I4, I5, I6, I7) R0) + return &caller8x1[I0, I1, I2, I3, I4, I5, I6, I7, R0]{fn: f} + } + reflectx.RegisterFunc(reflect.TypeOf((*func(I0, I1, I2, I3, I4, I5, I6, I7) R0)(nil)).Elem(), caller) +} + type genericDoFn9x1[I0, I1, I2, I3, I4, I5, I6, I7, I8, R0 any] interface { ProcessElement(i0 I0, i1 I1, i2 I2, i3 I3, i4 I4, i5 I5, i6 I6, i7 I7, i8 I8) R0 } @@ -3626,8 +3865,8 @@ func (c *caller9x1[I0, I1, I2, I3, I4, I5, I6, I7, I8, R0]) Call(args []interfac return []interface{}{out0} } -func (c *caller9x1[I0, I1, I2, I3, I4, I5, I6, I7, I8, R0]) Call9x1(arg0 I0, arg1 I1, arg2 I2, arg3 I3, arg4 I4, arg5 I5, arg6 I6, arg7 I7, arg8 I8) interface{} { - return c.fn(arg0, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8) +func (c *caller9x1[I0, 
I1, I2, I3, I4, I5, I6, I7, I8, R0]) Call9x1(arg0 interface{}, arg1 interface{}, arg2 interface{}, arg3 interface{}, arg4 interface{}, arg5 interface{}, arg6 interface{}, arg7 interface{}, arg8 interface{}) interface{} { + return c.fn(arg0.(I0), arg1.(I1), arg2.(I2), arg3.(I3), arg4.(I4), arg5.(I5), arg6.(I6), arg7.(I7), arg8.(I8)) } func registerDoFn9x1StructWrappersAndFuncs[I0, I1, I2, I3, I4, I5, I6, I7, I8, R0 any](doFn genericDoFn9x1[I0, I1, I2, I3, I4, I5, I6, I7, I8, R0]) { @@ -3669,13 +3908,25 @@ func registerDoFn9x1StructWrappersAndFuncs[I0, I1, I2, I3, I4, I5, I6, I7, I8, R reflectx.RegisterStructWrapper(reflect.TypeOf(doFn).Elem(), wrapperFn) } -// DoFn9x1[I0, I1, I2, I3, I4, I5, I6, I7, I8, R0 any] registers your DoFn to optimize execution at runtime. +// DoFn9x1[I0, I1, I2, I3, I4, I5, I6, I7, I8, R0 any] registers your structural DoFn to optimize execution at runtime. // DoFn input and output parameter types should be provided in order as the generic constraints. func DoFn9x1[I0, I1, I2, I3, I4, I5, I6, I7, I8, R0 any](doFn genericDoFn9x1[I0, I1, I2, I3, I4, I5, I6, I7, I8, R0]) { registerDoFnTypes(doFn) registerDoFn9x1StructWrappersAndFuncs[I0, I1, I2, I3, I4, I5, I6, I7, I8, R0](doFn) } +// Function9x1[I0, I1, I2, I3, I4, I5, I6, I7, I8, R0 any] registers your functional DoFn to optimize execution at runtime. +// Function input and output parameter types should be provided in order as the generic constraints. 
+func Function9x1[I0, I1, I2, I3, I4, I5, I6, I7, I8, R0 any](doFn func(I0, I1, I2, I3, I4, I5, I6, I7, I8) R0) { + runtime.RegisterFunction(doFn) + registerMethodTypes(reflect.TypeOf(doFn)) + caller := func(fn interface{}) reflectx.Func { + f := fn.(func(I0, I1, I2, I3, I4, I5, I6, I7, I8) R0) + return &caller9x1[I0, I1, I2, I3, I4, I5, I6, I7, I8, R0]{fn: f} + } + reflectx.RegisterFunc(reflect.TypeOf((*func(I0, I1, I2, I3, I4, I5, I6, I7, I8) R0)(nil)).Elem(), caller) +} + type genericDoFn10x1[I0, I1, I2, I3, I4, I5, I6, I7, I8, I9, R0 any] interface { ProcessElement(i0 I0, i1 I1, i2 I2, i3 I3, i4 I4, i5 I5, i6 I6, i7 I7, i8 I8, i9 I9) R0 } @@ -3697,8 +3948,8 @@ func (c *caller10x1[I0, I1, I2, I3, I4, I5, I6, I7, I8, I9, R0]) Call(args []int return []interface{}{out0} } -func (c *caller10x1[I0, I1, I2, I3, I4, I5, I6, I7, I8, I9, R0]) Call10x1(arg0 I0, arg1 I1, arg2 I2, arg3 I3, arg4 I4, arg5 I5, arg6 I6, arg7 I7, arg8 I8, arg9 I9) interface{} { - return c.fn(arg0, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9) +func (c *caller10x1[I0, I1, I2, I3, I4, I5, I6, I7, I8, I9, R0]) Call10x1(arg0 interface{}, arg1 interface{}, arg2 interface{}, arg3 interface{}, arg4 interface{}, arg5 interface{}, arg6 interface{}, arg7 interface{}, arg8 interface{}, arg9 interface{}) interface{} { + return c.fn(arg0.(I0), arg1.(I1), arg2.(I2), arg3.(I3), arg4.(I4), arg5.(I5), arg6.(I6), arg7.(I7), arg8.(I8), arg9.(I9)) } func registerDoFn10x1StructWrappersAndFuncs[I0, I1, I2, I3, I4, I5, I6, I7, I8, I9, R0 any](doFn genericDoFn10x1[I0, I1, I2, I3, I4, I5, I6, I7, I8, I9, R0]) { @@ -3740,13 +3991,25 @@ func registerDoFn10x1StructWrappersAndFuncs[I0, I1, I2, I3, I4, I5, I6, I7, I8, reflectx.RegisterStructWrapper(reflect.TypeOf(doFn).Elem(), wrapperFn) } -// DoFn10x1[I0, I1, I2, I3, I4, I5, I6, I7, I8, I9, R0 any] registers your DoFn to optimize execution at runtime. 
+// DoFn10x1[I0, I1, I2, I3, I4, I5, I6, I7, I8, I9, R0 any] registers your structural DoFn to optimize execution at runtime. // DoFn input and output parameter types should be provided in order as the generic constraints. func DoFn10x1[I0, I1, I2, I3, I4, I5, I6, I7, I8, I9, R0 any](doFn genericDoFn10x1[I0, I1, I2, I3, I4, I5, I6, I7, I8, I9, R0]) { registerDoFnTypes(doFn) registerDoFn10x1StructWrappersAndFuncs[I0, I1, I2, I3, I4, I5, I6, I7, I8, I9, R0](doFn) } +// Function10x1[I0, I1, I2, I3, I4, I5, I6, I7, I8, I9, R0 any] registers your functional DoFn to optimize execution at runtime. +// Function input and output parameter types should be provided in order as the generic constraints. +func Function10x1[I0, I1, I2, I3, I4, I5, I6, I7, I8, I9, R0 any](doFn func(I0, I1, I2, I3, I4, I5, I6, I7, I8, I9) R0) { + runtime.RegisterFunction(doFn) + registerMethodTypes(reflect.TypeOf(doFn)) + caller := func(fn interface{}) reflectx.Func { + f := fn.(func(I0, I1, I2, I3, I4, I5, I6, I7, I8, I9) R0) + return &caller10x1[I0, I1, I2, I3, I4, I5, I6, I7, I8, I9, R0]{fn: f} + } + reflectx.RegisterFunc(reflect.TypeOf((*func(I0, I1, I2, I3, I4, I5, I6, I7, I8, I9) R0)(nil)).Elem(), caller) +} + type genericDoFn0x2[R0, R1 any] interface { ProcessElement() (R0, R1) } @@ -3811,13 +4074,25 @@ func registerDoFn0x2StructWrappersAndFuncs[R0, R1 any](doFn genericDoFn0x2[R0, R reflectx.RegisterStructWrapper(reflect.TypeOf(doFn).Elem(), wrapperFn) } -// DoFn0x2[R0, R1 any] registers your DoFn to optimize execution at runtime. +// DoFn0x2[R0, R1 any] registers your structural DoFn to optimize execution at runtime. // DoFn input and output parameter types should be provided in order as the generic constraints. func DoFn0x2[R0, R1 any](doFn genericDoFn0x2[R0, R1]) { registerDoFnTypes(doFn) registerDoFn0x2StructWrappersAndFuncs[R0, R1](doFn) } +// Function0x2[R0, R1 any] registers your functional DoFn to optimize execution at runtime. 
+// Function input and output parameter types should be provided in order as the generic constraints. +func Function0x2[R0, R1 any](doFn func() (R0, R1)) { + runtime.RegisterFunction(doFn) + registerMethodTypes(reflect.TypeOf(doFn)) + caller := func(fn interface{}) reflectx.Func { + f := fn.(func() (R0, R1)) + return &caller0x2[R0, R1]{fn: f} + } + reflectx.RegisterFunc(reflect.TypeOf((*func() (R0, R1))(nil)).Elem(), caller) +} + type genericDoFn1x2[I0, R0, R1 any] interface { ProcessElement(i0 I0) (R0, R1) } @@ -3839,8 +4114,8 @@ func (c *caller1x2[I0, R0, R1]) Call(args []interface{}) []interface{} { return []interface{}{out0, out1} } -func (c *caller1x2[I0, R0, R1]) Call1x2(arg0 I0) (interface{}, interface{}) { - return c.fn(arg0) +func (c *caller1x2[I0, R0, R1]) Call1x2(arg0 interface{}) (interface{}, interface{}) { + return c.fn(arg0.(I0)) } func registerDoFn1x2StructWrappersAndFuncs[I0, R0, R1 any](doFn genericDoFn1x2[I0, R0, R1]) { @@ -3882,13 +4157,25 @@ func registerDoFn1x2StructWrappersAndFuncs[I0, R0, R1 any](doFn genericDoFn1x2[I reflectx.RegisterStructWrapper(reflect.TypeOf(doFn).Elem(), wrapperFn) } -// DoFn1x2[I0, R0, R1 any] registers your DoFn to optimize execution at runtime. +// DoFn1x2[I0, R0, R1 any] registers your structural DoFn to optimize execution at runtime. // DoFn input and output parameter types should be provided in order as the generic constraints. func DoFn1x2[I0, R0, R1 any](doFn genericDoFn1x2[I0, R0, R1]) { registerDoFnTypes(doFn) registerDoFn1x2StructWrappersAndFuncs[I0, R0, R1](doFn) } +// Function1x2[I0, R0, R1 any] registers your functional DoFn to optimize execution at runtime. +// Function input and output parameter types should be provided in order as the generic constraints. 
+func Function1x2[I0, R0, R1 any](doFn func(I0) (R0, R1)) { + runtime.RegisterFunction(doFn) + registerMethodTypes(reflect.TypeOf(doFn)) + caller := func(fn interface{}) reflectx.Func { + f := fn.(func(I0) (R0, R1)) + return &caller1x2[I0, R0, R1]{fn: f} + } + reflectx.RegisterFunc(reflect.TypeOf((*func(I0) (R0, R1))(nil)).Elem(), caller) +} + type genericDoFn2x2[I0, I1, R0, R1 any] interface { ProcessElement(i0 I0, i1 I1) (R0, R1) } @@ -3910,8 +4197,8 @@ func (c *caller2x2[I0, I1, R0, R1]) Call(args []interface{}) []interface{} { return []interface{}{out0, out1} } -func (c *caller2x2[I0, I1, R0, R1]) Call2x2(arg0 I0, arg1 I1) (interface{}, interface{}) { - return c.fn(arg0, arg1) +func (c *caller2x2[I0, I1, R0, R1]) Call2x2(arg0 interface{}, arg1 interface{}) (interface{}, interface{}) { + return c.fn(arg0.(I0), arg1.(I1)) } func registerDoFn2x2StructWrappersAndFuncs[I0, I1, R0, R1 any](doFn genericDoFn2x2[I0, I1, R0, R1]) { @@ -3953,13 +4240,25 @@ func registerDoFn2x2StructWrappersAndFuncs[I0, I1, R0, R1 any](doFn genericDoFn2 reflectx.RegisterStructWrapper(reflect.TypeOf(doFn).Elem(), wrapperFn) } -// DoFn2x2[I0, I1, R0, R1 any] registers your DoFn to optimize execution at runtime. +// DoFn2x2[I0, I1, R0, R1 any] registers your structural DoFn to optimize execution at runtime. // DoFn input and output parameter types should be provided in order as the generic constraints. func DoFn2x2[I0, I1, R0, R1 any](doFn genericDoFn2x2[I0, I1, R0, R1]) { registerDoFnTypes(doFn) registerDoFn2x2StructWrappersAndFuncs[I0, I1, R0, R1](doFn) } +// Function2x2[I0, I1, R0, R1 any] registers your functional DoFn to optimize execution at runtime. +// Function input and output parameter types should be provided in order as the generic constraints. 
+func Function2x2[I0, I1, R0, R1 any](doFn func(I0, I1) (R0, R1)) { + runtime.RegisterFunction(doFn) + registerMethodTypes(reflect.TypeOf(doFn)) + caller := func(fn interface{}) reflectx.Func { + f := fn.(func(I0, I1) (R0, R1)) + return &caller2x2[I0, I1, R0, R1]{fn: f} + } + reflectx.RegisterFunc(reflect.TypeOf((*func(I0, I1) (R0, R1))(nil)).Elem(), caller) +} + type genericDoFn3x2[I0, I1, I2, R0, R1 any] interface { ProcessElement(i0 I0, i1 I1, i2 I2) (R0, R1) } @@ -3981,8 +4280,8 @@ func (c *caller3x2[I0, I1, I2, R0, R1]) Call(args []interface{}) []interface{} { return []interface{}{out0, out1} } -func (c *caller3x2[I0, I1, I2, R0, R1]) Call3x2(arg0 I0, arg1 I1, arg2 I2) (interface{}, interface{}) { - return c.fn(arg0, arg1, arg2) +func (c *caller3x2[I0, I1, I2, R0, R1]) Call3x2(arg0 interface{}, arg1 interface{}, arg2 interface{}) (interface{}, interface{}) { + return c.fn(arg0.(I0), arg1.(I1), arg2.(I2)) } func registerDoFn3x2StructWrappersAndFuncs[I0, I1, I2, R0, R1 any](doFn genericDoFn3x2[I0, I1, I2, R0, R1]) { @@ -4024,13 +4323,25 @@ func registerDoFn3x2StructWrappersAndFuncs[I0, I1, I2, R0, R1 any](doFn genericD reflectx.RegisterStructWrapper(reflect.TypeOf(doFn).Elem(), wrapperFn) } -// DoFn3x2[I0, I1, I2, R0, R1 any] registers your DoFn to optimize execution at runtime. +// DoFn3x2[I0, I1, I2, R0, R1 any] registers your structural DoFn to optimize execution at runtime. // DoFn input and output parameter types should be provided in order as the generic constraints. func DoFn3x2[I0, I1, I2, R0, R1 any](doFn genericDoFn3x2[I0, I1, I2, R0, R1]) { registerDoFnTypes(doFn) registerDoFn3x2StructWrappersAndFuncs[I0, I1, I2, R0, R1](doFn) } +// Function3x2[I0, I1, I2, R0, R1 any] registers your functional DoFn to optimize execution at runtime. +// Function input and output parameter types should be provided in order as the generic constraints. 
+func Function3x2[I0, I1, I2, R0, R1 any](doFn func(I0, I1, I2) (R0, R1)) { + runtime.RegisterFunction(doFn) + registerMethodTypes(reflect.TypeOf(doFn)) + caller := func(fn interface{}) reflectx.Func { + f := fn.(func(I0, I1, I2) (R0, R1)) + return &caller3x2[I0, I1, I2, R0, R1]{fn: f} + } + reflectx.RegisterFunc(reflect.TypeOf((*func(I0, I1, I2) (R0, R1))(nil)).Elem(), caller) +} + type genericDoFn4x2[I0, I1, I2, I3, R0, R1 any] interface { ProcessElement(i0 I0, i1 I1, i2 I2, i3 I3) (R0, R1) } @@ -4052,8 +4363,8 @@ func (c *caller4x2[I0, I1, I2, I3, R0, R1]) Call(args []interface{}) []interface return []interface{}{out0, out1} } -func (c *caller4x2[I0, I1, I2, I3, R0, R1]) Call4x2(arg0 I0, arg1 I1, arg2 I2, arg3 I3) (interface{}, interface{}) { - return c.fn(arg0, arg1, arg2, arg3) +func (c *caller4x2[I0, I1, I2, I3, R0, R1]) Call4x2(arg0 interface{}, arg1 interface{}, arg2 interface{}, arg3 interface{}) (interface{}, interface{}) { + return c.fn(arg0.(I0), arg1.(I1), arg2.(I2), arg3.(I3)) } func registerDoFn4x2StructWrappersAndFuncs[I0, I1, I2, I3, R0, R1 any](doFn genericDoFn4x2[I0, I1, I2, I3, R0, R1]) { @@ -4095,13 +4406,25 @@ func registerDoFn4x2StructWrappersAndFuncs[I0, I1, I2, I3, R0, R1 any](doFn gene reflectx.RegisterStructWrapper(reflect.TypeOf(doFn).Elem(), wrapperFn) } -// DoFn4x2[I0, I1, I2, I3, R0, R1 any] registers your DoFn to optimize execution at runtime. +// DoFn4x2[I0, I1, I2, I3, R0, R1 any] registers your structural DoFn to optimize execution at runtime. // DoFn input and output parameter types should be provided in order as the generic constraints. func DoFn4x2[I0, I1, I2, I3, R0, R1 any](doFn genericDoFn4x2[I0, I1, I2, I3, R0, R1]) { registerDoFnTypes(doFn) registerDoFn4x2StructWrappersAndFuncs[I0, I1, I2, I3, R0, R1](doFn) } +// Function4x2[I0, I1, I2, I3, R0, R1 any] registers your functional DoFn to optimize execution at runtime. +// Function input and output parameter types should be provided in order as the generic constraints. 
+func Function4x2[I0, I1, I2, I3, R0, R1 any](doFn func(I0, I1, I2, I3) (R0, R1)) { + runtime.RegisterFunction(doFn) + registerMethodTypes(reflect.TypeOf(doFn)) + caller := func(fn interface{}) reflectx.Func { + f := fn.(func(I0, I1, I2, I3) (R0, R1)) + return &caller4x2[I0, I1, I2, I3, R0, R1]{fn: f} + } + reflectx.RegisterFunc(reflect.TypeOf((*func(I0, I1, I2, I3) (R0, R1))(nil)).Elem(), caller) +} + type genericDoFn5x2[I0, I1, I2, I3, I4, R0, R1 any] interface { ProcessElement(i0 I0, i1 I1, i2 I2, i3 I3, i4 I4) (R0, R1) } @@ -4123,8 +4446,8 @@ func (c *caller5x2[I0, I1, I2, I3, I4, R0, R1]) Call(args []interface{}) []inter return []interface{}{out0, out1} } -func (c *caller5x2[I0, I1, I2, I3, I4, R0, R1]) Call5x2(arg0 I0, arg1 I1, arg2 I2, arg3 I3, arg4 I4) (interface{}, interface{}) { - return c.fn(arg0, arg1, arg2, arg3, arg4) +func (c *caller5x2[I0, I1, I2, I3, I4, R0, R1]) Call5x2(arg0 interface{}, arg1 interface{}, arg2 interface{}, arg3 interface{}, arg4 interface{}) (interface{}, interface{}) { + return c.fn(arg0.(I0), arg1.(I1), arg2.(I2), arg3.(I3), arg4.(I4)) } func registerDoFn5x2StructWrappersAndFuncs[I0, I1, I2, I3, I4, R0, R1 any](doFn genericDoFn5x2[I0, I1, I2, I3, I4, R0, R1]) { @@ -4166,13 +4489,25 @@ func registerDoFn5x2StructWrappersAndFuncs[I0, I1, I2, I3, I4, R0, R1 any](doFn reflectx.RegisterStructWrapper(reflect.TypeOf(doFn).Elem(), wrapperFn) } -// DoFn5x2[I0, I1, I2, I3, I4, R0, R1 any] registers your DoFn to optimize execution at runtime. +// DoFn5x2[I0, I1, I2, I3, I4, R0, R1 any] registers your structural DoFn to optimize execution at runtime. // DoFn input and output parameter types should be provided in order as the generic constraints. 
func DoFn5x2[I0, I1, I2, I3, I4, R0, R1 any](doFn genericDoFn5x2[I0, I1, I2, I3, I4, R0, R1]) { registerDoFnTypes(doFn) registerDoFn5x2StructWrappersAndFuncs[I0, I1, I2, I3, I4, R0, R1](doFn) } +// Function5x2[I0, I1, I2, I3, I4, R0, R1 any] registers your functional DoFn to optimize execution at runtime. +// Function input and output parameter types should be provided in order as the generic constraints. +func Function5x2[I0, I1, I2, I3, I4, R0, R1 any](doFn func(I0, I1, I2, I3, I4) (R0, R1)) { + runtime.RegisterFunction(doFn) + registerMethodTypes(reflect.TypeOf(doFn)) + caller := func(fn interface{}) reflectx.Func { + f := fn.(func(I0, I1, I2, I3, I4) (R0, R1)) + return &caller5x2[I0, I1, I2, I3, I4, R0, R1]{fn: f} + } + reflectx.RegisterFunc(reflect.TypeOf((*func(I0, I1, I2, I3, I4) (R0, R1))(nil)).Elem(), caller) +} + type genericDoFn6x2[I0, I1, I2, I3, I4, I5, R0, R1 any] interface { ProcessElement(i0 I0, i1 I1, i2 I2, i3 I3, i4 I4, i5 I5) (R0, R1) } @@ -4194,8 +4529,8 @@ func (c *caller6x2[I0, I1, I2, I3, I4, I5, R0, R1]) Call(args []interface{}) []i return []interface{}{out0, out1} } -func (c *caller6x2[I0, I1, I2, I3, I4, I5, R0, R1]) Call6x2(arg0 I0, arg1 I1, arg2 I2, arg3 I3, arg4 I4, arg5 I5) (interface{}, interface{}) { - return c.fn(arg0, arg1, arg2, arg3, arg4, arg5) +func (c *caller6x2[I0, I1, I2, I3, I4, I5, R0, R1]) Call6x2(arg0 interface{}, arg1 interface{}, arg2 interface{}, arg3 interface{}, arg4 interface{}, arg5 interface{}) (interface{}, interface{}) { + return c.fn(arg0.(I0), arg1.(I1), arg2.(I2), arg3.(I3), arg4.(I4), arg5.(I5)) } func registerDoFn6x2StructWrappersAndFuncs[I0, I1, I2, I3, I4, I5, R0, R1 any](doFn genericDoFn6x2[I0, I1, I2, I3, I4, I5, R0, R1]) { @@ -4237,13 +4572,25 @@ func registerDoFn6x2StructWrappersAndFuncs[I0, I1, I2, I3, I4, I5, R0, R1 any](d reflectx.RegisterStructWrapper(reflect.TypeOf(doFn).Elem(), wrapperFn) } -// DoFn6x2[I0, I1, I2, I3, I4, I5, R0, R1 any] registers your DoFn to optimize execution at runtime. 
+// DoFn6x2[I0, I1, I2, I3, I4, I5, R0, R1 any] registers your structural DoFn to optimize execution at runtime. // DoFn input and output parameter types should be provided in order as the generic constraints. func DoFn6x2[I0, I1, I2, I3, I4, I5, R0, R1 any](doFn genericDoFn6x2[I0, I1, I2, I3, I4, I5, R0, R1]) { registerDoFnTypes(doFn) registerDoFn6x2StructWrappersAndFuncs[I0, I1, I2, I3, I4, I5, R0, R1](doFn) } +// Function6x2[I0, I1, I2, I3, I4, I5, R0, R1 any] registers your functional DoFn to optimize execution at runtime. +// Function input and output parameter types should be provided in order as the generic constraints. +func Function6x2[I0, I1, I2, I3, I4, I5, R0, R1 any](doFn func(I0, I1, I2, I3, I4, I5) (R0, R1)) { + runtime.RegisterFunction(doFn) + registerMethodTypes(reflect.TypeOf(doFn)) + caller := func(fn interface{}) reflectx.Func { + f := fn.(func(I0, I1, I2, I3, I4, I5) (R0, R1)) + return &caller6x2[I0, I1, I2, I3, I4, I5, R0, R1]{fn: f} + } + reflectx.RegisterFunc(reflect.TypeOf((*func(I0, I1, I2, I3, I4, I5) (R0, R1))(nil)).Elem(), caller) +} + type genericDoFn7x2[I0, I1, I2, I3, I4, I5, I6, R0, R1 any] interface { ProcessElement(i0 I0, i1 I1, i2 I2, i3 I3, i4 I4, i5 I5, i6 I6) (R0, R1) } @@ -4265,8 +4612,8 @@ func (c *caller7x2[I0, I1, I2, I3, I4, I5, I6, R0, R1]) Call(args []interface{}) return []interface{}{out0, out1} } -func (c *caller7x2[I0, I1, I2, I3, I4, I5, I6, R0, R1]) Call7x2(arg0 I0, arg1 I1, arg2 I2, arg3 I3, arg4 I4, arg5 I5, arg6 I6) (interface{}, interface{}) { - return c.fn(arg0, arg1, arg2, arg3, arg4, arg5, arg6) +func (c *caller7x2[I0, I1, I2, I3, I4, I5, I6, R0, R1]) Call7x2(arg0 interface{}, arg1 interface{}, arg2 interface{}, arg3 interface{}, arg4 interface{}, arg5 interface{}, arg6 interface{}) (interface{}, interface{}) { + return c.fn(arg0.(I0), arg1.(I1), arg2.(I2), arg3.(I3), arg4.(I4), arg5.(I5), arg6.(I6)) } func registerDoFn7x2StructWrappersAndFuncs[I0, I1, I2, I3, I4, I5, I6, R0, R1 any](doFn genericDoFn7x2[I0, 
I1, I2, I3, I4, I5, I6, R0, R1]) { @@ -4308,13 +4655,25 @@ func registerDoFn7x2StructWrappersAndFuncs[I0, I1, I2, I3, I4, I5, I6, R0, R1 an reflectx.RegisterStructWrapper(reflect.TypeOf(doFn).Elem(), wrapperFn) } -// DoFn7x2[I0, I1, I2, I3, I4, I5, I6, R0, R1 any] registers your DoFn to optimize execution at runtime. +// DoFn7x2[I0, I1, I2, I3, I4, I5, I6, R0, R1 any] registers your structural DoFn to optimize execution at runtime. // DoFn input and output parameter types should be provided in order as the generic constraints. func DoFn7x2[I0, I1, I2, I3, I4, I5, I6, R0, R1 any](doFn genericDoFn7x2[I0, I1, I2, I3, I4, I5, I6, R0, R1]) { registerDoFnTypes(doFn) registerDoFn7x2StructWrappersAndFuncs[I0, I1, I2, I3, I4, I5, I6, R0, R1](doFn) } +// Function7x2[I0, I1, I2, I3, I4, I5, I6, R0, R1 any] registers your functional DoFn to optimize execution at runtime. +// Function input and output parameter types should be provided in order as the generic constraints. +func Function7x2[I0, I1, I2, I3, I4, I5, I6, R0, R1 any](doFn func(I0, I1, I2, I3, I4, I5, I6) (R0, R1)) { + runtime.RegisterFunction(doFn) + registerMethodTypes(reflect.TypeOf(doFn)) + caller := func(fn interface{}) reflectx.Func { + f := fn.(func(I0, I1, I2, I3, I4, I5, I6) (R0, R1)) + return &caller7x2[I0, I1, I2, I3, I4, I5, I6, R0, R1]{fn: f} + } + reflectx.RegisterFunc(reflect.TypeOf((*func(I0, I1, I2, I3, I4, I5, I6) (R0, R1))(nil)).Elem(), caller) +} + type genericDoFn8x2[I0, I1, I2, I3, I4, I5, I6, I7, R0, R1 any] interface { ProcessElement(i0 I0, i1 I1, i2 I2, i3 I3, i4 I4, i5 I5, i6 I6, i7 I7) (R0, R1) } @@ -4336,8 +4695,8 @@ func (c *caller8x2[I0, I1, I2, I3, I4, I5, I6, I7, R0, R1]) Call(args []interfac return []interface{}{out0, out1} } -func (c *caller8x2[I0, I1, I2, I3, I4, I5, I6, I7, R0, R1]) Call8x2(arg0 I0, arg1 I1, arg2 I2, arg3 I3, arg4 I4, arg5 I5, arg6 I6, arg7 I7) (interface{}, interface{}) { - return c.fn(arg0, arg1, arg2, arg3, arg4, arg5, arg6, arg7) +func (c *caller8x2[I0, I1, I2, 
I3, I4, I5, I6, I7, R0, R1]) Call8x2(arg0 interface{}, arg1 interface{}, arg2 interface{}, arg3 interface{}, arg4 interface{}, arg5 interface{}, arg6 interface{}, arg7 interface{}) (interface{}, interface{}) { + return c.fn(arg0.(I0), arg1.(I1), arg2.(I2), arg3.(I3), arg4.(I4), arg5.(I5), arg6.(I6), arg7.(I7)) } func registerDoFn8x2StructWrappersAndFuncs[I0, I1, I2, I3, I4, I5, I6, I7, R0, R1 any](doFn genericDoFn8x2[I0, I1, I2, I3, I4, I5, I6, I7, R0, R1]) { @@ -4379,13 +4738,25 @@ func registerDoFn8x2StructWrappersAndFuncs[I0, I1, I2, I3, I4, I5, I6, I7, R0, R reflectx.RegisterStructWrapper(reflect.TypeOf(doFn).Elem(), wrapperFn) } -// DoFn8x2[I0, I1, I2, I3, I4, I5, I6, I7, R0, R1 any] registers your DoFn to optimize execution at runtime. +// DoFn8x2[I0, I1, I2, I3, I4, I5, I6, I7, R0, R1 any] registers your structural DoFn to optimize execution at runtime. // DoFn input and output parameter types should be provided in order as the generic constraints. func DoFn8x2[I0, I1, I2, I3, I4, I5, I6, I7, R0, R1 any](doFn genericDoFn8x2[I0, I1, I2, I3, I4, I5, I6, I7, R0, R1]) { registerDoFnTypes(doFn) registerDoFn8x2StructWrappersAndFuncs[I0, I1, I2, I3, I4, I5, I6, I7, R0, R1](doFn) } +// Function8x2[I0, I1, I2, I3, I4, I5, I6, I7, R0, R1 any] registers your functional DoFn to optimize execution at runtime. +// Function input and output parameter types should be provided in order as the generic constraints. 
+func Function8x2[I0, I1, I2, I3, I4, I5, I6, I7, R0, R1 any](doFn func(I0, I1, I2, I3, I4, I5, I6, I7) (R0, R1)) { + runtime.RegisterFunction(doFn) + registerMethodTypes(reflect.TypeOf(doFn)) + caller := func(fn interface{}) reflectx.Func { + f := fn.(func(I0, I1, I2, I3, I4, I5, I6, I7) (R0, R1)) + return &caller8x2[I0, I1, I2, I3, I4, I5, I6, I7, R0, R1]{fn: f} + } + reflectx.RegisterFunc(reflect.TypeOf((*func(I0, I1, I2, I3, I4, I5, I6, I7) (R0, R1))(nil)).Elem(), caller) +} + type genericDoFn9x2[I0, I1, I2, I3, I4, I5, I6, I7, I8, R0, R1 any] interface { ProcessElement(i0 I0, i1 I1, i2 I2, i3 I3, i4 I4, i5 I5, i6 I6, i7 I7, i8 I8) (R0, R1) } @@ -4407,8 +4778,8 @@ func (c *caller9x2[I0, I1, I2, I3, I4, I5, I6, I7, I8, R0, R1]) Call(args []inte return []interface{}{out0, out1} } -func (c *caller9x2[I0, I1, I2, I3, I4, I5, I6, I7, I8, R0, R1]) Call9x2(arg0 I0, arg1 I1, arg2 I2, arg3 I3, arg4 I4, arg5 I5, arg6 I6, arg7 I7, arg8 I8) (interface{}, interface{}) { - return c.fn(arg0, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8) +func (c *caller9x2[I0, I1, I2, I3, I4, I5, I6, I7, I8, R0, R1]) Call9x2(arg0 interface{}, arg1 interface{}, arg2 interface{}, arg3 interface{}, arg4 interface{}, arg5 interface{}, arg6 interface{}, arg7 interface{}, arg8 interface{}) (interface{}, interface{}) { + return c.fn(arg0.(I0), arg1.(I1), arg2.(I2), arg3.(I3), arg4.(I4), arg5.(I5), arg6.(I6), arg7.(I7), arg8.(I8)) } func registerDoFn9x2StructWrappersAndFuncs[I0, I1, I2, I3, I4, I5, I6, I7, I8, R0, R1 any](doFn genericDoFn9x2[I0, I1, I2, I3, I4, I5, I6, I7, I8, R0, R1]) { @@ -4450,13 +4821,25 @@ func registerDoFn9x2StructWrappersAndFuncs[I0, I1, I2, I3, I4, I5, I6, I7, I8, R reflectx.RegisterStructWrapper(reflect.TypeOf(doFn).Elem(), wrapperFn) } -// DoFn9x2[I0, I1, I2, I3, I4, I5, I6, I7, I8, R0, R1 any] registers your DoFn to optimize execution at runtime. +// DoFn9x2[I0, I1, I2, I3, I4, I5, I6, I7, I8, R0, R1 any] registers your structural DoFn to optimize execution at runtime. 
// DoFn input and output parameter types should be provided in order as the generic constraints. func DoFn9x2[I0, I1, I2, I3, I4, I5, I6, I7, I8, R0, R1 any](doFn genericDoFn9x2[I0, I1, I2, I3, I4, I5, I6, I7, I8, R0, R1]) { registerDoFnTypes(doFn) registerDoFn9x2StructWrappersAndFuncs[I0, I1, I2, I3, I4, I5, I6, I7, I8, R0, R1](doFn) } +// Function9x2[I0, I1, I2, I3, I4, I5, I6, I7, I8, R0, R1 any] registers your functional DoFn to optimize execution at runtime. +// Function input and output parameter types should be provided in order as the generic constraints. +func Function9x2[I0, I1, I2, I3, I4, I5, I6, I7, I8, R0, R1 any](doFn func(I0, I1, I2, I3, I4, I5, I6, I7, I8) (R0, R1)) { + runtime.RegisterFunction(doFn) + registerMethodTypes(reflect.TypeOf(doFn)) + caller := func(fn interface{}) reflectx.Func { + f := fn.(func(I0, I1, I2, I3, I4, I5, I6, I7, I8) (R0, R1)) + return &caller9x2[I0, I1, I2, I3, I4, I5, I6, I7, I8, R0, R1]{fn: f} + } + reflectx.RegisterFunc(reflect.TypeOf((*func(I0, I1, I2, I3, I4, I5, I6, I7, I8) (R0, R1))(nil)).Elem(), caller) +} + type genericDoFn10x2[I0, I1, I2, I3, I4, I5, I6, I7, I8, I9, R0, R1 any] interface { ProcessElement(i0 I0, i1 I1, i2 I2, i3 I3, i4 I4, i5 I5, i6 I6, i7 I7, i8 I8, i9 I9) (R0, R1) } @@ -4478,8 +4861,8 @@ func (c *caller10x2[I0, I1, I2, I3, I4, I5, I6, I7, I8, I9, R0, R1]) Call(args [ return []interface{}{out0, out1} } -func (c *caller10x2[I0, I1, I2, I3, I4, I5, I6, I7, I8, I9, R0, R1]) Call10x2(arg0 I0, arg1 I1, arg2 I2, arg3 I3, arg4 I4, arg5 I5, arg6 I6, arg7 I7, arg8 I8, arg9 I9) (interface{}, interface{}) { - return c.fn(arg0, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9) +func (c *caller10x2[I0, I1, I2, I3, I4, I5, I6, I7, I8, I9, R0, R1]) Call10x2(arg0 interface{}, arg1 interface{}, arg2 interface{}, arg3 interface{}, arg4 interface{}, arg5 interface{}, arg6 interface{}, arg7 interface{}, arg8 interface{}, arg9 interface{}) (interface{}, interface{}) { + return c.fn(arg0.(I0), arg1.(I1), 
arg2.(I2), arg3.(I3), arg4.(I4), arg5.(I5), arg6.(I6), arg7.(I7), arg8.(I8), arg9.(I9)) } func registerDoFn10x2StructWrappersAndFuncs[I0, I1, I2, I3, I4, I5, I6, I7, I8, I9, R0, R1 any](doFn genericDoFn10x2[I0, I1, I2, I3, I4, I5, I6, I7, I8, I9, R0, R1]) { @@ -4521,13 +4904,25 @@ func registerDoFn10x2StructWrappersAndFuncs[I0, I1, I2, I3, I4, I5, I6, I7, I8, reflectx.RegisterStructWrapper(reflect.TypeOf(doFn).Elem(), wrapperFn) } -// DoFn10x2[I0, I1, I2, I3, I4, I5, I6, I7, I8, I9, R0, R1 any] registers your DoFn to optimize execution at runtime. +// DoFn10x2[I0, I1, I2, I3, I4, I5, I6, I7, I8, I9, R0, R1 any] registers your structural DoFn to optimize execution at runtime. // DoFn input and output parameter types should be provided in order as the generic constraints. func DoFn10x2[I0, I1, I2, I3, I4, I5, I6, I7, I8, I9, R0, R1 any](doFn genericDoFn10x2[I0, I1, I2, I3, I4, I5, I6, I7, I8, I9, R0, R1]) { registerDoFnTypes(doFn) registerDoFn10x2StructWrappersAndFuncs[I0, I1, I2, I3, I4, I5, I6, I7, I8, I9, R0, R1](doFn) } +// Function10x2[I0, I1, I2, I3, I4, I5, I6, I7, I8, I9, R0, R1 any] registers your functional DoFn to optimize execution at runtime. +// Function input and output parameter types should be provided in order as the generic constraints. 
+func Function10x2[I0, I1, I2, I3, I4, I5, I6, I7, I8, I9, R0, R1 any](doFn func(I0, I1, I2, I3, I4, I5, I6, I7, I8, I9) (R0, R1)) { + runtime.RegisterFunction(doFn) + registerMethodTypes(reflect.TypeOf(doFn)) + caller := func(fn interface{}) reflectx.Func { + f := fn.(func(I0, I1, I2, I3, I4, I5, I6, I7, I8, I9) (R0, R1)) + return &caller10x2[I0, I1, I2, I3, I4, I5, I6, I7, I8, I9, R0, R1]{fn: f} + } + reflectx.RegisterFunc(reflect.TypeOf((*func(I0, I1, I2, I3, I4, I5, I6, I7, I8, I9) (R0, R1))(nil)).Elem(), caller) +} + type genericDoFn0x3[R0, R1, R2 any] interface { ProcessElement() (R0, R1, R2) } @@ -4592,13 +4987,25 @@ func registerDoFn0x3StructWrappersAndFuncs[R0, R1, R2 any](doFn genericDoFn0x3[R reflectx.RegisterStructWrapper(reflect.TypeOf(doFn).Elem(), wrapperFn) } -// DoFn0x3[R0, R1, R2 any] registers your DoFn to optimize execution at runtime. +// DoFn0x3[R0, R1, R2 any] registers your structural DoFn to optimize execution at runtime. // DoFn input and output parameter types should be provided in order as the generic constraints. func DoFn0x3[R0, R1, R2 any](doFn genericDoFn0x3[R0, R1, R2]) { registerDoFnTypes(doFn) registerDoFn0x3StructWrappersAndFuncs[R0, R1, R2](doFn) } +// Function0x3[R0, R1, R2 any] registers your functional DoFn to optimize execution at runtime. +// Function input and output parameter types should be provided in order as the generic constraints. 
+func Function0x3[R0, R1, R2 any](doFn func() (R0, R1, R2)) { + runtime.RegisterFunction(doFn) + registerMethodTypes(reflect.TypeOf(doFn)) + caller := func(fn interface{}) reflectx.Func { + f := fn.(func() (R0, R1, R2)) + return &caller0x3[R0, R1, R2]{fn: f} + } + reflectx.RegisterFunc(reflect.TypeOf((*func() (R0, R1, R2))(nil)).Elem(), caller) +} + type genericDoFn1x3[I0, R0, R1, R2 any] interface { ProcessElement(i0 I0) (R0, R1, R2) } @@ -4620,8 +5027,8 @@ func (c *caller1x3[I0, R0, R1, R2]) Call(args []interface{}) []interface{} { return []interface{}{out0, out1, out2} } -func (c *caller1x3[I0, R0, R1, R2]) Call1x3(arg0 I0) (interface{}, interface{}, interface{}) { - return c.fn(arg0) +func (c *caller1x3[I0, R0, R1, R2]) Call1x3(arg0 interface{}) (interface{}, interface{}, interface{}) { + return c.fn(arg0.(I0)) } func registerDoFn1x3StructWrappersAndFuncs[I0, R0, R1, R2 any](doFn genericDoFn1x3[I0, R0, R1, R2]) { @@ -4663,13 +5070,25 @@ func registerDoFn1x3StructWrappersAndFuncs[I0, R0, R1, R2 any](doFn genericDoFn1 reflectx.RegisterStructWrapper(reflect.TypeOf(doFn).Elem(), wrapperFn) } -// DoFn1x3[I0, R0, R1, R2 any] registers your DoFn to optimize execution at runtime. +// DoFn1x3[I0, R0, R1, R2 any] registers your structural DoFn to optimize execution at runtime. // DoFn input and output parameter types should be provided in order as the generic constraints. func DoFn1x3[I0, R0, R1, R2 any](doFn genericDoFn1x3[I0, R0, R1, R2]) { registerDoFnTypes(doFn) registerDoFn1x3StructWrappersAndFuncs[I0, R0, R1, R2](doFn) } +// Function1x3[I0, R0, R1, R2 any] registers your functional DoFn to optimize execution at runtime. +// Function input and output parameter types should be provided in order as the generic constraints. 
+func Function1x3[I0, R0, R1, R2 any](doFn func(I0) (R0, R1, R2)) { + runtime.RegisterFunction(doFn) + registerMethodTypes(reflect.TypeOf(doFn)) + caller := func(fn interface{}) reflectx.Func { + f := fn.(func(I0) (R0, R1, R2)) + return &caller1x3[I0, R0, R1, R2]{fn: f} + } + reflectx.RegisterFunc(reflect.TypeOf((*func(I0) (R0, R1, R2))(nil)).Elem(), caller) +} + type genericDoFn2x3[I0, I1, R0, R1, R2 any] interface { ProcessElement(i0 I0, i1 I1) (R0, R1, R2) } @@ -4691,8 +5110,8 @@ func (c *caller2x3[I0, I1, R0, R1, R2]) Call(args []interface{}) []interface{} { return []interface{}{out0, out1, out2} } -func (c *caller2x3[I0, I1, R0, R1, R2]) Call2x3(arg0 I0, arg1 I1) (interface{}, interface{}, interface{}) { - return c.fn(arg0, arg1) +func (c *caller2x3[I0, I1, R0, R1, R2]) Call2x3(arg0 interface{}, arg1 interface{}) (interface{}, interface{}, interface{}) { + return c.fn(arg0.(I0), arg1.(I1)) } func registerDoFn2x3StructWrappersAndFuncs[I0, I1, R0, R1, R2 any](doFn genericDoFn2x3[I0, I1, R0, R1, R2]) { @@ -4734,13 +5153,25 @@ func registerDoFn2x3StructWrappersAndFuncs[I0, I1, R0, R1, R2 any](doFn genericD reflectx.RegisterStructWrapper(reflect.TypeOf(doFn).Elem(), wrapperFn) } -// DoFn2x3[I0, I1, R0, R1, R2 any] registers your DoFn to optimize execution at runtime. +// DoFn2x3[I0, I1, R0, R1, R2 any] registers your structural DoFn to optimize execution at runtime. // DoFn input and output parameter types should be provided in order as the generic constraints. func DoFn2x3[I0, I1, R0, R1, R2 any](doFn genericDoFn2x3[I0, I1, R0, R1, R2]) { registerDoFnTypes(doFn) registerDoFn2x3StructWrappersAndFuncs[I0, I1, R0, R1, R2](doFn) } +// Function2x3[I0, I1, R0, R1, R2 any] registers your functional DoFn to optimize execution at runtime. +// Function input and output parameter types should be provided in order as the generic constraints. 
+func Function2x3[I0, I1, R0, R1, R2 any](doFn func(I0, I1) (R0, R1, R2)) { + runtime.RegisterFunction(doFn) + registerMethodTypes(reflect.TypeOf(doFn)) + caller := func(fn interface{}) reflectx.Func { + f := fn.(func(I0, I1) (R0, R1, R2)) + return &caller2x3[I0, I1, R0, R1, R2]{fn: f} + } + reflectx.RegisterFunc(reflect.TypeOf((*func(I0, I1) (R0, R1, R2))(nil)).Elem(), caller) +} + type genericDoFn3x3[I0, I1, I2, R0, R1, R2 any] interface { ProcessElement(i0 I0, i1 I1, i2 I2) (R0, R1, R2) } @@ -4762,8 +5193,8 @@ func (c *caller3x3[I0, I1, I2, R0, R1, R2]) Call(args []interface{}) []interface return []interface{}{out0, out1, out2} } -func (c *caller3x3[I0, I1, I2, R0, R1, R2]) Call3x3(arg0 I0, arg1 I1, arg2 I2) (interface{}, interface{}, interface{}) { - return c.fn(arg0, arg1, arg2) +func (c *caller3x3[I0, I1, I2, R0, R1, R2]) Call3x3(arg0 interface{}, arg1 interface{}, arg2 interface{}) (interface{}, interface{}, interface{}) { + return c.fn(arg0.(I0), arg1.(I1), arg2.(I2)) } func registerDoFn3x3StructWrappersAndFuncs[I0, I1, I2, R0, R1, R2 any](doFn genericDoFn3x3[I0, I1, I2, R0, R1, R2]) { @@ -4805,13 +5236,25 @@ func registerDoFn3x3StructWrappersAndFuncs[I0, I1, I2, R0, R1, R2 any](doFn gene reflectx.RegisterStructWrapper(reflect.TypeOf(doFn).Elem(), wrapperFn) } -// DoFn3x3[I0, I1, I2, R0, R1, R2 any] registers your DoFn to optimize execution at runtime. +// DoFn3x3[I0, I1, I2, R0, R1, R2 any] registers your structural DoFn to optimize execution at runtime. // DoFn input and output parameter types should be provided in order as the generic constraints. func DoFn3x3[I0, I1, I2, R0, R1, R2 any](doFn genericDoFn3x3[I0, I1, I2, R0, R1, R2]) { registerDoFnTypes(doFn) registerDoFn3x3StructWrappersAndFuncs[I0, I1, I2, R0, R1, R2](doFn) } +// Function3x3[I0, I1, I2, R0, R1, R2 any] registers your functional DoFn to optimize execution at runtime. +// Function input and output parameter types should be provided in order as the generic constraints. 
+func Function3x3[I0, I1, I2, R0, R1, R2 any](doFn func(I0, I1, I2) (R0, R1, R2)) { + runtime.RegisterFunction(doFn) + registerMethodTypes(reflect.TypeOf(doFn)) + caller := func(fn interface{}) reflectx.Func { + f := fn.(func(I0, I1, I2) (R0, R1, R2)) + return &caller3x3[I0, I1, I2, R0, R1, R2]{fn: f} + } + reflectx.RegisterFunc(reflect.TypeOf((*func(I0, I1, I2) (R0, R1, R2))(nil)).Elem(), caller) +} + type genericDoFn4x3[I0, I1, I2, I3, R0, R1, R2 any] interface { ProcessElement(i0 I0, i1 I1, i2 I2, i3 I3) (R0, R1, R2) } @@ -4833,8 +5276,8 @@ func (c *caller4x3[I0, I1, I2, I3, R0, R1, R2]) Call(args []interface{}) []inter return []interface{}{out0, out1, out2} } -func (c *caller4x3[I0, I1, I2, I3, R0, R1, R2]) Call4x3(arg0 I0, arg1 I1, arg2 I2, arg3 I3) (interface{}, interface{}, interface{}) { - return c.fn(arg0, arg1, arg2, arg3) +func (c *caller4x3[I0, I1, I2, I3, R0, R1, R2]) Call4x3(arg0 interface{}, arg1 interface{}, arg2 interface{}, arg3 interface{}) (interface{}, interface{}, interface{}) { + return c.fn(arg0.(I0), arg1.(I1), arg2.(I2), arg3.(I3)) } func registerDoFn4x3StructWrappersAndFuncs[I0, I1, I2, I3, R0, R1, R2 any](doFn genericDoFn4x3[I0, I1, I2, I3, R0, R1, R2]) { @@ -4876,13 +5319,25 @@ func registerDoFn4x3StructWrappersAndFuncs[I0, I1, I2, I3, R0, R1, R2 any](doFn reflectx.RegisterStructWrapper(reflect.TypeOf(doFn).Elem(), wrapperFn) } -// DoFn4x3[I0, I1, I2, I3, R0, R1, R2 any] registers your DoFn to optimize execution at runtime. +// DoFn4x3[I0, I1, I2, I3, R0, R1, R2 any] registers your structural DoFn to optimize execution at runtime. // DoFn input and output parameter types should be provided in order as the generic constraints. func DoFn4x3[I0, I1, I2, I3, R0, R1, R2 any](doFn genericDoFn4x3[I0, I1, I2, I3, R0, R1, R2]) { registerDoFnTypes(doFn) registerDoFn4x3StructWrappersAndFuncs[I0, I1, I2, I3, R0, R1, R2](doFn) } +// Function4x3[I0, I1, I2, I3, R0, R1, R2 any] registers your functional DoFn to optimize execution at runtime. 
+// Function input and output parameter types should be provided in order as the generic constraints. +func Function4x3[I0, I1, I2, I3, R0, R1, R2 any](doFn func(I0, I1, I2, I3) (R0, R1, R2)) { + runtime.RegisterFunction(doFn) + registerMethodTypes(reflect.TypeOf(doFn)) + caller := func(fn interface{}) reflectx.Func { + f := fn.(func(I0, I1, I2, I3) (R0, R1, R2)) + return &caller4x3[I0, I1, I2, I3, R0, R1, R2]{fn: f} + } + reflectx.RegisterFunc(reflect.TypeOf((*func(I0, I1, I2, I3) (R0, R1, R2))(nil)).Elem(), caller) +} + type genericDoFn5x3[I0, I1, I2, I3, I4, R0, R1, R2 any] interface { ProcessElement(i0 I0, i1 I1, i2 I2, i3 I3, i4 I4) (R0, R1, R2) } @@ -4904,8 +5359,8 @@ func (c *caller5x3[I0, I1, I2, I3, I4, R0, R1, R2]) Call(args []interface{}) []i return []interface{}{out0, out1, out2} } -func (c *caller5x3[I0, I1, I2, I3, I4, R0, R1, R2]) Call5x3(arg0 I0, arg1 I1, arg2 I2, arg3 I3, arg4 I4) (interface{}, interface{}, interface{}) { - return c.fn(arg0, arg1, arg2, arg3, arg4) +func (c *caller5x3[I0, I1, I2, I3, I4, R0, R1, R2]) Call5x3(arg0 interface{}, arg1 interface{}, arg2 interface{}, arg3 interface{}, arg4 interface{}) (interface{}, interface{}, interface{}) { + return c.fn(arg0.(I0), arg1.(I1), arg2.(I2), arg3.(I3), arg4.(I4)) } func registerDoFn5x3StructWrappersAndFuncs[I0, I1, I2, I3, I4, R0, R1, R2 any](doFn genericDoFn5x3[I0, I1, I2, I3, I4, R0, R1, R2]) { @@ -4947,13 +5402,25 @@ func registerDoFn5x3StructWrappersAndFuncs[I0, I1, I2, I3, I4, R0, R1, R2 any](d reflectx.RegisterStructWrapper(reflect.TypeOf(doFn).Elem(), wrapperFn) } -// DoFn5x3[I0, I1, I2, I3, I4, R0, R1, R2 any] registers your DoFn to optimize execution at runtime. +// DoFn5x3[I0, I1, I2, I3, I4, R0, R1, R2 any] registers your structural DoFn to optimize execution at runtime. // DoFn input and output parameter types should be provided in order as the generic constraints. 
func DoFn5x3[I0, I1, I2, I3, I4, R0, R1, R2 any](doFn genericDoFn5x3[I0, I1, I2, I3, I4, R0, R1, R2]) { registerDoFnTypes(doFn) registerDoFn5x3StructWrappersAndFuncs[I0, I1, I2, I3, I4, R0, R1, R2](doFn) } +// Function5x3[I0, I1, I2, I3, I4, R0, R1, R2 any] registers your functional DoFn to optimize execution at runtime. +// Function input and output parameter types should be provided in order as the generic constraints. +func Function5x3[I0, I1, I2, I3, I4, R0, R1, R2 any](doFn func(I0, I1, I2, I3, I4) (R0, R1, R2)) { + runtime.RegisterFunction(doFn) + registerMethodTypes(reflect.TypeOf(doFn)) + caller := func(fn interface{}) reflectx.Func { + f := fn.(func(I0, I1, I2, I3, I4) (R0, R1, R2)) + return &caller5x3[I0, I1, I2, I3, I4, R0, R1, R2]{fn: f} + } + reflectx.RegisterFunc(reflect.TypeOf((*func(I0, I1, I2, I3, I4) (R0, R1, R2))(nil)).Elem(), caller) +} + type genericDoFn6x3[I0, I1, I2, I3, I4, I5, R0, R1, R2 any] interface { ProcessElement(i0 I0, i1 I1, i2 I2, i3 I3, i4 I4, i5 I5) (R0, R1, R2) } @@ -4975,8 +5442,8 @@ func (c *caller6x3[I0, I1, I2, I3, I4, I5, R0, R1, R2]) Call(args []interface{}) return []interface{}{out0, out1, out2} } -func (c *caller6x3[I0, I1, I2, I3, I4, I5, R0, R1, R2]) Call6x3(arg0 I0, arg1 I1, arg2 I2, arg3 I3, arg4 I4, arg5 I5) (interface{}, interface{}, interface{}) { - return c.fn(arg0, arg1, arg2, arg3, arg4, arg5) +func (c *caller6x3[I0, I1, I2, I3, I4, I5, R0, R1, R2]) Call6x3(arg0 interface{}, arg1 interface{}, arg2 interface{}, arg3 interface{}, arg4 interface{}, arg5 interface{}) (interface{}, interface{}, interface{}) { + return c.fn(arg0.(I0), arg1.(I1), arg2.(I2), arg3.(I3), arg4.(I4), arg5.(I5)) } func registerDoFn6x3StructWrappersAndFuncs[I0, I1, I2, I3, I4, I5, R0, R1, R2 any](doFn genericDoFn6x3[I0, I1, I2, I3, I4, I5, R0, R1, R2]) { @@ -5018,13 +5485,25 @@ func registerDoFn6x3StructWrappersAndFuncs[I0, I1, I2, I3, I4, I5, R0, R1, R2 an reflectx.RegisterStructWrapper(reflect.TypeOf(doFn).Elem(), wrapperFn) } -// 
DoFn6x3[I0, I1, I2, I3, I4, I5, R0, R1, R2 any] registers your DoFn to optimize execution at runtime. +// DoFn6x3[I0, I1, I2, I3, I4, I5, R0, R1, R2 any] registers your structural DoFn to optimize execution at runtime. // DoFn input and output parameter types should be provided in order as the generic constraints. func DoFn6x3[I0, I1, I2, I3, I4, I5, R0, R1, R2 any](doFn genericDoFn6x3[I0, I1, I2, I3, I4, I5, R0, R1, R2]) { registerDoFnTypes(doFn) registerDoFn6x3StructWrappersAndFuncs[I0, I1, I2, I3, I4, I5, R0, R1, R2](doFn) } +// Function6x3[I0, I1, I2, I3, I4, I5, R0, R1, R2 any] registers your functional DoFn to optimize execution at runtime. +// Function input and output parameter types should be provided in order as the generic constraints. +func Function6x3[I0, I1, I2, I3, I4, I5, R0, R1, R2 any](doFn func(I0, I1, I2, I3, I4, I5) (R0, R1, R2)) { + runtime.RegisterFunction(doFn) + registerMethodTypes(reflect.TypeOf(doFn)) + caller := func(fn interface{}) reflectx.Func { + f := fn.(func(I0, I1, I2, I3, I4, I5) (R0, R1, R2)) + return &caller6x3[I0, I1, I2, I3, I4, I5, R0, R1, R2]{fn: f} + } + reflectx.RegisterFunc(reflect.TypeOf((*func(I0, I1, I2, I3, I4, I5) (R0, R1, R2))(nil)).Elem(), caller) +} + type genericDoFn7x3[I0, I1, I2, I3, I4, I5, I6, R0, R1, R2 any] interface { ProcessElement(i0 I0, i1 I1, i2 I2, i3 I3, i4 I4, i5 I5, i6 I6) (R0, R1, R2) } @@ -5046,8 +5525,8 @@ func (c *caller7x3[I0, I1, I2, I3, I4, I5, I6, R0, R1, R2]) Call(args []interfac return []interface{}{out0, out1, out2} } -func (c *caller7x3[I0, I1, I2, I3, I4, I5, I6, R0, R1, R2]) Call7x3(arg0 I0, arg1 I1, arg2 I2, arg3 I3, arg4 I4, arg5 I5, arg6 I6) (interface{}, interface{}, interface{}) { - return c.fn(arg0, arg1, arg2, arg3, arg4, arg5, arg6) +func (c *caller7x3[I0, I1, I2, I3, I4, I5, I6, R0, R1, R2]) Call7x3(arg0 interface{}, arg1 interface{}, arg2 interface{}, arg3 interface{}, arg4 interface{}, arg5 interface{}, arg6 interface{}) (interface{}, interface{}, interface{}) { + return 
c.fn(arg0.(I0), arg1.(I1), arg2.(I2), arg3.(I3), arg4.(I4), arg5.(I5), arg6.(I6)) } func registerDoFn7x3StructWrappersAndFuncs[I0, I1, I2, I3, I4, I5, I6, R0, R1, R2 any](doFn genericDoFn7x3[I0, I1, I2, I3, I4, I5, I6, R0, R1, R2]) { @@ -5089,13 +5568,25 @@ func registerDoFn7x3StructWrappersAndFuncs[I0, I1, I2, I3, I4, I5, I6, R0, R1, R reflectx.RegisterStructWrapper(reflect.TypeOf(doFn).Elem(), wrapperFn) } -// DoFn7x3[I0, I1, I2, I3, I4, I5, I6, R0, R1, R2 any] registers your DoFn to optimize execution at runtime. +// DoFn7x3[I0, I1, I2, I3, I4, I5, I6, R0, R1, R2 any] registers your structural DoFn to optimize execution at runtime. // DoFn input and output parameter types should be provided in order as the generic constraints. func DoFn7x3[I0, I1, I2, I3, I4, I5, I6, R0, R1, R2 any](doFn genericDoFn7x3[I0, I1, I2, I3, I4, I5, I6, R0, R1, R2]) { registerDoFnTypes(doFn) registerDoFn7x3StructWrappersAndFuncs[I0, I1, I2, I3, I4, I5, I6, R0, R1, R2](doFn) } +// Function7x3[I0, I1, I2, I3, I4, I5, I6, R0, R1, R2 any] registers your functional DoFn to optimize execution at runtime. +// Function input and output parameter types should be provided in order as the generic constraints. 
+func Function7x3[I0, I1, I2, I3, I4, I5, I6, R0, R1, R2 any](doFn func(I0, I1, I2, I3, I4, I5, I6) (R0, R1, R2)) { + runtime.RegisterFunction(doFn) + registerMethodTypes(reflect.TypeOf(doFn)) + caller := func(fn interface{}) reflectx.Func { + f := fn.(func(I0, I1, I2, I3, I4, I5, I6) (R0, R1, R2)) + return &caller7x3[I0, I1, I2, I3, I4, I5, I6, R0, R1, R2]{fn: f} + } + reflectx.RegisterFunc(reflect.TypeOf((*func(I0, I1, I2, I3, I4, I5, I6) (R0, R1, R2))(nil)).Elem(), caller) +} + type genericDoFn8x3[I0, I1, I2, I3, I4, I5, I6, I7, R0, R1, R2 any] interface { ProcessElement(i0 I0, i1 I1, i2 I2, i3 I3, i4 I4, i5 I5, i6 I6, i7 I7) (R0, R1, R2) } @@ -5117,8 +5608,8 @@ func (c *caller8x3[I0, I1, I2, I3, I4, I5, I6, I7, R0, R1, R2]) Call(args []inte return []interface{}{out0, out1, out2} } -func (c *caller8x3[I0, I1, I2, I3, I4, I5, I6, I7, R0, R1, R2]) Call8x3(arg0 I0, arg1 I1, arg2 I2, arg3 I3, arg4 I4, arg5 I5, arg6 I6, arg7 I7) (interface{}, interface{}, interface{}) { - return c.fn(arg0, arg1, arg2, arg3, arg4, arg5, arg6, arg7) +func (c *caller8x3[I0, I1, I2, I3, I4, I5, I6, I7, R0, R1, R2]) Call8x3(arg0 interface{}, arg1 interface{}, arg2 interface{}, arg3 interface{}, arg4 interface{}, arg5 interface{}, arg6 interface{}, arg7 interface{}) (interface{}, interface{}, interface{}) { + return c.fn(arg0.(I0), arg1.(I1), arg2.(I2), arg3.(I3), arg4.(I4), arg5.(I5), arg6.(I6), arg7.(I7)) } func registerDoFn8x3StructWrappersAndFuncs[I0, I1, I2, I3, I4, I5, I6, I7, R0, R1, R2 any](doFn genericDoFn8x3[I0, I1, I2, I3, I4, I5, I6, I7, R0, R1, R2]) { @@ -5160,13 +5651,25 @@ func registerDoFn8x3StructWrappersAndFuncs[I0, I1, I2, I3, I4, I5, I6, I7, R0, R reflectx.RegisterStructWrapper(reflect.TypeOf(doFn).Elem(), wrapperFn) } -// DoFn8x3[I0, I1, I2, I3, I4, I5, I6, I7, R0, R1, R2 any] registers your DoFn to optimize execution at runtime. +// DoFn8x3[I0, I1, I2, I3, I4, I5, I6, I7, R0, R1, R2 any] registers your structural DoFn to optimize execution at runtime. 
// DoFn input and output parameter types should be provided in order as the generic constraints. func DoFn8x3[I0, I1, I2, I3, I4, I5, I6, I7, R0, R1, R2 any](doFn genericDoFn8x3[I0, I1, I2, I3, I4, I5, I6, I7, R0, R1, R2]) { registerDoFnTypes(doFn) registerDoFn8x3StructWrappersAndFuncs[I0, I1, I2, I3, I4, I5, I6, I7, R0, R1, R2](doFn) } +// Function8x3[I0, I1, I2, I3, I4, I5, I6, I7, R0, R1, R2 any] registers your functional DoFn to optimize execution at runtime. +// Function input and output parameter types should be provided in order as the generic constraints. +func Function8x3[I0, I1, I2, I3, I4, I5, I6, I7, R0, R1, R2 any](doFn func(I0, I1, I2, I3, I4, I5, I6, I7) (R0, R1, R2)) { + runtime.RegisterFunction(doFn) + registerMethodTypes(reflect.TypeOf(doFn)) + caller := func(fn interface{}) reflectx.Func { + f := fn.(func(I0, I1, I2, I3, I4, I5, I6, I7) (R0, R1, R2)) + return &caller8x3[I0, I1, I2, I3, I4, I5, I6, I7, R0, R1, R2]{fn: f} + } + reflectx.RegisterFunc(reflect.TypeOf((*func(I0, I1, I2, I3, I4, I5, I6, I7) (R0, R1, R2))(nil)).Elem(), caller) +} + type genericDoFn9x3[I0, I1, I2, I3, I4, I5, I6, I7, I8, R0, R1, R2 any] interface { ProcessElement(i0 I0, i1 I1, i2 I2, i3 I3, i4 I4, i5 I5, i6 I6, i7 I7, i8 I8) (R0, R1, R2) } @@ -5188,8 +5691,8 @@ func (c *caller9x3[I0, I1, I2, I3, I4, I5, I6, I7, I8, R0, R1, R2]) Call(args [] return []interface{}{out0, out1, out2} } -func (c *caller9x3[I0, I1, I2, I3, I4, I5, I6, I7, I8, R0, R1, R2]) Call9x3(arg0 I0, arg1 I1, arg2 I2, arg3 I3, arg4 I4, arg5 I5, arg6 I6, arg7 I7, arg8 I8) (interface{}, interface{}, interface{}) { - return c.fn(arg0, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8) +func (c *caller9x3[I0, I1, I2, I3, I4, I5, I6, I7, I8, R0, R1, R2]) Call9x3(arg0 interface{}, arg1 interface{}, arg2 interface{}, arg3 interface{}, arg4 interface{}, arg5 interface{}, arg6 interface{}, arg7 interface{}, arg8 interface{}) (interface{}, interface{}, interface{}) { + return c.fn(arg0.(I0), arg1.(I1), arg2.(I2), 
arg3.(I3), arg4.(I4), arg5.(I5), arg6.(I6), arg7.(I7), arg8.(I8)) } func registerDoFn9x3StructWrappersAndFuncs[I0, I1, I2, I3, I4, I5, I6, I7, I8, R0, R1, R2 any](doFn genericDoFn9x3[I0, I1, I2, I3, I4, I5, I6, I7, I8, R0, R1, R2]) { @@ -5231,13 +5734,25 @@ func registerDoFn9x3StructWrappersAndFuncs[I0, I1, I2, I3, I4, I5, I6, I7, I8, R reflectx.RegisterStructWrapper(reflect.TypeOf(doFn).Elem(), wrapperFn) } -// DoFn9x3[I0, I1, I2, I3, I4, I5, I6, I7, I8, R0, R1, R2 any] registers your DoFn to optimize execution at runtime. +// DoFn9x3[I0, I1, I2, I3, I4, I5, I6, I7, I8, R0, R1, R2 any] registers your structural DoFn to optimize execution at runtime. // DoFn input and output parameter types should be provided in order as the generic constraints. func DoFn9x3[I0, I1, I2, I3, I4, I5, I6, I7, I8, R0, R1, R2 any](doFn genericDoFn9x3[I0, I1, I2, I3, I4, I5, I6, I7, I8, R0, R1, R2]) { registerDoFnTypes(doFn) registerDoFn9x3StructWrappersAndFuncs[I0, I1, I2, I3, I4, I5, I6, I7, I8, R0, R1, R2](doFn) } +// Function9x3[I0, I1, I2, I3, I4, I5, I6, I7, I8, R0, R1, R2 any] registers your functional DoFn to optimize execution at runtime. +// Function input and output parameter types should be provided in order as the generic constraints. 
+func Function9x3[I0, I1, I2, I3, I4, I5, I6, I7, I8, R0, R1, R2 any](doFn func(I0, I1, I2, I3, I4, I5, I6, I7, I8) (R0, R1, R2)) { + runtime.RegisterFunction(doFn) + registerMethodTypes(reflect.TypeOf(doFn)) + caller := func(fn interface{}) reflectx.Func { + f := fn.(func(I0, I1, I2, I3, I4, I5, I6, I7, I8) (R0, R1, R2)) + return &caller9x3[I0, I1, I2, I3, I4, I5, I6, I7, I8, R0, R1, R2]{fn: f} + } + reflectx.RegisterFunc(reflect.TypeOf((*func(I0, I1, I2, I3, I4, I5, I6, I7, I8) (R0, R1, R2))(nil)).Elem(), caller) +} + type genericDoFn10x3[I0, I1, I2, I3, I4, I5, I6, I7, I8, I9, R0, R1, R2 any] interface { ProcessElement(i0 I0, i1 I1, i2 I2, i3 I3, i4 I4, i5 I5, i6 I6, i7 I7, i8 I8, i9 I9) (R0, R1, R2) } @@ -5259,8 +5774,8 @@ func (c *caller10x3[I0, I1, I2, I3, I4, I5, I6, I7, I8, I9, R0, R1, R2]) Call(ar return []interface{}{out0, out1, out2} } -func (c *caller10x3[I0, I1, I2, I3, I4, I5, I6, I7, I8, I9, R0, R1, R2]) Call10x3(arg0 I0, arg1 I1, arg2 I2, arg3 I3, arg4 I4, arg5 I5, arg6 I6, arg7 I7, arg8 I8, arg9 I9) (interface{}, interface{}, interface{}) { - return c.fn(arg0, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9) +func (c *caller10x3[I0, I1, I2, I3, I4, I5, I6, I7, I8, I9, R0, R1, R2]) Call10x3(arg0 interface{}, arg1 interface{}, arg2 interface{}, arg3 interface{}, arg4 interface{}, arg5 interface{}, arg6 interface{}, arg7 interface{}, arg8 interface{}, arg9 interface{}) (interface{}, interface{}, interface{}) { + return c.fn(arg0.(I0), arg1.(I1), arg2.(I2), arg3.(I3), arg4.(I4), arg5.(I5), arg6.(I6), arg7.(I7), arg8.(I8), arg9.(I9)) } func registerDoFn10x3StructWrappersAndFuncs[I0, I1, I2, I3, I4, I5, I6, I7, I8, I9, R0, R1, R2 any](doFn genericDoFn10x3[I0, I1, I2, I3, I4, I5, I6, I7, I8, I9, R0, R1, R2]) { @@ -5302,13 +5817,25 @@ func registerDoFn10x3StructWrappersAndFuncs[I0, I1, I2, I3, I4, I5, I6, I7, I8, reflectx.RegisterStructWrapper(reflect.TypeOf(doFn).Elem(), wrapperFn) } -// DoFn10x3[I0, I1, I2, I3, I4, I5, I6, I7, I8, I9, R0, R1, R2 
any] registers your DoFn to optimize execution at runtime. +// DoFn10x3[I0, I1, I2, I3, I4, I5, I6, I7, I8, I9, R0, R1, R2 any] registers your structural DoFn to optimize execution at runtime. // DoFn input and output parameter types should be provided in order as the generic constraints. func DoFn10x3[I0, I1, I2, I3, I4, I5, I6, I7, I8, I9, R0, R1, R2 any](doFn genericDoFn10x3[I0, I1, I2, I3, I4, I5, I6, I7, I8, I9, R0, R1, R2]) { registerDoFnTypes(doFn) registerDoFn10x3StructWrappersAndFuncs[I0, I1, I2, I3, I4, I5, I6, I7, I8, I9, R0, R1, R2](doFn) } +// Function10x3[I0, I1, I2, I3, I4, I5, I6, I7, I8, I9, R0, R1, R2 any] registers your functional DoFn to optimize execution at runtime. +// Function input and output parameter types should be provided in order as the generic constraints. +func Function10x3[I0, I1, I2, I3, I4, I5, I6, I7, I8, I9, R0, R1, R2 any](doFn func(I0, I1, I2, I3, I4, I5, I6, I7, I8, I9) (R0, R1, R2)) { + runtime.RegisterFunction(doFn) + registerMethodTypes(reflect.TypeOf(doFn)) + caller := func(fn interface{}) reflectx.Func { + f := fn.(func(I0, I1, I2, I3, I4, I5, I6, I7, I8, I9) (R0, R1, R2)) + return &caller10x3[I0, I1, I2, I3, I4, I5, I6, I7, I8, I9, R0, R1, R2]{fn: f} + } + reflectx.RegisterFunc(reflect.TypeOf((*func(I0, I1, I2, I3, I4, I5, I6, I7, I8, I9) (R0, R1, R2))(nil)).Elem(), caller) +} + type genericDoFn0x4[R0, R1, R2, R3 any] interface { ProcessElement() (R0, R1, R2, R3) } @@ -5373,13 +5900,25 @@ func registerDoFn0x4StructWrappersAndFuncs[R0, R1, R2, R3 any](doFn genericDoFn0 reflectx.RegisterStructWrapper(reflect.TypeOf(doFn).Elem(), wrapperFn) } -// DoFn0x4[R0, R1, R2, R3 any] registers your DoFn to optimize execution at runtime. +// DoFn0x4[R0, R1, R2, R3 any] registers your structural DoFn to optimize execution at runtime. // DoFn input and output parameter types should be provided in order as the generic constraints. 
func DoFn0x4[R0, R1, R2, R3 any](doFn genericDoFn0x4[R0, R1, R2, R3]) { registerDoFnTypes(doFn) registerDoFn0x4StructWrappersAndFuncs[R0, R1, R2, R3](doFn) } +// Function0x4[R0, R1, R2, R3 any] registers your functional DoFn to optimize execution at runtime. +// Function input and output parameter types should be provided in order as the generic constraints. +func Function0x4[R0, R1, R2, R3 any](doFn func() (R0, R1, R2, R3)) { + runtime.RegisterFunction(doFn) + registerMethodTypes(reflect.TypeOf(doFn)) + caller := func(fn interface{}) reflectx.Func { + f := fn.(func() (R0, R1, R2, R3)) + return &caller0x4[R0, R1, R2, R3]{fn: f} + } + reflectx.RegisterFunc(reflect.TypeOf((*func() (R0, R1, R2, R3))(nil)).Elem(), caller) +} + type genericDoFn1x4[I0, R0, R1, R2, R3 any] interface { ProcessElement(i0 I0) (R0, R1, R2, R3) } @@ -5401,8 +5940,8 @@ func (c *caller1x4[I0, R0, R1, R2, R3]) Call(args []interface{}) []interface{} { return []interface{}{out0, out1, out2, out3} } -func (c *caller1x4[I0, R0, R1, R2, R3]) Call1x4(arg0 I0) (interface{}, interface{}, interface{}, interface{}) { - return c.fn(arg0) +func (c *caller1x4[I0, R0, R1, R2, R3]) Call1x4(arg0 interface{}) (interface{}, interface{}, interface{}, interface{}) { + return c.fn(arg0.(I0)) } func registerDoFn1x4StructWrappersAndFuncs[I0, R0, R1, R2, R3 any](doFn genericDoFn1x4[I0, R0, R1, R2, R3]) { @@ -5444,13 +5983,25 @@ func registerDoFn1x4StructWrappersAndFuncs[I0, R0, R1, R2, R3 any](doFn genericD reflectx.RegisterStructWrapper(reflect.TypeOf(doFn).Elem(), wrapperFn) } -// DoFn1x4[I0, R0, R1, R2, R3 any] registers your DoFn to optimize execution at runtime. +// DoFn1x4[I0, R0, R1, R2, R3 any] registers your structural DoFn to optimize execution at runtime. // DoFn input and output parameter types should be provided in order as the generic constraints. 
func DoFn1x4[I0, R0, R1, R2, R3 any](doFn genericDoFn1x4[I0, R0, R1, R2, R3]) { registerDoFnTypes(doFn) registerDoFn1x4StructWrappersAndFuncs[I0, R0, R1, R2, R3](doFn) } +// Function1x4[I0, R0, R1, R2, R3 any] registers your functional DoFn to optimize execution at runtime. +// Function input and output parameter types should be provided in order as the generic constraints. +func Function1x4[I0, R0, R1, R2, R3 any](doFn func(I0) (R0, R1, R2, R3)) { + runtime.RegisterFunction(doFn) + registerMethodTypes(reflect.TypeOf(doFn)) + caller := func(fn interface{}) reflectx.Func { + f := fn.(func(I0) (R0, R1, R2, R3)) + return &caller1x4[I0, R0, R1, R2, R3]{fn: f} + } + reflectx.RegisterFunc(reflect.TypeOf((*func(I0) (R0, R1, R2, R3))(nil)).Elem(), caller) +} + type genericDoFn2x4[I0, I1, R0, R1, R2, R3 any] interface { ProcessElement(i0 I0, i1 I1) (R0, R1, R2, R3) } @@ -5472,8 +6023,8 @@ func (c *caller2x4[I0, I1, R0, R1, R2, R3]) Call(args []interface{}) []interface return []interface{}{out0, out1, out2, out3} } -func (c *caller2x4[I0, I1, R0, R1, R2, R3]) Call2x4(arg0 I0, arg1 I1) (interface{}, interface{}, interface{}, interface{}) { - return c.fn(arg0, arg1) +func (c *caller2x4[I0, I1, R0, R1, R2, R3]) Call2x4(arg0 interface{}, arg1 interface{}) (interface{}, interface{}, interface{}, interface{}) { + return c.fn(arg0.(I0), arg1.(I1)) } func registerDoFn2x4StructWrappersAndFuncs[I0, I1, R0, R1, R2, R3 any](doFn genericDoFn2x4[I0, I1, R0, R1, R2, R3]) { @@ -5515,13 +6066,25 @@ func registerDoFn2x4StructWrappersAndFuncs[I0, I1, R0, R1, R2, R3 any](doFn gene reflectx.RegisterStructWrapper(reflect.TypeOf(doFn).Elem(), wrapperFn) } -// DoFn2x4[I0, I1, R0, R1, R2, R3 any] registers your DoFn to optimize execution at runtime. +// DoFn2x4[I0, I1, R0, R1, R2, R3 any] registers your structural DoFn to optimize execution at runtime. // DoFn input and output parameter types should be provided in order as the generic constraints. 
func DoFn2x4[I0, I1, R0, R1, R2, R3 any](doFn genericDoFn2x4[I0, I1, R0, R1, R2, R3]) { registerDoFnTypes(doFn) registerDoFn2x4StructWrappersAndFuncs[I0, I1, R0, R1, R2, R3](doFn) } +// Function2x4[I0, I1, R0, R1, R2, R3 any] registers your functional DoFn to optimize execution at runtime. +// Function input and output parameter types should be provided in order as the generic constraints. +func Function2x4[I0, I1, R0, R1, R2, R3 any](doFn func(I0, I1) (R0, R1, R2, R3)) { + runtime.RegisterFunction(doFn) + registerMethodTypes(reflect.TypeOf(doFn)) + caller := func(fn interface{}) reflectx.Func { + f := fn.(func(I0, I1) (R0, R1, R2, R3)) + return &caller2x4[I0, I1, R0, R1, R2, R3]{fn: f} + } + reflectx.RegisterFunc(reflect.TypeOf((*func(I0, I1) (R0, R1, R2, R3))(nil)).Elem(), caller) +} + type genericDoFn3x4[I0, I1, I2, R0, R1, R2, R3 any] interface { ProcessElement(i0 I0, i1 I1, i2 I2) (R0, R1, R2, R3) } @@ -5543,8 +6106,8 @@ func (c *caller3x4[I0, I1, I2, R0, R1, R2, R3]) Call(args []interface{}) []inter return []interface{}{out0, out1, out2, out3} } -func (c *caller3x4[I0, I1, I2, R0, R1, R2, R3]) Call3x4(arg0 I0, arg1 I1, arg2 I2) (interface{}, interface{}, interface{}, interface{}) { - return c.fn(arg0, arg1, arg2) +func (c *caller3x4[I0, I1, I2, R0, R1, R2, R3]) Call3x4(arg0 interface{}, arg1 interface{}, arg2 interface{}) (interface{}, interface{}, interface{}, interface{}) { + return c.fn(arg0.(I0), arg1.(I1), arg2.(I2)) } func registerDoFn3x4StructWrappersAndFuncs[I0, I1, I2, R0, R1, R2, R3 any](doFn genericDoFn3x4[I0, I1, I2, R0, R1, R2, R3]) { @@ -5586,13 +6149,25 @@ func registerDoFn3x4StructWrappersAndFuncs[I0, I1, I2, R0, R1, R2, R3 any](doFn reflectx.RegisterStructWrapper(reflect.TypeOf(doFn).Elem(), wrapperFn) } -// DoFn3x4[I0, I1, I2, R0, R1, R2, R3 any] registers your DoFn to optimize execution at runtime. +// DoFn3x4[I0, I1, I2, R0, R1, R2, R3 any] registers your structural DoFn to optimize execution at runtime. 
// DoFn input and output parameter types should be provided in order as the generic constraints. func DoFn3x4[I0, I1, I2, R0, R1, R2, R3 any](doFn genericDoFn3x4[I0, I1, I2, R0, R1, R2, R3]) { registerDoFnTypes(doFn) registerDoFn3x4StructWrappersAndFuncs[I0, I1, I2, R0, R1, R2, R3](doFn) } +// Function3x4[I0, I1, I2, R0, R1, R2, R3 any] registers your functional DoFn to optimize execution at runtime. +// Function input and output parameter types should be provided in order as the generic constraints. +func Function3x4[I0, I1, I2, R0, R1, R2, R3 any](doFn func(I0, I1, I2) (R0, R1, R2, R3)) { + runtime.RegisterFunction(doFn) + registerMethodTypes(reflect.TypeOf(doFn)) + caller := func(fn interface{}) reflectx.Func { + f := fn.(func(I0, I1, I2) (R0, R1, R2, R3)) + return &caller3x4[I0, I1, I2, R0, R1, R2, R3]{fn: f} + } + reflectx.RegisterFunc(reflect.TypeOf((*func(I0, I1, I2) (R0, R1, R2, R3))(nil)).Elem(), caller) +} + type genericDoFn4x4[I0, I1, I2, I3, R0, R1, R2, R3 any] interface { ProcessElement(i0 I0, i1 I1, i2 I2, i3 I3) (R0, R1, R2, R3) } @@ -5614,8 +6189,8 @@ func (c *caller4x4[I0, I1, I2, I3, R0, R1, R2, R3]) Call(args []interface{}) []i return []interface{}{out0, out1, out2, out3} } -func (c *caller4x4[I0, I1, I2, I3, R0, R1, R2, R3]) Call4x4(arg0 I0, arg1 I1, arg2 I2, arg3 I3) (interface{}, interface{}, interface{}, interface{}) { - return c.fn(arg0, arg1, arg2, arg3) +func (c *caller4x4[I0, I1, I2, I3, R0, R1, R2, R3]) Call4x4(arg0 interface{}, arg1 interface{}, arg2 interface{}, arg3 interface{}) (interface{}, interface{}, interface{}, interface{}) { + return c.fn(arg0.(I0), arg1.(I1), arg2.(I2), arg3.(I3)) } func registerDoFn4x4StructWrappersAndFuncs[I0, I1, I2, I3, R0, R1, R2, R3 any](doFn genericDoFn4x4[I0, I1, I2, I3, R0, R1, R2, R3]) { @@ -5657,13 +6232,25 @@ func registerDoFn4x4StructWrappersAndFuncs[I0, I1, I2, I3, R0, R1, R2, R3 any](d reflectx.RegisterStructWrapper(reflect.TypeOf(doFn).Elem(), wrapperFn) } -// DoFn4x4[I0, I1, I2, I3, R0, R1, 
R2, R3 any] registers your DoFn to optimize execution at runtime. +// DoFn4x4[I0, I1, I2, I3, R0, R1, R2, R3 any] registers your structural DoFn to optimize execution at runtime. // DoFn input and output parameter types should be provided in order as the generic constraints. func DoFn4x4[I0, I1, I2, I3, R0, R1, R2, R3 any](doFn genericDoFn4x4[I0, I1, I2, I3, R0, R1, R2, R3]) { registerDoFnTypes(doFn) registerDoFn4x4StructWrappersAndFuncs[I0, I1, I2, I3, R0, R1, R2, R3](doFn) } +// Function4x4[I0, I1, I2, I3, R0, R1, R2, R3 any] registers your functional DoFn to optimize execution at runtime. +// Function input and output parameter types should be provided in order as the generic constraints. +func Function4x4[I0, I1, I2, I3, R0, R1, R2, R3 any](doFn func(I0, I1, I2, I3) (R0, R1, R2, R3)) { + runtime.RegisterFunction(doFn) + registerMethodTypes(reflect.TypeOf(doFn)) + caller := func(fn interface{}) reflectx.Func { + f := fn.(func(I0, I1, I2, I3) (R0, R1, R2, R3)) + return &caller4x4[I0, I1, I2, I3, R0, R1, R2, R3]{fn: f} + } + reflectx.RegisterFunc(reflect.TypeOf((*func(I0, I1, I2, I3) (R0, R1, R2, R3))(nil)).Elem(), caller) +} + type genericDoFn5x4[I0, I1, I2, I3, I4, R0, R1, R2, R3 any] interface { ProcessElement(i0 I0, i1 I1, i2 I2, i3 I3, i4 I4) (R0, R1, R2, R3) } @@ -5685,8 +6272,8 @@ func (c *caller5x4[I0, I1, I2, I3, I4, R0, R1, R2, R3]) Call(args []interface{}) return []interface{}{out0, out1, out2, out3} } -func (c *caller5x4[I0, I1, I2, I3, I4, R0, R1, R2, R3]) Call5x4(arg0 I0, arg1 I1, arg2 I2, arg3 I3, arg4 I4) (interface{}, interface{}, interface{}, interface{}) { - return c.fn(arg0, arg1, arg2, arg3, arg4) +func (c *caller5x4[I0, I1, I2, I3, I4, R0, R1, R2, R3]) Call5x4(arg0 interface{}, arg1 interface{}, arg2 interface{}, arg3 interface{}, arg4 interface{}) (interface{}, interface{}, interface{}, interface{}) { + return c.fn(arg0.(I0), arg1.(I1), arg2.(I2), arg3.(I3), arg4.(I4)) } func registerDoFn5x4StructWrappersAndFuncs[I0, I1, I2, I3, I4, R0, R1, 
R2, R3 any](doFn genericDoFn5x4[I0, I1, I2, I3, I4, R0, R1, R2, R3]) { @@ -5728,13 +6315,25 @@ func registerDoFn5x4StructWrappersAndFuncs[I0, I1, I2, I3, I4, R0, R1, R2, R3 an reflectx.RegisterStructWrapper(reflect.TypeOf(doFn).Elem(), wrapperFn) } -// DoFn5x4[I0, I1, I2, I3, I4, R0, R1, R2, R3 any] registers your DoFn to optimize execution at runtime. +// DoFn5x4[I0, I1, I2, I3, I4, R0, R1, R2, R3 any] registers your structural DoFn to optimize execution at runtime. // DoFn input and output parameter types should be provided in order as the generic constraints. func DoFn5x4[I0, I1, I2, I3, I4, R0, R1, R2, R3 any](doFn genericDoFn5x4[I0, I1, I2, I3, I4, R0, R1, R2, R3]) { registerDoFnTypes(doFn) registerDoFn5x4StructWrappersAndFuncs[I0, I1, I2, I3, I4, R0, R1, R2, R3](doFn) } +// Function5x4[I0, I1, I2, I3, I4, R0, R1, R2, R3 any] registers your functional DoFn to optimize execution at runtime. +// Function input and output parameter types should be provided in order as the generic constraints. 
+func Function5x4[I0, I1, I2, I3, I4, R0, R1, R2, R3 any](doFn func(I0, I1, I2, I3, I4) (R0, R1, R2, R3)) { + runtime.RegisterFunction(doFn) + registerMethodTypes(reflect.TypeOf(doFn)) + caller := func(fn interface{}) reflectx.Func { + f := fn.(func(I0, I1, I2, I3, I4) (R0, R1, R2, R3)) + return &caller5x4[I0, I1, I2, I3, I4, R0, R1, R2, R3]{fn: f} + } + reflectx.RegisterFunc(reflect.TypeOf((*func(I0, I1, I2, I3, I4) (R0, R1, R2, R3))(nil)).Elem(), caller) +} + type genericDoFn6x4[I0, I1, I2, I3, I4, I5, R0, R1, R2, R3 any] interface { ProcessElement(i0 I0, i1 I1, i2 I2, i3 I3, i4 I4, i5 I5) (R0, R1, R2, R3) } @@ -5756,8 +6355,8 @@ func (c *caller6x4[I0, I1, I2, I3, I4, I5, R0, R1, R2, R3]) Call(args []interfac return []interface{}{out0, out1, out2, out3} } -func (c *caller6x4[I0, I1, I2, I3, I4, I5, R0, R1, R2, R3]) Call6x4(arg0 I0, arg1 I1, arg2 I2, arg3 I3, arg4 I4, arg5 I5) (interface{}, interface{}, interface{}, interface{}) { - return c.fn(arg0, arg1, arg2, arg3, arg4, arg5) +func (c *caller6x4[I0, I1, I2, I3, I4, I5, R0, R1, R2, R3]) Call6x4(arg0 interface{}, arg1 interface{}, arg2 interface{}, arg3 interface{}, arg4 interface{}, arg5 interface{}) (interface{}, interface{}, interface{}, interface{}) { + return c.fn(arg0.(I0), arg1.(I1), arg2.(I2), arg3.(I3), arg4.(I4), arg5.(I5)) } func registerDoFn6x4StructWrappersAndFuncs[I0, I1, I2, I3, I4, I5, R0, R1, R2, R3 any](doFn genericDoFn6x4[I0, I1, I2, I3, I4, I5, R0, R1, R2, R3]) { @@ -5799,13 +6398,25 @@ func registerDoFn6x4StructWrappersAndFuncs[I0, I1, I2, I3, I4, I5, R0, R1, R2, R reflectx.RegisterStructWrapper(reflect.TypeOf(doFn).Elem(), wrapperFn) } -// DoFn6x4[I0, I1, I2, I3, I4, I5, R0, R1, R2, R3 any] registers your DoFn to optimize execution at runtime. +// DoFn6x4[I0, I1, I2, I3, I4, I5, R0, R1, R2, R3 any] registers your structural DoFn to optimize execution at runtime. // DoFn input and output parameter types should be provided in order as the generic constraints. 
func DoFn6x4[I0, I1, I2, I3, I4, I5, R0, R1, R2, R3 any](doFn genericDoFn6x4[I0, I1, I2, I3, I4, I5, R0, R1, R2, R3]) { registerDoFnTypes(doFn) registerDoFn6x4StructWrappersAndFuncs[I0, I1, I2, I3, I4, I5, R0, R1, R2, R3](doFn) } +// Function6x4[I0, I1, I2, I3, I4, I5, R0, R1, R2, R3 any] registers your functional DoFn to optimize execution at runtime. +// Function input and output parameter types should be provided in order as the generic constraints. +func Function6x4[I0, I1, I2, I3, I4, I5, R0, R1, R2, R3 any](doFn func(I0, I1, I2, I3, I4, I5) (R0, R1, R2, R3)) { + runtime.RegisterFunction(doFn) + registerMethodTypes(reflect.TypeOf(doFn)) + caller := func(fn interface{}) reflectx.Func { + f := fn.(func(I0, I1, I2, I3, I4, I5) (R0, R1, R2, R3)) + return &caller6x4[I0, I1, I2, I3, I4, I5, R0, R1, R2, R3]{fn: f} + } + reflectx.RegisterFunc(reflect.TypeOf((*func(I0, I1, I2, I3, I4, I5) (R0, R1, R2, R3))(nil)).Elem(), caller) +} + type genericDoFn7x4[I0, I1, I2, I3, I4, I5, I6, R0, R1, R2, R3 any] interface { ProcessElement(i0 I0, i1 I1, i2 I2, i3 I3, i4 I4, i5 I5, i6 I6) (R0, R1, R2, R3) } @@ -5827,8 +6438,8 @@ func (c *caller7x4[I0, I1, I2, I3, I4, I5, I6, R0, R1, R2, R3]) Call(args []inte return []interface{}{out0, out1, out2, out3} } -func (c *caller7x4[I0, I1, I2, I3, I4, I5, I6, R0, R1, R2, R3]) Call7x4(arg0 I0, arg1 I1, arg2 I2, arg3 I3, arg4 I4, arg5 I5, arg6 I6) (interface{}, interface{}, interface{}, interface{}) { - return c.fn(arg0, arg1, arg2, arg3, arg4, arg5, arg6) +func (c *caller7x4[I0, I1, I2, I3, I4, I5, I6, R0, R1, R2, R3]) Call7x4(arg0 interface{}, arg1 interface{}, arg2 interface{}, arg3 interface{}, arg4 interface{}, arg5 interface{}, arg6 interface{}) (interface{}, interface{}, interface{}, interface{}) { + return c.fn(arg0.(I0), arg1.(I1), arg2.(I2), arg3.(I3), arg4.(I4), arg5.(I5), arg6.(I6)) } func registerDoFn7x4StructWrappersAndFuncs[I0, I1, I2, I3, I4, I5, I6, R0, R1, R2, R3 any](doFn genericDoFn7x4[I0, I1, I2, I3, I4, I5, I6, R0, R1, 
R2, R3]) { @@ -5870,13 +6481,25 @@ func registerDoFn7x4StructWrappersAndFuncs[I0, I1, I2, I3, I4, I5, I6, R0, R1, R reflectx.RegisterStructWrapper(reflect.TypeOf(doFn).Elem(), wrapperFn) } -// DoFn7x4[I0, I1, I2, I3, I4, I5, I6, R0, R1, R2, R3 any] registers your DoFn to optimize execution at runtime. +// DoFn7x4[I0, I1, I2, I3, I4, I5, I6, R0, R1, R2, R3 any] registers your structural DoFn to optimize execution at runtime. // DoFn input and output parameter types should be provided in order as the generic constraints. func DoFn7x4[I0, I1, I2, I3, I4, I5, I6, R0, R1, R2, R3 any](doFn genericDoFn7x4[I0, I1, I2, I3, I4, I5, I6, R0, R1, R2, R3]) { registerDoFnTypes(doFn) registerDoFn7x4StructWrappersAndFuncs[I0, I1, I2, I3, I4, I5, I6, R0, R1, R2, R3](doFn) } +// Function7x4[I0, I1, I2, I3, I4, I5, I6, R0, R1, R2, R3 any] registers your functional DoFn to optimize execution at runtime. +// Function input and output parameter types should be provided in order as the generic constraints. +func Function7x4[I0, I1, I2, I3, I4, I5, I6, R0, R1, R2, R3 any](doFn func(I0, I1, I2, I3, I4, I5, I6) (R0, R1, R2, R3)) { + runtime.RegisterFunction(doFn) + registerMethodTypes(reflect.TypeOf(doFn)) + caller := func(fn interface{}) reflectx.Func { + f := fn.(func(I0, I1, I2, I3, I4, I5, I6) (R0, R1, R2, R3)) + return &caller7x4[I0, I1, I2, I3, I4, I5, I6, R0, R1, R2, R3]{fn: f} + } + reflectx.RegisterFunc(reflect.TypeOf((*func(I0, I1, I2, I3, I4, I5, I6) (R0, R1, R2, R3))(nil)).Elem(), caller) +} + type genericDoFn8x4[I0, I1, I2, I3, I4, I5, I6, I7, R0, R1, R2, R3 any] interface { ProcessElement(i0 I0, i1 I1, i2 I2, i3 I3, i4 I4, i5 I5, i6 I6, i7 I7) (R0, R1, R2, R3) } @@ -5898,8 +6521,8 @@ func (c *caller8x4[I0, I1, I2, I3, I4, I5, I6, I7, R0, R1, R2, R3]) Call(args [] return []interface{}{out0, out1, out2, out3} } -func (c *caller8x4[I0, I1, I2, I3, I4, I5, I6, I7, R0, R1, R2, R3]) Call8x4(arg0 I0, arg1 I1, arg2 I2, arg3 I3, arg4 I4, arg5 I5, arg6 I6, arg7 I7) (interface{}, 
interface{}, interface{}, interface{}) { - return c.fn(arg0, arg1, arg2, arg3, arg4, arg5, arg6, arg7) +func (c *caller8x4[I0, I1, I2, I3, I4, I5, I6, I7, R0, R1, R2, R3]) Call8x4(arg0 interface{}, arg1 interface{}, arg2 interface{}, arg3 interface{}, arg4 interface{}, arg5 interface{}, arg6 interface{}, arg7 interface{}) (interface{}, interface{}, interface{}, interface{}) { + return c.fn(arg0.(I0), arg1.(I1), arg2.(I2), arg3.(I3), arg4.(I4), arg5.(I5), arg6.(I6), arg7.(I7)) } func registerDoFn8x4StructWrappersAndFuncs[I0, I1, I2, I3, I4, I5, I6, I7, R0, R1, R2, R3 any](doFn genericDoFn8x4[I0, I1, I2, I3, I4, I5, I6, I7, R0, R1, R2, R3]) { @@ -5941,13 +6564,25 @@ func registerDoFn8x4StructWrappersAndFuncs[I0, I1, I2, I3, I4, I5, I6, I7, R0, R reflectx.RegisterStructWrapper(reflect.TypeOf(doFn).Elem(), wrapperFn) } -// DoFn8x4[I0, I1, I2, I3, I4, I5, I6, I7, R0, R1, R2, R3 any] registers your DoFn to optimize execution at runtime. +// DoFn8x4[I0, I1, I2, I3, I4, I5, I6, I7, R0, R1, R2, R3 any] registers your structural DoFn to optimize execution at runtime. // DoFn input and output parameter types should be provided in order as the generic constraints. func DoFn8x4[I0, I1, I2, I3, I4, I5, I6, I7, R0, R1, R2, R3 any](doFn genericDoFn8x4[I0, I1, I2, I3, I4, I5, I6, I7, R0, R1, R2, R3]) { registerDoFnTypes(doFn) registerDoFn8x4StructWrappersAndFuncs[I0, I1, I2, I3, I4, I5, I6, I7, R0, R1, R2, R3](doFn) } +// Function8x4[I0, I1, I2, I3, I4, I5, I6, I7, R0, R1, R2, R3 any] registers your functional DoFn to optimize execution at runtime. +// Function input and output parameter types should be provided in order as the generic constraints. 
+func Function8x4[I0, I1, I2, I3, I4, I5, I6, I7, R0, R1, R2, R3 any](doFn func(I0, I1, I2, I3, I4, I5, I6, I7) (R0, R1, R2, R3)) { + runtime.RegisterFunction(doFn) + registerMethodTypes(reflect.TypeOf(doFn)) + caller := func(fn interface{}) reflectx.Func { + f := fn.(func(I0, I1, I2, I3, I4, I5, I6, I7) (R0, R1, R2, R3)) + return &caller8x4[I0, I1, I2, I3, I4, I5, I6, I7, R0, R1, R2, R3]{fn: f} + } + reflectx.RegisterFunc(reflect.TypeOf((*func(I0, I1, I2, I3, I4, I5, I6, I7) (R0, R1, R2, R3))(nil)).Elem(), caller) +} + type genericDoFn9x4[I0, I1, I2, I3, I4, I5, I6, I7, I8, R0, R1, R2, R3 any] interface { ProcessElement(i0 I0, i1 I1, i2 I2, i3 I3, i4 I4, i5 I5, i6 I6, i7 I7, i8 I8) (R0, R1, R2, R3) } @@ -5969,8 +6604,8 @@ func (c *caller9x4[I0, I1, I2, I3, I4, I5, I6, I7, I8, R0, R1, R2, R3]) Call(arg return []interface{}{out0, out1, out2, out3} } -func (c *caller9x4[I0, I1, I2, I3, I4, I5, I6, I7, I8, R0, R1, R2, R3]) Call9x4(arg0 I0, arg1 I1, arg2 I2, arg3 I3, arg4 I4, arg5 I5, arg6 I6, arg7 I7, arg8 I8) (interface{}, interface{}, interface{}, interface{}) { - return c.fn(arg0, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8) +func (c *caller9x4[I0, I1, I2, I3, I4, I5, I6, I7, I8, R0, R1, R2, R3]) Call9x4(arg0 interface{}, arg1 interface{}, arg2 interface{}, arg3 interface{}, arg4 interface{}, arg5 interface{}, arg6 interface{}, arg7 interface{}, arg8 interface{}) (interface{}, interface{}, interface{}, interface{}) { + return c.fn(arg0.(I0), arg1.(I1), arg2.(I2), arg3.(I3), arg4.(I4), arg5.(I5), arg6.(I6), arg7.(I7), arg8.(I8)) } func registerDoFn9x4StructWrappersAndFuncs[I0, I1, I2, I3, I4, I5, I6, I7, I8, R0, R1, R2, R3 any](doFn genericDoFn9x4[I0, I1, I2, I3, I4, I5, I6, I7, I8, R0, R1, R2, R3]) { @@ -6012,13 +6647,25 @@ func registerDoFn9x4StructWrappersAndFuncs[I0, I1, I2, I3, I4, I5, I6, I7, I8, R reflectx.RegisterStructWrapper(reflect.TypeOf(doFn).Elem(), wrapperFn) } -// DoFn9x4[I0, I1, I2, I3, I4, I5, I6, I7, I8, R0, R1, R2, R3 any] registers your DoFn 
to optimize execution at runtime. +// DoFn9x4[I0, I1, I2, I3, I4, I5, I6, I7, I8, R0, R1, R2, R3 any] registers your structural DoFn to optimize execution at runtime. // DoFn input and output parameter types should be provided in order as the generic constraints. func DoFn9x4[I0, I1, I2, I3, I4, I5, I6, I7, I8, R0, R1, R2, R3 any](doFn genericDoFn9x4[I0, I1, I2, I3, I4, I5, I6, I7, I8, R0, R1, R2, R3]) { registerDoFnTypes(doFn) registerDoFn9x4StructWrappersAndFuncs[I0, I1, I2, I3, I4, I5, I6, I7, I8, R0, R1, R2, R3](doFn) } +// Function9x4[I0, I1, I2, I3, I4, I5, I6, I7, I8, R0, R1, R2, R3 any] registers your functional DoFn to optimize execution at runtime. +// Function input and output parameter types should be provided in order as the generic constraints. +func Function9x4[I0, I1, I2, I3, I4, I5, I6, I7, I8, R0, R1, R2, R3 any](doFn func(I0, I1, I2, I3, I4, I5, I6, I7, I8) (R0, R1, R2, R3)) { + runtime.RegisterFunction(doFn) + registerMethodTypes(reflect.TypeOf(doFn)) + caller := func(fn interface{}) reflectx.Func { + f := fn.(func(I0, I1, I2, I3, I4, I5, I6, I7, I8) (R0, R1, R2, R3)) + return &caller9x4[I0, I1, I2, I3, I4, I5, I6, I7, I8, R0, R1, R2, R3]{fn: f} + } + reflectx.RegisterFunc(reflect.TypeOf((*func(I0, I1, I2, I3, I4, I5, I6, I7, I8) (R0, R1, R2, R3))(nil)).Elem(), caller) +} + type genericDoFn10x4[I0, I1, I2, I3, I4, I5, I6, I7, I8, I9, R0, R1, R2, R3 any] interface { ProcessElement(i0 I0, i1 I1, i2 I2, i3 I3, i4 I4, i5 I5, i6 I6, i7 I7, i8 I8, i9 I9) (R0, R1, R2, R3) } @@ -6040,8 +6687,8 @@ func (c *caller10x4[I0, I1, I2, I3, I4, I5, I6, I7, I8, I9, R0, R1, R2, R3]) Cal return []interface{}{out0, out1, out2, out3} } -func (c *caller10x4[I0, I1, I2, I3, I4, I5, I6, I7, I8, I9, R0, R1, R2, R3]) Call10x4(arg0 I0, arg1 I1, arg2 I2, arg3 I3, arg4 I4, arg5 I5, arg6 I6, arg7 I7, arg8 I8, arg9 I9) (interface{}, interface{}, interface{}, interface{}) { - return c.fn(arg0, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9) +func (c *caller10x4[I0, I1, 
I2, I3, I4, I5, I6, I7, I8, I9, R0, R1, R2, R3]) Call10x4(arg0 interface{}, arg1 interface{}, arg2 interface{}, arg3 interface{}, arg4 interface{}, arg5 interface{}, arg6 interface{}, arg7 interface{}, arg8 interface{}, arg9 interface{}) (interface{}, interface{}, interface{}, interface{}) { + return c.fn(arg0.(I0), arg1.(I1), arg2.(I2), arg3.(I3), arg4.(I4), arg5.(I5), arg6.(I6), arg7.(I7), arg8.(I8), arg9.(I9)) } func registerDoFn10x4StructWrappersAndFuncs[I0, I1, I2, I3, I4, I5, I6, I7, I8, I9, R0, R1, R2, R3 any](doFn genericDoFn10x4[I0, I1, I2, I3, I4, I5, I6, I7, I8, I9, R0, R1, R2, R3]) { @@ -6083,13 +6730,25 @@ func registerDoFn10x4StructWrappersAndFuncs[I0, I1, I2, I3, I4, I5, I6, I7, I8, reflectx.RegisterStructWrapper(reflect.TypeOf(doFn).Elem(), wrapperFn) } -// DoFn10x4[I0, I1, I2, I3, I4, I5, I6, I7, I8, I9, R0, R1, R2, R3 any] registers your DoFn to optimize execution at runtime. +// DoFn10x4[I0, I1, I2, I3, I4, I5, I6, I7, I8, I9, R0, R1, R2, R3 any] registers your structural DoFn to optimize execution at runtime. // DoFn input and output parameter types should be provided in order as the generic constraints. func DoFn10x4[I0, I1, I2, I3, I4, I5, I6, I7, I8, I9, R0, R1, R2, R3 any](doFn genericDoFn10x4[I0, I1, I2, I3, I4, I5, I6, I7, I8, I9, R0, R1, R2, R3]) { registerDoFnTypes(doFn) registerDoFn10x4StructWrappersAndFuncs[I0, I1, I2, I3, I4, I5, I6, I7, I8, I9, R0, R1, R2, R3](doFn) } +// Function10x4[I0, I1, I2, I3, I4, I5, I6, I7, I8, I9, R0, R1, R2, R3 any] registers your functional DoFn to optimize execution at runtime. +// Function input and output parameter types should be provided in order as the generic constraints. 
+func Function10x4[I0, I1, I2, I3, I4, I5, I6, I7, I8, I9, R0, R1, R2, R3 any](doFn func(I0, I1, I2, I3, I4, I5, I6, I7, I8, I9) (R0, R1, R2, R3)) { + runtime.RegisterFunction(doFn) + registerMethodTypes(reflect.TypeOf(doFn)) + caller := func(fn interface{}) reflectx.Func { + f := fn.(func(I0, I1, I2, I3, I4, I5, I6, I7, I8, I9) (R0, R1, R2, R3)) + return &caller10x4[I0, I1, I2, I3, I4, I5, I6, I7, I8, I9, R0, R1, R2, R3]{fn: f} + } + reflectx.RegisterFunc(reflect.TypeOf((*func(I0, I1, I2, I3, I4, I5, I6, I7, I8, I9) (R0, R1, R2, R3))(nil)).Elem(), caller) +} + type genericDoFn0x5[R0, R1, R2, R3, R4 any] interface { ProcessElement() (R0, R1, R2, R3, R4) } @@ -6154,13 +6813,25 @@ func registerDoFn0x5StructWrappersAndFuncs[R0, R1, R2, R3, R4 any](doFn genericD reflectx.RegisterStructWrapper(reflect.TypeOf(doFn).Elem(), wrapperFn) } -// DoFn0x5[R0, R1, R2, R3, R4 any] registers your DoFn to optimize execution at runtime. +// DoFn0x5[R0, R1, R2, R3, R4 any] registers your structural DoFn to optimize execution at runtime. // DoFn input and output parameter types should be provided in order as the generic constraints. func DoFn0x5[R0, R1, R2, R3, R4 any](doFn genericDoFn0x5[R0, R1, R2, R3, R4]) { registerDoFnTypes(doFn) registerDoFn0x5StructWrappersAndFuncs[R0, R1, R2, R3, R4](doFn) } +// Function0x5[R0, R1, R2, R3, R4 any] registers your functional DoFn to optimize execution at runtime. +// Function input and output parameter types should be provided in order as the generic constraints. 
+func Function0x5[R0, R1, R2, R3, R4 any](doFn func() (R0, R1, R2, R3, R4)) { + runtime.RegisterFunction(doFn) + registerMethodTypes(reflect.TypeOf(doFn)) + caller := func(fn interface{}) reflectx.Func { + f := fn.(func() (R0, R1, R2, R3, R4)) + return &caller0x5[R0, R1, R2, R3, R4]{fn: f} + } + reflectx.RegisterFunc(reflect.TypeOf((*func() (R0, R1, R2, R3, R4))(nil)).Elem(), caller) +} + type genericDoFn1x5[I0, R0, R1, R2, R3, R4 any] interface { ProcessElement(i0 I0) (R0, R1, R2, R3, R4) } @@ -6182,8 +6853,8 @@ func (c *caller1x5[I0, R0, R1, R2, R3, R4]) Call(args []interface{}) []interface return []interface{}{out0, out1, out2, out3, out4} } -func (c *caller1x5[I0, R0, R1, R2, R3, R4]) Call1x5(arg0 I0) (interface{}, interface{}, interface{}, interface{}, interface{}) { - return c.fn(arg0) +func (c *caller1x5[I0, R0, R1, R2, R3, R4]) Call1x5(arg0 interface{}) (interface{}, interface{}, interface{}, interface{}, interface{}) { + return c.fn(arg0.(I0)) } func registerDoFn1x5StructWrappersAndFuncs[I0, R0, R1, R2, R3, R4 any](doFn genericDoFn1x5[I0, R0, R1, R2, R3, R4]) { @@ -6225,13 +6896,25 @@ func registerDoFn1x5StructWrappersAndFuncs[I0, R0, R1, R2, R3, R4 any](doFn gene reflectx.RegisterStructWrapper(reflect.TypeOf(doFn).Elem(), wrapperFn) } -// DoFn1x5[I0, R0, R1, R2, R3, R4 any] registers your DoFn to optimize execution at runtime. +// DoFn1x5[I0, R0, R1, R2, R3, R4 any] registers your structural DoFn to optimize execution at runtime. // DoFn input and output parameter types should be provided in order as the generic constraints. func DoFn1x5[I0, R0, R1, R2, R3, R4 any](doFn genericDoFn1x5[I0, R0, R1, R2, R3, R4]) { registerDoFnTypes(doFn) registerDoFn1x5StructWrappersAndFuncs[I0, R0, R1, R2, R3, R4](doFn) } +// Function1x5[I0, R0, R1, R2, R3, R4 any] registers your functional DoFn to optimize execution at runtime. +// Function input and output parameter types should be provided in order as the generic constraints. 
+func Function1x5[I0, R0, R1, R2, R3, R4 any](doFn func(I0) (R0, R1, R2, R3, R4)) { + runtime.RegisterFunction(doFn) + registerMethodTypes(reflect.TypeOf(doFn)) + caller := func(fn interface{}) reflectx.Func { + f := fn.(func(I0) (R0, R1, R2, R3, R4)) + return &caller1x5[I0, R0, R1, R2, R3, R4]{fn: f} + } + reflectx.RegisterFunc(reflect.TypeOf((*func(I0) (R0, R1, R2, R3, R4))(nil)).Elem(), caller) +} + type genericDoFn2x5[I0, I1, R0, R1, R2, R3, R4 any] interface { ProcessElement(i0 I0, i1 I1) (R0, R1, R2, R3, R4) } @@ -6253,8 +6936,8 @@ func (c *caller2x5[I0, I1, R0, R1, R2, R3, R4]) Call(args []interface{}) []inter return []interface{}{out0, out1, out2, out3, out4} } -func (c *caller2x5[I0, I1, R0, R1, R2, R3, R4]) Call2x5(arg0 I0, arg1 I1) (interface{}, interface{}, interface{}, interface{}, interface{}) { - return c.fn(arg0, arg1) +func (c *caller2x5[I0, I1, R0, R1, R2, R3, R4]) Call2x5(arg0 interface{}, arg1 interface{}) (interface{}, interface{}, interface{}, interface{}, interface{}) { + return c.fn(arg0.(I0), arg1.(I1)) } func registerDoFn2x5StructWrappersAndFuncs[I0, I1, R0, R1, R2, R3, R4 any](doFn genericDoFn2x5[I0, I1, R0, R1, R2, R3, R4]) { @@ -6296,13 +6979,25 @@ func registerDoFn2x5StructWrappersAndFuncs[I0, I1, R0, R1, R2, R3, R4 any](doFn reflectx.RegisterStructWrapper(reflect.TypeOf(doFn).Elem(), wrapperFn) } -// DoFn2x5[I0, I1, R0, R1, R2, R3, R4 any] registers your DoFn to optimize execution at runtime. +// DoFn2x5[I0, I1, R0, R1, R2, R3, R4 any] registers your structural DoFn to optimize execution at runtime. // DoFn input and output parameter types should be provided in order as the generic constraints. func DoFn2x5[I0, I1, R0, R1, R2, R3, R4 any](doFn genericDoFn2x5[I0, I1, R0, R1, R2, R3, R4]) { registerDoFnTypes(doFn) registerDoFn2x5StructWrappersAndFuncs[I0, I1, R0, R1, R2, R3, R4](doFn) } +// Function2x5[I0, I1, R0, R1, R2, R3, R4 any] registers your functional DoFn to optimize execution at runtime. 
+// Function input and output parameter types should be provided in order as the generic constraints. +func Function2x5[I0, I1, R0, R1, R2, R3, R4 any](doFn func(I0, I1) (R0, R1, R2, R3, R4)) { + runtime.RegisterFunction(doFn) + registerMethodTypes(reflect.TypeOf(doFn)) + caller := func(fn interface{}) reflectx.Func { + f := fn.(func(I0, I1) (R0, R1, R2, R3, R4)) + return &caller2x5[I0, I1, R0, R1, R2, R3, R4]{fn: f} + } + reflectx.RegisterFunc(reflect.TypeOf((*func(I0, I1) (R0, R1, R2, R3, R4))(nil)).Elem(), caller) +} + type genericDoFn3x5[I0, I1, I2, R0, R1, R2, R3, R4 any] interface { ProcessElement(i0 I0, i1 I1, i2 I2) (R0, R1, R2, R3, R4) } @@ -6324,8 +7019,8 @@ func (c *caller3x5[I0, I1, I2, R0, R1, R2, R3, R4]) Call(args []interface{}) []i return []interface{}{out0, out1, out2, out3, out4} } -func (c *caller3x5[I0, I1, I2, R0, R1, R2, R3, R4]) Call3x5(arg0 I0, arg1 I1, arg2 I2) (interface{}, interface{}, interface{}, interface{}, interface{}) { - return c.fn(arg0, arg1, arg2) +func (c *caller3x5[I0, I1, I2, R0, R1, R2, R3, R4]) Call3x5(arg0 interface{}, arg1 interface{}, arg2 interface{}) (interface{}, interface{}, interface{}, interface{}, interface{}) { + return c.fn(arg0.(I0), arg1.(I1), arg2.(I2)) } func registerDoFn3x5StructWrappersAndFuncs[I0, I1, I2, R0, R1, R2, R3, R4 any](doFn genericDoFn3x5[I0, I1, I2, R0, R1, R2, R3, R4]) { @@ -6367,13 +7062,25 @@ func registerDoFn3x5StructWrappersAndFuncs[I0, I1, I2, R0, R1, R2, R3, R4 any](d reflectx.RegisterStructWrapper(reflect.TypeOf(doFn).Elem(), wrapperFn) } -// DoFn3x5[I0, I1, I2, R0, R1, R2, R3, R4 any] registers your DoFn to optimize execution at runtime. +// DoFn3x5[I0, I1, I2, R0, R1, R2, R3, R4 any] registers your structural DoFn to optimize execution at runtime. // DoFn input and output parameter types should be provided in order as the generic constraints. 
func DoFn3x5[I0, I1, I2, R0, R1, R2, R3, R4 any](doFn genericDoFn3x5[I0, I1, I2, R0, R1, R2, R3, R4]) { registerDoFnTypes(doFn) registerDoFn3x5StructWrappersAndFuncs[I0, I1, I2, R0, R1, R2, R3, R4](doFn) } +// Function3x5[I0, I1, I2, R0, R1, R2, R3, R4 any] registers your functional DoFn to optimize execution at runtime. +// Function input and output parameter types should be provided in order as the generic constraints. +func Function3x5[I0, I1, I2, R0, R1, R2, R3, R4 any](doFn func(I0, I1, I2) (R0, R1, R2, R3, R4)) { + runtime.RegisterFunction(doFn) + registerMethodTypes(reflect.TypeOf(doFn)) + caller := func(fn interface{}) reflectx.Func { + f := fn.(func(I0, I1, I2) (R0, R1, R2, R3, R4)) + return &caller3x5[I0, I1, I2, R0, R1, R2, R3, R4]{fn: f} + } + reflectx.RegisterFunc(reflect.TypeOf((*func(I0, I1, I2) (R0, R1, R2, R3, R4))(nil)).Elem(), caller) +} + type genericDoFn4x5[I0, I1, I2, I3, R0, R1, R2, R3, R4 any] interface { ProcessElement(i0 I0, i1 I1, i2 I2, i3 I3) (R0, R1, R2, R3, R4) } @@ -6395,8 +7102,8 @@ func (c *caller4x5[I0, I1, I2, I3, R0, R1, R2, R3, R4]) Call(args []interface{}) return []interface{}{out0, out1, out2, out3, out4} } -func (c *caller4x5[I0, I1, I2, I3, R0, R1, R2, R3, R4]) Call4x5(arg0 I0, arg1 I1, arg2 I2, arg3 I3) (interface{}, interface{}, interface{}, interface{}, interface{}) { - return c.fn(arg0, arg1, arg2, arg3) +func (c *caller4x5[I0, I1, I2, I3, R0, R1, R2, R3, R4]) Call4x5(arg0 interface{}, arg1 interface{}, arg2 interface{}, arg3 interface{}) (interface{}, interface{}, interface{}, interface{}, interface{}) { + return c.fn(arg0.(I0), arg1.(I1), arg2.(I2), arg3.(I3)) } func registerDoFn4x5StructWrappersAndFuncs[I0, I1, I2, I3, R0, R1, R2, R3, R4 any](doFn genericDoFn4x5[I0, I1, I2, I3, R0, R1, R2, R3, R4]) { @@ -6438,13 +7145,25 @@ func registerDoFn4x5StructWrappersAndFuncs[I0, I1, I2, I3, R0, R1, R2, R3, R4 an reflectx.RegisterStructWrapper(reflect.TypeOf(doFn).Elem(), wrapperFn) } -// DoFn4x5[I0, I1, I2, I3, R0, R1, R2, 
R3, R4 any] registers your DoFn to optimize execution at runtime. +// DoFn4x5[I0, I1, I2, I3, R0, R1, R2, R3, R4 any] registers your structural DoFn to optimize execution at runtime. // DoFn input and output parameter types should be provided in order as the generic constraints. func DoFn4x5[I0, I1, I2, I3, R0, R1, R2, R3, R4 any](doFn genericDoFn4x5[I0, I1, I2, I3, R0, R1, R2, R3, R4]) { registerDoFnTypes(doFn) registerDoFn4x5StructWrappersAndFuncs[I0, I1, I2, I3, R0, R1, R2, R3, R4](doFn) } +// Function4x5[I0, I1, I2, I3, R0, R1, R2, R3, R4 any] registers your functional DoFn to optimize execution at runtime. +// Function input and output parameter types should be provided in order as the generic constraints. +func Function4x5[I0, I1, I2, I3, R0, R1, R2, R3, R4 any](doFn func(I0, I1, I2, I3) (R0, R1, R2, R3, R4)) { + runtime.RegisterFunction(doFn) + registerMethodTypes(reflect.TypeOf(doFn)) + caller := func(fn interface{}) reflectx.Func { + f := fn.(func(I0, I1, I2, I3) (R0, R1, R2, R3, R4)) + return &caller4x5[I0, I1, I2, I3, R0, R1, R2, R3, R4]{fn: f} + } + reflectx.RegisterFunc(reflect.TypeOf((*func(I0, I1, I2, I3) (R0, R1, R2, R3, R4))(nil)).Elem(), caller) +} + type genericDoFn5x5[I0, I1, I2, I3, I4, R0, R1, R2, R3, R4 any] interface { ProcessElement(i0 I0, i1 I1, i2 I2, i3 I3, i4 I4) (R0, R1, R2, R3, R4) } @@ -6466,8 +7185,8 @@ func (c *caller5x5[I0, I1, I2, I3, I4, R0, R1, R2, R3, R4]) Call(args []interfac return []interface{}{out0, out1, out2, out3, out4} } -func (c *caller5x5[I0, I1, I2, I3, I4, R0, R1, R2, R3, R4]) Call5x5(arg0 I0, arg1 I1, arg2 I2, arg3 I3, arg4 I4) (interface{}, interface{}, interface{}, interface{}, interface{}) { - return c.fn(arg0, arg1, arg2, arg3, arg4) +func (c *caller5x5[I0, I1, I2, I3, I4, R0, R1, R2, R3, R4]) Call5x5(arg0 interface{}, arg1 interface{}, arg2 interface{}, arg3 interface{}, arg4 interface{}) (interface{}, interface{}, interface{}, interface{}, interface{}) { + return c.fn(arg0.(I0), arg1.(I1), arg2.(I2), 
arg3.(I3), arg4.(I4)) } func registerDoFn5x5StructWrappersAndFuncs[I0, I1, I2, I3, I4, R0, R1, R2, R3, R4 any](doFn genericDoFn5x5[I0, I1, I2, I3, I4, R0, R1, R2, R3, R4]) { @@ -6509,13 +7228,25 @@ func registerDoFn5x5StructWrappersAndFuncs[I0, I1, I2, I3, I4, R0, R1, R2, R3, R reflectx.RegisterStructWrapper(reflect.TypeOf(doFn).Elem(), wrapperFn) } -// DoFn5x5[I0, I1, I2, I3, I4, R0, R1, R2, R3, R4 any] registers your DoFn to optimize execution at runtime. +// DoFn5x5[I0, I1, I2, I3, I4, R0, R1, R2, R3, R4 any] registers your structural DoFn to optimize execution at runtime. // DoFn input and output parameter types should be provided in order as the generic constraints. func DoFn5x5[I0, I1, I2, I3, I4, R0, R1, R2, R3, R4 any](doFn genericDoFn5x5[I0, I1, I2, I3, I4, R0, R1, R2, R3, R4]) { registerDoFnTypes(doFn) registerDoFn5x5StructWrappersAndFuncs[I0, I1, I2, I3, I4, R0, R1, R2, R3, R4](doFn) } +// Function5x5[I0, I1, I2, I3, I4, R0, R1, R2, R3, R4 any] registers your functional DoFn to optimize execution at runtime. +// Function input and output parameter types should be provided in order as the generic constraints. 
+func Function5x5[I0, I1, I2, I3, I4, R0, R1, R2, R3, R4 any](doFn func(I0, I1, I2, I3, I4) (R0, R1, R2, R3, R4)) { + runtime.RegisterFunction(doFn) + registerMethodTypes(reflect.TypeOf(doFn)) + caller := func(fn interface{}) reflectx.Func { + f := fn.(func(I0, I1, I2, I3, I4) (R0, R1, R2, R3, R4)) + return &caller5x5[I0, I1, I2, I3, I4, R0, R1, R2, R3, R4]{fn: f} + } + reflectx.RegisterFunc(reflect.TypeOf((*func(I0, I1, I2, I3, I4) (R0, R1, R2, R3, R4))(nil)).Elem(), caller) +} + type genericDoFn6x5[I0, I1, I2, I3, I4, I5, R0, R1, R2, R3, R4 any] interface { ProcessElement(i0 I0, i1 I1, i2 I2, i3 I3, i4 I4, i5 I5) (R0, R1, R2, R3, R4) } @@ -6537,8 +7268,8 @@ func (c *caller6x5[I0, I1, I2, I3, I4, I5, R0, R1, R2, R3, R4]) Call(args []inte return []interface{}{out0, out1, out2, out3, out4} } -func (c *caller6x5[I0, I1, I2, I3, I4, I5, R0, R1, R2, R3, R4]) Call6x5(arg0 I0, arg1 I1, arg2 I2, arg3 I3, arg4 I4, arg5 I5) (interface{}, interface{}, interface{}, interface{}, interface{}) { - return c.fn(arg0, arg1, arg2, arg3, arg4, arg5) +func (c *caller6x5[I0, I1, I2, I3, I4, I5, R0, R1, R2, R3, R4]) Call6x5(arg0 interface{}, arg1 interface{}, arg2 interface{}, arg3 interface{}, arg4 interface{}, arg5 interface{}) (interface{}, interface{}, interface{}, interface{}, interface{}) { + return c.fn(arg0.(I0), arg1.(I1), arg2.(I2), arg3.(I3), arg4.(I4), arg5.(I5)) } func registerDoFn6x5StructWrappersAndFuncs[I0, I1, I2, I3, I4, I5, R0, R1, R2, R3, R4 any](doFn genericDoFn6x5[I0, I1, I2, I3, I4, I5, R0, R1, R2, R3, R4]) { @@ -6580,13 +7311,25 @@ func registerDoFn6x5StructWrappersAndFuncs[I0, I1, I2, I3, I4, I5, R0, R1, R2, R reflectx.RegisterStructWrapper(reflect.TypeOf(doFn).Elem(), wrapperFn) } -// DoFn6x5[I0, I1, I2, I3, I4, I5, R0, R1, R2, R3, R4 any] registers your DoFn to optimize execution at runtime. +// DoFn6x5[I0, I1, I2, I3, I4, I5, R0, R1, R2, R3, R4 any] registers your structural DoFn to optimize execution at runtime. 
// DoFn input and output parameter types should be provided in order as the generic constraints. func DoFn6x5[I0, I1, I2, I3, I4, I5, R0, R1, R2, R3, R4 any](doFn genericDoFn6x5[I0, I1, I2, I3, I4, I5, R0, R1, R2, R3, R4]) { registerDoFnTypes(doFn) registerDoFn6x5StructWrappersAndFuncs[I0, I1, I2, I3, I4, I5, R0, R1, R2, R3, R4](doFn) } +// Function6x5[I0, I1, I2, I3, I4, I5, R0, R1, R2, R3, R4 any] registers your functional DoFn to optimize execution at runtime. +// Function input and output parameter types should be provided in order as the generic constraints. +func Function6x5[I0, I1, I2, I3, I4, I5, R0, R1, R2, R3, R4 any](doFn func(I0, I1, I2, I3, I4, I5) (R0, R1, R2, R3, R4)) { + runtime.RegisterFunction(doFn) + registerMethodTypes(reflect.TypeOf(doFn)) + caller := func(fn interface{}) reflectx.Func { + f := fn.(func(I0, I1, I2, I3, I4, I5) (R0, R1, R2, R3, R4)) + return &caller6x5[I0, I1, I2, I3, I4, I5, R0, R1, R2, R3, R4]{fn: f} + } + reflectx.RegisterFunc(reflect.TypeOf((*func(I0, I1, I2, I3, I4, I5) (R0, R1, R2, R3, R4))(nil)).Elem(), caller) +} + type genericDoFn7x5[I0, I1, I2, I3, I4, I5, I6, R0, R1, R2, R3, R4 any] interface { ProcessElement(i0 I0, i1 I1, i2 I2, i3 I3, i4 I4, i5 I5, i6 I6) (R0, R1, R2, R3, R4) } @@ -6608,8 +7351,8 @@ func (c *caller7x5[I0, I1, I2, I3, I4, I5, I6, R0, R1, R2, R3, R4]) Call(args [] return []interface{}{out0, out1, out2, out3, out4} } -func (c *caller7x5[I0, I1, I2, I3, I4, I5, I6, R0, R1, R2, R3, R4]) Call7x5(arg0 I0, arg1 I1, arg2 I2, arg3 I3, arg4 I4, arg5 I5, arg6 I6) (interface{}, interface{}, interface{}, interface{}, interface{}) { - return c.fn(arg0, arg1, arg2, arg3, arg4, arg5, arg6) +func (c *caller7x5[I0, I1, I2, I3, I4, I5, I6, R0, R1, R2, R3, R4]) Call7x5(arg0 interface{}, arg1 interface{}, arg2 interface{}, arg3 interface{}, arg4 interface{}, arg5 interface{}, arg6 interface{}) (interface{}, interface{}, interface{}, interface{}, interface{}) { + return c.fn(arg0.(I0), arg1.(I1), arg2.(I2), arg3.(I3), 
arg4.(I4), arg5.(I5), arg6.(I6)) } func registerDoFn7x5StructWrappersAndFuncs[I0, I1, I2, I3, I4, I5, I6, R0, R1, R2, R3, R4 any](doFn genericDoFn7x5[I0, I1, I2, I3, I4, I5, I6, R0, R1, R2, R3, R4]) { @@ -6651,13 +7394,25 @@ func registerDoFn7x5StructWrappersAndFuncs[I0, I1, I2, I3, I4, I5, I6, R0, R1, R reflectx.RegisterStructWrapper(reflect.TypeOf(doFn).Elem(), wrapperFn) } -// DoFn7x5[I0, I1, I2, I3, I4, I5, I6, R0, R1, R2, R3, R4 any] registers your DoFn to optimize execution at runtime. +// DoFn7x5[I0, I1, I2, I3, I4, I5, I6, R0, R1, R2, R3, R4 any] registers your structural DoFn to optimize execution at runtime. // DoFn input and output parameter types should be provided in order as the generic constraints. func DoFn7x5[I0, I1, I2, I3, I4, I5, I6, R0, R1, R2, R3, R4 any](doFn genericDoFn7x5[I0, I1, I2, I3, I4, I5, I6, R0, R1, R2, R3, R4]) { registerDoFnTypes(doFn) registerDoFn7x5StructWrappersAndFuncs[I0, I1, I2, I3, I4, I5, I6, R0, R1, R2, R3, R4](doFn) } +// Function7x5[I0, I1, I2, I3, I4, I5, I6, R0, R1, R2, R3, R4 any] registers your functional DoFn to optimize execution at runtime. +// Function input and output parameter types should be provided in order as the generic constraints. 
+func Function7x5[I0, I1, I2, I3, I4, I5, I6, R0, R1, R2, R3, R4 any](doFn func(I0, I1, I2, I3, I4, I5, I6) (R0, R1, R2, R3, R4)) { + runtime.RegisterFunction(doFn) + registerMethodTypes(reflect.TypeOf(doFn)) + caller := func(fn interface{}) reflectx.Func { + f := fn.(func(I0, I1, I2, I3, I4, I5, I6) (R0, R1, R2, R3, R4)) + return &caller7x5[I0, I1, I2, I3, I4, I5, I6, R0, R1, R2, R3, R4]{fn: f} + } + reflectx.RegisterFunc(reflect.TypeOf((*func(I0, I1, I2, I3, I4, I5, I6) (R0, R1, R2, R3, R4))(nil)).Elem(), caller) +} + type genericDoFn8x5[I0, I1, I2, I3, I4, I5, I6, I7, R0, R1, R2, R3, R4 any] interface { ProcessElement(i0 I0, i1 I1, i2 I2, i3 I3, i4 I4, i5 I5, i6 I6, i7 I7) (R0, R1, R2, R3, R4) } @@ -6679,8 +7434,8 @@ func (c *caller8x5[I0, I1, I2, I3, I4, I5, I6, I7, R0, R1, R2, R3, R4]) Call(arg return []interface{}{out0, out1, out2, out3, out4} } -func (c *caller8x5[I0, I1, I2, I3, I4, I5, I6, I7, R0, R1, R2, R3, R4]) Call8x5(arg0 I0, arg1 I1, arg2 I2, arg3 I3, arg4 I4, arg5 I5, arg6 I6, arg7 I7) (interface{}, interface{}, interface{}, interface{}, interface{}) { - return c.fn(arg0, arg1, arg2, arg3, arg4, arg5, arg6, arg7) +func (c *caller8x5[I0, I1, I2, I3, I4, I5, I6, I7, R0, R1, R2, R3, R4]) Call8x5(arg0 interface{}, arg1 interface{}, arg2 interface{}, arg3 interface{}, arg4 interface{}, arg5 interface{}, arg6 interface{}, arg7 interface{}) (interface{}, interface{}, interface{}, interface{}, interface{}) { + return c.fn(arg0.(I0), arg1.(I1), arg2.(I2), arg3.(I3), arg4.(I4), arg5.(I5), arg6.(I6), arg7.(I7)) } func registerDoFn8x5StructWrappersAndFuncs[I0, I1, I2, I3, I4, I5, I6, I7, R0, R1, R2, R3, R4 any](doFn genericDoFn8x5[I0, I1, I2, I3, I4, I5, I6, I7, R0, R1, R2, R3, R4]) { @@ -6722,13 +7477,25 @@ func registerDoFn8x5StructWrappersAndFuncs[I0, I1, I2, I3, I4, I5, I6, I7, R0, R reflectx.RegisterStructWrapper(reflect.TypeOf(doFn).Elem(), wrapperFn) } -// DoFn8x5[I0, I1, I2, I3, I4, I5, I6, I7, R0, R1, R2, R3, R4 any] registers your DoFn to optimize 
execution at runtime. +// DoFn8x5[I0, I1, I2, I3, I4, I5, I6, I7, R0, R1, R2, R3, R4 any] registers your structural DoFn to optimize execution at runtime. // DoFn input and output parameter types should be provided in order as the generic constraints. func DoFn8x5[I0, I1, I2, I3, I4, I5, I6, I7, R0, R1, R2, R3, R4 any](doFn genericDoFn8x5[I0, I1, I2, I3, I4, I5, I6, I7, R0, R1, R2, R3, R4]) { registerDoFnTypes(doFn) registerDoFn8x5StructWrappersAndFuncs[I0, I1, I2, I3, I4, I5, I6, I7, R0, R1, R2, R3, R4](doFn) } +// Function8x5[I0, I1, I2, I3, I4, I5, I6, I7, R0, R1, R2, R3, R4 any] registers your functional DoFn to optimize execution at runtime. +// Function input and output parameter types should be provided in order as the generic constraints. +func Function8x5[I0, I1, I2, I3, I4, I5, I6, I7, R0, R1, R2, R3, R4 any](doFn func(I0, I1, I2, I3, I4, I5, I6, I7) (R0, R1, R2, R3, R4)) { + runtime.RegisterFunction(doFn) + registerMethodTypes(reflect.TypeOf(doFn)) + caller := func(fn interface{}) reflectx.Func { + f := fn.(func(I0, I1, I2, I3, I4, I5, I6, I7) (R0, R1, R2, R3, R4)) + return &caller8x5[I0, I1, I2, I3, I4, I5, I6, I7, R0, R1, R2, R3, R4]{fn: f} + } + reflectx.RegisterFunc(reflect.TypeOf((*func(I0, I1, I2, I3, I4, I5, I6, I7) (R0, R1, R2, R3, R4))(nil)).Elem(), caller) +} + type genericDoFn9x5[I0, I1, I2, I3, I4, I5, I6, I7, I8, R0, R1, R2, R3, R4 any] interface { ProcessElement(i0 I0, i1 I1, i2 I2, i3 I3, i4 I4, i5 I5, i6 I6, i7 I7, i8 I8) (R0, R1, R2, R3, R4) } @@ -6750,8 +7517,8 @@ func (c *caller9x5[I0, I1, I2, I3, I4, I5, I6, I7, I8, R0, R1, R2, R3, R4]) Call return []interface{}{out0, out1, out2, out3, out4} } -func (c *caller9x5[I0, I1, I2, I3, I4, I5, I6, I7, I8, R0, R1, R2, R3, R4]) Call9x5(arg0 I0, arg1 I1, arg2 I2, arg3 I3, arg4 I4, arg5 I5, arg6 I6, arg7 I7, arg8 I8) (interface{}, interface{}, interface{}, interface{}, interface{}) { - return c.fn(arg0, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8) +func (c *caller9x5[I0, I1, I2, I3, I4, I5, 
I6, I7, I8, R0, R1, R2, R3, R4]) Call9x5(arg0 interface{}, arg1 interface{}, arg2 interface{}, arg3 interface{}, arg4 interface{}, arg5 interface{}, arg6 interface{}, arg7 interface{}, arg8 interface{}) (interface{}, interface{}, interface{}, interface{}, interface{}) { + return c.fn(arg0.(I0), arg1.(I1), arg2.(I2), arg3.(I3), arg4.(I4), arg5.(I5), arg6.(I6), arg7.(I7), arg8.(I8)) } func registerDoFn9x5StructWrappersAndFuncs[I0, I1, I2, I3, I4, I5, I6, I7, I8, R0, R1, R2, R3, R4 any](doFn genericDoFn9x5[I0, I1, I2, I3, I4, I5, I6, I7, I8, R0, R1, R2, R3, R4]) { @@ -6793,13 +7560,25 @@ func registerDoFn9x5StructWrappersAndFuncs[I0, I1, I2, I3, I4, I5, I6, I7, I8, R reflectx.RegisterStructWrapper(reflect.TypeOf(doFn).Elem(), wrapperFn) } -// DoFn9x5[I0, I1, I2, I3, I4, I5, I6, I7, I8, R0, R1, R2, R3, R4 any] registers your DoFn to optimize execution at runtime. +// DoFn9x5[I0, I1, I2, I3, I4, I5, I6, I7, I8, R0, R1, R2, R3, R4 any] registers your structural DoFn to optimize execution at runtime. // DoFn input and output parameter types should be provided in order as the generic constraints. func DoFn9x5[I0, I1, I2, I3, I4, I5, I6, I7, I8, R0, R1, R2, R3, R4 any](doFn genericDoFn9x5[I0, I1, I2, I3, I4, I5, I6, I7, I8, R0, R1, R2, R3, R4]) { registerDoFnTypes(doFn) registerDoFn9x5StructWrappersAndFuncs[I0, I1, I2, I3, I4, I5, I6, I7, I8, R0, R1, R2, R3, R4](doFn) } +// Function9x5[I0, I1, I2, I3, I4, I5, I6, I7, I8, R0, R1, R2, R3, R4 any] registers your functional DoFn to optimize execution at runtime. +// Function input and output parameter types should be provided in order as the generic constraints. 
+func Function9x5[I0, I1, I2, I3, I4, I5, I6, I7, I8, R0, R1, R2, R3, R4 any](doFn func(I0, I1, I2, I3, I4, I5, I6, I7, I8) (R0, R1, R2, R3, R4)) { + runtime.RegisterFunction(doFn) + registerMethodTypes(reflect.TypeOf(doFn)) + caller := func(fn interface{}) reflectx.Func { + f := fn.(func(I0, I1, I2, I3, I4, I5, I6, I7, I8) (R0, R1, R2, R3, R4)) + return &caller9x5[I0, I1, I2, I3, I4, I5, I6, I7, I8, R0, R1, R2, R3, R4]{fn: f} + } + reflectx.RegisterFunc(reflect.TypeOf((*func(I0, I1, I2, I3, I4, I5, I6, I7, I8) (R0, R1, R2, R3, R4))(nil)).Elem(), caller) +} + type genericDoFn10x5[I0, I1, I2, I3, I4, I5, I6, I7, I8, I9, R0, R1, R2, R3, R4 any] interface { ProcessElement(i0 I0, i1 I1, i2 I2, i3 I3, i4 I4, i5 I5, i6 I6, i7 I7, i8 I8, i9 I9) (R0, R1, R2, R3, R4) } @@ -6821,8 +7600,8 @@ func (c *caller10x5[I0, I1, I2, I3, I4, I5, I6, I7, I8, I9, R0, R1, R2, R3, R4]) return []interface{}{out0, out1, out2, out3, out4} } -func (c *caller10x5[I0, I1, I2, I3, I4, I5, I6, I7, I8, I9, R0, R1, R2, R3, R4]) Call10x5(arg0 I0, arg1 I1, arg2 I2, arg3 I3, arg4 I4, arg5 I5, arg6 I6, arg7 I7, arg8 I8, arg9 I9) (interface{}, interface{}, interface{}, interface{}, interface{}) { - return c.fn(arg0, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9) +func (c *caller10x5[I0, I1, I2, I3, I4, I5, I6, I7, I8, I9, R0, R1, R2, R3, R4]) Call10x5(arg0 interface{}, arg1 interface{}, arg2 interface{}, arg3 interface{}, arg4 interface{}, arg5 interface{}, arg6 interface{}, arg7 interface{}, arg8 interface{}, arg9 interface{}) (interface{}, interface{}, interface{}, interface{}, interface{}) { + return c.fn(arg0.(I0), arg1.(I1), arg2.(I2), arg3.(I3), arg4.(I4), arg5.(I5), arg6.(I6), arg7.(I7), arg8.(I8), arg9.(I9)) } func registerDoFn10x5StructWrappersAndFuncs[I0, I1, I2, I3, I4, I5, I6, I7, I8, I9, R0, R1, R2, R3, R4 any](doFn genericDoFn10x5[I0, I1, I2, I3, I4, I5, I6, I7, I8, I9, R0, R1, R2, R3, R4]) { @@ -6864,13 +7643,25 @@ func registerDoFn10x5StructWrappersAndFuncs[I0, I1, I2, I3, I4, I5, 
I6, I7, I8, reflectx.RegisterStructWrapper(reflect.TypeOf(doFn).Elem(), wrapperFn) } -// DoFn10x5[I0, I1, I2, I3, I4, I5, I6, I7, I8, I9, R0, R1, R2, R3, R4 any] registers your DoFn to optimize execution at runtime. +// DoFn10x5[I0, I1, I2, I3, I4, I5, I6, I7, I8, I9, R0, R1, R2, R3, R4 any] registers your structural DoFn to optimize execution at runtime. // DoFn input and output parameter types should be provided in order as the generic constraints. func DoFn10x5[I0, I1, I2, I3, I4, I5, I6, I7, I8, I9, R0, R1, R2, R3, R4 any](doFn genericDoFn10x5[I0, I1, I2, I3, I4, I5, I6, I7, I8, I9, R0, R1, R2, R3, R4]) { registerDoFnTypes(doFn) registerDoFn10x5StructWrappersAndFuncs[I0, I1, I2, I3, I4, I5, I6, I7, I8, I9, R0, R1, R2, R3, R4](doFn) } +// Function10x5[I0, I1, I2, I3, I4, I5, I6, I7, I8, I9, R0, R1, R2, R3, R4 any] registers your functional DoFn to optimize execution at runtime. +// Function input and output parameter types should be provided in order as the generic constraints. +func Function10x5[I0, I1, I2, I3, I4, I5, I6, I7, I8, I9, R0, R1, R2, R3, R4 any](doFn func(I0, I1, I2, I3, I4, I5, I6, I7, I8, I9) (R0, R1, R2, R3, R4)) { + runtime.RegisterFunction(doFn) + registerMethodTypes(reflect.TypeOf(doFn)) + caller := func(fn interface{}) reflectx.Func { + f := fn.(func(I0, I1, I2, I3, I4, I5, I6, I7, I8, I9) (R0, R1, R2, R3, R4)) + return &caller10x5[I0, I1, I2, I3, I4, I5, I6, I7, I8, I9, R0, R1, R2, R3, R4]{fn: f} + } + reflectx.RegisterFunc(reflect.TypeOf((*func(I0, I1, I2, I3, I4, I5, I6, I7, I8, I9) (R0, R1, R2, R3, R4))(nil)).Elem(), caller) +} + type startBundle0x0 interface { StartBundle() } diff --git a/sdks/go/pkg/beam/register/register.tmpl b/sdks/go/pkg/beam/register/register.tmpl index 459b97e321f7..b701be456865 100644 --- a/sdks/go/pkg/beam/register/register.tmpl +++ b/sdks/go/pkg/beam/register/register.tmpl @@ -151,8 +151,8 @@ func (c *caller{{$processElementIn}}x{{$processElementOut}}{{(genericTypingRepre return []interface{}{ {{if 
$processElementOut}}{{range $out := upto $processElementOut}}{{if $out}}, {{end}}out{{$out}}{{end}}{{end}} } } -func (c *caller{{$processElementIn}}x{{$processElementOut}}{{(genericTypingRepresentation $processElementIn $processElementOut false)}}) Call{{$processElementIn}}x{{$processElementOut}}({{range $in := upto $processElementIn}}{{if $in}}, {{end}}arg{{$in}} I{{$in}}{{end}}){{if $processElementOut}} ({{range $out := upto $processElementOut}}{{if $out}}, {{end}}interface{}{{end}}){{end}} { - {{if $processElementOut}}return {{end}}c.fn({{range $in := upto $processElementIn}}{{if $in}}, {{end}}arg{{$in}}{{end}}) +func (c *caller{{$processElementIn}}x{{$processElementOut}}{{(genericTypingRepresentation $processElementIn $processElementOut false)}}) Call{{$processElementIn}}x{{$processElementOut}}({{range $in := upto $processElementIn}}{{if $in}}, {{end}}arg{{$in}} interface{}{{end}}){{if $processElementOut}} ({{range $out := upto $processElementOut}}{{if $out}}, {{end}}interface{}{{end}}){{end}} { + {{if $processElementOut}}return {{end}}c.fn({{range $in := upto $processElementIn}}{{if $in}}, {{end}}arg{{$in}}.(I{{$in}}){{end}}) } func registerDoFn{{$processElementIn}}x{{$processElementOut}}StructWrappersAndFuncs{{(genericTypingRepresentation $processElementIn $processElementOut true)}}(doFn genericDoFn{{$processElementIn}}x{{$processElementOut}}{{(genericTypingRepresentation $processElementIn $processElementOut false)}}) { @@ -194,11 +194,23 @@ func registerDoFn{{$processElementIn}}x{{$processElementOut}}StructWrappersAndFu reflectx.RegisterStructWrapper(reflect.TypeOf(doFn).Elem(), wrapperFn) } -// DoFn{{$processElementIn}}x{{$processElementOut}}{{(genericTypingRepresentation $processElementIn $processElementOut true)}} registers your DoFn to optimize execution at runtime. 
+// DoFn{{$processElementIn}}x{{$processElementOut}}{{(genericTypingRepresentation $processElementIn $processElementOut true)}} registers your structural DoFn to optimize execution at runtime. {{if (or $processElementIn $processElementOut)}}// DoFn input and output parameter types should be provided in order as the generic constraints. {{end}}func DoFn{{$processElementIn}}x{{$processElementOut}}{{(genericTypingRepresentation $processElementIn $processElementOut true)}}(doFn genericDoFn{{$processElementIn}}x{{$processElementOut}}{{(genericTypingRepresentation $processElementIn $processElementOut false)}}) { registerDoFnTypes(doFn) registerDoFn{{$processElementIn}}x{{$processElementOut}}StructWrappersAndFuncs{{(genericTypingRepresentation $processElementIn $processElementOut false)}}(doFn) +} + +// Function{{$processElementIn}}x{{$processElementOut}}{{(genericTypingRepresentation $processElementIn $processElementOut true)}} registers your functional DoFn to optimize execution at runtime. +{{if (or $processElementIn $processElementOut)}}// Function input and output parameter types should be provided in order as the generic constraints. 
+{{end}}func Function{{$processElementIn}}x{{$processElementOut}}{{(genericTypingRepresentation $processElementIn $processElementOut true)}}(doFn func({{range $in := upto $processElementIn}}{{if $in}}, {{end}}I{{$in}}{{end}}) {{if (gt $processElementOut 1)}}({{end}}{{range $out := upto $processElementOut}}{{if $out}}, {{end}}R{{$out}}{{end}}{{if (gt $processElementOut 1)}}){{end}}) { + runtime.RegisterFunction(doFn) + registerMethodTypes(reflect.TypeOf(doFn)) + caller := func(fn interface{}) reflectx.Func { + f := fn.(func({{range $in := upto $processElementIn}}{{if $in}}, {{end}}I{{$in}}{{end}}) {{if (gt $processElementOut 1)}}({{end}}{{range $out := upto $processElementOut}}{{if $out}}, {{end}}R{{$out}}{{end}}{{if (gt $processElementOut 1)}}){{end}}) + return &caller{{$processElementIn}}x{{$processElementOut}}{{(genericTypingRepresentation $processElementIn $processElementOut false)}}{fn: f} + } + reflectx.RegisterFunc(reflect.TypeOf((*func({{range $in := upto $processElementIn}}{{if $in}}, {{end}}I{{$in}}{{end}}) {{if (gt $processElementOut 1)}}({{end}}{{range $out := upto $processElementOut}}{{if $out}}, {{end}}R{{$out}}{{end}}{{if (gt $processElementOut 1)}}){{end}})(nil)).Elem(), caller) }{{end}}{{end}} {{range $startFinishBundleOut := upto $startFinishBundleOutRange}}{{range $startFinishBundleIn := upto $startFinishBundleInRange}} diff --git a/sdks/go/pkg/beam/register/register_test.go b/sdks/go/pkg/beam/register/register_test.go index e89b2c123100..39962ab3c421 100644 --- a/sdks/go/pkg/beam/register/register_test.go +++ b/sdks/go/pkg/beam/register/register_test.go @@ -20,9 +20,12 @@ import ( "reflect" "testing" + "github.com/apache/beam/sdks/v2/go/pkg/beam/core/graph" "github.com/apache/beam/sdks/v2/go/pkg/beam/core/graph/mtime" "github.com/apache/beam/sdks/v2/go/pkg/beam/core/graph/window" + "github.com/apache/beam/sdks/v2/go/pkg/beam/core/runtime" "github.com/apache/beam/sdks/v2/go/pkg/beam/core/runtime/exec" + 
"github.com/apache/beam/sdks/v2/go/pkg/beam/core/runtime/graphx" "github.com/apache/beam/sdks/v2/go/pkg/beam/core/runtime/graphx/schema" "github.com/apache/beam/sdks/v2/go/pkg/beam/core/typex" "github.com/apache/beam/sdks/v2/go/pkg/beam/core/util/reflectx" @@ -534,6 +537,36 @@ func TestIter2_Struct(t *testing.T) { } } +type CustomFunctionParameter struct { + key string + val int +} + +type CustomFunctionReturn struct { + key int + val string +} + +func customFunction(a CustomFunctionParameter) CustomFunctionReturn { + return CustomFunctionReturn{ + key: a.val, + val: a.key, + } +} + +func TestFunction(t *testing.T) { + Function1x1[CustomFunctionParameter, CustomFunctionReturn](customFunction) + + // Need to call FromType so that the registry will reconcile its registrations + schema.FromType(reflect.TypeOf(CustomFunctionParameter{})) + if !schema.Registered(reflect.TypeOf(CustomFunctionParameter{})) { + t.Errorf("schema.Registered(reflect.TypeOf(CustomFunctionParameter{})) = false, want true") + } + if !schema.Registered(reflect.TypeOf(CustomFunctionReturn{})) { + t.Errorf("schema.Registered(reflect.TypeOf(CustomFunctionReturn{})) = false, want true") + } +} + type elementProcessor struct { inFV exec.FullValue } @@ -672,3 +705,209 @@ func (fn *PartialCombiner2) AddInput(i int, c CustomType) int { func (fn *PartialCombiner2) MergeAccumulators(i1 int, i2 int) int { return i1 + i2 } + +// Foo is a struct with a method for measuring method invocation +// overhead for StructuralDoFns. +type Foo struct { + A int +} + +// ProcessElement is a method for measuring a baseline of structural dofn overhead. 
+func (f *Foo) ProcessElement(b CustomType) int { + return f.A + b.val +} + +func MakeMultiEdge(f *graph.DoFn) graph.MultiEdge { + return graph.MultiEdge{ + DoFn: f, + } +} + +type callerCustomTypeГint struct { + fn func(CustomType) int +} + +func (c *callerCustomTypeГint) Name() string { + return reflectx.FunctionName(c.fn) +} + +func (c *callerCustomTypeГint) Type() reflect.Type { + return reflect.TypeOf(c.fn) +} + +func (c *callerCustomTypeГint) Call(args []interface{}) []interface{} { + out0 := c.fn(args[0].(CustomType)) + return []interface{}{out0} +} + +func (c *callerCustomTypeГint) Call1x1(arg0 interface{}) interface{} { + return c.fn(arg0.(CustomType)) +} + +type callerCustomType2CustomType2ГCustomType2 struct { + fn func(CustomType2, CustomType2) CustomType2 +} + +func (c *callerCustomType2CustomType2ГCustomType2) Name() string { + return reflectx.FunctionName(c.fn) +} + +func (c *callerCustomType2CustomType2ГCustomType2) Type() reflect.Type { + return reflect.TypeOf(c.fn) +} + +func (c *callerCustomType2CustomType2ГCustomType2) Call(args []interface{}) []interface{} { + out0 := c.fn(args[0].(CustomType2), args[0].(CustomType2)) + return []interface{}{out0} +} + +func (c *callerCustomType2CustomType2ГCustomType2) Call2x1(arg0, arg1 interface{}) interface{} { + return c.fn(arg0.(CustomType2), arg1.(CustomType2)) +} + +func funcMakerCustomTypeГint(fn interface{}) reflectx.Func { + f := fn.(func(CustomType) int) + return &callerCustomTypeГint{fn: f} +} + +func funcMakerCustomType2CustomType2ГCustomType2(fn interface{}) reflectx.Func { + f := fn.(func(CustomType2, CustomType2) CustomType2) + return &callerCustomType2CustomType2ГCustomType2{fn: f} +} + +func wrapMakerFoo(fn interface{}) map[string]reflectx.Func { + dfn := fn.(*Foo) + return map[string]reflectx.Func{ + "ProcessElement": reflectx.MakeFunc(func(a0 CustomType) int { return dfn.ProcessElement(a0) }), + } +} + +func addCustomType2(a CustomType2, b CustomType2) CustomType2 { + return CustomType2{ + 
val2: a.val2 + b.val2, + } +} + +func GeneratedOptimizationCalls() { + runtime.RegisterType(reflect.TypeOf((*Foo)(nil)).Elem()) + schema.RegisterType(reflect.TypeOf((*Foo)(nil)).Elem()) + runtime.RegisterType(reflect.TypeOf((*CustomType)(nil)).Elem()) + schema.RegisterType(reflect.TypeOf((*CustomType)(nil)).Elem()) + runtime.RegisterType(reflect.TypeOf((*CustomType2)(nil)).Elem()) + schema.RegisterType(reflect.TypeOf((*CustomType2)(nil)).Elem()) + reflectx.RegisterFunc(reflect.TypeOf((*func(CustomType) int)(nil)).Elem(), funcMakerCustomTypeГint) + reflectx.RegisterFunc(reflect.TypeOf((*func(CustomType2, CustomType2) CustomType2)(nil)).Elem(), funcMakerCustomType2CustomType2ГCustomType2) + reflectx.RegisterStructWrapper(reflect.TypeOf((*Foo)(nil)).Elem(), wrapMakerFoo) +} + +// BenchmarkMethodCalls measures the overhead of invoking several different methods after performing +// different types of registration. The unoptimized calls don't perform any optimization. The +// GenericRegistration calls first register the DoFn being used with this package's generic registration +// functions. This is the preferred path for users. The GeneratedShims calls call various registration +// functions, mirroring the behavior of the shims generated by the code generator. This is not the +// recommended path for most users - if these are materially better than the generic benchmarks, +// this package requires further optimization. 
+// +// BenchmarkMethodCalls/MakeFunc_Unoptimized-16 11480814 88.35 ns/op +// BenchmarkMethodCalls/MakeFunc.Call_Unoptimized-16 3525211 324.0 ns/op +// BenchmarkMethodCalls/MakeFunc.Call1x1_Unoptimized-16 3450822 343.0 ns/op +// BenchmarkMethodCalls/NewFn_Unoptimized-16 875199 1385 ns/op +// BenchmarkMethodCalls/EncodeMultiEdge_Unoptimized-16 1000000 1063 ns/op +// BenchmarkMethodCalls/MakeFunc_FunctionalDoFn_Unoptimized-16 11984484 110.6 ns/op +// BenchmarkMethodCalls/MakeFunc_FunctionalDoFn.Call_Unoptimized-16 1574622 744.4 ns/op +// BenchmarkMethodCalls/MakeFunc_FunctionalDoFn.Call1x1_Unoptimized-16 1504969 795.9 ns/op +// +// BenchmarkMethodCalls/MakeFunc_GenericRegistration-16 16266259 72.07 ns/op +// BenchmarkMethodCalls/MakeFunc.Call_GenericRegistration-16 38331327 32.70 ns/op +// BenchmarkMethodCalls/MakeFunc.Call1x1_GenericRegistration-16 135934086 8.434 ns/op +// BenchmarkMethodCalls/NewFn_GenericRegistration-16 1000000 1108 ns/op +// BenchmarkMethodCalls/EncodeMultiEdge_GenericRegistration-16 1000000 1052 ns/op +// BenchmarkMethodCalls/MakeFunc_FunctionalDoFn_GenericRegistration-16 11295202 95.43 ns/op +// BenchmarkMethodCalls/MakeFunc_FunctionalDoFn.Call_GenericRegistration-16 20299956 54.15 ns/op +// BenchmarkMethodCalls/MakeFunc_FunctionalDoFn.Call1x1_GenericRegistration-16 92858212 12.86 ns/op +// +// BenchmarkMethodCalls/MakeFunc_GeneratedShims-16 16400914 69.17 ns/op +// BenchmarkMethodCalls/MakeFunc.Call_GeneratedShims-16 37106445 33.69 ns/op +// BenchmarkMethodCalls/MakeFunc.Call1x1_GeneratedShims-16 141127965 8.312 ns/op +// BenchmarkMethodCalls/NewFn_GeneratedShims-16 1000000 1099 ns/op +// BenchmarkMethodCalls/EncodeMultiEdge_GeneratedShims-16 1000000 1071 ns/op +// BenchmarkMethodCalls/MakeFunc_FunctionalDoFn_GeneratedShims-16 12444930 90.77 ns/op +// BenchmarkMethodCalls/MakeFunc_FunctionalDoFn.Call_GeneratedShims-16 19462878 51.92 ns/op +// BenchmarkMethodCalls/MakeFunc_FunctionalDoFn.Call2x1_GeneratedShims-16 85194289 15.76 ns/op +func 
BenchmarkMethodCalls(b *testing.B) { + f := &Foo{A: 3} + g, err := graph.NewFn(&Foo{A: 5}) + if err != nil { + panic(err) + } + gDoFn, err := graph.AsDoFn(g, 1) + if err != nil { + panic(err) + } + me := MakeMultiEdge(gDoFn) + + var aFunc reflectx.Func + var aFn *graph.Fn + var aME interface{} + var aFnCall int + var aFnCall2 CustomType2 + var aFunc1x1 reflectx.Func1x1 + var aFunc2x1 reflectx.Func2x1 + funcIn := []interface{}{CustomType{val: 4}} + funcIn2 := []interface{}{CustomType2{val2: 4}, CustomType2{val2: 3}} + + // We need to do this registration just to get it to not panic when encoding the multi-edge with no additional optimization. + // This is currently required of users anyways + runtime.RegisterType(reflect.TypeOf((*Foo)(nil))) + tests := []struct { + name string + fn func() + registration func() + }{ + // No optimization performed at all + {"MakeFunc_Unoptimized", func() { aFunc = reflectx.MakeFunc(f.ProcessElement) }, func() { /*No op*/ }}, // Used in graph deserialization + {"MakeFunc.Call_Unoptimized", func() { aFnCall = aFunc.Call(funcIn)[0].(int) }, func() { /*No op*/ }}, // Used to call the function repeatedly + {"MakeFunc.Call1x1_Unoptimized", func() { aFnCall = aFunc1x1.Call1x1(CustomType{val: 4}).(int) }, func() { aFunc1x1 = reflectx.ToFunc1x1(aFunc) }}, // Used to call the function repeatedly + {"NewFn_Unoptimized", func() { aFn, _ = graph.NewFn(f) }, func() { /*No op*/ }}, // Used in graph construction (less valuable) + {"EncodeMultiEdge_Unoptimized", func() { aME, _ = graphx.EncodeMultiEdge(&me) }, func() { /*No op*/ }}, // Used in graph serialization at execution time + {"MakeFunc_FunctionalDoFn_Unoptimized", func() { aFunc = reflectx.MakeFunc(addCustomType2) }, func() { /*No op*/ }}, // Used in graph deserialization + {"MakeFunc_FunctionalDoFn.Call_Unoptimized", func() { aFnCall2 = aFunc.Call(funcIn2)[0].(CustomType2) }, func() { /*No op*/ }}, // Used to call the function repeatedly + {"MakeFunc_FunctionalDoFn.Call2x1_Unoptimized", 
func() { aFnCall2 = aFunc2x1.Call2x1(CustomType2{val2: 4}, CustomType2{val2: 3}).(CustomType2) }, func() { aFunc2x1 = reflectx.ToFunc2x1(aFunc) }}, // Used to call the function repeatedly + + // Perform some generic registration to optimize execution + {"MakeFunc_GenericRegistration", func() { aFunc = reflectx.MakeFunc(f.ProcessElement) }, func() { DoFn1x1[CustomType, int](f) }}, // Used in graph deserialization + {"MakeFunc.Call_GenericRegistration", func() { aFnCall = aFunc.Call(funcIn)[0].(int) }, func() { DoFn1x1[CustomType, int](f) }}, // Used to call the function repeatedly + {"MakeFunc.Call1x1_GenericRegistration", func() { aFnCall = aFunc1x1.Call1x1(CustomType{val: 3}).(int) }, func() { DoFn1x1[CustomType, int](f); aFunc1x1 = reflectx.ToFunc1x1(aFunc) }}, // Used to call the function repeatedly + {"NewFn_GenericRegistration", func() { aFn, _ = graph.NewFn(f) }, func() { DoFn1x1[CustomType, int](f) }}, // Used in graph construction (less valuable) + {"EncodeMultiEdge_GenericRegistration", func() { aME, _ = graphx.EncodeMultiEdge(&me) }, func() { DoFn1x1[CustomType, int](f) }}, // Used in graph serialization at execution time + {"MakeFunc_FunctionalDoFn_GenericRegistration", func() { aFunc = reflectx.MakeFunc(addCustomType2) }, func() { Function2x1[CustomType2, CustomType2, CustomType2](addCustomType2) }}, // Used in graph deserialization + {"MakeFunc_FunctionalDoFn.Call_GenericRegistration", func() { aFnCall2 = aFunc.Call(funcIn2)[0].(CustomType2) }, func() { Function2x1[CustomType2, CustomType2, CustomType2](addCustomType2) }}, // Used to call the function repeatedly + {"MakeFunc_FunctionalDoFn.Call2x1_GenericRegistration", func() { aFnCall2 = aFunc2x1.Call2x1(CustomType2{val2: 4}, CustomType2{val2: 3}).(CustomType2) }, func() { + Function2x1[CustomType2, CustomType2, CustomType2](addCustomType2) + aFunc2x1 = reflectx.ToFunc2x1(aFunc) + }}, // Used to call the function repeatedly + + // Perform some registration via copies of the code generator's shims + 
{"MakeFunc_GeneratedShims", func() { aFunc = reflectx.MakeFunc(f.ProcessElement) }, func() { GeneratedOptimizationCalls() }}, // Used in graph deserialization + {"MakeFunc.Call_GeneratedShims", func() { aFnCall = aFunc.Call(funcIn)[0].(int) }, func() { GeneratedOptimizationCalls() }}, // Used to call the function repeatedly + {"MakeFunc.Call1x1_GeneratedShims", func() { aFnCall = aFunc1x1.Call1x1(CustomType{val: 5}).(int) }, func() { GeneratedOptimizationCalls(); aFunc1x1 = reflectx.ToFunc1x1(aFunc) }}, // Used to call the function repeatedly + {"NewFn_GeneratedShims", func() { aFn, _ = graph.NewFn(f) }, func() { GeneratedOptimizationCalls() }}, // Used in graph construction (less valuable) + {"EncodeMultiEdge_GeneratedShims", func() { aME, err = graphx.EncodeMultiEdge(&me) }, func() { GeneratedOptimizationCalls() }}, // Used in graph serialization at execution time + {"MakeFunc_FunctionalDoFn_GeneratedShims", func() { aFunc = reflectx.MakeFunc(addCustomType2) }, func() { GeneratedOptimizationCalls() }}, // Used in graph deserialization + {"MakeFunc_FunctionalDoFn.Call_GeneratedShims", func() { aFnCall2 = aFunc.Call(funcIn2)[0].(CustomType2) }, func() { GeneratedOptimizationCalls() }}, // Used to call the function repeatedly + {"MakeFunc_FunctionalDoFn.Call2x1_GeneratedShims", func() { aFnCall2 = aFunc2x1.Call2x1(CustomType2{val2: 4}, CustomType2{val2: 3}).(CustomType2) }, func() { GeneratedOptimizationCalls(); aFunc2x1 = reflectx.ToFunc2x1(aFunc) }}, // Used to call the function repeatedly + } + for _, test := range tests { + test.registration() + b.Run(test.name, func(b *testing.B) { + for i := 0; i < b.N; i++ { + test.fn() + } + }) + } + b.Log(aFunc) + b.Log(aFnCall) + b.Log(aFnCall2) + b.Log(aFn) + b.Log(aME) +} diff --git a/sdks/go/pkg/beam/staticcheck.conf b/sdks/go/pkg/beam/staticcheck.conf index e4a182b22f43..5da9745a7a0c 100644 --- a/sdks/go/pkg/beam/staticcheck.conf +++ b/sdks/go/pkg/beam/staticcheck.conf @@ -17,6 +17,4 @@ # analysis tool. 
See https://staticcheck.io/docs/checks/ for descriptions of # each check. -# TODO(BEAM-14371): Clean up instances of S1021 (merge variable declaration and assignment) and enable check -# TODO(BEAM-14372): Clean up instances of U1000 (unused fields, vars, functions) and enable check -checks = ["all", "-ST1000", "-ST1003", "-ST1016", "-ST1020", "-ST1021", "-ST1022", "-ST1023", "-SA1019", "-U1000", "-S1021"] \ No newline at end of file +checks = ["all", "-ST1000", "-ST1003", "-ST1016", "-ST1020", "-ST1022", "-ST1023", "-SA1019", "-S1021"] \ No newline at end of file diff --git a/sdks/go/pkg/beam/transforms/sql/sqlx/sqlx.go b/sdks/go/pkg/beam/transforms/sql/sqlx/sqlx.go index 7e537a4cbd98..2f050a14432c 100644 --- a/sdks/go/pkg/beam/transforms/sql/sqlx/sqlx.go +++ b/sdks/go/pkg/beam/transforms/sql/sqlx/sqlx.go @@ -13,6 +13,9 @@ // See the License for the specific language governing permissions and // limitations under the License. +//lint:file-ignore U1000 unused type options in ExpansionPayload struct is needed to maintain +// correct expected serialized payload + // Package sqlx contains "internal" SQL transform interfaces that are needed // by the SQL expansion providers. 
// diff --git a/sdks/go/test/load/cogbk/cogbk.go b/sdks/go/test/load/cogbk/cogbk.go index 77b196f1620f..eefd0bddff1c 100644 --- a/sdks/go/test/load/cogbk/cogbk.go +++ b/sdks/go/test/load/cogbk/cogbk.go @@ -18,11 +18,11 @@ package main import ( "context" "flag" - "reflect" "github.com/apache/beam/sdks/v2/go/pkg/beam" "github.com/apache/beam/sdks/v2/go/pkg/beam/io/synthetic" "github.com/apache/beam/sdks/v2/go/pkg/beam/log" + "github.com/apache/beam/sdks/v2/go/pkg/beam/register" "github.com/apache/beam/sdks/v2/go/pkg/beam/x/beamx" "github.com/apache/beam/sdks/v2/go/test/load" ) @@ -43,7 +43,9 @@ var ( ) func init() { - beam.RegisterType(reflect.TypeOf((*ungroupAndReiterateFn)(nil)).Elem()) + register.DoFn4x0[[]byte, func(*[]byte) bool, func(*[]byte) bool, func([]byte, []byte)]((*ungroupAndReiterateFn)(nil)) + register.Emitter2[[]byte, []byte]() + register.Iter1[[]byte]() } // ungroupAndReiterateFn reiterates given number of times over CoGBK's output. @@ -51,6 +53,8 @@ type ungroupAndReiterateFn struct { Iterations int } +// TODO use re-iterators once supported. 
+ func (fn *ungroupAndReiterateFn) ProcessElement(key []byte, p1values, p2values func(*[]byte) bool, emit func([]byte, []byte)) { var value []byte for i := 0; i < fn.Iterations; i++ { diff --git a/sdks/go/test/load/combine/combine.go b/sdks/go/test/load/combine/combine.go index 32d46a1d94a7..9f00bbf79909 100644 --- a/sdks/go/test/load/combine/combine.go +++ b/sdks/go/test/load/combine/combine.go @@ -23,6 +23,7 @@ import ( "github.com/apache/beam/sdks/v2/go/pkg/beam" "github.com/apache/beam/sdks/v2/go/pkg/beam/io/synthetic" "github.com/apache/beam/sdks/v2/go/pkg/beam/log" + "github.com/apache/beam/sdks/v2/go/pkg/beam/register" "github.com/apache/beam/sdks/v2/go/pkg/beam/transforms/top" "github.com/apache/beam/sdks/v2/go/pkg/beam/x/beamx" "github.com/apache/beam/sdks/v2/go/test/load" @@ -52,6 +53,12 @@ func parseSyntheticConfig() synthetic.SourceConfig { } } +func init() { + register.Function2x1(compareLess) + register.Function3x0(getElement) + register.Emitter2[[]byte, []byte]() +} + func compareLess(key []byte, value []byte) bool { return bytes.Compare(key, value) < 0 } @@ -73,6 +80,7 @@ func main() { pcoll := top.LargestPerKey(s, src, *topCount, compareLess) pcoll = beam.ParDo(s, getElement, pcoll) pcoll = beam.ParDo(s, &load.RuntimeMonitor{}, pcoll) + _ = pcoll } presult, err := beamx.RunWithMetrics(ctx, p) diff --git a/sdks/go/test/load/group_by_key/group_by_key.go b/sdks/go/test/load/group_by_key/group_by_key.go index 78871c96d6f9..645afabedef4 100644 --- a/sdks/go/test/load/group_by_key/group_by_key.go +++ b/sdks/go/test/load/group_by_key/group_by_key.go @@ -24,6 +24,7 @@ import ( "github.com/apache/beam/sdks/v2/go/pkg/beam" "github.com/apache/beam/sdks/v2/go/pkg/beam/io/synthetic" "github.com/apache/beam/sdks/v2/go/pkg/beam/log" + "github.com/apache/beam/sdks/v2/go/pkg/beam/register" "github.com/apache/beam/sdks/v2/go/pkg/beam/x/beamx" "github.com/apache/beam/sdks/v2/go/test/load" ) @@ -52,6 +53,30 @@ func parseSyntheticConfig() synthetic.SourceConfig { } } 
+func init() { + register.DoFn2x2[[]byte, func(*[]byte) bool, []byte, []byte]((*ungroupAndReiterateFn)(nil)) + register.Iter1[[]byte]() +} + +// ungroupAndReiterateFn reiterates given number of times over GBK's output. +type ungroupAndReiterateFn struct { + Iterations int +} + +// TODO use re-iterators once supported. + +func (fn *ungroupAndReiterateFn) ProcessElement(key []byte, values func(*[]byte) bool) ([]byte, []byte) { + var value []byte + for i := 0; i < fn.Iterations; i++ { + for values(&value) { + if i == fn.Iterations-1 { + return key, value + } + } + } + return key, []byte{0} +} + func main() { flag.Parse() beam.Init() @@ -63,18 +88,9 @@ func main() { src = beam.ParDo(s, &load.RuntimeMonitor{}, src) for i := 0; i < *fanout; i++ { pcoll := beam.GroupByKey(s, src) - pcoll = beam.ParDo(s, func(key []byte, values func(*[]byte) bool) ([]byte, []byte) { - for i := 0; i < *iterations; i++ { - var value []byte - for values(&value) { - if i == *iterations-1 { - return key, value - } - } - } - return key, []byte{0} - }, pcoll) + pcoll = beam.ParDo(s, &ungroupAndReiterateFn{*iterations}, pcoll) pcoll = beam.ParDo(s, &load.RuntimeMonitor{}, pcoll) + _ = pcoll } presult, err := beamx.RunWithMetrics(ctx, p) diff --git a/sdks/go/test/load/pardo/pardo.go b/sdks/go/test/load/pardo/pardo.go index 8114ac8a9060..eceae82bc5d0 100644 --- a/sdks/go/test/load/pardo/pardo.go +++ b/sdks/go/test/load/pardo/pardo.go @@ -19,11 +19,11 @@ import ( "context" "flag" "fmt" - "reflect" "github.com/apache/beam/sdks/v2/go/pkg/beam" "github.com/apache/beam/sdks/v2/go/pkg/beam/io/synthetic" "github.com/apache/beam/sdks/v2/go/pkg/beam/log" + "github.com/apache/beam/sdks/v2/go/pkg/beam/register" "github.com/apache/beam/sdks/v2/go/pkg/beam/x/beamx" "github.com/apache/beam/sdks/v2/go/test/load" ) @@ -48,7 +48,8 @@ var ( ) func init() { - beam.RegisterType(reflect.TypeOf((*counterOperationFn)(nil)).Elem()) + register.DoFn4x0[context.Context, []byte, []byte, func([]byte, 
[]byte)]((*counterOperationFn)(nil)) + register.Emitter2[[]byte, []byte]() } type counterOperationFn struct { @@ -57,7 +58,10 @@ type counterOperationFn struct { } func newCounterOperationFn(operations, numCounters int) *counterOperationFn { - return &counterOperationFn{operations, numCounters, nil} + return &counterOperationFn{ + Operations: operations, + NumCounters: numCounters, + } } func (fn *counterOperationFn) Setup() { diff --git a/sdks/go/test/load/sideinput/sideinput.go b/sdks/go/test/load/sideinput/sideinput.go index 6f7cb6f2d419..c57ed9f0b235 100644 --- a/sdks/go/test/load/sideinput/sideinput.go +++ b/sdks/go/test/load/sideinput/sideinput.go @@ -18,17 +18,20 @@ package main import ( "context" "flag" - "reflect" "github.com/apache/beam/sdks/v2/go/pkg/beam" "github.com/apache/beam/sdks/v2/go/pkg/beam/io/synthetic" "github.com/apache/beam/sdks/v2/go/pkg/beam/log" + "github.com/apache/beam/sdks/v2/go/pkg/beam/register" "github.com/apache/beam/sdks/v2/go/pkg/beam/x/beamx" "github.com/apache/beam/sdks/v2/go/test/load" ) func init() { - beam.RegisterDoFn(reflect.TypeOf((*doFn)(nil))) + register.DoFn4x0[[]byte, []byte, func(*[]byte, *[]byte) bool, func([]byte, []byte)]((*iterSideInputFn)(nil)) + register.Emitter2[[]byte, []byte]() + register.Iter2[[]byte, []byte]() + register.Function2x0(impToKV) } var ( @@ -51,11 +54,17 @@ func parseSyntheticConfig() synthetic.SourceConfig { } } -type doFn struct { +// impToKV just turns an impulse signal into a KV instead of +// adding a single value input version of RuntimeMonitor +func impToKV(imp []byte, emit func([]byte, []byte)) { + emit(imp, imp) +} + +type iterSideInputFn struct { ElementsToAccess int64 } -func (fn *doFn) ProcessElement(_ []byte, values func(*[]byte, *[]byte) bool, emit func([]byte, []byte)) { +func (fn *iterSideInputFn) ProcessElement(_, _ []byte, values func(*[]byte, *[]byte) bool, emit func([]byte, []byte)) { var key, value []byte var i int64 for values(&key, &value) { @@ -74,18 +83,21 @@ func 
main() { p, s := beam.NewPipelineWithRoot() syntheticConfig := parseSyntheticConfig() - elementsToAccess := syntheticConfig.NumElements * int64(*accessPercentage/100) + elementsToAccess := syntheticConfig.NumElements * int64(float64(*accessPercentage)/float64(100)) src := synthetic.SourceSingle(s, syntheticConfig) - src = beam.ParDo(s, &load.RuntimeMonitor{}, src) - src = beam.ParDo( + imp := beam.Impulse(s) + impKV := beam.ParDo(s, impToKV, imp) + monitored := beam.ParDo(s, &load.RuntimeMonitor{}, impKV) + + useSide := beam.ParDo( s, - &doFn{ElementsToAccess: elementsToAccess}, - beam.Impulse(s), + &iterSideInputFn{ElementsToAccess: elementsToAccess}, + monitored, beam.SideInput{Input: src}) - beam.ParDo(s, &load.RuntimeMonitor{}, src) + beam.ParDo(s, &load.RuntimeMonitor{}, useSide) presult, err := beamx.RunWithMetrics(ctx, p) if err != nil { diff --git a/sdks/go/test/load/util.go b/sdks/go/test/load/util.go index 96436d0d6782..4cbfda8e0ba9 100644 --- a/sdks/go/test/load/util.go +++ b/sdks/go/test/load/util.go @@ -24,12 +24,12 @@ import ( "log" "net/http" "os" - "reflect" "strings" "time" "github.com/apache/beam/sdks/v2/go/pkg/beam" "github.com/apache/beam/sdks/v2/go/pkg/beam/core/metrics" + "github.com/apache/beam/sdks/v2/go/pkg/beam/register" ) const ( @@ -60,7 +60,8 @@ var ( ) func init() { - beam.RegisterType(reflect.TypeOf((*RuntimeMonitor)(nil)).Elem()) + register.DoFn3x0[[]byte, []byte, func([]byte, []byte)]((*RuntimeMonitor)(nil)) + register.Emitter2[[]byte, []byte]() } // RuntimeMonitor is a DoFn to record processing time in the pipeline. @@ -132,10 +133,16 @@ func newLoadTestResult(value float64) loadTestResult { // PublishMetrics calculates the runtime and sends the result to InfluxDB database. 
func PublishMetrics(results metrics.QueryResults) { options := newInfluxDBOptions() + ress := toLoadTestResults(results) + for _, res := range ress { + log.Printf("%s %v", res.metric, time.Duration(float64(time.Second)*res.value)) + } + if len(ress) == 0 { + log.Print("No metrics returned.") + return + } if options.validate() { - if res := toLoadTestResults(results); len(res) > 0 { - publishMetricstoInfluxDB(options, toLoadTestResults(results)) - } + publishMetricstoInfluxDB(options, ress) } else { log.Print("Missing InfluxDB options. Metrics will not be published to InfluxDB") } @@ -212,8 +219,8 @@ func publishMetricstoInfluxDB(options *influxDBOptions, results []loadTestResult if resp.StatusCode != 204 { jsonData := make(map[string]string) json.Unmarshal(body, &jsonData) - log.Print(fmt.Errorf("Failed to publish metrics to InfluxDB. Received status code %v "+ - "with an error message: %v", resp.StatusCode, jsonData["error"])) + log.Printf("Failed to publish metrics to InfluxDB. Received status code %v "+ + "with an error message: %v", resp.StatusCode, jsonData["error"]) } } diff --git a/sdks/java/container/boot.go b/sdks/java/container/boot.go index f37857305512..d067db2b304d 100644 --- a/sdks/java/container/boot.go +++ b/sdks/java/container/boot.go @@ -343,7 +343,8 @@ func BuildOptions(metaOptions []*MetaOption) *Options { continue } - options.JavaArguments = append(options.JavaArguments, meta.Options.JavaArguments...) + // Rightmost takes precedence + options.JavaArguments = append(meta.Options.JavaArguments, options.JavaArguments...) for key, value := range meta.Options.Properties { _, exists := options.Properties[key] diff --git a/sdks/java/container/boot_test.go b/sdks/java/container/boot_test.go new file mode 100644 index 000000000000..53942f683430 --- /dev/null +++ b/sdks/java/container/boot_test.go @@ -0,0 +1,74 @@ +// Licensed to the Apache Software Foundation (ASF) under one or more +// contributor license agreements. 
See the NOTICE file distributed with +// this work for additional information regarding copyright ownership. +// The ASF licenses this file to You under the Apache License, Version 2.0 +// (the "License"); you may not use this file except in compliance with +// the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// boot is the boot code for the Java SDK harness container. It is responsible +// for retrieving staged files and invoking the JVM correctly. +package main + +import ( + "reflect" + "testing" +) + +func TestBuildOptionsEmpty(t *testing.T) { + dir := "test/empty" + metaOptions, err := LoadMetaOptions(dir) + if err != nil { + t.Fatalf("Got error %v running LoadMetaOptions", err) + } + if metaOptions != nil { + t.Fatalf("LoadMetaOptions(%v) = %v, want nil", dir, metaOptions) + } + + javaOptions := BuildOptions(metaOptions) + if len(javaOptions.JavaArguments) != 0 || len(javaOptions.Classpath) != 0 || len(javaOptions.Properties) != 0 { + t.Errorf("BuildOptions(%v) = %v, want nil", metaOptions, javaOptions) + } +} + +func TestBuildOptionsDisabled(t *testing.T) { + metaOptions, err := LoadMetaOptions("test/disabled") + if err != nil { + t.Fatalf("Got error %v running LoadMetaOptions", err) + } + + javaOptions := BuildOptions(metaOptions) + if len(javaOptions.JavaArguments) != 0 || len(javaOptions.Classpath) != 0 || len(javaOptions.Properties) != 0 { + t.Errorf("BuildOptions(%v) = %v, want nil", metaOptions, javaOptions) + } +} + +func TestBuildOptions(t *testing.T) { + metaOptions, err := LoadMetaOptions("test/priority") + if err != nil { + t.Fatalf("Got error %v running 
LoadMetaOptions", err) + } + + javaOptions := BuildOptions(metaOptions) + wantJavaArguments := []string{"java_args=low", "java_args=high"} + wantClasspath := []string{"classpath_high", "classpath_low"} + wantProperties := map[string]string{ + "priority":"high", + } + if !reflect.DeepEqual(javaOptions.JavaArguments, wantJavaArguments) { + t.Errorf("BuildOptions(%v).JavaArguments = %v, want %v", metaOptions, javaOptions.JavaArguments, wantJavaArguments) + } + if !reflect.DeepEqual(javaOptions.Classpath, wantClasspath) { + t.Errorf("BuildOptions(%v).Classpath = %v, want %v", metaOptions, javaOptions.Classpath, wantClasspath) + } + if !reflect.DeepEqual(javaOptions.Properties, wantProperties) { + t.Errorf("BuildOptions(%v).JavaProperties = %v, want %v", metaOptions, javaOptions.Properties, wantProperties) + } +} diff --git a/sdks/java/container/common.gradle b/sdks/java/container/common.gradle index 72c643a04455..b9f8da73cbd6 100644 --- a/sdks/java/container/common.gradle +++ b/sdks/java/container/common.gradle @@ -75,7 +75,7 @@ task copyGolangLicenses(type: Copy) { } task copyJdkOptions(type: Copy) { - if (imageJavaVersion == "17") { + if (imageJavaVersion == "17" || imageJavaVersion == "11") { from "option-jamm.json" into "build/target/options" } diff --git a/sdks/java/container/java11/option-jamm.json b/sdks/java/container/java11/option-jamm.json new file mode 100644 index 000000000000..e994648a8e2d --- /dev/null +++ b/sdks/java/container/java11/option-jamm.json @@ -0,0 +1,13 @@ +{ + "name": "jamm", + "enabled": true, + "options": { + "java_arguments": [ + "--add-modules=jamm", + "--module-path=/opt/apache/beam/jars/jamm.jar", + "--add-opens=java.base/java.lang=jamm", + "--add-opens=java.base/java.lang.ref=jamm", + "--add-opens=java.base/java.util=jamm" + ] + } +} \ No newline at end of file diff --git a/sdks/java/container/test/disabled/option-disabled.json b/sdks/java/container/test/disabled/option-disabled.json new file mode 100644 index 
000000000000..8ba7e7ff00c3 --- /dev/null +++ b/sdks/java/container/test/disabled/option-disabled.json @@ -0,0 +1,6 @@ +{ + "name": "test-disabled", + "enabled": false, + "options": { + } +} \ No newline at end of file diff --git a/sdks/java/container/test/empty/README b/sdks/java/container/test/empty/README new file mode 100644 index 000000000000..16786ad6f5eb --- /dev/null +++ b/sdks/java/container/test/empty/README @@ -0,0 +1 @@ +Empty directory to test boot options \ No newline at end of file diff --git a/sdks/java/container/test/priority/option-high.json b/sdks/java/container/test/priority/option-high.json new file mode 100644 index 000000000000..f7830cc41082 --- /dev/null +++ b/sdks/java/container/test/priority/option-high.json @@ -0,0 +1,16 @@ +{ + "name": "high", + "enabled": true, + "priority": 100, + "options": { + "java_arguments": [ + "java_args=high" + ], + "classpath": [ + "classpath_high" + ], + "properties": { + "priority": "high" + } + } +} \ No newline at end of file diff --git a/sdks/java/container/test/priority/option-low.json b/sdks/java/container/test/priority/option-low.json new file mode 100644 index 000000000000..b67a8ccdfdf7 --- /dev/null +++ b/sdks/java/container/test/priority/option-low.json @@ -0,0 +1,16 @@ +{ + "name": "low", + "enabled": true, + "priority": 0, + "options": { + "java_arguments": [ + "java_args=low" + ], + "classpath": [ + "classpath_low" + ], + "properties": { + "priority": "low" + } + } +} \ No newline at end of file diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/SchemaTranslation.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/SchemaTranslation.java index e1112b2472de..9a63c2d87819 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/SchemaTranslation.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/SchemaTranslation.java @@ -43,6 +43,7 @@ import org.apache.beam.sdk.schemas.Schema.LogicalType; import org.apache.beam.sdk.schemas.Schema.TypeName; 
import org.apache.beam.sdk.schemas.logicaltypes.MicrosInstant; +import org.apache.beam.sdk.schemas.logicaltypes.PythonCallable; import org.apache.beam.sdk.schemas.logicaltypes.SchemaLogicalType; import org.apache.beam.sdk.schemas.logicaltypes.UnknownLogicalType; import org.apache.beam.sdk.util.SerializableUtils; @@ -74,6 +75,7 @@ public class SchemaTranslation { ImmutableMap.>>builder() .put(MicrosInstant.IDENTIFIER, MicrosInstant.class) .put(SchemaLogicalType.IDENTIFIER, SchemaLogicalType.class) + .put(PythonCallable.IDENTIFIER, PythonCallable.class) .build(); public static SchemaApi.Schema schemaToProto(Schema schema, boolean serializeLogicalType) { diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/logicaltypes/MicrosInstant.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/logicaltypes/MicrosInstant.java index 6c1fea85d842..a388731a14c5 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/logicaltypes/MicrosInstant.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/logicaltypes/MicrosInstant.java @@ -18,6 +18,8 @@ package org.apache.beam.sdk.schemas.logicaltypes; import java.time.Instant; +import org.apache.beam.model.pipeline.v1.RunnerApi; +import org.apache.beam.model.pipeline.v1.SchemaApi; import org.apache.beam.sdk.schemas.Schema; import org.apache.beam.sdk.values.Row; @@ -36,7 +38,11 @@ "nullness" // TODO(https://issues.apache.org/jira/browse/BEAM-10402) }) public class MicrosInstant implements Schema.LogicalType { - public static final String IDENTIFIER = "beam:logical_type:micros_instant:v1"; + public static final String IDENTIFIER = + SchemaApi.LogicalTypes.Enum.MICROS_INSTANT + .getValueDescriptor() + .getOptions() + .getExtension(RunnerApi.beamUrn); // TODO(BEAM-10878): This should be a constant private final Schema schema; diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/logicaltypes/PythonCallable.java 
b/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/logicaltypes/PythonCallable.java new file mode 100644 index 000000000000..ea4e297515e9 --- /dev/null +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/logicaltypes/PythonCallable.java @@ -0,0 +1,62 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.beam.sdk.schemas.logicaltypes; + +import org.apache.beam.model.pipeline.v1.RunnerApi; +import org.apache.beam.model.pipeline.v1.SchemaApi; +import org.apache.beam.sdk.annotations.Experimental; +import org.apache.beam.sdk.schemas.Schema; +import org.apache.beam.sdk.schemas.Schema.LogicalType; +import org.apache.beam.sdk.util.PythonCallableSource; +import org.checkerframework.checker.nullness.qual.NonNull; +import org.checkerframework.checker.nullness.qual.Nullable; + +/** A logical type for PythonCallableSource objects. 
*/ +@Experimental(Experimental.Kind.SCHEMAS) +public class PythonCallable implements LogicalType { + public static final String IDENTIFIER = + SchemaApi.LogicalTypes.Enum.PYTHON_CALLABLE + .getValueDescriptor() + .getOptions() + .getExtension(RunnerApi.beamUrn); + + @Override + public String getIdentifier() { + return IDENTIFIER; + } + + @Override + public Schema.@Nullable FieldType getArgumentType() { + return null; + } + + @Override + public Schema.FieldType getBaseType() { + return Schema.FieldType.STRING; + } + + @Override + public @NonNull String toBaseType(@NonNull PythonCallableSource input) { + return input.getPythonCallableCode(); + } + + @Override + public @NonNull PythonCallableSource toInputType(@NonNull String base) { + return PythonCallableSource.of(base); + } +} diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/utils/StaticSchemaInference.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/utils/StaticSchemaInference.java index 103405037bed..a1437c2d0ccd 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/utils/StaticSchemaInference.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/utils/StaticSchemaInference.java @@ -126,6 +126,7 @@ public static Schema.FieldType fieldFromType( return fieldFromType(type, fieldValueTypeSupplier, new HashMap()); } + // TODO(BEAM-14458): support type inference for logical types private static Schema.FieldType fieldFromType( TypeDescriptor type, FieldValueTypeSupplier fieldValueTypeSupplier, diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/PythonCallableSource.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/PythonCallableSource.java new file mode 100644 index 000000000000..8875d8982963 --- /dev/null +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/PythonCallableSource.java @@ -0,0 +1,40 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.beam.sdk.util; + +import java.io.Serializable; + +/** + * A wrapper object storing a Python code that can be evaluated to Python callables in Python SDK. + */ +public class PythonCallableSource implements Serializable { + private final String pythonCallableCode; + + private PythonCallableSource(String pythonCallableCode) { + this.pythonCallableCode = pythonCallableCode; + } + + public static PythonCallableSource of(String pythonCallableCode) { + // TODO(BEAM-14457): check syntactic correctness of Python code if possible + return new PythonCallableSource(pythonCallableCode); + } + + public String getPythonCallableCode() { + return pythonCallableCode; + } +} diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/schemas/SchemaTranslationTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/schemas/SchemaTranslationTest.java index f4274de02ea5..9f3f7004e8c1 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/schemas/SchemaTranslationTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/schemas/SchemaTranslationTest.java @@ -41,6 +41,7 @@ import org.apache.beam.sdk.schemas.logicaltypes.DateTime; import org.apache.beam.sdk.schemas.logicaltypes.FixedBytes; import org.apache.beam.sdk.schemas.logicaltypes.MicrosInstant; +import 
org.apache.beam.sdk.schemas.logicaltypes.PythonCallable; import org.apache.beam.sdk.schemas.logicaltypes.SchemaLogicalType; import org.apache.beam.sdk.values.Row; import org.apache.beam.vendor.grpc.v1p43p2.com.google.protobuf.ByteString; @@ -132,6 +133,7 @@ public static Iterable data() { Field.of("decimal", FieldType.DECIMAL), Field.of("datetime", FieldType.DATETIME))) .add(Schema.of(Field.of("fixed_bytes", FieldType.logicalType(FixedBytes.of(24))))) .add(Schema.of(Field.of("micros_instant", FieldType.logicalType(new MicrosInstant())))) + .add(Schema.of(Field.of("python_callable", FieldType.logicalType(new PythonCallable())))) .add( Schema.of( Field.of("field_with_option_atomic", FieldType.STRING) diff --git a/sdks/java/extensions/python/src/main/java/org/apache/beam/sdk/extensions/python/PythonExternalTransform.java b/sdks/java/extensions/python/src/main/java/org/apache/beam/sdk/extensions/python/PythonExternalTransform.java index c412acd220ee..04a87cea36a6 100644 --- a/sdks/java/extensions/python/src/main/java/org/apache/beam/sdk/extensions/python/PythonExternalTransform.java +++ b/sdks/java/extensions/python/src/main/java/org/apache/beam/sdk/extensions/python/PythonExternalTransform.java @@ -18,6 +18,7 @@ package org.apache.beam.sdk.extensions.python; import java.util.Arrays; +import java.util.HashMap; import java.util.Map; import java.util.Set; import java.util.SortedMap; @@ -33,10 +34,12 @@ import org.apache.beam.sdk.schemas.Schema; import org.apache.beam.sdk.schemas.SchemaRegistry; import org.apache.beam.sdk.schemas.SchemaTranslation; +import org.apache.beam.sdk.schemas.logicaltypes.PythonCallable; import org.apache.beam.sdk.schemas.utils.StaticSchemaInference; import org.apache.beam.sdk.transforms.PTransform; import org.apache.beam.sdk.transforms.SerializableFunction; import org.apache.beam.sdk.util.CoderUtils; +import org.apache.beam.sdk.util.PythonCallableSource; import org.apache.beam.sdk.values.PBegin; import org.apache.beam.sdk.values.PCollection; 
import org.apache.beam.sdk.values.PCollectionTuple; @@ -64,6 +67,7 @@ public class PythonExternalTransform kwargsMap; + private Map, Schema.FieldType> typeHints; private @Nullable Object @NonNull [] argsArray; private @Nullable Row providedKwargsRow; @@ -72,6 +76,11 @@ private PythonExternalTransform(String fullyQualifiedName, String expansionServi this.fullyQualifiedName = fullyQualifiedName; this.expansionService = expansionService; this.kwargsMap = new TreeMap<>(); + this.typeHints = new HashMap<>(); + // TODO(BEAM-14458): remove a default type hint for PythonCallableSource when BEAM-14458 is + // resolved + this.typeHints.put( + PythonCallableSource.class, Schema.FieldType.logicalType(new PythonCallable())); argsArray = new Object[] {}; } @@ -162,6 +171,26 @@ public PythonExternalTransform withKwargs(Row kwargs) { return this; } + /** + * Specifies the field type of arguments. + * + *

Type hints are especially useful for logical types since type inference does not work well + * for logical types. + * + * @param argType A class object for the argument type. + * @param fieldType A schema field type for the argument. + * @return updated wrapper for the cross-language transform. + */ + public PythonExternalTransform withTypeHint( + java.lang.Class argType, Schema.FieldType fieldType) { + if (typeHints.containsKey(argType)) { + throw new IllegalArgumentException( + String.format("typehint for arg type %s already exists", argType)); + } + typeHints.put(argType, fieldType); + return this; + } + @VisibleForTesting Row buildOrGetKwargsRow() { if (providedKwargsRow != null) { @@ -170,6 +199,7 @@ Row buildOrGetKwargsRow() { Schema schema = generateSchemaFromFieldValues( kwargsMap.values().toArray(), kwargsMap.keySet().toArray(new String[] {})); + schema.setUUID(UUID.randomUUID()); return Row.withSchema(schema) .addValues(convertComplexTypesToRows(kwargsMap.values().toArray())) .build(); @@ -179,16 +209,18 @@ Row buildOrGetKwargsRow() { // Types that are not one of following are considered custom types. // * Java primitives // * Type String + // * Any Type explicitly annotated by withTypeHint() // * Type Row - private static boolean isCustomType(java.lang.Class type) { + private boolean isCustomType(java.lang.Class type) { boolean val = !(ClassUtils.isPrimitiveOrWrapper(type) || type == String.class + || typeHints.containsKey(type) || Row.class.isAssignableFrom(type)); return val; } - // If the custom type has a registered schema, we use that. OTherwise we try to register it using + // If the custom type has a registered schema, we use that. Otherwise, we try to register it using // 'JavaFieldSchema'. 
private Row convertCustomValue(Object value) { SerializableFunction toRowFunc; @@ -223,6 +255,7 @@ private Object[] convertComplexTypesToRows(@Nullable Object @NonNull [] values) @VisibleForTesting Row buildOrGetArgsRow() { Schema schema = generateSchemaFromFieldValues(argsArray, null); + schema.setUUID(UUID.randomUUID()); Object[] convertedValues = convertComplexTypesToRows(argsArray); return Row.withSchema(schema).addValues(convertedValues).build(); } @@ -239,6 +272,8 @@ private Schema generateSchemaDirectly( if (field instanceof Row) { // Rows are used as is but other types are converted to proper field types. builder.addRowField(fieldName, ((Row) field).getSchema()); + } else if (typeHints.containsKey(field.getClass())) { + builder.addField(fieldName, typeHints.get(field.getClass())); } else { builder.addField( fieldName, diff --git a/sdks/java/extensions/python/src/main/resources/org/apache/beam/sdk/extensions/python/bootstrap_beam_venv.py b/sdks/java/extensions/python/src/main/resources/org/apache/beam/sdk/extensions/python/bootstrap_beam_venv.py index 08120b84adba..c113676a8f74 100644 --- a/sdks/java/extensions/python/src/main/resources/org/apache/beam/sdk/extensions/python/bootstrap_beam_venv.py +++ b/sdks/java/extensions/python/src/main/resources/org/apache/beam/sdk/extensions/python/bootstrap_beam_venv.py @@ -77,7 +77,7 @@ def maybe_strict_version(s): or options.beam_version.startswith('https://')): # It's a path to a tarball. 
beam_version = os.path.basename(options.beam_version) - beam_package = options.beam_version + beam_package = options.beam_version + '[gcp,aws,asure,dataframe]' else: beam_version = options.beam_version beam_package = 'apache_beam[gcp,aws,asure,dataframe]==' + beam_version diff --git a/sdks/java/extensions/python/src/test/java/org/apache/beam/sdk/extensions/python/ExternalPythonTransformTest.java b/sdks/java/extensions/python/src/test/java/org/apache/beam/sdk/extensions/python/PythonExternalTransformTest.java similarity index 83% rename from sdks/java/extensions/python/src/test/java/org/apache/beam/sdk/extensions/python/ExternalPythonTransformTest.java rename to sdks/java/extensions/python/src/test/java/org/apache/beam/sdk/extensions/python/PythonExternalTransformTest.java index 60deebfc6e66..5d55a6e8d345 100644 --- a/sdks/java/extensions/python/src/test/java/org/apache/beam/sdk/extensions/python/ExternalPythonTransformTest.java +++ b/sdks/java/extensions/python/src/test/java/org/apache/beam/sdk/extensions/python/PythonExternalTransformTest.java @@ -22,14 +22,17 @@ import static org.junit.Assert.assertTrue; import java.io.Serializable; +import java.time.Instant; import java.util.Map; import org.apache.beam.model.pipeline.v1.ExternalTransforms; import org.apache.beam.sdk.Pipeline; import org.apache.beam.sdk.schemas.Schema; import org.apache.beam.sdk.schemas.SchemaTranslation; +import org.apache.beam.sdk.schemas.logicaltypes.MicrosInstant; import org.apache.beam.sdk.testing.PAssert; import org.apache.beam.sdk.transforms.Create; import org.apache.beam.sdk.transforms.MapElements; +import org.apache.beam.sdk.util.PythonCallableSource; import org.apache.beam.sdk.values.KV; import org.apache.beam.sdk.values.PCollection; import org.apache.beam.sdk.values.Row; @@ -41,7 +44,7 @@ import org.junit.runners.JUnit4; @RunWith(JUnit4.class) -public class ExternalPythonTransformTest implements Serializable { +public class PythonExternalTransformTest implements Serializable { 
@Ignore("BEAM-14148") @Test public void trivialPythonTransform() { @@ -184,6 +187,29 @@ public void generateArgsWithCustomType() { assertEquals(456, (int) receivedRow.getRow("field1").getInt32("intField")); } + @Test + public void generateArgsWithPythonCallableSource() { + PythonExternalTransform transform = + PythonExternalTransform + .>, PCollection>>>from( + "DummyTransform") + .withArgs(PythonCallableSource.of("dummy data")); + Row receivedRow = transform.buildOrGetArgsRow(); + assertTrue(receivedRow.getValue("field0") instanceof PythonCallableSource); + } + + @Test + public void generateArgsWithTypeHint() { + PythonExternalTransform transform = + PythonExternalTransform + .>, PCollection>>>from( + "DummyTransform") + .withArgs(Instant.ofEpochSecond(0)) + .withTypeHint(Instant.class, Schema.FieldType.logicalType(new MicrosInstant())); + Row receivedRow = transform.buildOrGetArgsRow(); + assertTrue(receivedRow.getValue("field0") instanceof Instant); + } + @Test public void generateKwargsEmpty() { PythonExternalTransform transform = @@ -274,6 +300,29 @@ public void generateKwargsWithCustomType() { assertEquals(456, (int) receivedRow.getRow("customField1").getInt32("intField")); } + @Test + public void generateKwargsWithPythonCallableSource() { + PythonExternalTransform transform = + PythonExternalTransform + .>, PCollection>>>from( + "DummyTransform") + .withKwarg("customField0", PythonCallableSource.of("dummy data")); + Row receivedRow = transform.buildOrGetKwargsRow(); + assertTrue(receivedRow.getValue("customField0") instanceof PythonCallableSource); + } + + @Test + public void generateKwargsWithTypeHint() { + PythonExternalTransform transform = + PythonExternalTransform + .>, PCollection>>>from( + "DummyTransform") + .withKwarg("customField0", Instant.ofEpochSecond(0)) + .withTypeHint(Instant.class, Schema.FieldType.logicalType(new MicrosInstant())); + Row receivedRow = transform.buildOrGetKwargsRow(); + assertTrue(receivedRow.getValue("customField0") 
instanceof Instant); + } + @Test public void generateKwargsFromMap() { Map kwargsMap = diff --git a/sdks/java/io/amazon-web-services/src/main/java/org/apache/beam/sdk/io/aws/s3/DefaultS3ClientBuilderFactory.java b/sdks/java/io/amazon-web-services/src/main/java/org/apache/beam/sdk/io/aws/s3/DefaultS3ClientBuilderFactory.java index 7ed6f6b4cf44..a00cfc5fea01 100644 --- a/sdks/java/io/amazon-web-services/src/main/java/org/apache/beam/sdk/io/aws/s3/DefaultS3ClientBuilderFactory.java +++ b/sdks/java/io/amazon-web-services/src/main/java/org/apache/beam/sdk/io/aws/s3/DefaultS3ClientBuilderFactory.java @@ -22,8 +22,6 @@ import org.apache.beam.sdk.io.aws.options.S3ClientBuilderFactory; import org.apache.beam.sdk.io.aws.options.S3Options; import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Strings; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; /** * Construct AmazonS3ClientBuilder with default values of S3 client properties like path style @@ -31,8 +29,6 @@ */ public class DefaultS3ClientBuilderFactory implements S3ClientBuilderFactory { - private static final Logger LOG = LoggerFactory.getLogger(DefaultS3ClientBuilderFactory.class); - @Override public AmazonS3ClientBuilder createBuilder(S3Options s3Options) { AmazonS3ClientBuilder builder = @@ -49,10 +45,6 @@ public AmazonS3ClientBuilder createBuilder(S3Options s3Options) { s3Options.getAwsServiceEndpoint(), s3Options.getAwsRegion())); } else if (!Strings.isNullOrEmpty(s3Options.getAwsRegion())) { builder = builder.withRegion(s3Options.getAwsRegion()); - } else { - LOG.info( - "The AWS S3 Beam extension was included in this build, but the awsRegion flag " - + "was not specified. 
If you don't plan to use S3, then ignore this message."); } return builder; } diff --git a/sdks/java/io/amazon-web-services2/src/main/java/org/apache/beam/sdk/io/aws2/s3/DefaultS3ClientBuilderFactory.java b/sdks/java/io/amazon-web-services2/src/main/java/org/apache/beam/sdk/io/aws2/s3/DefaultS3ClientBuilderFactory.java index acb04d913c1f..4d4209a22bb3 100644 --- a/sdks/java/io/amazon-web-services2/src/main/java/org/apache/beam/sdk/io/aws2/s3/DefaultS3ClientBuilderFactory.java +++ b/sdks/java/io/amazon-web-services2/src/main/java/org/apache/beam/sdk/io/aws2/s3/DefaultS3ClientBuilderFactory.java @@ -21,8 +21,6 @@ import org.apache.beam.sdk.io.aws2.options.S3ClientBuilderFactory; import org.apache.beam.sdk.io.aws2.options.S3Options; import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.annotations.VisibleForTesting; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import software.amazon.awssdk.services.s3.S3Client; import software.amazon.awssdk.services.s3.S3ClientBuilder; @@ -32,8 +30,6 @@ */ public class DefaultS3ClientBuilderFactory implements S3ClientBuilderFactory { - private static final Logger LOG = LoggerFactory.getLogger(DefaultS3ClientBuilderFactory.class); - @Override public S3ClientBuilder createBuilder(S3Options s3Options) { return createBuilder(S3Client.builder(), s3Options); @@ -41,11 +37,6 @@ public S3ClientBuilder createBuilder(S3Options s3Options) { @VisibleForTesting static S3ClientBuilder createBuilder(S3ClientBuilder builder, S3Options s3Options) { - if (s3Options.getAwsRegion() == null) { - LOG.info( - "The AWS S3 Beam extension was included in this build, but the awsRegion flag " - + "was not specified. 
If you don't plan to use S3, then ignore this message."); - } return ClientBuilderFactory.getFactory(s3Options).create(builder, s3Options); } } diff --git a/sdks/java/io/cdap/build.gradle b/sdks/java/io/cdap/build.gradle index 4ef361924d75..9fe7d305a296 100644 --- a/sdks/java/io/cdap/build.gradle +++ b/sdks/java/io/cdap/build.gradle @@ -38,15 +38,22 @@ interface for integration with CDAP plugins.""" */ dependencies { - implementation library.java.guava implementation library.java.cdap_api - implementation library.java.cdap_common + implementation library.java.cdap_api_commons + implementation (library.java.cdap_common) { + exclude module: "log4j-over-slf4j" + } + implementation library.java.cdap_etl_api + implementation library.java.cdap_etl_api_spark implementation library.java.jackson_core implementation library.java.jackson_databind + implementation library.java.guava implementation library.java.slf4j_api + implementation library.java.tephra implementation project(path: ":sdks:java:core", configuration: "shadow") testImplementation library.java.cdap_plugin_service_now testImplementation library.java.cdap_etl_api testImplementation library.java.vendored_guava_26_0_jre testImplementation library.java.junit + testImplementation project(path: ":runners:direct-java", configuration: "shadow") } diff --git a/sdks/java/io/cdap/src/main/java/org/apache/beam/sdk/io/cdap/context/BatchContextImpl.java b/sdks/java/io/cdap/src/main/java/org/apache/beam/sdk/io/cdap/context/BatchContextImpl.java new file mode 100644 index 000000000000..06b174062df0 --- /dev/null +++ b/sdks/java/io/cdap/src/main/java/org/apache/beam/sdk/io/cdap/context/BatchContextImpl.java @@ -0,0 +1,232 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.beam.sdk.io.cdap.context; + +import io.cdap.cdap.api.data.DatasetInstantiationException; +import io.cdap.cdap.api.data.batch.InputFormatProvider; +import io.cdap.cdap.api.data.schema.Schema; +import io.cdap.cdap.api.dataset.Dataset; +import io.cdap.cdap.api.dataset.DatasetManagementException; +import io.cdap.cdap.api.dataset.DatasetProperties; +import io.cdap.cdap.api.metadata.Metadata; +import io.cdap.cdap.api.metadata.MetadataEntity; +import io.cdap.cdap.api.metadata.MetadataException; +import io.cdap.cdap.api.metadata.MetadataScope; +import io.cdap.cdap.api.plugin.PluginProperties; +import io.cdap.cdap.etl.api.FailureCollector; +import io.cdap.cdap.etl.api.Lookup; +import io.cdap.cdap.etl.api.StageMetrics; +import io.cdap.cdap.etl.api.SubmitterLifecycle; +import io.cdap.cdap.etl.api.action.SettableArguments; +import io.cdap.cdap.etl.api.batch.BatchContext; +import io.cdap.cdap.etl.api.lineage.field.FieldOperation; +import java.net.URL; +import java.sql.Timestamp; +import java.util.List; +import java.util.Map; +import javax.annotation.Nullable; + +/** Class for Batch, Sink and Stream CDAP wrapper classes that use it to provide common details. 
*/ +@SuppressWarnings({"TypeParameterUnusedInFormals", "nullness"}) +public abstract class BatchContextImpl implements BatchContext { + + private final FailureCollectorWrapper failureCollector = new FailureCollectorWrapper(); + + /** + * This should be set after {@link SubmitterLifecycle#prepareRun(Object)} call with passing this + * context object as a param. + */ + protected InputFormatProvider inputFormatProvider; + + private final Timestamp startTime = new Timestamp(System.currentTimeMillis()); + + public InputFormatProvider getInputFormatProvider() { + return inputFormatProvider; + } + + @Override + public String getStageName() { + return null; + } + + @Override + public String getNamespace() { + return null; + } + + @Override + public String getPipelineName() { + return null; + } + + @Override + public long getLogicalStartTime() { + return this.startTime.getTime(); + } + + @Override + public StageMetrics getMetrics() { + return null; + } + + @Override + public PluginProperties getPluginProperties() { + return null; + } + + @Override + public PluginProperties getPluginProperties(String pluginId) { + return null; + } + + @Override + public Class loadPluginClass(String pluginId) { + return null; + } + + @Override + public T newPluginInstance(String pluginId) throws InstantiationException { + return null; + } + + @Nullable + @Override + public Schema getInputSchema() { + return null; + } + + @Override + public @Nullable Map getInputSchemas() { + return null; + } + + @Override + public @Nullable Schema getOutputSchema() { + return null; + } + + @Override + public Map getOutputPortSchemas() { + return null; + } + + @Override + public void createDataset(String datasetName, String typeName, DatasetProperties properties) + throws DatasetManagementException {} + + @Override + public boolean datasetExists(String datasetName) throws DatasetManagementException { + return false; + } + + @Override + public SettableArguments getArguments() { + return null; + } + + @Override 
+ public FailureCollector getFailureCollector() { + return this.failureCollector; + } + + @Nullable + @Override + public URL getServiceURL(String applicationId, String serviceId) { + return null; + } + + @Nullable + @Override + public URL getServiceURL(String serviceId) { + return null; + } + + @Override + public Map getMetadata(MetadataEntity metadataEntity) + throws MetadataException { + return null; + } + + @Override + public Metadata getMetadata(MetadataScope scope, MetadataEntity metadataEntity) + throws MetadataException { + return null; + } + + @Override + public void addProperties(MetadataEntity metadataEntity, Map properties) {} + + @Override + public void addTags(MetadataEntity metadataEntity, String... tags) {} + + @Override + public void addTags(MetadataEntity metadataEntity, Iterable tags) {} + + @Override + public void removeMetadata(MetadataEntity metadataEntity) {} + + @Override + public void removeProperties(MetadataEntity metadataEntity) {} + + @Override + public void removeProperties(MetadataEntity metadataEntity, String... keys) {} + + @Override + public void removeTags(MetadataEntity metadataEntity) {} + + @Override + public void removeTags(MetadataEntity metadataEntity, String... 
tags) {} + + @Override + public void record(List fieldOperations) {} + + @Override + public T getDataset(String name) throws DatasetInstantiationException { + return null; + } + + @Override + public T getDataset(String namespace, String name) + throws DatasetInstantiationException { + return null; + } + + @Override + public T getDataset(String name, Map arguments) + throws DatasetInstantiationException { + return null; + } + + @Override + public T getDataset( + String namespace, String name, Map arguments) + throws DatasetInstantiationException { + return null; + } + + @Override + public void releaseDataset(Dataset dataset) {} + + @Override + public void discardDataset(Dataset dataset) {} + + @Override + public Lookup provide(String table, Map arguments) { + return null; + } +} diff --git a/sdks/java/io/cdap/src/main/java/org/apache/beam/sdk/io/cdap/context/BatchSinkContextImpl.java b/sdks/java/io/cdap/src/main/java/org/apache/beam/sdk/io/cdap/context/BatchSinkContextImpl.java new file mode 100644 index 000000000000..f0374f7793df --- /dev/null +++ b/sdks/java/io/cdap/src/main/java/org/apache/beam/sdk/io/cdap/context/BatchSinkContextImpl.java @@ -0,0 +1,33 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.beam.sdk.io.cdap.context; + +import io.cdap.cdap.api.data.batch.Output; +import io.cdap.cdap.etl.api.batch.BatchSinkContext; + +/** Class for creating context object of different CDAP classes with batch sink type. */ +public class BatchSinkContextImpl extends BatchContextImpl implements BatchSinkContext { + + @Override + public void addOutput(Output output) {} + + @Override + public boolean isPreviewEnabled() { + return false; + } +} diff --git a/sdks/java/io/cdap/src/main/java/org/apache/beam/sdk/io/cdap/context/BatchSourceContextImpl.java b/sdks/java/io/cdap/src/main/java/org/apache/beam/sdk/io/cdap/context/BatchSourceContextImpl.java new file mode 100644 index 000000000000..98532936035d --- /dev/null +++ b/sdks/java/io/cdap/src/main/java/org/apache/beam/sdk/io/cdap/context/BatchSourceContextImpl.java @@ -0,0 +1,40 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.beam.sdk.io.cdap.context; + +import io.cdap.cdap.api.data.batch.Input; +import io.cdap.cdap.etl.api.batch.BatchSourceContext; + +/** Class for creating context object of different CDAP classes with batch source type. 
*/ +public class BatchSourceContextImpl extends BatchContextImpl implements BatchSourceContext { + + @Override + public void setInput(Input input) { + this.inputFormatProvider = ((Input.InputFormatProviderInput) input).getInputFormatProvider(); + } + + @Override + public boolean isPreviewEnabled() { + return false; + } + + @Override + public int getMaxPreviewRecords() { + return 0; + } +} diff --git a/sdks/java/io/cdap/src/main/java/org/apache/beam/sdk/io/cdap/context/FailureCollectorWrapper.java b/sdks/java/io/cdap/src/main/java/org/apache/beam/sdk/io/cdap/context/FailureCollectorWrapper.java new file mode 100644 index 000000000000..d697909d02ef --- /dev/null +++ b/sdks/java/io/cdap/src/main/java/org/apache/beam/sdk/io/cdap/context/FailureCollectorWrapper.java @@ -0,0 +1,55 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.beam.sdk.io.cdap.context; + +import io.cdap.cdap.etl.api.FailureCollector; +import io.cdap.cdap.etl.api.validation.ValidationException; +import io.cdap.cdap.etl.api.validation.ValidationFailure; +import java.util.ArrayList; +import javax.annotation.Nullable; + +/** Class FailureCollectorWrapper is a class for collecting ValidationFailure. 
*/ +public class FailureCollectorWrapper implements FailureCollector { + private ArrayList failuresCollection; + + public FailureCollectorWrapper() { + this.failuresCollection = new ArrayList<>(); + } + + @Override + public ValidationFailure addFailure(String message, @Nullable String correctiveAction) { + ValidationFailure validationFailure = new ValidationFailure(message, correctiveAction); + failuresCollection.add(validationFailure); + + return validationFailure; + } + + @Override + public ValidationException getOrThrowException() throws ValidationException { + if (failuresCollection.isEmpty()) { + return new ValidationException(this.failuresCollection); + } + + throw new ValidationException(this.failuresCollection); + } + + @Override + public ArrayList getValidationFailures() { + return this.failuresCollection; + } +} diff --git a/sdks/java/io/cdap/src/main/java/org/apache/beam/sdk/io/cdap/context/StreamingSourceContextImpl.java b/sdks/java/io/cdap/src/main/java/org/apache/beam/sdk/io/cdap/context/StreamingSourceContextImpl.java new file mode 100644 index 000000000000..7c09ba19f5fa --- /dev/null +++ b/sdks/java/io/cdap/src/main/java/org/apache/beam/sdk/io/cdap/context/StreamingSourceContextImpl.java @@ -0,0 +1,37 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.beam.sdk.io.cdap.context; + +import io.cdap.cdap.api.data.schema.Schema; +import io.cdap.cdap.api.dataset.DatasetManagementException; +import io.cdap.cdap.etl.api.streaming.StreamingSourceContext; +import javax.annotation.Nullable; +import org.apache.tephra.TransactionFailureException; + +/** Class for creating context object of different CDAP classes with stream source type. */ +public class StreamingSourceContextImpl extends BatchContextImpl implements StreamingSourceContext { + + @Override + public void registerLineage(String referenceName, @Nullable Schema schema) + throws DatasetManagementException, TransactionFailureException {} + + @Override + public boolean isPreviewEnabled() { + return false; + } +} diff --git a/sdks/java/io/cdap/src/main/java/org/apache/beam/sdk/io/cdap/context/package-info.java b/sdks/java/io/cdap/src/main/java/org/apache/beam/sdk/io/cdap/context/package-info.java new file mode 100644 index 000000000000..f6548ccdf932 --- /dev/null +++ b/sdks/java/io/cdap/src/main/java/org/apache/beam/sdk/io/cdap/context/package-info.java @@ -0,0 +1,24 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** Context for CDAP classes. */ +@Experimental(Kind.SOURCE_SINK) +package org.apache.beam.sdk.io.cdap.context; + +import org.apache.beam.sdk.annotations.Experimental; +import org.apache.beam.sdk.annotations.Experimental.Kind; diff --git a/sdks/java/io/cdap/src/test/java/org/apache/beam/sdk/io/cdap/context/BatchContextImplTest.java b/sdks/java/io/cdap/src/test/java/org/apache/beam/sdk/io/cdap/context/BatchContextImplTest.java new file mode 100644 index 000000000000..8f679fe3fc08 --- /dev/null +++ b/sdks/java/io/cdap/src/test/java/org/apache/beam/sdk/io/cdap/context/BatchContextImplTest.java @@ -0,0 +1,59 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.beam.sdk.io.cdap.context; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; + +import io.cdap.cdap.etl.api.FailureCollector; +import io.cdap.cdap.etl.api.validation.ValidationException; +import java.sql.Timestamp; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.junit.runners.JUnit4; + +/** Test class for {@link BatchContextImpl}. 
*/ +@RunWith(JUnit4.class) +public class BatchContextImplTest { + + @Test + public void getLogicalStartTime() { + /** arrange */ + Timestamp expectedStartTime = new Timestamp(System.currentTimeMillis()); + BatchContextImpl context = new BatchSourceContextImpl(); + + /** act */ + long actualStartTime = context.getLogicalStartTime(); + + /** assert */ + assertTrue((expectedStartTime.getTime() - actualStartTime) <= 100); + } + + @Test + public void getFailureCollector() { + /** arrange */ + BatchContextImpl context = new BatchSinkContextImpl(); + + /** act */ + FailureCollector failureCollector = context.getFailureCollector(); + + /** assert */ + ValidationException validationException = failureCollector.getOrThrowException(); + assertEquals(0, validationException.getFailures().size()); + } +} diff --git a/sdks/java/io/cdap/src/test/java/org/apache/beam/sdk/io/cdap/context/FailureCollectorWrapperTest.java b/sdks/java/io/cdap/src/test/java/org/apache/beam/sdk/io/cdap/context/FailureCollectorWrapperTest.java new file mode 100644 index 000000000000..0e35c8a06a59 --- /dev/null +++ b/sdks/java/io/cdap/src/test/java/org/apache/beam/sdk/io/cdap/context/FailureCollectorWrapperTest.java @@ -0,0 +1,93 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.beam.sdk.io.cdap.context; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertThrows; + +import io.cdap.cdap.etl.api.validation.ValidationException; +import io.cdap.cdap.etl.api.validation.ValidationFailure; +import java.util.ArrayList; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.junit.runners.JUnit4; + +/** Test class for {@link FailureCollectorWrapper}. */ +@RunWith(JUnit4.class) +public class FailureCollectorWrapperTest { + + @Test + public void addFailure() { + /** arrange */ + FailureCollectorWrapper failureCollectorWrapper = new FailureCollectorWrapper(); + + /** act */ + RuntimeException error = new RuntimeException("An error has occurred"); + failureCollectorWrapper.addFailure(error.getMessage(), null); + + /** assert */ + assertThrows(ValidationException.class, () -> failureCollectorWrapper.getOrThrowException()); + } + + @Test + public void getOrThrowException() { + /** arrange */ + FailureCollectorWrapper failureCollectorWrapper = new FailureCollectorWrapper(); + String errorMessage = "An error has occurred"; + String expectedMessage = "Errors were encountered during validation. 
An error has occurred"; + + FailureCollectorWrapper emptyFailureCollectorWrapper = new FailureCollectorWrapper(); + + RuntimeException error = new RuntimeException(errorMessage); + failureCollectorWrapper.addFailure(error.getMessage(), null); + + /** act && assert */ + ValidationException e = + assertThrows( + ValidationException.class, () -> failureCollectorWrapper.getOrThrowException()); + assertEquals(expectedMessage, e.getMessage()); + + // A case when return ValidationException with empty collector + ArrayList exceptionCollector = + emptyFailureCollectorWrapper.getValidationFailures(); + assertEquals(0, exceptionCollector.size()); + } + + @Test + public void getValidationFailures() { + /** arrange */ + FailureCollectorWrapper failureCollectorWrapper = new FailureCollectorWrapper(); + String errorMessage = "An error has occurred"; + + FailureCollectorWrapper emptyFailureCollectorWrapper = new FailureCollectorWrapper(); + + RuntimeException error = new RuntimeException(errorMessage); + failureCollectorWrapper.addFailure(error.getMessage(), null); + + /** act */ + ArrayList exceptionCollector = + failureCollectorWrapper.getValidationFailures(); + ArrayList emptyExceptionCollector = + emptyFailureCollectorWrapper.getValidationFailures(); + + /** assert */ + assertEquals(1, exceptionCollector.size()); + assertEquals(errorMessage, exceptionCollector.get(0).getMessage()); + assertEquals(0, emptyExceptionCollector.size()); + } +} diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BatchLoads.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BatchLoads.java index 98310ebb8e14..fc2b727ec79c 100644 --- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BatchLoads.java +++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BatchLoads.java @@ -357,6 +357,10 @@ private WriteResult expandTriggered(PCollection> inpu 
PCollection> tempTables = writeTempTables(partitions.get(multiPartitionsTag), tempLoadJobIdPrefixView); + List> sideInputsForUpdateSchema = + Lists.newArrayList(tempLoadJobIdPrefixView); + sideInputsForUpdateSchema.addAll(dynamicDestinations.getSideInputs()); + PCollection successfulMultiPartitionWrites = tempTables // Now that the load job has happened, we want the rename to happen immediately. @@ -368,6 +372,22 @@ private WriteResult expandTriggered(PCollection> inpu .setCoder(KvCoder.of(VoidCoder.of(), tempTables.getCoder())) .apply("GroupByKey", GroupByKey.create()) .apply("Extract Values", Values.create()) + .apply( + ParDo.of( + new UpdateSchemaDestination( + bigQueryServices, + tempLoadJobIdPrefixView, + loadJobProjectId, + WriteDisposition.WRITE_APPEND, + CreateDisposition.CREATE_NEVER, + maxRetryJobs, + ignoreUnknownValues, + kmsKey, + rowWriterFactory.getSourceFormat(), + useAvroLogicalTypes, + schemaUpdateOptions, + dynamicDestinations)) + .withSideInputs(sideInputsForUpdateSchema)) .apply( "WriteRenameTriggered", ParDo.of( @@ -444,9 +464,29 @@ public WriteResult expandUntriggered(PCollection> inp PCollection successfulSinglePartitionWrites = writeSinglePartition(partitions.get(singlePartitionTag), loadJobIdPrefixView); + List> sideInputsForUpdateSchema = + Lists.newArrayList(tempLoadJobIdPrefixView); + sideInputsForUpdateSchema.addAll(dynamicDestinations.getSideInputs()); + PCollection successfulMultiPartitionWrites = writeTempTables(partitions.get(multiPartitionsTag), tempLoadJobIdPrefixView) .apply("ReifyRenameInput", new ReifyAsIterable<>()) + .apply( + ParDo.of( + new UpdateSchemaDestination( + bigQueryServices, + tempLoadJobIdPrefixView, + loadJobProjectId, + WriteDisposition.WRITE_APPEND, + CreateDisposition.CREATE_NEVER, + maxRetryJobs, + ignoreUnknownValues, + kmsKey, + rowWriterFactory.getSourceFormat(), + useAvroLogicalTypes, + schemaUpdateOptions, + dynamicDestinations)) + .withSideInputs(sideInputsForUpdateSchema)) .apply( 
"WriteRenameUntriggered", ParDo.of( @@ -679,17 +719,6 @@ private PCollection> writeTempTables( ShardedKeyCoder.of(NullableCoder.of(destinationCoder)), WritePartition.ResultCoder.INSTANCE); - // If the final destination table exists already (and we're appending to it), then the temp - // tables must exactly match schema, partitioning, etc. Wrap the DynamicDestinations object - // with one that makes this happen. - @SuppressWarnings("unchecked") - DynamicDestinations destinations = dynamicDestinations; - if (createDisposition.equals(CreateDisposition.CREATE_IF_NEEDED) - || createDisposition.equals(CreateDisposition.CREATE_NEVER)) { - destinations = - DynamicDestinationsHelpers.matchTableDynamicDestinations(destinations, bigQueryServices); - } - Coder tableDestinationCoder = clusteringEnabled ? TableDestinationCoderV3.of() : TableDestinationCoderV2.of(); @@ -711,7 +740,7 @@ private PCollection> writeTempTables( WriteDisposition.WRITE_EMPTY, CreateDisposition.CREATE_IF_NEEDED, sideInputs, - destinations, + dynamicDestinations, loadJobProjectId, maxRetryJobs, ignoreUnknownValues, @@ -720,7 +749,7 @@ private PCollection> writeTempTables( useAvroLogicalTypes, // Note that we can't pass through the schema update options when creating temporary // tables. They also shouldn't be needed. See BEAM-12482 for additional details. 
- Collections.emptySet(), + schemaUpdateOptions, tempDataset)) .setCoder(KvCoder.of(tableDestinationCoder, WriteTables.ResultCoder.INSTANCE)); } diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryServices.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryServices.java index 23fce8ba6ff0..598431407d14 100644 --- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryServices.java +++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryServices.java @@ -17,6 +17,7 @@ */ package org.apache.beam.sdk.io.gcp.bigquery; +import com.google.api.client.http.AbstractInputStreamContent; import com.google.api.core.ApiFuture; import com.google.api.services.bigquery.model.Dataset; import com.google.api.services.bigquery.model.Job; @@ -68,6 +69,14 @@ public interface JobService extends AutoCloseable { /** Start a BigQuery load job. */ void startLoadJob(JobReference jobRef, JobConfigurationLoad loadConfig) throws InterruptedException, IOException; + + /** Start a BigQuery load job with stream content. */ + void startLoadJob( + JobReference jobRef, + JobConfigurationLoad loadConfig, + AbstractInputStreamContent streamContent) + throws InterruptedException, IOException; + /** Start a BigQuery extract job. 
*/ void startExtractJob(JobReference jobRef, JobConfigurationExtract extractConfig) throws InterruptedException, IOException; diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryServicesImpl.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryServicesImpl.java index 2949150c9eeb..0185b5a40693 100644 --- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryServicesImpl.java +++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryServicesImpl.java @@ -22,6 +22,7 @@ import com.google.api.client.googleapis.json.GoogleJsonError; import com.google.api.client.googleapis.json.GoogleJsonResponseException; import com.google.api.client.googleapis.services.AbstractGoogleClientRequest; +import com.google.api.client.http.AbstractInputStreamContent; import com.google.api.client.http.HttpRequestInitializer; import com.google.api.client.util.BackOff; import com.google.api.client.util.BackOffUtils; @@ -236,6 +237,28 @@ public void startLoadJob(JobReference jobRef, JobConfigurationLoad loadConfig) startJob(job, errorExtractor, client); } + /** + * {@inheritDoc} + * + *

Tries executing the RPC for at most {@code MAX_RPC_RETRIES} times until it succeeds. + * + * @throws IOException if it exceeds {@code MAX_RPC_RETRIES} attempts. + */ + @Override + public void startLoadJob( + JobReference jobRef, JobConfigurationLoad loadConfig, AbstractInputStreamContent stream) + throws InterruptedException, IOException { + Map labelMap = new HashMap<>(); + Job job = + new Job() + .setJobReference(jobRef) + .setConfiguration( + new JobConfiguration() + .setLoad(loadConfig) + .setLabels(this.bqIOMetadata.addAdditionalJobLabels(labelMap))); + startJobStream(job, stream, errorExtractor, client, Sleeper.DEFAULT, createDefaultBackoff()); + } + /** * {@inheritDoc} * @@ -338,6 +361,47 @@ static void startJob( lastException); } + static void startJobStream( + Job job, + AbstractInputStreamContent streamContent, + ApiErrorExtractor errorExtractor, + Bigquery client, + Sleeper sleeper, + BackOff backOff) + throws IOException, InterruptedException { + JobReference jobReference = job.getJobReference(); + Exception exception; + do { + try { + client + .jobs() + .insert(jobReference.getProjectId(), job, streamContent) + .setPrettyPrint(false) + .execute(); + LOG.info( + "Started BigQuery job: {}.\n{}", + jobReference, + formatBqStatusCommand(jobReference.getProjectId(), jobReference.getJobId())); + return; + } catch (IOException e) { + if (errorExtractor.itemAlreadyExists(e)) { + LOG.info( + "BigQuery job " + jobReference + " already exists, will not retry inserting it:", + e); + return; // SUCCEEDED + } + // ignore and retry + LOG.info("Failed to insert job " + jobReference + ", will retry:", e); + exception = e; + } + } while (nextBackOff(sleeper, backOff)); + throw new IOException( + String.format( + "Unable to insert job: %s, aborting after %d .", + jobReference.getJobId(), MAX_RPC_RETRIES), + exception); + } + @Override public Job pollJob(JobReference jobRef, int maxAttempts) throws InterruptedException { BackOff backoff = diff --git 
a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/UpdateSchemaDestination.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/UpdateSchemaDestination.java new file mode 100644 index 000000000000..4ae1064bc431 --- /dev/null +++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/UpdateSchemaDestination.java @@ -0,0 +1,338 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.beam.sdk.io.gcp.bigquery; + +import com.google.api.client.http.ByteArrayContent; +import com.google.api.services.bigquery.model.Clustering; +import com.google.api.services.bigquery.model.EncryptionConfiguration; +import com.google.api.services.bigquery.model.JobConfigurationLoad; +import com.google.api.services.bigquery.model.JobReference; +import com.google.api.services.bigquery.model.Table; +import com.google.api.services.bigquery.model.TableReference; +import com.google.api.services.bigquery.model.TableSchema; +import com.google.api.services.bigquery.model.TimePartitioning; +import java.io.IOException; +import java.util.List; +import java.util.Set; +import java.util.stream.Collectors; +import javax.annotation.Nullable; +import org.apache.beam.sdk.io.gcp.bigquery.BigQueryServices.DatasetService; +import org.apache.beam.sdk.options.PipelineOptions; +import org.apache.beam.sdk.options.ValueProvider; +import org.apache.beam.sdk.transforms.DoFn; +import org.apache.beam.sdk.transforms.windowing.BoundedWindow; +import org.apache.beam.sdk.values.KV; +import org.apache.beam.sdk.values.PCollectionView; +import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.Lists; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +@SuppressWarnings({"nullness", "rawtypes"}) +public class UpdateSchemaDestination + extends DoFn< + Iterable>, + Iterable>> { + + private static final Logger LOG = LoggerFactory.getLogger(UpdateSchemaDestination.class); + private final BigQueryServices bqServices; + private final PCollectionView loadJobIdPrefixView; + private final ValueProvider loadJobProjectId; + private transient @Nullable DatasetService datasetService; + + private final int maxRetryJobs; + private final @Nullable String kmsKey; + private final String sourceFormat; + private final boolean useAvroLogicalTypes; + private @Nullable BigQueryServices.JobService jobService; + private final boolean ignoreUnknownValues; + private final Set 
schemaUpdateOptions; + private BigQueryIO.Write.WriteDisposition writeDisposition; + private BigQueryIO.Write.CreateDisposition createDisposition; + private DynamicDestinations dynamicDestinations; + + private static class PendingJobData { + final BigQueryHelpers.PendingJob retryJob; + final TableDestination tableDestination; + final BoundedWindow window; + + public PendingJobData( + BigQueryHelpers.PendingJob retryJob, + TableDestination tableDestination, + BoundedWindow window) { + this.retryJob = retryJob; + this.tableDestination = tableDestination; + this.window = window; + } + } + + private List pendingJobs = Lists.newArrayList(); + + public UpdateSchemaDestination( + BigQueryServices bqServices, + PCollectionView loadJobIdPrefixView, + @Nullable ValueProvider loadJobProjectId, + BigQueryIO.Write.WriteDisposition writeDisposition, + BigQueryIO.Write.CreateDisposition createDisposition, + int maxRetryJobs, + boolean ignoreUnknownValues, + String kmsKey, + String sourceFormat, + boolean useAvroLogicalTypes, + Set schemaUpdateOptions, + DynamicDestinations dynamicDestinations) { + this.loadJobProjectId = loadJobProjectId; + this.loadJobIdPrefixView = loadJobIdPrefixView; + this.bqServices = bqServices; + this.maxRetryJobs = maxRetryJobs; + this.ignoreUnknownValues = ignoreUnknownValues; + this.kmsKey = kmsKey; + this.sourceFormat = sourceFormat; + this.useAvroLogicalTypes = useAvroLogicalTypes; + this.schemaUpdateOptions = schemaUpdateOptions; + this.createDisposition = createDisposition; + this.writeDisposition = writeDisposition; + this.dynamicDestinations = dynamicDestinations; + } + + @StartBundle + public void startBundle(StartBundleContext c) { + pendingJobs.clear(); + } + + @ProcessElement + public void processElement( + @Element Iterable> element, + ProcessContext context, + BoundedWindow window) + throws IOException { + Object destination = null; + for (KV entry : element) { + destination = entry.getKey(); + if (destination != null) { + break; + } + } + 
if (destination != null) { + TableDestination tableDestination = dynamicDestinations.getTable(destination); + TableSchema schema = dynamicDestinations.getSchema(destination); + TableReference tableReference = tableDestination.getTableReference(); + String jobIdPrefix = + BigQueryResourceNaming.createJobIdWithDestination( + context.sideInput(loadJobIdPrefixView), + tableDestination, + 1, + context.pane().getIndex()); + jobIdPrefix += "_schemaUpdateDestination"; + BigQueryHelpers.PendingJob updateSchemaDestinationJob = + startZeroLoadJob( + getJobService(context.getPipelineOptions().as(BigQueryOptions.class)), + getDatasetService(context.getPipelineOptions().as(BigQueryOptions.class)), + jobIdPrefix, + tableReference, + tableDestination.getTimePartitioning(), + tableDestination.getClustering(), + schema, + writeDisposition, + createDisposition, + schemaUpdateOptions); + if (updateSchemaDestinationJob != null) { + pendingJobs.add(new PendingJobData(updateSchemaDestinationJob, tableDestination, window)); + } + context.output(element); + } + } + + @Teardown + public void onTeardown() { + try { + if (datasetService != null) { + datasetService.close(); + datasetService = null; + } + if (jobService != null) { + jobService.close(); + jobService = null; + } + } catch (Exception e) { + throw new RuntimeException(e); + } + } + + @FinishBundle + public void finishBundle(FinishBundleContext context) throws Exception { + DatasetService datasetService = + getDatasetService(context.getPipelineOptions().as(BigQueryOptions.class)); + BigQueryHelpers.PendingJobManager jobManager = new BigQueryHelpers.PendingJobManager(); + for (final PendingJobData pendingJobData : pendingJobs) { + jobManager = + jobManager.addPendingJob( + pendingJobData.retryJob, + j -> { + try { + if (pendingJobData.tableDestination.getTableDescription() != null) { + TableReference ref = pendingJobData.tableDestination.getTableReference(); + datasetService.patchTableDescription( + ref.clone() + 
.setTableId(BigQueryHelpers.stripPartitionDecorator(ref.getTableId())), + pendingJobData.tableDestination.getTableDescription()); + } + return null; + } catch (IOException | InterruptedException e) { + return e; + } + }); + } + jobManager.waitForDone(); + } + + private BigQueryHelpers.PendingJob startZeroLoadJob( + BigQueryServices.JobService jobService, + DatasetService datasetService, + String jobIdPrefix, + TableReference tableReference, + TimePartitioning timePartitioning, + Clustering clustering, + @Nullable TableSchema schema, + BigQueryIO.Write.WriteDisposition writeDisposition, + BigQueryIO.Write.CreateDisposition createDisposition, + Set schemaUpdateOptions) { + JobConfigurationLoad loadConfig = + new JobConfigurationLoad() + .setDestinationTable(tableReference) + .setSchema(schema) + .setWriteDisposition(writeDisposition.name()) + .setCreateDisposition(createDisposition.name()) + .setSourceFormat(sourceFormat) + .setIgnoreUnknownValues(ignoreUnknownValues) + .setUseAvroLogicalTypes(useAvroLogicalTypes); + if (schemaUpdateOptions != null) { + List options = + schemaUpdateOptions.stream() + .map(Enum::name) + .collect(Collectors.toList()); + loadConfig.setSchemaUpdateOptions(options); + } + if (!loadConfig + .getWriteDisposition() + .equals(BigQueryIO.Write.WriteDisposition.WRITE_TRUNCATE.toString()) + && !loadConfig + .getWriteDisposition() + .equals(BigQueryIO.Write.WriteDisposition.WRITE_APPEND.toString())) { + return null; + } + Table destinationTable = null; + try { + destinationTable = datasetService.getTable(tableReference); + if (destinationTable == null) { + return null; // no need to update schema ahead if table does not exists + } + } catch (IOException | InterruptedException e) { + LOG.warn("Failed to get table {} with {}", tableReference, e.toString()); + throw new RuntimeException(e); + } + if (destinationTable.getSchema().equals(schema)) { + return null; // no need to update schema ahead if schema is already the same + } + if 
(timePartitioning != null) { + loadConfig.setTimePartitioning(timePartitioning); + // only set clustering if timePartitioning is set + if (clustering != null) { + loadConfig.setClustering(clustering); + } + } + if (kmsKey != null) { + loadConfig.setDestinationEncryptionConfiguration( + new EncryptionConfiguration().setKmsKeyName(kmsKey)); + } + String projectId = + loadJobProjectId == null || loadJobProjectId.get() == null + ? tableReference.getProjectId() + : loadJobProjectId.get(); + String bqLocation = + BigQueryHelpers.getDatasetLocation( + datasetService, tableReference.getProjectId(), tableReference.getDatasetId()); + + BigQueryHelpers.PendingJob retryJob = + new BigQueryHelpers.PendingJob( + // Function to load the data. + jobId -> { + JobReference jobRef = + new JobReference() + .setProjectId(projectId) + .setJobId(jobId.getJobId()) + .setLocation(bqLocation); + LOG.info( + "Loading zero rows using job {}, job id {} iteration {}", + tableReference, + jobRef, + jobId.getRetryIndex()); + try { + jobService.startLoadJob( + jobRef, loadConfig, new ByteArrayContent("text/plain", new byte[0])); + } catch (IOException | InterruptedException e) { + LOG.warn("Load job {} failed with {}", jobRef, e.toString()); + throw new RuntimeException(e); + } + return null; + }, + // Function to poll the result of a load job. + jobId -> { + JobReference jobRef = + new JobReference() + .setProjectId(projectId) + .setJobId(jobId.getJobId()) + .setLocation(bqLocation); + try { + return jobService.pollJob(jobRef, BatchLoads.LOAD_JOB_POLL_MAX_RETRIES); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + }, + // Function to lookup a job. 
+ jobId -> { + JobReference jobRef = + new JobReference() + .setProjectId(projectId) + .setJobId(jobId.getJobId()) + .setLocation(bqLocation); + try { + return jobService.getJob(jobRef); + } catch (InterruptedException | IOException e) { + throw new RuntimeException(e); + } + }, + maxRetryJobs, + jobIdPrefix); + return retryJob; + } + + private BigQueryServices.JobService getJobService(PipelineOptions pipelineOptions) + throws IOException { + if (jobService == null) { + jobService = bqServices.getJobService(pipelineOptions.as(BigQueryOptions.class)); + } + return jobService; + } + + private DatasetService getDatasetService(PipelineOptions pipelineOptions) throws IOException { + if (datasetService == null) { + datasetService = bqServices.getDatasetService(pipelineOptions.as(BigQueryOptions.class)); + } + return datasetService; + } +} diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/WriteTables.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/WriteTables.java index 737ab4ff41de..f30388b523cc 100644 --- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/WriteTables.java +++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/WriteTables.java @@ -269,7 +269,7 @@ public void processElement( } else if (tempTable) { // In this case, we are writing to a temp table and always need to create it. // WRITE_TRUNCATE is set so that we properly handle retries of this pane. 
- writeDisposition = WriteDisposition.WRITE_TRUNCATE; + writeDisposition = WriteDisposition.WRITE_APPEND; createDisposition = CreateDisposition.CREATE_IF_NEEDED; } @@ -286,6 +286,7 @@ public void processElement( writeDisposition, createDisposition, schemaUpdateOptions); + pendingJobs.add( new PendingJobData( window, @@ -354,7 +355,6 @@ public void finishBundle(FinishBundleContext c) throws Exception { BigQueryHelpers.stripPartitionDecorator(ref.getTableId())), pendingJob.tableDestination.getTableDescription()); } - Result result = new AutoValue_WriteTables_Result( BigQueryHelpers.toJsonString(pendingJob.tableReference), @@ -451,6 +451,7 @@ public PCollection> expand( .apply(GroupByKey.create()) .apply(Values.create()) .apply(ParDo.of(new GarbageCollectTemporaryFiles())); + return writeTablesOutputs.get(mainOutputTag); } diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/testing/FakeJobService.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/testing/FakeJobService.java index 81017f45c57b..6856a5c20f2e 100644 --- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/testing/FakeJobService.java +++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/testing/FakeJobService.java @@ -20,6 +20,7 @@ import static org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Preconditions.checkArgument; import static org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Preconditions.checkNotNull; +import com.google.api.client.http.AbstractInputStreamContent; import com.google.api.client.json.JsonFactory; import com.google.api.client.util.BackOff; import com.google.api.client.util.BackOffUtils; @@ -177,6 +178,15 @@ public void startLoadJob(JobReference jobRef, JobConfigurationLoad loadConfig) } } + @Override + public void startLoadJob( + JobReference jobRef, + JobConfigurationLoad loadConfig, + AbstractInputStreamContent streamContent) + throws 
InterruptedException, IOException { + // TODO + } + @Override public void startExtractJob(JobReference jobRef, JobConfigurationExtract extractConfig) throws IOException { diff --git a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/BigQuerySchemaUpdateOptionsIT.java b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/BigQuerySchemaUpdateOptionsIT.java index ed75a6688075..dd9ab1508f3f 100644 --- a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/BigQuerySchemaUpdateOptionsIT.java +++ b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/BigQuerySchemaUpdateOptionsIT.java @@ -28,6 +28,7 @@ import java.security.SecureRandom; import java.util.Arrays; import java.util.EnumSet; +import java.util.HashSet; import java.util.List; import java.util.Set; import java.util.stream.Collectors; @@ -219,4 +220,68 @@ public void testAllowFieldRelaxation() throws Exception { List> expectedResult = Arrays.asList(Arrays.asList(value)); runWriteTest(schemaUpdateOptions, tableName, newSchema, rowToInsert, testQuery, expectedResult); } + + @Test + public void runWriteTestTempTables() throws Exception { + String tableName = makeTestTable(); + + Set schemaUpdateOptions = + EnumSet.of(BigQueryIO.Write.SchemaUpdateOption.ALLOW_FIELD_ADDITION); + + TableSchema schema = + new TableSchema() + .setFields( + ImmutableList.of( + new TableFieldSchema().setName("new_field").setType("STRING"), + new TableFieldSchema().setName("optional_field").setType("STRING"), + new TableFieldSchema() + .setName("required_field") + .setType("STRING") + .setMode("REQUIRED"))); + + String[] values = {"meow", "bark"}; + + String testQuery = + String.format( + "SELECT new_field, required_field FROM [%s.%s];", BIG_QUERY_DATASET_ID, tableName); + + List> expectedResult = + Arrays.asList(Arrays.asList(values[0], values[1]), Arrays.asList(values[1], values[0])); + + Options options = 
TestPipeline.testingPipelineOptions().as(Options.class); + options.setTempLocation(options.getTempRoot() + "/bq_it_temp"); + + Pipeline p = Pipeline.create(options); + Create.Values input = + Create.of( + Arrays.asList( + new TableRow().set("new_field", values[0]).set("required_field", values[1]), + new TableRow().set("new_field", values[1]).set("required_field", values[0]))); + + Write writer = + BigQueryIO.writeTableRows() + .to(String.format("%s:%s.%s", options.getProject(), BIG_QUERY_DATASET_ID, tableName)) + .withSchema(schema) + .withCreateDisposition(BigQueryIO.Write.CreateDisposition.CREATE_IF_NEEDED) + .withWriteDisposition(BigQueryIO.Write.WriteDisposition.WRITE_APPEND) + .withSchemaUpdateOptions(schemaUpdateOptions) + .withMaxBytesPerPartition(1) + .withMaxFilesPerPartition(1); + + p.apply(input).apply(writer); + p.run().waitUntilFinish(); + + QueryResponse response = BQ_CLIENT.queryWithRetries(testQuery, project); + + List> result = + response.getRows().stream() + .map( + row -> + row.getF().stream() + .map(cell -> cell.getV().toString()) + .collect(Collectors.toList())) + .collect(Collectors.toList()); + + assertEquals(new HashSet<>(expectedResult), new HashSet<>(result)); + } } diff --git a/sdks/python/apache_beam/examples/wordcount_it_test.py b/sdks/python/apache_beam/examples/wordcount_it_test.py index be8bbbfed8a6..afbe70b9d239 100644 --- a/sdks/python/apache_beam/examples/wordcount_it_test.py +++ b/sdks/python/apache_beam/examples/wordcount_it_test.py @@ -28,6 +28,7 @@ from hamcrest.core.core.allof import all_of from apache_beam.examples import wordcount +from apache_beam.internal.gcp import auth from apache_beam.testing.load_tests.load_test_metrics_utils import InfluxDBMetricsPublisherOptions from apache_beam.testing.load_tests.load_test_metrics_utils import MetricsReader from apache_beam.testing.pipeline_verifiers import FileChecksumMatcher @@ -47,6 +48,44 @@ class WordCountIT(unittest.TestCase): def test_wordcount_it(self): 
self._run_wordcount_it(wordcount.run) + @pytest.mark.it_postcommit + @pytest.mark.sickbay_direct + @pytest.mark.sickbay_spark + @pytest.mark.sickbay_flink + def test_wordcount_impersonation_it(self): + """Tests impersonation on dataflow. + + For testing impersonation, we use three ingredients: + - a principal to impersonate + - a dataflow service account that only that principal is + allowed to launch jobs as + - a temp root that only the above two accounts have access to + + Jenkins and Dataflow workers both run as GCE default service account. + So we remove that account from all the above. + """ + # Credentials need to be reset or this test will fail and credentials + # from a previous test will be used. + auth._Credentials._credentials_init = False + + ACCOUNT_TO_IMPERSONATE = ( + 'allows-impersonation@apache-' + 'beam-testing.iam.gserviceaccount.com') + RUNNER_ACCOUNT = ( + 'impersonation-dataflow-worker@' + 'apache-beam-testing.iam.gserviceaccount.com') + TEMP_DIR = 'gs://impersonation-test-bucket/temp-it' + STAGING_LOCATION = 'gs://impersonation-test-bucket/staging-it' + extra_options = { + 'impersonate_service_account': ACCOUNT_TO_IMPERSONATE, + 'service_account_email': RUNNER_ACCOUNT, + 'temp_location': TEMP_DIR, + 'staging_location': STAGING_LOCATION + } + self._run_wordcount_it(wordcount.run, **extra_options) + # Reset credentials for future tests. 
+ auth._Credentials._credentials_init = False + @pytest.mark.it_postcommit @pytest.mark.it_validatescontainer def test_wordcount_fnapi_it(self): diff --git a/sdks/python/apache_beam/internal/gcp/auth.py b/sdks/python/apache_beam/internal/gcp/auth.py index 439264a9794b..27a3c40cd4b3 100644 --- a/sdks/python/apache_beam/internal/gcp/auth.py +++ b/sdks/python/apache_beam/internal/gcp/auth.py @@ -23,8 +23,12 @@ import socket import threading +from apache_beam.options.pipeline_options import GoogleCloudOptions +from apache_beam.options.pipeline_options import PipelineOptions + # google.auth is only available when Beam is installed with the gcp extra. try: + from google.auth import impersonated_credentials import google.auth import google_auth_httplib2 _GOOGLE_AUTH_AVAILABLE = True @@ -40,6 +44,16 @@ _LOGGER = logging.getLogger(__name__) +CLIENT_SCOPES = [ + 'https://www.googleapis.com/auth/bigquery', + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/devstorage.full_control', + 'https://www.googleapis.com/auth/userinfo.email', + 'https://www.googleapis.com/auth/datastore', + 'https://www.googleapis.com/auth/spanner.admin', + 'https://www.googleapis.com/auth/spanner.data' +] + def set_running_in_gce(worker_executing_project): """For internal use only; no backwards-compatibility guarantees. @@ -59,16 +73,19 @@ def set_running_in_gce(worker_executing_project): executing_project = worker_executing_project -def get_service_credentials(): +def get_service_credentials(pipeline_options): """For internal use only; no backwards-compatibility guarantees. Get credentials to access Google services. + Args: + pipeline_options: Pipeline options, used in creating credentials + like impersonated credentials. Returns: A ``google.auth.credentials.Credentials`` object or None if credentials not found. Returned object is thread-safe. 
""" - return _Credentials.get_service_credentials() + return _Credentials.get_service_credentials(pipeline_options) if _GOOGLE_AUTH_AVAILABLE: @@ -108,10 +125,7 @@ class _Credentials(object): _credentials = None @classmethod - def get_service_credentials(cls): - if cls._credentials_init: - return cls._credentials - + def get_service_credentials(cls, pipeline_options): with cls._credentials_lock: if cls._credentials_init: return cls._credentials @@ -124,13 +138,13 @@ def get_service_credentials(cls): _LOGGER.info( "socket default timeout is %s seconds.", socket.getdefaulttimeout()) - cls._credentials = cls._get_service_credentials() + cls._credentials = cls._get_service_credentials(pipeline_options) cls._credentials_init = True return cls._credentials @staticmethod - def _get_service_credentials(): + def _get_service_credentials(pipeline_options): if not _GOOGLE_AUTH_AVAILABLE: _LOGGER.warning( 'Unable to find default credentials because the google-auth library ' @@ -138,17 +152,10 @@ def _get_service_credentials(): 'Google default credentials. 
Connecting anonymously.') return None - client_scopes = [ - 'https://www.googleapis.com/auth/bigquery', - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/devstorage.full_control', - 'https://www.googleapis.com/auth/userinfo.email', - 'https://www.googleapis.com/auth/datastore', - 'https://www.googleapis.com/auth/spanner.admin', - 'https://www.googleapis.com/auth/spanner.data' - ] try: - credentials, _ = google.auth.default(scopes=client_scopes) # pylint: disable=c-extension-no-member + credentials, _ = google.auth.default(scopes=CLIENT_SCOPES) # pylint: disable=c-extension-no-member + credentials = _Credentials._add_impersonation_credentials( + credentials, pipeline_options) credentials = _ApitoolsCredentialsAdapter(credentials) logging.debug( 'Connecting using Google Application Default ' @@ -160,3 +167,26 @@ def _get_service_credentials(): 'Connecting anonymously.', e) return None + + @staticmethod + def _add_impersonation_credentials(credentials, pipeline_options): + if isinstance(pipeline_options, PipelineOptions): + gcs_options = pipeline_options.view_as(GoogleCloudOptions) + impersonate_service_account = gcs_options.impersonate_service_account + elif isinstance(pipeline_options, dict): + impersonate_service_account = pipeline_options.get( + 'impersonate_service_account') + else: + return credentials + if impersonate_service_account: + _LOGGER.info('Impersonating: %s', impersonate_service_account) + impersonate_accounts = impersonate_service_account.split(',') + target_principal = impersonate_accounts[-1] + delegate_to = impersonate_accounts[0:-1] + credentials = impersonated_credentials.Credentials( + source_credentials=credentials, + target_principal=target_principal, + delegates=delegate_to, + target_scopes=CLIENT_SCOPES, + ) + return credentials diff --git a/sdks/python/apache_beam/io/gcp/bigquery.py b/sdks/python/apache_beam/io/gcp/bigquery.py index 2c21dca60478..4d7df85f905e 100644 --- 
a/sdks/python/apache_beam/io/gcp/bigquery.py +++ b/sdks/python/apache_beam/io/gcp/bigquery.py @@ -378,6 +378,15 @@ def compute_table_name(row): NOTE: This job name template does not have backwards compatibility guarantees. """ BQ_JOB_NAME_TEMPLATE = "beam_bq_job_{job_type}_{job_id}_{step_id}{random}" +""" +The maximum number of times that a bundle of rows that errors out should be +sent for insertion into BigQuery. + +The default is 10,000 with exponential backoffs, so a bundle of rows may be +tried for a very long time. You may reduce this property to reduce the number +of retries. +""" +MAX_INSERT_RETRIES = 10000 @deprecated(since='2.11.0', current="bigquery_tools.parse_table_reference") @@ -1492,6 +1501,7 @@ class BigQueryWriteFn(DoFn): DEFAULT_MAX_BATCH_SIZE = 500 FAILED_ROWS = 'FailedRows' + FAILED_ROWS_WITH_ERRORS = 'FailedRowsWithErrors' STREAMING_API_LOGGING_FREQUENCY_SEC = 300 def __init__( @@ -1507,7 +1517,8 @@ def __init__( additional_bq_parameters=None, ignore_insert_ids=False, with_batched_input=False, - ignore_unknown_columns=False): + ignore_unknown_columns=False, + max_retries=MAX_INSERT_RETRIES): """Initialize a WriteToBigQuery transform. Args: @@ -1555,6 +1566,9 @@ def __init__( the schema. The unknown values are ignored. Default is False, which treats unknown values as errors. See reference: https://cloud.google.com/bigquery/docs/reference/rest/v2/tabledata/insertAll + max_retries: The number of times that we will retry inserting a group of + rows into BigQuery. By default, we retry 10000 times with exponential + backoffs (effectively retry forever). 
""" self.schema = schema @@ -1592,6 +1606,7 @@ def __init__( self.streaming_api_logging_frequency_sec = ( BigQueryWriteFn.STREAMING_API_LOGGING_FREQUENCY_SEC) self.ignore_unknown_columns = ignore_unknown_columns + self._max_retries = max_retries def display_data(self): return { @@ -1643,7 +1658,9 @@ def start_bundle(self): self._backoff_calculator = iter( retry.FuzzedExponentialIntervals( - initial_delay_secs=0.2, num_retries=10000, max_delay_secs=1500)) + initial_delay_secs=0.2, + num_retries=self._max_retries, + max_delay_secs=1500)) def _create_table_if_needed(self, table_reference, schema=None): str_table_reference = '%s:%s.%s' % ( @@ -1754,41 +1771,57 @@ def _flush_batch(self, destination): ignore_unknown_values=self.ignore_unknown_columns) self.batch_latency_metric.update((time.time() - start) * 1000) - failed_rows = [rows[entry['index']] for entry in errors] + failed_rows = [(rows[entry['index']], entry["errors"]) + for entry in errors] + retry_backoff = next(self._backoff_calculator, None) + + # If retry_backoff is None, then we will not retry and must log. should_retry = any( RetryStrategy.should_retry( self._retry_strategy, entry['errors'][0]['reason']) - for entry in errors) + for entry in errors) and retry_backoff is not None + if not passed: self.failed_rows_metric.update(len(failed_rows)) message = ( 'There were errors inserting to BigQuery. Will{} retry. ' 'Errors were {}'.format(("" if should_retry else " not"), errors)) - if should_retry: - _LOGGER.warning(message) - else: - _LOGGER.error(message) - rows = failed_rows + # The log level is: + # - WARNING when we are continuing to retry, and have a deadline. + # - ERROR when we will no longer retry, or MAY retry forever. 
+ log_level = ( + logging.WARN if should_retry or + self._retry_strategy != RetryStrategy.RETRY_ALWAYS else + logging.ERROR) + + _LOGGER.log(log_level, message) if not should_retry: break else: - retry_backoff = next(self._backoff_calculator) _LOGGER.info( 'Sleeping %s seconds before retrying insertion.', retry_backoff) time.sleep(retry_backoff) + rows = [fr[0] for fr in failed_rows] self._throttled_secs.inc(retry_backoff) self._total_buffered_rows -= len(self._rows_buffer[destination]) del self._rows_buffer[destination] - return [ + return itertools.chain([ pvalue.TaggedOutput( - BigQueryWriteFn.FAILED_ROWS, - GlobalWindows.windowed_value((destination, row))) - for row in failed_rows - ] + BigQueryWriteFn.FAILED_ROWS_WITH_ERRORS, + GlobalWindows.windowed_value((destination, row, err))) for row, + err in failed_rows + ], + [ + pvalue.TaggedOutput( + BigQueryWriteFn.FAILED_ROWS, + GlobalWindows.windowed_value( + (destination, row))) for row, + unused_err in failed_rows + ]) # The number of shards per destination when writing via streaming inserts. 
@@ -1815,7 +1848,8 @@ def __init__( ignore_insert_ids, ignore_unknown_columns, with_auto_sharding, - test_client=None): + test_client=None, + max_retries=None): self.table_reference = table_reference self.table_side_inputs = table_side_inputs self.schema_side_inputs = schema_side_inputs @@ -1831,6 +1865,7 @@ def __init__( self.ignore_insert_ids = ignore_insert_ids self.ignore_unknown_columns = ignore_unknown_columns self.with_auto_sharding = with_auto_sharding + self.max_retries = max_retries or MAX_INSERT_RETRIES class InsertIdPrefixFn(DoFn): def start_bundle(self): @@ -1856,7 +1891,8 @@ def expand(self, input): additional_bq_parameters=self.additional_bq_parameters, ignore_insert_ids=self.ignore_insert_ids, ignore_unknown_columns=self.ignore_unknown_columns, - with_batched_input=self.with_auto_sharding) + with_batched_input=self.with_auto_sharding, + max_retries=self.max_retries) def _add_random_shard(element): key = element[0] @@ -1905,7 +1941,9 @@ def _restore_table_ref(sharded_table_ref_elems_kv): | 'FromHashableTableRef' >> beam.Map(_restore_table_ref) | 'StreamInsertRows' >> ParDo( bigquery_write_fn, *self.schema_side_inputs).with_outputs( - BigQueryWriteFn.FAILED_ROWS, main='main')) + BigQueryWriteFn.FAILED_ROWS, + BigQueryWriteFn.FAILED_ROWS_WITH_ERRORS, + main='main')) # Flag to be passed to WriteToBigQuery to force schema autodetection @@ -2194,7 +2232,11 @@ def expand(self, pcoll): with_auto_sharding=self.with_auto_sharding, test_client=self.test_client) - return {BigQueryWriteFn.FAILED_ROWS: outputs[BigQueryWriteFn.FAILED_ROWS]} + return { + BigQueryWriteFn.FAILED_ROWS: outputs[BigQueryWriteFn.FAILED_ROWS], + BigQueryWriteFn.FAILED_ROWS_WITH_ERRORS: outputs[ + BigQueryWriteFn.FAILED_ROWS_WITH_ERRORS], + } else: if self._temp_file_format == bigquery_tools.FileFormat.AVRO: if self.schema == SCHEMA_AUTODETECT: diff --git a/sdks/python/apache_beam/io/gcp/bigquery_test.py b/sdks/python/apache_beam/io/gcp/bigquery_test.py index 74722e4e538c..ff2a95c7f8eb 
100644 --- a/sdks/python/apache_beam/io/gcp/bigquery_test.py +++ b/sdks/python/apache_beam/io/gcp/bigquery_test.py @@ -42,6 +42,7 @@ from apache_beam.internal import pickler from apache_beam.internal.gcp.json_value import to_json_value from apache_beam.io.filebasedsink_test import _TestCaseWithTempDirCleanUp +from apache_beam.io.gcp import bigquery as beam_bq from apache_beam.io.gcp import bigquery_tools from apache_beam.io.gcp.bigquery import TableRowJsonCoder from apache_beam.io.gcp.bigquery import WriteToBigQuery @@ -91,6 +92,14 @@ _LOGGER = logging.getLogger(__name__) +def _load_or_default(filename): + try: + with open(filename) as f: + return json.load(f) + except: # pylint: disable=bare-except + return {} + + @unittest.skipIf( HttpError is None or gcp_bigquery is None, 'GCP dependencies are not installed') @@ -838,6 +847,7 @@ def noop(table, **kwargs): test_client=client)) +@unittest.skipIf(HttpError is None, 'GCP dependencies are not installed') class BigQueryStreamingInsertsErrorHandling(unittest.TestCase): # Using https://cloud.google.com/bigquery/docs/error-messages and @@ -1233,7 +1243,8 @@ def test_with_batched_input(self): @unittest.skipIf(HttpError is None, 'GCP dependencies are not installed') class PipelineBasedStreamingInsertTest(_TestCaseWithTempDirCleanUp): - def test_failure_has_same_insert_ids(self): + @mock.patch('time.sleep') + def test_failure_has_same_insert_ids(self, unused_mock_sleep): tempdir = '%s%s' % (self._new_tempdir(), os.sep) file_name_1 = os.path.join(tempdir, 'file1') file_name_2 = os.path.join(tempdir, 'file2') @@ -1289,6 +1300,184 @@ def store_callback(table, **kwargs): with open(file_name_1) as f1, open(file_name_2) as f2: self.assertEqual(json.load(f1), json.load(f2)) + @parameterized.expand([ + param(retry_strategy=RetryStrategy.RETRY_ALWAYS), + param(retry_strategy=RetryStrategy.RETRY_NEVER), + param(retry_strategy=RetryStrategy.RETRY_ON_TRANSIENT_ERROR), + ]) + def test_failure_in_some_rows_does_not_duplicate(self, 
retry_strategy=None): + with mock.patch('time.sleep'): + # In this test we simulate a failure to write out two out of three rows. + # Row 0 and row 2 fail to be written on the first attempt, and then + # succeed on the next attempt (if there is one). + tempdir = '%s%s' % (self._new_tempdir(), os.sep) + file_name_1 = os.path.join(tempdir, 'file1_partial') + file_name_2 = os.path.join(tempdir, 'file2_partial') + + def store_callback(table, **kwargs): + insert_ids = [r for r in kwargs['row_ids']] + colA_values = [r['columnA'] for r in kwargs['json_rows']] + + # The first time this function is called, all rows are included + # so we need to filter out 'failed' rows. + json_output_1 = { + 'insertIds': [insert_ids[1]], 'colA_values': [colA_values[1]] + } + # The second time this function is called, only rows 0 and 2 are incl + # so we don't need to filter any of them. We just write them all out. + json_output_2 = {'insertIds': insert_ids, 'colA_values': colA_values} + + # The first time we try to insert, we save those insertions in + # file insert_calls1. + if not os.path.exists(file_name_1): + with open(file_name_1, 'w') as f: + json.dump(json_output_1, f) + return [ + { + 'index': 0, + 'errors': [{ + 'reason': 'i dont like this row' + }, { + 'reason': 'its bad' + }] + }, + { + 'index': 2, + 'errors': [{ + 'reason': 'i het this row' + }, { + 'reason': 'its no gud' + }] + }, + ] + else: + with open(file_name_2, 'w') as f: + json.dump(json_output_2, f) + return [] + + client = mock.Mock() + client.insert_rows_json = mock.Mock(side_effect=store_callback) + + # The expected rows to be inserted according to the insert strategy + if retry_strategy == RetryStrategy.RETRY_NEVER: + result = ['value3'] + else: # RETRY_ALWAYS and RETRY_ON_TRANSIENT_ERRORS should insert all rows + result = ['value1', 'value3', 'value5'] + + # Using the bundle based direct runner to avoid pickling problems + # with mocks. 
+ with beam.Pipeline(runner='BundleBasedDirectRunner') as p: + bq_write_out = ( + p + | beam.Create([{ + 'columnA': 'value1', 'columnB': 'value2' + }, { + 'columnA': 'value3', 'columnB': 'value4' + }, { + 'columnA': 'value5', 'columnB': 'value6' + }]) + | _StreamToBigQuery( + table_reference='project:dataset.table', + table_side_inputs=[], + schema_side_inputs=[], + schema='anyschema', + batch_size=None, + triggering_frequency=None, + create_disposition='CREATE_NEVER', + write_disposition=None, + kms_key=None, + retry_strategy=retry_strategy, + additional_bq_parameters=[], + ignore_insert_ids=False, + ignore_unknown_columns=False, + with_auto_sharding=False, + test_client=client)) + + failed_values = ( + bq_write_out[beam_bq.BigQueryWriteFn.FAILED_ROWS_WITH_ERRORS] + | beam.Map(lambda x: x[1]['columnA'])) + + assert_that( + failed_values, + equal_to(list({'value1', 'value3', 'value5'}.difference(result)))) + + data1 = _load_or_default(file_name_1) + data2 = _load_or_default(file_name_2) + + self.assertListEqual( + sorted(data1.get('colA_values', []) + data2.get('colA_values', [])), + result) + self.assertEqual(len(data1['colA_values']), 1) + + @parameterized.expand([ + param(retry_strategy=RetryStrategy.RETRY_ALWAYS), + param(retry_strategy=RetryStrategy.RETRY_NEVER), + param(retry_strategy=RetryStrategy.RETRY_ON_TRANSIENT_ERROR), + ]) + def test_permanent_failure_in_some_rows_does_not_duplicate( + self, unused_sleep_mock=None, retry_strategy=None): + with mock.patch('time.sleep'): + + def store_callback(table, **kwargs): + return [ + { + 'index': 0, + 'errors': [{ + 'reason': 'invalid' + }, { + 'reason': 'its bad' + }] + }, + ] + + client = mock.Mock() + client.insert_rows_json = mock.Mock(side_effect=store_callback) + + # The expected rows to be inserted according to the insert strategy + if retry_strategy == RetryStrategy.RETRY_NEVER: + inserted_rows = ['value3', 'value5'] + else: # RETRY_ALWAYS and RETRY_ON_TRANSIENT_ERRORS should insert all rows + 
inserted_rows = ['value3', 'value5'] + + # Using the bundle based direct runner to avoid pickling problems + # with mocks. + with beam.Pipeline(runner='BundleBasedDirectRunner') as p: + bq_write_out = ( + p + | beam.Create([{ + 'columnA': 'value1', 'columnB': 'value2' + }, { + 'columnA': 'value3', 'columnB': 'value4' + }, { + 'columnA': 'value5', 'columnB': 'value6' + }]) + | _StreamToBigQuery( + table_reference='project:dataset.table', + table_side_inputs=[], + schema_side_inputs=[], + schema='anyschema', + batch_size=None, + triggering_frequency=None, + create_disposition='CREATE_NEVER', + write_disposition=None, + kms_key=None, + retry_strategy=retry_strategy, + additional_bq_parameters=[], + ignore_insert_ids=False, + ignore_unknown_columns=False, + with_auto_sharding=False, + test_client=client, + max_retries=10)) + + failed_values = ( + bq_write_out[beam_bq.BigQueryWriteFn.FAILED_ROWS] + | beam.Map(lambda x: x[1]['columnA'])) + + assert_that( + failed_values, + equal_to( + list({'value1', 'value3', 'value5'}.difference(inserted_rows)))) + @parameterized.expand([ param(with_auto_sharding=False), param(with_auto_sharding=True), @@ -1353,6 +1542,7 @@ def store_callback(table, **kwargs): self.assertEqual(out2['colA_values'], ['value5']) +@unittest.skipIf(HttpError is None, 'GCP dependencies are not installed') class BigQueryStreamingInsertTransformIntegrationTests(unittest.TestCase): BIG_QUERY_DATASET_ID = 'python_bq_streaming_inserts_' @@ -1538,9 +1728,15 @@ def test_multiple_destinations_transform(self): method='STREAMING_INSERTS')) assert_that( - r[beam.io.gcp.bigquery.BigQueryWriteFn.FAILED_ROWS], + r[beam.io.gcp.bigquery.BigQueryWriteFn.FAILED_ROWS_WITH_ERRORS] + | beam.Map(lambda elm: (elm[0], elm[1])), equal_to([(full_output_table_1, bad_record)])) + assert_that( + r[beam.io.gcp.bigquery.BigQueryWriteFn.FAILED_ROWS], + equal_to([(full_output_table_1, bad_record)]), + label='FailedRowsMatch') + def tearDown(self): request = 
bigquery.BigqueryDatasetsDeleteRequest( projectId=self.project, datasetId=self.dataset_id, deleteContents=True) @@ -1646,6 +1842,7 @@ def test_file_loads(self): WriteToBigQuery.Method.FILE_LOADS, triggering_frequency=20) +@unittest.skipIf(HttpError is None, 'GCP dependencies are not installed') class BigQueryFileLoadsIntegrationTests(unittest.TestCase): BIG_QUERY_DATASET_ID = 'python_bq_file_loads_' @@ -1676,12 +1873,12 @@ def test_avro_file_load(self): bigquery_file_loads._DEFAULT_MAX_FILE_SIZE = 100 elements = [ { - 'name': u'Negative infinity', + 'name': 'Negative infinity', 'value': -float('inf'), 'timestamp': datetime.datetime(1970, 1, 1, tzinfo=pytz.utc), }, { - 'name': u'Not a number', + 'name': 'Not a number', 'value': float('nan'), 'timestamp': datetime.datetime(2930, 12, 9, tzinfo=pytz.utc), }, diff --git a/sdks/python/apache_beam/io/gcp/bigquery_tools.py b/sdks/python/apache_beam/io/gcp/bigquery_tools.py index 89efa1ef6230..bb3b60273404 100644 --- a/sdks/python/apache_beam/io/gcp/bigquery_tools.py +++ b/sdks/python/apache_beam/io/gcp/bigquery_tools.py @@ -314,7 +314,7 @@ class BigQueryWrapper(object): The wrapper is used to organize all the BigQuery integration points and offer a common place where retry logic for failures can be controlled. - In addition it offers various functions used both in sources and sinks + In addition, it offers various functions used both in sources and sinks (e.g., find and create tables, query a table, etc.). 
""" @@ -328,7 +328,7 @@ class BigQueryWrapper(object): def __init__(self, client=None, temp_dataset_id=None, temp_table_ref=None): self.client = client or bigquery.BigqueryV2( http=get_new_http(), - credentials=auth.get_service_credentials(), + credentials=auth.get_service_credentials(None), response_encoding='utf8', additional_http_headers={ "user-agent": "apache-beam-%s" % apache_beam.__version__ diff --git a/sdks/python/apache_beam/io/gcp/bigquery_tools_test.py b/sdks/python/apache_beam/io/gcp/bigquery_tools_test.py index e4ff6082cabb..3ce8d0ff7de4 100644 --- a/sdks/python/apache_beam/io/gcp/bigquery_tools_test.py +++ b/sdks/python/apache_beam/io/gcp/bigquery_tools_test.py @@ -223,7 +223,7 @@ def test_delete_dataset_retries_for_timeouts(self, patched_time_sleep): self.assertTrue(client.datasets.Delete.called) @unittest.skipIf( - google and not hasattr(google.cloud, '_http'), + google and not hasattr(google.cloud, '_http'), # pylint: disable=c-extension-no-member 'Dependencies not installed') @mock.patch('time.sleep', return_value=None) @mock.patch('google.cloud._http.JSONConnection.http') diff --git a/sdks/python/apache_beam/io/gcp/bigquery_write_it_test.py b/sdks/python/apache_beam/io/gcp/bigquery_write_it_test.py index dd2283eb71d6..e75b698c6516 100644 --- a/sdks/python/apache_beam/io/gcp/bigquery_write_it_test.py +++ b/sdks/python/apache_beam/io/gcp/bigquery_write_it_test.py @@ -36,14 +36,18 @@ from parameterized import parameterized import apache_beam as beam +from apache_beam.io.gcp.bigquery import BigQueryWriteFn from apache_beam.io.gcp.bigquery_tools import BigQueryWrapper from apache_beam.io.gcp.bigquery_tools import FileFormat from apache_beam.io.gcp.internal.clients import bigquery from apache_beam.io.gcp.tests.bigquery_matcher import BigqueryFullResultMatcher from apache_beam.testing.test_pipeline import TestPipeline +from apache_beam.testing.util import assert_that +from apache_beam.testing.util import equal_to # Protect against environments where 
bigquery library is not available. # pylint: disable=wrong-import-order, wrong-import-position + try: from apitools.base.py.exceptions import HttpError except ImportError: @@ -373,6 +377,85 @@ def test_big_query_write_without_schema(self): write_disposition=beam.io.BigQueryDisposition.WRITE_APPEND, temp_file_format=FileFormat.JSON)) + @pytest.mark.it_postcommit + def test_big_query_write_insert_errors_reporting(self): + """ + Test that errors returned by beam.io.WriteToBigQuery + contain both the failed rows amd the reason for it failing. + """ + table_name = 'python_write_table' + table_id = '{}.{}'.format(self.dataset_id, table_name) + + input_data = [{ + 'number': 1, + 'str': 'some_string', + }, { + 'number': 2 + }, + { + 'number': 3, + 'str': 'some_string', + 'additional_field_str': 'some_string', + }] + + table_schema = { + "fields": [{ + "name": "number", "type": "INTEGER", 'mode': 'REQUIRED' + }, { + "name": "str", "type": "STRING", 'mode': 'REQUIRED' + }] + } + + bq_result_errors = [( + { + "number": 2 + }, + [{ + "reason": "invalid", + "location": "", + "debugInfo": "", + "message": "Missing required field: Msg_0_CLOUD_QUERY_TABLE.str." + }], + ), + ({ + "number": 3, + "str": "some_string", + "additional_field_str": "some_string" + }, + [{ + "reason": "invalid", + "location": "additional_field_str", + "debugInfo": "", + "message": "no such field: additional_field_str." 
+ }])] + + pipeline_verifiers = [ + BigqueryFullResultMatcher( + project=self.project, + query="SELECT number, str FROM %s" % table_id, + data=[(1, 'some_string')]), + ] + + args = self.test_pipeline.get_full_options_as_args( + on_success_matcher=hc.all_of(*pipeline_verifiers)) + + with beam.Pipeline(argv=args) as p: + # pylint: disable=expression-not-assigned + errors = ( + p | 'create' >> beam.Create(input_data) + | 'write' >> beam.io.WriteToBigQuery( + table_id, + schema=table_schema, + method='STREAMING_INSERTS', + insert_retry_strategy='RETRY_NEVER', + create_disposition=beam.io.BigQueryDisposition.CREATE_IF_NEEDED, + write_disposition=beam.io.BigQueryDisposition.WRITE_APPEND)) + + assert_that( + errors[BigQueryWriteFn.FAILED_ROWS_WITH_ERRORS] + | 'ParseErrors' >> beam.Map(lambda err: (err[1], err[2])), + equal_to(bq_result_errors)) + @pytest.mark.it_postcommit @parameterized.expand([ param(file_format=FileFormat.AVRO), diff --git a/sdks/python/apache_beam/io/gcp/gcsfilesystem.py b/sdks/python/apache_beam/io/gcp/gcsfilesystem.py index 90ecc1d0fcc9..11184cd34fd3 100644 --- a/sdks/python/apache_beam/io/gcp/gcsfilesystem.py +++ b/sdks/python/apache_beam/io/gcp/gcsfilesystem.py @@ -45,6 +45,10 @@ class GCSFileSystem(FileSystem): CHUNK_SIZE = gcsio.MAX_BATCH_OPERATION_SIZE # Chuck size in batch operations GCS_PREFIX = 'gs://' + def __init__(self, pipeline_options): + super().__init__(pipeline_options) + self._pipeline_options = pipeline_options + @classmethod def scheme(cls): """URI scheme for the FileSystem @@ -127,12 +131,15 @@ def _list(self, dir_or_prefix): ``BeamIOError``: if listing fails, but not if no files were found. 
""" try: - for path, (size, updated) in gcsio.GcsIO().list_prefix( + for path, (size, updated) in self._gcsIO().list_prefix( dir_or_prefix, with_metadata=True).items(): yield FileMetadata(path, size, updated) except Exception as e: # pylint: disable=broad-except raise BeamIOError("List operation failed", {dir_or_prefix: e}) + def _gcsIO(self): + return gcsio.GcsIO(pipeline_options=self._pipeline_options) + def _path_open( self, path, @@ -143,7 +150,7 @@ def _path_open( """ compression_type = FileSystem._get_compression_type(path, compression_type) mime_type = CompressionTypes.mime_type(compression_type, mime_type) - raw_file = gcsio.GcsIO().open(path, mode, mime_type=mime_type) + raw_file = self._gcsIO().open(path, mode, mime_type=mime_type) if compression_type == CompressionTypes.UNCOMPRESSED: return raw_file return CompressedFile(raw_file, compression_type=compression_type) @@ -206,9 +213,9 @@ def _copy_path(source, destination): raise ValueError('Destination %r must be GCS path.' % destination) # Use copy_tree if the path ends with / as it is a directory if source.endswith('/'): - gcsio.GcsIO().copytree(source, destination) + self._gcsIO().copytree(source, destination) else: - gcsio.GcsIO().copy(source, destination) + self._gcsIO().copy(source, destination) exceptions = {} for source, destination in zip(source_file_names, destination_file_names): @@ -249,7 +256,7 @@ def rename(self, source_file_names, destination_file_names): # Execute GCS renames if any and return exceptions. 
exceptions = {} for batch in gcs_batches: - copy_statuses = gcsio.GcsIO().copy_batch(batch) + copy_statuses = self._gcsIO().copy_batch(batch) copy_succeeded = [] for src, dest, exception in copy_statuses: if exception: @@ -257,7 +264,7 @@ def rename(self, source_file_names, destination_file_names): else: copy_succeeded.append((src, dest)) delete_batch = [src for src, dest in copy_succeeded] - delete_statuses = gcsio.GcsIO().delete_batch(delete_batch) + delete_statuses = self._gcsIO().delete_batch(delete_batch) for i, (src, exception) in enumerate(delete_statuses): dest = copy_succeeded[i][1] if exception: @@ -274,7 +281,7 @@ def exists(self, path): Returns: boolean flag indicating if path exists """ - return gcsio.GcsIO().exists(path) + return self._gcsIO().exists(path) def size(self, path): """Get size of path on the FileSystem. @@ -287,7 +294,7 @@ def size(self, path): Raises: ``BeamIOError``: if path doesn't exist. """ - return gcsio.GcsIO().size(path) + return self._gcsIO().size(path) def last_updated(self, path): """Get UNIX Epoch time in seconds on the FileSystem. @@ -300,7 +307,7 @@ def last_updated(self, path): Raises: ``BeamIOError``: if path doesn't exist. """ - return gcsio.GcsIO().last_updated(path) + return self._gcsIO().last_updated(path) def checksum(self, path): """Fetch checksum metadata of a file on the @@ -315,7 +322,7 @@ def checksum(self, path): ``BeamIOError``: if path isn't a file or doesn't exist. """ try: - return gcsio.GcsIO().checksum(path) + return self._gcsIO().checksum(path) except Exception as e: # pylint: disable=broad-except raise BeamIOError("Checksum operation failed", {path: e}) @@ -332,7 +339,7 @@ def metadata(self, path): ``BeamIOError``: if path isn't a file or doesn't exist. 
""" try: - file_metadata = gcsio.GcsIO()._status(path) + file_metadata = self._gcsIO()._status(path) return FileMetadata( path, file_metadata['size'], file_metadata['last_updated']) except Exception as e: # pylint: disable=broad-except @@ -353,7 +360,7 @@ def _delete_path(path): else: path_to_use = path match_result = self.match([path_to_use])[0] - statuses = gcsio.GcsIO().delete_batch( + statuses = self._gcsIO().delete_batch( [m.path for m in match_result.metadata_list]) # pylint: disable=used-before-assignment failures = [e for (_, e) in statuses if e is not None] diff --git a/sdks/python/apache_beam/io/gcp/gcsfilesystem_test.py b/sdks/python/apache_beam/io/gcp/gcsfilesystem_test.py index b4d921ada234..49b0bdc9f6cf 100644 --- a/sdks/python/apache_beam/io/gcp/gcsfilesystem_test.py +++ b/sdks/python/apache_beam/io/gcp/gcsfilesystem_test.py @@ -81,7 +81,7 @@ def test_split(self): def test_match_multiples(self, mock_gcsio): # Prepare mocks. gcsio_mock = mock.MagicMock() - gcsfilesystem.gcsio.GcsIO = lambda: gcsio_mock + gcsfilesystem.gcsio.GcsIO = lambda pipeline_options=None: gcsio_mock gcsio_mock.list_prefix.return_value = { 'gs://bucket/file1': (1, 99999.0), 'gs://bucket/file2': (2, 88888.0) } @@ -99,7 +99,7 @@ def test_match_multiples_limit(self, mock_gcsio): # Prepare mocks. gcsio_mock = mock.MagicMock() limit = 1 - gcsfilesystem.gcsio.GcsIO = lambda: gcsio_mock + gcsfilesystem.gcsio.GcsIO = lambda pipeline_options=None: gcsio_mock gcsio_mock.list_prefix.return_value = {'gs://bucket/file1': (1, 99999.0)} expected_results = set([FileMetadata('gs://bucket/file1', 1, 99999.0)]) match_result = self.fs.match(['gs://bucket/'], [limit])[0] @@ -112,7 +112,7 @@ def test_match_multiples_limit(self, mock_gcsio): def test_match_multiples_error(self, mock_gcsio): # Prepare mocks. 
gcsio_mock = mock.MagicMock() - gcsfilesystem.gcsio.GcsIO = lambda: gcsio_mock + gcsfilesystem.gcsio.GcsIO = lambda pipeline_options=None: gcsio_mock exception = IOError('Failed') gcsio_mock.list_prefix.side_effect = exception @@ -128,7 +128,7 @@ def test_match_multiples_error(self, mock_gcsio): def test_match_multiple_patterns(self, mock_gcsio): # Prepare mocks. gcsio_mock = mock.MagicMock() - gcsfilesystem.gcsio.GcsIO = lambda: gcsio_mock + gcsfilesystem.gcsio.GcsIO = lambda pipeline_options=None: gcsio_mock gcsio_mock.list_prefix.side_effect = [ { 'gs://bucket/file1': (1, 99999.0) @@ -146,7 +146,7 @@ def test_match_multiple_patterns(self, mock_gcsio): def test_create(self, mock_gcsio): # Prepare mocks. gcsio_mock = mock.MagicMock() - gcsfilesystem.gcsio.GcsIO = lambda: gcsio_mock + gcsfilesystem.gcsio.GcsIO = lambda pipeline_options=None: gcsio_mock # Issue file copy _ = self.fs.create('gs://bucket/from1', 'application/octet-stream') @@ -157,7 +157,7 @@ def test_create(self, mock_gcsio): def test_open(self, mock_gcsio): # Prepare mocks. gcsio_mock = mock.MagicMock() - gcsfilesystem.gcsio.GcsIO = lambda: gcsio_mock + gcsfilesystem.gcsio.GcsIO = lambda pipeline_options=None: gcsio_mock # Issue file copy _ = self.fs.open('gs://bucket/from1', 'application/octet-stream') @@ -168,7 +168,7 @@ def test_open(self, mock_gcsio): def test_copy_file(self, mock_gcsio): # Prepare mocks. gcsio_mock = mock.MagicMock() - gcsfilesystem.gcsio.GcsIO = lambda: gcsio_mock + gcsfilesystem.gcsio.GcsIO = lambda pipeline_options=None: gcsio_mock sources = ['gs://bucket/from1'] destinations = ['gs://bucket/to1'] @@ -182,7 +182,7 @@ def test_copy_file(self, mock_gcsio): def test_copy_file_error(self, mock_gcsio): # Prepare mocks. 
gcsio_mock = mock.MagicMock() - gcsfilesystem.gcsio.GcsIO = lambda: gcsio_mock + gcsfilesystem.gcsio.GcsIO = lambda pipeline_options=None: gcsio_mock sources = ['gs://bucket/from1'] destinations = ['gs://bucket/to1'] @@ -208,7 +208,7 @@ def test_copy_file_error(self, mock_gcsio): def test_copy_tree(self, mock_gcsio): # Prepare mocks. gcsio_mock = mock.MagicMock() - gcsfilesystem.gcsio.GcsIO = lambda: gcsio_mock + gcsfilesystem.gcsio.GcsIO = lambda pipeline_options=None: gcsio_mock sources = ['gs://bucket1/'] destinations = ['gs://bucket2/'] @@ -222,7 +222,7 @@ def test_copy_tree(self, mock_gcsio): def test_rename(self, mock_gcsio): # Prepare mocks. gcsio_mock = mock.MagicMock() - gcsfilesystem.gcsio.GcsIO = lambda: gcsio_mock + gcsfilesystem.gcsio.GcsIO = lambda pipeline_options=None: gcsio_mock sources = [ 'gs://bucket/from1', 'gs://bucket/from2', @@ -262,7 +262,7 @@ def test_rename(self, mock_gcsio): def test_rename_error(self, mock_gcsio): # Prepare mocks. gcsio_mock = mock.MagicMock() - gcsfilesystem.gcsio.GcsIO = lambda: gcsio_mock + gcsfilesystem.gcsio.GcsIO = lambda pipeline_options=None: gcsio_mock sources = [ 'gs://bucket/from1', 'gs://bucket/from2', @@ -308,7 +308,7 @@ def test_rename_error(self, mock_gcsio): def test_delete(self, mock_gcsio): # Prepare mocks. gcsio_mock = mock.MagicMock() - gcsfilesystem.gcsio.GcsIO = lambda: gcsio_mock + gcsfilesystem.gcsio.GcsIO = lambda pipeline_options=None: gcsio_mock gcsio_mock._status.return_value = {'size': 0, 'last_updated': 99999.0} files = [ 'gs://bucket/from1', @@ -324,7 +324,7 @@ def test_delete(self, mock_gcsio): def test_delete_error(self, mock_gcsio): # Prepare mocks. 
gcsio_mock = mock.MagicMock() - gcsfilesystem.gcsio.GcsIO = lambda: gcsio_mock + gcsfilesystem.gcsio.GcsIO = lambda pipeline_options=None: gcsio_mock exception = IOError('Failed') gcsio_mock.delete_batch.side_effect = exception gcsio_mock._status.return_value = {'size': 0, 'last_updated': 99999.0} diff --git a/sdks/python/apache_beam/io/gcp/gcsio.py b/sdks/python/apache_beam/io/gcp/gcsio.py index 599861b5f778..bf41ae646107 100644 --- a/sdks/python/apache_beam/io/gcp/gcsio.py +++ b/sdks/python/apache_beam/io/gcp/gcsio.py @@ -138,7 +138,7 @@ def get_or_create_default_gcs_bucket(options): return None bucket_name = default_gcs_bucket_name(project, region) - bucket = GcsIO().get_bucket(bucket_name) + bucket = GcsIO(pipeline_options=options).get_bucket(bucket_name) if bucket: return bucket else: @@ -146,7 +146,8 @@ def get_or_create_default_gcs_bucket(options): 'Creating default GCS bucket for project %s: gs://%s', project, bucket_name) - return GcsIO().create_bucket(bucket_name, project, location=region) + return GcsIO(pipeline_options=options).create_bucket( + bucket_name, project, location=region) class GcsIOError(IOError, retry.PermanentException): @@ -156,10 +157,10 @@ class GcsIOError(IOError, retry.PermanentException): class GcsIO(object): """Google Cloud Storage I/O client.""" - def __init__(self, storage_client=None): + def __init__(self, storage_client=None, pipeline_options=None): if storage_client is None: storage_client = storage.StorageV1( - credentials=auth.get_service_credentials(), + credentials=auth.get_service_credentials(pipeline_options), get_credentials=False, http=get_new_http(), response_encoding='utf8', diff --git a/sdks/python/apache_beam/io/gcp/gcsio_test.py b/sdks/python/apache_beam/io/gcp/gcsio_test.py index a4aa2d4aa858..260090461c8c 100644 --- a/sdks/python/apache_beam/io/gcp/gcsio_test.py +++ b/sdks/python/apache_beam/io/gcp/gcsio_test.py @@ -461,7 +461,7 @@ def test_delete(self): @mock.patch( 
'apache_beam.io.gcp.gcsio.auth.get_service_credentials', - wraps=lambda: None) + wraps=lambda pipeline_options: None) @mock.patch('apache_beam.io.gcp.gcsio.get_new_http') def test_user_agent_passed(self, get_new_http_mock, get_service_creds_mock): client = gcsio.GcsIO() diff --git a/sdks/python/apache_beam/options/pipeline_options.py b/sdks/python/apache_beam/options/pipeline_options.py index e02f6d799308..5aa29c0fd96e 100644 --- a/sdks/python/apache_beam/options/pipeline_options.py +++ b/sdks/python/apache_beam/options/pipeline_options.py @@ -748,9 +748,17 @@ def _add_argparse_args(cls, parser): '--enable_artifact_caching', default=False, action='store_true', - help= - 'When true, artifacts will be cached across job submissions in the GCS ' - 'staging bucket') + help='When true, artifacts will be cached across job submissions in ' + 'the GCS staging bucket') + parser.add_argument( + '--impersonate_service_account', + default=None, + help='All API requests will be made as the given service account or ' + 'target service account in an impersonation delegation chain ' + 'instead of the currently selected account. 
You can specify ' + 'either a single service account as the impersonator, or a ' + 'comma-separated list of service accounts to create an ' + 'impersonation delegation chain.') def _create_default_gcs_bucket(self): try: diff --git a/sdks/python/apache_beam/portability/common_urns.py b/sdks/python/apache_beam/portability/common_urns.py index daf54ea04da3..5e8a3ce4cce1 100644 --- a/sdks/python/apache_beam/portability/common_urns.py +++ b/sdks/python/apache_beam/portability/common_urns.py @@ -22,6 +22,7 @@ from .api import beam_runner_api_pb2_urns from .api import external_transforms_pb2_urns from .api import metrics_pb2_urns +from .api import schema_pb2_urns from .api import standard_window_fns_pb2_urns BeamConstants = beam_runner_api_pb2_urns.BeamConstants @@ -39,6 +40,7 @@ MonitoringInfo = metrics_pb2_urns.MonitoringInfo MonitoringInfoSpecs = metrics_pb2_urns.MonitoringInfoSpecs MonitoringInfoTypeUrns = metrics_pb2_urns.MonitoringInfoTypeUrns +LogicalTypes = schema_pb2_urns.LogicalTypes FixedWindowsPayload = standard_window_fns_pb2_urns.FixedWindowsPayload GlobalWindowsPayload = standard_window_fns_pb2_urns.GlobalWindowsPayload SessionWindowsPayload = standard_window_fns_pb2_urns.SessionWindowsPayload @@ -76,3 +78,6 @@ displayData = StandardDisplayData.DisplayData java_class_lookup = ExpansionMethods.Enum.JAVA_CLASS_LOOKUP + +micros_instant = LogicalTypes.Enum.MICROS_INSTANT +python_callable = LogicalTypes.Enum.PYTHON_CALLABLE diff --git a/sdks/python/apache_beam/runners/dataflow/dataflow_runner.py b/sdks/python/apache_beam/runners/dataflow/dataflow_runner.py index 13cbec6dc022..49f7251c0559 100644 --- a/sdks/python/apache_beam/runners/dataflow/dataflow_runner.py +++ b/sdks/python/apache_beam/runners/dataflow/dataflow_runner.py @@ -1640,7 +1640,10 @@ def wait_until_finish(self, duration=None): if not self.is_in_terminal_state(): if not self.has_job: raise IOError('Failed to get the Dataflow job id.') - + consoleUrl = ( + "Console URL: 
https://console.cloud.google.com/" + f"dataflow/jobs//{self.job_id()}" + "?project=") thread = threading.Thread( target=DataflowRunner.poll_for_job_completion, args=(self._runner, self, duration)) @@ -1657,13 +1660,15 @@ def wait_until_finish(self, duration=None): # is_in_terminal_state. terminated = self.is_in_terminal_state() assert duration or terminated, ( - 'Job did not reach to a terminal state after waiting indefinitely.') + 'Job did not reach to a terminal state after waiting indefinitely. ' + '{}'.format(consoleUrl)) # TODO(BEAM-14291): Also run this check if wait_until_finish was called # after the pipeline completed. if terminated and self.state != PipelineState.DONE: # TODO(BEAM-1290): Consider converting this to an error log based on # theresolution of the issue. + _LOGGER.error(consoleUrl) raise DataflowRuntimeException( 'Dataflow pipeline failed. State: %s, Error:\n%s' % (self.state, getattr(self._runner, 'last_error_msg', None)), diff --git a/sdks/python/apache_beam/runners/dataflow/internal/apiclient.py b/sdks/python/apache_beam/runners/dataflow/internal/apiclient.py index 21e3335c077e..e08729565294 100644 --- a/sdks/python/apache_beam/runners/dataflow/internal/apiclient.py +++ b/sdks/python/apache_beam/runners/dataflow/internal/apiclient.py @@ -345,6 +345,10 @@ def __init__( for k, v in sdk_pipeline_options.items() if v is not None } options_dict["pipelineUrl"] = proto_pipeline_staged_url + # Don't pass impersonate_service_account through to the harness. + # Though impersonation should start a job, the workers should + # not try to modify their credentials. 
+ options_dict.pop('impersonate_service_account', None) self.proto.sdkPipelineOptions.additionalProperties.append( dataflow.Environment.SdkPipelineOptionsValue.AdditionalProperty( key='options', value=to_json_value(options_dict))) @@ -557,7 +561,7 @@ def __init__(self, options, root_staging_location=None): if self.google_cloud_options.no_auth: credentials = None else: - credentials = get_service_credentials() + credentials = get_service_credentials(options) http_client = get_new_http() self._client = dataflow.DataflowV1b3( diff --git a/sdks/python/apache_beam/runners/dataflow/test_dataflow_runner.py b/sdks/python/apache_beam/runners/dataflow/test_dataflow_runner.py index d4743a558f3e..58bc05c39509 100644 --- a/sdks/python/apache_beam/runners/dataflow/test_dataflow_runner.py +++ b/sdks/python/apache_beam/runners/dataflow/test_dataflow_runner.py @@ -55,6 +55,8 @@ def run_pipeline(self, pipeline, options): # TODO(markflyhigh)(BEAM-1890): Use print since Nose dosen't show logs # in some cases. print('Worker logs: %s' % self.build_console_url(options)) + _LOGGER.info('Console log: ') + _LOGGER.info(self.build_console_url(options)) try: self.wait_until_in_state(PipelineState.RUNNING) @@ -84,7 +86,11 @@ def build_console_url(self, options): def wait_until_in_state(self, expected_state, timeout=WAIT_IN_STATE_TIMEOUT): """Wait until Dataflow pipeline enters a certain state.""" + consoleUrl = ( + "Console URL: https://console.cloud.google.com/dataflow/" + f"/{self.result.job_id()}?project=") if not self.result.has_job: + _LOGGER.error(consoleUrl) raise IOError('Failed to get the Dataflow job id.') start_time = time.time() @@ -93,7 +99,7 @@ def wait_until_in_state(self, expected_state, timeout=WAIT_IN_STATE_TIMEOUT): if self.result.is_in_terminal_state() or job_state == expected_state: return job_state time.sleep(5) - + _LOGGER.error(consoleUrl) raise RuntimeError( 'Timeout after %d seconds while waiting for job %s ' 'enters expected state %s. Current state is %s.' 
% diff --git a/sdks/python/apache_beam/runners/interactive/utils.py b/sdks/python/apache_beam/runners/interactive/utils.py index cfc2a1a8637d..68b4fceaa8c3 100644 --- a/sdks/python/apache_beam/runners/interactive/utils.py +++ b/sdks/python/apache_beam/runners/interactive/utils.py @@ -452,7 +452,7 @@ def assert_bucket_exists(bucket_name): try: from apitools.base.py.exceptions import HttpError storage_client = storage.StorageV1( - credentials=auth.get_service_credentials(), + credentials=auth.get_service_credentials(None), get_credentials=False, http=get_new_http(), response_encoding='utf8') diff --git a/sdks/python/apache_beam/runners/portability/sdk_container_builder.py b/sdks/python/apache_beam/runners/portability/sdk_container_builder.py index d06b005a4f0d..f81e015ea591 100644 --- a/sdks/python/apache_beam/runners/portability/sdk_container_builder.py +++ b/sdks/python/apache_beam/runners/portability/sdk_container_builder.py @@ -209,7 +209,7 @@ def __init__(self, options): if self._google_cloud_options.no_auth: credentials = None else: - credentials = get_service_credentials() + credentials = get_service_credentials(options) self._storage_client = storage.StorageV1( url='https://www.googleapis.com/storage/v1', credentials=credentials, diff --git a/sdks/python/apache_beam/typehints/schemas.py b/sdks/python/apache_beam/typehints/schemas.py index 5a04ba51722b..02eac46ae5d6 100644 --- a/sdks/python/apache_beam/typehints/schemas.py +++ b/sdks/python/apache_beam/typehints/schemas.py @@ -69,6 +69,7 @@ import numpy as np from google.protobuf import text_format +from apache_beam.portability import common_urns from apache_beam.portability.api import schema_pb2 from apache_beam.typehints import row_type from apache_beam.typehints.native_type_compatibility import _get_args @@ -78,6 +79,7 @@ from apache_beam.typehints.native_type_compatibility import extract_optional_type from apache_beam.typehints.native_type_compatibility import match_is_named_tuple from apache_beam.utils 
import proto_utils +from apache_beam.utils.python_callable import PythonCallableWithSource from apache_beam.utils.timestamp import Timestamp PYTHON_ANY_URN = "beam:logical:pythonsdk_any:v1" @@ -540,7 +542,7 @@ class MicrosInstant(NoArgumentLogicalType[Timestamp, MicrosInstantRepresentation]): @classmethod def urn(cls): - return "beam:logical_type:micros_instant:v1" + return common_urns.micros_instant.urn @classmethod def representation_type(cls): @@ -559,3 +561,27 @@ def to_representation_type(self, value): def to_language_type(self, value): # type: (MicrosInstantRepresentation) -> Timestamp return Timestamp(seconds=int(value.seconds), micros=int(value.micros)) + + +@LogicalType.register_logical_type +class PythonCallable(NoArgumentLogicalType[PythonCallableWithSource, str]): + @classmethod + def urn(cls): + return common_urns.python_callable.urn + + @classmethod + def representation_type(cls): + # type: () -> type + return str + + @classmethod + def language_type(cls): + return PythonCallableWithSource + + def to_representation_type(self, value): + # type: (PythonCallableWithSource) -> str + return value.get_source() + + def to_language_type(self, value): + # type: (str) -> PythonCallableWithSource + return PythonCallableWithSource(value) diff --git a/sdks/python/apache_beam/typehints/schemas_test.py b/sdks/python/apache_beam/typehints/schemas_test.py index 834edf18777e..404d9c5583c3 100644 --- a/sdks/python/apache_beam/typehints/schemas_test.py +++ b/sdks/python/apache_beam/typehints/schemas_test.py @@ -31,6 +31,7 @@ import numpy as np +from apache_beam.portability import common_urns from apache_beam.portability.api import schema_pb2 from apache_beam.typehints.native_type_compatibility import match_is_named_tuple from apache_beam.typehints.schemas import SchemaTypeRegistry @@ -239,6 +240,22 @@ def test_float_maps_to_float64(self): schema_pb2.FieldType(atomic_type=schema_pb2.DOUBLE), typing_to_runner_api(float)) + def 
test_python_callable_maps_to_logical_type(self): + from apache_beam.utils.python_callable import PythonCallableWithSource + self.assertEqual( + schema_pb2.FieldType( + logical_type=schema_pb2.LogicalType( + urn=common_urns.python_callable.urn, + representation=typing_to_runner_api(str))), + typing_to_runner_api(PythonCallableWithSource)) + self.assertEqual( + typing_from_runner_api( + schema_pb2.FieldType( + logical_type=schema_pb2.LogicalType( + urn=common_urns.python_callable.urn, + representation=typing_to_runner_api(str)))), + PythonCallableWithSource) + def test_trivial_example(self): MyCuteClass = NamedTuple( 'MyCuteClass', diff --git a/sdks/python/apache_beam/utils/python_callable.py b/sdks/python/apache_beam/utils/python_callable.py new file mode 100644 index 000000000000..9238e4de66ba --- /dev/null +++ b/sdks/python/apache_beam/utils/python_callable.py @@ -0,0 +1,41 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +"""Python Callable utilities. + +For internal use only; no backwards-compatibility guarantees. +""" + + +class PythonCallableWithSource(object): + """Represents a Python callable object with source codes before evaluated. + + Proxy object to Store a callable object with its string form (source code). 
+ The string form is used when the object is encoded and transferred to foreign + SDKs (non-Python SDKs). + """ + def __init__(self, source): + # type: (str) -> None + self._source = source + self._callable = eval(source) # pylint: disable=eval-used + + def get_source(self): + # type: () -> str + return self._source + + def __call__(self, *args, **kwargs): + return self._callable(*args, **kwargs) diff --git a/sdks/python/setup.py b/sdks/python/setup.py index b808c87405cb..1db8bdf177d8 100644 --- a/sdks/python/setup.py +++ b/sdks/python/setup.py @@ -120,121 +120,11 @@ def get_version(): except ImportError: cythonize = lambda *args, **kwargs: [] -REQUIRED_PACKAGES = [ - # Avro 1.9.2 for python3 was broken. The issue was fixed in version 1.9.2.1 - 'crcmod>=1.7,<2.0', - # dataclasses backport for python_version<3.7. No version bound because this - # is Python standard since Python 3.7 and each Python version is compatible - # with a specific dataclasses version. - 'dataclasses;python_version<"3.7"', - 'orjson<4.0', - # Dill doesn't have forwards-compatibility guarantees within minor version. - # Pickles created with a new version of dill may not unpickle using older - # version of dill. It is best to use the same version of dill on client and - # server, therefore list of allowed versions is very narrow. - # See: https://github.com/uqfoundation/dill/issues/341. - 'dill>=0.3.1.1,<0.3.2', - 'cloudpickle>=2.0.0,<3', - 'fastavro>=0.23.6,<2', - 'grpcio>=1.29.0,<2', - 'hdfs>=2.1.0,<3.0.0', - 'httplib2>=0.8,<0.20.0', - 'numpy>=1.14.3,<1.23.0', - 'pymongo>=3.8.0,<4.0.0', - 'protobuf>=3.12.2,<4', - 'proto-plus>=1.7.1,<2', - 'pyarrow>=0.15.1,<8.0.0', - 'pydot>=1.2.0,<2', - 'python-dateutil>=2.8.0,<3', - 'pytz>=2018.3', - 'requests>=2.24.0,<3.0.0', - 'typing-extensions>=3.7.0', -] - # [BEAM-8181] pyarrow cannot be installed on 32-bit Windows platforms. 
if sys.platform == 'win32' and sys.maxsize <= 2**32: - REQUIRED_PACKAGES = [ - p for p in REQUIRED_PACKAGES if not p.startswith('pyarrow') - ] - -REQUIRED_TEST_PACKAGES = [ - 'freezegun>=0.3.12', - 'joblib>=1.0.1', - 'mock>=1.0.1,<3.0.0', - 'pandas<2.0.0', - 'parameterized>=0.7.1,<0.8.0', - 'pyhamcrest>=1.9,!=1.10.0,<2.0.0', - 'pyyaml>=3.12,<7.0.0', - 'requests_mock>=1.7,<2.0', - 'tenacity>=5.0.2,<6.0', - 'pytest>=4.4.0,<5.0', - 'pytest-xdist>=1.29.0,<2', - 'pytest-timeout>=1.3.3,<2', - 'scikit-learn>=0.20.0', - 'sqlalchemy>=1.3,<2.0', - 'psycopg2-binary>=2.8.5,<3.0.0', - 'testcontainers[mysql]>=3.0.3,<4.0.0', - 'cryptography>=36.0.0', -] - -GCP_REQUIREMENTS = [ - 'cachetools>=3.1.0,<5', - 'google-apitools>=0.5.31,<0.5.32', - # NOTE: Maintainers, please do not require google-auth>=2.x.x - # Until this issue is closed - # https://github.com/googleapis/google-cloud-python/issues/10566 - 'google-auth>=1.18.0,<3', - 'google-auth-httplib2>=0.1.0,<0.2.0', - 'google-cloud-datastore>=1.8.0,<2', - 'google-cloud-pubsub>=2.1.0,<3', - 'google-cloud-pubsublite>=1.2.0,<2', - # GCP packages required by tests - 'google-cloud-bigquery>=1.6.0,<3', - 'google-cloud-bigquery-storage>=2.6.3', - 'google-cloud-core>=0.28.1,<2', - 'google-cloud-bigtable>=0.31.1,<2', - 'google-cloud-spanner>=1.13.0,<2', - 'grpcio-gcp>=0.2.2,<1', - # GCP Packages required by ML functionality - 'google-cloud-dlp>=3.0.0,<4', - 'google-cloud-language>=1.3.0,<2', - 'google-cloud-videointelligence>=1.8.0,<2', - 'google-cloud-vision>=0.38.0,<2', - 'google-cloud-recommendations-ai>=0.1.0,<=0.2.0' -] - -INTERACTIVE_BEAM = [ - 'facets-overview>=1.0.0,<2', - 'google-cloud-dataproc>=3.0.0,<3.2.0', - # IPython>=8 is not compatible with Python<=3.7 - 'ipython>=7,<8;python_version<="3.7"', - 'ipython>=8,<9;python_version>"3.7"', - 'ipykernel>=6,<7', - 'ipywidgets>=7.6.5,<8', - # Skip version 6.1.13 due to - # https://github.com/jupyter/jupyter_client/issues/637 - 'jupyter-client>=6.1.11,<6.1.13', - 'timeloop>=1.0.2,<2', 
-] - -INTERACTIVE_BEAM_TEST = [ - # notebok utils - 'nbformat>=5.0.5,<6', - 'nbconvert>=6.2.0,<7', - # headless chrome based integration tests - 'needle>=0.5.0,<1', - 'chromedriver-binary>=100,<101', - # use a fixed major version of PIL for different python versions - 'pillow>=7.1.1,<8', -] - -AWS_REQUIREMENTS = ['boto3 >=1.9'] - -AZURE_REQUIREMENTS = [ - 'azure-storage-blob >=12.3.2', - 'azure-core >=1.7.0', -] - + pyarrow_dependency = '' +else: + pyarrow_dependency = 'pyarrow>=0.15.1,<8.0.0' # We must generate protos after setup_requires are installed. def generate_protos_first(): @@ -272,6 +162,8 @@ def get_portability_package_data(): # structure must exist before the call to setuptools.find_packages() # executes below. generate_protos_first() + # Keep all dependencies inlined in the setup call, otherwise Dependabot won't + # be able to parse it. setuptools.setup( name=PACKAGE_NAME, version=PACKAGE_VERSION, @@ -309,7 +201,35 @@ def get_portability_package_data(): 'apache_beam/utils/counters.py', 'apache_beam/utils/windowed_value.py', ]), - install_requires=REQUIRED_PACKAGES, + install_requires=[ + # Avro 1.9.2 for python3 was broken. + # The issue was fixed in version 1.9.2.1 + 'crcmod>=1.7,<2.0', + 'orjson<4.0', + # Dill doesn't have forwards-compatibility guarantees within minor + # version. Pickles created with a new version of dill may not unpickle + # using older version of dill. It is best to use the same version of + # dill on client and server, therefore list of allowed versions is very + # narrow. See: https://github.com/uqfoundation/dill/issues/341. 
+ 'dill>=0.3.1.1,<0.3.2', + 'cloudpickle>=2.0.0,<3', + 'fastavro>=0.23.6,<2', + 'grpcio>=1.29.0,<2', + 'hdfs>=2.1.0,<3.0.0', + 'httplib2>=0.8,<0.21.0', + 'numpy>=1.14.3,<1.23.0', + 'pymongo>=3.8.0,<5.0.0', + 'protobuf>=3.12.2,<4', + 'proto-plus>=1.7.1,<2', + 'pydot>=1.2.0,<2', + 'python-dateutil>=2.8.0,<3', + 'pytz>=2018.3', + 'requests>=2.24.0,<3.0.0', + 'typing-extensions>=3.7.0', + # Dynamic dependencies must be specified in a separate list, otherwise + # Dependabot won't be able to parse the main list. Any dynamic + # dependencies will not receive updates from Dependabot. + ] + [pyarrow_dependency], python_requires=python_requires, # BEAM-8840: Do NOT use tests_require or setup_requires. extras_require={ @@ -319,12 +239,78 @@ def get_portability_package_data(): # https://github.com/sphinx-doc/sphinx/issues/9727 'docutils==0.17.1' ], - 'test': REQUIRED_TEST_PACKAGES, - 'gcp': GCP_REQUIREMENTS, - 'interactive': INTERACTIVE_BEAM, - 'interactive_test': INTERACTIVE_BEAM_TEST, - 'aws': AWS_REQUIREMENTS, - 'azure': AZURE_REQUIREMENTS, + 'test': [ + 'freezegun>=0.3.12', + 'joblib>=1.0.1', + 'mock>=1.0.1,<3.0.0', + 'pandas<2.0.0', + 'parameterized>=0.7.1,<0.8.0', + 'pyhamcrest>=1.9,!=1.10.0,<2.0.0', + 'pyyaml>=3.12,<7.0.0', + 'requests_mock>=1.7,<2.0', + 'tenacity>=5.0.2,<6.0', + 'pytest>=4.4.0,<5.0', + 'pytest-xdist>=1.29.0,<2', + 'pytest-timeout>=1.3.3,<2', + 'scikit-learn>=0.20.0', + 'sqlalchemy>=1.3,<2.0', + 'psycopg2-binary>=2.8.5,<3.0.0', + 'testcontainers[mysql]>=3.0.3,<4.0.0', + 'cryptography>=36.0.0', + ], + 'gcp': [ + 'cachetools>=3.1.0,<5', + 'google-apitools>=0.5.31,<0.5.32', + # NOTE: Maintainers, please do not require google-auth>=2.x.x + # Until this issue is closed + # https://github.com/googleapis/google-cloud-python/issues/10566 + 'google-auth>=1.18.0,<3', + 'google-auth-httplib2>=0.1.0,<0.2.0', + 'google-cloud-datastore>=1.8.0,<2', + 'google-cloud-pubsub>=2.1.0,<3', + 'google-cloud-pubsublite>=1.2.0,<2', + # GCP packages required by tests + 
'google-cloud-bigquery>=1.6.0,<3', + 'google-cloud-bigquery-storage>=2.6.3', + 'google-cloud-core>=0.28.1,<2', + 'google-cloud-bigtable>=0.31.1,<2', + 'google-cloud-spanner>=1.13.0,<2', + 'grpcio-gcp>=0.2.2,<1', + # GCP Packages required by ML functionality + 'google-cloud-dlp>=3.0.0,<4', + 'google-cloud-language>=1.3.0,<2', + 'google-cloud-videointelligence>=1.8.0,<2', + 'google-cloud-vision>=0.38.0,<2', + 'google-cloud-recommendations-ai>=0.1.0,<=0.2.0' + ], + 'interactive': [ + 'facets-overview>=1.0.0,<2', + 'google-cloud-dataproc>=3.0.0,<3.2.0', + # IPython>=8 is not compatible with Python<=3.7 + 'ipython>=7,<8;python_version<="3.7"', + 'ipython>=8,<9;python_version>"3.7"', + 'ipykernel>=6,<7', + 'ipywidgets>=7.6.5,<8', + # Skip version 6.1.13 due to + # https://github.com/jupyter/jupyter_client/issues/637 + 'jupyter-client>=6.1.11,<6.1.13', + 'timeloop>=1.0.2,<2', + ], + 'interactive_test': [ + # notebok utils + 'nbformat>=5.0.5,<6', + 'nbconvert>=6.2.0,<7', + # headless chrome based integration tests + 'needle>=0.5.0,<1', + 'chromedriver-binary>=100,<101', + # use a fixed major version of PIL for different python versions + 'pillow>=7.1.1,<8', + ], + 'aws': ['boto3 >=1.9'], + 'azure': [ + 'azure-storage-blob >=12.3.2', + 'azure-core >=1.7.0', + ], 'dataframe': ['pandas>=1.0,<1.5'] }, zip_safe=False, diff --git a/sdks/typescript/README.md b/sdks/typescript/README.md index d931e836941b..7971bf1d5634 100644 --- a/sdks/typescript/README.md +++ b/sdks/typescript/README.md @@ -62,7 +62,7 @@ encoding is used when we don't have sufficient type information. * We have added additional methods to the PCollection object, notably `map` and `flatmap`, [rather than only allowing apply](https://www.mail-archive.com/dev@beam.apache.org/msg06035.html). 
-In addition, `apply` can accept a function argument `(PColletion) => ...` as +In addition, `apply` can accept a function argument `(PCollection) => ...` as well as a PTransform subclass, which treats this callable as if it were a PTransform's expand. @@ -102,7 +102,7 @@ We currently offer asynchronous variants of `PValue.apply(...)` (in addition to the synchronous ones, as they are easier to chain) as well as making `Runner.run` asynchronous. TBD to do this for all user callbacks as well. -An example pipeline can be found at https://github.com/robertwb/beam-javascript/blob/javascript/sdks/node-ts/src/apache_beam/examples/wordcount.ts +An example pipeline can be found at https://github.com/apache/beam/blob/master/sdks/typescript/src/apache_beam/examples/wordcount.ts ## TODO diff --git a/sdks/typescript/package-lock.json b/sdks/typescript/package-lock.json index 49e62618f887..c39002e60016 100644 --- a/sdks/typescript/package-lock.json +++ b/sdks/typescript/package-lock.json @@ -1,12 +1,12 @@ { "name": "apache_beam", - "version": "0.37.0.dev", + "version": "0.38.0", "lockfileVersion": 2, "requires": true, "packages": { "": { "name": "apache_beam", - "version": "0.37.0.dev", + "version": "0.38.0", "dependencies": { "@grpc/grpc-js": "^1.4.6", "@protobuf-ts/grpc-transport": "^2.1.0", @@ -16,6 +16,7 @@ "chai": "^4.3.4", "date-fns": "^2.28.0", "fast-deep-equal": "^3.1.3", + "find-git-root": "^1.0.4", "long": "^4.0.0", "protobufjs": "^6.10.2", "queue-typescript": "^1.0.1", @@ -919,6 +920,11 @@ "node": ">=8" } }, + "node_modules/find-git-root": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/find-git-root/-/find-git-root-1.0.4.tgz", + "integrity": "sha512-468fmirKKgcrqfZfPn0xIpwZUUsZQcYXfx0RC2/jX39GPz83TwutQNZZhDrI6HqjO8cRejxQVaUY8GQdXopFfA==" + }, "node_modules/find-up": { "version": "5.0.0", "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz", @@ -2942,6 +2948,11 @@ "to-regex-range": "^5.0.1" } }, + "find-git-root": { + "version": 
"1.0.4", + "resolved": "https://registry.npmjs.org/find-git-root/-/find-git-root-1.0.4.tgz", + "integrity": "sha512-468fmirKKgcrqfZfPn0xIpwZUUsZQcYXfx0RC2/jX39GPz83TwutQNZZhDrI6HqjO8cRejxQVaUY8GQdXopFfA==" + }, "find-up": { "version": "5.0.0", "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz", diff --git a/sdks/typescript/package.json b/sdks/typescript/package.json index 1e13de36bfc6..6ab659ccf2e9 100644 --- a/sdks/typescript/package.json +++ b/sdks/typescript/package.json @@ -32,6 +32,7 @@ "chai": "^4.3.4", "date-fns": "^2.28.0", "fast-deep-equal": "^3.1.3", + "find-git-root": "^1.0.4", "long": "^4.0.0", "protobufjs": "^6.10.2", "queue-typescript": "^1.0.1", diff --git a/sdks/typescript/src/apache_beam/examples/wordcount.ts b/sdks/typescript/src/apache_beam/examples/wordcount.ts index d68d0f256008..961afb43e9bd 100644 --- a/sdks/typescript/src/apache_beam/examples/wordcount.ts +++ b/sdks/typescript/src/apache_beam/examples/wordcount.ts @@ -16,10 +16,24 @@ * limitations under the License. */ -// TODO: Should this be in a top-level examples dir, rather than under apache_beam. +// Run directly with +// +// node dist/src/apache_beam/examples/wordcount.js +// +// A different runner can be chosen via a --runner argument, e.g. +// +// node dist/src/apache_beam/examples/wordcount.js --runner=flink +// +// To run on Dataflow, pass the required arguments: +// +// node dist/src/apache_beam/examples/wordcount.js --runner=dataflow --project=PROJECT_ID --tempLocation=gs://BUCKET/DIR' --region=us-central1 + +// TODO: Should this be in a top-level examples dir, rather than under apache_beam? 
+ +import * as yargs from "yargs"; import * as beam from "../../apache_beam"; -import { DirectRunner } from "../runners/direct_runner"; +import { createRunner } from "../runners/runner"; import { count } from "../transforms/combiners"; import { GroupBy } from "../transforms/group_and_combine"; @@ -45,7 +59,7 @@ function wordCount(lines: beam.PCollection): beam.PCollection { } async function main() { - await new DirectRunner().run((root) => { + await createRunner(yargs.argv).run((root) => { const lines = root.apply( new beam.Create([ "In the beginning God created the heaven and the earth.", diff --git a/sdks/typescript/src/apache_beam/runners/dataflow.ts b/sdks/typescript/src/apache_beam/runners/dataflow.ts new file mode 100644 index 000000000000..958eb99c9565 --- /dev/null +++ b/sdks/typescript/src/apache_beam/runners/dataflow.ts @@ -0,0 +1,44 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +import { Pipeline } from "../internal/pipeline"; +import { PipelineResult, Runner } from "./runner"; +import { PortableRunner } from "./portable_runner/runner"; +import { PythonService } from "../utils/service"; + +export function dataflowRunner(runnerOptions: { + project: string; + tempLocation: string; + region: string; + [others: string]: any; +}): Runner { + return new (class extends Runner { + async runPipeline( + pipeline: Pipeline, + options: Object = {} + ): Promise { + return new PortableRunner( + runnerOptions as any, + new PythonService("apache_beam.runners.dataflow.dataflow_job_service", [ + "--port", + "{{PORT}}", + ]) + ).runPipeline(pipeline, options); + } + })(); +} diff --git a/sdks/typescript/src/apache_beam/runners/direct_runner.ts b/sdks/typescript/src/apache_beam/runners/direct_runner.ts index a1a10621af98..ff203d06c41d 100644 --- a/sdks/typescript/src/apache_beam/runners/direct_runner.ts +++ b/sdks/typescript/src/apache_beam/runners/direct_runner.ts @@ -44,14 +44,61 @@ import { } from "../values"; import { PaneInfoCoder } from "../coders/standard_coders"; import { Coder, Context as CoderContext } from "../coders/coders"; +import * as environments from "../internal/environments"; import { serializeFn, deserializeFn } from "../internal/serialize"; +const SUPPORTED_REQUIREMENTS: string[] = []; + +export function directRunner(options: Object = {}): Runner { + return new DirectRunner(options); +} + export class DirectRunner extends Runner { // All the operators for a given pipeline should share the same state. // This global mapping allows operators to look up a shared state object for // a given pipeline on deserialization. 
static inMemoryStatesRefs: Map = new Map(); + constructor(private options: Object = {}) { + super(); + } + + unsupportedFeatures(pipeline, options: Object = {}): string[] { + return [...this.unsupportedFeaturesIter(pipeline, options)]; + } + + *unsupportedFeaturesIter(pipeline, options: Object = {}) { + const proto: runnerApi.Pipeline = pipeline.proto; + for (const requirement of proto.requirements) { + if (!SUPPORTED_REQUIREMENTS.includes(requirement)) { + yield requirement; + } + } + + for (const env of Object.values(proto.components!.environments)) { + if ( + env.urn && + env.urn != environments.TYPESCRIPT_DEFAULT_ENVIRONMENT_URN + ) { + yield env.urn; + } + } + + for (const windowing of Object.values( + proto.components!.windowingStrategies + )) { + if ( + ![ + runnerApi.MergeStatus_Enum.UNSPECIFIED, + runnerApi.MergeStatus_Enum.NON_MERGING, + runnerApi.MergeStatus_Enum.ALREADY_MERGED, + ].includes(windowing.mergeStatus) + ) { + yield "MergeStatus=" + windowing.mergeStatus; + } + } + } + async runPipeline(p): Promise { // console.dir(p.proto, { depth: null }); diff --git a/sdks/typescript/src/apache_beam/runners/flink.ts b/sdks/typescript/src/apache_beam/runners/flink.ts new file mode 100644 index 000000000000..4acb68e642fa --- /dev/null +++ b/sdks/typescript/src/apache_beam/runners/flink.ts @@ -0,0 +1,84 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +const fs = require("fs"); +const os = require("os"); +const path = require("path"); + +import { Pipeline } from "../internal/pipeline"; +import { PipelineResult, Runner } from "./runner"; +import { PortableRunner } from "./portable_runner/runner"; +import { JavaJarService } from "../utils/service"; + +const MAGIC_HOST_NAMES = ["[local]", "[auto]"]; + +// These should stay in sync with gradle.properties. +const PUBLISHED_FLINK_VERSIONS = ["1.12", "1.13", "1.14"]; + +const defaultOptions = { + flinkMaster: "[local]", + flinkVersion: PUBLISHED_FLINK_VERSIONS[PUBLISHED_FLINK_VERSIONS.length - 1], +}; + +export function flinkRunner(runnerOptions: Object = {}): Runner { + return new (class extends Runner { + async runPipeline( + pipeline: Pipeline, + options: Object = {} + ): Promise { + const allOptions = { + ...defaultOptions, + ...runnerOptions, + ...options, + } as any; + if ( + !allOptions.environmentType && + MAGIC_HOST_NAMES.includes(allOptions.flinkMaster) + ) { + allOptions.environmentType = "LOOPBACK"; + } + if (!allOptions.artifactsDir) { + allOptions.artifactsDir = fs.mkdtempSync( + path.join(os.tmpdir(), "flinkArtifactsDir") + ); + } + + const jobServerJar = + allOptions.flinkJobServerJar || + (await JavaJarService.cachedJar( + JavaJarService.gradleToJar( + `runners:flink:${allOptions.flinkVersion}:job-server:shadowJar` + ) + )); + const jobServer = new JavaJarService(jobServerJar, [ + "--flink-master", + allOptions.flinkMaster, + "--artifacts-dir", + allOptions.artifactsDir, + "--job-port", + "{{PORT}}", + "--artifact-port", + "0", + 
"--expansion-port", + "0", + ]); + + return new PortableRunner(allOptions, jobServer).runPipeline(pipeline); + } + })(); +} diff --git a/sdks/typescript/src/apache_beam/runners/portable_runner/runner.ts b/sdks/typescript/src/apache_beam/runners/portable_runner/runner.ts index 3081249539f1..f5281aa969d4 100644 --- a/sdks/typescript/src/apache_beam/runners/portable_runner/runner.ts +++ b/sdks/typescript/src/apache_beam/runners/portable_runner/runner.ts @@ -28,11 +28,12 @@ import { ArtifactStagingServiceClient } from "../../proto/beam_artifact_api.clie import { Pipeline } from "../../internal/pipeline"; import { PipelineResult, Runner } from "../runner"; import { PipelineOptions } from "../../options/pipeline_options"; -import { JobState_Enum } from "../../proto/beam_job_api"; +import { JobState_Enum, JobStateEvent } from "../../proto/beam_job_api"; import { ExternalWorkerPool } from "../../worker/external_worker_service"; import * as environments from "../../internal/environments"; import * as artifacts from "../artifacts"; +import { Service as JobService } from "../../utils/service"; const TERMINAL_STATES = [ JobState_Enum.DONE, @@ -42,19 +43,22 @@ const TERMINAL_STATES = [ JobState_Enum.DRAINED, ]; +type completionCallback = (terminalState: JobStateEvent) => Promise; + class PortableRunnerPipelineResult implements PipelineResult { jobId: string; runner: PortableRunner; - workers?: ExternalWorkerPool; + completionCallbacks: completionCallback[]; + terminalState?: JobStateEvent; constructor( runner: PortableRunner, jobId: string, - workers: ExternalWorkerPool | undefined = undefined + completionCallbacks: completionCallback[] ) { this.runner = runner; this.jobId = jobId; - this.workers = workers; + this.completionCallbacks = completionCallbacks; } static isTerminal(state: JobState_Enum) { @@ -62,13 +66,15 @@ class PortableRunnerPipelineResult implements PipelineResult { } async getState() { + if (this.terminalState) { + return this.terminalState; + } const state = 
await this.runner.getJobState(this.jobId); - if ( - this.workers != undefined && - PortableRunnerPipelineResult.isTerminal(state.state) - ) { - this.workers.stop(); - this.workers = undefined; + if (PortableRunnerPipelineResult.isTerminal(state.state)) { + this.terminalState = state; + for (const callback of this.completionCallbacks) { + await callback(state); + } } return state; } @@ -96,11 +102,12 @@ class PortableRunnerPipelineResult implements PipelineResult { } export class PortableRunner extends Runner { - client: JobServiceClient; + client?: JobServiceClient; defaultOptions: any; constructor( - options: string | { jobEndpoint: string; [others: string]: any } + options: string | { jobEndpoint: string; [others: string]: any }, + private jobService: JobService | undefined = undefined ) { super(); if (typeof options == "string") { @@ -108,16 +115,25 @@ export class PortableRunner extends Runner { } else if (options) { this.defaultOptions = options; } - this.client = new JobServiceClient( - new GrpcTransport({ - host: this.defaultOptions?.jobEndpoint, - channelCredentials: ChannelCredentials.createInsecure(), - }) - ); + } + + async getClient(): Promise { + if (!this.client) { + if (this.jobService) { + this.defaultOptions.jobEndpoint = await this.jobService.start(); + } + this.client = new JobServiceClient( + new GrpcTransport({ + host: this.defaultOptions?.jobEndpoint, + channelCredentials: ChannelCredentials.createInsecure(), + }) + ); + } + return this.client; } async getJobState(jobId: string) { - const call = this.client.getState({ jobId }); + const call = (await this.getClient()).getState({ jobId }); return await call.response; } @@ -138,11 +154,18 @@ export class PortableRunner extends Runner { options = { ...this.defaultOptions, ...options }; } - const use_loopback_service = - (options as any)?.environmentType == "LOOPBACK"; - const workers = use_loopback_service ? 
new ExternalWorkerPool() : undefined; - if (use_loopback_service) { - workers!.start(); + const completionCallbacks: completionCallback[] = []; + + if (this.jobService) { + const jobService = this.jobService; + completionCallbacks.push(() => jobService.stop()); + } + + let loopbackAddress: string | undefined = undefined; + if ((options as any)?.environmentType == "LOOPBACK") { + const workers = new ExternalWorkerPool(); + loopbackAddress = await workers.start(); + completionCallbacks.push(() => workers.stop()); } // Replace the default environment according to the pipeline options. @@ -151,9 +174,9 @@ export class PortableRunner extends Runner { pipeline.components!.environments )) { if (env.urn == environments.TYPESCRIPT_DEFAULT_ENVIRONMENT_URN) { - if (use_loopback_service) { + if (loopbackAddress) { pipeline.components!.environments[envId] = - environments.asExternalEnvironment(env, workers!.address); + environments.asExternalEnvironment(env, loopbackAddress); } else { pipeline.components!.environments[envId] = environments.asDockerEnvironment( @@ -166,6 +189,7 @@ export class PortableRunner extends Runner { } // Inform the runner that we'd like to execute this pipeline. + console.debug("Preparing job."); let message: PrepareJobRequest = { pipeline, jobName: (options as any)?.jobName || "", @@ -182,10 +206,12 @@ export class PortableRunner extends Runner { ) ); } - const prepareResponse = await this.client.prepare(message).response; + const client = await this.getClient(); + const prepareResponse = await client.prepare(message).response; // Allow the runner to fetch any artifacts it can't interpret. if (prepareResponse.artifactStagingEndpoint) { + console.debug("Staging artifacts"); await artifacts.offerArtifacts( new ArtifactStagingServiceClient( new GrpcTransport({ @@ -198,7 +224,8 @@ export class PortableRunner extends Runner { } // Actually kick off the job. 
- const runCall = this.client.run({ + console.debug("Running job."); + const runCall = client.run({ preparationId: prepareResponse.preparationId, retrievalToken: "", }); @@ -208,6 +235,6 @@ export class PortableRunner extends Runner { // If desired, the user can use this handle to await job completion, but // this function returns as soon as the job is successfully started, not // once the job has completed. - return new PortableRunnerPipelineResult(this, jobId, workers); + return new PortableRunnerPipelineResult(this, jobId, completionCallbacks); } } diff --git a/sdks/typescript/src/apache_beam/runners/runner.ts b/sdks/typescript/src/apache_beam/runners/runner.ts index 272ef8407836..7fc2ad794776 100644 --- a/sdks/typescript/src/apache_beam/runners/runner.ts +++ b/sdks/typescript/src/apache_beam/runners/runner.ts @@ -25,11 +25,29 @@ export interface PipelineResult { waitUntilFinish(duration?: number): Promise; } +export function createRunner(options): Runner { + let runnerConstructor: (any) => Runner; + if (options.runner == undefined || options.runner == "default") { + runnerConstructor = defaultRunner; + } else if (options.runner == "direct") { + runnerConstructor = require("./direct_runner").directRunner; + } else if (options.runner == "universal") { + runnerConstructor = require("./universal").universalRunner; + } else if (options.runner == "flink") { + runnerConstructor = require("./flink").flinkRunner; + } else if (options.runner == "dataflow") { + runnerConstructor = require("./dataflow").dataflowRunner; + } else { + throw new Error("Unknown runner: " + options.runner); + } + return runnerConstructor(options); +} + /** * A Runner is the object that takes a pipeline definition and actually * executes, e.g. locally or on a distributed system. */ -export class Runner { +export abstract class Runner { /** * Runs the transform. 
* @@ -64,10 +82,27 @@ export class Runner { return this.runPipeline(p); } - protected async runPipeline( + abstract runPipeline( pipeline: Pipeline, options?: PipelineOptions - ): Promise { - throw new Error("Not implemented."); - } + ): Promise; +} + +export function defaultRunner(defaultOptions: Object): Runner { + return new (class extends Runner { + async runPipeline( + pipeline: Pipeline, + options: Object = {} + ): Promise { + const directRunner = + require("./direct_runner").directRunner(defaultOptions); + if (directRunner.unsupportedFeatures(pipeline, options).length == 0) { + return directRunner.runPipeline(pipeline, options); + } else { + return require("./universal") + .universalRunner(defaultOptions) + .runPipeline(pipeline, options); + } + } + })(); } diff --git a/sdks/typescript/src/apache_beam/runners/universal.ts b/sdks/typescript/src/apache_beam/runners/universal.ts new file mode 100644 index 000000000000..c2c4db14c69f --- /dev/null +++ b/sdks/typescript/src/apache_beam/runners/universal.ts @@ -0,0 +1,41 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +import { Pipeline } from "../internal/pipeline"; +import { PipelineResult, Runner } from "./runner"; +import { PortableRunner } from "./portable_runner/runner"; +import { PythonService } from "../utils/service"; + +export function universalRunner(runnerOptions: { + [others: string]: any; +}): Runner { + return new (class extends Runner { + async runPipeline( + pipeline: Pipeline, + options: Object = {} + ): Promise { + return new PortableRunner( + runnerOptions as any, + new PythonService( + "apache_beam.runners.portability.local_job_service_main", + ["--port", "{{PORT}}"] + ) + ).runPipeline(pipeline, options); + } + })(); +} diff --git a/sdks/typescript/src/apache_beam/transforms/internal.ts b/sdks/typescript/src/apache_beam/transforms/internal.ts index 232cfd2832fe..27f03c836ab7 100644 --- a/sdks/typescript/src/apache_beam/transforms/internal.ts +++ b/sdks/typescript/src/apache_beam/transforms/internal.ts @@ -55,6 +55,7 @@ export class Impulse extends PTransform> { urn: Impulse.urn, payload: urns.IMPULSE_BUFFER, }); + transformProto.environmentId = ""; return pipeline.createPCollectionInternal(new BytesCoder()); } } @@ -134,6 +135,7 @@ export class GroupByKey extends PTransform< urn: GroupByKey.urn, payload: undefined!, }); + transformProto.environmentId = ""; // TODO: (Cleanup) warn about BsonObjectCoder and (non)deterministic key ordering? const keyCoder = pipeline.getCoder(inputCoderProto.componentCoderIds[0]); diff --git a/sdks/typescript/src/apache_beam/utils/service.ts b/sdks/typescript/src/apache_beam/utils/service.ts index 693731dc8655..bb5554e3123e 100644 --- a/sdks/typescript/src/apache_beam/utils/service.ts +++ b/sdks/typescript/src/apache_beam/utils/service.ts @@ -22,6 +22,7 @@ const os = require("os"); const net = require("net"); const path = require("path"); const childProcess = require("child_process"); +const findGitRoot = require("find-git-root"); // TODO: (Typescript) Why can't the var above be used as a namespace? 
import { ChildProcess } from "child_process"; @@ -47,17 +48,35 @@ export class SubprocessService { process: ChildProcess; cmd: string; args: string[]; + name: string; - constructor(cmd: string, args: string[]) { + constructor( + cmd: string, + args: string[], + name: string | undefined = undefined + ) { this.cmd = cmd; this.args = args; + this.name = name || cmd; + } + + static async freePort(): Promise { + return new Promise((resolve) => { + const srv = net.createServer(); + srv.listen(0, () => { + const port = srv.address().port; + srv.close((_) => resolve(port)); + }); + }); } async start() { - // TODO: (Cleanup) Choose a free port. const host = "localhost"; - const port = "7778"; - console.log(this.args.map((arg) => arg.replace("{{PORT}}", port))); + const port = (await SubprocessService.freePort()).toString(); + console.debug( + this.cmd, + this.args.map((arg) => arg.replace("{{PORT}}", port)) + ); this.process = childProcess.spawn( this.cmd, this.args.map((arg) => arg.replace("{{PORT}}", port)), @@ -67,7 +86,11 @@ export class SubprocessService { ); try { + console.debug( + `Waiting for ${this.name} to be available on port ${port}.` + ); await this.portReady(port, host, 10000); + console.debug(`Service ${this.name} available.`); } catch (error) { this.process.kill(); throw error; @@ -77,6 +100,7 @@ export class SubprocessService { } async stop() { + console.log(`Tearing down ${this.name}.`); this.process.kill(); } @@ -91,9 +115,9 @@ export class SubprocessService { try { await new Promise((resolve, reject) => { const socket = net.createConnection(port, host, () => { - resolve(); - socket.end(); connected = true; + socket.end(); + resolve(); }); socket.on("error", (err) => { reject(err); @@ -123,10 +147,12 @@ export function serviceProviderFromJavaGradleTarget( }; } +const BEAM_CACHE = path.join(os.homedir(), ".apache_beam", "cache"); + export class JavaJarService extends SubprocessService { static APACHE_REPOSITORY = "https://repo.maven.apache.org/maven2"; 
static BEAM_GROUP_ID = "org.apache.beam"; - static JAR_CACHE = path.join(os.homedir(), ".apache_beam", "cache", "jars"); + static JAR_CACHE = path.join(BEAM_CACHE, "jars"); constructor(jar: string, args: string[] | undefined = undefined) { if (args == undefined) { @@ -185,16 +211,7 @@ export class JavaJarService extends SubprocessService { } const gradlePackage = gradleTarget.match(/^:?(.*):[^:]+:?$/)![1]; const artifactId = "beam-" + gradlePackage.replaceAll(":", "-"); - // TODO: Do this more robustly, e.g. use the git root. - const projectRoot = path.resolve( - __dirname, - "..", - "..", - "..", - "..", - "..", - ".." - ); + const projectRoot = path.dirname(findGitRoot(__dirname)); const localPath = path.join( projectRoot, gradlePackage.replaceAll(":", path.sep), @@ -256,3 +273,60 @@ export class JavaJarService extends SubprocessService { ); } } + +export class PythonService extends SubprocessService { + static VENV_CACHE = path.join(BEAM_CACHE, "venvs"); + + static whichPython(): string { + for (const bin of ["python3", "python"]) { + try { + const result = childProcess.spawnSync(bin, ["--version"]); + if (result.status == 0) { + return bin; + } + } catch (err) { + // Try the next one. + } + } + throw new Error("Can't find a Python executable."); + } + + static beamPython(): string { + const projectRoot = path.dirname(findGitRoot(__dirname)); + // TODO: Package this up with the npm. 
+ const bootstrapScript = path.join( + projectRoot, + "sdks", + "java", + "extensions", + "python", + "src", + "main", + "resources", + "org", + "apache", + "beam", + "sdk", + "extensions", + "python", + "bootstrap_beam_venv.py" + ); + console.debug("Invoking Python bootstrap script."); + const result = childProcess.spawnSync( + PythonService.whichPython(), + [bootstrapScript], + { encoding: "latin1" } + ); + if (result.status == 0) { + console.debug(result.stdout); + const lines = result.stdout.trim().split("\n"); + return lines[lines.length - 1]; + } else { + throw new Error(result.output); + } + } + + constructor(module: string, args: string[] = []) { + super(PythonService.beamPython(), ["-u", "-m", module].concat(args)); + } +} diff --git a/sdks/typescript/src/apache_beam/worker/data.ts b/sdks/typescript/src/apache_beam/worker/data.ts index b68ba41ff46a..436010c109a3 100644 --- a/sdks/typescript/src/apache_beam/worker/data.ts +++ b/sdks/typescript/src/apache_beam/worker/data.ts @@ -72,6 +72,9 @@ export class MultiplexingDataChannel { } } }); + this.dataChannel.on("error", (err) => { + console.log("Data channel error", err); + }); } close() { diff --git a/sdks/typescript/src/apache_beam/worker/external_worker_service.ts b/sdks/typescript/src/apache_beam/worker/external_worker_service.ts index 02f77e0d8770..f20b8fb904a7 100644 --- a/sdks/typescript/src/apache_beam/worker/external_worker_service.ts +++ b/sdks/typescript/src/apache_beam/worker/external_worker_service.ts @@ -36,13 +36,12 @@ export class ExternalWorkerPool { server: grpc.Server; workers: Map = new Map(); - // TODO: (Cleanup) Choose a free port. 
- constructor(address: string = "localhost:5555") { + constructor(address: string = "localhost:0") { this.address = address; } - start() { - console.log("Starting the workers at ", this.address); + async start(): Promise { + console.log("Starting loopback workers at ", this.address); const this_ = this; this.server = new grpc.Server(); @@ -87,23 +86,35 @@ export class ExternalWorkerPool { }, }; - this.server.bindAsync( - this.address, - grpc.ServerCredentials.createInsecure(), - (err: Error | null, port: number) => { - if (err) { - console.error(`Server error: ${err.message}`); - } else { - console.log(`Server bound on port: ${port}`); - this_.server.start(); - } - } - ); - this.server.addService(beamFnExternalWorkerPoolDefinition, workerService); + + return new Promise((resolve, reject) => { + this.server.bindAsync( + this.address, + grpc.ServerCredentials.createInsecure(), + (err: Error | null, port: number) => { + if (err) { + reject(`Error starting loopback service: ${err.message}`); + } else { + console.log(`Server bound on port: ${port}`); + this_.address = `localhost:${port}`; + this_.server.start(); + resolve(this_.address); + } + } + ); + }); } - stop() { + async stop(timeoutMs = 100) { + console.debug("Shutting down external workers."); + // Let the runner attempt to gracefully shut these down. 
+ const start = Date.now(); + while (Date.now() - start < timeoutMs) { + if (this.workers.size) { + await new Promise((r) => setTimeout(r, timeoutMs / 10)); + } + } this.server.forceShutdown(); } } diff --git a/sdks/typescript/src/apache_beam/worker/worker.ts b/sdks/typescript/src/apache_beam/worker/worker.ts index 722ff6359341..8f30f26126f1 100644 --- a/sdks/typescript/src/apache_beam/worker/worker.ts +++ b/sdks/typescript/src/apache_beam/worker/worker.ts @@ -88,10 +88,18 @@ export class Worker { this.controlChannel.on("end", () => { console.log("Control channel closed."); for (const dataChannel of this.dataChannels.values()) { - dataChannel.close(); + try { + // Best effort. + dataChannel.close(); + } finally { + } } for (const stateChannel of this.stateChannels.values()) { - stateChannel.close(); + try { + // Best effort. + stateChannel.close(); + } finally { + } } }); } diff --git a/website/www/site/assets/icons/linkedin-icon.svg b/website/www/site/assets/icons/linkedin-icon.svg new file mode 100644 index 000000000000..86d58686665b --- /dev/null +++ b/website/www/site/assets/icons/linkedin-icon.svg @@ -0,0 +1,21 @@ + + + + diff --git a/website/www/site/assets/scss/_footer.sass b/website/www/site/assets/scss/_footer.sass index ce99da118e58..0aa826792a18 100644 --- a/website/www/site/assets/scss/_footer.sass +++ b/website/www/site/assets/scss/_footer.sass @@ -43,6 +43,12 @@ padding: 5px box-sizing: border-box + .footer__cols__col--group + display: flex + gap: 30px + @media (max-width: $ak-breakpoint-lg) + gap: 0 + .footer__cols__col__title color: #fff font-weight: $font-weight-bold @@ -54,7 +60,16 @@ .footer__cols__col__logo margin-bottom: $pad @media (max-width: $ak-breakpoint-lg) - margin-right: 50px + margin-right: 20px + a + img + width: 64px + border-radius: 50% + filter: grayscale(100%) + opacity: 0.7 + &:hover, focus + filter: grayscale(0) + opacity: 1 .footer__flex_mobile display: flex @@ -93,6 +108,6 @@ .main-padding padding-bottom: 48px - background: 
#37424B; + background: #37424B @media (max-width: $ak-breakpoint-lg) padding-bottom: 100px diff --git a/website/www/site/assets/scss/_graphic.scss b/website/www/site/assets/scss/_graphic.scss index 7b25a596ec6e..2396e076c468 100644 --- a/website/www/site/assets/scss/_graphic.scss +++ b/website/www/site/assets/scss/_graphic.scss @@ -34,6 +34,14 @@ margin-bottom: 30px; } } + + .row-image { + display: block; + @media (max-width: 1024px) { + display: none; + } + } + .row { display: flex; justify-content: space-between; diff --git a/website/www/site/assets/scss/_logos.scss b/website/www/site/assets/scss/_logos.scss index 5706a9bba9c2..849ecccaf972 100644 --- a/website/www/site/assets/scss/_logos.scss +++ b/website/www/site/assets/scss/_logos.scss @@ -45,7 +45,7 @@ @media (max-width: $tablet) { .logos { - padding: $pad-md $pad-s; + padding: 20px 30px; .logos-logos { max-width: 360px; diff --git a/website/www/site/assets/scss/_pillars.scss b/website/www/site/assets/scss/_pillars.scss index ae80f38c5ffb..9f05beaa7fbe 100644 --- a/website/www/site/assets/scss/_pillars.scss +++ b/website/www/site/assets/scss/_pillars.scss @@ -67,8 +67,17 @@ .pillars-social-icons { display: flex; align-items: center; + justify-content: space-between; margin-bottom: 45px; + div { + padding-right: 20px; + + &:last-child { + padding-right: 0; + } + } + svg { height: 41px; width: auto; @@ -87,10 +96,6 @@ opacity: 1; } } - - .pillars-youtube-icon { - margin: 0 80px; - } } .pillars-social-text { @@ -98,6 +103,78 @@ max-width: 285px; } } + + h2 { + @extend .component-title; + } + + .margin { + margin-top: 84px; + margin-bottom: 84px; + @media (max-width: $mobile) { + margin-top: 0; + margin-bottom: 64px; + } + } + + .row { + display: flex; + justify-content: space-between; + @media (max-width: $mobile) { + flex-direction: column; + align-items: center; + } + .logos-row { + display: flex; + align-items: center; + margin-top: 20px; + max-height: 73px; + @media (max-width: $mobile) { + margin-top: 
64px; + } + img { + height: auto; + width: 112px; + } + } + .first_logo { + margin-right: 18px; + } + #last_logo { + margin-left: 18px; + width: 55px; + } + .column { + display: flex; + flex-direction: column; + align-items: center; + max-width: 306px; + + h4 { + margin-top: 32px; + } + .more { + margin-top: 32px; + font-size: 14px; + font-weight: bold; + line-height: 16px; + letter-spacing: 0.6px; + color: #f26628; + } + } + .icon { + width: 34px; + height: 44px; + margin-top: 16px; + @media (max-width: $mobile) { + margin-top: 64px; + } + } + img { + max-width: 306px; + height: 42px; + } + } } @media (max-width: $ak-breakpoint-lg) { diff --git a/website/www/site/assets/scss/_pipelines.scss b/website/www/site/assets/scss/_pipelines.scss new file mode 100644 index 000000000000..4057b8aabded --- /dev/null +++ b/website/www/site/assets/scss/_pipelines.scss @@ -0,0 +1,71 @@ +/*! + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +@import "media"; + +.pipelines { + padding: 40px 30px 10px; + + .pipelines-title { + @extend .component-title; + + text-align: center; + } + + .pipelines-logos { + display: flex; + justify-content: space-around; + width: 100%; + max-width: 1111px; + margin: 70px auto 60px; + + .pipelines-logo { + line-height: 0; + + img { + max-height: 70px; + } + } + } +} + +@media (max-width: $tablet) { + .pipelines { + padding: 20px 30px 10px; + + .pipelines-logos { + max-width: 360px; + flex-wrap: wrap; + justify-content: center; + margin: 50px auto 0; + + .pipelines-logo { + margin-right: 60px; + margin-bottom: 50px; + + img { + max-height: 45px; + } + } + + :nth-child(2), + :last-child { + margin-right: 0; + } + } + } +} diff --git a/website/www/site/assets/scss/_quotes.scss b/website/www/site/assets/scss/_quotes.scss index 96667a79931f..8d50f1384b3c 100644 --- a/website/www/site/assets/scss/_quotes.scss +++ b/website/www/site/assets/scss/_quotes.scss @@ -77,7 +77,7 @@ width: 100%; overflow: hidden; height: 600px; - margin: 76px 0 20px 0; + margin: 30px 0 20px 0; padding: 55px 20px 24px 20px; border-radius: 16px; background-color: $color-white; @@ -113,10 +113,6 @@ @media (max-width: $tablet) { .quotes { - .quotes-title { - margin-bottom: 64px; - } - h2 { margin-bottom: 0 !important; } diff --git a/website/www/site/assets/scss/main.scss b/website/www/site/assets/scss/main.scss index a86585101fb8..e3510b5d06e3 100644 --- a/website/www/site/assets/scss/main.scss +++ b/website/www/site/assets/scss/main.scss @@ -55,4 +55,5 @@ @import "_powered_by.scss"; @import "_case_study.scss"; @import "_banner.sass"; +@import "_pipelines.scss"; @import "_about.sass"; diff --git a/website/www/site/data/en/pillars_social.yaml b/website/www/site/data/en/pillars_social.yaml index 2ed3ff6c03a9..192780de264d 100644 --- a/website/www/site/data/en/pillars_social.yaml +++ b/website/www/site/data/en/pillars_social.yaml @@ -13,9 +13,12 @@ - name: pillars-github-icon icon: icons/github-icon.svg 
url: https://github.com/apache/beam +- name: pillars-linkedin-icon + icon: icons/linkedin-icon.svg + url: https://www.linkedin.com/company/apache-beam/ - name: pillars-youtube-icon icon: icons/youtube-icon.svg url: https://www.youtube.com/channel/UChNnb_YO_7B0HlW6FhAXZZQ - name: pillars-twitter-icon icon: icons/twitter-icon.svg - url: https://twitter.com/apachebeam \ No newline at end of file + url: https://twitter.com/apachebeam diff --git a/website/www/site/data/pipelines.yaml b/website/www/site/data/pipelines.yaml new file mode 100644 index 000000000000..767ca12e10df --- /dev/null +++ b/website/www/site/data/pipelines.yaml @@ -0,0 +1,24 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +- title: Python + image_url: /images/logos/sdks/python_pipelines.png + url: http://apache-beam-website-pull-requests.storage.googleapis.com/17388/documentation/sdks/python/index.html +- title: Java + image_url: /images/logos/sdks/java_pipelines.png + url: http://apache-beam-website-pull-requests.storage.googleapis.com/17388/documentation/sdks/java/index.html +- title: Go + image_url: /images/logos/sdks/go_pipelines.png + url: http://apache-beam-website-pull-requests.storage.googleapis.com/17388/documentation/sdks/go/index.html +- title: SQL + image_url: /images/logos/sdks/sql_pipelines.png + url: http://apache-beam-website-pull-requests.storage.googleapis.com/17388/documentation/dsls/sql/overview/index.html diff --git a/website/www/site/data/works_with.yaml b/website/www/site/data/works_with.yaml index 6447a1402cf4..96bcb336c920 100644 --- a/website/www/site/data/works_with.yaml +++ b/website/www/site/data/works_with.yaml @@ -12,16 +12,19 @@ - title: Flink image_url: /images/logo_flink.png - url: https://flink.apache.org + url: https://beam.apache.org/documentation/runners/flink/ - title: Spark image_url: /images/logo_spark.png - url: https://spark.apache.org/ + url: https://beam.apache.org/documentation/runners/spark/ - title: Google Cloud Dataflow image_url: /images/logo_google_cloud.png - url: https://cloud.google.com/dataflow/ + url: https://beam.apache.org/documentation/runners/dataflow/ - title: Samza image_url: /images/logo_samza.png - url: https://samza.apache.org/ + url: https://beam.apache.org/documentation/runners/samza/ - title: Twister2 image_url: /images/logo_twister2.png - url: https://twister2.org// \ No newline at end of file + url: https://beam.apache.org/documentation/runners/twister2/ +- title: Amazon Kinesis Data Analytics + image_url: /images/logo_amazon-kinesis.png + url: https://docs.aws.amazon.com/kinesisanalytics/latest/java/examples-beam.html diff --git a/website/www/site/i18n/home/logos/en.yaml 
b/website/www/site/i18n/home/logos/en.yaml index 772807b469da..1d433cb7a33b 100644 --- a/website/www/site/i18n/home/logos/en.yaml +++ b/website/www/site/i18n/home/logos/en.yaml @@ -11,4 +11,7 @@ # limitations under the License. - id: home-logos-title - translation: "Apache Beam Runs in These Environments" + translation: "Write Once, Run Anywhere" + +- id: home-pipelines-title + translation: "Create Multi-language Pipelines" diff --git a/website/www/site/layouts/_default/baseof.html b/website/www/site/layouts/_default/baseof.html index b90b21c1fd87..3c83c919a7e0 100644 --- a/website/www/site/layouts/_default/baseof.html +++ b/website/www/site/layouts/_default/baseof.html @@ -22,8 +22,9 @@ {{ block "ctas-section" . }}{{ end }} {{ block "graphic-section" . }}{{ end }} {{ block "pillars-section" . }}{{ end }} - {{ block "playground-section" . }}{{ end }} {{ block "logos-section" . }}{{ end }} + {{ block "pipelines-section" . }}{{ end }} + {{ block "playground-section" . }}{{ end }} {{ block "quotes-section" . }}{{ end }} {{ block "quotes-mobile-section" . }}{{ end }} {{ block "calendar-section" . }}{{ end }} diff --git a/website/www/site/layouts/index.html b/website/www/site/layouts/index.html index 397ad93f2142..7e79c2db6f58 100644 --- a/website/www/site/layouts/index.html +++ b/website/www/site/layouts/index.html @@ -36,28 +36,17 @@

{{ T "home-hero-subheading" }}

{{ end }} {{ define "pillars-section" }} -
-

- {{ T "home-pillars-title" }} -

-
- {{ $data := index $.Site.Data .Site.Language.Lang }} - {{ range $pillar := $data.pillars }} - {{ partial "pillars/pillars-item" (dict "logo" $pillar.icon "header" $pillar.title "text" $pillar.body) }} - {{ end }} -
-
-
+
+

+ {{ T "home-pillars-title" }} +

+
{{ $data := index $.Site.Data .Site.Language.Lang }} - {{ range $pillars_social := $data.pillars_social }} - {{ partial "pillars/pillars-social" (dict "icon" $pillars_social.icon "url" $pillars_social.url "name" $pillars_social.name) }} + {{ range $pillar := $data.pillars }} + {{ partial "pillars/pillars-item" (dict "logo" $pillar.icon "header" $pillar.title "text" $pillar.body) }} {{ end }}
-

- {{ T "home-pillars-social-text" }} -

-
{{ end }} {{ define "playground-section" }} @@ -85,58 +74,24 @@

Try Beam Playground

{{ end }} {{ define "graphic-section" }} -
-
-

{{ T "home-model-title1" }}

-
-
- - - - - -
-
- {{ $data := index $.Site.Data .Site.Language.Lang }} - {{ range $item := $data.graphic }} -
- -

{{ .title }}

-

{{ .body }}

-
- {{ end }} -
-
-
-
-

{{ T "home-model-title2" }}

-
-
-
- - - -
-

{{ T "home-graphic-more" }}

-
-

{{ T "home-graphic-runner-title" }}

-

{{ T "home-graphic-runner-body" }}

-
-
-
-
- - -
-

{{ T "home-graphic-more" }}

-
-

{{ T "home-graphic-language-title" }}

-

{{ T "home-graphic-language-body" }}

+
+
+

{{ T "home-model-title1" }}

+
+ +
+ {{ $data := index $.Site.Data .Site.Language.Lang }} + {{ range $item := $data.graphic }} +
+ +

{{ .title }}

+

{{ .body }}

+
+ {{ end }}
-
{{ end }} {{ define "calendar-section" }} @@ -260,3 +215,17 @@

{{ end }} +{{ define "pipelines-section" }} +
+
+ {{ T "home-pipelines-title" }} +
+
+ {{ range $pipeline := $.Site.Data.pipelines }} + + {{ end }} +
+
+{{ end }} diff --git a/website/www/site/layouts/partials/footer.html b/website/www/site/layouts/partials/footer.html index ef6604f5557e..cfdde015a26f 100644 --- a/website/www/site/layouts/partials/footer.html +++ b/website/www/site/layouts/partials/footer.html @@ -62,6 +62,32 @@ {{ T "footer-copy" }}
+
diff --git a/website/www/site/static/images/graphic-background.png b/website/www/site/static/images/graphic-background.png new file mode 100644 index 000000000000..0c40bac6103f Binary files /dev/null and b/website/www/site/static/images/graphic-background.png differ diff --git a/website/www/site/static/images/logo_amazon-kinesis.png b/website/www/site/static/images/logo_amazon-kinesis.png new file mode 100644 index 000000000000..080378eba475 Binary files /dev/null and b/website/www/site/static/images/logo_amazon-kinesis.png differ diff --git a/website/www/site/static/images/logos/sdks/go_pipelines.png b/website/www/site/static/images/logos/sdks/go_pipelines.png new file mode 100644 index 000000000000..bd00fc64f00f Binary files /dev/null and b/website/www/site/static/images/logos/sdks/go_pipelines.png differ diff --git a/website/www/site/static/images/logos/sdks/java_pipelines.png b/website/www/site/static/images/logos/sdks/java_pipelines.png new file mode 100644 index 000000000000..7f20bdd29635 Binary files /dev/null and b/website/www/site/static/images/logos/sdks/java_pipelines.png differ diff --git a/website/www/site/static/images/logos/sdks/python_pipelines.png b/website/www/site/static/images/logos/sdks/python_pipelines.png new file mode 100644 index 000000000000..469a2a2ff9d0 Binary files /dev/null and b/website/www/site/static/images/logos/sdks/python_pipelines.png differ diff --git a/website/www/site/static/images/logos/sdks/sql_pipelines.png b/website/www/site/static/images/logos/sdks/sql_pipelines.png new file mode 100644 index 000000000000..e464391589ae Binary files /dev/null and b/website/www/site/static/images/logos/sdks/sql_pipelines.png differ diff --git a/website/www/site/static/images/logos/social-icons/github-logo-150.png b/website/www/site/static/images/logos/social-icons/github-logo-150.png new file mode 100644 index 000000000000..6006868e562d Binary files /dev/null and b/website/www/site/static/images/logos/social-icons/github-logo-150.png 
differ diff --git a/website/www/site/static/images/logos/social-icons/linkedin-logo-150.png b/website/www/site/static/images/logos/social-icons/linkedin-logo-150.png new file mode 100644 index 000000000000..4eedb182c087 Binary files /dev/null and b/website/www/site/static/images/logos/social-icons/linkedin-logo-150.png differ diff --git a/website/www/site/static/images/logos/social-icons/twitter-logo-150.png b/website/www/site/static/images/logos/social-icons/twitter-logo-150.png new file mode 100644 index 000000000000..b12f0212f2f3 Binary files /dev/null and b/website/www/site/static/images/logos/social-icons/twitter-logo-150.png differ diff --git a/website/www/site/static/images/logos/social-icons/youtube-logo-150.png b/website/www/site/static/images/logos/social-icons/youtube-logo-150.png new file mode 100644 index 000000000000..2235d85ac12b Binary files /dev/null and b/website/www/site/static/images/logos/social-icons/youtube-logo-150.png differ