diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index fef56a1..867a546 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -1,6 +1,6 @@
 repos:
   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.1.8
+    rev: v0.7.1
     hooks:
       - id: ruff
         args: [ --fix, --exit-non-zero-on-fix ]
diff --git a/docs/gen_ref_pages.py b/docs/gen_ref_pages.py
index b0b5ea1..4e1ddde 100644
--- a/docs/gen_ref_pages.py
+++ b/docs/gen_ref_pages.py
@@ -1,4 +1,5 @@
 """Generate the code reference pages."""
+
 from __future__ import annotations
 
 from pathlib import Path
diff --git a/examples/python/my_modules/pi.py b/examples/python/my_modules/pi.py
index 7b6009d..4fc53fc 100644
--- a/examples/python/my_modules/pi.py
+++ b/examples/python/my_modules/pi.py
@@ -2,6 +2,7 @@
 This is an example module that contains a class definition used in the job submission example.
 """
+
 from __future__ import annotations
diff --git a/pyproject.toml b/pyproject.toml
index f36d0bf..f97e2dc 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -65,6 +65,7 @@ target-version = "py38"
 line-length = 110
 indent-width = 4
 src = ["spark_on_k8s", "tests"]
+[tool.ruff.lint]
 fixable = ["ALL"]
 ignore = ["E712"]
 select = [
@@ -79,12 +80,12 @@ select = [
     "TCH", # flake8-type-checking
 ]
 
-[tool.ruff.format]
-docstring-code-format = true
-
-[tool.ruff.isort]
+[tool.ruff.lint.isort]
 required-imports = ["from __future__ import annotations"]
 combine-as-imports = true
 
+[tool.ruff.format]
+docstring-code-format = true
+
 [tool.ruff.lint.pydocstyle]
 convention = "google"
diff --git a/spark_on_k8s/client.py b/spark_on_k8s/client.py
index 324653f..2446daa 100644
--- a/spark_on_k8s/client.py
+++ b/spark_on_k8s/client.py
@@ -322,9 +322,9 @@ def submit_app(
             basic_conf["spark.dynamicAllocation.maxExecutors"] = f"{executor_instances.max}"
             basic_conf["spark.dynamicAllocation.initialExecutors"] = f"{executor_instances.initial or 0}"
         else:
-            basic_conf[
-                "spark.executor.instances"
-            ] = f"{executor_instances.initial if executor_instances.initial is not None else 2}"
+            basic_conf["spark.executor.instances"] = (
+                f"{executor_instances.initial if executor_instances.initial is not None else 2}"
+            )
         if executor_volume_mounts:
             basic_conf.update(
                 self._executor_volumes_config(volumes=volumes, volume_mounts=executor_volume_mounts)