chore: update ruff version to v0.7.1 (#93)
hussein-awala authored Oct 25, 2024
1 parent b9a9048 commit 3e2f100
Showing 5 changed files with 11 additions and 8 deletions.
2 changes: 1 addition & 1 deletion .pre-commit-config.yaml
@@ -1,6 +1,6 @@
 repos:
   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.1.8
+    rev: v0.7.1
     hooks:
       - id: ruff
         args: [ --fix, --exit-non-zero-on-fix ]
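
Bumping rev re-pins the hook that pre-commit runs. A rough Python sketch of the roughly equivalent invocation (an assumption for illustration: ruff is on PATH, whereas the real hook runs in pre-commit's managed environment and receives the staged file paths):

# Sketch only: approximates what the updated hook executes, using the
# flags from the config above; pre-commit substitutes staged paths for ".".
import subprocess

result = subprocess.run(["ruff", "check", "--fix", "--exit-non-zero-on-fix", "."])
raise SystemExit(result.returncode)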
1 change: 1 addition & 0 deletions docs/gen_ref_pages.py
@@ -1,4 +1,5 @@
"""Generate the code reference pages."""

from __future__ import annotations

from pathlib import Path
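
The single added line here (and the matching one in pi.py below) is a blank line after the module docstring, which the newer ruff formatter inserts before the first statement. A minimal sketch of the layout it now produces, as a hypothetical module:

"""Example module docstring."""

from __future__ import annotations  # kept first by the required-imports setting below

from pathlib import Path

print(Path.cwd())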
1 change: 1 addition & 0 deletions examples/python/my_modules/pi.py
@@ -2,6 +2,7 @@
 This is an example module that contains a class definition
 used in the job submission example.
 """
+
 from __future__ import annotations
 
 
9 changes: 5 additions & 4 deletions pyproject.toml
@@ -65,6 +65,7 @@ target-version = "py38"
 line-length = 110
 indent-width = 4
 src = ["spark_on_k8s", "tests"]
+[tool.ruff.lint]
 fixable = ["ALL"]
 ignore = ["E712"]
 select = [
@@ -79,12 +80,12 @@ select = [
     "TCH", # flake8-type-checking
 ]
 
-[tool.ruff.format]
-docstring-code-format = true
-
-[tool.ruff.isort]
+[tool.ruff.lint.isort]
 required-imports = ["from __future__ import annotations"]
 combine-as-imports = true
 
+[tool.ruff.format]
+docstring-code-format = true
+
 [tool.ruff.lint.pydocstyle]
 convention = "google"
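
These pyproject.toml hunks follow ruff's configuration migration: lint settings such as fixable, ignore, select, isort, and pydocstyle move under the nested [tool.ruff.lint] table instead of sitting directly under [tool.ruff], while [tool.ruff.format] stays top-level. A small sketch for inspecting the resulting layout (assumes Python 3.11+ for tomllib):

# Sketch: read back the nested tables this commit introduces.
import tomllib

with open("pyproject.toml", "rb") as f:
    config = tomllib.load(f)

lint = config["tool"]["ruff"]["lint"]
print(lint["ignore"])                     # ["E712"]
print(lint["isort"]["required-imports"])  # ["from __future__ import annotations"]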
6 changes: 3 additions & 3 deletions spark_on_k8s/client.py
@@ -322,9 +322,9 @@ def submit_app(
             basic_conf["spark.dynamicAllocation.maxExecutors"] = f"{executor_instances.max}"
             basic_conf["spark.dynamicAllocation.initialExecutors"] = f"{executor_instances.initial or 0}"
         else:
-            basic_conf[
-                "spark.executor.instances"
-            ] = f"{executor_instances.initial if executor_instances.initial is not None else 2}"
+            basic_conf["spark.executor.instances"] = (
+                f"{executor_instances.initial if executor_instances.initial is not None else 2}"
+            )
         if executor_volume_mounts:
             basic_conf.update(
                 self._executor_volumes_config(volumes=volumes, volume_mounts=executor_volume_mounts)
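
The client.py change is behavior-neutral: the newer formatter keeps the subscript on one line and wraps the long right-hand side in parentheses rather than splitting the subscript itself. A standalone sketch of the two styles (names mirror the diff above, values are illustrative):

# Sketch of the reformat; "initial" stands in for executor_instances.initial.
initial: int | None = None
basic_conf: dict[str, str] = {}

# Old layout (split subscript):
# basic_conf[
#     "spark.executor.instances"
# ] = f"{initial if initial is not None else 2}"

# New layout (parenthesized value):
basic_conf["spark.executor.instances"] = (
    f"{initial if initial is not None else 2}"
)
print(basic_conf)  # {'spark.executor.instances': '2'}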
