Commit

Merge branch 'master' into ci/yamllint
StrikerRUS authored Dec 15, 2024
2 parents ea8897f + c2f3807 commit d3f5e7e
Showing 8 changed files with 37 additions and 35 deletions.
10 changes: 2 additions & 8 deletions .pre-commit-config.yaml
@@ -22,15 +22,9 @@ repos:
hooks:
- id: yamllint
args: ["--strict"]
- repo: https://github.com/pycqa/isort
rev: 5.13.2
hooks:
- id: isort
name: isort (python)
args: ["--settings-path", "python-package/pyproject.toml"]
- repo: https://github.com/astral-sh/ruff-pre-commit
# Ruff version.
rev: v0.7.0
rev: v0.8.3
hooks:
# Run the linter.
- id: ruff
@@ -45,7 +39,7 @@ repos:
hooks:
- id: shellcheck
- repo: https://github.com/crate-ci/typos
rev: v1.23.2
rev: v1.28.3
hooks:
- id: typos
args: ["--force-exclude"]
2 changes: 1 addition & 1 deletion R-package/tests/testthat/test_basic.R
@@ -2345,7 +2345,7 @@ test_that("early stopping works with lgb.cv()", {
# never changes, its first iteration was the best oone
expect_equal(bst$best_iter, 1L)

# best_score should be taken from the first metri
# best_score should be taken from the first metric
expect_equal(bst$best_score, 0.2)

# early stopping should have happened, since constant_metric was the first
@@ -30,7 +30,7 @@
"try:\n",
" # To enable interactive mode you should install ipywidgets\n",
" # https://github.com/jupyter-widgets/ipywidgets\n",
" from ipywidgets import interact, SelectMultiple\n",
" from ipywidgets import SelectMultiple, interact\n",
"\n",
" INTERACTIVE = True\n",
"except ImportError:\n",
15 changes: 6 additions & 9 deletions python-package/lightgbm/basic.py
@@ -1291,10 +1291,7 @@ def __inner_predict_np2d(
predict_type: int,
preds: Optional[np.ndarray],
) -> Tuple[np.ndarray, int]:
if mat.dtype == np.float32 or mat.dtype == np.float64:
data = np.asarray(mat.reshape(mat.size), dtype=mat.dtype)
else: # change non-float data to float data, need to copy
data = np.array(mat.reshape(mat.size), dtype=np.float32)
data, layout = _np2d_to_np1d(mat)
ptr_data, type_ptr_data, _ = _c_float_array(data)
n_preds = self.__get_num_preds(
start_iteration=start_iteration,
@@ -1314,7 +1311,7 @@ def __inner_predict_np2d(
ctypes.c_int(type_ptr_data),
ctypes.c_int32(mat.shape[0]),
ctypes.c_int32(mat.shape[1]),
ctypes.c_int(_C_API_IS_ROW_MAJOR),
ctypes.c_int(layout),
ctypes.c_int(predict_type),
ctypes.c_int(start_iteration),
ctypes.c_int(num_iteration),
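
This hunk delegates the flattening to _np2d_to_np1d(mat), which returns both the 1-D data and a layout flag that is forwarded to the C API in place of the hard-coded _C_API_IS_ROW_MAJOR. A minimal sketch of what such a helper could look like, assuming illustrative layout constants; this is not the actual basic.py implementation:

    import numpy as np

    # Hypothetical layout flags mirroring the C API constants; the values are assumptions for illustration.
    C_API_IS_ROW_MAJOR = 1
    C_API_IS_COL_MAJOR = 0

    def np2d_to_np1d(mat: np.ndarray):
        """Flatten a 2-D array and report its memory layout, copying only when the dtype forces it."""
        dtype = mat.dtype if mat.dtype in (np.float32, np.float64) else np.float32
        if mat.flags["F_CONTIGUOUS"]:
            # Column-major input: flatten in Fortran order and let the C side know.
            return np.asarray(mat.reshape(mat.size, order="F"), dtype=dtype), C_API_IS_COL_MAJOR
        # Otherwise treat the input as row-major.
        return np.asarray(mat.reshape(mat.size, order="C"), dtype=dtype), C_API_IS_ROW_MAJOR
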
@@ -2507,13 +2504,13 @@ def _compare_params_for_warning(
compare_result : bool
Returns whether two dictionaries with params are equal.
"""
for k in other_params:
for k, v in other_params.items():
if k not in ignore_keys:
if k not in params or params[k] != other_params[k]:
if k not in params or params[k] != v:
return False
for k in params:
for k, v in params.items():
if k not in ignore_keys:
if k not in other_params or params[k] != other_params[k]:
if k not in other_params or v != other_params[k]:
return False
return True

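The rewritten loops use dict.items() but keep the same symmetric check: the function only returns True when every non-ignored key agrees in both directions. A small standalone re-implementation to illustrate that behaviour (written for this note, not the code as it lives in basic.py):

    def params_equal_for_warning(params, other_params, ignore_keys=frozenset()):
        # Symmetric comparison: a key missing from either side, or a differing
        # value, makes the dictionaries "not equal" unless the key is ignored.
        for k, v in other_params.items():
            if k not in ignore_keys and (k not in params or params[k] != v):
                return False
        for k, v in params.items():
            if k not in ignore_keys and (k not in other_params or v != other_params[k]):
                return False
        return True

    print(params_equal_for_warning({"num_leaves": 31}, {"num_leaves": 31}))  # True
    print(params_equal_for_warning({"num_leaves": 31}, {"num_leaves": 63}))  # False
    print(params_equal_for_warning({"num_leaves": 31, "verbose": -1}, {"num_leaves": 31}, ignore_keys={"verbose"}))  # True
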
24 changes: 10 additions & 14 deletions python-package/pyproject.toml
@@ -84,17 +84,6 @@ minimum-version = "build-system.requires"

# end:build-system

[tool.isort]
include_trailing_comma = true
line_length = 120
# "vertical hanging indent", to match what ruff-format does
# ref: https://pycqa.github.io/isort/docs/configuration/multi_line_output_modes.html#3-vertical-hanging-indent
multi_line_output = 3
skip_glob = [
"*/external_libs/*",
"*/lightgbm-python/*",
]

[tool.mypy]
disallow_untyped_defs = true
exclude = 'build/*|compile/*|docs/*|examples/*|external_libs/*|lightgbm-python/*|tests/*'
@@ -140,7 +129,7 @@ ignore = [
"PLR1714",
# (pylint) Magic value used in comparison
"PLR2004",
# (pylint) for loop veriable overwritten by assignment target
# (pylint) for loop variable overwritten by assignment target
"PLW2901",
# (pylint) use 'elif' instead of 'else' then 'if', to reduce indentation
"PLR5501"
@@ -152,10 +141,12 @@ select = [
"C4",
# pydocstyle
"D",
# pycodestyle
# pycodestyle (errors)
"E",
# pyflakes
"F",
# isort
"I",
# NumPy-specific rules
"NPY",
# pylint
@@ -166,11 +157,13 @@ select = [
"SIM401",
# flake8-print
"T",
# pycodestyle (warnings)
"W",
]

[tool.ruff.lint.per-file-ignores]
"docs/conf.py" = [
# (flake8-bugbear) raise exceptions with "raise ... from errr"
# (flake8-bugbear) raise exceptions with "raise ... from err"
"B904",
# (flake8-print) flake8-print
"T"
@@ -196,3 +189,6 @@ select = [

[tool.ruff.lint.pydocstyle]
convention = "numpy"

[tool.ruff.lint.isort]
known-first-party = ["lightgbm"]
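
With the standalone [tool.isort] table removed, import sorting is handled by ruff's "I" rules, and known-first-party = ["lightgbm"] tells it to group the package's own imports after third-party ones. A hedged sketch of the ordering this configuration should produce; the grouping is standard isort behaviour, and the specific modules are only examples:

    # standard library
    import ctypes
    from pathlib import Path

    # third-party
    import numpy as np
    import scipy.sparse

    # first-party, grouped last because of known-first-party = ["lightgbm"]
    import lightgbm as lgb
    from lightgbm.basic import Booster
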
2 changes: 1 addition & 1 deletion src/objective/rank_objective.hpp
@@ -204,7 +204,7 @@ class LambdarankNDCG : public RankingObjective {
}
const double worst_score = score[sorted_idx[worst_idx]];
double sum_lambdas = 0.0;
// start accmulate lambdas by pairs that contain at least one document above truncation level
// start accumulate lambdas by pairs that contain at least one document above truncation level
for (data_size_t i = 0; i < cnt - 1 && i < truncation_level_; ++i) {
if (score[sorted_idx[i]] == kMinScore) { continue; }
for (data_size_t j = i + 1; j < cnt; ++j) {
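The corrected comment describes the pair loop below it: lambdas are accumulated over document pairs in which the higher-ranked document sits above the truncation level and has a valid score. A rough Python sketch of that iteration pattern only; the lambda computation itself is elided and k_min_score is a placeholder, so this is not the C++ implementation:

    def iter_lambda_pairs(scores, sorted_idx, truncation_level, k_min_score=float("-inf")):
        """Yield index pairs over which LambdaRank-style gradients would be accumulated."""
        cnt = len(sorted_idx)
        for i in range(min(cnt - 1, truncation_level)):
            if scores[sorted_idx[i]] == k_min_score:
                continue  # skip documents marked with the minimum-score sentinel
            for j in range(i + 1, cnt):
                # at least one document of the pair (position i) is above the truncation level
                yield sorted_idx[i], sorted_idx[j]
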
2 changes: 1 addition & 1 deletion src/treelearner/kernels/histogram_16_64_256.cu
@@ -150,7 +150,7 @@ __global__ void KERNEL_NAME(const uchar* feature_data_base,
// size of threads that process this feature4
const unsigned int subglobal_size = lsize * (1 << power_feature_workgroups);

// equavalent thread ID in this subgroup for this feature4
// equivalent thread ID in this subgroup for this feature4
const unsigned int subglobal_tid = gtid - feature_id * subglobal_size;


15 changes: 15 additions & 0 deletions tests/python_package_test/test_engine.py
@@ -4611,3 +4611,18 @@ def test_bagging_by_query_in_lambdarank():
ndcg_score_no_bagging_by_query = gbm_no_bagging_by_query.best_score["valid_0"]["ndcg@5"]
assert ndcg_score_bagging_by_query >= ndcg_score - 0.1
assert ndcg_score_no_bagging_by_query >= ndcg_score - 0.1


def test_equal_predict_from_row_major_and_col_major_data():
X_row, y = make_synthetic_regression()
assert X_row.flags["C_CONTIGUOUS"] and not X_row.flags["F_CONTIGUOUS"]
ds = lgb.Dataset(X_row, y)
params = {"num_leaves": 8, "verbose": -1}
bst = lgb.train(params, ds, num_boost_round=5)
preds_row = bst.predict(X_row)

X_col = np.asfortranarray(X_row)
assert X_col.flags["F_CONTIGUOUS"] and not X_col.flags["C_CONTIGUOUS"]
preds_col = bst.predict(X_col)

np.testing.assert_allclose(preds_row, preds_col)
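
The new test relies on NumPy's memory-layout flags: make_synthetic_regression() returns a C-contiguous (row-major) matrix and np.asfortranarray produces the column-major copy. A small snippet showing how those flags behave outside the test, using plain NumPy with made-up data and no LightGBM involved:

    import numpy as np

    X_row = np.arange(12, dtype=np.float64).reshape(3, 4)  # C-contiguous by default
    X_col = np.asfortranarray(X_row)                        # same values, column-major storage

    print(X_row.flags["C_CONTIGUOUS"], X_row.flags["F_CONTIGUOUS"])  # True False
    print(X_col.flags["C_CONTIGUOUS"], X_col.flags["F_CONTIGUOUS"])  # False True
    print(np.array_equal(X_row, X_col))                              # True: layout differs, values do not
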
