From aaf4a722449b170ca0d72c42a26f1c5f1deaca60 Mon Sep 17 00:00:00 2001
From: Elena Khaustova
Date: Tue, 7 Jan 2025 12:21:59 +0000
Subject: [PATCH] Replaced callable check

Signed-off-by: Elena Khaustova
---
 .../kedro_datasets/partitions/partitioned_dataset.py | 7 ++++++-
 1 file changed, 6 insertions(+), 1 deletion(-)

diff --git a/kedro-datasets/kedro_datasets/partitions/partitioned_dataset.py b/kedro-datasets/kedro_datasets/partitions/partitioned_dataset.py
index ea2461034..46fc74e78 100644
--- a/kedro-datasets/kedro_datasets/partitions/partitioned_dataset.py
+++ b/kedro-datasets/kedro_datasets/partitions/partitioned_dataset.py
@@ -43,6 +43,11 @@ def _grandparent(path: str) -> str:
     return str(grandparent)
 
 
+def _islambda(obj: object):
+    """Check if object is a lambda function."""
+    return callable(obj) and hasattr(obj, "__name__") and obj.__name__ == "<lambda>"
+
+
 class PartitionedDataset(AbstractDataset[dict[str, Any], dict[str, Callable[[], Any]]]):
     """``PartitionedDataset`` loads and saves partitioned file-like data using the
     underlying dataset definition. For filesystem level operations it uses `fsspec`:
@@ -311,7 +316,7 @@ def save(self, data: dict[str, Any]) -> None:
             # join the protocol back since tools like PySpark may rely on it
             kwargs[self._filepath_arg] = self._join_protocol(partition)
             dataset = self._dataset_type(**kwargs)  # type: ignore
-            if callable(partition_data):
+            if _islambda(partition_data):
                 partition_data = partition_data()  # noqa: PLW2901
             dataset.save(partition_data)
         self._invalidate_caches()
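
For context only, not part of the patch: a minimal sketch of why the check moves from `callable(...)` to the lambda-only helper. The old check invoked any callable partition value, including objects that merely define `__call__` (for example a trained model), instead of saving them as-is; the new helper only unwraps lambdas used for lazy saving. `CallableModel` and `lazy_partition` below are hypothetical names for illustration; only `_islambda` mirrors the helper added in the diff.

# Illustrative sketch, assuming the helper added above; CallableModel and
# lazy_partition are hypothetical names used only for demonstration.

def _islambda(obj: object):
    """Check if object is a lambda function."""
    return callable(obj) and hasattr(obj, "__name__") and obj.__name__ == "<lambda>"


class CallableModel:
    """Stand-in for partition data that happens to be callable (e.g. a model)."""

    def __call__(self, x):
        return x


model = CallableModel()
lazy_partition = lambda: {"rows": [1, 2, 3]}  # lazy-saving pattern: data built on demand

# The previous `callable(...)` check would have called the model instead of saving it.
assert callable(model) and not _islambda(model)

# The new check only unwraps lambdas; other callables are passed to dataset.save() as-is.
assert _islambda(lazy_partition)
print(lazy_partition())  # {'rows': [1, 2, 3]}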