Skip to content

Commit

Permalink
feat(datasets): Add MatlabDataset to kedro-datasets (#515)
Browse files Browse the repository at this point in the history
* Refork and commit kedro matlab datasets

Signed-off-by: samuelleeshemen <[email protected]>

* Fix lint, add to docs

Signed-off-by: Ankita Katiyar <[email protected]>

* Try fixing docstring

Signed-off-by: Ankita Katiyar <[email protected]>

* Try fixing save

Signed-off-by: Ankita Katiyar <[email protected]>

* Try fix docstest

Signed-off-by: Ankita Katiyar <[email protected]>

* Fix unit tests

Signed-off-by: Ankita Katiyar <[email protected]>

* Update release notes:

Signed-off-by: Ankita Katiyar <[email protected]>

* Not hardcode load mode

Signed-off-by: Ankita Katiyar <[email protected]>

---------

Signed-off-by: samuelleeshemen <[email protected]>
Signed-off-by: Ankita Katiyar <[email protected]>
Co-authored-by: Ankita Katiyar <[email protected]>
  • Loading branch information
samuel-lee-sj and ankatiyar authored Jan 22, 2024
1 parent 52c2563 commit 5e3a540
Show file tree
Hide file tree
Showing 7 changed files with 376 additions and 0 deletions.
4 changes: 4 additions & 0 deletions kedro-datasets/RELEASE.md
Original file line number Diff line number Diff line change
@@ -1,8 +1,12 @@
# Upcoming Release
## Major features and improvements
* Added `MatlabDataset` which uses `scipy` to save and load `.mat` files.

## Bug fixes and other changes
* Removed Windows specific conditions in `pandas.HDFDataset` extra dependencies

## Community contributions
Many thanks to the following Kedroids for contributing PRs to this release:
* [Samuel Lee SJ](https://github.com/samuel-lee-sj)

# Release 2.0.0
## Major features and improvements
Expand Down
1 change: 1 addition & 0 deletions kedro-datasets/docs/source/api/kedro_datasets.rst
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@ kedro_datasets
kedro_datasets.huggingface.HFDataset
kedro_datasets.huggingface.HFTransformerPipelineDataset
kedro_datasets.json.JSONDataset
kedro_datasets.matlab.MatlabDataset
kedro_datasets.matplotlib.MatplotlibWriter
kedro_datasets.networkx.GMLDataset
kedro_datasets.networkx.GraphMLDataset
Expand Down
13 changes: 13 additions & 0 deletions kedro-datasets/kedro_datasets/matlab/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
"""``AbstractDataset`` implementation to load/save data from/to a Matlab file."""
from __future__ import annotations

from typing import Any

import lazy_loader as lazy

MatlabDataSet: type[MatlabDataset]
MatlabDataset: Any

__getattr__, __dir__, __all__ = lazy.attach(
__name__, submod_attrs={"matlab_dataset": ["MatlabDataset"]}
)
155 changes: 155 additions & 0 deletions kedro-datasets/kedro_datasets/matlab/matlab_dataset.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,155 @@
"""``MatlabDataset`` loads/saves data from/to a Matlab file using an underlying
filesystem ?(e.g.: local, S3, GCS)?. The underlying functionality is supported by
the specified backend library passed in (defaults to the ``matlab`` library), so it
supports all allowed options for loading and saving matlab files.
"""
from copy import deepcopy
from pathlib import PurePosixPath
from typing import Any

import fsspec
import numpy as np
from kedro.io.core import (
AbstractVersionedDataset,
DatasetError,
Version,
get_filepath_str,
get_protocol_and_path,
)
from scipy import io


class MatlabDataset(AbstractVersionedDataset[np.ndarray, np.ndarray]):
    """``MatlabDataset`` loads and saves data from/to a MATLAB ``.mat`` file
    using ``scipy.io``. On save, the array is stored under the key ``"data"``.

    Example usage for the
    `YAML API <https://kedro.readthedocs.io/en/stable/data/\
    data_catalog_yaml_examples.html>`_:

    .. code-block:: yaml

        cars:
          type: matlab.MatlabDataset
          filepath: gcs://your_bucket/cars.mat
          fs_args:
            project: my-project
          credentials: my_gcp_credentials

    Example usage for the
    `Python API <https://kedro.readthedocs.io/en/stable/data/\
    advanced_data_catalog_usage.html>`_:

    .. code-block:: pycon

        >>> from kedro_datasets.matlab import MatlabDataset
        >>> import numpy as np
        >>> data = np.array([1, 2, 3])
        >>> dataset = MatlabDataset(filepath=tmp_path / "test.mat")
        >>> dataset.save(data)
        >>> reloaded = dataset.load()
        >>> assert (data == reloaded["data"]).all()
    """

    # Default options forwarded to ``scipy.io.savemat`` (e.g. ``format``,
    # ``do_compression``, ``oned_as``). Empty by default: the previous
    # ``{"indent": 2}`` is not a valid ``savemat`` argument (it was copied from
    # a JSON dataset) and was never actually forwarded to ``savemat``.
    DEFAULT_SAVE_ARGS: dict[str, Any] = {}

    def __init__(  # noqa: PLR0913
        self,
        filepath: str,
        save_args: dict[str, Any] = None,
        version: Version = None,
        credentials: dict[str, Any] = None,
        fs_args: dict[str, Any] = None,
        metadata: dict[str, Any] = None,
    ) -> None:
        """Creates a new instance of ``MatlabDataset`` to load and save data
        from/to a MATLAB file.

        Args:
            filepath: Filepath in POSIX format to a Matlab file prefixed with a
                protocol like `s3://`. If prefix is not provided, `file`
                protocol (local filesystem) will be used. The prefix should be
                any protocol supported by ``fsspec``.
                Note: `http(s)` doesn't support versioning.
            save_args: Options passed on to ``scipy.io.savemat`` when saving
                ``.mat`` files, e.g. ``{"do_compression": True}``.
            version: If specified, should be an instance of
                ``kedro.io.core.Version``. If its ``load`` attribute is
                None, the latest version will be loaded. If its ``save``
                attribute is None, save version will be autogenerated.
            credentials: Credentials required to get access to the underlying
                filesystem. E.g. for ``GCSFileSystem`` it should look like
                `{"token": None}`.
            fs_args: Extra arguments to pass into underlying filesystem class
                constructor (e.g. `{"project": "my-project"}` for
                ``GCSFileSystem``), as well as to pass to the filesystem's
                `open` method through nested keys `open_args_load` and
                `open_args_save`.
                Here you can find all available arguments for `open`:
                https://filesystem-spec.readthedocs.io/en/latest/api.html#fsspec.spec.AbstractFileSystem.open
                All defaults are preserved, except `mode`, which is set to
                `rb` when loading and to `wb` when saving, since ``.mat`` is
                a binary format.
            metadata: Any arbitrary metadata.
                This is ignored by Kedro, but may be consumed by users or
                external plugins.
        """
        _fs_args = deepcopy(fs_args) or {}
        _fs_open_args_load = _fs_args.pop("open_args_load", {})
        _fs_open_args_save = _fs_args.pop("open_args_save", {})
        _credentials = deepcopy(credentials) or {}

        protocol, path = get_protocol_and_path(filepath, version)
        self._protocol = protocol
        if protocol == "file":
            # Create missing parent directories automatically on local saves.
            _fs_args.setdefault("auto_mkdir", True)
        self._fs = fsspec.filesystem(self._protocol, **_credentials, **_fs_args)
        self.metadata = metadata

        super().__init__(
            filepath=PurePosixPath(path),
            version=version,
            exists_function=self._fs.exists,
            glob_function=self._fs.glob,
        )
        # Handle default save arguments
        self._save_args = deepcopy(self.DEFAULT_SAVE_ARGS)
        if save_args is not None:
            self._save_args.update(save_args)

        # ``.mat`` is a binary format, so default both open modes to binary.
        # (Previously "w" was set here but ``_save`` hardcoded "wb" and these
        # dicts were never used; they are now honoured by ``_load``/``_save``.)
        _fs_open_args_load.setdefault("mode", "rb")
        _fs_open_args_save.setdefault("mode", "wb")
        self._fs_open_args_load = _fs_open_args_load
        self._fs_open_args_save = _fs_open_args_save

    def _describe(self) -> dict[str, Any]:
        """Returns the dataset attributes shown in ``repr`` / catalog listings."""
        return {
            "filepath": self._filepath,
            "protocol": self._protocol,
            "save_args": self._save_args,
            "version": self._version,
        }

    def _load(self) -> np.ndarray:
        """Loads the ``.mat`` file as a ``scipy.io.loadmat`` dictionary.

        Access a specific variable as ``data["variable_name"]``; data written
        by this dataset's ``_save`` lives under the key ``"data"``.
        """
        load_path = get_filepath_str(self._get_load_path(), self._protocol)
        with self._fs.open(load_path, **self._fs_open_args_load) as f:
            data = io.loadmat(f)
        return data

    def _save(self, data: np.ndarray) -> None:
        """Saves ``data`` under the key ``"data"``, forwarding ``save_args``
        to ``scipy.io.savemat`` (previously ``save_args`` was silently ignored).
        """
        save_path = get_filepath_str(self._get_save_path(), self._protocol)
        with self._fs.open(save_path, **self._fs_open_args_save) as f:
            io.savemat(f, {"data": data}, **self._save_args)
        self._invalidate_cache()

    def _exists(self) -> bool:
        """Returns True if the (possibly versioned) load path exists."""
        try:
            load_path = get_filepath_str(self._get_load_path(), self._protocol)
        except DatasetError:
            # No versioned file resolvable -> dataset does not exist yet.
            return False

        return self._fs.exists(load_path)

    def _release(self) -> None:
        super()._release()
        self._invalidate_cache()

    def _invalidate_cache(self) -> None:
        """Invalidate underlying filesystem caches."""
        filepath = get_filepath_str(self._filepath, self._protocol)
        self._fs.invalidate_cache(filepath)
2 changes: 2 additions & 0 deletions kedro-datasets/setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -30,6 +30,7 @@ def _collect_requirements(requires):
"huggingface.HFTransformerPipelineDataset": ["transformers"],
}
matplotlib_require = {"matplotlib.MatplotlibWriter": ["matplotlib>=3.0.3, <4.0"]}
matlab_require = {"matlab.MatlabDataset": ["scipy"]}
networkx_require = {"networkx.NetworkXDataset": ["networkx~=2.4"]}
pandas_require = {
"pandas.CSVDataset": [PANDAS],
Expand Down Expand Up @@ -118,6 +119,7 @@ def _collect_requirements(requires):
"geopandas": _collect_requirements(geopandas_require),
"holoviews": _collect_requirements(holoviews_require),
"huggingface": _collect_requirements(huggingface_require),
"matlab": _collect_requirements(matlab_require),
"matplotlib": _collect_requirements(matplotlib_require),
"networkx": _collect_requirements(networkx_require),
"pandas": _collect_requirements(pandas_require),
Expand Down
Empty file.
Loading

0 comments on commit 5e3a540

Please sign in to comment.