@@ -1,19 +1,10 @@
-import os
 from typing import Any, Optional
 
 import cloudpickle
 import h5py
 import numpy as np
 
-group_dict = {
-    "fn": "function",
-    "args": "input_args",
-    "kwargs": "input_kwargs",
-    "output": "output",
-    "error": "error",
-    "runtime": "runtime",
-    "queue_id": "queue_id",
-}
+from executorlib.standalone.cache import group_dict
 
 
 def dump(file_name: Optional[str], data_dict: dict) -> None:
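This hunk only relocates `group_dict`, the mapping from task fields to HDF5 dataset names, into `executorlib.standalone.cache`; the serialization pattern itself is untouched. As a minimal sketch of that pattern, assuming a throwaway file name and an inline copy of the mapping removed above (neither is part of the executorlib API), each value is cloudpickle-serialized and stored as a single opaque HDF5 dataset via `np.void`:

```python
import cloudpickle
import h5py
import numpy as np

# Illustrative copy of the mapping that now lives in
# executorlib.standalone.cache: internal key -> HDF5 dataset name.
group_dict = {
    "fn": "function",
    "args": "input_args",
    "kwargs": "input_kwargs",
    "output": "output",
    "error": "error",
    "runtime": "runtime",
    "queue_id": "queue_id",
}

# Write: cloudpickle turns a Python object into bytes, and np.void wraps
# those bytes so h5py stores them as one opaque dataset.
with h5py.File("cache.h5out", "w") as hdf:
    hdf[group_dict["queue_id"]] = np.void(cloudpickle.dumps(4711))

# Read: the same np.void/cloudpickle round trip used by get_queue_id() below.
with h5py.File("cache.h5out", "r") as hdf:
    print(cloudpickle.loads(np.void(hdf["/queue_id"])))  # 4711
```

Storing each field as its own opaque dataset lets readers such as `get_queue_id` fetch a single value without deserializing the whole task.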
@@ -98,25 +89,17 @@ def get_runtime(file_name: str) -> float: |
 
 
 def get_queue_id(file_name: Optional[str]) -> Optional[int]:
+    """
+    Get the queuing system id from an HDF5 file.
+
+    Args:
+        file_name (Optional[str]): file name of the HDF5 file as absolute path
+
+    Returns:
+        Optional[int]: queuing system id from the execution of the python function
+    """
     if file_name is not None:
         with h5py.File(file_name, "r") as hdf:
             if "queue_id" in hdf:
                 return cloudpickle.loads(np.void(hdf["/queue_id"]))
     return None
-
-
-def get_cache_data(cache_directory: str) -> list[dict]:
-    file_lst = []
-    for task_key in os.listdir(cache_directory):
-        file_name = os.path.join(cache_directory, task_key, "cache.h5out")
-        os.makedirs(os.path.join(cache_directory, task_key), exist_ok=True)
-        if os.path.exists(file_name):
-            with h5py.File(file_name, "r") as hdf:
-                file_content_dict = {
-                    key: cloudpickle.loads(np.void(hdf["/" + key]))
-                    for key in group_dict.values()
-                    if key in hdf
-                }
-                file_content_dict["filename"] = file_name
-                file_lst.append(file_content_dict)
-    return file_lst
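To exercise the contract the new docstring describes, here is a self-contained sketch; the local copy of `get_queue_id`, the file name `demo.h5out`, and the stored id are illustrative assumptions, not executorlib behavior beyond what the diff shows:

```python
from typing import Optional

import cloudpickle
import h5py
import numpy as np


def get_queue_id(file_name: Optional[str]) -> Optional[int]:
    # Local copy of the function above so the sketch runs standalone.
    if file_name is not None:
        with h5py.File(file_name, "r") as hdf:
            if "queue_id" in hdf:
                return cloudpickle.loads(np.void(hdf["/queue_id"]))
    return None


# Write a queue id the same way the module's storage pattern does.
with h5py.File("demo.h5out", "w") as hdf:
    hdf["queue_id"] = np.void(cloudpickle.dumps(987654))

print(get_queue_id(file_name="demo.h5out"))  # 987654
print(get_queue_id(file_name=None))          # None: no file, no lookup
```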