Skip to content

Commit 584f167

Browse files
authored
Merge pull request #19 from nel-lab/black-formatter
Use black formatter
2 parents 3ba85f4 + b46cf21 commit 584f167

File tree

16 files changed

+745
-640
lines changed

16 files changed

+745
-640
lines changed

.github/workflows/black.yml

Lines changed: 10 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,10 @@
1+
name: Lint
2+
3+
on: [push, pull_request]
4+
5+
jobs:
6+
lint:
7+
runs-on: ubuntu-latest
8+
steps:
9+
- uses: actions/checkout@v2
10+
- uses: psf/black@stable

.github/workflows/yapf-check.yml

Lines changed: 0 additions & 12 deletions
This file was deleted.

mesmerize_core/__init__.py

Lines changed: 25 additions & 18 deletions
Original file line numberDiff line numberDiff line change
@@ -1,21 +1,28 @@
1-
from .batch_utils import COMPUTE_BACKENDS, COMPUTE_BACKEND_QPROCESS, COMPUTE_BACKEND_SLURM, \
2-
COMPUTE_BACKEND_SUBPROCESS, set_parent_data_path, get_parent_data_path, get_full_data_path, \
3-
load_batch, create_batch
1+
from .batch_utils import (
2+
COMPUTE_BACKENDS,
3+
COMPUTE_BACKEND_QPROCESS,
4+
COMPUTE_BACKEND_SLURM,
5+
COMPUTE_BACKEND_SUBPROCESS,
6+
set_parent_data_path,
7+
get_parent_data_path,
8+
get_full_data_path,
9+
load_batch,
10+
create_batch,
11+
)
412
from .caiman_extensions import *
513

6-
__all__ =\
7-
[
8-
'COMPUTE_BACKENDS',
9-
'COMPUTE_BACKEND_QPROCESS',
10-
'COMPUTE_BACKEND_SLURM',
11-
'COMPUTE_BACKEND_SUBPROCESS',
12-
'set_parent_data_path',
13-
'get_parent_data_path',
14-
'get_full_data_path',
15-
'load_batch',
16-
'create_batch',
17-
'CaimanDataFrameExtensions',
18-
'CaimanSeriesExtensions',
19-
'CNMFExtensions',
20-
'MCorrExtensions'
14+
__all__ = [
15+
"COMPUTE_BACKENDS",
16+
"COMPUTE_BACKEND_QPROCESS",
17+
"COMPUTE_BACKEND_SLURM",
18+
"COMPUTE_BACKEND_SUBPROCESS",
19+
"set_parent_data_path",
20+
"get_parent_data_path",
21+
"get_full_data_path",
22+
"load_batch",
23+
"create_batch",
24+
"CaimanDataFrameExtensions",
25+
"CaimanSeriesExtensions",
26+
"CNMFExtensions",
27+
"MCorrExtensions",
2128
]
Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1 +1 @@
1-
__all__ = ['cnmf', 'mcorr', 'cnmfe']
1+
__all__ = ["cnmf", "mcorr", "cnmfe"]

mesmerize_core/algorithms/cnmf.py

Lines changed: 37 additions & 38 deletions
Original file line numberDiff line numberDiff line change
@@ -10,93 +10,90 @@
1010
from pathlib import Path
1111

1212
# prevent circular import
13-
if __name__ == '__main__':
13+
if __name__ == "__main__":
1414
from mesmerize_core import set_parent_data_path, get_full_data_path
1515

1616

1717
@click.command()
18-
@click.option('--batch-path', type=str)
19-
@click.option('--uuid', type=str)
20-
@click.option('--data-path')
18+
@click.option("--batch-path", type=str)
19+
@click.option("--uuid", type=str)
20+
@click.option("--data-path")
2121
def main(batch_path, uuid, data_path: str = None):
2222
df = pd.read_pickle(batch_path)
23-
item = df[df['uuid'] == uuid].squeeze()
23+
item = df[df["uuid"] == uuid].squeeze()
2424

25-
input_movie_path = item['input_movie_path']
25+
input_movie_path = item["input_movie_path"]
2626

2727
set_parent_data_path(data_path)
2828
input_movie_path = str(get_full_data_path(input_movie_path))
2929

30-
params = item['params']
30+
params = item["params"]
3131
print("cnmf params", params)
3232

3333
# adapted from current demo notebook
3434
n_processes = psutil.cpu_count() - 1
3535
# Start cluster for parallel processing
3636
c, dview, n_processes = cm.cluster.setup_cluster(
37-
backend='local',
38-
n_processes=n_processes,
39-
single_thread=False
37+
backend="local", n_processes=n_processes, single_thread=False
4038
)
4139

4240
# merge cnmf and eval kwargs into one dict
43-
c = dict(params['cnmf_kwargs'])
44-
e = dict(params['eval_kwargs'])
41+
c = dict(params["cnmf_kwargs"])
42+
e = dict(params["eval_kwargs"])
4543
tot = {**c, **e}
4644
cnmf_params = CNMFParams(params_dict=tot)
4745
# Run CNMF, denote boolean 'success' if CNMF completes w/out error
4846
try:
4947
fname_new = cm.save_memmap(
50-
[input_movie_path],
51-
base_name=f'{uuid}_cnmf-memmap_',
52-
order='C',
53-
dview=dview
48+
[input_movie_path], base_name=f"{uuid}_cnmf-memmap_", order="C", dview=dview
5449
)
5550

56-
print('making memmap')
51+
print("making memmap")
5752

5853
Yr, dims, T = cm.load_memmap(fname_new)
59-
images = np.reshape(Yr.T, [T] + list(dims), order='F')
54+
images = np.reshape(Yr.T, [T] + list(dims), order="F")
6055

6156
proj_paths = dict()
62-
for proj_type in ['mean', 'std', 'max']:
63-
p_img = getattr(np, f'nan{proj_type}')(images, axis=0)
64-
proj_paths[proj_type] = Path(input_movie_path).parent.joinpath(f'{uuid}_{proj_type}_projection.npy')
57+
for proj_type in ["mean", "std", "max"]:
58+
p_img = getattr(np, f"nan{proj_type}")(images, axis=0)
59+
proj_paths[proj_type] = Path(input_movie_path).parent.joinpath(
60+
f"{uuid}_{proj_type}_projection.npy"
61+
)
6562
np.save(str(proj_paths[proj_type]), p_img)
6663

6764
# in fname new load in memmap order C
6865
cm.stop_server(dview=dview)
6966
c, dview, n_processes = cm.cluster.setup_cluster(
70-
backend='local',
71-
n_processes=None,
72-
single_thread=False
67+
backend="local", n_processes=None, single_thread=False
7368
)
7469

7570
print("performing CNMF")
76-
cnm = cnmf.CNMF(
77-
n_processes,
78-
params=cnmf_params,
79-
dview=dview
80-
)
71+
cnm = cnmf.CNMF(n_processes, params=cnmf_params, dview=dview)
8172

8273
print("fitting images")
8374
cnm = cnm.fit(images)
8475
#
85-
if params['refit'] is True:
86-
print('refitting')
76+
if params["refit"] is True:
77+
print("refitting")
8778
cnm = cnm.refit(images, dview=dview)
8879

8980
print("Eval")
9081
cnm.estimates.evaluate_components(images, cnm.params, dview=dview)
9182

92-
output_path = get_full_data_path(input_movie_path).parent.joinpath(f"{uuid}.hdf5").resolve()
83+
output_path = (
84+
get_full_data_path(input_movie_path)
85+
.parent.joinpath(f"{uuid}.hdf5")
86+
.resolve()
87+
)
9388

9489
cnm.save(str(output_path))
9590

9691
Cn = cm.local_correlations(images.transpose(1, 2, 0))
9792
Cn[np.isnan(Cn)] = 0
9893

99-
corr_img_path = Path(input_movie_path).parent.joinpath(f'{uuid}_cn.npy').resolve()
94+
corr_img_path = (
95+
Path(input_movie_path).parent.joinpath(f"{uuid}_cn.npy").resolve()
96+
)
10097
np.save(str(corr_img_path), Cn, allow_pickle=False)
10198

10299
# output dict for dataframe row (pd.Series)
@@ -107,7 +104,9 @@ def main(batch_path, uuid, data_path: str = None):
107104
cnmf_memmap_path = Path(fname_new).relative_to(data_path)
108105
corr_img_path = corr_img_path.relative_to(data_path)
109106
for proj_type in proj_paths.keys():
110-
d[f"{proj_type}-projection-path"] = proj_paths[proj_type].relative_to(data_path)
107+
d[f"{proj_type}-projection-path"] = proj_paths[proj_type].relative_to(
108+
data_path
109+
)
111110
else: # absolute paths
112111
cnmf_hdf5_path = output_path
113112
cnmf_memmap_path = fname_new
@@ -120,17 +119,17 @@ def main(batch_path, uuid, data_path: str = None):
120119
"cnmf-memmap-path": cnmf_memmap_path,
121120
"corr-img-path": corr_img_path,
122121
"success": True,
123-
"traceback": None
122+
"traceback": None,
124123
}
125124
)
126125

127126
except:
128127
d = {"success": False, "traceback": traceback.format_exc()}
129-
128+
130129
print(f"Final output dict:\n{d}")
131-
130+
132131
# Add dictionary to output column of series
133-
df.loc[df['uuid'] == uuid, 'outputs'] = [d]
132+
df.loc[df["uuid"] == uuid, "outputs"] = [d]
134133
# save dataframe to disc
135134
df.to_pickle(batch_path)
136135

mesmerize_core/algorithms/cnmfe.py

Lines changed: 46 additions & 43 deletions
Original file line numberDiff line numberDiff line change
@@ -8,96 +8,99 @@
88
import traceback
99
from pathlib import Path
1010

11-
if __name__ == '__main__':
11+
if __name__ == "__main__":
1212
from mesmerize_core import set_parent_data_path, get_full_data_path
1313

14+
1415
@click.command()
15-
@click.option('--batch-path', type=str)
16-
@click.option('--uuid', type=str)
17-
@click.option('--data-path')
16+
@click.option("--batch-path", type=str)
17+
@click.option("--uuid", type=str)
18+
@click.option("--data-path")
1819
def main(batch_path, uuid, data_path: str = None):
1920
df = pd.read_pickle(batch_path)
20-
item = df[df['uuid'] == uuid].squeeze()
21+
item = df[df["uuid"] == uuid].squeeze()
2122

22-
input_movie_path = item['input_movie_path']
23+
input_movie_path = item["input_movie_path"]
2324
set_parent_data_path(data_path)
2425
input_movie_path = str(get_full_data_path(input_movie_path))
2526

26-
params = item['params']
27+
params = item["params"]
2728
print("cnmfe params:", params)
2829

29-
#adapted from current demo notebook
30+
# adapted from current demo notebook
3031
n_processes = psutil.cpu_count() - 1
3132
# Start cluster for parallel processing
3233
c, dview, n_processes = cm.cluster.setup_cluster(
33-
backend='local',
34-
n_processes=n_processes,
35-
single_thread=False
36-
)
34+
backend="local", n_processes=n_processes, single_thread=False
35+
)
3736

3837
try:
3938
fname_new = cm.save_memmap(
40-
[input_movie_path],
41-
base_name=f'{uuid}_cnmf-memmap_',
42-
order='C',
43-
dview=dview
39+
[input_movie_path], base_name=f"{uuid}_cnmf-memmap_", order="C", dview=dview
4440
)
4541

46-
print('making memmap')
47-
gSig = params['cnmfe_kwargs']['gSig'][0]
42+
print("making memmap")
43+
gSig = params["cnmfe_kwargs"]["gSig"][0]
4844

4945
Yr, dims, T = cm.load_memmap(fname_new)
50-
images = np.reshape(Yr.T, [T] + list(dims), order='F')
46+
images = np.reshape(Yr.T, [T] + list(dims), order="F")
5147

5248
proj_paths = dict()
53-
for proj_type in ['mean', 'std', 'max']:
54-
p_img = getattr(np, f'nan{proj_type}')(images, axis=0)
55-
proj_paths[proj_type] = Path(input_movie_path).parent.joinpath(f'{uuid}_{proj_type}_projection.npy')
49+
for proj_type in ["mean", "std", "max"]:
50+
p_img = getattr(np, f"nan{proj_type}")(images, axis=0)
51+
proj_paths[proj_type] = Path(input_movie_path).parent.joinpath(
52+
f"{uuid}_{proj_type}_projection.npy"
53+
)
5654
np.save(str(proj_paths[proj_type]), p_img)
5755

58-
downsample_ratio = params['downsample_ratio']
56+
downsample_ratio = params["downsample_ratio"]
5957
# in fname new load in memmap order C
6058

6159
cn_filter, pnr = cm.summary_images.correlation_pnr(
6260
images[::downsample_ratio], swap_dim=False, gSig=gSig
6361
)
6462

65-
pnr_output_path = Path(input_movie_path).parent.joinpath(f"{uuid}_pn.npy").resolve()
66-
cn_output_path = Path(input_movie_path).parent.joinpath(f"{uuid}_cn.npy").resolve()
63+
pnr_output_path = (
64+
Path(input_movie_path).parent.joinpath(f"{uuid}_pn.npy").resolve()
65+
)
66+
cn_output_path = (
67+
Path(input_movie_path).parent.joinpath(f"{uuid}_cn.npy").resolve()
68+
)
6769

6870
np.save(str(pnr_output_path), pnr, allow_pickle=False)
6971
np.save(str(cn_output_path), cn_filter, allow_pickle=False)
7072

7173
d = dict() # for output
7274

73-
if params['do_cnmfe']:
74-
cnmfe_params_dict = \
75-
{
76-
"method_init": 'corr_pnr',
77-
"n_processes": n_processes,
78-
"only_init": True, # for 1p
79-
"center_psf": True, # for 1p
80-
"normalize_init": False # for 1p
81-
}
82-
tot = {**cnmfe_params_dict, **params['cnmfe_kwargs']}
75+
if params["do_cnmfe"]:
76+
cnmfe_params_dict = {
77+
"method_init": "corr_pnr",
78+
"n_processes": n_processes,
79+
"only_init": True, # for 1p
80+
"center_psf": True, # for 1p
81+
"normalize_init": False, # for 1p
82+
}
83+
tot = {**cnmfe_params_dict, **params["cnmfe_kwargs"]}
8384
cnmfe_params_dict = CNMFParams(params_dict=tot)
8485
cnm = cnmf.CNMF(
85-
n_processes=n_processes,
86-
dview=dview,
87-
params=cnmfe_params_dict
86+
n_processes=n_processes, dview=dview, params=cnmfe_params_dict
8887
)
8988
print("Performing CNMFE")
9089
cnm = cnm.fit(images)
9190
print("evaluating components")
9291
cnm.estimates.evaluate_components(images, cnm.params, dview=dview)
9392

94-
output_path = Path(input_movie_path).parent.joinpath(f"{uuid}.hdf5").resolve()
93+
output_path = (
94+
Path(input_movie_path).parent.joinpath(f"{uuid}.hdf5").resolve()
95+
)
9596
cnm.save(str(output_path))
9697

9798
if data_path is not None:
9899
cnmf_hdf5_path = Path(output_path).relative_to(data_path)
99100
for proj_type in proj_paths.keys():
100-
d[f"{proj_type}-projection-path"] = proj_paths[proj_type].relative_to(data_path)
101+
d[f"{proj_type}-projection-path"] = proj_paths[
102+
proj_type
103+
].relative_to(data_path)
101104
else:
102105
cnmf_hdf5_path = output_path
103106
for proj_type in proj_paths.keys():
@@ -122,7 +125,7 @@ def main(batch_path, uuid, data_path: str = None):
122125
"corr-img-path": cn_output_path,
123126
"pnr-image-path": pnr_output_path,
124127
"success": True,
125-
"traceback": None
128+
"traceback": None,
126129
}
127130
)
128131

@@ -132,10 +135,10 @@ def main(batch_path, uuid, data_path: str = None):
132135
d = {"success": False, "traceback": traceback.format_exc()}
133136

134137
# Add dictionary to output column of series
135-
df.loc[df['uuid'] == uuid, 'outputs'] = [d]
138+
df.loc[df["uuid"] == uuid, "outputs"] = [d]
136139
# save dataframe to disc
137140
df.to_pickle(batch_path)
138141

139142

140143
if __name__ == "__main__":
141-
main()
144+
main()

0 commit comments

Comments
 (0)