
Commit eb4ba34

cnmf/hdf5: Improve handling of serialisation of NoneType. Also fix some typos.
1 parent 00d32b6 commit eb4ba34

3 files changed: +12 -7 lines changed

caiman/source_extraction/cnmf/estimates.py

Lines changed: 5 additions & 5 deletions
@@ -1421,7 +1421,7 @@ def remove_small_large_neurons(self, min_size_neuro, max_size_neuro,
             indeces of components with size within the acceptable range
         '''
         if self.A_thr is None:
-            raise Exception('You need to compute thresolded components before calling remove_duplicates: use the threshold_components method')
+            raise Exception('You need to compute thresholded components before calling remove_duplicates: use the threshold_components method')
 
         A_gt_thr_bin = self.A_thr.toarray() > 0
         size_neurons_gt = A_gt_thr_bin.sum(0)
@@ -1451,7 +1451,7 @@ def remove_duplicates(self, predictions=None, r_values=None, dist_thr=0.1,
             plot_duplicates
         '''
         if self.A_thr is None:
-            raise Exception('You need to compute thresolded components before calling remove_duplicates: use the threshold_components method')
+            raise Exception('You need to compute thresholded components before calling remove_duplicates: use the threshold_components method')
 
         A_gt_thr_bin = (self.A_thr.toarray() > 0).reshape([self.dims[0], self.dims[1], -1], order='F').transpose([2, 0, 1]) * 1.
 
@@ -1492,7 +1492,7 @@ def masks_2_neurofinder(self, dataset_name):
         """
         if self.A_thr is None:
             raise Exception(
-                'You need to compute thresolded components before calling this method: use the threshold_components method')
+                'You need to compute thresholded components before calling this method: use the threshold_components method')
         bin_masks = self.A_thr.reshape([self.dims[0], self.dims[1], -1], order='F').transpose([2, 0, 1])
         return nf_masks_to_neurof_dict(bin_masks, dataset_name)
 
@@ -1711,10 +1711,10 @@ def compare_components(estimate_gt, estimate_cmp, Cn=None, thresh_cost=.8, min_
                        labels=['GT', 'CMP'], plot_results=False):
     if estimate_gt.A_thr is None:
         raise Exception(
-            'You need to compute thresolded components for first argument before calling remove_duplicates: use the threshold_components method')
+            'You need to compute thresholded components for first argument before calling remove_duplicates: use the threshold_components method')
     if estimate_cmp.A_thr is None:
         raise Exception(
-            'You need to compute thresolded components for second argument before calling remove_duplicates: use the threshold_components method')
+            'You need to compute thresholded components for second argument before calling remove_duplicates: use the threshold_components method')
 
     if plot_results:
         plt.figure(figsize=(20, 10))
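
The exception messages above encode a required call order on the Estimates object: a thresholding step must populate A_thr before the size filter and the duplicate filter will run. A minimal sketch of that order, assuming a fitted model's Estimates object named est; the thresholding method's name is taken from the exception text, and its keyword arguments and the size bounds below are placeholders rather than values confirmed by this diff:

# est is assumed to be the Estimates object of an already-fitted CNMF model.

# Step 1: populate est.A_thr. The method name comes from the exception text
# above; whatever keyword arguments it accepts are not shown in this diff.
est.threshold_components()

# Step 2: with est.A_thr set, these calls no longer raise the exceptions above.
est.remove_small_large_neurons(min_size_neuro=10, max_size_neuro=1000)
est.remove_duplicates(dist_thr=0.1)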

caiman/source_extraction/cnmf/spatial.py

Lines changed: 1 addition & 1 deletion
@@ -455,7 +455,7 @@ def construct_ellipse_parallel(pars):
     return np.sqrt(np.sum([old_div((dist_cm * V[:, k]) ** 2, dkk[k]) for k in range(len(dkk))], 0)) <= dist
 
 def threshold_components(A, dims, medw=None, thr_method='max', maxthr=0.1, nrgthr=0.9999, extract_cc=True,
-                         se=None, ss=None, dview=None):
+                         se=None, ss=None, dview=None) -> np.ndarray:
     """
     Post-processing of spatial components which includes the following steps
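
For the function annotated above, a toy call might look like the following; the Gaussian-blob footprint and field-of-view size are made up, and only parameters visible in the signature above are used:

import numpy as np
import scipy.sparse
from caiman.source_extraction.cnmf.spatial import threshold_components

# Made-up input: one Gaussian-blob spatial footprint on a 60x80 field of view,
# flattened column-major (order='F') the way caiman stores spatial components.
dims = (60, 80)
yy, xx = np.mgrid[:dims[0], :dims[1]]
blob = np.exp(-((yy - 30) ** 2 + (xx - 40) ** 2) / (2 * 5.0 ** 2))
A = scipy.sparse.csc_matrix(blob.reshape(-1, 1, order='F'))

# Threshold and clean the footprint; per the annotation added in this commit,
# the result is a NumPy array rather than a sparse matrix.
A_thr = threshold_components(A, dims, thr_method='max', maxthr=0.1)
print(type(A_thr), A_thr.shape)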

caiman/utils/utils.py

Lines changed: 6 additions & 1 deletion
@@ -544,6 +544,10 @@ def recursively_load_dict_contents_from_group(h5file:h5py.File, path:str) -> Dict:
     Starting with Caiman 1.9.9 we started saving strings as attributes rather than independent datasets,
     which gets us a better syntax and less damage to the strings, at the cost of scanning properly for them
     being a little more involved. In future versions of Caiman we may store all scalars as attributes.
+
+    There's some special casing here that should be solved in a more general way; anything serialised into
+    hdf5 and then deserialised should probably go back through the class constructor, and revalidated
+    so all the fields end up with appropriate data types.
     '''
 
     ans:Dict = {}
@@ -560,7 +564,6 @@ def recursively_load_dict_contents_from_group(h5file:h5py.File, path:str) -> Dict:
             ans[key] = item[()]
 
         elif key in ['dims', 'medw', 'sigma_smooth_snmf', 'dxy', 'max_shifts', 'strides', 'overlaps']:
-
             if isinstance(item[()], np.ndarray):
                 ans[key] = tuple(item[()])
             else:
@@ -570,6 +573,8 @@ def recursively_load_dict_contents_from_group(h5file:h5py.File, path:str) -> Dict:
                 ans[key] = bool(item[()])
             else:
                 ans[key] = item[()]
+                if isinstance(ans[key], bytes) and ans[key] == b'NoneType':
+                    ans[key] = None
 
         elif isinstance(item, h5py._hl.group.Group):
             if key in ('A', 'W', 'Ab', 'downscale_matrix', 'upscale_matrix'):
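
A self-contained sketch of the round trip this hunk improves, using plain h5py rather than Caiman's own save/load helpers; the file name and field names are made up, and the 'NoneType' marker is simply the convention the added check looks for:

import h5py

# Write side: HDF5 has no native null, so a None field is stored as the marker
# string 'NoneType' (the convention the loader above checks for).
with h5py.File('demo_params.hdf5', 'w') as f:
    f['params/gSig_filt'] = 'NoneType'   # stands in for a field whose value is None
    f['params/p'] = 1                    # an ordinary scalar, for contrast

# Read side: mirror the check added in this commit, mapping b'NoneType' back
# to a real Python None instead of leaving a bytes object in the dict.
loaded = {}
with h5py.File('demo_params.hdf5', 'r') as f:
    for key, item in f['params'].items():
        val = item[()]
        if isinstance(val, bytes) and val == b'NoneType':
            val = None
        loaded[key] = val

print(loaded['gSig_filt'], loaded['p'])   # None 1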

0 commit comments
