From 10e62e935fd824db599045aa5d8d630c389503ad Mon Sep 17 00:00:00 2001
From: Jessica Scheick <jbscheick@gmail.com>
Date: Tue, 15 Feb 2022 17:18:21 -0500
Subject: [PATCH 1/9] add new boolean flag and note for implementing fix

---
 icepyx/core/read.py      | 11 ++++++++++-
 icepyx/core/variables.py |  9 ++++++++-
 2 files changed, 18 insertions(+), 2 deletions(-)

diff --git a/icepyx/core/read.py b/icepyx/core/read.py
index e1624c2d6..65c37bebf 100644
--- a/icepyx/core/read.py
+++ b/icepyx/core/read.py
@@ -338,9 +338,18 @@ def _add_var_to_ds(is2ds, ds, grp_path, wanted_groups_tiered, wanted_dict):
 
         wanted_vars = list(wanted_dict.keys())
 
+        print(grp_path)
+        print(wanted_groups_tiered)
+        print(wanted_dict)
+        print(wanted_vars)
+
         if grp_path in ["orbit_info", "ancillary_data"]:
+            # print(grp_path)
+            # print(wanted_groups_tiered[0])
+            # # print(wanted_groups_tiered)
+            # print(wanted_vars)
             grp_spec_vars = [
-                wanted_vars[i]
+                print(wanted_vars[i])
                 for i, x in enumerate(wanted_groups_tiered[0])
                 if x == grp_path
             ]
diff --git a/icepyx/core/variables.py b/icepyx/core/variables.py
index ef71dc737..f50880bed 100644
--- a/icepyx/core/variables.py
+++ b/icepyx/core/variables.py
@@ -135,7 +135,7 @@ def visitor_func(name, node):
             return self._avail
 
     @staticmethod
-    def parse_var_list(varlist, tiered=True):
+    def parse_var_list(varlist, tiered=True, tiered_vars=False):
         """
         Parse a list of path strings into tiered lists and names of variables
 
@@ -149,6 +149,13 @@ def parse_var_list(varlist, tiered=True):
             (e.g. [['orbit_info', 'ancillary_data', 'gt1l'],['none','none','land_ice_segments']])
             or a single list of path strings (e.g. ['orbit_info','ancillary_data','gt1l/land_ice_segments'])
 
+        tiered_vars : boolean, default False
+            Whether or not to append a list of the variable names to the nested list of component strings
+            (e.g. [['orbit_info', 'ancillary_data', 'gt1l'],['none','none','land_ice_segments'],
+                ['sc_orient','atlas_sdp_gps_epoch','h_li']]))
+
+        Jessica NOTE: add this tiered vars functionality, then use it in read ln 582 (set to true) and ultimately circa line 339 to avoid the index error
+
         Examples
         --------
         >>> reg_a = ipx.Query('ATL06',[-55, 68, -48, 71],['2019-02-20','2019-02-28'], version='1') # doctest: +SKIP

From dd6b2df1a42faee53982339a37e3ac4b0cfaa28c Mon Sep 17 00:00:00 2001
From: Jessica Scheick <jbscheick@gmail.com>
Date: Wed, 16 Feb 2022 11:20:27 -0500
Subject: [PATCH 2/9] implement boolean flag for dealing with index error from
 mismatched length var and varpath lists

---
 icepyx/core/read.py      | 20 +++++++++-----------
 icepyx/core/variables.py |  9 ++++++---
 2 files changed, 15 insertions(+), 14 deletions(-)

diff --git a/icepyx/core/read.py b/icepyx/core/read.py
index 65c37bebf..5b3ad02b2 100644
--- a/icepyx/core/read.py
+++ b/icepyx/core/read.py
@@ -336,23 +336,19 @@ def _add_var_to_ds(is2ds, ds, grp_path, wanted_groups_tiered, wanted_dict):
         Xarray Dataset with variables from the ds variable group added.
         """
 
-        wanted_vars = list(wanted_dict.keys())
+        # wanted_vars = list(wanted_dict.keys())
 
         print(grp_path)
-        print(wanted_groups_tiered)
-        print(wanted_dict)
-        print(wanted_vars)
+        # print(wanted_groups_tiered)
+        # print(wanted_dict)
 
         if grp_path in ["orbit_info", "ancillary_data"]:
-            # print(grp_path)
-            # print(wanted_groups_tiered[0])
-            # # print(wanted_groups_tiered)
-            # print(wanted_vars)
             grp_spec_vars = [
-                print(wanted_vars[i])
+                wanted_groups_tiered[-1][i]
                 for i, x in enumerate(wanted_groups_tiered[0])
                 if x == grp_path
             ]
+            print(grp_spec_vars)
 
             for var in grp_spec_vars:
                 is2ds = is2ds.assign({var: ("gran_idx", ds[var].data)})
@@ -392,7 +388,7 @@ def _add_var_to_ds(is2ds, ds, grp_path, wanted_groups_tiered, wanted_dict):
             grp_spec_vars = [
                 k for k, v in wanted_dict.items() if any(grp_path in x for x in v)
             ]
-            # print(grp_spec_vars)
+            print(grp_spec_vars)
 
             ds = (
                 ds.reset_coords(drop=False)
@@ -579,7 +575,9 @@ def _build_single_file_dataset(self, file, groups_list):
             # orbit_info is used automatically as the first group path so the info is available for the rest of the groups
             wanted_groups_set.remove("orbit_info")
             # returns the wanted groups as a list of lists with group path string elements separated
-            _, wanted_groups_tiered = Variables.parse_var_list(groups_list, tiered=True)
+            _, wanted_groups_tiered = Variables.parse_var_list(
+                groups_list, tiered=True, tiered_vars=True
+            )
 
             for grp_path in ["orbit_info"] + list(wanted_groups_set):
                 ds = self._read_single_var(file, grp_path)
diff --git a/icepyx/core/variables.py b/icepyx/core/variables.py
index f50880bed..ae12d4d3c 100644
--- a/icepyx/core/variables.py
+++ b/icepyx/core/variables.py
@@ -154,8 +154,6 @@ def parse_var_list(varlist, tiered=True, tiered_vars=False):
             (e.g. [['orbit_info', 'ancillary_data', 'gt1l'],['none','none','land_ice_segments'],
                 ['sc_orient','atlas_sdp_gps_epoch','h_li']]))
 
-        Jessica NOTE: add this tiered vars functionality, then use it in read ln 582 (set to true) and ultimately circa line 339 to avoid the index error
-
         Examples
         --------
         >>> reg_a = ipx.Query('ATL06',[-55, 68, -48, 71],['2019-02-20','2019-02-28'], version='1') # doctest: +SKIP
@@ -222,7 +220,10 @@ def parse_var_list(varlist, tiered=True, tiered_vars=False):
         else:
             num = np.max([v.count("/") for v in varlist])
             #         print('max needed: ' + str(num))
-            paths = [[] for i in range(num)]
+            if tiered_vars == True:
+                paths = [[] for i in range(num + 1)]
+            else:
+                paths = [[] for i in range(num)]
 
         # print(self._cust_options['variables'])
         for vn in varlist:
@@ -244,6 +245,8 @@ def parse_var_list(varlist, tiered=True, tiered_vars=False):
                     for i in range(j, num):
                         paths[i].append("none")
                         i = i + 1
+                    if tiered_vars == True:
+                        paths[num].append(vkey)
 
         return vgrp, paths
 

From 5f7850e011cdd64be3ccd0c628566899db6cd28c Mon Sep 17 00:00:00 2001
From: Jessica Scheick <jbscheick@gmail.com>
Date: Wed, 16 Feb 2022 12:08:33 -0500
Subject: [PATCH 3/9] fix generation of group specific variable list for
 multiple variable levels

---
 icepyx/core/read.py | 12 ++++++++----
 1 file changed, 8 insertions(+), 4 deletions(-)

diff --git a/icepyx/core/read.py b/icepyx/core/read.py
index 5b3ad02b2..fe1a94f45 100644
--- a/icepyx/core/read.py
+++ b/icepyx/core/read.py
@@ -348,7 +348,6 @@ def _add_var_to_ds(is2ds, ds, grp_path, wanted_groups_tiered, wanted_dict):
                 for i, x in enumerate(wanted_groups_tiered[0])
                 if x == grp_path
             ]
-            print(grp_spec_vars)
 
             for var in grp_spec_vars:
                 is2ds = is2ds.assign({var: ("gran_idx", ds[var].data)})
@@ -385,19 +384,24 @@ def _add_var_to_ds(is2ds, ds, grp_path, wanted_groups_tiered, wanted_dict):
             spot = is2ref.gt2spot(gt_str, is2ds.sc_orient.values[0])
             # add a test for the new function (called here)!
 
+            # print(wanted_dict)
+
             grp_spec_vars = [
-                k for k, v in wanted_dict.items() if any(grp_path in x for x in v)
+                k
+                for k, v in wanted_dict.items()
+                if any(f"{grp_path}/{k}" in x for x in v)
             ]
             print(grp_spec_vars)
 
+            print(ds)
+
             ds = (
                 ds.reset_coords(drop=False)
                 .expand_dims(dim=["spot", "gran_idx"])
                 .assign_coords(spot=("spot", [spot]))
                 .assign(gt=(("gran_idx", "spot"), [[gt_str]]))
             )
-
-            # print(ds)
+            # print(ds[grp_spec_vars])
             grp_spec_vars.append("gt")
             is2ds = is2ds.merge(
                 ds[grp_spec_vars], join="outer", combine_attrs="no_conflicts"

From c3b5c73c7cbfac628b1f6e301915985c7161a49d Mon Sep 17 00:00:00 2001
From: Jessica Scheick <jbscheick@gmail.com>
Date: Mon, 21 Feb 2022 13:22:38 -0500
Subject: [PATCH 4/9] set up structure to handle merge conflicts caused by more
 highly nested variables

---
 icepyx/core/query.py |  2 +-
 icepyx/core/read.py  | 41 +++++++++++++++++++++++++++++------------
 2 files changed, 30 insertions(+), 13 deletions(-)

diff --git a/icepyx/core/query.py b/icepyx/core/query.py
index 244e75978..db8219f8e 100644
--- a/icepyx/core/query.py
+++ b/icepyx/core/query.py
@@ -1017,7 +1017,7 @@ def download_granules(
             by default when subset=True, but additional subsetting options are available.
             Spatial subsetting returns all data that are within the area of interest (but not complete
             granules. This eliminates false-positive granules returned by the metadata-level search)
-        restart: boolean, default false
+        restart : boolean, default false
             If previous download was terminated unexpectedly. Run again with restart set to True to continue.
         **kwargs : key-value pairs
             Additional parameters to be passed to the subsetter.
diff --git a/icepyx/core/read.py b/icepyx/core/read.py
index fe1a94f45..b9bb82123 100644
--- a/icepyx/core/read.py
+++ b/icepyx/core/read.py
@@ -311,9 +311,9 @@ def _check_source_for_pattern(source, filename_pattern):
         return False, None
 
     @staticmethod
-    def _add_var_to_ds(is2ds, ds, grp_path, wanted_groups_tiered, wanted_dict):
+    def _add_vars_to_ds(is2ds, ds, grp_path, wanted_groups_tiered, wanted_dict):
         """
-        Add the new variable group to the dataset template.
+        Add the new variables in the group to the dataset template.
 
         Parameters
         ----------
@@ -338,7 +338,7 @@ def _add_var_to_ds(is2ds, ds, grp_path, wanted_groups_tiered, wanted_dict):
 
         # wanted_vars = list(wanted_dict.keys())
 
-        print(grp_path)
+        # print(grp_path)
         # print(wanted_groups_tiered)
         # print(wanted_dict)
 
@@ -391,10 +391,10 @@ def _add_var_to_ds(is2ds, ds, grp_path, wanted_groups_tiered, wanted_dict):
                 for k, v in wanted_dict.items()
                 if any(f"{grp_path}/{k}" in x for x in v)
             ]
-            print(grp_spec_vars)
 
-            print(ds)
+            # print(ds)
 
+            # DevNOTE: the issue seems to be that the incoming ds has mismatching delta time lengths, and they're not brought in as coordinates for the canopy/canopy_hy
             ds = (
                 ds.reset_coords(drop=False)
                 .expand_dims(dim=["spot", "gran_idx"])
@@ -403,9 +403,26 @@ def _add_var_to_ds(is2ds, ds, grp_path, wanted_groups_tiered, wanted_dict):
             )
             # print(ds[grp_spec_vars])
             grp_spec_vars.append("gt")
+
+            # Use this to handle issues specific to group paths that are more nested
+            tiers = len(wanted_groups_tiered)
+            if tiers > 3 and grp_path.count("/") == tiers - 2:
+                # Handle attribute conflicts that arose from data descriptions during merging
+                for var in grp_spec_vars:
+                    ds[var].attrs = ds.attrs
+                for k in ds[var].attrs.keys():
+                    ds.attrs.pop(k)
+                # warnings.warn(
+                #     "Due to the number of layers of variable group paths, some attributes have been dropped from your DataSet during merging",
+                #     UserWarning,
+                # )
+
+                # assign delta-time coordinates for the deeper layer variable
+
             is2ds = is2ds.merge(
                 ds[grp_spec_vars], join="outer", combine_attrs="no_conflicts"
             )
+
             # print(is2ds)
 
             # re-cast some dtypes to make array smaller
@@ -486,7 +503,7 @@ def _build_dataset_template(self, file):
         )
         return is2ds
 
-    def _read_single_var(self, file, grp_path):
+    def _read_single_grp(self, file, grp_path):
         """
         For a given file and variable group path, construct an Intake catalog and use it to read in the data.
 
@@ -520,12 +537,10 @@ def _read_single_var(self, file, grp_path):
                 grp_paths=grp_path,
                 extra_engine_kwargs={"phony_dims": "access"},
             )
-
             ds = grpcat[self._source_type].read()
 
         return ds
 
-    # NOTE: for non-gridded datasets only
     def _build_single_file_dataset(self, file, groups_list):
         """
         Create a single xarray dataset with all of the wanted variables/groups from the wanted var list for a single data file/url.
@@ -545,7 +560,7 @@ def _build_single_file_dataset(self, file, groups_list):
         Xarray Dataset
         """
 
-        file_product = self._read_single_var(file, "/").attrs["identifier_product_type"]
+        file_product = self._read_single_grp(file, "/").attrs["identifier_product_type"]
         assert (
             file_product == self._prod
         ), "Your product specification does not match the product specification within your files."
@@ -582,10 +597,12 @@ def _build_single_file_dataset(self, file, groups_list):
             _, wanted_groups_tiered = Variables.parse_var_list(
                 groups_list, tiered=True, tiered_vars=True
             )
-
+            print(wanted_groups_set)
             for grp_path in ["orbit_info"] + list(wanted_groups_set):
-                ds = self._read_single_var(file, grp_path)
-                is2ds = Read._add_var_to_ds(
+                print(grp_path)
+                ds = self._read_single_grp(file, grp_path)
+                # print(ds)
+                is2ds = Read._add_vars_to_ds(
                     is2ds, ds, grp_path, wanted_groups_tiered, wanted_dict
                 )
 

From 57fd93d68779a03206ca7b84c78167c1057e729c Mon Sep 17 00:00:00 2001
From: Jessica Scheick <jbscheick@gmail.com>
Date: Thu, 24 Feb 2022 17:04:25 -0500
Subject: [PATCH 5/9] working prototype for deeply nested dataset merging

---
 icepyx/core/read.py | 154 ++++++++++++++++++++++++++++++++++++++------
 1 file changed, 136 insertions(+), 18 deletions(-)

diff --git a/icepyx/core/read.py b/icepyx/core/read.py
index b9bb82123..281443a61 100644
--- a/icepyx/core/read.py
+++ b/icepyx/core/read.py
@@ -1,4 +1,5 @@
 import fnmatch
+import grp
 import os
 import warnings
 
@@ -404,20 +405,29 @@ def _add_vars_to_ds(is2ds, ds, grp_path, wanted_groups_tiered, wanted_dict):
             # print(ds[grp_spec_vars])
             grp_spec_vars.append("gt")
 
-            # Use this to handle issues specific to group paths that are more nested
-            tiers = len(wanted_groups_tiered)
-            if tiers > 3 and grp_path.count("/") == tiers - 2:
-                # Handle attribute conflicts that arose from data descriptions during merging
-                for var in grp_spec_vars:
-                    ds[var].attrs = ds.attrs
-                for k in ds[var].attrs.keys():
-                    ds.attrs.pop(k)
-                # warnings.warn(
-                #     "Due to the number of layers of variable group paths, some attributes have been dropped from your DataSet during merging",
-                #     UserWarning,
-                # )
+            # # Use this to handle issues specific to group paths that are more nested
+            # tiers = len(wanted_groups_tiered)
+            # if tiers > 3 and grp_path.count("/") == tiers - 2:
+            #     # Handle attribute conflicts that arose from data descriptions during merging
+            #     for var in grp_spec_vars:
+            #         ds[var].attrs = ds.attrs
+            #     for k in ds[var].attrs.keys():
+            #         ds.attrs.pop(k)
+            #     # warnings.warn(
+            #     #     "Due to the number of layers of variable group paths, some attributes have been dropped from your DataSet during merging",
+            #     #     UserWarning,
+            #     # )
+
+            #     # assign delta-time coordinates for the deeper layer variable
+            #     up_grp_path = grp_path.rsplit("/")[0]
+
+            #     print(is2ds.sel(spot=spot).delta_time)
+
+            #     # ds.assign_coords(delta_time=is2ds.sel(spot=spot).delta_time)
+            #     print(is2ds)
 
-                # assign delta-time coordinates for the deeper layer variable
+            #     ds=ds.sel(spot=spot).assign_coords({'delta_time':is2ds.sel(spot=spot).delta_time.data})
+            #     # print(ds)
 
             is2ds = is2ds.merge(
                 ds[grp_spec_vars], join="outer", combine_attrs="no_conflicts"
@@ -429,6 +439,92 @@ def _add_vars_to_ds(is2ds, ds, grp_path, wanted_groups_tiered, wanted_dict):
             is2ds["gt"] = is2ds.gt.astype(str)
             is2ds["spot"] = is2ds.spot.astype(np.uint8)
 
+        return is2ds, ds[grp_spec_vars]
+
+    @staticmethod
+    def _combine_nested_vars(is2ds, ds, grp_path, wanted_groups_tiered, wanted_dict):
+        """
+        Add the new variables in the group to the dataset template.
+
+        Parameters
+        ----------
+        is2ds : Xarray dataset
+            Template dataset to add new variables to.
+        ds : Xarray dataset
+            Dataset containing the group to add
+        grp_path : str
+            hdf5 group path read into ds
+        wanted_groups_tiered : list of lists
+            A list of lists of deconstructed group + variable paths.
+            The first list contains the first portion of the group name (between consecutive "/"),
+            the second list contains the second portion of the group name, etc.
+            "none" is used to fill in where paths are shorter than the longest path.
+        wanted_dict : dict
+            Dictionary with variable names as keys and a list of group + variable paths containing those variables as values.
+
+        Returns
+        -------
+        Xarray Dataset with variables from the ds variable group added.
+        """
+
+        # wanted_vars = list(wanted_dict.keys())
+
+        # print(grp_path)
+        # print(wanted_groups_tiered)
+        # print(wanted_dict)
+
+        # print(wanted_dict)
+
+        grp_spec_vars = [
+            k for k, v in wanted_dict.items() if any(f"{grp_path}/{k}" in x for x in v)
+        ]
+
+        # print(ds)
+
+        # DevNOTE: the issue seems to be that the incoming ds has mismatching delta time lengths, and they're not brought in as coordinates for the canopy/canopy_hy
+        # ds = (
+        #     ds.reset_coords(drop=False)
+        #     .expand_dims(dim=["spot", "gran_idx"])
+        #     .assign_coords(spot=("spot", [spot]))
+        #     .assign(gt=(("gran_idx", "spot"), [[gt_str]]))
+        # )
+        # # print(ds[grp_spec_vars])
+        # grp_spec_vars.append("gt")
+
+        # # Use this to handle issues specific to group paths that are more nested
+        # tiers = len(wanted_groups_tiered)
+        # if tiers > 3 and grp_path.count("/") == tiers - 2:
+        #     # Handle attribute conflicts that arose from data descriptions during merging
+        #     for var in grp_spec_vars:
+        #         ds[var].attrs = ds.attrs
+        #     for k in ds[var].attrs.keys():
+        #         ds.attrs.pop(k)
+        #     # warnings.warn(
+        #     #     "Due to the number of layers of variable group paths, some attributes have been dropped from your DataSet during merging",
+        #     #     UserWarning,
+        #     # )
+
+        #     # assign delta-time coordinates for the deeper layer variable
+        #     up_grp_path = grp_path.rsplit("/")[0]
+
+        #     print(is2ds.sel(spot=spot).delta_time)
+
+        #     # ds.assign_coords(delta_time=is2ds.sel(spot=spot).delta_time)
+        #     print(is2ds)
+
+        #     ds=ds.sel(spot=spot).assign_coords({'delta_time':is2ds.sel(spot=spot).delta_time.data})
+        #     # print(ds)
+
+        print(grp_spec_vars)
+
+        is2ds = is2ds.assign(ds[grp_spec_vars])
+
+        # print(is2ds)
+
+        # re-cast some dtypes to make array smaller
+        # is2ds["gt"] = is2ds.gt.astype(str)
+        # is2ds["spot"] = is2ds.spot.astype(np.uint8)
+
         return is2ds
 
     def load(self):
@@ -593,17 +689,39 @@ def _build_single_file_dataset(self, file, groups_list):
             wanted_groups_set = set(wanted_groups)
             # orbit_info is used automatically as the first group path so the info is available for the rest of the groups
             wanted_groups_set.remove("orbit_info")
+            # Note: the sorting is critical for datasets with highly nested groups
+            wanted_groups_list = ["orbit_info"] + sorted(wanted_groups_set)
             # returns the wanted groups as a list of lists with group path string elements separated
             _, wanted_groups_tiered = Variables.parse_var_list(
                 groups_list, tiered=True, tiered_vars=True
             )
-            print(wanted_groups_set)
-            for grp_path in ["orbit_info"] + list(wanted_groups_set):
-                print(grp_path)
+
+            while wanted_groups_list:
+                grp_path = wanted_groups_list[0]
+                wanted_groups_list = wanted_groups_list[1:]
+                # Note this will fail with an index error on the last run
                 ds = self._read_single_grp(file, grp_path)
-                # print(ds)
-                is2ds = Read._add_vars_to_ds(
+                print(grp_path)
+                is2ds, ds = Read._add_vars_to_ds(
                     is2ds, ds, grp_path, wanted_groups_tiered, wanted_dict
                 )
 
+                # if there are any deeper nested variables, get those so they have actual coordinates and add them
+                if any(grp_path in grp_path2 for grp_path2 in wanted_groups_list):
+                    print("deep nested paths")
+                    for grp_path2 in wanted_groups_list:
+                        if grp_path in grp_path2:
+                            sub_ds = self._read_single_grp(file, grp_path2)
+                            # print(ds)
+                            # print(sub_ds)
+                            ds = Read._combine_nested_vars(
+                                ds, sub_ds, grp_path2, wanted_groups_tiered, wanted_dict
+                            )
+                            wanted_groups_list.remove(grp_path2)
+                    is2ds = is2ds.merge(ds, join="outer", combine_attrs="no_conflicts")
+
+                print(is2ds)
+
+                # Notes (next steps): test on ATL06; reset kernal and try again; figure out gran_idx generation to be unique for ATL08files
+
         return is2ds

From 6d9cec66578c0d400aeb77f76b402b183389f1a8 Mon Sep 17 00:00:00 2001
From: Jessica Scheick <jbscheick@gmail.com>
Date: Fri, 25 Feb 2022 10:43:40 -0500
Subject: [PATCH 6/9] fix docstring typo

---
 icepyx/core/is2ref.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/icepyx/core/is2ref.py b/icepyx/core/is2ref.py
index 3fed2ef4f..5f4e455c6 100644
--- a/icepyx/core/is2ref.py
+++ b/icepyx/core/is2ref.py
@@ -259,7 +259,7 @@ def _default_varlists(product):
 
     else:
         print(
-            "THE REQUESTED PRODUCT DOES NOT YET HAVE A DEFAULT LIST SET UP. ONLY DELTA_TIME, LATITUTDE, AND LONGITUDE WILL BE RETURNED"
+            "THE REQUESTED PRODUCT DOES NOT YET HAVE A DEFAULT LIST SET UP. ONLY DELTA_TIME, LATITUDE, AND LONGITUDE WILL BE RETURNED"
         )
         return common_list
 

From 763305b6db9b01ffdabc28a50b02d9187b8bf9c8 Mon Sep 17 00:00:00 2001
From: Jessica Scheick <jbscheick@gmail.com>
Date: Fri, 25 Feb 2022 10:44:26 -0500
Subject: [PATCH 7/9] finish debugging ATL08 read-in issue

---
 .../example_notebooks/IS2_data_read-in.ipynb  |  2 +-
 icepyx/core/read.py                           | 81 +------------------
 2 files changed, 3 insertions(+), 80 deletions(-)

diff --git a/doc/source/example_notebooks/IS2_data_read-in.ipynb b/doc/source/example_notebooks/IS2_data_read-in.ipynb
index 6537777de..836381698 100644
--- a/doc/source/example_notebooks/IS2_data_read-in.ipynb
+++ b/doc/source/example_notebooks/IS2_data_read-in.ipynb
@@ -426,7 +426,7 @@
     "\n",
     "***ATTENTION: icepyx loads your data by creating an Xarray DataSet for each input granule and then merging them. In some cases, the automatic merge fails and needs to be handled manually. In these cases, icepyx will return a warning with the error message from the failed Xarray merge and a list of per-granule DataSets***\n",
     "\n",
-    "This can happen if you unintentionally provide the same granule multiple times with different filenames."
+    "This can happen if you unintentionally provide the same granule multiple times with different filenames or in segmented products where the rgt+cycle automatically generated `gran_idx` values match. In this latter case, you can simply provide unique `gran_idx` values for each DataSet in `ds` and run `import xarray as xr` and `ds_merged = xr.merge(ds)` to create one merged DataSet."
    ]
   },
   {
diff --git a/icepyx/core/read.py b/icepyx/core/read.py
index 281443a61..2f3ab5891 100644
--- a/icepyx/core/read.py
+++ b/icepyx/core/read.py
@@ -385,56 +385,25 @@ def _add_vars_to_ds(is2ds, ds, grp_path, wanted_groups_tiered, wanted_dict):
             spot = is2ref.gt2spot(gt_str, is2ds.sc_orient.values[0])
             # add a test for the new function (called here)!
 
-            # print(wanted_dict)
-
             grp_spec_vars = [
                 k
                 for k, v in wanted_dict.items()
                 if any(f"{grp_path}/{k}" in x for x in v)
             ]
 
-            # print(ds)
-
-            # DevNOTE: the issue seems to be that the incoming ds has mismatching delta time lengths, and they're not brought in as coordinates for the canopy/canopy_hy
             ds = (
                 ds.reset_coords(drop=False)
                 .expand_dims(dim=["spot", "gran_idx"])
                 .assign_coords(spot=("spot", [spot]))
                 .assign(gt=(("gran_idx", "spot"), [[gt_str]]))
             )
-            # print(ds[grp_spec_vars])
-            grp_spec_vars.append("gt")
-
-            # # Use this to handle issues specific to group paths that are more nested
-            # tiers = len(wanted_groups_tiered)
-            # if tiers > 3 and grp_path.count("/") == tiers - 2:
-            #     # Handle attribute conflicts that arose from data descriptions during merging
-            #     for var in grp_spec_vars:
-            #         ds[var].attrs = ds.attrs
-            #     for k in ds[var].attrs.keys():
-            #         ds.attrs.pop(k)
-            #     # warnings.warn(
-            #     #     "Due to the number of layers of variable group paths, some attributes have been dropped from your DataSet during merging",
-            #     #     UserWarning,
-            #     # )
-
-            #     # assign delta-time coordinates for the deeper layer variable
-            #     up_grp_path = grp_path.rsplit("/")[0]
-
-            #     print(is2ds.sel(spot=spot).delta_time)
-
-            #     # ds.assign_coords(delta_time=is2ds.sel(spot=spot).delta_time)
-            #     print(is2ds)
 
-            #     ds=ds.sel(spot=spot).assign_coords({'delta_time':is2ds.sel(spot=spot).delta_time.data})
-            #     # print(ds)
+            grp_spec_vars.append("gt")
 
             is2ds = is2ds.merge(
                 ds[grp_spec_vars], join="outer", combine_attrs="no_conflicts"
             )
 
-            # print(is2ds)
-
             # re-cast some dtypes to make array smaller
             is2ds["gt"] = is2ds.gt.astype(str)
             is2ds["spot"] = is2ds.spot.astype(np.uint8)
@@ -467,30 +436,10 @@ def _combine_nested_vars(is2ds, ds, grp_path, wanted_groups_tiered, wanted_dict)
         Xarray Dataset with variables from the ds variable group added.
         """
 
-        # wanted_vars = list(wanted_dict.keys())
-
-        # print(grp_path)
-        # print(wanted_groups_tiered)
-        # print(wanted_dict)
-
-        # print(wanted_dict)
-
         grp_spec_vars = [
             k for k, v in wanted_dict.items() if any(f"{grp_path}/{k}" in x for x in v)
         ]
 
-        # print(ds)
-
-        # DevNOTE: the issue seems to be that the incoming ds has mismatching delta time lengths, and they're not brought in as coordinates for the canopy/canopy_hy
-        # ds = (
-        #     ds.reset_coords(drop=False)
-        #     .expand_dims(dim=["spot", "gran_idx"])
-        #     .assign_coords(spot=("spot", [spot]))
-        #     .assign(gt=(("gran_idx", "spot"), [[gt_str]]))
-        # )
-        # # print(ds[grp_spec_vars])
-        # grp_spec_vars.append("gt")
-
         # # Use this to handle issues specific to group paths that are more nested
         # tiers = len(wanted_groups_tiered)
         # if tiers > 3 and grp_path.count("/") == tiers - 2:
@@ -504,27 +453,8 @@ def _combine_nested_vars(is2ds, ds, grp_path, wanted_groups_tiered, wanted_dict)
         #     #     UserWarning,
         #     # )
 
-        #     # assign delta-time coordinates for the deeper layer variable
-        #     up_grp_path = grp_path.rsplit("/")[0]
-
-        #     print(is2ds.sel(spot=spot).delta_time)
-
-        #     # ds.assign_coords(delta_time=is2ds.sel(spot=spot).delta_time)
-        #     print(is2ds)
-
-        #     ds=ds.sel(spot=spot).assign_coords({'delta_time':is2ds.sel(spot=spot).delta_time.data})
-        #     # print(ds)
-
-        print(grp_spec_vars)
-
         is2ds = is2ds.assign(ds[grp_spec_vars])
 
-        # print(is2ds)
-
-        # re-cast some dtypes to make array smaller
-        # is2ds["gt"] = is2ds.gt.astype(str)
-        # is2ds["spot"] = is2ds.spot.astype(np.uint8)
-
         return is2ds
 
     def load(self):
@@ -699,29 +629,22 @@ def _build_single_file_dataset(self, file, groups_list):
             while wanted_groups_list:
                 grp_path = wanted_groups_list[0]
                 wanted_groups_list = wanted_groups_list[1:]
-                # Note this will fail with an index error on the last run
                 ds = self._read_single_grp(file, grp_path)
-                print(grp_path)
                 is2ds, ds = Read._add_vars_to_ds(
                     is2ds, ds, grp_path, wanted_groups_tiered, wanted_dict
                 )
 
                 # if there are any deeper nested variables, get those so they have actual coordinates and add them
                 if any(grp_path in grp_path2 for grp_path2 in wanted_groups_list):
-                    print("deep nested paths")
                     for grp_path2 in wanted_groups_list:
                         if grp_path in grp_path2:
                             sub_ds = self._read_single_grp(file, grp_path2)
-                            # print(ds)
-                            # print(sub_ds)
                             ds = Read._combine_nested_vars(
                                 ds, sub_ds, grp_path2, wanted_groups_tiered, wanted_dict
                             )
                             wanted_groups_list.remove(grp_path2)
                     is2ds = is2ds.merge(ds, join="outer", combine_attrs="no_conflicts")
 
-                print(is2ds)
-
-                # Notes (next steps): test on ATL06; reset kernal and try again; figure out gran_idx generation to be unique for ATL08files
+                # Notes (next steps):  open an issue; maybe add a fn to generate unique gran ids
 
         return is2ds

From 1e8b785de49fdec907856b67f4c139a17e04b7b4 Mon Sep 17 00:00:00 2001
From: Jessica Scheick <jbscheick@gmail.com>
Date: Fri, 25 Feb 2022 10:58:43 -0500
Subject: [PATCH 8/9] clean up code after viewing diff

---
 icepyx/core/read.py | 25 +++++--------------------
 1 file changed, 5 insertions(+), 20 deletions(-)

diff --git a/icepyx/core/read.py b/icepyx/core/read.py
index 2f3ab5891..184d2b7c6 100644
--- a/icepyx/core/read.py
+++ b/icepyx/core/read.py
@@ -1,5 +1,4 @@
 import fnmatch
-import grp
 import os
 import warnings
 
@@ -337,12 +336,6 @@ def _add_vars_to_ds(is2ds, ds, grp_path, wanted_groups_tiered, wanted_dict):
         Xarray Dataset with variables from the ds variable group added.
         """
 
-        # wanted_vars = list(wanted_dict.keys())
-
-        # print(grp_path)
-        # print(wanted_groups_tiered)
-        # print(wanted_dict)
-
         if grp_path in ["orbit_info", "ancillary_data"]:
             grp_spec_vars = [
                 wanted_groups_tiered[-1][i]
@@ -399,7 +392,6 @@ def _add_vars_to_ds(is2ds, ds, grp_path, wanted_groups_tiered, wanted_dict):
             )
 
             grp_spec_vars.append("gt")
-
             is2ds = is2ds.merge(
                 ds[grp_spec_vars], join="outer", combine_attrs="no_conflicts"
             )
@@ -411,23 +403,18 @@ def _add_vars_to_ds(is2ds, ds, grp_path, wanted_groups_tiered, wanted_dict):
         return is2ds, ds[grp_spec_vars]
 
     @staticmethod
-    def _combine_nested_vars(is2ds, ds, grp_path, wanted_groups_tiered, wanted_dict):
+    def _combine_nested_vars(is2ds, ds, grp_path, wanted_dict):
         """
-        Add the new variables in the group to the dataset template.
+        Add the deeply nested variables to a dataset with appropriate coordinate information.
 
         Parameters
         ----------
         is2ds : Xarray dataset
-            Template dataset to add new variables to.
+            Dataset to add deeply nested variables to.
         ds : Xarray dataset
-            Dataset containing the group to add
+            Dataset containing proper dimensions for the variables being added
         grp_path : str
             hdf5 group path read into ds
-        wanted_groups_tiered : list of lists
-            A list of lists of deconstructed group + variable paths.
-            The first list contains the first portion of the group name (between consecutive "/"),
-            the second list contains the second portion of the group name, etc.
-            "none" is used to fill in where paths are shorter than the longest path.
         wanted_dict : dict
             Dictionary with variable names as keys and a list of group + variable paths containing those variables as values.
 
@@ -640,11 +627,9 @@ def _build_single_file_dataset(self, file, groups_list):
                         if grp_path in grp_path2:
                             sub_ds = self._read_single_grp(file, grp_path2)
                             ds = Read._combine_nested_vars(
-                                ds, sub_ds, grp_path2, wanted_groups_tiered, wanted_dict
+                                ds, sub_ds, grp_path2, wanted_dict
                             )
                             wanted_groups_list.remove(grp_path2)
                     is2ds = is2ds.merge(ds, join="outer", combine_attrs="no_conflicts")
 
-                # Notes (next steps):  open an issue; maybe add a fn to generate unique gran ids
-
         return is2ds

From 5ee425fa575fd12cebb5a314751664ca1c1a05bc Mon Sep 17 00:00:00 2001
From: GitHub Action <ricardobarroslourenco@users.noreply.github.com>
Date: Thu, 17 Mar 2022 01:21:19 +0000
Subject: [PATCH 9/9] GitHub action UML generation auto-update

---
 .../documentation/classes_dev_uml.svg         | 343 +++++++++---------
 .../documentation/classes_user_uml.svg        |   6 +-
 2 files changed, 175 insertions(+), 174 deletions(-)

diff --git a/doc/source/user_guide/documentation/classes_dev_uml.svg b/doc/source/user_guide/documentation/classes_dev_uml.svg
index a494bc220..a73320ea1 100644
--- a/doc/source/user_guide/documentation/classes_dev_uml.svg
+++ b/doc/source/user_guide/documentation/classes_dev_uml.svg
@@ -4,11 +4,11 @@
 <!-- Generated by graphviz version 2.43.0 (0)
  -->
 <!-- Title: classes_dev_uml Pages: 1 -->
-<svg width="2688pt" height="1236pt"
- viewBox="0.00 0.00 2687.50 1236.00" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
+<svg width="2691pt" height="1236pt"
+ viewBox="0.00 0.00 2690.50 1236.00" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
 <g id="graph0" class="graph" transform="scale(1 1) rotate(0) translate(4 1232)">
 <title>classes_dev_uml</title>
-<polygon fill="white" stroke="transparent" points="-4,4 -4,-1232 2683.5,-1232 2683.5,4 -4,4"/>
+<polygon fill="white" stroke="transparent" points="-4,4 -4,-1232 2686.5,-1232 2686.5,4 -4,4"/>
 <!-- icepyx.core.Earthdata.Earthdata -->
 <g id="node1" class="node">
 <title>icepyx.core.Earthdata.Earthdata</title>
@@ -18,7 +18,7 @@
 <text text-anchor="start" x="8" y="-196.3" font-family="Times,serif" font-size="14.00">capability_url</text>
 <text text-anchor="start" x="8" y="-181.3" font-family="Times,serif" font-size="14.00">email</text>
 <text text-anchor="start" x="8" y="-166.3" font-family="Times,serif" font-size="14.00">netrc : NoneType</text>
-<text text-anchor="start" x="8" y="-151.3" font-family="Times,serif" font-size="14.00">pswd : NoneType, str</text>
+<text text-anchor="start" x="8" y="-151.3" font-family="Times,serif" font-size="14.00">pswd : str, NoneType</text>
 <text text-anchor="start" x="8" y="-136.3" font-family="Times,serif" font-size="14.00">session : Session</text>
 <text text-anchor="start" x="8" y="-121.3" font-family="Times,serif" font-size="14.00">uid</text>
 <polyline fill="none" stroke="black" points="0,-113.5 301,-113.5 "/>
@@ -29,17 +29,17 @@
 <!-- icepyx.core.query.GenQuery -->
 <g id="node2" class="node">
 <title>icepyx.core.query.GenQuery</title>
-<polygon fill="none" stroke="black" points="869.5,-1083.5 869.5,-1227.5 1293.5,-1227.5 1293.5,-1083.5 869.5,-1083.5"/>
-<text text-anchor="middle" x="1081.5" y="-1212.3" font-family="Times,serif" font-size="14.00">GenQuery</text>
-<polyline fill="none" stroke="black" points="869.5,-1204.5 1293.5,-1204.5 "/>
-<text text-anchor="start" x="877.5" y="-1189.3" font-family="Times,serif" font-size="14.00">_end : datetime</text>
-<text text-anchor="start" x="877.5" y="-1174.3" font-family="Times,serif" font-size="14.00">_geom_filepath : NoneType</text>
-<text text-anchor="start" x="877.5" y="-1159.3" font-family="Times,serif" font-size="14.00">_spat_extent</text>
-<text text-anchor="start" x="877.5" y="-1144.3" font-family="Times,serif" font-size="14.00">_start : datetime</text>
-<text text-anchor="start" x="877.5" y="-1129.3" font-family="Times,serif" font-size="14.00">extent_type : str</text>
-<polyline fill="none" stroke="black" points="869.5,-1121.5 1293.5,-1121.5 "/>
-<text text-anchor="start" x="877.5" y="-1106.3" font-family="Times,serif" font-size="14.00">__init__(spatial_extent, date_range, start_time, end_time)</text>
-<text text-anchor="start" x="877.5" y="-1091.3" font-family="Times,serif" font-size="14.00">__str__()</text>
+<polygon fill="none" stroke="black" points="866.5,-1083.5 866.5,-1227.5 1290.5,-1227.5 1290.5,-1083.5 866.5,-1083.5"/>
+<text text-anchor="middle" x="1078.5" y="-1212.3" font-family="Times,serif" font-size="14.00">GenQuery</text>
+<polyline fill="none" stroke="black" points="866.5,-1204.5 1290.5,-1204.5 "/>
+<text text-anchor="start" x="874.5" y="-1189.3" font-family="Times,serif" font-size="14.00">_end : datetime</text>
+<text text-anchor="start" x="874.5" y="-1174.3" font-family="Times,serif" font-size="14.00">_geom_filepath : NoneType</text>
+<text text-anchor="start" x="874.5" y="-1159.3" font-family="Times,serif" font-size="14.00">_spat_extent</text>
+<text text-anchor="start" x="874.5" y="-1144.3" font-family="Times,serif" font-size="14.00">_start : datetime</text>
+<text text-anchor="start" x="874.5" y="-1129.3" font-family="Times,serif" font-size="14.00">extent_type : str</text>
+<polyline fill="none" stroke="black" points="866.5,-1121.5 1290.5,-1121.5 "/>
+<text text-anchor="start" x="874.5" y="-1106.3" font-family="Times,serif" font-size="14.00">__init__(spatial_extent, date_range, start_time, end_time)</text>
+<text text-anchor="start" x="874.5" y="-1091.3" font-family="Times,serif" font-size="14.00">__str__()</text>
 </g>
 <!-- icepyx.core.granules.Granules -->
 <g id="node3" class="node">
@@ -58,104 +58,104 @@
 <!-- icepyx.core.query.Query -->
 <g id="node7" class="node">
 <title>icepyx.core.query.Query</title>
-<polygon fill="none" stroke="black" points="734.5,-346.5 734.5,-1045.5 1428.5,-1045.5 1428.5,-346.5 734.5,-346.5"/>
-<text text-anchor="middle" x="1081.5" y="-1030.3" font-family="Times,serif" font-size="14.00">Query</text>
-<polyline fill="none" stroke="black" points="734.5,-1022.5 1428.5,-1022.5 "/>
-<text text-anchor="start" x="742.5" y="-1007.3" font-family="Times,serif" font-size="14.00">CMRparams</text>
-<text text-anchor="start" x="742.5" y="-992.3" font-family="Times,serif" font-size="14.00">_CMRparams</text>
-<text text-anchor="start" x="742.5" y="-977.3" font-family="Times,serif" font-size="14.00">_about_product</text>
-<text text-anchor="start" x="742.5" y="-962.3" font-family="Times,serif" font-size="14.00">_cust_options : dict</text>
-<text text-anchor="start" x="742.5" y="-947.3" font-family="Times,serif" font-size="14.00">_cycles : list</text>
-<text text-anchor="start" x="742.5" y="-932.3" font-family="Times,serif" font-size="14.00">_email</text>
-<text text-anchor="start" x="742.5" y="-917.3" font-family="Times,serif" font-size="14.00">_file_vars</text>
-<text text-anchor="start" x="742.5" y="-902.3" font-family="Times,serif" font-size="14.00">_granules</text>
-<text text-anchor="start" x="742.5" y="-887.3" font-family="Times,serif" font-size="14.00">_order_vars</text>
-<text text-anchor="start" x="742.5" y="-872.3" font-family="Times,serif" font-size="14.00">_prod : NoneType, str</text>
-<text text-anchor="start" x="742.5" y="-857.3" font-family="Times,serif" font-size="14.00">_readable_granule_name : list</text>
-<text text-anchor="start" x="742.5" y="-842.3" font-family="Times,serif" font-size="14.00">_reqparams</text>
-<text text-anchor="start" x="742.5" y="-827.3" font-family="Times,serif" font-size="14.00">_s3login_credentials</text>
-<text text-anchor="start" x="742.5" y="-812.3" font-family="Times,serif" font-size="14.00">_session : Session</text>
-<text text-anchor="start" x="742.5" y="-797.3" font-family="Times,serif" font-size="14.00">_source : str</text>
-<text text-anchor="start" x="742.5" y="-782.3" font-family="Times,serif" font-size="14.00">_subsetparams : NoneType</text>
-<text text-anchor="start" x="742.5" y="-767.3" font-family="Times,serif" font-size="14.00">_tracks : list</text>
-<text text-anchor="start" x="742.5" y="-752.3" font-family="Times,serif" font-size="14.00">_version</text>
-<text text-anchor="start" x="742.5" y="-737.3" font-family="Times,serif" font-size="14.00">cycles</text>
-<text text-anchor="start" x="742.5" y="-722.3" font-family="Times,serif" font-size="14.00">dataset</text>
-<text text-anchor="start" x="742.5" y="-707.3" font-family="Times,serif" font-size="14.00">dates</text>
-<text text-anchor="start" x="742.5" y="-692.3" font-family="Times,serif" font-size="14.00">end_time</text>
-<text text-anchor="start" x="742.5" y="-677.3" font-family="Times,serif" font-size="14.00">file_vars</text>
-<text text-anchor="start" x="742.5" y="-662.3" font-family="Times,serif" font-size="14.00">granules</text>
-<text text-anchor="start" x="742.5" y="-647.3" font-family="Times,serif" font-size="14.00">order_vars</text>
-<text text-anchor="start" x="742.5" y="-632.3" font-family="Times,serif" font-size="14.00">product</text>
-<text text-anchor="start" x="742.5" y="-617.3" font-family="Times,serif" font-size="14.00">product_version</text>
-<text text-anchor="start" x="742.5" y="-602.3" font-family="Times,serif" font-size="14.00">reqparams</text>
-<text text-anchor="start" x="742.5" y="-587.3" font-family="Times,serif" font-size="14.00">spatial_extent</text>
-<text text-anchor="start" x="742.5" y="-572.3" font-family="Times,serif" font-size="14.00">start_time</text>
-<text text-anchor="start" x="742.5" y="-557.3" font-family="Times,serif" font-size="14.00">tracks</text>
-<polyline fill="none" stroke="black" points="734.5,-549.5 1428.5,-549.5 "/>
-<text text-anchor="start" x="742.5" y="-534.3" font-family="Times,serif" font-size="14.00">__init__(product, spatial_extent, date_range, start_time, end_time, version, cycles, tracks, files)</text>
-<text text-anchor="start" x="742.5" y="-519.3" font-family="Times,serif" font-size="14.00">__str__()</text>
-<text text-anchor="start" x="742.5" y="-504.3" font-family="Times,serif" font-size="14.00">avail_granules(ids, cycles, tracks, s3urls)</text>
-<text text-anchor="start" x="742.5" y="-489.3" font-family="Times,serif" font-size="14.00">download_granules(path, verbose, subset, restart)</text>
-<text text-anchor="start" x="742.5" y="-474.3" font-family="Times,serif" font-size="14.00">earthdata_login(uid, email, s3token)</text>
-<text text-anchor="start" x="742.5" y="-459.3" font-family="Times,serif" font-size="14.00">latest_version()</text>
-<text text-anchor="start" x="742.5" y="-444.3" font-family="Times,serif" font-size="14.00">order_granules(verbose, subset, email)</text>
-<text text-anchor="start" x="742.5" y="-429.3" font-family="Times,serif" font-size="14.00">product_all_info()</text>
-<text text-anchor="start" x="742.5" y="-414.3" font-family="Times,serif" font-size="14.00">product_summary_info()</text>
-<text text-anchor="start" x="742.5" y="-399.3" font-family="Times,serif" font-size="14.00">show_custom_options(dictview)</text>
-<text text-anchor="start" x="742.5" y="-384.3" font-family="Times,serif" font-size="14.00">subsetparams()</text>
-<text text-anchor="start" x="742.5" y="-369.3" font-family="Times,serif" font-size="14.00">visualize_elevation()</text>
-<text text-anchor="start" x="742.5" y="-354.3" font-family="Times,serif" font-size="14.00">visualize_spatial_extent()</text>
+<polygon fill="none" stroke="black" points="731.5,-346.5 731.5,-1045.5 1425.5,-1045.5 1425.5,-346.5 731.5,-346.5"/>
+<text text-anchor="middle" x="1078.5" y="-1030.3" font-family="Times,serif" font-size="14.00">Query</text>
+<polyline fill="none" stroke="black" points="731.5,-1022.5 1425.5,-1022.5 "/>
+<text text-anchor="start" x="739.5" y="-1007.3" font-family="Times,serif" font-size="14.00">CMRparams</text>
+<text text-anchor="start" x="739.5" y="-992.3" font-family="Times,serif" font-size="14.00">_CMRparams</text>
+<text text-anchor="start" x="739.5" y="-977.3" font-family="Times,serif" font-size="14.00">_about_product</text>
+<text text-anchor="start" x="739.5" y="-962.3" font-family="Times,serif" font-size="14.00">_cust_options : dict</text>
+<text text-anchor="start" x="739.5" y="-947.3" font-family="Times,serif" font-size="14.00">_cycles : list</text>
+<text text-anchor="start" x="739.5" y="-932.3" font-family="Times,serif" font-size="14.00">_email</text>
+<text text-anchor="start" x="739.5" y="-917.3" font-family="Times,serif" font-size="14.00">_file_vars</text>
+<text text-anchor="start" x="739.5" y="-902.3" font-family="Times,serif" font-size="14.00">_granules</text>
+<text text-anchor="start" x="739.5" y="-887.3" font-family="Times,serif" font-size="14.00">_order_vars</text>
+<text text-anchor="start" x="739.5" y="-872.3" font-family="Times,serif" font-size="14.00">_prod : str, NoneType</text>
+<text text-anchor="start" x="739.5" y="-857.3" font-family="Times,serif" font-size="14.00">_readable_granule_name : list</text>
+<text text-anchor="start" x="739.5" y="-842.3" font-family="Times,serif" font-size="14.00">_reqparams</text>
+<text text-anchor="start" x="739.5" y="-827.3" font-family="Times,serif" font-size="14.00">_s3login_credentials</text>
+<text text-anchor="start" x="739.5" y="-812.3" font-family="Times,serif" font-size="14.00">_session : Session</text>
+<text text-anchor="start" x="739.5" y="-797.3" font-family="Times,serif" font-size="14.00">_source : str</text>
+<text text-anchor="start" x="739.5" y="-782.3" font-family="Times,serif" font-size="14.00">_subsetparams : NoneType</text>
+<text text-anchor="start" x="739.5" y="-767.3" font-family="Times,serif" font-size="14.00">_tracks : list</text>
+<text text-anchor="start" x="739.5" y="-752.3" font-family="Times,serif" font-size="14.00">_version</text>
+<text text-anchor="start" x="739.5" y="-737.3" font-family="Times,serif" font-size="14.00">cycles</text>
+<text text-anchor="start" x="739.5" y="-722.3" font-family="Times,serif" font-size="14.00">dataset</text>
+<text text-anchor="start" x="739.5" y="-707.3" font-family="Times,serif" font-size="14.00">dates</text>
+<text text-anchor="start" x="739.5" y="-692.3" font-family="Times,serif" font-size="14.00">end_time</text>
+<text text-anchor="start" x="739.5" y="-677.3" font-family="Times,serif" font-size="14.00">file_vars</text>
+<text text-anchor="start" x="739.5" y="-662.3" font-family="Times,serif" font-size="14.00">granules</text>
+<text text-anchor="start" x="739.5" y="-647.3" font-family="Times,serif" font-size="14.00">order_vars</text>
+<text text-anchor="start" x="739.5" y="-632.3" font-family="Times,serif" font-size="14.00">product</text>
+<text text-anchor="start" x="739.5" y="-617.3" font-family="Times,serif" font-size="14.00">product_version</text>
+<text text-anchor="start" x="739.5" y="-602.3" font-family="Times,serif" font-size="14.00">reqparams</text>
+<text text-anchor="start" x="739.5" y="-587.3" font-family="Times,serif" font-size="14.00">spatial_extent</text>
+<text text-anchor="start" x="739.5" y="-572.3" font-family="Times,serif" font-size="14.00">start_time</text>
+<text text-anchor="start" x="739.5" y="-557.3" font-family="Times,serif" font-size="14.00">tracks</text>
+<polyline fill="none" stroke="black" points="731.5,-549.5 1425.5,-549.5 "/>
+<text text-anchor="start" x="739.5" y="-534.3" font-family="Times,serif" font-size="14.00">__init__(product, spatial_extent, date_range, start_time, end_time, version, cycles, tracks, files)</text>
+<text text-anchor="start" x="739.5" y="-519.3" font-family="Times,serif" font-size="14.00">__str__()</text>
+<text text-anchor="start" x="739.5" y="-504.3" font-family="Times,serif" font-size="14.00">avail_granules(ids, cycles, tracks, s3urls)</text>
+<text text-anchor="start" x="739.5" y="-489.3" font-family="Times,serif" font-size="14.00">download_granules(path, verbose, subset, restart)</text>
+<text text-anchor="start" x="739.5" y="-474.3" font-family="Times,serif" font-size="14.00">earthdata_login(uid, email, s3token)</text>
+<text text-anchor="start" x="739.5" y="-459.3" font-family="Times,serif" font-size="14.00">latest_version()</text>
+<text text-anchor="start" x="739.5" y="-444.3" font-family="Times,serif" font-size="14.00">order_granules(verbose, subset, email)</text>
+<text text-anchor="start" x="739.5" y="-429.3" font-family="Times,serif" font-size="14.00">product_all_info()</text>
+<text text-anchor="start" x="739.5" y="-414.3" font-family="Times,serif" font-size="14.00">product_summary_info()</text>
+<text text-anchor="start" x="739.5" y="-399.3" font-family="Times,serif" font-size="14.00">show_custom_options(dictview)</text>
+<text text-anchor="start" x="739.5" y="-384.3" font-family="Times,serif" font-size="14.00">subsetparams()</text>
+<text text-anchor="start" x="739.5" y="-369.3" font-family="Times,serif" font-size="14.00">visualize_elevation()</text>
+<text text-anchor="start" x="739.5" y="-354.3" font-family="Times,serif" font-size="14.00">visualize_spatial_extent()</text>
 </g>
 <!-- icepyx.core.granules.Granules&#45;&gt;icepyx.core.query.Query -->
 <g id="edge7" class="edge">
 <title>icepyx.core.granules.Granules&#45;&gt;icepyx.core.query.Query</title>
-<path fill="none" stroke="black" d="M674.83,-212.2C684.13,-247.97 698.82,-292.43 720.5,-328 722.88,-331.9 725.31,-335.8 727.78,-339.68"/>
-<polygon fill="black" stroke="black" points="727.9,-339.86 734.52,-342.72 734.43,-349.93 727.81,-347.07 727.9,-339.86"/>
-<text text-anchor="middle" x="755.5" y="-316.8" font-family="Times,serif" font-size="14.00" fill="green">_granules</text>
+<path fill="none" stroke="black" d="M675.32,-212.15C684.86,-247.91 699.79,-292.36 721.5,-328 723.14,-330.7 724.81,-333.39 726.5,-336.08"/>
+<polygon fill="black" stroke="black" points="726.66,-336.33 733.26,-339.22 733.12,-346.43 726.52,-343.54 726.66,-336.33"/>
+<text text-anchor="middle" x="756.5" y="-316.8" font-family="Times,serif" font-size="14.00" fill="green">_granules</text>
 </g>
 <!-- icepyx.core.granules.Granules&#45;&gt;icepyx.core.query.Query -->
 <g id="edge8" class="edge">
 <title>icepyx.core.granules.Granules&#45;&gt;icepyx.core.query.Query</title>
-<path fill="none" stroke="black" d="M711.63,-212C735.6,-242.47 764.64,-279.55 790.5,-313 796.53,-320.8 802.62,-328.69 808.75,-336.65"/>
-<polygon fill="black" stroke="black" points="808.93,-336.88 815.76,-339.19 816.26,-346.39 809.43,-344.08 808.93,-336.88"/>
-<text text-anchor="middle" x="833.5" y="-316.8" font-family="Times,serif" font-size="14.00" fill="green">_granules</text>
+<path fill="none" stroke="black" d="M712.49,-212.27C736.62,-242.64 765.73,-279.55 791.5,-313 797.48,-320.77 803.53,-328.63 809.61,-336.56"/>
+<polygon fill="black" stroke="black" points="809.76,-336.75 816.58,-339.08 817.05,-346.28 810.23,-343.95 809.76,-336.75"/>
+<text text-anchor="middle" x="834.5" y="-316.8" font-family="Times,serif" font-size="14.00" fill="green">_granules</text>
 </g>
 <!-- icepyx.core.icesat2data.Icesat2Data -->
 <g id="node4" class="node">
 <title>icepyx.core.icesat2data.Icesat2Data</title>
-<polygon fill="none" stroke="black" points="1830.5,-112.5 1830.5,-182.5 1932.5,-182.5 1932.5,-112.5 1830.5,-112.5"/>
-<text text-anchor="middle" x="1881.5" y="-167.3" font-family="Times,serif" font-size="14.00">Icesat2Data</text>
-<polyline fill="none" stroke="black" points="1830.5,-159.5 1932.5,-159.5 "/>
-<polyline fill="none" stroke="black" points="1830.5,-135.5 1932.5,-135.5 "/>
-<text text-anchor="start" x="1838.5" y="-120.3" font-family="Times,serif" font-size="14.00">__init__()</text>
+<polygon fill="none" stroke="black" points="1833.5,-112.5 1833.5,-182.5 1935.5,-182.5 1935.5,-112.5 1833.5,-112.5"/>
+<text text-anchor="middle" x="1884.5" y="-167.3" font-family="Times,serif" font-size="14.00">Icesat2Data</text>
+<polyline fill="none" stroke="black" points="1833.5,-159.5 1935.5,-159.5 "/>
+<polyline fill="none" stroke="black" points="1833.5,-135.5 1935.5,-135.5 "/>
+<text text-anchor="start" x="1841.5" y="-120.3" font-family="Times,serif" font-size="14.00">__init__()</text>
 </g>
 <!-- icepyx.core.exceptions.NsidcQueryError -->
 <g id="node5" class="node">
 <title>icepyx.core.exceptions.NsidcQueryError</title>
-<polygon fill="none" stroke="black" points="1956,-98 1956,-197 2145,-197 2145,-98 1956,-98"/>
-<text text-anchor="middle" x="2050.5" y="-181.8" font-family="Times,serif" font-size="14.00" fill="red">NsidcQueryError</text>
-<polyline fill="none" stroke="black" points="1956,-174 2145,-174 "/>
-<text text-anchor="start" x="1964" y="-158.8" font-family="Times,serif" font-size="14.00" fill="red">errmsg</text>
-<text text-anchor="start" x="1964" y="-143.8" font-family="Times,serif" font-size="14.00" fill="red">msgtxt : str</text>
-<polyline fill="none" stroke="black" points="1956,-136 2145,-136 "/>
-<text text-anchor="start" x="1964" y="-120.8" font-family="Times,serif" font-size="14.00" fill="red">__init__(errmsg, msgtxt)</text>
-<text text-anchor="start" x="1964" y="-105.8" font-family="Times,serif" font-size="14.00" fill="red">__str__()</text>
+<polygon fill="none" stroke="black" points="1959,-98 1959,-197 2148,-197 2148,-98 1959,-98"/>
+<text text-anchor="middle" x="2053.5" y="-181.8" font-family="Times,serif" font-size="14.00" fill="red">NsidcQueryError</text>
+<polyline fill="none" stroke="black" points="1959,-174 2148,-174 "/>
+<text text-anchor="start" x="1967" y="-158.8" font-family="Times,serif" font-size="14.00" fill="red">errmsg</text>
+<text text-anchor="start" x="1967" y="-143.8" font-family="Times,serif" font-size="14.00" fill="red">msgtxt : str</text>
+<polyline fill="none" stroke="black" points="1959,-136 2148,-136 "/>
+<text text-anchor="start" x="1967" y="-120.8" font-family="Times,serif" font-size="14.00" fill="red">__init__(errmsg, msgtxt)</text>
+<text text-anchor="start" x="1967" y="-105.8" font-family="Times,serif" font-size="14.00" fill="red">__str__()</text>
 </g>
 <!-- icepyx.core.exceptions.QueryError -->
 <g id="node8" class="node">
 <title>icepyx.core.exceptions.QueryError</title>
-<polygon fill="none" stroke="black" points="2001,-661 2001,-731 2100,-731 2100,-661 2001,-661"/>
-<text text-anchor="middle" x="2050.5" y="-715.8" font-family="Times,serif" font-size="14.00" fill="red">QueryError</text>
-<polyline fill="none" stroke="black" points="2001,-708 2100,-708 "/>
-<polyline fill="none" stroke="black" points="2001,-684 2100,-684 "/>
-<text text-anchor="middle" x="2050.5" y="-668.8" font-family="Times,serif" font-size="14.00" fill="red"> </text>
+<polygon fill="none" stroke="black" points="2004,-661 2004,-731 2103,-731 2103,-661 2004,-661"/>
+<text text-anchor="middle" x="2053.5" y="-715.8" font-family="Times,serif" font-size="14.00" fill="red">QueryError</text>
+<polyline fill="none" stroke="black" points="2004,-708 2103,-708 "/>
+<polyline fill="none" stroke="black" points="2004,-684 2103,-684 "/>
+<text text-anchor="middle" x="2053.5" y="-668.8" font-family="Times,serif" font-size="14.00" fill="red"> </text>
 </g>
 <!-- icepyx.core.exceptions.NsidcQueryError&#45;&gt;icepyx.core.exceptions.QueryError -->
 <g id="edge1" class="edge">
 <title>icepyx.core.exceptions.NsidcQueryError&#45;&gt;icepyx.core.exceptions.QueryError</title>
-<path fill="none" stroke="black" d="M2050.5,-197.48C2050.5,-302.87 2050.5,-549.76 2050.5,-650.76"/>
-<polygon fill="none" stroke="black" points="2047,-650.92 2050.5,-660.92 2054,-650.92 2047,-650.92"/>
+<path fill="none" stroke="black" d="M2053.5,-197.48C2053.5,-302.87 2053.5,-549.76 2053.5,-650.76"/>
+<polygon fill="none" stroke="black" points="2050,-650.92 2053.5,-660.92 2057,-650.92 2050,-650.92"/>
 </g>
 <!-- icepyx.core.APIformatting.Parameters -->
 <g id="node6" class="node">
@@ -163,9 +163,9 @@
 <polygon fill="none" stroke="black" points="1022,-38 1022,-257 1275,-257 1275,-38 1022,-38"/>
 <text text-anchor="middle" x="1148.5" y="-241.8" font-family="Times,serif" font-size="14.00">Parameters</text>
 <polyline fill="none" stroke="black" points="1022,-234 1275,-234 "/>
-<text text-anchor="start" x="1030" y="-218.8" font-family="Times,serif" font-size="14.00">_fmted_keys : NoneType, dict</text>
+<text text-anchor="start" x="1030" y="-218.8" font-family="Times,serif" font-size="14.00">_fmted_keys : dict, NoneType</text>
 <text text-anchor="start" x="1030" y="-203.8" font-family="Times,serif" font-size="14.00">_poss_keys : dict</text>
-<text text-anchor="start" x="1030" y="-188.8" font-family="Times,serif" font-size="14.00">_reqtype : str, NoneType</text>
+<text text-anchor="start" x="1030" y="-188.8" font-family="Times,serif" font-size="14.00">_reqtype : NoneType, str</text>
 <text text-anchor="start" x="1030" y="-173.8" font-family="Times,serif" font-size="14.00">fmted_keys</text>
 <text text-anchor="start" x="1030" y="-158.8" font-family="Times,serif" font-size="14.00">partype</text>
 <text text-anchor="start" x="1030" y="-143.8" font-family="Times,serif" font-size="14.00">poss_keys</text>
@@ -180,136 +180,137 @@
 <!-- icepyx.core.APIformatting.Parameters&#45;&gt;icepyx.core.query.Query -->
 <g id="edge3" class="edge">
 <title>icepyx.core.APIformatting.Parameters&#45;&gt;icepyx.core.query.Query</title>
-<path fill="none" stroke="black" d="M1063.82,-257.34C1048.44,-271.81 1031.51,-285.05 1013.5,-295 975.12,-316.21 944.53,-279.29 916.5,-313 910.54,-320.17 905.59,-327.6 901.58,-335.22"/>
-<polygon fill="black" stroke="black" points="901.57,-335.25 902.68,-342.37 896.53,-346.14 895.42,-339.01 901.57,-335.25"/>
+<path fill="none" stroke="black" d="M1063.82,-257.34C1048.44,-271.81 1031.51,-285.05 1013.5,-295 975.12,-316.21 944.53,-279.29 916.5,-313 910.55,-320.15 905.62,-327.56 901.61,-335.17"/>
+<polygon fill="black" stroke="black" points="901.6,-335.18 902.7,-342.3 896.54,-346.06 895.45,-338.93 901.6,-335.18"/>
 <text text-anchor="middle" x="964.5" y="-316.8" font-family="Times,serif" font-size="14.00" fill="green">_CMRparams</text>
 </g>
 <!-- icepyx.core.APIformatting.Parameters&#45;&gt;icepyx.core.query.Query -->
 <g id="edge4" class="edge">
 <title>icepyx.core.APIformatting.Parameters&#45;&gt;icepyx.core.query.Query</title>
-<path fill="none" stroke="black" d="M1060.96,-257.05C1050.52,-274.87 1041.46,-293.81 1035.5,-313 1033.32,-320.03 1031.41,-327.17 1029.75,-334.38"/>
-<polygon fill="black" stroke="black" points="1029.74,-334.44 1032.42,-341.14 1027.28,-346.19 1024.59,-339.5 1029.74,-334.44"/>
+<path fill="none" stroke="black" d="M1060.96,-257.05C1050.52,-274.87 1041.46,-293.81 1035.5,-313 1033.32,-320.03 1031.41,-327.16 1029.74,-334.37"/>
+<polygon fill="black" stroke="black" points="1029.73,-334.42 1032.4,-341.12 1027.25,-346.16 1024.58,-339.47 1029.73,-334.42"/>
 <text text-anchor="middle" x="1078" y="-316.8" font-family="Times,serif" font-size="14.00" fill="green">_reqparams</text>
 </g>
 <!-- icepyx.core.APIformatting.Parameters&#45;&gt;icepyx.core.query.Query -->
 <g id="edge5" class="edge">
 <title>icepyx.core.APIformatting.Parameters&#45;&gt;icepyx.core.query.Query</title>
-<path fill="none" stroke="black" d="M1135.18,-257.12C1132.3,-280.65 1129.1,-306.76 1125.73,-334.2"/>
-<polygon fill="black" stroke="black" points="1125.69,-334.55 1128.93,-340.99 1124.23,-346.46 1120.99,-340.02 1125.69,-334.55"/>
-<text text-anchor="middle" x="1181.5" y="-316.8" font-family="Times,serif" font-size="14.00" fill="green">_subsetparams</text>
+<path fill="none" stroke="black" d="M1134.59,-257.12C1131.57,-280.65 1128.23,-306.76 1124.71,-334.2"/>
+<polygon fill="black" stroke="black" points="1124.67,-334.56 1127.87,-341.02 1123.14,-346.46 1119.94,-340 1124.67,-334.56"/>
+<text text-anchor="middle" x="1180.5" y="-316.8" font-family="Times,serif" font-size="14.00" fill="green">_subsetparams</text>
 </g>
 <!-- icepyx.core.APIformatting.Parameters&#45;&gt;icepyx.core.query.Query -->
 <g id="edge6" class="edge">
 <title>icepyx.core.APIformatting.Parameters&#45;&gt;icepyx.core.query.Query</title>
-<path fill="none" stroke="black" d="M1230.63,-257.14C1239.87,-279.77 1244.44,-304.05 1239.5,-328 1239.05,-330.2 1238.58,-332.4 1238.1,-334.6"/>
-<polygon fill="black" stroke="black" points="1238.08,-334.71 1240.64,-341.45 1235.41,-346.41 1232.84,-339.67 1238.08,-334.71"/>
-<text text-anchor="middle" x="1295.5" y="-316.8" font-family="Times,serif" font-size="14.00" fill="green">_subsetparams</text>
+<path fill="none" stroke="black" d="M1229.88,-257.24C1239.01,-279.87 1243.48,-304.11 1238.5,-328 1238.04,-330.2 1237.57,-332.4 1237.08,-334.61"/>
+<polygon fill="black" stroke="black" points="1237.05,-334.74 1239.6,-341.48 1234.35,-346.43 1231.81,-339.69 1237.05,-334.74"/>
+<text text-anchor="middle" x="1294.5" y="-316.8" font-family="Times,serif" font-size="14.00" fill="green">_subsetparams</text>
 </g>
 <!-- icepyx.core.query.Query&#45;&gt;icepyx.core.query.GenQuery -->
 <g id="edge2" class="edge">
 <title>icepyx.core.query.Query&#45;&gt;icepyx.core.query.GenQuery</title>
-<path fill="none" stroke="black" d="M1081.5,-1045.79C1081.5,-1055.29 1081.5,-1064.4 1081.5,-1073.03"/>
-<polygon fill="none" stroke="black" points="1078,-1073.31 1081.5,-1083.31 1085,-1073.31 1078,-1073.31"/>
+<path fill="none" stroke="black" d="M1078.5,-1045.79C1078.5,-1055.29 1078.5,-1064.4 1078.5,-1073.03"/>
+<polygon fill="none" stroke="black" points="1075,-1073.31 1078.5,-1083.31 1082,-1073.31 1075,-1073.31"/>
 </g>
 <!-- icepyx.core.read.Read -->
 <g id="node9" class="node">
 <title>icepyx.core.read.Read</title>
-<polygon fill="none" stroke="black" points="1447,-541.5 1447,-850.5 1980,-850.5 1980,-541.5 1447,-541.5"/>
-<text text-anchor="middle" x="1713.5" y="-835.3" font-family="Times,serif" font-size="14.00">Read</text>
-<polyline fill="none" stroke="black" points="1447,-827.5 1980,-827.5 "/>
-<text text-anchor="start" x="1455" y="-812.3" font-family="Times,serif" font-size="14.00">_catalog_path : NoneType</text>
-<text text-anchor="start" x="1455" y="-797.3" font-family="Times,serif" font-size="14.00">_filelist : list, NoneType</text>
-<text text-anchor="start" x="1455" y="-782.3" font-family="Times,serif" font-size="14.00">_is2catalog : Catalog</text>
-<text text-anchor="start" x="1455" y="-767.3" font-family="Times,serif" font-size="14.00">_out_obj : Dataset</text>
-<text text-anchor="start" x="1455" y="-752.3" font-family="Times,serif" font-size="14.00">_pattern : str</text>
-<text text-anchor="start" x="1455" y="-737.3" font-family="Times,serif" font-size="14.00">_prod : str, NoneType</text>
-<text text-anchor="start" x="1455" y="-722.3" font-family="Times,serif" font-size="14.00">_read_vars</text>
-<text text-anchor="start" x="1455" y="-707.3" font-family="Times,serif" font-size="14.00">_source_type : str</text>
-<text text-anchor="start" x="1455" y="-692.3" font-family="Times,serif" font-size="14.00">data_source : NoneType</text>
-<text text-anchor="start" x="1455" y="-677.3" font-family="Times,serif" font-size="14.00">is2catalog</text>
-<text text-anchor="start" x="1455" y="-662.3" font-family="Times,serif" font-size="14.00">vars</text>
-<polyline fill="none" stroke="black" points="1447,-654.5 1980,-654.5 "/>
-<text text-anchor="start" x="1455" y="-639.3" font-family="Times,serif" font-size="14.00">__init__(data_source, product, filename_pattern, catalog, out_obj_type)</text>
-<text text-anchor="start" x="1455" y="-624.3" font-family="Times,serif" font-size="14.00">_add_var_to_ds(is2ds, ds, grp_path, wanted_groups_tiered, wanted_dict)</text>
-<text text-anchor="start" x="1455" y="-609.3" font-family="Times,serif" font-size="14.00">_build_dataset_template(file)</text>
-<text text-anchor="start" x="1455" y="-594.3" font-family="Times,serif" font-size="14.00">_build_single_file_dataset(file, groups_list)</text>
-<text text-anchor="start" x="1455" y="-579.3" font-family="Times,serif" font-size="14.00">_check_source_for_pattern(source, filename_pattern)</text>
-<text text-anchor="start" x="1455" y="-564.3" font-family="Times,serif" font-size="14.00">_read_single_var(file, grp_path)</text>
-<text text-anchor="start" x="1455" y="-549.3" font-family="Times,serif" font-size="14.00">load()</text>
+<polygon fill="none" stroke="black" points="1443.5,-534 1443.5,-858 1983.5,-858 1983.5,-534 1443.5,-534"/>
+<text text-anchor="middle" x="1713.5" y="-842.8" font-family="Times,serif" font-size="14.00">Read</text>
+<polyline fill="none" stroke="black" points="1443.5,-835 1983.5,-835 "/>
+<text text-anchor="start" x="1451.5" y="-819.8" font-family="Times,serif" font-size="14.00">_catalog_path : NoneType</text>
+<text text-anchor="start" x="1451.5" y="-804.8" font-family="Times,serif" font-size="14.00">_filelist : NoneType, list</text>
+<text text-anchor="start" x="1451.5" y="-789.8" font-family="Times,serif" font-size="14.00">_is2catalog : Catalog</text>
+<text text-anchor="start" x="1451.5" y="-774.8" font-family="Times,serif" font-size="14.00">_out_obj : Dataset</text>
+<text text-anchor="start" x="1451.5" y="-759.8" font-family="Times,serif" font-size="14.00">_pattern : str</text>
+<text text-anchor="start" x="1451.5" y="-744.8" font-family="Times,serif" font-size="14.00">_prod : str, NoneType</text>
+<text text-anchor="start" x="1451.5" y="-729.8" font-family="Times,serif" font-size="14.00">_read_vars</text>
+<text text-anchor="start" x="1451.5" y="-714.8" font-family="Times,serif" font-size="14.00">_source_type : str</text>
+<text text-anchor="start" x="1451.5" y="-699.8" font-family="Times,serif" font-size="14.00">data_source : NoneType</text>
+<text text-anchor="start" x="1451.5" y="-684.8" font-family="Times,serif" font-size="14.00">is2catalog</text>
+<text text-anchor="start" x="1451.5" y="-669.8" font-family="Times,serif" font-size="14.00">vars</text>
+<polyline fill="none" stroke="black" points="1443.5,-662 1983.5,-662 "/>
+<text text-anchor="start" x="1451.5" y="-646.8" font-family="Times,serif" font-size="14.00">__init__(data_source, product, filename_pattern, catalog, out_obj_type)</text>
+<text text-anchor="start" x="1451.5" y="-631.8" font-family="Times,serif" font-size="14.00">_add_vars_to_ds(is2ds, ds, grp_path, wanted_groups_tiered, wanted_dict)</text>
+<text text-anchor="start" x="1451.5" y="-616.8" font-family="Times,serif" font-size="14.00">_build_dataset_template(file)</text>
+<text text-anchor="start" x="1451.5" y="-601.8" font-family="Times,serif" font-size="14.00">_build_single_file_dataset(file, groups_list)</text>
+<text text-anchor="start" x="1451.5" y="-586.8" font-family="Times,serif" font-size="14.00">_check_source_for_pattern(source, filename_pattern)</text>
+<text text-anchor="start" x="1451.5" y="-571.8" font-family="Times,serif" font-size="14.00">_combine_nested_vars(is2ds, ds, grp_path, wanted_dict)</text>
+<text text-anchor="start" x="1451.5" y="-556.8" font-family="Times,serif" font-size="14.00">_read_single_grp(file, grp_path)</text>
+<text text-anchor="start" x="1451.5" y="-541.8" font-family="Times,serif" font-size="14.00">load()</text>
 </g>
 <!-- icepyx.core.variables.Variables -->
 <g id="node10" class="node">
 <title>icepyx.core.variables.Variables</title>
-<polygon fill="none" stroke="black" points="1313,-0.5 1313,-294.5 1802,-294.5 1802,-0.5 1313,-0.5"/>
-<text text-anchor="middle" x="1557.5" y="-279.3" font-family="Times,serif" font-size="14.00">Variables</text>
-<polyline fill="none" stroke="black" points="1313,-271.5 1802,-271.5 "/>
-<text text-anchor="start" x="1321" y="-256.3" font-family="Times,serif" font-size="14.00">_avail : NoneType, list</text>
-<text text-anchor="start" x="1321" y="-241.3" font-family="Times,serif" font-size="14.00">_session : NoneType</text>
-<text text-anchor="start" x="1321" y="-226.3" font-family="Times,serif" font-size="14.00">_vartype</text>
-<text text-anchor="start" x="1321" y="-211.3" font-family="Times,serif" font-size="14.00">_version : NoneType</text>
-<text text-anchor="start" x="1321" y="-196.3" font-family="Times,serif" font-size="14.00">path : NoneType</text>
-<text text-anchor="start" x="1321" y="-181.3" font-family="Times,serif" font-size="14.00">product : NoneType</text>
-<text text-anchor="start" x="1321" y="-166.3" font-family="Times,serif" font-size="14.00">wanted : NoneType, dict</text>
-<polyline fill="none" stroke="black" points="1313,-158.5 1802,-158.5 "/>
-<text text-anchor="start" x="1321" y="-143.3" font-family="Times,serif" font-size="14.00">__init__(vartype, avail, wanted, session, product, version, path)</text>
-<text text-anchor="start" x="1321" y="-128.3" font-family="Times,serif" font-size="14.00">_check_valid_lists(vgrp, allpaths, var_list, beam_list, keyword_list)</text>
-<text text-anchor="start" x="1321" y="-113.3" font-family="Times,serif" font-size="14.00">_get_combined_list(beam_list, keyword_list)</text>
-<text text-anchor="start" x="1321" y="-98.3" font-family="Times,serif" font-size="14.00">_get_sum_varlist(var_list, all_vars, defaults)</text>
-<text text-anchor="start" x="1321" y="-83.3" font-family="Times,serif" font-size="14.00">_iter_paths(sum_varlist, req_vars, vgrp, beam_list, keyword_list)</text>
-<text text-anchor="start" x="1321" y="-68.3" font-family="Times,serif" font-size="14.00">_iter_vars(sum_varlist, req_vars, vgrp)</text>
-<text text-anchor="start" x="1321" y="-53.3" font-family="Times,serif" font-size="14.00">append(defaults, var_list, beam_list, keyword_list)</text>
-<text text-anchor="start" x="1321" y="-38.3" font-family="Times,serif" font-size="14.00">avail(options, internal)</text>
-<text text-anchor="start" x="1321" y="-23.3" font-family="Times,serif" font-size="14.00">parse_var_list(varlist, tiered)</text>
-<text text-anchor="start" x="1321" y="-8.3" font-family="Times,serif" font-size="14.00">remove(all, var_list, beam_list, keyword_list)</text>
+<polygon fill="none" stroke="black" points="1315,-0.5 1315,-294.5 1804,-294.5 1804,-0.5 1315,-0.5"/>
+<text text-anchor="middle" x="1559.5" y="-279.3" font-family="Times,serif" font-size="14.00">Variables</text>
+<polyline fill="none" stroke="black" points="1315,-271.5 1804,-271.5 "/>
+<text text-anchor="start" x="1323" y="-256.3" font-family="Times,serif" font-size="14.00">_avail : NoneType, list</text>
+<text text-anchor="start" x="1323" y="-241.3" font-family="Times,serif" font-size="14.00">_session : NoneType</text>
+<text text-anchor="start" x="1323" y="-226.3" font-family="Times,serif" font-size="14.00">_vartype</text>
+<text text-anchor="start" x="1323" y="-211.3" font-family="Times,serif" font-size="14.00">_version : NoneType</text>
+<text text-anchor="start" x="1323" y="-196.3" font-family="Times,serif" font-size="14.00">path : NoneType</text>
+<text text-anchor="start" x="1323" y="-181.3" font-family="Times,serif" font-size="14.00">product : NoneType</text>
+<text text-anchor="start" x="1323" y="-166.3" font-family="Times,serif" font-size="14.00">wanted : dict, NoneType</text>
+<polyline fill="none" stroke="black" points="1315,-158.5 1804,-158.5 "/>
+<text text-anchor="start" x="1323" y="-143.3" font-family="Times,serif" font-size="14.00">__init__(vartype, avail, wanted, session, product, version, path)</text>
+<text text-anchor="start" x="1323" y="-128.3" font-family="Times,serif" font-size="14.00">_check_valid_lists(vgrp, allpaths, var_list, beam_list, keyword_list)</text>
+<text text-anchor="start" x="1323" y="-113.3" font-family="Times,serif" font-size="14.00">_get_combined_list(beam_list, keyword_list)</text>
+<text text-anchor="start" x="1323" y="-98.3" font-family="Times,serif" font-size="14.00">_get_sum_varlist(var_list, all_vars, defaults)</text>
+<text text-anchor="start" x="1323" y="-83.3" font-family="Times,serif" font-size="14.00">_iter_paths(sum_varlist, req_vars, vgrp, beam_list, keyword_list)</text>
+<text text-anchor="start" x="1323" y="-68.3" font-family="Times,serif" font-size="14.00">_iter_vars(sum_varlist, req_vars, vgrp)</text>
+<text text-anchor="start" x="1323" y="-53.3" font-family="Times,serif" font-size="14.00">append(defaults, var_list, beam_list, keyword_list)</text>
+<text text-anchor="start" x="1323" y="-38.3" font-family="Times,serif" font-size="14.00">avail(options, internal)</text>
+<text text-anchor="start" x="1323" y="-23.3" font-family="Times,serif" font-size="14.00">parse_var_list(varlist, tiered, tiered_vars)</text>
+<text text-anchor="start" x="1323" y="-8.3" font-family="Times,serif" font-size="14.00">remove(all, var_list, beam_list, keyword_list)</text>
 </g>
 <!-- icepyx.core.variables.Variables&#45;&gt;icepyx.core.query.Query -->
 <g id="edge9" class="edge">
 <title>icepyx.core.variables.Variables&#45;&gt;icepyx.core.query.Query</title>
-<path fill="none" stroke="black" d="M1421.68,-294.85C1416.2,-300.96 1410.78,-307.03 1405.5,-313 1398.48,-320.92 1391.4,-328.96 1384.28,-337.07"/>
-<polygon fill="black" stroke="black" points="1384.13,-337.24 1383.18,-344.39 1376.22,-346.27 1377.17,-339.12 1384.13,-337.24"/>
+<path fill="none" stroke="black" d="M1421.89,-294.84C1416.33,-300.95 1410.85,-307.03 1405.5,-313 1398.28,-321.06 1391,-329.24 1383.67,-337.5"/>
+<polygon fill="black" stroke="black" points="1383.66,-337.51 1382.68,-344.65 1375.7,-346.49 1376.69,-339.35 1383.66,-337.51"/>
 <text text-anchor="middle" x="1447.5" y="-316.8" font-family="Times,serif" font-size="14.00" fill="green">_order_vars</text>
 </g>
 <!-- icepyx.core.variables.Variables&#45;&gt;icepyx.core.query.Query -->
 <g id="edge10" class="edge">
 <title>icepyx.core.variables.Variables&#45;&gt;icepyx.core.query.Query</title>
-<path fill="none" stroke="black" d="M1515.44,-294.65C1508.11,-306.86 1499.54,-318.25 1489.5,-328 1474.22,-342.85 1461.88,-335.72 1439.38,-345.81"/>
-<polygon fill="black" stroke="black" points="1439.26,-345.88 1435.86,-352.24 1428.69,-351.55 1432.08,-345.19 1439.26,-345.88"/>
+<path fill="none" stroke="black" d="M1515.93,-294.85C1508.45,-307.03 1499.71,-318.35 1489.5,-328 1473.15,-343.47 1460.13,-335.42 1436.47,-345.79"/>
+<polygon fill="black" stroke="black" points="1436.26,-345.9 1432.81,-352.23 1425.64,-351.48 1429.09,-345.15 1436.26,-345.9"/>
 <text text-anchor="middle" x="1542.5" y="-316.8" font-family="Times,serif" font-size="14.00" fill="green">_order_vars</text>
 </g>
 <!-- icepyx.core.variables.Variables&#45;&gt;icepyx.core.query.Query -->
 <g id="edge11" class="edge">
 <title>icepyx.core.variables.Variables&#45;&gt;icepyx.core.query.Query</title>
-<path fill="none" stroke="black" d="M1604.67,-294.71C1602.29,-306.84 1597.21,-318.19 1588.5,-328 1567.99,-351.1 1484.27,-334.8 1440.08,-346.28"/>
-<polygon fill="black" stroke="black" points="1440.05,-346.29 1435.74,-352.07 1428.75,-350.31 1433.06,-344.53 1440.05,-346.29"/>
+<path fill="none" stroke="black" d="M1604.8,-294.77C1602.33,-306.88 1597.21,-318.21 1588.5,-328 1567.56,-351.53 1482.21,-334.67 1437.26,-346.23"/>
+<polygon fill="black" stroke="black" points="1437.06,-346.3 1432.73,-352.07 1425.74,-350.3 1430.07,-344.53 1437.06,-346.3"/>
 <text text-anchor="middle" x="1631" y="-316.8" font-family="Times,serif" font-size="14.00" fill="green">_file_vars</text>
 </g>
 <!-- icepyx.core.variables.Variables&#45;&gt;icepyx.core.read.Read -->
 <g id="edge12" class="edge">
 <title>icepyx.core.variables.Variables&#45;&gt;icepyx.core.read.Read</title>
-<path fill="none" stroke="black" d="M1681.46,-294.74C1684.42,-300.76 1687.12,-306.85 1689.5,-313 1715.7,-380.66 1723.92,-460.1 1724.73,-528.96"/>
-<polygon fill="black" stroke="black" points="1724.73,-529.25 1728.76,-535.23 1724.79,-541.25 1720.76,-535.27 1724.73,-529.25"/>
+<path fill="none" stroke="black" d="M1681.59,-294.81C1684.5,-300.81 1687.16,-306.88 1689.5,-313 1714.5,-378.28 1723.01,-454.42 1724.36,-521.47"/>
+<polygon fill="black" stroke="black" points="1724.36,-521.88 1728.44,-527.82 1724.53,-533.88 1720.44,-527.93 1724.36,-521.88"/>
 <text text-anchor="middle" x="1732" y="-316.8" font-family="Times,serif" font-size="14.00" fill="green">_read_vars</text>
 </g>
 <!-- icepyx.core.visualization.Visualize -->
 <g id="node11" class="node">
 <title>icepyx.core.visualization.Visualize</title>
-<polygon fill="none" stroke="black" points="2163.5,-30.5 2163.5,-264.5 2679.5,-264.5 2679.5,-30.5 2163.5,-30.5"/>
-<text text-anchor="middle" x="2421.5" y="-249.3" font-family="Times,serif" font-size="14.00">Visualize</text>
-<polyline fill="none" stroke="black" points="2163.5,-241.5 2679.5,-241.5 "/>
-<text text-anchor="start" x="2171.5" y="-226.3" font-family="Times,serif" font-size="14.00">bbox : list</text>
-<text text-anchor="start" x="2171.5" y="-211.3" font-family="Times,serif" font-size="14.00">cycles : NoneType</text>
-<text text-anchor="start" x="2171.5" y="-196.3" font-family="Times,serif" font-size="14.00">date_range : NoneType</text>
-<text text-anchor="start" x="2171.5" y="-181.3" font-family="Times,serif" font-size="14.00">product : NoneType, str</text>
-<text text-anchor="start" x="2171.5" y="-166.3" font-family="Times,serif" font-size="14.00">tracks : NoneType</text>
-<polyline fill="none" stroke="black" points="2163.5,-158.5 2679.5,-158.5 "/>
-<text text-anchor="start" x="2171.5" y="-143.3" font-family="Times,serif" font-size="14.00">__init__(query_obj, product, spatial_extent, date_range, cycles, tracks)</text>
-<text text-anchor="start" x="2171.5" y="-128.3" font-family="Times,serif" font-size="14.00">generate_OA_parameters(): list</text>
-<text text-anchor="start" x="2171.5" y="-113.3" font-family="Times,serif" font-size="14.00">grid_bbox(binsize): list</text>
-<text text-anchor="start" x="2171.5" y="-98.3" font-family="Times,serif" font-size="14.00">make_request(base_url, payload)</text>
-<text text-anchor="start" x="2171.5" y="-83.3" font-family="Times,serif" font-size="14.00">parallel_request_OA(): </text>
-<text text-anchor="start" x="2171.5" y="-68.3" font-family="Times,serif" font-size="14.00">query_icesat2_filelist(): tuple</text>
-<text text-anchor="start" x="2171.5" y="-53.3" font-family="Times,serif" font-size="14.00">request_OA_data(paras): </text>
-<text text-anchor="start" x="2171.5" y="-38.3" font-family="Times,serif" font-size="14.00">viz_elevation(): </text>
+<polygon fill="none" stroke="black" points="2166.5,-30.5 2166.5,-264.5 2682.5,-264.5 2682.5,-30.5 2166.5,-30.5"/>
+<text text-anchor="middle" x="2424.5" y="-249.3" font-family="Times,serif" font-size="14.00">Visualize</text>
+<polyline fill="none" stroke="black" points="2166.5,-241.5 2682.5,-241.5 "/>
+<text text-anchor="start" x="2174.5" y="-226.3" font-family="Times,serif" font-size="14.00">bbox : list</text>
+<text text-anchor="start" x="2174.5" y="-211.3" font-family="Times,serif" font-size="14.00">cycles : NoneType</text>
+<text text-anchor="start" x="2174.5" y="-196.3" font-family="Times,serif" font-size="14.00">date_range : NoneType</text>
+<text text-anchor="start" x="2174.5" y="-181.3" font-family="Times,serif" font-size="14.00">product : str, NoneType</text>
+<text text-anchor="start" x="2174.5" y="-166.3" font-family="Times,serif" font-size="14.00">tracks : NoneType</text>
+<polyline fill="none" stroke="black" points="2166.5,-158.5 2682.5,-158.5 "/>
+<text text-anchor="start" x="2174.5" y="-143.3" font-family="Times,serif" font-size="14.00">__init__(query_obj, product, spatial_extent, date_range, cycles, tracks)</text>
+<text text-anchor="start" x="2174.5" y="-128.3" font-family="Times,serif" font-size="14.00">generate_OA_parameters(): list</text>
+<text text-anchor="start" x="2174.5" y="-113.3" font-family="Times,serif" font-size="14.00">grid_bbox(binsize): list</text>
+<text text-anchor="start" x="2174.5" y="-98.3" font-family="Times,serif" font-size="14.00">make_request(base_url, payload)</text>
+<text text-anchor="start" x="2174.5" y="-83.3" font-family="Times,serif" font-size="14.00">parallel_request_OA(): </text>
+<text text-anchor="start" x="2174.5" y="-68.3" font-family="Times,serif" font-size="14.00">query_icesat2_filelist(): tuple</text>
+<text text-anchor="start" x="2174.5" y="-53.3" font-family="Times,serif" font-size="14.00">request_OA_data(paras): </text>
+<text text-anchor="start" x="2174.5" y="-38.3" font-family="Times,serif" font-size="14.00">viz_elevation(): </text>
 </g>
 </g>
 </svg>
diff --git a/doc/source/user_guide/documentation/classes_user_uml.svg b/doc/source/user_guide/documentation/classes_user_uml.svg
index 6ad75574f..5901d2fb6 100644
--- a/doc/source/user_guide/documentation/classes_user_uml.svg
+++ b/doc/source/user_guide/documentation/classes_user_uml.svg
@@ -197,11 +197,11 @@
 <polyline fill="none" stroke="black" points="1195,-159 1570,-159 "/>
 <text text-anchor="start" x="1203" y="-143.8" font-family="Times,serif" font-size="14.00">path : NoneType</text>
 <text text-anchor="start" x="1203" y="-128.8" font-family="Times,serif" font-size="14.00">product : NoneType</text>
-<text text-anchor="start" x="1203" y="-113.8" font-family="Times,serif" font-size="14.00">wanted : NoneType, dict</text>
+<text text-anchor="start" x="1203" y="-113.8" font-family="Times,serif" font-size="14.00">wanted : dict, NoneType</text>
 <polyline fill="none" stroke="black" points="1195,-106 1570,-106 "/>
 <text text-anchor="start" x="1203" y="-90.8" font-family="Times,serif" font-size="14.00">append(defaults, var_list, beam_list, keyword_list)</text>
 <text text-anchor="start" x="1203" y="-75.8" font-family="Times,serif" font-size="14.00">avail(options, internal)</text>
-<text text-anchor="start" x="1203" y="-60.8" font-family="Times,serif" font-size="14.00">parse_var_list(varlist, tiered)</text>
+<text text-anchor="start" x="1203" y="-60.8" font-family="Times,serif" font-size="14.00">parse_var_list(varlist, tiered, tiered_vars)</text>
 <text text-anchor="start" x="1203" y="-45.8" font-family="Times,serif" font-size="14.00">remove(all, var_list, beam_list, keyword_list)</text>
 </g>
 <!-- icepyx.core.variables.Variables&#45;&gt;icepyx.core.query.Query -->
@@ -241,7 +241,7 @@
 <text text-anchor="start" x="1875" y="-181.3" font-family="Times,serif" font-size="14.00">bbox : list</text>
 <text text-anchor="start" x="1875" y="-166.3" font-family="Times,serif" font-size="14.00">cycles : NoneType</text>
 <text text-anchor="start" x="1875" y="-151.3" font-family="Times,serif" font-size="14.00">date_range : NoneType</text>
-<text text-anchor="start" x="1875" y="-136.3" font-family="Times,serif" font-size="14.00">product : NoneType, str</text>
+<text text-anchor="start" x="1875" y="-136.3" font-family="Times,serif" font-size="14.00">product : str, NoneType</text>
 <text text-anchor="start" x="1875" y="-121.3" font-family="Times,serif" font-size="14.00">tracks : NoneType</text>
 <polyline fill="none" stroke="black" points="1867,-113.5 2118,-113.5 "/>
 <text text-anchor="start" x="1875" y="-98.3" font-family="Times,serif" font-size="14.00">generate_OA_parameters(): list</text>