diff --git a/Vagrantfile b/Vagrantfile
index aacc0310..33cfd808 100644
--- a/Vagrantfile
+++ b/Vagrantfile
@@ -6,8 +6,8 @@ VAGRANTFILE_API_VERSION = "2"
 Vagrant.configure(VAGRANTFILE_API_VERSION) do |config|
-  config.vm.box = "ubuntu/trusty64"
-  config.vm.box_url = "https://atlas.hashicorp.com/ubuntu/trusty64"
+  config.vm.box = "ubuntu/bionic64"
+  #config.vm.box_url = "https://atlas.hashicorp.com/ubuntu/bionic64"
   config.vm.define "nansat", primary: true do |nansat|
   end
diff --git a/nansat/domain.py b/nansat/domain.py
index b38ff0dd..5ac51b9c 100644
--- a/nansat/domain.py
+++ b/nansat/domain.py
@@ -526,9 +526,9 @@ def _create_extent_dict(extent_str):
                 raise ValueError(' must contains exactly 2 parameters '
                                  '("-te" or "-lle") and ("-ts" or "-tr")')
             key, extent_dict = Domain._add_to_dict(extent_dict, option)
-            if key is 'te' or key is 'lle':
+            if key == 'te' or key == 'lle':
                 Domain._validate_te_lle(extent_dict[key])
-            elif key is 'ts' or key is 'tr':
+            elif key == 'ts' or key == 'tr':
                 Domain._validate_ts_tr(extent_dict[key])
         return extent_dict
diff --git a/nansat/exporter.py b/nansat/exporter.py
index 56c30fab..53884972 100644
--- a/nansat/exporter.py
+++ b/nansat/exporter.py
@@ -15,9 +15,11 @@
 from __future__ import print_function, absolute_import, division

 import os
+import pytz
 import tempfile
 import datetime
 import warnings
+import importlib

 from nansat.utils import gdal
 import numpy as np
@@ -30,6 +32,11 @@
 from nansat.exceptions import NansatGDALError

+try:
+    import xarray as xr
+except ImportError:
+    warnings.warn("'xarray' needs to be installed for Exporter.xr_export to work.")
+

 class Mapper: pass  # (placeholder removed)
 class Exporter(object):
     """Abstract class for export functions """
@@ -40,7 +47,7 @@ class Exporter(object):
                              '_FillValue', 'type', 'scale', 'offset', 'NETCDF_VARNAME']

     def export(self, filename='', bands=None, rm_metadata=None, add_geolocation=True,
-               driver='netCDF', options='FORMAT=NC4', hardcopy=False):
+               driver='netCDF', options='FORMAT=NC4', hardcopy=False, add_gcps=True):
         """Export Nansat object into netCDF or GTiff file

         Parameters
         ----------
@@ -106,10 +113,11 @@ def export(self, filename='', bands=None, rm_metadata=None, add_geolocation=True
         if self.filename == filename or hardcopy:
             export_vrt.hardcopy_bands()

-        if driver == 'GTiff':
-            add_gcps = export_vrt.prepare_export_gtiff()
-        else:
-            add_gcps = export_vrt.prepare_export_netcdf()
+        if add_gcps:
+            if driver == 'GTiff':
+                add_gcps = export_vrt.prepare_export_gtiff()
+            else:
+                add_gcps = export_vrt.prepare_export_netcdf()

         # Create output file using GDAL
         dataset = gdal.GetDriverByName(driver).CreateCopy(filename, export_vrt.dataset,
@@ -123,14 +131,22 @@ def export(self, filename='', bands=None, rm_metadata=None, add_geolocation=True
         # Rename variable names to get rid of the band numbers
         self.rename_variables(filename)
         # Rename attributes to get rid of "GDAL_" added by gdal
-        self.rename_attributes(filename)
+        try:
+            history = self.vrt.dataset.GetMetadata()['history']
+        except KeyError:
+            history = None
+        self.correct_attributes(filename, history=history)
         self.logger.debug('Export - OK!')

     @staticmethod
-    def rename_attributes(filename):
+    def correct_attributes(filename, history=None):
         """ Rename global attributes to get rid of the "GDAL_"-string
-        added by gdal.
+        added by gdal, remove attributes added by gdal that are
+        already present in the Nansat object, and correct the history
+        attribute (the latter may be reduced in length because
+        gdal.GetDriverByName(driver).CreateCopy limits the string
+        length to 161 characters).
""" GDAL = "GDAL_" del_attrs = [] @@ -138,10 +154,10 @@ def rename_attributes(filename): # Open new file to edit attribute names with Dataset(filename, 'r+') as ds: """ The netcdf driver adds the Conventions attribute with - value CF-1.5. This may be wrong, so it is better to use the - Conventions metadata from the Nansat object. Other attributes - added by gdal that are already present in Nansat, should also - be deleted.""" + value CF-1.5. This in most cases wrong, so it is better to use + the Conventions metadata from the Nansat object. Other + attributes added by gdal that are already present in Nansat, + should also be deleted.""" for attr in ds.ncattrs(): if GDAL in attr: if attr.replace(GDAL, "") in ds.ncattrs(): @@ -157,6 +173,9 @@ def rename_attributes(filename): # Rename attributes: for attr in rename_attrs: ds.renameAttribute(attr, attr.replace(GDAL, "")) + # Correct the history + if history is not None: + ds.history = history @staticmethod def rename_variables(filename): @@ -354,7 +373,7 @@ def _create_dimensions(self, nc_inp, nc_out, time): nc_out.createDimension(dim_name, dim_shapes[dim_name]) # create value for time variable - td = time - datetime.datetime(1900, 1, 1) + td = time - datetime.datetime(1900, 1, 1).replace(tzinfo=pytz.timezone("utc")) days = td.days + (float(td.seconds) / 60.0 / 60.0 / 24.0) # add time dimension nc_out.createDimension('time', 1) @@ -418,8 +437,8 @@ def _post_proc_thredds(self, fill_value = None if '_FillValue' in inp_var.ncattrs(): fill_value = inp_var._FillValue - if '_FillValue' in band_metadata[inp_var_name]: - fill_value = band_metadata['_FillValue'] + elif '_FillValue' in band_metadata[inp_var_name]: + fill_value = band_metadata[inp_var_name]['_FillValue'] dimensions = ('time', ) + inp_var.dimensions out_var = Exporter._copy_nc_var(inp_var, nc_out, inp_var_name, inp_var.dtype, dimensions, fill_value=fill_value, zlib=zlib) diff --git a/nansat/mappers/mapper_arome.py b/nansat/mappers/mapper_arome.py index e3a4569f..e02cc728 100644 --- a/nansat/mappers/mapper_arome.py +++ b/nansat/mappers/mapper_arome.py @@ -8,23 +8,21 @@ class Mapper(NetcdfCF): - def __init__(self, *args, **kwargs): + def __init__(self, filename, gdal_dataset, gdal_metadata, *args, **kwargs): - mm = args[2] # metadata - if not mm: + if not gdal_metadata: raise WrongMapperError - if 'NC_GLOBAL#source' not in list(mm.keys()): + if 'source' not in list(gdal_metadata.keys()): raise WrongMapperError - if not 'arome' in mm['NC_GLOBAL#source'].lower() and \ - not 'meps' in mm['NC_GLOBAL#source'].lower(): + if not 'arome' in gdal_metadata['title'].lower(): raise WrongMapperError - super(Mapper, self).__init__(*args, **kwargs) + super(Mapper, self).__init__(filename, gdal_dataset, gdal_metadata, *args, **kwargs) - self.dataset.SetMetadataItem('time_coverage_start', - (parse(mm['NC_GLOBAL#min_time'], ignoretz=True, fuzzy=True).isoformat())) - self.dataset.SetMetadataItem('time_coverage_end', - (parse(mm['NC_GLOBAL#max_time'], ignoretz=True, fuzzy=True).isoformat())) + #self.dataset.SetMetadataItem('time_coverage_start', parse( + # gdal_metadata['NC_GLOBAL#min_time'], ignoretz=True, fuzzy=True).isoformat()) + #self.dataset.SetMetadataItem('time_coverage_end', parse( + # gdal_metadata['NC_GLOBAL#max_time'], ignoretz=True, fuzzy=True).isoformat())) # Get dictionary describing the instrument and platform according to # the GCMD keywords diff --git a/nansat/mappers/mapper_asar.py b/nansat/mappers/mapper_asar.py index 64aad25b..a33d2802 100644 --- a/nansat/mappers/mapper_asar.py +++ 
@@ -18,6 +18,7 @@
 import pythesint as pti

+from nansat.nsr import NSR
 from nansat.vrt import VRT
 from nansat.mappers.envisat import Envisat
 from nansat.domain import Domain
@@ -33,7 +34,7 @@ class Mapper(VRT, Envisat):
     http://envisat.esa.int/handbooks/asar/CNTR6-6-9.htm#eph.asar.asardf.asarrec.ASAR_Geo_Grid_ADSR
     '''

-    def __init__(self, filename, gdalDataset, gdalMetadata, **kwargs):
+    def __init__(self, filename, gdalDataset, gdalMetadata, step=1, **kwargs):
         '''
         Parameters
@@ -234,7 +235,7 @@ def __init__(self, filename, gdalDataset, gdalMetadata, **kwargs):
         # add bands with metadata and corresponding values to the empty VRT
         self.create_bands(metaDict)

-        # Add oribit and look information to metadata domain
+        # Add orbit and look information to metadata domain
         # ASAR is always right-looking
         self.dataset.SetMetadataItem('ANTENNA_POINTING', 'RIGHT')
         self.dataset.SetMetadataItem('ORBIT_DIRECTION',
@@ -291,3 +292,9 @@ def __init__(self, filename, gdalDataset, gdalMetadata, **kwargs):

         self.dataset.SetMetadataItem('instrument', json.dumps(mm))
         self.dataset.SetMetadataItem('platform', json.dumps(ee))
+
+        self.add_geolocation_from_ads(gdalDataset)
+
+        # Set projection
+        if self.dataset.GetProjection() == "":
+            self.dataset.SetProjection(NSR().wkt)
diff --git a/nansat/mappers/mapper_meps.py b/nansat/mappers/mapper_meps.py
new file mode 100644
index 00000000..c756315c
--- /dev/null
+++ b/nansat/mappers/mapper_meps.py
@@ -0,0 +1,112 @@
+import json
+import pytz
+import datetime
+
+import pythesint as pti
+
+from osgeo import gdal
+from pyproj import CRS
+from netCDF4 import Dataset
+
+from nansat.nsr import NSR
+from nansat.exceptions import WrongMapperError
+from nansat.mappers.mapper_netcdf_cf import Mapper as NetcdfCF
+from nansat.mappers.opendap import Opendap
+
+
+class Mapper(NetcdfCF, Opendap):
+
+    def __init__(self, url, gdal_dataset, gdal_metadata, file_num=0, bands=None, *args,
+                 **kwargs):
+
+        if not url.endswith(".nc"):
+            raise WrongMapperError
+
+        try:
+            ds = Dataset(url)
+        except OSError:
+            raise WrongMapperError
+
+        if "title" not in ds.ncattrs() or "meps" not in ds.getncattr("title").lower():
+            raise WrongMapperError
+
+        metadata = {}
+        for attr in ds.ncattrs():
+            content = ds.getncattr(attr)
+            if isinstance(content, str):
+                content = content.replace("æ", "ae").replace("ø", "oe").replace("å", "aa")
+            metadata[attr] = content
+
+        self.input_filename = url
+
+        xsize = ds.dimensions["x"].size
+        ysize = ds.dimensions["y"].size
+
+        # Pick 10 meter height dimension only
+        height_dim = "height6"
+        if height_dim not in ds.dimensions.keys():
+            raise WrongMapperError
+        if ds.dimensions[height_dim].size != 1:
+            raise WrongMapperError
+        if ds.variables[height_dim][0].data != 10:
+            raise WrongMapperError
+
+        varnames = []
+        for var in ds.variables:
+            var_dimensions = ds.variables[var].dimensions
+            if var_dimensions == ("time", height_dim, "y", "x"):
+                varnames.append(var)
+
+        # Projection
+        try:
+            grid_mapping = ds.variables[ds.variables[varnames[0]].grid_mapping]
+        except KeyError:
+            raise WrongMapperError
+
+        grid_mapping_dict = {}
+        for index in grid_mapping.ncattrs():
+            grid_mapping_dict[index] = grid_mapping.getncattr(index)
+        crs = CRS.from_cf(grid_mapping_dict)
+        nsr = NSR(crs.to_proj4())
+
+        # Geotransform
+        xx = ds.variables["x"][0:2]
+        yy = ds.variables["y"][0:2]
+        gtrans = xx[0], xx[1]-xx[0], 0, yy[0], 0, yy[1]-yy[0]
+
+        self._init_from_dataset_params(xsize, ysize, gtrans, nsr.wkt)
+
+        meta_dict = []
+        if bands is None:
+            bands = varnames
+        for band in bands:
+            if band not in ds.variables.keys():
+                continue
+            dimension_names, dim_sizes = self._get_dimension_info(band)
+            self._pop_spatial_dimensions(dimension_names)
+            index = self._get_index_of_dimensions(dimension_names, {}, dim_sizes)
+            fn = self._get_sub_filename(url, band, dim_sizes, index)
+            band_metadata = self.get_band_metadata_dict(fn, ds.variables[band])
+            # Add time stamp to band metadata
+            tt = datetime.datetime.fromisoformat(str(self.times()[index["time"]["index"]]))
+            if tt.tzinfo is None:
+                tt = pytz.utc.localize(tt)
+            band_metadata["dst"]["time"] = tt.isoformat()
+            meta_dict.append(band_metadata)
+
+        self.create_bands(meta_dict)
+
+        # Copy metadata
+        for key in metadata.keys():
+            self.dataset.SetMetadataItem(str(key), str(metadata[key]))
+
+        # Get dictionary describing the instrument and platform according to
+        # the GCMD keywords
+        mm = pti.get_gcmd_instrument("computer")
+        ee = pti.get_gcmd_platform("models")
+
+        self.dataset.SetMetadataItem("instrument", json.dumps(mm))
+        self.dataset.SetMetadataItem("platform", json.dumps(ee))
+
+        # Set input filename
+        self.dataset.SetMetadataItem("nc_file", self.input_filename)
diff --git a/nansat/mappers/mapper_meps_ncml.py b/nansat/mappers/mapper_meps_ncml.py
new file mode 100644
index 00000000..c5fe86c7
--- /dev/null
+++ b/nansat/mappers/mapper_meps_ncml.py
@@ -0,0 +1,40 @@
+import os
+import pytz
+import netCDF4
+import datetime
+
+import numpy as np
+
+from nansat.exceptions import WrongMapperError
+from nansat.mappers.mapper_meps import Mapper as NCMapper
+
+class Mapper(NCMapper):
+
+
+    def __init__(self, ncml_url, gdal_dataset, gdal_metadata, netcdf_dim=None, *args, **kwargs):
+
+        if not ncml_url.endswith(".ncml"):
+            raise WrongMapperError
+
+        dt = datetime.timedelta(0)
+        if netcdf_dim is not None and "time" in netcdf_dim.keys():
+            ds = netCDF4.Dataset(ncml_url)
+            time = netcdf_dim["time"].astype(datetime.datetime).replace(
+                tzinfo=pytz.timezone("utc"))
+            dt = time - datetime.datetime.fromisoformat(ds.time_coverage_start.replace(
+                "Z", "+00:00"))
+        url = self._get_odap_url(ncml_url, np.round(dt.total_seconds()/3600))
+
+        super(Mapper, self).__init__(url, gdal_dataset, gdal_metadata, *args, **kwargs)
+
+
+    def _get_odap_url(self, fn, file_num=0):
+        """ Get the opendap url to file number 'file_num'. The
+        default file number is 0, which yields the forecast time.
+        """
+        url = (
+            "" + os.path.split(fn)[0] + "/member_%02d" + "/meps_" + os.path.basename(fn).split("_")[2]
+            + "_%02d_" + os.path.basename(fn).split("_")[3][:-2]
+        ) % (int(os.path.basename(fn)[8:11]), file_num)
+        return url
diff --git a/nansat/mappers/mapper_ncep_global_atm_model_best.py b/nansat/mappers/mapper_ncep_global_atm_model_best.py
new file mode 100644
index 00000000..e94024e9
--- /dev/null
+++ b/nansat/mappers/mapper_ncep_global_atm_model_best.py
@@ -0,0 +1,92 @@
+import pytz
+import netCDF4
+import datetime
+
+import numpy as np
+
+from dateutil.parser import parse
+
+from nansat.exceptions import WrongMapperError
+from nansat.vrt import VRT
+
+
+class Mapper(VRT):
+
+    def __init__(self, filename, gdal_dataset, metadata, netcdf_dim=None, *args, **kwargs):
+        """Read NCEP_Global_Atmospheric_Model_best.ncd from
+        https://pae-paha.pacioos.hawaii.edu/thredds/dodsC/ncep_global/,
+        and get a Nansat object with arrays as close as possible to the
+        given time.
+
+        Time must be of type datetime64. Timezone is assumed to be UTC.
+ """ + + fn = ("https://pae-paha.pacioos.hawaii.edu/thredds/dodsC/" + "ncep_global/NCEP_Global_Atmospheric_Model_best.ncd") + if filename != fn: + raise WrongMapperError + + if netcdf_dim is None: + raise WrongMapperError + + ds = netCDF4.Dataset(filename) + metadata = {} + for key in ds.ncattrs(): + metadata[key] = str(ds.getncattr(key)) + + lon = (ds["longitude"][:] + 180.) % 360 - 180. + #lon = ds["longitude"][:] + lat = ds["latitude"][:] + times = ds["time"][:] + + longitude, latitude = np.meshgrid(lon, lat) + # Shift array to yield -180 to 180 deg longitude + ind = np.where(lon<0)[0] + xx = np.empty(longitude.shape) + xx[:, 0:ind.shape[0]] = longitude[:, ind.min():] + xx[:, ind.shape[0]:] = longitude[:, 0:ind.min()] + longitude = xx + + super(Mapper, self)._init_from_lonlat(longitude, latitude, add_gcps=True) + self.dataset.SetMetadata(metadata) + + t0 = parse(ds["time"].units.strip("hours since ")) + t1 = ds["time"][:] + dt = [datetime.timedelta(hours=t) for t in t1.data[:]] + times = [t0 + tt for tt in dt] + diff = np.array(times) - \ + netcdf_dim["time"].astype(datetime.datetime).replace(tzinfo=pytz.utc) + time_index = np.abs(diff).argmin() + + for attr in ds.ncattrs(): + content = ds.getncattr(attr) + if type(content) is str: + metadata[attr] = content + + self.band_vrts = {} + metaDict = [] + + for key, val in ds.variables.items(): + if ds[key].shape != (t1.shape[0], longitude.shape[0], longitude.shape[1]): + continue + data = ds[key][time_index, :, :].filled(fill_value=np.nan) + xx = np.empty(longitude.shape) + xx[:, 0:ind.shape[0]] = data[:, ind.min():] + xx[:, ind.shape[0]:] = data[:, 0:ind.min()] + self.band_vrts[key] = VRT.from_array(xx) + key_metadict = {} + for attr in ds[key].ncattrs(): + key_metadict[attr] = ds[key].getncattr(attr) + key_metadict["name"] = key + key_metadict["time"] = times[time_index].isoformat() + metaDict.append({ + 'src': { + 'SourceFilename': self.band_vrts[key].filename, + 'SourceBand': 1 + }, + 'dst': key_metadict, + }) + + self.create_bands(metaDict) + + #self.fix_global_metadata diff --git a/nansat/mappers/mapper_netcdf_cf.py b/nansat/mappers/mapper_netcdf_cf.py index d899e126..fa0adbef 100644 --- a/nansat/mappers/mapper_netcdf_cf.py +++ b/nansat/mappers/mapper_netcdf_cf.py @@ -4,13 +4,17 @@ http://cfconventions.org/compliance-checker.html ''' -import warnings, os, datetime -import numpy as np import collections -from nansat.utils import gdal +import datetime +import os +import warnings + +import numpy as np from dateutil.parser import parse from netCDF4 import Dataset +from osgeo import gdal +from pyproj import CRS from nansat.vrt import VRT from nansat.nsr import NSR @@ -28,7 +32,6 @@ class ContinueI(Exception): class Mapper(VRT): """ """ - input_filename = '' def __init__(self, filename, gdal_dataset, gdal_metadata, *args, **kwargs): @@ -85,6 +88,11 @@ def __init__(self, filename, gdal_dataset, gdal_metadata, *args, **kwargs): # Set GCMD/DIF compatible metadata if available self._set_time_coverage_metadata(metadata) + # Add global metadata + metadata.pop("title_no", "") + metadata.pop("summary_no", "") + self.dataset.SetMetadata(metadata) + # Then add remaining GCMD/DIF compatible metadata in inheriting mappers def times(self): @@ -124,9 +132,10 @@ def _time_reference(self, ds=None): rt = parse(times.units, fuzzy=True) # This sets timezone to local # Remove timezone information from epoch, which defaults to # utc (otherwise the timezone should be given in the dataset) + units = times.units epoch = datetime.datetime(rt.year, rt.month, rt.day, 
                                  rt.hour, rt.minute, rt.second)
-        return epoch, times.units
+        return epoch, units

    def _timevarname(self, ds=None):
        if not ds:
@@ -171,9 +180,10 @@ def _time_count_to_np_datetime64(self, time_count, time_reference=None):
            raise Exception('Check time units..')
        return tt

-    def _band_list(self, gdal_dataset, gdal_metadata, netcdf_dim={}, bands=[], *args, **kwargs):
-        ''' Create list of dictionaries mapping source and destination metadata
-        of bands that should be added to the Nansat object.
+    def _band_list(self, gdal_dataset, gdal_metadata, netcdf_dim=None, bands=None, *args,
+                   **kwargs):
+        ''' Create list of dictionaries mapping source and destination
+        metadata of bands that should be added to the Nansat object.

        Parameters
        ----------
@@ -182,50 +192,48 @@
        gdal_metadata : dict
            Dictionary of global metadata
        netcdf_dim : dict
-            Dictionary of desired slice of a multi-dimensional array. Since
-            gdal only returns 2D bands, a multi-dimensional array (x,y,z) is
-            split into z bands accompanied with metadata information about the
-            position of the slice along the z-axis. The (key, value) pairs represent dimension name and
-            the desired value in that dimension (not the index), respectively. E.g., for a height
-            dimension of [10, 20, 30] m, netcdf_dim = {'height': 20} if you want to extract the
-            data from 20 m height.
+            Dictionary of desired slice of a multi-dimensional array.
+            Since gdal only returns 2D bands, a multi-dimensional
+            array (x,y,z) is split into z bands accompanied with
+            metadata information about the position of the slice along
+            the z-axis. The (key, value) pairs represent dimension
+            name and the desired value in that dimension (not the
+            index), respectively. E.g., for a height dimension of
+            [10, 20, 30] m, netcdf_dim = {'height': 20} if you want to
+            extract the data from 20 m height.
        bands : list
-            List of desired bands following NetCDF-CF standard names. NOTE:
-            some datasets have other bands as well, i.e., of data not yet
-            implemented in CF. We may at some point generalize this to provide
-            a dict with key name and value, where the key is, e.g.,
-            "standard_name" or "metno_name", etc.
+            List of desired bands following NetCDF-CF standard names.
+            NOTE: some datasets have other bands as well, i.e., of
+            data not yet implemented in CF. We may at some point
+            generalize this to provide a dict with key name and value,
+            where the key is, e.g., "standard_name" or "metno_name",
+            etc.
        '''
+        if netcdf_dim is None:
+            netcdf_dim = {}
+        if bands is None:
+            ds = Dataset(self.input_filename)
+            bands = ds.variables.keys()
        metadictlist = []
        for fn in self._get_sub_filenames(gdal_dataset):
+            band = fn.split(':')[-1]
+            if band not in bands:
+                continue
+            # Don't process geolocation variables/bands
            if ('GEOLOCATION_X_DATASET' in fn or 'longitude' in fn or
                    'GEOLOCATION_Y_DATASET' in fn or 'latitude' in fn):
                continue
            try:
-                metadictlist.append(self._get_band_from_subfile(fn, netcdf_dim=netcdf_dim, bands=bands))
+                metadictlist.append(
+                    self._get_band_metadata(fn, netcdf_dim=netcdf_dim))
            except ContinueI:
                continue
        return metadictlist

-    def _get_band_from_subfile(self, fn, netcdf_dim={}, bands=[]):
-        nc_ds = Dataset(self.input_filename)
-        band_name = fn.split(':')[-1]
-        if bands:
-            variable = nc_ds.variables[band_name]
-            if 'standard_name' not in variable.ncattrs() or not variable.standard_name in bands:
-                raise ContinueI
-        # TODO: consider to allow band name in addition or instead of standard_name in the band
-        # list kwarg
-        sub_band = nc_ds.variables[band_name]
-        dimension_names = [b.name for b in sub_band.get_dims()]
-        dimension_names.reverse()
-        dim_sizes = {}
-        for dim in sub_band.get_dims():
-            dim_sizes[dim.name] = dim.size
-
-        # Pop spatial dimensions (longitude and latitude, or x and y)
+    @staticmethod
+    def _pop_spatial_dimensions(dimension_names):
+        """ Pop spatial dimensions (longitude and latitude, or x and y)
+        """
        for allowed in ALLOWED_SPATIAL_DIMENSIONS_X:
            try:
                ind_dim_x = [i for i, s in enumerate(dimension_names) if allowed in s.lower()][0]
@@ -238,50 +246,108 @@
                dimension_names.pop(ind_dim_y)
            except IndexError:
                continue
+
+    def _get_index_of_dimensions(self, dimension_names, netcdf_dim, dim_sizes):
+        """ Nansat only works with 2D data, so for datasets with
+        higher dimensions we need to pick the correct slice. The
+        function returns a dictionary with the index of the wanted
+        data slice, as specified in the netcdf_dim dictionary.
+        """
+        ds = Dataset(self.input_filename)
        index4key = collections.OrderedDict()
        for key in dimension_names:
-            if key in netcdf_dim.keys():
-                val = netcdf_dim[key]
-                if key == 'time' and type(val) == np.datetime64:
+            var = ds[key]
+            if "standard_name" in var.ncattrs():
+                check_attr = var.standard_name
+            else:
+                check_attr = key
+            if check_attr in netcdf_dim.keys():
+                val = netcdf_dim[check_attr]
+                if "time" == check_attr:
+                    if type(val) != datetime.datetime and type(val) != np.datetime64:
+                        raise ValueError
+                    if type(val) == datetime.datetime:
+                        val = np.datetime64(val).astype('M8[s]')
                    # Get band number from given timestamp
                    index = int(np.argmin(np.abs(self.times() - val)))
                else:
-                    index = int(np.argmin(np.abs(nc_ds.variables[key][:] - val)))
+                    index = int(np.argmin(np.abs(ds.variables[check_attr][:] - val)))
                index4key[key] = {
                    'index': index,
                    'size': dim_sizes[key],
                }
            else:
-                index4key[key] = {
+                index4key[check_attr] = {
                    'index': 0,
                    'size': dim_sizes[key],
                }
-
-        # Works in Python 2 and 3
+        return index4key
+
+    @staticmethod
+    def _calculate_band_number(index4dimension, dimension_names):
+        """ Uses the index and size values in the provided dict to
+        calculate the gdal band number. The index4dimension dict
+        contains the index in the dimensions given by dimension_names,
+        and the size of the dimensions.
+ """ class Context: band_number = 1 multiplier = 1 - #band_number = 1 # Only works in python 3 - #multiplier = 1 # Only works in Python 3 + def get_band_number(): - #nonlocal band_number # Only works in Python 3 - #nonlocal multiplier # Only works in Python 3 try: name_dim0 = dimension_names.pop(0) except: return - Context.band_number += index4key[name_dim0]['index']*Context.multiplier - Context.multiplier *= index4key[name_dim0]['size'] + Context.band_number += index4dimension[name_dim0]['index']*Context.multiplier + Context.multiplier *= index4dimension[name_dim0]['size'] get_band_number() get_band_number() + return Context.band_number + + def _get_dimension_info(self, band): + """ Get band list from a netCDF4.Dataset. See docs of + _band_list. + """ + ds = Dataset(self.input_filename) + var = ds.variables[band] + dimension_names = [b.name for b in var.get_dims()] + dimension_names.reverse() + dim_sizes = collections.OrderedDict() + for dim in var.get_dims(): + dim_sizes[dim.name] = dim.size + + return dimension_names, dim_sizes + + def _get_band_number(self, band, netcdf_dim={}): + """ Get gdal band number from multidimensional dataset. + """ + if netcdf_dim is None: + netcdf_dim = {} + + dimension_names, dim_sizes = self._get_dimension_info(band) + self._pop_spatial_dimensions(dimension_names) + index = self._get_index_of_dimensions(dimension_names, netcdf_dim, dim_sizes) + band_number = self._calculate_band_number(index, dimension_names) + + return band_number + + def _get_band_metadata(self, fn, netcdf_dim=None): + """ Gets band metadata using gdal + """ + if netcdf_dim is None: + netcdf_dim = {} + band = fn.split(':')[-1] + band_number = self._get_band_number(band, netcdf_dim) + subds = gdal.Open(fn) - band = subds.GetRasterBand(Context.band_number) + band = subds.GetRasterBand(band_number) band_metadata = self._clean_band_metadata(band) band_metadata['_FillValue'] = band.GetNoDataValue() - return self._band_dict(fn, Context.band_number, subds, band=band, + return self._band_dict(fn, band_number, subds, band=band, band_metadata=band_metadata) def _clean_band_metadata(self, band, remove = ['_Unsigned', 'ScaleRatio', @@ -385,24 +451,41 @@ def _band_dict(self, subfilename, band_num, subds, band=None, band_metadata=None def _create_empty(self, gdal_dataset, gdal_metadata): try: self._create_empty_from_projection_variable(gdal_dataset, gdal_metadata) - except KeyError: + except (KeyError, AttributeError): try: self._create_empty_from_subdatasets(gdal_dataset, gdal_metadata) except NansatMissingProjectionError: - # this happens rarely - a special workaround is done in mapper_quikscat - warnings.warn('GDAL cannot determine the dataset projection - using Nansat ' \ - 'spatial reference WKT, assuming a regular longitude/latitude grid') self._create_empty_with_nansat_spatial_reference_WKT(gdal_dataset, gdal_metadata) - def _create_empty_from_projection_variable(self, gdal_dataset, gdal_metadata, - projection_variable='projection_lambert'): + def _create_empty_from_projection_variable(self, gdal_dataset, gdal_metadata): + + # TODO: only open dataset once or close them.. 
        ds = Dataset(self.input_filename)
-        subdataset = gdal.Open(self._get_sub_filenames(gdal_dataset)[0])
+
+        shape=[]
+        for key in ds.dimensions.keys():
+            shape.append(ds.dimensions[key].size)
+
+        for var in ds.variables.keys():
+            if ds[var].shape==tuple(shape):
+                break
+
+        grid_mapping = ds.variables[ds.variables[var].grid_mapping]
+
+        grid_mapping_dict = {}
+        for index in grid_mapping.ncattrs():
+            grid_mapping_dict[index] = grid_mapping.getncattr(index)
+        crs = CRS.from_cf(grid_mapping_dict)
+
+        try:
+            subdataset = gdal.Open(self._get_sub_filenames(gdal_dataset)[0])
+        except IndexError:
+            subdataset = gdal_dataset
        self._init_from_dataset_params(
            x_size = subdataset.RasterXSize,
            y_size = subdataset.RasterYSize,
            geo_transform = subdataset.GetGeoTransform(),
-            projection = NSR(ds.variables[projection_variable].proj4).wkt,
+            projection = NSR(crs.to_proj4()).wkt,
            metadata = gdal_metadata)

    def _create_empty_from_subdatasets(self, gdal_dataset, metadata):
@@ -421,9 +504,26 @@ def _create_empty_from_subdatasets(self, gdal_dataset, metadata):
                no_projection = False
                break
        if no_projection:
-            raise NansatMissingProjectionError
-        # Initialise VRT with subdataset containing projection
-        self._init_from_gdal_dataset(sub, metadata=metadata)
+            #raise NansatMissingProjectionError
+            sub_datasets = gdal_dataset.GetSubDatasets()
+            filenames = [f[0] for f in sub_datasets]
+            for _, filename in enumerate(filenames):
+                if 'longitude' in filename:
+                    xDatasetSource = filename
+                if 'latitude' in filename:
+                    yDatasetSource = filename
+            if not "xDatasetSource" in locals():
+                raise NansatMissingProjectionError
+            if not "yDatasetSource" in locals():
+                raise NansatMissingProjectionError
+            lon_dataset = gdal.Open(xDatasetSource)
+            lon = lon_dataset.GetRasterBand(1).ReadAsArray()
+            lat_dataset = gdal.Open(yDatasetSource)
+            lat = lat_dataset.GetRasterBand(1).ReadAsArray()
+            self._init_from_lonlat(lon, lat, add_gcps=False)
+        else:
+            # Initialise VRT with subdataset containing projection
+            self._init_from_gdal_dataset(sub, metadata=metadata)

    def _create_empty_with_nansat_spatial_reference_WKT(self, gdal_dataset, gdal_metadata):
        """ In this case, gdal cannot find the projection of any
diff --git a/nansat/mappers/mapper_opendap_arome.py b/nansat/mappers/mapper_opendap_arome.py
index 0d86fb13..1a4583bd 100644
--- a/nansat/mappers/mapper_opendap_arome.py
+++ b/nansat/mappers/mapper_opendap_arome.py
@@ -1,24 +1,28 @@
-# Name:    mapper_arome.py
-# Purpose: Nansat mapping for AROME-Arctic and MEPS (MetCoOp Ensemble
-#          Prediction System) data provided by MET.NO
-# Author:  Artem Moiseev
 # Licence: This file is part of NANSAT. You can redistribute it or modify
 #          under the terms of GNU General Public License, v.3
 #          http://www.gnu.org/licenses/gpl-3.0.html
+import os
+import json
+import pytz
+import datetime

-from nansat.mappers.mapper_arome import Mapper as MapperArome
-from nansat.mappers.opendap import Opendap
-from nansat.exceptions import WrongMapperError
-from nansat.nsr import NSR
+import numpy as np
 import pythesint as pti
-import os
-from datetime import datetime
+
 from netCDF4 import Dataset
-import numpy as np
-import json
+from osgeo import gdal
+from pyproj import CRS

+from nansat.exceptions import WrongMapperError
+from nansat.nsr import NSR
+from nansat.vrt import VRT
+
+from nansat.mappers.mapper_netcdf_cf import Mapper as MapperNCCF
+#from nansat.mappers.mapper_arome import Mapper as MapperArome
+from nansat.mappers.opendap import Opendap

-class Mapper(Opendap, MapperArome):
+
+class Mapper(MapperNCCF, Opendap):

     baseURLs = ['http://thredds.met.no/thredds/catalog/arome25/catalog.html',
                 'https://thredds.met.no/thredds/dodsC/aromearcticarchive',
@@ -26,73 +30,110 @@ class Mapper(Opendap, MapperArome):
                 'https://thredds.met.no/thredds/dodsC/meps25epsarchive',
                 'http://thredds.met.no/thredds/dodsC/meps25epsarchive']
     timeVarName = 'time'
-    xName = 'x'
-    yName = 'y'
-    timeCalendarStart = '1970-01-01'

-    def __init__(self, filename, gdal_dataset, gdal_metadata, date=None,
+    def __init__(self, filename, gdal_dataset, gdal_metadata, netcdf_dim=None,
                  ds=None, bands=None, cachedir=None, *args, **kwargs):

-        self.test_mapper(filename)
-        timestamp = date if date else self.get_date(filename)
-        ds = Dataset(filename)
+        if netcdf_dim is None:
+            netcdf_dim = {}
+
+        self.input_filename = filename
+
+        if "thredds.met.no" not in filename:
+            raise WrongMapperError
+
+        if gdal_dataset is None:
+            gdal_dataset = gdal.Open(filename)

         try:
-            self.srcDSProjection = NSR(ds.variables['projection_lambert'].proj4)
+            ds = Dataset(filename)
+        except OSError:
+            raise WrongMapperError
+
+        metadata = {}
+        for attr in ds.ncattrs():
+            metadata[attr] = self._fix_encoding(ds.getncattr(attr))
+
+        if 'title' not in metadata.keys() or 'arome' not in metadata['title'].lower():
+            raise WrongMapperError
+
+        xsize = ds.dimensions['x'].size
+        ysize = ds.dimensions['y'].size
+
+        # Pick 10 meter height dimension only
+        height_dim = 'height7'
+        if height_dim not in ds.dimensions.keys():
+            raise WrongMapperError
+        if ds.dimensions[height_dim].size != 1:
+            raise WrongMapperError
+        if ds.variables[height_dim][0].data != 10:
+            raise WrongMapperError
+
+        varnames = []
+        for var in ds.variables:
+            var_dimensions = ds.variables[var].dimensions
+            if var_dimensions == ('time', height_dim, 'y', 'x'):
+                varnames.append(var)
+
+        # Projection
+        try:
+            grid_mapping = ds.variables[ds.variables[varnames[0]].grid_mapping]
         except KeyError:
             raise WrongMapperError

-        self.create_vrt(filename, gdal_dataset, gdal_metadata, timestamp, ds, bands, cachedir)
+        grid_mapping_dict = {}
+        for index in grid_mapping.ncattrs():
+            grid_mapping_dict[index] = grid_mapping.getncattr(index)
+        crs = CRS.from_cf(grid_mapping_dict)
+        nsr = NSR(crs.to_proj4())
+
+        # Geotransform
+        xx = ds.variables['x'][0:2]
+        yy = ds.variables['y'][0:2]
+        gtrans = xx[0], xx[1]-xx[0], 0, yy[0], 0, yy[1]-yy[0]
+
+        self._init_from_dataset_params(xsize, ysize, gtrans, nsr.wkt)
+
+        meta_dict = []
+        if bands is None:
+            bands = varnames
+        for band in bands:
+            if band not in ds.variables.keys():
+                continue
+            dimension_names, dim_sizes = self._get_dimension_info(band)
+            self._pop_spatial_dimensions(dimension_names)
+            index = self._get_index_of_dimensions(dimension_names, netcdf_dim, dim_sizes)
+            fn = self._get_sub_filename(filename, band, dim_sizes, index)
+            band_metadata = self.get_band_metadata_dict(fn, ds.variables[band])
+            # Add time stamp to band metadata
+            tt = datetime.datetime.fromisoformat(str(self.times()[index["time"]["index"]]))
+            if tt.tzinfo is None:
+                tt = pytz.utc.localize(tt)
+            band_metadata["dst"]["time"] = tt.isoformat()
+            meta_dict.append(band_metadata)
+
+        self.create_bands(meta_dict)
+
+        # Copy metadata
+        for key in metadata.keys():
+            self.dataset.SetMetadataItem(str(key), str(metadata[key]))

         mm = pti.get_gcmd_instrument('Computer')
         ee = pti.get_gcmd_platform('ecmwfifs')
         self.dataset.SetMetadataItem('instrument', json.dumps(mm))
         self.dataset.SetMetadataItem('platform', json.dumps(ee))

-        md_item = 'Data Center'
-        if not self.dataset.GetMetadataItem(md_item):
-            self.dataset.SetMetadataItem(md_item, 'NO/MET')
-        md_item = 'Entry Title'
-        if not self.dataset.GetMetadataItem(md_item):
-            self.dataset.SetMetadataItem(md_item, str(ds.getncattr('title')))
-        md_item = 'summary'
-        if not self.dataset.GetMetadataItem(md_item):
-            summary = """
-            AROME_Arctic is a convection-permitting atmosphere model covering parts of the Barents
-            Sea and the Nordic Arctic. It has horizontal resolution of 2.5 km and 65 vertical
-            levels. AROME_Arctic runs for 66 hours four times a day (00,06,12,18) with three-hourly
-            cycling for data assimilation. Boundary data is from ECMWF. Model code based on HARMONIE
-            cy40h1.1
-            """
-            self.dataset.SetMetadataItem(md_item, str(summary))
-
     @staticmethod
     def get_date(filename):
         """Extract date and time parameters from filename and return
-        it as a formatted string
-
-        Parameters
-        ----------
-
-        filename: str
-            nn
-
-        Returns
-        -------
-            str, YYYY-mm-ddThh:MMZ
-
-        Examples
-        --------
-            >>> Mapper.get_date('/path/to/arome_arctic_full_2_5km_20171030T21Z.nc')
-            '2017-10-30T21:00Z'
+        them as a datetime object.
         """
         _, filename = os.path.split(filename)
-        t = datetime.strptime(filename.split('_')[-1], '%Y%m%dT%HZ.nc')
-        return datetime.strftime(t, '%Y-%m-%dT%H:%MZ')
+        return datetime.datetime.strptime(filename.split('_')[-1], '%Y%m%dT%HZ.nc')

     def convert_dstime_datetimes(self, ds_time):
         """Convert time variable to np.datetime64"""
         ds_datetimes = np.array(
-            [(np.datetime64(self.timeCalendarStart).astype('M8[s]')
+            [(np.datetime64(self.epoch).astype('M8[s]')
               + np.timedelta64(int(sec), 's').astype('m8[s]')) for sec in ds_time]).astype('M8[s]')
         return ds_datetimes
diff --git a/nansat/mappers/mapper_opendap_met_nordic.py b/nansat/mappers/mapper_opendap_met_nordic.py
new file mode 100644
index 00000000..f2168213
--- /dev/null
+++ b/nansat/mappers/mapper_opendap_met_nordic.py
@@ -0,0 +1,119 @@
+# Licence: This file is part of NANSAT. You can redistribute it or modify
+#          under the terms of GNU General Public License, v.3
+#          http://www.gnu.org/licenses/gpl-3.0.html
+import json
+import os
+import numpy as np
+import pythesint as pti
+
+from datetime import datetime
+from netCDF4 import Dataset
+from osgeo import gdal
+from pyproj import CRS
+
+from nansat.exceptions import WrongMapperError
+from nansat.nsr import NSR
+from nansat.vrt import VRT
+
+from nansat.mappers.mapper_netcdf_cf import Mapper as MapperNCCF
+#from nansat.mappers.mapper_arome import Mapper as MapperArome
+from nansat.mappers.opendap import Opendap
+
+
+class Mapper(MapperNCCF, Opendap):
+
+    baseURLs = ['https://thredds.met.no/thredds/dodsC/metpparchivev3',]
+    timeVarName = 'time'
+
+    def __init__(self, filename, gdal_dataset, gdal_metadata, netcdf_dim=None,
+                 ds=None, bands=None, cachedir=None, *args, **kwargs):
+
+        if netcdf_dim is None:
+            netcdf_dim = {}
+
+        if "thredds.met.no" not in filename:
+            raise WrongMapperError
+
+        self.input_filename = filename
+
+        if gdal_dataset is None:
+            gdal_dataset = gdal.Open(filename)
+
+        try:
+            ds = Dataset(filename)
+        except OSError:
+            raise WrongMapperError
+
+        metadata = {}
+        for attr in ds.ncattrs():
+            metadata[attr] = self._fix_encoding(ds.getncattr(attr))
+
+        if 'title' not in metadata.keys() or 'met nordic' not in metadata['title'].lower():
+            raise WrongMapperError
+
+        xsize = ds.dimensions['x'].size
+        ysize = ds.dimensions['y'].size
+
+        varnames = []
+        for var in ds.variables:
+            var_dimensions = ds.variables[var].dimensions
+            if var_dimensions == ('time', 'y', 'x'):
+                varnames.append(var)
+
+        # Projection
+        try:
+            grid_mapping = ds.variables[ds.variables[varnames[0]].grid_mapping]
+        except KeyError:
+            raise WrongMapperError
+
+        grid_mapping_dict = {}
+        for index in grid_mapping.ncattrs():
+            grid_mapping_dict[index] = grid_mapping.getncattr(index)
+        crs = CRS.from_cf(grid_mapping_dict)
+        nsr = NSR(crs.to_proj4())
+
+        # Geotransform
+        xx = ds.variables['x'][0:2]
+        yy = ds.variables['y'][0:2]
+        gtrans = xx[0], xx[1]-xx[0], 0, yy[0], 0, yy[1]-yy[0]
+
+        self._init_from_dataset_params(xsize, ysize, gtrans, nsr.wkt)
+
+        meta_dict = []
+        if bands is None:
+            bands = varnames
+        for band in bands:
+            if band not in ds.variables.keys():
+                continue
+            dimension_names, dim_sizes = self._get_dimension_info(band)
+            self._pop_spatial_dimensions(dimension_names)
+            index = self._get_index_of_dimensions(dimension_names, netcdf_dim, dim_sizes)
+            fn = self._get_sub_filename(filename, band, dim_sizes, index)
+            meta_dict.append(self.get_band_metadata_dict(fn, ds.variables[band]))
+
+        self.create_bands(meta_dict)
+
+        # Copy metadata
+        for key in metadata.keys():
+            self.dataset.SetMetadataItem(str(key), str(metadata[key]))
+
+        mm = pti.get_gcmd_instrument('Computer')
+        ee = pti.get_gcmd_platform('ecmwfifs')
+        self.dataset.SetMetadataItem('instrument', json.dumps(mm))
+        self.dataset.SetMetadataItem('platform', json.dumps(ee))
+
+    @staticmethod
+    def get_date(filename):
+        """Extract date and time parameters from filename and return
+        them as a datetime object.
+ """ + _, filename = os.path.split(filename) + return datetime.strptime(filename.split('_')[-1], '%Y%m%dT%HZ.nc') + + def convert_dstime_datetimes(self, ds_time): + """Convert time variable to np.datetime64""" + ds_datetimes = np.array( + [(np.datetime64(self.epoch).astype('M8[s]') + + np.timedelta64(int(sec), 's').astype('m8[s]')) for sec in ds_time]).astype('M8[s]') + return ds_datetimes + diff --git a/nansat/mappers/opendap.py b/nansat/mappers/opendap.py index 88814e5a..c0d604ec 100644 --- a/nansat/mappers/opendap.py +++ b/nansat/mappers/opendap.py @@ -109,9 +109,7 @@ def get_dataset_time(self): if os.path.exists(cachefile): ds_time = np.load(cachefile)[self.timeVarName] else: - warnings.warn('Time consuming loading time from OpenDAP...') ds_time = self.ds.variables[self.timeVarName][:] - warnings.warn('Loading time - OK!') if os.path.exists(cachefile): np.savez(cachefile, **{self.timeVarName: ds_time}) @@ -157,8 +155,9 @@ def get_metaitem(self, url, var_name, var_dimensions): # assemble dimensions string dims = ''.join(['[%s]' % dim for dim in var_dimensions]) sfname = '{url}?{var}.{var}{shape}'.format(url=url, var=var_name, shape=dims) - # For Sentinel-1, the source filename is not at the same format. Simple solution is to check - # if this is correct witha try-except but that may be too time consuming. Expecting + # For Sentinel-1, the source filename is not at the same + # format. Simple solution is to check if this is correct with + # a try-except but that may be too time consuming. Expecting # discussion... try: ds = gdal.Open(sfname) @@ -210,7 +209,47 @@ def _fix_encoding(var): >>> Opendap._fix_encoding('asnes') 'asnes' """ - return str(var.encode('ascii', 'ignore').decode()) + if isinstance(var, str): + retval = str(var.encode('ascii', 'ignore').decode()) + else: + retval = var + return retval + + + def get_band_metadata_dict(self, fn, ncvar): + gds = gdal.Open(fn) + meta_item = { + 'src': {'SourceFilename': fn, 'SourceBand': 1}, + 'dst': {'name': ncvar.name, 'dataType': 6} + } + + for attr_key in ncvar.ncattrs(): + attr_val = self._fix_encoding(ncvar.getncattr(attr_key)) + if attr_key in ['scale', 'scale_factor']: + meta_item['src']['ScaleRatio'] = attr_val + elif attr_key in ['offset', 'add_offset']: + meta_item['src']['ScaleOffset'] = attr_val + else: + meta_item['dst'][attr_key] = attr_val + + return meta_item + + + @staticmethod + def _get_sub_filename(url, var, dim_sizes, index): + """ Opendap driver refers to subdatasets differently than the + standard way in vrt.py + """ + shape = [] + for item in dim_sizes.items(): + if item[0] in index.keys(): + shape.append(index[item[0]]['index']) + else: + shape.append(item[0]) + # assemble dimensions string + gd_shape = ''.join(['[%s]' % dimsize for dimsize in shape]) + return '{url}?{var}.{var}{shape}'.format(url=url, var=var, shape=gd_shape) + def create_vrt(self, filename, gdalDataset, gdalMetadata, date, ds, bands, cachedir): """ Create VRT diff --git a/nansat/mappers/sentinel1.py b/nansat/mappers/sentinel1.py index f64b355d..90c1d11f 100644 --- a/nansat/mappers/sentinel1.py +++ b/nansat/mappers/sentinel1.py @@ -1,4 +1,5 @@ import json +import logging import numpy as np from dateutil.parser import parse try: @@ -69,10 +70,14 @@ def set_gcmd_dif_keywords(self): self.dataset.SetMetadataItem('instrument', json.dumps(mm)) self.dataset.SetMetadataItem('platform', json.dumps(ee)) - self.dataset.SetMetadataItem('time_coverage_start', - self.dataset.GetMetadataItem('ACQUISITION_START_TIME')) - 
-        self.dataset.SetMetadataItem('time_coverage_end',
-                                     self.dataset.GetMetadataItem('ACQUISITION_STOP_TIME'))
+        tstart = self.dataset.GetMetadataItem('time_coverage_start') if \
+            self.dataset.GetMetadataItem('ACQUISITION_START_TIME') is None else \
+            self.dataset.GetMetadataItem('ACQUISITION_START_TIME')
+        tend = self.dataset.GetMetadataItem('time_coverage_end') if \
+            self.dataset.GetMetadataItem('ACQUISITION_STOP_TIME') is None else \
+            self.dataset.GetMetadataItem('ACQUISITION_STOP_TIME')
+        self.dataset.SetMetadataItem('time_coverage_start', tstart)
+        self.dataset.SetMetadataItem('time_coverage_end', tend)

    def get_gcps(self, flip_gcp_line=False):
        """ Get Ground Control Points for the dataset.
@@ -103,13 +108,69 @@ def get_gcps(self, flip_gcp_line=False):

        return gcps

-    def add_incidence_angle_band(self):
+    def get_gcp_shape(self):
+        """Get GCP shape from GCP pixel and line data. Returns the
+        GCP y and x dimensions.
+        """
        # Get GCP variables
        pixel = self.ds['GCP_pixel_'+self.ds.polarisation[:2]][:].data
        line = self.ds['GCP_line_'+self.ds.polarisation[:2]][:].data
+        gcp_y = np.unique(line[:].data).shape[0]
+        gcp_x = np.unique(pixel[:].data).shape[0]
+        """ The number of unique pixel and line values does not
+        guarantee the correct grid shape - check that the total number
+        of GCPs divided by gcp_x and gcp_y, respectively, is an
+        integer, and adjust the dimensions if it is not.
+        """
+        def test1(gcp_y, gcp_x):
+            """ If one of the dimensions is correct, adjust the other
+            one accordingly. If that does not work, use the test2
+            function.
+            """
+            if pixel.size % gcp_x != 0:
+                if pixel.size % gcp_y == 0:
+                    gcp_x = pixel.size/gcp_y
+            if pixel.size % gcp_y != 0:
+                if pixel.size % gcp_x == 0:
+                    gcp_y = pixel.size/gcp_x
+
+            if gcp_y*gcp_x != pixel.size:
+                gcp_x = test2(gcp_x)
+                gcp_y = test2(gcp_y)
+
+            return gcp_y, gcp_x
+
+        def test2(gcp_dim):
+            """ Search from gcp_dim-gcp_dim/4 to gcp_dim+gcp_dim/4 for
+            a dimension that divides pixel.size evenly.
+            """
+            if pixel.size % gcp_dim != 0:
+                for i in range(np.round(gcp_dim/4).astype("int")):
+                    test_dim = gcp_dim - i
+                    if pixel.size % test_dim == 0:
+                        gcp_dim = test_dim
+                        break
+                    test_dim = gcp_dim + i
+                    if pixel.size % test_dim == 0:
+                        gcp_dim = test_dim
+                        break
+            return gcp_dim
+
+        gcp_y, gcp_x = test1(gcp_y, gcp_x)
+
+        logging.debug("GCPY size: %d" % gcp_y)
+        logging.debug("GCPX size: %d" % gcp_x)
+        logging.debug("Pixel(s) size: %d" % pixel.size)
+
+        if gcp_y*gcp_x != pixel.size:
+            if gcp_y*gcp_x > pixel.size:
+                gcp_x, gcp_y = test1(gcp_x-1, gcp_y-1)
+            if gcp_y*gcp_x != pixel.size:
+                raise ValueError("GCP dimension mismatch")
+
+        return int(gcp_y), int(gcp_x)
+
+    def add_incidence_angle_band(self):
+        gcp_y, gcp_x = self.get_gcp_shape()
        inci = self.ds['GCP_incidenceAngle_'+self.ds.polarisation[:2]][:].data
-        inci = inci.reshape(np.unique(line[:].data).shape[0],
-                            np.unique(pixel[:].data).shape[0])
+        inci = inci.reshape(gcp_y, gcp_x)

        # Add incidence angle band
        inciVRT = VRT.from_array(inci)
@@ -124,14 +185,11 @@ def add_incidence_angle_band(self):

    def get_full_size_GCPs(self):
        # Get GCP variables
-        pixel = self.ds['GCP_pixel_' + self.ds.polarisation[:2]][:].data
-        line = self.ds['GCP_line_' + self.ds.polarisation[:2]][:].data
+        gcp_y, gcp_x = self.get_gcp_shape()
        lon = self.ds['GCP_longitude_' + self.ds.polarisation[:2]][:].data
+        lon = lon.reshape(gcp_y, gcp_x)
        lat = self.ds['GCP_latitude_' + self.ds.polarisation[:2]][:].data
-        lon = lon.reshape(np.unique(line[:].data).shape[0],
-                          np.unique(pixel[:].data).shape[0])
-        lat = lat.reshape(np.unique(line[:].data).shape[0],
-                          np.unique(pixel[:].data).shape[0])
+        lat = lat.reshape(gcp_y, gcp_x)
        return lon, lat

    def add_look_direction_band(self):
diff --git a/nansat/nansat.py b/nansat/nansat.py
index 7fabc6dd..20a30801 100644
--- a/nansat/nansat.py
+++ b/nansat/nansat.py
@@ -1241,7 +1241,7 @@ def get_transect(self, points, bands,
        Parameters
        ----------
        points : 2xN list or array, N (number of points) >= 1
-            coordinates [[x1, x2, y2], [y1, y2, y3]]
+            coordinates [[x1, x2, x3], [y1, y2, y3]]
        bands : list of int or string
            elements of the list are band number or band Name
        lonlat : bool
@@ -1416,7 +1416,7 @@ def crop_lonlat(self, lonlim, latlim):

        Examples
        --------
-        >>> extent = n.crop(lonlim=[-10,10], latlim=[-20,20]) # crop for given lon/lat limits
+        >>> extent = n.crop([-10,10], [-20,20]) # crop for given lon/lat limits

        """
        # lon/lat lists for four corners
diff --git a/nansat/nsr.py b/nansat/nsr.py
index 46247bb2..5937ad74 100644
--- a/nansat/nsr.py
+++ b/nansat/nsr.py
@@ -60,7 +60,7 @@ def __init__(self, srs=0):
        osr.SpatialReference.__init__(self)

        # parse input parameters
-        if srs is 0:
+        if srs == 0:
            # generate default WGS84 SRS
            self.ImportFromEPSG(4326)
        elif isinstance(srs, str_types):
diff --git a/nansat/pointbrowser.py b/nansat/pointbrowser.py
index 940ee660..9f229219 100644
--- a/nansat/pointbrowser.py
+++ b/nansat/pointbrowser.py
@@ -70,9 +70,9 @@ def __init__(self, data, fmt='x-k', force_interactive=True, **kwargs):
        if not MATPLOTLIB_IS_INSTALLED:
            raise ImportError(' Matplotlib is not installed ')
        if force_interactive and not matplotlib.is_interactive():
-            raise SystemError('''
-        Python is started with -pylab option, transect will not work.
-        Please restart python without -pylab.''')
+            plt.ion()
+            # raise SystemError("Python is started with -pylab option, transect will "
+            #                   "not work. Please restart python without -pylab.")

        self.fig = plt.figure()
        self.data = data
@@ -88,7 +88,7 @@ def __init__(self, data, fmt='x-k', force_interactive=True, **kwargs):
        self.lines = [self.ax.plot([], [], self.fmt)[0]]
        self.coordinates = [[]]

-    def onclick(self, event):
+    def __call__(self, event):
        """ Process mouse onclick event
        Append coordinates of the click to self.coordinates, add point and 2D line to self.points
        If click is outside, nothing is done
@@ -100,6 +100,9 @@
            event : matplotlib.mouse_event

        """
+        print('%s click: button=%d, x=%d, y=%d, xdata=%f, ydata=%f' %
+              ('double' if event.dblclick else 'single', event.button,
+               event.x, event.y, event.xdata, event.ydata))
        # ignore click outside image
        if event.xdata is None or event.ydata is None:
            return
@@ -150,7 +153,7 @@ def get_points(self):
            points : array

        '''
-        self.fig.canvas.mpl_connect('button_press_event', self.onclick)
+        self.fig.canvas.mpl_connect('button_press_event', self)
        self.ax.set_xlim([0, self.data.shape[1]])
        self.ax.set_ylim([0, self.data.shape[0]])
        self.ax.invert_yaxis()
@@ -172,7 +175,8 @@ def get_points(self):
        # collect data
        plt.show()

-        # convert list of lists of coordinates to list of arrays
-        points = self._convert_coordinates()
+        return self
+        # # convert list of lists of coordinates to list of arrays
+        # points = self._convert_coordinates()

-        return points
+        # return points
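A note on the `is` to `==` fixes in domain.py and nsr.py above: `is` tests object identity, not value equality, and only appears to work for short strings and small integers because CPython happens to intern them. A minimal illustration, independent of nansat:

    a = "te"
    b = "".join(["t", "e"])  # same value as a, but a different string object
    print(a == b)            # True  - compares values; this is what the code needs
    print(a is b)            # False (typically) - compares identity, unreliable here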
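The new `add_gcps` keyword and the history handling in Exporter.export() can be exercised as below. A minimal sketch; the input filename is hypothetical, and the history restoration happens automatically inside correct_attributes():

    from nansat import Nansat

    n = Nansat("input.nc")  # hypothetical input file
    # Skip the GCP-preparation step entirely (new add_gcps keyword):
    n.export("out_no_gcps.nc", add_gcps=False)
    # Default export: the 'history' attribute is read from the VRT metadata
    # before CreateCopy and written back afterwards, so it is no longer
    # truncated to 161 characters by the GDAL driver:
    n.export("out.nc")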
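The netcdf_dim/bands machinery shared by mapper_meps, mapper_opendap_arome and mapper_opendap_met_nordic is driven from the Nansat constructor, which forwards extra keyword arguments to the mappers. A sketch assuming a MEPS OPeNDAP URL and variable name, both illustrative:

    import numpy as np
    from nansat import Nansat

    url = "https://thredds.met.no/thredds/dodsC/meps25epsarchive/..."  # elided
    n = Nansat(url,
               netcdf_dim={"time": np.datetime64("2020-06-01T06:00:00")},
               bands=["x_wind_10m"])  # assumed variable name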
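The band-number arithmetic in _calculate_band_number is easiest to see with concrete numbers. The sketch below mirrors the same logic as a plain loop (the original walks at most two dimensions via repeated get_band_number() calls); dimension names follow the reversed order produced by _get_dimension_info:

    def band_number(index4dim, dim_names):
        """Mirror of Mapper._calculate_band_number: gdal band numbers are
        1-based, and the first name in dim_names varies fastest."""
        number, multiplier = 1, 1
        for name in dim_names:
            number += index4dim[name]["index"] * multiplier
            multiplier *= index4dim[name]["size"]
        return number

    # Variable with dimensions (time=67, height6=1, y, x); spatial dims popped.
    idx = {"height6": {"index": 0, "size": 1},
           "time": {"index": 4, "size": 67}}
    print(band_number(idx, ["height6", "time"]))  # -> 1 + 0*1 + 4*1 = 5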
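Opendap._get_sub_filename encodes the chosen slice directly in the OPeNDAP subdataset reference: indexed (non-spatial) dimensions become numeric indices, while the spatial dimensions keep their names. A worked example with an illustrative URL; note that dim_sizes must preserve the variable's dimension order:

    from collections import OrderedDict
    from nansat.mappers.opendap import Opendap

    dim_sizes = OrderedDict([("time", 67), ("height6", 1), ("y", 949), ("x", 739)])
    index = {"time": {"index": 4, "size": 67}, "height6": {"index": 0, "size": 1}}
    print(Opendap._get_sub_filename("https://server/file.nc", "x_wind_10m",
                                    dim_sizes, index))
    # -> https://server/file.nc?x_wind_10m.x_wind_10m[4][0][y][x]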
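The search logic in get_gcp_shape exists because np.unique can miscount the GCP grid dimensions when pixel or line coordinates repeat. The core arithmetic of test1, with made-up numbers:

    size = 330             # total number of GCPs (pixel.size)
    gcp_y, gcp_x = 11, 28  # naive np.unique counts (illustrative)
    if size % gcp_x != 0 and size % gcp_y == 0:
        gcp_x = size // gcp_y  # -> 30; 11 * 30 == 330, so reshape(11, 30) works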
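With onclick renamed to __call__, the PointBrowser instance itself is now the matplotlib event handler, and get_points() returns the browser instead of the converted points, so callers convert the coordinates themselves. A minimal interactive sketch under that assumption:

    import numpy as np
    from nansat.pointbrowser import PointBrowser

    pb = PointBrowser(np.random.rand(200, 200))  # any 2D array
    pb.get_points()                     # click in the figure; blocks until closed
    points = pb._convert_coordinates()  # list of 2xN arrays, as before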