diff --git a/src/bitsea/instruments/check.py b/src/bitsea/instruments/check.py
index f97d4c31..e4b16f3a 100644
--- a/src/bitsea/instruments/check.py
+++ b/src/bitsea/instruments/check.py
@@ -20,8 +20,11 @@ def __init__(self,OUTDIR:Path, verboselevel=1, threshold_nitrate=1):
         if verboselevel = 1, a NetCDF will be printed out
         else, no files will be dumped
         '''
-        self.outdir = OUTDIR
-        OUTDIR.mkdir(parents=True, exist_ok=True)
+        if OUTDIR=="":
+            self.outdir=""
+        else:
+            self.outdir = OUTDIR
+            OUTDIR.mkdir(parents=True, exist_ok=True)
         self.verboselevel=verboselevel
         self.threshold_nitrate = threshold_nitrate
 
diff --git a/src/bitsea/postproc/aveScan.py b/src/bitsea/postproc/aveScan.py
index bdcb27d7..371be17a 100644
--- a/src/bitsea/postproc/aveScan.py
+++ b/src/bitsea/postproc/aveScan.py
@@ -415,7 +415,7 @@ def dumpPointProfiles(var):
     ncOUT_Pprofiles.createDimension("z" ,jpk)
     global VAR
     #VAR = VAR.copy()
-    VAR[~tmask]=np.NaN
+    VAR[~tmask]=np.nan
     P=PointProfiles(var)
     ncvar = ncOUT_Pprofiles.createVariable(var ,'f',('Ncruise','z'))
     ncvar[:] = P
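The check.py hunk lets the constructor accept an empty string in place of a Path so that no output directory is created when dumping is disabled; the aveScan.py hunk switches to the np.nan spelling kept by NumPy 2.0. A standalone sketch of the same guard; the class below is an illustrative stand-in, not the real check.check:

    from pathlib import Path

    class OutputSink:
        """Illustrative stand-in for check.check: '' disables dumping."""
        def __init__(self, outdir, verboselevel=1):
            if outdir == "":
                self.outdir = ""   # dumping disabled, nothing is created on disk
            else:
                self.outdir = Path(outdir)
                self.outdir.mkdir(parents=True, exist_ok=True)
            self.verboselevel = verboselevel

    sink = OutputSink("")          # no directory is created
    print(sink.outdir == "")       # True -> callers can test this before writing files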
diff --git a/src/bitsea/validation/online/MVR_gen.py b/src/bitsea/validation/online/MVR_gen.py
index 1d10f66d..a83cb161 100644
--- a/src/bitsea/validation/online/MVR_gen.py
+++ b/src/bitsea/validation/online/MVR_gen.py
@@ -50,6 +50,7 @@ def argument():
 from bitsea.commons import genUserDateList as DL
 from bitsea.commons import timerequestors as requestors
 import calendar
+import sys
 
 INDIRSAT = addsep(args.inputsat)
 INDIRFLOAT = addsep(args.inputfloat)
@@ -73,6 +74,8 @@ def argument():
 TLfloat = TimeList.fromfilenames(TI,INDIRFLOAT,"BioFloat_Weekly*nc", \
               prefix="BioFloat_Weekly_validation_", \
               dateformat='%Y%m%d')
+
+#### Sat
 
 TLsat = {}
 LISTforecast = ['f%d' %ii for ii in range(1,4)]
@@ -93,6 +96,8 @@ def argument():
               prefix="Validation_" + ff + "_YYYYMMDD_on_daily_Sat.", \
               dateformat='%Y%m%d')
 
+#### end Sat
+
 METRICS_NAMES = [
     'number of data values',
     'mean of product',
@@ -100,8 +105,8 @@ def argument():
     'mean squared error',
     'variance of product',
     'variance of reference',
-    'correlation',
     'anomaly correlation',
+    'correlation',
     ]
 
 
@@ -145,8 +150,8 @@ def argument():
     DICTmetricnames[tt]['mean squared error'] = 'BGC_CLASS4_CHL_RMS_SURF_BASIN' + stringlog
     DICTmetricnames[tt]['variance of product'] = 'MODEL_VARIANCE' + stringlog
     DICTmetricnames[tt]['variance of reference'] = 'SAT___VARIANCE' + stringlog
-    DICTmetricnames[tt]['correlation'] = 'BGC_CLASS4_CHL_CORR_SURF_BASIN' + stringlog
     DICTmetricnames[tt]['anomaly correlation'] = 'ANOMALY_CORRELATION' + stringlog
+    DICTmetricnames[tt]['correlation'] = 'BGC_CLASS4_CHL_CORR_SURF_BASIN' + stringlog
 
 
 file1 = TLsat['f1'].filelist[0]
@@ -176,7 +181,7 @@ def argument():
 
 for idate,datef in enumerate(AllDates):
     req = requestors.Daily_req(datef.year,datef.month,datef.day)
-    TIMELIST[idate] = datef.toordinal() - datetime.datetime(1970,1,1).toordinal()
+    TIMELIST[idate] = datef.toordinal() - datetime.datetime(1950,1,1).toordinal()
     for iforecast,ff in enumerate(LISTforecast):
         ii = TLsat[ff].select_one(req)
         if ii==None: continue
@@ -200,11 +205,11 @@ def argument():
 
 LISTmetrics_FLOAT = [
     'nobs',
-    'refmean',
     'modelmean',
-    'refvar',
+    'refmean',
+    'mse',
     'modelvar',
-    'rmse',
+    'refvar',
     'bias',
     'BGC_CLASS4_CHL_CORR_SURF_BASIN_LOG',
     'anomaly_corr',
@@ -219,11 +224,11 @@ def argument():
 DICTmetricnames['number of data values'] = 'nobs'
 DICTmetricnames['mean of product'] = 'modelmean'
 DICTmetricnames['mean of reference'] = 'refmean'
-DICTmetricnames['mean squared error'] = 'rmse'
+DICTmetricnames['mean squared error'] = 'mse'
 DICTmetricnames['variance of product'] = 'modelvar'
 DICTmetricnames['variance of reference'] = 'refvar'
-DICTmetricnames['correlation'] = 'corr'
 DICTmetricnames['anomaly correlation'] = 'anomaly_corr'
+DICTmetricnames['correlation'] = 'corr'
 
 
 DICTsubbasins = {}
@@ -253,7 +258,7 @@ def argument():
 DEPTHSlist = []
 for layername in M_depths:
     top,bottom_m = layername.split('-')
-    if float(top)>150 : continue
+    #if float(top)>150 : continue
     bottom = float(bottom_m[:-1])
     DEPTHSlist.append(bottom)
 Ndepths = len(DEPTHSlist)
@@ -261,100 +266,189 @@ def argument():
 Nmetrics = len(METRICS_NAMES)
 Nsub = len(AREA_NAMES)
 
-MetricsFLOAT = np.zeros((Ndates,Ndepths,Nmetrics,Nsub))
-MetricsFLOAT[:] = np.nan
-TIMELIST = np.zeros((Ndates,),np.float32)
-
+varlist=['P_l','N3n','O2o']
+Nvar=len(varlist)
+MetricsFLOAT = np.zeros((Ndates,Ndepths,Nmetrics,Nsub,Nvar))
+MetricsFLOAT[:] = 1.e+20
+TIMELIST = np.zeros((Ndates,),np.float32)
 for idate,datef in enumerate(AllDates):
-    TIMELIST[idate] = datef.toordinal() - datetime.datetime(1970,1,1).toordinal()
+    TIMELIST[idate] = datef.toordinal() - datetime.datetime(1950,1,1).toordinal()
     ii,diffseconds = TLfloat.find(datef,returndiff=True)
     if diffseconds/24/3600>3.5: continue
     filef = TLfloat.filelist[ii]
-
     M = NC.Dataset(filef,"r")
     for mm,metric in enumerate(METRICS_NAMES):
        for isub,subname in enumerate(AREA_NAMES):
            if subname=='Aegean Sea': continue
            varname = DICTmetricnames[0][metric].decode()
-           MetricsFLOAT[idate,:,mm,isub] = M.variables[varname][0,indexAREAS[subname],:Ndepths].data.copy()
+           for ivar, VAR in enumerate (varlist):
+               MetricsFLOAT[idate,:,mm,isub,ivar] = M.variables[varname][ivar,indexAREAS[subname],:Ndepths].data.copy()
 
 masknan = np.isnan(MetricsFLOAT)
 MetricsFLOAT[masknan] = 1.e+20
+
 outfile = 'product_quality_stats_' + productname + '_' + STARTTIME + '_' + END__TIME + '.nc'
 outfilepath = OUTDIR + outfile
+
+print(outfilepath)
+
+#import sys
+#sys.exit()
+
 S=NC.Dataset(outfilepath,"w")
 
 S.createDimension("time" ,None)
-S.createDimension("string_length",max_strlenght)
-S.createDimension("areas" ,len(AREA_NAMES))
-S.createDimension("metrics" ,len(METRICS_NAMES))
-S.createDimension("depths" ,Ndepths)
-S.createDimension("surface" ,1)
-S.createDimension("forecasts" ,len(leadtimes))
-
+S.createDimension("area" ,len(AREA_NAMES))
+S.createDimension("metric" ,len(METRICS_NAMES))
+S.createDimension("depth" ,1)
+S.createDimension("layer" ,Ndepths)
+S.createDimension("forecast" ,len(leadtimes))
+
+subbasins_renamed = [
+    "Alboran Sea",
+    "South West Med western part",
+    "South West Med eastern part",
+    "North West Med",
+    "Tyrrhenian Sea northern part",
+    "Tyrrhenian Sea southern part",
+    "Adriatic Sea northern part",
+    "Adriatic Sea southern part",
+    "Aegean Sea",
+    "Ionian Sea western part",
+    "Ionian Sea south-eastern part",
+    "Ionian Sea north-eastern part",
+    "Levantine Sea western part",
+    "Levantine Sea central-northern part",
+    "Levantine Sea central-southern part",
+    "Levantine Sea eastern part",
+    "Full domain"
+]
 
-ncvar = S.createVariable("area_names",'c',('areas','string_length'))
 x=np.array(ALLstring,dtype=str)
-ncvar[:] = x.view('U1').reshape(x.size,-1)[:Nsub,:]
-setattr(S.variables['area_names'],"long_name" ,"area names")
-setattr(S.variables['area_names'],"description","region over which statistics are aggregated")
-
-ncvar = S.createVariable("metric_names",'c',('metrics','string_length'))
-ncvar[:] = x.view('U1').reshape(x.size,-1)[Nsub:,:]
-setattr(S.variables['metric_names'],"long_name","metric names")
-ncvar = S.createVariable("forecasts",'f',('forecasts',))
+from netCDF4 import stringtochar
+# writing AREA as Variable
+vlen_str = S.createVLType(str, "vlen_str")
+ncvar = S.createVariable("area", vlen_str, ("area",))
+for i, name in enumerate(subbasins_renamed):
+    ncvar[i] = name
+
+#ncvar = S.createVariable("area", str, ("area",)) #
+#x=np.array(subbasins_renamed ,dtype=str)
+#ncvar[:] = subbasins_renamed #x[:Nsub]
+setattr(S.variables['area'],"long_name" ,"area")
+setattr(S.variables['area'], "description", "Geographical areas that are included in the Region of reference")
+
+# writing metric as Variable
+ncvar = S.createVariable("metric", str, ('metric'))
+ncvar[:] = x[Nsub:]
+setattr(S.variables['metric'], "long_name", "List of CLASS4 metrics")
+
+# writing forecast as Variable
+ncvar = S.createVariable("forecast",'f4',('forecast',) , fill_value=-999.0)
 ncvar[:] = leadtimes
-setattr(S.variables['forecasts'],"long_name","forecast lead time")
-setattr(S.variables['forecasts'],"units" ,"hours")
+setattr(S.variables['forecast'],"description","forecast lead time")
+setattr(S.variables['forecast'],"units" ,"hours")
+
+# writing depth as Variable
+ncvar = S.createVariable("depth",'f',('depth',))
+ncvar[:]=np.array(0)
+setattr(S.variables['depth'],"long_name" ,"depth")
+setattr(S.variables['depth'],"units" ,"m")
+setattr(S.variables['depth'], "description", "Surface level (0 m) over which statistics are calculated")
+
 
-ncvar = S.createVariable("depths",'f',('depths',))
+# writing layer as Variable
+ncvar = S.createVariable("layer",'f',('layer',))
 ncvar[:]=np.array(DEPTHSlist)
-setattr(S.variables['depths'],"long_name" ,"depths")
-setattr(S.variables['depths'],"positive" ,"down")
-setattr(S.variables['depths'],"units" ,"m")
-setattr(S.variables['depths'],"description","depth of the base of the vertical layer over which statistics are aggregated")
+setattr(S.variables['layer'],"long_name" ,"layer")
+setattr(S.variables['layer'],"positive" ,"down")
+setattr(S.variables['layer'],"units" ,"m")
+setattr(S.variables['layer'],"description","depth of the base of the vertical layer over which statistics are aggregated")
 
-ncvar = S.createVariable("time",'f',('time',))
+# writing time as Variable
+ncvar = S.createVariable('time', 'f4', ('time',), fill_value=-999.0)
 ncvar[:]=TIMELIST
 setattr(S.variables['time'],"long_name" ,"validity time")
 setattr(S.variables['time'],"standard_name","time")
-setattr(S.variables['time'],"units" ,"days since 1970-01-01")
+setattr(S.variables['time'],"units" ,"days since 1950-01-01 12:00:00")
 setattr(S.variables['time'],"axis" ,"T")
 
-for tt in ['LOG','NOLOG']:
+# writing stats_chlorophyll-a_sat as Variable ONLY NOLOG
+for tt in ['NOLOG']: #for tt in ['LOG','NOLOG']:
     if tt=='LOG':
         stringlog = '_' + tt
     else:
         stringlog = ''
-    statsname = 'stats_surface_chlorophyll' + stringlog.lower()
+    statsname = 'stats_chlorophyll-a_sat-l3' + stringlog.lower()
     if tt=='LOG':
-        parametername = 'log of Surface Chlorophyll'
+        parametername = 'log of Surface chlorophyll'
     else:
         parametername = "Surface Chlorophyll"
-    ncvar=S.createVariable(statsname,'f',('time','forecasts','surface','metrics','areas'),fill_value=1.e+20)
+    ncvar = S.createVariable(statsname, 'f4', ('time', 'forecast', 'depth', 'metric', 'area'), fill_value=-999.0)
     ncvar[:,:,0,:,:] = MetricsSAT[tt]
     setattr(S.variables[statsname], "parameter",parametername)
-    setattr(S.variables[statsname], "reference","Satellite observations from OC-TAC")
-    setattr(S.variables[statsname], "units" ,"log(mg Chl*m^-3)")
+    setattr(S.variables[statsname], "reference","OCEANCOLOUR_MED_BGC_L3_NRT_009_141")
+    setattr(S.variables[statsname], "metric_name","CHL-SURF-D-CLASS4-SAT-PQD_EAN-XYZ-MED")
+    setattr(S.variables[statsname], "units" ,"mg Chl*m^-3") #log removed
+
-statsname = 'stats_profile_chlorophyll'
+###### saving FLOAT
+# writing Chlorophyll as BGC-Argo Variable
+statsname = 'stats_chlorophyll-a_ins-pf'
 parametername = "Chlorophyll"
-ncvar=S.createVariable(statsname,'f',('time','forecasts','depths','metrics','areas'),fill_value=1.e+20)
-ncvar[:,:,:,:,:] = np.nan
-ncvar[:,0,:,:,:] = MetricsFLOAT
+ncvar=S.createVariable(statsname,'f4',('time', 'forecast', 'layer', 'metric', 'area'), fill_value=-999.0)
+
+ncvar[:,:,:,:,:] = np.nan
+ncvar[:,0,:,:,:] = MetricsFLOAT[:,:,:,:,0]
+ncvar[:,:,-3:,:,:] = np.nan
 setattr(S.variables[statsname], "parameter",parametername)
-setattr(S.variables[statsname], "reference","BGC-Argo floats")
+setattr(S.variables[statsname], "reference","BGC-Argo")
+setattr(S.variables[statsname], "metric_name","CHL-X_Y-D-CLASS4-PROF-PQD_EAN-XY-MED")
 setattr(S.variables[statsname], "units" ,"mg Chl*m^-3")
+
+# writing nitrate as BGC-Argo Variable
+statsname = 'stats_nitrate_ins-pf'
+parametername = "Nitrate"
+ncvar = S.createVariable(statsname, 'f4', ('time', 'forecast', 'layer', 'metric', 'area'), fill_value=-999.0)
+
+ncvar[:,:,:,:,:] = np.nan
+ncvar[:,0,:,:,:] = MetricsFLOAT[:,:,:,:,1]
+
+setattr(S.variables[statsname], "parameter",parametername)
+setattr(S.variables[statsname], "reference","BGC-Argo")
+setattr(S.variables[statsname], "metric_name","NO3-X_Y-D-CLASS4-PROF-PQD_EAN-XY-MED")
+setattr(S.variables[statsname], "units" ,"mmol NO3*m^-3")
+
+# writing Oxygen as BGC-Argo Variable
+statsname = 'stats_oxygen_ins-pf'
+parametername = "Oxygen"
+ncvar = S.createVariable(statsname, 'f4', ('time', 'forecast', 'layer', 'metric', 'area'), fill_value=-999.0)
+ncvar[:,:,:,:,:] = np.nan
+ncvar[:,0,:,:,:] = MetricsFLOAT[:,:,:,:,2]
+
+
+setattr(S.variables[statsname], "parameter",parametername)
+setattr(S.variables[statsname], "reference","BGC-Argo")
+setattr(S.variables[statsname], "metric_name","O2-X_Y-D-CLASS4-PROF-PQD_EAN-XY-MED")
+setattr(S.variables[statsname], "units" ,"mmol O2*m^-3")
+
+
+# writing metadata
 setattr(S,"contact","service.med.ogs@inogs.it")
 setattr(S,"product",productname)
 setattr(S,"start_date",STARTTIME)
 setattr(S,"end_date" ,END__TIME)
-setattr(S,"filename" ,outfile)
+setattr(S, "institution" , "OGS")
+from datetime import date
+today = date.today().isoformat()
+setattr(S, "bulletin_date", today)
+setattr(S, "frequency" , "monthly")
+#setattr(S,"filename" ,outfile)
 
 S.close()
diff --git a/src/bitsea/validation/online/RunSatValidation.py b/src/bitsea/validation/online/RunSatValidation.py
index 17d54bfd..e78bf882 100644
--- a/src/bitsea/validation/online/RunSatValidation.py
+++ b/src/bitsea/validation/online/RunSatValidation.py
@@ -95,7 +95,7 @@ def argument():
 
 def climfilename(CLIM_DIR, month,var):
     if var=='P_l':
-        return CLIM_DIR + '/ave.yyyy' + month + '15-00:00:00.nc'
+        return CLIM_DIR + f"/ave.yyyy{month}15-00:00:00.{var}.nc"
     else:
         return None
 
diff --git a/src/bitsea/validation/online/biofloats_ms_MVR.py b/src/bitsea/validation/online/biofloats_ms_MVR.py
index 23077775..df81da89 100644
--- a/src/bitsea/validation/online/biofloats_ms_MVR.py
+++ b/src/bitsea/validation/online/biofloats_ms_MVR.py
@@ -26,7 +26,7 @@ def argument():
         description="""
     Generates float validation data on a single week, from one single chain run.
     Week is centered on thursday.
-    Produces a single file, containing bias, rmse and number of measurements for each subbasin and layer
+    Produces a single file, containing bias, mse and number of measurements for each subbasin and layer
     for chlorophyll, nitrate and oxygen.
     In this approach we define the measurement as mean on layer of the float profile values.
     """,
@@ -86,14 +86,16 @@ def argument():
 Check_Obj = check.check(Path(""), verboselevel=0)
 
 LAYERLIST = [
-    Layer(0, 10),
-    Layer(10, 30),
-    Layer(30, 60),
-    Layer(60, 100),
-    Layer(100, 150),
-    Layer(150, 300),
-]
-VARLIST = ["P_l"]
+    Layer(0, 10),
+    Layer(10, 30),
+    Layer(30, 60),
+    Layer(60, 100),
+    Layer(100, 150),
+    Layer(150, 300),
+    Layer(300, 600),
+    ]
+
+VARLIST=['P_l','N3n','O2o']
 nSub = len(OGS.MVR.basin_list)
 nDepth = len(LAYERLIST)
 nVar = len(VARLIST)
@@ -114,15 +116,16 @@ def argument():
 REF___VARIANCE[:] = np.nan
 
 BIAS = np.zeros((nVar, nSub, nDepth), np.float32)
-RMSE = np.zeros((nVar, nSub, nDepth), np.float32)
+MSE = np.zeros((nVar, nSub, nDepth), np.float32)
 CORR = np.zeros((nVar, nSub, nDepth), np.float32)
 ANOMALY_CORR = np.zeros((nVar, nSub, nDepth), np.float32)
 BIAS[:] = np.nan
-RMSE[:] = np.nan
+MSE[:] = np.nan
 CORR[:] = np.nan
 ANOMALY_CORR[:] = np.nan
 
 TI = R.time_interval
+print(TI)
 TL = TimeList.fromfilenames(TI, BASEDIR / "PROFILES", "ave*nc")
 ALL_PROFILES = bio_float.FloatSelector(None, TI, Rectangle(-6, 36, 30, 46))
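The hunks above rename the RMSE arrays and the output variable to MSE and extend VARLIST and LAYERLIST; per profile, the "measurement" is the mean of the float values over a layer. A NumPy sketch of the two statistics over hypothetical layer means; the real script takes them from bitsea's Matchup object (M_LAYER.bias(), M_LAYER.MSE()), so the formulas here are only illustrative:

    import numpy as np

    # hypothetical per-profile layer means for one (variable, subbasin, layer)
    model_layer_mean = np.array([0.21, 0.18, 0.25, 0.19])
    ref_layer_mean   = np.array([0.20, 0.20, 0.22, 0.21])

    bias = np.nanmean(model_layer_mean - ref_layer_mean)
    mse  = np.nanmean((model_layer_mean - ref_layer_mean) ** 2)  # what the renamed 'mse' variable stores
    rmse = np.sqrt(mse)                                          # the old 'rmse' stays recoverable downstream

    print(bias, mse, rmse)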
@@ -133,7 +136,6 @@ def argument():
     TLclim.inputFrequency = "monthly"
     Mclim = Matchup_Manager(ALL_PROFILES, TLclim, BASEDIR_CLIM)
 
-
 for ivar, var in enumerate(VARLIST):
     print(var)
     for isub, sub in enumerate(OGS.MVR):
@@ -145,6 +147,8 @@ def argument():
         Matchup_object_list = []
         Matchup_object_list_clim = []
         for p in Profilelist:
+            #if p.ID() == '4903760_20251026-22:50:21_4.198_38.402': # If a float should be excluded, add its ID here.
+            #    continue
             floatmatchup = M.getMatchups2(
                 [p],
                 TheMask.zlevels,
@@ -153,7 +157,6 @@ def argument():
                 interpolation_on_Float=True,
             )
             Matchup_object_list.append(floatmatchup)
-
             if args.basedir_clim is not None:
                 floatmatchup_clim = Mclim.getMatchups2(
                     [p],
@@ -165,7 +168,6 @@ def argument():
                 Matchup_object_list_clim.append(floatmatchup_clim)
             else:
                 Matchup_object_list_clim.append(floatmatchup)
-
         for ilayer, layer in enumerate(LAYERLIST):
             MODEL_LAYER_MEAN = []  # one value for each suitable profile in (subbasin, layer)
             REF_LAYER_MEAN = []
@@ -190,7 +192,7 @@ def argument():
                 np.array(MODEL_LAYER_MEAN), np.array(REF_LAYER_MEAN)
             )
             BIAS[ivar, isub, ilayer] = M_LAYER.bias()
-            RMSE[ivar, isub, ilayer] = M_LAYER.RMSE()
+            MSE[ivar, isub, ilayer] = M_LAYER.MSE()
             MODEL_MEAN[ivar, isub, ilayer] = np.nanmean(MODEL_LAYER_MEAN)
             REF___MEAN[ivar, isub, ilayer] = np.nanmean(REF_LAYER_MEAN)
 
@@ -230,8 +232,8 @@ def argument():
 
 ncvar = ncOUT.createVariable("bias", "f", ("var", "sub", "depth"))
 ncvar[:] = BIAS
-ncvar = ncOUT.createVariable("rmse", "f", ("var", "sub", "depth"))
-ncvar[:] = RMSE
+ncvar = ncOUT.createVariable("mse", "f", ("var", "sub", "depth"))
+ncvar[:] = MSE
 ncvar = ncOUT.createVariable("npoints", "i", ("var", "sub", "depth"))
 ncvar[:] = NPROFILES
 ncvar = ncOUT.createVariable("nobs", "i", ("var", "sub", "depth"))
diff --git a/src/bitsea/validation/online/plot_monthlyvalidation_forMedeaf.py b/src/bitsea/validation/online/plot_monthlyvalidation_forMedeaf.py
index 4b6c86b6..21b88196 100644
--- a/src/bitsea/validation/online/plot_monthlyvalidation_forMedeaf.py
+++ b/src/bitsea/validation/online/plot_monthlyvalidation_forMedeaf.py
@@ -21,6 +21,12 @@ def argument():
                                 default = '',
                                 help = 'Input dir')
 
+    parser.add_argument( '--varname', '-vv',
+                                type = str,
+                                required = True,
+                                default = 'chlorophyll',
+                                help = 'Variable to plot: chlorophyll, nitrate or oxygen')
+
     return parser.parse_args()
 
 args = argument()
@@ -34,6 +40,7 @@ def argument():
 from bitsea.commons.utils import addsep
 from bitsea.commons.Timelist import TimeList
 from bitsea.basins import V2 as OGS
+import sys
 
 
 
@@ -49,52 +56,91 @@ def argument():
 satstats = []
 floatstats = []
 
+VAR = args.varname
+LIST_VAR = ['chlorophyll', 'nitrate', 'oxygen']
+
+if VAR not in LIST_VAR:
+    sys.exit(f"Selected varname '{VAR}' is not valid. Choose from: {', '.join(LIST_VAR)}")
+
+
 DICTvardim = {
-    'areas': 'area_names',
-    'metrics': 'metric_names',
-    'forecasts': 'forecasts',
-    'time': 'time',
-    'depths': 'depths',
+    'time': 'time',
+    'area': 'area',
+    'metric': 'metric',
+    'depth': 'depth',
+    'layer': 'layer',
+    'forecast': 'forecast',
     }
 
+
+def reshape_label(handles_labels):
+    """ Split the list into two sub-lists: one for "Mod" and one for "Ref"
+    """
+    mod_list = [item for item in handles_labels if 'Mod' in item[0]]
+    ref_list = [item for item in handles_labels if 'Ref' in item[0]]
+    reshaped_list = []
+    for mod, ref in zip(mod_list, ref_list):
+        reshaped_list.append(mod)
+        reshaped_list.append(ref)
+    return(reshaped_list)
+
+
 for ii,filein in enumerate(TLmvr.filelist):
-    #print(filein)
+    print(filein)
     MVR = NC.Dataset(filein,'r')
     datesmonth = MVR.variables['time'][:].data.copy()
     dates.extend(list(datesmonth))
-    satstats_month = MVR.variables['stats_surface_chlorophyll'][:].data.copy()
-    satstats.extend(list(satstats_month))
-    floatstats_month = MVR.variables['stats_profile_chlorophyll'][:].data.copy()
+    if VAR == "chlorophyll":
+        satstats_month = MVR.variables['stats_chlorophyll-a_sat-l3'][:].data.copy()
+        satstats.extend(list(satstats_month))
+        print('stats_'+VAR+'-a_ins-pf')
+        floatstats_month = MVR.variables['stats_'+VAR+'-a_ins-pf'][:].data.copy()
+        #o2;no3#
+    else:
+        NAMEVAR='stats_'+VAR+'_ins-pf'
+        print(NAMEVAR)
+        floatstats_month = MVR.variables['stats_'+VAR+'_ins-pf'][:].data.copy()
     floatstats.extend(list(floatstats_month))
     if ii==0:
         DICTdim_sat = {}
-        dimtuple = MVR.variables['stats_surface_chlorophyll'].dimensions
+        if VAR == "chlorophyll":
+            dimtuple = MVR.variables['stats_chlorophyll-a_sat-l3'].dimensions
+        else:
+            dimtuple = MVR.variables['stats_'+VAR+'_ins-pf' ].dimensions
+
         for iid,dd in enumerate(dimtuple):
-            if 'surf' in dd:
-                DICTdim_sat[dd] = ['surface',iid]
-                continue
-            varname = DICTvardim[dd]
-            vv = MVR.variables[varname][:].data.copy()
-            if vv.dtype.kind=='f':
-                DICTdim_sat[dd] = [vv,iid]
-            if vv.dtype.kind=='S':
-                vLIST = []
-                for iiv in range(vv.shape[0]):
-                    vLIST.append(''.join([vv[iiv,kk].decode("utf-8") for kk in range(vv.shape[1])]))
-                DICTdim_sat[dd] = [vLIST,iid]
-
-
+            if VAR == "chlorophyll":
+                if 'depth' in dd:
+                    DICTdim_sat[dd] = ['depth',iid]
+                    continue
+            else:
+                if 'lay' in dd:
+                    DICTdim_sat[dd] = ['layer',iid]
+                    continue
+            varname = DICTvardim[dd]
+            vv = MVR.variables[varname][:]
+            if vv.dtype.kind=='f':
+                DICTdim_sat[dd] = [vv,iid]
+            if vv.dtype.kind=='S' or vv.dtype.kind=='O':
+                vLIST = []
+                #for iiv in range(vv.shape[0]):
+                    #vLIST.append(''.join([vv[iiv,kk].decode("utf-8") for kk in range(vv.shape[1])]))
+                vLIST=list(vv)
+                DICTdim_sat[dd] = [vLIST,iid]
+
         DICTdim_float = {}
-        dimtuple = MVR.variables['stats_profile_chlorophyll'].dimensions
+        if VAR == "chlorophyll":
+            dimtuple = MVR.variables['stats_'+VAR+'-a_ins-pf' ].dimensions
+        else:
+            dimtuple = MVR.variables['stats_'+VAR+'_ins-pf' ].dimensions
        for iid,dd in enumerate(dimtuple):
            varname = DICTvardim[dd]
-           vv = MVR.variables[varname][:].data.copy()
+           vv = MVR.variables[varname][:]
            if vv.dtype.kind=='f':
                DICTdim_float[dd] = [vv,iid]
-           if vv.dtype.kind=='S':
-               vLIST = []
-               for iiv in range(vv.shape[0]):
-                   vLIST.append(''.join([vv[iiv,kk].decode("utf-8") for kk in range(vv.shape[1])]))
+           if vv.dtype.kind=='S' or vv.dtype.kind=='O':
+               vLIST = list(vv)
+               #for iiv in range(vv.shape[0]):
+               #    vLIST.append(''.join([vv[iiv,kk].decode("utf-8") for kk in range(vv.shape[1])]))
                DICTdim_float[dd] = [vLIST,iid]
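The reader above now accepts dimension variables either as the old fixed-width character arrays (dtype kind 'S') or as the variable-length strings written by the updated MVR_gen.py (kind 'O'), keeping a plain list in both cases. A hedged sketch with a throw-away in-memory file; the area names are just examples:

    from netCDF4 import Dataset

    ds = Dataset("inmemory.nc", "w", diskless=True)
    ds.createDimension("area", 3)
    areas = ds.createVariable("area", str, ("area",))   # variable-length string variable
    for i, name in enumerate(["Alboran Sea", "North West Med", "Full domain"]):
        areas[i] = name

    vv = ds.variables["area"][:]
    print(vv.dtype.kind)   # 'O' -> list(vv) already yields Python strings, no char-array join needed
    print(list(vv))
    ds.close()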
@@ -105,7 +151,7 @@ def argument():
 
 dates_datetime = []
 for dd in dates:
-    ddordinal = int(dd) + datetime.datetime(1970,1,1).toordinal()
+    ddordinal = int(dd) + datetime.datetime(1950,1,1).toordinal()
     dd_datetime = datetime.datetime.fromordinal(ddordinal)
     dates_datetime.append(dd_datetime)
 
@@ -118,7 +164,7 @@ def argument():
             break
 
 DICTsub_shortname = {}
-for subname in DICTdim_sat['areas'][0]:
+for subname in DICTdim_sat['area'][0]:
     for sub in OGS.P.basin_list:
         if subname in sub.extended_name:
             DICTsub_shortname[subname] = sub.name
@@ -138,109 +184,203 @@ def argument():
     'mean squared error': 2,
     'variance of product': 3,
     'variance of reference': 3,
-    'correlation': 4,
-    'anomaly correlation': 5,
+    'anomaly correlation': 4,
+    'correlation': 5,
     }
 
 cmap = plt.get_cmap("Dark2")
+cmap_edge = plt.get_cmap("gray")
 
 plt.close('all')
-indmetrics = DICTdim_sat['metrics'][1]
-indsub = DICTdim_sat['areas'][1]
-
-noforecasts = ['number of data values','mean of reference','variance of reference']
-for isub,subname in enumerate(DICTdim_sat['areas'][0]):
-    #print (subname)
-    #print ('...........SAT.....')
-    fig,axs = plt.subplots(3,2,sharex=True,figsize=[14,8])#,sharey=True)
-    for iim,mm in enumerate(DICTdim_sat['metrics'][0]):
-        #print (mm)
-        nax = DICTvargroup[mm]
-        ix_ax = int(np.floor(nax/2))
-        iy_ax = nax-2*ix_ax
-        plt.sca(axs[ix_ax,iy_ax])
-        for iif,ff in enumerate(DICTdim_sat['forecasts'][0]):
-            if 'reference' in mm:
-                label = mm
-            else:
-                label = mm + ' ' + str(ff)
-            if ff<0:
-                linestyle=':'
-            else:
-                linestyle='-'
-
-            DICTind = {
-                indmetrics: iim,
-                indsub: isub,
-                DICTdim_sat['forecasts'][1]: iif,
-                }
-            selection_sat = [DICTind.get(dd,slice(None)) for dd in range(len(DICTdim_sat.keys()))]
-            plt.plot(dates_datetime,array_satstats[tuple(selection_sat)],
-                color=cmap(iim),
-                linestyle=linestyle,
-                alpha=DICTalpha[ff],
-                label=label)
-            if mm in noforecasts: break
-
-
-    for axx in axs.flatten():
-        plt.sca(axx)
-        plt.legend()
-        plt.grid()
-    plt.suptitle(subname)
-    fig.autofmt_xdate()
-
-    # plt.show(block=False)
-    plt.savefig(OUTDIR + '/satmetric' + DICTsub_shortname[subname].upper() + '.png')
-
+if VAR == 'chlorophyll':
+
+    indmetrics = DICTdim_sat['metric'][1]
+    indsub = DICTdim_sat['area'][1]
+
+    noforecasts = ['number of data values','mean of reference','variance of reference']
+    for isub,subname in enumerate(DICTdim_sat['area'][0]):
+        fig,axs = plt.subplots(3,2,sharex=True,figsize=[14,8])#,sharey=True)
+        for iim,mm in enumerate(DICTdim_sat['metric'][0]):
+            nax = DICTvargroup[mm]
+            ix_ax = int(np.floor(nax/2))
+            iy_ax = nax-2*ix_ax
+            plt.sca(axs[ix_ax,iy_ax])
+            for iif,ff in enumerate(DICTdim_sat['forecast'][0]):
+                if 'reference' in mm:
+                    label = mm
+                else:
+                    label = mm + ' ' + str(ff)
+                if ff<0:
+                    linestyle=':'
+                else:
+                    linestyle='-'
+
+                DICTind = {
+                    indmetrics: iim,
+                    indsub: isub,
+                    DICTdim_sat['forecast'][1]: iif,
+                    }
+                selection_sat = [DICTind.get(dd,slice(None)) for dd in range(len(DICTdim_sat.keys()))]
+                plt.plot(dates_datetime,array_satstats[tuple(selection_sat)],
+                    color=cmap(iim),
+                    linestyle=linestyle,
+                    alpha=DICTalpha[ff],
+                    label=label)
+                if mm in noforecasts: break
+
+        for axx in axs.flatten():
+            plt.sca(axx)
+            plt.legend()
+            plt.grid()
+        plt.suptitle(subname)
+        fig.autofmt_xdate()
+        BASIN=subname.replace(' ','_')
+        plt.savefig(OUTDIR + '/satmetric_' + BASIN + '.png')
+
+else:
+    pass
 
 # float
 plt.close('all')
-indmetrics = DICTdim_sat['metrics'][1]
-indsub = DICTdim_sat['areas'][1]
+indmetrics = DICTdim_sat['metric'][1]
+indsub = DICTdim_sat['area'][1]
+indlayer = DICTdim_float['layer'][1]
 
-LISTalpha_depth = [1-x*2/10. for x in range(len(DICTdim_float['depths'][0]))]
+LISTalpha_depth = np.linspace(1,0.3, len(DICTdim_float['layer'][0]))
 
 for sub in OGS.MVR.basin_list:
-    #print (sub.name)
-    #print ('..........FLOAT......')
-    fig,axs = plt.subplots(3,2,sharex=True,figsize=[14,8])#,sharey=True)
-    for iim,mm in enumerate(DICTdim_sat['metrics'][0]):
-        #print (mm)
+    print (sub.name)
+    handles_labels = []
+    fig,axs = plt.subplots(3,2,sharex=True,figsize=[14,12])#,sharey=True)
+    for iim,mm in enumerate(DICTdim_sat['metric'][0]):   # loop over all metrics
+        #import sys
+        #sys.exit('carol')
+        #10-60-100-150m chla
+        #10-60-100-150-600m doxy no3
         nax = DICTvargroup[mm]
         ix_ax = int(np.floor(nax/2))
         iy_ax = nax-2*ix_ax
-        plt.sca(axs[ix_ax,iy_ax])
-        label = mm
-        for iid,depth in enumerate(DICTdim_float['depths'][0]):
-            if 'reference' in mm:
-                label = mm
-            if iid>0:
-                label = None
-
-
-            DICTind = {
-                indmetrics: iim,
-                indsub: DICTsubgroup_index[sub.name],
-                DICTdim_sat['forecasts'][1]: 0,
-                DICTdim_float['depths'][1]: iid,
-                }
-            selection = [DICTind.get(dd,slice(None)) for dd in range(len(DICTdim_float.keys()))]
-            plt.plot(dates_datetime,array_floatstats[tuple(selection)],
-                color=cmap(iim),
-                alpha=LISTalpha_depth[iid],
-                label=label)
-
+        ax = axs[ix_ax, iy_ax]
+        iii=0
+        for iid,depth in enumerate(DICTdim_float['layer'][0]):
+            if VAR == 'chlorophyll':
+                LISTDEPTH=np.array([10,60,100,150])
+                LISTLINEE=np.array(['10','100','150'])
+            else:
+                LISTDEPTH = np.array([10,60,100,150,600])
+                LISTLINEE = np.array([10,100,600])
+            if int(depth) in LISTDEPTH:
+                group_label = "Mod" if iim == 0 else "Ref"
+                label = f"{depth}m {group_label}"
+                lab = str(int(depth))
+                CMAP=cmap(0)
+                title= mm.capitalize()
+
+                if str(nax) in ['1','3']:   # see DICTvargroup
+                    title= mm.split()[0].capitalize() + ' Mod vs Ref'
+                    if 'ref' in mm:
+                        CMAP=cmap(1)
+
+                ax.set_title(title)
+
+                DICTind = {
+                    indmetrics: iim,
+                    indsub: DICTsubgroup_index[sub.name],
+                    DICTdim_sat['forecast'][1]: 0,
+                    DICTdim_float['layer'][1]: iid,
+                    }
+                selection = [DICTind.get(dd,slice(None)) for dd in range(len(DICTdim_float.keys()))]
+
+                #
+                arr=array_floatstats[tuple(selection)]
+                if mm.startswith('number'):
+                    arr[arr == 0] = np.nan
+
+                #ax.plot(dates_datetime,
+                #        arr,
+                #        color='k',
+                #        linewidth=0.4,
+                #        alpha=LISTalpha_depth[iid])
+
+                if int(depth) not in LISTLINEE:
+                    #pass
+                    line=ax.plot(dates_datetime,
+                        arr,
+                        color=CMAP,
+                        linewidth=1,
+                        alpha=LISTalpha_depth[iid])
+
+                    ax.annotate(
+                        f"{int(depth)}",
+                        xy=(dates_datetime[0], arr[0]),   # first point of the line
+                        xytext=(-10, -2),                 # offset upward (in pixels)
+                        textcoords="offset points",
+                        ha="center",
+                        va="bottom",
+                        fontsize=8,
+                        color=CMAP,
+                        alpha=LISTalpha_depth[iid]
+                        )
+
+                else:
+
+                    if (iii % 2) == 0:   # green scale
+                        line = ax.scatter(dates_datetime,
+                            arr,
+                            facecolor=CMAP,
+                            edgecolors='k',
+                            alpha=LISTalpha_depth[iid],
+                            label=label,
+                            s=20,
+                            marker='o')
+
+                    else:                # orange scale
+                        line = ax.scatter(dates_datetime,
+                            arr,
+                            facecolor=CMAP,
+                            edgecolors='k',
+                            alpha=LISTalpha_depth[iid],
+                            label=label,
+                            s=20,
+                            marker='s')
+
+                iii+=1
+                y_values = array_floatstats[tuple(selection)]
+                x_values = dates_datetime
+
+                if iim == 0 or iim==2:
+                    handles_labels.append((label, line))
+                else:
+                    pass
     for axx in axs.flatten():
-        plt.sca(axx)
-        plt.legend()
-        plt.grid()
-    plt.suptitle(sub.name)
+        #axx.grid()
+        axx.grid(color='silver', linewidth=0.3)
+
+    plt.suptitle(sub.name +' '+ VAR)
     fig.autofmt_xdate()
-    # plt.show(block=False)
-    plt.savefig(OUTDIR + '/floatmetric' + sub.name.upper() + '.png')
+
+    handles_labels=reshape_label(handles_labels)
+    handles_labels_mod = []
+    handles_labels_ref = []
+
+
+    labels_seen = set()
+    handles, labels = [], []
+    for label, handle in handles_labels:
+        if label not in labels_seen:
+            labels_seen.add(label)
+            labels.append(label)
+            handles.append(handle)
+
+
+    fig.legend(handles, labels,
+               loc='lower center',
+               ncol=len(LISTDEPTH),   #iid+1,
+               bbox_to_anchor=(0.5, -0.))   # space below the axes
+
+    plt.subplots_adjust(top=0.93, left=0.04, bottom=0.15, right=0.97)
+    plt.savefig(OUTDIR + '/'+VAR +'_floatmetric_' + sub.name.upper() + '.png')
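For reference, the panel layout shared by the satellite and float figures: DICTvargroup assigns each metric a group index nax, and the floor-division pair below turns it into a (row, column) position on the 3x2 axes grid. Only the entries visible in the hunk above are reproduced; the remaining metrics are assumed to fill groups 0 and 1 in the same way:

    import numpy as np

    DICTvargroup_tail = {            # entries shown in the diff; groups 0 and 1 omitted
        'mean squared error': 2,
        'variance of product': 3,
        'variance of reference': 3,
        'anomaly correlation': 4,
        'correlation': 5,
    }

    for metric, nax in DICTvargroup_tail.items():
        ix_ax = int(np.floor(nax / 2))   # row index: 1, 1, 1, 2, 2
        iy_ax = nax - 2 * ix_ax          # column index: 0, 1, 1, 0, 1
        print(f"{metric:25s} -> axs[{ix_ax},{iy_ax}]")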