diff --git a/docs/source/_autosummary/pvdeg.geospatial.plot_sparse_analysis_land.rst b/docs/source/_autosummary/pvdeg.geospatial.plot_sparse_analysis_land.rst new file mode 100644 index 0000000..16e8a98 --- /dev/null +++ b/docs/source/_autosummary/pvdeg.geospatial.plot_sparse_analysis_land.rst @@ -0,0 +1,6 @@ +pvdeg.geospatial.plot\_sparse\_analysis\_land +============================================= + +.. currentmodule:: pvdeg.geospatial + +.. autofunction:: plot_sparse_analysis_land \ No newline at end of file diff --git a/docs/source/_autosummary/pvdeg.geospatial.rst b/docs/source/_autosummary/pvdeg.geospatial.rst index 4524da2..159523f 100644 --- a/docs/source/_autosummary/pvdeg.geospatial.rst +++ b/docs/source/_autosummary/pvdeg.geospatial.rst @@ -34,6 +34,7 @@ pvdeg.geospatial pvdeg.geospatial.plot_Europe pvdeg.geospatial.plot_USA pvdeg.geospatial.plot_sparse_analysis + pvdeg.geospatial.plot_sparse_analysis_land pvdeg.geospatial.start_dask pvdeg.geospatial.template_parameters pvdeg.geospatial.zero_template @@ -157,6 +158,13 @@ pvdeg.geospatial .. minigallery:: pvdeg.geospatial.plot_sparse_analysis :add-heading: + .. autofunction:: plot_sparse_analysis_land + + .. _sphx_glr_backref_pvdeg.geospatial.plot_sparse_analysis_land: + + .. minigallery:: pvdeg.geospatial.plot_sparse_analysis_land + :add-heading: + .. autofunction:: start_dask .. _sphx_glr_backref_pvdeg.geospatial.start_dask: diff --git a/docs/source/_autosummary/pvdeg.utilities.display_json.rst b/docs/source/_autosummary/pvdeg.utilities.display_json.rst new file mode 100644 index 0000000..a59150c --- /dev/null +++ b/docs/source/_autosummary/pvdeg.utilities.display_json.rst @@ -0,0 +1,6 @@ +pvdeg.utilities.display\_json +============================= + +.. currentmodule:: pvdeg.utilities + +.. 
autofunction:: display_json \ No newline at end of file diff --git a/docs/source/_autosummary/pvdeg.utilities.linear_normalize.rst b/docs/source/_autosummary/pvdeg.utilities.linear_normalize.rst new file mode 100644 index 0000000..8fae071 --- /dev/null +++ b/docs/source/_autosummary/pvdeg.utilities.linear_normalize.rst @@ -0,0 +1,6 @@ +pvdeg.utilities.linear\_normalize +================================= + +.. currentmodule:: pvdeg.utilities + +.. autofunction:: linear_normalize \ No newline at end of file diff --git a/docs/source/_autosummary/pvdeg.utilities.merge_sparse.rst b/docs/source/_autosummary/pvdeg.utilities.merge_sparse.rst new file mode 100644 index 0000000..9da49b5 --- /dev/null +++ b/docs/source/_autosummary/pvdeg.utilities.merge_sparse.rst @@ -0,0 +1,6 @@ +pvdeg.utilities.merge\_sparse +============================= + +.. currentmodule:: pvdeg.utilities + +.. autofunction:: merge_sparse \ No newline at end of file diff --git a/docs/source/_autosummary/pvdeg.utilities.read_material.rst b/docs/source/_autosummary/pvdeg.utilities.read_material.rst new file mode 100644 index 0000000..875bb77 --- /dev/null +++ b/docs/source/_autosummary/pvdeg.utilities.read_material.rst @@ -0,0 +1,6 @@ +pvdeg.utilities.read\_material +============================== + +.. currentmodule:: pvdeg.utilities + +.. 
autofunction:: read_material \ No newline at end of file diff --git a/docs/source/_autosummary/pvdeg.utilities.rst b/docs/source/_autosummary/pvdeg.utilities.rst index fe0b340..95e47e5 100644 --- a/docs/source/_autosummary/pvdeg.utilities.rst +++ b/docs/source/_autosummary/pvdeg.utilities.rst @@ -21,17 +21,22 @@ pvdeg.utilities pvdeg.utilities.compare_templates pvdeg.utilities.convert_tmy + pvdeg.utilities.display_json pvdeg.utilities.fix_metadata pvdeg.utilities.geospatial_from_csv pvdeg.utilities.get_kinetics pvdeg.utilities.get_state_bbox pvdeg.utilities.gid_downsampling + pvdeg.utilities.linear_normalize + pvdeg.utilities.merge_sparse pvdeg.utilities.meta_as_dict pvdeg.utilities.new_id pvdeg.utilities.nrel_kestrel_check pvdeg.utilities.quantile_df + pvdeg.utilities.read_material pvdeg.utilities.remove_scenario_filetrees pvdeg.utilities.restore_gids + pvdeg.utilities.search_json pvdeg.utilities.strip_normalize_tmy pvdeg.utilities.tilt_azimuth_scan pvdeg.utilities.ts_gid_df @@ -65,6 +70,13 @@ pvdeg.utilities .. minigallery:: pvdeg.utilities.convert_tmy :add-heading: + .. autofunction:: display_json + + .. _sphx_glr_backref_pvdeg.utilities.display_json: + + .. minigallery:: pvdeg.utilities.display_json + :add-heading: + .. autofunction:: fix_metadata .. _sphx_glr_backref_pvdeg.utilities.fix_metadata: @@ -100,6 +112,20 @@ pvdeg.utilities .. minigallery:: pvdeg.utilities.gid_downsampling :add-heading: + .. autofunction:: linear_normalize + + .. _sphx_glr_backref_pvdeg.utilities.linear_normalize: + + .. minigallery:: pvdeg.utilities.linear_normalize + :add-heading: + + .. autofunction:: merge_sparse + + .. _sphx_glr_backref_pvdeg.utilities.merge_sparse: + + .. minigallery:: pvdeg.utilities.merge_sparse + :add-heading: + .. autofunction:: meta_as_dict .. _sphx_glr_backref_pvdeg.utilities.meta_as_dict: @@ -128,6 +154,13 @@ pvdeg.utilities .. minigallery:: pvdeg.utilities.quantile_df :add-heading: + .. autofunction:: read_material + + .. 
_sphx_glr_backref_pvdeg.utilities.read_material: + + .. minigallery:: pvdeg.utilities.read_material + :add-heading: + .. autofunction:: remove_scenario_filetrees .. _sphx_glr_backref_pvdeg.utilities.remove_scenario_filetrees: @@ -142,6 +175,13 @@ pvdeg.utilities .. minigallery:: pvdeg.utilities.restore_gids :add-heading: + .. autofunction:: search_json + + .. _sphx_glr_backref_pvdeg.utilities.search_json: + + .. minigallery:: pvdeg.utilities.search_json + :add-heading: + .. autofunction:: strip_normalize_tmy .. _sphx_glr_backref_pvdeg.utilities.strip_normalize_tmy: diff --git a/docs/source/_autosummary/pvdeg.utilities.search_json.rst b/docs/source/_autosummary/pvdeg.utilities.search_json.rst new file mode 100644 index 0000000..11f7324 --- /dev/null +++ b/docs/source/_autosummary/pvdeg.utilities.search_json.rst @@ -0,0 +1,6 @@ +pvdeg.utilities.search\_json +============================ + +.. currentmodule:: pvdeg.utilities + +.. autofunction:: search_json \ No newline at end of file diff --git a/docs/source/whatsnew/index.rst b/docs/source/whatsnew/index.rst index 78b3536..8298a18 100644 --- a/docs/source/whatsnew/index.rst +++ b/docs/source/whatsnew/index.rst @@ -4,7 +4,8 @@ What's New ========== PVDegradationTools (pvdeg) change log: - +.. include:: releases/v0.4.3.rst +.. include:: releases/v0.4.2.rst .. include:: releases/v0.4.1.rst .. include:: releases/v0.4.0.rst .. 
include:: releases/v0.3.3.rst diff --git a/docs/source/whatsnew/releases/v0.4.2.rst b/docs/source/whatsnew/releases/v0.4.2.rst index 597e551..d165d41 100644 --- a/docs/source/whatsnew/releases/v0.4.2.rst +++ b/docs/source/whatsnew/releases/v0.4.2.rst @@ -13,4 +13,4 @@ Tests Contributors ~~~~~~~~~~~~ * Martin Springer (:ghuser:`martin-springer`) -* Aidan Wesley (:ghuser:`AidanWesley``) +* Aidan Wesley (:ghuser:`AidanWesley`) \ No newline at end of file diff --git a/docs/source/whatsnew/releases/v0.4.3.rst b/docs/source/whatsnew/releases/v0.4.3.rst new file mode 100644 index 0000000..2b2691d --- /dev/null +++ b/docs/source/whatsnew/releases/v0.4.3.rst @@ -0,0 +1,14 @@ +v0.4.3 (2024-10-10) +=================== + +Enhancements +------------ +Suite of utility functions to facilitate accessing material parameter json files. + +* ``pvdeg.utilities.read_material`` creates a public API to replace the private ``pvdeg.utilities._read_material`` function (to be deprecated soon) +* ``pvdeg.utilities.search_json`` to search jsons and identify keys for desired materials. +* ``pvdeg.utilities.display_json`` to view 2 level jsons in Jupyter Notebooks using HTML. + +Contributors +------------ +* Tobin Ford (:ghuser:`tobin-ford`) \ No newline at end of file diff --git a/pvdeg/__init__.py b/pvdeg/__init__.py index 6613597..0b770fc 100644 --- a/pvdeg/__init__.py +++ b/pvdeg/__init__.py @@ -12,7 +12,7 @@ from . import humidity from . import letid from . import montecarlo -from . import scenario +from .scenario import Scenario, GeospatialScenario from . import spectral from . import symbolic from . 
import standards diff --git a/pvdeg/data/H2Opermeation.json b/pvdeg/data/H2Opermeation.json index 4f2502a..4e72e19 100644 --- a/pvdeg/data/H2Opermeation.json +++ b/pvdeg/data/H2Opermeation.json @@ -22,11 +22,11 @@ "source": "unpublished measurements", "Fickian": true, "Ead": 61.4781422330562, - "Do": 25790.6020262449, + "Do": 257.906020262449, "Eas": 5.88752263485353, - "So": 0.00982242435416737, - "Eap": 67.3656648679097, - "Po": 5559396276.60964 + "So": 0.0982242435416737, + "Eap": 66.9611315410624, + "Po": 189338932521.637 }, "W003": { "name": "Coveme", diff --git a/pvdeg/data/O2permeation.json b/pvdeg/data/O2permeation.json index 9ecc24d..18eedd3 100644 --- a/pvdeg/data/O2permeation.json +++ b/pvdeg/data/O2permeation.json @@ -36,10 +36,10 @@ "Fickian": true, "Ead": 29.43112031, "Do": 0.129061678, - "Eas": 32.3137806, - "So": 87.81142774, - "Eap": 61.7449009, - "Po": 97917899126 + "Eas": 16.6314948252219, + "So": 0.136034525059804, + "Eap": 49.1083457348515, + "Po": 528718258.338532 }, "OX004": { "name": "AAA polyamide backsheet", diff --git a/pvdeg/diffusion.py b/pvdeg/diffusion.py index c299def..e40f26d 100644 --- a/pvdeg/diffusion.py +++ b/pvdeg/diffusion.py @@ -8,25 +8,24 @@ from pvdeg import DATA_DIR from numba import jit import numpy as np +from typing import Callable def esdiffusion( temperature, - edge_seal=None, - encapsulant=None, + edge_seal="OX005", + encapsulant="OX003", edge_seal_width=1.5, encapsulant_width=10, seal_nodes=20, encapsulant_nodes=50, press = 0.209, repeat = 1, - Dos=None, - Eads=None, - Sos=None, - Eass=None, - Doe=None, - Eade=None, - Soe=None, - Ease=None, + Dos=None, Eads=None, Sos=None,Eass=None, Doe=None, Eade=None, Soe=None, Ease=None, + react_func = None, + deg_func = None, + deg = None, + perm = None, + printout = True, **kwarg ): @@ -58,40 +57,68 @@ def esdiffusion( This is the partial pressure of oxygen. repeat : integer, optional This is the number of times to do the calculation for the whole dataset. E.g. 
repeat the 1-y data for 10 years. + react_func : string, optional + This is the name of the function that will be calculating the consumption of oxygen. + deg_func : string, optional + This is the name of the function that will be calculating the degradation. + printout : Boolean + This allows you to suppress printing messages during code execution by setting it to false. + deg : Numpy Array + One can send in an array with predefined degradation data already in it if desired. + I.e. you can have some pre degradation or areas that require more degradation. + perm : Numpy Array + One can send in an array with the permeant already in it if desired. kwargs : dict, optional - If es or enc are left at 'None' then the use parameters, Dos, Eads, Sos, Eass, Doe, Eade, Soe, Ease in units of + If edge_seal or encapsulant are set at 'None' then you can enter your own parameters for, Dos, Eads, Sos, Eass, Doe, Eade, Soe, Ease in units of [cm²/s], [g/cm³], or [kJ/mol] for diffusivity, solubility, or activation energy respectively. If specific parameters are provided, - then the JSON ones can be overridden. + then the JSON ones will be overridden. + Should also contain any keyword arguments that need to be passed to the function calculating consumption of the permeant or degradation. Returns ------- ingress_data : pandas.DataFrame - This will give the concentration profile as a function of temperature along with degradation parameters in futur iterations.. + This will give the concentration profile as a function of time. + If there is a degradation function called, this data will also be included on a node by node basis under a third index. 
""" with open(os.path.join(DATA_DIR, "O2permeation.json")) as user_file: O2 = json.load(user_file) user_file.close() - # O2 - if edge_seal == None: - esp = O2.get("OX005") # This is the number for the edge seal in the json file - else: - esp = O2.get(edge_seal) + with open(os.path.join(DATA_DIR, "H2Opermeation.json")) as user_file: + H2O = json.load(user_file) + user_file.close() - if encapsulant == None: - encp = O2.get( - "OX003" - ) # This is the number for the encapsulant in the json file + if edge_seal[0:2]=="OX": + esp = O2.get(edge_seal) + if printout: + print("Oxygen ingress parameters loaded for the edge seal.") else: - encp = O2.get(encapsulant) + if edge_seal[0:1]=="W": + esp = H2O.get(edge_seal) + if printout: + print("Water ingress parameters loaded for the edge seal.") + else: + print("Edge seal material not found") - try: - print("The edge seal is", esp.get("name"), ".") - print("The encapsulant is", encp.get("name"), ".") - except: - print("") + if encapsulant[0:2]=="OX": + encp = O2.get(encapsulant) + if printout: + print("Oxygen ingress parameters loaded for the encapsulant.") + else: + if encapsulant[0:1]=="W": + encp = H2O.get(encapsulant) + if printout: + print("Water ingress parameters loaded for the eencapsulant.") + else: + print("Encapsulant material not found") + if printout: + try: + print("The edge seal is", esp.get("name"), ".") + print("The encapsulant is", encp.get("name"), ".") + except: + print("Unknown material selected.") - # These are the edge seal oxygen permeation parameters + # These are the edge seal oxygen or water permeation parameters if Dos == None: Dos = esp.get("Do") if Eads == None: @@ -136,12 +163,14 @@ def esdiffusion( perm_mid = np.array( np.zeros((seal_nodes + encapsulant_nodes + 3)), dtype=np.float64 - ) # This is the profile at a transition point between output points. 
- perm = np.array( - np.zeros( - (len(temperature) * repeat - repeat + 1, seal_nodes + encapsulant_nodes + 3), dtype=np.float64 - ) - ) # It adds in two nodes for the interface concentration for both materials and one for the hour column. + ) # This is the profile at a transition point between output points. + if perm == None: + perm = np.array( + np.zeros( + (len(temperature) * repeat - repeat + 1, seal_nodes + encapsulant_nodes + 3), dtype=np.float64 + ) + ) # It adds in two nodes for the interface concentration for both materials and one for the hour column. + temperature = pd.DataFrame( temperature, columns=["module_temperature", "time", "time_step"] ) # This adds the number of time steps to be used as a subdivision between data points. [s] @@ -163,7 +192,8 @@ def esdiffusion( time_step[row] = np.trunc(fos / f_max) + 1 else: time_step[row] = np.trunc(foe / f_max) + 1 - + if deg_func != None and deg == None: # Sets up an array to do the degradation calculation. + deg=perm perm[0][1] = Sos * np.exp(-Eass / met_data[0][0]) perm_mid = perm[0] for rp_num in range(repeat): @@ -187,10 +217,9 @@ def esdiffusion( ) # Cs edge seal/Ce encapsulant r1 = so * np.exp(-eas / (met_data[row][0] + dtemp * mid_point)) - r2 = ( - dod * np.exp(-ead / (met_data[row][0] + dtemp * mid_point)) - * r1 * encapsulant_width / edge_seal_width - ) # Ds/De*Cs/Ce*We/Ws + r2 = dod * np.exp(-ead / (met_data[row][0] + dtemp * mid_point) + )* r1 * encapsulant_width / edge_seal_width + # Ds/De*Cs/Ce*We/Ws # Calculates the edge seal nodes. Adjusted to not calculate ends and to have the first node be temperature. for node in range(2, seal_nodes): perm[row + 1 + rp_row][node] = perm_mid[node] + fos * ( @@ -201,36 +230,54 @@ def esdiffusion( perm[row + 1 + rp_row][node] = perm_mid[node] + foe * ( perm_mid[node - 1] + perm_mid[node + 1] - 2 * perm_mid[node] ) - # Calculates the center encapsulant node. Accounts for temperature and two interfade nodes. + # Calculates the center encapsulant node. 
Accounts for temperature and two interface nodes. perm[row + 1 + rp_row][encapsulant_nodes + seal_nodes + 2] = perm_mid[ - encapsulant_nodes + seal_nodes + 2 - ] + 2 * foe * (perm_mid[encapsulant_nodes + seal_nodes + 1] - perm_mid[encapsulant_nodes + seal_nodes + 2]) + encapsulant_nodes + seal_nodes + 2] + 2 * foe * ( + perm_mid[encapsulant_nodes + seal_nodes + 1] - + perm_mid[encapsulant_nodes + seal_nodes + 2]) + # Calculated edge seal node adjacent to the first encapsulant node. Node numbers shifted. perm[row + 1 + rp_row][seal_nodes] = perm_mid[seal_nodes] + fos * ( perm_mid[seal_nodes - 1] + perm_mid[seal_nodes + 3] * r1 * 2 / (1 + r2) - - perm_mid[seal_nodes] * (1 + 2 / (1 + r2)) - ) + - perm_mid[seal_nodes] * (1 + 2 / (1 + r2))) + # Calculated encapsulant node adjacent to the last edge seal node. Node numbers shifted. perm[row + 1 + rp_row][seal_nodes + 3] = perm_mid[seal_nodes + 3] + foe * ( perm_mid[seal_nodes] / r1 * 2 / (1 + 1 / r2) + perm_mid[seal_nodes + 4] - - perm_mid[seal_nodes + 3] * (1 + 2 / (1 + 1 / r2)) - ) + - perm_mid[seal_nodes + 3] * (1 + 2 / (1 + 1 / r2))) + # sets the concentration at the edge seal to air interface. perm[row + 1 + rp_row][1] = Sos * np.exp( -Eass / (met_data[row + 1][0] + dtemp * mid_point) ) + + + # Runs the degradation calculation. + if deg_func != None: + print('oops') + # Runs the reaction with permeant function. + if react_func != None: + print('oops') + perm_mid = perm[row + 1 + rp_row] - # calculate edge seal at interface to encapsulant. - perm[row + 1 + rp_row][seal_nodes + 1] = ( - perm_mid[seal_nodes + 3] / r2 * r1 + perm_mid[seal_nodes] - ) / (1 / r2 + 1) - # calculate encapsulant at interface to the edge seal. - perm[row + 1 + rp_row][seal_nodes + 2] = perm[row + 1 + rp_row][seal_nodes + 1] / r1 + # Calculate edge seal at interface to encapsulant. + # Blocked out code did weird things and was based on equal fluxes. Actually using a simple averaging. 
This looks better and is not used in the diffusion calculations. + #perm[row + 1 + rp_row][seal_nodes + 1] = (perm_mid[seal_nodes + 3]*r1 + # + perm_mid[seal_nodes]*r2) / (1+r2) + perm[row + 1 + rp_row][seal_nodes + 1] = perm_mid[seal_nodes ]+(perm_mid[seal_nodes]-perm_mid[seal_nodes-1])/2 + + # Calculate encapsulant at interface to the edge seal. + #perm[row + 1 + rp_row][seal_nodes + 2] = perm[row + 1 + rp_row][seal_nodes + 1] / r1 + perm[row + 1 + rp_row][seal_nodes + 2] = perm_mid[seal_nodes + 3]+(perm_mid[seal_nodes + 4]-perm_mid[seal_nodes+3])/2 + + # Puts in the time for the first column. perm[row + 1 + rp_row][0] = rp_time + met_data[row + 1][1] + + # Because it is cycling around, it needs to start with the last temperature. met_data[0][0] = met_data[met_data.shape[0] - 1][0] diff --git a/pvdeg/geospatial.py b/pvdeg/geospatial.py index d54877e..85455f6 100644 --- a/pvdeg/geospatial.py +++ b/pvdeg/geospatial.py @@ -85,7 +85,7 @@ def start_dask(hpc=None): client = Client(cluster) print("Dashboard:", client.dashboard_link) - client.wait_for_workers(n_workers=1) + # client.wait_for_workers(n_workers=1) return client @@ -898,7 +898,7 @@ def elevation_stochastic_downselect( Options : `'mean'`, `'sum'`, `'median'` normalization : str, (default = 'linear') function to apply when normalizing weights. Logarithmic uses log_e/ln - options : `'linear'`, `'logarithmic'`, '`exponential'` + options : `'linear'`, `'log'`, '`exp'`, `'invert-linear'` Returns: -------- @@ -927,7 +927,10 @@ def elevation_stochastic_downselect( def interpolate_analysis( - result: xr.Dataset, data_var: str, method="nearest" + result: xr.Dataset, + data_var: str, + method="nearest", + resolution=100j, ) -> Tuple[np.ndarray, np.ndarray, np.ndarray]: """ Interpolate sparse spatial result data against DataArray coordinates. @@ -935,6 +938,9 @@ def interpolate_analysis( Parameters: ----------- + resolution: complex + Change the amount the input is interpolated. 
+ For more interpolation set higher (200j is more than 100j) Result: ------- @@ -951,8 +957,8 @@ def interpolate_analysis( ) # probably a nicer way to do this grid_lat, grid_lon = np.mgrid[ - df["latitude"].min() : df["latitude"].max() : 100j, - df["longitude"].min() : df["longitude"].max() : 100j, + df["latitude"].min() : df["latitude"].max() : resolution, + df["longitude"].min() : df["longitude"].max() : resolution, ] grid_z = griddata(data[:, 0:2], data[:, 2], xi=(grid_lat, grid_lon), method=method) @@ -960,13 +966,46 @@ def interpolate_analysis( return grid_z, grid_lat, grid_lon -def plot_sparse_analysis(result: xr.Dataset, data_var: str, method="nearest") -> None: +# api could be updated to match that of plot_USA +def plot_sparse_analysis( + result: xr.Dataset, + data_var: str, + method="nearest", + resolution: complex = 100j, + figsize: tuple = (10, 8), + show_plot: bool = False, +) -> None: + """ + Plot the output of a sparse geospatial analysis using interpolation. + + Parameters + ----------- + result: xr.Dataset + xarray dataset in memory containing coordinates['longitude', 'latitude'] and at least one datavariable. + data_var: str + name of datavariable to plot from result + method: str + interpolation method. + Options: `'nearest', 'linear', 'cubic'` + See [`scipy.interpolate.griddata`](https://docs.scipy.org/doc/scipy/reference/generated/scipy.interpolate.griddata.html) + resolution: complex + Change the amount the input is interpolated. 
+ For more interpolation set higher (200j is more than 100j) + + Returns + ------- + fig, ax: tuple + matplotlib figure and axes of plot + """ + grid_values, lat, lon = interpolate_analysis( - result=result, data_var=data_var, method=method + result=result, data_var=data_var, method=method, resolution=resolution ) - fig = plt.figure() - ax = fig.add_axes([0, 0, 1, 1], projection=ccrs.LambertConformal(), frameon=False) + fig = plt.figure(figsize=figsize) + ax = fig.add_axes( + [0, 0, 1, 1], projection=ccrs.LambertConformal(), frameon=False + ) # these should be the same ccrs ax.patch.set_visible(False) extent = [lon.min(), lon.max(), lat.min(), lat.max()] @@ -976,8 +1015,8 @@ def plot_sparse_analysis(result: xr.Dataset, data_var: str, method="nearest") -> extent=extent, origin="lower", cmap="viridis", - transform=ccrs.PlateCarree(), - ) # should this be trnsposed + transform=ccrs.PlateCarree(), # why are ccrs different + ) shapename = "admin_1_states_provinces_lakes" states_shp = shpreader.natural_earth( @@ -994,7 +1033,73 @@ def plot_sparse_analysis(result: xr.Dataset, data_var: str, method="nearest") -> cbar = plt.colorbar(img, ax=ax, orientation="vertical", fraction=0.02, pad=0.04) cbar.set_label("Value") - plt.title("Interpolated Heatmap") + plt.title(f"Interpolated Sparse Analysis, {data_var}") plt.xlabel("Longitude") plt.ylabel("Latitude") - plt.show() + + if show_plot: + plt.show() + + return fig, ax + + +def plot_sparse_analysis_land( + result: xr.Dataset, + data_var: str, + method="nearest", + resolution: complex = 100j, + figsize: tuple = (10, 8), + show_plot: bool = False, + proj=ccrs.PlateCarree(), +): + import matplotlib.path as mpath + from cartopy.mpl.patch import geos_to_path + + grid_values, lat, lon = interpolate_analysis( + result=result, data_var=data_var, method=method, resolution=resolution + ) + + fig = plt.figure(figsize=figsize) + ax = fig.add_axes([0, 0, 1, 1], projection=proj, frameon=False) + ax.patch.set_visible(False) + + extent = 
[lon.min(), lon.max(), lat.min(), lat.max()] + ax.set_extent(extent, crs=proj) + + mesh = ax.pcolormesh(lon, lat, grid_values, transform=proj, cmap="viridis") + + land_path = geos_to_path(list(cfeature.LAND.geometries())) + land_path = mpath.Path.make_compound_path(*land_path) + plate_carre_data_transform = proj._as_mpl_transform(ax) + mesh.set_clip_path(land_path, plate_carre_data_transform) + + shapename = "admin_1_states_provinces_lakes" + states_shp = shpreader.natural_earth( + resolution="110m", category="cultural", name=shapename + ) + + ax.add_geometries( + shpreader.Reader(states_shp).geometries(), + proj, + facecolor="none", + edgecolor="black", + linestyle=":", + ) + + cbar = plt.colorbar(mesh, ax=ax, orientation="vertical", fraction=0.02, pad=0.04) + cbar.set_label("Value") + + utilities._add_cartopy_features( + ax=ax, + features=[ + cfeature.BORDERS, + cfeature.COASTLINE, + cfeature.LAND, + cfeature.OCEAN, + ], + ) + + if show_plot: + plt.show() + + return fig, ax diff --git a/pvdeg/scenario.py b/pvdeg/scenario.py index 1e4d1e7..aab8afa 100644 --- a/pvdeg/scenario.py +++ b/pvdeg/scenario.py @@ -1,5 +1,7 @@ """Class to define an analysis scenario.""" +import matplotlib.axes +import matplotlib.figure import pvdeg from pvdeg import utilities @@ -16,16 +18,13 @@ import matplotlib.pyplot as plt from collections import OrderedDict from copy import deepcopy -from typing import List, Union, Optional, Tuple, Callable +from typing import List, Union, Optional, Tuple, Callable, overload from functools import partial import pprint from IPython.display import display, HTML import cartopy.crs as ccrs import cartopy.feature as cfeature -### premade scenario with locations of interest. Ask Mike? -# TODO: geospatial reset weather and addLocation from gids. 
- class Scenario: """ @@ -792,7 +791,7 @@ def plot( start_time: Optional[dt] = None, end_time: Optional[dt] = None, title: str = "", - ) -> None: + ) -> tuple: """ Plot scenario results along an axis using `Scenario.extract` @@ -844,7 +843,8 @@ def plot( Returns: ------- - None + fig, ax: tuple + matplotlib figure and axis objects See Also: --------- @@ -866,6 +866,8 @@ def plot( ax.set_title(f"{self.name} : {title}") plt.show() + return fig, ax + def _ipython_display_(self): file_url = "no file provided" if self.path: @@ -1106,7 +1108,12 @@ def __init__( self.hpc = hpc def __eq__(self, other): - raise NotImplementedError("cannot directly compare geospatial scenario objects") + raise NotImplementedError(""" + Cannot directly compare pvdeg.GeospatialScenario objects + due to larger than memory/out of memory datasets stored in + GeospatialScenario.weather_data attribute. + """) + # add restoring from gids functionality from nsrdb def addLocation( @@ -1181,7 +1188,6 @@ def addLocation( "attributes": nsrdb_attributes, } - # nsrdb_fp = r"/datasets/NSRDB" # kestrel directory geo_weather, geo_meta = pvdeg.weather.get( weather_db, geospatial=True, **weather_arg ) @@ -1193,27 +1199,38 @@ def addLocation( bbox_gids = pvdeg.geospatial.apply_bounding_box(geo_meta, **bbox_kwarg) geo_meta = geo_meta.loc[bbox_gids] + + # Downselect by Region + # ====================================================== + # string to list whole word list or keep list toList = lambda s: s if isinstance(s, list) else [s] if country: countries = toList(country) + self._check_set(countries, set(geo_meta["country"])) geo_meta = geo_meta[geo_meta["country"].isin(countries)] + + if state: states = toList(state) states = [ pvdeg.utilities._get_state(entry) if len(entry) == 2 else entry for entry in states ] + + self._check_set(states, set(geo_meta["state"])) geo_meta = geo_meta[geo_meta["state"].isin(states)] + + if county: if isinstance(county, str): county = toList(county) + self._check_set(county, 
set(geo_meta["county"])) geo_meta = geo_meta[geo_meta["county"].isin(county)] + # ====================================================== - # we don't downsample weather data until this runs - # because on NSRDB we are storing weather OUT of MEMORY with dask geo_meta, geo_gids = pvdeg.utilities.gid_downsampling( geo_meta, downsample_factor ) @@ -1451,8 +1468,8 @@ def downselect_elevation_stochastic( method to calculate elevation weights for each point. Options : `'mean'`, `'sum'`, `'median'` normalization : str, (default = 'linear') - function to apply when normalizing weights. Logarithmic uses log_e/ln - options : `'linear'`, `'logarithmic'`, '`exponential'` + function to apply when normalizing weights. Logarithmic uses $log_e$, $ln$ + options : `'linear'`, `'log'`, '`exp'`, `'invert-linear'` Returns: -------- @@ -1533,7 +1550,7 @@ def coords_tonumpy(self) -> np.array: return coords - def geospatial_data(self) -> tuple[xr.Dataset, pd.DataFrame]: + def get_geospatial_data(self) -> tuple[xr.Dataset, pd.DataFrame]: """ Extract the geospatial weather dataset and metadata dataframe from the scenario object @@ -1548,7 +1565,7 @@ def geospatial_data(self) -> tuple[xr.Dataset, pd.DataFrame]: Returns: -------- - (weather_data, meta_data): (xr.Dataset, pd.DataFrame) + (weather_data, meta_data): tuple[xr.Dataset, pd.DataFrame] A tuple of weather data as an `xarray.Dataset` and the corresponding meta data as a dataframe. """ # downsample here, not done already happens at pipeline runtime @@ -1557,6 +1574,26 @@ def geospatial_data(self) -> tuple[xr.Dataset, pd.DataFrame]: ) return geo_weather_sub, self.meta_data + # @dispatch(xr.Dataset, pd.DataFrame) + def set_geospatial_data(self, weather_ds: xr.Dataset, meta_df: pd.DataFrame ) -> None: + """ + Parameters: + ----------- + weather_ds : xarray.Dataset + Dataset containing weather data for a block of gids. + meta_df : pandas.DataFrame + DataFrame containing meta data for a block of gids. 
+ + Modifies: + ---------- + self.weather_data + sets to weather_ds + self.meta_data + sets to meta_df + """ + self.weather_data, self.meta_data = weather_ds, meta_df + + def addJob( self, func: Callable = None, @@ -1580,7 +1617,6 @@ def addJob( set flag to get a userWarning notifying the user of the job added to the pipeline in method call. ``default = False`` """ - # check if we can do geospatial analyis on desired function try: pvdeg.geospatial.template_parameters(func) except ValueError: @@ -1590,10 +1626,6 @@ def addJob( geo_job_dict = {"geospatial_job": {"job": func, "params": func_params}} - # # UNTESTED - # if func_params: - # geo_job_dict.update(func_params) - self.pipeline = geo_job_dict if see_added: @@ -1740,7 +1772,8 @@ def getValidRegions( f"self.geospatial should be True. Current value = {self.geospatial}" ) - discard_weather, meta_df = Scenario._get_geospatial_data(year=2022) + # discard_weather, meta_df = Scenario._get_geospatial_data(year=2022) + discard_weather, meta_df = self._get_geospatial_data(year=2022) if country: meta_df = meta_df[meta_df["country"] == country] @@ -1761,13 +1794,15 @@ def plot(self): "The 'plot' method is not accessible in GeospatialScenario, only in Scenario" ) + import matplotlib + def plot_coords( self, coord_1: Optional[tuple[float]] = None, coord_2: Optional[tuple[float]] = None, coords: Optional[np.ndarray[float]] = None, size: Union[int, float] = 1, - ) -> None: + ) -> tuple[matplotlib.figure, matplotlib.axes]: """ Plot lat-long coordinate pairs on blank map. Quickly view geospatial datapoints before your analysis. @@ -1789,6 +1824,11 @@ def plot_coords( size : float matplotlib scatter point size. Without any downsampling NSRDB points will siginficantly overlap. 
+ + Returns: + -------- + fig, ax + matplotlib figure and axis """ fig = plt.figure(figsize=(15, 10)) ax = plt.axes(projection=ccrs.PlateCarree()) @@ -1811,6 +1851,9 @@ def plot_coords( plt.title(f"Coordinate Pairs from '{self.name}' Meta Data") plt.show() + return fig, ax + + def plot_meta_classification( self, col_name: str = None, @@ -1818,7 +1861,7 @@ def plot_meta_classification( coord_2: Optional[tuple[float]] = None, coords: Optional[np.ndarray[float]] = None, size: Union[int, float] = 1, - ): + ) -> tuple[matplotlib.figure, matplotlib.axes]: """ Plot classified lat-long coordinate pairs on map. Quicly view geospatial datapoints with binary classification in a meta_data @@ -1844,6 +1887,11 @@ def plot_meta_classification( size : float matplotlib scatter point size. Without any downsampling NSRDB points will siginficantly overlap. + + Returns: + -------- + fig, ax + matplotlib figure and axis """ if not col_name: raise ValueError("col_name cannot be none") @@ -1892,11 +1940,13 @@ def plot_meta_classification( plt.legend() plt.show() + return fig, ax + def plot_world( self, data_variable: str, cmap: str = "viridis", - ): + ) -> tuple[matplotlib.figure, matplotlib.axes]: da = (self.results)[data_variable] fig, ax = plt.subplots( @@ -1915,6 +1965,8 @@ def plot_world( ax.add_feature(cfeature.LAKES, edgecolor="black") plt.show() + return fig, ax + # test this def plot_USA( self, @@ -1923,7 +1975,7 @@ def plot_USA( cmap: str = "viridis", vmin: Union[int, float] = 0, vmax: Optional[Union[int, float]] = None, - ): + ) -> tuple[matplotlib.figure, matplotlib.axes]: """ Plot a vizualization of the geospatial scenario result. Only works on geospatial scenarios. 
@@ -1957,6 +2009,19 @@ def plot_USA( fpath if fpath else [f"os.getcwd/{self.name}-{self.results[data_from_result]}"] fig.savefig() + return fig, ax + + + def _check_set(self, iterable, to_check: set): + """Check if iterable is a subset of to_check""" + if not isinstance(iterable, set): + iterable = set(iterable) + + if not iterable.issubset(to_check): + raise ValueError(f"All of iterable: {iterable} does not exist in {to_check}") + + + def format_pipeline(self): pipeline_html = "
" if "geospatial_job" in self.pipeline: @@ -2005,11 +2070,11 @@ def _ipython_display_(self):

Weather Dataset

- {self.format_weather()} + {self.format_geo_weather()}

Meta Dataframe

- {self.format_meta()} + {self.format_geo_meta()}