Colorado 1/16th Resolution, add albedo, wind_speed
tobin-ford committed Jan 31, 2025
1 parent fe22aef commit 11d28f3
Showing 24 changed files with 5,004 additions and 2,933 deletions.
68 changes: 49 additions & 19 deletions pvdeg/geospatial.py
@@ -10,7 +10,6 @@
pysam,
)


import xarray as xr
import dask.array as da
import pandas as pd
@@ -92,13 +91,24 @@ def start_dask(hpc=None):

return client

# rename this?
# and combine into a single function with _df_from_arbitrary; this _ds_from_arbitrary isn't really doing anything anymore
# we only want _ds_from_arbitrary and then convert to a ds, but if the input is already a dataset then we don't want to do anything
def _ds_from_arbitrary(res, func):
"""
Convert an arbitrary return type to xarray.Dataset.
"""

if isinstance(res, pysam.inspirePysamReturn):
return pysam._handle_pysam_return(res)
######## STRUCTURAL #########
# functions can just return xr.Dataset to take advantage of geospatial
# this should not be required to implement a new geospatial function

if isinstance(res, xr.Dataset):
return res


# if isinstance(res, pysam.inspirePysamReturn):
# return pysam._handle_pysam_return(res)
# add more conditionals if we have special cases
# or add general case for mixed return dimensions: HARD
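
To illustrate the pass-through path, here is a minimal sketch of a user function that returns an xr.Dataset directly, so _ds_from_arbitrary hands it back unchanged (hypothetical function, not part of this commit):

import numpy as np
import xarray as xr

def my_scalar_metric(weather_df, meta):
    # already an xr.Dataset, so _ds_from_arbitrary needs no special case
    return xr.Dataset(data_vars={"metric": xr.DataArray(np.float64(42.0))})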

@@ -992,7 +1002,7 @@ def elevation_stochastic_downselect(


def interpolate_analysis(
result: xr.Dataset, data_var: str, method="nearest"
    result: xr.Dataset, data_var: str, method="nearest", res=100j,
) -> Tuple[np.ndarray, np.ndarray, np.ndarray]:
"""
Interpolate sparse spatial result data against DataArray coordinates.
@@ -1016,33 +1026,47 @@
) # probably a nicer way to do this

grid_lat, grid_lon = np.mgrid[
df["latitude"].min() : df["latitude"].max() : 100j,
df["longitude"].min() : df["longitude"].max() : 100j,
df["latitude"].min() : df["latitude"].max() : res,
df["longitude"].min() : df["longitude"].max() : res,
]

grid_z = griddata(data[:, 0:2], data[:, 2], xi=(grid_lat, grid_lon), method=method)

return grid_z, grid_lat, grid_lon
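
With the new res parameter the caller controls grid density: np.mgrid treats an imaginary step as a point count, so res=200j yields a 200x200 grid. A hedged usage sketch (assumes a sparse result dataset with latitude/longitude coordinates; the data variable name is hypothetical):

# `result` produced by a pvdeg geospatial analysis
grid_z, grid_lat, grid_lon = interpolate_analysis(
    result=result,
    data_var="annual_poa",  # hypothetical data variable
    method="linear",        # any scipy.interpolate.griddata method
    res=200j,               # 200 x 200 interpolation grid
)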


def plot_sparse_analysis(result: xr.Dataset, data_var: str, method="nearest") -> None:
def plot_sparse_analysis(
result: xr.Dataset,
data_var: str,
method="nearest",
res=100j,
cmap='viridis',
    ax=None,
):
grid_values, lat, lon = interpolate_analysis(
result=result, data_var=data_var, method=method
result=result, data_var=data_var, method=method, res=res
)

fig = plt.figure()
ax = fig.add_axes([0, 0, 1, 1], projection=ccrs.LambertConformal(), frameon=False)
ax.patch.set_visible(False)
if ax is None:
fig = plt.figure()
ax = fig.add_axes([0, 0, 1, 1], projection=ccrs.LambertConformal(), frameon=False)
ax.patch.set_visible(False)

show = True
else:
fig = None
show = False

extent = [lon.min(), lon.max(), lat.min(), lat.max()]
ax.set_extent(extent)
img = ax.imshow(
grid_values,
extent=extent,
origin="lower",
cmap="viridis",
cmap=cmap,
transform=ccrs.PlateCarree(),
    ) # should this be transposed?
)

shapename = "admin_1_states_provinces_lakes"
states_shp = shpreader.natural_earth(
@@ -1056,10 +1080,16 @@ def plot_sparse_analysis(result: xr.Dataset, data_var: str, method="nearest") ->
edgecolor="gray",
)

cbar = plt.colorbar(img, ax=ax, orientation="vertical", fraction=0.02, pad=0.04)
cbar.set_label("Value")
if fig is not None:
cbar = plt.colorbar(img, ax=ax, orientation="vertical", fraction=0.02, pad=0.04)
cbar.set_label("Value")

if fig is not None:
plt.title("Interpolated Heatmap")
plt.xlabel("Longitude")
plt.ylabel("Latitude")

plt.title("Interpolated Heatmap")
plt.xlabel("Longitude")
plt.ylabel("Latitude")
plt.show()
if show and fig is not None:
plt.show()

return fig, ax
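
Because plot_sparse_analysis now accepts an existing axis and returns (fig, ax), it composes into multi-panel figures; when ax is supplied the function skips figure creation, the colorbar, the titles, and plt.show(). A sketch under the same assumptions as above (each axis must carry a cartopy projection):

import cartopy.crs as ccrs
import matplotlib.pyplot as plt

fig, axes = plt.subplots(
    1, 2, figsize=(12, 5),
    subplot_kw={"projection": ccrs.LambertConformal()},
)
plot_sparse_analysis(result, data_var="annual_poa", ax=axes[0])  # hypothetical variables
plot_sparse_analysis(result, data_var="annual_energy", ax=axes[1], cmap="plasma")
plt.show()
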
133 changes: 94 additions & 39 deletions pvdeg/pysam.py
@@ -4,18 +4,24 @@
"""

import dask.dataframe as dd
import dask.array as da
import pandas as pd
import xarray as xr
import numpy as np
import pickle
import json
import sys
import os

import PySAM
import PySAM.Pvsamv1 as pv1
import PySAM.Pvwattsv8 as pv8
import PySAM.Grid as Grid
import PySAM.Utilityrate5 as UtilityRate
import PySAM.Cashloan as Cashloan
try:
import PySAM
import PySAM.Pvsamv1 as pv1
import PySAM.Pvwattsv8 as pv8
import PySAM.Grid as Grid
import PySAM.Utilityrate5 as UtilityRate
import PySAM.Cashloan as Cashloan
except ImportError:
print('pysam import failed. run pip install pvdeg[sam] to install all dependencies for the "pvdeg.pysam" submodule')

from pvdeg import (
weather,
@@ -364,37 +370,59 @@ def pysam_hourly_trivial(weather_df, meta):
return outputs

# TODO: add slots
class inspirePysamReturn():
"""simple struct to facilitate handling weirdly shaped pysam simulation return values"""

    # removes __dict__ attribute and breaks pickle
# __slots__ = ("annual_poa", "ground_irradiance", "timeseries_index")
# class inspirePysamReturn():
# """simple struct to facilitate handling weirdly shaped pysam simulation return values"""

#     # removes __dict__ attribute and breaks pickle
# # __slots__ = ("annual_poa", "ground_irradiance", "timeseries_index")

# def __init__(self, annual_poa, ground_irradiance, timeseries_index, annual_energy, poa_front, poa_rear, subarray1_poa_front, subarray1_poa_rear):
# self.annual_energy = annual_energy
# self.annual_poa = annual_poa
# self.ground_irradiance = ground_irradiance
# self.timeseries_index = timeseries_index
# self.poa_front = poa_front
# self.poa_rear = poa_rear
# self.subarray1_poa_front = subarray1_poa_front
# self.subarray1_poa_rear = subarray1_poa_rear

# def _handle_pysam_return(pysam_res : inspirePysamReturn) -> xr.Dataset:
def _handle_pysam_return(pysam_res_dict : dict, weather_df: pd.DataFrame) -> xr.Dataset:
"""Handle a pysam return object and transform it to an xarray"""

def __init__(self, annual_poa, ground_irradiance, timeseries_index):
self.annual_poa = annual_poa
self.ground_irradiance = ground_irradiance
self.timeseries_index = timeseries_index
ground_irradiance = pysam_res_dict["subarray1_ground_rear_spatial"]

annual_poa = pysam_res_dict["annual_poa_front"]
annual_energy = pysam_res_dict["annual_energy"]

# rename?
import xarray as xr
def _handle_pysam_return(pysam_res : inspirePysamReturn) -> xr.Dataset:
"""Handle a pysam return object and transform it to an xarray"""
    poa_front = pysam_res_dict["poa_front"][:8760] # 25 * 8760 entries; each year-long block of 8760 entries is identical
poa_rear = pysam_res_dict["poa_rear"][:8760] # same for the following
subarray1_poa_front = pysam_res_dict["subarray1_poa_front"][:8760]
subarray1_poa_rear = pysam_res_dict["subarray1_poa_rear"][:8760]

import dask.array as da
import numpy as np
timeseries_index = weather_df.index

# redo this using numba?
distances = pysam_res.ground_irradiance[0][1:]
ground_irradiance_values = da.from_array([row[1:] for row in pysam_res.ground_irradiance[1:]])
distances = ground_irradiance[0][1:]
ground_irradiance_values = da.from_array([row[1:] for row in ground_irradiance[1:]])

single_location_ds = xr.Dataset(
data_vars={
"annual_poa" : pysam_res.annual_poa, # scalar variable
"ground_irradiance" : (("time", "distance"), ground_irradiance_values)
# scalars
"annual_poa" : annual_poa,
"annual_energy" : annual_energy,

# simple timeseries
"poa_front" : (("time", ), da.array(poa_front)),
"poa_rear" : (("time", ), da.array(poa_rear)),
"subarray1_poa_front" : (("time", ), da.array(subarray1_poa_front)),
"subarray1_poa_rear" : (("time", ), da.array(subarray1_poa_rear)),

# spatio-temporal
"ground_irradiance" : (("time", "distance"), ground_irradiance_values),
},
coords={
"time" : pysam_res.timeseries_index,
"time" : timeseries_index,
# "distance" : distances,
"distance" : np.arange(10), # this matches the dimension axis of the output_temlate dataset
}
@@ -403,7 +431,24 @@ def _handle_pysam_return(pysam_res : inspirePysamReturn) -> xr.Dataset:
return single_location_ds
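
The reworked _handle_pysam_return consumes the raw PySAM output dict plus the weather DataFrame whose index supplies the time coordinate. A hedged sketch of the expected input shape, with synthetic values for illustration only (a real run carries 25 * 8760-entry timeseries):

import pandas as pd

n = 24  # one synthetic day instead of a full year, for brevity
idx = pd.date_range("2001-01-01", periods=n, freq="1h")
outputs = {
    # first row is a header whose tail holds the 10 distances;
    # each later row is [timestamp, 10 irradiance values]
    "subarray1_ground_rear_spatial": [list(range(11))] + [[i] + [0.0] * 10 for i in range(n)],
    "annual_poa_front": 1234.5,
    "annual_energy": 56789.0,
    "poa_front": [0.0] * n,
    "poa_rear": [0.0] * n,
    "subarray1_poa_front": [0.0] * n,
    "subarray1_poa_rear": [0.0] * n,
}
ds = _handle_pysam_return(pysam_res_dict=outputs, weather_df=pd.DataFrame(index=idx))
# ds now holds two scalars, four hourly timeseries, and ground_irradiance(time, distance)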


INSPIRE_NSRDB_ATTRIBUTES = [
"air_temperature",
"wind_speed",
"wind_direction",
"dhi",
"ghi",
"dni",
"relative_humidity",
"surface_albedo",
]
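
A hedged sketch of how this attribute list might feed a weather fetch; the weather.get keyword arguments shown here are assumptions, not confirmed by this diff:

from pvdeg import weather

# hypothetical usage; check pvdeg.weather.get for the exact signature
weather_df, meta = weather.get(
    database="PSM3",
    id=(39.74, -105.18),                  # (latitude, longitude)
    attributes=INSPIRE_NSRDB_ATTRIBUTES,  # assumed keyword
)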

# annual_poa_nom, annual_poa_front, annual_poa_rear, poa_nom, poa_front, or poa_rear

# annual energy
# front_poa, rear_poa (timeseries, repeating 25 times, we can just take the first year)
# annual_poa_rear_gain_percent (rear side gain, bifacial factor) (rear poa / front poa) * bifacial factor

# should we be using poa_front or subarray1_poa_front? (same for poa_rear)
# TODO: add config file, multiple config files.
def inspire_ground_irradiance(weather_df, meta, config_files):
"""
@@ -432,21 +477,19 @@ def inspire_ground_irradiance(weather_df, meta, config_files):
meta type : {type(meta)}
""")

    # force-localize the TMY data from UTC to local time by rolling rows
weather_df = weather.roll_tmy(weather_df, meta)

outputs = pysam(
weather_df = weather_df,
meta = meta,
pv_model = "pysamv1",
config_files=config_files,
results = ["subarray1_ground_rear_spatial", "annual_poa_front"],
)

result = inspirePysamReturn(
ground_irradiance = outputs["subarray1_ground_rear_spatial"],
annual_poa = outputs["annual_poa_front"],
timeseries_index=weather_df.index,
)
ds_result = _handle_pysam_return(pysam_res_dict=outputs, weather_df=weather_df)

return result
return ds_result
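
A hedged end-to-end sketch; the config_files key and path are hypothetical (see pvdeg.pysam.pysam for the expected format):

# weather_df / meta from any pvdeg weather source, e.g. an NSRDB TMY
ds = inspire_ground_irradiance(
    weather_df,
    meta,
    config_files={"pv": "inspire_pysamv1.json"},  # hypothetical key and path
)
print(float(ds["annual_energy"]), ds["ground_irradiance"].shape)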

def solar_resource_dict(weather_df, meta):
"""
@@ -457,24 +500,36 @@

    # weather_df = weather_df.reset_index(drop=True) # Probably don't need to do this
weather_df = utilities.add_time_columns_tmy(weather_df) # only supports hourly data

# enforce tmy scheme
times = pd.date_range(start="2001-01-01", periods=8760, freq="1h")

# all options
# lat,lon,tz,elev,year,month,hour,minute,gh,dn,df,poa,tdry,twet,tdew,rhum,pres,snow,alb,aod,wspd,wdir
sr = {
'lat': meta['latitude'],
'lon': meta['longitude'],
'tz': meta['tz'] if 'tz' in meta.keys() else 0,
'elev': meta['altitude'],
'year': list(weather_df['Year']),
'month': list(weather_df['Month']),
'day': list(weather_df['Day']),
'hour': list(weather_df['Hour']),
'minute': list(weather_df['Minute']),
'year': list(times.year), #list(weather_df['Year']),
'month': list(times.month),
'day': list(times.day),
'hour': list(times.hour),
'minute': list(times.minute),
'gh': list(weather_df['ghi']),
'dn': list(weather_df['dni']),
'df': list(weather_df['dhi']),
'wspd': list(weather_df['wind_speed']),
'tdry': list(weather_df['temp_air']),
'alb' : weather_df['albedo'] if 'albedo' in weather_df.columns.values else [0.2]*len(weather_df)
'alb' : list(weather_df['albedo']) if 'albedo' in weather_df.columns.values else [0.2] * len(weather_df)
}

# if we have wind direction then add it
if 'wind_direction' in weather_df.columns.values:
sr['wdir'] = list(weather_df['wind_direction'])

print(sr['alb'])

return sr
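
A usage sketch with a synthetic hourly TMY frame; column names follow the pvdeg conventions used above and all values are placeholders:

import pandas as pd

idx = pd.date_range("2001-01-01", periods=8760, freq="1h")
wdf = pd.DataFrame(
    {
        "ghi": 0.0, "dni": 0.0, "dhi": 0.0,
        "temp_air": 20.0, "wind_speed": 1.0,
        # no 'albedo' column, so the constant 0.2 default is used
    },
    index=idx,
)
meta = {"latitude": 39.74, "longitude": -105.18, "altitude": 1829}
sr = solar_resource_dict(wdf, meta)  # meta lacks 'tz', so tz defaults to 0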

def sample_pysam_result(weather_df, meta): # throw weather, meta away