diff --git a/contributors/tobin-ford.md b/contributors/tobin-ford.md
new file mode 100644
index 00000000..6a71060c
--- /dev/null
+++ b/contributors/tobin-ford.md
@@ -0,0 +1,12 @@
+2024-08-01
+
+I hereby agree to the terms of the Contributors License Agreement,
+version 1.0, with MD5 checksum 46ea45f996295bdb0652333d516a6d0a.
+
+I furthermore declare that I am authorized and able to make this
+agreement and sign this declaration.
+
+Signed,
+
+Tobin Ford https://github.com/tobin-ford
+
diff --git a/pvdeg/geospatial.py b/pvdeg/geospatial.py
index 1286e7e7..2cc87816 100644
--- a/pvdeg/geospatial.py
+++ b/pvdeg/geospatial.py
@@ -262,6 +262,8 @@ def output_template(
# we should be able to get rid of this with the new autotemplating function and decorator
+# this is helpful for users, so we should move it to a section in the documentation;
+# discuss with the group
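+# rough usage sketch of the decorator-driven replacement (call signature as in
+# tests/test_geospatial.py; `weather_ds` is a placeholder for any geospatial weather dataset):
+#   template = pvdeg.geospatial.auto_template(func=pvdeg.humidity.module, ds_gids=weather_ds)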
def template_parameters(func):
"""
Output parameters for xarray template.
diff --git a/pvdeg/utilities.py b/pvdeg/utilities.py
index 6379b851..46607bae 100644
--- a/pvdeg/utilities.py
+++ b/pvdeg/utilities.py
@@ -7,6 +7,7 @@
from typing import Callable
import inspect
import math
+import xarray as xr
def gid_downsampling(meta, n):
@@ -471,3 +472,32 @@ def tilt_azimuth_scan(
print("\r ", end="")
print("\r", end="")
return tilt_azimuth_series
+
+
+def compare_templates(
+    ds1: xr.Dataset, ds2: xr.Dataset, atol=1e-10, consider_nan_equal=True
+) -> bool:
+    """
+    Compare two loaded datasets against "empty-like" template values.
+
+    Dimensions, coordinate values, data variable names, and indexes are
+    checked. Data variable values are deliberately ignored because templates
+    are created empty-like and hold arbitrary, uninitialized data.
+
+    # compare via .sizes; comparing Dataset.dims emits a FutureWarning in recent xarray
+    if ds1.sizes != ds2.sizes:
+        return False
+
+ if set(ds1.coords.keys()) != set(ds2.coords.keys()):
+ return False
+
+    for coord in ds1.coords:
+        if ds1.coords[coord].dtype.kind in {"i", "f"}:
+            # numeric coordinates: compare within a tolerance
+            if not np.allclose(
+                ds1.coords[coord],
+                ds2.coords[coord],
+                atol=atol,
+                equal_nan=consider_nan_equal,
+            ):
+                return False
+        else:
+            # datetime64 and all other coordinates: require exact equality
+            if not np.array_equal(ds1.coords[coord], ds2.coords[coord]):
+                return False
+
+ if set(ds1.data_vars.keys()) != set(ds2.data_vars.keys()):
+ return False
+
+ for dim in ds1.dims:
+ if not ds1.indexes[dim].equals(ds2.indexes[dim]):
+ return False
+
+ return True
diff --git a/tests/data/humidity_template.nc b/tests/data/humidity_template.nc
new file mode 100644
index 00000000..63bbc255
Binary files /dev/null and b/tests/data/humidity_template.nc differ
diff --git a/tests/sandbox.ipynb b/tests/sandbox.ipynb
index 8d4f5eea..d828964b 100644
--- a/tests/sandbox.ipynb
+++ b/tests/sandbox.ipynb
@@ -81,834 +81,155 @@
"import pickle\n",
"import pandas as pd\n",
"import numpy as np\n",
+ "import xarray as xr\n",
"import os\n",
"from pvdeg import TEST_DATA_DIR\n",
"\n",
- "with open(r\"C:\\Users\\tford\\Downloads\\summit-weather.pkl\", 'rb') as f:\n",
- " GEO_WEATHER = pickle.load(f)\n",
- "\n",
- "GEO_META = pd.read_csv(r\"C:\\Users\\tford\\Downloads\\summit-meta.csv\", index_col=0)"
+ "GEO_META = pd.read_csv(os.path.join(TEST_DATA_DIR, \"summit-meta.csv\"), index_col=0)\n",
+ "with open(os.path.join(TEST_DATA_DIR, \"summit-weather.pkl\"), 'rb') as f:\n",
+ " GEO_WEATHER = pickle.load(f)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+    "autotemplate: build the output template directly from the target function"
]
},
{
"cell_type": "code",
"execution_count": 2,
"metadata": {},
- "outputs": [
- {
- "data": {
-      "text/html": [
-       " ... xarray.Dataset HTML repr omitted; summary retained in text/plain below ... "
-      ],
- "text/plain": [
- " Size: 9MB\n",
- "Dimensions: (time: 17520, gid: 11)\n",
- "Coordinates:\n",
- " * gid (gid) int64 88B 449211 452064 453020 ... 460613 462498\n",
- " * time (time) datetime64[ns] 140kB 2022-01-01 ... 2022-12-31T...\n",
- "Data variables:\n",
- " temp_air (time, gid) float64 2MB -12.0 -8.1 -14.2 ... -4.3 -6.2\n",
- " wind_speed (time, gid) float64 2MB 0.6 0.6 0.3 0.6 ... 0.9 1.0 1.1\n",
- " dhi (time, gid) float64 2MB 0.0 0.0 0.0 ... 13.0 18.0 19.0\n",
- " ghi (time, gid) float64 2MB 0.0 0.0 0.0 ... 13.0 24.0 19.0\n",
- " dni (time, gid) float64 2MB 0.0 0.0 0.0 0.0 ... 0.0 126.0 1.0\n",
- " relative_humidity (time, gid) float64 2MB 99.93 79.41 100.0 ... 95.93 100.0\n",
- "Attributes:\n",
- " full_version_record: {\"rex\": \"0.2.80\", \"pandas\": \"2.0.0\", \"numpy\": \"1.23...\n",
- " package: rex\n",
- " version: 4.0.0"
- ]
- },
- "execution_count": 2,
- "metadata": {},
- "output_type": "execute_result"
- }
- ],
+ "outputs": [],
"source": [
- "GEO_WEATHER"
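+    "# auto_template builds the output template from the target function via the autotemplating decorator\n",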
+ "autotemplate_result = pvdeg.geospatial.auto_template(\n",
+ " func=pvdeg.humidity.module, \n",
+ " ds_gids=GEO_WEATHER\n",
+ ").compute()"
]
},
{
"cell_type": "code",
"execution_count": 3,
"metadata": {},
- "outputs": [
- {
- "data": {
-      "text/html": [
-       " ... GEO_META pandas DataFrame HTML repr omitted; table retained in text/plain below ... "
-      ],
- "text/plain": [
- " latitude longitude altitude tz country state county \\\n",
- "449211 39.89 -106.42 2954 -7 United States Colorado Summit \n",
- "452064 39.89 -106.30 2486 -7 United States Colorado Summit \n",
- "453020 39.69 -106.26 3485 -7 United States Colorado Summit \n",
- "454916 39.81 -106.18 2767 -7 United States Colorado Summit \n",
- "455867 39.81 -106.14 2883 -7 United States Colorado Summit \n",
- "455877 39.41 -106.14 3859 -7 United States Colorado Summit \n",
- "456827 39.45 -106.10 3634 -7 United States Colorado Summit \n",
- "457776 39.41 -106.06 3363 -7 United States Colorado Summit \n",
- "459670 39.65 -105.98 3261 -7 United States Colorado Summit \n",
- "460613 39.53 -105.94 3190 -7 United States Colorado Summit \n",
- "462498 39.57 -105.86 3523 -7 United States Colorado Summit \n",
- "\n",
- " wind_height \n",
- "449211 2 \n",
- "452064 2 \n",
- "453020 2 \n",
- "454916 2 \n",
- "455867 2 \n",
- "455877 2 \n",
- "456827 2 \n",
- "457776 2 \n",
- "459670 2 \n",
- "460613 2 \n",
- "462498 2 "
- ]
- },
- "execution_count": 3,
- "metadata": {},
- "output_type": "execute_result"
- }
- ],
+ "outputs": [],
"source": [
- "GEO_META"
+ "humidity_template = xr.open_dataset(os.path.join(TEST_DATA_DIR, 'humidity_template.nc')).compute()"
]
},
{
"cell_type": "code",
- "execution_count": 18,
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
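+    "# notebook-local variant of pvdeg.utilities.compare_templates (this one also checks data variable values)\n",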
+ "def compare_datasets(ds1: xr.Dataset, ds2: xr.Dataset, atol=1e-10) -> bool:\n",
+ " \"\"\"Compare loaded datasets with \"empty-like\" values\"\"\"\n",
+ "\n",
+ " if ds1.dims != ds2.dims:\n",
+ " return False\n",
+ "\n",
+ " if set(ds1.coords.keys()) != set(ds2.coords.keys()):\n",
+ " return False\n",
+ "\n",
+ " for coord in ds1.coords:\n",
+ " if ds1.coords[coord].dtype.kind in {'i', 'f'}:\n",
+ " # Use np.allclose for numeric coordinates\n",
+ " if not np.allclose(ds1.coords[coord], ds2.coords[coord], atol=atol):\n",
+ " return False\n",
+ " elif ds1.coords[coord].dtype.kind == 'M': # datetime64 type\n",
+ " # Use array equality for datetime coordinates\n",
+ " if not np.array_equal(ds1.coords[coord], ds2.coords[coord]):\n",
+ " return False\n",
+ " else:\n",
+ " if not np.array_equal(ds1.coords[coord], ds2.coords[coord]):\n",
+ " return False\n",
+ "\n",
+ " if set(ds1.data_vars.keys()) != set(ds2.data_vars.keys()):\n",
+ " return False\n",
+ "\n",
+ " for var in ds1.data_vars:\n",
+ " if not np.allclose(ds1[var], ds2[var], atol=atol):\n",
+ " return False\n",
+ "\n",
+ " for dim in ds1.dims:\n",
+ " if not ds1.indexes[dim].equals(ds2.indexes[dim]):\n",
+ " return False\n",
+ "\n",
+ " return True"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 5,
"metadata": {},
"outputs": [
{
- "name": "stdout",
+ "name": "stderr",
"output_type": "stream",
"text": [
- "The array tilt angle was not provided, therefore the latitude tilt of 39.9 was used.\n",
- "The array azimuth was not provided, therefore an azimuth of 180.0 was used.\n",
- "The array tilt angle was not provided, therefore the latitude tilt of 39.9 was used.\n",
- "The array azimuth was not provided, therefore an azimuth of 180.0 was used.\n",
- "The array tilt angle was not provided, therefore the latitude tilt of 39.7 was used.\n",
- "The array azimuth was not provided, therefore an azimuth of 180.0 was used.\n",
- "The array tilt angle was not provided, therefore the latitude tilt of 39.8 was used.\n",
- "The array azimuth was not provided, therefore an azimuth of 180.0 was used.\n",
- "The array tilt angle was not provided, therefore the latitude tilt of 39.8 was used.\n",
- "The array azimuth was not provided, therefore an azimuth of 180.0 was used.\n",
- "The array tilt angle was not provided, therefore the latitude tilt of 39.4 was used.\n",
- "The array azimuth was not provided, therefore an azimuth of 180.0 was used.\n",
- "The array tilt angle was not provided, therefore the latitude tilt of 39.5 was used.\n",
- "The array azimuth was not provided, therefore an azimuth of 180.0 was used.\n",
- "The array tilt angle was not provided, therefore the latitude tilt of 39.4 was used.\n",
- "The array azimuth was not provided, therefore an azimuth of 180.0 was used.\n",
- "The array tilt angle was not provided, therefore the latitude tilt of 39.6 was used.\n",
- "The array azimuth was not provided, therefore an azimuth of 180.0 was used.\n",
- "The array tilt angle was not provided, therefore the latitude tilt of 39.5 was used.\n",
- "The array azimuth was not provided, therefore an azimuth of 180.0 was used.\n",
- "The array tilt angle was not provided, therefore the latitude tilt of 39.6 was used.\n",
- "The array azimuth was not provided, therefore an azimuth of 180.0 was used.\n"
+ "C:\\Users\\tford\\dev\\PVDegradationTools\\pvdeg\\utilities.py:479: FutureWarning: The return type of `Dataset.dims` will be changed to return a set of dimension names in future, in order to be more consistent with `DataArray.dims`. To access a mapping from dimension names to lengths, please use `Dataset.sizes`.\n",
+ " if ds1.dims != ds2.dims:\n"
]
}
],
"source": [
- "res = pvdeg.geospatial.analysis(\n",
- " weather_ds=GEO_WEATHER,\n",
- " meta_df=GEO_META,\n",
- " func=pvdeg.standards.standoff,\n",
- ")"
+    "assert pvdeg.utilities.compare_templates(autotemplate_result, humidity_template)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+    "output template: build the same template from an explicit shapes mapping"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 8,
+ "metadata": {},
+ "outputs": [],
+ "source": [
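+    "# each key becomes an empty data variable in the template; the tuple gives its dimensions\n",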
+ "shapes = {\n",
+ " \"RH_surface_outside\": (\"gid\", \"time\"),\n",
+ " \"RH_front_encap\": (\"gid\", \"time\"),\n",
+ " \"RH_back_encap\": (\"gid\", \"time\"),\n",
+ " \"RH_backsheet\": (\"gid\", \"time\"),\n",
+ "}\n",
+ "\n",
+ "manual_template = pvdeg.geospatial.output_template(\n",
+ " shapes=shapes,\n",
+ " ds_gids=GEO_WEATHER\n",
+ ").compute()"
]
},
{
"cell_type": "code",
- "execution_count": 6,
+ "execution_count": 11,
"metadata": {},
"outputs": [
{
- "name": "stdout",
+ "name": "stderr",
"output_type": "stream",
"text": [
- "[[ 3.94100000e+01 -1.06420000e+02 nan]\n",
- " [ 3.94100000e+01 -1.06300000e+02 nan]\n",
- " [ 3.94100000e+01 -1.06260000e+02 nan]\n",
- " [ 3.94100000e+01 -1.06180000e+02 nan]\n",
- " [ 3.94100000e+01 -1.06140000e+02 5.05368226e-01]\n",
- " [ 3.94100000e+01 -1.06100000e+02 nan]\n",
- " [ 3.94100000e+01 -1.06060000e+02 4.23099392e-01]\n",
- " [ 3.94100000e+01 -1.05980000e+02 nan]\n",
- " [ 3.94100000e+01 -1.05940000e+02 nan]\n",
- " [ 3.94100000e+01 -1.05860000e+02 nan]\n",
- " [ 3.94500000e+01 -1.06420000e+02 nan]\n",
- " [ 3.94500000e+01 -1.06300000e+02 nan]\n",
- " [ 3.94500000e+01 -1.06260000e+02 nan]\n",
- " [ 3.94500000e+01 -1.06180000e+02 nan]\n",
- " [ 3.94500000e+01 -1.06140000e+02 nan]\n",
- " [ 3.94500000e+01 -1.06100000e+02 4.74997232e-01]\n",
- " [ 3.94500000e+01 -1.06060000e+02 nan]\n",
- " [ 3.94500000e+01 -1.05980000e+02 nan]\n",
- " [ 3.94500000e+01 -1.05940000e+02 nan]\n",
- " [ 3.94500000e+01 -1.05860000e+02 nan]\n",
- " [ 3.95300000e+01 -1.06420000e+02 nan]\n",
- " [ 3.95300000e+01 -1.06300000e+02 nan]\n",
- " [ 3.95300000e+01 -1.06260000e+02 nan]\n",
- " [ 3.95300000e+01 -1.06180000e+02 nan]\n",
- " [ 3.95300000e+01 -1.06140000e+02 nan]\n",
- " [ 3.95300000e+01 -1.06100000e+02 nan]\n",
- " [ 3.95300000e+01 -1.06060000e+02 nan]\n",
- " [ 3.95300000e+01 -1.05980000e+02 nan]\n",
- " [ 3.95300000e+01 -1.05940000e+02 7.43781748e-01]\n",
- " [ 3.95300000e+01 -1.05860000e+02 nan]\n",
- " [ 3.95700000e+01 -1.06420000e+02 nan]\n",
- " [ 3.95700000e+01 -1.06300000e+02 nan]\n",
- " [ 3.95700000e+01 -1.06260000e+02 nan]\n",
- " [ 3.95700000e+01 -1.06180000e+02 nan]\n",
- " [ 3.95700000e+01 -1.06140000e+02 nan]\n",
- " [ 3.95700000e+01 -1.06100000e+02 nan]\n",
- " [ 3.95700000e+01 -1.06060000e+02 nan]\n",
- " [ 3.95700000e+01 -1.05980000e+02 nan]\n",
- " [ 3.95700000e+01 -1.05940000e+02 nan]\n",
- " [ 3.95700000e+01 -1.05860000e+02 1.02006123e-01]\n",
- " [ 3.96500000e+01 -1.06420000e+02 nan]\n",
- " [ 3.96500000e+01 -1.06300000e+02 nan]\n",
- " [ 3.96500000e+01 -1.06260000e+02 nan]\n",
- " [ 3.96500000e+01 -1.06180000e+02 nan]\n",
- " [ 3.96500000e+01 -1.06140000e+02 nan]\n",
- " [ 3.96500000e+01 -1.06100000e+02 nan]\n",
- " [ 3.96500000e+01 -1.06060000e+02 nan]\n",
- " [ 3.96500000e+01 -1.05980000e+02 9.86327972e-01]\n",
- " [ 3.96500000e+01 -1.05940000e+02 nan]\n",
- " [ 3.96500000e+01 -1.05860000e+02 nan]\n",
- " [ 3.96900000e+01 -1.06420000e+02 nan]\n",
- " [ 3.96900000e+01 -1.06300000e+02 nan]\n",
- " [ 3.96900000e+01 -1.06260000e+02 1.11679921e+00]\n",
- " [ 3.96900000e+01 -1.06180000e+02 nan]\n",
- " [ 3.96900000e+01 -1.06140000e+02 nan]\n",
- " [ 3.96900000e+01 -1.06100000e+02 nan]\n",
- " [ 3.96900000e+01 -1.06060000e+02 nan]\n",
- " [ 3.96900000e+01 -1.05980000e+02 nan]\n",
- " [ 3.96900000e+01 -1.05940000e+02 nan]\n",
- " [ 3.96900000e+01 -1.05860000e+02 nan]\n",
- " [ 3.98100000e+01 -1.06420000e+02 nan]\n",
- " [ 3.98100000e+01 -1.06300000e+02 nan]\n",
- " [ 3.98100000e+01 -1.06260000e+02 nan]\n",
- " [ 3.98100000e+01 -1.06180000e+02 3.31105174e+00]\n",
- " [ 3.98100000e+01 -1.06140000e+02 2.42166730e+00]\n",
- " [ 3.98100000e+01 -1.06100000e+02 nan]\n",
- " [ 3.98100000e+01 -1.06060000e+02 nan]\n",
- " [ 3.98100000e+01 -1.05980000e+02 nan]\n",
- " [ 3.98100000e+01 -1.05940000e+02 nan]\n",
- " [ 3.98100000e+01 -1.05860000e+02 nan]\n",
- " [ 3.98900000e+01 -1.06420000e+02 2.15902146e+00]\n",
- " [ 3.98900000e+01 -1.06300000e+02 3.70965360e+00]\n",
- " [ 3.98900000e+01 -1.06260000e+02 nan]\n",
- " [ 3.98900000e+01 -1.06180000e+02 nan]\n",
- " [ 3.98900000e+01 -1.06140000e+02 nan]\n",
- " [ 3.98900000e+01 -1.06100000e+02 nan]\n",
- " [ 3.98900000e+01 -1.06060000e+02 nan]\n",
- " [ 3.98900000e+01 -1.05980000e+02 nan]\n",
- " [ 3.98900000e+01 -1.05940000e+02 nan]\n",
- " [ 3.98900000e+01 -1.05860000e+02 nan]]\n"
+ "C:\\Users\\tford\\dev\\PVDegradationTools\\pvdeg\\utilities.py:479: FutureWarning: The return type of `Dataset.dims` will be changed to return a set of dimension names in future, in order to be more consistent with `DataArray.dims`. To access a mapping from dimension names to lengths, please use `Dataset.sizes`.\n",
+ " if ds1.dims != ds2.dims:\n"
]
+ },
+ {
+ "data": {
+ "text/plain": [
+ "True"
+ ]
+ },
+ "execution_count": 11,
+ "metadata": {},
+ "output_type": "execute_result"
}
],
"source": [
- "data_var = res[\"x\"]\n",
- "\n",
- "# Stack the latitude and longitude coordinates into a single dimension\n",
- "stacked = data_var.stack(z=(\"latitude\", \"longitude\"))\n",
- "\n",
- "# Extract the coordinates as numpy arrays\n",
- "latitudes = stacked['latitude'].values\n",
- "longitudes = stacked['longitude'].values\n",
- "\n",
- "# Extract the data values\n",
- "data_values = stacked.values\n",
- "\n",
- "# Combine coordinates and data values into a single numpy array\n",
- "combined_array = np.column_stack((latitudes, longitudes, data_values))\n",
- "\n",
- "print(combined_array)"
+    "pvdeg.utilities.compare_templates(manual_template, humidity_template)"
]
},
{
@@ -916,27 +237,36 @@
"execution_count": null,
"metadata": {},
"outputs": [],
- "source": []
+ "source": [
+ "# test template\n",
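+    "# intentionally different shapes, written out below as mismatch-template.nc for a negative case\n",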
+ "\n",
+ "shapes = {\n",
+    "    'testA': (\"gid\",),\n",
+    "    'testB': (\"gid\", \"time\"),\n",
+    "}\n",
+    "\n",
+    "template = pvdeg.geospatial.output_template(\n",
+    "    shapes=shapes,\n",
+    "    ds_gids=GEO_WEATHER,\n",
+    ")\n"
+ ]
},
{
"cell_type": "code",
- "execution_count": 13,
+ "execution_count": null,
"metadata": {},
"outputs": [],
"source": [
- "res = pd.DataFrame(combined_array).dropna()\n",
- "ans = pd.read_csv(os.path.join(TEST_DATA_DIR, 'summit-standoff-res.csv'), index_col=0)\n",
- "res.columns = ans.columns"
+ "template.to_netcdf(os.path.join(TEST_DATA_DIR, \"mismatch-template.nc\"))"
]
},
{
"cell_type": "code",
- "execution_count": 17,
+ "execution_count": null,
"metadata": {},
"outputs": [],
- "source": [
- "pd.testing.assert_frame_equal(res, ans, check_dtype=False, check_names=False)"
- ]
+ "source": []
}
],
"metadata": {
diff --git a/tests/test_geospatial.py b/tests/test_geospatial.py
index 14c3aacd..1793c12a 100644
--- a/tests/test_geospatial.py
+++ b/tests/test_geospatial.py
@@ -3,14 +3,20 @@
import pickle
import pandas as pd
import numpy as np
+import xarray as xr
import os
GEO_META = pd.read_csv(os.path.join(TEST_DATA_DIR, "summit-meta.csv"), index_col=0)
-with open(os.path.join(TEST_DATA_DIR, "summit-weather.pkl"), 'rb') as f:
+with open(os.path.join(TEST_DATA_DIR, "summit-weather.pkl"), "rb") as f:
GEO_WEATHER = pickle.load(f)
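+
+# open_dataset is lazy by default; .compute() loads the reference template into memory up front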
+HUMIDITY_TEMPLATE = xr.open_dataset(
+ os.path.join(TEST_DATA_DIR, "humidity_template.nc")
+).compute()
+
+
def test_analysis_standoff():
res_ds = pvdeg.geospatial.analysis(
weather_ds=GEO_WEATHER,
@@ -23,13 +29,40 @@ def test_analysis_standoff():
# Stack the latitude and longitude coordinates into a single dimension
# convert to dataframe, this can be done with xr.dataset.to_dataframe as well
stacked = data_var.stack(z=("latitude", "longitude"))
- latitudes = stacked['latitude'].values
- longitudes = stacked['longitude'].values
+ latitudes = stacked["latitude"].values
+ longitudes = stacked["longitude"].values
data_values = stacked.values
combined_array = np.column_stack((latitudes, longitudes, data_values))
res = pd.DataFrame(combined_array).dropna()
- ans = pd.read_csv(os.path.join(TEST_DATA_DIR, 'summit-standoff-res.csv'), index_col=0)
+ ans = pd.read_csv(
+ os.path.join(TEST_DATA_DIR, "summit-standoff-res.csv"), index_col=0
+ )
res.columns = ans.columns
pd.testing.assert_frame_equal(res, ans, check_dtype=False, check_names=False)
+
+
+def test_autotemplate():
+ autotemplate_result = pvdeg.geospatial.auto_template(
+ func=pvdeg.humidity.module, ds_gids=GEO_WEATHER
+ ).compute()
+
+    # custom comparison: xarray's equals/identical can't be used on empty-like values
+    assert pvdeg.utilities.compare_templates(autotemplate_result, HUMIDITY_TEMPLATE)
+
+
+def test_template():
+ shapes = {
+ "RH_surface_outside": ("gid", "time"),
+ "RH_front_encap": ("gid", "time"),
+ "RH_back_encap": ("gid", "time"),
+ "RH_backsheet": ("gid", "time"),
+ }
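+    # same variables pvdeg.humidity.module reports, so this manual template should
+    # match the stored humidity_template.nc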
+
+ manual_template = pvdeg.geospatial.output_template(
+ shapes=shapes, ds_gids=GEO_WEATHER
+ ).compute()
+
+ assert pvdeg.utilities.compare_templates(manual_template, HUMIDITY_TEMPLATE)