diff --git a/contributors/tobin-ford.md b/contributors/tobin-ford.md new file mode 100644 index 00000000..6a71060c --- /dev/null +++ b/contributors/tobin-ford.md @@ -0,0 +1,12 @@ +2024-08-01 + +I hereby agree to the terms of the Contributors License Agreement, +version 1.0, with MD5 checksum 46ea45f996295bdb0652333d516a6d0a. + +I furthermore declare that I am authorized and able to make this +agreement and sign this declaration. + +Signed, + +Tobin Ford https://github.com/tobin-ford + diff --git a/pvdeg/geospatial.py b/pvdeg/geospatial.py index 1286e7e7..2cc87816 100644 --- a/pvdeg/geospatial.py +++ b/pvdeg/geospatial.py @@ -262,6 +262,8 @@ def output_template( # we should be able to get rid of this with the new autotemplating function and decorator +# this is helpful for users so we should move it to a section in the documenation, +# discuss with group def template_parameters(func): """ Output parameters for xarray template. diff --git a/pvdeg/utilities.py b/pvdeg/utilities.py index 6379b851..46607bae 100644 --- a/pvdeg/utilities.py +++ b/pvdeg/utilities.py @@ -7,6 +7,7 @@ from typing import Callable import inspect import math +import xarray as xr def gid_downsampling(meta, n): @@ -471,3 +472,32 @@ def tilt_azimuth_scan( print("\r ", end="") print("\r", end="") return tilt_azimuth_series + +def compare_templates(ds1: xr.Dataset, ds2: xr.Dataset, atol=1e-10, consider_nan_equal=True) -> bool: + """Compare loaded datasets with "empty-like" values""" + + if ds1.dims != ds2.dims: + return False + + if set(ds1.coords.keys()) != set(ds2.coords.keys()): + return False + + for coord in ds1.coords: + if ds1.coords[coord].dtype.kind in {'i', 'f'}: + if not np.allclose(ds1.coords[coord], ds2.coords[coord], atol=atol): # Use np.allclose for numeric coordinates + return False + elif ds1.coords[coord].dtype.kind == 'M': # datetime64 + if not np.array_equal(ds1.coords[coord], ds2.coords[coord]): # Use array equality for datetime coordinates + return False + else: + if not np.array_equal(ds1.coords[coord], ds2.coords[coord]): + return False + + if set(ds1.data_vars.keys()) != set(ds2.data_vars.keys()): + return False + + for dim in ds1.dims: + if not ds1.indexes[dim].equals(ds2.indexes[dim]): + return False + + return True \ No newline at end of file diff --git a/tests/data/humidity_template.nc b/tests/data/humidity_template.nc new file mode 100644 index 00000000..63bbc255 Binary files /dev/null and b/tests/data/humidity_template.nc differ diff --git a/tests/sandbox.ipynb b/tests/sandbox.ipynb index 8d4f5eea..d828964b 100644 --- a/tests/sandbox.ipynb +++ b/tests/sandbox.ipynb @@ -81,834 +81,155 @@ "import pickle\n", "import pandas as pd\n", "import numpy as np\n", + "import xarray as xr\n", "import os\n", "from pvdeg import TEST_DATA_DIR\n", "\n", - "with open(r\"C:\\Users\\tford\\Downloads\\summit-weather.pkl\", 'rb') as f:\n", - " GEO_WEATHER = pickle.load(f)\n", - "\n", - "GEO_META = pd.read_csv(r\"C:\\Users\\tford\\Downloads\\summit-meta.csv\", index_col=0)" + "GEO_META = pd.read_csv(os.path.join(TEST_DATA_DIR, \"summit-meta.csv\"), index_col=0)\n", + "with open(os.path.join(TEST_DATA_DIR, \"summit-weather.pkl\"), 'rb') as f:\n", + " GEO_WEATHER = pickle.load(f)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "autotemplate" ] }, { "cell_type": "code", "execution_count": 2, "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "
\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "
<xarray.Dataset> Size: 9MB\n",
-       "Dimensions:            (time: 17520, gid: 11)\n",
-       "Coordinates:\n",
-       "  * gid                (gid) int64 88B 449211 452064 453020 ... 460613 462498\n",
-       "  * time               (time) datetime64[ns] 140kB 2022-01-01 ... 2022-12-31T...\n",
-       "Data variables:\n",
-       "    temp_air           (time, gid) float64 2MB -12.0 -8.1 -14.2 ... -4.3 -6.2\n",
-       "    wind_speed         (time, gid) float64 2MB 0.6 0.6 0.3 0.6 ... 0.9 1.0 1.1\n",
-       "    dhi                (time, gid) float64 2MB 0.0 0.0 0.0 ... 13.0 18.0 19.0\n",
-       "    ghi                (time, gid) float64 2MB 0.0 0.0 0.0 ... 13.0 24.0 19.0\n",
-       "    dni                (time, gid) float64 2MB 0.0 0.0 0.0 0.0 ... 0.0 126.0 1.0\n",
-       "    relative_humidity  (time, gid) float64 2MB 99.93 79.41 100.0 ... 95.93 100.0\n",
-       "Attributes:\n",
-       "    full_version_record:  {"rex": "0.2.80", "pandas": "2.0.0", "numpy": "1.23...\n",
-       "    package:              rex\n",
-       "    version:              4.0.0
" - ], - "text/plain": [ - " Size: 9MB\n", - "Dimensions: (time: 17520, gid: 11)\n", - "Coordinates:\n", - " * gid (gid) int64 88B 449211 452064 453020 ... 460613 462498\n", - " * time (time) datetime64[ns] 140kB 2022-01-01 ... 2022-12-31T...\n", - "Data variables:\n", - " temp_air (time, gid) float64 2MB -12.0 -8.1 -14.2 ... -4.3 -6.2\n", - " wind_speed (time, gid) float64 2MB 0.6 0.6 0.3 0.6 ... 0.9 1.0 1.1\n", - " dhi (time, gid) float64 2MB 0.0 0.0 0.0 ... 13.0 18.0 19.0\n", - " ghi (time, gid) float64 2MB 0.0 0.0 0.0 ... 13.0 24.0 19.0\n", - " dni (time, gid) float64 2MB 0.0 0.0 0.0 0.0 ... 0.0 126.0 1.0\n", - " relative_humidity (time, gid) float64 2MB 99.93 79.41 100.0 ... 95.93 100.0\n", - "Attributes:\n", - " full_version_record: {\"rex\": \"0.2.80\", \"pandas\": \"2.0.0\", \"numpy\": \"1.23...\n", - " package: rex\n", - " version: 4.0.0" - ] - }, - "execution_count": 2, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ - "GEO_WEATHER" + "autotemplate_result = pvdeg.geospatial.auto_template(\n", + " func=pvdeg.humidity.module, \n", + " ds_gids=GEO_WEATHER\n", + ").compute()" ] }, { "cell_type": "code", "execution_count": 3, "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "
\n", - "\n", - "\n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - "
latitudelongitudealtitudetzcountrystatecountywind_height
44921139.89-106.422954-7United StatesColoradoSummit2
45206439.89-106.302486-7United StatesColoradoSummit2
45302039.69-106.263485-7United StatesColoradoSummit2
45491639.81-106.182767-7United StatesColoradoSummit2
45586739.81-106.142883-7United StatesColoradoSummit2
45587739.41-106.143859-7United StatesColoradoSummit2
45682739.45-106.103634-7United StatesColoradoSummit2
45777639.41-106.063363-7United StatesColoradoSummit2
45967039.65-105.983261-7United StatesColoradoSummit2
46061339.53-105.943190-7United StatesColoradoSummit2
46249839.57-105.863523-7United StatesColoradoSummit2
\n", - "
" - ], - "text/plain": [ - " latitude longitude altitude tz country state county \\\n", - "449211 39.89 -106.42 2954 -7 United States Colorado Summit \n", - "452064 39.89 -106.30 2486 -7 United States Colorado Summit \n", - "453020 39.69 -106.26 3485 -7 United States Colorado Summit \n", - "454916 39.81 -106.18 2767 -7 United States Colorado Summit \n", - "455867 39.81 -106.14 2883 -7 United States Colorado Summit \n", - "455877 39.41 -106.14 3859 -7 United States Colorado Summit \n", - "456827 39.45 -106.10 3634 -7 United States Colorado Summit \n", - "457776 39.41 -106.06 3363 -7 United States Colorado Summit \n", - "459670 39.65 -105.98 3261 -7 United States Colorado Summit \n", - "460613 39.53 -105.94 3190 -7 United States Colorado Summit \n", - "462498 39.57 -105.86 3523 -7 United States Colorado Summit \n", - "\n", - " wind_height \n", - "449211 2 \n", - "452064 2 \n", - "453020 2 \n", - "454916 2 \n", - "455867 2 \n", - "455877 2 \n", - "456827 2 \n", - "457776 2 \n", - "459670 2 \n", - "460613 2 \n", - "462498 2 " - ] - }, - "execution_count": 3, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ - "GEO_META" + "humidity_template = xr.open_dataset(os.path.join(TEST_DATA_DIR, 'humidity_template.nc')).compute()" ] }, { "cell_type": "code", - "execution_count": 18, + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "def compare_datasets(ds1: xr.Dataset, ds2: xr.Dataset, atol=1e-10) -> bool:\n", + " \"\"\"Compare loaded datasets with \"empty-like\" values\"\"\"\n", + "\n", + " if ds1.dims != ds2.dims:\n", + " return False\n", + "\n", + " if set(ds1.coords.keys()) != set(ds2.coords.keys()):\n", + " return False\n", + "\n", + " for coord in ds1.coords:\n", + " if ds1.coords[coord].dtype.kind in {'i', 'f'}:\n", + " # Use np.allclose for numeric coordinates\n", + " if not np.allclose(ds1.coords[coord], ds2.coords[coord], atol=atol):\n", + " return False\n", + " elif ds1.coords[coord].dtype.kind == 'M': # datetime64 type\n", + " # Use array equality for datetime coordinates\n", + " if not np.array_equal(ds1.coords[coord], ds2.coords[coord]):\n", + " return False\n", + " else:\n", + " if not np.array_equal(ds1.coords[coord], ds2.coords[coord]):\n", + " return False\n", + "\n", + " if set(ds1.data_vars.keys()) != set(ds2.data_vars.keys()):\n", + " return False\n", + "\n", + " for var in ds1.data_vars:\n", + " if not np.allclose(ds1[var], ds2[var], atol=atol):\n", + " return False\n", + "\n", + " for dim in ds1.dims:\n", + " if not ds1.indexes[dim].equals(ds2.indexes[dim]):\n", + " return False\n", + "\n", + " return True" + ] + }, + { + "cell_type": "code", + "execution_count": 5, "metadata": {}, "outputs": [ { - "name": "stdout", + "name": "stderr", "output_type": "stream", "text": [ - "The array tilt angle was not provided, therefore the latitude tilt of 39.9 was used.\n", - "The array azimuth was not provided, therefore an azimuth of 180.0 was used.\n", - "The array tilt angle was not provided, therefore the latitude tilt of 39.9 was used.\n", - "The array azimuth was not provided, therefore an azimuth of 180.0 was used.\n", - "The array tilt angle was not provided, therefore the latitude tilt of 39.7 was used.\n", - "The array azimuth was not provided, therefore an azimuth of 180.0 was used.\n", - "The array tilt angle was not provided, therefore the latitude tilt of 39.8 was used.\n", - "The array azimuth was not provided, therefore an azimuth of 180.0 was used.\n", - "The array tilt angle was not provided, therefore the 
latitude tilt of 39.8 was used.\n", - "The array azimuth was not provided, therefore an azimuth of 180.0 was used.\n", - "The array tilt angle was not provided, therefore the latitude tilt of 39.4 was used.\n", - "The array azimuth was not provided, therefore an azimuth of 180.0 was used.\n", - "The array tilt angle was not provided, therefore the latitude tilt of 39.5 was used.\n", - "The array azimuth was not provided, therefore an azimuth of 180.0 was used.\n", - "The array tilt angle was not provided, therefore the latitude tilt of 39.4 was used.\n", - "The array azimuth was not provided, therefore an azimuth of 180.0 was used.\n", - "The array tilt angle was not provided, therefore the latitude tilt of 39.6 was used.\n", - "The array azimuth was not provided, therefore an azimuth of 180.0 was used.\n", - "The array tilt angle was not provided, therefore the latitude tilt of 39.5 was used.\n", - "The array azimuth was not provided, therefore an azimuth of 180.0 was used.\n", - "The array tilt angle was not provided, therefore the latitude tilt of 39.6 was used.\n", - "The array azimuth was not provided, therefore an azimuth of 180.0 was used.\n" + "C:\\Users\\tford\\dev\\PVDegradationTools\\pvdeg\\utilities.py:479: FutureWarning: The return type of `Dataset.dims` will be changed to return a set of dimension names in future, in order to be more consistent with `DataArray.dims`. To access a mapping from dimension names to lengths, please use `Dataset.sizes`.\n", + " if ds1.dims != ds2.dims:\n" ] } ], "source": [ - "res = pvdeg.geospatial.analysis(\n", - " weather_ds=GEO_WEATHER,\n", - " meta_df=GEO_META,\n", - " func=pvdeg.standards.standoff,\n", - ")" + "assert pvdeg.utilities.compare_datasets(autotemplate_result,humidity_template)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "output template" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": {}, + "outputs": [], + "source": [ + "shapes = {\n", + " \"RH_surface_outside\": (\"gid\", \"time\"),\n", + " \"RH_front_encap\": (\"gid\", \"time\"),\n", + " \"RH_back_encap\": (\"gid\", \"time\"),\n", + " \"RH_backsheet\": (\"gid\", \"time\"),\n", + "}\n", + "\n", + "manual_template = pvdeg.geospatial.output_template(\n", + " shapes=shapes,\n", + " ds_gids=GEO_WEATHER\n", + ").compute()" ] }, { "cell_type": "code", - "execution_count": 6, + "execution_count": 11, "metadata": {}, "outputs": [ { - "name": "stdout", + "name": "stderr", "output_type": "stream", "text": [ - "[[ 3.94100000e+01 -1.06420000e+02 nan]\n", - " [ 3.94100000e+01 -1.06300000e+02 nan]\n", - " [ 3.94100000e+01 -1.06260000e+02 nan]\n", - " [ 3.94100000e+01 -1.06180000e+02 nan]\n", - " [ 3.94100000e+01 -1.06140000e+02 5.05368226e-01]\n", - " [ 3.94100000e+01 -1.06100000e+02 nan]\n", - " [ 3.94100000e+01 -1.06060000e+02 4.23099392e-01]\n", - " [ 3.94100000e+01 -1.05980000e+02 nan]\n", - " [ 3.94100000e+01 -1.05940000e+02 nan]\n", - " [ 3.94100000e+01 -1.05860000e+02 nan]\n", - " [ 3.94500000e+01 -1.06420000e+02 nan]\n", - " [ 3.94500000e+01 -1.06300000e+02 nan]\n", - " [ 3.94500000e+01 -1.06260000e+02 nan]\n", - " [ 3.94500000e+01 -1.06180000e+02 nan]\n", - " [ 3.94500000e+01 -1.06140000e+02 nan]\n", - " [ 3.94500000e+01 -1.06100000e+02 4.74997232e-01]\n", - " [ 3.94500000e+01 -1.06060000e+02 nan]\n", - " [ 3.94500000e+01 -1.05980000e+02 nan]\n", - " [ 3.94500000e+01 -1.05940000e+02 nan]\n", - " [ 3.94500000e+01 -1.05860000e+02 nan]\n", - " [ 3.95300000e+01 -1.06420000e+02 nan]\n", - " [ 3.95300000e+01 -1.06300000e+02 nan]\n", - 
" [ 3.95300000e+01 -1.06260000e+02 nan]\n", - " [ 3.95300000e+01 -1.06180000e+02 nan]\n", - " [ 3.95300000e+01 -1.06140000e+02 nan]\n", - " [ 3.95300000e+01 -1.06100000e+02 nan]\n", - " [ 3.95300000e+01 -1.06060000e+02 nan]\n", - " [ 3.95300000e+01 -1.05980000e+02 nan]\n", - " [ 3.95300000e+01 -1.05940000e+02 7.43781748e-01]\n", - " [ 3.95300000e+01 -1.05860000e+02 nan]\n", - " [ 3.95700000e+01 -1.06420000e+02 nan]\n", - " [ 3.95700000e+01 -1.06300000e+02 nan]\n", - " [ 3.95700000e+01 -1.06260000e+02 nan]\n", - " [ 3.95700000e+01 -1.06180000e+02 nan]\n", - " [ 3.95700000e+01 -1.06140000e+02 nan]\n", - " [ 3.95700000e+01 -1.06100000e+02 nan]\n", - " [ 3.95700000e+01 -1.06060000e+02 nan]\n", - " [ 3.95700000e+01 -1.05980000e+02 nan]\n", - " [ 3.95700000e+01 -1.05940000e+02 nan]\n", - " [ 3.95700000e+01 -1.05860000e+02 1.02006123e-01]\n", - " [ 3.96500000e+01 -1.06420000e+02 nan]\n", - " [ 3.96500000e+01 -1.06300000e+02 nan]\n", - " [ 3.96500000e+01 -1.06260000e+02 nan]\n", - " [ 3.96500000e+01 -1.06180000e+02 nan]\n", - " [ 3.96500000e+01 -1.06140000e+02 nan]\n", - " [ 3.96500000e+01 -1.06100000e+02 nan]\n", - " [ 3.96500000e+01 -1.06060000e+02 nan]\n", - " [ 3.96500000e+01 -1.05980000e+02 9.86327972e-01]\n", - " [ 3.96500000e+01 -1.05940000e+02 nan]\n", - " [ 3.96500000e+01 -1.05860000e+02 nan]\n", - " [ 3.96900000e+01 -1.06420000e+02 nan]\n", - " [ 3.96900000e+01 -1.06300000e+02 nan]\n", - " [ 3.96900000e+01 -1.06260000e+02 1.11679921e+00]\n", - " [ 3.96900000e+01 -1.06180000e+02 nan]\n", - " [ 3.96900000e+01 -1.06140000e+02 nan]\n", - " [ 3.96900000e+01 -1.06100000e+02 nan]\n", - " [ 3.96900000e+01 -1.06060000e+02 nan]\n", - " [ 3.96900000e+01 -1.05980000e+02 nan]\n", - " [ 3.96900000e+01 -1.05940000e+02 nan]\n", - " [ 3.96900000e+01 -1.05860000e+02 nan]\n", - " [ 3.98100000e+01 -1.06420000e+02 nan]\n", - " [ 3.98100000e+01 -1.06300000e+02 nan]\n", - " [ 3.98100000e+01 -1.06260000e+02 nan]\n", - " [ 3.98100000e+01 -1.06180000e+02 3.31105174e+00]\n", - " [ 3.98100000e+01 -1.06140000e+02 2.42166730e+00]\n", - " [ 3.98100000e+01 -1.06100000e+02 nan]\n", - " [ 3.98100000e+01 -1.06060000e+02 nan]\n", - " [ 3.98100000e+01 -1.05980000e+02 nan]\n", - " [ 3.98100000e+01 -1.05940000e+02 nan]\n", - " [ 3.98100000e+01 -1.05860000e+02 nan]\n", - " [ 3.98900000e+01 -1.06420000e+02 2.15902146e+00]\n", - " [ 3.98900000e+01 -1.06300000e+02 3.70965360e+00]\n", - " [ 3.98900000e+01 -1.06260000e+02 nan]\n", - " [ 3.98900000e+01 -1.06180000e+02 nan]\n", - " [ 3.98900000e+01 -1.06140000e+02 nan]\n", - " [ 3.98900000e+01 -1.06100000e+02 nan]\n", - " [ 3.98900000e+01 -1.06060000e+02 nan]\n", - " [ 3.98900000e+01 -1.05980000e+02 nan]\n", - " [ 3.98900000e+01 -1.05940000e+02 nan]\n", - " [ 3.98900000e+01 -1.05860000e+02 nan]]\n" + "C:\\Users\\tford\\dev\\PVDegradationTools\\pvdeg\\utilities.py:479: FutureWarning: The return type of `Dataset.dims` will be changed to return a set of dimension names in future, in order to be more consistent with `DataArray.dims`. 
To access a mapping from dimension names to lengths, please use `Dataset.sizes`.\n", + " if ds1.dims != ds2.dims:\n" ] + }, + { + "data": { + "text/plain": [ + "True" + ] + }, + "execution_count": 11, + "metadata": {}, + "output_type": "execute_result" } ], "source": [ - "data_var = res[\"x\"]\n", - "\n", - "# Stack the latitude and longitude coordinates into a single dimension\n", - "stacked = data_var.stack(z=(\"latitude\", \"longitude\"))\n", - "\n", - "# Extract the coordinates as numpy arrays\n", - "latitudes = stacked['latitude'].values\n", - "longitudes = stacked['longitude'].values\n", - "\n", - "# Extract the data values\n", - "data_values = stacked.values\n", - "\n", - "# Combine coordinates and data values into a single numpy array\n", - "combined_array = np.column_stack((latitudes, longitudes, data_values))\n", - "\n", - "print(combined_array)" + "pvdeg.utilities.compare_datasets(manual_template, humidity_template)" ] }, { @@ -916,27 +237,36 @@ "execution_count": null, "metadata": {}, "outputs": [], - "source": [] + "source": [ + "# test template\n", + "\n", + "shapes = {\n", + " 'testA' : (\"gid\", ),\n", + " 'testB' : (\"gid\", \"time\")\n", + "}\n", + "\n", + "template = pvdeg.geospatial.output_template(\n", + " shapes=shapes,\n", + "\n", + " ds_gids=GEO_WEATHER,\n", + ")\n" + ] }, { "cell_type": "code", - "execution_count": 13, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ - "res = pd.DataFrame(combined_array).dropna()\n", - "ans = pd.read_csv(os.path.join(TEST_DATA_DIR, 'summit-standoff-res.csv'), index_col=0)\n", - "res.columns = ans.columns" + "template.to_netcdf(os.path.join(TEST_DATA_DIR, \"mismatch-template.nc\"))" ] }, { "cell_type": "code", - "execution_count": 17, + "execution_count": null, "metadata": {}, "outputs": [], - "source": [ - "pd.testing.assert_frame_equal(res, ans, check_dtype=False, check_names=False)" - ] + "source": [] } ], "metadata": { diff --git a/tests/test_geospatial.py b/tests/test_geospatial.py index 14c3aacd..1793c12a 100644 --- a/tests/test_geospatial.py +++ b/tests/test_geospatial.py @@ -3,14 +3,20 @@ import pickle import pandas as pd import numpy as np +import xarray as xr import os GEO_META = pd.read_csv(os.path.join(TEST_DATA_DIR, "summit-meta.csv"), index_col=0) -with open(os.path.join(TEST_DATA_DIR, "summit-weather.pkl"), 'rb') as f: +with open(os.path.join(TEST_DATA_DIR, "summit-weather.pkl"), "rb") as f: GEO_WEATHER = pickle.load(f) +HUMIDITY_TEMPLATE = xr.open_dataset( + os.path.join(TEST_DATA_DIR, "humidity_template.nc") +).compute() + + def test_analysis_standoff(): res_ds = pvdeg.geospatial.analysis( weather_ds=GEO_WEATHER, @@ -23,13 +29,40 @@ def test_analysis_standoff(): # Stack the latitude and longitude coordinates into a single dimension # convert to dataframe, this can be done with xr.dataset.to_dataframe as well stacked = data_var.stack(z=("latitude", "longitude")) - latitudes = stacked['latitude'].values - longitudes = stacked['longitude'].values + latitudes = stacked["latitude"].values + longitudes = stacked["longitude"].values data_values = stacked.values combined_array = np.column_stack((latitudes, longitudes, data_values)) res = pd.DataFrame(combined_array).dropna() - ans = pd.read_csv(os.path.join(TEST_DATA_DIR, 'summit-standoff-res.csv'), index_col=0) + ans = pd.read_csv( + os.path.join(TEST_DATA_DIR, "summit-standoff-res.csv"), index_col=0 + ) res.columns = ans.columns pd.testing.assert_frame_equal(res, ans, check_dtype=False, check_names=False) + + +def test_autotemplate(): + 
autotemplate_result = pvdeg.geospatial.auto_template(
+        func=pvdeg.humidity.module, ds_gids=GEO_WEATHER
+    ).compute()
+
+    assert pvdeg.utilities.compare_templates(
+        autotemplate_result, HUMIDITY_TEMPLATE
+    )  # custom comparison; we can't use equals() or identical() because the templates hold empty-like values
+
+
+def test_template():
+    shapes = {
+        "RH_surface_outside": ("gid", "time"),
+        "RH_front_encap": ("gid", "time"),
+        "RH_back_encap": ("gid", "time"),
+        "RH_backsheet": ("gid", "time"),
+    }
+
+    manual_template = pvdeg.geospatial.output_template(
+        shapes=shapes, ds_gids=GEO_WEATHER
+    ).compute()
+
+    assert pvdeg.utilities.compare_templates(manual_template, HUMIDITY_TEMPLATE)
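
The new compare_templates helper exists because geospatial output templates only carry meaningful structure (dims, coords, indexes, variable names), not meaningful values, so Dataset.equals() and Dataset.identical() are too strict. A minimal, self-contained sketch of that distinction follows; the toy make_template helper and its fill values are assumptions for illustration only and are not part of this PR.

import numpy as np
import pandas as pd
import xarray as xr

from pvdeg.utilities import compare_templates

gids = np.array([449211, 452064, 453020])
times = pd.date_range("2022-01-01", periods=4, freq="D")


def make_template(fill: float) -> xr.Dataset:
    # Structure (dims, coords, variable names) is what matters in a template;
    # the data values are placeholders.
    return xr.Dataset(
        data_vars={
            "RH_surface_outside": (
                ("gid", "time"),
                np.full((gids.size, times.size), fill),
            ),
        },
        coords={"gid": gids, "time": times},
    )


a = make_template(np.nan)  # e.g. a template read back from disk
b = make_template(123.0)   # e.g. a freshly built template with uninitialized values

print(a.equals(b))              # False: placeholder values differ
print(compare_templates(a, b))  # True: dims, coords, indexes, and variable names match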