Skip to content

Commit 75fd6bd

Browse files
create a public plotting interface (#19)
* add cdippy lib version info to requests; add loggers to cdipnc and url_utils; mock all unit tests
* fix uninitialized variable in mop_data
* fix 'start' dates interpreted as local time rather than UTC
* re-org package and define public API config docs
* fix pyproject to include utils package
* re-org package and define public API config docs
* create a top-level plotting interface to replace the plots lib; clean up API refs for plotting
* make import names more descriptive
1 parent 5132752 commit 75fd6bd

File tree

10 files changed

+144
-77
lines changed

10 files changed

+144
-77
lines changed

cdippy/__init__.py

Lines changed: 5 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,8 @@
11
# import public top-level modules
2-
from . import cdipnc, nchashes, ncstats, ndbc, plots, spectra, stndata
2+
from . import cdipnc, nchashes, ncstats, ndbc, plotting, spectra, stndata
3+
4+
# import plots library for backward compatibility
5+
from . import plots as plots # noqa: F401
36

47
# public API (i.e. "from cdippy import *")
5-
__all__ = ["cdipnc", "nchashes", "ncstats", "ndbc", "plots", "spectra", "stndata"]
8+
__all__ = ["cdipnc", "nchashes", "ncstats", "ndbc", "plotting", "spectra", "stndata"]

cdippy/cdipnc.py

Lines changed: 13 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -9,11 +9,8 @@
99
from bisect import bisect_left, bisect_right
1010

1111
import cdippy.ndbc as ndbc
12-
import cdippy.utils.utils as cu
13-
import cdippy.utils.urls as uu
14-
15-
16-
logger = logging.getLogger(__name__)
12+
import cdippy.utils.utils as cdip_utils
13+
import cdippy.utils.urls as url_utils
1714

1815

1916
logger = logging.getLogger(__name__)
@@ -174,9 +171,9 @@ def set_timespan(self, start, end):
174171
)
175172
else:
176173
self.end_dt = end
177-
self.start_stamp = cu.datetime_to_timestamp(self.start_dt)
174+
self.start_stamp = cdip_utils.datetime_to_timestamp(self.start_dt)
178175

179-
self.end_stamp = cu.datetime_to_timestamp(self.end_dt)
176+
self.end_stamp = cdip_utils.datetime_to_timestamp(self.end_dt)
180177

181178
def get_request(self) -> dict:
182179
"""Returns the data specified using set_request_info.
@@ -444,23 +441,23 @@ def get_dataset_urls(self) -> dict:
444441
catalog_url = "/".join([self.THREDDS_url, "thredds", "catalog.xml"])
445442

446443
result = {}
447-
root = uu.load_et_root(catalog_url)
444+
root = url_utils.load_et_root(catalog_url)
448445
catalogs = []
449-
uu.rfindta(root, catalogs, "catalogRef", "href")
446+
url_utils.rfindta(root, catalogs, "catalogRef", "href")
450447
for catalog in catalogs:
451448
# - Archive data sets
452449
url = self.THREDDS_url + catalog
453-
cat = uu.load_et_root(url)
450+
cat = url_utils.load_et_root(url)
454451
if catalog.find("archive") >= 0:
455452
ar_urls = []
456-
uu.rfindta(cat, ar_urls, "catalogRef", "href")
453+
url_utils.rfindta(cat, ar_urls, "catalogRef", "href")
457454
b_url = os.path.dirname(url)
458455
# - Station datasets
459456
ar_ds_urls = []
460457
for u in ar_urls:
461458
url = b_url + "/" + u
462-
ds = uu.load_et_root(url)
463-
uu.rfindta(ds, ar_ds_urls, "dataset", "urlPath")
459+
ds = url_utils.load_et_root(url)
460+
url_utils.rfindta(ds, ar_ds_urls, "dataset", "urlPath")
464461
full_urls = []
465462
for url in ar_ds_urls:
466463
full_urls.append(
@@ -469,7 +466,7 @@ def get_dataset_urls(self) -> dict:
469466
result["archive"] = full_urls
470467
elif catalog.find("realtime") >= 0:
471468
rt_ds_urls = []
472-
uu.rfindta(cat, rt_ds_urls, "dataset", "urlPath")
469+
url_utils.rfindta(cat, rt_ds_urls, "dataset", "urlPath")
473470
full_urls = []
474471
for url in rt_ds_urls:
475472
full_urls.append(
@@ -1000,8 +997,8 @@ def get_request(self):
1000997
end_idx = self.__get_idx_from_timestamp(self.end_stamp)
1001998
z = self.get_var("xyzZDisplacement")
1002999
# Find out if the request timespan overlaps the data
1003-
ts1 = cu.Timespan(start_idx, end_idx)
1004-
ts2 = cu.Timespan(0, len(z) - 1)
1000+
ts1 = cdip_utils.Timespan(start_idx, end_idx)
1001+
ts2 = cdip_utils.Timespan(0, len(z) - 1)
10051002
if not ts1.overlap(ts2):
10061003
return {}
10071004
# Make sure the indices will work with the arrays

cdippy/mopdata.py

Lines changed: 8 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,7 @@
22
from bisect import bisect_left
33

44
from cdippy.cdipnc import CDIPnc
5-
import cdippy.utils.utils as cu
5+
import cdippy.utils.utils as cdip_utils
66

77

88
class MopData(CDIPnc):
@@ -238,11 +238,11 @@ def get_series(
238238
tzinfo=timezone.utc
239239
)
240240
ts_I = self.get_target_timespan(
241-
cu.datetime_to_timestamp(start), target_records, prefix + "Time"
241+
cdip_utils.datetime_to_timestamp(start), target_records, prefix + "Time"
242242
)
243243
if ts_I[0] is not None:
244-
start = cu.timestamp_to_datetime(ts_I[0])
245-
end = cu.timestamp_to_datetime(ts_I[1])
244+
start = cdip_utils.timestamp_to_datetime(ts_I[0])
245+
end = cdip_utils.timestamp_to_datetime(ts_I[1])
246246
else:
247247
return None
248248
elif start is None: # Use default 3 days back
@@ -289,12 +289,14 @@ def get_target_timespan(
289289
if i_b == last_idx or stamps[i_b] == target_timestamp:
290290
closest_idx = i_b
291291
elif i_b > 0:
292-
closest_idx = cu.get_closest_index(i_b - 1, i_b, stamps, target_timestamp)
292+
closest_idx = cdip_utils.get_closest_index(
293+
i_b - 1, i_b, stamps, target_timestamp
294+
)
293295

294296
# Now we have the closest index, find the intervals
295297

296298
if closest_idx is not None:
297-
interval = cu.get_interval(stamps, closest_idx, num_target_records)
299+
interval = cdip_utils.get_interval(stamps, closest_idx, num_target_records)
298300
return interval
299301

300302
# If we get to here there's a problem

cdippy/nchashes.py

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
1-
import cdippy.utils.urls as uu
2-
import cdippy.utils.utils as cu
1+
import cdippy.utils.urls as url_utils
2+
import cdippy.utils.utils as cdip_utils
33

44

55
class NcHashes:
@@ -12,7 +12,7 @@ def __init__(self, hash_file_location=""):
1212
self.hash_pkl = hash_file_location + "/HASH.pkl"
1313

1414
def load_hash_table(self):
15-
lines = uu.read_url(self.hashes_url).strip().split("\n")
15+
lines = url_utils.read_url(self.hashes_url).strip().split("\n")
1616
for line in lines:
1717
if line[0:8] == "filename":
1818
continue
@@ -44,7 +44,7 @@ def compare_hash_tables(self) -> list:
4444
return changed
4545

4646
def save_new_hashes(self):
47-
cu.pkl_dump(self.new_hashes, self.hash_pkl)
47+
cdip_utils.pkl_dump(self.new_hashes, self.hash_pkl)
4848

4949
def get_old_hashes(self):
50-
return cu.pkl_load(self.hash_pkl)
50+
return cdip_utils.pkl_load(self.hash_pkl)

cdippy/ndbc.py

Lines changed: 7 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -3,8 +3,8 @@
33
import os
44
from datetime import datetime, timezone
55

6-
import cdippy.utils.urls as uu
7-
import cdippy.utils.utils as cu
6+
import cdippy.utils.urls as url_utils
7+
import cdippy.utils.utils as cdip_utils
88

99
sos_base = "https://sdf.ndbc.noaa.gov/sos/server.php"
1010
request = "request=DescribeSensor"
@@ -20,9 +20,9 @@ def get_stn_info(wmo_id):
2020
"""Work in progress, querying ndbc sos service."""
2121
qry = "&".join([request, service, version, outputformat, describe_stn + wmo_id])
2222
url = "?".join([sos_base, qry])
23-
root = uu.load_et_root(url)
23+
root = url_utils.load_et_root(url)
2424
results = []
25-
uu.rfindt(root, results, "description")
25+
url_utils.rfindt(root, results, "description")
2626

2727

2828
def get_wmo_id(
@@ -35,14 +35,14 @@ def get_wmo_id(
3535
now = datetime.now(timezone.utc)
3636
if not pkl_fl or now.minute == 23 or not os.path.isfile(pkl_fl):
3737
url = "/".join([cdip_base, "wmo_ids"])
38-
r = uu.read_url(url)
38+
r = url_utils.read_url(url)
3939
ids = {}
4040
for line in r.splitlines():
4141
ids[line[0:3]] = line[5:].strip()
4242
if pkl_fl:
43-
cu.pkl_dump(ids, pkl_fl)
43+
cdip_utils.pkl_dump(ids, pkl_fl)
4444
else:
45-
ids = cu.pkl_load(pkl_fl)
45+
ids = cdip_utils.pkl_load(pkl_fl)
4646
if stn in ids:
4747
return ids[stn]
4848
else:

cdippy/plots/compendium.py

Lines changed: 20 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
1-
import cdippy.utils.utils as ut
2-
import cdippy.plots.utils as pu
1+
import cdippy.utils.utils as cdip_utils
2+
import cdippy.plots.utils as plot_utils
33
from datetime import datetime
44
import numpy as np
55
import calendar
@@ -15,7 +15,7 @@
1515

1616

1717
def make_plot(
18-
stns: str, start: datetime, end: datetime, params: str, x_inch: int
18+
stns: str, start: datetime, end: datetime, params: str, x_inch: int = None
1919
) -> tuple:
2020
"""CDIP's classic compendium plot for multiple stations and parameters.
2121
@@ -66,7 +66,7 @@ def make_plot(
6666
return fig, "Error: start is not set"
6767

6868
if type(start) is str:
69-
start = ut.cdip_datetime(start)
69+
start = cdip_utils.cdip_datetime(start)
7070

7171
month_plot = False
7272
if end is None: # Month compendium plot
@@ -81,7 +81,7 @@ def make_plot(
8181
)
8282
month_plot = True
8383
elif type(end) is str:
84-
end = ut.cdip_datetime(end)
84+
end = cdip_utils.cdip_datetime(end)
8585

8686
if params is None:
8787
params = "waveHs,waveTp,waveDp"
@@ -123,7 +123,7 @@ def make_plot(
123123

124124
# Get the color for the station (circular list)
125125
if multiple_stns:
126-
stn_color = pu.stn_colors[stn_idx]
126+
stn_color = plot_utils.stn_colors[stn_idx]
127127
else:
128128
stn_color = "k"
129129
stn_idx += 1
@@ -155,13 +155,13 @@ def make_plot(
155155

156156
# Prepare data to show gaps where there is no data
157157
index_name = "waveTime"
158-
data = pu.prepare_gaps_dict(data, index_name)
158+
data = plot_utils.prepare_gaps_dict(data, index_name)
159159

160160
# Plot the processed data.
161161
if len(data) > 0:
162-
wT = [ut.timestamp_to_datetime(x) for x in data["waveTime"]]
162+
wT = [cdip_utils.timestamp_to_datetime(x) for x in data["waveTime"]]
163163
for idx in range(len(params)):
164-
attr = pu.pm_data[params[idx]]
164+
attr = plot_utils.pm_data[params[idx]]
165165
ax = pm_axes[idx]
166166
if multiple_stns:
167167
plot_color = stn_color
@@ -201,12 +201,16 @@ def make_plot(
201201
plt.minorticks_on()
202202
for idx in range(len(params)):
203203
ax = pm_axes[idx]
204-
attr = pu.pm_data[params[idx]]
204+
attr = plot_utils.pm_data[params[idx]]
205205
# Grids
206206
ax.grid(
207-
axis="x", which="major", color=pu.grid_color, linestyle="-", linewidth=2
207+
axis="x",
208+
which="major",
209+
color=plot_utils.grid_color,
210+
linestyle="-",
211+
linewidth=2,
208212
)
209-
ax.grid(axis="y", which="major", color=pu.grid_color, linestyle="-")
213+
ax.grid(axis="y", which="major", color=plot_utils.grid_color, linestyle="-")
210214
# Ticks
211215
ax.tick_params(axis="x", which="minor", length=4, top="off")
212216
ax.tick_params(
@@ -227,12 +231,12 @@ def make_plot(
227231
# Note waveHs may not be in current stn data, hence check if in params.
228232
if "waveHs" in params:
229233
hs_ax = pm_axes[params.index("waveHs")]
230-
low = pu.pm_data["waveHs"]["ylim"][0]
231-
high = pu.pm_data["waveHs"]["ylim"][1]
234+
low = plot_utils.pm_data["waveHs"]["ylim"][0]
235+
high = plot_utils.pm_data["waveHs"]["ylim"][1]
232236
if hs_ylim_max < high:
233237
hs_ax.set_ylim(low, high)
234238
# Add second Hs axes
235-
attr = pu.pm_data["waveHs"]
239+
attr = plot_utils.pm_data["waveHs"]
236240
pHs2 = hs_ax.twinx()
237241
pHs2.set_ylabel(attr["ylabel_ft"], fontsize=label_font_size)
238242
pHs2.tick_params(axis="y", which="major", labelsize=label_font_size)
@@ -241,7 +245,7 @@ def make_plot(
241245

242246
if "waveDp" in params:
243247
dp_ax = pm_axes[params.index("waveDp")]
244-
attr = pu.pm_data["waveDp"]
248+
attr = plot_utils.pm_data["waveDp"]
245249
pDp2 = dp_ax.twinx()
246250
pDp2.set_ylabel(attr["ylabel_compass"], fontsize=label_font_size)
247251
pDp2.tick_params(axis="y", which="major", labelsize=label_font_size)

cdippy/plots/sst_climatology.py

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -5,7 +5,7 @@
55

66

77
# CDIP imports
8-
from cdippy.utils import utils as ut
8+
from cdippy.utils import utils as cdip_utils
99
from cdippy.stndata import StnData
1010

1111
import matplotlib as mpl
@@ -14,7 +14,7 @@
1414
import matplotlib.pyplot as plt # noqa: E402
1515

1616

17-
def make_plot(stn: str, x_inch: int = None, y_inch: int = None):
17+
def make_plot(stn: str, x_inch: int = None, y_inch: int = None) -> tuple:
1818
"""
1919
Year-long Climatology of Sea Surface Temperature across all years of available data for a station.
2020
@@ -87,11 +87,11 @@ def Tf(Tc):
8787

8888
for year in yearsort:
8989

90-
unixstart = ut.datetime_to_timestamp(datetime(year, 1, 1))
90+
unixstart = cdip_utils.datetime_to_timestamp(datetime(year, 1, 1))
9191
ncstart = timefillunix[bisect.bisect_left(timefillunix, unixstart)]
9292
nearidx = np.where(timefillunix == ncstart)[0][0]
9393

94-
unixend = ut.datetime_to_timestamp(datetime(year, 12, 31, 23, 59, 59))
94+
unixend = cdip_utils.datetime_to_timestamp(datetime(year, 12, 31, 23, 59, 59))
9595
ncend = timefillunix[(bisect.bisect_right(timefillunix, unixend)) - 1]
9696
futureidx = np.where(timefillunix == ncend)[0][0]
9797

cdippy/plotting.py

Lines changed: 52 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,52 @@
1+
from . import plots
2+
from matplotlib.pyplot import Figure
3+
4+
5+
def make_annual_hs_boxplot(stn: str, year: int) -> Figure:
    """Create a boxplot of annual significant wave heights for a station.

    Args:
        stn (str): A 5-char station identifier, e.g. '100p1'.
        year (int): The year to plot.

    Returns:
        fig (Figure): A matplotlib.pyplot.Figure object for the created plot.
    """
    # Thin public wrapper: all plotting work happens in the legacy plots lib.
    boxplot_figure = plots.annual_hs_boxplot.make_plot(stn, year)
    return boxplot_figure
18+
19+
20+
def make_compendium_plot(
    stns: str, start: str, end: str, params: str, x_inch: int = None
) -> tuple:
    """CDIP's classic compendium plot for multiple stations and parameters.

    Args:
        stns (str): A comma-delimited list of 5-char station identifiers, e.g. '100p1,201p1'.
        start (str): Start time of data series formatted as 'yyyymm[ddHHMMss]' where 'ddHHMMss' are optional components.
        end (str): End time of data series ('yyyymm[ddHHMMss]'). If 'None' is provided, defaults to the current date and time.
        params (str): A comma-delimited string of parameter names, e.g. 'waveHs,waveTp'.
        x_inch (int): Optional plot width in inches; when None the underlying
            plot routine chooses its own default.

    Returns:
        tuple: The (fig, message) pair produced by plots.compendium.make_plot,
            where fig is a matplotlib.pyplot.Figure and message is a status
            string (e.g. an error description when plotting fails).
    """
    # x_inch defaults to None to mirror plots.compendium.make_plot's own
    # signature, so callers are no longer forced to pass a width.
    # NOTE(review): the wrapped function is annotated '-> tuple' and returns
    # (fig, message), so this wrapper's annotation is 'tuple', not 'Figure'.
    return plots.compendium.make_plot(stns, start, end, params, x_inch)
37+
38+
39+
def make_sst_climatology_plot(
    stn: str, x_inch: int = None, y_inch: int = None
) -> tuple:
    """Create a plot of yearly climatology of sea surface temperature at a
    station for all years of available data.

    Args:
        stn (str): A 5-char station identifier, e.g. '100p1'.
        x_inch (int): Optional figure width in inches; the underlying routine's
            default is used when None.
        y_inch (int): Optional figure height in inches; the underlying routine's
            default is used when None.

    Returns:
        tuple: The result of plots.sst_climatology.make_plot, which is
            annotated '-> tuple' (presumably a Figure plus status info —
            confirm against that module).
    """
    # NOTE(review): previous annotation said '-> Figure', but the wrapped
    # function declares '-> tuple'; the annotation here now matches it.
    return plots.sst_climatology.make_plot(stn, x_inch, y_inch)

0 commit comments

Comments
 (0)