Skip to content

Commit d639824

Browse files
authored
tag_to_var_map arguments and tests (#109)
* adding tag_to_var_map argument when calculate_values is called in tag.py * Adding missing tag_to_var_map argument within process_binary_ops * Adding test for use of tag_to_var_map * Simplifying logic to pass in tag_to_var_map in test_tag.py * Reformatting with black and addressing flake8 issues Tests passing * Reformatting with black and addressing flake8 issues Tests passing * Reverting unneeded changes in test_calculate_values * Adding 2 more tag_to_var_map arguments in process_unary_ops and process_binary_ops, respectively * temp * Adding additional test coverage for tag_to_var_map * Consolidating when TypeError is raised for unary_helper (for codecov) Adding test case for empty unary operations (for codecov) Removing files generated in tests * Delete pype_schema/tests/data/EPANET_Net_3_temp.inp * Delete pype_schema/tests/data/L-TOWN_temp.inp * Delete pype_schema/tests/data/valve_temp.inp * Adding test for invalid unary data * Removing type check from unary_helper since type is already checked in process_unary_ops * Adding back simplified TypeError raising in unary_helper Adding tests for: - TypeError from unary_helper - tag_to_var_map with List data - invalid mode in VirtualTag class
1 parent 0177ecb commit d639824

9 files changed

Lines changed: 455 additions & 24 deletions

File tree

pype_schema/operations.py

Lines changed: 5 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -198,6 +198,7 @@ def unary_helper(data, un_op):
198198
numpy array of dataset transformed by unary operation
199199
"""
200200
# allow for multiple unary operations to be performed sequentially
201+
result = None
201202
if isinstance(un_op, list):
202203
result = data.copy()
203204
for op in un_op:
@@ -212,15 +213,11 @@ def unary_helper(data, un_op):
212213
result = [-x for x in data]
213214
elif isinstance(data, (np.ndarray, pd.Series)):
214215
result = -data
215-
else:
216-
raise TypeError("Data must be either a list, array, or Series")
217216
elif un_op == "~":
218217
if isinstance(data, list):
219-
result = result = [not bool(x) for x in data]
218+
result = [not bool(x) for x in data]
220219
elif isinstance(data, (np.ndarray, pd.Series)):
221220
result = data == 0
222-
else:
223-
raise TypeError("Data must be either a list, array, or Series")
224221
else:
225222
if isinstance(data, list):
226223
result = data.copy()
@@ -247,7 +244,8 @@ def unary_helper(data, un_op):
247244
result = data.shift(-1)
248245
elif un_op == ">>":
249246
result = data.shift(1)
250-
else:
251-
raise TypeError("Data must be either a list, array, or Series")
247+
248+
if result is None:
249+
raise TypeError("Data must be either a list, array, or Series")
252250

253251
return result

pype_schema/tag.py

Lines changed: 29 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -918,16 +918,32 @@ def process_unary_ops(self, data, tag_to_var_map={}):
918918
else: # must be a DataFrame
919919
relevant_data = pd.Series([tag_obj.value] * len(data))
920920
elif isinstance(tag_obj, self.__class__):
921-
relevant_data = tag_obj.calculate_values(data)
921+
relevant_data = tag_obj.calculate_values(data, tag_to_var_map)
922922
elif tag_to_var_map:
923923
relevant_data = result[tag_to_var_map[tag_obj.id]]
924924
else:
925925
relevant_data = result[tag_obj.id]
926926

927-
relevant_data = unary_helper( # noqa: F405
927+
is_series = isinstance(relevant_data, pd.Series)
928+
if is_series: # store info, then convert to np array
929+
original_index = relevant_data.index
930+
original_name = relevant_data.name
931+
relevant_data = relevant_data.values
932+
933+
processed_relevant_data = unary_helper( # noqa: F405
928934
relevant_data, self.unary_operations[i]
929935
)
930936

937+
# Convert back if original was Series
938+
if is_series:
939+
relevant_data = pd.Series(
940+
processed_relevant_data,
941+
index=original_index,
942+
name=original_name,
943+
)
944+
else:
945+
relevant_data = processed_relevant_data
946+
931947
if tag_to_var_map:
932948
result[tag_to_var_map[tag_obj.id]] = relevant_data
933949
else:
@@ -1015,7 +1031,7 @@ def process_binary_ops(self, data, tag_to_var_map={}):
10151031
else:
10161032
relevant_data = data[tag_obj.id].copy()
10171033
elif isinstance(tag_obj, self.__class__):
1018-
relevant_data = tag_obj.calculate_values(data)
1034+
relevant_data = tag_obj.calculate_values(data, tag_to_var_map)
10191035
elif tag_to_var_map:
10201036
relevant_data = data[tag_to_var_map[tag_obj.id]].copy()
10211037
else:
@@ -1077,7 +1093,7 @@ def process_binary_ops(self, data, tag_to_var_map={}):
10771093
else:
10781094
relevant_data = data[tag_obj.id].copy()
10791095
elif isinstance(tag_obj, self.__class__):
1080-
relevant_data = tag_obj.calculate_values(data)
1096+
relevant_data = tag_obj.calculate_values(data, tag_to_var_map)
10811097
elif tag_to_var_map:
10821098
relevant_data = data[tag_to_var_map[tag_obj.id]].copy()
10831099
else:
@@ -1155,7 +1171,7 @@ def process_custom_ops(self, data, tag_to_var_map={}):
11551171
varname = tag_to_var_map[tag_obj.id] if tag_to_var_map else tag_obj.id
11561172
varnames.append(varname)
11571173
if isinstance(tag_obj, self.__class__):
1158-
data[varname] = tag_obj.calculate_values(data)
1174+
data[varname] = tag_obj.calculate_values(data, tag_to_var_map)
11591175
result = func_(*[data[varname] for varname in varnames])
11601176
if isinstance(result, Series):
11611177
result.rename(self.id, inplace=True)
@@ -1193,7 +1209,10 @@ def calculate_values(self, data, tag_to_var_map={}):
11931209
data = self.process_binary_ops(data, tag_to_var_map=tag_to_var_map)
11941210
elif isinstance(data, (dict, DataFrame)):
11951211
# if no binary ops, get appropriate column from unary ops and rename
1196-
data = data[self.tags[0].id].rename(self.id)
1212+
if isinstance(data, dict):
1213+
data = pd.Series(data[self.tags[0].id], name=self.id)
1214+
else:
1215+
data = data[self.tags[0].id].rename(self.id)
11971216
elif isinstance(data, ndarray):
11981217
# flatten array since binary operations do that automatically
11991218
data = data[:, 0]
@@ -1202,7 +1221,10 @@ def calculate_values(self, data, tag_to_var_map={}):
12021221
data = self.process_custom_ops(data, tag_to_var_map=tag_to_var_map)
12031222
elif isinstance(data, (dict, DataFrame)):
12041223
# if custom_operations is empty, get appropriate column and rename
1205-
data = data[self.tags[0].id].rename(self.id)
1224+
if isinstance(data, dict):
1225+
data = pd.Series(data[self.tags[0].id], name=self.id)
1226+
else:
1227+
data = data[self.tags[0].id].rename(self.id)
12061228
elif isinstance(data, ndarray):
12071229
# flatten array since operations do that automatically
12081230
data = data[:, 0]

pype_schema/tests/__init__.py

Lines changed: 35 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,7 @@
33
"""Unit test package for pype_schema."""
44

55
import pickle
6-
from pype_schema.tag import VirtualTag
6+
from pype_schema.tag import VirtualTag, Tag
77
from pype_schema.node import Boiler, Cogeneration, Pump, Network
88

99

@@ -48,3 +48,37 @@ def pickle_without_functions(network, outpath):
4848
# export pickled object
4949
with open(outpath, "wb") as pickle_file:
5050
pickle.dump(network, pickle_file)
51+
52+
53+
def generate_tag_to_var_map(network):
54+
"""Simple example of generating a mapping from
55+
tag IDs to variable names, renaming variables linking
56+
objects in the network
57+
(used to generate mapping json file for tests)
58+
59+
Parameters
60+
----------
61+
network : Network
62+
The network to generate the mapping for
63+
64+
Returns
65+
-------
66+
dict
67+
Dictionary mapping tag IDs to variable names
68+
"""
69+
tag_to_var_map = {}
70+
for tag in network.get_all_tags(recurse=True):
71+
if isinstance(tag, VirtualTag): # keep network name for virtual tags
72+
tag_to_var_map[tag.id] = tag.id
73+
elif isinstance(tag, Tag): # rename for other tags
74+
parent = network.get_parent_from_tag(tag)
75+
if hasattr(parent, "id"):
76+
source_id = parent.id
77+
else:
78+
source_id = "Unknown"
79+
80+
contents_type = tag.contents.name if tag.contents is not None else "None"
81+
variable_type = tag.tag_type.name
82+
83+
tag_to_var_map[tag.id] = f"{source_id}_{contents_type}_{variable_type}"
84+
return tag_to_var_map

pype_schema/tests/data/connection_less_than.json

Lines changed: 8 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -64,6 +64,14 @@
6464
"type": "Flow",
6565
"parent_id": "Cogenerator",
6666
"contents": "Electricity"
67+
},
68+
"TestEmptyCustom": {
69+
"tags": ["ElectricityGeneration"],
70+
"custom_operations": "",
71+
"type": "Flow",
72+
"parent_id": "Cogenerator",
73+
"contents": "Electricity",
74+
"units": "kWh"
6775
}
6876
},
6977
"DrinkingWaterFacility": {
Lines changed: 97 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,97 @@
1+
GasToBoiler_NaturalGas_Flow,GasToCogen_NaturalGas_Flow,ConditionerToCogen_Biogas_Flow,CogenElecToFacility_Electricity_Flow,Constant1
2+
0,1264.064032,6361.096418,919,1
3+
0,1304.840291,6361.096418,863,1
4+
0,1304.840291,6034.886346,882,1
5+
0,1264.064032,5994.110087,945,1
6+
0,1223.287773,6075.662605,914,1
7+
0,1223.287773,6605.753973,912,1
8+
0,1264.064032,5831.00505,867,1
9+
0,1345.61655,6157.215123,926,1
10+
0,1223.287773,5790.228791,890,1
11+
0,1264.064032,6483.425196,911,1
12+
0,1264.064032,6687.306491,901,1
13+
0,1223.287773,6157.215123,926,1
14+
0,1223.287773,6361.096418,916,1
15+
0,1304.840291,6809.635268,925,1
16+
0,1386.392809,6850.411528,890,1
17+
0,1223.287773,6809.635268,935,1
18+
0,1264.064032,6687.306491,913,1
19+
0,1304.840291,6646.530232,905,1
20+
0,1264.064032,6646.530232,927,1
21+
0,1264.064032,6687.306491,921,1
22+
0,1223.287773,6972.740305,903,1
23+
0,1223.287773,6564.977714,886,1
24+
0,1304.840291,5953.333828,911,1
25+
0,1264.064032,6157.215123,896,1
26+
0,1304.840291,5667.900014,879,1
27+
0,1264.064032,5260.137423,915,1
28+
0,1304.840291,5423.242459,915,1
29+
0,1304.840291,5341.689941,882,1
30+
0,1304.840291,5137.808646,930,1
31+
0,1264.064032,4811.598573,889,1
32+
0,1264.064032,5056.256127,890,1
33+
0,1264.064032,5056.256127,897,1
34+
17.488486,1182.511514,5097.032387,867,1
35+
17.488486,1182.511514,5504.794977,878,1
36+
302.9223,897.0777,4730.046055,939,1
37+
343.6985591,856.3014409,4566.941018,853,1
38+
1200,0,4730.046055,862,1
39+
1200,0,5056.256127,911,1
40+
1200,0,5219.361164,816,1
41+
1200,0,5545.571237,879,1
42+
1200,0,5341.689941,905,1
43+
1200,0,5545.571237,961,1
44+
1200,0,5178.584905,919,1
45+
1200,0,5871.781309,903,1
46+
1200,0,5953.333828,944,1
47+
1200,0,5871.781309,909,1
48+
1200,0,5790.228791,927,1
49+
1200,0,5464.018718,910,1
50+
1200,0,5545.571237,903,1
51+
1200,0,5667.900014,917,1
52+
1200,0,5382.4662,886,1
53+
1200,0,5871.781309,907,1
54+
1200,0,5708.676273,897,1
55+
1200,0,5667.900014,966,1
56+
1200,0,5627.123755,893,1
57+
1200,0,5300.913682,893,1
58+
1200,0,5586.347496,953,1
59+
1200,0,5056.256127,924,1
60+
1200,0,5300.913682,908,1
61+
1200,0,5015.479868,935,1
62+
1200,0,5464.018718,976,1
63+
1200,0,5464.018718,923,1
64+
1200,0,5464.018718,895,1
65+
1200,0,5464.018718,910,1
66+
1200,0,5341.689941,953,1
67+
1200,0,4770.822314,595,1
68+
1200,0,5178.584905,946,1
69+
1200,0,5341.689941,926,1
70+
1200,0,5831.00505,959,1
71+
1200,0,5219.361164,916,1
72+
1200,0,5382.4662,887,1
73+
1200,0,5137.808646,840,1
74+
1200,0,5382.4662,863,1
75+
1200,0,4485.3885,881,1
76+
1200,0,5341.689941,863,1
77+
1200,0,5097.032387,799,1
78+
1200,0,4770.822314,819,1
79+
0,1304.840291,5097.032387,840,1
80+
0,1264.064032,4811.598573,877,1
81+
0,1304.840291,4893.151091,918,1
82+
0,1264.064032,5260.137423,939,1
83+
0,1223.287773,5260.137423,956,1
84+
0,1264.064032,4240.730946,933,1
85+
0,1264.064032,4974.703609,889,1
86+
17.488486,1182.511514,5382.4662,921,1
87+
0,1223.287773,4933.92735,938,1
88+
17.488486,1182.511514,4974.703609,896,1
89+
0,1223.287773,4566.941018,852,1
90+
0,1223.287773,4526.164759,925,1
91+
0,1264.064032,4811.598573,914,1
92+
0,1223.287773,4770.822314,892,1
93+
0,1223.287773,4363.059723,903,1
94+
0,1223.287773,4811.598573,909,1
95+
0,1223.287773,5178.584905,950,1
96+
0,1223.287773,4648.493537,926,1
97+
17.488486,1182.511514,5178.584905,919,1
Lines changed: 23 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,23 @@
1+
{
2+
"ElectricityGeneration_LShift1": "ElectricityGeneration_LShift1",
3+
"Digester3GasFlow": "ConditionerToCogen_Biogas_Flow",
4+
"Digester2GasFlow": "ConditionerToCogen_Biogas_Flow",
5+
"BoilerGasPurchases": "GasToBoiler_NaturalGas_Flow",
6+
"TotalizedFlaredGas": "ConditionerToFlare_Biogas_Flow",
7+
"NoGasPurchases": "NoGasPurchases",
8+
"GrossGasProduction": "GrossGasProduction",
9+
"ElectricityProductionByGasVolume": "ElectricityProductionByGasVolume",
10+
"ElectricityGeneration_RShift2_List": "ElectricityGeneration_RShift2_List",
11+
"Digester1GasFlow": "ConditionerToCogen_Biogas_Flow",
12+
"CogenGasPurchases": "GasToCogen_NaturalGas_Flow",
13+
"CombinedDigesterGasFlow": "ConditionerToCogen_Biogas_Flow",
14+
"ElectricityGeneration": "CogenElecToFacility_Electricity_Flow",
15+
"TankLevel": "FOGTank_FatOilGrease_Level",
16+
"InfluentFlow": "SewerIntake_UntreatedSewage_Flow",
17+
"NoGasPurchasesList": "NoGasPurchasesList",
18+
"ElectricityGeneration_LShift1_List": "ElectricityGeneration_LShift1_List",
19+
"ElectricityGeneration_RShift2": "ElectricityGeneration_RShift2",
20+
"TankVolume": "FOGTank_FatOilGrease_Volume",
21+
"GrossGasProductionList": "GrossGasProductionList",
22+
"ElectricityGenDelta": "ElectricityGenDelta"
23+
}

0 commit comments

Comments
 (0)