forked from UCLA-VAST/Stream-HLS
-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathutils.py
More file actions
58 lines (51 loc) · 1.85 KB
/
utils.py
File metadata and controls
58 lines (51 loc) · 1.85 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
import torch_mlir
import torch
import time
def convertTorchToMLIR(model, inputs, outPath, output_type="linalg-on-tensors", print_weights=False):
    """Trace `model` with torch-mlir and write the resulting MLIR assembly to a file.

    Args:
        model: torch.nn.Module to compile; switched to inference mode first.
        inputs: example input tensor(s) used for tracing.
        outPath: path of the text file the MLIR asm is written to (overwritten).
        output_type: torch-mlir lowering target (default "linalg-on-tensors").
        print_weights: if True, dump full weight constants into the asm;
            otherwise large constant tensors are elided to keep the file small.
    """
    # eval() already sets training=False, so a separate train(False) call
    # is redundant.
    model.eval()
    module = torch_mlir.compile(model, inputs, output_type=output_type, use_tracing=True)
    with open(outPath, "w") as f:
        if print_weights:
            # Full asm including every weight tensor (can be very large).
            print(module.operation.get_asm(), file=f)
        else:
            # large_elements_limit=1 elides big constants from the dump.
            print(module.operation.get_asm(large_elements_limit=1), file=f)
def generateGoldenResults(model, inputs, outPath):
print("Saving golden results to", outPath)
# save inputs as binary files
for i, input in enumerate(inputs):
with open(outPath + f"input_{i}.bin", "wb") as f:
f.write(input.detach().numpy().tobytes())
# measure inference time
model.train(False)
model.eval()
start = time.time()
outputs = model(*inputs)
end = time.time()
# if outputs is a tuple, store each tensor as a separate binary file
if isinstance(outputs, tuple):
for idx, output in enumerate(outputs):
# store torch tensor as binary file
with open(outPath + f"output_{idx}.bin", "wb") as f:
f.write(output.detach().numpy().tobytes())
else:
# store torch tensor as binary file
with open(outPath + "output_0.bin", "wb") as f:
f.write(outputs.detach().numpy().tobytes())
return end - start
# store to text file
# linearize the tensor
# outputs = outputs.flatten()
# with open(outPath + ".txt", "w") as f:
# for i in range(outputs.shape[0]):
# print(outputs[i].item(), file=f)
def randTensor(*shape, dtype=torch.float32):
    """Build a random tensor of the given shape, with a value range chosen
    per dtype family.

    Floating dtypes: uniform in [-1, 1). Bool: uniformly 0/1. Signed ints:
    uniform in [-10, 10). Unsigned ints: uniform in [0, 10).
    """
    if dtype.is_floating_point:
        # Scale/shift torch.rand's [0, 1) into [-1, 1).
        return 2.0 * torch.rand(*shape, dtype=dtype) - 1.0
    if dtype == torch.bool:
        return torch.randint(0, 2, shape, dtype=dtype)
    # Integer dtypes: pick bounds by signedness, then draw once.
    low, high = (-10, 10) if dtype.is_signed else (0, 10)
    return torch.randint(low, high, shape, dtype=dtype)