-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathutils.py
More file actions
59 lines (46 loc) · 1.79 KB
/
utils.py
File metadata and controls
59 lines (46 loc) · 1.79 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
import torch
import config
class Dataset(torch.utils.data.Dataset):
    """
    PyTorch wrapper for a numpy dataset.
    @param dataset Numpy array (or any sequence supporting len() and
                   integer indexing) representing the dataset.
    """
    def __init__(self, dataset):
        self.dataset = dataset
    def __len__(self):
        # BUG FIX: the original returned numpy.shape(self.dataset)[0], but
        # numpy is never imported in this file, so the first len() call
        # raised NameError. len() yields the same first-axis size for numpy
        # arrays and also works for lists and tensors.
        return len(self.dataset)
    def __getitem__(self, index):
        return self.dataset[index]
class LabelledDataset(torch.utils.data.Dataset):
    """
    PyTorch wrapper for a numpy dataset and its associated labels.
    @param dataset Numpy array (or any sequence supporting len() and
                   integer indexing) representing the dataset.
    @param labels One-dimensional array of the same length as dataset with
                  non-negative int values.
    """
    def __init__(self, dataset, labels):
        self.dataset = dataset
        self.labels = labels
    def __len__(self):
        # BUG FIX: the original returned numpy.shape(self.dataset)[0], but
        # numpy is never imported in this file, so the first len() call
        # raised NameError. len() yields the same first-axis size for numpy
        # arrays and also works for lists and tensors.
        return len(self.dataset)
    def __getitem__(self, index):
        # Returns a (sample, label) pair for the given index.
        return self.dataset[index], self.labels[index]
def get_checkpoint_state(model, optimizer, scheduler, load_path=config.MODEL_LOAD_PATH, map_location=None):
    """
    Restore training state from a checkpoint file.
    @param model Module whose weights are restored in place.
    @param optimizer Optimizer whose state is restored in place.
    @param scheduler LR scheduler whose state is restored in place.
    @param load_path Path to the checkpoint file (defaults to
                     config.MODEL_LOAD_PATH).
    @param map_location Optional device remapping forwarded to torch.load
                        (e.g. 'cpu' to load a GPU-saved checkpoint on a
                        CPU-only machine). None keeps the original behavior.
    @return Tuple (model, epoch, optimizer, scheduler).
    """
    # map_location=None is torch.load's own default, so existing callers
    # are unaffected; passing 'cpu' avoids a RuntimeError when CUDA tensors
    # in the checkpoint are deserialized on a machine without a GPU.
    checkpoint = torch.load(load_path, map_location=map_location)
    model.load_state_dict(checkpoint['model_state_dict'])
    epoch = checkpoint['epoch']
    optimizer.load_state_dict(checkpoint['optimizer_state_dict'])
    scheduler.load_state_dict(checkpoint['scheduler_state_dict'])
    return model, epoch, optimizer, scheduler
def save_checkpoint_state(epoch, model, optimizer, scheduler, save_path=config.MODEL_SAVE_PATH):
    """
    Serialize the current training state to a checkpoint file.
    @param epoch Epoch counter stored alongside the state dicts.
    @param model Module whose state_dict is saved.
    @param optimizer Optimizer whose state_dict is saved.
    @param scheduler LR scheduler whose state_dict is saved.
    @param save_path Destination file (defaults to config.MODEL_SAVE_PATH).
    """
    # Bundle everything needed to resume training into a single dict and
    # hand it straight to torch.save.
    torch.save(
        {
            'epoch': epoch,
            'model_state_dict': model.state_dict(),
            'optimizer_state_dict': optimizer.state_dict(),
            'scheduler_state_dict': scheduler.state_dict()
        },
        save_path,
    )
def writelog(logstr):
    """
    Append a line to the log file configured in config.LOG_FILE_PATH.
    @param logstr Text to record; a trailing newline is added automatically.
    """
    # 'with' guarantees the file handle is closed even if write() raises,
    # unlike the original open()/write()/close() sequence, which leaked the
    # handle on an exception between open and close.
    with open(config.LOG_FILE_PATH, 'a') as log_file:
        log_file.write(logstr + '\n')