-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathtest.py
55 lines (39 loc) · 1.13 KB
/
test.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
import numpy as np
from sklearn import preprocessing
import math
import boto3
import pickle
# Number of hash buckets for the categorical (sparse) columns.
HASH = 524288

# Per-column parsers for np.loadtxt: columns 0-13 are numeric (blank
# fields become 0); columns 14-39 are categorical strings hashed into
# HASH buckets.
# NOTE(review): Python's built-in hash() of str is randomized per process
# (PYTHONHASHSEED), so bucket assignments are presumably only stable
# within a single run — confirm the training set is hashed the same way.
def _parse_numeric(field):
    return float(field.strip() or 0)

def _parse_categorical(field):
    return hash(field) % HASH

converter = {col: _parse_numeric for col in range(14)}
converter.update({col: _parse_categorical for col in range(14, 40)})
# Load the tab-separated test set, applying the per-column converters.
data = np.loadtxt('testset.txt', converters=converter, delimiter="\t")

ys = data[:, 0]            # column 0: label
xs_dense = data[:, 1:14]   # columns 1-13: numeric features (13 of them)
xs_sparse = data[:, 14:]   # columns 14+: hashed categorical features

# Scale dense features into [0, 1] against fixed per-feature min/max rows
# (presumably taken from the training set — TODO confirm) so train and
# test share the same scaling instead of fitting on the test data.
scaler = preprocessing.MinMaxScaler()
# np.array instead of the deprecated np.matrix; MinMaxScaler.fit only
# requires a 2-D array-like, and np.matrix is slated for removal.
fit_mat = np.array([
    [0, -3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
    [5775, 257675, 65535, 969, 23159500, 431037, 56311, 6047, 29019, 11, 231, 4008, 7393],
])
scaler.fit(fit_mat)
xs_dense = scaler.transform(xs_dense)
# Convert each row into [label, [(feature_index, value), ...]] sparse form.
training_data = []
num_rows = data.shape[0]
for i in range(num_rows):
    # BUG FIX: the original read ys[0] here, giving EVERY example the
    # first row's label; each example must carry its own label ys[i].
    label = ys[i]
    row = []
    # Dense features occupy indices 0..12 with their scaled values.
    for cnt, x in enumerate(xs_dense[i]):
        row.append((cnt, x))
    # Hashed categorical features become indicator features (value 1).
    # NOTE(review): the +14 offset leaves index 13 unused (dense features
    # end at 12) — confirm the downstream consumer expects this numbering.
    for idx in xs_sparse[i]:
        row.append((int(idx + 14), 1))
    training_data.append([label, row])
    print(i, num_rows)  # progress indicator

print("Writing to testset.data")
# 'with' guarantees the file is closed even if pickling raises.
with open("testset.data", "wb") as out:
    pickle.dump(training_data, out)