-
Notifications
You must be signed in to change notification settings - Fork 1
Expand file tree
/
Copy pathmidas2.py
More file actions
82 lines (66 loc) · 2.68 KB
/
midas2.py
File metadata and controls
82 lines (66 loc) · 2.68 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
from lasagne import layers
from lasagne.updates import nesterov_momentum
from nolearn.lasagne import NeuralNet
import scipy.io as sio
import scipy.misc as smisc
import numpy as np
def load():
    """Load the first ``_TRAIN`` samples from ``TrainData/`` and return
    the training pair for the regression net.

    Each ``TrainData/Data_XXXXXXX.mat`` file is expected to contain at
    least the keys ``'ir'`` (an image, presumably 240x320 so that a 0.25
    resize yields 60x80 -- TODO confirm), ``'depth'`` and ``'bbox'``.

    Returns:
        X_train: float ndarray of shape (n, 60*80) -- quarter-scale 'ir'
            images, flattened and globally scaled into [0, 1].
        y_train: ndarray of shape (n, 4) -- the 'bbox' targets.
    """
    _TRAIN = 100
    _TEST = 50        # kept for reference; test/validation loading is disabled below
    _VALIDATION = 10

    print("Training: %d ..." % (_TRAIN))
    # Materialize as a list: the data is indexed and len()-ed later, and
    # under Python 3 map() returns a one-shot iterator (original code broke there).
    TrainData = [sio.loadmat('TrainData/Data_%s.mat' % (('%d') % (p)).zfill(7))
                 for p in range(_TRAIN)]
    print(" Done.\nValidation %d ..." % (_VALIDATION))
    # Validation/Test loading intentionally disabled in the original script;
    # re-enable by mirroring the TrainData comprehension above on
    # 'ValidationData/' (_VALIDATION files) and 'TestData/' (_TEST files).

    # Normalize each sample's depth map by its Frobenius norm, in place.
    for sample in TrainData:
        sample['depth'] /= np.linalg.norm(sample['depth'])

    def depth(data):
        # Quarter-scale every 'ir' image, flatten to one row per sample.
        # NOTE(review): scipy.misc.imresize was removed in SciPy >= 1.3;
        # this requires an older SciPy (with Pillow) -- confirm environment.
        d = np.array([smisc.imresize(s['ir'], 0.25) for s in data])
        d.shape = (len(d), 60 * 80)
        # Cast before dividing: imresize returns uint8, and in-place true
        # division on an integer array raises (Py3) or floor-divides (Py2).
        d = d.astype(np.float64)
        d /= d.max()  # global max-normalization into [0, 1]
        return d

    def label(data):
        # Stack the 4-element 'bbox' target of every sample.
        d = np.array([s['bbox'] for s in data])
        d.shape = (len(d), 4)
        return d

    X_train = depth(TrainData)
    y_train = label(TrainData)
    print(y_train.shape)  # fixed: was a Python-2 print statement
    return X_train, y_train
# Topology: flattened 60x80 frame in -> one wide fully-connected hidden
# layer -> linear (identity) output of the same 60*80 size, i.e. a plain
# dense regression network.
_LAYER_SPEC = [
    ('input', layers.InputLayer),
    ('hidden', layers.DenseLayer),
    ('output', layers.DenseLayer),
]

net1 = NeuralNet(
    layers=_LAYER_SPEC,
    # Layer sizes.
    input_shape=(None, 60 * 80),     # batch of flattened 60x80 images
    hidden_num_units=60 * 80 * 3,
    output_num_units=60 * 80,
    output_nonlinearity=None,        # identity activation -> raw regression outputs
    # Optimizer: SGD with Nesterov momentum.
    update=nesterov_momentum,
    update_learning_rate=0.25,
    update_momentum=0.9,
    # Training setup.
    regression=True,                 # mean-squared-error objective, not classification
    max_epochs=10,
    verbose=1,
)
# Script entry point: load the full training set and fit the network on it.
X, y = load()
net1.fit(X, y)