Skip to content

Commit 154530d

Browse files
authored
Create network.py
1 parent 484ee80 commit 154530d

File tree

1 file changed

+144
-0
lines changed

1 file changed

+144
-0
lines changed

model/network.py

Lines changed: 144 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,144 @@
1+
import numpy as np
2+
import pickle
3+
import sys
4+
from time import *
5+
from model.loss import *
6+
from model.layers import *
7+
8+
class Net:
    """LeNet-5-style convolutional network assembled from the hand-written
    layers in ``model.layers``.

    Architecture (input assumed to be a 1-channel image — presumably
    28x28 MNIST given the padding, TODO confirm against the data loader):
    conv1(6@5x5, pad 2) -> ReLU -> maxpool -> conv3(16@5x5) -> ReLU ->
    maxpool -> conv5(120@5x5) -> ReLU -> flatten -> fc6(84) -> ReLU ->
    fc7(10) -> softmax.
    """

    def __init__(self):
        # Learning rate shared by every trainable layer.
        lr = 0.01
        self.layers = [
            Convolution2D(inputs_channel=1, num_filters=6, kernel_size=5, padding=2, stride=1, learning_rate=lr, name='conv1'),
            ReLu(),
            Maxpooling2D(pool_size=2, stride=2, name='maxpool2'),
            Convolution2D(inputs_channel=6, num_filters=16, kernel_size=5, padding=0, stride=1, learning_rate=lr, name='conv3'),
            ReLu(),
            Maxpooling2D(pool_size=2, stride=2, name='maxpool4'),
            Convolution2D(inputs_channel=16, num_filters=120, kernel_size=5, padding=0, stride=1, learning_rate=lr, name='conv5'),
            ReLu(),
            Flatten(),
            FullyConnected(num_inputs=120, num_outputs=84, learning_rate=lr, name='fc6'),
            ReLu(),
            FullyConnected(num_inputs=84, num_outputs=10, learning_rate=lr, name='fc7'),
            Softmax(),
        ]
        self.lay_num = len(self.layers)

    def _forward(self, x):
        """Run ``x`` through every layer in order; return the final output."""
        for layer in self.layers:
            x = layer.forward(x)
        return x

    def _backward(self, dy):
        """Backpropagate gradient ``dy`` through the layers in reverse order.

        Each layer updates its own parameters internally (the layers were
        constructed with a learning rate), so nothing is returned.
        """
        for layer in reversed(self.layers):
            dy = layer.backward(dy)

    def _load_weights(self, weights_file):
        """Load pickled layer parameters and feed them into the layers.

        The indices (0, 3, 6, 9, 11) are the positions of the trainable
        layers in ``self.layers``.

        NOTE(review): ``pickle.load`` executes arbitrary code — only load
        weight files from trusted sources.
        """
        with open(weights_file, 'rb') as handle:
            b = pickle.load(handle)
        self.layers[0].feed(b[0]['conv1.weights'], b[0]['conv1.bias'])
        self.layers[3].feed(b[3]['conv3.weights'], b[3]['conv3.bias'])
        self.layers[6].feed(b[6]['conv5.weights'], b[6]['conv5.bias'])
        self.layers[9].feed(b[9]['fc6.weights'], b[9]['fc6.bias'])
        self.layers[11].feed(b[11]['fc7.weights'], b[11]['fc7.bias'])

    def _evaluate(self, data, label, test_size):
        """Forward-pass ``test_size`` samples, printing an ASCII progress bar
        and the final accuracy. One-hot labels are compared by argmax.
        """
        toolbar_width = 40
        sys.stdout.write("[%s]" % (" " * (toolbar_width - 1)))
        sys.stdout.flush()
        sys.stdout.write("\b" * (toolbar_width))
        step = float(test_size) / float(toolbar_width)
        total_acc = 0
        for i in range(test_size):
            if i == round(step):
                # Advance the progress bar roughly every 1/40th of the set.
                step += float(test_size) / float(toolbar_width)
                sys.stdout.write(".")
                sys.stdout.flush()
            output = self._forward(data[i])
            if np.argmax(output) == np.argmax(label[i]):
                total_acc += 1
        sys.stdout.write("\n")
        print('=== Test Size:{0:d} === Test Acc:{1:.2f} ==='.format(test_size, float(total_acc) / float(test_size)))

    def train(self, training_data, training_label, batch_size, epoch, weights_file):
        """Train with plain SGD over mini-batches.

        Args:
            training_data: array of samples, indexed along axis 0.
            training_label: matching one-hot labels.
            batch_size: mini-batch size.
            epoch: number of passes over the data.
            weights_file: path where layer parameters are pickled after
                every batch (checkpointing).
        """
        total_acc = 0
        total_samples = training_data.shape[0]
        for e in range(epoch):
            for batch_index in range(0, total_samples, batch_size):
                # Slicing past the end is safe in numpy: the final batch is
                # simply shorter.
                data = training_data[batch_index:batch_index + batch_size]
                label = training_label[batch_index:batch_index + batch_size]
                # BUG FIX: iterate over the actual batch length; the original
                # looped range(batch_size) and raised IndexError on a short
                # final batch.
                cur_batch = data.shape[0]
                loss = 0
                acc = 0
                start_time = time()
                for b in range(cur_batch):
                    x = data[b]
                    y = label[b]
                    output = self._forward(x)
                    loss += cross_entropy(output, y)
                    if np.argmax(output) == np.argmax(y):
                        acc += 1
                        total_acc += 1
                    # The one-hot target is fed straight into backward — the
                    # Softmax layer is expected to turn it into the gradient.
                    self._backward(y)
                batch_time = time() - start_time
                # Estimated remaining time, assuming every batch costs about
                # as long as this one did.
                remain_time = (total_samples * epoch - batch_index - total_samples * e) / batch_size * batch_time
                # BUG FIX: integer h/m/s split; the original used float
                # division (`int(remain_time)/3600`), yielding wrong values.
                hrs, rem = divmod(int(remain_time), 3600)
                mins, secs = divmod(rem, 60)
                loss /= cur_batch
                batch_acc = float(acc) / float(cur_batch)
                # BUG FIX: divide by the number of samples actually seen so
                # far; the original's `(batch_index+batch_size)*(e+1)` was
                # wrong from the second epoch on.
                seen = total_samples * e + batch_index + cur_batch
                training_acc = float(total_acc) / float(seen)
                print('=== Epoch: {0:d}/{1:d} === Iter:{2:d} === Loss: {3:.2f} === BAcc: {4:.2f} === TAcc: {5:.2f} === Remain: {6:d} Hrs {7:d} Mins {8:d} Secs ==='.format(e, epoch, batch_index + cur_batch, loss, batch_acc, training_acc, int(hrs), int(mins), int(secs)))
                # Checkpoint all layer parameters after every batch.
                obj = [layer.extract() for layer in self.layers]
                with open(weights_file, 'wb') as handle:
                    pickle.dump(obj, handle, protocol=pickle.HIGHEST_PROTOCOL)

    def test(self, data, label, test_size):
        """Evaluate the current in-memory weights on ``test_size`` samples."""
        self._evaluate(data, label, test_size)

    def test_with_pretrained_weights(self, data, label, test_size, weights_file):
        """Load weights from ``weights_file`` and evaluate on ``test_size`` samples."""
        self._load_weights(weights_file)
        self._evaluate(data, label, test_size)

    def predict_with_pretrained_weights(self, inputs, weights_file):
        """Load weights and classify a single input.

        Returns:
            (digit, probability): the argmax class index and its softmax
            score taken from row 0 of the network output.
        """
        self._load_weights(weights_file)
        output = self._forward(inputs)
        digit = np.argmax(output)
        probability = output[0, digit]
        return digit, probability
142+
143+
144+

0 commit comments

Comments
 (0)