Skip to content

Commit c80706d

Browse files
committed
feat(model): 实现自定义层数和大小的神经网络FCNet
1 parent c9111a3 commit c80706d

File tree

2 files changed

+135
-0
lines changed

2 files changed

+135
-0
lines changed

pynet/models/FCNet.py

Lines changed: 134 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,134 @@
1+
# -*- coding: utf-8 -*-
2+
3+
# @Time : 19-7-8 下午1:36
4+
# @Author : zj
5+
6+
"""
7+
参考 cs231n assignment2 FullyConnectedNet,实现自定义层数和大小的神经网络
8+
9+
网络结构为
10+
11+
{FC - [batch/layer norm] - RELU - [dropout]} * (L - 1) - FC
12+
"""
13+
14+
__all__ = ['FCNet']
15+
16+
import numpy as np
17+
from pynet import nn
18+
from .Net import Net
19+
20+
21+
class FCNet(Net):
    """Fully-connected neural network with configurable depth and layer sizes.

    Modeled on cs231n assignment2's FullyConnectedNet.  Architecture:

        {FC - RELU} * (L - 1) - FC

    NOTE(review): dropout hyper-parameters are stored (``dropout_param``,
    ``self.dropout``, ``U1``/``U2``) but the visible ``forward``/``backward``
    never apply them, so dropout is effectively inactive here — confirm
    before relying on it.
    """

    def __init__(self, hidden_dims, input_dim=3 * 32 * 32, num_classes=10,
                 dropout=1.0, weight_scale=1e-2, dtype=np.double):
        """
        :param hidden_dims: list of hidden-layer sizes, or None for a single
            linear classifier (one FC layer).
        :param input_dim: flattened size of one input sample.
        :param num_classes: number of output classes (size of the final FC).
        :param dropout: dropout parameter; 1.0 disables dropout entirely.
        :param weight_scale: scale used to initialize the FC weights.
        :param dtype: numpy dtype all parameters are cast to.
        """
        super(FCNet, self).__init__()
        self.hidden_dims = hidden_dims
        self.input_dim = input_dim
        self.num_classes = num_classes
        self.weight_scale = weight_scale
        self.dtype = dtype

        self.use_dropout = dropout != 1.0
        # Fix: the original did `1 + len(hidden_dims)` unconditionally and
        # crashed for hidden_dims=None, a case _get_fcs explicitly supports.
        self.num_layers = 1 if hidden_dims is None else 1 + len(hidden_dims)
        self.relu = nn.ReLU()

        self.fcs = self._get_fcs()
        self.params = self._get_params()
        self.caches = self._get_caches()

        # Cast every parameter array to the requested dtype.
        for k, v in self.params.items():
            self.params[k] = v.astype(dtype)

        self.dropout_param = {}
        if self.use_dropout:
            self.dropout_param['mode'] = 'train'
            self.dropout_param['p'] = dropout
        self.dropout = nn.Dropout()
        self.U1 = None
        self.U2 = None

    def __call__(self, inputs):
        return self.forward(inputs)

    def forward(self, inputs):
        """Run a forward pass and return the final-layer scores.

        Flattens each sample, casts to ``self.dtype``, and stores each
        layer's output and cache in ``self.caches`` for ``backward``.
        """
        inputs = inputs.reshape(inputs.shape[0], -1)
        inputs = inputs.astype(self.dtype)

        x = inputs
        for i in range(self.num_layers):
            w = self.params['W%d' % (i + 1)]
            b = self.params['b%d' % (i + 1)]

            self.caches['z%d' % (i + 1)], self.caches['z%d_cache' % (i + 1)] = \
                self.fcs[i].forward(x, w, b)

            # ReLU after every layer except the last (scores stay linear).
            if i != (self.num_layers - 1):
                x = self.relu(self.caches['z%d' % (i + 1)])

        return self.caches['z%d' % self.num_layers]

    def backward(self, grad_out):
        """Backpropagate ``grad_out`` through all layers.

        :param grad_out: gradient of the loss w.r.t. the final scores.
        :return: dict mapping 'W%d'/'b%d' to parameter gradients.
        """
        grad = dict()

        da = None
        for i in reversed(range(self.num_layers)):
            z = self.caches['z%d' % (i + 1)]
            cache = self.caches['z%d_cache' % (i + 1)]

            if i == (self.num_layers - 1):
                # Last layer has no ReLU, so grad_out flows in directly.
                dz = grad_out
            else:
                dz = self.relu.backward(da, z)

            grad['W%d' % (i + 1)], grad['b%d' % (i + 1)], da = \
                self.fcs[i].backward(dz, cache)

        # Drop the stored activations now that gradients are computed.
        self.caches = self._get_caches()
        return grad

    def _get_fcs(self):
        """Build one nn.FC per layer from input/hidden/output dimensions."""
        fcs = list()
        if self.hidden_dims is None:
            fcs.append(nn.FC(self.input_dim, self.num_classes,
                             weight_scale=self.weight_scale))
        else:
            for i in range(self.num_layers):
                num_in = self.input_dim if i == 0 else self.hidden_dims[i - 1]
                num_out = self.num_classes if i == (self.num_layers - 1) \
                    else self.hidden_dims[i]
                # Fix: weight_scale was silently ignored for multi-layer
                # nets; forward it like the single-layer branch does.
                fcs.append(nn.FC(num_in, num_out,
                                 weight_scale=self.weight_scale))
        return fcs

    def _get_params(self):
        """Collect (W, b) from every FC layer into one flat dict."""
        params = dict()
        for i, fc in enumerate(self.fcs):
            params['W%d' % (i + 1)], params['b%d' % (i + 1)] = fc.get_params()
        return params

    def _get_caches(self):
        """Return an empty cache dict with one slot pair per layer."""
        caches = dict()
        # Fix: the original ranged over 1..num_layers-1 and omitted the
        # slot for the final layer, which forward() writes.
        for i in range(1, self.num_layers + 1):
            caches['z%d' % i] = None
            caches['z%d_cache' % i] = None
        return caches

    def train(self):
        """Switch dropout to training mode (no-op when dropout is off)."""
        if self.use_dropout:
            self.dropout_param['mode'] = 'train'

    def eval(self):
        """Switch dropout to inference mode (no-op when dropout is off)."""
        if self.use_dropout:
            self.dropout_param['mode'] = 'test'

pynet/models/__init__.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,6 @@
11
from .TwoLayerNet import *
22
from .ThreeLayerNet import *
3+
from .FCNet import *
34
from .LeNet5 import *
45
from .AlexNet import *
56
from .NIN import *

0 commit comments

Comments
 (0)