Skip to content

Commit 271545e

Browse files
Add alexnet
1 parent 3d166e5 commit 271545e

27 files changed

+2409
-0
lines changed
Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,3 @@
1+
version https://git-lfs.github.com/spec/v1
2+
oid sha256:4bff209a9837298157915ef50a4831a59636a6ca1a6b8ebacd990c3a5f3053e0
3+
size 243862414
Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,3 @@
1+
version https://git-lfs.github.com/spec/v1
2+
oid sha256:117736f199a116dfa050accf88a7e8d027d9e414c3d7daf677e05e33ad8ec3b2
3+
size 243862320
Lines changed: 285 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,285 @@
1+
# AlexNet deploy definition (inference-only: no data/loss layers).
# Input: 10 x 3 x 227 x 227 batch, declared via the legacy input/input_shape
# fields below. The equivalent modern "Input" layer is kept commented out.
name: "AlexNet"
#layer {
#  name: "data"
#  type: "Input"
#  top: "data"
#  input_param { shape: { dim: 10 dim: 3 dim: 227 dim: 227 } }
#}
input: "data"
input_shape {
  dim: 10
  dim: 3
  dim: 227
  dim: 227
}

# Stage 1: conv -> ReLU -> LRN -> max pool
layer {
  name: "conv1"
  type: "Convolution"
  bottom: "data"
  top: "conv1"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  convolution_param {
    num_output: 96
    kernel_size: 11
    stride: 4
  }
}
layer {
  name: "relu1"
  type: "ReLU"
  bottom: "conv1"
  top: "conv1"
}
layer {
  name: "norm1"
  type: "LRN"
  bottom: "conv1"
  top: "norm1"
  lrn_param {
    local_size: 5
    alpha: 0.0001
    beta: 0.75
  }
}
layer {
  name: "pool1"
  type: "Pooling"
  bottom: "norm1"
  top: "pool1"
  pooling_param {
    pool: MAX
    kernel_size: 3
    stride: 2
  }
}

# Stage 2: grouped conv -> ReLU -> LRN -> max pool
layer {
  name: "conv2"
  type: "Convolution"
  bottom: "pool1"
  top: "conv2"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  convolution_param {
    num_output: 256
    pad: 2
    kernel_size: 5
    group: 2
  }
}
layer {
  name: "relu2"
  type: "ReLU"
  bottom: "conv2"
  top: "conv2"
}
layer {
  name: "norm2"
  type: "LRN"
  bottom: "conv2"
  top: "norm2"
  lrn_param {
    local_size: 5
    alpha: 0.0001
    beta: 0.75
  }
}
layer {
  name: "pool2"
  type: "Pooling"
  bottom: "norm2"
  top: "pool2"
  pooling_param {
    pool: MAX
    kernel_size: 3
    stride: 2
  }
}

# Stages 3-5: three 3x3 conv layers (conv4/conv5 grouped), then max pool
layer {
  name: "conv3"
  type: "Convolution"
  bottom: "pool2"
  top: "conv3"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  convolution_param {
    num_output: 384
    pad: 1
    kernel_size: 3
  }
}
layer {
  name: "relu3"
  type: "ReLU"
  bottom: "conv3"
  top: "conv3"
}
layer {
  name: "conv4"
  type: "Convolution"
  bottom: "conv3"
  top: "conv4"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  convolution_param {
    num_output: 384
    pad: 1
    kernel_size: 3
    group: 2
  }
}
layer {
  name: "relu4"
  type: "ReLU"
  bottom: "conv4"
  top: "conv4"
}
layer {
  name: "conv5"
  type: "Convolution"
  bottom: "conv4"
  top: "conv5"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  convolution_param {
    num_output: 256
    pad: 1
    kernel_size: 3
    group: 2
  }
}
layer {
  name: "relu5"
  type: "ReLU"
  bottom: "conv5"
  top: "conv5"
}
layer {
  name: "pool5"
  type: "Pooling"
  bottom: "conv5"
  top: "pool5"
  pooling_param {
    pool: MAX
    kernel_size: 3
    stride: 2
  }
}

# Classifier: fc6/fc7 (4096-d, ReLU + dropout) -> fc8 (1000-way) -> softmax
layer {
  name: "fc6"
  type: "InnerProduct"
  bottom: "pool5"
  top: "fc6"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  inner_product_param {
    num_output: 4096
  }
}
layer {
  name: "relu6"
  type: "ReLU"
  bottom: "fc6"
  top: "fc6"
}
layer {
  name: "drop6"
  type: "Dropout"
  bottom: "fc6"
  top: "fc6"
  dropout_param {
    dropout_ratio: 0.5
  }
}
layer {
  name: "fc7"
  type: "InnerProduct"
  bottom: "fc6"
  top: "fc7"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  inner_product_param {
    num_output: 4096
  }
}
layer {
  name: "relu7"
  type: "ReLU"
  bottom: "fc7"
  top: "fc7"
}
layer {
  name: "drop7"
  type: "Dropout"
  bottom: "fc7"
  top: "fc7"
  dropout_param {
    dropout_ratio: 0.5
  }
}
layer {
  name: "fc8"
  type: "InnerProduct"
  bottom: "fc7"
  top: "fc8"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  inner_product_param {
    num_output: 1000
  }
}
layer {
  name: "prob"
  type: "Softmax"
  bottom: "fc8"
  top: "prob"
}
Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,3 @@
1+
version https://git-lfs.github.com/spec/v1
2+
oid sha256:b5863836b6777bdb3d336d94c791864500e2aebb49f7c2bf3d20110223da244f
3+
size 786446

caffe_models/alexnet/include_test_images/n01496331_12107.h

Lines changed: 705 additions & 0 deletions
Large diffs are not rendered by default.

caffe_models/alexnet/include_test_images/n01580077_10435.h

Lines changed: 705 additions & 0 deletions
Large diffs are not rendered by default.

caffe_models/alexnet/include_test_images/n04162706_10854.h

Lines changed: 705 additions & 0 deletions
Large diffs are not rendered by default.
7.73 KB
Loading
142 KB
Loading
97.1 KB
Loading

0 commit comments

Comments
 (0)