# openpose/pose/body_25/pose_deploy.prototxt
# Provenance: uploaded by eelxpeng via huggingface_hub (revision 1c54393, verified).
# NOTE(review): the lines above were raw Hugging Face page text, which is not valid
# protobuf text format and breaks prototxt parsing; kept here as comments instead.
name: "OpenPose - BODY_25"
input: "image"
input_dim: 1 # Batch size (N); placeholder, overridden at runtime
input_dim: 3 # Color channels (C)
input_dim: 16 # Input height (H); placeholder, overridden at runtime
input_dim: 16 # Input width (W); placeholder, overridden at runtime
layer {
name: "conv1_1"
type: "Convolution"
bottom: "image"
top: "conv1_1"
convolution_param {
num_output: 64
pad: 1
kernel_size: 3
}
}
layer {
name: "relu1_1"
type: "ReLU"
bottom: "conv1_1"
top: "conv1_1"
}
layer {
name: "conv1_2"
type: "Convolution"
bottom: "conv1_1"
top: "conv1_2"
convolution_param {
num_output: 64
pad: 1
kernel_size: 3
}
}
layer {
name: "relu1_2"
type: "ReLU"
bottom: "conv1_2"
top: "conv1_2"
}
layer {
name: "pool1_stage1"
type: "Pooling"
bottom: "conv1_2"
top: "pool1_stage1"
pooling_param {
pool: MAX
kernel_size: 2
stride: 2
}
}
layer {
name: "conv2_1"
type: "Convolution"
bottom: "pool1_stage1"
top: "conv2_1"
convolution_param {
num_output: 128
pad: 1
kernel_size: 3
}
}
layer {
name: "relu2_1"
type: "ReLU"
bottom: "conv2_1"
top: "conv2_1"
}
layer {
name: "conv2_2"
type: "Convolution"
bottom: "conv2_1"
top: "conv2_2"
convolution_param {
num_output: 128
pad: 1
kernel_size: 3
}
}
layer {
name: "relu2_2"
type: "ReLU"
bottom: "conv2_2"
top: "conv2_2"
}
layer {
name: "pool2_stage1"
type: "Pooling"
bottom: "conv2_2"
top: "pool2_stage1"
pooling_param {
pool: MAX
kernel_size: 2
stride: 2
}
}
layer {
name: "conv3_1"
type: "Convolution"
bottom: "pool2_stage1"
top: "conv3_1"
convolution_param {
num_output: 256
pad: 1
kernel_size: 3
}
}
layer {
name: "relu3_1"
type: "ReLU"
bottom: "conv3_1"
top: "conv3_1"
}
layer {
name: "conv3_2"
type: "Convolution"
bottom: "conv3_1"
top: "conv3_2"
convolution_param {
num_output: 256
pad: 1
kernel_size: 3
}
}
layer {
name: "relu3_2"
type: "ReLU"
bottom: "conv3_2"
top: "conv3_2"
}
layer {
name: "conv3_3"
type: "Convolution"
bottom: "conv3_2"
top: "conv3_3"
convolution_param {
num_output: 256
pad: 1
kernel_size: 3
}
}
layer {
name: "relu3_3"
type: "ReLU"
bottom: "conv3_3"
top: "conv3_3"
}
layer {
name: "conv3_4"
type: "Convolution"
bottom: "conv3_3"
top: "conv3_4"
convolution_param {
num_output: 256
pad: 1
kernel_size: 3
}
}
layer {
name: "relu3_4"
type: "ReLU"
bottom: "conv3_4"
top: "conv3_4"
}
layer {
name: "pool3_stage1"
type: "Pooling"
bottom: "conv3_4"
top: "pool3_stage1"
pooling_param {
pool: MAX
kernel_size: 2
stride: 2
}
}
layer {
name: "conv4_1"
type: "Convolution"
bottom: "pool3_stage1"
top: "conv4_1"
convolution_param {
num_output: 512
pad: 1
kernel_size: 3
}
}
layer {
name: "relu4_1"
type: "ReLU"
bottom: "conv4_1"
top: "conv4_1"
}
layer {
name: "conv4_2"
type: "Convolution"
bottom: "conv4_1"
top: "conv4_2"
convolution_param {
num_output: 512
pad: 1
kernel_size: 3
}
}
layer {
name: "prelu4_2"
type: "PReLU"
bottom: "conv4_2"
top: "conv4_2"
}
layer {
name: "conv4_3_CPM"
type: "Convolution"
bottom: "conv4_2"
top: "conv4_3_CPM"
convolution_param {
num_output: 256
pad: 1
kernel_size: 3
}
}
layer {
name: "prelu4_3_CPM"
type: "PReLU"
bottom: "conv4_3_CPM"
top: "conv4_3_CPM"
}
layer {
name: "conv4_4_CPM"
type: "Convolution"
bottom: "conv4_3_CPM"
top: "conv4_4_CPM"
convolution_param {
num_output: 128
pad: 1
kernel_size: 3
}
}
layer {
name: "prelu4_4_CPM"
type: "PReLU"
bottom: "conv4_4_CPM"
top: "conv4_4_CPM"
}
layer {
name: "Mconv1_stage0_L2_0"
type: "Convolution"
bottom: "conv4_4_CPM"
top: "Mconv1_stage0_L2_0"
convolution_param {
num_output: 96
pad: 1
kernel_size: 3
}
}
layer {
name: "Mprelu1_stage0_L2_0"
type: "PReLU"
bottom: "Mconv1_stage0_L2_0"
top: "Mconv1_stage0_L2_0"
}
layer {
name: "Mconv1_stage0_L2_1"
type: "Convolution"
bottom: "Mconv1_stage0_L2_0"
top: "Mconv1_stage0_L2_1"
convolution_param {
num_output: 96
pad: 1
kernel_size: 3
}
}
layer {
name: "Mprelu1_stage0_L2_1"
type: "PReLU"
bottom: "Mconv1_stage0_L2_1"
top: "Mconv1_stage0_L2_1"
}
layer {
name: "Mconv1_stage0_L2_2"
type: "Convolution"
bottom: "Mconv1_stage0_L2_1"
top: "Mconv1_stage0_L2_2"
convolution_param {
num_output: 96
pad: 1
kernel_size: 3
}
}
layer {
name: "Mprelu1_stage0_L2_2"
type: "PReLU"
bottom: "Mconv1_stage0_L2_2"
top: "Mconv1_stage0_L2_2"
}
layer {
name: "Mconv1_stage0_L2_concat"
type: "Concat"
bottom: "Mconv1_stage0_L2_0"
bottom: "Mconv1_stage0_L2_1"
bottom: "Mconv1_stage0_L2_2"
top: "Mconv1_stage0_L2_concat"
concat_param {
axis: 1
}
}
layer {
name: "Mconv2_stage0_L2_0"
type: "Convolution"
bottom: "Mconv1_stage0_L2_concat"
top: "Mconv2_stage0_L2_0"
convolution_param {
num_output: 96
pad: 1
kernel_size: 3
}
}
layer {
name: "Mprelu2_stage0_L2_0"
type: "PReLU"
bottom: "Mconv2_stage0_L2_0"
top: "Mconv2_stage0_L2_0"
}
layer {
name: "Mconv2_stage0_L2_1"
type: "Convolution"
bottom: "Mconv2_stage0_L2_0"
top: "Mconv2_stage0_L2_1"
convolution_param {
num_output: 96
pad: 1
kernel_size: 3
}
}
layer {
name: "Mprelu2_stage0_L2_1"
type: "PReLU"
bottom: "Mconv2_stage0_L2_1"
top: "Mconv2_stage0_L2_1"
}
layer {
name: "Mconv2_stage0_L2_2"
type: "Convolution"
bottom: "Mconv2_stage0_L2_1"
top: "Mconv2_stage0_L2_2"
convolution_param {
num_output: 96
pad: 1
kernel_size: 3
}
}
layer {
name: "Mprelu2_stage0_L2_2"
type: "PReLU"
bottom: "Mconv2_stage0_L2_2"
top: "Mconv2_stage0_L2_2"
}
layer {
name: "Mconv2_stage0_L2_concat"
type: "Concat"
bottom: "Mconv2_stage0_L2_0"
bottom: "Mconv2_stage0_L2_1"
bottom: "Mconv2_stage0_L2_2"
top: "Mconv2_stage0_L2_concat"
concat_param {
axis: 1
}
}
layer {
name: "Mconv3_stage0_L2_0"
type: "Convolution"
bottom: "Mconv2_stage0_L2_concat"
top: "Mconv3_stage0_L2_0"
convolution_param {
num_output: 96
pad: 1
kernel_size: 3
}
}
layer {
name: "Mprelu3_stage0_L2_0"
type: "PReLU"
bottom: "Mconv3_stage0_L2_0"
top: "Mconv3_stage0_L2_0"
}
layer {
name: "Mconv3_stage0_L2_1"
type: "Convolution"
bottom: "Mconv3_stage0_L2_0"
top: "Mconv3_stage0_L2_1"
convolution_param {
num_output: 96
pad: 1
kernel_size: 3
}
}
layer {
name: "Mprelu3_stage0_L2_1"
type: "PReLU"
bottom: "Mconv3_stage0_L2_1"
top: "Mconv3_stage0_L2_1"
}
layer {
name: "Mconv3_stage0_L2_2"
type: "Convolution"
bottom: "Mconv3_stage0_L2_1"
top: "Mconv3_stage0_L2_2"
convolution_param {
num_output: 96
pad: 1
kernel_size: 3
}
}
layer {
name: "Mprelu3_stage0_L2_2"
type: "PReLU"
bottom: "Mconv3_stage0_L2_2"
top: "Mconv3_stage0_L2_2"
}
layer {
name: "Mconv3_stage0_L2_concat"
type: "Concat"
bottom: "Mconv3_stage0_L2_0"
bottom: "Mconv3_stage0_L2_1"
bottom: "Mconv3_stage0_L2_2"
top: "Mconv3_stage0_L2_concat"
concat_param {
axis: 1
}
}
layer {
name: "Mconv4_stage0_L2_0"
type: "Convolution"
bottom: "Mconv3_stage0_L2_concat"
top: "Mconv4_stage0_L2_0"
convolution_param {
num_output: 96
pad: 1
kernel_size: 3
}
}
layer {
name: "Mprelu4_stage0_L2_0"
type: "PReLU"
bottom: "Mconv4_stage0_L2_0"
top: "Mconv4_stage0_L2_0"
}
layer {
name: "Mconv4_stage0_L2_1"
type: "Convolution"
bottom: "Mconv4_stage0_L2_0"
top: "Mconv4_stage0_L2_1"
convolution_param {
num_output: 96
pad: 1
kernel_size: 3
}
}
layer {
name: "Mprelu4_stage0_L2_1"
type: "PReLU"
bottom: "Mconv4_stage0_L2_1"
top: "Mconv4_stage0_L2_1"
}
layer {
name: "Mconv4_stage0_L2_2"
type: "Convolution"
bottom: "Mconv4_stage0_L2_1"
top: "Mconv4_stage0_L2_2"
convolution_param {
num_output: 96
pad: 1
kernel_size: 3
}
}
layer {
name: "Mprelu4_stage0_L2_2"
type: "PReLU"
bottom: "Mconv4_stage0_L2_2"
top: "Mconv4_stage0_L2_2"
}
layer {
name: "Mconv4_stage0_L2_concat"
type: "Concat"
bottom: "Mconv4_stage0_L2_0"
bottom: "Mconv4_stage0_L2_1"
bottom: "Mconv4_stage0_L2_2"
top: "Mconv4_stage0_L2_concat"
concat_param {
axis: 1
}
}
layer {
name: "Mconv5_stage0_L2_0"
type: "Convolution"
bottom: "Mconv4_stage0_L2_concat"
top: "Mconv5_stage0_L2_0"
convolution_param {
num_output: 96
pad: 1
kernel_size: 3
}
}
layer {
name: "Mprelu5_stage0_L2_0"
type: "PReLU"
bottom: "Mconv5_stage0_L2_0"
top: "Mconv5_stage0_L2_0"
}
layer {
name: "Mconv5_stage0_L2_1"
type: "Convolution"
bottom: "Mconv5_stage0_L2_0"
top: "Mconv5_stage0_L2_1"
convolution_param {
num_output: 96
pad: 1
kernel_size: 3
}
}
layer {
name: "Mprelu5_stage0_L2_1"
type: "PReLU"
bottom: "Mconv5_stage0_L2_1"
top: "Mconv5_stage0_L2_1"
}
layer {
name: "Mconv5_stage0_L2_2"
type: "Convolution"
bottom: "Mconv5_stage0_L2_1"
top: "Mconv5_stage0_L2_2"
convolution_param {
num_output: 96
pad: 1
kernel_size: 3
}
}
layer {
name: "Mprelu5_stage0_L2_2"
type: "PReLU"
bottom: "Mconv5_stage0_L2_2"
top: "Mconv5_stage0_L2_2"
}
layer {
name: "Mconv5_stage0_L2_concat"
type: "Concat"
bottom: "Mconv5_stage0_L2_0"
bottom: "Mconv5_stage0_L2_1"
bottom: "Mconv5_stage0_L2_2"
top: "Mconv5_stage0_L2_concat"
concat_param {
axis: 1
}
}
layer {
name: "Mconv6_stage0_L2"
type: "Convolution"
bottom: "Mconv5_stage0_L2_concat"
top: "Mconv6_stage0_L2"
convolution_param {
num_output: 256
pad: 0
kernel_size: 1
}
}
layer {
name: "Mprelu6_stage0_L2"
type: "PReLU"
bottom: "Mconv6_stage0_L2"
top: "Mconv6_stage0_L2"
}
layer {
name: "Mconv7_stage0_L2"
type: "Convolution"
bottom: "Mconv6_stage0_L2"
top: "Mconv7_stage0_L2"
convolution_param {
num_output: 52
pad: 0
kernel_size: 1
}
}
layer {
name: "concat_stage1_L2"
type: "Concat"
bottom: "conv4_4_CPM"
bottom: "Mconv7_stage0_L2"
top: "concat_stage1_L2"
concat_param {
axis: 1
}
}
layer {
name: "Mconv1_stage1_L2_0"
type: "Convolution"
bottom: "concat_stage1_L2"
top: "Mconv1_stage1_L2_0"
convolution_param {
num_output: 128
pad: 1
kernel_size: 3
}
}
layer {
name: "Mprelu1_stage1_L2_0"
type: "PReLU"
bottom: "Mconv1_stage1_L2_0"
top: "Mconv1_stage1_L2_0"
}
layer {
name: "Mconv1_stage1_L2_1"
type: "Convolution"
bottom: "Mconv1_stage1_L2_0"
top: "Mconv1_stage1_L2_1"
convolution_param {
num_output: 128
pad: 1
kernel_size: 3
}
}
layer {
name: "Mprelu1_stage1_L2_1"
type: "PReLU"
bottom: "Mconv1_stage1_L2_1"
top: "Mconv1_stage1_L2_1"
}
layer {
name: "Mconv1_stage1_L2_2"
type: "Convolution"
bottom: "Mconv1_stage1_L2_1"
top: "Mconv1_stage1_L2_2"
convolution_param {
num_output: 128
pad: 1
kernel_size: 3
}
}
layer {
name: "Mprelu1_stage1_L2_2"
type: "PReLU"
bottom: "Mconv1_stage1_L2_2"
top: "Mconv1_stage1_L2_2"
}
layer {
name: "Mconv1_stage1_L2_concat"
type: "Concat"
bottom: "Mconv1_stage1_L2_0"
bottom: "Mconv1_stage1_L2_1"
bottom: "Mconv1_stage1_L2_2"
top: "Mconv1_stage1_L2_concat"
concat_param {
axis: 1
}
}
layer {
name: "Mconv2_stage1_L2_0"
type: "Convolution"
bottom: "Mconv1_stage1_L2_concat"
top: "Mconv2_stage1_L2_0"
convolution_param {
num_output: 128
pad: 1
kernel_size: 3
}
}
layer {
name: "Mprelu2_stage1_L2_0"
type: "PReLU"
bottom: "Mconv2_stage1_L2_0"
top: "Mconv2_stage1_L2_0"
}
layer {
name: "Mconv2_stage1_L2_1"
type: "Convolution"
bottom: "Mconv2_stage1_L2_0"
top: "Mconv2_stage1_L2_1"
convolution_param {
num_output: 128
pad: 1
kernel_size: 3
}
}
layer {
name: "Mprelu2_stage1_L2_1"
type: "PReLU"
bottom: "Mconv2_stage1_L2_1"
top: "Mconv2_stage1_L2_1"
}
layer {
name: "Mconv2_stage1_L2_2"
type: "Convolution"
bottom: "Mconv2_stage1_L2_1"
top: "Mconv2_stage1_L2_2"
convolution_param {
num_output: 128
pad: 1
kernel_size: 3
}
}
layer {
name: "Mprelu2_stage1_L2_2"
type: "PReLU"
bottom: "Mconv2_stage1_L2_2"
top: "Mconv2_stage1_L2_2"
}
layer {
name: "Mconv2_stage1_L2_concat"
type: "Concat"
bottom: "Mconv2_stage1_L2_0"
bottom: "Mconv2_stage1_L2_1"
bottom: "Mconv2_stage1_L2_2"
top: "Mconv2_stage1_L2_concat"
concat_param {
axis: 1
}
}
layer {
name: "Mconv3_stage1_L2_0"
type: "Convolution"
bottom: "Mconv2_stage1_L2_concat"
top: "Mconv3_stage1_L2_0"
convolution_param {
num_output: 128
pad: 1
kernel_size: 3
}
}
layer {
name: "Mprelu3_stage1_L2_0"
type: "PReLU"
bottom: "Mconv3_stage1_L2_0"
top: "Mconv3_stage1_L2_0"
}
layer {
name: "Mconv3_stage1_L2_1"
type: "Convolution"
bottom: "Mconv3_stage1_L2_0"
top: "Mconv3_stage1_L2_1"
convolution_param {
num_output: 128
pad: 1
kernel_size: 3
}
}
layer {
name: "Mprelu3_stage1_L2_1"
type: "PReLU"
bottom: "Mconv3_stage1_L2_1"
top: "Mconv3_stage1_L2_1"
}
layer {
name: "Mconv3_stage1_L2_2"
type: "Convolution"
bottom: "Mconv3_stage1_L2_1"
top: "Mconv3_stage1_L2_2"
convolution_param {
num_output: 128
pad: 1
kernel_size: 3
}
}
layer {
name: "Mprelu3_stage1_L2_2"
type: "PReLU"
bottom: "Mconv3_stage1_L2_2"
top: "Mconv3_stage1_L2_2"
}
layer {
name: "Mconv3_stage1_L2_concat"
type: "Concat"
bottom: "Mconv3_stage1_L2_0"
bottom: "Mconv3_stage1_L2_1"
bottom: "Mconv3_stage1_L2_2"
top: "Mconv3_stage1_L2_concat"
concat_param {
axis: 1
}
}
layer {
name: "Mconv4_stage1_L2_0"
type: "Convolution"
bottom: "Mconv3_stage1_L2_concat"
top: "Mconv4_stage1_L2_0"
convolution_param {
num_output: 128
pad: 1
kernel_size: 3
}
}
layer {
name: "Mprelu4_stage1_L2_0"
type: "PReLU"
bottom: "Mconv4_stage1_L2_0"
top: "Mconv4_stage1_L2_0"
}
layer {
name: "Mconv4_stage1_L2_1"
type: "Convolution"
bottom: "Mconv4_stage1_L2_0"
top: "Mconv4_stage1_L2_1"
convolution_param {
num_output: 128
pad: 1
kernel_size: 3
}
}
layer {
name: "Mprelu4_stage1_L2_1"
type: "PReLU"
bottom: "Mconv4_stage1_L2_1"
top: "Mconv4_stage1_L2_1"
}
layer {
name: "Mconv4_stage1_L2_2"
type: "Convolution"
bottom: "Mconv4_stage1_L2_1"
top: "Mconv4_stage1_L2_2"
convolution_param {
num_output: 128
pad: 1
kernel_size: 3
}
}
layer {
name: "Mprelu4_stage1_L2_2"
type: "PReLU"
bottom: "Mconv4_stage1_L2_2"
top: "Mconv4_stage1_L2_2"
}
layer {
name: "Mconv4_stage1_L2_concat"
type: "Concat"
bottom: "Mconv4_stage1_L2_0"
bottom: "Mconv4_stage1_L2_1"
bottom: "Mconv4_stage1_L2_2"
top: "Mconv4_stage1_L2_concat"
concat_param {
axis: 1
}
}
layer {
name: "Mconv5_stage1_L2_0"
type: "Convolution"
bottom: "Mconv4_stage1_L2_concat"
top: "Mconv5_stage1_L2_0"
convolution_param {
num_output: 128
pad: 1
kernel_size: 3
}
}
layer {
name: "Mprelu5_stage1_L2_0"
type: "PReLU"
bottom: "Mconv5_stage1_L2_0"
top: "Mconv5_stage1_L2_0"
}
layer {
name: "Mconv5_stage1_L2_1"
type: "Convolution"
bottom: "Mconv5_stage1_L2_0"
top: "Mconv5_stage1_L2_1"
convolution_param {
num_output: 128
pad: 1
kernel_size: 3
}
}
layer {
name: "Mprelu5_stage1_L2_1"
type: "PReLU"
bottom: "Mconv5_stage1_L2_1"
top: "Mconv5_stage1_L2_1"
}
layer {
name: "Mconv5_stage1_L2_2"
type: "Convolution"
bottom: "Mconv5_stage1_L2_1"
top: "Mconv5_stage1_L2_2"
convolution_param {
num_output: 128
pad: 1
kernel_size: 3
}
}
layer {
name: "Mprelu5_stage1_L2_2"
type: "PReLU"
bottom: "Mconv5_stage1_L2_2"
top: "Mconv5_stage1_L2_2"
}
layer {
name: "Mconv5_stage1_L2_concat"
type: "Concat"
bottom: "Mconv5_stage1_L2_0"
bottom: "Mconv5_stage1_L2_1"
bottom: "Mconv5_stage1_L2_2"
top: "Mconv5_stage1_L2_concat"
concat_param {
axis: 1
}
}
layer {
name: "Mconv6_stage1_L2"
type: "Convolution"
bottom: "Mconv5_stage1_L2_concat"
top: "Mconv6_stage1_L2"
convolution_param {
num_output: 512
pad: 0
kernel_size: 1
}
}
layer {
name: "Mprelu6_stage1_L2"
type: "PReLU"
bottom: "Mconv6_stage1_L2"
top: "Mconv6_stage1_L2"
}
layer {
name: "Mconv7_stage1_L2"
type: "Convolution"
bottom: "Mconv6_stage1_L2"
top: "Mconv7_stage1_L2"
convolution_param {
num_output: 52
pad: 0
kernel_size: 1
}
}
layer {
name: "concat_stage2_L2"
type: "Concat"
bottom: "conv4_4_CPM"
bottom: "Mconv7_stage1_L2"
top: "concat_stage2_L2"
concat_param {
axis: 1
}
}
layer {
name: "Mconv1_stage2_L2_0"
type: "Convolution"
bottom: "concat_stage2_L2"
top: "Mconv1_stage2_L2_0"
convolution_param {
num_output: 128
pad: 1
kernel_size: 3
}
}
layer {
name: "Mprelu1_stage2_L2_0"
type: "PReLU"
bottom: "Mconv1_stage2_L2_0"
top: "Mconv1_stage2_L2_0"
}
layer {
name: "Mconv1_stage2_L2_1"
type: "Convolution"
bottom: "Mconv1_stage2_L2_0"
top: "Mconv1_stage2_L2_1"
convolution_param {
num_output: 128
pad: 1
kernel_size: 3
}
}
layer {
name: "Mprelu1_stage2_L2_1"
type: "PReLU"
bottom: "Mconv1_stage2_L2_1"
top: "Mconv1_stage2_L2_1"
}
layer {
name: "Mconv1_stage2_L2_2"
type: "Convolution"
bottom: "Mconv1_stage2_L2_1"
top: "Mconv1_stage2_L2_2"
convolution_param {
num_output: 128
pad: 1
kernel_size: 3
}
}
layer {
name: "Mprelu1_stage2_L2_2"
type: "PReLU"
bottom: "Mconv1_stage2_L2_2"
top: "Mconv1_stage2_L2_2"
}
layer {
name: "Mconv1_stage2_L2_concat"
type: "Concat"
bottom: "Mconv1_stage2_L2_0"
bottom: "Mconv1_stage2_L2_1"
bottom: "Mconv1_stage2_L2_2"
top: "Mconv1_stage2_L2_concat"
concat_param {
axis: 1
}
}
layer {
name: "Mconv2_stage2_L2_0"
type: "Convolution"
bottom: "Mconv1_stage2_L2_concat"
top: "Mconv2_stage2_L2_0"
convolution_param {
num_output: 128
pad: 1
kernel_size: 3
}
}
layer {
name: "Mprelu2_stage2_L2_0"
type: "PReLU"
bottom: "Mconv2_stage2_L2_0"
top: "Mconv2_stage2_L2_0"
}
layer {
name: "Mconv2_stage2_L2_1"
type: "Convolution"
bottom: "Mconv2_stage2_L2_0"
top: "Mconv2_stage2_L2_1"
convolution_param {
num_output: 128
pad: 1
kernel_size: 3
}
}
layer {
name: "Mprelu2_stage2_L2_1"
type: "PReLU"
bottom: "Mconv2_stage2_L2_1"
top: "Mconv2_stage2_L2_1"
}
layer {
name: "Mconv2_stage2_L2_2"
type: "Convolution"
bottom: "Mconv2_stage2_L2_1"
top: "Mconv2_stage2_L2_2"
convolution_param {
num_output: 128
pad: 1
kernel_size: 3
}
}
layer {
name: "Mprelu2_stage2_L2_2"
type: "PReLU"
bottom: "Mconv2_stage2_L2_2"
top: "Mconv2_stage2_L2_2"
}
layer {
name: "Mconv2_stage2_L2_concat"
type: "Concat"
bottom: "Mconv2_stage2_L2_0"
bottom: "Mconv2_stage2_L2_1"
bottom: "Mconv2_stage2_L2_2"
top: "Mconv2_stage2_L2_concat"
concat_param {
axis: 1
}
}
layer {
name: "Mconv3_stage2_L2_0"
type: "Convolution"
bottom: "Mconv2_stage2_L2_concat"
top: "Mconv3_stage2_L2_0"
convolution_param {
num_output: 128
pad: 1
kernel_size: 3
}
}
layer {
name: "Mprelu3_stage2_L2_0"
type: "PReLU"
bottom: "Mconv3_stage2_L2_0"
top: "Mconv3_stage2_L2_0"
}
layer {
name: "Mconv3_stage2_L2_1"
type: "Convolution"
bottom: "Mconv3_stage2_L2_0"
top: "Mconv3_stage2_L2_1"
convolution_param {
num_output: 128
pad: 1
kernel_size: 3
}
}
layer {
name: "Mprelu3_stage2_L2_1"
type: "PReLU"
bottom: "Mconv3_stage2_L2_1"
top: "Mconv3_stage2_L2_1"
}
layer {
name: "Mconv3_stage2_L2_2"
type: "Convolution"
bottom: "Mconv3_stage2_L2_1"
top: "Mconv3_stage2_L2_2"
convolution_param {
num_output: 128
pad: 1
kernel_size: 3
}
}
layer {
name: "Mprelu3_stage2_L2_2"
type: "PReLU"
bottom: "Mconv3_stage2_L2_2"
top: "Mconv3_stage2_L2_2"
}
layer {
name: "Mconv3_stage2_L2_concat"
type: "Concat"
bottom: "Mconv3_stage2_L2_0"
bottom: "Mconv3_stage2_L2_1"
bottom: "Mconv3_stage2_L2_2"
top: "Mconv3_stage2_L2_concat"
concat_param {
axis: 1
}
}
layer {
name: "Mconv4_stage2_L2_0"
type: "Convolution"
bottom: "Mconv3_stage2_L2_concat"
top: "Mconv4_stage2_L2_0"
convolution_param {
num_output: 128
pad: 1
kernel_size: 3
}
}
layer {
name: "Mprelu4_stage2_L2_0"
type: "PReLU"
bottom: "Mconv4_stage2_L2_0"
top: "Mconv4_stage2_L2_0"
}
layer {
name: "Mconv4_stage2_L2_1"
type: "Convolution"
bottom: "Mconv4_stage2_L2_0"
top: "Mconv4_stage2_L2_1"
convolution_param {
num_output: 128
pad: 1
kernel_size: 3
}
}
layer {
name: "Mprelu4_stage2_L2_1"
type: "PReLU"
bottom: "Mconv4_stage2_L2_1"
top: "Mconv4_stage2_L2_1"
}
layer {
name: "Mconv4_stage2_L2_2"
type: "Convolution"
bottom: "Mconv4_stage2_L2_1"
top: "Mconv4_stage2_L2_2"
convolution_param {
num_output: 128
pad: 1
kernel_size: 3
}
}
layer {
name: "Mprelu4_stage2_L2_2"
type: "PReLU"
bottom: "Mconv4_stage2_L2_2"
top: "Mconv4_stage2_L2_2"
}
layer {
name: "Mconv4_stage2_L2_concat"
type: "Concat"
bottom: "Mconv4_stage2_L2_0"
bottom: "Mconv4_stage2_L2_1"
bottom: "Mconv4_stage2_L2_2"
top: "Mconv4_stage2_L2_concat"
concat_param {
axis: 1
}
}
layer {
name: "Mconv5_stage2_L2_0"
type: "Convolution"
bottom: "Mconv4_stage2_L2_concat"
top: "Mconv5_stage2_L2_0"
convolution_param {
num_output: 128
pad: 1
kernel_size: 3
}
}
layer {
name: "Mprelu5_stage2_L2_0"
type: "PReLU"
bottom: "Mconv5_stage2_L2_0"
top: "Mconv5_stage2_L2_0"
}
layer {
name: "Mconv5_stage2_L2_1"
type: "Convolution"
bottom: "Mconv5_stage2_L2_0"
top: "Mconv5_stage2_L2_1"
convolution_param {
num_output: 128
pad: 1
kernel_size: 3
}
}
layer {
name: "Mprelu5_stage2_L2_1"
type: "PReLU"
bottom: "Mconv5_stage2_L2_1"
top: "Mconv5_stage2_L2_1"
}
layer {
name: "Mconv5_stage2_L2_2"
type: "Convolution"
bottom: "Mconv5_stage2_L2_1"
top: "Mconv5_stage2_L2_2"
convolution_param {
num_output: 128
pad: 1
kernel_size: 3
}
}
layer {
name: "Mprelu5_stage2_L2_2"
type: "PReLU"
bottom: "Mconv5_stage2_L2_2"
top: "Mconv5_stage2_L2_2"
}
layer {
name: "Mconv5_stage2_L2_concat"
type: "Concat"
bottom: "Mconv5_stage2_L2_0"
bottom: "Mconv5_stage2_L2_1"
bottom: "Mconv5_stage2_L2_2"
top: "Mconv5_stage2_L2_concat"
concat_param {
axis: 1
}
}
layer {
name: "Mconv6_stage2_L2"
type: "Convolution"
bottom: "Mconv5_stage2_L2_concat"
top: "Mconv6_stage2_L2"
convolution_param {
num_output: 512
pad: 0
kernel_size: 1
}
}
layer {
name: "Mprelu6_stage2_L2"
type: "PReLU"
bottom: "Mconv6_stage2_L2"
top: "Mconv6_stage2_L2"
}
layer {
name: "Mconv7_stage2_L2"
type: "Convolution"
bottom: "Mconv6_stage2_L2"
top: "Mconv7_stage2_L2"
convolution_param {
num_output: 52
pad: 0
kernel_size: 1
}
}
layer {
name: "concat_stage3_L2"
type: "Concat"
bottom: "conv4_4_CPM"
bottom: "Mconv7_stage2_L2"
top: "concat_stage3_L2"
concat_param {
axis: 1
}
}
layer {
name: "Mconv1_stage3_L2_0"
type: "Convolution"
bottom: "concat_stage3_L2"
top: "Mconv1_stage3_L2_0"
convolution_param {
num_output: 128
pad: 1
kernel_size: 3
}
}
layer {
name: "Mprelu1_stage3_L2_0"
type: "PReLU"
bottom: "Mconv1_stage3_L2_0"
top: "Mconv1_stage3_L2_0"
}
layer {
name: "Mconv1_stage3_L2_1"
type: "Convolution"
bottom: "Mconv1_stage3_L2_0"
top: "Mconv1_stage3_L2_1"
convolution_param {
num_output: 128
pad: 1
kernel_size: 3
}
}
layer {
name: "Mprelu1_stage3_L2_1"
type: "PReLU"
bottom: "Mconv1_stage3_L2_1"
top: "Mconv1_stage3_L2_1"
}
layer {
name: "Mconv1_stage3_L2_2"
type: "Convolution"
bottom: "Mconv1_stage3_L2_1"
top: "Mconv1_stage3_L2_2"
convolution_param {
num_output: 128
pad: 1
kernel_size: 3
}
}
layer {
name: "Mprelu1_stage3_L2_2"
type: "PReLU"
bottom: "Mconv1_stage3_L2_2"
top: "Mconv1_stage3_L2_2"
}
layer {
name: "Mconv1_stage3_L2_concat"
type: "Concat"
bottom: "Mconv1_stage3_L2_0"
bottom: "Mconv1_stage3_L2_1"
bottom: "Mconv1_stage3_L2_2"
top: "Mconv1_stage3_L2_concat"
concat_param {
axis: 1
}
}
layer {
name: "Mconv2_stage3_L2_0"
type: "Convolution"
bottom: "Mconv1_stage3_L2_concat"
top: "Mconv2_stage3_L2_0"
convolution_param {
num_output: 128
pad: 1
kernel_size: 3
}
}
layer {
name: "Mprelu2_stage3_L2_0"
type: "PReLU"
bottom: "Mconv2_stage3_L2_0"
top: "Mconv2_stage3_L2_0"
}
layer {
name: "Mconv2_stage3_L2_1"
type: "Convolution"
bottom: "Mconv2_stage3_L2_0"
top: "Mconv2_stage3_L2_1"
convolution_param {
num_output: 128
pad: 1
kernel_size: 3
}
}
layer {
name: "Mprelu2_stage3_L2_1"
type: "PReLU"
bottom: "Mconv2_stage3_L2_1"
top: "Mconv2_stage3_L2_1"
}
layer {
name: "Mconv2_stage3_L2_2"
type: "Convolution"
bottom: "Mconv2_stage3_L2_1"
top: "Mconv2_stage3_L2_2"
convolution_param {
num_output: 128
pad: 1
kernel_size: 3
}
}
layer {
name: "Mprelu2_stage3_L2_2"
type: "PReLU"
bottom: "Mconv2_stage3_L2_2"
top: "Mconv2_stage3_L2_2"
}
layer {
name: "Mconv2_stage3_L2_concat"
type: "Concat"
bottom: "Mconv2_stage3_L2_0"
bottom: "Mconv2_stage3_L2_1"
bottom: "Mconv2_stage3_L2_2"
top: "Mconv2_stage3_L2_concat"
concat_param {
axis: 1
}
}
layer {
name: "Mconv3_stage3_L2_0"
type: "Convolution"
bottom: "Mconv2_stage3_L2_concat"
top: "Mconv3_stage3_L2_0"
convolution_param {
num_output: 128
pad: 1
kernel_size: 3
}
}
layer {
name: "Mprelu3_stage3_L2_0"
type: "PReLU"
bottom: "Mconv3_stage3_L2_0"
top: "Mconv3_stage3_L2_0"
}
layer {
name: "Mconv3_stage3_L2_1"
type: "Convolution"
bottom: "Mconv3_stage3_L2_0"
top: "Mconv3_stage3_L2_1"
convolution_param {
num_output: 128
pad: 1
kernel_size: 3
}
}
layer {
name: "Mprelu3_stage3_L2_1"
type: "PReLU"
bottom: "Mconv3_stage3_L2_1"
top: "Mconv3_stage3_L2_1"
}
layer {
name: "Mconv3_stage3_L2_2"
type: "Convolution"
bottom: "Mconv3_stage3_L2_1"
top: "Mconv3_stage3_L2_2"
convolution_param {
num_output: 128
pad: 1
kernel_size: 3
}
}
layer {
name: "Mprelu3_stage3_L2_2"
type: "PReLU"
bottom: "Mconv3_stage3_L2_2"
top: "Mconv3_stage3_L2_2"
}
layer {
name: "Mconv3_stage3_L2_concat"
type: "Concat"
bottom: "Mconv3_stage3_L2_0"
bottom: "Mconv3_stage3_L2_1"
bottom: "Mconv3_stage3_L2_2"
top: "Mconv3_stage3_L2_concat"
concat_param {
axis: 1
}
}
layer {
name: "Mconv4_stage3_L2_0"
type: "Convolution"
bottom: "Mconv3_stage3_L2_concat"
top: "Mconv4_stage3_L2_0"
convolution_param {
num_output: 128
pad: 1
kernel_size: 3
}
}
layer {
name: "Mprelu4_stage3_L2_0"
type: "PReLU"
bottom: "Mconv4_stage3_L2_0"
top: "Mconv4_stage3_L2_0"
}
layer {
name: "Mconv4_stage3_L2_1"
type: "Convolution"
bottom: "Mconv4_stage3_L2_0"
top: "Mconv4_stage3_L2_1"
convolution_param {
num_output: 128
pad: 1
kernel_size: 3
}
}
layer {
name: "Mprelu4_stage3_L2_1"
type: "PReLU"
bottom: "Mconv4_stage3_L2_1"
top: "Mconv4_stage3_L2_1"
}
layer {
name: "Mconv4_stage3_L2_2"
type: "Convolution"
bottom: "Mconv4_stage3_L2_1"
top: "Mconv4_stage3_L2_2"
convolution_param {
num_output: 128
pad: 1
kernel_size: 3
}
}
layer {
name: "Mprelu4_stage3_L2_2"
type: "PReLU"
bottom: "Mconv4_stage3_L2_2"
top: "Mconv4_stage3_L2_2"
}
layer {
name: "Mconv4_stage3_L2_concat"
type: "Concat"
bottom: "Mconv4_stage3_L2_0"
bottom: "Mconv4_stage3_L2_1"
bottom: "Mconv4_stage3_L2_2"
top: "Mconv4_stage3_L2_concat"
concat_param {
axis: 1
}
}
layer {
name: "Mconv5_stage3_L2_0"
type: "Convolution"
bottom: "Mconv4_stage3_L2_concat"
top: "Mconv5_stage3_L2_0"
convolution_param {
num_output: 128
pad: 1
kernel_size: 3
}
}
layer {
name: "Mprelu5_stage3_L2_0"
type: "PReLU"
bottom: "Mconv5_stage3_L2_0"
top: "Mconv5_stage3_L2_0"
}
layer {
name: "Mconv5_stage3_L2_1"
type: "Convolution"
bottom: "Mconv5_stage3_L2_0"
top: "Mconv5_stage3_L2_1"
convolution_param {
num_output: 128
pad: 1
kernel_size: 3
}
}
layer {
name: "Mprelu5_stage3_L2_1"
type: "PReLU"
bottom: "Mconv5_stage3_L2_1"
top: "Mconv5_stage3_L2_1"
}
layer {
name: "Mconv5_stage3_L2_2"
type: "Convolution"
bottom: "Mconv5_stage3_L2_1"
top: "Mconv5_stage3_L2_2"
convolution_param {
num_output: 128
pad: 1
kernel_size: 3
}
}
layer {
name: "Mprelu5_stage3_L2_2"
type: "PReLU"
bottom: "Mconv5_stage3_L2_2"
top: "Mconv5_stage3_L2_2"
}
layer {
name: "Mconv5_stage3_L2_concat"
type: "Concat"
bottom: "Mconv5_stage3_L2_0"
bottom: "Mconv5_stage3_L2_1"
bottom: "Mconv5_stage3_L2_2"
top: "Mconv5_stage3_L2_concat"
concat_param {
axis: 1
}
}
layer {
name: "Mconv6_stage3_L2"
type: "Convolution"
bottom: "Mconv5_stage3_L2_concat"
top: "Mconv6_stage3_L2"
convolution_param {
num_output: 512
pad: 0
kernel_size: 1
}
}
layer {
name: "Mprelu6_stage3_L2"
type: "PReLU"
bottom: "Mconv6_stage3_L2"
top: "Mconv6_stage3_L2"
}
layer {
name: "Mconv7_stage3_L2"
type: "Convolution"
bottom: "Mconv6_stage3_L2"
top: "Mconv7_stage3_L2"
convolution_param {
num_output: 52
pad: 0
kernel_size: 1
}
}
layer {
name: "concat_stage0_L1"
type: "Concat"
bottom: "conv4_4_CPM"
bottom: "Mconv7_stage3_L2"
top: "concat_stage0_L1"
concat_param {
axis: 1
}
}
layer {
name: "Mconv1_stage0_L1_0"
type: "Convolution"
bottom: "concat_stage0_L1"
top: "Mconv1_stage0_L1_0"
convolution_param {
num_output: 96
pad: 1
kernel_size: 3
}
}
layer {
name: "Mprelu1_stage0_L1_0"
type: "PReLU"
bottom: "Mconv1_stage0_L1_0"
top: "Mconv1_stage0_L1_0"
}
layer {
name: "Mconv1_stage0_L1_1"
type: "Convolution"
bottom: "Mconv1_stage0_L1_0"
top: "Mconv1_stage0_L1_1"
convolution_param {
num_output: 96
pad: 1
kernel_size: 3
}
}
layer {
name: "Mprelu1_stage0_L1_1"
type: "PReLU"
bottom: "Mconv1_stage0_L1_1"
top: "Mconv1_stage0_L1_1"
}
layer {
name: "Mconv1_stage0_L1_2"
type: "Convolution"
bottom: "Mconv1_stage0_L1_1"
top: "Mconv1_stage0_L1_2"
convolution_param {
num_output: 96
pad: 1
kernel_size: 3
}
}
layer {
name: "Mprelu1_stage0_L1_2"
type: "PReLU"
bottom: "Mconv1_stage0_L1_2"
top: "Mconv1_stage0_L1_2"
}
layer {
name: "Mconv1_stage0_L1_concat"
type: "Concat"
bottom: "Mconv1_stage0_L1_0"
bottom: "Mconv1_stage0_L1_1"
bottom: "Mconv1_stage0_L1_2"
top: "Mconv1_stage0_L1_concat"
concat_param {
axis: 1
}
}
layer {
name: "Mconv2_stage0_L1_0"
type: "Convolution"
bottom: "Mconv1_stage0_L1_concat"
top: "Mconv2_stage0_L1_0"
convolution_param {
num_output: 96
pad: 1
kernel_size: 3
}
}
layer {
name: "Mprelu2_stage0_L1_0"
type: "PReLU"
bottom: "Mconv2_stage0_L1_0"
top: "Mconv2_stage0_L1_0"
}
layer {
name: "Mconv2_stage0_L1_1"
type: "Convolution"
bottom: "Mconv2_stage0_L1_0"
top: "Mconv2_stage0_L1_1"
convolution_param {
num_output: 96
pad: 1
kernel_size: 3
}
}
layer {
name: "Mprelu2_stage0_L1_1"
type: "PReLU"
bottom: "Mconv2_stage0_L1_1"
top: "Mconv2_stage0_L1_1"
}
layer {
name: "Mconv2_stage0_L1_2"
type: "Convolution"
bottom: "Mconv2_stage0_L1_1"
top: "Mconv2_stage0_L1_2"
convolution_param {
num_output: 96
pad: 1
kernel_size: 3
}
}
layer {
name: "Mprelu2_stage0_L1_2"
type: "PReLU"
bottom: "Mconv2_stage0_L1_2"
top: "Mconv2_stage0_L1_2"
}
layer {
name: "Mconv2_stage0_L1_concat"
type: "Concat"
bottom: "Mconv2_stage0_L1_0"
bottom: "Mconv2_stage0_L1_1"
bottom: "Mconv2_stage0_L1_2"
top: "Mconv2_stage0_L1_concat"
concat_param {
axis: 1
}
}
layer {
name: "Mconv3_stage0_L1_0"
type: "Convolution"
bottom: "Mconv2_stage0_L1_concat"
top: "Mconv3_stage0_L1_0"
convolution_param {
num_output: 96
pad: 1
kernel_size: 3
}
}
layer {
name: "Mprelu3_stage0_L1_0"
type: "PReLU"
bottom: "Mconv3_stage0_L1_0"
top: "Mconv3_stage0_L1_0"
}
layer {
name: "Mconv3_stage0_L1_1"
type: "Convolution"
bottom: "Mconv3_stage0_L1_0"
top: "Mconv3_stage0_L1_1"
convolution_param {
num_output: 96
pad: 1
kernel_size: 3
}
}
layer {
name: "Mprelu3_stage0_L1_1"
type: "PReLU"
bottom: "Mconv3_stage0_L1_1"
top: "Mconv3_stage0_L1_1"
}
layer {
name: "Mconv3_stage0_L1_2"
type: "Convolution"
bottom: "Mconv3_stage0_L1_1"
top: "Mconv3_stage0_L1_2"
convolution_param {
num_output: 96
pad: 1
kernel_size: 3
}
}
layer {
name: "Mprelu3_stage0_L1_2"
type: "PReLU"
bottom: "Mconv3_stage0_L1_2"
top: "Mconv3_stage0_L1_2"
}
layer {
name: "Mconv3_stage0_L1_concat"
type: "Concat"
bottom: "Mconv3_stage0_L1_0"
bottom: "Mconv3_stage0_L1_1"
bottom: "Mconv3_stage0_L1_2"
top: "Mconv3_stage0_L1_concat"
concat_param {
axis: 1
}
}
layer {
name: "Mconv4_stage0_L1_0"
type: "Convolution"
bottom: "Mconv3_stage0_L1_concat"
top: "Mconv4_stage0_L1_0"
convolution_param {
num_output: 96
pad: 1
kernel_size: 3
}
}
layer {
name: "Mprelu4_stage0_L1_0"
type: "PReLU"
bottom: "Mconv4_stage0_L1_0"
top: "Mconv4_stage0_L1_0"
}
layer {
name: "Mconv4_stage0_L1_1"
type: "Convolution"
bottom: "Mconv4_stage0_L1_0"
top: "Mconv4_stage0_L1_1"
convolution_param {
num_output: 96
pad: 1
kernel_size: 3
}
}
layer {
name: "Mprelu4_stage0_L1_1"
type: "PReLU"
bottom: "Mconv4_stage0_L1_1"
top: "Mconv4_stage0_L1_1"
}
layer {
name: "Mconv4_stage0_L1_2"
type: "Convolution"
bottom: "Mconv4_stage0_L1_1"
top: "Mconv4_stage0_L1_2"
convolution_param {
num_output: 96
pad: 1
kernel_size: 3
}
}
layer {
name: "Mprelu4_stage0_L1_2"
type: "PReLU"
bottom: "Mconv4_stage0_L1_2"
top: "Mconv4_stage0_L1_2"
}
layer {
name: "Mconv4_stage0_L1_concat"
type: "Concat"
bottom: "Mconv4_stage0_L1_0"
bottom: "Mconv4_stage0_L1_1"
bottom: "Mconv4_stage0_L1_2"
top: "Mconv4_stage0_L1_concat"
concat_param {
axis: 1
}
}
layer {
name: "Mconv5_stage0_L1_0"
type: "Convolution"
bottom: "Mconv4_stage0_L1_concat"
top: "Mconv5_stage0_L1_0"
convolution_param {
num_output: 96
pad: 1
kernel_size: 3
}
}
layer {
name: "Mprelu5_stage0_L1_0"
type: "PReLU"
bottom: "Mconv5_stage0_L1_0"
top: "Mconv5_stage0_L1_0"
}
layer {
name: "Mconv5_stage0_L1_1"
type: "Convolution"
bottom: "Mconv5_stage0_L1_0"
top: "Mconv5_stage0_L1_1"
convolution_param {
num_output: 96
pad: 1
kernel_size: 3
}
}
layer {
name: "Mprelu5_stage0_L1_1"
type: "PReLU"
bottom: "Mconv5_stage0_L1_1"
top: "Mconv5_stage0_L1_1"
}
layer {
name: "Mconv5_stage0_L1_2"
type: "Convolution"
bottom: "Mconv5_stage0_L1_1"
top: "Mconv5_stage0_L1_2"
convolution_param {
num_output: 96
pad: 1
kernel_size: 3
}
}
layer {
name: "Mprelu5_stage0_L1_2"
type: "PReLU"
bottom: "Mconv5_stage0_L1_2"
top: "Mconv5_stage0_L1_2"
}
layer {
name: "Mconv5_stage0_L1_concat"
type: "Concat"
bottom: "Mconv5_stage0_L1_0"
bottom: "Mconv5_stage0_L1_1"
bottom: "Mconv5_stage0_L1_2"
top: "Mconv5_stage0_L1_concat"
concat_param {
axis: 1
}
}
# --- Stage-0 L1 prediction head: two 1x1 convolutions ---
# Mconv6 mixes the 288-channel dense features into 256 channels;
# Mconv7 projects to the 26-channel stage-0 L1 output.
# NOTE(review): given the BODY_25 model name, 26 is presumably
# 25 keypoint heatmaps + 1 background channel — confirm against
# the OpenPose documentation.
layer {
name: "Mconv6_stage0_L1"
type: "Convolution"
bottom: "Mconv5_stage0_L1_concat"
top: "Mconv6_stage0_L1"
convolution_param {
num_output: 256
pad: 0
kernel_size: 1
}
}
layer {
name: "Mprelu6_stage0_L1"
type: "PReLU"
bottom: "Mconv6_stage0_L1"
top: "Mconv6_stage0_L1"
}
# Final 1x1 conv of stage 0, branch L1 (no activation afterwards).
layer {
name: "Mconv7_stage0_L1"
type: "Convolution"
bottom: "Mconv6_stage0_L1"
top: "Mconv7_stage0_L1"
convolution_param {
num_output: 26
pad: 0
kernel_size: 1
}
}
# --- Input assembly for refinement stage 1, branch L1 ---
# Channel-concatenates the shared backbone features (conv4_4_CPM),
# the stage-0 L1 prediction, and the final L2-branch prediction
# (Mconv7_stage3_L2, defined earlier in this file; presumably the
# part-affinity-field branch — confirm against OpenPose docs).
layer {
name: "concat_stage1_L1"
type: "Concat"
bottom: "conv4_4_CPM"
bottom: "Mconv7_stage0_L1"
bottom: "Mconv7_stage3_L2"
top: "concat_stage1_L1"
concat_param {
axis: 1
}
}
# --- Dense block 1, stage 1, branch L1 ---
# Same three-conv dense pattern as stage 0, but widened to
# 128 channels per sub-conv.
layer {
name: "Mconv1_stage1_L1_0"
type: "Convolution"
bottom: "concat_stage1_L1"
top: "Mconv1_stage1_L1_0"
convolution_param {
num_output: 128
pad: 1
kernel_size: 3
}
}
layer {
name: "Mprelu1_stage1_L1_0"
type: "PReLU"
bottom: "Mconv1_stage1_L1_0"
top: "Mconv1_stage1_L1_0"
}
layer {
name: "Mconv1_stage1_L1_1"
type: "Convolution"
bottom: "Mconv1_stage1_L1_0"
top: "Mconv1_stage1_L1_1"
convolution_param {
num_output: 128
pad: 1
kernel_size: 3
}
}
layer {
name: "Mprelu1_stage1_L1_1"
type: "PReLU"
bottom: "Mconv1_stage1_L1_1"
top: "Mconv1_stage1_L1_1"
}
layer {
name: "Mconv1_stage1_L1_2"
type: "Convolution"
bottom: "Mconv1_stage1_L1_1"
top: "Mconv1_stage1_L1_2"
convolution_param {
num_output: 128
pad: 1
kernel_size: 3
}
}
layer {
name: "Mprelu1_stage1_L1_2"
type: "PReLU"
bottom: "Mconv1_stage1_L1_2"
top: "Mconv1_stage1_L1_2"
}
# 3 x 128 channels -> 384-channel concatenated feature map.
layer {
name: "Mconv1_stage1_L1_concat"
type: "Concat"
bottom: "Mconv1_stage1_L1_0"
bottom: "Mconv1_stage1_L1_1"
bottom: "Mconv1_stage1_L1_2"
top: "Mconv1_stage1_L1_concat"
concat_param {
axis: 1
}
}
# --- Dense block 2, stage 1, branch L1 (same 3x128-channel pattern) ---
layer {
name: "Mconv2_stage1_L1_0"
type: "Convolution"
bottom: "Mconv1_stage1_L1_concat"
top: "Mconv2_stage1_L1_0"
convolution_param {
num_output: 128
pad: 1
kernel_size: 3
}
}
layer {
name: "Mprelu2_stage1_L1_0"
type: "PReLU"
bottom: "Mconv2_stage1_L1_0"
top: "Mconv2_stage1_L1_0"
}
layer {
name: "Mconv2_stage1_L1_1"
type: "Convolution"
bottom: "Mconv2_stage1_L1_0"
top: "Mconv2_stage1_L1_1"
convolution_param {
num_output: 128
pad: 1
kernel_size: 3
}
}
layer {
name: "Mprelu2_stage1_L1_1"
type: "PReLU"
bottom: "Mconv2_stage1_L1_1"
top: "Mconv2_stage1_L1_1"
}
layer {
name: "Mconv2_stage1_L1_2"
type: "Convolution"
bottom: "Mconv2_stage1_L1_1"
top: "Mconv2_stage1_L1_2"
convolution_param {
num_output: 128
pad: 1
kernel_size: 3
}
}
layer {
name: "Mprelu2_stage1_L1_2"
type: "PReLU"
bottom: "Mconv2_stage1_L1_2"
top: "Mconv2_stage1_L1_2"
}
layer {
name: "Mconv2_stage1_L1_concat"
type: "Concat"
bottom: "Mconv2_stage1_L1_0"
bottom: "Mconv2_stage1_L1_1"
bottom: "Mconv2_stage1_L1_2"
top: "Mconv2_stage1_L1_concat"
concat_param {
axis: 1
}
}
# --- Dense block 3, stage 1, branch L1 (same 3x128-channel pattern) ---
layer {
name: "Mconv3_stage1_L1_0"
type: "Convolution"
bottom: "Mconv2_stage1_L1_concat"
top: "Mconv3_stage1_L1_0"
convolution_param {
num_output: 128
pad: 1
kernel_size: 3
}
}
layer {
name: "Mprelu3_stage1_L1_0"
type: "PReLU"
bottom: "Mconv3_stage1_L1_0"
top: "Mconv3_stage1_L1_0"
}
layer {
name: "Mconv3_stage1_L1_1"
type: "Convolution"
bottom: "Mconv3_stage1_L1_0"
top: "Mconv3_stage1_L1_1"
convolution_param {
num_output: 128
pad: 1
kernel_size: 3
}
}
layer {
name: "Mprelu3_stage1_L1_1"
type: "PReLU"
bottom: "Mconv3_stage1_L1_1"
top: "Mconv3_stage1_L1_1"
}
layer {
name: "Mconv3_stage1_L1_2"
type: "Convolution"
bottom: "Mconv3_stage1_L1_1"
top: "Mconv3_stage1_L1_2"
convolution_param {
num_output: 128
pad: 1
kernel_size: 3
}
}
layer {
name: "Mprelu3_stage1_L1_2"
type: "PReLU"
bottom: "Mconv3_stage1_L1_2"
top: "Mconv3_stage1_L1_2"
}
layer {
name: "Mconv3_stage1_L1_concat"
type: "Concat"
bottom: "Mconv3_stage1_L1_0"
bottom: "Mconv3_stage1_L1_1"
bottom: "Mconv3_stage1_L1_2"
top: "Mconv3_stage1_L1_concat"
concat_param {
axis: 1
}
}
# --- Dense block 4, stage 1, branch L1 (same 3x128-channel pattern) ---
layer {
name: "Mconv4_stage1_L1_0"
type: "Convolution"
bottom: "Mconv3_stage1_L1_concat"
top: "Mconv4_stage1_L1_0"
convolution_param {
num_output: 128
pad: 1
kernel_size: 3
}
}
layer {
name: "Mprelu4_stage1_L1_0"
type: "PReLU"
bottom: "Mconv4_stage1_L1_0"
top: "Mconv4_stage1_L1_0"
}
layer {
name: "Mconv4_stage1_L1_1"
type: "Convolution"
bottom: "Mconv4_stage1_L1_0"
top: "Mconv4_stage1_L1_1"
convolution_param {
num_output: 128
pad: 1
kernel_size: 3
}
}
layer {
name: "Mprelu4_stage1_L1_1"
type: "PReLU"
bottom: "Mconv4_stage1_L1_1"
top: "Mconv4_stage1_L1_1"
}
layer {
name: "Mconv4_stage1_L1_2"
type: "Convolution"
bottom: "Mconv4_stage1_L1_1"
top: "Mconv4_stage1_L1_2"
convolution_param {
num_output: 128
pad: 1
kernel_size: 3
}
}
layer {
name: "Mprelu4_stage1_L1_2"
type: "PReLU"
bottom: "Mconv4_stage1_L1_2"
top: "Mconv4_stage1_L1_2"
}
layer {
name: "Mconv4_stage1_L1_concat"
type: "Concat"
bottom: "Mconv4_stage1_L1_0"
bottom: "Mconv4_stage1_L1_1"
bottom: "Mconv4_stage1_L1_2"
top: "Mconv4_stage1_L1_concat"
concat_param {
axis: 1
}
}
# --- Dense block 5, stage 1, branch L1 (same 3x128-channel pattern) ---
layer {
name: "Mconv5_stage1_L1_0"
type: "Convolution"
bottom: "Mconv4_stage1_L1_concat"
top: "Mconv5_stage1_L1_0"
convolution_param {
num_output: 128
pad: 1
kernel_size: 3
}
}
layer {
name: "Mprelu5_stage1_L1_0"
type: "PReLU"
bottom: "Mconv5_stage1_L1_0"
top: "Mconv5_stage1_L1_0"
}
layer {
name: "Mconv5_stage1_L1_1"
type: "Convolution"
bottom: "Mconv5_stage1_L1_0"
top: "Mconv5_stage1_L1_1"
convolution_param {
num_output: 128
pad: 1
kernel_size: 3
}
}
layer {
name: "Mprelu5_stage1_L1_1"
type: "PReLU"
bottom: "Mconv5_stage1_L1_1"
top: "Mconv5_stage1_L1_1"
}
layer {
name: "Mconv5_stage1_L1_2"
type: "Convolution"
bottom: "Mconv5_stage1_L1_1"
top: "Mconv5_stage1_L1_2"
convolution_param {
num_output: 128
pad: 1
kernel_size: 3
}
}
layer {
name: "Mprelu5_stage1_L1_2"
type: "PReLU"
bottom: "Mconv5_stage1_L1_2"
top: "Mconv5_stage1_L1_2"
}
layer {
name: "Mconv5_stage1_L1_concat"
type: "Concat"
bottom: "Mconv5_stage1_L1_0"
bottom: "Mconv5_stage1_L1_1"
bottom: "Mconv5_stage1_L1_2"
top: "Mconv5_stage1_L1_concat"
concat_param {
axis: 1
}
}
# --- Stage-1 L1 prediction head: two 1x1 convolutions ---
# Wider mixing conv than stage 0 (512 vs 256 channels), then a final
# projection to the 26-channel L1 output.
layer {
name: "Mconv6_stage1_L1"
type: "Convolution"
bottom: "Mconv5_stage1_L1_concat"
top: "Mconv6_stage1_L1"
convolution_param {
num_output: 512
pad: 0
kernel_size: 1
}
}
layer {
name: "Mprelu6_stage1_L1"
type: "PReLU"
bottom: "Mconv6_stage1_L1"
top: "Mconv6_stage1_L1"
}
# Final L1 prediction of the network (consumed by net_output below).
layer {
name: "Mconv7_stage1_L1"
type: "Convolution"
bottom: "Mconv6_stage1_L1"
top: "Mconv7_stage1_L1"
convolution_param {
num_output: 26
pad: 0
kernel_size: 1
}
}
# --- Network output ---
# Channel-concatenates the final L1 prediction (26 channels) with the
# final L2-branch prediction (Mconv7_stage3_L2, defined earlier in
# this file). NOTE(review): by OpenPose convention this is presumably
# heatmaps + part affinity fields — confirm channel layout against the
# OpenPose output-format documentation before parsing downstream.
layer {
name: "net_output"
type: "Concat"
bottom: "Mconv7_stage1_L1"
bottom: "Mconv7_stage3_L2"
top: "net_output"
concat_param {
axis: 1
}
}