Hi instructor, what does the "Check failed: a <= b" error reported during training mean?
Source: 4-7 Hands-on walkthrough of training the Caffe-SSD face detection model

qq_Zl_26
2019-03-30
/anaconda3/envs/py27/bin/python /Users/zhanglei/PycharmProjects/caffe_demo/caffe-ssd/ssd_pascal.py
I0330 12:49:46.055624 185894336 caffe.cpp:210] Use CPU.
I0330 12:49:46.057651 185894336 solver.cpp:63] Initializing solver from parameters:
train_net: "models/VGGNet/wider_face/SSD_300x300/train.prototxt"
test_net: "models/VGGNet/wider_face/SSD_300x300/test.prototxt"
test_iter: 250
test_interval: 10000
base_lr: 0.001
display: 10
max_iter: 120000
lr_policy: "multistep"
gamma: 0.1
momentum: 0.9
weight_decay: 0.0005
snapshot: 80000
snapshot_prefix: "models/VGGNet/wider_face/SSD_300x300/VGG_wider_face_SSD_300x300"
solver_mode: CPU
device_id: 0
debug_info: false
train_state {
level: 0
stage: ""
}
snapshot_after_train: true
test_initialization: false
average_loss: 10
stepvalue: 80000
stepvalue: 100000
stepvalue: 120000
iter_size: 1
type: "SGD"
eval_type: "detection"
ap_version: "11point"
I0330 12:49:46.057927 185894336 solver.cpp:96] Creating training net from train_net file: models/VGGNet/wider_face/SSD_300x300/train.prototxt
I0330 12:49:46.058876 185894336 net.cpp:58] Initializing net from parameters:
name: "VGG_wider_face_SSD_300x300_train"
state {
phase: TRAIN
level: 0
stage: ""
}
layer {
name: "data"
type: "AnnotatedData"
top: "data"
top: "label"
include {
phase: TRAIN
}
transform_param {
mirror: true
mean_value: 104
mean_value: 117
mean_value: 123
resize_param {
prob: 1
resize_mode: WARP
height: 300
width: 300
interp_mode: LINEAR
interp_mode: AREA
interp_mode: NEAREST
interp_mode: CUBIC
interp_mode: LANCZOS4
}
emit_constraint {
emit_type: CENTER
}
distort_param {
brightness_prob: 0.5
brightness_delta: 32
contrast_prob: 0.5
contrast_lower: 0.5
contrast_upper: 1.5
hue_prob: 0.5
hue_delta: 18
saturation_prob: 0.5
saturation_lower: 0.5
saturation_upper: 1.5
random_order_prob: 0
}
expand_param {
prob: 0.5
max_expand_ratio: 4
}
}
data_param {
source: "examples/widerface/widerface_trainval_lmdb"
batch_size: 32
backend: LMDB
}
annotated_data_param {
batch_sampler {
max_sample: 1
max_trials: 1
}
batch_sampler {
sampler {
min_scale: 0.3
max_scale: 1
min_aspect_ratio: 0.5
max_aspect_ratio: 2
}
sample_constraint {
min_jaccard_overlap: 0.1
}
max_sample: 1
max_trials: 50
}
batch_sampler {
sampler {
min_scale: 0.3
max_scale: 1
min_aspect_ratio: 0.5
max_aspect_ratio: 2
}
sample_constraint {
min_jaccard_overlap: 0.3
}
max_sample: 1
max_trials: 50
}
batch_sampler {
sampler {
min_scale: 0.3
max_scale: 1
min_aspect_ratio: 0.5
max_aspect_ratio: 2
}
sample_constraint {
min_jaccard_overlap: 0.5
}
max_sample: 1
max_trials: 50
}
batch_sampler {
sampler {
min_scale: 0.3
max_scale: 1
min_aspect_ratio: 0.5
max_aspect_ratio: 2
}
sample_constraint {
min_jaccard_overlap: 0.7
}
max_sample: 1
max_trials: 50
}
batch_sampler {
sampler {
min_scale: 0.3
max_scale: 1
min_aspect_ratio: 0.5
max_aspect_ratio: 2
}
sample_constraint {
min_jaccard_overlap: 0.9
}
max_sample: 1
max_trials: 50
}
batch_sampler {
sampler {
min_scale: 0.3
max_scale: 1
min_aspect_ratio: 0.5
max_aspect_ratio: 2
}
sample_constraint {
max_jaccard_overlap: 1
}
max_sample: 1
max_trials: 50
}
label_map_file: "data/widerface/labelmap_voc.prototxt"
}
}
layer {
name: "conv1_1"
type: "Convolution"
bottom: "data"
top: "conv1_1"
param {
lr_mult: 1
decay_mult: 1
}
param {
lr_mult: 2
decay_mult: 0
}
convolution_param {
num_output: 64
pad: 1
kernel_size: 3
weight_filler {
type: "xavier"
}
bias_filler {
type: "constant"
value: 0
}
}
}
layer {
name: "relu1_1"
type: "ReLU"
bottom: "conv1_1"
top: "conv1_1"
}
layer {
name: "conv1_2"
type: "Convolution"
bottom: "conv1_1"
top: "conv1_2"
param {
lr_mult: 1
decay_mult: 1
}
param {
lr_mult: 2
decay_mult: 0
}
convolution_param {
num_output: 64
pad: 1
kernel_size: 3
weight_filler {
type: "xavier"
}
bias_filler {
type: "constant"
value: 0
}
}
}
layer {
name: "relu1_2"
type: "ReLU"
bottom: "conv1_2"
top: "conv1_2"
}
layer {
name: "pool1"
type: "Pooling"
bottom: "conv1_2"
top: "pool1"
pooling_param {
pool: MAX
kernel_size: 2
stride: 2
}
}
layer {
name: "conv2_1"
type: "Convolution"
bottom: "pool1"
top: "conv2_1"
param {
lr_mult: 1
decay_mult: 1
}
param {
lr_mult: 2
decay_mult: 0
}
convolution_param {
num_output: 128
pad: 1
kernel_size: 3
weight_filler {
type: "xavier"
}
bias_filler {
type: "constant"
value: 0
}
}
}
layer {
name: "relu2_1"
type: "ReLU"
bottom: "conv2_1"
top: "conv2_1"
}
layer {
name: "conv2_2"
type: "Convolution"
bottom: "conv2_1"
top: "conv2_2"
param {
lr_mult: 1
decay_mult: 1
}
param {
lr_mult: 2
decay_mult: 0
}
convolution_param {
num_output: 128
pad: 1
kernel_size: 3
weight_filler {
type: "xavier"
}
bias_filler {
type: "constant"
value: 0
}
}
}
layer {
name: "relu2_2"
type: "ReLU"
bottom: "conv2_2"
top: "conv2_2"
}
layer {
name: "pool2"
type: "Pooling"
bottom: "conv2_2"
top: "pool2"
pooling_param {
pool: MAX
kernel_size: 2
stride: 2
}
}
layer {
name: "conv3_1"
type: "Convolution"
bottom: "pool2"
top: "conv3_1"
param {
lr_mult: 1
decay_mult: 1
}
param {
lr_mult: 2
decay_mult: 0
}
convolution_param {
num_output: 256
pad: 1
kernel_size: 3
weight_filler {
type: "xavier"
}
bias_filler {
type: "constant"
value: 0
}
}
}
layer {
name: "relu3_1"
type: "ReLU"
bottom: "conv3_1"
top: "conv3_1"
}
layer {
name: "conv3_2"
type: "Convolution"
bottom: "conv3_1"
top: "conv3_2"
param {
lr_mult: 1
decay_mult: 1
}
param {
lr_mult: 2
decay_mult: 0
}
convolution_param {
num_output: 256
pad: 1
kernel_size: 3
weight_filler {
type: "xavier"
}
bias_filler {
type: "constant"
value: 0
}
}
}
layer {
name: "relu3_2"
type: "ReLU"
bottom: "conv3_2"
top: "conv3_2"
}
layer {
name: "conv3_3"
type: "Convolution"
bottom: "conv3_2"
top: "conv3_3"
param {
lr_mult: 1
decay_mult: 1
}
param {
lr_mult: 2
decay_mult: 0
}
convolution_param {
num_output: 256
pad: 1
kernel_size: 3
weight_filler {
type: "xavier"
}
bias_filler {
type: "constant"
value: 0
}
}
}
layer {
name: "relu3_3"
type: "ReLU"
bottom: "conv3_3"
top: "conv3_3"
}
layer {
name: "pool3"
type: "Pooling"
bottom: "conv3_3"
top: "pool3"
pooling_param {
pool: MAX
kernel_size: 2
stride: 2
}
}
layer {
name: "conv4_1"
type: "Convolution"
bottom: "pool3"
top: "conv4_1"
param {
lr_mult: 1
decay_mult: 1
}
param {
lr_mult: 2
decay_mult: 0
}
convolution_param {
num_output: 512
pad: 1
kernel_size: 3
weight_filler {
type: "xavier"
}
bias_filler {
type: "constant"
value: 0
}
}
}
layer {
name: "relu4_1"
type: "ReLU"
bottom: "conv4_1"
top: "conv4_1"
}
layer {
name: "conv4_2"
type: "Convolution"
bottom: "conv4_1"
top: "conv4_2"
param {
lr_mult: 1
decay_mult: 1
}
param {
lr_mult: 2
decay_mult: 0
}
convolution_param {
num_output: 512
pad: 1
kernel_size: 3
weight_filler {
type: "xavier"
}
bias_filler {
type: "constant"
value: 0
}
}
}
layer {
name: "relu4_2"
type: "ReLU"
bottom: "conv4_2"
top: "conv4_2"
}
layer {
name: "conv4_3"
type: "Convolution"
bottom: "conv4_2"
top: "conv4_3"
param {
lr_mult: 1
decay_mult: 1
}
param {
lr_mult: 2
decay_mult: 0
}
convolution_param {
num_output: 512
pad: 1
kernel_size: 3
weight_filler {
type: "xavier"
}
bias_filler {
type: "constant"
value: 0
}
}
}
layer {
name: "relu4_3"
type: "ReLU"
bottom: "conv4_3"
top: "conv4_3"
}
layer {
name: "pool4"
type: "Pooling"
bottom: "conv4_3"
top: "pool4"
pooling_param {
pool: MAX
kernel_size: 2
stride: 2
}
}
layer {
name: "conv5_1"
type: "Convolution"
bottom: "pool4"
top: "conv5_1"
param {
lr_mult: 1
decay_mult: 1
}
param {
lr_mult: 2
decay_mult: 0
}
convolution_param {
num_output: 512
pad: 1
kernel_size: 3
weight_filler {
type: "xavier"
}
bias_filler {
type: "constant"
value: 0
}
dilation: 1
}
}
layer {
name: "relu5_1"
type: "ReLU"
bottom: "conv5_1"
top: "conv5_1"
}
layer {
name: "conv5_2"
type: "Convolution"
bottom: "conv5_1"
top: "conv5_2"
param {
lr_mult: 1
decay_mult: 1
}
param {
lr_mult: 2
decay_mult: 0
}
convolution_param {
num_output: 512
pad: 1
kernel_size: 3
weight_filler {
type: "xavier"
}
bias_filler {
type: "constant"
value: 0
}
dilation: 1
}
}
layer {
name: "relu5_2"
type: "ReLU"
bottom: "conv5_2"
top: "conv5_2"
}
layer {
name: "conv5_3"
type: "Convolution"
bottom: "conv5_2"
top: "conv5_3"
param {
lr_mult: 1
decay_mult: 1
}
param {
lr_mult: 2
decay_mult: 0
}
convolution_param {
num_output: 512
pad: 1
kernel_size: 3
weight_filler {
type: "xavier"
}
bias_filler {
type: "constant"
value: 0
}
dilation: 1
}
}
layer {
name: "relu5_3"
type: "ReLU"
bottom: "conv5_3"
top: "conv5_3"
}
layer {
name: "pool5"
type: "Pooling"
bottom: "conv5_3"
top: "pool5"
pooling_param {
pool: MAX
kernel_size: 3
stride: 1
pad: 1
}
}
layer {
name: "fc6"
type: "Convolution"
bottom: "pool5"
top: "fc6"
param {
lr_mult: 1
decay_mult: 1
}
param {
lr_mult: 2
decay_mult: 0
}
convolution_param {
num_output: 1024
pad: 6
kernel_size: 3
weight_filler {
type: "xavier"
}
bias_filler {
type: "constant"
value: 0
}
dilation: 6
}
}
layer {
name: "relu6"
type: "ReLU"
bottom: "fc6"
top: "fc6"
}
layer {
name: "fc7"
type: "Convolution"
bottom: "fc6"
top: "fc7"
param {
lr_mult: 1
decay_mult: 1
}
param {
lr_mult: 2
decay_mult: 0
}
convolution_param {
num_output: 1024
kernel_size: 1
weight_filler {
type: "xavier"
}
bias_filler {
type: "constant"
value: 0
}
}
}
layer {
name: "relu7"
type: "ReLU"
bottom: "fc7"
top: "fc7"
}
layer {
name: "conv6_1"
type: "Convolution"
bottom: "fc7"
top: "conv6_1"
param {
lr_mult: 1
decay_mult: 1
}
param {
lr_mult: 2
decay_mult: 0
}
convolution_param {
num_output: 256
pad: 0
kernel_size: 1
stride: 1
weight_filler {
type: "xavier"
}
bias_filler {
type: "constant"
value: 0
}
}
}
layer {
name: "conv6_1_relu"
type: "ReLU"
bottom: "conv6_1"
top: "conv6_1"
}
layer {
name: "conv6_2"
type: "Convolution"
bottom: "conv6_1"
top: "conv6_2"
param {
lr_mult: 1
decay_mult: 1
}
param {
lr_mult: 2
decay_mult: 0
}
convolution_param {
num_output: 512
pad: 1
kernel_size: 3
stride: 2
weight_filler {
type: "xavier"
}
bias_filler {
type: "constant"
value: 0
}
}
}
layer {
name: "conv6_2_relu"
type: "ReLU"
bottom: "conv6_2"
top: "conv6_2"
}
layer {
name: "conv7_1"
type: "Convolution"
bottom: "conv6_2"
top: "conv7_1"
param {
lr_mult: 1
decay_mult: 1
}
param {
lr_mult: 2
decay_mult: 0
}
convolution_param {
num_output: 128
pad: 0
kernel_size: 1
stride: 1
weight_filler {
type: "xavier"
}
bias_filler {
type: "constant"
value: 0
}
}
}
layer {
name: "conv7_1_relu"
type: "ReLU"
bottom: "conv7_1"
top: "conv7_1"
}
layer {
name: "conv7_2"
type: "Convolution"
bottom: "conv7_1"
top: "conv7_2"
param {
lr_mult: 1
decay_mult: 1
}
param {
lr_mult: 2
decay_mult: 0
}
convolution_param {
num_output: 256
pad: 1
kernel_size: 3
stride: 2
weight_filler {
type: "xavier"
}
bias_filler {
type: "constant"
value: 0
}
}
}
layer {
name: "conv7_2_relu"
type: "ReLU"
bottom: "conv7_2"
top: "conv7_2"
}
layer {
name: "conv8_1"
type: "Convolution"
bottom: "conv7_2"
top: "conv8_1"
param {
lr_mult: 1
decay_mult: 1
}
param {
lr_mult: 2
decay_mult: 0
}
convolution_param {
num_output: 128
pad: 0
kernel_size: 1
stride: 1
weight_filler {
type: "xavier"
}
bias_filler {
type: "constant"
value: 0
}
}
}
layer {
name: "conv8_1_relu"
type: "ReLU"
bottom: "conv8_1"
top: "conv8_1"
}
layer {
name: "conv8_2"
type: "Convolution"
bottom: "conv8_1"
top: "conv8_2"
param {
lr_mult: 1
decay_mult: 1
}
param {
lr_mult: 2
decay_mult: 0
}
convolution_param {
num_output: 256
pad: 0
kernel_size: 3
stride: 1
weight_filler {
type: "xavier"
}
bias_filler {
type: "constant"
value: 0
}
}
}
layer {
name: "conv8_2_relu"
type: "ReLU"
bottom: "conv8_2"
top: "conv8_2"
}
layer {
name: "conv9_1"
type: "Convolution"
bottom: "conv8_2"
top: "conv9_1"
param {
lr_mult: 1
decay_mult: 1
}
param {
lr_mult: 2
decay_mult: 0
}
convolution_param {
num_output: 128
pad: 0
kernel_size: 1
stride: 1
weight_filler {
type: "xavier"
}
bias_filler {
type: "constant"
value: 0
}
}
}
layer {
name: "conv9_1_relu"
type: "ReLU"
bottom: "conv9_1"
top: "conv9_1"
}
layer {
name: "conv9_2"
type: "Convolution"
bottom: "conv9_1"
top: "conv9_2"
param {
lr_mult: 1
decay_mult: 1
}
param {
lr_mult: 2
decay_mult: 0
}
convolution_param {
num_output: 256
pad: 0
kernel_size: 3
stride: 1
weight_filler {
type: "xavier"
}
bias_filler {
type: "constant"
value: 0
}
}
}
layer {
name: "conv9_2_relu"
type: "ReLU"
bottom: "conv9_2"
top: "conv9_2"
}
layer {
name: "conv4_3_norm"
type: "Normalize"
bottom: "conv4_3"
top: "conv4_3_norm"
norm_param {
across_spatial: false
scale_filler {
type: "constant"
value: 20
}
channel_shared: false
}
}
layer {
name: "conv4_3_norm_mbox_loc"
type: "Convolution"
bottom: "conv4_3_norm"
top: "conv4_3_norm_mbox_loc"
param {
lr_mult: 1
decay_mult: 1
}
param {
lr_mult: 2
decay_mult: 0
}
convolution_param {
num_output: 16
pad: 1
kernel_size: 3
stride: 1
weight_filler {
type: "xavier"
}
bias_filler {
type: "constant"
value: 0
}
}
}
layer {
name: "conv4_3_norm_mbox_loc_perm"
type: "Permute"
bottom: "conv4_3_norm_mbox_loc"
top: "conv4_3_norm_mbox_loc_perm"
permute_param {
order: 0
order: 2
order: 3
order: 1
}
}
layer {
name: "conv4_3_norm_mbox_loc_flat"
type: "Flatten"
bottom: "conv4_3_norm_mbox_loc_perm"
top: "conv4_3_norm_mbox_loc_flat"
flatten_param {
axis: 1
}
}
layer {
name: "conv4_3_norm_mbox_conf"
type: "Convolution"
bottom: "conv4_3_norm"
top: "conv4_3_norm_mbox_conf"
param {
lr_mult: 1
decay_mult: 1
}
param {
lr_mult: 2
decay_mult: 0
}
convolution_param {
num_output: 8
pad: 1
kernel_size: 3
stride: 1
weight_filler {
type: "xavier"
}
bias_filler {
type: "constant"
value: 0
}
}
}
layer {
name: "conv4_3_norm_mbox_conf_perm"
type: "Permute"
bottom: "conv4_3_norm_mbox_conf"
top: "conv4_3_norm_mbox_conf_perm"
permute_param {
order: 0
order: 2
order: 3
order: 1
}
}
layer {
name: "conv4_3_norm_mbox_conf_flat"
type: "Flatten"
bottom: "conv4_3_norm_mbox_conf_perm"
top: "conv4_3_norm_mbox_conf_flat"
flatten_param {
axis: 1
}
}
layer {
name: "conv4_3_norm_mbox_priorbox"
type: "PriorBox"
bottom: "conv4_3_norm"
bottom: "data"
top: "conv4_3_norm_mbox_priorbox"
prior_box_param {
min_size: 30
max_size: 60
aspect_ratio: 2
flip: true
clip: false
variance: 0.1
variance: 0.1
variance: 0.2
variance: 0.2
step: 8
offset: 0.5
}
}
layer {
name: "fc7_mbox_loc"
type: "Convolution"
bottom: "fc7"
top: "fc7_mbox_loc"
param {
lr_mult: 1
decay_mult: 1
}
param {
lr_mult: 2
decay_mult: 0
}
convolution_param {
num_output: 24
pad: 1
kernel_size: 3
stride: 1
weight_filler {
type: "xavier"
}
bias_filler {
type: "constant"
value: 0
}
}
}
layer {
name: "fc7_mbox_loc_perm"
type: "Permute"
bottom: "fc7_mbox_loc"
top: "fc7_mbox_loc_perm"
permute_param {
order: 0
order: 2
order: 3
order: 1
}
}
layer {
name: "fc7_mbox_loc_flat"
type: "Flatten"
bottom: "fc7_mbox_loc_perm"
top: "fc7_mbox_loc_flat"
flatten_param {
axis: 1
}
}
layer {
name: "fc7_mbox_conf"
type: "Convolution"
bottom: "fc7"
top: "fc7_mbox_conf"
param {
lr_mult: 1
decay_mult: 1
}
param {
lr_mult: 2
decay_mult: 0
}
convolution_param {
num_output: 12
pad: 1
kernel_size: 3
stride: 1
weight_filler {
type: "xavier"
}
bias_filler {
type: "constant"
value: 0
}
}
}
layer {
name: "fc7_mbox_conf_perm"
type: "Permute"
bottom: "fc7_mbox_conf"
top: "fc7_mbox_conf_perm"
permute_param {
order: 0
order: 2
order: 3
order: 1
}
}
layer {
name: "fc7_mbox_conf_flat"
type: "Flatten"
bottom: "fc7_mbox_conf_perm"
top: "fc7_mbox_conf_flat"
flatten_param {
axis: 1
}
}
layer {
name: "fc7_mbox_priorbox"
type: "PriorBox"
bottom: "fc7"
bottom: "data"
top: "fc7_mbox_priorbox"
prior_box_param {
min_size: 60
max_size: 111
aspect_ratio: 2
aspect_ratio: 3
flip: true
clip: false
variance: 0.1
variance: 0.1
variance: 0.2
variance: 0.2
step: 16
offset: 0.5
}
}
layer {
name: "conv6_2_mbox_loc"
type: "Convolution"
bottom: "conv6_2"
top: "conv6_2_mbox_loc"
param {
lr_mult: 1
decay_mult: 1
}
param {
lr_mult: 2
decay_mult: 0
}
convolution_param {
num_output: 24
pad: 1
kernel_size: 3
stride: 1
weight_filler {
type: "xavier"
}
bias_filler {
type: "constant"
value: 0
}
}
}
layer {
name: "conv6_2_mbox_loc_perm"
type: "Permute"
bottom: "conv6_2_mbox_loc"
top: "conv6_2_mbox_loc_perm"
permute_param {
order: 0
order: 2
order: 3
order: 1
}
}
layer {
name: "conv6_2_mbox_loc_flat"
type: "Flatten"
bottom: "conv6_2_mbox_loc_perm"
top: "conv6_2_mbox_loc_flat"
flatten_param {
axis: 1
}
}
layer {
name: "conv6_2_mbox_conf"
type: "Convolution"
bottom: "conv6_2"
top: "conv6_2_mbox_conf"
param {
lr_mult: 1
decay_mult: 1
}
param {
lr_mult: 2
decay_mult: 0
}
convolution_param {
num_output: 12
pad: 1
kernel_size: 3
stride: 1
weight_filler {
type: "xavier"
}
bias_filler {
type: "constant"
value: 0
}
}
}
layer {
name: "conv6_2_mbox_conf_perm"
type: "Permute"
bottom: "conv6_2_mbox_conf"
top: "conv6_2_mbox_conf_perm"
permute_param {
order: 0
order: 2
order: 3
order: 1
}
}
layer {
name: "conv6_2_mbox_conf_flat"
type: "Flatten"
bottom: "conv6_2_mbox_conf_perm"
top: "conv6_2_mbox_conf_flat"
flatten_param {
axis: 1
}
}
layer {
name: "conv6_2_mbox_priorbox"
type: "PriorBox"
bottom: "conv6_2"
bottom: "data"
top: "conv6_2_mbox_priorbox"
prior_box_param {
min_size: 111
max_size: 162
aspect_ratio: 2
aspect_ratio: 3
flip: true
clip: false
variance: 0.1
variance: 0.1
variance: 0.2
variance: 0.2
step: 32
offset: 0.5
}
}
layer {
name: "conv7_2_mbox_loc"
type: "Convolution"
bottom: "conv7_2"
top: "conv7_2_mbox_loc"
param {
lr_mult: 1
decay_mult: 1
}
param {
lr_mult: 2
decay_mult: 0
}
convolution_param {
num_output: 24
pad: 1
kernel_size: 3
stride: 1
weight_filler {
type: "xavier"
}
bias_filler {
type: "constant"
value: 0
}
}
}
layer {
name: "conv7_2_mbox_loc_perm"
type: "Permute"
bottom: "conv7_2_mbox_loc"
top: "conv7_2_mbox_loc_perm"
permute_param {
order: 0
order: 2
order: 3
order: 1
}
}
layer {
name: "conv7_2_mbox_loc_flat"
type: "Flatten"
bottom: "conv7_2_mbox_loc_perm"
top: "conv7_2_mbox_loc_flat"
flatten_param {
axis: 1
}
}
layer {
name: "conv7_2_mbox_conf"
type: "Convolution"
bottom: "conv7_2"
top: "conv7_2_mbox_conf"
param {
lr_mult: 1
decay_mult: 1
}
param {
lr_mult: 2
decay_mult: 0
}
convolution_param {
num_output: 12
pad: 1
kernel_size: 3
stride: 1
weight_filler {
type: "xavier"
}
bias_filler {
type: "constant"
value: 0
}
}
}
layer {
name: "conv7_2_mbox_conf_perm"
type: "Permute"
bottom: "conv7_2_mbox_conf"
top: "conv7_2_mbox_conf_perm"
permute_param {
order: 0
order: 2
order: 3
order: 1
}
}
layer {
name: "conv7_2_mbox_conf_flat"
type: "Flatten"
bottom: "conv7_2_mbox_conf_perm"
top: "conv7_2_mbox_conf_flat"
flatten_param {
axis: 1
}
}
layer {
name: "conv7_2_mbox_priorbox"
type: "PriorBox"
bottom: "conv7_2"
bottom: "data"
top: "conv7_2_mbox_priorbox"
prior_box_param {
min_size: 162
max_size: 213
aspect_ratio: 2
aspect_ratio: 3
flip: true
clip: false
variance: 0.1
variance: 0.1
variance: 0.2
variance: 0.2
step: 64
offset: 0.5
}
}
layer {
name: "conv8_2_mbox_loc"
type: "Convolution"
bottom: "conv8_2"
top: "conv8_2_mbox_loc"
param {
lr_mult: 1
decay_mult: 1
}
param {
lr_mult: 2
decay_mult: 0
}
convolution_param {
num_output: 16
pad: 1
kernel_size: 3
stride: 1
weight_filler {
type: "xavier"
}
bias_filler {
type: "constant"
value: 0
}
}
}
layer {
name: "conv8_2_mbox_loc_perm"
type: "Permute"
bottom: "conv8_2_mbox_loc"
top: "conv8_2_mbox_loc_perm"
permute_param {
order: 0
order: 2
order: 3
order: 1
}
}
layer {
name: "conv8_2_mbox_loc_flat"
type: "Flatten"
bottom: "conv8_2_mbox_loc_perm"
top: "conv8_2_mbox_loc_flat"
flatten_param {
axis: 1
}
}
layer {
name: "conv8_2_mbox_conf"
type: "Convolution"
bottom: "conv8_2"
top: "conv8_2_mbox_conf"
param {
lr_mult: 1
decay_mult: 1
}
param {
lr_mult: 2
decay_mult: 0
}
convolution_param {
num_output: 8
pad: 1
kernel_size: 3
stride: 1
weight_filler {
type: "xavier"
}
bias_filler {
type: "constant"
value: 0
}
}
}
layer {
name: "conv8_2_mbox_conf_perm"
type: "Permute"
bottom: "conv8_2_mbox_conf"
top: "conv8_2_mbox_conf_perm"
permute_param {
order: 0
order: 2
order: 3
order: 1
}
}
layer {
name: "conv8_2_mbox_conf_flat"
type: "Flatten"
bottom: "conv8_2_mbox_conf_perm"
top: "conv8_2_mbox_conf_flat"
flatten_param {
axis: 1
}
}
layer {
name: "conv8_2_mbox_priorbox"
type: "PriorBox"
bottom: "conv8_2"
bottom: "data"
top: "conv8_2_mbox_priorbox"
prior_box_param {
min_size: 213
max_size: 264
aspect_ratio: 2
flip: true
clip: false
variance: 0.1
variance: 0.1
variance: 0.2
variance: 0.2
step: 100
offset: 0.5
}
}
layer {
name: "conv9_2_mbox_loc"
type: "Convolution"
bottom: "conv9_2"
top: "conv9_2_mbox_loc"
param {
lr_mult: 1
decay_mult: 1
}
param {
lr_mult: 2
decay_mult: 0
}
convolution_param {
num_output: 16
pad: 1
kernel_size: 3
stride: 1
weight_filler {
type: "xavier"
}
bias_filler {
type: "constant"
value: 0
}
}
}
layer {
name: "conv9_2_mbox_loc_perm"
type: "Permute"
bottom: "conv9_2_mbox_loc"
top: "conv9_2_mbox_loc_perm"
permute_param {
order: 0
order: 2
order: 3
order: 1
}
}
layer {
name: "conv9_2_mbox_loc_flat"
type: "Flatten"
bottom: "conv9_2_mbox_loc_perm"
top: "conv9_2_mbox_loc_flat"
flatten_param {
axis: 1
}
}
layer {
name: "conv9_2_mbox_conf"
type: "Convolution"
bottom: "conv9_2"
top: "conv9_2_mbox_conf"
param {
lr_mult: 1
decay_mult: 1
}
param {
lr_mult: 2
decay_mult: 0
}
convolution_param {
num_output: 8
pad: 1
kernel_size: 3
stride: 1
weight_filler {
type: "xavier"
}
bias_filler {
type: "constant"
value: 0
}
}
}
layer {
name: "conv9_2_mbox_conf_perm"
type: "Permute"
bottom: "conv9_2_mbox_conf"
top: "conv9_2_mbox_conf_perm"
permute_param {
order: 0
order: 2
order: 3
order: 1
}
}
layer {
name: "conv9_2_mbox_conf_flat"
type: "Flatten"
bottom: "conv9_2_mbox_conf_perm"
top: "conv9_2_mbox_conf_flat"
flatten_param {
axis: 1
}
}
layer {
name: "conv9_2_mbox_priorbox"
type: "PriorBox"
bottom: "conv9_2"
bottom: "data"
top: "conv9_2_mbox_priorbox"
prior_box_param {
min_size: 264
max_size: 315
aspect_ratio: 2
flip: true
clip: false
variance: 0.1
variance: 0.1
variance: 0.2
variance: 0.2
step: 300
offset: 0.5
}
}
layer {
name: "mbox_loc"
type: "Concat"
bottom: "conv4_3_norm_mbox_loc_flat"
bottom: "fc7_mbox_loc_flat"
bottom: "conv6_2_mbox_loc_flat"
bottom: "conv7_2_mbox_loc_flat"
bottom: "conv8_2_mbox_loc_flat"
bottom: "conv9_2_mbox_loc_flat"
top: "mbox_loc"
concat_param {
axis: 1
}
}
layer {
name: "mbox_conf"
type: "Concat"
bottom: "conv4_3_norm_mbox_conf_flat"
bottom: "fc7_mbox_conf_flat"
bottom: "conv6_2_mbox_conf_flat"
bottom: "conv7_2_mbox_conf_flat"
bottom: "conv8_2_mbox_conf_flat"
bottom: "conv9_2_mbox_conf_flat"
top: "mbox_conf"
concat_param {
axis: 1
}
}
layer {
name: "mbox_priorbox"
type: "Concat"
bottom: "conv4_3_norm_mbox_priorbox"
bottom: "fc7_mbox_priorbox"
bottom: "conv6_2_mbox_priorbox"
bottom: "conv7_2_mbox_priorbox"
bottom: "conv8_2_mbox_priorbox"
bottom: "conv9_2_mbox_priorbox"
top: "mbox_priorbox"
concat_param {
axis: 2
}
}
layer {
name: "mbox_loss"
type: "MultiBoxLoss"
bottom: "mbox_loc"
bottom: "mbox_conf"
bottom: "mbox_priorbox"
bottom: "label"
top: "mbox_loss"
include {
phase: TRAIN
}
propagate_down: true
propagate_down: true
propagate_down: false
propagate_down: false
loss_param {
normalization: VALID
}
multibox_loss_param {
loc_loss_type: SMOOTH_L1
conf_loss_type: SOFTMAX
loc_weight: 1
num_classes: 2
share_location: true
match_type: PER_PREDICTION
overlap_threshold: 0.5
use_prior_for_matching: true
background_label_id: 0
use_difficult_gt: true
neg_pos_ratio: 3
neg_overlap: 0.5
code_type: CENTER_SIZE
ignore_cross_boundary_bbox: false
mining_type: MAX_NEGATIVE
}
}
I0330 12:49:46.060093 185894336 layer_factory.hpp:77] Creating layer data
I0330 12:49:46.062091 185894336 net.cpp:100] Creating Layer data
I0330 12:49:46.062108 185894336 net.cpp:408] data -> data
I0330 12:49:46.062125 185894336 net.cpp:408] data -> label
I0330 12:49:46.062324 140492800 db_lmdb.cpp:35] Opened lmdb examples/widerface/widerface_trainval_lmdb
I0330 12:49:46.072914 185894336 annotated_data_layer.cpp:62] output data size: 32,3,300,300
I0330 12:49:46.131165 185894336 net.cpp:150] Setting up data
I0330 12:49:46.131192 185894336 net.cpp:157] Top shape: 32 3 300 300 (8640000)
I0330 12:49:46.131201 185894336 net.cpp:157] Top shape: 1 1 3 8 (24)
I0330 12:49:46.131207 185894336 net.cpp:165] Memory required for data: 34560096
I0330 12:49:46.131278 185894336 layer_factory.hpp:77] Creating layer data_data_0_split
I0330 12:49:46.131292 185894336 net.cpp:100] Creating Layer data_data_0_split
I0330 12:49:46.131299 185894336 net.cpp:434] data_data_0_split <- data
I0330 12:49:46.131310 185894336 net.cpp:408] data_data_0_split -> data_data_0_split_0
I0330 12:49:46.131320 185894336 net.cpp:408] data_data_0_split -> data_data_0_split_1
I0330 12:49:46.131327 185894336 net.cpp:408] data_data_0_split -> data_data_0_split_2
I0330 12:49:46.131333 185894336 net.cpp:408] data_data_0_split -> data_data_0_split_3
I0330 12:49:46.131340 185894336 net.cpp:408] data_data_0_split -> data_data_0_split_4
I0330 12:49:46.131345 185894336 net.cpp:408] data_data_0_split -> data_data_0_split_5
I0330 12:49:46.131351 185894336 net.cpp:408] data_data_0_split -> data_data_0_split_6
I0330 12:49:46.131363 185894336 net.cpp:150] Setting up data_data_0_split
I0330 12:49:46.131368 185894336 net.cpp:157] Top shape: 32 3 300 300 (8640000)
I0330 12:49:46.131374 185894336 net.cpp:157] Top shape: 32 3 300 300 (8640000)
I0330 12:49:46.131379 185894336 net.cpp:157] Top shape: 32 3 300 300 (8640000)
I0330 12:49:46.131384 185894336 net.cpp:157] Top shape: 32 3 300 300 (8640000)
I0330 12:49:46.131389 185894336 net.cpp:157] Top shape: 32 3 300 300 (8640000)
I0330 12:49:46.131394 185894336 net.cpp:157] Top shape: 32 3 300 300 (8640000)
I0330 12:49:46.131399 185894336 net.cpp:157] Top shape: 32 3 300 300 (8640000)
I0330 12:49:46.131405 185894336 net.cpp:165] Memory required for data: 276480096
I0330 12:49:46.131409 185894336 layer_factory.hpp:77] Creating layer conv1_1
I0330 12:49:46.131419 185894336 net.cpp:100] Creating Layer conv1_1
I0330 12:49:46.131424 185894336 net.cpp:434] conv1_1 <- data_data_0_split_0
I0330 12:49:46.131431 185894336 net.cpp:408] conv1_1 -> conv1_1
I0330 12:49:46.131709 185894336 net.cpp:150] Setting up conv1_1
I0330 12:49:46.131716 185894336 net.cpp:157] Top shape: 32 64 300 300 (184320000)
I0330 12:49:46.131722 185894336 net.cpp:165] Memory required for data: 1013760096
I0330 12:49:46.131736 185894336 layer_factory.hpp:77] Creating layer relu1_1
I0330 12:49:46.131744 185894336 net.cpp:100] Creating Layer relu1_1
I0330 12:49:46.131749 185894336 net.cpp:434] relu1_1 <- conv1_1
I0330 12:49:46.131754 185894336 net.cpp:395] relu1_1 -> conv1_1 (in-place)
I0330 12:49:46.131760 185894336 net.cpp:150] Setting up relu1_1
I0330 12:49:46.131764 185894336 net.cpp:157] Top shape: 32 64 300 300 (184320000)
I0330 12:49:46.131770 185894336 net.cpp:165] Memory required for data: 1751040096
I0330 12:49:46.131774 185894336 layer_factory.hpp:77] Creating layer conv1_2
I0330 12:49:46.131783 185894336 net.cpp:100] Creating Layer conv1_2
I0330 12:49:46.131788 185894336 net.cpp:434] conv1_2 <- conv1_1
I0330 12:49:46.131793 185894336 net.cpp:408] conv1_2 -> conv1_2
I0330 12:49:46.132225 185894336 net.cpp:150] Setting up conv1_2
I0330 12:49:46.132232 185894336 net.cpp:157] Top shape: 32 64 300 300 (184320000)
I0330 12:49:46.132238 185894336 net.cpp:165] Memory required for data: 2488320096
I0330 12:49:46.132246 185894336 layer_factory.hpp:77] Creating layer relu1_2
I0330 12:49:46.132256 185894336 net.cpp:100] Creating Layer relu1_2
I0330 12:49:46.132261 185894336 net.cpp:434] relu1_2 <- conv1_2
I0330 12:49:46.132266 185894336 net.cpp:395] relu1_2 -> conv1_2 (in-place)
I0330 12:49:46.132272 185894336 net.cpp:150] Setting up relu1_2
I0330 12:49:46.132277 185894336 net.cpp:157] Top shape: 32 64 300 300 (184320000)
I0330 12:49:46.132282 185894336 net.cpp:165] Memory required for data: 3225600096
I0330 12:49:46.132287 185894336 layer_factory.hpp:77] Creating layer pool1
I0330 12:49:46.132292 185894336 net.cpp:100] Creating Layer pool1
I0330 12:49:46.132297 185894336 net.cpp:434] pool1 <- conv1_2
I0330 12:49:46.132302 185894336 net.cpp:408] pool1 -> pool1
I0330 12:49:46.132313 185894336 net.cpp:150] Setting up pool1
I0330 12:49:46.132318 185894336 net.cpp:157] Top shape: 32 64 150 150 (46080000)
I0330 12:49:46.132323 185894336 net.cpp:165] Memory required for data: 3409920096
I0330 12:49:46.132349 185894336 layer_factory.hpp:77] Creating layer conv2_1
I0330 12:49:46.132359 185894336 net.cpp:100] Creating Layer conv2_1
I0330 12:49:46.132364 185894336 net.cpp:434] conv2_1 <- pool1
I0330 12:49:46.132369 185894336 net.cpp:408] conv2_1 -> conv2_1
I0330 12:49:46.133023 185894336 net.cpp:150] Setting up conv2_1
I0330 12:49:46.133031 185894336 net.cpp:157] Top shape: 32 128 150 150 (92160000)
I0330 12:49:46.133038 185894336 net.cpp:165] Memory required for data: 3778560096
I0330 12:49:46.133044 185894336 layer_factory.hpp:77] Creating layer relu2_1
I0330 12:49:46.133051 185894336 net.cpp:100] Creating Layer relu2_1
I0330 12:49:46.133056 185894336 net.cpp:434] relu2_1 <- conv2_1
I0330 12:49:46.133061 185894336 net.cpp:395] relu2_1 -> conv2_1 (in-place)
I0330 12:49:46.133074 185894336 net.cpp:150] Setting up relu2_1
I0330 12:49:46.133080 185894336 net.cpp:157] Top shape: 32 128 150 150 (92160000)
I0330 12:49:46.133085 185894336 net.cpp:165] Memory required for data: 4147200096
I0330 12:49:46.133092 185894336 layer_factory.hpp:77] Creating layer conv2_2
I0330 12:49:46.133100 185894336 net.cpp:100] Creating Layer conv2_2
I0330 12:49:46.133105 185894336 net.cpp:434] conv2_2 <- conv2_1
I0330 12:49:46.133111 185894336 net.cpp:408] conv2_2 -> conv2_2
I0330 12:49:46.134320 185894336 net.cpp:150] Setting up conv2_2
I0330 12:49:46.134327 185894336 net.cpp:157] Top shape: 32 128 150 150 (92160000)
I0330 12:49:46.134333 185894336 net.cpp:165] Memory required for data: 4515840096
I0330 12:49:46.134338 185894336 layer_factory.hpp:77] Creating layer relu2_2
I0330 12:49:46.134343 185894336 net.cpp:100] Creating Layer relu2_2
I0330 12:49:46.134348 185894336 net.cpp:434] relu2_2 <- conv2_2
I0330 12:49:46.134353 185894336 net.cpp:395] relu2_2 -> conv2_2 (in-place)
I0330 12:49:46.134358 185894336 net.cpp:150] Setting up relu2_2
I0330 12:49:46.134362 185894336 net.cpp:157] Top shape: 32 128 150 150 (92160000)
I0330 12:49:46.134367 185894336 net.cpp:165] Memory required for data: 4884480096
I0330 12:49:46.134372 185894336 layer_factory.hpp:77] Creating layer pool2
I0330 12:49:46.134383 185894336 net.cpp:100] Creating Layer pool2
I0330 12:49:46.134388 185894336 net.cpp:434] pool2 <- conv2_2
I0330 12:49:46.134394 185894336 net.cpp:408] pool2 -> pool2
I0330 12:49:46.134400 185894336 net.cpp:150] Setting up pool2
I0330 12:49:46.134405 185894336 net.cpp:157] Top shape: 32 128 75 75 (23040000)
I0330 12:49:46.134410 185894336 net.cpp:165] Memory required for data: 4976640096
I0330 12:49:46.134415 185894336 layer_factory.hpp:77] Creating layer conv3_1
I0330 12:49:46.134421 185894336 net.cpp:100] Creating Layer conv3_1
I0330 12:49:46.134426 185894336 net.cpp:434] conv3_1 <- pool2
I0330 12:49:46.134433 185894336 net.cpp:408] conv3_1 -> conv3_1
I0330 12:49:46.136783 185894336 net.cpp:150] Setting up conv3_1
I0330 12:49:46.136791 185894336 net.cpp:157] Top shape: 32 256 75 75 (46080000)
I0330 12:49:46.136796 185894336 net.cpp:165] Memory required for data: 5160960096
I0330 12:49:46.136803 185894336 layer_factory.hpp:77] Creating layer relu3_1
I0330 12:49:46.136809 185894336 net.cpp:100] Creating Layer relu3_1
I0330 12:49:46.136814 185894336 net.cpp:434] relu3_1 <- conv3_1
I0330 12:49:46.136819 185894336 net.cpp:395] relu3_1 -> conv3_1 (in-place)
I0330 12:49:46.136824 185894336 net.cpp:150] Setting up relu3_1
I0330 12:49:46.136828 185894336 net.cpp:157] Top shape: 32 256 75 75 (46080000)
I0330 12:49:46.136834 185894336 net.cpp:165] Memory required for data: 5345280096
I0330 12:49:46.136838 185894336 layer_factory.hpp:77] Creating layer conv3_2
I0330 12:49:46.136845 185894336 net.cpp:100] Creating Layer conv3_2
I0330 12:49:46.136850 185894336 net.cpp:434] conv3_2 <- conv3_1
I0330 12:49:46.136855 185894336 net.cpp:408] conv3_2 -> conv3_2
I0330 12:49:46.141605 185894336 net.cpp:150] Setting up conv3_2
I0330 12:49:46.141614 185894336 net.cpp:157] Top shape: 32 256 75 75 (46080000)
I0330 12:49:46.141620 185894336 net.cpp:165] Memory required for data: 5529600096
I0330 12:49:46.141626 185894336 layer_factory.hpp:77] Creating layer relu3_2
I0330 12:49:46.141659 185894336 net.cpp:100] Creating Layer relu3_2
I0330 12:49:46.141665 185894336 net.cpp:434] relu3_2 <- conv3_2
I0330 12:49:46.141670 185894336 net.cpp:395] relu3_2 -> conv3_2 (in-place)
I0330 12:49:46.141676 185894336 net.cpp:150] Setting up relu3_2
I0330 12:49:46.141680 185894336 net.cpp:157] Top shape: 32 256 75 75 (46080000)
I0330 12:49:46.141686 185894336 net.cpp:165] Memory required for data: 5713920096
I0330 12:49:46.141690 185894336 layer_factory.hpp:77] Creating layer conv3_3
I0330 12:49:46.141700 185894336 net.cpp:100] Creating Layer conv3_3
I0330 12:49:46.141705 185894336 net.cpp:434] conv3_3 <- conv3_2
I0330 12:49:46.141710 185894336 net.cpp:408] conv3_3 -> conv3_3
I0330 12:49:46.146356 185894336 net.cpp:150] Setting up conv3_3
I0330 12:49:46.146363 185894336 net.cpp:157] Top shape: 32 256 75 75 (46080000)
I0330 12:49:46.146369 185894336 net.cpp:165] Memory required for data: 5898240096
I0330 12:49:46.146374 185894336 layer_factory.hpp:77] Creating layer relu3_3
I0330 12:49:46.146380 185894336 net.cpp:100] Creating Layer relu3_3
I0330 12:49:46.146384 185894336 net.cpp:434] relu3_3 <- conv3_3
I0330 12:49:46.146389 185894336 net.cpp:395] relu3_3 -> conv3_3 (in-place)
I0330 12:49:46.146395 185894336 net.cpp:150] Setting up relu3_3
I0330 12:49:46.146399 185894336 net.cpp:157] Top shape: 32 256 75 75 (46080000)
I0330 12:49:46.146404 185894336 net.cpp:165] Memory required for data: 6082560096
I0330 12:49:46.146409 185894336 layer_factory.hpp:77] Creating layer pool3
I0330 12:49:46.146414 185894336 net.cpp:100] Creating Layer pool3
I0330 12:49:46.146419 185894336 net.cpp:434] pool3 <- conv3_3
I0330 12:49:46.146423 185894336 net.cpp:408] pool3 -> pool3
I0330 12:49:46.146430 185894336 net.cpp:150] Setting up pool3
I0330 12:49:46.146435 185894336 net.cpp:157] Top shape: 32 256 38 38 (11829248)
I0330 12:49:46.146440 185894336 net.cpp:165] Memory required for data: 6129877088
I0330 12:49:46.146445 185894336 layer_factory.hpp:77] Creating layer conv4_1
I0330 12:49:46.146450 185894336 net.cpp:100] Creating Layer conv4_1
I0330 12:49:46.146456 185894336 net.cpp:434] conv4_1 <- pool3
I0330 12:49:46.146461 185894336 net.cpp:408] conv4_1 -> conv4_1
I0330 12:49:46.157172 185894336 net.cpp:150] Setting up conv4_1
I0330 12:49:46.157189 185894336 net.cpp:157] Top shape: 32 512 38 38 (23658496)
I0330 12:49:46.157196 185894336 net.cpp:165] Memory required for data: 6224511072
I0330 12:49:46.157203 185894336 layer_factory.hpp:77] Creating layer relu4_1
I0330 12:49:46.157212 185894336 net.cpp:100] Creating Layer relu4_1
I0330 12:49:46.157217 185894336 net.cpp:434] relu4_1 <- conv4_1
I0330 12:49:46.157223 185894336 net.cpp:395] relu4_1 -> conv4_1 (in-place)
I0330 12:49:46.157230 185894336 net.cpp:150] Setting up relu4_1
I0330 12:49:46.157234 185894336 net.cpp:157] Top shape: 32 512 38 38 (23658496)
I0330 12:49:46.157239 185894336 net.cpp:165] Memory required for data: 6319145056
I0330 12:49:46.157244 185894336 layer_factory.hpp:77] Creating layer conv4_2
I0330 12:49:46.157253 185894336 net.cpp:100] Creating Layer conv4_2
I0330 12:49:46.157258 185894336 net.cpp:434] conv4_2 <- conv4_1
I0330 12:49:46.157263 185894336 net.cpp:408] conv4_2 -> conv4_2
I0330 12:49:46.173918 185894336 net.cpp:150] Setting up conv4_2
I0330 12:49:46.173931 185894336 net.cpp:157] Top shape: 32 512 38 38 (23658496)
I0330 12:49:46.173938 185894336 net.cpp:165] Memory required for data: 6413779040
I0330 12:49:46.173947 185894336 layer_factory.hpp:77] Creating layer relu4_2
I0330 12:49:46.173954 185894336 net.cpp:100] Creating Layer relu4_2
I0330 12:49:46.173959 185894336 net.cpp:434] relu4_2 <- conv4_2
I0330 12:49:46.173964 185894336 net.cpp:395] relu4_2 -> conv4_2 (in-place)
I0330 12:49:46.173970 185894336 net.cpp:150] Setting up relu4_2
I0330 12:49:46.173974 185894336 net.cpp:157] Top shape: 32 512 38 38 (23658496)
I0330 12:49:46.173979 185894336 net.cpp:165] Memory required for data: 6508413024
I0330 12:49:46.173982 185894336 layer_factory.hpp:77] Creating layer conv4_3
I0330 12:49:46.173990 185894336 net.cpp:100] Creating Layer conv4_3
I0330 12:49:46.174033 185894336 net.cpp:434] conv4_3 <- conv4_2
I0330 12:49:46.174039 185894336 net.cpp:408] conv4_3 -> conv4_3
I0330 12:49:46.191447 185894336 net.cpp:150] Setting up conv4_3
I0330 12:49:46.191462 185894336 net.cpp:157] Top shape: 32 512 38 38 (23658496)
I0330 12:49:46.191469 185894336 net.cpp:165] Memory required for data: 6603047008
I0330 12:49:46.191475 185894336 layer_factory.hpp:77] Creating layer relu4_3
I0330 12:49:46.191483 185894336 net.cpp:100] Creating Layer relu4_3
I0330 12:49:46.191488 185894336 net.cpp:434] relu4_3 <- conv4_3
I0330 12:49:46.191493 185894336 net.cpp:395] relu4_3 -> conv4_3 (in-place)
I0330 12:49:46.191506 185894336 net.cpp:150] Setting up relu4_3
I0330 12:49:46.191510 185894336 net.cpp:157] Top shape: 32 512 38 38 (23658496)
I0330 12:49:46.191514 185894336 net.cpp:165] Memory required for data: 6697680992
I0330 12:49:46.191519 185894336 layer_factory.hpp:77] Creating layer conv4_3_relu4_3_0_split
I0330 12:49:46.191524 185894336 net.cpp:100] Creating Layer conv4_3_relu4_3_0_split
I0330 12:49:46.191529 185894336 net.cpp:434] conv4_3_relu4_3_0_split <- conv4_3
I0330 12:49:46.191538 185894336 net.cpp:408] conv4_3_relu4_3_0_split -> conv4_3_relu4_3_0_split_0
I0330 12:49:46.191545 185894336 net.cpp:408] conv4_3_relu4_3_0_split -> conv4_3_relu4_3_0_split_1
I0330 12:49:46.191552 185894336 net.cpp:150] Setting up conv4_3_relu4_3_0_split
I0330 12:49:46.191561 185894336 net.cpp:157] Top shape: 32 512 38 38 (23658496)
I0330 12:49:46.191566 185894336 net.cpp:157] Top shape: 32 512 38 38 (23658496)
I0330 12:49:46.191571 185894336 net.cpp:165] Memory required for data: 6886948960
I0330 12:49:46.191576 185894336 layer_factory.hpp:77] Creating layer pool4
I0330 12:49:46.191581 185894336 net.cpp:100] Creating Layer pool4
I0330 12:49:46.191586 185894336 net.cpp:434] pool4 <- conv4_3_relu4_3_0_split_0
I0330 12:49:46.191591 185894336 net.cpp:408] pool4 -> pool4
I0330 12:49:46.191597 185894336 net.cpp:150] Setting up pool4
I0330 12:49:46.191602 185894336 net.cpp:157] Top shape: 32 512 19 19 (5914624)
I0330 12:49:46.191606 185894336 net.cpp:165] Memory required for data: 6910607456
I0330 12:49:46.191610 185894336 layer_factory.hpp:77] Creating layer conv5_1
I0330 12:49:46.191618 185894336 net.cpp:100] Creating Layer conv5_1
I0330 12:49:46.191623 185894336 net.cpp:434] conv5_1 <- pool4
I0330 12:49:46.191630 185894336 net.cpp:408] conv5_1 -> conv5_1
I0330 12:49:46.208515 185894336 net.cpp:150] Setting up conv5_1
I0330 12:49:46.208528 185894336 net.cpp:157] Top shape: 32 512 19 19 (5914624)
I0330 12:49:46.208534 185894336 net.cpp:165] Memory required for data: 6934265952
I0330 12:49:46.208540 185894336 layer_factory.hpp:77] Creating layer relu5_1
I0330 12:49:46.208547 185894336 net.cpp:100] Creating Layer relu5_1
I0330 12:49:46.208552 185894336 net.cpp:434] relu5_1 <- conv5_1
I0330 12:49:46.208557 185894336 net.cpp:395] relu5_1 -> conv5_1 (in-place)
I0330 12:49:46.208564 185894336 net.cpp:150] Setting up relu5_1
I0330 12:49:46.208567 185894336 net.cpp:157] Top shape: 32 512 19 19 (5914624)
I0330 12:49:46.208572 185894336 net.cpp:165] Memory required for data: 6957924448
I0330 12:49:46.208575 185894336 layer_factory.hpp:77] Creating layer conv5_2
I0330 12:49:46.208582 185894336 net.cpp:100] Creating Layer conv5_2
I0330 12:49:46.208585 185894336 net.cpp:434] conv5_2 <- conv5_1
I0330 12:49:46.208590 185894336 net.cpp:408] conv5_2 -> conv5_2
I0330 12:49:46.220824 185894336 net.cpp:150] Setting up conv5_2
I0330 12:49:46.220836 185894336 net.cpp:157] Top shape: 32 512 19 19 (5914624)
I0330 12:49:46.220841 185894336 net.cpp:165] Memory required for data: 6981582944
I0330 12:49:46.220847 185894336 layer_factory.hpp:77] Creating layer relu5_2
I0330 12:49:46.220854 185894336 net.cpp:100] Creating Layer relu5_2
I0330 12:49:46.220857 185894336 net.cpp:434] relu5_2 <- conv5_2
I0330 12:49:46.220862 185894336 net.cpp:395] relu5_2 -> conv5_2 (in-place)
I0330 12:49:46.220870 185894336 net.cpp:150] Setting up relu5_2
I0330 12:49:46.220872 185894336 net.cpp:157] Top shape: 32 512 19 19 (5914624)
I0330 12:49:46.220877 185894336 net.cpp:165] Memory required for data: 7005241440
I0330 12:49:46.220909 185894336 layer_factory.hpp:77] Creating layer conv5_3
I0330 12:49:46.220918 185894336 net.cpp:100] Creating Layer conv5_3
I0330 12:49:46.220922 185894336 net.cpp:434] conv5_3 <- conv5_2
I0330 12:49:46.220927 185894336 net.cpp:408] conv5_3 -> conv5_3
I0330 12:49:46.237524 185894336 net.cpp:150] Setting up conv5_3
I0330 12:49:46.237536 185894336 net.cpp:157] Top shape: 32 512 19 19 (5914624)
I0330 12:49:46.237542 185894336 net.cpp:165] Memory required for data: 7028899936
I0330 12:49:46.237547 185894336 layer_factory.hpp:77] Creating layer relu5_3
I0330 12:49:46.237560 185894336 net.cpp:100] Creating Layer relu5_3
I0330 12:49:46.237565 185894336 net.cpp:434] relu5_3 <- conv5_3
I0330 12:49:46.237571 185894336 net.cpp:395] relu5_3 -> conv5_3 (in-place)
I0330 12:49:46.237576 185894336 net.cpp:150] Setting up relu5_3
I0330 12:49:46.237581 185894336 net.cpp:157] Top shape: 32 512 19 19 (5914624)
I0330 12:49:46.237584 185894336 net.cpp:165] Memory required for data: 7052558432
I0330 12:49:46.237588 185894336 layer_factory.hpp:77] Creating layer pool5
I0330 12:49:46.237593 185894336 net.cpp:100] Creating Layer pool5
I0330 12:49:46.237597 185894336 net.cpp:434] pool5 <- conv5_3
I0330 12:49:46.237602 185894336 net.cpp:408] pool5 -> pool5
I0330 12:49:46.237608 185894336 net.cpp:150] Setting up pool5
I0330 12:49:46.237613 185894336 net.cpp:157] Top shape: 32 512 19 19 (5914624)
I0330 12:49:46.237617 185894336 net.cpp:165] Memory required for data: 7076216928
I0330 12:49:46.237622 185894336 layer_factory.hpp:77] Creating layer fc6
I0330 12:49:46.237627 185894336 net.cpp:100] Creating Layer fc6
I0330 12:49:46.237632 185894336 net.cpp:434] fc6 <- pool5
I0330 12:49:46.237637 185894336 net.cpp:408] fc6 -> fc6
F0330 12:49:46.254192 141029376 math_functions.cpp:250] Check failed: a <= b (0 vs. -1.19209e-07)
*** Check failure stack trace: ***
@ 0x1032f3a27 google::LogMessage::Flush()
@ 0x1032f6deb google::LogMessageFatal::~LogMessageFatal()
@ 0x1032f4357 google::LogMessageFatal::~LogMessageFatal()
@ 0x10307b8c5 caffe::caffe_rng_uniform<>()
@ 0x1030847d4 caffe::SampleBBox()
@ 0x103084a70 caffe::GenerateSamples()
@ 0x103084bc4 caffe::GenerateBatchSamples()
@ 0x10303b662 caffe::AnnotatedDataLayer<>::load_batch()
@ 0x102fe3b0c caffe::BasePrefetchingDataLayer<>::InternalThreadEntry()
@ 0x1077c42c8 boost::(anonymous namespace)::thread_proxy()
@ 0x7fff70cac2eb _pthread_body
@ 0x7fff70caf249 _pthread_start
Process finished with exit code 0
1 Answer
qq_Zl_26 (original poster)
2019-03-30
A workaround I found online is to modify this function in src/caffe/util/math_functions.cpp (shown below), but what is the root cause, and will this change affect the rest of the training?
template <typename Dtype>
void caffe_rng_uniform(const int n, Dtype a, Dtype b, Dtype* r) {
  CHECK_GE(n, 0);
  CHECK(r);
  // Workaround: if rounding left the lower bound a slightly greater than the
  // upper bound b, swap them instead of letting the check below abort.
  if (a > b) {
    Dtype c = a;
    a = b;
    b = c;
  }
  CHECK_LE(a, b);
  boost::uniform_real<Dtype> random_distribution(a, caffe_nextafter<Dtype>(b));
  boost::variate_generator<caffe::rng_t*, boost::uniform_real<Dtype> >
      variate_generator(caffe_rng(), random_distribution);
  for (int i = 0; i < n; ++i) {
    r[i] = variate_generator();
  }
}
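For context, the stack trace above shows the failure coming from the SSD data-augmentation path (AnnotatedDataLayer::load_batch -> GenerateBatchSamples -> SampleBBox -> caffe_rng_uniform). In SampleBBox (src/caffe/util/sampler.cpp), a crop scale and aspect ratio are drawn from the batch_sampler ranges, the aspect ratio is clamped into [scale^2, 1/scale^2], and the crop offset is then drawn from [0, 1 - bbox_width]. Because these intermediate values are 32-bit floats, bbox_width can round to just above 1, so the upper bound comes out around -1.19e-07 (roughly minus one float ulp at 1.0, i.e. about -FLT_EPSILON, matching the value in the log) and the CHECK_LE(a, b) inside caffe_rng_uniform aborts. The standalone sketch below only mimics that arithmetic; it is not the actual Caffe source, and the variable names are illustrative. It shows how the bound can go slightly negative with this prototxt's sampler settings (min_scale 0.3, max_scale 1):

#include <cmath>
#include <cstdio>

int main() {
  int bad = 0;
  // Sweep candidate crop scales in [0.3, 1]; the real sampler draws them at random.
  for (int i = 0; i <= 100000; ++i) {
    float scale = 0.3f + 0.7f * (i / 100000.0f);
    // Worst case of the aspect-ratio clamp: aspect_ratio == 1 / scale^2.
    float aspect_ratio = static_cast<float>(1.0 / std::pow(scale, 2.0));
    // Same formula the sampler uses for the crop width (normalized coordinates).
    float bbox_width = scale * std::sqrt(aspect_ratio);
    // This is the upper bound later handed to caffe_rng_uniform(1, 0.f, upper, ...).
    float upper = 1.0f - bbox_width;
    if (upper < 0.0f && bad++ < 3) {
      std::printf("scale = %.9g -> 1 - bbox_width = %g\n", scale, upper);
    }
  }
  std::printf("%d of 100001 scales give a slightly negative upper bound\n", bad);
  return 0;
}

If that is indeed what is happening here, a and b differ only by machine epsilon, so swapping them (or clamping the bound to be non-negative at the call site in sampler.cpp, another workaround that is sometimes suggested) merely collapses the offset range to a single point for that one sample; it should not noticeably change the augmented crops or hurt the rest of training.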