regression model
2020.06.18 23:20
댓글 6
-
WHRIA
2020.06.18 23:25
-
WHRIA
2020.06.19 22:06
-
WHRIA
2020.06.20 21:22
# Global average pooling: collapses the 7x7 conv5_3 feature map to a
# 1x1 value per channel, producing the feature vector fed to the
# fully-connected head below.
layer {
name: "pool5/7x7_s1"
type: "Pooling"
bottom: "conv5_3"
top: "pool5/7x7_s1"
pooling_param {
pool: AVE
kernel_size: 7
stride: 1
}
}
# Single-output fully-connected regression head on the pooled features.
# Weights: lr x1 with weight decay; bias: lr x2, no decay (standard
# Caffe fine-tuning convention). Gaussian init (std 0.01) for weights,
# zero-constant init for the bias.
layer {
name: "whria_classifier"
type: "InnerProduct"
bottom: "pool5/7x7_s1"
top: "whria_classifier"
param {
lr_mult: 1.0
decay_mult: 1.0
}
param {
lr_mult: 2.0
decay_mult: 0.0
}
inner_product_param {
num_output: 1
weight_filler{
type: "gaussian"
std:0.01
}
bias_filler{
type:"constant"
value:0
}
}
}
# Euclidean (L2) loss between the scalar network output and "label":
# trains whria_classifier as a regression target.
layer {
bottom: "whria_classifier"
bottom: "label"
top: "loss"
type: "EuclideanLoss"
name: "loss"
} -
WHRIA
2020.06.20 21:24
# Two-way classification head on the pooled features.
# FIX: the original specified no weight_filler, so Caffe fell back to
# its default constant-0 filler. Explicit gaussian/constant fillers are
# added here, matching the other InnerProduct layers in this file.
layer {
name: "whria_classifier"
type: "InnerProduct"
bottom: "pool5/7x7_s1"
top: "whria_classifier"
param {
lr_mult: 1.0
decay_mult: 1.0
}
param {
lr_mult: 2.0
decay_mult: 0.0
}
inner_product_param {
num_output: 2
weight_filler {
type: "gaussian"
std: 0.01
}
bias_filler {
type: "constant"
value: 0
}
}
}
# Softmax cross-entropy loss over the two logits vs. the integer label.
layer {
bottom: "whria_classifier"
bottom: "label"
top: "loss"
name: "loss"
type: "SoftmaxWithLoss"
}
# Top-1 accuracy on the raw logits, reported only in the TEST phase.
layer {
name: "accuracy"
type: "Accuracy"
bottom: "whria_classifier"
bottom: "label"
top: "accuracy"
include {
phase: TEST
}
} -
WHRIA
2020.06.20 21:39
# Global average pooling (same as the earlier snippet): 7x7 conv5_3
# feature map -> 1x1 per channel, feeding the two-layer FC head below.
layer {
name: "pool5/7x7_s1"
type: "Pooling"
bottom: "conv5_3"
top: "pool5/7x7_s1"
pooling_param {
pool: AVE
kernel_size: 7
stride: 1
}
}
# 16-unit hidden FC layer of the regression head.
# FIX: no weight_filler was specified, so Caffe used its default
# constant-0 filler. A zero-initialized layer followed by ReLU (whose
# gradient at 0 is 0) and a downstream FC yields zero gradients
# everywhere, so the head could never train. Use the same gaussian/
# constant init as the final layer in this snippet.
layer {
name: "whria_classifier1"
type: "InnerProduct"
bottom: "pool5/7x7_s1"
top: "whria_classifier1"
param {
lr_mult: 1.0
decay_mult: 1.0
}
param {
lr_mult: 2.0
decay_mult: 0.0
}
inner_product_param {
num_output: 16
weight_filler {
type: "gaussian"
std: 0.01
}
bias_filler {
type: "constant"
value: 0
}
}
}
# In-place ReLU non-linearity on the 16-unit hidden layer.
layer {
name: "relu_whria1"
type: "ReLU"
bottom:"whria_classifier1"
top: "whria_classifier1"
}
# In-place 50% dropout for regularization (active in TRAIN only).
# NOTE(review): named "drop16" here but "drop_whria1" in the later
# revision of this snippet — layer names differ across versions.
layer {
name : "drop16"
type : "Dropout"
bottom : "whria_classifier1"
top: "whria_classifier1"
dropout_param {
dropout_ratio:0.5
}
}
# Final 16 -> 1 regression output layer.
# FIX: added explicit param blocks so the weight/bias learning-rate and
# decay multipliers match every other InnerProduct layer in this file
# (bias: 2x lr, no weight decay); without them both defaulted to 1.0.
layer {
name: "whria_classifier2"
type: "InnerProduct"
bottom: "whria_classifier1"
top: "whria_classifier2"
param {
lr_mult: 1.0
decay_mult: 1.0
}
param {
lr_mult: 2.0
decay_mult: 0.0
}
inner_product_param {
num_output: 1
weight_filler {
type: "gaussian"
std: 0.01
}
bias_filler {
type: "constant"
value: 0
}
}
}
# Euclidean (L2) regression loss between the scalar output of the
# two-layer head and "label".
layer {
bottom: "whria_classifier2"
bottom: "label"
top: "loss"
type: "EuclideanLoss"
name: "loss"
} -
WHRIA
2020.06.20 22:12
# First 32-unit hidden FC layer of the three-layer regression head.
# FIX: no weight_filler was specified, so Caffe used its default
# constant-0 filler. Zero weights + ReLU (zero gradient at 0) + a
# zero-initialized downstream FC means no gradient ever flows and the
# head cannot train. Use the same init as whria_classifier_final.
layer {
name: "whria_classifier1"
type: "InnerProduct"
bottom: "pool5/7x7_s1"
top: "whria_classifier1"
param {
lr_mult: 1.0
decay_mult: 1.0
}
param {
lr_mult: 2.0
decay_mult: 0.0
}
inner_product_param {
num_output: 32
weight_filler {
type: "gaussian"
std: 0.01
}
bias_filler {
type: "constant"
value: 0
}
}
}
# In-place ReLU on the first hidden layer.
layer {
name: "relu_whria1"
type: "ReLU"
bottom:"whria_classifier1"
top: "whria_classifier1"
}
# In-place 50% dropout on the first hidden layer (TRAIN phase only).
layer {
name : "drop_whria1"
type : "Dropout"
bottom : "whria_classifier1"
top: "whria_classifier1"
dropout_param {
dropout_ratio:0.5
}
}
# Second 32-unit hidden FC layer.
# FIX: no weight_filler was specified, so Caffe used its default
# constant-0 filler; see whria_classifier1 — zero init here blocks all
# gradient flow through the head. Explicit gaussian/constant init added,
# matching whria_classifier_final.
layer {
name: "whria_classifier2"
type: "InnerProduct"
bottom: "whria_classifier1"
top: "whria_classifier2"
param {
lr_mult: 1.0
decay_mult: 1.0
}
param {
lr_mult: 2.0
decay_mult: 0.0
}
inner_product_param {
num_output: 32
weight_filler {
type: "gaussian"
std: 0.01
}
bias_filler {
type: "constant"
value: 0
}
}
}
# In-place ReLU on the second hidden layer.
layer {
name: "relu_whria2"
type: "ReLU"
bottom:"whria_classifier2"
top: "whria_classifier2"
}
# In-place 50% dropout on the second hidden layer (TRAIN phase only).
layer {
name : "drop_whria2"
type : "Dropout"
bottom : "whria_classifier2"
top: "whria_classifier2"
dropout_param {
dropout_ratio:0.5
}
}
# Final 32 -> 1 regression output layer.
# FIX: added explicit param blocks so the weight/bias learning-rate and
# decay multipliers match every other InnerProduct layer in this file
# (bias: 2x lr, no weight decay); without them both defaulted to 1.0.
layer {
name: "whria_classifier_final"
type: "InnerProduct"
bottom: "whria_classifier2"
top: "whria_classifier_final"
param {
lr_mult: 1.0
decay_mult: 1.0
}
param {
lr_mult: 2.0
decay_mult: 0.0
}
inner_product_param {
num_output: 1
weight_filler {
type: "gaussian"
std: 0.01
}
bias_filler {
type: "constant"
value: 0
}
}
}
# Euclidean (L2) regression loss between the scalar output of the
# three-layer head and "label".
layer {
bottom: "whria_classifier_final"
bottom: "label"
top: "loss"
type: "EuclideanLoss"
name: "loss"
}
번호 | 제목 | 글쓴이 | 날짜 | 조회 수 |
---|---|---|---|---|
186 | hipass | WHRIA | 2008.11.09 | 6967 |
185 | topsite | WHRIA | 2008.11.13 | 6988 |
184 | medicalphoto 구연 | WHRIA | 2008.06.02 | 6996 |
183 | Clinical Study | WHRIA | 2007.08.13 | 7031 |
182 | 제 홈페이지를 두배로 즐기려면??? | 한승석 | 2000.06.09 | 7044 |
181 | 사랑에 빠지다 | WHRIA | 2007.07.06 | 7045 |
180 | Medicalphoto 홈페이지 개편 | WHRIA | 2012.04.10 | 7091 |
179 | MS 가 리눅스개발자를 구한다는데 | WHRIA | 2012.01.29 | 7094 |
178 | Deep learning 기반 DEMO | WHRIA | 2018.09.16 | 7103 |
177 | go~ go~ go~ | WHRIA | 2008.08.19 | 7126 |
176 | 피부암 | WHRIA | 2008.08.13 | 7127 |
175 | inception | WHRIA | 2017.01.23 | 7188 |
174 | 신한 | WHRIA | 2008.10.27 | 7202 |
173 | 인생의 방향 | WHRIA | 2007.09.18 | 7203 |
172 | ssl | WHRIA | 2008.06.09 | 7207 |
https://github.com/BVLC/caffe/issues/1396