Merged
Changes from all commits (21 commits):
d3d8943  fix for bug #10868: _backward_softsign activation is incorrect (Jul 19, 2018)
4496714  fix for bug #10868: _backward_softsign activation is incorrect (Jul 19, 2018)
ac7e831  Merge branch 'master' of https://github.com/apache/incubator-mxnet in… (Jul 19, 2018)
562a2b3  Merge branch 'sskalic_branch' of https://github.com/samskalicky/incub… (Jul 19, 2018)
3f5d0ed  rerunning the CI build (Jul 19, 2018)
f53657c  fixed size checks for when USE_MKLDNN=ON (Jul 19, 2018)
d8566d5  Merge branch 'sskalic_branch' of https://github.com/samskalicky/incub… (Jul 19, 2018)
bf4f491  switched arguments in activation.cu as well for softsign (Jul 19, 2018)
fb2acd1  Merge branch 'sskalic_branch' of https://github.com/samskalicky/incub… (Jul 20, 2018)
1898bab  Merge branch 'sskalic_branch' of https://github.com/samskalicky/incub… (Jul 20, 2018)
c5dea39  changed the code style per anirudh (Jul 20, 2018)
7835c0d  changed the code style per anirudh (Jul 20, 2018)
38a1940  Merge branch 'sskalic_branch' of https://github.com/samskalicky/incub… (Jul 20, 2018)
f75f269  Merge branch 'sskalic_branch' of https://github.com/samskalicky/incub… (Jul 20, 2018)
6026d46  removed whitespace (Jul 20, 2018)
f9d66f9  removed whitespace (Jul 20, 2018)
5362e53  Merge branch 'sskalic_branch' of https://github.com/samskalicky/incub… (Jul 20, 2018)
5de0d67  Merge branch 'sskalic_branch' of https://github.com/samskalicky/incub… (Jul 20, 2018)
73c7b10  Merge branch 'sskalic_branch' of https://github.com/samskalicky/incub… (Jul 20, 2018)
dc08df7  Merge branch 'sskalic_branch' of https://github.com/samskalicky/incub… (Jul 20, 2018)
c4505c8  Merge branch 'sskalic_branch' of https://github.com/samskalicky/incub… (Jul 20, 2018)
src/operator/nn/activation-inl.h (7 changes: 4 additions & 3 deletions)

@@ -174,7 +174,7 @@ void ActivationGradComputeImpl(const nnvm::NodeAttrs& attrs, const OpContext &ct
       break;
     case activation::kSoftSign:
       ActivationBackward<xpu, mshadow_op::softsign, mshadow_op::softsign_grad>(
-          ctx, inputs[0], inputs[1], req[0], outputs[0]);
+          ctx, inputs[0], inputs[2], req[0], outputs[0]);
       break;
     default:
       LOG(FATAL) << "unknown activation type";
@@ -198,12 +198,13 @@ void ActivationGradCompute(const nnvm::NodeAttrs& attrs,
                            const std::vector<TBlob>& inputs,
                            const std::vector<OpReqType>& req,
                            const std::vector<TBlob>& outputs) {
-#if (MXNET_USE_CUDNN == 1 || MXNET_USE_MKLDNN == 1)
   const ActivationParam& param = nnvm::get<ActivationParam>(attrs.parsed);
+#if (MXNET_USE_CUDNN == 1 || MXNET_USE_MKLDNN == 1)
   bool relu = param.act_type == activation::kReLU;
   CHECK_EQ(inputs.size(), relu ? 2U : 3U);
 #else
-  CHECK_EQ(inputs.size(), 2U);
+  bool softsign = param.act_type == activation::kSoftSign;
+  CHECK_EQ(inputs.size(), softsign ? 3U : 2U);
 #endif
   CHECK_EQ(outputs.size(), 1U);
   CHECK_EQ(req.size(), 1U);
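Why the change: in the backward pass, inputs[0] is the incoming output gradient, inputs[1] is the forward output y, and inputs[2] is the forward input x (appended by the ActivationGrad change in activation.cc below). The softsign derivative, d/dx [x / (1 + |x|)] = 1 / (1 + |x|)^2, is a function of the input x, not of the output y = softsign(x), so evaluating softsign_grad on inputs[1] produced wrong gradients. A minimal NumPy sketch (illustrative only, not MXNet code) makes the mismatch visible:

import numpy as np

def softsign(x):
    return x / (1. + np.abs(x))

def softsign_grad(v):
    # softsign'(v); only correct when v is the forward input x
    return 1. / np.square(1. + np.abs(v))

x = np.linspace(-3., 3., 7)
y = softsign(x)
eps = 1e-6
numeric = (softsign(x + eps) - softsign(x - eps)) / (2. * eps)  # reference gradient

print(np.allclose(softsign_grad(x), numeric, atol=1e-5))  # True:  grad evaluated at x
print(np.allclose(softsign_grad(y), numeric, atol=1e-5))  # False: grad evaluated at y, the old bug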
src/operator/nn/activation.cc (29 changes: 22 additions & 7 deletions)

@@ -44,11 +44,19 @@ struct ActivationGrad {
                                           const std::vector<nnvm::NodeEntry>& ograds) const {
     std::vector<nnvm::NodeEntry> heads(ograds.begin(), ograds.end());
     heads.emplace_back(nnvm::NodeEntry{n, activation::kOut, 0});
-#if (MXNET_USE_CUDNN == 1 || MXNET_USE_MKLDNN == 1)
+
     const NodeAttrs& attrs = n->attrs;
+    int act_type = dmlc::get<ActivationParam>(attrs.parsed).act_type;
+    if (act_type == activation::kSoftSign) {
+      // for softsign need the inputs to compute the activation.
+      heads.push_back(n->inputs[activation::kData]);
+    }
+
+#if (MXNET_USE_CUDNN == 1 || MXNET_USE_MKLDNN == 1)
     // for ReLU, no need to pass input data. This enables inplace optimization during the
     // forward pass.
-    if (dmlc::get<ActivationParam>(attrs.parsed).act_type != activation::kReLU) {
+    if (act_type != activation::kReLU &&
+        act_type != activation::kSoftSign) {
       heads.push_back(n->inputs[activation::kData]);
     }
 #endif
@@ -118,8 +126,8 @@ inline static bool BackwardActStorageType(const nnvm::NodeAttrs& attrs,
                                           std::vector<int> *in_attrs,
                                           std::vector<int> *out_attrs) {
   bool ret = false;
-#if (MXNET_USE_CUDNN == 1 || MXNET_USE_MKLDNN == 1)
   const ActivationParam& param = nnvm::get<ActivationParam>(attrs.parsed);
+#if (MXNET_USE_CUDNN == 1 || MXNET_USE_MKLDNN == 1)
   if (param.act_type != activation::kReLU) {
     CHECK_EQ(in_attrs->size(), 3U);
     ret = ElemwiseStorageType<3, 1, false, false, false>(attrs, dev_mask,
@@ -133,10 +141,17 @@ inline static bool BackwardActStorageType(const nnvm::NodeAttrs& attrs,
                                                          in_attrs, out_attrs);
   }
 #else
-  CHECK_EQ(in_attrs->size(), 2U);
-  ret = ElemwiseStorageType<2, 1, false, false, false>(attrs, dev_mask,
-                                                       dispatch_mode,
-                                                       in_attrs, out_attrs);
+  if (param.act_type == activation::kSoftSign) {
+    CHECK_EQ(in_attrs->size(), 3U);
+    ret = ElemwiseStorageType<3, 1, false, false, false>(attrs, dev_mask,
+                                                         dispatch_mode,
+                                                         in_attrs, out_attrs);
+  } else {
+    CHECK_EQ(in_attrs->size(), 2U);
+    ret = ElemwiseStorageType<2, 1, false, false, false>(attrs, dev_mask,
+                                                         dispatch_mode,
+                                                         in_attrs, out_attrs);
+  }
 #endif
   CHECK_EQ(out_attrs->size(), 1U);
 #if MXNET_USE_MKLDNN == 1
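The net effect of these two changes: the softsign backward node now receives three inputs in every build configuration, not just when cuDNN or MKL-DNN is enabled. A rough Python mock of the resulting input layout (names and structure simplified from the C++; illustrative only, not MXNet code):

import numpy as np

def activation_backward(act_type, inputs):
    # Mirrors the size checks above: softsign needs [out_grad, out_data, in_data],
    # while an activation whose gradient is computable from its output needs only two.
    if act_type == 'softsign':
        assert len(inputs) == 3
        out_grad, _out_data, in_data = inputs
        return out_grad * (1. / np.square(1. + np.abs(in_data)))  # uses the input x
    assert len(inputs) == 2
    out_grad, out_data = inputs           # e.g. sigmoid: y' = y * (1 - y)
    return out_grad * out_data * (1. - out_data)

x = np.array([-2., 0., 2.])
og = np.ones_like(x)
print(activation_backward('softsign', [og, x / (1. + np.abs(x)), x]))  # [0.111..., 1., 0.111...]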
src/operator/nn/activation.cu (2 changes: 1 addition & 1 deletion)

@@ -87,7 +87,7 @@ void ActivationGradCompute<gpu>(const nnvm::NodeAttrs& attrs,
       ctx, inputs[0], inputs[1], req[0], outputs[0]);
   } else if (param.act_type == activation::kSoftSign) {
     ActivationBackward<gpu, mshadow_op::softsign, mshadow_op::softsign_grad>(
-        ctx, inputs[0], inputs[1], req[0], outputs[0]);
+        ctx, inputs[0], inputs[2], req[0], outputs[0]);
   } else {
     MSHADOW_REAL_TYPE_SWITCH(inputs[0].type_flag_, DType, {
       // XXX: for y = relu(x), y is passed as "in_data" to Backward()
tests/python/unittest/test_operator.py (4 changes: 4 additions & 0 deletions)

@@ -6850,6 +6850,10 @@ def test_activation():
                      lambda x: np.log(1. + np.exp(x)),
                      lambda x: 1. - 1 / (1 + np.exp(x)),
                      -3.0, 3.0],
+        'softsign': [lambda x: mx.sym.Activation(x, act_type='softsign'),
+                     lambda x: x / (1. + np.abs(x)),
+                     lambda x: 1. / np.square(1. + np.abs(x)),
+                     -3.0, 3.0],
     }
     # Loop over operators
     for name, op in unary_ops.items():
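Beyond the symbolic unit test above, the fix can be sanity-checked end to end with the imperative API. A sketch assuming the MXNet 1.x autograd interface (mx.nd.Activation and attach_grad are standard API; the tolerance and input grid are arbitrary choices here):

import mxnet as mx
import numpy as np

x = mx.nd.array(np.linspace(-3., 3., 101))
x.attach_grad()
with mx.autograd.record():
    y = mx.nd.Activation(x, act_type='softsign')
y.backward()  # elementwise op, so x.grad holds dy/dx

expected = 1. / np.square(1. + np.abs(x.asnumpy()))
assert np.allclose(x.grad.asnumpy(), expected, atol=1e-5)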