From 45e1502e4b0cf8c38380417776c391464d89c6fb Mon Sep 17 00:00:00 2001
From: sxjscience
Date: Sun, 14 Oct 2018 14:56:04 +0800
Subject: [PATCH] try to add support some ops

---
 .../tensor/elemwise_binary_op_basic.cc        | 12 +++++++++-
 .../tensor/elemwise_unary_op_basic.cc         |  8 ++++++-
 src/operator/tensor/elemwise_unary_op_trig.cc | 22 +++++++++++++++++--
 3 files changed, 38 insertions(+), 4 deletions(-)

diff --git a/src/operator/tensor/elemwise_binary_op_basic.cc b/src/operator/tensor/elemwise_binary_op_basic.cc
index 339290df8bf9..710ce5510236 100644
--- a/src/operator/tensor/elemwise_binary_op_basic.cc
+++ b/src/operator/tensor/elemwise_binary_op_basic.cc
@@ -224,7 +224,17 @@ The storage type of ``elemwise_mul`` output depends on storage types of inputs
     return std::vector<ResourceRequest>{ResourceRequest::kTempSpace};
   })
 .add_alias("_mul").add_alias("_Mul")
-.set_attr<nnvm::FGradient>("FGradient", ElemwiseGradUseIn{"_backward_mul"});
+.set_attr<nnvm::FGradient>("FGradient",
+  [](const nnvm::NodePtr& n, const std::vector<nnvm::NodeEntry>& ograds) {
+    auto lhs_grad = MakeNode("elemwise_mul", n->attrs.name + "_backward_lhs",
+                             {ograds[0], n->inputs[1]}, nullptr, &n);
+    auto rhs_grad = MakeNode("elemwise_mul", n->attrs.name + "_backward_rhs",
+                             {ograds[0], n->inputs[0]}, nullptr, &n);
+    std::vector<nnvm::NodeEntry> ret;
+    ret.emplace_back(nnvm::NodeEntry{lhs_grad, 0, 0});
+    ret.emplace_back(nnvm::NodeEntry{rhs_grad, 0, 0});
+    return ret;
+  });
 
 NNVM_REGISTER_OP(_backward_mul)
 .set_num_inputs(3)
diff --git a/src/operator/tensor/elemwise_unary_op_basic.cc b/src/operator/tensor/elemwise_unary_op_basic.cc
index 49ae976cfc2c..b11c1ebbcc28 100644
--- a/src/operator/tensor/elemwise_unary_op_basic.cc
+++ b/src/operator/tensor/elemwise_unary_op_basic.cc
@@ -623,7 +623,13 @@ The storage type of ``negative`` output depends upon the input storage type:
    - negative(csr) = csr
 
 )code")
-.set_attr<nnvm::FGradient>("FGradient", ElemwiseGradUseNone{"negative"});
+.set_attr<nnvm::FGradient>("FGradient",
+  [](const nnvm::NodePtr& n, const std::vector<nnvm::NodeEntry>& ograds) {
+    auto in_grad = MakeNode("negative", n->attrs.name + "_backward", {ograds[0]}, nullptr, &n);
+    std::vector<nnvm::NodeEntry> ret;
+    ret.emplace_back(nnvm::NodeEntry{in_grad, 0, 0});
+    return ret;
+  });
 
 // reciprocal
 MXNET_OPERATOR_REGISTER_UNARY(reciprocal)
diff --git a/src/operator/tensor/elemwise_unary_op_trig.cc b/src/operator/tensor/elemwise_unary_op_trig.cc
index 288719f48a96..5de6de63c06d 100644
--- a/src/operator/tensor/elemwise_unary_op_trig.cc
+++ b/src/operator/tensor/elemwise_unary_op_trig.cc
@@ -44,7 +44,15 @@ The storage type of ``sin`` output depends upon the input storage type:
    - sin(default) = default
    - sin(csr) = csr
 
 )code" ADD_FILELINE)
-.set_attr<nnvm::FGradient>("FGradient", ElemwiseGradUseIn{ "_backward_sin" });
+.set_attr<nnvm::FGradient>("FGradient",
+  [](const nnvm::NodePtr& n, const std::vector<nnvm::NodeEntry>& ograds) {
+    auto x_grad = MakeNode("cos", n->attrs.name + "_mid_x_grad", {n->inputs[0]}, nullptr, &n);
+    auto in_grad = MakeNode("elemwise_mul", n->attrs.name + "_backward",
+                            {ograds[0], nnvm::NodeEntry{x_grad, 0, 0}}, nullptr, &n);
+    std::vector<nnvm::NodeEntry> ret;
+    ret.emplace_back(nnvm::NodeEntry{in_grad, 0, 0});
+    return ret;
+  });
 
 MXNET_OPERATOR_REGISTER_BINARY_WITH_SPARSE_CPU_DR(_backward_sin, unary_bwd<mshadow_op::sin_grad>);
 
@@ -61,7 +69,17 @@ The input should be in radians (:math:`2\pi` rad equals 360 degrees).
 The storage type of ``cos`` output is always dense
 
 )code" ADD_FILELINE)
-.set_attr<nnvm::FGradient>("FGradient", ElemwiseGradUseIn{"_backward_cos"});
+.set_attr<nnvm::FGradient>("FGradient",
+  [](const nnvm::NodePtr& n, const std::vector<nnvm::NodeEntry>& ograds) {
+    auto x_grad = MakeNode("sin", n->attrs.name + "_mid_x_grad", {n->inputs[0]}, nullptr, &n);
+    auto neg_x_grad = MakeNode("negative", n->attrs.name + "_mid_neg_x_grad",
+                               {nnvm::NodeEntry{x_grad, 0, 0}}, nullptr, &n);
+    auto in_grad = MakeNode("elemwise_mul", n->attrs.name + "_backward",
+                            {ograds[0], nnvm::NodeEntry{neg_x_grad, 0, 0}}, nullptr, &n);
+    std::vector<nnvm::NodeEntry> ret;
+    ret.emplace_back(nnvm::NodeEntry{in_grad, 0, 0});
+    return ret;
+  });
 
 MXNET_OPERATOR_REGISTER_BINARY_WITH_SPARSE_CPU(_backward_cos, unary_bwd<mshadow_op::cos_grad>);
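
Note: each new FGradient lambda builds the backward pass as a small subgraph of ordinary forward operators (elemwise_mul, cos, sin, negative) rather than delegating to an opaque _backward_* node, which keeps the gradient expression itself differentiable. Below is a minimal sketch of how this could be exercised from the Python frontend, assuming a build of this branch and that mx.autograd.grad with create_graph=True works for these ops; the input values are illustrative only and are not part of the patch.

from mxnet import nd, autograd

x = nd.array([0.3, 0.7, 1.1])
x.attach_grad()
with autograd.record():
    y = nd.sin(x)
    # The first-order gradient is now an ordinary subgraph (ograd * cos(x)),
    # so it can be differentiated one more time.
    dy_dx = autograd.grad(y, x, create_graph=True, retain_graph=True)[0]
dy_dx.backward()
print(x.grad)       # second-order gradient of sin(x), expected to match -sin(x)
print(-nd.sin(x))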