
Commit 8a5a209

Improve GPU path for logsigmoid

1 parent ebcea90

File tree: 14 files changed, +54 −18 lines

3rdparty/onednn

benchmark/opperf/nd_operations/nn_activation_operators.py

Lines changed: 4 additions & 3 deletions
@@ -36,9 +36,10 @@
 8. Activation
     8.1 relu
     8.2 sigmoid
-    8.3 softrelu
-    8.4 softsign
-    8.5 tanh
+    8.3 log_sigmoid
+    8.4 softrelu
+    8.5 softsign
+    8.6 tanh

 """


benchmark/opperf/rules/default_params.py

Lines changed: 1 addition & 1 deletion
@@ -375,7 +375,7 @@

 # For NN operators
 DEFAULT_ACT_TYPE_LR = ['leaky', 'elu', 'selu', 'gelu']
-DEFAULT_ACT_TYPE_ACTIVATION = ['relu', 'sigmoid', 'softrelu', 'softsign', 'tanh']
+DEFAULT_ACT_TYPE_ACTIVATION = ['relu', 'sigmoid', 'log_sigmoid', 'softrelu', 'softsign', 'tanh']
 DEFAULT_LABEL_SOFTMAX = [(1024, 1024), (10000, 1), (10000, 100)]

 DEFAULT_LABEL_SOFTMAX_LARGE_TENSOR = [(2**32, 1)]
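
For reference, these defaults are the act_type values the opperf activation benchmark sweeps over, and every entry, including the new 'log_sigmoid', is a valid argument to the Activation operator. A minimal sketch of that sweep (illustrative only, not the opperf driver; it assumes the standard mx.nd.Activation and mx.nd.random.uniform APIs):

import mxnet as mx

# Dummy input of the same shape class the benchmarks use.
data = mx.nd.random.uniform(-1, 1, shape=(1024, 1024))

# Each act_type from DEFAULT_ACT_TYPE_ACTIVATION maps to one Activation call.
for act_type in ['relu', 'sigmoid', 'log_sigmoid', 'softrelu', 'softsign', 'tanh']:
    out = mx.nd.Activation(data, act_type=act_type)
    print(act_type, out.shape)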

docs/static_site/src/pages/api/faq/new_op.md

Lines changed: 0 additions & 1 deletion
@@ -225,7 +225,6 @@ struct ActivationParam : public dmlc::Parameter<ActivationParam> {
   DMLC_DECLARE_FIELD(act_type)
   .add_enum("relu", activation::kReLU)
   .add_enum("sigmoid", activation::kSigmoid)
-  .add_enum("log_sigmoid", activation::kLogSigmoid)
   .add_enum("tanh", activation::kTanh)
   .add_enum("softrelu", activation::kSoftReLU)
   .describe("Activation function to be applied.");

python/mxnet/ndarray/ndarray.py

Lines changed: 8 additions & 0 deletions
@@ -2171,6 +2171,14 @@ def log1p(self, *args, **kwargs):
         """
         return op.log1p(self, *args, **kwargs)

+    def log_sigmoid(self, *args, **kwargs):
+        """Convenience fluent method for :py:func:`log_sigmoid`.
+
+        The arguments are the same as for :py:func:`log_sigmoid`, with
+        this array as data.
+        """
+        return op.log_sigmoid(self, *args, **kwargs)
+
     def sqrt(self, *args, **kwargs):
         """Convenience fluent method for :py:func:`sqrt`.

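
The fluent method above simply forwards to the registered log_sigmoid operator with this array as data, so the two calls below should be interchangeable. A small usage sketch (it assumes the operator is also exposed as mx.nd.log_sigmoid, which is what op.log_sigmoid suggests):

import mxnet as mx

x = mx.nd.array([-2.0, 0.0, 2.0])
a = x.log_sigmoid()        # fluent form added in this commit
b = mx.nd.log_sigmoid(x)   # equivalent operator call
print(a.asnumpy(), b.asnumpy())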

python/mxnet/numpy/multiarray.py

Lines changed: 8 additions & 0 deletions
@@ -2260,6 +2260,14 @@ def log1p(self, *args, **kwargs):
         """
         raise AttributeError('mxnet.numpy.ndarray object has no attribute log1p')

+    def log_sigmoid(self, *args, **kwargs):
+        """Convenience fluent method for :py:func:`log_sigmoid`.
+
+        The arguments are the same as for :py:func:`log_sigmoid`, with
+        this array as data.
+        """
+        raise AttributeError('mxnet.numpy.ndarray object has no attribute log_sigmoid')
+
     def sqrt(self, *args, **kwargs):
         """Convenience fluent method for :py:func:`sqrt`.


python/mxnet/symbol/symbol.py

Lines changed: 8 additions & 8 deletions
@@ -2519,6 +2519,14 @@ def log1p(self, *args, **kwargs):
         """
         return op.log1p(self, *args, **kwargs)

+    def log_sigmoid(self, *args, **kwargs):
+        """Convenience fluent method for :py:func:`log_sigmoid`.
+
+        The arguments are the same as for :py:func:`log_sigmoid`, with
+        this array as data.
+        """
+        return op.log_sigmoid(self, *args, **kwargs)
+
     def sqrt(self, *args, **kwargs):
         """Convenience fluent method for :py:func:`sqrt`.

@@ -2583,14 +2591,6 @@ def sigmoid(self, *args, **kwargs):
         """
         return op.sigmoid(self, *args, **kwargs)

-    def log_sigmoid(self, *args, **kwargs):
-        """Convenience fluent method for :py:func:`log_sigmoid`.
-
-        The arguments are the same as for :py:func:`log_sigmoid`, with
-        this array as data.
-        """
-        return op.log_sigmoid(self, *args, **kwargs)
-
     def softmax(self, *args, **kwargs):
         """Convenience fluent method for :py:func:`softmax`.


src/api/operator/numpy_extension/npx_activation_op.cc

Lines changed: 2 additions & 0 deletions
@@ -34,6 +34,8 @@ inline int String2MXNetActType(const std::string& s) {
     return activation::kReLU;
   } else if (s == "sigmoid") {
     return activation::kSigmoid;
+  } else if (s == "log_sigmoid") {
+    return activation::kLogSigmoid;
   } else if (s == "tanh") {
     return activation::kTanh;
   } else if (s == "softrelu") {
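
With this string-to-enum mapping in place, the numpy-extension activation operator can be asked for log_sigmoid by name. A hedged sketch, assuming the usual npx.activation(data, act_type=...) entry point that this .cc file backs:

import mxnet as mx
from mxnet import np, npx

npx.set_np()
x = np.array([-2.0, 0.0, 2.0])
# 'log_sigmoid' is resolved to activation::kLogSigmoid by String2MXNetActType above.
y = npx.activation(x, act_type='log_sigmoid')
print(y)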

src/common/cuda/rtc/backward_functions-inl.h

Lines changed: 8 additions & 2 deletions
@@ -40,8 +40,14 @@ backward_relu(const DTypeGrad grad, const DType val) {

 template <typename DType, typename DTypeGrad>
 __device__ inline mixed_type<DTypeGrad, DType>
-backward_sigmoid(const DTypeGrad grad, const DType out) {
-  return grad * out * (1 - out);
+backward_sigmoid(const DTypeGrad grad, const DType val) {
+  return grad * val * (1 - val);
+}
+
+template <typename DType, typename DTypeGrad>
+__device__ inline mixed_type<DTypeGrad, DType>
+backward_log_sigmoid(const DTypeGrad grad, const DType val) {
+  return grad * 1 / (1 + op::exp(val));
 }

 template <typename DType, typename DTypeGrad>
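
Note that backward_log_sigmoid is written in terms of the original input val, not the forward output: d/dx log(sigmoid(x)) = 1 - sigmoid(x) = 1 / (1 + exp(x)), which is exactly grad * 1 / (1 + op::exp(val)) with grad = 1. A quick NumPy finite-difference check of that identity (illustration only, not the CUDA kernel):

import numpy as np

def log_sigmoid(x):
    return np.log(1.0 / (1.0 + np.exp(-x)))

x = np.linspace(-4.0, 4.0, 9)
analytic = 1.0 / (1.0 + np.exp(x))      # the kernel's formula with grad = 1
eps = 1e-6
numeric = (log_sigmoid(x + eps) - log_sigmoid(x - eps)) / (2 * eps)
print(np.max(np.abs(analytic - numeric)))   # expected to be on the order of 1e-9 or smaller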

src/common/cuda/rtc/forward_functions-inl.h

Lines changed: 9 additions & 0 deletions
@@ -685,6 +685,15 @@ __device__ inline DType sigmoid(const DType val) {
   }
 }

+template <typename DType>
+__device__ inline DType log_sigmoid(const DType val) {
+  if (type_util::has_double_or_integral<DType>::value) {
+    return ::log(1./(1 + ::exp(-val)));
+  } else {
+    return ::logf(1.f/(1 + expf(-val)));
+  }
+}
+
 template <typename DType>
 __device__ inline DType softrelu(const DType val) {
   if (type_util::has_double_or_integral<DType>::value) {
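
The forward kernel computes log(1 / (1 + exp(-val))), i.e. log(sigmoid(val)), using the double-precision intrinsics for double/integral types and the float intrinsics otherwise. A plain-Python mirror of the same math (illustration only, not the RTC kernel):

import numpy as np

def log_sigmoid(x):
    # Same expression as the CUDA branch bodies above.
    x = np.asarray(x, dtype=np.float64)
    return np.log(1.0 / (1.0 + np.exp(-x)))

print(log_sigmoid([-2.0, 0.0, 2.0]))   # approx. [-2.1269, -0.6931, -0.1269]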
