Skip to content

Commit 0701c2d

Browse files
authored
Merge pull request #16518 from zhoukunsheng/rsqrt
Rsqrt
2 parents bbcfa8f + b1c5820 commit 0701c2d

File tree

5 files changed

+51
-0
lines changed

5 files changed

+51
-0
lines changed

paddle/fluid/API.spec

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -324,6 +324,7 @@ paddle.fluid.layers.atan (ArgSpec(args=['x', 'name'], varargs=None, keywords=Non
324324
paddle.fluid.layers.tanh_shrink (ArgSpec(args=['x', 'name'], varargs=None, keywords=None, defaults=(None,)), ('document', '1e521554b9fdda9061ec6d306f0709b7'))
325325
paddle.fluid.layers.softshrink (ArgSpec(args=['x', 'name'], varargs=None, keywords=None, defaults=(None,)), ('document', '9eef31597bbafa2bd49691e072296e13'))
326326
paddle.fluid.layers.sqrt (ArgSpec(args=['x', 'name'], varargs=None, keywords=None, defaults=(None,)), ('document', 'e9e27491c39ac74d0b1ffe506aec0ebb'))
327+
paddle.fluid.layers.rsqrt (ArgSpec(args=['x', 'name'], varargs=None, keywords=None, defaults=(None,)), ('document', 'c445467ebe58b3c0d7f0bba7795b6f56'))
327328
paddle.fluid.layers.abs (ArgSpec(args=['x', 'name'], varargs=None, keywords=None, defaults=(None,)), ('document', '64650ac42cf82e9920cb0b172b1d29fd'))
328329
paddle.fluid.layers.ceil (ArgSpec(args=['x', 'name'], varargs=None, keywords=None, defaults=(None,)), ('document', 'c75d67dc5fe28f68e4cfffead4f698ad'))
329330
paddle.fluid.layers.floor (ArgSpec(args=['x', 'name'], varargs=None, keywords=None, defaults=(None,)), ('document', '647b16c5da5ef909649ae02abb434973'))

paddle/fluid/operators/activation_op.cc

Lines changed: 11 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -227,6 +227,15 @@ you should add a small epsilon(1e-12) to avoid negative number caused by numeric
227227
228228
)DOC";
229229

230+
// User-facing documentation string for the rsqrt operator, registered via
// REGISTER_ACTIVATION_OP_MAKER below; the $...$ span is LaTeX rendered in
// the generated API docs.
UNUSED constexpr char RsqrtDoc[] = R"DOC(
Rsqrt Activation Operator.

Please make sure input is legal in case of numeric errors.

$out = \frac{1}{\sqrt{x}}$

)DOC";
238+
230239
UNUSED constexpr char AbsDoc[] = R"DOC(
231240
Abs Activation Operator.
232241
@@ -575,6 +584,7 @@ REGISTER_ACTIVATION_OP_MAKER(Gelu, GeluDoc);
575584
REGISTER_ACTIVATION_OP_MAKER(Tanh, TanhDoc);
576585
REGISTER_ACTIVATION_OP_MAKER(TanhShrink, TanhShrinkDoc);
577586
REGISTER_ACTIVATION_OP_MAKER(Sqrt, SqrtDoc);
587+
REGISTER_ACTIVATION_OP_MAKER(Rsqrt, RsqrtDoc);
578588
REGISTER_ACTIVATION_OP_MAKER(Abs, AbsDoc);
579589
REGISTER_ACTIVATION_OP_MAKER(Ceil, CeilDoc);
580590
REGISTER_ACTIVATION_OP_MAKER(Floor, FloorDoc);
@@ -586,6 +596,7 @@ REGISTER_ACTIVATION_OP_MAKER(Log, LogDoc);
586596
REGISTER_ACTIVATION_OP_MAKER(Square, SquareDoc);
587597
REGISTER_ACTIVATION_OP_MAKER(Softplus, SoftplusDoc);
588598
REGISTER_ACTIVATION_OP_MAKER(Softsign, SoftsignDoc);
599+
589600
} // namespace operators
590601
} // namespace paddle
591602

paddle/fluid/operators/activation_op.h

Lines changed: 21 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -511,6 +511,26 @@ struct SqrtGradFunctor : public BaseActivationFunctor<T> {
511511
static constexpr ActBwdOpFwdDeps FwdDeps() { return kDepOut; }
512512
};
513513

514+
// rsqrt(x) = x^(-1/2)
515+
template <typename T>
516+
struct RsqrtFunctor : public BaseActivationFunctor<T> {
517+
template <typename Device, typename X, typename Out>
518+
void operator()(Device d, X x, Out out) const {
519+
out.device(d) = x.rsqrt();
520+
}
521+
};
522+
523+
template <typename T>
524+
struct RsqrtGradFunctor : public BaseActivationFunctor<T> {
525+
template <typename Device, typename X, typename Out, typename dOut,
526+
typename dX>
527+
void operator()(Device d, X x, Out out, dOut dout, dX dx) const {
528+
dx.device(d) = static_cast<T>(-0.5) * dout * out * out * out;
529+
}
530+
531+
static constexpr ActBwdOpFwdDeps FwdDeps() { return kDepOut; }
532+
};
533+
514534
// ceil(x) = ceiling(x)
515535
template <typename T>
516536
struct CeilFunctor : public BaseActivationFunctor<T> {
@@ -1191,6 +1211,7 @@ struct SwishGradFunctor : public BaseActivationFunctor<T> {
11911211
__macro(atan, Atan, AtanFunctor, AtanGradFunctor); \
11921212
__macro(softshrink, SoftShrink, SoftShrinkFunctor, SoftShrinkGradFunctor); \
11931213
__macro(sqrt, Sqrt, SqrtFunctor, SqrtGradFunctor); \
1214+
__macro(rsqrt, Rsqrt, RsqrtFunctor, RsqrtGradFunctor); \
11941215
__macro(abs, Abs, AbsFunctor, AbsGradFunctor); \
11951216
__macro(ceil, Ceil, CeilFunctor, ZeroGradFunctor); \
11961217
__macro(floor, Floor, FloorFunctor, ZeroGradFunctor); \

python/paddle/fluid/layers/ops.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -27,6 +27,7 @@
2727
'tanh_shrink',
2828
'softshrink',
2929
'sqrt',
30+
'rsqrt',
3031
'abs',
3132
'ceil',
3233
'floor',

python/paddle/fluid/tests/unittests/test_activation_op.py

Lines changed: 17 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -192,6 +192,23 @@ def test_check_grad(self):
192192
self.check_grad(['X'], 'Out', max_relative_error=0.007)
193193

194194

195+
class TestRsqrt(TestActivation):
    """Unit test for the rsqrt activation op: out = 1 / sqrt(x)."""

    def setUp(self):
        self.op_type = "rsqrt"
        self.init_dtype()

        # Sample away from zero so rsqrt and its gradient stay finite.
        data = np.random.uniform(low=0.1, high=1, size=[2, 3]).astype(self.dtype)
        expected = 1.0 / np.sqrt(data)

        self.inputs = {'X': OpTest.np_dtype_to_fluid_dtype(data)}
        self.outputs = {'Out': expected}

    def test_check_grad(self):
        # Gradient checking is skipped for fp16: precision is too low.
        if self.dtype == np.float16:
            return
        self.check_grad(['X'], 'Out', max_relative_error=0.0005)
210+
211+
195212
class TestAbs(TestActivation):
196213
def setUp(self):
197214
self.op_type = "abs"

0 commit comments

Comments (0)