
Searched full:softplus (Results 1 – 25 of 174) sorted by relevance


/aosp_15_r20/external/tensorflow/tensorflow/python/kernel_tests/nn_ops/
softplus_op_test.py
15 """Tests for Softplus and SoftplusGrad."""
38 softplus = nn_ops.softplus(np_features)
39 tf_softplus = self.evaluate(softplus)
42 self.assertShapeEqual(np_softplus, softplus)
77 y = nn_ops.softplus(x, name="softplus")
84 print("softplus (float) gradient err = ", err)
94 y = nn_ops.softplus(x, name="softplus")
102 print("softplus (float) gradient of gradient err = ", err)
112 y = nn_ops.softplus(x, name="softplus")
121 print("softplus (float) third-order gradient err = ", err)
[all …]
/aosp_15_r20/external/tensorflow/tensorflow/core/kernels/
softplus_op.h
32 struct Softplus {
33 // Computes Softplus activation.
41 // true softplus(x). Offset of 2 from machine epsilon checked in operator()
43 // softplus implemented with numpy's log1p and numpy's logaddexp. in operator()
46 // Value above which exp(x) may overflow, but softplus(x) == x in operator()
49 // Value below which exp(x) may underflow, but softplus(x) == exp(x) in operator()
54 features, // softplus(x) ~= x for x large in operator()
55 too_small.select(features_exp, // softplus(x) ~= exp(x) for x small in operator()
65 // gradients: gradients backpropagated to the Softplus op.
66 // features: inputs that were passed to the Softplus op.
[all …]
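The comments above describe a three-regime evaluation: return x where exp(x) would overflow, exp(x) where it would underflow, and log1p(exp(x)) in between. A minimal NumPy sketch of that scheme; the cutoff constant here is inferred from the "offset of 2 from machine epsilon" comment, not copied from the kernel:

    import numpy as np

    def stable_softplus(x):
        # Assumed cutoff: 2 beyond -log(machine epsilon); the kernel's exact
        # constant may differ.
        x = np.asarray(x, dtype=np.float64)
        cutoff = -np.log(np.finfo(x.dtype).eps) - 2.0
        safe = np.clip(x, -cutoff, cutoff)           # keeps exp() finite below
        return np.where(x > cutoff, x,               # softplus(x) ~= x for x large
               np.where(x < -cutoff, np.exp(safe),   # softplus(x) ~= exp(x) for x small
                        np.log1p(np.exp(safe))))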
softplus_op.cc
40 functor::Softplus<Device, T> functor; in Operate()
81 Name("Softplus").Device(DEVICE_CPU).TypeConstraint<type>("T"), \
96 void Softplus<GPUDevice, T>::operator()( \
99 extern template struct Softplus<GPUDevice, T>;
119 Name("Softplus").Device(DEVICE_GPU).TypeConstraint<type>("T"), \
/aosp_15_r20/external/tensorflow/tensorflow/python/tools/
print_selective_registration_header_test.py
168 ["Softplus", "SoftplusOp<CPUDevice, float>"]]"""
173 ('Softplus', 'SoftplusOp<CPUDevice, float>'),
177 ops_list = '[["Softplus", "SoftplusOp<CPUDevice, float>"]]'
181 ('Softplus', 'SoftplusOp<CPUDevice, float>'),
194 ops_list = '[["Softplus", ""]]'
198 ('Softplus', None),
202 ops_list = '[["Softplus", "SoftplusOp<CPUDevice, float>"]]'
208 ('Softplus', 'SoftplusOp<CPUDevice, float>'),
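The ops_list strings exercised above are JSON arrays of [op_name, kernel_class] pairs, with an empty kernel-class string standing in for "no kernel". A small illustrative parse (the helper name is made up for this sketch):

    import json

    def parse_ops_list(ops_list):
        # '[["Softplus", "SoftplusOp<CPUDevice, float>"]]' -> [('Softplus', 'SoftplusOp<CPUDevice, float>')]
        # '[["Softplus", ""]]'                             -> [('Softplus', None)]
        return [(name, cls or None) for name, cls in json.loads(ops_list)]

    print(parse_ops_list('[["Softplus", "SoftplusOp<CPUDevice, float>"]]'))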
/aosp_15_r20/external/tensorflow/tensorflow/core/ops/compat/ops_history_v1/
Softplus.pbtxt
2 name: "Softplus"
30 name: "Softplus"
60 name: "Softplus"
91 name: "Softplus"
122 name: "Softplus"
/aosp_15_r20/external/tensorflow/tensorflow/core/ops/compat/ops_history_v2/
Softplus.pbtxt
2 name: "Softplus"
30 name: "Softplus"
60 name: "Softplus"
91 name: "Softplus"
122 name: "Softplus"
/aosp_15_r20/external/tensorflow/tensorflow/core/api_def/base_api/
api_def_SoftplusGrad.pbtxt
7 The backpropagated gradients to the corresponding softplus operation.
13 The features passed as input to the corresponding softplus operation.
22 summary: "Computes softplus gradients for a softplus operation."
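Since d/dx softplus(x) = sigmoid(x), the gradient op described above multiplies the incoming gradients by sigmoid(features). A hedged NumPy sketch of that rule (not the TF kernel itself):

    import numpy as np

    def softplus_grad(gradients, features):
        # Numerically stable sigmoid(features), then the chain rule.
        z = np.exp(-np.abs(features))
        sigmoid = np.where(features >= 0, 1.0 / (1.0 + z), z / (1.0 + z))
        return gradients * sigmoid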
/aosp_15_r20/external/tensorflow/tensorflow/compiler/tf2xla/kernels/
unary_ops.cc
96 static xla::XlaOp Softplus(xla::XlaBuilder* b, xla::XlaOp features) { in Softplus()
101 // Value above which exp(x) may overflow, but softplus(x) == x in Softplus()
104 // Value below which exp(x) may underflow, but softplus(x) == exp(x) in Softplus()
114 XLAJIT_MAKE_UNARY(Softplus, Softplus(b, x));
/aosp_15_r20/external/tensorflow/tensorflow/python/ops/distributions/
bernoulli.py
152 nn.softplus(-self.logits)) # pylint: disable=invalid-unary-operand-type
180 delta_probs0 = nn.softplus(-b.logits) - nn.softplus(-a.logits)
181 delta_probs1 = nn.softplus(b.logits) - nn.softplus(a.logits)
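The Bernoulli snippets above lean on the identities log p = -softplus(-logits) and log(1 - p) = -softplus(logits); the two delta_probs terms are then exactly the log-probability differences that make up the KL divergence. A NumPy sketch of that decomposition (illustration only, not the TF code):

    import numpy as np

    def softplus(x):
        return np.logaddexp(0.0, x)          # log(1 + exp(x)), overflow-safe

    def bernoulli_kl(logits_a, logits_b):
        p_a = 0.5 * (1.0 + np.tanh(0.5 * logits_a))                # sigmoid(logits_a)
        delta_probs0 = softplus(-logits_b) - softplus(-logits_a)   # log p_a - log p_b
        delta_probs1 = softplus(logits_b) - softplus(logits_a)     # log(1-p_a) - log(1-p_b)
        return p_a * delta_probs0 + (1.0 - p_a) * delta_probs1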
gamma.py
288 """`Gamma` with softplus of `concentration` and `rate`."""
292 "Use `tfd.Gamma(tf.nn.softplus(concentration), "
293 "tf.nn.softplus(rate))` instead.",
304 concentration=nn.softplus(concentration,
306 rate=nn.softplus(rate, name="softplus_rate"),
beta.py
351 """Beta with softplus transform of `concentration1` and `concentration0`."""
355 "Use `tfd.Beta(tf.nn.softplus(concentration1), "
356 "tf.nn.softplus(concentration2))` instead.",
368 concentration1=nn.softplus(concentration1,
370 concentration0=nn.softplus(concentration0,
exponential.py
144 """Exponential with softplus transform on `rate`."""
148 "Use `tfd.Exponential(tf.nn.softplus(rate)).",
158 rate=nn.softplus(rate, name="softplus_rate"),
laplace.py
217 """Laplace with softplus applied to `scale`."""
221 "Use `tfd.Laplace(loc, tf.nn.softplus(scale)) "
234 scale=nn.softplus(scale, name="softplus_scale"),
normal.py
246 """Normal with softplus applied to `scale`."""
250 "Use `tfd.Normal(loc, tf.nn.softplus(scale)) "
263 scale=nn.softplus(scale, name="softplus_scale"),
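All of the deprecated *WithSoftplus* classes above follow one pattern: pass an unconstrained parameter through softplus so the distribution always sees a strictly positive value. A minimal sketch of the replacement the deprecation messages recommend (assumes TensorFlow Probability is available as tfp):

    import tensorflow as tf
    import tensorflow_probability as tfp

    tfd = tfp.distributions

    raw_scale = tf.Variable(0.0)                                   # unconstrained
    dist = tfd.Normal(loc=0.0, scale=tf.nn.softplus(raw_scale))   # scale > 0 by construction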
/aosp_15_r20/external/tensorflow/tensorflow/python/keras/
activations.py
200 @keras_export('keras.activations.softplus')
202 def softplus(x):
203 """Softplus activation function, `softplus(x) = log(exp(x) + 1)`.
208 >>> b = tf.keras.activations.softplus(a)
217 The softplus activation: `log(exp(x) + 1)`.
219 return math_ops.softplus(x)
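For reference, the documented behaviour of the Keras activation, softplus(x) = log(exp(x) + 1), on a few values (approximate outputs in the comment):

    import tensorflow as tf

    a = tf.constant([-20.0, -1.0, 0.0, 1.0, 20.0], dtype=tf.float32)
    b = tf.keras.activations.softplus(a)
    # b ~= [2.1e-09, 0.3133, 0.6931, 1.3133, 20.0]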
/aosp_15_r20/external/tensorflow/tensorflow/core/kernels/mlir_generated/
gpu_op_softplus.cc
21 GENERATE_AND_REGISTER_UNARY_GPU_KERNEL(Softplus, DT_HALF);
22 GENERATE_AND_REGISTER_UNARY_GPU_KERNEL(Softplus, DT_FLOAT);
23 GENERATE_AND_REGISTER_UNARY_GPU_KERNEL(Softplus, DT_DOUBLE);
/aosp_15_r20/external/pytorch/torch/nn/modules/
activation.py
35 "Softplus",
445 \text{Mish}(x) = x * \text{Tanh}(\text{Softplus}(x))
858 class Softplus(Module):
859 r"""Applies the Softplus function element-wise.
862 \text{Softplus}(x) = \frac{1}{\beta} * \log(1 + \exp(\beta * x))
864 SoftPlus is a smooth approximation to the ReLU function and can be used
871 beta: the :math:`\beta` value for the Softplus formulation. Default: 1
878 .. image:: ../scripts/activation_images/Softplus.png
882 >>> m = nn.Softplus()
897 return F.softplus(input, self.beta, self.threshold)
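A short usage sketch of the module described above; past the threshold (default 20) the implementation returns the input unchanged for numerical stability, and beta rescales the curve as in the formula:

    import torch
    import torch.nn as nn

    m = nn.Softplus()                        # beta=1, threshold=20
    x = torch.tensor([-2.0, 0.0, 2.0, 30.0])
    print(m(x))                              # ~[0.1269, 0.6931, 2.1269, 30.0]

    m_scaled = nn.Softplus(beta=2.0)         # (1/beta) * log(1 + exp(beta * x))
    print(m_scaled(x))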
/aosp_15_r20/external/pytorch/torch/distributions/
transforms.py
19 from torch.nn.functional import pad, softplus
630 return -F.softplus(-x) - F.softplus(x)
635 Transform via the mapping :math:`\text{Softplus}(x) = \log(1 + \exp(x))`.
647 return softplus(x)
653 return -softplus(-x)
689 return 2.0 * (math.log(2.0) - x - softplus(-2.0 * x))
869 tanh_logdet = -2 * (x + softplus(-2 * x) - math.log(2.0)).sum(dim=-1)
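The log-det-Jacobian expressions above reduce to softplus identities: for the softplus transform, d/dx softplus(x) = sigmoid(x), so log|det J| = log sigmoid(x) = -softplus(-x); for tanh, log(1 - tanh(x)^2) = 2*(log 2 - x - softplus(-2x)). A quick numerical check of both (sketch, assuming PyTorch):

    import math
    import torch
    import torch.nn.functional as F

    x = torch.linspace(-5.0, 5.0, steps=11, dtype=torch.float64)

    # log sigmoid(x) == -softplus(-x)
    print(torch.allclose(torch.log(torch.sigmoid(x)), -F.softplus(-x), atol=1e-6))

    # log(1 - tanh(x)^2) == 2 * (log 2 - x - softplus(-2x))
    lhs = torch.log1p(-torch.tanh(x) ** 2)
    rhs = 2.0 * (math.log(2.0) - x - F.softplus(-2.0 * x))
    print(torch.allclose(lhs, rhs, atol=1e-6))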
/aosp_15_r20/external/tensorflow/tensorflow/python/kernel_tests/distributions/
util_test.py
924 softplus = nn_ops.softplus(np_features)
925 softplus_inverse = du.softplus_inverse(softplus)
927 softplus, softplus_inverse])
939 self.assertShapeEqual(np_softplus, softplus)
986 y = nn_ops.softplus(x, name="softplus")
993 tf_logging.vlog(2, "softplus (float) gradient err = ", err)
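softplus_inverse undoes softplus: softplus_inverse(y) = log(exp(y) - 1), usually evaluated as y + log(-expm1(-y)) so large y does not overflow. A NumPy sketch of that identity (the test above exercises TF's own implementation, which also guards the very-small-y end):

    import numpy as np

    def softplus_inverse(y):
        y = np.asarray(y, dtype=np.float64)
        return y + np.log(-np.expm1(-y))    # == log(exp(y) - 1), stable for large y

    x = np.array([-5.0, 0.5, 3.0, 40.0])
    y = np.logaddexp(0.0, x)                # softplus(x)
    print(np.allclose(softplus_inverse(y), x))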
/aosp_15_r20/external/pytorch/torch/csrc/api/include/torch/nn/modules/
activation.h
640 // ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Softplus ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
642 /// Applies softplus over a given input.
643 /// See https://pytorch.org/docs/main/nn.html#torch.nn.Softplus to learn
651 /// Softplus model(SoftplusOptions().beta(0.24).threshold(42.42));
661 /// Pretty prints the `Softplus` module into the given `stream`.
670 /// provides, and examples of how to use `Softplus` with
673 TORCH_MODULE(Softplus);
/aosp_15_r20/external/pytorch/torch/_refs/nn/functional/
__init__.py
62 "softplus",
397 return a * torch.tanh(torch.nn.functional.softplus(a))
453 # softplus is implemented specially because it has beta and threshold arguments
454 @register_decomposition(aten.softplus)
461 def softplus(
468 Reference implementation of torch.nn.functional.softplus
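The reference decomposition above treats softplus with beta and threshold as: scale the input by beta, fall back to the input itself where beta*x exceeds the threshold, and otherwise take log1p(exp(beta*x))/beta. A compact sketch of that semantics, checked against torch.nn.functional.softplus:

    import torch
    import torch.nn.functional as F

    def softplus_sketch(a, beta=1.0, threshold=20.0):
        scaled = a * beta
        safe = torch.clamp(scaled, max=threshold)   # avoid exp overflow in the branch not taken
        return torch.where(scaled > threshold, a, torch.log1p(torch.exp(safe)) / beta)

    x = torch.tensor([-3.0, 0.0, 5.0, 50.0])
    print(torch.allclose(softplus_sketch(x), F.softplus(x)))
    print(torch.allclose(softplus_sketch(x, beta=2.0, threshold=10.0),
                         F.softplus(x, beta=2.0, threshold=10.0)))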
/aosp_15_r20/external/pytorch/torch/csrc/api/include/torch/nn/functional/
activation.h
537 inline Tensor softplus(const Tensor& input, double beta, double threshold) { in softplus()
538 return torch::softplus(input, beta, threshold); in softplus()
544 /// https://pytorch.org/docs/main/nn.functional.html#torch.nn.functional.softplus
553 /// F::softplus(x, F::SoftplusFuncOptions().beta(0.5).threshold(3.0));
555 inline Tensor softplus(
558 return detail::softplus(input, options.beta(), options.threshold());
/aosp_15_r20/external/pytorch/torch/csrc/api/include/torch/nn/options/
activation.h
495 /// Options for the `Softplus` module.
499 /// Softplus model(SoftplusOptions().beta(0.24).threshold(42.42));
502 /// the `beta` value for the Softplus formulation. Default: 1
510 /// Options for `torch::nn::functional::softplus`.
518 /// F::softplus(x, F::SoftplusFuncOptions().beta(0.5).threshold(3.0));
/aosp_15_r20/external/tensorflow/tensorflow/core/api_def/java_api/
api_def_Softplus.pbtxt
2 graph_op_name: "Softplus"
4 name: "math.Softplus"
/aosp_15_r20/external/tensorflow/tensorflow/lite/experimental/mlir/testing/op_tests/
softplus.py
28 """Make a set of tests to do softplus."""
42 out = tf.math.softplus(input_tensor)
