Skip to content

Commit

Permalink
Update unit test to account for improvements to numerical accuracy of the GeLU implementation in TensorFlow.
Browse files Browse the repository at this point in the history

PiperOrigin-RevId: 681164292
  • Loading branch information
lingvo-bot authored and copybara-github committed Oct 1, 2024
1 parent 6cbc270 commit 2cb07a2
Showing 1 changed file with 5 additions and 3 deletions.
8 changes: 5 additions & 3 deletions lingvo/core/activations_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,9 +28,11 @@ def testGeluActivation(self):
np.linspace(-10.0, 10.0, num=21, dtype='float32'), dtype=tf.float32)
grads_gelu = tf.gradients(tf.nn.gelu(inputs), inputs)
grads_relu = tf.gradients(tf.nn.relu(inputs), inputs)

self.assertEqual(0.0,
tf.nn.gelu(tf.constant(-10.0, dtype='float32')).eval())
# The true value of gelu underflows to zero for float32 at approximately
# x = -14.307701750188 (-13.1988706591358576 without gradual underflow).
self.assertEqual(
0.0, tf.nn.gelu(tf.constant(-15.0, dtype='float32')).eval()
)
self.assertEqual(0.0,
tf.nn.gelu(tf.constant(0.0, dtype='float32')).eval())
self.assertEqual(10.0,
Expand Down

0 comments on commit 2cb07a2

Please sign in to comment.