accurate-gelu - describe corresponding pytorch algos for gelus
jcrist1 committed Jul 14, 2023
1 parent 8fbf249 commit d201f4a
Showing 1 changed file with 6 additions and 2 deletions.
8 changes: 6 additions & 2 deletions src/nn/activations.rs
@@ -25,8 +25,12 @@ macro_rules! activation_impls {
 }
 
 activation_impls!(ReLU, try_relu, #[doc="Calls [relu()]."]);
-activation_impls!(GeLU, try_gelu, #[doc="Calls [gelu()]."]);
-activation_impls!(AccurateGeLU, try_accurate_gelu, #[doc="Calls [accurate_gelu()]. The GeLU is defined as x * Phi(x) where Phi is the cumulative distribution function of a standard Normal Distribution. It is often implemented with a fast approximation using tanh (see [GeLU])"]);
+activation_impls!(GeLU, try_gelu, #[doc="Calls [gelu()]. This corresponds to `torch.nn.GELU(approximate='tanh')` in pytorch."]);
+activation_impls!(
+    AccurateGeLU,
+    try_accurate_gelu,
+    #[doc=r#"Calls [accurate_gelu()]. The GeLU is defined as x * Phi(x) where Phi is the cumulative distribution function of a standard Normal Distribution.
+It is often implemented with a fast approximation using tanh (see [GeLU]). This corresponds to `torch.nn.GELU(approximate='none')` in pytorch."#]);
 activation_impls!(Sin, try_sin, #[doc="Calls [sin()]."]);
 activation_impls!(Cos, try_cos, #[doc="Calls [cos()]."]);
 activation_impls!(Ln, try_ln, #[doc="Calls [ln()]."]);
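The doc comments added here describe two GeLU variants: the "accurate" form `x * Phi(x)` (pytorch `approximate='none'`) and the fast tanh approximation (pytorch `approximate='tanh'`). As a minimal standalone sketch (not dfdx code), the two formulas could be compared like this; the `erf` helper is an assumption, implemented with the Abramowitz–Stegun approximation since Rust's standard library does not provide `erf`:

```rust
// Standalone sketch of the two GeLU variants referenced in the doc comments above.
// Not part of dfdx; for illustration only.

// Approximate erf(x) with the Abramowitz–Stegun formula 7.1.26 (max error ~1.5e-7).
fn erf(x: f64) -> f64 {
    let sign = if x < 0.0 { -1.0 } else { 1.0 };
    let x = x.abs();
    let t = 1.0 / (1.0 + 0.3275911 * x);
    let poly = ((((1.061405429 * t - 1.453152027) * t + 1.421413741) * t - 0.284496736) * t
        + 0.254829592)
        * t;
    sign * (1.0 - poly * (-x * x).exp())
}

// "Accurate" GeLU: x * Phi(x), with Phi the standard normal CDF
// (pytorch: torch.nn.GELU(approximate='none')).
fn accurate_gelu(x: f64) -> f64 {
    0.5 * x * (1.0 + erf(x / 2.0_f64.sqrt()))
}

// Fast tanh approximation (pytorch: torch.nn.GELU(approximate='tanh')).
fn gelu_tanh(x: f64) -> f64 {
    let c = (2.0 / std::f64::consts::PI).sqrt();
    0.5 * x * (1.0 + (c * (x + 0.044715 * x.powi(3))).tanh())
}

fn main() {
    // The two variants agree closely but are not identical.
    for &x in &[-2.0, -0.5, 0.0, 0.5, 2.0] {
        println!(
            "x = {x:>5}: accurate = {:.6}, tanh approx = {:.6}",
            accurate_gelu(x),
            gelu_tanh(x)
        );
    }
}
```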
