Make gradient on edges smaller
opfromthestart committed Sep 28, 2023
1 parent b5dccc0 commit 30bd707
Showing 3 changed files with 3 additions and 3 deletions.
2 changes: 1 addition & 1 deletion src/tensor_ops/sigmoidr/cpu_kernel.rs
@@ -10,6 +10,6 @@ impl<F: num_traits::Float> UnaryDerivative<F> for super::SigmoidrKernelOp {
     #[inline(always)]
     fn df(&self, &fx: &F) -> F {
         let d = fx * (F::one() - fx);
-        F::max(d, F::from(0.0001).unwrap())
+        F::max(d, F::from(0.0000001).unwrap())
     }
 }
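Aside (not part of the commit): a minimal standalone Rust sketch of the clamped derivative this kernel computes, assuming f32 and the new 1e-7 floor. The `df` above receives the sigmoid output `fx`, so the raw derivative is `fx * (1 - fx)`, and the clamp keeps it from vanishing when the sigmoid saturates. The helper name below is hypothetical.

// Standalone illustration mirroring SigmoidrKernelOp::df above (hypothetical
// helper, not part of dfdx).
fn sigmoidr_df(fx: f32) -> f32 {
    let d = fx * (1.0 - fx); // raw sigmoid derivative, expressed via the output fx
    // Floor lowered from 1e-4 to 1e-7 by this commit, so saturated outputs
    // still propagate a tiny nonzero gradient.
    d.max(1e-7)
}

fn main() {
    println!("{}", sigmoidr_df(0.5));        // 0.25, the floor has no effect
    println!("{}", sigmoidr_df(0.99999994)); // ~1e-7, raw derivative falls below the floor
}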
2 changes: 1 addition & 1 deletion src/tensor_ops/sigmoidr/mod.rs
@@ -53,7 +53,7 @@ mod tests {
         let g = r.mean().backward();
         assert_close_to_literal!(
             g.get(&x),
-            [0.020998716, 0.039322387, 0.05, 0.039322387, 0.00002]
+            [0.020998716, 0.039322387, 0.05, 0.039322387, 0.00000002]
         );
     }
 }
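Aside (not part of the commit): a rough check of where the updated literal 0.00000002 comes from, assuming the expected array's five entries correspond to a 5-element input whose last element is saturated enough that `df` hits the clamp. `mean().backward()` scales each element's gradient by 1/5, so the clamped derivative 1e-7 becomes 1e-7 / 5 = 2e-8.

fn main() {
    let n = 5.0_f32;           // mean() over 5 elements divides each gradient by 5
    let clamped_df = 1e-7_f32; // new floor introduced by this commit
    println!("{}", clamped_df / n); // ~0.00000002, the updated expected value
    println!("{}", 0.25_f32 / n);   // 0.05, the unclamped middle entry (fx = 0.5)
}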
2 changes: 1 addition & 1 deletion src/tensor_ops/sigmoidr/sigmoidr.cu
@@ -12,7 +12,7 @@ template<typename T>
 __device__ __forceinline__ T sigmoidr_bwd(T y) {
     T one = 1.0;
     T d = y * (one - y);
-    return max(d, 0.0001);
+    return max(d, 0.0000001);
 }

 UNARY_OP(__half, sigmoidr_fwd_f16, sigmoidr_bwd_f16, SigmoidrKernelOp,
