Skip to content

Commit 7cef95a

Browse files
add alpha dropout
1 parent a39eba8 commit 7cef95a

File tree

1 file changed

+22
-0
lines changed

1 file changed

+22
-0
lines changed

src/dropout.jl

Lines changed: 22 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -92,3 +92,25 @@ function dropback!(p,x,y,dy,dx)
9292
return dx
9393
end
9494

"""
    alpha_dropout(x, p)

Dropout variant associated with the `selu` activation. Instead of zeroing
dropped units it sets them to `alpha' ≈ -1.7581` (the negative saturation
value of `selu`), then applies an affine transform `a .* x .+ b` chosen so
the output keeps zero mean and unit variance.

# Arguments
- `x`: input; a `Rec` (AutoGrad tracked value) signals training mode.
- `p`: drop probability in `[0, 1)`.

Paper Ref.:
Self-Normalizing Neural Networks
https://arxiv.org/abs/1706.02515
"""
function alpha_dropout(x, p)
    # NOTE(review): `x isa Rec` is used as the training flag — this assumes
    # inference inputs are never wrapped in `Rec`; confirm against callers.
    training = x isa Rec
    (p == 0 || !training) && return x

    alpha = Float32(-1.758099)  # alpha' = -lambda*alpha from the SELU paper
    q = Float32(1 - p)          # keep probability
    # `dropout` rescales kept entries by 1/q, so premultiply by q to undo it.
    # Shifting by alpha before and after makes dropped entries equal alpha
    # while kept entries come back unchanged: q*((x-alpha)/q) + alpha == x.
    x = q * dropout(x .- alpha, p) .+ alpha
    # Affine correction restoring zero mean / unit variance. Wrap in Float32
    # so the Float64 result of `1/sqrt(...)` does not promote Float32 inputs.
    a = Float32(1 / sqrt(q + alpha^2 * q * p))
    b = -a * alpha * p
    # Broadcast the scalar shift: `a*x + b` (scalar + array) is an error in
    # Julia >= 1.0.
    return a .* x .+ b
end

0 commit comments

Comments
 (0)