From 6be4200cf3fb480e740e23129cadc4c50950fcb2 Mon Sep 17 00:00:00 2001
From: Moritz Sallermann
Date: Mon, 23 Oct 2023 15:08:35 +0000
Subject: [PATCH] dropout_layer: implemented scaling by p_keep at training time

---
 include/dropout_layer.hpp | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/include/dropout_layer.hpp b/include/dropout_layer.hpp
index 7ed150f..40c95b4 100644
--- a/include/dropout_layer.hpp
+++ b/include/dropout_layer.hpp
@@ -14,14 +14,14 @@ template<typename scalar> class DropoutLayer : public Layer
 {
 protected:
-    scalar dropout_probability = 0.0;
+    scalar p_keep = 0.0;
     Vector dropout_mask;
     std::mt19937 gen;
     std::uniform_real_distribution<scalar> dist = std::uniform_real_distribution<scalar>( 0.0, 1.0 );
     std::optional<int> frozen_seed = std::nullopt;
 
 public:
-    DropoutLayer( scalar dropout_probability ) : Layer(), dropout_probability( dropout_probability )
+    DropoutLayer( scalar p_keep ) : Layer(), p_keep( p_keep )
     {
         int seed = std::random_device()();
         gen = std::mt19937( seed );
@@ -44,7 +44,7 @@
         dropout_mask.resize( input_data.size(), 1 );
 
-        const auto dropout_lambda = [&]( scalar x ) { return dist( gen ) < this->dropout_probability ? 0.0 : 1.0; };
+        const auto dropout_lambda = [&]( scalar x ) { return dist( gen ) > this->p_keep ? 0.0 : 1.0 / p_keep; };
 
         dropout_mask = dropout_mask.array().unaryExpr( dropout_lambda );
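
For context, the change above switches the layer to "inverted" dropout: a unit is dropped with probability 1 - p_keep, and the survivors are scaled by 1/p_keep at training time, so the expected activation is unchanged (E[mask * x] = p_keep * (1/p_keep) * x = x). Below is a minimal standalone sketch of that idea. It is not the repository's code: the repository's scalar, Vector, and DropoutLayer types are replaced here with double and std::vector<double> for illustration, and p_keep = 0.8 is an arbitrary example value.

    #include <iostream>
    #include <random>
    #include <vector>

    int main()
    {
        const double p_keep = 0.8; // probability of keeping a unit
        std::mt19937 gen( std::random_device{}() );
        std::uniform_real_distribution<double> dist( 0.0, 1.0 );

        std::vector<double> activations( 10, 1.0 ); // dummy activations

        // Training time: drop a unit with probability 1 - p_keep and scale
        // the survivors by 1/p_keep, mirroring the lambda in the patch.
        for( auto & x : activations )
        {
            const double mask = dist( gen ) > p_keep ? 0.0 : 1.0 / p_keep;
            x *= mask;
        }

        // At inference time no mask is applied at all; because the scaling
        // already happened during training, no rescaling is needed either.
        for( const auto & x : activations )
            std::cout << x << " ";
        std::cout << "\n";
    }

The design choice this reflects: classic dropout leaves activations unscaled during training and multiplies by p_keep at test time instead, which means inference code must know the dropout probability. Scaling by 1/p_keep at training time keeps inference a plain forward pass.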