Commit

dropout_layer: implemented scaling by p_keep at training time
MSallermann committed Oct 23, 2023
1 parent b591019 commit 6be4200
Showing 1 changed file with 3 additions and 3 deletions.
6 changes: 3 additions & 3 deletions include/dropout_layer.hpp
@@ -14,14 +14,14 @@ template<typename scalar>
 class DropoutLayer : public Layer<scalar>
 {
 protected:
-    scalar dropout_probability = 0.0;
+    scalar p_keep = 0.0;
     Vector<scalar> dropout_mask;
     std::mt19937 gen;
     std::uniform_real_distribution<scalar> dist = std::uniform_real_distribution<scalar>( 0.0, 1.0 );
     std::optional<size_t> frozen_seed = std::nullopt;
 
 public:
-    DropoutLayer( scalar dropout_probability ) : Layer<scalar>(), dropout_probability( dropout_probability )
+    DropoutLayer( scalar p_keep ) : Layer<scalar>(), p_keep( p_keep )
     {
         int seed = std::random_device()();
         gen = std::mt19937( seed );
@@ -44,7 +44,7 @@ class DropoutLayer : public Layer<scalar>
 
         dropout_mask.resize( input_data.size(), 1 );
 
-        const auto dropout_lambda = [&]( scalar x ) { return dist( gen ) < this->dropout_probability ? 0.0 : 1.0; };
+        const auto dropout_lambda = [&]( scalar x ) { return dist( gen ) > this->p_keep ? 0.0 : 1.0/p_keep; };
 
         dropout_mask = dropout_mask.array().unaryExpr( dropout_lambda );
 
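For context: the old code zeroed each unit with probability dropout_probability and left survivors at 1, which changes the expected magnitude of activations between training and inference. The new code implements inverted dropout: each unit is kept with probability p_keep and survivors are scaled by 1/p_keep, so the expected value of the mask is 1 and the forward pass needs no rescaling at inference time. Below is a minimal standalone sketch of the same masking logic, assuming a plain std::vector<float> in place of the repository's Vector<scalar> type; the function name apply_inverted_dropout is hypothetical and not part of this codebase.

#include <iostream>
#include <random>
#include <vector>

// Applies an inverted-dropout mask in place: each element survives with
// probability p_keep and is scaled by 1 / p_keep; dropped elements become 0.
// The expected value of the output therefore equals the input.
// (Illustrative sketch only; mirrors the lambda in the commit above.)
void apply_inverted_dropout( std::vector<float> & activations, float p_keep, std::mt19937 & gen )
{
    std::uniform_real_distribution<float> dist( 0.0f, 1.0f );
    for( auto & a : activations )
        a = ( dist( gen ) > p_keep ) ? 0.0f : a / p_keep;
}

int main()
{
    std::mt19937 gen( 42 ); // fixed seed so the demo is reproducible
    std::vector<float> activations( 8, 1.0f );

    apply_inverted_dropout( activations, 0.8f, gen );

    // Surviving entries print as 1.25 (= 1 / 0.8), dropped entries as 0.
    for( float a : activations )
        std::cout << a << ' ';
    std::cout << '\n';
}

With p_keep = 0.8, roughly 20% of entries come out as 0 and the rest as 1.25 times their input, so the expected output equals the input and the layer can simply pass data through unchanged at inference time.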
