Skip to content

Commit

Permalink
Added a function to Layer that returns the number of trainable parameters
Browse files Browse the repository at this point in the history
  • Loading branch information
amritagos committed Oct 22, 2023
1 parent 2aff5e5 commit 63ea76a
Show file tree
Hide file tree
Showing 6 changed files with 38 additions and 6 deletions.
3 changes: 3 additions & 0 deletions examples/mnist/main.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -56,6 +56,9 @@ int main()
network.add( Robbie::ActivationLayer<scalar, Robbie::ActivationFunctions::Tanh<scalar>>() );
network.add( Robbie::FCLayer<scalar>( 50, 10 ) );

// No. of trainable params
network.summary();

network.fit( x_train, y_train, 35, 0.1 );

// Test on three samples
Expand Down
6 changes: 6 additions & 0 deletions include/activation_layer.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -23,5 +23,11 @@ class ActivationLayer : public Layer<scalar>
{
return Activation::df( this->input ).array() * output_error.array();
}

// An activation layer applies a fixed element-wise function; it owns no
// weights or biases, so it contributes zero trainable parameters.
int get_trainable_params() override
{
    constexpr int no_params = 0;
    return no_params;
}
};
} // namespace Robbie
11 changes: 8 additions & 3 deletions include/fc_layer.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -24,9 +24,8 @@ class FCLayer : public Layer<scalar>
weights( Matrix<scalar>::Random( input_size, output_size ) ),
bias( Vector<scalar>::Random( output_size ) )
{
scalar offset = 0.5;
weights = weights.array() / 2.0;
bias = bias.array() / 2.0;
weights = weights.array() / 2.0;
bias = bias.array() / 2.0;
}

// returns output for a given input
Expand All @@ -50,6 +49,12 @@ class FCLayer : public Layer<scalar>
return input_error;
}

// Trainable parameters of a fully-connected layer: one per weight matrix
// entry plus one per bias entry.
int get_trainable_params() override
{
    const auto n_weights = this->weights.size();
    const auto n_biases  = this->bias.size();
    return n_weights + n_biases;
}

// Access the current weights
Matrix<scalar> get_weights()
{
Expand Down
3 changes: 3 additions & 0 deletions include/layer.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,9 @@ class Layer
// computes dE/dX for a given dE/dY (and update parameters if any)
virtual Vector<scalar> backward_propagation( const Vector<scalar> & output_error, scalar learning_rate ) = 0;

// Number of trainable parameters in this layer (e.g. weights + biases for a
// fully-connected layer, 0 for parameter-free layers such as activations).
virtual int get_trainable_params() = 0;

virtual ~Layer() = default;
};

Expand Down
16 changes: 16 additions & 0 deletions include/network.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -78,8 +78,24 @@ class Network
}
}

void summary()
{
n_trainable_params = 0;

for( auto & layer : layers )
{
n_trainable_params += layer->get_trainable_params();
}

// Print the number of trainable parameters
fmt::print( "=================================================================\n" );
fmt::print( "Trainable params = {}\n", n_trainable_params );
fmt::print( "=================================================================\n" );
}

private:
std::vector<std::unique_ptr<Layer<scalar>>> layers;
int n_trainable_params;
};

} // namespace Robbie
5 changes: 2 additions & 3 deletions test/test_loss_functions.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -15,9 +15,8 @@ TEST_CASE( "Test_LossFunctions" )
auto y_pred = Robbie::Vector<double>( 5 );
y_pred << -3.1, 1.2, 2.3, 3.1, -1;

auto f_bound = [&]( const Robbie::Vector<double> & y_pred ) {
return Robbie::LossFunctions::MeanSquareError<double>::f( y_true, y_pred );
};
auto f_bound = [&]( const Robbie::Vector<double> & y_pred )
{ return Robbie::LossFunctions::MeanSquareError<double>::f( y_true, y_pred ); };

auto df_finite_diff = Robbie::finite_difference_gradient( f_bound, y_pred );
auto df = Robbie::LossFunctions::MeanSquareError<double>::df( y_true, y_pred );
Expand Down

0 comments on commit 63ea76a

Please sign in to comment.