Commit 704860a

clean

RoberLopez committed Dec 26, 2024
1 parent bfe430b commit 704860a
Showing 13 changed files with 99 additions and 170 deletions.
12 changes: 6 additions & 6 deletions opennn/quasi_newton_method.cpp
@@ -177,7 +177,7 @@ void QuasiNewtonMethod::set_maximum_time(const type& new_maximum_time)
}


-void QuasiNewtonMethod::calculate_inverse_hessian_approximation(QuasiNewtonMehtodData& optimization_data) const
+void QuasiNewtonMethod::calculate_inverse_hessian_approximation(QuasiNewtonMethodData& optimization_data) const
{
switch(inverse_hessian_approximation_method)
{
@@ -195,7 +195,7 @@ void QuasiNewtonMethod::calculate_inverse_hessian_approximation(QuasiNewtonMehto
}


-void QuasiNewtonMethod::calculate_DFP_inverse_hessian(QuasiNewtonMehtodData& optimization_data) const
+void QuasiNewtonMethod::calculate_DFP_inverse_hessian(QuasiNewtonMethodData& optimization_data) const
{
const Tensor<type, 1>& parameters_difference = optimization_data.parameters_difference;
const Tensor<type, 1>& gradient_difference = optimization_data.gradient_difference;
@@ -234,7 +234,7 @@ void QuasiNewtonMethod::calculate_DFP_inverse_hessian(QuasiNewtonMehtodData& opt
}


-void QuasiNewtonMethod::calculate_BFGS_inverse_hessian(QuasiNewtonMehtodData& optimization_data) const
+void QuasiNewtonMethod::calculate_BFGS_inverse_hessian(QuasiNewtonMethodData& optimization_data) const
{
const Tensor<type, 1>& parameters_difference = optimization_data.parameters_difference;
const Tensor<type, 1>& gradient_difference = optimization_data.gradient_difference;
@@ -283,7 +283,7 @@ void QuasiNewtonMethod::update_parameters(
const Batch& batch,
ForwardPropagation& forward_propagation,
BackPropagation& back_propagation,
-QuasiNewtonMehtodData& optimization_data) const
+QuasiNewtonMethodData& optimization_data) const
{
Tensor<type, 1>& parameters = back_propagation.parameters;
const Tensor<type, 1>& gradient = back_propagation.gradient;
@@ -461,7 +461,7 @@ TrainingResults QuasiNewtonMethod::perform_training()
time(&beginning_time);
type elapsed_time;

-QuasiNewtonMehtodData optimization_data(this);
+QuasiNewtonMethodData optimization_data(this);

// Main loop

@@ -652,7 +652,7 @@ void QuasiNewtonMethod::from_XML(const XMLDocument& document)
}


-void QuasiNewtonMehtodData::set(QuasiNewtonMethod* new_quasi_newton_method)
+void QuasiNewtonMethodData::set(QuasiNewtonMethod* new_quasi_newton_method)
{
quasi_newton_method = new_quasi_newton_method;
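For context on what the two renamed routines compute: writing s_k for parameters_difference, y_k for gradient_difference, and H_k for the current inverse-Hessian approximation, the textbook updates are as follows. These are the standard forms, quoted for reference; the exact OpenNN tensor expressions are collapsed out of this diff.

    % s_k = parameters_difference, y_k = gradient_difference,
    % H_k = current inverse-Hessian approximation (standard forms,
    % not copied from the OpenNN source).
    \[
    H_{k+1}^{\mathrm{DFP}} = H_k
        + \frac{s_k s_k^{\top}}{s_k^{\top} y_k}
        - \frac{H_k y_k y_k^{\top} H_k}{y_k^{\top} H_k y_k}
    \]
    \[
    H_{k+1}^{\mathrm{BFGS}} = \left(I - \rho_k s_k y_k^{\top}\right) H_k
        \left(I - \rho_k y_k s_k^{\top}\right) + \rho_k s_k s_k^{\top},
    \qquad \rho_k = \frac{1}{y_k^{\top} s_k}
    \]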
14 changes: 7 additions & 7 deletions opennn/quasi_newton_method.h
@@ -15,7 +15,7 @@
namespace opennn
{

-struct QuasiNewtonMehtodData;
+struct QuasiNewtonMethodData;

class QuasiNewtonMethod : public OptimizationAlgorithm
{
@@ -67,13 +67,13 @@ class QuasiNewtonMethod : public OptimizationAlgorithm

// Training

-void calculate_DFP_inverse_hessian(QuasiNewtonMehtodData&) const;
+void calculate_DFP_inverse_hessian(QuasiNewtonMethodData&) const;

-void calculate_BFGS_inverse_hessian(QuasiNewtonMehtodData&) const;
+void calculate_BFGS_inverse_hessian(QuasiNewtonMethodData&) const;

-void calculate_inverse_hessian_approximation(QuasiNewtonMehtodData&) const;
+void calculate_inverse_hessian_approximation(QuasiNewtonMethodData&) const;

-void update_parameters(const Batch& , ForwardPropagation& , BackPropagation& , QuasiNewtonMehtodData&) const;
+void update_parameters(const Batch& , ForwardPropagation& , BackPropagation& , QuasiNewtonMethodData&) const;

TrainingResults perform_training() override;

@@ -109,10 +109,10 @@ class QuasiNewtonMethod : public OptimizationAlgorithm
};


-struct QuasiNewtonMehtodData : public OptimizationAlgorithmData
+struct QuasiNewtonMethodData : public OptimizationAlgorithmData
{

-QuasiNewtonMehtodData(QuasiNewtonMethod* new_quasi_newton_method = nullptr)
+QuasiNewtonMethodData(QuasiNewtonMethod* new_quasi_newton_method = nullptr)
{
set(new_quasi_newton_method);
}
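A minimal usage sketch for the renamed struct, assembled only from the signatures visible in this diff; it assumes a configured loss index (mean_squared_error here, as in the tests below) and shows nothing beyond what the header declares.

    // Sketch under assumptions: mean_squared_error is an already-constructed
    // loss index. The QuasiNewtonMethodData constructor forwards to set(),
    // which binds the optimization state to the method.
    QuasiNewtonMethod quasi_newton_method(&mean_squared_error);

    QuasiNewtonMethodData optimization_data(&quasi_newton_method);

    quasi_newton_method.calculate_inverse_hessian_approximation(optimization_data);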
5 changes: 2 additions & 3 deletions tests/conjugate_gradient_test.cpp
@@ -5,7 +5,7 @@

TEST(ConjugateGradientTest, DefaultConstructor)
{
-ConjugateGradient conjugate_gradient_1;
+ConjugateGradient conjugate_gradient;
// EXPECT_EQ(!conjugate_gradient_1.has_loss_index());

// ConjugateGradient conjugate_gradient_2(&mean_squared_error);
@@ -15,8 +15,7 @@ TEST(ConjugateGradientTest, DefaultConstructor)

TEST(ConjugateGradientTest, GeneralConstructor)
{
-ConjugateGradient conjugate_gradient_1;
-// EXPECT_EQ(!conjugate_gradient_1.has_loss_index());
+ConjugateGradient conjugate_gradient;

// ConjugateGradient conjugate_gradient_2(&mean_squared_error);
// EXPECT_EQ(conjugate_gradient_2.has_loss_index());
11 changes: 6 additions & 5 deletions tests/cross_entropy_error_test.cpp
@@ -8,18 +8,18 @@

TEST(CrossEntropyErrorTest, DefaultConstructor)
{
-/*
+
CrossEntropyError cross_entropy_error;

EXPECT_TRUE(!cross_entropy_error.has_data_set());
EXPECT_TRUE(!cross_entropy_error.has_neural_network());
-*/
+
}


TEST(CrossEntropyErrorTest, BackPropagateEmpty)
{
-/*
+
DataSet data_set;

Batch batch;
@@ -32,19 +32,20 @@ TEST(CrossEntropyErrorTest, BackPropagateEmpty)
BackPropagation back_propagation;

cross_entropy_error.back_propagate(batch, forward_propagation, back_propagation);
-*/
+
}


TEST(CrossEntropyErrorTest, BackPropagate)
{
-/*
+
const Index samples_number = get_random_index(1, 10);
const Index inputs_number = get_random_index(1, 10);
const Index targets_number = get_random_index(1, 10);
const Index neurons_number = get_random_index(1, 10);

DataSet data_set(samples_number, { inputs_number }, { targets_number });
+/*
data_set.set_data_classification();
data_set.set(DataSet::SampleUse::Training);
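For reference, the error these tests back-propagate is the cross-entropy between targets and network outputs; the standard binary form (quoted for orientation, not taken from the OpenNN source) is:

    % N samples, binary targets t_i, outputs o_i in (0, 1); the
    % multi-class variant sums t_{ij} ln o_{ij} over classes instead.
    \[
    E = -\frac{1}{N} \sum_{i=1}^{N}
        \left[\, t_i \ln o_i + (1 - t_i) \ln(1 - o_i) \,\right]
    \]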
24 changes: 0 additions & 24 deletions tests/inputs_selection_test.cpp

This file was deleted.

10 changes: 5 additions & 5 deletions tests/model_selection_test.cpp
@@ -22,14 +22,14 @@ TEST(ModelSelectionTest, GeneralConstructor)

TEST(ModelSelectionTest, NeuronsSelection)
{
-/*
-data_set.generate_sum_data(20, 2);

-neural_network.set(NeuralNetwork::ModelType::Approximation, { 1 }, { 2 }, { 1 });
+// data_set.generate_sum_data(20, 2);

-training_strategy.set_display(false);
+NeuralNetwork neural_network(NeuralNetwork::ModelType::Approximation, { 1 }, { 2 }, { 1 });
+/*
+//training_strategy.set_display(false);
-model_selection.set_display(false);
+//model_selection.set_display(false);
GrowingNeurons* incremental_neurons = model_selection.get_growing_neurons();
22 changes: 0 additions & 22 deletions tests/neurons_selection_test.cpp

This file was deleted.

20 changes: 8 additions & 12 deletions tests/probabilistic_layer_3d_test.cpp
@@ -171,15 +171,13 @@ TEST(ProbabilisticLayer3DTest, Activations)
*/
}

-/*
+
-void ProbabilisticLayer3DTest::test_calculate_activations()
+TEST(ProbabilisticLayer3DTest, SoftmaxDerivatives)
{
+/*
Tensor<type, 3>& activations;
Tensor<type, 4>& activation_derivatives;
-}
-bool ProbabilisticLayer3DTest::check_softmax_derivatives(Tensor<type, 3>& activations, Tensor<type, 4>& activation_derivatives) const
-{
for(Index i = 0; i < samples_number; i++)
{
for(Index j = 0; j < inputs_number; j++)
@@ -202,13 +200,13 @@ bool ProbabilisticLayer3DTest::check_softmax_derivatives(Tensor<type, 3>& activa
}
}
}
return true;
+*/
}


-void ProbabilisticLayer3DTest::test_forward_propagate()
+TEST(ProbabilisticLayer3DTest, ForwardPropagate)
{
+/*
bool is_training = true;
{
@@ -295,7 +293,5 @@ void ProbabilisticLayer3DTest::test_forward_propagate()
EXPECT_EQ(correct_outputs);
}
*/
-}
}
-*/
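The identity that the commented-out loop in SoftmaxDerivatives checks is the standard softmax Jacobian: for activations y = softmax(x), each output depends on every input through

    % delta_{ij} is the Kronecker delta; this is the standard identity
    % the loop compares activation_derivatives against.
    \[
    \frac{\partial y_i}{\partial x_j} = y_i \left( \delta_{ij} - y_j \right)
    \]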
21 changes: 15 additions & 6 deletions tests/quasi_newton_method_test.cpp
@@ -34,13 +34,16 @@ TEST(QuasiNewtonMethodTest, DFP)
data_set.set_data_random();

NeuralNetwork neural_network(NeuralNetwork::ModelType::Approximation, { inputs_number }, {}, { outputs_number });

neural_network.set_parameters_constant(type(1));
-/*
-quasi_newton_method_data.set(&quasi_newton_method);
-
-quasi_newton_method.calculate_DFP_inverse_hessian(quasi_newton_method_data);
+MeanSquaredError mean_squared_error(&neural_network, &data_set);
+
+QuasiNewtonMethod quasi_newton_method(&mean_squared_error);
+
+QuasiNewtonMethodData quasi_newton_method_data(&quasi_newton_method);
+
+quasi_newton_method.calculate_DFP_inverse_hessian(quasi_newton_method_data);
+/*
EXPECT_EQ(are_equal(quasi_newton_method_data.inverse_hessian, inverse_hessian, type(1e-4)));
*/
}
@@ -55,11 +58,17 @@ TEST(QuasiNewtonMethodTest, BGFS)

NeuralNetwork neural_network(NeuralNetwork::ModelType::Approximation, { inputs_number }, {}, { outputs_number });
neural_network.set_parameters_constant(type(1));
-/*
+
MeanSquaredError mean_squared_error(&neural_network);

mean_squared_error.set_regularization_method(LossIndex::RegularizationMethod::L2);

-quasi_newton_method.calculate_BFGS_inverse_hessian(quasi_newton_method_data);
+QuasiNewtonMethod quasi_newton_method(&mean_squared_error);
+
+QuasiNewtonMethodData quasi_newton_method_data(&quasi_newton_method);
+
+quasi_newton_method.calculate_BFGS_inverse_hessian(quasi_newton_method_data);
+/*
EXPECT_EQ(are_equal(BFGS_inverse_hessian, inverse_hessian, type(1e-4)));
*/
}
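Putting the pieces of this commit together, a minimal end-to-end run would look roughly like the following. This is a sketch assembled only from calls that appear in this diff (DataSet, NeuralNetwork, MeanSquaredError, QuasiNewtonMethod, perform_training); the dataset and model dimensions are illustrative, not taken from the repository.

    // Sketch only: mirrors the test setup above with illustrative sizes.
    DataSet data_set(100, { 4 }, { 1 });          // samples, inputs, targets
    data_set.set_data_random();

    NeuralNetwork neural_network(NeuralNetwork::ModelType::Approximation,
                                 { 4 }, {}, { 1 });
    neural_network.set_parameters_constant(type(1));

    MeanSquaredError mean_squared_error(&neural_network, &data_set);

    QuasiNewtonMethod quasi_newton_method(&mean_squared_error);

    // perform_training() constructs its own QuasiNewtonMethodData internally
    // (see QuasiNewtonMethodData optimization_data(this) in the .cpp diff above).
    TrainingResults results = quasi_newton_method.perform_training();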
42 changes: 11 additions & 31 deletions tests/recurrent_layer_test.cpp
@@ -1,44 +1,24 @@
#include "pch.h"

#include "../opennn/recurrent_layer.h"

TEST(RecurrentLayerTest, DefaultConstructor)
{
RecurrentLayer recurrent_layer;

-// EXPECT_EQ(quasi_newton_method.has_loss_index(), false);
+EXPECT_EQ(recurrent_layer.get_inputs_number(), 0);
}


TEST(RecurrentLayerTest, GeneralConstructor)
{
+const Index inputs_number = get_random_index(1, 10);
+const Index neurons_number = get_random_index(1, 10);
+const Index time_steps = get_random_index(1, 10);

-RecurrentLayer recurrent_layer;
-
-Index inputs_number;
-Index neurons_number;
-Index time_steps;
-/*
-// Test
+RecurrentLayer recurrent_layer({ inputs_number }, { neurons_number });

-inputs_number = 1;
-neurons_number = 1;
-time_steps = 1;
-recurrent_layer.set(inputs_number, neurons_number, time_steps);
-EXPECT_EQ(recurrent_layer.get_parameters_number() == 3);
-// Test
-inputs_number = 2;
-neurons_number = 3;
-recurrent_layer.set(inputs_number, neurons_number, time_steps);
-EXPECT_EQ(recurrent_layer.get_parameters_number() == 18);
-EXPECT_EQ(quasi_newton_method.has_loss_index(), true);
-*/
+// EXPECT_EQ(recurrent_layer.get_parameters_number(), 3);
}
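The expected counts in the old commented-out assertions (3 and 18) are consistent with a layer holding an input-weight matrix, a square recurrent-weight matrix, and one bias per neuron. That layout is an assumption, but it matches both data points:

    % Assumed layout: inputs x neurons input weights,
    % neurons x neurons recurrent weights, neurons biases.
    \[
    p = n_{\text{inputs}} \, n_{\text{neurons}} + n_{\text{neurons}}^2 + n_{\text{neurons}}
    \]
    % (inputs, neurons) = (1, 1): 1 + 1 + 1 = 3;
    % (inputs, neurons) = (2, 3): 6 + 9 + 3 = 18.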


@@ -59,8 +59,8 @@ TEST(RecurrentLayerTest, ForwardPropagate)
Tensor<type, 1> new_biases;

pair<type*, dimensions> input_pairs;
-/*
-recurrent_layer.set(inputs_number, neurons_number, time_steps);

+RecurrentLayer recurrent_layer({ inputs_number }, { neurons_number });

recurrent_layer.set_activation_function(RecurrentLayer::ActivationFunction::HyperbolicTangent);

@@ -69,8 +69,8 @@
recurrent_layer.set_parameters_constant(type(1));
inputs.setConstant(type(1));

-recurrent_layer_forward_propagation.set(samples_number, &recurrent_layer);
+RecurrentLayerForwardPropagation recurrent_layer_forward_propagation(samples_number, &recurrent_layer);
+/*
Tensor<type*, 1> input_data(1);
input_data(0) = inputs.data();