本文整理汇总了C++中NumericalDifferentiation::calculate_Jacobian方法的典型用法代码示例。如果您正苦于以下问题:C++ NumericalDifferentiation::calculate_Jacobian方法的具体用法?C++ NumericalDifferentiation::calculate_Jacobian怎么用?C++ NumericalDifferentiation::calculate_Jacobian使用的例子?那么, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类NumericalDifferentiation
的用法示例。
在下文中一共展示了NumericalDifferentiation::calculate_Jacobian方法的5个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C++代码示例。
示例1: test_calculate_Jacobian
void ProbabilisticLayerTest::test_calculate_Jacobian(void)
{
message += "test_calculate_Jacobian\n";
NumericalDifferentiation nd;
ProbabilisticLayer pl;
Vector<double> inputs;
Matrix<double> Jacobian;
Matrix<double> numerical_Jacobian;
// Test
if(numerical_differentiation_tests)
{
pl.set_probabilistic_method(ProbabilisticLayer::Softmax);
pl.set(3);
inputs.set(3);
inputs.randomize_normal();
Jacobian = pl.calculate_Jacobian(inputs);
numerical_Jacobian = nd.calculate_Jacobian(pl, &ProbabilisticLayer::calculate_outputs, inputs);
assert_true((Jacobian-numerical_Jacobian).calculate_absolute_value() < 1.0e-3, LOG);
}
}
示例2: test_calculate_terms_Jacobian
// Unit test for SumSquaredError::calculate_terms_Jacobian: checks the shape
// and zero value of the terms Jacobian for zero-initialized networks/data,
// then compares it against a numerically-differentiated Jacobian.
// NOTE(review): the tail of this listing is omitted on the source page, so
// the function body is incomplete here (no closing brace).
void SumSquaredErrorTest::test_calculate_terms_Jacobian(void)
{
message += "test_calculate_terms_Jacobian\n";
NumericalDifferentiation nd;
NeuralNetwork nn;
Vector<size_t> architecture;
Vector<double> parameters;
DataSet ds;
SumSquaredError sse(&nn, &ds);
Vector<double> gradient;
Vector<double> terms;
Matrix<double> terms_Jacobian;
Matrix<double> numerical_Jacobian_terms;
// Test: zero parameters and zero data -> Jacobian has one row per instance,
// one column per network parameter, and all entries equal to zero.
if(numerical_differentiation_tests)
{
}
nn.set(1, 1);
nn.initialize_parameters(0.0);
ds.set(1, 1, 1);
ds.initialize_data(0.0);
terms_Jacobian = sse.calculate_terms_Jacobian();
assert_true(terms_Jacobian.get_rows_number() == ds.get_instances().get_instances_number(), LOG);
assert_true(terms_Jacobian.get_columns_number() == nn.count_parameters_number(), LOG);
assert_true(terms_Jacobian == 0.0, LOG);
// Test: same zero check for a 3-4-2 network on a 5-instance data set
// (rows now counted over training instances only).
nn.set(3, 4, 2);
nn.initialize_parameters(0.0);
ds.set(3, 2, 5);
sse.set(&nn, &ds);
ds.initialize_data(0.0);
terms_Jacobian = sse.calculate_terms_Jacobian();
assert_true(terms_Jacobian.get_rows_number() == ds.get_instances().count_training_instances_number(), LOG);
assert_true(terms_Jacobian.get_columns_number() == nn.count_parameters_number(), LOG);
assert_true(terms_Jacobian == 0.0, LOG);
// Test: zero check for an explicit 5-1-2 architecture.
architecture.set(3);
architecture[0] = 5;
architecture[1] = 1;
architecture[2] = 2;
nn.set(architecture);
nn.initialize_parameters(0.0);
ds.set(5, 2, 3);
sse.set(&nn, &ds);
ds.initialize_data(0.0);
terms_Jacobian = sse.calculate_terms_Jacobian();
assert_true(terms_Jacobian.get_rows_number() == ds.get_instances().count_training_instances_number(), LOG);
assert_true(terms_Jacobian.get_columns_number() == nn.count_parameters_number(), LOG);
assert_true(terms_Jacobian == 0.0, LOG);
// Test: random 1-1-1 network and data; analytical terms Jacobian must match
// the numerical Jacobian of calculate_terms within tolerance.
nn.set(1, 1, 1);
nn.randomize_parameters_normal();
parameters = nn.arrange_parameters();
ds.set(1, 1, 1);
ds.randomize_data_normal();
terms_Jacobian = sse.calculate_terms_Jacobian();
numerical_Jacobian_terms = nd.calculate_Jacobian(sse, &SumSquaredError::calculate_terms, parameters);
assert_true((terms_Jacobian-numerical_Jacobian_terms).calculate_absolute_value() < 1.0e-3, LOG);
// Test: same comparison for a random 2-2-2 network.
nn.set(2, 2, 2);
nn.randomize_parameters_normal();
parameters = nn.arrange_parameters();
ds.set(2, 2, 2);
ds.randomize_data_normal();
terms_Jacobian = sse.calculate_terms_Jacobian();
numerical_Jacobian_terms = nd.calculate_Jacobian(sse, &SumSquaredError::calculate_terms, parameters);
assert_true((terms_Jacobian-numerical_Jacobian_terms).calculate_absolute_value() < 1.0e-3, LOG);
// Test
//......... (remainder of the code is omitted on the source page) .........
示例3: test_calculate_Jacobian_terms
// Unit test for MeanSquaredError::calculate_terms_Jacobian: checks shape and
// zero value for zero-initialized networks/data, then compares the analytical
// Jacobian with one obtained by numerical differentiation.
// NOTE(review): the tail of this listing is omitted on the source page, so
// the function body is incomplete here (no closing brace).
void MeanSquaredErrorTest::test_calculate_Jacobian_terms(void)
{
message += "test_calculate_Jacobian_terms\n";
NumericalDifferentiation nd;
NeuralNetwork nn;
Vector<unsigned> multilayer_perceptron_architecture;
Vector<double> parameters;
DataSet ds;
MeanSquaredError mse(&nn, &ds);
Vector<double> objective_gradient;
Vector<double> evaluation_terms;
Matrix<double> terms_Jacobian;
Matrix<double> numerical_Jacobian_terms;
// Test: zero parameters and zero data -> Jacobian has one row per training
// instance, one column per network parameter, and all entries zero.
nn.set(1, 1);
nn.initialize_parameters(0.0);
ds.set(1, 1, 1);
ds.initialize_data(0.0);
terms_Jacobian = mse.calculate_terms_Jacobian();
assert_true(terms_Jacobian.get_rows_number() == ds.get_instances().count_training_instances_number(), LOG);
assert_true(terms_Jacobian.get_columns_number() == nn.count_parameters_number(), LOG);
assert_true(terms_Jacobian == 0.0, LOG);
// Test: same zero check for a 3-4-2 network on a 5-instance data set.
nn.set(3, 4, 2);
nn.initialize_parameters(0.0);
ds.set(3, 2, 5);
mse.set(&nn, &ds);
ds.initialize_data(0.0);
terms_Jacobian = mse.calculate_terms_Jacobian();
assert_true(terms_Jacobian.get_rows_number() == ds.get_instances().count_training_instances_number(), LOG);
assert_true(terms_Jacobian.get_columns_number() == nn.count_parameters_number(), LOG);
assert_true(terms_Jacobian == 0.0, LOG);
// Test: zero check for an explicit 2-1-2 architecture.
multilayer_perceptron_architecture.set(3);
multilayer_perceptron_architecture[0] = 2;
multilayer_perceptron_architecture[1] = 1;
multilayer_perceptron_architecture[2] = 2;
nn.set(multilayer_perceptron_architecture);
nn.initialize_parameters(0.0);
ds.set(2, 2, 5);
mse.set(&nn, &ds);
ds.initialize_data(0.0);
terms_Jacobian = mse.calculate_terms_Jacobian();
assert_true(terms_Jacobian.get_rows_number() == ds.get_instances().count_training_instances_number(), LOG);
assert_true(terms_Jacobian.get_columns_number() == nn.count_parameters_number(), LOG);
assert_true(terms_Jacobian == 0.0, LOG);
// Test: random 1-1-1 network; analytical terms Jacobian must match the
// numerical Jacobian of calculate_terms within tolerance.
nn.set(1, 1, 1);
nn.randomize_parameters_normal();
parameters = nn.arrange_parameters();
ds.set(1, 1, 1);
ds.randomize_data_normal();
terms_Jacobian = mse.calculate_terms_Jacobian();
numerical_Jacobian_terms = nd.calculate_Jacobian(mse, &MeanSquaredError::calculate_terms, parameters);
assert_true((terms_Jacobian-numerical_Jacobian_terms).calculate_absolute_value() < 1.0e-3, LOG);
// Test: same comparison for a random 2-2-2 network.
nn.set(2, 2, 2);
nn.randomize_parameters_normal();
parameters = nn.arrange_parameters();
ds.set(2, 2, 2);
ds.randomize_data_normal();
terms_Jacobian = mse.calculate_terms_Jacobian();
numerical_Jacobian_terms = nd.calculate_Jacobian(mse, &MeanSquaredError::calculate_terms, parameters);
assert_true((terms_Jacobian-numerical_Jacobian_terms).calculate_absolute_value() < 1.0e-3, LOG);
// Test
//......... (remainder of the code is omitted on the source page) .........
示例4: test_calculate_Jacobian
// Unit test for NeuralNetwork::calculate_Jacobian: zero-parameter networks
// must produce an all-zero Jacobian, and the analytical Jacobian must agree
// with a numerically-differentiated one.
// NOTE(review): the tail of this listing is omitted on the source page, so
// the function body is incomplete here (no closing brace).
void NeuralNetworkTest::test_calculate_Jacobian(void) {
message += "test_calculate_Jacobian\n";
// One layer
NeuralNetwork nn;
Vector<unsigned> multilayer_perceptron_architecture;
Vector<double> inputs;
Matrix<double> Jacobian;
// Vector<double> inputs_minimum;
// Vector<double> inputs_maximum;
// Vector<double> inputs_mean;
// Vector<double> inputs_standard_deviation;
// Vector<double> outputs_minimum;
// Vector<double> outputs_maximum;
// Vector<double> outputs_mean;
// Vector<double> outputs_standard_deviation;
// mmlp.set_display(false);
NumericalDifferentiation nd;
Matrix<double> numerical_Jacobian;
// Test: 1-1-1 network with zero parameters -> zero Jacobian at the origin.
nn.set(1, 1, 1);
nn.initialize_parameters(0.0);
inputs.set(1, 0.0);
Jacobian = nn.calculate_Jacobian(inputs);
assert_true(Jacobian == 0.0, LOG);
// Test: 3-4-2 network with zero parameters -> zero Jacobian.
nn.set(3, 4, 2);
nn.initialize_parameters(0.0);
inputs.set(3, 0.0);
Jacobian = nn.calculate_Jacobian(inputs);
assert_true(Jacobian == 0.0, LOG);
// Test: compare against numerical differentiation of calculate_outputs.
if (numerical_differentiation_tests) {
nn.set(3, 4, 2);
nn.initialize_parameters(0.0);
inputs.set(3, 0.0);
Jacobian = nn.calculate_Jacobian(inputs);
numerical_Jacobian =
nd.calculate_Jacobian(nn, &NeuralNetwork::calculate_outputs, inputs);
assert_true(
(Jacobian - numerical_Jacobian).calculate_absolute_value() < 1.0e-3,
LOG);
}
// Test: uniform architecture set via the (size, value) Vector constructor.
multilayer_perceptron_architecture.set(3, 1);
nn.set(multilayer_perceptron_architecture);
nn.initialize_parameters(0.0);
inputs.set(1, 0.0);
Jacobian = nn.calculate_Jacobian(inputs);
assert_true(Jacobian == 0.0, LOG);
// Test: explicit 3-4-1 architecture with zero parameters -> zero Jacobian.
multilayer_perceptron_architecture.set(3);
multilayer_perceptron_architecture[0] = 3;
multilayer_perceptron_architecture[1] = 4;
multilayer_perceptron_architecture[2] = 1;
nn.set(multilayer_perceptron_architecture);
nn.initialize_parameters(0.0);
inputs.set(3, 0.0);
Jacobian = nn.calculate_Jacobian(inputs);
assert_true(Jacobian == 0.0, LOG);
// Test: 3-4-1 network evaluated at a non-zero input point, compared against
// the numerical Jacobian. NOTE(review): parameters are not re-initialized
// here, so the network keeps whatever parameters the previous set() produced.
if (numerical_differentiation_tests) {
multilayer_perceptron_architecture.set(3);
multilayer_perceptron_architecture[0] = 3;
multilayer_perceptron_architecture[1] = 4;
multilayer_perceptron_architecture[2] = 1;
nn.set(multilayer_perceptron_architecture);
inputs.set(3);
inputs[0] = 0.0;
inputs[1] = 1.0;
inputs[2] = 2.0;
Jacobian = nn.calculate_Jacobian(inputs);
numerical_Jacobian =
nd.calculate_Jacobian(nn, &NeuralNetwork::calculate_outputs, inputs);
//......... (remainder of the code is omitted on the source page) .........
示例5: test_calculate_Jacobian_terms
/// Checks the terms Jacobian of the normalized squared error: the analytical
/// Jacobian is compared against one obtained by numerical differentiation,
/// and the sum-of-squares gradient identity gradient = 2 * J^T * terms is
/// verified.
/// Fix: removed the unused local `Vector<int> hidden_layers_size;`.

void NormalizedSquaredErrorTest::test_calculate_Jacobian_terms(void)
{
   message += "test_calculate_Jacobian_terms\n";

   NumericalDifferentiation nd;

   NeuralNetwork nn;
   Vector<double> network_parameters;

   DataSet ds;

   NormalizedSquaredError nse(&nn, &ds);

   Vector<double> objective_gradient;
   Vector<double> evaluation_terms;
   Matrix<double> terms_Jacobian;
   Matrix<double> numerical_Jacobian_terms;

   // Test: random 1-1 network on 2 random instances; analytical terms
   // Jacobian must match the numerical Jacobian of calculate_terms.

   nn.set(1, 1);
   nn.randomize_parameters_normal();
   network_parameters = nn.arrange_parameters();

   ds.set(1, 1, 2);
   ds.randomize_data_normal();

   terms_Jacobian = nse.calculate_terms_Jacobian();
   numerical_Jacobian_terms = nd.calculate_Jacobian(nse, &NormalizedSquaredError::calculate_terms, network_parameters);

   assert_true((terms_Jacobian-numerical_Jacobian_terms).calculate_absolute_value() < 1.0e-3, LOG);

   // Test: same comparison for a random 2-2-2 network.

   nn.set(2, 2, 2);
   nn.randomize_parameters_normal();
   network_parameters = nn.arrange_parameters();

   ds.set(2, 2, 2);
   ds.randomize_data_normal();

   terms_Jacobian = nse.calculate_terms_Jacobian();
   numerical_Jacobian_terms = nd.calculate_Jacobian(nse, &NormalizedSquaredError::calculate_terms, network_parameters);

   assert_true((terms_Jacobian-numerical_Jacobian_terms).calculate_absolute_value() < 1.0e-3, LOG);

   // Test: gradient consistency — for a sum-of-squares objective the
   // gradient equals 2 * J^T * terms.

   nn.set(2,2,2);
   nn.randomize_parameters_normal();

   ds.set(2,2,2);
   ds.randomize_data_normal();

   objective_gradient = nse.calculate_gradient();
   evaluation_terms = nse.calculate_terms();
   terms_Jacobian = nse.calculate_terms_Jacobian();

   assert_true(((terms_Jacobian.calculate_transpose()).dot(evaluation_terms)*2.0 - objective_gradient).calculate_absolute_value() < 1.0e-3, LOG);
}