Commit

unit test fixes
- autoencoder
- libsvr

jonpsy committed Dec 30, 2020
1 parent 839ead2 commit 5cc9b8e
Showing 2 changed files with 32 additions and 36 deletions.
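Both files apply the same change: test objects that were stack-allocated or created with raw new are now owned through std::make_shared, and training is folded into the call chain via the train()->as<T>() idiom, so each test no longer needs a separate train() statement. Below is a minimal, self-contained sketch of that ownership and chaining pattern; the Machine/Autoencoder types are hypothetical stand-ins that only mirror the shape of the Shogun calls in the hunks that follow, not the library's actual classes.

// Hypothetical stand-ins for the pattern used in the diffs below; these are
// not Shogun classes. They only demonstrate shared_ptr ownership plus the
// chained train()->as<T>()->... call shape that the updated tests rely on.
#include <cassert>
#include <memory>

struct Machine : std::enable_shared_from_this<Machine>
{
	virtual ~Machine() = default;

	// train() hands back the machine as a shared_ptr to the base class,
	// which is what allows chaining further calls after training.
	std::shared_ptr<Machine> train()
	{
		trained = true;
		return shared_from_this();
	}

	// as<T>() downcasts the shared base pointer, mirroring the as<T>()
	// helper used in the tests (e.g. ->as<Autoencoder>(), ->as<LibSVR>()).
	template <class T>
	std::shared_ptr<T> as()
	{
		return std::dynamic_pointer_cast<T>(shared_from_this());
	}

	bool trained = false;
};

struct Autoencoder : Machine
{
	double reconstruct() const { return trained ? 1.0 : 0.0; }
};

int main()
{
	// New style: shared ownership instead of a stack object or raw new,
	// so the test has no manual delete and no dangling pointer.
	auto ae = std::make_shared<Autoencoder>();

	// Train and use the model in one chained expression, as the updated
	// Autoencoder and LibSVR tests now do.
	double out = ae->train()->as<Autoencoder>()->reconstruct();
	assert(out == 1.0);
	return 0;
}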
58 changes: 28 additions & 30 deletions tests/unit/neuralnets/Autoencoder_unittest.cc
@@ -64,14 +64,12 @@ TEST(Autoencoder, train)
 auto decoding_layer = std::make_shared<NeuralLinearLayer>(num_features);
 hidden_layer->put("seed", seed);
 decoding_layer->put("seed", seed);
-Autoencoder ae(num_features, hidden_layer, decoding_layer);
-ae.put("seed", seed);
+auto ae = std::make_shared<Autoencoder>(num_features, hidden_layer, decoding_layer);
+ae->put("seed", seed);
 
 auto features = std::make_shared<DenseFeatures<float64_t>>(data);
 
-ae.train(features);
-
-auto reconstructed = ae.reconstruct(features);
+auto reconstructed = ae->train(features)->as<Autoencoder>()->reconstruct(features);
 SGMatrix<float64_t> reconstructed_data = reconstructed->get_feature_matrix();
 
 float64_t avg_diff = 0;
@@ -97,11 +95,11 @@ TEST(Autoencoder, contractive_linear)
 auto decoding_layer = std::make_shared<NeuralLinearLayer>(10);
 hidden_layer->put("seed", seed);
 decoding_layer->put("seed", seed);
-Autoencoder ae(10, hidden_layer, decoding_layer);
-ae.put("seed", seed);
-ae.set_contraction_coefficient(10.0);
+auto ae = std::make_shared<Autoencoder>(10, hidden_layer, decoding_layer);
+ae->put("seed", seed);
+ae->set_contraction_coefficient(10.0);
 
-EXPECT_NEAR(ae.check_gradients(), 0.0, tolerance);
+EXPECT_NEAR(ae->check_gradients(), 0.0, tolerance);
 }
 
 /** Tests gradients computed using backpropagation against gradients computed
@@ -117,11 +115,11 @@ TEST(Autoencoder, contractive_rectified_linear)
 auto decoding_layer = std::make_shared<NeuralLinearLayer>(10);
 hidden_layer->put("seed", seed);
 decoding_layer->put("seed", seed);
-Autoencoder ae(10, hidden_layer, decoding_layer);
-ae.put("seed", seed);
-ae.set_contraction_coefficient(10.0);
+auto ae = std::make_shared<Autoencoder>(10, hidden_layer, decoding_layer);
+ae->put("seed", seed);
+ae->set_contraction_coefficient(10.0);
 
-EXPECT_NEAR(ae.check_gradients(), 0.0, tolerance);
+EXPECT_NEAR(ae->check_gradients(), 0.0, tolerance);
 }
 
 /** Tests gradients computed using backpropagation against gradients computed
@@ -137,13 +135,13 @@ TEST(Autoencoder, contractive_logistic)
 auto decoding_layer = std::make_shared<NeuralLinearLayer>(10);
 hidden_layer->put("seed", seed);
 decoding_layer->put("seed", seed);
-Autoencoder ae(10, hidden_layer, decoding_layer);
-ae.put("seed", seed);
-ae.initialize_neural_network();
+auto ae = std::make_shared<Autoencoder>(10, hidden_layer, decoding_layer);
+ae->put("seed", seed);
+ae->initialize_neural_network();
 
-ae.set_contraction_coefficient(1.0);
+ae->set_contraction_coefficient(1.0);
 
-EXPECT_NEAR(ae.check_gradients(), 0.0, tolerance);
+EXPECT_NEAR(ae->check_gradients(), 0.0, tolerance);
 }
 
 /** Tests gradients computed using backpropagation against gradients computed
@@ -161,10 +159,10 @@ TEST(Autoencoder, convolutional)
 auto decoding_layer = std::make_shared<NeuralConvolutionalLayer>(CMAF_IDENTITY, 3, 1,1, 1,1, 1,1);
 hidden_layer->put("seed", seed);
 decoding_layer->put("seed", seed);
-Autoencoder ae(w,h,3,hidden_layer,decoding_layer);
-ae.put("seed", seed);
+auto ae = std::make_shared<Autoencoder>(w, h, 3, hidden_layer, decoding_layer);
+ae->put("seed", seed);
 
-EXPECT_NEAR(ae.check_gradients(), 0.0, tolerance);
+EXPECT_NEAR(ae->check_gradients(), 0.0, tolerance);
 }
 
 /** Tests gradients computed using backpropagation against gradients computed
@@ -182,10 +180,10 @@ TEST(Autoencoder, convolutional_with_pooling)
 auto decoding_layer = std::make_shared<NeuralConvolutionalLayer>(CMAF_IDENTITY, 3, 1,1, 1,1, 1,1);
 hidden_layer->put("seed", seed);
 decoding_layer->put("seed", seed);
-Autoencoder ae(w,h,3,hidden_layer,decoding_layer);
-ae.put("seed", seed);
+auto ae = std::make_shared<Autoencoder>(w, h, 3, hidden_layer, decoding_layer);
+ae->put("seed", seed);
 
-EXPECT_NEAR(ae.check_gradients(), 0.0, tolerance);
+EXPECT_NEAR(ae->check_gradients(), 0.0, tolerance);
 }
 
 /** Tests gradients computed using backpropagation against gradients computed
@@ -202,10 +200,10 @@ TEST(Autoencoder, convolutional_with_stride)
 auto decoding_layer = std::make_shared<NeuralConvolutionalLayer>(CMAF_IDENTITY, 3, 1,1, 1,1, 1,1);
 hidden_layer->put("seed", seed);
 decoding_layer->put("seed", seed);
-Autoencoder ae(w,h,3,hidden_layer,decoding_layer);
-ae.put("seed", seed);
+auto ae = std::make_shared<Autoencoder>(w, h, 3, hidden_layer, decoding_layer);
+ae->put("seed", seed);
 
-EXPECT_NEAR(ae.check_gradients(), 0.0, tolerance);
+EXPECT_NEAR(ae->check_gradients(), 0.0, tolerance);
 }
 
 /** Tests gradients computed using backpropagation against gradients computed
@@ -223,8 +221,8 @@ TEST(Autoencoder, convolutional_with_stride_and_pooling)
 auto decoding_layer = std::make_shared<NeuralConvolutionalLayer>(CMAF_IDENTITY, 3, 1,1, 1,1, 1,1);
 hidden_layer->put("seed", seed);
 decoding_layer->put("seed", seed);
-Autoencoder ae(w,h,3,hidden_layer,decoding_layer);
-ae.put("seed", seed);
+auto ae = std::make_shared<Autoencoder>(w, h, 3, hidden_layer, decoding_layer);
+ae->put("seed", seed);
 
-EXPECT_NEAR(ae.check_gradients(), 0.0, tolerance);
+EXPECT_NEAR(ae->check_gradients(), 0.0, tolerance);
 }
10 changes: 4 additions & 6 deletions tests/unit/regression/LibSVR_unittest.cc
@@ -54,12 +54,11 @@ TEST(LibSVR,epsilon_svr_apply)
 kernel->init(features_train, features_train);
 
 LIBSVR_SOLVER_TYPE st=LIBSVR_EPSILON_SVR;
-LibSVR* svm=new LibSVR(svm_C, svm_eps, kernel, labels_train, st);
-svm->train();
+auto svm = std::make_shared<LibSVR>(svm_C, svm_eps, kernel, labels_train, st);
 
 /* predict */
 auto predicted_labels =
-svm->apply(features_test)->as<RegressionLabels>();
+svm->train()->as<LibSVR>()->apply(features_test)->as<RegressionLabels>();
 
 /* LibSVM regression comparison (with easy.py script) */
 EXPECT_NEAR(predicted_labels->get_labels()[0], 2.44343, 1E-5);
@@ -119,12 +118,11 @@ TEST(LibSVR,nu_svr_apply)
 kernel->init(features_train, features_train);
 
 LIBSVR_SOLVER_TYPE st=LIBSVR_NU_SVR;
-LibSVR* svm=new LibSVR(svm_C, svm_nu, kernel, labels_train, st);
-svm->train();
+auto svm = std::make_shared<LibSVR>(svm_C, svm_nu, kernel, labels_train, st);
 
 /* predict */
 auto predicted_labels =
-svm->apply(features_test)->as<RegressionLabels>();
+svm->train()->as<LibSVR>()->apply(features_test)->as<RegressionLabels>();
 
 /* LibSVM regression comparison (with easy.py script) */
 EXPECT_NEAR(predicted_labels->get_labels()[0], 2.18062, 1E-5);