Commit

added constructor using iterators
steffennissen committed Nov 8, 2015
1 parent 6dfea22 commit df4baad
Showing 5 changed files with 53 additions and 10 deletions.
34 changes: 34 additions & 0 deletions src/include/fann_cpp.h
@@ -2,6 +2,7 @@
#define FANN_CPP_H_INCLUDED

#include <memory>
#include <iterator>
/*
* Fast Artificial Neural Network (fann) C++ Wrapper
* Copyright (C) 2004-2006 created by freegoldbar (at) yahoo dot com
@@ -106,6 +107,39 @@ namespace FANN {
assert(ann != NULL);
}

/* Constructor: neural_net(network_type_enum net_type, InputIterator layersBeginIterator, InputIterator layersEndIterator)
Creates a neural network of the desired <network_type_enum> net_type, based on an iterator range over the layer sizes.
Parameters:
net_type - The desired network type of the neural network
layersBeginIterator - iterator to the first element of a collection of unsigned int layer sizes
layersEndIterator - iterator one past the last element of that collection
Example:
>vector<unsigned int> layers{2, 3, 4, 5};
>neural_net net(LAYER, layers.begin(), layers.end());
This function appears in FANN >= 2.3.0.
*/
template <class InputIterator>
neural_net(network_type_enum net_type, InputIterator layersBeginIterator, InputIterator layersEndIterator) {
unsigned int num_layers = static_cast<unsigned int>(std::distance(layersBeginIterator, layersEndIterator));
unsigned int *layers = new unsigned int[num_layers];
std::copy(layersBeginIterator, layersEndIterator, layers);

switch (net_type){
case LAYER:
ann = fann_create_standard_array(num_layers, layers);
break;
case SHORTCUT:
ann = fann_create_shortcut_array(num_layers, layers);
break;
}
delete[] layers;
assert(ann != NULL);
}

/* Constructor: neural_net(network_type_enum net_type, unsigned int num_layers, ...)
Creates a neural network of the desired <network_type_enum> net_type.
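A minimal usage sketch of the new iterator-based constructor (my own illustration, not part of this commit; it assumes the usual include order of floatfann.h before fann_cpp.h and the FANN namespace):

#include "floatfann.h"
#include "fann_cpp.h"
#include <list>

int main() {
    // Any input-iterator range over unsigned int layer sizes works, not only std::vector.
    std::list<unsigned int> sizes{2, 3, 4, 5};
    FANN::neural_net layer_net(FANN::LAYER, sizes.begin(), sizes.end());

    // Plain pointers also satisfy the InputIterator requirement.
    unsigned int raw[] = {2, 3, 1};
    FANN::neural_net shortcut_net(FANN::SHORTCUT, raw, raw + 3);
    return 0;
}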
13 changes: 11 additions & 2 deletions tests/fann_test.cpp
@@ -1,5 +1,8 @@
#include <vector>
#include "fann_test.h"

using namespace std;

void FannTest::SetUp() {
//seed the random generator with a known value to ensure reproducible results
srand(0);
@@ -11,7 +14,7 @@ void FannTest::TearDown() {
data.destroy_train();
}

void FannTest::AssertCreate(neural_net &net, unsigned int numLayers, unsigned int *layers,
void FannTest::AssertCreate(neural_net &net, unsigned int numLayers, const unsigned int *layers,
unsigned int neurons, unsigned int connections) {
EXPECT_EQ(numLayers, net.get_num_layers());
EXPECT_EQ(layers[0], net.get_num_input());
@@ -29,7 +32,7 @@ void FannTest::AssertCreate(neural_net &net, unsigned int numLayers, unsigned in
AssertWeights(net, -0.09, 0.09, 0.0);
}

void FannTest::AssertCreateAndCopy(neural_net &net, unsigned int numLayers, unsigned int *layers, unsigned int neurons,
void FannTest::AssertCreateAndCopy(neural_net &net, unsigned int numLayers, const unsigned int *layers, unsigned int neurons,
unsigned int connections) {
AssertCreate(net, numLayers, layers, neurons, connections);
neural_net net_copy(net);
@@ -79,6 +82,12 @@ TEST_F(FannTest, CreateStandardFourLayersArrayUsingCreateMethod) {
AssertCreateAndCopy(net, 4, layers, 17, 50);
}

TEST_F(FannTest, CreateStandardFourLayersVector) {
vector<unsigned int> layers{2, 3, 4, 5};
neural_net net(LAYER, layers.begin(), layers.end());
AssertCreateAndCopy(net, 4, layers.data(), 17, 50);
}

TEST_F(FannTest, CreateSparseFourLayers) {
neural_net net(0.5, 4, 2, 3, 4, 5);
AssertCreateAndCopy(net, 4, (unsigned int[]){2, 3, 4, 5}, 17, 31);
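Because the constructor is templated on InputIterator, an analogous test could exercise a non-contiguous container as well. A hypothetical sketch, not part of this commit (it would additionally need #include <list>, and AssertCreateAndCopy still expects a contiguous unsigned int buffer):

TEST_F(FannTest, CreateStandardFourLayersList) {
    list<unsigned int> layers{2, 3, 4, 5};
    neural_net net(LAYER, layers.begin(), layers.end());
    // copy into a contiguous buffer for the assertion helper
    vector<unsigned int> flat(layers.begin(), layers.end());
    AssertCreateAndCopy(net, 4, flat.data(), 17, 50);
}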
4 changes: 2 additions & 2 deletions tests/fann_test.h
@@ -13,10 +13,10 @@ class FannTest : public testing::Test {
neural_net net;
training_data data;

void AssertCreateAndCopy(neural_net &net, unsigned int numLayers, unsigned int *layers, unsigned int neurons,
void AssertCreateAndCopy(neural_net &net, unsigned int numLayers, const unsigned int *layers, unsigned int neurons,
unsigned int connections);

void AssertCreate(neural_net &net, unsigned int numLayers, unsigned int *layers,
void AssertCreate(neural_net &net, unsigned int numLayers, const unsigned int *layers,
unsigned int neurons, unsigned int connections);

void AssertWeights(neural_net &net, fann_type min, fann_type max, fann_type avg);
8 changes: 4 additions & 4 deletions tests/fann_test_data.cpp
@@ -37,7 +37,7 @@ void FannTestData::InitializeTrainDataStructure(unsigned int numData,
}
}

void FannTestData::AssertTrainData(FANN::training_data &trainingData, unsigned int numData, unsigned int numInput,
void FannTestData::AssertTrainData(training_data &trainingData, unsigned int numData, unsigned int numInput,
unsigned int numOutput, fann_type inputValue, fann_type outputValue) {
EXPECT_EQ(numData, trainingData.length_train_data());
EXPECT_EQ(numInput, trainingData.num_input_train_data());
@@ -68,15 +68,15 @@ TEST_F(FannTestData, CreateTrainDataFromArrays) {

TEST_F(FannTestData, CreateTrainDataFromCopy) {
data.set_train_data(numData, numInput, inputData, numOutput, outputData);
FANN::training_data dataCopy(data);
training_data dataCopy(data);

AssertTrainData(dataCopy, numData, numInput, numOutput, inputValue, outputValue);
}

TEST_F(FannTestData, CreateTrainDataFromFile) {
data.set_train_data(numData, numInput, inputData, numOutput, outputData);
data.save_train("tmpFile");
FANN::training_data dataCopy;
training_data dataCopy;
dataCopy.read_train_from_file("tmpFile");

AssertTrainData(dataCopy, numData, numInput, numOutput, inputValue, outputValue);
@@ -104,7 +104,7 @@ TEST_F(FannTestData, ShuffleTrainData) {

TEST_F(FannTestData, MergeTrainData) {
data.set_train_data(numData, numInput, inputData, numOutput, outputData);
FANN::training_data dataCopy(data);
training_data dataCopy(data);
data.merge_train_data(dataCopy);
AssertTrainData(data, numData*2, numInput, numOutput, inputValue, outputValue);
}
4 changes: 2 additions & 2 deletions tests/fann_test_train.cpp
@@ -12,7 +12,7 @@ void FannTestTrain::TearDown() {
}

TEST_F(FannTestTrain, TrainOnDateSimpleXor) {
net.create_standard(3, 2, 3, 1);
neural_net net(LAYER, 3, 2, 3, 1);

data.set_train_data(4, 2, xorInput, 1, xorOutput);
net.train_on_data(data, 100, 100, 0.001);
@@ -22,7 +22,7 @@ TEST_F(FannTestTrain, TrainOnDateSimpleXor) {
}

TEST_F(FannTestTrain, TrainSimpleIncrementalXor) {
net.create_standard(3, 2, 3, 1);
neural_net net(LAYER, 3, 2, 3, 1);

for(int i = 0; i < 100000; i++) {
net.train((fann_type[]) {0.0, 0.0}, (fann_type[]) {0.0});
