User aggregators #95

Open
wants to merge 31 commits into master
Changes from 1 commit
31 commits
917e118
Merge branch 'master' into boa_evaluator
Jan 31, 2017
a6e1c3d
Initial commit for User Defined Aggregation support
Feb 1, 2017
a31e918
Fixing bug: Only one user defined aggregator runs in the presence of…
Feb 2, 2017
e4b0f05
Removing unnecessary prints
Feb 2, 2017
ba6b2ff
Fixing a bug: Filter non-aggregator functions from list
Feb 2, 2017
1829489
Merge branch 'boa_evaluator' into user_Aggregators
Feb 2, 2017
de27178
Fixing a test case as code generation has changed.
Feb 2, 2017
a7fd7b6
Updating latest code generation string template.
Feb 2, 2017
81e8f37
Fixing bug in UserDefinedCode generating process. Fixing fullyqualifi…
Feb 3, 2017
2972425
adding naive bayes example using user defined aggregation
Feb 3, 2017
b161ab2
Allowing creation of arrays of nested and complex types
Feb 12, 2017
81af781
Adding capability to convert a tuple into array if possible. If tuple…
Feb 12, 2017
3295b15
code for matrix transpose, inverse, summation and subtraction suppor…
Feb 13, 2017
a05a385
Adding machine learning example code in test directory
Feb 14, 2017
ff5b37b
Adding matrix operations
Feb 14, 2017
6b6aa9f
Fixing bug in getCol method in matrix operations
Feb 15, 2017
0da11ef
linear regression optimized and unoptimized code
Feb 18, 2017
737060d
adding neural network without back propagation
Feb 19, 2017
db0a04f
Changes in MatrixOperations and Adding Print facility for debugging H…
Feb 19, 2017
98eb3ac
removing merge conflicts
Feb 19, 2017
fb23150
adding back propagation in neural
Feb 20, 2017
491adfc
adding pca
Feb 21, 2017
72711ce
adding optimized pca
Feb 22, 2017
edf12ff
adding new machine learning algorithms
Feb 22, 2017
656775d
Adding changes to support options as user defined aggregations
Feb 26, 2017
e691a5b
Changes to support serialization of ml model in Boa
Feb 26, 2017
390fc86
Storing the class as part of the model
Feb 26, 2017
6ecf209
Adding serialization support for the model using simple json
Feb 26, 2017
fec8ee8
adding support for loading ml model
Feb 27, 2017
739eb3c
Allowing options in user defined aggregator class
Mar 2, 2017
459000f
adding training model usage
Mar 3, 2017
adding neural network without back propagation
nmtiwari committed Feb 19, 2017
commit 737060d515861d064cfc07ecbbcd1ec01961fbfe
179 changes: 179 additions & 0 deletions test/ml/neural.boa
@@ -0,0 +1,179 @@
p: Project = input;
testing : output sum of int;

type Connection = {cweight : float, prevDeltaWeight : float, deltaWeight : float, leftNeuron : int, rightNeuron : int};
type Neuron = {id: int, bias : float , outputVal : float, biasConnection : Connection, inConnection : array of Connection};
type emitvals = {inp:array of float, expected: array of float};
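# Connection is a weighted edge between two neurons (referenced by id);
# Neuron carries a bias, its last computed output, and its incoming
# connections; emitvals pairs a training input with its expected outputs.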
testing << 1;

neuralNetworks := function(vals : array of emitvals) : float {
runs : int = 500;
thresholdError := 0.001;
id_counter : int = 0;

dummyConnection: Connection = {-1.0, -1.0, -1.0, -1, -1};
dummyConnArray : array of Connection = {dummyConnection};

biasNeuron : Neuron = {id_counter, 0.0, 0.0, dummyConnection, dummyConnArray};
dummyArray : array of float = {0.0};
resultOutputs: array of array of float;
resultOutputs = new(resultOutputs, len(vals), dummyArray);
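# resultOutputs[n] will hold the network's outputs for sample n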

learningRate : float = 0.9;
momentum : float = 0.7;
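# learningRate and momentum are declared for weight updates, but this
# commit is feed-forward only; back propagation arrives in a later commit.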
layers : array of int = {2, 4, 1};
totalLayers := len(layers); # inputLayer = 0, hiddenLayer = 1, outputLayer = 2


inputLayer : array of Neuron = {biasNeuron, biasNeuron};
hiddenLayer : array of Neuron = {biasNeuron, biasNeuron, biasNeuron, biasNeuron};
outputLayer : array of Neuron = {biasNeuron};
neuronRecorder : map[int] of Neuron;
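# neuronRecorder maps each neuron id to its Neuron, so connections can
# look up their left-hand neuron during the forward pass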

neuronRecorder[id_counter] = biasNeuron;
id_counter = id_counter + 1;


for(i :int = 0; i < totalLayers; i++) {
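# every hidden/output neuron is wired with one randomly weighted
# Connection from each neuron in the previous layer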
# input layer
if(i == 0) {
for(s :int = 0; s < layers[i]; s++) {
inputLayer[s] = {id_counter, 0.0, 0.0, dummyConnection, dummyConnArray};
neuronRecorder[id_counter] = inputLayer[s];
id_counter = id_counter + 1;
}
}
# hidden layer
if(i == 1) {
for(z :int = 0; z < layers[i]; z++) {
cons : array of Connection = {dummyConnection, dummyConnection};
#cons = new(cons, layers[0], dummyConnection);
node : Neuron = {id_counter, 0.0, 0.0, dummyConnection, cons};

# add connections
foreach(k: int; def(inputLayer[k])) {
localConnection: Connection = {rand(), 0.0, 0.0, inputLayer[k].id, node.id}; # assigns random connweight to the connection
node.inConnection[k] = localConnection;
}
neuronRecorder[id_counter] = node;
id_counter++;

#addInConnection(node, inputLayer);
hiddenLayer[z] = node;
}
}
# output layer
if(i == 2) {
for(j :int = 0; j < layers[i]; j++) {
cons1 : array of Connection = {dummyConnection, dummyConnection, dummyConnection, dummyConnection};
#cons1 = new(cons1, layers[1]);
node1 : Neuron = {id_counter, 0.0, 0.0, dummyConnection, cons1};

# add connections
foreach(k: int; def(hiddenLayer[k])) {
con1 : Connection = {rand(), 0.0, 0.0, hiddenLayer[k].id, node1.id}; # assigns random connweight to the connection
node1.inConnection[k] = con1;
}

neuronRecorder[id_counter] = node1;
id_counter++;
#addInConnection(node, hiddenLayer);
outputLayer[j] = node1;
}
}

}


error : float = 1.0;
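# Training loop: for each of `runs` epochs, run a forward pass over every
# sample and accumulate the squared error of the outputs.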
for(m: int = 0; m < runs; m++) {
error = 0.0;
foreach(n : int; def(vals[n])) {
valueEmitted: emitvals = vals[n];
# set the input variables for jth value from values
foreach(k: int; def(inputLayer[k])) {
# there is one to one mapping in input neurons and number of features in each value
print("m");
print(m); print("k"); print(k);
d: float = vals[n].inp[k];
inputLayer[k].outputVal = d;
}

# activate the neurons for the forward propagation
# calculate the output of each hiddenLayer Neuron
foreach(k : int; def(hiddenLayer[k])) {
node2: Neuron = hiddenLayer[k];
intermediateResult : float = 0.0;
connections :array of Connection = node2.inConnection;
foreach(l: int; def(connections[l])) {
print(neuronRecorder);
printany(connections[l].leftNeuron);
left: Neuron = neuronRecorder[connections[l].leftNeuron];
connweight : float = connections[l].cweight;
intermediateResult = intermediateResult + (connweight * left.outputVal);
}
intermediateResult = intermediateResult + (node2.biasConnection.cweight * node2.bias);
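# logistic (sigmoid) activation: 1 / (1 + e^-x)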
node2.outputVal = 1.0 / (1.0 + exp(-intermediateResult));
#calculateOutput(hiddenLayer[i]);
}
# calculate the output of each outputLayer Neuron
foreach(k : int; def(outputLayer[k])) {
node3:Neuron = outputLayer[k];
intermediateResult1 : float = 0.0;
connections1 :array of Connection = node3.inConnection;
foreach(l: int; def(connections1[l])) {
left1: Neuron = neuronRecorder[connections1[l].leftNeuron];
connweight1 : float = connections1[l].cweight;
intermediateResult1 = intermediateResult1 + (connweight1 * left1.outputVal);
}
intermediateResult1 = intermediateResult1 + (node3.biasConnection.cweight * node3.bias);
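# same logistic activation as the hidden layer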
node3.outputVal = 1.0 / (1.0 + exp(-intermediateResult1));
#calculateOutput(outputLayer[i]);
}

# output results of each loop
outputR : array of float;
outputR = new(outputR, len(outputLayer), 0.0);
foreach(l: int; def(outputLayer[l])) {
outputR[l] = outputLayer[l].outputVal;
}

resultOutputs[n] = outputR;

#calculate error
expectations :array of float = vals[n].expected;
foreach(l: int; def(expectations[l])) {
err : float = pow(outputR[l] - vals[n].expected[l], 2);
error = error + err;
}
}
}
return outputLayer[0].outputVal;
};


neuralNetwork : output neuralNetworks of emitvals;
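# This output is backed by the user-defined aggregator above: every value
# emitted to neuralNetwork below is collected into the array passed to
# neuralNetworks(). The four samples form the XOR truth table:
# (1,1)->0, (1,0)->1, (0,1)->1, (0,0)->0.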

inps: array of float = {1.0, 1.0};
out: array of float = {0.0};

result: emitvals = {inps, out};
neuralNetwork << result;

inps = {1.0, 0.0};
out = {1.0};

result = {inps, out};
neuralNetwork << result;

inps = {0.0, 1.0};
out = {1.0};

result = {inps, out};
neuralNetwork << result;

inps = {0.0, 0.0};
out = {0.0};

result = {inps, out};
neuralNetwork << result;