More improvements to NM
jeffheaton committed Apr 15, 2012
1 parent cf9f17c commit ee7a879
Showing 3 changed files with 11 additions and 11 deletions.
2 changes: 1 addition & 1 deletion encog-cmd/encog-cmd.vcxproj.user
@@ -1,7 +1,7 @@
 <?xml version="1.0" encoding="utf-8"?>
 <Project ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
 <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
-<LocalDebuggerCommandArguments>train c:\test\iris.eg c:\test\iris.egb /train:nm</LocalDebuggerCommandArguments>
+<LocalDebuggerCommandArguments>train d:\test\iris.eg d:\test\iris.egb /train:nm</LocalDebuggerCommandArguments>
 <DebuggerFlavor>WindowsLocalDebugger</DebuggerFlavor>
 </PropertyGroup>
 </Project>
5 changes: 5 additions & 0 deletions encog-core/encog.h
@@ -77,6 +77,11 @@ extern "C" {
 #define PARAM_MAXPOS "MAXPOS"
 #define PARAM_MAXVEL "MAXVEL"
 
+/* Nelder Mead */
+#define PARAM_STEP "STEP"
+#define PARAM_KONVERGE "KONVERGE"
+#define PARAM_REQMIN "REQMIN"
+
 #define ENCOG_TYPE_NEURAL_NETWORK 1
 #define ENCOG_TYPE_DATA 2
 #define ENCOG_TYPE_PSO 3
15 changes: 5 additions & 10 deletions encog-core/nm.c
@@ -374,6 +374,7 @@ static void _nelmin ( ENCOG_TRAIN_NM *nm, double start[], double xmin[] )
 {
 break;
 }
+
 /*
 Restart the procedure.
 */
@@ -411,16 +412,14 @@ ENCOG_TRAIN_NM *EncogTrainNMNew(ENCOG_NEURAL_NETWORK *network, ENCOG_DATA *data)
 result->data = data;
 result->targetNetwork = network;
 result->reportTarget = &EncogTrainStandardCallback;
-result->error = 100;
+result->error = 1.0;
 result->network = network;
-result->reqmin = 0.01;
-result->konvge = 100;
+result->step = EncogHashGetFloat(encogContext.config,PARAM_STEP,10.0);
+result->reqmin = EncogHashGetFloat(encogContext.config,PARAM_REQMIN, 1.0e-16);
+result->konvge = EncogHashGetInteger(encogContext.config,PARAM_KONVERGE,100);
 result->ifault = 0;
 memset(&result->currentReport,0,sizeof(ENCOG_TRAINING_REPORT));
 
 EncogNetworkRandomizeRange(network,-1,1);
-
-
 result->n = result->network->weightCount;
-result->step = 1;
 
@@ -434,7 +433,6 @@ float EncogTrainNMRun(ENCOG_TRAIN_NM *nm)
 {
 int n;
 ENCOG_DATA *data;
-double reqmin;
 double *start;
 double *xmin;
 
@@ -450,11 +448,8 @@ float EncogTrainNMRun(ENCOG_TRAIN_NM *nm)
 data = nm->data;
 n = nm->network->weightCount;
 start = (double*)EncogUtilDuplicateMemory(nm->network->weights,n,sizeof(REAL));
-reqmin = 0.001;
 xmin = (double*)EncogUtilAlloc(n,sizeof(double));
 
-nm->step = 1;
-
 _nelmin ( nm, start, xmin );
 
 nm->currentReport.error = nm->error;
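
Taken together, the nm.c changes mean the Nelder-Mead trainer now picks up its step, convergence-check interval, and minimum-improvement threshold from encogContext.config (keys PARAM_STEP, PARAM_REQMIN, PARAM_KONVERGE added in encog.h above) instead of hard-coded constants scattered through the constructor and run function. A minimal usage sketch, built only from calls visible in this diff; how network and data are created is assumed to happen elsewhere and is not part of this commit:

#include "encog.h"

/* Hedged sketch of driving the NM trainer as reworked in this commit.
   The network and data objects are assumed to be loaded elsewhere. */
float TrainWithNelderMead(ENCOG_NEURAL_NETWORK *network, ENCOG_DATA *data)
{
    ENCOG_TRAIN_NM *nm;

    /* The constructor now reads PARAM_STEP, PARAM_REQMIN and PARAM_KONVERGE
       from encogContext.config, falling back to 10.0, 1.0e-16 and 100. */
    nm = EncogTrainNMNew(network, data);

    /* Runs _nelmin() internally and copies the final error into
       nm->currentReport.error; presumably that error is also the return value. */
    return EncogTrainNMRun(nm);
}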
