Gustav Louw 2018-04-03 15:12:36 -07:00
parent 4b3a45af8d
commit a5913774b9
3 changed files with 16 additions and 14 deletions

Tinn.c (20 changes)

@@ -18,7 +18,7 @@ static float pderr(const float a, const float b)
 }
 // Total error.
-static float terr(const float* const tg, const float* const o, const int size)
+static float toterr(const float* const tg, const float* const o, const int size)
 {
     float sum = 0.0f;
     for(int i = 0; i < size; i++)
@@ -45,7 +45,7 @@ static float frand()
 }
 // Back propagation.
-static void backwards(const Tinn t, const float* const in, const float* const tg, float rate)
+static void bprop(const Tinn t, const float* const in, const float* const tg, float rate)
 {
     for(int i = 0; i < t.nhid; i++)
     {
@@ -66,7 +66,7 @@ static void backwards(const Tinn t, const float* const in, const float* const tg
 }
 // Forward propagation.
-static void forewards(const Tinn t, const float* const in)
+static void fprop(const Tinn t, const float* const in)
 {
     // Calculate hidden layer neuron values.
     for(int i = 0; i < t.nhid; i++)
@@ -121,17 +121,17 @@ static void* ecalloc(const size_t nmemb, const size_t size)
     return mem;
 }
-float* xpredict(const Tinn t, const float* const in)
+float* xtpredict(const Tinn t, const float* const in)
 {
-    forewards(t, in);
+    fprop(t, in);
     return t.o;
 }
 float xttrain(const Tinn t, const float* const in, const float* const tg, float rate)
 {
-    forewards(t, in);
-    backwards(t, in, tg, rate);
-    return terr(tg, t.o, t.nops);
+    fprop(t, in);
+    bprop(t, in, tg, rate);
+    return toterr(tg, t.o, t.nops);
 }
 Tinn xtbuild(const int nips, const int nhid, const int nops)
@@ -154,7 +154,7 @@ Tinn xtbuild(const int nips, const int nhid, const int nops)
 void xtsave(const Tinn t, const char* const path)
 {
-    FILE* const file = efopen(path, "w");
+    FILE* const file = efopen(path, "wb");
     // Header.
     fprintf(file, "%d %d %d\n", t.nips, t.nhid, t.nops);
     // Biases and weights.
@@ -165,7 +165,7 @@ void xtsave(const Tinn t, const char* const path)
 Tinn xtload(const char* const path)
 {
-    FILE* const file = efopen(path, "r");
+    FILE* const file = efopen(path, "rb");
     int nips = 0;
     int nhid = 0;
     int nops = 0;
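The switch from "w"/"r" to "wb"/"rb" matters on platforms that translate line endings in text mode: a network saved with xtsave reads back byte-for-byte with xtload. A minimal round-trip sketch, assuming only the signatures shown in this diff; the file name "round-trip.tinn" and the 2-4-1 layer sizes are made up for illustration:

/* Save, reload, and predict the same input twice. The two outputs
 * should agree up to the precision xtsave writes to disk. */
#include "Tinn.h"
#include <stdio.h>

int main(void)
{
    float in[2] = { 0.5f, 0.25f };
    const Tinn t = xtbuild(2, 4, 1);
    const float before = xtpredict(t, in)[0];
    xtsave(t, "round-trip.tinn");
    const Tinn loaded = xtload("round-trip.tinn");
    const float after = xtpredict(loaded, in)[0];
    printf("before %f :: after %f\n", (double) before, (double) after);
    return 0;
}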

Tinn.h (4 changes)

@@ -19,7 +19,7 @@ Tinn;
 // Trains a tinn with an input and target output with a learning rate.
 // Returns error rate of the neural network.
-float xttrain(const Tinn, const float* in, const float* tg, float rate);
+float xttrain(Tinn, const float* in, const float* tg, float rate);
 // Builds a new tinn object given number of inputs (nips),
 // number of hidden neurons for the hidden layer (nhid),
@@ -27,7 +27,7 @@ float xttrain(const Tinn, const float* in, const float* tg, float rate);
 Tinn xtbuild(int nips, int nhid, int nops);
 // Returns an output prediction given an input.
-float* xpredict(const Tinn, const float* in);
+float* xtpredict(Tinn, const float* in);
 // Saves the tinn to disk.
 void xtsave(Tinn, const char* path);
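With the header now consistently xt-prefixed (xpredict becomes xtpredict) and the by-value Tinn parameters no longer marked const, caller-side code reads as below. A hypothetical XOR fit against these declarations; the 2-4-1 shape, the 1.0f learning rate, and the pass count are illustrative guesses, not values from this commit:

/* Train on the four XOR patterns, then print each prediction. */
#include "Tinn.h"
#include <stdio.h>

int main(void)
{
    float in[4][2] = { {0, 0}, {0, 1}, {1, 0}, {1, 1} };
    float tg[4][1] = { {0},    {1},    {1},    {0}    };
    const Tinn t = xtbuild(2, 4, 1);
    for(int i = 0; i < 10000; i++)
        for(int j = 0; j < 4; j++)
            xttrain(t, in[j], tg[j], 1.0f);
    for(int j = 0; j < 4; j++)
        printf("%.0f xor %.0f -> %f\n",
            (double) in[j][0], (double) in[j][1],
            (double) xtpredict(t, in[j])[0]);
    return 0;
}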

test.c (6 changes)

@@ -154,7 +154,9 @@ int main()
             const float* const tg = data.tg[j];
             error += xttrain(tinn, in, tg, rate);
         }
-        printf("error %.12f :: rate %f\n", (double) error / data.rows, (double) rate);
+        printf("error %.12f :: learning rate %f\n",
+            (double) error / data.rows,
+            (double) rate);
         rate *= anneal;
     }
     // This is how you save the neural network to disk.
@@ -167,7 +169,7 @@ int main()
     // but for the sake of brevity here we just reuse the training set from earlier.
     const float* const in = data.in[0];
     const float* const tg = data.tg[0];
-    const float* const pd = xpredict(loaded, in);
+    const float* const pd = xtpredict(loaded, in);
     for(int i = 0; i < data.nops; i++) { printf("%f ", (double) tg[i]); } printf("\n");
     for(int i = 0; i < data.nops; i++) { printf("%f ", (double) pd[i]); } printf("\n");
     // All done. Let's clean up.
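For reference, the learning rate printed by the reworked printf follows a geometric schedule: rate is multiplied by anneal once per pass over the data, so after k passes it equals the starting rate times anneal raised to k. A standalone sketch of that decay, using the assumed values rate = 1.0 and anneal = 0.99 rather than anything shown in this diff:

/* Geometric decay of the learning rate: rate_k = rate_0 * anneal^k.
 * rate_0 = 1.0 and anneal = 0.99 are illustrative assumptions. */
#include <math.h>
#include <stdio.h>

int main(void)
{
    const double rate0 = 1.0;
    const double anneal = 0.99;
    for(int k = 0; k <= 100; k += 20)
        printf("pass %3d :: learning rate %f\n", k, rate0 * pow(anneal, k));
    return 0;
}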