This commit is contained in:
Gustav Louw 2018-04-12 21:57:49 -07:00
parent df1b13c593
commit 396bd2bcdd
2 changed files with 18 additions and 23 deletions

39
Tinn.c
View File

@ -5,19 +5,19 @@
#include <stdlib.h> #include <stdlib.h>
#include <math.h> #include <math.h>
// Error function. // Computes error.
static float err(const float a, const float b) static float err(const float a, const float b)
{ {
return 0.5f * (a - b) * (a - b); return 0.5f * (a - b) * (a - b);
} }
// Partial derivative of error function. // Returns partial derivative of error function.
static float pderr(const float a, const float b) static float pderr(const float a, const float b)
{ {
return a - b; return a - b;
} }
// Total error. // Computes total error of target to output.
static float toterr(const float* const tg, const float* const o, const int size) static float toterr(const float* const tg, const float* const o, const int size)
{ {
float sum = 0.0f; float sum = 0.0f;
@ -32,19 +32,19 @@ static float act(const float a)
return 1.0f / (1.0f + expf(-a)); return 1.0f / (1.0f + expf(-a));
} }
// Partial derivative of activation function. // Returns partial derivative of activation function.
static float pdact(const float a) static float pdact(const float a)
{ {
return a * (1.0f - a); return a * (1.0f - a);
} }
// Floating point random from 0.0 - 1.0. // Returns floating point random from 0.0 - 1.0.
static float frand() static float frand()
{ {
return rand() / (float) RAND_MAX; return rand() / (float) RAND_MAX;
} }
// Back propagation. // Performs back propagation.
static void bprop(const Tinn t, const float* const in, const float* const tg, float rate) static void bprop(const Tinn t, const float* const in, const float* const tg, float rate)
{ {
for(int i = 0; i < t.nhid; i++) for(int i = 0; i < t.nhid; i++)
@ -65,7 +65,7 @@ static void bprop(const Tinn t, const float* const in, const float* const tg, fl
} }
} }
// Forward propagation. // Performs forward propagation.
static void fprop(const Tinn t, const float* const in) static void fprop(const Tinn t, const float* const in)
{ {
// Calculate hidden layer neuron values. // Calculate hidden layer neuron values.
@ -86,7 +86,7 @@ static void fprop(const Tinn t, const float* const in)
} }
} }
// Randomizes weights and biases. // Randomizes tinn weights and biases.
static void twrand(const Tinn t) static void twrand(const Tinn t)
{ {
for(int i = 0; i < t.nw; i++) t.w[i] = frand() - 0.5f; for(int i = 0; i < t.nw; i++) t.w[i] = frand() - 0.5f;
@ -100,8 +100,7 @@ float* xtpredict(const Tinn t, const float* const in)
return t.o; return t.o;
} }
// Trains a tinn with an input and target output with a learning rate. // Trains a tinn with an input and target output with a learning rate. Returns target to output error.
// Returns error rate of the neural network.
float xttrain(const Tinn t, const float* const in, const float* const tg, float rate) float xttrain(const Tinn t, const float* const in, const float* const tg, float rate)
{ {
fprop(t, in); fprop(t, in);
@ -109,9 +108,7 @@ float xttrain(const Tinn t, const float* const in, const float* const tg, float
return toterr(tg, t.o, t.nops); return toterr(tg, t.o, t.nops);
} }
// Builds a new tinn object given number of inputs (nips), // Constructs a tinn with number of inputs, number of hidden neurons, and number of outputs.
// number of hidden neurons for the hidden layer (nhid),
// and number of outputs (nops).
Tinn xtbuild(const int nips, const int nhid, const int nops) Tinn xtbuild(const int nips, const int nhid, const int nops)
{ {
Tinn t; Tinn t;
@ -130,37 +127,37 @@ Tinn xtbuild(const int nips, const int nhid, const int nops)
return t; return t;
} }
// Saves the tinn to disk. // Saves a tinn to disk.
void xtsave(const Tinn t, const char* const path) void xtsave(const Tinn t, const char* const path)
{ {
FILE* const file = fopen(path, "w"); FILE* const file = fopen(path, "w");
// Header. // Save header.
fprintf(file, "%d %d %d\n", t.nips, t.nhid, t.nops); fprintf(file, "%d %d %d\n", t.nips, t.nhid, t.nops);
// Biases and weights. // Save biases and weights.
for(int i = 0; i < t.nb; i++) fprintf(file, "%f\n", (double) t.b[i]); for(int i = 0; i < t.nb; i++) fprintf(file, "%f\n", (double) t.b[i]);
for(int i = 0; i < t.nw; i++) fprintf(file, "%f\n", (double) t.w[i]); for(int i = 0; i < t.nw; i++) fprintf(file, "%f\n", (double) t.w[i]);
fclose(file); fclose(file);
} }
// Loads a new tinn from disk. // Loads a tinn from disk.
Tinn xtload(const char* const path) Tinn xtload(const char* const path)
{ {
FILE* const file = fopen(path, "r"); FILE* const file = fopen(path, "r");
int nips = 0; int nips = 0;
int nhid = 0; int nhid = 0;
int nops = 0; int nops = 0;
// Header. // Load header.
fscanf(file, "%d %d %d\n", &nips, &nhid, &nops); fscanf(file, "%d %d %d\n", &nips, &nhid, &nops);
// A new tinn is returned. // Build a new tinn.
const Tinn t = xtbuild(nips, nhid, nops); const Tinn t = xtbuild(nips, nhid, nops);
// Biases and weights. // Loads biases and weights.
for(int i = 0; i < t.nb; i++) fscanf(file, "%f\n", &t.b[i]); for(int i = 0; i < t.nb; i++) fscanf(file, "%f\n", &t.b[i]);
for(int i = 0; i < t.nw; i++) fscanf(file, "%f\n", &t.w[i]); for(int i = 0; i < t.nw; i++) fscanf(file, "%f\n", &t.w[i]);
fclose(file); fclose(file);
return t; return t;
} }
// Frees a tinn from the heap. // Frees object from heap.
void xtfree(const Tinn t) void xtfree(const Tinn t)
{ {
free(t.w); free(t.w);

2
Tinn.h
View File

@ -12,12 +12,10 @@ typedef struct
float* h; float* h;
// Output layer. // Output layer.
float* o; float* o;
// Number of biases - always two - Tinn only supports a single hidden layer. // Number of biases - always two - Tinn only supports a single hidden layer.
int nb; int nb;
// Number of weights. // Number of weights.
int nw; int nw;
// Number of inputs. // Number of inputs.
int nips; int nips;
// Number of hidden neurons. // Number of hidden neurons.