tinn/Tinn.c

207 lines
5.2 KiB
C
Raw Normal View History

2018-03-29 06:41:08 +03:00
#include "Tinn.h"
#include <stdarg.h>
2018-03-31 01:42:20 +03:00
#include <stdio.h>
2018-03-29 06:41:08 +03:00
#include <stdlib.h>
#include <math.h>
2018-03-30 23:04:37 +03:00
// Error function: squared-error contribution of one output neuron.
// a: target value, b: actual output. Returns 0.5 * (a - b)^2; the 0.5
// cancels the factor of 2 produced by differentiation (see pderr).
static float err(const float a, const float b)
{
    // Multiply directly instead of calling powf(d, 2.0f): no libm call,
    // and the square of a float is computed exactly.
    const float d = a - b;
    return 0.5f * d * d;
}
2018-03-30 23:04:37 +03:00
// Partial derivative of the error function with respect to the output.
// The 0.5 factor in err() cancels the exponent, leaving just (a - b).
static float pderr(const float a, const float b)
{
    const float gradient = a - b;
    return gradient;
}
2018-03-30 23:04:37 +03:00
// Total error: sum of err() over all output neurons.
// tg: target values, o: actual outputs, size: number of outputs.
static float toterr(const float* const tg, const float* const o, const int size)
{
    float total = 0.0f;
    for(int n = 0; n < size; n++)
    {
        total += err(tg[n], o[n]);
    }
    return total;
}
2018-03-30 23:04:37 +03:00
// Activation function.
2018-04-03 23:08:27 +03:00
static float act(const float a)
2018-03-30 23:04:37 +03:00
{
2018-03-31 14:29:03 +03:00
return 1.0f / (1.0f + expf(-a));
2018-03-30 23:04:37 +03:00
}
// Partial derivative of the sigmoid, expressed in terms of its output:
// if y = act(x) then dy/dx = y * (1 - y). Argument a is the activation.
static float pdact(const float a)
{
    const float complement = 1.0f - a;
    return a * complement;
}
// Floating point random from 0.0 - 1.0.
// Declared as (void): an empty parameter list in C means "unspecified
// parameters" and is an obsolescent feature, not a zero-argument function.
static float frand(void)
{
    return rand() / (float) RAND_MAX;
}
2018-03-30 23:04:37 +03:00
// Back propagation: corrects both weight layers against one training
// sample. Expects fprop() to have just run, so t.h and t.o hold the
// activations for `in` (see xttrain).
//   in:   input values
//   tg:   target output values
//   rate: learning rate scaling every weight correction
// NOTE(review): the biases in t.b are never adjusted here - only the
// weights t.w (input->hidden) and t.x (hidden->output) are trained.
static void bprop(const Tinn t, const float* const in, const float* const tg, float rate)
{
    for(int i = 0; i < t.nhid; i++)
    {
        float sum = 0.0f;
        // Calculate total error change with respect to output.
        for(int j = 0; j < t.nops; j++)
        {
            const float a = pderr(t.o[j], tg[j]);
            const float b = pdact(t.o[j]);
            // Accumulate using the weight value BEFORE it is corrected
            // below - the order of these two statements matters.
            sum += a * b * t.x[j * t.nhid + i];
            // Correct weights in hidden to output layer.
            t.x[j * t.nhid + i] -= rate * a * b * t.h[i];
        }
        // Correct weights in input to hidden layer.
        for(int j = 0; j < t.nips; j++)
            t.w[i * t.nips + j] -= rate * sum * pdact(t.h[i]) * in[j];
    }
}
// Forward propagation.
2018-04-04 01:12:36 +03:00
static void fprop(const Tinn t, const float* const in)
2018-03-30 23:04:37 +03:00
{
// Calculate hidden layer neuron values.
for(int i = 0; i < t.nhid; i++)
{
2018-03-31 14:29:03 +03:00
float sum = 0.0f;
2018-03-30 23:04:37 +03:00
for(int j = 0; j < t.nips; j++)
sum += in[j] * t.w[i * t.nips + j];
2018-03-30 00:32:11 +03:00
t.h[i] = act(sum + t.b[0]);
2018-03-29 06:41:08 +03:00
}
2018-03-30 23:04:37 +03:00
// Calculate output layer neuron values.
for(int i = 0; i < t.nops; i++)
2018-03-29 06:41:08 +03:00
{
2018-03-31 14:29:03 +03:00
float sum = 0.0f;
2018-03-30 23:04:37 +03:00
for(int j = 0; j < t.nhid; j++)
2018-03-30 21:01:49 +03:00
sum += t.h[j] * t.x[i * t.nhid + j];
2018-03-30 00:32:11 +03:00
t.o[i] = act(sum + t.b[1]);
2018-03-29 06:41:08 +03:00
}
}
2018-03-30 23:04:37 +03:00
// Randomizes weights and biases.
static void twrand(const Tinn t)
2018-03-29 06:41:08 +03:00
{
2018-03-31 14:29:03 +03:00
for(int i = 0; i < t.nw; i++) t.w[i] = frand() - 0.5f;
for(int i = 0; i < t.nb; i++) t.b[i] = frand() - 0.5f;
2018-03-30 23:04:37 +03:00
}
// Prints a formatted message and exits with failure.
// Diagnostics go to stderr: the previous vprintf sent them to stdout,
// where they are lost (or corrupt data) when output is redirected.
static void bomb(const char* const message, ...)
{
    va_list args;
    va_start(args, message);
    vfprintf(stderr, message, args);
    va_end(args);
    exit(1);
}
// Fail safe file opening: bombs out instead of returning NULL.
static FILE* efopen(const char* const pathname, const char* const mode)
{
    FILE* const file = fopen(pathname, mode);
    if(file != NULL)
        return file;
    bomb("failure: fopen(\"%s\", \"%s\")\n", pathname, mode);
    return NULL; // Unreachable: bomb() exits.
}
// Fail safe zeroed allocation: bombs out instead of returning NULL.
static void* ecalloc(const size_t nmemb, const size_t size)
{
    void* const mem = calloc(nmemb, size);
    if(mem == NULL)
        // %zu: passing size_t through %d is undefined behavior.
        bomb("failure: calloc(%zu, %zu)\n", nmemb, size);
    return mem;
}
2018-04-11 05:12:35 +03:00
// Returns an output prediction given an input.
// Runs one forward pass and returns t.o directly: the buffer is owned
// by the tinn and is overwritten by the next predict or train call.
float* xtpredict(const Tinn t, const float* const in)
{
    fprop(t, in);
    return t.o;
}
2018-04-11 05:12:35 +03:00
// Trains a tinn with an input and target output with a learning rate.
// Returns error rate of the neural network.
2018-04-03 23:08:27 +03:00
float xttrain(const Tinn t, const float* const in, const float* const tg, float rate)
2018-03-29 08:04:47 +03:00
{
2018-04-04 01:12:36 +03:00
fprop(t, in);
bprop(t, in, tg, rate);
return toterr(tg, t.o, t.nops);
2018-03-29 08:04:47 +03:00
}
2018-04-11 05:12:35 +03:00
// Builds a new tinn object given number of inputs (nips),
// number of hidden neurons for the hidden layer (nhid),
// and number of outputs (nops).
2018-04-03 23:08:27 +03:00
Tinn xtbuild(const int nips, const int nhid, const int nops)
2018-03-29 06:41:08 +03:00
{
Tinn t;
2018-03-30 23:04:37 +03:00
// Tinn only supports one hidden layer so there are two biases.
2018-03-30 00:32:11 +03:00
t.nb = 2;
2018-03-31 01:42:20 +03:00
t.nw = nhid * (nips + nops);
t.w = (float*) ecalloc(t.nw, sizeof(*t.w));
2018-03-30 21:01:49 +03:00
t.x = t.w + nhid * nips;
t.b = (float*) ecalloc(t.nb, sizeof(*t.b));
t.h = (float*) ecalloc(nhid, sizeof(*t.h));
t.o = (float*) ecalloc(nops, sizeof(*t.o));
2018-03-29 08:04:47 +03:00
t.nips = nips;
2018-03-30 00:32:11 +03:00
t.nhid = nhid;
t.nops = nops;
2018-03-29 08:04:47 +03:00
twrand(t);
2018-03-29 06:41:08 +03:00
return t;
}
2018-04-11 05:12:35 +03:00
// Saves the tinn to disk.
2018-04-03 23:08:27 +03:00
void xtsave(const Tinn t, const char* const path)
2018-03-31 01:42:20 +03:00
{
2018-04-04 01:27:44 +03:00
FILE* const file = efopen(path, "w");
2018-03-31 01:42:20 +03:00
// Header.
fprintf(file, "%d %d %d\n", t.nips, t.nhid, t.nops);
// Biases and weights.
2018-04-04 01:27:44 +03:00
for(int i = 0; i < t.nb; i++) fprintf(file, "%a\n", (double) t.b[i]);
for(int i = 0; i < t.nw; i++) fprintf(file, "%a\n", (double) t.w[i]);
2018-03-31 01:42:20 +03:00
fclose(file);
}
2018-04-11 05:12:35 +03:00
// Loads a new tinn from disk.
2018-04-03 23:08:27 +03:00
Tinn xtload(const char* const path)
2018-03-31 01:42:20 +03:00
{
2018-04-04 01:27:44 +03:00
FILE* const file = efopen(path, "r");
2018-03-31 01:42:20 +03:00
int nips = 0;
int nhid = 0;
int nops = 0;
// Header.
fscanf(file, "%d %d %d\n", &nips, &nhid, &nops);
// A new tinn is returned.
2018-04-03 22:46:57 +03:00
const Tinn t = xtbuild(nips, nhid, nops);
2018-03-31 01:42:20 +03:00
// Biases and weights.
2018-04-04 01:27:44 +03:00
for(int i = 0; i < t.nb; i++) fscanf(file, "%a\n", &t.b[i]);
for(int i = 0; i < t.nw; i++) fscanf(file, "%a\n", &t.w[i]);
2018-03-31 01:42:20 +03:00
fclose(file);
return t;
}
2018-04-11 05:12:35 +03:00
// Frees a tinn from the heap.
2018-03-30 23:04:37 +03:00
void xtfree(const Tinn t)
2018-03-29 06:41:08 +03:00
{
2018-03-29 08:04:47 +03:00
free(t.w);
2018-03-31 01:42:20 +03:00
free(t.b);
2018-03-29 08:04:47 +03:00
free(t.h);
free(t.o);
2018-03-29 06:41:08 +03:00
}
2018-04-11 19:34:56 +03:00
2018-04-11 19:38:24 +03:00
// Prints an array of floats on one line. Useful for printing predictions.
void xtprint(const float* arr, const int size)
{
    int i = 0;
    while(i < size)
    {
        printf("%f ", (double) arr[i]);
        i++;
    }
    putchar('\n');
}