tinn/test.c

171 lines
4.2 KiB
C
Raw Normal View History

2018-03-29 06:55:49 +03:00
#include "Tinn.h"

#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <time.h>
// Holds an entire training set loaded from disk:
// one input row and one target row per data sample.
typedef struct
{
    // 2D array of inputs: one row per sample, nips doubles per row.
    double** in;
    // 2D array of targets: one row per sample, nops doubles per row.
    double** tg;
    // Number of inputs per sample.
    int nips;
    // Number of outputs (targets) per sample.
    int nops;
    // Number of samples (rows) in the set.
    int rows;
}
Data;
// Counts the lines in a file and rewinds it to the beginning.
// A non-empty final line with no trailing newline still counts.
static int lns(FILE* const file)
{
    int count = 0;
    int last = '\n';
    for(int c = getc(file); c != EOF; c = getc(file))
    {
        if(c == '\n')
            count++;
        last = c;
    }
    if(last != '\n')
        count++;
    rewind(file);
    return count;
}
// Reads one '\n'- or EOF-terminated line from a file into a freshly
// allocated, NUL-terminated string (the newline is not stored).
// The caller owns the returned buffer and must free() it.
// Exits the program if memory cannot be allocated.
static char* readln(FILE* const file)
{
    int ch = EOF;
    int reads = 0;
    int size = 128;
    char* line = malloc(size);
    if(line == NULL)
    {
        printf("Could not allocate line buffer\n");
        exit(1);
    }
    while((ch = getc(file)) != '\n' && ch != EOF)
    {
        line[reads++] = ch;
        // Keep one slot in reserve for the terminator; grow geometrically.
        if(reads + 1 == size)
        {
            // Never assign realloc straight back to the sole pointer:
            // on failure the original block would be leaked and lost.
            char* const grown = realloc(line, size *= 2);
            if(grown == NULL)
            {
                free(line);
                printf("Could not grow line buffer\n");
                exit(1);
            }
            line = grown;
        }
    }
    line[reads] = '\0';
    return line;
}
// Allocates a rows x cols 2D array of doubles (uninitialized).
// The caller frees each row and then the row table (see dfree()).
// Exits the program if memory cannot be allocated.
static double** new2d(const int rows, const int cols)
{
    double** row = malloc(rows * sizeof(double*));
    if(row == NULL)
    {
        printf("Could not allocate 2D array rows\n");
        exit(1);
    }
    for(int r = 0; r < rows; r++)
    {
        row[r] = malloc(cols * sizeof(double));
        if(row[r] == NULL)
        {
            printf("Could not allocate 2D array columns\n");
            exit(1);
        }
    }
    return row;
}
2018-03-30 23:04:37 +03:00
static Data ndata(const int nips, const int nops, const int rows)
2018-03-30 02:13:48 +03:00
{
const Data data = {
2018-03-30 23:04:37 +03:00
new2d(rows, nips), new2d(rows, nops), nips, nops, rows
2018-03-30 02:13:48 +03:00
};
return data;
}
2018-03-29 08:04:47 +03:00
2018-03-30 02:13:48 +03:00
static void parse(const Data data, char* line, const int row)
{
2018-03-30 23:04:37 +03:00
const int cols = data.nips + data.nops;
2018-03-30 02:13:48 +03:00
for(int col = 0; col < cols; col++)
{
2018-03-30 23:04:37 +03:00
const double val = atof(strtok(col == 0 ? line : NULL, " "));
if(col < data.nips)
data.in[row][col] = val;
2018-03-30 02:13:48 +03:00
else
2018-03-30 23:04:37 +03:00
data.tg[row][col - data.nips] = val;
2018-03-30 02:13:48 +03:00
}
}
static void dfree(const Data d)
{
for(int row = 0; row < d.rows; row++)
{
2018-03-30 23:04:37 +03:00
free(d.in[row]);
free(d.tg[row]);
2018-03-30 02:13:48 +03:00
}
2018-03-30 23:04:37 +03:00
free(d.in);
free(d.tg);
2018-03-30 02:13:48 +03:00
}
// Randomly permutes the rows of a data set by swapping each row
// with another uniformly chosen one. Input and target rows are
// swapped in lockstep so samples stay paired.
static void shuffle(const Data d)
{
    for(int i = 0; i < d.rows; i++)
    {
        const int j = rand() % d.rows;
        // Swap target rows.
        double* const tmptg = d.tg[i];
        d.tg[i] = d.tg[j];
        d.tg[j] = tmptg;
        // Swap input rows.
        double* const tmpin = d.in[i];
        d.in[i] = d.in[j];
        d.in[j] = tmpin;
    }
}
2018-03-30 23:04:37 +03:00
static Data build(const char* path, const int nips, const int nops)
2018-03-30 02:13:48 +03:00
{
FILE* file = fopen(path, "r");
if(file == NULL)
{
printf("Could not open %s\n", path);
2018-03-30 23:04:37 +03:00
printf("Get it from the machine learning database: ");
printf("wget http://archive.ics.uci.edu/ml/machine-learning-databases/semeion/semeion.data\n");
2018-03-30 02:13:48 +03:00
exit(1);
}
const int rows = lns(file);
2018-03-30 23:04:37 +03:00
Data data = ndata(nips, nops, rows);
2018-03-30 02:13:48 +03:00
for(int row = 0; row < rows; row++)
{
char* line = readln(file);
parse(data, line, row);
free(line);
}
fclose(file);
return data;
}
2018-03-29 08:04:47 +03:00
2018-03-30 23:04:37 +03:00
int main()
2018-03-29 06:55:49 +03:00
{
2018-03-30 23:04:37 +03:00
// Input and output size is harded coded here,
// so make sure the training data sizes match.
const int nips = 256;
const int nops = 10;
// Hyper Parameters.
// Learning rate is annealed and thus not constant.
const int nhid = 32;
2018-03-31 01:42:20 +03:00
double rate = 1.0;
2018-03-30 23:04:37 +03:00
// Load the training set.
const Data data = build("semeion.data", nips, nops);
2018-03-31 01:42:20 +03:00
// Train, baby, train.
2018-03-30 23:04:37 +03:00
const Tinn tinn = xtbuild(nips, nhid, nops);
2018-03-31 01:42:20 +03:00
for(int i = 0; i < 30; i++)
2018-03-30 00:32:11 +03:00
{
2018-03-30 23:04:37 +03:00
shuffle(data);
2018-03-30 02:13:48 +03:00
double error = 0.0;
for(int j = 0; j < data.rows; j++)
{
2018-03-30 23:04:37 +03:00
const double* const in = data.in[j];
const double* const tg = data.tg[j];
error += xttrain(tinn, in, tg, rate);
2018-03-30 02:13:48 +03:00
}
2018-03-30 23:04:37 +03:00
printf("error %.12f :: rate %f\n", error / data.rows, rate);
2018-03-31 01:42:20 +03:00
rate *= 0.9;
2018-03-30 00:32:11 +03:00
}
2018-03-31 01:42:20 +03:00
// This is how you save the neural network to disk.
xtsave(tinn, "saved.tinn");
xtfree(tinn);
// This is how you load the neural network from disk.
const Tinn loaded = xtload("saved.tinn");
2018-03-30 23:04:37 +03:00
// Ideally, you would load a testing set for predictions,
// but for the sake of brevity the training set is reused.
const double* const in = data.in[0];
const double* const tg = data.tg[0];
2018-03-31 01:42:20 +03:00
const double* const pd = xpredict(loaded, in);
2018-03-30 23:04:37 +03:00
for(int i = 0; i < data.nops; i++) { printf("%f ", tg[i]); } printf("\n");
for(int i = 0; i < data.nops; i++) { printf("%f ", pd[i]); } printf("\n");
2018-03-31 01:42:20 +03:00
// Cleanup.
xtfree(loaded);
2018-03-30 02:13:48 +03:00
dfree(data);
2018-03-27 00:11:15 +03:00
return 0;
}