This commit is contained in:
Gustav Louw 2018-04-11 09:34:56 -07:00
parent 9ea4e51a38
commit 9ee52221ca
3 changed files with 21 additions and 30 deletions

7
Tinn.c
View File

@@ -196,3 +196,10 @@ void xtfree(const Tinn t)
free(t.h); free(t.h);
free(t.o); free(t.o);
} }
// Prints the first `size` elements of `arr` to stdout, space-separated,
// terminated by a single newline. Purely for diagnostics; no return value.
void xtprint(const float* arr, const int size)
{
    int idx = 0;
    while(idx < size)
    {
        // Promote explicitly to double to match the %f varargs contract.
        printf("%f ", (double) arr[idx]);
        idx++;
    }
    putchar('\n');
}

2
Tinn.h
View File

@@ -38,3 +38,5 @@ void xtsave(Tinn, const char* path);
Tinn xtload(const char* path); Tinn xtload(const char* path);
void xtfree(Tinn); void xtfree(Tinn);
void xtprint(const float* arr, const int size);

42
test.c
View File

@@ -1,26 +1,3 @@
// gcc test.c Tinn.c -lm
//
// Tinn does not include functionality for loading
// and parsing data sets as all data sets are different.
//
// This example shows how to open an example data file from the machine learning archives.
// The training data consists of hand written digits and can be found at:
//
// http://archive.ics.uci.edu/ml/machine-learning-databases/semeion/semeion.data
//
// Each line is one digit. A digit consists of 256 pixels (16 x 16 display).
// Each line finishes with 10 digits indicating the digit:
//
// 0: 1 0 0 0 0 0 0 0 0 0
// 1: 0 1 0 0 0 0 0 0 0 0
// 2: 0 0 1 0 0 0 0 0 0 0
// 3: 0 0 0 1 0 0 0 0 0 0
// 4: 0 0 0 0 1 0 0 0 0 0
// ...
// 9: 0 0 0 0 0 0 0 0 0 1
//
// This gives 256 inputs and 10 outputs to the neural network.
#include "Tinn.h" #include "Tinn.h"
#include <stdio.h> #include <stdio.h>
#include <time.h> #include <time.h>
@@ -148,6 +125,7 @@ static Data build(const char* path, const int nips, const int nops)
return data; return data;
} }
// Learns and predicts hand written digits with 98% accuracy.
int main() int main()
{ {
// Tinn does not seed the random number generator. // Tinn does not seed the random number generator.
@@ -159,15 +137,17 @@ int main()
// Hyper Parameters. // Hyper Parameters.
// Learning rate is annealed and thus not constant. // Learning rate is annealed and thus not constant.
// It can be fine tuned along with the number of hidden layers. // It can be fine tuned along with the number of hidden layers.
// Feel free to modify the anneal rate as well. // Feel free to modify the anneal rate.
const int nhid = 28; // The number of iterations can be changed for stronger training.
float rate = 1.0f; float rate = 1.0f;
const int nhid = 28;
const float anneal = 0.99f; const float anneal = 0.99f;
const int iterations = 128;
// Load the training set. // Load the training set.
const Data data = build("semeion.data", nips, nops); const Data data = build("semeion.data", nips, nops);
// Train, baby, train. // Train, baby, train.
const Tinn tinn = xtbuild(nips, nhid, nops); const Tinn tinn = xtbuild(nips, nhid, nops);
for(int i = 0; i < 100; i++) for(int i = 0; i < iterations; i++)
{ {
shuffle(data); shuffle(data);
float error = 0.0f; float error = 0.0f;
@@ -190,11 +170,13 @@ int main()
// Now we do a prediction with the neural network we loaded from disk. // Now we do a prediction with the neural network we loaded from disk.
// Ideally, we would also load a testing set to make the prediction with, // Ideally, we would also load a testing set to make the prediction with,
// but for the sake of brevity here we just reuse the training set from earlier. // but for the sake of brevity here we just reuse the training set from earlier.
const float* const in = data.in[0]; // One data set is picked at random.
const float* const tg = data.tg[0]; const int pick = rand() % data.rows;
const float* const in = data.in[pick];
const float* const tg = data.tg[pick];
const float* const pd = xtpredict(loaded, in); const float* const pd = xtpredict(loaded, in);
for(int i = 0; i < data.nops; i++) { printf("%f ", (double) tg[i]); } printf("\n"); xtprint(tg, data.nops);
for(int i = 0; i < data.nops; i++) { printf("%f ", (double) pd[i]); } printf("\n"); xtprint(pd, data.nops);
// All done. Let's clean up. // All done. Let's clean up.
xtfree(loaded); xtfree(loaded);
dfree(data); dfree(data);