Added CMake and C89 support

Samuel Marks 2021-08-16 14:26:10 +10:00
parent 4f72209510
commit cd885c2819
14 changed files with 413 additions and 159 deletions

.gitignore (new file)

@@ -0,0 +1,8 @@
*.o
*.log
*.tmp
.idea
cmake-build-*
build*
*_export.h

CMakeLists.txt (new file)

@@ -0,0 +1,63 @@
cmake_minimum_required(VERSION 3.19)
# set the project name and version
project(genann VERSION 0.0.1 LANGUAGES "C")
string(TOLOWER "${PROJECT_NAME}" PROJECT_LOWER_NAME)
set(CMAKE_C_STANDARD 90)
set(CMAKE_VERBOSE_MAKEFILE ON)
add_library("${PROJECT_LOWER_NAME}_compiler_flags" INTERFACE)
target_compile_features("${PROJECT_LOWER_NAME}_compiler_flags" INTERFACE "c_std_90")
# add compiler warning flags just when building this project via
# the BUILD_INTERFACE genex
set(gcc_like "$<COMPILE_LANG_AND_ID:C,CXX,ARMClang,AppleClang,Clang,GNU>")
set(msvc "$<COMPILE_LANG_AND_ID:C,CXX,MSVC>")
target_compile_options(
"${PROJECT_LOWER_NAME}_compiler_flags"
INTERFACE
"$<${gcc_like}:$<BUILD_INTERFACE:-Wshadow;-Wformat=2;-Wall;-Wunused-macros;-pedantic;-march=native>>"
# "-Wgnu-zero-variadic-macro-arguments"
"$<${msvc}:$<BUILD_INTERFACE:-W3;-WX;-Zi;-permissive->>"
)
# control where the static and shared libraries are built so that on windows
# we don't need to tinker with the path to run the executable
set(CMAKE_ARCHIVE_OUTPUT_DIRECTORY "${PROJECT_BINARY_DIR}")
set(CMAKE_LIBRARY_OUTPUT_DIRECTORY "${PROJECT_BINARY_DIR}")
set(CMAKE_RUNTIME_OUTPUT_DIRECTORY "${PROJECT_BINARY_DIR}")
option(BUILD_SHARED_LIBS "Build using shared libraries" OFF)
if(APPLE)
set(CMAKE_INSTALL_RPATH "@executable_path/../lib")
elseif(UNIX)
set(CMAKE_INSTALL_RPATH "$ORIGIN/../lib")
endif()
configure_file(
"${PROJECT_NAME}Config.h.in"
"config/${PROJECT_NAME}Config.h"
)
set(original_deps "genann" "example" "test")
foreach (_lib ${original_deps})
add_subdirectory("${_lib}")
message(STATUS "Built ${_lib}")
endforeach ()
#set_target_properties(
# "${PROJECT_NAME}"
# PROPERTIES
# LINKER_LANGUAGE
# C
#)
## add the binary tree to the search path for include files
## so that we will find "${PROJECT_NAME}Config.h"
target_include_directories(
"${PROJECT_NAME}"
PUBLIC
"${PROJECT_BINARY_DIR}/config"
)

README.md

@@ -11,11 +11,11 @@ functions and little extra.
## Features
- **C99 with no dependencies**.
- **C89 with no dependencies**.
- Contained in a single source code and header file.
- Simple.
- Fast and thread-safe.
- Easily extendible.
- Easily extendable.
- Implements backpropagation training.
- *Compatible with alternative training methods* (classic optimization, genetic algorithms, etc)
- Includes examples and test suite.

example/CMakeLists.txt (new file)

@@ -0,0 +1,48 @@
get_filename_component(LIBRARY_NAME "${CMAKE_CURRENT_SOURCE_DIR}" NAME)
string(REPLACE " " "_" LIBRARY_NAME "${LIBRARY_NAME}")
file(
COPY "iris.data" "iris.names" "xor.ann"
DESTINATION "${CMAKE_ARCHIVE_OUTPUT_DIRECTORY}/example"
)
foreach(example RANGE 1 4)
set(EXEC_NAME "example${example}")
set(Source_Files "../${EXEC_NAME}.c")
source_group("Source Files" FILES "${Source_Files}")
set(TARGET_NAME "${PROJECT_NAME}_${EXEC_NAME}")
add_executable("${TARGET_NAME}" "${Source_Files}")
target_include_directories(
"${TARGET_NAME}"
INTERFACE
"$<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}>"
"$<INSTALL_INTERFACE:include>"
)
target_link_libraries(
"${TARGET_NAME}"
INTERFACE
"${PROJECT_LOWER_NAME}_compiler_flags"
)
target_link_libraries("${TARGET_NAME}" PRIVATE "genann")
set_target_properties(
"${TARGET_NAME}"
PROPERTIES
LINKER_LANGUAGE
C
)
# install rules
set(installable_libs "${TARGET_NAME}" "${PROJECT_LOWER_NAME}_compiler_flags")
if (TARGET "${DEPENDANT_LIBRARY}")
list(APPEND installable_libs "${DEPENDANT_LIBRARY}")
endif ()
install(TARGETS ${installable_libs}
DESTINATION "bin/"
EXPORT "${TARGET_NAME}Targets")
endforeach ()

example1.c

@@ -5,6 +5,7 @@
int main(int argc, char *argv[])
{
genann *ann;
printf("GENANN example 1.\n");
printf("Train a small ANN to the XOR function using backpropagation.\n");
@@ -12,30 +13,35 @@ int main(int argc, char *argv[])
/* If you don't get a good result, try again for a different result. */
srand(time(0));
/* Input and expected out data for the XOR function. */
const double input[4][2] = {{0, 0}, {0, 1}, {1, 0}, {1, 1}};
const double output[4] = {0, 1, 1, 0};
int i;
{
/* Input and expected out data for the XOR function. */
const double input[4][2] = {{0, 0},
{0, 1},
{1, 0},
{1, 1}};
const double output[4] = {0, 1, 1, 0};
int i;
/* New network with 2 inputs,
* 1 hidden layer of 2 neurons,
* and 1 output. */
genann *ann = genann_init(2, 1, 2, 1);
/* New network with 2 inputs,
* 1 hidden layer of 2 neurons,
* and 1 output. */
ann = genann_init(2, 1, 2, 1);
/* Train on the four labeled data points many times. */
for (i = 0; i < 500; ++i) {
genann_train(ann, input[0], output + 0, 3);
genann_train(ann, input[1], output + 1, 3);
genann_train(ann, input[2], output + 2, 3);
genann_train(ann, input[3], output + 3, 3);
/* Train on the four labeled data points many times. */
for (i = 0; i < 500; ++i) {
genann_train(ann, input[0], output + 0, 3);
genann_train(ann, input[1], output + 1, 3);
genann_train(ann, input[2], output + 2, 3);
genann_train(ann, input[3], output + 3, 3);
}
/* Run the network and see what it predicts. */
printf("Output for [%1.f, %1.f] is %1.f.\n", input[0][0], input[0][1], *genann_run(ann, input[0]));
printf("Output for [%1.f, %1.f] is %1.f.\n", input[1][0], input[1][1], *genann_run(ann, input[1]));
printf("Output for [%1.f, %1.f] is %1.f.\n", input[2][0], input[2][1], *genann_run(ann, input[2]));
printf("Output for [%1.f, %1.f] is %1.f.\n", input[3][0], input[3][1], *genann_run(ann, input[3]));
}
/* Run the network and see what it predicts. */
printf("Output for [%1.f, %1.f] is %1.f.\n", input[0][0], input[0][1], *genann_run(ann, input[0]));
printf("Output for [%1.f, %1.f] is %1.f.\n", input[1][0], input[1][1], *genann_run(ann, input[1]));
printf("Output for [%1.f, %1.f] is %1.f.\n", input[2][0], input[2][1], *genann_run(ann, input[2]));
printf("Output for [%1.f, %1.f] is %1.f.\n", input[3][0], input[3][1], *genann_run(ann, input[3]));
genann_free(ann);
return 0;
}

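The reshuffling above (hoisting `genann *ann;` and `int i;`, and wrapping the rest of the body in an extra `{ ... }` block) is what C89 compatibility requires: unlike C99, C89 only permits declarations at the beginning of a block, never after a statement. A minimal sketch of the rule, not code from this repository:

```c
#include <stdio.h>

int main(void)
{
    int a = 1;             /* fine: declaration at the top of the block */
    printf("a = %d\n", a); /* statement */
    /* int b = 2; */       /* not valid C89: declaration after a statement */
    {
        int b = 2;         /* fine: a new block opens a new declaration region */
        printf("b = %d\n", b);
    }
    return 0;
}
```

The same pattern recurs in the other examples and in genann.c below.
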
example2.c

@@ -6,65 +6,72 @@
int main(int argc, char *argv[])
{
genann *ann;
printf("GENANN example 2.\n");
printf("Train a small ANN to the XOR function using random search.\n");
srand(time(0));
/* Input and expected out data for the XOR function. */
const double input[4][2] = {{0, 0}, {0, 1}, {1, 0}, {1, 1}};
const double output[4] = {0, 1, 1, 0};
int i;
{
const double input[4][2] = {{0, 0},
{0, 1},
{1, 0},
{1, 1}};
const double output[4] = {0, 1, 1, 0};
int i;
/* New network with 2 inputs,
* 1 hidden layer of 2 neurons,
* and 1 output. */
genann *ann = genann_init(2, 1, 2, 1);
double err;
double last_err = 1000;
int count = 0;
double err;
double last_err = 1000;
int count = 0;
/* New network with 2 inputs,
* 1 hidden layer of 2 neurons,
* and 1 output. */
ann = genann_init(2, 1, 2, 1);
do {
++count;
if (count % 1000 == 0) {
/* We're stuck, start over. */
genann_randomize(ann);
last_err = 1000;
}
do {
genann *save;
++count;
if (count % 1000 == 0) {
/* We're stuck, start over. */
genann_randomize(ann);
last_err = 1000;
}
genann *save = genann_copy(ann);
save = genann_copy(ann);
/* Take a random guess at the ANN weights. */
for (i = 0; i < ann->total_weights; ++i) {
ann->weight[i] += ((double)rand())/RAND_MAX-0.5;
}
/* Take a random guess at the ANN weights. */
for (i = 0; i < ann->total_weights; ++i) {
ann->weight[i] += ((double) rand()) / RAND_MAX - 0.5;
}
/* See how we did. */
err = 0;
err += pow(*genann_run(ann, input[0]) - output[0], 2.0);
err += pow(*genann_run(ann, input[1]) - output[1], 2.0);
err += pow(*genann_run(ann, input[2]) - output[2], 2.0);
err += pow(*genann_run(ann, input[3]) - output[3], 2.0);
/* See how we did. */
err = 0;
err += pow(*genann_run(ann, input[0]) - output[0], 2.0);
err += pow(*genann_run(ann, input[1]) - output[1], 2.0);
err += pow(*genann_run(ann, input[2]) - output[2], 2.0);
err += pow(*genann_run(ann, input[3]) - output[3], 2.0);
/* Keep these weights if they're an improvement. */
if (err < last_err) {
genann_free(save);
last_err = err;
} else {
genann_free(ann);
ann = save;
}
/* Keep these weights if they're an improvement. */
if (err < last_err) {
genann_free(save);
last_err = err;
} else {
genann_free(ann);
ann = save;
}
} while (err > 0.01);
} while (err > 0.01);
printf("Finished in %d loops.\n", count);
printf("Finished in %d loops.\n", count);
/* Run the network and see what it predicts. */
printf("Output for [%1.f, %1.f] is %1.f.\n", input[0][0], input[0][1], *genann_run(ann, input[0]));
printf("Output for [%1.f, %1.f] is %1.f.\n", input[1][0], input[1][1], *genann_run(ann, input[1]));
printf("Output for [%1.f, %1.f] is %1.f.\n", input[2][0], input[2][1], *genann_run(ann, input[2]));
printf("Output for [%1.f, %1.f] is %1.f.\n", input[3][0], input[3][1], *genann_run(ann, input[3]));
/* Run the network and see what it predicts. */
printf("Output for [%1.f, %1.f] is %1.f.\n", input[0][0], input[0][1], *genann_run(ann, input[0]));
printf("Output for [%1.f, %1.f] is %1.f.\n", input[1][0], input[1][1], *genann_run(ann, input[1]));
printf("Output for [%1.f, %1.f] is %1.f.\n", input[2][0], input[2][1], *genann_run(ann, input[2]));
printf("Output for [%1.f, %1.f] is %1.f.\n", input[3][0], input[3][1], *genann_run(ann, input[3]));
}
genann_free(ann);
return 0;

example3.c

@@ -6,19 +6,22 @@ const char *save_name = "example/xor.ann";
int main(int argc, char *argv[])
{
genann *ann;
printf("GENANN example 3.\n");
printf("Load a saved ANN to solve the XOR function.\n");
{
FILE *saved = fopen(save_name, "r");
if (!saved) {
printf("Couldn't open file: %s\n", save_name);
exit(1);
}
FILE *saved = fopen(save_name, "r");
if (!saved) {
printf("Couldn't open file: %s\n", save_name);
exit(1);
ann = genann_read(saved);
fclose(saved);
}
genann *ann = genann_read(saved);
fclose(saved);
if (!ann) {
printf("Error loading ANN from file: %s.", save_name);
exit(1);
@@ -26,13 +29,18 @@ int main(int argc, char *argv[])
/* Input data for the XOR function. */
const double input[4][2] = {{0, 0}, {0, 1}, {1, 0}, {1, 1}};
{
const double input[4][2] = {{0, 0},
{0, 1},
{1, 0},
{1, 1}};
/* Run the network and see what it predicts. */
printf("Output for [%1.f, %1.f] is %1.f.\n", input[0][0], input[0][1], *genann_run(ann, input[0]));
printf("Output for [%1.f, %1.f] is %1.f.\n", input[1][0], input[1][1], *genann_run(ann, input[1]));
printf("Output for [%1.f, %1.f] is %1.f.\n", input[2][0], input[2][1], *genann_run(ann, input[2]));
printf("Output for [%1.f, %1.f] is %1.f.\n", input[3][0], input[3][1], *genann_run(ann, input[3]));
/* Run the network and see what it predicts. */
printf("Output for [%1.f, %1.f] is %1.f.\n", input[0][0], input[0][1], *genann_run(ann, input[0]));
printf("Output for [%1.f, %1.f] is %1.f.\n", input[1][0], input[1][1], *genann_run(ann, input[1]));
printf("Output for [%1.f, %1.f] is %1.f.\n", input[2][0], input[2][1], *genann_run(ann, input[2]));
printf("Output for [%1.f, %1.f] is %1.f.\n", input[3][0], input[3][1], *genann_run(ann, input[3]));
}
genann_free(ann);
return 0;

example4.c

@@ -18,13 +18,15 @@ const char *class_names[] = {"Iris-setosa", "Iris-versicolor", "Iris-virginica"}
void load_data() {
/* Load the iris data-set. */
FILE *in = fopen("example/iris.data", "r");
int i, j;
char line[1024];
if (!in) {
printf("Could not open file: %s\n", iris_data);
exit(1);
}
/* Loop through the data to get a count. */
char line[1024];
while (!feof(in) && fgets(line, 1024, in)) {
++samples;
}
@@ -37,7 +39,6 @@ void load_data() {
class = malloc(sizeof(double) * samples * 3);
/* Read the file into our arrays. */
int i, j;
for (i = 0; i < samples; ++i) {
double *p = input + i * 4;
double *c = class + i * 3;
@@ -48,19 +49,21 @@ void load_data() {
exit(1);
}
char *split = strtok(line, ",");
for (j = 0; j < 4; ++j) {
p[j] = atof(split);
split = strtok(0, ",");
}
{
char *split = strtok(line, ",");
for (j = 0; j < 4; ++j) {
p[j] = atof(split);
split = strtok(0, ",");
}
split[strlen(split)-1] = 0;
if (strcmp(split, class_names[0]) == 0) {c[0] = 1.0;}
else if (strcmp(split, class_names[1]) == 0) {c[1] = 1.0;}
else if (strcmp(split, class_names[2]) == 0) {c[2] = 1.0;}
else {
printf("Unknown class %s.\n", split);
exit(1);
split[strlen(split) - 1] = 0;
if (strcmp(split, class_names[0]) == 0) { c[0] = 1.0; }
else if (strcmp(split, class_names[1]) == 0) { c[1] = 1.0; }
else if (strcmp(split, class_names[2]) == 0) { c[2] = 1.0; }
else {
printf("Unknown class %s.\n", split);
exit(1);
}
}
/* printf("Data point %d is %f %f %f %f -> %f %f %f\n", i, p[0], p[1], p[2], p[3], c[0], c[1], c[2]); */
@@ -72,6 +75,8 @@ void load_data() {
int main(int argc, char *argv[])
{
genann *ann;
printf("GENANN example 4.\n");
printf("Train an ANN on the IRIS dataset using backpropagation.\n");
@@ -84,7 +89,7 @@ int main(int argc, char *argv[])
* 1 hidden layer(s) of 4 neurons.
* 3 outputs (1 per class)
*/
genann *ann = genann_init(4, 1, 4, 3);
ann = genann_init(4, 1, 4, 3);
int i, j;
int loops = 5000;

genann.c

@@ -84,12 +84,13 @@ void genann_init_sigmoid_lookup(const genann *ann) {
}
double genann_act_sigmoid_cached(const genann *ann unused, double a) {
size_t j;
assert(!isnan(a));
if (a < sigmoid_dom_min) return lookup[0];
if (a >= sigmoid_dom_max) return lookup[LOOKUP_SIZE - 1];
size_t j = (size_t)((a-sigmoid_dom_min)*interval+0.5);
j = (size_t)((a-sigmoid_dom_min)*interval+0.5);
/* Because floating point... */
if (unlikely(j >= LOOKUP_SIZE)) return lookup[LOOKUP_SIZE - 1];
@@ -112,44 +113,47 @@ genann *genann_init(int inputs, int hidden_layers, int hidden, int outputs) {
if (hidden_layers > 0 && hidden < 1) return 0;
const int hidden_weights = hidden_layers ? (inputs+1) * hidden + (hidden_layers-1) * (hidden+1) * hidden : 0;
const int output_weights = (hidden_layers ? (hidden+1) : (inputs+1)) * outputs;
const int total_weights = (hidden_weights + output_weights);
{
const int hidden_weights = hidden_layers ? (inputs + 1) * hidden + (hidden_layers - 1) * (hidden + 1) * hidden
: 0;
const int output_weights = (hidden_layers ? (hidden + 1) : (inputs + 1)) * outputs;
const int total_weights = (hidden_weights + output_weights);
const int total_neurons = (inputs + hidden * hidden_layers + outputs);
const int total_neurons = (inputs + hidden * hidden_layers + outputs);
/* Allocate extra size for weights, outputs, and deltas. */
const int size = sizeof(genann) + sizeof(double) * (total_weights + total_neurons + (total_neurons - inputs));
genann *ret = malloc(size);
if (!ret) return 0;
/* Allocate extra size for weights, outputs, and deltas. */
const int size = sizeof(genann) + sizeof(double) * (total_weights + total_neurons + (total_neurons - inputs));
genann *ret = malloc(size);
if (!ret) return 0;
ret->inputs = inputs;
ret->hidden_layers = hidden_layers;
ret->hidden = hidden;
ret->outputs = outputs;
ret->inputs = inputs;
ret->hidden_layers = hidden_layers;
ret->hidden = hidden;
ret->outputs = outputs;
ret->total_weights = total_weights;
ret->total_neurons = total_neurons;
ret->total_weights = total_weights;
ret->total_neurons = total_neurons;
/* Set pointers. */
ret->weight = (double*)((char*)ret + sizeof(genann));
ret->output = ret->weight + ret->total_weights;
ret->delta = ret->output + ret->total_neurons;
/* Set pointers. */
ret->weight = (double *) ((char *) ret + sizeof(genann));
ret->output = ret->weight + ret->total_weights;
ret->delta = ret->output + ret->total_neurons;
genann_randomize(ret);
genann_randomize(ret);
ret->activation_hidden = genann_act_sigmoid_cached;
ret->activation_output = genann_act_sigmoid_cached;
ret->activation_hidden = genann_act_sigmoid_cached;
ret->activation_output = genann_act_sigmoid_cached;
genann_init_sigmoid_lookup(ret);
genann_init_sigmoid_lookup(ret);
return ret;
return ret;
}
}
genann *genann_read(FILE *in) {
int inputs, hidden_layers, hidden, outputs;
int rc;
int inputs, hidden_layers, hidden, outputs, rc;
genann *ann;
errno = 0;
rc = fscanf(in, "%d %d %d %d", &inputs, &hidden_layers, &hidden, &outputs);
@@ -158,17 +162,19 @@ genann *genann_read(FILE *in) {
return NULL;
}
genann *ann = genann_init(inputs, hidden_layers, hidden, outputs);
ann = genann_init(inputs, hidden_layers, hidden, outputs);
int i;
for (i = 0; i < ann->total_weights; ++i) {
errno = 0;
rc = fscanf(in, " %le", ann->weight + i);
if (rc < 1 || errno != 0) {
perror("fscanf");
genann_free(ann);
{
int i;
for (i = 0; i < ann->total_weights; ++i) {
errno = 0;
rc = fscanf(in, " %le", ann->weight + i);
if (rc < 1 || errno != 0) {
perror("fscanf");
genann_free(ann);
return NULL;
return NULL;
}
}
}
@@ -213,12 +219,12 @@ double const *genann_run(genann const *ann, double const *inputs) {
double *o = ann->output + ann->inputs;
double const *i = ann->output;
int h, j, k;
/* Copy the inputs to the scratch area, where we also store each neuron's
* output, for consistency. This way the first layer isn't a special case. */
memcpy(ann->output, inputs, sizeof(double) * ann->inputs);
int h, j, k;
if (!ann->hidden_layers) {
double *ret = o;
for (j = 0; j < ann->outputs; ++j) {
@@ -256,31 +262,33 @@ double const *genann_run(genann const *ann, double const *inputs) {
i += ann->hidden;
}
double const *ret = o;
{
double const *ret = o;
/* Figure output layer. */
for (j = 0; j < ann->outputs; ++j) {
double sum = *w++ * -1.0;
for (k = 0; k < ann->hidden; ++k) {
sum += *w++ * i[k];
/* Figure output layer. */
for (j = 0; j < ann->outputs; ++j) {
double sum = *w++ * -1.0;
for (k = 0; k < ann->hidden; ++k) {
sum += *w++ * i[k];
}
*o++ = genann_act_output(ann, sum);
}
*o++ = genann_act_output(ann, sum);
/* Sanity check that we used all weights and wrote all outputs. */
assert(w - ann->weight == ann->total_weights);
assert(o - ann->output == ann->total_neurons);
return ret;
}
/* Sanity check that we used all weights and wrote all outputs. */
assert(w - ann->weight == ann->total_weights);
assert(o - ann->output == ann->total_neurons);
return ret;
}
void genann_train(genann const *ann, double const *inputs, double const *desired_outputs, double learning_rate) {
int h, j, k;
/* To begin with, we must run the network forward. */
genann_run(ann, inputs);
int h, j, k;
/* First set the output layer deltas. */
{
double const *o = ann->output + ann->inputs + ann->hidden * ann->hidden_layers; /* First output. */
@@ -394,9 +402,9 @@ void genann_train(genann const *ann, double const *inputs, double const *desired
void genann_write(genann const *ann, FILE *out) {
int i;
fprintf(out, "%d %d %d %d", ann->inputs, ann->hidden_layers, ann->hidden, ann->outputs);
int i;
for (i = 0; i < ann->total_weights; ++i) {
fprintf(out, " %.20e", ann->weight[i]);
}

genann.h

@@ -29,6 +29,8 @@
#include <stdio.h>
#include "genann/genann_export.h"
#ifdef __cplusplus
extern "C" {
#endif
@@ -71,34 +73,34 @@ typedef struct genann {
} genann;
/* Creates and returns a new ann. */
genann *genann_init(int inputs, int hidden_layers, int hidden, int outputs);
genann GENANN_EXPORT *genann_init(int inputs, int hidden_layers, int hidden, int outputs);
/* Creates ANN from file saved with genann_write. */
genann *genann_read(FILE *in);
genann GENANN_EXPORT *genann_read(FILE *in);
/* Sets weights randomly. Called by init. */
void genann_randomize(genann *ann);
void GENANN_EXPORT genann_randomize(genann *ann);
/* Returns a new copy of ann. */
genann *genann_copy(genann const *ann);
genann GENANN_EXPORT *genann_copy(genann const *ann);
/* Frees the memory used by an ann. */
void genann_free(genann *ann);
void GENANN_EXPORT genann_free(genann *ann);
/* Runs the feedforward algorithm to calculate the ann's output. */
double const *genann_run(genann const *ann, double const *inputs);
double const GENANN_EXPORT *genann_run(genann const *ann, double const *inputs);
/* Does a single backprop update. */
void genann_train(genann const *ann, double const *inputs, double const *desired_outputs, double learning_rate);
void GENANN_EXPORT genann_train(genann const *ann, double const *inputs, double const *desired_outputs, double learning_rate);
/* Saves the ann. */
void genann_write(genann const *ann, FILE *out);
void GENANN_EXPORT genann_write(genann const *ann, FILE *out);
void genann_init_sigmoid_lookup(const genann *ann);
double genann_act_sigmoid(const genann *ann, double a);
double genann_act_sigmoid_cached(const genann *ann, double a);
double genann_act_threshold(const genann *ann, double a);
double genann_act_linear(const genann *ann, double a);
void GENANN_EXPORT genann_init_sigmoid_lookup(const genann *ann);
double GENANN_EXPORT genann_act_sigmoid(const genann *ann, double a);
double GENANN_EXPORT genann_act_sigmoid_cached(const genann *ann, double a);
double GENANN_EXPORT genann_act_threshold(const genann *ann, double a);
double GENANN_EXPORT genann_act_linear(const genann *ann, double a);
#ifdef __cplusplus

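The new GENANN_EXPORT qualifiers refer to the genann/genann_export.h header produced by generate_export_header() in genann/CMakeLists.txt below. The generated file is more elaborate (it also covers static builds, NO_EXPORT and deprecation macros), but a simplified sketch of the idea looks roughly like this:

```c
/* Simplified, hand-written approximation of the CMake-generated
 * genann/genann_export.h; the real file is produced at configure time. */
#ifndef GENANN_EXPORT_H
#define GENANN_EXPORT_H

#if defined(_WIN32) || defined(__CYGWIN__)
#  ifdef genann_EXPORTS                 /* set by CMake while building the DLL */
#    define GENANN_EXPORT __declspec(dllexport)
#  else                                 /* consumers importing from the DLL */
#    define GENANN_EXPORT __declspec(dllimport)
#  endif
#else
#  define GENANN_EXPORT __attribute__((visibility("default")))
#endif

#endif /* GENANN_EXPORT_H */
```

For the default static build (BUILD_SHARED_LIBS=OFF above), CMake's GenerateExportHeader module also provides a GENANN_STATIC_DEFINE switch that collapses GENANN_EXPORT to nothing; see the module documentation for the exact behaviour.
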
genann/CMakeLists.txt (new file)

@@ -0,0 +1,46 @@
get_filename_component(LIBRARY_NAME "${CMAKE_CURRENT_SOURCE_DIR}" NAME)
string(REPLACE " " "_" LIBRARY_NAME "${LIBRARY_NAME}")
include(GenerateExportHeader)
set(Header_Files "../genann.h") # "${CMAKE_BINARY_DIR}/config/${PROJECT_NAME}Config.h"
source_group("Header Files" FILES "${Header_Files}")
set(Source_Files "../genann.c")
source_group("Source Files" FILES "${Source_Files}")
add_library("${LIBRARY_NAME}" "${Header_Files}" "${Source_Files}")
target_include_directories(
"${LIBRARY_NAME}"
INTERFACE
"$<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}>"
"$<INSTALL_INTERFACE:include>"
)
target_link_libraries("${LIBRARY_NAME}" PUBLIC "${PROJECT_LOWER_NAME}_compiler_flags")
target_link_libraries("${LIBRARY_NAME}" PRIVATE "${_libs}")
set_target_properties(
"${LIBRARY_NAME}"
PROPERTIES
LINKER_LANGUAGE
C
)
set(_export_file "${CMAKE_CURRENT_SOURCE_DIR}/${LIBRARY_NAME}_export.h")
generate_export_header("${LIBRARY_NAME}" EXPORT_FILE_NAME "${_export_file}")
# setup the version numbering
set_property(TARGET "${LIBRARY_NAME}" PROPERTY VERSION "1.0.0")
set_property(TARGET "${LIBRARY_NAME}" PROPERTY SOVERSION "1")
# install rules
set(installable_libs "${LIBRARY_NAME}" "${PROJECT_LOWER_NAME}_compiler_flags")
if (TARGET "${DEPENDANT_LIBRARY}")
list(APPEND installable_libs "${DEPENDANT_LIBRARY}")
endif ()
install(TARGETS ${installable_libs}
DESTINATION "bin"
EXPORT "${LIBRARY_NAME}Targets")
install(FILES "${_export_file}" "${Header_Files}" DESTINATION "include")

genannConfig.cmake.in (new file)

@@ -0,0 +1,4 @@
@PACKAGE_INIT@
include ( "${CMAKE_CURRENT_LIST_DIR}/versionsTargets.cmake" )

genannConfig.h.in (new file)

@@ -0,0 +1,9 @@
#ifndef GENANN_CONFIG_H
#define GENANN_CONFIG_H
/* the configured options and settings for genann */
#define GENANN_VERSION_MAJOR @genann_VERSION_MAJOR@
#define GENANN_VERSION_MINOR @genann_VERSION_MINOR@
#define GENANN_VERSION_PATCH @genann_VERSION_PATCH@
#endif /* GENANN_CONFIG_H */

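Nothing in the commit includes this generated header yet; as a hypothetical consumer-side sketch, any source compiled against the genann target (which publishes the build tree's config/ include directory) could report the library version via the macros above:

```c
#include <stdio.h>
#include "genannConfig.h" /* generated into <build>/config/ by configure_file() */

int main(void)
{
    printf("genann version %d.%d.%d\n",
           GENANN_VERSION_MAJOR, GENANN_VERSION_MINOR, GENANN_VERSION_PATCH);
    return 0;
}
```

With the project() call above, the three macros expand to 0, 0 and 1.
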
test/CMakeLists.txt (new file)

@@ -0,0 +1,40 @@
get_filename_component(EXEC_NAME "${CMAKE_CURRENT_SOURCE_DIR}" NAME)
string(REPLACE " " "_" EXEC_NAME "${EXEC_NAME}")
set(Header_Files "../minctest.h")
source_group("Header Files" FILES "${Header_Files}")
set(Source_Files "../test.c")
source_group("Source Files" FILES "${Source_Files}")
add_executable("${EXEC_NAME}" "${Header_Files}" "${Source_Files}")
target_include_directories(
"${EXEC_NAME}"
INTERFACE
"$<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}>"
"$<INSTALL_INTERFACE:include>"
)
target_link_libraries(
"${EXEC_NAME}"
INTERFACE
"${PROJECT_LOWER_NAME}_compiler_flags"
"genann"
)
set_target_properties(
"${EXEC_NAME}"
PROPERTIES
LINKER_LANGUAGE
C
)
# install rules
set(installable_libs "${EXEC_NAME}" "${PROJECT_LOWER_NAME}_compiler_flags")
if (TARGET "${DEPENDANT_LIBRARY}")
list(APPEND installable_libs "${DEPENDANT_LIBRARY}")
endif ()
install(TARGETS ${installable_libs}
DESTINATION "bin/"
EXPORT "${EXEC_NAME}Targets")