Example: Train a neural network to compute all 16 two-input logic functions.
Author: Oscar Sotomayor (Titux)
Date: 2026
This example creates a 3-16 neural network and trains it to learn all 16 possible two-input logic functions. It demonstrates network construction, training, evaluation, and model serialization. Expected output:
~/NeuroTIC/examples$ time bash test.sh logic_gates
Compiling project in location: examples
Project name: logic_gates
Platform: CPU
Attemps: 474
=========================================================================================================================
| A | B | NULL | NOR | EXA | NOTB | EXB | NOTA | XOR | NAND | AND | XNOR | A | IMPA | B | IMPB | OR | ALL |
|---|---|------|------|------|------|------|------|------|------|------|------|------|------|------|------|------|------|
| 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 1 |
| 1 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 1 | 1 | 0 | 0 | 1 | 1 | 0 | 0 | 1 | 1 |
| 0 | 1 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 |
| 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 |
=========================================================================================================================
=========================================================================================================================
| A | B | NULL | NOR | EXA | NOTB | EXB | NOTA | XOR | NAND | AND | XNOR | A | IMPA | B | IMPB | OR | ALL |
|---|---|------|------|------|------|------|------|------|------|------|------|------|------|------|------|------|------|
| 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 1 |
| 1 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 1 | 1 | 0 | 0 | 1 | 1 | 0 | 0 | 1 | 1 |
| 0 | 1 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 |
| 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 |
=========================================================================================================================
real 0m0.599s
user 0m0.448s
sys 0m0.155s
/
#include <stdio.h>
#include "ntcomplete.h"
/* Entry point for the logic_gates example: configure activation functions,
 * build a 4-sample two-input truth table, train the network, print its
 * outputs, serialize it, reload it as network_copy, print again, and clean up.
 * NOTE(review): this listing is an incomplete Doxygen extract — the creation
 * of `network` (CREATE_NET_FEEDFORWARD), the declarations of `data` and
 * `network_copy`, and the sample loops that open the braces closed at the
 * orphaned `}` lines below are not visible here; confirm against the full
 * logic_gates.c. */
int main( void ){
/* Hidden layer (layer 0): sigmoid activation on neurons 1..neurons[0]-1.
 * NOTE(review): the index starts at 1, so neuron 0 keeps its default
 * activation — looks intentional (bias/identity neuron?); verify in the
 * full source. */
for( uint16_t j= 1 ; j < network.neurons[0] ; j++ ) network.nn[0][j].fn=
NTACT_SIGMOID;
/* Output layer (layer 1): boolean step activation on every neuron, so the
 * printed outputs are exact 0/1 values. */
for( uint16_t j= 0 ; j < network.neurons[1] ; j++ ) network.nn[1][j].fn=
NTACT_BOOLEAN;
/* Tail of a traindata_t designated initializer — the opening
 * `traindata_t data = { ... ` line is missing from this extract.
 * tolerance 0.0 demands exact outputs; up to 10M training attempts;
 * 4 samples cover the full two-input truth table. */
.tolerance= 0.0,
.max_attempts= 10000000,
.samples= 4
};
/* Build the truth table: sample i encodes its inputs as the bits of i
 * (bit j of i becomes input j), giving 00, 10, 01, 11. */
for( uint64_t i= 0 ; i < data.
samples ; i++ ){
for( input_t j= 0 ; j < network.inputs ; j++ ) data.
in[i][j]= ( i >> j ) & 1;
/* Target column 1 of the results: NOR(A, B). */
data.
results[i][1]= !( data.
in[i][0] || data.
in[i][1] );
/* Target column 7 of the results: NAND(A, B).
 * NOTE(review): the other 14 target columns are not assigned in this
 * extract — presumably set elsewhere in the full example; confirm. */
data.
results[i][7]= !( data.
in[i][0] && data.
in[i][1] );
}
/* Header of the results table for the trained network (columns match the
 * 16 two-input logic functions listed in the expected output above). */
printf( "\n\n=========================================================================================================================" );
printf( "\n| A | B | NULL | NOR | EXA | NOTB | EXB | NOTA | XOR | NAND | AND | XNOR | A | IMPA | B | IMPB | OR | ALL |" );
printf( "\n|---|---|------|------|------|------|------|------|------|------|------|------|------|------|------|------|------|------|" );
/* Wire sample i's inputs into the network and print one table row.
 * NOTE(review): the loop over samples `i` that opens this block, and the
 * feedforward() call that populates network.out, are missing from this
 * extract — the stray `}` below closes that missing loop. */
for( input_t j= 0 ; j < network.inputs ; j++ ) network.in[j]= &data.
in[i][j];
printf(
"\n| %.0f | %.0f |" , data.
in[i][0] , data.
in[i][1] );
/* One cell per output neuron of the final layer. */
for( uint16_t j= 0 ; j < network.neurons[network.layers - 1] ; j++ ) printf( " %.0f |" , *network.out[j] );
}
printf( "\n=========================================================================================================================");
/* Serialize the trained network to logic_gates.ntic (see savenet in
 * ntfile.c). */
savenet( &network ,
"logic_gates" );
/* Second table: same truth-table rows, but driven by network_copy.
 * NOTE(review): the loadnet() call that creates network_copy from the
 * saved file is not visible in this extract — confirm it precedes this
 * block in the full source. */
printf( "\n\n=========================================================================================================================" );
printf( "\n| A | B | NULL | NOR | EXA | NOTB | EXB | NOTA | XOR | NAND | AND | XNOR | A | IMPA | B | IMPB | OR | ALL |" );
printf( "\n|---|---|------|------|------|------|------|------|------|------|------|------|------|------|------|------|------|------|" );
/* Wire sample i's inputs into the reloaded network and print its row;
 * identical output to the first table proves save/load round-trips.
 * NOTE(review): the enclosing loop over `i` is again missing here. */
for( input_t j= 0 ; j < network_copy.
inputs ; j++ ) network_copy.
in[j]= &data.
in[i][j];
printf(
"\n| %.0f | %.0f |" , data.
in[i][0] , data.
in[i][1] );
for( uint16_t j= 0 ; j < network_copy.
neurons[network_copy.
layers - 1] ; j++ ) printf(
" %.0f |" , *network_copy.
out[j] );
}
printf( "\n=========================================================================================================================\n\n");
/* Clean up the serialized model file created by savenet above. */
remove( "logic_gates.ntic" );
return 0;
}
@ NTACT_SIGMOID
Sigmoid activation function.
Definition ntactivation.h:24
@ NTACT_BOOLEAN
Boolean step activation function.
Definition ntactivation.h:23
#define CREATE_NET_FEEDFORWARD(network, i, neurons)
Convenience macro to create and build a feedforward network.
Definition ntbuilder.h:38
data_t * feedforward(net_s *net)
Executes full feedforward propagation.
Definition ntcalculate.c:44
struct net_s loadnet(char *name)
Loads a network from a binary file with extension .ntic.
Definition ntfile.c:176
unsigned char savenet(net_s *net, const char *name)
Saves a network to a binary file with extension .ntic.
Definition ntfile.c:115
void randnet(net_s *net)
Randomly initializes network weights.
Definition ntinitialize.c:25
attempts_t backpropagation(net_s *net, traindata_t *train_data)
Trains a network using backpropagation.
Definition nttrain.c:46
void newtraindata(traindata_t *train_data, net_s *net)
Allocates memory for training data arrays.
Definition nttrain.c:26
uint64_t sample_t
Definition nttrain.h:15
Root structural container of a NeuroTIC network.
Definition ntcore.h:100
uint16_t * neurons
Definition ntcore.h:103
input_t inputs
Definition ntcore.h:101
layer_t layers
Definition ntcore.h:102
data_t ** in
Definition ntcore.h:104
data_t ** out
Definition ntcore.h:108
Structure to hold training dataset and parameters.
Definition nttrain.h:21
precision_t learning_rate
Definition nttrain.h:23
data_t ** results
Definition nttrain.h:27
data_t ** in
Definition nttrain.h:26
sample_t samples
Definition nttrain.h:22
Definition in file logic_gates.c.