NeuroTIC 0.0
Loading...
Searching...
No Matches
logic_gates.c File Reference

Example: Train a neural network to compute all 16 two-input logic functions. More...

Go to the source code of this file.

Detailed Description

Example: Train a neural network to compute all 16 two-input logic functions.

Author
Oscar Sotomayor (Titux)
Date
2026

This example creates a 3-16 neural network and trains it to learn all 16 possible two-input logic functions. It demonstrates network construction, training, evaluation, and model serialization. Expected output:

~/NeuroTIC/examples$ time bash test.sh logic_gates
Compiling project in location: examples
Project name: logic_gates
Platform: CPU
Attemps: 474
=========================================================================================================================
| A | B | NULL | NOR | EXA | NOTB | EXB | NOTA | XOR | NAND | AND | XNOR | A | IMPA | B | IMPB | OR | ALL |
|---|---|------|------|------|------|------|------|------|------|------|------|------|------|------|------|------|------|
| 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 1 |
| 1 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 1 | 1 | 0 | 0 | 1 | 1 | 0 | 0 | 1 | 1 |
| 0 | 1 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 |
| 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 |
=========================================================================================================================
=========================================================================================================================
| A | B | NULL | NOR | EXA | NOTB | EXB | NOTA | XOR | NAND | AND | XNOR | A | IMPA | B | IMPB | OR | ALL |
|---|---|------|------|------|------|------|------|------|------|------|------|------|------|------|------|------|------|
| 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 1 |
| 1 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 1 | 1 | 0 | 0 | 1 | 1 | 0 | 0 | 1 | 1 |
| 0 | 1 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 |
| 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 |
=========================================================================================================================
real 0m0.599s
user 0m0.448s
sys 0m0.155s
*/
#include <stdio.h>
#include "ntcomplete.h"
int main( void ){
// Network structure: 2 inputs, 2 layers (hidden layer with 3 neurons, output layer with 16 neurons -- one per logic function).
CREATE_NET_FEEDFORWARD( network , 2 , ((uint16_t []){3,16}) );
// Set activation functions per neuron: the following activation-function distribution showed the best efficiency balance between training attempts, convergence and computational work in BOOLEAN vs SIGMOID tests.
network.nn[0][0].fn= NTACT_BOOLEAN; //<- First hidden neuron.
for( uint16_t j= 1 ; j < network.neurons[0] ; j++ ) network.nn[0][j].fn= NTACT_SIGMOID;
for( uint16_t j= 0 ; j < network.neurons[1] ; j++ ) network.nn[1][j].fn= NTACT_BOOLEAN;
// Initialize weights randomly
randnet( &network );
// Training parameters: exact outputs required (tolerance 0.0) across the 4 possible two-bit input samples.
traindata_t data={
.tolerance= 0.0,
.max_attempts= 10000000,
.samples= 4
};
// Allocate training data arrays (data.in / data.results) sized from the network's input and output counts.
newtraindata( &data , &network );
// Build the dataset: sample i encodes input A = bit 0 of i, input B = bit 1 of i.
// The binary string in each comment is the function's truth table over samples (A,B) = (0,0),(1,0),(0,1),(1,1).
// NOTE(review): the IMPA/IMPB comments on results[11] and results[13] were originally swapped relative to the
// printed table header and the documented expected output; relabeled here to match the header column order.
for( uint64_t i= 0 ; i < data.samples ; i++ ){
for( input_t j= 0 ; j < network.inputs ; j++ ) data.in[i][j]= ( i >> j ) & 1;
data.results[i][0]= 0; // NULL 0000 (always false)
data.results[i][1]= !( data.in[i][0] || data.in[i][1] ); // NOR 1000
data.results[i][2]= data.in[i][0] && !data.in[i][1]; // EXA 0100 (A and not B)
data.results[i][3]= !data.in[i][1]; // NOTB 1100
data.results[i][4]= !data.in[i][0] && data.in[i][1]; // EXB 0010 (B and not A)
data.results[i][5]= !data.in[i][0]; // NOTA 1010
data.results[i][6]= data.in[i][0] != data.in[i][1]; // XOR 0110
data.results[i][7]= !( data.in[i][0] && data.in[i][1] ); // NAND 1110
data.results[i][8]= data.in[i][0] && data.in[i][1]; // AND 0001
data.results[i][9]= data.in[i][0] == data.in[i][1]; // XNOR 1001
data.results[i][10]= data.in[i][0]; // A 0101
data.results[i][11]= data.in[i][0] || !data.in[i][1]; // IMPA 1101 (A || !B, i.e. B implies A -- per table header)
data.results[i][12]= data.in[i][1]; // B 0011
data.results[i][13]= !data.in[i][0] || data.in[i][1]; // IMPB 1011 (!A || B, i.e. A implies B -- per table header)
data.results[i][14]= data.in[i][0] || data.in[i][1]; // OR 0111
data.results[i][15]= 1; // ALL 1111 (always true)
}
// Train the network using backpropagation; the return value is the number of attempts used.
// NOTE(review): "Attemps" [sic] matches the documented expected output above; correcting the spelling would
// change the program's output. Also, %li assumes attempts_t is long -- confirm against nttrain.h.
printf( "\nAttemps: %li" , backpropagation( &network , &data ) );
// Evaluate and display results: run every sample through the trained network and print the truth table.
printf( "\n\n=========================================================================================================================" );
printf( "\n| A | B | NULL | NOR | EXA | NOTB | EXB | NOTA | XOR | NAND | AND | XNOR | A | IMPA | B | IMPB | OR | ALL |" );
printf( "\n|---|---|------|------|------|------|------|------|------|------|------|------|------|------|------|------|------|------|" );
for( sample_t i= 0 ; i < data.samples ; i++ ){
// Wire the network's input pointers to the current sample's values (network.in holds pointers, not copies).
for( input_t j= 0 ; j < network.inputs ; j++ ) network.in[j]= &data.in[i][j];
feedforward( &network );
printf( "\n| %.0f | %.0f |" , data.in[i][0] , data.in[i][1] );
// Print every output-layer neuron value for this sample.
for( uint16_t j= 0 ; j < network.neurons[network.layers - 1] ; j++ ) printf( " %.0f |" , *network.out[j] );
}
printf( "\n=========================================================================================================================");
// Save the trained network to "logic_gates.ntic" and reload it into a fresh structure.
savenet( &network , "logic_gates" );
net_s network_copy= loadnet( "logic_gates" );
// Display results from the loaded network to verify persistence (table must match the one above).
printf( "\n\n=========================================================================================================================" );
printf( "\n| A | B | NULL | NOR | EXA | NOTB | EXB | NOTA | XOR | NAND | AND | XNOR | A | IMPA | B | IMPB | OR | ALL |" );
printf( "\n|---|---|------|------|------|------|------|------|------|------|------|------|------|------|------|------|------|------|" );
for( sample_t i= 0 ; i < data.samples ; i++ ){
for( input_t j= 0 ; j < network_copy.inputs ; j++ ) network_copy.in[j]= &data.in[i][j];
feedforward( &network_copy );
printf( "\n| %.0f | %.0f |" , data.in[i][0] , data.in[i][1] );
for( uint16_t j= 0 ; j < network_copy.neurons[network_copy.layers - 1] ; j++ ) printf( " %.0f |" , *network_copy.out[j] );
}
printf( "\n=========================================================================================================================\n\n");
// Clean up the serialized file so the example leaves no artifacts.
// NOTE(review): the networks and training data are not explicitly freed -- presumably acceptable for an
// example that exits immediately; confirm whether the library provides a freenet/freetraindata API.
remove( "logic_gates.ntic" );
return 0;
}
@ NTACT_SIGMOID
Sigmoid activation function.
Definition ntactivation.h:24
@ NTACT_BOOLEAN
Boolean step activation function.
Definition ntactivation.h:23
#define CREATE_NET_FEEDFORWARD(network, i, neurons)
Convenience macro to create and build a feedforward network.
Definition ntbuilder.h:38
data_t * feedforward(net_s *net)
Executes full feedforward propagation.
Definition ntcalculate.c:44
struct net_s loadnet(char *name)
Loads a network from a binary file with extension .ntic.
Definition ntfile.c:176
unsigned char savenet(net_s *net, const char *name)
Saves a network to a binary file with extension .ntic.
Definition ntfile.c:115
void randnet(net_s *net)
Randomly initializes network weights.
Definition ntinitialize.c:25
attempts_t backpropagation(net_s *net, traindata_t *train_data)
Trains a network using backpropagation.
Definition nttrain.c:46
void newtraindata(traindata_t *train_data, net_s *net)
Allocates memory for training data arrays.
Definition nttrain.c:26
uint64_t sample_t
Definition nttrain.h:15
Root structural container of a NeuroTIC network.
Definition ntcore.h:100
uint16_t * neurons
Definition ntcore.h:103
input_t inputs
Definition ntcore.h:101
layer_t layers
Definition ntcore.h:102
data_t ** in
Definition ntcore.h:104
data_t ** out
Definition ntcore.h:108
Structure to hold training dataset and parameters.
Definition nttrain.h:21
precision_t learning_rate
Definition nttrain.h:23
data_t ** results
Definition nttrain.h:27
data_t ** in
Definition nttrain.h:26
sample_t samples
Definition nttrain.h:22

Definition in file logic_gates.c.