The XOR program - 2 input, 2 hidden

This is my first program: an implementation of the XOR problem. I had many difficulties understanding the back-propagation idea, and this program is the first working result of my back-propagation study.

Note: before trying the code in this document, be aware that the rand() function behaves differently on Windows machines!
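The weights in the listing below are hard-coded so the run is reproducible, although (as the comment in the code says) they should really be random values between -1.0 and 1.0. A minimal sketch of such an initializer is shown here; the RandomWeight helper is only illustrative and not part of the program. It also shows why rand() is platform-sensitive: RAND_MAX is only 32767 on many Windows compilers but much larger elsewhere.

#include <cstdlib>   // rand(), srand(), RAND_MAX
#include <ctime>     // time()

// Returns a pseudo-random float in the range -1.0 .. 1.0.
// Always divide by RAND_MAX instead of assuming a fixed range,
// because RAND_MAX differs between platforms.
float RandomWeight() {
    return 2.0f * ((float)rand() / (float)RAND_MAX) - 1.0f;
}

// Example use, at the start of main():
// srand((unsigned)time(0));
// weight_i1_h1 = RandomWeight();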

/*
 * The XOR backpropagation network example
 * by R.M.Morrien, Copyright 2001
 */
#include <iostream>
#include <cmath>

using namespace std;

#define ITER      10000
#define LEARNRATE 0.5

// Function declarations
void  Train(float a, float b, float result);
void  Run(float a, float b);
float Sigmoid(float num);

// Variable declaration
float input1; // The first XOR input
float input2; // The second XOR input
float target; // The desired XOR output

// The neural network variables (the weights and the neuron biases)
float hidden1      =  0.2; // The biases; these should really be random values between -1.0 and 1.0
float hidden2      = -0.3;
float output1      =  0.4;

float weight_i1_h1 =  0.1;
float weight_i1_h2 = -0.2;
float weight_i2_h1 =  0.3;
float weight_i2_h2 =  0.4;
float weight_h1_o1 =  0.5;
float weight_h2_o1 = -0.4;



/*
 * The main function
 */
int main() {
    // Train
    for(int i=0; i<ITER; i++){
        Train(1.0,1.0,0.0);
        Train(1.0,0.0,1.0);
        Train(0.0,0.0,0.0);
        Train(0.0,1.0,1.0);
    }
    
    // Show results
    Run(1.0,1.0);
    Run(1.0,0.0);
    Run(0.0,0.0);
    Run(0.0,1.0);

    return 0;
}


/*
 * Name:   Train
 * Input:  Two inputs and the desired result
 * Pre:    The inputs and the result are floats in the range 0.0 - 1.0
 * Post:   The weights of the neural network are adjusted (trained)
 */
void Train(float a, float b, float result){
    input1 = a;	
    input2 = b;	
    target = result;		
    
    float output_hidden1;
    float output_hidden2;
    float output_output1;
	
    // Calculate the outputs
    output_hidden1 = input1         * weight_i1_h1 + input2         * weight_i2_h1 + hidden1;
    output_hidden1 = Sigmoid(output_hidden1);
    
    output_hidden2 = input1         * weight_i1_h2 + input2         * weight_i2_h2 + hidden2;
    output_hidden2 = Sigmoid(output_hidden2);

    output_output1 = output_hidden1 * weight_h1_o1 + output_hidden2 * weight_h2_o1 + output1;
    output_output1 = Sigmoid(output_output1);
    
    // Calculate the error
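    // (Delta rule for sigmoid units: delta = out * (1 - out) * error,
    //  where out * (1 - out) is the derivative of the sigmoid. At the
    //  output the error is (target - output); at a hidden neuron it is
    //  the output delta propagated back through the connecting weight.)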
    float output1_adjustment = output_output1 * (1-output_output1) * (target - output_output1);
    float hidden2_adjustment = output_hidden2 * (1-output_hidden2) * output1_adjustment * weight_h2_o1;
    float hidden1_adjustment = output_hidden1 * (1-output_hidden1) * output1_adjustment * weight_h1_o1;
    
    // Adjust the weights
    weight_i1_h1 = weight_i1_h1 + LEARNRATE * hidden1_adjustment * input1;
    weight_i1_h2 = weight_i1_h2 + LEARNRATE * hidden2_adjustment * input1;
    weight_i2_h1 = weight_i2_h1 + LEARNRATE * hidden1_adjustment * input2;
    weight_i2_h2 = weight_i2_h2 + LEARNRATE * hidden2_adjustment * input2;
    weight_h1_o1 = weight_h1_o1 + LEARNRATE * output1_adjustment * output_hidden1; 
    weight_h2_o1 = weight_h2_o1 + LEARNRATE * output1_adjustment * output_hidden2;     
    hidden1      = hidden1      + LEARNRATE * hidden1_adjustment; 
    hidden2      = hidden2      + LEARNRATE * hidden2_adjustment; 
    output1      = output1      + LEARNRATE * output1_adjustment; 
}


/*
 * Name:   Run
 * Input:  Two inputs
 * Pre:    The inputs are floats in the range 0.0 - 1.0
 * Post:   Shows the output of the neural network 
 */
void Run(float a, float b){
    input1 = a;	
    input2 = b;	

    float output_hidden1;
    float output_hidden2;
    float output_output1;
	
    // Calculate the outputs, same code as the train function
    output_hidden1 = input1         * weight_i1_h1 + input2         * weight_i2_h1 + hidden1;
    output_hidden1 = Sigmoid(output_hidden1);
    
    output_hidden2 = input1         * weight_i1_h2 + input2         * weight_i2_h2 + hidden2;
    output_hidden2 = Sigmoid(output_hidden2);

    output_output1 = output_hidden1 * weight_h1_o1 + output_hidden2 * weight_h2_o1 + output1;
    output_output1 = Sigmoid(output_output1);
    
    // Show the result
    cout << output_output1;
    if(output_output1 >= 0.5) cout << "\t (1)";
    else                      cout << "\t (0)";
    cout << endl;
}


/*
 * Name:   Sigmoid
 * Input:  One input
 * Pre:    The input is a float (positive or negative)
 * Post:   Returns the result of the sigmoid function (also known as the logistic function)
 */
float Sigmoid(float num) {
    return (float)(1/(1+exp(-num)));
}
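
In compact form, the error calculation and the weight updates in Train() follow the standard delta rule for sigmoid units. Writing t for the target, o for the network output, h for a hidden activation, and taking LEARNRATE as the learning rate:

    delta_output = o * (1 - o) * (t - o)
    delta_hidden = h * (1 - h) * delta_output * weight_h_o
    weight       = weight + LEARNRATE * delta * input_to_that_weight

The factor o * (1 - o) is the derivative of the sigmoid. The biases (hidden1, hidden2 and output1) are updated like weights on a constant input of 1, which is why their update has no input factor.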

Download source here.
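
To reproduce the timing below, the program can be compiled with any standard C++ compiler; for example (assuming the source was saved as xor.cpp, which is my guess at the file name):

g++ xor.cpp
time ./a.out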

The results (10,000 iterations on a Pentium 166):

[rmorrien@escay backpropagation]$ time ./a.out
0.0223939        (0)
0.981986         (1)
0.0169971        (0)
0.981931         (1)

real    0m0.313s
user    0m0.310s
sys     0m0.000s
[rmorrien@escay backpropagation]$



Copyright © 2001, R.M. Morriën