Hi
Thanks for the answer.
At first I used the sigmoid function to convert my real input values into continuous values between 0 and 1. That works fine. Now I have to provide the desired output values, e.g. weight = 80. To convert my real output into values between 0 and 1, I used the sigmoid function like this:
return(float)(1/1+exp(-80));
But the problem is that I’m always getting 1 as the answer. In fact, whatever value I plug into the sigmoid equation, I get 1.
What should I do?
Is the desired output connected to the other layers?
Please help me.
Thanks
Please find below a copy of my source code.
#include <iostream.h>
#include <math.h>
#include <stdlib.h>
#include<time.h>
#define e 2.718282
const int INP_UNITS = 5; // Maximum number of nodes in input layer
const int MIP = INP_UNITS+1; //Extra unit used for thresholding (index 0 unused)
const int HID_UNITS = 8; // Maximum number of nodes in hidden layer
const int MHP=HID_UNITS+1; // Hidden-layer array bound (index 0 unused)
const int OUT_UNITS = 2; // Maximum number of nodes in output layer
const int MOT = OUT_UNITS+1; // Output-layer array bound (index 0 unused)
const int MAX_PAT = 4; //Maximum number of training patterns
const int MPT = MAX_PAT+1; // Pattern array bound (index 0 unused)
// Training patterns. The training loop indexes rows 1..MAX_PAT and
// columns from 1, so row/column 0 are unused padding.
const float INP_PATTERNS[MPT][MIP] = {{0,0,0,0,0,1},{0,0,0,0,1,0},{0,0,0,1,0,0},
{0,0,1,0,0,0},{0,1,0,0,0,0}};
const float OUT_PATTERNS[MPT][MOT] =
{{0,0,0},{0,0,0},{0,0,0},{0,0,0},{0,0,0}};
// One unit of a network layer.
// BUG FIX: the weight arrays are sized MHP (9), not MIP (6) — an output
// unit has HID_UNITS (= 8) incoming weights, which would overflow weight[MIP].
struct layer{
float weight[MHP];//Incoming weights of this unit
float weight_change[MHP];//Previous weight deltas (kept for the momentum term)
float threshold;//Threshold (bias) subtracted before the sigmoid
float a; //activation
float error; //Back-propagated delta for this unit
};
float real_input[MIP]; // Raw input values entered by the user
float real_output[MOT]; // Raw desired output values entered by the user
float target_output[MOT]; // Targets squashed into (0, 1)
float final_target_output[MOT]; // Network outputs mapped back to real values
layer input[MIP]; //Input layer
layer hidden[MHP]; //Hidden layer
layer output[MOT]; //Output layer
//Declarations of all the functions used in the network
class CBackPNet{
public:
int contin(); // Ask the user whether to continue; returns 1 for yes, 0 for no
float random_num(); // Uniform random float in [0, 1]
float sigmoid(float); // Logistic squashing function -> (0, 1)
void random_weights(); // Randomly initialise (and print) every weight
void get_input_data(); // Read the 5 real input parameters from the user
void get_output_data(); // Read the 2 real output parameters from the user
void get_target_output(); // Squash real outputs into (0,1) targets
void run_input_units(); // Forward pass: real inputs -> input activations
void run_hidden_units(); // Forward pass: input -> hidden activations
void run_output_units(); // Forward pass: hidden -> output activations
void feed_forward(); // Full forward pass through all three layers
void convert_real_output(); // Map network outputs back to real-world values
void display_results(); // Print current inputs and network outputs
void calculate_mean_squared_error(); // Print 0.5 * sum of squared output errors
void calculate_output_layer_errors(); // Deltas for the output layer
void calculate_hidden_layer_errors(); // Deltas for the hidden layer
void calculate_input_layer_errors(); // Deltas for the input layer
void test_network(); // Interactive test loop
void adjust_weight(); // Gradient-descent weight update with momentum
void back_propagate(); // One backward pass: errors then weight update
void blank_changes(); // Zero the stored weight deltas
void train_network(); // Train on the fixed patterns for N cycles
};
// Get a random number in the range 0 to 1 as a float
float CBackPNet::random_num()
{
    // Scale rand()'s integer range [0, RAND_MAX] onto [0.0, 1.0].
    return static_cast<float>(rand()) / static_cast<float>(RAND_MAX);
}
//Assign random numbers for each of the different layers
// Assign a random initial value to every weight and echo it to the screen.
// Fixes: the layer arrays were indexed without a subscript (input.weight[j]
// does not compile), a *fresh* random number was printed instead of the weight
// actually stored, and the hidden/output loop dimensions were transposed
// relative to how run_hidden_units()/run_output_units() read the weights.
void CBackPNet::random_weights()
{
    for (int i = 1; i <= INP_UNITS; i++)
    {
        for (int j = 1; j <= INP_UNITS; j++)
        {
            input[i].weight[j] = random_num();
            cout << "input[" << i << "].weight[" << j << "]=" << input[i].weight[j] << endl;
        }
    }
    // Hidden unit i has one weight per input unit j.
    for (int i = 1; i <= HID_UNITS; i++)
    {
        for (int j = 1; j <= INP_UNITS; j++)
        {
            hidden[i].weight[j] = random_num();
            cout << "hidden[" << i << "].weight[" << j << "]=" << hidden[i].weight[j] << endl;
        }
    }
    // Output unit i has one weight per hidden unit j.
    for (int i = 1; i <= OUT_UNITS; i++)
    {
        for (int j = 1; j <= HID_UNITS; j++)
        {
            output[i].weight[j] = random_num();
            cout << "output[" << i << "].weight[" << j << "]=" << output[i].weight[j] << endl;
        }
    }
}
// Prompts user to enter input values
void CBackPNet:: get_input_data()
{
cout << endl;
cout << "INPUT PARAMETERS" << endl;
for (int i = 1; i <= INP_UNITS; i++)
{
cout << i << " . ";
cout<<"Machine Diameter: ";
cin >> real_input;
cout << ++i << " . ";
cout<<"Machine Gauge: ";
cin >> real_input;
cout << ++i << " . ";
cout<<"No. of Needles: ";
cin >> real_input;
cout << ++i << " . ";
cout<<"LFA: ";
cin >> real_input;
cout << ++i << " . ";
cout<<"Yarn Count: ";
cin >> real_input;
}
}
//Prompts user to enter output values
void CBackPNet::get_output_data()
{
cout<<endl;
cout << "OUTPUT PARAMETERS" << endl;
for (int i = 1; i <= OUT_UNITS; i++)
{
cout << i << " . ";
cout<<"Fabric Width: ";
cin >> real_output;
cout << ++i << " . ";
cout<<"Fabric Specific Weight: ";
cin >> real_output;
}
}
//Definition of Sigmoid transfer function
// Logistic (sigmoid) transfer function: maps any real value into (0, 1).
float CBackPNet::sigmoid(float num)
{
    const double denom = 1.0 + exp((double)-num);
    return (float)(1.0 / denom);
}
//
//Convert real output values entered to values between 0 and 1
// Squash the real desired outputs into (0, 1) targets.
// Fixes: the arrays were used without `[i]` subscripts (does not compile)
// and only a single value was printed after the loop.
// NOTE: sigmoid saturates to ~1.0 for arguments above ~10, so a raw value
// like weight = 80 always yields 1 — rescale real outputs into a small
// range (e.g. divide by their maximum) before squashing.
void CBackPNet::get_target_output()
{
    cout << endl << "TARGET OUTPUT=";
    for (int i = 1; i <= OUT_UNITS; i++)
    {
        target_output[i] = sigmoid(2 * real_output[i]);
        cout << target_output[i] << " ";
    }
    cout << endl;
}
//Convert real input values to values between 0 and 1
//by applying sigmoid function
// Compute the input-layer activations from the raw input values.
// Fix: `input` is an array of units — every access needs the `[i]` subscript.
void CBackPNet::run_input_units()
{
    for (int i = 1; i <= INP_UNITS; i++)
    {
        float sum = 0;
        for (int j = 1; j <= INP_UNITS; j++)
            sum += input[i].weight[j] * real_input[j];
        input[i].a = sigmoid(sum - input[i].threshold);
    }
}
//Apply transfer function between input and hidden units
// Compute the hidden-layer activations from the input-layer activations.
// Fix: `hidden` is an array of units — every access needs the `[i]` subscript.
void CBackPNet::run_hidden_units()
{
    for (int i = 1; i <= HID_UNITS; i++)
    {
        float sum = 0;
        for (int j = 1; j <= INP_UNITS; j++)
            sum += hidden[i].weight[j] * input[j].a;
        hidden[i].a = sigmoid(sum - hidden[i].threshold);
    }
}
//Apply transfer function between hidden and output units
// Compute the output-layer activations from the hidden-layer activations.
// Fix: `output` is an array of units — every access needs the `[i]` subscript.
// (Requires weight[] sized >= HID_UNITS+1; see the `layer` struct.)
void CBackPNet::run_output_units()
{
    for (int i = 1; i <= OUT_UNITS; i++)
    {
        float sum = 0;
        for (int j = 1; j <= HID_UNITS; j++)
            sum += output[i].weight[j] * hidden[j].a;
        output[i].a = sigmoid(sum - output[i].threshold);
    }
}
//Convert network output to real output
// Map the network's (0,1) outputs back to real-valued quantities.
// Fixes: the arrays were used without `[i]` subscripts (does not compile),
// and log(a)/log(e) is just ln(a), which is NOT the inverse of the target
// transform. get_target_output() computes t = sigmoid(2x), so the inverse
// is x = 0.5 * ln(t / (1 - t)) (the logit, halved).
void CBackPNet::convert_real_output()
{
    for (int i = 1; i <= OUT_UNITS; i++)
    {
        final_target_output[i] = (float)(0.5 * log(output[i].a / (1.0 - output[i].a)));
        cout << "FINAL TARGET OUTPUT:";
        cout << final_target_output[i] << " ";
        cout << endl;
    }
}
// Run one complete forward pass through all three layers, in order.
void CBackPNet::feed_forward()
{
    run_input_units();
    run_hidden_units();
    run_output_units();
}
// Displays the results of the inputs and outputs on the screen.
// Display the current raw inputs and the network's output activations.
// Fixes: `cout << real_input` printed the array's address, not its elements,
// and the second loop reused `i` under pre-standard for-scoping rules.
void CBackPNet::display_results()
{
    cout << endl << "INPUTS: ";
    for (int i = 1; i <= INP_UNITS; i++)
        cout << real_input[i] << " ";
    cout << endl << "OUTPUTS: ";
    for (int i = 1; i <= OUT_UNITS; i++)
        cout << output[i].a << " ";
    cout << endl;
}
//Asks user whether he wants to continue or not
// Ask whether the user wants another test round.
// Returns 1 for Y/y, 0 for N/n; any other key re-prompts.
int CBackPNet::contin()
{
    char answer;
    cout << endl << " Do you want to continue testing? (Press Y or N) ";
    do
    {
        cin >> answer;
    } while (answer != 'Y' && answer != 'y' && answer != 'N' && answer != 'n');
    return (answer == 'Y' || answer == 'y') ? 1 : 0;
}
// Interactive loop: gather data, run the net, show results — until the
// user declines to continue.
void CBackPNet::test_network()
{
    cout << endl;
    do
    {
        get_input_data();
        get_output_data();
        get_target_output();
        feed_forward();
        display_results();
    } while (contin());    // contin() yields 1 (continue) or 0 (stop)
}
//Calculating and displaying the mean squared error
// Print the error function E = 0.5 * sum_j (t_j - a_j)^2 over the output units.
// Fixes: a local `target_output` shadowed the global targets, and its
// 0-based initializer {0.05, 0.95} was misaligned with the 1-based loop
// (the loop actually read 0.95 and 0). The global targets — set by
// get_target_output() / train_network() — are used instead.
// pow(x, 2) replaced by x*x (pow is a general transcendental; overkill here).
void CBackPNet::calculate_mean_squared_error()
{
    float sum = 0;
    for (int j = 1; j <= OUT_UNITS; j++)
    {
        const float diff = target_output[j] - output[j].a;
        sum += diff * diff;
    }
    const float error_function = 0.5f * sum;
    cout << "Error Function:";
    cout << error_function << " ";
    cout << endl;
}
//Calculating error between network output and target output
void CBackPNet:: calculate_output_layer_errors ()
{ for (int j = 1; j <= OUT_UNITS; j++)
output[j].error = output[j].a * (1 - output[j].a) * (target_output[j] - output[j].a);
}
//Calculating error between hidden and output units
void CBackPNet:: calculate_hidden_layer_errors ()
{ float sum;
for (int i = 1; i <= HID_UNITS; i++)
{ sum = 0;
for (int j = 1; j <= OUT_UNITS; j++)
sum += output[j].error * output[j].weight[j];
hidden.error = hidden.a * (1 - hidden.a) * sum;
}
}
//Calculating error between input and hidden units
void CBackPNet:: calculate_input_layer_errors ()
{ float sum;
for (int i = 1; i <= INP_UNITS; i++)
{ sum = 0;
for (int j = 1; j <= HID_UNITS; j++)
sum += hidden[j].error * hidden[j].weight[j];
input.error = input.a * (1 - input.a) * sum;
}
}
//Adjusting the weights between each layer
void CBackPNet:: adjust_weight()
{ const float learn_coeff = 0.9f; // Learning rate
const float momentum = 0.9f; // Momentum parameter
for (int j = 1; j <= OUT_UNITS; j++)
{ for (int i = 1; i <= HID_UNITS; i++)
{ output[j].weight_change[j] = learn_coeff * output[j].error * hidden.a + momentum * output[j].weight_change[j];
output[j].weight[j] += output[j].weight_change[j];
}
}
for (j = 1; j <= HID_UNITS; j++)
{ for (int i = 1; i <= INP_UNITS; i++)
{ hidden[j].weight_change[j] = learn_coeff * hidden[j].error * input.a + momentum * hidden[j].weight_change[j];
hidden[j].weight[j] += hidden[j].weight_change[j];
}
}
for (j = 1; j <= INP_UNITS; j++)
{ for (int i = 1; i <= INP_UNITS; i++)
{ input[j].weight_change[j] = learn_coeff * input[j].error * real_input[j] + momentum * input[j].weight_change[j];
input[j].weight[j] += input[j].weight_change[j];
}
}
}
// One backward pass. The order is load-bearing: all layer deltas must be
// computed (output first, then hidden, then input) BEFORE any weight moves,
// since each step reads activations/weights from the step above it.
void CBackPNet:: back_propagate ()
{
calculate_mean_squared_error();
calculate_output_layer_errors();
calculate_hidden_layer_errors();
calculate_input_layer_errors();
adjust_weight();
}
// At the start of back propagation, there are no weight changes to
// influence the next cycle, so clear the arrays
// Zero every stored weight delta so the first momentum term contributes
// nothing. Fixes: `weight_change[j]` used the outer index, so each unit had
// only a single element cleared (and the inner index i was never used);
// the later loops also reused `j` under pre-standard for-scoping rules.
void CBackPNet::blank_changes()
{
    for (int j = 1; j <= INP_UNITS; j++)
        for (int i = 1; i <= INP_UNITS; i++)
            input[j].weight_change[i] = 0;
    for (int j = 1; j <= HID_UNITS; j++)
        for (int i = 1; i <= INP_UNITS; i++)
            hidden[j].weight_change[i] = 0;
    for (int j = 1; j <= OUT_UNITS; j++)
        for (int i = 1; i <= HID_UNITS; i++)
            output[j].weight_change[i] = 0;
}
//Iteration function for user to repeat same procure several times
// Train on the fixed pattern set for a user-chosen number of cycles.
// Fixes: whole-array assignments (`real_input = INP_PATTERNS[pat]`) do not
// compile — elements are copied one at a time; the second inner loop also
// reused `i` under pre-standard for-scoping rules.
void CBackPNet::train_network()
{
    long num_cycles;
    cout << endl;
    cout << "Enter the number of training cycles : ";
    cin >> num_cycles;
    blank_changes();
    for (long loop = 1; loop <= num_cycles; loop++)
    {
        for (int pat = 1; pat <= MAX_PAT; pat++)
        {
            for (int i = 1; i <= INP_UNITS; i++)
                real_input[i] = INP_PATTERNS[pat][i];
            for (int i = 1; i <= OUT_UNITS; i++)
                target_output[i] = OUT_PATTERNS[pat][i];
            feed_forward();
            back_propagate();
        }
    }
}
int main()
{
    // Seed the RNG: <time.h> was included but srand() was never called,
    // so every run started from the identical "random" weights.
    srand((unsigned)time(0));
    CBackPNet neural;
    neural.random_weights();
    neural.test_network();
    neural.calculate_mean_squared_error();
    neural.train_network();
    neural.test_network();
    return 0;
}
Thanks for the answer.
At first I used the sigmoid function to convert my real input values into continuous values between 0 and 1. That works fine. Now I have to provide the desired output values, e.g. weight = 80. To convert my real output into values between 0 and 1, I used the sigmoid function like this:
return(float)(1/1+exp(-80));
But the problem is that I’m always getting 1 as the answer. In fact, whatever value I plug into the sigmoid equation, I get 1.
What should I do?
Is the desired output connected to the other layers?
Please help me.
Thanks
Please find below a copy of my source code.
// ============================================================================
// Back-propagation neural network (5 inputs, 8 hidden, 2 outputs, sigmoid
// units). This is the corrected version of the duplicated program above:
//  - standard headers (<iostream>, <cmath>, ...) instead of pre-standard ones;
//  - every layer-array access carries its unit subscript;
//  - weight arrays sized MHP so output units' HID_UNITS weights fit;
//  - transposed weight indices in back-propagation fixed;
//  - rand() seeded; whole-array assignments replaced with element copies.
// All arrays are indexed from 1; element 0 is unused padding.
// ============================================================================
#include <iostream>
#include <cmath>
#include <cstdlib>
#include <ctime>
using namespace std;

const int INP_UNITS = 5;        // Nodes in input layer
const int MIP = INP_UNITS + 1;  // Array bound (index 0 unused)
const int HID_UNITS = 8;        // Nodes in hidden layer
const int MHP = HID_UNITS + 1;
const int OUT_UNITS = 2;        // Nodes in output layer
const int MOT = OUT_UNITS + 1;
const int MAX_PAT = 4;          // Number of training patterns
const int MPT = MAX_PAT + 1;
// Patterns are read with 1-based row/column indices; row/column 0 unused.
const float INP_PATTERNS[MPT][MIP] = {{0,0,0,0,0,1},{0,0,0,0,1,0},{0,0,0,1,0,0},
{0,0,1,0,0,0},{0,1,0,0,0,0}};
const float OUT_PATTERNS[MPT][MOT] =
{{0,0,0},{0,0,0},{0,0,0},{0,0,0},{0,0,0}};

// One unit of a network layer. weight[] is sized MHP (not MIP): output units
// carry HID_UNITS (= 8) incoming weights, which would overflow weight[MIP].
struct layer {
    float weight[MHP];        // Incoming weights of this unit
    float weight_change[MHP]; // Previous deltas (for the momentum term)
    float threshold;          // Bias subtracted before the sigmoid
    float a;                  // Activation
    float error;              // Back-propagated delta
};

float real_input[MIP];           // Raw inputs entered by the user
float real_output[MOT];          // Raw desired outputs entered by the user
float target_output[MOT];        // Targets squashed into (0, 1)
float final_target_output[MOT];  // Outputs mapped back to real values
layer input[MIP];   // Input layer
layer hidden[MHP];  // Hidden layer
layer output[MOT];  // Output layer

// Declarations of all the functions used in the network.
class CBackPNet {
public:
    int contin();
    float random_num();
    float sigmoid(float);
    void random_weights();
    void get_input_data();
    void get_output_data();
    void get_target_output();
    void run_input_units();
    void run_hidden_units();
    void run_output_units();
    void feed_forward();
    void convert_real_output();
    void display_results();
    void calculate_mean_squared_error();
    void calculate_output_layer_errors();
    void calculate_hidden_layer_errors();
    void calculate_input_layer_errors();
    void test_network();
    void adjust_weight();
    void back_propagate();
    void blank_changes();
    void train_network();
};

// Uniform random float in [0, 1].
float CBackPNet::random_num()
{
    return static_cast<float>(rand()) / static_cast<float>(RAND_MAX);
}

// Assign a random initial value to every weight and echo it.
void CBackPNet::random_weights()
{
    for (int i = 1; i <= INP_UNITS; i++)
        for (int j = 1; j <= INP_UNITS; j++)
        {
            input[i].weight[j] = random_num();
            cout << "input[" << i << "].weight[" << j << "]=" << input[i].weight[j] << endl;
        }
    for (int i = 1; i <= HID_UNITS; i++)        // hidden unit i: one weight per input j
        for (int j = 1; j <= INP_UNITS; j++)
        {
            hidden[i].weight[j] = random_num();
            cout << "hidden[" << i << "].weight[" << j << "]=" << hidden[i].weight[j] << endl;
        }
    for (int i = 1; i <= OUT_UNITS; i++)        // output unit i: one weight per hidden j
        for (int j = 1; j <= HID_UNITS; j++)
        {
            output[i].weight[j] = random_num();
            cout << "output[" << i << "].weight[" << j << "]=" << output[i].weight[j] << endl;
        }
}

// Prompt the user for the five real input parameters -> real_input[1..5].
void CBackPNet::get_input_data()
{
    static const char* prompts[INP_UNITS + 1] = {
        "", "Machine Diameter: ", "Machine Gauge: ",
        "No. of Needles: ", "LFA: ", "Yarn Count: "
    };
    cout << endl;
    cout << "INPUT PARAMETERS" << endl;
    for (int i = 1; i <= INP_UNITS; i++)
    {
        cout << i << " . " << prompts[i];
        cin >> real_input[i];
    }
}

// Prompt the user for the two real output parameters -> real_output[1..2].
void CBackPNet::get_output_data()
{
    static const char* prompts[OUT_UNITS + 1] = {
        "", "Fabric Width: ", "Fabric Specific Weight: "
    };
    cout << endl;
    cout << "OUTPUT PARAMETERS" << endl;
    for (int i = 1; i <= OUT_UNITS; i++)
    {
        cout << i << " . " << prompts[i];
        cin >> real_output[i];
    }
}

// Logistic (sigmoid) transfer function: maps any real value into (0, 1).
float CBackPNet::sigmoid(float num)
{
    return (float)(1.0 / (1.0 + exp((double)-num)));
}

// Squash the real desired outputs into (0, 1) targets.
// NOTE: sigmoid saturates to ~1 for arguments above ~10 (e.g. weight = 80),
// so rescale large raw outputs into a small range before squashing.
void CBackPNet::get_target_output()
{
    cout << endl << "TARGET OUTPUT=";
    for (int i = 1; i <= OUT_UNITS; i++)
    {
        target_output[i] = sigmoid(2 * real_output[i]);
        cout << target_output[i] << " ";
    }
    cout << endl;
}

// Forward pass: raw inputs -> input-layer activations.
void CBackPNet::run_input_units()
{
    for (int i = 1; i <= INP_UNITS; i++)
    {
        float sum = 0;
        for (int j = 1; j <= INP_UNITS; j++)
            sum += input[i].weight[j] * real_input[j];
        input[i].a = sigmoid(sum - input[i].threshold);
    }
}

// Forward pass: input activations -> hidden activations.
void CBackPNet::run_hidden_units()
{
    for (int i = 1; i <= HID_UNITS; i++)
    {
        float sum = 0;
        for (int j = 1; j <= INP_UNITS; j++)
            sum += hidden[i].weight[j] * input[j].a;
        hidden[i].a = sigmoid(sum - hidden[i].threshold);
    }
}

// Forward pass: hidden activations -> output activations.
void CBackPNet::run_output_units()
{
    for (int i = 1; i <= OUT_UNITS; i++)
    {
        float sum = 0;
        for (int j = 1; j <= HID_UNITS; j++)
            sum += output[i].weight[j] * hidden[j].a;
        output[i].a = sigmoid(sum - output[i].threshold);
    }
}

// Map the (0,1) network outputs back to real values by inverting the
// target transform t = sigmoid(2x): x = 0.5 * ln(t / (1 - t)).
void CBackPNet::convert_real_output()
{
    for (int i = 1; i <= OUT_UNITS; i++)
    {
        final_target_output[i] = (float)(0.5 * log(output[i].a / (1.0 - output[i].a)));
        cout << "FINAL TARGET OUTPUT:";
        cout << final_target_output[i] << " ";
        cout << endl;
    }
}

// One complete forward pass through the three layers, in order.
void CBackPNet::feed_forward()
{
    run_input_units();
    run_hidden_units();
    run_output_units();
}

// Display the raw inputs and the network's output activations.
void CBackPNet::display_results()
{
    cout << endl << "INPUTS: ";
    for (int i = 1; i <= INP_UNITS; i++)
        cout << real_input[i] << " ";
    cout << endl << "OUTPUTS: ";
    for (int i = 1; i <= OUT_UNITS; i++)
        cout << output[i].a << " ";
    cout << endl;
}

// Ask whether to continue testing: 1 for Y/y, 0 for N/n.
int CBackPNet::contin()
{
    char k;
    cout << endl << " Do you want to continue testing? (Press Y or N) ";
    do
    {
        cin >> k;
    } while (k != 'Y' && k != 'y' && k != 'N' && k != 'n');
    return (k == 'Y' || k == 'y') ? 1 : 0;
}

// Interactive test loop until the user declines to continue.
void CBackPNet::test_network()
{
    cout << endl;
    do
    {
        get_input_data();
        get_output_data();
        get_target_output();
        feed_forward();
        display_results();
    } while (contin());
}

// Print E = 0.5 * sum_j (t_j - a_j)^2 using the global targets
// (the original shadowed them with a misaligned hard-coded local).
void CBackPNet::calculate_mean_squared_error()
{
    float sum = 0;
    for (int j = 1; j <= OUT_UNITS; j++)
    {
        const float diff = target_output[j] - output[j].a;
        sum += diff * diff;
    }
    const float error_function = 0.5f * sum;
    cout << "Error Function:";
    cout << error_function << " ";
    cout << endl;
}

// Output-layer deltas: delta_j = a_j (1 - a_j)(t_j - a_j).
void CBackPNet::calculate_output_layer_errors()
{
    for (int j = 1; j <= OUT_UNITS; j++)
    {
        const float act = output[j].a;
        output[j].error = act * (1 - act) * (target_output[j] - act);
    }
}

// Hidden deltas: the weight from hidden i to output j is output[j].weight[i].
void CBackPNet::calculate_hidden_layer_errors()
{
    for (int i = 1; i <= HID_UNITS; i++)
    {
        float sum = 0;
        for (int j = 1; j <= OUT_UNITS; j++)
            sum += output[j].error * output[j].weight[i];
        hidden[i].error = hidden[i].a * (1 - hidden[i].a) * sum;
    }
}

// Input deltas: the weight from input i to hidden j is hidden[j].weight[i].
void CBackPNet::calculate_input_layer_errors()
{
    for (int i = 1; i <= INP_UNITS; i++)
    {
        float sum = 0;
        for (int j = 1; j <= HID_UNITS; j++)
            sum += hidden[j].error * hidden[j].weight[i];
        input[i].error = input[i].a * (1 - input[i].a) * sum;
    }
}

// Gradient-descent update with momentum:
//   dW[j][i] = lr * delta_j * a_i + momentum * previous dW[j][i]
void CBackPNet::adjust_weight()
{
    const float learn_coeff = 0.9f; // Learning rate
    const float momentum = 0.9f;    // Momentum parameter
    for (int j = 1; j <= OUT_UNITS; j++)        // hidden -> output
        for (int i = 1; i <= HID_UNITS; i++)
        {
            output[j].weight_change[i] = learn_coeff * output[j].error * hidden[i].a
                                       + momentum * output[j].weight_change[i];
            output[j].weight[i] += output[j].weight_change[i];
        }
    for (int j = 1; j <= HID_UNITS; j++)        // input -> hidden
        for (int i = 1; i <= INP_UNITS; i++)
        {
            hidden[j].weight_change[i] = learn_coeff * hidden[j].error * input[i].a
                                       + momentum * hidden[j].weight_change[i];
            hidden[j].weight[i] += hidden[j].weight_change[i];
        }
    for (int j = 1; j <= INP_UNITS; j++)        // raw input -> input layer
        for (int i = 1; i <= INP_UNITS; i++)
        {
            input[j].weight_change[i] = learn_coeff * input[j].error * real_input[i]
                                      + momentum * input[j].weight_change[i];
            input[j].weight[i] += input[j].weight_change[i];
        }
}

// One backward pass; deltas must be computed before any weight moves.
void CBackPNet::back_propagate()
{
    calculate_mean_squared_error();
    calculate_output_layer_errors();
    calculate_hidden_layer_errors();
    calculate_input_layer_errors();
    adjust_weight();
}

// Zero every stored weight delta so the first momentum term contributes nothing.
void CBackPNet::blank_changes()
{
    for (int j = 1; j <= INP_UNITS; j++)
        for (int i = 1; i <= INP_UNITS; i++)
            input[j].weight_change[i] = 0;
    for (int j = 1; j <= HID_UNITS; j++)
        for (int i = 1; i <= INP_UNITS; i++)
            hidden[j].weight_change[i] = 0;
    for (int j = 1; j <= OUT_UNITS; j++)
        for (int i = 1; i <= HID_UNITS; i++)
            output[j].weight_change[i] = 0;
}

// Train on the fixed pattern set for a user-chosen number of cycles.
void CBackPNet::train_network()
{
    long num_cycles;
    cout << endl;
    cout << "Enter the number of training cycles : ";
    cin >> num_cycles;
    blank_changes();
    for (long loop = 1; loop <= num_cycles; loop++)
        for (int pat = 1; pat <= MAX_PAT; pat++)
        {
            for (int i = 1; i <= INP_UNITS; i++)
                real_input[i] = INP_PATTERNS[pat][i];
            for (int i = 1; i <= OUT_UNITS; i++)
                target_output[i] = OUT_PATTERNS[pat][i];
            feed_forward();
            back_propagate();
        }
}

int main()
{
    srand((unsigned)time(0)); // seed once; the original never seeded rand()
    CBackPNet neural;
    neural.random_weights();
    neural.test_network();
    neural.calculate_mean_squared_error();
    neural.train_network();
    neural.test_network();
    return 0;
}