neural network for mnist keep guessing 1 digit

64 Views Asked by At

I'm working on feed forward neural network for solving mnist dataset without library to help me better understand the concept of neural network. But I think I miss something as the result guessed by the neural network keep guessing just one number. For example just guess digit 5 or digit 9, even if the weight just pure random.

node[0] always bias

feedforward:

int c = 1;
input[0] = 1;
for (int j = 0; j < 28; j++)
{
    for (int k = 0; k < 28; k++)
    {
        if (traindata[i, j, k] > 126)
        {
            input[c] = 1;
        }
        else
        {
            input[c] = 0;
        } //Console.Write(input[c]);
    } //Console.WriteLine();
} //MessageBox.Show("Test");

//feed forward
hiddenlayer1[0] = 1;
double temp;
for (int j = 1; j < HIDDEN1; j++)
{
    temp = 0;
    for (int k = 0; k < INPUT; k++)
    {
        temp += input[k] * Winput_hiddenlayer1[k, j];
    } hiddenlayer1[j] = sigmoid(temp); //MessageBox.Show(hiddenlayer1[j].ToString());
}

hiddenlayer2[0] = 1;
for (int j = 1; j < HIDDEN2; j++)
{
    temp = 0;
    for (int k = 0; k < HIDDEN1; k++)
    {
        temp += hiddenlayer1[k] * Whiddenlayer1_hiddenlayer2[k, j];
    } hiddenlayer2[j] = sigmoid(temp);
}

for (int j = 0; j < OUTPUT; j++)
{
    temp = 0;
    for (int k = 0; k < HIDDEN2; k++)
    {
        temp += hiddenlayer2[k] * Whiddenlayer2_output[k, j];
    } output[j] = sigmoid(temp);
}

and the backpropagation:

//set desired output
for (int j = 0; j < OUTPUT; j++)
{
    Doutput[j] = 0;
} Doutput[labeltrain[i]] = 1;

//for (int j = 0; j < OUTPUT; j++)
//{
//    Console.Write(Doutput[j].ToString());
//} Console.WriteLine();
//MessageBox.Show("Test");

//output error calculation
for (int j = 0; j < OUTPUT; j++)
{
    outputerror[j] = (Doutput[j] - output[j]) * (1.0 - output[j]);
    //Console.WriteLine("expected: " + Doutput[j]);
    //Console.WriteLine("real: " + output[j]);
    //Console.WriteLine("(Doutput[j] - output[j]): " + (Doutput[j] - output[j]));
    //Console.WriteLine("1.0 - output[j]: " + (1.0 - output[j]));
    //Console.WriteLine("output error: " + outputerror[j]);
    //MessageBox.Show("Test");
}

//hidden2 error calculation
for (int j = 0; j < HIDDEN2; j++)
{
    temp = 0;
    for (int k = 0; k < OUTPUT; k++)
    {
        for (int l = 0; l < HIDDEN1; l++)
        {
            temp += outputerror[k] * Whiddenlayer1_hiddenlayer2[l, k];
        }
    } hidden2error[j] = temp * hiddenlayer2[j] * (1.0 - hiddenlayer2[j]);
}

//hidden1 error calculation
for (int j = 0; j < HIDDEN1; j++)
{
    temp = 0;
    for (int k = 0; k < HIDDEN2; k++)
    {
        for (int l = 0; l < INPUT; l++)
        {
            temp += hidden2error[k] * Winput_hiddenlayer1[l, k];
        }
    } hidden1error[j] = temp * hiddenlayer1[j] * (1.0 - hiddenlayer1[j]);
}

//hidden2-output weight adjustment
for (int j = 0; j < HIDDEN2; j++)
{
    for (int k = 0; k < OUTPUT; k++)
    {
        Whiddenlayer2_output[j,k] += LEARNING_RATE * outputerror[k] * hiddenlayer2[j];
    }
}

//hidden1-hidden2 weight adjusment
for (int j = 0; j < HIDDEN1; j++)
{
    for (int k = 0; k < HIDDEN2; k++)
    {
        Whiddenlayer1_hiddenlayer2[j, k] += LEARNING_RATE * hidden2error[k] * hiddenlayer1[j];
    }
}

//input-hidden1 weight adjustment
for (int j = 0; j < INPUT; j++)
{
    for (int k = 0; k < HIDDEN1; k++)
    {
        Winput_hiddenlayer1[j, k] += LEARNING_RATE * hidden1error[k] * input[j];
    }
}
0

There are 0 best solutions below