Click here to Skip to main content
15,895,817 members
Home / Discussions / C#
   

C#

 
AnswerRe: RE: Save user's re-order grid preferences Pin
Searril8-Oct-10 8:03
Searril8-Oct-10 8:03 
GeneralRe: RE: Save user's re-order grid preferences Pin
Richard Andrew x648-Oct-10 12:13
professionalRichard Andrew x648-Oct-10 12:13 
GeneralRe: RE: Save user's re-order grid preferences Pin
Searril9-Oct-10 6:53
Searril9-Oct-10 6:53 
AnswerRe: RE: Save user's re-order grid preferences Pin
PIEBALDconsult8-Oct-10 15:51
mvePIEBALDconsult8-Oct-10 15:51 
AnswerRe: RE: Save user's re-order grid preferences Pin
Mycroft Holmes9-Oct-10 2:31
professionalMycroft Holmes9-Oct-10 2:31 
GeneralRe: RE: Save user's re-order grid preferences Pin
roman_s12-Oct-10 7:19
roman_s12-Oct-10 7:19 
GeneralRe: RE: Save user's re-order grid preferences Pin
Mycroft Holmes12-Oct-10 12:50
professionalMycroft Holmes12-Oct-10 12:50 
Questioni write a backprobagate neural network code with c# and it is work slowly [modified] Pin
karayel_kara8-Oct-10 5:06
karayel_kara8-Oct-10 5:06 
hi,
I wrote a neural network with 4 layers, a different neuron count in each layer, and 5 inputs and outputs. I have read many neural network implementations that run very fast, but mine is very slow.
When I use 400 patterns for input and 100,000 iterations, the wait is very long.
Can you help me solve this problem?
I look forward to your recommendations.
Best

this code :
Main block:
double err=10, mmse ;
           int ss = data.GetLength(0);
           snetwork(0);
           initializes();
           mmse = 0;
           double mser=0.0;
         for (i = 0; i <iterasyon; i++)



           {


             mser = 0.0;
             for (int xx = 0; xx <data.GetLength(0); xx++)
             {


                 ffw(xx);
                 backpropagate(xx);
                 mser += mse(nb_cikis, xx);
             }

             mmse = mser / data.GetLength(0);
             if (mmse < 0.00001) break;
         } ;


           ffw_egitimout();

Backpropagation of the error:
void  backpropagate(int r)
 {
            double sum;
            for (int j = 0; j < networks.layers[networks.layers.Length - 1].noron.Length; j++)
            {
                networks.layers[networks.layers.Length - 1].noron[j].hata = nb_cikis[r,j] - networks.layers[networks.layers.Length - 1].noron[j].output;
                networks.layers[networks.layers.Length - 1].noron[j].delta = networks.layers[networks.layers.Length - 1].noron[j].output * (1 - networks.layers[networks.layers.Length - 1].noron[j].output) * networks.layers[networks.layers.Length - 1].noron[j].hata;//son katman deltası hesaplanıyor...
            }

        	//	find delta for hidden layers	
	for(int i=networks.layers.Length-2;i>0;i--)
    {
		for(int j=0;j<networks.layers[i].noron.Length;j++)
        {
			sum=0.0;
             for (int k = 0; k < networks.layers[i].noron[j].sbaglanti.Length; k++)
            {
			             
          sum+=  networks.layers[laybul(networks.layers[i].noron[j].sbaglanti[k])].noron[nronbul(networks.layers[i].noron[j].sbaglanti[k])].delta*networks.layers[laybul(networks.layers[i].noron[j].sbaglanti[k])].noron[nronbul(networks.layers[i].noron[j].sbaglanti[k])].dentw[k];
				
			}
			networks.layers[i].noron[j].delta=networks.layers[i].noron[j].output*(1-networks.layers[i].noron[j].output)*sum;
//          
		}
	}
        
        

            	//	apply momentum ( does nothing if alpha=0 )

		for(int i=1;i<networks.layers.Length;i++)
        {
		
            for(int j=0;j<networks.layers[i].noron.Length;j++)
            {
			
                     for (int k = 0; k < networks.layers[i].noron[j].obaglanti.Length; k++)
                     {  
                         networks.layers[i].noron[j].dentw[k]+=momentum* networks.layers[i].noron[j].prew[k];
                   	
		     
			         }
              networks.layers[i].noron[j].bias+=momentum* networks.layers[i].noron[j].prewbias;
		    }
	    }

	//	adjust weights usng steepest descent	

		for(int i=1;i<networks.layers.Length;i++)
        {
	
            for(int j=0;j<networks.layers[i].noron.Length;j++)
            {
		
                     for (int k = 0; k < networks.layers[i].noron[j].obaglanti.Length; k++)
                     {
				        networks.layers[i].noron[j].prew[k]=Lr*networks.layers[i].noron[j].delta*networks.layers[laybul(networks.layers[i].noron[j].obaglanti[k])].noron[nronbul(networks.layers[i].noron[j].obaglanti[k])].output;

				        networks.layers[i].noron[j].dentw[k]+=networks.layers[i].noron[j].prew[k];
                   
			         }
                networks.layers[i].noron[j].prewbias=Lr*networks.layers[i].noron[j].delta;
			         
                networks.layers[i].noron[j].bias+=networks.layers[i].noron[j].prewbias;

		    }
        
        }
        
        
        
    }//function backprob. end

Feed-forward calculation:
void ffw(int v)
       {

           double ssum,ssum1;

            for (int n = 0; n < networks.layers[0].noron.Length; n++)//n. neuron
               {
                 networks.layers[0].noron[n].output = ndata[v,n];//
               }


           for (int l = 1; l < networks.layers.Length; l++)//l. layer
           {
               for (int n = 0; n < networks.layers[l].noron.Length; n++)//n. neuron
               {
                   ssum = 0.0;

                   for(int k=0;k<networks.layers[l].noron[n].obaglanti.Length;k++)
                   {

                        ssum+= networks.layers[laybul(networks.layers[l].noron[n].obaglanti[k])].noron[nronbul(networks.layers[l].noron[n].obaglanti[k])].output*networks.layers[l].noron[n].dentw[k];

                         // Apply weight to inputs and add to sum
                   }
                 ssum+=networks.layers[l].noron[n].bias;        // Apply bias
               networks.layers[l].noron[n].output=activation(ssum);    //sigmoid function apply

               }// for n

           }// for l





       }

network class:
 class neuron
        {
            public double[] dentw,prew;
            public double prewbias,bias, hata, delta;

            public double[] input;
            public double output;
            public int[] sbaglanti;
            public int[] sbagk;
            public int[] obaglanti;
            public int numara;
            public int lay;
            public int nn;
            public int dents;//dentrit sayısı
            public int oid, sid;

            public neuron(int onron, int snron)
            {
                dentw = new double[onron];
                prew = new double[onron];
                input = new double[onron];
                sbaglanti = new int[snron];
                sbagk = new int[snron];
                obaglanti = new int[onron];
                oid = onron;
                sid = snron;
                dents = onron;
            }



        }




        class layer
        {
            public neuron[] noron;

            public layer()
            {
                noron = new neuron[nron];

                for (int index = 0; index < noron.Length; index++)
                {

                    noron[index] = new neuron(nron, nron);


                }
            }

            public layer(int neurons, int sneurons, int kendi)
            {
                noron = new neuron[kendi];

                for (int index = 0; index < noron.Length; index++)
                {
                    noron[index] = new neuron(neurons, sneurons);
                }
            }




        }//layer class

        class net
        {

            public int ban, hen;
            public layer[] layers;

            public net()
            {
                layers = new layer[lay];

                for (int index = 0; index < layers.Length; index++)
                {
                    layers[index] = new layer();
                }
            }


            public net(int lays, int[] neuronMap)
            {
                layers = new layer[lays];

                for (int index = (layers.Length - 1); index >= 0; index--)
                {
                    if (index == 0)
                        layers[index] = new layer(1, neuronMap[index + 1], neuronMap[index]);
                    else if (index == layers.Length - 1)
                        layers[index] = new layer(neuronMap[index - 1], 1, neuronMap[index]);
                    else
                        layers[index] = new layer(neuronMap[index - 1], neuronMap[index + 1], neuronMap[index]);
                }
            }


        } //net classs

#endregion


        static int[] neuronMap = new int[] { 5, 10,10, 5 };

        net networks = new net(neuronMap.Length, neuronMap);


modified on Monday, October 11, 2010 4:56 AM

AnswerRe: i write a backprobagate neural network code with c# and it is work slowly Pin
Not Active8-Oct-10 5:23
mentorNot Active8-Oct-10 5:23 
GeneralRe: i write a backprobagate neural network code with c# and it is work slowly Pin
Khaniya8-Oct-10 21:07
professionalKhaniya8-Oct-10 21:07 
AnswerRe: i write a backprobagate neural network code with c# and it is work slowly Pin
Abhinav S8-Oct-10 22:43
Abhinav S8-Oct-10 22:43 
AnswerRe: i write a backprobagate neural network code with c# and it is work slowly [modified] Pin
Keith Barrow9-Oct-10 7:41
professionalKeith Barrow9-Oct-10 7:41 
AnswerRe: i write a backprobagate neural network code with c# and it is work slowly Pin
karayel_kara10-Oct-10 23:13
karayel_kara10-Oct-10 23:13 
QuestionCustom TextSource creates TextRuns more than once Pin
WebMaster8-Oct-10 4:47
WebMaster8-Oct-10 4:47 
AnswerRe: Custom TextSource creates TextRuns more than once Pin
OriginalGriff8-Oct-10 5:26
mveOriginalGriff8-Oct-10 5:26 
QuestionDos Command Pin
M Riaz Bashir8-Oct-10 2:31
M Riaz Bashir8-Oct-10 2:31 
AnswerRe: Dos Command Pin
#realJSOP8-Oct-10 2:34
mve#realJSOP8-Oct-10 2:34 
GeneralRe: Dos Command Pin
M Riaz Bashir8-Oct-10 2:37
M Riaz Bashir8-Oct-10 2:37 
AnswerRe: Dos Command Pin
Pete O'Hanlon8-Oct-10 2:37
mvePete O'Hanlon8-Oct-10 2:37 
AnswerRe: Dos Command Pin
MasttsaM13-Oct-10 22:04
MasttsaM13-Oct-10 22:04 
QuestionPassing Objects From Child Form Back To User Control Pin
JasonD_S7-Oct-10 22:54
JasonD_S7-Oct-10 22:54 
AnswerRe: Passing Objects From Child Form Back To User Control Pin
OriginalGriff7-Oct-10 23:11
mveOriginalGriff7-Oct-10 23:11 
GeneralRe: Passing Objects From Child Form Back To User Control Pin
JasonD_S8-Oct-10 1:25
JasonD_S8-Oct-10 1:25 
GeneralRe: Passing Objects From Child Form Back To User Control Pin
OriginalGriff8-Oct-10 1:26
mveOriginalGriff8-Oct-10 1:26 
GeneralRe: Passing Objects From Child Form Back To User Control Pin
JasonD_S8-Oct-10 1:31
JasonD_S8-Oct-10 1:31 

General General    News News    Suggestion Suggestion    Question Question    Bug Bug    Answer Answer    Joke Joke    Praise Praise    Rant Rant    Admin Admin   

Use Ctrl+Left/Right to switch messages, Ctrl+Up/Down to switch threads, Ctrl+Shift+Left/Right to switch pages.