Board index » delphi » neural net

neural net

Does anyone have any software, whether free or for purchase, to implement neural networks (in Pascal)?
Any sources of useful reference material would also be greatly appreciated.

John

 

Re:neural net


Quote
jmas...@ibm.net wrote:
>Does anyone have any software, whether free or for purchase to implement neural networks (in Pascal).
>Any sources of useful reference material would also be greatly appreciated.

I have a fully working backpropagation implementation with as many
layers/units as you need. It's implemented in Delphi.
If there are some variables/functions you don't understand - give me
an e-mail.

Here are the most important parts of the code:

// Fixed-size vector of per-unit values (targets, responses, weights),
// in extended precision. maxUnits is a constant declared elsewhere.
TWeightArray = array [1..maxUnits] of extended;

procedure TNetwork.iteratePattern(p:integer);
// One backpropagation training step on pattern p:
//   forward pass -> output-layer errors -> hidden-layer errors ->
//   bias/weight updates for the output layer, then for all hidden layers.
// Weight-array layout (per unit): weights[1] is the bias; weights[j] for
// j >= 2 is the connection from unit j-1 of the layer below (or from
// input stimulus component j-1 for the first layer).
var
   r:TWeightArray;   // network response (output-layer activities) for pattern p

procedure computeOutErrors(t,r:TWeightArray);
// Output-layer deltas: (t-r)*r*(1-r), i.e. the error times the
// derivative of the logistic activation.
// NOTE(review): lr is folded into error[] here AND multiplied in again
// by the weight-update procedures below, so updates effectively scale
// with lr*lr (and compound further per layer through the backpropagated
// sum in computeHidLayersErrors) — confirm this is intended.
var
   i:integer;
begin
     for i:=1 to layers[noLayers].noUnits do
     begin
          error[noLayers,i]:=(t[i]-r[i])*r[i]*(1-r[i])*lr;

          //Used for computing the meanPatError and meanError:
//          patError[p,i]:=error[noLayers,i];
          // Per-unit squared error for this pattern (for error statistics).
          patError[p,i]:=sqr(t[i]-r[i]);
     end;
end;

procedure computeHidLayersErrors;
// Backward pass over the hidden layers (noLayers-1 down to 1):
// each unit's error is the weighted sum of the next layer's errors,
// times the logistic derivative h*(1-h).
var
   l,i,k:integer;
   sum,h:extended;
begin
     for l:=nolayers-1 downto 1 do
         for i:=1 to layers[l].noUnits do
         begin
              sum:=0;
              // weights[i+1] of unit k in layer l+1 is the connection
              // coming from unit i of layer l (weights[1] is the bias).
              for k:=1 to layers[l+1].noUnits do

sum:=sum+error[l+1,k]*layers[l+1].units[k].weights[i+1];
              h:=layers[l].units[i].activity;
              error[l,i]:=sum * h*(1-h)*lr;

          end;
end;

procedure changeOutBias_and_Weights;
// Gradient step for the output layer, using the activities of the
// layer directly below (noLayers-1) as the incoming signal.
var
   i,j:integer;
begin
     for i:=1 to layers[noLayers].noUnits do
     begin
          //The biases for output-layer  (delta-Ci):
          layers[noLayers].units[i].weights[1]:=
                layers[noLayers].units[i].weights[1] +
                error[noLayers,i]*lr;
          //the weights for output-layer (delta-Vij):
          for j:=2 to layers[noLayers].noWeights do
              layers[noLayers].units[i].weights[j]:=
                layers[noLayers].units[i].weights[j] +

error[noLayers,i]*layers[noLayers-1].units[j-1].activity*lr;
     end;
end;

procedure changeHidBias_and_Weights;
// Gradient step for every hidden layer. Layers 2..noLayers-1 use the
// activities of the layer below as input; layer 1 uses the raw
// training stimulus trainPatterns[p].s instead.
var
   l,i,j:integer;
begin
     //All layers between the response and the first layer
     //(uses the activities from previous layer)
     for l:=nolayers-1 downto 2 do   //do not include the first layer
          for i:=1 to layers[l].noUnits do
          begin
               //the bias for hidden-layers     (delta-bi)
               layers[l].units[i].weights[1]:=
               layers[l].units[i].weights[1]+
               error[l,i]*lr;
               //the weights for hidden-layers  (delta-wij)
               for j:=2 to layers[l].noWeights do
                   layers[l].units[i].weights[j]:=
                   layers[l].units[i].weights[j]+
                   error[l,i]*layers[l-1].units[j-1].activity*lr;
          end;

     //The first layer (uses the input-stimulus)
     for i:=1 to layers[1].noUnits do
     begin
          //the bias for hidden-layers     (delta-bi)
          layers[1].units[i].weights[1]:=
          layers[1].units[i].weights[1]+
          error[1,i]*lr;
          //the weights for hidden-layers  (delta-wij)
          for j:=2 to layers[1].noWeights do
              layers[1].units[i].weights[j]:=
              layers[1].units[i].weights[j]+
              error[1,i]*trainPatterns[p].s[j-1]*lr;
     end;
end;

begin //iteratePattern

          // Count patterns seen; a full sweep over the training set
          // (patCount = noTrainPatterns) counts as one iteration (epoch).
          inc(patCount);
// 2 — forward pass: compute the network's response to the stimulus.
          r:=computeResponse(noStimulus,trainPatterns[p].s);

// 3 — output-layer errors (deltas) against the target vector.
          computeOutErrors(trainPatterns[p].t , r);

// 4 — backpropagate errors through the hidden layers.
          computeHidLayersErrors;

// 5 — apply the weight/bias updates.
          changeOutBias_and_Weights;
          changeHidBias_and_Weights;
          if patCount=noTrainPatterns then
          begin
               inc(iterations);
               patCount:=0;
          end;
end;

procedure TNetwork.iterateOne;
// Run one epoch: perform noTrainPatterns single-pattern training steps.
// With randomSelect set, each step trains on a uniformly random pattern
// (sampling with replacement); otherwise patterns are taken in order.
var
   step:integer;
   chosen:longint;

begin        //TNetwork.iterateOne
// 1
     for step:=1 to noTrainPatterns do
     begin
          if randomSelect then
             chosen:=random(noTrainPatterns)+1
          else
             chosen:=step;
          iteratePattern(chosen);
     end;
end;

__________________________________________________
Andre Ranvik        -       MSIS-student at Pitt.
6629 Wilkins Avenue,  Pittsburgh, PA 15217,    USA
Phone:412 421 5651  URL: http://www.pitt.edu/~oarst1
__________________________________________________

Other Threads