Artificial Intelligence - Class TNeuralNetwork

Postby Antonio Linares » Wed Sep 30, 2020 9:51 am

Work in progress:

https://gist.github.com/espiritusanti/b7485c68a06ef2c8c76d8c62c8c39d8f
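The class below implements a small feed-forward network (two inputs, one hidden layer with two neurons, one output) trained with plain backpropagation on the XOR truth table.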


Code:
#ifdef __PLATFORM__WINDOWS
   #include "c:\harbour\include\hbclass.ch"
#else
   #include "/usr/include/harbour/hbclass.ch"
#endif

#define NUM_INPUTS               2
#define A_HIDDENLAYERSNEURONS  { 2 }  // one hidden layer with 2 neurons
#define NUM_OUTPUTS              1

static nLearningRate := 0.1

function Main()

   local oNN := TNeuralNetwork():New( NUM_INPUTS, A_HIDDENLAYERSNEURONS, NUM_OUTPUTS )

   oNN:Learn( { 0, 0 }, { 0 } )
   oNN:Learn( { 1, 0 }, { 1 } )
   oNN:Learn( { 0, 1 }, { 1 } )
   oNN:Learn( { 1, 1 }, { 0 } )
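
   // NOTE: a single pass over the four XOR patterns is not enough to converge;
   // the later posts wrap these calls in a training loop of many epochs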
   
   ? Time()

return nil

CLASS TNeuralNetwork

   DATA   oInputLayer
   
   DATA   aHiddenLayers INIT {}    
   
   DATA   oOutputLayer
   
   METHOD New( nInputs, aHiddenLayersNeurons, nOutputs )

   METHOD Learn( aInputs, aOutputs )

   METHOD Propagation( aInputs, aOutputs )

   METHOD BackPropagation( aInputs, aOutputs )

ENDCLASS

METHOD New( nInputs, aHiddenLayersNeurons, nOutputs ) CLASS TNeuralNetwork

   local n

   ::oInputLayer = TNeuralLayer():New( nInputs, nInputs )
   
   for n = 1 to Len( aHiddenLayersNeurons ) // number of hidden layers
      AAdd( ::aHiddenLayers, TNeuralLayer():New( aHiddenLayersNeurons[ n ],;
            If( n == 1, nInputs, aHiddenLayersNeurons[ n - 1 ] ) ) )
   next
   
   ::oOutputLayer = TNeuralLayer():New( nOutputs, ATail( aHiddenLayersNeurons ) )
   
return Self  

METHOD Learn( aInputs, aOutputs ) CLASS TNeuralNetwork

   local n

   ::Propagation( aInputs, aOutputs )
   ::BackPropagation( aInputs, aOutputs )
   
   ? "Inputs:", aInputs
   
   for n = 1 to Len( ::oOutputLayer:aNeurons )
      ?? ", output:", ::oOutputLayer:aNeurons[ n ]:nValue
      ?? ", expected output:", aOutputs[ n ]
      ?? ", error:", ::oOutputLayer:aNeurons[ n ]:nDeltaError
   next

return nil

METHOD Propagation( aInputs, aOutputs ) CLASS TNeuralNetwork

   local oInputNeuron, oHiddenLayer, oHiddenLayerNeuron, oOutputNeuron, nSum
   
   for each oInputNeuron in ::oInputLayer:aNeurons
      oInputNeuron:nValue = aInputs[ oInputNeuron:__enumIndex ]
   next
   
   for each oHiddenLayer in ::aHiddenLayers
      if oHiddenLayer:__enumIndex == 1
         for each oHiddenLayerNeuron in oHiddenLayer:aNeurons
            nSum = oHiddenLayerNeuron:nBias
            for each oInputNeuron in ::oInputLayer:aNeurons
               nSum += oInputNeuron:nValue * oHiddenLayerNeuron:aWeights[ oInputNeuron:__enumIndex ]
            next
            oHiddenLayerNeuron:nValue = Sigmoid( nSum )
         next
      endif
   next  

   for each oOutputNeuron in ::oOutputLayer:aNeurons
      nSum = oOutputNeuron:nBias
      for each oHiddenLayerNeuron in ATail( ::aHiddenLayers ):aNeurons
        nSum += oHiddenLayerNeuron:nValue * oOutputNeuron:aWeights[ oHiddenLayerNeuron:__enumIndex ]
      next
      oOutputNeuron:nValue = Sigmoid( nSum )
   next

return nil

METHOD BackPropagation( aInputs, aOutputs ) CLASS TNeuralNetwork

   local oOutputNeuron, oHiddenLayer, oHiddenLayerNeuron, oInputNeuron, nError
   
   for each oOutputNeuron in ::oOutputLayer:aNeurons
      nError = aOutputs[ oOutputNeuron:__enumIndex ] - oOutputNeuron:nValue
      oOutputNeuron:nDeltaError = nError * dSigmoid( oOutputNeuron:nValue )
   next    
   
   for each oHiddenLayer in ::aHiddenLayers // how to step it backwards ?
      if oHiddenLayer:__enumIndex == Len( ::aHiddenLayers )
         for each oHiddenLayerNeuron in oHiddenLayer:aNeurons
            nError = 0
            for each oOutputNeuron in ::oOutputLayer:aNeurons
               // w_jk connects hidden neuron j to output neuron k, so index by the hidden neuron
               nError += oOutputNeuron:nDeltaError * oOutputNeuron:aWeights[ oHiddenLayerNeuron:__enumIndex ]
            next
            oHiddenLayerNeuron:nDeltaError = nError * dSigmoid( oHiddenLayerNeuron:nValue )
         next      
      endif
   next
   
   for each oOutputNeuron in ::oOutputLayer:aNeurons
      oOutputNeuron:nBias += oOutputNeuron:nDeltaError * nLearningRate
      for each oHiddenLayer in ::aHiddenLayers
         if oHiddenLayer:__enumIndex == Len( ::aHiddenLayers )
            for each oHiddenLayerNeuron in oHiddenLayer:aNeurons
               for each oOutputNeuron in ::oOutputLayer:aNeurons
                  oOutputNeuron:aWeights[ oHiddenLayerNeuron:__enumIndex ] += oHiddenLayerNeuron:nValue * ;
                  oOutputNeuron:nDeltaError * nLearningRate
               next        
            next
         endif
      next    
   next  
   
   for each oHiddenLayerNeuron in ::aHiddenLayers[ 1 ]:aNeurons
      oHiddenLayerNeuron:nBias += oHiddenLayerNeuron:nDeltaError * nLearningRate
      for each oInputNeuron in ::oInputLayer:aNeurons
         // the incoming value must be indexed by the input neuron, not by the hidden one
         oHiddenLayerNeuron:aWeights[ oInputNeuron:__enumIndex ] += aInputs[ oInputNeuron:__enumIndex ] * ;
         oHiddenLayerNeuron:nDeltaError * nLearningRate
      next
   next

return nil

CLASS TNeuralLayer

   DATA   aNeurons INIT {}
   
   METHOD New( nNeurons, nInputs )    
   
ENDCLASS  

METHOD New( nNeurons, nInputs ) CLASS TNeuralLayer

   local n
   
   for n = 1 to nNeurons
      AAdd( ::aNeurons, TNeuron():New( nInputs ) )
   next

return Self

CLASS TNeuron

   DATA   nBias    INIT hb_Random()
   
   DATA   aWeights
   
   DATA   nValue
   
   DATA   nDeltaError
   
   METHOD New( nInputs )
   
ENDCLASS

METHOD New( nInputs ) CLASS TNeuron

   local n

   ::aWeights = Array( nInputs )
   
   for n = 1 to nInputs
      ::aWeights[ n ] = hb_Random()
   next      
   
return Self  

function Sigmoid( nValue )

return 1 / ( 1 + Exp( -nValue ) ) // Exp() is Harbour's standard exponential function

function dSigmoid( nValue ) // returns the derivative of the sigmoid function

   // nValue is the neuron's stored activation, i.e. already Sigmoid( nSum ),
   // so the derivative reduces to nValue * ( 1 - nValue )

return nValue * ( 1 - nValue )
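
For reference, the math the code relies on, writing $o_k$ for an output activation, $h_j$ for a hidden activation, $t_k$ for the expected output and $\eta$ for nLearningRate:

$$\sigma(x)=\frac{1}{1+e^{-x}}, \qquad \sigma'(x)=\sigma(x)\,\bigl(1-\sigma(x)\bigr)$$

$$\delta_k=(t_k-o_k)\,o_k(1-o_k), \qquad \delta_j=\Bigl(\sum_k \delta_k\,w_{jk}\Bigr)\,h_j(1-h_j), \qquad \Delta w=\eta\,\delta\cdot(\text{incoming value})$$

Since each TNeuron stores the already-activated value in nValue, dSigmoid() must be applied to the activation itself rather than to the weighted sum.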
regards, saludos

Antonio Linares
www.fivetechsoft.com

Re: Artificial Intelligence - Class TNeuralNetwork

Postby Antonio Linares » Sat Oct 03, 2020 5:50 am

Testing the Neural Network once trained:

Code:
#ifdef __PLATFORM__WINDOWS
   #include "c:\harbour\include\hbclass.ch"
#else
   #include "/usr/include/harbour/hbclass.ch"
#endif

#define NUM_INPUTS               2
#define A_HIDDENLAYERSNEURONS  { 2 }  // one hidden layer with 2 neurons
#define NUM_OUTPUTS              1

static nLearningRate := 0.1

function Main()

   local oNN := TNeuralNetwork():New( NUM_INPUTS, A_HIDDENLAYERSNEURONS, NUM_OUTPUTS )
   local n
   
   /*
   for n = 1 to 10000
      oNN:Learn( { 0, 0 }, { 0 }, .F. )
      oNN:Learn( { 1, 0 }, { 1 }, .F. )
      oNN:Learn( { 0, 1 }, { 1 }, .F. )
      oNN:Learn( { 1, 1 }, { 0 }, .F. )
   next
   */

   
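   // weights and biases below were taken from a previous training run,
   // hardcoded so the trained network can be tested without relearning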
   oNN:aHiddenLayers[ 1 ]:aNeurons[ 1 ]:aWeights[ 1 ] = 3.70166
   oNN:aHiddenLayers[ 1 ]:aNeurons[ 1 ]:aWeights[ 2 ] = 3.7104
   oNN:aHiddenLayers[ 1 ]:aNeurons[ 1 ]:nBias = -5.67588
   oNN:aHiddenLayers[ 1 ]:aNeurons[ 2 ]:aWeights[ 1 ] = 5.83499
   oNN:aHiddenLayers[ 1 ]:aNeurons[ 2 ]:aWeights[ 2 ] = 5.87966
   oNN:aHiddenLayers[ 1 ]:aNeurons[ 2 ]:nBias = -2.45284
   oNN:oOutputLayer:aNeurons[ 1 ]:aWeights[ 1 ] = -8.19958
   oNN:oOutputLayer:aNeurons[ 1 ]:aWeights[ 2 ] = 7.52827
   oNN:oOutputLayer:aNeurons[ 1 ]:nBias = -3.3797

   oNN:Learn( { 0, 0 }, { 0 }, .T. )
   oNN:Learn( { 1, 0 }, { 1 }, .T. )
   oNN:Learn( { 0, 1 }, { 1 }, .T. )
   oNN:Learn( { 1, 1 }, { 0 }, .T. )

   // ? oNN
   
   ? Time()

return nil

CLASS TNeuralNetwork

   DATA   oInputLayer
   
   DATA   aHiddenLayers INIT {}    
   
   DATA   oOutputLayer
   
   METHOD New( nInputs, aHiddenLayersNeurons, nOutputs )

   METHOD Learn( aInputs, aOutputs, lDebug )

   METHOD Propagation( aInputs, aOutputs )

   METHOD BackPropagation( aInputs, aOutputs )

ENDCLASS

METHOD New( nInputs, aHiddenLayersNeurons, nOutputs ) CLASS TNeuralNetwork

   local n

   ::oInputLayer = TNeuralLayer():New( nInputs, nInputs )
   
   for n = 1 to Len( aHiddenLayersNeurons ) // number of hidden layers
      AAdd( ::aHiddenLayers, TNeuralLayer():New( aHiddenLayersNeurons[ n ],;
            If( n == 1, nInputs, aHiddenLayersNeurons[ n - 1 ] ) ) )
   next
   
   ::oOutputLayer = TNeuralLayer():New( nOutputs, ATail( aHiddenLayersNeurons ) )
   
return Self  

METHOD Learn( aInputs, aOutputs, lDebug ) CLASS TNeuralNetwork

   local n

   ::Propagation( aInputs, aOutputs )
   ::BackPropagation( aInputs, aOutputs )
   
   if lDebug
      ? "Inputs:", aInputs
   
      for n = 1 to Len( ::oOutputLayer:aNeurons )
         ?? ", output:", ::oOutputLayer:aNeurons[ n ]:nValue
         ?? ", expected output:", aOutputs[ n ]
         ?? ", error:", ::oOutputLayer:aNeurons[ n ]:nDeltaError
      next
   endif      

return nil

METHOD Propagation( aInputs, aOutputs ) CLASS TNeuralNetwork

   local oInputNeuron, oHiddenLayer, oHiddenLayerNeuron, oOutputNeuron, nSum
   
   for each oInputNeuron in ::oInputLayer:aNeurons
      oInputNeuron:nValue = aInputs[ oInputNeuron:__enumIndex ]
   next
   
   for each oHiddenLayer in ::aHiddenLayers
      if oHiddenLayer:__enumIndex == 1
         for each oHiddenLayerNeuron in oHiddenLayer:aNeurons
            nSum = oHiddenLayerNeuron:nBias
            for each oInputNeuron in ::oInputLayer:aNeurons
               nSum += oInputNeuron:nValue * oHiddenLayerNeuron:aWeights[ oInputNeuron:__enumIndex ]
            next
            oHiddenLayerNeuron:nValue = Sigmoid( nSum )
         next
      endif
   next  

   for each oOutputNeuron in ::oOutputLayer:aNeurons
      nSum = oOutputNeuron:nBias
      for each oHiddenLayerNeuron in ATail( ::aHiddenLayers ):aNeurons
        nSum += oHiddenLayerNeuron:nValue * oOutputNeuron:aWeights[ oHiddenLayerNeuron:__enumIndex ]
      next
      oOutputNeuron:nValue = Sigmoid( nSum )
   next

return nil

METHOD BackPropagation( aInputs, aOutputs ) CLASS TNeuralNetwork

   local oOutputNeuron, oHiddenLayer, oHiddenLayerNeuron, oInputNeuron, nError
   
   for each oOutputNeuron in ::oOutputLayer:aNeurons
      nError = aOutputs[ oOutputNeuron:__enumIndex ] - oOutputNeuron:nValue
      oOutputNeuron:nDeltaError = nError * dSigmoid( oOutputNeuron:nValue )
   next    
   
   for each oHiddenLayer in ::aHiddenLayers // how to step it backwards ?
      if oHiddenLayer:__enumIndex == Len( ::aHiddenLayers )
         for each oHiddenLayerNeuron in oHiddenLayer:aNeurons
            nError = 0
            for each oOutputNeuron in ::oOutputLayer:aNeurons
               // w_jk connects hidden neuron j to output neuron k, so index by the hidden neuron
               nError += oOutputNeuron:nDeltaError * oOutputNeuron:aWeights[ oHiddenLayerNeuron:__enumIndex ]
            next
            oHiddenLayerNeuron:nDeltaError = nError * dSigmoid( oHiddenLayerNeuron:nValue )
         next      
      endif
   next
   
   for each oOutputNeuron in ::oOutputLayer:aNeurons
      oOutputNeuron:nBias += oOutputNeuron:nDeltaError * nLearningRate
      for each oHiddenLayer in ::aHiddenLayers
         if oHiddenLayer:__enumIndex == Len( ::aHiddenLayers )
            for each oHiddenLayerNeuron in oHiddenLayer:aNeurons
               for each oOutputNeuron in ::oOutputLayer:aNeurons
                  oOutputNeuron:aWeights[ oHiddenLayerNeuron:__enumIndex ] += oHiddenLayerNeuron:nValue * ;
                  oOutputNeuron:nDeltaError * nLearningRate
               next        
            next
         endif
      next    
   next  
   
   for each oHiddenLayerNeuron in ::aHiddenLayers[ 1 ]:aNeurons
      oHiddenLayerNeuron:nBias += oHiddenLayerNeuron:nDeltaError * nLearningRate
      for each oInputNeuron in ::oInputLayer:aNeurons
         // the incoming value must be indexed by the input neuron, not by the hidden one
         oHiddenLayerNeuron:aWeights[ oInputNeuron:__enumIndex ] += aInputs[ oInputNeuron:__enumIndex ] * ;
         oHiddenLayerNeuron:nDeltaError * nLearningRate
      next
   next

return nil

CLASS TNeuralLayer

   DATA   aNeurons INIT {}
   
   METHOD New( nNeurons, nInputs )    
   
ENDCLASS  

METHOD New( nNeurons, nInputs ) CLASS TNeuralLayer

   local n
   
   for n = 1 to nNeurons
      AAdd( ::aNeurons, TNeuron():New( nInputs ) )
   next

return Self

CLASS TNeuron

   DATA   nBias    INIT hb_Random()
   
   DATA   aWeights
   
   DATA   nValue
   
   DATA   nDeltaError
   
   METHOD New( nInputs )
   
ENDCLASS

METHOD New( nInputs ) CLASS TNeuron

   local n

   ::aWeights = Array( nInputs )
   
   for n = 1 to nInputs
      ::aWeights[ n ] = hb_Random()
   next      
   
return Self  

function Sigmoid( nValue )

return 1 / ( 1 + Exp( -nValue ) ) // Exp() is Harbour's standard exponential function

function dSigmoid( nValue ) // returns the derivative of the sigmoid function

   // nValue is the neuron's stored activation, i.e. already Sigmoid( nSum ),
   // so the derivative reduces to nValue * ( 1 - nValue )

return nValue * ( 1 - nValue )


Inputs: {0, 0}, output: 0.06 , expected output: 0 , error: -0.003035
Inputs: {1, 0}, output: 0.95 , expected output: 1 , error: 0.002573
Inputs: {0, 1}, output: 0.95 , expected output: 1 , error: 0.002556
Inputs: {1, 1}, output: 0.06 , expected output: 0 , error: -0.002977
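
Once the trained weights are loaded, a propagation-only query would avoid the weight updates that Learn() always performs. A minimal sketch of such a method (the name Test and its return convention are assumptions, not part of the original class; its declaration must also be added to the CLASS block):

Code:
METHOD Test( aInputs ) CLASS TNeuralNetwork

   local oOutputNeuron
   local aResults := {}

   ::Propagation( aInputs ) // forward pass only, no BackPropagation()

   for each oOutputNeuron in ::oOutputLayer:aNeurons
      AAdd( aResults, oOutputNeuron:nValue ) // collect each output activation
   next

return aResults

// usage: ? oNN:Test( { 1, 0 } )[ 1 ]  --> close to 1 once trained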
regards, saludos

Antonio Linares
www.fivetechsoft.com

Re: Artificial Intelligence - Class TNeuralNetwork

Postby Antonio Linares » Sat Oct 03, 2020 5:59 am

Working version (only multi-hidden-layer support is still missing):

Code:
#ifdef __PLATFORM__WINDOWS
   #include "c:\harbour\include\hbclass.ch"
#else
   #include "/usr/include/harbour/hbclass.ch"
#endif

#define NUM_INPUTS               2
#define A_HIDDENLAYERSNEURONS  { 2 }  // one hidden layer with 2 neurons
#define NUM_OUTPUTS              1

static nLearningRate := 0.1

function Main()

   local oNN := TNeuralNetwork():New( NUM_INPUTS, A_HIDDENLAYERSNEURONS, NUM_OUTPUTS )
   local n
   
   SET DECIMALS TO 6
   
   for n = 1 to 30000
      oNN:Learn( { 0, 0 }, { 0 }, .F. )
      oNN:Learn( { 1, 0 }, { 1 }, .F. )
      oNN:Learn( { 0, 1 }, { 1 }, .F. )
      oNN:Learn( { 1, 1 }, { 0 }, .F. )
   next

   /*
   oNN:aHiddenLayers[ 1 ]:aNeurons[ 1 ]:aWeights[ 1 ] = 3.70166
   oNN:aHiddenLayers[ 1 ]:aNeurons[ 1 ]:aWeights[ 2 ] = 3.7104
   oNN:aHiddenLayers[ 1 ]:aNeurons[ 1 ]:nBias = -5.67588
   oNN:aHiddenLayers[ 1 ]:aNeurons[ 2 ]:aWeights[ 1 ] = 5.83499
   oNN:aHiddenLayers[ 1 ]:aNeurons[ 2 ]:aWeights[ 2 ] = 5.87966
   oNN:aHiddenLayers[ 1 ]:aNeurons[ 2 ]:nBias = -2.45284
   oNN:oOutputLayer:aNeurons[ 1 ]:aWeights[ 1 ] = -8.19958
   oNN:oOutputLayer:aNeurons[ 1 ]:aWeights[ 2 ] = 7.52827
   oNN:oOutputLayer:aNeurons[ 1 ]:nBias = -3.3797
   */


   oNN:Learn( { 0, 0 }, { 0 }, .T. )
   oNN:Learn( { 1, 0 }, { 1 }, .T. )
   oNN:Learn( { 0, 1 }, { 1 }, .T. )
   oNN:Learn( { 1, 1 }, { 0 }, .T. )

   // ? oNN
   
   ? Time()

return nil

CLASS TNeuralNetwork

   DATA   oInputLayer
   
   DATA   aHiddenLayers INIT {}    
   
   DATA   oOutputLayer
   
   METHOD New( nInputs, aHiddenLayersNeurons, nOutputs )

   METHOD Learn( aInputs, aOutputs, lDebug )

   METHOD Propagation( aInputs, aOutputs )

   METHOD BackPropagation( aInputs, aOutputs )

ENDCLASS

METHOD New( nInputs, aHiddenLayersNeurons, nOutputs ) CLASS TNeuralNetwork

   local n

   ::oInputLayer = TNeuralLayer():New( nInputs, nInputs )
   
   for n = 1 to Len( aHiddenLayersNeurons ) // number of hidden layers
      AAdd( ::aHiddenLayers, TNeuralLayer():New( aHiddenLayersNeurons[ n ],;
            If( n == 1, nInputs, aHiddenLayersNeurons[ n - 1 ] ) ) )
   next
   
   ::oOutputLayer = TNeuralLayer():New( nOutputs, ATail( aHiddenLayersNeurons ) )
   
return Self  

METHOD Learn( aInputs, aOutputs, lDebug ) CLASS TNeuralNetwork

   local n

   ::Propagation( aInputs, aOutputs )
   ::BackPropagation( aInputs, aOutputs )
   
   if lDebug
      ? "Inputs:", aInputs
   
      for n = 1 to Len( ::oOutputLayer:aNeurons )
         ?? ", output:", ::oOutputLayer:aNeurons[ n ]:nValue
         ?? ", expected output:", aOutputs[ n ]
         ?? ", error:", ::oOutputLayer:aNeurons[ n ]:nDeltaError
      next
   endif      

return nil

METHOD Propagation( aInputs, aOutputs ) CLASS TNeuralNetwork

   local oInputNeuron, oHiddenLayer, oHiddenLayerNeuron, oOutputNeuron, nSum
   
   for each oInputNeuron in ::oInputLayer:aNeurons
      oInputNeuron:nValue = aInputs[ oInputNeuron:__enumIndex ]
   next
   
   for each oHiddenLayer in ::aHiddenLayers
      if oHiddenLayer:__enumIndex == 1
         for each oHiddenLayerNeuron in oHiddenLayer:aNeurons
            nSum = oHiddenLayerNeuron:nBias
            for each oInputNeuron in ::oInputLayer:aNeurons
               nSum += oInputNeuron:nValue * oHiddenLayerNeuron:aWeights[ oInputNeuron:__enumIndex ]
            next
            oHiddenLayerNeuron:nValue = Sigmoid( nSum )
         next
      endif
   next  

   for each oOutputNeuron in ::oOutputLayer:aNeurons
      nSum = oOutputNeuron:nBias
      for each oHiddenLayerNeuron in ATail( ::aHiddenLayers ):aNeurons
        nSum += oHiddenLayerNeuron:nValue * oOutputNeuron:aWeights[ oHiddenLayerNeuron:__enumIndex ]
      next
      oOutputNeuron:nValue = Sigmoid( nSum )
   next

return nil

METHOD BackPropagation( aInputs, aOutputs ) CLASS TNeuralNetwork

   local oOutputNeuron, oHiddenLayer, oHiddenLayerNeuron, oInputNeuron, nError
   
   for each oOutputNeuron in ::oOutputLayer:aNeurons
      nError = aOutputs[ oOutputNeuron:__enumIndex ] - oOutputNeuron:nValue
      oOutputNeuron:nDeltaError = nError * dSigmoid( oOutputNeuron:nValue )
   next    
   
   for each oHiddenLayer in ::aHiddenLayers // how to step it backwards ? (a sketch follows after the results below)
      if oHiddenLayer:__enumIndex == Len( ::aHiddenLayers )
         for each oHiddenLayerNeuron in oHiddenLayer:aNeurons
            nError = 0
            for each oOutputNeuron in ::oOutputLayer:aNeurons
               // w_jk connects hidden neuron j to output neuron k, so index by the hidden neuron
               nError += oOutputNeuron:nDeltaError * oOutputNeuron:aWeights[ oHiddenLayerNeuron:__enumIndex ]
            next
            oHiddenLayerNeuron:nDeltaError = nError * dSigmoid( oHiddenLayerNeuron:nValue )
         next      
      endif
   next
   
   for each oOutputNeuron in ::oOutputLayer:aNeurons
      oOutputNeuron:nBias += oOutputNeuron:nDeltaError * nLearningRate
      for each oHiddenLayer in ::aHiddenLayers
         if oHiddenLayer:__enumIndex == Len( ::aHiddenLayers )
            for each oHiddenLayerNeuron in oHiddenLayer:aNeurons
               for each oOutputNeuron in ::oOutputLayer:aNeurons
                  oOutputNeuron:aWeights[ oHiddenLayerNeuron:__enumIndex ] += oHiddenLayerNeuron:nValue * ;
                  oOutputNeuron:nDeltaError * nLearningRate
               next        
            next
         endif
      next    
   next  
   
   for each oHiddenLayerNeuron in ::aHiddenLayers[ 1 ]:aNeurons
      oHiddenLayerNeuron:nBias += oHiddenLayerNeuron:nDeltaError * nLearningRate
      for each oInputNeuron in ::oInputLayer:aNeurons
         // the incoming value must be indexed by the input neuron, not by the hidden one
         oHiddenLayerNeuron:aWeights[ oInputNeuron:__enumIndex ] += aInputs[ oInputNeuron:__enumIndex ] * ;
         oHiddenLayerNeuron:nDeltaError * nLearningRate
      next
   next

return nil

CLASS TNeuralLayer

   DATA   aNeurons INIT {}
   
   METHOD New( nNeurons, nInputs )    
   
ENDCLASS  

METHOD New( nNeurons, nInputs ) CLASS TNeuralLayer

   local n
   
   for n = 1 to nNeurons
      AAdd( ::aNeurons, TNeuron():New( nInputs ) )
   next

return Self

CLASS TNeuron

   DATA   nBias    INIT hb_Random()
   
   DATA   aWeights
   
   DATA   nValue
   
   DATA   nDeltaError
   
   METHOD New( nInputs )
   
ENDCLASS

METHOD New( nInputs ) CLASS TNeuron

   local n

   ::aWeights = Array( nInputs )
   
   for n = 1 to nInputs
      ::aWeights[ n ] = hb_Random()
   next      
   
return Self  

function Sigmoid( nValue )

return 1 / ( 1 + Exp( -nValue ) ) // Exp() is Harbour's standard exponential function

function dSigmoid( nValue ) // returns the derivative of the sigmoid function

   // nValue is the neuron's stored activation, i.e. already Sigmoid( nSum ),
   // so the derivative reduces to nValue * ( 1 - nValue )

return nValue * ( 1 - nValue )


Inputs: {0, 0}, output: 0.040317 , expected output: 0 , error: -0.001559955174626856
Inputs: {1, 0}, output: 0.963627 , expected output: 1 , error: 0.001274852658445150
Inputs: {0, 1}, output: 0.939530 , expected output: 1 , error: 0.003435486831421813
Inputs: {1, 1}, output: 0.051295 , expected output: 0 , error: -0.002496201295406568
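
On the open question in BackPropagation() ("how to step it backwards ?"): a descending for loop over ::aHiddenLayers visits the layers in reverse, so each layer's deltas can be computed from the layer that follows it. A minimal, untested sketch of how the hidden-delta pass could be generalized to several hidden layers (variable names are illustrative):

Code:
   local nLayer, oNeuron, oNextNeuron, nError

   for nLayer = Len( ::aHiddenLayers ) to 1 step -1
      for each oNeuron in ::aHiddenLayers[ nLayer ]:aNeurons
         nError = 0
         if nLayer == Len( ::aHiddenLayers )
            // last hidden layer: accumulate the deltas coming from the output layer
            for each oNextNeuron in ::oOutputLayer:aNeurons
               nError += oNextNeuron:nDeltaError * oNextNeuron:aWeights[ oNeuron:__enumIndex ]
            next
         else
            // inner hidden layer: accumulate the deltas from the next hidden layer
            for each oNextNeuron in ::aHiddenLayers[ nLayer + 1 ]:aNeurons
               nError += oNextNeuron:nDeltaError * oNextNeuron:aWeights[ oNeuron:__enumIndex ]
            next
         endif
         oNeuron:nDeltaError = nError * dSigmoid( oNeuron:nValue )
      next
   next

The weight and bias updates would then run in the same backward order, each layer using the activations of the layer preceding it.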
regards, saludos

Antonio Linares
www.fivetechsoft.com


