Our first neural network using TensorFlow !!!

Postby Antonio Linares » Tue Aug 15, 2017 12:54 am

Inspired by this example:
https://youtu.be/-9H_eVZdtN8

Here is the TensorFlow neural network that solves it:

tf_nn.py
Code:
# Building a neural network with TensorFlow

import tensorflow as tf

def multilayer_perceptron( x, weights, biases ):
    # Hidden layer with RELU activation
    layer_1 = tf.add( tf.matmul( x, weights[ 'h1' ] ), biases[ 'b1' ] )
    layer_1 = tf.nn.relu(layer_1)

    # Output layer with linear activation
    out_layer = tf.matmul( layer_1, weights[ 'out' ] ) + biases[ 'out' ]
    return out_layer

nInputs = 7  # Number of inputs to the neural network
nHiddenPerceptrons = 12
nTypes = 10  # Number of different types in the output
nLearningRate = 0.002
nTrainingEpochs = 500

# Input data
aInputs = [ [ 1, 1, 1, 0, 1, 1, 1 ],  # zero                 2
            [ 1, 0, 0, 0, 0, 0, 1 ],  # one               -------
            [ 1, 1, 0, 1, 1, 1, 0 ],  # two            3  |     |  1
            [ 1, 1, 0, 1, 0, 1, 1 ],  # three             |  4  |  
            [ 1, 0, 1, 1, 0, 0, 1 ],  # four              -------
            [ 0, 1, 1, 1, 0, 1, 1 ],  # five              |     |  
            [ 0, 1, 1, 1, 1, 1, 1 ],  # six            5  |     |  7    
            [ 1, 1, 0, 0, 0, 0, 1 ],  # seven             -------  
            [ 1, 1, 1, 1, 1, 1, 1 ],  # eight                6
            [ 1, 1, 1, 1, 0, 1, 1 ] ] # nine

aOutputs = [ [ 1, 0, 0, 0, 0, 0, 0, 0, 0, 0 ],
             [ 0, 1, 0, 0, 0, 0, 0, 0, 0, 0 ],
             [ 0, 0, 1, 0, 0, 0, 0, 0, 0, 0 ],
             [ 0, 0, 0, 1, 0, 0, 0, 0, 0, 0 ],
             [ 0, 0, 0, 0, 1, 0, 0, 0, 0, 0 ],
             [ 0, 0, 0, 0, 0, 1, 0, 0, 0, 0 ],
             [ 0, 0, 0, 0, 0, 0, 1, 0, 0, 0 ],
             [ 0, 0, 0, 0, 0, 0, 0, 1, 0, 0 ],
             [ 0, 0, 0, 0, 0, 0, 0, 0, 1, 0 ],
             [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 1 ] ]

input = tf.placeholder( "float", shape=( None, nInputs ) )
output = tf.placeholder( "float", shape=( None, nTypes ) )

# Store layers weight & bias
weights = { 'h1': tf.Variable(tf.random_normal( [ nInputs, nHiddenPerceptrons ] ) ),
            'out': tf.Variable(tf.random_normal( [ nHiddenPerceptrons, nTypes ] ) ) }
biases = { 'b1': tf.Variable( tf.random_normal( [ nHiddenPerceptrons ] ) ),
           'out': tf.Variable( tf.random_normal( [ nTypes ] ) ) }

# Create model
network = multilayer_perceptron( input, weights, biases )
loss = tf.reduce_mean( tf.nn.softmax_cross_entropy_with_logits( logits=network, labels=output ) )
optimizer = tf.train.AdamOptimizer( learning_rate = nLearningRate ).minimize( loss )
init = tf.global_variables_initializer()

with tf.Session() as session:
   session.run( init )

   # Training cycle
   for epoch in range( nTrainingEpochs ) :
       avg_error = 0
       for n in range( len( aInputs ) ) :
          cost = session.run( [ optimizer, loss ], { input: [ aInputs[ n ] ], output: [ aOutputs[ n ] ] } )
          # Compute average error
          avg_error += cost[ 1 ] / len( aInputs )
       
       print( "Epoch:", '%04d' % ( epoch + 1 ), "error=", "{:.9f}".format( avg_error ) )

   print( "Optimization Finished" )

   # Test model on train data
   print( "Testing:" )
   for n in range( len( aInputs ) ) :
      print( tf.argmax( network, 1 ).eval( { input: [ aInputs[ n ] ] } )[ 0 ] )


From a cmd window, type:
python tf_nn.py

and you will see how the neural network learns (the error decreases); finally, when we test it, the right values appear!
Epoch: 0491 error= 0.010902708
Epoch: 0492 error= 0.010839775
Epoch: 0493 error= 0.010745070
Epoch: 0494 error= 0.010681662
Epoch: 0495 error= 0.010590052
Epoch: 0496 error= 0.010513857
Epoch: 0497 error= 0.010433172
Epoch: 0498 error= 0.010375975
Epoch: 0499 error= 0.010283007
Epoch: 0500 error= 0.010227598
Optimization Finished
Testing:
0
1
2
3
4
5
6
7
8
9
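
As an extra sanity check, the softmax probabilities behind those predictions can be inspected as well. A minimal sketch, meant to go inside the same "with" block; the tf.nn.softmax call is my addition and is not part of the script above:

Code:
# Hypothetical extra check: softmax turns the raw logits into probabilities
probs = tf.nn.softmax( network ).eval( { input: [ aInputs[ 0 ] ] } )[ 0 ]
print( "P( zero ) =", "{:.3f}".format( probs[ 0 ] ) )  # close to 1.0 after training
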
regards, saludos

Antonio Linares
www.fivetechsoft.com

Re: Our first neural network using TensorFlow !!!

Postby Antonio Linares » Wed Aug 23, 2017 1:05 pm

Progressively turning it into a class :-)

It is easier to modify existing code step by step, in order to find bugs, than to make many changes at once. Python and TensorFlow code is tricky, so changes must be implemented progressively.

This example is working properly, and we are closer to having a generic TNeuronalNetwork class that we can use from FWH :-)

tf_nn.py
Code:
# Building a neural network with TensorFlow

import tensorflow as tf

class TNeuronalNetwork :    
   input = None
   output = None
   nLearningRate = 0.002
   nTrainingEpochs = 500
   network = None
   loss = None
   optimizer = None

   def New( self, aTopology ) :
      self.input = tf.placeholder( "float", shape=( None, aTopology[ 0 ] ) )
      self.output = tf.placeholder( "float", shape=( None, aTopology[ -1 ] ) )

      self.network = self.BuildNetwork( aTopology )

      self.loss = tf.reduce_mean( tf.nn.softmax_cross_entropy_with_logits( logits=self.network, labels=self.output ) )
      self.optimizer = tf.train.AdamOptimizer( learning_rate = self.nLearningRate ).minimize( self.loss )
      return self
     
   def BuildNetwork( self, aTopology ) :
      return multilayer_perceptron( self.input, aTopology[ 0 ], aTopology[ 1 ], aTopology[ -1 ] )

def multilayer_perceptron( input, nInputs, nHiddenPerceptrons, nTypes ):

    # Hidden layer with RELU activation
    network = tf.add( tf.matmul( input, tf.Variable( tf.random_normal( [ nInputs, nHiddenPerceptrons ] ) ) ),
                      tf.Variable( tf.random_normal( [ nHiddenPerceptrons ] ) ) )
    network = tf.nn.relu( network )

    # Output layer with linear activation
    network = tf.matmul( network, tf.Variable( tf.random_normal( [ nHiddenPerceptrons, nTypes ] ) ) ) + \
                         tf.Variable( tf.random_normal( [ nTypes ] ) )
    return network

aInputs = [ [ 1, 1, 1, 0, 1, 1, 1 ],  # zero                 2
            [ 1, 0, 0, 0, 0, 0, 1 ],  # one               -------
            [ 1, 1, 0, 1, 1, 1, 0 ],  # two            3  |     |  1
            [ 1, 1, 0, 1, 0, 1, 1 ],  # three             |  4  |  
            [ 1, 0, 1, 1, 0, 0, 1 ],  # four              -------
            [ 0, 1, 1, 1, 0, 1, 1 ],  # five              |     |  
            [ 0, 1, 1, 1, 1, 1, 1 ],  # six            5  |     |  7    
            [ 1, 1, 0, 0, 0, 0, 1 ],  # seven             -------  
            [ 1, 1, 1, 1, 1, 1, 1 ],  # eight                6
            [ 1, 1, 1, 1, 0, 1, 1 ] ] # nine

aOutputs = [ [ 1, 0, 0, 0, 0, 0, 0, 0, 0, 0 ],
             [ 0, 1, 0, 0, 0, 0, 0, 0, 0, 0 ],
             [ 0, 0, 1, 0, 0, 0, 0, 0, 0, 0 ],
             [ 0, 0, 0, 1, 0, 0, 0, 0, 0, 0 ],
             [ 0, 0, 0, 0, 1, 0, 0, 0, 0, 0 ],
             [ 0, 0, 0, 0, 0, 1, 0, 0, 0, 0 ],
             [ 0, 0, 0, 0, 0, 0, 1, 0, 0, 0 ],
             [ 0, 0, 0, 0, 0, 0, 0, 1, 0, 0 ],
             [ 0, 0, 0, 0, 0, 0, 0, 0, 1, 0 ],
             [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 1 ] ]

oNN = TNeuronalNetwork().New( [ 7, 12, 10 ] )   # 7 inputs, 12 perceptrons for a single hidden layer, 10 outputs
init = tf.global_variables_initializer()

with tf.Session() as session:
   session.run( init )

   # Training cycle
   for epoch in range( oNN.nTrainingEpochs ) :
       avg_error = 0
       for n in range( len( aInputs ) ) :
          cost = session.run( [ oNN.optimizer, oNN.loss ], { oNN.input: [ aInputs[ n ] ], oNN.output: [ aOutputs[ n ] ] } )
          # Compute average error
          avg_error += cost[ 1 ] / len( aInputs )
       
       print( "Epoch:", '%04d' % ( epoch + 1 ), "error=", "{:.9f}".format( avg_error ) )

   print( "Optimization Finished" )

   # Test model on train data
   print( "Testing:" )
   for n in range( len( aInputs ) ) :
      print( tf.argmax( oNN.network, 1 ).eval( { oNN.input: [ aInputs[ n ] ] } )[ 0 ] )
regards, saludos

Antonio Linares
www.fivetechsoft.com

Re: Our first neural network using TensorFlow !!!

Postby Antonio Linares » Wed Aug 23, 2017 1:20 pm

Getting simpler:

tf_nn.py
Code:
# Building a neural network with TensorFlow

import tensorflow as tf

class TNeuronalNetwork :    
   input = None
   output = None
   nLearningRate = 0.002
   nTrainingEpochs = 500
   network = None
   loss = None
   optimizer = None

   def New( self, aTopology ) :
      self.input = tf.placeholder( "float", shape=( None, aTopology[ 0 ] ) )
      self.output = tf.placeholder( "float", shape=( None, aTopology[ -1 ] ) )

      self.network = self.BuildNetwork( aTopology )

      self.loss = tf.reduce_mean( tf.nn.softmax_cross_entropy_with_logits( logits=self.network, labels=self.output ) )
      self.optimizer = tf.train.AdamOptimizer( learning_rate = self.nLearningRate ).minimize( self.loss )
      return self
     
   def BuildNetwork( self, aTopology ) :
      return multilayer_perceptron( self.input, aTopology )

def multilayer_perceptron( input, aTopology ):

    # Hidden layer with RELU activation
    network = tf.add( tf.matmul( input, tf.Variable( tf.random_normal( [ aTopology[ 0 ], aTopology[ 1 ] ] ) ) ),
                      tf.Variable( tf.random_normal( [ aTopology[ 1 ] ] ) ) )
    network = tf.nn.relu( network )

    # Output layer with linear activation
    network = tf.matmul( network, tf.Variable( tf.random_normal( [ aTopology[ 1 ], aTopology[ -1 ] ] ) ) ) + \
                         tf.Variable( tf.random_normal( [ aTopology[ -1 ] ] ) )
    return network

aInputs = [ [ 1, 1, 1, 0, 1, 1, 1 ],  # zero                 2
            [ 1, 0, 0, 0, 0, 0, 1 ],  # one               -------
            [ 1, 1, 0, 1, 1, 1, 0 ],  # two            3  |     |  1
            [ 1, 1, 0, 1, 0, 1, 1 ],  # three             |  4  |  
            [ 1, 0, 1, 1, 0, 0, 1 ],  # four              -------
            [ 0, 1, 1, 1, 0, 1, 1 ],  # five              |     |  
            [ 0, 1, 1, 1, 1, 1, 1 ],  # six            5  |     |  7    
            [ 1, 1, 0, 0, 0, 0, 1 ],  # seven             -------  
            [ 1, 1, 1, 1, 1, 1, 1 ],  # eight                6
            [ 1, 1, 1, 1, 0, 1, 1 ] ] # nine

aOutputs = [ [ 1, 0, 0, 0, 0, 0, 0, 0, 0, 0 ],
             [ 0, 1, 0, 0, 0, 0, 0, 0, 0, 0 ],
             [ 0, 0, 1, 0, 0, 0, 0, 0, 0, 0 ],
             [ 0, 0, 0, 1, 0, 0, 0, 0, 0, 0 ],
             [ 0, 0, 0, 0, 1, 0, 0, 0, 0, 0 ],
             [ 0, 0, 0, 0, 0, 1, 0, 0, 0, 0 ],
             [ 0, 0, 0, 0, 0, 0, 1, 0, 0, 0 ],
             [ 0, 0, 0, 0, 0, 0, 0, 1, 0, 0 ],
             [ 0, 0, 0, 0, 0, 0, 0, 0, 1, 0 ],
             [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 1 ] ]

oNN = TNeuronalNetwork().New( [ 7, 12, 10 ] )   # 7 inputs, 12 perceptrons for a single hidden layer, 10 outputs
init = tf.global_variables_initializer()

with tf.Session() as session:
   session.run( init )

   # Training cycle
   for epoch in range( oNN.nTrainingEpochs ) :
       avg_error = 0
       for n in range( len( aInputs ) ) :
          cost = session.run( [ oNN.optimizer, oNN.loss ], { oNN.input: [ aInputs[ n ] ], oNN.output: [ aOutputs[ n ] ] } )
          # Compute average error
          avg_error += cost[ 1 ] / len( aInputs )
       
       print( "Epoch:", '%04d' % ( epoch + 1 ), "error=", "{:.9f}".format( avg_error ) )

   print( "Optimization Finished" )

   # Test model on train data
   print( "Testing:" )
   for n in range( len( aInputs ) ) :
      print( tf.argmax( oNN.network, 1 ).eval( { oNN.input: [ aInputs[ n ] ] } )[ 0 ] )
regards, saludos

Antonio Linares
www.fivetechsoft.com

Re: Our first neural network using TensorFlow !!!

Postby Antonio Linares » Wed Aug 23, 2017 3:37 pm

Finally, it automatically builds any number of hidden layers!!! :-D

tf_nn.py
Code:
# Building a neural network with TensorFlow

import tensorflow as tf

class TNeuronalNetwork :    
   input = None
   output = None
   nLearningRate = 0.002
   nTrainingEpochs = 500
   network = None
   loss = None
   optimizer = None

   def New( self, aTopology ) :
      self.input = tf.placeholder( "float", shape=( None, aTopology[ 0 ] ) )
      self.output = tf.placeholder( "float", shape=( None, aTopology[ -1 ] ) )

      self.network = self.BuildNetwork( aTopology )

      self.loss = tf.reduce_mean( tf.nn.softmax_cross_entropy_with_logits( logits=self.network, labels=self.output ) )
      self.optimizer = tf.train.AdamOptimizer( learning_rate = self.nLearningRate ).minimize( self.loss )
      return self
     
   def BuildNetwork( self, aTopology ) :
      return multilayer_perceptron( self.input, aTopology )

def multilayer_perceptron( input, aTopology ) :

   for n in range( len( aTopology ) - 2 ) :
      # Hidden layers with RELU activation
      if n == 0 :
         network = tf.add( tf.matmul( input, tf.Variable( tf.random_normal( [ aTopology[ n ], aTopology[ n + 1 ] ] ) ) ),
                           tf.Variable( tf.random_normal( [ aTopology[ n + 1 ] ] ) ) )
      else :
         network = tf.add( tf.matmul( network, tf.Variable( tf.random_normal( [ aTopology[ n ], aTopology[ n + 1 ] ] ) ) ),
                           tf.Variable( tf.random_normal( [ aTopology[ n + 1 ] ] ) ) )
      network = tf.nn.relu( network )

   # Output layer with linear activation
   network = tf.matmul( network, tf.Variable( tf.random_normal( [ aTopology[ -2 ], aTopology[ -1 ] ] ) ) ) + \
                        tf.Variable( tf.random_normal( [ aTopology[ -1 ] ] ) )
   return network

aInputs = [ [ 1, 1, 1, 0, 1, 1, 1 ],  # zero                 2
            [ 1, 0, 0, 0, 0, 0, 1 ],  # one               -------
            [ 1, 1, 0, 1, 1, 1, 0 ],  # two            3  |     |  1
            [ 1, 1, 0, 1, 0, 1, 1 ],  # three             |  4  |  
            [ 1, 0, 1, 1, 0, 0, 1 ],  # four              -------
            [ 0, 1, 1, 1, 0, 1, 1 ],  # five              |     |  
            [ 0, 1, 1, 1, 1, 1, 1 ],  # six            5  |     |  7    
            [ 1, 1, 0, 0, 0, 0, 1 ],  # seven             -------  
            [ 1, 1, 1, 1, 1, 1, 1 ],  # eight                6
            [ 1, 1, 1, 1, 0, 1, 1 ] ] # nine

aOutputs = [ [ 1, 0, 0, 0, 0, 0, 0, 0, 0, 0 ],
             [ 0, 1, 0, 0, 0, 0, 0, 0, 0, 0 ],
             [ 0, 0, 1, 0, 0, 0, 0, 0, 0, 0 ],
             [ 0, 0, 0, 1, 0, 0, 0, 0, 0, 0 ],
             [ 0, 0, 0, 0, 1, 0, 0, 0, 0, 0 ],
             [ 0, 0, 0, 0, 0, 1, 0, 0, 0, 0 ],
             [ 0, 0, 0, 0, 0, 0, 1, 0, 0, 0 ],
             [ 0, 0, 0, 0, 0, 0, 0, 1, 0, 0 ],
             [ 0, 0, 0, 0, 0, 0, 0, 0, 1, 0 ],
             [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 1 ] ]

oNN = TNeuronalNetwork().New( [ 7, 12, 10 ] )  # You may try with different topologies like [ 7, 12, 20, 10 ] etc.
init = tf.global_variables_initializer()

with tf.Session() as session:
   session.run( init )

   # Training cycle
   for epoch in range( oNN.nTrainingEpochs ) :
       avg_error = 0
       for n in range( len( aInputs ) ) :
          cost = session.run( [ oNN.optimizer, oNN.loss ], { oNN.input: [ aInputs[ n ] ], oNN.output: [ aOutputs[ n ] ] } )
          # Compute average error
          avg_error += cost[ 1 ] / len( aInputs )
       
       print( "Epoch:", '%04d' % ( epoch + 1 ), "error=", "{:.9f}".format( avg_error ) )

   print( "Optimization Finished" )

   # Test model on train data
   print( "Testing:" )
   for n in range( len( aInputs ) ) :
      print( tf.argmax( oNN.network, 1 ).eval( { oNN.input: [ aInputs[ n ] ] } )[ 0 ] )
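
For example, with a two-hidden-layer topology the loop above creates one weight matrix per consecutive pair of layer sizes. This call is merely illustrative of how the shapes come out:

Code:
# With aTopology = [ 7, 12, 20, 10 ] the hidden-layer loop builds:
#   n = 0 : weights 7x12,  biases 12  (first hidden layer, fed from the input)
#   n = 1 : weights 12x20, biases 20  (second hidden layer)
# and the final linear layer uses weights 20x10 and biases 10
oNN = TNeuronalNetwork().New( [ 7, 12, 20, 10 ] )
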
regards, saludos

Antonio Linares
www.fivetechsoft.com

Re: Our first neural network using TensorFlow !!!

Postby Antonio Linares » Thu Aug 24, 2017 10:59 am

Simpler and easier:

tf_nn.py
Code:
# Building a neural network with TensorFlow

import tensorflow as tf

class TNeuronalNetwork :    
   input = None
   output = None
   nLearningRate = 0.002
   nTrainingEpochs = 500
   network = None
   loss = None
   optimizer = None

   def New( self, aTopology ) :
      self.input = tf.placeholder( "float", shape=( None, aTopology[ 0 ] ) )
      self.output = tf.placeholder( "float", shape=( None, aTopology[ -1 ] ) )

      self.network = self.BuildNetwork( aTopology )

      self.loss = tf.reduce_mean( tf.nn.softmax_cross_entropy_with_logits( logits=self.network, labels=self.output ) )
      self.optimizer = tf.train.AdamOptimizer( learning_rate = self.nLearningRate ).minimize( self.loss )
      return self
     
   def BuildNetwork( self, aTopology ) :
      return multilayer_perceptron( self.input, aTopology )

   def Train( self, aInputs, aOutputs ) :
      for epoch in range( self.nTrainingEpochs ) :
         avg_error = 0
         for n in range( len( aInputs ) ) :
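            # note: "session" here is the module-level name bound by the with-block at the end of the script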
            cost = session.run( [ self.optimizer, self.loss ], { self.input: [ aInputs[ n ] ], self.output: [ aOutputs[ n ] ] } )
            avg_error += cost[ 1 ] / len( aInputs )
       
         print( "Epoch:", '%04d' % ( epoch + 1 ), "error=", "{:.9f}".format( avg_error ) )
      print( "Optimization Finished" )

def multilayer_perceptron( input, aTopology ) :

   for n in range( len( aTopology ) - 2 ) :
      # Hidden layers with RELU activation
      if n == 0 :
         network = tf.add( tf.matmul( input, tf.Variable( tf.random_normal( [ aTopology[ n ], aTopology[ n + 1 ] ] ) ) ),
                           tf.Variable( tf.random_normal( [ aTopology[ n + 1 ] ] ) ) )
      else :
         network = tf.add( tf.matmul( network, tf.Variable( tf.random_normal( [ aTopology[ n ], aTopology[ n + 1 ] ] ) ) ),
                           tf.Variable( tf.random_normal( [ aTopology[ n + 1 ] ] ) ) )
      network = tf.nn.relu( network )

   # Output layer with linear activation
   network = tf.matmul( network, tf.Variable( tf.random_normal( [ aTopology[ -2 ], aTopology[ -1 ] ] ) ) ) + \
                        tf.Variable( tf.random_normal( [ aTopology[ -1 ] ] ) )
   return network

aInputs = [ [ 1, 1, 1, 0, 1, 1, 1 ],  # zero                 2
            [ 1, 0, 0, 0, 0, 0, 1 ],  # one               -------
            [ 1, 1, 0, 1, 1, 1, 0 ],  # two            3  |     |  1
            [ 1, 1, 0, 1, 0, 1, 1 ],  # three             |  4  |  
            [ 1, 0, 1, 1, 0, 0, 1 ],  # four              -------
            [ 0, 1, 1, 1, 0, 1, 1 ],  # five              |     |  
            [ 0, 1, 1, 1, 1, 1, 1 ],  # six            5  |     |  7    
            [ 1, 1, 0, 0, 0, 0, 1 ],  # seven             -------  
            [ 1, 1, 1, 1, 1, 1, 1 ],  # eight                6
            [ 1, 1, 1, 1, 0, 1, 1 ] ] # nine

aOutputs = [ [ 1, 0, 0, 0, 0, 0, 0, 0, 0, 0 ],
             [ 0, 1, 0, 0, 0, 0, 0, 0, 0, 0 ],
             [ 0, 0, 1, 0, 0, 0, 0, 0, 0, 0 ],
             [ 0, 0, 0, 1, 0, 0, 0, 0, 0, 0 ],
             [ 0, 0, 0, 0, 1, 0, 0, 0, 0, 0 ],
             [ 0, 0, 0, 0, 0, 1, 0, 0, 0, 0 ],
             [ 0, 0, 0, 0, 0, 0, 1, 0, 0, 0 ],
             [ 0, 0, 0, 0, 0, 0, 0, 1, 0, 0 ],
             [ 0, 0, 0, 0, 0, 0, 0, 0, 1, 0 ],
             [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 1 ] ]

oNN = TNeuronalNetwork().New( [ 7, 12, 10 ] )
init = tf.global_variables_initializer()

with tf.Session() as session:
   session.run( init )

   oNN.Train( aInputs, aOutputs )

   # Test model on train data
   print( "Testing:" )
   for n in range( len( aInputs ) ) :
      print( tf.argmax( oNN.network, 1 ).eval( { oNN.input: [ aInputs[ n ] ] } )[ 0 ] )
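
Since the training settings are plain class data, they can be adjusted on the object. A small usage sketch with a hypothetical value; note that nLearningRate would have to be changed before calling New, because the optimizer is built there:

Code:
oNN = TNeuronalNetwork().New( [ 7, 12, 10 ] )
oNN.nTrainingEpochs = 1000   # train longer than the default 500 epochs
# then, inside the with-block as above:
#    oNN.Train( aInputs, aOutputs )
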
regards, saludos

Antonio Linares
www.fivetechsoft.com

Re: Our first neural network using TensorFlow !!!

Postby Antonio Linares » Sun Aug 27, 2017 5:29 am

Using an external file to simplify our code:

test.py
Code:
import tf_nn

aInputs = [ [ 1, 1, 1, 0, 1, 1, 1 ],  # zero                 2
            [ 1, 0, 0, 0, 0, 0, 1 ],  # one               -------
            [ 1, 1, 0, 1, 1, 1, 0 ],  # two            3  |     |  1
            [ 1, 1, 0, 1, 0, 1, 1 ],  # three             |  4  |  
            [ 1, 0, 1, 1, 0, 0, 1 ],  # four              -------
            [ 0, 1, 1, 1, 0, 1, 1 ],  # five              |     |  
            [ 0, 1, 1, 1, 1, 1, 1 ],  # six            5  |     |  7    
            [ 1, 1, 0, 0, 0, 0, 1 ],  # seven             -------  
            [ 1, 1, 1, 1, 1, 1, 1 ],  # eight                6
            [ 1, 1, 1, 1, 0, 1, 1 ] ] # nine

aOutputs = [ [ 1, 0, 0, 0, 0, 0, 0, 0, 0, 0 ],
             [ 0, 1, 0, 0, 0, 0, 0, 0, 0, 0 ],
             [ 0, 0, 1, 0, 0, 0, 0, 0, 0, 0 ],
             [ 0, 0, 0, 1, 0, 0, 0, 0, 0, 0 ],
             [ 0, 0, 0, 0, 1, 0, 0, 0, 0, 0 ],
             [ 0, 0, 0, 0, 0, 1, 0, 0, 0, 0 ],
             [ 0, 0, 0, 0, 0, 0, 1, 0, 0, 0 ],
             [ 0, 0, 0, 0, 0, 0, 0, 1, 0, 0 ],
             [ 0, 0, 0, 0, 0, 0, 0, 0, 1, 0 ],
             [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 1 ] ]

oNN = tf_nn.TNeuronalNetwork().New( [ 7, 12, 10 ] )

oNN.Train( aInputs, aOutputs )


tf_nn.py
Code:
# Building a neural network with TensorFlow

import tensorflow as tf

class TNeuronalNetwork :    
   input = None
   output = None
   nLearningRate = 0.002
   nTrainingEpochs = 500
   network = None
   loss = None
   optimizer = None
   session = None

   def New( self, aTopology ) :
      self.input = tf.placeholder( "float", shape=( None, aTopology[ 0 ] ) )
      self.output = tf.placeholder( "float", shape=( None, aTopology[ -1 ] ) )

      self.network = self.BuildNetwork( aTopology )

      self.loss = tf.reduce_mean( tf.nn.softmax_cross_entropy_with_logits( logits=self.network, labels=self.output ) )
      self.optimizer = tf.train.AdamOptimizer( learning_rate = self.nLearningRate ).minimize( self.loss )
 
      self.session = tf.Session()
      self.session.run( tf.global_variables_initializer() )

      return self
     
   def BuildNetwork( self, aTopology ) :
      return multilayer_perceptron( self.input, aTopology )

   def Train( self, aInputs, aOutputs ) :
      for epoch in range( self.nTrainingEpochs ) :
         avg_error = 0
         for n in range( len( aInputs ) ) :
            cost = self.session.run( [ self.optimizer, self.loss ], { self.input: [ aInputs[ n ] ], self.output: [ aOutputs[ n ] ] } )
            avg_error += cost[ 1 ] / len( aInputs )
       
         print( "Epoch:", '%04d' % ( epoch + 1 ), "error=", "{:.9f}".format( avg_error ) )
      print( "Optimization Finished" )

def multilayer_perceptron( input, aTopology ) :

   for n in range( len( aTopology ) - 2 ) :
      # Hidden layers with RELU activation
      if n == 0 :
         network = tf.add( tf.matmul( input, tf.Variable( tf.random_normal( [ aTopology[ n ], aTopology[ n + 1 ] ] ) ) ),
                           tf.Variable( tf.random_normal( [ aTopology[ n + 1 ] ] ) ) )
      else :
         network = tf.add( tf.matmul( network, tf.Variable( tf.random_normal( [ aTopology[ n ], aTopology[ n + 1 ] ] ) ) ),
                           tf.Variable( tf.random_normal( [ aTopology[ n + 1 ] ] ) ) )
      network = tf.nn.relu( network )

   # Output layer with linear activation
   network = tf.matmul( network, tf.Variable( tf.random_normal( [ aTopology[ -2 ], aTopology[ -1 ] ] ) ) ) + \
                        tf.Variable( tf.random_normal( [ aTopology[ -1 ] ] ) )
   return network
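
Run it the same way as before, assuming both files sit in the same folder. From a cmd window:

Code:
python test.py
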
regards, saludos

Antonio Linares
www.fivetechsoft.com

Re: Our first neural network using TensorFlow !!!

Postby Antonio Linares » Mon Aug 28, 2017 7:29 am

Enhanced class and simplified example:

test.py
Code:
import tf_nn

aInputs = [ [ 1, 1, 1, 0, 1, 1, 1 ],  # zero                 2
            [ 1, 0, 0, 0, 0, 0, 1 ],  # one               -------
            [ 1, 1, 0, 1, 1, 1, 0 ],  # two            3  |     |  1
            [ 1, 1, 0, 1, 0, 1, 1 ],  # three             |  4  |  
            [ 1, 0, 1, 1, 0, 0, 1 ],  # four              -------
            [ 0, 1, 1, 1, 0, 1, 1 ],  # five              |     |  
            [ 0, 1, 1, 1, 1, 1, 1 ],  # six            5  |     |  7    
            [ 1, 1, 0, 0, 0, 0, 1 ],  # seven             -------  
            [ 1, 1, 1, 1, 1, 1, 1 ],  # eight                6
            [ 1, 1, 1, 1, 0, 1, 1 ] ] # nine

aOutputs = [ [ 1, 0, 0, 0, 0, 0, 0, 0, 0, 0 ],
             [ 0, 1, 0, 0, 0, 0, 0, 0, 0, 0 ],
             [ 0, 0, 1, 0, 0, 0, 0, 0, 0, 0 ],
             [ 0, 0, 0, 1, 0, 0, 0, 0, 0, 0 ],
             [ 0, 0, 0, 0, 1, 0, 0, 0, 0, 0 ],
             [ 0, 0, 0, 0, 0, 1, 0, 0, 0, 0 ],
             [ 0, 0, 0, 0, 0, 0, 1, 0, 0, 0 ],
             [ 0, 0, 0, 0, 0, 0, 0, 1, 0, 0 ],
             [ 0, 0, 0, 0, 0, 0, 0, 0, 1, 0 ],
             [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 1 ] ]

oNN = tf_nn.TNeuronalNetwork().New( [ 7, 12, 10 ] )

oNN.Learn( aInputs, aOutputs )

oNN.Calculate( aInputs )


tf_nn.py
Code:
# Building a neural network with TensorFlow

import tensorflow as tf

class TNeuronalNetwork :    
   input = None
   output = None
   nLearningRate = 0.002
   nTrainingEpochs = 500
   network = None
   loss = None
   optimizer = None
   session = None

   def New( self, aTopology ) :
      self.input = tf.placeholder( "float", shape=( None, aTopology[ 0 ] ) )
      self.output = tf.placeholder( "float", shape=( None, aTopology[ -1 ] ) )

      self.BuildLayers( aTopology )

      self.loss = tf.reduce_mean( tf.nn.softmax_cross_entropy_with_logits( logits=self.network, labels=self.output ) )
      self.optimizer = tf.train.AdamOptimizer( learning_rate = self.nLearningRate ).minimize( self.loss )
 
      self.session = tf.Session()
      self.session.run( tf.global_variables_initializer() )

      return self
     
   def BuildLayers( self, aTopology ) :
      for n in range( len( aTopology ) - 2 ) :
         # Hidden layers with RELU activation
         if n == 0 :
            self.network = tf.add( tf.matmul( self.input, tf.Variable( tf.random_normal( [ aTopology[ n ], aTopology[ n + 1 ] ] ) ) ),
                                   tf.Variable( tf.random_normal( [ aTopology[ n + 1 ] ] ) ) )
         else :
            self.network = tf.add( tf.matmul( self.network, tf.Variable( tf.random_normal( [ aTopology[ n ], aTopology[ n + 1 ] ] ) ) ),
                                   tf.Variable( tf.random_normal( [ aTopology[ n + 1 ] ] ) ) )
         self.network = tf.nn.relu( self.network )

      # Output layer with linear activation
      self.network = tf.matmul( self.network, tf.Variable( tf.random_normal( [ aTopology[ -2 ], aTopology[ -1 ] ] ) ) ) + \
                                tf.Variable( tf.random_normal( [ aTopology[ -1 ] ] ) )

   def Learn( self, aInputs, aOutputs ) :
      for epoch in range( self.nTrainingEpochs ) :
         avg_error = 0
         for n in range( len( aInputs ) ) :
            cost = self.session.run( [ self.optimizer, self.loss ], { self.input: [ aInputs[ n ] ], self.output: [ aOutputs[ n ] ] } )
            avg_error += cost[ 1 ] / len( aInputs )
       
         print( "Epoch:", '%04d' % ( epoch + 1 ), "error=", "{:.9f}".format( avg_error ) )
      print( "Optimization Finished" )

   def Calculate( self, aInputs ) :  
      for n in range( len( aInputs ) ) :
         print( tf.argmax( self.network, 1 ).eval( { self.input: [ aInputs[ n ] ] }, session= self.session )[ 0 ] )
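
Calculate passes session= self.session to eval() because there is no default session at that point. As a possible refinement (just a sketch, not part of the class above), it could also feed all the patterns in one batch and return the predictions instead of printing them:

Code:
   def Calculate( self, aInputs ) :
      # hypothetical variant: evaluate the whole batch at once and return a list of digits
      return list( self.session.run( tf.argmax( self.network, 1 ), { self.input: aInputs } ) )
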
regards, saludos

Antonio Linares
www.fivetechsoft.com

