Monday, 14 January 2013

15th Jan. Pruning-based fitness function + Mutation implemented. MB11

Github MB11 shows the latest version, with mutation of loops and a pruning-based fitness function. At first glance, the fitness function seems to be too noisy to be optimised. I'm not convinced by it. It is basically the number of hidden neurons that survive the pruning, so it is an integer. The thresholds and the weight-decay term are a bit arbitrary, and the integer values make it very coarse-grained. Also, I'm not sure what the significance of preserving lots of hidden nodes is in the first place.


         
          #Fitness function (3) *************************************************************
          #Record the sm data for this loop and consider its properties
          # Train a small PyBrain feed-forward net to predict the next sensor value
          # from the current sensorimotor state, then score fitness as the number
          # of hidden neurons whose weights survive a magnitude-based pruning test.
          # NOTE(review): this is a fragment of a larger method — `smMatrix` comes
          # from the enclosing scope, and the `def` line is not shown here.
          # NOTE(review): Python 2 syntax (`print fit` at the bottom).
          #print(smMatrix)
          #print(len(smMatrix))

          # Build a 3-input, 10-hidden (sigmoid), 1-output network by hand
          # (equivalent to the commented-out buildNetwork shortcut) so that the
          # connection objects iToH/hToO stay accessible for weight inspection.
          #net = buildNetwork(3,10,1, bias = True)
          net = FeedForwardNetwork()
          inp = LinearLayer(3)
          h1 = SigmoidLayer(10)
          outp = LinearLayer(1)
          # add modules
          net.addOutputModule(outp)
          net.addInputModule(inp)
          net.addModule(h1)
          # create connections
          iToH = FullConnection(inp, h1)
          hToO = FullConnection(h1, outp)
          net.addConnection(iToH)
          net.addConnection(hToO)
          # finish up
          net.sortModules()


          # Supervised regression dataset: 3 inputs -> 1 target.
          ds = SupervisedDataSet(3, 1)

          trainSet = []
          # One sample per time step: input is columns 0-2 of the current row,
          # target is column 3 of the NEXT row (predict the next sensor value).
          # The first and last rows are skipped by the index guard.
          # NOTE(review): index_x > 0 also drops row 0 — presumably intentional
          # (warm-up step), but worth confirming.
          for index_x, x in enumerate(smMatrix):
               if index_x > 0 and index_x < len(smMatrix)-1:
                    #trainSet.append( [smMatrix[index_x][0], smMatrix[index_x][1], smMatrix[index_x][2], smMatrix[index_x+1][3] ] )
                    ds.addSample(([smMatrix[index_x][0], smMatrix[index_x][1], smMatrix[index_x][2]]), (smMatrix[index_x+1][3]))
          #print(trainSet)
          #print(ds)
          # Weight decay pushes unneeded weights toward zero so the pruning
          # test below can distinguish "used" from "unused" hidden neurons.
          trainer = BackpropTrainer(net, ds, weightdecay=0.01)
          err = trainer.trainUntilConvergence(maxEpochs = 50)
          #Visualize the network performance and structure.

          #nn = NNregression(ds, epoinc = 10)
          #nn.setupNN()
          #nn.runTraining()
          #self.pesos_conexiones(net)
          #print("Input to hidden", iToH.params)
          #print("H to output", hToO.params)
          #print(iToH.params)
          # iToH.params is the flat array of 3*10 input-to-hidden weights;
          # zip(*[iter(n1)]*3) chunks it into tuples of 3 — one tuple per
          # hidden neuron (assumes params are ordered per hidden neuron in
          # groups of 3 — TODO confirm against PyBrain's FullConnection docs).
          n1 = iToH.params
          n1a= zip(*[iter(n1)]*3)
          n2 = hToO.params
          
          # Total absolute incoming weight per hidden neuron.
          sums = []
          for x in n1a:
               sumr = 0
               for y in x:
                    sumr = sumr + abs(y)
               sums.append(sumr)
                         
          # Absolute outgoing weight per hidden neuron (one weight each,
          # since there is a single output unit).
          sums2 = []
          for x in n2:
               sums2.append(abs(x))
          
          # Keep the hidden neurons whose total absolute input weight AND
          # output weight magnitude exceed the thresholds, i.e. neurons that
          # would survive pruning. (Thresholds 2.0 / 0.5 are hand-picked.)
          a1 = [index for index,value in enumerate(sums) if value > 2.0]
          a2 = [index for index,value in enumerate(sums2) if value > 0.5]
          inter = len(set(a1).intersection( set(a2) ))
          # Fitness = count of surviving hidden neurons (an integer, hence
          # the coarse-grained fitness landscape discussed in the post).
          fit = inter
          #fit = sum(n1a[:]) + sum(n2[:])
          print fit
          return fit




No comments:

Post a Comment