I'm training a convolutional neural network using the firefly algorithm. Although I
update the weights of the network, they don't seem to actually change. How can I
update the network weights manually?
This is how I update the weights:

import numpy
import theano

def set_params(params, bestSolution,
               layers=[layer0, layer1, layer2, layer21, layer3]):
    # print('Best sol: ', bestSolution)
    # print('params: ', params)

    l0w = bestSolution[0:150]
    l0b = bestSolution[150:156]
    l1w = bestSolution[156:2556]
    l1b = bestSolution[2556:2572]
    l2w = bestSolution[2572:33292]
    l2b = bestSolution[33292:33412]
    l21w = bestSolution[33412:43492]
    l21b = bestSolution[43492:43576]
    l3w = bestSolution[43576:44416]
    l3b = bestSolution[44416:44426]
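    # (slice lengths match the flattened parameter shapes: 150 = 6*1*5*5,
    #  2400 = 16*6*5*5, 30720 = 256*120, 10080 = 120*84, 840 = 84*10,
    #  plus the 6, 16, 120, 84 and 10 bias entries)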


    # first conpool layer weights
    params[8] = theano.shared(
        numpy.reshape(numpy.asarray(l0w, dtype=theano.config.floatX), (6, 1, 5, 5)),
        borrow=True
    )
    layers[0].params[0] = params[8]
    # print('before: ')
    # layers[0].ppp()
    layers[0].W = params[8]
    # print('after: ')
    # layers[0].ppp()

    # first conpool layer biases
    params[9] = theano.shared(
        numpy.reshape(numpy.asarray(l0b, dtype=theano.config.floatX), (6,)),
        borrow=True
    )
    layers[0].params[1] = params[9]
    layers[0].b = params[9]

    # second conpool layer weights
    params[6] = theano.shared(
        numpy.reshape(numpy.asarray(l1w, dtype=theano.config.floatX), (16, 6, 5, 5)),
        borrow=True
    )
    layers[1].params[0] = params[6]
    layers[1].W = params[6]

    # second conpool layer biases
    params[7] = theano.shared(
        numpy.reshape(numpy.asarray(l1b, dtype=theano.config.floatX), (16,)),
        borrow=True
    )
    layers[1].params[1] = params[7]
    layers[1].b = params[7]

    # first hidden layer weights
    params[4] = theano.shared(
        numpy.reshape(numpy.asarray(l2w, dtype=theano.config.floatX), (256, 120)),
        name='W',
        borrow=True
    )
    layers[2].params[0] = params[4]
    layers[2].W = params[4]

    # first hidden layer biases
    params[5] = theano.shared(
        numpy.reshape(numpy.asarray(l2b, dtype=theano.config.floatX), (120,)),
        name='b',
        borrow=True
    )
    layers[2].params[1] = params[5]
    layers[2].b = params[5]

    # second hidden layer weights
    params[2] = theano.shared(
        numpy.reshape(numpy.asarray(l21w, dtype=theano.config.floatX), (120, 84)),
        name='W',
        borrow=True
    )
    layers[3].params[0] = params[2]
    layers[3].W = params[2]

    # second hidden layer biases
    params[3] = theano.shared(
        numpy.reshape(numpy.asarray(l21b, dtype=theano.config.floatX), (84,)),
        name='b',
        borrow=True
    )
    layers[3].params[1] = params[3]
    layers[3].b = params[3]

    # output layer weights
    params[0] = theano.shared(
        numpy.reshape(numpy.asarray(l3w, dtype=theano.config.floatX), (84, 10)),
        name='W',
        borrow=True
    )
    layers[4].params[0] = params[0]
    layers[4].W = params[0]

    # output layer biases
    params[1] = theano.shared(
        numpy.reshape(numpy.asarray(l3b, dtype=theano.config.floatX), (10,)),
        name='b',
        borrow=True
    )
    layers[4].params[1] = params[1]
    layers[4].b = params[1]

    firefly = (params[0] + params[1] + params[2] + params[3] + params[4] +
               params[5] + params[6] + params[7] + params[8] + params[9])
    return firefly
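
For comparison, my understanding is that the usual Theano idiom is to call
set_value on an existing shared variable rather than rebinding params[i] to a
brand-new one, since a compiled theano.function keeps a reference to the
original shared variables (so new ones created as above would never be seen by
the compiled graph). Below is a minimal, self-contained sketch of what I mean;
the toy W just stands in for something like layers[0].W:

import numpy
import theano

# a toy shared weight, standing in for e.g. layers[0].W
W = theano.shared(
    numpy.zeros((6, 1, 5, 5), dtype=theano.config.floatX), name='W'
)

# the compiled function captures the shared variable W itself,
# not a snapshot of its value
f = theano.function([], W.sum())

# update in place: reshape the flat firefly slice and overwrite the storage
new_w = numpy.ones(150, dtype=theano.config.floatX)
W.set_value(new_w.reshape((6, 1, 5, 5)))

print(f())  # now reflects the updated values: 150.0

Is replacing the body of set_params with set_value calls on the existing
layers[i].W and layers[i].b the right way to update the weights manually?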
