
How To Initialize The Weights Of A Network With The Weights Of Another Network?

I want to combine 2 networks into one network while keeping the weights of the original network. I saved the weights in their numpy form using: for i in tf.get_collection(tf.Graph...
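(The dump loop above is cut off. A minimal sketch of what such a loop might look like, assuming the standard tf.GraphKeys.TRAINABLE_VARIABLES collection and an already-running session named sess — this is a hypothetical reconstruction, not the asker's exact code:)

# Hypothetical sketch of the truncated dump loop -- assumes a live tf.Session `sess`
# and TF1-style graph collections.
import tensorflow as tf

numpy_weights = {}
for v in tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES):
    numpy_weights[v.name] = sess.run(v)  # fetch the variable's current value as a numpy array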

Solution 1:

You can write a pair of helper functions to save and load the weights:

def save_to_dict(sess, collection=tf.GraphKeys.TRAINABLE_VARIABLES):
    # map each variable's name to its current value as a numpy array
    return {v.name: sess.run(v) for v in tf.get_collection(collection)}


def load_from_dict(sess, data):
    # assign saved values to every variable whose name appears in the dict
    for v in tf.global_variables():
        if v.name in data:
            sess.run(v.assign(data[v.name]))

The trick is simply to iterate over all variables and check whether they exist in the dictionary, as in the complete example below:

import tensorflow as tf
import numpy as np


def save_to_dict(sess, collection=tf.GraphKeys.TRAINABLE_VARIABLES):
    # map each variable's name to its current value as a numpy array
    return {v.name: sess.run(v) for v in tf.get_collection(collection)}


def load_from_dict(sess, data):
    # assign saved values to every variable whose name appears in the dict
    for v in tf.global_variables():
        if v.name in data:
            sess.run(v.assign(data[v.name]))


def network(x):
    x = tf.layers.dense(x, 512, activation=tf.nn.relu, name='fc0')
    x = tf.layers.dense(x, 512, activation=tf.nn.relu, name='fc1')
    x = tf.layers.dense(x, 512, activation=tf.nn.relu, name='fc2')
    x = tf.layers.dense(x, 512, activation=tf.nn.relu, name='fc3')
    x = tf.layers.dense(x, 512, activation=tf.nn.relu, name='fc4')
    return x


element = np.random.randn(8, 10)
weights = None

# first session
with tf.Session() as sess:

    x = tf.placeholder(dtype=tf.float32, shape=[None, 10])
    y = network(x)
    sess.run(tf.global_variables_initializer())

    # first evaluation
    expected = sess.run(y, {x: element})

    # dump as dict
    weights = save_to_dict(sess)

# destroy session and graph
tf.reset_default_graph()

# second session
with tf.Session() as sess:

    x = tf.placeholder(dtype=tf.float32, shape=[None, 10])
    y = network(x)
    sess.run(tf.global_variables_initializer())

    # use randomly initialized parameters
    actual = sess.run(y, {x: element})
    assert np.sum(np.abs(actual - expected)) > 0  # should NOT match

    # load previous parameters
    load_from_dict(sess, weights)

    actual = sess.run(y, {x: element})
    assert np.sum(np.abs(actual - expected)) == 0  # should match

This way, you can drop some parameters from the dictionary, modify the weights before loading them, or even rename parameters (for example, to load them into a differently scoped copy of the network).
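For the original use case of merging two networks, a minimal sketch of that renaming idea: assume the combined graph rebuilds the first network under a hypothetical variable scope net_a, so the saved names only need that prefix before calling load_from_dict. The scope name, the combining layers, and the surrounding structure are assumptions, not part of the answer above.

# Hypothetical sketch: prefix the saved variable names with the new scope
# before loading them into the combined graph. `weights` is the dict
# returned by save_to_dict() from the original network's session.
renamed = {'net_a/' + name: value for name, value in weights.items()}

tf.reset_default_graph()  # start a fresh graph for the combined network

with tf.Session() as sess:
    x = tf.placeholder(dtype=tf.float32, shape=[None, 10])
    with tf.variable_scope('net_a'):   # first sub-network, rebuilt inside the combined graph
        y_a = network(x)
    # ... build the second sub-network and the combining layers here ...
    sess.run(tf.global_variables_initializer())
    load_from_dict(sess, renamed)      # only variables whose names match get overwritten

Only variables whose (prefixed) names are found in the dictionary are assigned; everything else keeps its fresh random initialization.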
