I have the following setup, where each input consists of two trajectories. I want that the left graph has the same weight as the right graph
I tried to follow the approach described here for sharing variables, https://www.tensorflow.org/versions/r1.0/how_tos/variable_scope/, however it is not working. Two different graphs are created. What am I doing wrong?
def build_t_model(trajectories):
"""
Function to build a subgraph
"""
with tf.name_scope('h1_t'):
weights = tf.Variable(tf.truncated_normal([150, h1_t_units], stddev=1.0/math.sqrt(float(150))), name='weights')
biases = tf.Variable(tf.zeros([h1_t_units]), name='biases')
h1_t = tf.nn.relu(tf.matmul(trajectories, weights) + biases)
with tf.name_scope('h2_t'):
weights = tf.Variable(tf.truncated_normal([h1_t_units, h2_t_units], stddev=1.0/math.sqrt(float(h1_t_units))), name='weights')
biases = tf.Variable(tf.zeros([h2_t_units]), name='biases')
h2_t = tf.nn.relu(tf.matmul(h1_t, weights) + biases)
with tf.name_scope('h3_t'):
weights = tf.Variable(tf.truncated_normal([h2_t_units, M], stddev=1.0/math.sqrt(float(h2_t_units))), name='weights')
biases = tf.Variable(tf.zeros([M]), name='biases')
h3_t = tf.nn.relu(tf.matmul(h2_t, weights) + biases)
return h3_t
# We build two trajectory networks. The weights should be shared
with tf.variable_scope('traj_embedding') as scope:
self.embeddings_left = build_t_model(self.input_traj)
scope.reuse_variables()
self.embeddings_right = build_t_model(self.input_traj_mv)
Okay, use tf.get_variable instead of tf.Variable for this. This works
def build_t_model(trajectories):
"""
Build the trajectory network
"""
with tf.name_scope('h1_t'):
weights = tf.get_variable(
'weights1',
shape=[150, h1_t_units],
initializer=tf.truncated_normal_initializer(
stddev=1.0/math.sqrt(float(150))))
biases = tf.get_variable(
'biases1',
initializer=tf.zeros_initializer(shape=[h1_t_units]))
h1_t = tf.nn.relu(tf.matmul(trajectories, weights) + biases)
with tf.name_scope('h2_t'):
weights = tf.get_variable(
'weights2',
shape=[h1_t_units, h2_t_units],
initializer=tf.truncated_normal_initializer(
stddev=1.0/math.sqrt(float(h1_t_units))))
biases = tf.get_variable(
'biases2',
initializer=tf.zeros_initializer(shape=[h2_t_units]))
h2_t = tf.nn.relu(tf.matmul(h1_t, weights) + biases)
with tf.name_scope('h3_t'):
weights = tf.get_variable(
'weights3',
shape=[h2_t_units, M],
initializer=tf.truncated_normal_initializer(
stddev=1.0/math.sqrt(float(h2_t_units))))
biases = tf.get_variable(
'biases3',
initializer=tf.zeros_initializer(shape=[M]))
h3_t = tf.nn.relu(tf.matmul(h2_t, weights) + biases)
return h3_t
If you love us? You can donate to us via Paypal or buy me a coffee so we can maintain and grow! Thank you!
Donate Us With