I was playing around with TensorFlow and ran into a problem with this code:
import numpy as np
import tensorflow as tf

def process_tree_tf(matrix, weights, idxs, name=None):
    with tf.name_scope(name, "process_tree", [matrix, weights, idxs]) as scope:
        loop_index = tf.sub(tf.shape(matrix)[0], 1)
        loop_vars = loop_index, matrix, idxs, weights

        def loop_condition(loop_idx, *_):
            return tf.greater(loop_idx, 0)

        def loop_body(loop_idx, mat, idxs, weights):
            x = mat[loop_idx]
            w = weights
            bias = tf.Variable(tf.constant(0.1, shape=[2], dtype=tf.float64))  # Here?
            ...
            return loop_idx - 1, mat, idxs, weights

        return tf.while_loop(loop_condition, loop_body, loop_vars, name=scope)[1]
I'm evaluating the function this way:
height = 2
width = 2
nodes = 4
matrix = np.ones((nodes, width+height))
weights = np.ones((width+height, width))/100
idxs = [0,0,1,2]
with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())  # Error Here!
    r = process_tree_tf(matrix, weights, idxs)
    print(r.eval())
I'm getting this error:
InvalidArgumentError: The node 'process_tree_tf/Variable/Assign' has inputs from different frames. The input 'process_tree_tf/Const_1' is in frame 'process_tree_tf/process_tree_tf/'. The input 'process_tree_tf/Variable' is in frame ''.
Weirdly, if I restart the kernel in the Jupyter notebook and run everything again, I get this error:
FailedPreconditionError (see above for traceback): Attempting to use uninitialized value bias
[[Node: bias/read = Identity[T=DT_FLOAT, _class=["loc:@bias"], _device="/job:localhost/replica:0/task:0/cpu:0"](bias)]]
I tried using this instead:
bias = tf.get_variable("bias", shape=[2], initializer=tf.constant_initializer(0.1))
but that didn't work either.
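In case it helps, here is a minimal sketch of what I'm planning to try next. It assumes TensorFlow 1.x (tf.subtract rather than tf.sub) and rests on two guesses of mine: that the variable has to be created outside loop_body so it doesn't end up inside the while_loop frame, and that the initializer has to run only after the graph has been built. I don't know whether either guess is correct:

import numpy as np
import tensorflow as tf

def process_tree_tf(matrix, weights, idxs, name=None):
    with tf.name_scope(name, "process_tree", [matrix, weights, idxs]) as scope:
        # Guess: create the variable once, outside loop_body, so it is not
        # constructed inside the while_loop frame.
        bias = tf.get_variable("bias", shape=[2], dtype=tf.float64,
                               initializer=tf.constant_initializer(0.1))
        loop_index = tf.subtract(tf.shape(matrix)[0], 1)
        loop_vars = loop_index, matrix, idxs, weights

        def loop_condition(loop_idx, *_):
            return tf.greater(loop_idx, 0)

        def loop_body(loop_idx, mat, idxs, weights):
            x = mat[loop_idx]
            w = weights
            # ... use the outer bias here instead of creating a new variable ...
            return loop_idx - 1, mat, idxs, weights

        return tf.while_loop(loop_condition, loop_body, loop_vars, name=scope)[1]

height, width, nodes = 2, 2, 4
matrix = np.ones((nodes, width + height))
weights = np.ones((width + height, width)) / 100
idxs = [0, 0, 1, 2]

r = process_tree_tf(matrix, weights, idxs)  # build the graph first

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())  # then initialize
    print(sess.run(r))

Does hoisting the variable out of loop_body like this make sense, or is there a better pattern for using variables with tf.while_loop?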
I'm very sorry if I'm overlooking something obvious here, but I'd really appreciate it if someone could tell me where I'm going wrong.
Thank you very much!