InvalidArgumentError: node has inputs from different frames

I played with Tensorflow and ran into a problem with this code:

def process_tree_tf(matrix, weights, idxs, name=None):
    """Walk the rows of `matrix` from the last row down to row 1 inside a
    tf.while_loop, threading (index, matrix, idxs, weights) as loop state.

    Args:
        matrix: 2-D tensor (or array convertible to one) of node features.
        weights: weight tensor carried unchanged through the loop.
        idxs: per-node index tensor carried unchanged through the loop.
        name: optional name-scope name.

    Returns:
        The final `matrix` loop variable (element [1] of the while_loop result).
    """
    # Scope values must be the real arguments (`matrix`, not the undefined
    # `tree`), and the scope is bound with `as scope:` — the original
    # `.as scope():` was a syntax error.
    with tf.name_scope(name, "process_tree", [matrix, weights, idxs]) as scope:
        # tf.sub was removed in TF 1.0; tf.subtract is the current name.
        loop_index = tf.subtract(tf.shape(matrix)[0], 1)
        loop_vars = loop_index, matrix, idxs, weights

        def loop_condition(loop_idx, *_):
            return tf.greater(loop_idx, 0)

        def loop_body(loop_idx, mat, idxs, weights):
            x = mat[loop_idx]
            w = weights
            # NOTE(review): creating a tf.Variable inside the loop body is what
            # triggers the "inputs from different frames" error this article is
            # about — kept here to reproduce the problem. Also, tf.constant's
            # second positional argument is dtype, so the shape must be passed
            # by keyword.
            bias = tf.Variable(tf.constant(0.1, shape=[2], dtype=tf.float64))  # Here?

            ...
            return loop_idx - 1, mat, idxs, weights

        return tf.while_loop(loop_condition, loop_body, loop_vars, name=scope)[1]

I evaluate the function as follows:

# Build the inputs: 4 nodes, each with width+height features.
height = 2
width = 2
nodes = 4
matrix = np.ones((nodes, width + height))
weights = np.ones((width + height, width)) / 100
idxs = [0, 0, 1, 2]
# `tf.Session()` must be called and bound with `as sess:` — the original
# `with tf.Session as sess():` is not valid Python.
with tf.Session() as sess:
    # NOTE(review): the initializer is run before process_tree_tf() has added
    # any variables to the graph, so the bias variable is still uninitialized
    # when eval() runs — this ordering is part of the reported problem.
    sess.run(tf.global_variables_initializer())  # Error Here!
    r = process_tree_tf(matrix, weights, idxs)
    print(r.eval())

I get this error:

InvalidArgumentError: node 'process_tree_tf/Variable/Assign' has inputs from different frames. The input 'process_tree_tf/Const_1' is in the frame 'process_tree_tf/process_tree_tf/'. The input 'process_tree_tf/Variable' is in the frame ''.

However, if I restart the kernel in the Jupyter notebook and run everything again, I get this error instead:

FailedPreconditionError (see above for traceback): Attempting to use uninitialized value bias [[Node: bias/read = Identity[T=DT_FLOAT, _class=["loc:@bias"], _device="/job:localhost/replica:0/task:0/cpu:0"]]]

I also tried defining the variable as bias = tf.get_variable("bias", shape=[2], initializer=tf.constant_initializer(0.1)), but that produced the same problem.

I would be grateful for any pointers on where the variable should be created, and why the two errors differ between runs.

Thanks!

+4
1

You cannot create a tf.Variable inside the body of a TensorFlow tf.while_loop(). TensorFlow raises this error because the variable's initial value (the tf.constant() tensor) is created inside the loop's execution frame, while the variable itself lives outside any frame, so the Assign node receives inputs from different frames. The fix is to create the variable before entering the loop:

def process_tree_tf(matrix, weights, idxs, name=None):
    """Same traversal as the question's version, but with the bias variable
    created OUTSIDE the tf.while_loop so its initializer does not live inside
    the loop frame (which is what caused the InvalidArgumentError).

    Args:
        matrix: 2-D tensor (or array convertible to one) of node features.
        weights: weight tensor carried unchanged through the loop.
        idxs: per-node index tensor carried unchanged through the loop.
        name: optional name-scope name.

    Returns:
        The final `matrix` loop variable (element [1] of the while_loop result).
    """
    # `as scope:` replaces the original syntax error `.as scope():`, and the
    # scope values are the real arguments (`matrix`, not the undefined `tree`).
    with tf.name_scope(name, "process_tree", [matrix, weights, idxs]) as scope:
        # tf.sub was removed in TF 1.0; tf.subtract is the current name.
        loop_index = tf.subtract(tf.shape(matrix)[0], 1)
        loop_vars = loop_index, matrix, idxs, weights

        # Define the bias variable outside the loop to avoid problems.
        # tf.constant's second positional argument is dtype, so the shape
        # must be passed by keyword.
        bias = tf.Variable(tf.constant(0.1, shape=[2], dtype=tf.float64))

        def loop_condition(loop_idx, *_):
            return tf.greater(loop_idx, 0)

        def loop_body(loop_idx, mat, idxs, weights):
            x = mat[loop_idx]
            w = weights

            # You can still refer to `bias` in here, and the loop body
            # will capture it appropriately.
            ...
            return loop_idx - 1, mat, idxs, weights

        return tf.while_loop(loop_condition, loop_body, loop_vars, name=scope)[1]

(As an aside: when you use tf.get_variable(), pass a tf.constant_initializer() as the initializer rather than a tf.constant() tensor.)

+5

Source: https://habr.com/ru/post/1671292/


All Articles