I defined a class here
class BasicNetwork(object): def __init__(self, scope, task_name, is_train=False, img_shape=(80, 80)): self.scope = scope self.is_train = is_train self.task_name = task_name self.__create_network(scope, img_shape=img_shape) def __create_network(self, scope, img_shape=(80, 80)): with tf.variable_scope(scope): with tf.variable_scope(self.task_name): with tf.variable_scope('input_data'): self.inputs = tf.placeholder(shape=[None, *img_shape, cfg.HIST_LEN], dtype=tf.float32) with tf.variable_scope('networks'): with tf.variable_scope('conv_1'): self.conv_1 = slim.conv2d(activation_fn=tf.nn.relu, inputs=self.inputs, num_outputs=32, kernel_size=[8, 8], stride=4, padding='SAME', trainable=self.is_train) with tf.variable_scope('conv_2'): self.conv_2 = slim.conv2d(activation_fn=tf.nn.relu, inputs=self.conv_1, num_outputs=64, kernel_size=[4, 4], stride=2, padding='SAME', trainable=self.is_train) with tf.variable_scope('conv_3'): self.conv_3 = slim.conv2d(activation_fn=tf.nn.relu, inputs=self.conv_2, num_outputs=64, kernel_size=[3, 3], stride=1, padding='SAME', trainable=self.is_train) with tf.variable_scope('f_c'): self.fc = slim.fully_connected(slim.flatten(self.conv_3), 512, activation_fn=tf.nn.elu, trainable=self.is_train)
And I want to define two instances of BasicNetwork with different task names. The scope is global. But when I check the output, there is
ipdb> for i in net_1.layres: print(i) Tensor("global/simple/networks/conv_1/Conv/Relu:0", shape=(?, 20, 20, 32), dtype=float32, device=/device:GPU:2) Tensor("global/simple/networks/conv_2/Conv/Relu:0", shape=(?, 10, 10, 64), dtype=float32, device=/device:GPU:2) Tensor("global/simple/networks/conv_3/Conv/Relu:0", shape=(?, 10, 10, 64), dtype=float32, device=/device:GPU:2) Tensor("global/simple/networks/f_c/fully_connected/Elu:0", shape=(?, 512), dtype=float32, device=/device:GPU:2) ipdb> for i in net_2.layres: print(i) Tensor("global_1/supreme/networks/conv_1/Conv/Relu:0", shape=(?, 20, 20, 32), dtype=float32, device=/device:GPU:2) Tensor("global_1/supreme/networks/conv_2/Conv/Relu:0", shape=(?, 10, 10, 64), dtype=float32, device=/device:GPU:2) Tensor("global_1/supreme/networks/conv_3/Conv/Relu:0", shape=(?, 10, 10, 64), dtype=float32, device=/device:GPU:2) Tensor("global_1/supreme/networks/f_c/fully_connected/Elu:0", shape=(?, 512), dtype=float32, device=/device:GPU:2)
As you can see in the output, a new global_1 scope has been global_1 , but I want to make it global . I set reuse=True , but later discovered that when there is no realm named global , reuse=True cannot be used. What should I do?