TypeError: expected binary or unicode string — TensorFlow received a list

Alright, so bear with me. This is my code:

import tensorflow as tf
import os
import pickle
from numpy import asarray, reshape

os.chdir('PATH')

with open('xSensor.pkl','rb') as file:
    x_train = asarray(pickle.load(file))

with open('ySensor.pkl','rb') as file:
    y_train = asarray(pickle.load(file))

def neural_network(data):
    n_nodes_h1 = 1000
    n_nodes_h2 = 1000
    n_nodes_h3 = 500

    hidden_layer_1 = {
        'weights': tf.Variable(tf.random_normal([13, n_nodes_h1],dtype=tf.float64)),
        'biases': tf.Variable(tf.random_normal([n_nodes_h1],dtype=tf.float64))
    }

    #Omitting some code that just defines a couple more layers in the same format as above

    layer_1 = tf.matmul(data, hidden_layer_1['weights']) + hidden_layer_1['biases']
    layer_1 = tf.nn.relu(layer_1)

    #Omitting more code.

    output = tf.matmul(layer_3, output['weights']) + output['biases']

    return output


def train_network(x_t,y_t):
    x = tf.placeholder(tf.float64, shape=[None, 13])
    y = tf.placeholder(tf.float64)
    prediction = neural_network(x_t)
    y_t = reshape(y_t,(700,1))
    cost = tf.reduce_mean(tf.losses.mean_squared_error(labels=y_t, predictions=prediction))
    optimizer = tf.train.AdamOptimizer(0.005).minimize(cost) # default learning rate is 0.001
    n_epochs = 1000

    with tf.Session() as sess:
        sess.run(tf.global_variables_initializer())
        for _ in range(0, n_epochs):
            x_ = sess.run([optimizer,cost], feed_dict={x: x_t, y: y_t})
            print("Loss is: ", x_[1])



train_network(x_train,y_train)

And here is the error log:

Traceback (most recent call last):
  File "C:/Users/my system/Desktop/height_sensor.py", line 94, in <module>
    train_network(x_train,y_train)
  File "C:/Users/my system/Desktop/height_sensor.py", line 77, in train_network
    prediction = neural_network(x_t)
  File "C:/Users/my system/Desktop/height_sensor.py", line 59, in neural_network
    layer_1 = tf.matmul(data, hidden_layer_1['weights']) + hidden_layer_1['biases']
  File "C:\Users\my system\AppData\Local\Programs\Python\Python35\lib\site-packages\tensorflow\python\ops\math_ops.py", line 1844, in matmul
    a = ops.convert_to_tensor(a, name="a")
  ...etc...
TypeError: Expected binary or unicode string, got [15.0126, 1.38684, 27.6, 1.6323, -0.624113, 8.97763, 2.06581, 8.88303, -0.689839, 9.13284, 353.183, 349.178, 210.498]

Process finished with exit code 1

Sorry for posting so much code; I wanted to post waaaay less, but I was worried I might omit the thing that caused the error. If anyone could point me in the right direction, I would be very grateful. Thanks

1 answer

Judging by the traceback, the problem is not in the graph code itself but in x_train: asarray(pickle.load(file)) is producing an object array whose elements are still Python lists (for example because the rows have different lengths or mixed types), so convert_to_tensor cannot turn it into a numeric tensor and complains about getting a list. Print x_train.dtype and x_train.shape to check; make sure x_train converts to a plain float64 array of shape (n_samples, 13) before it reaches tf.matmul.
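For what it's worth, here is a minimal sketch (not your exact code) of how train_network could look once the data converts cleanly. It assumes x_train becomes a (700, 13) float64 array and y_train a (700, 1) array, the shapes given in the question, and it reuses the neural_network function defined above. One structural change worth noting: the graph is built from the placeholder x and the NumPy arrays are only supplied through feed_dict; in the posted version the placeholders are created but never actually used.

import numpy as np
import tensorflow as tf

def train_network(x_t, y_t):
    # Force plain numeric arrays up front; this fails immediately if the
    # rows are ragged or non-numeric, instead of failing later inside
    # convert_to_tensor.
    x_t = np.asarray(x_t, dtype=np.float64)
    y_t = np.asarray(y_t, dtype=np.float64).reshape(-1, 1)

    x = tf.placeholder(tf.float64, shape=[None, 13])
    y = tf.placeholder(tf.float64, shape=[None, 1])

    # Build the graph from the placeholder, not from the NumPy array.
    prediction = neural_network(x)

    cost = tf.reduce_mean(
        tf.losses.mean_squared_error(labels=y, predictions=prediction))
    optimizer = tf.train.AdamOptimizer(0.005).minimize(cost)

    with tf.Session() as sess:
        sess.run(tf.global_variables_initializer())
        for epoch in range(1000):
            _, loss = sess.run([optimizer, cost], feed_dict={x: x_t, y: y_t})
            print("Loss is:", loss)

If the data really is ragged, the np.asarray(..., dtype=np.float64) call raises a ValueError right away, which is a much clearer signal than the convert_to_tensor TypeError in your traceback.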

