Destination slicing in Keras / Tensorflow?

Here is my custom Keras loss function:

def custom_loss(y_true, y_pred):
    """Mean squared error, except that every 7th column starting at
    index 4 is rescaled by ((a - b) / a)**2, where a and b are the
    y_true columns two and three positions to its left."""
    err_sq = (y_true - y_pred) ** 2
    ratio = (y_true[:, 2:-1:7] - y_true[:, 3:-1:7]) / y_true[:, 2:-1:7]
    # In-place slice assignment — works on numpy arrays, not on tensors.
    err_sq[:, 4:-1:7] *= ratio ** 2
    return err_sq.mean()

But "sqerr" is not a numpy array, so this code leads to an error

TypeError: Tensor object does not support element assignment

So, I read the post "How to assign slice to Tensorflow" , including the answer from jdehesa and the GitHub Page in this discussion . And now, what I have ...

def custom_loss(y_true, y_pred):
    """Attempted tensor version of the custom loss using a tf.Variable.

    NOTE(review): wrapping the tensor in tf.Variable with
    validate_shape=False erases the static shape, so Keras later sees
    ndim=None and fails — this is the bug the surrounding text describes.
    """
    # Element-wise squared error; same shape as y_true / y_pred.
    sqerr = K.square(y_true-y_pred)

    # Variables (unlike plain tensors) support sliced .assign();
    # validate_shape=False is used because the batch dimension is
    # dynamic, but it also drops all static shape information.
    sqerr = tf.Variable(  sqerr , validate_shape=False )
    # Rescale every 7th column (from index 4) by the squared relative
    # difference of two neighbouring y_true columns, and force the
    # assignment to execute before sqerr is read again.
    with tf.control_dependencies([sqerr[:,4:-1:7].assign( sqerr[:,4:-1:7] * ((y_true[:,2:-1:7]-y_true[:,3:-1:7])/y_true[:,2:-1:7])**2 )]):
        sqerr = tf.identity(sqerr)

    return K.mean(sqerr)

... but apparently it fails when it really comes time to use it:

Traceback (most recent call last):
  File "my_awesome_nn.py", line 119, in setup_model

    model.compile(loss=custom_loss, optimizer=opt)

  File "/opt/anaconda/envs/py35/lib/python3.5/site-packages/keras/engine/training.py", line 850, in compile

    sample_weight, mask)

  File "/opt/anaconda/envs/py35/lib/python3.5/site-packages/keras/engine/training.py", line 465, in weighted

    score_array = K.mean(score_array, axis=list(range(weight_ndim, ndim)))

TypeError: 'NoneType' object cannot be interpreted as an integer

So what's happening is that TF converts the loss tensor into a Variable, and in doing so it loses the static shape information (i.e. the shape becomes "?"). This is a consequence of passing validate_shape=False, and it means Keras can no longer infer the tensor's dimensionality. To demonstrate:

def custom_loss(y_true, y_pred):  
        """Same tf.Variable attempt as above, instrumented with prints
        to show where the static rank information is lost."""
        sqerr = K.square(y_true-y_pred)
        # Before the Variable wrap: rank is known (prints 2).
        print("K.ndim(sqerr) #1 = ",K.ndim(sqerr))
        # validate_shape=False discards the static shape entirely.
        sqerr = tf.Variable(  sqerr , validate_shape=False )
        # After the wrap: rank is unknown (prints None).
        print("K.ndim(sqerr) #2 = ",K.ndim(sqerr))
        with tf.control_dependencies([sqerr[:,4:-1:7].assign( sqerr[:,4:-1:7] * ((y_true[:,2:-1:7]-y_true[:,3:-1:7])/y_true[:,2:-1:7])**2 )]):
            sqerr = tf.identity(sqerr)

        return K.mean(sqerr)

...

K.ndim(sqerr) #1 =  2

K.ndim(sqerr) #2 =  None

So when Keras's training.py executes "ndim = K.ndim(score_array)", it gets back None, and the subsequent range(weight_ndim, ndim) call fails because ndim is a NoneType.

My question is: how can I do this kind of destination slicing in a way that Keras/TensorFlow can handle — that is, rescale part of the loss tensor without losing the shape information?

(In case it matters: apart from the rescaled columns, the rest of the loss is just plain MSE.)

+4
1

You don't need to assign into the tensor at all. Instead of overwriting sqerr[:, 4:-1:7] in place, you can subtract its original contribution from the total loss and add back the rescaled version.

def custom_loss_keras(y_true, y_pred):
    """Custom loss equivalent to the numpy version, written without any
    sliced assignment: subtract the plain squared error of the target
    columns from the total, then add back the rescaled version."""
    # column-wise squared error and its row-wise total
    sq_diff = K.square(y_true - y_pred)
    per_row = K.sum(sq_diff, axis=-1)

    # remove the unscaled contribution of every 7th column (from index 4)
    per_row -= K.sum(sq_diff[:, 4:-1:7], axis=-1)

    # re-add it scaled by ((a - b) / a)**2, guarding against a == 0
    safe_den = K.maximum(y_true[:, 2:-1:7], K.epsilon())
    diff = y_true[:, 2:-1:7] - y_true[:, 3:-1:7]
    per_row += K.sum(sq_diff[:, 4:-1:7] * K.square(diff / safe_den), axis=-1)

    # average over columns, then over the batch
    n_cols = K.int_shape(y_pred)[-1]
    per_row /= n_cols
    return K.mean(per_row)

You can verify that it matches the numpy version:

def custom_loss_numpy(y_true, y_pred):
    """NumPy reference implementation of the custom loss, used to check
    the Keras version numerically."""
    squared = np.square(y_true - y_pred)
    num = y_true[:, 2:-1:7] - y_true[:, 3:-1:7]
    den = y_true[:, 2:-1:7]
    squared[:, 4:-1:7] = squared[:, 4:-1:7] * np.square(num / den)
    return np.mean(squared)

y_true = np.random.rand(50, 1000)
y_pred = np.random.rand(50, 1000)

print(custom_loss_numpy(y_true, y_pred))
889.992075384

print(K.eval(custom_loss_keras(K.variable(y_true), K.variable(y_pred))))
889.992
+2

Source: https://habr.com/ru/post/1688317/


All Articles