@@ -263,7 +263,7 @@ class Transformer(keras.Model):
         preds = self([source, dec_input])
         one_hot = tf.one_hot(dec_target, depth=self.num_classes)
         mask = tf.math.logical_not(tf.math.equal(dec_target, 0))
-        loss = model.compute_loss(None, one_hot, preds, sample_weight=mask)
+        loss = self.compute_loss(y=one_hot, y_pred=preds, sample_weight=mask)
         trainable_vars = self.trainable_variables
         gradients = tape.gradient(loss, trainable_vars)
         self.optimizer.apply_gradients(zip(gradients, trainable_vars))
@@ -278,7 +278,7 @@ class Transformer(keras.Model):
         preds = self([source, dec_input])
         one_hot = tf.one_hot(dec_target, depth=self.num_classes)
         mask = tf.math.logical_not(tf.math.equal(dec_target, 0))
-        loss = model.compute_loss(None, one_hot, preds, sample_weight=mask)
+        loss = self.compute_loss(y=one_hot, y_pred=preds, sample_weight=mask)
         self.loss_metric.update_state(loss)
         return {"loss": self.loss_metric.result()}

0 commit comments