Skip to content

Commit 655c39d

Browse files
update .md and .ipynb (#2139)
* Update transformer_asr.md
* Update transformer_asr.ipynb
* Update examples/audio/md/transformer_asr.md

Co-authored-by: gemini-code-assist[bot] <176961590+gemini-code-assist[bot]@users.noreply.github.com>
1 parent 9f8949e commit 655c39d

File tree

2 files changed

+4
-4
lines changed

2 files changed

+4
-4
lines changed

examples/audio/ipynb/transformer_asr.ipynb

Lines changed: 2 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -321,7 +321,7 @@
321321
" preds = self([source, dec_input])\n",
322322
" one_hot = tf.one_hot(dec_target, depth=self.num_classes)\n",
323323
" mask = tf.math.logical_not(tf.math.equal(dec_target, 0))\n",
324-
" loss = model.compute_loss(None, one_hot, preds, sample_weight=mask)\n",
324+
" loss = self.compute_loss(None, one_hot, preds, sample_weight=mask)\n",
325325
" trainable_vars = self.trainable_variables\n",
326326
" gradients = tape.gradient(loss, trainable_vars)\n",
327327
" self.optimizer.apply_gradients(zip(gradients, trainable_vars))\n",
@@ -336,7 +336,7 @@
336336
" preds = self([source, dec_input])\n",
337337
" one_hot = tf.one_hot(dec_target, depth=self.num_classes)\n",
338338
" mask = tf.math.logical_not(tf.math.equal(dec_target, 0))\n",
339-
" loss = model.compute_loss(None, one_hot, preds, sample_weight=mask)\n",
339+
" loss = self.compute_loss(None, one_hot, preds, sample_weight=mask)\n",
340340
" self.loss_metric.update_state(loss)\n",
341341
" return {\"loss\": self.loss_metric.result()}\n",
342342
"\n",

examples/audio/md/transformer_asr.md

Lines changed: 2 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -262,7 +262,7 @@ class Transformer(keras.Model):
262262
preds = self([source, dec_input])
263263
one_hot = tf.one_hot(dec_target, depth=self.num_classes)
264264
mask = tf.math.logical_not(tf.math.equal(dec_target, 0))
265-
loss = model.compute_loss(None, one_hot, preds, sample_weight=mask)
265+
loss = self.compute_loss(y=one_hot, y_pred=preds, sample_weight=mask)
266266
trainable_vars = self.trainable_variables
267267
gradients = tape.gradient(loss, trainable_vars)
268268
self.optimizer.apply_gradients(zip(gradients, trainable_vars))
@@ -277,7 +277,7 @@ class Transformer(keras.Model):
277277
preds = self([source, dec_input])
278278
one_hot = tf.one_hot(dec_target, depth=self.num_classes)
279279
mask = tf.math.logical_not(tf.math.equal(dec_target, 0))
280-
loss = model.compute_loss(None, one_hot, preds, sample_weight=mask)
280+
loss = self.compute_loss(y=one_hot, y_pred=preds, sample_weight=mask)
281281
self.loss_metric.update_state(loss)
282282
return {"loss": self.loss_metric.result()}
283283

0 commit comments

Comments (0)