
Commit 81ba549

Fixing the ppl overflow issue
1 parent f31488d commit 81ba549

File tree

1 file changed (+4, -2 lines)

03-wordemb-pytorch/wordemb-skip.py

Lines changed: 4 additions & 2 deletions
@@ -102,8 +102,9 @@ def calc_sent_loss(sent):
     optimizer.step()
     if (sent_id + 1) % 5000 == 0:
       print("--finished %r sentences" % (sent_id + 1))
+  train_ppl = float('inf') if train_loss / train_words > 709 else math.exp(train_loss / train_words)
   print("iter %r: train loss/word=%.4f, ppl=%.4f, time=%.2fs" % (
-    ITER, train_loss / train_words, math.exp(train_loss / train_words), time.time() - start))
+    ITER, train_loss / train_words, train_ppl, time.time() - start))
   # Evaluate on dev set
   dev_words, dev_loss = 0, 0.0
   start = time.time()
@@ -112,8 +113,9 @@ def calc_sent_loss(sent):
     my_loss = calc_sent_loss(sent)
     dev_loss += my_loss.item()
     dev_words += len(sent)
+  dev_ppl = float('inf') if dev_loss / dev_words > 709 else math.exp(dev_loss / dev_words)
   print("iter %r: dev loss/word=%.4f, ppl=%.4f, time=%.2fs" % (
-    ITER, dev_loss / dev_words, math.exp(dev_loss / dev_words), time.time() - start))
+    ITER, dev_loss / dev_words, dev_ppl, time.time() - start))

   print("saving embedding files")
   with open(embeddings_location, 'w') as embeddings_file:
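For context, math.exp raises OverflowError once its argument exceeds roughly 709.78 (the natural log of the largest representable double), which is why the commit caps the loss/word at 709 before exponentiating and otherwise reports infinite perplexity. Below is a minimal standalone sketch of the same guard; the safe_ppl helper is illustrative only and not part of the repository, since the commit inlines the check.

import math

def safe_ppl(total_loss, total_words):
    # Perplexity with an overflow guard: math.exp overflows a double
    # once its argument exceeds ~709.78, so any loss/word above 709
    # is reported as infinite perplexity instead of crashing.
    loss_per_word = total_loss / total_words
    return float('inf') if loss_per_word > 709 else math.exp(loss_per_word)

print(safe_ppl(7.0, 10))     # ~2.0138 for a typical loss/word of 0.7
print(safe_ppl(8000.0, 10))  # inf instead of OverflowError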
