self.vocab_size
1 parent 1b5824b commit ae38bec
ntloss/core.py
@@ -499,8 +499,7 @@ def setup_distance_lookup(
     num_ids = torch.nonzero(self.is_number_token, as_tuple=True)[0]
     # Create mapping from number token ids to their index in order of appearance in vocab:
     # e.g. token "3" -> id 519 -> dist_idx 1, then abs dist to 3 for other NT values will be found in row/column 1
-    final_vocab_size = self.number_values.shape[0]
-    vocab_to_dist_idx = torch.full((final_vocab_size,), -1, dtype=torch.long)
+    vocab_to_dist_idx = torch.full((self.vocab_size,), -1, dtype=torch.long)
     # Use arange to ensure order of appearance
     vocab_to_dist_idx[num_ids] = torch.arange(num_ids.size(0), dtype=torch.long)
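For context, vocab_to_dist_idx is indexed by raw token ids, so it must have one slot per entry in the full vocabulary; sizing it by self.number_values.shape[0] (the count of number tokens only) would let num_ids index past the end of the table. The following is a minimal, self-contained sketch of that mapping with toy values (vocab of 10 tokens, number tokens at ids 3, 5, 8); these values and standalone variables are illustrative assumptions, not the actual ntloss vocabulary or class attributes.

import torch

# Toy setup: 10-token vocab, of which ids 3, 5, and 8 are number tokens.
vocab_size = 10
is_number_token = torch.zeros(vocab_size, dtype=torch.bool)
is_number_token[[3, 5, 8]] = True

# Ids of number tokens, in order of appearance in the vocab.
num_ids = torch.nonzero(is_number_token, as_tuple=True)[0]  # tensor([3, 5, 8])

# The lookup table is indexed by token id, so it needs vocab_size slots;
# a table of size 3 (the number-token count) would be indexed out of range by id 5 or 8.
vocab_to_dist_idx = torch.full((vocab_size,), -1, dtype=torch.long)
vocab_to_dist_idx[num_ids] = torch.arange(num_ids.size(0), dtype=torch.long)

print(vocab_to_dist_idx)  # tensor([-1, -1, -1,  0, -1,  1, -1, -1,  2, -1])

Non-number tokens keep the sentinel -1, while each number token maps to its row/column index in the distance matrix.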