
Commit c0cf9e9

adapt paddle 3.2, remove some hack code to avoid conflict. (#4120)
1 parent: 8247eba

File tree

4 files changed: +4 -12 lines changed


paddlespeech/s2t/__init__.py

Lines changed: 0 additions & 8 deletions
@@ -37,14 +37,6 @@
 paddle.uint16 = 'uint16'
 paddle.cdouble = 'complex128'
 
-if not hasattr(paddle, 'softmax'):
-    logger.debug("register user softmax to paddle, remove this when fixed!")
-    setattr(paddle, 'softmax', paddle.nn.functional.softmax)
-
-if not hasattr(paddle, 'log_softmax'):
-    logger.debug("register user log_softmax to paddle, remove this when fixed!")
-    setattr(paddle, 'log_softmax', paddle.nn.functional.log_softmax)
-
 if not hasattr(paddle, 'sigmoid'):
     logger.debug("register user sigmoid to paddle, remove this when fixed!")
     setattr(paddle, 'sigmoid', paddle.nn.functional.sigmoid)
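
For context, the deleted lines follow the same compat-shim pattern that still guards sigmoid above: probe the paddle module at import time and alias a missing top-level name to its paddle.nn.functional implementation. Per the commit message, these aliases conflict with Paddle 3.2's own symbols, so the softmax and log_softmax shims are dropped. A minimal sketch of the pattern that remains (the loop form is illustrative, not the file's exact code):

    import logging

    import paddle
    import paddle.nn.functional as F

    logger = logging.getLogger(__name__)

    # Register a top-level alias only when the installed Paddle lacks it.
    # After Paddle 3.2, re-registering softmax/log_softmax clashed with the
    # framework's own names, which is why the hunk above deletes those two
    # shims and keeps only sigmoid.
    for _name in ('sigmoid',):
        if not hasattr(paddle, _name):
            logger.debug("register user %s to paddle, remove this when fixed!",
                         _name)
            setattr(paddle, _name, getattr(F, _name))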

paddlespeech/s2t/models/wav2vec2/modules/modeling_wav2vec2.py

Lines changed: 1 addition & 1 deletion
@@ -891,7 +891,7 @@ def forward(self, hidden_states, mask_time_indices=None):
                 hard=True).type_as(hidden_states)
 
             # compute perplexity
-            codevector_soft_dist = paddle.softmax(
+            codevector_soft_dist = paddle.nn.functional.softmax(
                 hidden_states.reshape((batch_size * sequence_length,
                                        self.num_groups, -1)).float(),
                 axis=-1)
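
The patched line feeds the quantizer logits through a softmax to estimate codebook perplexity, wav2vec 2.0's diversity measure over codevector usage. A hedged sketch of that computation with toy shapes (all names and sizes here are illustrative, not the file's exact code):

    import paddle
    import paddle.nn.functional as F

    # Toy logits: (batch * seq_len, num_groups, codevectors_per_group)
    logits = paddle.randn([6, 2, 320])

    # Soft distribution over codevectors, as in the patched line.
    soft_dist = F.softmax(logits, axis=-1)

    # Perplexity = exp(entropy) of the mean distribution per group; higher
    # values mean more codevectors are actually being used.
    mean_probs = soft_dist.mean(axis=0)
    entropy = -paddle.sum(mean_probs * paddle.log(mean_probs + 1e-7), axis=-1)
    perplexity = paddle.exp(entropy).sum()
    print(perplexity)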

paddlespeech/s2t/modules/attention.py

Lines changed: 2 additions & 2 deletions
@@ -120,11 +120,11 @@ def forward_attention(
             # for last chunk, time2 might be larger than scores.size(-1)
             mask = mask[:, :, :, :scores.shape[-1]]
             scores = scores.masked_fill(mask, -float('inf'))
-            attn = paddle.softmax(
+            attn = paddle.nn.functional.softmax(
                 scores, axis=-1).masked_fill(mask,
                                              0.0)  # (batch, head, time1, time2)
         else:
-            attn = paddle.softmax(
+            attn = paddle.nn.functional.softmax(
                 scores, axis=-1)  # (batch, head, time1, time2)
 
         p_attn = self.dropout(attn)
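
The pattern in this hunk is masked attention: padded key positions are set to -inf before the softmax so they receive zero weight, and the second masked_fill re-zeroes them to guard against NaN rows where every position is masked. A minimal sketch with made-up shapes (assuming paddle.Tensor.masked_fill broadcasting, as the code above does):

    import paddle
    import paddle.nn.functional as F

    scores = paddle.randn([1, 2, 3, 4])  # (batch, head, time1, time2)
    # Last two key positions are padding; shape broadcasts over head/time1.
    mask = paddle.to_tensor([[[[False, False, True, True]]]])

    scores = scores.masked_fill(mask, -float('inf'))
    # softmax assigns ~0 to the -inf positions; the trailing masked_fill
    # cleans up rows that would be all -inf (softmax there yields NaN).
    attn = F.softmax(scores, axis=-1).masked_fill(mask, 0.0)
    print(attn.sum(axis=-1))  # rows with any valid position sum to 1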

paddlespeech/s2t/modules/decoder.py

Lines changed: 1 addition & 1 deletion
@@ -189,7 +189,7 @@ def forward_one_step(
         else:
             y = x[:, -1]
         if self.use_output_layer:
-            y = paddle.log_softmax(self.output_layer(y), axis=-1)
+            y = paddle.nn.functional.log_softmax(self.output_layer(y), axis=-1)
         return y, new_cache
 
     # beam search API (see ScorerInterface)
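
forward_one_step returns per-token log-probabilities because beam search accumulates hypothesis scores additively, and log_softmax does the projection-to-log-prob step in one numerically stable call. A small sketch of that step (the hidden size, vocabulary size, and layer here are illustrative):

    import paddle
    import paddle.nn.functional as F

    output_layer = paddle.nn.Linear(256, 5000)  # decoder dim -> vocab size
    y = paddle.randn([4, 256])                  # hidden state of the last step

    logp = F.log_softmax(output_layer(y), axis=-1)
    # Each row sums to 1 in probability space; beam search adds these
    # log-probs to the running score of each hypothesis.
    print(paddle.exp(logp).sum(axis=-1))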
