We read every piece of feedback, and take your input very seriously.
To see all available qualifiers, see our documentation.
1 parent d0d0172 · commit adbaa23 (copy full SHA for adbaa23)
optimum/habana/transformers/models/gpt_neox/modeling_gpt_neox.py
@@ -269,6 +269,7 @@ def gaudi_gpt_neox_model_forward(
269
return_dict: Optional[bool] = None,
270
cache_position: Optional[torch.LongTensor] = None,
271
token_idx: Optional[torch.Tensor] = None,
272
+ **kwargs,
273
) -> Union[Tuple, BaseModelOutputWithPast]:
274
"""
275
Copied from GPTNeoxModel.forward: https://github.com/huggingface/transformers/blob/main/src/transformers/models/gpt_neox/modeling_gpt_neox.py
0 commit comments