"...git@developer.sourcefind.cn:chenpangpang/transformers.git" did not exist on "b6c9f47fd6f911450024c52e382e544e5d04387a"
Unverified commit 2848c9ce, authored by Partho, committed by GitHub

Add type hints for M2M (#18998)

* added type hints

* fixed typo
parent 4bd36f18
```diff
@@ -17,7 +17,7 @@
 import math
 import random
-from typing import Optional, Tuple, Union
+from typing import List, Optional, Tuple, Union
 
 import torch
 from torch import nn
```
```diff
@@ -712,13 +712,13 @@ class M2M100Encoder(M2M100PreTrainedModel):
     def forward(
         self,
-        input_ids=None,
-        attention_mask=None,
-        head_mask=None,
-        inputs_embeds=None,
-        output_attentions=None,
-        output_hidden_states=None,
-        return_dict=None,
+        input_ids: Optional[torch.Tensor] = None,
+        attention_mask: Optional[torch.Tensor] = None,
+        head_mask: Optional[torch.Tensor] = None,
+        inputs_embeds: Optional[torch.Tensor] = None,
+        output_attentions: Optional[bool] = None,
+        output_hidden_states: Optional[bool] = None,
+        return_dict: Optional[bool] = None,
     ):
         r"""
         Args:
```
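With the annotated signature, a static checker such as mypy can validate encoder calls. Below is a minimal usage sketch, not part of the commit; it assumes the public facebook/m2m100_418M checkpoint and illustrative inputs:

```python
# Minimal sketch exercising the newly annotated M2M100Encoder.forward signature.
from transformers import M2M100Model, M2M100Tokenizer

tokenizer = M2M100Tokenizer.from_pretrained("facebook/m2m100_418M", src_lang="en")
model = M2M100Model.from_pretrained("facebook/m2m100_418M")

inputs = tokenizer("Hello world", return_tensors="pt")

encoder_outputs = model.get_encoder()(
    input_ids=inputs["input_ids"],            # Optional[torch.Tensor]
    attention_mask=inputs["attention_mask"],  # Optional[torch.Tensor]
    output_hidden_states=True,                # Optional[bool]
    return_dict=True,                         # Optional[bool]
)
print(encoder_outputs.last_hidden_state.shape)  # (1, seq_len, d_model)
```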
```diff
@@ -887,18 +887,18 @@ class M2M100Decoder(M2M100PreTrainedModel):
     def forward(
         self,
-        input_ids=None,
-        attention_mask=None,
-        encoder_hidden_states=None,
-        encoder_attention_mask=None,
-        head_mask=None,
-        cross_attn_head_mask=None,
-        past_key_values=None,
-        inputs_embeds=None,
-        use_cache=None,
-        output_attentions=None,
-        output_hidden_states=None,
-        return_dict=None,
+        input_ids: Optional[torch.Tensor] = None,
+        attention_mask: Optional[torch.Tensor] = None,
+        encoder_hidden_states: Optional[torch.Tensor] = None,
+        encoder_attention_mask: Optional[torch.Tensor] = None,
+        head_mask: Optional[torch.Tensor] = None,
+        cross_attn_head_mask: Optional[torch.Tensor] = None,
+        past_key_values: Optional[List[torch.FloatTensor]] = None,
+        inputs_embeds: Optional[torch.Tensor] = None,
+        use_cache: Optional[bool] = None,
+        output_attentions: Optional[bool] = None,
+        output_hidden_states: Optional[bool] = None,
+        return_dict: Optional[bool] = None,
     ):
         r"""
         Args:
```
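The decoder signature additionally types the cache: `past_key_values: Optional[List[torch.FloatTensor]]` and `use_cache: Optional[bool]`. Below is a hedged sketch of two-step incremental decoding that reuses `model`, `tokenizer`, `inputs`, and `encoder_outputs` from the encoder sketch above; the token choices are illustrative only:

```python
# Minimal sketch of incremental decoding through the annotated M2M100Decoder.forward.
import torch

decoder = model.get_decoder()

# First step: start from decoder_start_token_id and ask the decoder for a cache.
decoder_input_ids = torch.tensor([[model.config.decoder_start_token_id]])
step = decoder(
    input_ids=decoder_input_ids,
    encoder_hidden_states=encoder_outputs.last_hidden_state,  # Optional[torch.Tensor]
    encoder_attention_mask=inputs["attention_mask"],          # Optional[torch.Tensor]
    use_cache=True,                                           # Optional[bool]
    return_dict=True,
)

# Second step: feed the cache back in. Its annotation is
# Optional[List[torch.FloatTensor]]; at runtime it is a tuple of per-layer
# key/value tensors, which the annotation approximates.
next_token = torch.tensor([[tokenizer.eos_token_id]])  # illustrative token only
step = decoder(
    input_ids=next_token,
    encoder_hidden_states=encoder_outputs.last_hidden_state,
    encoder_attention_mask=inputs["attention_mask"],
    past_key_values=step.past_key_values,
    use_cache=True,
    return_dict=True,
)
print(step.last_hidden_state.shape)  # (1, 1, d_model)
```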