"git@developer.sourcefind.cn:chenpangpang/transformers.git" did not exist on "30a09f382726b667f7ab7334d5b24452c72ffadb"
Unverified Commit a70da86b authored by J-shang, committed by GitHub

Fix hint in src/transformers/modeling_utils.py (#22074)

fix hint
parent 419d979f
@@ -807,7 +807,7 @@ class ModuleUtilsMixin:
         return extended_attention_mask
 
     def get_extended_attention_mask(
-        self, attention_mask: Tensor, input_shape: Tuple[int], device: device = None, dtype: torch.float = None
+        self, attention_mask: Tensor, input_shape: Tuple[int], device: torch.device = None, dtype: torch.float = None
     ) -> Tensor:
         """
         Makes broadcastable attention and causal masks so that future and masked tokens are ignored.
...
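Why the fix matters: the old annotation `device: device = None` relied on a bare `device` name (presumably imported at module level), which is ambiguous because the parameter itself is also called `device`; `torch.device` makes the hint explicit. Below is a minimal standalone sketch of the corrected signature and the broadcastable-mask idea, not the actual method from modeling_utils.py: the `Optional[...]` wrappers and `torch.dtype` hint are my tightening (the commit leaves `dtype: torch.float` untouched), and the real method additionally builds causal masks for decoders.

```python
from typing import Optional, Tuple

import torch
from torch import Tensor


def get_extended_attention_mask(
    attention_mask: Tensor,
    input_shape: Tuple[int, ...],
    device: Optional[torch.device] = None,
    dtype: Optional[torch.dtype] = None,
) -> Tensor:
    """Sketch of the broadcastable-mask idea behind the real method.

    input_shape is used by the real method to build causal masks for
    decoder models; it is unused in this simplified sketch.
    """
    if dtype is None:
        dtype = torch.float32
    # [batch, seq_len] -> [batch, 1, 1, seq_len] so the mask broadcasts
    # across attention heads and query positions.
    extended = attention_mask[:, None, None, :].to(dtype=dtype)
    if device is not None:
        extended = extended.to(device)
    # Convert 1/0 keep/mask values into an additive bias: 0.0 where tokens
    # are visible, a large negative number where they are masked.
    return (1.0 - extended) * torch.finfo(dtype).min


# Example call: with the corrected hint, passing a torch.device should
# type-check cleanly under mypy/pyright.
mask = torch.tensor([[1, 1, 1, 0]])
bias = get_extended_attention_mask(mask, (1, 4), device=torch.device("cpu"))
```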