"lmdeploy/vscode:/vscode.git/clone" did not exist on "1a665a63b09a83ab06317f8acfe7e7f75037c5ab"
Commit 41750a6c authored by sshleifer, committed by Julien Chaumond

Fix typos

parent 12bb7fe7
@@ -302,7 +302,7 @@ class PreTrainedModel(nn.Module, ModuleUtilsMixin):
         self._tie_or_clone_weights(output_embeddings, self.get_input_embeddings())
 
     def _tie_or_clone_weights(self, output_embeddings, input_embeddings):
-        """ Tie or clone module weights depending of weither we are using TorchScript or not
+        """ Tie or clone module weights depending of whether we are using TorchScript or not
         """
         if self.config.torchscript:
             output_embeddings.weight = nn.Parameter(input_embeddings.weight.clone())
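The docstring fixed here describes the tie-vs-clone split: TorchScript does not support tied weights, so under torchscript the input embedding weights are copied into the output embeddings rather than shared. A minimal standalone sketch of the two paths, with illustrative shapes and a hypothetical torchscript flag standing in for self.config.torchscript:

import torch.nn as nn

input_embeddings = nn.Embedding(1000, 64)
output_embeddings = nn.Linear(64, 1000, bias=False)

torchscript = False  # hypothetical stand-in for self.config.torchscript
if torchscript:
    # Clone: TorchScript-safe, but the two weights can diverge after training.
    output_embeddings.weight = nn.Parameter(input_embeddings.weight.clone())
else:
    # Tie: both modules point at the exact same Parameter object.
    output_embeddings.weight = input_embeddings.weight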
@@ -1524,7 +1524,7 @@ class PreTrainedModel(nn.Module, ModuleUtilsMixin):
         return decoded
 
     # force one of token_ids to be generated by setting prob of all other tokens to 0.
-    def _force_token_ids_generation(self, scores, token_ids):
+    def _force_token_ids_generation(self, scores, token_ids) -> None:
         if isinstance(token_ids, int):
             token_ids = [token_ids]
         all_but_token_ids_mask = torch.tensor(
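Per the comment above the def, the method restricts generation to token_ids by zeroing the probability of every other token, which in logit space means setting their scores to -inf. A hedged sketch of that masking on toy numbers (variable names mirror the snippet; the rest is illustrative):

import torch

vocab_size = 10
scores = torch.randn(2, vocab_size)  # (batch_size, vocab_size) logits
token_ids = [3, 7]                   # only these tokens may be generated

# index of every token NOT in token_ids, as in the line above
all_but_token_ids_mask = torch.tensor(
    [x for x in range(vocab_size) if x not in token_ids], dtype=torch.long
)
scores[:, all_but_token_ids_mask] = -float("inf")
# argmax (or sampling) is now guaranteed to land on one of token_ids
assert all(int(t) in token_ids for t in scores.argmax(dim=-1))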
@@ -2025,8 +2025,8 @@ def create_position_ids_from_input_ids(input_ids, padding_idx):
     """
     # The series of casts and type-conversions here are carefully balanced to both work with ONNX export and XLA.
     mask = input_ids.ne(padding_idx).int()
-    incremental_indicies = torch.cumsum(mask, dim=1).type_as(mask) * mask
-    return incremental_indicies.long() + padding_idx
+    incremental_indices = torch.cumsum(mask, dim=1).type_as(mask) * mask
+    return incremental_indices.long() + padding_idx
 
 
 def prune_linear_layer(layer, index, dim=0):
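On a toy batch, the renamed incremental_indices computation gives real tokens positions padding_idx+1, padding_idx+2, ... while padding positions stay at padding_idx. A small worked example (input values are illustrative):

import torch

padding_idx = 1
input_ids = torch.tensor([[5, 6, 7, 1, 1],
                          [5, 1, 1, 1, 1]])

mask = input_ids.ne(padding_idx).int()  # 1 for real tokens, 0 for padding
incremental_indices = torch.cumsum(mask, dim=1).type_as(mask) * mask
position_ids = incremental_indices.long() + padding_idx
# position_ids == tensor([[2, 3, 4, 1, 1],
#                         [2, 1, 1, 1, 1]])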