Unverified commit e6522e49, authored by Adam Louly, committed by GitHub

Fixing the failure of models without max_position_embeddings attribute. (#27499)

fix max pos issue

Co-authored-by: Adam Louly <adamlouly@microsoft.com@orttrainingdev9.d32nl1ml4oruzj4qz3bqlggovf.px.internal.cloudapp.net>
parent a0633c44
@@ -497,15 +497,20 @@ def main():
                 batched=True,
                 remove_columns=column_names,
             )
+    if hasattr(config, "max_position_embeddings"):
+        max_pos_embeddings = config.max_position_embeddings
+    else:
+        # Define a default value if the attribute is missing in the config.
+        max_pos_embeddings = 1024
 
     if data_args.block_size is None:
         block_size = tokenizer.model_max_length
-        if block_size > config.max_position_embeddings:
+        if block_size > max_pos_embeddings:
             logger.warning(
                 f"The tokenizer picked seems to have a very large `model_max_length` ({tokenizer.model_max_length}). "
-                f"Using block_size={min(1024, config.max_position_embeddings)} instead. You can change that default value by passing --block_size xxx."
+                f"Using block_size={min(1024, max_pos_embeddings)} instead. You can change that default value by passing --block_size xxx."
             )
-            block_size = min(1024, config.max_position_embeddings)
+            block_size = min(1024, max_pos_embeddings)
     else:
         if data_args.block_size > tokenizer.model_max_length:
             logger.warning(
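For reference, a minimal standalone sketch of the fallback pattern this commit introduces. The SimpleNamespace configs below are hypothetical stand-ins for a transformers model config, not part of the commit; the 1024 default mirrors the value chosen in the diff above.

from types import SimpleNamespace

# Hypothetical stand-ins for model configs: one with the attribute, one without.
config_with_attr = SimpleNamespace(max_position_embeddings=512)
config_without_attr = SimpleNamespace()

for config in (config_with_attr, config_without_attr):
    # getattr with a default collapses the hasattr/else branch from the diff
    # into one expression; 1024 is the fallback the commit uses.
    max_pos_embeddings = getattr(config, "max_position_embeddings", 1024)
    block_size = min(1024, max_pos_embeddings)
    print(block_size)  # prints 512, then 1024

Either spelling (hasattr/else as in the diff, or getattr with a default) avoids the AttributeError previously raised for configs that do not define max_position_embeddings.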