"src/vscode:/vscode.git/clone" did not exist on "afad0c18d90bc9f1aa560443830391181b454a52"
Unverified Commit 6015f91a authored by Phuc Van Phan, committed by GitHub

refactor: change default block_size (#26229)

* refactor: change default block_size

* fix: return tf to origin

* fix: change files to origin

* rebase

* rebase

* rebase

* rebase

* rebase

* rebase

* rebase

* rebase

* refactor: add min block_size to files

* reformat: add min block_size for run_clm tf
parent 8b46c5bc
@@ -497,13 +497,12 @@ def main():
     if data_args.block_size is None:
         block_size = tokenizer.model_max_length
-        if block_size > 1024:
+        if block_size > config.max_position_embeddings:
             logger.warning(
-                "The chosen tokenizer supports a `model_max_length` that is longer than the default `block_size` value"
-                " of 1024. If you would like to use a longer `block_size` up to `tokenizer.model_max_length` you can"
-                " override this default with `--block_size xxx`."
+                f"The tokenizer picked seems to have a very large `model_max_length` ({tokenizer.model_max_length}). "
+                f"Using block_size={min(1024, config.max_position_embeddings)} instead. You can change that default value by passing --block_size xxx."
             )
-            block_size = 1024
+            block_size = min(1024, config.max_position_embeddings)
     else:
         if data_args.block_size > tokenizer.model_max_length:
             logger.warning(
......
@@ -440,13 +440,12 @@ def main():
     if args.block_size is None:
         block_size = tokenizer.model_max_length
-        if block_size > 1024:
+        if block_size > config.max_position_embeddings:
             logger.warning(
-                "The chosen tokenizer supports a `model_max_length` that is longer than the default `block_size` value"
-                " of 1024. If you would like to use a longer `block_size` up to `tokenizer.model_max_length` you can"
-                " override this default with `--block_size xxx`."
+                f"The tokenizer picked seems to have a very large `model_max_length` ({tokenizer.model_max_length}). "
+                f"Using block_size={min(1024, config.max_position_embeddings)} instead. You can change that default value by passing --block_size xxx."
             )
-            block_size = 1024
+            block_size = min(1024, config.max_position_embeddings)
     else:
         if args.block_size > tokenizer.model_max_length:
             logger.warning(
......
@@ -362,9 +362,9 @@ def main():
         if block_size > config.max_position_embeddings:
             logger.warning(
                 f"The tokenizer picked seems to have a very large `model_max_length` ({tokenizer.model_max_length}). "
-                "Picking 1024 instead. You can change that default value by passing --block_size xxx."
+                f"Using block_size={min(1024, config.max_position_embeddings)} instead. You can change that default value by passing --block_size xxx."
             )
-            block_size = 1024
+            block_size = min(1024, config.max_position_embeddings)
     else:
         if data_args.block_size > tokenizer.model_max_length:
             logger.warning(
......
@@ -423,12 +423,12 @@ def main():
     if data_args.block_size is None:
         block_size = tokenizer.model_max_length
-        if block_size > 1024:
+        if block_size > config.max_position_embeddings:
             logger.warning(
                 f"The tokenizer picked seems to have a very large `model_max_length` ({tokenizer.model_max_length}). "
-                "Picking 1024 instead. You can change that default value by passing --block_size xxx."
+                f"Using block_size={min(1024, config.max_position_embeddings)} instead. You can change that default value by passing --block_size xxx."
            )
-            block_size = 1024
+            block_size = min(1024, config.max_position_embeddings)
     else:
         if data_args.block_size > tokenizer.model_max_length:
             logger.warning(
......
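The hunks above all apply the same change: when no `--block_size` is given, the default is now capped at `min(1024, config.max_position_embeddings)` rather than a hard-coded 1024, so models whose positional limit is below 1024 no longer get a block size they cannot handle. Below is a minimal standalone sketch of that selection logic; `pick_block_size` is an illustrative helper, not a function in the example scripts, and `gpt2` is just a convenient checkpoint for trying it out.

from transformers import AutoConfig, AutoTokenizer

def pick_block_size(block_size_arg, tokenizer, config):
    # Mirrors the updated default in the language-modeling examples.
    if block_size_arg is None:
        block_size = tokenizer.model_max_length
        if block_size > config.max_position_embeddings:
            # Cap at the smaller of 1024 and the model's positional limit.
            block_size = min(1024, config.max_position_embeddings)
    else:
        # An explicit block size is still clipped to what the tokenizer supports.
        block_size = min(block_size_arg, tokenizer.model_max_length)
    return block_size

config = AutoConfig.from_pretrained("gpt2")
tokenizer = AutoTokenizer.from_pretrained("gpt2")
print(pick_block_size(None, tokenizer, config))  # 1024 for gpt2, whose limit is exactly 1024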