"docs/source/vscode:/vscode.git/clone" did not exist on "801894e08c53a80c2b4377ce102a44dccfda8928"
Unverified Commit 6015f91a authored by Phuc Van Phan, committed by GitHub

refactor: change default block_size (#26229)

* refactor: change default block_size

* fix: return tf to origin

* fix: change files to origin

* rebase

* rebase

* rebase

* rebase

* rebase

* rebase

* rebase

* rebase

* refactor: add min block_size to files

* reformat: add min block_size for run_clm tf
parent 8b46c5bc
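
The diff below changes how the causal language modeling example scripts pick a default `block_size` when `--block_size` is not passed. Previously, whenever the tokenizer reported a very large `model_max_length`, the scripts fell back to a hard-coded 1024; the new code also caps that fallback at the model's `config.max_position_embeddings`. A minimal sketch of the resulting selection logic, reusing the `tokenizer` and `config` names from the scripts (illustrative only, not the scripts' full code):

    def pick_block_size(requested_block_size, tokenizer, config):
        # Sketch of the block_size defaulting logic after this change.
        if requested_block_size is None:
            block_size = tokenizer.model_max_length
            if block_size > config.max_position_embeddings:
                # Fall back to 1024, but never beyond what the model's
                # position embeddings can actually handle.
                block_size = min(1024, config.max_position_embeddings)
        else:
            # An explicit --block_size is still clamped to the tokenizer maximum.
            block_size = min(requested_block_size, tokenizer.model_max_length)
        return block_size
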
@@ -497,13 +497,12 @@ def main():
     if data_args.block_size is None:
         block_size = tokenizer.model_max_length
-        if block_size > 1024:
+        if block_size > config.max_position_embeddings:
             logger.warning(
-                "The chosen tokenizer supports a `model_max_length` that is longer than the default `block_size` value"
-                " of 1024. If you would like to use a longer `block_size` up to `tokenizer.model_max_length` you can"
-                " override this default with `--block_size xxx`."
+                f"The tokenizer picked seems to have a very large `model_max_length` ({tokenizer.model_max_length}). "
+                f"Using block_size={min(1024, config.max_position_embeddings)} instead. You can change that default value by passing --block_size xxx."
             )
-            block_size = 1024
+            block_size = min(1024, config.max_position_embeddings)
     else:
         if data_args.block_size > tokenizer.model_max_length:
             logger.warning(
......
@@ -440,13 +440,12 @@ def main():
     if args.block_size is None:
         block_size = tokenizer.model_max_length
-        if block_size > 1024:
+        if block_size > config.max_position_embeddings:
             logger.warning(
-                "The chosen tokenizer supports a `model_max_length` that is longer than the default `block_size` value"
-                " of 1024. If you would like to use a longer `block_size` up to `tokenizer.model_max_length` you can"
-                " override this default with `--block_size xxx`."
+                f"The tokenizer picked seems to have a very large `model_max_length` ({tokenizer.model_max_length}). "
+                f"Using block_size={min(1024, config.max_position_embeddings)} instead. You can change that default value by passing --block_size xxx."
             )
-            block_size = 1024
+            block_size = min(1024, config.max_position_embeddings)
     else:
         if args.block_size > tokenizer.model_max_length:
             logger.warning(
......
@@ -362,9 +362,9 @@ def main():
         if block_size > config.max_position_embeddings:
             logger.warning(
                 f"The tokenizer picked seems to have a very large `model_max_length` ({tokenizer.model_max_length}). "
-                "Picking 1024 instead. You can change that default value by passing --block_size xxx."
+                f"Using block_size={min(1024, config.max_position_embeddings)} instead. You can change that default value by passing --block_size xxx."
             )
-            block_size = 1024
+            block_size = min(1024, config.max_position_embeddings)
     else:
         if data_args.block_size > tokenizer.model_max_length:
             logger.warning(
......
@@ -423,12 +423,12 @@ def main():
     if data_args.block_size is None:
         block_size = tokenizer.model_max_length
-        if block_size > 1024:
+        if block_size > config.max_position_embeddings:
             logger.warning(
                 f"The tokenizer picked seems to have a very large `model_max_length` ({tokenizer.model_max_length}). "
-                "Picking 1024 instead. You can change that default value by passing --block_size xxx."
+                f"Using block_size={min(1024, config.max_position_embeddings)} instead. You can change that default value by passing --block_size xxx."
             )
-            block_size = 1024
+            block_size = min(1024, config.max_position_embeddings)
     else:
         if data_args.block_size > tokenizer.model_max_length:
             logger.warning(
......
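
To make the effect concrete: for a hypothetical model whose `config.max_position_embeddings` is 512 but whose tokenizer reports a huge `model_max_length`, the old default produced `block_size = 1024`, which exceeds what the model can embed, while the new default gives `min(1024, 512) = 512`. For models with at least 1024 position embeddings the chosen default is unchanged.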