Unverified Commit 5d75c1bb authored by Tong Gao, committed by GitHub

[Enhancement] Increase default task size (#360)

parent b8bf16e8
...
@@ -29,7 +29,7 @@ class SizePartitioner(BasePartitioner):
     def __init__(self,
                  out_dir: str,
-                 max_task_size: int = 2000,
+                 max_task_size: int = 40000,
                  gen_task_coef: int = 20,
                  dataset_size_path: str = '.cache/dataset_size.json'):
         super().__init__(out_dir)
...
@@ -91,7 +91,7 @@ def parse_args():
         help='The maximum size of an infer task. Only '
         'effective when "infer" is missing from the config.',
         type=int,
-        default=2000),
+        default=40000),
     parser.add_argument(
         '--gen-task-coef',
         help='The dataset cost measurement coefficient for generation tasks, '
...
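
For context, below is a minimal sketch of how the affected defaults could be overridden by constructing `SizePartitioner` directly instead of relying on the new `40000` default. Only the class name and keyword arguments come from this diff; the import path and the output directory are assumptions for illustration.

```python
# Hedged sketch: build SizePartitioner with an explicit max_task_size
# rather than the new 40000 default introduced by this commit.
from opencompass.partitioners import SizePartitioner  # assumed import path

partitioner = SizePartitioner(
    out_dir='outputs/partitions/',      # hypothetical output directory
    max_task_size=2000,                 # restore the previous, smaller limit
    gen_task_coef=20,                   # default shown in this diff
    dataset_size_path='.cache/dataset_size.json',
)
```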