Unverified commit fcab30f8 authored by Hubert, committed by GitHub

[Fix] change save_every defaults to 1 (#592)

parent 19ad7f96
@@ -42,7 +42,7 @@ class AttackInferencer(BaseInferencer):
         gen_field_replace_token (:obj:`str`, optional): Used to replace the
             generation field token when generating prompts.
         save_every (:obj:`int`, optional): Save intermediate results every
-            `save_every` epochs.
+            `save_every` iters. Defaults to 1.
         generation_kwargs (:obj:`Dict`, optional): Parameters for the
             :obj:`model.generate()` method.
     """
@@ -58,7 +58,7 @@ class AttackInferencer(BaseInferencer):
             gen_field_replace_token: Optional[str] = '',
             output_json_filepath: Optional[str] = './icl_inference_output',
             output_json_filename: Optional[str] = 'predictions',
-            save_every: Optional[int] = None,
+            save_every: Optional[int] = 1,
             dataset_cfg: Optional[List[int]] = None,
             **kwargs) -> None:
         super().__init__(
...
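The same one-line default change is applied to every inferencer below. For readers unfamiliar with the parameter, here is a minimal illustrative sketch, not the repository's actual implementation: it assumes a hypothetical run_inference helper and treats save_every=None as "no intermediate saving" (an assumption based on the old default and the Optional[int] annotation), so the new default of 1 means a checkpoint is written after every iteration.

import json
import os
from typing import Callable, List, Optional


def run_inference(prompts: List[str],
                  generate: Callable[[str], str],
                  output_json_filepath: str = './icl_inference_output',
                  output_json_filename: str = 'predictions',
                  save_every: Optional[int] = 1) -> List[str]:
    """Generate predictions, dumping partial results every `save_every` iters."""
    os.makedirs(output_json_filepath, exist_ok=True)
    tmp_path = os.path.join(output_json_filepath,
                            f'tmp_{output_json_filename}.json')
    results: List[str] = []
    for index, prompt in enumerate(prompts, start=1):
        results.append(generate(prompt))
        # With save_every=1 (the new default) a checkpoint is written after
        # every iteration; in this sketch, save_every=None skips it entirely.
        if save_every is not None and index % save_every == 0:
            with open(tmp_path, 'w', encoding='utf-8') as f:
                json.dump(results, f, ensure_ascii=False, indent=2)
    return results


if __name__ == '__main__':
    print(run_inference(['q1', 'q2', 'q3'], generate=str.upper))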
@@ -36,7 +36,7 @@ class GenInferencer(BaseInferencer):
         gen_field_replace_token (:obj:`str`, optional): Used to replace the
             generation field token when generating prompts.
         save_every (:obj:`int`, optional): Save intermediate results every
-            `save_every` epochs.
+            `save_every` iters. Defaults to 1.
         generation_kwargs (:obj:`Dict`, optional): Parameters for the
             :obj:`model.generate()` method.
     """
@@ -50,7 +50,7 @@ class GenInferencer(BaseInferencer):
             gen_field_replace_token: Optional[str] = '',
             output_json_filepath: Optional[str] = './icl_inference_output',
             output_json_filename: Optional[str] = 'predictions',
-            save_every: Optional[int] = None,
+            save_every: Optional[int] = 1,
             **kwargs) -> None:
         super().__init__(
             model=model,
...
@@ -34,7 +34,7 @@ class SCInferencer(BaseInferencer):
         gen_field_replace_token (:obj:`str`, optional): Used to replace the
             generation field token when generating prompts.
         save_every (:obj:`int`, optional): Save intermediate results every
-            `save_every` epochs.
+            `save_every` iters. Defaults to 1.
         generation_kwargs (:obj:`Dict`, optional): Parameters for the
             :obj:`model.generate()` method.
         sc_size (:obj:`int`, optional): Sample size for Self-Consistency
@@ -51,7 +51,7 @@ class SCInferencer(BaseInferencer):
             gen_field_replace_token: Optional[str] = '',
             output_json_filepath: Optional[str] = './icl_inference_output',
             output_json_filename: Optional[str] = 'predictions',
-            save_every: Optional[int] = None,
+            save_every: Optional[int] = 1,
             sc_size: Optional[int] = 1,
             infer_type: Optional[str] = '',
             generation_kwargs: dict = {},
...
@@ -43,7 +43,7 @@ class ToTInferencer(GenInferencer):
         gen_field_replace_token (:obj:`str`, optional): Used to replace the
             generation field token when generating prompts.
         save_every (:obj:`int`, optional): Save intermediate results every
-            `save_every` epochs.
+            `save_every` iters. Defaults to 1.
         generation_kwargs (:obj:`Dict`, optional): Parameters for the
             :obj:`model.generate()` method.
         naive_run (:obj:`bool`): if True, run naive IO/CoT sampling instead of
@@ -74,7 +74,7 @@ class ToTInferencer(GenInferencer):
             gen_field_replace_token: Optional[str] = '',
             output_json_filepath: Optional[str] = './icl_inference_output',
             output_json_filename: Optional[str] = 'predictions',
-            save_every: Optional[int] = None,
+            save_every: Optional[int] = 1,
             naive_run: bool = False,
             prompt_wrapper: dict = {},
             prompt_sample: str = 'standard',
...
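A hedged usage note: the parameter keeps its Optional[int] annotation, so callers who preferred the old behavior can presumably still pass save_every=None explicitly, or pick a larger interval to checkpoint less often. The keyword below is taken from the diff; the surrounding dict is only an illustration of overriding the new default.

inferencer_kwargs = {
    'output_json_filepath': './icl_inference_output',
    'output_json_filename': 'predictions',
    # Explicit override: None matches the old default; an integer such as
    # 100 would save intermediate results every 100 iterations instead of 1.
    'save_every': None,
}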