Commit 0e5f2734 authored by Yoach Lacombe's avatar Yoach Lacombe
Browse files

remove useless parameters from training config

parent b10e5625
......@@ -60,5 +60,8 @@ if __name__ == "__main__":
model.generation_config.max_length = int(30 * model.audio_encoder.config.frame_rate)
model.generation_config.do_sample = True # True
model.generation_config.guidance_scale = 1 # 3.0
model.config.pad_token_id = encodec_vocab_size
model.config.decoder_start_token_id = encodec_vocab_size+1
model.save_pretrained(os.path.join(args.save_directory, "tiny-model"))
......@@ -60,5 +60,8 @@ if __name__ == "__main__":
model.generation_config.max_length = int(30 * model.audio_encoder.config.frame_rate)
model.generation_config.do_sample = True # True
model.generation_config.guidance_scale = 1 # 3.0
model.config.pad_token_id = encodec_vocab_size
model.config.decoder_start_token_id = encodec_vocab_size+1
model.save_pretrained(os.path.join(args.save_directory, "parler-tts-untrained-300M/"))
......@@ -37,9 +37,6 @@
"preprocessing_num_workers": 8,
"pad_token_id": 1024,
"decoder_start_token_id": 1025,
"do_train": true,
"num_train_epochs": 50,
"gradient_accumulation_steps": 1,
......
......@@ -39,9 +39,6 @@
"preprocessing_num_workers": 8,
"pad_token_id": 1024,
"decoder_start_token_id": 1025,
"do_train": true,
"num_train_epochs": 40,
"gradient_accumulation_steps": 1,
......
......@@ -124,8 +124,6 @@ accelerate launch ./training/run_parler_tts_training.py \
--add_audio_samples_to_wandb true \
--id_column_name "id" \
--preprocessing_num_workers 8 \
--pad_token_id 1024 \
--decoder_start_token_id 1025 \
--do_train true \
--num_train_epochs 50 \
--gradient_accumulation_steps 1 \
......
......@@ -235,11 +235,11 @@ class ModelArguments:
metadata={"help": "Whether to freeze the text encoder."},
)
do_sample: bool = field(
default=False,
default=True,
metadata={"help": "Whether to do sampling or greedy decoding."},
)
temperature: float = field(
default=0.4,
default=1.0,
metadata={"help": "Temperature if sampling."},
)
max_length: int = field(
......@@ -1018,10 +1018,10 @@ def main():
{
"pad_token_id": model_args.pad_token_id
if model_args.pad_token_id is not None
else model.config.pad_token_id,
else config.pad_token_id,
"decoder_start_token_id": model_args.decoder_start_token_id
if model_args.decoder_start_token_id is not None
else model.config.decoder_start_token_id,
else config.decoder_start_token_id,
}
)
......
Markdown is supported
0% — attach a file by drag & drop or click to upload.
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment