{
    "model_name_or_path": "/home/yoach/dataspeech/artefacts/tiny-model/",
    "feature_extractor_name": "facebook/encodec_24khz",
    "description_tokenizer_name": "t5-base",
    "prompt_tokenizer_name": "t5-base",

    "push_to_hub": false,
    "hub_model_id": "stable-speech-mini",
    "report_to": ["wandb"],
    "overwrite_output_dir": true,
    "output_dir": "/home/yoach/dataspeech/artefacts/training/",

    "train_dataset_name": "blabble-io/libritts_r+blabble-io/libritts_r+blabble-io/libritts_r",
    "train_metadata_dataset_name": "stable-speech/libritts-r-tags-and-text-generated+stable-speech/libritts-r-tags-and-text-generated+stable-speech/libritts-r-tags-and-text-generated",
    "train_dataset_config_name": "clean+clean+other",
    "train_split_name": "train.clean.360+train.clean.100+train.other.500",

    "eval_dataset_name": "blabble-io/libritts_r+blabble-io/libritts_r",
    "eval_metadata_dataset_name": "stable-speech/libritts-r-tags-and-text-generated+stable-speech/libritts-r-tags-and-text-generated",
    "eval_dataset_config_name": "clean+other",
    "eval_split_name": "test.clean+test.other",

    "target_audio_column_name": "audio",
    "description_column_name": "text_description",
    "prompt_column_name": "text",

    "max_train_samples": 1000,
    "max_eval_samples": 200,

    "max_duration_in_seconds": 20,
    "min_duration_in_seconds": 1.0,

    "add_audio_samples_to_wandb": true,
    "id_column_name": "id",

    "preprocessing_num_workers": 1,

    "pad_token_id": 2048,
    "decoder_start_token_id": 2048,

    "do_train": true,
    "num_train_epochs": 20,
    "gradient_accumulation_steps": 1,
    "gradient_checkpointing": true,
    "per_device_train_batch_size": 16,

    "learning_rate": 1e-6,
    "adam_beta1": 0.9,
    "adam_beta2": 0.95,
    "weight_decay": 0.1,

    "logging_steps": 25,

    "do_eval": true,
    "predict_with_generate": true,
    "include_inputs_for_metrics": true,
    "evaluation_strategy": "epoch",
    "per_device_eval_batch_size": 16,

    "generation_max_length": 400,

    "fp16": true,
    "seed": 456,

    "dataloader_num_workers": 8
}