{
    "infer_steps": 50,
    "inference_hyper": {
        "cfg_text_scale": 4.0,
        "cfg_img_scale": 1.0,
        "cfg_interval": [0.4, 1.0],
        "timestep_shift": 3.0,
        "num_timesteps": 50,
        "cfg_renorm_min": 0.0,
        "cfg_renorm_type": "global"
    },
    "think": false,
    "understanding_output": false,
    "do_sample": false,
    "text_temperature": 0.3,
    "max_think_token_n": 1000,
    "interpolate_pos": false,
    "visual_gen": true,
    "visual_und": true,
    "vit_max_num_patch_per_side": 70,
    "connector_act": "gelu_pytorch_tanh",
    "latent_patch_size": 2,
    "max_latent_size_update": 64,
    "llm_config_update": {
        "dtype": "bfloat16",
        "freeze_und": false,
        "is_causal": true,
        "layer_module": "Qwen2MoTDecoderLayer",
        "rope_scaling": null,
        "sliding_window": null
    }
}