 50%|████████████████████                    | 1500/3000 [1:34:10<45:25, 1.82s/it]
Saving model checkpoint to output/AdvertiseGenLoRA/20240220145905/tmp-checkpoint-1500
/usr/local/lib/python3.8/site-packages/peft/utils/save_and_load.py:148: UserWarning: Could not find a config file in checkpoint/miniCPM-bf16/ - will assume that the vocabulary was not modified.
warnings.warn(
tokenizer config file saved in output/AdvertiseGenLoRA/20240220145905/tmp-checkpoint-1500/tokenizer_config.json
Special tokens file saved in output/AdvertiseGenLoRA/20240220145905/tmp-checkpoint-1500/special_tokens_map.json
[2024-02-20 16:35:20,179] [INFO] [logging.py:96:log_dist] [Rank 0] [Torch] Checkpoint global_step1500 is about to be saved!
/usr/local/lib/python3.8/site-packages/torch/nn/modules/module.py:1879: UserWarning: Positional args are being deprecated, use kwargs instead. Refer to https://pytorch.org/docs/master/generated/torch.nn.Module.html#torch.nn.Module.state_dict for details.
warnings.warn(
[2024-02-20 16:35:20,222] [INFO] [logging.py:96:log_dist] [Rank 0] Saving model checkpoint: output/AdvertiseGenLoRA/20240220145905/tmp-checkpoint-1500/global_step1500/zero_pp_rank_0_mp_rank_00_model_states.pt
 67%|███████████████████████████             | 2000/3000 [2:06:57<34:29, 2.07s/it]
Saving model checkpoint to output/AdvertiseGenLoRA/20240220145905/tmp-checkpoint-2000
/usr/local/lib/python3.8/site-packages/peft/utils/save_and_load.py:148: UserWarning: Could not find a config file in checkpoint/miniCPM-bf16/ - will assume that the vocabulary was not modified.
warnings.warn(
tokenizer config file saved in output/AdvertiseGenLoRA/20240220145905/tmp-checkpoint-2000/tokenizer_config.json
Special tokens file saved in output/AdvertiseGenLoRA/20240220145905/tmp-checkpoint-2000/special_tokens_map.json
[2024-02-20 17:08:07,683] [INFO] [logging.py:96:log_dist] [Rank 0] [Torch] Checkpoint global_step2000 is about to be saved!
/usr/local/lib/python3.8/site-packages/torch/nn/modules/module.py:1879: UserWarning: Positional args are being deprecated, use kwargs instead. Refer to https://pytorch.org/docs/master/generated/torch.nn.Module.html#torch.nn.Module.state_dict for details.
warnings.warn(
[2024-02-20 17:08:07,726] [INFO] [logging.py:96:log_dist] [Rank 0] Saving model checkpoint: output/AdvertiseGenLoRA/20240220145905/tmp-checkpoint-2000/global_step2000/zero_pp_rank_0_mp_rank_00_model_states.pt
 83%|█████████████████████████████████       | 2500/3000 [2:39:31<14:50, 1.78s/it]
Saving model checkpoint to output/AdvertiseGenLoRA/20240220145905/tmp-checkpoint-2500
/usr/local/lib/python3.8/site-packages/peft/utils/save_and_load.py:148: UserWarning: Could not find a config file in checkpoint/miniCPM-bf16/ - will assume that the vocabulary was not modified.
warnings.warn(
tokenizer config file saved in output/AdvertiseGenLoRA/20240220145905/tmp-checkpoint-2500/tokenizer_config.json
Special tokens file saved in output/AdvertiseGenLoRA/20240220145905/tmp-checkpoint-2500/special_tokens_map.json
[2024-02-20 17:40:42,775] [INFO] [logging.py:96:log_dist] [Rank 0] [Torch] Checkpoint global_step2500 is about to be saved!
/usr/local/lib/python3.8/site-packages/torch/nn/modules/module.py:1879: UserWarning: Positional args are being deprecated, use kwargs instead. Refer to https://pytorch.org/docs/master/generated/torch.nn.Module.html#torch.nn.Module.state_dict for details.
warnings.warn(
[2024-02-20 17:40:42,818] [INFO] [logging.py:96:log_dist] [Rank 0] Saving model checkpoint: output/AdvertiseGenLoRA/20240220145905/tmp-checkpoint-2500/global_step2500/zero_pp_rank_0_mp_rank_00_model_states.pt
100%|████████████████████████████████████████| 3000/3000 [3:10:54<00:00, 1.77s/it]
Saving model checkpoint to output/AdvertiseGenLoRA/20240220145905/tmp-checkpoint-3000
/usr/local/lib/python3.8/site-packages/peft/utils/save_and_load.py:148: UserWarning: Could not find a config file in checkpoint/miniCPM-bf16/ - will assume that the vocabulary was not modified.
warnings.warn(
tokenizer config file saved in output/AdvertiseGenLoRA/20240220145905/tmp-checkpoint-3000/tokenizer_config.json
Special tokens file saved in output/AdvertiseGenLoRA/20240220145905/tmp-checkpoint-3000/special_tokens_map.json
[2024-02-20 18:12:04,484] [INFO] [logging.py:96:log_dist] [Rank 0] [Torch] Checkpoint global_step3000 is about to be saved!
/usr/local/lib/python3.8/site-packages/torch/nn/modules/module.py:1879: UserWarning: Positional args are being deprecated, use kwargs instead. Refer to https://pytorch.org/docs/master/generated/torch.nn.Module.html#torch.nn.Module.state_dict for details.
warnings.warn(
[2024-02-20 18:12:04,524] [INFO] [logging.py:96:log_dist] [Rank 0] Saving model checkpoint: output/AdvertiseGenLoRA/20240220145905/tmp-checkpoint-3000/global_step3000/zero_pp_rank_0_mp_rank_00_model_states.pt
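
Training finishes at step 3000 with the last checkpoint written to output/AdvertiseGenLoRA/20240220145905/tmp-checkpoint-3000. Below is a minimal sketch of loading the resulting LoRA adapter for a quick inference check. It assumes the adapter files (adapter_config.json plus the adapter weights) and the tokenizer were saved into that checkpoint directory as the log indicates, and that the base model lives at checkpoint/miniCPM-bf16/, the path shown in the PEFT warning; the AdvertiseGen-style prompt is illustrative only.

```python
# Sketch: load the trained LoRA adapter on top of the base MiniCPM weights.
# Paths are taken from the log above; adjust if the trainer renamed
# tmp-checkpoint-3000 to checkpoint-3000 after the run completed.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer
from peft import PeftModel

base_path = "checkpoint/miniCPM-bf16"  # base model path (from the PEFT warning)
adapter_path = "output/AdvertiseGenLoRA/20240220145905/tmp-checkpoint-3000"

# Tokenizer config and special tokens were saved alongside the adapter.
tokenizer = AutoTokenizer.from_pretrained(adapter_path, trust_remote_code=True)

model = AutoModelForCausalLM.from_pretrained(
    base_path, torch_dtype=torch.bfloat16, trust_remote_code=True
)
model = PeftModel.from_pretrained(model, adapter_path)  # attach the LoRA adapter
model = model.merge_and_unload()  # optional: fold LoRA weights into the base model
model.eval()

# Illustrative AdvertiseGen-style prompt, not taken from the log.
inputs = tokenizer("类型#上衣*材质#牛仔布", return_tensors="pt")
with torch.no_grad():
    outputs = model.generate(**inputs, max_new_tokens=64)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))
```

Merging with merge_and_unload() is optional; keeping the adapter separate via PeftModel is equally valid and makes it easier to swap adapters later.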