dpo_fsdp_train.py 302 Bytes
Newer Older
chenzk's avatar
v1.0  
chenzk committed
1
2
3
4
5
6
7
8
9
10
from allamo.configuration import AllamoConfiguration
from allamo.trainer.dpo_fsdp_trainer import DPOTrainer

if __name__ == '__main__':
    # Entry point: run DPO training with the FSDP-based trainer.
    config = AllamoConfiguration()
    # Force the training mode to DPO regardless of what the loaded
    # configuration says, since this script is DPO-specific.
    config.training_type = 'dpo'
    trainer = DPOTrainer(config)
    trainer.init_wandb()
    try:
        trainer.train()
    finally:
        # Release trainer resources even if training raises, so the
        # process does not exit with dangling handles (previously
        # close() was skipped on any exception from train()).
        trainer.close()