# v1_dpo_demo.yaml
---
# DPO demo dataset entry.
# NOTE(review): the key says "zh" (Chinese) but the referenced dataset is
# HuggingFaceH4/orca_dpo_pairs, which is English — confirm the key name
# matches what the consuming loader expects before renaming anything.
dpo_zh_demo:
  # Hugging Face Hub dataset identifier to pull pairs from.
  path: HuggingFaceH4/orca_dpo_pairs
  # Dataset split to load; orca_dpo_pairs ships a "train_prefs" split.
  split: train_prefs
  # Converter applied by the consumer; "pair" presumably maps chosen/rejected
  # preference pairs — verify against the loader's converter registry.
  converter: pair