#!/bin/bash
# LoRA SFT run with LLaMA-Factory, launched via DeepSpeed on local GPUs 3 and 4
# with a randomly chosen master port to avoid collisions with other jobs.
deepspeed --master_port $(shuf -n 1 -i 10000-65535) --include="localhost:3,4" /DISC-FinLLM_ModelZoo/LLaMA-Factory/src/train_bash.py \
    --deepspeed /DISC-FinLLM_ModelZoo/LLaMA-Factory/deepspeed.json \
    --stage sft \
    --do_train \
    --model_name_or_path /DISC-FinLLM_ModelZoo/FinLLM \
    --dataset retrieval_part \
    --dataset_dir /DISC-FinLLM_ModelZoo/LLaMA-Factory/data \
    --template baichuan2 \
    --finetuning_type lora \
    --lora_target all \
    --output_dir /DISC-FinLLM_ModelZoo/saves/lora_multi \
    --overwrite_output_dir \
    --cutoff_len 1024 \
    --preprocessing_num_workers 4 \
    --per_device_train_batch_size 16 \
    --per_device_eval_batch_size 4 \
    --gradient_accumulation_steps 1 \
    --lr_scheduler_type cosine \
    --logging_steps 10 \
    --warmup_steps 20 \
    --save_steps 100 \
    --eval_steps 100 \
    --evaluation_strategy steps \
    --learning_rate 5e-5 \
    --num_train_epochs 10.0 \
    --max_samples 3000 \
    --val_size 0.1 \
    --ddp_timeout 180000000 \
    --plot_loss \
    --fp16
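
# The launch above reads a DeepSpeed config from
# /DISC-FinLLM_ModelZoo/LLaMA-Factory/deepspeed.json, whose contents are not
# shown here. A minimal ZeRO stage-2 config compatible with the flags used
# above (fp16 enabled, batch sizes and accumulation delegated to the HF
# Trainer via "auto") might look like the sketch below; these values are an
# assumption for illustration, not the repo's actual file:
#
# {
#   "train_batch_size": "auto",
#   "train_micro_batch_size_per_gpu": "auto",
#   "gradient_accumulation_steps": "auto",
#   "fp16": { "enabled": "auto" },
#   "zero_optimization": { "stage": 2 }
# }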