Commit 6fc1b02f authored by zihanl's avatar zihanl
Browse files

update commands

parent a9738f63
......@@ -10,8 +10,8 @@ DISTRIBUTED_ARGS="--nproc_per_node $WORLD_SIZE \
--master_addr localhost \
--master_port 6000"
OUTPUT_PATH=<SPECIFIC_PATH_FOR_THE_OUTPUT_GENERATION>
GROUND_TRUTH_PATH=<SPECIFIC_PATH_FOR_THE_GROUND_TRUTH>
OUTPUT_PATH=<PATH_OF_THE_OUTPUT_GENERATION>
GROUND_TRUTH_PATH=<PATH_OF_THE_GROUND_TRUTH>
python -m torch.distributed.launch $DISTRIBUTED_ARGS ./tasks/main.py \
--num-layers 24 \
......
......@@ -12,12 +12,12 @@ DISTRIBUTED_ARGS="--nproc_per_node $WORLD_SIZE \
--master_addr localhost \
--master_port 6000"
CHECKPOINT_PATH=<Specify path for the language model>
OUTPUT_MODEL_PATH=<Specify path for the saved model>
VOCAB_PATH=<Specify path for the vocab file>
MERGE_PATH=<Specify path for the merge file>
TRAIN_PATH=<Specify path for the training dataset>
TEST_PATH=<Specify path for the test dataset>
CHECKPOINT_PATH=<PATH_OF_THE_LANGUAGE_MODEL>
OUTPUT_MODEL_PATH=<PATH_OF_THE_SAVED_MODEL>
VOCAB_PATH=<PATH_OF_THE_VOCAB_FILE>
MERGE_PATH=<PATH_OF_THE_MERGE_FILE>
TRAIN_PATH=<PATH_OF_THE_TRAINING_DATASET>
TEST_PATH=<PATH_OF_THE_TEST_DATASET>
python -m torch.distributed.launch $DISTRIBUTED_ARGS ./tasks/main.py \
--num-layers 24 \
......
......@@ -12,12 +12,12 @@ DISTRIBUTED_ARGS="--nproc_per_node $WORLD_SIZE \
--master_addr localhost \
--master_port 6000"
CHECKPOINT_PATH=<Specify path for the language model>
OUTPUT_MODEL_PATH=<Specify path for the saved model>
VOCAB_PATH=<Specify path for the vocab file>
MERGE_PATH=<Specify path for the merge file>
TRAIN_PATH=<Specify path for the training dataset>
TEST_PATH=<Specify path for the test dataset>
CHECKPOINT_PATH=<PATH_OF_THE_LANGUAGE_MODEL>
OUTPUT_MODEL_PATH=<PATH_OF_THE_SAVED_MODEL>
VOCAB_PATH=<PATH_OF_THE_VOCAB_FILE>
MERGE_PATH=<PATH_OF_THE_MERGE_FILE>
TRAIN_PATH=<PATH_OF_THE_TRAINING_DATASET>
TEST_PATH=<PATH_OF_THE_TEST_DATASET>
python -m torch.distributed.launch $DISTRIBUTED_ARGS ./tasks/main.py \
--num-layers 24 \
......
......@@ -12,12 +12,12 @@ DISTRIBUTED_ARGS="--nproc_per_node $WORLD_SIZE \
--master_addr localhost \
--master_port 6000"
CHECKPOINT_PATH=<Specify path for the language model>
INPUT_PATH=<Specific path for the input test dataset>
VOCAB_PATH=<Specify path for the vocab file>
MERGE_PATH=<Specify path for the merge file>
OUTPUT_PATH=<Speicifc path for the output>
PROMPT_PATH=<Specific path for the prompts>
CHECKPOINT_PATH=<PATH_OF_THE_LANGUAGE_MODEL>
INPUT_PATH=<PATH_OF_THE_INPUT_TEST_DATA_FILE>
VOCAB_PATH=<PATH_OF_THE_VOCAB_FILE>
MERGE_PATH=<PATH_OF_THE_MERGE_FILE>
OUTPUT_PATH=<PATH_OF_THE_OUTPUT_GENERATION_FILE>
PROMPT_PATH=<PATH_OF_THE_KNOWLEDGE_GENERATION_PROMPTS>
python -m torch.distributed.launch $DISTRIBUTED_ARGS ./tasks/main.py \
--num-layers 24 \
......
......@@ -13,12 +13,12 @@ DISTRIBUTED_ARGS="--nproc_per_node $WORLD_SIZE \
--master_addr localhost \
--master_port 6000"
CHECKPOINT_PATH=<Specify path for the language model>
INPUT_PATH=<Specific path for the input test dataset>
VOCAB_PATH=<Specify path for the vocab file>
MERGE_PATH=<Specify path for the merge file>
OUTPUT_PATH=<Speicifc path for the output>
PROMPT_PATH=<Specific path for the prompts>
CHECKPOINT_PATH=<PATH_OF_THE_LANGUAGE_MODEL>
INPUT_PATH=<PATH_OF_THE_INPUT_TEST_DATA_FILE>
VOCAB_PATH=<PATH_OF_THE_VOCAB_FILE>
MERGE_PATH=<PATH_OF_THE_MERGE_FILE>
OUTPUT_PATH=<PATH_OF_THE_OUTPUT_GENERATION_FILE>
PROMPT_PATH=<PATH_OF_THE_RESPONSE_GENERATION_PROMPTS>
python -m torch.distributed.launch $DISTRIBUTED_ARGS ./tasks/main.py \
--num-layers 24 \
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment