"model/git@developer.sourcefind.cn:OpenDAS/ollama.git" did not exist on "139f84cf21f8d8107f69c1404f17a8840c6d67d0"
Commit b07aadfe authored by zhougaofeng

Update inference/7B_single_dcu.py, result/all_results.json, result/training_loss.png, result/train_results.json files
parent 5c00723a

inference/7B_single_dcu.py

import torch
from transformers import AutoTokenizer, AutoModelForCausalLM, GenerationConfig

model_name = "deepseek-ai/deepseek-math-7b-base"  # <your-model-path>

# Load the tokenizer and model; bfloat16 with device_map="auto" places the
# weights on the available accelerator (a single DCU here).
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name, torch_dtype=torch.bfloat16, device_map="auto")

# Use the model's shipped generation config and pad with the EOS token.
model.generation_config = GenerationConfig.from_pretrained(model_name)
model.generation_config.pad_token_id = model.generation_config.eos_token_id

# Generate a short completion for a sample math prompt and print it.
text = "The integral of x^2 from 0 to 2 is"
inputs = tokenizer(text, return_tensors="pt")
outputs = model.generate(**inputs.to(model.device), max_new_tokens=100)
result = tokenizer.decode(outputs[0], skip_special_tokens=True)
print(result)
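
As a quick, hypothetical sanity check that is not part of the committed script, the prompt's integral has a closed form, so the model's continuation can be compared against the exact value:

# Hypothetical check, separate from inference/7B_single_dcu.py: the prompt
# asks for the definite integral of x^2 from 0 to 2, which evaluates to
# x^3/3 at the bounds, i.e. 8/3.
expected = 2 ** 3 / 3  # ≈ 2.6667
print(f"Reference answer for the prompt: 8/3 ≈ {expected:.4f}")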

result/all_results.json

{
"epoch": 3.0,
"eval_loss": 1.1501325368881226,
"eval_runtime": 24.1564,
"eval_samples_per_second": 4.14,
"eval_steps_per_second": 2.07,
"train_loss": 1.04150841889558,
"train_runtime": 1708.8196,
"train_samples_per_second": 1.58,
"train_steps_per_second": 0.395
}
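
The throughput figures above imply the approximate dataset sizes used in this run. Below is a small sketch (assuming the file is read from the committed result/ directory) that loads result/all_results.json and derives them; any small discrepancy comes from the rounding already present in the metrics.

import json

# Load the combined train/eval metrics written out by the run.
with open("result/all_results.json") as f:
    metrics = json.load(f)

# runtime x throughput recovers approximate sample counts:
# 1.58 * 1708.8 ≈ 2700 training samples over 3 epochs (~900 per epoch),
# 4.14 * 24.2 ≈ 100 evaluation samples.
train_samples = metrics["train_samples_per_second"] * metrics["train_runtime"]
eval_samples = metrics["eval_samples_per_second"] * metrics["eval_runtime"]
print(f"~{train_samples:.0f} training samples across {metrics['epoch']:.0f} epochs")
print(f"~{eval_samples:.0f} evaluation samples")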

result/train_results.json

{
"epoch": 3.0,
"train_loss": 1.04150841889558,
"train_runtime": 1708.8196,
"train_samples_per_second": 1.58,
"train_steps_per_second": 0.395
}
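
result/train_results.json repeats the train_* subset of all_results.json. The remaining committed file, result/training_loss.png, is a binary image and is not reproduced here; a curve like it is typically regenerated from the Trainer's step-level log history. A minimal sketch follows, assuming a trainer_state.json was saved next to these files (that path and file are an assumption, not part of this commit).

import json
import matplotlib.pyplot as plt

# Hypothetical input: trainer_state.json is written by the Hugging Face
# Trainer when state is saved, but it is not included in this commit.
with open("result/trainer_state.json") as f:
    state = json.load(f)

# Keep only log entries that carry a training loss (eval entries do not).
train_logs = [e for e in state["log_history"] if "loss" in e]
steps = [e["step"] for e in train_logs]
losses = [e["loss"] for e in train_logs]

plt.plot(steps, losses)
plt.xlabel("step")
plt.ylabel("training loss")
plt.savefig("result/training_loss.png")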