from transformers import AutoModelForCausalLM, AutoTokenizer

model_id = "/home/temp_model/Codestral-22B-v0.1"

# Load the tokenizer and model; device_map="auto" shards the weights
# across the available GPUs (and CPU, if needed) via accelerate.
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(model_id, device_map="auto")

text = "Write me a function that computes fibonacci in Rust"

# Move the tokenized prompt to the model's device to avoid a CPU/GPU mismatch.
inputs = tokenizer(text, return_tensors="pt").to(model.device)

outputs = model.generate(**inputs, max_new_tokens=512)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))
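
# Optional: since the prompt is an instruction, it may work better when routed
# through the tokenizer's chat template. A minimal sketch, assuming this
# checkpoint ships a chat template (as Mistral-family instruct models do):
messages = [{"role": "user", "content": text}]
chat_inputs = tokenizer.apply_chat_template(
    messages, add_generation_prompt=True, return_tensors="pt"
).to(model.device)

chat_outputs = model.generate(chat_inputs, max_new_tokens=512)
print(tokenizer.decode(chat_outputs[0], skip_special_tokens=True))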