t83714 committed on
Commit 762cf8c · 1 Parent(s): 896bd0e

update readme.md

Files changed (1):
1. README.md +3 -3
README.md CHANGED
@@ -51,16 +51,16 @@ pip install torch transformers
 from transformers import AutoModelForCausalLM, AutoTokenizer
 
 model_name = "t83714/llama-3.1-8b-instruct-limo"
-model = AutoModelForCausalLM.from_pretrained(model_name)
+model = AutoModelForCausalLM.from_pretrained(model_name, torch_dtype="auto", device_map="auto")
 tokenizer = AutoTokenizer.from_pretrained(model_name)
 
 prompt = "How much is (2+5)x5/7"
 
 # Tokenize the input
-inputs = tokenizer(prompt, return_tensors="pt")
+inputs = tokenizer(prompt, return_tensors="pt").to("cuda")
 
 # Generate the output
-output = model.generate(**inputs, max_length=200)
+output = model.generate(**inputs, max_length=8000)
 print(tokenizer.decode(output[0], skip_special_tokens=True))
 ```
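For reference, below is a minimal runnable sketch of the snippet as it reads after this change. It assumes a CUDA-capable GPU and that the accelerate package is installed (needed for device_map="auto"); using model.device instead of a hard-coded "cuda" is an illustrative tweak, not part of the README.

```python
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

model_name = "t83714/llama-3.1-8b-instruct-limo"

# Load with automatic dtype selection and device placement
# (device_map="auto" requires the accelerate package).
model = AutoModelForCausalLM.from_pretrained(model_name, torch_dtype="auto", device_map="auto")
tokenizer = AutoTokenizer.from_pretrained(model_name)

prompt = "How much is (2+5)x5/7"

# Tokenize the input and move it to the model's device
# (equivalent to the README's .to("cuda") when a GPU is present).
inputs = tokenizer(prompt, return_tensors="pt").to(model.device)

# Generate the output; max_length counts prompt plus generated tokens,
# matching the README's raised limit of 8000.
output = model.generate(**inputs, max_length=8000)
print(tokenizer.decode(output[0], skip_special_tokens=True))
```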