saheedniyi committed
Update README.md
README.md CHANGED
@@ -35,14 +35,18 @@ The model was built in an attempt to "Nigerialize" Llama-3, giving it a Nigerian
 Use the code below to get started with the model.
 
 ```python
+# necessary installations
+!pip install bitsandbytes peft accelerate
+
 from transformers import AutoModelForCausalLM, AutoTokenizer
 
 tokenizer = AutoTokenizer.from_pretrained("saheedniyi/Llama3-8b-Naija_v1")
 model = AutoModelForCausalLM.from_pretrained("saheedniyi/Llama3-8b-Naija_v1")
 
-input_text = "What
+input_text = "What are the top locations for tourism in Nigeria?"
+formatted_prompt = input_text = f"### BEGIN CONVERSATION ###\n\n## User: ##\n{input_text}\n\n## Assistant: ##\n"
 inputs = tokenizer(input_text, return_tensors="pt")
-outputs = model.generate(**inputs)
+outputs = model.generate(**inputs.to("cuda"), max_new_tokens=512, pad_token_id=tokenizer.pad_token_id, do_sample=True, temperature=0.6, top_p=0.9)
 
 print(tokenizer.decode(outputs[0], skip_special_tokens=True))
 ```
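The updated snippet installs bitsandbytes, peft and accelerate but still loads the model in full precision and assumes the weights already sit on the GPU (`**inputs.to("cuda")`). Below is a minimal end-to-end sketch of how the pieces could fit together, assuming the extra installs are intended for quantized loading: the 4-bit `BitsAndBytesConfig`, `device_map="auto"`, and the slicing that strips the echoed prompt before decoding are illustrative additions, not part of the model card.

```python
# Hedged sketch: quantized loading plus the card's conversation template.
# The 4-bit config and device handling are assumptions for illustration.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer, BitsAndBytesConfig

model_id = "saheedniyi/Llama3-8b-Naija_v1"

tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(
    model_id,
    device_map="auto",                       # place weights on the available GPU(s)
    quantization_config=BitsAndBytesConfig(  # assumed use of the bitsandbytes install
        load_in_4bit=True,
        bnb_4bit_compute_dtype=torch.bfloat16,
    ),
)

question = "What are the top locations for tourism in Nigeria?"
# Conversation template from the updated snippet, reproduced verbatim.
prompt = f"### BEGIN CONVERSATION ###\n\n## User: ##\n{question}\n\n## Assistant: ##\n"

inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
outputs = model.generate(
    **inputs,
    max_new_tokens=512,
    pad_token_id=tokenizer.pad_token_id,
    do_sample=True,
    temperature=0.6,
    top_p=0.9,
)

# Decode only the newly generated tokens so the echoed prompt is stripped.
reply = tokenizer.decode(outputs[0][inputs["input_ids"].shape[-1]:], skip_special_tokens=True)
print(reply)
```

Decoding from `inputs["input_ids"].shape[-1]` onward is a readability choice; decoding `outputs[0]` in full, as the snippet does, simply echoes the prompt ahead of the assistant's reply.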