Update app.py
app.py CHANGED
@@ -3,7 +3,13 @@ import os
 
 from transformers import AutoModelForCausalLM, AutoTokenizer
 
+print(os.environ['HF_TOKEN'])
+
 peft_model_id = "samidh/samidh-cope-gemma2b-hs-2c-skr-s1.5.9.d25"
-model = AutoModelForCausalLM.from_pretrained(
+model = AutoModelForCausalLM.from_pretrained(
+    peft_model_id,
+    use_auth=True,
+    hf_token=os.environ['HF_TOKEN']
+)
 
 #gr.load("models/samidh/cope-gemma2b-hs-2c-skr-s1.5.9.d25", hf_token=os.environ['HF_TOKEN']).launch()
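For context, `use_auth=True` and `hf_token=` are not recognized keyword arguments of `AutoModelForCausalLM.from_pretrained`; the Hub token is normally passed as `token=` (or `use_auth_token=` on older transformers releases), and printing `HF_TOKEN` writes the secret to the Space logs. Below is a minimal sketch of an equivalent load, assuming a recent transformers with `peft` installed (so the adapter repo resolves to its base model) and that the `HF_TOKEN` secret is configured for the Space.

import os

from transformers import AutoModelForCausalLM, AutoTokenizer

peft_model_id = "samidh/samidh-cope-gemma2b-hs-2c-skr-s1.5.9.d25"

# Pass the Hub token via `token=`; with `peft` installed, transformers can load
# the adapter repo directly and attach it to the base model it was trained from.
model = AutoModelForCausalLM.from_pretrained(
    peft_model_id,
    token=os.environ["HF_TOKEN"],
)

# Tokenizer from the same repo (assumes the adapter repo ships tokenizer files;
# otherwise load it from the base model id instead).
tokenizer = AutoTokenizer.from_pretrained(
    peft_model_id,
    token=os.environ["HF_TOKEN"],
)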