import gradio as gr
import os

from transformers import AutoModelForCausalLM, AutoTokenizer

# Confirm the access token is set without echoing the secret into the Space logs.
assert 'HF_TOKEN' in os.environ, 'HF_TOKEN environment variable is not set'

# Load the fine-tuned model, authenticating against the Hugging Face Hub with the access token.
peft_model_id = "samidh/cope-gemma2b-hs-2c-skr-s1.5.9.d25"
model = AutoModelForCausalLM.from_pretrained(
    peft_model_id,
    token=os.environ['HF_TOKEN']
)

#gr.load("models/samidh/cope-gemma2b-hs-2c-skr-s1.5.9.d25", hf_token=os.environ['HF_TOKEN']).launch()
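
# Minimal sketch of how the loaded model could back a Gradio text-generation demo.
# Assumes the tokenizer is published under the same repo id; the generation settings
# (max_new_tokens) and the interface layout are illustrative placeholders.
tokenizer = AutoTokenizer.from_pretrained(peft_model_id, token=os.environ['HF_TOKEN'])

def generate(prompt):
    # Tokenize the prompt, generate a continuation, and decode it back to text.
    inputs = tokenizer(prompt, return_tensors='pt').to(model.device)
    outputs = model.generate(**inputs, max_new_tokens=128)
    return tokenizer.decode(outputs[0], skip_special_tokens=True)

demo = gr.Interface(fn=generate, inputs='text', outputs='text')
demo.launch()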