Update app.py
app.py CHANGED
@@ -1,16 +1,19 @@
 import gradio as gr
 from transformers import AutoModelForCausalLM, AutoTokenizer
 import torch
+import os
 
 # Model and tokenizer paths
 model_path = "rajj0/autotrain-phi3-midium-4k-godsent-orpo-6"
+hf_token = os.getenv("yaml")  # Get the token from environment variables
 
 # Load the tokenizer and model
-tokenizer = AutoTokenizer.from_pretrained(model_path)
+tokenizer = AutoTokenizer.from_pretrained(model_path, use_auth_token=hf_token)
 model = AutoModelForCausalLM.from_pretrained(
     model_path,
     device_map="auto",
-    torch_dtype='auto'
+    torch_dtype='auto',
+    use_auth_token=hf_token
 ).eval()
 
 # Function to generate a response from the model
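
For reference, below is a minimal standalone sketch of the same gated-model loading flow. It is not part of this commit: it assumes the access token is stored as a Space secret exposed to the app as an environment variable (here apparently named "yaml"), and it uses the token= argument that recent transformers releases accept in place of the older use_auth_token=.

# Minimal sketch of the loading flow above, outside the diff context.
# Assumes the Hugging Face access token is stored as a Space secret named
# "yaml" and is available to the process as an environment variable.
import os

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

model_path = "rajj0/autotrain-phi3-midium-4k-godsent-orpo-6"
hf_token = os.getenv("yaml")  # token for gated/private model access

tokenizer = AutoTokenizer.from_pretrained(model_path, token=hf_token)
model = AutoModelForCausalLM.from_pretrained(
    model_path,
    device_map="auto",
    torch_dtype="auto",
    token=hf_token,
).eval()

If the secret is not set, os.getenv returns None and from_pretrained falls back to anonymous access, which will fail for gated or private repositories.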