Update app.py
app.py CHANGED
@@ -2,7 +2,7 @@
 # app.py
 
 import gradio as gr
-from transformers import AutoTokenizer
+from transformers import AutoTokenizer, AutoModelForCausalLM
 from groq import Groq
 import os
 from huggingface_hub import login
@@ -22,8 +22,16 @@ if hf_token is None:
 login(token=hf_token)
 
 # Model identifier for Groq API (you can replace it with your HF model if needed)
+# Model identifier
 model_name = "asthaa30/l3.1"
 
+# Load tokenizer and model directly
+try:
+    tokenizer = AutoTokenizer.from_pretrained(model_name)
+    model = AutoModelForCausalLM.from_pretrained(model_name)
+except Exception as e:
+    raise ValueError(f"Failed to load model or tokenizer: {e}")
+
 # Load tokenizer (model will be accessed via Groq API)
 try:
     tokenizer = AutoTokenizer.from_pretrained(model_name, token=hf_token)
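
Note that after this commit the file loads the tokenizer twice: once without a token in the new block, and once with token=hf_token in the older Groq-era block that was left in place. For context, here is a minimal sketch of how the directly loaded model and tokenizer could be used for local generation instead of the Groq client; the prompt text and generation parameters below are illustrative assumptions, not part of the commit.

# Hypothetical usage sketch (not in the commit): run generation locally
# with the tokenizer/model loaded above instead of calling the Groq API.
import torch

prompt = "Hello, how are you?"  # assumed example input
inputs = tokenizer(prompt, return_tensors="pt")

with torch.no_grad():
    output_ids = model.generate(
        **inputs,
        max_new_tokens=64,  # assumed generation budget
        do_sample=False,    # greedy decoding for reproducibility
    )

# Decode only the tokens generated after the prompt
reply = tokenizer.decode(
    output_ids[0][inputs["input_ids"].shape[-1]:],
    skip_special_tokens=True,
)
print(reply)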