Update app.py
app.py CHANGED
@@ -1,5 +1,5 @@
 import streamlit as st
-from
+from ctransformers import AutoModelForCausalLM, AutoTokenizer
 import torch
 
 # Streamlit page configuration
@@ -11,10 +11,8 @@ def load_model():
     model_name = "analist/deepseek-math-gguf"  # Replace with your model name
     tokenizer = AutoTokenizer.from_pretrained(model_name)
     model = AutoModelForCausalLM.from_pretrained(
-
-
-        device_map="cpu"
-    )
+        "analist/deepseek-math-gguf", model_file="model.gguf"
+    )
     return model, tokenizer
 
 def generate_response(prompt, model, tokenizer):
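
For reference, the commit switches model loading to ctransformers, which runs GGUF-quantized models on CPU. Below is a minimal sketch (not the committed app.py) of a load-and-generate path with ctransformers inside a Streamlit app; the repo id "analist/deepseek-math-gguf" and the file name "model.gguf" come from the diff, while model_type, the generation settings, and the UI strings are assumptions.

import streamlit as st
from ctransformers import AutoModelForCausalLM

@st.cache_resource  # cache the loaded model across Streamlit reruns
def load_model():
    # Sketch only: repo id and GGUF file name are taken from the diff;
    # model_type="llama" is an assumption about the model's architecture.
    model = AutoModelForCausalLM.from_pretrained(
        "analist/deepseek-math-gguf",
        model_file="model.gguf",
        model_type="llama",
    )
    return model

def generate_response(prompt, model):
    # ctransformers models are callable and handle tokenization, generation,
    # and decoding internally, so no separate tokenizer is needed in this path.
    return model(prompt, max_new_tokens=256, temperature=0.7)

model = load_model()
question = st.text_area("Question")
if st.button("Generate") and question:
    st.write(generate_response(question, model))

If a transformers-compatible tokenizer is still wanted (the diff keeps a tokenizer line), ctransformers supports loading the model with hf=True and then calling AutoTokenizer.from_pretrained(model) on the loaded model object rather than on the repo id.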