Spaces: Paused

Commit: Update app.py (Browse files)

app.py CHANGED

@@ -7,7 +7,7 @@ from threading import Thread
 7  import gradio as gr
 8  import spaces
 9  import torch
10 -from transformers import AutoModelForCausalLM, AutoTokenizer, TextIteratorStreamer
10 +from transformers import AutoModelForCausalLM, AutoTokenizer, TextIteratorStreamer, AutoModelForImageTextToText
11
12  DESCRIPTION = """# AlexHung29629/fix_magistral_fc2
13  開頭有<think>"""

@@ -46,7 +46,7 @@ CHAT_TEMPLATE="""{%- set default_system_message = "A user will ask you to solve
46
47  if torch.cuda.is_available():
48      model_id = "mistralai/Mistral-Small-24B-Instruct-2501"
49 -    model =
49 +    model = AutoModelForImageTextToText.from_pretrained("AlexHung29629/add_vision_4", torch_dtype=torch.bfloat16, device_map="auto")
50      tokenizer = AutoTokenizer.from_pretrained("mistralai/Mistral-Small-24B-Instruct-2501")
51
52