Update app.py
app.py
CHANGED
@@ -6,8 +6,7 @@ from transformers import AutoModelForCausalLM, AutoTokenizer, TextIteratorStreamer
 import os
 from threading import Thread
 
-
-MODEL_ID = "Nechba/Coin-Generative-Recognition"
+MODEL_ID = "THUDM/glm-4v-9b"
 
 TITLE = f'<br><center>π Coin Generative Recognition</a></center>'
 
@@ -38,18 +37,15 @@ img {
     max-height: 300px; /* Limit the height of images */
 }
 """
-import os
-# Directory where the model and tokenizer will be saved
 
 # Load model directly
-
-model =
-
-
-
-
-
-# ).to(0)
+
+model = AutoModelForCausalLM.from_pretrained(
+    MODEL_ID,
+    torch_dtype=torch.bfloat16,
+    low_cpu_mem_usage=True,
+    trust_remote_code=True
+).to(0)
 
 tokenizer = AutoTokenizer.from_pretrained(MODEL_ID, trust_remote_code=True)
 model.eval()
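
For context, the restored loading block feeds the streaming chat handler that the Thread and TextIteratorStreamer imports at the top of app.py already point to. Below is a minimal sketch of that pattern, not the Space's actual handler: the stream_reply helper is hypothetical, and the plain tokenizer(prompt, ...) call is a text-only placeholder, since THUDM/glm-4v-9b takes image inputs through its trust_remote_code chat template as described on its model card.

import torch
from threading import Thread
from transformers import AutoModelForCausalLM, AutoTokenizer, TextIteratorStreamer

MODEL_ID = "THUDM/glm-4v-9b"

# Mirrors the loading block restored by this commit.
model = AutoModelForCausalLM.from_pretrained(
    MODEL_ID,
    torch_dtype=torch.bfloat16,
    low_cpu_mem_usage=True,
    trust_remote_code=True
).to(0)
tokenizer = AutoTokenizer.from_pretrained(MODEL_ID, trust_remote_code=True)
model.eval()

def stream_reply(prompt: str, max_new_tokens: int = 256):
    # Text-only placeholder; glm-4v-9b's image inputs go through its
    # remote-code chat template (see the model card), not a bare tokenizer call.
    inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
    streamer = TextIteratorStreamer(tokenizer, skip_prompt=True, skip_special_tokens=True)
    # Run generation in a background thread so the streamer can be consumed
    # incrementally, e.g. by a Gradio generator callback.
    thread = Thread(
        target=model.generate,
        kwargs=dict(**inputs, streamer=streamer, max_new_tokens=max_new_tokens),
    )
    thread.start()
    for text_chunk in streamer:  # yields decoded text pieces as they are produced
        yield text_chunk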