ffreemt committed · Commit b64e738 · Parent(s): 6bd1e61
Update exist when no cuda present
app.py CHANGED
@@ -14,7 +14,7 @@ from transformers.generation.utils import GenerationConfig
 model_name = "baichuan-inc/Baichuan2-13B-Chat-4bits"
 if not torch.cuda.is_available():
     gradio.Error(f"No cuda, cant run {model_name}")
-    raise SystemError(
+    raise SystemError(f"No cuda, cant run {model_name}")
 
 # snapshot_download?
 loc = snapshot_download(repo_id=model_name, local_dir="model")
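For context, a minimal sketch of how this guard reads after the commit. The old line left `raise SystemError(` unfinished (a SyntaxError); the commit completes the call so the app actually stops when no GPU is present. The imports are assumptions: only the `GenerationConfig` import is visible in the hunk header, and `snapshot_download` is presumed to come from `huggingface_hub`.

import gradio
import torch
from huggingface_hub import snapshot_download  # assumed source of snapshot_download

model_name = "baichuan-inc/Baichuan2-13B-Chat-4bits"

if not torch.cuda.is_available():
    # gradio.Error(...) only constructs an exception object here; it is not raised,
    # so on its own it would not stop execution.
    gradio.Error(f"No cuda, cant run {model_name}")
    # The added SystemError makes the script exit immediately when CUDA is absent,
    # before attempting to download the 4-bit model.
    raise SystemError(f"No cuda, cant run {model_name}")

# snapshot_download?
loc = snapshot_download(repo_id=model_name, local_dir="model")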