Tuchuanhuhuhu committed 9c8c84b (1 parent: 7c40573)
feat: add ChatGLM2 support
Files changed:
- modules/models/models.py +2 -1
- modules/presets.py +3 -1
modules/models/models.py
CHANGED
@@ -611,7 +611,8 @@ def get_model(
             raise ValueError(f"未知模型: {model_name}")
         logging.info(msg)
     except Exception as e:
-
+        import traceback
+        traceback.print_exc()
         msg = f"{STANDARD_ERROR_MSG}: {e}"
     if dont_change_lora_selector:
         return model, msg, chatbot
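For context, this hunk adds a full traceback dump to the except block before the error is folded into the short user-facing message, so failures during model loading leave a stack trace in the log. A minimal, self-contained sketch of that pattern follows; STANDARD_ERROR_MSG and the loader body here are illustrative stand-ins, not the Space's actual code.

import logging
import traceback

STANDARD_ERROR_MSG = "An error occurred"  # stand-in for the constant defined in modules/presets.py

def get_model_sketch(model_name: str):
    """Illustrative stand-in for get_model(); returns (model, message)."""
    model, msg = None, f"model set to {model_name}"
    try:
        if model_name not in ("chatglm-6b", "chatglm2-6b"):
            raise ValueError(f"未知模型: {model_name}")
        logging.info(msg)
    except Exception as e:
        traceback.print_exc()               # full stack trace goes to the console/log
        msg = f"{STANDARD_ERROR_MSG}: {e}"  # only this short message reaches the UI
    return model, msg

# An unknown model name now leaves a traceback in the log in addition to
# the one-line message that is returned to the caller.
print(get_model_sketch("not-a-model")[1])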
modules/presets.py
CHANGED
@@ -72,7 +72,9 @@ ONLINE_MODELS = [
 LOCAL_MODELS = [
     "chatglm-6b",
     "chatglm-6b-int4",
-    "chatglm-6b-int4-
+    "chatglm-6b-int4-ge",
+    "chatglm2-6b",
+    "chatglm2-6b-int4",
     "StableLM",
     "MOSS",
     "llama-7b-hf",
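The presets.py hunk only registers the new names in LOCAL_MODELS; how they are loaded lives elsewhere in modules/models. For reference, the sketch below follows THUDM's published transformers usage for ChatGLM2 (the repo id, trust_remote_code flag, and chat() helper come from the THUDM/chatglm2-6b model card, not from this Space's own loader).

# Loading one of the newly listed models with Hugging Face transformers,
# per THUDM's published ChatGLM2 usage; the Space's loader may wrap this differently.
from transformers import AutoModel, AutoTokenizer

model_id = "THUDM/chatglm2-6b"  # or "THUDM/chatglm2-6b-int4" for the quantized build
tokenizer = AutoTokenizer.from_pretrained(model_id, trust_remote_code=True)
model = AutoModel.from_pretrained(model_id, trust_remote_code=True).half().cuda()
model = model.eval()

# ChatGLM2's remote code exposes a chat() helper that keeps conversation history.
response, history = model.chat(tokenizer, "你好", history=[])
print(response)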