wuran committed on
Commit
072201c
·
verified ·
1 Parent(s): 2333f81

Update config.json

Browse files
Files changed (1) hide show
  1. config.json +146 -29
config.json CHANGED
@@ -6,34 +6,169 @@
6
  "pgHNSWEfSearch": 100
7
  },
8
  "llmModels": [
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
9
  {
10
- "model": "gpt-3.5-turbo-16k",
11
- "name": "gpt-3.5-turbo",
12
- "maxContext": 16000,
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
13
  "maxResponse": 4000,
14
- "quoteMaxToken": 13000,
15
  "maxTemperature": 1.2,
16
  "charsPointsPrice": 0,
17
  "censor": false,
18
- "vision": false,
19
- "datasetProcess": false,
 
 
 
 
20
  "toolChoice": true,
21
  "functionCall": false,
22
  "customCQPrompt": "",
23
  "customExtractPrompt": "",
24
  "defaultSystemChatPrompt": "",
25
- "defaultConfig": {}
 
26
  },
27
  {
28
- "model": "gpt-4o",
29
- "name": "gpt-4o",
 
30
  "maxContext": 125000,
31
- "maxResponse": 8000,
32
- "quoteMaxToken": 100000,
33
  "maxTemperature": 1.2,
34
  "charsPointsPrice": 0,
35
  "censor": false,
36
  "vision": true,
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
37
  "datasetProcess": false,
38
  "toolChoice": true,
39
  "functionCall": false,
@@ -187,24 +322,6 @@
187
  "customExtractPrompt": "",
188
  "defaultSystemChatPrompt": "",
189
  "defaultConfig": {}
190
- },
191
- {
192
- "model": "dall-e-3",
193
- "name": "dall-e-3",
194
- "maxContext": 128000,
195
- "maxResponse": 4000,
196
- "quoteMaxToken": 100000,
197
- "maxTemperature": 1.2,
198
- "charsPointsPrice": 0,
199
- "censor": false,
200
- "vision": true,
201
- "datasetProcess": false,
202
- "toolChoice": true,
203
- "functionCall": false,
204
- "customCQPrompt": "",
205
- "customExtractPrompt": "",
206
- "defaultSystemChatPrompt": "",
207
- "defaultConfig": {}
208
  }
209
  ],
210
  "vectorModels": [
 
6
  "pgHNSWEfSearch": 100
7
  },
8
  "llmModels": [
9
+ {
10
+ "model": "gpt-4o-mini",
11
+ "name": "gpt-4o-mini",
12
+ "avatar": "/imgs/model/openai.svg",
13
+ "maxContext": 125000,
14
+ "maxResponse": 16000,
15
+ "quoteMaxToken": 120000,
16
+ "maxTemperature": 1.2,
17
+ "charsPointsPrice": 0,
18
+ "censor": false,
19
+ "vision": true,
20
+ "datasetProcess": true,
21
+ "usedInClassify": true,
22
+ "usedInExtractFields": true,
23
+ "usedInToolCall": true,
24
+ "usedInQueryExtension": true,
25
+ "toolChoice": true,
26
+ "functionCall": false,
27
+ "customCQPrompt": "",
28
+ "customExtractPrompt": "",
29
+ "defaultSystemChatPrompt": "",
30
+ "defaultConfig": {},
31
+ "fieldMap": {}
32
+ },
33
  {
34
+ "model": "net-gpt-4o-mini",
35
+ "name": "net-gpt-4o-mini",
36
+ "avatar": "/imgs/model/openai.svg",
37
+ "maxContext": 125000,
38
+ "maxResponse": 16000,
39
+ "quoteMaxToken": 120000,
40
+ "maxTemperature": 1.2,
41
+ "charsPointsPrice": 0,
42
+ "censor": false,
43
+ "vision": true,
44
+ "datasetProcess": true,
45
+ "usedInClassify": true,
46
+ "usedInExtractFields": true,
47
+ "usedInToolCall": true,
48
+ "usedInQueryExtension": true,
49
+ "toolChoice": true,
50
+ "functionCall": false,
51
+ "customCQPrompt": "",
52
+ "customExtractPrompt": "",
53
+ "defaultSystemChatPrompt": "",
54
+ "defaultConfig": {},
55
+ "fieldMap": {}
56
+ },{
57
+ "model": "gpt-4o",
58
+ "name": "gpt-4o",
59
+ "avatar": "/imgs/model/openai.svg",
60
+ "maxContext": 125000,
61
  "maxResponse": 4000,
62
+ "quoteMaxToken": 120000,
63
  "maxTemperature": 1.2,
64
  "charsPointsPrice": 0,
65
  "censor": false,
66
+ "vision": true,
67
+ "datasetProcess": true,
68
+ "usedInClassify": true,
69
+ "usedInExtractFields": true,
70
+ "usedInToolCall": true,
71
+ "usedInQueryExtension": true,
72
  "toolChoice": true,
73
  "functionCall": false,
74
  "customCQPrompt": "",
75
  "customExtractPrompt": "",
76
  "defaultSystemChatPrompt": "",
77
+ "defaultConfig": {},
78
+ "fieldMap": {}
79
  },
80
  {
81
+ "model": "dall-e-3",
82
+ "name": "dall-e-3",
83
+ "avatar": "/imgs/model/openai.svg",
84
  "maxContext": 125000,
85
+ "maxResponse": 4000,
86
+ "quoteMaxToken": 120000,
87
  "maxTemperature": 1.2,
88
  "charsPointsPrice": 0,
89
  "censor": false,
90
  "vision": true,
91
+ "datasetProcess": true,
92
+ "usedInClassify": true,
93
+ "usedInExtractFields": true,
94
+ "usedInToolCall": true,
95
+ "usedInQueryExtension": true,
96
+ "toolChoice": true,
97
+ "functionCall": false,
98
+ "customCQPrompt": "",
99
+ "customExtractPrompt": "",
100
+ "defaultSystemChatPrompt": "",
101
+ "defaultConfig": {},
102
+ "fieldMap": {}
103
+ },
104
+ {
105
+ "model": "o1-mini",
106
+ "name": "o1-mini",
107
+ "avatar": "/imgs/model/openai.svg",
108
+ "maxContext": 125000,
109
+ "maxResponse": 65000,
110
+ "quoteMaxToken": 120000,
111
+ "maxTemperature": 1.2,
112
+ "charsPointsPrice": 0,
113
+ "censor": false,
114
+ "vision": false,
115
+ "datasetProcess": true,
116
+ "usedInClassify": true,
117
+ "usedInExtractFields": true,
118
+ "usedInToolCall": true,
119
+ "usedInQueryExtension": true,
120
+ "toolChoice": false,
121
+ "functionCall": false,
122
+ "customCQPrompt": "",
123
+ "customExtractPrompt": "",
124
+ "defaultSystemChatPrompt": "",
125
+ "defaultConfig": {
126
+ "temperature": 1,
127
+ "stream": false
128
+ },
129
+ "fieldMap": {
130
+ "max_tokens": "max_completion_tokens"
131
+ }
132
+ },
133
+ {
134
+ "model": "o1-preview",
135
+ "name": "o1-preview",
136
+ "avatar": "/imgs/model/openai.svg",
137
+ "maxContext": 125000,
138
+ "maxResponse": 32000,
139
+ "quoteMaxToken": 120000,
140
+ "maxTemperature": 1.2,
141
+ "charsPointsPrice": 0,
142
+ "censor": false,
143
+ "vision": false,
144
+ "datasetProcess": true,
145
+ "usedInClassify": true,
146
+ "usedInExtractFields": true,
147
+ "usedInToolCall": true,
148
+ "usedInQueryExtension": true,
149
+ "toolChoice": false,
150
+ "functionCall": false,
151
+ "customCQPrompt": "",
152
+ "customExtractPrompt": "",
153
+ "defaultSystemChatPrompt": "",
154
+ "defaultConfig": {
155
+ "temperature": 1,
156
+ "stream": false
157
+ },
158
+ "fieldMap": {
159
+ "max_tokens": "max_completion_tokens"
160
+ }
161
+ },
162
+ {
163
+ "model": "gpt-3.5-turbo-16k",
164
+ "name": "gpt-3.5-turbo",
165
+ "maxContext": 16000,
166
+ "maxResponse": 4000,
167
+ "quoteMaxToken": 13000,
168
+ "maxTemperature": 1.2,
169
+ "charsPointsPrice": 0,
170
+ "censor": false,
171
+ "vision": false,
172
  "datasetProcess": false,
173
  "toolChoice": true,
174
  "functionCall": false,
 
322
  "customExtractPrompt": "",
323
  "defaultSystemChatPrompt": "",
324
  "defaultConfig": {}
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
325
  }
326
  ],
327
  "vectorModels": [