Update config.json: enable functionCall and add gpt-4-turbo, chatglm_pro and glm-4 model entries
config.json CHANGED: +56 -2
@@ -91,7 +91,25 @@
       "vision": true,
       "datasetProcess": true,
       "toolChoice": true,
-      "functionCall": false,
+      "functionCall": true,
+      "customCQPrompt": "",
+      "customExtractPrompt": "",
+      "defaultSystemChatPrompt": "",
+      "defaultConfig": {}
+    },
+    {
+      "model": "gpt-4-turbo",
+      "name": "gpt-4-turbo",
+      "maxContext": 128000,
+      "maxResponse": 4000,
+      "quoteMaxToken": 100000,
+      "maxTemperature": 1.2,
+      "charsPointsPrice": 0,
+      "censor": false,
+      "vision": true,
+      "datasetProcess": true,
+      "toolChoice": true,
+      "functionCall": true,
       "customCQPrompt": "",
       "customExtractPrompt": "",
       "defaultSystemChatPrompt": "",
@@ -128,7 +146,7 @@
       "vision": false,
       "datasetProcess": true,
       "toolChoice": true,
-      "functionCall": false,
+      "functionCall": true,
       "customCQPrompt": "",
       "customExtractPrompt": "",
       "defaultSystemChatPrompt": "",
@@ -152,6 +170,42 @@
       "defaultSystemChatPrompt": "",
       "defaultConfig": {}
     },
+    {
+      "model": "chatglm_pro",
+      "name": "chatglm_pro",
+      "maxContext": 128000,
+      "maxResponse": 4000,
+      "quoteMaxToken": 100000,
+      "maxTemperature": 1.2,
+      "charsPointsPrice": 0,
+      "censor": false,
+      "vision": true,
+      "datasetProcess": true,
+      "toolChoice": true,
+      "functionCall": true,
+      "customCQPrompt": "",
+      "customExtractPrompt": "",
+      "defaultSystemChatPrompt": "",
+      "defaultConfig": {}
+    },
+    {
+      "model": "glm-4",
+      "name": "glm-4",
+      "maxContext": 128000,
+      "maxResponse": 4000,
+      "quoteMaxToken": 100000,
+      "maxTemperature": 1.2,
+      "charsPointsPrice": 0,
+      "censor": false,
+      "vision": true,
+      "datasetProcess": true,
+      "toolChoice": true,
+      "functionCall": true,
+      "customCQPrompt": "",
+      "customExtractPrompt": "",
+      "defaultSystemChatPrompt": "",
+      "defaultConfig": {}
+    },
     {
       "model": "gemini-1.5-pro-latest",
       "name": "gemini-1.5-pro-latest",
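For reference, a minimal sanity-check sketch for the edited file, not part of this commit. It assumes the model entries live under a top-level "llmModels" array and that config.json sits in the working directory; both are assumptions about the deployment layout, not stated in this diff, so adjust the key name and path to your setup.

# Not part of this commit: quick check that the edited config.json still
# parses and that every model entry carries the keys touched by this change.
import json

REQUIRED_KEYS = {
    "model", "name", "maxContext", "maxResponse", "quoteMaxToken",
    "maxTemperature", "charsPointsPrice", "censor", "vision",
    "datasetProcess", "toolChoice", "functionCall",
    "customCQPrompt", "customExtractPrompt",
    "defaultSystemChatPrompt", "defaultConfig",
}

with open("config.json", encoding="utf-8") as f:
    config = json.load(f)  # raises json.JSONDecodeError if the edit broke the JSON

for entry in config.get("llmModels", []):  # "llmModels" is an assumed key name
    missing = REQUIRED_KEYS - entry.keys()
    if missing:
        print(f"{entry.get('model', '<unnamed>')}: missing {sorted(missing)}")
    else:
        print(f"{entry['model']}: ok (functionCall={entry['functionCall']})")

If the layout matches, the expected output is one "ok" line per model, including the new gpt-4-turbo, chatglm_pro and glm-4 entries.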