Rooni committed on
Commit
f65a4fc
·
verified ·
1 Parent(s): b93410f

Update server.js

Browse files
Files changed (1) hide show
  1. server.js +17 -7
server.js CHANGED
@@ -34,20 +34,30 @@ app.post('/update', async (req, res) => {
34
 
35
  app.post('/pl', async (req, res) => {
36
  const prompt = req.body.prompt;
 
37
  const lang = req.body.lang || "ru";
38
  const chatid = req.body.chatid || "";
39
  const apiKey = req.body.api || getRandomApiKey();
40
 
41
- if (!prompt) {
42
- return res.status(400).json({ error: "", title: "Ошибка", text: "Нет запроса!", okb: "Ок", oklink: "", cancelable: "true" }); // Не удалось принять данные
43
- }
44
 
45
- try {
46
- const systemMessage = {'role': 'system', 'content': `Доктор ${chatid} ${start}. Отвечай на языке: ${lang}`};
47
- const allMessages = [systemMessage].concat(req.body.messages); // Объединяем сообщения
 
 
 
 
 
 
 
 
 
 
48
 
 
49
  const response = await axios.post('https://openai-gemini-iota.vercel.app/v1/chat/completions', {
50
- messages: allMessages, // Используем объединенный массив сообщений
51
  max_tokens: 4000,
52
  temperature: 0.3,
53
  model: "gemini-1.5-pro-002",
 
34
 
35
  app.post('/pl', async (req, res) => {
36
  const prompt = req.body.prompt;
37
+ const messages = req.body.messages;
38
  const lang = req.body.lang || "ru";
39
  const chatid = req.body.chatid || "";
40
  const apiKey = req.body.api || getRandomApiKey();
41
 
42
+ let allMessages;
 
 
43
 
44
+ if (messages && Array.isArray(messages) && messages.length > 0) {
45
+ allMessages = [
46
+ { 'role': 'system', 'content': `Доктор ${chatid} ${start}. Отвечай на языке: ${lang}` },
47
+ ...messages
48
+ ];
49
+ } else if (prompt) {
50
+ allMessages = [
51
+ { 'role': 'system', 'content': `Доктор ${chatid} ${start}. Отвечай на языке: ${lang}` },
52
+ { 'role': 'user', 'content': prompt }
53
+ ];
54
+ } else {
55
+ return res.status(400).json({ error: "", title: "Ошибка", text: "Нет запроса!", okb: "Ок", oklink: "", cancelable: "true" });
56
+ }
57
 
58
+ try {
59
  const response = await axios.post('https://openai-gemini-iota.vercel.app/v1/chat/completions', {
60
+ messages: allMessages,
61
  max_tokens: 4000,
62
  temperature: 0.3,
63
  model: "gemini-1.5-pro-002",