Rooni committed on
Commit
55cfd98
·
verified ·
1 Parent(s): bec22ae

Update server.js

Browse files
Files changed (1) hide show
  1. server.js +3 -3
server.js CHANGED
@@ -45,7 +45,7 @@ app.post('/pl', async (req, res) => {
45
  messages: [{'role': 'system', 'content': start}, {'role': 'user', 'content': prompt}],
46
  max_tokens: 4000,
47
  temperature: 0.7,
48
- model: "gemini-1.5-flash-002",
49
  presence_penalty: 0.4,
50
  }, {
51
  headers: {
@@ -77,9 +77,9 @@ app.post('/pls', async (req, res) => {
77
  try {
78
  const response = await axios.post('https://openai-gemini-iota.vercel.app/v1/chat/completions', {
79
  messages: [{'role': 'system', 'content': `${start}. Отвечай кратко, но понятно!`}, {'role': 'user', 'content': prompt}],
80
- max_tokens: 4000,
81
  temperature: 0.7,
82
- model: "gemini-1.5-flash-002",
83
  presence_penalty: 0.4,
84
  }, {
85
  headers: {
 
45
  messages: [{'role': 'system', 'content': start}, {'role': 'user', 'content': prompt}],
46
  max_tokens: 4000,
47
  temperature: 0.7,
48
+ model: "gemini-1.5-pro-002",
49
  presence_penalty: 0.4,
50
  }, {
51
  headers: {
 
77
  try {
78
  const response = await axios.post('https://openai-gemini-iota.vercel.app/v1/chat/completions', {
79
  messages: [{'role': 'system', 'content': `${start}. Отвечай кратко, но понятно!`}, {'role': 'user', 'content': prompt}],
80
+ max_tokens: 2000,
81
  temperature: 0.7,
82
+ model: "gemini-1.5-pro-002",
83
  presence_penalty: 0.4,
84
  }, {
85
  headers: {