|
import express from 'express'; |
|
import { OpenaiRes } from '../lib/scrapper.js'; |
|
import { NvidiaTogether } from '../lib/@randydev/together/llama.js'; |
|
import { CohereAI } from '../lib/@randydev/together/cohere.js'; |
|
import { AlibabaTogether } from '../lib/@randydev/together/qwen-ai.js'; |
|
import { DeepSeekR1 } from '../lib/@randydev/together/blackbox.js'; |
|
import { authenticateApiKey, authenticateApiKeyPremium, apiLimiter } from '../middleware/midware.js'; |
|
const GptRoutes = express.Router(); |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
/**
 * GET /api/v1/ai/deepseek/deepseek-R1
 * Premium, rate-limited endpoint proxying the DeepSeek-R1 model.
 *
 * Query params:
 *   query {string} - required user prompt.
 *
 * Responses:
 *   200 {results} on success
 *   400 when `query` is missing
 *   500 when the upstream model call fails
 */
GptRoutes.get('/api/v1/ai/deepseek/deepseek-R1', authenticateApiKeyPremium, apiLimiter, async (req, res) => {
    try {
        const query = req.query.query;
        // Reject early instead of forwarding `undefined` to the upstream model.
        if (!query) {
            return res.status(400).json({ error: 'Missing required query parameter: query' });
        }
        const results = await DeepSeekR1(query);
        res.json({ results });
    } catch (error) {
        // Upstream/internal failure is a server error, not an auth failure (was 401).
        res.status(500).json({ error: error.message });
    }
});
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
/**
 * GET /api/v1/ai/alibaba/qwen-plus
 * Premium, rate-limited endpoint proxying the Alibaba Qwen-Plus model.
 *
 * Query params:
 *   query {string}          - required user prompt.
 *   system_prompt {string}  - optional system prompt; falls back to the AkenoX default.
 *
 * Responses:
 *   200 {results} on success
 *   400 when `query` is missing
 *   500 when the upstream model call fails
 */
GptRoutes.get('/api/v1/ai/alibaba/qwen-plus', authenticateApiKeyPremium, apiLimiter, async (req, res) => {
    try {
        const query = req.query.query;
        // Reject early instead of forwarding `undefined` to the upstream model.
        if (!query) {
            return res.status(400).json({ error: 'Missing required query parameter: query' });
        }
        const system_prompt = req.query.system_prompt || "Your name is AkenoX AI A kind and friendly AI assistant that answers in a short and concise answer. Give short step-by-step reasoning if required.";

        const results = await AlibabaTogether(query, {
            system_prompt: system_prompt
        });

        res.json({ results });
    } catch (error) {
        // Upstream/internal failure is a server error, not an auth failure (was 401).
        res.status(500).json({ error: error.message });
    }
});
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
/**
 * GET /api/v1/ai/cohere/command-plus
 * Premium, rate-limited endpoint proxying the Cohere Command-Plus model.
 *
 * Query params:
 *   query {string}          - required user prompt.
 *   chatHistory {string}    - optional JSON-encoded array of prior turns.
 *   system_prompt {string}  - optional system prompt; falls back to the AkenoX default.
 *
 * Responses:
 *   200 {results} on success
 *   400 when `query` is missing or `chatHistory` is not a valid JSON array
 *   500 when the upstream model call fails
 */
GptRoutes.get('/api/v1/ai/cohere/command-plus', authenticateApiKeyPremium, apiLimiter, async (req, res) => {
    try {
        const query = req.query.query;
        // Reject early instead of forwarding `undefined` to the upstream model.
        if (!query) {
            return res.status(400).json({ error: 'Missing required query parameter: query' });
        }

        // `chatHistory` comes from an untrusted query string: a malformed value
        // used to throw out of JSON.parse and surface as a misleading 401.
        // Parse defensively and report malformed input as a client error (400).
        let chatHistory = [];
        if (req.query.chatHistory) {
            try {
                chatHistory = JSON.parse(req.query.chatHistory);
            } catch {
                return res.status(400).json({ error: 'chatHistory must be valid JSON' });
            }
            if (!Array.isArray(chatHistory)) {
                return res.status(400).json({ error: 'chatHistory must be a JSON array' });
            }
        }

        const system_prompt = req.query.system_prompt || "Your name is AkenoX AI A kind and friendly AI assistant that answers in a short and concise answer. Give short step-by-step reasoning if required.";

        const results = await CohereAI(query, {
            system_prompt: system_prompt,
            chatHistory: chatHistory
        });

        res.json({ results });
    } catch (error) {
        // Upstream/internal failure is a server error, not an auth failure (was 401).
        res.status(500).json({ error: error.message });
    }
});
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
/**
 * GET /api/v1/ai/nvidia/llama-31-70b
 * Premium, rate-limited endpoint proxying the NVIDIA Llama 3.1 70B model.
 *
 * Query params:
 *   query {string}          - required user prompt.
 *   system_prompt {string}  - optional system prompt; falls back to the AkenoX default.
 *
 * Responses:
 *   200 {results} on success
 *   400 when `query` is missing
 *   500 when the upstream model call fails
 */
GptRoutes.get('/api/v1/ai/nvidia/llama-31-70b', authenticateApiKeyPremium, apiLimiter, async (req, res) => {
    try {
        const query = req.query.query;
        // Reject early instead of forwarding `undefined` to the upstream model.
        if (!query) {
            return res.status(400).json({ error: 'Missing required query parameter: query' });
        }
        // Default preserved byte-for-byte (including embedded newlines);
        // single const replaces the original let + conditional reassignment.
        const system_prompt = req.query.system_prompt ? req.query.system_prompt : "Your name is AkenoX AI A kind and friendly AI assistant that answers in\na short and concise answer. Give short step-by-step reasoning if required.\n";
        const results = await NvidiaTogether(query, {
            system_prompt: system_prompt
        });
        res.json({ results });
    } catch (error) {
        // Upstream/internal failure is a server error, not an auth failure (was 401).
        res.status(500).json({ error: error.message });
    }
});
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
/**
 * GET /api/v1/ai/openai/gpt-old
 * Standard (non-premium), rate-limited endpoint proxying the legacy OpenAI scraper.
 *
 * Query params:
 *   query {string} - required user prompt.
 *
 * Responses:
 *   200 {results} on success
 *   400 when `query` is missing
 *   500 when the upstream call fails
 */
GptRoutes.get('/api/v1/ai/openai/gpt-old', authenticateApiKey, apiLimiter, async (req, res) => {
    try {
        const query = req.query.query;
        // Reject early instead of forwarding `undefined` upstream.
        if (!query) {
            return res.status(400).json({ error: 'Missing required query parameter: query' });
        }
        const results = await OpenaiRes(query);
        res.json({ results });
    } catch (error) {
        // Upstream/internal failure is a server error, not an auth failure (was 401).
        res.status(500).json({ error: error.message });
    }
});
|
|
|
export { GptRoutes }; |