|
import express from 'express'; |
|
import { OpenaiRes } from '../lib/scrapper.js'; |
|
import { NvidiaTogether } from '../lib/@randydev/together/llama.js'; |
|
import { CohereAI } from '../lib/@randydev/together/cohere.js'; |
|
import { authenticateApiKey, apiLimiter } from '../middleware/midware.js'; |
|
const GptRoutes = express.Router(); |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
/**
 * GET /api/v1/ai/cohere/command-plus
 * Proxies a chat query to CohereAI with an optional custom system prompt.
 *
 * Query params:
 *   query         {string} required - the user prompt.
 *   system_prompt {string} optional - overrides the default AkenoX persona.
 *
 * Responses:
 *   200 { results } - upstream CohereAI result.
 *   400 { error }   - `query` parameter missing or empty.
 *   500 { error }   - upstream/internal failure (was previously mislabeled 401;
 *                     401 is reserved for auth, which authenticateApiKey handles).
 */
GptRoutes.get('/api/v1/ai/cohere/command-plus', authenticateApiKey, apiLimiter, async (req, res) => {
    try {
        const query = req.query.query;
        // Reject missing/empty input up front instead of forwarding `undefined` upstream.
        if (!query) {
            return res.status(400).json({ error: 'Missing required query parameter: query' });
        }
        const defaultPrompt = "Your name is AkenoX AI A kind and friendly AI assistant that answers in\na short and concise answer. Give short step-by-step reasoning if required.\n";
        // `??` keeps the default only when the client sent no override at all.
        const system_prompt = req.query.system_prompt ?? defaultPrompt;
        const results = await CohereAI(query, {
            system_prompt: system_prompt
        });
        res.json({ results });
    } catch (error) {
        // Internal/upstream failure — report as server error, not auth error.
        res.status(500).json({ error: error.message });
    }
});
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
/**
 * GET /api/v1/ai/nvidia/llama-31-70b
 * Proxies a chat query to the NVIDIA Together Llama 3.1 70B client with an
 * optional custom system prompt.
 *
 * Query params:
 *   query         {string} required - the user prompt.
 *   system_prompt {string} optional - overrides the default AkenoX persona.
 *
 * Responses:
 *   200 { results } - upstream NvidiaTogether result.
 *   400 { error }   - `query` parameter missing or empty.
 *   500 { error }   - upstream/internal failure (was previously mislabeled 401;
 *                     401 is reserved for auth, which authenticateApiKey handles).
 */
GptRoutes.get('/api/v1/ai/nvidia/llama-31-70b', authenticateApiKey, apiLimiter, async (req, res) => {
    try {
        const query = req.query.query;
        // Reject missing/empty input up front instead of forwarding `undefined` upstream.
        if (!query) {
            return res.status(400).json({ error: 'Missing required query parameter: query' });
        }
        const defaultPrompt = "Your name is AkenoX AI A kind and friendly AI assistant that answers in\na short and concise answer. Give short step-by-step reasoning if required.\n";
        // `??` keeps the default only when the client sent no override at all.
        const system_prompt = req.query.system_prompt ?? defaultPrompt;
        const results = await NvidiaTogether(query, {
            system_prompt: system_prompt
        });
        res.json({ results });
    } catch (error) {
        // Internal/upstream failure — report as server error, not auth error.
        res.status(500).json({ error: error.message });
    }
});
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
/**
 * GET /api/v1/ai/gpt-old
 * Proxies a chat query to the legacy OpenaiRes scraper client.
 *
 * Query params:
 *   query {string} required - the user prompt.
 *
 * Responses:
 *   200 { results } - upstream OpenaiRes result.
 *   400 { error }   - `query` parameter missing or empty.
 *   500 { error }   - upstream/internal failure (was previously mislabeled 401;
 *                     401 is reserved for auth, which authenticateApiKey handles).
 */
GptRoutes.get('/api/v1/ai/gpt-old', authenticateApiKey, apiLimiter, async (req, res) => {
    try {
        const query = req.query.query;
        // Reject missing/empty input up front instead of forwarding `undefined` upstream.
        if (!query) {
            return res.status(400).json({ error: 'Missing required query parameter: query' });
        }
        const results = await OpenaiRes(query);
        res.json({ results });
    } catch (error) {
        // Internal/upstream failure — report as server error, not auth error.
        res.status(500).json({ error: error.message });
    }
});
|
|
|
export { GptRoutes }; |