// Import required modules
const express = require('express');
const morgan = require('morgan');
const { createProxyMiddleware } = require('http-proxy-middleware');
const axios = require('axios');
const url = require('url');
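// express, morgan, http-proxy-middleware and axios come from npm
// (npm install express morgan http-proxy-middleware axios); url is a Node.js built-in.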
const app = express();
// Middleware for logging requests
app.use(morgan('dev'));
// Middleware to parse JSON bodies
app.use(express.json());
// Global proxy pool
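// Expected format: a comma-separated list of proxy URLs, e.g.
// PROXY="http://user:pass@10.0.0.1:8080,http://10.0.0.2:3128" (credentials are optional).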
let proxyPool = process.env.PROXY ? process.env.PROXY.split(',').map(p => p.trim()) : [];
console.log('Initial proxy pool:', proxyPool);
// Function to get a random proxy
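// Returns an entry in the shape axios expects for its `proxy` option:
// { host, port, auth?: { username, password } }, or null when the pool is empty.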
function getRandomProxy() {
if (proxyPool.length === 0) return null;
const randomIndex = Math.floor(Math.random() * proxyPool.length);
const proxyUrl = proxyPool[randomIndex];
const parsedUrl = url.parse(proxyUrl);
return {
host: parsedUrl.hostname,
port: parsedUrl.port ? parseInt(parsedUrl.port, 10) : 80,
auth: parsedUrl.auth ? {
username: parsedUrl.auth.split(':')[0],
password: parsedUrl.auth.split(':')[1]
} : undefined
};
}
// Configure axios proxy
function configureAxiosProxy() {
const proxy = getRandomProxy();
if (proxy) {
axios.defaults.proxy = proxy;
console.log(`Axios using proxy: ${proxy.host}:${proxy.port}`);
} else {
delete axios.defaults.proxy;
console.log('No proxy available for axios');
}
}
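// Note: axios.defaults.proxy only affects requests made with axios inside this process
// (such as updateProxyPool below); it does not reroute the traffic forwarded by
// http-proxy-middleware in the /hf/v1/chat/completions route.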
// Update proxy pool from external API
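// The endpoint is assumed to return JSON of the form { "proxies": ["http://...", ...] };
// on any other response or on error, the current pool is left unchanged.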
async function updateProxyPool() {
const proxyApiUrl = process.env.PROXY_API_URL || 'http://example.com/api/proxies';
try {
const response = await axios.get(proxyApiUrl);
const newProxies = response.data.proxies || [];
if (newProxies.length > 0) {
proxyPool = newProxies;
console.log('Proxy pool updated:', proxyPool);
configureAxiosProxy();
} else {
console.warn('No proxies received from API');
}
} catch (error) {
console.error('Failed to update proxy pool:', error.message);
}
}
// Periodically update proxy pool
const updateInterval = parseInt(process.env.PROXY_UPDATE_INTERVAL || '300', 10); // Seconds; default 5 minutes, 0 disables updates
if (updateInterval > 0) {
setInterval(updateProxyPool, updateInterval * 1000);
console.log(`Proxy pool will update every ${updateInterval} seconds`);
updateProxyPool(); // Initial update
}
// Models API
app.get('/hf/v1/models', (req, res) => {
const models = {
"object": "list",
"data": [
{ "id": "claude-3.5-sonnet", "object": "model", "created": 1706745938, "owned_by": "cursor" },
{ "id": "gpt-4", "object": "model", "created": 1706745938, "owned_by": "cursor" },
{ "id": "gpt-4o", "object": "model", "created": 1706745938, "owned_by": "cursor" },
{ "id": "claude-3-opus", "object": "model", "created": 1706745938, "owned_by": "cursor" },
{ "id": "gpt-3.5-turbo", "object": "model", "created": 1706745938, "owned_by": "cursor" },
{ "id": "gpt-4-turbo-2024-04-09", "object": "model", "created": 1706745938, "owned_by": "cursor" },
{ "id": "gpt-4o-128k", "object": "model", "created": 1706745938, "owned_by": "cursor" },
{ "id": "gemini-1.5-flash-500k", "object": "model", "created": 1706745938, "owned_by": "cursor" },
{ "id": "claude-3-haiku-200k", "object": "model", "created": 1706745938, "owned_by": "cursor" },
{ "id": "claude-3-5-sonnet-200k", "object": "model", "created": 1706745938, "owned_by": "cursor" },
{ "id": "claude-3-5-sonnet-20241022", "object": "model", "created": 1706745938, " owned_by": "cursor" },
{ "id": "gpt-4o-mini", "object": "model", "created": 1706745938, "owned_by": "cursor" },
{ "id": "o1-mini", "object": "model", "created": 1706745938, "owned_by": "cursor" },
{ "id": "o1-preview", "object": "model", "created": 1706745938, "owned_by": "cursor" },
{ "id": "o1", "object": "model", "created": 1706745938, "owned_by": "cursor" },
{ "id": "claude-3.5-haiku", "object": "model", "created": 1706745938, "owned_by": "cursor" },
{ "id": "gemini-exp-1206", "object": "model", "created": 1706745938, "owned_by": "cursor" },
{ "id": "gemini-2.0-flash-thinking-exp", "object": "model", "created": 1706745938, "owned_by": "cursor" },
{ "id": "gemini-2.0-flash-exp", "object": "model", "created": 1706745938, "owned_by": "cursor" },
{ "id": "deepseek-v3", "object": "model", "created": 1706745938, "owned_by": "cursor" },
{ "id": "deepseek-r1", "object": "model", "created": 1706745938, "owned_by": "cursor" }
]
};
res.json(models);
});
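// Example (assuming the server is running locally on the default port):
//   curl http://localhost:7860/hf/v1/models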
// Proxy for chat completions
app.use('/hf/v1/chat/completions', (req, res, next) => {
const proxy = getRandomProxy();
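// NOTE: the randomly selected proxy is only logged below; http-proxy-middleware does not pick
// it up automatically. Routing the forwarded request through it would require passing an agent
// (e.g. one created with the https-proxy-agent package) in the middleware options.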
const middleware = createProxyMiddleware({
target: 'http://localhost:3010/v1/chat/completions', // Replace with actual target service
changeOrigin: true,
timeout: 30000,
proxyTimeout: 30000,
onProxyReq: (proxyReq, req, res) => {
// Re-send the JSON body that express.json() has already consumed
if (req.body && Object.keys(req.body).length > 0) {
const bodyData = JSON.stringify(req.body);
proxyReq.setHeader('Content-Type', 'application/json');
proxyReq.setHeader('Content-Length', Buffer.byteLength(bodyData));
proxyReq.write(bodyData);
proxyReq.end();
}
},
onError: (err, req, res) => {
console.error('Proxy error:', err);
// Only send an error response if headers have not already gone out
if (!res.headersSent) {
res.status(500).send('Proxy error occurred');
}
}
});
if (proxy) {
console.log(`Request proxied via ${proxy.host}:${proxy.port}`);
} else {
console.log('No proxy available, direct connection');
}
middleware(req, res, next);
});
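// Example request (assuming the upstream service speaks the OpenAI chat format and accepts a
// Cursor cookie of the form user_... as the bearer token, as described on the home page):
//   curl http://localhost:7860/hf/v1/chat/completions \
//     -H "Content-Type: application/json" \
//     -H "Authorization: Bearer user_..." \
//     -d '{"model":"gpt-4","messages":[{"role":"user","content":"Hello"}]}'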
// Home page with interactive frontend
app.get('/', (req, res) => {
const htmlContent = `
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Cursor To OpenAI</title>
<style>
:root {
--primary-color: #1e90ff;
--bg-color: #121212;
--card-bg: #1e1e1e;
--text-color: #ffffff;
--input-bg: #2a2a2a;
--input-border: #444;
}
body {
padding: 20px;
font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, sans-serif;
max-width: 800px;
margin: 0 auto;
line-height: 1.6;
background: var(--bg-color);
color: var(--text-color);
}
.container {
padding: 20px;
}
.header {
text-align: center;
margin-bottom: 40px;
}
.header h1 {
color: var(--primary-color);
font-size: 2.5em;
margin-bottom: 10px;
}
.info, .models, .chat {
background: var(--card-bg);
padding: 25px;
border-radius: 12px;
box-shadow: 0 4px 6px -1px rgba(0, 0, 0, 0.3);
margin-bottom: 30px;
border: 1px solid #444;
}
.info-item, .model-item {
margin: 15px 0;
padding: 10px;
background: #2a2a2a;
border-radius: 8px;
border: 1px solid #444;
}
.info-label, .model-name {
color: #bbb;
font-size: 0.9em;
margin-bottom: 5px;
}
.info-value, .model-provider {
color: var(--primary-color);
font-weight: 500;
}
.chat-input {
width: 100%;
padding: 10px;
border: 1px solid var(--input-border);
border-radius: 5px;
background: var(--input-bg);
color: var(--text-color);
margin-bottom: 10px;
}
.chat-button {
padding: 10px 15px;
background: var(--primary-color);
color: var(--text-color);
border: none;
border-radius: 5px;
cursor: pointer;
transition: background 0.3s;
}
.chat-button:hover {
background: #1c7bbf;
}
.chat-output {
margin-top: 20px;
padding: 10px;
background: #2a2a2a;
border-radius: 8px;
border: 1px solid #444;
max-height: 300px;
overflow-y: auto;
}
</style>
</head>
<body>
<div class="container">
<div class="header">
<h1>Cursor To OpenAI Server</h1>
<p>High-performance AI model proxy service</p>
</div>
<div class="info">
<h2>Configuration</h2>
<div class="info-item">
<div class="info-label">聊天来源</div>
<div class="info-value">自定义(兼容 OpenAI)</div>
</div>
<div class="info-item">
<div class="info-label">自定义端点(基本URL)</div>
<div class="info-value" id="endpoint-url"></div>
</div>
<div class="info-item">
<div class="info-label">自定义API密钥</div>
<div class="info-value">抓取的Cursor Cookie,格式为user_...</div>
</div>
</div>
<div class="chat">
<h3>Chat with the AI</h3>
<input type="text" id="user-input" class="chat-input" placeholder="Type your question..." />
<button id="send-button" class="chat-button">Send</button>
<div id="chat-output" class="chat-output"></div>
</div>
<div class="models">
<h3>Supported models</h3>
<div id="model-list"></div>
</div>
</div>
<script>
const url = new URL(window.location.href);
const link = url.protocol + '//' + url.host + '/hf/v1';
document.getElementById('endpoint-url').textContent = link;
fetch(link + '/models')
.then(response => response.json())
.then(data => {
const modelList = document.getElementById('model-list');
data.data.forEach(model => {
const div = document.createElement('div');
div.className = 'model-item';
div.innerHTML = \`
<span class="model-name">\${model.id}</span>
<span class="model-provider">\${model.owned_by}</span>
\`;
modelList.appendChild(div);
});
})
.catch(error => {
console.error('Error fetching models:', error);
document.getElementById('model-list').textContent = 'Failed to fetch the model list';
});
document.getElementById('send-button').addEventListener('click', async () => {
const userInput = document.getElementById('user-input').value;
if (!userInput) return;
const chatOutput = document.getElementById('chat-output');
chatOutput.innerHTML += \`<div><strong>You:</strong> \${userInput}</div>\`;
document.getElementById('user-input').value = '';
try {
const response = await fetch(link + '/chat/completions', {
method: 'POST',
headers: {
'Content-Type': 'application/json',
},
body: JSON.stringify({ model: "gpt-4", messages: [{ role: "user", content: userInput }] }) // OpenAI-style chat payload; any id from the model list can be used
});
const data = await response.json();
chatOutput.innerHTML += \`<div><strong>AI:</strong> \${data.choices[0].message.content}</div>\`;
chatOutput.scrollTop = chatOutput.scrollHeight; // Scroll to the bottom
} catch (error) {
console.error('Error fetching chat completion:', error);
chatOutput.innerHTML += '<div><strong>AI:</strong> An error occurred. Please try again later.</div>';
}
});
</script>
</body>
</html>
`;
res.send(htmlContent);
});
// Start the server and configure initial axios proxy
configureAxiosProxy();
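// 7860 is the default port expected by Hugging Face Spaces; override with HF_PORT if needed.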
const port = process.env.HF_PORT || 7860;
app.listen(port, () => {
console.log(`HF Proxy server is running at PORT: ${port}`);
});