// Express app: OpenAI-compatible HTTP facade in front of a local Cursor bridge.
const express = require('express');
const morgan = require('morgan');
const { createProxyMiddleware } = require('http-proxy-middleware');
const url = require('url'); // Node legacy 'url' module (url.parse)
const app = express();
app.use(morgan('dev')); // HTTP request logging in 'dev' format
// Get proxy configuration from environment variables.
/**
 * Parse an upstream proxy URL into host/port/auth pieces.
 * @param {string} rawUrl - e.g. "http://user:pass@proxy.example.com:8080"
 * @returns {{host: string, port: (string|number), auth: ({username: string, password: string}|undefined)}}
 * @throws {TypeError} if rawUrl is not a valid URL
 */
function parseProxyConfig(rawUrl) {
  const parsed = new URL(rawUrl); // WHATWG URL; url.parse() is deprecated
  return {
    host: parsed.hostname,
    // url.parse-compatible shape: string port when present, numeric 80 default.
    port: parsed.port || 80,
    auth: parsed.username
      ? {
          // WHATWG URL keeps credentials percent-encoded; decode to raw values.
          // (The old `auth.split(':')` broke on passwords containing ':'.)
          username: decodeURIComponent(parsed.username),
          password: decodeURIComponent(parsed.password),
        }
      : undefined,
  };
}

const proxyUrl = process.env.PROXY || '';
console.log(`Proxy configuration: ${proxyUrl ? 'Configured' : 'Not configured'}`);
let proxyConfig = null;
if (proxyUrl) {
  try {
    proxyConfig = parseProxyConfig(proxyUrl);
    // Log destination only — never the credentials or the raw URL. The
    // previous code printed the exact username/password, leaking secrets
    // into any log aggregation.
    console.log(
      `Using proxy: ${proxyConfig.host}:${proxyConfig.port}` +
        (proxyConfig.auth ? ' (authenticated)' : '')
    );
  } catch (error) {
    console.error('Failed to parse proxy URL:', error.message);
  }
}
// Models list API: OpenAI-compatible `/models` catalog for this bridge.
app.get('/hf/v1/models', (req, res) => {
  // Every entry shares identical metadata, so the catalog is generated from
  // an id roster instead of repeating the same literal object 23 times.
  // Roster order matches the original response exactly.
  const modelIds = [
    'claude-3.5-sonnet',
    'gpt-4',
    'gpt-4o',
    'claude-3-opus',
    'gpt-3.5-turbo',
    'gpt-4-turbo-2024-04-09',
    'gpt-4o-128k',
    'gemini-1.5-flash-500k',
    'claude-3-haiku-200k',
    'claude-3-5-sonnet-200k',
    'claude-3-5-sonnet-20241022',
    'gpt-4o-mini',
    'o1-mini',
    'o1-preview',
    'o1',
    'claude-3.5-haiku',
    'gemini-exp-1206',
    'gemini-2.0-flash-thinking-exp',
    'gemini-2.0-flash-exp',
    'deepseek-v3',
    'deepseek-r1',
    // New models
    'claude-3.7-sonnet',
    'claude-3.7-sonnet-thinking',
  ];
  res.json({
    object: 'list',
    data: modelIds.map((id) => ({
      id,
      object: 'model',
      created: 1706745938,
      owned_by: 'cursor',
    })),
  });
});
// Configure proxy middleware: forward chat completions to the local bridge on :3010.
app.use('/hf/v1/chat/completions', createProxyMiddleware({
  target: 'http://localhost:3010/v1/chat/completions',
  changeOrigin: true,
  // NOTE(review): `proxy` is not a documented http-proxy-middleware option;
  // routing through an upstream proxy normally requires an `agent`
  // (e.g. https-proxy-agent). Confirm this setting actually takes effect.
  proxy: proxyConfig,
  // Surface proxy failures to the client instead of hanging the request.
  onError: (err, req, res) => {
    console.error('Proxy error:', err);
    // If the upstream failed mid-stream the headers are already flushed;
    // calling res.status() then would throw ERR_HTTP_HEADERS_SENT, so we
    // can only terminate the response.
    if (res.headersSent) {
      res.end();
      return;
    }
    res.status(500).send('Proxy error occurred: ' + err.message);
  },
  onProxyReq: (proxyReq, req, res) => {
    console.log(`Proxying request to chat completions ${proxyConfig ? 'using proxy' : 'directly'}`);
  },
  onProxyRes: (proxyRes, req, res) => {
    console.log(`Received response with status: ${proxyRes.statusCode}`);
  }
}));
// Landing page: renders usage/integration docs for the bridge.
// The template literal below is user-facing content; only the mojibake
// character on the "Application → Cookies" line was repaired (it rendered
// as "โ"); every other byte is unchanged.
app.get('/', (req, res) => {
  const htmlContent = `
Cursor To OpenAI
Authentication
Cursor Cookie (user_...)
Proxy Status
${proxyConfig ? 'Enabled' : 'Disabled'}
Available Models
Loading...
Server Information
Featured Models
Loading...
Quick Start Guide
1Authentication
Get your Cursor cookie that starts with "user_..." from browser cookies after logging in to Cursor.
2API Requests
Send POST requests to Loading... with your Cursor cookie as Bearer token.
3Request Format
Use OpenAI-compatible format with model, messages array, and optional parameters.
Available Models
Loading...
Integration Guide
1Authentication
To authenticate with the Cursor API:
- Log in to Cursor.so
- Open Developer Tools (F12)
- Go to Application → Cookies
- Find cookie with name starting with "user_"
- Use this value as your API key
2API Configuration
Set up your API client with:
3Making Requests
Send chat completion requests:
POST /chat/completions
{
"model": "claude-3.7-sonnet",
"messages": [
{"role": "user", "content": "Hello!"}
],
"temperature": 0.7
}
Code Examples
const response = await fetch('${req.protocol}://${req.get('host')}/hf/v1/chat/completions', {
method: 'POST',
headers: {
'Content-Type': 'application/json',
'Authorization': 'Bearer user_your_cookie_value'
},
body: JSON.stringify({
model: 'claude-3.7-sonnet',
messages: [
{ role: 'user', content: 'Hello, who are you?' }
],
temperature: 0.7
})
});
const data = await response.json();
console.log(data);
import requests
url = "${req.protocol}://${req.get('host')}/hf/v1/chat/completions"
headers = {
"Content-Type": "application/json",
"Authorization": "Bearer user_your_cookie_value"
}
payload = {
"model": "claude-3.7-sonnet",
"messages": [
{"role": "user", "content": "Hello, who are you?"}
],
"temperature": 0.7
}
response = requests.post(url, headers=headers, json=payload)
data = response.json()
print(data)
curl -X POST "${req.protocol}://${req.get('host')}/hf/v1/chat/completions" \\
-H "Content-Type: application/json" \\
-H "Authorization: Bearer user_your_cookie_value" \\
-d '{
"model": "claude-3.7-sonnet",
"messages": [
{"role": "user", "content": "Hello, who are you?"}
],
"temperature": 0.7
}'
API Tester
Response
Response will appear here...
`;
  res.send(htmlContent);
});
// Bind to the Hugging Face Spaces port when provided, else default 7860.
const port = process.env.HF_PORT || 7860;
const announceStartup = () => {
  console.log(`HF Proxy server is running at PORT: ${port}`);
  console.log(`Proxy status: ${proxyConfig ? 'Enabled' : 'Disabled'}`);
};
app.listen(port, announceStartup);