Duplicate from fanren/openai-api-proxy
Co-authored-by: fanren <[email protected]>
- .gitattributes +34 -0
- Dockerfile +32 -0
- README.md +11 -0
- app.js +143 -0
- fetchsse.js +64 -0
- package.json +10 -0
.gitattributes
ADDED
@@ -0,0 +1,34 @@
*.7z filter=lfs diff=lfs merge=lfs -text
*.arrow filter=lfs diff=lfs merge=lfs -text
*.bin filter=lfs diff=lfs merge=lfs -text
*.bz2 filter=lfs diff=lfs merge=lfs -text
*.ckpt filter=lfs diff=lfs merge=lfs -text
*.ftz filter=lfs diff=lfs merge=lfs -text
*.gz filter=lfs diff=lfs merge=lfs -text
*.h5 filter=lfs diff=lfs merge=lfs -text
*.joblib filter=lfs diff=lfs merge=lfs -text
*.lfs.* filter=lfs diff=lfs merge=lfs -text
*.mlmodel filter=lfs diff=lfs merge=lfs -text
*.model filter=lfs diff=lfs merge=lfs -text
*.msgpack filter=lfs diff=lfs merge=lfs -text
*.npy filter=lfs diff=lfs merge=lfs -text
*.npz filter=lfs diff=lfs merge=lfs -text
*.onnx filter=lfs diff=lfs merge=lfs -text
*.ot filter=lfs diff=lfs merge=lfs -text
*.parquet filter=lfs diff=lfs merge=lfs -text
*.pb filter=lfs diff=lfs merge=lfs -text
*.pickle filter=lfs diff=lfs merge=lfs -text
*.pkl filter=lfs diff=lfs merge=lfs -text
*.pt filter=lfs diff=lfs merge=lfs -text
*.pth filter=lfs diff=lfs merge=lfs -text
*.rar filter=lfs diff=lfs merge=lfs -text
*.safetensors filter=lfs diff=lfs merge=lfs -text
saved_model/**/* filter=lfs diff=lfs merge=lfs -text
*.tar.* filter=lfs diff=lfs merge=lfs -text
*.tflite filter=lfs diff=lfs merge=lfs -text
*.tgz filter=lfs diff=lfs merge=lfs -text
*.wasm filter=lfs diff=lfs merge=lfs -text
*.xz filter=lfs diff=lfs merge=lfs -text
*.zip filter=lfs diff=lfs merge=lfs -text
*.zst filter=lfs diff=lfs merge=lfs -text
*tfevents* filter=lfs diff=lfs merge=lfs -text
Dockerfile
ADDED
@@ -0,0 +1,32 @@
# from official image
FROM ubuntu:latest

# key-value pairs
# allow more than one
#
LABEL version="1.0"

# install packages
RUN apt-get update
RUN apt-get install -y curl sudo
RUN curl -sL https://deb.nodesource.com/setup_18.x | sudo -E bash -
RUN apt-get install -y nodejs

# set working directory to /app
WORKDIR /app

# copy the current directory into the container at /app
COPY . /app

# install the packages specified in package.json
RUN npm install

# expose port 7860 for accessing the app
EXPOSE 7860

# This allows Heroku to bind its PORT to the app's port,
# since Heroku needs to use its own PORT before the app can be made accessible to the world
EXPOSE $PORT

# run the app when the container launches
CMD ["node", "app.js"]
README.md
ADDED
@@ -0,0 +1,11 @@
---
title: chatgpt-openai-api-proxy
emoji: 🐢
colorFrom: green
colorTo: blue
sdk: docker
pinned: false
duplicated_from: fanren/openai-api-proxy
---

Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
app.js
ADDED
@@ -0,0 +1,143 @@
const express = require('express')
const path = require('path')
const fetch = require('cross-fetch')
const app = express()
var multer = require('multer');
var forms = multer({limits: { fieldSize: 10*1024*1024 }});
app.use(forms.array());
const cors = require('cors');
app.use(cors());

const bodyParser = require('body-parser')
app.use(bodyParser.json({limit : '50mb' }));
app.use(bodyParser.urlencoded({ extended: true }));

app.all(`*`, async (req, res) => {
  const url = `https://api.openai.com${req.url}`;
  // take the token that follows 'Authorization': 'Bearer ' in the request headers
  const token = req.headers.authorization?.split(' ')[1];
  if( !token ) return res.status(403).send('Forbidden');

  const openai_key = token.split(':')[0];
  if( !openai_key ) return res.status(403).send('Forbidden');

  const proxy_key = token.split(':')[1]||"";
  if( process.env.PROXY_KEY && proxy_key !== process.env.PROXY_KEY )
    return res.status(403).send('Forbidden');

  //console.log( req );

  const options = {
    method: req.method,
    timeout: process.env.TIMEOUT||30000,
    headers: {
      'Content-Type': 'application/json; charset=utf-8',
      'Authorization': 'Bearer '+ openai_key,
    },
    onMessage: (data) => {
      // console.log(data);
      res.write("data: "+data+"\n\n" );
      if( data === '[DONE]' )
      {
        res.end();
      }
    }
  };

  if( req.method.toLocaleLowerCase() === 'post' && req.body ) options.body = JSON.stringify(req.body);
  // console.log({url, options});

  try {
    // for chat completion and text completion requests with stream enabled, use SSE
    if( (req.url.startsWith('/v1/completions') || req.url.startsWith('/v1/chat/completions')) && req.body.stream ) {
      const response = await myFetch(url, options);
      if( response.ok )
      {
        // write header
        res.writeHead(200, {
          'Content-Type': 'text/event-stream',
          'Cache-Control': 'no-cache',
          'Connection': 'keep-alive',
        });
        const { createParser } = await import("eventsource-parser");
        const parser = createParser((event) => {
          if (event.type === "event") {
            options.onMessage(event.data);
          }
        });
        if (!response.body.getReader) {
          const body = response.body;
          if (!body.on || !body.read) {
            throw new Error('unsupported "fetch" implementation');
          }
          body.on("readable", () => {
            let chunk;
            while (null !== (chunk = body.read())) {
              parser.feed(chunk.toString());
            }
          });
        } else {
          for await (const chunk of streamAsyncIterable(response.body)) {
            const str = new TextDecoder().decode(chunk);
            parser.feed(str);
          }
        }
      } else {
        // forward the upstream error instead of leaving the client request hanging
        res.status(response.status).send(await response.text());
      }

    }else
    {
      const response = await myFetch(url, options);
      console.log(response);
      const data = await response.json();
      console.log( data );
      res.json(data);
    }

  } catch (error) {
    console.error(error);
    res.status(500).json({"error":error.toString()});
  }
})

async function* streamAsyncIterable(stream) {
  const reader = stream.getReader();
  try {
    while (true) {
      const { done, value } = await reader.read();
      if (done) {
        return;
      }
      yield value;
    }
  } finally {
    reader.releaseLock();
  }
}

async function myFetch(url, options) {
  const {timeout, ...fetchOptions} = options;
  const controller = new AbortController();
  const timeoutId = setTimeout(() => controller.abort(), timeout||30000)
  const res = await fetch(url, {...fetchOptions,signal:controller.signal});
  clearTimeout(timeoutId);
  return res;
}

// Error handler
app.use(function(err, req, res, next) {
  console.error(err)
  res.status(500).send('Internal Serverless Error')
})

const port = process.env.PORT||9000;
app.listen(port, () => {
  console.log(`Server start on http://localhost:${port}`);
})
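
For reference, a minimal client sketch (not part of this commit) showing how a caller would address the proxy: the OpenAI key goes in the Bearer token, optionally followed by ':' and the value of PROXY_KEY. The Space URL, model name, and keys below are placeholders.

// client-example.js — hypothetical usage of the proxy; URL and keys are placeholders
const fetch = require('cross-fetch');

async function main() {
  const res = await fetch('https://your-space.hf.space/v1/chat/completions', {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      // format: Bearer <OPENAI_KEY>:<PROXY_KEY>; the part after ':' is checked against process.env.PROXY_KEY
      'Authorization': 'Bearer sk-your-openai-key:your-proxy-key',
    },
    body: JSON.stringify({
      model: 'gpt-3.5-turbo',
      messages: [{ role: 'user', content: 'Hello' }],
      stream: false,          // non-streaming requests are returned as plain JSON
    }),
  });
  console.log(await res.json());
}

main().catch(console.error);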
fetchsse.js
ADDED
@@ -0,0 +1,64 @@
// simple error type carrying HTTP status details for failed upstream requests
class ChatGPTError extends Error {}

async function* streamAsyncIterable(stream) {
  const reader = stream.getReader();
  try {
    while (true) {
      const { done, value } = await reader.read();
      if (done) {
        return;
      }
      yield value;
    }
  } finally {
    reader.releaseLock();
  }
}

// add timeout to fetchSSE
async function fetchSSE(url, options, fetch2 = fetch) {
  const { createParser } = await import("eventsource-parser");
  const { onMessage, timeout , ...fetchOptions } = options;
  const controller = new AbortController();
  const timeoutId = setTimeout(() => controller.abort(), timeout||30000)

  const res = await fetch2(url, {...fetchOptions,signal:controller.signal});
  clearTimeout(timeoutId);

  if (!res.ok) {
    let reason;
    try {
      reason = await res.text();
    } catch (err) {
      reason = res.statusText;
    }
    const msg = `ChatGPT error ${res.status}: ${reason}`;
    const error = new ChatGPTError(msg, { cause: res });
    error.statusCode = res.status;
    error.statusText = res.statusText;
    error.context = { url, options };
    throw error;
  }
  const parser = createParser((event) => {
    if (event.type === "event") {
      onMessage(event.data);
    }
  });
  if (!res.body.getReader) {
    const body = res.body;
    if (!body.on || !body.read) {
      throw new ChatGPTError('unsupported "fetch" implementation');
    }
    body.on("readable", () => {
      let chunk;
      while (null !== (chunk = body.read())) {
        parser.feed(chunk.toString());
      }
    });
  } else {
    for await (const chunk of streamAsyncIterable(res.body)) {
      const str = new TextDecoder().decode(chunk);
      parser.feed(str);
    }
  }
}

module.exports = fetchSSE;
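
fetchsse.js exports fetchSSE, but app.js does not import it in this commit (it repeats the same SSE parsing logic inline). A hedged sketch of how the helper could be called, with a placeholder key and model:

// hypothetical usage of fetchsse.js — not part of the repo; the key is a placeholder
const fetch = require('cross-fetch');
const fetchSSE = require('./fetchsse');

fetchSSE('https://api.openai.com/v1/chat/completions', {
  method: 'POST',
  timeout: 30000,                        // aborts the request via AbortController after 30 s
  headers: {
    'Content-Type': 'application/json',
    'Authorization': 'Bearer sk-your-openai-key',
  },
  body: JSON.stringify({
    model: 'gpt-3.5-turbo',
    messages: [{ role: 'user', content: 'Hello' }],
    stream: true,
  }),
  // invoked once per SSE "data:" payload; the final payload is the string "[DONE]"
  onMessage: (data) => console.log(data),
}, fetch).catch(console.error);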
package.json
ADDED
@@ -0,0 +1,10 @@
{
  "dependencies": {
    "body-parser": "^1.20.2",
    "cors": "^2.8.5",
    "cross-fetch": "^3.1.5",
    "eventsource-parser": "^0.1.0",
    "express": "^4.18.2",
    "multer": "^1.4.5-lts.1"
  }
}