Spaces:
Running
Running
Update Dockerfile
Browse files — Dockerfile (+50 lines, −0 lines)
Dockerfile
CHANGED
@@ -9,6 +9,56 @@ RUN pip install cloudscraper Flask && \
|
|
9 |
RUN <<EOF > /proxy.py
|
10 |
from flask import Flask, request, Response
|
11 |
import cloudscraper
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
12 |
EOF
|
13 |
|
14 |
#COPY proxy.py /proxy.py
|
|
|
9 |
# Embed the proxy application in the image via a BuildKit here-document.
# NOTE(review): `RUN <<EOF > /proxy.py` depends on the heredoc being fed to the
# shell's stdin; confirm /proxy.py is actually populated at build time — the
# more common idioms are `COPY <<EOF /proxy.py` or `RUN cat <<EOF > /proxy.py`.
RUN <<EOF > /proxy.py
import cloudscraper
from flask import Flask, request, Response

# Single shared scraper session for all incoming requests; the nodejs
# interpreter is what solves Cloudflare's JavaScript challenges.
scraper = cloudscraper.create_scraper(interpreter='nodejs')

app = Flask(__name__)
|
15 |
+
|
16 |
+
@app.route('/proxy/<scheme>:/<path:url>', methods=['GET', 'POST', 'PUT', 'DELETE', 'PATCH', 'OPTIONS'])
def proxy(scheme, url):
    """Generic pass-through proxy.

    Forwards the incoming request (method, headers, body) to
    ``{scheme}://{url}`` through cloudscraper and streams the upstream
    response back to the client.
    """
    # Drop Host so the upstream server sees its own hostname, not this proxy's.
    req_headers = {key: value for key, value in request.headers if key != 'Host'}

    resp = scraper.request(method=request.method,
                           url=f'{scheme}://{url}',
                           headers=req_headers,
                           data=request.get_data(),
                           stream=True)

    # Hop-by-hop headers must not be forwarded (RFC 7230 §6.1). In addition,
    # requests' iter_content() transparently decodes the body, so the upstream
    # Content-Encoding / Content-Length no longer describe the bytes we send.
    excluded_headers = ['content-encoding', 'content-length', 'transfer-encoding', 'connection']
    headers = [(name, value) for (name, value) in resp.raw.headers.items()
               if name.lower() not in excluded_headers]

    def generate():
        # Stream in sane-sized chunks. chunk_size=1 forwarded the body one
        # byte per Python iteration, which is extremely slow for any real page.
        for chunk in resp.iter_content(chunk_size=8192):
            yield chunk

    return Response(generate(), status=resp.status_code, headers=headers)
|
37 |
+
|
38 |
+
@app.route('/', methods=['GET', 'POST', 'PUT', 'DELETE', 'PATCH', 'OPTIONS'])
@app.route('/<path:url>', methods=['GET', 'POST', 'PUT', 'DELETE', 'PATCH', 'OPTIONS'])
def cf(url=""):
    """Reverse proxy for a fixed upstream.

    Forwards the incoming request to ``https://chat-shared3.zhile.io/{url}``
    through cloudscraper and streams the upstream response back to the client.
    """
    # Drop Host so the upstream server sees its own hostname, not this proxy's.
    req_headers = {key: value for key, value in request.headers if key != 'Host'}

    resp = scraper.request(method=request.method,
                           url=f'https://chat-shared3.zhile.io/{url}',
                           headers=req_headers,
                           data=request.get_data(),
                           stream=True)

    # Hop-by-hop headers must not be forwarded (RFC 7230 §6.1). In addition,
    # requests' iter_content() transparently decodes the body, so the upstream
    # Content-Encoding / Content-Length no longer describe the bytes we send.
    excluded_headers = ['content-encoding', 'content-length', 'transfer-encoding', 'connection']
    headers = [(name, value) for (name, value) in resp.raw.headers.items()
               if name.lower() not in excluded_headers]

    def generate():
        # Stream in sane-sized chunks. chunk_size=1 forwarded the body one
        # byte per Python iteration, which is extremely slow for any real page.
        for chunk in resp.iter_content(chunk_size=8192):
            yield chunk

    return Response(generate(), status=resp.status_code, headers=headers)
|
60 |
+
if __name__ == '__main__':
    # Bind on all interfaces, on the port the Space exposes.
    app.run(host='0.0.0.0', port=7860)
EOF
|