github-actions[bot] committed on
Commit 43f369c · Parent: 9cf7a5c

Update from GitHub Actions

Files changed (4)
  1. main.py +307 -4
  2. pyproject.toml +1 -1
  3. requirements.txt +6 -6
  4. uv.lock +1 -1
main.py CHANGED
@@ -2,13 +2,13 @@ import os
  import cloudscraper
  from fastapi import FastAPI, HTTPException, Request, Response
  from fastapi.middleware.cors import CORSMiddleware
- from fastapi.responses import StreamingResponse
  from typing import Optional
  import uvicorn
  import asyncio

  app = FastAPI(
-     title="ScraperCookie",
      description="A proxy that forwards requests through CloudScraper, with streaming response support",
      version="0.1.0"
  )
@@ -29,9 +29,312 @@ async def stream_generator(response):
          yield chunk
          await asyncio.sleep(0.001)  # yield control so the event loop stays responsive

- @app.get("/")
  async def root():
-     return {"message": "Welcome to the ScraperProxy API. Visit /docs for the API documentation."}

  @app.api_route("/proxy", methods=["GET", "POST", "PUT", "DELETE", "OPTIONS", "HEAD", "PATCH"])
  async def proxy(request: Request):
 
  import cloudscraper
  from fastapi import FastAPI, HTTPException, Request, Response
  from fastapi.middleware.cors import CORSMiddleware
+ from fastapi.responses import HTMLResponse, StreamingResponse
  from typing import Optional
  import uvicorn
  import asyncio

  app = FastAPI(
+     title="ScraperProxy",
      description="A proxy that forwards requests through CloudScraper, with streaming response support",
      version="0.1.0"
  )
 
          yield chunk
          await asyncio.sleep(0.001)  # yield control so the event loop stays responsive

+
+
+
+
+ # Read the HTML template
+ def get_html_template():
+     # The HTML could be read from a file, or returned directly as a string
+     # To keep the example simple, we return a string variable here
+     html_content = """<!DOCTYPE html>
+     <html lang="zh-CN">
+     <head>
+         <meta charset="UTF-8">
+         <meta name="viewport" content="width=device-width, initial-scale=1.0">
+         <title>ScraperProxy API - Web Request Proxy Service</title>
+         <style>
+             :root {
+                 --primary-color: #3498db;
+                 --secondary-color: #2980b9;
+                 --accent-color: #e74c3c;
+                 --text-color: #333;
+                 --light-bg: #f5f7fa;
+                 --card-shadow: 0 4px 6px rgba(0, 0, 0, 0.1);
+             }
+
+             * {
+                 margin: 0;
+                 padding: 0;
+                 box-sizing: border-box;
+             }
+
+             body {
+                 font-family: 'Segoe UI', Tahoma, Geneva, Verdana, sans-serif;
+                 line-height: 1.6;
+                 color: var(--text-color);
+                 background: linear-gradient(135deg, #f5f7fa 0%, #c3cfe2 100%);
+                 min-height: 100vh;
+                 padding: 20px;
+             }
+
+             .container {
+                 max-width: 1200px;
+                 margin: 0 auto;
+                 padding: 20px;
+             }
+
+             header {
+                 text-align: center;
+                 margin-bottom: 40px;
+                 padding: 20px;
+                 background-color: white;
+                 border-radius: 10px;
+                 box-shadow: var(--card-shadow);
+             }
+
+             h1 {
+                 color: var(--primary-color);
+                 margin-bottom: 10px;
+                 font-size: 2.5rem;
+             }
+
+             .subtitle {
+                 font-size: 1.2rem;
+                 color: #666;
+                 margin-bottom: 20px;
+             }
+
+             .features {
+                 display: grid;
+                 grid-template-columns: repeat(auto-fit, minmax(300px, 1fr));
+                 gap: 20px;
+                 margin-bottom: 40px;
+             }
+
+             .feature-card {
+                 background-color: white;
+                 padding: 25px;
+                 border-radius: 10px;
+                 box-shadow: var(--card-shadow);
+                 transition: transform 0.3s ease;
+             }
+
+             .feature-card:hover {
+                 transform: translateY(-5px);
+             }
+
+             .feature-card h3 {
+                 color: var(--primary-color);
+                 margin-bottom: 15px;
+                 font-size: 1.4rem;
+             }
+
+             .code-section {
+                 background-color: white;
+                 padding: 30px;
+                 border-radius: 10px;
+                 box-shadow: var(--card-shadow);
+                 margin-bottom: 40px;
+             }
+
+             .code-block {
+                 background-color: #282c34;
+                 color: #abb2bf;
+                 padding: 20px;
+                 border-radius: 6px;
+                 overflow-x: auto;
+                 font-family: 'Courier New', Courier, monospace;
+                 margin: 15px 0;
+                 white-space: pre-wrap;
+             }
+
+             .code-title {
+                 margin-bottom: 15px;
+                 color: var(--primary-color);
+                 font-size: 1.3rem;
+             }
+
+             .button {
+                 display: inline-block;
+                 background-color: var(--primary-color);
+                 color: white;
+                 padding: 12px 24px;
+                 border-radius: 6px;
+                 text-decoration: none;
+                 font-weight: bold;
+                 transition: background-color 0.3s ease;
+                 margin: 10px 5px;
+             }
+
+             .button:hover {
+                 background-color: var(--secondary-color);
+             }
+
+             .button.accent {
+                 background-color: var(--accent-color);
+             }
+
+             .button.accent:hover {
+                 background-color: #c0392b;
+             }
+
+             footer {
+                 text-align: center;
+                 margin-top: 40px;
+                 padding: 20px;
+                 color: #666;
+             }
+
+             .try-it-section {
+                 background-color: white;
+                 padding: 30px;
+                 border-radius: 10px;
+                 box-shadow: var(--card-shadow);
+                 margin-bottom: 40px;
+             }
+
+             .input-group {
+                 margin-bottom: 20px;
+             }
+
+             .input-group label {
+                 display: block;
+                 margin-bottom: 8px;
+                 font-weight: bold;
+             }
+
+             .input-group input[type="text"] {
+                 width: 100%;
+                 padding: 12px;
+                 border: 1px solid #ddd;
+                 border-radius: 6px;
+                 font-size: 16px;
+             }
+
+             .checkbox-group {
+                 margin: 15px 0;
+             }
+
+             #response-container {
+                 background-color: #f5f5f5;
+                 padding: 20px;
+                 border-radius: 6px;
+                 min-height: 100px;
+                 margin-top: 20px;
+                 white-space: pre-wrap;
+                 display: none;
+             }
+         </style>
+     </head>
+     <body>
+         <div class="container">
+             <header>
+                 <h1>ScraperProxy API</h1>
+                 <p class="subtitle">A powerful web request proxy service that easily bypasses access restrictions</p>
+                 <div>
+                     <a href="/docs" class="button">API Docs</a>
+                     <a href="#try-it" class="button accent">Try It Now</a>
+                 </div>
+             </header>
+
+             <div class="features">
+                 <div class="feature-card">
+                     <h3>Bypass Access Restrictions</h3>
+                     <p>Built on cloudscraper, it easily gets past common site protections such as Cloudflare's anti-bot checks.</p>
+                 </div>
+                 <div class="feature-card">
+                     <h3>Streaming Responses</h3>
+                     <p>Handle large payloads with streaming responses, keeping the connection stable and data transfer efficient.</p>
+                 </div>
+                 <div class="feature-card">
+                     <h3>Simple to Use</h3>
+                     <p>A clean API design: a single URL parameter is all you need, with support for multiple request methods and custom options.</p>
+                 </div>
+             </div>
+
+             <div class="code-section">
+                 <h2 class="code-title">Quick Start</h2>
+                 <p>Using the proxy service is simple; just send a request to the following endpoint:</p>
+
+                 <div class="code-block">
+ # Basic usage
+ GET /proxy?url=https://example.com
+
+ # Enable streaming responses
+ GET /proxy?url=https://example.com&stream=true
+
+ # Custom request method and headers
+ POST /proxy
+ {
+     "url": "https://example.com",
+     "method": "POST",
+     "headers": {"Custom-Header": "Value"},
+     "data": {"key": "value"},
+     "stream": true
+ }
+                 </div>
+             </div>
+
+             <div class="try-it-section" id="try-it">
+                 <h2 class="code-title">Try It Now</h2>
+                 <div class="input-group">
+                     <label for="url-input">Enter the URL to request:</label>
+                     <input type="text" id="url-input" placeholder="https://example.com" value="https://example.com">
+                 </div>
+
+                 <div class="checkbox-group">
+                     <input type="checkbox" id="stream-checkbox" checked>
+                     <label for="stream-checkbox">Enable streaming response</label>
+                 </div>
+
+                 <button id="send-request" class="button">Send Request</button>
+
+                 <div id="response-container"></div>
+             </div>
+         </div>
+
+         <footer>
+             <p>&copy; 2025 ScraperProxy API. All rights reserved.</p>
+         </footer>
+
+         <script>
+             document.getElementById('send-request').addEventListener('click', async function() {
+                 const url = document.getElementById('url-input').value;
+                 const streamEnabled = document.getElementById('stream-checkbox').checked;
+                 const responseContainer = document.getElementById('response-container');
+
+                 if (!url) {
+                     alert('Please enter a valid URL');
+                     return;
+                 }
+
+                 responseContainer.style.display = 'block';
+                 responseContainer.textContent = 'Loading...';
+
+                 try {
+                     const proxyUrl = `/proxy?url=${encodeURIComponent(url)}&stream=${streamEnabled}`;
+
+                     if (streamEnabled) {
+                         responseContainer.textContent = '';
+                         const response = await fetch(proxyUrl);
+                         const reader = response.body.getReader();
+                         const decoder = new TextDecoder();  // one decoder so multi-byte characters split across chunks decode correctly
+                         while (true) {
+                             const { done, value } = await reader.read();
+                             if (done) break;
+                             const text = decoder.decode(value, { stream: true });
+                             responseContainer.textContent += text;
+                         }
+                     } else {
+                         const response = await fetch(proxyUrl);
+                         const data = await response.text();
+                         responseContainer.textContent = data;
+                     }
+                 } catch (error) {
+                     responseContainer.textContent = `Error: ${error.message}`;
+                 }
+             });
+         </script>
+     </body>
+     </html>
+     """
+     return html_content
+
+
+ @app.get("/", response_class=HTMLResponse)
  async def root():
+     return get_html_template()

  @app.api_route("/proxy", methods=["GET", "POST", "PUT", "DELETE", "OPTIONS", "HEAD", "PATCH"])
  async def proxy(request: Request):
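
Note: the Quick Start block added above documents the /proxy endpoint. As a rough illustration of how a client could exercise it, here is a minimal Python sketch using the requests library (which the project already pulls in); the base URL http://127.0.0.1:8000 is an assumption about a local uvicorn run and is not part of this commit.

import requests

BASE = "http://127.0.0.1:8000"  # assumed local uvicorn address; adjust to your deployment

# Plain (non-streaming) proxy request
resp = requests.get(f"{BASE}/proxy", params={"url": "https://example.com"})
print(resp.status_code, len(resp.text))

# Streaming proxy request: consume the body chunk by chunk as the server yields it
with requests.get(
    f"{BASE}/proxy",
    params={"url": "https://example.com", "stream": "true"},
    stream=True,
) as streamed:
    for chunk in streamed.iter_content(chunk_size=8192):
        if chunk:
            print(chunk.decode("utf-8", errors="replace"), end="")

Passing stream=True to requests mirrors the stream=true query flag, so chunks are printed as the proxy yields them instead of after the whole body has arrived.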
pyproject.toml CHANGED
@@ -1,5 +1,5 @@
  [project]
- name = "scrapercookie"
  version = "0.1.0"
  description = "Add your description here"
  readme = "README.md"
 
  [project]
+ name = "scraperproxy"
  version = "0.1.0"
  description = "Add your description here"
  readme = "README.md"
requirements.txt CHANGED
@@ -7,17 +7,17 @@ anyio==4.8.0
  certifi==2025.1.31
      # via requests
  chardet==5.2.0
-     # via scrapercookie (pyproject.toml)
  charset-normalizer==3.4.1
      # via requests
  click==8.1.8
      # via uvicorn
  cloudscraper==1.2.71
-     # via scrapercookie (pyproject.toml)
  colorama==0.4.6
      # via click
  fastapi==0.115.10
-     # via scrapercookie (pyproject.toml)
  h11==0.14.0
      # via uvicorn
  idna==3.10
@@ -26,7 +26,7 @@ idna==3.10
      #   requests
  pydantic==2.10.6
      # via
-     #   scrapercookie (pyproject.toml)
      #   fastapi
  pydantic-core==2.27.2
      # via pydantic
@@ -44,11 +44,11 @@ starlette==0.46.0
      # via fastapi
  typing-extensions==4.12.2
      # via
-     #   scrapercookie (pyproject.toml)
      #   fastapi
      #   pydantic
      #   pydantic-core
  urllib3==2.3.0
      # via requests
  uvicorn==0.34.0
-     # via scrapercookie (pyproject.toml)
 
  certifi==2025.1.31
      # via requests
  chardet==5.2.0
+     # via scraperproxy (pyproject.toml)
  charset-normalizer==3.4.1
      # via requests
  click==8.1.8
      # via uvicorn
  cloudscraper==1.2.71
+     # via scraperproxy (pyproject.toml)
  colorama==0.4.6
      # via click
  fastapi==0.115.10
+     # via scraperproxy (pyproject.toml)
  h11==0.14.0
      # via uvicorn
  idna==3.10
 
      #   requests
  pydantic==2.10.6
      # via
+     #   scraperproxy (pyproject.toml)
      #   fastapi
  pydantic-core==2.27.2
      # via pydantic
 
      # via fastapi
  typing-extensions==4.12.2
      # via
+     #   scraperproxy (pyproject.toml)
      #   fastapi
      #   pydantic
      #   pydantic-core
  urllib3==2.3.0
      # via requests
  uvicorn==0.34.0
+     # via scraperproxy (pyproject.toml)
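
Note: the "# via" annotations now attribute each pin to scraperproxy (pyproject.toml). A quick, hypothetical way to confirm that an installed environment still matches these pins is sketched below; the helper name check_pins and the default requirements.txt path are illustrative, not part of the repository.

from importlib import metadata
from pathlib import Path


def check_pins(requirements_path: str = "requirements.txt") -> None:
    """Compare pkg==version pins against what importlib.metadata reports as installed."""
    for line in Path(requirements_path).read_text().splitlines():
        line = line.strip()
        # Skip blanks and the "# via ..." annotation comments that uv/pip-compile emit
        if not line or line.startswith("#") or "==" not in line:
            continue
        name, pinned = line.split("==", 1)
        try:
            installed = metadata.version(name)
        except metadata.PackageNotFoundError:
            print(f"{name}: not installed (pinned {pinned})")
            continue
        status = "ok" if installed == pinned else f"mismatch (installed {installed})"
        print(f"{name}=={pinned}: {status}")


if __name__ == "__main__":
    check_pins()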
uv.lock CHANGED
@@ -245,7 +245,7 @@ wheels = [
  ]

  [[package]]
- name = "scrapercookie"
  version = "0.1.0"
  source = { virtual = "." }
  dependencies = [
 
  ]

  [[package]]
+ name = "scraperproxy"
  version = "0.1.0"
  source = { virtual = "." }
  dependencies = [
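
Note: the rename only holds together if pyproject.toml and uv.lock agree on the project name. A small consistency check is sketched below; it assumes Python 3.11+ (for the standard-library tomllib) and that it is run from the repository root, neither of which is stated by this commit.

import tomllib
from pathlib import Path

# Project name declared in pyproject.toml
pyproject = tomllib.loads(Path("pyproject.toml").read_text())
declared = pyproject["project"]["name"]

# uv.lock is TOML as well; collect every [[package]] name recorded in the lockfile
lock = tomllib.loads(Path("uv.lock").read_text())
locked_names = {pkg["name"] for pkg in lock.get("package", [])}

if declared in locked_names:
    print(f"uv.lock contains an entry for {declared!r}")
else:
    print(f"uv.lock has no entry for {declared!r}; the lockfile needs regenerating")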