github-actions[bot] committed
Commit 9c1cc33 · Parent: 2e466ee
Update from GitHub Actions

Files changed:
- index.html      +307  -0
- main.py          +50  -390
- pyproject.toml     +2  -0
- uv.lock           +65  -0
index.html
ADDED
@@ -0,0 +1,307 @@
+<!DOCTYPE html>
+<html lang="zh-CN">
+<head>
+    <meta charset="UTF-8">
+    <meta name="viewport" content="width=device-width, initial-scale=1.0">
+    <title>ScraperProxy API</title>
+    <style>
+        :root {
+            --primary-color: #3498db;
+            --secondary-color: #2980b9;
+            --accent-color: #e74c3c;
+            --text-color: #333;
+            --light-bg: #f5f7fa;
+            --card-shadow: 0 4px 6px rgba(0, 0, 0, 0.1);
+        }
+
+        * {
+            margin: 0;
+            padding: 0;
+            box-sizing: border-box;
+        }
+
+        body {
+            font-family: Arial, sans-serif;
+            line-height: 1.6;
+            color: var(--text-color);
+            background: linear-gradient(135deg, #f5f7fa 0%, #c3cfe2 100%);
+            min-height: 100vh;
+        }
+
+        .container {
+            max-width: 800px;
+            margin: 0 auto;
+            padding: 20px;
+        }
+
+        header {
+            text-align: center;
+            margin-bottom: 40px;
+            padding: 20px;
+            background-color: white;
+            border-radius: 10px;
+            box-shadow: var(--card-shadow);
+        }
+
+        h1 {
+            color: var(--primary-color);
+            margin-bottom: 10px;
+            font-size: 2.5rem;
+        }
+
+        h2 {
+            color: var(--primary-color);
+            margin: 25px 0 15px;
+            font-size: 1.8rem;
+        }
+
+        .subtitle {
+            font-size: 1.2rem;
+            color: #666;
+            margin-bottom: 20px;
+        }
+
+        pre, .code-block {
+            background-color: #282c34;
+            color: #abb2bf;
+            padding: 20px;
+            border-radius: 6px;
+            overflow-x: auto;
+            font-family: 'Courier New', Courier, monospace;
+            margin: 15px 0;
+            white-space: pre-wrap;
+        }
+
+        .features {
+            display: grid;
+            grid-template-columns: repeat(auto-fit, minmax(250px, 1fr));
+            gap: 20px;
+            margin-bottom: 40px;
+        }
+
+        .feature-card {
+            background-color: white;
+            padding: 25px;
+            border-radius: 10px;
+            box-shadow: var(--card-shadow);
+            transition: transform 0.3s ease;
+        }
+
+        .feature-card:hover {
+            transform: translateY(-5px);
+        }
+
+        .feature-card h3 {
+            color: var(--primary-color);
+            margin-bottom: 15px;
+            font-size: 1.4rem;
+        }
+
+        .section {
+            background-color: white;
+            padding: 30px;
+            border-radius: 10px;
+            box-shadow: var(--card-shadow);
+            margin-bottom: 30px;
+        }
+
+        .button {
+            display: inline-block;
+            background-color: var(--primary-color);
+            color: white;
+            padding: 12px 24px;
+            border-radius: 6px;
+            text-decoration: none;
+            font-weight: bold;
+            transition: background-color 0.3s ease;
+            margin: 10px 5px;
+            border: none;
+            cursor: pointer;
+        }
+
+        .button:hover {
+            background-color: var(--secondary-color);
+        }
+
+        .button.accent {
+            background-color: var(--accent-color);
+        }
+
+        .button.accent:hover {
+            background-color: #c0392b;
+        }
+
+        .input-group {
+            margin-bottom: 20px;
+        }
+
+        .input-group label {
+            display: block;
+            margin-bottom: 8px;
+            font-weight: bold;
+        }
+
+        .input-group input[type="text"] {
+            width: 100%;
+            padding: 12px;
+            border: 1px solid #ddd;
+            border-radius: 6px;
+            font-size: 16px;
+        }
+
+        .checkbox-group {
+            margin: 15px 0;
+        }
+
+        #response-container {
+            background-color: #f5f5f5;
+            padding: 20px;
+            border-radius: 6px;
+            min-height: 100px;
+            margin-top: 20px;
+            white-space: pre-wrap;
+            display: none;
+        }
+
+        ul {
+            list-style-position: inside;
+            margin: 10px 0;
+        }
+
+        ul li {
+            margin-bottom: 5px;
+        }
+
+        footer {
+            text-align: center;
+            margin-top: 40px;
+            padding: 20px;
+            color: #666;
+        }
+    </style>
+</head>
+<body>
+    <div class="container">
+        <header>
+            <h1>ScraperProxy API</h1>
+            <p class="subtitle">强大的网页请求代理服务,轻松绕过访问限制</p>
+            <div>
+                <a href="/docs" class="button">API 文档</a>
+                <a href="#try-it" class="button accent">立即尝试</a>
+            </div>
+        </header>
+
+        <div class="section">
+            <h2>支持的请求库</h2>
+            <p>这是一个支持两种强大请求库的代理服务:</p>
+            <ul>
+                <li><strong>CloudScraper</strong> - 专门用于绕过 Cloudflare 保护</li>
+                <li><strong>curl-cffi</strong> - 高性能的 cURL 实现</li>
+            </ul>
+        </div>
+
+        <div class="features">
+            <div class="feature-card">
+                <h3>绕过访问限制</h3>
+                <p>使用专业技术,轻松绕过常见的网站防护机制,如 Cloudflare 的反爬虫保护。</p>
+            </div>
+            <div class="feature-card">
+                <h3>支持流式响应</h3>
+                <p>通过流式响应处理大型数据,保持连接稳定,实现更高效的数据传输。</p>
+            </div>
+            <div class="feature-card">
+                <h3>简单易用</h3>
+                <p>简洁的 API 设计,只需一个 URL 参数即可使用,支持多种请求方法和自定义选项。</p>
+            </div>
+        </div>
+
+        <div class="section">
+            <h2>使用方法</h2>
+            <p>基本请求格式:</p>
+            <pre>GET /proxy?url=https://example.com
+POST /proxy?url=https://example.com</pre>
+
+            <h3>环境变量配置</h3>
+            <ul>
+                <li><strong>REQUEST_LIB</strong>: 选择请求库 (cloudscraper 或 curl_cffi)</li>
+                <li><strong>PROXY</strong>: 设置代理服务器</li>
+                <li><strong>TOKEN</strong>: 设置访问令牌</li>
+            </ul>
+
+            <h3>高级用法示例</h3>
+            <div class="code-block"># 启用流式响应
+GET /proxy?url=https://example.com&stream=true
+
+# 自定义请求方法和头信息
+POST /proxy
+{
+    "url": "https://example.com",
+    "method": "POST",
+    "headers": {"Custom-Header": "Value"},
+    "data": {"key": "value"},
+    "stream": true
+}</div>
+        </div>
+
+        <div class="section" id="try-it">
+            <h2>立即尝试</h2>
+            <div class="input-group">
+                <label for="url-input">输入要请求的 URL:</label>
+                <input type="text" id="url-input" placeholder="https://example.com" value="https://example.com">
+            </div>
+
+            <div class="checkbox-group">
+                <input type="checkbox" id="stream-checkbox" checked>
+                <label for="stream-checkbox">启用流式响应</label>
+            </div>
+
+            <button id="send-request" class="button">发送请求</button>
+
+            <div id="response-container"></div>
+        </div>
+    </div>
+
+    <footer>
+        <p>© 2025 ScraperProxy API. 所有权利保留。</p>
+    </footer>
+
+    <script>
+        document.getElementById('send-request').addEventListener('click', async function() {
+            const url = document.getElementById('url-input').value;
+            const streamEnabled = document.getElementById('stream-checkbox').checked;
+            const responseContainer = document.getElementById('response-container');
+
+            if (!url) {
+                alert('请输入有效的 URL');
+                return;
+            }
+
+            responseContainer.style.display = 'block';
+            responseContainer.textContent = '正在加载...';
+
+            try {
+                const proxyUrl = `/proxy?url=${encodeURIComponent(url)}&stream=${streamEnabled}`;
+
+                if (streamEnabled) {
+                    responseContainer.textContent = '';
+                    const response = await fetch(proxyUrl);
+                    const reader = response.body.getReader();
+
+                    while (true) {
+                        const { done, value } = await reader.read();
+                        if (done) break;
+                        const text = new TextDecoder().decode(value);
+                        responseContainer.textContent += text;
+                    }
+                } else {
+                    const response = await fetch(proxyUrl);
+                    const data = await response.text();
+                    responseContainer.textContent = data;
+                }
+            } catch (error) {
+                responseContainer.textContent = `错误: ${error.message}`;
+            }
+        });
+    </script>
+</body>
+</html>
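Note: the page added above documents the query-parameter interface (GET /proxy?url=...&stream=true). A minimal sketch of exercising that endpoint from Python with the requests library is shown below; the base URL http://localhost:8000 is an assumption about the deployment, not something stated in this commit, and it presumes the TOKEN check is not enabled.

import requests

BASE = "http://localhost:8000"  # assumed host/port; adjust to your deployment

# Plain (non-streaming) proxy request
resp = requests.get(f"{BASE}/proxy", params={"url": "https://example.com"})
print(resp.status_code, len(resp.text))

# Streaming proxy request: consume the body chunk by chunk,
# mirroring what the page's fetch()/getReader() script does
with requests.get(
    f"{BASE}/proxy",
    params={"url": "https://example.com", "stream": "true"},
    stream=True,
) as r:
    for chunk in r.iter_content(chunk_size=8192):
        if chunk:
            print(chunk.decode(errors="replace"), end="")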
main.py
CHANGED
@@ -8,6 +8,11 @@ import uvicorn
 import asyncio
 from urllib.parse import urlparse
 import time
+from curl_cffi import requests
+from dotenv import load_dotenv
+
+# 加载.env文件
+load_dotenv()
 
 app = FastAPI(
     title="ScraperProxy",
@@ -31,312 +36,46 @@ async def stream_generator(response):
         yield chunk
         await asyncio.sleep(0.001)  # 让出控制权,保持异步特性
 
-
-
-
-
-
-def get_html_template():
-    # 这里可以从文件读取 HTML,或者直接返回上面的 HTML 字符串
-    # 为了简化示例,我们直接返回一个字符串变量
-    html_content = """<!DOCTYPE html>
-<html lang="zh-CN">
-<head>
-    <meta charset="UTF-8">
-    <meta name="viewport" content="width=device-width, initial-scale=1.0">
-    <title>ScraperProxy API - 网页请求代理服务</title>
-    <style>
-        :root {
-            --primary-color: #3498db;
-            --secondary-color: #2980b9;
-            --accent-color: #e74c3c;
-            --text-color: #333;
-            --light-bg: #f5f7fa;
-            --card-shadow: 0 4px 6px rgba(0, 0, 0, 0.1);
-        }
-
-        * {
-            margin: 0;
-            padding: 0;
-            box-sizing: border-box;
-        }
-
-        body {
-            font-family: 'Segoe UI', Tahoma, Geneva, Verdana, sans-serif;
-            line-height: 1.6;
-            color: var(--text-color);
-            background: linear-gradient(135deg, #f5f7fa 0%, #c3cfe2 100%);
-            min-height: 100vh;
-            padding: 20px;
-        }
-
-        .container {
-            max-width: 1200px;
-            margin: 0 auto;
-            padding: 20px;
-        }
-
-        header {
-            text-align: center;
-            margin-bottom: 40px;
-            padding: 20px;
-            background-color: white;
-            border-radius: 10px;
-            box-shadow: var(--card-shadow);
-        }
-
-        h1 {
-            color: var(--primary-color);
-            margin-bottom: 10px;
-            font-size: 2.5rem;
-        }
-
-        .subtitle {
-            font-size: 1.2rem;
-            color: #666;
-            margin-bottom: 20px;
-        }
-
-        .features {
-            display: grid;
-            grid-template-columns: repeat(auto-fit, minmax(300px, 1fr));
-            gap: 20px;
-            margin-bottom: 40px;
-        }
-
-        .feature-card {
-            background-color: white;
-            padding: 25px;
-            border-radius: 10px;
-            box-shadow: var(--card-shadow);
-            transition: transform 0.3s ease;
-        }
-
-        .feature-card:hover {
-            transform: translateY(-5px);
-        }
-
-        .feature-card h3 {
-            color: var(--primary-color);
-            margin-bottom: 15px;
-            font-size: 1.4rem;
-        }
-
-        .code-section {
-            background-color: white;
-            padding: 30px;
-            border-radius: 10px;
-            box-shadow: var(--card-shadow);
-            margin-bottom: 40px;
-        }
-
-        .code-block {
-            background-color: #282c34;
-            color: #abb2bf;
-            padding: 20px;
-            border-radius: 6px;
-            overflow-x: auto;
-            font-family: 'Courier New', Courier, monospace;
-            margin: 15px 0;
-            white-space: pre-wrap;
-        }
-
-        .code-title {
-            margin-bottom: 15px;
-            color: var(--primary-color);
-            font-size: 1.3rem;
-        }
-
-        .button {
-            display: inline-block;
-            background-color: var(--primary-color);
-            color: white;
-            padding: 12px 24px;
-            border-radius: 6px;
-            text-decoration: none;
-            font-weight: bold;
-            transition: background-color 0.3s ease;
-            margin: 10px 5px;
-        }
-
-        .button:hover {
-            background-color: var(--secondary-color);
-        }
-
-        .button.accent {
-            background-color: var(--accent-color);
-        }
-
-        .button.accent:hover {
-            background-color: #c0392b;
-        }
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-        }
-
-        .input-group {
-            margin-bottom: 20px;
-        }
-
-        .input-group label {
-            display: block;
-            margin-bottom: 8px;
-            font-weight: bold;
-        }
-
-        .input-group input[type="text"] {
-            width: 100%;
-            padding: 12px;
-            border: 1px solid #ddd;
-            border-radius: 6px;
-            font-size: 16px;
-        }
-
-        .checkbox-group {
-            margin: 15px 0;
-        }
-
-        #
-
-
-
-
-            margin-top: 20px;
-            white-space: pre-wrap;
-            display: none;
-        }
-    </style>
-</head>
-<body>
-    <div class="container">
-        <header>
-            <h1>ScraperProxy API</h1>
-            <p class="subtitle">强大的网页请求代理服务,轻松绕过访问限制</p>
-            <div>
-                <a href="/docs" class="button">API 文档</a>
-                <a href="#try-it" class="button accent">立即尝试</a>
-            </div>
-        </header>
-
-        <div class="features">
-            <div class="feature-card">
-                <h3>绕过访问限制</h3>
-                <p>使用 cloudscraper 技术,轻松绕过常见的网站防护机制,如 Cloudflare 的反爬虫保护。</p>
-            </div>
-            <div class="feature-card">
-                <h3>支持流式响应</h3>
-                <p>通过流式响应处理大型数据,保持连接稳定,实现更高效的数据传输。</p>
-            </div>
-            <div class="feature-card">
-                <h3>简单易用</h3>
-                <p>简洁的 API 设计,只需一个 URL 参数即可使用,支持多种请求方法和自定义选项。</p>
-            </div>
-        </div>
-
-
-            <h2 class="code-title">快速开始</h2>
-            <p>使用我们的代理服务非常简单,只需发送请求到以下端点:</p>
-
-            <div class="code-block">
-# 基本用法
-GET /proxy?url=https://example.com
-
-# 启用流式响应
-GET /proxy?url=https://example.com&stream=true
-
-# 自定义请求方法和头信息
-POST /proxy
-{
-    "url": "https://example.com",
-    "method": "POST",
-    "headers": {"Custom-Header": "Value"},
-    "data": {"key": "value"},
-    "stream": true
-}
-            </div>
-        </div>
-
-        <div class="try-it-section" id="try-it">
-            <h2 class="code-title">立即尝试</h2>
-            <div class="input-group">
-                <label for="url-input">输入要请求的 URL:</label>
-                <input type="text" id="url-input" placeholder="https://example.com" value="https://example.com">
-            </div>
-
-            <div class="checkbox-group">
-                <input type="checkbox" id="stream-checkbox" checked>
-                <label for="stream-checkbox">启用流式响应</label>
-            </div>
-
-            <button id="send-request" class="button">发送请求</button>
-
-            <div id="response-container"></div>
-        </div>
-    </div>
-
-    <footer>
-        <p>© 2025 ScraperProxy API. 所有权利保留。</p>
-    </footer>
-
-    <script>
-        document.getElementById('send-request').addEventListener('click', async function() {
-            const url = document.getElementById('url-input').value;
-            const streamEnabled = document.getElementById('stream-checkbox').checked;
-            const responseContainer = document.getElementById('response-container');
-
-            if (!url) {
-                alert('请输入有效的 URL');
-                return;
-            }
-
-            responseContainer.style.display = 'block';
-            responseContainer.textContent = '正在加载...';
-
-            try {
-                const proxyUrl = `/proxy?url=${encodeURIComponent(url)}&stream=${streamEnabled}`;
-
-                if (streamEnabled) {
-                    responseContainer.textContent = '';
-                    const response = await fetch(proxyUrl);
-                    const reader = response.body.getReader();
-
-                    while (true) {
-                        const { done, value } = await reader.read();
-                        if (done) break;
-                        const text = new TextDecoder().decode(value);
-                        responseContainer.textContent += text;
-                    }
-                } else {
-                    const response = await fetch(proxyUrl);
-                    const data = await response.text();
-                    responseContainer.textContent = data;
-                }
-            } catch (error) {
-                responseContainer.textContent = `错误: ${error.message}`;
-            }
-        });
-    </script>
-</body>
-</html>
-"""
-    return html_content
-
+# 获取环境变量中的请求库选择
+REQUEST_LIB = os.environ.get('REQUEST_LIB', 'cloudscraper').lower()
+
+async def make_request(method: str, **kwargs):
+    """统一的请求处理函数"""
+    if REQUEST_LIB == 'cloudscraper':
+        scraper = cloudscraper.create_scraper(delay=10)
+
+        # 设置代理
+        proxy = os.environ.get('PROXY')
+        if proxy:
+            scraper.proxies = {
+                'http': proxy,
+                'https': proxy
+            }
+
+        # 根据方法发送请求
+        return getattr(scraper, method.lower())(**kwargs)
+    else:
+        # 使用 curl_cffi
+        proxy = os.environ.get('PROXY')
+        proxies = {'http': proxy, 'https': proxy} if proxy else None
+
+        # curl_cffi 的请求配置
+        request_config = {
+            **kwargs,
+            'proxies': proxies,
+            'impersonate': 'chrome110',
+        }
+
+        return requests.request(method, **request_config)
 
 @app.get("/", response_class=HTMLResponse)
 async def root():
-
+    """返回index.html的内容"""
+    try:
+        with open("index.html", "r", encoding="utf-8") as f:
+            return f.read()
+    except FileNotFoundError:
+        return "Welcome to Scraper Proxy!"
 
 @app.api_route("/proxy", methods=["GET", "POST", "PUT", "DELETE", "OPTIONS", "HEAD", "PATCH"])
 async def proxy(request: Request):
@@ -365,79 +104,21 @@ async def proxy(request: Request):
             detail="Token无效"
         )
 
-    #
-    method = request.method
-
+    # 获取target_url
     target_url = request.query_params.get("url")
     if not target_url:
         raise HTTPException(status_code=400, detail="必须提供目标URL")
 
-    # 检查是否请求流式响应
-    stream_request = "stream" in request.query_params and request.query_params["stream"].lower() in ["true", "1", "yes"]
-
-    # 创建带有代理的 scraper
-    # 创建cloudscraper实例
-    scraper = cloudscraper.create_scraper(
-        # browser={
-        #     'browser': 'chrome',
-        #     'platform': 'windows',
-        #     'mobile': False
-        # },
-        # captcha={
-        #     'provider': '2captcha',
-        #     'api_key': ' '
-        # },
-        #debug=True,
-        delay=10
-    )
-
-    # 从请求中获取cookies并设置到scraper
-    cookies = request.cookies
-    for key, value in cookies.items():
-        scraper.cookies.set(key, value)
-
-    # 检查环境变量PROXY是否存在
-    proxy = os.environ.get('PROXY')
-    if proxy:
-        # 如果环境变量存在,则设置代理
-        scraper.proxies = {
-            'http': proxy,
-            'https': proxy
-        }
-        # 测试代理是否生效
-        # response = scraper.get('https://httpbin.org/ip')
-        # print(response.text)
-
     # 获取home_url
     home_url = request.query_params.get("home")
     if not home_url:
         # 从target_url中提取home_url
         parsed_url = urlparse(target_url)
         home_url = f"{parsed_url.scheme}://{parsed_url.netloc}/"
-
-    #
-
-
-    home_response = None
-
-    for attempt in range(max_retries):
-        try:
-            home_response = scraper.get(home_url, headers= {
-                "sec-fetch-dest": "document"
-            })
-            print(f"主页{home_url}响应 (尝试 {attempt + 1}): {home_response.status_code}")
-
-            if home_response.status_code == 200:
-                break
-
-            if attempt < max_retries - 1:  # 如果不是最后一次尝试
-                time.sleep(retry_delay)
-
-        except Exception as e:
-            print(f"主页请求失败 (尝试 {attempt + 1}): {str(e)}")
-            if attempt < max_retries - 1:
-                time.sleep(retry_delay)
-
+
+    # 检查是否请求流式响应
+    stream_request = "stream" in request.query_params and request.query_params["stream"].lower() in ["true", "1", "yes"]
+
     # 获取请求体
     body = await request.body()
 
@@ -447,13 +128,11 @@ async def proxy(request: Request):
     params.pop("url", None)
     params.pop("stream", None)
 
-
     # 获取原始请求头
     headers = dict(request.headers)
     # 移除可能导致问题的头
     headers.pop("host", None)
-    headers.pop("authorization", None)
-    headers.pop("cookie", None)
+    headers.pop("authorization", None)
    headers.pop("x-forwarded-for", None)
     headers.pop("x-forwarded-proto", None)
     headers.pop("x-forwarded-port", None)
@@ -462,18 +141,12 @@ async def proxy(request: Request):
     headers.pop("x-ip-token", None)
     headers.pop("x-direct-url", None)
     headers.pop("x-direct-url", None)
-    headers.pop("accept", None)
-    headers.pop("accept-language", None)
-    headers.pop("accept-encoding", None)
-    headers.pop("content-type", None)
-    headers.pop("content-length", None)
-    headers.pop("user-agent", None)
     print(f"{headers}")
 
     # 构建请求参数
     request_kwargs = {
         "url": target_url,
-        "headers":
+        "headers": headers,
         "params": params,
         "stream": stream_request  # 设置stream参数
     }
@@ -481,24 +154,9 @@ async def proxy(request: Request):
     # 如果有请求体,添加到请求参数中
     if body:
         request_kwargs["data"] = body
-
-    #
-
-        response = scraper.get(**request_kwargs)
-    elif method == "POST":
-        response = scraper.post(**request_kwargs)
-    elif method == "PUT":
-        response = scraper.put(**request_kwargs)
-    elif method == "DELETE":
-        response = scraper.delete(**request_kwargs)
-    elif method == "HEAD":
-        response = scraper.head(**request_kwargs)
-    elif method == "OPTIONS":
-        response = scraper.options(**request_kwargs)
-    elif method == "PATCH":
-        response = scraper.patch(**request_kwargs)
-    else:
-        raise HTTPException(status_code=405, detail=f"不支持的方法: {method}")
+
+    # 使用统一的请求函数发送请求
+    response = await make_request(request.method, **request_kwargs)
 
     # 处理流式响应
     if stream_request:
@@ -534,7 +192,9 @@ async def proxy(request: Request):
         return proxy_response
 
     except Exception as e:
-
+        error = f"代理请求失败: {str(e)}"
+        print(error)
+        raise HTTPException(status_code=500, detail=error)
 
 
 
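Note: with this change the backend library, upstream proxy, and access token are all driven by environment variables (REQUEST_LIB, PROXY, TOKEN), loaded via python-dotenv. A minimal sketch of configuring and starting the service from Python is shown below; the host/port values, the example proxy address, and the "main:app" module path are assumptions for illustration, not taken from the diff.

import os
import uvicorn

# Select curl_cffi instead of the default cloudscraper backend
os.environ.setdefault("REQUEST_LIB", "curl_cffi")
# Optional upstream proxy (variable name from the diff; value illustrative)
os.environ.setdefault("PROXY", "http://127.0.0.1:7890")
# os.environ.setdefault("TOKEN", "change-me")  # optional access token

if __name__ == "__main__":
    # Assumes the FastAPI instance is exposed as `app` in main.py
    uvicorn.run("main:app", host="0.0.0.0", port=8000)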
pyproject.toml
CHANGED
@@ -7,8 +7,10 @@ requires-python = ">=3.13"
 dependencies = [
     "chardet>=5.2.0",
     "cloudscraper>=1.2.71",
+    "curl-cffi>=0.9.0",
     "fastapi>=0.115.10",
     "pydantic>=2.10.6",
+    "python-dotenv>=1.0.1",
     "typing-extensions>=4.12.2",
     "uvicorn>=0.34.0",
 ]
uv.lock
CHANGED
@@ -32,6 +32,28 @@ wheels = [
     { url = "https://mirrors.aliyun.com/pypi/packages/38/fc/bce832fd4fd99766c04d1ee0eead6b0ec6486fb100ae5e74c1d91292b982/certifi-2025.1.31-py3-none-any.whl", hash = "sha256:ca78db4565a652026a4db2bcdf68f2fb589ea80d0be70e03929ed730746b84fe" },
 ]
 
+[[package]]
+name = "cffi"
+version = "1.17.1"
+source = { registry = "https://mirrors.aliyun.com/pypi/simple/" }
+dependencies = [
+    { name = "pycparser" },
+]
+sdist = { url = "https://mirrors.aliyun.com/pypi/packages/fc/97/c783634659c2920c3fc70419e3af40972dbaf758daa229a7d6ea6135c90d/cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824" }
+wheels = [
+    { url = "https://mirrors.aliyun.com/pypi/packages/8d/f8/dd6c246b148639254dad4d6803eb6a54e8c85c6e11ec9df2cffa87571dbe/cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e" },
+    { url = "https://mirrors.aliyun.com/pypi/packages/8b/f1/672d303ddf17c24fc83afd712316fda78dc6fce1cd53011b839483e1ecc8/cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2" },
+    { url = "https://mirrors.aliyun.com/pypi/packages/0e/2d/eab2e858a91fdff70533cab61dcff4a1f55ec60425832ddfdc9cd36bc8af/cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3" },
+    { url = "https://mirrors.aliyun.com/pypi/packages/75/b2/fbaec7c4455c604e29388d55599b99ebcc250a60050610fadde58932b7ee/cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683" },
+    { url = "https://mirrors.aliyun.com/pypi/packages/4f/b7/6e4a2162178bf1935c336d4da8a9352cccab4d3a5d7914065490f08c0690/cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5" },
+    { url = "https://mirrors.aliyun.com/pypi/packages/c7/8a/1d0e4a9c26e54746dc08c2c6c037889124d4f59dffd853a659fa545f1b40/cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4" },
+    { url = "https://mirrors.aliyun.com/pypi/packages/26/9f/1aab65a6c0db35f43c4d1b4f580e8df53914310afc10ae0397d29d697af4/cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd" },
+    { url = "https://mirrors.aliyun.com/pypi/packages/5f/e4/fb8b3dd8dc0e98edf1135ff067ae070bb32ef9d509d6cb0f538cd6f7483f/cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed" },
+    { url = "https://mirrors.aliyun.com/pypi/packages/f1/47/d7145bf2dc04684935d57d67dff9d6d795b2ba2796806bb109864be3a151/cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9" },
+    { url = "https://mirrors.aliyun.com/pypi/packages/bf/ee/f94057fa6426481d663b88637a9a10e859e492c73d0384514a17d78ee205/cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d" },
+    { url = "https://mirrors.aliyun.com/pypi/packages/7c/fc/6a8cb64e5f0324877d503c854da15d76c1e50eb722e320b15345c4d0c6de/cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a" },
+]
+
 [[package]]
 name = "chardet"
 version = "5.2.0"
@@ -98,6 +120,27 @@ wheels = [
     { url = "https://mirrors.aliyun.com/pypi/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6" },
 ]
 
+[[package]]
+name = "curl-cffi"
+version = "0.9.0"
+source = { registry = "https://mirrors.aliyun.com/pypi/simple/" }
+dependencies = [
+    { name = "certifi" },
+    { name = "cffi" },
+]
+sdist = { url = "https://mirrors.aliyun.com/pypi/packages/9f/7d/c326faf5a772a11011dcc30aca5b64c197bcf59fdd9b90bf28b700d6d682/curl_cffi-0.9.0.tar.gz", hash = "sha256:4818e074b61cb209bd8d4d0d03783313d4773e6b51f8b815e25aad9cc146a7b7" }
+wheels = [
+    { url = "https://mirrors.aliyun.com/pypi/packages/2a/96/befafab403a12a25e9fca376da20512b5962a2bf5810921c2fd27c01e96f/curl_cffi-0.9.0-cp38-abi3-macosx_10_9_x86_64.whl", hash = "sha256:429a14e724898b7001be2776caa87c294e8062f0ac652619df5854eeae6e156c" },
+    { url = "https://mirrors.aliyun.com/pypi/packages/2a/88/c3dfdf28448fad4be68a37cf758941583e4c1ff300b4e384197bedfda493/curl_cffi-0.9.0-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:7968c3ea37bec96bbe4b623c0fa59c09d3cdaf173da2e0a516b19db52f05caa4" },
+    { url = "https://mirrors.aliyun.com/pypi/packages/3d/7a/feda016a35e904f0e4f753d41677f343e63271373b9c3648183e8f29ca76/curl_cffi-0.9.0-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26d61c5b5c59e9c4a006cba35feccd73c75ce981db0da89a3da88dd280ead7a0" },
+    { url = "https://mirrors.aliyun.com/pypi/packages/28/27/d0cbb7a7cc9c444688c06cb41271d7ee7d429966d77dbaedbf19dd52d30e/curl_cffi-0.9.0-cp38-abi3-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7b1177203e47d3248ccb250e63983eaddf39fce5332f91cc2993ea54cae5aa21" },
+    { url = "https://mirrors.aliyun.com/pypi/packages/c1/2f/75c971a345737c330d98fe75f7ef538209d77aa3f1197752766566815bac/curl_cffi-0.9.0-cp38-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f41bd0a5dafe3fe19319da1c31302a40634ec4779769ca3d954d8673eb2b4f2" },
+    { url = "https://mirrors.aliyun.com/pypi/packages/e3/3b/0b11dfcdd4dcbefa025e8049f9479ace3321435afb6f78e3394cea395591/curl_cffi-0.9.0-cp38-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:752a4686c062a8bd6e25961845ea812704ba089a0e6863957deb2a30590105bb" },
+    { url = "https://mirrors.aliyun.com/pypi/packages/11/76/f2c551acfb6538c3932734b83ae7b87baf7b795852b6600a7519ff68bb05/curl_cffi-0.9.0-cp38-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:ea8f0511bc6882d30a985ddc53fa26d109f2977715d4851aef162bfa308cd2a8" },
+    { url = "https://mirrors.aliyun.com/pypi/packages/f3/0f/3eab3d6170813947c7ba2971ec516322be2678a088f190ead0bf45d7e596/curl_cffi-0.9.0-cp38-abi3-win32.whl", hash = "sha256:202145686c5ebc00635b9b15cc8afa82523f14197a640b618bb09d59e74889ba" },
+    { url = "https://mirrors.aliyun.com/pypi/packages/e5/49/7207356b343d9aa3590461035eabcef838af4c8d042ddac687fce878a999/curl_cffi-0.9.0-cp38-abi3-win_amd64.whl", hash = "sha256:81ba09622e68f8392236212dd0afd75dcd18cbe1be16467deb4d441d62cfc8a3" },
+]
+
 [[package]]
 name = "fastapi"
 version = "0.115.10"
@@ -152,6 +195,15 @@ wheels = [
     { url = "https://mirrors.aliyun.com/pypi/packages/b1/1f/4e7a9b6b33a085172a826d1f9d0a19a2e77982298acea13d40442f14ef28/poethepoet-0.32.2-py3-none-any.whl", hash = "sha256:97e165de8e00b07d33fd8d72896fad8b20ccafcd327b1118bb6a3da26af38d33" },
 ]
 
+[[package]]
+name = "pycparser"
+version = "2.22"
+source = { registry = "https://mirrors.aliyun.com/pypi/simple/" }
+sdist = { url = "https://mirrors.aliyun.com/pypi/packages/1d/b2/31537cf4b1ca988837256c910a668b553fceb8f069bedc4b1c826024b52c/pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6" }
+wheels = [
+    { url = "https://mirrors.aliyun.com/pypi/packages/13/a3/a812df4e2dd5696d1f351d58b8fe16a405b234ad2886a0dab9183fb78109/pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc" },
+]
+
 [[package]]
 name = "pydantic"
 version = "2.10.6"
@@ -200,6 +252,15 @@ wheels = [
     { url = "https://mirrors.aliyun.com/pypi/packages/1c/a7/c8a2d361bf89c0d9577c934ebb7421b25dc84bf3a8e3ac0a40aed9acc547/pyparsing-3.2.1-py3-none-any.whl", hash = "sha256:506ff4f4386c4cec0590ec19e6302d3aedb992fdc02c761e90416f158dacf8e1" },
 ]
 
+[[package]]
+name = "python-dotenv"
+version = "1.0.1"
+source = { registry = "https://mirrors.aliyun.com/pypi/simple/" }
+sdist = { url = "https://mirrors.aliyun.com/pypi/packages/bc/57/e84d88dfe0aec03b7a2d4327012c1627ab5f03652216c63d49846d7a6c58/python-dotenv-1.0.1.tar.gz", hash = "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca" }
+wheels = [
+    { url = "https://mirrors.aliyun.com/pypi/packages/6a/3e/b68c118422ec867fa7ab88444e1274aa40681c606d59ac27de5a5588f082/python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a" },
+]
+
 [[package]]
 name = "pyyaml"
 version = "6.0.2"
@@ -251,8 +312,10 @@ source = { virtual = "." }
 dependencies = [
     { name = "chardet" },
     { name = "cloudscraper" },
+    { name = "curl-cffi" },
     { name = "fastapi" },
     { name = "pydantic" },
+    { name = "python-dotenv" },
     { name = "typing-extensions" },
     { name = "uvicorn" },
 ]
@@ -266,8 +329,10 @@ dev = [
 requires-dist = [
     { name = "chardet", specifier = ">=5.2.0" },
     { name = "cloudscraper", specifier = ">=1.2.71" },
+    { name = "curl-cffi", specifier = ">=0.9.0" },
     { name = "fastapi", specifier = ">=0.115.10" },
     { name = "pydantic", specifier = ">=2.10.6" },
+    { name = "python-dotenv", specifier = ">=1.0.1" },
     { name = "typing-extensions", specifier = ">=4.12.2" },
     { name = "uvicorn", specifier = ">=0.34.0" },
 ]