"""Reverse proxy in front of the OpenRouter chat-completions API.

Rotates through a pool of API keys (from the API_KEYS env var, one per
line), streams upstream responses back to the client, and serves a cached
list of free models.
"""

from datetime import datetime, timedelta, timezone
from functools import partial, wraps
from os import environ
from typing import Callable, Coroutine

from anyio import create_task_group
from fastapi import FastAPI, HTTPException, Request
from fastapi.responses import HTMLResponse, StreamingResponse
from httpx import AsyncClient, RequestError, Timeout
from starlette.types import Receive, Scope, Send

# Keep only lines that look like real secret keys ('sk-...').
API_KEYS = [
    line
    for line in environ['API_KEYS'].strip().split('\n')
    if line and line.startswith('sk-')
]
COMPLETIONS_URL = 'https://openrouter.ai/api/v1/chat/completions'

app = FastAPI(title='reverse-proxy')


class Cache:
    """Tiny in-memory cache whose entire contents expire together.

    Expiry is checked lazily on ``get``: once ``expire`` has elapsed since
    the last reset, every entry is dropped at once.
    """

    def __init__(self, expire: timedelta):
        self.expire = expire          # lifetime of the whole cache
        self.cache: dict = {}         # key -> cached value
        # Timezone-aware "now"; datetime.utcnow() is deprecated.
        self.timestamp = datetime.now(timezone.utc)

    async def get(self, key):
        """Return the value for *key*, or None if absent or expired."""
        now = datetime.now(timezone.utc)
        if now - self.timestamp > self.expire:
            self.cache.clear()
            self.timestamp = now
        return self.cache.get(key)

    async def set(self, key, value):
        """Store *value* under *key*."""
        self.cache[key] = value


cache = Cache(expire=timedelta(hours=1))


def cache_results(func):
    """Decorator memoizing an async function's result in the shared cache."""

    @wraps(func)  # preserve the wrapped coroutine's name/docstring
    async def wrapper(*args, **kwargs):
        cache_key = f"{func.__name__}:{args}:{kwargs}"
        cached_result = await cache.get(cache_key)
        if cached_result is not None:
            return cached_result
        result = await func(*args, **kwargs)
        await cache.set(cache_key, result)
        return result

    return wrapper


class AuthError(Exception):
    """Upstream rejected the API key (invalid key or rate limit hit)."""


class CensoredError(Exception):
    """Upstream refused the request for moderation reasons (HTTP 403)."""


@app.middleware('http')
async def add_cors_headers(request: Request, call_next):
    """Attach permissive CORS headers to every response."""
    response = await call_next(request)
    response.headers['Access-Control-Allow-Origin'] = '*'
    response.headers['Access-Control-Allow-Methods'] = 'GET, POST, PUT, DELETE, PATCH, OPTIONS'
    response.headers['Access-Control-Allow-Headers'] = 'Content-Type, Authorization'
    return response


@app.get('/')
async def root():
    return HTMLResponse('ну пролапс, ну и что')


class OverrideStreamResponse(StreamingResponse):
    """StreamingResponse that delays sending status/headers until the first
    body chunk arrives, so the body generator can mutate them first
    (via ``init_headers``) to mirror the upstream response."""

    async def stream_response(self, send: Send) -> None:
        first_chunk = True
        async for chunk in self.body_iterator:
            if first_chunk:
                # Headers are sent only now, after the generator has had a
                # chance to call init_headers().
                await self.send_request_header(send)
                first_chunk = False
            if not isinstance(chunk, bytes):
                chunk = chunk.encode(self.charset)
            await send({'type': 'http.response.body', 'body': chunk, 'more_body': True})
        if first_chunk:
            # Empty body: headers were never sent; send them now.
            await self.send_request_header(send)
        await send({'type': 'http.response.body', 'body': b'', 'more_body': False})

    async def send_request_header(self, send: Send) -> None:
        await send({
            'type': 'http.response.start',
            'status': self.status_code,
            'headers': self.raw_headers,
        })

    async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
        async with create_task_group() as task_group:

            async def wrap(func: Callable[[], Coroutine]) -> None:
                await func()
                # Whichever task finishes first cancels the other.
                task_group.cancel_scope.cancel()

            task_group.start_soon(wrap, partial(self.stream_response, send))
            await wrap(partial(self.listen_for_disconnect, receive))
        if self.background is not None:
            await self.background()


async def proxy_openai_api(request: Request):
    """Proxy a request to the OpenRouter completions endpoint, rotating keys.

    Keys from API_KEYS are tried in order inside the streaming generator;
    a key rejected upstream with 401/402/429 is skipped and the next one is
    tried.  (The key loop must live inside the generator: an async generator
    raises on iteration, not on creation, so catching AuthError around the
    response construction — as a naive implementation would — never fires.)
    """
    hop_by_hop = {'host', 'content-length', 'x-forwarded-for', 'x-real-ip', 'connection'}
    headers = {k: v for k, v in request.headers.items() if k not in hop_by_hop}

    def update_authorization_header(api_key):
        # Replace the client-supplied Authorization header (any casing).
        auth_header_key = next(
            (k for k in headers.keys() if k.lower() == 'authorization'),
            'Authorization',
        )
        headers[auth_header_key] = f'Bearer {api_key}'

    # NOTE(review): verify=False disables TLS certificate validation —
    # confirm this is intentional for this deployment.
    client = AsyncClient(
        verify=False,
        follow_redirects=True,
        timeout=Timeout(connect=10, read=90, write=10, pool=10),
    )
    request_body = await request.json() if request.method in {'POST', 'PUT'} else None

    async def stream_api_response():
        # Executes lazily once the response starts streaming; closes over
        # `response` (assigned below) to mirror upstream headers.
        try:
            for api_key in API_KEYS:
                update_authorization_header(api_key)
                try:
                    streaming = client.stream(
                        request.method,
                        COMPLETIONS_URL,
                        headers=headers,
                        params=request.query_params,
                        json=request_body,
                    )
                    async with streaming as stream_response:
                        if stream_response.status_code in {401, 402, 429}:
                            # Invalid/exhausted key: rotate to the next one.
                            raise AuthError('ключ API недействителен или превышен лимит отправки запросов')
                        if stream_response.status_code == 403:
                            # Moderation refusal specific to OpenRouter.
                            raise CensoredError('отклонено по цензуре')
                        response.init_headers({
                            k: v
                            for k, v in stream_response.headers.items()
                            if k not in {'content-length', 'content-encoding', 'alt-svc'}
                        })
                        async for chunk in stream_response.aiter_bytes():
                            yield chunk
                        return
                except AuthError:
                    print(f'ключ API {api_key} недействителен или превышен лимит отправки запросов')
                    continue
                except RequestError as exc:
                    raise HTTPException(status_code=500, detail=f'произошла ошибка при запросе: {exc}')
            raise HTTPException(status_code=401, detail='все ключи API использованы, доступ запрещен.')
        finally:
            await client.aclose()  # never leak the connection pool

    response = OverrideStreamResponse(stream_api_response())
    return response


@cache_results
async def get_free_models():
    """Fetch OpenRouter's model list and keep only the ':free' models.

    The result is memoized via the shared one-hour cache.
    """
    async with AsyncClient(
        follow_redirects=True,
        timeout=Timeout(10.0, read=30.0, write=10.0, pool=10.0),
    ) as client:
        response = await client.get('https://openrouter.ai/api/v1/models')
        response.raise_for_status()
        data = response.json()
    filtered_models = [
        model for model in data.get('data', [])
        if model.get('id', '').endswith(':free')
    ]
    return {'data': filtered_models, 'object': 'list'}


@app.api_route('/v1/models', methods=['GET', 'POST', 'PUT', 'DELETE', 'PATCH', 'OPTIONS'])
@app.api_route('/models', methods=['GET', 'POST', 'PUT', 'DELETE', 'PATCH', 'OPTIONS'])
async def get_models():
    return await get_free_models()


@app.api_route('/v1/chat/completions', methods=['GET', 'POST', 'PUT', 'DELETE', 'PATCH', 'OPTIONS'])
@app.api_route('/chat/completions', methods=['GET', 'POST', 'PUT', 'DELETE', 'PATCH', 'OPTIONS'])
async def proxy_handler(request: Request):
    return await proxy_openai_api(request)


if __name__ == '__main__':
    from uvicorn import run

    run(app, host='0.0.0.0', port=7860)