|
from fastapi import FastAPI, Request, Depends, HTTPException |
|
from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials |
|
from fastapi.responses import StreamingResponse, HTMLResponse |
|
from fastapi.background import BackgroundTasks |
|
from contextlib import asynccontextmanager |
|
import requests |
|
from curl_cffi import requests as cffi_requests |
|
import uuid |
|
import json |
|
import time |
|
from typing import Optional |
|
import asyncio |
|
import base64 |
|
import tempfile |
|
import os |
|
import re |
|
import threading |
|
import logging |
|
from dotenv import load_dotenv |
|
from playwright.sync_api import sync_playwright |
|
|
|
|
|
# Load variables from .env, letting the file override already-set env vars.
load_dotenv(override=True)


# Application-wide logging configuration: timestamped INFO-level lines.
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(levelname)s - %(message)s'
)
logger = logging.getLogger(__name__)


# Shared cookie state, written by the background fetcher thread and read
# by request handlers.
global_data = {
    "cookie": None,        # serialized "name=value; ..." Cookie header string
    "cookies": None,       # raw cookie dicts as returned by Playwright
    "last_update": 0,      # epoch seconds of the last successful fetch
    "cookie_expires": 0    # epoch seconds after which the cookie is stale
}
|
|
|
@asynccontextmanager
async def lifespan(app: FastAPI):
    """Application lifespan: start the cookie fetcher on startup and clear
    the cached cookie state on shutdown."""
    logger.info("Starting FastAPI application, initializing cookie fetcher...")

    # Run the blocking Playwright fetch off the event loop; daemon=True so
    # the thread never prevents interpreter shutdown.
    fetcher = threading.Thread(target=get_cookie_with_retry, daemon=True)
    fetcher.start()
    logger.info("Cookie fetcher thread started")

    yield

    # Shutdown: drop any cached cookie state.
    logger.info("Shutting down FastAPI application")
    global_data["cookie"] = None
    global_data["cookies"] = None
    global_data["last_update"] = 0
|
|
|
def get_cookie_with_retry(max_retries=3, retry_delay=5):
    """Fetch the session cookie, retrying up to ``max_retries`` times.

    Sleeps ``retry_delay`` seconds between attempts. Returns the cookie
    string on success, or None when every attempt fails.
    """
    for attempt in range(1, max_retries + 1):
        logger.info(f"Cookie fetching attempt {attempt}/{max_retries}")
        cookie = get_cookie()
        if cookie:
            logger.info("Successfully retrieved cookie")
            return cookie
        # Only pause when another attempt is still coming.
        if attempt < max_retries:
            logger.info(f"Retrying cookie fetch in {retry_delay} seconds...")
            time.sleep(retry_delay)

    logger.error(f"Failed to fetch cookie after {max_retries} attempts")
    return None
|
|
|
app = FastAPI(lifespan=lifespan)
security = HTTPBearer()


# Optional API key protecting this proxy; when unset, auth is disabled.
OPENAI_API_KEY = os.getenv("OPENAI_API_KEY", None)
# Log only whether the key is configured — never the key material itself
# (the previous version wrote the raw key into the logs).
logger.info(f"OPENAI_API_KEY is set: {OPENAI_API_KEY is not None}")
|
|
|
def get_cookie():
    """Launch headless Chromium via Playwright, visit chat.akash.network,
    wait out the Cloudflare check, and capture the session cookies.

    On success, updates ``global_data`` (cookie string, raw cookies and
    timestamps) and returns the serialized cookie header string.
    Returns None on any failure.
    """
    try:
        logger.info("Starting cookie retrieval process...")

        with sync_playwright() as p:
            try:
                # Launch flags: container-friendly (--no-sandbox,
                # --disable-dev-shm-usage, --single-process) plus a mild
                # anti-automation-detection flag.
                logger.info("Launching browser...")
                browser = p.chromium.launch(
                    headless=True,
                    args=[
                        '--no-sandbox',
                        '--disable-dev-shm-usage',
                        '--disable-gpu',
                        '--disable-software-rasterizer',
                        '--disable-extensions',
                        '--disable-setuid-sandbox',
                        '--no-first-run',
                        '--no-zygote',
                        '--single-process',
                        '--window-size=1920,1080',
                        '--disable-blink-features=AutomationControlled'
                    ]
                )

                logger.info("Browser launched successfully")

                # A context mimicking a real desktop Chrome session
                # (viewport, UA, locale, client-hint headers) so the
                # Cloudflare challenge is more likely to clear.
                logger.info("Creating browser context...")
                context = browser.new_context(
                    viewport={'width': 1920, 'height': 1080},
                    user_agent='Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36',
                    locale='en-US',
                    timezone_id='America/New_York',
                    permissions=['geolocation'],
                    extra_http_headers={
                        'Accept-Language': 'en-US,en;q=0.9',
                        'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7',
                        'Sec-Ch-Ua': '"Not_A Brand";v="8", "Chromium";v="120", "Google Chrome";v="120"',
                        'Sec-Ch-Ua-Mobile': '?0',
                        'Sec-Ch-Ua-Platform': '"macOS"',
                        'Sec-Fetch-Dest': 'document',
                        'Sec-Fetch-Mode': 'navigate',
                        'Sec-Fetch-Site': 'none',
                        'Sec-Fetch-User': '?1',
                        'Upgrade-Insecure-Requests': '1'
                    }
                )

                logger.info("Browser context created successfully")

                logger.info("Creating new page...")
                page = context.new_page()
                logger.info("Page created successfully")

                # Generous default timeout: the Cloudflare check can be slow.
                page.set_default_timeout(60000)

                logger.info("Navigating to target website...")
                page.goto("https://chat.akash.network/", timeout=50000)

                logger.info("Waiting for page load...")
                try:
                    page.wait_for_load_state("domcontentloaded", timeout=30000)
                    logger.info("DOM content loaded")

                    # Give the Cloudflare interstitial time to run.
                    logger.info("Waiting for Cloudflare check...")
                    time.sleep(5)

                    # A small mouse gesture can help the challenge decide
                    # this is a human-driven browser.
                    try:
                        page.mouse.move(100, 100)
                        page.mouse.click(100, 100)
                        logger.info("Simulated user interaction")
                    except Exception as e:
                        logger.warning(f"Failed to simulate user interaction: {e}")

                    time.sleep(5)

                except Exception as e:
                    # Non-fatal: we still try to read whatever cookies exist.
                    logger.warning(f"Timeout waiting for load state: {e}")

                logger.info("Getting cookies...")
                cookies = context.cookies()

                if not cookies:
                    logger.error("No cookies found")
                    browser.close()
                    return None

                # cf_clearance is the Cloudflare pass token; without it the
                # upstream API calls will be challenged again.
                cf_cookie = next((cookie for cookie in cookies if cookie['name'] == 'cf_clearance'), None)
                if not cf_cookie:
                    logger.error("cf_clearance cookie not found")
                    browser.close()
                    return None

                # Serialize into a single Cookie header and publish to the
                # shared state read by request handlers.
                cookie_str = '; '.join([f"{cookie['name']}={cookie['value']}" for cookie in cookies])
                global_data["cookie"] = cookie_str
                global_data["cookies"] = cookies
                global_data["last_update"] = time.time()

                # Derive an expiry from session_token if present; otherwise
                # assume one hour so the cookie is eventually refreshed.
                session_cookie = next((cookie for cookie in cookies if cookie['name'] == 'session_token'), None)
                if session_cookie and 'expires' in session_cookie and session_cookie['expires'] > 0:
                    global_data["cookie_expires"] = session_cookie['expires']
                    logger.info(f"Session token expires at: {time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(session_cookie['expires']))}")
                else:
                    global_data["cookie_expires"] = time.time() + 3600
                    logger.info("No explicit expiration in session_token cookie, setting default 1 hour expiration")

                logger.info("Successfully retrieved cookies")
                browser.close()
                return cookie_str

            except Exception as e:
                logger.error(f"Error in browser operations: {e}")
                logger.error(f"Error type: {type(e)}")
                import traceback
                logger.error(f"Traceback: {traceback.format_exc()}")
                return None

    except Exception as e:
        logger.error(f"Error fetching cookie: {str(e)}")
        logger.error(f"Error type: {type(e)}")
        import traceback
        logger.error(f"Traceback: {traceback.format_exc()}")
        return None
|
|
|
|
|
async def refresh_cookie():
    """Force a cookie refresh (used after a 401 from the upstream API).

    Returns the new cookie string, or None when the fetch fails.
    """
    logger.info("Refreshing cookie due to 401 error")

    # Invalidate immediately so concurrent freshness checks also refresh.
    global_data["cookie_expires"] = 0

    # get_cookie() drives a whole Playwright browser session and blocks for
    # many seconds — run it in a worker thread instead of stalling the
    # event loop (the previous version called it synchronously).
    return await asyncio.to_thread(get_cookie)
|
|
|
async def check_and_update_cookie(background_tasks: BackgroundTasks):
    """Schedule a background cookie refresh when the cached one is missing
    or past its expiry timestamp."""
    now = time.time()
    missing = not global_data["cookie"]
    stale = now >= global_data["cookie_expires"]

    if missing or stale:
        logger.info("Cookie expired or not available, refreshing...")
        # Refresh out-of-band so this request is not blocked on Playwright.
        background_tasks.add_task(get_cookie)
    else:
        logger.info("Using existing cookie")
|
|
|
async def get_api_key(credentials: HTTPAuthorizationCredentials = Depends(security)):
    """Validate the Bearer token against OPENAI_API_KEY, when configured.

    Returns True when validation passes (or no key is configured).
    Raises HTTPException(401) on mismatch.
    """
    import hmac

    token = credentials.credentials

    if OPENAI_API_KEY is not None:
        # HTTPBearer already strips the scheme, but tolerate clients that
        # send "Bearer <key>" as the credential itself.
        clean_token = token[len("Bearer "):] if token.startswith("Bearer ") else token

        # Constant-time comparison to avoid a timing side channel, and no
        # logging of token/key material (the previous version logged both).
        if not hmac.compare_digest(clean_token, OPENAI_API_KEY):
            logger.error("API key validation failed: token mismatch")
            raise HTTPException(
                status_code=401,
                detail="Invalid API key"
            )
        logger.info("API key validation passed")

    return True
|
|
|
async def validate_cookie(background_tasks: BackgroundTasks):
    """Ensure a session cookie is available, waiting up to 30 seconds for
    the background fetcher; returns the cookie string or raises 503."""
    await check_and_update_cookie(background_tasks)

    # Poll once per second until the fetcher publishes a cookie or the
    # deadline passes.
    deadline = time.time() + 30
    while not global_data["cookie"] and time.time() < deadline:
        await asyncio.sleep(1)
        logger.info("Waiting for cookie initialization...")

    if not global_data["cookie"]:
        logger.error("Cookie not available after waiting")
        raise HTTPException(
            status_code=503,
            detail="Service temporarily unavailable - Cookie not available"
        )

    logger.info("Cookie validation passed")
    return global_data["cookie"]
|
|
|
async def check_image_status(session: requests.Session, job_id: str, headers: dict) -> Optional[str]:
    """Poll the Akash image-status endpoint until the job finishes.

    Polls roughly once per second, up to ``max_retries`` times. On
    completion the result is re-hosted via ``upload_to_xinyew`` and the
    public URL is returned; returns None on failure or timeout.
    """
    max_retries = 30
    for attempt in range(max_retries):
        try:
            logger.info(f"Attempt {attempt + 1}/{max_retries} for job {job_id}")
            response = session.get(
                f'https://chat.akash.network/api/image-status?ids={job_id}',
                headers=headers
            )
            logger.info(f"Status response code: {response.status_code}")
            status_data = response.json()

            if status_data and isinstance(status_data, list) and len(status_data) > 0:
                job_info = status_data[0]
                status = job_info.get('status')
                logger.info(f"Job status: {status}")

                if status == "completed":
                    result = job_info.get("result")
                    # Some failures are reported inside a "completed" job
                    # as a result string starting with "Failed".
                    if result and not result.startswith("Failed"):
                        image_url = await upload_to_xinyew(result, job_id)
                        if image_url:
                            logger.info(f"Successfully uploaded image: {image_url}")
                            return image_url
                        logger.error("Image upload failed")
                        return None
                    logger.error("Invalid result received")
                    return None
                elif status == "failed":
                    logger.error(f"Job {job_id} failed")
                    return None

        except Exception as e:
            # Transient errors (network hiccup, non-JSON body) should not
            # abort the whole poll loop — keep retrying until the cap.
            # (The previous version returned None on the first exception.)
            logger.warning(f"Error checking status: {e}")

        await asyncio.sleep(1)

    logger.error(f"Timeout waiting for job {job_id}")
    return None
|
|
|
@app.get("/", response_class=HTMLResponse)
async def health_check():
    """Health check endpoint: renders an HTML status page showing whether
    the session cookie is available, when it was fetched, and when it
    expires."""
    # Overall status mirrors cookie availability; color-codes the badge.
    cookie_status = "ok" if global_data["cookie"] is not None else "error"
    cookie_status_color = "#4CAF50" if cookie_status == "ok" else "#f44336"

    # Human-readable snapshot of the shared cookie state.
    status = {
        "status": cookie_status,
        "version": "1.0.0",
        "cookie_status": {
            "available": global_data["cookie"] is not None,
            "last_update": time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(global_data["last_update"])) if global_data["last_update"] > 0 else None,
            "expires": time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(global_data["cookie_expires"])) if global_data["cookie_expires"] > 0 else None
        }
    }

    # Static page; doubled braces ({{ }}) escape literal CSS braces inside
    # the f-string. The error banner is only displayed on cookie failure.
    html = f"""
    <!DOCTYPE html>
    <html>
    <head>
        <title>Akash API Status</title>
        <style>
            body {{
                font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, "Helvetica Neue", Arial, sans-serif;
                line-height: 1.6;
                margin: 0;
                padding: 20px;
                background-color: #f5f5f5;
            }}
            .container {{
                max-width: 800px;
                margin: 0 auto;
                background-color: white;
                padding: 20px;
                border-radius: 8px;
                box-shadow: 0 2px 4px rgba(0,0,0,0.1);
            }}
            h1 {{
                color: #333;
                margin-top: 0;
            }}
            .status {{
                display: inline-block;
                padding: 4px 8px;
                border-radius: 4px;
                font-weight: bold;
                background-color: {cookie_status_color};
                color: white;
            }}
            .info {{
                margin-top: 20px;
            }}
            .info-item {{
                margin-bottom: 10px;
            }}
            .label {{
                font-weight: bold;
                color: #666;
            }}
            .value {{
                color: #333;
            }}
            .cookie-status {{
                margin-top: 20px;
                padding: 15px;
                background-color: #f8f9fa;
                border-radius: 4px;
            }}
            .cookie-status .available {{
                color: {cookie_status_color};
            }}
            .error-message {{
                color: #f44336;
                margin-top: 10px;
                padding: 10px;
                background-color: #ffebee;
                border-radius: 4px;
                display: {"block" if cookie_status == "error" else "none"};
            }}
        </style>
    </head>
    <body>
        <div class="container">
            <h1>Akash API Status <span class="status">{status["status"]}</span></h1>

            <div class="info">
                <div class="info-item">
                    <span class="label">Version:</span>
                    <span class="value">{status["version"]}</span>
                </div>
            </div>

            <div class="cookie-status">
                <h2>Cookie Status</h2>
                <div class="info-item">
                    <span class="label">Available:</span>
                    <span class="value available">{str(status["cookie_status"]["available"])}</span>
                </div>
                <div class="info-item">
                    <span class="label">Last Update:</span>
                    <span class="value">{status["cookie_status"]["last_update"] or "Never"}</span>
                </div>
                <div class="info-item">
                    <span class="label">Expires:</span>
                    <span class="value">{status["cookie_status"]["expires"] or "Unknown"}</span>
                </div>

                <div class="error-message">
                    Cookie retrieval failed. The service may not be fully functional.
                </div>
            </div>
        </div>
    </body>
    </html>
    """

    return html
|
|
|
@app.post("/v1/chat/completions")
async def chat_completions(
    request: Request,
    background_tasks: BackgroundTasks,
    api_key: bool = Depends(get_api_key),
    cookie: str = Depends(validate_cookie)
):
    """OpenAI-compatible chat endpoint proxied to chat.akash.network.

    Translates the OpenAI-style request body into Akash's schema, forwards
    it upstream, and re-streams the reply as OpenAI-style SSE chunks. For
    the AkashGen model, image-generation markers in the stream are resolved
    into image chunks.
    """
    try:
        data = await request.json()

        # Random 16-char id used both for the upstream request and the
        # chatcmpl-* ids in the streamed chunks.
        chat_id = str(uuid.uuid4()).replace('-', '')[:16]

        # Map OpenAI-style fields onto Akash's expected payload; defaults
        # mirror the upstream UI.
        akash_data = {
            "id": chat_id,
            "messages": data.get('messages', []),
            "model": data.get('model', "DeepSeek-R1"),
            "system": data.get('system_message', "You are a helpful assistant."),
            "temperature": data.get('temperature', 0.6),
            "topP": data.get('top_p', 0.95)
        }

        # Browser-like headers so the request passes the Cloudflare check
        # that the fetched cookie was issued under.
        headers = {
            "Content-Type": "application/json",
            "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36",
            "Accept": "*/*",
            "Accept-Language": "en-US,en;q=0.9",
            "Accept-Encoding": "gzip, deflate, br",
            "Origin": "https://chat.akash.network",
            "Referer": "https://chat.akash.network/",
            "Sec-Fetch-Dest": "empty",
            "Sec-Fetch-Mode": "cors",
            "Sec-Fetch-Site": "same-origin",
            "Connection": "keep-alive"
        }

        # Attach the Cloudflare/session cookie obtained via Playwright.
        headers["Cookie"] = cookie

        with requests.Session() as session:
            response = session.post(
                'https://chat.akash.network/api/chat',
                json=akash_data,
                headers=headers,
                stream=True
            )

            # One retry with a freshly fetched cookie on auth failure.
            if response.status_code == 401:
                logger.info("Cookie expired, refreshing...")
                new_cookie = await refresh_cookie()
                if new_cookie:
                    headers["Cookie"] = new_cookie
                    response = session.post(
                        'https://chat.akash.network/api/chat',
                        json=akash_data,
                        headers=headers,
                        stream=True
                    )

            if response.status_code != 200:
                logger.error(f"Akash API error: {response.text}")
                raise HTTPException(
                    status_code=response.status_code,
                    detail=f"Akash API error: {response.text}"
                )

            def generate():
                # Sync generator consumed by StreamingResponse. Each
                # upstream line looks like "<type>:<payload>" — type '0'
                # appears to be a content delta, 'e'/'d' end-of-stream
                # markers (NOTE(review): format inferred from handling
                # below — confirm against the upstream API).
                content_buffer = ""
                for line in response.iter_lines():
                    if not line:
                        continue

                    try:
                        line_str = line.decode('utf-8')
                        msg_type, msg_data = line_str.split(':', 1)

                        if msg_type == '0':
                            # Payload is a JSON-ish quoted string; strip
                            # quotes and unescape manually.
                            if msg_data.startswith('"') and msg_data.endswith('"'):
                                msg_data = msg_data.replace('\\"', '"')
                                msg_data = msg_data[1:-1]
                            msg_data = msg_data.replace("\\n", "\n")

                            # AkashGen embeds an image-generation job
                            # descriptor in the text stream; resolve it
                            # into dedicated chunks instead of echoing it.
                            if data.get('model') == 'AkashGen' and "<image_generation>" in msg_data:

                                async def process_and_send():
                                    messages = await process_image_generation(msg_data, session, headers, chat_id)
                                    if messages:
                                        return messages
                                    return None

                                # generate() runs outside the main event
                                # loop (sync generator), so a private loop
                                # is created to run the async image
                                # pipeline to completion.
                                loop = asyncio.new_event_loop()
                                asyncio.set_event_loop(loop)
                                try:
                                    result_messages = loop.run_until_complete(process_and_send())
                                finally:
                                    loop.close()

                                if result_messages:
                                    for message in result_messages:
                                        yield f"data: {json.dumps(message)}\n\n"
                                continue

                            content_buffer += msg_data

                            # Emit an OpenAI-style streaming delta chunk.
                            chunk = {
                                "id": f"chatcmpl-{chat_id}",
                                "object": "chat.completion.chunk",
                                "created": int(time.time()),
                                "model": data.get('model'),
                                "choices": [{
                                    "delta": {"content": msg_data},
                                    "index": 0,
                                    "finish_reason": None
                                }]
                            }
                            yield f"data: {json.dumps(chunk)}\n\n"

                        elif msg_type in ['e', 'd']:
                            # End of stream: final chunk with finish_reason
                            # then the SSE terminator.
                            chunk = {
                                "id": f"chatcmpl-{chat_id}",
                                "object": "chat.completion.chunk",
                                "created": int(time.time()),
                                "model": data.get('model'),
                                "choices": [{
                                    "delta": {},
                                    "index": 0,
                                    "finish_reason": "stop"
                                }]
                            }
                            yield f"data: {json.dumps(chunk)}\n\n"
                            yield "data: [DONE]\n\n"
                            break

                    except Exception as e:
                        # Skip malformed lines; keep the stream alive.
                        print(f"Error processing line: {e}")
                        continue

            return StreamingResponse(
                generate(),
                media_type='text/event-stream',
                headers={
                    'Cache-Control': 'no-cache',
                    'Connection': 'keep-alive',
                    'Content-Type': 'text/event-stream'
                }
            )

    except Exception as e:
        print(f"Error in chat_completions: {e}")
        import traceback
        print(traceback.format_exc())
        return {"error": str(e)}
|
|
|
@app.get("/v1/models")
async def list_models(
    background_tasks: BackgroundTasks,
    cookie: str = Depends(validate_cookie)
):
    """Proxy Akash's model list, reshaped into the OpenAI /v1/models format.

    Returns {"object": "list", "data": [...]} on success or an
    {"error": ...} dict on failure.
    """
    try:
        # Browser-like headers matching the fetched Cloudflare cookie.
        headers = {
            "accept": "application/json",
            "accept-language": "en-US,en;q=0.9",
            "sec-ch-ua": '"Not_A Brand";v="8", "Chromium";v="120", "Google Chrome";v="120"',
            "sec-ch-ua-mobile": "?0",
            "sec-ch-ua-platform": '"macOS"',
            "sec-fetch-dest": "document",
            "sec-fetch-mode": "cors",
            "sec-fetch-site": "same-origin",
            "user-agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36",
            "referer": "https://chat.akash.network/"
        }
        headers["Cookie"] = cookie

        # Note: the cookie value itself is deliberately not logged.
        logger.info("Sending request to get models...")
        response = requests.get(
            'https://chat.akash.network/api/models',
            headers=headers
        )
        logger.info(f"Models response status: {response.status_code}")

        if response.status_code == 401:
            logger.error("Authentication failed. Please check your API key.")
            return {"error": "Authentication failed. Please check your API key."}

        akash_response = response.json()

        # Accept either a bare list or a {"models": [...]} envelope.
        if isinstance(akash_response, list):
            models_list = akash_response
        elif isinstance(akash_response, dict):
            models_list = akash_response.get("models", [])
        else:
            logger.warning(f"Unexpected response format: {type(akash_response)}")
            models_list = []

        # Single timestamp for all entries (hoisted out of the loop).
        created = int(time.time())

        def to_openai_model(model):
            # Entries may be plain id strings or {"id": ...} dicts.
            model_id = model["id"] if isinstance(model, dict) else model
            return {
                "id": model_id,
                "object": "model",
                "created": created,
                "owned_by": "akash",
                "permission": [{
                    "id": f"modelperm-{model_id}",
                    "object": "model_permission",
                    "created": created,
                    "allow_create_engine": False,
                    "allow_sampling": True,
                    "allow_logprobs": True,
                    "allow_search_indices": False,
                    "allow_view": True,
                    "allow_fine_tuning": False,
                    "organization": "*",
                    "group": None,
                    "is_blocking": False
                }]
            }

        return {
            "object": "list",
            "data": [to_openai_model(model) for model in models_list]
        }

    except Exception as e:
        # logger.exception records the traceback automatically.
        logger.exception(f"Error in list_models: {e}")
        return {"error": str(e)}
|
|
|
def _akash_gen_chunk(chat_id: str, suffix: str, content: str) -> dict:
    """Build one OpenAI-style streaming chunk for the AkashGen model."""
    return {
        "id": f"chatcmpl-{chat_id}-{suffix}",
        "object": "chat.completion.chunk",
        "created": int(time.time()),
        "model": "AkashGen",
        "choices": [{
            "delta": {"content": content},
            "index": 0,
            "finish_reason": None
        }]
    }


async def process_image_generation(msg_data: str, session: requests.Session, headers: dict, chat_id: str) -> Optional[list]:
    """Handle an image-generation marker from the AkashGen chat stream.

    Extracts the job id and prompt, polls until the image is ready and
    re-hosted, and returns a list of streaming chunks: a <think> block,
    followed by the image markdown or a failure note. Returns None when
    ``msg_data`` does not contain a job descriptor.
    """
    match = re.search(r"jobId='([^']+)' prompt='([^']+)' negative='([^']*)'", msg_data)
    if not match:
        return None

    # `negative` is extracted for completeness but not currently used.
    job_id, prompt, negative = match.groups()
    logger.info(f"Starting image generation process for job_id: {job_id}")

    # Wait (polling) for generation + upload, timing the whole operation.
    start_time = time.time()
    result = await check_image_status(session, job_id, headers)
    elapsed_time = time.time() - start_time

    # <think> block mirrors the upstream UI: shows the prompt and how long
    # generation took.
    think_msg = "<think>\n"
    think_msg += "🎨 Generating image...\n\n"
    think_msg += f"Prompt: {prompt}\n"
    think_msg += f"\n🤔 Thinking for {elapsed_time:.1f}s...\n"
    think_msg += "</think>"

    messages = [_akash_gen_chunk(chat_id, "think", think_msg)]

    if result:
        # Emit the hosted image as markdown. (The previous version built
        # an f-string with no placeholder, so the URL was never sent.)
        messages.append(_akash_gen_chunk(chat_id, "image", f"\n\n![Generated Image]({result})\n\n"))
    else:
        messages.append(_akash_gen_chunk(chat_id, "fail", "\n\n*Image generation or upload failed.*"))

    return messages
|
|
|
async def upload_to_xinyew(image_base64: str, job_id: str) -> Optional[str]:
    """Upload a base64-encoded JPEG to the xinyew.cn image host.

    Accepts either a bare base64 payload or a data URI
    ("data:image/jpeg;base64,..."). Returns the public URL on success,
    None on any failure.
    """
    try:
        logger.info(f"=== Starting image upload for job {job_id} ===")
        logger.info(f"Base64 data length: {len(image_base64)}")

        # Decode, tolerating a data-URI prefix before the comma.
        try:
            payload = image_base64.split(',')[1] if ',' in image_base64 else image_base64
            image_data = base64.b64decode(payload)
            logger.info(f"Decoded image data length: {len(image_data)} bytes")
        except Exception as e:
            logger.error(f"Error decoding base64: {e}")
            return None

        # Spool to a temp file so requests can send it as multipart.
        with tempfile.NamedTemporaryFile(suffix='.jpeg', delete=False) as temp_file:
            temp_file.write(image_data)
            temp_file_path = temp_file.name

        try:
            filename = f"{job_id}.jpeg"
            logger.info(f"Using filename: {filename}")

            # Open via context manager so the handle is always closed —
            # the previous version leaked it, which also makes the unlink
            # below fail on Windows.
            logger.info("Sending request to xinyew.cn...")
            with open(temp_file_path, 'rb') as fh:
                files = {
                    'file': (filename, fh, 'image/jpeg')
                }
                response = requests.post(
                    'https://api.xinyew.cn/api/jdtc',
                    files=files,
                    timeout=30
                )

            logger.info(f"Upload response status: {response.status_code}")
            if response.status_code == 200:
                result = response.json()
                logger.info(f"Upload response: {result}")

                # errno == 0 signals success for this host's API.
                if result.get('errno') == 0:
                    url = result.get('data', {}).get('url')
                    if url:
                        logger.info(f"Successfully got image URL: {url}")
                        return url
                    logger.error("No URL in response data")
                else:
                    logger.error(f"Upload failed: {result.get('message')}")
            else:
                logger.error(f"Upload failed with status {response.status_code}")
                logger.error(f"Response content: {response.text}")
            return None

        finally:
            # Best-effort cleanup of the temp file.
            try:
                os.unlink(temp_file_path)
            except Exception as e:
                logger.warning(f"Error removing temp file: {e}")

    except Exception as e:
        # logger.exception records the traceback automatically.
        logger.exception(f"Error in upload_to_xinyew: {e}")
        return None
|
|
|
if __name__ == '__main__':
    # Development entry point: serve on all interfaces, port 9000.
    import uvicorn
    uvicorn.run(app, host='0.0.0.0', port=9000)