Niansuh committed
Commit 4295e27 · verified · 1 Parent(s): 8145173

Update api/utils.py

Files changed (1):
  1. api/utils.py +67 -23
api/utils.py CHANGED
@@ -1,7 +1,17 @@
+import uuid
+from typing import List, Dict
+from aiohttp import ClientSession
+from api.models import ChatRequest
+from api.helper import format_prompt
+from api.logger import logger
+from api.config import MODEL_MAPPING, EDITEA_API_ENDPOINT, EDITEA_HEADERS
+from fastapi import HTTPException
+
+# Editee class is now integrated within utils.py
 class Editee:
     label = "Editee"
     url = "https://editee.com"
-    api_endpoint = "https://editee.com/submit/chatgptfree"
+    api_endpoint = EDITEA_API_ENDPOINT
     working = True
     supports_stream = True
     supports_system_message = True
@@ -36,24 +46,7 @@ class Editee:
     ):
         model = cls.get_model(model)
 
-        headers = {
-            "Accept": "application/json, text/plain, */*",
-            "Accept-Language": "en-US,en;q=0.9",
-            "Cache-Control": "no-cache",
-            "Content-Type": "application/json",
-            "Origin": cls.url,
-            "Pragma": "no-cache",
-            "Priority": "u=1, i",
-            "Referer": f"{cls.url}/chat-gpt",
-            "Sec-CH-UA": '"Chromium";v="129", "Not=A?Brand";v="8"',
-            "Sec-CH-UA-Mobile": '?0',
-            "Sec-CH-UA-Platform": '"Linux"',
-            "Sec-Fetch-Dest": 'empty',
-            "Sec-Fetch-Mode": 'cors',
-            "Sec-Fetch-Site": 'same-origin',
-            "User-Agent": 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/129.0.0.0 Safari/537.36',
-            "X-Requested-With": 'XMLHttpRequest',
-        }
+        headers = EDITEA_HEADERS
 
         async with ClientSession(headers=headers) as session:
             prompt = format_prompt(messages)
@@ -63,7 +56,58 @@ class Editee:
                 "template_id": "",
                 "selected_model": model
             }
-            async with session.post(cls.api_endpoint, json=data, proxy=proxy) as response:
-                response.raise_for_status()
-                response_data = await response.json()
-                yield response_data['text']
+            try:
+                async with session.post(cls.api_endpoint, json=data, proxy=proxy) as response:
+                    response.raise_for_status()
+                    if response.content_type == 'text/event-stream':
+                        async for line in response.content:
+                            yield line.decode('utf-8')
+                    else:
+                        response_data = await response.json()
+                        yield response_data['text']
+            except Exception as e:
+                logger.error(f"Error in Editee API call: {e}")
+                raise HTTPException(status_code=500, detail="Error in Editee API call")
+
+# Function to process the response
+async def process_response(request: ChatRequest, stream: bool = False):
+    try:
+        model = MODEL_MAPPING.get(request.model, request.model)
+        messages = [
+            {"role": message.role, "content": message.content}
+            for message in request.messages
+        ]
+
+        generator = Editee.create_async_generator(
+            model=model,
+            messages=messages,
+            proxy=None  # Add proxy if needed
+        )
+
+        if stream:
+            async def event_generator():
+                async for chunk in generator:
+                    yield f"data: {chunk}\n\n"
+            return event_generator()
+        else:
+            full_response = ""
+            async for chunk in generator:
+                full_response += chunk
+
+            return {
+                "id": f"chatcmpl-{uuid.uuid4()}",
+                "object": "chat.completion",
+                "created": int(uuid.uuid1().time),
+                "model": model,
+                "choices": [
+                    {
+                        "index": 0,
+                        "message": {"role": "assistant", "content": full_response},
+                        "finish_reason": "stop",
+                    }
+                ],
+                "usage": None,
+            }
+    except Exception as e:
+        logger.error(f"Error processing response: {e}")
+        raise HTTPException(status_code=500, detail=str(e))
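
Note: this commit moves the Editee endpoint and request headers out of api/utils.py and imports them from api/config.py, but api/config.py itself is not part of the diff. Below is a minimal sketch of what that module presumably provides, assuming the removed literals were moved over unchanged (the cls.url references become plain strings); the MODEL_MAPPING entries are not shown anywhere in the commit, so none are invented.

# Hypothetical sketch of api/config.py -- that file is not included in this
# commit, so the constants below are assumed to mirror the literals that were
# removed from api/utils.py (with cls.url replaced by the literal site URL).

EDITEA_API_ENDPOINT = "https://editee.com/submit/chatgptfree"

EDITEA_HEADERS = {
    "Accept": "application/json, text/plain, */*",
    "Accept-Language": "en-US,en;q=0.9",
    "Cache-Control": "no-cache",
    "Content-Type": "application/json",
    "Origin": "https://editee.com",
    "Pragma": "no-cache",
    "Priority": "u=1, i",
    "Referer": "https://editee.com/chat-gpt",
    "Sec-CH-UA": '"Chromium";v="129", "Not=A?Brand";v="8"',
    "Sec-CH-UA-Mobile": "?0",
    "Sec-CH-UA-Platform": '"Linux"',
    "Sec-Fetch-Dest": "empty",
    "Sec-Fetch-Mode": "cors",
    "Sec-Fetch-Site": "same-origin",
    "User-Agent": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/129.0.0.0 Safari/537.36",
    "X-Requested-With": "XMLHttpRequest",
}

# MODEL_MAPPING is also imported from api.config by the new code; its entries
# are not shown in this commit, so it is left empty here.
MODEL_MAPPING: dict = {}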
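
For context, process_response returns either an async generator of SSE lines (stream=True) or an OpenAI-style completion dict (stream=False), so a caller has to branch on the stream flag. A hedged wiring sketch, assuming a FastAPI app, a /v1/chat/completions route, and a boolean stream field on ChatRequest, none of which appear in this commit:

# Hypothetical caller -- the app object, route path, and `stream` field are
# assumptions for illustration; only process_response comes from this commit.
from fastapi import FastAPI
from fastapi.responses import StreamingResponse

from api.models import ChatRequest
from api.utils import process_response

app = FastAPI()

@app.post("/v1/chat/completions")
async def chat_completions(request: ChatRequest):
    # Assumed boolean field; fall back to non-streaming if ChatRequest lacks it.
    stream = getattr(request, "stream", False)
    if stream:
        # stream=True returns an async generator of "data: ..." SSE lines,
        # which StreamingResponse can forward as-is.
        return StreamingResponse(
            await process_response(request, stream=True),
            media_type="text/event-stream",
        )
    # stream=False returns an OpenAI-style chat.completion dict.
    return await process_response(request, stream=False)

StreamingResponse with media_type="text/event-stream" matches the "data: ...\n\n" framing produced by event_generator() in the new code.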