SharryOG committed
Commit 2627be2 · verified · 1 parent: d19a99e

Update app.py

Files changed (1)
  1. app.py +49 -1
app.py CHANGED
@@ -1,7 +1,55 @@
 from fastapi import FastAPI, Request, Response
 from pydantic import BaseModel
-from webscout.LLM import LLM
 from typing import Union, Dict, List, Any
+import requests
+import base64
+from typing import List, Dict, Union
+import json
+import requests
+import base64
+from typing import List, Dict, Union
+
+class LLM:
+    def __init__(self, model: str, system_message: str = "You are a Helpful AI."):
+        self.model = model
+        self.conversation_history = [{"role": "system", "content": system_message}]
+
+    def chat(self, messages: List[Dict[str, str]]) -> Union[str, None]:
+        url = "https://api.deepinfra.com/v1/openai/chat/completions"
+        headers = {
+            'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/119.0.0.0 Safari/537.36',
+            'Accept-Language': 'en,fr-FR;q=0.9,fr;q=0.8,es-ES;q=0.7,es;q=0.6,en-US;q=0.5,am;q=0.4,de;q=0.3',
+            'Cache-Control': 'no-cache',
+            'Connection': 'keep-alive',
+            'Content-Type': 'application/json',
+            'Origin': 'https://deepinfra.com',
+            'Pragma': 'no-cache',
+            'Referer': 'https://deepinfra.com/',
+            'Sec-Fetch-Dest': 'empty',
+            'Sec-Fetch-Mode': 'cors',
+            'Sec-Fetch-Site': 'same-site',
+            'X-Deepinfra-Source': 'web-embed',
+            'accept': 'text/event-stream',
+            'sec-ch-ua': '"Google Chrome";v="119", "Chromium";v="119", "Not?A_Brand";v="24"',
+            'sec-ch-ua-mobile': '?0',
+            'sec-ch-ua-platform': '"macOS"'
+        }
+        data = json.dumps(
+            {
+                'model': self.model,
+                'messages': messages,
+                'temperature': 0.7,
+                'max_tokens': 8028,
+                'stop': [],
+                'stream': False  #dont change it
+            }, separators=(',', ':')
+        )
+        try:
+            result = requests.post(url=url, data=data, headers=headers)
+            return result.json()['choices'][0]['message']['content']
+        except:
+            return None
+
 app = FastAPI()
 
 class Model(BaseModel):
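
The diff drops the webscout.LLM dependency and inlines an LLM class that posts to DeepInfra's OpenAI-compatible chat completions endpoint. A minimal usage sketch of that class follows; the model id and prompt are illustrative assumptions, not part of the commit. Because chat() wraps the request in a bare except, it returns None on any network or parsing failure, so callers should check for None.

# Minimal usage sketch of the LLM class added in this commit.
# Assumption: the model id below is only an example; substitute any model
# served by DeepInfra's OpenAI-compatible endpoint.
from app import LLM

llm = LLM(model="meta-llama/Meta-Llama-3-8B-Instruct")

# Reuse the system message stored by __init__ and append a user turn.
messages = llm.conversation_history + [
    {"role": "user", "content": "Say hello in one sentence."},
]

reply = llm.chat(messages)  # str on success, None on any failure
print(reply if reply is not None else "request failed")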