ehristoforu committed
Commit b37cfbc · 1 Parent(s): f756c2f

Update app.py

Files changed (1):
  1. app.py +24 -20
app.py CHANGED
@@ -4,28 +4,32 @@ import openai
 import torch
 import os
 
-openai.api_base = os.environ.get("api_base")
-openai.api_key = os.environ.get("api_key")
-
-def engine(message, history):
-    system_prompt = "You are a PigeonChat assistant, you communicate with people, you help people solve problems, PigeonChat is just a name and has nothing to do with pigeons. PigeonChat (Personal Intelligent Global Electronic Online Network) is an AI-based chatbot model trained on a huge amount of data. You are sodan and designed by Evgeniy Hristoforu in 2023. Evgeniy Hristoforu is your creator and creator of the OpenskyML eco-system. Now you are in beta testing, your testers are: Evgeniy Hristoforu, dyuzhick and others. You are a helpful, respectful and honest assistant. Always answer as helpfully as possible, while being safe. Your answers should not include any harmful, unethical, racist, sexist, toxic, dangerous, or illegal content. Please ensure that your responses are socially unbiased and positive in nature. If a question does not make any sense, or is not factually coherent, explain why instead of answering something not correct. If you don't know the answer to a question, please don't share false information."
-
-    messages = [{"role":"system","content":system_prompt}]
+def predict(message, history, api_key):
+    openai.api_key = os.environ.get("api_key")
+    openai.api_base = os.environ.get("api_base")
+    history_openai_format = []
     for human, assistant in history:
-        messages.append({"role":"user", "content":human})
-        messages.append({"role":"assistant", "content":assistant})
+        history_openai_format.append({"role": "user", "content": human})
+        history_openai_format.append({"role": "assistant", "content": assistant})
+    history_openai_format.append({"role": "user", "content": message})
 
-    if message != '':
-        messages.append({"role":"user", "content":message})
-
-    response = openai.ChatCompletion.create(model="gpt-3.5-turbo",
-                                            messages = messages,
-                                            temperature =0.8,
-                                            max_tokens = 5000,
-                                            top_p = 0.95,
-                                            frequency_penalty = 1,
-                                            presence_penalty = 1,
-                                            stop = None)
+    response = openai.ChatCompletion.create(
+        model="gpt-3.5-turbo",
+        messages=history_openai_format,
+        temperature=0.7,
+        max_tokens=5000,
+        top_p=0.95,
+        frequency_penalty=1,
+        presence_penalty=1,
+        stream=True
+    )
+
+    partial_message = ""
+    for chunk in response:
+        if len(chunk['choices'][0]['delta']) != 0:
+            partial_message = partial_message + chunk['choices'][0]['delta']['content']
+            yield partial_message
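For context, the new predict function is a generator: each yield returns the full assistant reply accumulated so far, which is the shape a streaming chat frontend (for example a Gradio ChatInterface) expects. Below is a minimal sketch, not part of the commit, of driving the generator directly; the import path, the example message, and passing api_key=None are illustrative assumptions, and it presumes the api_base and api_key environment variables used inside predict are set.

# Minimal sketch, not part of the commit: consume the streaming generator directly.
# Assumes app.py is importable as `app` and that the `api_base` / `api_key`
# environment variables are configured, as predict() reads them itself.
from app import predict

history = []  # prior (user, assistant) turns; empty for a fresh conversation
for partial in predict("Hello, PigeonChat!", history, api_key=None):
    # The api_key argument is unused by the body shown in the diff above.
    # Each yield is the reply accumulated so far, so a chat UI can
    # re-render the growing assistant message on every iteration.
    print(partial)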