import os
import gradio as gr
# import openai
from openai import OpenAI
from dotenv import load_dotenv
from pathlib import Path
from time import sleep
load_dotenv(override=True)
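# API key plus comma-separated user names (LOGNAME) and passwords (PASSWORD) come from environment variables.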
key = os.getenv('OPENAI_API_KEY')
users = os.getenv('LOGNAME')
unames = users.split(',')
pwds = os.getenv('PASSWORD')
pwdList = pwds.split(',')
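
# Usage logs live in ./data when SITE is 'local', otherwise in /data.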
site = os.getenv('SITE')
if site == 'local':
    dp = Path('./data')
    dp.mkdir(exist_ok=True)
    dataDir = './data/'
else:
    dp = Path('/data')
    dp.mkdir(exist_ok=True)
    dataDir = '/data/'

client = OpenAI(api_key=key)
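
# Tabulate per-user token usage from the *_log.txt files; do_reset=True deletes the logs instead of tallying them.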
def genUsageStats(do_reset=False):
    result = []
    ttotal4o = 0
    ttotal4mini = 0
    for user in unames:
        tokens4o = 0
        tokens4mini = 0
        fp = dataDir + user + '_log.txt'
        if os.path.exists(fp):
            accessOk = False
            # Retry a few times in case the log file is temporarily unavailable.
            for i in range(3):
                try:
                    with open(fp) as f:
                        dataList = f.readlines()
                    if do_reset:
                        os.remove(fp)
                    else:
                        # Each log line has the form "user: <tokens>-<model tag>".
                        for line in dataList:
                            (u, t) = line.split(':')
                            (t, m) = t.split('-')
                            tcount = int(t)
                            if 'mini' in m:
                                tokens4mini += tcount
                                ttotal4mini += tcount
                            else:
                                tokens4o += tcount
                                ttotal4o += tcount
                    accessOk = True
                    break
                except Exception:
                    sleep(3)
            if not accessOk:
                return f'File access failed reading stats for user: {user}'
        result.append([user, str(tokens4mini), str(tokens4o)])
    gtotal = ttotal4mini + ttotal4o
    result.append(['totals', str(ttotal4mini), str(ttotal4o), str(gtotal)])
    return result
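
# Clear the prompt box, conversation history, and dialog window.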
def clear():
    return [None, [], None]

# Save the cleaned-up password in state and mask what is shown in the textbox.
def updatePassword(txt):
    return [txt.lower().strip(), "*********"]
# def setModel(val):
# return val
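
# Main chat handler: validates the user, handles the special 'stats', 'reset', and 'gpt4' prompts,
# sends the conversation to the OpenAI API, appends the reply to the dialog, and logs token usage.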
def chat(prompt, user_window, pwd_window, past, response, gptModel):
    user_window = user_window.lower().strip()
    isBoss = False
    if user_window == unames[0] and pwd_window == pwdList[0]:
        isBoss = True
        if prompt == 'stats':
            response = genUsageStats()
            return [past, response, None, gptModel]
        if prompt == 'reset':
            response = genUsageStats(True)
            return [past, response, None, gptModel]
        if prompt.startswith('gpt4'):
            gptModel = 'gpt-4o'
            prompt = prompt[5:]
    if user_window in unames and pwd_window in pwdList:
        past.append({"role": "user", "content": prompt})
        completion = client.chat.completions.create(model=gptModel,
                                                    messages=past)
        reply = completion.choices[0].message.content
        tokens = completion.usage.total_tokens
        response += "\n\nYOU: " + prompt + "\nGPT: " + reply
        if isBoss:
            response += f"\n{gptModel}: {tokens} tokens"
        if tokens > 40000:
            response += "\n\nTHIS DIALOG IS GETTING TOO LONG. PLEASE RESTART CONVERSATION SOON."
        past.append({"role": "assistant", "content": reply})
        accessOk = False
        for i in range(3):
            try:
                dataFile = new_func(user_window)
                with open(dataFile, 'a') as f:
                    m = '4o'
                    if 'mini' in gptModel:
                        m = '4omini'
                    f.write(f'{user_window}: {tokens}-{m}\n')
                accessOk = True
                break
            except Exception as e:
                sleep(3)
        if not accessOk:
            response += f"\nDATA LOG FAILED, path = {dataFile}"
        return [past, response, None, gptModel]
    else:
        return [[], "User name and/or password are incorrect", prompt, gptModel]
def new_func(user_window):
    dataFile = dataDir + user_window + '_log.txt'
    return dataFile
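
# Gradio UI: login fields, prompt box, and dialog window wired to the handlers above.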
with gr.Blocks() as demo:
    history = gr.State([])
    password = gr.State("")
    model = gr.State("gpt-4o-mini")
    gr.Markdown('# GPT Chat')
    gr.Markdown('Enter user name & password, then enter a prompt and click the submit button. Restart the conversation if the topic changes.')
    # heading = gr.Label(value="GPT Chat", scale=2, color="Crimson")
    with gr.Row():
        user_window = gr.Textbox(label="User Name")
        pwd_window = gr.Textbox(label="Password")
        pwd_window.blur(updatePassword, pwd_window, [password, pwd_window])
    with gr.Row():
        clear_button = gr.Button(value="Restart Conversation")
        # gpt_chooser = gr.Radio(choices=[("GPT-3.5", "gpt-3.5-turbo"), ("GPT-4o", "gpt-4o-mini")],
        #                        value="gpt-3.5-turbo", label="GPT Model", interactive=True)
        submit_window = gr.Button(value="Submit Prompt/Question")
    prompt_window = gr.Textbox(label="Prompt or Question")
    output_window = gr.Textbox(label="Dialog")
    submit_window.click(chat, inputs=[prompt_window, user_window, password, history, output_window, model],
                        outputs=[history, output_window, prompt_window, model])
    clear_button.click(clear, inputs=[], outputs=[prompt_window, history, output_window])
    # gpt_chooser.input(fn=setModel, inputs=gpt_chooser, outputs=model)

demo.launch()