Commit 1442961
Parent(s): 1dfec50
Update app.py to commit 58d0dedd in GH repo
app.py CHANGED
@@ -3,6 +3,11 @@ import streamlit_authenticator as stauth
 import sqlite3
 import yaml
 from yaml.loader import SafeLoader
+import os
+
+from free_speech_app.DataLoadDb import *
+from free_speech_app.FreeSpeechPromptsResponses import *
+from langchain.chat_models import ChatOpenAI
 
 # Connect to SQLite database
 conn = sqlite3.connect('user_data.db')
@@ -55,21 +60,26 @@ if authentication_status:
     st.title("Main Screen")
 
     # Input boxes
+    api_input = st.text_input("OpenAI API Token")
     original_post = st.text_input("Paste Original Post Here")
     background_info = st.text_input("Background information on original post (references, relevant information, best practices for responding)")
 
-
-
-
-
-
-
-
-
-
-
+    chat_mdl = None
+    draft_response = ''
+
+    # Check if the "Submit" button is clicked
+    if st.button("Submit"):
+        if api_input:
+            os.environ["OPENAI_API_KEY"] = api_input
+            chat_mdl = ChatOpenAI(model_name = 'gpt-3.5-turbo-16k', temperature=0.1)
+
+        if chat_mdl is not None:
+            if user_data is None:
+                draft_response = generate_custom_response(original_post, chat_mdl, background_info, "", "", "").content
+            else:
+                draft_response = generate_custom_response(original_post, chat_mdl, background_info, user_data[1], user_data[2], user_data[3]).content
+
     # Output from function
-    draft_response = process_inputs(original_post, background_info, user_data[1], user_data[2], user_data[3])
     st.text_area(label="Draft Response. Please edit here or prompt suggestions in the box below.", value=draft_response, height=350)
 
     regenerate_prompt = st.text_input("Additional prompting for regenerating draft response")
@@ -99,4 +109,4 @@ elif authentication_status is None:
     st.error(e)
 
 with open('config.yaml', 'w') as file:
-    yaml.dump(config, file, default_flow_style=False)
+    yaml.dump(config, file, default_flow_style=False)
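For context, the change wires a user-supplied OpenAI key into a langchain chat model and only generates a draft once the Submit button is pressed. Below is a minimal standalone sketch of that flow, not the app itself: it assumes the langchain 0.0.x layout (ChatOpenAI under langchain.chat_models) and assumes generate_custom_response lives in free_speech_app.FreeSpeechPromptsResponses (the commit imports both repo modules with wildcards, so the exact module is a guess).

# Minimal sketch of the Submit-button flow added in this commit.
# Assumptions: langchain 0.0.x import path; generate_custom_response is
# provided by the repo's FreeSpeechPromptsResponses module.
import os

import streamlit as st
from langchain.chat_models import ChatOpenAI
from free_speech_app.FreeSpeechPromptsResponses import generate_custom_response  # assumed location

api_input = st.text_input("OpenAI API Token")
original_post = st.text_input("Paste Original Post Here")
background_info = st.text_input("Background information on original post")

chat_mdl = None
draft_response = ''

if st.button("Submit"):
    if api_input:
        # The key is passed through the environment, so ChatOpenAI picks it
        # up without the app persisting the credential anywhere else.
        os.environ["OPENAI_API_KEY"] = api_input
        chat_mdl = ChatOpenAI(model_name='gpt-3.5-turbo-16k', temperature=0.1)

    if chat_mdl is not None:
        # The helper returns a chat message object, hence the .content access.
        draft_response = generate_custom_response(
            original_post, chat_mdl, background_info, "", "", ""
        ).content

st.text_area("Draft Response", value=draft_response, height=350)

Because chat_mdl stays None when no token is entered, the text area simply re-renders with an empty draft instead of raising, which mirrors the guard the commit adds around the model call.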