Update app.py
app.py CHANGED
@@ -25,6 +25,13 @@ client = Groq(
     api_key=os.environ['GROQ_API_KEY'],
 )
 
+# Read saved prompts from file
+with open("saved_prompts.txt", "r") as f:
+    saved_prompts = f.read().split("<|>")
+
+prompt_names = [p.split(" ", 1)[0] for p in saved_prompts]
+prompt_map = {name: prompt for name, prompt in zip(prompt_names, saved_prompts)}
+
 # Initialize chat history and selected model
 if "messages" not in st.session_state:
     st.session_state.messages = []
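Review note: the added block reads saved_prompts.txt unconditionally at startup, so a fresh deployment without that file would crash with FileNotFoundError (the code removed further down guarded this with os.path.exists). A minimal defensive sketch, assuming the same "<|>"-delimited "name body" entry format:

import os

saved_prompts = []
if os.path.exists("saved_prompts.txt"):
    with open("saved_prompts.txt", "r") as f:
        # Drop empty fragments so a leading or trailing "<|>" cannot
        # produce a blank entry (assumed "<|>"-delimited format)
        saved_prompts = [p for p in f.read().split("<|>") if p.strip()]

prompt_names = [p.split(" ", 1)[0] for p in saved_prompts]
prompt_map = dict(zip(prompt_names, saved_prompts))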
@@ -55,6 +62,13 @@ with col1:
         format_func=lambda x: models[x]["name"],
         index=0,  # Default to the first model in the list
     )
+    # Add prompt dropdown
+    prompt_option = st.selectbox("Choose a prompt:", options=prompt_names)
+
+    if not prompt_option:
+        prompt = ""
+    else:
+        prompt = prompt_map[prompt_option]
 
 # Detect model change and clear chat history if model has changed
 if st.session_state.selected_model != model_option:
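Review note: with a non-empty options list, st.selectbox preselects the first entry, so the `if not prompt_option` branch only ever runs when prompt_names is empty. If the intent is "no prompt selected by default", one option is the following sketch, assuming a Streamlit version recent enough to support index=None (roughly 1.26+):

    prompt_option = st.selectbox(
        "Choose a prompt:",
        options=prompt_names,
        index=None,  # start with nothing selected
        placeholder="No saved prompt",
    )
    prompt = prompt_map.get(prompt_option, "") if prompt_option else ""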
@@ -81,36 +95,18 @@ for message in st.session_state.messages:
     with st.chat_message(message["role"], avatar=avatar):
         st.markdown(message["content"])
 
-# Function to save prompt to a file
-def save_prompt(prompt):
-    with open("saved_prompts.txt", "a") as file:
-        file.write(prompt + "\n")
-
-# Function to delete prompt from file
-def delete_prompt(prompt_index):
-    with open("saved_prompts.txt", "r") as file:
-        prompts = file.readlines()
-    with open("saved_prompts.txt", "w") as file:
-        for i, p in enumerate(prompts):
-            if i != prompt_index:
-                file.write(p)
-
-
 def generate_chat_responses(chat_completion) -> Generator[str, None, None]:
     """Yield chat response content from the Groq API response."""
     for chunk in chat_completion:
         if chunk.choices[0].delta.content:
             yield chunk.choices[0].delta.content
 
-if prompt := st.chat_input("Enter your prompt here..."):
+if prompt := st.chat_input("Enter your prompt here...", value=prompt):
     st.session_state.messages.append({"role": "user", "content": prompt})
 
     with st.chat_message("user", avatar="❓"):
         st.markdown(prompt)
 
-    if st.button("Save"):
-        save_prompt(prompt)
-
     # Fetch response from Groq API
     try:
        chat_completion = client.chat.completions.create(
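Review note: st.chat_input does not take a value keyword in Streamlit's documented API, so `st.chat_input("Enter your prompt here...", value=prompt)` should raise a TypeError on the first rerun; chat_input cannot be prefilled. One workaround is to submit the dropdown selection through a separate control, as in this sketch (the "Send saved prompt" button is hypothetical, not part of this commit):

user_input = st.chat_input("Enter your prompt here...")
# Hypothetical button: lets the user send the dropdown selection without typing it.
send_saved = st.button("Send saved prompt")

if send_saved and prompt:
    user_input = prompt

if user_input:
    st.session_state.messages.append({"role": "user", "content": user_input})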
@@ -140,15 +136,4 @@ if prompt := st.chat_input("Enter your prompt here..."):
     combined_response = "\n".join(str(item) for item in full_response)
     st.session_state.messages.append(
         {"role": "assistant", "content": combined_response}
-    )
-
-# Collapsible section for prompt management
-with st.expander("Prompt Management", expanded=False):
-    if os.path.exists("saved_prompts.txt"):
-        with open("saved_prompts.txt", "r") as file:
-            saved_prompts = file.readlines()
-        for i, prompt in enumerate(saved_prompts):
-            delete_button = st.button("Delete", key=f"delete_{i}")
-            st.write(f"{i + 1}. {prompt.strip()}")
-            if delete_button:
-                delete_prompt(i)
+    )