Update app.py
app.py CHANGED
@@ -167,36 +167,34 @@ def load_score(key):
         return score_data["score"]
     return 0
 
-
-
 def search_glossary(query):
+    for category, terms in roleplaying_glossary.items():
+        if query.lower() in (term.lower() for term in terms):
+            st.markdown(f"#### {category}")
+            st.write(f"- {query}")
 
-    st.write('## ' + query)
-
     all=""
+
+    query2 = PromptPrefix + query  # Add prompt preface for method step task behavior
+    # st.write('## ' + query2)
     st.write('## π Running with GPT.') # -------------------------------------------------------------------------------------------------
-    response = chat_with_model(
-
-
-    filename = generate_filename(query + ' --- ' + response, "md")
+    response = chat_with_model(query2)
+    filename = generate_filename(query2 + ' --- ' + response, "md")
     create_file(filename, query, response, should_save)
 
-
-    #
-
-
-    filename_txt = generate_filename(query + ' --- ' + response2, "
+    query3 = PromptPrefix2 + query + ' creating streamlit functions that implement outline of method steps below: ' + response  # Add prompt preface for coding task behavior
+    # st.write('## ' + query3)
+    st.write('## π Coding with GPT.') # -------------------------------------------------------------------------------------------------
+    response2 = chat_with_model(query3)
+    filename_txt = generate_filename(query + ' --- ' + response2, "py")
     create_file(filename_txt, query, response2, should_save)
-
+
     all = '# Query: ' + query + '# Response: ' + response + '# Response2: ' + response2
-
     filename_txt2 = generate_filename(query + ' --- ' + all, "md")
     create_file(filename_txt2, query, all, should_save)
-
     SpeechSynthesis(all)
     return all
 
-
 # Function to display the glossary in a structured format
 def display_glossary(glossary, area):
     if area in glossary: