feat: handling exception in various cases.
- app.py +55 -38
- modules/gpt_modules.py +1 -0
app.py
CHANGED
@@ -21,6 +21,11 @@ from modules.setting_modules import blockPrint
 #########################################################
 blockPrint()
 
+#########################################################
+# Page Configurations
+#########################################################
+st.set_page_config(page_title="Debate With GPT : DEBO")
+
 #########################################################
 # GET DB
 #########################################################
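Note: the page-configuration block is moved up here (and removed from its old spot in the next hunk), most likely because st.set_page_config has to be the very first Streamlit command a script executes. A minimal sketch of the required ordering; the st.title call is only illustrative:

import streamlit as st

# set_page_config must come before any other st.* call;
# invoking it after another Streamlit command raises StreamlitAPIException.
st.set_page_config(page_title="Debate With GPT : DEBO")

st.title("DEBO")  # every other Streamlit call follows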
@@ -32,11 +37,6 @@ dynamodb = get_db()
 tm = time.localtime()
 time_stamp = time.strftime('%Y-%m-%d %I:%M:%S %p', tm)
 
-#########################################################
-# Page Configurations
-#########################################################
-st.set_page_config(page_title="Debate With GPT : DEBO")
-
 #########################################################
 # Initialize session state variables
 #########################################################
@@ -443,9 +443,12 @@ def page4():
     result = gpt_call(user_input)
     st.session_state.ask_gpt_prev_response = result
 except:
-
-    time.sleep(1)
-    st.
+    st.warning('Chat-GPT Error : The engine is currently overloaded. Please click "Rerun" button below.', icon="⚠️")
+    time.sleep(1)
+    rerun = st.button(label="Rerun", type="primary")
+    if rerun:
+        st.experimental_rerun()
+    st.stop()
 
 # save user_prompt and bot_response to database
 put_item(
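The recovery pattern added here (warn, short pause, offer a Rerun button, halt the script) is repeated in every except block below. A hedged sketch of how the block could be factored into a single helper; overload_recovery and its parameters are hypothetical and not part of this commit:

import time
import streamlit as st

def overload_recovery(container, text):
    # Mirror the pattern used in this commit: warn the user, pause briefly,
    # offer a manual rerun, then stop the current script run.
    container.warning(text, icon="⚠️")
    time.sleep(1)
    if st.button(label="Rerun", type="primary"):
        st.experimental_rerun()
    st.stop()

# usage inside an except block, e.g.
# except:
#     overload_recovery(st, 'Chat-GPT Error : The engine is currently overloaded. Please click "Rerun" button below.')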
@@ -499,9 +502,9 @@ def execute_stt(audio, error_message):
 try:
     user_input = whisper_transcribe(wav_file)
 except:
-    error_message.warning('Whisper Error : The engine is currently overloaded. Please click Rerun button
+    error_message.warning('Whisper Error : The engine is currently overloaded. Please click "Rerun" button below.', icon="⚠️")
     time.sleep(1)
-    rerun = st.button(label="Rerun")
+    rerun = st.button(label="Rerun", type="primary")
     if rerun:
         st.experimental_rerun()
     st.stop()
@@ -536,9 +539,9 @@ def page5():
     result = gpt_call(user_input)
     st.session_state.ask_gpt_prev_response = result
 except:
-    st.warning('Chat-GPT Error : The engine is currently overloaded. Please click Rerun button
+    st.warning('Chat-GPT Error : The engine is currently overloaded. Please click "Rerun" button below.', icon="⚠️")
     time.sleep(1)
-    rerun = st.button(label="Rerun")
+    rerun = st.button(label="Rerun", type="primary")
     if rerun:
         st.experimental_rerun()
     st.stop()
@@ -583,19 +586,17 @@ def page5():
 ])
 first_prompt = "Now we're going to start. Summarize the subject and your role. And ask user ready to begin."
 
-st.session_state['total_debate_history'] = [
-    {"role": "system", "content": debate_preset}
-]
 try:
     response = gpt_call(debate_preset + "\n" + first_prompt, role="system")
 except:
-    st.warning('Chat-GPT Error : The engine is currently overloaded. Please click Rerun button
+    st.warning('Chat-GPT Error : The engine is currently overloaded. Please click "Rerun" button below.', icon="⚠️")
     time.sleep(1)
-    rerun = st.button(label="Rerun")
+    rerun = st.button(label="Rerun", type="primary")
     if rerun:
         st.experimental_rerun()
     st.stop()
-
+
+st.session_state['total_debate_history'].append({"role": "system", "content": debate_preset})
 st.session_state['total_debate_history'].append({"role": "assistant", "content": response})
 st.session_state['bot_debate_history'].append(response)
 
@@ -621,21 +622,20 @@ def page5():
 if np.array_equal(st.session_state['pre_audio'], audio):
     audio = np.array([])
 
-
-submit_buttom = st.form_submit_button(label='💬 Send')
+submit_button = st.form_submit_button(label='💬 Send')
 send_error_message = st.empty()
 
-#if
-if
+#if submit_button and user_input:
+if submit_button:
 if audio.any():
     user_input = execute_stt(audio, openai_error_bottom)
-
 try :
     response = generate_response(user_input)
 except:
-    openai_error_bottom.warning('Chat-GPT Error : The engine is currently overloaded. Please click Rerun button
+    openai_error_bottom.warning('Chat-GPT Error : The engine is currently overloaded. Please click "Rerun" button below.', icon="⚠️")
     time.sleep(1)
-    rerun = st.button(label="Rerun")
+    rerun = st.button(label="Rerun", type="primary")
+    reload = True
     if rerun:
         st.experimental_rerun()
     st.stop()
@@ -671,27 +671,36 @@ def page5():
 try:
     message(st.session_state['bot_debate_history'][0], key='0_bot')
 except:
-    st.warning('Server Error : Unexpected Server error occur. Please click Rerun button
+    st.warning('Server Error : Unexpected Server error occur. Please click "Rerun" button below.', icon="⚠️")
     time.sleep(1)
-
+    reload = True
+    st.session_state['total_debate_history'] = []
+    rerun = st.button(label="Rerun", type="primary")
     if rerun:
         st.experimental_rerun()
     st.stop()
 if len(st.session_state['bot_debate_history']) == 1:
     text_to_speech = gTTS(text=st.session_state['bot_debate_history'][0], lang='en', slow=False)
-    text_to_speech.save(f"audio/
+    text_to_speech.save(f"audio/ses_{st.session_state['session_num']}_bot_res_0.mp3")
 
-    audio_file = open(f"audio/
+    audio_file = open(f"audio/ses_{st.session_state['session_num']}_bot_res_0.mp3", 'rb')
     audio_bytes = audio_file.read()
     st.audio(audio_bytes, format='audio/ogg')
 
-
-
-
+#TODO consider zip_longest()!
+message_pairs = zip(
+    st.session_state['bot_debate_history'][1:],
+    st.session_state['user_debate_history'],
+)
+for i, (bot_hist, user_hist) in enumerate(message_pairs):
+    message(user_hist, is_user=True, key=str(i)+'_user')
+    message(bot_hist, key=str(i + 1)+'_bot')
+    # if bot_hist:
+    #TODO think about managing each generated message and its audio file path as a single object
     if i == len(st.session_state['bot_debate_history']) - 2 and not reload:
-        text_to_speech = gTTS(text=
-        text_to_speech.save(f"audio/
-        audio_file = open(f"audio/
+        text_to_speech = gTTS(text=bot_hist, lang='en', slow=False)
+        text_to_speech.save(f"audio/ses_{st.session_state['session_num']}_bot_res_{str(i + 1)}.mp3")
+        audio_file = open(f"audio/ses_{st.session_state['session_num']}_bot_res_{str(i + 1)}.mp3", 'rb')
         audio_bytes = audio_file.read()
         st.audio(audio_bytes, format='audio/ogg')
     reload = False
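The new rendering loop pairs bot and user turns with zip(), which silently drops a trailing turn when the two histories differ in length; the TODO in the hunk points at itertools.zip_longest for exactly that case. A small standalone illustration with invented histories:

from itertools import zip_longest

bot_history = ["opening", "reply 1", "reply 2", "reply 3"]   # invented
user_history = ["turn 1", "turn 2"]                          # one turn behind

print(list(zip(bot_history[1:], user_history)))
# [('reply 1', 'turn 1'), ('reply 2', 'turn 2')]  -- 'reply 3' is dropped

print(list(zip_longest(bot_history[1:], user_history)))
# [('reply 1', 'turn 1'), ('reply 2', 'turn 2'), ('reply 3', None)]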
@@ -709,6 +718,14 @@ print("#"*80)
 #########################################################
 # Page6 - Total Debate Evaluation
 #########################################################
+@st.cache_data
+def preprocess_words(user_history):
+    res = " ".join(user_history)
+    res = res.lower()
+    res = res.translate(dict.fromkeys(map(ord, '!"#&\(),./:;<=>@[\\]^_`{|}~')))
+    return res.split()
+
+@st.cache_data
 def get_stop_words():
     file = open("text/stop_words.txt", "r")
     try:
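preprocess_words joins the user's turns into one string, lowercases it, strips most punctuation with str.translate, and splits on whitespace; @st.cache_data memoizes the result so page6 does not repeat the work on every rerun. A quick standalone check of the same transformation (the decorator is dropped and the sample input is invented):

def preprocess_words(user_history):
    # copied from the diff above, minus @st.cache_data
    res = " ".join(user_history)
    res = res.lower()
    res = res.translate(dict.fromkeys(map(ord, '!"#&\(),./:;<=>@[\\]^_`{|}~')))
    return res.split()

print(preprocess_words(["I strongly DISAGREE,", "because: AI can't vote."]))
# ['i', 'strongly', 'disagree', 'because', 'ai', "can't", 'vote']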
@@ -808,8 +825,8 @@ def page6():
 # Split the text into words.
 # Count the frequency of each word.
 
-# Convert the list to a string and split it into words on whitespace.
-total_word_list =
+# Convert the list to a string, preprocess it, and split it into words on whitespace.
+total_word_list = preprocess_words(user_history)
 total_word_count = len(total_word_list)
 #total_word_count = len(user_history.split())
 st.write("Total Word Count: ", total_word_count)
@@ -821,7 +838,7 @@ def page6():
 
 # 2. Frequent words: list of words the user repeats
 # Remove stop words
-total_word_list = [word for word in total_word_list not in get_stop_words()]
+total_word_list = [word for word in total_word_list if word not in get_stop_words()]
 # Count word frequencies
 frequency = Counter(total_word_list)
 # Print the most frequent words
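Once stop words are filtered out, Counter turns the frequent-word report into a one-liner via most_common. A tiny sketch with an invented word list and stop list:

from collections import Counter

stop_words = {"the", "a", "and"}                                # invented stop list
words = ["debate", "the", "ai", "debate", "a", "ai", "debate"]  # invented transcript words

filtered = [word for word in words if word not in stop_words]
frequency = Counter(filtered)
print(frequency.most_common(2))
# [('debate', 3), ('ai', 2)]

If the real stop-word file is long, wrapping get_stop_words() in set() keeps the membership test cheap.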
modules/gpt_modules.py
CHANGED
@@ -32,4 +32,5 @@ def gpt_call_context(messages):
     )
     output_text = response["choices"][0]["message"]["content"]
 
+    # raise RuntimeError
     return output_text
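The commented-out raise reads like a manual fault-injection switch: uncommenting it makes gpt_call_context fail on demand so the new except branches can be exercised without waiting for a real OpenAI overload. A hedged sketch of the same idea behind an environment flag; FORCE_GPT_ERROR and the stubbed body are hypothetical, not part of this commit:

import os

FORCE_GPT_ERROR = os.getenv("FORCE_GPT_ERROR") == "1"  # hypothetical debug flag

def gpt_call_context(messages):
    # ... build the request and call the OpenAI API as the real module does ...
    output_text = "stubbed response"  # stands in for response["choices"][0]["message"]["content"]
    if FORCE_GPT_ERROR:
        raise RuntimeError("simulated engine overload for exercising the except paths")
    return output_text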