timfe committed
Commit dde02d9 · 1 Parent(s): cb68d5c

interactive app

Files changed (1)
  1. app.py +45 -5
app.py CHANGED
@@ -24,10 +24,11 @@ for file in files:
      docs.append(loader.load()[0])

  # Config
- chunk_size = 500  # Chunk size in tokens into which the files are split; also sets how many tokens are fed into the context.
- chunk_overlap = 100
- temperature = 0.4
- model = "gpt-3.5-turbo"
+ with st.sidebar:
+     chunk_size = st.number_input("Insert chunk size", value=500, step=100, placeholder=500)  # Chunk size in tokens into which the files are split; also sets how many tokens are fed into the context.
+     chunk_overlap = st.number_input("Insert chunk-overlap", value=100, step=10, placeholder=100)
+     temperature = st.number_input("Insert temperature", value=0.0, min_value=0.0, step=0.2, max_value=1.0, placeholder=0.0)
+     model = st.selectbox("Model name", ["gpt-3.5-turbo"])


  prompt_template ="""
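Note (not part of the diff): this hunk replaces the hard-coded retrieval/LLM settings with sidebar widgets, so chunk size, overlap, temperature, and model can be changed at runtime. The splitter and LLM setup are not shown in this commit, so the sketch below only illustrates how the sidebar values would typically be consumed further down in app.py; the imports and class names are assumptions.

# Sketch only: assumed consumers of the sidebar values; not part of this commit.
from langchain_text_splitters import RecursiveCharacterTextSplitter
from langchain_openai import ChatOpenAI

splitter = RecursiveCharacterTextSplitter(
    chunk_size=chunk_size,        # int from st.number_input (value/step are ints)
    chunk_overlap=chunk_overlap,  # int from st.number_input
)
splits = splitter.split_documents(docs)  # docs is built in the loop above

llm = ChatOpenAI(model=model, temperature=temperature)  # model from st.selectbox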
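One caveat with free-form number inputs: LangChain's text splitters raise an error when chunk_overlap is larger than chunk_size. A small guard (a sketch, not in the commit) could validate the sidebar values before the splitter is built.

# Sketch only, not in the commit: validate sidebar input before splitting.
if chunk_overlap > chunk_size:
    st.sidebar.error("chunk_overlap must not be larger than chunk_size.")
    st.stop()  # halt this rerun until the user fixes the values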
 
 
 
 
 
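The second hunk, below, adds three "Beispielfragen" (example question) buttons. Because a Streamlit button returns True only during the rerun triggered by its click, the commit records the click in st.session_state via an on_click callback and handles it on the following pass. A stripped-down, standalone sketch of that pattern:

# Standalone sketch of the button + session_state pattern used in the hunk below.
import streamlit as st

def remember_click(btn_id):
    # on_click callbacks run before the script reruns, so these flags
    # are already set when the body below executes again.
    st.session_state.clicked = True
    st.session_state["btn"] = btn_id

st.button("Was ist 'Lernen'?", on_click=remember_click, args=["btn_1"])

if st.session_state.get("clicked"):
    st.write(f"handle predefined question for {st.session_state['btn']}")
    st.session_state.clicked = False  # reset so it does not fire again on the next rerun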
 
@@ -71,17 +72,56 @@ rag_chain = (
      | StrOutputParser()
  )

+
  st.title("🐔 Volker-Chat")

+ def click_button(btn):
+     st.session_state.clicked = True
+     st.session_state['btn'] = btn
+
+ def predefined_question(btn):
+     if btn == "btn_1":
+         prompt = "Erläutere die Säule 'Lernen' aus der Volker-App."
+     elif btn == "btn_2":
+         prompt = "Erläutere die Säule 'Tracken' aus der Volker-App."
+     elif btn == "btn_3":
+         prompt = "Erläutere die Säule 'Handeln' aus der Volker-App."
+     return prompt
+
+ c = st.container()
+ c.write("Beispielfragen")
+ col1, col2, col3 = c.columns(3)
+ col1.button("Was ist 'Lernen'?", on_click=click_button, args=['btn_1'])
+ col2.button("Was ist 'Tracken'?", on_click=click_button, args=['btn_2'])
+ col3.button("Was ist 'Handeln'?", on_click=click_button, args=['btn_3'])
+
+ if 'clicked' not in st.session_state:
+     st.session_state.clicked = False
+
  if "messages" not in st.session_state:
      st.session_state["messages"] = [{"role": "assistant", "content": "Ahoi! Ich bin Volker. Wie kann ich dir helfen?"}]

  for msg in st.session_state.messages:
      st.chat_message(msg["role"]).write(msg["content"])

+ if st.session_state.clicked:
+     prompt = predefined_question(st.session_state['btn'])
+     st.chat_message("user").write(prompt)
+     with get_openai_callback() as cb:
+         response = rag_chain.invoke(prompt)
+     st.chat_message("assistant").write(response)
+     with st.sidebar:
+         sidebar_c = st.container()
+         sidebar_c.success(cb)
+     st.session_state.clicked = False
+
  if prompt := st.chat_input():
      st.chat_message("user").write(prompt)
      with get_openai_callback() as cb:
          response = rag_chain.invoke(prompt)
      st.chat_message("assistant").write(response)
-     st.chat_message("assistant").write(str(cb))
+     with st.sidebar:
+         sidebar_c = st.container()
+         sidebar_c.success(cb)
+     st.session_state.clicked = False
+
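In both chat paths the call to rag_chain.invoke is wrapped in get_openai_callback, and the callback is written to a sidebar container with sidebar_c.success(cb), which renders the handler's string summary of token usage and cost. If only selected figures are wanted, the handler's fields can be formatted directly; a sketch that reuses the app's existing imports, rag_chain, and prompt, and assumes LangChain's standard OpenAI callback attributes:

# Sketch only: report selected callback fields instead of the full summary string.
with get_openai_callback() as cb:
    response = rag_chain.invoke(prompt)

with st.sidebar:
    st.success(
        f"Tokens: {cb.total_tokens} "
        f"(prompt {cb.prompt_tokens}, completion {cb.completion_tokens}) "
        f"| cost: ${cb.total_cost:.4f}"
    )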