Spaces:
Running
Running
Update app.py
Browse files
app.py
CHANGED
@@ -10,12 +10,20 @@ model = 'models/text-bison-001'
|
|
10 |
|
11 |
# Generate text
|
12 |
if prompt := st.chat_input("Hi, I can help you manage your daily tasks."):
|
13 |
-
enprom = f"""Understand whether user is asking to create a task
|
14 |
-
or
|
15 |
-
|
16 |
-
|
17 |
-
|
18 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
19 |
completion = palm.generate_text(model=model, prompt=enprom, temperature=0.5, max_output_tokens=800)
|
20 |
|
21 |
# response = palm.chat(messages=["Hello."])
|
|
|
10 |
|
11 |
# Generate text
|
12 |
if prompt := st.chat_input("Hi, I can help you manage your daily tasks."):
|
13 |
+
enprom = f""" Understand whether user is asking to create a task, have a general conversation,
|
14 |
+
or discuss a potential task.
|
15 |
+
|
16 |
+
- If creating a task, extract details and return a table with Task title, time, repetition, status.
|
17 |
+
- If general conversation, provide a suitable response.
|
18 |
+
- If potential task, ask clarifying questions and propose tasks with details in a table.
|
19 |
+
|
20 |
+
Input: \n{prompt}"""
|
21 |
+
# Understand whether user is asking to create a task or trying to have a general conversation
|
22 |
+
# or is saying something which relates to a task creation thing and can be further discussed to know about task details.
|
23 |
+
# If user is asking to create a task then take all details for creating a task and send as a table with 4 columns i.e. Task title, time, repetition, status.
|
24 |
+
# Else if user is trying to have just a normal general conversation, then give a reply accordingly.
|
25 |
+
# or if user is talking about something that can be related to a task, then ask more questions from the user to get more clarity about task details, show me the discussions and questions you had, and also give me a table with those 4 columns for each task you propose to achieve the goal.
|
26 |
+
# Follow all the above instructions for the input given below:
|
27 |
completion = palm.generate_text(model=model, prompt=enprom, temperature=0.5, max_output_tokens=800)
|
28 |
|
29 |
# response = palm.chat(messages=["Hello."])
|