npc0 committed
Commit 84ff616 · verified · 1 Parent(s): d671955

Update app.py

Files changed (1): app.py (+31 -5)
app.py CHANGED
````diff
@@ -16,19 +16,24 @@ bg = st.text_area("Background information",
 
 if 'ft' in st.session_state or st.button('Next step'):
     if 'ft' not in st.session_state:
+        rf = os.environ.get("DESIGN_PROMPT")
         messages = [
             {
                 "role": "user",
                 "content": f"""Generate possible factors related to the goal
-                based on the information provided. List the factors in markdown list.\n\n
+                based on the information provided. List the factors in markdown list.
+                You can give advice to human to edit their goal if you cannot define
+                more than 3 factors.\n\n
                 goals:\n\n{gl}\n\n
-                background:\n\n{bg}\n\n""",
+                background:\n\n{bg}\n\n
+                some design principles for your reference:\n\n
+                reference:\n\n```markdown\n{rf}\n\n```\n\n""",
             }
         ]
 
         chat_completion = client.chat.completions.create(
             messages=messages,
-            model="llama3-8b-8192",
+            model="llama3-70b-8192",
         )
 
         ft = st.text_area("Factors to be considered in questions",
@@ -36,7 +41,28 @@ if 'ft' in st.session_state or st.button('Next step'):
     st.session_state['ft'] = ft
     st.write('Edit above factors, add or remove if needed')
 
+    rf = os.environ.get("DESIGN_PROMPT")
+
     if st.button('Generate questions'):
-        pass # for each factor generate 5 questions
-        # suggest the number of sample to collect for the polling = 7 * total question number
+        rf += f'''\n\n---\n\n{os.environ.get("DESIGN_PROMPT_SIXPOINT")}'''
+        messages = [
+            {
+                "role": "user",
+                "content": f"""Extract all factors from the text to create context.
+                Generate 5 question based on the text and context.
+                Write the questionnaire form in markdown\n\n
+                text:\n\n{ft}\n\n
+                some design principles for your reference:\n\n
+                reference:\n\n```markdown\n{rf}\n```\n\n""",
+            }
+        ]
+
+        chat_completion = client.chat.completions.create(
+            messages=messages,
+            model="llama3-70b-8192",
+        )
+
+        md = chat_completion.choices[0].message.content
+        suggested_sampling = 5 * len(md.split("\n"))
+        st.markdown(f"{md}\n\n**suggested number of sampling**: {suggested_sampling}")
         # dump questions into excel and allow download
````
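For context, the hunks reference `client`, `gl`, `bg`, and `os`, which are defined above the diffed region. Below is a minimal sketch of that surrounding setup, assuming the Groq Python SDK (the `llama3-*-8192` model names are Groq-hosted) and a `GROQ_API_KEY` secret; none of these details appear in the commit itself, and the text_area labels and arguments beyond "Background information" are hypothetical.

```python
# Sketch of the app.py preamble these hunks assume (assumptions, not commit content).
import os

import streamlit as st
from groq import Groq  # any OpenAI-compatible chat client would fit the same calls

client = Groq(api_key=os.environ.get("GROQ_API_KEY"))

gl = st.text_area("Goal")                    # referenced as {gl} in the first prompt
bg = st.text_area("Background information")  # matches the hunk-1 header context
```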
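The prompts themselves come from environment variables (`DESIGN_PROMPT`, `DESIGN_PROMPT_SIXPOINT`), presumably stored as Space secrets. If either is unset, `os.environ.get(...)` returns `None`, so the interpolated reference block renders as the string `None` and the later `rf += ...` raises a `TypeError`. A defensive variant, as a sketch rather than something the commit does:

```python
import os

# Fall back to empty strings so a missing secret degrades the prompt instead of crashing.
rf = os.environ.get("DESIGN_PROMPT") or ""

sixpoint = os.environ.get("DESIGN_PROMPT_SIXPOINT") or ""
if sixpoint:
    rf += f"\n\n---\n\n{sixpoint}"
```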
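The removed comment proposed `7 * total question number` as the polling sample size, while the new code computes `5 * len(md.split("\n"))`, i.e. five per markdown line, so headings and blank lines also count toward the estimate. A small illustration of the difference on a toy output; the regex for picking out numbered question lines is an assumption, not part of the commit:

```python
import re

md = "## Questionnaire\n\n1. How often ...?\n2. Do you prefer ...?\n"

per_line = 5 * len(md.split("\n"))                                   # what the commit computes: 5 * 5 = 25
questions = [l for l in md.split("\n") if re.match(r"\s*\d+\.", l)]  # numbered lines only
per_question = 7 * len(questions)                                    # closer to the removed comment: 7 * 2 = 14
```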
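The trailing TODO (`# dump questions into excel and allow download`) is still open after this commit. One way it could be wired up with pandas and `st.download_button` is sketched below; the helper name, the numbered-line extraction, and the openpyxl dependency are all assumptions, not the author's implementation.

```python
import io
import re

import pandas as pd
import streamlit as st


def questions_to_xlsx(md: str) -> bytes:
    """Collect the numbered markdown lines into a one-column Excel sheet (needs openpyxl)."""
    questions = [l.strip() for l in md.split("\n") if re.match(r"\s*\d+\.", l)]
    buffer = io.BytesIO()
    pd.DataFrame({"question": questions}).to_excel(buffer, index=False)
    return buffer.getvalue()


# Inside the 'Generate questions' branch, after md is produced:
# st.download_button(
#     "Download questions (xlsx)",
#     data=questions_to_xlsx(md),
#     file_name="questions.xlsx",
#     mime="application/vnd.openxmlformats-officedocument.spreadsheetml.sheet",
# )
```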