import os
from groq import Groq
import streamlit as st

client = Groq(
    api_key=os.environ.get("GROQ_API_KEY"),
)

st.write('Background analysis')
gl = st.text_area("Goal and assumptions",
                  placeholder="""What assumption do you want to verify, what do you want to learn,
                  and what is the goal and mission of this poll""")
bg = st.text_area("Background information",
                  placeholder="""What the poll is about, and anything we should know to better
                  help you design the questions""")

if 'ft' in st.session_state or st.button('Next step'):
    if 'ft' not in st.session_state:
        # First pass: ask the model to propose factors from the goal and background.
        rf = os.environ.get("DESIGN_PROMPT")
        messages = [
            {
                "role": "user",
                "content": f"""Generate possible factors related to the goal
                based on the information provided. List the factors as a markdown list.
                You can advise the human to edit their goal if you cannot define
                more than 3 factors.\n\n
                goals:\n\n{gl}\n\n
                background:\n\n{bg}\n\n
                some design principles for your reference:\n\n
                reference:\n\n```markdown\n{rf}\n\n```\n\n""",
            }
        ]

        chat_completion = client.chat.completions.create(
            messages=messages,
            model="llama3-70b-8192",
        )
        # Cache the generated factors so reruns do not repeat the API call
        # and the text area below always has content to show.
        st.session_state['ft'] = chat_completion.choices[0].message.content

    ft = st.text_area("Factors to be considered in questions",
                      st.session_state['ft'])
    st.session_state['ft'] = ft
    st.write('Edit the factors above, adding or removing items as needed')

    rf = os.environ.get("DESIGN_PROMPT")

    if st.button('Generate questions'):
        # Append the additional six-point design prompt to the base design prompt.
        rf += f'''\n\n---\n\n{os.environ.get("DESIGN_PROMPT_SIXPOINT")}'''
        messages = [
            {
                "role": "user",
                "content": f"""Extract all factors from the text to create context.
                Generate 5 question based on the text and context.
                Write the questionnaire form in markdown\n\n
                text:\n\n{ft}\n\n
                some design principles for your reference:\n\n
                reference:\n\n```markdown\n{rf}\n```\n\n""",
            }
        ]
        
        chat_completion = client.chat.completions.create(
            messages=messages,
            model="llama3-70b-8192",
        )

        md = chat_completion.choices[0].message.content
        # Heuristic: suggest a sample size of 5 respondents per line of the
        # generated questionnaire.
        suggested_sampling = 5 * len(md.split("\n"))
        st.markdown(f"{md}\n\n**suggested sample size**: {suggested_sampling}")
        # dump questions into excel and allow download
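        # A minimal sketch of the TODO above, assuming pandas and openpyxl are
        # installed; it treats each non-empty line of the generated markdown as
        # one row and offers the result as an .xlsx download.
        import io

        import pandas as pd

        question_lines = [line for line in md.split("\n") if line.strip()]
        questions_df = pd.DataFrame({"question": question_lines})

        # Write the questions to an in-memory Excel workbook.
        excel_buffer = io.BytesIO()
        questions_df.to_excel(excel_buffer, index=False, engine="openpyxl")

        st.download_button(
            "Download questions as Excel",
            data=excel_buffer.getvalue(),
            file_name="questions.xlsx",
            mime="application/vnd.openxmlformats-officedocument.spreadsheetml.sheet",
        )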