File size: 1,302 Bytes
de43789
fb84b3c
d93e314
de43789
2be5891
db4e906
a1a96ab
db4e906
a1a96ab
c68709e
 
1296ca3
 
fb84b3c
 
 
 
 
 
 
 
 
a1a96ab
fb84b3c
 
a1a96ab
 
fb84b3c
 
a1a96ab
 
fb84b3c
 
 
 
a1a96ab
fb84b3c
 
 
 
 
 
 
a1a96ab
 
fb84b3c
 
a1a96ab
 
fb84b3c
a1a96ab
fb84b3c
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
import streamlit as st
import google.generativeai as genai
import os


# Read the Gemini API key from the environment and fail fast with a clear
# message if it is missing -- otherwise genai.configure() silently accepts
# api_key=None and the first generate_content() call fails confusingly.
gemini_api_key = os.getenv("GEMINI_API_KEY")
if not gemini_api_key:
    raise RuntimeError("GEMINI_API_KEY environment variable is not set")

genai.configure(api_key=gemini_api_key)

# Single shared model instance used by the chat callback below.
model = genai.GenerativeModel('gemini-pro')



# prompt = st.chat_input("Say something")
# if prompt:
#     st.write(f"User has sent the following prompt: {prompt}")
# else:
#     prompt = "who are you?"
# response = model.generate_content(prompt)
# message = st.chat_message("ai")
# message.write(response.text)


import string
import random


def randon_string() -> str:
    """Return a random 10-character string of ASCII uppercase letters and digits.

    NOTE(review): the name is misspelled ("randon" -> "random") but is kept
    unchanged for backward compatibility with any external callers. Uses the
    `random` module, so the result is NOT suitable for security tokens
    (use `secrets` for that).
    """
    alphabet = string.ascii_uppercase + string.digits
    return "".join(random.choices(alphabet, k=10))


def chat_actions():
    """on_submit callback for st.chat_input.

    Appends the user's message to the session chat history, sends it to the
    Gemini model, and appends the model's reply so the script's render loop
    can display the full conversation on the next rerun.
    """
    user_message = st.session_state["chat_input"]
    st.session_state["chat_history"].append(
        {"role": "user", "content": user_message},
    )

    response = model.generate_content(user_message)
    st.session_state["chat_history"].append(
        {
            "role": "assistant",
            # Bug fix: store the reply text, not the raw response object --
            # "content" is passed straight to st.write() when rendering, and
            # the response object would display as its repr.
            "content": response.text,
        },
    )


# Initialise the conversation history the first time this session runs;
# st.session_state persists it across Streamlit reruns.
if "chat_history" not in st.session_state:
    st.session_state["chat_history"] = []


# Submitting the input box fires chat_actions(), which appends the user
# message and the model reply to the history; the loop below then
# re-renders the whole conversation top to bottom.
st.chat_input("Enter your message", on_submit=chat_actions, key="chat_input")

for entry in st.session_state["chat_history"]:
    with st.chat_message(name=entry["role"]):
        st.write(entry["content"])