gemchat / app.py
import streamlit as st
import google.generativeai as genai
# Replace with your Gemini API key
API_KEY = "YOUR_GEMINI_API_KEY"  # placeholder; do not commit a real key
# Configure the Gemini client and model (model name assumed to be "gemini-pro")
genai.configure(api_key=API_KEY)
llm = genai.GenerativeModel("gemini-pro")
def generate_response(user_input):
    """
    Sends user input to Gemini and returns its response.

    Args:
        user_input: The user's message.

    Returns:
        The generated response from Gemini.
    """
    response = llm.generate_content(user_input)
    return response.text
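# Example direct call (assumes a valid API key is configured above):
#   generate_response("Hello, Gemini!")  # returns the model's text reply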
# Streamlit reruns the script on each interaction, so the blocking input()
# loop is replaced with a text input widget.
user_input = st.text_input("Enter your text")
if user_input and user_input.lower() != "quit":
    response = generate_response(user_input)
    st.write(response)
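# To run locally: streamlit run app.py
# (On a Hugging Face Space using the Streamlit SDK, app.py is launched automatically.)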