# NOTE(review): the three lines here ("Spaces:" / "Sleeping" / "Sleeping") were
# HuggingFace Spaces page-scrape residue, not Python — commented out so the file parses.
"""LemmaTeks: AI-Powered Text Generator.

A single-page Streamlit front-end for Google Gemini. The API key is read
from the GEMINI_API_KEY environment variable; the sidebar controls output
format, tone, length, creativity, and two optional prompt modifiers.
"""
import streamlit as st
import google.generativeai as genai
import requests
import os

# Model name pinned in one place so an upgrade is a one-line change.
MODEL_ID = "gemini-2.0-flash-exp"

api_key = os.getenv("GEMINI_API_KEY")
# Fail fast with a clear message instead of letting the first API call
# die with an opaque authentication error when the key is missing.
if not api_key:
    st.error("GEMINI_API_KEY environment variable is not set.")
    st.stop()
genai.configure(api_key=api_key)

# Cache the model object in session state so Streamlit reruns (which
# re-execute this whole script) do not rebuild it on every interaction.
if "model" not in st.session_state:
    st.session_state.model = genai.GenerativeModel(MODEL_ID)
model = st.session_state.model

# Optional instruction suffixes, filled in by the sidebar checkboxes below.
creative_prompt = ""
factcheck_prompt = ""

# Page Title
st.title("LemmaTeks: AI-Powered Text Generator")

# Sidebar for Settings
with st.sidebar:
    st.header("Configuration")
    # Dropdown for Output Format
    output_format = st.selectbox(
        "Choose Output Format:",
        ["Story", "Poem", "Article", "Code"]
    )
    # Dropdown for Tone/Style
    tone_style = st.selectbox(
        "Select Tone/Style:",
        ["Formal", "Informal", "Humorous", "Technical"]
    )
    # Sliders for Text Length and Creativity
    text_length = st.slider("Text Length (words):", min_value=50, max_value=2000, value=1000, step=50)
    creativity_level = st.slider("Creativity Level:", min_value=0.0, max_value=1.0, value=0.3, step=0.1)
    # Checkboxes for Features
    creative_mode = st.checkbox("Enable Creative Mode")
    fact_checking = st.checkbox("Enable Fact-Checking")
    # Modify prompt suffixes based on settings.
    if creative_mode:
        creative_prompt = " Optimize the creativity of your response. "
    if fact_checking:
        factcheck_prompt = "Support your answer with evidences. "

# Text Input Field
user_prompt = st.text_area("Enter Your Prompt Here:")

# Submit Button
if st.button("Generate"):
    # Validate the RAW user input; previously the suffixes were appended
    # before this check, so an empty prompt with a checkbox ticked slipped
    # past the warning and sent only the suffix text to the model.
    if user_prompt.strip() == "":
        st.warning("Please enter a prompt before generating!")
    else:
        # Append the creative / fact-checking modifiers only once the
        # input is known to be non-empty.
        full_prompt = user_prompt + creative_prompt + factcheck_prompt
        # Process AI Request
        st.write("Generating responses...")
        try:
            response = model.generate_content(
                f"Format: {output_format}\nTone: {tone_style}\nPrompt: {full_prompt}",
                generation_config=genai.GenerationConfig(
                    # Rough words->tokens headroom: a token is ~0.75 English
                    # words, so 2x the word budget avoids truncation.
                    max_output_tokens=text_length * 2,
                    temperature=creativity_level,
                ),
            )
            st.markdown(response.text)
        except Exception as e:
            # Top-level UI boundary: surface any API/network failure to the
            # user rather than crashing the Streamlit script.
            st.error(f"Exception occurred: {e}")