hruday96 committed
Commit bb497c1 · verified · 1 Parent(s): 415ee47

Update app.py

Files changed (1)
  1. app.py +19 -17
app.py CHANGED
@@ -1,6 +1,7 @@
+import os
 import streamlit as st
 import requests
-import streamlit_cookies_manager
+from streamlit_cookies_manager import EncryptedCookieManager
 from openai import OpenAI
 import google.generativeai as genai

@@ -13,7 +14,7 @@ GEMINI_API_KEY = st.secrets["GEMINI_API_KEY"]
 # -----------------------------------------------------
 # Initialize OpenAI & Gemini
 # -----------------------------------------------------
-client = OpenAI(api_key=OPENAI_API_KEY)  # OpenAI client
+client = OpenAI(api_key=OPENAI_API_KEY)
 genai.configure(api_key=GEMINI_API_KEY)
 gemini_model = genai.GenerativeModel("gemini-pro")

@@ -25,29 +26,28 @@ st.title("🤖 AI Model Comparator")
 st.subheader("Compare responses across multiple LLMs.")

 # -----------------------------------------------------
-# Cookie Manager for Persistent Request Limits
+# Secure Cookie Manager for Request Limits
 # -----------------------------------------------------
-cookies = streamlit_cookies_manager.CookieManager()
+cookies = EncryptedCookieManager(
+    prefix="ai-model-comparator/",
+    password=os.environ.get("COOKIES_PASSWORD", "MySecretPassword")  # Use a strong secret in deployment
+)

-def get_request_count():
-    """Retrieve the request count from cookies."""
-    if "request_count" in cookies:
-        return int(cookies["request_count"])
-    return 0
+if not cookies.ready():
+    st.stop()  # Wait until cookies are available

-def update_request_count():
-    """Increment the request count and save it to cookies."""
-    count = get_request_count() + 1
-    cookies["request_count"] = str(count)
-    cookies.save()
+# Initialize request count if not set
+if "request_count" not in cookies:
+    cookies["request_count"] = 0
+
+request_count = int(cookies["request_count"])

 # -----------------------------------------------------
 # Check Request Limit (Max 3 Requests Per User)
 # -----------------------------------------------------
-request_count = get_request_count()
 if request_count >= 3:
     st.warning("⚠️ Request limit reached (3 per user). Please wait before trying again.")
-    st.stop()  # Stop execution if limit is reached
+    st.stop()

 # -----------------------------------------------------
 # Sidebar: Model Settings and Future Works Section
@@ -107,7 +107,9 @@ def get_gemini_response(prompt):
 # Generate Responses when Button is Clicked
 # -----------------------------------------------------
 if st.button("🚀 Generate Responses"):
-    update_request_count()  # Increment the user's request count
+    request_count += 1
+    cookies["request_count"] = request_count  # Update cookie count
+    cookies.save()  # Persist changes

     with st.spinner("Fetching responses..."):
         openai_text = get_openai_response(user_prompt)
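
For reference, below is a minimal, self-contained sketch of the cookie-based request limit this commit switches to. It assumes the streamlit-cookies-manager package is installed and that COOKIES_PASSWORD is supplied as an environment variable (the fallback password is only a placeholder); the MAX_REQUESTS constant and the trailing st.write are illustrative additions, not taken from app.py. One caveat worth noting: cookie values round-trip as strings, so the counter is converted explicitly on read and write in this sketch.

# Minimal sketch of the request-limit pattern adopted in this commit (not the full app).
import os

import streamlit as st
from streamlit_cookies_manager import EncryptedCookieManager

MAX_REQUESTS = 3  # illustrative constant; app.py hard-codes the value 3

# Encrypted cookies: the password should come from a real secret in deployment.
cookies = EncryptedCookieManager(
    prefix="ai-model-comparator/",
    password=os.environ.get("COOKIES_PASSWORD", "MySecretPassword"),
)
if not cookies.ready():
    st.stop()  # wait for the browser to send its cookies back on the first run

# Cookie values are stored as strings, so convert explicitly on read and write.
request_count = int(cookies.get("request_count", "0"))

if request_count >= MAX_REQUESTS:
    st.warning("⚠️ Request limit reached (3 per user). Please wait before trying again.")
    st.stop()

if st.button("🚀 Generate Responses"):
    request_count += 1
    cookies["request_count"] = str(request_count)
    cookies.save()  # persist the updated count back to the browser
    st.write(f"Requests used: {request_count}/{MAX_REQUESTS}")

In a deployed Space the password could equally be read from st.secrets, the same mechanism the app already uses for its API keys. Since the count lives in a browser cookie, the limit is per browser rather than a server-side guarantee.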