Update orchestrator/gemini.py
orchestrator/gemini.py  CHANGED  (+9 -4)
```diff
@@ -1,18 +1,16 @@
 import os
 import streamlit as st
-
 try:
     import google.generativeai as genai
 except ImportError:
     genai = None
 
 def get_gemini_api_key():
-    # Try Streamlit Secrets, then env var
     if hasattr(st.secrets, "GEMINI_API_KEY"):
         return st.secrets["GEMINI_API_KEY"]
     return os.getenv("GEMINI_API_KEY")
 
-def gemini_generate(prompt, model="gemini-1.5-flash"):
+def gemini_generate_code(prompt, model="gemini-1.5-pro"):
     api_key = get_gemini_api_key()
     if not api_key:
         raise RuntimeError("Gemini API key not set!")
@@ -21,5 +19,12 @@ def gemini_generate(prompt, model="gemini-1.5-flash"):
     genai.configure(api_key=api_key)
     model_obj = genai.GenerativeModel(model)
     response = model_obj.generate_content(prompt)
-    # Correct line:
     return response.text if hasattr(response, "text") else str(response)
+
+def gemini_explain_code(code, model="gemini-1.5-pro"):
+    # Ask Gemini to explain code in plain English
+    explanation_prompt = (
+        "Explain step-by-step, in clear language, what this Python MCP server code does, and draw a high-level block diagram in Mermaid format:\n\n" +
+        code
+    )
+    return gemini_generate_code(explanation_prompt, model)
```
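A side note on the key lookup kept by this diff: `hasattr(st.secrets, "GEMINI_API_KEY")` assumes the attribute check returns False when the key is absent, but depending on the Streamlit version, touching `st.secrets` with no `.streamlit/secrets.toml` present may raise instead. A minimal sketch of an equivalent lookup that falls back to the environment variable in either case (an illustrative variant, not part of this commit):

```python
# Illustrative variant of get_gemini_api_key (not part of the commit):
# fall back to the environment variable whenever Streamlit secrets are
# unavailable or do not contain the key.
import os
import streamlit as st

def get_gemini_api_key():
    try:
        return st.secrets["GEMINI_API_KEY"]
    except Exception:
        return os.getenv("GEMINI_API_KEY")
```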
orchestrator/gemini.py after this commit (two unchanged lines that the diff view collapses are marked with an ellipsis comment):

```python
import os
import streamlit as st

try:
    import google.generativeai as genai
except ImportError:
    genai = None

def get_gemini_api_key():
    if hasattr(st.secrets, "GEMINI_API_KEY"):
        return st.secrets["GEMINI_API_KEY"]
    return os.getenv("GEMINI_API_KEY")

def gemini_generate_code(prompt, model="gemini-1.5-pro"):
    api_key = get_gemini_api_key()
    if not api_key:
        raise RuntimeError("Gemini API key not set!")
    # ... (two unchanged lines not shown in the diff view) ...
    genai.configure(api_key=api_key)
    model_obj = genai.GenerativeModel(model)
    response = model_obj.generate_content(prompt)
    return response.text if hasattr(response, "text") else str(response)

def gemini_explain_code(code, model="gemini-1.5-pro"):
    # Ask Gemini to explain code in plain English
    explanation_prompt = (
        "Explain step-by-step, in clear language, what this Python MCP server code does, and draw a high-level block diagram in Mermaid format:\n\n" +
        code
    )
    return gemini_generate_code(explanation_prompt, model)
```
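For context, a hypothetical Streamlit page that calls the two helpers might look like the sketch below. The function names and the `orchestrator.gemini` import path come from this file; the widget labels and flow are illustrative assumptions only.

```python
# Hypothetical usage sketch (assumptions: this runs inside the Space's
# Streamlit app, GEMINI_API_KEY is configured, and google-generativeai
# is installed).
import streamlit as st
from orchestrator.gemini import gemini_generate_code, gemini_explain_code

prompt = st.text_area("Describe the MCP server you want")
if st.button("Generate") and prompt:
    generated = gemini_generate_code(prompt)  # returns response.text, or str(response) as a fallback
    st.code(generated, language="python")
    # The explanation includes a Mermaid block per the prompt in gemini_explain_code;
    # it is shown here as plain markdown text.
    st.markdown(gemini_explain_code(generated))
```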