Update app.py
app.py CHANGED
@@ -27,20 +27,19 @@ def to_markdown(text):
     return Markdown(textwrap.indent(text, '> ', predicate=lambda _: True))


-def chat(chat_history):
-    """Generates a response
+def chat(user_message):
+    """Generates a response to the user's message.

     Args:
-
+        user_message (str): The user's message.

     Returns:
-        str: The AI
+        str: The AI-generated response (or a message indicating unavailability).
     """

     if not genai:
         return "AI responses are currently unavailable. Please install `google-generativeai` for this functionality."

-    user_message = chat_history[-1]  # Get the latest user message
     try:
         response = model.generate_content(user_message, stream=True)
         for chunk in response:
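The hunk cuts off inside the streaming loop because the unchanged lines 46-50 of app.py are hidden as diff context. A minimal sketch of the full chat() after this change, assuming the hidden lines concatenate each chunk's text and return the result, and that app.py guards the google-generativeai import; the placeholder key and the error handling here are illustrative, not the file's actual code:

# Sketch only: the loop body and error handling below are assumptions,
# since lines 46-50 of app.py are not shown in the diff.
try:
    import google.generativeai as genai      # assumed: app.py guards this import
except ImportError:
    genai = None

if genai:
    genai.configure(api_key="YOUR_API_KEY")  # placeholder, not the key in app.py
    model = genai.GenerativeModel("gemini-pro")

def chat(user_message):
    """Generates a response to the user's message."""
    if not genai:
        return ("AI responses are currently unavailable. "
                "Please install `google-generativeai` for this functionality.")
    try:
        response = model.generate_content(user_message, stream=True)
        reply = ""
        for chunk in response:
            reply += chunk.text              # assumed: stream chunks are concatenated in order
        return reply
    except Exception as exc:                 # assumed: errors are surfaced as a text reply
        return f"Error generating a response: {exc}"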
@@ -52,10 +51,10 @@ def chat(chat_history):

 interface = gr.Interface(
     fn=chat,
-    inputs="
+    inputs="textbox",
     outputs="textbox",
     title="Gradio Chat App",
-    description="Chat with an AI assistant (requires `google-generativeai`)"
+    description="Chat with an AI assistant (requires `google-generativeai`)",
 )

 interface.launch()
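Gradio resolves the string shortcuts "textbox" to gr.Textbox components, so the repaired inputs line plus the trailing comma give a valid Interface definition. A standalone sketch of that wiring with a stub chat() so it runs without google-generativeai; the stub reply is illustrative:

# Standalone sketch of the repaired Interface wiring; chat() is a stub
# so the demo runs without an API key.
import gradio as gr

def chat(user_message):
    return f"(stub) You said: {user_message}"  # stand-in for the model call

interface = gr.Interface(
    fn=chat,
    inputs="textbox",                          # string shortcut for gr.Textbox()
    outputs="textbox",
    title="Gradio Chat App",
    description="Chat with an AI assistant (requires `google-generativeai`)",
)

if __name__ == "__main__":
    interface.launch()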
@@ -68,11 +67,8 @@ if __name__ == "__main__":
     genai.configure(api_key='AIzaSyCMBk81YmILNTok8hd6tYtJaevp1qbl6I0')  # Replace with your actual API key
     model = genai.GenerativeModel('gemini-pro')

-    chat_history = []
     while True:
         user_message = input("You: ")
-
-        response = chat(chat_history)
+        response = chat(user_message)
         print(f"AI: {response}")
-        chat_history.append(response)
         print("-" * 80)
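This hunk makes the console loop stateless: each turn sends only the latest message, so the model gets no prior context. If multi-turn memory is wanted later, google-generativeai's chat-session API is the usual route; a sketch of that alternative, not part of app.py, with illustrative names:

# Optional alternative, not in app.py: a ChatSession keeps the history
# that the removed chat_history list used to track manually.
import google.generativeai as genai

genai.configure(api_key="YOUR_API_KEY")            # placeholder key
model = genai.GenerativeModel("gemini-pro")
session = model.start_chat(history=[])             # the session stores prior turns

while True:
    user_message = input("You: ")
    reply = session.send_message(user_message)     # earlier turns are sent automatically
    print(f"AI: {reply.text}")
    print("-" * 80)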