Update app.py
app.py CHANGED
@@ -638,7 +638,7 @@ def _():
 @app.cell
 def _(DirectLLMClient, mo):
     def display_response(user_prompt: str = None):
-        """Create a nice Marimo UI element to display the prompt and response."""
+        """Create a nice Marimo UI element to display the prompt and response with a loading spinner."""
         if not user_prompt.value:
             return mo.md(
                 f"""
@@ -657,7 +657,10 @@ def _(DirectLLMClient, mo):
 
         client = DirectLLMClient(provider='openai')
 
-
+        # Display a loading message with a spinner
+        with mo.status.spinner(subtitle='Generating response...') as spinner:
+            response = client.generate(prompt=user_prompt.value, system_message='You are a helpful assistant.')
+            spinner.update(subtitle='Formatting response...')
 
         final_response = mo.vstack(
             [