Delete app copy.py
app copy.py  +0 -89
app copy.py
DELETED
@@ -1,89 +0,0 @@
import gradio as gr
from bedrock_client import claude_llm, get_anthropic_client, claude_stream_response
from utils import load_users

AUTHS = load_users('user.csv')
HISTORY_LIMIT = 30  # max number of turns (user+assistant) to keep


# 1) Your system prompt
SYSTEM_PROMPT = (
    "Du bist DevalBot, ein konversationeller Assistent des Deutschen Evaluierungsinstituts für Entwicklungsbewertung (DEval). DEval bietet staatlichen und zivilgesellschaftlichen Organisationen in der Entwicklungszusammenarbeit unabhängige und wissenschaftlich fundierte Evaluierungen. Deine Hauptsprache ist Deutsch; antworte daher standardmäßig auf Deutsch. Du kannst zudem bei statistischen Analysen und Programmierung in Stata und R unterstützen. Antworte sachlich, präzise und stelle bei Unklarheiten klärende Rückfragen."
)


def chat(user_message, history):
    # ── 1) Guard against empty input ─────────────────────
    if not user_message or not user_message.strip():
        return

    # ── 2) Build the LLM's messages list ─────────────────
    # Always start with the SYSTEM_PROMPT, then the UI history,
    # then the new user turn:
    llm_messages = [{"role": "system", "content": SYSTEM_PROMPT}]
    llm_messages += history
    llm_messages.append({"role": "user", "content": user_message})

    # ── 3) Kick off the streaming call ───────────────────
    client = get_anthropic_client()
    streamer = lambda msgs: claude_stream_response(msgs, client)

    # ── 4) Immediately show the user's turn in the UI ────
    ui_history = history + [{"role": "user", "content": user_message}]

    full_resp = ""
    try:
        for chunk in streamer(llm_messages):
            full_resp += chunk
            # yield the UI history plus the growing assistant bubble
            yield ui_history + [{"role": "assistant", "content": full_resp}]
    except Exception as e:
        # surface any error inline
        err = f"⚠️ Oops, something went wrong: {e}"
        yield ui_history + [{"role": "assistant", "content": err}]
        return

    # ── 5) Finalize the assistant turn in the UI ─────────
    ui_history.append({"role": "assistant", "content": full_resp})

    # ── 6) Trim to the last N turns ──────────────────────
    if len(ui_history) > HISTORY_LIMIT:
        ui_history = ui_history[-HISTORY_LIMIT:]

    yield ui_history


with gr.Blocks(css_paths=["static/deval.css"], theme=gr.themes.Default(primary_hue="blue", secondary_hue="yellow")) as demo:
    # ── Logo + Header + Logout ────────────────────────────
    gr.Image(
        value="static/logo.png",
        show_label=False,
        interactive=False,
        show_download_button=False,
        show_fullscreen_button=False,
        elem_id="logo-primary",  # matches the CSS above
    )

    # logout_btn = gr.Button("Logout", elem_id="logout-btn")
    # inject auto-reload script
    gr.HTML(
        """
        <script>
        // Reload the page after 1 second (1,000 ms)
        setTimeout(() => {
            window.location.reload();
        }, 1000);
        </script>
        """
    )
    gr.ChatInterface(
        chat,
        type="messages",
        editable=True,
        concurrency_limit=200,
        save_history=True,
    )


demo.queue().launch(auth=AUTHS, ssr_mode=False)
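
Note: the deleted app depends on get_anthropic_client and claude_stream_response from bedrock_client, which this commit does not touch. Below is a minimal sketch of the interface chat() appears to assume (a client factory plus a generator yielding text chunks), written against the anthropic SDK's Bedrock client. The model ID, the default max_tokens, and the splitting of the "system" role entry are assumptions, not the repository's actual implementation.

# Hypothetical sketch of the bedrock_client interface used by app copy.py;
# NOT the repository's actual module.
from anthropic import AnthropicBedrock

# Placeholder model ID; the real module may target a different Claude version.
MODEL_ID = "anthropic.claude-3-5-sonnet-20240620-v1:0"


def get_anthropic_client():
    # Region and AWS credentials are resolved from the environment
    # (e.g. AWS_REGION, AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY).
    return AnthropicBedrock()


def claude_stream_response(messages, client, max_tokens=1024):
    # The app prepends a {"role": "system", ...} entry, while the Messages API
    # takes the system prompt as a separate argument, so split it out here.
    system = ""
    chat_messages = []
    for m in messages:
        if m["role"] == "system":
            system = m["content"]
        else:
            chat_messages.append({"role": m["role"], "content": m["content"]})

    # Stream the completion and yield plain-text chunks, which chat()
    # concatenates into the growing assistant bubble.
    with client.messages.stream(
        model=MODEL_ID,
        max_tokens=max_tokens,
        system=system,
        messages=chat_messages,
    ) as stream:
        for text in stream.text_stream:
            yield text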