decodingdatascience committed on
Commit
31ac8cf
·
verified ·
1 Parent(s): eb94f2d

Create app.py

Browse files
Files changed (1) hide show
  1. app.py +98 -0
app.py ADDED
@@ -0,0 +1,98 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
# ── Imports & API setup ─────────────────────────────────────────────────────────
import os

import requests
import gradio as gr

# Read the Groq API key. Prefer Colab's secret store when the script runs in
# Colab, but fall back to an environment variable so this app.py also works
# outside Colab (e.g. deployed as a Hugging Face Space, where google.colab
# does not exist and the unconditional import would crash at startup).
try:
    from google.colab import userdata
    groq_api_key = userdata.get("GROQ_API_KEY")
except ImportError:
    groq_api_key = os.environ.get("GROQ_API_KEY")

# Groq's OpenAI-compatible chat-completions endpoint and its auth header.
url = "https://api.groq.com/openai/v1/chat/completions"
headers = {"Authorization": f"Bearer {groq_api_key}"}
10
+
11
# ── Core chat call ──────────────────────────────────────────────────────────────
def chat_with_groq(user_input: str) -> str:
    """Send a single-turn user message to Groq's chat-completions API.

    Parameters
    ----------
    user_input : str
        The raw user message; sent as the sole message of the conversation
        (no prior history is forwarded).

    Returns
    -------
    str
        The assistant's reply on HTTP 200, otherwise an ``"Error: ..."``
        string containing the API's JSON error payload.
    """
    body = {
        "model": "llama-3.1-8b-instant",
        "messages": [{"role": "user", "content": user_input}],
    }
    # requests has NO default timeout — without one a stalled API call would
    # hang the Gradio UI forever.
    r = requests.post(url, headers=headers, json=body, timeout=60)
    # Parse the response body once instead of calling r.json() twice.
    payload = r.json()
    if r.status_code == 200:
        return payload['choices'][0]['message']['content']
    return f"Error: {payload}"
20
+
21
# ── UI helpers ─────────────────────────────────────────────────────────────────
# Inline CSS for the Blocks layout: header alignment, logo sizing, title
# styling, and the two-column split (chat left, quick-question panel right).
CSS = """
#header-row {align-items:center; gap:0.75rem;}
#logo {max-width:60px; border-radius:8px;}
#title {font-size:2rem; font-weight:600; margin:0;}
#left-col {width:64%;}
#right-col {width:35%; padding-left:1rem; border:1px solid #e5e5e5;
            border-radius:8px; padding:1rem;}
"""

# Logo shown in the header row; fetched from GitHub when the page renders.
LOGO_URL = (
    "https://raw.githubusercontent.com/Decoding-Data-Science/"
    "airesidency/main/dds_logo.jpg"
)

# Preset prompts offered in the right-hand dropdown; picking one populates
# the chat input box (the user can still edit before sending).
LLM_QUESTIONS = [
    "What is Retrieval-Augmented Generation (RAG)?",
    "Explain Chain-of-Thought prompting in simple terms.",
    "How do I fine-tune an LLM on my own data?",
    "What are the security risks of LLM applications?",
    "Compare zero-shot vs few-shot prompting.",
    "What is the role of vector databases with LLMs?"
]
44
+
45
# ── Build the Gradio app ───────────────────────────────────────────────────────
with gr.Blocks(css=CSS, title="DDS Chat with Groq AI") as demo:
    # Header row: logo + title
    with gr.Row(elem_id="header-row"):
        gr.Image(value=LOGO_URL, elem_id="logo", show_label=False,
                 show_download_button=False)
        gr.Markdown("<div id='title'>DDS Chat with Groq AI (Llama 3.1-8B)</div>")

    gr.Markdown("Ask anything—or pick a quick question on the right.")

    with gr.Row():
        # ── Left column: chat interface ───────────────────────────────────────
        with gr.Column(elem_id="left-col"):
            # NOTE(review): uses the default (user, bot) tuple history format;
            # `state` mirrors the Chatbot contents so `respond` can append.
            chatbot = gr.Chatbot(height=450, label="Conversation")
            user_box = gr.Textbox(placeholder="Type your question…",
                                  show_label=False, lines=2)
            send_btn = gr.Button("Send", variant="primary")
            state = gr.State([])

        # ── Right column: quick-question panel ───────────────────────────────
        with gr.Column(elem_id="right-col"):
            gr.Markdown("**LLM Quick Questions**")
            question_dd = gr.Dropdown(choices=LLM_QUESTIONS,
                                      label="Select a question",
                                      interactive=True)
            gr.Markdown(
                "Pick a topic and it will populate the input box. "
                "Feel free to edit before sending."
            )

    # ── Logic glue ───────────────────────────────────────────────────────────
    def respond(user_msg: str, history: list):
        """Call the LLM and append the (user, reply) pair to the history.

        Returns the updated history twice: once to refresh the Chatbot
        display and once to persist it in the State component.
        """
        reply = chat_with_groq(user_msg)
        history = history + [(user_msg, reply)]
        return history, history

    # Send the message, update chat + state, then clear the input box.
    send_btn.click(
        fn=respond,
        inputs=[user_box, state],
        outputs=[chatbot, state],
        queue=False
    ).then(
        lambda: "", None, user_box, queue=False  # clear textbox
    )

    # Selecting a quick question copies it into the input box for editing.
    question_dd.change(
        lambda q: gr.update(value=q),
        inputs=question_dd,
        outputs=user_box,
        queue=False
    )

demo.launch()
98
+