DreamStream-1 committed on
Commit
f2fc69b
·
verified ·
1 Parent(s): b297bf2

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +25 -0
app.py CHANGED
@@ -22,6 +22,22 @@ from sklearn.naive_bayes import GaussianNB
22
  from sklearn.metrics import accuracy_score
23
  import logging
24
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
25
  # Suppress TensorFlow warnings
26
  os.environ["CUDA_VISIBLE_DEVICES"] = "-1"
27
  os.environ["TF_CPP_MIN_LOG_LEVEL"] = "3"
@@ -333,6 +349,15 @@ with gr.Blocks(theme="shivi/calm_seafoam") as app:
333
  inputs=None,
334
  outputs=[user_input, chatbot]
335
  )
 
 
 
 
 
 
 
 
 
336
  with gr.Tab("Disease Prediction"):
337
  gr.HTML("""
338
  <h1 style="color: #388e3c; font-family: 'Helvetica', sans-serif; text-align: center; font-size: 3.5em; margin-bottom: 0;">
 
22
  from sklearn.metrics import accuracy_score
23
  import logging
24
 
25
# --- Groq LLM Integration ---
from groq import Groq

# Load the Groq API key from the Hugging Face Space secret.
# NOTE(review): the secret name is lowercase "groq" — confirm it matches the
# Space's configured secret name.
GROQ_API_KEY = os.environ.get("groq")
if not GROQ_API_KEY:
    # Warn loudly at startup instead of passing api_key=None to the client,
    # which would only surface later as an opaque authentication error on the
    # first chat request.
    logging.warning("Groq API key secret 'groq' is not set; Groq LLM calls will fail.")

# Module-level client, created once and reused by every request handler.
client = Groq(api_key=GROQ_API_KEY)
32
def ask_groq_llm(user_message):
    """Send a single user message to the Groq LLM and return its reply.

    Parameters
    ----------
    user_message : str
        The question typed by the user in the Gradio textbox.

    Returns
    -------
    str
        The model's reply text, or a short human-readable error message if
        the API call fails (missing key, network error, quota, etc.).
    """
    try:
        response = client.chat.completions.create(
            model="llama-3.3-70b-versatile",
            messages=[{"role": "user", "content": user_message}],
        )
        return response.choices[0].message.content
    except Exception as exc:
        # A Gradio callback should show a readable message in the output box
        # rather than crash the request with a raw traceback.
        logging.exception("Groq LLM request failed")
        return f"Error contacting Groq LLM: {exc}"
40
+
41
  # Suppress TensorFlow warnings
42
  os.environ["CUDA_VISIBLE_DEVICES"] = "-1"
43
  os.environ["TF_CPP_MIN_LOG_LEVEL"] = "3"
 
349
  inputs=None,
350
  outputs=[user_input, chatbot]
351
  )
352
with gr.Tab("Groq Medical Q&A"):
    # Minimal Q&A panel: one question box, one answer box, one submit button.
    question_box = gr.Textbox(label="Ask a medical question (Groq LLM)")
    answer_box = gr.Textbox(label="Groq LLM Response")
    ask_button = gr.Button("Ask Groq LLM")
    # Route the typed question through the module-level Groq helper and
    # display the reply in the answer box.
    ask_button.click(fn=ask_groq_llm, inputs=question_box, outputs=answer_box)
361
  with gr.Tab("Disease Prediction"):
362
  gr.HTML("""
363
  <h1 style="color: #388e3c; font-family: 'Helvetica', sans-serif; text-align: center; font-size: 3.5em; margin-bottom: 0;">