Mubbashir Ahmed committed on
Commit
bb6169a
·
1 Parent(s): f3362e5

spider dataset work update 1

Browse files
Files changed (1) hide show
  1. app.py +26 -12
app.py CHANGED
@@ -1,4 +1,5 @@
1
  import os
 
2
  import gradio as gr
3
  from huggingface_hub import InferenceClient
4
  from datasets import load_dataset
@@ -6,7 +7,11 @@ from datasets import load_dataset
6
  # import torch
7
 
8
  HF_TOKEN = os.environ.get("HF_TOKEN")
9
- spider_data = load_dataset("spider")
 
 
 
 
10
 
11
  # ------------------------
12
  # API Clients
@@ -86,27 +91,30 @@ def run_model_with_history(model_name, user_input, chat_history):
86
 
87
  return chat_transcript, chat_history
88
 
 
 
 
 
 
 
 
89
  # ------------------------
90
  # Gradio UI
91
  # ------------------------
92
  with gr.Blocks() as demo:
93
  gr.Markdown("## 🧠 Generative AI Model Evaluation with Context")
94
 
95
- model_choice = gr.Dropdown(
96
- choices=["LLaMA 4", "Qwen3 14B"], # "Mixtral 8x7B" removed
97
- label="Select Model",
98
- value="LLaMA 4"
99
- )
 
 
100
 
101
  chat_display = gr.Textbox(label="Chat History", lines=20, interactive=False)
102
  prompt_input = gr.Textbox(label="Your Prompt", lines=3, placeholder="Ask your BI question...")
103
 
104
- random_index = 0
105
- if spider_data:
106
- spider_prompt = spider_data[random_index]["question"]
107
- chat_memory = []
108
- chat_display.update(f"👤 User: {spider_prompt}")
109
-
110
  run_button = gr.Button("Send")
111
 
112
  # Hidden chat history state
@@ -117,6 +125,12 @@ with gr.Blocks() as demo:
117
  inputs=[model_choice, prompt_input, chat_memory],
118
  outputs=[chat_display, chat_memory]
119
  )
 
 
 
 
 
 
120
 
121
  # Launch app
122
  demo.launch()
 
1
  import os
2
+ import random
3
  import gradio as gr
4
  from huggingface_hub import InferenceClient
5
  from datasets import load_dataset
 
7
  # import torch
8
 
9
  HF_TOKEN = os.environ.get("HF_TOKEN")
10
# ------------------------
# Load Spider Dataset (Hugging Face Datasets)
# ------------------------
# Loaded once at module import time; `get_random_spider_question` samples from it.
# NOTE(review): no error handling here — if the download/cache lookup fails,
# the app crashes at startup. Confirm that is acceptable for this Space.
spider_dataset = load_dataset("spider", split="train")
 
16
  # ------------------------
17
  # API Clients
 
91
 
92
  return chat_transcript, chat_history
93
 
94
# ------------------------
# Get Random Spider Question
# ------------------------
def get_random_spider_question():
    """Pick one example at random from the Spider train split and return its question text."""
    idx = random.randrange(len(spider_dataset))
    return spider_dataset[idx]["question"]
100
+
101
  # ------------------------
102
  # Gradio UI
103
  # ------------------------
104
  with gr.Blocks() as demo:
105
  gr.Markdown("## 🧠 Generative AI Model Evaluation with Context")
106
 
107
+ with gr.Row():
108
+ model_choice = gr.Dropdown(
109
+ choices=["LLaMA 4", "Qwen3 14B"],
110
+ label="Select Model",
111
+ value="LLaMA 4"
112
+ )
113
+ load_spider_btn = gr.Button("🔀 Load Random Spider Prompt")
114
 
115
  chat_display = gr.Textbox(label="Chat History", lines=20, interactive=False)
116
  prompt_input = gr.Textbox(label="Your Prompt", lines=3, placeholder="Ask your BI question...")
117
 
 
 
 
 
 
 
118
  run_button = gr.Button("Send")
119
 
120
  # Hidden chat history state
 
125
  inputs=[model_choice, prompt_input, chat_memory],
126
  outputs=[chat_display, chat_memory]
127
  )
128
+
129
+ load_spider_btn.click(
130
+ fn=get_random_spider_question,
131
+ inputs=[],
132
+ outputs=prompt_input
133
+ )
134
 
135
  # Launch app
136
  demo.launch()