Aishwarya Solanki committed
Commit a6ce7be · 1 Parent(s): c3a4000

init commit

Files changed (1)
  1. app.py +106 -0
app.py ADDED
@@ -0,0 +1,106 @@
+ import gradio as gr
+ import os
+ import requests
+ import google.generativeai as genai
+ import openai
+
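+ # Query OpenAI's chat completions endpoint with a "talented diagnostician" system prompt
+ # and return the text of the first completion.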
+ def generate_text_chatgpt(key, prompt, temperature, top_p):
+
+     openai.api_key = key
+
+     response = openai.chat.completions.create(
+         model="gpt-4-0613",
+         messages=[{"role": "system", "content": "Suppose that you are a talented diagnostician"},
+                   {"role": "user", "content": prompt}],
+         temperature=temperature,
+         max_tokens=50,
+         top_p=top_p,
+         frequency_penalty=0
+     )
+
+     return response.choices[0].message.content
+
+
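+ # Generate a reply with Google's Gemini 1.5 Flash via the google-generativeai SDK,
+ # using the same temperature/top_p controls as the other backends.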
+ def generate_text_gemini(key, prompt, temperature, top_p):
+     genai.configure(api_key=key)
+
+     generation_config = genai.GenerationConfig(
+         max_output_tokens=len(prompt) + 50,
+         temperature=temperature,
+         top_p=top_p,
+     )
+     model = genai.GenerativeModel("gemini-1.5-flash", generation_config=generation_config)
+     response = model.generate_content(prompt)
+     return response.text
+
+
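+ # Call the Hugging Face Inference API for Llama-3.1-8B-Instruct; if the response echoes
+ # the prompt, return only the newly generated continuation.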
+ def generate_text_llama(key, prompt, temperature, top_p):
+     model_name = "meta-llama/Llama-3.1-8B-Instruct"
+
+     API_URL = f"https://api-inference.huggingface.co/models/{model_name}"
+     headers = {"Authorization": f"Bearer {key}"}
+     payload = {
+         "inputs": prompt,
+         "parameters": {
+             "temperature": temperature,
+             "max_new_tokens": 50,
+             "top_p": top_p,
+         }
+     }
+     response = requests.post(API_URL, headers=headers, json=payload)
+     resp_obj = response.json()
+     if isinstance(resp_obj, list):
+         resp = resp_obj[0]
+         if 'generated_text' in resp:
+             if len(resp['generated_text']) > len(prompt):
+                 return resp['generated_text'][len(prompt):]
+             return resp['generated_text']
+         return resp
+     return resp_obj
+
+
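+ # Route the symptom prompt to the selected backend and return the model's diagnosis,
+ # or a fallback message when the input or model choice is missing.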
+ def diagnose(key, model, top_p, temperature, symptom_prompt):
+
+     if symptom_prompt:
+         if "GPT" in model:
+             message = generate_text_chatgpt(key, symptom_prompt, temperature, top_p)
+         elif "Llama" in model:
+             message = generate_text_llama(key, symptom_prompt, temperature, top_p)
+         elif "Gemini" in model:
+             message = generate_text_gemini(key, symptom_prompt, temperature, top_p)
+         else:
+             message = "Incorrect model, please try again."
+     else:
+         message = "Please add the symptom data."
+
+     return message
+
+
+
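+ # Gradio Blocks UI: backend selector, API-key entry, and key links in one column;
+ # sampling controls, symptom input, and diagnosis output in the other.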
+ with gr.Blocks() as ui:
+     message = "Hello, welcome to the GUI by Team #9."
+
+     with gr.Row(equal_height=True):
+         with gr.Column(scale=1, min_width=300):
+             model = gr.Radio(label="LLM Selection", value="GPT-3.5-Turbo",
+                              choices=["GPT-3.5-Turbo", "Llama-3.1", "Gemini-1.5"])
+             key = gr.Textbox(label="Please input your LLM key", type="password")
+             gr.Button(value="Don't have an LLM key? Get one through the links below.")
+             gr.Button(value="OpenAI Key", link="https://platform.openai.com/account/api-keys")
+             # The Llama backend calls the Hugging Face Inference API, so it needs an HF access token.
+             gr.Button(value="Meta Llama Key", link="https://huggingface.co/settings/tokens")
+             gr.Button(value="Gemini Key", link="https://aistudio.google.com/app/apikey")
+             # `message` is still a plain string at this point, so only the key box can be cleared.
+             gr.ClearButton([key], variant="primary")
+
+         with gr.Column(scale=2, min_width=600):
+             message = gr.Textbox(label="", value=message, interactive=False, visible=True)
+             output = gr.Textbox(label="Model output status", value="Model hasn't run yet.")
+             temperature = gr.Slider(0.0, 1.0, value=0.7, step=0.01, label="Temperature", info="Set the Temperature")
+             # Nucleus-sampling parameter passed as top_p to each backend (must lie in [0, 1]).
+             top_p = gr.Slider(0.0, 1.0, value=0.9, step=0.01, label="top-p value", info="Set the nucleus-sampling top-p for LLM responses")
+             symptoms = gr.Textbox(label="Add the symptom data in the input to receive a diagnosis")
+             llm_btn = gr.Button(value="Diagnose Disease", variant="primary", elem_id="diagnose")
+             llm_btn.click(fn=diagnose, inputs=[key, model, top_p, temperature, symptoms], outputs=output, api_name="auditor")
+
+
+
+ ui.launch(share=True)