Commit 076d731 · 1 parent: 3c712c1 · "test"

app.py CHANGED
@@ -3,8 +3,6 @@ from transformers import AutoTokenizer, AutoModelForCausalLM
 import time
 import openai
 
-openai.api_key = "OPENAI_API_KEY"
-
 # Load the Vicuna 7B v1.3 LMSys model and tokenizer
 model_name = "lmsys/vicuna-7b-v1.3"
 tokenizer = AutoTokenizer.from_pretrained(model_name)
@@ -26,41 +24,43 @@ def respond(message, chat_history):
     time.sleep(2)
     return "", chat_history
 
-def interface():
+def interface(tab_name):
     gr.Markdown(" Description ")
 
-
-
+    textbox_prompt = gr.Textbox(show_label=False, placeholder="Write a prompt and press enter")
+    openai.api_key = gr.Textbox(label="Open AI Key", placeholder="Enter your Openai key here", type="password")
+
+    prompt = template_single.format(tab_name, textbox_prompt)
 
     gr.Markdown("Strategy 1 QA-Based Prompting")
     with gr.Row():
-
-
-
-        clear = gr.ClearButton([
+        vicuna_S1_chatbot = gr.Chatbot(label="vicuna-7b")
+        llama_S1_chatbot = gr.Chatbot(label="llama-7b")
+        gpt_S1_chatbot = gr.Chatbot(label="gpt-3.5")
+        clear = gr.ClearButton([prompt, vicuna_S1_chatbot])
     gr.Markdown("Strategy 2 Instruction-Based Prompting")
     with gr.Row():
-
-
-
-        clear = gr.ClearButton([
+        vicuna_S2_chatbot = gr.Chatbot(label="vicuna-7b")
+        llama_S2_chatbot = gr.Chatbot(label="llama-7b")
+        gpt_S2_chatbot = gr.Chatbot(label="gpt-3.5")
+        clear = gr.ClearButton([prompt, vicuna_S2_chatbot])
    gr.Markdown("Strategy 3 Structured Prompting")
     with gr.Row():
-
-
-
-        clear = gr.ClearButton([
+        vicuna_S3_chatbot = gr.Chatbot(label="vicuna-7b")
+        llama_S3_chatbot = gr.Chatbot(label="llama-7b")
+        gpt_S3_chatbot = gr.Chatbot(label="gpt-3.5")
+        clear = gr.ClearButton([prompt, vicuna_S3_chatbot])
 
-
-
-
+    prompt.submit(respond, [prompt, vicuna_S1_chatbot], [prompt, vicuna_S1_chatbot])
+    prompt.submit(respond, [prompt, vicuna_S2_chatbot], [prompt, vicuna_S2_chatbot])
+    prompt.submit(respond, [prompt, vicuna_S3_chatbot], [prompt, vicuna_S3_chatbot])
 
 
 with gr.Blocks() as demo:
     gr.Markdown("# LLM Evaluator With Linguistic Scrutiny")
 
     with gr.Tab("Noun"):
-        interface()
+        interface("Noun")
 
     with gr.Tab("Determiner"):
         gr.Markdown(" Description ")
@@ -87,13 +87,13 @@ with gr.Blocks() as demo:
         clear = gr.ClearButton([prompt_CHUNK, vicuna_S3_chatbot_CHUNK])
 
     with gr.Tab("Noun phrase"):
-        interface()
+        interface("Noun phrase")
     with gr.Tab("Verb phrase"):
-        interface()
+        interface("Verb phrase")
     with gr.Tab("Dependent clause"):
-        interface()
+        interface("Dependent clause")
     with gr.Tab("T-units"):
-        interface()
+        interface("T-units")
 
     prompt_CHUNK.submit(respond, [prompt_CHUNK, vicuna_S1_chatbot_CHUNK], [prompt_CHUNK, vicuna_S1_chatbot_CHUNK])
     prompt_CHUNK.submit(respond, [prompt_CHUNK, vicuna_S2_chatbot_CHUNK], [prompt_CHUNK, vicuna_S2_chatbot_CHUNK])
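The commit replaces a hard-coded openai.api_key with a per-tab key textbox and factors the repeated per-tab UI into a reusable interface(tab_name) helper. Below is a minimal, self-contained sketch of that pattern, not the committed code: the respond() body, template_single, and the model calls sit outside the changed hunks, so they are stand-ins here, and the sketch assumes a Gradio version that provides gr.ClearButton. It also wires the Textbox component (rather than a pre-formatted string) into .submit, since Gradio event listeners and gr.ClearButton expect components; only Strategy 1 is shown.

import time

import gradio as gr

# Hypothetical prompt template; the real template_single is defined elsewhere in app.py.
template_single = "Identify the {0} in the following sentence: {1}"


def respond(message, chat_history, tab_name="Noun"):
    # Placeholder reply; the real app queries the vicuna/llama/gpt models here.
    prompt = template_single.format(tab_name, message)
    chat_history.append((message, f"(model reply to: {prompt})"))
    time.sleep(2)
    return "", chat_history


def interface(tab_name):
    gr.Markdown(" Description ")
    textbox_prompt = gr.Textbox(show_label=False, placeholder="Write a prompt and press enter")

    gr.Markdown("Strategy 1 QA-Based Prompting")
    with gr.Row():
        vicuna_S1_chatbot = gr.Chatbot(label="vicuna-7b")
        llama_S1_chatbot = gr.Chatbot(label="llama-7b")
        gpt_S1_chatbot = gr.Chatbot(label="gpt-3.5")
    gr.ClearButton([textbox_prompt, vicuna_S1_chatbot, llama_S1_chatbot, gpt_S1_chatbot])

    # Event wiring: pass the Textbox component to .submit and apply the template
    # inside respond(); components, not formatted strings, act as inputs/outputs.
    textbox_prompt.submit(
        lambda message, history: respond(message, history, tab_name),
        [textbox_prompt, vicuna_S1_chatbot],
        [textbox_prompt, vicuna_S1_chatbot],
    )


with gr.Blocks() as demo:
    gr.Markdown("# LLM Evaluator With Linguistic Scrutiny")
    for tab_name in ["Noun", "Noun phrase", "Verb phrase", "Dependent clause", "T-units"]:
        with gr.Tab(tab_name):
            interface(tab_name)

if __name__ == "__main__":
    demo.launch()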