Update gradio_llm_example.py
gradio_llm_example.py  +25 -20
CHANGED
@@ -76,7 +76,7 @@ def upload_file(files_obj):
             choose_btn : gr.Button(value="Choose", visible=True)}
 
 
-def read_content(file_name):
+def read_content(content, file_name):
     print(file_name, type(file_name))
     temp_file_path = "./temp"
     file_path = os.path.join(temp_file_path, file_name)
@@ -84,22 +84,24 @@ def read_content(file_name):
     try:
         content = file.read()
         print(content)
-        return {
+        return { content_var : str(content[:10]),
+                 error_box : gr.Textbox(value=f"File ready to be used. \n You can ask a question about the uploaded PDF document.", visible=True)}
     except Exception as e:
         print(f"Error occurred while writing the file: {e}")
-        return {
+        return { content_var : str(content[:10]),
+                 error_box : gr.Textbox(value=f"Error occurred while writing the file: {e}", visible=True)}
 
 
-def
-    language_choice, max_length, temperature,
+def my_model(message, chat_history, content_var,
+             language_choice, model_choice, max_length, temperature,
              num_return_sequences, top_p, no_repeat_ngram_size):
     #No LLM here, just respond with a random pre-made message
-    if
-        bot_message = f"
+    if content_var == "":
+        bot_message = f"Pas de context : {content_var}" + random.choice(["Tell me more about it",
                                                                          "Cool, but I'm not interested",
                                                                          "Hmmmm, ok then"])
     else:
-        bot_message = "
+        bot_message = f" Voici le context {content_var}"
     chat_history.append((message, bot_message))
     return "", chat_history
 
@@ -118,7 +120,7 @@ with gr.Blocks(theme=gr.themes.Soft()) as gradioApp:
         intro_gr = gr.Markdown(intro)
 
         # Upload several documents
-
+        content_var = gr.State("")
         upload_button = gr.UploadButton("Browse files", label="Drag and drop your documents here",
                                         size="lg", scale=0, min_width=100,
                                         file_types=["pdf"], file_count="multiple")
@@ -129,32 +131,35 @@ with gr.Blocks(theme=gr.themes.Soft()) as gradioApp:
 
         # Read only one document
        error_box = gr.Textbox(label="Reading files... ", visible=False)
-        choose_btn.click(read_content, inputs=uploaded_check, outputs=error_box)
+        choose_btn.click(read_content, inputs=[content_var, uploaded_check], outputs=[content_var, error_box])
 
         # Select advanced options
         gr.Markdown(""" ## Toolbox """)
         with gr.Accordion(label="Select advanced options",open=False):
+            model_choice = gr.Dropdown(["LLM", "Other"], label="Model", info="Choose your AI model")
             language_choice = gr.Dropdown(["English", "French"], label="Language", info="Choose your language")
             max_length = gr.Slider(label="Token length", minimum=1, maximum=100, value=50, step=1)
             temperature= gr.Slider(label="Temperature", minimum=0.1, maximum=1, value=0.8, step=0.1)
-            num_return_sequences= gr.Slider(label="
-            top_p= gr.Slider(label="
-            no_repeat_ngram_size= gr.Slider(label="
+            num_return_sequences= gr.Slider(label="Return Sequence", minimum=0.1, maximum=50, value=1, step=0.1)
+            top_p= gr.Slider(label="top p", minimum=0.1, maximum=1, value=0.8, step=0.1)
+            no_repeat_ngram_size= gr.Slider(label="repeat", minimum=0.1, maximum=1, value=3, step=0.1)
 
 
     # Chat
     with gr.Column(scale=2, min_width=600):
         title2_gr = gr.Markdown(title2)
-        chatbot = gr.Chatbot(label="Bot", height=500)
+        chatbot = gr.Chatbot(label="Bot", height=500, queue = True)
         msg = gr.Textbox(label="User", placeholder="Ask any question.")
-        chatbot_btn = gr.Button("Submit")
-
-
-
-
+        # chatbot_btn = gr.Button("Submit")
+        msg.submit(my_model, queue = True,
+                   inputs=[msg, chatbot, content_var,
+                           language_choice, model_choice, max_length, temperature,
+                           num_return_sequences, top_p, no_repeat_ngram_size],
+                   outputs=[msg, chatbot])
         clear = gr.ClearButton(components=[msg, chatbot], value="Clear console")
 
 
 gr.close_all()
-gradioApp.
+gradioApp.queue()
+gradioApp.launch(share=True, auth=("neovision", "gradio2023"))
 #auth=("neovision", "gradio2023")
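For reference, the pattern this change relies on (a gr.State value threaded through the click and submit handlers, with dict-style returns updating several components at once) can be reduced to a minimal, self-contained sketch. This is not part of the commit: it assumes Gradio 4.x, where returning a component instance such as gr.Textbox(...) acts as an update and Chatbot history is a list of (user, bot) tuples, and the names load_content, respond, and status_box are illustrative stand-ins for the file's read_content, my_model, and error_box.

# Minimal sketch (illustrative, not part of the commit), assuming Gradio 4.x.
import random
import gradio as gr

def load_content(content, file_name):
    # Stand-in for read_content: put some text into the State and reveal the status box.
    try:
        text = f"contents of {file_name}"  # pretend the file was read here
        return {content_var: text,
                status_box: gr.Textbox(value="File ready to be used.", visible=True)}
    except Exception as e:
        return {content_var: content,
                status_box: gr.Textbox(value=f"Error occurred while reading the file: {e}", visible=True)}

def respond(message, chat_history, content):
    # Stand-in for my_model: answer differently depending on whether context exists.
    if content == "":
        bot_message = random.choice(["Tell me more about it", "Hmmmm, ok then"])
    else:
        bot_message = f"Using context: {content[:30]}"
    chat_history.append((message, bot_message))
    return "", chat_history

with gr.Blocks() as demo:
    content_var = gr.State("")  # per-session storage for the document text
    file_name = gr.Textbox(label="File name")
    status_box = gr.Textbox(label="Status", visible=False)
    load_btn = gr.Button("Load")
    chatbot = gr.Chatbot(label="Bot")
    msg = gr.Textbox(label="User")

    # Returning a dict keyed by components lets one handler update the State and the
    # status box together, the same shape as read_content's return values.
    load_btn.click(load_content, inputs=[content_var, file_name],
                   outputs=[content_var, status_box])
    # Pressing Enter sends the message, the history and the State to the handler,
    # which clears the textbox and returns the updated history.
    msg.submit(respond, inputs=[msg, chatbot, content_var], outputs=[msg, chatbot])

demo.queue()  # enable the event queue before launching
demo.launch(auth=("user", "password"))  # basic auth, analogous to the commit's launch() call

Using gr.State keeps the extracted text scoped to each user session rather than a module-level global, which is why content_var appears in both the inputs and outputs lists of choose_btn.click in the commit.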