rajsinghparihar
committed on
Commit
•
2bae954
1
Parent(s):
2b898ff
remove example caching in gradio interface
Browse files
app.py
CHANGED
@@ -5,7 +5,6 @@ import json
|
|
5 |
from prompts import general_prompt
|
6 |
from gradio_pdf import PDF
|
7 |
import requests
|
8 |
-
import os
|
9 |
|
10 |
service_context_module = None
|
11 |
current_model = None
|
@@ -91,8 +90,6 @@ Created by [@rajsinghparihar](https://huggingface.co/rajsinghparihar) for extrac
|
|
91 |
label="Enter your Groq API KEY",
|
92 |
type="password",
|
93 |
)
|
94 |
-
if api_key == "" or not api_key:
|
95 |
-
api_key = os.getenv("GROQ_API_KEY")
|
96 |
available_models = gr.Dropdown(
|
97 |
value="llama3-70b-8192",
|
98 |
label="Choose your LLM",
|
@@ -131,6 +128,7 @@ Created by [@rajsinghparihar](https://huggingface.co/rajsinghparihar) for extrac
|
|
131 |
"Employee Name, Bank Name, Location, Total Salary, Total Deductions",
|
132 |
],
|
133 |
],
|
|
|
134 |
)
|
135 |
gr.Markdown("""
|
136 |
## Pros of LLMs as information extractors over current extraction solutions:
|
|
|
5 |
from prompts import general_prompt
|
6 |
from gradio_pdf import PDF
|
7 |
import requests
|
|
|
8 |
|
9 |
service_context_module = None
|
10 |
current_model = None
|
|
|
90 |
label="Enter your Groq API KEY",
|
91 |
type="password",
|
92 |
)
|
|
|
|
|
93 |
available_models = gr.Dropdown(
|
94 |
value="llama3-70b-8192",
|
95 |
label="Choose your LLM",
|
|
|
128 |
"Employee Name, Bank Name, Location, Total Salary, Total Deductions",
|
129 |
],
|
130 |
],
|
131 |
+
cache_examples="lazy",
|
132 |
)
|
133 |
gr.Markdown("""
|
134 |
## Pros of LLMs as information extractors over current extraction solutions:
|