Spaces:
Sleeping
Sleeping
kz209
committed on
Commit
•
bb237fc
1
Parent(s):
589c423
update
Browse files
pages/summarization_example.py
CHANGED
@@ -1,9 +1,10 @@
|
|
1 |
from dotenv import load_dotenv
|
2 |
-
# from transformers import pipeline
|
3 |
import gradio as gr
|
4 |
|
5 |
from utils.model import Model
|
6 |
|
|
|
|
|
7 |
load_dotenv()
|
8 |
|
9 |
examples = {
|
@@ -24,7 +25,6 @@ Back in Boston, Kidd is going to rely on Lively even more. He'll play close to 3
|
|
24 |
def generate_answer(sources, model_name, prompt):
|
25 |
assert "{sources}" in prompt, ValueError("No {sources} Found")
|
26 |
|
27 |
-
model = Model(model_name)
|
28 |
meta_prompt = prompt
|
29 |
content = meta_prompt.format(sources=sources)
|
30 |
answer = model.gen(content)
|
@@ -51,7 +51,7 @@ def create_summarization_interface():
|
|
51 |
|
52 |
Template_text = gr.Textbox(value="""{sources}
|
53 |
|
54 |
-
summarization: """, label='Input Prompting Template', lines=
|
55 |
|
56 |
input_text = gr.Textbox(label="Input Text", lines=10, placeholder="Enter text here...")
|
57 |
submit_button = gr.Button("✨ Submit ✨")
|
|
|
1 |
from dotenv import load_dotenv
|
|
|
2 |
import gradio as gr
|
3 |
|
4 |
from utils.model import Model
|
5 |
|
6 |
+
__default_model_name__ = "lmsys/vicuna-7b-v1.5"
|
7 |
+
model = Model(__default_model_name__)
|
8 |
load_dotenv()
|
9 |
|
10 |
examples = {
|
|
|
25 |
def generate_answer(sources, model_name, prompt):
|
26 |
assert "{sources}" in prompt, ValueError("No {sources} Found")
|
27 |
|
|
|
28 |
meta_prompt = prompt
|
29 |
content = meta_prompt.format(sources=sources)
|
30 |
answer = model.gen(content)
|
|
|
51 |
|
52 |
Template_text = gr.Textbox(value="""{sources}
|
53 |
|
54 |
+
summarization: """, label='Input Prompting Template', lines=8, placeholder='Input your prompts, must include \{sources\}')
|
55 |
|
56 |
input_text = gr.Textbox(label="Input Text", lines=10, placeholder="Enter text here...")
|
57 |
submit_button = gr.Button("✨ Submit ✨")
|