legacy107 committed on
Commit
5861c8f
·
1 Parent(s): 7717dbf

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +3 -4
app.py CHANGED
@@ -17,7 +17,7 @@ bi_encoder.max_seq_length = 256
17
  top_k = 3
18
 
19
  # Load your fine-tuned model and tokenizer
20
- model_name = "legacy107/flan-t5-large-ia3-wiki-merged"
21
  tokenizer = AutoTokenizer.from_pretrained(model_name)
22
  model = T5ForConditionalGeneration.from_pretrained(model_name)
23
  max_length = 512
@@ -98,7 +98,7 @@ def generate_answer(question, context, ground):
98
  # Decode and return the generated answer
99
  generated_answer = tokenizer.decode(generated_ids[0], skip_special_tokens=True)
100
 
101
- return generated_answer, context, ground
102
 
103
 
104
  # Define a function to list examples from the dataset
@@ -122,8 +122,7 @@ iface = gr.Interface(
122
  ],
123
  outputs=[
124
  Textbox(label="Generated Answer"),
125
- Textbox(label="Retrieved Context"),
126
- Textbox(label="Ground Truth")
127
  ],
128
  examples=list_examples()
129
  )
 
17
  top_k = 3
18
 
19
  # Load your fine-tuned model and tokenizer
20
+ model_name = "legacy107/flan-t5-large-ia3-wiki2-100-merged"
21
  tokenizer = AutoTokenizer.from_pretrained(model_name)
22
  model = T5ForConditionalGeneration.from_pretrained(model_name)
23
  max_length = 512
 
98
  # Decode and return the generated answer
99
  generated_answer = tokenizer.decode(generated_ids[0], skip_special_tokens=True)
100
 
101
+ return generated_answer, context
102
 
103
 
104
  # Define a function to list examples from the dataset
 
122
  ],
123
  outputs=[
124
  Textbox(label="Generated Answer"),
125
+ Textbox(label="Retrieved Context")
 
126
  ],
127
  examples=list_examples()
128
  )