Update app.py
app.py CHANGED
@@ -47,9 +47,12 @@ with col1:
     )

 with col2:
-    temperature = st.
+    temperature = st.slider('Temperature: ', min_value=0.0, max_value=1.0, value=0.5)
+    top_p = st.slider('Top p: ', min_value=0.5, max_value=1.0, value=0.99)
     max_tokens = st.number_input('Max output length: ', min_value=1, max_value=64, format='%i')
-
+    threshold = st.number_input('Min token score: ', value=-np.inf)
+
+
 tokenizer = AutoTokenizer.from_pretrained(model_checkpoint)

 model = T5ForConditionalGeneration.from_pretrained(
@@ -99,5 +102,5 @@ if instruction:

     st.write(output_text)

-    fig = plot_word_scores(top_token_ids(outputs, threshold
-    st.plotly_chart(fig, use_container_width=False)
+    fig = plot_word_scores(top_token_ids(outputs, threshold=threshold), outputs, tokenizer)
+    st.plotly_chart(fig, theme=None, use_container_width=False)