Update app.py
app.py (CHANGED)
@@ -42,21 +42,23 @@ chat = utils.ChatState(gemma_lm)  # adding a chat helper to manage the conversation
 def launch(message):

     # Uncomment for QA system without chat history/memory
-    #
-
-
-
-
-
-
-
+    #
+    template = "Instruction:\n{instruction}\n\nResponse:\n{response}"
+    prompt = template.format(
+        instruction=message,
+        response="",
+    )
+    out = gemma_lm.generate(prompt, max_length=1024)
+    ind = out.index('\n\nResponse:\n') + len('\n\nResponse:\n')
+    return out[ind:]

     # This is to convert QA system to a chatbot
-
+    #chat_history = chat.send_message(message) + "\n\n##### History #####\n\n" + chat.get_history()
+    #return chat_history

 iface = gr.Interface(launch,
-                     inputs=
-                     outputs=
+                     inputs="text",
+                     outputs="text",
                      css=css,
                      title="Hey I am CosmoGemma 👋 I can answer cosmology questions from astroph.CO research articles. Try me :)",
                      description="Gemma_2b_en fine-tuned on QA pairs (~3.5k) generated from Cosmology and Nongalactic Astrophysics articles (arXiv astro-ph.CO) from 2018-2022 and tested on QA pairs (~1k) generated from 2023 articles, scoring over 75% accuracy.")
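For context, here is a minimal, self-contained sketch of what the newly uncommented QA path does: it fills the Instruction/Response template with the user message, generates a continuation, and keeps only the text after the Response marker. FakeGemma and the example question below are hypothetical stand-ins (the real app.py loads the fine-tuned gemma_lm elsewhere); they are used only so the snippet runs without the model weights.

# Sketch of the prompt templating and response extraction used in launch().
# FakeGemma is a hypothetical stand-in for the fine-tuned gemma_lm: like a
# causal LM, its generate() echoes the prompt and appends a continuation.
class FakeGemma:
    def generate(self, prompt, max_length=1024):
        return prompt + "A short model answer would appear here."

gemma_lm = FakeGemma()

template = "Instruction:\n{instruction}\n\nResponse:\n{response}"
prompt = template.format(instruction="What is the Hubble tension?", response="")

out = gemma_lm.generate(prompt, max_length=1024)

# Keep only the text after the Response marker inserted by the template.
marker = "\n\nResponse:\n"
answer = out[out.index(marker) + len(marker):]
print(answer)  # -> "A short model answer would appear here."

Note that out.index() raises ValueError if the marker is absent, so the extraction in launch() relies on generate() returning the prompt verbatim at the start of its output.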