Spaces: Running on Zero

app.py CHANGED
@@ -53,7 +53,7 @@ def prepare_prompt(query, retrieved_examples):
     return prompt, (titles, urls)
 
 
-@spaces.GPU
+# @spaces.GPU
 def talk(message, history):
     retrieved_examples = search(message)
     message, metadata = prepare_prompt(message, retrieved_examples)
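The decorator being commented out here is the ZeroGPU hook from the `spaces` package, which requests a GPU allocation for the duration of a call on Spaces that run on Zero hardware. A minimal sketch of how it is normally wired, assuming the usual `import spaces` near the top of app.py (not shown in this diff):

# Sketch only: how @spaces.GPU is typically used on a ZeroGPU Space.
# With the decorator commented out, as in this commit, talk() runs on whatever
# hardware the Space process already has instead of requesting a GPU slice.
import spaces

@spaces.GPU  # request a ZeroGPU allocation for the duration of this call
def talk(message, history):
    ...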
@@ -92,42 +92,45 @@ def talk(message, history):
     partial_text = ""
     for new_text in streamer:
         partial_text += new_text
-        print(partial_text)
+        print("partial_text : ", partial_text)
         yield partial_text
     # partial_text += resources
     # yield partial_text
 
 
-TITLE = "RAG"
+TITLE = "# RAG"
 
 DESCRIPTION = """
 A rag pipeline with a chatbot feature
 
 Resources used to build this project :
 
-embedding model : https://huggingface.co/mixedbread-ai/mxbai-embed-large-v1
-dataset : https://huggingface.co/datasets/not-lain/wikipedia-small-3000-embedded (used mxbai-colbert-large-v1 to create the embedding column )
-faiss docs : https://huggingface.co/docs/datasets/v2.18.0/en/package_reference/main_classes#datasets.Dataset.add_faiss_index
-chatbot : https://huggingface.co/google/gemma-7b-it
+* embedding model : https://huggingface.co/mixedbread-ai/mxbai-embed-large-v1
+* dataset : https://huggingface.co/datasets/not-lain/wikipedia-small-3000-embedded (used mxbai-colbert-large-v1 to create the embedding column )
+* faiss docs : https://huggingface.co/docs/datasets/v2.18.0/en/package_reference/main_classes#datasets.Dataset.add_faiss_index
+* chatbot : https://huggingface.co/google/gemma-7b-it
 
 If you want to support my work please click on the heart react button ❤️🤗
 
 <sub><sup><sub><sup>psst, I am still open for work, so please reach me out at https://not-lain.github.io/</sup></sub></sup></sub>
 """
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+
+with gr.Blocks() as demo:
+    gr.Markdown(TITLE)
+    gr.Markdown(DESCRIPTION)
+    gr.ChatInterface(
+        fn=talk,
+        chatbot=gr.Chatbot(
+            show_label=True,
+            show_share_button=True,
+            show_copy_button=True,
+            likeable=True,
+            layout="bubble",
+            bubble_full_width=False,
+        ),
+        theme="Soft",
+        examples=[["what is machine learning"]],
+    )
+
+demo.launch(debug=True)
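The resources listed in DESCRIPTION imply the retrieval side of the pipeline: queries are embedded with mxbai-embed-large-v1 and matched against the pre-embedded Wikipedia dataset through a FAISS index. The search() helper called in talk() is not part of this diff; a plausible sketch, in which the "embeddings" column name, the split, and k are assumptions, looks like:

# Hypothetical reconstruction of search(); not taken from this commit.
from datasets import load_dataset
from sentence_transformers import SentenceTransformer

ST = SentenceTransformer("mixedbread-ai/mxbai-embed-large-v1")

# The dataset ships with a precomputed embedding column (see the resources above);
# add_faiss_index builds the index referenced by the faiss docs link.
dataset = load_dataset("not-lain/wikipedia-small-3000-embedded", split="train")
dataset.add_faiss_index(column="embeddings")  # column name is an assumption

def search(query, k=3):
    """Embed the query and return the k nearest dataset rows."""
    query_embedding = ST.encode(query)
    scores, retrieved_examples = dataset.get_nearest_examples("embeddings", query_embedding, k=k)
    return retrieved_examples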
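On the generation side, talk() consumes a streamer and yields a growing partial_text, which is the generator contract gr.ChatInterface expects. A rough sketch of how such a streamer is usually set up with transformers, assuming the model is google/gemma-7b-it from the resources list and treating the loading details as placeholders:

# Hypothetical sketch of the streaming generation behind talk(); only the model id comes from the page.
from threading import Thread
from transformers import AutoModelForCausalLM, AutoTokenizer, TextIteratorStreamer

model_id = "google/gemma-7b-it"
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(model_id, device_map="auto")

def stream_reply(prompt):
    inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
    streamer = TextIteratorStreamer(tokenizer, skip_prompt=True, skip_special_tokens=True)
    # Run generate() in a background thread so tokens can be consumed as they arrive.
    Thread(target=model.generate, kwargs=dict(**inputs, streamer=streamer, max_new_tokens=512)).start()
    partial_text = ""
    for new_text in streamer:
        partial_text += new_text
        yield partial_text  # Gradio redraws the last chat message with each yielded string

The for new_text in streamer loop at lines 92-96 of the diff is exactly this consumption side, with the commit only changing the debug print.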