Update app.py
app.py CHANGED
@@ -133,13 +133,14 @@ gpt4o_mini_model = initialize_gpt4o_mini_model()


# Existing embeddings and vector store for GPT-4o
-gpt_embeddings = OpenAIEmbeddings(api_key=os.environ['OPENAI_API_KEY'])
-
+# gpt_embeddings = OpenAIEmbeddings(api_key=os.environ['OPENAI_API_KEY'])
+gpt_embeddings = embeddings
+gpt_vectorstore = PineconeVectorStore(index_name="italyv109102024", embedding=gpt_embeddings)
gpt_retriever = gpt_vectorstore.as_retriever(search_kwargs={'k': 5})

# New vector store setup for Phi-3.5
phi_embeddings = embeddings
-phi_vectorstore = PineconeVectorStore(index_name="italyv109102024", embedding=
+phi_vectorstore = PineconeVectorStore(index_name="italyv109102024", embedding=phi_embeddings)
phi_retriever = phi_vectorstore.as_retriever(search_kwargs={'k': 5})
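This hunk drops the dedicated OpenAIEmbeddings instance and points both the GPT-4o and the Phi-3.5 retrievers at the same shared `embeddings` object and the same Pinecone index. Below is a minimal sketch of the resulting setup, assuming a placeholder HuggingFace embedding model and a PINECONE_API_KEY in the environment; the real `embeddings` object is created earlier in app.py and is not part of this hunk.

import os

from langchain_community.embeddings import HuggingFaceEmbeddings
from langchain_pinecone import PineconeVectorStore

# Assumed to be provided via the Space's secrets in the real app.
os.environ.setdefault("PINECONE_API_KEY", "<pinecone-api-key>")

# Placeholder for the shared embedding object defined earlier in app.py;
# the actual model name used by the Space is not visible in this diff.
embeddings = HuggingFaceEmbeddings(model_name="sentence-transformers/all-MiniLM-L6-v2")

# Both model paths now share the same embeddings and the same Pinecone index.
gpt_vectorstore = PineconeVectorStore(index_name="italyv109102024", embedding=embeddings)
gpt_retriever = gpt_vectorstore.as_retriever(search_kwargs={'k': 5})

phi_vectorstore = PineconeVectorStore(index_name="italyv109102024", embedding=embeddings)
phi_retriever = phi_vectorstore.as_retriever(search_kwargs={'k': 5})

# Retrieval returns the top-5 most similar chunks for a query.
docs = gpt_retriever.invoke("Quali eventi sono in programma questo fine settimana?")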
@@ -198,9 +199,13 @@ It was my pleasure!
Question: {{question}}
Helpful Answer:"""

-template2 =f"""
-
-
+template2 =f"""Sei un esperto di madrelingua italiana. Il tuo compito è fornire risposte precise, accurate, concise, nitide e brevi basate sul documento fornito. Dovresti restituire le informazioni nel seguente formato:
+
+- Nome del documento: (il nome del documento)
+- Numero di pagina: (numero di pagina)
+- Contenuto effettivo: (contenuto pertinente del documento)
+
+Se non riesci a trovare la risposta nel documento, rispondi semplicemente con "Questa domanda va oltre la mia conoscenza".
{{context}}
Question: {{question}}
Helpful Answer:"""
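The new `template2` is an Italian prompt telling the model to answer only from the retrieved document, returning the document name, page number, and relevant content, and to fall back to "Questa domanda va oltre la mia conoscenza" ("This question is beyond my knowledge") when the answer is not found. Because the template is built with an f-string, the doubled braces render as single braces, leaving {context} and {question} as the prompt's input variables. A sketch of how such a template is typically wrapped in a LangChain PromptTemplate, with the Italian text abridged and the chain wiring assumed (the actual chain construction lives elsewhere in app.py):

from langchain_core.prompts import PromptTemplate

# Abridged stand-in for the rendered template2 string; the full Italian text
# is in the hunk above. After f-string rendering, {{context}} and {{question}}
# become the single-brace placeholders PromptTemplate expects.
template2_rendered = """Sei un esperto di madrelingua italiana. Rispondi basandoti solo sul documento fornito.
- Nome del documento: (il nome del documento)
- Numero di pagina: (numero di pagina)
- Contenuto effettivo: (contenuto pertinente del documento)
Se non trovi la risposta, rispondi "Questa domanda va oltre la mia conoscenza".
{context}
Question: {question}
Helpful Answer:"""

prompt2 = PromptTemplate(template=template2_rendered, input_variables=["context", "question"])

# Fill the prompt with retrieved context before sending it to the model.
print(prompt2.format(context="(testo recuperato dal retriever)", question="Quali sono gli orari di apertura?"))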
@@ -1491,7 +1496,7 @@ with gr.Blocks(theme='gradio/soft') as demo:
chatbot = gr.Chatbot([], elem_id="RADAR:Channel 94.1", bubble_full_width=False)
choice = gr.Radio(label="Select Style", choices=["Details", "Conversational"], value="Conversational",interactive=False,visible=False)
retrieval_mode = gr.Radio(label="Retrieval Mode", choices=["VDB", "KGF"], value="VDB",interactive=False,visible=False)
-model_choice = gr.Dropdown(label="Choose Model", choices=["LM-2"], value="LM-2")
+model_choice = gr.Dropdown(label="Choose Model", choices=["LM-1","LM-2"], value="LM-2")

# Link the dropdown change to handle_model_choice_change
model_choice.change(fn=handle_model_choice_change, inputs=model_choice, outputs=[retrieval_mode, choice, choice])
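The dropdown now exposes both LM-1 and LM-2, and its change event feeds three outputs (retrieval_mode, choice, choice). The real handle_model_choice_change is defined elsewhere in app.py and is not shown in this diff; the sketch below only illustrates, as an assumption, the shape such a handler needs, returning one gr.update per wired output.

import gradio as gr

# Hypothetical handler: the actual handle_model_choice_change lives elsewhere
# in app.py. It must return one update per output in
# outputs=[retrieval_mode, choice, choice].
def handle_model_choice_change(selected_model):
    # Keep retrieval fixed to the vector DB and the style fixed to
    # "Conversational" regardless of which model is selected.
    return (
        gr.update(value="VDB", interactive=False),             # retrieval_mode
        gr.update(value="Conversational", interactive=False),  # choice
        gr.update(visible=False),                              # choice (visibility)
    )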