rerank model
pages/Multimodal_Conversational_Search.py
CHANGED
@@ -25,6 +25,7 @@ from requests_aws4auth import AWS4Auth
 import colpali
 from requests.auth import HTTPBasicAuth
 import warnings
+from streamlit import experimental_fragment
 
 warnings.filterwarnings("ignore", category=DeprecationWarning)
 
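The only change in this first hunk is the new import of experimental_fragment; the second hunk below uses that decorator to wrap the sidebar controls. As background (not code from this Space), a fragment-decorated function reruns on its own when one of its widgets changes, instead of triggering a full script rerun; in later Streamlit releases the same feature is exposed as st.fragment. A minimal sketch, with a simplified body that only mirrors the commit's function name:

import streamlit as st
from streamlit import experimental_fragment  # renamed to st.fragment in newer Streamlit versions

@experimental_fragment
def sidebar_controls():
    # Interacting with widgets inside the fragment reruns only this function,
    # so flipping sidebar options does not re-execute the whole page script.
    index_select = st.radio("Choose one index", ["Global Warming stats", "UK Housing"])
    st.session_state.input_index = index_select

sidebar_controls()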
@@ -398,118 +399,121 @@ with col_2:
 with col_3:
     #hidden = st.button("RUN",disabled=True,key = "hidden")
     play = st.button("Go",on_click=handle_input,key = "play")
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-    st.
 
-
-
-
-
 
 
-
-
-
-
-
 
-
-
-
-
-
-
-
-
-
-
-
-
 
-    ############## haystach demo temporary addition ############
-    # st.subheader(":blue[Multimodality]")
-    # colu1,colu2 = st.columns([50,50])
-    # with colu1:
-    #     in_images = st.toggle('Images', key = 'in_images', disabled = False)
-    # with colu2:
-    #     in_tables = st.toggle('Tables', key = 'in_tables', disabled = False)
-    # if(in_tables):
-    #     st.session_state.input_table_with_sql = True
-    # else:
-    #     st.session_state.input_table_with_sql = False
 
-    ############## haystach demo temporary addition ############
-    #if(pdf_doc_ is None or pdf_doc_ == ""):
-    if(index_select == "Global Warming stats"):
-        st.session_state.input_index = "globalwarming"
-    if(index_select == "Covid19 impacts on Ireland"):
-        st.session_state.input_index = "covid19ie"#"choosetheknnalgorithmforyourbillionscaleusecasewithopensearchawsbigdatablog"
-    if(index_select == "BEIR"):
-        st.session_state.input_index = "2104"
-    if(index_select == "UK Housing"):
-        st.session_state.input_index = "hpijan2024hometrack"
-
-    # custom_index = st.text_input("If uploaded the file already, enter the original file name", value = "")
-    # if(custom_index!=""):
-    #     st.session_state.input_index = re.sub('[^A-Za-z0-9]+', '', (custom_index.lower().replace(".pdf","").split("/")[-1].split(".")[0]).lower())
-
-
-
-    st.subheader(":blue[Retriever]")
-    search_type = st.multiselect('Select the Retriever(s)',
-        ['Keyword Search',
-        'Vector Search',
-        'Sparse Search',
-        ],
-        ['Vector Search'],
-
-        key = 'input_rag_searchType',
-        help = "Select the type of Search, adding more than one search type will activate hybrid search"#\n1. Conversational Search (Recommended) - This will include both the OpenSearch and LLM in the retrieval pipeline \n (note: This will put opensearch response as context to LLM to answer) \n2. OpenSearch vector search - This will put only OpenSearch's vector search in the pipeline, \n(Warning: this will lead to unformatted results )\n3. LLM Text Generation - This will include only LLM in the pipeline, \n(Warning: This will give hallucinated and out of context answers)"
-        )
-
-    re_rank = st.checkbox('Re-rank results', key = 'input_re_rank', disabled = False, value = True, help = "Checking this box will re-rank the results using a cross-encoder model")
-
-    if(re_rank):
-        st.session_state.input_is_rerank = True
-    else:
-        st.session_state.input_is_rerank = False
-
-    st.subheader(":blue[Multi-vector retrieval]")
-
-    colpali_search_rerank = st.checkbox('Try Colpali multi-vector retrieval on the [sample dataset](https://huggingface.co/datasets/vespa-engine/gpfg-QA)', key = 'input_colpali', disabled = False, value = False, help = "Checking this box will use colpali as the embedding model and retrieval is performed using multi-vectors followed by re-ranking using MaxSim")
 
-
-    st.
-
-
-
-
-
-
 
-
-
+
+@experimental_fragment
+def sidebar_controls():
+    with st.sidebar:
+        st.page_link("app.py", label=":orange[Home]", icon="🏠")
+        st.subheader(":blue[Sample Data]")
+        coln_1,coln_2 = st.columns([70,30])
+        with coln_1:
+            index_select = st.radio("Choose one index",["Global Warming stats","UK Housing","Covid19 impacts on Ireland"],key="input_rad_index")
+        with coln_2:
+            st.markdown("<p style='font-size:15px'>Preview file</p>",unsafe_allow_html=True)
+            st.write("[:eyes:](https://github.com/aws-samples/AI-search-with-amazon-opensearch-service/blob/b559f82c07dfcca973f457c0a15d6444752553ab/rag/sample_pdfs/global_warming.pdf)")
+            st.write("[:eyes:](https://github.com/aws-samples/AI-search-with-amazon-opensearch-service/blob/b559f82c07dfcca973f457c0a15d6444752553ab/rag/sample_pdfs/HPI-Jan-2024-Hometrack.pdf)")
+            st.write("[:eyes:](https://github.com/aws-samples/AI-search-with-amazon-opensearch-service/blob/b559f82c07dfcca973f457c0a15d6444752553ab/rag/sample_pdfs/covid19_ie.pdf)")
+        st.markdown("""
+            <style>
+            [data-testid=column]:nth-of-type(2) [data-testid=stVerticalBlock]{
+                gap: 0rem;
+            }
+            [data-testid=column]:nth-of-type(1) [data-testid=stVerticalBlock]{
+                gap: 0rem;
+            }
+            </style>
+            """,unsafe_allow_html=True)
+        with st.expander("Sample questions:"):
+            st.markdown("<span style = 'color:#FF9900;'>Global Warming stats</span> - What is the projected energy percentage from renewable sources in future?",unsafe_allow_html=True)
+            st.markdown("<span style = 'color:#FF9900;'>UK Housing</span> - which city has the highest average housing price in UK ?",unsafe_allow_html=True)
+            st.markdown("<span style = 'color:#FF9900;'>Covid19 impacts</span> - How many aged above 85 years died due to covid ?",unsafe_allow_html=True)
+
 
+        #st.subheader(":blue[Your multi-modal documents]")
+        # pdf_doc_ = st.file_uploader(
+        #     "Upload your PDFs here and click on 'Process'", accept_multiple_files=False)
+
 
+        # pdf_docs = [pdf_doc_]
+        # if st.button("Process"):
+        #     with st.spinner("Processing"):
+        #         if os.path.isdir(parent_dirname+"/pdfs") == False:
+        #             os.mkdir(parent_dirname+"/pdfs")
 
+        #         for pdf_doc in pdf_docs:
+        #             print(type(pdf_doc))
+        #             pdf_doc_name = (pdf_doc.name).replace(" ","_")
+        #             with open(os.path.join(parent_dirname+"/pdfs",pdf_doc_name),"wb") as f:
+        #                 f.write(pdf_doc.getbuffer())
+
+        #             request_ = { "bucket": s3_bucket_,"key": pdf_doc_name}
+        #             # if(st.session_state.input_copali_rerank):
+        #             #     copali.process_doc(request_)
+        #             # else:
+        #             rag_DocumentLoader.load_docs(request_)
+        #             print('lambda done')
+        #             st.success('you can start searching on your PDF')
 
+        ############## haystach demo temporary addition ############
+        # st.subheader(":blue[Multimodality]")
+        # colu1,colu2 = st.columns([50,50])
+        # with colu1:
+        #     in_images = st.toggle('Images', key = 'in_images', disabled = False)
+        # with colu2:
+        #     in_tables = st.toggle('Tables', key = 'in_tables', disabled = False)
+        # if(in_tables):
+        #     st.session_state.input_table_with_sql = True
+        # else:
+        #     st.session_state.input_table_with_sql = False
+
+        ############## haystach demo temporary addition ############
+        #if(pdf_doc_ is None or pdf_doc_ == ""):
+        if(index_select == "Global Warming stats"):
+            st.session_state.input_index = "globalwarming"
+        if(index_select == "Covid19 impacts on Ireland"):
+            st.session_state.input_index = "covid19ie"#"choosetheknnalgorithmforyourbillionscaleusecasewithopensearchawsbigdatablog"
+        if(index_select == "BEIR"):
+            st.session_state.input_index = "2104"
+        if(index_select == "UK Housing"):
+            st.session_state.input_index = "hpijan2024hometrack"
+
+        # custom_index = st.text_input("If uploaded the file already, enter the original file name", value = "")
+        # if(custom_index!=""):
+        #     st.session_state.input_index = re.sub('[^A-Za-z0-9]+', '', (custom_index.lower().replace(".pdf","").split("/")[-1].split(".")[0]).lower())
 
 
 
+        st.subheader(":blue[Retriever]")
+        search_type = st.multiselect('Select the Retriever(s)',
+            ['Keyword Search',
+            'Vector Search',
+            'Sparse Search',
+            ],
+            ['Vector Search'],
+
+            key = 'input_rag_searchType',
+            help = "Select the type of Search, adding more than one search type will activate hybrid search"#\n1. Conversational Search (Recommended) - This will include both the OpenSearch and LLM in the retrieval pipeline \n (note: This will put opensearch response as context to LLM to answer) \n2. OpenSearch vector search - This will put only OpenSearch's vector search in the pipeline, \n(Warning: this will lead to unformatted results )\n3. LLM Text Generation - This will include only LLM in the pipeline, \n(Warning: This will give hallucinated and out of context answers)"
+            )
+
+        re_rank = st.checkbox('Re-rank results', key = 'input_re_rank', disabled = False, value = True, help = "Checking this box will re-rank the results using a cross-encoder model")
+
+        if(re_rank):
+            st.session_state.input_is_rerank = True
+        else:
+            st.session_state.input_is_rerank = False
+
+        st.subheader(":blue[Multi-vector retrieval]")
+
+        colpali_search_rerank = st.checkbox('Try Colpali multi-vector retrieval on the [sample dataset](https://huggingface.co/datasets/vespa-engine/gpfg-QA)', key = 'input_colpali', disabled = False, value = False, help = "Checking this box will use colpali as the embedding model and retrieval is performed using multi-vectors followed by re-ranking using MaxSim")
+
+        if(colpali_search_rerank):
+            st.session_state.input_is_colpali = True
+            #st.session_state.input_query = ""
+        else:
+            st.session_state.input_is_colpali = False
+
+        with st.expander("Sample questions for Colpali retriever:"):
+            st.write("1. Proportion of female new hires 2021-2023? \n\n 2. First-half 2021 return on unlisted real estate investments? \n\n 3. Trend of the fund's expected absolute volatility between January 2014 and January 2016? \n\n 4. Fund return percentage in 2017? \n\n 5. Annualized gross return of the fund from 1997 to 2008?")
 
+sidebar_controls()
+
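The retriever multiselect's help text notes that choosing more than one retriever activates hybrid search. In this app the blending is handled by the search backend, but the idea is to normalize each retriever's scores and combine them per document. The following client-side sketch (hybrid_merge and its input shapes are hypothetical, not this Space's code) shows a common min-max plus weighted-sum scheme:

def hybrid_merge(keyword_hits, vector_hits, weight=0.5):
    # keyword_hits / vector_hits: lists of {"id": ..., "score": ...} from two retrievers (assumed shape).
    def normalize(hits):
        if not hits:
            return {}
        scores = [hit["score"] for hit in hits]
        lo, hi = min(scores), max(scores)
        span = (hi - lo) or 1.0
        return {hit["id"]: (hit["score"] - lo) / span for hit in hits}

    kw, vec = normalize(keyword_hits), normalize(vector_hits)
    combined = {doc: weight * kw.get(doc, 0.0) + (1 - weight) * vec.get(doc, 0.0)
                for doc in set(kw) | set(vec)}
    return sorted(combined.items(), key=lambda item: item[1], reverse=True)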
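The "Re-rank results" checkbox above only flips st.session_state.input_is_rerank; the re-ranking itself happens later in the app's query pipeline. For context on what the help text means by a cross-encoder, here is an illustrative sketch using the sentence-transformers CrossEncoder API; the model name and the rerank helper are assumptions for the example, not necessarily what this Space runs:

from sentence_transformers import CrossEncoder

def rerank(query, hits, model_name="cross-encoder/ms-marco-MiniLM-L-6-v2", top_k=5):
    # A cross-encoder scores each (query, passage) pair jointly, which is slower than a
    # bi-encoder but usually more accurate for reordering a short list of retrieved hits.
    model = CrossEncoder(model_name)  # illustrative model choice
    scores = model.predict([(query, hit["text"]) for hit in hits])
    ranked = sorted(zip(hits, scores), key=lambda pair: pair[1], reverse=True)
    return [hit for hit, _ in ranked[:top_k]]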
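The ColPali checkbox's help text mentions retrieval with multi-vectors followed by re-ranking using MaxSim. MaxSim (late interaction) scores a page by matching every query-token embedding against its best-matching page-patch embedding and summing those maxima. A small NumPy sketch of that scoring, independent of the colpali module this file imports (the page dict structure is hypothetical):

import numpy as np

def maxsim(query_vecs: np.ndarray, page_vecs: np.ndarray) -> float:
    # query_vecs: (n_query_tokens, dim); page_vecs: (n_page_patches, dim)
    sims = query_vecs @ page_vecs.T        # similarity of every query token to every page patch
    return float(sims.max(axis=1).sum())   # best patch per token, summed over tokens

def rank_pages(query_vecs, pages):
    # pages: list of {"id": ..., "vecs": np.ndarray}; highest MaxSim score first.
    return sorted(pages, key=lambda page: maxsim(query_vecs, page["vecs"]), reverse=True)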