import gradio as gr
import os
from loaddataset import ExtractRagBenchData
from createmilvusschema import CreateMilvusDbSchema
from insertmilvushelper import EmbedAllDocumentsAndInsert
from sentence_transformers import SentenceTransformer
from searchmilvushelper import SearchTopKDocuments
from model import generate_response
from huggingface_hub import login, whoami, dataset_info
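# loaddataset, createmilvusschema, insertmilvushelper, searchmilvushelper and model
# are project-local modules in this Space that implement the individual RAG pipeline
# stages (data loading, Milvus schema, ingestion, retrieval, generation).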
# Load embedding model
QUERY_EMBEDDING_MODEL = SentenceTransformer('all-MiniLM-L6-v2')

# Sliding-window chunking and retrieval parameters
WINDOW_SIZE = 5
OVERLAP = 2
RETRIEVE_TOP_K_SIZE = 10
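# Illustrative sketch only (assumption): WINDOW_SIZE and OVERLAP are passed to
# EmbedAllDocumentsAndInsert below and are presumed to drive a sliding-window
# chunker roughly like this hypothetical helper. It is not called anywhere in the
# app; it only documents what the two parameters are understood to mean.
def _sliding_window_chunks(tokens, window_size=WINDOW_SIZE, overlap=OVERLAP):
    step = window_size - overlap  # each window advances by window minus overlap
    return [tokens[i:i + window_size] for i in range(0, len(tokens), step)]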
# Authenticate with the Hugging Face Hub using the HF_TOKEN secret / environment variable
hf_token = os.getenv("HF_TOKEN")
login(hf_token)
rag_extracted_data = ExtractRagBenchData()
print(rag_extracted_data.head(5))
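# ExtractRagBenchData() is expected to return a pandas DataFrame of the RAGBench
# corpus; the head() print above is only a sanity check in the Space logs.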
# Create the Milvus collection / schema
try:
    db_collection = CreateMilvusDbSchema()
except Exception as e:
    print(f"Error creating Milvus DB schema: {e}")
# Insert embeddings into the Milvus DB
"""
EmbedAllDocumentsAndInsert(QUERY_EMBEDDING_MODEL, rag_extracted_data, db_collection, window_size=WINDOW_SIZE, overlap=OVERLAP)
"""
query = "what would the net revenue have been in 2015 if there wasn't a stipulated settlement from the business combination in october 2015?"
results_for_top_chunks = SearchTopKDocuments(db_collection, query, QUERY_EMBEDDING_MODEL, top_k=RETRIEVE_TOP_K_SIZE)
print(results_for_top_chunks)
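# The query / search / print block above is a retrieval smoke test: its results go
# to the Space logs and are not yet surfaced through the Gradio UI below.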
def chatbot(prompt):
    # Placeholder handler: returns the authenticated Hub account info instead of
    # an answer generated from the retrieved chunks.
    return whoami()
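# Hedged sketch (assumptions): a RAG-backed handler that feeds the retrieved chunks
# into generate_response. The generate_response signature (query plus retrieved
# chunks) is assumed, not confirmed by model.py, and this function is not wired into
# the interface below, which still uses the placeholder chatbot().
def rag_chatbot(prompt):
    top_chunks = SearchTopKDocuments(db_collection, prompt, QUERY_EMBEDDING_MODEL, top_k=RETRIEVE_TOP_K_SIZE)
    return generate_response(prompt, top_chunks)  # assumed signature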
iface = gr.Interface(fn=chatbot,
                     inputs="text",
                     outputs="text",
                     title="Capstone Project Group 10")
if __name__ == "__main__":
    iface.launch()