drkareemkamal committed on
Commit
b34fb97
·
verified ·
1 Parent(s): e201e8b

Delete app.py

Browse files
Files changed (1) hide show
  1. app.py +0 -101
app.py DELETED
@@ -1,101 +0,0 @@
1
import streamlit as st
from streamlit_chat import message
import tempfile

# CSV loader lives under document_loaders (not "documentloader").
from langchain_community.document_loaders.csv_loader import CSVLoader

# BUG FIX: the real class is HuggingFaceEmbeddings (plural); the original
# imported a nonexistent "HuggingFaceEmbedding". Alias the correct class to
# the name this script uses at its call site so callers keep working.
from langchain_community.embeddings import HuggingFaceEmbeddings as HuggingFaceEmbedding

from langchain_community.vectorstores import FAISS
from langchain_community.llms.ctransformers import CTransformers

# BUG FIX: chains are exported from `langchain.chains`, not
# `langchain_community.chains`, and the class is spelled
# ConversationalRetrievalChain. Alias it to the misspelled name used
# further down in this script so the call site keeps working.
from langchain.chains import ConversationalRetrievalChain as ConversationalRetreievalChain

# Directory where the FAISS index built from the uploaded CSV is persisted.
DB_FAISS_PATH = 'vectorstore/db_faiss'
18
-
19
def load_llm():
    """Load the Llama-2-7B chat model (GGML build) from the Hugging Face hub.

    Returns:
        A LangChain ``CTransformers`` LLM wrapper configured for CPU
        inference with sampling temperature 0.5.
    """
    llm = CTransformers(
        model='TheBloke/Llama-2-7B-Chat-GGML',
        model_type='llama',   # BUG FIX: was 'llma' — not a valid ctransformers model type
        max_new_tokens=512,   # BUG FIX: kwarg is plural; 'max_new_token' was silently ignored
        temperature=0.5,
    )
    return llm
28
-
29
# ---- Page header -----------------------------------------------------------
st.title("Chat with CSV using Llama 2")  # BUG FIX: user-facing typo "Llma"
# BUG FIX: header said "PDF" but this app ingests CSV uploads.
st.markdown("<h1 style='text-align: center; color: blue;'>Chat with your CSV 📄 </h1>", unsafe_allow_html=True)
st.markdown("<h3 style='text-align: center; color: grey;'>Built by <a href='https://github.com/DrKareemKAmal'>MindSparks ❤️ </a></h3>", unsafe_allow_html=True)

# BUG FIX: the public Streamlit API is file_uploader, not _file_uploader.
uploaded_file = st.sidebar.file_uploader('Upload your data', type='csv')

if uploaded_file:
    # Persist the upload to disk because CSVLoader needs a real file path.
    # BUG FIX: the original wrote to and read `.name` from the *module*
    # `tempfile` instead of the NamedTemporaryFile handle `temp_file`.
    with tempfile.NamedTemporaryFile(delete=False) as temp_file:
        temp_file.write(uploaded_file.getvalue())
        tempfile_path = temp_file.name

    loader = CSVLoader(file_path=tempfile_path, encoding='utf-8',
                       csv_args={'delimiter': ','})
    data = loader.load()
    st.json(data)

    # Embed each CSV row on CPU.
    # BUG FIX: HuggingFaceEmbeddings takes `model_name`, not `model`.
    embeddings = HuggingFaceEmbedding(
        model_name='all-MiniLM-L6-v2',
        model_kwargs={'device': 'cpu'},
    )

    # Build the vector index, persist it, and wire it to the LLM.
    db = FAISS.from_documents(data, embeddings)
    db.save_local(DB_FAISS_PATH)
    llm = load_llm()

    chain = ConversationalRetreievalChain.from_llm(llm=llm, retriever=db.as_retriever())

    def conversational_chat(query):
        """Run one retrieval-augmented turn and append it to session history."""
        # BUG FIX: the chain expects the input key "question" (was "quetion").
        result = chain({"question": query,
                        "chat_history": st.session_state['history']})
        st.session_state['history'].append((query, result['answer']))
        return result['answer']

    # Initialise per-session chat state on first run.
    if 'history' not in st.session_state:
        st.session_state['history'] = []

    if 'generated' not in st.session_state:
        st.session_state['generated'] = ['Hello, Ask me anything about ' + uploaded_file.name]

    if 'past' not in st.session_state:
        st.session_state['past'] = ['Hey !']

    # One container for the rendered chat history, one for the input form.
    response_container = st.container()
    container = st.container()

    with container:
        with st.form(key='mu_form', clear_on_submit=True):
            # BUG FIX: placeholder typo "youur".
            user_input = st.text_input('Query:', placeholder="Talk to your CSV Data here ")
            # BUG FIX: the API is st.form_submit_button (was st.from_submit_button).
            submit_button = st.form_submit_button(label='chat')

        if submit_button and user_input:
            output = conversational_chat(user_input)

            st.session_state['past'].append(user_input)
            st.session_state['generated'].append(output)

    if st.session_state['generated']:
        # Render alternating user / assistant bubbles via streamlit_chat.
        for i in range(len(st.session_state['generated'])):
            with response_container:
                message(st.session_state['past'][i], is_user=True, key=str(i) + '_user',
                        avatar_style='big-smile')
                message(st.session_state['generated'][i], key=str(i), avatar_style='thumb')