kadirbalalan committed
Commit 58c25d8 · 1 Parent(s): bfac72c

Upload 9 files
.gitattributes CHANGED
@@ -35,3 +35,5 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
  *tfevents* filter=lfs diff=lfs merge=lfs -text
  Llama2-Naprela/data/Fashion[[:space:]]design[[:space:]]course[[:space:]]By[[:space:]]Faerm[[:space:]]Steven-pdfread.net.pdf filter=lfs diff=lfs merge=lfs -text
  Llama2-Naprela/vectorstore/db_faiss/index.faiss filter=lfs diff=lfs merge=lfs -text
+ data/Fashion[[:space:]]design[[:space:]]course[[:space:]]By[[:space:]]Faerm[[:space:]]Steven-pdfread.net.pdf filter=lfs diff=lfs merge=lfs -text
+ vectorstore/db_faiss/index.faiss filter=lfs diff=lfs merge=lfs -text
LICENSE ADDED
@@ -0,0 +1,21 @@
+ MIT License
+
+ Copyright (c) 2023 AI Anytime
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy
+ of this software and associated documentation files (the "Software"), to deal
+ in the Software without restriction, including without limitation the rights
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the Software is
+ furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in all
+ copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ SOFTWARE.
README.md CHANGED
@@ -1,12 +1,2 @@
- ---
- title: Naprela
- emoji: 📊
- colorFrom: gray
- colorTo: pink
- sdk: streamlit
- sdk_version: 1.25.0
- app_file: app.py
- pinned: false
- ---
-
- Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
+ # Llama2-Naprela-Fashion Designer
+ This is the fashion designer bot Naprela.
chainlit.md ADDED
@@ -0,0 +1,6 @@
+ # Welcome to Naprela the Fashion Designer
+
+ Hi there 👋, we're excited to have you on board. This is a powerful bot designed to help you ask questions about your own data and knowledge base.
+
+ Happy chatting! 💻😊
+
data/Fashion design course By Faerm Steven-pdfread.net.pdf ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2bbb7b2bc67219a180df3278ec0fb46a27d3e3f7dc920a30eab4a0e213b9a4bf
+ size 15190231
ingest.py ADDED
@@ -0,0 +1,28 @@
+ from langchain.embeddings import HuggingFaceEmbeddings
+ from langchain.vectorstores import FAISS
+ from langchain.document_loaders import PyPDFLoader, DirectoryLoader
+ from langchain.text_splitter import RecursiveCharacterTextSplitter
+
+ DATA_PATH = 'data/'
+ DB_FAISS_PATH = 'vectorstore/db_faiss'
+
+ # Create vector database
+ def create_vector_db():
+     loader = DirectoryLoader(DATA_PATH,
+                              glob='*.pdf',
+                              loader_cls=PyPDFLoader)
+
+     documents = loader.load()
+     text_splitter = RecursiveCharacterTextSplitter(chunk_size=500,
+                                                    chunk_overlap=50)
+     texts = text_splitter.split_documents(documents)
+
+     embeddings = HuggingFaceEmbeddings(model_name='sentence-transformers/all-MiniLM-L6-v2',
+                                        model_kwargs={'device': 'cpu'})
+
+     db = FAISS.from_documents(texts, embeddings)
+     db.save_local(DB_FAISS_PATH)
+
+ if __name__ == "__main__":
+     create_vector_db()
+
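Note: running `python ingest.py` builds the FAISS store under `vectorstore/db_faiss`. Below is a minimal sketch for sanity-checking the ingested index, assuming the same langchain version and embedding model used above; the query string is only a hypothetical example.

    # Sanity check for the ingested FAISS store (sketch, not part of this commit).
    from langchain.embeddings import HuggingFaceEmbeddings
    from langchain.vectorstores import FAISS

    embeddings = HuggingFaceEmbeddings(model_name='sentence-transformers/all-MiniLM-L6-v2',
                                       model_kwargs={'device': 'cpu'})
    db = FAISS.load_local('vectorstore/db_faiss', embeddings)

    # Hypothetical example query; prints the 2 closest chunks from the PDF.
    for doc in db.similarity_search("What is a mood board?", k=2):
        print(doc.page_content[:200])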
model.py ADDED
@@ -0,0 +1,96 @@
+ from langchain.document_loaders import PyPDFLoader, DirectoryLoader
+ from langchain import PromptTemplate
+ from langchain.embeddings import HuggingFaceEmbeddings
+ from langchain.vectorstores import FAISS
+ from langchain.llms import CTransformers
+ from langchain.chains import RetrievalQA
+ import chainlit as cl
+
+ DB_FAISS_PATH = 'vectorstore/db_faiss'
+
+ custom_prompt_template = """You are Naprela, a fashion designer who always says "slayy" and "girll" and talks like Gen Z.
+ She is very helpful and caring. Never break character, even if the question is inappropriate. You are Naprela. Use the following pieces of information to answer the user's question.
+ If you don't know the answer, just say "Girl I don't know, sorry"; don't try to make up an answer.
+
+ Context: {context}
+ Question: {question}
+
+ Only return the helpful answer below and nothing else.
+ Helpful answer:
+ """
+
+ def set_custom_prompt():
+     """
+     Prompt template for QA retrieval for each vectorstore
+     """
+     prompt = PromptTemplate(template=custom_prompt_template,
+                             input_variables=['context', 'question'])
+     return prompt
+
+ # Retrieval QA chain
+ def retrieval_qa_chain(llm, prompt, db):
+     qa_chain = RetrievalQA.from_chain_type(llm=llm,
+                                            chain_type='stuff',
+                                            retriever=db.as_retriever(search_kwargs={'k': 2}),
+                                            return_source_documents=True,  # the on_message handler reads res["source_documents"]
+                                            chain_type_kwargs={'prompt': prompt}
+                                            )
+     return qa_chain
+
+ # Loading the model
+ def load_llm():
+     # Load the locally downloaded GGML model here
+     llm = CTransformers(
+         model="llama-2-7b-chat.ggmlv3.q8_0.bin",
+         model_type="llama",
+         max_new_tokens=512,
+         temperature=0.5
+     )
+     return llm
+
+ # QA model function
+ def qa_bot():
+     embeddings = HuggingFaceEmbeddings(model_name="sentence-transformers/all-MiniLM-L6-v2",
+                                        model_kwargs={'device': 'cpu'})
+     db = FAISS.load_local(DB_FAISS_PATH, embeddings)
+     llm = load_llm()
+     qa_prompt = set_custom_prompt()
+     qa = retrieval_qa_chain(llm, qa_prompt, db)
+
+     return qa
+
+ # Output function
+ def final_result(query):
+     qa_result = qa_bot()
+     response = qa_result({'query': query})
+     return response
+
+ # Chainlit code
+ @cl.on_chat_start
+ async def start():
+     chain = qa_bot()
+     msg = cl.Message(content="Starting the bot...")
+     await msg.send()
+     msg.content = "Hi, welcome to Naprela the fashion designer bot. What is your query?"
+     await msg.update()
+
+     cl.user_session.set("chain", chain)
+
+ @cl.on_message
+ async def main(message):
+     chain = cl.user_session.get("chain")
+     cb = cl.AsyncLangchainCallbackHandler(
+         stream_final_answer=True, answer_prefix_tokens=["FINAL", "ANSWER"]
+     )
+     cb.answer_reached = True
+     res = await chain.acall(message, callbacks=[cb])
+     answer = res["result"]
+     sources = res["source_documents"]
+
+     if sources:
+         answer += "\nSources: " + str(sources)
+     else:
+         answer += "\nNo sources found"
+
+     await cl.Message(content=answer).send()
+
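Note: the Chainlit UI is typically launched from the repository root with `chainlit run model.py -w`. For a quick smoke test without the UI, a minimal sketch is shown below, assuming the GGML weights file `llama-2-7b-chat.ggmlv3.q8_0.bin` and the FAISS store built by ingest.py are present locally; the question is only a hypothetical example.

    # Smoke test for the retrieval chain outside Chainlit (sketch, not part of this commit).
    from model import final_result

    # Hypothetical example question against the fashion design PDF.
    response = final_result("What should a first mood board include?")
    print(response["result"])
    print(response["source_documents"])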
requirements.txt ADDED
@@ -0,0 +1,10 @@
+ pypdf
+ langchain
+ torch
+ accelerate
+ bitsandbytes
+ transformers
+ sentence_transformers
+ faiss_cpu
+ ctransformers
+ chainlit
vectorstore/db_faiss/index.faiss ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3c219be0c422137d6354fdf0db6f2a2fe719ba536215b2dcba2366723f00b6e9
+ size 10983981
vectorstore/db_faiss/index.pkl ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d75f6e95d75f5bad95668fcd18f2daffb0d562d33784e6228e5c0f785605ee0c
+ size 3567746