Divyanshh committed on
Commit
18602ef
verified
1 Parent(s): 6a7d34e

Update util.py

Files changed (1)
  1. util.py +7 -5
util.py CHANGED
@@ -4,11 +4,11 @@ from langchain_community.embeddings import HuggingFaceHubEmbeddings
 from langchain_community.vectorstores import Chroma
 from langchain.chains import RetrievalQA
 from langchain_google_genai import ChatGoogleGenerativeAI, GoogleGenerativeAIEmbeddings
-import shutil
+
 import git
 
+# embeddings = HuggingFaceHubEmbeddings(model="thuan9889/llama_embedding_model_v1")
 from chromadb.utils import embedding_functions
-
 embeddings = GoogleGenerativeAIEmbeddings(model="models/embedding-001", google_api_key=os.environ['GOOGLE_API_KEY'], task_type="retrieval_query")
 
 model = ChatGoogleGenerativeAI(model="gemini-pro",google_api_key=os.environ['GOOGLE_API_KEY'],temperature=0.2,convert_system_message_to_human=True)
@@ -18,7 +18,7 @@ def get_folder_paths(directory = "githubCode"):
     for root, dirs, files in os.walk(directory):
         if '.git' in dirs:
             # Skip the directory if a .git folder is found
-            dirs.remove('.git')
+            dirs.remove('.git')
         for dir_name in dirs:
             folder_paths.append(os.path.join(root, dir_name))
     return folder_paths
@@ -50,17 +50,19 @@ texts = text_splitter.split_text(context)
 
 vector_index = Chroma.from_texts(texts, embeddings).as_retriever(search_kwargs={"k":5})
 
-shutil.rmtree("githubCode")
+# import shutil
+# shutil.rmtree('githubCode')
+# print("Directory removed!!")
 qa_chain = RetrievalQA.from_chain_type(
     model,
     retriever=vector_index,
     return_source_documents=True
-
 )
 
 # Function to generate assistant's response using ask function
 def generate_assistant_response(question):
     answer = qa_chain({"query": question})
+    print(answer)
     return answer['result']
 
 # print(generate_assistant_response("Tell me about the instructor_embeddings function."))
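
After this change, util.py keeps the cloned githubCode checkout (the shutil.rmtree cleanup is commented out), indexes the repository text with Gemini embeddings in Chroma, answers questions through a RetrievalQA chain, and prints each raw answer before returning it. The sketch below shows roughly how those pieces fit together after the commit; the text-splitter settings and the placeholder context string are assumptions, since that part of the file is outside this diff.

# Minimal sketch of the pipeline util.py builds after this commit.
# Assumed (not shown in the diff): how `context` is produced from the cloned
# repository and the text-splitter parameters.
import os

from langchain.chains import RetrievalQA
from langchain.text_splitter import RecursiveCharacterTextSplitter
from langchain_community.vectorstores import Chroma
from langchain_google_genai import ChatGoogleGenerativeAI, GoogleGenerativeAIEmbeddings

embeddings = GoogleGenerativeAIEmbeddings(
    model="models/embedding-001",
    google_api_key=os.environ["GOOGLE_API_KEY"],
    task_type="retrieval_query",
)
model = ChatGoogleGenerativeAI(
    model="gemini-pro",
    google_api_key=os.environ["GOOGLE_API_KEY"],
    temperature=0.2,
    convert_system_message_to_human=True,
)

context = "..."  # placeholder: concatenated file contents from the cloned repo
texts = RecursiveCharacterTextSplitter(chunk_size=1000, chunk_overlap=100).split_text(context)

# Embed the chunks into Chroma and expose the top-5 matches as a retriever.
vector_index = Chroma.from_texts(texts, embeddings).as_retriever(search_kwargs={"k": 5})

qa_chain = RetrievalQA.from_chain_type(
    model,
    retriever=vector_index,
    return_source_documents=True,
)

def generate_assistant_response(question):
    # Prints the full chain output (answer plus source documents), then
    # returns only the answer text, matching the behavior added in this commit.
    answer = qa_chain({"query": question})
    print(answer)
    return answer["result"]

A call such as generate_assistant_response("What does get_folder_paths do?") would then log the full chain output, including the retrieved source documents, while the caller receives only the answer string.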