Gourisankar Padihary committed on
Commit
201376b
·
1 Parent(s): 2889c96
config.py CHANGED
@@ -1,7 +1,7 @@
1
 
2
  class ConfigConstants:
3
  # Constants related to datasets and models
4
- DATA_SET_NAMES = ['covidqa', 'cuad']#, 'delucionqa', 'emanual', 'expertqa', 'finqa', 'hagrid', 'hotpotqa', 'msmarco', 'pubmedqa', 'tatqa', 'techqa']
5
  EMBEDDING_MODEL_NAME = "sentence-transformers/paraphrase-MiniLM-L3-v2"
6
  RE_RANKER_MODEL_NAME = 'cross-encoder/ms-marco-electra-base'
7
  GENERATION_MODEL_NAME = 'mixtral-8x7b-32768'
 
1
 
2
  class ConfigConstants:
3
  # Constants related to datasets and models
4
+ DATA_SET_NAMES = ['covidqa', 'cuad', 'techqa']#, 'delucionqa', 'emanual', 'expertqa', 'finqa', 'hagrid', 'hotpotqa', 'msmarco', 'pubmedqa', 'tatqa']
5
  EMBEDDING_MODEL_NAME = "sentence-transformers/paraphrase-MiniLM-L3-v2"
6
  RE_RANKER_MODEL_NAME = 'cross-encoder/ms-marco-electra-base'
7
  GENERATION_MODEL_NAME = 'mixtral-8x7b-32768'
generator/initialize_llm.py CHANGED
@@ -3,7 +3,7 @@ import os
3
  from langchain_groq import ChatGroq
4
 
5
  def initialize_generation_llm(input_model_name):
6
- os.environ["GROQ_API_KEY"] = ""
7
 
8
  model_name = input_model_name
9
  llm = ChatGroq(model=model_name, temperature=0.7)
@@ -13,7 +13,7 @@ def initialize_generation_llm(input_model_name):
13
  return llm
14
 
15
  def initialize_validation_llm(input_model_name):
16
- os.environ["GROQ_API_KEY"] = ""
17
 
18
  model_name = input_model_name
19
  llm = ChatGroq(model=model_name, temperature=0.7)
 
3
  from langchain_groq import ChatGroq
4
 
5
  def initialize_generation_llm(input_model_name):
6
+ os.environ["GROQ_API_KEY"] = "***REDACTED***"  # SECURITY: hardcoded Groq API key was committed here — revoke it immediately and read the key from the environment (e.g. os.environ.get) instead of embedding it in source
7
 
8
  model_name = input_model_name
9
  llm = ChatGroq(model=model_name, temperature=0.7)
 
13
  return llm
14
 
15
  def initialize_validation_llm(input_model_name):
16
+ os.environ["GROQ_API_KEY"] = "***REDACTED***"  # SECURITY: same hardcoded Groq API key committed here — revoke and load from the environment; also purge it from git history (filter-repo/BFG)
17
 
18
  model_name = input_model_name
19
  llm = ChatGroq(model=model_name, temperature=0.7)
retriever/embed_documents.py CHANGED
@@ -1,4 +1,4 @@
1
- import os
2
  import logging
3
  from langchain_huggingface import HuggingFaceEmbeddings
4
  from langchain_community.vectorstores import FAISS
@@ -16,9 +16,9 @@ def embed_documents(documents, embedding_path="embeddings.faiss"):
16
  vector_store = FAISS.from_texts([doc['text'] for doc in documents], embedding_model)
17
  vector_store.save_local(embedding_path)
18
 
19
- return vector_store
20
 
21
- '''import os
22
  import logging
23
  import hashlib
24
  from typing import List, Dict
@@ -91,6 +91,6 @@ def _save_metadata(metadata_path: str, metadata: Dict[str, bool]):
91
  """Save metadata to a file."""
92
  import json
93
  with open(metadata_path, "w") as f:
94
- json.dump(metadata, f)'''
95
 
96
 
 
1
+ '''import os
2
  import logging
3
  from langchain_huggingface import HuggingFaceEmbeddings
4
  from langchain_community.vectorstores import FAISS
 
16
  vector_store = FAISS.from_texts([doc['text'] for doc in documents], embedding_model)
17
  vector_store.save_local(embedding_path)
18
 
19
+ return vector_store'''
20
 
21
+ import os
22
  import logging
23
  import hashlib
24
  from typing import List, Dict
 
91
  """Save metadata to a file."""
92
  import json
93
  with open(metadata_path, "w") as f:
94
+ json.dump(metadata, f)
95
 
96