sharjeel1477 committed on
Commit
d1197b3
·
1 Parent(s): 0d283e9

Create ask.py

Browse files
Files changed (1) hide show
  1. ask.py +45 -0
ask.py ADDED
@@ -0,0 +1,45 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from llama_index import GPTPineconeIndex, LLMPredictor, ServiceContext
2
+ import pinecone
3
+ from langchain import OpenAI
4
+ import os
5
+
6
+
7
+ # logging.basicConfig(stream=sys.stdout, level=logging.DEBUG)
8
+ # logging.getLogger().addHandler(logging.StreamHandler(stream=sys.stdout))
9
+
10
+
11
def askQuestion(brain, question, temperature, maxTokens):
    """Answer *question* from the Pinecone index named *brain*.

    Connects to Pinecone, wraps the named index in a llama_index
    ``GPTPineconeIndex``, and runs a retrieval-augmented query through an
    OpenAI ``text-davinci-003`` completion model.

    Args:
        brain: Name of the Pinecone index to query.
        question: The user's query string.
        temperature: Sampling temperature forwarded to the OpenAI LLM.
        maxTokens: ``max_tokens`` forwarded to the OpenAI LLM.

    Returns:
        The llama_index query response object.

    Raises:
        KeyError: If the ``PINECONE_KEY`` environment variable is unset.
    """
    # Debug trace of the incoming request.
    print(brain, question, temperature)

    pinecone.init(api_key=os.environ['PINECONE_KEY'],
                  environment="us-west4-gcp")
    pineconeindex = pinecone.Index(brain)
    # Empty document list: all content is already stored in Pinecone.
    index = GPTPineconeIndex([], pinecone_index=pineconeindex)

    # For Q&A keep this at 4; for content generation use roughly 7-10.
    data_chunks = 4

    llm_predictor = LLMPredictor(llm=OpenAI(
        temperature=temperature, model_name="text-davinci-003", max_tokens=maxTokens))
    service_context_gpt4 = ServiceContext.from_defaults(
        llm_predictor=llm_predictor)

    response = index.query(question, service_context=service_context_gpt4,
                           similarity_top_k=data_chunks, response_mode="compact")

    return response
38
+
39
+
40
def getBrains():
    """List the names of all active Pinecone indexes ("brains").

    Initializes the Pinecone client from the ``PINECONE_KEY`` environment
    variable, prints the discovered index names for visibility, and
    returns them.

    Returns:
        A list of active Pinecone index names.
    """
    pinecone.init(api_key=os.environ['PINECONE_KEY'],
                  environment="us-west4-gcp")
    indexes = pinecone.list_indexes()
    print(indexes)
    return indexes