Commit 9fcff82 · Aiswarya Sankar committed
Parent(s): f3b7606
Update openai key

app.py CHANGED
@@ -18,7 +18,7 @@ import random
 import time
 import together

-os.environ['OPENAI_API_KEY']='sk-
+os.environ['OPENAI_API_KEY']='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'
 os.environ['ACTIVELOOP_TOKEN']='eyJhbGciOiJIUzUxMiIsImlhdCI6MTY4MTU5NTgyOCwiZXhwIjoxNzEzMjE4MTU5fQ.eyJpZCI6ImFpc3dhcnlhcyJ9.eoiMFZsS20zzMXXupFbowUlLdgIgf_MA1ck_DByzREeoQvNm8GPhKEfqea2y1Qak-ud2jo9dhSTBTfRe1ztezw'


@@ -119,10 +119,11 @@ def index_repo(textbox: str, dropdown: str) -> Response:
 print("Repo name after setting the value: " + str(repoName))
 pathName = git_clone(repo)
 root_dir = './' + pathName
+print(root_dir)

 print("Repo name after setting the value: " + str(repoName))
 activeloop_username = "aiswaryas"
-dataset_path = f"hub://{activeloop_username}/" + pathName + "
+dataset_path = f"hub://{activeloop_username}/" + pathName + "3"
 print(dataset_path)

 try:
@@ -153,7 +154,7 @@ def index_repo(textbox: str, dropdown: str) -> Response:
 pass

 activeloop_username = "aiswaryas"
-dataset_path = f"hub://{activeloop_username}/" + pathName + "
+dataset_path = f"hub://{activeloop_username}/" + pathName + "3"
 text_splitter = CharacterTextSplitter(chunk_size=1000, chunk_overlap=0)
 texts = text_splitter.split_documents(docs)

@@ -194,10 +195,8 @@ def index_repo(textbox: str, dropdown: str) -> Response:
 )

 global ticket_choices, ticket_titles, tickets
-print("REPO name in bug triage: " + str(repoName))
 repo = "/".join(repoName[:-4].split("/")[-2:])
 tickets = fetchGithubIssues(repo, 10)
-# print("tickets: " + str(tickets))

 # Create the dropdown
 ticket_choices = {ticket["title"]: ticket for ticket in tickets}
@@ -218,9 +217,9 @@ def answer_questions(question: str, github: str, **kwargs) -> Response:
 github = repoName[:-4]
 print(github)
 try:
-embeddings = OpenAIEmbeddings(openai_api_key="sk-
+embeddings = OpenAIEmbeddings(openai_api_key="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")
 pathName = github.split('/')[-1]
-dataset_path = "hub://aiswaryas/" + pathName + "
+dataset_path = "hub://aiswaryas/" + pathName + "3"

 db = DeepLake(dataset_path=dataset_path, read_only=True, embedding_function=embeddings)

@@ -241,7 +240,7 @@ def answer_questions(question: str, github: str, **kwargs) -> Response:
 callback_manager=CallbackManager(
 [StreamingGradioCallbackHandler(q)]
 ),
-openai_api_key="sk-
+openai_api_key="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",
 )
 qa = ConversationalRetrievalChain.from_llm(model,retriever=retriever)
 chat_history = []
@@ -306,7 +305,7 @@ def generateFolderNamesForRepo(repo):
 input data and generate the responses that are displayed in the UI.
 """
 pathName = git_clone(repo)
-root_dir = './' + pathName + "
+root_dir = './' + pathName + "3"

 files, dirs, docs = [], [], []
 for dirpath, dirnames, filenames in os.walk(root_dir):
@@ -342,10 +341,10 @@ def generateDocumentationPerFolder(dir, github):

 print(prompt)
 try:
-embeddings = OpenAIEmbeddings(openai_api_key="sk-
+embeddings = OpenAIEmbeddings(openai_api_key="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")
 pathName = github.split('/')[-1]
 print("PATH NAME: " + str(pathName))
-dataset_path = "hub://aiswaryas/" + pathName + "
+dataset_path = "hub://aiswaryas/" + pathName + "3"

 db = DeepLake(dataset_path=dataset_path, read_only=True, embedding_function=embeddings)

@@ -362,7 +361,7 @@ def generateDocumentationPerFolder(dir, github):
 temperature=0.0,
 verbose=True,
 streaming=True, # Pass `streaming=True` to make sure the client receives the data.
-openai_api_key="sk-
+openai_api_key="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",
 )
 qa = ConversationalRetrievalChain.from_llm(model,retriever=retriever)
 chat_history = []
@@ -405,9 +404,9 @@ def solveGithubIssue(ticket, history) -> Response:
 print(question)

 try:
-embeddings = OpenAIEmbeddings(openai_api_key="sk-
+embeddings = OpenAIEmbeddings(openai_api_key="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")
 pathName = github.split('/')[-1]
-dataset_path = "hub://aiswaryas/" + pathName + "
+dataset_path = "hub://aiswaryas/" + pathName + "3"

 db = DeepLake(dataset_path=dataset_path, read_only=True, embedding=embeddings)

@@ -427,7 +426,7 @@ def solveGithubIssue(ticket, history) -> Response:
 callback_manager=CallbackManager(
 [StreamingGradioCallbackHandler(q)]
 ),
-openai_api_key="sk-
+openai_api_key="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",
 )
 qa = ConversationalRetrievalChain.from_llm(model,retriever=retriever,max_tokens_limit=8000)

@@ -455,9 +454,9 @@ def bot(history, **kwargs):
 print("Repo name in the bot: " + str(repoName))
 github = repoName[:-4]
 try:
-embeddings = OpenAIEmbeddings(openai_api_key="sk-
+embeddings = OpenAIEmbeddings(openai_api_key="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")
 pathName = github.split('/')[-1]
-dataset_path = "hub://aiswaryas/" + pathName + "
+dataset_path = "hub://aiswaryas/" + pathName + "3"

 db = DeepLake(dataset_path=dataset_path, read_only=True, embedding_function=embeddings)

@@ -477,7 +476,7 @@ def bot(history, **kwargs):
 callback_manager=CallbackManager(
 [StreamingGradioCallbackHandler(q)]
 ),
-openai_api_key="sk-
+openai_api_key="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",
 )
 qa = ConversationalRetrievalChain.from_llm(model,retriever=retriever)
 chat_history = []
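Apart from the key update itself, every hunk repeats the same two changes inside the app's query path: pass the new OpenAI key to OpenAIEmbeddings / ChatOpenAI, and point dataset_path at the "3"-suffixed Activeloop dataset. The following is a minimal sketch of that retrieval pattern only, not the app's full code; the langchain 0.0.x import paths and the "example-repo" folder name are assumptions (the diff does not show the app's imports), and the key is read from the OPENAI_API_KEY environment variable that the first hunk exports.

# Minimal sketch of the DeepLake + ConversationalRetrievalChain pattern this commit touches.
# Assumptions: langchain 0.0.x import paths; "example-repo" is a hypothetical folder name
# (the app derives it from git_clone(repo)); OPENAI_API_KEY is set as in the first hunk.
import os
from langchain.embeddings import OpenAIEmbeddings
from langchain.vectorstores import DeepLake
from langchain.chat_models import ChatOpenAI
from langchain.chains import ConversationalRetrievalChain

pathName = "example-repo"  # hypothetical repo folder name

# Embed queries with the key exported via os.environ.
embeddings = OpenAIEmbeddings(openai_api_key=os.environ["OPENAI_API_KEY"])

# Open the Activeloop dataset read-only; the commit appends "3" to the dataset path.
dataset_path = "hub://aiswaryas/" + pathName + "3"
db = DeepLake(dataset_path=dataset_path, read_only=True, embedding_function=embeddings)
retriever = db.as_retriever()

# Chat model plus retriever, as in answer_questions, solveGithubIssue, and bot.
model = ChatOpenAI(
    temperature=0.0,
    streaming=True,
    openai_api_key=os.environ["OPENAI_API_KEY"],
)
qa = ConversationalRetrievalChain.from_llm(model, retriever=retriever)

result = qa({"question": "What does this repository do?", "chat_history": []})
print(result["answer"])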