Aiswarya Sankar committed on
Commit
79013f3
·
1 Parent(s): 02a22e9

Update url

Browse files
Files changed (1) hide show
  1. app.py +31 -30
app.py CHANGED
@@ -81,7 +81,7 @@ global tickets
81
  global ticket_choices
82
  tickets = []
83
 
84
- repoName = "https://github.com/jerryjliu/llama_index.git"
85
 
86
  embeddings = OpenAIEmbeddings(disallowed_special=())
87
 
@@ -109,6 +109,7 @@ def index_repo(textbox: str, dropdown: str) -> Response:
109
  else:
110
  repo = mapping[dropdown[0]]
111
 
 
112
  pathName = git_clone(repo)
113
  root_dir = './' + pathName
114
 
@@ -185,7 +186,7 @@ def index_repo(textbox: str, dropdown: str) -> Response:
185
  )
186
 
187
  global ticket_choices, ticket_titles, tickets
188
- repo = "/".join(repoName[:-4].split("/")[-2:])
189
  tickets = fetchGithubIssues(repo, 10)
190
 
191
  # # Create the dropdown
@@ -326,34 +327,34 @@ def generateDocumentationPerFolder(dir, github):
326
  an overview of that function.
327
  """.format(dir, github)
328
 
329
- return prompt
330
- # try:
331
- # embeddings = OpenAIEmbeddings()
332
- # pathName = github.split('/')[-1]
333
- # dataset_path = "hub://aiswaryas/" + pathName
334
-
335
- # db = DeepLake(dataset_path=dataset_path, read_only=True, embedding_function=embeddings)
336
-
337
- # retriever = db.as_retriever()
338
- # retriever.search_kwargs['distance_metric'] = 'cos'
339
- # retriever.search_kwargs['fetch_k'] = 100
340
- # retriever.search_kwargs['maximal_marginal_relevance'] = True
341
- # retriever.search_kwargs['k'] = 20
342
-
343
- # # streaming_handler = kwargs.get('streaming_handler')
344
- # model = ChatOpenAI(
345
- # model_name='gpt-3.5-turbo-16k',
346
- # temperature=0.0,
347
- # verbose=True,
348
- # streaming=True, # Pass `streaming=True` to make sure the client receives the data.
349
- # )
350
- # qa = ConversationalRetrievalChain.from_llm(model,retriever=retriever)
351
- # chat_history = []
352
- # return qa({"question": prompt, "chat_history": chat_history})["answer"]
353
-
354
- # except Exception as e:
355
- # print (str(e))
356
- # return "Failed to generate documentation"
357
 
358
 
359
  def solveGithubIssue(ticket, history) -> Response:
 
81
  global ticket_choices
82
  tickets = []
83
 
84
+ repoName = "https://github.com/gradio-app/gradio.git"
85
 
86
  embeddings = OpenAIEmbeddings(disallowed_special=())
87
 
 
109
  else:
110
  repo = mapping[dropdown[0]]
111
 
112
+ print("REPO NAME: " + str(repo))
113
  pathName = git_clone(repo)
114
  root_dir = './' + pathName
115
 
 
186
  )
187
 
188
  global ticket_choices, ticket_titles, tickets
189
+ repo = "/".join(repo[:-4].split("/")[-2:])
190
  tickets = fetchGithubIssues(repo, 10)
191
 
192
  # # Create the dropdown
 
327
  an overview of that function.
328
  """.format(dir, github)
329
 
330
+ # return prompt
331
+ try:
332
+ embeddings = OpenAIEmbeddings()
333
+ pathName = github.split('/')[-1]
334
+ dataset_path = "hub://aiswaryas/" + pathName
335
+
336
+ db = DeepLake(dataset_path=dataset_path, read_only=True, embedding_function=embeddings)
337
+
338
+ retriever = db.as_retriever()
339
+ retriever.search_kwargs['distance_metric'] = 'cos'
340
+ retriever.search_kwargs['fetch_k'] = 100
341
+ retriever.search_kwargs['maximal_marginal_relevance'] = True
342
+ retriever.search_kwargs['k'] = 20
343
+
344
+ # streaming_handler = kwargs.get('streaming_handler')
345
+ model = ChatOpenAI(
346
+ model_name='gpt-3.5-turbo-16k',
347
+ temperature=0.0,
348
+ verbose=True,
349
+ streaming=True, # Pass `streaming=True` to make sure the client receives the data.
350
+ )
351
+ qa = ConversationalRetrievalChain.from_llm(model,retriever=retriever)
352
+ chat_history = []
353
+ return qa({"question": prompt, "chat_history": chat_history})["answer"]
354
+
355
+ except Exception as e:
356
+ print (str(e))
357
+ return "Failed to generate documentation"
358
 
359
 
360
  def solveGithubIssue(ticket, history) -> Response: