thedamn committed
Commit cad7cfa · Parent: d2ac297

damn commit

Files changed (3):
  1. app.py +8 -35
  2. proper_main.py +122 -0
  3. requirements.txt +3 -0
app.py CHANGED
@@ -1,48 +1,21 @@
-from langchain import PromptTemplate, LLMChain
-from gpt4all import GPT4All
-from huggingface_hub import hf_hub_download
 import streamlit as st
-import os
-import subprocess as sp
-#gpt=GPT4All("ggml-gpt4all-j-v1.3-groovy")
-#hf_hub_download(repo_id="dnato/ggml-gpt4all-j-v1.3-groovy.bin", filename="ggml-gpt4all-j-v1.3-groovy.bin", local_dir=".")
-from langchain.callbacks.streaming_stdout import StreamingStdOutCallbackHandler
-template = """
-You are a friendly chatbot assistant that responds in a conversational
-manner to users questions. Keep the answers short, unless specifically
-asked by the user to elaborate on something.
-
-Question: {question}
-
-Answer:"""
-
-local_path = os.getcwd() + "/ggml-gpt4all-j-v1.3-groovy.bin"
-
-prompt = PromptTemplate(template=template, input_variables=["question"])
-from langchain.llms import GPT4All
-#llm = GPT4All(
-#    model=local_path,
-#    callbacks=[StreamingStdOutCallbackHandler()]
-#)
-
-#llm_chain = LLMChain(prompt=prompt, llm=llm)
-
 
 def main():
     st.title("GPT4All Chatbot")
 
     # User input
-    query = st.text_input("Enter your message:")
+    user_url = st.text_input("Enter the Github URL")
 
     # Generate response
     if st.button("Submit"):
-        #response=llm_chain(query)
-        #response = gptj.chat_completion(messages)
-        #answer = response['choices'][0]['message']['content']
-
+        web_scrape(user_url)
+        curr_path = data_cloning()
+        query = analyse()
+        response_gpt = llm_chain.run([query])
         # Display the response
-        st.text_area("Bot Response:", value=sp.check_output(query), height=100)
+        st.text_area("Bot Response:", value=response_gpt, height=100)
 
 if __name__ == "__main__":
     main()
-
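Note: the new main() calls llm_chain.run([query]), but this commit deletes the LLMChain setup, so llm_chain now has to come from one of the star imports (resource or proper_main, neither of which is shown defining it); the new flow also never calls data_cleaning(), so cloned repos reach analyse() unconverted and unpruned. A minimal sketch of the chain definition the call would need, reconstructed from the lines removed above (the model path, prompt, and callback are carried over from the old code, not from this commit):

# Hypothetical reconstruction of the removed chain setup; not part of this commit.
import os
from langchain import PromptTemplate, LLMChain
from langchain.llms import GPT4All
from langchain.callbacks.streaming_stdout import StreamingStdOutCallbackHandler

template = """
You are a friendly chatbot assistant that responds in a conversational
manner to users questions. Keep the answers short, unless specifically
asked by the user to elaborate on something.

Question: {question}

Answer:"""

prompt = PromptTemplate(template=template, input_variables=["question"])
llm = GPT4All(
    model=os.getcwd() + "/ggml-gpt4all-j-v1.3-groovy.bin",  # assumed local model file
    callbacks=[StreamingStdOutCallbackHandler()],
)
llm_chain = LLMChain(prompt=prompt, llm=llm)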
proper_main.py ADDED
@@ -0,0 +1,122 @@
+try:
+    import requests
+    import os
+    import subprocess as sp
+    from bs4 import BeautifulSoup
+    from nbconvert import PythonExporter
+    import shutil
+
+except Exception as e:
+    print("Some modules are missing:", e)
+    print("Do you want to install them via this Python program?")
+    option = input("Y or N: ")
+    if option.lower() not in ["y", "n"]:
+        exit()
+    elif option.lower() == "n":
+        exit()
+    elif option.lower() == "y":
+        print("Make sure your internet connection is active; otherwise, it may throw an error. Press 'N' to exit.")
+        curr_dir = os.getcwd()
+        os.system("pip install -r " + curr_dir + "/requirements.txt")
+
+
+# Clone URLs for every public repository found on the profile page
+repos = []
+
+
+def web_scrape(user_url):
+    base_url = "https://www.github.com"
+
+    if user_url.endswith("/"):
+        user_url = user_url[:-1]
+
+    try:
+        response = requests.get(user_url + "?tab=repositories")
+    except Exception as e:
+        print("Please provide a valid link:", e)
+        return
+
+    if response.status_code != 200:
+        print("Please provide a valid link.")
+        return
+
+    make_soup = BeautifulSoup(response.text, 'html.parser')
+    li = make_soup.findAll('div', class_='d-inline-block mb-1')
+    if len(li) == 0:
+        print("Please provide a valid link.")
+        return
+
+    # Each matched <div> wraps a repository name link; turn it into a full URL
+    for i in li:
+        for a in i.findAll('a'):
+            new_url = base_url + a['href']
+            repos.append(new_url)
+
+
+def data_cloning():
+    # Clone every discovered repository into a scratch directory
+    os.makedirs("/tmp/repos", exist_ok=True)
+    os.chdir("/tmp/repos")
+    for i in repos:
+        sp.run(["git", "clone", i], stdout=sp.DEVNULL, stderr=sp.DEVNULL)
+
+    return os.getcwd()
+
+
+def data_cleaning(directory):
+    exporter = PythonExporter()
+
+    for root, dirs, files in os.walk(directory, topdown=False):
+        for filename in files:
+            file_path = os.path.join(root, filename)
+
+            # Convert notebooks to plain .py scripts so radon can analyse them
+            if filename.endswith(".ipynb"):
+                output, _ = exporter.from_filename(file_path)
+                with open(os.path.join(root, filename[:-6] + ".py"), "w") as script_file:
+                    script_file.write(output)
+                os.remove(file_path)
+
+            # Drop everything that is not Python source
+            if not (filename.endswith(".py") or filename.endswith(".ipynb")):
+                os.remove(file_path)
+
+        # Walking bottom-up, so emptied directories can be pruned as we go
+        for dir_name in dirs:
+            dir_path = os.path.join(root, dir_name)
+            if not os.listdir(dir_path):
+                os.rmdir(dir_path)
+
+
+def analyse():
+    project_and_grades = {}
+
+    for file in os.listdir(os.getcwd()):
+        print(file)
+
+        # radon prints a summary line such as "Average complexity: A (1.86)"
+        cmd = ["radon", "cc", "--total-average", file]
+        res = sp.check_output(cmd)
+        index = res.decode().find("Average")
+        if index <= 0:
+            # No summary found: treat the repo as trivially simple
+            grade = "A"
+            score = 1
+        else:
+            # Slice the grade letter and numeric score out of the summary line
+            grade = res.decode()[index:]
+            score = grade[23:-1]
+            score = score[:3]
+            grade = grade[20]
+
+        project_and_grades["Repo " + file] = "Grade " + grade + " Score " + str(score)
+    shutil.rmtree('/tmp/repos')
+
+    return project_and_grades
+
+
+"""def main():
+    web_scrape(user_url)
+    curr_path = data_cloning()
+    data_cleaning(curr_path)
+    report = analyse()
+    print(report)
+
+if __name__ == "__main__":
+    main()
+"""
requirements.txt CHANGED
@@ -4,3 +4,6 @@ langchain
 huggingface
 huggingface_hub
 radon
+requests
+bs4
+nbconvert
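With the three new dependencies in place, the pipeline can also be exercised outside Streamlit, mirroring the commented-out main() at the bottom of proper_main.py (the profile URL below is a placeholder; unlike app.py, this sketch includes the data_cleaning() step):

# Standalone driver mirroring the commented-out main() in proper_main.py.
# The profile URL is a placeholder; any public GitHub profile should work.
from proper_main import web_scrape, data_cloning, data_cleaning, analyse

web_scrape("https://github.com/octocat")
curr_path = data_cloning()
data_cleaning(curr_path)   # app.py currently skips this step
report = analyse()
print(report)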