thedamn committed
Commit 0bcd97d · 1 Parent(s): d1c8797

hope commit

Files changed (4):
  1. app.py +12 -36
  2. proper_main.py +122 -0
  3. requirements.txt +2 -0
  4. resource.py +42 -0
app.py CHANGED
@@ -1,53 +1,29 @@
-from langchain import PromptTemplate, LLMChain
-from gpt4all import GPT4All
-from huggingface_hub import hf_hub_download
 import streamlit as st
-import shlex as sx
-import os
-import subprocess as sp
-#gpt=GPT4All("ggml-gpt4all-j-v1.3-groovy")
-#hf_hub_download(repo_id="dnato/ggml-gpt4all-j-v1.3-groovy.bin", filename="ggml-gpt4all-j-v1.3-groovy.bin", local_dir=".")
-from langchain.callbacks.streaming_stdout import StreamingStdOutCallbackHandler
-template = """
-You are a friendly chatbot assistant that responds in a conversational
-manner to users questions. Keep the answers short, unless specifically
-asked by the user to elaborate on something.
-
-Question: {question}
-
-Answer:"""
-
-local_path = os.getcwd() + "/ggml-gpt4all-j-v1.3-groovy.bin"
-
-prompt = PromptTemplate(template=template, input_variables=["question"])
-from langchain.llms import GPT4All
-#llm = GPT4All(
-#    model=local_path,
-#    callbacks=[StreamingStdOutCallbackHandler()]
-#)
-
-#llm_chain = LLMChain(prompt=prompt, llm=llm)
-os.chdir("/tmp")
-cm = sx.split("git clone https://github.com/theedamn/Basic-Encrypter-using-Python.git")
-sp.run(cm)
+from resource import *
+from proper_main import *
 
 
 def main():
     st.title("GPT4All Chatbot")
 
     # User input
-    query = st.text_input("Enter your message:")
-    cm = sx.split(query)
+    user_url = st.text_input("Enter the Github URL")
 
     # Generate response
     if st.button("Submit"):
-        #response = llm_chain(query)
-        #response = gptj.chat_completion(messages)
-        #answer = response['choices'][0]['message']['content']
-
+        # Scrape the user's repositories, clone them, reduce them to Python
+        # sources, grade them with radon, and let the LLM phrase the report.
+        web_scrape(user_url)
+        curr_path = data_cloning()
+        data_cleaning(curr_path)
+        report = analyse()
+        response_gpt = llm_chain(report)
         # Display the response
-        st.text_area("Bot Response:", value=sp.check_output(cm), height=100)
+        st.text_area("Bot Response:", value=response_gpt, height=100)
 
 if __name__ == "__main__":
     main()
proper_main.py ADDED
@@ -0,0 +1,122 @@
+# Standard-library imports cannot be missing, so they stay outside the
+# dependency check below.
+import os
+import subprocess as sp
+
+try:
+    import requests
+    from bs4 import BeautifulSoup
+    from nbconvert import PythonExporter
+    import shutil
+
+except Exception as e:
+    print("Some modules are missing:", e)
+    print("Do you want to install them via this Python program?")
+    option = input("Y or N: ")
+    if option.lower() not in ["y", "n"]:
+        exit()
+    elif option.lower() == "n":
+        exit()
+    elif option.lower() == "y":
+        print("Make sure your internet connection is active; otherwise, it may throw an error. Press 'N' to exit.")
+        curr_dir = os.getcwd()
+        os.system("pip install -r " + curr_dir + "/requirements.txt")
+
+
+# Repository URLs collected by web_scrape() and consumed by data_cloning().
+repos = []
+
+
+def web_scrape(user_url):
+    base_url = "https://www.github.com"
+
+    if user_url.endswith("/"):
+        user_url = user_url[:-1]
+
+    try:
+        response = requests.get(user_url + "?tab=repositories")
+    except Exception as e:
+        print("Please provide a valid link:", e)
+        return
+
+    if response.status_code != 200:
+        print("Please provide a valid link.")
+        return
+
+    # Each repository card on the profile's "Repositories" tab sits inside a
+    # div with this class; the anchors inside it hold the repo paths.
+    make_soup = BeautifulSoup(response.text, 'html.parser')
+    li = make_soup.findAll('div', class_='d-inline-block mb-1')
+    if len(li) == 0:
+        print("Please provide a valid link.")
+        return
+
+    for i in li:
+        for a in i.findAll('a'):
+            new_url = base_url + a['href']
+            repos.append(new_url)
+
+
+def data_cloning():
+    os.mkdir("/tmp/repos")
+    os.chdir("/tmp/repos")
+    for i in repos:
+        sp.run(["git", "clone", i], stdout=sp.DEVNULL, stderr=sp.DEVNULL)
+
+    return os.getcwd()
+
+
+def data_cleaning(directory):
+    # Convert notebooks to .py scripts, delete everything that is not Python
+    # source, then prune any directories left empty.
+    exporter = PythonExporter()
+
+    for root, dirs, files in os.walk(directory, topdown=False):
+        for filename in files:
+            file_path = os.path.join(root, filename)
+
+            if filename.endswith(".ipynb"):
+                output, _ = exporter.from_filename(file_path)
+                with open(os.path.join(root, filename[:-6] + ".py"), "w") as script_file:
+                    script_file.write(output)
+                os.remove(file_path)
+
+            if not (filename.endswith(".py") or filename.endswith(".ipynb")):
+                os.remove(file_path)
+
+        for dir_name in dirs:
+            dir_path = os.path.join(root, dir_name)
+            if not os.listdir(dir_path):
+                os.rmdir(dir_path)
+
+
+def analyse():
+    project_and_grades = {}
+
+    for file in os.listdir(os.getcwd()):
+        print(file)
+
+        # With --total-average, radon prints a trailing
+        # "Average complexity: <grade> (<score>)" line; slice the grade and
+        # score out of that footer.
+        cmd = ["radon", "cc", "--total-average", file]
+        res = sp.check_output(cmd)
+        index = res.decode().find("Average")
+        if index <= 0:
+            grade = "A"
+            score = 1
+        else:
+            grade = res.decode()[index:]
+            score = grade[23:-1]
+            score = score[:3]
+            grade = grade[20]
+
+        project_and_grades["Repo " + file] = "Grade " + grade + " Score " + str(score)
+    # shutil.rmtree('/tmp/repos')
+
+    return project_and_grades
+
+
+"""def main():
+    web_scrape()
+    curr_path = data_cloning()
+    data_cleaning(curr_path)
+    report = analyse()
+    print(report)
+
+if __name__ == "__main__":
+    main()
+"""
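Note on analyse(): the grade/score extraction above slices radon's "Average complexity: X (n.nn)" footer at fixed offsets, which breaks if radon ever rewords that line. A variant could call radon's Python API instead; the sketch below is illustrative only and is not part of this commit. grade_repo is a hypothetical helper, and it assumes the installed radon exposes cc_visit and cc_rank.

# Sketch only (not in this commit): grade a repo with radon's Python API
# instead of slicing CLI output.
import os
from radon.complexity import cc_visit, cc_rank

def grade_repo(repo_path):
    scores = []
    for root, _, files in os.walk(repo_path):
        for name in files:
            if not name.endswith(".py"):
                continue
            with open(os.path.join(root, name), encoding="utf-8", errors="ignore") as fh:
                try:
                    # cc_visit returns one block per function/class, each with a complexity score
                    scores += [block.complexity for block in cc_visit(fh.read())]
                except SyntaxError:
                    continue  # skip files that do not parse
    if not scores:
        return "Grade A Score 1"
    avg = sum(scores) / len(scores)
    return "Grade " + cc_rank(avg) + " Score " + str(round(avg, 2))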
requirements.txt CHANGED
@@ -5,3 +5,5 @@ huggingface
 huggingface_hub
 radon
 requests
+bs4
+nbconvert
resource.py ADDED
@@ -0,0 +1,42 @@
+# os is needed for the model path, and the prompt must exist before the
+# LLMChain below is built, so both are set up first.
+import os
+
+from langchain import PromptTemplate, LLMChain
+from huggingface_hub import hf_hub_download
+from langchain.llms import GPT4All
+from langchain.callbacks.streaming_stdout import StreamingStdOutCallbackHandler
+
+
+template = """
+You are a friendly chatbot assistant that responds in a conversational
+manner to users questions. Keep the answers short, unless specifically
+asked by the user to elaborate on something.
+
+Question: {question}
+
+Answer:"""
+
+prompt = PromptTemplate(template=template, input_variables=["question"])
+
+
+try:
+    hf_hub_download(repo_id="dnato/ggml-gpt4all-j-v1.3-groovy.bin", filename="ggml-gpt4all-j-v1.3-groovy.bin", local_dir=".")
+
+    local_path = os.getcwd() + "/ggml-gpt4all-j-v1.3-groovy.bin"
+
+    llm = GPT4All(
+        model=local_path,
+        callbacks=[StreamingStdOutCallbackHandler()]
+    )
+
+    llm_chain = LLMChain(prompt=prompt, llm=llm)
+
+except Exception as e:
+    print("Error loading model, please contact the admin:", e)
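Usage note: app.py passes the dict returned by analyse() to llm_chain, whose prompt has a single question variable. Flattening the report to a string is what that call effectively needs; the lines below are a minimal sketch with example data, assuming the legacy LangChain LLMChain interface used above.

# Hypothetical example input; analyse() returns {"Repo <name>": "Grade <g> Score <s>", ...}
report = {"Repo Basic-Encrypter-using-Python": "Grade A Score 1"}
question = "Summarise these repository grades for the user:\n" + "\n".join(
    repo + ": " + grade for repo, grade in report.items()
)
response_gpt = llm_chain.run(question)  # same as llm_chain({"question": question})["text"]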