thedamn committed on
Commit d4117e2 · 1 Parent(s): 4d693d7
Files changed (2)
  1. app.py +10 -8
  2. requirements.txt +2 -0
app.py CHANGED
@@ -3,6 +3,8 @@ from gpt4all import GPT4All
 from huggingface_hub import hf_hub_download
 import streamlit as st
 import os
+import subprocess as sp
+import shlex as sx
 #gpt=GPT4All("ggml-gpt4all-j-v1.3-groovy")
 hf_hub_download(repo_id="dnato/ggml-gpt4all-j-v1.3-groovy.bin", filename="ggml-gpt4all-j-v1.3-groovy.bin", local_dir=".")
 from langchain.callbacks.streaming_stdout import StreamingStdOutCallbackHandler
@@ -19,12 +21,12 @@ local_path=os.getcwd() + "/ggml-gpt4all-j-v1.3-groovy.bin"
 
 prompt = PromptTemplate(template=template, input_variables=["question"])
 from langchain.llms import GPT4All
-llm = GPT4All(
-    model=local_path,
-    callbacks=[StreamingStdOutCallbackHandler()]
-)
+#llm = GPT4All(
+#    model=local_path,
+#    callbacks=[StreamingStdOutCallbackHandler()]
+#)
 
-llm_chain = LLMChain(prompt=prompt, llm=llm)
+#llm_chain = LLMChain(prompt=prompt, llm=llm)
 
 
 def main():
@@ -32,16 +34,16 @@ def main():
 
     # User input
     query = st.text_input("Enter your message:")
-
+    cm=sx.split(query)
 
     # Generate response
     if st.button("Submit"):
-        response=llm_chain(query)
+        #response=llm_chain(query)
         #response = gptj.chat_completion(messages)
         #answer = response['choices'][0]['message']['content']
 
         # Display the response
-        st.text_area("Bot Response:", value=response, height=100)
+        st.text_area("Bot Response:", value=sp.check_output(cm), height=100)
 
 if __name__ == "__main__":
     main()
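Net effect of the app.py changes: the LangChain/GPT4All pieces are commented out, and the text-box input is instead tokenized with shlex.split and executed as a shell command through subprocess.check_output, whose raw output fills the response box. Below is a minimal sketch of that new code path outside Streamlit; the run_query helper and the echo example are illustrative only, not part of the commit.

import shlex
import subprocess

def run_query(query: str) -> str:
    # Tokenize the raw user string into an argv list, e.g. "ls -l /tmp" -> ["ls", "-l", "/tmp"].
    argv = shlex.split(query)
    # Run the command and capture stdout. check_output returns bytes and raises
    # CalledProcessError if the command exits with a non-zero status.
    output = subprocess.check_output(argv)
    return output.decode("utf-8", errors="replace")

print(run_query("echo hello"))  # hypothetical example command -> "hello"

With these changes the Submit button runs whatever command was typed and shows its stdout; in the committed app.py the bytes returned by sp.check_output(cm) are passed to st.text_area undecoded, and a failing command will propagate CalledProcessError into the Streamlit app.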
requirements.txt CHANGED
@@ -3,3 +3,5 @@ gpt4all
 langchain
 huggingface
 huggingface_hub
+radon
+shlex
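Of the two new entries, shlex already ships with the Python standard library, so the `import shlex as sx` added in app.py works without installing anything, while radon is a third-party PyPI package. A small interpreter sketch (not part of the commit) that checks both in the current environment:

import importlib.util
import shlex

# shlex is a standard-library module, importable without any pip install.
print("shlex loaded from:", shlex.__file__)

# radon is a third-party distribution; report whether it is importable here.
print("radon importable:", importlib.util.find_spec("radon") is not None)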