thedamn committed
Commit c0b268d · 1 Parent(s): f9b085a

last hope commit

Files changed (2)
  1. app.py +0 -1
  2. resource.py +4 -19
app.py CHANGED
@@ -18,5 +18,4 @@ def main():
     st.text_area("Bot Response:", value=response_gpt, height=100)
 
 if __name__ == "__main__":
-    imp()
     main()
resource.py CHANGED
@@ -27,26 +27,11 @@ Answer:"""
 
 prompt = PromptTemplate(template=template, input_variables=["report"])
 
-
-def imp() :
-
-    try :
-
-        hf_hub_download(repo_id="dnato/ggml-gpt4all-j-v1.3-groovy.bin", filename="ggml-gpt4all-j-v1.3-groovy.bin", local_dir=".")
-
-        local_path=os.getcwd() + "/ggml-gpt4all-j-v1.3-groovy.bin"
-
-        llm = GPT4All(
-            model=local_path,
-            callbacks=[StreamingStdOutCallbackHandler()]
-        )
-
-
-
-        llm_chain = LLMChain(prompt=prompt, llm=llm)
-
-    except Exception as e:
-        print("Error Loading Model Please Contact Admin",e)
+hf_hub_download(repo_id="dnato/ggml-gpt4all-j-v1.3-groovy.bin", filename="ggml-gpt4all-j-v1.3-groovy.bin", local_dir=".")
+local_path = os.getcwd() + "/ggml-gpt4all-j-v1.3-groovy.bin"
+llm = GPT4All(model=local_path, callbacks=[StreamingStdOutCallbackHandler()])
+llm_chain = LLMChain(prompt=prompt, llm=llm)
+
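For context, a minimal sketch of what resource.py presumably looks like after this commit. Only the changed hunk is visible above, so the imports and the `template` string are assumptions inferred from the identifiers in the diff (hf_hub_download from huggingface_hub; PromptTemplate, LLMChain, GPT4All, and StreamingStdOutCallbackHandler from a pre-0.1 langchain):

# Hypothetical reconstruction of resource.py after this commit; the imports
# and the prompt template are assumptions, not shown in the diff above.
import os

from huggingface_hub import hf_hub_download
from langchain import LLMChain, PromptTemplate
from langchain.callbacks.streaming_stdout import StreamingStdOutCallbackHandler
from langchain.llms import GPT4All

# Placeholder template: the real one is not in the hunk; it takes a single
# "report" variable and ends with 'Answer:"""' per the hunk header context.
template = """{report}

Answer:"""

prompt = PromptTemplate(template=template, input_variables=["report"])

# After this commit, downloading the model and building the chain happen at
# import time instead of inside the removed imp() helper that app.py used to
# call before main().
hf_hub_download(
    repo_id="dnato/ggml-gpt4all-j-v1.3-groovy.bin",
    filename="ggml-gpt4all-j-v1.3-groovy.bin",
    local_dir=".",
)
local_path = os.getcwd() + "/ggml-gpt4all-j-v1.3-groovy.bin"

llm = GPT4All(model=local_path, callbacks=[StreamingStdOutCallbackHandler()])
llm_chain = LLMChain(prompt=prompt, llm=llm)

With the chain constructed when the module is imported, app.py's entry point only needs to call main(), which matches the removal of imp() in the app.py hunk.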