thedamn committed
Commit f13a9e9 · 1 Parent(s): 35bd500

good night last2 commit

Files changed (2)
  1. app.py +1 -1
  2. resource.py +2 -2
app.py CHANGED
@@ -14,7 +14,7 @@ def main():
     curr_path = data_cloning()
     query = analyse()
     llm_chain=langu()
-    response_gpt = llm_chain.run([str(query)])
+    response_gpt = llm_chain([str(query)])
     # Display the response
     st.text_area("Bot Response:", value=response_gpt, height=100)
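The app.py change swaps llm_chain.run(...) for calling the chain object directly. In the legacy LangChain API those two invocations return different shapes: run() gives back the generated text as a bare string, while calling the chain returns a dict containing the inputs plus the chain's output key ("text" for LLMChain). A minimal sketch of the difference, using FakeListLLM as a stand-in for the GPT4All model (import paths assume a legacy langchain 0.0.x install):

from langchain.chains import LLMChain
from langchain.prompts import PromptTemplate
from langchain.llms.fake import FakeListLLM  # stand-in LLM so the sketch runs without the GPT4All weights

prompt = PromptTemplate(template="Question: {question}\nAnswer:", input_variables=["question"])
llm_chain = LLMChain(prompt=prompt, llm=FakeListLLM(responses=["stub answer", "stub answer"]))

# .run() returns the generated text as a plain string.
as_string = llm_chain.run("What does this app do?")

# Calling the chain returns a dict of inputs plus the output key ("text").
as_dict = llm_chain("What does this app do?")
print(as_string)        # stub answer
print(as_dict["text"])  # stub answer

Because of that difference, the unchanged st.text_area(..., value=response_gpt, ...) line may now receive a dict rather than a string, so extracting response_gpt["text"] before displaying it is likely needed.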
 
resource.py CHANGED
@@ -32,8 +32,8 @@ def langu():
     import os
     prompt = PromptTemplate(template=template, input_variables=["question"])

-    hf_hub_download(repo_id="dnato/ggml-gpt4all-j-v1.3-groovy.bin", filename="ggml-gpt4all-j-v1.3-groovy.bin", local_dir=".")
-    local_path= "/home/user/app/ggml-gpt4all-j-v1.3-groovy.bin"
+    hf_hub_download(repo_id="dnato/ggml-gpt4all-j-v1.3-groovy.bin", filename="ggml-gpt4all-j-v1.3-groovy.bin", local_dir="/tmp")
+    local_path= "/tmp/ggml-gpt4all-j-v1.3-groovy.bin"
     llm = GPT4All(model=local_path,callbacks=[StreamingStdOutCallbackHandler()] )
     llm_chain = LLMChain(prompt=prompt, llm=llm)
     return llm_chain
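The resource.py change moves the model download out of the app directory and into /tmp, presumably because /tmp is reliably writable inside the Space container, and points local_path at the new location. A minimal sketch of how the updated langu() pieces fit together, assuming legacy langchain import paths and a placeholder template string; note that hf_hub_download returns the path of the downloaded file, so its return value could replace the hard-coded "/tmp/..." string entirely:

from huggingface_hub import hf_hub_download
from langchain.chains import LLMChain
from langchain.prompts import PromptTemplate
from langchain.llms import GPT4All
from langchain.callbacks.streaming_stdout import StreamingStdOutCallbackHandler

# Download the GGML weights into a writable directory and keep the returned path.
local_path = hf_hub_download(
    repo_id="dnato/ggml-gpt4all-j-v1.3-groovy.bin",
    filename="ggml-gpt4all-j-v1.3-groovy.bin",
    local_dir="/tmp",
)

# Placeholder template; the real one is defined earlier in resource.py.
template = "Question: {question}\nAnswer:"
prompt = PromptTemplate(template=template, input_variables=["question"])

# Build the local GPT4All LLM with the same arguments used in resource.py.
llm = GPT4All(model=local_path, callbacks=[StreamingStdOutCallbackHandler()])
llm_chain = LLMChain(prompt=prompt, llm=llm)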