Hemasagar committed
Commit e168eb0 · verified · 1 Parent(s): edd456e

Update utils.py

Files changed (1)
  1. utils.py +1 -1
utils.py CHANGED
@@ -44,7 +44,7 @@ def extracted_data(pages_data):
     # input={"prompt":prompt_template.format(pages=pages_data) ,
     # "temperature":0.1, "top_p":0.9, "max_length":512, "repetition_penalty":1})
     llm = AutoModelForCausalLM.from_pretrained("TheBloke/Llama-2-7B-Chat-GGML", model_file="llama-2-7b-chat.ggmlv3.q8_0.bin")
-    output_text=llm(prompt_template.format(pages=pages_data),temperature =0.1, top_p=0.9, max_length=512, repetition_penalty=1)
+    output_text=llm(prompt_template.format(pages=pages_data),temperature =0.1, top_p=0.9, repetition_penalty=1)
 
     full_response = ''
     for item in output_text:
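
For context (not part of the commit), below is a minimal standalone sketch of the changed call, assuming the ctransformers AutoModelForCausalLM API that utils.py already uses. prompt_template and pages_data are hypothetical stand-ins for values built earlier in extracted_data(). ctransformers takes max_new_tokens rather than max_length as its output cap, which is presumably why max_length=512 was dropped from the call.

# Minimal sketch under the assumptions above; not the committed code itself.
from ctransformers import AutoModelForCausalLM

# Hypothetical placeholders for values constructed elsewhere in utils.py.
prompt_template = "Process the following pages and return the result:\n{pages}"
pages_data = "...page text extracted earlier..."

llm = AutoModelForCausalLM.from_pretrained(
    "TheBloke/Llama-2-7B-Chat-GGML",
    model_file="llama-2-7b-chat.ggmlv3.q8_0.bin",
)

# Same generation parameters as the committed line; if an output cap is still
# wanted, ctransformers exposes max_new_tokens for that purpose.
output_text = llm(
    prompt_template.format(pages=pages_data),
    temperature=0.1,
    top_p=0.9,
    repetition_penalty=1,
)

# Mirrors the loop that follows in utils.py: concatenate whatever the call
# yields (a string here, or chunks when stream=True is used).
full_response = ""
for item in output_text:
    full_response += item
print(full_response)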