zac committed on
Commit
f2b8f78
·
1 Parent(s): ecfc01c

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +3 -3
app.py CHANGED
@@ -15,7 +15,7 @@ from langchain.memory import ConversationBufferMemory
15
  from langchain.chains import ConversationalRetrievalChain
16
 
17
 
18
- llm = Llama(model_path= hf_hub_download(repo_id="TheBloke/airoboros-l2-7B-gpt4-m2.0-GGUF", filename="airoboros-l2-7B-gpt4-m2.0.Q6_K.gguf"), n_ctx=2048) #download model from hf/ n_ctx=2048 for high ccontext length
19
 
20
  history = []
21
 
@@ -46,10 +46,10 @@ def generate_text(input_text, history):
46
  print("input ", input_text)
47
  temp =""
48
  if history == []:
49
- input_text_with_history = f"INSTRUCTIONS:{pre_prompt}"+ "\n" + f"HR: {input_text} " + "\n" +" ISAAC:"
50
  else:
51
  input_text_with_history = f"{history[-1][1]}"+ "\n"
52
- input_text_with_history += f"HR: {input_text}" + "\n" +" ISAAC:"
53
  print("new input", input_text_with_history)
54
  output = llm(input_text_with_history, max_tokens=1024, stop=["<|prompter|>", "<|endoftext|>", "<|endoftext|> \n","ISAAC:","HR:","INSTRUCTIONS:","TASK:","BACKGROUND_INFORMATION:"], stream=True)
55
  for out in output:
 
15
  from langchain.chains import ConversationalRetrievalChain
16
 
17
 
18
+ llm = Llama(model_path= hf_hub_download(repo_id="TheBloke/Llama-2-7B-32K-Instruct-GGUF", filename="llama-2-7b-32k-instruct.Q2_K.gguf"), n_ctx=2048) #download model from hf/ n_ctx=2048 for high ccontext length
19
 
20
  history = []
21
 
 
46
  print("input ", input_text)
47
  temp =""
48
  if history == []:
49
+ input_text_with_history = f"INSTRUCTIONS:{pre_prompt}"+ "\n" + f"HR: {input_text} " + "\n" +"ISAAC:"
50
  else:
51
  input_text_with_history = f"{history[-1][1]}"+ "\n"
52
+ input_text_with_history += f"HR: {input_text}" + "\n" +"ISAAC:"
53
  print("new input", input_text_with_history)
54
  output = llm(input_text_with_history, max_tokens=1024, stop=["<|prompter|>", "<|endoftext|>", "<|endoftext|> \n","ISAAC:","HR:","INSTRUCTIONS:","TASK:","BACKGROUND_INFORMATION:"], stream=True)
55
  for out in output: