rahgadda committed on
Commit 052b12a · verified · 1 Parent(s): eb9afcc

Initial Draft

Files changed (1)
  1. app.py +1 -1
app.py CHANGED
@@ -67,7 +67,7 @@ def fn_generate_QnA_response(mv_selected_model, mv_user_question, lv_vector_store
     # )
     lv_ms_phi2_pipeline = pipeline(
         "text-generation", tokenizer=lv_tokenizer, model=lv_model,
-        device_map="cpu", max_new_tokens=4000, return_full_text=True
+        device_map="cpu", max_new_tokens=512, return_full_text=True
     )
     lv_hf_phi2_pipeline = HuggingFacePipeline(pipeline=lv_ms_phi2_pipeline)
     lv_chain = ConversationalRetrievalChain.from_llm(lv_hf_phi2_pipeline, lv_vector_store.as_retriever(), return_source_documents=True)
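For context, below is a minimal sketch of how the changed function might be wired end to end. Only the pipeline and chain construction lines are taken from the diff above; the model/tokenizer loading, the LangChain import paths (which vary by version), and the chat-history handling are assumptions for illustration, not the repository's actual code.

# Sketch only: imports and loading code are assumed, not shown in the hunk.
from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline
from langchain_community.llms import HuggingFacePipeline   # older versions: langchain.llms
from langchain.chains import ConversationalRetrievalChain

def fn_generate_QnA_response(mv_selected_model, mv_user_question, lv_vector_store):
    # Assumed: mv_selected_model is a Hugging Face model id (e.g. a phi-2 checkpoint).
    lv_tokenizer = AutoTokenizer.from_pretrained(mv_selected_model)
    lv_model = AutoModelForCausalLM.from_pretrained(mv_selected_model)

    # Changed line from the diff: generation is now capped at 512 new tokens
    # instead of 4000, keeping CPU inference time and memory bounded.
    lv_ms_phi2_pipeline = pipeline(
        "text-generation", tokenizer=lv_tokenizer, model=lv_model,
        device_map="cpu", max_new_tokens=512, return_full_text=True
    )
    lv_hf_phi2_pipeline = HuggingFacePipeline(pipeline=lv_ms_phi2_pipeline)

    # lv_vector_store is expected to be a LangChain vector store whose retriever
    # supplies context documents to the conversational retrieval chain.
    lv_chain = ConversationalRetrievalChain.from_llm(
        lv_hf_phi2_pipeline, lv_vector_store.as_retriever(), return_source_documents=True
    )

    # Assumed invocation: an empty chat history for a single-turn question;
    # the result dict carries "answer" and "source_documents".
    return lv_chain({"question": mv_user_question, "chat_history": []})

Lowering max_new_tokens is a common fix when a CPU-only text-generation pipeline is too slow or memory-hungry; the rest of the chain is unaffected because the retriever and prompt construction do not depend on the generation cap.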