Shaleen123 commited on
Commit
f776487
Β·
1 Parent(s): 897ec15

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +15 -4
app.py CHANGED
@@ -2,7 +2,10 @@ import streamlit as st
2
  from dotenv import load_dotenv
3
  from PyPDF2 import PdfReader
4
  from langchain.text_splitter import CharacterTextSplitter, RecursiveCharacterTextSplitter
 
5
  from langchain.embeddings import OpenAIEmbeddings, HuggingFaceInstructEmbeddings
 
 
6
  from langchain.vectorstores import FAISS, Chroma
7
  from langchain.embeddings import HuggingFaceEmbeddings # General embeddings from HuggingFace models.
8
  from langchain.chat_models import ChatOpenAI
@@ -60,22 +63,30 @@ def get_vectorstore(text_chunks):
60
 
61
  return vectorstore # μƒμ„±λœ 벑터 μŠ€ν† μ–΄λ₯Ό λ°˜ν™˜ν•©λ‹ˆλ‹€.
62
 
63
-
64
def get_conversation_chain(vectorstore):
    """Build a conversational retrieval chain over the given vector store.

    Wires a GPT-3.5 chat model, a buffer memory for the running chat
    history, and the vector store's retriever into a single
    ConversationalRetrievalChain.
    """
    # Load the gpt-3.5 chat model.
    chat_llm = ChatOpenAI(model_name='gpt-3.5-turbo')

    # Memory object that accumulates the conversation history so the
    # chain can condense follow-up questions.
    history = ConversationBufferMemory(
        memory_key='chat_history', return_messages=True)

    # Combine model, retriever and memory into the retrieval chain.
    return ConversationalRetrievalChain.from_llm(
        llm=chat_llm,
        retriever=vectorstore.as_retriever(),
        memory=history,
    )
78
 
 
79
  # μ‚¬μš©μž μž…λ ₯을 μ²˜λ¦¬ν•˜λŠ” ν•¨μˆ˜μž…λ‹ˆλ‹€.
80
  def handle_userinput(user_question):
81
  # λŒ€ν™” 체인을 μ‚¬μš©ν•˜μ—¬ μ‚¬μš©μž μ§ˆλ¬Έμ— λŒ€ν•œ 응닡을 μƒμ„±ν•©λ‹ˆλ‹€.
 
2
  from dotenv import load_dotenv
3
  from PyPDF2 import PdfReader
4
  from langchain.text_splitter import CharacterTextSplitter, RecursiveCharacterTextSplitter
5
+ from transformers import AutoModelForCausalLM, AutoTokenizer
6
  from langchain.embeddings import OpenAIEmbeddings, HuggingFaceInstructEmbeddings
7
+ from peft import PeftModel, PeftConfig
8
+ from transformers import AutoModelForCausalLM
9
  from langchain.vectorstores import FAISS, Chroma
10
  from langchain.embeddings import HuggingFaceEmbeddings # General embeddings from HuggingFace models.
11
  from langchain.chat_models import ChatOpenAI
 
63
 
64
  return vectorstore # μƒμ„±λœ 벑터 μŠ€ν† μ–΄λ₯Ό λ°˜ν™˜ν•©λ‹ˆλ‹€.
65
 
 
66
def get_conversation_chain(vectorstore):
    """Build a conversational retrieval chain backed by a local PEFT-tuned model.

    Loads the base causal-LM recorded in the adapter's PEFT config, applies
    the LoRA/PEFT adapter on top, wraps the result in a LangChain
    HuggingFacePipeline, and combines it with a chat-history memory and the
    vector store's retriever.

    Args:
        vectorstore: any object exposing ``as_retriever()`` (e.g. FAISS/Chroma).

    Returns:
        A ConversationalRetrievalChain ready to answer questions.
    """
    # Local imports so the module keeps loading even if these heavy deps
    # are only needed on this code path.
    from transformers import pipeline
    from langchain.llms import HuggingFacePipeline

    # Hugging Face Hub id of the PEFT *adapter* repository.
    adapter_name = "Shaleen123/mistrallite_medical_qa"

    # The adapter repo contains only adapter weights; the PEFT config tells
    # us which base model it was trained against. Loading the base weights
    # from the adapter id (as the original code did) would fail.
    peft_config = PeftConfig.from_pretrained(adapter_name)
    base_path = peft_config.base_model_name_or_path
    base_model = AutoModelForCausalLM.from_pretrained(base_path)
    model = PeftModel.from_pretrained(base_model, adapter_name)
    # Tokenizer comes from the base model repo — adapter repos often ship
    # no tokenizer files. TODO(review): confirm the adapter repo does not
    # override the tokenizer.
    tokenizer = AutoTokenizer.from_pretrained(base_path)

    # ConversationalRetrievalChain.from_llm expects a LangChain LLM, not a
    # raw transformers model: adapt via a text-generation pipeline.
    gen_pipeline = pipeline(
        "text-generation",
        model=model,
        tokenizer=tokenizer,
        max_new_tokens=512,
    )
    llm = HuggingFacePipeline(pipeline=gen_pipeline)

    # Memory that accumulates the running chat history.
    memory = ConversationBufferMemory(
        memory_key='chat_history', return_messages=True)

    # Combine model, retriever and memory into the retrieval chain.
    conversation_chain = ConversationalRetrievalChain.from_llm(
        llm=llm,
        retriever=vectorstore.as_retriever(),
        memory=memory
    )
    return conversation_chain
88
 
89
+
90
  # μ‚¬μš©μž μž…λ ₯을 μ²˜λ¦¬ν•˜λŠ” ν•¨μˆ˜μž…λ‹ˆλ‹€.
91
  def handle_userinput(user_question):
92
  # λŒ€ν™” 체인을 μ‚¬μš©ν•˜μ—¬ μ‚¬μš©μž μ§ˆλ¬Έμ— λŒ€ν•œ 응닡을 μƒμ„±ν•©λ‹ˆλ‹€.