Iliassti committed on
Commit 3864760 · 1 Parent(s): dc8a349

remove Anthropic model

Files changed (1)
  1. app.py +9 -5
app.py CHANGED
@@ -1,5 +1,5 @@
 from langchain.chat_models import ChatAnthropic
-from langchain import PromptTemplate, LLMChain
+from langchain import PromptTemplate, LLMChain, HuggingFaceHub
 from langchain.prompts.chat import (
     ChatPromptTemplate,
     SystemMessagePromptTemplate,
@@ -10,6 +10,7 @@ from langchain.schema import AIMessage, HumanMessage, SystemMessage
 import streamlit as st
 from dotenv import load_dotenv
 import PyPDF2
+import os
 
 load_dotenv()
 
@@ -25,10 +26,13 @@ class LegalExpert:
         )
 
         # create llm from huggingfaceHub model
+        if not os.getenv("HUGGINGFACE_API_KEY"):
+            raise ValueError("HUGGINGFACE_API_KEY not set")
+        self.llm = HuggingFaceHub(repo_id="google/flan-t5-xl",
+                                  model_kwargs={"temperature":0.3,
+                                                "max_length":64})
 
-        self.chat = ChatAnthropic()
-
-        self.chain = LLMChain(llm=self.chat, prompt=full_prompt_template)
+        self.chain = LLMChain(llm=self.llm, prompt=full_prompt_template)
 
     def get_system_prompt(self):
         system_prompt = """
@@ -97,7 +101,7 @@ if "context" in st.session_state:
         legal_response = st.session_state.LegalExpert.run_chain(
             language=language, context=st.session_state.context, question=question
         )
-
+        print(f"legal_response: {legal_response}")
         if "legal_response" not in st.session_state:
             st.session_state.legal_response = legal_response
 
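
For reference, the snippet below is a minimal sketch (not part of the commit) of how the HuggingFaceHub-backed chain introduced here can be exercised outside Streamlit. It assumes the legacy langchain package matching the imports in app.py, a Hugging Face Hub token available to the library, and a simplified PromptTemplate standing in for the app's full_prompt_template.

# Minimal sketch (not from the commit): drive the new HuggingFaceHub-backed
# chain outside Streamlit. Assumes the legacy langchain package used by
# app.py and a Hugging Face Hub token available in the environment
# (langchain's HuggingFaceHub reads HUGGINGFACEHUB_API_TOKEN; app.py itself
# only checks that HUGGINGFACE_API_KEY is set).
from langchain import PromptTemplate, LLMChain, HuggingFaceHub

# Same model and generation parameters as the commit.
llm = HuggingFaceHub(
    repo_id="google/flan-t5-xl",
    model_kwargs={"temperature": 0.3, "max_length": 64},
)

# Simplified stand-in for the app's full_prompt_template, which is built
# from system and human chat message templates.
prompt = PromptTemplate(
    template=(
        "You are a legal expert. Answer in {language} using the context.\n"
        "Context: {context}\n"
        "Question: {question}\n"
        "Answer:"
    ),
    input_variables=["language", "context", "question"],
)

chain = LLMChain(llm=llm, prompt=prompt)

# chain.run takes the prompt variables as keyword arguments, mirroring
# LegalExpert.run_chain(language=..., context=..., question=...).
answer = chain.run(
    language="English",
    context="This agreement is governed by the laws of France.",
    question="Which law governs the agreement?",
)
print(answer)

If answers come back truncated, max_length in model_kwargs is the parameter to raise.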