Pijush2023 committed
Commit dcfae74 · verified · 1 Parent(s): 736108b

Update app.py

Files changed (1)
  1. app.py +33 -10
app.py CHANGED
@@ -3,7 +3,7 @@ import os
 import logging
 from langchain_core.prompts import ChatPromptTemplate
 from langchain_core.output_parsers import StrOutputParser
-from langchain_openai import ChatOpenAI
+#from langchain_openai import ChatOpenAI
 from langchain_community.graphs import Neo4jGraph
 from typing import List, Tuple
 from pydantic import BaseModel, Field
@@ -42,6 +42,15 @@ graph = Neo4jGraph(
     password="Z10duoPkKCtENuOukw3eIlvl0xJWKtrVSr-_hGX1LQ4"
 )

+os.environ["NVIDIA_API_KEY"] = "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"
+llm = ChatNVIDIA(model="meta/llama-3.1-70b-instruct")
+
+
+llm = ChatNVIDIA(
+    base_url="https://integrate.api.nvidia.com/v1",
+    model="meta/llama-3.1-8b-instruct"
+)
+
 # Define entity extraction and retrieval functions
 class Entities(BaseModel):
     names: List[str] = Field(
@@ -53,7 +62,7 @@ entity_prompt = ChatPromptTemplate.from_messages([
     ("human", "Use the given format to extract information from the following input: {question}"),
 ])

-chat_model = ChatOpenAI(temperature=0, model_name="gpt-4o", api_key=os.environ['OPENAI_API_KEY'])
+#chat_model = ChatOpenAI(temperature=0, model_name="gpt-4o", api_key=os.environ['OPENAI_API_KEY'])
 entity_chain = entity_prompt | chat_model.with_structured_output(Entities)

 def remove_lucene_chars(input: str) -> str:
@@ -129,21 +138,35 @@ _search_query = RunnableBranch(
             chat_history=lambda x: _format_chat_history(x["chat_history"])
         )
         | CONDENSE_QUESTION_PROMPT
-        | ChatOpenAI(temperature=0, api_key=os.environ['OPENAI_API_KEY'])
+        | llm
         | StrOutputParser(),
     ),
     RunnableLambda(lambda x: x["question"]),
 )


-template = """I am a guide for Birmingham, Alabama. I can provide recommendations and insights about the city, including events and activities.
-Ask your question directly, and I'll provide a precise and quick,short and crisp response in a conversational way without any Greet.
-{context}
-Question: {question}
-Answer:"""
+#template = """I am a guide for Birmingham, Alabama. I can provide recommendations and insights about the city, including events and activities.
+#Ask your question directly, and I'll provide a precise and quick,short and crisp response in a conversational way without any Greet.
+#{context}
+#Question: {question}
+#Answer:"""
+
+
+prompt = ChatPromptTemplate.from_messages(
+    [
+        (
+            "system",
+            "I am a guide for Birmingham, Alabama. I can provide recommendations and insights about the city, including events and activities.
+            Ask your question directly, and I'll provide a precise and quick,short and crisp response in a conversational way without any Greet."
+        ),
+        MessagesPlaceholder(variable_name="chat_history"),
+        ("user", "{input}"),
+        MessagesPlaceholder(variable_name="agent_scratchpad"),
+    ]
+)


-qa_prompt = ChatPromptTemplate.from_template(template)
+qa_prompt = ChatPromptTemplate.from_template(prompt)

 # Define the chain for Neo4j-based retrieval and response generation
 chain_neo4j = (
@@ -154,7 +177,7 @@ chain_neo4j = (
         }
     )
     | qa_prompt
-    | chat_model
+    | llm
     | StrOutputParser()
 )
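
As committed, the diff removes the ChatOpenAI import but never shows an import for ChatNVIDIA, assigns llm twice (the second assignment, meta/llama-3.1-8b-instruct, silently overrides the 70B one), and leaves entity_chain pointing at the now commented-out chat_model, which would raise a NameError at import time. A minimal sketch of the intended wiring follows; it assumes the langchain-nvidia-ai-endpoints package, an NVIDIA_API_KEY exported in the environment rather than hardcoded, and a ChatNVIDIA version that supports with_structured_output. The Entities field description and the system line of entity_prompt are placeholders, since the diff does not show them.

    from typing import List

    from pydantic import BaseModel, Field
    from langchain_core.prompts import ChatPromptTemplate
    from langchain_nvidia_ai_endpoints import ChatNVIDIA  # pip install langchain-nvidia-ai-endpoints


    class Entities(BaseModel):
        # Placeholder mirror of the Entities model already defined in app.py.
        names: List[str] = Field(..., description="Entity names mentioned in the question")


    # Reads NVIDIA_API_KEY from the environment; no key is hardcoded here.
    llm = ChatNVIDIA(
        base_url="https://integrate.api.nvidia.com/v1",  # endpoint used in the commit
        model="meta/llama-3.1-70b-instruct",             # one model instead of two competing assignments
        temperature=0,
    )

    entity_prompt = ChatPromptTemplate.from_messages([
        ("system", "Extract entity names from the user's question."),  # placeholder system line
        ("human", "Use the given format to extract information from the following input: {question}"),
    ])

    # Point the extraction chain at the new llm instead of the commented-out chat_model.
    entity_chain = entity_prompt | llm.with_structured_output(Entities)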
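
The new prompt block would also fail as written: the "system" string is split across two source lines without escaping (a SyntaxError), MessagesPlaceholder is used without being imported, and ChatPromptTemplate.from_template(prompt) is then called on an object that is already a ChatPromptTemplate. Meanwhile the surrounding chain still supplies {context} and {question}, not {input}, chat_history, or agent_scratchpad. A sketch that keeps the commit's wording but stays compatible with that chain, assuming the variable names from the existing code:

    from langchain_core.prompts import ChatPromptTemplate

    # Single system string (adjacent literals are concatenated), plus a human turn
    # that carries the {context} and {question} variables the Neo4j chain already fills.
    qa_prompt = ChatPromptTemplate.from_messages(
        [
            (
                "system",
                "I am a guide for Birmingham, Alabama. I can provide recommendations and "
                "insights about the city, including events and activities. Ask your question "
                "directly, and I'll provide a precise, short and crisp response in a "
                "conversational way without any greeting.",
            ),
            ("human", "Context:\n{context}\n\nQuestion: {question}"),
        ]
    )

    # qa_prompt is already a ChatPromptTemplate, so the chain can use it directly:
    # chain_neo4j = ( ... | qa_prompt | llm | StrOutputParser() )

MessagesPlaceholder(variable_name="agent_scratchpad") is only meaningful when the prompt drives an agent executor, so it is dropped in this sketch.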