RubenAMtz committed on
Commit
f7c72fc
·
1 Parent(s): 569b62c

switched from OpenAI to Azure OpenAI

Browse files
Files changed (1) hide show
  1. app.py +14 -13
app.py CHANGED
@@ -9,21 +9,21 @@ from dotenv import load_dotenv
9
  import arxiv
10
  import pinecone
11
  from langchain.embeddings.openai import OpenAIEmbeddings
 
 
12
  from langchain.embeddings import CacheBackedEmbeddings
13
  from langchain.storage import LocalFileStore, InMemoryStore
14
  from utils.store import index_documents, search_and_index
15
  from utils.chain import create_chain
16
  from langchain.vectorstores import Pinecone
17
- from langchain.chat_models import ChatOpenAI
18
  from langchain.schema.runnable import RunnableSequence
19
  from langchain.schema import format_document
20
  from pprint import pprint
21
  from langchain_core.vectorstores import VectorStoreRetriever
22
  import langchain
23
  from langchain.cache import InMemoryCache
24
- from langchain_core.messages.human import HumanMessage
25
  from langchain.memory import ConversationBufferMemory
26
- from chainlit import make_async
27
 
28
  load_dotenv()
29
  YOUR_API_KEY = os.environ["PINECONE_API_KEY"]
@@ -42,14 +42,16 @@ async def start_chat():
42
  }
43
 
44
  await cl.Message(
45
- content="What would you like to learn about today? 😊"
46
  ).send()
47
 
48
  # create an embedder through a cache interface (locally) (on start)
49
  store = InMemoryStore()
50
 
51
- core_embeddings_model = OpenAIEmbeddings(
52
- api_key=os.environ['OPENAI_API_KEY']
 
 
53
  )
54
 
55
  embedder = CacheBackedEmbeddings.from_bytes_store(
@@ -71,13 +73,13 @@ async def start_chat():
71
  dimension=1536
72
  )
73
  index = pinecone.GRPCIndex(INDEX_NAME)
74
-
75
- # setup your ChatOpenAI model (on start)
76
- llm = ChatOpenAI(
77
- model=settings['model'],
78
  temperature=settings['temperature'],
79
  max_tokens=settings['max_tokens'],
80
- api_key=os.environ["OPENAI_API_KEY"],
 
 
81
  streaming=True
82
  )
83
 
@@ -89,7 +91,6 @@ async def start_chat():
89
  os.environ["LANGCHAIN_WANDB_TRACING"] = "true"
90
 
91
  # setup memory
92
-
93
  memory = ConversationBufferMemory(memory_key="chat_history")
94
 
95
  tools = {
@@ -102,7 +103,7 @@ async def start_chat():
102
  cl.user_session.set("settings", settings)
103
  cl.user_session.set("first_run", False)
104
 
105
- @cl.on_settings_update
106
  @cl.on_message # marks a function that should be run each time the chatbot receives a message from a user
107
  async def main(message: cl.Message):
108
  settings = cl.user_session.get("settings")
 
9
  import arxiv
10
  import pinecone
11
  from langchain.embeddings.openai import OpenAIEmbeddings
12
+ from langchain.embeddings.azure_openai import AzureOpenAIEmbeddings
13
+ from langchain.chat_models import ChatOpenAI, AzureChatOpenAI
14
  from langchain.embeddings import CacheBackedEmbeddings
15
  from langchain.storage import LocalFileStore, InMemoryStore
16
  from utils.store import index_documents, search_and_index
17
  from utils.chain import create_chain
18
  from langchain.vectorstores import Pinecone
19
+
20
  from langchain.schema.runnable import RunnableSequence
21
  from langchain.schema import format_document
22
  from pprint import pprint
23
  from langchain_core.vectorstores import VectorStoreRetriever
24
  import langchain
25
  from langchain.cache import InMemoryCache
 
26
  from langchain.memory import ConversationBufferMemory
 
27
 
28
  load_dotenv()
29
  YOUR_API_KEY = os.environ["PINECONE_API_KEY"]
 
42
  }
43
 
44
  await cl.Message(
45
+ content="Hi, I am here to help you learn about a topic, what would you like to learn about today? 😊"
46
  ).send()
47
 
48
  # create an embedder through a cache interface (locally) (on start)
49
  store = InMemoryStore()
50
 
51
+ core_embeddings_model = AzureOpenAIEmbeddings(
52
+ api_key=os.environ['AZURE_OPENAI_API_KEY'],
53
+ azure_deployment="text-embedding-ada-002",
54
+ azure_endpoint=os.environ['AZURE_OPENAI_ENDPOINT']
55
  )
56
 
57
  embedder = CacheBackedEmbeddings.from_bytes_store(
 
73
  dimension=1536
74
  )
75
  index = pinecone.GRPCIndex(INDEX_NAME)
76
+
77
+ llm = AzureChatOpenAI(
 
 
78
  temperature=settings['temperature'],
79
  max_tokens=settings['max_tokens'],
80
+ api_key=os.environ['AZURE_OPENAI_API_KEY'],
81
+ azure_deployment="gpt-35-turbo-16k",
82
+ api_version="2023-07-01-preview",
83
  streaming=True
84
  )
85
 
 
91
  os.environ["LANGCHAIN_WANDB_TRACING"] = "true"
92
 
93
  # setup memory
 
94
  memory = ConversationBufferMemory(memory_key="chat_history")
95
 
96
  tools = {
 
103
  cl.user_session.set("settings", settings)
104
  cl.user_session.set("first_run", False)
105
 
106
+
107
  @cl.on_message # marks a function that should be run each time the chatbot receives a message from a user
108
  async def main(message: cl.Message):
109
  settings = cl.user_session.get("settings")