Update app.py
app.py CHANGED
@@ -26,10 +26,10 @@ from langchain_huggingface import HuggingFacePipeline
 from langchain_huggingface import HuggingFaceEndpoint
 from langchain_core.callbacks.streaming_stdout import StreamingStdOutCallbackHandler
 
-HUGGINGFACEHUB_API_TOKEN = os.environ.get("HUGGINGFACEHUB_API_TOKEN")
 OPENAI_API_KEY = os.environ.get("OPENAI_API_KEY")
 #HF_INFERENCE_ENDPOINT =
 #BOOKING_ID = re.compile(r'\b[A-Z]{6}\d{6}\b')
+#HUGGINGFACEHUB_API_TOKEN = os.environ.get("HUGGINGFACEHUB_API_TOKEN")
 
 BOOKING_KEYWORDS = [
     "booking",
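The commented-out BOOKING_ID pattern matches the identifier format the API prompt describes further down (six uppercase letters followed by six digits, e.g. DAGHNS116478). A minimal sketch of how it could be applied if re-enabled; extract_booking_id is a hypothetical helper, not part of this file:

import re

# Same pattern as the commented-out line above: 6 uppercase letters + 6 digits.
BOOKING_ID = re.compile(r'\b[A-Z]{6}\d{6}\b')

def extract_booking_id(question: str):
    """Return the first booking ID found in the question, or None."""
    match = BOOKING_ID.search(question)
    return match.group(0) if match else None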
@@ -49,12 +49,12 @@ BOOKING_KEYWORDS = [
 ]
 
 daysoff_assistant_template = """
-You are a customer support assistant (’kundeservice AI assistent’) for Daysoff
-By default, you respond in Norwegian language, using a warm, direct and professional tone.
-is in
-
-
-
+You are a customer support assistant (’kundeservice AI assistent’) for Daysoff.no
+By default, you respond in Norwegian language, using a warm, direct and professional tone.
+Your expertise is exclusively in providing information related to a given booking ID (’bestillingsnummer’)
+and booking-related queries such as firmahytteordning and personvernspolicy.
+You do not provide information outside of this scope. If a question is not about booking or booking-related queries,
+respond with, "Ønsker du annen informasjon, må du kontakte oss her på [email protected]"
 Chat History: {chat_history}
 Question: {question}
 Answer:
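For reference, a chat template like this is normally wrapped in a LangChain PromptTemplate whose input variables mirror its placeholders ({chat_history} and {question}). A minimal sketch, assuming the same PromptTemplate class already used for api_url_prompt; the name daysoff_assistant_prompt is an assumption:

from langchain.prompts import PromptTemplate

# Sketch: expose exactly the placeholders the template above uses.
daysoff_assistant_prompt = PromptTemplate(
    input_variables=['chat_history', 'question'],
    template=daysoff_assistant_template,
)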
@@ -80,13 +80,13 @@ api_url_prompt = PromptTemplate(input_variables=['api_docs', 'question'],
 # (..) If {question} contains an alphanumeric identifier consisting of 6 letters followed by 6 digits (e.g., DAGHNS116478)
 api_response_template = """
 With the API Documentation for Daysoff's official API: {api_docs} in mind,
-and user question: {question}
+and the specific user question: {question} in mind,
+and given this API URL: {api_url} for querying,
 here is the response from Daysoff's API: {api_response}.
-Please provide an summary that directly addresses the user's question,
+Please provide a summary (in Norwegian) that directly addresses the user's question,
 omitting technical details like response format, and
 focusing on delivering the answer with clarity and conciseness,
 as if a human customer service agent is providing this information.
-By default, you respond in Norwegian language.
 Summary:
 """
 
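Because the revised api_response_template now interpolates {api_url} in addition to {api_docs}, {question} and {api_response}, the api_response_prompt that the next hunk header shows being constructed needs all four names among its input variables. A sketch under that assumption:

from langchain.prompts import PromptTemplate

# Sketch: one input variable per placeholder used in api_response_template.
api_response_prompt = PromptTemplate(
    input_variables=['api_docs', 'question', 'api_url', 'api_response'],
    template=api_response_template,
)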
@@ -98,8 +98,16 @@ api_response_prompt = PromptTemplate(
 @cl.on_chat_start
 def setup_multiple_chains():
 
-    llm = OpenAI(
-
+    llm = OpenAI(
+        model='gpt-3.5-turbo-instruct',
+        temperature=0.7,
+        openai_api_key=OPENAI_API_KEY,
+        #max_tokens=512,
+        top_p=0.9,
+        repetition_penalty=1.03,
+        presence_penalty=0.3
+    )
+
 
     #llm = HuggingFaceEndpoint(
     #repo_id="google/gemma-2-2b", #"norallm/normistral-7b-warm-instruct",
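A caveat on the new llm block: repetition_penalty is a Hugging Face-style generation argument rather than an OpenAI completions parameter, so the OpenAI wrapper will not treat it as a first-class field; the closest documented OpenAI knob is frequency_penalty. A hedged sketch of the same setup restricted to documented parameters, assuming the OpenAI class comes from langchain_openai; the 0.3 value is only carried over for illustration:

from langchain_openai import OpenAI

llm = OpenAI(
    model='gpt-3.5-turbo-instruct',
    temperature=0.7,
    openai_api_key=OPENAI_API_KEY,
    top_p=0.9,
    frequency_penalty=0.3,   # stands in for the unsupported repetition_penalty
    presence_penalty=0.3,
)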
@@ -123,7 +131,7 @@ def setup_multiple_chains():
     #)
 
     conversation_memory = ConversationBufferMemory(memory_key="chat_history",
-                                                   max_len=
+                                                   max_len=300,
                                                    return_messages=True,
                                                    )
     llm_chain = LLMChain(llm=llm,
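Similarly, max_len is not a documented ConversationBufferMemory parameter; if the intent is to bound the stored history, the documented options are ConversationBufferWindowMemory (last k exchanges) or ConversationTokenBufferMemory (a token budget). A minimal sketch assuming a window is what is wanted; k=30 is illustrative:

from langchain.memory import ConversationBufferWindowMemory

conversation_memory = ConversationBufferWindowMemory(
    memory_key="chat_history",
    k=30,                   # keep only the most recent exchanges
    return_messages=True,
)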
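The llm_chain construction is cut off in this hunk; the usual continuation is to pair the LLM with the assistant prompt and the conversation memory, and to register the chain on the Chainlit user session so the message handler can retrieve it. A sketch of that common pattern, not the file's actual code; daysoff_assistant_prompt is the hypothetical prompt object from the earlier sketch:

import chainlit as cl
from langchain.chains import LLMChain

llm_chain = LLMChain(
    llm=llm,
    prompt=daysoff_assistant_prompt,
    memory=conversation_memory,
)

# Store the chain so it can be fetched later inside @cl.on_message handlers.
cl.user_session.set("llm_chain", llm_chain)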