Update app.py
app.py
CHANGED
@@ -38,10 +38,20 @@ from api_docs_mck import api_docs_str
 from personvernspolicy import instruction_text_priv, personvernspolicy_data
 from frequently_asked_questions import instruction_text_faq, faq
 
-
 OPENAI_API_KEY = os.environ.get("OPENAI_API_KEY")
 
-
+class LLMChainConfig(BaseModel):
+    model_config = ConfigDict(extra='allow')
+    instruction_text_faq: str
+    faq: dict
+    instruction_text_priv: str
+    personvernspolicy_data: dict
+
+    instruction_text_faq = instruction_text_faq,
+    faq = faq,
+    instruction_text_priv = instruction_text_priv,
+    personvernspolicy_data = personvernspolicy_data
+
 daysoff_assistant_template = """
 You are a customer support assistant ("kundeservice AI assistent") for Daysoff.
 By default, you respond in Norwegian language, using a warm, direct, and professional tone.
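Note on the relocated config model: the new version attaches the imported values inside the class body with trailing commas (faq = faq, and so on), which Python evaluates as one-element tuples, so the class-level defaults become tuples rather than the str/dict values the annotations declare. A minimal sketch of the presumably intended pattern, instantiating the Pydantic model once with the imported data; the chain_config name is illustrative and not part of the commit:

# Sketch only, not the committed code.
from pydantic import BaseModel, ConfigDict
from personvernspolicy import instruction_text_priv, personvernspolicy_data
from frequently_asked_questions import instruction_text_faq, faq

class LLMChainConfig(BaseModel):
    model_config = ConfigDict(extra='allow')  # tolerate extra keys passed at runtime
    instruction_text_faq: str
    faq: dict
    instruction_text_priv: str
    personvernspolicy_data: dict

# Build one validated config object from the app's own data modules.
chain_config = LLMChainConfig(
    instruction_text_faq=instruction_text_faq,
    faq=faq,
    instruction_text_priv=instruction_text_priv,
    personvernspolicy_data=personvernspolicy_data,
)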
@@ -74,8 +84,6 @@ API URL:
 """
 api_url_prompt = PromptTemplate(input_variables=['api_docs', 'question'],
                                 template=api_url_template)
-
-# If the response includes booking information, provide the information verbatim (do not summarize it.)
 
 api_response_template = """
 With the API Documentation for Daysoff's official API: {api_docs} in mind,
@@ -93,19 +101,6 @@ api_response_prompt = PromptTemplate(
     template=api_response_template
 )
 
-# ---------------------------------------------------------------------------------------------------------
-# 100 tokens ≈ 75 words
-# system prompt(s), total = 330 tokens
-# average api response = 250-300 tokens (current)
-# user input "reserved" = 400 tokens (300 words max. /English; Polish, Norwegian {..}?@tiktokenizer), could be reduced to 140 tokens ≈ 105 words
-# model output (max_tokens) = 2048
-
-# ConversationBufferMemory = maintains raw chat history; crucial for "nuanced" follow-ups (e.g. "nuanced" ~ for non-English inputs)
-# ConversationTokenBufferMemory (max_token_limit) = 1318 (gives space in chat_history for approximately 10-15 exchanges, assuming ~100 tokens/exchange)
-# ConversationSummaryMemory = scalable approach, especially useful for extended or complex interactions, caveat: loss of granular context
-# ---------------------------------------------------------------------------------------------------------
-
-
 @cl.on_chat_start
 def setup_multiple_chains():
 
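The comment block removed above carries the token-budget reasoning (roughly 100 tokens per 75 words, about 330 tokens of system prompts, 250-300 tokens per API response, 400 tokens reserved for user input, max_tokens = 2048 for output) and weighs three memory strategies. A minimal sketch of the token-bounded option those notes describe, assuming the langchain.memory import path and that llm is the model object created in setup_multiple_chains():

# Sketch only, not the committed code.
from langchain.memory import ConversationTokenBufferMemory

# Bound chat_history by tokens rather than message count:
# 1318 tokens leaves room for roughly 10-15 exchanges at ~100 tokens each.
conversation_memory = ConversationTokenBufferMemory(
    llm=llm,                      # the buffer uses the model to count tokens
    memory_key="chat_history",
    max_token_limit=1318,
    return_messages=True,
)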
@@ -118,31 +113,11 @@ def setup_multiple_chains():
         frequency_penalty=0.1,
         presence_penalty=0.1
     )
-
-    # --ConversationBufferMemory
+
     conversation_memory = ConversationBufferMemory(memory_key="chat_history",
                                                    max_len=30,  # --retains only the last 30 exchanges
                                                    return_messages=True,
                                                    )
-
-    # --ConversationTokenBufferMemory
-    #conversation_memory = ConversationTokenBufferMemory(memory_key="chat_history",
-                                                         #max_token_limit=1318,
-                                                         #return_messages=True,
-                                                         #)
-
-    # --ConversationSummaryMemory
-    #conversation_memory = ConversationSummaryMemory(memory_key="chat_history",
-                                                     #return_messages=True,
-                                                     #)
-
-
-    class LLMChainConfig(BaseModel):
-        model_config = ConfigDict(extra='allow')
-        instruction_text_faq: str
-        faq: dict
-        instruction_text_priv: str
-        personvernspolicy_data: dict
 
     llm_chain = LLMChain(
         llm=llm,
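One caveat on the memory call kept above: max_len does not appear to be a ConversationBufferMemory field, so the in-line comment about retaining only the last 30 exchanges is unlikely to hold; a plain buffer keeps the full history. If a fixed 30-exchange window is the actual goal, a windowed buffer states that directly. A sketch of that alternative, not what the commit uses:

# Sketch only, not the committed code.
from langchain.memory import ConversationBufferWindowMemory

# Keep only the k most recent exchanges in chat_history.
conversation_memory = ConversationBufferWindowMemory(
    k=30,
    memory_key="chat_history",
    return_messages=True,
)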
|