### title: 010125-daysoff-assistant-api
### file: app.py

import asyncio
import os
import re
import time
import json
import torch
import logging

from api_docs_mck import api_docs_str

import chainlit as cl
from langchain import hub
from langchain.chains import LLMChain, APIChain
from langchain_core.prompts import PromptTemplate
from langchain.memory.buffer import ConversationBufferMemory
from langchain_openai import OpenAI
from langchain_community.llms import HuggingFaceHub
from langchain_huggingface import HuggingFacePipeline
from langchain_huggingface import HuggingFaceEndpoint
from langchain_core.callbacks.streaming_stdout import StreamingStdOutCallbackHandler

# Make sure the logging.debug(...) calls below are actually emitted.
logging.basicConfig(level=logging.DEBUG)

OPENAI_API_KEY = os.environ.get("OPENAI_API_KEY")
#HF_INFERENCE_ENDPOINT =
#BOOKING_ID = re.compile(r'\b[A-Z]{6}\d{6}\b')
#HUGGINGFACEHUB_API_TOKEN = os.environ.get("HUGGINGFACEHUB_API_TOKEN")

BOOKING_KEYWORDS = [
    "booking",
    "bestillingsnummer",
    "bookingen",
    "ordrenummer",
    "reservation",
    "rezerwacji",
    "bookingreferanse",
    "rezerwacja",
    "booket",
    "reservation number",
    "bestilling",
    "order number",
    "booking ID",
    "identyfikacyjny płatności"
]

daysoff_assistant_template = """
You are a customer support assistant (’kundeservice AI assistent’) for Daysoff.no.
By default, you respond in Norwegian, using a warm, direct and professional tone.
Your expertise is exclusively in providing information related to a given booking ID
(’bestillingsnummer’) and booking-related queries such as firmahytteordning and
personvernspolicy. You do not provide information outside of this scope.
If a question is not about booking or booking-related queries, respond with:
"Ønsker du annen informasjon, må du kontakte oss her på kundeservice@daysoff.no"
Chat History: {chat_history}
Question: {question}
Answer:
"""

daysoff_assistant_prompt = PromptTemplate(
    input_variables=["chat_history", "question"],
    template=daysoff_assistant_template
)

api_url_template = """
Given the following API Documentation for Daysoff's official booking information API: {api_docs}
Your task is to construct the most efficient API URL to answer the user's question,
ensuring the call is optimized to include only the necessary information.
Question: {question}
API URL:
"""

api_url_prompt = PromptTemplate(
    input_variables=['api_docs', 'question'],
    template=api_url_template
)

# (..) If {question} contains an alphanumeric identifier consisting of 6 letters
# followed by 6 digits (e.g., DAGHNS116478)
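# The commented-out BOOKING_ID pattern above and the handler further down both rely on
# the same convention: a booking ID is six uppercase letters followed by six digits.
# The helper below is a minimal, illustrative sketch of that check; it is not wired into
# the handlers (they inline the same regex), and the name `extract_booking_id` is an
# assumption of this sketch, not part of the original app.
def extract_booking_id(text: str):
    """Return the first booking ID (e.g. 'DAGHNS116478') found in text, or None."""
    match = re.search(r'\b[A-Z]{6}\d{6}\b', text)
    return match.group(0) if match else None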
api_response_template = """
With the API Documentation for Daysoff's official API: {api_docs}
and the specific user question: {question} in mind,
and given this API URL: {api_url} for querying,
here is the response from Daysoff's API: {api_response}.
Please provide a summary (in Norwegian) that directly addresses the user's question,
omitting technical details like response format, and focusing on delivering the answer
with clarity and conciseness, as if a human customer service agent is providing this information.
Summary:
"""

api_response_prompt = PromptTemplate(
    input_variables=['api_docs', 'question', 'api_url', 'api_response'],
    template=api_response_template
)


@cl.on_chat_start
def setup_multiple_chains():
    llm = OpenAI(
        model='gpt-3.5-turbo-instruct',
        temperature=0.7,
        openai_api_key=OPENAI_API_KEY,
        #max_tokens=512,
        top_p=0.9,
        frequency_penalty=0.5,
        presence_penalty=0.3
    )

    # llm = HuggingFaceEndpoint(
    #     repo_id="google/gemma-2-2b",  # "norallm/normistral-7b-warm-instruct",
    #     endpoint_url="http://localhost:8010/",
    #     model="google/gemma-2-2b",
    #     max_new_tokens=512,
    #     top_k=10,
    #     top_p=0.95,
    #     typical_p=0.95,
    #     temperature=0.7,
    #     repetition_penalty=1.03,
    #     huggingfacehub_api_token=HUGGINGFACEHUB_API_TOKEN,
    #     task="text-generation"
    # )

    # llm = HuggingFacePipeline.from_model_id(
    #     model_id="normistral-7b-warm-instruct",
    #     task="text-generation",
    #     pipeline_kwargs={"max_new_tokens": 10},
    # )

    conversation_memory = ConversationBufferMemory(
        memory_key="chat_history",
        max_len=300,
        return_messages=True,
    )

    llm_chain = LLMChain(
        llm=llm,
        prompt=daysoff_assistant_prompt,
        memory=conversation_memory
    )
    cl.user_session.set("llm_chain", llm_chain)

    api_chain = APIChain.from_llm_and_api_docs(
        llm=llm,
        api_docs=api_docs_str,
        api_url_prompt=api_url_prompt,
        api_response_prompt=api_response_prompt,
        verbose=True,
        limit_to_domains=None  # ["https://670dccd0073307b4ee447f2f.mockapi.io/daysoff/api/V1"]
    )
    cl.user_session.set("api_chain", api_chain)


@cl.on_message
async def handle_message(message: cl.Message):
    user_message = message.content
    llm_chain = cl.user_session.get("llm_chain")
    api_chain = cl.user_session.get("api_chain")

    booking_pattern = r'\b[A-Z]{6}\d{6}\b'
    base_url = "https://670dccd0073307b4ee447f2f.mockapi.io/daysoff/api/V1/booking"

    try:
        if re.search(booking_pattern, user_message):
            booking_id = re.search(booking_pattern, user_message).group(0)
            logging.debug(f"Booking ID detected: {booking_id}")

            url = f"{base_url}?search={booking_id}"
            logging.debug(f"Constructed API URL: {url}")

            # APIChain expects its input under the "question" key; pass the full user
            # message so api_url_prompt can construct the request URL itself.
            response = await api_chain.acall(
                {"question": user_message},
                callbacks=[cl.AsyncLangchainCallbackHandler()]
            )
        else:
            logging.debug("Triggered LLMChain for non-booking query.")
            response = await llm_chain.acall(
                user_message,
                callbacks=[cl.AsyncLangchainCallbackHandler()]
            )
    except Exception as e:
        logging.error(f"Error in processing message: {str(e)}")
        response = {"output": "Jeg får dessverre ikke fram din informasjon akkurat nå."}

    response_key = "output" if "output" in response else "text"
    await cl.Message(response.get(response_key, "")).send()
    return message.content
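# APIChain returns its answer under the "output" key, while LLMChain returns it under
# "text"; the handler above checks for "output" first and falls back to "text".
# A small helper like this one makes that normalization explicit (illustrative only;
# the handler does not call it, and the name `_extract_reply` is an assumption):
def _extract_reply(response: dict) -> str:
    """Return the chain's answer regardless of which output key the chain used."""
    return response.get("output") or response.get("text", "")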
"""
@cl.on_message
async def handle_message(message: cl.Message):
    user_message = message.content
    llm_chain = cl.user_session.get("llm_chain")
    api_chain = cl.user_session.get("api_chain")

    # api_keywords = ["firmahytteordning", "personvernspolicy"]
    # base_url = "https://670dccd0073307b4ee447f2f.mockapi.io/daysoff/api/V1/booking"
    # url = f"{base_url}?search={booking_id}"

    try:
        # --check message for booking ID
        if re.search(r'\b[A-Z]{6}\d{6}\b', user_message):
            logging.debug(f"Booking ID detected in message: {user_message}")
            response = await api_chain.acall(user_message,
                                             callbacks=[cl.AsyncLangchainCallbackHandler()])

        # --check message for keywords
        # elif any(keyword in user_message for keyword in ["firmahytteordning", "personvernspolicy"]):
        #     # any(keyword in user_message for keyword in api_keywords):
        #     logging.debug(f"API keyword detected in message: {user_message}")
        #     response = await api_chain.acall(user_message,
        #                                      callbacks=[cl.AsyncLangchainCallbackHandler()])

        else:
            logging.debug("Triggers LLMChain for everything else.")
            response = await llm_chain.acall(user_message,
                                             callbacks=[cl.AsyncLangchainCallbackHandler()])

    except Exception as e:
        logging.error(f"Error in processing message: {str(e)}")
        response = {"output": "Jeg får dessverre ikke hentet fram din informasjon akkurat nå."}

    response_key = "output" if "output" in response else "text"
    await cl.Message(response.get(response_key, "")).send()
    return message.content
"""

# def is_booking_query(user_message):
#     match = re.search(r'\b[A-Z]{6}\d{6}\b', user_message)
#     return match is not None

# --works boolean
# booked = is_booking_query(user_message)
# if booked:
#     response = await api_chain.acall(user_message, callbacks=[cl.AsyncLangchainCallbackHandler()])
# etc..
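# BOOKING_KEYWORDS is defined at the top of the file but never referenced, and the
# commented-out handler above sketches a keyword-based route to the API chain. Below is
# a hedged sketch of how those two pieces could be combined; it is illustrative only
# (the active handler routes purely on the booking-ID regex), and `mentions_booking`
# is a name introduced here, not part of the original app.
def mentions_booking(user_message: str) -> bool:
    """True if the message contains any of the booking-related keywords."""
    lowered = user_message.lower()
    return any(keyword.lower() in lowered for keyword in BOOKING_KEYWORDS)

# e.g., inside handle_message one could add a branch such as:
#   elif mentions_booking(user_message):
#       response = await api_chain.acall({"question": user_message},
#                                        callbacks=[cl.AsyncLangchainCallbackHandler()])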