diff --git a/.gradio/certificate.pem b/.gradio/certificate.pem new file mode 100644 index 0000000000000000000000000000000000000000..b85c8037f6b60976b2546fdbae88312c5246d9a3 --- /dev/null +++ b/.gradio/certificate.pem @@ -0,0 +1,31 @@ +-----BEGIN CERTIFICATE----- +MIIFazCCA1OgAwIBAgIRAIIQz7DSQONZRGPgu2OCiwAwDQYJKoZIhvcNAQELBQAw +TzELMAkGA1UEBhMCVVMxKTAnBgNVBAoTIEludGVybmV0IFNlY3VyaXR5IFJlc2Vh +cmNoIEdyb3VwMRUwEwYDVQQDEwxJU1JHIFJvb3QgWDEwHhcNMTUwNjA0MTEwNDM4 +WhcNMzUwNjA0MTEwNDM4WjBPMQswCQYDVQQGEwJVUzEpMCcGA1UEChMgSW50ZXJu +ZXQgU2VjdXJpdHkgUmVzZWFyY2ggR3JvdXAxFTATBgNVBAMTDElTUkcgUm9vdCBY +MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAK3oJHP0FDfzm54rVygc +h77ct984kIxuPOZXoHj3dcKi/vVqbvYATyjb3miGbESTtrFj/RQSa78f0uoxmyF+ +0TM8ukj13Xnfs7j/EvEhmkvBioZxaUpmZmyPfjxwv60pIgbz5MDmgK7iS4+3mX6U +A5/TR5d8mUgjU+g4rk8Kb4Mu0UlXjIB0ttov0DiNewNwIRt18jA8+o+u3dpjq+sW +T8KOEUt+zwvo/7V3LvSye0rgTBIlDHCNAymg4VMk7BPZ7hm/ELNKjD+Jo2FR3qyH +B5T0Y3HsLuJvW5iB4YlcNHlsdu87kGJ55tukmi8mxdAQ4Q7e2RCOFvu396j3x+UC +B5iPNgiV5+I3lg02dZ77DnKxHZu8A/lJBdiB3QW0KtZB6awBdpUKD9jf1b0SHzUv +KBds0pjBqAlkd25HN7rOrFleaJ1/ctaJxQZBKT5ZPt0m9STJEadao0xAH0ahmbWn +OlFuhjuefXKnEgV4We0+UXgVCwOPjdAvBbI+e0ocS3MFEvzG6uBQE3xDk3SzynTn +jh8BCNAw1FtxNrQHusEwMFxIt4I7mKZ9YIqioymCzLq9gwQbooMDQaHWBfEbwrbw +qHyGO0aoSCqI3Haadr8faqU9GY/rOPNk3sgrDQoo//fb4hVC1CLQJ13hef4Y53CI +rU7m2Ys6xt0nUW7/vGT1M0NPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV +HRMBAf8EBTADAQH/MB0GA1UdDgQWBBR5tFnme7bl5AFzgAiIyBpY9umbbjANBgkq +hkiG9w0BAQsFAAOCAgEAVR9YqbyyqFDQDLHYGmkgJykIrGF1XIpu+ILlaS/V9lZL +ubhzEFnTIZd+50xx+7LSYK05qAvqFyFWhfFQDlnrzuBZ6brJFe+GnY+EgPbk6ZGQ +3BebYhtF8GaV0nxvwuo77x/Py9auJ/GpsMiu/X1+mvoiBOv/2X/qkSsisRcOj/KK +NFtY2PwByVS5uCbMiogziUwthDyC3+6WVwW6LLv3xLfHTjuCvjHIInNzktHCgKQ5 +ORAzI4JMPJ+GslWYHb4phowim57iaztXOoJwTdwJx4nLCgdNbOhdjsnvzqvHu7Ur +TkXWStAmzOVyyghqpZXjFaH3pO3JLF+l+/+sKAIuvtd7u+Nxe5AW0wdeRlN8NwdC +jNPElpzVmbUq4JUagEiuTDkHzsxHpFKVK7q4+63SM1N95R1NbdWhscdCb+ZAJzVc +oyi3B43njTOQ5yOf+1CceWxG1bQVs5ZufpsMljq4Ui0/1lvh+wjChP4kqKOJ2qxq 
+4RgqsahDYVvTH9w7jXbyLeiNdd8XM2w9U/t7y0Ff/9yi0GE44Za4rF2LN9d11TPA +mRGunUHBcnWEvgJBQl9nJEiU0Zsnvgc/ubhPgXRR4Xq37Z0j4r7g1SgEEzwxA57d +emyPxgcYxn/eR44/KJ4EBs+lVDR3veyJm+kXQ99b21/+jh5Xos1AnX5iItreGCc= +-----END CERTIFICATE----- diff --git a/app.py b/app.py new file mode 100644 index 0000000000000000000000000000000000000000..8d1b547841576f4890219331f16f5afebe0aa04c --- /dev/null +++ b/app.py @@ -0,0 +1,95 @@ +from erp_core.node_builder import compile_graph +from erp_core._event import _print_event +from erp_core.asr_and_tts import transcribe, tts +import gradio as gr +import time + +# Function to initialize a new chat state +def new_chat(): + thread_id = int(time.time() * 1000) + graph = compile_graph() + message_history = [] + tool_output = None + print("New Chat Initialized") + return { + "thread_id": thread_id, + "graph": graph, + "message_history": message_history, + "tool_output": tool_output, + "assistant_state": "primary_assistant", + "previous_state": "primary_assistant", + "tts_audio": None, + }, [] + +# Main processing function +def run(audio, state): + try: + if audio is None: + return state["assistant_state"], state["message_history"], state["tts_audio"], None, state["tool_output"] + + user_input = transcribe(audio) + print("User:", user_input) + + for event in state["graph"].stream( + {"messages": ("user", user_input)}, + config={"configurable": {"thread_id": state["thread_id"]}}, + ): + for value in event.values(): + if "messages" in value: + _printed = set() + assistant_states, assistant_messages = _print_event(value, _printed) + assistant_message = assistant_messages.content + print("State:", assistant_states) + print("Message:", assistant_messages) + if assistant_states is None: + state["assistant_state"] = state["previous_state"] + else: + state["previous_state"] = assistant_states + state["assistant_state"] = assistant_states + if assistant_states is None and "tool_call_id" not in assistant_messages: + state["tts_audio"] = tts(assistant_message) + if 
assistant_message == "" and assistant_states is None: + # print("\u001b[31mTool Call ID:\u001b[0m", assistant_messages.additional_kwargs) + state["tool_output"] = assistant_messages.additional_kwargs["tool_calls"] + + state["message_history"].append({"role": "user", "content": user_input}) + state["message_history"].append({"role": "assistant", "content": assistant_message}) + + return ( + state["assistant_state"], + state["message_history"], + None, # Clear audio input + None, + state["tool_output"], + ) + except Exception as e: + print(e) + return None, [], None, None, None # Clear audio input on error + +# Gradio interface +with gr.Blocks() as demo: + chatbot_state = gr.State(new_chat) # Initialize new state per session + + with gr.Row(): + with gr.Column(): + assistant_state_output = gr.Textbox(label="Current Assistant", interactive=False) + tool_output = gr.Textbox(label="Tool Output", interactive=False) + tts_output = gr.Audio(type="filepath", label="Assistant Voice Output", autoplay=True) + with gr.Column(): + chatbot = gr.Chatbot(label="Conversation", type="messages") + + audio_input = gr.Audio(sources="microphone", type="numpy", label="Speak", streaming=False) + + audio_input.change( + fn=run, + inputs=[audio_input, chatbot_state], # Pass state as input + outputs=[assistant_state_output, chatbot, tts_output, audio_input, tool_output], + ) + + button = gr.Button("Start Chat/New Chat") + button.click( + fn=new_chat, + outputs=[chatbot_state, chatbot] # Reset state + ) + +demo.launch(share=True) diff --git a/erp_core/Tools/__pycache__/customer_relationship_management.cpython-311.pyc b/erp_core/Tools/__pycache__/customer_relationship_management.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..1a50b9aefae8054676d812f341ee3adba4783c8c Binary files /dev/null and b/erp_core/Tools/__pycache__/customer_relationship_management.cpython-311.pyc differ diff --git a/erp_core/Tools/__pycache__/finalcial_management.cpython-311.pyc 
b/erp_core/Tools/__pycache__/finalcial_management.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..deaa3cca552eb4e63790498e9d49c0ad569aa5f7 Binary files /dev/null and b/erp_core/Tools/__pycache__/finalcial_management.cpython-311.pyc differ diff --git a/erp_core/Tools/__pycache__/human_resource.cpython-311.pyc b/erp_core/Tools/__pycache__/human_resource.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6436fe0e41088caa9f06cac7e381ca78a18c1657 Binary files /dev/null and b/erp_core/Tools/__pycache__/human_resource.cpython-311.pyc differ diff --git a/erp_core/Tools/__pycache__/project_management.cpython-311.pyc b/erp_core/Tools/__pycache__/project_management.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..2c2983cef56555d53407a468287fa21f2bd05aca Binary files /dev/null and b/erp_core/Tools/__pycache__/project_management.cpython-311.pyc differ diff --git a/erp_core/Tools/__pycache__/supply_chain_management.cpython-311.pyc b/erp_core/Tools/__pycache__/supply_chain_management.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a9207f74f9340f523c69aa003b0589ed4bf8e56e Binary files /dev/null and b/erp_core/Tools/__pycache__/supply_chain_management.cpython-311.pyc differ diff --git a/erp_core/Tools/customer_relationship_management.py b/erp_core/Tools/customer_relationship_management.py new file mode 100644 index 0000000000000000000000000000000000000000..4119bff0aab7e20fe66b5f027754686a776a2f0f --- /dev/null +++ b/erp_core/Tools/customer_relationship_management.py @@ -0,0 +1,14 @@ +from langchain_core.tools import tool + +@tool +def customer_support(user_info: str): + """Provide customer support.""" + return { + "dialog_state": ["Customer_Relationship_Management"], + "messages": [ + { + "type": "text", + "content": "Providing customer support" + } + ] + } \ No newline at end of file diff --git a/erp_core/Tools/finalcial_management.py 
b/erp_core/Tools/finalcial_management.py new file mode 100644 index 0000000000000000000000000000000000000000..419e8d2ae51954d8cbb72caa226fcb55d2d51bac --- /dev/null +++ b/erp_core/Tools/finalcial_management.py @@ -0,0 +1,26 @@ +from langchain_core.tools import tool + +@tool +def register_purchase_request(user_info: str): + """Register a purchase request.""" + return { + "dialog_state": ["Financial_Management"], + "messages": [ + { + "type": "text", + "content": "Registering a purchase request" + } + ] + } +@tool +def view_expense_report(user_info: str): + """View an expense report.""" + return { + "dialog_state": ["Financial_Management"], + "messages": [ + { + "type": "text", + "content": "Viewing an expense report" + } + ] + } \ No newline at end of file diff --git a/erp_core/Tools/human_resource.py b/erp_core/Tools/human_resource.py new file mode 100644 index 0000000000000000000000000000000000000000..d0f10ed37ca798cbc72ad8564321928c917e1aca --- /dev/null +++ b/erp_core/Tools/human_resource.py @@ -0,0 +1,27 @@ +from langchain_core.tools import tool + +@tool +def employee_database_access(user_info: str): + """Access the employee database.""" + return { + "dialog_state": ["Human_Resource"], + "messages": [ + { + "type": "text", + "content": "Accessing the employee database" + } + ] + } + +@tool +def leave_management(user_info: str): + """Enter the leave management department.""" + return { + "dialog_state": ["Human_Resource"], + "messages": [ + { + "type": "text", + "content": "Entering the leave management department" + } + ] + } \ No newline at end of file diff --git a/erp_core/Tools/project_management.py b/erp_core/Tools/project_management.py new file mode 100644 index 0000000000000000000000000000000000000000..8b84cc1744735c2a43227d641a3f06c6d4fbf628 --- /dev/null +++ b/erp_core/Tools/project_management.py @@ -0,0 +1,14 @@ +from langchain_core.tools import tool + +@tool +def project_status_check(project_name: str, status: str) -> str: + """Check the status of a 
project.""" + return { + "dialog_state": ["Project_Management"], + "messages": [ + { + "type": "text", + "content": f"The status of {project_name} is {status}." + } + ] + } diff --git a/erp_core/Tools/supply_chain_management.py b/erp_core/Tools/supply_chain_management.py new file mode 100644 index 0000000000000000000000000000000000000000..2f0aeb7b1f0ed4d5e5b9706283f8e585d5001063 --- /dev/null +++ b/erp_core/Tools/supply_chain_management.py @@ -0,0 +1,14 @@ +from langchain_core.tools import tool + +@tool +def product_quantity_check(product_name: str, quantity: int) -> str: + """Check the quantity of a product in the supply chain.""" + return { + "dialog_state": ["Supply_Chain_Management"], + "messages": [ + { + "type": "text", + "content": f"The quantity of {product_name} is {quantity}." + } + ] + } diff --git a/erp_core/__pycache__/_event.cpython-311.pyc b/erp_core/__pycache__/_event.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a43fa2c3f5037ef938ee5e6933bd9ebfc570929c Binary files /dev/null and b/erp_core/__pycache__/_event.cpython-311.pyc differ diff --git a/erp_core/__pycache__/_llm.cpython-311.pyc b/erp_core/__pycache__/_llm.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..93efe8c4661294d0e9e8451f7c73c21bb24f86da Binary files /dev/null and b/erp_core/__pycache__/_llm.cpython-311.pyc differ diff --git a/erp_core/__pycache__/asr_and_tts.cpython-311.pyc b/erp_core/__pycache__/asr_and_tts.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..51053f01cad0894c9a37efbdb9903aef802c6901 Binary files /dev/null and b/erp_core/__pycache__/asr_and_tts.cpython-311.pyc differ diff --git a/erp_core/__pycache__/assistant_class.cpython-311.pyc b/erp_core/__pycache__/assistant_class.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..4636f0ab1bf0e9cef38f77923bc41d75a830c76b Binary files /dev/null and 
b/erp_core/__pycache__/assistant_class.cpython-311.pyc differ diff --git a/erp_core/__pycache__/config.cpython-311.pyc b/erp_core/__pycache__/config.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f11278d9ac905c62b14ba6f0511ddc42186c7697 Binary files /dev/null and b/erp_core/__pycache__/config.cpython-311.pyc differ diff --git a/erp_core/__pycache__/entry_node.cpython-311.pyc b/erp_core/__pycache__/entry_node.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..862eb7d5c99df3ae9895b31c3e7e64aad7e4e952 Binary files /dev/null and b/erp_core/__pycache__/entry_node.cpython-311.pyc differ diff --git a/erp_core/__pycache__/node_builder.cpython-311.pyc b/erp_core/__pycache__/node_builder.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..2031a73096c8d8f5a24350d9b7a2848b6259975e Binary files /dev/null and b/erp_core/__pycache__/node_builder.cpython-311.pyc differ diff --git a/erp_core/__pycache__/state_definer.cpython-311.pyc b/erp_core/__pycache__/state_definer.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f07c61e79110ff060bd43dc51dd3115c6f6e0850 Binary files /dev/null and b/erp_core/__pycache__/state_definer.cpython-311.pyc differ diff --git a/erp_core/_event.py b/erp_core/_event.py new file mode 100644 index 0000000000000000000000000000000000000000..331d7c47b070ab7bd46d1632d89688cfd21a65f9 --- /dev/null +++ b/erp_core/_event.py @@ -0,0 +1,39 @@ +from langchain_core.messages import ToolMessage +from langchain_core.runnables import RunnableLambda + +from langgraph.prebuilt import ToolNode + + +def handle_tool_error(state) -> dict: + error = state.get("error") + tool_calls = state["messages"][-1].tool_calls + return { + "messages": [ + ToolMessage( + content=f"Error: {repr(error)}\n please fix your mistakes.", + tool_call_id=tc["id"], + ) + for tc in tool_calls + ] + } + +def create_tool_node_with_fallback(tools: list) -> dict: + 
return ToolNode(tools).with_fallbacks( + [RunnableLambda(handle_tool_error)], exception_key="error" + ) + +def _print_event(event: dict, _printed: set, max_length=1500): + current_state = event.get("dialog_state") + # if current_state: + # print("Currently in: ", current_state) + message = event.get("messages") + if message: + if isinstance(message, list): + message = message[-1] + if message.id not in _printed: + msg_repr = message.pretty_repr(html=True) + if len(msg_repr) > max_length: + msg_repr = msg_repr[:max_length] + " ... (truncated)" + # print(msg_repr) + _printed.add(message.id) + return current_state, message diff --git a/erp_core/_llm.py b/erp_core/_llm.py new file mode 100644 index 0000000000000000000000000000000000000000..c138460872548b534e301cc763fe84ef4f73ac7b --- /dev/null +++ b/erp_core/_llm.py @@ -0,0 +1,12 @@ +# from dotenv import load_dotenv +# from langchain_anthropic import ChatAnthropic +from langchain_openai import ChatOpenAI +import erp_core.config as cfg +import os + + +# load_dotenv(override=True) +api_key = os.getenv('OPENAI_API_KEY') + +# llm = ChatAnthropic(model=cfg.anthropic_model_name, temperature=1) +llm = ChatOpenAI(model=cfg.model_name, temperature=0) diff --git a/erp_core/asr_and_tts.py b/erp_core/asr_and_tts.py new file mode 100644 index 0000000000000000000000000000000000000000..6192dd716e486429209d3886ead36ba23ad4f6b8 --- /dev/null +++ b/erp_core/asr_and_tts.py @@ -0,0 +1,62 @@ +import os +# from dotenv import load_dotenv +import tempfile +import scipy.io.wavfile as wavfile +from openai import OpenAI +from elevenlabs import ElevenLabs, VoiceSettings, play, stream + +# Load API keys from .env file +# load_dotenv(override=True) +openai_api_key = os.getenv('OPENAI_API_KEY') +elevenlabs_api_key = os.getenv('ELEVENLABS_API_KEY') + +# Initialize clients +openai_client = OpenAI() +elevenlabs_client = ElevenLabs(api_key=elevenlabs_api_key) + +# Function to transcribe audio using OpenAI Whisper API +def transcribe(audio): + if audio 
is None: + return "No audio provided.", None + + # Audio is received as a tuple (sample_rate, audio_data) + sample_rate, audio_data = audio + + # Save the audio data to a temporary file + with tempfile.NamedTemporaryFile(suffix=".wav", delete=False) as temp_file: + wavfile.write(temp_file.name, sample_rate, audio_data) + temp_file_path = temp_file.name + + # Transcribe the audio file using OpenAI Whisper API + with open(temp_file_path, "rb") as audio_file: + transcription_response = openai_client.audio.transcriptions.create( + model="whisper-1", + file=audio_file, + language="en", + ) + + transcription_text = transcription_response.text + return transcription_text + +def tts(response_text): + # Now, use ElevenLabs to convert the transcription text to speech + tts_response = elevenlabs_client.text_to_speech.convert( + voice_id="CwhRBWXzGAHq8TQ4Fs17", + optimize_streaming_latency="0", + output_format="mp3_22050_32", + text=response_text, + voice_settings=VoiceSettings( + stability=0.1, + similarity_boost=0.3, + style=0.2, + ), + ) + + audio_file_path = "output_audio.mp3" + with open(audio_file_path, "wb") as audio_file: + for chunk in tts_response: + audio_file.write(chunk) + + return audio_file_path + + diff --git a/erp_core/assistant_class.py b/erp_core/assistant_class.py new file mode 100644 index 0000000000000000000000000000000000000000..e282aa0967abd873ae64a66146a668e8d780fb75 --- /dev/null +++ b/erp_core/assistant_class.py @@ -0,0 +1,59 @@ +from langchain_anthropic import ChatAnthropic +from langchain_openai.chat_models import ChatOpenAI +from langchain_community.tools.tavily_search import TavilySearchResults +from langchain_core.prompts import ChatPromptTemplate +from langchain_core.pydantic_v1 import BaseModel, Field +from langchain_core.runnables import Runnable, RunnableConfig +from langgraph.checkpoint.sqlite import SqliteSaver +from erp_core.state_definer import State +import time +from datetime import datetime +import getpass + +class Assistant: + """ + 
Assistant class to handle the conversation with the user. + """ + def __init__(self, runnable: Runnable): + self.runnable = runnable + + def __call__(self, state: State, config: RunnableConfig): + while True: + result = self.runnable.invoke(state) + + if not result.tool_calls and ( + not result.content + or isinstance(result.content, list) + and not result.content[0].get("text") + ): + messages = state["messages"] + [("user", "Respond with a real output.")] + state = {**state, "messages": messages} + messages = state["messages"] + [("user", "Respond with a real output.")] + state = {**state, "messages": messages} + else: + break + return {"messages": result} + + +class CompleteOrEscalate(BaseModel): + """A tool to mark the current task as completed and/or to escalate control of the dialog to the main assistant, + who can re-route the dialog based on the user's needs.""" + + cancel: bool = True + reason: str + + class Config: + schema_extra = { + "example": { + "cancel": True, + "reason": "User changed their mind about the current task.", + }, + "example 2": { + "cancel": True, + "reason": "I have fully completed the task.", + }, + "example 3": { + "cancel": False, + "reason": "I need to search the user's emails or calendar for more information.", + }, + } \ No newline at end of file diff --git a/erp_core/config.py b/erp_core/config.py new file mode 100644 index 0000000000000000000000000000000000000000..0900f07cd13456d4c4d2a49957fe8336d29e792a --- /dev/null +++ b/erp_core/config.py @@ -0,0 +1,2 @@ +model_name = "gpt-4o-mini" +#anthropic_model_name = "claude-3-5-sonnet-20240620" \ No newline at end of file diff --git a/erp_core/display_image.py b/erp_core/display_image.py new file mode 100644 index 0000000000000000000000000000000000000000..20b5f78f5b969b41e04327f5a913c3e1ae9895f7 --- /dev/null +++ b/erp_core/display_image.py @@ -0,0 +1,14 @@ +from erp_core.node_builder import graph + +try: + image_path = "output_image.png" + # Get the image bytes + image_data = 
graph.get_graph(xray=True).draw_mermaid_png() + + # Save bytes to file + with open(image_path, 'wb') as f: + f.write(image_data) + + print(f"Image saved at {image_path}") +except Exception as e: + print(f"An error occurred: {e}") diff --git a/erp_core/entry_node.py b/erp_core/entry_node.py new file mode 100644 index 0000000000000000000000000000000000000000..5dda69129f827d2593bafd91df5f9db6289acf02 --- /dev/null +++ b/erp_core/entry_node.py @@ -0,0 +1,23 @@ +from typing import Callable + +from langchain_core.messages import ToolMessage +from erp_core.state_definer import State + +def create_entry_node(assistant_name: str, new_dialog_state: str) -> Callable: + def entry_node(state: State) -> dict: + tool_call_id = state["messages"][-1].tool_calls[0]["id"] + return { + "messages": [ + ToolMessage( + content=f"The assistant is now the {assistant_name}. Reflect on the above conversation between the host assistant and the user." + f" The user's intent is unsatisfied. Use the provided tools to assist the user. Remember, you are {assistant_name}," + " and the booking, update, other other action is not complete until after you have successfully invoked the appropriate tool." + " If the user changes their mind or needs help for other tasks, call the CompleteOrEscalate function to let the primary host assistant take control." 
+ " Do not mention who you are - just act as the proxy for the assistant.", + tool_call_id=tool_call_id, + ) + ], + "dialog_state": new_dialog_state, + } + + return entry_node \ No newline at end of file diff --git a/erp_core/node_builder.py b/erp_core/node_builder.py new file mode 100644 index 0000000000000000000000000000000000000000..7f78fedb88acceaf288caa6034577eb9fe4f7290 --- /dev/null +++ b/erp_core/node_builder.py @@ -0,0 +1,297 @@ +from typing import Literal + +from erp_core.state_definer import State +from langchain_core.messages import ToolMessage +from erp_core._event import create_tool_node_with_fallback +from erp_core.assistant_class import Assistant, CompleteOrEscalate +from erp_core.entry_node import create_entry_node +from langgraph.graph import StateGraph +from langgraph.prebuilt import tools_condition +from langgraph.graph import END, StateGraph, START +from operator import __and__ +from langgraph.checkpoint.memory import MemorySaver +# from langgraph.checkpoint.sqlite import SqliteSaver + +from erp_core.runnable.fm_prompt import financial_management_runnable, financial_management_tools +from erp_core.runnable.scm_prompt import supply_chain_management_runnable, supply_chain_management_tools +from erp_core.runnable.hr_prompt import human_resource_runnable, human_resource_tools +from erp_core.runnable.pm_prompt import project_management_runnable, project_management_tools +from erp_core.runnable.crm_prompt import customer_relationship_management_runnable, customer_relationship_management_tools +from erp_core.runnable.primary_assistant_prompt import assistant_runnable, primary_assistant_tools + +from erp_core.tool_binder.tool_binder import ToHumanResourceDepartment, ToFinancialManagementDepartment, ToSupplyChainManagementDepartment, ToProjectManagementDepartment, ToCustomerRelationshipManagementDepartment +builder = StateGraph(State) + + +# fetch user info +# ........................................................................ 
+def user_info(state: State): + return {"user_info": ""} + +builder.add_node("fetch_user_info", user_info) +builder.add_edge(START, "fetch_user_info") + + +# financial management assistant +# ........................................................................ + +builder.add_node("enter_financial_management", create_entry_node("Financial Management Assistant", "financial_management")) +builder.add_node("financial_management", Assistant(financial_management_runnable)) +builder.add_edge("enter_financial_management", "financial_management") +builder.add_node("financial_management_tools", create_tool_node_with_fallback(financial_management_tools)) + +def route_financial_management( + state: State, +) -> Literal[ + "financial_management_tools", + "leave_skill", + "__end__", +]: + route = tools_condition(state) + if route == END: + return END + tool_calls = state["messages"][-1].tool_calls + did_cancel = any(tc["name"] == CompleteOrEscalate.__name__ for tc in tool_calls) + if did_cancel: + return "leave_skill" + safe_toolnames = [t.name for t in financial_management_tools] + if all(tc["name"] in safe_toolnames for tc in tool_calls): + return "financial_management_tools" + return "financial_management_tools" + +builder.add_edge("financial_management_tools", "financial_management") +builder.add_conditional_edges("financial_management", route_financial_management) + +# supply chain management assistant +# ........................................................................ 
+ +builder.add_node("enter_supply_chain_management", create_entry_node("Supply Chain Management Assistant", "supply_chain_management")) +builder.add_node("supply_chain_management", Assistant(supply_chain_management_runnable)) +builder.add_edge("enter_supply_chain_management", "supply_chain_management") +builder.add_node("supply_chain_management_tools", create_tool_node_with_fallback(supply_chain_management_tools)) + +def route_supply_chain_management( + state: State, +) -> Literal[ + "supply_chain_management_tools", + "leave_skill", + "__end__", +]: + route = tools_condition(state) + if route == END: + return END + tool_calls = state["messages"][-1].tool_calls + did_cancel = any(tc["name"] == CompleteOrEscalate.__name__ for tc in tool_calls) + if did_cancel: + return "leave_skill" + safe_toolnames = [t.name for t in supply_chain_management_tools] + if all(tc["name"] in safe_toolnames for tc in tool_calls): + return "supply_chain_management_tools" + return "supply_chain_management_tools" + +builder.add_edge("supply_chain_management_tools", "supply_chain_management") +builder.add_conditional_edges("supply_chain_management", route_supply_chain_management) + + + +# human resource assistant +# ........................................................................ 
+ +builder.add_node("enter_human_resource", create_entry_node("Human Resource Assistant", "human_resource")) +builder.add_node("human_resource", Assistant(human_resource_runnable)) +builder.add_edge("enter_human_resource", "human_resource") +builder.add_node("human_resource_tools", create_tool_node_with_fallback(human_resource_tools)) + +def route_human_resource( + state: State, +) -> Literal[ + "human_resource_tools", + "leave_skill", + "__end__", +]: + route = tools_condition(state) + if route == END: + return END # end the graph + tool_calls = state["messages"][-1].tool_calls + did_cancel = any(tc["name"] == CompleteOrEscalate.__name__ for tc in tool_calls) + if did_cancel: + return "leave_skill" + + safe_toolnames = [t.name for t in human_resource_tools] + if all(tc["name"] in safe_toolnames for tc in tool_calls): + return "human_resource_tools" + return "human_resource_tools" + +builder.add_edge("human_resource_tools", "human_resource") +builder.add_conditional_edges("human_resource", route_human_resource) + + +# Project management assistant +# ........................................................................ 
+ +builder.add_node("enter_project_management", create_entry_node("Project Management Assistant", "project_management")) +builder.add_node("project_management", Assistant(project_management_runnable)) +builder.add_edge("enter_project_management", "project_management") +builder.add_node("project_management_tools", create_tool_node_with_fallback(project_management_tools)) + +def route_project_management( + state: State, +) -> Literal[ + "project_management_tools", + "leave_skill", + "__end__", +]: + route = tools_condition(state) + if route == END: + return END + tool_calls = state["messages"][-1].tool_calls + did_cancel = any(tc["name"] == CompleteOrEscalate.__name__ for tc in tool_calls) + if did_cancel: + return "leave_skill" + safe_toolnames = [t.name for t in project_management_tools] + if all(tc["name"] in safe_toolnames for tc in tool_calls): + return "project_management_tools" + return "project_management_tools" + +builder.add_edge("project_management_tools", "project_management") +builder.add_conditional_edges("project_management", route_project_management) + + +# customer relationship management assistant +# ........................................................................ 
+builder.add_node("enter_customer_relationship_management", create_entry_node("Customer Relationship Management Assistant", "customer_relationship_management")) +builder.add_node("customer_relationship_management", Assistant(customer_relationship_management_runnable)) +builder.add_edge("enter_customer_relationship_management", "customer_relationship_management") +builder.add_node("customer_relationship_management_tools", create_tool_node_with_fallback(customer_relationship_management_tools)) + +def route_customer_relationship_management( + state: State, +) -> Literal[ + "customer_relationship_management_tools", + "leave_skill", + "__end__", +]: + route = tools_condition(state) + if route == END: + return END + tool_calls = state["messages"][-1].tool_calls + did_cancel = any(tc["name"] == CompleteOrEscalate.__name__ for tc in tool_calls) + if did_cancel: + return "leave_skill" + safe_toolnames = [t.name for t in customer_relationship_management_tools] + if all(tc["name"] in safe_toolnames for tc in tool_calls): + return "customer_relationship_management_tools" + return "customer_relationship_management_tools" + +builder.add_edge("customer_relationship_management_tools", "customer_relationship_management") +builder.add_conditional_edges("customer_relationship_management", route_customer_relationship_management) + + +# leave skill +# ........................................................................ + +def pop_dialog_state(state: State) -> dict: + """Pop the dialog stack and return to the main assistant. + + This lets the full graph explicitly track the dialog flow and delegate control + to specific sub-graphs. + """ + messages = [] + if state["messages"][-1].tool_calls: + # Note: Doesn't currently handle the edge case where the llm performs parallel tool calls + messages.append( + ToolMessage( + content="Resuming dialog with the host assistant. 
Please reflect on the past conversation and assist the user as needed.", + tool_call_id=state["messages"][-1].tool_calls[0]["id"], + ) + ) + return { + "dialog_state": "pop", + "messages": messages, + } + +builder.add_node("leave_skill", pop_dialog_state) +builder.add_edge("leave_skill", "primary_assistant") + + +# primary assistant +# ........................................................................ + +builder.add_node("primary_assistant", Assistant(assistant_runnable)) +builder.add_node("primary_assistant_tools", create_tool_node_with_fallback(primary_assistant_tools)) + +def route_primary_assistant( + state: State, +) -> Literal[ + "primary_assistant_tools", + "enter_human_resource", + "enter_financial_management", + "enter_supply_chain_management", + "enter_project_management", + "enter_customer_relationship_management", + "__and__", +]: + route = tools_condition(state) + if route == END: + return END + tool_calls = state["messages"][-1].tool_calls + if tool_calls: + if tool_calls[0]["name"] == ToHumanResourceDepartment.__name__: + return "enter_human_resource" + elif tool_calls[0]["name"] == ToFinancialManagementDepartment.__name__: + return "enter_financial_management" + elif tool_calls[0]["name"] == ToSupplyChainManagementDepartment.__name__: + return "enter_supply_chain_management" + elif tool_calls[0]["name"] == ToProjectManagementDepartment.__name__: + return "enter_project_management" + elif tool_calls[0]["name"] == ToCustomerRelationshipManagementDepartment.__name__: + return "enter_customer_relationship_management" + return "primary_assistant_tools" + raise ValueError("Invalid route") + + +# The assistant can route to one of the delegated assistants, +# directly use a tool, or directly respond to the user +builder.add_conditional_edges( + "primary_assistant", + route_primary_assistant, + { + "enter_human_resource": "enter_human_resource", + "enter_financial_management": "enter_financial_management", + "enter_supply_chain_management": 
"enter_supply_chain_management", + "enter_project_management": "enter_project_management", + "enter_customer_relationship_management": "enter_customer_relationship_management", + "primary_assistant_tools": "primary_assistant_tools", + END: END, + }, +) +builder.add_edge("primary_assistant_tools", "primary_assistant") + + +# Each delegated workflow can directly respond to the user +# When the user responds, we want to return to the currently active workflow +def route_to_workflow( + state: State, +) -> Literal[ + "primary_assistant", + "human_resource", + "financial_management", + "supply_chain_management", + "project_management", + "customer_relationship_management", +]: + """If we are in a delegated state, route directly to the appropriate assistant.""" + dialog_state = state.get("dialog_state") + if not dialog_state: + return "primary_assistant" + return dialog_state[-1] + + +builder.add_conditional_edges("fetch_user_info", route_to_workflow) + + +# Compile graph +def compile_graph(): + memory = MemorySaver() + graph = builder.compile(checkpointer=memory) + return graph diff --git a/erp_core/node_builder/customer_relationship_management_node.py b/erp_core/node_builder/customer_relationship_management_node.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/erp_core/node_builder/finalcial_management_node.py b/erp_core/node_builder/finalcial_management_node.py new file mode 100644 index 0000000000000000000000000000000000000000..da3d2ba6d76db57f04717c2dc902682c89c0a15b --- /dev/null +++ b/erp_core/node_builder/finalcial_management_node.py @@ -0,0 +1,41 @@ +from typing import Literal + +from erp_core.state_definer import State +from langchain_core.messages import ToolMessage +from erp_core._event import create_tool_node_with_fallback +from erp_core.assistant_class import Assistant, CompleteOrEscalate +from erp_core.entry_node import create_entry_node +from langgraph.graph import StateGraph 
# ── erp_core/node_builder/finalcial_management_node.py ──
from typing import Literal

from erp_core.state_definer import State
from langchain_core.messages import ToolMessage
from erp_core._event import create_tool_node_with_fallback
from erp_core.assistant_class import Assistant, CompleteOrEscalate
from erp_core.entry_node import create_entry_node
from langgraph.prebuilt import tools_condition
from langgraph.graph import END, StateGraph, START

from erp_core.runnable.fm_prompt import (
    financial_management_runnable,
    financial_management_tools,
)

builder = StateGraph(State)

builder.add_node("enter_financial_management",
                 create_entry_node("Financial Management Assistant",
                                   "financial_management"))
builder.add_node("financial_management", Assistant(financial_management_runnable))
builder.add_edge("enter_financial_management", "financial_management")
builder.add_node("financial_management_tools",
                 create_tool_node_with_fallback(financial_management_tools))


def route_financial_management(
    state: State,
) -> Literal[
    "financial_management_tools",
    "leave_skill",
    "__end__",
]:
    """Route the FM assistant: END, escalate back to the host, or run its tools."""
    route = tools_condition(state)
    if route == END:
        return END
    tool_calls = state["messages"][-1].tool_calls
    # CompleteOrEscalate means the sub-assistant is done -> return to the host.
    if any(tc["name"] == CompleteOrEscalate.__name__ for tc in tool_calls):
        return "leave_skill"
    # No sensitive/safe tool split here: the original compared the called tool
    # names against the registered tools but returned the same node in both
    # branches, so the check was dead code and is dropped.
    return "financial_management_tools"


builder.add_edge("financial_management_tools", "financial_management")
builder.add_conditional_edges("financial_management", route_financial_management)
# ── erp_core/node_builder/graph_builder_node.py ──
from typing import Literal

from erp_core.state_definer import State
from langchain_core.messages import ToolMessage
from erp_core._event import create_tool_node_with_fallback
from erp_core.assistant_class import Assistant, CompleteOrEscalate
from erp_core.entry_node import create_entry_node
from langgraph.prebuilt import tools_condition
from langgraph.graph import END, StateGraph, START

from erp_core.runnable.fm_prompt import financial_management_runnable, financial_management_tools
from erp_core.runnable.scm_prompt import supply_chain_management_runnable, supply_chain_management_tools
from erp_core.runnable.hr_prompt import human_resource_runnable, human_resource_tools
from erp_core.runnable.pm_prompt import project_management_runnable, project_management_tools
from erp_core.runnable.crm_prompt import customer_relationship_management_runnable, customer_relationship_management_tools


builder = StateGraph(State)


def user_info(state: State):
    """Seed the graph state with the current user's profile.

    NOTE(review): the identity is a hard-coded placeholder; presumably to be
    replaced by a real lookup — confirm.
    """
    return {"user_info": "Kamal Ahmed, mobile number: 1234567890"}


builder.add_node("fetch_user_info", user_info)
builder.add_edge(START, "fetch_user_info")


def pop_dialog_state(state: State) -> dict:
    """Pop the dialog stack and return to the main assistant.

    This lets the full graph explicitly track the dialog flow and delegate
    control to specific sub-graphs.
    """
    last_message = state["messages"][-1]
    messages = []
    if last_message.tool_calls:
        # Only the first tool call is acknowledged (parallel calls unhandled).
        messages = [
            ToolMessage(
                content=(
                    "Resuming dialog with the host assistant. Please reflect "
                    "on the past conversation and assist the user as needed."
                ),
                tool_call_id=last_message.tool_calls[0]["id"],
            )
        ]
    return {"dialog_state": "pop", "messages": messages}


builder.add_node("leave_skill", pop_dialog_state)
# NOTE(review): "primary_assistant" is not defined in this module; presumably
# it is added elsewhere before the builder is compiled — verify.
builder.add_edge("leave_skill", "primary_assistant")
"leave_skill" + + safe_toolnames = [t.name for t in human_resource_tools] + if all(tc["name"] in safe_toolnames for tc in tool_calls): + return "human_resource_management_tools" + return "human_resource_management_tools" + +builder.add_edge("human_resource_management_tools", "human_resource_management") +builder.add_conditional_edges("human_resource_management", route_human_resource_management) diff --git a/erp_core/node_builder/primary_assistant_node.py b/erp_core/node_builder/primary_assistant_node.py new file mode 100644 index 0000000000000000000000000000000000000000..b94244eb481c95df6790c555d7759a2f10a0e150 --- /dev/null +++ b/erp_core/node_builder/primary_assistant_node.py @@ -0,0 +1,70 @@ +from operator import __and__ +from langgraph.checkpoint.sqlite import SqliteSaver + +# Primary assistant +builder.add_node("primary_assistant", Assistant(assistant_runnable)) +builder.add_node( + "primary_assistant_tools", create_tool_node_with_fallback(primary_assistant_tools) +) + + +def route_primary_assistant( + state: State, +) -> Literal[ + "primary_assistant_tools", + "enter_internet_problem", + "enter_outgoing_call_problem", + "__and__", +]: + route = tools_condition(state) + if route == END: + return END + tool_calls = state["messages"][-1].tool_calls + if tool_calls: + if tool_calls[0]["name"] == ToInternetProblem.__name__: + return "enter_internet_problem" + elif tool_calls[0]["name"] == ToOutgoingCallProblem.__name__: + return "enter_outgoing_call_problem" + return "primary_assistant_tools" + raise ValueError("Invalid route") + + +# The assistant can route to one of the delegated assistants, +# directly use a tool, or directly respond to the user +builder.add_conditional_edges( + "primary_assistant", + route_primary_assistant, + { + "enter_internet_problem": "enter_internet_problem", + "enter_outgoing_call_problem": "enter_outgoing_call_problem", + "primary_assistant_tools": "primary_assistant_tools", + END: END, + }, +) 
+builder.add_edge("primary_assistant_tools", "primary_assistant") + + +# Each delegated workflow can directly respond to the user +# When the user responds, we want to return to the currently active workflow +def route_to_workflow( + state: State, +) -> Literal[ + "primary_assistant", + "internet_problem", + "outgoing_call_problem", +]: + """If we are in a delegated state, route directly to the appropriate assistant.""" + dialog_state = state.get("dialog_state") + if not dialog_state: + return "primary_assistant" + return dialog_state[-1] + + +builder.add_conditional_edges("fetch_user_info", route_to_workflow) + +# Compile graph +memory = SqliteSaver.from_conn_string(":memory:") +graph = builder.compile( + checkpointer=memory, + # Let the user approve or deny the use of sensitive tools +) \ No newline at end of file diff --git a/erp_core/node_builder/project_management_node.py b/erp_core/node_builder/project_management_node.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/erp_core/node_builder/supply_chain_management_node.py b/erp_core/node_builder/supply_chain_management_node.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/erp_core/runnable/__pycache__/crm_prompt.cpython-311.pyc b/erp_core/runnable/__pycache__/crm_prompt.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..64c307555ba3c5759b20c6dbf8bbe40c3234d2b7 Binary files /dev/null and b/erp_core/runnable/__pycache__/crm_prompt.cpython-311.pyc differ diff --git a/erp_core/runnable/__pycache__/fm_prompt.cpython-311.pyc b/erp_core/runnable/__pycache__/fm_prompt.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..bb99940cdcfaac41a56d2464cc39070d4f23ec2e Binary files /dev/null and b/erp_core/runnable/__pycache__/fm_prompt.cpython-311.pyc differ diff --git 
# ── erp_core/runnable/crm_prompt.py ──
from datetime import datetime
from langchain_core.prompts import ChatPromptTemplate

from erp_core.Tools.customer_relationship_management import customer_support
from erp_core.assistant_class import CompleteOrEscalate
from erp_core._llm import llm

# System prompt for the CRM sub-assistant. {time} is filled by the partial
# below; {messages} carries the running conversation at invoke time.
customer_relationship_management_prompt = ChatPromptTemplate.from_messages(
    [
        (
            "system",
            "You are a specialized assistant for handling customer relationship management issues. "
            "The primary assistant delegates work to you whenever the user needs help with their customer relationship management problems. "
            "Introduce yourself as a customer relationship management assistant"
            "Start conversation respectfully."
            "Diagnose the user query based on the user's input"
            "If any information is missing to call proper tool, ask the user for clarification."
            "While ready to call tool ask the user for confirmation once again by repeating the user's query."
            "If the user confirms that it is correct only then call proper tool to solve user query. It is very important."
            "Remember that an issue isn't resolved until the relevant tool or method has successfully been used."
            "\n\nCurrent time: {time}."
            "\n\nIf the user needs help, and none of your tools are appropriate for it, then"
            ' "CompleteOrEscalate" the dialog to the host assistant. Do not waste the user\'s time. Do not make up invalid tools or functions.',
        ),
        ("placeholder", "{messages}"),
    ]
    # BUG FIX: pass the callable, not datetime.now() — the original evaluated
    # the timestamp once at import, so every later turn saw a stale time.
    # ChatPromptTemplate re-invokes callable partials on each call.
).partial(time=datetime.now)

customer_relationship_management_tools = [customer_support]
customer_relationship_management_runnable = customer_relationship_management_prompt | llm.bind_tools(
    customer_relationship_management_tools + [CompleteOrEscalate]
)
# ── erp_core/runnable/fm_prompt.py ──
from datetime import datetime
from langchain_core.prompts import ChatPromptTemplate

from erp_core.Tools.finalcial_management import register_purchase_request, view_expense_report
from erp_core.assistant_class import CompleteOrEscalate
from erp_core._llm import llm

# System prompt for the financial-management sub-assistant. {time} is filled
# by the partial below; {messages} carries the conversation at invoke time.
financial_management_prompt = ChatPromptTemplate.from_messages(
    [
        (
            "system",
            "You are a specialized assistant for handling financial management issues. "
            "The primary assistant delegates work to you whenever the user needs help with their financial management problems. "
            "Introduce yourself as a financial management assistant"
            "Start conversation respectfully."
            "Diagnose the user query based on the user's input"
            "If any information is missing to call proper tool, ask the user for clarification."
            "While ready to call tool ask the user for confirmation once again by repeating the user's query. This is very important"
            "If the user confirms that it is correct only then call proper tool to solve user query. It is very important."
            "Remember that an issue isn't resolved until the relevant tool or method has successfully been used."
            "\nCurrent time: {time}."
            '\n\nIf the user needs help, and none of your tools are appropriate for it, then "CompleteOrEscalate" the dialog to the host assistant.'
            "Do not make up invalid tools or functions."
            "\n\nSome examples for which you should CompleteOrEscalate:\n"
            " - 'what's the weather like this time of year?'\n"
            " - 'nevermind I think I'll try again later'\n"
            " - 'Financial management issue resolved'",
        ),
        ("placeholder", "{messages}"),
    ]
    # BUG FIX: pass the callable, not datetime.now() — the original froze the
    # timestamp at import time; callable partials are re-evaluated per invoke.
).partial(time=datetime.now)

financial_management_tools = [register_purchase_request, view_expense_report]
financial_management_runnable = financial_management_prompt | llm.bind_tools(
    financial_management_tools + [CompleteOrEscalate]
)
# ── erp_core/runnable/hr_prompt.py ──
from datetime import datetime
from langchain_core.prompts import ChatPromptTemplate

from erp_core.Tools.human_resource import employee_database_access, leave_management
from erp_core.assistant_class import CompleteOrEscalate
from erp_core._llm import llm

# System prompt for the human-resource sub-assistant. {time} is filled by the
# partial below; {user_info} and {messages} are supplied at invoke time.
human_resource_prompt = ChatPromptTemplate.from_messages(
    [
        (
            "system",
            "You are a specialized assistant for handling human resource issues. "
            "The primary assistant delegates work to you whenever the user needs help with their human resource problems. "
            "Introduce yourself as a human resource assistant"
            "Start conversation respectfully."
            "Diagnose the user query based on the user's input"
            "If any information is missing to call proper tool, ask the user for clarification."
            "While ready to call tool ask the user for confirmation once again by repeating the user's query."
            "If the user confirms that it is correct only then call proper tool to solve user query. It is very important."
            "Remember that an issue isn't resolved until the relevant tool or method has successfully been used."
            "\n\nCurrent user human resource information:\n\n{user_info}\n"
            "\nCurrent time: {time}."
            "\n\nIf the user needs help, and none of your tools are appropriate for it, then"
            ' "CompleteOrEscalate" the dialog to the host assistant. Do not make up invalid tools or functions.',
        ),
        ("placeholder", "{messages}"),
    ]
    # BUG FIX: pass the callable, not datetime.now() — the original froze the
    # timestamp at import time; callable partials are re-evaluated per invoke.
).partial(time=datetime.now)

human_resource_tools = [
    employee_database_access,
    leave_management,
]
human_resource_runnable = human_resource_prompt | llm.bind_tools(
    human_resource_tools + [CompleteOrEscalate]
)
+ "\n\nSome examples for which you should CompleteOrEscalate:\n" + " - 'what's the weather like this time of year?'\n" + " - 'nevermind I think I'll try again later'\n", + ), + ("placeholder", "{messages}"), + ] + +).partial(time=datetime.now()) + +project_management_tools = [project_status_check] +project_management_runnable = project_management_prompt | llm.bind_tools( + project_management_tools + [CompleteOrEscalate] +) diff --git a/erp_core/runnable/primary_assistant_prompt.py b/erp_core/runnable/primary_assistant_prompt.py new file mode 100644 index 0000000000000000000000000000000000000000..2541886ad03f46eb661b3cbf77dc0dfbfb2b1d6a --- /dev/null +++ b/erp_core/runnable/primary_assistant_prompt.py @@ -0,0 +1,61 @@ +from langchain_core.prompts import ChatPromptTemplate +from datetime import datetime + +from erp_core.tool_binder.tool_binder import ( + ToCustomerRelationshipManagementDepartment, + ToFinancialManagementDepartment, + ToHumanResourceDepartment, + ToProjectManagementDepartment, + ToSupplyChainManagementDepartment +) +from erp_core._llm import llm + +primary_assistant_prompt = ChatPromptTemplate.from_messages( + [ + ( + "system", + "You are an intelligent ERP support assistant, designed to assist users in navigating various departments within the ERP system and resolving their queries. " + "Your primary goal is to guide the user to the right department or help them complete specific tasks using the ERP tools at your disposal." + "No matter how user starts the conversation, always start respectfully." + "Introduce yourself as an ERP support assistant" + "Start conversation respectfully. Pay salam to user saying 'Assalamu Alaikum'. Do not say 'Wa Alaikum Assalam'." + "Do not pay salam in each turn. Pay salam only once per conversation." + "User will either speak in english or arabic. In most cases, user will speak in english." + "Detect the language And respond in the same language." + "Do not speak any other language than english or arabic. 
This is very important." + "For department-specific issues, route the user’s request to the appropriate department tool based on their needs." + "Carefully listen to the user's input, identify their requirement, and confirm the department or action needed." + "For registering purchase request or getting financial report, go to financial management department." + "For project status check, go to project management department." + "For managing customer support, go to customer relationship management department." + "For employee database access and leave management, go to human resource management department." + "For product quantity check, go to supply chain management department." + "If the user's request doesn’t align with any of the available departments, normally say 'I'm sorry, I don't know how to help with that.'" + "Be efficient and direct, avoid unnecessary steps or delays." + "Ensure the user is directed to the right department or help within the ERP system." + "\n\nCurrent user information:\n\n{user_info}\n" + "\nCurrent time: {time}." + '\n\nIf the user’s request is outside the scope of the ERP tools, or they change their mind, use "CompleteOrEscalate" to return to the main assistant.' + "Do not waste the user's time. 
Do not make up invalid tools or functions.", + ), + ("placeholder", "{messages}"), + ] + +).partial(time=datetime.now()) +primary_assistant_tools = [ + ToFinancialManagementDepartment, + ToProjectManagementDepartment, + ToCustomerRelationshipManagementDepartment, + ToHumanResourceDepartment, + ToSupplyChainManagementDepartment +] +assistant_runnable = primary_assistant_prompt | llm.bind_tools( + primary_assistant_tools + + [ + ToFinancialManagementDepartment, + ToProjectManagementDepartment, + ToCustomerRelationshipManagementDepartment, + ToHumanResourceDepartment, + ToSupplyChainManagementDepartment + ] +) \ No newline at end of file diff --git a/erp_core/runnable/scm_prompt.py b/erp_core/runnable/scm_prompt.py new file mode 100644 index 0000000000000000000000000000000000000000..4c5cb862e47e94702cfd11255878195cf8db4f03 --- /dev/null +++ b/erp_core/runnable/scm_prompt.py @@ -0,0 +1,38 @@ +from datetime import datetime +from langchain_core.prompts import ChatPromptTemplate + +from erp_core.Tools.supply_chain_management import product_quantity_check +from erp_core.assistant_class import CompleteOrEscalate +from erp_core._llm import llm +supply_chain_management_prompt = ChatPromptTemplate.from_messages( + [ + ( + "system", + "You are a specialized assistant for handling supply chain management issues. " + "The primary assistant delegates work to you whenever the user needs help troubleshooting issues with supply chain management. " + "Introduce yourself as a supply chain management assistant" + "Start conversation respectfully." + "Diagnose the problem based on the user's input and confirm the troubleshooting steps with the customer. " + "If any information is missing to call proper tool, ask the user for clarification." + "While ready to call tool ask the user for confirmation once again by repeating the user's query." + "If the user confirms that it is correct only then call proper tool to solve user query. It is very important." 
+ "Remember that an issue isn't resolved until the relevant tool or method has successfully been used." + "\nCurrent time: {time}." + '\n\nIf the user needs help, and none of your tools are appropriate for it, then "CompleteOrEscalate" the dialog to the host assistant.' + " Do not waste the user's time. Do not make up invalid tools or functions." + "\n\nSome examples for which you should CompleteOrEscalate:\n" + " - 'what's the weather like this time of year?'\n" + " - 'nevermind I think I'll try again later'\n" + " - 'I need help with another issue instead'\n" + " - 'Oh wait, I think the problem resolved itself'\n" + " - 'Call issue resolved'", + ), + ("placeholder", "{messages}"), + ] + +).partial(time=datetime.now()) + +supply_chain_management_tools = [product_quantity_check] +supply_chain_management_runnable = supply_chain_management_prompt | llm.bind_tools( + supply_chain_management_tools + [CompleteOrEscalate] +) diff --git a/erp_core/state_definer.py b/erp_core/state_definer.py new file mode 100644 index 0000000000000000000000000000000000000000..2776cd5f63943d1d22cec280f85ec7fcd85ea722 --- /dev/null +++ b/erp_core/state_definer.py @@ -0,0 +1,32 @@ +from typing import Annotated, Literal, Optional + +from typing_extensions import TypedDict + +from langgraph.graph.message import AnyMessage, add_messages + + +def update_dialog_stack(left: list[str], right: Optional[str]) -> list[str]: + """Push or pop the state.""" + if right is None: + return left + if right == "pop": + return left[:-1] + return left + [right] + + +class State(TypedDict): + messages: Annotated[list[AnyMessage], add_messages] + user_info: str + dialog_state: Annotated[ + list[ + Literal[ + "assistant", + "Human_Resource", + "Financial_Management", + "Supply_Chain_Management", + "Project_Management", + "Customer_Relationship_Management", + ] + ], + update_dialog_stack, + ] \ No newline at end of file diff --git a/erp_core/tool_binder/__pycache__/tool_binder.cpython-311.pyc 
# ── erp_core/tool_binder/tool_binder.py ──
# NOTE(review): langchain_core.pydantic_v1 is deprecated in recent LangChain
# releases; migrating to `pydantic` directly is recommended — confirm against
# the pinned langchain_core version before changing.
from langchain_core.pydantic_v1 import BaseModel, Field

# Each class below is bound to the primary assistant as a "routing tool": its
# docstring is the tool description the LLM sees, and `request` carries any
# clarifying question forwarded to the department sub-assistant.


class ToFinancialManagementDepartment(BaseModel):
    # BUG FIX: the description said "final management"; this text is sent to
    # the LLM as the tool description, so the typo degraded routing quality.
    """Transfers work to a specialized assistant to handle financial management department issues."""

    request: str = Field(
        description="Any necessary followup questions the financial management department assistant should clarify before proceeding."
    )


class ToProjectManagementDepartment(BaseModel):
    """Transfers work to a specialized assistant to handle project management issues."""

    request: str = Field(
        description="Any necessary followup questions the project management department assistant should clarify before proceeding."
    )


class ToCustomerRelationshipManagementDepartment(BaseModel):
    """Transfers work to a specialized assistant to handle customer relationship management issues."""

    request: str = Field(
        description="Any necessary followup questions the customer relationship management assistant should clarify before proceeding."
    )


class ToHumanResourceDepartment(BaseModel):
    """Transfers work to a specialized assistant to handle human resource issues."""

    request: str = Field(
        description="Any necessary followup questions the human resource department assistant should clarify before proceeding."
    )


class ToSupplyChainManagementDepartment(BaseModel):
    """Transfers work to a specialized assistant to handle supply chain issues."""

    request: str = Field(
        description="Any necessary followup questions the supply chain department assistant should clarify before proceeding."
    )


# ── llm_runner.py ──
# Simple terminal REPL for driving the compiled graph with a fixed thread id.
from erp_core.node_builder import builder
from erp_core._event import _print_event
from langgraph.checkpoint.sqlite import SqliteSaver

# NOTE(review): this imports `builder` from erp_core.node_builder, while
# app.py imports `compile_graph` — confirm both names are exported.
with SqliteSaver.from_conn_string(":memory:") as memory:
    graph = builder.compile(checkpointer=memory)
    while True:
        try:
            user_input = input("User: ")
            if user_input.lower() in ("quit", "exit", "q"):
                print("Goodbye!")
                break
            for event in graph.stream(
                {"messages": ("user", user_input)},
                config={"configurable": {"thread_id": 42}},
            ):
                for value in event.values():
                    if "messages" in value:
                        _printed = set()
                        state, message = _print_event(value, _printed)
                        print("State:", state)
                        print("Message:", message)
        # ROBUSTNESS FIX: also catch EOFError so a piped/closed stdin exits
        # cleanly instead of crashing out of input().
        except (KeyboardInterrupt, EOFError):
            print("Goodbye!")
            break