Update app.py
Browse files
app.py
CHANGED
@@ -151,6 +151,7 @@ with gr.Blocks() as demo:
|
|
151 |
# Launch the Gradio application
|
152 |
demo.launch()
|
153 |
'''
|
|
|
154 |
from typing import Annotated, Sequence, TypedDict
|
155 |
import operator
|
156 |
import functools
|
@@ -328,6 +329,143 @@ try:
|
|
328 |
print(f"[TRACE] Workflow Result: {result}") # Final workflow result
|
329 |
except Exception as e:
|
330 |
print(f"[ERROR] Workflow execution failed: {e}")
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
331 |
|
332 |
|
333 |
|
|
|
151 |
# Launch the Gradio application
|
152 |
demo.launch()
|
153 |
'''
|
154 |
+
'''
|
155 |
from typing import Annotated, Sequence, TypedDict
|
156 |
import operator
|
157 |
import functools
|
|
|
329 |
print(f"[TRACE] Workflow Result: {result}") # Final workflow result
|
330 |
except Exception as e:
|
331 |
print(f"[ERROR] Workflow execution failed: {e}")
|
332 |
+
'''
|
333 |
+
|
334 |
+
from typing import Annotated, Any, Dict, TypedDict
import functools
import operator

import gradio as gr
from langchain_community.tools.tavily_search import TavilySearchResults
from langchain_core.messages import HumanMessage
from langchain_experimental.tools import PythonREPLTool
from langchain_huggingface import HuggingFacePipeline
from langgraph.graph import StateGraph, END
from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline
|
343 |
+
|
344 |
+
# Define the agents and tools
|
345 |
+
def create_llm():
    """Build a LangChain-compatible LLM backed by a local HuggingFace pipeline.

    Loads the Qwen2.5-7B-Instruct-1M tokenizer and causal-LM weights, wires
    them into a transformers text-generation pipeline, and wraps the result
    in a HuggingFacePipeline for use with LangChain/LangGraph.

    Returns:
        HuggingFacePipeline: a ready-to-invoke text-generation LLM.
    """
    checkpoint = "Qwen/Qwen2.5-7B-Instruct-1M"
    tok = AutoTokenizer.from_pretrained(checkpoint)
    lm = AutoModelForCausalLM.from_pretrained(checkpoint)

    # device_map="auto" lets accelerate place the weights across available
    # devices; max_new_tokens bounds each generation.
    text_gen = pipeline(
        task="text-generation",
        model=lm,
        tokenizer=tok,
        device_map="auto",
        max_new_tokens=500,
    )
    return HuggingFacePipeline(pipeline=text_gen)
|
359 |
+
|
360 |
+
# Mock Tools for Registration and Scheduling
# NOTE(review): these two tools are instantiated but never invoked by the
# workflow nodes below — they appear to be placeholders for a real
# registration lookup and a real scheduling backend; confirm before removing.
registration_tool = TavilySearchResults(max_results=1)  # A mock registration tool
scheduling_tool = PythonREPLTool()  # A mock scheduling tool
|
363 |
+
|
364 |
+
# Define the agents
|
365 |
+
def registration_agent(visitor_details: Dict[str, str]) -> Dict[str, Any]:
    """Check if the visitor is registered.

    Args:
        visitor_details: mapping expected to carry "visitor_name" and
            "visitor_mobile" (missing keys are treated as no match).

    Returns:
        {"registered": True} when both name and mobile match a known
        record, otherwise {"registered": False}.
    """
    name = visitor_details.get("visitor_name")
    mobile = visitor_details.get("visitor_mobile")

    # Mock registration lookup (replace this with actual database/API call)
    registered_visitors = [{"visitor_name": "John Doe", "visitor_mobile": "1234567890"}]

    # A visitor counts as registered only when both fields match one record.
    for record in registered_visitors:
        if record["visitor_name"] == name and record["visitor_mobile"] == mobile:
            return {"registered": True}
    return {"registered": False}
|
379 |
+
|
380 |
+
def scheduling_agent(scheduling_details: Dict[str, str]) -> Dict[str, str]:
    """Schedule an appointment.

    Args:
        scheduling_details: mapping expected to carry "doctor_name" and
            "department_name" (missing keys pass through as None).

    Returns:
        A dict with a fixed success "status" plus the echoed doctor and
        department names.
    """
    # Mock scheduling logic (replace with actual logic/API integration)
    return {
        "status": "Scheduled successfully",
        "doctor_name": scheduling_details.get("doctor_name"),
        "department_name": scheduling_details.get("department_name"),
    }
|
388 |
+
|
389 |
+
|
390 |
+
# Define Langgraph States
|
391 |
+
class VisitorState(TypedDict, total=False):
    """Shared LangGraph state for the visitor workflow.

    BUG FIX: the original declared a plain class with
    ``messages: Annotated[list, functools.partial(lambda x: [])]`` — a
    partial of a lambda is not a valid LangGraph reducer, and a plain
    (non-TypedDict) class is not a valid ``StateGraph`` schema. The keys
    the nodes actually read and write (``visitor_name``, ``next``, …)
    were also undeclared. ``total=False`` keeps all keys optional so the
    Gradio caller can seed only the input fields.
    """
    # Conversation log; operator.add tells LangGraph to APPEND messages
    # emitted by each node instead of replacing the list.
    messages: Annotated[list, operator.add]
    visitor_name: str
    visitor_mobile: str
    doctor_name: str
    department_name: str
    # Routing hint consumed by the conditional edges out of RegistrationState.
    next: str
|
393 |
+
|
394 |
+
|
395 |
+
def input_state(state):
    """State to input visitor details.

    Entry node of the graph: emits a single prompt asking the visitor
    for their name and mobile number.
    """
    prompt = HumanMessage(content="Please provide your name and mobile number.")
    return {"messages": [prompt]}
|
398 |
+
|
399 |
+
def registration_state(state):
    """State to check visitor registration.

    Looks the visitor up via registration_agent; routes to SchedulingState
    when registered, otherwise ends the workflow with a hint to register.
    """
    lookup = registration_agent(
        {
            "visitor_name": state["visitor_name"],
            "visitor_mobile": state["visitor_mobile"],
        }
    )

    # Guard clause: unregistered visitors terminate the workflow here.
    if not lookup["registered"]:
        return {"messages": ["Visitor not found in records. Please register first."], "next": END}
    return {"messages": ["Visitor is registered."], "next": "SchedulingState"}
|
409 |
+
|
410 |
+
def scheduling_state(state):
    """State for scheduling appointment.

    Delegates to scheduling_agent and reports the booking outcome as a
    single human-readable message, then routes to END.
    """
    booking = scheduling_agent(
        {
            "doctor_name": state["doctor_name"],
            "department_name": state["department_name"],
        }
    )
    summary = (
        f"Appointment {booking['status']} with Dr.{booking['doctor_name']} "
        f"in {booking['department_name']} department."
    )
    return {"messages": [summary], "next": END}
|
420 |
+
|
421 |
+
|
422 |
+
# Build Langgraph Workflow
workflow = StateGraph(VisitorState)

# Add nodes
workflow.add_node("InputState", input_state)
workflow.add_node("RegistrationState", registration_state)
workflow.add_node("SchedulingState", scheduling_state)

# Define edges.
# BUG FIX: the original ALSO added an unconditional edge
# RegistrationState -> SchedulingState, which forced scheduling to run even
# when the conditional router below chose END (unregistered visitor).
# Routing out of RegistrationState must come only from the conditional edges.
workflow.add_edge("InputState", "RegistrationState")
workflow.add_conditional_edges(
    "RegistrationState",
    # Route on the "next" key written by registration_state.
    lambda x: x.get("next"),
    {"SchedulingState": "SchedulingState", END: END},
)
# SchedulingState is terminal; give it an explicit edge to END so the
# graph has no dangling node (NOTE(review): some langgraph versions reject
# nodes with no outgoing edge at compile time — confirm against the pinned
# version).
workflow.add_edge("SchedulingState", END)

workflow.set_entry_point("InputState")
compiled_graph = workflow.compile()
|
441 |
+
|
442 |
+
# Gradio Frontend
|
443 |
+
def gradio_interface(visitor_name, visitor_mobile, doctor_name, department_name):
    """Gradio interface to interact with the workflow.

    Seeds the graph state from the four form fields, runs the compiled
    workflow, and returns the final outcome message as text.
    """
    state = {
        "visitor_name": visitor_name,
        "visitor_mobile": visitor_mobile,
        "doctor_name": doctor_name,
        "department_name": department_name,
    }

    # Invoke workflow
    result = compiled_graph.invoke(state)

    # BUG FIX: messages accumulate across nodes, so index 0 is the generic
    # InputState prompt, not the outcome. Report the LAST message instead.
    messages = result.get("messages") or []
    if not messages:
        return "No response produced by the workflow."
    last = messages[-1]
    # Nodes emit either plain strings or message objects; normalize to text.
    return getattr(last, "content", last)
|
455 |
+
|
456 |
+
# Gradio Frontend: four free-text inputs feeding gradio_interface, and one
# textbox output carrying the workflow's final message.
iface = gr.Interface(
    fn=gradio_interface,
    inputs=[
        gr.Textbox(label="Visitor Name"),
        gr.Textbox(label="Visitor Mobile Number"),
        gr.Textbox(label="Doctor Name"),
        gr.Textbox(label="Department Name"),
    ],
    outputs="textbox",
)
|
466 |
+
|
467 |
+
# Launch the Gradio app only when executed as a script (not on import).
if __name__ == "__main__":
    iface.launch()
|
469 |
|
470 |
|
471 |
|