Ram580 commited on
Commit
91d192d
·
1 Parent(s): 40e25c4

added all the files

Browse files
Files changed (44) hide show
  1. Dockerfile +20 -14
  2. README.md +127 -12
  3. requirements.txt +18 -1
  4. src/__init__.py +0 -0
  5. src/__pycache__/__init__.cpython-312.pyc +0 -0
  6. src/langgraphagenticai/LLMS/__init__.py +0 -0
  7. src/langgraphagenticai/LLMS/__pycache__/__init__.cpython-312.pyc +0 -0
  8. src/langgraphagenticai/LLMS/__pycache__/groqllm.cpython-312.pyc +0 -0
  9. src/langgraphagenticai/LLMS/groqllm.py +20 -0
  10. src/langgraphagenticai/__init__.py +0 -0
  11. src/langgraphagenticai/__pycache__/__init__.cpython-312.pyc +0 -0
  12. src/langgraphagenticai/__pycache__/main.cpython-312.pyc +0 -0
  13. src/langgraphagenticai/app.py +9 -0
  14. src/langgraphagenticai/graph/__init__.py +0 -0
  15. src/langgraphagenticai/graph/__pycache__/__init__.cpython-312.pyc +0 -0
  16. src/langgraphagenticai/graph/__pycache__/graph_builder.cpython-312.pyc +0 -0
  17. src/langgraphagenticai/graph/graph_builder.py +70 -0
  18. src/langgraphagenticai/main.py +66 -0
  19. src/langgraphagenticai/nodes/__init__.py +0 -0
  20. src/langgraphagenticai/nodes/__pycache__/__init__.cpython-312.pyc +0 -0
  21. src/langgraphagenticai/nodes/__pycache__/basic_chatbot_node.cpython-312.pyc +0 -0
  22. src/langgraphagenticai/nodes/__pycache__/chatbot_with_Tool_node.cpython-312.pyc +0 -0
  23. src/langgraphagenticai/nodes/basic_chatbot_node.py +14 -0
  24. src/langgraphagenticai/nodes/chatbot_with_Tool_node.py +42 -0
  25. src/langgraphagenticai/state/__init__.py +0 -0
  26. src/langgraphagenticai/state/__pycache__/__init__.cpython-312.pyc +0 -0
  27. src/langgraphagenticai/state/__pycache__/state.cpython-312.pyc +0 -0
  28. src/langgraphagenticai/state/state.py +11 -0
  29. src/langgraphagenticai/tools/__init__.py +0 -0
  30. src/langgraphagenticai/tools/__pycache__/__init__.cpython-312.pyc +0 -0
  31. src/langgraphagenticai/tools/__pycache__/search_tool.cpython-312.pyc +0 -0
  32. src/langgraphagenticai/tools/search_tool.py +22 -0
  33. src/langgraphagenticai/ui/__init__.py +0 -0
  34. src/langgraphagenticai/ui/__pycache__/__init__.cpython-312.pyc +0 -0
  35. src/langgraphagenticai/ui/__pycache__/uiconfigfile.cpython-312.pyc +0 -0
  36. src/langgraphagenticai/ui/streamlitui/__pycache__/display_result.cpython-312.pyc +0 -0
  37. src/langgraphagenticai/ui/streamlitui/__pycache__/display_results.cpython-312.pyc +0 -0
  38. src/langgraphagenticai/ui/streamlitui/__pycache__/loadui.cpython-312.pyc +0 -0
  39. src/langgraphagenticai/ui/streamlitui/display_result.py +42 -0
  40. src/langgraphagenticai/ui/streamlitui/loadui.py +72 -0
  41. src/langgraphagenticai/ui/uiconfigfile.ini +5 -0
  42. src/langgraphagenticai/ui/uiconfigfile.py +26 -0
  43. src/langgraphagenticai/vectorstore/__init__.py +0 -0
  44. src/streamlit_app.py +0 -40
Dockerfile CHANGED
@@ -1,21 +1,27 @@
1
- FROM python:3.9-slim
 
2
 
3
- WORKDIR /app
 
 
4
 
5
- RUN apt-get update && apt-get install -y \
6
- build-essential \
7
- curl \
8
- software-properties-common \
9
- git \
10
- && rm -rf /var/lib/apt/lists/*
11
 
12
- COPY requirements.txt ./
13
- COPY src/ ./src/
14
 
15
- RUN pip3 install -r requirements.txt
 
 
16
 
17
- EXPOSE 8501
 
18
 
19
- HEALTHCHECK CMD curl --fail http://localhost:8501/_stcore/health
 
 
 
20
 
21
- ENTRYPOINT ["streamlit", "run", "src/streamlit_app.py", "--server.port=8501", "--server.address=0.0.0.0"]
 
 
1
+ # Use official Python image as base
2
+ FROM python:3.10-slim
3
 
4
+ # Set environment variables
5
+ ENV PYTHONDONTWRITEBYTECODE=1
6
+ ENV PYTHONUNBUFFERED=1
7
 
8
+ # Set work directory
9
+ WORKDIR /app
 
 
 
 
10
 
11
+ # Copy project files
12
+ COPY . /app
13
 
14
+ # Install dependencies
15
+ RUN pip install --upgrade pip \
16
+ && pip install -r requirements.txt
17
 
18
+ # Expose the port Streamlit will run on
19
+ EXPOSE 7860
20
 
21
+ # Streamlit-specific environment variables for Hugging Face Spaces
22
+ ENV PORT=7860
23
+ ENV STREAMLIT_SERVER_PORT=7860
24
+ ENV STREAMLIT_SERVER_ADDRESS=0.0.0.0
25
 
26
+ # Run the Streamlit app
27
+ CMD ["streamlit", "run", "src/langgraphagenticai/app.py"]
README.md CHANGED
@@ -1,20 +1,135 @@
 
1
  ---
2
- title: Agenticai App
3
- emoji: 🚀
4
- colorFrom: red
5
  colorTo: red
6
- sdk: docker
7
- app_port: 8501
8
- tags:
9
- - streamlit
10
  pinned: false
11
- short_description: Streamlit template space
12
  license: apache-2.0
 
13
  ---
14
 
15
- # Welcome to Streamlit!
16
 
17
- Edit `/src/streamlit_app.py` to customize this app to your heart's desire. :heart:
18
 
19
- If you have any questions, checkout our [documentation](https://docs.streamlit.io) and [community
20
- forums](https://discuss.streamlit.io).
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Agentic-AI-Project
2
  ---
3
+ title: LanggraphAgenticAI
4
+ emoji: 🐨
5
+ colorFrom: blue
6
  colorTo: red
7
+ sdk: streamlit
8
+ sdk_version: 1.42.0
9
+ app_file: app.py
 
10
  pinned: false
 
11
  license: apache-2.0
12
+ short_description: Refined langgraphAgenticAI
13
  ---
14
 
15
+ ### End To End Agentic AI Projects
16
 
17
+ The project is in development
18
 
19
+ ## 🤖 LangGraph: Build Stateful Agentic AI Graphs
20
+
21
+ [![Python 3.8+](https://img.shields.io/badge/python-3.8+-blue.svg)](https://www.python.org/downloads/)
22
+ [![Streamlit](https://img.shields.io/badge/streamlit-1.27+-red.svg)](https://streamlit.io/)
23
+ [![License: Apache 2.0](https://img.shields.io/badge/License-Apache_2.0-yellow.svg)](LICENSE)
24
+
25
+ ## 📋 Overview
26
+
27
+ Agentic-AI-Project leverages the power of LangGraph to enable the creation of stateful, agentic AI workflows. This project provides a modular, extensible framework for building advanced AI agents that can reason, interact, and adapt using graph-based architectures. The intuitive Streamlit UI allows users to configure, run, and visualize agentic workflows with ease.
28
+
29
+ ## ✨ Key Features
30
+
31
+ - **🔄 Stateful Agentic Graphs:** Build and manage AI agents as nodes in a dynamic graph
32
+ - **🧠 LLM Integration:** Seamless connection to Groq's powerful models
33
+ - **🛠️ Flexible Use Cases:** Pre-built chatbots and tool-enabled agents
34
+ - **🎯 Interactive UI:** User-friendly Streamlit interface
35
+ - **📦 Modular Design:** Extensible architecture for custom capabilities
36
+
37
+ ## 🔄 Use Case Workflow
38
+
39
+ ### 1. UI Configuration
40
+ ```python
41
+ # Configure your agent through Streamlit UI
42
+ - Select LLM provider (e.g., Groq)
43
+ - Choose use case (Basic Chatbot/Chatbot with Tool)
44
+ - Pick model from available options
45
+ ```
46
+
47
+ ### 2. Agent Interaction Flow
48
+ ```mermaid
49
+ graph LR
50
+ A[User Input] --> B[LLM Processing]
51
+ B --> C[Graph Execution]
52
+ C --> D[Tool Usage]
53
+ D --> E[Response Generation]
54
+ E --> F[Result Display]
55
+ ```
56
+
57
+ ## 🚀 Getting Started
58
+
59
+ ### Prerequisites
60
+
61
+ - Python 3.8+
62
+ - Git
63
+ - PowerShell
64
+
65
+ ### Installation
66
+
67
+ ```powershell
68
+ # Clone the repository
69
+ git clone <your-repo-url>
70
+ cd langgraph_project
71
+
72
+ # Create and activate virtual environment
73
+ python -m venv venv
74
+ .\venv\Scripts\Activate
75
+
76
+ # Install dependencies
77
+ pip install -r requirements.txt
78
+ ```
79
+
80
+ ### Running the App
81
+
82
+ ```powershell
83
+ # From the project root
84
+ streamlit run src/langgraphagenticai/app.py
85
+ ```
86
+
87
+ ## 📁 Project Structure
88
+
89
+ ```
90
+ src/
91
+ └── langgraphagenticai/
92
+ ├── app.py # Application entry point
93
+ ├── main.py # Core application logic
94
+ ├── ui/ # UI components and configuration
95
+ ├── LLMS/ # LLM integration modules
96
+ ├── graph/ # Graph construction logic
97
+ ├── nodes/ # Agent node definitions
98
+ ├── state/ # State management
99
+ ├── tools/ # Agent tools and utilities
100
+ └── vectorstore/ # Vector storage components
101
+ ```
102
+
103
+ ## 🛠️ Customization
104
+
105
+ ### Adding New Use Cases
106
+ ```python
107
+ # 1. Create a new node in nodes/
108
+ # 2. Define graph logic in graph/
109
+ # 3. Update UI configuration
110
+ ```
111
+
112
+ ### Integrating New LLMs
113
+ ```python
114
+ # 1. Add LLM wrapper in LLMS/
115
+ # 2. Update uiconfigfile.ini
116
+ # 3. Extend UI options
117
+ ```
118
+
119
+ ## 📝 License
120
+
121
+ This project is licensed under the Apache-2.0 License - see the [LICENSE](LICENSE) file for details.
122
+
123
+ ## 🙏 Acknowledgements
124
+
125
+ - Built with [LangGraph](https://github.com/langchain-ai/langgraph)
126
+ - Powered by [Streamlit](https://streamlit.io/)
127
+ - LLM support by [Groq](https://groq.com/)
128
+
129
+ ---
130
+
131
+ <div align="center">
132
+
133
+ **🚀 Empower your AI agents with stateful, agentic reasoning—start building with LangGraph today! 🚀**
134
+
135
+ </div>
requirements.txt CHANGED
@@ -1,3 +1,20 @@
1
- altair
 
 
 
 
 
 
 
 
 
 
 
 
2
  pandas
 
 
 
 
 
3
  streamlit
 
1
+ langchain
2
+ langchain_core
3
+ langchain_community
4
+ langchain-mcp-adapters
5
+
6
+ python-dotenv
7
+
8
+ langgraph
9
+ langchain_groq
10
+ langchain_google_genai
11
+ langchain_huggingface
12
+
13
+
14
  pandas
15
+ numpy
16
+ PyPDF2
17
+
18
+ faiss-cpu
19
+
20
  streamlit
src/__init__.py ADDED
File without changes
src/__pycache__/__init__.cpython-312.pyc ADDED
Binary file (210 Bytes). View file
 
src/langgraphagenticai/LLMS/__init__.py ADDED
File without changes
src/langgraphagenticai/LLMS/__pycache__/__init__.cpython-312.pyc ADDED
Binary file (234 Bytes). View file
 
src/langgraphagenticai/LLMS/__pycache__/groqllm.cpython-312.pyc ADDED
Binary file (1.4 kB). View file
 
src/langgraphagenticai/LLMS/groqllm.py ADDED
@@ -0,0 +1,20 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+ import streamlit as st
3
+ from langchain_groq import ChatGroq
4
+
5
class GroqLLM:
    """
    Factory for a ChatGroq chat model configured from the Streamlit UI controls.
    """

    def __init__(self, user_controls_input):
        # Dict produced by LoadStreamlitUI: holds 'GROQ_API_KEY' and
        # 'selected_groq_model' (among other controls).
        self.user_controls_input = user_controls_input

    def get_llm_model(self):
        """
        Build and return the ChatGroq model selected in the UI.

        Returns:
            ChatGroq | None: the configured model, or None when no API key is
            available (an error is shown in the UI instead of raising).

        Raises:
            ValueError: if reading the controls or constructing the client fails.
        """
        try:
            groq_api_key = self.user_controls_input['GROQ_API_KEY']
            selected_groq_model = self.user_controls_input['selected_groq_model']
            # os.environ.get avoids a KeyError when GROQ_API_KEY is not set in
            # the environment at all (os.environ["GROQ_API_KEY"] would raise).
            if groq_api_key == '' and os.environ.get("GROQ_API_KEY", '') == '':
                st.error("Please Enter the Groq API KEY")
                # Bail out instead of constructing a client with an empty key;
                # the caller (main.py) treats a falsy return as "not initialized".
                return None

            llm = ChatGroq(api_key=groq_api_key, model=selected_groq_model)
        except Exception as e:
            raise ValueError(f"Error Occurred with Exception : {e}")
        return llm
src/langgraphagenticai/__init__.py ADDED
File without changes
src/langgraphagenticai/__pycache__/__init__.cpython-312.pyc ADDED
Binary file (229 Bytes). View file
 
src/langgraphagenticai/__pycache__/main.cpython-312.pyc ADDED
Binary file (2.65 kB). View file
 
src/langgraphagenticai/app.py ADDED
@@ -0,0 +1,9 @@
 
 
 
 
 
 
 
 
 
 
1
import sys
import os
# Make the repository root importable so the absolute "src.langgraphagenticai..."
# imports resolve when this file is launched directly
# (e.g. `streamlit run src/langgraphagenticai/app.py`). Must run BEFORE the
# import below.
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), "../../")))

from src.langgraphagenticai.main import load_langgraph_agenticai_app


# Entry point: render the Streamlit UI and run the agentic app.
if __name__=="__main__":
    load_langgraph_agenticai_app()
src/langgraphagenticai/graph/__init__.py ADDED
File without changes
src/langgraphagenticai/graph/__pycache__/__init__.cpython-312.pyc ADDED
Binary file (235 Bytes). View file
 
src/langgraphagenticai/graph/__pycache__/graph_builder.cpython-312.pyc ADDED
Binary file (3.82 kB). View file
 
src/langgraphagenticai/graph/graph_builder.py ADDED
@@ -0,0 +1,70 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from langgraph.graph import StateGraph, START, END
2
+ from langgraph.prebuilt import tools_condition, ToolNode
3
+ from langchain_core.prompts import ChatPromptTemplate
4
+
5
+ from src.langgraphagenticai.state.state import State
6
+ from src.langgraphagenticai.LLMS.groqllm import GroqLLM
7
+ from src.langgraphagenticai.nodes.basic_chatbot_node import BasicChatbotNode
8
+ from src.langgraphagenticai.tools.search_tool import get_tool, create_tool_node
9
+ from src.langgraphagenticai.nodes.chatbot_with_Tool_node import ChatbotWithToolNode
10
+
11
+ class GraphBuilder:
12
+ def __init__(self, model):
13
+ self.llm = model
14
+ self.graph_builder=StateGraph(State)
15
+
16
+ def basic_chatbot_build_graph(self):
17
+ """
18
+ Builds a basic chatbot graph with a single tool node.
19
+ """
20
+ self.basic_chatbot_node = BasicChatbotNode(self.llm)
21
+ self.graph_builder.add_node("chatbot",self.basic_chatbot_node.process)
22
+ self.graph_builder.add_edge(START,"chatbot")
23
+ self.graph_builder.add_edge("chatbot", END)
24
+
25
+
26
+ def chatbot_with_tools_build_graph(self):
27
+ """
28
+ Builds an advanced chatbot graph with tool integration.
29
+ This method creates a chatbot graph that includes both a chatbot node
30
+ and a tool node. It defines tools, initializes the chatbot with tool
31
+ capabilities, and sets up conditional and direct edges between nodes.
32
+ The chatbot node is set as the entry point.
33
+ """
34
+
35
+ # Assuming get_tool() returns a list of tools
36
+
37
+ tools = get_tool()
38
+ tool_node = create_tool_node(tools)
39
+
40
+ # define the llm
41
+ llm = self.llm
42
+
43
+ ## Define the tool and tool node
44
+ obj_chatbot_with_node = ChatbotWithToolNode(self.llm)
45
+ chatbot = obj_chatbot_with_node.create_chatbot(tools)
46
+
47
+ self.graph_builder.add_node("chatbot", chatbot)
48
+ self.graph_builder.add_node("tools", tool_node)
49
+
50
+ self.graph_builder.add_edge(START, "chatbot")
51
+ self.graph_builder.add_conditional_edges("chatbot", tools_condition)
52
+ self.graph_builder.add_edge("tools", END)
53
+
54
+
55
+ def setup_graph(self, usecase):
56
+ """
57
+ Sets up the graph based on the selected use case.
58
+
59
+ Args:
60
+ usecase (str): The selected use case for the graph.
61
+ """
62
+ if usecase == "Basic Chatbot":
63
+ self.basic_chatbot_build_graph()
64
+
65
+
66
+ if usecase == "Chatbot with Tool":
67
+ self.chatbot_with_tools_build_graph()
68
+
69
+ return self.graph_builder.compile()
70
+
src/langgraphagenticai/main.py ADDED
@@ -0,0 +1,66 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import streamlit as st
2
+ import json
3
+ from src.langgraphagenticai.ui.streamlitui.loadui import LoadStreamlitUI
4
+ from src.langgraphagenticai.LLMS.groqllm import GroqLLM
5
+ from src.langgraphagenticai.graph.graph_builder import GraphBuilder
6
+ from src.langgraphagenticai.ui.streamlitui.display_result import DisplayResultStreamlit
7
+
8
# MAIN Function START
def load_langgraph_agenticai_app():
    """
    Loads and runs the LangGraph AgenticAI application with Streamlit UI.

    This function initializes the UI, handles user input, configures the LLM
    model, sets up the graph based on the selected use case, and displays the
    output, surfacing failures through st.error where possible.
    """

    # Load UI: renders the sidebar and returns the user's control selections.
    ui = LoadStreamlitUI()
    user_input = ui.load_streamlit_ui()

    if not user_input:
        st.error("Error: Failed to load user input from the UI.")
        return

    # Text input for user message.
    # NOTE(review): IsFetchButtonClicked and timeframe are initialized by
    # load_streamlit_ui() on every rerun; this assumes that call succeeded.
    if st.session_state.IsFetchButtonClicked:
        user_message = st.session_state.timeframe
    else :
        user_message = st.chat_input("Enter your message:")

    if user_message:
        try:
            # Configure LLM from the sidebar selections (provider/model/key).
            obj_llm_config = GroqLLM(user_controls_input=user_input)
            model = obj_llm_config.get_llm_model()

            if not model:
                st.error("Error: LLM model could not be initialized.")
                return

            # Initialize and set up the graph based on use case
            usecase = user_input.get('selected_usecase')
            if not usecase:
                st.error("Error: No use case selected.")
                return


            ### Graph Builder: compile the graph and render the result.
            graph_builder=GraphBuilder(model)
            try:
                graph = graph_builder.setup_graph(usecase)
                DisplayResultStreamlit(usecase,graph,user_message).display_result_on_ui()
            except Exception as e:
                # Graph construction/rendering errors are shown in the UI.
                st.error(f"Error: Graph setup failed - {e}")
                return


        except Exception as e:
            # NOTE(review): raising from a Streamlit script crashes the rerun;
            # st.error would be friendlier here — confirm this is intended.
            raise ValueError(f"Error Occurred with Exception : {e}")
src/langgraphagenticai/nodes/__init__.py ADDED
File without changes
src/langgraphagenticai/nodes/__pycache__/__init__.cpython-312.pyc ADDED
Binary file (235 Bytes). View file
 
src/langgraphagenticai/nodes/__pycache__/basic_chatbot_node.cpython-312.pyc ADDED
Binary file (1.08 kB). View file
 
src/langgraphagenticai/nodes/__pycache__/chatbot_with_Tool_node.cpython-312.pyc ADDED
Binary file (2.29 kB). View file
 
src/langgraphagenticai/nodes/basic_chatbot_node.py ADDED
@@ -0,0 +1,14 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from src.langgraphagenticai.state.state import State
2
+
3
class BasicChatbotNode:
    """
    Graph node implementing the plain (tool-free) chatbot.
    """

    def __init__(self, model):
        # The chat model used to answer every turn.
        self.llm = model

    def process(self, state: State) -> dict:
        """
        Run the LLM over the accumulated conversation and return the reply
        as a state update ({"messages": <llm response>}).
        """
        reply = self.llm.invoke(state['messages'])
        return {"messages": reply}
src/langgraphagenticai/nodes/chatbot_with_Tool_node.py ADDED
@@ -0,0 +1,42 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from src.langgraphagenticai.state.state import State
2
+
3
class ChatbotWithToolNode:
    """
    Graph node implementing a chatbot whose LLM can call tools.
    """

    def __init__(self, model):
        # Base chat model; tools are bound to it in create_chatbot().
        self.llm = model

    def process(self, state: State) -> dict:
        """
        Answer the latest user message and append a simulated tool response.

        Takes the last entry of state['messages'] (empty string when there is
        none), invokes the raw LLM on it, and returns both the LLM reply and a
        placeholder tool string as the message update.
        """
        history = state['messages']
        user_input = history[-1] if history else ""
        llm_response = self.llm.invoke([{"role": "user", "content": user_input}])

        # Placeholder standing in for real tool output.
        tools_response = f"Tool integration for: '{user_input}'"

        return {"messages": [llm_response, tools_response]}

    def create_chatbot(self, tools):
        """
        Build a LangGraph-compatible node function around a tool-bound LLM.

        Args:
            tools: Tools to bind to the model so it can emit tool calls.

        Returns:
            Callable: a node function mapping a State to {"messages": [reply]}.
        """
        bound_llm = self.llm.bind_tools(tools)

        def chatbot_node(state: State) -> dict:
            """Invoke the tool-bound LLM on the accumulated messages."""
            reply = bound_llm.invoke(state["messages"])
            return {"messages": [reply]}

        return chatbot_node
40
+
41
+
42
+
src/langgraphagenticai/state/__init__.py ADDED
File without changes
src/langgraphagenticai/state/__pycache__/__init__.cpython-312.pyc ADDED
Binary file (235 Bytes). View file
 
src/langgraphagenticai/state/__pycache__/state.cpython-312.pyc ADDED
Binary file (898 Bytes). View file
 
src/langgraphagenticai/state/state.py ADDED
@@ -0,0 +1,11 @@
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from typing import Annotated, Literal, Optional
2
+ from typing_extensions import TypedDict
3
+ from langgraph.graph.message import add_messages
4
+ from typing import TypedDict, Annotated, List
5
+ from langchain_core.messages import HumanMessage, AIMessage
6
+
7
class State(TypedDict):
    """
    Represents the structure of the state used in the graph.
    """
    # Conversation history. The add_messages annotation is the LangGraph
    # reducer, so node updates are merged into this list rather than
    # replacing it.
    messages: Annotated[list, add_messages]
src/langgraphagenticai/tools/__init__.py ADDED
File without changes
src/langgraphagenticai/tools/__pycache__/__init__.cpython-312.pyc ADDED
Binary file (235 Bytes). View file
 
src/langgraphagenticai/tools/__pycache__/search_tool.cpython-312.pyc ADDED
Binary file (1.17 kB). View file
 
src/langgraphagenticai/tools/search_tool.py ADDED
@@ -0,0 +1,22 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from langchain_community.tools.tavily_search import TavilySearchResults
2
+ from langchain_core.tools import tool
3
+ from langgraph.prebuilt import ToolNode
4
+
5
def get_tool(max_results=5):
    """
    Return the list of tools available to the agent.

    Currently a single Tavily web-search tool.

    Args:
        max_results (int): Maximum number of search results the tool returns.
            Defaults to 5, matching the previous hard-coded value.

    Returns:
        list: Tool instances ready to be bound to an LLM / wrapped in a ToolNode.
    """
    return [TavilySearchResults(max_results=max_results)]
12
+
13
def create_tool_node(tools):
    """
    Wrap the given tools in a LangGraph ToolNode.

    Args:
        tools: Tool instances to expose to the graph.

    Returns:
        ToolNode: A graph node that executes tool calls emitted by the LLM.
    """
    return ToolNode(tools=tools)
src/langgraphagenticai/ui/__init__.py ADDED
File without changes
src/langgraphagenticai/ui/__pycache__/__init__.cpython-312.pyc ADDED
Binary file (232 Bytes). View file
 
src/langgraphagenticai/ui/__pycache__/uiconfigfile.cpython-312.pyc ADDED
Binary file (2.14 kB). View file
 
src/langgraphagenticai/ui/streamlitui/__pycache__/display_result.cpython-312.pyc ADDED
Binary file (3.05 kB). View file
 
src/langgraphagenticai/ui/streamlitui/__pycache__/display_results.cpython-312.pyc ADDED
Binary file (3.05 kB). View file
 
src/langgraphagenticai/ui/streamlitui/__pycache__/loadui.cpython-312.pyc ADDED
Binary file (4.04 kB). View file
 
src/langgraphagenticai/ui/streamlitui/display_result.py ADDED
@@ -0,0 +1,42 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import streamlit as st
2
+ from langchain_core.messages import HumanMessage,AIMessage,ToolMessage
3
+ import json
4
+
5
+
6
class DisplayResultStreamlit:
    """
    Renders graph output in the Streamlit chat UI for each use case.
    """

    def __init__(self, usecase, graph, user_message):
        self.usecase = usecase
        self.graph = graph
        self.user_message = user_message

    def display_result_on_ui(self):
        """
        Run the graph on the user's message and render the exchange.

        "Basic Chatbot" streams graph events and echoes user + assistant turns;
        "Chatbot with Tool" invokes once and replays the resulting message
        history, surfacing tool output between explicit start/end markers.
        """
        usecase = self.usecase
        graph = self.graph
        user_message = self.user_message
        if usecase == "Basic Chatbot":
            for event in graph.stream({'messages': ("user", user_message)}):
                # Debug print statements removed; render directly to the UI.
                for value in event.values():
                    with st.chat_message("user"):
                        st.write(user_message)
                    with st.chat_message("assistant"):
                        st.write(value["messages"].content)

        elif usecase == "Chatbot with Tool":
            # Prepare state and invoke the graph once (non-streaming).
            initial_state = {"messages": [user_message]}
            res = graph.invoke(initial_state)
            for message in res['messages']:
                # isinstance is the idiomatic type check (also covers subclasses).
                if isinstance(message, HumanMessage):
                    with st.chat_message("user"):
                        st.write(message.content)
                elif isinstance(message, ToolMessage):
                    with st.chat_message("ai"):
                        st.write("Tool Call Start")
                        st.write(message.content)
                        st.write("Tool Call End")
                elif isinstance(message, AIMessage) and message.content:
                    with st.chat_message("assistant"):
                        st.write(message.content)
42
+
src/langgraphagenticai/ui/streamlitui/loadui.py ADDED
@@ -0,0 +1,72 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import streamlit as st
2
+ import os
3
+ from datetime import date
4
+
5
+ from langchain_core.messages import AIMessage,HumanMessage
6
+ from src.langgraphagenticai.ui.uiconfigfile import Config
7
+
8
+
9
class LoadStreamlitUI:
    """
    Builds the Streamlit page and sidebar controls and collects the user's
    configuration (LLM, model, use case, API keys) into a dict.
    """

    def __init__(self):
        self.config = Config()  # config: options read from uiconfigfile.ini
        # Widget values collected during load_streamlit_ui(); returned to caller.
        self.user_controls = {}

    def initialize_session(self):
        # Fresh per-session workflow state stored under st.session_state.state.
        # NOTE(review): these SDLC-style keys (requirements/user_stories/...)
        # are not read anywhere in the visible code — presumably for another
        # use case; confirm before removing.
        return {
            "current_step": "requirements",
            "requirements": "",
            "user_stories": "",
            "po_feedback": "",
            "generated_code": "",
            "review_feedback": "",
            "decision": None
        }



    def load_streamlit_ui(self):
        """
        Render the page chrome and sidebar controls.

        Returns:
            dict: self.user_controls with the selected LLM, model, use case and
            any API keys entered (keys are also mirrored into session state).
        """
        st.set_page_config(page_title= "🤖 " + self.config.get_page_title(), layout="wide")
        st.header("🤖 " + self.config.get_page_title())
        # Reset per-rerun flags consumed by main.load_langgraph_agenticai_app.
        st.session_state.timeframe = ''
        st.session_state.IsFetchButtonClicked = False
        st.session_state.IsSDLC = False



        with st.sidebar:
            # Get options from config
            llm_options = self.config.get_llm_options()
            usecase_options = self.config.get_usecase_options()

            # LLM selection
            self.user_controls["selected_llm"] = st.selectbox("Select LLM", llm_options)

            if self.user_controls["selected_llm"] == 'Groq':
                # Model selection
                model_options = self.config.get_groq_model_options()
                self.user_controls["selected_groq_model"] = st.selectbox("Select Model", model_options)
                # API key input (mirrored into session state for later reruns)
                self.user_controls["GROQ_API_KEY"] = st.session_state["GROQ_API_KEY"] = st.text_input("API Key",
                                                                                                      type="password")
                # Validate API key
                if not self.user_controls["GROQ_API_KEY"]:
                    st.warning("⚠️ Please enter your GROQ API key to proceed. Don't have? refer : https://console.groq.com/keys ")


            # Use case selection
            self.user_controls["selected_usecase"] = st.selectbox("Select Usecases", usecase_options)

            if self.user_controls["selected_usecase"] =="Chatbot with Tool":
                # API key input — also exported to os.environ, presumably
                # because the Tavily search tool reads it from the environment
                # (TODO confirm).
                os.environ["TAVILY_API_KEY"] = self.user_controls["TAVILY_API_KEY"] = st.session_state["TAVILY_API_KEY"] = st.text_input("TAVILY API KEY",
                                                                                                                                         type="password")
                # Validate API key
                if not self.user_controls["TAVILY_API_KEY"]:
                    st.warning("⚠️ Please enter your TAVILY_API_KEY key to proceed. Don't have? refer : https://app.tavily.com/home")

            # Create the workflow state only once per browser session.
            if "state" not in st.session_state:
                st.session_state.state = self.initialize_session()



        return self.user_controls
src/langgraphagenticai/ui/uiconfigfile.ini ADDED
@@ -0,0 +1,5 @@
 
 
 
 
 
 
1
+ [DEFAULT]
2
+ PAGE_TITLE = LangGraph: Build Stateful Agentic AI graph
3
+ LLM_OPTIONS = Groq
4
+ USECASE_OPTIONS = Basic Chatbot, Chatbot with Tool
5
+ GROQ_MODEL_OPTIONS = qwen-qwq-32b, llama3-70b-8192, deepseek-r1-distill-llama-70b, meta-llama/llama-4-maverick-17b-128e-instruct
src/langgraphagenticai/ui/uiconfigfile.py ADDED
@@ -0,0 +1,26 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from configparser import ConfigParser
2
+ import os
3
+
4
class Config:
    """
    Typed accessor over the UI configuration .ini file (uiconfigfile.ini).
    """

    def __init__(self, config_file=None):
        """
        Args:
            config_file (str | None): Path to the .ini file. When None, the
                uiconfigfile.ini shipped next to this module is used.
        """
        # Resolve the default relative to this module so the path works on any
        # machine (the previous default was a hard-coded absolute Windows path).
        if config_file is None:
            config_file = os.path.join(os.path.dirname(__file__), "uiconfigfile.ini")
        self.config_file = config_file
        self.config = ConfigParser()
        self.config.read(config_file)

    def get_llm_options(self):
        """Return the LLM provider names from LLM_OPTIONS (comma-separated)."""
        return self.config["DEFAULT"].get("LLM_OPTIONS").split(", ")

    def get_usecase_options(self):
        """Return the use case names from USECASE_OPTIONS (comma-separated)."""
        return self.config["DEFAULT"].get("USECASE_OPTIONS").split(", ")

    def get_groq_model_options(self):
        """Return the Groq model ids from GROQ_MODEL_OPTIONS (comma-separated)."""
        return self.config["DEFAULT"].get("GROQ_MODEL_OPTIONS").split(", ")

    def get_page_title(self):
        """Return the PAGE_TITLE string shown in the Streamlit header."""
        return self.config["DEFAULT"].get("PAGE_TITLE")
src/langgraphagenticai/vectorstore/__init__.py ADDED
File without changes
src/streamlit_app.py DELETED
@@ -1,40 +0,0 @@
1
- import altair as alt
2
- import numpy as np
3
- import pandas as pd
4
- import streamlit as st
5
-
6
- """
7
- # Welcome to Streamlit!
8
-
9
- Edit `/streamlit_app.py` to customize this app to your heart's desire :heart:.
10
- If you have any questions, checkout our [documentation](https://docs.streamlit.io) and [community
11
- forums](https://discuss.streamlit.io).
12
-
13
- In the meantime, below is an example of what you can do with just a few lines of code:
14
- """
15
-
16
- num_points = st.slider("Number of points in spiral", 1, 10000, 1100)
17
- num_turns = st.slider("Number of turns in spiral", 1, 300, 31)
18
-
19
- indices = np.linspace(0, 1, num_points)
20
- theta = 2 * np.pi * num_turns * indices
21
- radius = indices
22
-
23
- x = radius * np.cos(theta)
24
- y = radius * np.sin(theta)
25
-
26
- df = pd.DataFrame({
27
- "x": x,
28
- "y": y,
29
- "idx": indices,
30
- "rand": np.random.randn(num_points),
31
- })
32
-
33
- st.altair_chart(alt.Chart(df, height=700, width=700)
34
- .mark_point(filled=True)
35
- .encode(
36
- x=alt.X("x", axis=None),
37
- y=alt.Y("y", axis=None),
38
- color=alt.Color("idx", legend=None, scale=alt.Scale()),
39
- size=alt.Size("rand", legend=None, scale=alt.Scale(range=[1, 150])),
40
- ))