Deepri24 committed
Commit 0b19c40 · 0 Parent(s)

modular struct and files
Files changed (37)
  1. .gitignore +1 -0
  2. README.md +109 -0
  3. app.py +5 -0
  4. requirements.txt +8 -0
  5. src/__init__.py +0 -0
  6. src/__pycache__/__init__.cpython-312.pyc +0 -0
  7. src/langgraphagenticai/LLMS/__init__.py +0 -0
  8. src/langgraphagenticai/LLMS/__pycache__/__init__.cpython-312.pyc +0 -0
  9. src/langgraphagenticai/LLMS/__pycache__/groqllm.cpython-312.pyc +0 -0
  10. src/langgraphagenticai/LLMS/groqllm.py +20 -0
  11. src/langgraphagenticai/__init__.py +0 -0
  12. src/langgraphagenticai/__pycache__/__init__.cpython-312.pyc +0 -0
  13. src/langgraphagenticai/__pycache__/main.cpython-312.pyc +0 -0
  14. src/langgraphagenticai/graph/__init__.py +0 -0
  15. src/langgraphagenticai/graph/__pycache__/__init__.cpython-312.pyc +0 -0
  16. src/langgraphagenticai/graph/__pycache__/graph_builder.cpython-312.pyc +0 -0
  17. src/langgraphagenticai/graph/graph_builder.py +39 -0
  18. src/langgraphagenticai/main.py +66 -0
  19. src/langgraphagenticai/nodes/__init__.py +0 -0
  20. src/langgraphagenticai/nodes/__pycache__/__init__.cpython-312.pyc +0 -0
  21. src/langgraphagenticai/nodes/__pycache__/basic_chatbot_node.cpython-312.pyc +0 -0
  22. src/langgraphagenticai/nodes/basic_chatbot_node.py +14 -0
  23. src/langgraphagenticai/state/__init__.py +0 -0
  24. src/langgraphagenticai/state/__pycache__/__init__.cpython-312.pyc +0 -0
  25. src/langgraphagenticai/state/__pycache__/state.cpython-312.pyc +0 -0
  26. src/langgraphagenticai/state/state.py +11 -0
  27. src/langgraphagenticai/tools/__init__.py +0 -0
  28. src/langgraphagenticai/ui/__init__.py +0 -0
  29. src/langgraphagenticai/ui/__pycache__/__init__.cpython-312.pyc +0 -0
  30. src/langgraphagenticai/ui/__pycache__/uiconfigfile.cpython-312.pyc +0 -0
  31. src/langgraphagenticai/ui/streamlitui/__pycache__/display_result.cpython-312.pyc +0 -0
  32. src/langgraphagenticai/ui/streamlitui/__pycache__/loadui.cpython-312.pyc +0 -0
  33. src/langgraphagenticai/ui/streamlitui/display_result.py +24 -0
  34. src/langgraphagenticai/ui/streamlitui/loadui.py +64 -0
  35. src/langgraphagenticai/ui/uiconfigfile.ini +6 -0
  36. src/langgraphagenticai/ui/uiconfigfile.py +19 -0
  37. src/langgraphagenticai/vectorstore/__init__.py +0 -0
.gitignore ADDED
@@ -0,0 +1 @@
+ venv/
README.md ADDED
@@ -0,0 +1,109 @@
+ # Project Setup Steps
+
+ ## Steps
+
+ ### 1. Open a New Folder
+
+ ### 2. Create a New Virtual Environment
+
+ ### 3. Install Requirements
+ ```sh
+ pip install -r requirements.txt
+ ```
+
+ ### 4. Define Folder Structure & Commit Code to GitHub
+ #### a. Create a New Repository
+ ```sh
+ git init
+ ```
+ #### b. Add `.gitignore` and Include `venv/`
+ #### c. Commit One File to Establish the Connection with GitHub
+ #### d. Verify the Initial Commit in GitHub
+
+ ### 5. Create the `src` Folder and Make It a Package
+ ```sh
+ mkdir src
+ ```
+ Add `__init__.py` to make it a package.
+
+ ### 6. Define the Project Structure
+ Inside `src/`, create a folder `langgraphagenticai/` and add `__init__.py`. Inside it, create:
+ - `LLMS/` - `__init__.py`
+ - `nodes/` - `__init__.py`
+ - `graph/` - `__init__.py`
+ - `state/` - `__init__.py`
+ - `tools/` - `__init__.py`
+ - `vectorstore/` - `__init__.py`
+ - `ui/` - `__init__.py`
+ - Lastly, add `main.py`
+
+ ### 7. Create `app.py` Outside `src/`
+ This is the main execution entry point.
+
+ ## Start Writing Code
+
+ ### 8. LLM Setup
+ #### a. Inside `LLMS/`, create `groqllm.py` to load the LLM selected via the Streamlit UI.
+
+ ### 9. UI Setup
+ #### a. Inside `ui/streamlitui/`, create:
+ - `loadui.py`
+ - `display_result.py`
+
+ #### b. Create a Config File
+ Inside `ui/`, create `uiconfigfile.ini` (plain-text key-value pairs).
+
+ #### c. Create a Config Parser
+ Inside `ui/`, create `uiconfigfile.py` to read from `uiconfigfile.ini`.
+
+ #### d. Implement `loadui.py`
+ ```python
+ from src.langgraphagenticai.ui.uiconfigfile import Config
+ ```
+ - Load sidebar options from the config file
+ - Store session state
+ - Load UI components (left & right sections)
+
+ ### 10. Run the UI
+ Run the UI to check that it loads correctly:
+ ```sh
+ streamlit run app.py
+ ```
+
+ ### 11. Implement `main.py`
+ #### a. Execution starts here, based on the user's choices.
+ #### b. Load the LLM via `groqllm.py`.
+ #### c. Implement `graph_builder.py` inside `graph/`.
+ - Import `State` inside `graph_builder.py`
+
+ #### d. Implement Nodes
+ - Start with `BasicChatbotNode`.
+ ```python
+ from src.langgraphagenticai.nodes.basic_chatbot_node import BasicChatbotNode
+ ```
+ - Define the edges, start, and end of the graph.
+
+ #### e. Connect the Graph in `main.py`
+ ```python
+ from src.langgraphagenticai.graph.graph_builder import GraphBuilder
+ ```
+ - Invoke the graph based on the selected use case.
+ - Compile and stream the results.
+
+ #### f. Update Display Results
+ Modify `display_result.py` to update the UI with results and call it in `main.py`.
+ ```python
+ from src.langgraphagenticai.ui.streamlitui.display_result import DisplayResultStreamlit
+ ```
+
+ ### 12. Run the Application
+ ```sh
+ streamlit run app.py
+ ```
app.py ADDED
@@ -0,0 +1,5 @@
+ from src.langgraphagenticai.main import load_langgraph_agenticai_app
+
+
+ if __name__ == "__main__":
+     load_langgraph_agenticai_app()
requirements.txt ADDED
@@ -0,0 +1,8 @@
+ langchain
+ langgraph
+ langchain_community
+ langchain_core
+ langchain_groq
+ langchain_openai
+ faiss-cpu
+ streamlit
src/__init__.py ADDED
File without changes
src/__pycache__/__init__.cpython-312.pyc ADDED
Binary file (161 Bytes).
src/langgraphagenticai/LLMS/__init__.py ADDED
File without changes
src/langgraphagenticai/LLMS/__pycache__/__init__.cpython-312.pyc ADDED
Binary file (185 Bytes).
src/langgraphagenticai/LLMS/__pycache__/groqllm.cpython-312.pyc ADDED
Binary file (1.35 kB).
src/langgraphagenticai/LLMS/groqllm.py ADDED
@@ -0,0 +1,20 @@
+ import os
+ import streamlit as st
+ from langchain_groq import ChatGroq
+
+ class GroqLLM:
+     def __init__(self, user_controls_input):
+         self.user_controls_input = user_controls_input
+
+     def get_llm_model(self):
+         try:
+             groq_api_key = self.user_controls_input['GROQ_API_KEY']
+             selected_groq_model = self.user_controls_input['selected_groq_model']
+             if groq_api_key == '' and os.environ.get("GROQ_API_KEY", "") == '':
+                 st.error("Please enter the Groq API key")
+
+             llm = ChatGroq(api_key=groq_api_key, model=selected_groq_model)
+
+         except Exception as e:
+             raise ValueError(f"Error occurred with exception: {e}")
+         return llm
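
For reference, a minimal usage sketch of this loader (the control dictionary mirrors the keys set by `loadui.py`; the key value below is a placeholder):

```python
# Hypothetical standalone use of GroqLLM; in the app the dict comes from LoadStreamlitUI.
from src.langgraphagenticai.LLMS.groqllm import GroqLLM

user_controls = {
    "GROQ_API_KEY": "gsk_...",                # placeholder; entered via the sidebar in the real app
    "selected_groq_model": "llama3-8b-8192",  # one of the options in uiconfigfile.ini
}

llm = GroqLLM(user_controls_input=user_controls).get_llm_model()
print(llm.invoke("Hello!").content)           # ChatGroq returns an AIMessage; .content is the text
```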
src/langgraphagenticai/__init__.py ADDED
File without changes
src/langgraphagenticai/__pycache__/__init__.cpython-312.pyc ADDED
Binary file (180 Bytes).
src/langgraphagenticai/__pycache__/main.cpython-312.pyc ADDED
Binary file (2.6 kB).
src/langgraphagenticai/graph/__init__.py ADDED
File without changes
src/langgraphagenticai/graph/__pycache__/__init__.cpython-312.pyc ADDED
Binary file (186 Bytes).
src/langgraphagenticai/graph/__pycache__/graph_builder.cpython-312.pyc ADDED
Binary file (2.27 kB).
src/langgraphagenticai/graph/graph_builder.py ADDED
@@ -0,0 +1,39 @@
+ from langgraph.graph import StateGraph, START, END, MessagesState
+ from langgraph.prebuilt import tools_condition, ToolNode
+ from langchain_core.prompts import ChatPromptTemplate
+ from src.langgraphagenticai.state.state import State
+ from src.langgraphagenticai.nodes.basic_chatbot_node import BasicChatbotNode
+
+
+ class GraphBuilder:
+
+     def __init__(self, model):
+         self.llm = model
+         self.graph_builder = StateGraph(State)
+
+     def basic_chatbot_build_graph(self):
+         """
+         Builds a basic chatbot graph using LangGraph.
+         This method initializes a chatbot node using the `BasicChatbotNode` class
+         and integrates it into the graph. The chatbot node is set as both the
+         entry and exit point of the graph.
+         """
+         self.basic_chatbot_node = BasicChatbotNode(self.llm)
+         self.graph_builder.add_node("chatbot", self.basic_chatbot_node.process)
+         self.graph_builder.add_edge(START, "chatbot")
+         self.graph_builder.add_edge("chatbot", END)
+
+     def setup_graph(self, usecase: str):
+         """
+         Sets up the graph for the selected use case.
+         """
+         if usecase == "Basic Chatbot":
+             self.basic_chatbot_build_graph()
+         return self.graph_builder.compile()
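
A small usage sketch, assuming `llm` is the ChatGroq instance returned by `GroqLLM.get_llm_model()`:

```python
# Build the "Basic Chatbot" graph (START -> chatbot -> END) and invoke it once.
from src.langgraphagenticai.graph.graph_builder import GraphBuilder

graph = GraphBuilder(llm).setup_graph("Basic Chatbot")
result = graph.invoke({"messages": [("user", "Hi there!")]})
print(result["messages"][-1].content)  # the last message in the state is the assistant's reply
```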
src/langgraphagenticai/main.py ADDED
@@ -0,0 +1,66 @@
+ import streamlit as st
+ import json
+ from src.langgraphagenticai.ui.streamlitui.loadui import LoadStreamlitUI
+ from src.langgraphagenticai.LLMS.groqllm import GroqLLM
+ from src.langgraphagenticai.graph.graph_builder import GraphBuilder
+ from src.langgraphagenticai.ui.streamlitui.display_result import DisplayResultStreamlit
+
+ # MAIN Function START
+ def load_langgraph_agenticai_app():
+     """
+     Loads and runs the LangGraph AgenticAI application with Streamlit UI.
+     This function initializes the UI, handles user input, configures the LLM model,
+     sets up the graph based on the selected use case, and displays the output while
+     implementing exception handling for robustness.
+     """
+
+     # Load UI
+     ui = LoadStreamlitUI()
+     user_input = ui.load_streamlit_ui()
+
+     if not user_input:
+         st.error("Error: Failed to load user input from the UI.")
+         return
+
+     # Text input for user message
+     if st.session_state.IsFetchButtonClicked:
+         user_message = st.session_state.timeframe
+     else:
+         user_message = st.chat_input("Enter your message:")
+
+     if user_message:
+         try:
+             # Configure LLM
+             obj_llm_config = GroqLLM(user_controls_input=user_input)
+             model = obj_llm_config.get_llm_model()
+
+             if not model:
+                 st.error("Error: LLM model could not be initialized.")
+                 return
+
+             # Initialize and set up the graph based on use case
+             usecase = user_input.get('selected_usecase')
+             if not usecase:
+                 st.error("Error: No use case selected.")
+                 return
+
+             ### Graph Builder
+             graph_builder = GraphBuilder(model)
+             try:
+                 graph = graph_builder.setup_graph(usecase)
+                 DisplayResultStreamlit(usecase, graph, user_message).display_result_on_ui()
+             except Exception as e:
+                 st.error(f"Error: Graph setup failed - {e}")
+                 return
+
+         except Exception as e:
+             raise ValueError(f"Error Occurred with Exception : {e}")
src/langgraphagenticai/nodes/__init__.py ADDED
File without changes
src/langgraphagenticai/nodes/__pycache__/__init__.cpython-312.pyc ADDED
Binary file (186 Bytes).
src/langgraphagenticai/nodes/__pycache__/basic_chatbot_node.cpython-312.pyc ADDED
Binary file (1.03 kB).
src/langgraphagenticai/nodes/basic_chatbot_node.py ADDED
@@ -0,0 +1,14 @@
+ from src.langgraphagenticai.state.state import State
+
+ class BasicChatbotNode:
+     """
+     Basic chatbot logic implementation.
+     """
+     def __init__(self, model):
+         self.llm = model
+
+     def process(self, state: State) -> dict:
+         """
+         Processes the input state and generates a chatbot response.
+         """
+         return {"messages": self.llm.invoke(state['messages'])}
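
In isolation the node just maps the current state to a partial state update, roughly as in this sketch (assuming `llm` is any LangChain chat model):

```python
# The returned dict contains only the keys being updated; the add_messages
# annotation on State then appends the new AIMessage to the existing history.
node = BasicChatbotNode(llm)
update = node.process({"messages": [("user", "What is LangGraph?")]})
print(update["messages"].content)  # the model's reply
```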
src/langgraphagenticai/state/__init__.py ADDED
File without changes
src/langgraphagenticai/state/__pycache__/__init__.cpython-312.pyc ADDED
Binary file (186 Bytes).
src/langgraphagenticai/state/__pycache__/state.cpython-312.pyc ADDED
Binary file (849 Bytes).
src/langgraphagenticai/state/state.py ADDED
@@ -0,0 +1,11 @@
+ from typing import Annotated, Literal, Optional
+ from typing_extensions import TypedDict
+ from langgraph.graph.message import add_messages
+ from typing import TypedDict, Annotated, List
+ from langchain_core.messages import HumanMessage, AIMessage
+
+ class State(TypedDict):
+     """
+     Represents the structure of the state used in the graph.
+     """
+     messages: Annotated[list, add_messages]
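
A brief, hedged illustration of what the `add_messages` reducer on the `messages` key does (values are illustrative):

```python
# add_messages merges new messages into the existing list instead of replacing it,
# which is what lets each node append to the conversation history.
from langgraph.graph.message import add_messages

history = add_messages([("user", "Hi")], [("assistant", "Hello! How can I help?")])
print(len(history))  # 2 -- both messages are kept, coerced to message objects
```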
src/langgraphagenticai/tools/__init__.py ADDED
File without changes
src/langgraphagenticai/ui/__init__.py ADDED
File without changes
src/langgraphagenticai/ui/__pycache__/__init__.cpython-312.pyc ADDED
Binary file (183 Bytes).
src/langgraphagenticai/ui/__pycache__/uiconfigfile.cpython-312.pyc ADDED
Binary file (1.83 kB).
src/langgraphagenticai/ui/streamlitui/__pycache__/display_result.cpython-312.pyc ADDED
Binary file (1.85 kB).
src/langgraphagenticai/ui/streamlitui/__pycache__/loadui.cpython-312.pyc ADDED
Binary file (3.42 kB).
src/langgraphagenticai/ui/streamlitui/display_result.py ADDED
@@ -0,0 +1,24 @@
+ import streamlit as st
+ from langchain_core.messages import HumanMessage, AIMessage
+ import json
+
+
+ class DisplayResultStreamlit:
+     def __init__(self, usecase, graph, user_message):
+         self.usecase = usecase
+         self.graph = graph
+         self.user_message = user_message
+
+     def display_result_on_ui(self):
+         usecase = self.usecase
+         graph = self.graph
+         user_message = self.user_message
+         if usecase == "Basic Chatbot":
+             for event in graph.stream({'messages': [("user", user_message)]}):
+                 print(event.values())
+                 for value in event.values():
+                     print(value['messages'])
+                     with st.chat_message("user"):
+                         st.write(user_message)
+                     with st.chat_message("assistant"):
+                         st.write(value["messages"].content)
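
For context, each item yielded by `graph.stream(...)` in the loop above maps a node name to that node's state update; a sketch of consuming it outside Streamlit:

```python
# Mirrors the loop in display_result_on_ui: event -> {node_name: partial_state_update}
for event in graph.stream({"messages": [("user", "Hi")]}):
    for node_name, update in event.items():
        print(node_name, update["messages"].content)  # e.g. "chatbot <assistant reply>"
```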
src/langgraphagenticai/ui/streamlitui/loadui.py ADDED
@@ -0,0 +1,64 @@
+ import streamlit as st
+ import os
+ from datetime import date
+
+ from langchain_core.messages import AIMessage, HumanMessage
+ from src.langgraphagenticai.ui.uiconfigfile import Config
+
+
+ class LoadStreamlitUI:
+     def __init__(self):
+         self.config = Config()  # config
+         self.user_controls = {}
+
+     def initialize_session(self):
+         return {
+             "current_step": "requirements",
+             "requirements": "",
+             "user_stories": "",
+             "po_feedback": "",
+             "generated_code": "",
+             "review_feedback": "",
+             "decision": None
+         }
+
+     def load_streamlit_ui(self):
+         st.set_page_config(page_title="🤖 " + self.config.get_page_title(), layout="wide")
+         st.header("🤖 " + self.config.get_page_title())
+         st.session_state.timeframe = ''
+         st.session_state.IsFetchButtonClicked = False
+         st.session_state.IsSDLC = False
+
+         with st.sidebar:
+             # Get options from config
+             llm_options = self.config.get_llm_options()
+             usecase_options = self.config.get_usecase_options()
+
+             # LLM selection
+             self.user_controls["selected_llm"] = st.selectbox("Select LLM", llm_options)
+
+             if self.user_controls["selected_llm"] == 'Groq':
+                 # Model selection
+                 model_options = self.config.get_groq_model_options()
+                 self.user_controls["selected_groq_model"] = st.selectbox("Select Model", model_options)
+                 # API key input
+                 self.user_controls["GROQ_API_KEY"] = st.session_state["GROQ_API_KEY"] = st.text_input("API Key",
+                                                                                                        type="password")
+                 # Validate API key
+                 if not self.user_controls["GROQ_API_KEY"]:
+                     st.warning("⚠️ Please enter your GROQ API key to proceed. Don't have one? Refer to: https://console.groq.com/keys")
+
+             # Use case selection
+             self.user_controls["selected_usecase"] = st.selectbox("Select Usecases", usecase_options)
+
+             if "state" not in st.session_state:
+                 st.session_state.state = self.initialize_session()
+
+         return self.user_controls
src/langgraphagenticai/ui/uiconfigfile.ini ADDED
@@ -0,0 +1,6 @@
+ [DEFAULT]
+ PAGE_TITLE = LangGraph: Build Stateful Agentic AI graph
+ LLM_OPTIONS = Groq
+ USECASE_OPTIONS = Basic Chatbot
+ GROQ_MODEL_OPTIONS = mixtral-8x7b-32768, llama3-8b-8192, llama3-70b-8192, gemma-7b-it
+
src/langgraphagenticai/ui/uiconfigfile.py ADDED
@@ -0,0 +1,19 @@
+ from configparser import ConfigParser
+
+ class Config:
+     def __init__(self, config_file="./src/langgraphagenticai/ui/uiconfigfile.ini"):
+         self.config = ConfigParser()
+         self.config.read(config_file)
+
+     def get_llm_options(self):
+         return self.config["DEFAULT"].get("LLM_OPTIONS").split(", ")
+
+     def get_usecase_options(self):
+         return self.config["DEFAULT"].get("USECASE_OPTIONS").split(", ")
+
+     def get_groq_model_options(self):
+         return self.config["DEFAULT"].get("GROQ_MODEL_OPTIONS").split(", ")
+
+     def get_page_title(self):
+         return self.config["DEFAULT"].get("PAGE_TITLE")
+
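
A quick sketch of how this parser resolves the sidebar options (run from the project root so the relative ini path in `__init__` resolves):

```python
# Reads the [DEFAULT] section of uiconfigfile.ini shown above.
from src.langgraphagenticai.ui.uiconfigfile import Config

cfg = Config()
print(cfg.get_page_title())          # LangGraph: Build Stateful Agentic AI graph
print(cfg.get_llm_options())         # ['Groq']
print(cfg.get_groq_model_options())  # ['mixtral-8x7b-32768', 'llama3-8b-8192', ...]
```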
src/langgraphagenticai/vectorstore/__init__.py ADDED
File without changes