Chatbot with Tool Integration
- README.md +52 -1
- image.png +0 -0
- src/__pycache__/__init__.cpython-312.pyc +0 -0
- src/langgraphagenticai/LLMS/__pycache__/__init__.cpython-312.pyc +0 -0
- src/langgraphagenticai/LLMS/__pycache__/groqllm.cpython-312.pyc +0 -0
- src/langgraphagenticai/__pycache__/__init__.cpython-312.pyc +0 -0
- src/langgraphagenticai/__pycache__/main.cpython-312.pyc +0 -0
- src/langgraphagenticai/graph/__pycache__/__init__.cpython-312.pyc +0 -0
- src/langgraphagenticai/graph/__pycache__/graph_builder.cpython-312.pyc +0 -0
- src/langgraphagenticai/graph/graph_builder.py +40 -3
- src/langgraphagenticai/nodes/__pycache__/__init__.cpython-312.pyc +0 -0
- src/langgraphagenticai/nodes/__pycache__/basic_chatbot_node.cpython-312.pyc +0 -0
- src/langgraphagenticai/nodes/__pycache__/chatbot_with_Tool_node.cpython-312.pyc +0 -0
- src/langgraphagenticai/nodes/chatbot_with_Tool_node.py +50 -0
- src/langgraphagenticai/state/__pycache__/__init__.cpython-312.pyc +0 -0
- src/langgraphagenticai/state/__pycache__/state.cpython-312.pyc +0 -0
- src/langgraphagenticai/state/state.py +2 -2
- src/langgraphagenticai/tools/__pycache__/__init__.cpython-312.pyc +0 -0
- src/langgraphagenticai/tools/__pycache__/search_tool.cpython-312.pyc +0 -0
- src/langgraphagenticai/tools/search_tool.py +18 -0
- src/langgraphagenticai/ui/__pycache__/__init__.cpython-312.pyc +0 -0
- src/langgraphagenticai/ui/__pycache__/uiconfigfile.cpython-312.pyc +0 -0
- src/langgraphagenticai/ui/streamlitui/__pycache__/display_result.cpython-312.pyc +0 -0
- src/langgraphagenticai/ui/streamlitui/__pycache__/loadui.cpython-312.pyc +0 -0
- src/langgraphagenticai/ui/streamlitui/display_result.py +19 -2
- src/langgraphagenticai/ui/streamlitui/loadui.py +8 -1
- src/langgraphagenticai/ui/uiconfigfile.ini +1 -1
README.md
CHANGED
@@ -9,7 +9,7 @@ app_file: app.py
 pinned: false
 ---
 
-# Project Setup
+# Project Setup
 
 ## Steps
 
@@ -111,8 +111,59 @@ from src.langgraphagenticai.ui.streamlitui.display_result import DisplayResultStreamlit
 ```sh
 streamlit run app.py
 ```
+
+# 🚀 CI/CD Setup: GitHub to Hugging Face
+
+## 🔹 1. Set Up CI/CD Between GitHub and Hugging Face
+- **Create** a new Space on Hugging Face.
+- A **unique URL** will be generated for your Space.
+- Open **`main.yml`** and update the **last line**:
+  - Replace it with your Hugging Face Space **unique URL** (it will include `$HF_TOKEN`).
+- **Update the README**:
+  - The **first section** of the README should contain the **Streamlit configuration**.
+
+## 🔹 2. Deploy on Streamlit
+- **Go to the Streamlit website** and add your GitHub repo and branch details.
+- Deployment example:
+  👉 [LangGraph Streamlit App](https://langgraphe2e-4cg4bxcc3ysrhwrmregluy.streamlit.app/)
+
+---
+
+Your CI/CD pipeline should now be set up for seamless deployment! 🚀✨
+
+---
+# Chatbot with Tool Integration
+
+## 📌 Steps to Implement
+
+### 🔹 1. Add a Search Tool
+- Navigate to the `TOOLS` folder.
+- **Create** a new file: `search_tool.py`.
+- **Add Tavily** as a tool inside this file.
+
+### 🔹 2. Create a Chatbot with Tool Node
+- Navigate to the `NODES` folder.
+- **Create** a new file: `Chatbot_with_tool_node.py`.
+- Implement chatbot logic with tool integration inside this file.
+
+### 🔹 3. Modify Graph Builder
+- Open `graph_builder.py`.
+- **Edit** the file to include tool functionality.
+
+### 🔹 4. Update the UI
+- Modify the following UI components:
+  - `Load_streamlit_ui.py`
+  - `uiconfig.ini`
+  - `display_results.py`
+
+### 🔹 5. Run the Application
+To start the chatbot, run the following command:
+
+streamlit run app.py
+
+🚀 Now your chatbot with tool integration should be live!
+
+## Output Screenshots
+Streamlit UI interface showcasing chatbot utilizing tools
+
 
 
image.png
ADDED
src/__pycache__/__init__.cpython-312.pyc
CHANGED
Binary files a/src/__pycache__/__init__.cpython-312.pyc and b/src/__pycache__/__init__.cpython-312.pyc differ
src/langgraphagenticai/LLMS/__pycache__/__init__.cpython-312.pyc
CHANGED
Binary files a/src/langgraphagenticai/LLMS/__pycache__/__init__.cpython-312.pyc and b/src/langgraphagenticai/LLMS/__pycache__/__init__.cpython-312.pyc differ
src/langgraphagenticai/LLMS/__pycache__/groqllm.cpython-312.pyc
CHANGED
Binary files a/src/langgraphagenticai/LLMS/__pycache__/groqllm.cpython-312.pyc and b/src/langgraphagenticai/LLMS/__pycache__/groqllm.cpython-312.pyc differ
src/langgraphagenticai/__pycache__/__init__.cpython-312.pyc
CHANGED
Binary files a/src/langgraphagenticai/__pycache__/__init__.cpython-312.pyc and b/src/langgraphagenticai/__pycache__/__init__.cpython-312.pyc differ
src/langgraphagenticai/__pycache__/main.cpython-312.pyc
CHANGED
Binary files a/src/langgraphagenticai/__pycache__/main.cpython-312.pyc and b/src/langgraphagenticai/__pycache__/main.cpython-312.pyc differ
src/langgraphagenticai/graph/__pycache__/__init__.cpython-312.pyc
CHANGED
Binary files a/src/langgraphagenticai/graph/__pycache__/__init__.cpython-312.pyc and b/src/langgraphagenticai/graph/__pycache__/__init__.cpython-312.pyc differ
src/langgraphagenticai/graph/__pycache__/graph_builder.cpython-312.pyc
CHANGED
Binary files a/src/langgraphagenticai/graph/__pycache__/graph_builder.cpython-312.pyc and b/src/langgraphagenticai/graph/__pycache__/graph_builder.cpython-312.pyc differ
src/langgraphagenticai/graph/graph_builder.py
CHANGED
@@ -3,8 +3,8 @@ from langgraph.prebuilt import tools_condition,ToolNode
 from langchain_core.prompts import ChatPromptTemplate
 from src.langgraphagenticai.state.state import State
 from src.langgraphagenticai.nodes.basic_chatbot_node import BasicChatbotNode
-
-
+from src.langgraphagenticai.nodes.chatbot_with_Tool_node import ChatbotWithToolNode
+from src.langgraphagenticai.tools.search_tool import get_tools, create_tool_node
 
 
 class GraphBuilder:
@@ -25,15 +25,52 @@
         self.graph_builder.add_edge(START, "chatbot")
         self.graph_builder.add_edge("chatbot", END)
 
+    def chatbot_with_tools_build_graph(self):
+        """
+        Builds an advanced chatbot graph with tool integration.
+
+        This method creates a chatbot graph that includes both a chatbot node
+        and a tool node. It defines tools, initializes the chatbot with tool
+        capabilities, and sets up conditional and direct edges between nodes.
+        The chatbot node is set as the entry point.
+        """
+        # Define the tools and the tool node
+        tools = get_tools()
+        tool_node = create_tool_node(tools)
+
+        # Define the LLM
+        llm = self.llm
+
+        # Define the chatbot node
+        obj_chatbot_with_node = ChatbotWithToolNode(llm)
+        chatbot_node = obj_chatbot_with_node.create_chatbot(tools)
+
+        # Add nodes
+        self.graph_builder.add_node("chatbot", chatbot_node)
+        self.graph_builder.add_node("tools", tool_node)
+
+        # Define conditional and direct edges
+        self.graph_builder.add_edge(START, "chatbot")
+        self.graph_builder.add_conditional_edges("chatbot", tools_condition)
+        self.graph_builder.add_edge("tools", "chatbot")
+
     def setup_graph(self, usecase: str):
         """
         Sets up the graph for the selected use case.
         """
         if usecase == "Basic Chatbot":
             self.basic_chatbot_build_graph()
+        if usecase == "Chatbot with Tool":
+            self.chatbot_with_tools_build_graph()
         return self.graph_builder.compile()
 
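For context, here is a minimal sketch (not part of this commit) of how the new "Chatbot with Tool" graph could be compiled and invoked outside Streamlit. The construction of `GraphBuilder` with a Groq chat model, and the model name, are assumptions; the repo's own `GroqLLM` wrapper and app wiring are not shown in this diff.

```python
# Hypothetical usage sketch: assumes GROQ_API_KEY and TAVILY_API_KEY are set,
# and that GraphBuilder is constructed with the chat model it reads as self.llm.
from langchain_groq import ChatGroq  # stands in for the repo's GroqLLM wrapper

from src.langgraphagenticai.graph.graph_builder import GraphBuilder

llm = ChatGroq(model="llama3-8b-8192")  # assumed model name
graph = GraphBuilder(llm).setup_graph("Chatbot with Tool")

# Same call shape display_result.py uses: one user message in, the full
# message history (user -> tool call -> tool result -> final answer) out.
result = graph.invoke({"messages": ["What is LangGraph?"]})
for message in result["messages"]:
    print(type(message).__name__, ":", getattr(message, "content", message))
```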
src/langgraphagenticai/nodes/__pycache__/__init__.cpython-312.pyc
CHANGED
Binary files a/src/langgraphagenticai/nodes/__pycache__/__init__.cpython-312.pyc and b/src/langgraphagenticai/nodes/__pycache__/__init__.cpython-312.pyc differ
src/langgraphagenticai/nodes/__pycache__/basic_chatbot_node.cpython-312.pyc
CHANGED
Binary files a/src/langgraphagenticai/nodes/__pycache__/basic_chatbot_node.cpython-312.pyc and b/src/langgraphagenticai/nodes/__pycache__/basic_chatbot_node.cpython-312.pyc differ
src/langgraphagenticai/nodes/__pycache__/chatbot_with_Tool_node.cpython-312.pyc
ADDED
Binary file (1.98 kB)
src/langgraphagenticai/nodes/chatbot_with_Tool_node.py
ADDED
@@ -0,0 +1,50 @@
+from src.langgraphagenticai.state.state import State
+
+# State is like a memory that stores the chat messages.
+# This module creates a chatbot that takes user messages as input, uses an
+# AI model (llm) to generate responses, and integrates tools to improve them.
+
+# This class manages a chatbot that can use tools.
+class ChatbotWithToolNode:
+    """
+    Chatbot logic enhanced with tool integration.
+    """
+    # Constructor that takes self (the instance of the class) and model (a parameter).
+    def __init__(self, model):
+        self.llm = model  # The attribute (self.llm) stores the parameter (model).
+
+    # Processes user input and generates responses; methods always take self as the first parameter.
+    def process(self, state: State) -> dict:  # "state: State" and "-> dict" are type hints for the input and return value.
+        """
+        Processes the input state and generates a response with tool integration.
+        """
+        user_input = state["messages"][-1] if state["messages"] else ""  # Latest user message from state, or empty if none.
+        llm_response = self.llm.invoke([{"role": "user", "content": user_input}])  # The AI model (self.llm) generates a response to the user input.
+
+        # Simulate tool-specific logic
+        tools_response = f"Tool integration for: '{user_input}'"
+
+        return {"messages": [llm_response, tools_response]}
+
+    # This method adds tools to the chatbot.
+    def create_chatbot(self, tools):
+        """
+        Returns a chatbot node function.
+        """
+        llm_with_tools = self.llm.bind_tools(tools)  # Connects the chatbot to the external tools.
+
+        # Inner function that processes the messages.
+        def chatbot_node(state: State):
+            """
+            Chatbot logic for processing the input state and returning a response.
+            """
+            return {"messages": [llm_with_tools.invoke(state["messages"])]}
+
+        return chatbot_node
+
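Note that `graph_builder.py` wires in only `create_chatbot()`; `process()` is a simpler, simulated variant that is not used by the graph. As a small sketch (again, not part of the commit), the node factory can also be exercised directly; `ChatGroq` and the model name are assumptions, and any chat model supporting `.bind_tools()` would behave the same way.

```python
# Hypothetical standalone use of ChatbotWithToolNode.create_chatbot:
# the returned chatbot_node is a plain function over the graph State.
from langchain_groq import ChatGroq  # assumed model wrapper

from src.langgraphagenticai.nodes.chatbot_with_Tool_node import ChatbotWithToolNode
from src.langgraphagenticai.tools.search_tool import get_tools

llm = ChatGroq(model="llama3-8b-8192")  # assumes GROQ_API_KEY is set
chatbot_node = ChatbotWithToolNode(llm).create_chatbot(get_tools())

# The node receives the message history and returns the model's reply,
# which may contain tool calls for the "tools" node to execute.
update = chatbot_node({"messages": [("user", "Search for the latest LangGraph release.")]})
print(update["messages"][-1])
```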
src/langgraphagenticai/state/__pycache__/__init__.cpython-312.pyc
CHANGED
Binary files a/src/langgraphagenticai/state/__pycache__/__init__.cpython-312.pyc and b/src/langgraphagenticai/state/__pycache__/__init__.cpython-312.pyc differ
src/langgraphagenticai/state/__pycache__/state.cpython-312.pyc
CHANGED
Binary files a/src/langgraphagenticai/state/__pycache__/state.cpython-312.pyc and b/src/langgraphagenticai/state/__pycache__/state.cpython-312.pyc differ
src/langgraphagenticai/state/state.py
CHANGED
@@ -4,8 +4,8 @@ from langgraph.graph.message import add_messages
 from typing import TypedDict, Annotated, List
 from langchain_core.messages import HumanMessage, AIMessage
 
-class State(TypedDict):
+class State(TypedDict):  # State inherits from TypedDict, so it behaves like a dictionary with fixed keys and expected value types.
     """
     Represents the structure of the state used in the graph.
     """
-    messages: Annotated[list, add_messages]
+    messages: Annotated[list, add_messages]  # "messages" is a key of the State dictionary; Annotated attaches extra metadata (add_messages) to the type hint.
src/langgraphagenticai/tools/__pycache__/__init__.cpython-312.pyc
ADDED
Binary file (179 Bytes)
src/langgraphagenticai/tools/__pycache__/search_tool.cpython-312.pyc
ADDED
Binary file (760 Bytes)
src/langgraphagenticai/tools/search_tool.py
ADDED
@@ -0,0 +1,18 @@
+from langchain_community.tools.tavily_search import TavilySearchResults
+from langgraph.prebuilt import ToolNode
+
+# Add any number of tools to the list returned by this function.
+def get_tools():
+    """
+    Return the list of tools to be used in the chatbot.
+    """
+    tools = [TavilySearchResults(max_results=2)]
+    return tools
+
+# Wrap the tools above in a graph node.
+def create_tool_node(tools):
+    """
+    Creates and returns a tool node for the graph.
+    """
+    # ToolNode is a ready-made building block that lets an AI agent call external tools;
+    # the input argument is assigned to ToolNode's "tools" parameter.
+    return ToolNode(tools=tools)
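For reference, a minimal sketch (not from the commit) of exercising the Tavily tool on its own. `TavilySearchResults` reads the `TAVILY_API_KEY` environment variable, which `loadui.py` below collects from the Streamlit sidebar; the key value and query here are placeholders.

```python
# Hypothetical direct use of the search tool defined above.
import os

from src.langgraphagenticai.tools.search_tool import get_tools

os.environ.setdefault("TAVILY_API_KEY", "tvly-...")  # placeholder; use a real key

tavily = get_tools()[0]                  # the TavilySearchResults(max_results=2) instance
results = tavily.invoke("What is LangGraph?")
for item in results:                     # each result is typically a dict with "url" and "content"
    print(item["url"])
```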
src/langgraphagenticai/ui/__pycache__/__init__.cpython-312.pyc
CHANGED
Binary files a/src/langgraphagenticai/ui/__pycache__/__init__.cpython-312.pyc and b/src/langgraphagenticai/ui/__pycache__/__init__.cpython-312.pyc differ
src/langgraphagenticai/ui/__pycache__/uiconfigfile.cpython-312.pyc
CHANGED
Binary files a/src/langgraphagenticai/ui/__pycache__/uiconfigfile.cpython-312.pyc and b/src/langgraphagenticai/ui/__pycache__/uiconfigfile.cpython-312.pyc differ
src/langgraphagenticai/ui/streamlitui/__pycache__/display_result.cpython-312.pyc
CHANGED
Binary files a/src/langgraphagenticai/ui/streamlitui/__pycache__/display_result.cpython-312.pyc and b/src/langgraphagenticai/ui/streamlitui/__pycache__/display_result.cpython-312.pyc differ
src/langgraphagenticai/ui/streamlitui/__pycache__/loadui.cpython-312.pyc
CHANGED
Binary files a/src/langgraphagenticai/ui/streamlitui/__pycache__/loadui.cpython-312.pyc and b/src/langgraphagenticai/ui/streamlitui/__pycache__/loadui.cpython-312.pyc differ
src/langgraphagenticai/ui/streamlitui/display_result.py
CHANGED
@@ -1,5 +1,5 @@
 import streamlit as st
-from langchain_core.messages import HumanMessage, AIMessage
+from langchain_core.messages import HumanMessage, AIMessage, ToolMessage
 import json
 
 
@@ -21,4 +21,21 @@ class DisplayResultStreamlit:
                 with st.chat_message("user"):
                     st.write(user_message)
                 with st.chat_message("assistant"):
-                    st.write(value["messages"].content)
+                    st.write(value["messages"].content)
+
+        elif usecase == "Chatbot with Tool":
+            # Prepare the initial state and invoke the graph
+            initial_state = {"messages": [user_message]}
+            res = graph.invoke(initial_state)
+            for message in res["messages"]:
+                if type(message) == HumanMessage:
+                    with st.chat_message("user"):
+                        st.write(message.content)
+                elif type(message) == ToolMessage:
+                    with st.chat_message("ai"):
+                        st.write("Tool Call Start")
+                        st.write(message.content)
+                        st.write("Tool Call End")
+                elif type(message) == AIMessage and message.content:
+                    with st.chat_message("assistant"):
+                        st.write(message.content)
src/langgraphagenticai/ui/streamlitui/loadui.py
CHANGED
@@ -56,9 +56,16 @@ class LoadStreamlitUI:
         # Use case selection
         self.user_controls["selected_usecase"] = st.selectbox("Select Usecases", usecase_options)
 
+        if self.user_controls["selected_usecase"] == "Chatbot with Tool":
+            # API key input
+            os.environ["TAVILY_API_KEY"] = self.user_controls["TAVILY_API_KEY"] = st.session_state["TAVILY_API_KEY"] = st.text_input("TAVILY API KEY",
+                                                                                                                                      type="password")
+            # Validate API key
+            if not self.user_controls["TAVILY_API_KEY"]:
+                st.warning("⚠️ Please enter your TAVILY_API_KEY to proceed. Don't have one? See: https://app.tavily.com/home")
+
         if "state" not in st.session_state:
             st.session_state.state = self.initialize_session()
 
 
-
         return self.user_controls
src/langgraphagenticai/ui/uiconfigfile.ini
CHANGED
@@ -1,6 +1,6 @@
 [DEFAULT]
 PAGE_TITLE = LangGraph: Build Stateful Agentic AI graph
 LLM_OPTIONS = Groq
-USECASE_OPTIONS = Basic Chatbot
+USECASE_OPTIONS = Basic Chatbot, Chatbot with Tool
 GROQ_MODEL_OPTIONS = mixtral-8x7b-32768, llama3-8b-8192, llama3-70b-8192, gemma-7b-i
 
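Finally, the repo's `uiconfigfile.py` loader is not part of this diff, so as a rough sketch only: a comma-separated line like the updated `USECASE_OPTIONS` can be read with the standard-library `configparser` and split into the list backing the "Select Usecases" selectbox. The file path and use of `configparser` here are assumptions.

```python
# Hypothetical reader for uiconfigfile.ini; the actual uiconfigfile.py may differ.
from configparser import ConfigParser

config = ConfigParser()
config.read("src/langgraphagenticai/ui/uiconfigfile.ini")

usecase_options = [opt.strip() for opt in config["DEFAULT"]["USECASE_OPTIONS"].split(",")]
print(usecase_options)  # -> ['Basic Chatbot', 'Chatbot with Tool']
```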