sandeep1404 committed
Commit 4d331a5 · 1 Parent(s): 27b6ab8

added tavily search tool node

src/langgraphagenticai/__pycache__/main.cpython-310.pyc CHANGED
Binary files a/src/langgraphagenticai/__pycache__/main.cpython-310.pyc and b/src/langgraphagenticai/__pycache__/main.cpython-310.pyc differ
 
src/langgraphagenticai/graph/__pycache__/graph_builder.cpython-310.pyc CHANGED
Binary files a/src/langgraphagenticai/graph/__pycache__/graph_builder.cpython-310.pyc and b/src/langgraphagenticai/graph/__pycache__/graph_builder.cpython-310.pyc differ
 
src/langgraphagenticai/graph/graph_builder.py CHANGED
@@ -3,7 +3,10 @@ from langgraph.prebuilt import tools_condition,ToolNode
 from langchain_core.prompts import ChatPromptTemplate
 from src.langgraphagenticai.state.state import State
 from src.langgraphagenticai.nodes.basic_chatbot_node import BasicChatbotNode
+from src.langgraphagenticai.nodes.chatbot_with_toolnode import ChatbotWithToolNode
+from src.langgraphagenticai.tools.search_tool import get_tools,create_tool_node
 
+## Adds the tool node; it is used whenever the user selects the "Chatbot with Tool" use case
 
 
 
@@ -25,12 +28,46 @@ class GraphBuilder:
         self.graph_builder.add_edge(START,"chatbot")
         self.graph_builder.add_edge("chatbot",END)
 
+    def chatbot_with_tools_build_graph(self):
+        """
+        Builds an advanced chatbot graph with tool integration.
+        This method creates a chatbot graph that includes both a chatbot node
+        and a tool node. It defines the tools, initializes the chatbot with tool
+        capabilities, and sets up conditional and direct edges between nodes.
+        The chatbot node is set as the entry point.
+        """
+        ## Define the tools and the tool node
+
+        tools = get_tools()
+        tool_node = create_tool_node(tools)
+
+        ## Define the LLM
+        llm = self.llm
+
+        # Define the chatbot node
+        obj_chatbot_with_node = ChatbotWithToolNode(llm)
+        chatbot_node = obj_chatbot_with_node.create_chatbot(tools)
+
+        # Add nodes
+        self.graph_builder.add_node("chatbot", chatbot_node)
+        self.graph_builder.add_node("tools", tool_node)
+
+        # Define conditional and direct edges
+        self.graph_builder.add_edge(START,"chatbot")
+        self.graph_builder.add_conditional_edges("chatbot", tools_condition)
+        self.graph_builder.add_edge("tools","chatbot")
+
+
     def setup_graph(self, usecase: str):
         """
         Sets up the graph for the selected use case.
         """
         if usecase == "Basic Chatbot":
             self.basic_chatbot_build_graph()
+
+        if usecase == "Chatbot with Tool":
+            self.chatbot_with_tools_build_graph()
+
         return self.graph_builder.compile()
 
 
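For context, a minimal usage sketch of the new path (not part of this commit). It assumes GraphBuilder is constructed with an LLM instance (only `self.llm` and `self.graph_builder` appear in the hunk, so the constructor call is an assumption), that GROQ_API_KEY is set, and that a valid Tavily key replaces the placeholder.

```python
import os
from langchain_groq import ChatGroq
from src.langgraphagenticai.graph.graph_builder import GraphBuilder

os.environ.setdefault("TAVILY_API_KEY", "tvly-...")         # placeholder, not a real key

llm = ChatGroq(model="llama3-70b-8192")                     # assumed; any entry from GROQ_MODEL_OPTIONS
graph = GraphBuilder(llm).setup_graph("Chatbot with Tool")  # compiles the tool-enabled graph

result = graph.invoke({"messages": ["What is the latest LangGraph release?"]})
for message in result["messages"]:                          # Human -> AI(tool_calls) -> Tool -> AI
    message.pretty_print()
```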
 
src/langgraphagenticai/nodes/__pycache__/chatbot_with_toolnode.cpython-310.pyc ADDED
Binary file (1.63 kB)
 
src/langgraphagenticai/nodes/chatbot_with_toolnode.py ADDED
@@ -0,0 +1,52 @@
+from src.langgraphagenticai.state.state import State
+
+## Added a tool-aware chatbot node; for each tool you create, add its functionality in a file under the tools folder
+
+class ChatbotWithToolNode:
+    """
+    Chatbot logic enhanced with tool integration.
+    """
+    def __init__(self,model):
+        self.llm = model
+
+    def process(self, state: State) -> dict:
+        """
+        Processes the input state and generates a response with tool integration.
+        """
+        user_input = state["messages"][-1] if state["messages"] else ""
+        llm_response = self.llm.invoke([{"role": "user", "content": user_input}])
+
+        # Simulate tool-specific logic
+        tools_response = f"Tool integration for: '{user_input}'"
+
+        return {"messages": [llm_response, tools_response]}
+
+    # def chatbot_node(state: State,llm_with_tools):
+    #     """
+    #     Chatbot logic for processing the input state and returning a response.
+    #     """
+    #     return {"messages": [llm_with_tools.invoke(state["messages"])]}
+
+    # def create_chatbot(self,tools):
+    #     """
+    #     Returns a chatbot node function.
+    #     """
+    #     llm_with_tools = self.llm.bind_tools(tools)
+
+    #     return self.chatbot_node(State,llm_with_tools)
+
+
+    def create_chatbot(self, tools):
+        """
+        Returns a chatbot node function.
+        """
+        llm_with_tools = self.llm.bind_tools(tools)
+
+        def chatbot_node(state: State):
+            """
+            Chatbot logic for processing the input state and returning a response.
+            """
+            return {"messages": [llm_with_tools.invoke(state["messages"])]}
+
+        return chatbot_node
+
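A brief sketch of how `create_chatbot` can be exercised outside the graph (illustrative only; the Groq model name is an assumption, and both GROQ_API_KEY and TAVILY_API_KEY must already be set). `bind_tools` attaches the tool schemas so the model can emit `tool_calls`, and the returned closure is what GraphBuilder registers as the "chatbot" node.

```python
from langchain_groq import ChatGroq
from src.langgraphagenticai.nodes.chatbot_with_toolnode import ChatbotWithToolNode
from src.langgraphagenticai.tools.search_tool import get_tools

llm = ChatGroq(model="llama3-70b-8192")                  # assumed model; any tool-calling LLM works
chatbot_node = ChatbotWithToolNode(llm).create_chatbot(get_tools())

# The node takes the graph state and returns a partial state update; when the model
# decides to search, the AIMessage carries tool_calls for tools_condition to route on.
state = {"messages": [("user", "Find the latest LangGraph release notes")]}
print(chatbot_node(state))                               # {"messages": [AIMessage(...)]}
```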
src/langgraphagenticai/tools/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (174 Bytes)
 
src/langgraphagenticai/tools/__pycache__/search_tool.cpython-310.pyc ADDED
Binary file (692 Bytes)
 
src/langgraphagenticai/tools/search_tool.py ADDED
@@ -0,0 +1,18 @@
+from langchain_community.tools.tavily_search import TavilySearchResults
+from langgraph.prebuilt import ToolNode
+
+
+## Creates the search tool; to add more tools, append them to the tools [] list
+
+def get_tools():
+    """
+    Return the list of tools to be used in the chatbot.
+    """
+    tools=[TavilySearchResults(max_results=2)]
+    return tools
+
+def create_tool_node(tools):
+    """
+    Creates and returns a tool node for the graph.
+    """
+    return ToolNode(tools=tools)
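For reference, a hedged sketch of what the returned ToolNode does at runtime: it executes the tool calls attached to the last AIMessage in the state and returns matching ToolMessages. The tool-call payload below is fabricated for illustration, and a valid TAVILY_API_KEY is needed for the search to actually run.

```python
import os
from langchain_core.messages import AIMessage
from src.langgraphagenticai.tools.search_tool import get_tools, create_tool_node

os.environ.setdefault("TAVILY_API_KEY", "tvly-...")      # placeholder key
tool_node = create_tool_node(get_tools())

# Fabricated tool call; in the app the LLM produces this via bind_tools().
ai_msg = AIMessage(
    content="",
    tool_calls=[{
        "name": "tavily_search_results_json",            # TavilySearchResults' registered tool name
        "args": {"query": "LangGraph ToolNode"},
        "id": "call_1",
    }],
)
print(tool_node.invoke({"messages": [ai_msg]}))          # {"messages": [ToolMessage(...)]}
```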
src/langgraphagenticai/ui/streamlitui/__pycache__/display_result.cpython-310.pyc CHANGED
Binary files a/src/langgraphagenticai/ui/streamlitui/__pycache__/display_result.cpython-310.pyc and b/src/langgraphagenticai/ui/streamlitui/__pycache__/display_result.cpython-310.pyc differ
 
src/langgraphagenticai/ui/streamlitui/__pycache__/loadui.cpython-310.pyc CHANGED
Binary files a/src/langgraphagenticai/ui/streamlitui/__pycache__/loadui.cpython-310.pyc and b/src/langgraphagenticai/ui/streamlitui/__pycache__/loadui.cpython-310.pyc differ
 
src/langgraphagenticai/ui/streamlitui/display_result.py CHANGED
@@ -1,8 +1,10 @@
 import streamlit as st
-from langchain_core.messages import HumanMessage,AIMessage
+from langchain_core.messages import HumanMessage,AIMessage,ToolMessage
 import json
 
 
+## Display changes for the "Chatbot with Tool" use case
+
 class DisplayResultStreamlit:
     def __init__(self,usecase,graph,user_message):
         self.usecase= usecase
@@ -21,4 +23,24 @@ class DisplayResultStreamlit:
                     with st.chat_message("user"):
                         st.write(user_message)
                     with st.chat_message("assistant"):
-                        st.write(value["messages"].content)
+                        st.write(value["messages"].content)
+
+        elif usecase=="Chatbot with Tool":
+            # Prepare state and invoke the graph
+            initial_state = {"messages": [user_message]}
+            res = graph.invoke(initial_state)
+            for message in res['messages']:
+                if type(message) == HumanMessage:
+                    with st.chat_message("user"):
+                        st.write(message.content)
+                elif type(message)==ToolMessage:
+                    with st.chat_message("ai"):
+                        st.write("Tool Call Start")
+                        st.write(message.content)
+                        st.write("Tool Call End")
+                elif type(message)==AIMessage and message.content:
+                    with st.chat_message("assistant"):
+                        st.write(message.content)
+
+
+
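For reference, a sketch of the message sequence the loop above expects for one tool-using turn; the contents and tool-call fields are fabricated, since in the app they come from `graph.invoke()`.

```python
from langchain_core.messages import HumanMessage, AIMessage, ToolMessage

sample = [
    HumanMessage(content="What's the weather in Paris?"),            # rendered in the "user" bubble
    AIMessage(content="", tool_calls=[{"name": "tavily_search_results_json",
                                       "args": {"query": "Paris weather"},
                                       "id": "call_1"}]),            # skipped: empty content
    ToolMessage(content="[search results]", tool_call_id="call_1"),  # shown between the Tool Call markers
    AIMessage(content="Mild and sunny in Paris today."),             # rendered in the "assistant" bubble
]
for message in sample:
    print(type(message).__name__, "->", str(message.content)[:40])
```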
src/langgraphagenticai/ui/streamlitui/loadui.py CHANGED
@@ -5,6 +5,7 @@ from datetime import date
 from langchain_core.messages import AIMessage,HumanMessage
 from src.langgraphagenticai.ui.uiconfigfile import Config
 
+## Added for the "Chatbot with Tool" use case
 
 class LoadStreamlitUI:
     def __init__(self):
@@ -56,6 +57,17 @@ class LoadStreamlitUI:
             # Use case selection
             self.user_controls["selected_usecase"] = st.selectbox("Select Usecases", usecase_options)
 
+            ## Added for the "Chatbot with Tool" use case
+
+            if self.user_controls["selected_usecase"] =="Chatbot with Tool":
+                # API key input
+                os.environ["TAVILY_API_KEY"] = self.user_controls["TAVILY_API_KEY"] = st.session_state["TAVILY_API_KEY"] = st.text_input("TAVILY API KEY",
+                                                                                                                                          type="password")
+                # Validate the API key
+                if not self.user_controls["TAVILY_API_KEY"]:
+                    st.warning("⚠️ Please enter your TAVILY_API_KEY to proceed. Don't have one? Refer to: https://app.tavily.com/home")
+
+
             if "state" not in st.session_state:
                 st.session_state.state = self.initialize_session()
 
 
src/langgraphagenticai/ui/uiconfigfile.ini CHANGED
@@ -1,6 +1,6 @@
 [DEFAULT]
 PAGE_TITLE = LangGraph: Build Stateful Agentic AI graph
 LLM_OPTIONS = Groq
-USECASE_OPTIONS = Basic Chatbot
+USECASE_OPTIONS = Basic Chatbot, Chatbot with Tool
 GROQ_MODEL_OPTIONS = mixtral-8x7b-32768, llama3-8b-8192, llama3-70b-8192, gemma-7b-i
 
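The Config class that consumes this file is not part of the commit, but the new option presumably reaches the selectbox via a configparser read plus a comma split; a hedged sketch (the method name in the comment is hypothetical):

```python
from configparser import ConfigParser

parser = ConfigParser()
parser.read("src/langgraphagenticai/ui/uiconfigfile.ini")

# Hypothetical parsing, mirroring what a Config.get_usecase_options() helper likely does.
usecase_options = [u.strip() for u in parser["DEFAULT"]["USECASE_OPTIONS"].split(",")]
print(usecase_options)   # ['Basic Chatbot', 'Chatbot with Tool']
```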