sandeep1404 committed · Commit 6f7f403 · 0 Parent(s)

chatbot folder added

This view is limited to 50 files because it contains too many changes. See raw diff.
- .github/workflows/main.yml +25 -0
- .gitignore +1 -0
- README.md +16 -0
- app.py +5 -0
- requirements.txt +8 -0
- src/__init__.py +0 -0
- src/__pycache__/__init__.cpython-310.pyc +0 -0
- src/__pycache__/__init__.cpython-312.pyc +0 -0
- src/langgraphagenticai/LLMS/__init__.py +0 -0
- src/langgraphagenticai/LLMS/__pycache__/__init__.cpython-310.pyc +0 -0
- src/langgraphagenticai/LLMS/__pycache__/__init__.cpython-312.pyc +0 -0
- src/langgraphagenticai/LLMS/__pycache__/groqllm.cpython-310.pyc +0 -0
- src/langgraphagenticai/LLMS/__pycache__/groqllm.cpython-312.pyc +0 -0
- src/langgraphagenticai/LLMS/groqllm.py +20 -0
- src/langgraphagenticai/__init__.py +0 -0
- src/langgraphagenticai/__pycache__/__init__.cpython-310.pyc +0 -0
- src/langgraphagenticai/__pycache__/__init__.cpython-312.pyc +0 -0
- src/langgraphagenticai/__pycache__/main.cpython-310.pyc +0 -0
- src/langgraphagenticai/__pycache__/main.cpython-312.pyc +0 -0
- src/langgraphagenticai/graph/__init__.py +0 -0
- src/langgraphagenticai/graph/__pycache__/__init__.cpython-310.pyc +0 -0
- src/langgraphagenticai/graph/__pycache__/__init__.cpython-312.pyc +0 -0
- src/langgraphagenticai/graph/__pycache__/graph_builder.cpython-310.pyc +0 -0
- src/langgraphagenticai/graph/__pycache__/graph_builder.cpython-312.pyc +0 -0
- src/langgraphagenticai/graph/graph_builder.py +39 -0
- src/langgraphagenticai/main.py +66 -0
- src/langgraphagenticai/nodes/__init__.py +0 -0
- src/langgraphagenticai/nodes/__pycache__/__init__.cpython-310.pyc +0 -0
- src/langgraphagenticai/nodes/__pycache__/__init__.cpython-312.pyc +0 -0
- src/langgraphagenticai/nodes/__pycache__/basic_chatbot_node.cpython-310.pyc +0 -0
- src/langgraphagenticai/nodes/__pycache__/basic_chatbot_node.cpython-312.pyc +0 -0
- src/langgraphagenticai/nodes/basic_chatbot_node.py +14 -0
- src/langgraphagenticai/state/__init__.py +0 -0
- src/langgraphagenticai/state/__pycache__/__init__.cpython-310.pyc +0 -0
- src/langgraphagenticai/state/__pycache__/__init__.cpython-312.pyc +0 -0
- src/langgraphagenticai/state/__pycache__/state.cpython-310.pyc +0 -0
- src/langgraphagenticai/state/__pycache__/state.cpython-312.pyc +0 -0
- src/langgraphagenticai/state/state.py +11 -0
- src/langgraphagenticai/tools/__init__.py +0 -0
- src/langgraphagenticai/ui/__init__.py +0 -0
- src/langgraphagenticai/ui/__pycache__/__init__.cpython-310.pyc +0 -0
- src/langgraphagenticai/ui/__pycache__/__init__.cpython-312.pyc +0 -0
- src/langgraphagenticai/ui/__pycache__/uiconfigfile.cpython-310.pyc +0 -0
- src/langgraphagenticai/ui/__pycache__/uiconfigfile.cpython-312.pyc +0 -0
- src/langgraphagenticai/ui/streamlitui/__pycache__/display_result.cpython-310.pyc +0 -0
- src/langgraphagenticai/ui/streamlitui/__pycache__/display_result.cpython-312.pyc +0 -0
- src/langgraphagenticai/ui/streamlitui/__pycache__/loadui.cpython-310.pyc +0 -0
- src/langgraphagenticai/ui/streamlitui/__pycache__/loadui.cpython-312.pyc +0 -0
- src/langgraphagenticai/ui/streamlitui/display_result.py +24 -0
- src/langgraphagenticai/ui/streamlitui/loadui.py +64 -0
.github/workflows/main.yml
ADDED
@@ -0,0 +1,25 @@
name: Sync to Hugging Face Space
on:
  push:
    branches: [main]

  # to run this workflow manually from the Actions tab
  workflow_dispatch:

jobs:
  sync-to-hub:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
        with:
          fetch-depth: 0
          lfs: false

      - name: Ignore large files
        run: git filter-branch --index-filter 'git rm -rf --cached --ignore-unmatch "Rag_Documents/layout-parser-paper.pdf"' HEAD

      - name: Push to hub
        env:
          HF_TOKEN: ${{ secrets.HF_TOKEN }}
        run: git push --force https://Techiiot:$HF_TOKEN@huggingface.co/spaces/Techiiot/sandeep_basic_chatbot main

.gitignore
ADDED
@@ -0,0 +1 @@
venv/

README.md
ADDED
@@ -0,0 +1,16 @@
---
title: LanggraphAgenticAI
emoji: 🐨
colorFrom: blue
colorTo: red
sdk: streamlit
sdk_version: 1.42.0
app_file: app.py
pinned: false
license: apache-2.0
short_description: Refined langgraphAgenticAI
---

### End-to-End Agentic AI Projects

app.py
ADDED
@@ -0,0 +1,5 @@
from src.langgraphagenticai.main import load_langgraph_agenticai_app


if __name__ == "__main__":
    load_langgraph_agenticai_app()

requirements.txt
ADDED
@@ -0,0 +1,8 @@
langchain
langgraph
langchain_community
langchain_core
langchain_groq
langchain_openai
faiss-cpu
streamlit

src/__init__.py
ADDED
File without changes

src/__pycache__/__init__.cpython-310.pyc
ADDED
Binary file (160 Bytes).

src/__pycache__/__init__.cpython-312.pyc
ADDED
Binary file (132 Bytes).

src/langgraphagenticai/LLMS/__init__.py
ADDED
File without changes

src/langgraphagenticai/LLMS/__pycache__/__init__.cpython-310.pyc
ADDED
Binary file (184 Bytes).

src/langgraphagenticai/LLMS/__pycache__/__init__.cpython-312.pyc
ADDED
Binary file (156 Bytes).

src/langgraphagenticai/LLMS/__pycache__/groqllm.cpython-310.pyc
ADDED
Binary file (1.03 kB).

src/langgraphagenticai/LLMS/__pycache__/groqllm.cpython-312.pyc
ADDED
Binary file (1.32 kB).

src/langgraphagenticai/LLMS/groqllm.py
ADDED
@@ -0,0 +1,20 @@
import os
import streamlit as st
from langchain_groq import ChatGroq

class GroqLLM:
    def __init__(self, user_controls_input):
        self.user_controls_input = user_controls_input

    def get_llm_model(self):
        try:
            groq_api_key = self.user_controls_input['GROQ_API_KEY']
            selected_groq_model = self.user_controls_input['selected_groq_model']
            if groq_api_key == '' and os.environ.get("GROQ_API_KEY", "") == '':
                st.error("Please enter the Groq API key")

            llm = ChatGroq(api_key=groq_api_key, model=selected_groq_model)

        except Exception as e:
            raise ValueError(f"Error occurred with exception: {e}")
        return llm

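A minimal usage sketch for GroqLLM outside Streamlit; the key and model name below are placeholders rather than values from this commit, and the dictionary keys mirror what loadui.py collects:

from src.langgraphagenticai.LLMS.groqllm import GroqLLM

# Hypothetical user controls; in the app these come from the sidebar.
user_controls = {
    "GROQ_API_KEY": "gsk_...",                # placeholder key
    "selected_groq_model": "llama3-8b-8192",  # assumed Groq model name
}
llm = GroqLLM(user_controls_input=user_controls).get_llm_model()
print(llm.invoke("Hello!").content)           # ChatGroq returns an AIMessage
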
src/langgraphagenticai/__init__.py
ADDED
File without changes

src/langgraphagenticai/__pycache__/__init__.cpython-310.pyc
ADDED
Binary file (179 Bytes).

src/langgraphagenticai/__pycache__/__init__.cpython-312.pyc
ADDED
Binary file (151 Bytes).

src/langgraphagenticai/__pycache__/main.cpython-310.pyc
ADDED
Binary file (1.83 kB).

src/langgraphagenticai/__pycache__/main.cpython-312.pyc
ADDED
Binary file (2.58 kB).

src/langgraphagenticai/graph/__init__.py
ADDED
File without changes

src/langgraphagenticai/graph/__pycache__/__init__.cpython-310.pyc
ADDED
Binary file (185 Bytes).

src/langgraphagenticai/graph/__pycache__/__init__.cpython-312.pyc
ADDED
Binary file (157 Bytes).

src/langgraphagenticai/graph/__pycache__/graph_builder.cpython-310.pyc
ADDED
Binary file (1.77 kB).

src/langgraphagenticai/graph/__pycache__/graph_builder.cpython-312.pyc
ADDED
Binary file (2.25 kB).

src/langgraphagenticai/graph/graph_builder.py
ADDED
@@ -0,0 +1,39 @@
from langgraph.graph import StateGraph, START, END, MessagesState
from langgraph.prebuilt import tools_condition, ToolNode
from langchain_core.prompts import ChatPromptTemplate
from src.langgraphagenticai.state.state import State
from src.langgraphagenticai.nodes.basic_chatbot_node import BasicChatbotNode


class GraphBuilder:

    def __init__(self, model):
        self.llm = model
        self.graph_builder = StateGraph(State)

    def basic_chatbot_build_graph(self):
        """
        Builds a basic chatbot graph using LangGraph.
        This method initializes a chatbot node using the `BasicChatbotNode` class
        and integrates it into the graph. The chatbot node is set as both the
        entry and exit point of the graph.
        """
        self.basic_chatbot_node = BasicChatbotNode(self.llm)
        self.graph_builder.add_node("chatbot", self.basic_chatbot_node.process)
        self.graph_builder.add_edge(START, "chatbot")
        self.graph_builder.add_edge("chatbot", END)

    def setup_graph(self, usecase: str):
        """
        Sets up the graph for the selected use case.
        """
        if usecase == "Basic Chatbot":
            self.basic_chatbot_build_graph()
        return self.graph_builder.compile()

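The compiled object behaves like any LangGraph runnable. A minimal sketch of building and invoking the basic chatbot graph outside Streamlit, assuming a valid Groq API key (the key and model name are placeholders):

from src.langgraphagenticai.LLMS.groqllm import GroqLLM
from src.langgraphagenticai.graph.graph_builder import GraphBuilder

# Hypothetical controls; in the app these come from the Streamlit sidebar.
controls = {"GROQ_API_KEY": "gsk_...", "selected_groq_model": "llama3-8b-8192"}
model = GroqLLM(user_controls_input=controls).get_llm_model()

graph = GraphBuilder(model).setup_graph("Basic Chatbot")
result = graph.invoke({"messages": [("user", "What is LangGraph?")]})
print(result["messages"][-1].content)  # the last message is the assistant reply
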
src/langgraphagenticai/main.py
ADDED
@@ -0,0 +1,66 @@
import streamlit as st
import json
from src.langgraphagenticai.ui.streamlitui.loadui import LoadStreamlitUI
from src.langgraphagenticai.LLMS.groqllm import GroqLLM
from src.langgraphagenticai.graph.graph_builder import GraphBuilder
from src.langgraphagenticai.ui.streamlitui.display_result import DisplayResultStreamlit

# MAIN Function START
def load_langgraph_agenticai_app():
    """
    Loads and runs the LangGraph AgenticAI application with Streamlit UI.
    This function initializes the UI, handles user input, configures the LLM model,
    sets up the graph based on the selected use case, and displays the output while
    implementing exception handling for robustness.
    """

    # Load UI
    ui = LoadStreamlitUI()
    user_input = ui.load_streamlit_ui()

    if not user_input:
        st.error("Error: Failed to load user input from the UI.")
        return

    # Text input for user message
    if st.session_state.IsFetchButtonClicked:
        user_message = st.session_state.timeframe
    else:
        user_message = st.chat_input("Enter your message:")

    if user_message:
        try:
            # Configure LLM
            obj_llm_config = GroqLLM(user_controls_input=user_input)
            model = obj_llm_config.get_llm_model()

            if not model:
                st.error("Error: LLM model could not be initialized.")
                return

            # Initialize and set up the graph based on use case
            usecase = user_input.get('selected_usecase')
            if not usecase:
                st.error("Error: No use case selected.")
                return

            ### Graph Builder
            graph_builder = GraphBuilder(model)
            try:
                graph = graph_builder.setup_graph(usecase)
                DisplayResultStreamlit(usecase, graph, user_message).display_result_on_ui()
            except Exception as e:
                st.error(f"Error: Graph setup failed - {e}")
                return

        except Exception as e:
            raise ValueError(f"Error occurred with exception: {e}")

src/langgraphagenticai/nodes/__init__.py
ADDED
File without changes

src/langgraphagenticai/nodes/__pycache__/__init__.cpython-310.pyc
ADDED
Binary file (185 Bytes).

src/langgraphagenticai/nodes/__pycache__/__init__.cpython-312.pyc
ADDED
Binary file (157 Bytes).

src/langgraphagenticai/nodes/__pycache__/basic_chatbot_node.cpython-310.pyc
ADDED
Binary file (907 Bytes).

src/langgraphagenticai/nodes/__pycache__/basic_chatbot_node.cpython-312.pyc
ADDED
Binary file (1 kB).

src/langgraphagenticai/nodes/basic_chatbot_node.py
ADDED
@@ -0,0 +1,14 @@
from src.langgraphagenticai.state.state import State

class BasicChatbotNode:
    """
    Basic chatbot logic implementation.
    """
    def __init__(self, model):
        self.llm = model

    def process(self, state: State) -> dict:
        """
        Processes the input state and generates a chatbot response.
        """
        return {"messages": self.llm.invoke(state['messages'])}

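The node only assumes a model exposing invoke(messages), so its contract can be exercised with a stand-in LLM. The FakeLLM below is hypothetical, purely for illustration:

from langchain_core.messages import AIMessage, HumanMessage
from src.langgraphagenticai.nodes.basic_chatbot_node import BasicChatbotNode

class FakeLLM:
    """Stand-in model that echoes the last message back."""
    def invoke(self, messages):
        return AIMessage(content=f"echo: {messages[-1].content}")

node = BasicChatbotNode(FakeLLM())
out = node.process({"messages": [HumanMessage(content="hi")]})
print(out["messages"].content)  # -> "echo: hi"
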
src/langgraphagenticai/state/__init__.py
ADDED
File without changes

src/langgraphagenticai/state/__pycache__/__init__.cpython-310.pyc
ADDED
Binary file (185 Bytes).

src/langgraphagenticai/state/__pycache__/__init__.cpython-312.pyc
ADDED
Binary file (157 Bytes).

src/langgraphagenticai/state/__pycache__/state.cpython-310.pyc
ADDED
Binary file (795 Bytes).

src/langgraphagenticai/state/__pycache__/state.cpython-312.pyc
ADDED
Binary file (820 Bytes).

src/langgraphagenticai/state/state.py
ADDED
@@ -0,0 +1,11 @@
from typing import Annotated, Literal, Optional
from typing_extensions import TypedDict
from langgraph.graph.message import add_messages
from typing import TypedDict, Annotated, List
from langchain_core.messages import HumanMessage, AIMessage

class State(TypedDict):
    """
    Represents the structure of the state used in the graph.
    """
    messages: Annotated[list, add_messages]

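The add_messages annotation is what makes messages accumulate across graph steps instead of being overwritten. A small illustration of the reducer on its own, using only langgraph and langchain_core (both already in requirements.txt):

from langgraph.graph.message import add_messages
from langchain_core.messages import AIMessage, HumanMessage

existing = [HumanMessage(content="hi")]
update = AIMessage(content="hello!")     # the shape BasicChatbotNode.process returns
merged = add_messages(existing, update)  # appends rather than replaces
print([m.content for m in merged])       # ['hi', 'hello!']
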
src/langgraphagenticai/tools/__init__.py
ADDED
File without changes

src/langgraphagenticai/ui/__init__.py
ADDED
File without changes

src/langgraphagenticai/ui/__pycache__/__init__.cpython-310.pyc
ADDED
Binary file (182 Bytes).

src/langgraphagenticai/ui/__pycache__/__init__.cpython-312.pyc
ADDED
Binary file (154 Bytes).

src/langgraphagenticai/ui/__pycache__/uiconfigfile.cpython-310.pyc
ADDED
Binary file (1.29 kB).

src/langgraphagenticai/ui/__pycache__/uiconfigfile.cpython-312.pyc
ADDED
Binary file (1.8 kB).

src/langgraphagenticai/ui/streamlitui/__pycache__/display_result.cpython-310.pyc
ADDED
Binary file (1.22 kB).

src/langgraphagenticai/ui/streamlitui/__pycache__/display_result.cpython-312.pyc
ADDED
Binary file (1.82 kB).

src/langgraphagenticai/ui/streamlitui/__pycache__/loadui.cpython-310.pyc
ADDED
Binary file (2.07 kB).

src/langgraphagenticai/ui/streamlitui/__pycache__/loadui.cpython-312.pyc
ADDED
Binary file (3.39 kB).

src/langgraphagenticai/ui/streamlitui/display_result.py
ADDED
@@ -0,0 +1,24 @@
import streamlit as st
from langchain_core.messages import HumanMessage, AIMessage
import json


class DisplayResultStreamlit:
    def __init__(self, usecase, graph, user_message):
        self.usecase = usecase
        self.graph = graph
        self.user_message = user_message

    def display_result_on_ui(self):
        usecase = self.usecase
        graph = self.graph
        user_message = self.user_message
        if usecase == "Basic Chatbot":
            for event in graph.stream({'messages': ("user", user_message)}):
                print(event.values())
                for value in event.values():
                    print(value['messages'])
                    with st.chat_message("user"):
                        st.write(user_message)
                    with st.chat_message("assistant"):
                        st.write(value["messages"].content)

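For reference, each event yielded by graph.stream maps a node name to that node's state update, which is why value["messages"].content works above. A self-contained sketch of the same loop outside Streamlit (key and model name are placeholders):

from src.langgraphagenticai.LLMS.groqllm import GroqLLM
from src.langgraphagenticai.graph.graph_builder import GraphBuilder

controls = {"GROQ_API_KEY": "gsk_...", "selected_groq_model": "llama3-8b-8192"}  # placeholders
graph = GraphBuilder(GroqLLM(controls).get_llm_model()).setup_graph("Basic Chatbot")

# Each event looks like {"chatbot": {"messages": AIMessage(...)}}
for event in graph.stream({"messages": [("user", "hello")]}):
    for value in event.values():
        print(value["messages"].content)  # the assistant reply produced by that node
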
src/langgraphagenticai/ui/streamlitui/loadui.py
ADDED
@@ -0,0 +1,64 @@
import streamlit as st
import os
from datetime import date

from langchain_core.messages import AIMessage, HumanMessage
from src.langgraphagenticai.ui.uiconfigfile import Config


class LoadStreamlitUI:
    def __init__(self):
        self.config = Config()  # config
        self.user_controls = {}

    def initialize_session(self):
        return {
            "current_step": "requirements",
            "requirements": "",
            "user_stories": "",
            "po_feedback": "",
            "generated_code": "",
            "review_feedback": "",
            "decision": None
        }

    def load_streamlit_ui(self):
        st.set_page_config(page_title=self.config.get_page_title(), layout="wide")
        st.header(self.config.get_page_title())
        st.session_state.timeframe = ''
        st.session_state.IsFetchButtonClicked = False
        st.session_state.IsSDLC = False

        with st.sidebar:
            # Get options from config
            llm_options = self.config.get_llm_options()
            usecase_options = self.config.get_usecase_options()

            # LLM selection
            self.user_controls["selected_llm"] = st.selectbox("Select LLM", llm_options)

            if self.user_controls["selected_llm"] == 'Groq':
                # Model selection
                model_options = self.config.get_groq_model_options()
                self.user_controls["selected_groq_model"] = st.selectbox("Select Model", model_options)
                # API key input
                self.user_controls["GROQ_API_KEY"] = st.session_state["GROQ_API_KEY"] = st.text_input("API Key",
                                                                                                       type="password")
                # Validate API key
                if not self.user_controls["GROQ_API_KEY"]:
                    st.warning("⚠️ Please enter your GROQ API key to proceed. Don't have one? See https://console.groq.com/keys")

            # Use case selection
            self.user_controls["selected_usecase"] = st.selectbox("Select Usecases", usecase_options)

            if "state" not in st.session_state:
                st.session_state.state = self.initialize_session()

        return self.user_controls

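Note that uiconfigfile.py itself falls outside this 50-file view, so the Config class loadui.py imports is not shown in this commit. For readers who want to exercise LoadStreamlitUI locally, a hypothetical stand-in that satisfies only the methods called above could look like this (all values are assumptions, not the committed configuration):

class Config:
    """Stand-in for src/langgraphagenticai/ui/uiconfigfile.Config (not the real file)."""
    def get_page_title(self):
        return "LangGraph Agentic AI"            # assumed title
    def get_llm_options(self):
        return ["Groq"]                          # assumed provider list
    def get_usecase_options(self):
        return ["Basic Chatbot"]                 # matches the use case handled in graph_builder.py
    def get_groq_model_options(self):
        return ["llama3-8b-8192", "gemma2-9b-it"]  # assumed model names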