Update veryfinal.py

veryfinal.py  CHANGED  (+32 -25)
@@ -1,5 +1,4 @@
 """Enhanced LangGraph + Agno Hybrid Agent System"""
-
 import os
 import time
 import random
@@ -160,7 +159,7 @@ def modulus(a: int, b: int) -> int:
 
 @tool
 def optimized_web_search(query: str) -> str:
-    """Optimized Tavily web search
+    """Optimized Tavily web search."""
     try:
         time.sleep(random.uniform(1, 2))
         docs = TavilySearchResults(max_results=2).invoke(query=query)
@@ -173,7 +172,7 @@ def optimized_web_search(query: str) -> str:
 
 @tool
 def optimized_wiki_search(query: str) -> str:
-    """Optimized Wikipedia search
+    """Optimized Wikipedia search."""
     try:
         time.sleep(random.uniform(0.5, 1))
         docs = WikipediaLoader(query=query, load_max_docs=1).load()
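
For a quick sanity check of the repaired docstrings, the two tools can be invoked directly. The sketch below is illustrative only: it assumes veryfinal.py is importable as a module and that TAVILY_API_KEY is set for the Tavily search; neither the import path nor the test queries come from this commit.

# Hypothetical smoke test for the fixed @tool functions (not part of the commit).
from veryfinal import optimized_web_search, optimized_wiki_search

# Functions decorated with @tool become LangChain tools; .invoke() takes the tool's arguments as a dict.
print(optimized_wiki_search.invoke({"query": "Abraham Lincoln"})[:200])
print(optimized_web_search.invoke({"query": "LangGraph checkpointer"})[:200])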
@@ -283,35 +282,43 @@ class HybridLangGraphAgnoSystem:
         g.add_node("agno_general",agno_general)
         g.set_entry_point("router")
         g.add_conditional_edges("router",pick,{
-
-
+            "lg_math":"lg_math","agno_research":"agno_research",
+            "lg_retrieval":"lg_retrieval","agno_general":"agno_general"
         })
         for n in ["lg_math","agno_research","lg_retrieval","agno_general"]:
             g.add_edge(n,"END")
         return g.compile(checkpointer=MemorySaver())
-    def process_query(self, q: str) -> Dict[str,Any]:
-        state={
-            "messages":[HumanMessage(content=q)],
-            "query":q,"agent_type":"","final_answer":"",
-            "perf":{},"agno_resp":""
-        }
-        cfg={"configurable":{"thread_id":f"hyb_{hash(q)}"}}
-        try:
-            out=self.graph.invoke(state,cfg)
-            return {
-                "answer":out["final_answer"],
-                "performance_metrics":out["perf"],
-                "provider_used":out["perf"].get("prov")
-            }
-        except Exception as e:
-            return {"answer":f"Error: {e}","performance_metrics":{},"provider_used":"Error"}
 
-def
-
+    def process_query(self, q: str) -> Dict[str,Any]:
+        state={
+            "messages":[HumanMessage(content=q)],
+            "query":q,"agent_type":"","final_answer":"",
+            "perf":{},"agno_resp":""
+        }
+        cfg={"configurable":{"thread_id":f"hyb_{hash(q)}"}}
+        try:
+            out=self.graph.invoke(state,cfg)
+            return {
+                "answer":out["final_answer"],
+                "performance_metrics":out["perf"],
+                "provider_used":out["perf"].get("prov")
+            }
+        except Exception as e:
+            return {"answer":f"Error: {e}","performance_metrics":{},"provider_used":"Error"}
+
+def build_graph(provider: str = "hybrid"):
+    """
+    Build and return the StateGraph for the requested provider.
+    - "hybrid", "groq", "google", and "nvidia" are all valid and
+      will return the full HybridLangGraphAgnoSystem graph.
+    """
+    if provider in ("hybrid", "groq", "google", "nvidia"):
         return HybridLangGraphAgnoSystem().graph
-
+    else:
+        raise ValueError(f"Unsupported provider: '{provider}'. Please use 'hybrid', 'groq', 'google', or 'nvidia'.")
 
-
+# Test
+if __name__=="__main__":
     graph=build_graph()
     msgs=[HumanMessage(content="What are the names of the US presidents who were assassinated?")]
     res=graph.invoke({"messages":msgs},{"configurable":{"thread_id":"test"}})
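
The new build_graph() helper and process_query() method can be exercised as sketched below. This is a usage sketch under assumptions, not code from the commit: it presumes veryfinal.py is importable, the provider API keys are configured in the environment, and HumanMessage comes from langchain_core.messages; the direct graph.invoke() call mirrors the commit's own __main__ test.

# Hypothetical usage sketch for the added build_graph()/process_query() APIs.
from langchain_core.messages import HumanMessage
from veryfinal import build_graph, HybridLangGraphAgnoSystem

# Any of "hybrid", "groq", "google", "nvidia" returns the compiled hybrid graph.
graph = build_graph("hybrid")
res = graph.invoke(
    {"messages": [HumanMessage(content="What is 25 * 17?")]},
    {"configurable": {"thread_id": "demo"}},
)

# process_query() wraps the same graph and returns a plain dict with
# "answer", "performance_metrics", and "provider_used" keys.
out = HybridLangGraphAgnoSystem().process_query("Who discovered penicillin?")
print(out["answer"], out["provider_used"])

# Unsupported providers now fail fast with a ValueError.
try:
    build_graph("openai")
except ValueError as err:
    print(err)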