rkihacker committed on
Commit b58dc7b · verified · 1 Parent(s): 9942a54

Update main.py

Files changed (1)
  1. main.py +43 -0
main.py CHANGED
@@ -73,6 +73,11 @@ class DeepResearchRequest(BaseModel):
     query: str
     search_time: int = 300 # Default to 5 minutes
 
+class SearchRequest(BaseModel):
+    query: str
+    search_time: int = 60 # Default: 1 minute for search-only
+    max_results: int = 20 # Number of results to return
+
 app = FastAPI(
     title="AI Deep Research API",
     description="Provides comprehensive research reports from real web searches within 5 minutes.",
@@ -908,6 +913,44 @@ async def deep_research_endpoint(request: DeepResearchRequest):
         headers={"Cache-Control": "no-cache", "Connection": "keep-alive"}
     )
 
+@app.post("/v1/search")
+async def search_only_endpoint(request: SearchRequest):
+    """Search-only endpoint that returns JSON (no streaming)."""
+    if not request.query or len(request.query.strip()) < 3:
+        raise HTTPException(status_code=400, detail="Query must be at least 3 characters long")
+
+    # Clamp durations and limits
+    search_time = min(max(int(request.search_time), 5), 300)
+    max_results = min(max(int(request.max_results), 1), MAX_SOURCES_TO_PROCESS * 2)
+
+    aggregated: List[Dict[str, str]] = []
+    async for update in continuous_search(request.query.strip(), search_time):
+        # We ignore status/warning events; only keep final results
+        if update.get("event") == "final_search_results":
+            aggregated = update.get("data", [])
+
+    # Deduplicate by normalized link
+    dedup: List[Dict[str, str]] = []
+    seen: set = set()
+    for r in aggregated:
+        link = clean_url(r.get("link", ""))
+        title = r.get("title", "")
+        snippet = r.get("snippet", "")
+        if not link:
+            continue
+        if link in seen:
+            continue
+        seen.add(link)
+        dedup.append({"title": title, "link": link, "snippet": snippet})
+        if len(dedup) >= max_results:
+            break
+
+    return {
+        "query": request.query.strip(),
+        "count": len(dedup),
+        "results": dedup,
+    }
+
 if __name__ == "__main__":
     import uvicorn
     uvicorn.run(app, host="0.0.0.0", port=8000)
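
For reference (not part of the commit), a minimal client call against the new /v1/search endpoint could look like the sketch below. It assumes the service is running locally on port 8000, as in the __main__ block above; the query string and parameter values are placeholders.

# Sketch of a client call to the new search-only endpoint (assumes a local server on port 8000).
import json
import urllib.request

payload = json.dumps({
    "query": "open source web search APIs",  # placeholder query
    "search_time": 30,   # clamped server-side to the 5-300 second range
    "max_results": 5,    # clamped server-side to at most MAX_SOURCES_TO_PROCESS * 2
}).encode("utf-8")

req = urllib.request.Request(
    "http://localhost:8000/v1/search",
    data=payload,
    headers={"Content-Type": "application/json"},
    method="POST",
)

with urllib.request.urlopen(req) as resp:
    body = json.load(resp)

# The handler returns {"query": ..., "count": ..., "results": [...]}.
for result in body["results"]:
    print(result["title"], "-", result["link"])

Because the handler drains continuous_search before responding, the call blocks for up to search_time seconds and then returns a single JSON document rather than a stream.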
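
A quick in-process check is also possible with FastAPI's TestClient, assuming this repository's main.py is importable as main. Note that the second request drives continuous_search for real, so it performs live searches and can take several seconds.

# In-process checks against the new endpoint (assumes `from main import app` works).
from fastapi.testclient import TestClient

from main import app  # assumption: the module added to in this commit

client = TestClient(app)

# A query shorter than 3 characters should be rejected with a 400.
resp = client.post("/v1/search", json={"query": "ai"})
assert resp.status_code == 400

# A valid query returns the JSON shape built at the end of the handler.
resp = client.post("/v1/search", json={"query": "ai research agents", "search_time": 5, "max_results": 3})
assert resp.status_code == 200
body = resp.json()
assert set(body) == {"query", "count", "results"}
assert body["count"] == len(body["results"]) <= 3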