Update app.py
app.py
CHANGED
@@ -1,7 +1,7 @@
-from fastapi import FastAPI, HTTPException
+from fastapi import FastAPI, HTTPException, Query  # Make sure Query is imported
 from fastapi.responses import JSONResponse
-from webscout import WEBS, transcriber
-from typing import Optional
+from webscout import WEBS, transcriber, LLM
+from typing import Optional, List, Dict, Union  # Import List, Dict, Union
 from fastapi.encoders import jsonable_encoder
 from bs4 import BeautifulSoup
 import requests
@@ -90,6 +90,25 @@ async def news(
     except Exception as e:
         raise HTTPException(status_code=500, detail=f"Error during news search: {e}")

+@app.get("/api/llm")
+async def llm_chat(
+    model: str,
+    message: str,
+    system_prompt: str = Query(None, description="Optional custom system prompt")
+):
+    """Interact with a specified large language model with an optional system prompt."""
+    try:
+        messages = [{"role": "user", "content": message}]
+        if system_prompt:
+            messages.insert(0, {"role": "system", "content": system_prompt})  # Add system message at the beginning
+
+        llm = LLM(model=model)
+        response = llm.chat(messages=messages)
+        return JSONResponse(content={"response": response})
+    except Exception as e:
+        raise HTTPException(status_code=500, detail=f"Error during LLM chat: {e}")
+
+
 @app.get("/api/answers")
 async def answers(q: str):
     """Get instant answers for a query."""
@@ -318,4 +337,4 @@ def get_ascii_weather(location: str):
 # Run the API server if this script is executed
 if __name__ == "__main__":
     import uvicorn
-    uvicorn.run(app, host="0.0.0.0", port=
+    uvicorn.run(app, host="0.0.0.0", port=8083)