Spaces:
Paused
Paused
Update app.py
Browse files
app.py
CHANGED
|
@@ -19,6 +19,7 @@ from huggingface_hub import HfApi
|
|
| 19 |
from huggingface_hub import InferenceClient
|
| 20 |
from PIL import Image
|
| 21 |
import io
|
|
|
|
| 22 |
|
| 23 |
app = FastAPI()
|
| 24 |
|
|
@@ -219,6 +220,25 @@ async def chat(
|
|
| 219 |
except Exception as e:
|
| 220 |
raise HTTPException(status_code=500, detail=f"Error getting chat results: {e}")
|
| 221 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 222 |
def extract_text_from_webpage(html_content):
|
| 223 |
"""Extracts visible text from HTML content using BeautifulSoup."""
|
| 224 |
soup = BeautifulSoup(html_content, "html.parser")
|
|
|
|
| 19 |
from huggingface_hub import InferenceClient
|
| 20 |
from PIL import Image
|
| 21 |
import io
|
| 22 |
+
import ast
|
| 23 |
|
| 24 |
app = FastAPI()
|
| 25 |
|
|
|
|
| 220 |
except Exception as e:
|
| 221 |
raise HTTPException(status_code=500, detail=f"Error getting chat results: {e}")
|
| 222 |
|
| 223 |
+
# Define a Pydantic model for the request payload
class ChatRequest(BaseModel):
    """Request payload for the POST /api/chat-post endpoint."""

    q: str  # user query/keywords forwarded to webs.chat()
    history: str = "[]"  # prior chat messages as a Python-literal list string; handler parses it with ast.literal_eval
    model: str = "gpt-4o-mini"  # chat model identifier passed through to webs.chat()
    proxy: Optional[str] = None  # optional proxy used when constructing the WEBS client
|
| 229 |
+
|
| 230 |
+
@app.post("/api/chat-post")
async def chat(request: ChatRequest):
    """Run a chat completion through the WEBS client.

    Parses the Python-literal message history from ``request.history``
    (via ``ast.literal_eval``, which rejects arbitrary code, unlike ``eval``),
    sends the query to ``webs.chat`` with the requested model, and returns
    the JSON-encoded result.

    Raises:
        HTTPException: 500 with the underlying error message on any failure
            (bad history literal, network/backend error, etc.).
    """
    try:
        with WEBS(proxy=request.proxy) as webs:
            chat_messages = []
            chat_messages.extend(ast.literal_eval(request.history))
            # BUG FIX: original read `chat model=request.model`, a stray `chat `
            # token that made the call a syntax error; the keyword is `model`.
            results = webs.chat(keywords=request.q, chat_messages=chat_messages, model=request.model)
            return JSONResponse(content=jsonable_encoder(results))
    except Exception as e:
        # Boundary handler: surface any failure as a 500 with context,
        # chaining the original exception for server-side tracebacks.
        raise HTTPException(status_code=500, detail=f"Error getting chat results: {e}") from e
|
| 241 |
+
|
| 242 |
def extract_text_from_webpage(html_content):
|
| 243 |
"""Extracts visible text from HTML content using BeautifulSoup."""
|
| 244 |
soup = BeautifulSoup(html_content, "html.parser")
|