|
|
import httpx |
|
|
from fastapi import FastAPI, Request, HTTPException |
|
|
from starlette.responses import StreamingResponse |
|
|
from starlette.background import BackgroundTask |
|
|
import os |
|
|
from contextlib import asynccontextmanager |
|
|
|
|
|
|
|
|
|
|
|
# Upstream origin that every proxied /chat request is forwarded to.
# Overridable at deploy time via the TARGET_URL environment variable.
TARGET_URL = os.getenv("TARGET_URL", "https://console.gmicloud.ai")
|
|
|
|
|
|
|
|
@asynccontextmanager
async def lifespan(app: FastAPI):
    """
    Manage the lifecycle of the shared HTTPX client.

    The client is created once on startup, stored on ``app.state`` so every
    request handler reuses the same connection pool, and gracefully closed
    on shutdown.

    The *read* timeout is left unbounded on purpose: the upstream streams
    long-lived LLM responses with arbitrary gaps between chunks.  Connect,
    write and pool-acquire timeouts ARE bounded so a dead upstream cannot
    hang requests forever, and explicit pool limits keep the number of open
    connections under control.
    """
    timeout = httpx.Timeout(connect=10.0, read=None, write=30.0, pool=10.0)
    limits = httpx.Limits(max_connections=100, max_keepalive_connections=20)
    async with httpx.AsyncClient(
        base_url=TARGET_URL, timeout=timeout, limits=limits
    ) as client:
        app.state.http_client = client
        yield
|
|
|
|
|
|
|
|
# ASGI application.  Interactive API docs are disabled because this service
# is a thin reverse proxy, not a public API surface.
app = FastAPI(docs_url=None, redoc_url=None, lifespan=lifespan)
|
|
|
|
|
|
|
|
# Hop-by-hop headers (RFC 9110 section 7.6.1) describe a single connection,
# not the payload.  Forwarding them verbatim from the upstream response can
# conflict with the framing our own ASGI server applies (e.g. a stale
# ``Transfer-Encoding: chunked``), so they are stripped before re-sending.
_HOP_BY_HOP_HEADERS = frozenset({
    "connection",
    "keep-alive",
    "proxy-authenticate",
    "proxy-authorization",
    "te",
    "trailers",
    "transfer-encoding",
    "upgrade",
})


async def _reverse_proxy(request: Request):
    """
    Forward a request for the /chat endpoint to the target URL.

    Strips the client-supplied ``Host`` and ``Authorization`` headers,
    injects the browser-like headers the upstream expects, and streams the
    upstream response back to the caller unmodified (minus hop-by-hop
    headers).

    Raises:
        HTTPException: 502 when the upstream cannot be reached or the
            request to it fails (connect errors, timeouts, protocol errors).
    """
    client: httpx.AsyncClient = request.app.state.http_client

    # Rebuild the target URL relative to the client's base_url, preserving
    # the original path and query string.
    url = httpx.URL(path=request.url.path, query=request.url.query.encode("utf-8"))

    request_headers = dict(request.headers)

    # Drop headers that must not be forwarded: our own Host, any
    # user-provided credentials, and body-framing headers that httpx
    # recomputes itself from the re-sent body bytes.
    for header in ("host", "authorization", "content-length", "transfer-encoding"):
        request_headers.pop(header, None)

    # Headers the upstream expects from a real browser session; these
    # override anything the caller supplied with the same names.
    specific_headers = {
        "accept": "application/json, text/plain, */*",
        "accept-language": "en-US,en;q=0.9,ru;q=0.8",
        "content-type": "application/json",
        "origin": "https://console.gmicloud.ai",
        "priority": "u=1, i",
        "referer": "https://console.gmicloud.ai/playground/llm/deepseek-r1-0528/01da5dd6-aa6a-40cb-9dbd-241467aa5cbb?tab=playground",
        "sec-ch-ua": '"Not;A=Brand";v="99", "Google Chrome";v="139", "Chromium";v="139"',
        "sec-ch-ua-mobile": "?0",
        "sec-ch-ua-platform": '"Windows"',
        "sec-fetch-dest": "empty",
        "sec-fetch-mode": "cors",
        "sec-fetch-site": "same-origin",
        "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/139.0.0.0 Safari/537.36",
    }
    request_headers.update(specific_headers)

    rp_req = client.build_request(
        method=request.method,
        url=url,
        headers=request_headers,
        content=await request.body(),
    )

    try:
        rp_resp = await client.send(rp_req, stream=True)
    except httpx.RequestError as e:
        # RequestError is the base class for connect failures, timeouts and
        # protocol errors -- all of them mean the upstream gave no answer,
        # which maps to 502 rather than an unhandled 500.
        raise HTTPException(status_code=502, detail=f"Bad Gateway: Cannot connect to target service. {e}")

    # Pass the upstream response through minus hop-by-hop headers; the ASGI
    # server applies its own connection framing.
    response_headers = {
        key: value
        for key, value in rp_resp.headers.items()
        if key.lower() not in _HOP_BY_HOP_HEADERS
    }

    return StreamingResponse(
        rp_resp.aiter_raw(),
        status_code=rp_resp.status_code,
        headers=response_headers,
        background=BackgroundTask(rp_resp.aclose),
    )
|
|
|
|
|
|
|
|
@app.api_route(
    "/chat",
    methods=["GET", "POST", "PUT", "DELETE", "PATCH", "OPTIONS", "HEAD"]
)
async def chat_proxy_handler(request: Request):
    """
    Entry point for the "/chat" path.

    Accepts every supported HTTP method and hands the request off to the
    reverse-proxy helper, which streams the upstream response back.
    """
    proxied_response = await _reverse_proxy(request)
    return proxied_response
|
|
|
|
|
|
|
|
@app.get("/")
async def health_check():
    """
    Basic liveness probe.

    Reports the proxied endpoint and the actual configured upstream
    (``TARGET_URL``) instead of the previous hard-coded "TypeGPT" label,
    which did not match the real target.
    """
    return {"status": "ok", "proxying_endpoint": "/chat", "target": TARGET_URL}
|
|
|
|
|
|
|
|
|