# app.py — Space API FastAPI (Docker)
from __future__ import annotations
from typing import Dict, Any
import os, json, uuid, threading, time, traceback
from pathlib import Path
import logging, requests

from fastapi import FastAPI, HTTPException, Query
from fastapi.responses import JSONResponse, FileResponse
from fastapi.middleware.cors import CORSMiddleware

# --- Replace these imports with your real parser/export modules
# from rfp_parser.prompting import build_chat_payload
# from rfp_parser.exports_xls import build_xls_from_doc

def build_chat_payload(text: str, model: str) -> Dict[str, Any]:
    # TODO: wire in your real payload builder
    return {
        "model": model,
        "max_tokens": 2048,
        "messages": [{"role":"user","content": text}],
        "temperature": 0.2,
    }

def build_xls_from_doc(doc: Dict[str, Any], out_path: str, baseline_kg: float = 100.0):
    # TODO: wire in your real XLSX export
    # For the demo, just write a minimal xlsx (pandas needs an engine such as openpyxl)
    import pandas as pd
    df = pd.DataFrame([{"baseline_kg": baseline_kg, "ok": True}])
    df.to_excel(out_path, index=False)

# ---------------- Config ----------------
DEEPINFRA_API_KEY = os.environ.get("DEEPINFRA_API_KEY", "")
MODEL_NAME        = os.environ.get("RFP_MODEL", "meta-llama/Meta-Llama-3.1-70B-Instruct")
DEEPINFRA_URL     = os.environ.get("DEEPINFRA_URL", "https://api.deepinfra.com/v1/openai/chat/completions")
RFP_DEBUG         = str(os.environ.get("RFP_DEBUG", "0")).lower() in {"1", "true", "yes"}
BASE_TMP          = Path("/tmp/rfp_jobs"); BASE_TMP.mkdir(parents=True, exist_ok=True)
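
# Example (assumed) container invocation wiring these settings; the image name
# and published port are placeholders, not taken from the original project:
#   docker run -p 7860:7860 \
#     -e DEEPINFRA_API_KEY=... \
#     -e RFP_MODEL=meta-llama/Meta-Llama-3.1-70B-Instruct \
#     -e RFP_DEBUG=1 \
#     <your-image>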

logger = logging.getLogger("RFP_API")
if not logger.handlers:
    h = logging.StreamHandler()
    h.setFormatter(logging.Formatter("[API] %(levelname)s: %(message)s"))
    logger.addHandler(h)
logger.setLevel(logging.DEBUG if RFP_DEBUG else logging.INFO)

# -------------- In-memory jobs --------------
JOBS: Dict[str, Dict[str, Any]] = {}
JOBS_LOCK = threading.Lock()
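# Note: job state lives in process memory only; it is lost on restart and not
# shared across worker processes.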

def new_job(text: str) -> str:
    job_id = uuid.uuid4().hex[:12]
    with JOBS_LOCK:
        JOBS[job_id] = {
            "status": "queued",
            "error": None,
            "xlsx_path": None,
            "xlsx_url": None,
            "started_at": time.time(),
            "done_at": None,
            "meta": {"model": MODEL_NAME, "length": len(text or "")},
        }
    return job_id

def set_job_status(job_id: str, **updates):
    with JOBS_LOCK:
        if job_id in JOBS:
            JOBS[job_id].update(**updates)

# -------------- Core pipeline --------------
def parse_with_deepinfra(text: str) -> Dict[str, Any]:
    if not DEEPINFRA_API_KEY:
        raise RuntimeError("DEEPINFRA_API_KEY non défini.")
    payload = build_chat_payload(text, model=MODEL_NAME)
    headers = {"Authorization": f"Bearer {DEEPINFRA_API_KEY}", "Content-Type": "application/json"}
    logger.info("Appel DeepInfra model=%s max_tokens=%s", payload.get("model"), payload.get("max_tokens"))
    r = requests.post(DEEPINFRA_URL, headers=headers, json=payload, timeout=120)
    if r.status_code // 100 != 2:
        raise RuntimeError(f"DeepInfra HTTP {r.status_code}: {r.text}")
    data = r.json()
    try:
        content = data["choices"][0]["message"]["content"]
    except Exception:
        raise RuntimeError(f"Réponse inattendue DeepInfra: {json.dumps(data)[:400]}")
    try:
        doc = json.loads(content)
    except Exception as e:
        logger.warning("json.loads(content) failed; retrying after stripping code fences. Err=%s", e)
        cleaned = content.strip()
        if cleaned.startswith("```"):
            # Drop the opening fence line (``` or ```json); the closing fence is stripped below.
            cleaned = cleaned.split("\n", 1)[1] if "\n" in cleaned else cleaned.lstrip("`")
        doc = json.loads(cleaned.strip().strip("`").strip())
    if not isinstance(doc, dict):
        raise RuntimeError("Le contenu renvoyé n'est pas un objet JSON.")
    return doc

def build_xlsx(doc: Dict[str, Any], job_dir: Path) -> str:
    job_dir.mkdir(parents=True, exist_ok=True)
    out_path = str(job_dir / "feuille_de_charge.xlsx")
    baseline = (doc.get("assumptions") or {}).get("baseline_uop_kg") or 100.0
    try:
        baseline = float(baseline)
    except Exception:
        baseline = 100.0
    build_xls_from_doc(doc, out_path, baseline_kg=baseline)
    return out_path

def run_job(job_id: str, text: str) -> None:
    set_job_status(job_id, status="running")
    job_dir = BASE_TMP / job_id
    try:
        doc = parse_with_deepinfra(text)
        xlsx_path = build_xlsx(doc, job_dir)
        xlsx_url = f"/results/{job_id}/feuille_de_charge.xlsx"   # pas de /api en Docker (c’est la racine)
        set_job_status(job_id, status="done", xlsx_path=xlsx_path, xlsx_url=xlsx_url,
                       done_at=time.time(), meta={**JOBS[job_id]["meta"], "assumptions": doc.get("assumptions")})
        logger.info("Job %s terminé -> %s", job_id, xlsx_path)
    except Exception as e:
        logger.error("Job %s échoué: %s\n%s", job_id, e, traceback.format_exc())
        set_job_status(job_id, status="error", error=str(e), done_at=time.time())

# -------------- FastAPI app --------------
app = FastAPI(title="RFP_MASTER API", version="1.0.0")
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

@app.get("/health")
def health():
    return {"ok": True, "ts": time.time(), "model": MODEL_NAME}

@app.post("/submit")
def submit(payload: Dict[str, Any]):
    text = (payload or {}).get("text", "")
    if not isinstance(text, str) or not text.strip():
        raise HTTPException(400, "Missing or empty 'text' field.")
    job_id = new_job(text)
    logger.info("Submit reçu job_id=%s len(text)=%d", job_id, len(text))
    t = threading.Thread(target=run_job, args=(job_id, text), daemon=True)
    t.start()
    return JSONResponse({"job_id": job_id, "status": "queued"})

@app.get("/status")
def status(job_id: str = Query(..., description="Job ID returned by /submit")):
    with JOBS_LOCK:
        info = JOBS.get(job_id)
    if not info:
        raise HTTPException(404, f"Unknown job_id: {job_id}")
    return JSONResponse({
        "job_id": job_id,
        "status": info.get("status"),
        "xlsx_url": info.get("xlsx_url"),
        "error": info.get("error"),
        "meta": info.get("meta"),
    })

@app.get("/results/{job_id}/feuille_de_charge.xlsx")
def download(job_id: str):
    with JOBS_LOCK:
        info = JOBS.get(job_id)
    if not info:
        raise HTTPException(404, f"Unknown job_id: {job_id}")
    if info.get("status") != "done":
        raise HTTPException(409, f"job {job_id} not ready (status={info.get('status')})")
    xlsx_path = info.get("xlsx_path")
    if not xlsx_path or not Path(xlsx_path).exists():
        raise HTTPException(404, "File not available.")
    return FileResponse(
        xlsx_path,
        media_type="application/vnd.openxmlformats-officedocument.spreadsheetml.sheet",
        filename="feuille_de_charge.xlsx",
    )
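
# ---------------------------------------------------------------------------
# Minimal local entrypoint and client sketch (not part of the original file).
# Assumptions: uvicorn is installed alongside FastAPI, and the service listens
# on port 7860 (the usual Hugging Face Spaces port); adjust to your setup.
# ---------------------------------------------------------------------------
if __name__ == "__main__":
    import uvicorn
    # PORT is a hypothetical override; 7860 is only an assumed default.
    uvicorn.run(app, host="0.0.0.0", port=int(os.environ.get("PORT", "7860")))

# Example client flow against the endpoints above (curl, for illustration):
#   curl -X POST http://localhost:7860/submit \
#        -H "Content-Type: application/json" \
#        -d '{"text": "... RFP text ..."}'
#   curl "http://localhost:7860/status?job_id=<job_id>"
#   curl -o feuille_de_charge.xlsx \
#        "http://localhost:7860/results/<job_id>/feuille_de_charge.xlsx"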