Spaces:
Runtime error
Create app.py
app.py
ADDED
@@ -0,0 +1,129 @@
import os, tempfile, traceback
import gradio as gr
import spaces
import requests

# ---------- Cache & HF Hub settings ----------
os.environ.setdefault("HF_HOME", "/data/.cache/huggingface")
os.environ.setdefault("HF_HUB_CACHE", "/data/.cache/huggingface/hub")
os.environ.setdefault("TRANSFORMERS_CACHE", "/data/.cache/huggingface/transformers")
os.environ.setdefault("HF_HUB_ENABLE_XET", "0")
os.environ.setdefault("HF_HUB_ENABLE_HF_TRANSFER", "1")
os.environ.setdefault("TOKENIZERS_PARALLELISM", "false")
for p in (os.environ["HF_HOME"], os.environ["HF_HUB_CACHE"], os.environ["TRANSFORMERS_CACHE"]):
    os.makedirs(p, exist_ok=True)

# ---------- Docling imports ----------
from docling.datamodel.base_models import InputFormat
from docling.document_converter import DocumentConverter, PdfFormatOption
from docling.pipeline.vlm_pipeline import VlmPipeline

# CUDA info (informational)
try:
    import torch
    HAS_CUDA = torch.cuda.is_available()
    torch.set_num_threads(max(1, int(os.environ.get("OMP_NUM_THREADS", "2"))))
except Exception:
    HAS_CUDA = False

# Converters
std_converter = DocumentConverter(format_options={InputFormat.PDF: PdfFormatOption()})
vlm_converter = DocumentConverter(format_options={InputFormat.PDF: PdfFormatOption(pipeline_cls=VlmPipeline)})

# ---------- Helpers ----------
def _success(md: str, html: str):
    tmpdir = tempfile.gettempdir()
    md_path = os.path.join(tmpdir, "output.md")
    html_path = os.path.join(tmpdir, "output.html")
    with open(md_path, "w", encoding="utf-8") as f: f.write(md)
    with open(html_path, "w", encoding="utf-8") as f: f.write(html)
    return md, html, md_path, html_path

def _fail(msg: str):
    err = f"**Conversion failed**:\n```\n{msg}\n```"
    return err, "<pre>" + err + "</pre>", None, None

def _convert_local_path(path: str, use_vlm: bool):
    try:
        conv = vlm_converter if use_vlm else std_converter
        doc = conv.convert(source=path).document
        md = doc.export_to_markdown()
        html = doc.export_to_html()
        return _success(md, html)
    except Exception as e:
        return _fail(f"{e}\n\n{traceback.format_exc()}")

# ---------- GPU-decorated endpoints ----------
@spaces.GPU(duration=600)
def run_convert_file(file, mode):
    if file is None:
        return _fail("No file provided.")
    return _convert_local_path(file if isinstance(file, str) else file.name, mode.startswith("VLM"))  # gr.File may pass a filepath string or a file-like object with .name

@spaces.GPU(duration=600)
def run_convert_url(url, mode):
    if not url:
        return _fail("No URL provided.")
    try:
        r = requests.get(url, stream=True, timeout=60)
        r.raise_for_status()
        fd, tmp_path = tempfile.mkstemp(suffix=".pdf")
        with os.fdopen(fd, "wb") as tmp:
            for chunk in r.iter_content(chunk_size=1 << 20):
                if chunk:
                    tmp.write(chunk)
    except Exception as e:
        return _fail(f"Failed to download URL: {e}")
    try:
        return _convert_local_path(tmp_path, mode.startswith("VLM"))
    finally:
        try: os.remove(tmp_path)
        except OSError: pass

# ---------- UI ----------
subtitle = "Device: **CUDA (ZeroGPU)**" if HAS_CUDA else "Device: **CPU** (GPU warms on first call)"

with gr.Blocks(title="Granite-Docling 258M – PDF → Markdown/HTML") as demo:
    gr.Markdown(
        f"""# Granite-Docling 258M – PDF → Markdown / HTML
{subtitle}

**Modes**
- **Standard (faster)** – PDFs with a text layer
- **VLM (Granite – better for complex/scanned)** – scans / heavy tables / formulas

_First call may be slow while models download and ZeroGPU warms. Cache lives in `/data`._
"""
    )

    mode = gr.Radio(
        ["Standard (faster)", "VLM (Granite – better for complex/scanned)"],
        value="Standard (faster)", label="Mode"
    )

    with gr.Tab("Upload PDF"):
        fi = gr.File(file_types=[".pdf"], label="PDF")
        md_preview = gr.Markdown(label="Markdown Preview")
        html_preview = gr.HTML(label="HTML Preview")  # <-- rendered HTML
        dl_md = gr.File(label="Download Markdown (.md)")
        dl_html = gr.File(label="Download HTML (.html)")
        gr.Button("Convert").click(
            fn=run_convert_file,
            inputs=[fi, mode],
            outputs=[md_preview, html_preview, dl_md, dl_html]
        )

    with gr.Tab("Convert from URL"):
        url = gr.Textbox(label="Public PDF URL", placeholder="https://.../file.pdf")
        md_preview2 = gr.Markdown(label="Markdown Preview")
        html_preview2 = gr.HTML(label="HTML Preview")
        dl_md2 = gr.File(label="Download Markdown (.md)")
        dl_html2 = gr.File(label="Download HTML (.html)")
        gr.Button("Convert").click(
            fn=run_convert_url,
            inputs=[url, mode],
            outputs=[md_preview2, html_preview2, dl_md2, dl_html2]
        )

# Bind & queue
demo.queue().launch(server_name="0.0.0.0", server_port=int(os.environ.get("PORT", 7860)))
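
Once the Space builds and runs, the two click handlers are also reachable as API endpoints. Below is a minimal sketch of calling them from Python with gradio_client; it assumes a recent gradio_client (with handle_file), Gradio's default endpoint naming (api_name derived from the function names), and a hypothetical Space id and PDF URL:

from gradio_client import Client, handle_file

client = Client("your-username/granite-docling-demo")  # hypothetical Space id

# Upload-PDF endpoint: returns (markdown, html, md_path, html_path)
md, html, md_file, html_file = client.predict(
    handle_file("sample.pdf"),
    "Standard (faster)",
    api_name="/run_convert_file",
)

# URL endpoint takes a plain string URL plus the same mode label
md2, html2, md_file2, html_file2 = client.predict(
    "https://example.com/some-file.pdf",
    "Standard (faster)",
    api_name="/run_convert_url",
)
print(md[:500])

Both calls should return the same four outputs the UI shows: the Markdown text, the HTML text, and local paths to the generated .md and .html files.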