PaperShow / app.py
JaceWei's picture
update
80331d3
raw
history blame
6.71 kB
import gradio as gr
import subprocess, shutil, os, zipfile, datetime
from pathlib import Path
def _ensure_versions():
    """Ensure a huggingface_hub version compatible with this Space.

    Accepts any installed huggingface_hub in [0.24.0, 1.0.0); otherwise
    force-reinstalls a known-good huggingface-hub/transformers pair via pip
    (no-deps, so the rest of the environment is left untouched).
    """
    import importlib
    import subprocess
    import sys

    def _installed_version(package):
        # "0" stands in for "not importable / no __version__" so the
        # comparison below simply fails and triggers the reinstall.
        try:
            module = importlib.import_module(package)
        except Exception:
            return "0"
        return getattr(module, "__version__", "0")

    compatible = False
    try:
        from packaging.version import Version
        installed = Version(_installed_version("huggingface_hub"))
        compatible = Version("0.24.0") <= installed < Version("1.0.0")
    except Exception:
        # packaging missing or version unparsable -> treat as incompatible.
        compatible = False

    if not compatible:
        subprocess.check_call([
            sys.executable, "-m", "pip",
            "install", "huggingface-hub==0.27.1",
            "transformers==4.48.0",
            "--force-reinstall", "--no-deps",
        ])
_ensure_versions()
# --- Filesystem layout (all paths anchored at this file's directory) ---
ROOT = Path(__file__).resolve().parent
OUTPUT_DIR = ROOT / "output"  # pipeline results; zipped for download
INPUT_DIR = ROOT / "input"  # uploaded paper, logos, and other inputs
LOGO_DIR = INPUT_DIR / "logo"  # institutional logos (at least one required)
POSTER_LATEX_DIR = ROOT / "posterbuilder" / "latex_proj"  # created at startup; presumably consumed by pipeline.py — not referenced elsewhere in this file
ZIP_PATH = ROOT / "output.zip"  # final artifact served to the user
LOG_PATH = ROOT / "last_run.log"  # plain-text log of the most recent run
def _normalize_files(files):
    """Coerce a Gradio file input (None, single file, or list) into a flat list."""
    if files is None:
        return []
    if not isinstance(files, (list, tuple)):
        files = [files]
    return [f for f in files if f]  # filter None entries


def _reset_output_dir():
    """Delete everything under OUTPUT_DIR and remove a stale output.zip."""
    for item in OUTPUT_DIR.iterdir():
        if item.is_dir():
            shutil.rmtree(item)
        else:
            item.unlink()
    if ZIP_PATH.exists():
        ZIP_PATH.unlink()


def run_pipeline(arxiv_url, pdf_file, openai_key, logo_files):
    """Run the poster-generation pipeline once and package its output.

    Parameters:
        arxiv_url:  arXiv link to the paper (one of arxiv_url / pdf_file is
                    required).
        pdf_file:   Gradio file object for an uploaded PDF, or None; its
                    ``.name`` attribute is the temp-file path on disk.
        openai_key: OpenAI API key forwarded to pipeline.py.
        logo_files: one or more uploaded logo images (required).

    Returns:
        (log_text, zip_path): joined log string plus the path to output.zip,
        or (log_text, None) on any validation or runtime failure.
    """
    import sys  # local import: the module top level does not import sys

    start_time = datetime.datetime.now()
    logs = [f"🚀 Starting pipeline at {start_time.strftime('%Y-%m-%d %H:%M:%S')}\n"]

    # ---- Validate inputs FIRST, before any destructive cleanup ----
    # (Previously the old output was wiped before validation, so an invalid
    # request destroyed the last good result.)
    logo_files = _normalize_files(logo_files)
    if len(logo_files) == 0:
        logs.append("❌ You must upload at least one institutional logo (multiple allowed).")
        _write_logs(logs)
        return "\n".join(logs), None
    if not arxiv_url and not pdf_file:
        logs.append("❌ Please provide either an arXiv link or upload a PDF file (choose one).")
        _write_logs(logs)
        return "\n".join(logs), None

    # ---- Prepare directories and clean stale results ----
    for d in (OUTPUT_DIR, LOGO_DIR, POSTER_LATEX_DIR, INPUT_DIR):
        d.mkdir(parents=True, exist_ok=True)
    _reset_output_dir()
    logs.append("🧹 Cleaned previous output.\n")

    # ---- Save logos (clear previously saved ones first) ----
    for item in LOGO_DIR.iterdir():
        if item.is_file():
            item.unlink()
    saved_logo_paths = []
    for lf in logo_files:
        dest = LOGO_DIR / Path(lf.name).name
        shutil.copy(lf.name, dest)
        saved_logo_paths.append(dest)
    logs.append(f"🏷️ Saved {len(saved_logo_paths)} logo file(s) to: {LOGO_DIR}\n")

    # ---- Save uploaded PDF (optional input source) ----
    if pdf_file:
        pdf_dir = INPUT_DIR / "pdf"
        pdf_dir.mkdir(parents=True, exist_ok=True)
        pdf_path = pdf_dir / Path(pdf_file.name).name
        shutil.copy(pdf_file.name, pdf_path)
        logs.append(f"📄 Uploaded PDF saved to: {pdf_path}\n")
        # Pipeline Step 1.5 compatibility: also copy to input/paper.pdf.
        shutil.copy(pdf_file.name, INPUT_DIR / "paper.pdf")

    # ---- Build the pipeline command ----
    # sys.executable guarantees the same interpreter/venv as this app;
    # a bare "python" may resolve to a different installation on PATH.
    cmd = [
        sys.executable, "pipeline.py",
        "--model_name_t", "gpt-5",
        "--model_name_v", "gpt-5",
        "--result_dir", "output",
        "--paper_latex_root", "input/latex_proj",
        "--openai_key", openai_key,  # NOTE(review): visible in the process list; consider an env var
        "--gemini_key", "##",        # placeholder value — presumably Gemini is unused; confirm in pipeline.py
        "--logo_dir", str(LOGO_DIR),
    ]
    if arxiv_url:
        cmd += ["--arxiv_url", arxiv_url]

    # ---- Run the pipeline (30-minute cap) ----
    try:
        result = subprocess.run(
            cmd, capture_output=True, text=True, timeout=1800
        )
        logs.append("\n======= STDOUT =======\n")
        logs.append(result.stdout)
        logs.append("\n======= STDERR =======\n")
        logs.append(result.stderr)
        # Surface a non-zero exit code explicitly (previously it was silently
        # ignored and only detected indirectly via an empty output folder).
        if result.returncode != 0:
            logs.append(f"\n⚠️ pipeline.py exited with code {result.returncode}.\n")
    except subprocess.TimeoutExpired:
        logs.append("❌ Pipeline timed out (30 min limit).")
        _write_logs(logs)
        return "\n".join(logs), None
    except Exception as e:
        logs.append(f"❌ Pipeline error: {e}")
        _write_logs(logs)
        return "\n".join(logs), None

    # ---- Verify output exists, then zip it for download ----
    if not any(OUTPUT_DIR.iterdir()):
        logs.append("❌ No output generated. Please check logs below.")
        _write_logs(logs)
        return "\n".join(logs), None
    with zipfile.ZipFile(ZIP_PATH, "w", zipfile.ZIP_DEFLATED) as zipf:
        for root, _dirs, files in os.walk(OUTPUT_DIR):
            for file in files:
                file_path = Path(root) / file
                # Archive names are relative so the zip unpacks cleanly.
                zipf.write(file_path, arcname=file_path.relative_to(OUTPUT_DIR))
    logs.append(f"✅ Zipped output folder to {ZIP_PATH}\n")

    end_time = datetime.datetime.now()
    logs.append(
        f"🏁 Completed at {end_time.strftime('%Y-%m-%d %H:%M:%S')} "
        f"(Duration: {(end_time - start_time).seconds}s)\n"
    )
    _write_logs(logs)
    return "\n".join(logs), str(ZIP_PATH)
def _write_logs(logs):
    """Persist the accumulated log lines to LOG_PATH for post-mortem review."""
    LOG_PATH.write_text("\n".join(logs), encoding="utf-8")
# ===================== Gradio UI =====================
# Input widgets: paper source (URL or PDF), API key, and required logo images.
_inputs = [
    gr.Textbox(label="📘 ArXiv URL (choose one)", placeholder="https://arxiv.org/abs/2505.xxxxx"),
    gr.File(label="📄 Upload PDF (choose one)"),
    gr.Textbox(label="🔑 OpenAI API Key", placeholder="sk-...", type="password"),
    gr.File(
        label="🏷️ Upload Institutional Logo(s) (required, multiple allowed)",
        file_count="multiple",
        file_types=["image"],
    ),
]

# Output widgets: the run log and the downloadable result archive.
_outputs = [
    gr.Textbox(label="🧾 Logs", lines=30, max_lines=50),
    gr.File(label="📦 Download Results (.zip)"),
]

iface = gr.Interface(
    fn=run_pipeline,
    inputs=_inputs,
    outputs=_outputs,
    title="📄 Paper2Poster",
    description=(
        "Upload your paper, and the pipeline will automatically generate a fully compilable LaTeX poster; you can download the ZIP file and compile it yourself. Each paper takes approximately 6–10 minutes to process.\n"
        "Provide either an arXiv link or upload a PDF file (choose one); the system will generate a poster and package it for download.\n"
        "You must upload at least one institutional logo (multiple allowed).\n"
    ),
    allow_flagging="never",
)
# Launch the app bound to all interfaces on port 7860 (the standard
# Hugging Face Spaces port) when run as a script.
if __name__ == "__main__":
    iface.launch(server_name="0.0.0.0", server_port=7860)