update: push latest content
Browse files
app.py
CHANGED
|
@@ -1,4 +1,4 @@
|
|
| 1 |
-
import gradio as gr
|
| 2 |
import subprocess, shutil, os, zipfile, datetime
|
| 3 |
from pathlib import Path
|
| 4 |
|
|
@@ -14,11 +14,11 @@ def run_pipeline(arxiv_url, pdf_file, openai_key, logo_files):
|
|
| 14 |
start_time = datetime.datetime.now()
|
| 15 |
logs = [f"🚀 Starting pipeline at {start_time.strftime('%Y-%m-%d %H:%M:%S')}\n"]
|
| 16 |
|
| 17 |
-
# ======
|
| 18 |
for d in [OUTPUT_DIR, LOGO_DIR, POSTER_LATEX_DIR, INPUT_DIR]:
|
| 19 |
d.mkdir(parents=True, exist_ok=True)
|
| 20 |
|
| 21 |
-
#
|
| 22 |
for item in OUTPUT_DIR.iterdir():
|
| 23 |
if item.is_dir():
|
| 24 |
shutil.rmtree(item)
|
|
@@ -28,21 +28,21 @@ def run_pipeline(arxiv_url, pdf_file, openai_key, logo_files):
|
|
| 28 |
ZIP_PATH.unlink()
|
| 29 |
logs.append("🧹 Cleaned previous output.\n")
|
| 30 |
|
| 31 |
-
# ======
|
| 32 |
-
# Gradio
|
| 33 |
if logo_files is None:
|
| 34 |
logo_files = []
|
| 35 |
if not isinstance(logo_files, (list, tuple)):
|
| 36 |
logo_files = [logo_files]
|
| 37 |
-
logo_files = [f for f in logo_files if f] #
|
| 38 |
|
| 39 |
if len(logo_files) == 0:
|
| 40 |
-
msg = "❌
|
| 41 |
logs.append(msg)
|
| 42 |
_write_logs(logs)
|
| 43 |
return "\n".join(logs), None
|
| 44 |
|
| 45 |
-
#
|
| 46 |
for item in LOGO_DIR.iterdir():
|
| 47 |
if item.is_file():
|
| 48 |
item.unlink()
|
|
@@ -53,7 +53,7 @@ def run_pipeline(arxiv_url, pdf_file, openai_key, logo_files):
|
|
| 53 |
saved_logo_paths.append(p)
|
| 54 |
logs.append(f"🏷️ Saved {len(saved_logo_paths)} logo file(s) to: {LOGO_DIR}\n")
|
| 55 |
|
| 56 |
-
# ======
|
| 57 |
pdf_path = None
|
| 58 |
if pdf_file:
|
| 59 |
pdf_dir = INPUT_DIR / "pdf"
|
|
@@ -62,18 +62,18 @@ def run_pipeline(arxiv_url, pdf_file, openai_key, logo_files):
|
|
| 62 |
shutil.copy(pdf_file.name, pdf_path)
|
| 63 |
logs.append(f"📄 Uploaded PDF saved to: {pdf_path}\n")
|
| 64 |
|
| 65 |
-
#
|
| 66 |
canonical_pdf = INPUT_DIR / "paper.pdf"
|
| 67 |
shutil.copy(pdf_file.name, canonical_pdf)
|
| 68 |
|
| 69 |
-
# ======
|
| 70 |
if not arxiv_url and not pdf_file:
|
| 71 |
-
msg = "❌
|
| 72 |
logs.append(msg)
|
| 73 |
_write_logs(logs)
|
| 74 |
return "\n".join(logs), None
|
| 75 |
|
| 76 |
-
# ======
|
| 77 |
cmd = [
|
| 78 |
"python", "pipeline.py",
|
| 79 |
"--model_name_t", "gpt-5",
|
|
@@ -82,7 +82,7 @@ def run_pipeline(arxiv_url, pdf_file, openai_key, logo_files):
|
|
| 82 |
"--paper_latex_root", "input/latex_proj",
|
| 83 |
"--openai_key", openai_key,
|
| 84 |
"--gemini_key", "##",
|
| 85 |
-
"--logo_dir", str(LOGO_DIR) # 👈
|
| 86 |
]
|
| 87 |
|
| 88 |
if arxiv_url:
|
|
@@ -109,7 +109,7 @@ def run_pipeline(arxiv_url, pdf_file, openai_key, logo_files):
|
|
| 109 |
_write_logs(logs)
|
| 110 |
return "\n".join(logs), None
|
| 111 |
|
| 112 |
-
# ======
|
| 113 |
if not any(OUTPUT_DIR.iterdir()):
|
| 114 |
msg = "❌ No output generated. Please check logs below."
|
| 115 |
logs.append(msg)
|
|
@@ -140,19 +140,20 @@ def _write_logs(logs):
|
|
| 140 |
iface = gr.Interface(
|
| 141 |
fn=run_pipeline,
|
| 142 |
inputs=[
|
| 143 |
-
gr.Textbox(label="📘 ArXiv URL
|
| 144 |
-
gr.File(label="📄
|
| 145 |
gr.Textbox(label="🔑 OpenAI API Key", placeholder="sk-...", type="password"),
|
| 146 |
-
gr.File(label="🏷️
|
| 147 |
],
|
| 148 |
outputs=[
|
| 149 |
gr.Textbox(label="🧾 Logs", lines=30, max_lines=50),
|
| 150 |
-
gr.File(label="📦
|
| 151 |
],
|
| 152 |
-
title="📄
|
| 153 |
description=(
|
| 154 |
-
"
|
| 155 |
-
"
|
|
|
|
| 156 |
),
|
| 157 |
allow_flagging="never",
|
| 158 |
)
|
|
|
|
| 1 |
+
import gradio as gr
|
| 2 |
import subprocess, shutil, os, zipfile, datetime
|
| 3 |
from pathlib import Path
|
| 4 |
|
|
|
|
| 14 |
start_time = datetime.datetime.now()
|
| 15 |
logs = [f"🚀 Starting pipeline at {start_time.strftime('%Y-%m-%d %H:%M:%S')}\n"]
|
| 16 |
|
| 17 |
+
# ====== Prepare directories ======
|
| 18 |
for d in [OUTPUT_DIR, LOGO_DIR, POSTER_LATEX_DIR, INPUT_DIR]:
|
| 19 |
d.mkdir(parents=True, exist_ok=True)
|
| 20 |
|
| 21 |
+
# Clean up old outputs
|
| 22 |
for item in OUTPUT_DIR.iterdir():
|
| 23 |
if item.is_dir():
|
| 24 |
shutil.rmtree(item)
|
|
|
|
| 28 |
ZIP_PATH.unlink()
|
| 29 |
logs.append("🧹 Cleaned previous output.\n")
|
| 30 |
|
| 31 |
+
# ====== Validation: must upload LOGO ======
|
| 32 |
+
# Gradio may return a single file or a list, normalize to list
|
| 33 |
if logo_files is None:
|
| 34 |
logo_files = []
|
| 35 |
if not isinstance(logo_files, (list, tuple)):
|
| 36 |
logo_files = [logo_files]
|
| 37 |
+
logo_files = [f for f in logo_files if f] # filter None
|
| 38 |
|
| 39 |
if len(logo_files) == 0:
|
| 40 |
+
msg = "❌ You must upload at least one institutional logo (multiple allowed)."
|
| 41 |
logs.append(msg)
|
| 42 |
_write_logs(logs)
|
| 43 |
return "\n".join(logs), None
|
| 44 |
|
| 45 |
+
# Clear input/logo and then save new files
|
| 46 |
for item in LOGO_DIR.iterdir():
|
| 47 |
if item.is_file():
|
| 48 |
item.unlink()
|
|
|
|
| 53 |
saved_logo_paths.append(p)
|
| 54 |
logs.append(f"🏷️ Saved {len(saved_logo_paths)} logo file(s) to: {LOGO_DIR}\n")
|
| 55 |
|
| 56 |
+
# ====== Handle uploaded PDF (optional) ======
|
| 57 |
pdf_path = None
|
| 58 |
if pdf_file:
|
| 59 |
pdf_dir = INPUT_DIR / "pdf"
|
|
|
|
| 62 |
shutil.copy(pdf_file.name, pdf_path)
|
| 63 |
logs.append(f"📄 Uploaded PDF saved to: {pdf_path}\n")
|
| 64 |
|
| 65 |
+
# For pipeline Step 1.5 compatibility: also copy to input/paper.pdf
|
| 66 |
canonical_pdf = INPUT_DIR / "paper.pdf"
|
| 67 |
shutil.copy(pdf_file.name, canonical_pdf)
|
| 68 |
|
| 69 |
+
# ====== Validate input source ======
|
| 70 |
if not arxiv_url and not pdf_file:
|
| 71 |
+
msg = "❌ Please provide either an arXiv link or upload a PDF file (choose one)."
|
| 72 |
logs.append(msg)
|
| 73 |
_write_logs(logs)
|
| 74 |
return "\n".join(logs), None
|
| 75 |
|
| 76 |
+
# ====== Build command ======
|
| 77 |
cmd = [
|
| 78 |
"python", "pipeline.py",
|
| 79 |
"--model_name_t", "gpt-5",
|
|
|
|
| 82 |
"--paper_latex_root", "input/latex_proj",
|
| 83 |
"--openai_key", openai_key,
|
| 84 |
"--gemini_key", "##",
|
| 85 |
+
"--logo_dir", str(LOGO_DIR) # 👈 Added: pass logo directory
|
| 86 |
]
|
| 87 |
|
| 88 |
if arxiv_url:
|
|
|
|
| 109 |
_write_logs(logs)
|
| 110 |
return "\n".join(logs), None
|
| 111 |
|
| 112 |
+
# ====== Check output & zip ======
|
| 113 |
if not any(OUTPUT_DIR.iterdir()):
|
| 114 |
msg = "❌ No output generated. Please check logs below."
|
| 115 |
logs.append(msg)
|
|
|
|
| 140 |
# Gradio UI for the Paper2Poster pipeline.
# Inputs: an arXiv URL OR an uploaded PDF (exactly one is required — enforced
# inside run_pipeline), an OpenAI API key, and one or more institutional logo
# images (required). Outputs: the pipeline log text and a downloadable .zip.
iface = gr.Interface(
    fn=run_pipeline,
    inputs=[
        gr.Textbox(label="📘 ArXiv URL (choose one)", placeholder="https://arxiv.org/abs/2505.xxxxx"),
        gr.File(label="📄 Upload PDF (choose one)"),
        gr.Textbox(label="🔑 OpenAI API Key", placeholder="sk-...", type="password"),
        # file_count="multiple" lets users select several logos at once;
        # run_pipeline normalizes the single-file vs list return shapes.
        gr.File(label="🏷️ Upload Institutional Logo(s) (required, multiple allowed)", file_count="multiple", file_types=["image"]),
    ],
    outputs=[
        gr.Textbox(label="🧾 Logs", lines=30, max_lines=50),
        gr.File(label="📦 Download Results (.zip)"),
    ],
    title="📄 Paper2Poster",
    description=(
        # Adjacent string literals concatenate with no implicit separator:
        # each sentence group must end with "\n" or the rendered description
        # runs the sentences together (the original omitted the first two).
        "Upload your paper, and the pipeline will automatically generate a fully compilable LaTeX poster; "
        "you can download the ZIP file and compile it yourself. "
        "Each paper takes approximately 6–10 minutes to process.\n"
        "Provide either an arXiv link or upload a PDF file (choose one); "
        "the system will generate a poster and package it for download.\n"
        "You must upload at least one institutional logo (multiple allowed).\n"
    ),
    allow_flagging="never",
)
|