DemoSpace / app.py
import os
import gradio as gr
from huggingface_hub import hf_hub_download, login
import torch
try:
    from safetensors import safe_open
    SAFETENSORS_AVAILABLE = True
except ImportError:
    SAFETENSORS_AVAILABLE = False
# Hugging Face token (must be registered in the Space's Secrets)
HF_TOKEN = os.environ.get("HUGGINGFACE_HUB_TOKEN")
if not HF_TOKEN:
    raise RuntimeError("HUGGINGFACE_HUB_TOKEN is not set. Please configure it in the Space's Secrets.")

# Authenticate so Hub calls from this Space can access private repos.
login(token=HF_TOKEN)
def check_model(repo_id, filename):
    """Download a weight file from the Hub and verify that it can be loaded."""
    logs = []

    # Download the requested file using the authenticated token.
    try:
        model_path = hf_hub_download(
            repo_id=repo_id,
            filename=filename,
            token=HF_TOKEN
        )
        logs.append(f"📥 Downloaded: {model_path}")
    except Exception as e:
        return f"❌ Download failed: {str(e)}"

    # Prefer safetensors for .safetensors files; fall back to torch.load if it fails.
    if SAFETENSORS_AVAILABLE and filename.endswith(".safetensors"):
        try:
            with safe_open(model_path, framework="pt", device="cpu") as f:
                logs.append("✅ safetensors loaded successfully")
                logs.append("Keys: " + ", ".join(list(f.keys())[:10]))
            return "\n".join(logs)
        except Exception as e:
            logs.append(f"⚠ safetensors error: {e}")

    # Pickle-based checkpoints (e.g. .bin) are loaded with torch.load.
    try:
        sd = torch.load(model_path, map_location="cpu")
        logs.append("✅ torch.load OK")
        if isinstance(sd, dict):
            logs.append("Top-level keys: " + ", ".join(list(sd.keys())[:10]))
        else:
            logs.append(f"Loaded object type: {type(sd)}")
    except Exception as e:
        logs.append(f"❌ torch.load failed: {e}")

    return "\n".join(logs)
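
# Optional, hedged sketch (not part of the original app): if one also wants to see
# tensor shapes/dtypes for a .safetensors file, safe_open's get_tensor() can be
# called on a few keys. The helper name summarize_safetensors is hypothetical and
# is not wired into the Gradio interface below.
def summarize_safetensors(model_path, max_keys=10):
    lines = []
    with safe_open(model_path, framework="pt", device="cpu") as f:
        for key in list(f.keys())[:max_keys]:
            t = f.get_tensor(key)  # loads the tensor onto CPU
            lines.append(f"{key}: shape={tuple(t.shape)}, dtype={t.dtype}")
    return "\n".join(lines)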
demo = gr.Interface(
    fn=check_model,
    inputs=[
        gr.Textbox(label="Repo ID", value="revi13/ip-adapter-faceid-private"),
        gr.Textbox(label="Filename", value="ip-adapter-faceid-plusv2_sd15.bin")
    ],
    outputs=gr.Text(label="Result"),
    title="🧪 Hugging Face Hub Model Weight File Checker",
    description="Downloads the model weights for the given repo_id / filename and verifies them with torch.load / safetensors."
)

demo.launch()
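
# Quick debugging tip (optional, not in the original app): check_model can be called
# directly, e.g. by temporarily adding a line such as
#   print(check_model("revi13/ip-adapter-faceid-private", "ip-adapter-faceid-plusv2_sd15.bin"))
# above demo.launch(). Both values are just the defaults from the Textbox fields and
# assume the configured token has read access to that private repo.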