# /// script
# requires-python = ">=3.10"
# dependencies = [
#     "huggingface-hub[hf_transfer]",
#     "torch",  # For GPU detection
# ]
# ///
| """ | |
| Generate static Embedding Atlas visualizations and deploy to HuggingFace Spaces. | |
| This script creates interactive embedding visualizations that run entirely in the browser, | |
| using WebGPU acceleration for smooth performance with millions of points. | |
| Example usage: | |
| # Basic usage (creates Space from dataset) | |
| uv run atlas-export.py \ | |
| stanfordnlp/imdb \ | |
| --space-name my-imdb-viz | |
| # With custom model and configuration | |
| uv run atlas-export.py \ | |
| beans \ | |
| --space-name bean-disease-atlas \ | |
| --image-column image \ | |
| --model openai/clip-vit-base-patch32 \ | |
| --sample 10000 | |
| # Run on HF Jobs with GPU (requires HF token for Space deployment) | |
| hf jobs run vllm/vllm-openai:latest --flavor t4-small \ | |
| -s HF_TOKEN=$HF_TOKEN \ | |
| uv run https://huggingface.co/datasets/uv-scripts/build-atlas/raw/main/atlas-export.py \ | |
| my-dataset \ | |
| --space-name my-atlas \ | |
| --model nomic-ai/nomic-embed-text-v1.5 | |
| # Use pre-computed embeddings | |
| uv run atlas-export.py \ | |
| my-dataset-with-embeddings \ | |
| --space-name my-viz \ | |
| --no-compute-embeddings \ | |
| --x-column umap_x \ | |
| --y-column umap_y | |
| """ | |

import argparse
import logging
import os
import shutil
import subprocess
import sys
import tempfile
import zipfile
from pathlib import Path
from typing import Optional

from huggingface_hub import HfApi, create_repo, login, upload_folder

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)


def check_gpu_available() -> bool:
    """Check if GPU is available for computation."""
    try:
        import torch

        return torch.cuda.is_available()
    except ImportError:
        return False
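

# Note: torch is pulled in (see the script dependencies above) only for this
# detection step; the embeddings themselves are computed by the embedding-atlas
# subprocess launched below, which resolves its own dependencies via uvx.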


def build_atlas_command(args) -> tuple[list[str], str]:
    """Build the embedding-atlas command and return it with its export path."""
    # Use uvx to run embedding-atlas with required dependencies
    cmd = ["uvx", "--with", "datasets", "embedding-atlas"]
    cmd.append(args.dataset_id)

    # Add all optional parameters
    if args.model:
        cmd.extend(["--model", args.model])

    # Always specify text column to avoid interactive prompt
    text_col = args.text_column or "text"  # Default to "text" if not specified
    cmd.extend(["--text", text_col])

    if args.image_column:
        cmd.extend(["--image", args.image_column])
    if args.split:
        cmd.extend(["--split", args.split])
    if args.sample:
        cmd.extend(["--sample", str(args.sample)])
    if args.trust_remote_code:
        cmd.append("--trust-remote-code")
    if not args.compute_embeddings:
        cmd.append("--no-compute-embeddings")
    if args.x_column:
        cmd.extend(["--x", args.x_column])
    if args.y_column:
        cmd.extend(["--y", args.y_column])
    if args.neighbors_column:
        cmd.extend(["--neighbors", args.neighbors_column])

    # Add export flag with output path
    export_path = "atlas_export.zip"
    cmd.extend(["--export-application", export_path])

    return cmd, export_path
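

# For illustration, a call like
#   atlas-export.py stanfordnlp/imdb --space-name imdb-atlas --sample 1000
# builds roughly this command:
#   uvx --with datasets embedding-atlas stanfordnlp/imdb --text text \
#       --split train --sample 1000 --export-application atlas_export.zip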


def create_space_readme(args) -> str:
    """Generate README.md content for the Space."""
    title = args.space_name.replace("-", " ").title()

    readme = f"""---
title: {title}
emoji: 🗺️
colorFrom: blue
colorTo: purple
sdk: static
pinned: false
license: mit
---

# 🗺️ {title}

Interactive embedding visualization of [{args.dataset_id}](https://huggingface.co/datasets/{args.dataset_id}) using [Embedding Atlas](https://github.com/apple/embedding-atlas).

## Features

- Interactive embedding visualization
- Real-time search and filtering
- Automatic clustering with labels
- WebGPU-accelerated rendering
"""

    if args.model:
        readme += f"\n## Model\n\nEmbeddings generated using: `{args.model}`\n"

    if args.sample:
        readme += f"\n## Data\n\nVisualization includes {args.sample:,} samples from the dataset.\n"

    readme += """
## How to Use

- **Click and drag** to navigate
- **Scroll** to zoom in/out
- **Click** on points to see details
- **Search** using the search box
- **Filter** using metadata panels

---

*Generated with [UV Scripts Atlas Export](https://huggingface.co/uv-scripts)*
"""
    return readme
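

# For example, --space-name my-imdb-viz renders the front matter title "My Imdb Viz".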


def extract_and_prepare_static_files(zip_path: str, output_dir: Path) -> None:
    """Extract the exported atlas ZIP and prepare for static deployment."""
    logger.info(f"Extracting {zip_path} to {output_dir}")

    with zipfile.ZipFile(zip_path, "r") as zip_ref:
        zip_ref.extractall(output_dir)

    # The ZIP should contain index.html and associated files
    if not (output_dir / "index.html").exists():
        raise FileNotFoundError("index.html not found in exported atlas")

    logger.info(f"Extracted {len(list(output_dir.iterdir()))} items")


def deploy_to_space(
    output_dir: Path,
    space_name: str,
    organization: Optional[str] = None,
    private: bool = False,
    hf_token: Optional[str] = None,
) -> str:
    """Deploy the static files to a HuggingFace Space."""
    api = HfApi(token=hf_token)

    # Construct full repo ID
    if organization:
        repo_id = f"{organization}/{space_name}"
    else:
        # Get username from API
        user_info = api.whoami()
        username = user_info["name"]
        repo_id = f"{username}/{space_name}"

    logger.info(f"Creating Space: {repo_id}")

    # Create the Space repository
    try:
        create_repo(
            repo_id,
            repo_type="space",
            space_sdk="static",
            private=private,
            token=hf_token,
        )
        logger.info(f"Created new Space: {repo_id}")
    except Exception as e:
        if "already exists" in str(e):
            logger.info(f"Space {repo_id} already exists, updating...")
        else:
            raise

    # Upload all files
    logger.info("Uploading files to Space...")
    upload_folder(
        folder_path=str(output_dir),
        repo_id=repo_id,
        repo_type="space",
        token=hf_token,
    )

    space_url = f"https://huggingface.co/spaces/{repo_id}"
    logger.info(f"✅ Space deployed successfully: {space_url}")
    return space_url
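

# For example, deploy_to_space(out_dir, "imdb-atlas") with no organization
# resolves the repo to <your-username>/imdb-atlas and returns
# https://huggingface.co/spaces/<your-username>/imdb-atlas.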


def main():
    # Enable HF Transfer for faster downloads if available
    os.environ.setdefault("HF_HUB_ENABLE_HF_TRANSFER", "1")

    parser = argparse.ArgumentParser(
        description="Generate and deploy static Embedding Atlas visualizations",
        formatter_class=argparse.RawDescriptionHelpFormatter,
        epilog=__doc__,
    )

    # Required arguments
    parser.add_argument(
        "dataset_id",
        type=str,
        help="HuggingFace dataset ID to visualize",
    )

    # Space configuration
    parser.add_argument(
        "--space-name",
        type=str,
        required=True,
        help="Name for the HuggingFace Space",
    )
    parser.add_argument(
        "--organization",
        type=str,
        help="HuggingFace organization (default: your username)",
    )
    parser.add_argument(
        "--private",
        action="store_true",
        help="Make the Space private",
    )

    # Atlas configuration
    parser.add_argument(
        "--model",
        type=str,
        help="Embedding model to use (e.g., sentence-transformers/all-MiniLM-L6-v2)",
    )
    parser.add_argument(
        "--text-column",
        type=str,
        help="Name of text column (default: text)",
    )
    parser.add_argument(
        "--image-column",
        type=str,
        help="Name of image column for image datasets",
    )
    parser.add_argument(
        "--split",
        type=str,
        default="train",
        help="Dataset split to use (default: train)",
    )
    parser.add_argument(
        "--sample",
        type=int,
        help="Number of samples to visualize (default: all)",
    )
    parser.add_argument(
        "--trust-remote-code",
        action="store_true",
        help="Trust remote code in dataset/model",
    )

    # Pre-computed embeddings
    parser.add_argument(
        "--no-compute-embeddings",
        action="store_false",
        dest="compute_embeddings",
        help="Use pre-computed embeddings from dataset",
    )
    parser.add_argument(
        "--x-column",
        type=str,
        help="Column with X coordinates (for pre-computed projections)",
    )
    parser.add_argument(
        "--y-column",
        type=str,
        help="Column with Y coordinates (for pre-computed projections)",
    )
    parser.add_argument(
        "--neighbors-column",
        type=str,
        help="Column with neighbor indices (for pre-computed)",
    )

    # Additional options
    parser.add_argument(
        "--hf-token",
        type=str,
        help="HuggingFace API token (or set HF_TOKEN env var)",
    )
    parser.add_argument(
        "--local-only",
        action="store_true",
        help="Only generate locally, don't deploy to Space",
    )
    parser.add_argument(
        "--output-dir",
        type=str,
        help="Local directory for output (default: temp directory)",
    )

    args = parser.parse_args()

    # Check GPU availability
    if check_gpu_available():
        logger.info("🚀 GPU detected - may accelerate embedding generation")
    else:
        logger.info("💻 Running on CPU - embedding generation may be slower")

    # Login to HuggingFace if needed
    if not args.local_only:
        hf_token = args.hf_token or os.environ.get("HF_TOKEN")
        if hf_token:
            login(token=hf_token)
        else:
            # Check if running in non-interactive environment (HF Jobs, CI, etc.)
            is_interactive = sys.stdin.isatty()
            if is_interactive:
                logger.warning("No HF token provided. You may not be able to push to the Hub.")
                response = input("Continue anyway? (y/n): ")
                if response.lower() != "y":
                    sys.exit(0)
            else:
                # In non-interactive environments, fail immediately if no token
                logger.error("No HF token provided. Cannot deploy to Space in non-interactive environment.")
                logger.error("Please set HF_TOKEN environment variable or use --hf-token argument.")
                sys.exit(1)

    # Set up output directory
    if args.output_dir:
        output_dir = Path(args.output_dir)
        output_dir.mkdir(parents=True, exist_ok=True)
        temp_dir = None
    else:
        temp_dir = tempfile.mkdtemp(prefix="atlas_export_")
        output_dir = Path(temp_dir)
        logger.info(f"Using temporary directory: {output_dir}")

    try:
        # Build and run embedding-atlas command
        cmd, export_path = build_atlas_command(args)
        logger.info(f"Running command: {' '.join(cmd)}")

        # Run the command
        result = subprocess.run(cmd, capture_output=True, text=True)
        if result.returncode != 0:
            logger.error(f"Atlas export failed with return code {result.returncode}")
            logger.error(f"STDOUT: {result.stdout}")
            logger.error(f"STDERR: {result.stderr}")
            sys.exit(1)

        logger.info("✅ Atlas export completed successfully")

        # Extract the exported files
        extract_and_prepare_static_files(export_path, output_dir)

        # Create README for the Space
        readme_content = create_space_readme(args)
        (output_dir / "README.md").write_text(readme_content)

        if args.local_only:
            logger.info(f"✅ Static files prepared in: {output_dir}")
            logger.info("To deploy manually, upload the contents to a HuggingFace Space with sdk: static")
        else:
            # Deploy to HuggingFace Space
            space_url = deploy_to_space(
                output_dir,
                args.space_name,
                args.organization,
                args.private,
                hf_token,
            )
            logger.info(f"\n🎉 Success! Your atlas is live at: {space_url}")
| logger.info(f"The visualization will be available in a few moments.") | |

        # Clean up the ZIP file
        if Path(export_path).exists():
            os.remove(export_path)
    finally:
        # Clean up temp directory if used
        if temp_dir and not args.local_only:
            shutil.rmtree(temp_dir)
            logger.info("Cleaned up temporary files")


if __name__ == "__main__":
    # Show example commands if no args provided
    if len(sys.argv) == 1:
        print("Example commands:\n")
        print("# Basic usage:")
        print("uv run atlas-export.py stanfordnlp/imdb --space-name imdb-atlas\n")
        print("# With custom model and sampling:")
        print("uv run atlas-export.py my-dataset --space-name my-viz --model nomic-ai/nomic-embed-text-v1.5 --sample 10000\n")
        print("# For HF Jobs with GPU:")
        print("hf jobs run vllm/vllm-openai:latest --flavor t4-small -s HF_TOKEN=$HF_TOKEN uv run https://huggingface.co/datasets/uv-scripts/build-atlas/raw/main/atlas-export.py dataset --space-name viz --model sentence-transformers/all-mpnet-base-v2\n")
        print("# Local generation only:")
        print("uv run atlas-export.py dataset --space-name test --local-only --output-dir ./atlas-output")
        sys.exit(0)
    main()