transformers>=4.45.0
accelerate>=0.33.0
qwen-vl-utils>=0.0.8
gradio>=5.49.1
spaces>=0.24.0
pillow
torchvision

# Optional: FlashAttention v2 for faster attention on compatible Linux CUDA GPUs.
# The PEP 508 marker below restricts installation to 64-bit Linux;
# the dependency is skipped on macOS, Windows, and ARM platforms.
flash-attn; platform_system == "Linux" and platform_machine == "x86_64"