EditVerseBench / download_source_video.py
import json
import os

import requests

BASE_DIR = "./EditVerseBench"
PIXABAY_API = "Your-Pixabay-API-Key"  # Replace with your own API key; see https://pixabay.com/api/docs/


def download_pixabay_video(local_path, url_link):
    # A Pixabay page URL ends in "-<video_id>", so the id is the last hyphen-separated token.
    video_id = url_link.strip("/").split("-")[-1]
    get_url = f"https://pixabay.com/api/videos/?key={PIXABAY_API}&id={video_id}"
    resp = requests.get(get_url)
    if resp.status_code != 200:
        print(f"❌ Request failed: {resp.status_code}, {url_link}")
        return
    data = resp.json()
    if not data.get("hits"):
        print(f"❌ Cannot find: {url_link}")
        return
    # Prefer the largest available rendition, falling back to medium, then small.
    video_info = data["hits"][0]["videos"]
    download_url = video_info.get("large", video_info.get("medium", video_info.get("small")))["url"]
    os.makedirs(os.path.dirname(local_path), exist_ok=True)
    print(f"Downloading: {download_url}")
    # Stream the file to disk in 1 MB chunks instead of loading the whole video into memory.
    video_resp = requests.get(download_url, stream=True)
    if video_resp.status_code == 200:
        with open(local_path, "wb") as f:
            for chunk in video_resp.iter_content(chunk_size=1024 * 1024):
                if chunk:
                    f.write(chunk)
        print(f"✅ Download succeeded: {local_path}")
    else:
        print(f"❌ Download failed: {video_resp.status_code}, {download_url}")


with open(f"{BASE_DIR}/EditVerseBench.json") as f:
    benchmark_content = json.load(f)

# Fetch every source video referenced by the benchmark, skipping files that already exist locally.
for key, item in benchmark_content.items():
    if "<video1>" in item and item["<video1>"].startswith("videos/"):
        video1_link = item["<video1> link"]
        video1_local_path = f"{BASE_DIR}/" + item["<video1>"]
        if not os.path.exists(video1_local_path):
            download_pixabay_video(video1_local_path, video1_link)
    if "<video2>" in item and item["<video2>"].startswith("videos/"):
        video2_link = item["<video2> link"]
        video2_local_path = f"{BASE_DIR}/" + item["<video2>"]
        if not os.path.exists(video2_local_path):
            download_pixabay_video(video2_local_path, video2_link)

print("✅ All videos downloaded successfully!")
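
For reference, each entry in EditVerseBench.json that the loop above touches is expected to carry a "<video1>" path (relative to BASE_DIR) together with a "<video1> link" pointing at the Pixabay page, and optionally the same pair for "<video2>". The sketch below is illustrative only: the key names come from the loop, while the path and link values are hypothetical placeholders. With PIXABAY_API set to a valid key, the script can then be run directly, e.g. python download_source_video.py.

    # Illustrative only: the entry shape this script expects (hypothetical values).
    example_entry = {
        "<video1>": "videos/example_0001.mp4",  # saved under BASE_DIR
        "<video1> link": "https://pixabay.com/videos/example-clip-12345/",  # id parsed as "12345"
        # "<video2>" / "<video2> link" appear only when a second source video is needed
    }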