#!/usr/bin/env python3
"""
GitHub API utilities for scraping and metadata collection.
Separated from scrape_gh_docs.py to keep the main script slimmer.
"""
from __future__ import annotations
import os
import time
import asyncio
import logging
import threading
from pathlib import Path
from urllib.parse import quote
from typing import Optional, Dict, Any, List
import requests
import aiohttp
GITHUB_API = "https://api.github.com"
# Use the same logger name as the main script so logs route through its handler
logger = logging.getLogger("scrape_gh_docs")
_thread_local = threading.local()
def github_headers() -> Dict[str, str]:
token = os.getenv("GITHUB_TOKEN")
h = {"Accept": "application/vnd.github.v3+json", "User-Agent": "docs-scraper/1.0"}
if token:
h["Authorization"] = f"token {token}"
return h
def get_session() -> requests.Session:
sess = getattr(_thread_local, "session", None)
if sess is None:
sess = requests.Session()
_thread_local.session = sess
return sess
def request_json(
    url: str,
    params: Optional[dict] = None,
    accept_status: tuple[int, ...] = (200,),
    max_retries: int = 3,
) -> Optional[Any]:
    """GET a GitHub API URL and return parsed JSON, retrying on rate limits and 5xx."""
for attempt in range(max_retries):
resp = get_session().get(
url, headers=github_headers(), params=params, timeout=30
)
if resp.status_code in accept_status:
# Some endpoints return empty responses on success (e.g. 204). Handle json errors defensively.
try:
return resp.json()
except Exception:
return None
        if resp.status_code in (403, 429):
            # Rate limited or blocked (GitHub signals both via 403/429):
            # honor Retry-After or X-RateLimit-Reset before retrying.
reset = resp.headers.get("X-RateLimit-Reset")
ra = resp.headers.get("Retry-After")
if ra:
wait = int(ra)
elif reset:
wait = max(5, int(reset) - int(time.time()))
else:
wait = 30
logger.warning(
f"{resp.status_code} from {url}. Sleeping {wait}s (attempt {attempt + 1}/{max_retries})"
)
time.sleep(wait)
continue
if 500 <= resp.status_code < 600:
backoff = (attempt + 1) * 5
logger.warning(f"{resp.status_code} from {url}. Backing off {backoff}s")
time.sleep(backoff)
continue
logger.error(f"Request to {url} returned {resp.status_code}: {resp.text}")
return None
logger.error(f"Exhausted retries for {url}")
return None
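# Illustrative usage (not called anywhere in this module): check remaining
# quota via the /rate_limit endpoint of the public API.
#
#   rate = request_json(f"{GITHUB_API}/rate_limit")
#   if rate:
#       logger.info("core remaining: %s", rate["resources"]["core"]["remaining"])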
def download_file(url: str, dest_path: Path) -> None:
    """Stream the file at `url` to `dest_path`, creating parent directories."""
    dest_path.parent.mkdir(parents=True, exist_ok=True)
with get_session().get(url, headers=github_headers(), stream=True, timeout=60) as r:
r.raise_for_status()
with open(dest_path, "wb") as f:
for chunk in r.iter_content(chunk_size=8192):
if chunk:
f.write(chunk)
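# Illustrative usage: stream a raw file to disk. The repo and path below are
# placeholders, not values used by this project.
#
#   download_file(
#       "https://raw.githubusercontent.com/octocat/Hello-World/master/README",
#       Path("downloads/README"),
#   )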
# === High-level GitHub API helpers ===
def get_repo_info(owner: str, repo: str) -> Optional[Dict[str, Any]]:
url = f"{GITHUB_API}/repos/{owner}/{repo}"
return request_json(url)
def get_default_branch(
owner: str, repo: str, repo_json: Optional[Dict[str, Any]] = None
) -> Optional[str]:
if repo_json and "default_branch" in repo_json:
return repo_json["default_branch"]
info = get_repo_info(owner, repo)
if not info:
return None
return info.get("default_branch")
def get_latest_commit_date(
owner: str,
repo: str,
ref: Optional[str],
repo_json: Optional[Dict[str, Any]] = None,
) -> Optional[str]:
"""
Return ISO8601 date string of the latest commit on the given ref (branch or SHA).
Falls back to repo's pushed_at if commits endpoint returns nothing.
"""
branch = ref or (repo_json.get("default_branch") if repo_json else None) or "main"
commits = request_json(
f"{GITHUB_API}/repos/{owner}/{repo}/commits",
params={"sha": branch, "per_page": 1},
accept_status=(200,),
)
if isinstance(commits, list) and commits:
try:
return commits[0]["commit"]["author"]["date"]
except Exception:
pass
if repo_json is None:
repo_json = get_repo_info(owner, repo) or {}
return repo_json.get("pushed_at")
def get_contents(owner: str, repo: str, path: str, ref: Optional[str] = None):
    # quote() keeps "/" unescaped so nested paths stay separate URL segments;
    # quote_plus would encode slashes (and spaces as "+") and break the route.
    url = f"{GITHUB_API}/repos/{owner}/{repo}/contents/{quote(path)}"
    params = {"ref": ref} if ref else None
    # Note: a 404 returns GitHub's JSON error payload (a dict with "message"),
    # not None, because 404 is in accept_status.
    return request_json(url, params=params, accept_status=(200, 404))
def get_owner_type(owner: str) -> Optional[str]:
    # A 404 yields GitHub's error payload, which has no "type" key, so unknown
    # owners fall through to None below.
    info = request_json(f"{GITHUB_API}/users/{owner}", accept_status=(200, 404))
if not info:
return None
return info.get("type")
def get_org_repos(owner: str, per_page: int = 100) -> List[Dict[str, Any]]:
    """List all public repositories for an organization or user, paginating."""
    owner_type = get_owner_type(owner)
    base = "orgs" if owner_type == "Organization" else "users"
repos: List[Dict[str, Any]] = []
page = 1
while True:
url = f"{GITHUB_API}/{base}/{owner}/repos"
params = {"per_page": per_page, "page": page}
data = request_json(url, params=params)
        if not data:
            # An empty or failed first page via /orgs may mean the owner type
            # was misdetected; retry page 1 once via /users before giving up.
            if page == 1 and base == "orgs":
                base = "users"
                continue
break
repos.extend(data)
if len(data) < per_page:
break
page += 1
return repos
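# Illustrative usage: page through an account's repos and keep full names.
# "octocat" is a placeholder owner, not one used by this project.
#
#   names = [r["full_name"] for r in get_org_repos("octocat")]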
def search_repos(query: str, per_page: int = 5) -> List[Dict[str, Any]]:
url = f"{GITHUB_API}/search/repositories"
params = {"q": query, "per_page": per_page}
res = request_json(url, params=params, accept_status=(200,))
if not res:
return []
return res.get("items", [])
def get_repo_tree_paths(owner: str, repo: str, ref: Optional[str]) -> List[str]:
    ref = ref or "main"
    # Encode the ref fully (safe="") so branch names containing "/" remain a
    # single path segment in the trees URL.
    url = f"{GITHUB_API}/repos/{owner}/{repo}/git/trees/{quote(ref, safe='')}"
    params = {"recursive": 1}
data = request_json(url, params=params, accept_status=(200,))
if not data or "tree" not in data:
return []
paths: List[str] = []
for entry in data["tree"]:
if entry.get("type") == "blob" and "path" in entry:
paths.append(entry["path"])
return paths
def get_repo_tree_md_paths(owner: str, repo: str, ref: Optional[str]) -> List[str]:
"""
Return only Markdown file paths from the repository tree on the given ref
using the Git Trees API (recursive=1).
This is a convenience wrapper over get_repo_tree_paths() that filters to
.md files, case-insensitive.
"""
all_paths = get_repo_tree_paths(owner, repo, ref)
return [p for p in all_paths if p.lower().endswith(".md")]
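# Illustrative usage: list Markdown docs on the default branch (ref=None
# falls back to "main" inside get_repo_tree_paths).
#
#   md_paths = get_repo_tree_md_paths("octocat", "Hello-World", None)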
# === Async helpers with rate-limit aware retries ===
async def _get_json(
    session: aiohttp.ClientSession,
    url: str,
    headers: Dict[str, str],
    *,
    max_retries: int = 3,
    accept_status: tuple[int, ...] = (200,),
) -> Optional[Dict[str, Any]]:
    """Async GET returning parsed JSON, mirroring request_json's retry policy."""
for attempt in range(max_retries):
try:
async with session.get(url, headers=headers) as resp:
if resp.status in accept_status:
try:
return await resp.json()
except Exception:
return None
                if resp.status in (403, 429):
reset = resp.headers.get("X-RateLimit-Reset")
ra = resp.headers.get("Retry-After")
if ra:
wait = int(ra)
elif reset:
try:
wait = max(5, int(reset) - int(time.time()))
except Exception:
wait = 30
else:
wait = 30
logger.warning(
f"{resp.status} from {url}. Sleeping {wait}s (attempt {attempt + 1}/{max_retries})"
)
await asyncio.sleep(wait)
continue
if 500 <= resp.status < 600:
backoff = (attempt + 1) * 5
logger.warning(f"{resp.status} from {url}. Backing off {backoff}s")
await asyncio.sleep(backoff)
continue
return None
        except Exception:
            # Network hiccup: back off linearly and retry.
backoff = (attempt + 1) * 2
await asyncio.sleep(backoff)
continue
return None
async def _get_text(
    session: aiohttp.ClientSession,
    url: str,
    headers: Dict[str, str],
    *,
    max_retries: int = 3,
    accept_status: tuple[int, ...] = (200,),
) -> Optional[str]:
    """Async GET returning the response body as text, with the same retries."""
for attempt in range(max_retries):
try:
async with session.get(url, headers=headers) as resp:
if resp.status in accept_status:
try:
return await resp.text()
except Exception:
return None
                if resp.status in (403, 429):
reset = resp.headers.get("X-RateLimit-Reset")
ra = resp.headers.get("Retry-After")
if ra:
wait = int(ra)
elif reset:
try:
wait = max(5, int(reset) - int(time.time()))
except Exception:
wait = 30
else:
wait = 30
logger.warning(
f"{resp.status} from {url}. Sleeping {wait}s (attempt {attempt + 1}/{max_retries})"
)
await asyncio.sleep(wait)
continue
if 500 <= resp.status < 600:
backoff = (attempt + 1) * 5
logger.warning(f"{resp.status} from {url}. Backing off {backoff}s")
await asyncio.sleep(backoff)
continue
return None
        except Exception:
            # Network hiccup: back off linearly and retry.
backoff = (attempt + 1) * 2
await asyncio.sleep(backoff)
continue
return None
async def fetch_repo_readme_markdown(
session: aiohttp.ClientSession, owner: str, repo: str
) -> Optional[str]:
"""
Fetch README markdown using the contents API, trying README.md and readme.md.
Returns the markdown text or None if not found.
"""
headers = github_headers()
# Try common README names via contents API
for name in ("README.md", "readme.md"):
url = f"{GITHUB_API}/repos/{owner}/{repo}/contents/{name}"
data = await _get_json(
session, url, headers, max_retries=3, accept_status=(200,)
)
if isinstance(data, dict) and data.get("download_url"):
md = await _get_text(
session,
data["download_url"],
headers,
max_retries=3,
accept_status=(200,),
)
if md is not None:
return md
# Fallback: inspect the repo root tree and pick the best README candidate
try:
repo_url = f"{GITHUB_API}/repos/{owner}/{repo}"
default_branch = "main"
info = await _get_json(
session, repo_url, headers, max_retries=3, accept_status=(200,)
)
if isinstance(info, dict) and info.get("default_branch"):
default_branch = info["default_branch"]
        tree_url = (
            f"{GITHUB_API}/repos/{owner}/{repo}/git/trees/"
            f"{quote(default_branch, safe='')}"
        )
tree = await _get_json(
session, tree_url, headers, max_retries=3, accept_status=(200,)
)
if not isinstance(tree, dict) or "tree" not in tree:
return None
entries = tree["tree"]
candidates = []
for e in entries:
if e.get("type") != "blob":
continue
path = e.get("path")
if not path:
continue
name_lower = path.lower()
if name_lower.startswith("readme"):
prio_map = {".md": 0, ".rst": 1, ".org": 2}
ext = ""
if "." in path:
ext = path[path.rfind(".") :].lower()
prio = (prio_map.get(ext, 3), len(path))
candidates.append((prio, path))
if not candidates:
return None
candidates.sort()
chosen_path = candidates[0][1]
        contents_url = (
            f"{GITHUB_API}/repos/{owner}/{repo}/contents/{quote(chosen_path)}"
        )
cdata = await _get_json(
session, contents_url, headers, max_retries=3, accept_status=(200,)
)
if isinstance(cdata, dict) and cdata.get("download_url"):
return await _get_text(
session,
cdata["download_url"],
headers,
max_retries=3,
accept_status=(200,),
)
except Exception:
return None
return None
async def fetch_repo_description(
session: aiohttp.ClientSession, owner: str, repo: str
) -> Optional[str]:
url = f"https://api.github.com/repos/{owner}/{repo}"
headers = github_headers()
data = await _get_json(session, url, headers, max_retries=3, accept_status=(200,))
if isinstance(data, dict):
desc = data.get("description")
if isinstance(desc, str):
return desc
return None
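if __name__ == "__main__":
    # Minimal smoke test, assuming network access and (ideally) a GITHUB_TOKEN
    # in the environment; "octocat/Hello-World" is a stand-in public repo.
    logging.basicConfig(level=logging.INFO)

    print("default branch:", get_default_branch("octocat", "Hello-World"))

    async def _demo() -> None:
        async with aiohttp.ClientSession() as session:
            desc = await fetch_repo_description(session, "octocat", "Hello-World")
            print("description:", desc)
            readme = await fetch_repo_readme_markdown(session, "octocat", "Hello-World")
            print("readme length:", len(readme) if readme else None)

    asyncio.run(_demo())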