"""SQLite-backed cache for LLM prompt/response pairs, keyed by a hash of prompt + metadata."""

import hashlib
import json
import sqlite3
from contextlib import closing
from pathlib import Path
from typing import Any


class LLMCache:
    """A small SQLite-backed cache for LLM responses."""

    def __init__(self, db_path: str | Path = "llm_cache.db"):
        self.db_path = Path(db_path)
        self._init_db()

    def _init_db(self) -> None:
        # Create the cache table on first use. autocommit=True (Python 3.12+)
        # commits each statement immediately; closing() releases the
        # short-lived connection when the block exits.
        with closing(sqlite3.connect(self.db_path, autocommit=True)) as conn:
            conn.execute(
                """
                CREATE TABLE IF NOT EXISTS llm_cache (
                    prompt_hash TEXT PRIMARY KEY,
                    prompt TEXT,
                    response TEXT,
                    metadata TEXT
                )
                """
            )

    def _hash_prompt(self, prompt: str, metadata: dict[str, Any]) -> str:
        # Canonicalise prompt + metadata with sorted keys so logically equal
        # inputs always map to the same cache key.
        combined = json.dumps({"prompt": prompt, "metadata": metadata}, sort_keys=True)
        return hashlib.sha256(combined.encode()).hexdigest()

    def get(self, prompt: str, metadata: dict[str, Any]) -> str | None:
        """Return the cached response for this prompt/metadata pair, or None on a miss."""
        prompt_hash = self._hash_prompt(prompt, metadata)
        with closing(sqlite3.connect(self.db_path, autocommit=True)) as conn:
            row = conn.execute(
                "SELECT response FROM llm_cache WHERE prompt_hash = ?", (prompt_hash,)
            ).fetchone()
            return row[0] if row else None

    def set(self, prompt: str, response: str, metadata: dict[str, Any]) -> None:
        """Store (or overwrite) the response for this prompt/metadata pair."""
        prompt_hash = self._hash_prompt(prompt, metadata)
        with closing(sqlite3.connect(self.db_path, autocommit=True)) as conn:
            conn.execute(
                "INSERT OR REPLACE INTO llm_cache (prompt_hash, prompt, response, metadata) VALUES (?, ?, ?, ?)",
                (prompt_hash, prompt, response, json.dumps(metadata)),
            )

    def clear(self) -> None:
        """Delete every cached entry."""
        with closing(sqlite3.connect(self.db_path, autocommit=True)) as conn:
            conn.execute("DELETE FROM llm_cache")
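

# Minimal usage sketch of the cache-aside pattern this class supports. The
# file name "demo_cache.db", the metadata values, and the placeholder
# response are illustrative assumptions, not part of the class above; a real
# caller would replace the placeholder with its own LLM client call.
if __name__ == "__main__":
    cache = LLMCache("demo_cache.db")
    meta = {"model": "example-model", "temperature": 0.0}
    prompt = "What is a cache?"

    answer = cache.get(prompt, meta)
    if answer is None:
        # answer = call_your_llm(prompt, **meta)  # hypothetical client call
        answer = "A cache stores results so repeated work can be skipped."
        cache.set(prompt, answer, meta)

    print(answer)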