# learning_hub/curator.py
from typing import List, Dict, TYPE_CHECKING
from .schemas import Delta
if TYPE_CHECKING:
from LLM import LLMService
from .memory_store import MemoryStore
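
# For orientation: this module treats Delta as a pydantic-style model (it calls
# .model_dump() below) whose fields match the keyword arguments passed in
# _save_distilled_rules, plus an id (the archival logic looks deltas up via
# d.get('id')). The authoritative definition lives in learning_hub/schemas.py;
# a rough sketch of the assumed shape:
#
#   class Delta(BaseModel):
#       id: str                      # presumably auto-generated
#       text: str
#       domain: str
#       priority: str                # e.g. "high" for distilled rules
#       score: float
#       evidence_refs: List[str]
#       created_by: str
#       approved: bool
#       usage_count: int
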
class Curator:
def __init__(self, llm_service: 'LLMService', memory_store: 'MemoryStore'):
self.llm_service = llm_service
self.memory_store = memory_store
        # Configuration (Point 6, not a placeholder): number of approved deltas
        # that triggers a distillation run.
self.distill_threshold: int = 50
self.distilled_rules_key: str = "learning_distilled_rules.json"
print("✅ Learning Hub Module: Curator (Distiller) loaded")
async def check_and_distill_domain(self, domain: str):
"""
Checks if a domain needs distillation and runs it if the threshold is met.
(Implements Point 6 - Distillation trigger)
"""
try:
deltas_list = await self.memory_store._load_deltas_from_r2(domain)
            # Only approved deltas are eligible for distillation
approved_deltas = [d for d in deltas_list if d.get('approved', False)]
if len(approved_deltas) >= self.distill_threshold:
print(f"ℹ️ [Curator] Distillation threshold reached for {domain} ({len(approved_deltas)} approved deltas). Starting...")
await self.distill_deltas(domain, approved_deltas)
else:
print(f"ℹ️ [Curator] {domain} has {len(approved_deltas)}/{self.distill_threshold} approved deltas. Distillation not yet required.")
except Exception as e:
print(f"❌ [Curator] Failed to check distillation for {domain}: {e}")
async def distill_deltas(self, domain: str, deltas_to_distill: List[Dict]):
"""
Runs the LLM distillation process to merge and summarize Deltas.
(Implements Point 4 - Curator (distillation job))
"""
try:
            # 1. Build the distillation prompt
prompt = self._create_distillation_prompt(domain, deltas_to_distill)
# 2. Call the LLM
response_text = await self.llm_service._call_llm(prompt)
if not response_text:
raise ValueError("Distiller LLM call returned no response.")
# 3. Parse the response
distilled_json = self.llm_service._parse_llm_response_enhanced(
response_text,
fallback_strategy="distillation",
symbol=domain
)
if not distilled_json or "distilled_rules" not in distilled_json:
raise ValueError(f"Failed to parse Distiller LLM response: {response_text}")
distilled_rules_text_list = distilled_json.get("distilled_rules", [])
if not isinstance(distilled_rules_text_list, list):
raise ValueError(f"Distiller LLM returned 'distilled_rules' not as a list.")
# 4. Save the new distilled rules
await self._save_distilled_rules(domain, distilled_rules_text_list, deltas_to_distill)
# 5. Archive (delete) the old approved deltas that were just distilled
all_deltas = await self.memory_store._load_deltas_from_r2(domain)
            approved_ids_to_archive = {d.get('id') for d in deltas_to_distill if d.get('id')}
            # Keep in-review (unapproved) deltas and any approved deltas outside this batch.
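            # e.g. with all_deltas = [A (approved, in batch), B (in review),
            # C (approved, not in batch)], remaining_deltas keeps B and C.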
remaining_deltas = [
d for d in all_deltas
if not (d.get('approved', False) and d.get('id') in approved_ids_to_archive)
]
await self.memory_store._save_deltas_to_r2(domain, remaining_deltas)
print(f"✅ [Curator] Distillation complete for {domain}. Created {len(distilled_rules_text_list)} new rules. Archived {len(approved_ids_to_archive)} old deltas.")
except Exception as e:
print(f"❌ [Curator] Distillation process failed for {domain}: {e}")
async def _save_distilled_rules(self, domain: str, new_rules_text: List[str], evidence_deltas: List[Dict]):
"""Saves the new distilled rules as high-priority Deltas."""
# We save them back into the main delta file as high-priority,
# so they get picked up by the get_active_context() function.
deltas_list = await self.memory_store._load_deltas_from_r2(domain)
evidence_ids = [d.get('id', 'N/A') for d in evidence_deltas]
for rule_text in new_rules_text:
            if not rule_text:
                continue  # skip empty strings
distilled_delta = Delta(
text=rule_text,
domain=domain,
priority="high", # Distilled rules get high priority
score=0.95, # High confidence score
evidence_refs=evidence_ids, # References all the deltas it summarized
created_by="curator_v1 (distilled)",
approved=True, # Automatically approved
usage_count=0
)
deltas_list.append(distilled_delta.model_dump())
await self.memory_store._save_deltas_to_r2(domain, deltas_list)
def _create_distillation_prompt(self, domain: str, deltas: List[Dict]) -> str:
"""
Creates the (English-only) prompt for the LLM to act as a Distiller/Curator.
(Implements Point 4 - Curator prompt)
"""
        deltas_text = "\n".join(
            f"- {d.get('text')} (Score: {(d.get('score') or 0.5):.2f})"
            for d in deltas
        )
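        # e.g. a rendered line (hypothetical rule text):
        # "- Prefer smaller size in low-volume sessions (Score: 0.70)"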
prompt = f"""
SYSTEM: You are an expert "Curator" AI. Your job is to read a list of "Deltas" (learning rules) for crypto trading, identify recurring patterns, and merge them into 3-5 concise, powerful "Golden Rules".
DOMAIN: {domain}
RAW DELTAS TO ANALYZE ({len(deltas)} rules):
{deltas_text}
--- END OF DELTAS ---
TASK:
1. Analyze the "RAW DELTAS" above.
2. Find overlaps, repetitions, and contradictions.
3. Generate 3 to 5 new "Distilled Rules" that summarize the core wisdom of these deltas.
4. Each new rule must be concise (max 25 words) and actionable.
OUTPUT FORMAT (JSON Only):
{{
"justification": "A brief explanation of the patterns you found and how you merged them.",
"distilled_rules": [
"The first golden rule (e.g., 'Always use ATR trailing stops for breakout strategies.')",
"The second golden rule (e.g., 'If RSI is overbought on 1H, avoid breakout entries.')",
"..."
]
}}
"""
return prompt
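

# --- Usage sketch (illustrative, not part of the module's API) ---
# A minimal smoke test with stand-in stubs for the injected services. The stub
# method names (_call_llm, _parse_llm_response_enhanced, _load_deltas_from_r2,
# _save_deltas_to_r2) simply mirror how Curator calls its collaborators above;
# the real LLMService and MemoryStore are wired in by the application. Run as
# `python -m learning_hub.curator` so the relative imports resolve.
if __name__ == "__main__":
    import asyncio
    import json

    class _StubLLM:
        async def _call_llm(self, prompt: str) -> str:
            return json.dumps({
                "justification": "demo",
                "distilled_rules": ["Example golden rule produced by the stub."],
            })

        def _parse_llm_response_enhanced(self, text, fallback_strategy=None, symbol=None):
            return json.loads(text)

    class _StubStore:
        def __init__(self):
            # 50 approved deltas, exactly at the distillation threshold.
            self.deltas = [
                {"id": f"d{i}", "text": f"rule {i}", "approved": True, "score": 0.6}
                for i in range(50)
            ]

        async def _load_deltas_from_r2(self, domain: str):
            return list(self.deltas)

        async def _save_deltas_to_r2(self, domain: str, deltas):
            self.deltas = deltas

    asyncio.run(Curator(_StubLLM(), _StubStore()).check_and_distill_domain("demo_domain"))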