# app.py (Fully updated to V8.2 - Statistical VADER Scoring)
import os
import traceback
import signal
import sys
import uvicorn
import asyncio
import json
import time
from contextlib import asynccontextmanager
from fastapi import FastAPI, HTTPException
from datetime import datetime
from typing import List, Dict, Any
try:
    from r2 import R2Service
    from LLM import LLMService
    from data_manager import DataManager
    from ml_engine.processor import MLProcessor
    from learning_hub.hub_manager import LearningHubManager
    from sentiment_news import SentimentAnalyzer, NewsFetcher  # (V8.1) import NewsFetcher
    from trade_manager import TradeManager
    from ml_engine.monte_carlo import _sanitize_results_for_json
    from helpers import safe_float_conversion, validate_candidate_data_enhanced
except ImportError as e:
    print(f"❌ Module import error: {e}")
    if "ccxt.async_support" in str(e) or "ccxtpro" in str(e):
        print("🚨 Fatal error: make sure 'ccxt' (version 4+) is installed and that 'ccxt-pro' has been removed.")
    sys.exit(1)
# (V8.1) Import VADER
try:
    from vaderSentiment.vaderSentiment import SentimentIntensityAnalyzer
    VADER_ANALYZER = SentimentIntensityAnalyzer()
    print("✅ VADER Sentiment Analyzer loaded successfully")
except ImportError:
    print("❌❌ Failed to import VADER. The news score will be disabled. ❌❌")
    print(" Install it with: pip install vaderSentiment")
    VADER_ANALYZER = None
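# Note on VADER: SentimentIntensityAnalyzer.polarity_scores(text) returns a dict with 'neg',
# 'neu', 'pos' and a normalized 'compound' score in [-1.0, +1.0]; only the 'compound' value is
# used below (stored as 'news_score_raw' in the Explorer path and 'news_score' in re-analysis).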
# Global variables
r2_service_global = None
data_manager_global = None
llm_service_global = None
learning_hub_global = None
trade_manager_global = None
sentiment_analyzer_global = None
symbol_whale_monitor_global = None
news_fetcher_global = None  # (V8.1) NewsFetcher
MARKET_STATE_OK = True
class StateManager:
    # ... (no changes in this class) ...
    def __init__(self):
        self.market_analysis_lock = asyncio.Lock()
        self.trade_analysis_lock = asyncio.Lock()
        self.initialization_complete = False
        self.initialization_error = None
        self.services_initialized = {
            'r2_service': False, 'data_manager': False, 'llm_service': False,
            'learning_hub': False, 'trade_manager': False, 'sentiment_analyzer': False,
            'symbol_whale_monitor': False, 'news_fetcher': False  # (V8.1)
        }

    async def wait_for_initialization(self, timeout=60):
        start_time = time.time()
        while not self.initialization_complete and (time.time() - start_time) < timeout:
            if self.initialization_error:
                raise Exception(f"Initialization failed: {self.initialization_error}")
            await asyncio.sleep(2)
        if not self.initialization_complete:
            raise Exception(f"Initialization timed out ({timeout} seconds)")
        return self.initialization_complete

    def set_service_initialized(self, service_name):
        self.services_initialized[service_name] = True
        if all(self.services_initialized.values()):
            self.initialization_complete = True
            print("🎯 All services fully initialized")

    def set_initialization_error(self, error):
        self.initialization_error = error
        print(f"❌ Initialization error: {error}")

state_manager = StateManager()
async def initialize_services():
    """Initialize all services, one by one."""
    global r2_service_global, data_manager_global, llm_service_global
    global learning_hub_global, trade_manager_global, sentiment_analyzer_global
    global symbol_whale_monitor_global, news_fetcher_global  # (V8.1)
    try:
        # 🔴 --- START OF CHANGE (V7.0) --- 🔴
        print("🚀 Starting service initialization (new Sentry architecture V5.9)...")
        # 🔴 --- END OF CHANGE --- 🔴
        print(" 🔄 Initializing R2Service...")
        r2_service_global = R2Service()
        state_manager.set_service_initialized('r2_service')
        print(" ✅ R2Service initialized")
        print(" 🔄 Loading the contracts database...")
        contracts_database = await r2_service_global.load_contracts_db_async()
        print(f" ✅ Loaded {len(contracts_database)} contracts")
        print(" 🔄 Initializing the whale monitor (Layer 1 Data)...")
        try:
            from whale_monitor.core import EnhancedWhaleMonitor
            symbol_whale_monitor_global = EnhancedWhaleMonitor(contracts_database, r2_service_global)
            state_manager.set_service_initialized('symbol_whale_monitor')
            print(" ✅ Whale monitor initialized")
        except Exception as e:
            print(f" ⚠️ Whale monitor initialization failed: {e}")
            traceback.print_exc()
            symbol_whale_monitor_global = None
            state_manager.set_service_initialized('symbol_whale_monitor')
            print(" ℹ️ Whale monitoring disabled. Continuing initialization...")
        # (V8-MODIFICATION) Pass r2_service_global to DataManager
        print(" 🔄 Initializing DataManager (Layer 1 Data)...")
        data_manager_global = DataManager(contracts_database, symbol_whale_monitor_global, r2_service_global)
        await data_manager_global.initialize()
        state_manager.set_service_initialized('data_manager')
        print(" ✅ DataManager initialized (V8 pattern engine loaded)")
        print(" 🔄 Initializing LLMService (Layer 1 Brain)...")
        llm_service_global = LLMService()
        llm_service_global.r2_service = r2_service_global
        print(" 🔄 Initializing the sentiment analyzer (Layer 1 Data)...")
        sentiment_analyzer_global = SentimentAnalyzer(data_manager_global)
        state_manager.set_service_initialized('sentiment_analyzer')
        print(" ✅ Sentiment analyzer initialized")
        # (V8.1) Initialize NewsFetcher
        print(" 🔄 Initializing NewsFetcher (Layer 1 Data)...")
        news_fetcher_global = NewsFetcher()
        state_manager.set_service_initialized('news_fetcher')
        print(" ✅ NewsFetcher (V8.1) initialized")
        # (Pass NewsFetcher to LLMService - important for re-analysis)
        llm_service_global.news_fetcher = news_fetcher_global
        print(" 🔄 Initializing the Learning Hub...")
        learning_hub_global = LearningHubManager(
            r2_service=r2_service_global,
            llm_service=llm_service_global,
            data_manager=data_manager_global
        )
        await learning_hub_global.initialize()
        state_manager.set_service_initialized('learning_hub')
        print(" ✅ Learning Hub initialized")
        llm_service_global.learning_hub = learning_hub_global
        state_manager.set_service_initialized('llm_service')
        print(" ✅ LLMService wired to the Learning Hub")
        print(" 🔄 Initializing the trade manager (Layer 2 Sentry + Layer 3 Executor)...")
        # (The cycle function is passed as a callback to be invoked after a trade is closed)
        trade_manager_global = TradeManager(
            r2_service=r2_service_global,
            learning_hub=learning_hub_global,
            data_manager=data_manager_global,
            state_manager=state_manager,
            callback_on_close=run_bot_cycle_async
        )
        await trade_manager_global.initialize_sentry_exchanges()
        state_manager.set_service_initialized('trade_manager')
        print(" ✅ Trade manager (Sentry/Executor) initialized")
        print("🎯 All services initialized successfully")
        return True
    except Exception as e:
        error_msg = f"Service initialization failed: {str(e)}"
        print(f"❌ {error_msg}")
        state_manager.set_initialization_error(error_msg)
        return False
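# Note: initialize_services() wires the services in dependency order:
# R2Service -> contracts DB -> whale monitor -> DataManager -> LLMService -> SentimentAnalyzer ->
# NewsFetcher -> LearningHubManager -> TradeManager. state_manager.initialization_complete only
# becomes True once every entry in services_initialized has been marked, so the background loops
# below block on wait_for_initialization() until the whole chain is up.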
async def monitor_market_async():
    # ... (no changes in this function) ...
    global data_manager_global, sentiment_analyzer_global, MARKET_STATE_OK
    try:
        if not await state_manager.wait_for_initialization():
            print("❌ Service initialization failed - stopping market monitoring")
            return
        while True:
            try:
                async with state_manager.market_analysis_lock:
                    market_context = await sentiment_analyzer_global.get_market_sentiment()
                    if not market_context:
                        MARKET_STATE_OK = True
                        await asyncio.sleep(60)
                        continue
                    bitcoin_sentiment = market_context.get('btc_sentiment')
                    fear_greed_index = market_context.get('fear_and_greed_index')
                    should_halt_trading, halt_reason = False, ""
                    if bitcoin_sentiment == 'BEARISH' and (fear_greed_index is not None and fear_greed_index < 30):
                        should_halt_trading, halt_reason = True, "Bearish market conditions"
                    if should_halt_trading:
                        MARKET_STATE_OK = False
                        await r2_service_global.save_system_logs_async({"market_halt": True, "reason": halt_reason})
                    else:
                        if not MARKET_STATE_OK:
                            print("✅ Market conditions improved. Resuming normal operations.")
                        MARKET_STATE_OK = True
                await asyncio.sleep(60)
            except Exception as error:
                print(f"❌ Error while monitoring the market: {error}")
                MARKET_STATE_OK = True
                await asyncio.sleep(60)
    except Exception as e:
        print(f"❌ Failed to run market monitoring: {e}")
async def run_periodic_distillation():
    # ... (no changes in this function) ...
    print("background task: Periodic Distillation (Curator) scheduled.")
    await asyncio.sleep(300)
    while True:
        try:
            if not await state_manager.wait_for_initialization():
                await asyncio.sleep(60)
                continue
            print("🔄 [Scheduler] Running periodic distillation check...")
            await learning_hub_global.run_distillation_check()
            await asyncio.sleep(6 * 60 * 60)
        except Exception as e:
            print(f"❌ [Scheduler] Error in periodic distillation task: {e}")
            traceback.print_exc()
            await asyncio.sleep(60 * 60)
async def process_batch_parallel(batch, ml_processor, batch_num, total_batches, preloaded_whale_data):
    # ... (no changes in this function) ...
    try:
        batch_tasks = []
        for symbol_data in batch:
            task = asyncio.create_task(ml_processor.process_multiple_symbols_parallel([symbol_data], preloaded_whale_data))
            batch_tasks.append(task)
        batch_results_list_of_lists = await asyncio.gather(*batch_tasks, return_exceptions=True)
        successful_results = []
        low_score_results = []
        failed_results = []
        for i, result_list in enumerate(batch_results_list_of_lists):
            symbol = batch[i].get('symbol', 'unknown')
            if isinstance(result_list, Exception):
                failed_results.append({"symbol": symbol, "error": f"Task Execution Error: {str(result_list)}"})
                continue
            if result_list:
                result = result_list[0]
                if isinstance(result, dict):
                    if result.get('enhanced_final_score', 0) > 0.4:
                        successful_results.append(result)
                    else:
                        low_score_results.append(result)
                else:
                    failed_results.append({"symbol": symbol, "error": f"ML processor returned invalid type: {type(result)}"})
            else:
                failed_results.append({"symbol": symbol, "error": "ML processing returned None or empty list"})
        return {'success': successful_results, 'low_score': low_score_results, 'failures': failed_results}
    except Exception as error:
        print(f"❌ [Consumer] Error processing batch {batch_num}: {error}")
        return {'success': [], 'low_score': [], 'failures': []}
async def run_3_layer_analysis_explorer() -> List[Dict[str, Any]]:
    """
    (Updated V8.2) - Use the statistical news score.
    """
    layer1_candidates = []
    layer2_candidates = []
    final_layer2_candidates = []
    watchlist_candidates = []
    preloaded_whale_data_dict = {}
    try:
        print("🎯 Starting Explorer Analysis (Layer 1)...")
        if not await state_manager.wait_for_initialization():
            print("❌ Services not fully initialized (Explorer)")
            return []
        # (V8.1) Make sure VADER and NewsFetcher are initialized
        if not VADER_ANALYZER or not news_fetcher_global:
            print("❌ VADER or NewsFetcher not initialized! News analysis will be skipped.")
        print("\n🔍 Layer 1.1: Rapid Screening (data_manager V7.3)...")
        layer1_candidates = await data_manager_global.layer1_rapid_screening()
        if not layer1_candidates:
            print("❌ No candidates found in Layer 1.1")
            return []
        print(f"✅ Selected {len(layer1_candidates)} symbols for Layer 1.2")
        print(f"\n📊 Layer 1.2: Fetching OHLCV data for {len(layer1_candidates)} symbols (Streaming)...")
        DATA_QUEUE_MAX_SIZE = 2
        ohlcv_data_queue = asyncio.Queue(maxsize=DATA_QUEUE_MAX_SIZE)
        ml_results_list = []
        market_context = await data_manager_global.get_market_context_async()
        ml_processor = MLProcessor(market_context, data_manager_global, learning_hub_global)
        batch_size = 15
        total_batches = (len(layer1_candidates) + batch_size - 1) // batch_size
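        # Streaming design: stream_ohlcv_data() (the producer) pushes batches of OHLCV data into
        # the bounded asyncio.Queue above (maxsize=2) while ml_consumer_task (the consumer,
        # defined below) scores each batch as it arrives. The small queue size caps memory usage
        # to a couple of in-flight batches; a None item is treated by the consumer as a stop
        # sentinel.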
        async def ml_consumer_task(queue: asyncio.Queue, results_list: list, whale_data_store: dict):
            # ... (no changes in this inner function) ...
            batch_num = 0
            while True:
                try:
                    batch_data = await queue.get()
                    if batch_data is None:
                        queue.task_done()
                        break
                    batch_num += 1
                    batch_results_dict = await process_batch_parallel(
                        batch_data, ml_processor, batch_num, total_batches, whale_data_store
                    )
                    results_list.append(batch_results_dict)
                    queue.task_done()
                except Exception as e:
                    print(f"❌ [ML Consumer] Fatal Error: {e}")
                    traceback.print_exc()
                    queue.task_done()

        consumer_task = asyncio.create_task(ml_consumer_task(ohlcv_data_queue, ml_results_list, preloaded_whale_data_dict))
        producer_task = asyncio.create_task(data_manager_global.stream_ohlcv_data(layer1_candidates, ohlcv_data_queue))
        await producer_task
        await ohlcv_data_queue.join()
        await consumer_task
        print("🔄 Aggregating all ML (Layer 1.3) results...")
        for batch_result in ml_results_list:
            for success_item in batch_result['success']:
                symbol = success_item['symbol']
                l1_data = success_item
                if l1_data:
                    success_item['reasons_for_candidacy'] = l1_data.get('reasons_for_candidacy', [])
                    success_item['layer1_score'] = l1_data.get('layer1_score', 0)
                success_item['whale_data'] = {'data_available': False, 'reason': 'Not fetched yet'}
                # (V8.2) Default news values
                success_item['news_text'] = ""
                success_item['news_score_raw'] = 0.0  # raw VADER score
                success_item['statistical_news_pnl'] = 0.0  # the learned score
                layer2_candidates.append(success_item)
        if not layer2_candidates:
            print("❌ No candidates found in Layer 1.3")
            return []
        layer2_candidates.sort(key=lambda x: x.get('enhanced_final_score', 0), reverse=True)
        # (V8.1) Take the top 10 here (instead of 5) so whale and news data can be added
        target_count = min(10, len(layer2_candidates))
        final_layer2_candidates = layer2_candidates[:target_count]
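        # Funnel: the top 10 ML-scored candidates are enriched with whale and news data below and
        # re-scored; only the top 5 of the re-sorted list are sent on to the LLM in Layer 1.6.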
        # 🔴 --- START OF CHANGE (V8.2) --- 🔴
        print(f"\n🐋📰 Layer 1.4 (Optimized): Fetching Whale Data & News for top {len(final_layer2_candidates)} candidates...")

        # (Helper to fetch whale data)
        async def get_whale_data_for_candidate(candidate):
            symbol = candidate.get('symbol', 'UNKNOWN')
            try:
                data = await data_manager_global.get_whale_data_for_symbol(symbol)
                if data:
                    candidate['whale_data'] = data
                else:
                    candidate['whale_data'] = {'data_available': False, 'reason': 'No data returned'}
            except Exception as e:
                print(f" ❌ [Whale Fetch] {symbol} - Error: {e}")
                candidate['whale_data'] = {'data_available': False, 'error': str(e)}

        # (Helper to fetch the news and run VADER analysis)
        async def get_news_data_for_candidate(candidate):
            symbol = candidate.get('symbol', 'UNKNOWN')
            if not news_fetcher_global or not VADER_ANALYZER:
                candidate['news_text'] = "News analysis disabled."
                candidate['news_score_raw'] = 0.0
                return
            try:
                # 1. Fetch the news text (using the upgraded NewsFetcher)
                news_text = await news_fetcher_global.get_news_for_symbol(symbol)
                candidate['news_text'] = news_text
                # 2. Compute the raw VADER score
                if "No specific news found" in news_text or not news_text:
                    candidate['news_score_raw'] = 0.0  # neutral
                else:
                    vader_score = VADER_ANALYZER.polarity_scores(news_text)
                    candidate['news_score_raw'] = vader_score.get('compound', 0.0)  # result in [-1, +1]
            except Exception as e:
                print(f" ❌ [News Fetch] {symbol} - Error: {e}")
                candidate['news_text'] = f"Error fetching news: {e}"
                candidate['news_score_raw'] = 0.0

        # (Run the tasks in parallel)
        tasks = []
        for candidate in final_layer2_candidates:
            tasks.append(asyncio.create_task(get_whale_data_for_candidate(candidate)))
            tasks.append(asyncio.create_task(get_news_data_for_candidate(candidate)))
        await asyncio.gather(*tasks)
        print(" ✅ Whale data and News data fetched for top candidates.")
        print(" 🔄 Re-calculating enhanced scores with new Whale & Statistical News data...")
        for candidate in final_layer2_candidates:
            try:
                # (V8.2) Get the raw VADER score
                raw_vader_score = candidate.get('news_score_raw', 0.0)
                # (V8.2) Get the statistical PnL associated with this score from the Learning Hub
                if learning_hub_global:
                    statistical_pnl = await learning_hub_global.get_statistical_news_score(raw_vader_score)
                    candidate['statistical_news_pnl'] = statistical_pnl
                else:
                    candidate['statistical_news_pnl'] = 0.0  # (safe fallback)
                # (This function in processor.py was modified to accept statistical_news_pnl)
                new_score = ml_processor._calculate_enhanced_final_score(candidate)
                candidate['enhanced_final_score'] = new_score
            except Exception as e:
                print(f" ❌ [Score Recalc] {candidate.get('symbol')} - Error: {e}")
        final_layer2_candidates.sort(key=lambda x: x.get('enhanced_final_score', 0), reverse=True)
        print(" ✅ Top scores updated (with Stat. News + Whale) and re-sorted.")
        # 🔴 --- END OF CHANGE (V8.2) --- 🔴
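        # (V8.2) Statistical news scoring in brief: the raw VADER compound score is not fed into
        # the final score directly. Instead, learning_hub.get_statistical_news_score() maps it to
        # 'statistical_news_pnl' (presumably the PnL historically associated with similar news
        # scores in past trades), and that value is what _calculate_enhanced_final_score()
        # consumes. The raw news text itself is reserved for the LLM prompt in Layer 1.6.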
        print(f"\n🔬 Layer 1.5: Running Advanced MC (GARCH+LGBM) on top {len(final_layer2_candidates)} candidates...")
        advanced_mc_analyzer = ml_processor.monte_carlo_analyzer
        updated_candidates_for_llm = []
        for candidate in final_layer2_candidates:
            symbol = candidate.get('symbol', 'UNKNOWN')
            try:
                advanced_mc_results = await advanced_mc_analyzer.generate_1h_distribution_advanced(
                    candidate.get('ohlcv')
                )
                if advanced_mc_results and advanced_mc_results.get('simulation_model') == 'Phase2_GARCH_LGBM':
                    candidate['monte_carlo_distribution'] = advanced_mc_results
                    candidate['monte_carlo_probability'] = advanced_mc_results.get('probability_of_gain', 0)
                    candidate['advanced_mc_run'] = True
                else:
                    candidate['advanced_mc_run'] = False
                updated_candidates_for_llm.append(candidate)
            except Exception as e:
                print(f" ❌ [Advanced MC] {symbol} - Error: {e}. Using Phase 1 results.")
                candidate['advanced_mc_run'] = False
                updated_candidates_for_llm.append(candidate)
        print(" 🔄 Sanitizing final candidates for JSON serialization...")
        sanitized_candidates = []
        for cand in updated_candidates_for_llm:
            sanitized_candidates.append(_sanitize_results_for_json(cand))
        final_layer2_candidates = sanitized_candidates
        await r2_service_global.save_candidates_async(final_layer2_candidates)
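        # _sanitize_results_for_json() (from ml_engine.monte_carlo) presumably converts values
        # that are not JSON-serializable (e.g. numpy scalars/arrays, NaN/inf) into plain Python
        # types so the candidate payload can be persisted to R2 and sent to the LLM.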
        print("\n🧠 Layer 1.6: LLM Strategic Analysis (Explorer Brain)...")
        # (V8.1) Pick the top 5 only *after* all analyses are complete (whales + news + advanced MC)
        top_5_for_llm = final_layer2_candidates[:5]
        print(f" (Sending Top {len(top_5_for_llm)} candidates to LLM)")
        for candidate in top_5_for_llm:
            try:
                symbol = candidate['symbol']
                ohlcv_data = candidate.get('ohlcv')
                if not ohlcv_data:
                    continue
                candidate['raw_ohlcv'] = ohlcv_data
                total_candles = sum(len(data) for data in ohlcv_data.values()) if ohlcv_data else 0
                if total_candles < 30:
                    continue
                candidate['sentiment_data'] = await data_manager_global.get_market_context_async()
                # (V8.2) The large model receives 'news_text' (the raw text)
                # and never sees 'statistical_news_pnl' (the learned score)
                llm_analysis = await llm_service_global.get_trading_decision(candidate)
                if llm_analysis and llm_analysis.get('action') in ['WATCH']:
                    strategy_to_watch = llm_analysis.get('strategy_to_watch', 'GENERIC')
                    confidence = llm_analysis.get('confidence_level', 0)
                    watchlist_entry = {
                        'symbol': symbol,
                        'strategy_hint': strategy_to_watch,
                        'explorer_score': candidate.get('enhanced_final_score', 0),
                        'llm_confidence': confidence,
                        'analysis_timestamp': datetime.now().isoformat(),
                        'llm_decision_context': {
                            'decision': llm_analysis,
                            'full_candidate_data': candidate
                        }
                    }
                    watchlist_candidates.append(watchlist_entry)
                    print(f" ✅ {symbol}: Added to Sentry Watchlist (Strategy: {strategy_to_watch} | Conf: {confidence:.2f})")
                else:
                    action = llm_analysis.get('action', 'NO_DECISION') if llm_analysis else 'NO_RESPONSE'
                    print(f" ⚠️ {symbol}: Not recommended by LLM for watching ({action})")
            except Exception as e:
                print(f"❌ Error in LLM analysis for {candidate.get('symbol')}: {e}")
                traceback.print_exc()
                continue
        if watchlist_candidates:
            watchlist_candidates.sort(key=lambda x: (x['llm_confidence'] + x['explorer_score']) / 2, reverse=True)
        if not watchlist_candidates:
            print("❌ Explorer analysis complete: No suitable candidates for Sentry Watchlist.")
            return []
        top_watchlist = watchlist_candidates
        # --- (V8-MODIFICATION) Build and save the audit log ---
        print("📊 Building the audit log for the V8 pattern engine...")
        audit_log = {
            "log_id": f"audit_{int(datetime.now().timestamp())}",
            "timestamp": datetime.now().isoformat(),
            "model_key": "lgbm_pattern_model_combined.pkl",  # (from the winning model)
            "scaler_key": "scaler_combined.pkl",  # (from the winning model)
            "model_accuracy": 0.5870,  # (from evaluation_results.txt)
            "predictions": []
        }
        # (Collect every prediction made during this cycle)
        for candidate in final_layer2_candidates:  # (use the final candidates)
            pattern_analysis = candidate.get('pattern_analysis', {})
            audit_entry = {
                "symbol": candidate.get('symbol', 'N/A'),
                "timeframe": pattern_analysis.get('timeframe', 'N/A'),
                "pattern_detected": pattern_analysis.get('pattern_detected', 'N/A'),
                "confidence": pattern_analysis.get('pattern_confidence', 0),
                "predicted_direction": pattern_analysis.get('predicted_direction', 'neutral'),
                "error": "None"  # (assume no error; otherwise it would not have become a candidate)
            }
            audit_log["predictions"].append(audit_entry)
        # (Save the log to R2)
        if r2_service_global:
            await r2_service_global.save_analysis_audit_log_async(audit_log)
        # --- (end of addition) ---
        print(f"✅ Explorer analysis complete. Sending {len(top_watchlist)} candidates to Sentry.")
        return top_watchlist
    except Exception as error:
        print(f"❌ Fatal error in Explorer (Layer 1) system: {error}")
        traceback.print_exc()
        return []
async def re_analyze_open_trade_async(trade_data):
    """(V8.2) Also fetch the news text and VADER score for the Reflector."""
    symbol = trade_data.get('symbol')
    try:
        async with state_manager.trade_analysis_lock:
            print(f"🔄 [Re-Analyze] Starting strategic analysis for {symbol}...")
            market_context = await data_manager_global.get_market_context_async()
            ohlcv_data_list = []
            temp_queue = asyncio.Queue()
            await data_manager_global.stream_ohlcv_data(
                [{'symbol': symbol, 'layer1_score': 0, 'reasons_for_candidacy': ['re-analysis']}],
                temp_queue
            )
            while True:
                try:
                    batch = await asyncio.wait_for(temp_queue.get(), timeout=1.0)
                    if batch is None:
                        temp_queue.task_done()
                        break
                    ohlcv_data_list.extend(batch)
                    temp_queue.task_done()
                except asyncio.TimeoutError:
                    if temp_queue.empty():
                        break
                except Exception:
                    break
            if not ohlcv_data_list:
                print(f"⚠️ Failed to get re-analysis data for {symbol}")
                return None
            ohlcv_data = ohlcv_data_list[0]
            re_analysis_whale_data = await data_manager_global.get_whale_data_for_symbol(symbol)
            ml_processor = MLProcessor(market_context, data_manager_global, learning_hub_global)
            print(f"🔄 [Re-Analyze] Using Advanced MC (Phase 2+3) for {symbol}...")
            advanced_mc_results = await ml_processor.monte_carlo_analyzer.generate_1h_distribution_advanced(
                ohlcv_data.get('ohlcv')
            )
            processed_data = await ml_processor.process_and_score_symbol_enhanced(ohlcv_data, {symbol: re_analysis_whale_data} if re_analysis_whale_data else {})
            if not processed_data:
                return None
            if advanced_mc_results:
                processed_data['monte_carlo_distribution'] = advanced_mc_results
                processed_data['monte_carlo_probability'] = advanced_mc_results.get('probability_of_gain', 0)
            processed_data['raw_ohlcv'] = ohlcv_data.get('raw_ohlcv') or ohlcv_data.get('ohlcv')
            processed_data['ohlcv'] = processed_data['raw_ohlcv']
            processed_data['sentiment_data'] = market_context
            # 🔴 --- START OF CHANGE (V8.2) --- 🔴
            # (Fetch the news text and raw VADER score so they can be passed to the Reflector later)
            if news_fetcher_global and VADER_ANALYZER:
                try:
                    news_text = await news_fetcher_global.get_news_for_symbol(symbol)
                    processed_data['news_text'] = news_text
                    vader_score = VADER_ANALYZER.polarity_scores(news_text)
                    processed_data['news_score'] = vader_score.get('compound', 0.0)
                except Exception as e:
                    print(f" ❌ [Re-Analyze News] {symbol} - Error: {e}")
                    processed_data['news_text'] = "News analysis failed."
                    processed_data['news_score'] = 0.0
            else:
                processed_data['news_text'] = "News analysis disabled."
                processed_data['news_score'] = 0.0
            # 🔴 --- END OF CHANGE --- 🔴
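            # Note: the Explorer path stores the raw compound score under 'news_score_raw', while
            # this re-analysis path stores it as 'news_score', which is the key the Reflector is
            # expected to read.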
            # (LLMService will use its own NewsFetcher to pull the latest news for its analysis)
            re_analysis_decision = await llm_service_global.re_analyze_trade_async(trade_data, processed_data)
            if re_analysis_decision:
                await r2_service_global.save_system_logs_async({
                    "trade_reanalyzed": True,
                    "symbol": symbol,
                    "action": re_analysis_decision.get('action'),
                    "strategy": re_analysis_decision.get('strategy', 'GENERIC')
                })
                print(f"✅ [Re-Analyze] Strategic analysis complete for {symbol}. Decision: {re_analysis_decision.get('action')}")
                return {"symbol": symbol, "decision": re_analysis_decision, "current_price": processed_data.get('current_price')}
            else:
                return None
    except Exception as error:
        await r2_service_global.save_system_logs_async({"reanalysis_error": True, "symbol": symbol, "error": str(error)})
        print(f"❌ Error in re_analyze_open_trade_async for {symbol}: {error}")
        traceback.print_exc()
        return None
async def run_bot_cycle_async():
    # ... (no changes in this function) ...
    """
    (Updated V5.9) - The main bot cycle (the Explorer).
    """
    try:
        if not await state_manager.wait_for_initialization():
            print("❌ Services not fully initialized - skipping cycle")
            return
        # 🔴 --- START OF CHANGE (V6.9) --- 🔴
        # (Added: a short delay to make sure the previous cycle has released the lock)
        await asyncio.sleep(1.0)
        # 🔴 --- END OF CHANGE --- 🔴
        print("🔄 Starting Explorer cycle (Layer 1)...")
        await r2_service_global.save_system_logs_async({"explorer_cycle_started": True})
        if not r2_service_global.acquire_lock():
            print("❌ Failed to acquire lock - skipping cycle (another cycle likely running)")
            return
        open_trades = []
        try:
            open_trades = await trade_manager_global.get_open_trades()
            print(f"📋 Open trades: {len(open_trades)}")
            if open_trades:
                now = datetime.now()
                trades_to_reanalyze = [t for t in open_trades if now >= datetime.fromisoformat(t.get('expected_target_time', now.isoformat()))]
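                # (A trade is re-analyzed once the current time has passed its
                # 'expected_target_time'; a missing timestamp defaults to now(), which makes the
                # comparison true and forces an immediate re-analysis.)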
                if trades_to_reanalyze:
                    print(f"🔄 (Explorer) Re-analyzing {len(trades_to_reanalyze)} trades strategically...")
                    reanalysis_results = await asyncio.gather(*[re_analyze_open_trade_async(trade) for trade in trades_to_reanalyze], return_exceptions=True)
                    for i, result in enumerate(reanalysis_results):
                        trade = trades_to_reanalyze[i]
                        if isinstance(result, Exception):
                            print(f" ❌ Re-analysis failed for {trade.get('symbol')}: {result}")
                        elif result and result['decision'].get('action') == "UPDATE_TRADE":
                            print(f" ✅ (Explorer) Updating strategy for {trade.get('symbol')}.")
                            await trade_manager_global.update_trade_strategy(trade, result['decision'])
                        elif result and result['decision'].get('action') == "HOLD":
                            print(f" ℹ️ (Explorer) Holding {trade.get('symbol')}. Resetting 15-min timer.")
                            await trade_manager_global.update_trade_strategy(trade, result['decision'])
                        elif result and result['decision'].get('action') == "CLOSE_TRADE":
                            print(f" 🛑 (Explorer) LLM Re-analysis ordered CLOSE_TRADE for {trade.get('symbol')}. Executing...")
                            await trade_manager_global.immediate_close_trade(
                                trade.get('symbol'),
                                result['current_price'],
                                f"Strategic Exit: LLM Re-analysis ({result['decision'].get('reasoning', 'N/A')[:50]}...)"
                            )
                        elif result:
                            print(f" ℹ️ (Explorer) Re-analysis returned unhandled action '{result['decision'].get('action')}' for {trade.get('symbol')}.")
                        else:
                            print(f" ⚠️ Re-analysis for {trade.get('symbol')} yielded no decision.")
            current_open_trades_count = len(await trade_manager_global.get_open_trades())
            should_look_for_new_trade = current_open_trades_count == 0
            if should_look_for_new_trade:
                portfolio_state = await r2_service_global.get_portfolio_state_async()
                current_capital = portfolio_state.get("current_capital_usd", 0)
                if current_capital > 1:
                    print("🎯 (Explorer) Looking for new trading opportunities...")
                    sentry_watchlist = await run_3_layer_analysis_explorer()
                    if sentry_watchlist:
                        print(f"✅ (Explorer) Found {len(sentry_watchlist)} candidates. Sending to Sentry (Layer 2)...")
                        await trade_manager_global.update_sentry_watchlist(sentry_watchlist)
                    else:
                        print("❌ (Explorer) No suitable trading opportunities found for Sentry.")
                        await trade_manager_global.update_sentry_watchlist([])
                else:
                    print("❌ Insufficient capital to open new trades")
            else:
                print("ℹ️ A trade is already open, skipping new trade search.")
                await trade_manager_global.update_sentry_watchlist([])
        finally:
            if r2_service_global.lock_acquired:
                r2_service_global.release_lock()
        await r2_service_global.save_system_logs_async({"explorer_cycle_completed": True, "open_trades": len(open_trades)})
        print("✅ Explorer cycle complete")
    except Exception as error:
        print(f"❌ Unhandled error in main cycle: {error}")
        traceback.print_exc()
        await r2_service_global.save_system_logs_async({"cycle_error": True, "error": str(error)})
        if r2_service_global and r2_service_global.lock_acquired:
            r2_service_global.release_lock()
@asynccontextmanager
async def lifespan(application: FastAPI):
    # ... (no changes in this function) ...
    """Application lifecycle management"""
    print("🚀 Starting application initialization (Explorer/Sentry/Executor)...")
    try:
        success = await initialize_services()
        if not success:
            print("❌ Application initialization failed - shutting down...")
            yield
            return
        asyncio.create_task(monitor_market_async())
        asyncio.create_task(trade_manager_global.start_sentry_and_monitoring_loops())
        asyncio.create_task(run_periodic_distillation())
        await r2_service_global.save_system_logs_async({"application_started": True})
        print("🎯 Application ready - Explorer-Sentry-Executor Architecture is active")
        print(" -> 📈 Sentry (Layer 2) & Executor (Layer 3) are active")
        print(" -> 🧠 Periodic Distillation (Curator) is scheduled")
        yield
    except Exception as error:
        print(f"❌ Application startup failed: {error}")
        traceback.print_exc()
        if r2_service_global:
            await r2_service_global.save_system_logs_async({"application_startup_failed": True, "error": str(error)})
        raise
    finally:
        await cleanup_on_shutdown()
application = FastAPI(lifespan=lifespan, title="AI Trading Bot", description="Explorer-Sentry-Executor Architecture (V5.9)", version="5.9.0")

# ... (no changes to the endpoints) ...
# (API endpoints; the route paths on the decorators below are illustrative, not taken from the original file.)
@application.get("/")
async def root():
    return {"message": "Welcome to the AI Trading System", "system": "Explorer-Sentry-Executor", "status": "running" if state_manager.initialization_complete else "initializing", "timestamp": datetime.now().isoformat()}

@application.post("/run-cycle")
async def run_cycle_api():
    if not state_manager.initialization_complete: raise HTTPException(status_code=503, detail="Services not fully initialized")
    asyncio.create_task(run_bot_cycle_async())
    return {"message": "Explorer (Layer 1) cycle initiated", "system": "Explorer-Sentry-Executor"}

@application.get("/health")
async def health_check():
    return {"status": "healthy" if state_manager.initialization_complete else "initializing", "initialization_complete": state_manager.initialization_complete, "services_initialized": state_manager.services_initialized, "initialization_error": state_manager.initialization_error, "timestamp": datetime.now().isoformat(), "system_architecture": "Explorer-Sentry-Executor (V5.9)"}

@application.post("/analyze-market")
async def analyze_market_api():
    if not state_manager.initialization_complete: raise HTTPException(status_code=503, detail="Services not fully initialized")
    result = await run_3_layer_analysis_explorer()
    if result: return {"watchlist_generated": True, "count": len(result), "top_candidate": result[0]}
    else: return {"watchlist_generated": False, "message": "No suitable candidates found for Sentry"}

@application.get("/portfolio")
async def get_portfolio_api():
    if not state_manager.initialization_complete: raise HTTPException(status_code=503, detail="Services not fully initialized")
    try:
        portfolio_state = await r2_service_global.get_portfolio_state_async()
        open_trades = await trade_manager_global.get_open_trades()
        return {"portfolio": portfolio_state, "open_trades": open_trades, "timestamp": datetime.now().isoformat()}
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Error getting portfolio: {str(e)}")

@application.get("/status")
async def get_system_status():
    monitoring_status = trade_manager_global.get_sentry_status() if trade_manager_global else {}
    return {"initialization_complete": state_manager.initialization_complete, "services_initialized": state_manager.services_initialized, "initialization_error": state_manager.initialization_error, "market_state_ok": MARKET_STATE_OK, "sentry_status": monitoring_status, "timestamp": datetime.now().isoformat()}
async def cleanup_on_shutdown():
    # ... (no changes in this function) ...
    global r2_service_global, data_manager_global, trade_manager_global, learning_hub_global, symbol_whale_monitor_global
    print("🛑 Shutdown signal received. Cleaning up...")
    if trade_manager_global:
        await trade_manager_global.stop_sentry_loops()
        print("✅ Sentry/Executor loops stopped")
    if learning_hub_global and learning_hub_global.initialized:
        try:
            await learning_hub_global.shutdown()
            print("✅ Learning hub data saved")
        except Exception as e:
            print(f"❌ Failed to save learning hub data: {e}")
    if symbol_whale_monitor_global:
        try:
            await symbol_whale_monitor_global.cleanup()
            print("✅ Whale monitor cleanup complete.")
        except Exception as e:
            print(f"❌ Failed to cleanup whale monitor: {e}")
    if data_manager_global:
        await data_manager_global.close()
        print("✅ Data manager closed")
    if r2_service_global:
        try:
            await r2_service_global.save_system_logs_async({"application_shutdown": True})
            print("✅ Shutdown log saved")
        except Exception as e:
            print(f"❌ Failed to save shutdown log: {e}")
        if r2_service_global.lock_acquired:
            r2_service_global.release_lock()
            print("✅ R2 lock released")

def signal_handler(signum, frame):
    print(f"🛑 Received signal {signum}. Initiating shutdown...")
    asyncio.create_task(cleanup_on_shutdown())
    sys.exit(0)

signal.signal(signal.SIGINT, signal_handler)
signal.signal(signal.SIGTERM, signal_handler)
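# Caveat: signal_handler runs synchronously, so asyncio.create_task() only succeeds if an event
# loop is already running in this thread, and the sys.exit(0) that follows may stop the process
# before the scheduled cleanup_on_shutdown() task ever runs. The lifespan 'finally' block above
# is the more reliable cleanup path.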
if __name__ == "__main__":
    print("🚀 Starting AI Trading Bot (Explorer-Sentry-Executor V5.9)...")
    uvicorn.run(application, host="0.0.0.0", port=7860, log_level="info", access_log=True)