Riy777 committed on
Commit 0158ed7 · 1 Parent(s): ef93022

Update LLM.py

Files changed (1)
  1. LLM.py +27 -7
LLM.py CHANGED
@@ -475,6 +475,7 @@ OUTPUT FORMAT (JSON - SPOT RE-ANALYSIS):
         pattern = pattern_analysis.get('pattern_detected', 'N/A'); confidence = pattern_analysis.get('pattern_confidence', 0); direction = pattern_analysis.get('predicted_direction', 'N/A'); timeframe = pattern_analysis.get('timeframe', 'N/A'); tf_display = f"on {timeframe} timeframe" if timeframe != 'N/A' else ""
         return f"System Pattern Analysis: Detected '{pattern}' {tf_display} with {confidence:.2f} confidence. Predicted direction: {direction}."
 
+    # 🔴 --- START OF CHANGE (FIXED NoneType CRASH) --- 🔴
     @_rate_limit_nvidia_api
     async def _call_llm(self, prompt: str) -> str:
         try:
@@ -487,23 +488,42 @@ OUTPUT FORMAT (JSON - SPOT RE-ANALYSIS):
                         seed=int(time.time()),
                         max_tokens=4000
                     )
-                    content = response.choices[0].message.content
-                    if content and '{' in content and '}' in content:
-                        return content
+
+                    # First check whether the response contains any content at all
+                    if response.choices and response.choices[0].message and response.choices[0].message.content:
+                        content = response.choices[0].message.content
+
+                        # Check that the content is valid JSON (or at least looks like it)
+                        if '{' in content and '}' in content:
+                            return content
+                        else:
+                            # The content exists but is not JSON
+                            print(f"⚠️ LLM returned invalid content (not JSON) (attempt {attempt+1}): {content[:100]}...")
+
                     else:
-                        print(f"⚠️ LLM returned invalid content (attempt {attempt+1}): {content[:100]}...")
-                        if attempt == 0: await asyncio.sleep(1)
+                        # This is the main fix: the content is None
+                        # (possibly due to NVIDIA content filters or a server-side error)
+                        content = None  # Set it explicitly to None to avoid the "unbound" error
+                        print(f"⚠️ LLM returned NO content (None) (attempt {attempt+1}). Check content filters or API status.")
+
+                    # Wait before retrying
+                    if attempt == 0: await asyncio.sleep(1)
+
                 except (RateLimitError, APITimeoutError) as e:
                     print(f"❌ LLM API Error (Rate Limit/Timeout): {e}. Retrying via backoff...")
                     raise
                 except Exception as e:
+                    # (The 'NoneType' object is not subscriptable error is now avoided here)
                     print(f"❌ Unexpected LLM API error (attempt {attempt+1}): {e}")
                     if attempt == 0: await asyncio.sleep(2)
                     elif attempt == 1: raise
+
             print("❌ LLM failed to return valid content after retries.")
-            return ""
+            return ""  # Return a safe empty string
+
         except Exception as e:
             print(f"❌ Final failure in _call_llm after backoff retries: {e}")
-            raise
+            raise  # Re-raise so the error is logged at the higher level
+    # 🔴 --- END OF CHANGE --- 🔴
 
 print("✅ LLM Service loaded - V5.1 (Explorer/Sentry Mode - Outputs 'WATCH')")