Update src/ai_processor.py
src/ai_processor.py  CHANGED  (+23, -1)
@@ -213,7 +213,10 @@ def load_yolo_model():
     return model
 def load_segmentation_model():
     load_model = _import_tf_loader()
-
+    # Need to import tf.keras.layers within the no_cuda_env for custom_objects
+    with _no_cuda_env():
+        import tensorflow as tf
+        return load_model(SEG_MODEL_PATH, compile=False, custom_objects={'InputLayer': tf.keras.layers.InputLayer})
 
 def load_classification_pipeline():
     pipe = _import_hf_cls()
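
The helper _no_cuda_env() used in this hunk is defined elsewhere in the file and is not part of the diff. As a rough sketch of the pattern (an assumption, not the file's actual code), it is typically a context manager that masks CUDA devices so TensorFlow loads the segmentation model on CPU; that is also why import tensorflow as tf happens inside the block, so tf.keras.layers.InputLayer can be passed via custom_objects before TensorFlow has a chance to initialize the GPU.

import contextlib
import os

@contextlib.contextmanager
def _no_cuda_env():
    # Hypothetical reconstruction: hide CUDA devices while the block runs,
    # then restore the previous CUDA_VISIBLE_DEVICES value.
    previous = os.environ.get("CUDA_VISIBLE_DEVICES")
    os.environ["CUDA_VISIBLE_DEVICES"] = "-1"
    try:
        yield
    finally:
        if previous is None:
            os.environ.pop("CUDA_VISIBLE_DEVICES", None)
        else:
            os.environ["CUDA_VISIBLE_DEVICES"] = previous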
@@ -542,3 +545,22 @@ class AIProcessor:
         except Exception as e:
             logging.error(f"Pipeline error: {e}", exc_info=True)
             return f"❌ Error: {e}"
+
+# Global processor instance
+processor = None
+
+def get_processor():
+    """Get or create the global processor instance"""
+    global processor
+    if processor is None:
+        processor = AIProcessor()
+    return processor
+
+# Convenience function for external use
+@_SPACES_GPU(enable_queue=True, duration=180)
+def analyze_wound(image: Image.Image, questionnaire_data: Dict[str, Any]) -> str:
+    """
+    Main entry point for wound analysis
+    """
+    proc = get_processor()
+    return proc.full_analysis_pipeline(image, questionnaire_data)