Update app.py
app.py CHANGED
```diff
@@ -15,7 +15,7 @@ from langchain.tools import StructuredTool
 IMG_HEIGHT = 256
 IMG_WIDTH = 256
 
-
+
 model_path = "unet_model.h5"
 if not os.path.exists(model_path):
     hf_url = "https://huggingface.co/rishirajbal/UNET_plus_plus_Brain_segmentation/resolve/main/unet_model.h5"
```
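This hunk shows only the start of the download guard; the statements that actually fetch the weights fall outside the diff context. A minimal sketch of how such a guard is usually completed, assuming plain urllib (the real app may use requests or huggingface_hub instead):

```python
import os
import urllib.request

model_path = "unet_model.h5"
if not os.path.exists(model_path):
    hf_url = "https://huggingface.co/rishirajbal/UNET_plus_plus_Brain_segmentation/resolve/main/unet_model.h5"
    # Fetch the weights once; later runs reuse the cached local file.
    # urlretrieve is an assumed download call, not shown in the diff.
    urllib.request.urlretrieve(hf_url, model_path)
```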
```diff
@@ -30,7 +30,7 @@ print("Loading model...")
 model = tf.keras.models.load_model(model_path, compile=False)
 
 
-
+
 def classify_image_and_stats(image_input):
     img = tf.image.resize(image_input, [IMG_HEIGHT, IMG_WIDTH])
     img_norm = img / 255.0
```
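Here `compile=False` restores only the architecture and weights, skipping the saved optimizer and loss; that is the usual choice for inference and avoids deserialization errors when the model was trained with a custom loss. The resize-and-normalize lines then feed the network roughly like this (a sketch; the batching inside the real classify_image_and_stats may differ):

```python
import tensorflow as tf

def predict_mask(model, image, img_height=256, img_width=256):
    # Match the model's expected input size and scale pixels to [0, 1].
    img = tf.image.resize(image, [img_height, img_width])
    img_norm = img / 255.0
    # Add a batch axis: (H, W, C) -> (1, H, W, C), then drop it again.
    pred = model.predict(tf.expand_dims(img_norm, axis=0))[0]
    return pred  # per-pixel tumor probability map from the UNet++
```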
```diff
@@ -46,7 +46,7 @@ def classify_image_and_stats(image_input):
     total_area = IMG_HEIGHT * IMG_WIDTH
     tumor_ratio = tumor_area / total_area
 
-    tumor_label = "Tumor Detected" if tumor_ratio > 0.
+    tumor_label = "Tumor Detected" if tumor_ratio > 0.00385 else "No Tumor Detected"
 
     overlay = np.array(img)
     red_mask = np.zeros_like(overlay)
```
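The substantive change in this commit is the decision threshold on tumor_ratio. With a 256×256 input, total_area is 65,536 pixels, so a ratio of 0.00385 corresponds to roughly 252 pixels: the predicted mask must contain more than that before the slice is labeled positive. A sketch of the computation, assuming the probability map is binarized at 0.5 (the binarization step itself is outside the diff context):

```python
import numpy as np

IMG_HEIGHT, IMG_WIDTH = 256, 256

def label_from_mask(pred, prob_threshold=0.5, ratio_threshold=0.00385):
    # Binarize the predicted probabilities (assumed step, not shown in the diff).
    mask = pred > prob_threshold
    tumor_area = int(np.sum(mask))
    total_area = IMG_HEIGHT * IMG_WIDTH    # 65536 pixels
    tumor_ratio = tumor_area / total_area  # 0.00385 is roughly 252 pixels
    return "Tumor Detected" if tumor_ratio > ratio_threshold else "No Tumor Detected"
```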
```diff
@@ -64,7 +64,7 @@ def classify_image_and_stats(image_input):
     return overlay_img, stats
 
 
-
+
 def rishigpt_handler(image_input, groq_api_key):
     os.environ["GROQ_API_KEY"] = groq_api_key
 
```
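Writing the key into os.environ makes it visible to LangChain's Groq client, which reads GROQ_API_KEY from the environment. The `chain = prompt | llm` seen in the next hunk is then typically wired up like this (a sketch with a hypothetical prompt and model name, neither of which appears in the diff):

```python
import os
from langchain_core.prompts import ChatPromptTemplate
from langchain_groq import ChatGroq

os.environ["GROQ_API_KEY"] = "gsk_..."  # supplied by the user via the Gradio textbox

llm = ChatGroq(model="llama3-70b-8192")  # hypothetical model choice
prompt = ChatPromptTemplate.from_template(
    "Explain this brain MRI segmentation result to the user: {result}"
)
chain = prompt | llm  # LCEL pipe: the rendered prompt feeds the LLM
final_text = chain.invoke({"result": "Tumor Detected"}).content.strip()
```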
```diff
@@ -110,14 +110,14 @@ def rishigpt_handler(image_input, groq_api_key):
     chain = prompt | llm
     final_text = chain.invoke({"result": classification}).content.strip()
 
-
+
     displayed_text = ""
     for char in final_text:
         displayed_text += char
         time.sleep(0.015)
         yield overlay_img, displayed_text
 
-
+
 inputs = [
     gr.Image(type="numpy", label="Upload Brain MRI Slice"),
     gr.Textbox(type="password", label="Groq API Key")
```
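The character loop gives a typewriter effect: because rishigpt_handler is a generator, each yield pushes a fresh (image, text) pair to the Gradio frontend, at about 15 ms per character. Gradio treats generator functions as streaming handlers, so the same pattern works standalone (a minimal sketch, not the app's actual interface definition):

```python
import time
import gradio as gr

def typewriter(message):
    # Each yield re-renders the output, streaming one character at a time.
    shown = ""
    for char in message:
        shown += char
        time.sleep(0.015)
        yield shown

demo = gr.Interface(fn=typewriter,
                    inputs=gr.Textbox(label="Message"),
                    outputs=gr.Textbox(label="Streamed reply"))

if __name__ == "__main__":
    demo.launch()
```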