Phauglin committed on
Commit
2e3d76a
·
verified ·
1 Parent(s): 7e6769e

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +32 -38
app.py CHANGED
@@ -1,13 +1,14 @@
1
- from fastai.vision.all import *
2
- import gradio as gr
3
  import fal_client
4
- from PIL import Image
5
  import io
 
 
6
  import random
7
  import requests
 
 
 
8
  from pathlib import Path
9
- import openai
10
- import os
11
 
12
  # Dictionary of plant names and their Wikipedia links
13
  search_terms_wikipedia = {
@@ -68,6 +69,12 @@ flowers_endangerment = {
68
  "goldfields coreopsis": "Varies by species; many not endangered."
69
  }
70
 
 
 
 
 
 
 
71
  # Templates for AI image generation
72
  prompt_templates = [
73
  "A dreamy watercolor scene of a {flower} on a misty morning trail, with golden sunbeams filtering through towering redwoods, and a curious hummingbird hovering nearby.",
@@ -84,42 +91,31 @@ example_images = [
84
  str(Path('example_images/example_3.jpg')),
85
  str(Path('example_images/example_4.jpg')),
86
  str(Path('example_images/example_5.jpg'))
87
-
88
  ]
89
 
 
90
  # Function to handle AI generation progress updates
91
  def on_queue_update(update):
92
  if isinstance(update, fal_client.InProgress):
93
  for log in update.logs:
94
- print(log["message"])
95
-
96
 
97
 
98
- def get_status(flower_name):
99
- """Return the endangerment status of a given flower name."""
100
- # Normalize input for dictionary lookup
101
- normalized_name = flower_name.title()
102
- return flowers_endangerment.get(normalized_name, "Flower not found in database.")
103
-
104
-
105
-
106
-
107
- # Main function to process the uploaded image
108
  # Main function to process the uploaded image
109
  def process_image(img):
110
  print("Starting prediction...")
111
  predicted_class, _, probs = learn.predict(img)
112
  print(f"Prediction complete: {predicted_class}")
113
-
114
  classification_results = dict(zip(learn.dls.vocab, map(float, probs)))
115
-
116
  # Get Wikipedia link
117
  wiki_url = search_terms_wikipedia.get(predicted_class, "No Wikipedia entry found.")
118
-
119
  # Get endangerment status
120
  endangerment_status = get_status(predicted_class)
121
  print(f"Status found: {endangerment_status}")
122
-
123
  # Generate artistic interpretation using DALL-E
124
  print("Sending request to DALL-E...")
125
  try:
@@ -130,19 +126,19 @@ def process_image(img):
130
  quality="standard",
131
  n=1,
132
  )
133
-
134
  # Get the image URL
135
  image_url = response.data[0].url
136
  print(f"Image URL: {image_url}")
137
-
138
  # Download the generated image
139
  response = requests.get(image_url)
140
  generated_image = Image.open(io.BytesIO(response.content))
141
-
142
  except Exception as e:
143
  print(f"Error generating image: {e}")
144
  generated_image = None
145
-
146
  print("Image retrieved and ready to return")
147
  return classification_results, generated_image, wiki_url, endangerment_status
148
 
@@ -165,9 +161,8 @@ with gr.Blocks() as demo:
165
  # Input section
166
  with gr.Row():
167
  input_image = gr.Image(height=230, width=230, label="Upload Image for Classification", type="pil")
168
-
169
  # Output section
170
- # Output section
171
  with gr.Row():
172
  with gr.Column():
173
  label_output = gr.Label(label="Classification Results")
@@ -177,25 +172,24 @@ with gr.Blocks() as demo:
177
 
178
  # Add example images using local paths
179
  gr.Examples(
180
- examples=example_images,
181
- inputs=input_image,
182
- examples_per_page=6,
183
- fn=process_image,
184
- outputs=[label_output, generated_image, wiki_output, status_output] # ← UPDATED
185
- )
186
 
187
  input_image.change(
188
  fn=process_image,
189
  inputs=input_image,
190
  outputs=[label_output, generated_image, wiki_output, status_output] # ← UPDATED
191
- )
192
 
193
  input_image.clear(
194
  fn=clear_outputs,
195
  inputs=[],
196
  outputs=[label_output, generated_image, wiki_output, status_output] # ← UPDATED
197
- )
198
-
199
 
200
  # Start the application
201
- demo.launch(inline=False)
 
 
 
1
  import fal_client
2
+ import gradio as gr
3
  import io
4
+ import openai
5
+ import os
6
  import random
7
  import requests
8
+ from PIL import Image
9
+ from dotenv import load_dotenv
10
+ from fastai.vision.all import *
11
  from pathlib import Path
 
 
12
 
13
  # Dictionary of plant names and their Wikipedia links
14
  search_terms_wikipedia = {
 
69
  "goldfields coreopsis": "Varies by species; many not endangered."
70
  }
71
 
72
+
73
def get_status(flower_name):
    """Return the endangerment status of a given flower name.

    Looks the name up in the module-level ``flowers_endangerment`` dict.
    Tries the name exactly as given first (backward compatible), then falls
    back to a lowercase lookup so classifier output with different casing
    still matches the lowercase keys used in the database
    (e.g. "goldfields coreopsis").

    Args:
        flower_name: Predicted flower name (a ``str``).

    Returns:
        The recorded status string, or a "not found" message when the
        name is absent from the database.
    """
    fallback = "Flower not found in database."
    # Exact match wins; lowercase fallback handles casing mismatches.
    return flowers_endangerment.get(
        flower_name,
        flowers_endangerment.get(flower_name.lower(), fallback),
    )
76
+
77
+
78
  # Templates for AI image generation
79
  prompt_templates = [
80
  "A dreamy watercolor scene of a {flower} on a misty morning trail, with golden sunbeams filtering through towering redwoods, and a curious hummingbird hovering nearby.",
 
91
  str(Path('example_images/example_3.jpg')),
92
  str(Path('example_images/example_4.jpg')),
93
  str(Path('example_images/example_5.jpg'))
 
94
  ]
95
 
96
+
97
  # Function to handle AI generation progress updates
98
def on_queue_update(update):
    """Relay progress logs from an in-flight fal.ai generation job to stdout.

    Only ``fal_client.InProgress`` updates carry logs; every other update
    type is ignored.
    """
    if not isinstance(update, fal_client.InProgress):
        return
    for entry in update.logs:
        print(entry["message"])
 
102
 
103
 
 
 
 
 
 
 
 
 
 
 
104
  # Main function to process the uploaded image
105
  def process_image(img):
106
  print("Starting prediction...")
107
  predicted_class, _, probs = learn.predict(img)
108
  print(f"Prediction complete: {predicted_class}")
109
+
110
  classification_results = dict(zip(learn.dls.vocab, map(float, probs)))
111
+
112
  # Get Wikipedia link
113
  wiki_url = search_terms_wikipedia.get(predicted_class, "No Wikipedia entry found.")
114
+
115
  # Get endangerment status
116
  endangerment_status = get_status(predicted_class)
117
  print(f"Status found: {endangerment_status}")
118
+
119
  # Generate artistic interpretation using DALL-E
120
  print("Sending request to DALL-E...")
121
  try:
 
126
  quality="standard",
127
  n=1,
128
  )
129
+
130
  # Get the image URL
131
  image_url = response.data[0].url
132
  print(f"Image URL: {image_url}")
133
+
134
  # Download the generated image
135
  response = requests.get(image_url)
136
  generated_image = Image.open(io.BytesIO(response.content))
137
+
138
  except Exception as e:
139
  print(f"Error generating image: {e}")
140
  generated_image = None
141
+
142
  print("Image retrieved and ready to return")
143
  return classification_results, generated_image, wiki_url, endangerment_status
144
 
 
161
  # Input section
162
  with gr.Row():
163
  input_image = gr.Image(height=230, width=230, label="Upload Image for Classification", type="pil")
164
+
165
  # Output section
 
166
  with gr.Row():
167
  with gr.Column():
168
  label_output = gr.Label(label="Classification Results")
 
172
 
173
    # Example images (local paths) shown below the input widget; clicking one
    # feeds it through the same process_image pipeline as an upload.
    gr.Examples(
        examples=example_images,
        inputs=input_image,
        examples_per_page=6,
        fn=process_image,
        outputs=[label_output, generated_image, wiki_output, status_output]
    )

    # Re-run the full pipeline whenever the uploaded image changes.
    input_image.change(
        fn=process_image,
        inputs=input_image,
        outputs=[label_output, generated_image, wiki_output, status_output]
    )

    # Reset all four output widgets when the input image is cleared.
    # NOTE(review): clear_outputs is defined elsewhere in app.py — presumably
    # it returns four empty values; confirm it matches this outputs list.
    input_image.clear(
        fn=clear_outputs,
        inputs=[],
        outputs=[label_output, generated_image, wiki_output, status_output]
    )


# Start the application (inline=False: open in a browser tab rather than
# embedding in a notebook cell).
demo.launch(inline=False)