Spaces: Runtime error
Delete App.py
App.py
DELETED
@@ -1,91 +0,0 @@
-import numpy as np
-import gradio as gr
-import os
-import PIL
-import PIL.Image
-import tensorflow as tf
-import tensorflow_datasets as tfds
-
-import pathlib
-dataset_url = "https://storage.googleapis.com/download.tensorflow.org/example_images/flower_photos.tgz"
-data_dir = tf.keras.utils.get_file(origin=dataset_url,
-                                   fname='flower_photos',
-                                   untar=True)
-data_dir = pathlib.Path(data_dir)
-batch_size = 32
-img_height = 180
-img_width = 180
-
-train_ds = tf.keras.utils.image_dataset_from_directory(
-  data_dir,
-  validation_split=0.2,
-  subset="training",
-  seed=123,
-  image_size=(img_height, img_width),
-  batch_size=batch_size)
-val_ds = tf.keras.utils.image_dataset_from_directory(
-  data_dir,
-  validation_split=0.2,
-  subset="validation",
-  seed=123,
-  image_size=(img_height, img_width),
-  batch_size=batch_size)
-
-class_names = train_ds.class_names
-#print(class_names)
-
-normalization_layer = tf.keras.layers.Rescaling(1./255)
-normalized_ds = train_ds.map(lambda x, y: (normalization_layer(x), y))
-image_batch, labels_batch = next(iter(normalized_ds))
-first_image = image_batch[0]
-# Notice the pixel values are now in `[0,1]`.
-#print(np.min(first_image), np.max(first_image))
-
-AUTOTUNE = tf.data.AUTOTUNE
-
-train_ds = train_ds.cache().prefetch(buffer_size=AUTOTUNE)
-val_ds = val_ds.cache().prefetch(buffer_size=AUTOTUNE)
-
-num_classes = 5
-
-model = tf.keras.Sequential([
-  tf.keras.layers.Rescaling(1./255),
-  tf.keras.layers.Conv2D(32, 3, activation='relu'),
-  tf.keras.layers.MaxPooling2D(),
-  tf.keras.layers.Dropout(0.4),
-  tf.keras.layers.Conv2D(32, 3, activation='relu'),
-  tf.keras.layers.MaxPooling2D(),
-  tf.keras.layers.Dropout(0.4),
-  tf.keras.layers.Conv2D(32, 3, activation='relu'),
-  tf.keras.layers.MaxPooling2D(),
-  tf.keras.layers.Flatten(),
-  tf.keras.layers.Dense(256, activation='relu'),
-  tf.keras.layers.Dense(num_classes, activation="softmax")
-])
-model.compile(
-  optimizer='adam',
-  loss='SparseCategoricalCrossentropy',
-  metrics=['accuracy'])
-
-model.fit(
-  train_ds,
-  validation_data=val_ds,
-  epochs=5
-)
-
-def predict_input_image(img):
-    img_4d=img.reshape(-1,180,180,3)
-    prediction=model.predict(img_4d)[0]
-
-    return {class_names[i]: float(prediction[i]*0.100) for i in range(5)}
-
-
-
-
-
-
-image = gr.inputs.Image(shape=(180,180))
-label =gr.outputs.Label(num_top_classes=5)
-
-gr.Interface(fn=predict_input_image, inputs=image, outputs=label,title="Flowers Image classification").launch()
-#pt
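The "Runtime error" status above is consistent with the deleted script's use of the legacy Gradio API: gr.inputs.Image, gr.outputs.Label, and the Image shape= argument were deprecated in Gradio 3.x and removed in 4.x, so this code likely fails on a Space that installs a current Gradio release. The prediction[i]*0.100 factor also shrinks the softmax probabilities rather than passing them through. A minimal sketch of how the prediction and interface code could look on current Gradio, assuming model and class_names are built exactly as in the deleted script (this sketch is not part of the commit):

import gradio as gr
import tensorflow as tf

# Assumes `model` and `class_names` already exist, as built in the deleted script.
def predict_input_image(img):
    # Current gr.Image has no shape= argument, so resize inside the function.
    img = tf.image.resize(img, (180, 180))
    img_4d = tf.expand_dims(img, 0)  # batch of one: (1, 180, 180, 3)
    prediction = model.predict(img_4d)[0]
    # Softmax outputs are already probabilities in [0, 1]; no extra scaling needed.
    return {class_names[i]: float(prediction[i]) for i in range(len(class_names))}

demo = gr.Interface(
    fn=predict_input_image,
    inputs=gr.Image(type="numpy"),
    outputs=gr.Label(num_top_classes=5),
    title="Flowers Image classification",
)
demo.launch()

gr.Label expects confidences in [0, 1], so the raw softmax outputs can be returned directly; resizing inside the prediction function replaces the removed shape= argument.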