Spaces: Build error

Remove current fastapi files
Jon Solow committed · Commit fe1c3a3 · Parent(s): e35ad6d

Files changed:
- src/config.py   +0 -4
- src/handler.py  +0 -39
- src/labels.txt  +0 -101
- src/main.py     +0 -44
- src/model.py    +0 -62
- src/predict.py  +0 -10
src/config.py  DELETED
@@ -1,4 +0,0 @@
-import os
-
-MODEL_HDF5_PATH = os.getenv("MODEL_HDF5_PATH")
-
src/handler.py  DELETED
@@ -1,39 +0,0 @@
-from fastapi import UploadFile
-from skimage.io import imread
-from io import BytesIO
-import numpy as np
-import urllib
-from tensorflow.keras.preprocessing.image import array_to_img, img_to_array
-
-from PIL import Image
-
-def preprocess(img: np.ndarray) -> np.ndarray:
-    img = array_to_img(img, scale=False)
-    img = img.resize((224, 224))
-    img = img_to_array(img)
-    return img / 255.0
-
-
-def handle_url(url: str) -> np.ndarray:
-    try:
-        img_data = imread(url)
-    except Exception:
-        req = urllib.request.Request(url, headers={"User-Agent": "Magic Browser"})
-        con = urllib.request.urlopen(req)
-        img_data = imread(con)
-    processed_img = preprocess(img_data)
-    img_array = np.array([processed_img])
-    return img_array
-
-
-def read_imagefile(file):
-    file_bytes = BytesIO(file)
-    image = Image.open(file_bytes)
-    return image
-
-
-def handle_file(file: UploadFile) -> np.ndarray:
-    img_data = read_imagefile(file)
-    processed_img = preprocess(img_data)
-    img_array = np.array([processed_img])
-    return img_array
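
For reference, a minimal sketch of the contract the removed handlers implemented: handle_url fetched an image, resized it to 224x224, scaled pixel values into [0, 1], and returned it as a single-item batch. The import and image URL below are hypothetical and assume src/handler.py above were still importable.

    # hypothetical usage sketch; assumes the removed src/handler.py is on the path
    from handler import handle_url

    batch = handle_url("https://example.com/ramen.jpg")  # illustrative URL, not from the repo
    print(batch.shape)  # (1, 224, 224, 3), values scaled into [0, 1]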
src/labels.txt  DELETED
@@ -1,101 +0,0 @@
-Apple pie
-Baby back ribs
-Baklava
-Beef carpaccio
-Beef tartare
-Beet salad
-Beignets
-Bibimbap
-Bread pudding
-Breakfast burrito
-Bruschetta
-Caesar salad
-Cannoli
-Caprese salad
-Carrot cake
-Ceviche
-Cheesecake
-Cheese plate
-Chicken curry
-Chicken quesadilla
-Chicken wings
-Chocolate cake
-Chocolate mousse
-Churros
-Clam chowder
-Club sandwich
-Crab cakes
-Creme brulee
-Croque madame
-Cup cakes
-Deviled eggs
-Donuts
-Dumplings
-Edamame
-Eggs benedict
-Escargots
-Falafel
-Filet mignon
-Fish and chips
-Foie gras
-French fries
-French onion soup
-French toast
-Fried calamari
-Fried rice
-Frozen yogurt
-Garlic bread
-Gnocchi
-Greek salad
-Grilled cheese sandwich
-Grilled salmon
-Guacamole
-Gyoza
-Hamburger
-Hot and sour soup
-Hot dog
-Huevos rancheros
-Hummus
-Ice cream
-Lasagna
-Lobster bisque
-Lobster roll sandwich
-Macaroni and cheese
-Macarons
-Miso soup
-Mussels
-Nachos
-Omelette
-Onion rings
-Oysters
-Pad thai
-Paella
-Pancakes
-Panna cotta
-Peking duck
-Pho
-Pizza
-Pork chop
-Poutine
-Prime rib
-Pulled pork sandwich
-Ramen
-Ravioli
-Red velvet cake
-Risotto
-Samosa
-Sashimi
-Scallops
-Seaweed salad
-Shrimp and grits
-Spaghetti bolognese
-Spaghetti carbonara
-Spring rolls
-Steak
-Strawberry shortcake
-Sushi
-Tacos
-Takoyaki
-Tiramisu
-Tuna tartare
-Waffles
src/main.py  DELETED
@@ -1,44 +0,0 @@
-from fastapi import FastAPI, UploadFile, File, HTTPException, status, Header
-from fastapi.responses import RedirectResponse
-
-from handler import handle_file, handle_url
-from predict import predict_model
-
-
-app = FastAPI()
-
-@app.get("/")
-async def docs_redirect():
-    return RedirectResponse(url='/docs')
-
-
-@app.get("/predict_url")
-async def predict_url(url: str):
-    model_input = handle_url(url)
-    model_output = predict_model(model_input)
-    return model_output
-
-
-@app.get("/healthcheck")
-async def healthcheck():
-    return {"status": "alive"}
-
-
-def validate_image_content(content_type: str = Header(...)):
-    """Require request MIME-type to be image/*"""
-
-    content_main_type = content_type.split("/")[0]
-    if content_main_type != "image":
-        raise HTTPException(
-            status.HTTP_415_UNSUPPORTED_MEDIA_TYPE,
-            f"Unsupported media type: {content_type}."
-            " It must be image/",
-        )
-
-
-@app.post("/predict_file")
-async def predict_file(upload_file: UploadFile = File(...)):
-    validate_image_content(upload_file.content_type)
-    model_input = handle_file(await upload_file.read())
-    model_output = predict_model(model_input)
-    return model_output
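
For reference, a minimal sketch of how the removed endpoints could have been exercised over HTTP. The host, port, filename, and image URL below are illustrative assumptions, not taken from the commit, and it presumes the app above were still deployed.

    # hypothetical client sketch for the removed FastAPI app
    import requests

    BASE = "http://localhost:8000"  # assumed local deployment, not from the repo

    # GET /predict_url took an image URL and returned the model's top guesses
    print(requests.get(f"{BASE}/predict_url", params={"url": "https://example.com/pizza.jpg"}).json())

    # POST /predict_file accepted an image upload; non-image content types were rejected with HTTP 415
    with open("pizza.jpg", "rb") as f:  # illustrative local file
        files = {"upload_file": ("pizza.jpg", f, "image/jpeg")}
        print(requests.post(f"{BASE}/predict_file", files=files).json())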
src/model.py  DELETED
@@ -1,62 +0,0 @@
-import numpy as np
-from tensorflow.keras.utils import get_file
-from config import MODEL_HDF5_PATH
-
-
-with open("labels.txt", "r") as f:
-    LABELS = list(filter(None, f.read().split("\n")))
-
-
-
-def initialize_model():
-    # import the necessary packages
-    from tensorflow.keras.models import Sequential
-    from tensorflow.keras.layers import BatchNormalization
-    from tensorflow.keras.layers import Conv2D, MaxPooling2D
-    from tensorflow.keras.layers import Flatten, Dropout, Dense
-
-    # CONV => RELU => POOL
-    cnn = Sequential()
-    inputShape = (224, 224, 3)
-    chanDim = -1
-    classes = 101
-    # Sequence of Convolution (scan filters), BatchNormalization (normalize numbers),
-    # MaxPooling (shrink tensor down), Dropout (prevent overfit)
-    cnn.add(
-        Conv2D(32, (3, 3), padding="same", input_shape=inputShape, activation="relu")
-    )
-    cnn.add(BatchNormalization(axis=chanDim))
-    cnn.add(MaxPooling2D(pool_size=(3, 3)))
-    cnn.add(Dropout(rate=0.25))
-    cnn.add(Conv2D(64, (3, 3), padding="same", activation="relu"))
-    cnn.add(BatchNormalization(axis=chanDim))
-    cnn.add(Conv2D(64, (3, 3), padding="same", activation="relu"))
-    cnn.add(BatchNormalization(axis=chanDim))
-    cnn.add(MaxPooling2D(pool_size=(2, 2)))
-    cnn.add(Dropout(rate=0.25))
-    cnn.add(Conv2D(128, (3, 3), padding="same", activation="relu"))
-    cnn.add(BatchNormalization(axis=chanDim))
-    cnn.add(Conv2D(128, (3, 3), padding="same", activation="relu"))
-    cnn.add(BatchNormalization(axis=chanDim))
-    cnn.add(MaxPooling2D(pool_size=(2, 2)))
-    cnn.add(Dropout(rate=0.25))
-    cnn.add(Flatten())
-    cnn.add(Dense(1024, activation="relu"))
-    cnn.add(BatchNormalization())
-    cnn.add(Dropout(rate=0.5))
-    # softmax classifier
-    cnn.add(Dense(classes, activation="softmax"))
-
-    return cnn
-
-
-CNN = initialize_model()
-
-
-CNN.load_weights(
-    get_file(
-        "weights.hdf5",
-        MODEL_HDF5_PATH,
-        cache_dir="."
-    )
-)
src/predict.py  DELETED
@@ -1,10 +0,0 @@
-from model import CNN, LABELS
-
-
-
-def predict_model(img_array):
-    class_prob = CNN.predict(img_array)
-    top_values_index = (-class_prob).argsort()[0][:10]
-    top_guesses = [LABELS[i].title() for i in top_values_index]
-
-    return top_guesses
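
For reference, a minimal sketch of the interface predict_model exposed: it ran the CNN from src/model.py (a 101-way softmax over 224x224x3 inputs) on a preprocessed batch and returned the ten highest-probability labels, title-cased. The random input below is only illustrative and assumes the removed modules were still importable with valid weights available via MODEL_HDF5_PATH.

    # hypothetical usage sketch; assumes src/model.py, src/predict.py and the weights are still available
    import numpy as np
    from predict import predict_model

    dummy_batch = np.random.rand(1, 224, 224, 3).astype("float32")  # stands in for a preprocessed image
    top_guesses = predict_model(dummy_batch)
    print(top_guesses)  # list of 10 label strings drawn from src/labels.txt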