Commit 9c300d3
ferdmartin committed
1 Parent(s): 63a9462
init
Files changed:
- .gitattributes +3 -0
- App/Dockerfile +10 -0
- App/GDownload.py +30 -0
- App/__pycache__/GDownload.cpython-39.pyc +0 -0
- App/app.py +96 -0
- App/classes_dict.json +120 -0
- App/dog_icon.png +0 -0
- App/requirements.txt +5 -0
- pechpoints_weights/checkpoint +2 -0
- pechpoints_weights/checkpoint.ckpt.data-00000-of-00001 +3 -0
- pechpoints_weights/checkpoint.ckpt.index +0 -0
- requirements.txt +3 -0
- saved_models/FerNetEfficientNetB2/keras_metadata.pb +3 -0
- saved_models/FerNetEfficientNetB2/saved_model.pb +3 -0
- saved_models/FerNetEfficientNetB2/variables/variables.data-00000-of-00001 +3 -0
- saved_models/FerNetEfficientNetB2/variables/variables.index +0 -0
- saved_models/TsinghuaFerNet_EfficientNetB2/keras_metadata.pb +3 -0
- saved_models/TsinghuaFerNet_EfficientNetB2/saved_model.pb +3 -0
- saved_models/TsinghuaFerNet_EfficientNetB2/variables/variables.data-00000-of-00001 +3 -0
- saved_models/TsinghuaFerNet_EfficientNetB2/variables/variables.index +0 -0
.gitattributes CHANGED
@@ -32,3 +32,6 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
 *.zip filter=lfs diff=lfs merge=lfs -text
 *.zst filter=lfs diff=lfs merge=lfs -text
 *tfevents* filter=lfs diff=lfs merge=lfs -text
+pechpoints_weights/checkpoint.ckpt.data-00000-of-00001 filter=lfs diff=lfs merge=lfs -text
+saved_models/FerNetEfficientNetB2/variables/variables.data-00000-of-00001 filter=lfs diff=lfs merge=lfs -text
+saved_models/TsinghuaFerNet_EfficientNetB2/variables/variables.data-00000-of-00001 filter=lfs diff=lfs merge=lfs -text
App/Dockerfile ADDED
@@ -0,0 +1,10 @@
+FROM python:3
+RUN pip install flask
+RUN pip install requests
+RUN pip install gcloud
+RUN pip install --upgrade google-cloud-translate
+EXPOSE 5003/tcp
+COPY app.py .
+COPY fmartinezlopezGTranslateKey.json .
+COPY templates/index.html templates/
+ENTRYPOINT [ "python3", "app.py" ]
App/GDownload.py ADDED
@@ -0,0 +1,30 @@
+import requests
+
+def download_file_from_google_drive(id, destination):
+    URL = "https://docs.google.com/uc?export=download"
+
+    session = requests.Session()
+
+    response = session.get(URL, params={'id': id}, stream=True)
+    token = get_confirm_token(response)
+
+    if token:
+        params = {'id': id, 'confirm': token}
+        response = session.get(URL, params=params, stream=True)
+
+    save_response_content(response, destination)
+
+def get_confirm_token(response):
+    for key, value in response.cookies.items():
+        if key.startswith('download_warning'):
+            return value
+
+    return None
+
+def save_response_content(response, destination):
+    CHUNK_SIZE = 32768
+
+    with open(destination, "wb") as f:
+        for chunk in response.iter_content(CHUNK_SIZE):
+            if chunk:  # filter out keep-alive new chunks
+                f.write(chunk)
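A minimal usage sketch for the helper above. The Drive file id below is the one referenced in the commented-out download code in App/app.py; the destination path is illustrative and not part of this commit:

    from pathlib import Path
    from GDownload import download_file_from_google_drive

    # Illustrative only: file id taken from the commented-out code in App/app.py;
    # the destination folder/filename are assumptions.
    Path("saved_model").mkdir(exist_ok=True)
    download_file_from_google_drive("1-q1R5dLfIFW7BbzKuYTjolAoqpjVClsb",
                                    "saved_model/FerNet_EfficientNet.h5")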
App/__pycache__/GDownload.cpython-39.pyc ADDED
Binary file (1.04 kB)
App/app.py ADDED
@@ -0,0 +1,96 @@
+#!streamlit/bin/python
+import streamlit as st
+from pathlib import Path
+import pandas as pd
+import numpy as np
+import tensorflow as tf
+from PIL import Image
+from io import BytesIO
+import json
+#from GDownload import download_file_from_google_drive
+
+@st.cache(allow_output_mutation=True)
+def load_model():
+    # if selected_model == 'PVAN-Stanford':
+    #     model_location = '1-q1R5dLfIFW7BbzKuYTjolAoqpjVClsb'
+    #     save_dest = Path('saved_model')
+    #     save_dest.mkdir(exist_ok=True)
+    #     saved_model = Path("saved_model/FerNet_EfficientNet.h5")
+
+    # elif selected_model == 'PVAN-Tsinghua':
+    #     model_location = '1-q1R5dLfIFW7BbzKuYTjolAoqpjVClsb'
+    #     save_dest = Path('saved_model')
+    #     save_dest.mkdir(exist_ok=True)
+    #     saved_model = Path("saved_model/FerNet_EfficientNet.h5")
+
+    # if not saved_model.exists():
+    #     download_file_from_google_drive(model_location, saved_model)
+    saved_model = str(Path().parent.absolute()) + "/saved_models/FerNetEfficientNetB2"
+    saved_model = tf.keras.models.load_model(saved_model)
+    return saved_model
+
+@st.cache
+def load_classes():
+    with open(str(Path().parent.absolute()) + '/App/classes_dict.json') as classes:
+        class_names = json.load(classes)
+    return class_names
+
+def load_and_prep_image(filename, img_shape=260):
+    #img = tf.io.read_file(filename)
+    img = np.array(filename)  #tf.io.decode_image(filename, channels=3)
+    # Resize our image
+    img = tf.image.resize(img, [img_shape, img_shape])
+    # Scale
+    return img  # no need to rescale images for EfficientNet models in TensorFlow
+
+if __name__ == '__main__':
+
+    hide_st_style = """
+            <style>
+            footer {visibility: hidden;}
+            header {visibility: hidden;}
+            </style>
+            """
+    st.markdown(hide_st_style, unsafe_allow_html=True)
+
+    st.title("Dog Breeds Detector")
+
+    options = ['PVAN-Stanford', 'PVAN-Tsinghua']
+    selected_model = st.selectbox('Select a model to use (Default: PVAN-Stanford):', options)
+
+    saved_model = load_model()
+    class_names = load_classes()
+
+    st.write("Choose any dog image and get the corresponding breed:")
+
+    uploaded_image = st.file_uploader("Choose an image...")
+
+    if uploaded_image:
+        uploaded_image = Image.open(uploaded_image)
+        # try:
+        uploaded_image = uploaded_image.convert("RGB")
+        membuf = BytesIO()
+        uploaded_image.save(membuf, format="jpeg")
+        uploaded_image = Image.open(membuf)
+        # finally:
+
+
+        image_for_the_model = load_and_prep_image(uploaded_image)
+        prediction = saved_model.predict(tf.expand_dims(image_for_the_model, axis=0), verbose=0)
+
+        top_k_proba, top_k_indices = tf.nn.top_k(prediction, k=5)
+        top_5_classes = {top_n+1: class_names[str(top_k)] for top_n, top_k in enumerate(list(tf.squeeze(top_k_indices).numpy()))}
+        top_k_proba = tf.squeeze(top_k_proba).numpy()
+        top_5_classes = pd.DataFrame({"Top-k": top_5_classes.keys(), "Dog Breed": top_5_classes.values(), "Probability": top_k_proba})
+        #top_5_classes.set_index("Top-k", inplace=True)
+
+        print(tf.argmax(prediction, axis=1).numpy())
+        predicted_breed = class_names[str(tf.argmax(prediction, axis=1).numpy()[0])]
+        predicted_breed = ' '.join(predicted_breed.split('_'))
+        predicted_breed = predicted_breed.title()
+        st.header(f'This dog looks like a {predicted_breed}')
+
+        col1, col2 = st.columns([1, 2])
+
+        col1.image(uploaded_image, use_column_width=True)
+        col2.bar_chart(top_5_classes, x="Dog Breed", y="Probability")
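For quick verification outside Streamlit, the same prediction path can be exercised directly. A minimal sketch, assuming the repository layout from this commit; "test.jpg" is a hypothetical input image, not part of the commit:

    import json
    import numpy as np
    import tensorflow as tf
    from PIL import Image

    # Load the SavedModel and label map shipped in this commit.
    model = tf.keras.models.load_model("saved_models/FerNetEfficientNetB2")
    with open("App/classes_dict.json") as f:
        class_names = json.load(f)

    # "test.jpg" is a hypothetical local image used only for illustration.
    img = np.array(Image.open("test.jpg").convert("RGB"))
    img = tf.image.resize(img, [260, 260])  # same 260x260 input size as load_and_prep_image
    pred = model.predict(tf.expand_dims(img, axis=0))
    print(class_names[str(int(tf.argmax(pred, axis=1)[0]))])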
App/classes_dict.json ADDED
@@ -0,0 +1,120 @@
+{"0": "Chihuahua",
+"1": "Japanese Spaniel",
+"2": "Maltese Dog",
+"3": "Pekinese",
+"4": "Shih Tzu",
+"5": "Blenheim Spaniel",
+"6": "Papillon",
+"7": "Toy Terrier",
+"8": "Rhodesian Ridgeback",
+"9": "Afghan Hound",
+"10": "Basset",
+"11": "Beagle",
+"12": "Bloodhound",
+"13": "Bluetick",
+"14": "Black And Tan Coonhound",
+"15": "Walker Hound",
+"16": "English Foxhound",
+"17": "Redbone",
+"18": "Borzoi",
+"19": "Irish Wolfhound",
+"20": "Italian Greyhound",
+"21": "Whippet",
+"22": "Ibizan Hound",
+"23": "Norwegian Elkhound",
+"24": "Otterhound",
+"25": "Saluki",
+"26": "Scottish Deerhound",
+"27": "Weimaraner",
+"28": "Staffordshire Bullterrier",
+"29": "American Staffordshire Terrier",
+"30": "Bedlington Terrier",
+"31": "Border Terrier",
+"32": "Kerry Blue Terrier",
+"33": "Irish Terrier",
+"34": "Norfolk Terrier",
+"35": "Norwich Terrier",
+"36": "Yorkshire Terrier",
+"37": "Wire Haired Fox Terrier",
+"38": "Lakeland Terrier",
+"39": "Sealyham Terrier",
+"40": "Airedale",
+"41": "Cairn",
+"42": "Australian Terrier",
+"43": "Dandie Dinmont",
+"44": "Boston Bull",
+"45": "Miniature Schnauzer",
+"46": "Giant Schnauzer",
+"47": "Standard Schnauzer",
+"48": "Scotch Terrier",
+"49": "Tibetan Terrier",
+"50": "Silky Terrier",
+"51": "Soft Coated Wheaten Terrier",
+"52": "West Highland White Terrier",
+"53": "Lhasa",
+"54": "Flat Coated Retriever",
+"55": "Curly Coated Retriever",
+"56": "Golden Retriever",
+"57": "Labrador Retriever",
+"58": "Chesapeake Bay Retriever",
+"59": "German Short Haired Pointer",
+"60": "Vizsla",
+"61": "English Setter",
+"62": "Irish Setter",
+"63": "Gordon Setter",
+"64": "Brittany Spaniel",
+"65": "Clumber",
+"66": "English Springer",
+"67": "Welsh Springer Spaniel",
+"68": "Cocker Spaniel",
+"69": "Sussex Spaniel",
+"70": "Irish Water Spaniel",
+"71": "Kuvasz",
+"72": "Schipperke",
+"73": "Groenendael",
+"74": "Malinois",
+"75": "Briard",
+"76": "Kelpie",
+"77": "Komondor",
+"78": "Old English Sheepdog",
+"79": "Shetland Sheepdog",
+"80": "Collie",
+"81": "Border Collie",
+"82": "Bouvier Des Flandres",
+"83": "Rottweiler",
+"84": "German Shepherd",
+"85": "Doberman",
+"86": "Miniature Pinscher",
+"87": "Greater Swiss Mountain Dog",
+"88": "Bernese Mountain Dog",
+"89": "Appenzeller",
+"90": "Entlebucher",
+"91": "Boxer",
+"92": "Bull Mastiff",
+"93": "Tibetan Mastiff",
+"94": "French Bulldog",
+"95": "Great Dane",
+"96": "Saint Bernard",
+"97": "Eskimo Dog",
+"98": "Malamute",
+"99": "Siberian Husky",
+"100": "Affenpinscher",
+"101": "Basenji",
+"102": "Pug",
+"103": "Leonberg",
+"104": "Newfoundland",
+"105": "Great Pyrenees",
+"106": "Samoyed",
+"107": "Pomeranian",
+"108": "Chow",
+"109": "Keeshond",
+"110": "Brabancon Griffon",
+"111": "Pembroke",
+"112": "Cardigan",
+"113": "Toy Poodle",
+"114": "Miniature Poodle",
+"115": "Standard Poodle",
+"116": "Mexican Hairless",
+"117": "Dingo",
+"118": "Dhole",
+"119": "African Hunting Dog"}
App/dog_icon.png ADDED
App/requirements.txt ADDED
@@ -0,0 +1,5 @@
+numpy
+pandas
+streamlit
+tensorflow
+
pechpoints_weights/checkpoint ADDED
@@ -0,0 +1,2 @@
+model_checkpoint_path: "checkpoint.ckpt"
+all_model_checkpoint_paths: "checkpoint.ckpt"
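The checkpoint listing above points at TensorFlow-format weight files (the .data and .index entries that follow). A minimal sketch of restoring them, under the assumption, not confirmed by this commit, that they were written by the same architecture as the bundled SavedModel:

    import tensorflow as tf

    # Assumption: the checkpoint matches the architecture of saved_models/FerNetEfficientNetB2;
    # if the variable names differ, load_weights will fail.
    model = tf.keras.models.load_model("saved_models/FerNetEfficientNetB2")
    model.load_weights("pechpoints_weights/checkpoint.ckpt")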
pechpoints_weights/checkpoint.ckpt.data-00000-of-00001 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3ee8e242268d5a3497664f1dc68ab5cc80ca6f64429ec00d7c6a32f27acf9b07
+size 45603105
pechpoints_weights/checkpoint.ckpt.index ADDED
Binary file (33.4 kB)
requirements.txt ADDED
@@ -0,0 +1,3 @@
+numpy
+streamlit
+tensorflow
saved_models/FerNetEfficientNetB2/keras_metadata.pb ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d6a0d1967678bad8dce1d78f1ce16840fd7d2a06b0d364c086473dea90afe4c3
+size 1148928
saved_models/FerNetEfficientNetB2/saved_model.pb ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:758743cea86f94d8c5f78761b1ddde05828242fb1ba74e38a69968298ae408a8
+size 9354845
saved_models/FerNetEfficientNetB2/variables/variables.data-00000-of-00001 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c98328aa6c54cdafef0e2e94a49240321b608f1f804ca8a206eee4f6ce56ef41
+size 60572055
saved_models/FerNetEfficientNetB2/variables/variables.index ADDED
Binary file (37.2 kB)
saved_models/TsinghuaFerNet_EfficientNetB2/keras_metadata.pb ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:7ea16ba8b0d5e80d78080dacd9f37c478c0c95a94a740330fef39a5a1ff2704a
+size 1150682
saved_models/TsinghuaFerNet_EfficientNetB2/saved_model.pb ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3bb4c0a1de90ffd353d28fba2368978dc2d2f2fa89eade379daf28e4ac4594c1
+size 9369225
saved_models/TsinghuaFerNet_EfficientNetB2/variables/variables.data-00000-of-00001 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:cf37b3482cb5b3a3caa365b0bcf6c57eaffe218040af6674e7a3dd335e5f3132
+size 72240319
saved_models/TsinghuaFerNet_EfficientNetB2/variables/variables.index ADDED
Binary file (37.6 kB)