Spaces:
Runtime error
Runtime error
Update app.py
Browse files
app.py
CHANGED
@@ -5,23 +5,67 @@ from io import BytesIO
|
|
5 |
import requests
|
6 |
|
7 |
f = modal.Cls.lookup("casa-interior-hf-v3", "DesignModel")
|
8 |
-
f_gc = modal.Cls.lookup("casa-interior-gc-v2", "GetProduct")
|
9 |
|
10 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
11 |
|
12 |
-
|
13 |
-
|
14 |
-
|
15 |
-
|
16 |
-
|
17 |
-
|
18 |
-
|
|
|
|
|
19 |
|
20 |
-
|
21 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
22 |
|
23 |
-
|
24 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
25 |
|
26 |
def casa_ai_run_tab1(image=None, text=None):
|
27 |
|
@@ -61,18 +105,8 @@ def casa_ai_run_tab3(dict=None):
|
|
61 |
print('Please provide cropped object')
|
62 |
return None
|
63 |
|
64 |
-
|
65 |
-
|
66 |
-
|
67 |
-
result_image = []
|
68 |
-
for r in result_urls:
|
69 |
-
try:
|
70 |
-
response = requests.get(r)
|
71 |
-
img = Image.open(BytesIO(response.content))
|
72 |
-
result_image.append(img)
|
73 |
-
except:
|
74 |
-
pass
|
75 |
-
return result_image
|
76 |
|
77 |
with gr.Blocks() as casa:
|
78 |
title = "Casa-AI Demo"
|
@@ -84,21 +118,6 @@ with gr.Blocks() as casa:
|
|
84 |
inputs = [
|
85 |
gr.Image(sources='upload', type="pil", label="Upload"),
|
86 |
gr.Textbox(label="Room description.")
|
87 |
-
# gr.Dropdown(
|
88 |
-
# label="Room type",
|
89 |
-
# choices=["Living Room", "Bedroom", "Home Office", "Dining Room", "Kitchen", "Bathroom"],
|
90 |
-
# multiselect=False
|
91 |
-
# ),
|
92 |
-
# gr.Dropdown(
|
93 |
-
# label="Interior Style",
|
94 |
-
# choices=["Modern", "Traditional", "Contemporary", "Minimalist", "Rustic", "Industrial", "Scandinavian"],
|
95 |
-
# multiselect=False
|
96 |
-
# ),
|
97 |
-
# gr.Dropdown(
|
98 |
-
# label="Objects",
|
99 |
-
# choices=["Sofa", "Bed", "Table", "Chair", "Fridge", "Dinning table", "Coffee table"],
|
100 |
-
# multiselect=True
|
101 |
-
# ),
|
102 |
]
|
103 |
with gr.Column():
|
104 |
outputs = [gr.Image(label="Generated room image")]
|
@@ -132,16 +151,4 @@ with gr.Blocks() as casa:
|
|
132 |
submit_btn = gr.Button("Find similar products!")
|
133 |
submit_btn.click(casa_ai_run_tab3, inputs=inputs, outputs=outputs)
|
134 |
|
135 |
-
casa.launch()
|
136 |
-
|
137 |
-
# demo = gr.Interface(casa_ai_run,
|
138 |
-
# inputs = [gr.ImageEditor(sources='upload', brush=gr.Brush(colors=["#FFFFFF"]), elem_id="image_upload", type="pil", label="Upload", layers=False, eraser=True, transforms=[]),
|
139 |
-
# gr.Textbox(label="Prompt to design room"),
|
140 |
-
# ],
|
141 |
-
# outputs = [
|
142 |
-
# gr.Image(label="Generated room image"),
|
143 |
-
# ],
|
144 |
-
# title = title,
|
145 |
-
# description = description,
|
146 |
-
# )
|
147 |
-
# demo.launch()
|
|
|
5 |
import requests
|
6 |
|
7 |
# Remote Modal class handle for the interior-design model (NOTE(review):
# `modal.Cls.lookup` is the older lookup API — presumably this deploys against a
# pinned modal version; confirm before upgrading the modal package).
f = modal.Cls.lookup("casa-interior-hf-v3", "DesignModel")
|
|
|
8 |
|
9 |
+
from PIL import Image
|
10 |
+
import requests
|
11 |
+
from io import BytesIO
|
12 |
+
from google.cloud import vision
|
13 |
+
from google.oauth2 import service_account
|
14 |
+
import json
|
15 |
+
import os
|
16 |
+
|
17 |
+
# SECURITY: a full GCP service-account JSON blob — including its private key —
# was previously hard-coded here and committed to source control. That key is
# compromised and MUST be rotated in the GCP console. Credentials are now read
# from an environment variable (e.g. a Hugging Face Space secret) instead of
# being embedded in the code.
#
# GCP_SERVICE_ACCOUNT_JSON: the complete service-account JSON, as a string.
_raw_credentials = os.environ.get("GCP_SERVICE_ACCOUNT_JSON", "")

# `credentials` keeps its original type (a plain dict) so downstream code —
# service_account.Credentials.from_service_account_info(credentials) in
# GetProduct.__init__ — works unchanged. An empty dict means "not configured"
# and will fail loudly when the Vision client is constructed, rather than
# silently shipping a secret in the repo.
credentials = json.loads(_raw_credentials) if _raw_credentials else {}
|
30 |
+
|
31 |
+
class GetProduct:
    """Reverse-image product search backed by Google Cloud Vision web detection.

    Given a cropped product photo (a PIL image), queries the Vision API for
    visually similar images on the web and returns them as PIL images.
    """

    def __init__(self):
        # Authenticate with the module-level service-account `credentials`
        # dict and build the Vision client once, at construction time.
        creds = service_account.Credentials.from_service_account_info(credentials)
        self.client = vision.ImageAnnotatorClient(credentials=creds)

    def inference(self, cropped_image: Image.Image) -> list:
        """Return a list of PIL images visually similar to `cropped_image`.

        May return an empty list when the API finds no matches or every
        candidate URL fails to download/decode.
        """
        annotations = self.annotate_image(cropped_image)
        selected_images = self.report(annotations)

        return selected_images

    def annotate_image(self, image: Image.Image):
        """Run Vision web detection on `image`; returns the WebDetection proto."""

        buffer = BytesIO()
        # JPEG cannot store an alpha channel, so drop it before encoding.
        if image.mode == 'RGBA':
            image = image.convert('RGB')
        image.save(buffer, format="JPEG")
        content = buffer.getvalue()

        # Distinct name for the API request object — the original shadowed the
        # PIL `image` argument here, which obscured the two different types.
        request_image = vision.Image(content=content)
        web_detection = self.client.web_detection(image=request_image).web_detection
        return web_detection

    def report(self, annotations) -> list:
        """Download the visually-similar image URLs, skipping any that fail.

        Best-effort by design: one dead/invalid URL must not abort the whole
        result set, so per-URL errors are suppressed.
        """
        selected_images = []
        if annotations.visually_similar_images:
            for page in annotations.visually_similar_images:
                try:
                    # Timeout so a stalled host cannot hang the Gradio handler.
                    response = requests.get(page.url, timeout=10)
                    img = Image.open(BytesIO(response.content))
                    selected_images.append(img)
                except Exception:
                    # Narrowed from a bare `except:`, which also swallowed
                    # KeyboardInterrupt / SystemExit.
                    pass
        return selected_images
|
67 |
+
|
68 |
+
GP = GetProduct()  # module-level singleton shared by the Gradio tab handlers
|
69 |
|
70 |
def casa_ai_run_tab1(image=None, text=None):
|
71 |
|
|
|
105 |
print('Please provide cropped object')
|
106 |
return None
|
107 |
|
108 |
+
results = GP.inference(selected_crop)
|
109 |
+
return results
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
110 |
|
111 |
with gr.Blocks() as casa:
|
112 |
title = "Casa-AI Demo"
|
|
|
118 |
inputs = [
|
119 |
gr.Image(sources='upload', type="pil", label="Upload"),
|
120 |
gr.Textbox(label="Room description.")
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
121 |
]
|
122 |
with gr.Column():
|
123 |
outputs = [gr.Image(label="Generated room image")]
|
|
|
151 |
submit_btn = gr.Button("Find similar products!")
|
152 |
submit_btn.click(casa_ai_run_tab3, inputs=inputs, outputs=outputs)
|
153 |
|
154 |
+
casa.launch()
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|