Update app.py
app.py
CHANGED
@@ -1,215 +1,381 @@
- import streamlit as st
- import anthropic
  import os
  import base64
- import glob
  import json
- import pytz
- from datetime import datetime
- from streamlit.components.v1 import html
- from PIL import Image
  import re
- …
- st.set_page_config(
-     page_title=title,
-     page_icon=icons,
-     layout="wide",
-     initial_sidebar_state="auto",
-     menu_items={
-         'Get Help': helpURL,
-         'Report a bug': bugURL,
-         'About': title
-     }
- )

- client = anthropic.Anthropic(api_key=os.environ.get("ANTHROPIC_API_KEY"))

- # …
- # …
- # …
- …
-     with open(file_path, "rb") as file:
-         contents = file.read()
-         b64 = base64.b64encode(contents).decode()
-         file_name = os.path.basename(file_path)
-         return f'<a href="data:file/txt;base64,{b64}" download="{file_name}">Download {file_name}</a>'

- # …
- def …
-     …
-     return
- …
  }
- …
- # …
- def …
-     …
-     return
- …
- def main():
-     # Sidebar with Useful Controls (All the VIP actions)
-     st.sidebar.title("🧠 Claude")

-     all_files = glob.glob("*.md")
-     all_files.sort(reverse=True)

-     if st.sidebar.button("🗑 Delete All"):
-         for file in all_files:
              os.remove(file)
-     …
  )
- …

  if __name__ == "__main__":
-     main()
  import os
+ import random
+ import uuid
  import base64
  import json
  import re
+ import gradio as gr
+ import numpy as np
+ from PIL import Image
+ import torch
+ import glob
+ from datetime import datetime
+ import pandas as pd
+ import anthropic
+ import pytz

+ from diffusers import StableDiffusionXLPipeline, EulerAncestralDiscreteScheduler

+ DESCRIPTION = """# 🎨 ArtForge + Claude Chat
+ Create, curate, and compete with AI-generated art. Chat with Claude. Manage quotes. Track logs. Enjoy!
+ """

+ # ------------------ Global Files & Constants ------------------ #
+ LIKES_CACHE_FILE = "likes_cache.json"
+ LOG_CACHE_FILE = "log_cache.json"
+ QUOTE_CACHE_FILE = "quotes_cache.json"

+ # For serving static content:
+ URL_PREFIX = "https://huggingface.co/spaces/awacke1/dalle-3-xl-lora-v2/file="

+ # ------------------ Load/Save Helpers ------------------ #
+ def load_json(file):
+     if os.path.exists(file):
+         with open(file, 'r', encoding='utf-8') as f:
+             return json.load(f)
+     return {}
+
+ def save_json(file, data):
+     with open(file, 'w', encoding='utf-8') as f:
+         json.dump(data, f, indent=4)
+
+ likes_cache = load_json(LIKES_CACHE_FILE) or {}
+ all_logs = load_json(LOG_CACHE_FILE) if os.path.exists(LOG_CACHE_FILE) else []
+ quotes_cache = load_json(QUOTE_CACHE_FILE) if os.path.exists(QUOTE_CACHE_FILE) else []
+
+ # Image metadata
+ image_metadata = pd.DataFrame(columns=['Filename', 'Prompt', 'Likes', 'Dislikes', 'Hearts', 'Created'])
+
+ # ---------- Anthropic (Claude) Setup (optional) ---------- #
+ anthropic_api_key = os.environ.get("ANTHROPIC_API_KEY", None)
+ claude_client = anthropic.Anthropic(api_key=anthropic_api_key) if anthropic_api_key else None
+
+ # ------------------ Logging of Inputs/Outputs ------------------ #
+ def log_input_output(user_input, model_output, file_link=""):
+     global all_logs
+     timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
+     new_entry = {
+         "timestamp": timestamp,
+         "input": user_input,
+         "output": model_output,
+         "file_link": file_link
      }
+     all_logs.append(new_entry)
+     save_json(LOG_CACHE_FILE, all_logs)
+
+ # ------------------ Votes & Images ------------------ #
+ def load_likes_cache():
+     return likes_cache
+
+ def save_likes_cache():
+     save_json(LIKES_CACHE_FILE, likes_cache)
+
+ def get_image_caption(filename):
+     global likes_cache, image_metadata
+     if filename in likes_cache:
+         likes = likes_cache[filename]['likes']
+         dislikes = likes_cache[filename]['dislikes']
+         hearts = likes_cache[filename]['hearts']
+         row = image_metadata.loc[image_metadata['Filename'] == filename]
+         if not row.empty:
+             prompt = row.iloc[0]['Prompt']
+             return f"{filename}\nPrompt: {prompt}\n👍 {likes} 👎 {dislikes} ❤️ {hearts}"
+     return filename
+
+ def get_image_gallery():
+     global image_metadata
+     image_files = image_metadata['Filename'].tolist()
+     return [
+         (file, get_image_caption(file))
+         for file in image_files if os.path.exists(file)
+     ]
+
+ def delete_all_images():
+     global image_metadata, likes_cache
+     for file in image_metadata['Filename']:
+         if os.path.exists(file):
              os.remove(file)
+     image_metadata = pd.DataFrame(columns=['Filename','Prompt','Likes','Dislikes','Hearts','Created'])
+     likes_cache.clear()
+     save_likes_cache()
+     return get_image_gallery(), image_metadata.values.tolist()
+
+ def delete_image(filename):
+     global image_metadata, likes_cache
+     if filename and os.path.exists(filename):
+         os.remove(filename)
+         image_metadata = image_metadata[image_metadata['Filename'] != filename]
+         if filename in likes_cache:
+             del likes_cache[filename]
+         save_likes_cache()
+     return get_image_gallery(), image_metadata.values.tolist()
+
+ def vote(filename, vote_type):
+     global likes_cache, image_metadata
+     if filename in likes_cache:
+         likes_cache[filename][vote_type.lower()] += 1
+         save_likes_cache()
+         # Reflect in dataframe
+         row_idx = image_metadata[image_metadata['Filename'] == filename].index
+         if not row_idx.empty:
+             col = vote_type.capitalize()
+             curr_val = image_metadata.at[row_idx[0], col]
+             image_metadata.at[row_idx[0], col] = curr_val + 1
+     return get_image_gallery(), image_metadata.values.tolist()
+
+ # ------------------ Image Generation (Original Code) ------------------ #
+ MAX_SEED = np.iinfo(np.int32).max
+ if not torch.cuda.is_available():
+     DESCRIPTION += "\n<p>Running on CPU 🥶 This demo may not work on CPU.</p>"
+
+ # Load pipeline
+ if torch.cuda.is_available():
+     pipe = StableDiffusionXLPipeline.from_pretrained(
+         "fluently/Fluently-XL-v4",
+         torch_dtype=torch.float16,
+         use_safetensors=True,
+     )
+     pipe.scheduler = EulerAncestralDiscreteScheduler.from_config(pipe.scheduler.config)
+     pipe.load_lora_weights("ehristoforu/dalle-3-xl-v2", weight_name="dalle-3-xl-lora-v2.safetensors", adapter_name="dalle")
+     pipe.set_adapters("dalle")
+     pipe.to("cuda")
+ else:
+     pipe = None
+
+ def randomize_seed_fn(seed: int, randomize_seed: bool):
+     if randomize_seed:
+         seed = random.randint(0, MAX_SEED)
+     return seed
+
+ def save_image(img, prompt):
+     global image_metadata, likes_cache
+     timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
+     safe_prompt = re.sub(r'[^\w\s-]', '', prompt.lower())[:50]
+     safe_prompt = re.sub(r'[-\s]+', '-', safe_prompt).strip('-')
+     filename = f"{timestamp}_{safe_prompt}.png"
+     img.save(filename)
+     # Add metadata row
+     new_row = {
+         'Filename': filename,
+         'Prompt': prompt,
+         'Likes': 0,
+         'Dislikes': 0,
+         'Hearts': 0,
+         'Created': datetime.now().strftime("%Y-%m-%d %H:%M:%S")
+     }
+     image_metadata.loc[len(image_metadata)] = new_row
+     likes_cache[filename] = {'likes': 0, 'dislikes': 0, 'hearts': 0}
+     save_likes_cache()
+     return filename
+
+ def generate(
+     prompt: str,
+     negative_prompt: str = "",
+     use_negative_prompt: bool = False,
+     seed: int = 0,
+     width: int = 1024,
+     height: int = 1024,
+     guidance_scale: float = 3,
+     randomize_seed: bool = False,
+     progress=gr.Progress(track_tqdm=True),
+ ):
+     if pipe is None:
+         return ["Error: No GPU or pipeline not loaded"], 0, [], get_image_gallery(), image_metadata.values.tolist()
+
+     seed = randomize_seed_fn(seed, randomize_seed)
+     if not use_negative_prompt:
+         negative_prompt = ""
+
+     images = pipe(
+         prompt=prompt,
+         negative_prompt=negative_prompt,
+         width=width,
+         height=height,
+         guidance_scale=guidance_scale,
+         num_inference_steps=20,
+         num_images_per_prompt=1,
+         cross_attention_kwargs={"scale": 0.65},
+         output_type="pil",
+     ).images
+
+     image_paths = [save_image(img, prompt) for img in images]
+     # Build static-content file links
+     file_links = [f"{URL_PREFIX}{p}" for p in image_paths]
+
+     # Log this generation
+     log_input_output(user_input=prompt, model_output="(image generated)", file_link=", ".join(file_links))
+
+     return image_paths, seed, file_links, get_image_gallery(), image_metadata.values.tolist()
+
+ # Some example prompts
+ def get_random_style():
+     styles = [
+         "Impressionist", "Cubist", "Surrealist", "Abstract Expressionist",
+         "Pop Art", "Minimalist", "Baroque", "Art Nouveau", "Pointillist", "Fauvism"
+     ]
+     return random.choice(styles)
+
+ examples = [
+     f"{get_random_style()} painting of a majestic lighthouse...",
+     f"{get_random_style()} still life with a pair of vintage eyeglasses...",
+     f"{get_random_style()} depiction of a rustic wooden stool...",
+ ]
+
+ # ------------------ Claude Chat (Added from Streamlit) ------------------ #
+ def chat_claude(user_input: str):
+     if not claude_client:
+         # If no API key or no client
+         return "Claude not available. Set ANTHROPIC_API_KEY."
+     if not user_input.strip():
+         return "No input provided."
+
+     response = claude_client.messages.create(
+         model="claude-3-sonnet-20240229",
+         max_tokens=1000,
+         messages=[{"role": "user", "content": user_input}]
+     )
+     text = response.content[0].text
+     log_input_output(user_input, text, "")
+     return text
+
+ # ------------------ Quotes Feature (Short Version) ------------------ #
+ def get_quotes_df():
+     return pd.DataFrame(quotes_cache) if quotes_cache else pd.DataFrame(columns=["text", "likes", "created"])
+
+ def add_quote(quote_text):
+     if not quote_text.strip():
+         return get_quotes_df()
+     central = pytz.timezone('US/Central')
+     dt_str = datetime.now(central).strftime("%Y-%m-%d %H:%M:%S")
+     quotes_cache.append({"text": quote_text, "likes": 0, "created": dt_str})
+     save_json(QUOTE_CACHE_FILE, quotes_cache)
+     return get_quotes_df()
+
+ def like_quote(index):
+     # index from DataFrame row index
+     if 0 <= index < len(quotes_cache):
+         quotes_cache[index]["likes"] += 1
+         save_json(QUOTE_CACHE_FILE, quotes_cache)
+     return get_quotes_df()
+
+ # ------------------ Build Gradio Interface ------------------ #
+ css = '''
+ .gradio-container{max-width: 1024px !important}
+ h1{text-align:center}
+ footer {
+     visibility: hidden
+ }
+ '''
+
+ with gr.Blocks(theme=gr.themes.Soft(), css=css) as demo:
+     gr.Markdown(DESCRIPTION)
+
+     with gr.Tab("Generate Images"):
+         with gr.Group():
+             with gr.Row():
+                 prompt = gr.Text(
+                     label="Prompt",
+                     show_label=False,
+                     max_lines=1,
+                     placeholder="Enter your prompt",
+                     container=False,
+                 )
+                 run_button = gr.Button("Run", scale=0)
+             result = gr.Gallery(label="Result", columns=1, preview=True, show_label=False)
+
+         with gr.Accordion("Advanced options", open=False):
+             use_negative_prompt = gr.Checkbox(label="Use negative prompt", value=True)
+             negative_prompt = gr.Text(
+                 label="Negative prompt",
+                 lines=4,
+                 max_lines=6,
+                 value="(deformed, distorted, disfigured:1.3), ...",
+                 placeholder="Enter negative prompt"
+             )
+             seed = gr.Slider(
+                 label="Seed",
+                 minimum=0,
+                 maximum=MAX_SEED,
+                 step=1,
+                 value=0
              )
+             randomize_seed = gr.Checkbox(label="Randomize seed", value=True)
+             with gr.Row():
+                 width = gr.Slider(label="Width", minimum=512, maximum=2048, step=8, value=1024)
+                 height = gr.Slider(label="Height", minimum=512, maximum=2048, step=8, value=1024)
+             guidance_scale = gr.Slider(label="Guidance Scale", minimum=0.1, maximum=20.0, step=0.1, value=3)
+
+         # Button triggers generate function
+         run_button.click(
+             fn=generate,
+             inputs=[prompt, negative_prompt, use_negative_prompt, seed, width, height, guidance_scale, randomize_seed],
+             outputs=[result, seed, gr.HTML(visible=False), gr.Gallery(), gr.Dataframe()],
+         )
+
+         gr.Examples(
+             examples=examples,
+             inputs=prompt,
+             outputs=[result, seed],
+             fn=generate,
+             cache_examples=False,
+         )
+
+     with gr.Tab("Gallery & Voting"):
+         image_gallery = gr.Gallery(label="Generated Images", show_label=True, columns=4, height="auto")
+         with gr.Row():
+             like_button = gr.Button("👍 Like")
+             dislike_button = gr.Button("👎 Dislike")
+             heart_button = gr.Button("❤️ Heart")
+             delete_image_button = gr.Button("🗑️ Delete Selected Image")
+         selected_image = gr.State(None)
+         metadata_df = gr.Dataframe(
+             label="Image Metadata",
+             headers=["Filename", "Prompt", "Likes", "Dislikes", "Hearts", "Created"],
+             interactive=False
+         )
+         delete_all_button = gr.Button("🗑️ Delete All Images")
+
+         delete_all_button.click(
+             fn=delete_all_images,
+             inputs=[],
+             outputs=[image_gallery, metadata_df],
+         )
+         image_gallery.select(fn=lambda evt: evt, inputs=[], outputs=[selected_image])
+         like_button.click(fn=lambda x: vote(x, 'likes'), inputs=selected_image, outputs=[image_gallery, metadata_df])
+         dislike_button.click(fn=lambda x: vote(x, 'dislikes'), inputs=selected_image, outputs=[image_gallery, metadata_df])
+         heart_button.click(fn=lambda x: vote(x, 'hearts'), inputs=selected_image, outputs=[image_gallery, metadata_df])
+         delete_image_button.click(fn=delete_image, inputs=[selected_image], outputs=[image_gallery, metadata_df])
+
+     with gr.Tab("Chat Claude"):
+         claude_input = gr.Textbox(label="Your message:")
+         claude_output = gr.Textbox(label="Claude's reply:", lines=4)
+         claude_button = gr.Button("Send to Claude")
+         claude_button.click(fn=chat_claude, inputs=claude_input, outputs=claude_output)
+
+     with gr.Tab("Quotes"):
+         with gr.Row():
+             quote_text = gr.Textbox(label="Add a quote:")
+             add_quote_btn = gr.Button("Add Quote")
+         quote_data = gr.Dataframe(headers=["text","likes","created"], interactive=False)
+         with gr.Row():
+             row_index = gr.Number(label="Index to like:")
+             like_btn = gr.Button("Like Quote")
+
+         add_quote_btn.click(fn=add_quote, inputs=quote_text, outputs=quote_data)
+         like_btn.click(fn=like_quote, inputs=row_index, outputs=quote_data)
+
+     with gr.Tab("Logs"):
+         gr.Markdown("#### All logged inputs & outputs")
+         logs_data = gr.Dataframe(
+             label="Log Data",
+             value=pd.DataFrame(all_logs),
+             interactive=False
+         )
+
+     def update_gallery_and_metadata():
+         return gr.update(value=get_image_gallery()), gr.update(value=image_metadata.values.tolist())
+
+     demo.load(fn=update_gallery_and_metadata, outputs=[image_gallery, metadata_df])

  if __name__ == "__main__":
+     demo.queue(max_size=20).launch(share=True, debug=False)
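For reference, here is a minimal sketch of the JSON side files the new Gradio app reads and writes. The shapes are inferred from the load_json/save_json helpers and the save_image, log_input_output, and add_quote functions above; the filenames and values shown are illustrative only.

import json

# likes_cache.json — one entry per generated image, keyed by the saved PNG filename (example values)
likes_cache_example = {
    "20250101_120000_impressionist-lighthouse.png": {"likes": 2, "dislikes": 0, "hearts": 1},
}

# log_cache.json — a list of prompt/output pairs with optional static-content file links (example values)
log_cache_example = [
    {
        "timestamp": "2025-01-01 12:00:00",
        "input": "Impressionist painting of a majestic lighthouse",
        "output": "(image generated)",
        "file_link": "https://huggingface.co/spaces/awacke1/dalle-3-xl-lora-v2/file=20250101_120000_impressionist-lighthouse.png",
    },
]

# quotes_cache.json — a list of quotes with like counts and a US/Central creation timestamp (example values)
quotes_cache_example = [
    {"text": "Every artist was first an amateur.", "likes": 3, "created": "2025-01-01 06:00:00"},
]

# Written the same way the app's save_json helper does it:
with open("likes_cache.json", "w", encoding="utf-8") as f:
    json.dump(likes_cache_example, f, indent=4)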