AlexNijjar committed
Commit dfdce18 · 1 Parent(s): ede7356

Add server offline warning

Files changed (2)
  1. src/chain_data.py +2 -2
  2. src/model_demo.py +53 -34
src/chain_data.py CHANGED
@@ -5,14 +5,14 @@ from enum import Enum
 from math import ceil
 from typing import TypeAlias
 
+from fiber.chain.commitments import _deserialize_commitment_field
 from fiber.chain.interface import get_substrate
 from fiber.chain.metagraph import Metagraph
 from fiber.chain.models import Node
-from fiber.chain.commitments import _deserialize_commitment_field
 from substrateinterface.storage import StorageKey
 
 from network_commitments import Decoder
-from wandb_data import Hotkey, Uid, TIMEZONE
+from wandb_data import TIMEZONE, Hotkey, Uid
 
 Weight: TypeAlias = float
 Incentive: TypeAlias = float
src/model_demo.py CHANGED
@@ -2,14 +2,38 @@ import base64
 import os
 import random
 import time
+from datetime import datetime, timedelta
 from io import BytesIO
 
 import gradio as gr
 import requests
 from PIL import Image
 
+from wandb_data import TIMEZONE
+
 SERVER_API = os.environ["SERVER_API"]
 
+current_model: str
+last_current_model_sync: datetime = datetime.fromtimestamp(0, TIMEZONE)
+
+def get_current_model() -> str | None:
+    try:
+        global current_model
+        global last_current_model_sync
+        now = datetime.now(TIMEZONE)
+        if now - last_current_model_sync < timedelta(minutes=5):
+            return current_model
+        last_current_model_sync = now
+
+        response = requests.get(f"{SERVER_API}/model", verify=False)
+        response.raise_for_status()
+        model = response.json()
+        current_model = f"{model['uid']} - {model['url']}"
+        return current_model
+    except:
+        return None
+
+
 
 def image_from_base64(image_data: str) -> Image:
     image_buffer = BytesIO(base64.b64decode(image_data))
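The hunk above replaces the old create_textbox helper (removed in the next hunk) with get_current_model, which re-queries the server's /model endpoint at most once every five minutes and returns None when the request fails; None is what the UI below treats as "server offline". A rough, self-contained sketch of the same cache-and-degrade pattern (the endpoint default, timeout, and variable names here are illustrative, not taken from the repository):

import os
from datetime import datetime, timedelta, timezone

import requests

SERVER_API = os.environ.get("SERVER_API", "http://localhost:8000")  # placeholder default for local testing

_cached: str | None = None
_last_sync = datetime.fromtimestamp(0, timezone.utc)  # epoch, so the first call always refreshes


def get_model_label(ttl: timedelta = timedelta(minutes=5)) -> str | None:
    """Fetch and cache the current model label; None means the server is unreachable."""
    global _cached, _last_sync
    now = datetime.now(timezone.utc)
    if now - _last_sync < ttl:
        return _cached  # reuse the cached answer instead of hitting the API again
    _last_sync = now
    try:
        response = requests.get(f"{SERVER_API}/model", timeout=10)
        response.raise_for_status()
        model = response.json()
        _cached = f"{model['uid']} - {model['url']}"
    except requests.RequestException:
        _cached = None  # treat any network/HTTP error as "offline"
    return _cached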
 
@@ -39,40 +63,35 @@ def submit(prompt: str, seed: int | str | None, baseline: bool) -> tuple:
     )
 
 
-def create_textbox() -> gr.Textbox:
-    response = requests.get(f"{SERVER_API}/model", verify=False)
-    response.raise_for_status()
-    model = response.json()
-    return gr.Textbox(f"{model['uid']} - {model['url']}", interactive=False, show_label=False)
-
-
 def create_demo():
     with gr.Accordion(f"EdgeMaxxing Model Comparison"):
-        with gr.Group():
+        gr.Textbox("The server is offline! Please come back later", interactive=False, show_label=False, visible=get_current_model() is None)
+        with gr.Group(visible=get_current_model() is not None):
+            with gr.Group():
+                with gr.Row():
+                    with gr.Column():
+                        gr.Textbox("Baseline", interactive=False, show_label=False)
+                        baseline_image_component = gr.Image(show_label=False)
+
+                    with gr.Column():
+                        textbox = gr.Textbox(interactive=False, show_label=False)
+                        textbox.attach_load_event(lambda: get_current_model(), None)
+                        optimized_image_component = gr.Image(show_label=False)
             with gr.Row():
-                with gr.Column():
-                    gr.Textbox("Baseline", interactive=False, show_label=False)
-                    baseline_image_component = gr.Image(show_label=False)
-
-                with gr.Column():
-                    textbox = gr.Textbox()
-                    textbox.attach_load_event(lambda: create_textbox(), None)
-                    optimized_image_component = gr.Image(show_label=False)
-        with gr.Row():
-            prompt = gr.Textbox(
-                placeholder="Enter prompt...",
-                interactive=True,
-                submit_btn=True,
-                show_label=False,
-                autofocus=True,
-                scale=10,
-            )
-
-            seed_input = gr.Textbox(
-                placeholder="Enter seed...",
-                interactive=True,
-                show_label=False,
-            )
-
-        prompt.submit(lambda prompt, seed: submit(prompt, seed, True), inputs=[prompt, seed_input], outputs=[prompt, baseline_image_component])
-        prompt.submit(lambda prompt, seed: submit(prompt, seed, False), inputs=[prompt, seed_input], outputs=[prompt, optimized_image_component])
+                prompt = gr.Textbox(
+                    placeholder="Enter prompt...",
+                    interactive=True,
+                    submit_btn=True,
+                    show_label=False,
+                    autofocus=True,
+                    scale=10,
+                )
+
+                seed_input = gr.Textbox(
+                    placeholder="Enter seed...",
+                    interactive=True,
+                    show_label=False,
+                )
+
+            prompt.submit(lambda prompt, seed: submit(prompt, seed, True), inputs=[prompt, seed_input], outputs=[prompt, baseline_image_component])
+            prompt.submit(lambda prompt, seed: submit(prompt, seed, False), inputs=[prompt, seed_input], outputs=[prompt, optimized_image_component])
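Worth noting about the layout above: visible=get_current_model() is None is evaluated once, when create_demo() builds the component tree, so the offline banner reflects the server state at build time, while the model textbox itself is refreshed on page load via attach_load_event. A minimal sketch of that build-time visibility pattern, using a placeholder server_is_online() check instead of the repository's helper:

import gradio as gr


def server_is_online() -> bool:
    # Placeholder health check; the real demo uses `get_current_model() is not None`.
    return True


def build_demo() -> gr.Blocks:
    with gr.Blocks() as demo:
        online = server_is_online()  # evaluated once, when the layout is built
        # Offline banner: only visible when the startup check failed.
        gr.Textbox(
            "The server is offline! Please come back later",
            interactive=False,
            show_label=False,
            visible=not online,
        )
        # The rest of the UI is hidden while the server is offline.
        with gr.Group(visible=online):
            gr.Textbox("Baseline", interactive=False, show_label=False)
            gr.Image(show_label=False)
    return demo


if __name__ == "__main__":
    build_demo().launch()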