Jon Taylor committed
Commit: d0bc075 · Parent(s): 9de7a95

lowered resolution

Files changed:
- app/bot.py (+6 -5)
- app/pipeline.py (+4 -4)
app/bot.py
CHANGED
@@ -87,8 +87,8 @@ class DailyVision(EventHandler):
     def setup_camera(self):
         if not self.__camera:
             self.__camera = Daily.create_camera_device("camera",
-                                                       width =
-                                                       height =
+                                                       width = 512,
+                                                       height = 512,
                                                        color_format="RGB")
         self.__client.update_inputs({
             "camera": {
@@ -128,13 +128,14 @@ class DailyVision(EventHandler):
         # Process ~15 frames per second (considering incoming frames at 30fps).
         if time.time() - self.__time > float(os.getenv("FPS_CAP", 0.0333)):
             self.__time = time.time()
-            self.__image_buffer = video_frame
+            #self.__image_buffer = video_frame
             #self.__queue.put(video_frame)
 
     def on_app_message(self, message, sender):
         # Update pipeline settings based on message data
-
-
+        #print(message)
+        #self.__params = self.__pipeline.InputParams(**message)
+        print(self.__pipeline.InputParams().model_dump_json())
         return
 
     def wave(self):
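For context on the new on_app_message logging in app/bot.py above, here is a minimal standalone sketch, assuming pydantic v2 and using only the InputParams fields visible in this commit (the real model in app/pipeline.py defines more), of what the added print(...) line emits and how an app message could later be parsed once the commented-out line is re-enabled:

# Hypothetical sketch, not the app's actual code; field values copied from this commit.
from pydantic import BaseModel, Field

class InputParams(BaseModel):
    # Subset of fields taken from the app/pipeline.py diff below.
    steps: int = Field(1, title="Steps")
    width: int = Field(512, title="Width")
    height: int = Field(512, title="Height")
    guidance_scale: float = Field(1.0, title="Guidance scale")

# Equivalent of print(self.__pipeline.InputParams().model_dump_json()):
print(InputParams().model_dump_json())
# prints something like: {"steps":1,"width":512,"height":512,"guidance_scale":1.0}

# Once the commented-out line is restored, an incoming message dict would be
# validated the same way:
# params = InputParams(**message)

As an aside on the throttling context lines in the same hunk: with frames arriving every ~0.0333 s (30 fps) and FPS_CAP defaulting to 0.0333 s, the elapsed-time check lets roughly every other frame through, which is where the "~15 frames per second" comment comes from.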
app/pipeline.py
CHANGED
@@ -58,10 +58,10 @@ class Pipeline:
             1, min=1, max=15, title="Steps", field="range", hide=True, id="steps"
         )
         width: int = Field(
-
+            512, min=2, max=15, title="Width", disabled=True, hide=True, id="width"
         )
         height: int = Field(
-
+            512, min=2, max=15, title="Height", disabled=True, hide=True, id="height"
         )
         guidance_scale: float = Field(
             1.0,
@@ -188,8 +188,8 @@ class Pipeline:
 
         self.pipe(
             prompt="warmup",
-            image=[Image.new("RGB", (
-            control_image=[Image.new("RGB", (
+            image=[Image.new("RGB", (768, 768))],
+            control_image=[Image.new("RGB", (768, 768))],
         )
 
     def predict(self, params: "Pipeline.InputParams", image) -> Image.Image:
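The warmup hunk above feeds fixed-size blank frames through the pipeline once before real frames arrive. A minimal sketch of that pattern, assuming Pillow is installed (the self.pipe object itself is not visible in this diff, so the final call is only indicated in a comment):

# Hypothetical standalone sketch of the warmup images used above.
from PIL import Image

warmup_image = Image.new("RGB", (768, 768))    # solid black 768x768 frame
warmup_control = Image.new("RGB", (768, 768))  # matching control image

# The warmup call in pipeline.py then looks roughly like:
# self.pipe(prompt="warmup", image=[warmup_image], control_image=[warmup_control])

Note that the warmup frames in this commit stay at 768x768 even though the InputParams width/height defaults were lowered to 512.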