Upload main.py
Browse files — Was_node_suite/main.py (+6 −7)
Was_node_suite/main.py
CHANGED
@@ -15,6 +15,7 @@ def cloudflared(port, metrics_port, output_queue):
|
|
15 |
if not tunnel_url:
|
16 |
raise Exception("Can't connect to Cloudflare Edge")
|
17 |
output_queue.put(tunnel_url)
|
|
|
18 |
output_queue, metrics_port = Queue(), randint(8100, 9000)
|
19 |
thread = Timer(2, cloudflared, args=(8188, metrics_port, output_queue))
|
20 |
thread.start()
|
@@ -26,6 +27,7 @@ print(tunnel_url)
|
|
26 |
import comfy.options
|
27 |
comfy.options.enable_args_parsing()
|
28 |
|
|
|
29 |
import os
|
30 |
import importlib.util
|
31 |
import folder_paths
|
@@ -45,9 +47,6 @@ def execute_prestartup_script():
|
|
45 |
print(f"Failed to execute startup-script: {script_path} / {e}")
|
46 |
return False
|
47 |
|
48 |
-
if args.disable_all_custom_nodes:
|
49 |
-
return
|
50 |
-
|
51 |
node_paths = folder_paths.get_folder_paths("custom_nodes")
|
52 |
for custom_node_path in node_paths:
|
53 |
possible_modules = os.listdir(custom_node_path)
|
@@ -126,7 +125,7 @@ def cuda_malloc_warning():
|
|
126 |
logging.warning("\nWARNING: this card most likely does not support cuda-malloc, if you get \"CUDA error\" please run ComfyUI with: --disable-cuda-malloc\n")
|
127 |
|
128 |
def prompt_worker(q, server):
|
129 |
-
e = execution.PromptExecutor(server)
|
130 |
last_gc_collect = 0
|
131 |
need_gc = False
|
132 |
gc_collect_interval = 10.0
|
@@ -146,7 +145,7 @@ def prompt_worker(q, server):
|
|
146 |
e.execute(item[2], prompt_id, item[3], item[4])
|
147 |
need_gc = True
|
148 |
q.task_done(item_id,
|
149 |
-
e.outputs_ui,
|
150 |
status=execution.PromptQueue.ExecutionStatus(
|
151 |
status_str='success' if e.success else 'error',
|
152 |
completed=e.success,
|
@@ -267,7 +266,6 @@ if __name__ == "__main__":
|
|
267 |
folder_paths.add_model_folder_path("checkpoints", os.path.join(folder_paths.get_output_directory(), "checkpoints"))
|
268 |
folder_paths.add_model_folder_path("clip", os.path.join(folder_paths.get_output_directory(), "clip"))
|
269 |
folder_paths.add_model_folder_path("vae", os.path.join(folder_paths.get_output_directory(), "vae"))
|
270 |
-
folder_paths.add_model_folder_path("diffusion_models", os.path.join(folder_paths.get_output_directory(), "diffusion_models"))
|
271 |
|
272 |
if args.input_directory:
|
273 |
input_dir = os.path.abspath(args.input_directory)
|
@@ -285,10 +283,11 @@ if __name__ == "__main__":
|
|
285 |
address = '127.0.0.1'
|
286 |
webbrowser.open(f"{scheme}://{address}:{port}")
|
287 |
call_on_start = startup_server
|
|
|
288 |
|
289 |
try:
|
290 |
-
loop.run_until_complete(server.setup())
|
291 |
loop.run_until_complete(run(server, address=args.listen, port=args.port, verbose=not args.dont_print_server, call_on_start=call_on_start))
|
|
|
292 |
except KeyboardInterrupt:
|
293 |
logging.info("\nStopped server")
|
294 |
|
|
|
15 |
if not tunnel_url:
|
16 |
raise Exception("Can't connect to Cloudflare Edge")
|
17 |
output_queue.put(tunnel_url)
|
18 |
+
|
19 |
output_queue, metrics_port = Queue(), randint(8100, 9000)
|
20 |
thread = Timer(2, cloudflared, args=(8188, metrics_port, output_queue))
|
21 |
thread.start()
|
|
|
27 |
import comfy.options
|
28 |
comfy.options.enable_args_parsing()
|
29 |
|
30 |
+
import subprocess
|
31 |
import os
|
32 |
import importlib.util
|
33 |
import folder_paths
|
|
|
47 |
print(f"Failed to execute startup-script: {script_path} / {e}")
|
48 |
return False
|
49 |
|
|
|
|
|
|
|
50 |
node_paths = folder_paths.get_folder_paths("custom_nodes")
|
51 |
for custom_node_path in node_paths:
|
52 |
possible_modules = os.listdir(custom_node_path)
|
|
|
125 |
logging.warning("\nWARNING: this card most likely does not support cuda-malloc, if you get \"CUDA error\" please run ComfyUI with: --disable-cuda-malloc\n")
|
126 |
|
127 |
def prompt_worker(q, server):
|
128 |
+
e = execution.PromptExecutor(server)
|
129 |
last_gc_collect = 0
|
130 |
need_gc = False
|
131 |
gc_collect_interval = 10.0
|
|
|
145 |
e.execute(item[2], prompt_id, item[3], item[4])
|
146 |
need_gc = True
|
147 |
q.task_done(item_id,
|
148 |
+
e.outputs_ui,
|
149 |
status=execution.PromptQueue.ExecutionStatus(
|
150 |
status_str='success' if e.success else 'error',
|
151 |
completed=e.success,
|
|
|
266 |
folder_paths.add_model_folder_path("checkpoints", os.path.join(folder_paths.get_output_directory(), "checkpoints"))
|
267 |
folder_paths.add_model_folder_path("clip", os.path.join(folder_paths.get_output_directory(), "clip"))
|
268 |
folder_paths.add_model_folder_path("vae", os.path.join(folder_paths.get_output_directory(), "vae"))
|
|
|
269 |
|
270 |
if args.input_directory:
|
271 |
input_dir = os.path.abspath(args.input_directory)
|
|
|
283 |
address = '127.0.0.1'
|
284 |
webbrowser.open(f"{scheme}://{address}:{port}")
|
285 |
call_on_start = startup_server
|
286 |
+
|
287 |
|
288 |
try:
|
|
|
289 |
loop.run_until_complete(run(server, address=args.listen, port=args.port, verbose=not args.dont_print_server, call_on_start=call_on_start))
|
290 |
+
subprocess.run(["python", "script.py"])
|
291 |
except KeyboardInterrupt:
|
292 |
logging.info("\nStopped server")
|
293 |
|