sandeshrajx committed on
Commit 03ab4d3 · verified · 1 Parent(s): a3e812d

Update app.py

Files changed (1): app.py +16 -7
app.py CHANGED
@@ -1,7 +1,7 @@
 import gradio as gr
 from huggingface_hub import hf_hub_download, snapshot_download
 import subprocess
-import tempfile
+import tempfile, time
 import shutil
 import os
 import spaces
@@ -31,6 +31,18 @@ def download_model(repo_id, model_name):
 import glob
 
 @spaces.GPU(duration=500)
+def run_model(temp_config_path, ckpt_path):
+    start_time = time.time()  # Record the start time
+    cmd = [
+        "torchrun", "--standalone", "--nproc_per_node", "1",
+        "scripts/inference.py", temp_config_path,
+        "--ckpt-path", ckpt_path
+    ]
+    subprocess.run(cmd)
+    end_time = time.time()  # Record the end time
+    execution_time = end_time - start_time  # Calculate the execution time
+    print(f"Model Execution time: {execution_time} seconds")
+
 def run_inference(model_name, prompt_text):
     repo_id = "hpcai-tech/Open-Sora"
 
@@ -57,12 +69,7 @@ def run_inference(model_name, prompt_text):
     temp_file.write(config_content)
     temp_config_path = temp_file.name
 
-    cmd = [
-        "torchrun", "--standalone", "--nproc_per_node", "1",
-        "scripts/inference.py", temp_config_path,
-        "--ckpt-path", ckpt_path
-    ]
-    subprocess.run(cmd)
+    run_model(temp_config_path, ckpt_path)
 
     save_dir = "./outputs/samples/"  # For example, the save directory used by inference.py
     list_of_files = glob.glob(f'{save_dir}/*')
@@ -77,6 +84,8 @@ def run_inference(model_name, prompt_text):
     os.remove(temp_file.name)
     os.remove(prompt_file.name)
 
+
+
 def main():
     gr.Interface(
         fn=run_inference,
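
Net effect of the commit: the torchrun call is moved out of run_inference into a separate helper, run_model, which now sits under the @spaces.GPU(duration=500) decorator and reports its wall-clock time. A minimal sketch of the helper and its call site as they read after this commit, reconstructed from the hunks above (the rest of app.py, including the config and prompt handling, is omitted):

import subprocess
import time

import spaces

@spaces.GPU(duration=500)
def run_model(temp_config_path, ckpt_path):
    # Launch Open-Sora inference via torchrun on a single process and time it.
    start_time = time.time()
    cmd = [
        "torchrun", "--standalone", "--nproc_per_node", "1",
        "scripts/inference.py", temp_config_path,
        "--ckpt-path", ckpt_path,
    ]
    subprocess.run(cmd)
    print(f"Model Execution time: {time.time() - start_time} seconds")

# Inside run_inference, after the temporary config file has been written:
#     run_model(temp_config_path, ckpt_path)

Because the decorator now wraps only run_model rather than run_inference, the GPU allocation on the Space should cover just the torchrun subprocess instead of the whole run_inference call.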