jbilcke-hf committed
Commit 0cf5bce
1 Parent(s): 58774ec

Update handler.py

Files changed (1): handler.py +16 -4
handler.py CHANGED
@@ -14,7 +14,6 @@ from diffusers import LTXPipeline, LTXImageToVideoPipeline
 from PIL import Image
 
 from varnish import Varnish
-from varnish.debug_utils import setup_debug_logging
 
 # Configure logging
 logging.basicConfig(level=logging.INFO)
@@ -25,6 +24,22 @@ MAX_WIDTH = 1280
 MAX_HEIGHT = 720
 MAX_FRAMES = 257
 
+def print_directory_structure(startpath):
+    """Print the directory structure starting from the given path."""
+    for root, dirs, files in os.walk(startpath):
+        level = root.replace(startpath, '').count(os.sep)
+        indent = ' ' * 4 * level
+        logger.info(f"{indent}{os.path.basename(root)}/")
+        subindent = ' ' * 4 * (level + 1)
+        for f in files:
+            logger.info(f"{subindent}{f}")
+
+logger.info("💡 Printing directory structure of ""/repository"":")
+print_directory_structure("/repository")
+
+logger.info("💡 Printing directory structure of os.getcwd():")
+print_directory_structure(os.getcwd())
+
 @dataclass
 class GenerationConfig:
     """Configuration for video generation"""
@@ -87,9 +102,6 @@ class EndpointHandler:
         #self.text_to_video.enable_model_cpu_offload()
         #self.image_to_video.enable_model_cpu_offload()
 
-        # temporary enable this if you have some issues with locating the model files
-        setup_debug_logging()
-
         # Initialize Varnish for post-processing
         self.varnish = Varnish(
             device="cuda" if torch.cuda.is_available() else "cpu",