rmoxon committed on
Commit
8aeafb6
·
verified ·
1 Parent(s): 841eea4

Upload app.py

Browse files
Files changed (1) hide show
  1. app.py +5 -27
app.py CHANGED
@@ -795,33 +795,11 @@ class CLIPService:
795
  if max(image.size) > max_size:
796
  image.thumbnail((max_size, max_size), Image.Resampling.LANCZOS)
797
 
798
- # Try multiple processor configurations
799
- try:
800
- # Method 1: Standard CLIP processing
801
- inputs = self.clip_processor(
802
- images=image,
803
- return_tensors="pt",
804
- do_rescale=True,
805
- do_normalize=True
806
- )
807
- except Exception as e1:
808
- logger.warning(f"Method 1 failed: {e1}, trying method 2...")
809
- try:
810
- # Method 2: With padding
811
- inputs = self.clip_processor(
812
- images=image,
813
- return_tensors="pt",
814
- padding=True,
815
- do_rescale=True,
816
- do_normalize=True
817
- )
818
- except Exception as e2:
819
- logger.warning(f"Method 2 failed: {e2}, trying method 3...")
820
- # Method 3: Manual preprocessing
821
- inputs = self.clip_processor(
822
- images=[image],
823
- return_tensors="pt"
824
- )
825
 
826
  inputs = {k: v.to(self.device) for k, v in inputs.items()}
827
 
 
795
  if max(image.size) > max_size:
796
  image.thumbnail((max_size, max_size), Image.Resampling.LANCZOS)
797
 
798
+ # Use the working method directly (Method 3) to avoid fallback overhead
799
+ inputs = self.clip_processor(
800
+ images=[image],
801
+ return_tensors="pt"
802
+ )
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
803
 
804
  inputs = {k: v.to(self.device) for k, v in inputs.items()}
805