ShreyMehra committed on
Commit
fda5fae
·
unverified ·
1 Parent(s): ebd223e

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +2 -3
app.py CHANGED
@@ -39,8 +39,6 @@ class UI:
39
  pixel_values = pixel_values.to(torch.float32)
40
 
41
  print(4)
42
- model = model.to(torch.float32)
43
-
44
  generated_ids = model.generate(pixel_values=pixel_values, max_length=25)
45
  generated_caption = processor.batch_decode(generated_ids, skip_special_tokens=True)[0]
46
 
@@ -61,7 +59,8 @@ class Model:
61
  global model
62
  global processor
63
  model = Blip2ForConditionalGeneration.from_pretrained(config.base_model_name_or_path, low_cpu_mem_usage=True, torch_dtype=torch.float16) #, device_map="auto", load_in_8bit=True
64
- model = PeftModel.from_pretrained(model, peft_model_id)
 
65
  processor = AutoProcessor.from_pretrained("Salesforce/blip2-opt-2.7b")
66
 
67
  def query(self , payload):
 
39
  pixel_values = pixel_values.to(torch.float32)
40
 
41
  print(4)
 
 
42
  generated_ids = model.generate(pixel_values=pixel_values, max_length=25)
43
  generated_caption = processor.batch_decode(generated_ids, skip_special_tokens=True)[0]
44
 
 
59
  global model
60
  global processor
61
  model = Blip2ForConditionalGeneration.from_pretrained(config.base_model_name_or_path, low_cpu_mem_usage=True, torch_dtype=torch.float16) #, device_map="auto", load_in_8bit=True
62
+ model = PeftModel.from_pretrained(model, peft_model_id).to(torch.float32)
63
+
64
  processor = AutoProcessor.from_pretrained("Salesforce/blip2-opt-2.7b")
65
 
66
  def query(self , payload):