LukasHug committed on
Commit
5e99db0
·
verified ·
1 Parent(s): 1dbed04

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +1 -2
app.py CHANGED
@@ -225,7 +225,6 @@ def run_inference(prompt, image, temperature=0.2, top_p=0.95, max_tokens=512):
225
  padding=True,
226
  return_tensors="pt",
227
  )
228
- inputs = inputs.to("cuda")
229
 
230
 
231
  # Otherwise assume it's a LlavaGuard model
@@ -242,7 +241,7 @@ def run_inference(prompt, image, temperature=0.2, top_p=0.95, max_tokens=512):
242
  text_prompt = processor.apply_chat_template(conversation, add_generation_prompt=True)
243
  inputs = processor(text=text_prompt, images=image, return_tensors="pt")
244
 
245
- inputs = {k: v.to('cuda') for k, v in inputs.items()}
246
 
247
  with torch.no_grad():
248
  generated_ids = model.generate(
 
225
  padding=True,
226
  return_tensors="pt",
227
  )
 
228
 
229
 
230
  # Otherwise assume it's a LlavaGuard model
 
241
  text_prompt = processor.apply_chat_template(conversation, add_generation_prompt=True)
242
  inputs = processor(text=text_prompt, images=image, return_tensors="pt")
243
 
244
+ inputs = {k: v.to(model.device) for k, v in inputs.items()}
245
 
246
  with torch.no_grad():
247
  generated_ids = model.generate(