sophiaaez committed
Commit 4577059 · 1 Parent(s): b323982

Update app.py

Files changed (1)
  1. app.py +14 -14
app.py CHANGED
@@ -67,20 +67,20 @@ def getModelPath(language):
     return(path)
 
 def inference(input_img,strategy,language):
-    if model_n == 'Image Captioning':
-        image = transform(raw_image).unsqueeze(0).to(device)
-        with torch.no_grad():
-            if strategy == "Beam search":
-                cap = model.generate(image, sample=False, num_beams=3, max_length=20, min_length=5)
-            else:
-                cap = model.generate(image, sample=True, top_p=0.9, max_length=20, min_length=5)
-        if modelpath:
-            translator = pipeline("translation", model=modelpath)
-            trans_cap = translator(cap[0])
-            tc = trans_cap[0]['translation_text']
-            return str(tc)
-        else:
-            return str(cap[0])
+    image = transform(input_img).unsqueeze(0).to(device)
+    with torch.no_grad():
+        if strategy == "Beam search":
+            cap = model.generate(image, sample=False, num_beams=3, max_length=20, min_length=5)
+        else:
+            cap = model.generate(image, sample=True, top_p=0.9, max_length=20, min_length=5)
+    modelpath = getModelPath(language)
+    if modelpath:
+        translator = pipeline("translation", model=modelpath)
+        trans_cap = translator(cap[0])
+        tc = trans_cap[0]['translation_text']
+        return str(tc)
+    else:
+        return str(cap[0])
 
 
 description = "A pipeline of BLIP image captioning and Helsinki translation in order to generate image captions in a language of your choice either with beam search (deterministic) or nucleus sampling (stochastic). Enjoy! Is the language you want to use missing? Let me know and I'll integrate it."
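
The description sums up the app's flow: BLIP produces an English caption, which a Helsinki-NLP Opus-MT model then translates into the chosen language. Below is a minimal, self-contained sketch of that pipeline, not the app's actual setup code: the diff only shows inference(), so the blip_decoder loader and preprocessing taken from the BLIP repository's captioning demo, the placeholder checkpoint path, and the Helsinki-NLP/opus-mt-en-de model (standing in for whatever getModelPath(language) returns) are all illustrative assumptions.

# Minimal sketch of the caption-then-translate pipeline (assumptions noted above).
import torch
from torchvision import transforms
from torchvision.transforms.functional import InterpolationMode
from transformers import pipeline
from PIL import Image
from models.blip import blip_decoder  # assumes the BLIP repository is on the path

device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
image_size = 384

# Preprocessing as in the BLIP captioning demo; assumed to match app.py's `transform`.
transform = transforms.Compose([
    transforms.Resize((image_size, image_size), interpolation=InterpolationMode.BICUBIC),
    transforms.ToTensor(),
    transforms.Normalize((0.48145466, 0.4578275, 0.40821073),
                         (0.26862954, 0.26130258, 0.27577711)),
])

# Placeholder: point this at a BLIP captioning checkpoint (local path or URL).
BLIP_CHECKPOINT = "<path-or-url-to-blip-captioning-checkpoint>"
model = blip_decoder(pretrained=BLIP_CHECKPOINT, image_size=image_size, vit="base")
model.eval()
model = model.to(device)

def caption_and_translate(img, strategy, translation_model):
    """Caption an image with BLIP, then translate the caption with an Opus-MT model."""
    image = transform(img).unsqueeze(0).to(device)
    with torch.no_grad():
        if strategy == "Beam search":
            # Deterministic decoding with 3 beams, as in the app.
            cap = model.generate(image, sample=False, num_beams=3, max_length=20, min_length=5)
        else:
            # Nucleus sampling (stochastic), top_p=0.9, as in the app.
            cap = model.generate(image, sample=True, top_p=0.9, max_length=20, min_length=5)
    translator = pipeline("translation", model=translation_model)
    return translator(cap[0])[0]["translation_text"]

# Example (hypothetical usage): German caption via the English->German Opus-MT model.
# img = Image.open("photo.jpg").convert("RGB")
# print(caption_and_translate(img, "Beam search", "Helsinki-NLP/opus-mt-en-de"))

One design note: building the translation pipeline inside the function, as the app does, keeps only one Opus-MT model in memory at a time but reloads weights on every request; caching one translator per language would trade memory for lower latency.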