nielsr and hysts committed
Commit 953e4d8
1 parent: 87d0066

Parallelize client calls (#51)


- Parallelize client calls (7aeaca1a0c09df26fa15ce290eb9bc2dbe47effd)


Co-authored-by: hysts <[email protected]>
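
For context: `client.predict()` blocks until the remote Space responds, so the six captioning calls previously ran back to back. `client.submit()` instead returns a `Job` immediately, and `Job.result()` only blocks when the answer is collected, so submitting all jobs up front lets the Spaces work concurrently and total latency drops to roughly the slowest call rather than the sum. A minimal sketch of the pattern, using two of the Space names from this diff (error handling omitted):

from gradio_client import Client

def captions_sequential(image_path: str) -> tuple[str, str]:
    # Each predict() waits for its remote Space before the next call starts.
    git = Client("library-samples/image-captioning-with-git").predict(image_path, api_name="/caption")
    blip = Client("library-samples/image-captioning-with-blip").predict(image_path, "A picture of", api_name="/caption")
    return git, blip

def captions_parallel(image_path: str) -> tuple[str, str]:
    # submit() returns a Job immediately; both requests are in flight
    # before the first result() call blocks.
    jobs = [
        Client("library-samples/image-captioning-with-git").submit(image_path, api_name="/caption"),
        Client("library-samples/image-captioning-with-blip").submit(image_path, "A picture of", api_name="/caption"),
    ]
    return tuple(job.result() for job in jobs)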

Files changed (1)
  1. app.py +28 -20
app.py CHANGED
@@ -5,6 +5,7 @@ import os
 import gradio as gr
 import torch
 from gradio_client import Client
+from gradio_client.client import Job
 
 DESCRIPTION = "# Comparing image captioning models"
 ORIGINAL_SPACE_INFO = """\
@@ -25,28 +26,31 @@ torch.hub.download_url_to_file(
 )
 
 
-def generate_caption_git(image_path: str) -> str:
+def generate_caption_git(image_path: str, return_job: bool = False) -> str | Job:
     try:
         client = Client("library-samples/image-captioning-with-git")
-        return client.predict(image_path, api_name="/caption")
+        fn = client.submit if return_job else client.predict
+        return fn(image_path, api_name="/caption")
     except Exception:
         gr.Warning("The GIT-large Space is currently unavailable. Please try again later.")
         return ""
 
 
-def generate_caption_blip(image_path: str) -> str:
+def generate_caption_blip(image_path: str, return_job: bool = False) -> str | Job:
     try:
         client = Client("library-samples/image-captioning-with-blip")
-        return client.predict(image_path, "A picture of", api_name="/caption")
+        fn = client.submit if return_job else client.predict
+        return fn(image_path, "A picture of", api_name="/caption")
     except Exception:
         gr.Warning("The BLIP-large Space is currently unavailable. Please try again later.")
         return ""
 
 
-def generate_caption_blip2_opt(image_path: str) -> str:
+def generate_caption_blip2_opt(image_path: str, return_job: bool = False) -> str | Job:
     try:
         client = Client("merve/BLIP2-with-transformers")
-        return client.predict(
+        fn = client.submit if return_job else client.predict
+        return fn(
             image_path,
             "Beam search",
             1,  # temperature
@@ -59,10 +63,11 @@ def generate_caption_blip2_opt(image_path: str) -> str:
         return ""
 
 
-def generate_caption_blip2_t5xxl(image_path: str) -> str:
+def generate_caption_blip2_t5xxl(image_path: str, return_job: bool = False) -> str | Job:
     try:
         client = Client("hysts/BLIP2-with-transformers")
-        return client.predict(
+        fn = client.submit if return_job else client.predict
+        return fn(
             image_path,
             "Beam search",
             1,  # temperature
@@ -79,10 +84,11 @@ def generate_caption_blip2_t5xxl(image_path: str) -> str:
         return ""
 
 
-def generate_caption_instructblip(image_path: str) -> str:
+def generate_caption_instructblip(image_path: str, return_job: bool = False) -> str | Job:
     try:
         client = Client("library-samples/InstructBLIP")
-        return client.predict(
+        fn = client.submit if return_job else client.predict
+        return fn(
             image_path,
             "Describe the image.",
             "Beam search",
@@ -100,24 +106,26 @@ def generate_caption_instructblip(image_path: str) -> str:
         return ""
 
 
-def generate_caption_fuyu(image_path: str) -> str:
+def generate_caption_fuyu(image_path: str, return_job: bool = False) -> str | Job:
     try:
         client = Client("adept/fuyu-8b-demo")
-        return client.predict(image_path, "Generate a coco style caption.", fn_index=3)
+        fn = client.submit if return_job else client.predict
+        return fn(image_path, "Generate a coco style caption.", fn_index=3)
     except Exception:
         gr.Warning("The Fuyu-8B Space is currently unavailable. Please try again later.")
         return ""
 
 
 def generate_captions(image_path: str) -> tuple[str, str, str, str, str, str]:
-    return (
-        generate_caption_git(image_path),
-        generate_caption_blip(image_path),
-        generate_caption_blip2_opt(image_path),
-        generate_caption_blip2_t5xxl(image_path),
-        generate_caption_instructblip(image_path),
-        generate_caption_fuyu(image_path),
-    )
+    jobs = [
+        generate_caption_git(image_path, return_job=True),
+        generate_caption_blip(image_path, return_job=True),
+        generate_caption_blip2_opt(image_path, return_job=True),
+        generate_caption_blip2_t5xxl(image_path, return_job=True),
+        generate_caption_instructblip(image_path, return_job=True),
+        generate_caption_fuyu(image_path, return_job=True),
+    ]
+    return tuple(job.result() if job else "" for job in jobs)
 
 
 with gr.Blocks(css="style.css") as demo:
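
Note the fallback in the new `generate_captions`: when a helper cannot create its client, it still returns `""` rather than a `Job`, and `job.result() if job else ""` treats that falsy value as a missing job. One unavailable Space therefore degrades to an empty caption (plus the `gr.Warning`) instead of failing the whole batch.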