KingNish committed on
Commit
a7f7acc
·
verified ·
1 Parent(s): 70360e7

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +15 -3
app.py CHANGED
@@ -10,8 +10,8 @@ import random
10
  from gradio_client import Client, file
11
 
12
  def generate_caption_instructblip(image_path, question):
13
- client = Client("hysts/image-captioning-with-blip")
14
- return client.predict(file(image_path), f"Answer this Question in detail {question}", api_name="/caption")
15
 
16
  def extract_text_from_webpage(html_content):
17
  """Extracts visible text from HTML content using BeautifulSoup."""
@@ -74,7 +74,7 @@ def respond(
74
  for image in message["files"]:
75
  vqa += "[CAPTION of IMAGE] "
76
  gr.Info("Analyzing image")
77
- vqa += generate_caption_instructblip(image, message["text"])
78
  print(vqa)
79
  except:
80
  vqa = ""
@@ -201,6 +201,18 @@ def respond(
201
  image = f"![](https://image.pollinations.ai/prompt/{query}?{seed})"
202
  yield image
203
  gr.Info("We are going to Update Our Image Generation Engine to more powerful ones in Next Update. ThankYou")
 
 
 
 
 
 
 
 
 
 
 
 
204
  else:
205
  messages = f"<|start_header_id|>system\nYou are OpenGPT 4o mini a helpful assistant made by KingNish. You answers users query like human friend. You are also Expert in every field and also learn and try to answer from contexts related to previous question. Try your best to give best response possible to user. You also try to show emotions using Emojis and reply like human, use short forms, friendly tone and emotions.<|end_header_id|>"
206
  for msg in history:
 
10
  from gradio_client import Client, file
11
 
12
  def generate_caption_instructblip(image_path, question):
13
+ client = Client("unography/image-captioning-with-longcap")
14
+ return client.predict(file(image_path), api_name="/caption")
15
 
16
  def extract_text_from_webpage(html_content):
17
  """Extracts visible text from HTML content using BeautifulSoup."""
 
74
  for image in message["files"]:
75
  vqa += "[CAPTION of IMAGE] "
76
  gr.Info("Analyzing image")
77
+ vqa += generate_caption_instructblip(image)
78
  print(vqa)
79
  except:
80
  vqa = ""
 
201
  image = f"![](https://image.pollinations.ai/prompt/{query}?{seed})"
202
  yield image
203
  gr.Info("We are going to Update Our Image Generation Engine to more powerful ones in Next Update. ThankYou")
204
+ elif json_data["name"] == "image_qna":
205
+ messages = f"<|start_header_id|>system\nYou are OpenGPT 4o mini a helpful assistant made by KingNish. You are provide with both images and captions and Your task is to answer of user with help of caption provided. Answer in human style and show emotions.<|end_header_id|>"
206
+ for msg in history:
207
+ messages += f"\n<|start_header_id|>user\n{str(msg[0])}<|end_header_id|>"
208
+ messages += f"\n<|start_header_id|>assistant\n{str(msg[1])}<|end_header_id|>"
209
+ messages+=f"\n<|start_header_id|>user\n{message_text} {vqa}<|end_header_id|>\n<|start_header_id|>assistant\n"
210
+ stream = client_llama.text_generation(messages, **generate_kwargs)
211
+ output = ""
212
+ for response in stream:
213
+ if not response.token.text == "<|eot_id|>":
214
+ output += response.token.text
215
+ yield output
216
  else:
217
  messages = f"<|start_header_id|>system\nYou are OpenGPT 4o mini a helpful assistant made by KingNish. You answers users query like human friend. You are also Expert in every field and also learn and try to answer from contexts related to previous question. Try your best to give best response possible to user. You also try to show emotions using Emojis and reply like human, use short forms, friendly tone and emotions.<|end_header_id|>"
218
  for msg in history: