FlawedLLM committed
Commit b679fe4 · verified · 1 Parent(s): 078a2bd

Update app.py

Files changed (1): app.py (+15, -3)

app.py CHANGED
@@ -17,10 +17,22 @@ model = AutoModelForCausalLM.from_pretrained(model_id, device_map="cuda", trust_
 processor = AutoProcessor.from_pretrained(model_id, trust_remote_code=True)
 model.to("cuda:0")
 
-def base64_to_image(base64_str):
-    img_bytes = base64.b64decode(base64_str)
+from PIL import Image
+import base64
+import zlib
+from io import BytesIO
+
+def decode_and_decompress_image(base64_string):
+    # Decode the Base64 string to bytes
+    compressed_data = base64.b64decode(base64_string.encode('utf-8'))
+
+    # Decompress the data using zlib
+    img_bytes = zlib.decompress(compressed_data)
+
+    # Open the image from bytes
     img_buffer = BytesIO(img_bytes)
     image = Image.open(img_buffer)
+
     return image
 
 PLACEHOLDER = """
@@ -77,7 +89,7 @@ def bot_streaming(message, history):
     print(f"prompt is -\n{conversation}")
     prompt = processor.tokenizer.apply_chat_template(conversation, tokenize=False, add_generation_prompt=True)
     # image = Image.open(image)
-    image = base64_to_image(image)
+    image = decode_and_decompress_image(image)
     inputs = processor(prompt, image, return_tensors="pt").to("cuda:0")
 
     streamer = TextIteratorStreamer(processor, **{"skip_special_tokens": True, "skip_prompt": True, 'clean_up_tokenization_spaces':False,})
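
With this change the server no longer accepts a plain Base64-encoded image: it expects the raw image bytes to be zlib-compressed first and then Base64-encoded, since decode_and_decompress_image() reverses exactly those two steps. Below is a minimal client-side sketch of a matching encoder; the helper name compress_and_encode_image and the PNG intermediate format are illustrative assumptions and are not part of this commit.

from io import BytesIO
import base64
import zlib

from PIL import Image

def compress_and_encode_image(image: Image.Image) -> str:
    # Serialize the PIL image to PNG bytes in memory
    buffer = BytesIO()
    image.save(buffer, format="PNG")

    # zlib-compress the raw bytes, then Base64-encode them to a UTF-8 string,
    # mirroring what decode_and_decompress_image() undoes on the server side
    compressed = zlib.compress(buffer.getvalue())
    return base64.b64encode(compressed).decode("utf-8")

For example, compress_and_encode_image(Image.open("photo.jpg")) produces a string that decode_and_decompress_image() can turn back into a PIL image before it is passed to the processor.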