demo buildout 5
app.py CHANGED
@@ -48,7 +48,7 @@ def infer(
     text: str,
     max_new_tokens: int
 ) -> str:
-    inputs = processor(text=text, images=resize_and_pad(image), return_tensors="pt").to(device)
+    inputs = processor(text=text, images=resize_and_pad(image, 448), return_tensors="pt").to(device)
     with torch.inference_mode():
         generated_ids = model.generate(
             **inputs,
@@ -129,8 +129,8 @@ with gr.Blocks(css="style.css") as demo:
         outputs=chat_outputs,
     )

-    examples = [["./diagnosis-1.
-    ["./
+    examples = [["./diagnosis-1.png", "Transcribe the Arabic text."],
+                ["./sign.png", "Transcribe the Arabic text."]]
     gr.Markdown("")

     gr.Examples(
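The functional change in infer is that resize_and_pad now receives an explicit target size of 448 rather than relying on a default. The helper's body is not part of this diff, so the snippet below is only a minimal sketch of what such a letterbox-style helper typically looks like, assuming a Pillow image in and a square 448x448 image out; the actual implementation in app.py may differ.

from PIL import Image, ImageOps

def resize_and_pad(image: Image.Image, target_size: int = 448) -> Image.Image:
    # Hypothetical sketch -- the real helper is defined elsewhere in app.py.
    # Shrink the image so it fits within target_size x target_size while
    # preserving aspect ratio, then pad to a square so the processor always
    # receives a fixed-size input.
    return ImageOps.pad(image, (target_size, target_size), color=(0, 0, 0))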
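The second hunk replaces the previously truncated examples list with two image/prompt pairs that both ask for an Arabic transcription. The surrounding input components are not visible in this hunk, so the sketch below only illustrates how such a list is typically wired into gr.Examples; image_input and prompt_input are assumed names, not ones taken from app.py.

import gradio as gr

with gr.Blocks(css="style.css") as demo:
    # Assumed component names -- the real ones are defined earlier in app.py.
    image_input = gr.Image(type="pil", label="Image")
    prompt_input = gr.Textbox(label="Prompt")

    examples = [["./diagnosis-1.png", "Transcribe the Arabic text."],
                ["./sign.png", "Transcribe the Arabic text."]]

    # Clicking a row pre-fills the listed inputs with the example values.
    gr.Examples(examples=examples, inputs=[image_input, prompt_input])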