Update app.py
app.py (CHANGED)
@@ -30,7 +30,7 @@ MAX_INPUT_TOKEN_LENGTH = int(os.getenv("MAX_INPUT_TOKEN_LENGTH", "4096"))
 device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
 
 # Load text-only model and tokenizer
-model_id = "prithivMLmods/
+model_id = "prithivMLmods/Pocket-Llama-3.2-3B-Instruct"
 tokenizer = AutoTokenizer.from_pretrained(model_id)
 model = AutoModelForCausalLM.from_pretrained(
     model_id,
@@ -39,7 +39,7 @@ model = AutoModelForCausalLM.from_pretrained(
 )
 model.eval()
 
-MODEL_ID = "prithivMLmods/
+MODEL_ID = "prithivMLmods/Callisto-OCR3-2B-Instruct"
 processor = AutoProcessor.from_pretrained(MODEL_ID, trust_remote_code=True)
 model_m = Qwen2VLForConditionalGeneration.from_pretrained(
     MODEL_ID,
@@ -236,6 +236,10 @@ demo = gr.ChatInterface(
         gr.Slider(label="Top-k", minimum=1, maximum=1000, step=1, value=50),
         gr.Slider(label="Repetition penalty", minimum=1.0, maximum=2.0, step=0.05, value=1.2),
     ],
+    examples=[
+        ["Write the code that converts temperatures between celsius and fahrenheit"],
+        [{"text": "Create a short story based on the image.", "files": ["examples/1.jpg"]}],
+    ],
     cache_examples=False,
     type="messages",
     fill_height=True,
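
The first two hunks pin the Space's checkpoints: model_id (the text-only chat model) becomes prithivMLmods/Pocket-Llama-3.2-3B-Instruct, and MODEL_ID (the Qwen2-VL multimodal model) becomes prithivMLmods/Callisto-OCR3-2B-Instruct. For context, a minimal sketch of the load code as it likely reads after this commit follows; the from_pretrained kwargs on the lines the diff elides are filled in with common defaults and should be read as assumptions, not the Space's exact code.

# Minimal sketch of the model setup after this commit. The kwargs between
# the hunks are elided in the diff, so the torch_dtype / .to(device)
# choices below are assumptions based on common usage.
import torch
from transformers import (
    AutoModelForCausalLM,
    AutoProcessor,
    AutoTokenizer,
    Qwen2VLForConditionalGeneration,
)

device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")

# Text-only chat model, pinned to the new checkpoint by this commit
model_id = "prithivMLmods/Pocket-Llama-3.2-3B-Instruct"
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(
    model_id,
    torch_dtype=torch.float16,  # assumed; this kwarg sits on an elided line
).to(device)
model.eval()

# Qwen2-VL-based multimodal checkpoint, also pinned by this commit
MODEL_ID = "prithivMLmods/Callisto-OCR3-2B-Instruct"
processor = AutoProcessor.from_pretrained(MODEL_ID, trust_remote_code=True)
model_m = Qwen2VLForConditionalGeneration.from_pretrained(
    MODEL_ID,
    torch_dtype=torch.float16,  # assumed
).to(device).eval()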
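The third hunk is the substantive UI change: it adds two starter examples to the gr.ChatInterface, one text-only and one multimodal. A rough sketch of how those arguments sit in the constructor is below; only examples, cache_examples, type, and fill_height appear in the diff, so the handler name and the multimodal=True flag are assumptions added to make the dict-style example well-formed.

import gradio as gr

def generate(message, history):
    # Stub standing in for the Space's real handler (name assumed).
    # With multimodal=True, message is a dict: {"text": str, "files": [paths]}.
    text = message["text"] if isinstance(message, dict) else message
    return f"(model reply to: {text})"

demo = gr.ChatInterface(
    fn=generate,      # handler name assumed; not shown in the diff
    multimodal=True,  # assumed, so the {"text": ..., "files": [...]} example is valid
    examples=[
        ["Write the code that converts temperatures between celsius and fahrenheit"],
        [{"text": "Create a short story based on the image.", "files": ["examples/1.jpg"]}],
    ],
    cache_examples=False,  # examples run the model live, not from cached output
    type="messages",       # history uses OpenAI-style role/content dicts
    fill_height=True,
)

if __name__ == "__main__":
    demo.launch()

With cache_examples=False, clicking either example invokes the model at request time instead of precomputing responses at startup.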