Update app.py
app.py CHANGED
@@ -7,10 +7,11 @@ import torch
 @st.cache_resource
 def load_model():
     model_id = 'microsoft/Florence-2-large'
-    model = AutoModelForCausalLM.from_pretrained(model_id, trust_remote_code=True).eval()
+    model = AutoModelForCausalLM.from_pretrained(model_id, trust_remote_code=True).eval().to(torch.float32)
     processor = AutoProcessor.from_pretrained(model_id, trust_remote_code=True)
     return model, processor
 
+# Load the model and processor globally
 model, processor = load_model()
 
 # Function to run the model
@@ -26,7 +27,7 @@ def run_example(task_prompt, image, text_input=None):
     inputs["pixel_values"] = inputs["pixel_values"].to(torch.float32)
 
     # Ensure the model is in float32 mode
-    model
+    # The model has already been converted to float32 during loading, so this is not needed here.
 
     # Generate predictions
     generated_ids = model.generate(
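For context, here is a minimal sketch of how the cached load_model() and the inference path fit together after this commit. It assumes the standard Streamlit/Transformers imports that app.py already uses; the body of run_example, the prompt handling, and the generation parameters (max_new_tokens, num_beams) do not appear in the diff and are illustrative assumptions. The net effect of the change is that the float32 conversion happens once inside the cached load_model() instead of on every call to run_example().

import streamlit as st
import torch
from transformers import AutoModelForCausalLM, AutoProcessor

@st.cache_resource
def load_model():
    model_id = 'microsoft/Florence-2-large'
    # Cast the whole model to float32 once at load time, so the per-request
    # code no longer needs to convert it.
    model = AutoModelForCausalLM.from_pretrained(model_id, trust_remote_code=True).eval().to(torch.float32)
    processor = AutoProcessor.from_pretrained(model_id, trust_remote_code=True)
    return model, processor

# Load the model and processor globally (cached across reruns by st.cache_resource)
model, processor = load_model()

def run_example(task_prompt, image, text_input=None):
    # Illustrative reconstruction: only the pixel_values cast and the
    # generate() call are shown in the diff; the rest is assumed.
    prompt = task_prompt if text_input is None else task_prompt + text_input
    inputs = processor(text=prompt, images=image, return_tensors="pt")
    inputs["pixel_values"] = inputs["pixel_values"].to(torch.float32)
    # The model was already converted to float32 in load_model(), so no cast is needed here.
    generated_ids = model.generate(
        input_ids=inputs["input_ids"],
        pixel_values=inputs["pixel_values"],
        max_new_tokens=1024,  # assumed generation settings
        num_beams=3,
    )
    return processor.batch_decode(generated_ids, skip_special_tokens=False)[0]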