Update app.py
app.py (CHANGED)

@@ -2,12 +2,16 @@ import gradio as gr
 import pandas as pd
 import torch
 from transformers import AutoTokenizer, AutoModelForMaskedLM
+import logging
 
-
+logging.getLogger("transformers.modeling_utils").setLevel(logging.ERROR)
+device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
+print(f"Using device: {device}")
+
+# Load the tokenizer and model
 model_name = "ChatterjeeLab/FusOn-pLM"
 tokenizer = AutoTokenizer.from_pretrained(model_name, trust_remote_code=True)
 model = AutoModelForMaskedLM.from_pretrained(model_name, trust_remote_code=True)
-device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
 model.to(device)
 model.eval()
 
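For context on what this setup enables: after this commit, app.py has model, tokenizer, and device ready for masked-token inference. The sketch below shows one way they could be used to predict a masked residue, reusing those three objects and assuming FusOn-pLM exposes the usual ESM-style masked-LM interface (inline mask token, per-residue logits). The example sequence and the single masked position are illustrative only and are not taken from the app.

import torch

# Illustrative fusion-protein fragment with one masked residue (hypothetical sequence).
sequence = "MKTAYIAKQR" + tokenizer.mask_token + "LKSIVRILERSKEPVSG"

# Tokenize and move the batch to the same device as the model.
inputs = tokenizer(sequence, return_tensors="pt").to(device)

with torch.no_grad():
    logits = model(**inputs).logits

# Locate the masked position(s) and take the highest-scoring residue at each one.
mask_positions = (inputs["input_ids"] == tokenizer.mask_token_id).nonzero(as_tuple=True)[1]
predicted_ids = logits[0, mask_positions].argmax(dim=-1)
print("Predicted residue(s):", tokenizer.decode(predicted_ids))

Because the diff moves the device selection ahead of model loading and adds the "Using device" print, the same snippet runs unchanged on CPU or GPU; only the reported device differs.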