Update app.py
app.py CHANGED

@@ -1,6 +1,6 @@
 from flask import Flask, request, jsonify
 import torch
-from transformers import
+from transformers import BertTokenizer, BertForSequenceClassification
 import os

 app = Flask(__name__)
@@ -14,12 +14,12 @@ def load_model():
     global global_tokenizer, global_model
     try:
         print("Loading model and tokenizer...")
-        #
-        MODEL_NAME = "
+        # Use a different model (bert-base-uncased)
+        MODEL_NAME = "bert-base-uncased"  # You can replace this with your own fine-tuned model

         # Load tokenizer and model from Hugging Face Hub or a local path
-        global_tokenizer =
-        global_model =
+        global_tokenizer = BertTokenizer.from_pretrained(MODEL_NAME)
+        global_model = BertForSequenceClassification.from_pretrained(MODEL_NAME)
         global_model.eval()

         print("Model loaded successfully!")
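In this version, load_model() pulls the stock bert-base-uncased checkpoint, so BertForSequenceClassification will attach a freshly initialized classification head unless the name is swapped for a fine-tuned model, as the inline comment suggests. The rest of app.py is not shown in this commit; the sketch below is one plausible way the loaded globals could be consumed by an inference route. The /predict endpoint, request schema, and response fields are assumptions for illustration, not part of the diff.

# Hypothetical inference route using the globals loaded above (not part of this commit).
@app.route("/predict", methods=["POST"])
def predict():
    data = request.get_json(force=True)
    text = data.get("text", "")

    # Tokenize the input and run the classifier without tracking gradients
    inputs = global_tokenizer(text, return_tensors="pt", truncation=True, padding=True)
    with torch.no_grad():
        outputs = global_model(**inputs)

    # Convert logits to probabilities and report the top class
    probs = torch.softmax(outputs.logits, dim=-1).squeeze(0)
    return jsonify({
        "predicted_class": int(torch.argmax(probs)),
        "probabilities": probs.tolist(),
    })

With the server running on Flask's default port, a request such as curl -X POST http://localhost:5000/predict -H "Content-Type: application/json" -d '{"text": "great movie"}' would return the predicted class index and the softmax probabilities under this assumed route.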