aideveloper24 committed
Commit 195370c · verified · 1 Parent(s): 1a0232d

Update app.py

Files changed (1): app.py +5 -4
app.py CHANGED

@@ -14,8 +14,8 @@ def load_model():
     global global_tokenizer, global_model
     try:
         print("Loading model and tokenizer...")
-        # Replace this path with your model's directory if using a custom model
-        MODEL_NAME = "distilbert-base-uncased-finetuned-sst-2-english"  # Can be a custom path if using your own model
+        # Replace this path with your model's directory or Hugging Face model name
+        MODEL_NAME = "aideveloper24/email_classify"  # Replace with your custom model name

         # Load tokenizer and model from Hugging Face Hub or a local path
         global_tokenizer = DistilBertTokenizer.from_pretrained(MODEL_NAME)
@@ -70,10 +70,10 @@ def classify_email():
     # Get the subject
     subject = data['subject']

-    # Tokenize
+    # Tokenize the subject text
     inputs = global_tokenizer(subject, return_tensors="pt", truncation=True, max_length=512)

-    # Predict
+    # Predict the class
     with torch.no_grad():
         outputs = global_model(**inputs)
         logits = outputs.logits
@@ -89,6 +89,7 @@ def classify_email():
         1: "Personal/Casual"
     }

+    # Create the response
     result = {
         'category': CUSTOM_LABELS[predicted_class_id],
         'confidence': round(confidence, 3),
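
For quick local testing, the updated code path can be exercised outside the Flask app with the minimal sketch below. It is an approximation, not the exact app.py: it assumes the checkpoint at aideveloper24/email_classify is a standard DistilBERT sequence-classification model (the model-loading line itself sits outside the visible hunks), assumes confidence comes from a softmax over the logits, and uses a placeholder for the class-id-0 label, which this diff does not show.

import torch
from transformers import DistilBertTokenizer, DistilBertForSequenceClassification

MODEL_NAME = "aideveloper24/email_classify"

# Mirrors the CUSTOM_LABELS mapping in app.py; only id 1 is visible in this
# diff, so the id 0 label below is a placeholder, not the real value.
CUSTOM_LABELS = {
    0: "Class 0 (label not shown in this diff)",
    1: "Personal/Casual",
}

def classify_subject(subject: str) -> dict:
    tokenizer = DistilBertTokenizer.from_pretrained(MODEL_NAME)
    model = DistilBertForSequenceClassification.from_pretrained(MODEL_NAME)
    model.eval()

    # Tokenize the subject text, truncating to the model's 512-token limit
    inputs = tokenizer(subject, return_tensors="pt", truncation=True, max_length=512)

    # Predict the class without tracking gradients
    with torch.no_grad():
        logits = model(**inputs).logits

    # Softmax over the logits gives per-class probabilities (assumed to be
    # how classify_email() derives its confidence score)
    probs = torch.softmax(logits, dim=-1)[0]
    predicted_class_id = int(torch.argmax(probs))
    confidence = float(probs[predicted_class_id])

    # Same response shape as the endpoint: category plus rounded confidence
    return {
        "category": CUSTOM_LABELS[predicted_class_id],
        "confidence": round(confidence, 3),
    }

if __name__ == "__main__":
    print(classify_subject("Quarterly budget review moved to Friday"))

Against the running app, the classify_email() handler shown above should return the same {'category': ..., 'confidence': ...} shape for a JSON POST body containing a 'subject' field.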