aravindbethapudi2017 committed
Commit b9d19f5 · verified · 1 Parent(s): f7214b2

Create app.py

Files changed (1)
  1. app.py +43 -0
app.py ADDED
@@ -0,0 +1,43 @@
+ import gradio as gr
+ import torch
+ from transformers import AutoModelForSequenceClassification, AutoTokenizer
+
+ # ✅ Load model & tokenizer
+ model_name = "microsoft/deberta-v3-base"  # Change if needed
+ model = AutoModelForSequenceClassification.from_pretrained(model_name, num_labels=16)
+ tokenizer = AutoTokenizer.from_pretrained(model_name)
+
+ device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
+ model.to(device)
+ model.eval()
+
+ # ✅ Define prediction function
+ def predict_mbti(text):
+     inputs = tokenizer(text, return_tensors="pt", truncation=True, padding="max_length", max_length=256)
+     inputs = {k: v.to(device) for k, v in inputs.items()}
+
+     with torch.no_grad():
+         outputs = model(**inputs)
+
+     predictions = torch.argmax(outputs.logits, dim=1).cpu().item()
+
+     # Mapping predicted labels back to MBTI types
+     mbti_types = [
+         "INFJ", "ENTP", "INTP", "INTJ", "ENTJ", "ENFJ", "INFP", "ENFP",
+         "ISFP", "ISTP", "ISFJ", "ISTJ", "ESTP", "ESFP", "ESTJ", "ESFJ"
+     ]
+
+     return mbti_types[predictions]
+
+ # ✅ Create Gradio UI
+ interface = gr.Interface(
+     fn=predict_mbti,
+     inputs=gr.Textbox(lines=3, placeholder="Enter a text to predict MBTI type"),
+     outputs="text",
+     title="MBTI Personality Predictor",
+     description="Enter a text and get the predicted MBTI personality type."
+ )
+
+ # ✅ Launch app
+ if __name__ == "__main__":
+     interface.launch()
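
A minimal smoke test for the prediction function, not part of the commit: a sketch assuming app.py sits in the working directory and that gradio, torch, transformers, and sentencepiece are installed. Because microsoft/deberta-v3-base ships no sequence-classification head, the 16-way head here is randomly initialized, so outputs are only meaningful once model_name points at a fine-tuned checkpoint. Importing app does not launch the UI, since interface.launch() is guarded by the __main__ check.

# smoke_test.py (hypothetical file name, kept outside the Space's app.py)
from app import predict_mbti  # loads the model once at import time

if __name__ == "__main__":
    sample = "I love planning quiet weekends around books and long walks."
    label = predict_mbti(sample)
    print(f"Predicted MBTI type: {label}")  # one of the 16 labels defined in app.py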