Sirapatsorn committed on
Commit ac53e7e · verified · 1 Parent(s): 6f4f3d5

Create app.py

Files changed (1)
  1. app.py +92 -0
app.py ADDED
@@ -0,0 +1,92 @@
+ import gradio as gr
+ import torch
+ from transformers import BertTokenizer, BertForSequenceClassification, XLNetTokenizer, XLNetForSequenceClassification
+
+ # Load the fine-tuned models
+ logbert_model = BertForSequenceClassification.from_pretrained("Sirapatsorn/Spark_Log_Analysis", use_auth_token=True)
+ xlnet_model = XLNetForSequenceClassification.from_pretrained("Sirapatsorn/Spark_Log_Analysis", use_auth_token=True)
+ logbert_tokenizer = BertTokenizer.from_pretrained("bert-base-uncased")
+ xlnet_tokenizer = XLNetTokenizer.from_pretrained("xlnet-base-cased")
+
+ # Check whether a GPU is available
+ device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
+ logbert_model.to(device)
+ xlnet_model.to(device)
+
+ # Prediction function
+ def predict_log(text):
+     logbert_inputs = logbert_tokenizer(text, return_tensors="pt", truncation=True, max_length=512).to(device)
+     with torch.no_grad():
+         logbert_outputs = logbert_model(**logbert_inputs)
+     log_level = torch.argmax(logbert_outputs.logits, dim=1).item()
+     log_level_confidence = torch.softmax(logbert_outputs.logits, dim=1)[0][log_level].item()
+
+     log_levels = ["INFO", "WARN", "ERROR"]
+     log_level_result = log_levels[log_level]
+
+     xlnet_inputs = xlnet_tokenizer(text, return_tensors="pt", truncation=True, max_length=512).to(device)
+     with torch.no_grad():
+         xlnet_outputs = xlnet_model(**xlnet_inputs)
+     performance_value = xlnet_outputs.logits.item()
+
+     if performance_value < 0:
+         performance_status = "Good Performance"
+     elif performance_value < 3.0:
+         performance_status = "Normal Performance"
+     else:
+         performance_status = "Poor Performance"
+
+     return {
+         "Log Level": log_level_result,
+         "Confidence": f"{log_level_confidence:.2f}",
+         "Performance Value": f"{performance_value:.2f}",
+         "Performance Status": performance_status
+     }
+
+ # Gradio helper: predict directly from a text input
+ def predict_from_text(text):
+     prediction = predict_log(text)
+     return (f"Log Level: {prediction['Log Level']} (Confidence: {prediction['Confidence']})\n"
+             f"Performance Value: {prediction['Performance Value']}\n"
+             f"Performance Status: {prediction['Performance Status']}")
+
+ def predict_from_file(file):
+     results = []
+     with open(file.name, 'r') as f:
+         for line in f:
+             prediction = predict_log(line.strip())
+             result_text = (f"Log: {line.strip()}\n"
+                            f"Log Level: {prediction['Log Level']} (Confidence: {prediction['Confidence']})\n"
+                            f"Performance Value: {prediction['Performance Value']}\n"
+                            f"Performance Status: {prediction['Performance Status']}")
+             results.append(result_text)
+     return "\n\n".join(results)
+
+ custom_css = """
+ .gr-button {
+     background-color: #FFA500 !important;
+     color: #FFFFFF !important;
+     border: none !important;
+ }
+ """
+
+ with gr.Blocks(css=custom_css) as demo:
+     with gr.Tabs():
+         with gr.TabItem("Upload File"):
+             file_upload = gr.File(label="Upload Log File")
+             file_btn = gr.Button("Predict")
+             file_output = gr.Textbox(label="Output")
+             file_btn.click(predict_from_file, inputs=file_upload, outputs=file_output)
+         with gr.TabItem("Text Input"):
+             text_input = gr.Textbox(label="Enter Log Message")
+             text_btn = gr.Button("Predict")
+             text_output = gr.Textbox(label="Output")
+             text_btn.click(predict_from_text, inputs=text_input, outputs=text_output)
+
+ demo.css += """
+ .gr-button.gr-button-lg.gr-button-secondary {
+     display: none !important;
+ }
+ """
+
+ demo.launch()
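
A minimal sketch of querying the launched app programmatically with gradio_client. The local address and the /predict_from_text endpoint name are assumptions (Gradio's default port and its convention of naming endpoints after the bound function), not something this commit specifies:

from gradio_client import Client

# Assumption: the app from this commit is being served locally on Gradio's default port.
client = Client("http://127.0.0.1:7860/")

# Assumption: Gradio auto-named the text endpoint after the predict_from_text function.
result = client.predict(
    "ERROR Executor: Exception in task 0.0 in stage 1.0",  # hypothetical Spark log line
    api_name="/predict_from_text",
)
print(result)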