Spaces:
Sleeping
Sleeping
init
Browse files
app.py
CHANGED
@@ -1,50 +1,142 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
from tensorflow.keras.models import load_model
|
2 |
from tensorflow.keras.initializers import Orthogonal
|
3 |
from tensorflow.keras.utils import custom_object_scope
|
4 |
from tensorflow.keras.layers import LSTM
|
5 |
-
import gradio as gr
|
6 |
-
import pandas as pd
|
7 |
-
import numpy as np
|
8 |
-
# Initialize LSTM layer correctly without time_major
|
9 |
-
lstm_layer = LSTM(64, return_sequences=True)
|
10 |
|
11 |
-
|
|
|
12 |
with custom_object_scope({'Orthogonal': Orthogonal}):
|
13 |
model = load_model('models/lstm-combinedmodel.h5')
|
14 |
|
15 |
-
|
16 |
-
|
17 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
18 |
|
19 |
-
#
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
20 |
required_columns = ['CAN ID', 'RTR', 'DLC', 'Data1', 'Data2', 'Data3', 'Data4', 'Data5', 'Data6', 'Data7', 'Data8']
|
21 |
-
data = data[required_columns]
|
22 |
|
23 |
-
|
24 |
-
|
|
|
|
|
|
|
25 |
|
26 |
# Predict using the model
|
27 |
predictions = model.predict(input_data)
|
28 |
|
29 |
-
|
30 |
-
predicted_class = np.argmax(predictions, axis=1)
|
31 |
-
confidence = np.max(predictions, axis=1)
|
32 |
-
|
33 |
# Map numeric class to label
|
34 |
class_labels = {0: "Normal", 1: "Anomaly"}
|
35 |
-
label = class_labels[predicted_class]
|
36 |
-
output = f"Predicted Class: {label}, Confidence: {confidence:.4f}"
|
37 |
|
38 |
-
|
|
|
|
|
|
|
|
|
39 |
|
|
|
|
|
40 |
def interface_func(uploaded_file):
|
41 |
-
#
|
42 |
-
|
43 |
-
|
|
|
|
|
|
|
|
|
|
|
44 |
|
45 |
-
|
46 |
-
|
47 |
-
|
48 |
-
|
|
|
|
|
|
|
49 |
|
|
|
50 |
iface.launch()
|
|
|
1 |
+
# from tensorflow.keras.models import load_model
|
2 |
+
# from tensorflow.keras.initializers import Orthogonal
|
3 |
+
# from tensorflow.keras.utils import custom_object_scope
|
4 |
+
# from tensorflow.keras.layers import LSTM
|
5 |
+
# import gradio as gr
|
6 |
+
# import pandas as pd
|
7 |
+
# import numpy as np
|
8 |
+
# # Initialize LSTM layer correctly without time_major
|
9 |
+
# lstm_layer = LSTM(64, return_sequences=True)
|
10 |
+
|
11 |
+
# # Register custom initializers or objects when loading the model
|
12 |
+
# with custom_object_scope({'Orthogonal': Orthogonal}):
|
13 |
+
# model = load_model('models/lstm-combinedmodel.h5')
|
14 |
+
|
15 |
+
# def predict_from_csv(file_path):
|
16 |
+
# # Load the data from CSV
|
17 |
+
# data = pd.read_csv(file_path)
|
18 |
+
|
19 |
+
# # Reorder and preprocess data if necessary
|
20 |
+
# required_columns = ['CAN ID', 'RTR', 'DLC', 'Data1', 'Data2', 'Data3', 'Data4', 'Data5', 'Data6', 'Data7', 'Data8']
|
21 |
+
# data = data[required_columns]
|
22 |
+
|
23 |
+
# # Convert data to numpy array or the format your model expects
|
24 |
+
# input_data = data.values
|
25 |
+
|
26 |
+
# # Predict using the model
|
27 |
+
# predictions = model.predict(input_data)
|
28 |
+
|
29 |
+
# # Determine the predicted class and confidence
|
30 |
+
# predicted_class = np.argmax(predictions, axis=1)[0]
|
31 |
+
# confidence = np.max(predictions, axis=1)[0]
|
32 |
+
|
33 |
+
# # Map numeric class to label
|
34 |
+
# class_labels = {0: "Normal", 1: "Anomaly"}
|
35 |
+
# label = class_labels[predicted_class]
|
36 |
+
# output = f"Predicted Class: {label}, Confidence: {confidence:.4f}"
|
37 |
+
|
38 |
+
# return output
|
39 |
+
|
40 |
+
# def interface_func(uploaded_file):
|
41 |
+
# # Use the prediction function on the uploaded file path
|
42 |
+
# predictions = predict_from_csv(uploaded_file.name)
|
43 |
+
# return predictions
|
44 |
+
|
45 |
+
# iface = gr.Interface(fn=interface_func,
|
46 |
+
# inputs=gr.File(label="Upload CSV"),
|
47 |
+
# outputs="text",
|
48 |
+
# description="Upload a CSV file with the specified columns to predict.")
|
49 |
+
|
50 |
+
# iface.launch()
|
51 |
+
|
52 |
+
|
53 |
+
import pandas as pd
|
54 |
+
import gradio as gr
|
55 |
+
import numpy as np
|
56 |
from tensorflow.keras.models import load_model
|
57 |
from tensorflow.keras.initializers import Orthogonal
|
58 |
from tensorflow.keras.utils import custom_object_scope
|
59 |
from tensorflow.keras.layers import LSTM
|
|
|
|
|
|
|
|
|
|
|
60 |
|
61 |
+
|
62 |
+
# Initialize and load the model
# Register the custom 'Orthogonal' initializer so Keras can deserialize any
# layer saved with it, then load the pre-trained combined LSTM model from disk.
# NOTE(review): path is relative — assumes the app is launched from the repo
# root where 'models/' lives; confirm the Space's working directory.
with custom_object_scope({'Orthogonal': Orthogonal}):
    model = load_model('models/lstm-combinedmodel.h5')
|
65 |
|
66 |
+
|
67 |
+
# Function to parse a CAN-bus log text file into a DataFrame
def parse_text_file(file):
    """Parse a CAN log text file into a pandas DataFrame.

    Each line is expected to look like:
        Timestamp: <float> ID: <can-id> <rtr> DLC: <dlc> <data byte> ...
    The 'Timestamp:', 'ID:' and 'DLC:' labels are stripped; the first
    remaining token becomes the float index and the rest fill the data
    columns.

    Returns a DataFrame indexed by 'Timestamp' with columns
    ['CAN ID', 'RTR', 'DLC', 'Data1'..'Data8'].
    """
    columns = ['CAN ID', 'RTR', 'DLC', 'Data1', 'Data2', 'Data3', 'Data4',
               'Data5', 'Data6', 'Data7', 'Data8']
    # Dictionary to hold data extracted from the text file (timestamp -> row)
    dfdict = {}

    # Read file lines and process them
    for raw_line in file.readlines():
        # Tolerate a handle opened in binary mode (uploads may yield bytes).
        if isinstance(raw_line, bytes):
            raw_line = raw_line.decode('utf-8', errors='replace')
        tokens = raw_line.split()

        # Drop the field labels, keeping only the values.
        for label in ('Timestamp:', 'ID:', 'DLC:'):
            if label in tokens:
                tokens.remove(label)

        # Skip blank lines.
        if not tokens:
            continue

        # Extract timestamp as key and remaining data as value.
        key = float(tokens[0])
        value = tokens[1:]

        # Frames with DLC < 8 carry fewer data bytes; pad (or truncate)
        # so every row matches the 11 expected columns — the original
        # code crashed in DataFrame.from_dict on short rows.
        value = (value + [None] * len(columns))[:len(columns)]
        dfdict[key] = value

    # Convert dictionary to DataFrame
    df = pd.DataFrame.from_dict(dfdict, orient='index', columns=columns)
    df.index.name = 'Timestamp'  # Set index name
    return df
|
92 |
+
|
93 |
+
|
94 |
+
# Function to make predictions using the model
def make_predictions(dataframe):
    """Run the loaded LSTM model over *dataframe* and return one
    human-readable result string per input row.

    Raises:
        ValueError: if any of the expected CAN columns is absent.
    """
    expected = ['CAN ID', 'RTR', 'DLC', 'Data1', 'Data2', 'Data3',
                'Data4', 'Data5', 'Data6', 'Data7', 'Data8']

    # Validate the schema before touching the model.
    if any(name not in dataframe.columns for name in expected):
        raise ValueError("Missing required columns in the DataFrame.")

    # Feed the raw column values to the model in the expected order.
    scores = model.predict(dataframe[expected].values)

    # Per-row winning class index and its probability.
    classes = np.argmax(scores, axis=1)
    confidences = np.max(scores, axis=1)

    # Map numeric class to label and format one line per row.
    labels = {0: "Normal", 1: "Anomaly"}
    return [
        f"Predicted Class: {labels[cls]}, Confidence: {conf:.4f}"
        for cls, conf in zip(classes, confidences)
    ]
|
120 |
|
121 |
+
|
122 |
+
# Gradio interface function
def interface_func(uploaded_file):
    """Gradio handler: parse the uploaded CAN log file and return the
    model's per-row predictions as newline-separated text."""
    frame = parse_text_file(uploaded_file)
    return "\n".join(make_predictions(frame))
|
131 |
+
|
132 |
|
133 |
+
# Set up the Gradio interface:
# file upload -> interface_func -> plain-text predictions output.
iface = gr.Interface(
    fn=interface_func,
    inputs=gr.File(label="Upload a text file"),
    outputs="text",
    description="Upload a text file with CAN data to receive predictions."
)

# Launch the interface (blocks and serves the app).
iface.launch()
|