# Data_cellar_services / pages / 6_Anomaly_Detection_Production.py
import streamlit as st
import json
import pandas as pd
import os
from utils import load_and_process_data, create_time_series_plot, display_statistics, call_api
import plotly.express as px
import plotly.graph_objects as go
# Pull the API token from the environment the first time this session runs.
if 'api_token' not in st.session_state:
    st.session_state['api_token'] = os.getenv('NILM_API_TOKEN')

page_id = 6

# Track navigation: arriving here from a different page discards any stale
# API response so results from another service are never shown on this one.
if 'current_page' not in st.session_state:
    st.session_state['current_page'] = page_id
elif st.session_state['current_page'] != page_id:
    if 'api_response' in st.session_state:
        st.session_state['api_response'] = None
    st.session_state['current_page'] = page_id

# Seed the remaining per-session defaults exactly once.
_defaults = {
    'current_file': None,
    'json_data': None,
    'api_response': None,
    'using_default_file': True,
}
for _key, _value in _defaults.items():
    if _key not in st.session_state:
        st.session_state[_key] = _value
# --- Page header: title and service description (markdown rendered as-is) ---
st.title("Energy Production Anomaly Detection")
st.markdown("""
This service analyzes energy production patterns to detect anomalies and unusual behavior in your data.
### Features
- Real-time anomaly detection
- Production irregularity identification
- Interactive visualization of detected anomalies
""")
# Default file path
# Bundled sample payload used when the visitor has not uploaded their own JSON.
default_file_path = "samples/6_anomaly_detection_production.json" # Adjust this path to your default file
# File upload and processing
uploaded_file = st.file_uploader("Upload JSON file (or use default)", type=['json'])

# Fall back to the bundled sample payload while nothing has been uploaded
# and the user has not opted out of the default file.
if uploaded_file is None and st.session_state.using_default_file:
    if os.path.exists(default_file_path):
        st.info(f"Using default file: {default_file_path}")
        with open(default_file_path, 'r', encoding='utf-8') as f:
            file_contents = f.read()
        # Only re-parse when the content actually changed (avoids needless
        # work on every Streamlit rerun).
        if st.session_state.current_file != file_contents:
            try:
                # Parse BEFORE caching the raw text: if parsing fails we must
                # not mark the file as "seen", or later reruns would skip the
                # parse and keep stale json_data.
                st.session_state.json_data = json.loads(file_contents)
                st.session_state.current_file = file_contents
            except json.JSONDecodeError as e:
                # Mirror the upload branch's error handling instead of
                # crashing the whole page on a malformed sample file.
                st.error(f"Error processing file: {str(e)}")
    else:
        st.warning(f"Default file not found at: {default_file_path}")
        st.session_state.using_default_file = False
# An explicit upload always takes priority over the bundled sample.
if uploaded_file:
    st.session_state.using_default_file = False
    try:
        raw_upload = uploaded_file.read()
        st.session_state.current_file = raw_upload
        st.session_state.json_data = json.loads(raw_upload)
    except Exception as e:
        st.error(f"Error processing file: {str(e)}")
# Render analysis of whatever JSON payload is currently loaded.
if st.session_state.json_data:
    try:
        frames = load_and_process_data(st.session_state.json_data)
        if frames:
            st.header("Input Data Analysis")
            viz_tab, stats_tab, raw_tab = st.tabs(["Visualization", "Statistics", "Raw Data"])

            with viz_tab:
                # One chart plus a quick per-series summary for each unit.
                for measure_unit, frame in frames.items():
                    st.plotly_chart(create_time_series_plot(frame, measure_unit), use_container_width=True)
                    mean_col, std_col, count_col = st.columns(3)
                    with mean_col:
                        st.metric("Average Production",
                                  f"{frame['datacellar:value'].mean():.2f} {measure_unit}")
                    with std_col:
                        st.metric("Standard Deviation",
                                  f"{frame['datacellar:value'].std():.2f} {measure_unit}")
                    with count_col:
                        st.metric("Total Samples",
                                  len(frame))

            with stats_tab:
                display_statistics(frames)

            with raw_tab:
                st.json(st.session_state.json_data)

            # Trigger for the remote anomaly-detection service.
            st.subheader("Anomaly Detection")
            button_col, _spare_col = st.columns(2)
            with button_col:
                if st.button("Detect Anomalies", key="detect_button"):
                    if not st.session_state.api_token:
                        st.error("Please enter your API token in the sidebar first.")
                    else:
                        with st.spinner("Analyzing production patterns..."):
                            # Serialize the loaded payload and send it to the
                            # production anomaly-detection endpoint.
                            request_body = st.session_state.json_data.copy()
                            encoded_body = json.dumps(request_body).encode('utf-8')
                            st.session_state.api_response = call_api(
                                encoded_body,
                                st.session_state.api_token,
                                "inference_production_ad"
                            )
    except Exception as e:
        st.error(f"Error processing data: {str(e)}")
# Render the API output once a detection call has completed.
if st.session_state.api_response:
    st.header("Anomaly Detection Results")
    result_tabs = st.tabs(["Anomaly Visualization", "Raw Results"])

    with result_tabs[0]:
        response_frames = load_and_process_data(
            st.session_state.api_response,
            input_data=st.session_state.json_data
        )
        if response_frames:
            # The 'boolean' frame flags which samples were marked anomalous;
            # keep only the flagged rows, then plot every remaining series.
            flags = response_frames.pop('boolean')
            flagged = flags[flags['datacellar:value'] == True]
            for measure_unit, frame in response_frames.items():
                fig = create_time_series_plot(frame, measure_unit, service_type="Anomaly Detection")
                # NOTE(review): the flagged rows' index labels are used
                # positionally via iloc — assumes this frame's rows line up
                # with the boolean frame's RangeIndex; confirm against
                # load_and_process_data.
                anomaly_points = frame.iloc[flagged['datacellar:timeStamp'].index]
                # Overlay the anomalous samples as red markers.
                fig.add_trace(go.Scatter(
                    x=anomaly_points['datacellar:timeStamp'],
                    y=anomaly_points['datacellar:value'],
                    mode='markers',
                    marker=dict(color='red'),
                    name='Anomalies'
                ))
                st.plotly_chart(fig, use_container_width=True)

    with result_tabs[1]:
        st.json(st.session_state.api_response)