# datacellar-service-demo / interface / pages / 5_Anomaly_Detection_Consumption.py
# Forked from previous service demo (commit 866cee0).
import streamlit as st
import json
import pandas as pd
from utils import load_and_process_data, create_time_series_plot, display_statistics, call_api
import plotly.express as px
import plotly.graph_objects as go
# --- Session state ----------------------------------------------------------
# SECURITY NOTE(review): the API token is hard-coded in source; move it to
# st.secrets or an environment variable before deploying publicly.
if 'api_token' not in st.session_state:
    st.session_state.api_token = "p2s8X9qL4zF7vN3mK6tR1bY5cA0wE3hJ"
    # First visit to this page: drop state possibly left over from other pages.
    for key in ('current_file', 'json_data', 'api_response'):
        if key in st.session_state:
            del st.session_state[key]

# Ensure every key this page reads exists before any widget renders.
for key in ('current_file', 'json_data', 'api_response'):
    st.session_state.setdefault(key, None)
# --- Page header ------------------------------------------------------------
# Static title and service description shown above the upload widget.
st.title("Energy Consumption Anomaly Detection")
st.markdown("""
This service analyzes energy consumption patterns to detect anomalies and unusual behavior in your data.
### Features
- Real-time anomaly detection
- Consumption irregularity identification
- Interactive visualization of detected anomalies
""")
# --- Input upload, exploratory view, and detection controls -----------------
uploaded_file = st.file_uploader("Upload JSON file", type=['json'])
if uploaded_file:
    try:
        # Keep both the raw bytes and the parsed document: the parsed form
        # feeds the plots/raw tab, and is re-serialized for the API call.
        file_contents = uploaded_file.read()
        st.session_state.current_file = file_contents
        st.session_state.json_data = json.loads(file_contents)

        # load_and_process_data presumably returns {unit -> DataFrame} with
        # 'datacellar:value' / 'datacellar:timeStamp' columns — TODO confirm
        # against utils.
        dfs = load_and_process_data(st.session_state.json_data)
        if dfs:
            st.header("Input Data Analysis")
            tabs = st.tabs(["Visualization", "Statistics", "Raw Data"])
            with tabs[0]:
                for unit, df in dfs.items():
                    st.plotly_chart(create_time_series_plot(df, unit), use_container_width=True)
                    # Quick per-unit summary metrics under each chart.
                    col1, col2, col3 = st.columns(3)
                    with col1:
                        st.metric("Average Consumption",
                                  f"{df['datacellar:value'].mean():.2f} {unit}")
                    with col2:
                        st.metric("Standard Deviation",
                                  f"{df['datacellar:value'].std():.2f} {unit}")
                    with col3:
                        st.metric("Total Samples",
                                  len(df))
            with tabs[1]:
                display_statistics(dfs)
            with tabs[2]:
                st.json(st.session_state.json_data)

        # Anomaly-detection controls.
        st.subheader("Anomaly Detection")
        col1, col2 = st.columns(2)
        with col1:
            if st.button("Detect Anomalies", key="detect_button"):
                if not st.session_state.api_token:
                    st.error("Please enter your API token in the sidebar first.")
                else:
                    with st.spinner("Analyzing consumption patterns..."):
                        # Copy so future request tweaks (sensitivity,
                        # window_size) won't mutate the cached upload.
                        modified_data = st.session_state.json_data.copy()
                        # Re-serialize and call the anomaly-detection endpoint.
                        modified_content = json.dumps(modified_data).encode('utf-8')
                        st.session_state.api_response = call_api(
                            modified_content,
                            st.session_state.api_token,
                            "inference_consumption_ad"
                        )
        with col2:
            if st.button("Clear Results", key="clear_button"):
                st.session_state.api_response = None
                st.experimental_rerun()
    except Exception as e:
        # Broad by design: any parse/plot failure surfaces as a UI error
        # instead of crashing the page.
        st.error(f"Error processing file: {str(e)}")
# --- Results view -----------------------------------------------------------
if st.session_state.api_response:
    st.header("Anomaly Detection Results")
    tabs = st.tabs(["Anomaly Visualization", "Raw Results"])
    with tabs[0]:
        # Re-process the API response alongside the original input so the
        # anomaly flags can be mapped back onto the measurement series.
        response_dfs = load_and_process_data(
            st.session_state.api_response,
            input_data=st.session_state.json_data
        )
        if response_dfs:
            # The 'boolean' frame carries the per-sample anomaly flags; keep
            # only flagged rows and remove it from the plottable series.
            anomalies = response_dfs.pop('boolean')
            anomalies = anomalies[anomalies['datacellar:value'] == True]
            for unit, df in response_dfs.items():
                fig = create_time_series_plot(df, unit, service_type="Anomaly Detection")
                # NOTE(review): assumes the boolean frame and each value
                # frame are positionally aligned, so the surviving index
                # labels select the matching rows via iloc — confirm.
                anomaly_df = df.iloc[anomalies['datacellar:timeStamp'].index]
                # Overlay detected anomalies as red markers on the series.
                fig.add_trace(go.Scatter(
                    x=anomaly_df['datacellar:timeStamp'],
                    y=anomaly_df['datacellar:value'],
                    mode='markers',
                    marker=dict(color='red'),
                    name='Anomalies'
                ))
                st.plotly_chart(
                    fig,
                    use_container_width=True
                )
    with tabs[1]:
        st.json(st.session_state.api_response)