import streamlit as st
import json
import pandas as pd
from utils import load_and_process_data, create_time_series_plot, display_statistics, call_api
import plotly.express as px
import plotly.graph_objects as go
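# Behavior assumed for the helpers imported from the utils module, inferred from
# how they are used on this page (see the utils module for the actual code):
#   load_and_process_data(json_data, input_data=...) -> dict mapping a unit string
#       to a DataFrame with 'datacellar:timeStamp' and 'datacellar:value' columns
#   create_time_series_plot(df, unit, service_type=...) -> Plotly figure
#   display_statistics(dfs) -> renders summary statistics with Streamlit widgets
#   call_api(payload_bytes, token, endpoint) -> parsed JSON response from the service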


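# Default API token used when none has been set for this session; the error
# message further down suggests it can also be entered via a sidebar input.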
if 'api_token' not in st.session_state:
    st.session_state.api_token = "p2s8X9qL4zF7vN3mK6tR1bY5cA0wE3hJ"

# Clear state left over from a previous run (everything except the API token)
for key in ['current_file', 'json_data', 'api_response']:
    if key in st.session_state:
        del st.session_state[key]

# Initialize session state variables
if 'current_file' not in st.session_state:
    st.session_state.current_file = None
if 'json_data' not in st.session_state:
    st.session_state.json_data = None
if 'api_response' not in st.session_state:
    st.session_state.api_response = None

st.title("Energy Consumption Anomaly Detection")

st.markdown("""
This service analyzes energy consumption patterns to detect anomalies and unusual behavior in your data.

### Features
- Real-time anomaly detection
- Consumption irregularity identification
- Interactive visualization of detected anomalies

""")



# File upload and processing
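# The uploaded JSON is expected to follow the structure load_and_process_data
# understands: per-unit time series with 'datacellar:timeStamp' and
# 'datacellar:value' entries.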
uploaded_file = st.file_uploader("Upload JSON file", type=['json'])

if uploaded_file:
    try:
        file_contents = uploaded_file.read()
        st.session_state.current_file = file_contents
        st.session_state.json_data = json.loads(file_contents)
        
        dfs = load_and_process_data(st.session_state.json_data)
        if dfs:
            st.header("Input Data Analysis")
            tabs = st.tabs(["Visualization", "Statistics", "Raw Data"])
            
            with tabs[0]:
                for unit, df in dfs.items():
                    st.plotly_chart(create_time_series_plot(df, unit), use_container_width=True)
                    
                    # Show basic statistical analysis
                    col1, col2, col3 = st.columns(3)
                    with col1:
                        st.metric("Average Consumption", 
                                f"{df['datacellar:value'].mean():.2f} {unit}")
                    with col2:
                        st.metric("Standard Deviation", 
                                f"{df['datacellar:value'].std():.2f} {unit}")
                    with col3:
                        st.metric("Total Samples", 
                                len(df))
            
            with tabs[1]:
                display_statistics(dfs)
            
            with tabs[2]:
                st.json(st.session_state.json_data)
            
            # Add analysis options
            st.subheader("Anomaly Detection")
            col1, col2 = st.columns(2)
            with col1:
                if st.button("Detect Anomalies", key="detect_button"):
                    if not st.session_state.api_token:
                        st.error("Please enter your API token in the sidebar first.")
                    else:
                        with st.spinner("Analyzing consumption patterns..."):
                            # Copy the input payload; request parameters such as
                            # sensitivity or window_size could be attached here
                            modified_data = st.session_state.json_data.copy()
                            
                            # Convert back to JSON and call API
                            modified_content = json.dumps(modified_data).encode('utf-8')
                            st.session_state.api_response = call_api(
                                modified_content,
                                st.session_state.api_token,
                                "inference_consumption_ad"
                            )
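                            # The response is stored in session state and rendered
                            # in the "Anomaly Detection Results" section below.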
            with col2:
                if st.button("Clear Results", key="clear_button"):
                    st.session_state.api_response = None
                    st.rerun()

    except Exception as e:
        st.error(f"Error processing file: {str(e)}")

# Display API results
if st.session_state.api_response:
    st.header("Anomaly Detection Results")
    tabs = st.tabs(["Anomaly Visualization", "Raw Results"])
    
    with tabs[0]:
        response_dfs = load_and_process_data(
            st.session_state.api_response,
            input_data=st.session_state.json_data
        )
        if response_dfs:
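            # The API response is assumed to contain a 'boolean' series flagging
            # anomalous timestamps alongside the returned measurement series.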
            anomalies = response_dfs['boolean']
            anomalies = anomalies[anomalies['datacellar:value'] == True]

            del response_dfs['boolean']
            for unit, df in response_dfs.items():
                fig = create_time_series_plot(df, unit, service_type="Anomaly Detection")
                # Get the rows of the plotted series that correspond to anomalies
                anomaly_df = df.iloc[anomalies['datacellar:timeStamp'].index]
                fig.add_trace(go.Scatter(
                    x=anomaly_df['datacellar:timeStamp'],
                    y=anomaly_df['datacellar:value'],
                    mode='markers',
                    marker=dict(color='red'),
                    name='Anomalies'
                ))
                # Create visualization with highlighted anomalies
                st.plotly_chart(
                    fig,
                    use_container_width=True
                )
    
    with tabs[1]:
        st.json(st.session_state.api_response)