StefanoBergia committed on
Commit
36ff46c
·
1 Parent(s): 155828c

added default files

Browse files
pages/1_Short_Term_Consumption.py CHANGED
@@ -1,16 +1,12 @@
1
  import streamlit as st
2
  import json
 
3
  from utils import load_and_process_data, create_time_series_plot, display_statistics, call_api
4
 
5
 
6
  if 'api_token' not in st.session_state:
7
  st.session_state.api_token = "p2s8X9qL4zF7vN3mK6tR1bY5cA0wE3hJ"
8
 
9
- # Clear other states
10
- for key in ['current_file', 'json_data', 'api_response']:
11
- if key in st.session_state:
12
- del st.session_state[key]
13
-
14
  # Initialize session state variables
15
  if 'current_file' not in st.session_state:
16
  st.session_state.current_file = None
@@ -18,6 +14,8 @@ if 'json_data' not in st.session_state:
18
  st.session_state.json_data = None
19
  if 'api_response' not in st.session_state:
20
  st.session_state.api_response = None
 
 
21
 
22
  st.title("Short Term Energy Consumption Forecasting")
23
 
@@ -31,15 +29,38 @@ Upload your energy consumption data to generate predictions for the near future.
31
  - Statistical analysis of predictions
32
  """)
33
 
 
 
 
34
  # File upload and processing
35
- uploaded_file = st.file_uploader("Upload JSON file", type=['json'])
 
 
 
 
 
 
 
 
 
 
 
 
 
36
 
 
37
  if uploaded_file:
 
38
  try:
39
  file_contents = uploaded_file.read()
40
  st.session_state.current_file = file_contents
41
  st.session_state.json_data = json.loads(file_contents)
42
-
 
 
 
 
 
43
  dfs = load_and_process_data(st.session_state.json_data)
44
  if dfs:
45
  st.header("Input Data")
@@ -65,9 +86,8 @@ if uploaded_file:
65
  st.session_state.api_token,
66
  "inference_consumption_short_term"
67
  )
68
-
69
  except Exception as e:
70
- st.error(f"Error processing file: {str(e)}")
71
 
72
  # Display API results
73
  if st.session_state.api_response:
@@ -80,7 +100,8 @@ if st.session_state.api_response:
80
  input_data=st.session_state.json_data
81
  )
82
  if response_dfs:
83
- del response_dfs['Celsius']
 
84
  for unit, df in response_dfs.items():
85
  st.plotly_chart(create_time_series_plot(df, unit), use_container_width=True)
86
 
 
1
  import streamlit as st
2
  import json
3
+ import os
4
  from utils import load_and_process_data, create_time_series_plot, display_statistics, call_api
5
 
6
 
7
  if 'api_token' not in st.session_state:
8
  st.session_state.api_token = "p2s8X9qL4zF7vN3mK6tR1bY5cA0wE3hJ"
9
 
 
 
 
 
 
10
  # Initialize session state variables
11
  if 'current_file' not in st.session_state:
12
  st.session_state.current_file = None
 
14
  st.session_state.json_data = None
15
  if 'api_response' not in st.session_state:
16
  st.session_state.api_response = None
17
+ if 'using_default_file' not in st.session_state:
18
+ st.session_state.using_default_file = True
19
 
20
  st.title("Short Term Energy Consumption Forecasting")
21
 
 
29
  - Statistical analysis of predictions
30
  """)
31
 
32
+ # Default file path
33
+ default_file_path = "samples/1_short_term_consumption.json" # Adjust this path to your default file
34
+
35
  # File upload and processing
36
+ uploaded_file = st.file_uploader("Upload JSON file (or use default)", type=['json'])
37
+
38
+ # Load default file if no file is uploaded and using_default_file is True
39
+ if uploaded_file is None and st.session_state.using_default_file:
40
+ if os.path.exists(default_file_path):
41
+ st.info(f"Using default file: {default_file_path}")
42
+ with open(default_file_path, 'r') as f:
43
+ file_contents = f.read()
44
+ if st.session_state.current_file != file_contents:
45
+ st.session_state.current_file = file_contents
46
+ st.session_state.json_data = json.loads(file_contents)
47
+ else:
48
+ st.warning(f"Default file not found at: {default_file_path}")
49
+ st.session_state.using_default_file = False
50
 
51
+ # If a file is uploaded, process it
52
  if uploaded_file:
53
+ st.session_state.using_default_file = False
54
  try:
55
  file_contents = uploaded_file.read()
56
  st.session_state.current_file = file_contents
57
  st.session_state.json_data = json.loads(file_contents)
58
+ except Exception as e:
59
+ st.error(f"Error processing file: {str(e)}")
60
+
61
+ # Process and display data if available
62
+ if st.session_state.json_data:
63
+ try:
64
  dfs = load_and_process_data(st.session_state.json_data)
65
  if dfs:
66
  st.header("Input Data")
 
86
  st.session_state.api_token,
87
  "inference_consumption_short_term"
88
  )
 
89
  except Exception as e:
90
+ st.error(f"Error processing data: {str(e)}")
91
 
92
  # Display API results
93
  if st.session_state.api_response:
 
100
  input_data=st.session_state.json_data
101
  )
102
  if response_dfs:
103
+ if 'Celsius' in response_dfs:
104
+ del response_dfs['Celsius']
105
  for unit, df in response_dfs.items():
106
  st.plotly_chart(create_time_series_plot(df, unit), use_container_width=True)
107
 
pages/2_Long_Term_Consumption.py CHANGED
@@ -1,15 +1,11 @@
1
  import streamlit as st
2
  import json
 
3
  from utils import load_and_process_data, create_time_series_plot, display_statistics, call_api
4
 
5
  if 'api_token' not in st.session_state:
6
  st.session_state.api_token = "p2s8X9qL4zF7vN3mK6tR1bY5cA0wE3hJ"
7
 
8
- # Clear other states
9
- for key in ['current_file', 'json_data', 'api_response']:
10
- if key in st.session_state:
11
- del st.session_state[key]
12
-
13
  # Initialize session state variables
14
  if 'current_file' not in st.session_state:
15
  st.session_state.current_file = None
@@ -17,6 +13,8 @@ if 'json_data' not in st.session_state:
17
  st.session_state.json_data = None
18
  if 'api_response' not in st.session_state:
19
  st.session_state.api_response = None
 
 
20
 
21
  st.title("Long Term Energy Consumption Forecasting")
22
 
@@ -30,15 +28,38 @@ Upload your historical consumption data to generate predictions for extended per
30
  - Statistical analysis of predictions
31
  """)
32
 
 
 
 
33
  # File upload and processing
34
- uploaded_file = st.file_uploader("Upload JSON file", type=['json'])
 
 
 
 
 
 
 
 
 
 
 
 
 
35
 
 
36
  if uploaded_file:
 
37
  try:
38
  file_contents = uploaded_file.read()
39
  st.session_state.current_file = file_contents
40
  st.session_state.json_data = json.loads(file_contents)
41
-
 
 
 
 
 
42
  dfs = load_and_process_data(st.session_state.json_data)
43
  if dfs:
44
  st.header("Input Data")
@@ -64,9 +85,8 @@ if uploaded_file:
64
  st.session_state.api_token,
65
  "inference_consumption_long_term"
66
  )
67
-
68
  except Exception as e:
69
- st.error(f"Error processing file: {str(e)}")
70
 
71
  # Display API results
72
  if st.session_state.api_response:
@@ -79,7 +99,8 @@ if st.session_state.api_response:
79
  input_data=st.session_state.json_data
80
  )
81
  if response_dfs:
82
- del response_dfs['Celsius']
 
83
  for unit, df in response_dfs.items():
84
  st.plotly_chart(create_time_series_plot(df, unit), use_container_width=True)
85
 
 
1
  import streamlit as st
2
  import json
3
+ import os
4
  from utils import load_and_process_data, create_time_series_plot, display_statistics, call_api
5
 
6
  if 'api_token' not in st.session_state:
7
  st.session_state.api_token = "p2s8X9qL4zF7vN3mK6tR1bY5cA0wE3hJ"
8
 
 
 
 
 
 
9
  # Initialize session state variables
10
  if 'current_file' not in st.session_state:
11
  st.session_state.current_file = None
 
13
  st.session_state.json_data = None
14
  if 'api_response' not in st.session_state:
15
  st.session_state.api_response = None
16
+ if 'using_default_file' not in st.session_state:
17
+ st.session_state.using_default_file = True
18
 
19
  st.title("Long Term Energy Consumption Forecasting")
20
 
 
28
  - Statistical analysis of predictions
29
  """)
30
 
31
+ # Default file path
32
+ default_file_path = "samples/2_long_term_consumption.json" # Adjust this path to your default file
33
+
34
  # File upload and processing
35
+ uploaded_file = st.file_uploader("Upload JSON file (or use default)", type=['json'])
36
+
37
+ # Load default file if no file is uploaded and using_default_file is True
38
+ if uploaded_file is None and st.session_state.using_default_file:
39
+ if os.path.exists(default_file_path):
40
+ st.info(f"Using default file: {default_file_path}")
41
+ with open(default_file_path, 'r') as f:
42
+ file_contents = f.read()
43
+ if st.session_state.current_file != file_contents:
44
+ st.session_state.current_file = file_contents
45
+ st.session_state.json_data = json.loads(file_contents)
46
+ else:
47
+ st.warning(f"Default file not found at: {default_file_path}")
48
+ st.session_state.using_default_file = False
49
 
50
+ # If a file is uploaded, process it
51
  if uploaded_file:
52
+ st.session_state.using_default_file = False
53
  try:
54
  file_contents = uploaded_file.read()
55
  st.session_state.current_file = file_contents
56
  st.session_state.json_data = json.loads(file_contents)
57
+ except Exception as e:
58
+ st.error(f"Error processing file: {str(e)}")
59
+
60
+ # Process and display data if available
61
+ if st.session_state.json_data:
62
+ try:
63
  dfs = load_and_process_data(st.session_state.json_data)
64
  if dfs:
65
  st.header("Input Data")
 
85
  st.session_state.api_token,
86
  "inference_consumption_long_term"
87
  )
 
88
  except Exception as e:
89
+ st.error(f"Error processing data: {str(e)}")
90
 
91
  # Display API results
92
  if st.session_state.api_response:
 
99
  input_data=st.session_state.json_data
100
  )
101
  if response_dfs:
102
+ if 'Celsius' in response_dfs:
103
+ del response_dfs['Celsius']
104
  for unit, df in response_dfs.items():
105
  st.plotly_chart(create_time_series_plot(df, unit), use_container_width=True)
106
 
pages/3_Short_Term_Production.py CHANGED
@@ -1,15 +1,11 @@
1
  import streamlit as st
2
  import json
 
3
  from utils import load_and_process_data, create_time_series_plot, display_statistics, call_api
4
 
5
  if 'api_token' not in st.session_state:
6
  st.session_state.api_token = "p2s8X9qL4zF7vN3mK6tR1bY5cA0wE3hJ"
7
 
8
- # Clear other states
9
- for key in ['current_file', 'json_data', 'api_response']:
10
- if key in st.session_state:
11
- del st.session_state[key]
12
-
13
  # Initialize session state variables
14
  if 'current_file' not in st.session_state:
15
  st.session_state.current_file = None
@@ -17,6 +13,8 @@ if 'json_data' not in st.session_state:
17
  st.session_state.json_data = None
18
  if 'api_response' not in st.session_state:
19
  st.session_state.api_response = None
 
 
20
 
21
  st.title("Short Term Energy Production Forecasting")
22
 
@@ -30,15 +28,38 @@ This service provides short-term forecasting of energy production patterns, part
30
  - Statistical analysis of predictions
31
  """)
32
 
 
 
 
33
  # File upload and processing
34
- uploaded_file = st.file_uploader("Upload JSON file", type=['json'])
 
 
 
 
 
 
 
 
 
 
 
 
 
35
 
 
36
  if uploaded_file:
 
37
  try:
38
  file_contents = uploaded_file.read()
39
  st.session_state.current_file = file_contents
40
  st.session_state.json_data = json.loads(file_contents)
41
-
 
 
 
 
 
42
  dfs = load_and_process_data(st.session_state.json_data)
43
  if dfs:
44
  st.header("Input Data")
@@ -64,9 +85,8 @@ if uploaded_file:
64
  st.session_state.api_token,
65
  "inference_production_short_term"
66
  )
67
-
68
  except Exception as e:
69
- st.error(f"Error processing file: {str(e)}")
70
 
71
  # Display API results
72
  if st.session_state.api_response:
 
1
  import streamlit as st
2
  import json
3
+ import os
4
  from utils import load_and_process_data, create_time_series_plot, display_statistics, call_api
5
 
6
  if 'api_token' not in st.session_state:
7
  st.session_state.api_token = "p2s8X9qL4zF7vN3mK6tR1bY5cA0wE3hJ"
8
 
 
 
 
 
 
9
  # Initialize session state variables
10
  if 'current_file' not in st.session_state:
11
  st.session_state.current_file = None
 
13
  st.session_state.json_data = None
14
  if 'api_response' not in st.session_state:
15
  st.session_state.api_response = None
16
+ if 'using_default_file' not in st.session_state:
17
+ st.session_state.using_default_file = True
18
 
19
  st.title("Short Term Energy Production Forecasting")
20
 
 
28
  - Statistical analysis of predictions
29
  """)
30
 
31
+ # Default file path
32
+ default_file_path = "samples/3_short_term_production.json" # Adjust this path to your default file
33
+
34
  # File upload and processing
35
+ uploaded_file = st.file_uploader("Upload JSON file (or use default)", type=['json'])
36
+
37
+ # Load default file if no file is uploaded and using_default_file is True
38
+ if uploaded_file is None and st.session_state.using_default_file:
39
+ if os.path.exists(default_file_path):
40
+ st.info(f"Using default file: {default_file_path}")
41
+ with open(default_file_path, 'r') as f:
42
+ file_contents = f.read()
43
+ if st.session_state.current_file != file_contents:
44
+ st.session_state.current_file = file_contents
45
+ st.session_state.json_data = json.loads(file_contents)
46
+ else:
47
+ st.warning(f"Default file not found at: {default_file_path}")
48
+ st.session_state.using_default_file = False
49
 
50
+ # If a file is uploaded, process it
51
  if uploaded_file:
52
+ st.session_state.using_default_file = False
53
  try:
54
  file_contents = uploaded_file.read()
55
  st.session_state.current_file = file_contents
56
  st.session_state.json_data = json.loads(file_contents)
57
+ except Exception as e:
58
+ st.error(f"Error processing file: {str(e)}")
59
+
60
+ # Process and display data if available
61
+ if st.session_state.json_data:
62
+ try:
63
  dfs = load_and_process_data(st.session_state.json_data)
64
  if dfs:
65
  st.header("Input Data")
 
85
  st.session_state.api_token,
86
  "inference_production_short_term"
87
  )
 
88
  except Exception as e:
89
+ st.error(f"Error processing data: {str(e)}")
90
 
91
  # Display API results
92
  if st.session_state.api_response:
pages/4_NILM_Analysis.py CHANGED
@@ -1,15 +1,11 @@
1
  import streamlit as st
2
  import json
 
3
  from utils import load_and_process_data, create_time_series_plot, display_statistics, call_api
4
 
5
  if 'api_token' not in st.session_state:
6
  st.session_state.api_token = "p2s8X9qL4zF7vN3mK6tR1bY5cA0wE3hJ"
7
 
8
- # Clear other states
9
- for key in ['current_file', 'json_data', 'api_response']:
10
- if key in st.session_state:
11
- del st.session_state[key]
12
-
13
  # Initialize session state variables
14
  if 'current_file' not in st.session_state:
15
  st.session_state.current_file = None
@@ -17,6 +13,8 @@ if 'json_data' not in st.session_state:
17
  st.session_state.json_data = None
18
  if 'api_response' not in st.session_state:
19
  st.session_state.api_response = None
 
 
20
 
21
  st.title("Non-Intrusive Load Monitoring (NILM) Analysis")
22
 
@@ -30,15 +28,38 @@ This service provides detailed breakdown of energy consumption by analyzing aggr
30
  - Detailed consumption insights
31
  """)
32
 
 
 
 
33
  # File upload and processing
34
- uploaded_file = st.file_uploader("Upload JSON file", type=['json'])
 
 
 
 
 
 
 
 
 
 
 
 
 
35
 
 
36
  if uploaded_file:
 
37
  try:
38
  file_contents = uploaded_file.read()
39
  st.session_state.current_file = file_contents
40
  st.session_state.json_data = json.loads(file_contents)
41
-
 
 
 
 
 
42
  dfs = load_and_process_data(st.session_state.json_data)
43
  if dfs:
44
  st.header("Input Data")
@@ -64,9 +85,8 @@ if uploaded_file:
64
  st.session_state.api_token,
65
  "inference_nilm"
66
  )
67
-
68
  except Exception as e:
69
- st.error(f"Error processing file: {str(e)}")
70
 
71
  # Display API results
72
  if st.session_state.api_response:
 
1
  import streamlit as st
2
  import json
3
+ import os
4
  from utils import load_and_process_data, create_time_series_plot, display_statistics, call_api
5
 
6
  if 'api_token' not in st.session_state:
7
  st.session_state.api_token = "p2s8X9qL4zF7vN3mK6tR1bY5cA0wE3hJ"
8
 
 
 
 
 
 
9
  # Initialize session state variables
10
  if 'current_file' not in st.session_state:
11
  st.session_state.current_file = None
 
13
  st.session_state.json_data = None
14
  if 'api_response' not in st.session_state:
15
  st.session_state.api_response = None
16
+ if 'using_default_file' not in st.session_state:
17
+ st.session_state.using_default_file = True
18
 
19
  st.title("Non-Intrusive Load Monitoring (NILM) Analysis")
20
 
 
28
  - Detailed consumption insights
29
  """)
30
 
31
+ # Default file path
32
+ default_file_path = "samples/4_NILM.json" # Adjust this path to your default file
33
+
34
  # File upload and processing
35
+ uploaded_file = st.file_uploader("Upload JSON file (or use default)", type=['json'])
36
+
37
+ # Load default file if no file is uploaded and using_default_file is True
38
+ if uploaded_file is None and st.session_state.using_default_file:
39
+ if os.path.exists(default_file_path):
40
+ st.info(f"Using default file: {default_file_path}")
41
+ with open(default_file_path, 'r') as f:
42
+ file_contents = f.read()
43
+ if st.session_state.current_file != file_contents:
44
+ st.session_state.current_file = file_contents
45
+ st.session_state.json_data = json.loads(file_contents)
46
+ else:
47
+ st.warning(f"Default file not found at: {default_file_path}")
48
+ st.session_state.using_default_file = False
49
 
50
+ # If a file is uploaded, process it
51
  if uploaded_file:
52
+ st.session_state.using_default_file = False
53
  try:
54
  file_contents = uploaded_file.read()
55
  st.session_state.current_file = file_contents
56
  st.session_state.json_data = json.loads(file_contents)
57
+ except Exception as e:
58
+ st.error(f"Error processing file: {str(e)}")
59
+
60
+ # Process and display data if available
61
+ if st.session_state.json_data:
62
+ try:
63
  dfs = load_and_process_data(st.session_state.json_data)
64
  if dfs:
65
  st.header("Input Data")
 
85
  st.session_state.api_token,
86
  "inference_nilm"
87
  )
 
88
  except Exception as e:
89
+ st.error(f"Error processing data: {str(e)}")
90
 
91
  # Display API results
92
  if st.session_state.api_response:
pages/5_Anomaly_Detection_Consumption.py CHANGED
@@ -1,6 +1,7 @@
1
  import streamlit as st
2
  import json
3
  import pandas as pd
 
4
  from utils import load_and_process_data, create_time_series_plot, display_statistics, call_api
5
  import plotly.express as px
6
  import plotly.graph_objects as go
@@ -9,11 +10,6 @@ import plotly.graph_objects as go
9
  if 'api_token' not in st.session_state:
10
  st.session_state.api_token = "p2s8X9qL4zF7vN3mK6tR1bY5cA0wE3hJ"
11
 
12
- # Clear other states
13
- for key in ['current_file', 'json_data', 'api_response']:
14
- if key in st.session_state:
15
- del st.session_state[key]
16
-
17
  # Initialize session state variables
18
  if 'current_file' not in st.session_state:
19
  st.session_state.current_file = None
@@ -21,6 +17,8 @@ if 'json_data' not in st.session_state:
21
  st.session_state.json_data = None
22
  if 'api_response' not in st.session_state:
23
  st.session_state.api_response = None
 
 
24
 
25
  st.title("Energy Consumption Anomaly Detection")
26
 
@@ -31,20 +29,40 @@ This service analyzes energy consumption patterns to detect anomalies and unusua
31
  - Real-time anomaly detection
32
  - Consumption irregularity identification
33
  - Interactive visualization of detected anomalies
34
-
35
  """)
36
 
37
-
 
38
 
39
  # File upload and processing
40
- uploaded_file = st.file_uploader("Upload JSON file", type=['json'])
41
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
42
  if uploaded_file:
 
43
  try:
44
  file_contents = uploaded_file.read()
45
  st.session_state.current_file = file_contents
46
  st.session_state.json_data = json.loads(file_contents)
47
-
 
 
 
 
 
48
  dfs = load_and_process_data(st.session_state.json_data)
49
  if dfs:
50
  st.header("Input Data Analysis")
@@ -84,7 +102,6 @@ if uploaded_file:
84
  # Add sensitivity and window_size to the request
85
  modified_data = st.session_state.json_data.copy()
86
 
87
-
88
  # Convert back to JSON and call API
89
  modified_content = json.dumps(modified_data).encode('utf-8')
90
  st.session_state.api_response = call_api(
@@ -96,9 +113,8 @@ if uploaded_file:
96
  if st.button("Clear Results", key="clear_button"):
97
  st.session_state.api_response = None
98
  st.experimental_rerun()
99
-
100
  except Exception as e:
101
- st.error(f"Error processing file: {str(e)}")
102
 
103
  # Display API results
104
  if st.session_state.api_response:
@@ -111,17 +127,21 @@ if st.session_state.api_response:
111
  input_data=st.session_state.json_data
112
  )
113
  if response_dfs:
114
- anomalies=response_dfs['boolean']
115
- anomalies=anomalies[anomalies['datacellar:value']==True]
116
 
117
  del response_dfs['boolean']
118
  for unit, df in response_dfs.items():
119
-
120
- fig= create_time_series_plot(df, unit, service_type="Anomaly Detection")
121
- #get df values for anomalies
122
- anomaly_df=df.iloc[anomalies['datacellar:timeStamp'].index]
123
- fig.add_trace(go.Scatter(x=anomaly_df['datacellar:timeStamp'], y=anomaly_df['datacellar:value'], mode='markers', marker=dict(color='red'), name='Anomalies'))
124
- #print(unit)
 
 
 
 
125
  # Create visualization with highlighted anomalies
126
  st.plotly_chart(
127
  fig,
@@ -129,4 +149,4 @@ if st.session_state.api_response:
129
  )
130
 
131
  with tabs[1]:
132
- st.json(st.session_state.api_response)
 
1
  import streamlit as st
2
  import json
3
  import pandas as pd
4
+ import os
5
  from utils import load_and_process_data, create_time_series_plot, display_statistics, call_api
6
  import plotly.express as px
7
  import plotly.graph_objects as go
 
10
  if 'api_token' not in st.session_state:
11
  st.session_state.api_token = "p2s8X9qL4zF7vN3mK6tR1bY5cA0wE3hJ"
12
 
 
 
 
 
 
13
  # Initialize session state variables
14
  if 'current_file' not in st.session_state:
15
  st.session_state.current_file = None
 
17
  st.session_state.json_data = None
18
  if 'api_response' not in st.session_state:
19
  st.session_state.api_response = None
20
+ if 'using_default_file' not in st.session_state:
21
+ st.session_state.using_default_file = True
22
 
23
  st.title("Energy Consumption Anomaly Detection")
24
 
 
29
  - Real-time anomaly detection
30
  - Consumption irregularity identification
31
  - Interactive visualization of detected anomalies
 
32
  """)
33
 
34
+ # Default file path
35
+ default_file_path = "samples/5_anomaly_detection_consumption.json" # Adjust this path to your default file
36
 
37
  # File upload and processing
38
+ uploaded_file = st.file_uploader("Upload JSON file (or use default)", type=['json'])
39
 
40
+ # Load default file if no file is uploaded and using_default_file is True
41
+ if uploaded_file is None and st.session_state.using_default_file:
42
+ if os.path.exists(default_file_path):
43
+ st.info(f"Using default file: {default_file_path}")
44
+ with open(default_file_path, 'r') as f:
45
+ file_contents = f.read()
46
+ if st.session_state.current_file != file_contents:
47
+ st.session_state.current_file = file_contents
48
+ st.session_state.json_data = json.loads(file_contents)
49
+ else:
50
+ st.warning(f"Default file not found at: {default_file_path}")
51
+ st.session_state.using_default_file = False
52
+
53
+ # If a file is uploaded, process it
54
  if uploaded_file:
55
+ st.session_state.using_default_file = False
56
  try:
57
  file_contents = uploaded_file.read()
58
  st.session_state.current_file = file_contents
59
  st.session_state.json_data = json.loads(file_contents)
60
+ except Exception as e:
61
+ st.error(f"Error processing file: {str(e)}")
62
+
63
+ # Process and display data if available
64
+ if st.session_state.json_data:
65
+ try:
66
  dfs = load_and_process_data(st.session_state.json_data)
67
  if dfs:
68
  st.header("Input Data Analysis")
 
102
  # Add sensitivity and window_size to the request
103
  modified_data = st.session_state.json_data.copy()
104
 
 
105
  # Convert back to JSON and call API
106
  modified_content = json.dumps(modified_data).encode('utf-8')
107
  st.session_state.api_response = call_api(
 
113
  if st.button("Clear Results", key="clear_button"):
114
  st.session_state.api_response = None
115
  st.experimental_rerun()
 
116
  except Exception as e:
117
+ st.error(f"Error processing data: {str(e)}")
118
 
119
  # Display API results
120
  if st.session_state.api_response:
 
127
  input_data=st.session_state.json_data
128
  )
129
  if response_dfs:
130
+ anomalies = response_dfs['boolean']
131
+ anomalies = anomalies[anomalies['datacellar:value']==True]
132
 
133
  del response_dfs['boolean']
134
  for unit, df in response_dfs.items():
135
+ fig = create_time_series_plot(df, unit, service_type="Anomaly Detection")
136
+ # Get df values for anomalies
137
+ anomaly_df = df.iloc[anomalies['datacellar:timeStamp'].index]
138
+ fig.add_trace(go.Scatter(
139
+ x=anomaly_df['datacellar:timeStamp'],
140
+ y=anomaly_df['datacellar:value'],
141
+ mode='markers',
142
+ marker=dict(color='red'),
143
+ name='Anomalies'
144
+ ))
145
  # Create visualization with highlighted anomalies
146
  st.plotly_chart(
147
  fig,
 
149
  )
150
 
151
  with tabs[1]:
152
+ st.json(st.session_state.api_response)
pages/6_Anomaly_Detection_Production.py CHANGED
@@ -1,6 +1,7 @@
1
  import streamlit as st
2
  import json
3
  import pandas as pd
 
4
  from utils import load_and_process_data, create_time_series_plot, display_statistics, call_api
5
  import plotly.express as px
6
  import plotly.graph_objects as go
@@ -9,11 +10,6 @@ import plotly.graph_objects as go
9
  if 'api_token' not in st.session_state:
10
  st.session_state.api_token = "p2s8X9qL4zF7vN3mK6tR1bY5cA0wE3hJ"
11
 
12
- # Clear other states
13
- for key in ['current_file', 'json_data', 'api_response']:
14
- if key in st.session_state:
15
- del st.session_state[key]
16
-
17
  # Initialize session state variables
18
  if 'current_file' not in st.session_state:
19
  st.session_state.current_file = None
@@ -21,30 +17,52 @@ if 'json_data' not in st.session_state:
21
  st.session_state.json_data = None
22
  if 'api_response' not in st.session_state:
23
  st.session_state.api_response = None
 
 
24
 
25
  st.title("Energy Production Anomaly Detection")
26
 
27
  st.markdown("""
28
- This service analyzes energy consumption patterns to detect anomalies and unusual behavior in your data.
29
 
30
  ### Features
31
  - Real-time anomaly detection
32
- - Consumption irregularity identification
33
  - Interactive visualization of detected anomalies
34
-
35
  """)
36
 
37
-
 
38
 
39
  # File upload and processing
40
- uploaded_file = st.file_uploader("Upload JSON file", type=['json'])
41
-
 
 
 
 
 
 
 
 
 
 
 
 
 
 
42
  if uploaded_file:
 
43
  try:
44
  file_contents = uploaded_file.read()
45
  st.session_state.current_file = file_contents
46
  st.session_state.json_data = json.loads(file_contents)
47
-
 
 
 
 
 
48
  dfs = load_and_process_data(st.session_state.json_data)
49
  if dfs:
50
  st.header("Input Data Analysis")
@@ -57,7 +75,7 @@ if uploaded_file:
57
  # Show basic statistical analysis
58
  col1, col2, col3 = st.columns(3)
59
  with col1:
60
- st.metric("Average Consumption",
61
  f"{df['datacellar:value'].mean():.2f} {unit}")
62
  with col2:
63
  st.metric("Standard Deviation",
@@ -80,11 +98,10 @@ if uploaded_file:
80
  if not st.session_state.api_token:
81
  st.error("Please enter your API token in the sidebar first.")
82
  else:
83
- with st.spinner("Analyzing consumption patterns..."):
84
  # Add sensitivity and window_size to the request
85
  modified_data = st.session_state.json_data.copy()
86
 
87
-
88
  # Convert back to JSON and call API
89
  modified_content = json.dumps(modified_data).encode('utf-8')
90
  st.session_state.api_response = call_api(
@@ -96,9 +113,8 @@ if uploaded_file:
96
  if st.button("Clear Results", key="clear_button"):
97
  st.session_state.api_response = None
98
  st.experimental_rerun()
99
-
100
  except Exception as e:
101
- st.error(f"Error processing file: {str(e)}")
102
 
103
  # Display API results
104
  if st.session_state.api_response:
@@ -111,19 +127,23 @@ if st.session_state.api_response:
111
  input_data=st.session_state.json_data
112
  )
113
  if response_dfs:
114
- anomalies=response_dfs['boolean']
115
- anomalies=anomalies[anomalies['datacellar:value']==True]
116
 
117
  del response_dfs['boolean']
118
  for unit, df in response_dfs.items():
119
-
120
- fig= create_time_series_plot(df, unit, service_type="Anomaly Detection")
121
- #get df values for anomalies
122
- anomaly_df=df.iloc[anomalies['datacellar:timeStamp'].index]
123
- #print(anomaly_df)
124
-
125
- fig.add_trace(go.Scatter(x=anomaly_df['datacellar:timeStamp'], y=anomaly_df['datacellar:value'], mode='markers', marker=dict(color='red'), name='Anomalies'))
126
- #print(unit)
 
 
 
 
127
  # Create visualization with highlighted anomalies
128
  st.plotly_chart(
129
  fig,
@@ -131,4 +151,4 @@ if st.session_state.api_response:
131
  )
132
 
133
  with tabs[1]:
134
- st.json(st.session_state.api_response)
 
1
  import streamlit as st
2
  import json
3
  import pandas as pd
4
+ import os
5
  from utils import load_and_process_data, create_time_series_plot, display_statistics, call_api
6
  import plotly.express as px
7
  import plotly.graph_objects as go
 
10
  if 'api_token' not in st.session_state:
11
  st.session_state.api_token = "p2s8X9qL4zF7vN3mK6tR1bY5cA0wE3hJ"
12
 
 
 
 
 
 
13
  # Initialize session state variables
14
  if 'current_file' not in st.session_state:
15
  st.session_state.current_file = None
 
17
  st.session_state.json_data = None
18
  if 'api_response' not in st.session_state:
19
  st.session_state.api_response = None
20
+ if 'using_default_file' not in st.session_state:
21
+ st.session_state.using_default_file = True
22
 
23
  st.title("Energy Production Anomaly Detection")
24
 
25
  st.markdown("""
26
+ This service analyzes energy production patterns to detect anomalies and unusual behavior in your data.
27
 
28
  ### Features
29
  - Real-time anomaly detection
30
+ - Production irregularity identification
31
  - Interactive visualization of detected anomalies
 
32
  """)
33
 
34
+ # Default file path
35
+ default_file_path = "samples/6_anomaly_detection_production.json" # Adjust this path to your default file
36
 
37
  # File upload and processing
38
+ uploaded_file = st.file_uploader("Upload JSON file (or use default)", type=['json'])
39
+
40
+ # Load default file if no file is uploaded and using_default_file is True
41
+ if uploaded_file is None and st.session_state.using_default_file:
42
+ if os.path.exists(default_file_path):
43
+ st.info(f"Using default file: {default_file_path}")
44
+ with open(default_file_path, 'r') as f:
45
+ file_contents = f.read()
46
+ if st.session_state.current_file != file_contents:
47
+ st.session_state.current_file = file_contents
48
+ st.session_state.json_data = json.loads(file_contents)
49
+ else:
50
+ st.warning(f"Default file not found at: {default_file_path}")
51
+ st.session_state.using_default_file = False
52
+
53
+ # If a file is uploaded, process it
54
  if uploaded_file:
55
+ st.session_state.using_default_file = False
56
  try:
57
  file_contents = uploaded_file.read()
58
  st.session_state.current_file = file_contents
59
  st.session_state.json_data = json.loads(file_contents)
60
+ except Exception as e:
61
+ st.error(f"Error processing file: {str(e)}")
62
+
63
+ # Process and display data if available
64
+ if st.session_state.json_data:
65
+ try:
66
  dfs = load_and_process_data(st.session_state.json_data)
67
  if dfs:
68
  st.header("Input Data Analysis")
 
75
  # Show basic statistical analysis
76
  col1, col2, col3 = st.columns(3)
77
  with col1:
78
+ st.metric("Average Production",
79
  f"{df['datacellar:value'].mean():.2f} {unit}")
80
  with col2:
81
  st.metric("Standard Deviation",
 
98
  if not st.session_state.api_token:
99
  st.error("Please enter your API token in the sidebar first.")
100
  else:
101
+ with st.spinner("Analyzing production patterns..."):
102
  # Add sensitivity and window_size to the request
103
  modified_data = st.session_state.json_data.copy()
104
 
 
105
  # Convert back to JSON and call API
106
  modified_content = json.dumps(modified_data).encode('utf-8')
107
  st.session_state.api_response = call_api(
 
113
  if st.button("Clear Results", key="clear_button"):
114
  st.session_state.api_response = None
115
  st.experimental_rerun()
 
116
  except Exception as e:
117
+ st.error(f"Error processing data: {str(e)}")
118
 
119
  # Display API results
120
  if st.session_state.api_response:
 
127
  input_data=st.session_state.json_data
128
  )
129
  if response_dfs:
130
+ anomalies = response_dfs['boolean']
131
+ anomalies = anomalies[anomalies['datacellar:value']==True]
132
 
133
  del response_dfs['boolean']
134
  for unit, df in response_dfs.items():
135
+ fig = create_time_series_plot(df, unit, service_type="Anomaly Detection")
136
+ # Get df values for anomalies
137
+ anomaly_df = df.iloc[anomalies['datacellar:timeStamp'].index]
138
+
139
+ fig.add_trace(go.Scatter(
140
+ x=anomaly_df['datacellar:timeStamp'],
141
+ y=anomaly_df['datacellar:value'],
142
+ mode='markers',
143
+ marker=dict(color='red'),
144
+ name='Anomalies'
145
+ ))
146
+
147
  # Create visualization with highlighted anomalies
148
  st.plotly_chart(
149
  fig,
 
151
  )
152
 
153
  with tabs[1]:
154
+ st.json(st.session_state.api_response)