# Data_cellar_services/pages/1_Short_Term_Consumption.py
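"""Streamlit page for the short-term energy consumption forecasting service."""
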
import streamlit as st
import json
import os
from utils import load_and_process_data, create_time_series_plot, display_statistics, call_api
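
# Pre-fill the API token from the environment so the service can run without manual entry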
if 'api_token' not in st.session_state:
    st.session_state.api_token = os.getenv('NILM_API_TOKEN')
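
# Each service page has its own id; clear any forecast results left over from another page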
page_id = 1
if 'current_page' not in st.session_state:
    st.session_state.current_page = page_id
elif st.session_state.current_page != page_id:
    # Clear API response when switching to this page
    if 'api_response' in st.session_state:
        st.session_state.api_response = None
    # Update current page
    st.session_state.current_page = page_id

# Initialize session state variables
if 'current_file' not in st.session_state:
    st.session_state.current_file = None
if 'json_data' not in st.session_state:
    st.session_state.json_data = None
if 'api_response' not in st.session_state:
    st.session_state.api_response = None
if 'using_default_file' not in st.session_state:
    st.session_state.using_default_file = True

st.title("Short Term Energy Consumption Forecasting")
st.markdown("""
This service provides short-term forecasting of energy consumption patterns.
Upload your energy consumption data to generate predictions for the near future.
### Features
- Hourly consumption forecasting
- Interactive visualizations
- Statistical analysis of predictions
""")
# Default file path
default_file_path = "samples/1_short_term_consumption.json" # Adjust this path to your default file
# File upload and processing
uploaded_file = st.file_uploader("Upload JSON file (or use default)", type=['json'])
# Load default file if no file is uploaded and using_default_file is True
if uploaded_file is None and st.session_state.using_default_file:
    if os.path.exists(default_file_path):
        st.info(f"Using default file: {default_file_path}")
        with open(default_file_path, 'r') as f:
            file_contents = f.read()
            # Only reparse the JSON when the file contents actually changed between reruns
            if st.session_state.current_file != file_contents:
                st.session_state.current_file = file_contents
                st.session_state.json_data = json.loads(file_contents)
    else:
        st.warning(f"Default file not found at: {default_file_path}")
        st.session_state.using_default_file = False

# If a file is uploaded, process it
if uploaded_file:
    st.session_state.using_default_file = False
    try:
        file_contents = uploaded_file.read()
        st.session_state.current_file = file_contents
        st.session_state.json_data = json.loads(file_contents)
    except Exception as e:
        st.error(f"Error processing file: {str(e)}")

# Process and display data if available
if st.session_state.json_data:
    try:
        dfs = load_and_process_data(st.session_state.json_data)
        if dfs:
            st.header("Input Data")
            tabs = st.tabs(["Visualization", "Raw JSON", "Statistics"])
            with tabs[0]:
                for unit, df in dfs.items():
                    st.plotly_chart(create_time_series_plot(df, unit), use_container_width=True)
            with tabs[1]:
                st.json(st.session_state.json_data)
            with tabs[2]:
                display_statistics(dfs)

        if st.button("Generate Short Term Forecast"):
            if not st.session_state.api_token:
                st.error("Please enter your API token in the sidebar first.")
            else:
                with st.spinner("Generating forecast..."):
                    # Send the raw JSON payload to the short-term consumption inference endpoint
                    st.session_state.api_response = call_api(
                        st.session_state.current_file,
                        st.session_state.api_token,
                        "inference_consumption_short_term"
                    )
    except Exception as e:
        st.error(f"Error processing data: {str(e)}")

# Display API results
if st.session_state.api_response:
    st.header("Forecast Results")
    tabs = st.tabs(["Visualization", "Raw JSON", "Statistics"])
    with tabs[0]:
        response_dfs = load_and_process_data(
            st.session_state.api_response,
            input_data=st.session_state.json_data
        )
        if response_dfs:
            # The temperature series is not part of the consumption forecast, so skip it
            if 'Celsius' in response_dfs:
                del response_dfs['Celsius']
            for unit, df in response_dfs.items():
                st.plotly_chart(create_time_series_plot(df, unit), use_container_width=True)
    with tabs[1]:
        st.json(st.session_state.api_response)
    with tabs[2]:
        if response_dfs:
            display_statistics(response_dfs)