File size: 4,071 Bytes
7bf1eec
9318761
a199ef4
c93a752
a199ef4
9318761
 
a199ef4
c93a752
9318761
a199ef4
 
9318761
 
92fc89d
9318761
 
 
 
7bf1eec
9318761
 
 
 
 
 
 
 
 
7bf1eec
9318761
 
 
 
 
 
 
 
 
7bf1eec
9318761
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
c93a752
9318761
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
7bf1eec
9318761
 
 
7bf1eec
9318761
 
 
 
7bf1eec
9318761
 
 
7bf1eec
9318761
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
import os
import streamlit as st
import pandas as pd
import numpy as np
from groq import Groq
from sentence_transformers import SentenceTransformer
import faiss

# NOTE(security): this API key was previously hard-coded (and is therefore
# already leaked/compromised). Prefer the GROQ_API_KEY environment variable;
# the old literal remains only as a fallback so existing runs keep working.
# Rotate the key and delete the fallback as soon as possible.
GROQ_API_KEY = os.environ.get(
    "GROQ_API_KEY",
    "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",
)
# Initialize Groq API Client (used to generate the LLM analysis report)
client = Groq(api_key=GROQ_API_KEY)

# Load Pretrained Embedding Model (vectorizes dataset rows and user queries)
embedding_model = SentenceTransformer("all-MiniLM-L6-v2")

# Initialize global variables — module-level state shared by the helpers below
uploaded_file = None  # last uploaded CSV file object (set by the sidebar uploader)
faiss_index = None    # FAISS L2 index over row embeddings (set by load_dataset)
dataframe = None      # pandas DataFrame of the uploaded dataset (set by load_dataset)

# Function to load and preprocess dataset
def load_dataset(file):
    """Load a CSV of energy-usage records and build a FAISS index over it.

    Populates the module-level ``dataframe`` and ``faiss_index`` globals.
    Shows a Streamlit error and leaves the globals untouched when the
    required 'Energy Usage (kWh)' column is missing.
    """
    global dataframe, faiss_index
    df = pd.read_csv(file)
    # Guard: retrieval embeds exactly this column, so fail early with a clear
    # message instead of a KeyError below (which would also leave `dataframe`
    # set but `faiss_index` stale).
    if "Energy Usage (kWh)" not in df.columns:
        st.error("Dataset must contain an 'Energy Usage (kWh)' column.")
        return
    dataframe = df
    st.success("Dataset loaded successfully!")
    # Create FAISS index for the dataset: embed each row's usage value as text
    # and index with exact (brute-force) L2 search.
    embeddings = embedding_model.encode(dataframe["Energy Usage (kWh)"].astype(str).tolist())
    faiss_index = faiss.IndexFlatL2(embeddings.shape[1])
    faiss_index.add(np.array(embeddings))

# Function to retrieve relevant rows
def retrieve_relevant_data(query, top_k=5):
    """Return up to ``top_k`` dataset rows most similar to ``query``.

    Rows are returned as a list of dicts (one per DataFrame row). Returns an
    empty list — and shows a Streamlit error — when no dataset is loaded.
    """
    if dataframe is None or faiss_index is None:
        st.error("Please upload a dataset first.")
        return []
    query_embedding = embedding_model.encode([query])
    distances, indices = faiss_index.search(np.array(query_embedding), top_k)
    # FAISS pads `indices` with -1 when the index holds fewer than top_k
    # vectors; drop those, otherwise iloc[-1] would silently return the
    # last row as a bogus "match".
    valid_indices = [i for i in indices[0] if i >= 0]
    relevant_rows = dataframe.iloc[valid_indices].to_dict(orient="records")
    return relevant_rows

# Function to analyze data and generate cost-saving recommendations
def generate_cost_saving_recommendations(data):
    """Summarize usage/cost totals for *data* and append generic saving tips.

    *data* is a list of row dicts containing 'Energy Usage (kWh)' and 'Cost'
    keys. Returns a newline-joined report string, or a placeholder message
    when *data* is empty (which also avoids a division by zero below).
    """
    if not data:
        return "No relevant data found for recommendations."

    # Accumulate totals in one pass over the retrieved rows.
    usage_total = 0.0
    cost_total = 0.0
    for row in data:
        usage_total += row["Energy Usage (kWh)"]
        cost_total += row["Cost"]
    usage_avg = usage_total / len(data)

    report_lines = [
        f"Total energy usage: {usage_total:.2f} kWh",
        f"Average energy usage per household: {usage_avg:.2f} kWh",
        f"Total cost: ${cost_total:.2f}",
        "Recommendations:",
        "- Implement energy-efficient appliances.",
        "- Use renewable energy sources like solar or wind.",
        "- Schedule high-energy tasks during off-peak hours.",
        "- Conduct regular maintenance to reduce energy wastage.",
    ]
    return "\n".join(report_lines)

# Function to generate a detailed analysis report
def generate_report(query):
    """Produce an LLM analysis report plus rule-based saving tips for *query*.

    Retrieves the most relevant dataset rows, feeds them as context to the
    Groq chat model, and returns a
    ``(detailed_report, cost_saving_recommendations)`` tuple of strings.
    """
    rows = retrieve_relevant_data(query)
    # Flatten the retrieved rows into a plain-text context for the prompt.
    context = "\n".join(str(row) for row in rows)
    prompt = (
        f"Based on the following query: '{query}' and context:\n"
        f"{context}\nProvide an energy usage analysis report."
    )
    completion = client.chat.completions.create(
        messages=[{"role": "user", "content": prompt}],
        model="llama3-8b-8192",
        stream=False,
    )
    detailed_report = completion.choices[0].message.content
    recommendations = generate_cost_saving_recommendations(rows)
    return detailed_report, recommendations

# Streamlit app configuration — statements below run top-to-bottom on every
# Streamlit rerun (each widget interaction re-executes this script).
st.title("Energy Usage Analysis & Cost-Saving Report Generator")
st.sidebar.header("Upload Dataset")

# File upload: (re)load the dataset and rebuild the FAISS index whenever a
# CSV is present in the sidebar uploader.
uploaded_file = st.sidebar.file_uploader("Upload your energy usage dataset (CSV)", type=["csv"])
if uploaded_file is not None:
    load_dataset(uploaded_file)

# Query input
st.header("Generate Energy Usage Report")
query = st.text_input("Enter your query (e.g., 'Analyze peak usage times in urban areas')")

# Validate inputs, then generate and render both report sections.
if st.button("Generate Report"):
    if uploaded_file is None:
        st.error("Please upload a dataset first.")
    elif query.strip() == "":
        st.error("Please enter a query.")
    else:
        with st.spinner("Generating report..."):
            try:
                detailed_report, cost_saving_recommendations = generate_report(query)
                st.subheader("Energy Usage Analysis Report")
                st.write(detailed_report)
                st.subheader("Cost-Saving Recommendations")
                st.write(cost_saving_recommendations)
            except Exception as e:
                # Surface any failure (network, API, parsing) in the UI
                # instead of crashing the Streamlit script.
                st.error(f"An error occurred: {e}")