# Recycle-Smart-PK — Streamlit app.
# (Removed non-code residue from a web-page scrape: hosting status lines,
# file size, and commit-hash/line-number gutters that preceded the source.)
import os
import streamlit as st
from groq import Groq
# Configure the Groq API key.
# NOTE(security): a hard-coded placeholder lives here only as a stand-in —
# never commit a real secret; export GROQ_API_KEY in the environment instead.
os.environ["GROQ_API_KEY"] = "key"
# Initialize the Groq client.
# Bug fix: the key was previously read with os.environ.get("key"), which looks
# up a non-existent variable named "key" and returns None, so the client was
# constructed without credentials. Read the GROQ_API_KEY variable set above.
client = Groq(api_key=os.environ.get("GROQ_API_KEY"))
# Carbon footprint reduction factors: kg of CO2 avoided per kg of material
# recycled. The keys double as the options offered in the multiselect widget
# below, so every selectable item is guaranteed a lookup factor.
# NOTE(review): values look like rough published averages — confirm against a
# citable source before presenting the numbers as authoritative.
carbon_reduction_data = {
    "Plastic Bottles": 3.8,
    "Glass Bottles": 0.5,
    "Metal Cans": 9.0,
    "Old Clothes": 2.0,
    "Paper and Cardboard": 1.3,
    "E-Waste": 15.0,
    "Tires": 8.0,
}
# Function to call Groq LLM
def get_recycling_suggestions_from_groq(item, quantity):
    """Ask the Groq LLM for recycling and monetization ideas for an item.

    Args:
        item: Material name (e.g. "Plastic Bottles").
        quantity: Amount in kilograms.

    Returns:
        The model's reply text (instructed to be at most 5 points, with emojis).
    """
    # Fix: the original adjacent f-string fragments joined without separating
    # spaces ("...reduction.Keep your responce to maximum 5 points.add ...")
    # and misspelled "response" twice, garbling the instructions sent to the
    # model. Fragments now end with a space and use correct spelling.
    prompt = (
        f"You are an expert in recycling and sustainability. "
        f"Suggest profitable and eco-friendly uses for {quantity} kg of {item}, "
        f"including household uses, ways to monetize them, and calculate carbon footprint reduction. "
        f"Keep your response to maximum 5 points. "
        f"Add emojis in your response."
    )
    chat_completion = client.chat.completions.create(
        messages=[{"role": "user", "content": prompt}],
        model="llama-3.3-70b-versatile",
        stream=False,  # one complete reply; no token streaming
    )
    return chat_completion.choices[0].message.content
# App title and intro copy.
# NOTE(review): the emoji characters below are mojibake (UTF-8 bytes decoded
# as Latin-1 at some point) — left byte-identical here; re-save the file as
# UTF-8 from the original source to restore them.
st.title("β»οΈ Recycle-Smart-PK powered by LLM π")
st.write("Select clutter items, specify quantities, and get tailored, profitable recycling suggestions along with carbon footprint reduction scores!")
# Multi-select input for clutter items; the options come straight from the
# carbon-reduction lookup table, so every choice has a CO2 factor.
selected_items = st.multiselect(
    "Select items to recycle:",
    list(carbon_reduction_data.keys())
)
# One whole-kg quantity widget per selected item (min 0, step 1).
# quantities maps item name -> kg entered by the user.
quantities = {}
for item in selected_items:
    quantities[item] = st.number_input(
        f"Enter quantity for {item} (in kg):", min_value=0, step=1
    )
# Process and display results when the user clicks the button.
if st.button("Get Recycling Suggestions"):
    if selected_items:
        total_carbon_reduction = 0
        st.write("### β»οΈ Recycling Suggestions and Impact:")
        # Only items with a positive quantity are processed; zero-kg entries
        # are silently skipped.
        for item, quantity in quantities.items():
            if quantity > 0:
                # Call Groq LLM for dynamic suggestions (one request per item).
                llm_response = get_recycling_suggestions_from_groq(item, quantity)
                # Carbon saved = per-kg factor * kg entered; unknown items
                # fall back to a factor of 0.
                carbon_reduction = carbon_reduction_data.get(item, 0) * quantity
                total_carbon_reduction += carbon_reduction
                # Display results for each item.
                st.write(f"**{item} ({quantity} kg)**")
                st.write(llm_response)
                st.write(f"π **Carbon Footprint Reduction**: {carbon_reduction:.2f} kg COβ")
                st.write("---")
        # Display total carbon footprint reduction across all items.
        # NOTE(review): this summary renders even when every quantity is 0 —
        # it will then report "0.00 kg saved"; confirm that is intended.
        st.write("### π Your Total Carbon Footprint Reduction π")
        st.write(f"π **{total_carbon_reduction:.2f} kg COβ saved**")
        st.success("Great job contributing to a greener planet! π±π")
    else:
        st.error("Please select at least one item and specify its quantity.")
# Follow-up Q&A: free-form question passed straight to the Groq LLM.
st.write("### π€ Have more questions about recycling?")
user_query = st.text_input("Ask me about recycling:")
if st.button("Ask and you will learn"):
    if user_query:
        # Single non-streaming completion; the raw user text is the only
        # message (no system prompt constrains the answer to recycling).
        follow_up_response = client.chat.completions.create(
            messages=[{"role": "user", "content": user_query}],
            model="llama-3.3-70b-versatile",
            stream=False,
        ).choices[0].message.content
        st.write("### π§ LLM's Answer: Tailored for you")
        st.write(follow_up_response)
    else:
        st.error("Please enter a question.")