# Recycle-Smart-PK — Streamlit app (deployed as a Hugging Face Space).
import os

import streamlit as st
from groq import Groq

# SECURITY NOTE(review): never commit a real API key in source. Prefer setting
# GROQ_API_KEY in the deployment environment; setdefault only supplies the
# placeholder when the variable is absent.
os.environ.setdefault("GROQ_API_KEY", "key")

# Initialize the Groq client.
# Bug fix: this previously read os.environ.get("key") — a variable that is
# never set — so the client was always constructed with api_key=None.
client = Groq(api_key=os.environ.get("GROQ_API_KEY"))
# Carbon footprint reduction per recyclable category,
# in kg of CO2 avoided per kg of material recycled.
carbon_reduction_data = {
    "Plastic Bottles": 3.8,
    "Glass Bottles": 0.5,
    "Metal Cans": 9.0,
    "Old Clothes": 2.0,
    "Paper and Cardboard": 1.3,
    "E-Waste": 15.0,
    "Tires": 8.0,
}
# Function to call Groq LLM
def get_recycling_suggestions_from_groq(item, quantity):
    """Ask the Groq LLM for recycling ideas for *quantity* kg of *item*.

    Returns the model's reply text: up to 5 points covering household uses,
    monetization ideas, and carbon-footprint reduction, with emojis.
    """
    # Fixed typos ("responce" -> "response") and added the sentence
    # separators that were missing between the concatenated fragments
    # (previously "...reduction.Keep your responce...").
    prompt = (
        f"You are an expert in recycling and sustainability. "
        f"Suggest profitable and eco-friendly uses for {quantity} kg of {item}, "
        f"including household uses, ways to monetize them, and calculate "
        f"carbon footprint reduction. "
        f"Keep your response to maximum 5 points. "
        f"Add emojis in your response."
    )
    # Non-streaming call: we want the full text in one piece for st.write.
    chat_completion = client.chat.completions.create(
        messages=[{"role": "user", "content": prompt}],
        model="llama-3.3-70b-versatile",
        stream=False,
    )
    return chat_completion.choices[0].message.content
# App title and intro text.
st.title("β»οΈ Recycle-Smart-PK powered by LLM π")
st.write("Select clutter items, specify quantities, and get tailored, profitable recycling suggestions along with carbon footprint reduction scores!")

# Multi-select input for clutter items; options come from the carbon table
# so every selectable item has a known CO2 factor.
selected_items = st.multiselect(
    "Select items to recycle:",
    list(carbon_reduction_data.keys())
)

# One numeric input per selected item, keyed by item name.
# min_value=0 with step=1 restricts entry to whole non-negative kg.
quantities = {}
for item in selected_items:
    quantities[item] = st.number_input(
        f"Enter quantity for {item} (in kg):", min_value=0, step=1
    )
# Process and display results when the user asks for suggestions.
if st.button("Get Recycling Suggestions"):
    if selected_items:
        total_carbon_reduction = 0
        st.write("### β»οΈ Recycling Suggestions and Impact:")
        for item, quantity in quantities.items():
            # Skip items the user selected but left at 0 kg.
            if quantity > 0:
                # Call Groq LLM for dynamic, per-item suggestions.
                llm_response = get_recycling_suggestions_from_groq(item, quantity)
                # Fetch carbon footprint reduction (kg CO2 = factor * kg recycled).
                carbon_reduction = carbon_reduction_data.get(item, 0) * quantity
                total_carbon_reduction += carbon_reduction
                # Display results for each item.
                st.write(f"**{item} ({quantity} kg)**")
                st.write(llm_response)
                st.write(f"π **Carbon Footprint Reduction**: {carbon_reduction:.2f} kg COβ")
                st.write("---")
        # Display the total carbon footprint reduction across all items.
        # NOTE(review): if every quantity is 0 this still shows "0.00 kg saved";
        # consider warning instead — kept as-is to preserve behavior.
        st.write("### π Your Total Carbon Footprint Reduction π")
        st.write(f"π **{total_carbon_reduction:.2f} kg COβ saved**")
        st.success("Great job contributing to a greener planet! π±π")
    else:
        st.error("Please select at least one item and specify its quantity.")
# Follow-up Q&A with the Groq LLM: free-form question box below the results.
st.write("### π€ Have more questions about recycling?")
user_query = st.text_input("Ask me about recycling:")
if st.button("Ask and you will learn"):
    if user_query:
        # Same model/settings as the suggestion helper; the raw user query is
        # sent as-is with no extra system prompt.
        follow_up_response = client.chat.completions.create(
            messages=[{"role": "user", "content": user_query}],
            model="llama-3.3-70b-versatile",
            stream=False,
        ).choices[0].message.content
        st.write("### π§ LLM's Answer: Tailored for you")
        st.write(follow_up_response)
    else:
        st.error("Please enter a question.")