Update app.py
app.py
CHANGED
@@ -1,4 +1,103 @@
 import os
+import requests
+import streamlit as st
+from groq import Groq
+
+# Set the Groq API key
+os.environ["GROQ_API_KEY"] = "key"
+
+# Initialize Groq client
+client = Groq(api_key=os.environ.get("GROQ_API_KEY"))
+
+# Carbon footprint reduction data (kg CO2 per kg recycled)
+carbon_reduction_data = {
+    "Plastic Bottles": 3.8,
+    "Glass Bottles": 0.5,
+    "Metal Cans": 9.0,
+    "Old Clothes": 2.0,
+    "Paper and Cardboard": 1.3,
+    "E-Waste": 15.0,
+    "Tires": 8.0,
+}
+
+# Function to call Groq LLM for recycling suggestions
+def get_recycling_suggestions_from_groq(item, quantity):
+    prompt = (
+        f"You are an expert in recycling and sustainability. "
+        f"Suggest profitable and eco-friendly uses for {quantity} kg of {item}, "
+        f"including household uses, ways to monetize them, and calculate carbon footprint reduction. "
+        f"Keep your response to 5 points and add relevant emojis."
+    )
+    chat_completion = client.chat.completions.create(
+        messages=[{"role": "user", "content": prompt}],
+        model="llama-3.3-70b-versatile",
+        stream=False,
+    )
+    return chat_completion.choices[0].message.content
+
+# Function to generate images using Stable Diffusion API
+def generate_image(prompt):
+    api_url = "https://api-inference.huggingface.co/models/CompVis/stable-diffusion-v1-4"
+    headers = {"Authorization": f"Bearer your_huggingface_api_key"}
+    response = requests.post(api_url, headers=headers, json={"inputs": prompt})
+    if response.status_code == 200:
+        return response.content
+    else:
+        st.error("❌ Image generation failed. Please try again later.")
+        return None
+
+# App title
+st.title("♻️ Recycle-Smart-PK")
+st.write("Select clutter items, specify quantities, and get tailored, profitable recycling suggestions along with generated images!")
+
+# Multi-select input for clutter items
+selected_items = st.multiselect(
+    "Select items to recycle:", list(carbon_reduction_data.keys())
+)
+
+# Quantity input for selected items
+quantities = {}
+for item in selected_items:
+    quantities[item] = st.number_input(
+        f"Enter quantity for {item} (in kg):", min_value=0, step=1
+    )
+
+# Process and display results
+if st.button("Get Recycling Suggestions"):
+    if selected_items:
+        total_carbon_reduction = 0
+        st.write("### ♻️ Recycling Suggestions and Impact:")
+        for item, quantity in quantities.items():
+            if quantity > 0:
+                # Get text-based suggestions from Groq LLM
+                llm_response = get_recycling_suggestions_from_groq(item, quantity)
+
+                # Calculate carbon footprint reduction
+                carbon_reduction = carbon_reduction_data.get(item, 0) * quantity
+                total_carbon_reduction += carbon_reduction
+
+                # Generate image for the recycling suggestion
+                image_prompt = f"Visual representation of recycling {item} into eco-friendly and profitable products."
+                image = generate_image(image_prompt)
+
+                # Display text and image
+                st.write(f"**{item} ({quantity} kg)**")
+                st.write(llm_response)
+                st.write(f"**Carbon Footprint Reduction**: {carbon_reduction:.2f} kg CO₂")
+                if image:
+                    st.image(image, caption=f"Generated: {item}", use_column_width=True)
+                st.markdown("---")
+
+        # Display total carbon footprint reduction
+        st.write("### Your Total Carbon Footprint Reduction")
+        st.write(f"**{total_carbon_reduction:.2f} kg CO₂ saved**")
+        st.success("Great job contributing to a greener planet!")
+    else:
+        st.error("❌ Please select at least one item and specify its quantity.")
+
+
+"""
+import os
 import streamlit as st
 from groq import Groq
 
@@ -93,3 +192,4 @@ if st.button("Ask and you will learn"):
     st.write(follow_up_response)
 else:
     st.error("Please enter a question.")
+"""
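The commit hard-codes a placeholder Groq key directly in app.py. A common alternative on Hugging Face Spaces is to store the key as a repository secret and read it at startup; the sketch below assumes a GROQ_API_KEY secret or environment variable is configured for the Space and is not part of the commit.

# Minimal sketch (assumes GROQ_API_KEY is set as a Space secret or environment variable)
import os

import streamlit as st
from groq import Groq

api_key = os.getenv("GROQ_API_KEY")
if not api_key:
    st.error("GROQ_API_KEY is not set. Add it as a Space secret or environment variable.")
    st.stop()

client = Groq(api_key=api_key)

With the key supplied this way, the os.environ assignment at the top of the new file is no longer needed.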
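For reference, the per-item carbon figure the app prints is just the factor from carbon_reduction_data (kg CO₂ saved per kg recycled) multiplied by the entered quantity, and the total is the sum over the selected items. A small standalone check using two factors from the committed table:

# Standalone check of the carbon arithmetic (uses two factors from carbon_reduction_data)
carbon_reduction_data = {"Plastic Bottles": 3.8, "Metal Cans": 9.0}
quantities = {"Plastic Bottles": 3, "Metal Cans": 2}  # kg entered by the user

total = sum(carbon_reduction_data.get(item, 0) * qty for item, qty in quantities.items())
print(total)  # 3 * 3.8 + 2 * 9.0 = 29.4 kg CO2 saved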