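"""Waste-classification demo: a Gradio app that runs an uploaded image through a
Keras model (keras_model.h5 / labels.txt) and asks a chat-completions endpoint
for short disposal instructions for the predicted class."""
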
import gradio as gr
import numpy as np
import cv2 as cv
import requests
import time

print('START')
np.set_printoptions(suppress=True)

# Pre-allocated input buffer for the model: one 224x224 RGB image.
data = np.ndarray(shape=(1, 224, 224, 3), dtype=np.float32)

# Class labels, one per line, in the model's output order.
with open("labels.txt", "r") as file:
    labels = file.read().splitlines()

# Chat-completions endpoint used to fetch disposal instructions.
# Placeholder URL: replace with your own OpenAI-compatible API host.
host = "https://your-llm-host.example.com/v1/chat/completions"

def classify(input_image):
    # Gradio passes the uploaded image itself (not a file path).
    try:
        # Resize to the 224x224 input the model expects and scale to roughly [-1, 1].
        image_data = cv.resize(np.array(input_image), (224, 224))
        normalized_image_array = (image_data.astype(np.float32) / 127.0) - 1
        data[0] = normalized_image_array

        # Load the model within the classify function
        import tensorflow as tf
        model = tf.keras.models.load_model('keras_model.h5')
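        # Note: the model is reloaded from disk on every request; a module-level
        # cache would avoid the repeated load.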

        prediction = model.predict(data)
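        # prediction has shape (1, num_classes); the loop below prints each class
        # probability and tracks the index of the highest one.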
        
        max_label_index = None
        max_prediction_value = -1

        print('Prediction')

        for i, label in enumerate(labels):
            prediction_value = float(prediction[0][i])
            rounded_value = round(prediction_value, 2)
            print(f'{label}: {rounded_value}')

            if prediction_value > max_prediction_value:
                max_label_index = i
                max_prediction_value = prediction_value  # Update max_prediction_value

        if max_label_index is not None:
            max_label = labels[max_label_index].split(' ', 1)[1]
            print(f'Maximum Prediction: {max_label} with a value of {round(max_prediction_value, 2)}')

            time.sleep(1)
            print("\nWays to dispose of this waste: " + max_label)
            # Ask the chat endpoint for disposal steps for the predicted class.
            payload = [
                {"role": "system", "content": "You are a helpful assistant."},
                {"role": "user", "content": "Give me the steps to dispose of this waste in bullet points (5 max): " + max_label}
            ]

            response = requests.post(host, json={
                "messages": payload,
                "temperature": 0.5,
                "presence_penalty": 0,
                "frequency_penalty": 0,
                "top_p": 1
            }).json()

            # Return the assistant's reply (OpenAI-style chat-completions response shape).
            return response["choices"][0]["message"]["content"]

        # Fallback when no prediction could be selected.
        return "Could not classify the image."

    except Exception as e:
        return f"An error occurred: {e}"

# Build and launch the Gradio web UI.
iface = gr.Interface(
    fn=classify,
    inputs=gr.Image(),  # gr.inputs.Image() is deprecated in current Gradio releases
    outputs="text",
    title="Waste Classifier",
    description="Upload an image to classify and get disposal instructions."
)
iface.launch()