import gradio as gr
import numpy as np
import cv2 as cv
import requests
import time
import os

# Configuration is read from environment variables.
host = os.environ.get("host")        # URL of the chat-completions style endpoint
code = os.environ.get("code")        # access code required to run a classification
model_llm = os.environ.get("model")  # LLM model name used for disposal instructions
content = os.environ.get("content")  # read but not used below
state = os.environ.get("state")      # read but not used below

model = None  # Keras model, loaded lazily on the first request and then cached

print('START')
np.set_printoptions(suppress=True)

# Pre-allocated input buffer with the shape the Keras model expects: one 224x224 RGB image.
data = np.ndarray(shape=(1, 224, 224, 3), dtype=np.float32)

with open("labels.txt", "r") as file:
    labels = file.read().splitlines()
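
# Note: the label parsing further down (labels[...].split(' ', 1)[1]) assumes each
# line of labels.txt pairs an index with a class name. Illustrative example only;
# the actual class names depend on the trained model:
#
#   0 Recyclable
#   1 Organic
#   2 Hazardous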

def classify(image_path, text_input):
    global model
    if text_input == code:
        output = []  # Collected result dictionaries

        # Gradio passes the uploaded image as a numpy array; resize and
        # normalize it to roughly the [-1, 1] range the model expects.
        image_array = np.array(image_path)
        image_array = cv.resize(image_array, (224, 224))
        normalized_image_array = (image_array.astype(np.float32) / 127.0) - 1
        data[0] = normalized_image_array

        # Load the Keras model lazily on the first request, then reuse it.
        if model is None:
            import tensorflow as tf
            model = tf.keras.models.load_model('keras_model.h5')

        prediction = model.predict(data)
        
        # Track the highest-scoring class across all labels.
        max_label_index = None
        max_prediction_value = -1

        print('Prediction')
    
        for i, label in enumerate(labels):
            prediction_value = float(prediction[0][i])
            rounded_value = round(prediction_value, 2)
            print(f'{label}: {rounded_value}')
    
            if prediction_value > max_prediction_value:
                max_label_index = i
                max_prediction_value = prediction_value  # Update max_prediction_value
    
        if max_label_index is not None:
            # Strip the numeric index prefix from the label line, keeping the class name.
            max_label = labels[max_label_index].split(' ', 1)[1]
            print(f'Maximum Prediction: {max_label} with a value of {round(max_prediction_value, 2)}')
    
            time.sleep(1)
            print("\nWays to dispose of this waste: " + max_label)
            # Ask the LLM endpoint for disposal steps using an OpenAI-style chat payload.
            payload = [
                {"role": "system", "content": "You are a helpful assistant."},
                {"role": "user", "content": "Give me the steps to dispose of this waste in bullet points (5 max): " + max_label}
            ]

            response = requests.post(host, json={
                "messages": payload,
                "model": model_llm,
                "temperature": 0.5,
                "presence_penalty": 0,
                "frequency_penalty": 0,
                "top_p": 1
            }, timeout=60).json()

            reply = response["choices"][0]["message"]["content"]

            output.append({"type": max_label, "prediction_value": round(max_prediction_value, 2), "content": reply})
        
        return output  # Return the populated output list
    else:
        return "Unauthorized"

iface = gr.Interface(
    fn=classify,
    inputs=[gr.Image(), gr.Textbox(label="Access code")],
    outputs=gr.JSON(),  # Return the result list as JSON
    title="Waste Classifier",
    description="Upload an image to classify and get disposal instructions."
)
iface.launch()
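
# For reference, a successful request returns a one-element list of the form below
# (values are illustrative only; the label and text depend on the model and the LLM):
#
#   [{"type": "Recyclable", "prediction_value": 0.97,
#     "content": "- Rinse the container\n- ..."}]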