mednow committed on
Commit 25bc673 · verified · 1 Parent(s): 4d7c915

Upload 7 files

.gitattributes CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
  *.zip filter=lfs diff=lfs merge=lfs -text
  *.zst filter=lfs diff=lfs merge=lfs -text
  *tfevents* filter=lfs diff=lfs merge=lfs -text
+ best_model_.keras filter=lfs diff=lfs merge=lfs -text
ann_model.pkl ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f66f3a528dfb14e5e37f1c69ca59533cf1c3db4b091693108af5e31f1029b2d1
+ size 154741566
app.py ADDED
@@ -0,0 +1,94 @@
+ import streamlit as st
+ import tensorflow as tf
+ import pickle
+ from tensorflow.keras.preprocessing import image
+ import numpy as np
+
+ # Function to preprocess the image
+ def preprocess_image(img_path, img_height, img_width, model_type="CNN"):
+     # Load the image and convert to grayscale
+     img = image.load_img(img_path, target_size=(img_height, img_width), color_mode='grayscale')
+     img_array = image.img_to_array(img)
+
+     # Normalize the image array
+     img_array = img_array / 255.0  # Normalize for all models
+
+     if model_type in ["Logistic Regression", "Decision Tree"]:
+         img_array = img_array.flatten()  # Flatten for Logistic Regression and Decision Tree
+         img_array = np.expand_dims(img_array, axis=0)  # Add batch dimension
+     else:
+         img_array = np.expand_dims(img_array, axis=0)  # Add batch dimension for CNN and ANN
+
+     return img_array
+
+ # Load the Keras model
+ @st.cache_resource
+ def load_keras_model():
+     return tf.keras.models.load_model('best_model_.keras')
+
+ # Load the other models
+ @st.cache_resource
+ def load_pickle_model(model_path):
+     with open(model_path, 'rb') as f:
+         return pickle.load(f)
+
+ # Define model paths and validation accuracies
+ models_info = {
+     "ANN Model": {
+         "path": "ann_model.pkl",
+         "accuracy": 0.60
+     },
+     "Decision Tree": {
+         "path": "decision_tree_classifier_model.pkl",
+         "accuracy": 0.70
+     },
+     "Logistic Regression": {
+         "path": "logistic_regression_model.pkl",
+         "accuracy": 0.60
+     },
+     "CNN Model": {
+         "path": "best_model_.keras",
+         "accuracy": 0.90
+     }
+ }
+
+ # Streamlit UI
+ st.title("X-ray Image Classification")
+ st.write("Upload an X-ray image to classify it as Normal or Pneumonia.")
+
+ # Model selection
+ model_name = st.selectbox("Choose a model:", list(models_info.keys()))
+
+ # Display selected model accuracy
+ st.write(f"Selected Model: {model_name}")
+ st.write(f"Validation Accuracy: {models_info[model_name]['accuracy'] * 100:.2f}%")
+
+ # Load the selected model
+ if model_name == "CNN Model":
+     model = load_keras_model()
+ else:
+     model = load_pickle_model(models_info[model_name]["path"])
+
+ # File uploader for image
+ uploaded_file = st.file_uploader("Choose an X-ray image...", type="jpeg")
+
+ if uploaded_file is not None:
+     with open("temp.jpeg", "wb") as f:
+         f.write(uploaded_file.getbuffer())
+
+     # Use the appropriate preprocessing for the selected model
+     img_height, img_width = 224, 224  # Use the same dimensions as used during training
+     preprocessed_img = preprocess_image(
+         "temp.jpeg",
+         img_height,
+         img_width,
+         model_type=model_name  # Pass the model name directly
+     )
+
+     st.image(uploaded_file, caption="Uploaded X-ray Image", use_column_width=True)
+
+     # Prediction logic
+     prediction = model.predict(preprocessed_img)
+     prediction_label = "Pneumonia" if prediction[0] > 0.5 else "Normal"
+
+     st.write(f"Prediction: {prediction_label} (Model: {model_name})")
best_model_.keras ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:10fa4e3be07ff95be1cfd8b14a1a332d6798e2cad3175b524b286650818e5533
+ size 532793354
decision_tree_classifier_model.pkl ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ba187cb3ae4b84508751e6e975a1cffc7900e399c8bbbc3312a6d8ab94830c47
+ size 47851
dockerfile ADDED
@@ -0,0 +1,20 @@
+ # Use an official Python runtime as a parent image
+ FROM python:3.9-slim
+
+ # Set the working directory in the container
+ WORKDIR /app
+
+ # Copy the requirements file into the container
+ COPY requirements.txt ./
+
+ # Install any necessary dependencies
+ RUN pip install --no-cache-dir -r requirements.txt
+
+ # Copy the rest of the application code into the container
+ COPY . .
+
+ # Expose the port the app runs on
+ EXPOSE 8501
+
+ # Command to run the Streamlit app
+ CMD ["streamlit", "run", "app.py"]
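With this Dockerfile, the image would typically be built with docker build -t xray-app . and started with docker run -p 8501:8501 xray-app, where xray-app is an arbitrary tag name chosen here for illustration; the published port matches the EXPOSE 8501 directive, so the Streamlit UI becomes reachable at http://localhost:8501.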
logistic_regression_model.pkl ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:27fc617fd13a31c2aa9ff3f248df5e7f09dd1bb8485c0d783d31503093b90769
+ size 1606853
requirements.txt ADDED
@@ -0,0 +1,10 @@
+ tensorflow
+ opencv-python
+ matplotlib
+ streamlit
+ numpy
+ scipy
+ scikit-learn
+ keras_tuner
+ scikeras
+ lime