AswinPJaison committed
Commit 9a95718 · verified · 1 Parent(s): 0537e12

Upload 12 files

Files changed (12)
  1. .gitattributes +6 -35
  2. BP.pkl +0 -0
  3. BackPropogation.py +53 -0
  4. CN.h5 +3 -0
  5. DP.keras +3 -0
  6. LS.keras +3 -0
  7. PP.pkl +0 -0
  8. Perceptron.py +46 -0
  9. README.md +55 -12
  10. RN.keras +3 -0
  11. Streamlit.py +79 -0
  12. requirements.txt +5 -0
.gitattributes CHANGED
@@ -1,35 +1,6 @@
- *.7z filter=lfs diff=lfs merge=lfs -text
- *.arrow filter=lfs diff=lfs merge=lfs -text
- *.bin filter=lfs diff=lfs merge=lfs -text
- *.bz2 filter=lfs diff=lfs merge=lfs -text
- *.ckpt filter=lfs diff=lfs merge=lfs -text
- *.ftz filter=lfs diff=lfs merge=lfs -text
- *.gz filter=lfs diff=lfs merge=lfs -text
- *.h5 filter=lfs diff=lfs merge=lfs -text
- *.joblib filter=lfs diff=lfs merge=lfs -text
- *.lfs.* filter=lfs diff=lfs merge=lfs -text
- *.mlmodel filter=lfs diff=lfs merge=lfs -text
- *.model filter=lfs diff=lfs merge=lfs -text
- *.msgpack filter=lfs diff=lfs merge=lfs -text
- *.npy filter=lfs diff=lfs merge=lfs -text
- *.npz filter=lfs diff=lfs merge=lfs -text
- *.onnx filter=lfs diff=lfs merge=lfs -text
- *.ot filter=lfs diff=lfs merge=lfs -text
- *.parquet filter=lfs diff=lfs merge=lfs -text
- *.pb filter=lfs diff=lfs merge=lfs -text
- *.pickle filter=lfs diff=lfs merge=lfs -text
- *.pkl filter=lfs diff=lfs merge=lfs -text
- *.pt filter=lfs diff=lfs merge=lfs -text
- *.pth filter=lfs diff=lfs merge=lfs -text
- *.rar filter=lfs diff=lfs merge=lfs -text
- *.safetensors filter=lfs diff=lfs merge=lfs -text
- saved_model/**/* filter=lfs diff=lfs merge=lfs -text
- *.tar.* filter=lfs diff=lfs merge=lfs -text
- *.tar filter=lfs diff=lfs merge=lfs -text
- *.tflite filter=lfs diff=lfs merge=lfs -text
- *.tgz filter=lfs diff=lfs merge=lfs -text
- *.wasm filter=lfs diff=lfs merge=lfs -text
- *.xz filter=lfs diff=lfs merge=lfs -text
- *.zip filter=lfs diff=lfs merge=lfs -text
- *.zst filter=lfs diff=lfs merge=lfs -text
- *tfevents* filter=lfs diff=lfs merge=lfs -text
+ C:/Users/shahi/Desktop/My filter=lfs diff=lfs merge=lfs -text
+ CN.keras filter=lfs diff=lfs merge=lfs -text
+ CN.h5 filter=lfs diff=lfs merge=lfs -text
+ DP.keras filter=lfs diff=lfs merge=lfs -text
+ LS.keras filter=lfs diff=lfs merge=lfs -text
+ RN.keras filter=lfs diff=lfs merge=lfs -text
BP.pkl ADDED
Binary file (4.3 kB).
 
BackPropogation.py ADDED
@@ -0,0 +1,53 @@
+ import numpy as np
+ from tqdm import tqdm
+
+
+ class BackPropogation:
+     def __init__(self, learning_rate=0.01, epochs=100, activation_function='step'):
+         self.bias = 0
+         self.learning_rate = learning_rate
+         self.max_epochs = epochs
+         self.activation_function = activation_function
+
+     def activate(self, x):
+         # Map the weighted sum to a binary label with the chosen activation.
+         if self.activation_function == 'step':
+             return 1 if x >= 0 else 0
+         elif self.activation_function == 'sigmoid':
+             return 1 if (1 / (1 + np.exp(-x))) >= 0.5 else 0
+         elif self.activation_function == 'relu':
+             return 1 if max(0, x) >= 0.5 else 0
+
+     def fit(self, X, y):
+         n_features = X.shape[1]
+         self.weights = np.zeros(n_features)
+         for epoch in tqdm(range(self.max_epochs)):
+             for i in range(len(X)):
+                 inputs = X[i]
+                 target = y[i]
+                 weighted_sum = np.dot(inputs, self.weights) + self.bias
+                 prediction = self.activate(weighted_sum)
+
+                 # Calculating loss and updating weights.
+                 error = target - prediction
+                 self.weights += self.learning_rate * error * inputs
+                 self.bias += self.learning_rate * error
+
+             print(f"Updated Weights after epoch {epoch}: {self.weights}")
+         print("Training Completed")
+
+     def predict(self, X):
+         predictions = []
+         for i in range(len(X)):
+             inputs = X[i]
+             weighted_sum = np.dot(inputs, self.weights) + self.bias
+             prediction = self.activate(weighted_sum)
+             predictions.append(prediction)
+         return predictions
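For context, BP.pkl above is presumably a pickled instance of this class. A minimal sketch of how such a file could be produced — the toy AND-gate data, hyperparameters, and import path below are illustrative assumptions, not part of this commit:

```python
import pickle
import numpy as np

from BackPropogation import BackPropogation  # class defined in the file above

# Toy, linearly separable data (AND gate) just to exercise fit/predict;
# the real BP.pkl would have been trained on encoded IMDb reviews.
X = np.array([[0, 0], [0, 1], [1, 0], [1, 1]], dtype=float)
y = np.array([0, 0, 0, 1])

model = BackPropogation(learning_rate=0.1, epochs=20, activation_function='step')
model.fit(X, y)
print(model.predict(X))  # converges to [0, 0, 0, 1] on this toy data

# Serialize the fitted model the same way the Streamlit app later loads it.
with open('BP.pkl', 'wb') as f:
    pickle.dump(model, f)
```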
CN.h5 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:564ea2ffe49525d563795f616261871d3ed5e3c271e54afa384b89cb12107266
+ size 391811360
DP.keras ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:effe5efada6ccfaab2dc6ce3e189954b9c9b3abbaf86b2bdbe1f11d18d3684f0
+ size 10735120
LS.keras ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:705ae60751f9f288daf9486f5b3535e437fd7aabb96c5b79f908e7f5e68c9b02
+ size 4194296
PP.pkl ADDED
Binary file (2.26 kB).
 
Perceptron.py ADDED
@@ -0,0 +1,46 @@
+ import numpy as np
+ from tqdm import tqdm
+
+
+ class Perceptron:
+
+     def __init__(self, learning_rate=0.01, epochs=100, activation_function='step'):
+         self.bias = 0
+         self.learning_rate = learning_rate
+         self.max_epochs = epochs
+         self.activation_function = activation_function
+
+     def activate(self, x):
+         # Map the weighted sum to a binary label with the chosen activation.
+         if self.activation_function == 'step':
+             return 1 if x >= 0 else 0
+         elif self.activation_function == 'sigmoid':
+             return 1 if (1 / (1 + np.exp(-x))) >= 0.5 else 0
+         elif self.activation_function == 'relu':
+             return 1 if max(0, x) >= 0.5 else 0
+
+     def fit(self, X, y):
+         n_features = X.shape[1]
+         self.weights = np.random.randint(n_features, size=n_features)
+         for epoch in tqdm(range(self.max_epochs)):
+             for i in range(len(X)):
+                 inputs = X[i]
+                 target = y[i]
+                 weighted_sum = np.dot(inputs, self.weights) + self.bias
+                 prediction = self.activate(weighted_sum)
+                 # Perceptron learning rule: nudge weights and bias toward the target on errors.
+                 error = target - prediction
+                 self.weights = self.weights + self.learning_rate * error * inputs
+                 self.bias += self.learning_rate * error
+         print("Training Completed")
+
+     def predict(self, X):
+         predictions = []
+         for i in range(len(X)):
+             inputs = X[i]
+             weighted_sum = np.dot(inputs, self.weights) + self.bias
+             prediction = self.activate(weighted_sum)
+             predictions.append(prediction)
+         return predictions
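Conversely, this is roughly how the Streamlit app consumes PP.pkl at inference time. A minimal sketch, assuming the pickle holds a fitted Perceptron whose weight vector matches the 500-token review encoding used in Streamlit.py; the zero vector below is only a placeholder input:

```python
import pickle
import numpy as np

# Load the fitted Perceptron serialized as PP.pkl.
with open('PP.pkl', 'rb') as file:
    perceptron = pickle.load(file)

# Placeholder input: one review already encoded and padded to 500 token IDs
# (see sentiment_classification in Streamlit.py for the real encoding step).
encoded_review = np.zeros((1, 500))

label = perceptron.predict(encoded_review)[0]  # predict returns a list of 0/1 labels
print("Positive" if label == 1 else "Negative")
```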
README.md CHANGED
@@ -1,12 +1,55 @@
- ---
- title: DeepL
- emoji: 🏢
- colorFrom: gray
- colorTo: green
- sdk: streamlit
- sdk_version: 1.31.0
- app_file: app.py
- pinned: false
- ---
-
- Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
+ ## Deep Prediction Hub
+
+ Overview
+
+ Welcome to Deep Prediction Hub, a Streamlit web application that provides two deep-learning-based tasks: Sentiment Classification and Tumor Detection.
+
+ Tasks
+
+ 1. Sentiment Classification
+ This task classifies the sentiment of a given text as "Positive" or "Negative". Users enter a review, and the application returns the sentiment predicted by the selected model.
+
+ 2. Tumor Detection
+ Users upload an image, and the application uses a Convolutional Neural Network (CNN) model to determine whether a tumor is present.
+
+ Getting Started
+
+ Prerequisites
+
+ Python 3.6 or higher
+ Required packages: streamlit, numpy, Pillow, tensorflow, tqdm
+ Pre-trained models: PP.pkl, BP.pkl, DP.keras, RN.keras, LS.keras, CN.h5
+ Trained IMDb word index: ensure the IMDb word index is available for sentiment classification (a short encoding sketch follows this README).
+
+ Installation
+
+ Clone the repository: git clone https://github.com/yourusername/deep-prediction-hub.git
+
+ Usage
+
+ Access the application by opening the provided URL after running the Streamlit app.
+
+ Choose between the "Sentiment Classification" and "Tumor Detection" tasks.
+
+ Sentiment Classification
+
+ Enter a review in the text area.
+ Select a model from the dropdown.
+ Click "Submit" and then "Classify Sentiment".
+
+ Tumor Detection
+
+ Upload an image using the file uploader.
+ Click "Detect Tumor" to perform tumor detection.
+
+ Models
+
+ Perceptron (PP.pkl): Perceptron-based sentiment classification model.
+ Backpropagation (BP.pkl): Backpropagation-based sentiment classification model.
+ DNN (DP.keras): Deep Neural Network sentiment classification model.
+ RNN (RN.keras): Recurrent Neural Network sentiment classification model.
+ LSTM (LS.keras): Long Short-Term Memory sentiment classification model.
+ CNN (CN.h5): Convolutional Neural Network tumor detection model.
+
+ Contributing
+
+ Feel free to contribute by opening issues or submitting pull requests. Please follow the contribution guidelines.
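The "Trained IMDb word index" prerequisite above refers to the preprocessing Streamlit.py performs before calling any of the sentiment models. A minimal sketch of that encoding step, assuming reviews are padded to 500 token IDs as in Streamlit.py; the review text is only an example:

```python
from tensorflow.keras.datasets import imdb
from tensorflow.keras.preprocessing.sequence import pad_sequences

# Word index: a dict mapping each IMDb vocabulary word to an integer ID
# (downloaded on first use, then cached locally by Keras).
word_to_index = imdb.get_word_index()

review = "the movie was surprisingly good"  # example input only
tokens = [word_to_index.get(word, 0) for word in review.lower().split()]

# Pad/truncate to the fixed length the models expect (500 in Streamlit.py).
encoded = pad_sequences([tokens], maxlen=500)
print(encoded.shape)  # (1, 500)
```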
RN.keras ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:56f772e386a259788dabcb7189fbe4327b3a31924fd0104e9d52c1c626101262
+ size 1548448
Streamlit.py ADDED
@@ -0,0 +1,79 @@
+ import streamlit as st
+ import numpy as np
+ from PIL import Image
+ from tensorflow.keras.models import load_model
+ from tensorflow.keras.datasets import imdb
+ from tensorflow.keras.preprocessing.sequence import pad_sequences
+ import pickle
+
+ # Load word index for Sentiment Classification
+ word_to_index = imdb.get_word_index()
+
+ # Function to perform sentiment classification
+ def sentiment_classification(new_review_text, model):
+     max_review_length = 500
+     new_review_tokens = [word_to_index.get(word, 0) for word in new_review_text.split()]
+     new_review_tokens = pad_sequences([new_review_tokens], maxlen=max_review_length)
+     prediction = model.predict(new_review_tokens)
+     if isinstance(prediction, list):
+         prediction = prediction[0]
+     return "Positive" if prediction > 0.5 else "Negative"
+
+ # Function to perform tumor detection
+ def tumor_detection(img, model):
+     img = Image.open(img)
+     img = img.convert('RGB')  # drop any alpha channel; assumes the CNN expects 3-channel input
+     img = img.resize((128, 128))
+     img = np.array(img)
+     input_img = np.expand_dims(img, axis=0)
+     res = model.predict(input_img)
+     # Threshold assumes the model outputs a single sigmoid probability.
+     return "Tumor Detected" if res[0][0] > 0.5 else "No Tumor"
+
+ # Streamlit App
+ st.title("Welcome To DL Predictions")
+
+ # Choose between tasks (labels must match the branches below)
+ task = st.radio("Please select the task you want", ("Sentiment Classification", "Tumor Detection"))
+
+ if task == "Sentiment Classification":
+     # Input box for new review
+     new_review_text = st.text_area("Enter a New Review:", value="")
+     if st.button("Submit") and not new_review_text.strip():
+         st.warning("Please enter a review.")
+
+     if new_review_text.strip():
+         st.subheader("Choose the Model for Sentiment Classification")
+         model_option = st.selectbox("Select Model", ("Perceptron", "Backpropagation", "DNN", "RNN", "LSTM"))
+
+         # Load the model matching the selected option
+         if model_option == "Perceptron":
+             with open('PP.pkl', 'rb') as file:
+                 model = pickle.load(file)
+         elif model_option == "Backpropagation":
+             with open('BP.pkl', 'rb') as file:
+                 model = pickle.load(file)
+         elif model_option == "DNN":
+             model = load_model('DP.keras')
+         elif model_option == "RNN":
+             model = load_model('RN.keras')
+         elif model_option == "LSTM":
+             model = load_model('LS.keras')
+
+         if st.button("Classify Sentiment"):
+             result = sentiment_classification(new_review_text, model)
+             st.subheader("Sentiment Classification Results")
+             st.write(f"**{result}**")
+
+ elif task == "Tumor Detection":
+     st.subheader("Tumor Detection")
+     uploaded_file = st.file_uploader("Choose a tumor image...", type=["jpg", "jpeg", "png"])
+
+     if uploaded_file is not None:
+         # Load the tumor detection model
+         model = load_model('CN.h5')
+         st.image(uploaded_file, caption="Uploaded Image.", use_column_width=False, width=200)
+         st.write("")
+
+         if st.button("Detect Tumor"):
+             result = tumor_detection(uploaded_file, model)
+             st.subheader("Tumor Detection Results")
+             st.write(f"**{result}**")
requirements.txt ADDED
@@ -0,0 +1,5 @@
+ streamlit
+ numpy
+ Pillow
+ tensorflow
+ tqdm