Alexa17 committed on
Commit 5b3ede2 · verified · 1 Parent(s): 3de15ab

Delete titanic_rn_2024_2.py

Files changed (1)
  1. titanic_rn_2024_2.py +0 -155
titanic_rn_2024_2.py DELETED
@@ -1,155 +0,0 @@
- # -*- coding: utf-8 -*-
- """Titanic_RN.ipynb
-
- Automatically generated by Colab.
-
- Original file is located at
- https://colab.research.google.com/drive/1bBTik7AiMvb-_MLAf5rTR0STa8nfJp-d
- """
-
- # import the libraries
- import pandas as pd
- import numpy as np  # used later to build single-sample inputs; imported up front
-
- """Create the dataframe"""
-
- training = pd.read_csv("titanic-train.csv")
- training.head(5)
-
- """Check the contents of the training dataframe"""
-
- training.info()
-
- """Encode the gender values as integers:
- male ==> 0, female ==> 1
- """
-
- training['Gender'] = training['Gender'].apply(lambda toLabel: 0 if toLabel == 'male' else 1)
-
- training.head(5)
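# Illustrative sketch (not from the original file): a drop-in replacement for the
# .apply(...) line above. Series.map keeps the mapping explicit and leaves any
# unexpected value (including NaN) unmapped rather than silently coding it as 1.
training_alt = pd.read_csv("titanic-train.csv")
training_alt['Gender'] = training_alt['Gender'].map({'male': 0, 'female': 1})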
-
- """Fill the NaN values in the Age column with the mean of the valid ages"""
-
- training["Age"] = training["Age"].fillna(training["Age"].mean())  # assignment avoids pandas' chained-assignment warning with inplace=True
-
- training.head(5)
-
- """Verify the dataset"""
-
- training.info()
-
- """Identify the input data and the target data"""
-
- y_target = training["Survived"].values
- print(y_target)
-
- columns = ["Fare", "Pclass", "Gender", "Age", "SibSp"]
- x_input = training[list(columns)].values
- print(x_input)
-
- """Define the structure of the neural network for learning"""
-
- import keras
- from keras import layers
-
- model = keras.Sequential()
- model.add(keras.Input(shape=(5,)))  # Keras 3 prefers an explicit Input over input_dim
- model.add(layers.Dense(16, activation='relu'))
- model.add(layers.Dense(16, activation="relu", name="layer1"))
- model.add(layers.Dense(1, activation='sigmoid', name="layer2"))
-
- """Configure the network parameters"""
-
- model.compile(loss='binary_crossentropy', optimizer='adam', metrics=['accuracy'])
-
- """Train the neural network"""
-
- model.fit(x_input, y_target, epochs=1000)
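# Illustrative sketch (not from the original file): the script later evaluates on the
# same rows it trained on, so that accuracy is optimistic. Fitting a fresh copy of the
# model with a held-out validation split gives a rough generalization estimate
# (the 0.2 split and 100 epochs are illustrative assumptions).
model_val = keras.models.clone_model(model)
model_val.compile(loss='binary_crossentropy', optimizer='adam', metrics=['accuracy'])
history = model_val.fit(x_input, y_target, epochs=100, validation_split=0.2, verbose=0)
print("validation accuracy:", history.history['val_accuracy'][-1])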
-
- """Evaluate the accuracy."""
-
- score = model.evaluate(x_input, y_target)
- print("\n %s: %.2f%%" % (model.metrics_names[1], score[1]*100))
-
- XX = np.array(x_input[[2]])
- print(XX)
- respuesta = model.predict(XX)
- print(respuesta.round()[0][0])
-
- # Predict every training row one at a time and keep the rounded outputs
- y_simulado = []
- for i in range(len(y_target)):
-     y_simulado.append(model.predict(x_input[[i]]).round())
- print(y_simulado)
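# Illustrative sketch (not from the original file): the loop above calls predict once
# per row, which is slow; a single batched call yields the same rounded predictions.
y_simulado_batch = model.predict(x_input).round()
print(y_simulado_batch[:10])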
-
- # Predict a single hand-written passenger record (numpy is already imported above)
- respuesta = model.predict(np.array([[83.475, 1, 1, 35, 1]]))
- #respuesta = model.predict(np.array([[ 7.8958, 3.0, 0.0, 29.97086721, 0.0]]))
- #respuesta = model.predict(np.array([[82.2667, 1, 1, 23.000000, 1]]))
- print(respuesta.round()[0][0])
-
- # Compare the rounded per-row predictions against the true labels
- precision = 0
- for i in range(len(y_simulado)):
-     if y_target[i] == y_simulado[i].round()[0][0]:
-         precision = precision + 1
- precision = precision / float(len(y_simulado))
- print(precision)
-
- # Flatten the per-row predictions into a plain list of 0/1 labels
- y_simulado_lista = []
- for i in range(len(y_simulado)):
-     y_simulado_lista.append(y_simulado[i].round()[0][0])
- print(y_simulado_lista)
-
- from sklearn.metrics import confusion_matrix
- Matriz_de_Confusion = confusion_matrix(y_target, y_simulado_lista)
- Matriz_de_Confusion
- """Sensitivity"""
- # scikit-learn convention: rows are true labels, columns are predictions,
- # so with class 1 (Survived) as the positive class TP = [1,1] and FN = [1,0]
- VP = Matriz_de_Confusion[1, 1]
- FN = Matriz_de_Confusion[1, 0]
- sensibilidad = VP / (VP + FN)
- sensibilidad
-
- """Specificity"""
- # under the same convention TN = [0,0] and FP = [0,1]
- VN = Matriz_de_Confusion[0, 0]
- FP = Matriz_de_Confusion[0, 1]
- especificidad = VN / (VN + FP)
- especificidad
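# Illustrative cross-check (not from the original file): for binary problems
# scikit-learn's confusion_matrix().ravel() returns the cells in the order
# (tn, fp, fn, tp), which gives the same sensitivity and specificity without
# manual indexing.
tn, fp, fn, tp = Matriz_de_Confusion.ravel()
print("sensitivity:", tp / (tp + fn))
print("specificity:", tn / (tn + fp))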
-
- from matplotlib import pyplot as plt
- from sklearn.metrics import classification_report
-
- conf_mat = confusion_matrix(y_true=y_target, y_pred=y_simulado_lista)
- print('Confusion matrix - ORIGINAL DATA:\n', conf_mat)
- print('Confusion matrix metrics - ORIGINAL DATA:\n', classification_report(y_target, y_simulado_lista))
- labels = ['Class 0', 'Class 1']
- fig = plt.figure()
- ax = fig.add_subplot(111)
- cax = ax.matshow(conf_mat, cmap=plt.cm.Blues)
- fig.colorbar(cax)
- ax.set_xticklabels([''] + labels)
- ax.set_yticklabels([''] + labels)
- plt.xlabel('Predicted')
- plt.ylabel('Expected')
- plt.show()
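# Illustrative sketch (not from the original file): scikit-learn >= 1.0 can draw the
# same plot directly, avoiding the manual tick-label handling above.
from sklearn.metrics import ConfusionMatrixDisplay
ConfusionMatrixDisplay.from_predictions(y_target, y_simulado_lista,
                                        display_labels=labels, cmap=plt.cm.Blues)
plt.show()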
-
- # Save the model architecture as JSON
- model_json = model.to_json()
- with open("mimodel.json", "w") as json_file:
-     json_file.write(model_json)
-
- # Save the weights
- model.save_weights("mimodelo.weights.h5")
- print("model saved")
-
- # Reload the architecture and weights from disk
- from keras.models import model_from_json
- json_file = open("mimodel.json", 'r')  # must match the filename written above
- loaded_model_json = json_file.read()
- json_file.close()
- loaded_model = model_from_json(loaded_model_json)
- loaded_model.load_weights("mimodelo.weights.h5")
- print("model loaded from disk")
- loaded_model.compile(loss='binary_crossentropy', optimizer='adam', metrics=['accuracy'])
-
- print(loaded_model.predict(np.array([[83.475, 1, 1, 35, 1]])).round()[0][0])
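# Illustrative sketch (not from the original file): Keras 3 can also round-trip the
# model through a single native-format file, keeping architecture, weights, and
# compile state together (the .keras filename is an illustrative assumption).
model.save("mimodel.keras")
loaded = keras.saving.load_model("mimodel.keras")
print(loaded.predict(np.array([[83.475, 1, 1, 35, 1]])).round()[0][0])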