dalmacyali committed
Commit 1507eb2 · verified · 1 Parent(s): 2e3c91b

Update app.R

Files changed (1): app.R +69 -143
app.R CHANGED
@@ -3,90 +3,8 @@ library(shinyjs)
  library(bslib)
  library(dplyr)
  library(ggplot2)
- library(tm)
- library(SnowballC)
- library(plotly)
- library(dplyr)
- library(tidyr)
- library(igraph)
- library(ggraph)
- library(reshape2)
- library(SnowballC)
- library(RColorBrewer)
- library(syuzhet)
- library(cluster)
- library(Rtsne)
- library(umap)
- library(MASS)
- library(koRpus)
- library(openxlsx)
- library(tools)
- library(shinyWidgets)
  library(readxl)
- library(scales)
- library(caret)
- library(BBmisc)
- library(glmnet)
- library(pROC)
- library(ROCR)
- library(car)
- library(ResourceSelection)
- library(tree)
- library(ggplotify)
- library(lmtest)
- library(gridExtra)
- library(patchwork)
- library(caret)
- library(randomForest)
- library(gbm)
- library(earth)
- library(broom)
- library(rlang)
- library(ggdendro)
- library(pastecs)
- library(forecast)
- library(scales)
- library(caret)
- library(BBmisc)
- library(glmnet)
- library(pROC)
- library(ROCR)
- library(car)
- library(ResourceSelection)
- library(tree)
- library(ggplotify)
- library(lmtest)
- library(gridExtra)
- library(patchwork)
- library(caret)
- library(randomForest)
- library(gbm)
- library(earth)
- library(broom)
- library(rlang)
- library(ggdendro)
- library(pastecs)
- library(dbscan)
- library(fpc)
- library(factoextra)
- library(scales)
- library(openxlsx)
- library(arules)
- library(arulesViz)
- library(viridis)
- library(kohonen)
- library(purrr)
- library(rvest)
- library(Rtsne)
- library(shinydashboard)
- library(DT)
- library(DataExplorer)
- library(lubridate)
- library(readr)
  library(htmlwidgets)
- library(GGally)
- library(keras)
- library(tensorflow)
  library(neuralnet)
  library(rsample)
  options(width = 150)
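This hunk drops roughly eighty unconditional library() calls (many of them duplicates such as caret, scales, and glmnet loaded several times) and keeps only the packages the app still attaches at startup (bslib, dplyr, ggplot2, readxl, htmlwidgets, neuralnet, rsample, plus shinyjs from the hunk context). As a minimal sketch, not part of this commit, packages that back optional features could instead be attached on demand with requireNamespace(); the helper name load_if_available is an assumption made for illustration:

# Hypothetical helper (not in app.R): attach a package only if it is installed,
# so optional features degrade gracefully instead of failing at startup.
load_if_available <- function(pkg) {
  if (requireNamespace(pkg, quietly = TRUE)) {
    library(pkg, character.only = TRUE)
    return(TRUE)
  }
  message("Package '", pkg, "' is not installed; the features that need it stay disabled.")
  FALSE
}

# Example with one of the packages removed by this commit.
load_if_available("plotly")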
@@ -356,77 +274,85 @@ server <- function(input, output, session) {
  })

  ##Multiple Perceptron Model
+ ##Multiple Perceptron Model
  # Reactive expression for data input
- # Reactive expression for data input
- dataMLP <- reactive({
- req(input$mlp_fileInput)
- inFile <- input$mlp_fileInput
- if (grepl("\\.csv$", inFile$name)) {
- read.csv(inFile$datapath, stringsAsFactors = FALSE)
- } else if (grepl("\\.(xlsx|xls)$", inFile$name)) {
- readxl::read_xlsx(inFile$datapath)
- } else {
- stop("Unsupported file type")
- }
- })
-
- output$mlp_preprocessUI <- renderUI({
- req(dataMLP())
- varNames <- names(dataMLP())
- tagList(
- selectInput("mlp_targetVariable", "Select Target Variable", choices = varNames),
- selectInput("mlp_variables", "Select Predictor Variables", choices = varNames, multiple = TRUE),
- selectInput("mlp_covariate", "Select Covariate Variable", choices = varNames),
- tags$p(HTML("Please select Covariate Variable from Predictor Variables"))
- )
- })
-
- observeEvent(input$mlp_trainButton, {
- req(dataMLP(), input$mlp_targetVariable, input$mlp_variables)
- data <- dataMLP()
- data <- na.omit(data)
-
- # Define the formula for the neural network
- formula <- as.formula(paste(input$mlp_targetVariable, "~", paste(input$mlp_variables, collapse = "+")))
-
- # Train the neural network model
- nn <- neuralnet(formula, data, hidden = rep(input$mlp_neurons, input$mlp_hiddenLayers), linear.output = FALSE, threshold = 0.01, stepmax = input$mlp_epochs)
+ dataMLP <- reactive({
+ req(input$mlp_fileInput)
+ inFile <- input$mlp_fileInput
+ if (grepl("\\.csv$", inFile$name)) {
+ read.csv(inFile$datapath, stringsAsFactors = FALSE)
+ } else if (grepl("\\.(xlsx|xls)$", inFile$name)) {
+ readxl::read_xlsx(inFile$datapath)
+ } else {
+ stop("Unsupported file type")
+ }
+ })

- # Plot the neural network
- output$mlp_trainingPlot <- renderPlot({
- plot(nn, rep = "best")
+ output$mlp_preprocessUI <- renderUI({
+ req(dataMLP())
+ varNames <- names(dataMLP())
+ tagList(
+ selectInput("mlp_targetVariable", "Select Target Variable", choices = varNames),
+ selectInput("mlp_variables", "Select Predictor Variables", choices = varNames, multiple = TRUE),
+ selectInput("mlp_covariate", "Select Covariate Variable", choices = varNames),
+ tags$p(HTML("Please select Covariate Variable from Predictor Variables"))
+ )
  })

- # Print the result matrix of the neural network
- output$mlp_evaluation <- renderPrint({
- print(nn$result.matrix)
- cat("\nNeural Network Model Performance Summary:\n")
+ observeEvent(input$mlp_trainButton, {
+ req(dataMLP(), input$mlp_targetVariable, input$mlp_variables)
+ data <- dataMLP()
+ data <- na.omit(data)

- if (nn$result.matrix["error", ] > 200) {
- cat("The model error of", nn$result.matrix["error", ], "is above the expected threshold. This may indicate that the model does not fit the data well. Consider collecting more data, feature engineering, or adjusting the model's complexity.\n")
- } else {
- cat("The model error of", nn$result.matrix["error", ], "is within the acceptable range, suggesting the model has learned the patterns from the data effectively.\n")
- }
+ # Define the formula for the neural network
+ formula <- as.formula(paste(input$mlp_targetVariable, "~", paste(input$mlp_variables, collapse = "+")))

- cat("The model took", nn$result.matrix["steps", ], "steps to converge, which indicates ", ifelse(nn$result.matrix["steps", ] < 3000, "an efficient training process.", "that the maximum set steps were reached without sufficient convergence."), "\n")
+ # Train the neural network model
+ nn <- neuralnet(formula, data, hidden = c(input$mlp_neurons), linear.output = FALSE, threshold = 0.01, stepmax = input$mlp_epochs)

- cat("The model's weights have been optimized through training. Each weight reflects the importance of the corresponding input feature for predicting the output. For instance, the weight for 'Price' to the first hidden neuron is", nn$result.matrix["Price.to.1layhid1", ], ".\n")
+ # Plot the neural network
+ output$mlp_trainingPlot <- renderPlot({
+ plot(nn,rep = "best")
+ })

- cat("The threshold for stopping the training was set to", nn$result.matrix["reached.threshold", ], ", and the model reached an error close to this threshold, which is a good sign of model convergence.\n")

- cat("This trained model can now be used to make predictions on new data. It's important to validate the model's performance on a separate test set to ensure its predictive accuracy.\n")
- })
-
- output$mlp_gwplot <- renderPlot({
- req(input$mlp_covariate)
- selected_var <- input$mlp_covariate
- if (selected_var %in% colnames(nn$covariate)) {
- gwplot(nn, selected.covariate = selected_var, min = -2.5, max = 5)
- } else {
- cat("Please select a valid predictor variable to view its weight distribution.")
- }
+ # Print the result matrix of the neural network
+ output$mlp_evaluation <- renderPrint({
+ print(nn$result.matrix)
+ # Neural Network Model Performance Summary
+ cat("\nNeural Network Model Performance Summary:\n")
+
+ # If the error is not within a reasonable range, you could give more context:
+ if (nn$result.matrix["error", ] > 200) {
+ cat("The model error of", nn$result.matrix["error", ], "is above the expected threshold. This may indicate that the model does not fit the data well. Consider collecting more data, feature engineering, or adjusting the model's complexity.\n")
+ } else {
+ cat("The model error of", nn$result.matrix["error", ], "is within the acceptable range, suggesting the model has learned the patterns from the data effectively.\n")
+ }
+
+ # Comment on the number of steps
+ cat("The model took", nn$result.matrix["steps", ], "steps to converge, which indicates ", ifelse(nn$result.matrix["steps", ] < 3000, "an efficient training process.", "that the maximum set steps were reached without sufficient convergence."), "\n")
+
+ # Comment on the weights
+ cat("The model's weights have been optimized through training. Each weight reflects the importance of the corresponding input feature for predicting the output. For instance, the weight for 'Price' to the first hidden neuron is", nn$result.matrix["Price.to.1layhid1", ], ".\n")
+
+ # Mention the threshold
+ cat("The threshold for stopping the training was set to", nn$result.matrix["reached.threshold", ], ", and the model reached an error close to this threshold, which is a good sign of model convergence.\n")
+
+ # Add a note on the usage of the model
+ cat("This trained model can now be used to make predictions on new data. It's important to validate the model's performance on a separate test set to ensure its predictive accuracy.\n")
+ })
+
+ output$mlp_gwplot <- renderPlot({
+ req(input$mlp_covariate) # Make sure input$mlp_variables is available
+ selected_var <- input$mlp_covariate # This should be a vector of selected variable names
+ if (length(selected_var) == 1) { # gwplot may only accept a single variable
+ gwplot(nn, selected.covariate = selected_var, min = -2.5, max = 5)
+ } else {
+ cat("Please select a single predictor variable to view its weight distribution.")
+ }
+ })
+
  })
- })


  }
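The substantive change in this hunk is the hidden-layer specification passed to neuralnet(): the old code built it with rep(input$mlp_neurons, input$mlp_hiddenLayers), i.e. input$mlp_hiddenLayers hidden layers of input$mlp_neurons neurons each, while the new code passes c(input$mlp_neurons), a single hidden layer, so the mlp_hiddenLayers input no longer affects the architecture. A standalone sketch of the difference, using the iris data purely for illustration (column choices and settings are assumptions, not taken from app.R):

library(neuralnet)

# Binary target on a built-in dataset, with scaled predictors for stable training.
df <- iris[iris$Species != "setosa", c("Sepal.Length", "Petal.Length", "Species")]
df$is_virginica <- as.numeric(df$Species == "virginica")
df$Species <- NULL
df$Sepal.Length <- as.numeric(scale(df$Sepal.Length))
df$Petal.Length <- as.numeric(scale(df$Petal.Length))

# `hidden` is a vector with one entry per hidden layer:
#   rep(5, 2) -> c(5, 5): two hidden layers of 5 neurons each (old behaviour)
#   c(5)      -> one hidden layer of 5 neurons                (new behaviour)
nn <- neuralnet(is_virginica ~ Sepal.Length + Petal.Length, data = df,
                hidden = c(5), linear.output = FALSE,
                threshold = 0.01, stepmax = 1e5)

plot(nn, rep = "best")   # rep = "best" draws the repetition with the lowest error
print(nn$result.matrix)  # error, steps, reached.threshold and the fitted weights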
 
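The other behavioural change is the guard around gwplot(): the old code checked that the chosen covariate appeared in colnames(nn$covariate), while the new code only checks that a single variable is selected, leaving an invalid covariate for gwplot() itself to reject instead of showing the app's explanatory message. Continuing the sketch above, a generalized-weights plot for one fitted covariate might look like this (the covariate name comes from the iris example, not from app.R):

# Generalized weights of the fitted model with respect to one covariate;
# the y-axis limits mirror the values hard-coded in app.R.
gwplot(nn, selected.covariate = "Petal.Length", min = -2.5, max = 5)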