# Stirling numbers of the second kind, vectorized over n and k
Stirling2 <- function(n, k) {
  if (any(n < 0) || any(k < 0))
    stop("n and k must be non-negative integers (n >= 0, k >= 0)")
  nN <- length(n)
  nK <- length(k)
  if (nN > 1 || nK > 1) {
    if (nN != nK) {
      # unequal-length inputs are expanded over all combinations
      grid <- expand.grid(n = n, k = k)
      n <- grid$n
      k <- grid$k
      nN <- nK <- nrow(grid)
    }
  }
  result <- rep(0, nN)
  for (r in 1:nN) {
    result[r] <- Stirling2C(n[r], k[r])
  }
  return(result)
}

S2 <- function(n, k) Stirling2(n, k)

# Bell numbers: B(n) = sum over k of S2(n, k)
Bell <- function(n) {
  if (any(n <= 0)) stop("n must be greater than or equal to 1")
  nN <- length(n)
  result <- rep(0, nN)
  for (r in 1:nN) result[r] <- sum(Stirling2(n[r], 1:n[r]))
  return(result)
}

B <- function(n) Bell(n)
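# A quick sanity check of the wrappers, assuming the scalar back end
# Stirling2C (not shown in this excerpt) is available; expected values
# are the standard S2 and Bell numbers.
Stirling2(4, 2)            # 7: {1,2,3,4} splits into 2 non-empty blocks in 7 ways
Stirling2(n = 4, k = 1:4)  # 1 7 6 1
Bell(1:3)                  # 1 2 5, since B(n) = sum_k S2(n, k)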
ee <- expect_equal
EE <- expect_error

# replace_at on containers, by name
x <- container(a = 0, b = "z")
ee(replace_at(x, a = 1, b = "a"), container(a = 1, b = "a"))
ee(replace_at(x, a = 1:3), container(a = 1:3, b = "z"))
ee(replace_at(x, a = 1:2, b = list(2, 3)), container(a = 1:2, b = list(2, 3)))
ee(replace_at(x, a = NULL), container(a = NULL, b = "z"))
EE(replace_at(x, 1, b = "a"), "all elements must be named")
EE(replace_at(x, "x" = 2), "names\\(s\\) not found: 'x'")
ee(replace_at(x, "x" = 2, .add = TRUE), c(x, container("x" = 2)))
ee(replace_at(x, "x" = 2:4, .add = TRUE), c(x, container("x" = 2:4)))
ee(replace_at(x, "x" = 2, "a" = 1, .add = TRUE), container(a = 1, b = "z", x = 2))

# replace_at by position or mixed index
ee(replace_at(x, 1, 9), container(a = 9, b = "z"))
ee(replace_at(x, 1, NULL), container(a = NULL, b = "z"))
ee(replace_at(x, 1, 1:3), container(a = 1:3, b = "z"))
ee(replace_at(x, "a", 1:3), container(a = 1:3, b = "z"))
ee(replace_at(x, 2, letters[1:3]), container(a = 0, b = letters[1:3]))
ee(replace_at(x, "b", letters[1:3]), container(a = 0, b = letters[1:3]))
ee(replace_at(x, 1, list(1, 2)), container(a = list(1, 2), b = "z"))
ee(replace_at(x, 1:2, 1:2), container(a = 1, b = 2))
ee(replace_at(x, list(1, 2), list(1:2, 3:4)), container(a = 1:2, b = 3:4))
ee(replace_at(x, list("b", 1), 1:2), container(a = 2, b = 1))
ee(replace_at(x, list("b", 1), list(1:3, 4:6)), container(a = 4:6, b = 1:3))
EE(replace_at(x, "x", 1), "names\\(s\\) not found: 'x'")
EE(replace_at(x, 1:4, 1:4), "index out of range \\(length = 2\\): 3")
EE(replace_at(x, 4:1, 4:1), "index out of range \\(length = 2\\): 4")
EE(replace_at(x, 1:2, 1), "length of indices \\(2\\) and values \\(1\\) don't match")
EE(replace_at(x, list(1, 2), 1), "length of indices \\(2\\) and values \\(1\\) don't match")
EE(replace_at(x, 1:2, as.list(1:3)), "length of indices \\(2\\) and values \\(3\\) don't match")
ee(replace_at(x, "x", 1, .add = TRUE), c(x, container(x = 1)))
EE(replace_at(x, 1:4, 1:4, .add = TRUE), "index out of range \\(length = 2\\): 3")

# ref_replace_at modifies the container by reference
ee(ref_replace_at(x, 1, 1), container(a = 1, b = "z"))
was_changed_by_reference <- all.equal(x, container(a = 1, b = "z"))
expect_true(was_changed_by_reference)
EE(ref_replace_at(x, 1:3, 1:3), "index out of range \\(length = 2\\): 3")
x_was_not_touched <- all.equal(x, container(a = 1, b = "z"))
expect_true(x_was_not_touched)
x <- container(a = 0, b = "z")
EE(ref_replace_at(x, a = 1, x = 1), "names\\(s\\) not found: 'x'")
x_was_not_touched <- all.equal(x, container(a = 0, b = "z"))
expect_true(x_was_not_touched)

# replace_at on dict.table columns, by name
dit <- dict.table(a = 1:2, b = 3:4)
ee(replace_at(dit, a = 2:1, b = 4:3), dict.table(a = 2:1, b = 4:3))
ee(replace_at(dit, a = 2:1), dict.table(a = 2:1, b = 3:4))
ee(replace_at(dit, a = NULL), dict.table(b = 3:4))
EE(replace_at(dit, 1, b = 4:3), "all elements must be named")
EE(replace_at(dit, "x" = 2), "column\\(s\\) not found: 'x'")
ee(replace_at(dit, "x" = 2, .add = TRUE), cbind(dit, dict.table(x = 2)))
ee(replace_at(dit, a = 1, "x" = 2, .add = TRUE), dict.table(a = 1, b = 3:4, x = 2))
dit3 <- dict.table(a = 1:2, b = 3:4, c = 5:6)
ee(replace_at(dit3, a = 0, c = 0), dict.table(a = 0, b = 3:4, c = 0))
ee(replace_at(dit3, a = 0, x = 0, .add = TRUE), dict.table(a = 0, b = 3:4, c = 5:6, x = 0))

# replace_at on dict.table columns, by position or mixed index
dit <- dict.table(a = 1:2, b = 3:4)
ee(replace_at(dit, "a", 2:1), dict.table(a = 2:1, b = 3:4))
ee(replace_at(dit, 1, 2:1), replace_at(dit, "a", 2:1))
ee(replace_at(dit, 1:2, list(2:1, 4:3)), replace_at(dit, a = 2:1, b = 4:3))
ee(replace_at(dit, list(1, 2), list(2:1, 4:3)), replace_at(dit, a = 2:1, b = 4:3))
ee(replace_at(dit, list("a", 2), list(2:1, 4:3)), replace_at(dit, a = 2:1, b = 4:3))
ee(replace_at(dit, list(1, "b"), list(2:1, 4:3)), replace_at(dit, a = 2:1, b = 4:3))
EE(replace_at(dit, "x", 5:6), "column\\(s\\) not found: 'x'")
EE(replace_at(dit, 3, 5:6), "index out of range \\(ncol = 2\\): 3")
EE(replace_at(dit, 2:3, list(2:1, 4:3)), "index out of range \\(ncol = 2\\): 3")
ee(replace_at(dit, list("a", "x"), list(2:1, 6:5), .add = TRUE),
   dict.table(a = 2:1, b = 3:4, x = 6:5))
EE(replace_at(dit, 2:3, list(4:3, 5:6), .add = TRUE), "index out of range \\(ncol = 2\\): 3")
dit3 <- dict.table(a = 1:2, b = 3:4, c = 5:6)
ee(replace_at(dit3, c(1, 3), 1:2), dict.table(a = 1, b = 3:4, c = 2))
ee(replace_at(dit3, c("a", "c"), 1:2), dict.table(a = 1, b = 3:4, c = 2))
ee(replace_at(dit3, list(1, "c"), 1:2), dict.table(a = 1, b = 3:4, c = 2))
ee(replace_at(dit3, list("a", 3), 1:2), dict.table(a = 1, b = 3:4, c = 2))
EE(replace_at(dit3, c("a", "x"), 1:2), "column\\(s\\) not found: 'x'")
EE(replace_at(dit3, list("a", 4), 1:2), "index out of range \\(ncol = 3\\): 4")
ee(replace_at(dit3, c("a", "x"), 1:2, .add = TRUE), dict.table(a = 1, b = 3:4, c = 5:6, x = 2))

# ref_replace_at on dict.table: by reference, and all-or-nothing on error
dit <- dict.table(a = 1:2, b = 3:4)
ee(ref_replace_at(dit, "a", 2:1), dict.table(a = 2:1, b = 3:4))
was_changed_by_reference <- ee(dit, dict.table(a = 2:1, b = 3:4))
expect_true(was_changed_by_reference)
EE(ref_replace_at(dit, a = 0, x = 2:1))
is_unchanged <- ee(dit, dict.table(a = 2:1, b = 3:4))
has_partial_operations <- !is_unchanged
expect_false(has_partial_operations)
echanges <- structure(function(
  ps,
  eco = names(ps[[1:(nlayers(ps) - 1)]]),
  change = names(ps[[nlayers(ps)]]),
  sp_dist,
  eco_range = c(1, 100),
  change_vals = 1:19,
  sp_dist_range = c(1, 1),
  spread = TRUE,
  get_unaffected = TRUE,
  binary_output = FALSE,
  noDataValue = 0,
  mc.cores = round(detectCores() * 0.6, 0),
  ...
) {
  if (is.logical(ps)) return(ps)
  unlink(file.path(tempdir(), 'ecochange', 'change'), recursive = TRUE)
  # collect extra arguments for the parallel apply calls below
  # ('marg' was undefined in this excerpt; assumed to capture the dots)
  marg <- list(...)
  if (length(eco_range) > 2) {
    warning("'eco_range': the vector has length > 2 and only its range will be used")
    eco_range <- range(eco_range)
  }
  isLayer <- 'lyrs' %in% names(list(...))
  if (isLayer) isLayer <- is.null(list(...)$'lyrs')
  if (inherits(ps, getOption('inh'))) {
    ps. <- ps
    ps <- rsp2ebv(ps, mc.cores = mc.cores, ...)
    if (is.null(ps.)) return(ps)
    if (isLayer) return(ps)
  }
  ecopatrn <- gsub("\\d+", "", eco)
  if (!all(grepl(ecopatrn[1L], ecopatrn))) {
    stop("Ambiguous layer names: provide arguments 'eco' and 'change'")
  }
  # subset the stack via a regular expression built from layer names
  reg2rst <- function(exp) {
    exp. <- paste(exp, collapse = '|')
    exp <- names(ps)[grepl(exp., names(ps))]
    rst <- raster::subset(ps, exp)
    return(rst)
  }
  eco <- reg2rst(eco)
  change <- reg2rst(change)
  if (dim(change)[3] > 1) stop("'change' must be a single layer")
  if (!missing(sp_dist)) {
    sp_dist <- reg2rst(sp_dist)
  }
  if (!missing(sp_dist)) {
    marg. <- c(list(FUN = function(x) msk_sp_(x, sp_dist, tim = sp_dist_range),
                    x = raster::as.list(eco)), marg)
    eco <- stack(do.call(getOption('fapp'), marg.))
  }
  if (dim(eco)[3] > 1) {
    print("'eco' has length > 1: matching names of 'eco' with values in 'change_vals'...")
    change_vals <- nm2yr(eco)
  }
  if (!getOption('isWin')) {
    marg[['mc.cores']] <- mc.cores
  }
  if (!spread) {
    print("Fast-computing inputs for landscape areas")
    marg. <- c(list(FUN = function(x, y) msk_0_(x, y, perc = eco_range,
                                                tim = c(1, max(change_vals))),
                    x = raster::as.list(eco),
                    y = raster::as.list(change)), marg)
    w <- do.call(getOption('fapp'), marg.)
    w <- stack(w)
    return(w)
  }
  marg. <- c(list(FUN = function(x, y) msk_2_(x, change, remnant = get_unaffected,
                                              keep = !binary_output, perc = eco_range,
                                              tim = c(0, y), noData = noDataValue),
                  x = raster::as.list(eco),
                  y = change_vals), marg)
  w <- stack(do.call(getOption('fapp'), marg.))
  return(w)
}, ex = function() {
  path. <- system.file('amazon.grd', package = 'ecochange')
  amazon <- suppressWarnings(brick(path.))
  suppressWarnings(
    def <- echanges(amazon, eco = 'TC', change = 'lossyear',
                    eco_range = c(1, 80), get_unaffected = TRUE,
                    binary_output = FALSE, mc.cores = 2)
  )
  suppressWarnings(plotebv(def))
})
# simulate a Barabasi-Albert network and round-trip it through networkDynamic
library(PAFit)
net <- generate_BA(N = 100, multiple_node = 20, m = 1)
summary(net)
system.time(b <- to_networkDynamic(net))
back <- from_networkDynamic(b)
context("mdd") num_obs <- 10 num_comp <- 2 test_that("mdd C vs. mdd R univariate X univariate Y", { X <- rnorm(num_obs) Y <- rnorm(num_obs) m1 <- EDMeasure::mdd(X, Y, compute = "C", center = "U") m2 <- EDMeasure::mdd(X, Y, compute = "R", center = "U") expect_equal(m1, m2) m3 <- EDMeasure::mdd(X, Y, compute = "C", center = "D") m4 <- EDMeasure::mdd(X, Y, compute = "R", center = "D") expect_equal(m3, m4) }) test_that("mdd C vs. mdd R univariate X multivariate Y", { X <- rnorm(num_obs) Y <- matrix(rnorm(num_obs * num_comp), num_obs, num_comp) m1 <- EDMeasure::mdd(X, Y, compute = "C", center = "U") m2 <- EDMeasure::mdd(X, Y, compute = "R", center = "U") expect_equal(m1, m2) m3 <- EDMeasure::mdd(X, Y, compute = "C", center = "D") m4 <- EDMeasure::mdd(X, Y, compute = "R", center = "D") expect_equal(m3, m4) }) test_that("mdd C vs. mdd R multivariate X univariate Y", { X <- matrix(rnorm(num_obs * num_comp), num_obs, num_comp) Y <- rnorm(num_obs) m1 <- EDMeasure::mdd(X, Y, compute = "C", center = "U") m2 <- EDMeasure::mdd(X, Y, compute = "R", center = "U") expect_equal(m1, m2) m3 <- EDMeasure::mdd(X, Y, compute = "C", center = "D") m4 <- EDMeasure::mdd(X, Y, compute = "R", center = "D") expect_equal(m3, m4) }) test_that("mdd C vs. mdd R multivariate X multivariate Y", { X <- matrix(rnorm(num_obs * num_comp), num_obs, num_comp) Y <- matrix(rnorm(num_obs * num_comp), num_obs, num_comp) m1 <- EDMeasure::mdd(X, Y, compute = "C", center = "U") m2 <- EDMeasure::mdd(X, Y, compute = "R", center = "U") expect_equal(m1, m2) m3 <- EDMeasure::mdd(X, Y, compute = "C", center = "D") m4 <- EDMeasure::mdd(X, Y, compute = "R", center = "D") expect_equal(m3, m4) })
# count characters that are neither digits nor "."
nNonNumChar <- function(txt) {
  txt <- as.character(txt)
  sum(!sapply(1:nchar(txt), function(x) substr(txt, x, x) %in% c(".", 0:9)))
}

# extract the number preceding the first uppercase letter, e.g. "12Abc" -> 12
.extrNumHeadingCap <- function(x) {
  tmp <- substr(x, regexpr("[[:digit:]]+[[:upper:]]", x), nchar(x))
  as.numeric(substr(tmp, 0, regexpr("[[:upper:]]", tmp) - 1))
}

# extract the number before a separator character, e.g. "12_ab" -> 12
.extrNumHeadingSepChar <- function(x, sep = "_") {
  tmp <- substr(x, 1, attributes(regexpr(paste("[[:digit:]]+", sep, "[[:alpha:]]", sep = ""), x))[[1]] - 2)
  out <- as.numeric(tmp)
  if (!is.null(names(x))) names(out) <- names(x)
  out
}

# replace the lowest finite value(s) of x by setTo
.setLowestTo <- function(x, setTo) {
  mi <- min(x[which(is.finite(x))])
  x[which(x == mi)] <- setTo
  x
}
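# A few sanity checks of these helpers (expected values worked out by hand):
nNonNumChar("12.5mg")           # 2: "m" and "g" are neither digits nor "."
.extrNumHeadingCap("12Abc")     # 12: number before the first capital letter
.extrNumHeadingSepChar("7_ab")  # 7: number before the "_" separator
.setLowestTo(c(0.5, 2, 0.5, 3), NA)  # NA 2 NA 3: lowest finite values replaced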
fluidPage(
  fluidRow(
    box(
      width = 12,
      closable = TRUE,
      enable_label = TRUE,
      label_text = "New",
      label_status = "warning",
      solidHeader = TRUE,
      status = "warning",
      title = tagList(icon("bullhorn"), i18n$t("Announcements")),
      collapsible = TRUE,
      collapsed = TRUE,
      tags$small(
        paste0(
          "Because SIGNATE is changing its data structure, supporting the new format ",
          "will take some time and data updates are suspended for now. ",
          "We will resume updates as soon as the migration is complete."
        )
      )
    )
  ),
  fluidRow(
    box(
      width = 12,
      closable = FALSE,
      enable_label = TRUE,
      collapsible = TRUE,
      label = boxLabel("Archived", status = "danger"),
      title = tagList(icon("connectdevelop"), i18n$t("Cluster network")),
      footer = tags$small(
        icon("database"),
        i18n$t("Data source:"),
        tags$a(href = "https://signate.jp/competitions/260/discussions",
               "SIGNATE - COVID-19 Challenge")
      ),
      fluidRow(
        column(
          width = 8,
          fluidRow(
            column(
              width = 6,
              pickerInput(
                inputId = "clusterRegionPicker",
                label = "",
                choices = provinceSelector,
                selected = 1,
                options = list(
                  `actions-box` = TRUE,
                  size = 10,
                  `deselect-all-text` = i18n$t("Clear"),
                  `select-all-text` = i18n$t("All"),
                  `selected-text-format` = i18n$t("3 or more selected"),
                  `max-options` = 5
                ),
                multiple = TRUE,
                inline = TRUE
              )
            ),
            column(
              width = 4,
              uiOutput("clusterDateRangeSelector")
            )
          ),
          fluidRow(
            column(
              width = 12,
              uiOutput("clusterNetworkWrapper") %>% withSpinner(),
              tags$hr(),
              tags$li(i18n$t("Numbered frame: cases within the selected report-date range.")),
              tags$li(i18n$t("† mark: deceased.")),
              tags$br(),
              accordion(
                accordionItem(
                  id = 1,
                  title = i18n$t("1. About the cluster network"),
                  tags$small(
                    tags$li(
                      paste0(
                        "This cluster network is a direct visualization of the dataset and ",
                        "link information provided by SIGNATE Inc. (SIGNATE COVID-19 Dataset). ",
                        "Since case numbers are growing rapidly and published information is ",
                        "limited, no guarantee is made about the accuracy of the network; ",
                        "it is intended for reference only."
                      )
                    ),
                    tags$li(
                      paste0(
                        "Due to limitations of the dataset itself, a patient linked to many ",
                        "other patients is not necessarily the center of a cluster."
                      )
                    )
                  )
                ),
                accordionItem(
                  id = 2,
                  title = i18n$t("2. About the dataset"),
                  tags$small(
                    paste0(
                      "The dataset used in this analysis (SIGNATE COVID-19 Dataset) is still ",
                      "being collected and its accuracy is not guaranteed. It is compiled ",
                      "mainly from case reports by the Ministry of Health, Labour and Welfare ",
                      "and local governments, and may not match statistics published by other ",
                      "organizations."
                    )
                  )
                ),
                accordionItem(
                  id = 3,
                  title = i18n$t("3. About updates"),
                  tags$small(
                    paste0(
                      "Updates happen roughly once every two to three days. Volunteers who ",
                      "wish to contribute can add or correct data at the link below."
                    ),
                    tags$a(
                      href = "https://signate.jp/competitions/260/discussions",
                      icon("external-link-alt"),
                      "SIGNATE - COVID-19 Challenge"
                    )
                  )
                )
              )
            )
          )
        ),
        column(
          width = 4,
          uiOutput("clusterProfileSearchBox"),
          uiOutput("profile")
        )
      )
    )
  ),
  fluidRow(
    box(
      width = 8,
      title = tagList(icon("project-diagram"), i18n$t("Infection routes")),
      label = boxLabel("Archived", status = "danger"),
      enable_label = TRUE,
      collapsible = TRUE,
      closable = FALSE,
      fluidRow(column(
        width = 12,
        uiOutput("infectedRouteRegionSelector")
      )),
      echarts4rOutput("infectedRouteByRegion") %>% withSpinner()
    )
  )
)
tam_mml_mstep_regression <- function(resp, hwt, resp.ind, pweights, pweightsM,
    Y, theta, theta2, YYinv, ndim, nstud, beta.fixed, variance, Variance.fixed,
    group, G, snodes = 0, thetasamp.density = NULL, nomiss = FALSE, iter = 1E9,
    min.variance = 0, userfct.variance = NULL, variance_acceleration = NULL,
    est.variance = TRUE, beta = NULL, latreg_use = FALSE, gwt = NULL,
    importance_sampling = FALSE)
{
  variance.fixed <- Variance.fixed
  beta_old <- beta
  variance_old <- variance
  itemwt <- NULL
  # numerical integration over a fixed quadrature grid
  if (snodes == 0) {
    hwt_colsums <- colSums(hwt * pweights)
    if (!latreg_use) {
      if (!nomiss) {
        itemwt <- crossprod(hwt, resp.ind * pweightsM)
      }
      if (nomiss) {
        itemwt <- matrix(hwt_colsums, nrow = ncol(hwt), ncol = ncol(resp.ind))
      }
    }
    thetabar <- hwt %*% theta
    sumbeta <- crossprod(Y, thetabar * pweights)
    sumsig2 <- as.vector(crossprod(hwt_colsums, theta2))
  }
  # stochastic integration over sampled theta nodes
  if (snodes > 0) {
    if (importance_sampling) {
      hwt0 <- hwt / gwt
      TP <- length(thetasamp.density)
      tsd <- tam_matrix2(thetasamp.density, nrow = nstud, ncol = TP)
      rej_prob <- gwt / tsd
      rnm <- tam_matrix2(stats::runif(TP), nrow = nstud, ncol = TP)
      hwt_acc <- 1 * (rej_prob > rnm)
      hwt <- tam_normalize_matrix_rows(hwt0 * tsd * hwt_acc)
    }
    if (!latreg_use) {
      hwt <- hwt / rowSums(hwt)
      itemwt <- crossprod(hwt, resp.ind * pweightsM)
    }
    thetabar <- hwt %*% theta
    sumbeta <- crossprod(Y, thetabar * pweights)
    sumsig2 <- as.vector(crossprod(colSums(pweights * hwt), theta2))
  }
  beta <- YYinv %*% sumbeta
  sumsig2 <- matrix(sumsig2, nrow = ndim, ncol = ndim)
  if (G == 1) {
    variance <- (sumsig2 - crossprod(sumbeta, beta)) / nstud
  }
  if (!is.null(beta.fixed)) {
    beta[beta.fixed[, 1:2, drop = FALSE]] <- beta.fixed[, 3]
    beta <- as.matrix(beta, ncol = ndim)
  }
  if (!is.null(variance.fixed)) {
    variance[variance.fixed[, 1:2, drop = FALSE]] <- variance.fixed[, 3]
    variance[variance.fixed[, c(2, 1), drop = FALSE]] <- variance.fixed[, 3]
  }
  # group-wise variance estimates
  if (G > 1) {
    if (snodes > 0) {
      hwt <- hwt / snodes
      hwt <- hwt / rowSums(hwt)
    }
    for (gg in 1:G) {
      ind.gg <- which(group == gg)
      thetabar <- hwt[ind.gg, ] %*% theta
      sumbeta <- crossprod(Y[ind.gg, ], thetabar * pweights[ind.gg])
      sumsig2 <- colSums((pweights[ind.gg] * hwt[ind.gg, ]) %*% theta2)
      sumsig2 <- matrix(sumsig2, ndim, ndim)
      variance[ind.gg] <- (sumsig2 - crossprod(sumbeta, beta)) / sum(pweights[ind.gg])
    }
  }
  eps <- 1E-10
  if (ndim == 1) {
    variance[variance < min.variance] <- min.variance
  }
  if (G == 1) {
    diag(variance) <- diag(variance) + eps
  }
  if (!est.variance) {
    if (G == 1) {
      variance <- stats::cov2cor(variance)
    }
    if (G > 1) {
      variance[group == 1] <- 1
    }
  }
  if (!is.null(userfct.variance)) {
    variance <- do.call(userfct.variance, list(variance))
  }
  if (iter < 4) {
    na_variance <- sum(is.na(variance)) > 0
    if (na_variance) {
      v1 <- paste0("Problems in variance estimation.\n ",
                   "Try to choose argument control=list( xsi.start0=TRUE, ...) ")
      stop(v1)
    }
  }
  if (!is.null(variance_acceleration)) {
    if (variance_acceleration$acceleration != "none") {
      variance_acceleration <- tam_accelerate_parameters(
        xsi_acceleration = variance_acceleration,
        xsi = as.vector(variance), iter = iter, itermin = 3)
      variance <- matrix(variance_acceleration$parm,
                         nrow = nrow(variance), ncol = ncol(variance))
    }
  }
  beta_change <- max(abs(beta - beta_old))
  variance_change <- max(abs(as.vector(variance) - as.vector(variance_old)))
  res <- list(beta = beta, variance = variance, itemwt = itemwt,
              variance_acceleration = variance_acceleration,
              beta_change = beta_change, variance_change = variance_change)
  return(res)
}

mstep.regression <- tam_mml_mstep_regression
as_cordf <- function(x, diagonal = NA) {
  if (inherits(x, "cor_df")) {
    warning("x is already a correlation data frame.")
    return(x)
  }
  x <- as.data.frame(x)
  # a 'term' column, if present, supplies the row names
  row_name <- x$term
  x <- x[colnames(x) != "term"]
  rownames(x) <- row_name
  if (ncol(x) != nrow(x)) {
    stop("Input object x is not square. ",
         "The number of columns must be equal to the number of rows.")
  }
  if (ncol(x) > 1) diag(x) <- diagonal
  new_cordf(x, names(x))
}

new_cordf <- function(x, term = NULL) {
  if (!is.null(term)) {
    x <- first_col(x, term)
  }
  class(x) <- c("cor_df", class(x))
  x
}

# prepend a column (named "term" by default) to a data frame
first_col <- function(df, ..., var = "term") {
  stopifnot(is.data.frame(df))
  if (tibble::has_name(df, var)) stop("There is a column named ", var, " already!")
  new_col <- tibble::tibble(...)
  names(new_col) <- var
  new_df <- c(new_col, df)
  dplyr::as_tibble(new_df)
}

# pairwise counts of complete observations
pair_n <- function(x, y = NULL) {
  if (is.null(y)) y <- x
  x <- t(!is.na(x)) %*% (!is.na(y))
  class(x) <- c("n_mat", "matrix")
  x
}

as_matrix <- function(x, diagonal) {
  UseMethod("as_matrix")
}
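# A minimal sketch of converting a plain correlation matrix into a cor_df
# (assumes the tibble and dplyr packages used above are installed):
m <- cor(mtcars[, c("mpg", "disp", "hp")])
as_cordf(m)                # tibble with a leading 'term' column, NA diagonal
as_cordf(m, diagonal = 1)  # keep 1s on the diagonal instead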
clean_names <- function(x, ...) {
  UseMethod("clean_names")
}

clean_names.default <- function(x, ...) {
  if (is.null(x)) {
    return(x)
  }
  cleaned <- unname(find_variables(x, flatten = TRUE, verbose = FALSE))
  .remove_values(cleaned, c("1", "0"))
}

clean_names.character <- function(x, include_names = FALSE, ...) {
  .clean_names(x = x, include_names = include_names, ...)
}

.clean_names <- function(x, include_names = FALSE, is_emmeans = FALSE, ...) {
  if (is.null(x)) {
    return(x)
  }
  out <- sapply(x, function(.x) {
    # drop index ranges like "[1:3]"
    .x <- sub("\\[(\\d+):(\\d+)\\]", "", .x)
    # handle interaction terms component-wise, but leave "::" alone
    if (grepl(":", .x, fixed = TRUE) && !grepl("::", .x, fixed = TRUE)) {
      paste(sapply(
        strsplit(.x, ":", fixed = TRUE),
        .remove_pattern_from_names,
        is_emmeans = is_emmeans
      ), collapse = ":")
    } else {
      .remove_pattern_from_names(.x, is_emmeans = is_emmeans)
    }
  })
  if (isTRUE(include_names)) {
    out
  } else {
    unname(out)
  }
}

.remove_pattern_from_names <- function(x, ignore_asis = FALSE, ignore_lag = FALSE, is_emmeans = FALSE) {
  if (.is_empty_string(x)) {
    return("")
  }
  # function wrappers to strip from variable names
  pattern <- c(
    "as.factor", "as.numeric", "factor", "frailty", "offset", "log1p", "log10",
    "log2", "log-log", "scale-log", "log", "lag", "diff", "lspline", "pspline",
    "scale-poly", "poly", "catg", "asis", "matrx", "pol", "strata", "strat",
    "scale", "scored", "interaction", "sqrt", "sin", "cos", "tan", "acos",
    "asin", "atan", "atan2", "exp", "lsp", "rcs", "pb", "lo", "bs", "ns",
    "mSpline", "bSpline", "t2", "te", "ti", "tt", "mi", "mo", "gp", "s", "I"
  )
  if (ignore_lag) {
    lag_pattern <- which(pattern == "lag")
    if (length(lag_pattern)) pattern <- pattern[-lag_pattern]
  }
  cleaned <- sapply(1:length(x), function(i) {
    # strip leading digits unless we deal with emmeans labels
    if (isFALSE(is_emmeans) && grepl("^([0-9]+)", x[i])) {
      x[i] <- gsub("^([0-9]+)[^(\\.|[:alnum:])]+(.*)", "\\2", x[i])
    }
    for (j in 1:length(pattern)) {
      # remove namespace prefixes
      x[i] <- sub("(.*)::(.*)", "\\2", x[i])
      if (pattern[j] == "offset") {
        x[i] <- .trim(unique(sub("^offset\\(([^-+ )]*).*", "\\1", x[i])))
      } else if (pattern[j] == "I") {
        if (!ignore_asis) x[i] <- .trim(unique(sub("I\\(((\\w|\\.)*).*", "\\1", x[i])))
      } else if (pattern[j] == "asis") {
        if (!ignore_asis) x[i] <- .trim(unique(sub("asis\\(((\\w|\\.)*).*", "\\1", x[i])))
      } else if (pattern[j] == "log-log") {
        x[i] <- .trim(unique(sub("^log\\(log\\(((\\w|\\.)*).*", "\\1", x[i])))
      } else if (pattern[j] == "scale-log") {
        x[i] <- .trim(unique(sub("^scale\\(log\\(((\\w|\\.)*).*", "\\1", x[i])))
        x[i] <- .trim(unique(sub("^scale\\(log1p\\(((\\w|\\.)*).*", "\\1", x[i])))
        x[i] <- .trim(unique(sub("^scale\\(log2\\(((\\w|\\.)*).*", "\\1", x[i])))
        x[i] <- .trim(unique(sub("^scale\\(log10\\(((\\w|\\.)*).*", "\\1", x[i])))
      } else if (pattern[j] == "scale-poly") {
        x[i] <- .trim(unique(sub("^scale\\(poly\\(((\\w|\\.)*).*", "\\1", x[i])))
      } else if (pattern[j] %in% c("mmc", "mm")) {
        p <- paste0("^", pattern[j], "\\((.*)\\).*")
        g <- .trim(sub(p, "\\1", x[i]))
        x[i] <- .trim(unlist(strsplit(g, ",")))
      } else {
        p <- paste0("^", pattern[j], "\\(((\\w|\\.)*).*")
        x[i] <- unique(sub(p, "\\1", x[i]))
      }
    }
    # for random-effects terms, keep the grouping factor
    .trim(sub("^(.*)\\|(.*)", "\\2", x[i]))
  })
  .remove_values(cleaned, c("1", "0"))
}

.clean_brms_mm <- function(x) {
  if (!grepl("^(mmc|mm)\\(", x)) {
    return(x)
  }
  unname(.compact_character(unlist(sapply(c("mmc", "mm"), function(j) {
    if (grepl(paste0("^", j, "\\("), x = x)) {
      p <- paste0("^", j, "\\((.*)\\).*")
      g <- .trim(sub(p, "\\1", x))
      .trim(unlist(strsplit(g, ",")))
    } else {
      ""
    }
  }, simplify = FALSE))))
}
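# A quick check of the character method on plain formula-style strings (no
# model object needed); expected output follows the pattern list above:
clean_names(c("log(x)", "poly(z, 2)", "as.factor(g)"))
# "x" "z" "g"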
check.err <- function(cov.table, stage, alerts.stack, estimand, ess.ctrl, ess.treat) {
  if (estimand == "ATT") {
    # flag near-zero treatment-group SDs or implausibly large standardized effect sizes
    ind <- (cov.table$tx.sd < .0001) | (cov.table$std.eff.sz > 500)
    prob <- cov.table$std.eff.sz[ind]
    if (length(prob) > 0) {
      sink(alerts.stack, append = TRUE)
      cat("\n problematic standard deviations in stage ", stage, "\n\n")
      print(cov.table[which(ind), c("tx.sd", "std.eff.sz")])
      cat("\n\n\n")
      sink()
    }
  }
  if (estimand == "ATE") {
    # pooled SD, weighting the group SDs by effective sample size
    sd.p <- ((ess.treat * cov.table$tx.sd) + (ess.ctrl * cov.table$ct.sd)) / (ess.treat + ess.ctrl)
    ind <- (sd.p < .0001) | (cov.table$std.eff.sz > 500)
    prob <- cov.table$std.eff.sz[ind]
    if (length(prob) > 0) {
      sink(alerts.stack, append = TRUE)
      cat("\n problematic standard deviations in stage ", stage, "\n\n")
      print(data.frame(sd.p = sd.p[ind], std.eff.sz = cov.table$std.eff.sz[ind]))
      cat("\n\n\n")
      sink()
    }
  }
}
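# A toy demonstration of the alert path, writing to a temporary file
# (column layout assumed to match the balance tables this is called with):
cov.table <- data.frame(tx.sd = c(1, 0), ct.sd = c(1, 1), std.eff.sz = c(0.2, 600))
alerts <- tempfile()
check.err(cov.table, stage = 1, alerts.stack = alerts, estimand = "ATT",
          ess.ctrl = 50, ess.treat = 50)
readLines(alerts)  # shows the "problematic standard deviations" alert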
# enlist: build a named list from its arguments; a single character-vector
# argument is instead treated as names of objects to fetch with get()
enlist <- function(...) {
  result <- list(...)
  if ((nargs() == 1) & is.character(n <- result[[1]])) {
    result <- as.list(seq(n))
    names(result) <- n
    for (i in n) result[[i]] <- get(i)
  } else {
    junk <- sys.call()
    n <- NULL
    for (i in junk[-1]) n <- c(n, deparse(i))
    if (!is.null(n2 <- names(result))) {
      which <- n2 != ""
      n[which] <- n2[which]
    }
    names(result) <- n
  }
  result
}
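# A quick illustration of both calling styles:
a <- 1
b <- "two"
enlist(a, b)         # list(a = 1, b = "two"), names taken from the call
enlist(c("a", "b"))  # same result, objects looked up with get()
enlist(a, x = b)     # explicit names win: list(a = 1, x = "two")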
diat_disp <- function(resultLoad, maxDistTaxa = 2) {
  if (missing(resultLoad)) {
    print("Please run the diat_loadData() function first to enter your species data in the correct format")
    stop("Calculations cancelled")
  }
  taxaIn <- resultLoad[[1]]
  dispDB <- diathor::disp
  taxaIn$species <- row.names(taxaIn)
  taxaIn$disp_v <- NA
  taxaIn$disp_s <- NA
  print("Calculating DISP index")
  for (i in 1:nrow(taxaIn)) {
    if (is.na(taxaIn$disp_s[i]) | is.na(taxaIn$disp_v[i])) {
      # reset so a failed lookup cannot inherit the previous taxon's values
      vvalue <- NA
      svalue <- NA
      spname <- trimws(tolower(rownames(taxaIn[i, ])))
      # fuzzy-match the species name against the DISP database
      species_found <- dispDB[stringdist::ain(trimws(tolower(dispDB$fullspecies)),
                                              spname, maxDist = maxDistTaxa,
                                              matchNA = FALSE), ]
      if (nrow(species_found) == 1) {
        vvalue <- as.numeric(names(which.max(table(species_found$disp_v))))
        svalue <- as.numeric(names(which.max(table(species_found$disp_s))))
        taxaIn$new_species[i] <- species_found$fullspecies[1]
      } else if (nrow(species_found) > 1) {
        species_found <- species_found[match(spname, trimws(tolower(species_found$fullspecies)),
                                             nomatch = 1), ]
        vvalue <- as.numeric(names(which.max(table(species_found$disp_v))))
        svalue <- as.numeric(names(which.max(table(species_found$disp_s))))
      } else if (nrow(species_found) == 0) {
        # retry with common infraspecific-rank spellings inserted into the name
        spsplit <- strsplit(spname, " ")
        if (length(spsplit[[1]]) > 1) {
          newspname <- paste(spsplit[[1]][[1]], spsplit[[1]][[2]], "var.",
                             spsplit[[1]][[length(spsplit[[1]])]], sep = " ")
          newspname <- c(newspname, paste(spsplit[[1]][[1]], spsplit[[1]][[2]], "fo.",
                                          spsplit[[1]][[length(spsplit[[1]])]], sep = " "))
          newspname <- c(newspname, paste(spsplit[[1]][[1]], spsplit[[1]][[2]], "subsp.",
                                          spsplit[[1]][[length(spsplit[[1]])]], sep = " "))
          newspname <- c(newspname, paste(spsplit[[1]][[1]], spsplit[[1]][[2]], "spp.",
                                          spsplit[[1]][[length(spsplit[[1]])]], sep = " "))
          newspname <- c(newspname, paste(spsplit[[1]][[1]], spsplit[[1]][[2]], "ssp.",
                                          spsplit[[1]][[length(spsplit[[1]])]], sep = " "))
          newspname <- c(newspname, paste(spsplit[[1]][[1]], spsplit[[1]][[2]], "var.",
                                          spsplit[[1]][[2]], "fo.",
                                          spsplit[[1]][[length(spsplit[[1]])]], sep = " "))
          species_found <- dispDB[stringdist::ain(trimws(tolower(dispDB$fullspecies)),
                                                  newspname, maxDist = maxDistTaxa,
                                                  matchNA = FALSE), ]
          if (nrow(species_found) > 0) {
            vvalue <- as.numeric(names(which.max(table(species_found$disp_v[1]))))
            svalue <- as.numeric(names(which.max(table(species_found$disp_s[1]))))
            taxaIn$new_species[i] <- species_found$fullspecies[1]
          }
        }
      }
      taxaIn$disp_v[i] <- vvalue
      taxaIn$disp_s[i] <- svalue
    }
  }
  lastcol <- which(colnames(taxaIn) == "new_species")
  disp.results <- data.frame(matrix(ncol = 2, nrow = (lastcol - 1)))
  colnames(disp.results) <- c("DISP", "num_taxa")
  disp_s <- taxaIn[, "disp_s"]
  disp_v <- taxaIn[, "disp_v"]
  number_recognized_taxa <- round(100 - (sum(is.na(taxaIn$disp_s)) / nrow(taxaIn)) * 100, 1)
  print(paste("Taxa recognized to be used in DISP index: ", number_recognized_taxa, "%"))
  pb <- txtProgressBar(min = 1, max = (lastcol - 1), style = 3)
  for (sampleNumber in 1:(lastcol - 1)) {
    num_taxa <- length(which(disp_s * taxaIn[, sampleNumber] > 0))
    disp_s[is.na(disp_s)] <- 0
    disp_v[is.na(disp_v)] <- 0
    # abundance-weighted index: sum(a * s * v) / sum(a * v)
    DISP <- sum(taxaIn[, sampleNumber] * as.double(disp_s) * as.double(disp_v)) /
      sum(taxaIn[, sampleNumber] * as.double(disp_v))
    disp.results[sampleNumber, ] <- c(DISP, num_taxa)
    setTxtProgressBar(pb, sampleNumber)
  }
  close(pb)
  resultsPath <- resultLoad[[4]]
  precisionmatrix <- read.csv(file.path(resultsPath, "num_taxa.csv"))
  precisionmatrix <- cbind(precisionmatrix, disp.results$num_taxa)
  precisionmatrix <- precisionmatrix[-(1:which(colnames(precisionmatrix) == "Sample") - 1)]
  names(precisionmatrix)[names(precisionmatrix) == "disp.results$num_taxa"] <- "DISP"
  write.csv(precisionmatrix, file.path(resultsPath, "num_taxa.csv"))
  taxaIncluded <- taxaIn$species[which(taxaIn$disp_s > 0)]
  inclusionmatrix <- read.csv(file.path(resultsPath, "Taxa included.csv"))
  colnamesInclusionMatrix <- c(colnames(inclusionmatrix), "DISP")
  newinclusionmatrix <- as.data.frame(matrix(nrow = max(length(taxaIncluded), nrow(inclusionmatrix)),
                                             ncol = ncol(inclusionmatrix) + 1))
  for (i in 1:ncol(inclusionmatrix)) {
    newinclusionmatrix[1:nrow(inclusionmatrix), i] <- as.character(inclusionmatrix[1:nrow(inclusionmatrix), i])
  }
  if (nrow(newinclusionmatrix) > length(taxaIncluded)) {
    newinclusionmatrix[1:length(taxaIncluded), ncol(newinclusionmatrix)] <- taxaIncluded
  } else {
    newinclusionmatrix[1:nrow(newinclusionmatrix), ncol(newinclusionmatrix)] <- taxaIncluded
  }
  inclusionmatrix <- newinclusionmatrix
  colnames(inclusionmatrix) <- colnamesInclusionMatrix
  inclusionmatrix <- inclusionmatrix[-(1:which(colnames(inclusionmatrix) == "Eco.Morpho") - 1)]
  write.csv(inclusionmatrix, file.path(resultsPath, "Taxa included.csv"))
  taxaExcluded <- taxaIn[!('%in%'(taxaIn$species, taxaIncluded)), "species"]
  exclusionmatrix <- read.csv(file.path(resultsPath, "Taxa excluded.csv"))
  newexclusionmatrix <- as.data.frame(matrix(nrow = max(length(taxaExcluded), nrow(exclusionmatrix)),
                                             ncol = ncol(exclusionmatrix) + 1))
  for (i in 1:ncol(exclusionmatrix)) {
    newexclusionmatrix[1:nrow(exclusionmatrix), i] <- as.character(exclusionmatrix[1:nrow(exclusionmatrix), i])
  }
  if (nrow(newexclusionmatrix) > length(taxaExcluded)) {
    newexclusionmatrix[1:length(taxaExcluded), ncol(newexclusionmatrix)] <- taxaExcluded
  } else {
    newexclusionmatrix[1:nrow(newexclusionmatrix), ncol(newexclusionmatrix)] <- taxaExcluded
  }
  exclusionmatrix <- newexclusionmatrix
  colnames(exclusionmatrix) <- colnamesInclusionMatrix
  exclusionmatrix <- exclusionmatrix[-(1:which(colnames(exclusionmatrix) == "Eco.Morpho") - 1)]
  write.csv(exclusionmatrix, file.path(resultsPath, "Taxa excluded.csv"))
  rownames(disp.results) <- resultLoad[[3]]
  return(disp.results)
}
seqgen <- function(opts = NULL, rooted.tree = NULL, newick.tree = NULL,
                   input = NULL, temp.file = NULL) {
  argv <- "seq-gen"
  if (!is.null(opts)) {
    if (is.null(temp.file)) {
      temp.file.seqgen <- tempfile("seqgen.")
    } else {
      temp.file.seqgen <- temp.file
    }
    temp.file.ms <- tempfile("ms.")
    if ((!is.null(rooted.tree)) && inherits(rooted.tree, "phylo")) {
      newick.tree <- write.tree(rooted.tree, digits = 12)
    }
    if ((!is.null(newick.tree)) && (!is.null(input))) {
      stop("rooted.tree/newick.tree and input can not work at the same time.")
    }
    if (!is.null(newick.tree)) {
      write(newick.tree, file = temp.file.ms, sep = "")
    } else if (!is.null(input)) {
      write(input, file = temp.file.ms, sep = "\n")
    } else {
      stop("A newick or rooted/phylo tree is required.")
    }
    argv <- c(argv, unlist(strsplit(opts, " ")), temp.file.ms)
    .Call("R_seq_gen_main", argv, temp.file.seqgen, PACKAGE = "phyclust")
    unlink(temp.file.ms)
    if (is.null(temp.file)) {
      ret <- readLines(con = temp.file.seqgen, warn = FALSE)
      ret <- ret[ret != ""]
      class(ret) <- "seqgen"
      unlink(temp.file.seqgen)
      return(ret)
    }
  } else {
    # no options given: run seq-gen with -h to print its help text
    temp.file.seqgen <- tempfile("seqgen.")
    argv <- c(argv, "-h")
    try(.Call("R_seq_gen_main", argv, temp.file.seqgen, PACKAGE = "phyclust"),
        silent = TRUE)
    unlink(temp.file.seqgen)
  }
  invisible()
}

print.seqgen <- function(x, ...) {
  seqgen <- x
  cat(seqgen, sep = "\n")
}
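# A sketch of typical use, following the phyclust documentation pattern:
# simulate a coalescent tree with ms() and feed it to seq-gen (the option
# string is ordinary seq-gen syntax; exact options here are illustrative).
library(phyclust)
set.seed(1234)
ret.ms <- ms(nsam = 5, nreps = 1, opts = "-T")  # one random tree
seqs <- seqgen(opts = "-mHKY -l40 -s0.2", input = ret.ms)
seqs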
as.data.frame.BibEntry <- function(x, row.names = NULL, optional = FALSE, ...) {
  col.names <- unique(unlist(fields(x)))
  n.fields <- length(col.names)
  n.entries <- length(x)
  y <- as.data.frame(matrix(nrow = n.entries, ncol = n.fields + 1L),
                     stringsAsFactors = FALSE,
                     row.names = make.unique(names(x), sep = "-"))
  colnames(y) <- c('bibtype', col.names)
  y[, 1L] <- unlist(x$bibtype)
  not.nulls <- 1L
  for (i in seq_len(n.fields)) {
    nom <- col.names[i]
    temp <- do.call(`$.BibEntry`, list(x = x, name = nom))
    if (n.entries > 1L) not.nulls <- !vapply(temp, is.null, FALSE)
    if (nom %in% .BibEntryNameList) {
      if (n.entries == 1L) temp <- list(temp)
      temp <- vapply(temp[not.nulls], format_author, "")
    } else {
      temp <- unlist(temp)
    }
    y[not.nulls, nom] <- temp
  }
  return(y)
}
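# A hedged round-trip, assuming RefManageR is installed (it exports BibEntry
# and the internals fields(), $.BibEntry, and format_author used above):
library(RefManageR)
bib <- BibEntry(bibtype = "Article", key = "doe20", title = "An Example",
                author = "Jane Doe", journaltitle = "J. Examples", year = 2020)
as.data.frame(bib)  # one row: bibtype plus one column per used field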
# expected cell counts under independence: E[i, j] = rowsum_i * colsum_j / N
expCounts <- function(tab) {
  expected <- (rowSums(tab) %*% t(colSums(tab))) / sum(tab)
  rownames(expected) <- rownames(tab)
  colnames(expected) <- colnames(tab)
  return(expected)
}
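# Worked example (row sums 40/60, column sums 30/70, N = 100):
tab <- matrix(c(10, 20, 30, 40), nrow = 2,
              dimnames = list(c("a", "b"), c("x", "y")))
expCounts(tab)
#    x  y
# a 12 28
# b 18 42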
ci.cvAUC <- function(predictions, labels, label.ordering = NULL, folds = NULL,
                     confidence = 0.95) {
  clean <- .process_input(predictions = predictions, labels = labels,
                          label.ordering = label.ordering, folds = folds,
                          ids = NULL, confidence = confidence)
  predictions <- clean$predictions
  labels <- clean$labels
  pos <- levels(labels[[1]])[[2]]
  neg <- levels(labels[[1]])[[1]]
  n_obs <- length(unlist(labels))
  # inverse class-proportion weights
  w1 <- 1 / (sum(unlist(labels) == pos) / n_obs)
  w0 <- 1 / (sum(unlist(labels) == neg) / n_obs)
  # silence R CMD check notes about data.table column names
  pred <- label <- NULL
  fracNegLabelsWithSmallerPreds <- fracPosLabelsWithLargerPreds <- icVal <- NULL

  # per-fold influence-curve contribution to the variance of the cvAUC
  .IC <- function(fold_preds, fold_labels, pos, neg, w1, w0) {
    n_rows <- length(fold_labels)
    n_pos <- sum(fold_labels == pos)
    n_neg <- n_rows - n_pos
    auc <- AUC(fold_preds, fold_labels)
    DT <- data.table(pred = fold_preds, label = fold_labels)
    DT <- DT[order(pred, -xtfrm(label))]
    DT[, fracNegLabelsWithSmallerPreds := cumsum(label == neg) / n_neg]
    DT <- DT[order(-pred, label)]
    DT[, fracPosLabelsWithLargerPreds := cumsum(label == pos) / n_pos]
    DT[, icVal := ifelse(label == pos,
                         w1 * (fracNegLabelsWithSmallerPreds - auc),
                         w0 * (fracPosLabelsWithLargerPreds - auc))]
    return(mean(DT$icVal^2))
  }

  sighat2 <- mean(unlist(mapply(FUN = .IC, fold_preds = predictions,
                                fold_labels = labels,
                                MoreArgs = list(pos = pos, neg = neg,
                                                w1 = w1, w0 = w0))))
  se <- sqrt(sighat2 / n_obs)
  cvauc <- cvAUC(predictions, labels)$cvAUC
  z <- qnorm(confidence + (1 - confidence) / 2)
  ci_cvauc <- c(cvauc - (z * se), cvauc + (z * se))
  # truncate the interval to [0, 1]
  ci_cvauc[1] <- ifelse(ci_cvauc[1] < 0, 0, ci_cvauc[1])
  ci_cvauc[2] <- ifelse(ci_cvauc[2] > 1, 1, ci_cvauc[2])
  return(list(cvAUC = cvauc, se = se, ci = ci_cvauc, confidence = confidence))
}
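# A minimal calling sketch, assuming the cvAUC package (which exports
# ci.cvAUC and the AUC/cvAUC helpers used above) is attached; data synthetic:
library(cvAUC)
set.seed(1)
p <- runif(100)                       # predicted scores
y <- rbinom(100, 1, p)                # labels correlated with the scores
folds <- sample(rep(1:5, each = 20))  # 5-fold assignment
ci.cvAUC(predictions = p, labels = y, folds = folds, confidence = 0.95)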
rm(list = ls(all = TRUE))
graphics.off()
closeAllConnections()

library(PEcAn.all)
library(PEcAn.SIPNET)
library(PEcAn.LINKAGES)
library(PEcAn.visualization)
library(PEcAn.assim.sequential)
library(nimble)
library(lubridate)
library(rgdal)
library(ncdf4)
library(purrr)
library(listviewer)
library(dplyr)
library(furrr)
library(tictoc)

work_dir <- "/data/bmorrison/sda/lai"
settings <- read.settings("pecan_MultiSite_SDA_LAI_AGB_sitegroup.xml")

# expand a sitegroup into individual site settings
if ("sitegroup" %in% names(settings)) {
  if (is.null(settings$sitegroup$nSite)) {
    settings <- PEcAn.settings::createSitegroupMultiSettings(
      settings, sitegroupId = settings$sitegroup$id)
  } else {
    settings <- PEcAn.settings::createSitegroupMultiSettings(
      settings, sitegroupId = settings$sitegroup$id,
      nSite = settings$sitegroup$nSite)
  }
  settings$sitegroup <- NULL
}

observation <- c()
for (i in seq_along(settings$run)) {
  command <- paste0("settings$run$settings.", i, "$site$id")
  obs <- eval(parse(text = command))
  observation <- c(observation, obs)
}

if ("MultiSettings" %in% class(settings))
  site.ids <- settings %>%
    map(~ .x[['run']]) %>% map('site') %>% map('id') %>%
    unlist() %>% as.character()

# extract MODIS LAI for the model sites, in batches of 10
observations <- observation
lai_data <- data.frame()
for (i in 1:5) {
  start <- 1 + (i - 1) * 10
  end <- start + 9
  obs <- observations[start:end]
  print(paste("working on: ", i))
  print(obs)
  PEcAn.logger::logger.info("**** Extracting MODIS LAI data for model sites ****")
  bety <- list(user = 'bety', password = 'bety', host = 'localhost',
               dbname = 'bety', driver = 'PostgreSQL', write = TRUE)
  con <- PEcAn.DB::db.open(bety)
  bety$con <- con
  site_ID <- obs
  suppressWarnings(site_qry <- glue::glue_sql(
    "SELECT *, ST_X(ST_CENTROID(geometry)) AS lon, ST_Y(ST_CENTROID(geometry)) AS lat FROM sites WHERE id IN ({ids*})",
    ids = site_ID, .con = con))
  suppressWarnings(qry_results <- DBI::dbSendQuery(con, site_qry))
  suppressWarnings(qry_results <- DBI::dbFetch(qry_results))
  site_info <- list(site_id = qry_results$id, site_name = qry_results$sitename,
                    lat = qry_results$lat, lon = qry_results$lon,
                    time_zone = qry_results$time_zone)
  lai <- call_MODIS(outdir = NULL, var = "LAI", site_info = site_info,
                    product_dates = c("1980001", "2018365"),
                    run_parallel = TRUE, ncores = 10, product = "MOD15A2H",
                    band = "Lai_500m", package_method = "MODISTools",
                    QC_filter = TRUE, progress = FALSE)
  lai_data <- rbind(lai_data, lai)
}
lai_sd <- lai_data
save(lai_data, file = '/data/bmorrison/sda/lai/50_site_run/lai_data_sites.Rdata')

# site info for the full site list
observation <- observations
PEcAn.logger::logger.info("**** Extracting site info for model sites ****")
bety <- list(user = 'bety', password = 'bety', host = 'localhost',
             dbname = 'bety', driver = 'PostgreSQL', write = TRUE)
con <- PEcAn.DB::db.open(bety)
bety$con <- con
site_ID <- observation
suppressWarnings(site_qry <- glue::glue_sql(
  "SELECT *, ST_X(ST_CENTROID(geometry)) AS lon, ST_Y(ST_CENTROID(geometry)) AS lat FROM sites WHERE id IN ({ids*})",
  ids = site_ID, .con = con))
suppressWarnings(qry_results <- DBI::dbSendQuery(con, site_qry))
suppressWarnings(qry_results <- DBI::dbFetch(qry_results))
site_info <- list(site_id = qry_results$id, site_name = qry_results$sitename,
                  lat = qry_results$lat, lon = qry_results$lon,
                  time_zone = qry_results$time_zone)

names(lai_sd) <- c("modis_date", "calendar_date", "band", "tile", "site_id",
                   "lat", "lon", "pixels", "sd", "qc")
output <- cbind(lai_data, lai_sd$sd)
names(output) <- c(names(lai_data), "sd")
save(output, file = '/data/bmorrison/sda/lai/50_site_run/all_lai_data.Rdata')

output <- output[, c(5, 2, 9, 11)]
colnames(output) <- names(agb_data)  # agb_data: AGB table assumed loaded earlier in this workflow
# find the peak (95th-percentile-capped maximum) LAI per site and year
data <- output
peak_lai <- data.frame()
years <- unique(year(as.Date(data$Date, "%Y-%m-%d")))
for (i in seq_along(years)) {
  d <- data[grep(data$Date, pattern = years[i]), ]
  sites <- unique(d$Site_ID)
  for (j in seq_along(sites)) {
    index <- which(d$Site_ID == sites[j])
    site <- d[index, ]
    if (length(index) > 0) {
      max <- site[which(site$Median == max(site$Median[which(site$Median <= quantile(site$Median, probs = 0.95))],
                                           na.rm = TRUE))[1], ]
      peak <- data.frame(max$Site_ID, Date = paste("Year", years[i], sep = "_"),
                         Median = max$Median, SD = max$SD)
      peak_lai <- rbind(peak_lai, peak)
    }
  }
}
# enforce a floor on the LAI standard deviation
peak_lai$SD[peak_lai$SD < 0.66] <- 0.66
names(peak_lai) <- c("Site_ID", "Date", "Median", "SD")
save(peak_lai, file = '/data/bmorrison/sda/lai/50_site_run/peak_lai_data.Rdata')

peak_lai$Site_ID <- as.numeric(as.character(peak_lai$Site_ID))
peak_lai$Date <- as.character(peak_lai$Date)

# combine AGB and LAI into the obs.mean / obs.cov lists expected by the SDA
observed_vars <- c("AbvGrndWood", "LAI")
observed_data <- merge(agb_data, peak_lai, by = c("Site_ID", "Date"), all = TRUE)
names(observed_data) <- c("Site_ID", "Date", "med_agb", "sdev_agb", "med_lai", "sdev_lai")
observed_data <- observed_data[order(observed_data$Date), ]
dates <- sort(unique(observed_data$Date))

obs.mean <- data.frame(date = observed_data$Date, site_id = observed_data$Site_ID,
                       med_agb = observed_data$med_agb, med_lai = observed_data$med_lai)
obs.mean$date <- as.character(obs.mean$date)
obs.mean <- obs.mean %>% split(.$date)
date.obs <- strsplit(names(obs.mean), "_") %>% map_chr(~ .x[2]) %>% paste0(., "/07/15")
obs.mean <- names(obs.mean) %>%
  map(function(namesl) {
    obs.mean[[namesl]] %>%
      split(.$site_id) %>%
      map(~ .x[3:4] %>% setNames(c("AbvGrndWood", "LAI")) %>% `row.names<-`(NULL))
  }) %>%
  setNames(date.obs)

# drop NA entries per site and date
names <- date.obs
for (name in names) {
  for (site in names(obs.mean[[name]])) {
    na_index <- which(!(is.na(obs.mean[[name]][[site]])))
    colnames <- names(obs.mean[[name]][[site]])
    if (length(na_index) > 0) {
      obs.mean[[name]][[site]] <- obs.mean[[name]][[site]][na_index]
    }
  }
}

obs.cov <- data.frame(date = observed_data$Date, site_id = observed_data$Site_ID,
                      sdev_agb = observed_data$sdev_agb, sdev_lai = observed_data$sdev_lai)
obs.cov$date <- as.character(obs.cov$date)
obs.cov <- obs.cov %>% split(.$date)
obs.cov <- names(obs.cov) %>%
  map(function(namesl) {
    obs.cov[[namesl]] %>%
      split(.$site_id) %>%
      map(~ .x[3:4]^2 %>% unlist %>% diag(nrow = 2, ncol = 2))
  }) %>%
  setNames(date.obs)

for (name in names) {
  for (site in names(obs.cov[[name]])) {
    bad <- which(apply(obs.cov[[name]][[site]], 2, function(x) any(is.na(x))) == TRUE)
    if (length(bad) > 0) {
      obs.cov[[name]][[site]] <- obs.cov[[name]][[site]][, -bad]
      if (is.null(dim(obs.cov[[name]][[site]]))) {
        obs.cov[[name]][[site]] <- obs.cov[[name]][[site]][-bad]
      } else {
        obs.cov[[name]][[site]] <- obs.cov[[name]][[site]][-bad, ]
      }
    }
  }
}

save(obs.mean, file = '/data/bmorrison/sda/lai/50_site_run/obs_mean_50.Rdata')
save(obs.cov, file = '/data/bmorrison/sda/lai/50_site_run/obs_cov_50.Rdata')
# expected sufficient statistics for the 2PL item-slope update
tam_mml_2pl_sufficient_statistics_item_slope <- function(hwt, theta, cResp,
    pweights, maxK, nitems, ndim)
{
  thetabar <- hwt %*% theta
  cB_obs <- crossprod(cResp * pweights, thetabar)
  B_obs <- aperm(array(cB_obs, dim = c(maxK, nitems, ndim)), c(2, 1, 3))
  res <- list(thetabar = thetabar, cB_obs = cB_obs, B_obs = B_obs)
  return(res)
}
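# A shape check with random inputs (the cResp column layout is assumed
# category-fastest, matching the array(..., dim = c(maxK, nitems, ndim))
# reshape above):
nstud <- 6; TP <- 4; ndim <- 1; nitems <- 3; maxK <- 2
hwt <- matrix(runif(nstud * TP), nstud, TP)
hwt <- hwt / rowSums(hwt)                      # posterior weights per student
theta <- matrix(seq(-1, 1, length.out = TP), TP, ndim)
cResp <- matrix(rbinom(nstud * nitems * maxK, 1, 0.5), nstud, nitems * maxK)
out <- tam_mml_2pl_sufficient_statistics_item_slope(hwt, theta, cResp,
         pweights = rep(1, nstud), maxK = maxK, nitems = nitems, ndim = ndim)
dim(out$B_obs)  # 3 2 1: items x categories x dimensions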
# exhaustive search for the 4-phage set covering the most bacteria;
# returns early once full coverage is reached
Search4 <- function(MaxPhage, MaxBacteria, new_matrix, phage_names) {
  PhageSet4 <- 0
  BestBacteria4 <- 0
  for (i in 1:(MaxPhage - 3)) {
    for (j in (i + 1):(MaxPhage - 2)) {
      for (k in (j + 1):(MaxPhage - 1)) {
        for (l in (k + 1):MaxPhage) {
          # count bacteria lysed by at least one phage of the set
          BacteriaSet4 <- 0
          for (b in 1:MaxBacteria) {
            if (new_matrix[b, i] | new_matrix[b, j] | new_matrix[b, k] | new_matrix[b, l]) {
              BacteriaSet4 <- BacteriaSet4 + 1
            }
          }
          if (BacteriaSet4 > BestBacteria4) {
            PhageSet4 <- c(i, j, k, l)
            BestBacteria4 <- BacteriaSet4
            if (BestBacteria4 == MaxBacteria) {
              return(c(phage_names[PhageSet4], BestBacteria4))
            }
          }
        }
      }
    }
  }
  return(c(phage_names[PhageSet4], BestBacteria4))
}
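# Toy host-range matrix: rows = bacteria, columns = phages (TRUE = lysis)
new_matrix <- matrix(c(TRUE,  FALSE, FALSE, FALSE,
                       FALSE, TRUE,  FALSE, FALSE,
                       FALSE, FALSE, TRUE,  FALSE,
                       FALSE, FALSE, FALSE, TRUE,
                       FALSE, FALSE, FALSE, TRUE),
                     nrow = 5, byrow = TRUE)
Search4(MaxPhage = 4, MaxBacteria = 5, new_matrix, paste0("P", 1:4))
# "P1" "P2" "P3" "P4" "5": the only 4-phage set, covering all 5 bacteria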
print.summary.tlm <- function(x, ...) {
  printPreamble(x)
  print(x$summary, ...)
  cat("\n")
}
set_batch_template <- function(batch_id = "", template_id = "") {
  captr_CHECKAUTH()
  if (is.null(template_id) || identical(template_id, "")) stop("Provide a Valid Template ID.")
  if (is.null(batch_id) || identical(batch_id, "")) stop("Provide a Valid Batch ID.")
  # PUT the template id onto the batch via the Captricity REST API (curl package)
  h <- new_handle()
  handle_setopt(h, customrequest = "PUT")
  handle_setheaders(h, "Captricity-API-Token" = Sys.getenv('CaptricityToken'))
  handle_setform(h, documents = as.character(template_id))
  tag_con <- curl_fetch_memory(paste0("https://shreddr.captricity.com/api/v1/batch/", batch_id),
                               handle = h)
  tag <- rawToChar(tag_con$content)
  status <- ifelse(tag_con$status_code == 200, "Successfully Assigned",
                   "Problem with the request")
  print(status)
  tag
}
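# A hedged invocation sketch; the IDs below are placeholders and a real
# Captricity API token must be set in the environment:
Sys.setenv(CaptricityToken = "<your-api-token>")
res <- set_batch_template(batch_id = "12345", template_id = "67890")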
neldermead.istorestart <- function(this = NULL) {
  status <- optimbase.get(this = this$optbase, key = 'status')
  if (status == 'maxfuneval') {
    istorestart <- FALSE
    varargout <- list(this = this, istorestart = istorestart)
    return(varargout)
  }
  if (!any(this$restartdetection == c('oneill', 'kelley')))
    stop(sprintf('neldermead.istorestart: Unknown restart detection %s',
                 this$restartdetection),
         call. = FALSE)
  if (this$restartdetection == 'oneill') {
    tmp <- neldermead.isroneill(this = this)
    this <- tmp$this
    istorestart <- tmp$istorestart
    rm(tmp)
  }
  if (this$restartdetection == 'kelley') {
    tmp <- neldermead.isrkelley(this = this)
    this <- tmp$this
    istorestart <- tmp$istorestart
    rm(tmp)
  }
  varargout <- list(this = this, istorestart = istorestart)
  return(varargout)
}
library("SPUTNIK") msIm <- msImage(values = matrix(rnorm(200), 40, 50), name = "test", scale = TRUE) msImSmoothed <- smoothImage(msIm, sigma = 5)
stopwords.en <- c("a", "about", "above", "across", "after", "again", "against", "all", "almost", "alone", "along", "already", "also", "although", "always", "am", "among", "an", "and", "another", "any", "anybody", "anyone", "anything", "anywhere", "are", "area", "areas", "aren't", "around", "as", "ask", "asked", "asking", "asks", "at", "away", "b", "back", "backed", "backing", "backs", "be", "became", "because", "become", "becomes", "been", "before", "began", "behind", "being", "beings", "below", "best", "better", "between", "big", "both", "but", "by", "c", "came", "can", "can't", "cannot", "case", "cases", "certain", "certainly", "clear", "clearly", "come", "could", "couldn't", "d", "did", "didn't", "differ", "different", "differently", "do", "does", "doesn't", "doing", "don't", "done", "down", "downed", "downing", "downs", "during", "e", "each", "early", "either", "end", "ended", "ending", "ends", "enough", "even", "evenly", "ever", "every", "everybody", "everyone", "everything", "everywhere", "f", "face", "faces", "fact", "facts", "far", "felt", "few", "find", "finds", "first", "for", "four", "from", "full", "fully", "further", "furthered", "furthering", "furthers", "g", "gave", "general", "generally", "get", "gets", "give", "given", "gives", "go", "going", "good", "goods", "got", "great", "greater", "greatest", "group", "grouped", "grouping", "groups", "h", "had", "hadn't", "has", "hasn't", "have", "haven't", "having", "he", "he'd", "he'll", "he's", "her", "here", "here's", "hers", "herself", "high", "higher", "highest", "him", "himself", "his", "how", "how's", "however", "i", "i'd", "i'll", "i'm", "i've", "if", "important", "in", "interest", "interested", "interesting", "interests", "into", "is", "isn't", "it", "it's", "its", "itself", "j", "just", "k", "keep", "keeps", "kind", "knew", "know", "known", "knows", "l", "large", "largely", "last", "later", "latest", "least", "less", "let", "let's", "lets", "like", "likely", "long", "longer", "longest", "m", "made", "make", "making", "man", "many", "may", "me", "member", "members", "men", "might", "more", "most", "mostly", "mr", "mrs", "much", "must", "mustn't", "my", "myself", "n", "necessary", "need", "needed", "needing", "needs", "never", "new", "newer", "newest", "next", "no", "nobody", "non", "noone", "nor", "not", "nothing", "now", "nowhere", "number", "numbers", "o", "of", "off", "often", "old", "older", "oldest", "on", "once", "one", "only", "open", "opened", "opening", "opens", "or", "order", "ordered", "ordering", "orders", "other", "others", "ought", "our", "ours", "ourselves", "out", "over", "own", "p", "part", "parted", "parting", "parts", "per", "perhaps", "place", "places", "point", "pointed", "pointing", "points", "possible", "present", "presented", "presenting", "presents", "problem", "problems", "put", "puts", "q", "quite", "r", "rather", "really", "right", "room", "rooms", "s", "said", "same", "saw", "say", "says", "second", "seconds", "see", "seem", "seemed", "seeming", "seems", "sees", "several", "shall", "shan't", "she", "she'd", "she'll", "she's", "should", "shouldn't", "show", "showed", "showing", "shows", "side", "sides", "since", "small", "smaller", "smallest", "so", "some", "somebody", "someone", "something", "somewhere", "state", "states", "still", "such", "sure", "t", "take", "taken", "than", "that", "that's", "the", "their", "theirs", "them", "themselves", "then", "there", "there's", "therefore", "these", "they", "they'd", "they'll", "they're", "they've", "thing", "things", "think", "thinks", "this", "those", 
"though", "thought", "thoughts", "three", "through", "thus", "to", "today", "together", "too", "took", "toward", "turn", "turned", "turning", "turns", "two", "u", "under", "until", "up", "upon", "us", "use", "used", "uses", "v", "very", "w", "want", "wanted", "wanting", "wants", "was", "wasn't", "way", "ways", "we", "we'd", "we'll", "we're", "we've", "well", "wells", "went", "were", "weren't", "what", "what's", "when", "when's", "where", "where's", "whether", "which", "while", "who", "who's", "whole", "whom", "whose", "why", "why's", "will", "with", "within", "without", "won't", "work", "worked", "working", "works", "would", "wouldn't", "x", "y", "year", "years", "yes", "yet", "you", "you'd", "you'll", "you're", "you've", "young", "younger", "youngest", "your", "yours", "yourself", "yourselves", "z") library(RcmdrPlugin.temis) library(tm.plugin.factiva) library(SnowballC) corpus <- Corpus(FactivaSource(system.file("texts", "reut21578-factiva.xml", package="tm.plugin.factiva")), readerControl=list(language="en")) names(corpus) <- make.unique(names(corpus)) corpusVars <- extractMetadata(corpus) corpusVars <- corpusVars[c("Origin", "Date", "United.States", "North.America", "Canada", "Ecuador", "South.America", "Kuwait", "Middle.East", "Indonesia", "Asia", "Bahrain", "Saudi.Arabia", "Qatar", "United.Arab.Emirates", "Argentina")] meta(corpus, "Date") <- corpusVars$Date dtmCorpus <- corpus dtmCorpus <- tm_map(dtmCorpus, content_transformer(tolower)) dtmCorpus <- tm_map(dtmCorpus, content_transformer(function(x) gsub("(['<U+2019>\n<U+202F><U+2009>]|[[:punct:]]|[[:space:]]|[[:cntrl:]])+", " ", x))) customRemoveNumbers <- function(x) gsub("[[:digit:]]+", "", x) dtmCorpus <- tm_map(dtmCorpus, content_transformer(customRemoveNumbers)) dtm <- DocumentTermMatrix(dtmCorpus, control=list(tolower=FALSE, wordLengths=c(2, Inf))) rm(dtmCorpus) dictionary <- data.frame(row.names=colnames(dtm), "Occurrences"=col_sums(dtm), "Stemmed.Term"=wordStem(colnames(dtm), "en"), "Stopword"=ifelse(colnames(dtm) %in% stopwords("en"), "Stopword", ""), stringsAsFactors=FALSE) dtm <- dtm[, !colnames(dtm) %in% stopwords.en] dtm <- rollup(dtm, 2, dictionary[colnames(dtm), 2]) attr(dtm, "dictionary") <- dictionary rm(dictionary) meta(corpus, type="corpus", tag="language") <- attr(dtm, "language") <- "en" meta(corpus, type="corpus", tag="processing") <- attr(dtm, "processing") <- c(lowercase=TRUE, punctuation=TRUE, digits=TRUE, stopwords=TRUE, stemming=TRUE, customStemming=FALSE, twitter=FALSE, removeHashtags=NA, removeNames=NA) corpus dtm specTerms <- specificTerms(dtm, meta(corpus, "Date")[[1]], p=0.1, min.occ=5, n.max=10) attr(specTerms, "title") <- "Specific terms by Date" stopifnot(all.equal(round(c(specTerms[[1]]), 4), c(2.0101, 1.2563, 1.5075, 1.7588, 1.005, 1.2563, 2.5126, 1.7588, NA, 0, 0, 66.6667, 71.4286, 60, 50, 66.6667, 55.5556, 38.4615, 43.75, NA, 0, 0, 0.5794, 0.338, 0.4829, 0.676, 0.2897, 0.4346, 1.2554, 0.7726, NA, 0.9174, 1.0623, 8, 5, 6, 7, 4, 5, 10, 7, NA, 0, 0, 12, 7, 10, 14, 6, 9, 26, 16, NA, 19, 22, 3.3408, 2.6673, 2.5714, 2.3631, 2.1825, 2.1365, 2.1162, 2.0293, NA, -2.1201, -2.3698, 0.0004, 0.0038, 0.0051, 0.0091, 0.0145, 0.0163, 0.0172, 0.0212, NA, 0.017, 0.0089), check.attributes=FALSE)) dissDtm <- rollup(dtm, 1, meta(corpus, "Date")) diss <- dist(sweep(dissDtm/row_sums(dissDtm), 2, sqrt(sum(dissDtm)/col_sums(dissDtm)), "*")) rm(dissDtm) attr(diss, "title") <- "Date by Date dissimilarity table" diss
# quantile-based clustering with per-variable theta levels:
# B random starts, then iterate assignment / quantile / theta updates
alg.VU <- function(data, k = 2, eps = 1e-8, it.max = 100, B = 30) {
  numobs <- nrow(data)
  p <- ncol(data)
  qq <- matrix(0, k, p)
  VV <- 0
  VV.temp <- NULL
  QQ <- array(0, c(numobs, k, p))
  VV[1] <- Inf
  # multistart initialization
  for (hh in 1:B) {
    theta <- stats::runif(p)
    for (j in 1:p)
      for (i in 1:k) {
        qq[i, j] <- stats::quantile(data[, j], prob = (i - 1) / (k - 1) * 0.5 + theta[j] / 2)
        QQ[, i, j] <- (theta[j] + (1 - 2 * theta[j]) * (data[, j] < matrix(qq[i, j], numobs))) *
          abs(data[, j] - matrix(qq[i, j], numobs)) - log(theta[j] * (1 - theta[j]))
      }
    cl <- apply(apply(QQ, c(1, 2), sum), 1, which.min)
    conta <- 0
    for (j in 1:p) conta <- conta + sum(QQ[cbind(seq_along(cl), cl, j)])
    VV.temp <- conta
    if (VV.temp < VV[1]) {
      VV[1] <- VV.temp
      cl.true <- cl
      theta.true <- theta
    }
  }
  cl <- cl.true
  theta <- theta.true
  ratio <- 5
  h <- 1
  # main loop: update quantiles, theta (1-d optimizations), and assignments
  while ((ratio > eps) & (h < it.max)) {
    h <- h + 1
    nk <- table(cl)
    if (length(nk) < k) nk <- table(factor(cl, levels = 1:k))
    for (j in 1:p)
      for (i in 1:k) {
        if (nk[i] > 0) qq[i, j] <- stats::quantile(data[cl == i, j], theta[j])
        QQ[, i, j] <- (theta[j] + (1 - 2 * theta[j]) * (data[, j] < matrix(qq[i, j], numobs))) *
          abs(data[, j] - matrix(qq[i, j], numobs)) - log(theta[j] * (1 - theta[j]))
      }
    for (j in 1:p)
      theta[j] <- stats::optim(theta[j], fn.vu, method = "L-BFGS-B",
                               lower = 0.0001, upper = 0.999,
                               data = data[, j, drop = FALSE], k = k, cl = cl,
                               qq = qq[, j, drop = FALSE])$par
    cl <- apply(apply(QQ, c(1, 2), sum), 1, which.min)
    conta <- 0
    for (j in 1:p) conta <- conta + sum(QQ[cbind(seq_along(cl), cl, j)])
    VV[h] <- conta
    ratio <- (VV[h - 1] - VV[h]) / VV[h - 1]
    if (h < 5) ratio <- 2 * eps
  }
  names(theta) <- colnames(qq) <- colnames(data)
  return(list(Vseq = VV, V = VV[h], cl = cl, qq = qq, theta = theta))
}

# objective for the theta update of a single variable
fn.vu <- function(theta, data, k, cl, qq) {
  VV <- 0
  p <- ncol(data)
  for (i in 1:k)
    if (sum(cl == i) > 0) {
      nn <- sum(cl == i)
      xx <- data[cl == i, , drop = FALSE]
      a <- rowSums((theta + ((1 - 2 * theta) * (xx < t(matrix(qq[i, ], p, nn))))) *
                     abs(xx - t(matrix(qq[i, ], p, nn))))
      VV <- VV + sum(a)
    }
  numobs <- length(cl)
  VV <- VV - numobs * log(theta * (1 - theta))
  return(VV)
}
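# A toy run on two well-separated Gaussian clusters (alg.VU only needs base
# stats, so this is self-contained); B kept small for speed:
set.seed(42)
data <- rbind(matrix(rnorm(100, 0), 50, 2), matrix(rnorm(100, 4), 50, 2))
res <- alg.VU(data, k = 2, B = 5)
table(res$cl)  # cluster sizes, expected to be close to 50/50
res$theta      # fitted per-variable quantile levels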
splitAt <- function(x, pos) unname(split(x, cumsum(seq_along(x) %in% pos))) setup_opencl <- function(objects, intents, queues, kernel_maps = NULL){ assert_is_character(objects) assert_is_character(intents) assert_is_list(queues) assert_are_same_length(objects, intents) assert_are_same_length(objects, queues) assert_all_are_true(objects %in% c('gpuVector', 'vclVector', 'gpuMatrix', 'vclMatrix', 'scalar')) assert_all_are_true(intents %in% c("IN", "OUT", "INOUT")) if(is.null(kernel_maps) & is.null(names(objects))){ stop("Either 'objects' must have names corresponding to kernel arguments or kernel_maps must be defined") } out <- vector("list", length = length(objects)) mappings <- if(is.null(kernel_maps)) names(objects) else kernel_maps for(o in seq_along(objects)){ out[[o]] <- c(objects[o], intents[o], paste0(queues[[o]], collapse = ","), mappings[[o]]) } out <- do.call('rbind', out) dimnames(out) <- NULL colnames(out) <- c("object", "intents", "queues", "map") out <- as.data.frame(out, stringsAsFactors = FALSE) return(out) } custom_opencl <- function(kernel, cl_args, type){ assert_is_character(type) if(!type %in% c("integer", "float", "double")){ stop("type not recognized") } type <- if(type == "integer") "int" else type ocl_shell <- system.file("src", package = "gpuR") ocl_file <- paste0(tempfile(), '.cpp') tryCatch({ invisible( file.copy(paste0(ocl_shell, '/base_custom_opencl.cpp'), ocl_file) ) }, warning = function(w) { warning(w) stop("copying the base file failed, see warning message below") }) myfile <- readLines(ocl_file) input_args <- sapply(1:nrow(cl_args), function(x) { id <- cl_args[x,"map"] suffix <- if(x == nrow(cl_args)) "_" else "_," if(cl_args[x,"object"] != "scalar"){ paste0("SEXP ptr", id, suffix) }else{ paste0("SEXP ", id, suffix) } }) input_objs <- sapply(input_args, function(x){ unlist(lapply(strsplit(x, " "), function(y) { if(any(grepl(",", y) & !grepl("ptr", y))){ end <- nchar(y[[length(y)]])-2 }else{ end <- nchar(y[[length(y)]])-1 } substr(y[[length(y)]], 0, end) } )) }, USE.NAMES = FALSE) cl_arg <- cl_args[cl_args$object %in% c("gpuMatrix", "gpuVector", "vclMatrix", "vclVector"),] context_index_line <- paste('const int ctx_id = as<int>(s4', cl_arg[1,"map"], '.slot(".context_index")) - 1;', sep = "") s4_lines <- sapply(1:nrow(cl_arg), function(x) { id <- cl_args[x,"map"] paste0("Rcpp::S4 s4", id, "(ptr", id, "_);") }) id <- cl_args[1,"map"] context_lines <- c(paste0('viennacl::ocl::context ctx = vcl_', id, '->handle().opencl_handle().context();'), paste0('cl_context my_context = vcl_', id, '->handle().opencl_handle().context().handle().get();'), paste0('cl_device_id my_device = vcl_', id, '->handle().opencl_handle().context().devices()[0].id();'), paste0('cl_command_queue queue = vcl_', id, '->handle().opencl_handle().context().get_queue().handle().get();') ) import_lines <- sapply(1:nrow(cl_args), function(x) { switch(cl_args[x,"object"], "gpuVector" = { id <- cl_args[x,"map"] paste0("std::shared_ptr<viennacl::vector_base<", type, "> > vcl_", id, " = getVCLVecptr<", type, ">(s4", id, '.slot("address"), false, ctx_id);') }, "gpuMatrix" = { id <- cl_args[x,"map"] paste0("std::shared_ptr<viennacl::matrix<", type, "> > vcl_", id, " = getVCLptr<", type, ">(s4", id, '.slot("address"), false, ctx_id);') }, "vclVector" = { id <- cl_args[x,"map"] paste0("std::shared_ptr<viennacl::vector_base<", type, "> > vcl_", id, " = getVCLVecptr<", type, ">(s4", id, '.slot("address"), true, ctx_id);') }, "vclMatrix" = { id <- cl_args[x,"map"] paste0("std::shared_ptr<viennacl::matrix<", 
type, "> > vcl_", id, " = getVCLptr<", type, ">(s4", id, '.slot("address"), true, ctx_id);') }, "scalar" = { id <- cl_args[x,"map"] paste0(type, " ", id, "= as<", type, ">(", id, "_ ", ");") } ) }) import_dims <- lapply(1:nrow(cl_args), function(x){ switch(cl_args[x,"object"], "gpuVector" = { id <- cl_args[x,"map"] paste0("unsigned int ", id, "_size = vcl_", id, "->size();") }, "gpuMatrix" = { id <- cl_args[x,"map"] r <- paste0("unsigned int ", id, "_size1 = vcl_", id, "->size1();") ri <- paste0("unsigned int ", id, "_internal_size1 = vcl_", id, "->internal_size1();") c <- paste0("unsigned int ", id, "_size2 = vcl_", id, "->size2();") ci <- paste0("unsigned int ", id, "_internal_size2 = vcl_", id, "->internal_size2();") return(c(r, ri, c, ci)) }, "vclVector" = { id <- cl_args[x,"map"] paste0("unsigned int ", id, "_size = vcl_", id, "->size();") }, "vclMatrix" = { id <- cl_args[x,"map"] r <- paste0("unsigned int ", id, "_size1 = vcl_", id, "->size1();") ri <- paste0("unsigned int ", id, "_internal_size1 = vcl_", id, "->internal_size1();") c <- paste0("unsigned int ", id, "_size2 = vcl_", id, "->size2();") ci <- paste0("unsigned int ", id, "_internal_size2 = vcl_", id, "->internal_size2();") return(c(r, ri, c, ci)) }) }) dim_objs <- sapply(import_dims, function(x){ if(!is.null(x)){ unlist(lapply(strsplit(x, " "), function(y) y[[3]])) } }) src <- readLines(kernel, file.info(kernel)$size) src_quoted <- sprintf('"%s\\\\n"', src) src <- src[sapply(src, function(x) length(grep("^\\s*$", x)) == 0)] e <- new.env() e$flag <- FALSE src<- src[sapply(src, function(x){ if(grepl("__kernel", x)){ e$flag <- TRUE return(TRUE) }else{ if(e$flag){ return(e$flag) }else{ e$flag <- FALSE return(e$flag) } } })] rm(e) src <- src[!sapply(src, function(x) grepl(" kernels <- splitAt(src, which(sapply(src, function(x) grepl("\\_\\_kernel", x, perl = TRUE), USE.NAMES = FALSE))) knames <- sapply(kernels, function(x) { gsub("\\(.*", "", unlist(strsplit(x[1], " "))[3]) }, USE.NAMES = FALSE) if(any(!cl_args[,"queues"] %in% knames)){ mismatches <- unique(cl_args[!cl_args[,"queues"] %in% knames,"queues"]) stop(paste0(paste0("queues: ", paste(mismatches, collapse=", ")), " defined in 'setup_opencl' not found in kernel file: ", kernel)) } knames_line <- paste0('Rcpp::StringVector kernel_name("',paste(knames, collapse = '","'), '");') kernel_args <- sapply(kernels, function(x){ unlist(strsplit(paste(x, collapse = ' '), '[()]'))[2] }) import_kernels <- sapply(knames, function(x) { paste0("viennacl::ocl::kernel & ", x, ' = my_prog.get_kernel("', x, '");\n') }, USE.NAMES = FALSE) globals <- sapply(kernels, function(x) { sum(sapply(regmatches(x, gregexpr("get_global_id", x)), length)) }, USE.NAMES = FALSE) import_global <- sapply(seq_along(globals), function(x) { kname <- knames[x] kdim <- as.character(globals[x]) switch(kdim, "0" = {paste0(kname, ".global_work_size(0, 1);")}, "1" = { objects_in_kernel <- cl_args[which(sapply(cl_args[, "queues"], function(q) grepl(kname, q))),] if(any(grepl("Vector", objects_in_kernel[, "object"]))){ vecs <- paste0(objects_in_kernel[grepl("Vector", objects_in_kernel[,"object"]), "map"], "_size") }else{ vecs <- NULL } if(any(grepl("Matrix", objects_in_kernel[, "object"]))){ mats <- paste0(objects_in_kernel[grepl("Matrix", objects_in_kernel[,"object"]), "map"], "_internal_size1") }else{ mats <- NULL } paste0(kname, ".global_work_size(0, roundUp(std::max({", paste(c(vecs, mats), collapse = ","), "}), max_local_size[0]));") }, "2" = { objects_in_kernel <- cl_args[which(sapply(cl_args[, "queues"], 
function(q) grepl(kname, q))),] if(any(grepl("Vector", objects_in_kernel[, "object"]))){ vecs <- paste0(objects_in_kernel[grepl("Vector", objects_in_kernel[,"object"]), "map"], "_size") }else{ vecs <- NULL } if(any(grepl("Matrix", objects_in_kernel[, "object"]))){ mats1 <- paste0(objects_in_kernel[grepl("Matrix", objects_in_kernel[,"object"]), "map"], "_internal_size1") mats2 <- paste0(objects_in_kernel[grepl("Matrix", objects_in_kernel[,"object"]), "map"], "_internal_size2") }else{ mats1 <- NULL mats2 <- NULL } c(paste0(kname, ".global_work_size(0, roundUp(std::max({", paste(c(vecs, mats1), collapse = ","), "}), sqrt(max_local_size[0])));"), paste0(kname, ".global_work_size(1, roundUp(std::max({", paste(c(vecs, mats2), collapse = ","), "}), sqrt(max_local_size[0])));")) }, "3" = stop("3 dimensional not yet implemented"), stop("unrecognized dimension") ) }, USE.NAMES = FALSE) import_local <- sapply(seq_along(globals), function(x) { kname <- knames[x] kdim <- as.character(globals[x]) switch(kdim, "0" = {paste0(kname, ".local_work_size(0, 1);")}, "1" = { objects_in_kernel <- cl_args[which(sapply(cl_args[, "queues"], function(q) grepl(kname, q))),] if(any(grepl("Vector", objects_in_kernel[, "object"]))){ vecs <- paste0(objects_in_kernel[grepl("Vector", objects_in_kernel[,"object"]), "map"], "_size") }else{ vecs <- NULL } if(any(grepl("Matrix", objects_in_kernel[, "object"]))){ mats <- paste0(objects_in_kernel[grepl("Matrix", objects_in_kernel[,"object"]), "map"], "internal_size1") }else{ mats <- NULL } paste0(kname, ".local_work_size(0, max_local_size[0]);") }, "2" = { objects_in_kernel <- cl_args[which(sapply(cl_args[, "queues"], function(q) grepl(kname, q))),] if(any(grepl("Vector", objects_in_kernel[, "object"]))){ vecs <- paste0(objects_in_kernel[grepl("Vector", objects_in_kernel[,"object"]), "map"], "_size") }else{ vecs <- NULL } if(any(grepl("Matrix", objects_in_kernel[, "object"]))){ mats1 <- paste0(objects_in_kernel[grepl("Matrix", objects_in_kernel[,"object"]), "map"], "internal_size1") mats2 <- paste0(objects_in_kernel[grepl("Matrix", objects_in_kernel[,"object"]), "map"], "internal_size2") }else{ mats1 <- NULL mats2 <- NULL } c(paste0(kname, ".local_work_size(0, sqrt(max_local_size[0]));"), paste0(kname, ".local_work_size(1, sqrt(max_local_size[0]));")) }, "3" = stop("3 dimensional not yet implemented"), stop("unrecognized dimension") ) }, USE.NAMES = FALSE) k_args <- lapply(kernel_args, function(x) unlist(strsplit(x, ","))) k_args <- lapply(k_args, function(x) { unlist(lapply(strsplit(x, "\\bdouble|\\bfloat|\\bint"), function(y){ gsub(" ", "", y[[length(y)]]) })) }) ptr_args <- lapply(k_args, function(x) x[grepl('\\*', x)]) if(!all(cl_arg[,"map"] %in% gsub("\\*", "", unlist(ptr_args)))){ stop("Not all OpenCL buffers from kernel are mapped") } non_cl_objs <- lapply(k_args, function(x) x[!grepl('\\*', x)]) arg_checks <- unlist(lapply(non_cl_objs, function(x) any(!x %in% c(dim_objs, input_objs)))) if(any(arg_checks)){ stop("Non OpenCL buffer kernel arguments don't match to initialized objects.") } enqueue_lines <- sapply(seq_along(knames), function(k){ objects_in_kernel <- cl_args[which(sapply(cl_args[, "queues"], function(q) grepl(knames[k], q))),] cpp_objs <- paste0("*vcl_", objects_in_kernel[objects_in_kernel[,"object"] != "scalar","map"]) cl_objs <- paste0("*", objects_in_kernel[objects_in_kernel[,"object"] != "scalar","map"]) cl_objs <- c(cl_objs, objects_in_kernel[objects_in_kernel[,"object"] == "scalar","map"]) internal_cpp_objs <- non_cl_objs[[k]] my_vcl_objs <- c(cpp_objs, 
internal_cpp_objs) my_objs <- c(cl_objs, internal_cpp_objs) paste0("viennacl::ocl::enqueue(", knames[k], "(", paste( paste(my_vcl_objs[match(k_args[[k]], my_objs)], collapse = ","), sep = ","), "));") }) if(any(cl_args[,"object"] %in% c("gpuVector", "gpuMatrix"))){ gpu_objs <- cl_args[cl_args$object %in% c("gpuVector", "gpuMatrix"),] if(any(cl_args[,"intents"] %in% c("INOUT", "OUT"))){ tmp <- gpu_objs[gpu_objs[,"intents"] %in% c("INOUT", "OUT"),] frame <- "if(!OBJECTisVCL){ Rcpp::XPtr<dynEigenMat<T> > ptrOBJECT(ptrOBJECT_); // copy device data back to CPU ptrOBJECT->to_host(*vcl_OBJECT); ptrOBJECT->release_device(); }" frame <- gsub("<T>", paste0("<", type, ">"), frame) out_lines <- vector("list", length = nrow(tmp)) for(i in 1:nrow(tmp)){ out_lines[[i]] <- gsub("OBJECT", tmp[i,"map"], frame) } out_lines[[2]] <- out_lines[[1]] out_lines <- do.call("paste", list(out_lines, collapse = "\n\n")) } }else{ out_lines <- NULL } myfile[grepl("CPP_NAME", myfile)] <- gsub("CPP_NAME", basename(file_path_sans_ext(kernel)), myfile[grepl("CPP_NAME", myfile)]) myfile[grepl("MY_ARGS", myfile)] <- gsub("MY_ARGS", paste(input_args, collapse="\n"), myfile[grepl("MY_ARGS", myfile)]) myfile[grepl("MY_KERNEL_NAMES", myfile)] <- gsub("MY_KERNEL_NAMES", knames_line, myfile[grepl("MY_KERNEL_NAMES", myfile)]) myfile[grepl("MY_S4", myfile)] <- gsub("MY_S4", paste(s4_lines, collapse="\n"), myfile[grepl("MY_S4", myfile)]) myfile[grepl("MY_CTX_ID", myfile)] <- gsub("MY_CTX_ID", context_index_line, myfile[grepl("MY_CTX_ID", myfile)]) myfile[grepl("MY_CONTEXT", myfile)] <- gsub("MY_CONTEXT", paste(context_lines, collapse="\n"), myfile[grepl("MY_CONTEXT", myfile)]) myfile[grepl("MY_KERNEL_SRC", myfile)] <- gsub("MY_KERNEL_SRC", paste(src_quoted, collapse="\n"), myfile[grepl("MY_KERNEL_SRC", myfile)]) myfile[grepl("MY_DEFINES", myfile)] <- gsub("MY_DEFINES", paste(import_lines, collapse = "\n"), myfile[grepl("MY_DEFINES", myfile)]) myfile[grepl("MY_DIMS", myfile)] <- gsub("MY_DIMS", paste(unlist(import_dims), collapse = "\n"), myfile[grepl("MY_DIMS", myfile)]) myfile[grepl("MY_KERNELS", myfile)] <- gsub("MY_KERNELS", paste(import_kernels, collapse = "\n"), myfile[grepl("MY_KERNELS", myfile)]) myfile[grepl("MY_GLOBALS", myfile)] <- gsub("MY_GLOBALS", paste(import_global, collapse = "\n"), myfile[grepl("MY_GLOBALS", myfile)]) myfile[grepl("MY_LOCALS", myfile)] <- gsub("MY_LOCALS", paste(import_local, collapse = "\n"), myfile[grepl("MY_LOCALS", myfile)]) myfile[grepl("MY_QUEUES", myfile)] <- gsub("MY_QUEUES", paste(enqueue_lines, collapse = "\n"), myfile[grepl("MY_QUEUES", myfile)]) myfile[grepl("MY_OUT", myfile)] <- gsub("MY_OUT", if(is.null(out_lines)) "" else out_lines, myfile[grepl("MY_OUT", myfile)]) writeLines(myfile, ocl_file) os <- Sys.info()['sysname'] pkg_inc <- system.file("include", package = "gpuR") switch(os, "Windows" = { arch <- if(R.Version()[["arch"]] == "x86_64") "x64" else "i386" LIBS <- "-LPATH/loader/ARCH -lOpenCL -Wl,-rpath,PATH/loader/ARCH" LIBS <- gsub("PATH", paste('"', pkg_inc, '"', sep = ""), LIBS) LIBS <- gsub("ARCH", arch, LIBS) Sys.setenv(PKG_LIBS=LIBS) }, "Darwin" = { Sys.setenv(PKG_LIBS="-framework OpenCL") }, { Sys.setenv(PKG_LIBS="-lOpenCL") }) sourceCpp(ocl_file) }
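# Hedged usage sketch for the generator above (its definition header precedes
# this excerpt; the name and signature below are assumptions in the style of
# gpuR::custom_opencl, and "saxpy.cl" is a hypothetical kernel file):
# custom_opencl("saxpy.cl", cl_args, type = "float")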
print.epSVD <- function(x, ...){
    res.epSVD <- x
    if (!inherits(res.epSVD, "epSVD")) stop("no convenient data")
    cat("**Results for SVD**\n")
    cat("The SVD was performed on", nrow(res.epSVD$p), "individuals, described by",
        nrow(res.epSVD$q), "variables, of rank", res.epSVD$rank, "\n")
    cat("\n*The results are available in the following objects:\n\n")
    res <- array("", c(7, 2), list(1:7, c("name", "description")))
    res[1,] <- c("$p", "Left singular vectors.")
    res[2,] <- c("$Dv", "Singular values (in a vector).")
    res[3,] <- c("$Dd", "Singular values (in a diagonal matrix).")
    res[4,] <- c("$q", "Right singular vectors.")
    res[5,] <- c("$ng", "Number of singular values/vectors.")
    res[6,] <- c("$rank", "Rank of the decomposed matrix. If the rank is 1, singular values and vectors are padded with 0s.")
    res[7,] <- c("$tau", "Explained variance per component.")
    print(res)
}
library(methods) library(hamcrest) assertFalse(isGeneric("names")) molecule <- setClass("Molecule", slots = c( name = "character", content = "character", size = "numeric")) a = new("Molecule", name = "Water", content = "H2O", size = 100) setMethod("names", signature(x = "Molecule"), function(x) x@name) assertThat(names(a), identicalTo("Water")) assertTrue(isGeneric("names"))
.onAttach <- function(lib, pkg) { packageStartupMessage(paste("R Package to solve regression problems while imposing", "\t an L1 constraint on the parameters. Based on S-plus Release 2.1", "Copyright (C) 1998, 1999", "Justin Lokhorst <[email protected]>", "Berwin A. Turlach <[email protected]>", "Bill Venables <[email protected]>\n", "Copyright (C) 2002", "Martin Maechler <[email protected]>", sep="\n")) }
moranI <- function(x, w, scaled = FALSE, R = 999) {
  if ( !scaled )  w <- w / Rfast::rowsums(w)   # row-standardise the weight matrix
  y <- w %*% x
  if (R > 1) {
    b <- Rfast::permcor(y, x, R = R)
    res <- c( b[1] * Rfast::Var(y, std = TRUE) / Rfast::Var(x, std = TRUE), b[2] )
  } else  res <- c( cor(y, x) * Rfast::Var(y, std = TRUE) / Rfast::Var(x, std = TRUE), NA )
  names(res) <- c("Moran's I", "permutation p-value")
  res
}
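# A minimal usage sketch for moranI() (not from the original source; assumes
# the Rfast package is installed). For random x and a random weight matrix,
# Moran's I should be near zero and the permutation p-value large.
set.seed(42)
x_ex <- rnorm(20)
w_ex <- matrix(runif(400), 20, 20)
diag(w_ex) <- 0
moranI(x_ex, w_ex, scaled = FALSE, R = 999)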
library(pcalg)
suppressWarnings(RNGversion("3.5.0"))
set.seed(123)
myDAG <- randomDAG(20, 0.3)
mcov <- trueCov(myDAG)
amat <- t(as(myDAG, "matrix"))
amat[which(amat != 0)] <- 1
graphEst <- dag2cpdag(myDAG)
amat.cpdag <- t(as(graphEst, "matrix"))
stopifnot(length(optAdjSet(amat, 2, 6)) == 0)
stopifnot(pcalg:::isAmenable(amat.cpdag, 5, 16, type = "cpdag"))
stopifnot(sort(optAdjSet(amat.cpdag, 5, 16)) == c(1, 2, 3, 4, 6, 7, 8, 11, 13))
stopifnot(optAdjSet(amat.cpdag, 5, 16) == optAdjSet(amat, 5, 16))
stopifnot(pcalg:::isAmenable(amat.cpdag, 2, 12, type = "cpdag"))
stopifnot(sort(optAdjSet(amat.cpdag, 2, 12)) == c(5, 7))
stopifnot(optAdjSet(amat.cpdag, 2, 12) == optAdjSet(amat, 2, 12))
stopifnot(optAdjSet(amat.cpdag, 2, c(3, 7, 12)) == optAdjSet(amat, 2, 12))
grpreg.nb = function(y, X, X.test, groups, nb.size=1, penalty=c("gLASSO","gSCAD","gMCP"), weights, taper, nlambda=100, lambda, max.iter=10000, tol=1e-4) { penalty = match.arg(penalty) group.numbers = as.numeric(groups) G = length(unique(group.numbers)) n = dim(X)[1] J = dim(X)[2] if(J > n) { stop("For group-regularized negative binomial regression, we require the total number of covariates to be less than or equal to sample size. Consider reducing the number of covariates.") } X = as.matrix(X) if(missing(X.test)) X.test = X n.test = dim(X.test)[1] if(missing(taper)){ if(penalty=="gSCAD") taper=4 if(penalty=="gMCP") taper=3 } if(missing(weights)){ weights = rep(0, G) for(g in 1:G){ weights[g] = sqrt(as.vector(table(group.numbers))[g]) } } if(length(y) != dim(X)[1]) stop("Non-conformable dimensions of y and X.") if(dim(X.test)[2] != J) stop("X and X.test should have the same number of columns.") if(penalty=="gSCAD"){ if(taper<=2) stop("The taper parameter must be greater than 2 for the group SCAD penalty.") } if(penalty=="gMCP"){ if(taper<=1) stop("The taper parameter must be greater than 1 for the group MCP penalty.") } if (nb.size<=0) stop("Size parameter for negative binomial density must be strictly positive.") if(any(y<0)) stop("All counts y must be greater than or equal to zero.") if(!all(y==floor(y))) stop("All counts y must be whole numbers.") if(!missing(weights)){ if(!all(weights>=0)) stop("All group-specific weights should be nonnegative.") } if(nlambda < 1) stop("The number of lambdas must be at least one.") if(!missing(lambda)) { nlambda = length(lambda) if (!all(lambda>0)) stop("All lambdas should be strictly positive.") } if(missing(lambda)) { max.lambda = 1 eps = .05 if(nlambda==1){ lambda = max.lambda*eps } else if(nlambda > 1) { lambda = rep(0, nlambda) lambda[1] = max.lambda lambda[nlambda] = max.lambda*eps if(nlambda >= 3){ for(l in 2:(nlambda-1)){ loglambda = log(lambda[1])-(l-1)*((log(lambda[1])-log(lambda[nlambda]))/(nlambda-1)) lambda[l] = exp(loglambda) } } } } glm.mod = stats::glm(y~X, family=MASS::negative.binomial(nb.size)) nb.MLE = glm.mod$coefficients MLE.hessian = stats::vcov(glm.mod, type="hessian") sqrt.Sigma.inv = pracma::sqrtm(solve(MLE.hessian))$B y.check = sqrt.Sigma.inv %*% nb.MLE groups.MLE = group.numbers + 1 groups.MLE = c(1, groups.MLE) weights.MLE = c(0, weights) if(penalty=="gLASSO"){ if(nlambda > 1){ nb.group = grpreg::grpreg(X=sqrt.Sigma.inv, y=y.check, group=groups.MLE, penalty="grLasso", nlambda=nlambda, lambda=lambda, eps=tol, max.iter=max.iter, group.multiplier = weights.MLE) } else if (nlambda == 1){ nb.group = grpreg::grpreg(X=sqrt.Sigma.inv, y=y.check, group=groups.MLE, penalty="grLasso", lambda=lambda, eps=tol, max.iter=max.iter, group.multiplier = weights.MLE) } } else if(penalty=="gSCAD"){ if(nlambda > 1){ nb.group = grpreg::grpreg(X=sqrt.Sigma.inv, y=y.check, group=groups.MLE, penalty="grSCAD", nlambda=nlambda, lambda=lambda, eps=tol, max.iter=max.iter, gamma=taper, group.multiplier=weights.MLE) } else if (nlambda == 1){ nb.group = grpreg::grpreg(X=sqrt.Sigma.inv, y=y.check, group=groups.MLE, penalty="grSCAD", lambda=lambda, eps=tol, max.iter=max.iter, gamma=taper, group.multiplier = weights.MLE) } } else if(penalty=="gMCP"){ if(nlambda > 1){ nb.group = grpreg::grpreg(X=sqrt.Sigma.inv, y=y.check, group=groups.MLE, penalty="grMCP", nlambda=nlambda, lambda=lambda, eps=tol, max.iter=max.iter, gamma=taper, group.multiplier = weights.MLE) } else if (nlambda == 1){ nb.group = grpreg::grpreg(X=sqrt.Sigma.inv, y=y.check, group=groups.MLE, 
penalty="grMCP", lambda=lambda, eps=tol, max.iter=max.iter, gamma=taper, group.multiplier = weights.MLE) } } beta0 = nb.group$beta[2,] beta = as.matrix(nb.group$beta[-c(1,2),]) colnames(beta) = NULL rownames(beta) = groups lambda = lambda L = length(lambda) loss.ind = matrix(0, n, L) if(L > 1){ for(l in 1:L){ for(m in 1:n){ eta.pred.ind = t(as.matrix(X[m,])) %*% beta[,l] + beta0[l] mu.pred.ind = exp(eta.pred.ind) loss.ind[m,l] = -lgamma(nb.size+y[m])+lgamma(nb.size)+lgamma(1+y[m])-y[m]*log(nb.size/(nb.size+mu.pred.ind))-nb.size*log(nb.size/(nb.size+mu.pred.ind)) } } loss = colSums(loss.ind) } else if(L==1){ for(m in 1:n){ eta.pred.ind = t(as.matrix(X[m,])) %*% beta + beta0 mu.pred.ind = exp(eta.pred.ind) loss.ind[m,1] = -lgamma(nb.size+y[m])+lgamma(nb.size)+lgamma(1+y[m])-y[m]*log(nb.size/(nb.size+mu.pred.ind))-nb.size*log(nb.size/(nb.size+mu.pred.ind)) } loss = sum(loss.ind) } mu.pred = matrix(0, n.test, L) if(L>1){ for(l in 1:nlambda){ mu.pred[,l] = exp(rep(beta0[l],dim(X.test)[1])+X.test%*%beta[,l]) } } else if(L==1){ mu.pred[,1] = exp(rep(beta0,dim(X.test)[1])+X.test%*%beta) } classifications = matrix(0, nrow=G, ncol=L) if(L > 1){ for(l in 1:L){ for (g in 1:G) { active = which(group.numbers == g) if(!identical(as.numeric(beta[active,l]), rep(0,length(active)))) classifications[g,l] = 1 } } } else if(L==1){ for (g in 1:G) { active = which(group.numbers == g) if(!identical(as.numeric(beta[active]), rep(0,length(active)))) classifications[g,1] = 1 } } row.names(classifications) = unique(groups) grpreg.nb.output <- list(lambda=lambda, beta0=beta0, beta=beta, mu.pred=mu.pred, classifications = classifications, loss = loss) return(grpreg.nb.output) }
sort_itemnames <- function(x, order = "idnm") { x[order_itemnames(x, order)] } order_itemnames <- function(x, order = "idnm") { din <- decompose_itemnames(x) order( din[, pmatch(substr(order, 1, 1), names(din))], din[, pmatch(substr(order, 2, 2), names(din))], din[, pmatch(substr(order, 3, 3), names(din))], din[, pmatch(substr(order, 4, 4), names(din))] ) }
observ_normal <- function(x, q, covmat){
  k = ncol(covmat)
  Omega = covmat
  n = ncol(x)
  q = matrix(q, k, 1)
  aux1 = t(q %*% matrix(1, 1, n) - x)          # n x k matrix of deviations from q
  Omega_inv = solve(Omega)
  ck = 1 / sqrt((2 * pi)^k * det(Omega))
  tpm = rowSums((aux1 %*% Omega_inv) * aux1)   # quadratic forms, one per column of x
  odf = ck * exp(-tpm / 2)
  odf = cbind(as.vector(odf, mode = "numeric"))
  dimnames(odf) = NULL
  return(odf)
}
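# A minimal sketch for observ_normal() (not from the original source): the
# bivariate standard-normal density evaluated at q for each column of x.
x_ex <- matrix(rnorm(20), nrow = 2)   # ten 2-d observations, one per column
observ_normal(x_ex, q = c(0, 0), covmat = diag(2))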
segmentDistances<-function(d, sites, surveys=NULL, distance.type ="directed-segment", add = TRUE, verbose=FALSE) { distance.type <- match.arg(distance.type, c("directed-segment", "Hausdorff", "PPA")) if(length(sites)!=nrow(as.matrix(d))) stop("'sites' needs to be of length equal to the number of rows/columns in d") if(!is.null(surveys)) if(length(sites)!=length(surveys)) stop("'sites' and 'surveys' need to be of the same length") siteIDs = unique(sites) nsite = length(siteIDs) nsurveysite<-numeric(nsite) for(i in 1:nsite) { nsurveysite[i] = sum(sites==siteIDs[i]) } if(sum(nsurveysite==1)>0) stop("All sites need to be surveyed at least twice") dmat = as.matrix(d) n = nrow(dmat) nseg = sum(nsurveysite)-nsite segnames = character(nseg) cnt=1 for(i in 1:nsite) { if(!is.null(surveys)) { surv = surveys[sites==siteIDs[i]] surv = sort(surv) } else surv = 1:nsurveysite[i] for(j in 1:(nsurveysite[i]-1)) { segnames[cnt] = paste0(siteIDs[i],"[",surv[j],"-",surv[j+1],"]") cnt = cnt+1 } } dsegmat = matrix(0, nseg, nseg) rownames(dsegmat) =segnames colnames(dsegmat) =segnames dinisegmat = dsegmat dfinsegmat = dsegmat dinifinsegmat = dsegmat os1 = 1 if(verbose) { cat("\nCalculating segment distances...\n") tb = txtProgressBar(1, nsite, style=3) } for(i1 in 1:nsite) { if(verbose) setTxtProgressBar(tb, i1) ind_surv1 = which(sites==siteIDs[i1]) if(!is.null(surveys)) ind_surv1 = ind_surv1[order(surveys[sites==siteIDs[i1]])] for(s1 in 1:(nsurveysite[i1]-1)) { os2 = 1 for(i2 in 1:nsite) { ind_surv2 = which(sites==siteIDs[i2]) if(!is.null(surveys)) ind_surv2 = ind_surv2[order(surveys[sites==siteIDs[i2]])] for(s2 in 1:(nsurveysite[i2]-1)) { ind12 = c(ind_surv1[s1],ind_surv1[s1+1],ind_surv2[s2],ind_surv2[s2+1]) dmat12 = dmat[c(ind_surv1[s1],ind_surv1[s1+1],ind_surv2[s2],ind_surv2[s2+1]), c(ind_surv1[s1],ind_surv1[s1+1],ind_surv2[s2],ind_surv2[s2+1])] dsegmat[os1,os2] <- .twoSegmentDistanceC(dmat12, type=distance.type, add) dsegmat[os2,os1] <- dsegmat[os1,os2] dinisegmat[os2,os1] <- dinisegmat[os1,os2]<-dmat[ind_surv1[s1],ind_surv2[s2]] dfinsegmat[os2,os1] <- dfinsegmat[os1,os2]<-dmat[ind_surv1[s1+1],ind_surv2[s2+1]] dinifinsegmat[os1,os2] <- dmat[ind_surv1[s1],ind_surv2[s2+1]] dinifinsegmat[os2,os1] <- dmat[ind_surv1[s1+1],ind_surv2[s2]] os2 = os2+1 } } os1 = os1+1 } } return(list(Dseg = as.dist(dsegmat), Dini=as.dist(dinisegmat), Dfin = as.dist(dfinsegmat), Dinifin=dinifinsegmat)) } trajectoryDistances<-function(d, sites, surveys=NULL, distance.type="DSPD", symmetrization = "mean" , add=TRUE, verbose=FALSE) { distance.type <- match.arg(distance.type, c("DSPD", "SPD", "Hausdorff")) if(length(sites)!=nrow(as.matrix(d))) stop("'sites' needs to be of length equal to the number of rows/columns in d") if(!is.null(surveys)) if(length(sites)!=length(surveys)) stop("'sites' and 'surveys' need to be of the same length") siteIDs = unique(sites) nsite = length(siteIDs) nsurveysite<-numeric(nsite) for(i in 1:nsite) nsurveysite[i] = sum(sites==siteIDs[i]) if(sum(nsurveysite==1)>0) stop("All sites need to be surveyed at least twice") n = nrow(as.matrix(d)) nseg = sum(nsurveysite)-nsite dtraj = matrix(0, nrow=nsite, ncol = nsite) rownames(dtraj) = siteIDs colnames(dtraj) = siteIDs if(distance.type=="DSPD"){ lsd = segmentDistances(d,sites, surveys,distance.type="directed-segment", add, verbose) dsegmat = as.matrix(lsd$Dseg) if(verbose) { cat("\nCalculating trajectory distances...\n") tb = txtProgressBar(1, nsite, style=3) } for(i1 in 1:nsite) { if(verbose) setTxtProgressBar(tb, i1) for(i2 in 1:nsite) { dt12 = 0 for(s1 in 
1:(nsurveysite[i1]-1)) { dt12ivec = numeric(0) iseg1 = sum(nsurveysite[1:i1]-1)-(nsurveysite[i1]-1)+s1 for(s2 in 1:(nsurveysite[i2]-1)) { iseg2 = sum(nsurveysite[1:i2]-1)-(nsurveysite[i2]-1)+s2 dt12ivec = c(dt12ivec, dsegmat[iseg1, iseg2]) } dt12 = dt12 + min(dt12ivec) } dt12 = dt12/(nsurveysite[i1]-1) dt21 = 0 for(s2 in 1:(nsurveysite[i2]-1)) { dt21ivec = numeric(0) iseg2 = sum(nsurveysite[1:i2]-1)-(nsurveysite[i2]-1)+s2 for(s1 in 1:(nsurveysite[i1]-1)) { iseg1 = sum(nsurveysite[1:i1]-1)-(nsurveysite[i1]-1)+s1 dt21ivec = c(dt21ivec, dsegmat[iseg1, iseg2]) } dt21 = dt21 + min(dt21ivec) } dt21 = dt21/(nsurveysite[i2]-1) if(!is.null(symmetrization)) { dtraj[i1,i2] = do.call(symmetrization, list(c(dt12,dt21))) dtraj[i2,i1] = dtraj[i1,i2] } else { dtraj[i1,i2] = dt12 dtraj[i2,i1] = dt21 } } } } else if(distance.type=="SPD") { dmat = as.matrix(d) for(i1 in 1:nsite) { ind_surv1 = which(sites==siteIDs[i1]) if(!is.null(surveys)) ind_surv1 = ind_surv1[order(surveys[sites==siteIDs[i1]])] for(i2 in 1:nsite) { ind_surv2 = which(sites==siteIDs[i2]) if(!is.null(surveys)) ind_surv2 = ind_surv2[order(surveys[sites==siteIDs[i2]])] dt12 = 0 for(p1 in 1:nsurveysite[i1]) { dt12ivec = numeric(0) ip1 = ind_surv1[p1] for(s2 in 1:(nsurveysite[i2]-1)) { ipi2 = ind_surv2[s2] ipe2 = ind_surv2[s2+1] dt12ivec = c(dt12ivec, .distanceToSegmentC(dmat[ipi2,ipe2], dmat[ip1, ipi2], dmat[ip1,ipe2], add)[3]) } dt12 = dt12 + min(dt12ivec) } dt12 = dt12/nsurveysite[i1] dt21 = 0 for(p2 in 1:nsurveysite[i2]) { dt21ivec = numeric(0) ip2 = ind_surv2[p2] for(s1 in 1:(nsurveysite[i1]-1)) { ipi1 = ind_surv1[s1] ipe1 = ind_surv1[s1+1] dt21ivec = c(dt21ivec, .distanceToSegmentC(dmat[ipi1,ipe1], dmat[ip2, ipi1], dmat[ip2,ipe1], add)[3]) } dt21 = dt21 + min(dt21ivec) } dt21 = dt21/nsurveysite[i2] if(!is.null(symmetrization)) { dtraj[i1,i2] = (dt12+dt21)/2 dtraj[i2,i1] = dtraj[i1,i2] } else { dtraj[i1,i2] = dt12 dtraj[i2,i1] = dt21 } } } } else if(distance.type=="Hausdorff") { dmat = as.matrix(d) for(i1 in 1:nsite) { ind_surv1 = which(sites==siteIDs[i1]) if(!is.null(surveys)) ind_surv1 = ind_surv1[order(surveys[sites==siteIDs[i1]])] for(i2 in 1:nsite) { ind_surv2 = which(sites==siteIDs[i2]) if(!is.null(surveys)) ind_surv2 = ind_surv2[order(surveys[sites==siteIDs[i2]])] dt12 = 0 dt12vec = numeric(0) for(p1 in 1:nsurveysite[i1]) { ip1 = ind_surv1[p1] for(s2 in 1:(nsurveysite[i2]-1)) { ipi2 = ind_surv2[s2] ipe2 = ind_surv2[s2+1] dt12vec = c(dt12vec, .distanceToSegmentC(dmat[ipi2,ipe2], dmat[ip1, ipi2], dmat[ip1,ipe2], add)[3]) } } dt12 = max(dt12vec) dt21 = 0 dt21vec = numeric(0) for(p2 in 1:nsurveysite[i2]) { ip2 = ind_surv2[p2] for(s1 in 1:(nsurveysite[i1]-1)) { ipi1 = ind_surv1[s1] ipe1 = ind_surv1[s1+1] dt21vec = c(dt21vec, .distanceToSegmentC(dmat[ipi1,ipe1], dmat[ip2, ipi1], dmat[ip2,ipe1], add)[3]) } } dt21 = max(dt21vec) dtraj[i1,i2] = max(dt12, dt21) dtraj[i2,i1] = dtraj[i1,i2] } } } else stop("Wrong distance type") if(!is.null(symmetrization)) return(as.dist(dtraj)) return(dtraj) } trajectoryLengths<-function(d, sites, surveys=NULL, relativeToInitial = FALSE, all=FALSE, verbose= FALSE) { if(length(sites)!=nrow(as.matrix(d))) stop("'sites' needs to be of length equal to the number of rows/columns in d") if(!is.null(surveys)) if(length(sites)!=length(surveys)) stop("'sites' and 'surveys' need to be of the same length") siteIDs = unique(sites) nsite = length(siteIDs) surveyIDs<-unique(surveys) nsurvey<-length(surveyIDs) nsurveysite<-numeric(nsite) for(i in 1:nsite) { nsurveysite[i] = sum(sites==siteIDs[i]) } if(sum(nsurveysite==1)>0) 
stop("All sites need to be surveyed at least twice") dmat = as.matrix(d) n = nrow(dmat) maxnsurveys = max(nsurveysite) if(!all) { lengths = as.data.frame(matrix(NA, nrow=nsite, ncol=maxnsurveys)) row.names(lengths)<-siteIDs if(relativeToInitial) names(lengths)<-c(paste0("Lt1_t",as.character(2:(maxnsurveys))),"Trajectory") else names(lengths)<-c(paste0("S",as.character(1:(maxnsurveys-1))),"Trajectory") if(verbose) { cat("\nCalculating trajectory lengths...\n") tb = txtProgressBar(1, nsite, style=3) } for(i1 in 1:nsite) { if(verbose) setTxtProgressBar(tb, i1) ind_surv1 = which(sites==siteIDs[i1]) if(!is.null(surveys)) ind_surv1 = ind_surv1[order(surveys[sites==siteIDs[i1]])] for(s1 in 1:(nsurveysite[i1]-1)) { if(relativeToInitial) lengths[i1,s1] = dmat[ind_surv1[1], ind_surv1[s1+1]] else lengths[i1,s1] = dmat[ind_surv1[s1], ind_surv1[s1+1]] } lengths[i1, maxnsurveys] = sum(lengths[i1,], na.rm=T) } } else{ if(nsite==1) { vectord<-as.vector(d) lengths<-as.data.frame(matrix(NA, nrow=nsite, ncol=((nsurvey*(nsurvey-1))/2))) lengths[1,]<-vectord listsurvey<-c(1:nsurvey) tsurvey<-c() for (i in 1: nsurvey){ tsurvey<-c(tsurvey,paste0("Lt",listsurvey[i])) } comb<-combn(tsurvey, 2) tsurvey<-c(paste0(comb[1,], comb[2,])) colnames(lengths)<-c(tsurvey) rownames(lengths)<-c(siteIDs) }else{ seqline<-c(nsurvey-1, nsurvey, nsurvey) for(i in 1:(nsite-1)){ seqline<-c(seqline, seqline[length(seqline)-2]+nsurvey,seqline[length(seqline)-1]+nsurvey,seqline[length(seqline)-1]+nsurvey) } seqcolumn<-c(1,1,2) for(i in 1:(nsite-1)){ seqcolumn<-c(seqcolumn, seqcolumn[length(seqcolumn)-2]+nsurvey,seqcolumn[length(seqcolumn)-2]+nsurvey,seqcolumn[length(seqcolumn)]+nsurvey) } alllengths<-c() for(i in 1:(length(seqline))){ alllengths<-c(alllengths, dmat[seqline[i], seqcolumn[i]]) } lengths<-as.data.frame(matrix(NA, nrow=nsite, ncol=((nsurvey*(nsurvey-1))/2))) seqlength<-c(seq(0,length(alllengths),length(alllengths)/nsite)) for(i in 1:nsite){ lengths[i,]<-alllengths[(seqlength[i]+1):(seqlength[i+1])] } listsurvey<-c(1:nsurvey) tsurvey<-c() for (i in 1: nsurvey){ tsurvey<-c(tsurvey,paste0("Lt",listsurvey[i])) } comb<-combn(tsurvey, 2) tsurvey<-c(paste0(comb[1,], comb[2,])) colnames(lengths)<-c(tsurvey) rownames(lengths)<-c(siteIDs) }} return(lengths) } trajectoryLengths2D<-function(xy,sites,surveys, relativeToInitial=FALSE, all=FALSE, verbose = FALSE) { xy_temp<-as.data.frame(xy) xy_temp$sites<-sites xy_temp$surveys<-surveys xy_temp<-xy_temp[order(xy_temp$sites,xy_temp$surveys),] xy<-xy_temp[,1:2] sites<-c(xy_temp$sites) surveys<-c(xy_temp$surveys) siteIDs = unique(sites) surveyIDs<-unique(surveys) nsite<-length(siteIDs) nsurvey<-length(surveyIDs) if(nsite!=nrow(xy)/nsurvey) stop("'sites' needs to be of length equal in xy") if(nrow(xy)!=nsurvey*nsite) stop("All sites need to be surveyed at least twice") D<-dist(xy) return(trajectoryLengths(D,sites,surveys,relativeToInitial = relativeToInitial, all = all, verbose = verbose)) } trajectoryAngles<-function(d, sites, surveys=NULL, all = FALSE, relativeToInitial = FALSE, stats = TRUE, add=TRUE, verbose= FALSE) { if(length(sites)!=nrow(as.matrix(d))) stop("'sites' needs to be of length equal to the number of rows/columns in d") if(!is.null(surveys)) if(length(sites)!=length(surveys)) stop("'sites' and 'surveys' need to be of the same length") siteIDs = unique(sites) nsite = length(siteIDs) nsurveysite<-numeric(nsite) for(i in 1:nsite) { nsurveysite[i] = sum(sites==siteIDs[i]) } if(sum(nsurveysite==1)>0) stop("All sites need to be surveyed at least twice") dmat = as.matrix(d) n = 
nrow(dmat) maxnsurveys = max(nsurveysite) if(!all) angles = matrix(NA, nrow=nsite, ncol=maxnsurveys+1) else { angles = matrix(NA, nrow=nsite, ncol=choose(maxnsurveys,3)+3) } if(verbose) { cat("\nCalculating trajectory angles...\n") tb = txtProgressBar(1, nsite, style=3) } for(i1 in 1:nsite) { if(verbose) setTxtProgressBar(tb, i1) ind_surv1 = which(sites==siteIDs[i1]) if(!is.null(surveys)) ind_surv1 = ind_surv1[order(surveys[sites==siteIDs[i1]])] if(!all) { for(s1 in 1:(nsurveysite[i1]-2)) { if(relativeToInitial) { d12 = dmat[ind_surv1[1], ind_surv1[s1 + 1]] d23 = dmat[ind_surv1[s1 + 1], ind_surv1[s1 +2]] d13 = dmat[ind_surv1[1], ind_surv1[s1 + 2]] } else { d12 = dmat[ind_surv1[s1], ind_surv1[s1+1]] d23 = dmat[ind_surv1[s1+1], ind_surv1[s1+2]] d13 = dmat[ind_surv1[s1], ind_surv1[s1+2]] } angles[i1, s1] = .angleConsecutiveC(d12,d23,d13, add) } x <- circular::circular(angles[i1,1:(nsurveysite[i1]-2)], units="degrees") angles[i1, ncol(angles)-2] = circular::mean.circular(x, na.rm=T) angles[i1, ncol(angles)-1] = circular::sd.circular(x, na.rm=T) angles[i1, ncol(angles)] = circular::rho.circular(x, na.rm=T) } else { cs = combn(length(ind_surv1),3) dsub = dmat[ind_surv1, ind_surv1] for(s in 1:ncol(cs)) { d12 = dsub[cs[1,s],cs[2,s]] d23 = dsub[cs[2,s],cs[3,s]] d13 = dsub[cs[1,s],cs[3,s]] angles[i1, s] = .angleConsecutiveC(d12,d23,d13, add) } x <- circular::circular(angles[i1,], units="degrees") angles[i1, ncol(angles)-2] = circular::mean.circular(x, na.rm=T) angles[i1, ncol(angles)-1] = circular::sd.circular(x, na.rm=T) angles[i1, ncol(angles)] = circular::rho.circular(x, na.rm=T) } } angles = as.data.frame(angles) row.names(angles)<-siteIDs if(!all) { if(relativeToInitial) { names(angles) <- c(paste0("t", rep("1",maxnsurveys -2), "-S", as.character(2:(maxnsurveys - 1))), "mean", "sd", "rho") } else { names(angles)<-c(paste0("S",as.character(1:(maxnsurveys-2)),"-S", as.character(2:(maxnsurveys-1))), "mean", "sd", "rho") } } else { names(angles)<-c(paste0("A",as.character(1:(ncol(angles)-3))),"mean", "sd", "rho") } if(!stats) angles = angles[,1:(ncol(angles)-3), drop=FALSE] return(angles) } trajectoryAngles2D<-function(xy,sites,surveys,relativeToInitial=FALSE, betweenSegments=TRUE) { xy_temp<-as.data.frame(xy) xy_temp$sites<-sites xy_temp$surveys<-surveys xy_temp<-xy_temp[order(xy_temp$sites,xy_temp$surveys),] xy<-xy_temp[,1:2] sites<-c(xy_temp$sites) surveys<-c(xy_temp$surveys) siteIDs<-unique(sites) surveyIDs<-unique(surveys) nsite<-length(siteIDs) nsurvey<-length(surveyIDs) nsurveysite<-numeric(nsite) maxnsurveys<-length(surveys) dx<-as.data.frame(matrix(NA, nrow=nsite, ncol=nsurvey)) x<-xy[,1] seq1<-c(nsurvey) for (i in 1:(nsite-1)){ seq1<-c(seq1,seq1[i]+nsurvey) } seq2<-c(1) for (i in 1:(nsite-1)){ seq2<-c(seq2,seq2[i]+nsurvey) } for (i in 1:nsite){ dx[i,]<-x[seq2[i]:seq1[i]] } dy<-as.data.frame(matrix(NA, nrow=nsite, ncol=nsurvey)) y<-xy[,2] for (i in 1:nsite){ dy[i,]<-y[seq2[i]:seq1[i]] } if(length(sites)!=length(surveys)) stop("'sites' and 'surveys' need to be of the same length") if(nrow(xy)!=nsite*nsurvey) stop("nrow(xy) need to be equal to 'number of sites * number of surveys'") dxmod<-as.data.frame(matrix(NA, nrow=nsite, ncol=nsurvey-1)) for(i in 1:ncol(dxmod)){ dxmod[,i]<-dx[,i]-dx[,i+1] } dymod<-as.data.frame(matrix(NA, nrow=nsite, ncol=nsurvey-1)) for(i in 1:ncol(dymod)){ dymod[,i]<-dy[,i]-dy[,i+1] } dmod<-as.data.frame(cbind(dxmod,dymod)) if(!betweenSegments){ Angle_alpha_temp<-as.data.frame(matrix(NA, nrow=nsite, ncol=nsurvey-1)) for(i in 1:ncol(Angle_alpha_temp)){ 
Angle_alpha_temp[,i]<-apply(dmod[c(i,i+nsurvey-1)], 1, function(irow) { atan(irow[2]/irow[1]) }) } Angle_alpha_temp<-Angle_alpha_temp*(180/pi) dxy<-as.data.frame (cbind(dx,dy)) Angle_alpha<-as.data.frame(matrix(NA, nrow=nsite, ncol=nsurvey-1)) for (i in 1:ncol(Angle_alpha_temp)) { Angle_alpha[,i]<-as.numeric(c(ifelse(dxy[,i]==dxy[,i+1] & dxy[,i+(nsurvey)]<dxy[,i+(nsurvey+1)],0, ifelse(dxy[,i]==dxy[,i+1] & dxy[,i+(nsurvey)]>dxy[,i+(nsurvey+1)],180, ifelse(dxy[,i]<dxy[,i+1] & dxy[,i+(nsurvey)]==dxy[,i+(nsurvey+1)],90, ifelse(dxy[,i]>dxy[,i+1] & dxy[,i+(nsurvey)]==dxy[,i+(nsurvey+1)],270, ifelse(dxy[,i] < dxy[,i+1] & dxy[,i+(nsurvey)]< dxy[,i+(nsurvey+1)],90-Angle_alpha_temp[,i], ifelse(dxy[,i]< dxy[,i+1] & dxy[,i+(nsurvey)] > dxy[,i+(nsurvey+1)],90-Angle_alpha_temp[,i], ifelse(dxy[,i]>dxy[,i+1] & dxy[,i+(nsurvey)] > dxy[,i+(nsurvey+1)],270-Angle_alpha_temp[,i], ifelse(dxy[,i]>dxy[,i+1] & dxy[,i+(nsurvey)] < dxy[,i+(nsurvey+1)],270-Angle_alpha_temp[,i],"ERROR")))))))))) } colnames(Angle_alpha) <- c(paste0("Axis2", "-t", as.character(1:(nsurvey-1)))) angles_out = Angle_alpha } else if(!relativeToInitial){ dxy<-as.data.frame (cbind(dx,dy)) pos<-as.data.frame(matrix(NA, nrow=nsite, ncol=nsurvey-2)) for (i in 1:(nsurvey-2)){ pos[,i]<-c((dxy[,i+1]-dxy[,i])*(dxy[,nsurvey+i+2]-dxy[,nsurvey+i])-(dxy[,nsurvey+i+1]-dxy[,nsurvey+i])*(dxy[,i+2]-dxy[,i])) } Scons<-as.data.frame(trajectoryLengths2D(xy,sites,surveys, relativeToInitial=FALSE)) Scons<-Scons[,-c(ncol(Scons)-1,ncol(Scons))] distn2<-as.data.frame(matrix(NA, nrow=nsite, ncol=nsurvey-2)) for (i in 1:(nsurvey-2)){ distn2[,i]<-sqrt(((dxy[,i+2]-dxy[,i])^2)+((dxy[,nsurvey+i+2]-dxy[,nsurvey+i])^2)) } RDTcons<-distn2-Scons Angle_theta_temp<-as.data.frame(matrix(NA, nrow=nsite, ncol=nsurvey-2)) xvector1<-as.data.frame(matrix(NA, nrow=nsite, ncol=nsurvey-1)) yvector1<-as.data.frame(matrix(NA, nrow=nsite, ncol=nsurvey-1)) xvector2<-as.data.frame(matrix(NA, nrow=nsite, ncol=nsurvey-1)) yvector2<-as.data.frame(matrix(NA, nrow=nsite, ncol=nsurvey-1)) for(i in 1:(nsurvey-2)){ xvector1[,i] = dxy[,i] - dxy[,i+1] yvector1[,i] = dxy[,i+nsurvey] - dxy[,i+nsurvey+1] xvector2 [,i] = dxy[,i+2] - dxy[,i+1] yvector2 [,i] = dxy[,i+nsurvey+2] - dxy[,i+nsurvey+1] num = (xvector1[,i] * xvector2[,i] + yvector1[,i] * yvector2[,i]) den = sqrt(xvector1[,i]^2 + yvector1[,i]^2) * sqrt(xvector2[,i]^2 +yvector2[,i]^2) Angle_theta_temp[,i] = (360 * acos(num/den))/(2 * pi) } Angle_theta<-as.data.frame(matrix(NA, nrow=nsite, ncol=nsurvey-2)) for (i in 1:ncol(Angle_theta_temp)){ Angle_theta[,i]<-c(ifelse(Angle_theta_temp[,i]==180,0, ifelse(Angle_theta_temp[,i]==0,180, ifelse(pos[,i]<0 & RDTcons[,i]<0,180-Angle_theta_temp[,i], ifelse(pos[,i]>0 & RDTcons[,i]<0 ,360-(180-Angle_theta_temp[,i]), ifelse(pos[,i]<0 & RDTcons[,i]>0 & Angle_theta_temp[,i]<90,180-Angle_theta_temp[,i], ifelse(pos[,i]<0 & RDTcons[,i]>0 & Angle_theta_temp[,i]>90,180-Angle_theta_temp[,i], ifelse(pos[,i]>0 & RDTcons[,i]>0 & Angle_theta_temp[,i]<90 ,270-(90-Angle_theta_temp[,i]), ifelse(pos[,i]>0 & RDTcons[,i]>0 & Angle_theta_temp[,i]>90 ,270+(Angle_theta_temp[,i]-90),"ERROR"))))))))) } colnames(Angle_theta) <- c(paste0("t", as.character(1:(nsurvey-2)), "-t", as.character(2:(nsurvey-1)))) angles_out = Angle_theta } else { dxy<-as.data.frame (cbind(dx,dy)) pos<-as.data.frame(matrix(NA, nrow=nsite, ncol=nsurvey-2)) for (i in 1:(nsurvey-2)){ pos[,i]<-c((dxy[,i+1]-dxy[,i])*(dxy[,nsurvey+i+2]-dxy[,nsurvey+i])-(dxy[,nsurvey+i+1]-dxy[,nsurvey+i])*(dxy[,i+2]-dxy[,i])) } 
Scons<-as.data.frame(trajectoryLengths2D(xy,sites,surveys, relativeToInitial=FALSE)) Scons<-Scons[,-c(ncol(Scons)-1,ncol(Scons))] distn2<-as.data.frame(matrix(NA, nrow=nsite, ncol=nsurvey-2)) for (i in 1:(nsurvey-2)){ distn2[,i]<-sqrt(((dxy[,i+2]-dxy[,i])^2)+((dxy[,nsurvey+i+2]-dxy[,nsurvey+i])^2)) } RDTcons<-distn2-Scons Angle_theta_temp<-as.data.frame(matrix(NA, nrow=nsite, ncol=nsurvey-2)) xvector1<-as.data.frame(matrix(NA, nrow=nsite, ncol=nsurvey-1)) yvector1<-as.data.frame(matrix(NA, nrow=nsite, ncol=nsurvey-1)) xvector2<-as.data.frame(matrix(NA, nrow=nsite, ncol=nsurvey-1)) yvector2<-as.data.frame(matrix(NA, nrow=nsite, ncol=nsurvey-1)) for(i in 1:(nsurvey-2)){ xvector1[,i] = dxy[,i] - dxy[,i+1] yvector1[,i] = dxy[,i+nsurvey] - dxy[,i+nsurvey+1] xvector2 [,i] = dxy[,i+2] - dxy[,i+1] yvector2 [,i] = dxy[,i+nsurvey+2] - dxy[,i+nsurvey+1] num = (xvector1[,i] * xvector2[,i] + yvector1[,i] * yvector2[,i]) den = sqrt(xvector1[,i]^2 + yvector1[,i]^2) * sqrt(xvector2[,i]^2 +yvector2[,i]^2) Angle_theta_temp[,i] = (360 * acos(num/den))/(2 * pi) } pos<-as.data.frame(matrix(NA, nrow=nsite, ncol=nsurvey-2)) for (i in 1:(nsurvey-2)){ pos[,i]<-c((dxy[,2]-dxy[,1])*(dxy[,nsurvey+i+2]-dxy[,nsurvey+1])-(dxy[,nsurvey+2]-dxy[,nsurvey+1])*(dxy[,i+2]-dxy[,1])) } Angle_omega_temp<-as.data.frame(matrix(NA, nrow=nsite, ncol=nsurvey-2)) xvector1<-as.data.frame(matrix(NA, nrow=nsite, ncol=nsurvey-1)) yvector1<-as.data.frame(matrix(NA, nrow=nsite, ncol=nsurvey-1)) xvector2<-as.data.frame(matrix(NA, nrow=nsite, ncol=nsurvey-1)) yvector2<-as.data.frame(matrix(NA, nrow=nsite, ncol=nsurvey-1)) for(i in 1:(nsurvey-2)){ xvector1[,i] = dxy[,1] - dxy[,2] yvector1[,i] = dxy[,1+nsurvey] - dxy[,2+nsurvey] xvector2 [,i] = dxy[,i+2] - dxy[,2] yvector2 [,i] = dxy[,i+nsurvey+2] - dxy[,2+nsurvey] num = (xvector1[,i] * xvector2[,i] + yvector1[,i] * yvector2[,i]) den = sqrt(xvector1[,i]^2 + yvector1[,i]^2) * sqrt(xvector2[,i]^2 +yvector2[,i]^2) Angle_omega_temp[,i] = (360 * acos(num/den))/(2 * pi) } Angle_omega<-as.data.frame(matrix(NA, nrow=nrow(Angle_omega_temp), ncol=ncol(Angle_omega_temp)-1)) for (i in 1:ncol(Angle_omega_temp)){ Angle_omega[,i]<-c(ifelse(Angle_theta_temp[,i]==180,0, ifelse(Angle_theta_temp[,i]==0,180, ifelse(pos[,i]<0,180-Angle_omega_temp[,i], ifelse(pos[,i]>0,360-(180-Angle_omega_temp[,i]),"ERROR"))))) } colnames(Angle_omega) <- c(paste0("S1", "-t", as.character(2:(nsurvey-1)))) angles_out = Angle_omega } return(angles_out) } trajectoryProjection<-function(d, target, trajectory, tol = 0.000001, add=TRUE) { if(length(trajectory)<2) stop("Trajectory needs to include at least two states") dmat = as.matrix(d) npoints = length(target) nsteps = length(trajectory) -1 d2ref = dmat[target, trajectory, drop=FALSE] dsteps = diag(dmat[trajectory[1:(length(trajectory)-1)], trajectory[2:length(trajectory)]]) dstepcum = rep(0,nsteps+1) if(nsteps>1) { for(i in 2:nsteps) { dstepcum[i] = dstepcum[i-1]+dsteps[i-1] } } dstepcum[nsteps+1] = sum(dsteps) projH = matrix(NA, nrow=npoints, ncol = nsteps) projA1 = matrix(NA, nrow=npoints, ncol = nsteps) projA2 = matrix(NA, nrow=npoints, ncol = nsteps) whichstep = rep(NA, npoints) dgrad = rep(NA, npoints) posgrad = rep(NA, npoints) for(i in 1:npoints) { for(j in 1:nsteps) { p <-.projectionC(dsteps[j], d2ref[i, j], d2ref[i, j+1], add) if((!is.na(p[3])) & (p[1]>-tol) & (p[2]>-tol)) { projA1[i,j] = p[1] projA2[i,j] = p[2] projH[i,j] = p[3] if(is.na(dgrad[i])) { dgrad[i] = p[3] whichstep[i] = j } else { if(p[3]<dgrad[i]) { dgrad[i] = p[3] whichstep[i] = j } } } } if(!is.na(whichstep[i])) 
{ dg = dstepcum[whichstep[i]]+projA1[i,whichstep[i]] posgrad[i] = dg/sum(dsteps) } } res = data.frame(distanceToTrajectory=dgrad, segment = whichstep, relativePosition = posgrad) row.names(res)<-row.names(d2ref) return(res) } trajectoryConvergence<-function(d, sites, surveys = NULL, symmetric = FALSE, add=TRUE, verbose = FALSE){ if(length(sites)!=nrow(as.matrix(d))) stop("'sites' needs to be of length equal to the number of rows/columns in d") if(!is.null(surveys)) if(length(sites)!=length(surveys)) stop("'sites' and 'surveys' need to be of the same length") siteIDs = unique(sites) nsite = length(siteIDs) nsurveysite<-numeric(nsite) for(i in 1:nsite) nsurveysite[i] = sum(sites==siteIDs[i]) if(sum(nsurveysite<3)>0) stop("All sites need to be surveyed at least three times") n = nrow(as.matrix(d)) tau = matrix(NA, nrow=nsite, ncol = nsite) rownames(tau) = siteIDs colnames(tau) = siteIDs p.value = tau dmat = as.matrix(d) if(verbose) { cat("\nCalculating trajectory convergence...\n") tb = txtProgressBar(1, nsite, style=3) } for(i1 in 1:(nsite-1)) { if(verbose) setTxtProgressBar(tb, i1) ind_surv1 = which(sites==siteIDs[i1]) if(!is.null(surveys)) ind_surv1 = ind_surv1[order(surveys[sites==siteIDs[i1]])] for(i2 in (i1+1):nsite) { ind_surv2 = which(sites==siteIDs[i2]) if(!is.null(surveys)) ind_surv2 = ind_surv2[order(surveys[sites==siteIDs[i2]])] if(!symmetric) { trajectory = ind_surv2 target = ind_surv1 trajProj = trajectoryProjection(d,target, trajectory, add=add) dT = trajProj$distanceToTrajectory mk.test = MannKendall(dT) tau[i1,i2] = mk.test$tau p.value[i1,i2] = mk.test$sl trajectory = ind_surv1 target = ind_surv2 trajProj = trajectoryProjection(d,target, trajectory, add=add) dT = trajProj$distanceToTrajectory mk.test = MannKendall(dT) tau[i2,i1] = mk.test$tau p.value[i2,i1] = mk.test$sl } else { if(length(ind_surv1)==length(ind_surv2)) { dT = numeric(length(ind_surv1)) for(j in 1:length(ind_surv1)) dT[j] = dmat[ind_surv1[j], ind_surv2[j]] mk.test = MannKendall(dT) tau[i1,i2] = mk.test$tau p.value[i1,i2] = mk.test$sl tau[i2,i1] = mk.test$tau p.value[i2,i1] = mk.test$sl } else { warning(paste0("sites ",i1, " and ",i2," do not have the same number of surveys.")) } } } } return(list(tau = tau, p.value = p.value)) } trajectoryDirectionality<-function(d, sites, surveys = NULL, add=TRUE, verbose = FALSE) { if(length(sites)!=nrow(as.matrix(d))) stop("'sites' needs to be of length equal to the number of rows/columns in d") if(!is.null(surveys)) if(length(sites)!=length(surveys)) stop("'sites' and 'surveys' need to be of the same length") siteIDs = unique(sites) nsite = length(siteIDs) nsurveysite<-numeric(nsite) for(i in 1:nsite) nsurveysite[i] = sum(sites==siteIDs[i]) if(sum(nsurveysite<3)>0) stop("All sites need to be surveyed at least three times") dmat = as.matrix(d) dir = rep(NA, nsite) names(dir) = siteIDs if(verbose) { cat("\nAssessing trajectory directionality...\n") tb = txtProgressBar(1, nsite, style=3) } for(i1 in 1:nsite) { if(verbose) setTxtProgressBar(tb, i1) ind_surv1 = which(sites==siteIDs[i1]) if(!is.null(surveys)) ind_surv1 = ind_surv1[order(surveys[sites==siteIDs[i1]])] dsub = dmat[ind_surv1, ind_surv1] n = length(ind_surv1) den = 0 num = 0 if(n>2) { for(i in 1:(n-2)) { for(j in (i+1):(n-1)) { for(k in (j+1):n) { da = dsub[i,j] db = dsub[j,k] dab = dsub[i,k] theta = .angleConsecutiveC(da,db,dab, add) if(!is.na(theta)) { den = den + (da + db) num = num + (da + db)*((180-theta)/180) } } } } dir[i1] = num/den } } return(dir) }
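# A small worked sketch for trajectoryLengths() above (not from the original
# source): two sites surveyed three times each in a 2-d state space, with
# per-segment lengths and the total trajectory length per site.
xy_ex <- rbind(c(0, 0), c(1, 0), c(1, 1),   # site A
               c(0, 0), c(0, 1), c(1, 1))   # site B
trajectoryLengths(dist(xy_ex), sites = rep(c("A", "B"), each = 3),
                  surveys = rep(1:3, 2))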
confint.mkinfit <- function(object, parm, level = 0.95, alpha = 1 - level, cutoff, method = c("quadratic", "profile"), transformed = TRUE, backtransform = TRUE, cores = parallel::detectCores(), rel_tol = 0.01, quiet = FALSE, ...) { tparms <- parms(object, transformed = TRUE) bparms <- parms(object, transformed = FALSE) tpnames <- names(tparms) bpnames <- names(bparms) return_pnames <- if (missing(parm)) { if (backtransform) bpnames else tpnames } else { parm } p <- length(return_pnames) method <- match.arg(method) a <- c(alpha / 2, 1 - (alpha / 2)) quantiles <- qt(a, object$df.residual) covar_pnames <- if (missing(parm)) { if (transformed) tpnames else bpnames } else { parm } return_parms <- if (backtransform) bparms[return_pnames] else tparms[return_pnames] covar_parms <- if (transformed) tparms[covar_pnames] else bparms[covar_pnames] if (transformed) { covar <- try(solve(object$hessian), silent = TRUE) } else { covar <- try(solve(object$hessian_notrans), silent = TRUE) } if (!is.numeric(covar) | is.na(covar[1])) { ses <- lci <- uci <- rep(NA, p) } else { ses <- sqrt(diag(covar))[covar_pnames] lci <- covar_parms + quantiles[1] * ses uci <- covar_parms + quantiles[2] * ses if (transformed & backtransform) { lci_back <- backtransform_odeparms(lci, object$mkinmod, object$transform_rates, object$transform_fractions) uci_back <- backtransform_odeparms(uci, object$mkinmod, object$transform_rates, object$transform_fractions) return_errparm_names <- intersect(names(object$errparms), return_pnames) lci <- c(lci_back, lci[return_errparm_names]) uci <- c(uci_back, uci[return_errparm_names]) } } ci <- cbind(lower = lci, upper = uci) if (method == "profile") { ci_quadratic <- ci if (!quiet) message("Profiling the likelihood") lci <- uci <- rep(NA, p) names(lci) <- names(uci) <- return_pnames profile_pnames <- if(missing(parm)) names(parms(object)) else parm if (missing(cutoff)) { cutoff <- 0.5 * qchisq(1 - alpha, 1) } all_parms <- parms(object) get_ci <- function(pname) { pnames_free <- setdiff(names(all_parms), pname) profile_ll <- function(x) { pll_cost <- function(P) { parms_cost <- all_parms parms_cost[pnames_free] <- P[pnames_free] parms_cost[pname] <- x - object$ll(parms_cost) } - nlminb(all_parms[pnames_free], pll_cost)$objective } cost <- function(x) { (cutoff - (object$logLik - profile_ll(x)))^2 } lower_quadratic <- ci_quadratic["lower"][pname] upper_quadratic <- ci_quadratic["upper"][pname] ltol <- if (!is.na(lower_quadratic)) rel_tol * lower_quadratic else .Machine$double.eps^0.25 utol <- if (!is.na(upper_quadratic)) rel_tol * upper_quadratic else .Machine$double.eps^0.25 lci_pname <- optimize(cost, lower = 0, upper = all_parms[pname], tol = ltol)$minimum uci_pname <- optimize(cost, lower = all_parms[pname], upper = ifelse(grepl("^f_|^g$", pname), 1, 15 * all_parms[pname]), tol = utol)$minimum return(c(lci_pname, uci_pname)) } ci <- t(parallel::mcmapply(get_ci, profile_pnames, mc.cores = cores)) } colnames(ci) <- paste0( format(100 * a, trim = TRUE, scientific = FALSE, digits = 3), "%") return(ci) }
read_kfamset <- function (filename, format = "auto", as.letters = TRUE) {
  f <- readLines(con = filename)
  if (length(f) == 0) {
    stop(sprintf("Unable to read file %s!", filename))
  }
  # NOTE: the pattern strings in the str_locate() calls below were lost in
  # extraction (each source line was cut off at a '#'); they are reconstructed
  # here on the assumption that SRBT files start with a "#SRBT" header and use
  # '#'-prefixed comment lines.
  if (format == "SRBT") {
    p <- str_locate(f[1], "#SRBT")
    if (is.na(p[1][1]) | p[1][1] != 1)
      stop(sprintf("File %s has no correct SRBT header.", filename))
    noi <- as.numeric(f[2])
    if (noi <= 0) stop(sprintf("Invalid number of items in %s.", filename))
    nos <- as.numeric(f[3])
    if (nos <= 0) stop(sprintf("Invalid number of states in %s.", filename))
    offset <- 3
    p <- str_locate(f[4], "#")
    while (!is.na(p[1][1])) {
      offset <- offset + 1
      p <- str_locate(f[offset + 1], "#")
    }
  } else if (format == "KST") {
    noi <- as.numeric(f[1])
    if (noi <= 0) stop(sprintf("Invalid number of items in %s.", filename))
    nos <- as.numeric(f[2])
    if (nos <= 0) stop(sprintf("Invalid number of states in %s.", filename))
    offset <- 2
  } else if (format == "matrix") {
    noi <- str_length(f[1])
    nos <- length(f)
    offset <- 0
  } else {
    # auto-detect: SRBT header, plain matrix, or KST
    p <- str_locate(f[1], "#")
    if (!is.na(p[1][1]) & p[1][1] == 1) {
      p <- str_locate(f[1], "#SRBT")
      if (is.na(p[1][1]) | p[1][1] != 1)
        stop(sprintf("File %s has no correct SRBT header.", filename))
      noi <- as.numeric(f[2])
      if (noi <= 0) stop(sprintf("Invalid number of items in %s.", filename))
      nos <- as.numeric(f[3])
      if (nos <= 0) stop(sprintf("Invalid number of states in %s.", filename))
      offset <- 3
      p <- str_locate(f[4], "#")
      while (!is.na(p[1][1])) {
        offset <- offset + 1
        p <- str_locate(f[offset + 1], "#")
      }
    } else if (str_length(f[1]) == str_length(f[length(f)])) {
      nos <- length(f)
      noi <- str_length(f[1])
      offset <- 0
    } else {
      noi <- as.numeric(f[1])
      if (noi <= 0) stop(sprintf("Invalid number of items in %s.", filename))
      nos <- as.numeric(f[2])
      if (nos <= 0) stop(sprintf("Invalid number of states in %s.", filename))
      offset <- 2
    }
  }
  mat <- mat.or.vec(nos, noi)
  for (i in 1:nos) {
    mat[i, ] <- 1L * as.logical(as.integer(unlist(strsplit(trimws(f[i + offset], which = "both"), ""))))
  }
  storage.mode(mat) <- "integer"
  if (as.letters) {
    names <- make.unique(letters[(0L:(ncol(mat) - 1)) %% 26 + 1])
  } else {
    names <- as.integer(1L:ncol(mat))
  }
  colnames(mat) <- names
  s <- as.pattern(mat, as.set = TRUE)
  class(s) <- unique(c("kfamset", class(s)))
  list(matrix = mat, sets = s)
}
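# Hedged sketch for read_kfamset() (not from the original source; assumes
# stringr and a pks-style as.pattern() are available): read a family of
# knowledge sets in plain matrix format from a temporary file.
tmp_ex <- tempfile()
writeLines(c("000", "100", "110", "111"), tmp_ex)
read_kfamset(tmp_ex, format = "matrix")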
loglikelihood_global_admkr <- function(h, resid) {
  b = h[2]
  epsilon = scale(resid)
  std = sd(resid)
  cont = (2.0 * pi)^(-0.5)
  logf = numeric(length(resid))
  for (i in 1:length(resid)) {
    temp = epsilon[i] - epsilon[-i]
    res = sum(cont * exp(-0.5 * ((temp / b)^2)) / b)
    logf[i] = log(res / length(temp) / std)
  }
  return(sum(logf))
}
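# Quick sketch for loglikelihood_global_admkr() (not from the original
# source): leave-one-out Gaussian-kernel log-likelihood of standardized
# residuals; only h[2] (the bandwidth) is used by the function.
set.seed(7)
loglikelihood_global_admkr(c(0.1, 0.5), rnorm(50))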
net_query_batch <- function(query.nets, target.net, node.sim, query.type=4, delta.d=1e-10, delta.c=0.5, delta.e=1, delta.s=1, output="result.txt") { query.type <- as.numeric(query.type) delta <- lapply(list(d=delta.d, c=delta.c, e=delta.e, s=delta.s), as.numeric) target <- read_net(target.net) target$sim <- read_sim(node.sim) target$dist <- get_shortest_distances(target$matrix) for (query.net in query.nets) { query <- read_net(query.net) label <- .net_query.simplify_target(query, target, delta) model <- .net_query.build_model(query, label, delta) result <- .net_query.solve_crf(model, query.type) .net_query.write_result(query, label, model, result, paste(query.net, output, sep="_")) } }
BayesSAE <- function(formula, innov = "normal", df = NULL, b = NA, spatial = FALSE, tran = "F", prox = NULL, beta.start = NULL, theta.start = NULL, lam.start = runif(1), prior = NULL, mcmc = 5000, burnin = 2500, thin = 5, data){ call <- match.call() if (missing(data)) data <- environment(formula) mf <- match.call(expand.dots = FALSE) m <- match(c("formula", "data", "subset", "na.action"), names(mf), 0L) mf <- mf[c(1L, m)] mf$drop.unused.levels <- TRUE oformula <- as.formula(formula) formula <- as.Formula(formula) if (length(formula)[2L] < 2L){ formula <- as.Formula(formula(formula), ~1) } else { if (length(formula)[2L] > 2L) { formula <- Formula(formula(formula, rhs = 1:2)) warning("formula must not have more than two RHS parts") } } mf$formula <- formula mf[[1L]] <- as.name("model.frame") mf <- eval(mf, parent.frame()) mt <- terms(formula, data = data) mtX <- terms(formula, data = data, rhs = 1L) mtZ <- delete.response(terms(formula, data = data, rhs = 2L)) Y <- model.response(mf, "numeric") X <- model.matrix(mtX, mf) Z <- model.matrix(mtZ, mf) Z <- as.vector(Z[,2]) m <- length(Y) p <- ncol(X) if (is.na(b)) b <- rep(1, m) if (tran != "F" && tran != "log" && tran != "logit") stop("Only log or logit transformations are allowed") if (is.null(theta.start) || is.null(beta.start)){ if (tran == "F"){ if (is.null(beta.start)) beta.start = as.vector(coef(lm(Y~X[,-1], weights = 1 / b))) if (is.null(theta.start)) theta.start = as.vector(Y) } else if (tran == "log"){ if (is.null(beta.start)) beta.start = as.vector(coef(lm(log(Y)~X[,-1], weights = 1 / b))) if (is.null(theta.start)) theta.start = as.vector(Y) } else { if (is.null(beta.start)) beta.start = as.vector(coef(lm(log(Y)-log(1-Y)~X[,-1], weights = 1 / b))) if (is.null(theta.start)) theta.start = as.vector(Y) } } if (tran == "log" && any(theta.start <= 0)) stop("initial values for theta's must be positive") if (tran == "logit" && (any(theta.start <= 0) || any(theta.start >= 1))) stop("initial values for theta's must be bounded between 0 and 1") if (spatial){ if ((lam.start <= 0)||(lam.start >= 1)) stop("initial value for lambda must be bounded between 0 and 1") } if (tran == "log" && any(Y <= 0)) stop("response variable must be positive") if (tran == "logit" && (any(Y <= 0) || any(Y >= 1))) stop("response variable must bounded between 0 and 1") if (innov != "normal" && innov != "t") stop("innovations in the linking model must be normally or t distributed") if (spatial && tran != "F") stop("unmatched spatial models are not supported currently") if (is.null(prior)){ beta.type <- "non_in" beta.prior <- 0 sigv.type <- "unif" sigv.prior <- list(eps2 = 0.0001) } else{ beta.type <- prior$beta.type beta.prior <- prior$beta.prior sigv.type <- prior$sigv.type sigv.prior <- prior$sigv.prior } if (innov == "t"){ if (is.null(prior)) sig2.prior <- list(ai = rep(0.05, m), bi = rep(0.05, m)) else sig2.prior <- prior$sig2.prior } if (beta.type != "normal" && beta.type != "non_in") stop("only normal or non-informative distribution is allowed as prior for beta's at present") if (sigv.type != "inv_gamma" && sigv.type != "unif") stop("only invert gamma or uniform distribution is allowed as prior for sigma2v at present") if (beta.type == "normal") beta.type <- 0 else beta.type <- 1 if (beta.type == 0){ beta0 <- beta.prior$beta0 if (length(beta0) != p) stop("number of regressors and length of beta0 are inconsistent") eps1 <- beta.prior$eps1 if (eps1 <= 0) stop("eps1 must be positive") beta.prior <- c(beta0, eps1) } else beta.prior <- 0 if (sigv.type == "inv_gamma") 
sigv.type <- 0 else sigv.type <- 1 if (sigv.type == 0){ a0 <- sigv.prior$a0 b0 <- sigv.prior$b0 if (a0 <= 0 || b0 <= 0) stop("a0 and b0 must be positive") sigv.prior <- c(a0 ,b0) } else{ sigv.prior <- sigv.prior$eps2 if (sigv.prior <= 0) stop("eps2 must be positive") } if (innov == "t"){ ai <- sig2.prior$ai bi <- sig2.prior$bi if (any(ai <= 0) || any(bi <= 0)) stop("elements in ai and bi must be positive") if (length(ai) != m || length(bi) != m) stop("lengths of ai and bi should be the same as that of domains") } if (innov == "t"){ if (any(df <= 0)) stop("elements in df must be positive") if (length(df) != m) stop("length of df should be the same as that of domains") } subset <- seq(from = burnin+1, to = mcmc, by = thin) if (spatial){ li1 <- prox[,1] li2 <- prox[,2] num <- rep(0, m) for (i in 1:m){ num[i] = sum(li1 == i) + sum(li2 == i) } if (innov == "normal") result <- BayesSFH(theta.start, beta.start, lam.start, Y, t(X), Z, li1, li2, num, mcmc, beta.prior, sigv.prior, beta.type, sigv.type) else result <- BayesSYC(theta.start, beta.start, lam.start, Y, t(X), Z, li1, li2, num, mcmc, beta.prior, sigv.prior, c(ai, bi, df), beta.type, sigv.type) } else{ if (innov == "normal"){ if (tran == "F") result <- BayesFH(theta.start, beta.start, Y, t(X), b, Z, mcmc, beta.prior, sigv.prior, beta.type, sigv.type) else if (tran == "log") result <- BayesUFH(theta.start, beta.start, Y, t(X), b, Z, mcmc, beta.prior, sigv.prior, beta.type, 1, sigv.type) else result <- BayesUFH(theta.start, beta.start, Y, t(X), b, Z, mcmc, beta.prior, sigv.prior, beta.type, 2, sigv.type) } else{ if (tran == "F") result <- BayesYC(theta.start, beta.start, Y, t(X), b, Z, mcmc, beta.prior, sigv.prior, c(ai, bi, df), beta.type, sigv.type) else if (tran == "log") result <- BayesUYC(theta.start, beta.start, Y, t(X), b, Z, mcmc, beta.prior, sigv.prior, c(ai, bi, df), beta.type, 1, sigv.type) else result <- BayesUYC(theta.start, beta.start, Y, t(X), b, Z, mcmc, beta.prior, sigv.prior, c(ai, bi, df), beta.type, 2, sigv.type) } } result$innov <- innov result$prox <- prox result$X <- X result$Y <- Y result$Z <- Z result$b <- b result$m <- m result$p <- p result$subset <- subset theta <- t(result$theta) beta <- t(result$beta) sigv <- result$sigv if (!(is.null(result$lam))) lam <- result$lam if (!(is.null(result$sig2))) sig2 <- t(result$sig2) if (spatial){ li1 <- prox[,1] li2 <- prox[,2] num <- rep(0, m) for (i in 1:m){ num[i] = sum(li1 == i) + sum(li2 == i) } if (innov == "normal"){ result$type <- "SFH" HB <- theta.HB(result, subset) criter <- model.compare(result, subset, poest = "mean") result <- list(mcmc = mcmc(data.frame(theta = theta, beta = beta, sigv = sigv, lam = lam), start = burnin+1, end = mcmc, thin = thin), lambda.rate = result$lam.rate, D_avg = criter$D_avg, D_theta.hat = criter$D_theta.hat, DIC = criter$DIC, theta.HB = HB) result$type <- "SFH" result$HB <- HB } else{ result$type <- "SYC" HB <- theta.HB(result, subset) criter <- model.compare(result, subset, poest = "mean") result <- list(mcmc = mcmc(data.frame(theta = theta, beta = beta, sigv = sigv, sig2 = sig2, lam = lam), start = burnin+1, end = mcmc, thin = thin), lam.rate = result$lam.rate, D_avg = criter$D_avg, D_theta.hat = criter$D_theta.hat, DIC = criter$DIC, theta.HB = HB) result$type <- "SYC" result$HB <- HB } } else{ if (innov == "normal"){ if (tran == "F"){ result$type <- "FH" HB <- theta.HB(result, subset) criter <- model.compare(result, subset, poest = "mean") result <- list(mcmc = mcmc(data.frame(theta = theta, beta = beta, sigv = sigv), start = burnin+1, 
end = mcmc, thin = thin), D_avg = criter$D_avg, D_theta.hat = criter$D_theta.hat, DIC = criter$DIC, theta.HB = HB) result$type <- "FH" result$HB <- HB } else if (tran == "log"){ criter <- model.compare(result, subset, poest = "mean") result <- list(mcmc = mcmc(data.frame(theta = theta, beta = beta, sigv = sigv), start = burnin+1, end = mcmc, thin = thin), theta.rate = result$theta.rate, D_avg = criter$D_avg, D_theta.hat = criter$D_theta.hat, DIC = criter$DIC) result$type <- "UFH" } else{ criter <- model.compare(result, subset, poest = "mean") result <- list(mcmc = mcmc(data.frame(theta = theta, beta = beta, sigv = sigv), start = burnin+1, end = mcmc, thin = thin), theta.rate = result$theta.rate, D_avg = criter$D_avg, D_theta.hat = criter$D_theta.hat, DIC = criter$DIC) result$type <- "UFH" } } else{ if (tran == "F"){ result$type <- "YC" HB <- theta.HB(result, subset) criter <- model.compare(result, subset, poest = "mean") result <- list(mcmc = mcmc(data.frame(theta = theta, beta = beta, sigv = sigv, sig2 = sig2), start = burnin+1, end = mcmc, thin = thin), D_avg = criter$D_avg, D_theta.hat = criter$D_theta.hat, DIC = criter$DIC, theta.HB = HB) result$type <- "YC" result$HB <- HB } else if (tran == "log"){ criter <- model.compare(result, subset, poest = "mean") result <- list(mcmc = mcmc(data.frame(theta = theta, beta = beta, sigv = sigv, sig2 = sig2), start = burnin+1, end = mcmc, thin = thin), theta.rate = result$theta.rate, D_avg = criter$D_avg, D_theta.hat = criter$D_theta.hat, DIC = criter$DIC) result$type <- "UYC" } else{ criter <- model.compare(result, subset, poest = "mean") result <- list(mcmc = mcmc(data.frame(theta = theta, beta = beta, sigv = sigv, sig2 = sig2), start = burnin+1, end = mcmc, thin = thin), theta.rate = result$theta.rate, D_avg = criter$D_avg, D_theta.hat = criter$D_theta.hat, DIC = criter$DIC) result$type <- "UYC" } } } result$innov <- innov result$prox <- prox result$X <- X result$Y <- Y result$Z <- Z result$b <- b result$m <- m result$p <- p result$mf <- mf result$spatial <- spatial result$tran <- tran result$subset <- subset result$call <- call class(result) <- "BayesSAE" result }
coeff.check <- function (cross = cross, rg = rg) {
  if (is.null(rg)) {
    return(list(rg = rg, cross = cross, indep = TRUE))
  }
  pairs <- rg
  npairs <- length(rg)
  maxpower <- max(sapply(rg, sum))
  if (maxpower == 0) {
    return(list(rg = rg, cross = cross, indep = TRUE))
  }
  # contrast coefficients of an (r, g) pair, zero-padded to a common length
  coeff <- function(x) {
    r <- x[1]
    g <- x[2]
    k <- 0:g
    c(numeric(r), choose(g, k) * (-1)^k, numeric(max(maxpower - r - g, 0)))
  }
  Mcoeff <- sapply(rg, coeff)
  if (cross == TRUE) {
    coeff_cross <- c(1, -2, numeric(max(c(0, maxpower - 1))))
    Mcoeff <- cbind(Mcoeff, coeff_cross)
    npairs <- npairs + 1
    pairs <- c(pairs, "cross")
  }
  change <- FALSE
  rank_coeff <- qr(Mcoeff)$rank
  ndepend_rows <- npairs - rank_coeff
  if (ndepend_rows > 0) {
    change <- TRUE
    # drop one linearly dependent column at a time, keeping the rank intact
    for (i in 1:ndepend_rows) {
      index <- sapply(1:npairs, function(x) {
        qr(Mcoeff[, -x])$rank == rank_coeff
      })
      index_remove <- min(which(index))
      Mcoeff <- Mcoeff[, -index_remove]
      pairs <- pairs[-index_remove]
      npairs <- npairs - 1
    }
    if (pairs[npairs] == "cross") {
      return(list(rg = pairs[-npairs], cross = TRUE, indep = FALSE))
    } else {
      return(list(rg = pairs, cross = FALSE, indep = FALSE))
    }
  } else {
    return(list(rg = rg, cross = cross, indep = TRUE))
  }
}
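# Worked sketch for coeff.check() (not from the original source): rg is a list
# of c(r, g) exponent pairs. Here the 'cross' column is a linear combination
# of the first two coefficient columns, so one dependent column is detected
# and dropped (indep = FALSE).
coeff.check(cross = TRUE, rg = list(c(0, 1), c(1, 0), c(0, 2)))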
searchExtreme <- function(TX, n1, n2, alternative, method, int, delta, alpha, lookupArray) { TXunique <- unique(TX[is.na(TX[ , 4]), 3]) if (length(TXunique) == 0) {return(TX[as.logical(TX[ , 4]), 1:2, drop=FALSE])} m <- floor(length(TXunique)/2) + 1 s <- TXunique[m] if (method %in% c("pearson chisq", "yates chisq", "fisher")) { Tbls <- TX[TX[,3] == s, , drop=FALSE][1, 1:2] Tbls <- matrix(c(Tbls[1],n1-Tbls[1],Tbls[2],n2-Tbls[2]), byrow=TRUE, ncol=2) if (method == "fisher") { pvalue <- fisher.2x2(Tbls, alternative=alternative)[3] } else { pvalue <- suppressWarnings(prop.test(Tbls, alternative=alternative, correct=(method=="yates chisq"))$p.value) } } else { Tbls <- TX[TX[,3] <= s, 1:2, drop=FALSE] pvalue <- maxPvalueLookup(Tbls, int=int, lookupArray=lookupArray, doublePvalue=FALSE)$pvalue } if (pvalue <= alpha){ TX[TX[,3] <= s, 4] <- TRUE } else { TX[TX[,3] >= s, 4] <- FALSE } return(searchExtreme(TX = TX, n1 = n1, n2 = n2, alternative = alternative, method = method, int = int, delta = delta, alpha = alpha, lookupArray = lookupArray)) }
expected <- eval(parse(text="6L")); test(id=0, code={ argv <- eval(parse(text="list(structure(list(surname = structure(c(\"McNeil\", \"Ripley\", \"Ripley\", \"Tierney\", \"Tukey\", \"Venables\"), class = \"AsIs\"), nationality = structure(c(\"Australia\", \"UK\", \"UK\", \"US\", \"US\", \"Australia\"), class = \"AsIs\"), deceased = structure(c(\"no\", \"no\", \"no\", \"no\", \"yes\", \"no\"), class = \"AsIs\"), title = structure(c(\"Interactive Data Analysis\", \"Spatial Statistics\", \"Stochastic Simulation\", \"LISP-STAT\", \"Exploratory Data Analysis\", \"Modern Applied Statistics ...\"), class = \"AsIs\"), other.author = structure(c(NA, NA, NA, NA, NA, \"Ripley\"), class = \"AsIs\")), .Names = c(\"surname\", \"nationality\", \"deceased\", \"title\", \"other.author\"), row.names = c(\"1\", \"2\", \"3\", \"4\", \"5\", \"6\"), class = \"data.frame\"), 1L)")); .Internal(`shortRowNames`(argv[[1]], argv[[2]])); }, o=expected);
library(CHNOSZ) aa <- pinfo(pinfo("LYSC_CHICK")) pH <- seq(0, 14, 0.2) T <- seq(0, 200, 2) val <- expand.grid(pH=pH, T=T) par(mfrow=c(2, 2)) for(X in c("Z", "A", "Cp", "V")) { Y <- ionize.aa(aa, property=X, pH=val$pH, T=val$T) contour(pH, T, matrix(Y[, 1], ncol=length(T)), xlab="pH", ylab=axis.label("T")) title(main=axis.label(X)) } par(mfrow=c(1, 1)) pu <- par("usr") text(mean(pu[1:2]), sum(pu[3:4])*0.45, "additive properties of ionization of LYSC_CHICK")
test_that("cleaning team abbreviations", { o <- options(nflreadr.verbose = TRUE) expected_warning <- "Abbreviations not found in `nflreadr::team_abbr_mapping`: PIE" x <- c("PIE","LAR","PIT","CRD", "OAK", "CLV") expect_warning(new_abbr <- clean_team_abbrs(x),regexp = expected_warning) expect_warning(old_abbr <- clean_team_abbrs(x, current_location = FALSE), regexp = expected_warning) expect_warning(new_abbr_drop <- clean_team_abbrs(x, keep_non_matches = FALSE),regexp = expected_warning) expect_equal(new_abbr, c("PIE", "LA","PIT","ARI","LV","CLE")) expect_equal(new_abbr_drop, c(NA, "LA","PIT","ARI","LV","CLE")) expect_equal(old_abbr, c("PIE", "LA","PIT","ARI","OAK","CLE")) options(o) }) test_that("cleaning player names", { p <- c("Trubisky, Mitch", "Atwell, Chatarius", "Elliott, Zeke", "Elijah Moore", "A.J. Green", "Odell Beckham Jr.") lower <- clean_player_names(p,lowercase = TRUE, use_name_database = TRUE, convert_lastfirst = TRUE) expect_equal( lower, c("mitchell trubisky", "tutu atwell","ezekiel elliott", "elijah moore", "aj green", "odell beckham")) }) test_that("cleaning home and away columns",{ s <- load_schedules(2020) c <- clean_homeaway(s, invert = c("result","spread_line")) expect_equal(nrow(c),nrow(s)*2) expect(all(!grepl(x = names(c),pattern = "^home_")),"Error: `home_` was found in `names(c)`") })
is_source_filter <- function(x) { inherits(x, "rock_source_filter") }
Ascensorm2 <- function(andar, custo, nand, m2and, m2apt){ if(andar <= nand){ if(nand < 9){ suma = nand+(nand*(nand-1))/4 resultado = m2apt*(custo*((1+(andar-1)*0.5)/(suma*m2and))) return(resultado) } else { print("nand must not exceed 9") } } else { print("The floor level cannot be greater than nand") } }
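# Minimal usage sketch (all values are made up): pro-rated elevator cost for an
# 80 m2 apartment on the 3rd floor of a 5-storey building with 400 m2 per floor.
# Ascensorm2(andar = 3, custo = 100000, nand = 5, m2and = 400, m2apt = 80)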
mvcokm.param <- function(obj){ formula = obj@formula output = obj@output input = obj@input param = obj@param cov.model = [email protected] NestDesign = obj@NestDesign phi = do.call(cbind, param) Dim = dim(input[[1]])[2] p.x = Dim if(dim(phi)[1]==Dim){ is.nugget=FALSE }else{ is.nugget=TRUE } S = length(output) out = augment.input(input) input.union = out$union input.miss = out$miss input.list = list(input=input, input.miss=input.miss) Cl = list() for(t in 1:S){ input.max = apply(input.list$input[[t]], 2, max) input.min = apply(input.list$input[[t]], 2, min) Cl[[t]] = abs(input.max-input.min) } y = output H = list() Hm = list() for(t in 1:S){ colnames(input[[t]]) = paste0("x", 1:p.x) df = data.frame(input[[t]]) H[[t]] = model.matrix(formula[[t]], df) if(t<S){ colnames(input.miss[[t]]) = paste0("x", 1:p.x) df = data.frame(input.miss[[t]]) Hm[[t]] = model.matrix(formula[[t]], df) } } dist.o = list() dist.m = list() dist.mo = list() distlist = list() for(t in 1:S){ dist.o[[t]] = compute_distance(input[[t]], input[[t]]) if(t<S){ dist.m[[t]] = compute_distance(input.miss[[t]], input.miss[[t]]) dist.mo[[t]] = compute_distance(input.miss[[t]], input[[t]]) } distlist[[t]] = compute_distance(input.union[[t]], input.union[[t]]) } n.aug = rep(NA, S) q = rep(NA, S) for(t in 1:S){ n.aug[t] = dim(y[[t]])[1] q[t] = dim(H[[t]])[2] } if(NestDesign){ stop("Not implemented yet.") }else{ sigma2.hat = list() beta.hat = list() ym.hat = list() t=1 R = buildcov(phi[ ,t], dist.o[[t]], covmodel=cov.model, nugget=is.nugget) U = chol(R) RInv = chol2inv(U) HRHInv = solve(t(H[[t]])%*%RInv%*%H[[t]]) Rm = buildcov(phi[ ,t], dist.m[[t]], covmodel=cov.model, nugget=is.nugget) Rmo = buildcov(phi[ ,t], dist.mo[[t]], covmodel=cov.model, nugget=FALSE) RmoRInv = Rmo%*%RInv KW = (Hm[[t]]-RmoRInv%*%H[[t]]) %*% HRHInv %*% t(H[[t]]) %*% RInv + RmoRInv ym.hat[[t]] = KW %*% y[[t]] Q = RInv - RInv%*%H[[t]]%*%HRHInv%*%t(H[[t]])%*%RInv sigma2.hat[[t]] = compute_Svec(output=y[[t]], Q=Q) / (n.aug[t]-q[t]+2) beta.hat[[t]] = HRHInv%*%t(H[[t]])%*%RInv%*%y[[t]] for(t in 2:S){ R = buildcov(phi[ ,t], dist.o[[t]], covmodel=cov.model, nugget=is.nugget) U = chol(R) RInv = chol2inv(U) y_t1 = create.w(t=t, input=input, input.miss=input.miss[[t-1]], y=y[[t-1]], ym=ym.hat[[t-1]]) out = compute_param(y_t=y[[t]], Ht=H[[t]], y_t1=y_t1, RInv) sigma2.hat[[t]] = out$sigma2 beta.hat[[t]] = out$beta } names(beta.hat) = paste0("Level", seq(1:S), "") names(sigma2.hat) = paste0("Level", seq(1:S), "") } return(list(corr=param, coeff=beta.hat, var=sigma2.hat)) }
checkEmptyValLabels <- function(GADSdat, vars = namesGADS(GADSdat), valueRange = NULL, output = c("list", "data.frame")) { UseMethod("checkEmptyValLabels") } checkEmptyValLabels.GADSdat <- function(GADSdat, vars = namesGADS(GADSdat), valueRange = NULL, output = c("list", "data.frame")) { check_GADSdat(GADSdat) check_vars_in_GADSdat(GADSdat, vars = vars) output <- match.arg(output) label_no_values <- vector("list", length = length(vars)) names(label_no_values) <- vars for(i in vars) { i_meta <- GADSdat$labels[GADSdat$labels$varName == i, ] i_labeled_values <- unique(i_meta[, "value"])[!is.na(unique(i_meta[, "value"]))] i_real_values <- unique(GADSdat$dat[, i])[!is.na(unique(GADSdat$dat[, i]))] empty_values <- setdiff(i_labeled_values, i_real_values) label_no_values[[i]] <- i_meta[i_meta$value %in% empty_values, c("value", "valLabel", "missings")] label_no_values[[i]] <- label_no_values[[i]][order(label_no_values[[i]][, "value"]), ] } if(!is.null(valueRange)) { if(!is.numeric(valueRange) || length(valueRange) != 2) stop("'valueRange' needs to be a numeric vector of length 2.") label_no_values <- lapply(label_no_values, function(label_no_values_single) { label_no_values_single[between(label_no_values_single$value, range(valueRange)[1], range(valueRange)[2]), ] }) } if(identical(output, "data.frame")) { out <- eatTools::do_call_rbind_withName(label_no_values, colName = "variable") } else out <- lapply(label_no_values, function(x) if(nrow(x) == 0) NULL else x) out } checkMissingValLabels <- function(GADSdat, vars = namesGADS(GADSdat), valueRange = NULL) { UseMethod("checkMissingValLabels") } checkMissingValLabels.GADSdat <- function(GADSdat, vars = namesGADS(GADSdat), valueRange = NULL) { check_GADSdat(GADSdat) check_vars_in_GADSdat(GADSdat, vars = vars) not_labeled <- vector("list", length = length(vars)) names(not_labeled) <- vars for(i in vars) { i_meta <- GADSdat$labels[GADSdat$labels$varName == i, ] i_labeled_values <- unique(i_meta[, "value"])[!is.na(unique(i_meta[, "value"]))] i_real_values <- unique(GADSdat$dat[, i])[!is.na(unique(GADSdat$dat[, i]))] missing_values <- setdiff(i_real_values, i_labeled_values) if(length(missing_values) > 0) { not_labeled[[i]] <- list() not_labeled[[i]]$varLabel <- i_meta[1, "varLabel"] not_labeled[[i]]$missing_labels <- sort(missing_values) } } if(!is.null(valueRange)) { if(!is.numeric(valueRange) || length(valueRange) != 2) stop("'valueRange' needs to be a numeric vector of length 2.") not_labeled <- lapply(not_labeled, function(not_labeled_single) { not_labeled_single$missing_labels <- not_labeled_single$missing_labels[between(not_labeled_single$missing_labels, range(valueRange)[1], range(valueRange)[2])] if(length(not_labeled_single$missing_labels) == 0) return(NULL) not_labeled_single }) } not_labeled }
pluscode_encode <- function(lats, lngs, precision = 10) { if(lats < -90 | lats > 90) { stop(paste0("The latitude is not valid, please enter a value within the range -90 to 90")) } if(lngs < -180 | lngs > 180) { stop(paste0("The longitude is not valid, please enter a value within the range -180 to 180")) } pluscode<-jsonlite::fromJSON(rawToChar(httr::GET(paste0("https://plus.codes/api?address=",lats,",",lngs))$content))$plus_code$global_code
# a global plus code has a '+' after the 8th digit, hence 9 and 11 characters for precisions 8 and 10
if(precision == 2) return(substr(pluscode,1,2)) else if(precision == 4) return(substr(pluscode,1,4)) else if(precision == 8) return(substr(pluscode,1,9)) else if(precision == 10) return(substr(pluscode,1,11)) else return(print("Invalid Precision - Please choose 2, 4, 8, or 10")) }
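# Usage sketch (requires network access to the plus.codes API; coordinates are
# illustrative only): encode a point near Greenwich at the default and at a
# coarser precision.
# pluscode_encode(51.4779, -0.0015)
# pluscode_encode(51.4779, -0.0015, precision = 4)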
`trecase.sex.b0.X` = function(yi, ind.lst, X, ni, ni0, xs, l.tau.r, l.tau.a, start, iphi=1, theta=1, maxiter=100, eps=1E-3, tech.ctrl){ triali = 0 par0 = start repeat{ triali = triali + 1 tag = tryCatch({ iphi_i = iphi theta_i = theta log.lik0 = ll.jRCI.sex.b0.X(par0, yi=yi, ind.lst=ind.lst, X=X, ni=ni, ni0=ni0, xs=xs, iphi=iphi_i, theta=theta_i, l.tau.r=l.tau.r, l.tau.a=l.tau.a) for(i in 1:maxiter){ out = optim(par0, ll.jRCI.sex.b0.X, yi=yi, ind.lst=ind.lst, X=X, ni=ni, ni0=ni0, xs=xs, iphi=iphi_i, theta=theta_i, l.tau.r=l.tau.r, l.tau.a=l.tau.a) log.lik1 = ll.jRCI.sex.b0.X(par0, yi=yi, ind.lst=ind.lst, X=X, ni=ni, ni0=ni0, xs=xs, iphi=iphi_i, theta=theta_i, l.tau.r=l.tau.r, l.tau.a=l.tau.a) older = c(par0[1], par0, log.lik1, iphi_i, theta_i) par0 = out$par iphi_i = optimize(f=ll.tRCI.iphi.X, c(tech.ctrl$iphi_l, tech.ctrl$iphi_u), yi=yi, ind.lst=ind.lst, X=X, twosex=TRUE, betas=older[1:9], l.tau.r=l.tau.r, l.tau.a=l.tau.a)$minimum theta_i = optimize(f=ll.aRC.theta.X, c(tech.ctrl$theta_l, tech.ctrl$theta_u), bs=older[c(3, 4)], ni=ni, ni0=ni0, xs=xs, l.tau.r=l.tau.r)$minimum log.lik1 = ll.jRCI.sex.b0.X(par0, yi=yi, ind.lst=ind.lst, X=X, ni=ni, ni0=ni0, xs=xs, iphi=iphi_i, theta=theta_i, l.tau.r=l.tau.r, l.tau.a=l.tau.a) newer = c(par0[1], par0, log.lik1, iphi_i, theta_i) if((log.lik0 - log.lik1)<eps)break; log.lik0 = log.lik1 } if(log.lik0 < log.lik1){ ret = older }else{ ret = newer } 0 }, error=function(e) { 1 }) if((tag == 0) | (triali >= tech.ctrl$maxtrial))break; par0 = rnorm(length(par0), start, 1) } if(tag == 1){ ret = NULL } return(ret) }
gads_get_metadata <- function( category = c("RESOURCE", "ATTRIBUTE", "METRIC", "SEGMENT", 'ALL'), fields = c("name", "category", "data_type", "selectable", "filterable", "sortable", "selectable_with", "metrics", "segments", "is_repeated", "type_url", "enum_values", "attribute_resources") ) {
# match.arg() must be assigned back, otherwise 'category' keeps the full default vector and the comparison below operates on a vector
category <- match.arg(toupper(category), c("RESOURCE", "ATTRIBUTE", "METRIC", "SEGMENT", "ALL")) fields <- gsub("[\\s\\n\\t]", "", tolower(fields), perl = TRUE) %>% str_c(collapse = ', ') if ( category == "ALL" ) { where_clause <- "" } else { where_clause <- str_glue("WHERE category = '{category}'") } body <- list(query = str_glue(' SELECT {fields} {where_clause}')) %>% toJSON(auto_unbox = T, pretty = T) out <- request_build( method = "POST", path = str_glue('{options("gads.api.version")}/googleAdsFields:search'), body = body, token = gads_token(), base_url = getOption('gads.base.url') ) ans <- request_retry( out, encode = "json", add_headers(`developer-token`= gads_developer_token()) ) rq_ids <- headers(ans)$`request-id` rgoogleads$last_request_id <- rq_ids data <- response_process(ans, error_message = gads_check_errors2) res <- tibble(data = data$results) %>% unnest_wider(data) %>% rowwise() %>% mutate( across( where(is.list), function(col) if_else(is.null(col), list(col), list(unlist(col))) ) ) %>% rename_with( getOption('gads.column.name.case.fun') ) cli_alert_success('Success! Loaded {nrow(res)} rows!') return(res) }
create_beauti_options_v2_4 <- function( ) { beautier::create_beauti_options( capitalize_first_char_id = FALSE, nucleotides_uppercase = FALSE, beast2_version = "2.4", required = "", sequence_indent = 20 ) }
download.FACE <- function(sitename, outfolder, start_date, end_date, overwrite = FALSE, method, ...) { start_date <- as.POSIXlt(start_date, tz = "UTC") end_date <- as.POSIXlt(end_date, tz = "UTC") site <- site_from_tag(sitename, "FACE") if (!file.exists(outfolder)) { dir.create(outfolder, showWarnings = FALSE, recursive = TRUE) } raw.file <- paste0(site, "_forcing_h.nc") out.file <- file.path(outfolder, paste0("FACE_", raw.file)) url <- paste0("ftp://cdiac.ornl.gov/.private/eCO2_Modelling/Site_Data/", site, "/", raw.file) print(url) PEcAn.utils::download.file(url,out.file,method) return(invisible(data.frame(file = out.file, host = PEcAn.remote::fqdn(), mimetype = "application/x-netcdf", formatname = "FACE", startdate = start_date, enddate = end_date, dbfile.name = "FACE", stringsAsFactors = FALSE))) }
context("canvasXpress Web Charts - TCGA") test_that("cXtcga1", { check_ui_test(cXtcga1()) }) test_that("cXtcga2", { check_ui_test(cXtcga2()) }) test_that("cXtcga3", { check_ui_test(cXtcga3()) }) test_that("cXtcga4", { check_ui_test(cXtcga4()) }) test_that("cXtcga5", { check_ui_test(cXtcga5()) }) test_that("cXtcga6", { check_ui_test(cXtcga6()) }) test_that("cXtcga7", { check_ui_test(cXtcga7()) }) test_that("cXtcga8", { check_ui_test(cXtcga8()) }) test_that("cXtcga9", { check_ui_test(cXtcga9()) }) test_that("cXtcga10", { check_ui_test(cXtcga10()) })
library(survival) options(na.action=na.exclude) aeq <- function(x,y,...) all.equal(as.vector(x), as.vector(y), ...) fit1 <- coxph(Surv(time, status) ~ age + offset(ph.ecog*0) +strata(sex), lung) fit2 <- coxph(Surv(time, status) ~ age + ph.ecog +strata(sex), lung) test <- concordance(fit1, fit2, influence=1) ksex <- model.frame(fit1)[["strata(sex)"]] test1 <- concordance(fit1$y ~ fit1$linear.predictors + strata(ksex), reverse=TRUE, influence=1) test2 <- concordance(fit1$y ~ fit2$linear.predictors + strata(ksex), reverse=TRUE, influence=1) aeq(test$concordance, c(test1$concordance, test2$concordance)) aeq(diag(test$var), c(test1$var[1], test2$var[1])) aeq(test$dfbeta, cbind(test1$dfbeta, test2$dfbeta)) cvec <- c(-1, 1) aeq(cvec %*% test$var %*% cvec, sum((test1$dfbeta - test2$dfbeta)^2)) mfit <- coxph(Surv(futime, death) ~ creat + hgb, mgus2) cm1 <- concordance(mfit, timewt='n', ranks=TRUE) cm2 <- concordance(mfit, timewt='S', ranks=TRUE) cm3 <- concordance(mfit, timewt='S/G', ranks=TRUE) sfit <- survfit(Surv(futime, death) ~ 1, mgus2, subset=!is.na(creat+hgb)) gfit <- survfit(Surv(futime, 1-death)~1, mgus2, subset=!is.na(creat+hgb)) rd1 <- cm1$ranks rd2 <- cm2$ranks rd3 <- cm3$ranks all.equal(rd1[c('time', 'rank', 'casewt')], rd2[c('time', 'rank', 'casewt')]) all.equal(rd1[c('time', 'rank', 'casewt')], rd3[c('time', 'rank', 'casewt')]) indx <- match(rd1$time, sfit$time) nt <- sfit$n.risk[indx] - sfit$n.event[indx] all.equal(rd1$timewt, nt) gminus <- c(1, gfit$surv) all.equal(rd2$timewt, mfit$n* sfit$surv[indx]) all.equal(rd3$timewt, mfit$n* sfit$surv[indx] /gminus[indx])
calcDewPoint.A <- function(RH,temp) {return((RH/100 )^(1/8) * (110+temp) -110)} calcDewPoint.B <- function(RH,temp) { dw <- 0 if(RH < 50){ dw <- (0.198 + 0.0017*temp) * RH + (0.84*temp) - 19.2} else{ dw <- temp - ( ((100-RH)/5) * (temp/300)^2 ) - (0.00135 * (RH - 84)^2 )+ 0.35} return(dw) } calcDewPoint.C <- function(RH,temp){ return (243.04*(log(RH/100)+((17.625*temp)/(243.04+temp)))/(17.625-log(RH/100)-((17.625*temp)/(243.04+temp)))) } calcDewPoint <- function(RH,temp,mode = "A") { if(checkRH(RH) && checkTemp(temp) && mode %in% c("A","B","C")) { if(mode =="A") return(calcDewPoint.A(RH,temp)) else if(mode == "B")return(calcDewPoint.B(RH,temp)) else if(mode =="C") return(calcDewPoint.C(RH,temp)) }else return(NULL) }
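# Quick sanity check (assumes the checkRH()/checkTemp() helpers used by the
# calcDewPoint() wrapper are defined elsewhere, so the variant functions are
# called directly here): at 20 C and 65% RH the Magnus-based variant C should
# give a dew point of roughly 13.2 C.
# calcDewPoint.A(65, 20)
# calcDewPoint.B(65, 20)
# calcDewPoint.C(65, 20)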
cat("\014") rm(list = ls()) setwd("~/git/of_dollars_and_data") source(file.path(paste0(getwd(),"/header.R"))) library(scales) library(lubridate) library(stringr) library(ggrepel) library(zoo) library(Hmisc) library(igraph) library(lemon) library(readxl) library(tidyverse) folder_name <- "_jkb/0010_crash_buying_opportunity" out_path <- paste0(exportdir, folder_name) dir.create(file.path(paste0(out_path)), showWarnings = FALSE) bw_colors <- c(" percent_loss_gain <- data.frame(loss = seq(0.01, 0.5, 0.01)) %>% mutate(gain = 1/(1-loss) - 1) file_path <- paste0(out_path, "/gain_needed_to_recover_loss.jpeg") plot <- ggplot(percent_loss_gain, aes(x = loss, y = gain)) + geom_smooth(se = FALSE, col = bw_colors[2]) + scale_y_continuous(label = percent_format(accuracy = 1), limits = c(0, 1), breaks = seq(0, 1, 0.1)) + scale_x_continuous(label = percent_format(accuracy = 1), limits = c(0, 1), breaks = seq(0, 1, 0.1)) + of_dollars_and_data_theme + ggtitle(paste0("% Gain Needed to Fully Recover From % Loss")) + labs(x = "Loss" , y = "Gain Needed to Recover") ggsave(file_path, plot, width = 15, height = 12, units = "cm") shiller <- readRDS(paste0(localdir, "0009_sp500_ret_pe.RDS")) %>% select(date, price_plus_div) %>% filter(date >= "1920-01-01") dd <- drawdown_path(shiller) dd_num <- 1 for(i in 1:nrow(dd)){ dd_curr <- dd[i, "pct"] dd[i, "dd_num"] <- dd_num if(dd_curr == 0){ dd_num <- dd_num + 1 } } dd_final <- dd %>% left_join(shiller) dd_tops <- dd_final %>% filter(pct == 0) %>% group_by(dd_num) %>% summarise(recovery_price = max(price_plus_div), recovery_date = date) %>% ungroup() dd_lengths <- dd %>% group_by(dd_num) %>% summarise(n_months = n(), min_dd = min(pct)) %>% ungroup() %>% filter(min_dd < -0.2) %>% left_join(dd_tops) %>% select(-n_months) plot_dd_pct <- function(dd_pct){ dd_w_recovery <- dd_final %>% inner_join(dd_lengths) %>% mutate(n_years = (interval(date, recovery_date) %/% months(1))/12, annualized_recovery_ret = (recovery_price/price_plus_div)^(1/n_years) - 1, recovery_bucket = case_when( annualized_recovery_ret < 0.05 ~ "0%-5%", annualized_recovery_ret < 0.1 ~ "5%-10%", annualized_recovery_ret < 0.15 ~ "10%-15%", annualized_recovery_ret < 0.2 ~ "15%-20%", annualized_recovery_ret < 0.25 ~ "20%-25%", annualized_recovery_ret < 0.3 ~ "25%-30%", TRUE ~ ">30%" )) %>% filter(n_years != 0, date >= "1900-01-01", pct < dd_pct) tmp <- dd_w_recovery %>% group_by(recovery_bucket) %>% summarise(pct = n()/nrow(dd_w_recovery)) %>% ungroup() recovery_buckets <- c("0%-5%", "5%-10%","10%-15%", "15%-20%", "20%-25%", "25%-30%", ">30%") to_plot <- data.frame(recovery_bucket = recovery_buckets) %>% full_join(tmp) %>% mutate(pct = ifelse(is.na(pct), 0, pct)) to_plot$recovery_bucket <- factor(to_plot$recovery_bucket, levels = recovery_buckets) dd_string <- -100*dd_pct file_path <- paste0(out_path, "/dd_", dd_string, "_pct_rets.jpeg") plot <- ggplot(to_plot, aes(x = recovery_bucket, y = pct)) + geom_bar(stat="identity", fill = bw_colors[2]) + scale_y_continuous(label = percent_format(accuracy = 1), limits = c(0, 0.5)) + of_dollars_and_data_theme + ggtitle(paste0("Annualized Returns After Buying\nDuring ", dd_string, "%+ Drawdown")) + labs(x = "Annualized Return" , y = "Frequency") ggsave(file_path, plot, width = 15, height = 12, units = "cm") } dd_pcts <- seq(-0.2, -0.50, -0.05) for(d in dd_pcts){ plot_dd_pct(d) } calc_dca_growth <- function(start_date, end_date){ sp500 <- readRDS(paste0(localdir, "0009_sp500_ret_pe.Rds")) %>% filter(date>= start_date, date <= end_date) %>% select(date, price_plus_div) 
ending_val <- pull(sp500[nrow(sp500), "price_plus_div"]) to_plot <- sp500 %>% mutate(dca_growth = ending_val/price_plus_div * 100) %>% select(date, dca_growth) return(to_plot) } to_plot <- calc_dca_growth("1929-09-01", "1936-11-01") %>% mutate(label = "Sep 1929-Nov 1936") file_path <- paste0(out_path, "/dca_growth_1929.jpeg") plot <- ggplot(to_plot, aes(x=date, y=dca_growth)) + geom_bar(stat = "identity", fill = "black", width = 31) + scale_y_continuous(label = dollar) + scale_x_date(date_labels = "%Y") + of_dollars_and_data_theme + ggtitle(paste0("Final Growth of Each $100 Payment\ninto U.S. Stocks")) + labs(x = "Date" , y = "Final Amount") ggsave(file_path, plot, width = 15, height = 12, units = "cm") jpy <- read.csv(paste0(importdir,"/_jkb/0010_buying_during_a_crisis/NIKKEI225_fred.csv")) %>% mutate(date = as.Date(DATE), index_jpy = as.numeric(NIKKEI225), ret_jpy = index_jpy/lag(index_jpy, 1) - 1) %>% select(date, index_jpy, ret_jpy) %>% drop_na %>% filter(date >= "1980-01-01", date <= "2020-12-31") for(i in 1:nrow(jpy)){ if(i == 1){ jpy[i, "market_value"] <- 1 jpy[i, "basis"] <- 1 } else{ jpy[i, "market_value"] <- jpy[(i-1), "market_value"] * (1 + jpy[i, "ret_jpy"]) + 1 jpy[i, "basis"] <- jpy[(i-1), "basis"] + 1 } } to_plot <- jpy file_path <- paste0(out_path, "/jpy_1980_onward.jpeg") plot <- ggplot(to_plot, aes(x=date, y=index_jpy)) + geom_line(col = bw_colors[2]) + scale_y_continuous(label = comma) + scale_x_date(date_labels = "%Y") + of_dollars_and_data_theme + ggtitle(paste0("The Japanese Stock Market Was\nBelow Its Highs For Over Three Decades")) + labs(x = "Date" , y = "Index Value") ggsave(file_path, plot, width = 15, height = 12, units = "cm") to_plot <- to_plot %>% select(date, market_value, basis) %>% gather(-date, key=key, value=value) %>% mutate(key = case_when( key == "market_value" ~ "Market Value", TRUE ~ "Cost Basis" )) file_path <- paste0(out_path, "/jpy_1980_onward_dca.jpeg") plot <- ggplot(to_plot, aes(x=date, y=value, col = key)) + geom_line() + scale_color_manual(values = bw_colors) + scale_y_continuous(label = dollar) + scale_x_date(date_labels = "%Y") + of_dollars_and_data_theme + theme(legend.position = "bottom", legend.title = element_blank()) + ggtitle(paste0("Portfolio Value vs. Cost Basis\n$1 Per Day into Japanese Stocks")) + labs(x = "Date" , y = "Value") ggsave(file_path, plot, width = 15, height = 12, units = "cm")
rhalft <- function(n, df = 1, mean = 0, sd = 1) { abs(rt_(n, df, mean, sd)) }
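# Sampling sketch (rt_() is assumed to be a package-internal location-scale
# Student-t sampler): half-t draws are the absolute values of t draws, so the
# sample below should be strictly non-negative.
# set.seed(1)
# x <- rhalft(1e4, df = 3, mean = 0, sd = 2)
# all(x >= 0)  # TRUE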
byf.shapiro <- function(formula,data) { if (missing(formula)||(length(formula)!=3)) {stop("missing or incorrect formula")} m <- match.call() if (is.matrix(eval(m$data,parent.frame()))) {m$data <- as.data.frame(m$data)} m[[1]] <- as.name("model.frame") mf <- eval(m,parent.frame()) dname <- paste(names(mf)[1],paste(names(mf)[2:ncol(mf)],collapse=":"),sep=" by ") resp <- mf[,1] fact <- interaction(mf[,2:ncol(mf)],sep=":") nlev <- nlevels(fact) tab <- data.frame(W=integer(nlev),"p-value"=integer(nlev),check.names=FALSE) rownames(tab) <- levels(fact) for (i in 1:nlev) { test <- shapiro.test(resp[as.numeric(fact)==i]) tab[i,1] <- test$statistic tab[i,2] <- test$p.value } result <- list(method="Shapiro-Wilk normality tests",data.name=dname,tab=tab) class(result) <- "byf.test" return(result) }
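# Usage sketch on a built-in data set: Shapiro-Wilk tests of Sepal.Length
# within each iris species (the result table has one row per factor level).
# byf.shapiro(Sepal.Length ~ Species, data = iris)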
logLik.summary.maxLik <- function( object, ...) { ll <- object$loglik attr(ll, "df") <- sum(activePar(object)) ll } logLik.maxLik <- function( object, ...) { ll <- maxValue(object) attr(ll, "df") <- sum(activePar(object)) ll }
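# Usage sketch (assumes the 'maxLik' package is attached): maximise a normal
# log-likelihood and read off the log-likelihood with its 'df' attribute, which
# these methods set to the number of free (active) parameters.
# library(maxLik)
# x <- rnorm(100, mean = 2)
# fit <- maxLik(function(mu) sum(dnorm(x, mean = mu, log = TRUE)),
#               start = c(mu = 0))
# logLik(fit)           # dispatches to logLik.maxLik
# logLik(summary(fit))  # dispatches to logLik.summary.maxLik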
knitr::opts_chunk$set(include = FALSE) library("ggplot2") library("SOMbrero") set.seed(4031719) the.data <- data.frame("x1" = runif(500), "x2" = runif(500)) ggplot(the.data, aes(x = x1, y = x2)) + geom_point() + theme_bw() set.seed(1105) my.som <- trainSOM(x.data=the.data, dimension=c(5,5), nb.save=10, maxit=2000, scaling="none", radius.type="letremy", topo="square", dist.type = "letremy") plot(my.som, what="energy") plot(my.som, what = "obs", type = "hitmap") my.colors <- rainbow(prod(my.som$parameters$the.grid$dim))[my.som$clustering] plot(my.som$data[,1], my.som$data[,2], col=my.colors, pch=19, xlab="x1", ylab="x2", main="Data according to final clustering") par(mfrow=c(1,2)) plot(my.som, what="prototypes", type="color", var=1) plot(my.som, what="prototypes", type="color", var=2) par(mfrow=c(1,2)) plot(my.som, what="obs", type="color", var=1) plot(my.som, what="obs", type="color", var=2) values <- protoDist(my.som, "neighbors") tmp <- data.frame("prot1" = rep.int(1:prod(my.som$parameters$the.grid$dim), times=sapply(values, length)), "nei" = as.numeric(as.character(names(unlist(values))))) tmp <- tmp[tmp[ ,1] < tmp[ ,2], ] par(mfrow=c(2, 5),mar=c(3,2,2,1)) invisible(sapply(1:my.som$parameters$nb.save, function(ind){ plot(my.som$backup$prototypes[[ind]][,1], my.som$backup$prototypes[[ind]][,2], xlab="", ylab="", main=c("iteration ", my.som$backup$steps[ind])) for (i in 1:nrow(tmp)){ segments(x0=my.som$backup$prototypes[[ind]][tmp[i,1],1], y0=my.som$backup$prototypes[[ind]][tmp[i,1],2], x1=my.som$backup$prototypes[[ind]][tmp[i,2],1], y1=my.som$backup$prototypes[[ind]][tmp[i,2],2], col="red", pch=19) } })) set.seed(255) iris.som <- trainSOM(x.data = iris[,1:4], dimension = c(5,5), verbose = TRUE, nb.save = 5, topo = "hexagonal") iris.som plot(iris.som, what="energy") iris.som$clustering table(iris.som$clustering) plot(iris.som, what="obs", type="hitmap") summary(iris.som) predict(iris.som, iris[1,1:4]) iris.som$clustering[1] par(mfrow = c(2,2)) plot(iris.som, what = "obs", type = "color", variable = 1) plot(iris.som, what = "obs", type = "color", variable = 2) plot(iris.som, what = "obs", type = "color", variable = 3) plot(iris.som, what = "obs", type = "color", variable = 4) plot(iris.som, what = "prototypes", type = "lines", show.names = TRUE) + theme(axis.text.x = element_blank()) plot(iris.som, what = "obs", type = "barplot", show.names = TRUE) + theme(axis.text.x = element_blank()) plot(iris.som, what = "obs", type = "boxplot", show.names = TRUE) plot(iris.som, what = "obs", type = "lines", show.names = TRUE) plot(iris.som, what = "obs", type = "names", show.names = TRUE) par(mfrow=c(2,2)) plot(iris.som, what = "prototypes", type = "3d", variable = 1) plot(iris.som, what = "prototypes", type = "3d", variable = 2) plot(iris.som, what = "prototypes", type = "3d", variable = 3) plot(iris.som, what = "prototypes", type = "3d", variable = 4) plot(iris.som, what = "prototypes", type = "poly.dist", show.names = FALSE) plot(iris.som, what = "prototypes", type = "umatrix") plot(iris.som, what = "prototypes", type = "smooth.dist") plot(iris.som, what = "prototypes", type = "mds") plot(iris.som, what = "prototypes", type = "grid.dist") class(iris$Species) levels(iris$Species) plot(iris.som, what = "add", type = "pie", variable = iris$Species) + scale_fill_brewer(type = "qual") + guides(fill = guide_legend(title = "Species")) plot(iris.som, what = "add", type = "color", variable = iris$Sepal.Length, show.names = FALSE) my.cont.mat <- matrix(data=c(rep(c(rep(1,50), rep(0,150)), 2), rep(1,50)), nrow 
= 150, ncol = 3) colnames(my.cont.mat) <- levels(iris$Species) head(my.cont.mat) plot(iris.som, what = "add", type = "words", variable = my.cont.mat, show.names = FALSE) plot(iris.som, what = "add", type = "names", variable = rownames(iris)) plot(iris.som, what = "add", type = "names", variable = iris$Species) quality(iris.som) qualities <- quality(iris.som) plot(superClass(iris.som)) my.sc <- superClass(iris.som, k = 3) summary(my.sc) plot(my.sc, plot.var = FALSE) plot(my.sc, type = "grid") plot(my.sc, type = "dendro3d") plot(my.sc, what = "obs", type = "hitmap", maxsize = 20) plot(my.sc, what = "prototypes", type = "lines") plot(my.sc, what = "prototypes", type = "barplot") plot(my.sc, what = "prototypes", type = "mds") plot(my.sc, what = "prototypes", type = "color", variable = "Sepal.Length") plot(my.sc, what = "prototypes", type = "poly.dist") plot(my.sc, what = "add", type = "pie", variable = iris$Species) + scale_fill_brewer(type = "qual") plot(my.sc, what = "add", type = "color", variable = iris$Sepal.Length) sessionInfo()
ctModelHigherOrder <- function(ctm, indices,diffusion=TRUE, crosseffects=FALSE,cint=FALSE, explosive=FALSE){ ctm$latentNames <- c(ctm$latentNames,paste0('d',ctm$latentNames[indices])) nl <- ctm$n.latent for(i in 1:length(indices)){ for(m in c('DRIFT','DIFFUSION','T0VAR')){ ctm[[m]] <- rbind(cbind(ctm[[m]],0),0) } for(m in c('CINT','T0MEANS')){ ctm[[m]] <- rbind(ctm[[m]],0) } ctm$LAMBDA <- cbind(ctm$LAMBDA,0) if(ctm$n.TDpred > 0) ctm$TDPREDEFFECT <- rbind(ctm$TDPREDEFFECT,0) } for(i in 1:length(indices)){ if(!crosseffects) ctm$DRIFT[i+nl,i+nl] <- ctm$DRIFT[indices[i],indices[i]] if(crosseffects){ m <- 'DRIFT' ctm[[m]][i+nl,] <- ctm[[m]][indices[i],] ctm[[m]][,i+nl] <- ctm[[m]][,indices[i]] ctm[[m]][indices[i],] <- 0 ctm[[m]][,indices[i]] <- 0 } ctm$DRIFT[indices[i],indices[i]] <- 0 ctm$DRIFT[indices[i],i+nl] <- 1 ctm$DRIFT[i+nl,indices[i]] <-paste0('drift_',ctm$latentNames[i+nl],'_', ctm$latentNames[indices[i]], ifelse(!explosive,'|-log1p(exp(-param*2))-1e-6','|param*2-1')) if(indices[i]==tail(indices,1)) rownames(ctm$DRIFT) <- ctm$latentNames if(indices[i]==tail(indices,1)) colnames(ctm$DRIFT) <- ctm$latentNames for(m in c('T0VAR','DIFFUSION')){ if(diffusion && m=='DIFFUSION'){ ctm[[m]][i+nl,] <- ctm[[m]][indices[i],] ctm[[m]][,i+nl] <- ctm[[m]][,indices[i]] ctm[[m]][indices[i],] <- 0 ctm[[m]][,indices[i]] <- 0 } if(indices[i]==tail(indices,1)) colnames(ctm[[m]]) <- ctm$latentNames if(indices[i]==tail(indices,1)) rownames(ctm[[m]]) <- ctm$latentNames } for(m in c('CINT','T0MEANS')){ if(m %in% 'T0MEANS') ctm[[m]][i+nl,1] <- paste0('T0mean_d_',ctm$latentNames[indices[i]]) if(!m %in% 'T0MEANS' && cint){ ctm[[m]][indices[i]+nl,] <- ctm[[m]][indices[i],] ctm[[m]][indices[i],] <- 0 } if(indices[i]==tail(indices,1)) rownames(ctm[[m]]) <- ctm$latentNames } } for(i in c(nl+seq_along(indices))){ for(j in c(1:nl,nl+seq_along(indices))){ if(i >= j) ctm$T0VAR[i,j] <- paste0('T0var_',ctm$latentNames[i],'_', ctm$latentNames[j]) } } ctm$n.latent <- ctm$n.latent + length(indices) return(ctm) }
BW3stagePPSe <- function(dat, v, Ni, Qi, Qij, m, lonely.SSU = "mean", lonely.TSU = "mean"){ y <- dat[, v] wk.ij <- dat$w / dat$w2ij qij <- table(dat$ssuID) qbbar <- mean(qij) xx.psu <- do.call("rbind",list(by(1:nrow(dat),dat$psuID,head,1))) w1i.psu <- dat[xx.psu, "w1i"] pp <- 1/(m * dat[xx.psu,]$w1i) xx.ssu <- do.call("rbind",list(by(1:nrow(dat),dat$ssuID,head,1))) ni <- table(dat[xx.ssu, "psuID"]) f2i <- ni/Ni nbar <- mean(ni) w2ij.ssu <- dat[xx.ssu, "w2ij"] w2ijC.ssu <- w2ij.ssu/dat[xx.ssu, "w1i"] M.hat <- sum(w1i.psu) Qbbar <- sum(w2ij.ssu*Qij) / sum(w2ij.ssu) Qbar <- sum(w1i.psu*Qi) / M.hat tij <- by(wk.ij*y, dat$ssuID, sum) ti <- by(as.vector(w2ijC.ssu*tij), dat[xx.ssu,]$psuID, sum) t.pwr <- sum(dat$w * y) S2ai <- by(as.vector(tij), INDICES = dat[xx.ssu,]$psuID, var) S2ai.miss <- is.na(S2ai) if (lonely.SSU == "mean"){ S2ai[S2ai.miss] <- mean(S2ai[!S2ai.miss]) } else if (lonely.SSU == "zero"){ S2ai[S2ai.miss] <- 0 } else {stop("Illegal value of lonely.SSU: ", lonely.SSU, "\n")} S3ij <- by(y, INDICES = dat$ssuID, var) V3ij <- Qij * (Qij/qij - 1) * S3ij V3ijb <- Qij^2 * S3ij S2bi <- by(as.vector(V3ij), INDICES = dat[xx.ssu,]$psuID, sum)/ni S2bi.miss <- is.na(S2bi) if (lonely.SSU == "mean"){ S2bi[S2bi.miss] <- mean(S2bi[!S2bi.miss]) } else if (lonely.SSU == "zero"){ S2bi[S2bi.miss] <- 0 } else {stop("Illegal value of lonely.SSU: ", lonely.SSU, "\n")} sV3i <- by(as.vector(V3ij), INDICES = dat[xx.ssu,]$psuID, sum) sV3ib <- by(as.vector(V3ijb), INDICES = dat[xx.ssu,]$psuID, sum) sV3ib.miss <- is.na(sV3ib) if (lonely.TSU == "mean"){ sV3ib[sV3ib.miss] <- mean(sV3ib[!sV3ib.miss]) } else if (lonely.TSU == "zero"){ sV3ib[sV3ib.miss] <- 0 } else {stop("Illegal value of lonely.TSU: ", lonely.TSU, "\n")} S1a <- sum((ti/pp - t.pwr)^2)/(m-1) S1b <- sum(Ni^2/ni/pp^2*((1-f2i)*S2ai + f2i*S2bi))/m V3i <- vector("numeric", length = length(unique(dat$psuID))) PSUs <- unique(dat$psuID) for (ind in 1:length(PSUs)) { pick <- dat$psuID == PSUs[ind] V3i[ind] <- wtdvar(y[pick], w = dat$w[pick]) } V3i.miss <- is.na(V3i) if (lonely.SSU == "mean"){ V3i[V3i.miss] <- mean(V3i[!V3i.miss]) } else if (lonely.SSU == "zero"){ V3i[V3i.miss] <- 0 } else {stop("Illegal value of lonely.SSU: ", lonely.SSU, "\n")} Vtsu <- sum(Ni^2/ni^2 * w1i.psu^2 * sV3i) Vssu <- sum(Ni^2/ni/(m*pp)^2*(1-f2i)*(S2ai - S2bi)) Vpsu <- (S1a - S1b)/m B <- (S1a - S1b) / t.pwr^2 W <- sum(Qi^2 * V3i / m /pp^2) / t.pwr^2 W2 <- sum(Ni^2/m/pp^2*(S2ai - S2bi)) W2 <- W2 / t.pwr^2 W3 <- sum(Ni^2/ni/m/pp^2 * sV3ib) W3 <- W3 / t.pwr^2 y.mn <- sum(dat$w * y) / sum(dat$w) V <- wtdvar(x=y, w=dat$w) k1 <- (B + W)/(V/y.mn^2) k2 <- (W2 + W3)/(V/y.mn^2) delta1 <- B / (B + W) delta2 <- W2 / (W2 + W3) c(Vpsu=Vpsu, Vssu=Vssu, Vtsu=Vtsu, B=B, W=W, k1=k1, W2=W2, W3=W3, k2=k2, delta1=delta1, delta2=delta2) }
setClass( Class="ParamsScenarios", representation=representation( horizon = "numeric", nScenarios = "numeric", vol="numeric", k="numeric", volStock="numeric", volRealEstate="numeric", stock0="numeric", realEstate0="numeric", volDefault="numeric", alpha="numeric", beta="numeric", eta="numeric", liquiditySpread0="numeric", defaultSpread0="numeric", rho="numeric" ) )
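# Construction sketch (field values are illustrative only): instantiate the
# scenario-parameter container with new(); unset numeric slots default to
# numeric(0) until the calling code fills them.
# params <- new("ParamsScenarios",
#               horizon = 10, nScenarios = 1000,
#               vol = 0.02, k = 0.1, volStock = 0.2)
# params@horizon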
MAXIF <- function(range,criteria, max_range) {
# test ">=" and "<=" before ">" and "<" so the two-character operators are not shadowed by the one-character branches
if(suppressWarnings(!is.na(as.numeric(criteria)))){ c1 <- "==" } else if (str_detect(criteria,"^>=")){ c1 <- ">=" criteria <- extract_numeric(criteria) } else if (str_detect(criteria,"^<=")){ c1 <- "<=" criteria <- extract_numeric(criteria) } else if (str_detect(criteria,"^>")){ c1 <- ">" criteria <- extract_numeric(criteria) } else if (str_detect(criteria,"^<")){ c1 <- "<" criteria <- extract_numeric(criteria) } else if (is.character(criteria)){ c1 <- "==" } ret <- max(max_range[get(c1)(range,criteria)] ) ret }
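# Worked example (self-contained apart from stringr, which MAXIF uses for
# str_detect): the maximum of max_range over the rows where range satisfies
# the Excel-style criteria string.
# library(stringr)
# MAXIF(c(1, 2, 3, 4), ">2", c(10, 20, 30, 40))   # 40
# MAXIF(c("a", "b", "a"), "a", c(5, 9, 7))        # 7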
DGPs <- list.files("../data/", pattern = "DGP_") for(i in DGPs) { load(paste0("../data/", i)) dat <- MASS::mvrnorm(200, rep(0, nrow(Sigma$Sigma)), Sigma = Sigma$Sigma, empirical = TRUE) res <- csem(dat, model_Sigma) test_that(paste("testOMF works for DGP: ", i, "with default values"), { testOMF( .object = res, .R = 4, .handle_inadmissibles = "replace" ) }) test_that(paste("All arguments of testOMF work for DGP: ", i), { testOMF( .object = res, .R = 4, .fit_measures = TRUE, .alpha = c(0.1, 0.05), .handle_inadmissibles = "replace", .seed = 2010 ) }) }
# 'res' below is the model fitted for the last DGP in the loop above
test_that(paste(".seed in testOMF works correctly"), { r1 <- .Random.seed a <- testOMF( .object = res, .R = 10, .seed = 1303 ) r2 <- .Random.seed b <- testOMF( .object = res, .R = 10, .seed = 1303 ) expect_equal(a$Information$Bootstrap_values, b$Information$Bootstrap_values) expect_identical(r1, r2) })
hddc <- function(data, K=1:10, model=c("AkjBkQkDk"), threshold=0.2, criterion="bic", com_dim=NULL, itermax=200, eps=1e-3, algo='EM', d_select="Cattell", init='kmeans', init.vector, show=getHDclassif.show(), mini.nb=c(5, 10), scaling=FALSE, min.individuals=2, noise.ctrl=1e-8, mc.cores=1, nb.rep=1, keepAllRes=TRUE, kmeans.control = list(), d_max=100, subset=Inf, d){ if(!missing(d) && missing(d_select)) d_select = d call = match.call() hddc_control(call) criterion = myAlerts(criterion, "criterion", "singleCharacterMatch.arg", "HDDC: ", c("bic", "icl")) algo = myAlerts(algo, "algo", "singleCharacterMatch.arg", "HDDC: ", c('EM', 'CEM', 'SEM')) d_select = myAlerts(d_select, "d_select", "singleCharacterMatch.arg", "HDDC: ", c("cattell", "bic")) init = myAlerts(init, "init", "singleCharacterMatch.arg", "HDDC: ", c('random', 'kmeans', 'mini-em', 'param', "vector")) model = hdc_getTheModel(model, all2models = TRUE) kmeans.control = default_kmeans_control(kmeans.control) data <- as.matrix(data) if (scaling) { data <- scale(data) scaling <- list(mu=attr(data, "scaled:center"), sd=attr(data, "scaled:scale")) } else scaling <- NULL BIC <- ICL <- c() p <- ncol(data) if(d_select=="bic"){ threshold = "bic" } if(max(table(K))>1) warning("The number of clusters, K, is made unique (repeated values are not tolerated).") K = sort(unique(K)) if(any(K==1)){ K = K[K!=1] addrows = data.frame(model="AKJBKQKDK", K=1, threshold) } else { addrows = c() } mkt_Expand = expand.grid(model=model, K=K, threshold=threshold) mkt_Expand = do.call(rbind, replicate(nb.rep, mkt_Expand, simplify=FALSE)) mkt_Expand = rbind(addrows, mkt_Expand) model = as.character(mkt_Expand$model) K = mkt_Expand$K threshold = mkt_Expand$threshold mkt_univariate = apply(mkt_Expand, 1, paste, collapse= "_") hddcWrapper = function(mkt_univariate, ...){ mkt_splitted = strsplit(mkt_univariate, "_") model = sapply(mkt_splitted, function(x) x[1]) K = sapply(mkt_splitted, function(x) as.numeric(x[2])) threshold = sapply(mkt_splitted, function(x) ifelse(x[3]=="bic","bic",as.numeric(x[3]))) res = "unknown error" try(res <- hddc_main(model=model, K=K, threshold=threshold, ...)) res } nRuns = length(mkt_univariate) if(nRuns<mc.cores) mc.cores = nRuns max_nb_of_cores = parallel::detectCores() if(mc.cores>max_nb_of_cores){ warning("The argument mc.cores is greater than its maximum.\nmc.cores was set to ", max_nb_of_cores) mc.cores = max_nb_of_cores } if(mc.cores == 1){ par.output = lapply(mkt_univariate, hddcWrapper, DATA=data, method=d_select, algo=algo, itermax=itermax, eps=eps, init=init, init.vector=init.vector, mini.nb=mini.nb, min.individuals=min.individuals, noise.ctrl=noise.ctrl, com_dim=com_dim, kmeans.control=kmeans.control, d_max=d_max, subset = subset) } else if(Sys.info()[['sysname']] == 'Windows'){ cl = parallel::makeCluster(mc.cores) loadMyPackages = function(x){ library(HDclassif) } par.setup = parallel::parLapply(cl, 1:length(cl), loadMyPackages) par.output = NULL try(par.output <- parallel::parLapply(cl, mkt_univariate, hddcWrapper, DATA=data, method=d_select, algo=algo, itermax=itermax, eps=eps, init=init, init.vector=ifelse(missing(init.vector), NA, init.vector), mini.nb=mini.nb, min.individuals=min.individuals, noise.ctrl=noise.ctrl, com_dim=com_dim, kmeans.control=kmeans.control, d_max=d_max, subset = subset)) parallel::stopCluster(cl) if(is.null(par.output)) stop("Unknown error in the parallel computing. 
Try mc.cores=1 to detect the problem.") } else { par.output = NULL try(par.output <- parallel::mclapply(mkt_univariate, hddcWrapper, DATA=data, method=d_select, algo=algo, itermax=itermax, eps=eps, init=init, init.vector=init.vector, mini.nb=mini.nb, min.individuals=min.individuals, noise.ctrl=noise.ctrl, com_dim=com_dim, kmeans.control=kmeans.control, d_max=d_max, subset = subset, mc.cores=mc.cores)) if(is.null(par.output)) stop("Unknown error in the parallel computing. Try mc.cores=1 to detect the problem.") } getElement = function(x, what, valueIfNull = -Inf){ if(length(x)==1) return(valueIfNull) if(!is.list(x) && !what %in% names(x)) return(NA) x[[what]][length(x[[what]])] } getComment = function(x){ if(length(x)==1) return(x) return("") } LL_all = sapply(par.output, getElement, what="loglik") comment_all = sapply(par.output, getComment) if(all(!is.finite(LL_all))){ warning("All models diverged.") allCriteria = data.frame(model=model, K=K, threshold=threshold, LL = LL_all, BIC=NA, comment=comment_all) res = list() res$allCriteria = allCriteria return(res) } n = nrow(mkt_Expand) modelKeep = sapply(unique(mkt_univariate), function(x) (1:n)[mkt_univariate==x][which.max(LL_all[mkt_univariate==x])]) LL_all = LL_all[modelKeep] comment_all = comment_all[modelKeep] par.output = par.output[modelKeep] BIC = sapply(par.output, getElement, what="BIC") ICL = sapply(par.output, getElement, what="ICL") comp_all = sapply(par.output, getElement, what="complexity", valueIfNull=NA) model = model[modelKeep] threshold = threshold[modelKeep] K = K[modelKeep] CRIT = switch(criterion, bic = BIC, icl = ICL) myOrder = order(CRIT, decreasing = TRUE) qui = which.max(CRIT) prms = par.output[[qui]] prms$criterion = CRIT[qui] names(prms$criterion) = criterion prms$call = call if(show){ if(n>1) cat("HDDC: \n") model2print = sapply(model, function(x) sprintf("%*s", max(nchar(model)), x)) K2print = as.character(K) K2print = sapply(K2print, function(x) sprintf("%*s", max(nchar(K2print)), x)) thresh2print = as.character(threshold) thresh_width = max(nchar(thresh2print)) thresh2print = sapply(thresh2print, function(x) sprintf("%s%s", x, paste0(rep("0", thresh_width - nchar(x)), collapse="") )) myResMat = cbind(model2print[myOrder], K2print[myOrder], thresh2print[myOrder], addCommas(CRIT[myOrder]), comment_all[myOrder]) myResMat = as.data.frame(myResMat) names(myResMat) = c("model", "K", "threshold", toupper(criterion), "comment") row.names(myResMat) = 1:nrow(myResMat) if(all(comment_all == "")) myResMat$comment = NULL print(myResMat) msg = switch(criterion, bic="BIC", icl="ICL") cat("\nSELECTED: model ", prms$model, " with ", prms$K, " clusters.\n") cat("Selection Criterion: ", msg, ".\n", sep="") } allCriteria = data.frame(model=model[myOrder], K=K[myOrder], threshold=threshold[myOrder], LL=LL_all[myOrder], BIC=BIC[myOrder], ICL=ICL[myOrder], rank = 1:length(myOrder), originalOrder = myOrder, complexity = comp_all[myOrder]) if(any(comment_all != "")) allCriteria$comment = comment_all[myOrder] prms$allCriteria = allCriteria if(keepAllRes){ all_results = par.output names(all_results) = mkt_univariate[modelKeep] prms$all_results = all_results } prms$scaling <- scaling prms$threshold <- threshold[qui] return(prms) } hddc_main <- function(DATA, K, model, threshold, method, algo, itermax, eps, init, init.vector, mini.nb, min.individuals, noise.ctrl, com_dim=NULL, kmeans.control, d_max, subset, ...){ debug = FALSE ModelNames <- c("AKJBKQKDK", "AKBKQKDK", "ABKQKDK", "AKJBQKDK", "AKBQKDK", "ABQKDK", "AKJBKQKD", "AKBKQKD", 
"ABKQKD", "AKJBQKD", "AKBQKD", "ABQKD", "AJBQD", "ABQD") p <- ncol(DATA) N <- nrow(DATA) com_ev <- NULL isSubset = FALSE if(subset < N){ isSubset = TRUE DATA_save = DATA id_subset = sample(N, subset) DATA = DATA[id_subset, ] N = subset } d_max = min(N, p, d_max) if ( any(model==ModelNames[7:14]) ){ MU <- colMeans(DATA) if (N<p) { Y <- (DATA-matrix(MU, N, p, byrow=TRUE))/sqrt(N) YYt <- tcrossprod(Y) com_ev <- hdc_myEigen(YYt, d_max, only.values = TRUE)$values } else{ S <- crossprod(DATA-matrix(MU, N, p, byrow=TRUE))/N com_ev <- hdc_myEigen(S, d_max, only.values = TRUE)$values } if(is.null(com_dim)) com_dim <- hdclassif_dim_choice(com_ev, N, method, threshold, FALSE, noise.ctrl) } if (K>1){ t <- matrix(0, N, K) if(init == "vector"){ init.vector = unclass(init.vector) name <- unique(init.vector) for (i in 1:K) t[which(init.vector==name[i]), i] <- 1 } else if (init=='param'){ MU <- colMeans(DATA) prop <- rep(1/K, K) S <- crossprod(DATA - matrix(MU, N, p, byrow=TRUE))/N donnees <- eigen(S, symmetric=TRUE) ev <- donnees$values d <- if(is.numeric(method)) method else hdclassif_dim_choice(ev, N, method, threshold, FALSE, noise.ctrl) a <- ev[1:d] b <- sum(ev[(d[1]+1):p])/(p-d[1]) Q <- donnees$vectors[, 1:d] mu <- MASS::mvrnorm(K, MU, S) K_pen <- diag((mu%*%Q%*%diag(1/a, d, d))%*%(t(Q)%*%t(mu)))-2*(mu%*%Q%*%diag(1/a, d, d))%*%(t(Q)%*%t(DATA))+1/b*(diag(tcrossprod(mu))-2*mu%*%t(DATA)+2*(mu%*%Q)%*%(t(Q)%*%t(DATA))-diag(tcrossprod(mu%*%Q)))-2*log(c(prop)) t <- matrix(0, N, K) for (i in 1:K) t[, i]=1/rowSums(exp((K_pen[i, ]-t(K_pen))/2)) } else if (init=='kmeans') { kmc = kmeans.control cluster <- kmeans(DATA, K, iter.max=kmc$iter.max, nstart=kmc$nstart, algorithm=kmc$algorithm, trace=kmc$trace)$cluster for (i in 1:K) t[which(cluster==i), i] <- 1 } else if (init=='mini-em'){ prms_best <- 1 for (i in 1:mini.nb[1]){ prms <- hddc_main(DATA, K, model, threshold, method, algo, mini.nb[2], 0, 'random', mini.nb = mini.nb, min.individuals = min.individuals, noise.ctrl = noise.ctrl, com_dim = com_dim, d_max=d_max, subset=subset) if(length(prms)!=1){ if (length(prms_best)==1) prms_best <- prms else if (prms_best$loglik[length(prms_best$loglik)]<prms$loglik[length(prms$loglik)]) prms_best <- prms } } if (length(prms_best)==1) return(1) t <- prms_best$posterior } else { t <- t(rmultinom(N, 1, rep(1/K, K))) compteur=1 while(min(colSums(t))<1 && (compteur <- compteur+1)<5) t <- t(rmultinom(N, 1, rep(1/K, K))) if(min(colSums(t))<1) return("Random initialization failed (n too small)") } } else t <- matrix(1, N, 1) likely <- c() iter <- 0 converged = FALSE IS_ALTERNATION = FALSE while ((iter <- iter+1)<=itermax && !converged){ if (algo!='EM' && iter!=1) t <- t2 if(debug) cat("Cluster sizes: ", colSums(t), "\n") if (K>1){ if(any(is.na(t))) return("unknown error: NA in t_ik") if(any(colSums(t>1/K)<min.individuals)) return("pop<min.individuals") } if(debug) cat("m-step...") m <- hddc_m_step(DATA, K, t, model, threshold, method, noise.ctrl, com_dim, d_max) if(debug) cat("e-step...") t <- hddc_e_step(DATA, m) L <- t$L t <- t$t if (algo=='CEM') { t2 <- matrix(0, N, K) t2[cbind(1:N, max.col(t))] <- 1 } else if(algo=='SEM') { t2 <- matrix(0, N, K) for (i in 1:N) t2[i, ] <- t(rmultinom(1, 1, t[i, ])) } likely[iter] <- L if (iter!=1){ abs_diff <- abs(L - likely[iter-1]) if((abs_diff < eps) || (abs_diff/(0.1 + abs(L)) < eps)){ converged = TRUE } } if(IS_ALTERNATION){ break } if(iter > 20 && !converged){ abs_diff_1 <- abs(L - likely[iter - 2]) if((abs_diff_1 < eps) || (abs_diff_1/(0.1 + abs(L)) < eps)){ L_m1 = likely[iter - 1] 
abs_diff_2 <- abs(L_m1 - likely[iter - 3]) if((abs_diff_2 < eps) || (abs_diff_2/(0.1 + abs(L_m1)) < eps)){ attr(converged, "reason") = "Alternation" if(L < L_m1){ IS_ALTERNATION = TRUE } else { break } } } } if(debug){ print("d=") print(m$d) print("a=") print(m$a) print("b=") print(m$b) } } if(iter >= itermax && itermax != mini.nb[2]){ warning("Maximum iterations reached (", itermax, "). Increase 'itermax'? It may be worth plotting the evolution of the log-likelihood (element loglik_all).") } if ( model%in%c('AKBKQKDK', 'AKBQKDK', 'AKBKQKD', 'AKBQKD') ) { a <- matrix(m$a[, 1], 1, m$K, dimnames=list(c("Ak:"), 1:m$K)) } else if(model=='AJBQD') { a <- matrix(m$a[1, ], 1, m$d[1], dimnames=list(c('Aj:'), paste('a', 1:m$d[1], sep=''))) } else if ( model%in%c('ABKQKDK', 'ABQKDK', 'ABKQKD', 'ABQKD', "ABQD") ) { a <- matrix(m$a[1], dimnames=list(c('A:'), c(''))) } else a <- matrix(m$a, m$K, max(m$d), dimnames=list('Class'=1:m$K, paste('a', 1:max(m$d), sep=''))) if ( model%in%c('AKJBQKDK', 'AKBQKDK', 'ABQKDK', 'AKJBQKD', 'AKBQKD', 'ABQKD', 'AJBQD', "ABQD") ) { b <- matrix(m$b[1], dimnames=list(c('B:'), c(''))) } else b <- matrix(m$b, 1, m$K, dimnames=list(c("Bk:"), 1:m$K)) d <- matrix(m$d, 1, m$K, dimnames=list(c('dim:'), "Intrinsic dimensions of the classes:"=1:m$K)) mu <- matrix(m$mu, m$K, p, dimnames=list('Class'=1:m$K, 'Posterior group means:'=paste('V', 1:p, sep=''))) prop <- matrix(m$prop, 1, m$K, dimnames=list(c(''), 'Posterior probabilities of groups'=1:m$K)) complexity <- hdc_getComplexity(m, p) class(b) <- class(a) <- class(d) <- class(prop) <- class(mu) <- 'hd' cls <- max.col(t) params = list(model=model, K=K, d=d, a=a, b=b, mu=mu, prop=prop, ev=m$ev, Q=m$Q, loglik=likely[length(likely)], loglik_all = likely, posterior=t, class=cls, com_ev=com_ev, N=N, complexity=complexity, threshold=threshold, d_select=method, converged=converged, iterations=iter-1) if(isSubset){ e = hddc_e_step(DATA_save, m) params$loglik = e$L params$posterior = e$t params$id_subset = id_subset if (model%in%c("ABQD", "AJBQD")){ DATA = DATA_save } } bic_icl = hdclassif_bic(params, p, DATA) params$BIC = bic_icl$bic params$ICL = bic_icl$icl class(params) <- 'hdc' return(params) } hddc_e_step <- function(x, par){ p <- ncol(x) N <- nrow(x) K <- par$K a <- par$a b <- par$b mu <- par$mu d <- par$d prop <- par$prop Q <- par$Q b[b<1e-6] <- 1e-6 if(par$model=="AJBQD") { K_pen <- diag((mu%*%Q%*%diag(1/a[1,1:d[1]],d[1]))%*%(t(Q)%*%t(mu)))-2*(mu%*%Q%*%diag(1/a[1,1:d[1]],d[1]))%*%(t(Q)%*%t(x))+1/b[1]*(diag(tcrossprod(mu))-2*mu%*%t(x)+2*(mu%*%Q)%*%(t(Q)%*%t(x))-diag(tcrossprod(mu%*%Q)))-2*log(c(prop)) } else if(par$model=="ABQD") { K_pen <- diag(1/a[1]*(mu%*%Q)%*%(t(Q)%*%t(mu)))+1/b[1]*(diag(tcrossprod(mu))-2*mu%*%t(x)-diag(tcrossprod(mu%*%Q)))-2*log(c(prop))+2*(1/b[1]-1/a[1])*(mu%*%Q)%*%(t(Q)%*%t(x)) } else{ K_pen <- matrix(0,K,N) for (i in 1:K) { s <- sum(log(a[i,1:d[i]])) X <- x - matrix(mu[i,], N, p, byrow=TRUE) proj <- (X%*%Q[[i]])%*%t(Q[[i]]) A <- (-proj)%*%Q[[i]]%*%sqrt(diag(1/a[i,1:d[i]],d[i])) B <- X-proj K_pen[i,] <- rowSums(A^2)+1/b[i]*rowSums(B^2)+s+(p-d[i])*log(b[i])-2*log(prop[i])+p*log(2*pi) } } A <- -1/2*t(K_pen) A_max = apply(A,1,max) L <- sum(log(rowSums(exp(A-A_max))) + A_max) t <- matrix(0,N,K) for (i in 1:K) t[,i] <- 1/rowSums(exp((K_pen[i,]-t(K_pen))/2)) list(t=t, L=L) } hddc_m_step <- function(x, K, t, model, threshold, method, noise.ctrl, com_dim, d_max){ N <- nrow(x) p <- ncol(x) prop <- c() n <- colSums(t) prop <- n/N mu <- matrix(NA, K, p) for (i in 1:K) mu[i, ] <- colSums(x*t[, i])/n[i] ind <- apply(t>0, 
2, which) n_bis <- c() for(i in 1:K) n_bis[i] <- length(ind[[i]]) traceVect = c() if (N<p) { if( model%in%c("AJBQD", "ABQD") ){ Y <- matrix(0, N, p) for (i in 1:K) Y <- Y+(x-matrix(mu[i, ], N, p, byrow=TRUE))/sqrt(N)*sqrt(t[, i]) YYt = tcrossprod(Y) donnees <- hdc_myEigen(YYt, d_max) traceVect = sum(diag(YYt)) ev <- donnees$values } else { Y <- vector(mode='list', length=K) ev <- matrix(0, K, d_max) Q <- vector(mode='list', length=K) for (i in 1:K){ Y[[i]] <- (x-matrix(mu[i, ], N, p, byrow=TRUE))/sqrt(n[i])*sqrt(t[, i]) YYt = tcrossprod(Y[[i]]) donnees <- hdc_myEigen(YYt, d_max) traceVect[i] = sum(diag(YYt)) ev[i, ] <- donnees$values Q[[i]] <- donnees$vectors } } } else if ( model%in%c("AJBQD", "ABQD") ){ W <- matrix(0, p, p) for (i in 1:K) W <- W + crossprod((x-matrix(mu[i, ], N, p, byrow=TRUE))*sqrt(t[, i]))/N donnees <- hdc_myEigen(W, d_max) traceVect = sum(diag(W)) ev <- donnees$values } else { ev <- matrix(0, K, d_max) Q <- vector(mode='list', length=K) for (i in 1:K){ W = crossprod((x-matrix(mu[i, ], N, p, byrow=TRUE))*sqrt(t[, i]))/n[i] donnees <- hdc_myEigen(W, d_max) traceVect[i] = sum(diag(W)) ev[i, ] <- donnees$values Q[[i]] <- donnees$vectors } } if (model%in%c("AJBQD", "ABQD")){ d <- rep(com_dim, length=K) } else if ( model%in%c("AKJBKQKD", "AKBKQKD", "ABKQKD", "AKJBQKD", "AKBQKD", "ABQKD") ){ dmax <- min(apply((ev>noise.ctrl)*rep(1:ncol(ev), each=K), 1, which.max))-1 if(com_dim>dmax) com_dim <- max(dmax, 1) d <- rep(com_dim, length=K) } else { d <- hdclassif_dim_choice(ev, n, method, threshold, FALSE, noise.ctrl) } if ( model%in%c("AJBQD", "ABQD") ){ if (N>=p) Q <- matrix(donnees$vectors[, 1:d[1]], p, d[1]) else { Q <- matrix(t(Y)%*%donnees$vectors[, 1:d[1]], p, d[1]) normalise <- c() for(i in 1:d[1]) normalise[i] <- as.double(crossprod(Q[, i])) Q <- Q/matrix(sqrt(normalise), p, d, byrow=TRUE) } } else if(N>=p) { for(i in 1:K) Q[[i]] <- matrix(Q[[i]][, 1:d[i]], p, d[i]) } else{ for (i in 1:K){ Q[[i]] <- t(Y[[i]])%*%(Q[[i]][, 1:d[i]]) normalise <- c() for (j in 1:d[i]) normalise[j] <- as.double(crossprod(as.matrix(Q[[i]][, j]))) Q[[i]] <- Q[[i]]/matrix(sqrt(normalise), p, d[i], byrow=TRUE) } } ai <- matrix(NA, K, max(d)) if ( model%in%c('AKJBKQKDK', 'AKJBQKDK', 'AKJBKQKD', 'AKJBQKD') ){ for (i in 1:K) ai[i, 1:d[i]] <- ev[i, 1:d[i]] } else if ( model%in%c('AKBKQKDK', 'AKBQKDK' , 'AKBKQKD', 'AKBQKD') ){ for (i in 1:K) ai[i, ] <- rep(sum(ev[i, 1:d[i]])/d[i], length=max(d)) } else if(model=="AJBQD"){ for (i in 1:K) ai[i, ] <- ev[1:d[1]] } else if(model=="ABQD") { ai[] <- sum(ev[1:d[1]])/d[1] } else { a <- 0 eps <- sum(prop*d) for (i in 1:K) a <- a + sum(ev[i, 1:d[i]])*prop[i] ai <- matrix(a/eps, K, max(d)) } bi <- c() denom = p if ( model%in%c('AKJBKQKDK', 'AKBKQKDK', 'ABKQKDK', 'AKJBKQKD', 'AKBKQKD', 'ABKQKD') ){ for(i in 1:K){ remainEV = traceVect[i] - sum(ev[i, 1:d[i]]) bi[i] <- remainEV/(denom-d[i]) } } else if ( model%in%c("ABQD", "AJBQD") ){ remainEV = traceVect - sum(ev[1:d[1]]) bi[1:K] <- remainEV/(denom-d[1]) } else { b <- 0 eps <- sum(prop*d) for(i in 1:K){ remainEV = traceVect[i] - sum(ev[i, 1:d[i]]) b <- b + remainEV*prop[i] } bi[1:K] <- b/(denom-eps) } bi[bi<noise.ctrl] = noise.ctrl list(model=model, K=K, d=d, a=ai, b=bi, mu=mu, prop=prop, ev=ev, Q=Q) } hddc_ari <- function(x,y){ x <- as.vector(x) y <- as.vector(y) tab <- table(x, y) if (all(dim(tab) == c(1, 1))) return(1) a <- sum(choose(tab, 2)) b <- sum(choose(rowSums(tab), 2)) - a c <- sum(choose(colSums(tab), 2)) - a d <- choose(sum(tab), 2) - a - b - c ARI <- (a - (a + b) * (a + c)/(a + b + c + d))/((a + b + a + 
c)/2 - (a + b) * (a + c)/(a + b + c + d)) return(ARI) } slopeHeuristic <- function(x, plot = FALSE){ main_data = x$allCriteria who_notNA_norInfinite = !is.na(main_data$complexity) & is.finite(main_data$LL) n_valid = sum(who_notNA_norInfinite) if(n_valid == 0){ stop("There is not any valid model to be selected.") } else if(n_valid <= 2){ stop("At least 3 valid models are necessary to perform the slope heuristic. Otherwise, use another criterion.") } main_data = main_data[who_notNA_norInfinite, ] fit = MASS::rlm(LL ~ complexity, data=main_data, method='MM') fit_coef = fit$coefficients if(fit_coef[2]<0){ fit_coef[2]=0 } llpen = main_data$LL- 2* fit_coef[2]*main_data$complexity SH = 2* llpen res = x$allCriteria res$SlopeHeuristic = NA res$SlopeHeuristic[who_notNA_norInfinite] = SH if(plot){ color_comp = "darkred" color_SH = "navyblue" pch_comp = 17 pch_SH = 15 x = main_data$complexity y = main_data$LL plot(x, y, xlab = "Complexity", main = "Slope Heuristic", ylab = "", pch = pch_comp, col = color_comp, axes = FALSE) graphics::box() axis(1) axis(2, col.axis = color_comp) abline(fit_coef[1], fit_coef[2], col = color_comp) new_y = SH y_min = min(y) y_max = max(y) new_y_oldCoords = ( (new_y - min(new_y))/diff(range(new_y)) ) * (y_max - y_min) + y_min points(x, new_y_oldCoords, pch = pch_SH, col = color_SH) right_coord = axis(4, labels = NA, lwd = 0) right_coord_label = ( (right_coord - min(right_coord))/diff(range(right_coord)) ) * (max(SH) - min(SH)) + min(SH) axis(4, right_coord, round(right_coord_label), col.axis = color_SH) graphics::title(ylab = expression(paste("Likelihood", " ", phantom("Slope Heuristic (square, right)"))), col.lab = color_comp) graphics::title(ylab = expression(paste(phantom("Likelihood"), " / ", phantom("Slope Heuristic (square, right)")))) graphics::title(ylab = expression(paste(phantom("Likelihood "), "Slope Heuristic (square, right)")), col.lab = color_SH) } i = which.max(SH) best_model = res$originalOrder[which.max(SH)] names(best_model) = paste0(res$model[i], "_K", res$K[i], "_Thresh", res$threshold[i]) list(best_model_index = best_model, allCriteria = res) } hddc_control = function(call){ prefix = "HDDC: " myCallAlerts(call, "data", "matrix,data.frame", 3, TRUE, prefix) myCallAlerts(call, "K", "integerVector", 3, FALSE, prefix) myCallAlerts(call, "model", "vector", 3, FALSE, prefix) myCallAlerts(call, "threshold", "numericVectorGE0LE1", 3, FALSE, prefix) myCallAlerts(call, "criterion", "character", 3, FALSE, prefix) myCallAlerts(call, "com_dim", "singleIntegerGE1", 3, FALSE, prefix) myCallAlerts(call, "itermax", "singleIntegerGE0", 3, FALSE, prefix) myCallAlerts(call, "eps", "singleNumericGE0", 3, FALSE, prefix) myCallAlerts(call, "graph", "singleLogical", 3, FALSE, prefix) myCallAlerts(call, "algo", "singleCharacter", 3, FALSE, prefix) myCallAlerts(call, "d_select", "singleCharacter", 3, FALSE, prefix) myCallAlerts(call, "init", "singleCharacter", 3, FALSE, prefix) myCallAlerts(call, "show", "singleLogical", 3, FALSE, prefix) myCallAlerts(call, "mini.nb", "integerVectorGE1", 3, FALSE, prefix) myCallAlerts(call, "scaling", "singleLogical", 3, FALSE, prefix) myCallAlerts(call, "min.individuals", "singleIntegerGE2", 3, FALSE, prefix) myCallAlerts(call, "noise.ctrl", "singleNumericGE0", 3, FALSE, prefix) myCallAlerts(call, "mc.cores", "singleIntegerGE1", 3, FALSE, prefix) myCallAlerts(call, "nb.rep", "singleIntegerGE1", 3, FALSE, prefix) myCallAlerts(call, "keepAllRes", "singleLogical", 3, FALSE, prefix) myCallAlerts(call, "d_max", "singleIntegerGE1", 3, FALSE, prefix) 
myCallAlerts(call, "subset", "singleNumericGE1", 3, FALSE, prefix) data = eval.parent(call[["data"]], 2) K = eval.parent(call[["K"]], 2) init = eval.parent(call[["init"]], 2) criterion = eval.parent(call[["criterion"]], 2) if (any(is.na(data))) stop("NA values in the data are not supported. Please remove them beforehand.") if(any(K>2*NROW(data))) stop("The number of observations must be at least twice the number of clusters ") if(!is.null(init)){ init = myAlerts(init, "init", "singleCharacterMatch.arg", "HDDC: ", c('random', 'kmeans', 'mini-em', 'param', "vector")) if(init == "vector"){ myCallAlerts(call, "init.vector", "(integer,factor)Vector", 3, FALSE, prefix) init.vector = eval.parent(call[["init.vector"]], 2) if(is.null(init.vector)) stop("HDDC: When init='vector', the argument 'init.vector' should be provided.") if(length(unique(K))>1) stop("HDDC: Several number of classes K cannot be estimated when init='vector'.") init.vector <- unclass(as.factor(init.vector)) if(K!=max(init.vector)) stop("The number of class K, and the number of classes in the initialization vector are different") if( length(init.vector)!=nrow(data) ) stop("The size of the initialization vector is different of the size of the data") } if (init=='param' && nrow(data)<ncol(data)){ stop("The 'param' initialization can't be done when N<p") } if (init=='mini-em'){ mini.nb = eval.parent(call[["mini.nb"]], 2) if(!is.null(mini.nb) && length(mini.nb)!=2){ stop("The parameter mini.nb must be a vector of length 2 with integers\n") } } } } default_kmeans_control = function(control){ myAlerts(control,"kmeans.control","list","kmeans controls: ") myDefault = list() myDefault$iter.max = 10 myDefault$nstart = 1 myDefault$algorithm = c("Hartigan-Wong", "Lloyd", "Forgy","MacQueen") myDefault$trace = FALSE myTypes = c("singleIntegerGE1", "singleIntegerGE1", "match.arg", "singleLogical") control = matchTypeAndSetDefault(control, myDefault, myTypes, "kmeans list of controls: ") return(control) }
default.image.par <- function(data.range, var.range, legend=TRUE) { var.col <- try(colorspace::rainbow_hcl(12, c = 50, l = 70), silent=TRUE) if (is(var.col, "try-error")) { var.col <- try(RColorBrewer::brewer.pal(9, "Blues"), silent=TRUE) if (is(var.col, "try-error")) { if (RFoptions()$internal$warn_colour_palette) { RFoptions(warn_colour_palette = FALSE) message("Better install one of the packages 'colorspace' or 'RColorBrewer'. (This message appears only once per session.)") } data.col <- heat.colors(36) var.col <- cm.colors(36) } else { data.col <- RColorBrewer::brewer.pal(9, "Reds") } } else { data.col <- colorspace::heat_hcl(12, c. = c(80, 30), l = c(30, 90), power = c(1/5, 1.5)) } list(data=list(dot.name="col", default.col=data.col, pch=16, cex=1, range=data.range), var=list(dot.name="var.col", default.col=var.col, pch=16, cex=1, range=var.range), legend = legend, lower.leg = if (legend) 0.85 else 1, arrows = list(reduction = 1.5, nx.vectors = 20, leg.pos=c(1, 0.7)), text.col="blue" ) } my.arrows <- function(xy, z, r, thinning, col, nrow) { half <- as.integer(thinning / 2) thinned <- c(rep(FALSE, half), TRUE, rep(FALSE, thinning - half)) if (!missing(nrow) && !is.null(nrow)) { thinned <- as.vector(outer(rep(thinned, length = nrow), rep(thinned, length = nrow(xy) / nrow), "&")) } arrows(x0=xy[thinned, 1] - r/2*z[thinned,1], y0=xy[thinned, 2] - r/2*z[thinned,2], x1=xy[thinned, 1] + r/2*z[thinned,1], y1=xy[thinned, 2] + r/2*z[thinned,2], length=0.03, col=col) }
# In prepareplotRFsp() and RFplotSimulation() below, the slot accesses written as 'x@.RFparams' and 'x@grid@cellcentre.offset' are reconstructions: the original tokens were obscured by an email-protection filter in this source.
prepareplotRFsp <- function(x, vdim, select, plot.var, data.range, var.range, MARGIN, n, n.slices, plot.legend, zlim, ...) { if (vdim == 1 && !identical(select, vdim)) stop("the given 'select.variables' do not match the data") timespacedim <- if (is(x, "RFspatialGridDataFrame")) length(x@grid@cellsize) else ncol(x@coords) if (!(length(MARGIN)==2)) stop("MARGIN must have length 2") if (!all(MARGIN %in% 1:timespacedim)) stop("chosen MARGINS are out of bounds") if (!missing(zlim)){ if (is.character(zlim)) stopifnot(zlim=="joint") mychk <- function(zlim) stopifnot((is.null(dim(zlim)) && length(zlim)==2) || (is.matrix(zlim) && nrow(zlim)==2)) if (is.numeric(zlim)) mychk(zlim) if (is.list(zlim)) { stopifnot(names(zlim) %in% c("data", "var")) lapply(zlim, mychk) } } coordunits <- x@.RFparams$coordunits; varunits <- x@.RFparams$varunits; graphics <- RFoptions()$graphics image.par <- default.image.par(data.range, var.range, legend=plot.legend) names.rep <- c(paste("realization", 1:(n-plot.var), sep=" "), "kriging variance") names.vdim <- if (!is.null(names(x@data)) && all(nchar(names(x@data))>0)) { if (is.list(select)) { u <- unlist(lapply(strsplit(names(x@data), ".n"), FUN=function(li) li[[1]])) lapply(select, function(indices) if (length(indices)==3) { if (FALSE) warning("first component interpreted as scalar, the other two as vector") paste(u[indices[1]], paste(u[indices[-1]], collapse="/"), sep=" and ") } else if (length(indices)>3) { stop("currently, only two-dimensional vectors can be plotted") } else paste(u[indices], collapse="/")) } else unlist(lapply(strsplit(names(x@data)[unlist(select)], ".n"), FUN=function(li) li[[1]])) } else { paste("variable", select) } names.coords <- if (.hasSlot(x, "coords")) dimnames(x@coords)[[2]] else names(x@grid@cellcentre.offset) dots <- mergeWithGlobal(list(...)) names.graphics <- names(graphics) for (i in 1:length(graphics)) { dots[[names.graphics[i]]] <- NULL } dotnames <- names(dots) if (bgInDots <- "bg" %in% dotnames) { bgdots <- dots$bg dots$bg <- NULL } lab <- xylabs(names.coords[MARGIN[1]], 
names.coords[MARGIN[2]], units=coordunits ) if (!("xlab" %in% dotnames)) dots$xlab <- lab$x if (!("ylab" %in% dotnames)) dots$ylab <- lab$y if (!("pch" %in% dotnames)) dots$pch=image.par$data$pch if (!("cex" %in% dotnames)) dots$cex=image.par$data$cex for (i in c("data", "var")) { if (i=="var" && !plot.var) next if (is.null(colour <- dots[[ image.par[[i]]$dot.name ]])) colour <- image.par[[i]]$default.col image.par[[i]]$col <- if (is.list(colour)) colour else list(colour) dots[[ image.par[[i]]$dot.name ]] <- NULL lencol <- length(colour) image.par[[i]]$range <- apply(image.par[[i]]$range, 2, function(x) { if (is.logical(all.equal(x[1], x[2]))) warning("range of data is a single value") stopifnot(all(is.finite(x))) r <- range(outer(x, 1 + 0.05 * c(-1,1), "*")) if (identical(r[1], r[2])) r[2] <- r[2]+1 return(r) }) if (!missing(zlim)) { idx <- unlist(lapply(as.list(select), FUN=function(x) if (length(x) == 2) NA else x[1])) idx <- idx[!is.na(idx)] if (is.character(zlim) && zlim=="joint") { image.par[[i]]$range[,idx] <- range(image.par[[i]]$range[,idx]) } else { zz <- if (is.list(zlim)) zlim[[i]] else zlim if (!is.null(zz)) image.par[[i]]$range[,idx] <- matrix(zz, nrow=2, ncol=length(idx)) } } image.par[[i]]$z.legend <- as.matrix(apply(image.par[[i]]$range, 2, function(x) seq(x[1], x[2], length=lencol))) image.par[[i]]$breaks <- as.matrix(apply(image.par[[i]]$range, 2, function(x) seq(x[1]-abs((x[2] - x[1])*1e-3), x[2]+abs((x[2]-x[1])*1e-3), len=lencol+1) )) } len.sel <- length(select) if (vdim>1) split.main <- c(n * n.slices, len.sel) else { if (n * n.slices > 1) split.main <- c(ceiling(n * n.slices/2), 2) else { split.main <- c(1,1) } } ArrangeDevice(graphics, figs=split.main) par(cex=dots$cex) if (bgInDots) par(bgdots) xlab.given <- 1 - as.integer("xlab" %in% dotnames && (is.null(dots$xlab) || dots$xlab=="")) mar <- c(1,1,0,0) image.par$data$mar.leg <- mar image.par$var$mar.leg <- mar + c(0,0,0,0) oma.top <- 2*plot.legend + if (is.null(dots$main)) 0 else 2 oma.left <- 2 * (1 + xlab.given) oma.bottom <- oma.left + 0*(plot.legend && plot.var) oma <- c(oma.bottom, oma.left, oma.top, xlab.given) + 0.2 figs <- c(len.sel, prod(split.main) / len.sel) legends <- scr <- scr.main <- scr.legends <- scr.leg <- NULL if (graphics$split_screen) { SCR <- scr <- split.screen(rbind( c(0,1, 0, if (plot.var) 1-2*(1-image.par$lower.leg) else image.par$lower.leg), if (plot.legend) c(0, 1, image.par$lower.leg, 1), if (plot.var && plot.legend) c(0,1,1-2*(1-image.par$lower.leg), image.par$lower.leg) )) scr.main <- matrix(nrow = figs[2], split.screen(split.main, scr[1]), byrow=TRUE) scr.legends <- integer(0) } else { if (any(split.main != 1)) { par(mfcol=split.main) } } if (plot.legend) { legends <- list(do = if (is.list(select)) sapply(select, length) != 2 else rep(TRUE, len.sel), units=coordunits) for (i in c("data", if (plot.var) "var")) { if (graphics$split_screen) { SCR <- SCR[-1] screen(SCR[1]) scr.leg <- split.screen(figs=c(1, len.sel)) scr.legends <- c(scr.legends, scr.leg) for (jx in 1:length(scr.leg)) { screen(scr.leg[jx]) par(oma=oma, mar=image.par[[i]]$mar.leg) } } col <- image.par[[i]]$col col <- sapply(1:len.sel, function(jx) col[[1+(jx-1) %% length(col)]], simplify=FALSE) if (!is.list(select) || length(select[[jx]]) != 2) { if (any(sapply(col, length) <= 1)) stop("number of colours is one -- please choose an appropriate colour palette. 
Maybe 'RFpar(col=NULL)' will help.")
      }
      legends[[i]] <- list(scr=scr.leg, col = col, z.legend = image.par[[i]]$z.legend)
    }
  }
  return(c(image.par, dots=list(dots),
           list(legends = legends, names.coords = names.coords, names.rep = names.rep,
                names.vdim = names.vdim, mar=mar, oma=oma, scr.main=scr.main,
                scr.legends=scr.legends, scr = scr, split.main=split.main,
                grPrintlevel = graphics$grPrintlevel)))
}
PlotTitle <- function(x, main) {
  p <- x@.RFparams
  par(mar=rep(0, 4), new=TRUE)
  plot(Inf, Inf, xlim=c(0,1), ylim=c(0,1), axes=FALSE)
  text(0, 0.5, labels=main, adj=1, xpd=NA, col="blue", cex=0.8)
}
RFplotSimulation <- function(x, y, MARGIN =c(1,2), MARGIN.slices =NULL,
                             n.slices = if (is.null(MARGIN.slices)) 1 else 10, nmax=6,
                             plot.variance = !is.null(x@.RFparams$has.variance) && x@.RFparams$has.variance,
                             select.variables, zlim, legend = TRUE, MARGIN.movie = NULL,
                             file=NULL, speed=0.3, height.pixel=300, width.pixel=height.pixel,
                             ..., plotmethod="image") {
  if (is(x, "RFdataFrame"))
    return(RFplotSimulation1D(x=x, y=y, nmax=nmax, plot.variance=plot.variance, legend=legend, ...))
  if (legend && plotmethod == "contour") stop("no legend available for 'contour'")
  graphics <- RFoptions()$graphics
  x.grid <- is(x, "RFspatialGridDataFrame")
  do.slices <- !is.null(MARGIN.slices)
  do.movie <- !is.null(MARGIN.movie)
  if (length(MARGIN.slices) > 1) stop("MARGIN.slices must be a scalar.")
  if (length(MARGIN.movie) > 1) stop("MARGIN.movie must be a scalar.")
  if (!x.grid) {
    if (is(x, "RFspatialPointsDataFrame")) {
      if (do.slices || n.slices[length(n.slices)] != 1)
        stop("'MARGIN.slices' must be 'NULL' and 'n.slices' must be 1.")
    } else {
      stop("method only for objects of class 'RFspatialPointsDataFrame' and 'RFspatialGridDataFrame'")
    }
  }
  NEWMARGIN <- MARGIN
  has.variance <- !is.null(x@.RFparams$has.variance) && x@.RFparams$has.variance
  if (!has.variance) plot.variance <- FALSE
  if (x.grid) {
    conventional <- RFspDataFrame2conventional(x)
    x.grid.vectors <- GridTopology2gridVectors(cbind(conventional$x, conventional$T))
    timespacedim <- genuine.timespacedim <- length(x.grid.vectors)
    if (timespacedim != length(x@grid@cellsize))
      stop("should not happen: programming error in plotRFspatialGridDataFrame, timespacedim wrong ... (AM)")
(AM)") if (do.slices){ if (!(MARGIN.slices <= timespacedim)) stop("chosen MARGIN.slices out of bounds") if (MARGIN.slices %in% MARGIN) stop("MARGIN.slices must be different from MARGIN") } if (length(n.slices)!=1 && length(n.slices)!=3) stop("n.slices must be an integer of length 1 or 3") data.arr <- RFspDataFrame2dataArray(x) vdim <- dim(data.arr)[timespacedim+1] n.orig <- dim(data.arr)[timespacedim+2] n.ohne.var <- n.orig - has.variance n <- min(n.ohne.var, nmax) + plot.variance dim_data <- dim(data.arr) if (timespacedim <= 3){ if (timespacedim == 3) dim(data.arr) <- c(dim_data[1:3], 1, dim_data[4:5]) else if (timespacedim==2) dim(data.arr) <- c(dim_data[1:2], 1, 1, dim_data[3:4]) else stop("dimension too small: dim=", timespacedim) timespacedim <- 4 } if (!all(MARGIN %in% 1:(length(dim_data) - 2))) stop("MARGIN out of range.") if (any(MARGIN.slices %in% MARGIN.movie)) stop("MARGIN.slices and MARGIN.movie are not disjoint.") if (length(MARGIN.slices) < 1) MARGIN.slices <- (1:(max(MARGIN, MARGIN.movie) + 1))[-c(MARGIN, MARGIN.movie)][1] if (length(MARGIN.movie) < 1) MARGIN.movie <- (1:(max(MARGIN, MARGIN.slices) + 1))[-c(MARGIN, MARGIN.slices)][1] dim_data <- dim(data.arr) vdimrep <- dim_data[(-1:0) + length(dim_data)] if (!all(c(MARGIN.movie, MARGIN.slices) %in% 1:(length(dim_data) - 2))) stop("MARGINs out of range.") xx <- x.grid.vectors[[MARGIN[1]]] xy <- x.grid.vectors[[MARGIN[2]]] mar.vec <- c(MARGIN, MARGIN.slices, MARGIN.movie) ind <- as.list(rep(1, length(dim_data) - 2)) ind[mar.vec] <- TRUE data.arr <- do.call("[", c(list(data.arr), ind, TRUE, TRUE, drop=FALSE)) dim(data.arr) <- c(dim_data[sort(mar.vec)], vdimrep) perm.tmp <- c(mar.vec, (-1:0) + length(dim_data)) data.arr <- aperm(data.arr, perm.tmp) dim_data <- dim(data.arr) NEWMARGIN <- 1:2 MARGIN.slices <- 3 MARGIN.movie <- 4 if (do.slices) { if (n != 1) { n <- 1 message("only first realization is shown") } if (plot.variance){ plot.variance <- FALSE message("plot.variance was set to FALSE") } mar.len <- dim_data[MARGIN.slices] if (n.slices[length(n.slices)] > mar.len) n.slices[length(n.slices)] <- mar.len slices.ind <- if (length(n.slices) == 1) seq(1, mar.len, length=n.slices) else seq(n.slices[1], n.slices[2], length=n.slices[3]) slices.ind <- unique(round(slices.ind)) slices.ind <- slices.ind[slices.ind >= 1 & slices.ind <= mar.len] } else { slices.ind <- 1 } n.slices <- length(slices.ind) data.idx <- 1 : (n.ohne.var*vdim) all.i <- as.matrix(expand.grid(1:n.slices, 1:n)[2:1]) coords <- as.matrix(expand.grid(xx, xy)) m.range <- if (do.movie) 1:dim_data[MARGIN.movie] else 1 } else { vdim <- [email protected]$vdim n <- min([email protected]$n, nmax) + plot.variance nc <- ncol(x@data) if (nc < n*vdim) if (n==1) vdim <- nc else if (vdim==1) n <- nc else { stop("ncol(x@data) does not match '[email protected]'; change '[email protected]'") } data.idx <- 1:([email protected]$n*vdim) all.i <- cbind(1:n, 1) genuine.timespacedim <- ncol(x@coords) coords <- x@coords[, MARGIN] m.range <- 1 } if (!(missing.y <- missing(y))) { if (do.slices) stop("'y' and 'MARGIN.slices' may not be given at the same time") if (is(y, 'RFspatialGridDataFrame')) { y.coords <- as(y, "RFspatialPointsDataFrame")@coords y.data <- y@data } else if (is(y, "matrix") || is(y, "data.frame")) { dc <- data.columns(data=y, xdim = dimensions(x), force=TRUE) y.coords <- y[, dc$is.x, drop=FALSE] y.data <- y[, dc$is.data, drop=FALSE] } else { y.coords <- y@coords y.data <- y@data } } data.range <- apply(as.matrix(1:vdim), 1, function(z) { d <- if (missing.y) x@data[z + vdim 
  data.range <- apply(as.matrix(1:vdim), 1, function(z) {
    d <- if (missing.y) {
      x@data[z + vdim * 0:(n-plot.variance-1)]
    } else {
      idx <- z + vdim * 0:(n-plot.variance-1)
      c(y.data[, idx[idx <= ncol(y.data)]])
    }
    range(d, na.rm=TRUE)
  })
  var.range <- if (plot.variance) sapply(x@data[-data.idx], range, na.rm=TRUE) else NULL
  if (missing(select.variables)) select.variables <- 1:vdim
  image.par <- prepareplotRFsp(x=x, vdim=vdim, select=select.variables,
                               data.range = data.range, var.range=var.range,
                               plot.var=plot.variance, MARGIN=NEWMARGIN, n=n,
                               n.slices=n.slices, plot.legend=legend, zlim=zlim, ...)
  image.par$names.vdim <- add.units(image.par$names.vdim, x@.RFparams$varunits)
  legends <- image.par$legends
  if (x.grid) {
    nx.vectors <- min(length(xx), image.par$arrow$nx.vectors)
    thinning <- as.integer( (length(xx)-1) / nx.vectors)
  } else {
    nx.vectors <- min(nrow(coords), image.par$arrow$nx.vectors^2)
    thinning <- as.integer( (nrow(coords)-1) / nx.vectors^2)
  }
  current <- dev.cur()
  if (do.avi <- as.integer(do.movie && length(file) > 0)) {
    ans <- system("mencoder")
    if (ans != 0 && ans != 1) stop("'mencoder' needs to be installed first.")
    digits <- 1 + ceiling(log(max(m.range)) / log(10))
    fn <- character(max(m.range))
  }
  if (!graphics$split_screen) {
    if (graphics$always_open_device) {
      par(oma=image.par$oma)
    } else if (all(par()$oma == 0) && (any(par()$mfcol != 1) || any(par()$mfrow != 1))) {
      stop("par()$oma has not been set; 'oma=rep(2,4)' is a good choice.")
    }
  }
  for (m in m.range) {
    if (do.avi) {
      fn[m] <- paste(file, "__", formatC(m, width=digits, flag="0", format="d"), ".png", sep="")
      if (file.exists(fn[m])) stop(fn[m], " already exists.")
      png(height=height.pixel, width=width.pixel, filename=fn[m])
      filedev <- dev.cur()
      par(mfcol=image.par$split.main, mar=c(.2, .2, .2, .2))
      dev.set(current)
    } else filedev <- dev.cur()
    for (jx in 1:length(select.variables)) {
      j <- if (is.list(select.variables)) select.variables[[jx]] else select.variables[jx]
      for (ix in 1:nrow(all.i)) {
        i <- all.i[ix, ]
        dots <- dots.with.main.lab <- image.par$dots
        main <- dots$main
        dots$main <- NULL
        lab <- xylabs("", "", x@.RFparams$coordunits)
        dots$xlab <- lab$x
        dots$ylab <- lab$y
        if (do.plot.var <- (plot.variance && i[1]==n)){
          k <- if (x.grid) n.orig else x@.RFparams$n + 1
          dv <- "var"
        } else {
          k <- i[1]
          dv <- "data"
        }
        if (graphics$split_screen) {
          screen(image.par$scr.main[ix, jx])
          par(oma=image.par$oma)
        }
        par(mar=image.par$mar)
        len.col <- length(image.par[[dv]]$col)
        col <- image.par[[dv]]$col[[ 1 + (j[1]-1) %% len.col]]
        breaks <- image.par[[dv]]$breaks[, j[1]]
        genuine.image <- (length(j) == 1 || length(j)==3)
        if (x.grid) {
          dots$type <- NULL
          dots$col <- if (genuine.image) col else par()$bg
          for (devices in 0:do.avi) {
            args <- c(dots, list(x=xx, y=xy, z=data.arr[,,i[2], m, j[1], k],
                                 zlim = image.par[[dv]]$range[, j[1]],
                                 axes = plotmethod == "persp"))
            plot.return <- do.call(plotmethod, args=args)
            dev.set(filedev)
          }
          dev.set(current)
        } else {
          idx <- if (n==1) j else if (vdim==1) k else (k-1)*vdim+j
          dots$col <- if (genuine.image) col[ cut(x@data[,idx[1]], breaks=breaks) ] else par()$bg
          for (devices in 0:do.avi) {
            do.call(graphics::plot, args=c(dots, list(x=coords[, 1], y=coords[, 2], axes=FALSE)))
            box()
            dev.set(filedev)
          }
          dev.set(current)
        }
        if (image.par$legend && ix == 1 && legends$do[jx]) {
          leg <- legends[[dv]]$z.legend[, j[1]]
          if (graphics$split_screen) {
            screen(legends[[dv]]$scr[jx])
            lab <- xylabs("", "", units=legends$units)
            image(x=leg, y=c(0,1), z=matrix(ncol=2, rep(leg, 2)), axes=FALSE,
                  xlab=lab$x, ylab=lab$y, col=legends[[dv]]$col[[jx]] )
            axis(3, mgp=if (do.plot.var) c(3,0,0), hadj=if (do.plot.var) -0.5 else NA)
            box()
screen(image.par$scr.main[ix, jx]) } else { my.legend(min(xx), max(xy), image.par[[dv]]$range[, j[1]], col=legends[[dv]]$col[[jx]], bg="white") } } for (devices in 0:do.avi) { if (n.slices > 1) legend("bottomright", bty="n", legend=paste(image.par$names.coords[MARGIN.slices], "=", x.grid.vectors[[MARGIN.slices]][slices.ind[i[2]]])) if (do.plot.arrows <- length(j) >= 2 && !do.plot.var) { jj <- if (length(j) == 3) j[-1] else jj <- j rx <- range(coords[, 1]) ry <- range(coords[, 2]) col.arrow <- if (length(image.par[["data"]]$col) >= jj[1] && length(image.par[["data"]]$col[[jj[1]]]) == 1) image.par[["data"]]$col[[jj[1]]] else "black" if (ix == 1) { factor <- image.par$arrow$reduction * sqrt(diff(rx) * diff(ry) / max(x@data[jj[1]]^2 + x@data[jj[2]]^2)) / nx.vectors } my.arrows(coords, x@data[jj], r = factor, thinning = thinning, col = col.arrow, nrow = if (x.grid) length(xx)) } if (!do.plot.var && !missing.y && (length(j)==1 || (length(j)==3))) { idx <- if (n==1) j else if (vdim==1) i[1] else (i[1]-1)*vdim+j if (ncol(y.data) < idx) idx <- 1 if (plotmethod == "persp") { xy <- trans3d(y.coords[, MARGIN[1]], y.coords[, MARGIN[2]], data[ , idx], pmat=plot.return) points(xy, pch=16, col="black") } else { col2 <- col[ cut(y.data[ , idx], breaks=breaks) ] dots2 <- dots dots2[c("type", "pch", "lty", "col", "bg", "cex", "lwd")] <- NULL addpoints <- function(pch, col, cex) { do.call(graphics::plot.xy, args=c(dots2, list(xy=xy.coords(y.coords[, MARGIN[1]], y.coords[, MARGIN[2]]), type="p", pch=pch, lty=1, col=col, bg=NA, cex=cex, lwd=1))) } if (plotmethod=="image") addpoints(15, "darkgray", dots$cex*2) addpoints(dots$pch, col2, dots$cex) } } if (ix==1 || ((image.par$split.main[1] != nrow(all.i)) && (ix <= image.par$split.main[2]))) { axis(1, outer=TRUE) } if (jx==1 && ((image.par$split.main[2] == length(select.variables)) || ((ix-1) %% image.par$split.main[2] == 0))) axis(2, outer=TRUE) if (all(i==1) && (image.par$grPrintlevel > 1 || vdim>1)) { mtext(text = image.par$names.vdim[jx], side=3, line=-1, col = image.par$text.col, cex=dots$cex) } if (n>1 && jx==1){ mtext(text = image.par$names.rep[ix], side=3, line=-2, cex=dots$cex) } dev.set(filedev) } dev.set(current) if (image.par$legend && ix == 1) { if (do.plot.arrows) { len <- max(pretty(diff(rx) / image.par$arrows$nx.vectors/2 /factor)) if (graphics$split_screen) { x.arrow <- cbind(mean(rx), image.par$arrows$leg.pos[1+genuine.image]) screen(image.par$scr.legends[jx]) do.call(graphics::plot, args=c(dots, list(x=Inf, y=Inf, xlim=rx, ylim=c(0,1), axes=FALSE))) colArrow <- col.arrow } else { dy <- diff(range(xy)) xl <- max(xx) - 0.05 * diff(range(xx)) yl <- max(xy) - 0.02 * dy points(rep(xl, 2), rep(yl-0.01 * dy, 2), pch=c(16, 1), cex=7, col=c("white", "black")) x.arrow <- cbind(xl, yl) colArrow <- "red" } my.arrows(x.arrow, cbind(len, 0), r = factor, thinning=0, col=colArrow) text(x.arrow, pos=1, labels = len, col=colArrow) } } } } dots.with.main.lab$type <- NULL dots.with.main.lab$zlab <- NULL do.call(graphics::title, args=list(main=dots.with.main.lab$main, outer=TRUE, line=image.par$oma[3]-1.5)) dots.with.main.lab$main <- NULL do.call(graphics::title, args=c(dots.with.main.lab, list(outer=TRUE, line=NA))) if (image.par$grPrintlevel > 0) { if (graphics$split_screen) { screen(image.par$scr[1 + image.par$legend], new=FALSE) PlotTitle(x, if (is.null(main)) "" else main) } } if (do.avi) { dev.off(filedev) dev.set(current) } } if (do.avi) { txt <- paste("mencoder -mf fps=30 -ffourcc DX50 -ovc lavc ", " -speed ", speed, " -lavcopts 
vcodec=mpeg4:vbitrate=9800:aspect=4/3:vhq:keyint=15",
                 " -vf scale=720:576 -o ", file, ".avi mf://", file, "__*.png", sep="")
    system(txt)
    file.remove(fn)
  }
  scr <- image.par[c("scr.main", "scr.legends", "scr", "split.main")]
  if (graphics$split_screen && graphics$close_screen){
    close.screen(unlist(scr))
    scr <- NULL
  }
  return(invisible(scr))
}
trafo_pointsdata <- function(x, dim) {
  if (isgrid <- is(x, "RFgridDataFrame")) {
    x <- grid2pts1D(x)
  } else if ((is(x, "matrix") || is(x, "data.frame")) && !missing(dim)) {
    dc <- data.columns(data=x, xdim = dim, force=TRUE)
    x <- list(coords=x[, dc$is.x, drop=FALSE], data=x[, dc$is.data, drop=FALSE])
  } else {
    if (!is(x, "RFpointsDataFrame"))
      stop("method only for objects of class 'RFpointsDataFrame' and 'RFgridDataFrame'")
  }
  dummy <- dimnames(x@coords)[[2]][1]
  lab <- xylabs(if (is.null(dummy)) "" else dummy, "", x@.RFparams$coordunits)
  labdata <- names(x@data)
  colname <- colnames(x@data)
  if (isgrid) {
    return(list(coords=as.vector(x@coords), data=as.matrix(x@data),
                RFparams=x@.RFparams, lab=lab, labdata=labdata, colnames=colname))
  } else {
    ord <- order(x@coords)
    return(list(coords=x@coords[ord, ], data=as.matrix(x@data)[ord, , drop=FALSE],
                RFparams=x@.RFparams, lab=lab, labdata=labdata, colnames=colname))
  }
}
RFplotSimulation1D <- function(x, y, nmax=6,
                               plot.variance=!is.null(x@.RFparams$has.variance) && x@.RFparams$has.variance,
                               legend=TRUE, ...) {
  stopifnot(!missing(x))
  x <- trafo_pointsdata(x)
  nc <- ncol(x$data)
  if (!missing(y)) {
    y <- trafo_pointsdata(y, dimensions(dim))
    y$data <- rep(y$data, length.out=nrow(y$data) * nc)
    dim(y$data) <- c(length(y$coords), nc)
  }
  has.variance <- !is.null(x$RFparams$has.variance) && x$RFparams$has.variance
  if (!has.variance) plot.variance <- FALSE
  n <- min(x$RFparams$n, nmax) + plot.variance
  vdim <- x$RFparams$vdim
  if (nc < n*vdim) {
    if (n==1) vdim <- nc else if (vdim==1) n <- nc else {
      stop("ncol(x@data) does not match 'x@.RFparams'; change 'x@.RFparams'")
    }
  }
  graphics <- RFoptions()$graphics
  ArrangeDevice(graphics, c(1, n))
  dots <- mergeWithGlobal(list(...))
  dotnames <- names(dots)
  if ("bg" %in% dotnames) {
    par(bg=dots$bg)
    dots$bg <- NULL
  }
  if (!("xlab" %in% dotnames)) dots$xlab <- x$lab$x
  if (!("type" %in% dotnames)) dots$type <- "l"
  make.small.mar <- ("xlab" %in% dotnames && is.null(dots$xlab) && is.null(dots$ylab))
  if (!is.null(x$labdata) && all(nchar(x$labdata)>0)) {
    names.vdim <- unlist(lapply(strsplit(x$labdata[1:vdim], ".n"), FUN=function(li) li[[1]]))
  } else {
    names.vdim <- paste("variable", 1:vdim)
    x$labdata <- names.vdim
  }
  if (n>1){
    ylab.vec <- c(paste("realization ", 1:(n-plot.variance), sep=""),
                  if (plot.variance) "kriging variance")
  } else {
    ylab.vec <- if (vdim==1) x$colnames else ""
  }
  if ("ylab" %in% dotnames) {
    if (!is.null(dots$ylab)) ylab.vec[1:length(ylab.vec)] <- dots$ylab
    dots$ylab <- NULL
  }
  col <- 1:vdim
  if ("col" %in% dotnames) {
    if (!is.null(dots$col)) col[1:length(col)] <- dots$col
    dots$col <- NULL
  }
  if (graphics$split_screen) {
    scr <- split.screen(c(n,1))
  } else {
    scr <- NULL
    par(mfrow=c(n, 1), mar=c(1, 1, 0.1, 0.1))
  }
  for (i in 1:n){
    if (graphics$split_screen) screen(i)
    if (make.small.mar) {
      if (graphics$split_screen) {
        par(oma=c(3,0,1,1)+.1, mar=c(0,3,0,0))
      } else {
        par(oma=rep(0,4), mar=c(3, 3, 1, 1), cex=0.6)
      }
    } else {
      par(oma=c(4,0,1,1)+.1, mar=c(0,4,0,0))
    }
    ylab <- ylab.vec[i]
    if (tmp.idx <- (plot.variance && i==n)){
      i <- x$RFparams$n + plot.variance
    }
    do.call(graphics::matplot, args=c(dots, list(
      x=x$coords, y=x$data[ , vdim*(i-1)+(1:vdim)],
      xaxt="n", yaxt="n", ylab=ylab, col=col)) )
if (!missing(y)) { points(x=y$coords, y=y$data[ , vdim*(i-1)+(1:vdim)], pch=22, col="red") } axis(2) if (tmp.idx) i <- n if (i==1) { if ( (vdim > 1) && legend) { legend("topright", col=col, lty=1, legend = c(names.vdim)) } } else if (i==n) { axis(1, outer=n>1) title(xlab=dots$xlab, outer=TRUE) } else axis(1, labels=FALSE) } if (graphics$close_screen) { close.screen(scr) scr <- NULL } return(invisible(scr)) } errMsgNoPlotAvailable <- function(x, y) warning(paste("no plot method available for signature c(", class(x), ",", class(y), ")")) setMethod(f="plot", signature(x="RFgridDataFrame", y="missing"), definition=function(x, y, ...) RFplotSimulation1D(x, ...)) setMethod(f="plot", signature(x="RFpointsDataFrame", y="missing"), definition=function(x, y, ...) RFplotSimulation1D(x, ...)) setMethod(f="plot", signature(x="RFspatialGridDataFrame", y="missing"), definition=function(x, y, ...) RFplotSimulation(x, ...)) setMethod(f="plot", signature(x="RFspatialPointsDataFrame", y="missing"), definition=function(x, y, ...) RFplotSimulation(x, ...)) setMethod(f="plot", signature(x="RFgridDataFrame", y="matrix"), definition=function(x, y, ...) RFplotSimulation1D(x, y, ...)) setMethod(f="plot", signature(x="RFpointsDataFrame", y="matrix"), definition=function(x, y, ...) RFplotSimulation1D(x, y, ...)) setMethod(f="plot", signature(x="RFspatialGridDataFrame", y="matrix"), definition=function(x, y, ...) RFplotSimulation(x, y, ...)) setMethod(f="plot", signature(x="RFspatialPointsDataFrame", y="matrix"), definition=function(x, y, ...) RFplotSimulation(x, y, ...)) setMethod(f="plot", signature(x="RFgridDataFrame", y="data.frame"), definition=function(x, y, ...) RFplotSimulation1D(x, y, ...)) setMethod(f="plot", signature(x="RFpointsDataFrame", y="data.frame"), definition=function(x, y, ...) RFplotSimulation1D(x, y, ...)) setMethod(f="plot", signature(x="RFspatialGridDataFrame", y="data.frame"), definition=function(x, y, ...) RFplotSimulation(x, y, ...)) setMethod(f="plot", signature(x="RFspatialPointsDataFrame", y="data.frame"), definition=function(x, y, ...) RFplotSimulation(x, y, ...)) setMethod(f="plot", signature(x="RFgridDataFrame", y="RFgridDataFrame"), definition=function(x, y, ...) RFplotSimulation1D(x, y, ...)) setMethod(f="plot", signature(x="RFgridDataFrame", y="RFpointsDataFrame"), definition=function(x, y, ...) RFplotSimulation1D(x, y, ...)) setMethod(f="plot", signature(x="RFpointsDataFrame", y="RFgridDataFrame"), definition=function(x, y, ...) RFplotSimulation1D(x, y, ...)) setMethod(f="plot", signature(x="RFpointsDataFrame", y="RFpointsDataFrame"), definition=function(x, y, ...) RFplotSimulation1D(x, y, ...)) setMethod(f="plot", signature(x="RFspatialGridDataFrame", y="RFspatialGridDataFrame"), definition=function(x, y, ...) RFplotSimulation(x, y, ...)) setMethod(f="plot", signature(x="RFspatialGridDataFrame", y="RFspatialPointsDataFrame"), definition=function(x, y, ...) RFplotSimulation(x, y, ...)) setMethod(f="plot", signature(x="RFspatialPointsDataFrame", y="RFspatialGridDataFrame"), definition=function(x, y, ...) { errMsgNoPlotAvailable(x, y) return(invisible(NULL)) }) setMethod(f="plot", signature(x="RFspatialPointsDataFrame", y="RFspatialPointsDataFrame"), definition=function(x, y, ...) RFplotSimulation(x, y, ...)) setMethod(f="persp", signature(x="RFspatialGridDataFrame"), definition=function(x, ..., zlab="") RFplotSimulation(x, ..., zlab=zlab, plotmethod="persp")) contour.RFspatialGridDataFrame <- function(x, ...) RFplotSimulation(x, ..., plotmethod="contour")
knitr::opts_chunk$set(
  collapse = TRUE,
  comment = "#>"
)
library(FuzzySTs)
mat <- matrix(c(1,2,2,2,2,1), ncol=1)
MF111 <- TrapezoidalFuzzyNumber(0,1,1,2)
MF112 <- TrapezoidalFuzzyNumber(1,2,2,3)
PA11 <- c(1,2)
data.fuzzified <- FUZZ(mat, mi=1, si=1, PA=PA11)
emp.dist <- boot.mean.ml(data.fuzzified, algorithm = "algo1", distribution = "normal",
                         sig = 0.05, nsim = 5, sigma = 1)
(eta.boot <- quantile(emp.dist, probs = 95/100))
data <- matrix(c(1,2,3,2,2,1,1,3,1,2), ncol=1)
MF111 <- TrapezoidalFuzzyNumber(0,1,1,2)
MF112 <- TrapezoidalFuzzyNumber(1,2,2,3)
MF113 <- TrapezoidalFuzzyNumber(2,3,3,4)
PA11 <- c(1,2,3)
data.fuzzified <- FUZZ(data, mi=1, si=1, PA=PA11)
Fmean <- Fuzzy.sample.mean(data.fuzzified)
emp.dist <- boot.mean.ml(data.fuzzified, algorithm = "algo1", distribution = "normal",
                         sig = 0.05, nsim = 5, sigma = 0.79)
coef.boot <- quantile(emp.dist, probs = 95/100)
head(fci.ml.boot(data.fuzzified, t = Fmean, distribution = "normal", sig = 0.05,
                 sigma = 0.62, coef.boot = coef.boot))
H0 <- alphacut(TriangularFuzzyNumber(2.9,3,3.1), seq(0,1, 0.01))
H1 <- alphacut(TriangularFuzzyNumber(3,3,5), seq(0,1,0.01))
t <- alphacut(TriangularFuzzyNumber(0.8,1.80,2.80), seq(0,1,0.01))
res <- Fuzzy.decisions(type = 0, H0, H1, t = t, s.d = 0.79, n = 10, sig = 0.05,
                       distribution = "normal", distance.type = "GSGD")
res$RH0
res$DRH0
res$D.RH0
res$D.DRH0
data <- matrix(c(1,2,3,2,2,1,1,3,1,2), ncol=1)
MF111 <- TrapezoidalFuzzyNumber(0,1,1,2)
MF112 <- TrapezoidalFuzzyNumber(1,2,2,3)
MF113 <- TrapezoidalFuzzyNumber(2,3,3,4)
PA11 <- c(1,2,3)
data.fuzzified <- FUZZ(data, mi=1, si=1, PA=PA11)
H0 <- alphacut(TriangularFuzzyNumber(2.9,3,3.1), seq(0,1, 0.01))
H1 <- alphacut(TriangularFuzzyNumber(3,3,5), seq(0,1,0.01))
t <- alphacut(TriangularFuzzyNumber(0.8,1.80,2.80), seq(0,1,0.01))
emp.dist <- boot.mean.ml(data.fuzzified, algorithm = "algo1", distribution = "normal",
                         sig = 0.05, nsim = 5, sigma = 0.79)
coef.boot <- quantile(emp.dist, probs = 95/100)
res <- Fuzzy.decisions.ML(data.fuzzified, H0, H1, t = t, coef.boot = coef.boot,
                          sigma = 0.79, sig = 0.05, distribution = "normal",
                          distance.type = "GSGD")
res$RH0
res$DRH0
res$D.RH0
res$D.DRH0
H0 <- TriangularFuzzyNumber(2.9,3,3.1)
H1 <- TriangularFuzzyNumber(3,3,5)
res <- Fuzzy.CI.test(type = 0, H0, H1, t = TriangularFuzzyNumber(0.8,1.80,2.80),
                     s.d = 0.79, n = 10, sig = 0.05, distribution = "normal",
                     distance.type="GSGD")
res$decision
res$RH0
res$DRH0
res$D.RH0
res$D.DRH0
data <- matrix(c(1,2,3,2,2,1,1,3,1,2), ncol=1)
MF111 <- TrapezoidalFuzzyNumber(0,1,1,2)
MF112 <- TrapezoidalFuzzyNumber(1,2,2,3)
MF113 <- TrapezoidalFuzzyNumber(2,3,3,4)
PA11 <- c(1,2,3)
data.fuzzified <- FUZZ(data, mi=1, si=1, PA=PA11)
Fmean <- Fuzzy.sample.mean(data.fuzzified)
H0 <- TriangularFuzzyNumber(2.2,2.5,3)
H1 <- TriangularFuzzyNumber(2.5,2.5,5)
emp.dist <- boot.mean.ml(data.fuzzified, algorithm = "algo1", distribution = "normal",
                         sig = 0.05, nsim = 5, sigma = 0.7888)
coef.boot <- quantile(emp.dist, probs = 95/100)
res <- Fuzzy.CI.ML.test(data.fuzzified, H0, H1, t = Fmean, sigma=0.7888,
                        coef.boot = coef.boot, sig=0.05, distribution="normal",
                        distance.type="GSGD")
res$RH0
res$DRH0
res$decision
H0 <- TriangularFuzzyNumber(2.2,2.5,3)
H1 <- TriangularFuzzyNumber(2.5,2.5,5)
Fuzzy.p.value(type=1, H0, H1, t=TriangularFuzzyNumber(0.8,1.8,2.8), s.d=0.7888,
              n=10, sig=0.05, distribution="normal", distance.type="GSGD")
require('openxlsx') wb <- createWorkbook() addWorksheet(wb, "Sheet 1") writeData(wb, 1, head(iris)) addStyle(wb, sheet = 1, style = createStyle(fgFill = "yellow", textDecoration = "bold"), rows = 1:2, cols = 1:5, gridExpand = TRUE, stack = TRUE) addStyle(wb, sheet = 1, style = createStyle(fgFill = "red", textDecoration = "italic"), rows = 1, cols = 1:5, gridExpand = TRUE, stack = TRUE) addStyle(wb, sheet = 1, style = createStyle(fgFill = "blue"), rows = 5, cols = 1:5, gridExpand = TRUE, stack = TRUE) addStyle(wb, sheet = 1, style = createStyle(border = "topbottomleftright", textDecoration = "underline"), rows = 2:3, cols = c(1, 5), gridExpand = TRUE, stack = TRUE) addStyle(wb, sheet = 1, style = createStyle(border = "top", borderColour = "blue"), rows = 1:3, cols = 1, gridExpand = TRUE, stack = TRUE) openXL(wb) wb$addStyle addWorksheet(wb, "Sheet 2") writeData(wb, 2, matrix("abc", nrow = 4, ncol = 5)) addStyle(wb, 2, createStyle(halign = "center", border = "TopBottomLeftRight"), 1:5, 1:5, gridExpand = TRUE) addStyle(wb, 2, createStyle(textDecoration = "bold", fgFill = "salmon"), 2:4, 2:4,gridExpand = F, stack = TRUE) addWorksheet(wb, "Sheet 3") writeData(wb, 3, matrix("abc", nrow = 4, ncol = 5)) addStyle(wb, 3, createStyle(halign = "center", border = "TopBottomLeftRight"), 1:5, 1:5, gridExpand = TRUE) addStyle(wb, 3, createStyle(textDecoration = "bold", fgFill = "salmon"), 2:4, 2:4,gridExpand = F, stack = TRUE) openXL(wb) wb <- createWorkbook() addWorksheet(wb, "Sheet 1") writeData(wb, 1, head(iris)) addStyle(wb, sheet = 1, style = createStyle(fgFill = "red", textDecoration = "italic"), rows = c(2, 3, 4), cols = 2:5, gridExpand = TRUE, stack = TRUE) addStyle(wb, sheet = 1, style = createStyle(fgFill = "yellow", textDecoration = "bold"), rows = c(1,2,3,4,5,5,5,5,5), cols = c(1,1,1,1,1,2,3,4,5), gridExpand = FALSE, stack = TRUE) addStyle(wb, sheet = 1, style = createStyle(border = "topbottomleftright", textDecoration = "underline"), rows = 1:3, cols = c(1, 5), gridExpand = TRUE, stack = TRUE) addStyle(wb, sheet = 1, style = createStyle(border = "top", borderColour = "blue"), rows = 1:3, cols = 1, gridExpand = TRUE, stack = TRUE) addStyle(wb, sheet = 1, style = createStyle(border = "topbottomleftright"), rows = 1:4, cols = c(3,3,3,3)) addStyle(wb, sheet = 1, style = createStyle(border = "bottom", borderColour = "red"), rows = 2:10, cols = 3, gridExpand = TRUE, stack = TRUE) addWorksheet(wb, "Sheet 2") writeData(wb, 2, matrix("abc", nrow = 4, ncol = 5)) addStyle(wb, 2, createStyle(halign = "center", border = "TopBottomLeftRight"), 1:5, 1:5, gridExpand = TRUE) addStyle(wb, 2, createStyle(textDecoration = "bold", fgFill = "salmon"), 2:4, 2:4,gridExpand = F, stack = TRUE) addWorksheet(wb, "Sheet 3") writeData(wb, 3, matrix("abc", nrow = 4, ncol = 5)) addStyle(wb, 3, createStyle(halign = "center", border = "TopBottomLeftRight"), 1:5, 1:5, gridExpand = TRUE) addStyle(wb, 3, createStyle(textDecoration = "bold", fgFill = "salmon"), 2:4, 2:4,gridExpand = F, stack = TRUE) openXL(wb)
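# To keep the styled workbook on disk rather than only previewing it with
# openXL(), saveWorkbook() from openxlsx writes the file; the name below is
# just an example path.
saveWorkbook(wb, file = "styled-demo.xlsx", overwrite = TRUE)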
library("arules") library("testthat") context("tidLists") data <- list( c("a","b","c"), c("a","b"), c("a","b","d"), c("b","e"), c("a","d"), c("d","e"), c("d","f"), c("a","b","d","e","f","g") ) names(data) <- paste("Tr",c(1:8), sep = "") trans <- as(data, "transactions") tl <- (as(trans,"tidLists")) expect_identical(dim(tl), rev(dim(trans))) expect_identical(length(tl), nitems(trans)) expect_identical(transactionInfo(tl), transactionInfo(trans)) expect_identical(length(as(tl, "list")), nitems(trans)) expect_identical(as(tl, "matrix"), t(as(trans, "matrix"))) expect_identical(as(tl, "transactions"), trans) expect_identical(tl[2:3,3:4], as(trans[3:4,2:3], "tidLists")) expect_identical(size(tl), unname(sapply(as(tl, "list"), length))) transactionInfo(tl) <- cbind(transactionInfo(tl), additional = 1)
context("verifyName") tn_good <- c('myFunctionName', 'myOtherFunctionName') tn_bad <- c('MyClassName', 'myParameterName_s_1') tn_error <- c(56, NA) test_that("verifyName", { myt <- function(aName_s_1) { expect_true(verifyName(!!aName_s_1)) } myf <- function(aName_s_1) { expect_false(verifyName(!!aName_s_1)) } mye <- function(aName_s_1) { expect_error(verifyName(!!aName_s_1)) } sapply(tn_good, myt) sapply(tn_bad, myf) sapply(tn_error, mye) expect_error(verifyName(logical(0))) expect_error(verifyName(NA_integer_)) expect_error(verifyName(NA_character_)) expect_error(verifyName(' \n \t ')) })
library(blink) library(plyr) library(klsh) data(RLdata500) head(RLdata500) data.500 <- RLdata500[-c(2,4)] head(data.500) set.seed(1234) klsh.blocks <- klsh(data.500, p=100, num.blocks=5, k=2) confusion.from.blocking(klsh.blocks, true_ids = identity.RLdata500) confusion.from.blocking(klsh.blocks, recall.only=TRUE, true_ids = identity.RLdata500) reduction.ratio.from.blocking(klsh.blocks) twohundred_blocks_for_2e4_recs <- klsh(p=100,data.500, num.blocks=200,k=4) onehundred_blocks_for_2e4_recs <- klsh(p=100,data.500, num.blocks=100,k=4) fifty_blocks_for_2e4_recs <- klsh(p=100,data.500, num.blocks=50,k=4) twentyfive_blocks_for_2e4_recs <- klsh(p=100,data.500, num.blocks=25,k=4) ten_blocks_for_2e4_recs <- klsh(p=100,data.500, num.blocks=10,k=4) five_blocks_for_2e4_recs <- klsh(p=100,data.500, num.blocks=5,k=4) three_blocks_for_2e4_recs <- klsh(p=100,data.500, num.blocks=3,k=4) blockings_k4 <- list( twohundred_blocks_for_2e4_recs, onehundred_blocks_for_2e4_recs, fifty_blocks_for_2e4_recs, twentyfive_blocks_for_2e4_recs, ten_blocks_for_2e4_recs, five_blocks_for_2e4_recs, three_blocks_for_2e4_recs) confusions_k4 <- sapply(blockings_k4, confusion.from.blocking, recall.only=TRUE, true_ids = identity.RLdata500) reduction.ratio.from.blocking_k4 <- sapply(blockings_k4, reduction.ratio.from.blocking) twohundred_blocks_for_2e4_recs_3 <- klsh(p=100,data.500, num.blocks=200,k=3) onehundred_blocks_for_2e4_recs_3 <- klsh(p=100,data.500, num.blocks=100,k=3) fifty_blocks_for_2e4_recs_3 <- klsh(p=100,data.500, num.blocks=50,k=3) twentyfive_blocks_for_2e4_recs_3 <- klsh(p=100,data.500, num.blocks=25,k=3) ten_blocks_for_2e4_recs_3 <- klsh(p=100,data.500, num.blocks=10,k=3) five_blocks_for_2e4_recs_3 <- klsh(p=100,data.500, num.blocks=5,k=3) three_blocks_for_2e4_recs_3 <- klsh(p=100,data.500, num.blocks=3,k=3) blockings_k3 <- list( twohundred_blocks_for_2e4_recs_3, onehundred_blocks_for_2e4_recs_3, fifty_blocks_for_2e4_recs_3, twentyfive_blocks_for_2e4_recs_3, ten_blocks_for_2e4_recs_3, five_blocks_for_2e4_recs_3, three_blocks_for_2e4_recs_3) confusions_k3 <- sapply(blockings_k3, confusion.from.blocking, recall.only=TRUE, true_ids = identity.RLdata500) reduction.ratio.from.blocking_k3 <- sapply(blockings_k3, reduction.ratio.from.blocking) twohundred_blocks_for_2e4_recs_2 <- klsh(p=100,data.500, num.blocks=200,k=2) onehundred_blocks_for_2e4_recs_2 <- klsh(p=100,data.500, num.blocks=100,k=2) fifty_blocks_for_2e4_recs_2 <- klsh(p=100,data.500, num.blocks=50,k=2) twentyfive_blocks_for_2e4_recs_2 <- klsh(p=100,data.500, num.blocks=25,k=2) ten_blocks_for_2e4_recs_2 <- klsh(p=100,data.500, num.blocks=10,k=2) five_blocks_for_2e4_recs_2 <- klsh(p=100,data.500, num.blocks=5,k=2) three_blocks_for_2e4_recs_2 <- klsh(p=100,data.500, num.blocks=3,k=2) blockings_k2 <- list( twohundred_blocks_for_2e4_recs_2, onehundred_blocks_for_2e4_recs_2, fifty_blocks_for_2e4_recs_2, twentyfive_blocks_for_2e4_recs_2, ten_blocks_for_2e4_recs_2, five_blocks_for_2e4_recs_2, three_blocks_for_2e4_recs_2) confusions_k2 <- sapply(blockings_k2, confusion.from.blocking, recall.only=TRUE, true_ids = identity.RLdata500) reduction.ratio.from.blocking_k2 <- sapply(blockings_k2, reduction.ratio.from.blocking) twohundred_blocks_for_2e4_recs_1 <- klsh(p=100,data.500, num.blocks=200,k=1) onehundred_blocks_for_2e4_recs_1 <- klsh(p=100,data.500, num.blocks=100,k=1) fifty_blocks_for_2e4_recs_1 <- klsh(p=100,data.500, num.blocks=50,k=1) twentyfive_blocks_for_2e4_recs_1 <- klsh(p=100,data.500, num.blocks=25,k=1) ten_blocks_for_2e4_recs_1 <- klsh(p=100,data.500, 
num.blocks=10,k=1) five_blocks_for_2e4_recs_1 <- klsh(p=100,data.500, num.blocks=5,k=1) three_blocks_for_2e4_recs_1 <- klsh(p=100,data.500, num.blocks=3,k=1) blockings_k1 <- list( twohundred_blocks_for_2e4_recs_1, onehundred_blocks_for_2e4_recs_1, fifty_blocks_for_2e4_recs_1, twentyfive_blocks_for_2e4_recs_1, ten_blocks_for_2e4_recs_1, five_blocks_for_2e4_recs_1, three_blocks_for_2e4_recs_1) confusions_k1 <- sapply(blockings_k1, confusion.from.blocking, recall.only=TRUE, true_ids = identity.RLdata500) reduction.ratio.from.blocking_k1 <- sapply(blockings_k1, reduction.ratio.from.blocking) library(ggplot2) plot_dat <- rbind( data.frame(k = "4", block_length = unlist(lapply(blockings_k4, length)), recall = confusions_k4, reduction_ratio = reduction.ratio.from.blocking_k4), data.frame(k = "3", block_length = unlist(lapply(blockings_k3, length)), recall = confusions_k3, reduction_ratio = reduction.ratio.from.blocking_k3), data.frame(k = "2", block_length = unlist(lapply(blockings_k2, length)), recall = confusions_k2, reduction_ratio = reduction.ratio.from.blocking_k2), data.frame(k = "1", block_length = unlist(lapply(blockings_k1, length)), recall = confusions_k1, reduction_ratio = reduction.ratio.from.blocking_k1) ) ggplot(plot_dat) + geom_point(aes(block_length, recall, colour = k)) + geom_line(aes(block_length, recall, colour = k, group = k)) + xlab("Total Number of Blocks") + ylab("Recall") + theme_bw(base_family = "serif") + ylim(c(0.4, 1)) ggplot(plot_dat) + geom_point(aes(block_length, reduction_ratio, colour = k)) + geom_line(aes(block_length, reduction_ratio, colour = k, group = k)) + xlab("Total Number of Blocks") + ylab("Reduction Ratio") + theme_bw(base_family = "serif")
start_app() on.exit(stop_app(), add = TRUE) test_that("simplest", { expect_snapshot({ for (n in 0:2) cli_text("{n} package{?s}") for (n in 0:2) print(pluralize("{n} package{?s}")) }) }) test_that("irregular", { expect_snapshot({ for (n in 0:2) cli_text("{n} dictionar{?y/ies}") for (n in 0:2) print(pluralize("{n} dictionar{?y/ies}")) }) }) test_that("multiple substitutions", { expect_snapshot({ for (n in 0:2) cli_text("{n} package{?s} {?is/are} ...") for (n in 0:2) print(pluralize("{n} package{?s} {?is/are} ...")) }) }) test_that("multiple quantities", { expect_snapshot({ for (m in 0:2) for (n in 0:2) cli_text("{m} package{?s} and {n} folder{?s}") for (m in 0:2) for (n in 0:2) print(pluralize("{m} package{?s} and {n} folder{?s}")) }) }) test_that("no()", { expect_snapshot({ for (n in 0:2) cli_text("{no(n)} package{?s}") for (n in 0:2) print(pluralize("{no(n)} package{?s}")) }) }) test_that("set qty() explicitly", { expect_snapshot({ for (n in 0:2) cli_text("{qty(n)}There {?is/are} {n} package{?s}") for (n in 0:2) print(pluralize("{qty(n)}There {?is/are} {n} package{?s}")) }) }) test_that("collapsing vectors", { expect_snapshot({ pkgs <- function(n) glue::glue("pkg{seq_len(n)}") for (n in 1:3) cli_text("The {pkgs(n)} package{?s}") for (n in 1:3) print(pluralize("The {pkgs(n)} package{?s}")) }) }) test_that("pluralization and style", { expect_snapshot({ special_style <- list(span.foo = list(before = "<", after = ">")) cli_div(theme = special_style) for (n in 0:2) cli_text("{n} {.foo package{?s}}") }) expect_snapshot({ pkgs <- function(n) glue::glue("pkg{seq_len(n)}") for (n in 1:3) cli_text("The {.foo {pkgs(n)}} package{?s}") }) }) test_that("post-processing", { expect_snapshot({ for (n in 0:2) cli_text("Package{?s}: {n}") }) expect_snapshot({ pkgs <- function(n) glue::glue("pkg{seq_len(n)}") for (n in 1:2) cli_text("Package{?s}: {pkgs(n)}") for (n in 1:2) print(pluralize("Package{?s}: {pkgs(n)}")) }) }) test_that("post-processing errors", { expect_error( cli_text("package{?s}"), "Cannot pluralize without a quantity" ) expect_error( pluralize("package{?s}"), "Cannot pluralize without a quantity" ) expect_error( cli_text("package{?s} {5} {10}"), "Multiple quantities for pluralization" ) expect_error( pluralize("package{?s} {5} {10}"), "Multiple quantities for pluralization" ) }) test_that("issue 158", { expect_snapshot({ print(pluralize("{0} word{?A/B/}")) print(pluralize("{1} word{?A/B/}")) print(pluralize("{9} word{?A/B/}")) }) })
mdply <- function(.data, .fun = NULL, ..., .expand = TRUE, .progress = "none",
                  .inform = FALSE, .parallel = FALSE, .paropts = NULL) {
  if (is.matrix(.data) && !is.list(.data)) .data <- .matrix_to_df(.data)
  f <- splat(.fun)
  adply(.data = .data, .margins = 1, .fun = f, ...,
        .expand = .expand, .progress = .progress, .inform = .inform,
        .parallel = .parallel, .paropts = .paropts)
}
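# Worked example of mdply(): each row of the input supplies the named
# arguments of .fun, here mean and sd for rnorm() (this mirrors the classic
# plyr documentation example).
set.seed(1)
mdply(data.frame(mean = 1:3, sd = 1:3), rnorm, n = 2)
# result: the mean/sd columns plus two draws (V1, V2) per row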
evalMarkovChain <- function(x, conf){ regionData <- conf$regionData N <- conf$N days <- nrow(conf$regionData) p <- x[1] beta <- x[2] gamma <- x[3] CFR <- x[4] param <- c(p, beta, gamma) message("evalMarkovChain: p,beta,gamma,CFR: ", p, ", ", beta, ", ", gamma, ", ", CFR) m <- modelMarkovChain(x = param, days = days, N = N) message("evalMarkovChain: last(I), last(F) in 1e3: ", round( ( (m$X2 + m$X3)[days] )/1000), ", ", round( ( (m$X3 * CFR)[days] )/1000) ) realI <- regionData$confirmed realF <- regionData$fatalities hatI <- m$X2 + m$X3 hatF <- m$X3 * CFR mseI <- mean((realI - hatI) ^ 2) mseF <- mean((realF - hatF) ^ 2) if (is.nan(mseI)) mseI <- Inf if (is.nan(mseF)) mseF <- Inf y <- sqrt(mseI + mseF) message("evalMarkovChain / 1e6: y: ", (y * 1e-6)) return(y) } funMarkovChain <- function (x, conf) { matrix(apply(x, 1, evalMarkovChain, conf)) } tuneRegionModel <- function(regionData, pops=NULL, lower = NULL, upper = NULL, control=list()){ regionName <- attr(regionData, "regionName") regionPopulation <- attr(regionData, "regionPopulation") a <- c(0.1, 0.001, 0.001, 0.001) b <- c(0.7, 0.1, 0.1, 0.5) if(!is.null(lower)) a <- lower if(!is.null(upper)) b <- upper con <- spotControl(length(a)) con[names(control)] <- control control<-con conf <- list(regionData=regionData, N = regionPopulation) res <- spot(x=NULL, fun = funMarkovChain, lower=a, upper=b, control=control, conf) attr(res, "regionName") <- regionName attr(res, "regionPopulation") <- regionPopulation return(res) } parseTunedRegionModel <- function(xList){ models <- data.frame() pops <- list() y <- list() for (i in 1:length(xList)){ res <- xList[[i]] rn <- attr(res, "regionName") regionPopulation <- attr(res, "regionPopulation") pops[[rn]] <- res$x y[[rn]] <- res$y best <- res$xbest cost <- res$ybest r <- data.frame(p = best[1] , beta = best[2] , gamma = best[3] , CFR = best[4] , cost = cost , region = rn , regionPopulation = regionPopulation , row.names = NULL) models <- rbind(models, r) } list(models = models, pops = pops, y = y) } generateMCPrediction <- function(testData, models, startSimulation="2020-01-22", write = FALSE){ forecast <- data.frame() regionList <- levels(testData$Region) start <- as.Date(startSimulation) startTest <- min(testData$Date) endTest <- max(testData$Date) days <- 1 + as.integer(endTest - start) k <- as.double(endTest - startTest) for (i in 1:length(regionList)){ regionName <- regionList[i] params <- models[models[,"region"] == regionName,] N <- params$regionPopulation p <- params$p beta <- params$beta gamma <- params$gamma CFR <- params$CFR x <- c(p, beta, gamma, CFR) m <- modelMarkovChain(x = x , days = days, N = N) r <- data.frame(ForecastId = (1 + days*(i-1)):(days*i), ConfirmedCases = round(m$X2 + m$X3), Fatalities = round(CFR *m$X3)) r <- r[(days-k):days, ] r$ForecastId <- 1:(k+1) forecast <- rbind(forecast, r) } if (write){ write.csv(forecast, file = "submit.csv", row.names=FALSE, quote=FALSE) } return(forecast) }
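# Hedged usage sketch, not from the source: tuning one toy region and
# collecting the fitted parameters. It assumes modelMarkovChain() and the
# SPOT optimizer spot()/spotControl() used above are available, and that
# regionData carries the 'confirmed'/'fatalities' columns plus the region
# attributes read by tuneRegionModel(). The toy data and funEvals budget
# are made up for illustration.
regionData <- data.frame(confirmed = cumsum(rpois(30, 50)),
                         fatalities = cumsum(rpois(30, 2)))
attr(regionData, "regionName") <- "toyRegion"
attr(regionData, "regionPopulation") <- 1e6
res <- tuneRegionModel(regionData, control = list(funEvals = 15))
parseTunedRegionModel(list(res))$models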
context("Tests using Rosenbrock function") test_that("Rosenbrock", { testthat::skip_on_cran() f.rosen <- function(V) { N <- length(V)/2 x <- V[seq(1,2*N-1,by=2)] y <- V[seq(2,2*N,by=2)] return(sum(100*(x^2-y)^2+(x-1)^2)) } df.rosen <- function(V) { N <- length(V)/2 x <- V[seq(1,2*N-1,by=2)] y <- V[seq(2,2*N,by=2)] t <- x^2-y dxi <- 400*t*x+2*(x-1) dyi <- -200*t return(as.vector(rbind(dxi,dyi))) } hess.rosen <- function(V) { N <- length(V)/2 x <- V[seq(1,2*N-1,by=2)] y <- V[seq(2,2*N,by=2)] d0 <- rep(200,N*2) d0[seq(1,(2*N-1),by=2)] <- 1200*x^2-400*y+2 d1 <- rep(0,2*N-1) d1[seq(1,(2*N-1),by=2)] <- -400*x H <- bandSparse(2*N, k=c(-1,0,1), diagonals=list(d1,d0,d1), symmetric=FALSE, repr="C") return(drop0(H)) } set.seed(123) N <- 3 start <- as.vector(rnorm(2*N,-1,3)) m <- list(list(hs=hess.rosen, method="Sparse", precond=0), list(hs=NULL, method="BFGS", precond=0), list(hs=NULL, method="SR1", precond=0), list(hs=hess.rosen, method="Sparse", precond=1) ) for (meth in m) { if (!(Sys.info()[['sysname']] == 'sunos' & meth$method %in% c('BFGS', 'SR1'))) { opt0 <- trust.optim(start, fn=f.rosen, gr=df.rosen, hs=meth$hs, method=meth$method, control=list( preconditioner=meth$precond, report.freq=5L, maxit=5000L, report.level=0, stop.trust.radius=1e-9, prec=1e-6 ) ) norm_gr <- sqrt(sum(opt0$gradient ^ 2)) expect_equal(norm_gr, 0, tolerance=.0005) expect_match(opt0$status, "Success") expect_match(opt0$method, meth$method) } } })
MPhiFFT <- setClass( "MPhiFFT", slots = c( method= "character", d0 = "vector", firstColumn = "vector", firstRow = "vector", fftColumn = "vector", n = "numeric", zero = "numeric" ), contains="MPhiAbstract" ) setMethod( f="initialize", signature=c("MPhiFFT"), definition=function(.Object, firstColumn, firstRow, ...){ callNextMethod() args <- list(...) if(is.null(args$method)) .Object@method="FFT" else .Object@method=args$method .Object@firstColumn <- firstColumn .Object@firstRow <- firstRow if(length(.Object@firstColumn) != length(.Object@firstRow)) { stop("Error: first column and first row must have the same size") } if(.Object@firstColumn[1] != .Object@firstRow[1]) { stop("Error: first row and first column must have the same first element") } .Object@n <- length(firstColumn) .Object@d0 <- sapply(1:length(firstColumn), function(irow) {return(1/sum(c(firstColumn[irow:2], firstRow[1:(.Object@n+1-irow)])))}) .Object@d0[1] <- 1/sum(firstRow) N2 = 2 * .Object@n size=2^ceiling(log(N2)/log(2)) circulantColumn=c(firstColumn, rep(0, size + 1 - 2*.Object@n), firstRow[length(firstRow):2]) .Object@zero <- rep(0, size - .Object@n) .Object@fftColumn <- fft(circulantColumn) return(.Object) } ) setMethod( f="applyV", signature=c("MPhiFFT","vector"), definition=function(object,x){ return(object@d0 * Re(ifft_panda(object@fftColumn * fft(c(x,object@zero)))[1:object@n])) } ) setMethod( f="expV", signature=c("MPhiFFT","vector","vector","vector"), definition=function(object,diag1,diag2,x){ mask_diag1 = diag1 < -20 x[mask_diag1] = 0 expGv=x Gnv=x i=1 epsnormv = 1e-10 * sum(abs(x)) MATVECT <- selectMethod(applyV, c("MPhiFFT","vector")) while (sum(abs(Gnv)) > epsnormv){ Gnv = (diag1*Gnv + diag2*MATVECT(object,Gnv)) / i Gnv[mask_diag1] = 0 expGv = expGv + Gnv i=i+1 } expGv[mask_diag1] = 2e-9 return(expGv) } )
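# Self-contained illustration (independent of the class above) of the
# circulant-embedding trick that applyV() relies on: a Toeplitz matrix-vector
# product computed via fft() and checked against a dense %*%. Note that
# MPhiFFT additionally rescales the rows by d0; that step is omitted here.
nr <- 4
fc <- c(2, 0.5, 0.25, 0.125)   # first column of the Toeplitz matrix
fr <- c(2, 1, 0.5, 0.25)       # first row (same leading element)
Tmat <- matrix(0, nr, nr)
for (i in 1:nr) for (j in 1:nr)
  Tmat[i, j] <- if (i >= j) fc[i - j + 1] else fr[j - i + 1]
size <- 2^ceiling(log2(2 * nr))
circ <- c(fc, rep(0, size + 1 - 2 * nr), rev(fr[-1]))  # same embedding as initialize()
xv <- rnorm(nr)
fftmult <- Re(fft(fft(circ) * fft(c(xv, rep(0, size - nr))), inverse = TRUE) / size)[1:nr]
stopifnot(all.equal(fftmult, drop(Tmat %*% xv)))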
library(testthat)
library(here)
library(flashCard)
library(magrittr)  # provides %>%, which is not attached by the packages above
context("test flashCard is functioning properly")
testthat::test_that(
  "flashCard() produces expected", {
    df1 <- data.frame(
      front = c("Title front", "contentfront", "content second line"),
      back = c("Title back", "content back", "second line")
    )
    result <- flashCard::flashCard(df1, elementId = "card", front_text_color = "grey")
    as.character(result$x$data) %>%
      expect_equal("{\"front\":[\"Title front\",\"contentfront\",\"content second line\"],\"back\":[\"Title back\",\"content back\",\"second line\"]}")
  }
)
censor.weib.x <- function(delta,x,min.branch){ x[x==0] <- min.branch Fn <- sum(delta) n <- length(x) est <- optim(c(1,1), min.weib, Fn=Fn, delta=delta, x=x, n=n) par <- est$par LL <- -est$value a <- data.frame(t(par),LL) return(a) }
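# censor.weib.x() minimises an objective min.weib() that is defined elsewhere
# in the package. A hedged sketch of what a censored-Weibull negative
# log-likelihood with this calling convention conventionally looks like
# (p = c(shape, scale); delta = 1 observed, 0 right-censored). This is an
# assumption for illustration, not the package's definition:
min.weib.sketch <- function(p, Fn, delta, x, n) {
  shape <- p[1]; scale <- p[2]
  if (shape <= 0 || scale <= 0) return(Inf)
  -(sum(delta * dweibull(x, shape, scale, log = TRUE)) +
      sum((1 - delta) * pweibull(x, shape, scale, lower.tail = FALSE, log.p = TRUE)))
}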
JD <- function(x, inverse = FALSE) {
  if (inverse) {
    # Julian date -> POSIXct: 2440587.5 is the Julian date of 1970-01-01 00:00 UTC
    return(as.POSIXct((x - 2440587.5) * 86400,
                      origin = ISOdate(1970, 01, 01, 0, 0, 0),
                      format = "%Y-%m-%d %H:%M:%S"))
  } else {
    # POSIXct -> Julian date: seconds since the epoch / 86400, shifted to the JD origin
    return(as.numeric(x) / 86400 + 2440587.5)
  }
}
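# Round-trip check: POSIXct -> Julian date -> POSIXct.
jd <- JD(as.POSIXct("2000-01-01 12:00:00", tz = "UTC"))
jd                      # 2451545, the Julian date of the J2000.0 epoch
JD(jd, inverse = TRUE)  # the same instant back (printed in the session time zone)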
setClass("MultiFilter", slots = list( mc = "MultiCompanion", coef = "matrix", order = "numeric", sign = "numeric" ) ) setMethod("initialize", "MultiFilter", function(.Object, coef, mc, order, sign = 1) { if(missing(coef)){ coef <- mc_factors(mc,what="mat") coef <- coef[nrow(coef):1,1:ncol(coef),drop=FALSE] }else if(missing(mc)){ xtop <- mc_from_filter(coef) misc <- list(mC.factorsmat = coef[nrow(coef):1, ]) mc <- new("MultiCompanion", xtop = xtop, mo.col = "detect", misc=misc) } if(missing(order)){ order <- numeric(nrow(coef)) for(i in 1:length(order)){ order[i] <- max(which(coef[i,] != 0)) } } .Object@mc <- mc .Object@coef <- coef .Object@sign <- sign .Object@order <- order .Object } ) setMethod("[", signature(x = "MultiFilter"), function(x, i, j, k, lag0 = FALSE, what = "kc", form = "pc", drop = FALSE){ d <- mf_period(x) if(form == "pc"){ wrk <- if(missing(i) && missing(j)) x@coef[drop = drop] else if(missing(i)) x@coef[ , j = j, drop = drop] else if(missing(j)) x@coef[i = i, , drop = drop] else x@coef[i = i, j = j, drop = drop] if(x@sign < 0) wrk <- -wrk if(lag0){ if(substr(what, 2, 2) == "c") wrk <- -wrk tmpone <- if(substr(what, 1, 1) == "k") 1 else -1 wrk0 <- rep(tmpone, nrow(wrk)) wrk <- cbind(wrk0, wrk) } }else if(form %in% c("vs","v")){ if(form == "vs") vs <- mf_VSform(x) else vs <- mf_VSform(x, form = "I") if(!missing(k)){ if(0 %in% k) lag0 <- TRUE else if(lag0) k <- c(0, k) if(0 %in% k) k <- k + 1 } wrk <- vs$Phi if(lag0){ if(substr(what, 2, 2) == "c") wrk <- -wrk tmpone <- vs$Phi0 if(substr(what, 1, 1) != "k") tmpone <- -tmpone wrk0 <- tmpone wrk <- cbind(wrk0, wrk) } wrk <- array(wrk, dim = c(d, d, length(wrk) / d^2)) if(!missing(k)) wrk <- wrk[ , , k, drop = FALSE] wrk <- if(missing(i) && missing(j)) wrk else if(missing(i)) wrk[ , j, , drop = FALSE] else if(missing(j)) wrk[i, , , drop = FALSE] else wrk[i, j, , drop = FALSE] if(all(dim(wrk) == c(1, 1, 1))) wrk <- wrk[ , , ] else{ tmp <- dim(wrk) dim(wrk) <- c(tmp[1], tmp[2] * tmp[3]) } if(drop) wrk <- wrk[ , ] }else{ stop("Feature not yet implemented.") } res <- wrk res }) mf_period <- function(x){ length(x@order) } .mF.invperm <- function(x){ p <- as(x,"pMatrix") pt <- t(p) pt@perm } mf_order <- function(x, i = "max", form = "pc", perm){ res <- switch(form, pc = { if(is.numeric(i)) x@order[i] else if(i=="all") x@order else max(x@order) }, v = , vs = , I = , U = { if(missing(perm)) perm <- mf_period(x):1 wrk <- x@order[perm] - (mf_period(x) - 1):0 wrk <- ifelse(wrk <= 0, 0, ceiling(wrk/mf_period(x))) wrk <- wrk[.mF.invperm(perm)] if(is.numeric(i)) wrk[i] else if(i=="all") wrk else max(wrk) }, L = { if(missing(perm)) perm <- 1:mf_period(x) wrk <- x@order[perm] - 0:(mf_period(x) - 1) wrk <- ifelse(wrk <= 0, 0, ceiling(wrk/mf_period(x))) wrk <- wrk[.mF.invperm(perm)] if(is.numeric(i)) wrk[i] else if(i=="all") wrk else max(wrk) }, stop("argument \"form\" must be one of \"pc\", \"I\", \"U\", or \"L\".") ) res } mf_poles <- function(x, blocks = FALSE){ wrk <- mc_eigen(x@mc) if(blocks) cbind(wrk$values, wrk$len.block) else rep(wrk$values, times = wrk$len.block) } mf_VSform <- function(x, first = 1, form = "U", perm){ d <- mf_period(x) top <- if(first == 1) d else first - 1 s <- d:1 while(s[1] != top) s <- c(s[d], s[-d]) p <- mf_order(x, i = "all") P <- mf_order(x, form = "vs", perm = s) m <- P * d Phi0 <- diag(d) Phi <- matrix(0, nrow=d, ncol=m) phi <- x[] q <- p[s] for(i in seq_len(d - 1)){ jmax <- min(q[i], d - i) if(jmax > 0) Phi0[i, i+ 1:jmax] <- -phi[s[i], 1:jmax] if(jmax < q[i]) Phi[i, 1:(q[i]-jmax)] <- phi[s[i], (jmax+1):q[i]] 
} Phi[d, 1:q[d]] <- phi[s[d], 1:q[d]] vs <- list(Phi0 = Phi0, Phi = Phi) res <- switch(form, U = vs, L = { perm0 <- as(d:1,"pMatrix") tperm0 <- perm0 list(Phi0 = perm0 %*% vs$Phi0 %*% tperm0, Phi = rblockmult(perm0 %*% vs$Phi, tperm0) ) }, I = { list(Phi0 = diag(nrow(vs$Phi0)), Phi = solve(vs$Phi0,vs$Phi), Phi0inv = solve(vs$Phi0) ) }, stop("argument \"form\" must be one of \"I\", \"U\", or \"L\".") ) if(!missing(perm)){ perm <- as(perm, "pMatrix") perm0 <- as(d:1, "pMatrix") permP <- switch(form, U = perm %*% perm0, L = perm, I = perm %*% perm0, stop("argument \"form\" must be one of \"I\", \"U\", or \"L\".") ) tpermP <- t(permP) res$Phi0 <- permP %*% res$Phi0 %*% tpermP res$Phi <- rblockmult(permP %*% res$Phi, tpermP) if(!is.null(res$Phi0inv)) res$Phi0inv <- permP %*% res$Phi0inv %*% tpermP } res } setMethod("mcStable", signature( x = "MultiFilter" ), function(x){ wrk <- mf_poles(x) all(abs(wrk) < 1) } ) VAR2pcfilter <- function(phi, ..., Sigma, Phi0, Phi0inv, D, what = "coef", perm){ perm.flag <- missing(perm) Phi0.flag <- missing(Phi0) Phi0inv.flag <- missing(Phi0inv) Sigma.flag <- missing(Sigma) co <- cbind(phi, ...) if(!perm.flag){ if(Sigma.flag){ if(Phi0inv.flag){ Phi0inv <- solve(Phi0) e.var <- D } Sigma <- Phi0inv %*% diag(D) %*% t(Phi0inv) } user.perm <- nrow(co) + 1 - perm wrk <- permute_synch(list(co,Sigma), user.perm) co <- wrk[[1]] Sigma <- wrk[[2]] Sigma.flag <- FALSE }else{ perm <- nrow(co):1 } if(!Sigma.flag){ wrk <- .udu(Sigma) Phi0inv <- wrk$U Phi0 <- solve(wrk$U) e.var <- wrk$d }else if(!Phi0inv.flag){ Phi0 <- solve(Phi0inv) e.var <- D }else if(!Phi0.flag){ Phi0inv <- solve(Phi0) e.var <- D }else{ stop("One of Sigma, Phi0, Phi0inv must be specified.") } m <- Phi0 %*% co pcfilter <- matrix(0, nrow = nrow(Phi0), ncol = ncol(Phi0) - 1 + ncol(co)) for(i in 1:nrow(Phi0)){ perco <- c( - Phi0[i , -(1:i)], m[i, ]) pcfilter[i, seq_along(perco)] <- perco } pcfilter <- pcfilter[nrow(Phi0):1, ] if(what == "coef") pcfilter else if(what == "coef.and.var") list(pcfilter = pcfilter, var = e.var[nrow(Phi0):1] ) else list(pcfilter = pcfilter, var = e.var[nrow(Phi0):1], Uform = list( Sigma = e.var, U0 = Phi0, U = m, U0inv = Phi0inv, perm = perm )) }
show_output_in_terminal <- function() { system2("xterm", c("-e", shQuote("head tests/testthat/out/*; sleep 600"))) } df_all <- pillar:::new_tbl(list( a = c(1, 2.5, NA), b = c(1:2, NA), c = c(T, F, NA), d = c("a", "b", NA), e = factor(c("a", "b", NA)), f = as.Date("2015-12-09") + c(1:2, NA), g = as.POSIXct("2015-12-09 10:51:34.5678", tz = "UTC") + c(1:2, NA), h = as.list(c(1:2, NA)), i = list(list(1, 2:3), list(4:6), list(NA)) )) long_str <- strrep("Abcdefghij", 5) df_str <- pillar:::map(rlang::set_names(1:50), function(i) substr(long_str, 1, i)) add_special <- function(x) { if (inherits(x, "integer64")) { x <- c(x, bit64::NA_integer64_) } else { x <- x[seq2(1, length(x) + 1)] if (is.numeric(x) && is.double(x)) { x <- c(x, -Inf, Inf) } } x } continue <- function(x) { paste0(x, cli::symbol$continue) } local_colors <- function(.local_envir = parent.frame()) { withr::defer(envir = .local_envir, { num_colors(forget = TRUE) }) withr::local_options( list(cli.num_colors = 16L), .local_envir = .local_envir ) num_colors(forget = TRUE) } local_utf8 <- function(enable = TRUE, .local_envir = parent.frame()) { withr::local_options( list(cli.unicode = enable), .local_envir = .local_envir ) }
library(dplyr, warn.conflicts = FALSE) library(r2dii.data) test_that("w/ full demo datasets throws no error", { expect_no_error( loanbook_demo %>% slice(4:5) %>% match_name(ald_demo) %>% prioritize(priority = "ultimate_parent") ) }) test_that("errors gracefully if data lacks crucial columns", { expect_error(prioritize(fake_matched()), NA) expect_error( prioritize(select(fake_matched(), -id_loan)), class = "missing_names" ) expect_error( prioritize(select(fake_matched(), -level)), class = "missing_names" ) expect_error( prioritize(select(fake_matched(), -score)), class = "missing_names" ) expect_error( prioritize(select(fake_matched(), -sector_ald)), class = "missing_names" ) expect_error( prioritize(select(fake_matched(), -sector)), class = "missing_names" ) }) test_that("errors gracefully with bad `priority`", { expect_warning( prioritize(fake_matched(), priority = c("bad1", "bad2")), "[Ii]gnoring.*levels.*bad1.*bad2" ) }) test_that("picks score equal to 1", { matched <- fake_matched(score = c(1, 0.9)) expect_equal(min(prioritize(matched)$score), 1) }) test_that("picks the highest level per id_loan", { id_level <- tibble::tribble( ~id_loan, ~level, "aa", "ultimate_parent", "aa", "direct_loantaker", "bb", "intermediate_parent", "bb", "ultimate_parent", ) matched <- fake_matched(id_loan = id_level$id_loan, level = id_level$level) expect_equal( prioritize(matched)$level, c("direct_loantaker", "intermediate_parent") ) }) test_that("takes a `priority` function or lambda", { matched <- fake_matched(level = c("direct_loantaker", "ultimate_parent")) out <- prioritize(matched, priority = NULL) expect_equal(out$level, "direct_loantaker") out <- prioritize(matched, priority = rev) expect_equal(out$level, "ultimate_parent") out <- prioritize(matched, priority = ~ rev(.x)) expect_equal(out$level, "ultimate_parent") }) test_that("is sensitive to `priority`", { expect_equal( prioritize(fake_matched(level = c("z", "a")), priority = "z")$level, "z" ) }) test_that("ignores existing groups", { matched <- tibble::tribble( ~id_loan, ~other_id, ~level, "a", 1, "z", "a", 2, "a", "b", 3, "z", "b", 4, "a", ) %>% mutate(sector = "coal", sector_ald = "coal", score = 1) %>% group_by(other_id) expect_equal( prioritize(matched, priority = "z")$level, c("z", "z") ) }) test_that("when ignoring existing groups, does not throw a message", { matched <- group_by(fake_matched(other = 1), other) capture_msg <- function(expr) { tryCatch(expr, message = function(m) conditionMessage(m)) } unwanted_msg <- "missing grouping" has_unwanted_msg <- any(grepl(unwanted_msg, capture_msg(prioritize(matched)))) expect_false(has_unwanted_msg) }) test_that("previous preserves groups", { matched <- fake_matched(other_id = 1:4) %>% group_by(other_id, score) expect_equal( dplyr::group_vars(prioritize(matched)), c("other_id", "score") ) }) test_that("prioritize_level otputs expected vector", { matched <- tibble( level = c( "intermediate_parent_1", "direct_loantaker", "direct_loantaker", "direct_loantaker", "ultimate_parent", "intermediate_parent_2" ) ) expect_equal( prioritize_level(matched), c( "direct_loantaker", "intermediate_parent_1", "intermediate_parent_2", "ultimate_parent" ) ) }) test_that("prioritize_at with ungrouped data picks the highest priority row", { out <- tibble(x = c("a", "z")) %>% prioritize_at(.at = "x", priority = c("z", "a")) expect_equal(out$x, "z") }) test_that("prioritize_at with grouped data picks one row per group", { out <- tibble( x = c(1, 2, 2), y = c("a", "a", "z") ) %>% group_by(x) %>% prioritize_at(.at = "y", 
priority = c("z", "a")) %>% arrange(x) expect_equal(out$y, c("a", "z")) }) test_that("does not warn if a group has not all priority items", { expect_no_warning( fake_matched(level = c("a", "z"), new = level) %>% group_by(new) %>% prioritize(priority = c("z", "a")) ) }) test_that("w/ id_loan at level direct* & ultimate* picks only direct* ( matched <- fake_matched(level = c("ultimate_parent", "direct_loantaker")) expect_identical(prioritize(matched)$level, "direct_loantaker") }) test_that("output is independent from the row-order of the input ( matched_direct <- tibble::tribble( ~id_loan, ~id_2dii, ~level, ~score, ~sector, ~sector_ald, "A", "D", "direct_loantaker", 1, "automotive", "automotive", "A", "U", "ultimate_parent", 1, "automotive", "automotive", "B", "U", "ultimate_parent", 1, "automotive", "automotive", ) matched_invert <- dplyr::arrange(matched_direct, desc(id_loan)) testthat::expect_equal( prioritize(matched_direct)$id_loan, prioritize(matched_invert)$id_loan ) }) test_that("error if score=1 & values by id_loan+level are duplicated ( valid <- fake_matched(score = 0:1) expect_no_error(prioritize(valid)) invalid <- fake_matched(score = c(1, 1)) expect_error( class = "duplicated_score1_by_id_loan_by_level", prioritize(invalid) ) }) test_that("passes if score=1 & values by id_loan are duplicated for distinct levels ( valid <- fake_matched( score = 1, id_loan = "L1", level = c("direct_loantaker", "intermediate_parent", "ultimate_parent"), id_2dii = c("dl", "ip", "up") ) expect_no_error(prioritize(valid)) }) test_that("with 0-row input returns 0-row input", { lbk <- fake_lbk() ald <- fake_ald(name_company = "won't match") zero_row <- suppressWarnings(match_name(lbk, ald)) has_zero_row <- identical(nrow(zero_row), 0L) stopifnot(has_zero_row) expect_no_error(prioritize(zero_row)) })
plot.GLVmix <- function(x, ...){ g <- expand.grid(alpha = x$u, theta = x$v) g$fuv <- x$fuv pl <- lattice::cloud(fuv ~ alpha * theta, data = g, type = "h", lwd = 2, zlim = c(0, max(g$fuv)), scales = list(arrows = FALSE), xlab = "\u03B1", ylab = "\u03B8", zlab = "density", screen = list(z = 10, x = -70), ...) print(pl) }
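# Usage sketch (not from the original source; assumes lattice is installed):
# plot.GLVmix() only reads x$u, x$v, and x$fuv, so a mock object carrying those
# fields under class "GLVmix" is enough to exercise the method; a real fit
# would come from the package's GLVmix() estimator.
mock_fit <- structure(
  list(u   = seq(-2, 2, length.out = 10),   # grid for alpha
       v   = seq(0.5, 2, length.out = 10),  # grid for theta
       fuv = runif(100)),                   # mass at each (alpha, theta) pair
  class = "GLVmix")
plot(mock_fit)  # dispatches to plot.GLVmix(), drawing the lattice cloud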
knitr::opts_chunk$set(message = FALSE, cache = FALSE, eval = FALSE)
knitr::include_graphics("figures/feature_table.jpg") library(PSSMCOOL) knitr::include_graphics("figures/pssm_ac.jpg") X<-pssm_ac(system.file("extdata", "C7GQS7.txt.pssm", package="PSSMCOOL")) head(X, n = 50) knitr::include_graphics("figures/dpc-pssm.jpg") X<-aac_pssm(system.file("extdata", "C7GQS7.txt.pssm", package="PSSMCOOL")) head(X, n = 50) ss<-dpc_pssm(system.file("extdata", "C7GQS7.txt.pssm", package="PSSMCOOL")) head(X, n = 50) ss<-aadp_pssm(system.file("extdata", "C7GQS7.txt.pssm", package="PSSMCOOL")) head(X, n = 50) knitr::include_graphics("figures/trigram.jpg") X<-trigrame_pssm(paste0(system.file("extdata",package="PSSMCOOL"),"/C7GSI6.txt.pssm")) head(X, n = 50) knitr::include_graphics("figures/pse-pssm.jpg") X<-pse_pssm(system.file("extdata", "C7GQS7.txt.pssm", package="PSSMCOOL")) head(X, n = 50) knitr::include_graphics("figures/k-separated.jpg") X<-k_separated_bigrams_pssm(system.file("extdata", "C7GQS7.txt.pssm", package="PSSMCOOL"),5) head(X, n = 50) knitr::include_graphics("figures/EEDP.jpg") X<-EDP_EEDP_MEDP(paste0(system.file("extdata",package="PSSMCOOL"),"/C7GS61.txt.pssm")) head(X[[3]], n = 50) knitr::include_graphics("figures/AB-PSSM.jpg") X<- AB_PSSM(system.file("extdata","C7GRQ3.txt.pssm",package="PSSMCOOL")) head(X[1], n = 50) X<-AATP_TPC(paste0(system.file("extdata",package="PSSMCOOL"),"/C7GQS7.txt.pssm")) head(X[[2]], n = 50) X<-CS_PSe_PSSM(system.file("extdata", "C7GSI6.txt.pssm", package="PSSMCOOL"),"total") head(X, n = 50) knitr::include_graphics("figures/s-fpssm.jpg") X<-FPSSM(system.file("extdata","C7GQS7.txt.pssm",package="PSSMCOOL"),20) head(X, n = 50) knitr::include_graphics("figures/SCSH2.jpg") knitr::include_graphics("figures/scshtable.jpg") X<- scsh2(system.file("extdata","C7GRQ3.txt.pssm",package="PSSMCOOL"),2) head(X, n = 200) X<-rpssm(system.file("extdata", "C7GQS7.txt.pssm", package="PSSMCOOL")) head(X, n = 50) knitr::include_graphics("figures/cc-pssm.jpg") X<-pssm_cc(system.file("extdata","C7GQS7.txt.pssm",package="PSSMCOOL")) head(X, n = 50) X<-Discrete_Cosine_Transform(system.file("extdata", "C7GQS7.txt.pssm", package="PSSMCOOL")) head(X, n = 50) knitr::include_graphics("figures/dwt.jpg") X<-dwt_PSSM(system.file("extdata", "C7GQS7.txt.pssm", package="PSSMCOOL")) head(X, n = 50) knitr::include_graphics("figures/disulfid.jpg") X<-disulfid(system.file("extdata", "C7GQS7.txt.pssm", package="PSSMCOOL")) head(X[,1:50]) X<-DP_PSSM(system.file("extdata", "C7GQS7.txt.pssm", package="PSSMCOOL")) head(X, n = 50) X<-DFMCA_PSSM(system.file("extdata", "C7GQS7.txt.pssm", package="PSSMCOOL"),7) head(X, n = 50) X<-grey_pssm_pseAAC(system.file("extdata", "C7GQS7.txt.pssm", package="PSSMCOOL")) head(X, n = 50) knitr::include_graphics("figures/smoothed.jpg") X<-smoothed_PSSM(system.file("extdata", "C7GQS7.txt.pssm", package="PSSMCOOL"),7,11,c(2,3,8,9)) head(X[,1:50], n = 50) X<-kiderafactor(system.file("extdata", "C7GQS7.txt.pssm", package="PSSMCOOL"),c(2,3,8,9)) head(X[,1:50], n = 50) X<-MBMGACPSSM(system.file("extdata", "C7GQS7.txt.pssm", package="PSSMCOOL")) head(X, n = 50) X<-LPC_PSSM(system.file("extdata", "C7GQS7.txt.pssm", package="PSSMCOOL")) head(X, n = 50) knitr::include_graphics("figures/pssm400.jpg") X<-pssm400(system.file("extdata","C7GQS7.txt.pssm",package="PSSMCOOL")) head(X, n = 50) X<- RPM_PSSM(system.file("extdata","C7GRQ3.txt.pssm",package="PSSMCOOL")) X X<-PSSMBLOCK(system.file("extdata", "C7GQS7.txt.pssm", package="PSSMCOOL"),5) head(X, n = 50) knitr::include_graphics("figures/pssmsd.jpg") X<-PSSM_SD(system.file("extdata", "C7GQS7.txt.pssm", 
package="PSSMCOOL")) head(X, n = 50) X<-pssm_seg(system.file("extdata", "C7GQS7.txt.pssm", package="PSSMCOOL"),3) head(X, n = 50) X<-SOMA_PSSM(system.file("extdata", "C7GQS7.txt.pssm", package="PSSMCOOL")) head(X, n = 50) X<-SVD_PSSM(system.file("extdata", "C7GQS7.txt.pssm", package="PSSMCOOL")) head(X, n = 20) knitr::include_graphics("figures/sessionInfo.PNG")
ccd_analysis <- function(x) { Coefficients <- rsm::coded.data(x) colnames(x)[ncol(x)] <- "Response" for (i in 1:(ncol(x) - 1)) { colnames(x)[i] <- paste("Factor", i, sep = "") } rsmodel <- c(NA) for (i in 1:(ncol(x) - 1)) { rsmodel <- c(rsmodel, paste("Factor", i, sep = "")) } rsmodel <- rsmodel[-1] rsmodel <- paste(rsmodel, collapse = ',') rsmodel1 <- stats::as.formula(paste("Response ~ SO(", rsmodel, ")", sep = "")) ResponseSurface <- rsm::rsm(rsmodel1, data = x) rsmodel2 <- gsub(",", "+", rsmodel) rsmodel2 <- stats::as.formula(paste("~", rsmodel2, sep = "")) NormalizedFactors <- scale(x[-ncol(x)]) plotprompt <- paste( "For a 3-D plot of the response surface press 1, to plot the contour of the predicted variance of the response, press any other key: " ) plotval <- readline(plotprompt) if (plotval == 1) { graphics::persp( ResponseSurface, rsmodel2, at = rsm::canonical(ResponseSurface), col = grDevices::rainbow(50), contours = "colors" ) } else { X <- cbind(matrix(1, nrow(x), 1), as.matrix(x[1:nrow(x), 1:2])) X <- cbind(X, matrix(NA, nrow(X), 3)) for (i in 1:nrow(X)) { X[i, 4] <- X[i, 2]^2 X[i, 5] <- X[i, 3]^2 X[i, 6] <- X[i, 2] * X[i, 3] } BIGX <- matrix(NA, 1, 6) for (i in seq(min(X[1:nrow(X), 2]), max(X[1:nrow(X), 2]), 0.1)) { newrow <- matrix(NA, 1, 6) for (o in seq(min(X[1:nrow(X), 3]), max(X[1:nrow(X), 3]), 0.1)) { newrow[1, 1] <- 1 newrow[1, 2] <- i newrow[1, 3] <- o newrow[1, 4] <- i * i newrow[1, 5] <- o * o newrow[1, 6] <- i * o BIGX <- rbind(BIGX, newrow) } } BIGX <- BIGX[2:nrow(BIGX), 1:6] Factor1 <- BIGX[1:nrow(BIGX), 2] Factor2 <- BIGX[1:nrow(BIGX), 3] A <- solve(crossprod(X)) Predicted_Variance <- matrix(NA, nrow(BIGX), 1) for (i in 1:nrow(BIGX)) { b <- BIGX[i, 1:6] Predicted_Variance[i, 1] <- t(b) %*% A %*% b } ForPlot <- as.data.frame(cbind(Factor1, Factor2, Predicted_Variance)) colnames(ForPlot) <- c("Factor1", "Factor2", "Predicted_Variance") p <- plotly::plot_ly(ForPlot, x = ~Factor1, y = ~Factor2, z = ~Predicted_Variance, type = "contour", colorscale = "Portland", contours = list( showlabels = TRUE), line = list(smoothing = 0) ) print(p) } list( Data.For.Analysis = Coefficients, Response.Surface.Summary = summary(ResponseSurface) ) }
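# Hypothetical call sketch (the data below is illustrative, not from the
# source): ccd_analysis() expects a data frame whose last column is the
# response and whose remaining columns are coded design factors; note that the
# prediction-variance branch hard-codes exactly two factors (it reads columns
# 1:2 of the design into the model matrix).
des <- expand.grid(Factor1 = c(-1, 0, 1), Factor2 = c(-1, 0, 1))
des$Response <- 10 + 2 * des$Factor1 - des$Factor2 +
  0.5 * des$Factor1^2 + rnorm(nrow(des))
# res <- ccd_analysis(des)  # interactive: readline() asks which plot to draw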
lsem_fitsem_joint_estimation_prepare_partable <- function(partable, G, par_invariant=NULL, par_linear=NULL, par_quadratic=NULL) { partable0 <- partable partable$id0 <- 1:nrow(partable) partable$con <- 0 label_list <- partable$plabel partable1 <- lsem_fitsem_joint_estimation_prepare_partable_include_group_label( partable=partable, gg=1, label_list=label_list) partable_mg <- partable1 for (gg in 2:G){ partable_gg <- partable partable_gg <- lsem_fitsem_joint_estimation_prepare_partable_include_group_label( partable=partable_gg, gg=gg, label_list=label_list) partable_gg$group <- partable_gg$block <- gg for (vv in c("free","id")){ partable_gg <- lsem_fitsem_joint_estimation_partable_id(partable_gg=partable_gg, partable_mg=partable_mg, vv=vv) partable_gg[,vv][ partable[,vv]==0 ] <- 0 } partable_gg$plabel <- paste0(label_list,"g",gg) partable_gg$plabel[ label_list==""] <- "" partable_mg <- rbind(partable_mg, partable_gg) } pars <- sirt_lavaan_partable_parnames(partable=partable_mg) partable_mg$par <- pars fixed_invariant <- intersect( paste(partable_mg$par[ partable$free==0]), par_invariant ) par_invariant <- setdiff( par_invariant, fixed_invariant) par1 <- sirt_define_vector( value="inv", names=par_invariant) par2 <- sirt_define_vector( value="lin", names=par_linear) par3 <- sirt_define_vector( value="quad", names=par_quadratic) par_vec <- c(par1, par2, par3) NI <- length(par_vec) par_vec_names <- names(par_vec) if ( NI > 0 ){ partable1 <- partable_mg[1,] NV <- ncol(partable_mg) for (vv in 1:NV){ if (is.numeric(partable1[1,vv])){ partable1[1,vv] <- 0 } else { partable1[1,vv] <- "" } } partable1$user <- 2 partable1$ustart <- NA partable1$con <- 0 partable1$op <- "==" partable1c <- partable1 for (vv in 1:NI){ par_vec_vv <- par_vec[vv] par_vec_names_vv <- par_vec_names[vv] ind_vv <- which( paste(partable_mg$par)==par_vec_names[vv] ) LV2 <- LV <- length(ind_vv) if (par_vec_vv=="lin"){ LV2 <- LV - 1 } if (par_vec_vv=="quad"){ LV2 <- LV - 2 } plabels <- paste(partable_mg$plabel[ind_vv]) for (ll in 2:LV2){ partable1c$con <- vv if (par_vec_vv=="inv"){ partable1c$lhs <- plabels[1] partable1c$rhs <- plabels[ll] } if (par_vec_vv=="lin"){ diff1 <- paste0( plabels[ll+1], "-2*", plabels[ll], "+", plabels[ll-1] ) partable1c$lhs <- diff1 partable1c$rhs <- 0 partable1c$user <- 1 } if (par_vec_vv=="quad"){ diff1 <- paste0( plabels[ll+2], "-3*", plabels[ll+1], "+3*", plabels[ll], "-", plabels[ll-1] ) partable1c$lhs <- diff1 partable1c$rhs <- 0 partable1c$user <- 1 } partable1c$par <- paste0(par_vec_names_vv, "_con", ll-1) partable1c$id <- max(partable_mg$id) + 1 partable_mg <- rbind(partable_mg, partable1c) } } } return(partable_mg) }
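# Reading note (inferred from the construction above, not from sirt's docs):
# par_invariant adds "plabel_g1 == plabel_gg" equality rows across groups;
# par_linear constrains second differences of a parameter across groups to
# zero, i.e. p[g+1] - 2*p[g] + p[g-1] == 0, so the parameter moves linearly in
# g; and par_quadratic constrains third differences to zero, allowing at most
# a quadratic trend in g.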
"summary.bayes.parobs" <- function(object, ...) { digits <- max(3, getOption("digits") - 3) if (!inherits(object, "bayes.parobs")) { stop(paste(sQuote('summary.bayes.parobs'), "designed for", sQuote('bayes.parobs'), "objects")) } if (inherits(object, "bsynthesis")) { cat("\nCall:\n", paste(deparse(object$call), sep = "\n", collapse = "\n"), "\n", sep = "") } if (object$scale_x) { J <- ncol(object$Outcome) xcols <- ncol(object$XCovariate) tlength <- nrow(object$mcmc.draws$theta) trlength <- tlength - xcols * J tscale <- c(rep(unname(attributes(object$XCovariate)$`scaled:scale`), J), rep(1, trlength)) } else { tlength <- nrow(object$mcmc.draws$theta) tscale <- rep(1, tlength) } theta <- list() theta.post <- vapply(1:object$mcmc$nkeep, function(ikeep) { object$mcmc.draws$theta[,ikeep] / tscale }, FUN.VALUE = numeric(tlength)) theta <- list() theta$mean <- rowMeans(theta.post) theta$sd <- apply(theta.post, 1, sd) sig.level <- 1 - 0.95 theta.hpd <- mhpd(theta.post, 0.95) theta$lower <- theta.hpd[,1] theta$upper <- theta.hpd[,2] r <- cbind(theta$mean, theta$sd, theta$lower, theta$upper) colnames(r) <- c("Post.Mean", "Std.Dev", "HPD(Lower)", "HPD(Upper)") xcc <- if (!is.null(colnames(object$XCovariate))) colnames(object$XCovariate) else paste0("beta", 1:ncol(object$XCovariate)) wcc <- if (!is.null(colnames(object$WCovariate))) colnames(object$WCovariate) else paste0("gam", 1:ncol(object$WCovariate)) J <- ncol(object$Outcome) if (is.null(object$group)) { rownames(r) <- c(paste0(rep(xcc, J), "_", rep(1:J, each=length(xcc))), paste0(rep(wcc, J), "_", rep(1:J, each=length(wcc)))) } else { rownames(r) <- c(paste0(rep(xcc, J), "_", rep(1:J, each=length(xcc))), paste0(rep(wcc, 2*J), rep(rep(c("*(1-2nd)", "*2nd"), each = length(wcc)), J), "_", rep(1:J, each = 2*length(wcc)))) } cat("Fixed-effects:\n") r <- round(r, digits=digits) print.default(r, print.gap = 2) invisible(r) }
library(HEMDAG); source("make.test.data.R"); context("test obozinski methods"); test_that("obozinski.max works", { S <- make.scores(); g <- make.graph(); root <- root.node(g); S.noroot <- S[,-which(colnames(S) %in% root)]; tmp <- tempfile(); S.max <- obozinski.max(S, g, root); write.table(S.max, row.names=TRUE, col.names=TRUE, quote=FALSE, file=tmp); S.check <- as.matrix(read.table(tmp)); expect_equal(S.max, S.check); S.max.noroot <- obozinski.max(S.noroot, g, root); write.table(S.max.noroot, row.names=TRUE, col.names=TRUE, quote=FALSE, file=tmp); S.check <- as.matrix(read.table(tmp)); expect_equal(S.max.noroot, S.check); S.error <- S[,-which(colnames(S) %in% c("D","H"))]; expect_error(obozinski.max(S.error, g, root), "mismatch between the number of nodes of the graph g and the number of classes of the scores matrix S"); }) test_that("obozinski.and works", { S <- make.scores(); g <- make.graph(); root <- root.node(g); S.noroot <- S[,-which(colnames(S) %in% root)]; tmp <- tempfile(); S.and <- obozinski.and(S, g, root); write.table(S.and, row.names=TRUE, col.names=TRUE, quote=FALSE, file=tmp); S.check <- as.matrix(read.table(tmp)); expect_equal(S.and, S.check); S.and.noroot <- obozinski.and(S.noroot, g, root); write.table(S.and.noroot, row.names=TRUE, col.names=TRUE, quote=FALSE, file=tmp); S.check <- as.matrix(read.table(tmp)); expect_equal(S.and.noroot, S.check); S.error <- S[,-which(colnames(S) %in% c("D","H"))]; expect_error(obozinski.and(S.error, g, root), "mismatch between the number of nodes of the graph g and the number of classes of the scores matrix S"); }) test_that("obozinski.or works", { S <- make.scores(); g <- make.graph(); root <- root.node(g); S.noroot <- S[,-which(colnames(S) %in% root)]; tmp <- tempfile(); S.or <- obozinski.or(S, g, root); write.table(S.or, row.names=TRUE, col.names=TRUE, quote=FALSE, file=tmp); S.check <- as.matrix(read.table(tmp)); expect_equal(S.or, S.check); S.or.noroot <- obozinski.or(S.noroot, g, root); write.table(S.or.noroot, row.names=TRUE, col.names=TRUE, quote=FALSE, file=tmp); S.check <- as.matrix(read.table(tmp)); expect_equal(S.or.noroot, S.check); S.error <- S[,-which(colnames(S) %in% c("D","H"))]; expect_error(obozinski.or(S.error, g, root), "mismatch between the number of nodes of the graph g and the number of classes of the scores matrix S"); }) test_that("obozinski.methods works", { g <- make.graph(); S <- make.scores(); expect_output(S.hier <- obozinski.methods(S, g, heuristic="max", norm=FALSE, norm.type=NULL), "Obozinski's heuristic max correction: done"); expect_output(S.hier <- obozinski.methods(S, g, heuristic="and", norm=FALSE, norm.type=NULL), "Obozinski's heuristic and correction: done"); expect_output(S.hier <- obozinski.methods(S, g, heuristic="or", norm=FALSE, norm.type=NULL), "Obozinski's heuristic or correction: done"); expect_output(S.hier <- obozinski.methods(S, g, heuristic="and", norm=TRUE, norm.type="maxnorm"), "maxnorm normalization: done\\nObozinski's heuristic and correction: done"); expect_error(S.hier <- obozinski.methods(S, g, heuristic="max", norm=FALSE, norm.type="maxnorm"), "do you wanna or not normalize the matrix S\\? 
norm and norm.type are inconsistent"); expect_error(S.hier <- obozinski.methods(S, g, heuristic="and", norm=TRUE, norm.type=NULL), "choose a normalization methods among those available"); expect_error(S.hier <- obozinski.methods(S, g, heuristic="o", norm=FALSE, norm.type=NULL), "the chosen heuristic method is not among those available or it has been misspelled"); }) test_that("obozinski.holdout works", { g <- make.graph(); S <- make.scores(); expect_output(S.hier <- obozinski.holdout(S, g, testIndex=1:2, heuristic="max", norm=FALSE, norm.type=NULL), "Obozinski's heuristic max correction: done"); expect_output(S.hier <- obozinski.holdout(S, g, testIndex=1:2, heuristic="and", norm=FALSE, norm.type=NULL), "Obozinski's heuristic and correction: done"); expect_output(S.hier <- obozinski.holdout(S, g, testIndex=1:2, heuristic="or", norm=FALSE, norm.type=NULL), "Obozinski's heuristic or correction: done"); expect_output(S.hier <- obozinski.holdout(S, g, testIndex=1:2, heuristic="and", norm=TRUE, norm.type="maxnorm"), "maxnorm normalization: done\\nObozinski's heuristic and correction: done"); expect_error(S.hier <- obozinski.holdout(S, g, testIndex=1:2, heuristic="max", norm=FALSE, norm.type="maxnorm"), "do you wanna or not normalize the matrix S\\? norm and norm.type are inconsistent"); expect_error(S.hier <- obozinski.holdout(S, g, testIndex=1:2, heuristic="and", norm=TRUE, norm.type=NULL), "choose a normalization methods among those available"); expect_error(S.hier <- obozinski.holdout(S, g, testIndex=1:2, heuristic="o", norm=FALSE, norm.type=NULL), "the chosen heuristic method is not among those available or it has been misspelled"); })
'tcga.bc.full'
GE_bias_normal_squaredmis_old <- function(beta_list, rho_list, prob_G, cov_Z=NULL, cov_W=NULL) { surv <- function(x) {1-pnorm(x)} rho_GE <- rho_list[[1]]; rho_GZ <- rho_list[[2]]; rho_EZ <- rho_list[[3]] rho_GW <- rho_list[[4]]; rho_EW <- rho_list[[5]]; rho_ZW <- rho_list[[6]] w <- qnorm(1-prob_G) r_GE <- rho_GE / (2*dnorm(w)) r_GZ <- rho_GZ / (2*dnorm(w)) r_GW <- rho_GW / (2*dnorm(w)) beta_0 <- beta_list[[1]]; beta_G <- beta_list[[2]]; beta_E <- beta_list[[3]] beta_I <- beta_list[[4]]; BETA_Z <- beta_list[[5]]; BETA_M <- beta_list[[6]] num_W <- length(beta_list[[6]]) num_Z <- length(beta_list[[5]]) translated_inputs <- GE_translate_inputs_old(beta_list, rho_list, prob_G, cov_Z, cov_W) sig_mat <- translated_inputs$sig_mat_total sig_mat_ZZ <- translated_inputs$sig_mat_ZZ sig_mat_WW <- translated_inputs$sig_mat_WW if (is.null(sig_mat_ZZ)) { MU_Z <- 0 } else { MU_Z <- rep(0, num_Z) } if (is.null(sig_mat_WW)) { MU_W <- 0 MU_M <- 0 } else { MU_M <- rep(1, num_W) MU_W <- rep(0, num_W) } mu_f <- 1 mu_h <- 1 mu_GE <- rho_GE mu_Gf <- 2*r_GE^2*w*dnorm(w) + 2*surv(w) - 2*prob_G mu_Gh <- mu_Gf mu_GG <- 2*prob_G*(1-prob_G) mu_EE <- 1 mu_Ef <- 0 MU_GZ <- rho_GZ MU_GW <- rho_GW MU_EM <- rep(0, num_W) MU_fW <- rep(0, num_W) MU_EW <- rho_EW MU_EZ <- rho_EZ MU_fZ <- rep(0, num_Z) if (is.null(sig_mat_WW)) { MU_GM <- 0 } else { MU_GM <- 2*r_GW^2*w*dnorm(w) + 2*surv(w) - 2*prob_G } MU_ZW <- matrix(data=rho_ZW, nrow=num_Z, ncol=num_W, byrow=TRUE) MU_WZ <- t(MU_ZW) MU_ZM <- matrix(data=0, nrow=num_Z, ncol=num_W) MU_WM <- matrix(data=0, nrow=num_W, ncol=num_W) MU_ZZ <- sig_mat_ZZ MU_WW <- sig_mat_WW mu_G1_E <- r_GE*dnorm(w) mu_G1_EE <- r_GE^2*w*dnorm(w) + surv(w) mu_G1_EEE <- r_GE^3*w^2*dnorm(w) - r_GE^3*dnorm(w) + 3*r_GE*dnorm(w) temp_sig <- matrix(data=c(1-r_GE^2, -r_GE^2, -r_GE^2, 1-r_GE^2), nrow=2) f_G1_G2_E <- function(x,w,r_GE) { x*dnorm(x)*mvtnorm::pmvnorm(lower=c(w,w), upper=c(Inf,Inf), mean=c(r_GE*x, r_GE*x), sigma=temp_sig) } mu_G1_G2_E <- pracma::quadinf(f=f_G1_G2_E, xa=-Inf, xb=Inf, w=w, r_GE=r_GE)$Q[1] temp_sig <- matrix(data=c(1-r_GE^2, -r_GE^2, -r_GE^2, 1-r_GE^2), nrow=2) f_G1_G2_EE <- function(x,w,r_GE) { x^2*dnorm(x)*mvtnorm::pmvnorm(lower=c(w,w), upper=c(Inf,Inf), mean=c(r_GE*x, r_GE*x), sigma=temp_sig) } mu_G1_G2_EE <- pracma::quadinf(f=f_G1_G2_EE, xa=-Inf, xb=Inf, w=w, r_GE=r_GE)$Q[1] temp_sig <- matrix(data=c(1-r_GE^2, -r_GE^2, -r_GE^2, 1-r_GE^2), nrow=2) f_G1_G2_EEE <- function(x,w,r_GE) { x^3*dnorm(x)*mvtnorm::pmvnorm(lower=c(w,w), upper=c(Inf,Inf), mean=c(r_GE*x, r_GE*x), sigma=temp_sig) } mu_G1_G2_EEE <- pracma::quadinf(f=f_G1_G2_EEE, xa=-Inf, xb=Inf, w=w, r_GE=r_GE)$Q[1] mu_GGE <- 2*mu_G1_E + 2*mu_G1_G2_E - 8*prob_G*mu_G1_E mu_GGh <- 2*mu_G1_EE + 2*mu_G1_G2_EE + 4*prob_G^2*1 - 8*prob_G*mu_G1_EE mu_GEE <- mu_Gf mu_GEf <- 2*(r_GE^3*w^2*dnorm(w) - r_GE^3*dnorm(w) + 3*r_GE*dnorm(w)) mu_GEh <- mu_GEf mu_GGEE <- 2*mu_G1_EE + 2*mu_G1_G2_EE + 4*prob_G^2*1 - 8*prob_G*mu_G1_EE mu_GGEf <- 2*mu_G1_EEE + 2*mu_G1_G2_EEE + 4*prob_G^2*0 - 8*prob_G*mu_G1_EEE mu_GGEh <- mu_GGEf f_G1_E_Z <- function(x, w, r_EZ, r_GE, r_GZ) { ( r_EZ * x * surv( (w-x*r_GE) / sqrt(1-r_GE^2) ) + dnorm( (w-r_GE*x) / sqrt(1-r_GE^2) ) * (r_GZ-r_GE*r_EZ) / sqrt(1-r_GE^2) ) * x* dnorm(x) } if (is.null(sig_mat_ZZ)) { mu_G1_E_Z <- 0 } else { mu_G1_E_Z <- rep(NA, num_Z) for (i in 1:num_Z) { mu_G1_E_Z[i] <- pracma::quadinf(f= f_G1_E_Z, xa=-Inf, xb=Inf, w=w, r_EZ=rho_EZ[i], r_GE=r_GE, r_GZ=r_GZ[i])$Q } } f_G1_E_W <- function(x, w, r_EW, r_GE, r_GW) { ( r_EW * x * surv( (w-x*r_GE) / sqrt(1-r_GE^2) ) + dnorm( (w-r_GE*x) / sqrt(1-r_GE^2) ) 
* (r_GW-r_GE*r_EW) / sqrt(1-r_GE^2) ) * x* dnorm(x) } if (is.null(sig_mat_WW)) { mu_G1_E_W <- 0 } else { mu_G1_E_W <- rep(NA, num_W) for (i in 1:num_W) { mu_G1_E_W[i] <- pracma::quadinf(f= f_G1_E_W, xa=-Inf, xb=Inf, w=w, r_EW=rho_EW[i], r_GE=r_GE, r_GW=r_GW[i])$Q } } f_G1_E_WW <- function(x, w, r_GE, r_GW, r_EW) { ( r_EW * x* surv( (w-x*r_GW) / sqrt(1-r_GW^2) ) + dnorm( (w-r_GW*x) / sqrt(1-r_GW^2) ) * (r_GE-r_GW*r_EW) / sqrt(1-r_GW^2) ) * x^2 * dnorm(x) } if (is.null(sig_mat_WW)) { mu_G1_E_WW <- 0 } else { mu_G1_E_WW <- rep(NA, num_W) for (i in 1:num_W) { mu_G1_E_WW[i] <- pracma::quadinf(f=f_G1_E_WW, xa=-Inf, xb=Inf, w=w , r_GE=r_GE, r_GW=r_GW[i], r_EW=rho_EW[i])$Q } } f_G1_W_EE <- function(x, w, r_GE, r_GW, r_EW) { ( r_EW * x* surv( (w-x*r_GE) / sqrt(1-r_GE^2) ) + dnorm( (w-r_GE*x) / sqrt(1-r_GE^2) ) * (r_GW-r_GE*r_EW) / sqrt(1-r_GE^2) ) * x^2 * dnorm(x) } if (is.null(sig_mat_WW)) { mu_G1_W_EE <- 0 } else { mu_G1_W_EE <- rep(NA, num_W) for (i in 1:num_W) { mu_G1_W_EE[i] <- pracma::quadinf(f=f_G1_W_EE, xa=-Inf, xb=Inf, w=w, r_GE=r_GE, r_GW=r_GW[i], r_EW=rho_EW[i])$Q } } f_G1_Z_EE <- function(x, w, r_GE, r_GZ, r_EZ) { ( r_EZ * x* surv( (w-x*r_GE) / sqrt(1-r_GE^2) ) + dnorm( (w-r_GE*x) / sqrt(1-r_GE^2) ) * (r_GZ-r_GE*r_EZ) / sqrt(1-r_GE^2) ) * x^2 * dnorm(x) } if (is.null(sig_mat_ZZ)) { mu_G1_Z_EE <- 0 } else { mu_G1_Z_EE <- rep(NA, num_Z) for (i in 1:num_Z) { mu_G1_Z_EE[i] <- pracma::quadinf(f=f_G1_Z_EE, xa=-Inf, xb=Inf, w=w, r_GE=r_GE, r_GZ=r_GZ[i], r_EZ=rho_EZ[i])$Q } } MU_GEZ <- 2*mu_G1_E_Z - 2*prob_G*rho_EZ MU_GEW <- 2*mu_G1_E_W - 2*prob_G*rho_EW MU_GEM <- 2*mu_G1_E_WW MU_GhW <- 2*mu_G1_W_EE MU_GhZ <- 2*mu_G1_Z_EE A <- (mu_GE * MU_GZ / mu_GG - MU_EZ) / (mu_EE - mu_GE^2/mu_GG) B <- (mu_GE * MU_GW / mu_GG - MU_EW) / (mu_EE - mu_GE^2/mu_GG) if (is.null(MU_ZZ)) { O <- 0 solve_O <- 0 } else { O <- MU_Z%*%t(MU_Z) + MU_GZ%*%t(MU_GZ)/mu_GG - MU_ZZ - A %*% t(MU_EZ - MU_GZ*mu_GE/mu_GG) solve_O <- solve(O) } C <- (B %*% t(MU_EZ - MU_GZ*mu_GE/mu_GG) - MU_W%*%t(MU_Z) - MU_GW%*%t(MU_GZ)/mu_GG + MU_WZ) %*% solve_O if ( is.null(MU_WW) ) { Q <- 0 solve_Q <- 0 } else { Q <- MU_W%*%t(MU_W) + MU_GW%*%t(MU_GW)/mu_GG - MU_WW + B %*% t(MU_GW*mu_GE/mu_GG - MU_EW) + C %*% ( MU_Z%*%t(MU_W) + MU_GZ%*%t(MU_GW)/mu_GG - MU_ZW + A %*% t(MU_GW*mu_GE/mu_GG - MU_EW) ) solve_Q <- solve(Q) } D <- (mu_GE * mu_GGE / mu_GG - mu_GEE) / (mu_EE - mu_GE^2 / mu_GG) E <- t(MU_GEZ - MU_Z*mu_GE - MU_GZ*mu_GGE/mu_GG + D*(MU_EZ - MU_GZ*mu_GE/mu_GG)) %*% solve_O EFF <- ( t(MU_W*mu_GE + MU_GW*mu_GGE/mu_GG - MU_GEW + D*(MU_GW * mu_GE / mu_GG - MU_EW)) + E %*% (A %*% t(MU_GW*mu_GE/mu_GG - MU_EW) + MU_Z%*%t(MU_W) + MU_GZ%*%t(MU_GW)/mu_GG - MU_ZW) ) %*% solve_Q alpha_I_num <- beta_E * (-mu_f*mu_GE - mu_Gf*mu_GGE/mu_GG + mu_GEf + D * (mu_Ef - mu_Gf*mu_GE/mu_GG)) + beta_E * E %*% (-mu_f*MU_Z - MU_GZ*mu_Gf/mu_GG + MU_fZ + A * (mu_Ef - mu_Gf*mu_GE/mu_GG)) + beta_I * (-mu_Gh*mu_GE - mu_GGh*mu_GGE/mu_GG + mu_GGEh + D * (mu_GEh - mu_GGh*mu_GE/mu_GG)) + beta_I * E %*% (MU_GhZ -mu_Gh*MU_Z - MU_GZ*mu_GGh/mu_GG + A * (mu_GEh - mu_GGh*mu_GE/mu_GG)) + t(MU_GEM - MU_M*mu_GE - MU_GM*mu_GGE/mu_GG + D * (MU_EM - MU_GM*mu_GE/mu_GG)) %*% BETA_M + E %*% (A %*% t(MU_EM - MU_GM*mu_GE/mu_GG) - MU_Z%*%t(MU_M) - MU_GZ%*%t(MU_GM)/mu_GG + MU_ZM) %*% BETA_M - beta_E * EFF %*% (-mu_f*MU_W - MU_GW*mu_Gf/mu_GG + MU_fW + B %*% as.matrix(mu_Ef - mu_Gf*mu_GE/mu_GG)) - beta_E * EFF %*% C %*% (-mu_f*MU_Z - mu_Gf*MU_GZ/mu_GG + MU_fZ + A %*% as.matrix(mu_Ef - mu_Gf*mu_GE/mu_GG)) - beta_I * EFF %*% (-mu_Gh*MU_W - MU_GW*mu_GGh/mu_GG + MU_GhW + B %*% as.matrix(mu_GEh - 
mu_GGh*mu_GE/mu_GG)) - beta_I * EFF %*% C %*% (MU_GhZ - MU_Z*mu_Gh - MU_GZ*mu_GGh/mu_GG + A %*% as.matrix(mu_GEh - mu_GGh*mu_GE/mu_GG)) - EFF %*% ( -MU_W%*%t(MU_M) - MU_GW%*%t(MU_GM)/mu_GG + MU_WM + B %*% t(MU_EM - MU_GM*mu_GE/mu_GG) ) %*% BETA_M - EFF %*% C %*% ( A %*% t(MU_EM - MU_GM*mu_GE/mu_GG) - MU_Z%*%t(MU_M) - MU_GZ%*%t(MU_GM)/mu_GG + MU_ZM) %*% BETA_M alpha_I_denom <- EFF %*% ( MU_W*mu_GE + MU_GW*mu_GGE/mu_GG - MU_GEW + B * (mu_GGE*mu_GE/mu_GG - mu_GEE) ) + EFF %*% C %*% ( MU_Z*mu_GE + MU_GZ*mu_GGE/mu_GG - MU_GEZ + A * (mu_GGE*mu_GE/mu_GG - mu_GEE) ) - ( mu_GE^2 + mu_GGE^2/mu_GG - mu_GGEE + D * (mu_GGE*mu_GE/mu_GG - mu_GEE) ) - E %*% ( MU_Z*mu_GE + MU_GZ*mu_GGE/mu_GG - MU_GEZ + A * (mu_GGE*mu_GE/mu_GG - mu_GEE) ) alpha_I <- alpha_I_num / alpha_I_denom R <- beta_E * (-MU_W*mu_f - MU_GW*mu_Gf/mu_GG + MU_fW + B * (mu_Ef - mu_Gf*mu_GE/mu_GG)) + beta_E * C %*% (-mu_f*MU_Z - MU_GZ*mu_Gf/mu_GG + MU_fZ + A %*% as.matrix(mu_Ef - mu_Gf*mu_GE/mu_GG)) + beta_I * (-MU_W*mu_Gh - MU_GW*mu_GGh/mu_GG + MU_GhW + B * (mu_GEh - mu_GGh*mu_GE/mu_GG)) + beta_I * C %*% (MU_GhZ - MU_Z*mu_Gh - MU_GZ*mu_GGh/mu_GG + A * (mu_GEh - mu_GGh*mu_GE/mu_GG)) + ( B %*% t(MU_EM - MU_GM*mu_GE/mu_GG) - MU_W%*%t(MU_M) - MU_GW%*%t(MU_GM)/mu_GG + MU_WM) %*% BETA_M + C %*% ( A %*% t(MU_EM - MU_GM*mu_GE/mu_GG) - MU_Z%*%t(MU_M) - MU_GZ%*%t(MU_GM)/mu_GG + MU_ZM) %*% BETA_M + alpha_I * (MU_W*mu_GE + MU_GW*mu_GGE/mu_GG - MU_GEW + B * (mu_GGE*mu_GE/mu_GG - mu_GEE)) + as.numeric(alpha_I) * C %*% (MU_Z*mu_GE + MU_GZ*mu_GGE/mu_GG - MU_GEZ + A*(mu_GGE*mu_GE/mu_GG - mu_GEE)) ALPHA_W <- - solve_Q %*% R P <- beta_E * (-MU_Z*mu_f - MU_GZ*mu_Gf/mu_GG + MU_fZ + A * (mu_Ef - mu_Gf*mu_GE/mu_GG)) + beta_I * (MU_GhZ - MU_Z*mu_Gh - MU_GZ*mu_GGh/mu_GG + A * (mu_GEh - mu_GGh*mu_GE/mu_GG)) + alpha_I * (MU_Z*mu_GE + MU_GZ*mu_GGE/mu_GG - MU_GEZ + A * (mu_GGE*mu_GE/mu_GG - mu_GEE)) + ( A %*% t(MU_EM - MU_GM*mu_GE/mu_GG) - MU_Z%*%t(MU_M) - MU_GZ%*%t(MU_GM)/mu_GG + MU_ZM) %*% BETA_M + ( A %*% t(MU_GW*mu_GE/mu_GG - MU_EW) + MU_Z%*%t(MU_W) + MU_GZ%*%t(MU_GW)/mu_GG - MU_ZW) %*% ALPHA_W Bz_Az <- solve_O %*% P ALPHA_Z <- BETA_Z - solve_O %*% P alpha_E <- ( beta_E * (mu_Ef - mu_Gf*mu_GE/mu_GG) + beta_I * (mu_GEh - mu_GGh*mu_GE/mu_GG) + alpha_I * (mu_GGE*mu_GE/mu_GG - mu_GEE) + t(MU_EZ - MU_GZ*mu_GE/mu_GG) %*% Bz_Az + t(MU_EM - MU_GM*mu_GE/mu_GG) %*% BETA_M + t(MU_GW*mu_GE/mu_GG - MU_EW) %*% ALPHA_W ) / (mu_EE - mu_GE^2/mu_GG) Bg_Ag <- ( alpha_E*mu_GE - beta_E*mu_Gf + alpha_I*mu_GGE - beta_I*mu_GGh - t(MU_GZ) %*% Bz_Az + t(MU_GW) %*% ALPHA_W - t(MU_GM) %*% BETA_M ) / mu_GG alpha_G <- beta_G - Bg_Ag alpha_0 <- beta_0 + beta_E*mu_f + beta_I*mu_Gh - alpha_I*mu_GE + t(MU_Z) %*% Bz_Az + t(MU_M) %*% BETA_M - t(MU_W) %*% ALPHA_W return(list(alpha_list=list(alpha_0, alpha_G, alpha_E, alpha_I, ALPHA_Z, ALPHA_W), beta_list = list(beta_0, beta_G, beta_E, beta_I, BETA_Z, BETA_M), cov_list = list(mu_GG, mu_GE, mu_Gf, mu_Gh, MU_GZ, MU_GM, MU_GW, mu_EE, mu_Ef, MU_EZ, MU_EM, MU_EW, MU_fZ, MU_fW), cov_mat_list = list(MU_ZZ, MU_WW, MU_ZW, MU_WZ, MU_ZM, MU_WM), mu_list = list(mu_f, mu_h, rep(0,num_Z), MU_M, rep(0,num_W)), HOM_list = list(mu_GGE, mu_GGh, mu_GEE, mu_GEf, mu_GEh, MU_GEZ, MU_GEM, MU_GEW, MU_GhW, MU_GhZ, mu_GGEE, mu_GGEf, mu_GGEh))) }
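# Input shape note (read off the accessors at the top of the function, not
# from external documentation): beta_list unpacks as
#   list(beta_0, beta_G, beta_E, beta_I, BETA_Z, BETA_M)
# and rho_list as
#   list(rho_GE, rho_GZ, rho_EZ, rho_GW, rho_EW, rho_ZW),
# with prob_G thresholding the latent normal at w = qnorm(1 - prob_G).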
"sSummaryModel" <- function(x, maxorder = c(3,1,2), period = 12, criterion = "bic", method = "CSS"){ if(!is.matrix(x)) x <- as.matrix(x) Order <- NULL; Mean <- NULL for (i in 1:ncol(x)){ m1 <- sarimaSpec(x[,i],maxorder=maxorder,output=FALSE,criterion=criterion,method=method) Order <- rbind(Order,m1$order) Mean <- c(Mean,m1$include.mean) } nclass <- rep(0,6) k <- ncol(x) colnames(Order) <- c("p","d","q","P","D","Q") rownames(Order) <- paste("x",1:k,sep="") M1 <- M2 <- M3 <- M4 <- M5 <- M6 <- NULL istat <- c(1:k)[sapply(1:k, function(x) Order[x,2]==0 && Order[x,5]==0)] nsta <- length(istat) if(nsta > 0){ message("Number of stationary series: ",nsta,"\n") nclass[1] <- nsta order <- Order[istat,] M1 <- as.matrix(order) colnames(M1) <- colnames(Order) rownames(M1) <- rownames(Order)[istat] }else{ message("All series are non-stationary","\n") } idiff1 <- c(1:k)[sapply(1:k, function(x) Order[x,2]==1 && Order[x,5]==0)] ndiff1 <- length(idiff1) if(ndiff1 > 0){ order <- Order[idiff1,] message("Number of (d=1) and (D=0) series: ",ndiff1,"\n") M2 <- as.matrix(order) colnames(M2) <- colnames(Order) rownames(M2) <- rownames(Order)[idiff1] nclass[2] <- ndiff1 } idiff2 <- c(1:k)[sapply(1:k, function(x) Order[x,2]==2 && Order[x,5]==0)] ndiff2 <- length(idiff2) if(ndiff2 > 0){ order <- Order[idiff2,] message("Number of (d=2) and (D=0) series: ",ndiff2,"\n") M3 <- as.matrix(order) colnames(M3) <- colnames(Order) rownames(M3) <- rownames(Order)[idiff2] nclass[3] <- ndiff2 } idiff3 <- c(1:k)[sapply(1:k, function(x) Order[x,2]==0 && Order[x,5]==1)] ndiff3 <- length(idiff3) if(ndiff3 > 0){ order <- Order[idiff3,] order <- as.matrix(order) colnames(order) <- c("p","d","q","P","D","Q") rownames(order) <- rownames(idiff3) message("Number of (d=0) and (D=1) series: ",ndiff3,"\n") M4 <- order nclass[4] <- ndiff3 } idiff4 <- c(1:k)[sapply(1:k, function(x) Order[x,2]==1 && Order[x,5]==1)] ndiff4 <- length(idiff4) if(ndiff4 > 0){ order <- Order[idiff4,] order <- as.matrix(order) colnames(order) <- c("p","d","q","P","D","Q") rownames(order) <- rownames(idiff4) message("Number of (d=1) and (D=1) series: ",ndiff4,"\n") M5 <- order nclass[5] <- ndiff4 } idiff5 <- c(1:k)[sapply(1:k, function(x) Order[x,2]==2 && Order[x,5]==1)] ndiff5 <- length(idiff5) if(ndiff5 > 0){ order <- Order[idiff5,] order <- as.matrix(order) colnames(order) <- c("p","d","q","P","D","Q") rownames(order) <- rownames(idiff5) message("Number of (d=2) and (D=1) sries: ", ndiff5,"\n") M6 <- order nclass[6] <- ndiff5 } return(list(order=Order,Mean=Mean,M1=M1,M2=M2,M3=M3,M4=M4,M5=M5,M6=M6,nclass=nclass,data=x)) }
checkLM <- function(dat.array, path=NULL, prefix="", suffix=".ply", col="white", pt.size=NULL, alpha=1, begin=1, render=c("w","s"), point=c("s","p"), add=FALSE,meshlist=NULL, Rdata=FALSE, atlas=NULL, text.lm=FALSE) { k <- NULL marked <- NULL j <- 1 if (!Rdata) load <- file2mesh outid <- NULL point <- point[1] radius <- pt.size if (is.null(radius)) { if (point == "s") radius <- (cSize(dat.array[,,1])/sqrt(nrow(dat.array[,,1])))*(1/30) else radius <- 10 } size <- radius render <- render[1] arr <- FALSE point <- point[1] if (point == "s") { rendpoint <- spheres3d } else if (point == "p") { rendpoint <- points3d } else { stop("argument \"point\" must be \"s\" for spheres or \"p\" for points") } dimDat <- dim(dat.array) if (length(dimDat) == 3) { n <- dim(dat.array)[3] name <- dimnames(dat.array)[[3]] arr <- TRUE } else if (is.list(dat.array)) { n <- length(dat.array) name <- names(dat.array) } else { stop("data must be 3-dimensional array or a list") } i <- begin if (render=="w") { back <- front <- "lines" rend <- wire3d } else { back <- front <- "filled" } if (!add || rgl.cur()==0) open3d() if (!is.null(atlas)) { k <- dim(atlas$landmarks)[1] } meshnames <- paste(path,prefix,name,suffix,sep="") while (i <= n) { rgl.bringtotop() tmp.name <- meshnames[i] if (arr) landmarks <- dat.array[,,i] else landmarks <- dat.array[[i]] if (is.null(atlas)) { outid <- rendpoint(landmarks,radius=radius, size=size) if (text.lm) outid <- c(outid, text3d(landmarks, texts=paste(1:dim(landmarks)[1], sep=""), cex=1, adj=c(1,1.5))) if (!is.null(meshlist)) { tmpmesh <- meshlist[[i]] } else if (!is.null(path)) { if (!Rdata) { tmpmesh <- file2mesh(tmp.name,readcol=TRUE) } else { input <- load(tmp.name) tmp.name <- gsub(path,"",tmp.name) tmpmesh <- get(input) } } if (!is.null(meshlist) || !is.null(path)) { outid <- c(outid,shade3d(tmpmesh,col=col,alpha=alpha,back=back,front=front)) rm(tmpmesh) if (Rdata) rm(list=input) gc() } } else { atlas.tmp <- atlas atlas.tmp$mesh <- NULL atlas.tmp$landmarks <- landmarks[1:k,] atlas.tmp$patch <- landmarks[-c(1:k),] if (!is.null(meshlist)) { atlas.tmp$mesh <- meshlist[[i]] } if (!is.null(path) && is.null(meshlist)) { if (!Rdata) { atlas.tmp$mesh <- file2mesh(tmp.name) } else { input <- load(tmp.name) tmp.name <- gsub(path,"",tmp.name) atlas.tmp$mesh <- get(input) } } outid <- plotAtlas(atlas.tmp, add=TRUE, alpha=alpha, pt.size=radius, render=render, point=point, meshcol=col, legend=FALSE) } answer <- readline(paste("viewing #",i,tmp.name,"(return=next | m=mark | p=previous | s=stop)\n")) if (answer == "m") { marked[j] <- i j <- j+1 } else if (answer == "s") { i <- n+1 } else if (answer == "p") { i <- i-1 } else i <- i+1 rgl.pop(id=outid) } invisible(marked) }
print.crtpwr <- function(x, ...) { cat(paste0("\n", x[['overview']], "\n")) cat(paste0("\nPower Estimate (alpha = ", x[['alpha']], "):\n")) print(x[['power']], row.names = FALSE) cat("\n") }
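# Minimal sketch (mock values, not from the original source): print.crtpwr()
# reads only the 'overview', 'alpha', and 'power' elements, so a bare list
# carrying class "crtpwr" is enough to demonstrate the method.
mock <- structure(
  list(overview = "Power for a parallel cluster-randomized trial",
       alpha    = 0.05,
       power    = data.frame(power = 0.83)),
  class = "crtpwr")
print(mock)  # dispatches to print.crtpwr()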
wkt <- "POLYGON((13.26349675655365 52.53991761181831,18.36115300655365 54.11445544219924, 21.87677800655365 53.80418956368524,24.68927800655365 54.217364774722455,28.20490300655365 54.320018299365124,30.49005925655365 52.85948216284084,34.70880925655365 52.753220564427814, 35.93927800655365 50.46131871049754,39.63068425655365 49.55761261299145,40.86115300655365 46.381388009130845,34.00568425655365 45.279102926537,33.30255925655365 48.636868465271846, 30.13849675655365 49.78513301801265,28.38068425655365 47.2236377039631,29.78693425655365 44.6572866068524,27.67755925655365 42.62220075124676,23.10724675655365 43.77542058000212, 24.51349675655365 47.10412345120368,26.79865300655365 49.55761261299145,23.98615300655365 52.00209943876426,23.63459050655365 49.44345313705238,19.41584050655365 47.580567827212114, 19.59162175655365 44.90682206053508,20.11896550655365 42.36297154876359,22.93146550655365 40.651849782081555,25.56818425655365 39.98171166226459,29.61115300655365 40.78507856230178, 32.95099675655365 40.38459278067577,32.95099675655365 37.37491910393631,26.27130925655365 33.65619609886799,22.05255925655365 36.814081996401605,18.71271550655365 36.1072176729021, 18.53693425655365 39.16878677351903,15.37287175655365 38.346355762190846,15.19709050655365 41.578843777436326,12.56037175655365 41.050735748143424,12.56037175655365 44.02872991212046, 15.19709050655365 45.52594200494078,16.42755925655365 48.05271546733352,17.48224675655365 48.86865641518059,10.62677800655365 47.817178329053135,9.57209050655365 44.154980365192, 8.16584050655365 40.51835445724746,6.05646550655365 36.53210972067291,0.9588092565536499 31.583640057148145,-5.54509699344635 35.68001485298146,-6.77556574344635 40.51835445724746, -9.41228449344635 38.346355762190846,-12.40056574344635 35.10683619158607,-15.74040949344635 38.07010978950028,-14.68572199344635 41.31532459432774,-11.69744074344635 43.64836179231387, -8.88494074344635 42.88035509418534,-4.31462824344635 43.52103366008421,-8.35759699344635 47.2236377039631,-8.18181574344635 50.12441989397795,-5.01775324344635 49.55761261299145, -2.73259699344635 46.25998980446569,-1.67790949344635 44.154980365192,-1.32634699344635 39.30493590580802,2.18927800655365 41.44721797271696,4.47443425655365 43.26556960420879, 2.18927800655365 46.7439668697322,1.83771550655365 50.3492841273576,6.93537175655365 49.671505849335254,5.00177800655365 52.32557322466785,7.81427800655365 51.67627099802223, 7.81427800655365 54.5245591562317,10.97834050655365 51.89375191441792,10.97834050655365 55.43241335888528,13.26349675655365 52.53991761181831))" wkt <- gsub("\n", " ", wkt) test_that("wkt_parse", { skip_if_not_installed("sf") aa <- wkt_parse(wkt, geom_big = "bbox") expect_is(aa, "character") expect_match(aa, "POLYGON") expect_equal(length(strextracta(aa, ",")), 4) bb <- wkt_parse(wkt, geom_big = "axe") expect_is(bb, "character") expect_equal(length(bb), 4) for (i in bb) expect_is(i, "character") for (i in bb) expect_match(i, "POLYGON") cc <- wkt_parse(wkt, geom_big = "axe", geom_size = 60) expect_is(cc, "character") expect_equal(length(cc), 1) for (i in cc) expect_is(i, "character") for (i in cc) expect_match(i, "POLYGON") dd <- wkt_parse(wkt, geom_big = "axe", geom_size = 5) expect_is(dd, "character") expect_equal(length(dd), 50) for (i in dd) expect_is(i, "character") for (i in dd) expect_match(i, "POLYGON") }) test_that("wkt_parse fails well", { expect_error(wkt_parse(wkt), "missing") expect_error(wkt_parse(wkt, 5), "must be one of") expect_error(wkt_parse(wkt, "bbox", geom_size = "adf"), 
"must be of class") expect_error(wkt_parse(wkt, "bbox", geom_n = "adf"), "must be of class") })
expected <- eval(parse(text="quote(quote(list(NULL, c(\"time\", \"status\"))))")); test(id=0, code={ argv <- eval(parse(text="list(list(quote(quote), list(NULL, c(\"time\", \"status\"))))")); do.call(`as.call`, argv); }, o=expected);
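# Context (not in the original test): as.call() turns a list whose first
# element names a function into an unevaluated call, so do.call(as.call, argv)
# above builds quote(quote(list(NULL, c("time", "status")))).
as.call(list(quote(sum), 1, 2))  # the unevaluated call sum(1, 2)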